1 : /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=4 sw=4 et tw=99:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 : * May 28, 2008.
19 : *
20 : * The Initial Developer of the Original Code is
21 : * Brendan Eich <brendan@mozilla.org>
22 : *
23 : * Contributor(s):
24 : * David Mandelin <dmandelin@mozilla.com>
25 : *
26 : * Alternatively, the contents of this file may be used under the terms of
27 : * either of the GNU General Public License Version 2 or later (the "GPL"),
28 : * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
29 : * in which case the provisions of the GPL or the LGPL are applicable instead
30 : * of those above. If you wish to allow use of your version of this file only
31 : * under the terms of either the GPL or the LGPL, and not to allow others to
32 : * use your version of this file under the terms of the MPL, indicate your
33 : * decision by deleting the provisions above and replace them with the notice
34 : * and other provisions required by the GPL or the LGPL. If you do not delete
35 : * the provisions above, a recipient may use your version of this file under
36 : * the terms of any one of the MPL, the GPL or the LGPL.
37 : *
38 : * ***** END LICENSE BLOCK ***** */
39 : #include "PolyIC.h"
40 : #include "StubCalls.h"
41 : #include "CodeGenIncludes.h"
42 : #include "StubCalls-inl.h"
43 : #include "BaseCompiler.h"
44 : #include "assembler/assembler/LinkBuffer.h"
45 : #include "TypedArrayIC.h"
46 : #include "jsscope.h"
47 : #include "jsnum.h"
48 : #include "jstypedarray.h"
49 : #include "jsatominlines.h"
50 : #include "jsobjinlines.h"
51 : #include "jsscopeinlines.h"
52 : #include "jsinterpinlines.h"
53 : #include "jsautooplen.h"
54 :
55 : #include "vm/ScopeObject-inl.h"
56 : #include "vm/StringObject-inl.h"
57 :
58 : #if defined JS_POLYIC
59 :
60 : using namespace js;
61 : using namespace js::mjit;
62 : using namespace js::mjit::ic;
63 :
// Shorthands for the JSC macro-assembler types used throughout this file.
64 : typedef JSC::FunctionPtr FunctionPtr;
65 : typedef JSC::MacroAssembler::RegisterID RegisterID;
66 : typedef JSC::MacroAssembler::Jump Jump;
67 : typedef JSC::MacroAssembler::Imm32 Imm32;
68 :
69 : /* Rough over-estimate of how much memory we need to unprotect. */
70 : static const uint32_t INLINE_PATH_LENGTH = 64;
71 :
72 : // Helper class to simplify LinkBuffer usage in PIC stub generators.
73 : // This guarantees correct OOM and refcount handling for buffers while they
74 : // are instantiated and rooted.
75 : class PICLinker : public LinkerHelper
76 261473 : {
// Owning polymorphic IC; newly allocated executable pools are registered
// with it so they are freed when the IC is purged.
// NOTE: this declaration was corrupted by HTML entity decoding
// ("&ic;" -> U+2063 INVISIBLE SEPARATOR); restored to the reference member.
77 : ic::BasePolyIC &ic;
78 :
79 : public:
80 261473 : PICLinker(Assembler &masm, ic::BasePolyIC &ic)
81 261473 : : LinkerHelper(masm, JSC::METHOD_CODE), ic(ic)
82 261473 : { }
83 :
// Allocate the executable pool and hand ownership to the IC. On failure
// the pool is released, OOM is reported, and false is returned.
84 261473 : bool init(JSContext *cx) {
85 261473 : JSC::ExecutablePool *pool = LinkerHelper::init(cx);
86 261473 : if (!pool)
87 0 : return false;
88 261473 : if (!ic.addPool(cx, pool)) {
89 0 : pool->release();
90 0 : js_ReportOutOfMemory(cx);
91 0 : return false;
92 : }
93 261473 : return true;
94 : }
95 : };
96 :
// Common base for all PIC stub compilers: carries the VM frame, script,
// the IC being specialized, and the generic slow-path stub to fall back to.
97 : class PICStubCompiler : public BaseCompiler
98 : {
99 : protected:
100 : const char *type;
101 : VMFrame &f;
102 : JSScript *script;
103 : ic::PICInfo &pic;
104 : void *stub;
// GC counter snapshot taken at construction; see hadGC() below.
105 : uint64_t gcNumber;
106 :
107 : public:
108 : bool canCallHook;
109 :
110 434601 : PICStubCompiler(const char *type, VMFrame &f, JSScript *script, ic::PICInfo &pic, void *stub)
111 : : BaseCompiler(f.cx), type(type), f(f), script(script), pic(pic), stub(stub),
112 434601 : gcNumber(f.cx->runtime->gcNumber), canCallHook(pic.canCallHook)
113 434601 : { }
114 :
115 0 : LookupStatus error() {
116 : /*
117 : * N.B. Do not try to disable the IC, we do not want to guard on
118 : * whether the IC has been recompiled when propagating errors.
119 : */
120 0 : return Lookup_Error;
121 : }
122 :
123 0 : LookupStatus error(JSContext *cx) {
124 0 : return error();
125 : }
126 :
127 45745 : LookupStatus disable(const char *reason) {
128 45745 : return disable(f.cx, reason);
129 : }
130 :
131 46270 : LookupStatus disable(JSContext *cx, const char *reason) {
132 46270 : return pic.disable(f, reason, stub);
133 : }
134 :
135 10805 : LookupStatus disable(VMFrame &f, const char *reason) {
136 10805 : return pic.disable(f, reason, stub);
137 : }
138 :
// True if a GC ran since this compiler was constructed (shapes/objects
// observed earlier may have been invalidated).
139 198274 : bool hadGC() {
140 198274 : return gcNumber != f.cx->runtime->gcNumber;
141 : }
142 :
143 : protected:
144 309452 : void spew(const char *event, const char *op) {
145 : #ifdef JS_METHODJIT_SPEW
146 : JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n",
147 309452 : type, event, op, script->filename, CurrentLine(cx));
148 : #endif
149 309452 : }
150 : };
151 :
// Emit guards that fail (append a mismatch jump) if any object on the proto
// chain from |obj| up to (but not including) |holder| could have had its
// prototype changed: objects with uncacheable protos get an explicit
// proto-identity or type-identity check. Returns false only on OOM.
152 : static bool
153 37747 : GeneratePrototypeGuards(JSContext *cx, Vector<JSC::MacroAssembler::Jump,8> &mismatches, Assembler &masm,
154 : JSObject *obj, JSObject *holder,
155 : JSC::MacroAssembler::RegisterID objReg,
156 : JSC::MacroAssembler::RegisterID scratchReg)
157 : {
158 : typedef JSC::MacroAssembler::Address Address;
159 : typedef JSC::MacroAssembler::AbsoluteAddress AbsoluteAddress;
160 : typedef JSC::MacroAssembler::ImmPtr ImmPtr;
161 : typedef JSC::MacroAssembler::Jump Jump;
162 :
// The receiver itself: guard on its type's proto field matching the
// currently observed prototype.
163 37747 : if (obj->hasUncacheableProto()) {
164 12 : masm.loadPtr(Address(objReg, JSObject::offsetOfType()), scratchReg);
165 : Jump j = masm.branchPtr(Assembler::NotEqual,
166 : Address(scratchReg, offsetof(types::TypeObject, proto)),
167 12 : ImmPtr(obj->getProto()));
168 12 : if (!mismatches.append(j))
169 0 : return false;
170 : }
171 :
172 37747 : JSObject *pobj = obj->getProto();
173 89244 : while (pobj != holder) {
174 13750 : if (pobj->hasUncacheableProto()) {
175 6271 : Jump j;
176 6271 : if (pobj->hasSingletonType()) {
// Singleton objects keep their type; check the proto slot directly.
177 0 : types::TypeObject *type = pobj->getType(cx);
178 : j = masm.branchPtr(Assembler::NotEqual,
179 : AbsoluteAddress(&type->proto),
180 0 : ImmPtr(pobj->getProto()),
181 0 : scratchReg);
182 : } else {
// Non-singletons get a new type when their proto changes, so a
// type-identity check suffices.
183 : j = masm.branchPtr(Assembler::NotEqual,
184 6271 : AbsoluteAddress(pobj->addressOfType()),
185 6271 : ImmPtr(pobj->type()),
186 12542 : scratchReg);
187 : }
188 6271 : if (!mismatches.append(j))
189 0 : return false;
190 : }
191 13750 : pobj = pobj->getProto();
192 : }
193 :
194 37747 : return true;
195 : }
196 :
// Compiler for SETPROP/SETNAME/SETMETHOD polymorphic inline caches: patches
// the inline fast path once, then chains shape-guarded out-of-line stubs.
197 : class SetPropCompiler : public PICStubCompiler
198 : {
199 : JSObject *obj;
200 : PropertyName *name;
201 : int lastStubSecondShapeGuard;
202 :
203 : public:
204 12998 : SetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, PropertyName *name,
205 : VoidStubPIC stub)
206 : : PICStubCompiler("setprop", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
207 12998 : obj(obj), name(name), lastStubSecondShapeGuard(pic.secondShapeGuard)
208 12998 : { }
209 :
// Restore the IC to its pristine state: clear the inline shape guard data,
// point the guard back at the slow path, and relink the slow call to the
// generic ic::SetProp stub.
210 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
211 : {
212 : SetPropLabels &labels = pic.setPropLabels();
213 : repatcher.repatchLEAToLoadPtr(labels.getDslotsLoad(pic.fastPathRejoin, pic.u.vr));
214 : repatcher.repatch(labels.getInlineShapeData(pic.fastPathStart, pic.shapeGuard),
215 : NULL);
216 : repatcher.relink(labels.getInlineShapeJump(pic.fastPathStart.labelAtOffset(pic.shapeGuard)),
217 : pic.slowPathStart);
218 :
219 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::SetProp));
220 : repatcher.relink(pic.slowPathCall, target);
221 : }
222 :
// Patch the inline fast path in place so it stores directly into the slot
// named by |shape| (fixed or dynamic) under the object's current shape.
223 6029 : LookupStatus patchInline(const Shape *shape)
224 : {
225 6029 : JS_ASSERT(!pic.inlinePathPatched);
226 6029 : JaegerSpew(JSpew_PICs, "patch setprop inline at %p\n", pic.fastPathStart.executableAddress());
227 :
228 12058 : Repatcher repatcher(f.chunk());
229 6029 : SetPropLabels &labels = pic.setPropLabels();
230 :
231 : int32_t offset;
232 6029 : if (obj->isFixedSlot(shape->slot())) {
233 4929 : CodeLocationInstruction istr = labels.getDslotsLoad(pic.fastPathRejoin, pic.u.vr);
234 4929 : repatcher.repatchLoadPtrToLEA(istr);
235 :
236 : //
237 : // We've patched | mov dslots, [obj + DSLOTS_OFFSET]
238 : // To: | lea fslots, [obj + DSLOTS_OFFSET]
239 : //
240 : // Because the offset is wrong, it's necessary to correct it
241 : // below.
242 : //
243 4929 : int32_t diff = int32_t(JSObject::getFixedSlotOffset(0)) -
244 4929 : int32_t(JSObject::offsetOfSlots());
245 4929 : JS_ASSERT(diff != 0);
246 4929 : offset = (int32_t(shape->slot()) * sizeof(Value)) + diff;
247 : } else {
248 1100 : offset = obj->dynamicSlotIndex(shape->slot()) * sizeof(Value);
249 : }
250 :
251 6029 : repatcher.repatch(labels.getInlineShapeData(pic.fastPathStart, pic.shapeGuard),
252 12058 : obj->lastProperty());
253 6029 : repatcher.patchAddressOffsetForValueStore(labels.getInlineValueStore(pic.fastPathRejoin),
254 12058 : offset, pic.u.vr.isTypeKnown());
255 :
256 6029 : pic.inlinePathPatched = true;
257 :
258 6029 : return Lookup_Cacheable;
259 : }
260 :
// Offset of the previous stub's secondary guard jump, or 0 if none.
261 5429 : int getLastStubSecondShapeGuard() const {
262 5429 : return lastStubSecondShapeGuard ? POST_INST_OFFSET(lastStubSecondShapeGuard) : 0;
263 : }
264 :
// Chain the most recently generated path (inline path or last stub) so its
// guard-failure jumps land on the new stub at |cs|.
265 5429 : void patchPreviousToHere(CodeLocationLabel cs)
266 : {
267 10858 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
268 5429 : CodeLocationLabel label = pic.lastPathStart();
269 :
270 : // Patch either the inline fast path or a generated stub. The stub
271 : // omits the prefix of the inline fast path that loads the shape, so
272 : // the offsets are different.
273 5429 : if (pic.stubsGenerated) {
274 743 : repatcher.relink(pic.setPropLabels().getStubShapeJump(label), cs);
275 : } else {
276 4686 : CodeLocationLabel shapeGuard = label.labelAtOffset(pic.shapeGuard);
277 4686 : repatcher.relink(pic.setPropLabels().getInlineShapeJump(shapeGuard), cs);
278 : }
279 5429 : if (int secondGuardOffset = getLastStubSecondShapeGuard())
280 479 : repatcher.relink(label.jumpAtOffset(secondGuardOffset), cs);
281 5429 : }
282 :
// Generate an out-of-line stub guarded on |initialShape|. |adding| is true
// when the store adds a new property (object transitions to |shape|);
// otherwise an existing slot or call-object variable is overwritten.
283 5429 : LookupStatus generateStub(const Shape *initialShape, const Shape *shape, bool adding)
284 : {
285 5429 : if (hadGC())
286 0 : return Lookup_Uncacheable;
287 :
288 : /* Exits to the slow path. */
289 10858 : Vector<Jump, 8> slowExits(cx);
290 10858 : Vector<Jump, 8> otherGuards(cx);
291 :
292 10858 : Assembler masm;
293 :
294 : // Shape guard.
295 5429 : if (pic.shapeNeedsRemat()) {
296 429 : masm.loadShape(pic.objReg, pic.shapeReg);
297 429 : pic.shapeRegHasBaseShape = true;
298 : }
299 :
300 5429 : Label start = masm.label();
301 : Jump shapeGuard = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
302 5429 : ImmPtr(initialShape));
303 :
304 5429 : Label stubShapeJumpLabel = masm.label();
305 :
306 5429 : pic.setPropLabels().setStubShapeJump(masm, start, stubShapeJumpLabel);
307 :
308 5429 : if (pic.typeMonitored) {
309 : /*
310 : * Inference does not know the type of the object being updated,
311 : * and we need to make sure that the updateMonitoredTypes() call
312 : * covers this stub, i.e. we will be writing to an object with the
313 : * same type. Add a type guard in addition to the shape guard.
314 : * Note: it is possible that this test gets a spurious hit if the
315 : * object has a lazy type, but in such cases no analyzed scripts
316 : * depend on the object and we will reconstruct its type from the
317 : * value being written here.
318 : */
319 : Jump typeGuard = masm.branchPtr(Assembler::NotEqual,
320 660 : Address(pic.objReg, JSObject::offsetOfType()),
321 1320 : ImmPtr(obj->getType(cx)));
322 660 : if (!otherGuards.append(typeGuard))
323 0 : return error();
324 : }
325 :
326 5429 : JS_ASSERT_IF(!shape->hasDefaultSetter(), obj->isCall());
327 :
328 5429 : MaybeJump skipOver;
329 :
330 5429 : if (adding) {
331 3434 : JS_ASSERT(shape->hasSlot());
332 3434 : pic.shapeRegHasBaseShape = false;
333 :
334 3434 : if (!GeneratePrototypeGuards(cx, otherGuards, masm, obj, NULL,
335 3434 : pic.objReg, pic.shapeReg)) {
336 0 : return error();
337 : }
338 :
339 : /* Emit shape guards for the object's prototype chain. */
340 3434 : JSObject *proto = obj->getProto();
341 3434 : RegisterID lastReg = pic.objReg;
342 13570 : while (proto) {
343 6702 : masm.loadPtr(Address(lastReg, JSObject::offsetOfType()), pic.shapeReg);
344 6702 : masm.loadPtr(Address(pic.shapeReg, offsetof(types::TypeObject, proto)), pic.shapeReg);
345 6702 : Jump protoGuard = masm.guardShape(pic.shapeReg, proto);
346 6702 : if (!otherGuards.append(protoGuard))
347 0 : return error();
348 :
349 6702 : proto = proto->getProto();
350 6702 : lastReg = pic.shapeReg;
351 : }
352 :
353 3434 : if (pic.kind == ic::PICInfo::SETMETHOD) {
354 : /*
355 : * Guard that the value is equal to the shape's method.
356 : * We already know it is a function, so test the payload.
357 : */
358 0 : JS_ASSERT(shape->isMethod());
359 0 : JSObject *funobj = obj->nativeGetMethod(shape);
360 0 : if (pic.u.vr.isConstant()) {
361 0 : JS_ASSERT(funobj == &pic.u.vr.value().toObject());
362 : } else {
363 : Jump mismatchedFunction =
364 0 : masm.branchPtr(Assembler::NotEqual, pic.u.vr.dataReg(), ImmPtr(funobj));
365 0 : if (!slowExits.append(mismatchedFunction))
366 0 : return error();
367 : }
368 : }
369 :
370 3434 : if (obj->isFixedSlot(shape->slot())) {
371 : Address address(pic.objReg,
372 2913 : JSObject::getFixedSlotOffset(shape->slot()));
373 2913 : masm.storeValue(pic.u.vr, address);
374 : } else {
375 : /*
376 : * Note: the guard on the initial shape determines the object's
377 : * number of fixed slots and slot span, which in turn determine
378 : * the number of dynamic slots allocated for the object.
379 : * We don't need to check capacity here.
380 : */
381 521 : masm.loadPtr(Address(pic.objReg, JSObject::offsetOfSlots()), pic.shapeReg);
382 521 : Address address(pic.shapeReg, obj->dynamicSlotIndex(shape->slot()) * sizeof(Value));
383 521 : masm.storeValue(pic.u.vr, address);
384 : }
385 :
386 3434 : JS_ASSERT(shape == obj->lastProperty());
387 3434 : JS_ASSERT(shape != initialShape);
388 :
389 : /* Write the object's new shape. */
390 3434 : masm.storePtr(ImmPtr(shape), Address(pic.objReg, JSObject::offsetOfShape()));
391 1995 : } else if (shape->hasDefaultSetter()) {
392 1047 : JS_ASSERT(!shape->isMethod());
393 1047 : Address address = masm.objPropAddress(obj, pic.objReg, shape->slot());
394 1047 : masm.storeValue(pic.u.vr, address);
395 : } else {
396 : // \ / In general, two function objects with different JSFunctions
397 : // # can have the same shape, thus we must not rely on the identity
398 : // >--+--< of 'fun' remaining the same. However, since:
399 : // ||| 1. the shape includes all arguments and locals and their setters
400 : // \\ V and getters, and
401 : // \===/ 2. arguments and locals have different getters
402 : // then we can rely on fun->nargs remaining invariant.
403 948 : JSFunction *fun = obj->asCall().getCalleeFunction();
404 948 : uint16_t slot = uint16_t(shape->shortid());
405 :
406 : /* Guard that the call object has a frame. */
407 948 : masm.loadObjPrivate(pic.objReg, pic.shapeReg, obj->numFixedSlots());
408 948 : Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
409 :
// Live frame: store straight into the frame's formal arg or fixed slot.
410 : {
411 948 : Address addr(pic.shapeReg, shape->setterOp() == CallObject::setArgOp
412 123 : ? StackFrame::offsetOfFormalArg(fun, slot)
413 1071 : : StackFrame::offsetOfFixed(slot));
414 948 : masm.storeValue(pic.u.vr, addr);
415 948 : skipOver = masm.jump();
416 : }
417 :
// Escaped frame: store into the call object's own reserved slots.
418 948 : escapedFrame.linkTo(masm.label(), &masm);
419 : {
420 948 : if (shape->setterOp() == CallObject::setVarOp)
421 825 : slot += fun->nargs;
422 :
423 948 : slot += CallObject::RESERVED_SLOTS;
424 948 : Address address = masm.objPropAddress(obj, pic.objReg, slot);
425 :
426 948 : masm.storeValue(pic.u.vr, address);
427 : }
428 :
429 948 : pic.shapeRegHasBaseShape = false;
430 : }
431 :
432 5429 : Jump done = masm.jump();
433 :
434 : // Common all secondary guards into one big exit.
435 5429 : MaybeJump slowExit;
436 5429 : if (otherGuards.length()) {
437 14310 : for (Jump *pj = otherGuards.begin(); pj != otherGuards.end(); ++pj)
438 10319 : pj->linkTo(masm.label(), &masm);
439 3991 : slowExit = masm.jump();
440 3991 : pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
441 : } else {
442 1438 : pic.secondShapeGuard = 0;
443 : }
444 :
445 5429 : pic.updatePCCounters(f, masm);
446 :
447 10858 : PICLinker buffer(masm, pic);
448 5429 : if (!buffer.init(cx))
449 0 : return error();
450 :
451 10858 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
452 5429 : !buffer.verifyRange(f.chunk())) {
453 0 : return disable("code memory is out of range");
454 : }
455 :
456 5429 : buffer.link(shapeGuard, pic.slowPathStart);
457 5429 : if (slowExit.isSet())
458 3991 : buffer.link(slowExit.get(), pic.slowPathStart);
459 5429 : for (Jump *pj = slowExits.begin(); pj != slowExits.end(); ++pj)
460 0 : buffer.link(*pj, pic.slowPathStart);
461 5429 : buffer.link(done, pic.fastPathRejoin);
462 5429 : if (skipOver.isSet())
463 948 : buffer.link(skipOver.get(), pic.fastPathRejoin);
464 5429 : CodeLocationLabel cs = buffer.finalize(f);
465 : JaegerSpew(JSpew_PICs, "generate setprop stub %p %p %d at %p\n",
466 : (void*)&pic,
467 : (void*)initialShape,
468 : pic.stubsGenerated,
469 5429 : cs.executableAddress());
470 :
471 : // This function can patch either the inline fast path or a generated
472 : // stub. The stub omits the prefix of the inline fast path that loads
473 : // the shape, so the offsets are different.
474 5429 : patchPreviousToHere(cs);
475 :
476 5429 : pic.stubsGenerated++;
477 5429 : pic.updateLastPath(buffer, start);
478 :
479 5429 : if (pic.stubsGenerated == MAX_PIC_STUBS)
480 12 : disable("max stubs reached");
481 :
482 5429 : return Lookup_Cacheable;
483 : }
484 :
// Propagate the possible RHS types of this monitored store into the
// property's type set. Returns false if the update failed or triggered a
// recompilation (in which case the caller must not generate a stub).
485 513 : bool updateMonitoredTypes()
486 : {
487 513 : JS_ASSERT(pic.typeMonitored);
488 :
489 513 : RecompilationMonitor monitor(cx);
490 513 : jsid id = ATOM_TO_JSID(name);
491 :
492 513 : if (!obj->getType(cx)->unknownProperties()) {
493 938 : types::AutoEnterTypeInference enter(cx);
494 469 : types::TypeSet *types = obj->getType(cx)->getProperty(cx, types::MakeTypeId(cx, id), true);
495 469 : if (!types)
496 0 : return false;
497 938 : pic.rhsTypes->addSubset(cx, types);
498 : }
499 :
500 513 : return !monitor.recompiled();
501 : }
502 :
// Main entry point: decide whether this property store is cacheable and
// either patch the inline path, generate a stub, or disable the IC with a
// diagnostic reason.
503 12998 : LookupStatus update()
504 : {
505 12998 : JS_ASSERT(pic.hit);
506 :
507 12998 : if (obj->isDenseArray())
508 71 : return disable("dense array");
509 12927 : if (!obj->isNative())
510 148 : return disable("non-native");
511 12779 : if (obj->watched())
512 47 : return disable("watchpoint");
513 :
514 12732 : Class *clasp = obj->getClass();
515 :
516 12732 : if (clasp->setProperty != JS_StrictPropertyStub)
517 502 : return disable("set property hook");
518 12230 : if (clasp->ops.lookupProperty)
519 4 : return disable("ops lookup property hook");
520 12226 : if (clasp->ops.setProperty)
521 0 : return disable("ops set property hook");
522 :
523 : JSObject *holder;
524 12226 : JSProperty *prop = NULL;
525 :
526 : /* lookupProperty can trigger recompilations. */
527 12226 : RecompilationMonitor monitor(cx);
528 12226 : if (!obj->lookupProperty(cx, name, &holder, &prop))
529 0 : return error();
530 12226 : if (monitor.recompiled())
531 0 : return Lookup_Uncacheable;
532 :
533 : /* If the property exists but is on a prototype, treat as addprop. */
534 12226 : if (prop && holder != obj) {
535 1059 : const Shape *shape = (const Shape *) prop;
536 :
537 1059 : if (!holder->isNative())
538 0 : return disable("non-native holder");
539 :
540 1059 : if (!shape->writable())
541 0 : return disable("readonly");
542 1059 : if (!shape->hasDefaultSetter() || !shape->hasDefaultGetter())
543 496 : return disable("getter/setter in prototype");
544 563 : if (shape->hasShortID())
545 0 : return disable("short ID in prototype");
546 563 : if (!shape->hasSlot())
547 0 : return disable("missing slot");
548 :
549 563 : prop = NULL;
550 : }
551 :
552 11730 : if (!prop) {
553 : /* Adding a property to the object. */
554 3627 : if (obj->isDelegate())
555 6 : return disable("delegate");
556 3621 : if (!obj->isExtensible())
557 0 : return disable("not extensible");
558 :
559 3621 : if (clasp->addProperty != JS_PropertyStub)
560 4 : return disable("add property hook");
561 3617 : if (clasp->ops.defineProperty)
562 0 : return disable("ops define property hook");
563 :
564 : /*
565 : * When adding a property we need to check shapes along the entire
566 : * prototype chain to watch for an added setter.
567 : */
568 3617 : JSObject *proto = obj;
569 17873 : while (proto) {
570 10643 : if (!proto->isNative())
571 4 : return disable("non-native proto");
572 10639 : proto = proto->getProto();
573 : }
574 :
575 3613 : const Shape *initialShape = obj->lastProperty();
576 3613 : uint32_t slots = obj->numDynamicSlots();
577 :
578 3613 : unsigned flags = 0;
579 3613 : PropertyOp getter = clasp->getProperty;
580 :
581 3613 : if (pic.kind == ic::PICInfo::SETMETHOD) {
582 0 : if (!obj->canHaveMethodBarrier())
583 0 : return disable("can't have method barrier");
584 :
585 0 : JSObject *funobj = &f.regs.sp[-1].toObject();
586 0 : if (funobj->toFunction()->isClonedMethod())
587 0 : return disable("mismatched function");
588 :
589 0 : flags |= Shape::METHOD;
590 : }
591 :
592 : /*
593 : * Define the property but do not set it yet. For setmethod,
594 : * populate the slot to satisfy the method invariant (in case we
595 : * hit an early return below).
596 : */
597 : const Shape *shape =
598 : obj->putProperty(cx, name, getter, clasp->setProperty,
599 3613 : SHAPE_INVALID_SLOT, JSPROP_ENUMERATE, flags, 0);
600 3613 : if (!shape)
601 0 : return error();
602 3613 : if (flags & Shape::METHOD)
603 0 : obj->nativeSetSlot(shape->slot(), f.regs.sp[-1]);
604 :
605 3613 : if (monitor.recompiled())
606 0 : return Lookup_Uncacheable;
607 :
608 : /*
609 : * Test after calling putProperty since it can switch obj into
610 : * dictionary mode, specifically if the shape tree ancestor line
611 : * exceeds PropertyTree::MAX_HEIGHT.
612 : */
613 3613 : if (obj->inDictionaryMode())
614 4 : return disable("dictionary");
615 :
616 3609 : if (!shape->hasDefaultSetter())
617 0 : return disable("adding non-default setter");
618 3609 : if (!shape->hasSlot())
619 0 : return disable("adding invalid slot");
620 :
621 : /*
622 : * Watch for cases where the object reallocated its slots when
623 : * adding the property, and disable the PIC. Otherwise we will
624 : * keep generating identical PICs as side exits are taken on the
625 : * capacity checks. Alternatively, we could avoid the disable
626 : * and just not generate a stub in case there are multiple shapes
627 : * that can flow here which don't all require reallocation.
628 : * Doing this would cause us to walk down this same update path
629 : * every time a reallocation is needed, however, which will
630 : * usually be a slowdown even if there *are* other shapes that
631 : * don't realloc.
632 : */
633 3609 : if (obj->numDynamicSlots() != slots)
634 175 : return disable("insufficient slot capacity");
635 :
636 3434 : if (pic.typeMonitored && !updateMonitoredTypes())
637 0 : return Lookup_Uncacheable;
638 :
639 3434 : return generateStub(initialShape, shape, true);
640 : }
641 :
642 8103 : const Shape *shape = (const Shape *) prop;
643 8103 : if (pic.kind == ic::PICInfo::SETMETHOD && !shape->isMethod())
644 0 : return disable("set method on non-method shape");
645 8103 : if (!shape->writable())
646 2 : return disable("readonly");
647 8101 : if (shape->isMethod())
648 0 : return disable("method");
649 :
650 8101 : if (shape->hasDefaultSetter()) {
651 7076 : if (!shape->hasSlot())
652 0 : return disable("invalid slot");
653 7076 : if (pic.typeMonitored && !updateMonitoredTypes())
654 0 : return Lookup_Uncacheable;
655 : } else {
656 1025 : if (shape->hasSetterValue())
657 46 : return disable("scripted setter");
658 1835 : if (shape->setterOp() != CallObject::setArgOp &&
659 856 : shape->setterOp() != CallObject::setVarOp) {
660 31 : return disable("setter");
661 : }
662 948 : JS_ASSERT(obj->isCall());
663 948 : if (pic.typeMonitored) {
664 : /*
665 : * Update the types of the locals/args in the script according
666 : * to the possible RHS types of the assignment. Note that the
667 : * shape guards we have performed do not by themselves
668 : * guarantee that future call objects hit will be for the same
669 : * script. We also depend on the fact that the scope chains hit
670 : * at the same bytecode are all isomorphic: the same scripts,
671 : * in the same order (though the properties on their call
672 : * objects may differ due to eval(), DEFFUN, etc.).
673 : */
674 147 : RecompilationMonitor monitor(cx);
675 147 : JSFunction *fun = obj->asCall().getCalleeFunction();
676 147 : JSScript *script = fun->script();
677 147 : uint16_t slot = uint16_t(shape->shortid());
678 147 : if (!script->ensureHasTypes(cx))
679 0 : return error();
680 : {
681 294 : types::AutoEnterTypeInference enter(cx);
682 147 : if (shape->setterOp() == CallObject::setArgOp)
683 9 : pic.rhsTypes->addSubset(cx, types::TypeScript::ArgTypes(script, slot));
684 : else
685 138 : pic.rhsTypes->addSubset(cx, types::TypeScript::LocalTypes(script, slot));
686 : }
687 147 : if (monitor.recompiled())
688 0 : return Lookup_Uncacheable;
689 : }
690 : }
691 :
692 8024 : JS_ASSERT(obj == holder);
693 27879 : if (!pic.inlinePathPatched &&
694 7387 : shape->hasDefaultSetter() &&
695 6439 : !pic.typeMonitored &&
696 6029 : !obj->isDenseArray()) {
697 6029 : return patchInline(shape);
698 : }
699 :
700 1995 : return generateStub(obj->lastProperty(), shape, false);
701 : }
702 : };
703 :
// Walk from |obj| up to |holder| and verify every intermediate prototype is
// present and native; a broken or non-native chain cannot be cached.
704 : static bool
705 388424 : IsCacheableProtoChain(JSObject *obj, JSObject *holder)
706 : {
707 821020 : while (obj != holder) {
708 : /*
709 : * We cannot assume that we find the holder object on the prototype
710 : * chain and must check for null proto. The prototype chain can be
711 : * altered during the lookupProperty call.
712 : */
713 44194 : JSObject *proto = obj->getProto();
714 44194 : if (!proto || !proto->isNative())
715 22 : return false;
716 44172 : obj = proto;
717 : }
718 388402 : return true;
719 : }
720 :
// Shared property-lookup helper parameterized on the IC type, so error() and
// disable() report through the owning cache.
721 : template <typename IC>
722 : struct GetPropHelper {
723 : // These fields are set in the constructor and describe a property lookup.
724 : JSContext *cx;
725 : JSObject *obj;
726 : PropertyName *name;
// Owning IC, used for error()/disable() reporting.
// NOTE: this declaration was corrupted by HTML entity decoding
// ("&ic;" -> U+2063 INVISIBLE SEPARATOR); restored to the reference member.
727 : IC &ic;
728 : VMFrame &f;
729 :
730 : // These fields are set by |bind| and |lookup|. After a call to either
731 : // function, these are set exactly as they are in JSOP_GETPROP or JSOP_NAME.
732 : JSObject *aobj;
733 : JSObject *holder;
734 : JSProperty *prop;
735 :
736 : // This field is set by |bind| and |lookup| only if they returned
737 : // Lookup_Cacheable, otherwise it is NULL.
738 : const Shape *shape;
739 :
740 390807 : GetPropHelper(JSContext *cx, JSObject *obj, PropertyName *name, IC &ic, VMFrame &f)
741 390807 : : cx(cx), obj(obj), name(name), ic(ic), f(f), holder(NULL), prop(NULL), shape(NULL)
742 390807 : { }
743 :
744 : public:
// Resolve |name| against the scope chain (JSOP_NAME-style); fills obj,
// holder, prop and shape on success.
745 181272 : LookupStatus bind() {
746 181272 : RecompilationMonitor monitor(cx);
747 181272 : JSObject *scopeChain = cx->stack.currentScriptedScopeChain();
748 181272 : if (js_CodeSpec[*f.pc()].format & JOF_GNAME)
749 0 : scopeChain = &scopeChain->global();
750 181272 : if (!FindProperty(cx, name, scopeChain, &obj, &holder, &prop))
751 0 : return ic.error(cx);
752 181272 : if (monitor.recompiled())
753 2 : return Lookup_Uncacheable;
754 181270 : if (!prop)
755 521 : return ic.disable(cx, "lookup failed");
756 180749 : if (!obj->isNative())
757 0 : return ic.disable(cx, "non-native");
758 180749 : if (!IsCacheableProtoChain(obj, holder))
759 4 : return ic.disable(cx, "non-native holder");
760 180745 : shape = (const Shape *)prop;
761 180745 : return Lookup_Cacheable;
762 : }
763 :
// Resolve |name| directly on |obj| (JSOP_GETPROP-style).
764 209535 : LookupStatus lookup() {
765 209535 : JSObject *aobj = js_GetProtoIfDenseArray(obj);
766 209535 : if (!aobj->isNative())
767 797 : return ic.disable(f, "non-native");
768 :
769 208738 : RecompilationMonitor monitor(cx);
770 208738 : if (!aobj->lookupProperty(cx, name, &holder, &prop))
771 0 : return ic.error(cx);
772 208738 : if (monitor.recompiled())
773 0 : return Lookup_Uncacheable;
774 :
775 208738 : if (!prop)
776 1063 : return ic.disable(f, "lookup failed");
777 207675 : if (!IsCacheableProtoChain(obj, holder))
778 18 : return ic.disable(f, "non-native holder");
779 207657 : shape = (const Shape *)prop;
780 207657 : return Lookup_Cacheable;
781 : }
782 :
// Check whether the found shape can be read by a PIC stub (plain slot or
// a callable getter hook we are allowed to invoke).
783 377048 : LookupStatus testForGet() {
784 377048 : if (!shape->hasDefaultGetter()) {
785 10336 : if (shape->isMethod()) {
786 0 : if (JSOp(*f.pc()) != JSOP_CALLPROP)
787 0 : return ic.disable(f, "method valued shape");
788 : } else {
789 10336 : if (shape->hasGetterValue())
790 8398 : return ic.disable(f, "getter value shape");
791 1938 : if (shape->hasSlot() && holder != obj)
792 0 : return ic.disable(f, "slotful getter hook through prototype");
793 1938 : if (!ic.canCallHook)
794 561 : return ic.disable(f, "can't call getter hook");
795 1377 : if (f.regs.inlined()) {
796 : /*
797 : * As with native stubs, getter hook stubs can't be
798 : * generated for inline frames. Mark the inner function
799 : * as uninlineable and recompile.
800 : */
801 0 : f.script()->uninlineable = true;
802 0 : MarkTypeObjectFlags(cx, f.script()->function(),
803 : types::OBJECT_FLAG_UNINLINEABLE);
804 0 : return Lookup_Uncacheable;
805 : }
806 : }
807 366712 : } else if (!shape->hasSlot()) {
808 29 : return ic.disable(f, "no slot");
809 : }
810 :
811 368060 : return Lookup_Cacheable;
812 : }
813 :
// Convenience wrapper: lookup() followed by testForGet().
814 209152 : LookupStatus lookupAndTest() {
815 209152 : LookupStatus status = lookup();
816 209152 : if (status != Lookup_Cacheable)
817 1878 : return status;
818 207274 : return testForGet();
819 : }
820 : };
821 :
822 : class GetPropCompiler : public PICStubCompiler
823 : {
824 : JSObject *obj;
825 : PropertyName *name;
826 : int lastStubSecondShapeGuard;
827 :
828 : public:
// Remember the previous secondary shape guard so a newly generated stub can
// be chained onto the last path (see getLastStubSecondShapeGuard below).
829 236654 : GetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, PropertyName *name,
830 : VoidStubPIC stub)
831 : : PICStubCompiler("getprop", f, script, pic,
832 : JS_FUNC_TO_DATA_PTR(void *, stub)),
833 : obj(obj),
834 : name(name),
835 236654 : lastStubSecondShapeGuard(pic.secondShapeGuard)
836 236654 : { }
837 :
// Offset of the previous stub's secondary guard jump, or 0 if none.
838 61726 : int getLastStubSecondShapeGuard() const {
839 61726 : return lastStubSecondShapeGuard ? POST_INST_OFFSET(lastStubSecondShapeGuard) : 0;
840 : }
841 :
// Restore a GET IC to its pristine state: clear the inline shape guard,
// point it back at the slow path, relink the type-check jump if present,
// and restore the generic ic::GetProp slow call.
842 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
843 : {
844 : GetPropLabels &labels = pic.getPropLabels();
845 : repatcher.repatchLEAToLoadPtr(labels.getDslotsLoad(pic.fastPathRejoin));
846 : repatcher.repatch(labels.getInlineShapeData(pic.getFastShapeGuard()), NULL);
847 : repatcher.relink(labels.getInlineShapeJump(pic.getFastShapeGuard()), pic.slowPathStart);
848 :
849 : if (pic.hasTypeCheck()) {
850 : /* TODO: combine pic.u.get into ICLabels? */
851 : repatcher.relink(labels.getInlineTypeJump(pic.fastPathStart), pic.getSlowTypeCheck());
852 : }
853 :
854 : JS_ASSERT(pic.kind == ic::PICInfo::GET);
855 :
856 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::GetProp));
857 : repatcher.relink(pic.slowPathCall, target);
858 : }
859 :
// Fast path for arguments.length: guard on the arguments object's shape,
// bail to the slow path if length has been overridden, then unpack the
// length from its packed-bits slot and return it as an int32.
860 483 : LookupStatus generateArgsLengthStub()
861 : {
862 966 : Assembler masm;
863 :
864 483 : Jump notArgs = masm.guardShape(pic.objReg, obj);
865 :
866 483 : masm.load32(Address(pic.objReg, JSObject::getFixedSlotOffset(ArgumentsObject::INITIAL_LENGTH_SLOT)), pic.objReg);
867 483 : masm.move(pic.objReg, pic.shapeReg);
868 : Jump overridden = masm.branchTest32(Assembler::NonZero, pic.shapeReg,
869 483 : Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT));
// Shift off the flag bits to recover the actual length value.
870 483 : masm.rshift32(Imm32(ArgumentsObject::PACKED_BITS_COUNT), pic.objReg);
871 :
872 483 : masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
873 483 : Jump done = masm.jump();
874 :
875 483 : pic.updatePCCounters(f, masm);
876 :
877 966 : PICLinker buffer(masm, pic);
878 483 : if (!buffer.init(cx))
879 0 : return error();
880 :
881 966 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
882 483 : !buffer.verifyRange(f.chunk())) {
883 0 : return disable("code memory is out of range");
884 : }
885 :
886 483 : buffer.link(notArgs, pic.slowPathStart);
887 483 : buffer.link(overridden, pic.slowPathStart);
888 483 : buffer.link(done, pic.fastPathRejoin);
889 :
890 483 : CodeLocationLabel start = buffer.finalize(f);
891 : JaegerSpew(JSpew_PICs, "generate args length stub at %p\n",
892 483 : start.executableAddress());
893 :
894 483 : patchPreviousToHere(start);
895 :
// One stub fully covers this case, so retire the IC afterwards.
896 483 : disable("args length done");
897 :
898 483 : return Lookup_Cacheable;
899 : }
900 :
901 7533 : LookupStatus generateArrayLengthStub()
902 : {
903 15066 : Assembler masm;
904 :
905 7533 : masm.loadObjClass(pic.objReg, pic.shapeReg);
906 7533 : Jump isDense = masm.testClass(Assembler::Equal, pic.shapeReg, &ArrayClass);
907 7533 : Jump notArray = masm.testClass(Assembler::NotEqual, pic.shapeReg, &SlowArrayClass);
908 :
909 7533 : isDense.linkTo(masm.label(), &masm);
910 7533 : masm.loadPtr(Address(pic.objReg, JSObject::offsetOfElements()), pic.objReg);
911 7533 : masm.load32(Address(pic.objReg, ObjectElements::offsetOfLength()), pic.objReg);
912 7533 : Jump oob = masm.branch32(Assembler::Above, pic.objReg, Imm32(JSVAL_INT_MAX));
913 7533 : masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
914 7533 : Jump done = masm.jump();
915 :
916 7533 : pic.updatePCCounters(f, masm);
917 :
918 15066 : PICLinker buffer(masm, pic);
919 7533 : if (!buffer.init(cx))
920 0 : return error();
921 :
922 15066 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
923 7533 : !buffer.verifyRange(f.chunk())) {
924 0 : return disable("code memory is out of range");
925 : }
926 :
927 7533 : buffer.link(notArray, pic.slowPathStart);
928 7533 : buffer.link(oob, pic.slowPathStart);
929 7533 : buffer.link(done, pic.fastPathRejoin);
930 :
931 7533 : CodeLocationLabel start = buffer.finalize(f);
932 : JaegerSpew(JSpew_PICs, "generate array length stub at %p\n",
933 7533 : start.executableAddress());
934 :
935 7533 : patchPreviousToHere(start);
936 :
937 7533 : disable("array length done");
938 :
939 7533 : return Lookup_Cacheable;
940 : }
941 :
942 69 : LookupStatus generateStringObjLengthStub()
943 : {
944 138 : Assembler masm;
945 :
946 69 : Jump notStringObj = masm.guardShape(pic.objReg, obj);
947 :
948 69 : masm.loadPayload(Address(pic.objReg, StringObject::getPrimitiveValueOffset()), pic.objReg);
949 69 : masm.loadPtr(Address(pic.objReg, JSString::offsetOfLengthAndFlags()), pic.objReg);
950 69 : masm.urshift32(Imm32(JSString::LENGTH_SHIFT), pic.objReg);
951 69 : masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
952 69 : Jump done = masm.jump();
953 :
954 69 : pic.updatePCCounters(f, masm);
955 :
956 138 : PICLinker buffer(masm, pic);
957 69 : if (!buffer.init(cx))
958 0 : return error();
959 :
960 138 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
961 69 : !buffer.verifyRange(f.chunk())) {
962 0 : return disable("code memory is out of range");
963 : }
964 :
965 69 : buffer.link(notStringObj, pic.slowPathStart);
966 69 : buffer.link(done, pic.fastPathRejoin);
967 :
968 69 : CodeLocationLabel start = buffer.finalize(f);
969 : JaegerSpew(JSpew_PICs, "generate string object length stub at %p\n",
970 69 : start.executableAddress());
971 :
972 69 : patchPreviousToHere(start);
973 :
974 69 : disable("string object length done");
975 :
976 69 : return Lookup_Cacheable;
977 : }
978 :
979 19452 : LookupStatus generateStringPropertyStub()
980 : {
981 19452 : if (!f.fp()->script()->hasGlobal())
982 18612 : return disable("String.prototype without compile-and-go global");
983 :
984 840 : RecompilationMonitor monitor(f.cx);
985 :
986 840 : JSObject *obj = f.fp()->scopeChain().global().getOrCreateStringPrototype(f.cx);
987 840 : if (!obj)
988 0 : return error();
989 :
990 840 : if (monitor.recompiled())
991 7 : return Lookup_Uncacheable;
992 :
993 833 : GetPropHelper<GetPropCompiler> getprop(cx, obj, name, *this, f);
994 833 : LookupStatus status = getprop.lookupAndTest();
995 833 : if (status != Lookup_Cacheable)
996 4 : return status;
997 829 : if (getprop.obj != getprop.holder)
998 1 : return disable("proto walk on String.prototype");
999 828 : if (!getprop.shape->hasDefaultGetterOrIsMethod())
1000 0 : return disable("getter hook on String.prototype");
1001 828 : if (hadGC())
1002 0 : return Lookup_Uncacheable;
1003 :
1004 1656 : Assembler masm;
1005 :
1006 : /* Only strings are allowed. */
1007 : Jump notString = masm.branchPtr(Assembler::NotEqual, pic.typeReg(),
1008 828 : ImmType(JSVAL_TYPE_STRING));
1009 :
1010 : /*
1011 : * Clobber objReg with String.prototype and do some PIC stuff. Well,
1012 : * really this is now a MIC, except it won't ever be patched, so we
1013 : * just disable the PIC at the end. :FIXME:? String.prototype probably
1014 : * does not get random shape changes.
1015 : */
1016 828 : masm.move(ImmPtr(obj), pic.objReg);
1017 828 : masm.loadShape(pic.objReg, pic.shapeReg);
1018 : Jump shapeMismatch = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1019 828 : ImmPtr(obj->lastProperty()));
1020 828 : masm.loadObjProp(obj, pic.objReg, getprop.shape, pic.shapeReg, pic.objReg);
1021 :
1022 828 : Jump done = masm.jump();
1023 :
1024 828 : pic.updatePCCounters(f, masm);
1025 :
1026 1656 : PICLinker buffer(masm, pic);
1027 828 : if (!buffer.init(cx))
1028 0 : return error();
1029 :
1030 1656 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1031 828 : !buffer.verifyRange(f.chunk())) {
1032 0 : return disable("code memory is out of range");
1033 : }
1034 :
1035 828 : buffer.link(notString, pic.getSlowTypeCheck());
1036 828 : buffer.link(shapeMismatch, pic.slowPathStart);
1037 828 : buffer.link(done, pic.fastPathRejoin);
1038 :
1039 828 : CodeLocationLabel cs = buffer.finalize(f);
1040 : JaegerSpew(JSpew_PICs, "generate string call stub at %p\n",
1041 828 : cs.executableAddress());
1042 :
1043 : /* Patch the type check to jump here. */
1044 828 : if (pic.hasTypeCheck()) {
1045 1656 : Repatcher repatcher(f.chunk());
1046 828 : repatcher.relink(pic.getPropLabels().getInlineTypeJump(pic.fastPathStart), cs);
1047 : }
1048 :
1049 : /* Disable the PIC so we don't keep generating stubs on the above shape mismatch. */
1050 828 : disable("generated string call stub");
1051 828 : return Lookup_Cacheable;
1052 : }
1053 :
1054 6842 : LookupStatus generateStringLengthStub()
1055 : {
1056 6842 : JS_ASSERT(pic.hasTypeCheck());
1057 :
1058 13684 : Assembler masm;
1059 : Jump notString = masm.branchPtr(Assembler::NotEqual, pic.typeReg(),
1060 6842 : ImmType(JSVAL_TYPE_STRING));
1061 6842 : masm.loadPtr(Address(pic.objReg, JSString::offsetOfLengthAndFlags()), pic.objReg);
1062 : // String length is guaranteed to be no more than 2**28, so the 32-bit operation is OK.
1063 6842 : masm.urshift32(Imm32(JSString::LENGTH_SHIFT), pic.objReg);
1064 6842 : masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
1065 6842 : Jump done = masm.jump();
1066 :
1067 6842 : pic.updatePCCounters(f, masm);
1068 :
1069 13684 : PICLinker buffer(masm, pic);
1070 6842 : if (!buffer.init(cx))
1071 0 : return error();
1072 :
1073 13684 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1074 6842 : !buffer.verifyRange(f.chunk())) {
1075 0 : return disable("code memory is out of range");
1076 : }
1077 :
1078 6842 : buffer.link(notString, pic.getSlowTypeCheck());
1079 6842 : buffer.link(done, pic.fastPathRejoin);
1080 :
1081 6842 : CodeLocationLabel start = buffer.finalize(f);
1082 : JaegerSpew(JSpew_PICs, "generate string length stub at %p\n",
1083 6842 : start.executableAddress());
1084 :
1085 6842 : if (pic.hasTypeCheck()) {
1086 13684 : Repatcher repatcher(f.chunk());
1087 6842 : repatcher.relink(pic.getPropLabels().getInlineTypeJump(pic.fastPathStart), start);
1088 : }
1089 :
1090 6842 : disable("generated string length stub");
1091 :
1092 6842 : return Lookup_Cacheable;
1093 : }
1094 :
1095 138376 : LookupStatus patchInline(JSObject *holder, const Shape *shape)
1096 : {
1097 138376 : spew("patch", "inline");
1098 276752 : Repatcher repatcher(f.chunk());
1099 138376 : GetPropLabels &labels = pic.getPropLabels();
1100 :
1101 : int32_t offset;
1102 138376 : if (holder->isFixedSlot(shape->slot())) {
1103 96863 : CodeLocationInstruction istr = labels.getDslotsLoad(pic.fastPathRejoin);
1104 96863 : repatcher.repatchLoadPtrToLEA(istr);
1105 :
1106 : //
1107 : // We've patched | mov dslots, [obj + DSLOTS_OFFSET]
1108 : // To: | lea fslots, [obj + DSLOTS_OFFSET]
1109 : //
1110 : // Because the offset is wrong, it's necessary to correct it
1111 : // below.
1112 : //
1113 96863 : int32_t diff = int32_t(JSObject::getFixedSlotOffset(0)) -
1114 96863 : int32_t(JSObject::offsetOfSlots());
1115 96863 : JS_ASSERT(diff != 0);
1116 96863 : offset = (int32_t(shape->slot()) * sizeof(Value)) + diff;
1117 : } else {
1118 41513 : offset = holder->dynamicSlotIndex(shape->slot()) * sizeof(Value);
1119 : }
1120 :
1121 138376 : repatcher.repatch(labels.getInlineShapeData(pic.getFastShapeGuard()), obj->lastProperty());
1122 138376 : repatcher.patchAddressOffsetForValueLoad(labels.getValueLoad(pic.fastPathRejoin), offset);
1123 :
1124 138376 : pic.inlinePathPatched = true;
1125 :
1126 138376 : return Lookup_Cacheable;
1127 : }
1128 :
1129 1377 : void generateGetterStub(Assembler &masm, const Shape *shape,
1130 : Label start, Vector<Jump, 8> &shapeMismatches)
1131 : {
1132 : /*
1133 : * Getter hook needs to be called from the stub. The state is fully
1134 : * synced and no registers are live except the result registers.
1135 : */
1136 1377 : JS_ASSERT(pic.canCallHook);
1137 1377 : PropertyOp getter = shape->getterOp();
1138 :
1139 : masm.storePtr(ImmPtr((void *) REJOIN_NATIVE_GETTER),
1140 1377 : FrameAddress(offsetof(VMFrame, stubRejoin)));
1141 :
1142 1377 : Registers tempRegs = Registers::tempCallRegMask();
1143 1377 : if (tempRegs.hasReg(Registers::ClobberInCall))
1144 1377 : tempRegs.takeReg(Registers::ClobberInCall);
1145 :
1146 : /* Get a register to hold obj while we set up the rest of the frame. */
1147 1377 : RegisterID holdObjReg = pic.objReg;
1148 1377 : if (tempRegs.hasReg(pic.objReg)) {
1149 1219 : tempRegs.takeReg(pic.objReg);
1150 : } else {
1151 158 : holdObjReg = tempRegs.takeAnyReg().reg();
1152 158 : masm.move(pic.objReg, holdObjReg);
1153 : }
1154 :
1155 1377 : RegisterID t0 = tempRegs.takeAnyReg().reg();
1156 1377 : masm.bumpStubCounter(f.script(), f.pc(), t0);
1157 :
1158 : /*
1159 : * Initialize vp, which is either a slot in the object (the holder,
1160 : * actually, which must equal the object here) or undefined.
1161 : * Use vp == sp (which for CALLPROP will actually be the original
1162 : * sp + 1), to avoid clobbering stack values.
1163 : */
1164 1377 : int32_t vpOffset = (char *) f.regs.sp - (char *) f.fp();
1165 1377 : if (shape->hasSlot()) {
1166 : masm.loadObjProp(obj, holdObjReg, shape,
1167 46 : Registers::ClobberInCall, t0);
1168 46 : masm.storeValueFromComponents(Registers::ClobberInCall, t0, Address(JSFrameReg, vpOffset));
1169 : } else {
1170 1331 : masm.storeValue(UndefinedValue(), Address(JSFrameReg, vpOffset));
1171 : }
1172 :
1173 1377 : int32_t initialFrameDepth = f.regs.sp - f.fp()->slots();
1174 1377 : masm.setupFallibleABICall(cx->typeInferenceEnabled(), f.regs.pc, initialFrameDepth);
1175 :
1176 : /* Grab cx. */
1177 : #ifdef JS_CPU_X86
1178 1377 : RegisterID cxReg = tempRegs.takeAnyReg().reg();
1179 : #else
1180 : RegisterID cxReg = Registers::ArgReg0;
1181 : #endif
1182 1377 : masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), cxReg);
1183 :
1184 : /* Grap vp. */
1185 1377 : RegisterID vpReg = t0;
1186 1377 : masm.addPtr(Imm32(vpOffset), JSFrameReg, vpReg);
1187 :
1188 1377 : masm.restoreStackBase();
1189 1377 : masm.setupABICall(Registers::NormalCall, 4);
1190 1377 : masm.storeArg(3, vpReg);
1191 1377 : masm.storeArg(2, ImmPtr((void *) JSID_BITS(shape->getUserId())));
1192 1377 : masm.storeArg(1, holdObjReg);
1193 1377 : masm.storeArg(0, cxReg);
1194 :
1195 1377 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, getter), false);
1196 :
1197 1377 : NativeStubLinker::FinalJump done;
1198 1377 : if (!NativeStubEpilogue(f, masm, &done, 0, vpOffset, pic.shapeReg, pic.objReg))
1199 0 : return;
1200 2754 : NativeStubLinker linker(masm, f.chunk(), f.regs.pc, done);
1201 1377 : if (!linker.init(f.cx))
1202 0 : THROW();
1203 :
1204 2754 : if (!linker.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1205 1377 : !linker.verifyRange(f.chunk())) {
1206 0 : disable("code memory is out of range");
1207 : return;
1208 : }
1209 :
1210 1377 : linker.patchJump(pic.fastPathRejoin);
1211 :
1212 1377 : linkerEpilogue(linker, start, shapeMismatches);
1213 : }
1214 :
1215 53641 : LookupStatus generateStub(JSObject *holder, const Shape *shape)
1216 : {
1217 107282 : Vector<Jump, 8> shapeMismatches(cx);
1218 :
1219 107282 : Assembler masm;
1220 :
1221 53641 : Label start;
1222 53641 : Jump shapeGuardJump;
1223 53641 : Jump argsLenGuard;
1224 :
1225 53641 : bool setStubShapeOffset = true;
1226 53641 : if (obj->isDenseArray()) {
1227 8101 : start = masm.label();
1228 : shapeGuardJump = masm.branchPtr(Assembler::NotEqual,
1229 8101 : Address(pic.objReg, JSObject::offsetOfShape()),
1230 16202 : ImmPtr(obj->lastProperty()));
1231 :
1232 : /*
1233 : * No need to assert validity of GETPROP_STUB_SHAPE_JUMP in this case:
1234 : * the IC is disabled after a dense array hit, so no patching can occur.
1235 : */
1236 : #ifndef JS_HAS_IC_LABELS
1237 8101 : setStubShapeOffset = false;
1238 : #endif
1239 : } else {
1240 45540 : if (pic.shapeNeedsRemat()) {
1241 11328 : masm.loadShape(pic.objReg, pic.shapeReg);
1242 11328 : pic.shapeRegHasBaseShape = true;
1243 : }
1244 :
1245 45540 : start = masm.label();
1246 : shapeGuardJump = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1247 45540 : ImmPtr(obj->lastProperty()));
1248 : }
1249 53641 : Label stubShapeJumpLabel = masm.label();
1250 :
1251 53641 : if (!shapeMismatches.append(shapeGuardJump))
1252 0 : return error();
1253 :
1254 53641 : RegisterID holderReg = pic.objReg;
1255 53641 : if (obj != holder) {
1256 33693 : if (!GeneratePrototypeGuards(cx, shapeMismatches, masm, obj, holder,
1257 33693 : pic.objReg, pic.shapeReg)) {
1258 0 : return error();
1259 : }
1260 :
1261 : // Bake in the holder identity. Careful not to clobber |objReg|, since we can't remat it.
1262 33693 : holderReg = pic.shapeReg;
1263 33693 : masm.move(ImmPtr(holder), holderReg);
1264 33693 : pic.shapeRegHasBaseShape = false;
1265 :
1266 : // Guard on the holder's shape.
1267 33693 : Jump j = masm.guardShape(holderReg, holder);
1268 33693 : if (!shapeMismatches.append(j))
1269 0 : return error();
1270 :
1271 33693 : pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
1272 : } else {
1273 19948 : pic.secondShapeGuard = 0;
1274 : }
1275 :
1276 53641 : if (!shape->hasDefaultGetterOrIsMethod()) {
1277 1377 : generateGetterStub(masm, shape, start, shapeMismatches);
1278 1377 : if (setStubShapeOffset)
1279 1377 : pic.getPropLabels().setStubShapeJump(masm, start, stubShapeJumpLabel);
1280 1377 : return Lookup_Cacheable;
1281 : }
1282 :
1283 : /* Load the value out of the object. */
1284 52264 : masm.loadObjProp(holder, holderReg, shape, pic.shapeReg, pic.objReg);
1285 52264 : Jump done = masm.jump();
1286 :
1287 52264 : pic.updatePCCounters(f, masm);
1288 :
1289 104528 : PICLinker buffer(masm, pic);
1290 52264 : if (!buffer.init(cx))
1291 0 : return error();
1292 :
1293 104528 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1294 52264 : !buffer.verifyRange(f.chunk())) {
1295 0 : return disable("code memory is out of range");
1296 : }
1297 :
1298 : // The final exit jumps to the store-back in the inline stub.
1299 52264 : buffer.link(done, pic.fastPathRejoin);
1300 :
1301 52264 : linkerEpilogue(buffer, start, shapeMismatches);
1302 :
1303 52264 : if (setStubShapeOffset)
1304 44163 : pic.getPropLabels().setStubShapeJump(masm, start, stubShapeJumpLabel);
1305 52264 : return Lookup_Cacheable;
1306 : }
1307 :
1308 53641 : void linkerEpilogue(LinkerHelper &buffer, Label start, Vector<Jump, 8> &shapeMismatches)
1309 : {
1310 : // The guard exit jumps to the original slow case.
1311 143776 : for (Jump *pj = shapeMismatches.begin(); pj != shapeMismatches.end(); ++pj)
1312 90135 : buffer.link(*pj, pic.slowPathStart);
1313 :
1314 53641 : CodeLocationLabel cs = buffer.finalize(f);
1315 53641 : JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
1316 :
1317 53641 : patchPreviousToHere(cs);
1318 :
1319 53641 : pic.stubsGenerated++;
1320 53641 : pic.updateLastPath(buffer, start);
1321 :
1322 53641 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1323 750 : disable("max stubs reached");
1324 53641 : if (obj->isDenseArray())
1325 8101 : disable("dense array");
1326 53641 : }
1327 :
1328 61726 : void patchPreviousToHere(CodeLocationLabel cs)
1329 : {
1330 123452 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
1331 61726 : CodeLocationLabel label = pic.lastPathStart();
1332 :
1333 : // Patch either the inline fast path or a generated stub. The stub
1334 : // omits the prefix of the inline fast path that loads the shape, so
1335 : // the offsets are different.
1336 : int shapeGuardJumpOffset;
1337 61726 : if (pic.stubsGenerated)
1338 23138 : shapeGuardJumpOffset = pic.getPropLabels().getStubShapeJumpOffset();
1339 : else
1340 38588 : shapeGuardJumpOffset = pic.shapeGuard + pic.getPropLabels().getInlineShapeJumpOffset();
1341 61726 : int secondGuardOffset = getLastStubSecondShapeGuard();
1342 :
1343 : JaegerSpew(JSpew_PICs, "Patching previous (%d stubs) (start %p) (offset %d) (second %d)\n",
1344 : (int) pic.stubsGenerated, label.executableAddress(),
1345 61726 : shapeGuardJumpOffset, secondGuardOffset);
1346 :
1347 61726 : repatcher.relink(label.jumpAtOffset(shapeGuardJumpOffset), cs);
1348 61726 : if (secondGuardOffset)
1349 11328 : repatcher.relink(label.jumpAtOffset(secondGuardOffset), cs);
1350 61726 : }
1351 :
1352 202275 : LookupStatus update()
1353 : {
1354 202275 : JS_ASSERT(pic.hit);
1355 :
1356 202275 : GetPropHelper<GetPropCompiler> getprop(cx, obj, name, *this, f);
1357 202275 : LookupStatus status = getprop.lookupAndTest();
1358 202275 : if (status != Lookup_Cacheable)
1359 10258 : return status;
1360 192017 : if (hadGC())
1361 0 : return Lookup_Uncacheable;
1362 :
1363 508486 : if (obj == getprop.holder &&
1364 158324 : getprop.shape->hasDefaultGetterOrIsMethod() &&
1365 158145 : !pic.inlinePathPatched) {
1366 138376 : return patchInline(getprop.holder, getprop.shape);
1367 : }
1368 :
1369 53641 : return generateStub(getprop.holder, getprop.shape);
1370 : }
1371 : };
1372 :
1373 : class ScopeNameCompiler : public PICStubCompiler
1374 : {
1375 : private:
1376 : typedef Vector<Jump, 8> JumpList;
1377 :
1378 : JSObject *scopeChain;
1379 : PropertyName *name;
1380 : GetPropHelper<ScopeNameCompiler> getprop;
1381 181655 : ScopeNameCompiler *thisFromCtor() { return this; }
1382 :
1383 179814 : void patchPreviousToHere(CodeLocationLabel cs)
1384 : {
1385 179814 : ScopeNameLabels & labels = pic.scopeNameLabels();
1386 359628 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
1387 179814 : CodeLocationLabel start = pic.lastPathStart();
1388 179814 : JSC::CodeLocationJump jump;
1389 :
1390 : // Patch either the inline fast path or a generated stub.
1391 179814 : if (pic.stubsGenerated)
1392 8256 : jump = labels.getStubJump(start);
1393 : else
1394 171558 : jump = labels.getInlineJump(start);
1395 179814 : repatcher.relink(jump, cs);
1396 179814 : }
1397 :
1398 179914 : LookupStatus walkScopeChain(Assembler &masm, JumpList &fails)
1399 : {
1400 : /* Walk the scope chain. */
1401 179914 : JSObject *tobj = scopeChain;
1402 :
1403 : /* For GETXPROP, we'll never enter this loop. */
1404 179914 : JS_ASSERT_IF(pic.kind == ic::PICInfo::XNAME, tobj && tobj == getprop.holder);
1405 179914 : JS_ASSERT_IF(pic.kind == ic::PICInfo::XNAME, getprop.obj == tobj);
1406 :
1407 391527 : while (tobj && tobj != getprop.holder) {
1408 31799 : if (!IsCacheableNonGlobalScope(tobj))
1409 100 : return disable("non-cacheable scope chain object");
1410 31699 : JS_ASSERT(tobj->isNative());
1411 :
1412 : /* Guard on intervening shapes. */
1413 31699 : masm.loadShape(pic.objReg, pic.shapeReg);
1414 : Jump j = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1415 31699 : ImmPtr(tobj->lastProperty()));
1416 31699 : if (!fails.append(j))
1417 0 : return error();
1418 :
1419 : /* Load the next link in the scope chain. */
1420 31699 : Address parent(pic.objReg, ScopeObject::offsetOfEnclosingScope());
1421 31699 : masm.loadPayload(parent, pic.objReg);
1422 :
1423 31699 : tobj = &tobj->asScope().enclosingScope();
1424 : }
1425 :
1426 179814 : if (tobj != getprop.holder)
1427 0 : return disable("scope chain walk terminated early");
1428 :
1429 179814 : return Lookup_Cacheable;
1430 : }
1431 :
1432 : public:
1433 181655 : ScopeNameCompiler(VMFrame &f, JSScript *script, JSObject *scopeChain, ic::PICInfo &pic,
1434 : PropertyName *name, VoidStubPIC stub)
1435 : : PICStubCompiler("name", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
1436 : scopeChain(scopeChain), name(name),
1437 181655 : getprop(f.cx, NULL, name, *thisFromCtor(), f)
1438 181655 : { }
1439 :
1440 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
1441 : {
1442 : ScopeNameLabels &labels = pic.scopeNameLabels();
1443 :
1444 : /* Link the inline path back to the slow path. */
1445 : JSC::CodeLocationJump inlineJump = labels.getInlineJump(pic.fastPathStart);
1446 : repatcher.relink(inlineJump, pic.slowPathStart);
1447 :
1448 : VoidStubPIC stub;
1449 : switch (pic.kind) {
1450 : case ic::PICInfo::NAME:
1451 : stub = ic::Name;
1452 : break;
1453 : case ic::PICInfo::XNAME:
1454 : stub = ic::XName;
1455 : break;
1456 : default:
1457 : JS_NOT_REACHED("Invalid pic kind in ScopeNameCompiler::reset");
1458 : return;
1459 : }
1460 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, stub));
1461 : repatcher.relink(pic.slowPathCall, target);
1462 : }
1463 :
1464 169024 : LookupStatus generateGlobalStub(JSObject *obj)
1465 : {
1466 338048 : Assembler masm;
1467 338048 : JumpList fails(cx);
1468 169024 : ScopeNameLabels &labels = pic.scopeNameLabels();
1469 :
1470 : /* For GETXPROP, the object is already in objReg. */
1471 169024 : if (pic.kind == ic::PICInfo::NAME)
1472 169014 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
1473 :
1474 169024 : JS_ASSERT(obj == getprop.holder);
1475 169024 : JS_ASSERT(getprop.holder == &scopeChain->global());
1476 :
1477 169024 : LookupStatus status = walkScopeChain(masm, fails);
1478 169024 : if (status != Lookup_Cacheable)
1479 60 : return status;
1480 :
1481 : /* If a scope chain walk was required, the final object needs a NULL test. */
1482 168964 : MaybeJump finalNull;
1483 168964 : if (pic.kind == ic::PICInfo::NAME)
1484 168954 : finalNull = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg);
1485 168964 : masm.loadShape(pic.objReg, pic.shapeReg);
1486 : Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1487 168964 : ImmPtr(getprop.holder->lastProperty()));
1488 :
1489 168964 : masm.loadObjProp(obj, pic.objReg, getprop.shape, pic.shapeReg, pic.objReg);
1490 :
1491 168964 : Jump done = masm.jump();
1492 :
1493 : /* All failures flow to here, so there is a common point to patch. */
1494 189739 : for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
1495 20775 : pj->linkTo(masm.label(), &masm);
1496 168964 : if (finalNull.isSet())
1497 168954 : finalNull.get().linkTo(masm.label(), &masm);
1498 168964 : finalShape.linkTo(masm.label(), &masm);
1499 168964 : Label failLabel = masm.label();
1500 168964 : Jump failJump = masm.jump();
1501 :
1502 168964 : pic.updatePCCounters(f, masm);
1503 :
1504 337928 : PICLinker buffer(masm, pic);
1505 168964 : if (!buffer.init(cx))
1506 0 : return error();
1507 :
1508 337928 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1509 168964 : !buffer.verifyRange(f.chunk())) {
1510 0 : return disable("code memory is out of range");
1511 : }
1512 :
1513 168964 : buffer.link(failJump, pic.slowPathStart);
1514 168964 : buffer.link(done, pic.fastPathRejoin);
1515 168964 : CodeLocationLabel cs = buffer.finalize(f);
1516 168964 : JaegerSpew(JSpew_PICs, "generated %s global stub at %p\n", type, cs.executableAddress());
1517 168964 : spew("NAME stub", "global");
1518 :
1519 168964 : patchPreviousToHere(cs);
1520 :
1521 168964 : pic.stubsGenerated++;
1522 168964 : pic.updateLastPath(buffer, failLabel);
1523 168964 : labels.setStubJump(masm, failLabel, failJump);
1524 :
1525 168964 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1526 24 : disable("max stubs reached");
1527 :
1528 168964 : return Lookup_Cacheable;
1529 : }
1530 :
1531 : enum CallObjPropKind {
1532 : ARG,
1533 : VAR
1534 : };
1535 :
1536 11310 : LookupStatus generateCallStub(JSObject *obj)
1537 : {
1538 22620 : Assembler masm;
1539 22620 : Vector<Jump, 8> fails(cx);
1540 11310 : ScopeNameLabels &labels = pic.scopeNameLabels();
1541 :
1542 : /* For GETXPROP, the object is already in objReg. */
1543 11310 : if (pic.kind == ic::PICInfo::NAME)
1544 10945 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
1545 :
1546 11310 : JS_ASSERT(obj == getprop.holder);
1547 11310 : JS_ASSERT(getprop.holder != &scopeChain->global());
1548 :
1549 : CallObjPropKind kind;
1550 11310 : const Shape *shape = getprop.shape;
1551 11310 : if (shape->getterOp() == CallObject::getArgOp) {
1552 2808 : kind = ARG;
1553 8502 : } else if (shape->getterOp() == CallObject::getVarOp) {
1554 8082 : kind = VAR;
1555 : } else {
1556 420 : return disable("unhandled callobj sprop getter");
1557 : }
1558 :
1559 10890 : LookupStatus status = walkScopeChain(masm, fails);
1560 10890 : if (status != Lookup_Cacheable)
1561 40 : return status;
1562 :
1563 : /* If a scope chain walk was required, the final object needs a NULL test. */
1564 10850 : MaybeJump finalNull;
1565 10850 : if (pic.kind == ic::PICInfo::NAME)
1566 10485 : finalNull = masm.branchTestPtr(Assembler::Zero, pic.objReg, pic.objReg);
1567 10850 : masm.loadShape(pic.objReg, pic.shapeReg);
1568 : Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1569 10850 : ImmPtr(getprop.holder->lastProperty()));
1570 :
1571 : /* Get callobj's stack frame. */
1572 10850 : masm.loadObjPrivate(pic.objReg, pic.shapeReg, getprop.holder->numFixedSlots());
1573 :
1574 10850 : JSFunction *fun = getprop.holder->asCall().getCalleeFunction();
1575 10850 : uint16_t slot = uint16_t(shape->shortid());
1576 :
1577 10850 : Jump skipOver;
1578 10850 : Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
1579 :
1580 : /* Not-escaped case. */
1581 : {
1582 2794 : Address addr(pic.shapeReg, kind == ARG ? StackFrame::offsetOfFormalArg(fun, slot)
1583 13644 : : StackFrame::offsetOfFixed(slot));
1584 10850 : masm.loadPayload(addr, pic.objReg);
1585 10850 : masm.loadTypeTag(addr, pic.shapeReg);
1586 10850 : skipOver = masm.jump();
1587 : }
1588 :
1589 10850 : escapedFrame.linkTo(masm.label(), &masm);
1590 :
1591 : {
1592 10850 : if (kind == VAR)
1593 8056 : slot += fun->nargs;
1594 :
1595 10850 : slot += CallObject::RESERVED_SLOTS;
1596 10850 : Address address = masm.objPropAddress(obj, pic.objReg, slot);
1597 :
1598 : /* Safe because type is loaded first. */
1599 10850 : masm.loadValueAsComponents(address, pic.shapeReg, pic.objReg);
1600 : }
1601 :
1602 10850 : skipOver.linkTo(masm.label(), &masm);
1603 10850 : Jump done = masm.jump();
1604 :
1605 : // All failures flow to here, so there is a common point to patch.
1606 21766 : for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
1607 10916 : pj->linkTo(masm.label(), &masm);
1608 10850 : if (finalNull.isSet())
1609 10485 : finalNull.get().linkTo(masm.label(), &masm);
1610 10850 : finalShape.linkTo(masm.label(), &masm);
1611 10850 : Label failLabel = masm.label();
1612 10850 : Jump failJump = masm.jump();
1613 :
1614 10850 : pic.updatePCCounters(f, masm);
1615 :
1616 21700 : PICLinker buffer(masm, pic);
1617 10850 : if (!buffer.init(cx))
1618 0 : return error();
1619 :
1620 21700 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1621 10850 : !buffer.verifyRange(f.chunk())) {
1622 0 : return disable("code memory is out of range");
1623 : }
1624 :
1625 10850 : buffer.link(failJump, pic.slowPathStart);
1626 10850 : buffer.link(done, pic.fastPathRejoin);
1627 10850 : CodeLocationLabel cs = buffer.finalize(f);
1628 10850 : JaegerSpew(JSpew_PICs, "generated %s call stub at %p\n", type, cs.executableAddress());
1629 :
1630 10850 : patchPreviousToHere(cs);
1631 :
1632 10850 : pic.stubsGenerated++;
1633 10850 : pic.updateLastPath(buffer, failLabel);
1634 10850 : labels.setStubJump(masm, failLabel, failJump);
1635 :
1636 10850 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1637 8 : disable("max stubs reached");
1638 :
1639 10850 : return Lookup_Cacheable;
1640 : }
1641 :
1642 181272 : LookupStatus updateForName()
1643 : {
1644 : // |getprop.obj| is filled by bind()
1645 181272 : LookupStatus status = getprop.bind();
1646 181272 : if (status != Lookup_Cacheable)
1647 527 : return status;
1648 :
1649 180745 : return update(getprop.obj);
1650 : }
1651 :
1652 383 : LookupStatus updateForXName()
1653 : {
1654 : // |obj| and |getprop.obj| are NULL, but should be the given scopeChain.
1655 383 : getprop.obj = scopeChain;
1656 383 : LookupStatus status = getprop.lookup();
1657 383 : if (status != Lookup_Cacheable)
1658 0 : return status;
1659 :
1660 383 : return update(getprop.obj);
1661 : }
1662 :
1663 181128 : LookupStatus update(JSObject *obj)
1664 : {
1665 181128 : if (obj != getprop.holder)
1666 44 : return disable("property is on proto of a scope object");
1667 :
1668 181084 : if (obj->isCall())
1669 11310 : return generateCallStub(obj);
1670 :
1671 169774 : LookupStatus status = getprop.testForGet();
1672 169774 : if (status != Lookup_Cacheable)
1673 543 : return status;
1674 :
1675 169231 : if (obj->isGlobal())
1676 169024 : return generateGlobalStub(obj);
1677 :
1678 207 : return disable("scope object not handled yet");
1679 : }
1680 :
1681 181655 : bool retrieve(Value *vp, PICInfo::Kind kind)
1682 : {
1683 181655 : JSObject *obj = getprop.obj;
1684 181655 : JSObject *holder = getprop.holder;
1685 181655 : const JSProperty *prop = getprop.prop;
1686 :
1687 181655 : if (!prop) {
1688 : /* Kludge to allow (typeof foo == "undefined") tests. */
1689 523 : if (kind == ic::PICInfo::NAME) {
1690 523 : JSOp op2 = JSOp(f.pc()[JSOP_NAME_LENGTH]);
1691 523 : if (op2 == JSOP_TYPEOF) {
1692 453 : vp->setUndefined();
1693 453 : return true;
1694 : }
1695 : }
1696 70 : ReportAtomNotDefined(cx, name);
1697 70 : return false;
1698 : }
1699 :
1700 : // If the property was found, but we decided not to cache it, then
1701 : // take a slow path and do a full property fetch.
1702 181132 : if (!getprop.shape) {
1703 4 : if (!obj->getProperty(cx, name, vp))
1704 0 : return false;
1705 4 : return true;
1706 : }
1707 :
1708 181128 : const Shape *shape = getprop.shape;
1709 181128 : JSObject *normalized = obj;
1710 181128 : if (obj->isWith() && !shape->hasDefaultGetter())
1711 8 : normalized = &obj->asWith().object();
1712 181128 : NATIVE_GET(cx, normalized, holder, shape, JSGET_METHOD_BARRIER, vp, return false);
1713 181119 : return true;
1714 : }
1715 : };
1716 :
1717 : class BindNameCompiler : public PICStubCompiler
 : // PIC stub compiler for JSOP_BINDNAME: finds the object on the scope
 : // chain that holds |name| and emits shape-guarded native stubs that
 : // walk the chain without re-entering the interpreter.
1718 : {
1719 : JSObject *scopeChain;
1720 : PropertyName *name;
1721 :
1722 : public:
1723 3294 : BindNameCompiler(VMFrame &f, JSScript *script, JSObject *scopeChain, ic::PICInfo &pic,
1724 : PropertyName *name, VoidStubPIC stub)
1725 : : PICStubCompiler("bind", f, script, pic, JS_FUNC_TO_DATA_PTR(void *, stub)),
1726 3294 : scopeChain(scopeChain), name(name)
1727 3294 : { }
1728 :
 : // Restore this PIC to its pristine state: inline jump back to the
 : // slow path, slow-path call back to the IC entry point.
1729 : static void reset(Repatcher &repatcher, ic::PICInfo &pic)
1730 : {
1731 : BindNameLabels &labels = pic.bindNameLabels();
1732 :
1733 : /* Link the inline jump back to the slow path. */
1734 : JSC::CodeLocationJump inlineJump = labels.getInlineJump(pic.getFastShapeGuard());
1735 : repatcher.relink(inlineJump, pic.slowPathStart);
1736 :
1737 : /* Link the slow path to call the IC entry point. */
1738 : FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::BindName));
1739 : repatcher.relink(pic.slowPathCall, target);
1740 : }
1741 :
 : // Chain the most recently generated code (inline path if this is
 : // the first stub, otherwise the last stub) to jump to |cs|.
1742 1009 : void patchPreviousToHere(CodeLocationLabel cs)
1743 : {
1744 1009 : BindNameLabels &labels = pic.bindNameLabels();
1745 2018 : Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
1746 1009 : JSC::CodeLocationJump jump;
1747 :
1748 : /* Patch either the inline fast path or a generated stub. */
1749 1009 : if (pic.stubsGenerated)
1750 22 : jump = labels.getStubJump(pic.lastPathStart());
1751 : else
1752 987 : jump = labels.getInlineJump(pic.getFastShapeGuard());
1753 1009 : repatcher.relink(jump, cs);
1754 1009 : }
1755 :
 : // Emit a stub that shape-guards each scope object from |scopeChain|
 : // up to |obj| (the object FindIdentifierBase resolved), leaving the
 : // bound object in pic.objReg. Fails over to disable() when any link
 : // of the chain is not a cacheable non-global scope.
1756 1180 : LookupStatus generateStub(JSObject *obj)
1757 : {
1758 2360 : Assembler masm;
1759 2360 : Vector<Jump, 8> fails(cx);
1760 :
1761 1180 : BindNameLabels &labels = pic.bindNameLabels();
1762 :
1763 1180 : if (!IsCacheableNonGlobalScope(scopeChain))
1764 9 : return disable("non-cacheable obj at start of scope chain");
1765 :
1766 : /* Guard on the shape of the scope chain. */
1767 1171 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
1768 1171 : masm.loadShape(pic.objReg, pic.shapeReg);
1769 : Jump firstShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1770 1171 : ImmPtr(scopeChain->lastProperty()));
1771 :
1772 1171 : if (scopeChain != obj) {
1773 : /* Walk up the scope chain. */
1774 265 : JSObject *tobj = &scopeChain->asScope().enclosingScope();
1775 265 : Address parent(pic.objReg, ScopeObject::offsetOfEnclosingScope());
1776 576 : while (tobj) {
1777 311 : if (!IsCacheableNonGlobalScope(tobj))
1778 162 : return disable("non-cacheable obj in scope chain");
1779 149 : masm.loadPayload(parent, pic.objReg);
1780 149 : masm.loadShape(pic.objReg, pic.shapeReg);
1781 : Jump shapeTest = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
1782 149 : ImmPtr(tobj->lastProperty()));
1783 149 : if (!fails.append(shapeTest))
1784 0 : return error();
1785 149 : if (tobj == obj)
1786 103 : break;
1787 46 : tobj = &tobj->asScope().enclosingScope();
1788 : }
 : // Walked off the chain without reaching |obj|: the binding
 : // lives somewhere we cannot guard, so give up.
1789 103 : if (tobj != obj)
1790 0 : return disable("indirect hit");
1791 : }
1792 :
1793 1009 : Jump done = masm.jump();
1794 :
1795 : // All failures flow to here, so there is a common point to patch.
1796 1126 : for (Jump *pj = fails.begin(); pj != fails.end(); ++pj)
1797 117 : pj->linkTo(masm.label(), &masm);
1798 1009 : firstShape.linkTo(masm.label(), &masm);
1799 1009 : Label failLabel = masm.label();
1800 1009 : Jump failJump = masm.jump();
1801 :
1802 1009 : pic.updatePCCounters(f, masm);
1803 :
1804 2018 : PICLinker buffer(masm, pic);
1805 1009 : if (!buffer.init(cx))
1806 0 : return error();
1807 :
1808 2018 : if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
1809 1009 : !buffer.verifyRange(f.chunk())) {
1810 0 : return disable("code memory is out of range");
1811 : }
1812 :
1813 1009 : buffer.link(failJump, pic.slowPathStart);
1814 1009 : buffer.link(done, pic.fastPathRejoin);
1815 1009 : CodeLocationLabel cs = buffer.finalize(f);
1816 1009 : JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
1817 :
1818 1009 : patchPreviousToHere(cs);
1819 :
1820 1009 : pic.stubsGenerated++;
1821 1009 : pic.updateLastPath(buffer, failLabel);
1822 1009 : labels.setStubJump(masm, failLabel, failJump);
1823 :
1824 1009 : if (pic.stubsGenerated == MAX_PIC_STUBS)
1825 0 : disable("max stubs reached");
1826 :
1827 1009 : return Lookup_Cacheable;
1828 : }
1829 :
 : // Resolve the binding for |name| and, past the first hit, generate a
 : // stub for it. Returns the bound object, or NULL on error. A NULL
 : // return with no pending exception is also possible when the script
 : // was recompiled under us (monitor.recompiled()).
1830 3294 : JSObject *update()
1831 : {
1832 3294 : RecompilationMonitor monitor(cx);
1833 :
1834 3294 : JSObject *obj = FindIdentifierBase(cx, scopeChain, name);
1835 3294 : if (!obj || monitor.recompiled())
1836 2 : return obj;
1837 :
 : // First hit only warms the IC; stubs are generated from the
 : // second hit onward.
1838 3292 : if (!pic.hit) {
1839 2112 : spew("first hit", "nop");
1840 2112 : pic.hit = true;
1841 2112 : return obj;
1842 : }
1843 :
1844 1180 : LookupStatus status = generateStub(obj);
1845 1180 : if (status == Lookup_Error)
1846 0 : return NULL;
1847 :
1848 1180 : return obj;
1849 : }
1850 : };
1851 :
1852 : static void JS_FASTCALL
 : // Permanent slow path installed when the GETPROP IC gives up: always
 : // run the generic stub.
1853 4979625 : DisabledGetPropIC(VMFrame &f, ic::PICInfo *pic)
1854 : {
1855 4979625 : stubs::GetProp(f, pic->name);
1856 4979625 : }
1857 :
1858 : static void JS_FASTCALL
 : // Slow-path counterpart of DisabledGetPropIC for the no-cache variant
 : // of GETPROP.
1859 0 : DisabledGetPropNoCacheIC(VMFrame &f, ic::PICInfo *pic)
1860 : {
1861 0 : stubs::GetPropNoCache(f, pic->name);
1862 0 : }
1863 :
1864 : static inline void
 : // Shared out-of-line path for ic::GetProp / ic::GetPropNoCache.
 : // Special-cases |length| on lazy arguments, arrays, arguments objects
 : // and String objects, then string primitives; otherwise updates the
 : // PIC and performs the property get, writing the result to sp[-1].
 : // THROWs (returns with pending exception) on failure.
1865 444543 : GetPropMaybeCached(VMFrame &f, ic::PICInfo *pic, bool cached)
1866 : {
1867 444543 : VoidStubPIC stub = cached ? DisabledGetPropIC : DisabledGetPropNoCacheIC;
1868 :
1869 444543 : JSScript *script = f.fp()->script();
1870 :
1871 444543 : PropertyName *name = pic->name;
1872 444543 : if (name == f.cx->runtime->atomState.lengthAtom) {
 : // arguments.length on a not-yet-materialized arguments object:
 : // answer directly from the frame, no object needed.
1873 15791 : if (f.regs.sp[-1].isMagic(JS_LAZY_ARGUMENTS)) {
1874 0 : f.regs.sp[-1].setInt32(f.regs.fp()->numActualArgs());
1875 0 : return;
1876 15791 : } else if (!f.regs.sp[-1].isPrimitive()) {
1877 8937 : JSObject *obj = &f.regs.sp[-1].toObject();
1878 11833 : if (obj->isArray() ||
1879 1975 : (obj->isArguments() && !obj->asArguments().hasOverriddenLength()) ||
1880 921 : obj->isString()) {
 : // Generate the specialized length stub, then compute the
 : // result inline to avoid redoing the lookup.
1881 8085 : GetPropCompiler cc(f, script, obj, *pic, NULL, stub);
1882 8085 : if (obj->isArray()) {
1883 7533 : LookupStatus status = cc.generateArrayLengthStub();
1884 7533 : if (status == Lookup_Error)
1885 0 : THROW();
1886 7533 : f.regs.sp[-1].setNumber(obj->getArrayLength());
1887 552 : } else if (obj->isArguments()) {
1888 483 : LookupStatus status = cc.generateArgsLengthStub();
1889 483 : if (status == Lookup_Error)
1890 0 : THROW();
1891 483 : f.regs.sp[-1].setInt32(int32_t(obj->asArguments().initialLength()));
1892 69 : } else if (obj->isString()) {
1893 69 : LookupStatus status = cc.generateStringObjLengthStub();
1894 69 : if (status == Lookup_Error)
1895 0 : THROW();
1896 69 : JSString *str = obj->asString().unbox();
1897 69 : f.regs.sp[-1].setInt32(str->length());
1898 : }
1899 8085 : return;
1900 : }
1901 : }
1902 : }
1903 :
 : // String primitive receiver: attach a string-specialized stub.
1904 436458 : if (f.regs.sp[-1].isString()) {
1905 26294 : GetPropCompiler cc(f, script, NULL, *pic, name, stub);
1906 26294 : if (name == f.cx->runtime->atomState.lengthAtom) {
1907 6842 : LookupStatus status = cc.generateStringLengthStub();
1908 6842 : if (status == Lookup_Error)
1909 0 : THROW();
1910 6842 : JSString *str = f.regs.sp[-1].toString();
1911 6842 : f.regs.sp[-1].setInt32(str->length());
1912 : } else {
1913 19452 : LookupStatus status = cc.generateStringPropertyStub();
1914 19452 : if (status == Lookup_Error)
1915 0 : THROW();
 : // Fetch via a temporary wrapper object; the stub handles
 : // subsequent hits.
1916 19452 : JSObject *obj = ValueToObject(f.cx, f.regs.sp[-1]);
1917 19452 : if (!obj)
1918 0 : THROW();
1919 19452 : if (!obj->getProperty(f.cx, name, &f.regs.sp[-1]))
1920 0 : THROW();
1921 : }
1922 26294 : return;
1923 : }
1924 :
1925 410164 : RecompilationMonitor monitor(f.cx);
1926 :
1927 410164 : JSObject *obj = ValueToObject(f.cx, f.regs.sp[-1]);
1928 410164 : if (!obj)
1929 19 : THROW();
1930 :
 : // Only touch IC state if the script was not recompiled while
 : // converting the receiver to an object.
1931 410145 : if (!monitor.recompiled() && pic->shouldUpdate(f.cx)) {
1932 202275 : GetPropCompiler cc(f, script, obj, *pic, name, stub);
1933 202275 : if (!cc.update())
1934 0 : THROW();
1935 : }
1936 :
1937 : Value v;
1938 410145 : if (cached) {
1939 407602 : if (!GetPropertyOperation(f.cx, f.pc(), f.regs.sp[-1], &v))
1940 27 : THROW();
1941 : } else {
1942 2543 : if (!obj->getProperty(f.cx, name, &v))
1943 0 : THROW();
1944 : }
1945 :
1946 410118 : f.regs.sp[-1] = v;
1947 : }
1948 :
1949 : void JS_FASTCALL
 : // IC entry point for GETPROP: cached variant.
1950 442000 : ic::GetProp(VMFrame &f, ic::PICInfo *pic)
1951 : {
1952 442000 : GetPropMaybeCached(f, pic, /* cache = */ true);
1953 442000 : }
1954 :
1955 : void JS_FASTCALL
 : // IC entry point for the no-cache GETPROP variant.
1956 2543 : ic::GetPropNoCache(VMFrame &f, ic::PICInfo *pic)
1957 : {
1958 2543 : GetPropMaybeCached(f, pic, /* cache = */ false);
1959 2543 : }
1960 :
1961 : template <JSBool strict>
1962 : static void JS_FASTCALL
 : // Permanent slow path for a disabled SETPROP IC; instantiated per
 : // strictness mode.
1963 56626 : DisabledSetPropIC(VMFrame &f, ic::PICInfo *pic)
1964 : {
1965 56626 : stubs::SetName<strict>(f, pic->name);
1966 56626 : }
1967 :
1968 : void JS_FASTCALL
 : // IC entry point for SETPROP/SETNAME: update the PIC if warranted,
 : // then always perform the actual set via the generic SetName stub.
1969 31207 : ic::SetProp(VMFrame &f, ic::PICInfo *pic)
1970 : {
1971 31207 : JSScript *script = f.fp()->script();
1972 31207 : JS_ASSERT(pic->isSet());
1973 :
1974 31207 : VoidStubPIC stub = STRICT_VARIANT(DisabledSetPropIC);
1975 :
1976 : // Save this in case the compiler triggers a recompilation of this script.
1977 31207 : PropertyName *name = pic->name;
1978 31207 : VoidStubName nstub = STRICT_VARIANT(stubs::SetName);
1979 :
1980 31207 : RecompilationMonitor monitor(f.cx);
1981 :
1982 31207 : JSObject *obj = ValueToObject(f.cx, f.regs.sp[-2]);
1983 31207 : if (!obj)
1984 0 : THROW();
1985 :
1986 : // Note, we can't use SetName for PROPINC PICs because the property
1987 : // cache can't handle a GET and SET from the same scripted PC.
1988 31207 : if (!monitor.recompiled() && pic->shouldUpdate(f.cx)) {
1989 12998 : SetPropCompiler cc(f, script, obj, *pic, name, stub);
1990 12998 : LookupStatus status = cc.update();
1991 12998 : if (status == Lookup_Error)
1992 0 : THROW();
1993 : }
1994 :
 : // The set itself always goes through the stub, cached or not.
1995 31207 : nstub(f, name);
1996 : }
1997 :
1998 : static void JS_FASTCALL
 : // Permanent slow path for a disabled NAME IC.
1999 632241 : DisabledNameIC(VMFrame &f, ic::PICInfo *pic)
2000 : {
2001 632241 : stubs::Name(f);
2002 632241 : }
2003 :
2004 : static void JS_FASTCALL
 : // Permanent slow path for a disabled GETXPROP IC; falls back to the
 : // generic property get.
2005 0 : DisabledXNameIC(VMFrame &f, ic::PICInfo *pic)
2006 : {
2007 0 : stubs::GetProp(f, pic->name);
2008 0 : }
2009 :
2010 : void JS_FASTCALL
 : // IC entry point for GETXPROP: a name lookup restricted to the single
 : // object already on top of the stack.
2011 383 : ic::XName(VMFrame &f, ic::PICInfo *pic)
2012 : {
2013 383 : JSScript *script = f.fp()->script();
2014 :
2015 : /* GETXPROP is guaranteed to have an object. */
2016 383 : JSObject *obj = &f.regs.sp[-1].toObject();
2017 :
2018 383 : ScopeNameCompiler cc(f, script, obj, *pic, pic->name, DisabledXNameIC);
2019 :
2020 383 : LookupStatus status = cc.updateForXName();
2021 383 : if (status == Lookup_Error)
2022 0 : THROW();
2023 :
2024 : Value rval;
2025 383 : if (!cc.retrieve(&rval, PICInfo::XNAME))
2026 0 : THROW();
 : // Replace the receiver with the fetched value.
2027 383 : f.regs.sp[-1] = rval;
2028 : }
2029 :
2030 : void JS_FASTCALL
 : // IC entry point for NAME: resolve along the frame's scope chain and
 : // push the result (sp[0] — NAME pushes rather than replaces).
2031 181272 : ic::Name(VMFrame &f, ic::PICInfo *pic)
2032 : {
2033 181272 : JSScript *script = f.fp()->script();
2034 :
2035 181272 : ScopeNameCompiler cc(f, script, &f.fp()->scopeChain(), *pic, pic->name, DisabledNameIC);
2036 :
2037 181272 : LookupStatus status = cc.updateForName();
2038 181272 : if (status == Lookup_Error)
2039 0 : THROW();
2040 :
2041 : Value rval;
2042 181272 : if (!cc.retrieve(&rval, PICInfo::NAME))
2043 79 : THROW();
2044 181193 : f.regs.sp[0] = rval;
2045 : }
2046 :
2047 : static void JS_FASTCALL
 : // Permanent slow path for a disabled BINDNAME IC.
2048 1439 : DisabledBindNameIC(VMFrame &f, ic::PICInfo *pic)
2049 : {
2050 1439 : stubs::BindName(f, pic->name);
2051 1439 : }
2052 :
2053 : void JS_FASTCALL
 : // IC entry point for BINDNAME: find (and cache) the scope-chain object
 : // holding pic->name, pushing it onto the stack.
2054 3294 : ic::BindName(VMFrame &f, ic::PICInfo *pic)
2055 : {
2056 3294 : JSScript *script = f.fp()->script();
2057 :
2058 3294 : VoidStubPIC stub = DisabledBindNameIC;
2059 3294 : BindNameCompiler cc(f, script, &f.fp()->scopeChain(), *pic, pic->name, stub);
2060 :
2061 3294 : JSObject *obj = cc.update();
2062 3294 : if (!obj)
2063 0 : THROW();
2064 :
2065 3294 : f.regs.sp[0].setObject(*obj);
2066 : }
2067 :
2068 : void
 : // Debug-only trace of IC events (stub generated, disabled, ignored...);
 : // compiled out unless JS_METHODJIT_SPEW is defined.
2069 295245 : BaseIC::spew(JSContext *cx, const char *event, const char *message)
2070 : {
2071 : #ifdef JS_METHODJIT_SPEW
2072 : JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n",
2073 295245 : js_CodeName[op], event, message, cx->fp()->script()->filename, CurrentLine(cx));
2074 : #endif
2075 295245 : }
2076 :
2077 : /* Total length of scripts preceding a frame. */
 : // Index into the chunk's per-pc counters array for the current pc:
 : // when inlined, skip over the outer script and all earlier inline
 : // frames' scripts, then add the pc offset within the current script.
2078 0 : inline uint32_t frameCountersOffset(VMFrame &f)
2079 : {
2080 0 : JSContext *cx = f.cx;
2081 :
2082 0 : uint32_t offset = 0;
2083 0 : if (cx->regs().inlined()) {
2084 0 : offset += cx->fp()->script()->length;
2085 0 : uint32_t index = cx->regs().inlined()->inlineIndex;
2086 0 : InlineFrame *frames = f.chunk()->inlineFrames();
2087 0 : for (unsigned i = 0; i < index; i++)
2088 0 : offset += frames[i].fun->script()->length;
2089 : }
2090 :
2091 : jsbytecode *pc;
2092 0 : JSScript *script = cx->stack.currentScript(&pc);
2093 0 : offset += pc - script->code;
2094 :
2095 0 : return offset;
2096 : }
2097 :
2098 : LookupStatus
 : // Permanently disable this IC: zero its counters slot (if profiling),
 : // and relink the slow-path call site to the given fallback |stub| so
 : // future hits bypass IC update logic entirely.
2099 61906 : BaseIC::disable(VMFrame &f, const char *reason, void *stub)
2100 : {
2101 61906 : if (f.chunk()->pcLengths) {
2102 0 : uint32_t offset = frameCountersOffset(f);
2103 0 : f.chunk()->pcLengths[offset].picsLength = 0;
2104 : }
2105 :
2106 61906 : spew(f.cx, "disabled", reason);
2107 123812 : Repatcher repatcher(f.chunk());
2108 61906 : repatcher.relink(slowPathCall, FunctionPtr(stub));
2109 61906 : return Lookup_Uncacheable;
2110 : }
2111 :
2112 : void
 : // Attribute the size of newly assembled stub code to this pc's
 : // profiling counters (no-op unless pcLengths profiling is active).
2113 261473 : BaseIC::updatePCCounters(VMFrame &f, Assembler &masm)
2114 : {
2115 261473 : if (f.chunk()->pcLengths) {
2116 0 : uint32_t offset = frameCountersOffset(f);
2117 0 : f.chunk()->pcLengths[offset].picsLength += masm.size();
2118 : }
2119 261473 : }
2120 :
2121 : bool
 : // The first hit only warms the IC (returns false); stub generation is
 : // deferred to subsequent hits.
2122 441352 : BaseIC::shouldUpdate(JSContext *cx)
2123 : {
2124 441352 : if (!hit) {
2125 226079 : hit = true;
2126 226079 : spew(cx, "ignored", "first hit");
2127 226079 : return false;
2128 : }
2129 215273 : JS_ASSERT(stubsGenerated < MAX_PIC_STUBS);
2130 215273 : return true;
2131 : }
2132 :
2133 : static void JS_FASTCALL
 : // Permanent slow path for a disabled GETELEM IC.
2134 680126 : DisabledGetElem(VMFrame &f, ic::GetElementIC *ic)
2135 : {
2136 680126 : stubs::GetElem(f);
2137 680126 : }
2138 :
2139 : bool
 : // Same warm-up policy as BaseIC::shouldUpdate, but asserts against the
 : // GETELEM-specific stub limit.
2140 13713 : GetElementIC::shouldUpdate(JSContext *cx)
2141 : {
2142 13713 : if (!hit) {
2143 4995 : hit = true;
2144 4995 : spew(cx, "ignored", "first hit");
2145 4995 : return false;
2146 : }
2147 8718 : JS_ASSERT(stubsGenerated < MAX_GETELEM_IC_STUBS);
2148 8718 : return true;
2149 : }
2150 :
2151 : LookupStatus
 : // Disable this GETELEM IC, recording that the slow call was patched so
 : // purge() knows to restore it later.
2152 2936 : GetElementIC::disable(VMFrame &f, const char *reason)
2153 : {
2154 2936 : slowCallPatched = true;
2155 2936 : void *stub = JS_FUNC_TO_DATA_PTR(void *, DisabledGetElem);
2156 2936 : BaseIC::disable(f, reason, stub);
2157 2936 : return Lookup_Uncacheable;
2158 : }
2159 :
2160 : LookupStatus
 : // OOM/error path: the caller is expected to already have reported.
2161 0 : GetElementIC::error(JSContext *cx)
2162 : {
2163 0 : return Lookup_Error;
2164 : }
2165 :
2166 : void
 : // Undo every patch this IC has made (inline type/shape guards, the
 : // slow-path call), then reset all IC state to pristine.
2167 0 : GetElementIC::purge(Repatcher &repatcher)
2168 : {
2169 : // Repatch the inline jumps.
2170 0 : if (inlineTypeGuardPatched)
2171 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), slowPathStart);
2172 0 : if (inlineShapeGuardPatched)
2173 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), slowPathStart);
2174 :
2175 0 : if (slowCallPatched) {
2176 : repatcher.relink(slowPathCall,
2177 0 : FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, ic::GetElement)));
2178 : }
2179 :
2180 0 : reset();
2181 0 : }
2182 :
2183 : LookupStatus
 : // Attach a stub for a GETELEM whose key is a non-index string atom,
 : // effectively treating it as a GETPROP. String-key stubs are chained
 : // separately from int-key stubs; the first one carries the key type
 : // guard for the whole chain. On success also stores the fetched value
 : // in |*vp| so the caller need not redo the lookup.
2184 6044 : GetElementIC::attachGetProp(VMFrame &f, JSObject *obj, const Value &v, PropertyName *name,
2185 : Value *vp)
2186 : {
2187 6044 : JS_ASSERT(v.isString());
2188 6044 : JSContext *cx = f.cx;
2189 :
2190 6044 : GetPropHelper<GetElementIC> getprop(cx, obj, name, *this, f);
2191 6044 : LookupStatus status = getprop.lookupAndTest();
2192 6044 : if (status != Lookup_Cacheable)
2193 61 : return status;
2194 :
2195 : // With TI enabled, string property stubs can only be added to an opcode if
2196 : // the value read will go through a type barrier afterwards. TI only
2197 : // accounts for integer-valued properties accessed by GETELEM/CALLELEM.
2198 5983 : if (cx->typeInferenceEnabled() && !forcedTypeBarrier)
2199 29 : return disable(f, "string element access may not have type barrier");
2200 :
2201 11908 : Assembler masm;
2202 :
2203 : // Guard on the string's type and identity.
2204 5954 : MaybeJump atomTypeGuard;
2205 5954 : if (hasInlineTypeGuard() && !inlineTypeGuardPatched) {
2206 : // We link all string-key dependent stubs together, and store the
2207 : // first set of guards in the IC, separately, from int-key dependent
2208 : // stubs. As long as we guarantee that the first string-key dependent
2209 : // stub guards on the key type, then all other string-key stubs can
2210 : // omit the guard.
2211 769 : JS_ASSERT(!idRemat.isTypeKnown());
2212 769 : atomTypeGuard = masm.testString(Assembler::NotEqual, typeReg);
2213 : } else {
2214 : // If there was no inline type guard, then a string type is guaranteed.
2215 : // Otherwise, we are guaranteed the type has already been checked, via
2216 : // the comment above.
2217 5185 : JS_ASSERT_IF(!hasInlineTypeGuard(), idRemat.knownType() == JSVAL_TYPE_STRING);
2218 : }
2219 :
2220 : // Reify the shape before guards that could flow into shape guarding stubs.
2221 5954 : if (!obj->isDenseArray() && !typeRegHasBaseShape) {
2222 1459 : masm.loadShape(objReg, typeReg);
2223 1459 : typeRegHasBaseShape = true;
2224 : }
2225 :
2226 5954 : MaybeJump atomIdGuard;
2227 5954 : if (!idRemat.isConstant())
2228 5940 : atomIdGuard = masm.branchPtr(Assembler::NotEqual, idRemat.dataReg(), ImmPtr(v.toString()));
2229 :
2230 : // Guard on the base shape.
2231 5954 : Jump shapeGuard = masm.branchPtr(Assembler::NotEqual, typeReg, ImmPtr(obj->lastProperty()));
2232 :
2233 11908 : Vector<Jump, 8> otherGuards(cx);
2234 :
2235 : // Guard on the prototype, if applicable.
2236 5954 : MaybeJump protoGuard;
2237 5954 : JSObject *holder = getprop.holder;
2238 5954 : RegisterID holderReg = objReg;
2239 5954 : if (obj != holder) {
2240 81 : if (!GeneratePrototypeGuards(cx, otherGuards, masm, obj, holder, objReg, typeReg))
2241 0 : return error(cx);
2242 :
2243 : // Bake in the holder identity. Careful not to clobber |objReg|, since we can't remat it.
2244 81 : holderReg = typeReg;
2245 81 : masm.move(ImmPtr(holder), holderReg);
2246 81 : typeRegHasBaseShape = false;
2247 :
2248 : // Guard on the holder's shape.
2249 81 : protoGuard = masm.guardShape(holderReg, holder);
2250 : }
2251 :
2252 5954 : if (op == JSOP_CALLELEM) {
2253 : // Emit a write of |obj| to the top of the stack, before we lose it.
2254 135 : Value *thisVp = &cx->regs().sp[-1];
2255 135 : Address thisSlot(JSFrameReg, StackFrame::offsetOfFixed(thisVp - cx->fp()->slots()));
2256 135 : masm.storeValueFromComponents(ImmType(JSVAL_TYPE_OBJECT), objReg, thisSlot);
2257 : }
2258 :
2259 : // Load the value.
2260 5954 : const Shape *shape = getprop.shape;
2261 5954 : masm.loadObjProp(holder, holderReg, shape, typeReg, objReg);
2262 :
2263 5954 : Jump done = masm.jump();
2264 :
2265 5954 : updatePCCounters(f, masm);
2266 :
2267 11908 : PICLinker buffer(masm, *this);
2268 5954 : if (!buffer.init(cx))
2269 0 : return error(cx);
2270 :
2271 5954 : if (hasLastStringStub && !buffer.verifyRange(lastStringStub))
2272 0 : return disable(f, "code memory is out of range")
 : // (previous stub must be reachable by a jump from the new one)
 : ;
2273 5954 : if (!buffer.verifyRange(f.chunk()))
2274 0 : return disable(f, "code memory is out of range");
2275 :
2276 : // Patch all guards.
2277 5954 : buffer.maybeLink(atomIdGuard, slowPathStart);
2278 5954 : buffer.maybeLink(atomTypeGuard, slowPathStart);
2279 5954 : buffer.link(shapeGuard, slowPathStart);
2280 5954 : buffer.maybeLink(protoGuard, slowPathStart);
2281 5954 : for (Jump *pj = otherGuards.begin(); pj != otherGuards.end(); ++pj)
2282 0 : buffer.link(*pj, slowPathStart);
2283 5954 : buffer.link(done, fastPathRejoin);
2284 :
2285 5954 : CodeLocationLabel cs = buffer.finalize(f);
2286 : #if DEBUG
 : // NOTE(review): `#if DEBUG` rather than `#ifdef DEBUG` — relies on
 : // DEBUG being defined to a nonzero value; confirm against the
 : // build's convention.
2287 5954 : char *chars = DeflateString(cx, v.toString()->getChars(cx), v.toString()->length());
2288 : JaegerSpew(JSpew_PICs, "generated %s stub at %p for atom %p (\"%s\") shape %p (%s: %d)\n",
2289 : js_CodeName[op], cs.executableAddress(), (void*)name, chars,
2290 5954 : (void*)holder->lastProperty(), cx->fp()->script()->filename, CurrentLine(cx));
2291 5954 : cx->free_(chars);
2292 : #endif
2293 :
2294 : // Update the inline guards, if needed.
2295 5954 : if (shouldPatchInlineTypeGuard() || shouldPatchUnconditionalShapeGuard()) {
2296 2884 : Repatcher repatcher(f.chunk());
2297 :
2298 1442 : if (shouldPatchInlineTypeGuard()) {
2299 : // A type guard is present in the inline path, and this is the
2300 : // first string stub, so patch it now.
2301 769 : JS_ASSERT(!inlineTypeGuardPatched);
2302 769 : JS_ASSERT(atomTypeGuard.isSet());
2303 :
2304 769 : repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), cs);
2305 769 : inlineTypeGuardPatched = true;
2306 : }
2307 :
2308 1442 : if (shouldPatchUnconditionalShapeGuard()) {
2309 : // The shape guard is unconditional, meaning there is no type
2310 : // check. This is the first stub, so it has to be patched. Note
2311 : // that it is wrong to patch the inline shape guard otherwise,
2312 : // because it follows an integer-id guard.
2313 673 : JS_ASSERT(!hasInlineTypeGuard());
2314 :
2315 673 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
2316 673 : inlineShapeGuardPatched = true;
2317 : }
2318 : }
2319 :
2320 : // If there were previous stub guards, patch them now.
2321 5954 : if (hasLastStringStub) {
2322 9024 : Repatcher repatcher(lastStringStub);
2323 4512 : CodeLocationLabel stub(lastStringStub.start());
2324 4512 : if (atomGuard)
2325 4512 : repatcher.relink(stub.jumpAtOffset(atomGuard), cs);
2326 4512 : repatcher.relink(stub.jumpAtOffset(firstShapeGuard), cs);
2327 4512 : if (secondShapeGuard)
2328 57 : repatcher.relink(stub.jumpAtOffset(secondShapeGuard), cs);
2329 : }
2330 :
2331 : // Update state.
 : // Record this stub's guard offsets so the NEXT stub can chain onto
 : // them. A zero offset means "no such guard in this stub".
2332 5954 : hasLastStringStub = true;
2333 5954 : lastStringStub = JITCode(cs.executableAddress(), buffer.size());
2334 5954 : if (atomIdGuard.isSet()) {
2335 5940 : atomGuard = buffer.locationOf(atomIdGuard.get()) - cs;
2336 5940 : JS_ASSERT(atomGuard == buffer.locationOf(atomIdGuard.get()) - cs);
2337 5940 : JS_ASSERT(atomGuard);
2338 : } else {
2339 14 : atomGuard = 0;
2340 : }
2341 5954 : if (protoGuard.isSet()) {
2342 81 : secondShapeGuard = buffer.locationOf(protoGuard.get()) - cs;
2343 81 : JS_ASSERT(secondShapeGuard == buffer.locationOf(protoGuard.get()) - cs);
2344 81 : JS_ASSERT(secondShapeGuard);
2345 : } else {
2346 5873 : secondShapeGuard = 0;
2347 : }
2348 5954 : firstShapeGuard = buffer.locationOf(shapeGuard) - cs;
2349 5954 : JS_ASSERT(firstShapeGuard == buffer.locationOf(shapeGuard) - cs);
2350 5954 : JS_ASSERT(firstShapeGuard);
2351 :
2352 5954 : stubsGenerated++;
2353 :
2354 5954 : if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2355 38 : disable(f, "max stubs reached");
2356 :
2357 : // Finally, fetch the value to avoid redoing the property lookup.
2358 5954 : *vp = holder->getSlot(shape->slot());
2359 :
2360 5954 : return Lookup_Cacheable;
2361 : }
2362 :
2363 : LookupStatus
 : // Attach a stub for integer-key GETELEM on an arguments object. The
 : // stub handles both the "dead frame" case (read from ArgumentsData
 : // slots) and the "live frame" case (read the canonical value from the
 : // stack frame, whether the index is a formal or an extra actual).
 : // The stub is monomorphic: on success we immediately disable the IC.
2364 438 : GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value *vp)
2365 : {
2366 438 : JSContext *cx = f.cx;
2367 :
2368 438 : if (!v.isInt32())
2369 2 : return disable(f, "arguments object with non-integer key");
2370 :
2371 436 : if (op == JSOP_CALLELEM)
2372 4 : return disable(f, "arguments object with call");
2373 :
2374 432 : JS_ASSERT(hasInlineTypeGuard() || idRemat.knownType() == JSVAL_TYPE_INT32);
2375 :
2376 864 : Assembler masm;
2377 :
2378 432 : Jump shapeGuard = masm.testObjClass(Assembler::NotEqual, objReg, typeReg, obj->getClass());
2379 :
 : // Decode the packed INITIAL_LENGTH slot: bail if length was
 : // overridden, then shift out the packing bits to get the count.
2380 432 : masm.move(objReg, typeReg);
2381 432 : masm.load32(Address(objReg, JSObject::getFixedSlotOffset(ArgumentsObject::INITIAL_LENGTH_SLOT)),
2382 864 : objReg);
2383 : Jump overridden = masm.branchTest32(Assembler::NonZero, objReg,
2384 432 : Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT));
2385 432 : masm.rshift32(Imm32(ArgumentsObject::PACKED_BITS_COUNT), objReg);
2386 :
2387 432 : Jump outOfBounds;
2388 432 : if (idRemat.isConstant()) {
2389 253 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, objReg, Imm32(v.toInt32()));
2390 : } else {
2391 179 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, objReg, idRemat.dataReg());
2392 : }
2393 :
 : // Check the slot's type tag: a MAGIC value marks a deleted element
 : // (a "hole"), which must take the slow path.
2394 432 : masm.loadPrivate(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::DATA_SLOT)), objReg);
2395 432 : if (idRemat.isConstant()) {
2396 253 : Address slot(objReg, offsetof(ArgumentsData, slots) + v.toInt32() * sizeof(Value));
2397 253 : masm.loadTypeTag(slot, objReg);
2398 : } else {
2399 : BaseIndex slot(objReg, idRemat.dataReg(), Assembler::JSVAL_SCALE,
2400 179 : offsetof(ArgumentsData, slots));
2401 179 : masm.loadTypeTag(slot, objReg);
2402 : }
2403 432 : Jump holeCheck = masm.branchPtr(Assembler::Equal, objReg, ImmType(JSVAL_TYPE_MAGIC));
2404 :
 : // A non-null STACK_FRAME_SLOT means the frame is still live and the
 : // authoritative values are on the stack, not in ArgumentsData.
2405 432 : masm.loadPrivate(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::STACK_FRAME_SLOT)), objReg);
2406 432 : Jump liveArguments = masm.branchPtr(Assembler::NotEqual, objReg, ImmPtr(0));
2407 :
 : // Dead frame: load directly from the ArgumentsData slot vector.
2408 432 : masm.loadPrivate(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::DATA_SLOT)), objReg);
2409 :
2410 432 : if (idRemat.isConstant()) {
2411 253 : Address slot(objReg, offsetof(ArgumentsData, slots) + v.toInt32() * sizeof(Value));
2412 253 : masm.loadValueAsComponents(slot, typeReg, objReg);
2413 : } else {
2414 : BaseIndex slot(objReg, idRemat.dataReg(), Assembler::JSVAL_SCALE,
2415 179 : offsetof(ArgumentsData, slots));
2416 179 : masm.loadValueAsComponents(slot, typeReg, objReg);
2417 : }
2418 :
2419 432 : Jump done = masm.jump();
2420 :
 : // Live frame path: locate the value within the stack frame.
2421 432 : liveArguments.linkTo(masm.label(), &masm);
2422 :
2423 432 : masm.move(objReg, typeReg);
2424 :
2425 432 : Address fun(typeReg, StackFrame::offsetOfExec());
2426 432 : masm.loadPtr(fun, objReg);
2427 :
2428 432 : Address nargs(objReg, offsetof(JSFunction, nargs));
2429 432 : masm.load16(nargs, objReg);
2430 :
2431 432 : Jump notFormalArg;
2432 432 : if (idRemat.isConstant())
2433 253 : notFormalArg = masm.branch32(Assembler::BelowOrEqual, objReg, Imm32(v.toInt32()));
2434 : else
2435 179 : notFormalArg = masm.branch32(Assembler::BelowOrEqual, objReg, idRemat.dataReg());
2436 :
2437 432 : masm.lshift32(Imm32(3), objReg); /* nargs << 3 == nargs * sizeof(Value) */
2438 432 : masm.subPtr(objReg, typeReg); /* fp - numFormalArgs => start of formal args */
2439 :
2440 432 : Label loadFromStack = masm.label();
2441 432 : masm.move(typeReg, objReg);
2442 :
2443 432 : if (idRemat.isConstant()) {
2444 253 : Address frameEntry(objReg, v.toInt32() * sizeof(Value));
2445 253 : masm.loadValueAsComponents(frameEntry, typeReg, objReg);
2446 : } else {
2447 179 : BaseIndex frameEntry(objReg, idRemat.dataReg(), Assembler::JSVAL_SCALE);
2448 179 : masm.loadValueAsComponents(frameEntry, typeReg, objReg);
2449 : }
2450 432 : Jump done2 = masm.jump();
2451 :
 : // Index >= nargs: an extra actual, which lives before the frame.
2452 432 : notFormalArg.linkTo(masm.label(), &masm);
2453 :
2454 432 : masm.push(typeReg);
2455 :
2456 432 : Address argsObject(typeReg, StackFrame::offsetOfArgsObj());
2457 432 : masm.loadPtr(argsObject, typeReg);
2458 :
2459 432 : masm.load32(Address(typeReg, JSObject::getFixedSlotOffset(ArgumentsObject::INITIAL_LENGTH_SLOT)),
2460 864 : typeReg);
2461 432 : masm.rshift32(Imm32(ArgumentsObject::PACKED_BITS_COUNT), typeReg);
2462 :
2463 : /* This basically does fp - (numFormalArgs + numActualArgs + 2) */
2464 :
2465 432 : masm.addPtr(typeReg, objReg);
2466 432 : masm.addPtr(Imm32(2), objReg);
2467 432 : masm.lshiftPtr(Imm32(3), objReg);
2468 :
2469 432 : masm.pop(typeReg);
2470 432 : masm.subPtr(objReg, typeReg);
2471 :
2472 432 : masm.jump(loadFromStack);
2473 :
2474 432 : updatePCCounters(f, masm);
2475 :
2476 864 : PICLinker buffer(masm, *this);
2477 :
2478 432 : if (!buffer.init(cx))
2479 0 : return error(cx);
2480 :
2481 432 : if (!buffer.verifyRange(f.chunk()))
2482 0 : return disable(f, "code memory is out of range");
2483 :
 : // Every guard failure falls back to the generic slow path.
2484 432 : buffer.link(shapeGuard, slowPathStart);
2485 432 : buffer.link(overridden, slowPathStart);
2486 432 : buffer.link(outOfBounds, slowPathStart);
2487 432 : buffer.link(holeCheck, slowPathStart);
2488 432 : buffer.link(done, fastPathRejoin);
2489 432 : buffer.link(done2, fastPathRejoin);
2490 :
2491 432 : CodeLocationLabel cs = buffer.finalizeCodeAddendum();
2492 :
2493 432 : JaegerSpew(JSpew_PICs, "generated getelem arguments stub at %p\n", cs.executableAddress());
2494 :
2495 864 : Repatcher repatcher(f.chunk());
2496 432 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
2497 :
2498 432 : JS_ASSERT(!shouldPatchUnconditionalShapeGuard());
2499 432 : JS_ASSERT(!inlineShapeGuardPatched);
2500 :
2501 432 : inlineShapeGuardPatched = true;
2502 432 : stubsGenerated++;
2503 :
2504 432 : if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2505 0 : disable(f, "max stubs reached");
2506 :
 : // Monomorphic by design: never attach a second arguments stub.
2507 432 : disable(f, "generated arguments stub");
2508 :
 : // Fetch the value now, as Lookup_Cacheable promises a result in *vp.
2509 432 : if (!obj->getGeneric(cx, id, vp))
2510 0 : return Lookup_Error;
2511 :
2512 432 : return Lookup_Cacheable;
2513 : }
2514 :
2515 : #if defined JS_METHODJIT_TYPED_ARRAY
2516 : LookupStatus
 : // Attach a stub for integer-key GETELEM on a typed array: shape guard,
 : // bounds check against the array length, then a typed load from the
 : // data vector. Monomorphic: the IC is disabled right after attaching.
2517 816 : GetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value *vp)
2518 : {
2519 816 : JSContext *cx = f.cx;
2520 :
2521 816 : if (!v.isInt32())
2522 0 : return disable(f, "typed array with string key");
2523 :
2524 816 : if (op == JSOP_CALLELEM)
2525 0 : return disable(f, "typed array with call");
2526 :
2527 : // The fast-path guarantees that after the dense shape guard, the type is
2528 : // known to be int32, either via type inference or the inline type check.
2529 816 : JS_ASSERT(hasInlineTypeGuard() || idRemat.knownType() == JSVAL_TYPE_INT32);
2530 :
2531 1632 : Assembler masm;
2532 :
2533 : // Guard on this typed array's shape/class.
2534 816 : Jump shapeGuard = masm.guardShape(objReg, obj);
2535 :
2536 : // Bounds check.
2537 816 : Jump outOfBounds;
2538 816 : Address typedArrayLength = masm.payloadOf(Address(objReg, TypedArray::lengthOffset()));
2539 816 : if (idRemat.isConstant()) {
2540 643 : JS_ASSERT(idRemat.value().toInt32() == v.toInt32());
2541 643 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, Imm32(v.toInt32()));
2542 : } else {
2543 173 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, idRemat.dataReg());
2544 : }
2545 :
2546 : // Load the array's packed data vector.
2547 816 : masm.loadPtr(Address(objReg, TypedArray::dataOffset()), objReg);
2548 :
2549 816 : Int32Key key = idRemat.isConstant()
2550 643 : ? Int32Key::FromConstant(v.toInt32())
2551 1459 : : Int32Key::FromRegister(idRemat.dataReg());
2552 :
 : // Float32/Float64/Uint32 loads need FP support (Uint32 may exceed
 : // int32 range and must be boxed as a double).
2553 816 : JSObject *tarray = js::TypedArray::getTypedArray(obj);
2554 816 : if (!masm.supportsFloatingPoint() &&
2555 0 : (TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT32 ||
2556 0 : TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT64 ||
2557 0 : TypedArray::getType(tarray) == js::TypedArray::TYPE_UINT32))
2558 : {
2559 0 : return disable(f, "fpu not supported");
2560 : }
2561 :
2562 816 : MaybeRegisterID tempReg;
2563 816 : masm.loadFromTypedArray(TypedArray::getType(tarray), objReg, key, typeReg, objReg, tempReg);
2564 :
2565 816 : Jump done = masm.jump();
2566 :
2567 816 : updatePCCounters(f, masm);
2568 :
2569 1632 : PICLinker buffer(masm, *this);
2570 816 : if (!buffer.init(cx))
2571 0 : return error(cx);
2572 :
2573 816 : if (!buffer.verifyRange(f.chunk()))
2574 0 : return disable(f, "code memory is out of range");
2575 :
2576 816 : buffer.link(shapeGuard, slowPathStart);
2577 816 : buffer.link(outOfBounds, slowPathStart);
2578 816 : buffer.link(done, fastPathRejoin);
2579 :
2580 816 : CodeLocationLabel cs = buffer.finalizeCodeAddendum();
2581 816 : JaegerSpew(JSpew_PICs, "generated getelem typed array stub at %p\n", cs.executableAddress());
2582 :
2583 : // If we can generate a typed array stub, the shape guard is conditional.
2584 : // Also, we only support one typed array.
2585 816 : JS_ASSERT(!shouldPatchUnconditionalShapeGuard());
2586 816 : JS_ASSERT(!inlineShapeGuardPatched);
2587 :
2588 1632 : Repatcher repatcher(f.chunk());
2589 816 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
2590 816 : inlineShapeGuardPatched = true;
2591 :
2592 816 : stubsGenerated++;
2593 :
2594 : // In the future, it might make sense to attach multiple typed array stubs.
2595 : // For simplicitly, they are currently monomorphic.
2596 816 : if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2597 0 : disable(f, "max stubs reached");
2598 :
2599 816 : disable(f, "generated typed array stub");
2600 :
2601 : // Fetch the value as expected of Lookup_Cacheable for GetElement.
2602 816 : if (!obj->getGeneric(cx, id, vp))
2603 0 : return Lookup_Error;
2604 :
2605 816 : return Lookup_Cacheable;
2606 : }
2607 : #endif /* JS_METHODJIT_TYPED_ARRAY */
2608 :
2609 : LookupStatus
2610 8718 : GetElementIC::update(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value *vp)
2611 : {
2612 : /*
2613 : * Only treat this as a GETPROP for non-numeric string identifiers. The
2614 : * GETPROP IC assumes the id has already gone through filtering for string
2615 : * indexes in the emitter, i.e. js_GetProtoIfDenseArray is only valid to
2616 : * use when looking up non-integer identifiers.
2617 : */
2618 : uint32_t dummy;
2619 8718 : if (v.isString() && JSID_IS_ATOM(id) && !JSID_TO_ATOM(id)->isIndex(&dummy))
2620 6044 : return attachGetProp(f, obj, v, JSID_TO_ATOM(id)->asPropertyName(), vp);
2621 :
2622 2674 : if (obj->isArguments())
2623 438 : return attachArguments(f, obj, v, id, vp);
2624 :
2625 : #if defined JS_METHODJIT_TYPED_ARRAY
2626 : /*
2627 : * Typed array ICs can make stub calls, and need to know which registers
2628 : * are in use and need to be restored after the call. If type inference is
2629 : * enabled then we don't necessarily know the full set of such registers
2630 : * when generating the IC (loop-carried registers may be allocated later),
2631 : * and additionally the push/pop instructions used to save/restore in the
2632 : * IC are not compatible with carrying entries in floating point registers.
2633 : * Since we can use type information to generate inline paths for typed
2634 : * arrays, just don't generate these ICs with inference enabled.
2635 : */
2636 2236 : if (!f.cx->typeInferenceEnabled() && js_IsTypedArray(obj))
2637 816 : return attachTypedArray(f, obj, v, id, vp);
2638 : #endif
2639 :
2640 1420 : return disable(f, "unhandled object and key type");
2641 : }
2642 :
// IC-aware entry point for JSOP_GETELEM / JSOP_CALLELEM. Attempts to attach a
// getelem stub via GetElementIC::update(); any uncacheable case falls through
// to a generic getGeneric() lookup. The result overwrites the object operand
// slot at f.regs.sp[-2].
2643 : void JS_FASTCALL
2644 13847 : ic::GetElement(VMFrame &f, ic::GetElementIC *ic)
2645 : {
2646 13847 : JSContext *cx = f.cx;
2647 :
2648 : // Right now, we don't optimize for strings or lazy arguments.
2649 13847 : if (!f.regs.sp[-2].isObject()) {
2650 118 : ic->disable(f, "non-object");
2651 118 : stubs::GetElem(f);
2652 118 : return;
2653 : }
2654 :
2655 13729 : Value idval = f.regs.sp[-1];
2656 :
// Watch for recompilation triggered by the VM calls below; if the script was
// recompiled, this IC's code may be stale and must not be patched.
2657 13729 : RecompilationMonitor monitor(cx);
2658 :
2659 13729 : JSObject *obj = ValueToObject(cx, f.regs.sp[-2]);
2660 13729 : if (!obj)
2661 0 : THROW();
2662 :
2663 : #if JS_HAS_XML_SUPPORT
2664 : // Some XML properties behave differently when accessed in a call vs. normal
2665 : // context, so we fall back to stubs::GetElem.
2666 13729 : if (obj->isXML()) {
2667 16 : ic->disable(f, "XML object");
2668 16 : stubs::GetElem(f);
2669 16 : return;
2670 : }
2671 : #endif
2672 :
// Normalize the key to a jsid: int32 keys that fit go straight through;
// anything else is interned (note this can run arbitrary JS, e.g. toString
// on the key, which is why the RecompilationMonitor above is needed).
2673 : jsid id;
2674 13713 : if (idval.isInt32() && INT_FITS_IN_JSID(idval.toInt32())) {
2675 5174 : id = INT_TO_JSID(idval.toInt32());
2676 : } else {
2677 8539 : if (!js_InternNonIntElementId(cx, obj, idval, &id))
2678 0 : THROW();
2679 : }
2680 :
// Only attempt to attach a stub if nothing recompiled under us and the IC
// reports it is warmed up with stub budget remaining.
2681 13713 : if (!monitor.recompiled() && ic->shouldUpdate(cx)) {
2682 : #ifdef DEBUG
// Poison the output slot so the assert below verifies that a cacheable
// lookup actually wrote the result.
2683 8718 : f.regs.sp[-2] = MagicValue(JS_GENERIC_MAGIC);
2684 : #endif
2685 8718 : LookupStatus status = ic->update(f, obj, idval, id, &f.regs.sp[-2]);
2686 8718 : if (status != Lookup_Uncacheable) {
2687 7202 : if (status == Lookup_Error)
2688 0 : THROW();
2689 :
2690 : // If the result can be cached, the value was already retrieved.
2691 7202 : JS_ASSERT(!f.regs.sp[-2].isMagic());
2692 7202 : return;
2693 : }
2694 : }
2695 :
// Uncacheable (or IC not ready): perform the generic lookup.
2696 6511 : if (!obj->getGeneric(cx, id, &f.regs.sp[-2]))
2697 0 : THROW();
2698 :
2699 : #if JS_HAS_NO_SUCH_METHOD
// A CALLELEM that produced a primitive may invoke the __noSuchMethod__ hook.
2700 6511 : if (*f.pc() == JSOP_CALLELEM && JS_UNLIKELY(f.regs.sp[-2].isPrimitive())) {
2701 13 : if (!OnUnknownMethod(cx, obj, idval, &f.regs.sp[-2]))
2702 0 : THROW();
2703 : }
2704 : #endif
2705 : }
2706 :
// Select the strict or non-strict instantiation of a templated stub function
// at runtime, based on the boolean flag |s|.
2707 : #define APPLY_STRICTNESS(f, s) \
2708 : (FunctionTemplateConditional(s, f<true>, f<false>))
2709 :
2710 : LookupStatus
2711 1895 : SetElementIC::disable(VMFrame &f, const char *reason)
2712 : {
2713 1895 : slowCallPatched = true;
2714 1895 : VoidStub stub = APPLY_STRICTNESS(stubs::SetElem, strictMode);
2715 1895 : BaseIC::disable(f, reason, JS_FUNC_TO_DATA_PTR(void *, stub));
2716 1895 : return Lookup_Uncacheable;
2717 : }
2718 :
// Error path for stub generation (e.g. OOM): callers propagate Lookup_Error,
// which ic::SetElement turns into an exception throw. |cx| is unused; the
// signature mirrors the other IC error() helpers.
2719 : LookupStatus
2720 0 : SetElementIC::error(JSContext *cx)
2721 : {
2722 0 : return Lookup_Error;
2723 : }
2724 :
2725 : void
2726 0 : SetElementIC::purge(Repatcher &repatcher)
2727 : {
2728 : // Repatch the inline jumps.
2729 0 : if (inlineShapeGuardPatched)
2730 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), slowPathStart);
2731 0 : if (inlineHoleGuardPatched)
2732 0 : repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), slowPathStart);
2733 :
2734 0 : if (slowCallPatched) {
2735 0 : void *stub = JS_FUNC_TO_DATA_PTR(void *, APPLY_STRICTNESS(ic::SetElement, strictMode));
2736 0 : repatcher.relink(slowPathCall, FunctionPtr(stub));
2737 : }
2738 :
2739 0 : reset();
2740 0 : }
2741 :
2742 : LookupStatus
2743 547 : SetElementIC::attachHoleStub(VMFrame &f, JSObject *obj, int32_t keyval)
2744 : {
2745 547 : JSContext *cx = f.cx;
2746 :
2747 547 : if (keyval < 0)
2748 4 : return disable(f, "negative key index");
2749 :
2750 : // We may have failed a capacity check instead of a dense array check.
2751 : // However we should still build the IC in this case, since it could
2752 : // be in a loop that is filling in the array.
2753 :
2754 543 : if (js_PrototypeHasIndexedProperties(cx, obj))
2755 4 : return disable(f, "prototype has indexed properties");
2756 :
2757 1078 : Assembler masm;
2758 :
2759 1078 : Vector<Jump, 8> fails(cx);
2760 :
2761 539 : if (!GeneratePrototypeGuards(cx, fails, masm, obj, NULL, objReg, objReg))
2762 0 : return error(cx);
2763 :
2764 : // Test for indexed properties in Array.prototype. We test each shape
2765 : // along the proto chain. This affords us two optimizations:
2766 : // 1) Loading the prototype can be avoided because the shape would change;
2767 : // instead we can bake in their identities.
2768 : // 2) We only have to test the shape, rather than INDEXED.
2769 1613 : for (JSObject *pobj = obj->getProto(); pobj; pobj = pobj->getProto()) {
2770 1074 : if (!pobj->isNative())
2771 0 : return disable(f, "non-native array prototype");
2772 1074 : masm.move(ImmPtr(pobj), objReg);
2773 1074 : Jump j = masm.guardShape(objReg, pobj);
2774 1074 : if (!fails.append(j))
2775 0 : return error(cx);
2776 : }
2777 :
2778 : // Restore |obj|.
2779 539 : masm.rematPayload(StateRemat::FromInt32(objRemat), objReg);
2780 :
2781 : // Load the elements.
2782 539 : masm.loadPtr(Address(objReg, JSObject::offsetOfElements()), objReg);
2783 :
2784 539 : Int32Key key = hasConstantKey ? Int32Key::FromConstant(keyValue) : Int32Key::FromRegister(keyReg);
2785 :
2786 : // Guard that the initialized length is being updated exactly.
2787 : fails.append(masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(),
2788 539 : objReg, key, Assembler::NotEqual));
2789 :
2790 : // Check the array capacity.
2791 : fails.append(masm.guardArrayExtent(ObjectElements::offsetOfCapacity(),
2792 539 : objReg, key, Assembler::BelowOrEqual));
2793 :
2794 539 : masm.bumpKey(key, 1);
2795 :
2796 : // Update the length and initialized length.
2797 539 : masm.storeKey(key, Address(objReg, ObjectElements::offsetOfInitializedLength()));
2798 : Jump lengthGuard = masm.guardArrayExtent(ObjectElements::offsetOfLength(),
2799 539 : objReg, key, Assembler::AboveOrEqual);
2800 539 : masm.storeKey(key, Address(objReg, ObjectElements::offsetOfLength()));
2801 539 : lengthGuard.linkTo(masm.label(), &masm);
2802 :
2803 539 : masm.bumpKey(key, -1);
2804 :
2805 : // Store the value back.
2806 539 : if (hasConstantKey) {
2807 55 : Address slot(objReg, keyValue * sizeof(Value));
2808 55 : masm.storeValue(vr, slot);
2809 : } else {
2810 484 : BaseIndex slot(objReg, keyReg, Assembler::JSVAL_SCALE);
2811 484 : masm.storeValue(vr, slot);
2812 : }
2813 :
2814 539 : Jump done = masm.jump();
2815 :
2816 539 : JS_ASSERT(!execPool);
2817 539 : JS_ASSERT(!inlineHoleGuardPatched);
2818 :
2819 1078 : LinkerHelper buffer(masm, JSC::METHOD_CODE);
2820 539 : execPool = buffer.init(cx);
2821 539 : if (!execPool)
2822 0 : return error(cx);
2823 :
2824 539 : if (!buffer.verifyRange(f.chunk()))
2825 0 : return disable(f, "code memory is out of range");
2826 :
2827 : // Patch all guards.
2828 3216 : for (size_t i = 0; i < fails.length(); i++)
2829 2677 : buffer.link(fails[i], slowPathStart);
2830 539 : buffer.link(done, fastPathRejoin);
2831 :
2832 539 : CodeLocationLabel cs = buffer.finalize(f);
2833 539 : JaegerSpew(JSpew_PICs, "generated dense array hole stub at %p\n", cs.executableAddress());
2834 :
2835 1078 : Repatcher repatcher(f.chunk());
2836 539 : repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), cs);
2837 539 : inlineHoleGuardPatched = true;
2838 :
2839 539 : disable(f, "generated dense array hole stub");
2840 :
2841 539 : return Lookup_Cacheable;
2842 : }
2843 :
2844 : #if defined JS_METHODJIT_TYPED_ARRAY
// Attach a stub for storing into a typed array. The stub guards on the
// array's shape, bounds-checks the key against the length, and stores the
// value into the packed data vector at the element type's width.
// Out-of-bounds stores simply do nothing (they jump to the rejoin point).
// The IC is monomorphic: once this stub is attached, the IC is disabled.
2845 : LookupStatus
2846 640 : SetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, int32_t key)
2847 : {
2848 : // Right now, only one shape guard extension is supported.
2849 640 : JS_ASSERT(!inlineShapeGuardPatched);
2850 :
2851 1280 : Assembler masm;
2852 640 : JSContext *cx = f.cx;
2853 :
2854 : // Restore |obj|.
2855 640 : masm.rematPayload(StateRemat::FromInt32(objRemat), objReg);
2856 :
2857 : // Guard on this typed array's shape.
2858 640 : Jump shapeGuard = masm.guardShape(objReg, obj);
2859 :
2860 : // Bounds check.
2861 640 : Jump outOfBounds;
2862 640 : Address typedArrayLength = masm.payloadOf(Address(objReg, TypedArray::lengthOffset()));
2863 640 : if (hasConstantKey)
2864 149 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, Imm32(keyValue))
;
2865 : else
2866 491 : outOfBounds = masm.branch32(Assembler::BelowOrEqual, typedArrayLength, keyReg);
2867 :
2868 : // Load the array's packed data vector.
2869 640 : masm.loadPtr(Address(objReg, TypedArray::dataOffset()), objReg);
2870 :
// Without FPU support we cannot convert/store float element types.
// NOTE(review): unlike the getelem path, TYPE_UINT32 is not rejected here —
// presumably uint32 only needs a double path on loads, not stores; confirm.
2871 640 : JSObject *tarray = js::TypedArray::getTypedArray(obj);
2872 640 : if (!masm.supportsFloatingPoint() &&
2873 0 : (TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT32 ||
2874 0 : TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT64))
2875 : {
2876 0 : return disable(f, "fpu not supported");
2877 : }
2878 :
// Compute the element address: a constant key folds into a displacement,
// otherwise the key register is scaled by the element width.
2879 640 : int shift = js::TypedArray::slotWidth(obj);
2880 640 : if (hasConstantKey) {
2881 149 : Address addr(objReg, keyValue * shift);
2882 149 : if (!StoreToTypedArray(cx, masm, tarray, addr, vr, volatileMask))
2883 0 : return error(cx);
2884 : } else {
2885 491 : Assembler::Scale scale = Assembler::TimesOne;
2886 491 : switch (shift) {
2887 : case 2:
2888 104 : scale = Assembler::TimesTwo;
2889 104 : break;
2890 : case 4:
2891 148 : scale = Assembler::TimesFour;
2892 148 : break;
2893 : case 8:
2894 32 : scale = Assembler::TimesEight;
2895 32 : break;
2896 : }
2897 491 : BaseIndex addr(objReg, keyReg, scale);
2898 491 : if (!StoreToTypedArray(cx, masm, tarray, addr, vr, volatileMask))
2899 0 : return error(cx);
2900 : }
2901 :
2902 640 : Jump done = masm.jump();
2903 :
2904 : // The stub does not rely on any pointers or numbers that could be ruined
2905 : // by a GC or shape regenerated GC. We let this stub live for the lifetime
2906 : // of the script.
2907 640 : JS_ASSERT(!execPool);
2908 1280 : LinkerHelper buffer(masm, JSC::METHOD_CODE);
2909 640 : execPool = buffer.init(cx);
2910 640 : if (!execPool)
2911 0 : return error(cx);
2912 :
2913 640 : if (!buffer.verifyRange(f.chunk()))
2914 0 : return disable(f, "code memory is out of range");
2915 :
2916 : // Note that the out-of-bounds path simply does nothing.
2917 640 : buffer.link(shapeGuard, slowPathStart);
2918 640 : buffer.link(outOfBounds, fastPathRejoin);
2919 640 : buffer.link(done, fastPathRejoin);
2920 640 : masm.finalize(buffer);
2921 :
2922 640 : CodeLocationLabel cs = buffer.finalizeCodeAddendum();
2923 640 : JaegerSpew(JSpew_PICs, "generated setelem typed array stub at %p\n", cs.executableAddress());
2924 :
// Redirect the inline shape guard to the new stub.
2925 1280 : Repatcher repatcher(f.chunk());
2926 640 : repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
2927 640 : inlineShapeGuardPatched = true;
2928 :
2929 640 : stubsGenerated++;
2930 :
2931 : // In the future, it might make sense to attach multiple typed array stubs.
2932 : // For simplicity, they are currently monomorphic.
// NOTE(review): MAX_GETELEM_IC_STUBS in a SetElement IC looks copied from
// GetElementIC — confirm a setelem-specific limit isn't intended.
2933 640 : if (stubsGenerated == MAX_GETELEM_IC_STUBS)
2934 0 : disable(f, "max stubs reached");
2935 :
2936 640 : disable(f, "generated typed array stub");
2937 :
2938 640 : return Lookup_Cacheable;
2939 : }
2940 : #endif /* JS_METHODJIT_TYPED_ARRAY */
2941 :
2942 : LookupStatus
2943 1895 : SetElementIC::update(VMFrame &f, const Value &objval, const Value &idval)
2944 : {
2945 1895 : if (!objval.isObject())
2946 0 : return disable(f, "primitive lval");
2947 1895 : if (!idval.isInt32())
2948 182 : return disable(f, "non-int32 key");
2949 :
2950 1713 : JSObject *obj = &objval.toObject();
2951 1713 : int32_t key = idval.toInt32();
2952 :
2953 1713 : if (obj->isDenseArray())
2954 547 : return attachHoleStub(f, obj, key);
2955 :
2956 : #if defined JS_METHODJIT_TYPED_ARRAY
2957 : /* Not attaching typed array stubs with linear scan allocator, see GetElementIC. */
2958 1166 : if (!f.cx->typeInferenceEnabled() && js_IsTypedArray(obj))
2959 640 : return attachTypedArray(f, obj, key);
2960 : #endif
2961 :
2962 526 : return disable(f, "unsupported object type");
2963 : }
2964 :
2965 : bool
2966 4160 : SetElementIC::shouldUpdate(JSContext *cx)
2967 : {
2968 4160 : if (!hit) {
2969 2265 : hit = true;
2970 2265 : spew(cx, "ignored", "first hit");
2971 2265 : return false;
2972 : }
2973 : #ifdef JSGC_INCREMENTAL_MJ
2974 1895 : JS_ASSERT(!cx->compartment->needsBarrier());
2975 : #endif
2976 1895 : JS_ASSERT(stubsGenerated < MAX_PIC_STUBS);
2977 1895 : return true;
2978 : }
2979 :
2980 : template<JSBool strict>
2981 : void JS_FASTCALL
2982 : ic::SetElement(VMFrame &f, ic::SetElementIC *ic)
2983 : {
2984 4160 : JSContext *cx = f.cx;
2985 :
2986 4160 : if (ic->shouldUpdate(cx)) {
2987 1895 : LookupStatus status = ic->update(f, f.regs.sp[-3], f.regs.sp[-2]);
2988 1895 : if (status == Lookup_Error)
2989 0 : THROW();
2990 : }
2991 :
2992 4160 : stubs::SetElem<strict>(f);
2993 : }
2994 :
2995 : template void JS_FASTCALL ic::SetElement<true>(VMFrame &f, SetElementIC *ic);
2996 : template void JS_FASTCALL ic::SetElement<false>(VMFrame &f, SetElementIC *ic);
2997 :
2998 : #endif /* JS_POLYIC */
2999 :
|