1 : /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=4 sw=4 et tw=99:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 : * May 28, 2008.
19 : *
20 : * The Initial Developer of the Original Code is
21 : * Brendan Eich <brendan@mozilla.org>
22 : *
23 : * Contributor(s):
24 : * David Anderson <danderson@mozilla.com>
25 : * David Mandelin <dmandelin@mozilla.com>
26 : *
27 : * Alternatively, the contents of this file may be used under the terms of
28 : * either of the GNU General Public License Version 2 or later (the "GPL"),
29 : * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 : * in which case the provisions of the GPL or the LGPL are applicable instead
31 : * of those above. If you wish to allow use of your version of this file only
32 : * under the terms of either the GPL or the LGPL, and not to allow others to
33 : * use your version of this file under the terms of the MPL, indicate your
34 : * decision by deleting the provisions above and replace them with the notice
35 : * and other provisions required by the GPL or the LGPL. If you do not delete
36 : * the provisions above, a recipient may use your version of this file under
37 : * the terms of any one of the MPL, the GPL or the LGPL.
38 : *
39 : * ***** END LICENSE BLOCK ***** */
40 : #include "jsscope.h"
41 : #include "jsnum.h"
42 : #include "MonoIC.h"
43 : #include "StubCalls.h"
44 : #include "StubCalls-inl.h"
45 : #include "assembler/assembler/LinkBuffer.h"
46 : #include "assembler/assembler/MacroAssembler.h"
47 : #include "assembler/assembler/CodeLocation.h"
48 : #include "methodjit/CodeGenIncludes.h"
49 : #include "methodjit/Compiler.h"
50 : #include "methodjit/ICRepatcher.h"
51 : #include "methodjit/PolyIC.h"
52 : #include "InlineFrameAssembler.h"
53 : #include "jsobj.h"
54 :
55 : #include "builtin/RegExp.h"
56 :
57 : #include "jsinterpinlines.h"
58 : #include "jsobjinlines.h"
59 : #include "jsscopeinlines.h"
60 : #include "jsscriptinlines.h"
61 :
62 : using namespace js;
63 : using namespace js::mjit;
64 : using namespace js::mjit::ic;
65 :
66 : typedef JSC::MacroAssembler::RegisterID RegisterID;
67 : typedef JSC::MacroAssembler::Address Address;
68 : typedef JSC::MacroAssembler::Jump Jump;
69 : typedef JSC::MacroAssembler::Imm32 Imm32;
70 : typedef JSC::MacroAssembler::ImmPtr ImmPtr;
71 : typedef JSC::MacroAssembler::Call Call;
72 : typedef JSC::MacroAssembler::Label Label;
73 : typedef JSC::MacroAssembler::DataLabel32 DataLabel32;
74 : typedef JSC::MacroAssembler::DataLabelPtr DataLabelPtr;
75 :
76 : #if defined JS_MONOIC
77 :
78 : static void
79 22 : PatchGetFallback(VMFrame &f, ic::GetGlobalNameIC *ic)
80 : {
81 44 : Repatcher repatch(f.chunk());
82 22 : JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stubs::Name));
83 22 : repatch.relink(ic->slowPathCall, fptr);
84 22 : }
85 :
/*
 * Slow path for the GETGNAME inline cache: look up |name| on the global
 * object and, when it resolves to a plain data slot, repatch the inline
 * fast path (shape guard + slot load) so subsequent hits stay inline.
 * Always performs the load via stubs::Name this time around.
 */
void JS_FASTCALL
ic::GetGlobalName(VMFrame &f, ic::GetGlobalNameIC *ic)
{
    JSObject &obj = f.fp()->scopeChain().global();
    PropertyName *name = f.script()->getName(GET_UINT32_INDEX(f.pc()));

    /* The lookup below can trigger recompilation; detect that before patching. */
    RecompilationMonitor monitor(f.cx);

    const Shape *shape = obj.nativeLookup(f.cx, js_CheckForStringIndex(ATOM_TO_JSID(name)));

    /* If the JIT chunk was discarded, the IC is gone; just run the stub. */
    if (monitor.recompiled()) {
        stubs::Name(f);
        return;
    }

    if (!shape ||
        !shape->hasDefaultGetterOrIsMethod() ||
        !shape->hasSlot())
    {
        /*
         * A missing property may still appear later, so keep the IC alive;
         * a property that exists but is not a cacheable data slot never
         * becomes cacheable, so disable the IC for it.
         */
        if (shape)
            PatchGetFallback(f, ic);
        stubs::Name(f);
        return;
    }
    uint32_t slot = shape->slot();

    /* Patch shape guard. */
    Repatcher repatcher(f.chunk());
    repatcher.repatch(ic->fastPathStart.dataLabelPtrAtOffset(ic->shapeOffset), obj.lastProperty());

    /* Patch loads. */
    uint32_t index = obj.dynamicSlotIndex(slot);
    JSC::CodeLocationLabel label = ic->fastPathStart.labelAtOffset(ic->loadStoreOffset);
    repatcher.patchAddressOffsetForValueLoad(label, index * sizeof(Value));

    /* Do load anyway... this time. */
    stubs::Name(f);
}
124 :
/*
 * Fallback invoked once a SETGNAME IC has been disabled: perform the
 * assignment through the generic stub. Strictness is a template parameter
 * so the IC can be relinked to the correct variant for the script.
 */
template <JSBool strict>
static void JS_FASTCALL
DisabledSetGlobal(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    stubs::SetGlobalName<strict>(f, f.script()->getName(GET_UINT32_INDEX(f.pc())));
}

/* Explicit instantiations so their addresses can be taken for relinking. */
template void JS_FASTCALL DisabledSetGlobal<true>(VMFrame &f, ic::SetGlobalNameIC *ic);
template void JS_FASTCALL DisabledSetGlobal<false>(VMFrame &f, ic::SetGlobalNameIC *ic);
134 :
/*
 * Permanently route this SETGNAME IC's slow-path call to the strictness-
 * appropriate DisabledSetGlobal variant, taking the IC out of service.
 */
static void
PatchSetFallback(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    /* NOTE: |script| is referenced by the STRICT_VARIANT macro expansion. */
    JSScript *script = f.script();
    Repatcher repatch(f.chunk());
    VoidStubSetGlobal stub = STRICT_VARIANT(DisabledSetGlobal);
    JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stub));
    repatch.relink(ic->slowPathCall, fptr);
}
144 :
/*
 * Rewrite the shape guard baked into this IC's out-of-line extra stub.
 * Only valid once that stub has been generated (hasExtraStub).
 */
void
SetGlobalNameIC::patchExtraShapeGuard(Repatcher &repatcher, const Shape *shape)
{
    JS_ASSERT(hasExtraStub);

    JSC::CodeLocationLabel label(JSC::MacroAssemblerCodePtr(extraStub.start()));
    repatcher.repatch(label.dataLabelPtrAtOffset(extraShapeGuard), shape);
}
153 :
/* Rewrite the shape guard embedded in this IC's inline fast path. */
void
SetGlobalNameIC::patchInlineShapeGuard(Repatcher &repatcher, const Shape *shape)
{
    JSC::CodeLocationDataLabelPtr label = fastPathStart.dataLabelPtrAtOffset(shapeOffset);
    repatcher.repatch(label, shape);
}
160 :
/*
 * Attempt to (re)patch a SETGNAME IC for the global property described by
 * |shape|. Returns Lookup_Cacheable when the inline path was patched,
 * Lookup_Uncacheable when the site must go through the stub.
 */
static LookupStatus
UpdateSetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, const Shape *shape)
{
    /* Give globals a chance to appear. */
    if (!shape)
        return Lookup_Uncacheable;

    if (shape->isMethod() ||
        !shape->hasDefaultSetter() ||
        !shape->writable() ||
        !shape->hasSlot() ||
        obj->watched())
    {
        /* Disable the IC for weird shape attributes and watchpoints. */
        PatchSetFallback(f, ic);
        return Lookup_Uncacheable;
    }

    /* Object is not branded, so we can use the inline path. */
    Repatcher repatcher(f.chunk());
    ic->patchInlineShapeGuard(repatcher, obj->lastProperty());

    /* Point the inline store at the property's dynamic slot. */
    uint32_t index = obj->dynamicSlotIndex(shape->slot());
    JSC::CodeLocationLabel label = ic->fastPathStart.labelAtOffset(ic->loadStoreOffset);
    repatcher.patchAddressOffsetForValueStore(label, index * sizeof(Value),
                                              ic->vr.isTypeKnown());

    return Lookup_Cacheable;
}
190 :
/*
 * Slow path for the SETGNAME inline cache: try to patch the IC for this
 * global property, then perform the actual assignment through the
 * strictness-appropriate generic stub.
 */
void JS_FASTCALL
ic::SetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    JSObject &obj = f.fp()->scopeChain().global();
    /* NOTE: |script| is referenced by the STRICT_VARIANT macro expansion. */
    JSScript *script = f.script();
    PropertyName *name = script->getName(GET_UINT32_INDEX(f.pc()));

    RecompilationMonitor monitor(f.cx);

    const Shape *shape = obj.nativeLookup(f.cx, ATOM_TO_JSID(name));

    /* Don't touch the IC if the lookup triggered a recompilation. */
    if (!monitor.recompiled()) {
        LookupStatus status = UpdateSetGlobalName(f, ic, &obj, shape);
        if (status == Lookup_Error)
            THROW();
    }

    STRICT_VARIANT(stubs::SetGlobalName)(f, name);
}
210 :
/*
 * Linker for equality-IC stubs: allocates executable memory and registers
 * the pool with the owning JIT chunk so the stub's lifetime matches the
 * chunk's.
 */
class EqualityICLinker : public LinkerHelper
{
    VMFrame &f;

  public:
    EqualityICLinker(Assembler &masm, VMFrame &f)
        : LinkerHelper(masm, JSC::METHOD_CODE), f(f)
    { }

    /*
     * Allocate the pool and append it to the chunk's pool list. Returns
     * false (reporting OOM) on failure.
     */
    bool init(JSContext *cx) {
        JSC::ExecutablePool *pool = LinkerHelper::init(cx);
        if (!pool)
            return false;
        JS_ASSERT(!f.regs.inlined());
        if (!f.chunk()->execPools.append(pool)) {
            pool->release();
            js_ReportOutOfMemory(cx);
            return false;
        }
        return true;
    }
};
233 :
/* Rough over-estimate of how much memory we need to unprotect. */
static const uint32_t INLINE_PATH_LENGTH = 64;
236 :
/*
 * Compiles a type-specialized fast path for an equality-comparison site.
 * The generated code guards on the operand kinds seen at the site (both
 * strings or both objects); any unexpected operand falls back to the
 * original stub call.
 */
class EqualityCompiler : public BaseCompiler
{
    VMFrame &f;
    EqualityICInfo &ic;

    /* Guard failures that must be re-routed to the generic stub entry. */
    Vector<Jump, 4, SystemAllocPolicy> jumpList;
    Jump trueJump;   /* taken when the comparison satisfies ic.cond */
    Jump falseJump;  /* taken otherwise (falls through past the branch) */

  public:
    EqualityCompiler(VMFrame &f, EqualityICInfo &ic)
        : BaseCompiler(f.cx), f(f), ic(ic), jumpList(SystemAllocPolicy())
    {
    }

    /* Queue a guard-failure jump to be linked to the stub entry. */
    void linkToStub(Jump j)
    {
        jumpList.append(j);
    }

    /* Record the branch taken when the comparison succeeds. */
    void linkTrue(Jump j)
    {
        trueJump = j;
    }

    /* Record the branch taken when the comparison fails. */
    void linkFalse(Jump j)
    {
        falseJump = j;
    }

    /*
     * Emit the string/string fast path: both operands must be atomized
     * strings, in which case equality is a pointer comparison.
     */
    void generateStringPath(Assembler &masm)
    {
        const ValueRemat &lvr = ic.lvr;
        const ValueRemat &rvr = ic.rvr;

        JS_ASSERT_IF(lvr.isConstant(), lvr.isType(JSVAL_TYPE_STRING));
        JS_ASSERT_IF(rvr.isConstant(), rvr.isType(JSVAL_TYPE_STRING));

        /* Type-guard each operand unless its type is statically known. */
        if (!lvr.isType(JSVAL_TYPE_STRING)) {
            Jump lhsFail = masm.testString(Assembler::NotEqual, lvr.typeReg());
            linkToStub(lhsFail);
        }

        if (!rvr.isType(JSVAL_TYPE_STRING)) {
            Jump rhsFail = masm.testString(Assembler::NotEqual, rvr.typeReg());
            linkToStub(rhsFail);
        }

        RegisterID tmp = ic.tempReg;

        /* JSString::isAtom === (lengthAndFlags & ATOM_MASK == 0) */
        JS_STATIC_ASSERT(JSString::ATOM_FLAGS == 0);
        Imm32 atomMask(JSString::ATOM_MASK);

        masm.load32(Address(lvr.dataReg(), JSString::offsetOfLengthAndFlags()), tmp);
        Jump lhsNotAtomized = masm.branchTest32(Assembler::NonZero, tmp, atomMask);
        linkToStub(lhsNotAtomized);

        if (!rvr.isConstant()) {
            masm.load32(Address(rvr.dataReg(), JSString::offsetOfLengthAndFlags()), tmp);
            Jump rhsNotAtomized = masm.branchTest32(Assembler::NonZero, tmp, atomMask);
            linkToStub(rhsNotAtomized);
        }

        /* Atomized strings are equal iff their pointers are equal. */
        if (rvr.isConstant()) {
            JSString *str = rvr.value().toString();
            JS_ASSERT(str->isAtom());
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), ImmPtr(str));
            linkTrue(test);
        } else {
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), rvr.dataReg());
            linkTrue(test);
        }

        Jump fallthrough = masm.jump();
        linkFalse(fallthrough);
    }

    /*
     * Emit the object/object fast path: pointer identity, valid only for
     * classes without a custom equality hook.
     */
    void generateObjectPath(Assembler &masm)
    {
        ValueRemat &lvr = ic.lvr;
        ValueRemat &rvr = ic.rvr;

        if (!lvr.isConstant() && !lvr.isType(JSVAL_TYPE_OBJECT)) {
            Jump lhsFail = masm.testObject(Assembler::NotEqual, lvr.typeReg());
            linkToStub(lhsFail);
        }

        if (!rvr.isConstant() && !rvr.isType(JSVAL_TYPE_OBJECT)) {
            Jump rhsFail = masm.testObject(Assembler::NotEqual, rvr.typeReg());
            linkToStub(rhsFail);
        }

        /* Bail to the stub if the lhs class defines an equality hook. */
        masm.loadObjClass(lvr.dataReg(), ic.tempReg);
        Jump lhsHasEq = masm.branchPtr(Assembler::NotEqual,
                                       Address(ic.tempReg, offsetof(Class, ext.equality)),
                                       ImmPtr(NULL));
        linkToStub(lhsHasEq);

        if (rvr.isConstant()) {
            JSObject *obj = &rvr.value().toObject();
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), ImmPtr(obj));
            linkTrue(test);
        } else {
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), rvr.dataReg());
            linkTrue(test);
        }

        Jump fallthrough = masm.jump();
        linkFalse(fallthrough);
    }

    /*
     * Finalize the generated code, wire all recorded jumps to their
     * targets, and patch the IC to enter the new stub.
     */
    bool linkForIC(Assembler &masm)
    {
        EqualityICLinker buffer(masm, f);
        if (!buffer.init(cx))
            return false;

        Repatcher repatcher(f.chunk());

        /* Overwrite the call to the IC with a call to the stub. */
        JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, ic.stub));
        repatcher.relink(ic.stubCall, fptr);

        // Silently fail, the IC is disabled now.
        if (!buffer.verifyRange(f.chunk()))
            return true;

        /* Set the targets of all type test failures to go to the stub. */
        for (size_t i = 0; i < jumpList.length(); i++)
            buffer.link(jumpList[i], ic.stubEntry);
        jumpList.clear();

        /* Set the targets for the success and failure of the actual equality test. */
        buffer.link(trueJump, ic.target);
        buffer.link(falseJump, ic.fallThrough);

        CodeLocationLabel cs = buffer.finalize(f);

        /* Jump to the newly generated code instead of to the IC. */
        repatcher.relink(ic.jumpToStub, cs);

        return true;
    }

    /*
     * Generate and install a specialized path if one has not been built
     * yet and the current operands are a supported pairing. Returns false
     * only on linker OOM.
     */
    bool update()
    {
        if (!ic.generated) {
            Assembler masm;
            Value rval = f.regs.sp[-1];
            Value lval = f.regs.sp[-2];

            if (rval.isObject() && lval.isObject()) {
                generateObjectPath(masm);
                ic.generated = true;
            } else if (rval.isString() && lval.isString()) {
                generateStringPath(masm);
                ic.generated = true;
            } else {
                /* Unsupported operand mix: leave the IC to warm up later. */
                return true;
            }

            return linkForIC(masm);
        }

        return true;
    }
};
405 :
/*
 * Entry point for the equality IC: (re)generate the specialized path if
 * possible, then compute this comparison through the original stub.
 */
JSBool JS_FASTCALL
ic::Equality(VMFrame &f, ic::EqualityICInfo *ic)
{
    EqualityCompiler cc(f, *ic);
    if (!cc.update())
        THROWV(JS_FALSE);

    return ic->stub(f);
}
415 :
/*
 * Disabled-call-IC target: route a JSOP_CALL through the generic slow
 * path. Returns NULL (no scripted code to jump to).
 */
static void * JS_FASTCALL
SlowCallFromIC(VMFrame &f, ic::CallICInfo *ic)
{
    stubs::SlowCall(f, ic->frameSize.getArgc(f));
    return NULL;
}
422 :
/*
 * Disabled-call-IC target for JSOP_NEW: route the construction through
 * the generic slow path. Returns NULL (no scripted code to jump to).
 */
static void * JS_FASTCALL
SlowNewFromIC(VMFrame &f, ic::CallICInfo *ic)
{
    stubs::SlowNew(f, ic->frameSize.staticArgc());
    return NULL;
}
429 :
/*
 * Allocate executable memory for a native-call stub and register the stub
 * (pc, pool, and patchable final jump) with the owning chunk so it can be
 * found and released later. Returns false on OOM.
 */
bool
NativeStubLinker::init(JSContext *cx)
{
    JSC::ExecutablePool *pool = LinkerHelper::init(cx);
    if (!pool)
        return false;

    NativeCallStub stub;
    stub.pc = pc;
    stub.pool = pool;
    stub.jump = locationOf(done);
    if (!chunk->nativeCallStubs.append(stub)) {
        pool->release();
        return false;
    }

    return true;
}
448 :
/*
 * Generate epilogue code to run after a stub ABI call to a native or getter.
 * This checks for an exception, and either type checks the result against the
 * observed types for the opcode or loads the result into a register pair
 * (it will go through a type barrier afterwards).
 *
 * On success, *result receives the patchable final jump of the stub
 * (an indirect jump on x64 so it can later be re-pointed far away).
 * Returns false (via THROWV) only if emitting the type check fails.
 */
bool
mjit::NativeStubEpilogue(VMFrame &f, Assembler &masm, NativeStubLinker::FinalJump *result,
                         int32_t initialFrameDepth, int32_t vpOffset,
                         MaybeRegisterID typeReg, MaybeRegisterID dataReg)
{
    /* Reload fp, which may have been clobbered by restoreStackBase(). */
    masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);

    /* Natives return JS_FALSE (0) in ReturnReg when they throw. */
    Jump hasException = masm.branchTest32(Assembler::Zero, Registers::ReturnReg,
                                          Registers::ReturnReg);

    Address resultAddress(JSFrameReg, vpOffset);

    Vector<Jump> mismatches(f.cx);
    if (f.cx->typeInferenceEnabled() && !typeReg.isSet()) {
        /*
         * Test the result of this native against the known result type set for
         * the call. We don't assume knowledge about the types that natives can
         * return, except when generating specialized paths in FastBuiltins.
         */
        types::TypeSet *types = f.script()->analysis()->bytecodeTypes(f.pc());
        if (!masm.generateTypeCheck(f.cx, resultAddress, types, &mismatches))
            THROWV(false);
    }

    /*
     * Can no longer trigger recompilation in this stub, clear the stub rejoin
     * on the VMFrame.
     */
    masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));

    if (typeReg.isSet())
        masm.loadValueAsComponents(resultAddress, typeReg.reg(), dataReg.reg());

    /*
     * The final jump is a indirect on x64, so that we'll always be able
     * to repatch it to the interpoline later.
     */
    Label finished = masm.label();
#ifdef JS_CPU_X64
    JSC::MacroAssembler::DataLabelPtr done = masm.moveWithPatch(ImmPtr(NULL), Registers::ValueReg);
    masm.jump(Registers::ValueReg);
#else
    Jump done = masm.jump();
#endif

    /* Generate a call for type check failures on the native result. */
    if (!mismatches.empty()) {
        for (unsigned i = 0; i < mismatches.length(); i++)
            mismatches[i].linkTo(masm.label(), &masm);
        masm.addPtr(Imm32(vpOffset), JSFrameReg, Registers::ArgReg1);
        masm.fallibleVMCall(true, JS_FUNC_TO_DATA_PTR(void *, stubs::TypeBarrierReturn),
                            f.regs.pc, NULL, initialFrameDepth);
        masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
        masm.jump().linkTo(finished, &masm);
    }

    /* Move JaegerThrowpoline into register for very far jump on x64. */
    hasException.linkTo(masm.label(), &masm);
    masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
    masm.throwInJIT();

    *result = done;
    return true;
}
520 :
521 : /*
522 : * Calls have an inline path and an out-of-line path. The inline path is used
523 : * in the fastest case: the method has JIT'd code, and |argc == nargs|.
524 : *
525 : * The inline path and OOL path are separated by a guard on the identity of
526 : * the callee object. This guard starts as NULL and always fails on the first
527 : * hit. On the OOL path, the callee is verified to be both a function and a
528 : * scripted function. If these conditions hold, |ic::Call| is invoked.
529 : *
530 : * |ic::Call| first ensures that the callee has JIT code. If it doesn't, the
531 : * call to |ic::Call| is patched to a slow path. If it does have JIT'd code,
532 : * the following cases can occur:
533 : *
534 : * 1) args != nargs: The call to |ic::Call| is patched with a dynamically
535 : * generated stub. This stub inlines a path that looks like:
536 : * ----
537 : * push frame
538 : * if (callee is not compiled) {
539 : * Compile(callee);
540 : * }
541 : * call callee->arityLabel
542 : *
543 : * The arity label is a special entry point for correcting frames for
544 : * arity mismatches.
545 : *
546 : * 2) args == nargs, and the inline call site was not patched yet.
547 : * The guard dividing the two paths is patched to guard on the given
548 : * function object identity, and the proceeding call is patched to
549 : * directly call the JIT code.
550 : *
551 : * 3) args == nargs, and the inline call site was patched already.
552 : * A small stub is created which extends the original guard to also
553 : * guard on the JSFunction lying underneath the function object.
554 : *
555 : * If the OOL path does not have a scripted function, but does have a
556 : * scripted native, then a small stub is generated which inlines the native
557 : * invocation.
558 : */
559 : class CallCompiler : public BaseCompiler
560 : {
561 : VMFrame &f;
562 : CallICInfo ⁣
563 : bool callingNew;
564 :
565 : public:
    /* |callingNew| selects JSOP_NEW vs. JSOP_CALL semantics for all stubs. */
    CallCompiler(VMFrame &f, CallICInfo &ic, bool callingNew)
      : BaseCompiler(f.cx), f(f), ic(ic), callingNew(callingNew)
    {
    }
570 :
    /*
     * Finalize |linker|'s pool and record it in the IC's pool slot |index|
     * so it is released when the IC is purged. Returns NULL on OOM.
     */
    JSC::ExecutablePool *poolForSize(LinkerHelper &linker, CallICInfo::PoolIndex index)
    {
        JSC::ExecutablePool *ep = linker.init(f.cx);
        if (!ep)
            return NULL;
        JS_ASSERT(!ic.pools[index]);
        ic.pools[index] = ep;
        return ep;
    }
580 :
581 285 : void disable()
582 : {
583 285 : JSC::CodeLocationCall oolCall = ic.slowPathStart.callAtOffset(ic.oolCallOffset);
584 570 : Repatcher repatch(f.chunk());
585 : JSC::FunctionPtr fptr = callingNew
586 : ? JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowNewFromIC))
587 285 : : JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowCallFromIC));
588 285 : repatch.relink(oolCall, fptr);
589 285 : }
590 :
    /*
     * Build the out-of-line stub used for argc != nargs calls to a scripted
     * function: push an inline frame, compile the callee if necessary, and
     * jump to the callee's arity-check entry. Returns false only on OOM.
     */
    bool generateFullCallStub(JSScript *script, uint32_t flags)
    {
        /*
         * Create a stub that works with arity mismatches. Like the fast-path,
         * this allocates a frame on the caller side, but also performs extra
         * checks for compilability. Perhaps this should be a separate, shared
         * trampoline, but for now we generate it dynamically.
         */
        Assembler masm;
        InlineFrameAssembler inlFrame(masm, ic, flags);
        RegisterID t0 = inlFrame.tempRegs.takeAnyReg().reg();

        /* Generate the inline frame creation. */
        void *ncode = ic.funGuard.labelAtOffset(ic.joinPointOffset).executableAddress();
        inlFrame.assemble(ncode, f.pc());

        /* funObjReg is still valid. Check if a compilation is needed. */
        Address scriptAddr(ic.funObjReg, JSFunction::offsetOfNativeOrScript());
        masm.loadPtr(scriptAddr, t0);

        /*
         * Test if script->nmap is NULL - same as checking ncode, but faster
         * here since ncode has two failure modes and we need to load out of
         * nmap anyway.
         */
        size_t offset = callingNew
                        ? offsetof(JSScript, jitArityCheckCtor)
                        : offsetof(JSScript, jitArityCheckNormal);
        masm.loadPtr(Address(t0, offset), t0);
        Jump hasCode = masm.branchPtr(Assembler::Above, t0, ImmPtr(JS_UNJITTABLE_SCRIPT));

        /*
         * Write the rejoin state to indicate this is a compilation call made
         * from an IC (the recompiler cannot detect calls made from ICs
         * automatically).
         */
        masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.pc(), false)),
                      FrameAddress(offsetof(VMFrame, stubRejoin)));

        masm.bumpStubCounter(f.script(), f.pc(), Registers::tempCallReg());

        /* Try and compile. On success we get back the nmap pointer. */
        void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction);
        DataLabelPtr inlined;
        if (ic.frameSize.isStatic()) {
            masm.move(Imm32(ic.frameSize.staticArgc()), Registers::ArgReg1);
            masm.fallibleVMCall(cx->typeInferenceEnabled(),
                                compilePtr, f.regs.pc, &inlined, ic.frameSize.staticLocalSlots());
        } else {
            masm.load32(FrameAddress(VMFrame::offsetOfDynamicArgc()), Registers::ArgReg1);
            masm.fallibleVMCall(cx->typeInferenceEnabled(),
                                compilePtr, f.regs.pc, &inlined, -1);
        }

        /* Compilation returns NULL in ReturnReg on failure. */
        Jump notCompiled = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                              Registers::ReturnReg);
        masm.loadPtr(FrameAddress(VMFrame::offsetOfRegsSp()), JSFrameReg);

        /* Compute the value of ncode to use at this call site. */
        ncode = (uint8_t *) f.chunk()->code.m_code.executableAddress() + ic.call->codeOffset;
        masm.storePtr(ImmPtr(ncode), Address(JSFrameReg, StackFrame::offsetOfNcode()));

        masm.jump(Registers::ReturnReg);

        hasCode.linkTo(masm.label(), &masm);

        /* Get nmap[ARITY], set argc, call. */
        if (ic.frameSize.isStatic())
            masm.move(Imm32(ic.frameSize.staticArgc()), JSParamReg_Argc);
        else
            masm.load32(FrameAddress(VMFrame::offsetOfDynamicArgc()), JSParamReg_Argc);
        masm.jump(t0);

        LinkerHelper linker(masm, JSC::METHOD_CODE);
        JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_ScriptStub);
        if (!ep)
            return false;

        /* If the code range can't be verified, disable the IC but succeed. */
        if (!linker.verifyRange(f.chunk())) {
            disable();
            return true;
        }

        linker.link(notCompiled, ic.slowPathStart.labelAtOffset(ic.slowJoinOffset));
        JSC::CodeLocationLabel cs = linker.finalize(f);

        JaegerSpew(JSpew_PICs, "generated CALL stub %p (%lu bytes)\n", cs.executableAddress(),
                   (unsigned long) masm.size());

        /* Patch the recorded inline-frame pointer for inlined callers. */
        if (f.regs.inlined()) {
            JSC::LinkBuffer code((uint8_t *) cs.executableAddress(), masm.size(), JSC::METHOD_CODE);
            code.patch(inlined, f.regs.inlined());
        }

        /* Redirect the out-of-line miss jump into the new stub. */
        Repatcher repatch(f.chunk());
        JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
        repatch.relink(oolJump, cs);

        return true;
    }
691 :
    /*
     * Patch the inline fast path to guard on |obj|'s identity and call
     * |script|'s JIT code directly (argc == nargs case). Returns false if
     * the jump cannot be relinked (target out of range).
     */
    bool patchInlinePath(JSScript *script, JSObject *obj)
    {
        JS_ASSERT(ic.frameSize.isStatic());
        JITScript *jit = script->getJIT(callingNew);

        /* Very fast path. */
        Repatcher repatch(f.chunk());

        /*
         * Use the arguments check entry if this is a monitored call, we might
         * not have accounted for all possible argument types.
         */
        void *entry = ic.typeMonitored ? jit->argsCheckEntry : jit->fastEntry;

        if (!repatch.canRelink(ic.funGuard.jumpAtOffset(ic.hotJumpOffset),
                               JSC::CodeLocationLabel(entry))) {
            return false;
        }

        ic.fastGuardedObject = obj;
        /* Track this IC on the callee's JIT so invalidation can find it. */
        JS_APPEND_LINK(&ic.links, &jit->callers);

        repatch.repatch(ic.funGuard, obj);
        repatch.relink(ic.funGuard.jumpAtOffset(ic.hotJumpOffset),
                       JSC::CodeLocationLabel(entry));

        JaegerSpew(JSpew_PICs, "patched CALL path %p (obj: %p)\n",
                   ic.funGuard.executableAddress(),
                   static_cast<void*>(ic.fastGuardedObject));

        return true;
    }
724 :
    /*
     * Build a stub for calls where the function object varies but the
     * underlying script is fixed (closures): guard on the class and the
     * script instead of object identity. Returns false only on OOM.
     */
    bool generateStubForClosures(JSObject *obj)
    {
        JS_ASSERT(ic.frameSize.isStatic());

        /* Slightly less fast path - guard on fun->script() instead. */
        Assembler masm;

        Registers tempRegs(Registers::AvailRegs);
        tempRegs.takeReg(ic.funObjReg);

        RegisterID t0 = tempRegs.takeAnyReg().reg();

        /* Guard that it's actually a function object. */
        Jump claspGuard = masm.testObjClass(Assembler::NotEqual, ic.funObjReg, t0, &FunctionClass);

        /* Guard that it's the same script. */
        Address scriptAddr(ic.funObjReg, JSFunction::offsetOfNativeOrScript());
        Jump funGuard = masm.branchPtr(Assembler::NotEqual, scriptAddr,
                                       ImmPtr(obj->toFunction()->script()));
        Jump done = masm.jump();

        LinkerHelper linker(masm, JSC::METHOD_CODE);
        JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_ClosureStub);
        if (!ep)
            return false;

        ic.hasJsFunCheck = true;

        /* If the code range can't be verified, disable the IC but succeed. */
        if (!linker.verifyRange(f.chunk())) {
            disable();
            return true;
        }

        linker.link(claspGuard, ic.slowPathStart);
        linker.link(funGuard, ic.slowPathStart);
        linker.link(done, ic.funGuard.labelAtOffset(ic.hotPathOffset));
        JSC::CodeLocationLabel cs = linker.finalize(f);

        JaegerSpew(JSpew_PICs, "generated CALL closure stub %p (%lu bytes)\n",
                   cs.executableAddress(), (unsigned long) masm.size());

        /* Route the miss jump through the new closure stub. */
        Repatcher repatch(f.chunk());
        repatch.relink(ic.funJump, cs);

        return true;
    }
771 :
772 451581 : bool generateNativeStub()
773 : {
774 : /* Snapshot the frameDepth before SplatApplyArgs modifies it. */
775 451581 : unsigned initialFrameDepth = f.regs.sp - f.fp()->slots();
776 :
777 : /*
778 : * SplatApplyArgs has not been called, so we call it here before
779 : * potentially touching f.u.call.dynamicArgc.
780 : */
781 : CallArgs args;
782 451581 : if (ic.frameSize.isStatic()) {
783 445051 : JS_ASSERT(f.regs.sp - f.fp()->slots() == (int)ic.frameSize.staticLocalSlots());
784 445051 : args = CallArgsFromSp(ic.frameSize.staticArgc(), f.regs.sp);
785 : } else {
786 6530 : JS_ASSERT(!f.regs.inlined());
787 6530 : JS_ASSERT(*f.regs.pc == JSOP_FUNAPPLY && GET_ARGC(f.regs.pc) == 2);
788 6530 : if (!ic::SplatApplyArgs(f)) /* updates regs.sp */
789 0 : THROWV(true);
790 6530 : args = CallArgsFromSp(f.u.call.dynamicArgc, f.regs.sp);
791 : }
792 :
793 : JSFunction *fun;
794 451581 : if (!IsFunctionObject(args.calleev(), &fun))
795 9642 : return false;
796 :
797 441939 : if ((!callingNew && !fun->isNative()) || (callingNew && !fun->isNativeConstructor()))
798 3 : return false;
799 :
800 441936 : if (callingNew)
801 2034 : args.thisv().setMagic(JS_IS_CONSTRUCTING);
802 :
803 441936 : RecompilationMonitor monitor(cx);
804 :
805 441936 : if (!CallJSNative(cx, fun->u.n.native, args))
806 3549 : THROWV(true);
807 :
808 438387 : types::TypeScript::Monitor(f.cx, f.script(), f.pc(), args.rval());
809 :
810 : /*
811 : * Native stubs are not generated for inline frames. The overhead of
812 : * bailing out from the IC is far greater than the time saved by
813 : * inlining the parent frame in the first place, so mark the immediate
814 : * caller as uninlineable.
815 : */
816 438387 : if (f.script()->function()) {
817 430281 : f.script()->uninlineable = true;
818 430281 : MarkTypeObjectFlags(cx, f.script()->function(), types::OBJECT_FLAG_UNINLINEABLE);
819 : }
820 :
821 : /* Don't touch the IC if the call triggered a recompilation. */
822 438387 : if (monitor.recompiled())
823 3673 : return true;
824 :
825 434714 : JS_ASSERT(!f.regs.inlined());
826 :
827 : /* Right now, take slow-path for IC misses or multiple stubs. */
828 434714 : if (ic.fastGuardedNative || ic.hasJsFunCheck)
829 355898 : return true;
830 :
831 : /* Native MIC needs to warm up first. */
832 78816 : if (!ic.hit) {
833 44672 : ic.hit = true;
834 44672 : return true;
835 : }
836 :
837 : /* Generate fast-path for calling this native. */
838 68288 : Assembler masm;
839 :
840 : /* Guard on the function object identity, for now. */
841 34144 : Jump funGuard = masm.branchPtr(Assembler::NotEqual, ic.funObjReg, ImmPtr(fun));
842 :
843 : /*
844 : * Write the rejoin state for the recompiler to use if this call
845 : * triggers recompilation. Natives use a different stack address to
846 : * store the return value than FASTCALLs, and without additional
847 : * information we cannot tell which one is active on a VMFrame.
848 : */
849 34144 : masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.pc(), true)),
850 68288 : FrameAddress(offsetof(VMFrame, stubRejoin)));
851 :
852 : /* N.B. After this call, the frame will have a dynamic frame size. */
853 34144 : if (ic.frameSize.isDynamic()) {
854 293 : masm.bumpStubCounter(f.script(), f.pc(), Registers::tempCallReg());
855 293 : masm.fallibleVMCall(cx->typeInferenceEnabled(),
856 : JS_FUNC_TO_DATA_PTR(void *, ic::SplatApplyArgs),
857 586 : f.regs.pc, NULL, initialFrameDepth);
858 : }
859 :
860 34144 : Registers tempRegs = Registers::tempCallRegMask();
861 34144 : RegisterID t0 = tempRegs.takeAnyReg().reg();
862 34144 : masm.bumpStubCounter(f.script(), f.pc(), t0);
863 :
864 34144 : int32_t storeFrameDepth = ic.frameSize.isStatic() ? initialFrameDepth : -1;
865 34144 : masm.setupFallibleABICall(cx->typeInferenceEnabled(), f.regs.pc, storeFrameDepth);
866 :
867 : /* Grab cx. */
868 : #ifdef JS_CPU_X86
869 34144 : RegisterID cxReg = tempRegs.takeAnyReg().reg();
870 : #else
871 : RegisterID cxReg = Registers::ArgReg0;
872 : #endif
873 34144 : masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), cxReg);
874 :
875 : /*
876 : * Compute vp. This will always be at the same offset from fp for a
877 : * given callsite, regardless of any dynamically computed argc,
878 : * so get that offset from the active call.
879 : */
880 : #ifdef JS_CPU_X86
881 34144 : RegisterID vpReg = t0;
882 : #else
883 : RegisterID vpReg = Registers::ArgReg2;
884 : #endif
885 34144 : uint32_t vpOffset = (uint32_t) ((char *) args.base() - (char *) f.fp());
886 34144 : masm.addPtr(Imm32(vpOffset), JSFrameReg, vpReg);
887 :
888 : /* Compute argc. */
889 34144 : MaybeRegisterID argcReg;
890 34144 : if (!ic.frameSize.isStatic()) {
891 293 : argcReg = tempRegs.takeAnyReg().reg();
892 293 : masm.load32(FrameAddress(VMFrame::offsetOfDynamicArgc()), argcReg.reg());
893 : }
894 :
895 : /* Mark vp[1] as magic for |new|. */
896 34144 : if (callingNew)
897 332 : masm.storeValue(MagicValue(JS_IS_CONSTRUCTING), Address(vpReg, sizeof(Value)));
898 :
899 34144 : masm.restoreStackBase();
900 34144 : masm.setupABICall(Registers::NormalCall, 3);
901 34144 : masm.storeArg(2, vpReg);
902 34144 : if (ic.frameSize.isStatic())
903 33851 : masm.storeArg(1, ImmIntPtr(intptr_t(ic.frameSize.staticArgc())));
904 : else
905 293 : masm.storeArg(1, argcReg.reg());
906 34144 : masm.storeArg(0, cxReg);
907 :
908 34144 : js::Native native = fun->u.n.native;
909 :
910 : /*
911 : * Call RegExp.test instead of exec if the result will not be used or
912 : * will only be used to test for existence. Note that this will not
913 : * break inferred types for the call's result and any subsequent test,
914 : * as RegExp.exec has a type handler with unknown result.
915 : */
916 34144 : if (native == regexp_exec && !CallResultEscapes(f.pc()))
917 4375 : native = regexp_test;
918 :
919 34144 : masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, native), false);
920 :
921 34144 : NativeStubLinker::FinalJump done;
922 34144 : if (!NativeStubEpilogue(f, masm, &done, initialFrameDepth, vpOffset, MaybeRegisterID(), MaybeRegisterID()))
923 0 : return false;
924 68288 : NativeStubLinker linker(masm, f.chunk(), f.regs.pc, done);
925 34144 : if (!linker.init(f.cx))
926 0 : THROWV(true);
927 :
928 34144 : if (!linker.verifyRange(f.chunk())) {
929 0 : disable();
930 0 : return true;
931 : }
932 :
933 34144 : linker.patchJump(ic.slowPathStart.labelAtOffset(ic.slowJoinOffset));
934 :
935 34144 : ic.fastGuardedNative = fun;
936 :
937 34144 : linker.link(funGuard, ic.slowPathStart);
938 34144 : JSC::CodeLocationLabel start = linker.finalize(f);
939 :
940 : JaegerSpew(JSpew_PICs, "generated native CALL stub %p (%lu bytes)\n",
941 34144 : start.executableAddress(), (unsigned long) masm.size());
942 :
943 68288 : Repatcher repatch(f.chunk());
944 34144 : repatch.relink(ic.funJump, start);
945 :
946 34144 : return true;
947 : }
948 :
    /*
     * Slow-path entry for a scripted call or |new| at this IC site. Runs the
     * uncached call helper, then — once the IC has warmed up — compiles a
     * fast-path stub appropriate for the observed callee. Returns the code
     * address to jump into, or NULL to continue on the slow path.
     */
    void *update()
    {
        /* Detect recompilation of the calling script during the helpers below. */
        RecompilationMonitor monitor(cx);

        /* A lowered f.apply() call site is never a construct site. */
        bool lowered = ic.frameSize.lowered(f.pc());
        JS_ASSERT_IF(lowered, !callingNew);

        stubs::UncachedCallResult ucr;
        if (callingNew)
            stubs::UncachedNewHelper(f, ic.frameSize.staticArgc(), &ucr);
        else
            stubs::UncachedCallHelper(f, ic.frameSize.getArgc(f), lowered, &ucr);

        // Watch out in case the IC was invalidated by a recompilation on the calling
        // script. This can happen either if the callee is executed or if it compiles
        // and the compilation has a static overflow. In that case the IC memory is
        // stale, so don't touch it; just return whatever the helper produced.
        if (monitor.recompiled())
            return ucr.codeAddr;

        // If the function cannot be jitted (generally unjittable or empty script),
        // patch this site to go to a slow path always.
        if (!ucr.codeAddr) {
            if (ucr.unjittable)
                disable();
            return NULL;
        }

        JSFunction *fun = ucr.fun;
        JS_ASSERT(fun);
        JSScript *script = fun->script();
        JS_ASSERT(script);

        uint32_t flags = callingNew ? StackFrame::CONSTRUCTING : 0;

        /* Let the IC see a second hit before spending time generating stubs. */
        if (!ic.hit) {
            ic.hit = true;
            return ucr.codeAddr;
        }

        if (!ic.frameSize.isStatic() || ic.frameSize.staticArgc() != fun->nargs) {
            /* Dynamic argc or formal/actual mismatch: only the full stub applies. */
            if (!generateFullCallStub(script, flags))
                THROWV(NULL);
        } else {
            if (!ic.fastGuardedObject && patchInlinePath(script, fun)) {
                // Nothing, done.
            } else if (ic.fastGuardedObject &&
                       !ic.hasJsFunCheck &&
                       !ic.fastGuardedNative &&
                       ic.fastGuardedObject->toFunction()->script() == fun->script()) {
                /*
                 * Note: Multiple "function guard" stubs are not yet
                 * supported, thus the fastGuardedNative check.
                 */
                if (!generateStubForClosures(fun))
                    THROWV(NULL);
            } else {
                if (!generateFullCallStub(script, flags))
                    THROWV(NULL);
            }
        }

        return ucr.codeAddr;
    }
1012 : };
1013 :
1014 : void * JS_FASTCALL
1015 48630 : ic::Call(VMFrame &f, CallICInfo *ic)
1016 : {
1017 48630 : CallCompiler cc(f, *ic, false);
1018 48630 : return cc.update();
1019 : }
1020 :
1021 : void * JS_FASTCALL
1022 4349 : ic::New(VMFrame &f, CallICInfo *ic)
1023 : {
1024 4349 : CallCompiler cc(f, *ic, true);
1025 4349 : return cc.update();
1026 : }
1027 :
1028 : void * JS_FASTCALL
1029 443503 : ic::NativeCall(VMFrame &f, CallICInfo *ic)
1030 : {
1031 443503 : CallCompiler cc(f, *ic, false);
1032 443503 : if (!cc.generateNativeStub())
1033 3601 : stubs::SlowCall(f, ic->frameSize.getArgc(f));
1034 443503 : return NULL;
1035 : }
1036 :
1037 : void * JS_FASTCALL
1038 8078 : ic::NativeNew(VMFrame &f, CallICInfo *ic)
1039 : {
1040 8078 : CallCompiler cc(f, *ic, true);
1041 8078 : if (!cc.generateNativeStub())
1042 6044 : stubs::SlowNew(f, ic->frameSize.staticArgc());
1043 8078 : return NULL;
1044 : }
1045 :
1046 : static JS_ALWAYS_INLINE bool
1047 301728 : BumpStack(VMFrame &f, unsigned inc)
1048 : {
1049 301728 : if (f.regs.sp + inc < f.stackLimit)
1050 301728 : return true;
1051 0 : return f.cx->stack.space().tryBumpLimit(f.cx, f.regs.sp, inc, &f.stackLimit);
1052 : }
1053 :
1054 : /*
1055 : * SplatApplyArgs is only called for expressions of the form |f.apply(x, y)|.
1056 : * Additionally, the callee has already been checked to be the native apply.
1057 : * All successful paths through SplatApplyArgs must set f.u.call.dynamicArgc
1058 : * and f.regs.sp.
1059 : */
JSBool JS_FASTCALL
ic::SplatApplyArgs(VMFrame &f)
{
    JSContext *cx = f.cx;
    /* Only non-inlined frames reach this stub; the call site has argc == 2. */
    JS_ASSERT(!f.regs.inlined());
    JS_ASSERT(GET_ARGC(f.regs.pc) == 2);

    /*
     * The lazyArgsObj flag indicates an optimized call |f.apply(x, arguments)|
     * where the args obj has not been created or pushed on the stack. Thus,
     * if lazyArgsObj is set, the stack for |f.apply(x, arguments)| is:
     *
     * | Function.prototype.apply | f | x |
     *
     * Otherwise, if !lazyArgsObj, the stack is a normal 2-argument apply:
     *
     * | Function.prototype.apply | f | x | arguments |
     */
    if (f.u.call.lazyArgsObj) {
        Value *vp = f.regs.sp - 3;
        JS_ASSERT(JS_CALLEE(cx, vp).toObject().toFunction()->u.n.native == js_fun_apply);

        StackFrame *fp = f.regs.fp();
        unsigned n;
        if (!fp->hasArgsObj()) {
            /* Extract the common/fast path where there is no args obj. */
            n = fp->numActualArgs();
            if (!BumpStack(f, n))
                THROWV(false);
            Value *argv = JS_ARGV(cx, vp + 1 /* vp[1]'s argv */);
            /* Grow sp before copying so the copied args are inside the stack. */
            f.regs.sp += n;
            fp->forEachCanonicalActualArg(CopyTo(argv));
        } else {
            /* Simulate the argument-pushing part of js_fun_apply: */
            JSObject *aobj = &fp->argsObj();

            /* Steps 4-5 */
            unsigned length;
            if (!js_GetLengthProperty(cx, aobj, &length))
                THROWV(false);

            /* Step 6. */
            if (length > StackSpace::ARGS_LENGTH_MAX) {
                JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                                     JSMSG_TOO_MANY_FUN_APPLY_ARGS);
                THROWV(false);
            }

            n = length;
            if (!BumpStack(f, n))
                THROWV(false);

            /* Steps 7-8 */
            Value *argv = JS_ARGV(cx, &vp[1]); /* vp[1] is the callee */
            f.regs.sp += n; /* GetElements may reenter, so inc early. */
            if (!GetElements(cx, aobj, n, argv))
                THROWV(false);
        }

        /* Publish the computed argc for the dynamically-sized call. */
        f.u.call.dynamicArgc = n;
        return true;
    }

    /* Non-lazy case: |arguments| (vp[3]) is a real value on the stack. */
    Value *vp = f.regs.sp - 4;
    JS_ASSERT(JS_CALLEE(cx, vp).toObject().toFunction()->u.n.native == js_fun_apply);

    /*
     * This stub should mimic the steps taken by js_fun_apply. Step 1 and part
     * of Step 2 have already been taken care of by calling jit code.
     */

    /* Step 2 (part 2): apply with null/undefined args is a 0-arg call. */
    if (vp[3].isNullOrUndefined()) {
        f.regs.sp--;
        f.u.call.dynamicArgc = 0;
        return true;
    }

    /* Step 3. */
    if (!vp[3].isObject()) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_APPLY_ARGS, js_apply_str);
        THROWV(false);
    }

    /* Steps 4-5. */
    JSObject *aobj = &vp[3].toObject();
    uint32_t length;
    if (!js_GetLengthProperty(cx, aobj, &length))
        THROWV(false);

    /* Step 6. */
    if (length > StackSpace::ARGS_LENGTH_MAX) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                             JSMSG_TOO_MANY_FUN_APPLY_ARGS);
        THROWV(false);
    }

    /*
     * The args object's own stack slot is reused for the first splatted
     * element, so the net stack growth is length - 1 (negative for length 0,
     * which pops the slot).
     */
    int delta = length - 1;
    if (delta > 0 && !BumpStack(f, delta))
        THROWV(false);
    f.regs.sp += delta;

    /* Steps 7-8. */
    if (!GetElements(cx, aobj, length, f.regs.sp - length))
        THROWV(false);

    f.u.call.dynamicArgc = length;
    return true;
}
1169 :
1170 : void
1171 890 : ic::GenerateArgumentCheckStub(VMFrame &f)
1172 : {
1173 890 : JS_ASSERT(f.cx->typeInferenceEnabled());
1174 :
1175 890 : JITScript *jit = f.jit();
1176 890 : StackFrame *fp = f.fp();
1177 890 : JSFunction *fun = fp->fun();
1178 890 : JSScript *script = fun->script();
1179 :
1180 890 : if (jit->argsCheckPool)
1181 315 : jit->resetArgsCheck();
1182 :
1183 1780 : Assembler masm;
1184 1780 : Vector<Jump> mismatches(f.cx);
1185 :
1186 890 : if (!f.fp()->isConstructing()) {
1187 872 : types::TypeSet *types = types::TypeScript::ThisTypes(script);
1188 872 : Address address(JSFrameReg, StackFrame::offsetOfThis(fun));
1189 872 : if (!masm.generateTypeCheck(f.cx, address, types, &mismatches))
1190 : return;
1191 : }
1192 :
1193 2257 : for (unsigned i = 0; i < fun->nargs; i++) {
1194 1367 : types::TypeSet *types = types::TypeScript::ArgTypes(script, i);
1195 1367 : Address address(JSFrameReg, StackFrame::offsetOfFormalArg(fun, i));
1196 1367 : if (!masm.generateTypeCheck(f.cx, address, types, &mismatches))
1197 : return;
1198 : }
1199 :
1200 890 : Jump done = masm.jump();
1201 :
1202 1780 : LinkerHelper linker(masm, JSC::METHOD_CODE);
1203 890 : JSC::ExecutablePool *ep = linker.init(f.cx);
1204 890 : if (!ep)
1205 : return;
1206 890 : jit->argsCheckPool = ep;
1207 :
1208 890 : if (!linker.verifyRange(f.chunk())) {
1209 0 : jit->resetArgsCheck();
1210 : return;
1211 : }
1212 :
1213 3785 : for (unsigned i = 0; i < mismatches.length(); i++)
1214 2895 : linker.link(mismatches[i], jit->argsCheckStub);
1215 890 : linker.link(done, jit->argsCheckFallthrough);
1216 :
1217 890 : JSC::CodeLocationLabel cs = linker.finalize(f);
1218 :
1219 : JaegerSpew(JSpew_PICs, "generated ARGS CHECK stub %p (%lu bytes)\n",
1220 890 : cs.executableAddress(), (unsigned long)masm.size());
1221 :
1222 2670 : Repatcher repatch(f.chunk());
1223 890 : repatch.relink(jit->argsCheckJump, cs);
1224 : }
1225 :
1226 : void
1227 315 : JITScript::resetArgsCheck()
1228 : {
1229 315 : argsCheckPool->release();
1230 315 : argsCheckPool = NULL;
1231 :
1232 630 : Repatcher repatch(chunk(script->code));
1233 315 : repatch.relink(argsCheckJump, argsCheckStub);
1234 315 : }
1235 :
1236 : #endif /* JS_MONOIC */
1237 :
|