1 : /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=4 sw=4 et tw=99:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 : * May 28, 2008.
19 : *
20 : * The Initial Developer of the Original Code is
21 : * Brendan Eich <brendan@mozilla.org>
22 : *
23 : * Contributor(s):
24 : * David Anderson <danderson@mozilla.com>
25 : * David Mandelin <dmandelin@mozilla.com>
26 : * Jan de Mooij <jandemooij@gmail.com>
27 : *
28 : * Alternatively, the contents of this file may be used under the terms of
29 : * either of the GNU General Public License Version 2 or later (the "GPL"),
30 : * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 : * in which case the provisions of the GPL or the LGPL are applicable instead
32 : * of those above. If you wish to allow use of your version of this file only
33 : * under the terms of either the GPL or the LGPL, and not to allow others to
34 : * use your version of this file under the terms of the MPL, indicate your
35 : * decision by deleting the provisions above and replace them with the notice
36 : * and other provisions required by the GPL or the LGPL. If you do not delete
37 : * the provisions above, a recipient may use your version of this file under
38 : * the terms of any one of the MPL, the GPL or the LGPL.
39 : *
40 : * ***** END LICENSE BLOCK ***** */
41 :
42 : #include "MethodJIT.h"
43 : #include "jsnum.h"
44 : #include "jsbool.h"
45 : #include "jsiter.h"
46 : #include "Compiler.h"
47 : #include "StubCalls.h"
48 : #include "MonoIC.h"
49 : #include "PolyIC.h"
50 : #include "ICChecker.h"
51 : #include "Retcon.h"
52 : #include "assembler/jit/ExecutableAllocator.h"
53 : #include "assembler/assembler/LinkBuffer.h"
54 : #include "FrameState-inl.h"
55 : #include "jsobjinlines.h"
56 : #include "jsscriptinlines.h"
57 : #include "InlineFrameAssembler.h"
58 : #include "jscompartment.h"
59 : #include "jsopcodeinlines.h"
60 :
61 : #include "builtin/RegExp.h"
62 : #include "frontend/BytecodeEmitter.h"
63 : #include "vm/RegExpStatics.h"
64 : #include "vm/RegExpObject.h"
65 :
66 : #include "jsautooplen.h"
67 : #include "jstypedarrayinlines.h"
68 : #include "vm/RegExpObject-inl.h"
69 :
70 : using namespace js;
71 : using namespace js::mjit;
72 : #if defined(JS_POLYIC) || defined(JS_MONOIC)
73 : using namespace js::mjit::ic;
74 : #endif
75 : using namespace js::analyze;
76 :
/*
 * Bail out of the enclosing function with |retval| if an out-of-memory
 * condition has been recorded anywhere: in a compiler-managed vector
 * (oomInVector), in the main assembler, or in the out-of-line stub assembler.
 */
77 : #define RETURN_IF_OOM(retval) \
78 : JS_BEGIN_MACRO \
79 : if (oomInVector || masm.oom() || stubcc.masm.oom()) \
80 : return retval; \
81 : JS_END_MACRO
82 :
83 : /*
84 : * Number of times a script must be called or had a backedge before we try to
85 : * inline its calls.
86 : */
// Tuning knob: compared against outerScript->getUseCount() in the Compiler
// constructor when deciding whether to enable inlining for this compilation.
87 : static const size_t USES_BEFORE_INLINING = 10000;
88 :
/*
 * Compiler constructor: captures the script/chunk being compiled and
 * initializes every per-compilation vector with a CompilerAllocPolicy that
 * records allocation failure on the compiler itself (checked later via
 * oomInVector / RETURN_IF_OOM).
 *
 * NOTE(review): thisFromCtor() hands out |this| before the constructor body
 * has run -- presumably safe because the alloc policy and FrameState only
 * store the pointer at this point; confirm against their definitions.
 */
89 134091 : mjit::Compiler::Compiler(JSContext *cx, JSScript *outerScript,
90 : unsigned chunkIndex, bool isConstructing)
91 : : BaseCompiler(cx),
92 : outerScript(outerScript),
93 : chunkIndex(chunkIndex),
94 : isConstructing(isConstructing),
95 134091 : outerChunk(outerJIT()->chunkDescriptor(chunkIndex)),
96 : ssa(cx, outerScript),
97 134091 : globalObj(outerScript->hasGlobal() ? outerScript->global() : NULL),
98 91801 : globalSlots(globalObj ? globalObj->getRawSlots() : NULL),
99 134091 : frame(cx, *thisFromCtor(), masm, stubcc),
100 : a(NULL), outer(NULL), script(NULL), PC(NULL), loop(NULL),
101 134091 : inlineFrames(CompilerAllocPolicy(cx, *thisFromCtor())),
102 134091 : branchPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
103 : #if defined JS_MONOIC
104 134091 : getGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
105 134091 : setGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
106 134091 : callICs(CompilerAllocPolicy(cx, *thisFromCtor())),
107 134091 : equalityICs(CompilerAllocPolicy(cx, *thisFromCtor())),
108 : #endif
109 : #if defined JS_POLYIC
110 134091 : pics(CompilerAllocPolicy(cx, *thisFromCtor())),
111 134091 : getElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
112 134091 : setElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
113 : #endif
114 134091 : callPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
115 134091 : callSites(CompilerAllocPolicy(cx, *thisFromCtor())),
116 134091 : doubleList(CompilerAllocPolicy(cx, *thisFromCtor())),
117 134091 : fixedIntToDoubleEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
118 134091 : fixedDoubleToAnyEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
119 134091 : jumpTables(CompilerAllocPolicy(cx, *thisFromCtor())),
120 134091 : jumpTableEdges(CompilerAllocPolicy(cx, *thisFromCtor())),
121 134091 : loopEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
122 134091 : chunkEdges(CompilerAllocPolicy(cx, *thisFromCtor())),
123 134091 : stubcc(cx, *thisFromCtor(), frame),
124 134091 : debugMode_(cx->compartment->debugMode()),
125 : inlining_(false),
126 : hasGlobalReallocation(false),
127 : oomInVector(false),
128 : overflowICSpace(false),
// Snapshot the GC counter; presumably used later to detect a GC occurring
// mid-compilation -- TODO confirm where gcNumber is rechecked.
129 : gcNumber(cx->runtime->gcNumber),
130 : applyTricks(NoApplyTricks),
131 3309985 : pcLengths(NULL)
132 : {
133 : /* Once a script starts getting really hot we will inline calls in it. */
// Inlining also requires: no debug mode, type inference on, and a global
// object (scanInlineCalls asserts globalObj when inlining() is true).
134 186497 : if (!debugMode() && cx->typeInferenceEnabled() && globalObj &&
135 26302 : (outerScript->getUseCount() >= USES_BEFORE_INLINING ||
136 26104 : cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS))) {
137 24965 : inlining_ = true;
138 : }
139 134091 : }
140 :
/*
 * Top-level entry point for one compilation attempt.
 *
 * Runs performCompilation() and, on a hard failure (anything other than
 * Compile_Okay or Compile_Retry), permanently blacklists the script from the
 * method JIT by poisoning its arity-check pointer with JS_UNJITTABLE_SCRIPT,
 * and marks the function as uninlineable so other compilations stop trying
 * to inline it.
 */
141 : CompileStatus
142 134091 : mjit::Compiler::compile()
143 : {
144 134091 : JS_ASSERT(!outerChunkRef().chunk);
145 :
// The poison slot differs for 'new' (constructing) vs. normal calls.
146 : void **checkAddr = isConstructing
147 : ? &outerScript->jitArityCheckCtor
148 134091 : : &outerScript->jitArityCheckNormal;
149 :
150 134091 : CompileStatus status = performCompilation();
151 134091 : if (status != Compile_Okay && status != Compile_Retry) {
152 5210 : *checkAddr = JS_UNJITTABLE_SCRIPT;
153 5210 : if (outerScript->function()) {
154 4565 : outerScript->uninlineable = true;
155 4565 : types::MarkTypeObjectFlags(cx, outerScript->function(),
156 4565 : types::OBJECT_FLAG_UNINLINEABLE);
157 : }
158 : }
159 :
160 134091 : return status;
161 161 : }
162 :
/*
 * Verify that |script| can be analyzed and compiled.
 *
 * Runs bytecode analysis (and type inference, when enabled) on demand.
 * Returns Compile_Abort for scripts the method JIT refuses outright
 * (cleared global, uncompileable opcodes, failed analysis) and
 * Compile_Error when the analysis itself could not be run (OOM).
 */
163 : CompileStatus
164 141758 : mjit::Compiler::checkAnalysis(JSScript *script)
165 : {
166 141758 : if (script->hasClearedGlobal()) {
167 0 : JaegerSpew(JSpew_Abort, "script has a cleared global\n");
168 0 : return Compile_Abort;
169 : }
170 :
171 141758 : if (!script->ensureRanAnalysis(cx, NULL))
172 0 : return Compile_Error;
173 :
174 141758 : if (!script->analysis()->compileable()) {
175 5210 : JaegerSpew(JSpew_Abort, "script has uncompileable opcodes\n");
176 5210 : return Compile_Abort;
177 : }
178 :
179 136548 : if (cx->typeInferenceEnabled() && !script->ensureRanInference(cx))
180 0 : return Compile_Error;
181 :
182 136548 : ScriptAnalysis *analysis = script->analysis();
183 136548 : analysis->assertMatchingDebugMode();
184 136548 : if (analysis->failed()) {
185 0 : JaegerSpew(JSpew_Abort, "couldn't analyze bytecode; probably switchX or OOM\n");
186 0 : return Compile_Abort;
187 : }
188 :
189 136548 : return Compile_Okay;
190 : }
191 :
/*
 * Register |script| as an inline frame in the cross-script SSA, rooted at
 * call site |parentpc| in frame |parent|, then recursively scan the new
 * frame for further inlineable calls (mutual recursion with
 * scanInlineCalls).
 */
192 : CompileStatus
193 3005 : mjit::Compiler::addInlineFrame(JSScript *script, uint32_t depth,
194 : uint32_t parent, jsbytecode *parentpc)
195 : {
196 3005 : JS_ASSERT(inlining());
197 :
198 3005 : CompileStatus status = checkAnalysis(script);
199 3005 : if (status != Compile_Okay)
200 0 : return status;
201 :
202 3005 : if (!ssa.addInlineFrame(script, depth, parent, parentpc))
203 0 : return Compile_Error;
204 :
// The frame just appended is the last one; recurse into it at its depth.
205 3005 : uint32_t index = ssa.iterFrame(ssa.numFrames() - 1).index;
206 3005 : return scanInlineCalls(index, depth);
207 : }
208 :
/*
 * Walk the bytecode of SSA frame |index| (restricted to the current chunk
 * for the outermost frame) looking for JSOP_CALL sites where every possible
 * callee can be inlined. For each such site, freeze the callee type set and
 * add the callees as inline frames via addInlineFrame (which recurses back
 * into this function).
 *
 * |depth| is the accumulated value-stack depth of the frames inlined so
 * far; it bounds nesting against the outer script's stack limit. A call
 * site is only inlined if *all* of its possible callees pass every check.
 */
209 : CompileStatus
210 27564 : mjit::Compiler::scanInlineCalls(uint32_t index, uint32_t depth)
211 : {
212 : /* Maximum number of calls we will inline at the same site. */
213 : static const uint32_t INLINE_SITE_LIMIT = 5;
214 :
215 27564 : JS_ASSERT(inlining() && globalObj);
216 :
217 : /* Not inlining yet from 'new' scripts. */
218 27564 : if (isConstructing)
219 293 : return Compile_Okay;
220 :
221 27271 : JSScript *script = ssa.getFrame(index).script;
222 27271 : ScriptAnalysis *analysis = script->analysis();
223 :
224 : /* Don't inline from functions which could have a non-global scope object. */
225 142205 : if (!script->hasGlobal() ||
226 27270 : script->global() != globalObj ||
227 43832 : (script->function() && script->function()->getParent() != globalObj) ||
228 43832 : (script->function() && script->function()->isHeavyweight()) ||
229 : script->isActiveEval) {
230 4320 : return Compile_Okay;
231 : }
232 :
233 22951 : uint32_t nextOffset = 0;
234 22951 : uint32_t lastOffset = script->length;
235 :
// Only scan the current chunk's bytecode range when at the outermost frame;
// inline frames are always scanned in full.
236 22951 : if (index == CrossScriptSSA::OUTER_FRAME) {
237 19947 : nextOffset = outerChunk.begin;
238 19947 : lastOffset = outerChunk.end;
239 : }
240 :
241 1012148 : while (nextOffset < lastOffset) {
242 966263 : uint32_t offset = nextOffset;
243 966263 : jsbytecode *pc = script->code + offset;
244 966263 : nextOffset = offset + GetBytecodeLength(pc);
245 :
// Skip unreachable bytecode (no analysis info).
246 966263 : Bytecode *code = analysis->maybeCode(pc);
247 966263 : if (!code)
248 7693 : continue;
249 :
250 : /* :XXX: Not yet inlining 'new' calls. */
251 958570 : if (JSOp(*pc) != JSOP_CALL)
252 907880 : continue;
253 :
254 : /* Not inlining at monitored call sites or those with type barriers. */
255 50690 : if (code->monitoredTypes || code->monitoredTypesReturn || analysis->typeBarriers(cx, pc) != NULL)
256 21530 : continue;
257 :
// The callee is |argc + 1| values below the top of the popped stack
// (callee, this, args...).
258 29160 : uint32_t argc = GET_ARGC(pc);
259 29160 : types::TypeSet *calleeTypes = analysis->poppedTypes(pc, argc + 1);
260 :
261 29160 : if (calleeTypes->getKnownTypeTag(cx) != JSVAL_TYPE_OBJECT)
262 23063 : continue;
263 :
264 6097 : if (calleeTypes->getObjectCount() >= INLINE_SITE_LIMIT)
265 18 : continue;
266 :
267 : /*
268 : * Compute the maximum height we can grow the stack for inlined frames.
269 : * We always reserve space for loop temporaries, for an extra stack
270 : * frame pushed when making a call from the deepest inlined frame, and
271 : * for the temporary slot used by type barriers.
272 : */
273 : uint32_t stackLimit = outerScript->nslots + StackSpace::STACK_JIT_EXTRA
274 6079 : - VALUES_PER_STACK_FRAME - FrameState::TEMPORARY_LIMIT - 1;
275 :
276 : /* Compute the depth of any frames inlined at this site. */
277 6079 : uint32_t nextDepth = depth + VALUES_PER_STACK_FRAME + script->nfixed + code->stackDepth;
278 :
279 : /*
280 : * Scan each of the possible callees for other conditions precluding
281 : * inlining. We only inline at a call site if all callees are inlineable.
282 : */
283 6079 : unsigned count = calleeTypes->getObjectCount();
284 6079 : bool okay = true;
285 9106 : for (unsigned i = 0; i < count; i++) {
// A TypeObject entry (rather than a singleton) means the callee is not a
// unique known function; give up on this site.
286 6269 : if (calleeTypes->getTypeObject(i) != NULL) {
287 739 : okay = false;
288 739 : break;
289 : }
290 :
291 5530 : JSObject *obj = calleeTypes->getSingleObject(i);
292 5530 : if (!obj)
293 0 : continue;
294 :
295 5530 : if (!obj->isFunction()) {
296 3 : okay = false;
297 3 : break;
298 : }
299 :
300 5527 : JSFunction *fun = obj->toFunction();
301 5527 : if (!fun->isInterpreted()) {
302 0 : okay = false;
303 0 : break;
304 : }
// NOTE: shadows the outer |script| for the remainder of this loop body.
305 5527 : JSScript *script = fun->script();
306 :
307 : /*
308 : * Don't inline calls to scripts which haven't been analyzed.
309 : * We need to analyze the inlined scripts to compile them, and
310 : * doing so can change type information we have queried already
311 : * in making inlining decisions.
312 : */
313 5527 : if (!script->hasAnalysis() || !script->analysis()->ranInference()) {
314 318 : okay = false;
315 318 : break;
316 : }
317 :
318 : /*
319 : * The outer and inner scripts must have the same scope. This only
320 : * allows us to inline calls between non-inner functions. Also
321 : * check for consistent strictness between the functions.
322 : */
323 10418 : if (!globalObj ||
324 5209 : fun->getParent() != globalObj ||
325 : outerScript->strictModeCode != script->strictModeCode) {
326 79 : okay = false;
327 79 : break;
328 : }
329 :
330 : /* We can't cope with inlining recursive functions yet. */
// Walk the chain of parent frames; a match anywhere means recursion.
331 5130 : uint32_t nindex = index;
332 18113 : while (nindex != CrossScriptSSA::INVALID_FRAME) {
333 7853 : if (ssa.getFrame(nindex).script == script)
334 468 : okay = false;
335 7853 : nindex = ssa.getFrame(nindex).parent;
336 : }
337 5130 : if (!okay)
338 468 : break;
339 :
340 : /* Watch for excessively deep nesting of inlined frames. */
341 4662 : if (nextDepth + script->nslots >= stackLimit) {
342 0 : okay = false;
343 0 : break;
344 : }
345 :
346 4662 : if (!script->types || !script->types->hasScope()) {
347 0 : okay = false;
348 0 : break;
349 : }
350 :
351 4662 : CompileStatus status = checkAnalysis(script);
352 4662 : if (status != Compile_Okay)
353 17 : return status;
354 :
355 4645 : if (!script->analysis()->inlineable(argc)) {
356 1461 : okay = false;
357 1461 : break;
358 : }
359 :
360 3184 : if (types::TypeSet::HasObjectFlags(cx, fun->getType(cx),
361 3184 : types::OBJECT_FLAG_UNINLINEABLE)) {
362 151 : okay = false;
363 151 : break;
364 : }
365 :
366 : /*
367 : * Don't inline scripts which use 'this' if it is possible they
368 : * could be called with a 'this' value requiring wrapping. During
369 : * inlining we do not want to modify frame entries belonging to the
370 : * caller.
371 : */
372 4133 : if (script->analysis()->usesThisValue() &&
373 1100 : types::TypeScript::ThisTypes(script)->getKnownTypeTag(cx) != JSVAL_TYPE_OBJECT) {
374 6 : okay = false;
375 6 : break;
376 : }
377 : }
378 6062 : if (!okay)
379 3225 : continue;
380 :
// Freeze the callee set: a later change to it must trigger recompilation.
381 2837 : calleeTypes->addFreeze(cx);
382 :
383 : /*
384 : * Add the inline frames to the cross script SSA. We will pick these
385 : * back up when compiling the call site.
386 : */
387 5842 : for (unsigned i = 0; i < count; i++) {
388 3005 : JSObject *obj = calleeTypes->getSingleObject(i);
389 3005 : if (!obj)
390 0 : continue;
391 :
392 3005 : JSFunction *fun = obj->toFunction();
393 3005 : JSScript *script = fun->script();
394 :
395 3005 : CompileStatus status = addInlineFrame(script, nextDepth, index, pc);
396 3005 : if (status != Compile_Okay)
397 0 : return status;
398 : }
399 : }
400 :
401 22934 : return Compile_Okay;
402 : }
403 :
/*
 * Begin compiling |script| (either the outer script or a frame being
 * inlined): allocate and link a new ActiveFrame, record code-start offsets,
 * push the frame onto the FrameState, allocate the per-opcode jump map,
 * and (with type inference) seed the variable type snapshot. On success
 * the compiler's cursor state (script/analysis/PC/a) points at the new
 * frame. Returns Compile_Error on OOM.
 */
404 : CompileStatus
405 131876 : mjit::Compiler::pushActiveFrame(JSScript *script, uint32_t argc)
406 : {
407 131876 : if (cx->runtime->profilingScripts && !script->pcCounters)
408 0 : script->initCounts(cx);
409 :
410 131876 : ActiveFrame *newa = OffTheBooks::new_<ActiveFrame>(cx);
411 131876 : if (!newa) {
412 0 : js_ReportOutOfMemory(cx);
413 0 : return Compile_Error;
414 : }
415 :
// Link into the frame chain; only inline frames record a return PC.
416 131876 : newa->parent = a;
417 131876 : if (a)
418 2995 : newa->parentPC = PC;
419 131876 : newa->script = script;
420 131876 : newa->mainCodeStart = masm.size();
421 131876 : newa->stubCodeStart = stubcc.size();
422 :
423 131876 : if (outer) {
424 2995 : newa->inlineIndex = uint32_t(inlineFrames.length());
425 2995 : inlineFrames.append(newa);
426 : } else {
427 128881 : newa->inlineIndex = CrossScriptSSA::OUTER_FRAME;
428 128881 : outer = newa;
429 : }
430 131876 : JS_ASSERT(ssa.getFrame(newa->inlineIndex).script == script);
431 :
432 131876 : newa->inlinePCOffset = ssa.frameLength(newa->inlineIndex);
433 :
434 131876 : ScriptAnalysis *newAnalysis = script->analysis();
435 :
436 : #ifdef JS_METHODJIT_SPEW
// Debug spew only: dump liveness for non-escaping args and locals.
437 131876 : if (cx->typeInferenceEnabled() && IsJaegerSpewChannelActive(JSpew_Regalloc)) {
438 0 : unsigned nargs = script->function() ? script->function()->nargs : 0;
439 0 : for (unsigned i = 0; i < nargs; i++) {
440 0 : uint32_t slot = ArgSlot(i);
441 0 : if (!newAnalysis->slotEscapes(slot)) {
442 0 : JaegerSpew(JSpew_Regalloc, "Argument %u:", i);
443 0 : newAnalysis->liveness(slot).print();
444 : }
445 : }
446 0 : for (unsigned i = 0; i < script->nfixed; i++) {
447 0 : uint32_t slot = LocalSlot(script, i);
448 0 : if (!newAnalysis->slotEscapes(slot)) {
449 0 : JaegerSpew(JSpew_Regalloc, "Local %u:", i);
450 0 : newAnalysis->liveness(slot).print();
451 : }
452 : }
453 : }
454 : #endif
455 :
456 131876 : if (!frame.pushActiveFrame(script, argc)) {
457 0 : js_ReportOutOfMemory(cx);
458 0 : return Compile_Error;
459 : }
460 :
// One Label per bytecode offset; filled in as code is generated.
461 131876 : newa->jumpMap = (Label *)OffTheBooks::malloc_(sizeof(Label) * script->length);
462 131876 : if (!newa->jumpMap) {
463 0 : js_ReportOutOfMemory(cx);
464 0 : return Compile_Error;
465 : }
466 : #ifdef DEBUG
467 33567394 : for (uint32_t i = 0; i < script->length; i++)
468 33435518 : newa->jumpMap[i] = Label();
469 : #endif
470 :
471 131876 : if (cx->typeInferenceEnabled()) {
472 59845 : CompileStatus status = prepareInferenceTypes(script, newa);
473 59845 : if (status != Compile_Okay)
474 0 : return status;
475 : }
476 :
477 131876 : this->script = script;
478 131876 : this->analysis = newAnalysis;
479 131876 : this->PC = script->code;
480 131876 : this->a = newa;
481 :
482 131876 : return Compile_Okay;
483 : }
484 :
/*
 * Finish compiling an inline frame and restore the compiler's cursor to
 * the parent frame. Records the frame's code-end offsets first.
 *
 * NOTE: statement order below is load-bearing. PC is restored from
 * a->parentPC *before* |a| is reset; after |this->a = a->parent| the
 * subsequent |a->script| reads the *parent's* script, which is exactly
 * what the cursor should point at.
 */
485 : void
486 2995 : mjit::Compiler::popActiveFrame()
487 : {
488 2995 : JS_ASSERT(a->parent);
489 2995 : a->mainCodeEnd = masm.size();
490 2995 : a->stubCodeEnd = stubcc.size();
491 2995 : this->PC = a->parentPC;
492 2995 : this->a = (ActiveFrame *) a->parent;
493 2995 : this->script = a->script;
494 2995 : this->analysis = this->script->analysis();
495 :
496 2995 : frame.popActiveFrame();
497 : }
498 :
/*
 * Evaluate a compilation step and propagate any non-Okay status to the
 * caller, first converting a detected OOM (vector or assembler) into a
 * reported out-of-memory error on the context.
 */
499 : #define CHECK_STATUS(expr) \
500 : JS_BEGIN_MACRO \
501 : CompileStatus status_ = (expr); \
502 : if (status_ != Compile_Okay) { \
503 : if (oomInVector || masm.oom() || stubcc.masm.oom()) \
504 : js_ReportOutOfMemory(cx); \
505 : return status_; \
506 : } \
507 : JS_END_MACRO
508 :
/*
 * Drive the whole compilation pipeline for the current chunk:
 * analysis check -> inline-call scan (if inlining) -> push outer frame ->
 * prologue (first chunk only) -> method body -> epilogue (last chunk only)
 * -> finishThisUp(). All steps run inside a types::AutoEnterCompilation
 * scope so inference can record what this compilation depends on.
 */
509 : CompileStatus
510 134091 : mjit::Compiler::performCompilation()
511 : {
512 : JaegerSpew(JSpew_Scripts,
513 : "compiling script (file \"%s\") (line \"%d\") (length \"%d\") (chunk \"%d\")\n",
514 134091 : outerScript->filename, outerScript->lineno, outerScript->length, chunkIndex);
515 :
516 134091 : if (inlining()) {
517 : JaegerSpew(JSpew_Inlining,
518 : "inlining calls in script (file \"%s\") (line \"%d\")\n",
519 24965 : outerScript->filename, outerScript->lineno);
520 : }
521 :
522 : #ifdef JS_METHODJIT_SPEW
523 : Profiler prof;
524 134091 : prof.start();
525 : #endif
526 :
527 : #ifdef JS_METHODJIT
528 134091 : outerScript->debugMode = debugMode();
529 : #endif
530 :
531 134091 : JS_ASSERT(cx->compartment->activeInference);
532 :
533 : {
534 268182 : types::AutoEnterCompilation enter(cx, outerScript, isConstructing, chunkIndex);
535 :
536 134091 : CHECK_STATUS(checkAnalysis(outerScript));
537 128898 : if (inlining())
538 24559 : CHECK_STATUS(scanInlineCalls(CrossScriptSSA::OUTER_FRAME, 0));
539 128881 : CHECK_STATUS(pushActiveFrame(outerScript, 0));
540 :
// Per-pc length bookkeeping, only needed for profiling / native address
// info; sized over all frames (outer plus inlined).
541 128881 : if (outerScript->pcCounters || Probes::wantNativeAddressInfo(cx)) {
542 0 : size_t length = ssa.frameLength(ssa.numFrames() - 1);
543 0 : pcLengths = (PCLengthEntry *) OffTheBooks::calloc_(sizeof(pcLengths[0]) * length);
544 0 : if (!pcLengths)
545 0 : return Compile_Error;
546 : }
547 :
548 128881 : if (chunkIndex == 0)
549 127714 : CHECK_STATUS(generatePrologue());
550 128881 : CHECK_STATUS(generateMethod());
551 128847 : if (outerJIT() && chunkIndex == outerJIT()->nchunks - 1)
552 127567 : CHECK_STATUS(generateEpilogue());
553 128847 : CHECK_STATUS(finishThisUp());
554 : }
555 :
556 : #ifdef JS_METHODJIT_SPEW
557 128827 : prof.stop();
558 128827 : JaegerSpew(JSpew_Prof, "compilation took %d us\n", prof.time_us());
559 : #endif
560 :
561 : JaegerSpew(JSpew_Scripts, "successfully compiled (code \"%p\") (size \"%u\")\n",
562 128827 : outerChunkRef().chunk->code.m_code.executableAddress(),
563 257654 : unsigned(outerChunkRef().chunk->code.m_size));
564 :
565 128827 : return Compile_Okay;
566 : }
567 :
568 : #undef CHECK_STATUS
569 :
// Base-frame constructor: an unlinked frame with no script and an invalid
// inline index (UINT32_MAX) until pushActiveFrame fills it in.
570 131876 : mjit::JSActiveFrame::JSActiveFrame()
571 131876 : : parent(NULL), parentPC(NULL), script(NULL), inlineIndex(UINT32_MAX)
572 : {
573 131876 : }
574 :
// Per-compilation frame state: jumpMap and varTypes start NULL and are
// allocated lazily (pushActiveFrame / prepareInferenceTypes); the
// return-value fields track how an inlined frame hands its result back.
575 131876 : mjit::Compiler::ActiveFrame::ActiveFrame(JSContext *cx)
576 : : jumpMap(NULL),
577 : varTypes(NULL), needReturnValue(false),
578 : syncReturnValue(false), returnValueDouble(false), returnSet(false),
579 131876 : returnEntry(NULL), returnJumps(NULL), exitState(NULL)
580 131876 : {}
581 :
// Release the lazily-allocated jump map and (when type inference ran)
// the variable-type snapshot. free_(NULL) is a no-op for jumpMap.
582 131876 : mjit::Compiler::ActiveFrame::~ActiveFrame()
583 : {
584 131876 : js::Foreground::free_(jumpMap);
585 131876 : if (varTypes)
586 59845 : js::Foreground::free_(varTypes);
587 131876 : }
588 :
// Destroy the outer frame, all inline frames, and any LoopState objects
// still on the loop stack (walking outward via loop->outer).
589 268182 : mjit::Compiler::~Compiler()
590 : {
591 134091 : if (outer)
592 128881 : cx->delete_(outer);
593 137086 : for (unsigned i = 0; i < inlineFrames.length(); i++)
594 2995 : cx->delete_(inlineFrames[i]);
595 268194 : while (loop) {
596 12 : LoopState *nloop = loop->outer;
597 12 : cx->delete_(loop);
598 12 : loop = nloop;
599 : }
600 134091 : }
601 :
/*
 * Allocate and seed a->varTypes with the inferred type set of every arg
 * and local slot of |script|, so the compiler can track each variable's
 * SSA value/type as it walks the bytecode (see invariant described below).
 * Returns Compile_Error on OOM.
 */
602 : CompileStatus
603 59845 : mjit::Compiler::prepareInferenceTypes(JSScript *script, ActiveFrame *a)
604 : {
605 : /*
606 : * During our walk of the script, we need to preserve the invariant that at
607 : * join points the in memory type tag is always in sync with the known type
608 : * tag of the variable's SSA value at that join point. In particular, SSA
609 : * values inferred as (int|double) must in fact be doubles, stored either
610 : * in floating point registers or in memory. There is an exception for
611 : * locals whose value is currently dead, whose type might not be synced.
612 : *
613 : * To ensure this, we need to know the SSA values for each variable at each
614 : * join point, which the SSA analysis does not store explicitly. These can
615 : * be recovered, though. During the forward walk, the SSA value of a var
616 : * (and its associated type set) change only when we see an explicit assign
617 : * to the var or get to a join point with a phi node for that var. So we
618 : * can duplicate the effects of that walk here by watching for writes to
619 : * vars (updateVarTypes) and new phi nodes at join points.
620 : *
621 : * When we get to a branch and need to know a variable's value at the
622 : * branch target, we know it will either be a phi node at the target or
623 : * the variable's current value, as no phi node is created at the target
624 : * only if a variable has the same value on all incoming edges.
625 : */
626 :
627 : a->varTypes = (VarType *)
628 59845 : OffTheBooks::calloc_(TotalSlots(script) * sizeof(VarType));
629 59845 : if (!a->varTypes) {
630 0 : js_ReportOutOfMemory(cx);
631 0 : return Compile_Error;
632 : }
633 :
// Slots before ArgSlot(0) (callee/this) are left zeroed by calloc_.
634 230312 : for (uint32_t slot = ArgSlot(0); slot < TotalSlots(script); slot++) {
635 170467 : VarType &vt = a->varTypes[slot];
636 170467 : vt.setTypes(types::TypeScript::SlotTypes(script, slot));
637 : }
638 :
639 59845 : return Compile_Okay;
640 : }
641 :
642 : /*
643 : * Number of times a script must be called or have back edges taken before we
644 : * run it in the methodjit. We wait longer if type inference is enabled, to
645 : * allow more gathering of type information and less recompilation.
646 : */
647 : static const size_t USES_BEFORE_COMPILE = 16;
648 : static const size_t INFER_USES_BEFORE_COMPILE = 40;
649 :
650 : /* Target maximum size, in bytecode length, for a compiled chunk of a script. */
// Deliberately non-const: adjustable at runtime via mjit::SetChunkLimit().
651 : static uint32_t CHUNK_LIMIT = 1500;
652 :
/*
 * Override the default CHUNK_LIMIT (bytecode length per compiled chunk).
 * A zero |limit| is ignored and leaves the current value untouched.
 */
653 : void
654 27 : mjit::SetChunkLimit(uint32_t limit)
655 : {
656 27 : if (limit)
657 27 : CHUNK_LIMIT = limit;
658 : }
659 :
/*
 * Create the JITScript descriptor for |script| (normal or constructing
 * variant), partitioning its bytecode into compilation chunks.
 *
 * Short scripts (or without type inference) get a single chunk covering
 * everything. Long scripts are split at loop back edges and at the
 * CHUNK_LIMIT size target, with CrossChunkEdge records for every branch
 * that crosses a chunk boundary. For each cross-chunk edge a small shim
 * stub is generated (calling stubs::CrossChunkShim) which is jumped to
 * while the edge's target chunk is not yet compiled.
 *
 * On success, stores the new JITScript into script->jitCtor / jitNormal
 * and returns it; returns NULL on OOM.
 */
660 : JITScript *
661 113898 : MakeJITScript(JSContext *cx, JSScript *script, bool construct)
662 : {
663 113898 : if (!script->ensureRanAnalysis(cx, NULL))
664 0 : return NULL;
665 :
666 113898 : ScriptAnalysis *analysis = script->analysis();
667 :
668 113898 : JITScript *&location = construct ? script->jitCtor : script->jitNormal;
669 :
670 227796 : Vector<ChunkDescriptor> chunks(cx);
671 227796 : Vector<CrossChunkEdge> edges(cx);
672 :
// Fast path: whole script as one chunk.
673 113898 : if (script->length < CHUNK_LIMIT || !cx->typeInferenceEnabled()) {
674 113664 : ChunkDescriptor desc;
675 113664 : desc.begin = 0;
676 113664 : desc.end = script->length;
677 113664 : if (!chunks.append(desc))
678 0 : return NULL;
679 : } else {
680 234 : if (!script->ensureRanInference(cx))
681 0 : return NULL;
682 :
683 : /* Outgoing edges within the current chunk. */
684 468 : Vector<CrossChunkEdge> currentEdges(cx);
685 234 : uint32_t chunkStart = 0;
686 :
687 234 : unsigned offset, nextOffset = 0;
688 1225536 : while (nextOffset < script->length) {
689 1225068 : offset = nextOffset;
690 :
691 1225068 : jsbytecode *pc = script->code + offset;
692 1225068 : JSOp op = JSOp(*pc);
693 :
694 1225068 : nextOffset = offset + GetBytecodeLength(pc);
695 :
696 1225068 : Bytecode *code = analysis->maybeCode(offset);
697 1225068 : if (!code)
698 14352 : continue;
699 :
700 : /* Whether this should be the last opcode in the chunk. */
701 1210716 : bool finishChunk = false;
702 :
703 : /* Keep going, override finishChunk. */
704 1210716 : bool preserveChunk = false;
705 :
706 : /*
707 : * Add an edge for opcodes which perform a branch. Skip LABEL ops,
708 : * which do not actually branch. XXX LABEL should not be JOF_JUMP.
709 : */
710 1210716 : uint32_t type = JOF_TYPE(js_CodeSpec[op].format);
711 1210716 : if (type == JOF_JUMP && op != JSOP_LABEL) {
712 30006 : CrossChunkEdge edge;
713 30006 : edge.source = offset;
714 30006 : edge.target = FollowBranch(cx, script, pc - script->code);
715 30006 : if (edge.target < offset) {
716 : /* Always end chunks after loop back edges. */
717 545 : finishChunk = true;
718 545 : if (edge.target < chunkStart) {
719 69 : analysis->getCode(edge.target).safePoint = true;
720 69 : if (!edges.append(edge))
721 0 : return NULL;
722 : }
723 29461 : } else if (edge.target == nextOffset) {
724 : /*
725 : * Override finishChunk for bytecodes which directly
726 : * jump to their fallthrough opcode ('if (x) {}'). This
727 : * creates two CFG edges with the same source/target, which
728 : * will confuse the compiler's edge patching code.
729 : */
730 12 : preserveChunk = true;
731 : } else {
732 29449 : if (!currentEdges.append(edge))
733 0 : return NULL;
734 : }
735 : }
736 :
// Switch opcodes fan out to many targets; record an edge per case
// (plus default) so boundary crossings are known.
737 1210716 : if (op == JSOP_TABLESWITCH) {
738 8 : jsbytecode *pc2 = pc;
739 8 : unsigned defaultOffset = offset + GET_JUMP_OFFSET(pc);
740 8 : pc2 += JUMP_OFFSET_LEN;
741 8 : int32_t low = GET_JUMP_OFFSET(pc2);
742 8 : pc2 += JUMP_OFFSET_LEN;
743 8 : int32_t high = GET_JUMP_OFFSET(pc2);
744 8 : pc2 += JUMP_OFFSET_LEN;
745 :
746 8 : CrossChunkEdge edge;
747 8 : edge.source = offset;
748 8 : edge.target = defaultOffset;
749 8 : if (!currentEdges.append(edge))
750 0 : return NULL;
751 :
752 28 : for (int32_t i = low; i <= high; i++) {
753 20 : unsigned targetOffset = offset + GET_JUMP_OFFSET(pc2);
754 20 : if (targetOffset != offset) {
755 : /*
756 : * This can end up inserting duplicate edges, all but
757 : * the first of which will be ignored.
758 : */
759 20 : CrossChunkEdge edge;
760 20 : edge.source = offset;
761 20 : edge.target = targetOffset;
762 20 : if (!currentEdges.append(edge))
763 0 : return NULL;
764 : }
765 20 : pc2 += JUMP_OFFSET_LEN;
766 : }
767 : }
768 :
769 1210716 : if (op == JSOP_LOOKUPSWITCH) {
770 4 : unsigned defaultOffset = offset + GET_JUMP_OFFSET(pc);
771 4 : jsbytecode *pc2 = pc + JUMP_OFFSET_LEN;
772 4 : unsigned npairs = GET_UINT16(pc2);
773 4 : pc2 += UINT16_LEN;
774 :
775 4 : CrossChunkEdge edge;
776 4 : edge.source = offset;
777 4 : edge.target = defaultOffset;
778 4 : if (!currentEdges.append(edge))
779 0 : return NULL;
780 :
781 18 : while (npairs) {
782 10 : pc2 += UINT32_INDEX_LEN;
783 10 : unsigned targetOffset = offset + GET_JUMP_OFFSET(pc2);
784 10 : CrossChunkEdge edge;
785 10 : edge.source = offset;
786 10 : edge.target = targetOffset;
787 10 : if (!currentEdges.append(edge))
788 0 : return NULL;
789 10 : pc2 += JUMP_OFFSET_LEN;
790 10 : npairs--;
791 : }
792 : }
793 :
// Size target reached: end the chunk here (unless overridden below).
794 1210716 : if (unsigned(offset - chunkStart) > CHUNK_LIMIT)
795 2376 : finishChunk = true;
796 :
797 1210716 : if (nextOffset >= script->length || !analysis->maybeCode(nextOffset)) {
798 : /* Ensure that chunks do not start on unreachable opcodes. */
799 14554 : preserveChunk = true;
800 : } else {
801 : /*
802 : * Start new chunks at the opcode before each loop head.
803 : * This ensures that the initial goto for loops is included in
804 : * the same chunk as the loop itself.
805 : */
806 1196162 : jsbytecode *nextpc = script->code + nextOffset;
807 :
808 : /*
809 : * Don't insert a chunk boundary in the middle of two opcodes
810 : * which may be fused together.
811 : */
812 1196162 : switch (JSOp(*nextpc)) {
813 : case JSOP_POP:
814 : case JSOP_IFNE:
815 : case JSOP_IFEQ:
816 116079 : preserveChunk = true;
817 116079 : break;
818 : default:
819 1080083 : break;
820 : }
821 :
822 1196162 : uint32_t afterOffset = nextOffset + GetBytecodeLength(nextpc);
823 1196162 : if (afterOffset < script->length) {
824 2378107 : if (analysis->maybeCode(afterOffset) &&
825 1181608 : JSOp(script->code[afterOffset]) == JSOP_LOOPHEAD &&
826 543 : analysis->getLoop(afterOffset))
827 : {
828 541 : finishChunk = true;
829 : }
830 : }
831 : }
832 :
833 1210716 : if (finishChunk && !preserveChunk) {
834 3200 : ChunkDescriptor desc;
835 3200 : desc.begin = chunkStart;
836 3200 : desc.end = nextOffset;
837 3200 : if (!chunks.append(desc))
838 0 : return NULL;
839 :
840 : /* Add an edge for fallthrough from this chunk to the next one. */
841 3200 : if (!BytecodeNoFallThrough(op)) {
842 3142 : CrossChunkEdge edge;
843 3142 : edge.source = offset;
844 3142 : edge.target = nextOffset;
845 3142 : analysis->getCode(edge.target).safePoint = true;
846 3142 : if (!edges.append(edge))
847 0 : return NULL;
848 : }
849 :
// Promote intra-chunk pending edges that turned out to leave the
// chunk; edges resolved inside the chunk are simply dropped.
850 3200 : chunkStart = nextOffset;
851 32655 : for (unsigned i = 0; i < currentEdges.length(); i++) {
852 29455 : const CrossChunkEdge &edge = currentEdges[i];
853 29455 : if (edge.target >= nextOffset) {
854 301 : analysis->getCode(edge.target).safePoint = true;
855 301 : if (!edges.append(edge))
856 0 : return NULL;
857 : }
858 : }
859 3200 : currentEdges.clear();
860 : }
861 : }
862 :
// Trailing chunk for whatever bytecode remains after the last boundary.
863 234 : if (chunkStart != script->length) {
864 234 : ChunkDescriptor desc;
865 234 : desc.begin = chunkStart;
866 234 : desc.end = script->length;
867 234 : if (!chunks.append(desc))
868 0 : return NULL;
869 : }
870 : }
871 :
// The JITScript, its chunk descriptors, and its edge table are laid out
// in one contiguous calloc'd allocation; |cursor| walks it.
872 : size_t dataSize = sizeof(JITScript)
873 113898 : + (chunks.length() * sizeof(ChunkDescriptor))
874 113898 : + (edges.length() * sizeof(CrossChunkEdge));
875 113898 : uint8_t *cursor = (uint8_t *) OffTheBooks::calloc_(dataSize);
876 113898 : if (!cursor)
877 0 : return NULL;
878 :
879 113898 : JITScript *jit = (JITScript *) cursor;
880 113898 : cursor += sizeof(JITScript);
881 :
882 113898 : jit->script = script;
883 113898 : JS_INIT_CLIST(&jit->callers);
884 :
885 113898 : jit->nchunks = chunks.length();
886 230996 : for (unsigned i = 0; i < chunks.length(); i++) {
887 117098 : const ChunkDescriptor &a = chunks[i];
888 117098 : ChunkDescriptor &b = jit->chunkDescriptor(i);
889 117098 : b.begin = a.begin;
890 117098 : b.end = a.end;
891 :
892 117098 : if (chunks.length() == 1) {
893 : /* Seed the chunk's count so it is immediately compiled. */
894 113664 : b.counter = INFER_USES_BEFORE_COMPILE;
895 : }
896 : }
897 :
// Single-chunk scripts have no cross-chunk edges and need no shim pool.
898 113898 : if (edges.empty()) {
899 113664 : location = jit;
900 113664 : return jit;
901 : }
902 :
903 234 : jit->nedges = edges.length();
904 234 : CrossChunkEdge *jitEdges = jit->edges();
905 3746 : for (unsigned i = 0; i < edges.length(); i++) {
906 3512 : const CrossChunkEdge &a = edges[i];
907 3512 : CrossChunkEdge &b = jitEdges[i];
908 3512 : b.source = a.source;
909 3512 : b.target = a.target;
910 : }
911 :
912 : /* Generate a pool with all cross chunk shims, and set shimLabel for each edge. */
913 468 : Assembler masm;
914 3746 : for (unsigned i = 0; i < jit->nedges; i++) {
915 3512 : jsbytecode *pc = script->code + jitEdges[i].target;
// shimLabel temporarily holds the offset within the pool; it is
// rebased to an absolute address after the pool is linked below.
916 3512 : jitEdges[i].shimLabel = (void *) masm.distanceOf(masm.label());
917 3512 : masm.move(JSC::MacroAssembler::ImmPtr(&jitEdges[i]), Registers::ArgReg1);
918 : masm.fallibleVMCall(true, JS_FUNC_TO_DATA_PTR(void *, stubs::CrossChunkShim),
919 3512 : pc, NULL, script->nfixed + analysis->getCode(pc).stackDepth);
920 : }
921 468 : LinkerHelper linker(masm, JSC::METHOD_CODE);
922 234 : JSC::ExecutablePool *ep = linker.init(cx);
923 234 : if (!ep)
924 0 : return NULL;
925 234 : jit->shimPool = ep;
926 :
927 234 : masm.finalize(linker);
928 234 : uint8_t *shimCode = (uint8_t *) linker.finalizeCodeAddendum().executableAddress();
929 :
930 234 : JS_ALWAYS_TRUE(linker.verifyRange(JSC::JITCode(shimCode, masm.size())));
931 :
932 : JaegerSpew(JSpew_PICs, "generated SHIM POOL stub %p (%lu bytes)\n",
933 234 : shimCode, (unsigned long)masm.size());
934 :
// Rebase each edge's shim offset into an absolute code address.
935 3746 : for (unsigned i = 0; i < jit->nedges; i++) {
936 3512 : CrossChunkEdge &edge = jitEdges[i];
937 3512 : edge.shimLabel = shimCode + (size_t) edge.shimLabel;
938 : }
939 :
940 234 : location = jit;
941 234 : return jit;
942 : }
943 :
944 : CompileStatus
// Decide whether the chunk of |script| containing |pc| should be (and is)
// method-JIT compiled, compiling it on demand. Returns:
//   Compile_Okay    - compiled code for the chunk exists.
//   Compile_Skipped - script/chunk is still too cold to compile.
//   Compile_Abort   - JIT disabled for this context, or script marked
//                     permanently unjittable.
//   Compile_Error   - OOM or other failure (exception set unless OOM).
// |construct| selects the constructing vs. normal entry point.
945 29996556 : mjit::CanMethodJIT(JSContext *cx, JSScript *script, jsbytecode *pc,
946 : bool construct, CompileRequest request)
947 : {
948 : restart:
949 29996556 : if (!cx->methodJitEnabled)
950 10263102 : return Compile_Abort;
951 :
// The arity-check entry field doubles as the "unjittable" sentinel store.
952 19733454 : void *addr = construct ? script->jitArityCheckCtor : script->jitArityCheckNormal;
953 19733454 : if (addr == JS_UNJITTABLE_SCRIPT)
954 1512305 : return Compile_Abort;
955 :
956 18221149 : JITScript *jit = script->getJIT(construct);
957 :
// Warm-up gating: interpreter-triggered requests only compile once the
// script's use count crosses the (inference-dependent) threshold, unless
// JSOPTION_METHODJIT_ALWAYS forces eager compilation.
958 56319368 : if (request == CompileRequest_Interpreter &&
959 18221149 : !cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS) &&
960 9938535 : (cx->typeInferenceEnabled()
961 4259938 : ? script->incUseCount() <= INFER_USES_BEFORE_COMPILE
962 5678597 : : script->incUseCount() <= USES_BEFORE_COMPILE))
963 : {
964 2036479 : return Compile_Skipped;
965 : }
966 :
967 16184670 : if (!cx->compartment->ensureJaegerCompartmentExists(cx))
968 0 : return Compile_Error;
969 :
970 : // Ensure that constructors have at least one slot.
971 16184670 : if (construct && !script->nslots)
972 216 : script->nslots++;
973 :
// Lazily create the JITScript (chunk descriptors + cross-chunk edges).
974 16184670 : if (!jit) {
975 113898 : jit = MakeJITScript(cx, script, construct);
976 113898 : if (!jit)
977 0 : return Compile_Error;
978 : }
979 16184670 : unsigned chunkIndex = jit->chunkIndex(pc);
980 16184670 : ChunkDescriptor &desc = jit->chunkDescriptor(chunkIndex);
981 :
// Fast path: the chunk containing pc is already compiled.
982 16184670 : if (desc.chunk)
983 16006706 : return Compile_Okay;
984 :
// Per-chunk warm-up counter, analogous to the script-level gate above.
985 355928 : if (request == CompileRequest_Interpreter &&
986 177964 : !cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS) &&
987 : ++desc.counter <= INFER_USES_BEFORE_COMPILE)
988 : {
989 43873 : return Compile_Skipped;
990 : }
991 :
992 : CompileStatus status;
993 : {
994 268182 : types::AutoEnterTypeInference enter(cx, true);
995 :
996 268182 : Compiler cc(cx, script, chunkIndex, construct);
997 134091 : status = cc.compile();
998 : }
999 :
1000 134091 : if (status == Compile_Okay) {
1001 : /*
1002 : * Compiling a script can occasionally trigger its own recompilation,
1003 : * so go back through the compilation logic.
1004 : */
1005 128827 : goto restart;
1006 : }
1007 :
1008 : /* Non-OOM errors should have an associated exception. */
1009 0 : JS_ASSERT_IF(status == Compile_Error,
1010 5264 : cx->isExceptionPending() || cx->runtime->hadOutOfMemory);
1011 :
1012 5264 : return status;
1013 : }
1014 :
1015 : CompileStatus
// Emit the per-script prologue: the three function entry points (generic
// invoke, fast post-arity-check, arity/type-check), the stack-space guard,
// undefined-initialization of locals, the (possibly inlined) function frame
// prologue, and the debug/probe hooks. Returns Compile_Error only if
// constructThis() fails; otherwise Compile_Okay.
1016 127714 : mjit::Compiler::generatePrologue()
1017 : {
1018 127714 : invokeLabel = masm.label();
1019 :
1020 : /*
1021 : * If there is no function, then this can only be called via JaegerShot(),
1022 : * which expects an existing frame to be initialized like the interpreter.
1023 : */
1024 127714 : if (script->function()) {
// Jump over the function-entry setup; linked to the merged path at the end.
1025 81593 : Jump j = masm.jump();
1026 :
1027 : /*
1028 : * Entry point #2: The caller has partially constructed a frame, and
1029 : * either argc >= nargs or the arity check has corrected the frame.
1030 : */
1031 81593 : invokeLabel = masm.label();
1032 :
1033 81593 : Label fastPath = masm.label();
1034 :
1035 : /* Store this early on so slow paths can access it. */
1036 81593 : masm.storePtr(ImmPtr(script->function()),
1037 163186 : Address(JSFrameReg, StackFrame::offsetOfExec()));
1038 :
1039 : {
1040 : /*
1041 : * Entry point #3: The caller has partially constructed a frame,
1042 : * but argc might be != nargs, so an arity check might be called.
1043 : *
1044 : * This loops back to entry point #2.
1045 : */
1046 81593 : arityLabel = stubcc.masm.label();
1047 :
1048 : Jump argMatch = stubcc.masm.branch32(Assembler::Equal, JSParamReg_Argc,
1049 81593 : Imm32(script->function()->nargs));
1050 :
// Compile-time register comparison: only move argc if the ABI registers
// differ on this platform.
1051 : if (JSParamReg_Argc != Registers::ArgReg1)
1052 81593 : stubcc.masm.move(JSParamReg_Argc, Registers::ArgReg1);
1053 :
1054 : /* Slow path - call the arity check function. Returns new fp. */
1055 81593 : stubcc.masm.storePtr(ImmPtr(script->function()),
1056 163186 : Address(JSFrameReg, StackFrame::offsetOfExec()));
1057 81593 : OOL_STUBCALL(stubs::FixupArity, REJOIN_NONE);
1058 81593 : stubcc.masm.move(Registers::ReturnReg, JSFrameReg);
1059 81593 : argMatch.linkTo(stubcc.masm.label(), &stubcc.masm);
1060 :
1061 81593 : argsCheckLabel = stubcc.masm.label();
1062 :
1063 : /* Type check the arguments as well. */
1064 81593 : if (cx->typeInferenceEnabled()) {
1065 : #ifdef JS_MONOIC
// Record jump/label so the args-check IC can later be repatched.
1066 31142 : this->argsCheckJump = stubcc.masm.jump();
1067 31142 : this->argsCheckStub = stubcc.masm.label();
1068 31142 : this->argsCheckJump.linkTo(this->argsCheckStub, &stubcc.masm);
1069 : #endif
1070 31142 : stubcc.masm.storePtr(ImmPtr(script->function()),
1071 62284 : Address(JSFrameReg, StackFrame::offsetOfExec()));
1072 31142 : OOL_STUBCALL(stubs::CheckArgumentTypes, REJOIN_CHECK_ARGUMENTS);
1073 : #ifdef JS_MONOIC
1074 31142 : this->argsCheckFallthrough = stubcc.masm.label();
1075 : #endif
1076 : }
1077 :
1078 81593 : stubcc.crossJump(stubcc.masm.jump(), fastPath);
1079 : }
1080 :
1081 : /*
1082 : * Guard that there is enough stack space. Note we reserve space for
1083 : * any inline frames we end up generating, or a callee's stack frame
1084 : * we write to before the callee checks the stack.
1085 : */
1086 81593 : uint32_t nvals = VALUES_PER_STACK_FRAME + script->nslots + StackSpace::STACK_JIT_EXTRA;
1087 81593 : masm.addPtr(Imm32(nvals * sizeof(Value)), JSFrameReg, Registers::ReturnReg);
1088 : Jump stackCheck = masm.branchPtr(Assembler::AboveOrEqual, Registers::ReturnReg,
1089 81593 : FrameAddress(offsetof(VMFrame, stackLimit)));
1090 :
1091 : /*
1092 : * If the stack check fails then we need to either commit more of the
1093 : * reserved stack space or throw an error. Specify that the number of
1094 : * local slots is 0 (instead of the default script->nfixed) since the
1095 : * range [fp->slots(), fp->base()) may not be committed. (The calling
1096 : * contract requires only that the caller has reserved space for fp.)
1097 : */
1098 : {
1099 81593 : stubcc.linkExitDirect(stackCheck, stubcc.masm.label());
1100 81593 : OOL_STUBCALL(stubs::HitStackQuota, REJOIN_NONE);
1101 81593 : stubcc.crossJump(stubcc.masm.jump(), masm.label());
1102 : }
1103 :
1104 81593 : markUndefinedLocals();
1105 :
1106 81593 : types::TypeScriptNesting *nesting = script->nesting();
1107 :
1108 : /*
1109 : * Run the function prologue if necessary. This is always done in a
1110 : * stub for heavyweight functions (including nesting outer functions).
1111 : */
1112 81593 : JS_ASSERT_IF(nesting && nesting->children, script->function()->isHeavyweight());
1113 81593 : if (script->function()->isHeavyweight()) {
1114 5403 : prepareStubCall(Uses(0));
1115 5403 : INLINE_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
1116 : } else {
1117 : /*
1118 : * Load the scope chain into the frame if it will be needed by NAME
1119 : * opcodes or by the nesting prologue below. The scope chain is
1120 : * always set for global and eval frames, and will have been set by
1121 : * CreateFunCallObject for heavyweight function frames.
1122 : */
1123 76190 : if (analysis->usesScopeChain() || nesting) {
1124 47585 : RegisterID t0 = Registers::ReturnReg;
1125 : Jump hasScope = masm.branchTest32(Assembler::NonZero,
1126 47585 : FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN));
1127 47585 : masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(script->function())), t0);
1128 47585 : masm.loadPtr(Address(t0, JSFunction::offsetOfEnvironment()), t0);
1129 47585 : masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain()));
1130 47585 : hasScope.linkTo(masm.label(), &masm);
1131 : }
1132 :
1133 76190 : if (nesting) {
1134 : /*
1135 : * Inline the common case for the nesting prologue: the
1136 : * function is a non-heavyweight inner function with no
1137 : * children of its own. We ensure during inference that the
1138 : * outer function does not add scope objects for 'let' or
1139 : * 'with', so that the frame's scope chain will be
1140 : * the parent's call object, and if it differs from the
1141 : * parent's current activation then the parent is reentrant.
1142 : */
1143 2460 : JSScript *parent = nesting->parent;
1144 2460 : JS_ASSERT(parent);
1145 7380 : JS_ASSERT_IF(parent->hasAnalysis() && parent->analysis()->ranBytecode(),
1146 7380 : !parent->analysis()->addsScopeObjects());
1147 :
1148 2460 : RegisterID t0 = Registers::ReturnReg;
1149 2460 : masm.move(ImmPtr(&parent->nesting()->activeCall), t0);
1150 2460 : masm.loadPtr(Address(t0), t0);
1151 :
1152 2460 : Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
1153 2460 : Jump mismatch = masm.branchPtr(Assembler::NotEqual, t0, scopeChain);
1154 2460 : masm.add32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
1155 :
// Mismatch means the parent is reentrant: fall back to the stub prologue.
1156 2460 : stubcc.linkExitDirect(mismatch, stubcc.masm.label());
1157 2460 : OOL_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
1158 2460 : stubcc.crossJump(stubcc.masm.jump(), masm.label());
1159 : }
1160 : }
1161 :
1162 81593 : if (outerScript->usesArguments && !script->function()->isHeavyweight()) {
1163 : /*
1164 : * Make sure that fp->u.nactual is always coherent. This may be
1165 : * inspected directly by JIT code, and is not guaranteed to be
1166 : * correct if the UNDERFLOW and OVERFLOW flags are not set.
1167 : */
1168 : Jump hasArgs = masm.branchTest32(Assembler::NonZero, FrameFlagsAddress(),
1169 : Imm32(StackFrame::UNDERFLOW_ARGS |
1170 : StackFrame::OVERFLOW_ARGS |
1171 1780 : StackFrame::HAS_ARGS_OBJ));
1172 1780 : masm.storePtr(ImmPtr((void *)(size_t) script->function()->nargs),
1173 3560 : Address(JSFrameReg, StackFrame::offsetOfNumActual()));
1174 1780 : hasArgs.linkTo(masm.label(), &masm);
1175 : }
1176 :
// Merge point: the entry-point #1 jump lands here, past the function setup.
1177 81593 : j.linkTo(masm.label(), &masm);
1178 : }
1179 :
1180 127714 : if (cx->typeInferenceEnabled()) {
1181 : #ifdef DEBUG
1182 55683 : if (script->function()) {
1183 31142 : prepareStubCall(Uses(0));
1184 31142 : INLINE_STUBCALL(stubs::AssertArgumentTypes, REJOIN_NONE);
1185 : }
1186 : #endif
1187 55683 : ensureDoubleArguments();
1188 : }
1189 :
1190 127714 : if (isConstructing) {
1191 1776 : if (!constructThis())
1192 0 : return Compile_Error;
1193 : }
1194 :
1195 127714 : if (debugMode()) {
1196 69602 : prepareStubCall(Uses(0));
1197 69602 : INLINE_STUBCALL(stubs::ScriptDebugPrologue, REJOIN_RESUME);
1198 58112 : } else if (Probes::callTrackingActive(cx)) {
1199 0 : prepareStubCall(Uses(0));
1200 0 : INLINE_STUBCALL(stubs::ScriptProbeOnlyPrologue, REJOIN_RESUME);
1201 : }
1202 :
1203 127714 : recompileCheckHelper();
1204 :
1205 127714 : return Compile_Okay;
1206 : }
1207 :
1208 : void
// For each formal argument whose inferred type is (int|double) and which the
// analysis tracks, force its frame entry into double representation so later
// code can assume a uniform payload. No-op for scripts without a function.
1209 58678 : mjit::Compiler::ensureDoubleArguments()
1210 : {
1211 : /* Convert integer arguments which were inferred as (int|double) to doubles. */
// Note: the script->function() guard lives inside the loop condition so a
// NULL function yields zero iterations without a separate early return.
1212 127072 : for (uint32_t i = 0; script->function() && i < script->function()->nargs; i++) {
1213 68394 : uint32_t slot = ArgSlot(i);
1214 68394 : if (a->varTypes[slot].getTypeTag(cx) == JSVAL_TYPE_DOUBLE && analysis->trackSlot(slot))
1215 660 : frame.ensureDouble(frame.getArg(i));
1216 : }
1217 58678 : }
1218 :
1219 : void
// Emit a store of |undefined| into fixed local slot |i| of the current
// (possibly inline) frame. With type inference enabled and the slot tracked,
// the store is elided unless the liveness analysis says the slot is live at
// |offset| (i.e. it could be observed before being written).
1220 151244 : mjit::Compiler::markUndefinedLocal(uint32_t offset, uint32_t i)
1221 : {
1222 151244 : uint32_t depth = ssa.getFrame(a->inlineIndex).depth;
1223 151244 : uint32_t slot = LocalSlot(script, i);
// Locals live directly after the StackFrame header, offset by frame depth.
1224 151244 : Address local(JSFrameReg, sizeof(StackFrame) + (depth + i) * sizeof(Value));
1225 151244 : if (!cx->typeInferenceEnabled() || !analysis->trackSlot(slot)) {
1226 130476 : masm.storeValue(UndefinedValue(), local);
1227 : } else {
1228 20768 : Lifetime *lifetime = analysis->liveness(slot).live(offset);
1229 20768 : if (lifetime)
1230 4283 : masm.storeValue(UndefinedValue(), local);
1231 : }
1232 151244 : }
1233 :
1234 : void
// Initialize every fixed local of the script to |undefined| at function
// entry, delegating per-slot (including the liveness-based elision) to
// markUndefinedLocal with bytecode offset 0.
1235 84588 : mjit::Compiler::markUndefinedLocals()
1236 : {
1237 : /*
1238 : * Set locals to undefined, as in initCallFrameLatePrologue.
1239 : * Skip locals which aren't closed and are known to be defined before
1240 : * they are used.
1241 : */
1241 233218 : for (uint32_t i = 0; i < script->nfixed; i++)
1242 148630 : markUndefinedLocal(0, i);
1243 84588 : }
1244 :
1245 : CompileStatus
// No epilogue code is emitted for method-JIT scripts; returns are handled at
// each return site. Kept as a hook so compile() has a symmetric call.
1246 127567 : mjit::Compiler::generateEpilogue()
1247 : {
1248 127567 : return Compile_Okay;
1249 : }
1250 :
1251 : CompileStatus
1252 128847 : mjit::Compiler::finishThisUp()
1253 : {
1254 : #ifdef JS_CPU_X64
1255 : /* Generate trampolines to ensure that cross chunk edges are patchable. */
1256 : for (unsigned i = 0; i < chunkEdges.length(); i++) {
1257 : chunkEdges[i].sourceTrampoline = stubcc.masm.label();
1258 : stubcc.masm.move(ImmPtr(NULL), Registers::ScratchReg);
1259 : stubcc.masm.jump(Registers::ScratchReg);
1260 : }
1261 : #endif
1262 :
1263 128847 : RETURN_IF_OOM(Compile_Error);
1264 :
1265 : /*
1266 : * Watch for reallocation of the global slots while we were in the middle
1267 : * of compiling due to, e.g. standard class initialization.
1268 : */
1269 128847 : if (globalSlots && globalObj->getRawSlots() != globalSlots)
1270 0 : return Compile_Retry;
1271 :
1272 : /*
1273 : * Watch for GCs which occurred during compilation. These may have
1274 : * renumbered shapes baked into the jitcode.
1275 : */
1276 128847 : if (cx->runtime->gcNumber != gcNumber)
1277 20 : return Compile_Retry;
1278 :
1279 : /* The JIT will not have been cleared if no GC has occurred. */
1280 128827 : JITScript *jit = outerJIT();
1281 128827 : JS_ASSERT(jit != NULL);
1282 :
1283 128827 : if (overflowICSpace) {
1284 0 : JaegerSpew(JSpew_Scripts, "dumped a constant pool while generating an IC\n");
1285 0 : return Compile_Abort;
1286 : }
1287 :
1288 128827 : a->mainCodeEnd = masm.size();
1289 128827 : a->stubCodeEnd = stubcc.size();
1290 :
1291 327290 : for (size_t i = 0; i < branchPatches.length(); i++) {
1292 198463 : Label label = labelOf(branchPatches[i].pc, branchPatches[i].inlineIndex);
1293 198463 : branchPatches[i].jump.linkTo(label, &masm);
1294 : }
1295 :
1296 : #ifdef JS_CPU_ARM
1297 : masm.forceFlushConstantPool();
1298 : stubcc.masm.forceFlushConstantPool();
1299 : #endif
1300 : JaegerSpew(JSpew_Insns, "## Fast code (masm) size = %lu, Slow code (stubcc) size = %lu.\n",
1301 128827 : (unsigned long) masm.size(), (unsigned long) stubcc.size());
1302 :
1303 : /* To make inlineDoubles and oolDoubles aligned to sizeof(double) bytes,
1304 : MIPS adds extra sizeof(double) bytes to codeSize. */
1305 128827 : size_t codeSize = masm.size() +
1306 : #if defined(JS_CPU_MIPS)
1307 : stubcc.size() + sizeof(double) +
1308 : #else
1309 128827 : stubcc.size() +
1310 : #endif
1311 128827 : (masm.numDoubles() * sizeof(double)) +
1312 128827 : (stubcc.masm.numDoubles() * sizeof(double)) +
1313 257654 : jumpTableEdges.length() * sizeof(void *);
1314 :
1315 257654 : Vector<ChunkJumpTableEdge> chunkJumps(cx);
1316 128827 : if (!chunkJumps.reserve(jumpTableEdges.length()))
1317 0 : return Compile_Error;
1318 :
1319 : JSC::ExecutablePool *execPool;
1320 : uint8_t *result = (uint8_t *)script->compartment()->jaegerCompartment()->execAlloc()->
1321 128827 : alloc(codeSize, &execPool, JSC::METHOD_CODE);
1322 128827 : if (!result) {
1323 0 : js_ReportOutOfMemory(cx);
1324 0 : return Compile_Error;
1325 : }
1326 128827 : JS_ASSERT(execPool);
1327 128827 : JSC::ExecutableAllocator::makeWritable(result, codeSize);
1328 128827 : masm.executableCopy(result);
1329 128827 : stubcc.masm.executableCopy(result + masm.size());
1330 :
1331 257654 : JSC::LinkBuffer fullCode(result, codeSize, JSC::METHOD_CODE);
1332 257654 : JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size(), JSC::METHOD_CODE);
1333 :
1334 128827 : JS_ASSERT(!loop);
1335 :
1336 128827 : size_t nNmapLive = loopEntries.length();
1337 22077982 : for (size_t i = outerChunk.begin; i < outerChunk.end; i++) {
1338 21949155 : Bytecode *opinfo = analysis->maybeCode(i);
1339 21949155 : if (opinfo && opinfo->safePoint)
1340 287084 : nNmapLive++;
1341 : }
1342 :
1343 : /* Please keep in sync with JITChunk::sizeOfIncludingThis! */
1344 : size_t dataSize = sizeof(JITChunk) +
1345 : sizeof(NativeMapEntry) * nNmapLive +
1346 128827 : sizeof(InlineFrame) * inlineFrames.length() +
1347 128827 : sizeof(CallSite) * callSites.length() +
1348 : #if defined JS_MONOIC
1349 128827 : sizeof(ic::GetGlobalNameIC) * getGlobalNames.length() +
1350 128827 : sizeof(ic::SetGlobalNameIC) * setGlobalNames.length() +
1351 128827 : sizeof(ic::CallICInfo) * callICs.length() +
1352 128827 : sizeof(ic::EqualityICInfo) * equalityICs.length() +
1353 : #endif
1354 : #if defined JS_POLYIC
1355 128827 : sizeof(ic::PICInfo) * pics.length() +
1356 128827 : sizeof(ic::GetElementIC) * getElemICs.length() +
1357 128827 : sizeof(ic::SetElementIC) * setElemICs.length() +
1358 : #endif
1359 1159443 : 0;
1360 :
1361 128827 : uint8_t *cursor = (uint8_t *)OffTheBooks::calloc_(dataSize);
1362 128827 : if (!cursor) {
1363 0 : execPool->release();
1364 0 : js_ReportOutOfMemory(cx);
1365 0 : return Compile_Error;
1366 : }
1367 :
1368 128827 : JITChunk *chunk = new(cursor) JITChunk;
1369 128827 : cursor += sizeof(JITChunk);
1370 :
1371 128827 : JS_ASSERT(outerScript == script);
1372 :
1373 128827 : chunk->code = JSC::MacroAssemblerCodeRef(result, execPool, masm.size() + stubcc.size());
1374 128827 : chunk->pcLengths = pcLengths;
1375 :
1376 128827 : if (chunkIndex == 0) {
1377 127660 : jit->invokeEntry = result;
1378 127660 : if (script->function()) {
1379 81569 : jit->arityCheckEntry = stubCode.locationOf(arityLabel).executableAddress();
1380 81569 : jit->argsCheckEntry = stubCode.locationOf(argsCheckLabel).executableAddress();
1381 81569 : jit->fastEntry = fullCode.locationOf(invokeLabel).executableAddress();
1382 81569 : void *&addr = isConstructing ? script->jitArityCheckCtor : script->jitArityCheckNormal;
1383 81569 : addr = jit->arityCheckEntry;
1384 : }
1385 : }
1386 :
1387 : /*
1388 : * WARNING: mics(), callICs() et al depend on the ordering of these
1389 : * variable-length sections. See JITChunk's declaration for details.
1390 : */
1391 :
1392 : /* ICs can only refer to bytecodes in the outermost script, not inlined calls. */
1393 128827 : Label *jumpMap = a->jumpMap;
1394 :
1395 : /* Build the pc -> ncode mapping. */
1396 128827 : NativeMapEntry *jitNmap = (NativeMapEntry *)cursor;
1397 128827 : chunk->nNmapPairs = nNmapLive;
1398 128827 : cursor += sizeof(NativeMapEntry) * chunk->nNmapPairs;
1399 128827 : size_t ix = 0;
1400 128827 : if (chunk->nNmapPairs > 0) {
1401 19110361 : for (size_t i = outerChunk.begin; i < outerChunk.end; i++) {
1402 19044032 : Bytecode *opinfo = analysis->maybeCode(i);
1403 19044032 : if (opinfo && opinfo->safePoint) {
1404 287084 : Label L = jumpMap[i];
1405 287084 : JS_ASSERT(L.isSet());
1406 287084 : jitNmap[ix].bcOff = i;
1407 287084 : jitNmap[ix].ncode = (uint8_t *)(result + masm.distanceOf(L));
1408 287084 : ix++;
1409 : }
1410 : }
1411 99891 : for (size_t i = 0; i < loopEntries.length(); i++) {
1412 : /* Insert the entry at the right position. */
1413 33562 : const LoopEntry &entry = loopEntries[i];
1414 : size_t j;
1415 83609 : for (j = 0; j < ix; j++) {
1416 54162 : if (jitNmap[j].bcOff > entry.pcOffset) {
1417 4115 : memmove(jitNmap + j + 1, jitNmap + j, (ix - j) * sizeof(NativeMapEntry));
1418 4115 : break;
1419 : }
1420 : }
1421 33562 : jitNmap[j].bcOff = entry.pcOffset;
1422 33562 : jitNmap[j].ncode = (uint8_t *) stubCode.locationOf(entry.label).executableAddress();
1423 33562 : ix++;
1424 : }
1425 : }
1426 128827 : JS_ASSERT(ix == chunk->nNmapPairs);
1427 :
1428 : /* Build the table of inlined frames. */
1429 128827 : InlineFrame *jitInlineFrames = (InlineFrame *)cursor;
1430 128827 : chunk->nInlineFrames = inlineFrames.length();
1431 128827 : cursor += sizeof(InlineFrame) * chunk->nInlineFrames;
1432 131822 : for (size_t i = 0; i < chunk->nInlineFrames; i++) {
1433 2995 : InlineFrame &to = jitInlineFrames[i];
1434 2995 : ActiveFrame *from = inlineFrames[i];
1435 2995 : if (from->parent != outer)
1436 1243 : to.parent = &jitInlineFrames[from->parent->inlineIndex];
1437 : else
1438 1752 : to.parent = NULL;
1439 2995 : to.parentpc = from->parentPC;
1440 2995 : to.fun = from->script->function();
1441 2995 : to.depth = ssa.getFrame(from->inlineIndex).depth;
1442 : }
1443 :
1444 : /* Build the table of call sites. */
1445 128827 : CallSite *jitCallSites = (CallSite *)cursor;
1446 128827 : chunk->nCallSites = callSites.length();
1447 128827 : cursor += sizeof(CallSite) * chunk->nCallSites;
1448 5182183 : for (size_t i = 0; i < chunk->nCallSites; i++) {
1449 5053356 : CallSite &to = jitCallSites[i];
1450 5053356 : InternalCallSite &from = callSites[i];
1451 :
1452 : /* Patch stores of f.regs.inlined for stubs called from within inline frames. */
1453 5053356 : if (cx->typeInferenceEnabled() &&
1454 : from.rejoin != REJOIN_TRAP &&
1455 : from.rejoin != REJOIN_SCRIPTED &&
1456 : from.inlineIndex != UINT32_MAX) {
1457 27093 : if (from.ool)
1458 26756 : stubCode.patch(from.inlinePatch, &to);
1459 : else
1460 337 : fullCode.patch(from.inlinePatch, &to);
1461 : }
1462 :
1463 : JSScript *script =
1464 5053356 : (from.inlineIndex == UINT32_MAX) ? outerScript : inlineFrames[from.inlineIndex]->script;
1465 : uint32_t codeOffset = from.ool
1466 3666374 : ? masm.size() + from.returnOffset
1467 8719730 : : from.returnOffset;
1468 5053356 : to.initialize(codeOffset, from.inlineIndex, from.inlinepc - script->code, from.rejoin);
1469 :
1470 : /*
1471 : * Patch stores of the base call's return address for InvariantFailure
1472 : * calls. InvariantFailure will patch its own return address to this
1473 : * pointer before triggering recompilation.
1474 : */
1475 5053356 : if (from.loopPatch.hasPatch)
1476 8231 : stubCode.patch(from.loopPatch.codePatch, result + codeOffset);
1477 : }
1478 :
1479 : #if defined JS_MONOIC
1480 128827 : if (chunkIndex == 0 && script->function()) {
1481 81569 : JS_ASSERT(jit->argsCheckPool == NULL);
1482 81569 : if (cx->typeInferenceEnabled()) {
1483 31118 : jit->argsCheckStub = stubCode.locationOf(argsCheckStub);
1484 31118 : jit->argsCheckFallthrough = stubCode.locationOf(argsCheckFallthrough);
1485 31118 : jit->argsCheckJump = stubCode.locationOf(argsCheckJump);
1486 : }
1487 : }
1488 :
1489 128827 : ic::GetGlobalNameIC *getGlobalNames_ = (ic::GetGlobalNameIC *)cursor;
1490 128827 : chunk->nGetGlobalNames = getGlobalNames.length();
1491 128827 : cursor += sizeof(ic::GetGlobalNameIC) * chunk->nGetGlobalNames;
1492 506946 : for (size_t i = 0; i < chunk->nGetGlobalNames; i++) {
1493 378119 : ic::GetGlobalNameIC &to = getGlobalNames_[i];
1494 378119 : GetGlobalNameICInfo &from = getGlobalNames[i];
1495 378119 : from.copyTo(to, fullCode, stubCode);
1496 :
1497 378119 : int offset = fullCode.locationOf(from.load) - to.fastPathStart;
1498 378119 : to.loadStoreOffset = offset;
1499 378119 : JS_ASSERT(to.loadStoreOffset == offset);
1500 :
1501 378119 : stubCode.patch(from.addrLabel, &to);
1502 : }
1503 :
1504 128827 : ic::SetGlobalNameIC *setGlobalNames_ = (ic::SetGlobalNameIC *)cursor;
1505 128827 : chunk->nSetGlobalNames = setGlobalNames.length();
1506 128827 : cursor += sizeof(ic::SetGlobalNameIC) * chunk->nSetGlobalNames;
1507 159588 : for (size_t i = 0; i < chunk->nSetGlobalNames; i++) {
1508 30761 : ic::SetGlobalNameIC &to = setGlobalNames_[i];
1509 30761 : SetGlobalNameICInfo &from = setGlobalNames[i];
1510 30761 : from.copyTo(to, fullCode, stubCode);
1511 30761 : to.slowPathStart = stubCode.locationOf(from.slowPathStart);
1512 :
1513 30761 : int offset = fullCode.locationOf(from.store).labelAtOffset(0) -
1514 61522 : to.fastPathStart;
1515 30761 : to.loadStoreOffset = offset;
1516 30761 : JS_ASSERT(to.loadStoreOffset == offset);
1517 :
1518 30761 : to.hasExtraStub = 0;
1519 30761 : to.objConst = from.objConst;
1520 30761 : to.shapeReg = from.shapeReg;
1521 30761 : to.objReg = from.objReg;
1522 30761 : to.vr = from.vr;
1523 :
1524 : offset = fullCode.locationOf(from.shapeGuardJump) -
1525 30761 : to.fastPathStart;
1526 30761 : to.inlineShapeJump = offset;
1527 30761 : JS_ASSERT(to.inlineShapeJump == offset);
1528 :
1529 : offset = fullCode.locationOf(from.fastPathRejoin) -
1530 30761 : to.fastPathStart;
1531 30761 : to.fastRejoinOffset = offset;
1532 30761 : JS_ASSERT(to.fastRejoinOffset == offset);
1533 :
1534 30761 : stubCode.patch(from.addrLabel, &to);
1535 : }
1536 :
1537 128827 : ic::CallICInfo *jitCallICs = (ic::CallICInfo *)cursor;
1538 128827 : chunk->nCallICs = callICs.length();
1539 128827 : cursor += sizeof(ic::CallICInfo) * chunk->nCallICs;
1540 276163 : for (size_t i = 0; i < chunk->nCallICs; i++) {
1541 147336 : jitCallICs[i].reset();
1542 147336 : jitCallICs[i].funGuard = fullCode.locationOf(callICs[i].funGuard);
1543 147336 : jitCallICs[i].funJump = fullCode.locationOf(callICs[i].funJump);
1544 147336 : jitCallICs[i].slowPathStart = stubCode.locationOf(callICs[i].slowPathStart);
1545 147336 : jitCallICs[i].typeMonitored = callICs[i].typeMonitored;
1546 :
1547 : /* Compute the hot call offset. */
1548 147336 : uint32_t offset = fullCode.locationOf(callICs[i].hotJump) -
1549 294672 : fullCode.locationOf(callICs[i].funGuard);
1550 147336 : jitCallICs[i].hotJumpOffset = offset;
1551 147336 : JS_ASSERT(jitCallICs[i].hotJumpOffset == offset);
1552 :
1553 : /* Compute the join point offset. */
1554 147336 : offset = fullCode.locationOf(callICs[i].joinPoint) -
1555 294672 : fullCode.locationOf(callICs[i].funGuard);
1556 147336 : jitCallICs[i].joinPointOffset = offset;
1557 147336 : JS_ASSERT(jitCallICs[i].joinPointOffset == offset);
1558 :
1559 : /* Compute the OOL call offset. */
1560 147336 : offset = stubCode.locationOf(callICs[i].oolCall) -
1561 294672 : stubCode.locationOf(callICs[i].slowPathStart);
1562 147336 : jitCallICs[i].oolCallOffset = offset;
1563 147336 : JS_ASSERT(jitCallICs[i].oolCallOffset == offset);
1564 :
1565 : /* Compute the OOL jump offset. */
1566 147336 : offset = stubCode.locationOf(callICs[i].oolJump) -
1567 294672 : stubCode.locationOf(callICs[i].slowPathStart);
1568 147336 : jitCallICs[i].oolJumpOffset = offset;
1569 147336 : JS_ASSERT(jitCallICs[i].oolJumpOffset == offset);
1570 :
1571 : /* Compute the start of the OOL IC call. */
1572 147336 : offset = stubCode.locationOf(callICs[i].icCall) -
1573 294672 : stubCode.locationOf(callICs[i].slowPathStart);
1574 147336 : jitCallICs[i].icCallOffset = offset;
1575 147336 : JS_ASSERT(jitCallICs[i].icCallOffset == offset);
1576 :
1577 : /* Compute the slow join point offset. */
1578 147336 : offset = stubCode.locationOf(callICs[i].slowJoinPoint) -
1579 294672 : stubCode.locationOf(callICs[i].slowPathStart);
1580 147336 : jitCallICs[i].slowJoinOffset = offset;
1581 147336 : JS_ASSERT(jitCallICs[i].slowJoinOffset == offset);
1582 :
1583 : /* Compute the join point offset for continuing on the hot path. */
1584 147336 : offset = stubCode.locationOf(callICs[i].hotPathLabel) -
1585 294672 : stubCode.locationOf(callICs[i].funGuard);
1586 147336 : jitCallICs[i].hotPathOffset = offset;
1587 147336 : JS_ASSERT(jitCallICs[i].hotPathOffset == offset);
1588 :
1589 147336 : jitCallICs[i].call = &jitCallSites[callICs[i].callIndex];
1590 147336 : jitCallICs[i].frameSize = callICs[i].frameSize;
1591 147336 : jitCallICs[i].funObjReg = callICs[i].funObjReg;
1592 147336 : stubCode.patch(callICs[i].addrLabel1, &jitCallICs[i]);
1593 147336 : stubCode.patch(callICs[i].addrLabel2, &jitCallICs[i]);
1594 : }
1595 :
1596 128827 : ic::EqualityICInfo *jitEqualityICs = (ic::EqualityICInfo *)cursor;
1597 128827 : chunk->nEqualityICs = equalityICs.length();
1598 128827 : cursor += sizeof(ic::EqualityICInfo) * chunk->nEqualityICs;
1599 141669 : for (size_t i = 0; i < chunk->nEqualityICs; i++) {
1600 12842 : if (equalityICs[i].trampoline) {
1601 311 : jitEqualityICs[i].target = stubCode.locationOf(equalityICs[i].trampolineStart);
1602 : } else {
1603 12531 : uint32_t offs = uint32_t(equalityICs[i].jumpTarget - script->code);
1604 12531 : JS_ASSERT(jumpMap[offs].isSet());
1605 12531 : jitEqualityICs[i].target = fullCode.locationOf(jumpMap[offs]);
1606 : }
1607 12842 : jitEqualityICs[i].stubEntry = stubCode.locationOf(equalityICs[i].stubEntry);
1608 12842 : jitEqualityICs[i].stubCall = stubCode.locationOf(equalityICs[i].stubCall);
1609 12842 : jitEqualityICs[i].stub = equalityICs[i].stub;
1610 12842 : jitEqualityICs[i].lvr = equalityICs[i].lvr;
1611 12842 : jitEqualityICs[i].rvr = equalityICs[i].rvr;
1612 12842 : jitEqualityICs[i].tempReg = equalityICs[i].tempReg;
1613 12842 : jitEqualityICs[i].cond = equalityICs[i].cond;
1614 12842 : if (equalityICs[i].jumpToStub.isSet())
1615 11097 : jitEqualityICs[i].jumpToStub = fullCode.locationOf(equalityICs[i].jumpToStub.get());
1616 12842 : jitEqualityICs[i].fallThrough = fullCode.locationOf(equalityICs[i].fallThrough);
1617 :
1618 12842 : stubCode.patch(equalityICs[i].addrLabel, &jitEqualityICs[i]);
1619 : }
1620 : #endif /* JS_MONOIC */
1621 :
1622 423077 : for (size_t i = 0; i < callPatches.length(); i++) {
1623 294250 : CallPatchInfo &patch = callPatches[i];
1624 :
1625 : CodeLocationLabel joinPoint = patch.joinSlow
1626 : ? stubCode.locationOf(patch.joinPoint)
1627 294250 : : fullCode.locationOf(patch.joinPoint);
1628 :
1629 294250 : if (patch.hasFastNcode)
1630 291144 : fullCode.patch(patch.fastNcodePatch, joinPoint);
1631 294250 : if (patch.hasSlowNcode)
1632 147336 : stubCode.patch(patch.slowNcodePatch, joinPoint);
1633 : }
1634 :
1635 : #ifdef JS_POLYIC
1636 128827 : ic::GetElementIC *jitGetElems = (ic::GetElementIC *)cursor;
1637 128827 : chunk->nGetElems = getElemICs.length();
1638 128827 : cursor += sizeof(ic::GetElementIC) * chunk->nGetElems;
1639 150522 : for (size_t i = 0; i < chunk->nGetElems; i++) {
1640 21695 : ic::GetElementIC &to = jitGetElems[i];
1641 21695 : GetElementICInfo &from = getElemICs[i];
1642 :
1643 21695 : new (&to) ic::GetElementIC();
1644 21695 : from.copyTo(to, fullCode, stubCode);
1645 :
1646 21695 : to.typeReg = from.typeReg;
1647 21695 : to.objReg = from.objReg;
1648 21695 : to.idRemat = from.id;
1649 :
1650 21695 : if (from.typeGuard.isSet()) {
1651 8241 : int inlineTypeGuard = fullCode.locationOf(from.typeGuard.get()) -
1652 16482 : fullCode.locationOf(from.fastPathStart);
1653 8241 : to.inlineTypeGuard = inlineTypeGuard;
1654 8241 : JS_ASSERT(to.inlineTypeGuard == inlineTypeGuard);
1655 : }
1656 : int inlineShapeGuard = fullCode.locationOf(from.shapeGuard) -
1657 21695 : fullCode.locationOf(from.fastPathStart);
1658 21695 : to.inlineShapeGuard = inlineShapeGuard;
1659 21695 : JS_ASSERT(to.inlineShapeGuard == inlineShapeGuard);
1660 :
1661 21695 : stubCode.patch(from.paramAddr, &to);
1662 : }
1663 :
1664 128827 : ic::SetElementIC *jitSetElems = (ic::SetElementIC *)cursor;
1665 128827 : chunk->nSetElems = setElemICs.length();
1666 128827 : cursor += sizeof(ic::SetElementIC) * chunk->nSetElems;
1667 134519 : for (size_t i = 0; i < chunk->nSetElems; i++) {
1668 5692 : ic::SetElementIC &to = jitSetElems[i];
1669 5692 : SetElementICInfo &from = setElemICs[i];
1670 :
1671 5692 : new (&to) ic::SetElementIC();
1672 5692 : from.copyTo(to, fullCode, stubCode);
1673 :
1674 5692 : to.strictMode = script->strictModeCode;
1675 5692 : to.vr = from.vr;
1676 5692 : to.objReg = from.objReg;
1677 5692 : to.objRemat = from.objRemat.toInt32();
1678 5692 : JS_ASSERT(to.objRemat == from.objRemat.toInt32());
1679 :
1680 5692 : to.hasConstantKey = from.key.isConstant();
1681 5692 : if (from.key.isConstant())
1682 1777 : to.keyValue = from.key.index();
1683 : else
1684 3915 : to.keyReg = from.key.reg();
1685 :
1686 : int inlineShapeGuard = fullCode.locationOf(from.shapeGuard) -
1687 5692 : fullCode.locationOf(from.fastPathStart);
1688 5692 : to.inlineShapeGuard = inlineShapeGuard;
1689 5692 : JS_ASSERT(to.inlineShapeGuard == inlineShapeGuard);
1690 :
1691 : int inlineHoleGuard = fullCode.locationOf(from.holeGuard) -
1692 5692 : fullCode.locationOf(from.fastPathStart);
1693 5692 : to.inlineHoleGuard = inlineHoleGuard;
1694 5692 : JS_ASSERT(to.inlineHoleGuard == inlineHoleGuard);
1695 :
1696 5692 : CheckIsStubCall(to.slowPathCall.labelAtOffset(0));
1697 :
1698 5692 : to.volatileMask = from.volatileMask;
1699 5692 : JS_ASSERT(to.volatileMask == from.volatileMask);
1700 :
1701 5692 : stubCode.patch(from.paramAddr, &to);
1702 : }
1703 :
1704 128827 : ic::PICInfo *jitPics = (ic::PICInfo *)cursor;
1705 128827 : chunk->nPICs = pics.length();
1706 128827 : cursor += sizeof(ic::PICInfo) * chunk->nPICs;
1707 952036 : for (size_t i = 0; i < chunk->nPICs; i++) {
1708 823209 : new (&jitPics[i]) ic::PICInfo();
1709 823209 : pics[i].copyTo(jitPics[i], fullCode, stubCode);
1710 823209 : pics[i].copySimpleMembersTo(jitPics[i]);
1711 :
1712 823209 : jitPics[i].shapeGuard = masm.distanceOf(pics[i].shapeGuard) -
1713 823209 : masm.distanceOf(pics[i].fastPathStart);
1714 1646418 : JS_ASSERT(jitPics[i].shapeGuard == masm.distanceOf(pics[i].shapeGuard) -
1715 1646418 : masm.distanceOf(pics[i].fastPathStart));
1716 823209 : jitPics[i].shapeRegHasBaseShape = true;
1717 823209 : jitPics[i].pc = pics[i].pc;
1718 :
1719 1613340 : if (pics[i].kind == ic::PICInfo::SET ||
1720 790131 : pics[i].kind == ic::PICInfo::SETMETHOD) {
1721 33078 : jitPics[i].u.vr = pics[i].vr;
1722 790131 : } else if (pics[i].kind != ic::PICInfo::NAME) {
1723 451110 : if (pics[i].hasTypeCheck) {
1724 390135 : int32_t distance = stubcc.masm.distanceOf(pics[i].typeCheck) -
1725 390135 : stubcc.masm.distanceOf(pics[i].slowPathStart);
1726 390135 : JS_ASSERT(distance <= 0);
1727 390135 : jitPics[i].u.get.typeCheckOffset = distance;
1728 : }
1729 : }
1730 823209 : stubCode.patch(pics[i].paramAddr, &jitPics[i]);
1731 : }
1732 : #endif
1733 :
1734 128827 : JS_ASSERT(size_t(cursor - (uint8_t*)chunk) == dataSize);
1735 : /* Use the computed size here -- we don't want slop bytes to be counted. */
1736 128827 : JS_ASSERT(chunk->computedSizeOfIncludingThis() == dataSize);
1737 :
1738 : /* Link fast and slow paths together. */
1739 128827 : stubcc.fixCrossJumps(result, masm.size(), masm.size() + stubcc.size());
1740 :
1741 : #if defined(JS_CPU_MIPS)
1742 : /* Make sure doubleOffset is aligned to sizeof(double) bytes. */
1743 : size_t doubleOffset = (((size_t)result + masm.size() + stubcc.size() +
1744 : sizeof(double) - 1) & (~(sizeof(double) - 1))) -
1745 : (size_t)result;
1746 : JS_ASSERT((((size_t)result + doubleOffset) & 7) == 0);
1747 : #else
1748 128827 : size_t doubleOffset = masm.size() + stubcc.size();
1749 : #endif
1750 :
1751 128827 : double *inlineDoubles = (double *) (result + doubleOffset);
1752 : double *oolDoubles = (double*) (result + doubleOffset +
1753 128827 : masm.numDoubles() * sizeof(double));
1754 :
1755 : /* Generate jump tables. */
1756 128827 : void **jumpVec = (void **)(oolDoubles + stubcc.masm.numDoubles());
1757 :
1758 130226 : for (size_t i = 0; i < jumpTableEdges.length(); i++) {
1759 1399 : JumpTableEdge edge = jumpTableEdges[i];
1760 1399 : if (bytecodeInChunk(script->code + edge.target)) {
1761 1389 : JS_ASSERT(jumpMap[edge.target].isSet());
1762 1389 : jumpVec[i] = (void *)(result + masm.distanceOf(jumpMap[edge.target]));
1763 : } else {
1764 : ChunkJumpTableEdge nedge;
1765 10 : nedge.edge = edge;
1766 10 : nedge.jumpTableEntry = &jumpVec[i];
1767 10 : chunkJumps.infallibleAppend(nedge);
1768 10 : jumpVec[i] = NULL;
1769 : }
1770 : }
1771 :
1772 : /* Patch jump table references. */
1773 129101 : for (size_t i = 0; i < jumpTables.length(); i++) {
1774 274 : JumpTable &jumpTable = jumpTables[i];
1775 274 : fullCode.patch(jumpTable.label, &jumpVec[jumpTable.offsetIndex]);
1776 : }
1777 :
1778 : /* Patch all outgoing calls. */
1779 128827 : masm.finalize(fullCode, inlineDoubles);
1780 128827 : stubcc.masm.finalize(stubCode, oolDoubles);
1781 :
1782 128827 : JSC::ExecutableAllocator::makeExecutable(result, masm.size() + stubcc.size());
1783 128827 : JSC::ExecutableAllocator::cacheFlush(result, masm.size() + stubcc.size());
1784 :
1785 : Probes::registerMJITCode(cx, jit,
1786 : a,
1787 128827 : (JSActiveFrame**) inlineFrames.begin(),
1788 : result, masm.size(),
1789 257654 : result + masm.size(), stubcc.size());
1790 :
1791 128827 : outerChunkRef().chunk = chunk;
1792 :
1793 : /* Patch all incoming and outgoing cross-chunk jumps. */
1794 128827 : CrossChunkEdge *crossEdges = jit->edges();
1795 152748 : for (unsigned i = 0; i < jit->nedges; i++) {
1796 23921 : CrossChunkEdge &edge = crossEdges[i];
1797 23921 : if (bytecodeInChunk(outerScript->code + edge.source)) {
1798 1816 : JS_ASSERT(!edge.sourceJump1 && !edge.sourceJump2);
1799 1816 : void *label = edge.targetLabel ? edge.targetLabel : edge.shimLabel;
1800 1816 : CodeLocationLabel targetLabel(label);
1801 1816 : JSOp op = JSOp(script->code[edge.source]);
1802 1816 : if (op == JSOP_TABLESWITCH) {
1803 16 : if (edge.jumpTableEntries)
1804 0 : cx->free_(edge.jumpTableEntries);
1805 16 : CrossChunkEdge::JumpTableEntryVector *jumpTableEntries = NULL;
1806 16 : bool failed = false;
1807 50 : for (unsigned j = 0; j < chunkJumps.length(); j++) {
1808 34 : ChunkJumpTableEdge nedge = chunkJumps[j];
1809 34 : if (nedge.edge.source == edge.source && nedge.edge.target == edge.target) {
1810 10 : if (!jumpTableEntries) {
1811 10 : jumpTableEntries = OffTheBooks::new_<CrossChunkEdge::JumpTableEntryVector>();
1812 10 : if (!jumpTableEntries)
1813 0 : failed = true;
1814 : }
1815 10 : if (!jumpTableEntries->append(nedge.jumpTableEntry))
1816 0 : failed = true;
1817 10 : *nedge.jumpTableEntry = label;
1818 : }
1819 : }
1820 16 : if (failed) {
1821 0 : execPool->release();
1822 0 : cx->free_(chunk);
1823 0 : js_ReportOutOfMemory(cx);
1824 0 : return Compile_Error;
1825 : }
1826 16 : edge.jumpTableEntries = jumpTableEntries;
1827 : }
1828 2400 : for (unsigned j = 0; j < chunkEdges.length(); j++) {
1829 2382 : const OutgoingChunkEdge &oedge = chunkEdges[j];
1830 2382 : if (oedge.source == edge.source && oedge.target == edge.target) {
1831 : /*
1832 : * Only a single edge needs to be patched; we ensured while
1833 : * generating chunks that no two cross chunk edges can have
1834 : * the same source and target. Note that there may not be
1835 : * an edge to patch, if constant folding determined the
1836 : * jump is never taken.
1837 : */
1838 1798 : edge.sourceJump1 = fullCode.locationOf(oedge.fastJump).executableAddress();
1839 1798 : if (oedge.slowJump.isSet()) {
1840 : edge.sourceJump2 =
1841 93 : stubCode.locationOf(oedge.slowJump.get()).executableAddress();
1842 : }
1843 : #ifdef JS_CPU_X64
1844 : edge.sourceTrampoline =
1845 : stubCode.locationOf(oedge.sourceTrampoline).executableAddress();
1846 : #endif
1847 1798 : jit->patchEdge(edge, label);
1848 1798 : break;
1849 : }
1850 : }
1851 22105 : } else if (bytecodeInChunk(outerScript->code + edge.target)) {
1852 1681 : JS_ASSERT(!edge.targetLabel);
1853 1681 : JS_ASSERT(jumpMap[edge.target].isSet());
1854 1681 : edge.targetLabel = fullCode.locationOf(jumpMap[edge.target]).executableAddress();
1855 1681 : jit->patchEdge(edge, edge.targetLabel);
1856 : }
1857 : }
1858 :
1859 128827 : return Compile_Okay;
1860 : }
1861 :
#ifdef DEBUG
/*
 * Spew the opcode about to be compiled (disassembly plus current operand
 * stack depth) to the JSpew_JSOps channel. Compiles away entirely in
 * non-DEBUG builds.
 */
#define SPEW_OPCODE()                                                         \
    JS_BEGIN_MACRO                                                            \
        if (IsJaegerSpewChannelActive(JSpew_JSOps)) {                         \
            Sprinter sprinter(cx);                                            \
            sprinter.init();                                                  \
            js_Disassemble1(cx, script, PC, PC - script->code,                \
                            JS_TRUE, &sprinter);                              \
            JaegerSpew(JSpew_JSOps, "    %2d %s",                             \
                       frame.stackDepth(), sprinter.string());                \
        }                                                                     \
    JS_END_MACRO;
#else
#define SPEW_OPCODE()
#endif /* DEBUG */

/*
 * Case brackets for the opcode dispatch switch in generateMethod().
 * END_CASE advances PC by the fixed length of the named opcode; ops with
 * variable length (or fused successors) advance PC manually and 'break'
 * without using END_CASE.
 */
#define BEGIN_CASE(name)        case name:
#define END_CASE(name)                                                        \
    JS_BEGIN_MACRO                                                            \
        PC += name##_LENGTH;                                                  \
    JS_END_MACRO;                                                             \
    break;
1884 :
1885 : static inline void
1886 : FixDouble(Value &val)
1887 : {
1888 : if (val.isInt32())
1889 : val.setDouble((double)val.toInt32());
1890 : }
1891 :
1892 : inline bool
1893 199906 : mjit::Compiler::shouldStartLoop(jsbytecode *head)
1894 : {
1895 : /*
1896 : * Don't do loop based optimizations or register allocation for loops which
1897 : * span multiple chunks.
1898 : */
1899 199906 : if (*head == JSOP_LOOPHEAD && analysis->getLoop(head)) {
1900 67633 : uint32_t backedge = analysis->getLoop(head)->backedge;
1901 67633 : if (!bytecodeInChunk(script->code + backedge))
1902 429 : return false;
1903 67204 : return true;
1904 : }
1905 132273 : return false;
1906 : }
1907 :
1908 : CompileStatus
1909 131876 : mjit::Compiler::generateMethod()
1910 : {
1911 131876 : SrcNoteLineScanner scanner(script->notes(), script->lineno);
1912 :
1913 : /* For join points, whether there was fallthrough from the previous opcode. */
1914 131876 : bool fallthrough = true;
1915 :
1916 : /* Last bytecode processed. */
1917 131876 : jsbytecode *lastPC = NULL;
1918 :
1919 131876 : if (!outerJIT())
1920 0 : return Compile_Retry;
1921 :
1922 131876 : uint32_t chunkBegin = 0, chunkEnd = script->length;
1923 131876 : if (!a->parent) {
1924 : const ChunkDescriptor &desc =
1925 128881 : outerJIT()->chunkDescriptor(chunkIndex);
1926 128881 : chunkBegin = desc.begin;
1927 128881 : chunkEnd = desc.end;
1928 :
1929 2194435 : while (PC != script->code + chunkBegin) {
1930 1936673 : Bytecode *opinfo = analysis->maybeCode(PC);
1931 1936673 : if (opinfo) {
1932 1929397 : if (opinfo->jumpTarget) {
1933 : /* Update variable types for all new values at this bytecode. */
1934 20892 : const SlotValue *newv = analysis->newValues(PC);
1935 20892 : if (newv) {
1936 24573 : while (newv->slot) {
1937 17159 : if (newv->slot < TotalSlots(script)) {
1938 10892 : VarType &vt = a->varTypes[newv->slot];
1939 10892 : vt.setTypes(analysis->getValueTypes(newv->value));
1940 : }
1941 17159 : newv++;
1942 : }
1943 : }
1944 : }
1945 1929397 : if (analyze::BytecodeUpdatesSlot(JSOp(*PC))) {
1946 209428 : uint32_t slot = GetBytecodeSlot(script, PC);
1947 209428 : if (analysis->trackSlot(slot)) {
1948 100382 : VarType &vt = a->varTypes[slot];
1949 100382 : vt.setTypes(analysis->pushedTypes(PC, 0));
1950 : }
1951 : }
1952 : }
1953 :
1954 1936673 : PC += GetBytecodeLength(PC);
1955 : }
1956 :
1957 128881 : if (chunkIndex != 0) {
1958 1167 : uint32_t depth = analysis->getCode(PC).stackDepth;
1959 42327 : for (uint32_t i = 0; i < depth; i++)
1960 41160 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
1961 : }
1962 : }
1963 :
1964 8754193 : for (;;) {
1965 8886069 : JSOp op = JSOp(*PC);
1966 8886069 : int trap = stubs::JSTRAP_NONE;
1967 :
1968 8886069 : if (script->hasBreakpointsAt(PC))
1969 339 : trap |= stubs::JSTRAP_TRAP;
1970 :
1971 8886069 : Bytecode *opinfo = analysis->maybeCode(PC);
1972 :
1973 8886069 : if (!opinfo) {
1974 65496 : if (op == JSOP_STOP)
1975 40433 : break;
1976 25063 : if (js_CodeSpec[op].length != -1)
1977 25063 : PC += js_CodeSpec[op].length;
1978 : else
1979 0 : PC += js_GetVariableBytecodeLength(PC);
1980 25063 : continue;
1981 : }
1982 :
1983 8820573 : if (PC >= script->code + script->length)
1984 0 : break;
1985 :
1986 8820573 : scanner.advanceTo(PC - script->code);
1987 8820714 : if (script->stepModeEnabled() &&
1988 141 : (scanner.isLineHeader() || opinfo->jumpTarget))
1989 : {
1990 54 : trap |= stubs::JSTRAP_SINGLESTEP;
1991 : }
1992 :
1993 8820573 : frame.setPC(PC);
1994 8820573 : frame.setInTryBlock(opinfo->inTryBlock);
1995 :
1996 8820573 : if (fallthrough) {
1997 : /*
1998 : * If there is fallthrough from the previous opcode and we changed
1999 : * any entries into doubles for a branch at that previous op,
2000 : * revert those entries into integers. Similarly, if we forgot that
2001 : * an entry is a double then make it a double again, as the frame
2002 : * may have assigned it a normal register.
2003 : */
2004 8664343 : for (unsigned i = 0; i < fixedIntToDoubleEntries.length(); i++) {
2005 4 : FrameEntry *fe = frame.getSlotEntry(fixedIntToDoubleEntries[i]);
2006 4 : frame.ensureInteger(fe);
2007 : }
2008 8664374 : for (unsigned i = 0; i < fixedDoubleToAnyEntries.length(); i++) {
2009 35 : FrameEntry *fe = frame.getSlotEntry(fixedDoubleToAnyEntries[i]);
2010 35 : frame.syncAndForgetFe(fe);
2011 : }
2012 : }
2013 8820573 : fixedIntToDoubleEntries.clear();
2014 8820573 : fixedDoubleToAnyEntries.clear();
2015 :
2016 8820573 : if (PC >= script->code + chunkEnd) {
2017 1260 : if (fallthrough) {
2018 1260 : if (opinfo->jumpTarget)
2019 506 : fixDoubleTypes(PC);
2020 1260 : frame.syncAndForgetEverything();
2021 1260 : jsbytecode *curPC = PC;
2022 3674 : do {
2023 3674 : PC--;
2024 3674 : } while (!analysis->maybeCode(PC));
2025 1260 : if (!jumpAndRun(masm.jump(), curPC, NULL, NULL, /* fallthrough = */ true))
2026 0 : return Compile_Error;
2027 1260 : PC = curPC;
2028 : }
2029 1260 : break;
2030 : }
2031 :
2032 8819313 : if (opinfo->jumpTarget || trap) {
2033 454993 : if (fallthrough) {
2034 298759 : fixDoubleTypes(PC);
2035 298759 : fixedIntToDoubleEntries.clear();
2036 298759 : fixedDoubleToAnyEntries.clear();
2037 :
2038 : /*
2039 : * Watch for fallthrough to the head of a 'do while' loop.
2040 : * We don't know what register state we will be using at the head
2041 : * of the loop so sync, branch, and fix it up after the loop
2042 : * has been processed.
2043 : */
2044 298759 : if (cx->typeInferenceEnabled() && shouldStartLoop(PC)) {
2045 69 : frame.syncAndForgetEverything();
2046 69 : Jump j = masm.jump();
2047 69 : if (!startLoop(PC, j, PC))
2048 0 : return Compile_Error;
2049 : } else {
2050 298690 : Label start = masm.label();
2051 298690 : if (!frame.syncForBranch(PC, Uses(0)))
2052 0 : return Compile_Error;
2053 298690 : if (pcLengths && lastPC) {
2054 : /* Track this sync code for the previous op. */
2055 0 : size_t length = masm.size() - masm.distanceOf(start);
2056 0 : uint32_t offset = ssa.frameLength(a->inlineIndex) + lastPC - script->code;
2057 0 : pcLengths[offset].codeLength += length;
2058 : }
2059 298690 : JS_ASSERT(frame.consistentRegisters(PC));
2060 : }
2061 : }
2062 :
2063 454993 : if (!frame.discardForJoin(analysis->getAllocation(PC), opinfo->stackDepth))
2064 0 : return Compile_Error;
2065 454993 : updateJoinVarTypes();
2066 454993 : fallthrough = true;
2067 :
2068 454993 : if (!cx->typeInferenceEnabled()) {
2069 : /* All join points have synced state if we aren't doing cross-branch regalloc. */
2070 281413 : opinfo->safePoint = true;
2071 454993 : }
2072 8364320 : } else if (opinfo->safePoint) {
2073 1028 : frame.syncAndForgetEverything();
2074 : }
2075 8819313 : frame.assertValidRegisterState();
2076 8819313 : a->jumpMap[uint32_t(PC - script->code)] = masm.label();
2077 :
2078 8819313 : if (cx->typeInferenceEnabled() && opinfo->safePoint) {
2079 : /*
2080 : * We may have come in from a table switch, which does not watch
2081 : * for the new types introduced for variables at each dispatch
2082 : * target. Make sure that new SSA values at this safe point with
2083 : * double type have the correct in memory representation.
2084 : */
2085 5669 : const SlotValue *newv = analysis->newValues(PC);
2086 5669 : if (newv) {
2087 5032 : while (newv->slot) {
2088 9816 : if (newv->value.kind() == SSAValue::PHI &&
2089 2204 : newv->value.phiOffset() == uint32_t(PC - script->code) &&
2090 2128 : analysis->trackSlot(newv->slot) &&
2091 2128 : a->varTypes[newv->slot].getTypeTag(cx) == JSVAL_TYPE_DOUBLE) {
2092 3 : FrameEntry *fe = frame.getSlotEntry(newv->slot);
2093 3 : masm.ensureInMemoryDouble(frame.addressOf(fe));
2094 : }
2095 3356 : newv++;
2096 : }
2097 : }
2098 : }
2099 :
2100 : // Now that we have the PC's register allocation, make sure it gets
2101 : // explicitly updated if this is the loop entry and new loop registers
2102 : // are allocated later on.
2103 8819313 : if (loop && !a->parent)
2104 926309 : loop->setOuterPC(PC);
2105 :
2106 8819313 : SPEW_OPCODE();
2107 8819313 : JS_ASSERT(frame.stackDepth() == opinfo->stackDepth);
2108 :
2109 8819313 : if (op == JSOP_LOOPHEAD && analysis->getLoop(PC)) {
2110 34031 : jsbytecode *backedge = script->code + analysis->getLoop(PC)->backedge;
2111 34031 : if (!bytecodeInChunk(backedge)){
2112 15438 : for (uint32_t slot = ArgSlot(0); slot < TotalSlots(script); slot++) {
2113 15009 : if (a->varTypes[slot].getTypeTag(cx) == JSVAL_TYPE_DOUBLE) {
2114 4 : FrameEntry *fe = frame.getSlotEntry(slot);
2115 4 : masm.ensureInMemoryDouble(frame.addressOf(fe));
2116 : }
2117 : }
2118 : }
2119 : }
2120 :
2121 : // If this is an exception entry point, then jsl_InternalThrow has set
2122 : // VMFrame::fp to the correct fp for the entry point. We need to copy
2123 : // that value here to FpReg so that FpReg also has the correct sp.
2124 : // Otherwise, we would simply be using a stale FpReg value.
2125 8819313 : if (op == JSOP_ENTERBLOCK && analysis->getCode(PC).exceptionEntry)
2126 19252 : masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
2127 :
2128 8819313 : if (trap) {
2129 393 : prepareStubCall(Uses(0));
2130 393 : masm.move(Imm32(trap), Registers::ArgReg1);
2131 393 : Call cl = emitStubCall(JS_FUNC_TO_DATA_PTR(void *, stubs::Trap), NULL);
2132 : InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC,
2133 393 : REJOIN_TRAP, false);
2134 393 : addCallSite(site);
2135 : }
2136 :
2137 : /* Don't compile fat opcodes, run the decomposed version instead. */
2138 8819313 : if (js_CodeSpec[op].format & JOF_DECOMPOSE) {
2139 205214 : PC += js_CodeSpec[op].length;
2140 205214 : continue;
2141 : }
2142 :
2143 8614099 : Label codeStart = masm.label();
2144 8614099 : bool countersUpdated = false;
2145 8614099 : bool arithUpdated = false;
2146 :
2147 8614099 : JSValueType arithFirstUseType = JSVAL_TYPE_UNKNOWN;
2148 8614099 : JSValueType arithSecondUseType = JSVAL_TYPE_UNKNOWN;
2149 8614099 : if (script->pcCounters && !!(js_CodeSpec[op].format & JOF_ARITH)) {
2150 0 : if (GetUseCount(script, PC - script->code) == 1) {
2151 0 : FrameEntry *use = frame.peek(-1);
2152 : /*
2153 : * Pretend it's a binary operation and the second operand has
2154 : * the same type as the first one.
2155 : */
2156 0 : if (use->isTypeKnown())
2157 0 : arithFirstUseType = arithSecondUseType = use->getKnownType();
2158 : } else {
2159 0 : FrameEntry *use = frame.peek(-1);
2160 0 : if (use->isTypeKnown())
2161 0 : arithFirstUseType = use->getKnownType();
2162 0 : use = frame.peek(-2);
2163 0 : if (use->isTypeKnown())
2164 0 : arithSecondUseType = use->getKnownType();
2165 : }
2166 : }
2167 :
2168 : /*
2169 : * Update PC counters for jump opcodes at their start, so that we don't
2170 : * miss them when taking the jump. This is delayed for other opcodes,
2171 : * as we want to skip updating for ops we didn't generate any code for.
2172 : */
2173 8614099 : if (script->pcCounters && JOF_OPTYPE(op) == JOF_JUMP)
2174 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2175 :
2176 : /**********************
2177 : * BEGIN COMPILER OPS *
2178 : **********************/
2179 :
2180 8614099 : lastPC = PC;
2181 :
2182 8614099 : switch (op) {
2183 : BEGIN_CASE(JSOP_NOP)
2184 24051 : END_CASE(JSOP_NOP)
2185 :
2186 : BEGIN_CASE(JSOP_UNDEFINED)
2187 142751 : frame.push(UndefinedValue());
2188 142751 : END_CASE(JSOP_UNDEFINED)
2189 :
2190 : BEGIN_CASE(JSOP_POPV)
2191 : BEGIN_CASE(JSOP_SETRVAL)
2192 : {
2193 17975 : RegisterID reg = frame.allocReg();
2194 17975 : masm.load32(FrameFlagsAddress(), reg);
2195 17975 : masm.or32(Imm32(StackFrame::HAS_RVAL), reg);
2196 17975 : masm.store32(reg, FrameFlagsAddress());
2197 17975 : frame.freeReg(reg);
2198 :
2199 : /* Scripts which write to the frame's return slot aren't inlined. */
2200 17975 : JS_ASSERT(a == outer);
2201 :
2202 17975 : FrameEntry *fe = frame.peek(-1);
2203 17975 : frame.storeTo(fe, Address(JSFrameReg, StackFrame::offsetOfReturnValue()), true);
2204 17975 : frame.pop();
2205 : }
2206 17975 : END_CASE(JSOP_POPV)
2207 :
2208 : BEGIN_CASE(JSOP_RETURN)
2209 55688 : if (script->pcCounters)
2210 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2211 55688 : emitReturn(frame.peek(-1));
2212 55688 : fallthrough = false;
2213 55688 : END_CASE(JSOP_RETURN)
2214 :
2215 : BEGIN_CASE(JSOP_GOTO)
2216 : BEGIN_CASE(JSOP_DEFAULT)
2217 : {
2218 122694 : unsigned targetOffset = FollowBranch(cx, script, PC - script->code);
2219 122694 : jsbytecode *target = script->code + targetOffset;
2220 :
2221 122694 : fixDoubleTypes(target);
2222 :
2223 : /*
2224 : * Watch for gotos which are entering a 'for' or 'while' loop.
2225 : * These jump to the loop condition test and are immediately
2226 : * followed by the head of the loop.
2227 : */
2228 122694 : jsbytecode *next = PC + js_CodeSpec[op].length;
2229 217937 : if (cx->typeInferenceEnabled() &&
2230 48659 : analysis->maybeCode(next) &&
2231 46584 : shouldStartLoop(next))
2232 : {
2233 33533 : frame.syncAndForgetEverything();
2234 33533 : Jump j = masm.jump();
2235 33533 : if (!startLoop(next, j, target))
2236 0 : return Compile_Error;
2237 : } else {
2238 89161 : if (!frame.syncForBranch(target, Uses(0)))
2239 0 : return Compile_Error;
2240 89161 : Jump j = masm.jump();
2241 89161 : if (!jumpAndRun(j, target))
2242 0 : return Compile_Error;
2243 : }
2244 122694 : fallthrough = false;
2245 122694 : PC += js_CodeSpec[op].length;
2246 122694 : break;
2247 : }
2248 : END_CASE(JSOP_GOTO)
2249 :
2250 : BEGIN_CASE(JSOP_IFEQ)
2251 : BEGIN_CASE(JSOP_IFNE)
2252 : {
2253 68895 : jsbytecode *target = PC + GET_JUMP_OFFSET(PC);
2254 68895 : fixDoubleTypes(target);
2255 68895 : if (!jsop_ifneq(op, target))
2256 0 : return Compile_Error;
2257 68895 : PC += js_CodeSpec[op].length;
2258 68895 : break;
2259 : }
2260 : END_CASE(JSOP_IFNE)
2261 :
2262 : BEGIN_CASE(JSOP_ARGUMENTS)
2263 : /*
2264 : * For calls of the form 'f.apply(x, arguments)' we can avoid
2265 : * creating an args object by having ic::SplatApplyArgs pull
2266 : * directly from the stack. To do this, we speculate here that
2267 : * 'apply' actually refers to js_fun_apply. If this is not true,
2268 : * the slow path in JSOP_FUNAPPLY will create the args object.
2269 : */
2270 2900 : if (canUseApplyTricks()) {
2271 : /*
2272 : * Check for interrupts at the JSOP_ARGUMENTS when using
2273 : * apply tricks, see inlineCallHelper().
2274 : */
2275 252 : interruptCheckHelper();
2276 :
2277 252 : applyTricks = LazyArgsObj;
2278 252 : pushSyncedEntry(0);
2279 4086 : } else if (cx->typeInferenceEnabled() && !script->strictModeCode &&
2280 1438 : !types::TypeSet::HasObjectFlags(cx, script->function()->getType(cx),
2281 2876 : types::OBJECT_FLAG_CREATED_ARGUMENTS)) {
2282 279 : frame.push(MagicValue(JS_LAZY_ARGUMENTS));
2283 : } else {
2284 2369 : jsop_arguments(REJOIN_FALLTHROUGH);
2285 2369 : pushSyncedEntry(0);
2286 : }
2287 2900 : END_CASE(JSOP_ARGUMENTS)
2288 :
2289 : BEGIN_CASE(JSOP_ITERNEXT)
2290 4630 : iterNext(GET_INT8(PC));
2291 4630 : END_CASE(JSOP_ITERNEXT)
2292 :
2293 : BEGIN_CASE(JSOP_DUP)
2294 361755 : frame.dup();
2295 361755 : END_CASE(JSOP_DUP)
2296 :
2297 : BEGIN_CASE(JSOP_DUP2)
2298 194278 : frame.dup2();
2299 194278 : END_CASE(JSOP_DUP2)
2300 :
2301 : BEGIN_CASE(JSOP_SWAP)
2302 164159 : frame.dup2();
2303 164159 : frame.shift(-3);
2304 164159 : frame.shift(-1);
2305 164159 : END_CASE(JSOP_SWAP)
2306 :
2307 : BEGIN_CASE(JSOP_PICK)
2308 : {
2309 587188 : uint32_t amt = GET_UINT8(PC);
2310 :
2311 : // Push -(amt + 1), say amt == 2
2312 : // Stack before: X3 X2 X1
2313 : // Stack after: X3 X2 X1 X3
2314 587188 : frame.dupAt(-int32_t(amt + 1));
2315 :
2316 : // For each item X[i...1] push it then move it down.
2317 : // The above would transition like so:
2318 : // X3 X2 X1 X3 X2 (dupAt)
2319 : // X2 X2 X1 X3 (shift)
2320 : // X2 X2 X1 X3 X1 (dupAt)
2321 : // X2 X1 X1 X3 (shift)
2322 2147446 : for (int32_t i = -int32_t(amt); i < 0; i++) {
2323 1560258 : frame.dupAt(i - 1);
2324 1560258 : frame.shift(i - 2);
2325 : }
2326 :
2327 : // The stack looks like:
2328 : // Xn ... X1 X1 X{n+1}
2329 : // So shimmy the last value down.
2330 587188 : frame.shimmy(1);
2331 : }
2332 587188 : END_CASE(JSOP_PICK)
2333 :
2334 : BEGIN_CASE(JSOP_BITOR)
2335 : BEGIN_CASE(JSOP_BITXOR)
2336 : BEGIN_CASE(JSOP_BITAND)
2337 7282 : jsop_bitop(op);
2338 7282 : END_CASE(JSOP_BITAND)
2339 :
2340 : BEGIN_CASE(JSOP_LT)
2341 : BEGIN_CASE(JSOP_LE)
2342 : BEGIN_CASE(JSOP_GT)
2343 : BEGIN_CASE(JSOP_GE)
2344 : BEGIN_CASE(JSOP_EQ)
2345 : BEGIN_CASE(JSOP_NE)
2346 : {
2347 94527 : if (script->pcCounters) {
2348 0 : updateArithCounters(PC, NULL, arithFirstUseType, arithSecondUseType);
2349 0 : arithUpdated = true;
2350 : }
2351 :
2352 : /* Detect fusions. */
2353 94527 : jsbytecode *next = &PC[JSOP_GE_LENGTH];
2354 94527 : JSOp fused = JSOp(*next);
2355 94527 : if ((fused != JSOP_IFEQ && fused != JSOP_IFNE) || analysis->jumpTarget(next))
2356 18009 : fused = JSOP_NOP;
2357 :
2358 : /* Get jump target, if any. */
2359 94527 : jsbytecode *target = NULL;
2360 94527 : if (fused != JSOP_NOP) {
2361 76518 : if (script->pcCounters)
2362 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2363 76518 : target = next + GET_JUMP_OFFSET(next);
2364 76518 : fixDoubleTypes(target);
2365 : }
2366 :
2367 94527 : BoolStub stub = NULL;
2368 94527 : switch (op) {
2369 : case JSOP_LT:
2370 53393 : stub = stubs::LessThan;
2371 53393 : break;
2372 : case JSOP_LE:
2373 4649 : stub = stubs::LessEqual;
2374 4649 : break;
2375 : case JSOP_GT:
2376 5618 : stub = stubs::GreaterThan;
2377 5618 : break;
2378 : case JSOP_GE:
2379 7417 : stub = stubs::GreaterEqual;
2380 7417 : break;
2381 : case JSOP_EQ:
2382 16800 : stub = stubs::Equal;
2383 16800 : break;
2384 : case JSOP_NE:
2385 6650 : stub = stubs::NotEqual;
2386 6650 : break;
2387 : default:
2388 0 : JS_NOT_REACHED("WAT");
2389 : break;
2390 : }
2391 :
2392 : /*
2393 : * We need to ensure in the target case that we always rejoin
2394 : * before the rval test. In the non-target case we will rejoin
2395 : * correctly after the op finishes.
2396 : */
2397 :
2398 94527 : FrameEntry *rhs = frame.peek(-1);
2399 94527 : FrameEntry *lhs = frame.peek(-2);
2400 :
2401 : /* Check for easy cases that the parser does not constant fold. */
2402 94527 : if (lhs->isConstant() && rhs->isConstant()) {
2403 : /* Primitives can be trivially constant folded. */
2404 245 : const Value &lv = lhs->getValue();
2405 245 : const Value &rv = rhs->getValue();
2406 :
2407 245 : if (lv.isPrimitive() && rv.isPrimitive()) {
2408 245 : bool result = compareTwoValues(cx, op, lv, rv);
2409 :
2410 245 : frame.pop();
2411 245 : frame.pop();
2412 :
2413 245 : if (!target) {
2414 130 : frame.push(Value(BooleanValue(result)));
2415 : } else {
2416 115 : if (fused == JSOP_IFEQ)
2417 115 : result = !result;
2418 115 : if (!constantFoldBranch(target, result))
2419 0 : return Compile_Error;
2420 : }
2421 : } else {
2422 0 : if (!emitStubCmpOp(stub, target, fused))
2423 0 : return Compile_Error;
2424 : }
2425 : } else {
2426 : /* Anything else should go through the fast path generator. */
2427 94282 : if (!jsop_relational(op, stub, target, fused))
2428 0 : return Compile_Error;
2429 : }
2430 :
2431 : /* Advance PC manually. */
2432 : JS_STATIC_ASSERT(JSOP_LT_LENGTH == JSOP_GE_LENGTH);
2433 : JS_STATIC_ASSERT(JSOP_LE_LENGTH == JSOP_GE_LENGTH);
2434 : JS_STATIC_ASSERT(JSOP_GT_LENGTH == JSOP_GE_LENGTH);
2435 : JS_STATIC_ASSERT(JSOP_EQ_LENGTH == JSOP_GE_LENGTH);
2436 : JS_STATIC_ASSERT(JSOP_NE_LENGTH == JSOP_GE_LENGTH);
2437 :
2438 94527 : PC += JSOP_GE_LENGTH;
2439 94527 : if (fused != JSOP_NOP) {
2440 76518 : SPEW_OPCODE();
2441 76518 : PC += JSOP_IFNE_LENGTH;
2442 : }
2443 94527 : break;
2444 : }
2445 : END_CASE(JSOP_GE)
2446 :
2447 : BEGIN_CASE(JSOP_LSH)
2448 2132 : jsop_bitop(op);
2449 2132 : END_CASE(JSOP_LSH)
2450 :
2451 : BEGIN_CASE(JSOP_RSH)
2452 2847 : jsop_bitop(op);
2453 2847 : END_CASE(JSOP_RSH)
2454 :
2455 : BEGIN_CASE(JSOP_URSH)
2456 838 : jsop_bitop(op);
2457 838 : END_CASE(JSOP_URSH)
2458 :
2459 : BEGIN_CASE(JSOP_ADD)
2460 606760 : if (!jsop_binary(op, stubs::Add, knownPushedType(0), pushedTypeSet(0)))
2461 3 : return Compile_Retry;
2462 606757 : END_CASE(JSOP_ADD)
2463 :
2464 : BEGIN_CASE(JSOP_SUB)
2465 10669 : if (!jsop_binary(op, stubs::Sub, knownPushedType(0), pushedTypeSet(0)))
2466 0 : return Compile_Retry;
2467 10669 : END_CASE(JSOP_SUB)
2468 :
2469 : BEGIN_CASE(JSOP_MUL)
2470 6462 : if (!jsop_binary(op, stubs::Mul, knownPushedType(0), pushedTypeSet(0)))
2471 0 : return Compile_Retry;
2472 6462 : END_CASE(JSOP_MUL)
2473 :
2474 : BEGIN_CASE(JSOP_DIV)
2475 8338 : if (!jsop_binary(op, stubs::Div, knownPushedType(0), pushedTypeSet(0)))
2476 4 : return Compile_Retry;
2477 8334 : END_CASE(JSOP_DIV)
2478 :
2479 : BEGIN_CASE(JSOP_MOD)
2480 3895 : if (!jsop_mod())
2481 2 : return Compile_Retry;
2482 3893 : END_CASE(JSOP_MOD)
2483 :
2484 : BEGIN_CASE(JSOP_NOT)
2485 32963 : jsop_not();
2486 32963 : END_CASE(JSOP_NOT)
2487 :
2488 : BEGIN_CASE(JSOP_BITNOT)
2489 : {
2490 167 : FrameEntry *top = frame.peek(-1);
2491 167 : if (top->isConstant() && top->getValue().isPrimitive()) {
2492 : int32_t i;
2493 0 : JS_ALWAYS_TRUE(ToInt32(cx, top->getValue(), &i));
2494 0 : i = ~i;
2495 0 : frame.pop();
2496 0 : frame.push(Int32Value(i));
2497 : } else {
2498 167 : jsop_bitnot();
2499 : }
2500 : }
2501 167 : END_CASE(JSOP_BITNOT)
2502 :
2503 : BEGIN_CASE(JSOP_NEG)
2504 : {
2505 4215 : FrameEntry *top = frame.peek(-1);
2506 4215 : if (top->isConstant() && top->getValue().isPrimitive()) {
2507 : double d;
2508 429 : JS_ALWAYS_TRUE(ToNumber(cx, top->getValue(), &d));
2509 429 : d = -d;
2510 429 : Value v = NumberValue(d);
2511 :
2512 : /* Watch for overflow in constant propagation. */
2513 429 : types::TypeSet *pushed = pushedTypeSet(0);
2514 429 : if (!v.isInt32() && pushed && !pushed->hasType(types::Type::DoubleType())) {
2515 24 : types::TypeScript::MonitorOverflow(cx, script, PC);
2516 24 : return Compile_Retry;
2517 : }
2518 :
2519 405 : frame.pop();
2520 405 : frame.push(v);
2521 : } else {
2522 3786 : jsop_neg();
2523 : }
2524 : }
2525 4191 : END_CASE(JSOP_NEG)
2526 :
2527 : BEGIN_CASE(JSOP_POS)
2528 205410 : jsop_pos();
2529 205410 : END_CASE(JSOP_POS)
2530 :
2531 : BEGIN_CASE(JSOP_DELNAME)
2532 : {
2533 410 : uint32_t index = GET_UINT32_INDEX(PC);
2534 410 : PropertyName *name = script->getName(index);
2535 :
2536 410 : prepareStubCall(Uses(0));
2537 410 : masm.move(ImmPtr(name), Registers::ArgReg1);
2538 410 : INLINE_STUBCALL(stubs::DelName, REJOIN_FALLTHROUGH);
2539 410 : pushSyncedEntry(0);
2540 : }
2541 410 : END_CASE(JSOP_DELNAME)
2542 :
2543 : BEGIN_CASE(JSOP_DELPROP)
2544 : {
2545 271 : uint32_t index = GET_UINT32_INDEX(PC);
2546 271 : PropertyName *name = script->getName(index);
2547 :
2548 271 : prepareStubCall(Uses(1));
2549 271 : masm.move(ImmPtr(name), Registers::ArgReg1);
2550 271 : INLINE_STUBCALL(STRICT_VARIANT(stubs::DelProp), REJOIN_FALLTHROUGH);
2551 271 : frame.pop();
2552 271 : pushSyncedEntry(0);
2553 : }
2554 271 : END_CASE(JSOP_DELPROP)
2555 :
2556 : BEGIN_CASE(JSOP_DELELEM)
2557 : {
2558 452 : prepareStubCall(Uses(2));
2559 452 : INLINE_STUBCALL(STRICT_VARIANT(stubs::DelElem), REJOIN_FALLTHROUGH);
2560 452 : frame.popn(2);
2561 452 : pushSyncedEntry(0);
2562 : }
2563 452 : END_CASE(JSOP_DELELEM)
2564 :
2565 : BEGIN_CASE(JSOP_TYPEOF)
2566 : BEGIN_CASE(JSOP_TYPEOFEXPR)
2567 2549 : jsop_typeof();
2568 2549 : END_CASE(JSOP_TYPEOF)
2569 :
2570 : BEGIN_CASE(JSOP_VOID)
2571 524 : frame.pop();
2572 524 : frame.push(UndefinedValue());
2573 524 : END_CASE(JSOP_VOID)
2574 :
2575 : BEGIN_CASE(JSOP_GETPROP)
2576 : BEGIN_CASE(JSOP_CALLPROP)
2577 : BEGIN_CASE(JSOP_LENGTH)
2578 503925 : if (!jsop_getprop(script->getName(GET_UINT32_INDEX(PC)), knownPushedType(0)))
2579 0 : return Compile_Error;
2580 503925 : END_CASE(JSOP_GETPROP)
2581 :
2582 : BEGIN_CASE(JSOP_GETELEM)
2583 : BEGIN_CASE(JSOP_CALLELEM)
2584 252899 : if (script->pcCounters)
2585 0 : updateElemCounters(PC, frame.peek(-2), frame.peek(-1));
2586 252899 : if (!jsop_getelem())
2587 0 : return Compile_Error;
2588 252899 : END_CASE(JSOP_GETELEM)
2589 :
2590 : BEGIN_CASE(JSOP_TOID)
2591 193234 : jsop_toid();
2592 193234 : END_CASE(JSOP_TOID)
2593 :
2594 : BEGIN_CASE(JSOP_SETELEM)
2595 : {
2596 211207 : if (script->pcCounters)
2597 0 : updateElemCounters(PC, frame.peek(-3), frame.peek(-2));
2598 211207 : jsbytecode *next = &PC[JSOP_SETELEM_LENGTH];
2599 211207 : bool pop = (JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next));
2600 211207 : if (!jsop_setelem(pop))
2601 0 : return Compile_Error;
2602 : }
2603 211207 : END_CASE(JSOP_SETELEM);
2604 :
2605 : BEGIN_CASE(JSOP_EVAL)
2606 : {
2607 1983 : JaegerSpew(JSpew_Insns, " --- EVAL --- \n");
2608 1983 : emitEval(GET_ARGC(PC));
2609 1983 : JaegerSpew(JSpew_Insns, " --- END EVAL --- \n");
2610 : }
2611 1983 : END_CASE(JSOP_EVAL)
2612 :
2613 : BEGIN_CASE(JSOP_CALL)
2614 : BEGIN_CASE(JSOP_NEW)
2615 : BEGIN_CASE(JSOP_FUNAPPLY)
2616 : BEGIN_CASE(JSOP_FUNCALL)
2617 : {
2618 297554 : bool callingNew = (op == JSOP_NEW);
2619 :
2620 297554 : bool done = false;
2621 297554 : if ((op == JSOP_CALL || op == JSOP_NEW) && !monitored(PC)) {
2622 287828 : CompileStatus status = inlineNativeFunction(GET_ARGC(PC), callingNew);
2623 287828 : if (status == Compile_Okay)
2624 3234 : done = true;
2625 284594 : else if (status != Compile_InlineAbort)
2626 0 : return status;
2627 : }
2628 297554 : if (!done && inlining()) {
2629 59592 : CompileStatus status = inlineScriptedFunction(GET_ARGC(PC), callingNew);
2630 59592 : if (status == Compile_Okay)
2631 2827 : done = true;
2632 56765 : else if (status != Compile_InlineAbort)
2633 0 : return status;
2634 59592 : if (script->pcCounters) {
2635 : /* Code generated while inlining has been accounted for. */
2636 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2637 : }
2638 : }
2639 :
2640 : FrameSize frameSize;
2641 297554 : frameSize.initStatic(frame.totalDepth(), GET_ARGC(PC));
2642 :
2643 297554 : if (!done) {
2644 291493 : JaegerSpew(JSpew_Insns, " --- SCRIPTED CALL --- \n");
2645 291493 : if (!inlineCallHelper(GET_ARGC(PC), callingNew, frameSize))
2646 0 : return Compile_Error;
2647 291493 : JaegerSpew(JSpew_Insns, " --- END SCRIPTED CALL --- \n");
2648 : }
2649 : }
2650 297554 : END_CASE(JSOP_CALL)
2651 :
2652 : BEGIN_CASE(JSOP_NAME)
2653 : BEGIN_CASE(JSOP_CALLNAME)
2654 : {
2655 366944 : PropertyName *name = script->getName(GET_UINT32_INDEX(PC));
2656 366944 : jsop_name(name, knownPushedType(0));
2657 366944 : frame.extra(frame.peek(-1)).name = name;
2658 : }
2659 366944 : END_CASE(JSOP_NAME)
2660 :
2661 : BEGIN_CASE(JSOP_IMPLICITTHIS)
2662 : {
2663 34035 : prepareStubCall(Uses(0));
2664 34035 : masm.move(ImmPtr(script->getName(GET_UINT32_INDEX(PC))), Registers::ArgReg1);
2665 34035 : INLINE_STUBCALL(stubs::ImplicitThis, REJOIN_FALLTHROUGH);
2666 34035 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
2667 : }
2668 34035 : END_CASE(JSOP_IMPLICITTHIS)
2669 :
2670 : BEGIN_CASE(JSOP_DOUBLE)
2671 : {
2672 7486 : double d = script->getConst(GET_UINT32_INDEX(PC)).toDouble();
2673 7486 : frame.push(Value(DoubleValue(d)));
2674 : }
2675 7486 : END_CASE(JSOP_DOUBLE)
2676 :
2677 : BEGIN_CASE(JSOP_STRING)
2678 213267 : frame.push(StringValue(script->getAtom(GET_UINT32_INDEX(PC))));
2679 213267 : END_CASE(JSOP_STRING)
2680 :
2681 : BEGIN_CASE(JSOP_ZERO)
2682 111556 : frame.push(JSVAL_ZERO);
2683 111556 : END_CASE(JSOP_ZERO)
2684 :
2685 : BEGIN_CASE(JSOP_ONE)
2686 261542 : frame.push(JSVAL_ONE);
2687 261542 : END_CASE(JSOP_ONE)
2688 :
2689 : BEGIN_CASE(JSOP_NULL)
2690 12915 : frame.push(NullValue());
2691 12915 : END_CASE(JSOP_NULL)
2692 :
2693 : BEGIN_CASE(JSOP_THIS)
2694 72375 : jsop_this();
2695 72375 : END_CASE(JSOP_THIS)
2696 :
2697 : BEGIN_CASE(JSOP_FALSE)
2698 11686 : frame.push(Value(BooleanValue(false)));
2699 11686 : END_CASE(JSOP_FALSE)
2700 :
2701 : BEGIN_CASE(JSOP_TRUE)
2702 16142 : frame.push(Value(BooleanValue(true)));
2703 16142 : END_CASE(JSOP_TRUE)
2704 :
2705 : BEGIN_CASE(JSOP_OR)
2706 : BEGIN_CASE(JSOP_AND)
2707 : {
2708 18723 : jsbytecode *target = PC + GET_JUMP_OFFSET(PC);
2709 18723 : fixDoubleTypes(target);
2710 18723 : if (!jsop_andor(op, target))
2711 0 : return Compile_Error;
2712 : }
2713 18723 : END_CASE(JSOP_AND)
2714 :
2715 : BEGIN_CASE(JSOP_TABLESWITCH)
2716 : /*
2717 : * Note: there is no need to syncForBranch for the various targets of
2718 : * switch statement. The liveness analysis has already marked these as
2719 : * allocated with no registers in use. There is also no need to fix
2720 : * double types, as we don't track types of slots in scripts with
2721 : * switch statements (could be fixed).
2722 : */
2723 310 : if (script->pcCounters)
2724 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2725 : #if defined JS_CPU_ARM /* Need to implement jump(BaseIndex) for ARM */
2726 : frame.syncAndKillEverything();
2727 : masm.move(ImmPtr(PC), Registers::ArgReg1);
2728 :
2729 : /* prepareStubCall() is not needed due to syncAndForgetEverything() */
2730 : INLINE_STUBCALL(stubs::TableSwitch, REJOIN_NONE);
2731 : frame.pop();
2732 :
2733 : masm.jump(Registers::ReturnReg);
2734 : #else
2735 310 : if (!jsop_tableswitch(PC))
2736 0 : return Compile_Error;
2737 : #endif
2738 310 : PC += js_GetVariableBytecodeLength(PC);
2739 310 : break;
2740 : END_CASE(JSOP_TABLESWITCH)
2741 :
2742 : BEGIN_CASE(JSOP_LOOKUPSWITCH)
2743 191 : if (script->pcCounters)
2744 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2745 191 : frame.syncAndForgetEverything();
2746 191 : masm.move(ImmPtr(PC), Registers::ArgReg1);
2747 :
2748 : /* prepareStubCall() is not needed due to syncAndForgetEverything() */
2749 191 : INLINE_STUBCALL(stubs::LookupSwitch, REJOIN_NONE);
2750 191 : frame.pop();
2751 :
2752 191 : masm.jump(Registers::ReturnReg);
2753 191 : PC += js_GetVariableBytecodeLength(PC);
2754 191 : break;
2755 : END_CASE(JSOP_LOOKUPSWITCH)
2756 :
2757 : BEGIN_CASE(JSOP_CASE)
2758 : // X Y
2759 :
2760 1812 : frame.dupAt(-2);
2761 : // X Y X
2762 :
2763 1812 : jsop_stricteq(JSOP_STRICTEQ);
2764 : // X cond
2765 :
2766 1812 : if (!jsop_ifneq(JSOP_IFNE, PC + GET_JUMP_OFFSET(PC)))
2767 0 : return Compile_Error;
2768 1812 : END_CASE(JSOP_CASE)
2769 :
2770 : BEGIN_CASE(JSOP_STRICTEQ)
2771 : BEGIN_CASE(JSOP_STRICTNE)
2772 13270 : if (script->pcCounters) {
2773 0 : updateArithCounters(PC, NULL, arithFirstUseType, arithSecondUseType);
2774 0 : arithUpdated = true;
2775 : }
2776 13270 : jsop_stricteq(op);
2777 13270 : END_CASE(JSOP_STRICTEQ)
2778 :
2779 : BEGIN_CASE(JSOP_ITER)
2780 4390 : if (!iter(GET_UINT8(PC)))
2781 0 : return Compile_Error;
2782 4390 : END_CASE(JSOP_ITER)
2783 :
2784 : BEGIN_CASE(JSOP_MOREITER)
2785 : {
2786 : /* At the byte level, this is always fused with IFNE or IFNEX. */
2787 4283 : if (script->pcCounters)
2788 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
2789 4283 : jsbytecode *target = &PC[JSOP_MOREITER_LENGTH];
2790 4283 : JSOp next = JSOp(*target);
2791 4283 : JS_ASSERT(next == JSOP_IFNE);
2792 :
2793 4283 : target += GET_JUMP_OFFSET(target);
2794 :
2795 4283 : fixDoubleTypes(target);
2796 4283 : if (!iterMore(target))
2797 0 : return Compile_Error;
2798 4283 : PC += JSOP_MOREITER_LENGTH;
2799 4283 : PC += js_CodeSpec[next].length;
2800 4283 : break;
2801 : }
2802 : END_CASE(JSOP_MOREITER)
2803 :
2804 : BEGIN_CASE(JSOP_ENDITER)
2805 4438 : iterEnd();
2806 4438 : END_CASE(JSOP_ENDITER)
2807 :
2808 : BEGIN_CASE(JSOP_POP)
2809 790153 : frame.pop();
2810 790153 : END_CASE(JSOP_POP)
2811 :
2812 : BEGIN_CASE(JSOP_GETARG)
2813 : BEGIN_CASE(JSOP_CALLARG)
2814 : {
2815 194514 : restoreVarType();
2816 194514 : uint32_t arg = GET_SLOTNO(PC);
2817 194514 : if (JSObject *singleton = pushedSingleton(0))
2818 724 : frame.push(ObjectValue(*singleton));
2819 : else
2820 193790 : frame.pushArg(arg);
2821 : }
2822 194514 : END_CASE(JSOP_GETARG)
2823 :
2824 : BEGIN_CASE(JSOP_BINDGNAME)
2825 72877 : jsop_bindgname();
2826 72877 : END_CASE(JSOP_BINDGNAME)
2827 :
2828 : BEGIN_CASE(JSOP_SETARG)
2829 : {
2830 3655 : jsbytecode *next = &PC[JSOP_SETARG_LENGTH];
2831 3655 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2832 3655 : frame.storeArg(GET_SLOTNO(PC), pop);
2833 3655 : updateVarType();
2834 :
2835 3655 : if (pop) {
2836 3570 : frame.pop();
2837 3570 : PC += JSOP_SETARG_LENGTH + JSOP_POP_LENGTH;
2838 3570 : break;
2839 : }
2840 : }
2841 85 : END_CASE(JSOP_SETARG)
2842 :
2843 : BEGIN_CASE(JSOP_GETLOCAL)
2844 : BEGIN_CASE(JSOP_CALLLOCAL)
2845 : {
2846 : /*
2847 : * Update the var type unless we are about to pop the variable.
2848 : * Sync is not guaranteed for types of dead locals, and GETLOCAL
2849 : * followed by POP is not regarded as a use of the variable.
2850 : */
2851 393659 : jsbytecode *next = &PC[JSOP_GETLOCAL_LENGTH];
2852 393659 : if (JSOp(*next) != JSOP_POP || analysis->jumpTarget(next))
2853 344146 : restoreVarType();
2854 393659 : uint32_t slot = GET_SLOTNO(PC);
2855 393659 : if (JSObject *singleton = pushedSingleton(0))
2856 36 : frame.push(ObjectValue(*singleton));
2857 : else
2858 393623 : frame.pushLocal(slot);
2859 : }
2860 393659 : END_CASE(JSOP_GETLOCAL)
2861 :
2862 : BEGIN_CASE(JSOP_SETLOCAL)
2863 : {
2864 226830 : jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
2865 226830 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2866 226830 : frame.storeLocal(GET_SLOTNO(PC), pop);
2867 226830 : updateVarType();
2868 :
2869 226830 : if (pop) {
2870 226302 : frame.pop();
2871 226302 : PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
2872 226302 : break;
2873 : }
2874 : }
2875 528 : END_CASE(JSOP_SETLOCAL)
2876 :
2877 : BEGIN_CASE(JSOP_SETLOCALPOP)
2878 : {
2879 19563 : uint32_t slot = GET_SLOTNO(PC);
2880 19563 : frame.storeLocal(slot, true);
2881 19563 : frame.pop();
2882 19563 : updateVarType();
2883 : }
2884 19563 : END_CASE(JSOP_SETLOCALPOP)
2885 :
2886 : BEGIN_CASE(JSOP_UINT16)
2887 187417 : frame.push(Value(Int32Value((int32_t) GET_UINT16(PC))));
2888 187417 : END_CASE(JSOP_UINT16)
2889 :
2890 : BEGIN_CASE(JSOP_NEWINIT)
2891 1604 : if (!jsop_newinit())
2892 0 : return Compile_Error;
2893 1604 : END_CASE(JSOP_NEWINIT)
2894 :
2895 : BEGIN_CASE(JSOP_NEWARRAY)
2896 13548 : if (!jsop_newinit())
2897 0 : return Compile_Error;
2898 13548 : END_CASE(JSOP_NEWARRAY)
2899 :
2900 : BEGIN_CASE(JSOP_NEWOBJECT)
2901 6881 : if (!jsop_newinit())
2902 0 : return Compile_Error;
2903 6881 : END_CASE(JSOP_NEWOBJECT)
2904 :
2905 : BEGIN_CASE(JSOP_ENDINIT)
2906 22016 : END_CASE(JSOP_ENDINIT)
2907 :
2908 : BEGIN_CASE(JSOP_INITMETHOD)
2909 0 : jsop_initmethod();
2910 0 : frame.pop();
2911 0 : END_CASE(JSOP_INITMETHOD)
2912 :
2913 : BEGIN_CASE(JSOP_INITPROP)
2914 7628 : jsop_initprop();
2915 7628 : frame.pop();
2916 7628 : END_CASE(JSOP_INITPROP)
2917 :
2918 : BEGIN_CASE(JSOP_INITELEM)
2919 41805 : jsop_initelem();
2920 41805 : frame.popn(2);
2921 41805 : END_CASE(JSOP_INITELEM)
2922 :
2923 : BEGIN_CASE(JSOP_INCARG)
2924 : BEGIN_CASE(JSOP_DECARG)
2925 : BEGIN_CASE(JSOP_ARGINC)
2926 : BEGIN_CASE(JSOP_ARGDEC)
2927 444 : if (script->pcCounters) {
2928 0 : restoreVarType();
2929 0 : FrameEntry *fe = frame.getArg(GET_SLOTNO(PC));
2930 0 : if (fe->isTypeKnown())
2931 0 : arithFirstUseType = fe->getKnownType();
2932 : }
2933 :
2934 444 : if (!jsop_arginc(op, GET_SLOTNO(PC)))
2935 0 : return Compile_Retry;
2936 :
2937 444 : if (script->pcCounters) {
2938 0 : FrameEntry *fe = frame.getArg(GET_SLOTNO(PC));
2939 0 : updateArithCounters(PC, fe, arithFirstUseType, JSVAL_TYPE_INT32);
2940 0 : arithUpdated = true;
2941 : }
2942 444 : END_CASE(JSOP_ARGDEC)
2943 :
2944 : BEGIN_CASE(JSOP_INCLOCAL)
2945 : BEGIN_CASE(JSOP_DECLOCAL)
2946 : BEGIN_CASE(JSOP_LOCALINC)
2947 : BEGIN_CASE(JSOP_LOCALDEC)
2948 38629 : if (script->pcCounters) {
2949 0 : restoreVarType();
2950 0 : FrameEntry *fe = frame.getLocal(GET_SLOTNO(PC));
2951 0 : if (fe->isTypeKnown())
2952 0 : arithFirstUseType = fe->getKnownType();
2953 : }
2954 :
2955 38629 : if (!jsop_localinc(op, GET_SLOTNO(PC)))
2956 1 : return Compile_Retry;
2957 :
2958 38628 : if (script->pcCounters) {
2959 0 : FrameEntry *fe = frame.getLocal(GET_SLOTNO(PC));
2960 0 : updateArithCounters(PC, fe, arithFirstUseType, JSVAL_TYPE_INT32);
2961 0 : arithUpdated = true;
2962 : }
2963 38628 : END_CASE(JSOP_LOCALDEC)
2964 :
2965 : BEGIN_CASE(JSOP_BINDNAME)
2966 7621 : jsop_bindname(script->getName(GET_UINT32_INDEX(PC)));
2967 7621 : END_CASE(JSOP_BINDNAME)
2968 :
2969 : BEGIN_CASE(JSOP_SETPROP)
2970 : {
2971 28090 : jsbytecode *next = &PC[JSOP_SETPROP_LENGTH];
2972 28090 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2973 28090 : if (!jsop_setprop(script->getName(GET_UINT32_INDEX(PC)), pop))
2974 0 : return Compile_Error;
2975 : }
2976 28090 : END_CASE(JSOP_SETPROP)
2977 :
2978 : BEGIN_CASE(JSOP_SETNAME)
2979 : BEGIN_CASE(JSOP_SETMETHOD)
2980 : {
2981 7621 : jsbytecode *next = &PC[JSOP_SETNAME_LENGTH];
2982 7621 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
2983 7621 : if (!jsop_setprop(script->getName(GET_UINT32_INDEX(PC)), pop))
2984 0 : return Compile_Error;
2985 : }
2986 7621 : END_CASE(JSOP_SETNAME)
2987 :
2988 : BEGIN_CASE(JSOP_THROW)
2989 16757 : prepareStubCall(Uses(1));
2990 16757 : INLINE_STUBCALL(stubs::Throw, REJOIN_NONE);
2991 16757 : frame.pop();
2992 16757 : fallthrough = false;
2993 16757 : END_CASE(JSOP_THROW)
2994 :
2995 : BEGIN_CASE(JSOP_IN)
2996 : {
2997 18351 : jsop_in();
2998 : }
2999 18351 : END_CASE(JSOP_IN)
3000 :
3001 : BEGIN_CASE(JSOP_INSTANCEOF)
3002 2413 : if (!jsop_instanceof())
3003 0 : return Compile_Error;
3004 2413 : END_CASE(JSOP_INSTANCEOF)
3005 :
3006 : BEGIN_CASE(JSOP_EXCEPTION)
3007 : {
3008 19252 : prepareStubCall(Uses(0));
3009 19252 : INLINE_STUBCALL(stubs::Exception, REJOIN_FALLTHROUGH);
3010 19252 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
3011 : }
3012 19252 : END_CASE(JSOP_EXCEPTION)
3013 :
3014 : BEGIN_CASE(JSOP_LINENO)
3015 1983 : END_CASE(JSOP_LINENO)
3016 :
3017 : BEGIN_CASE(JSOP_ENUMELEM)
3018 : // Normally, SETELEM transforms the stack
3019 : // from: OBJ ID VALUE
3020 : // to: VALUE
3021 : //
3022 : // Here, the stack transition is
3023 : // from: VALUE OBJ ID
3024 : // to:
3025 : // So we make the stack look like a SETELEM, and re-use it.
3026 :
3027 : // Before: VALUE OBJ ID
3028 : // After: VALUE OBJ ID VALUE
3029 0 : frame.dupAt(-3);
3030 :
3031 : // Before: VALUE OBJ ID VALUE
3032 : // After: VALUE VALUE
3033 0 : if (!jsop_setelem(true))
3034 0 : return Compile_Error;
3035 :
3036 : // Before: VALUE VALUE
3037 : // After:
3038 0 : frame.popn(2);
3039 0 : END_CASE(JSOP_ENUMELEM)
3040 :
3041 : BEGIN_CASE(JSOP_CONDSWITCH)
3042 : /* No-op for the decompiler. */
3043 483 : END_CASE(JSOP_CONDSWITCH)
3044 :
3045 : BEGIN_CASE(JSOP_LABEL)
3046 108 : END_CASE(JSOP_LABEL)
3047 :
3048 : BEGIN_CASE(JSOP_DEFFUN)
3049 : {
3050 1507 : JSFunction *innerFun = script->getFunction(GET_UINT32_INDEX(PC));
3051 :
3052 1507 : prepareStubCall(Uses(0));
3053 1507 : masm.move(ImmPtr(innerFun), Registers::ArgReg1);
3054 1507 : INLINE_STUBCALL(STRICT_VARIANT(stubs::DefFun), REJOIN_FALLTHROUGH);
3055 : }
3056 1507 : END_CASE(JSOP_DEFFUN)
3057 :
3058 : BEGIN_CASE(JSOP_DEFVAR)
3059 : BEGIN_CASE(JSOP_DEFCONST)
3060 : {
3061 19245 : PropertyName *name = script->getName(GET_UINT32_INDEX(PC));
3062 :
3063 19245 : prepareStubCall(Uses(0));
3064 19245 : masm.move(ImmPtr(name), Registers::ArgReg1);
3065 19245 : INLINE_STUBCALL(stubs::DefVarOrConst, REJOIN_FALLTHROUGH);
3066 : }
3067 19245 : END_CASE(JSOP_DEFVAR)
3068 :
3069 : BEGIN_CASE(JSOP_SETCONST)
3070 : {
3071 16592 : PropertyName *name = script->getName(GET_UINT32_INDEX(PC));
3072 :
3073 16592 : prepareStubCall(Uses(1));
3074 16592 : masm.move(ImmPtr(name), Registers::ArgReg1);
3075 16592 : INLINE_STUBCALL(stubs::SetConst, REJOIN_FALLTHROUGH);
3076 : }
3077 16592 : END_CASE(JSOP_SETCONST)
3078 :
3079 : BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
3080 : {
3081 161 : uint32_t slot = GET_SLOTNO(PC);
3082 161 : JSFunction *fun = script->getFunction(GET_UINT32_INDEX(PC + SLOTNO_LEN));
3083 :
3084 : /* See JSOP_DEFLOCALFUN. */
3085 161 : markUndefinedLocal(PC - script->code, slot);
3086 :
3087 161 : prepareStubCall(Uses(frame.frameSlots()));
3088 161 : masm.move(ImmPtr(fun), Registers::ArgReg1);
3089 161 : INLINE_STUBCALL(stubs::DefLocalFun_FC, REJOIN_DEFLOCALFUN);
3090 161 : frame.takeReg(Registers::ReturnReg);
3091 161 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
3092 161 : frame.storeLocal(slot, true);
3093 161 : frame.pop();
3094 161 : updateVarType();
3095 : }
3096 161 : END_CASE(JSOP_DEFLOCALFUN_FC)
3097 :
3098 : BEGIN_CASE(JSOP_LAMBDA)
3099 : {
3100 44691 : JSFunction *fun = script->getFunction(GET_UINT32_INDEX(PC));
3101 :
3102 44691 : JSObjStubFun stub = stubs::Lambda;
3103 44691 : uint32_t uses = 0;
3104 :
3105 44691 : jsbytecode *pc2 = NULL;
3106 :
3107 44691 : prepareStubCall(Uses(uses));
3108 44691 : masm.move(ImmPtr(fun), Registers::ArgReg1);
3109 :
3110 44691 : INLINE_STUBCALL(stub, REJOIN_PUSH_OBJECT);
3111 :
3112 44691 : frame.takeReg(Registers::ReturnReg);
3113 44691 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
3114 : }
3115 44691 : END_CASE(JSOP_LAMBDA)
3116 :
3117 : BEGIN_CASE(JSOP_TRY)
3118 19252 : frame.syncAndForgetEverything();
3119 19252 : END_CASE(JSOP_TRY)
3120 :
3121 : BEGIN_CASE(JSOP_GETFCSLOT)
3122 : BEGIN_CASE(JSOP_CALLFCSLOT)
3123 : {
3124 2117 : unsigned index = GET_UINT16(PC);
3125 :
3126 : // Load the callee's payload into a register.
3127 2117 : frame.pushCallee();
3128 2117 : RegisterID reg = frame.copyDataIntoReg(frame.peek(-1));
3129 2117 : frame.pop();
3130 :
3131 : // obj->getFlatClosureUpvars()
3132 2117 : Address upvarAddress(reg, JSFunction::getFlatClosureUpvarsOffset());
3133 2117 : masm.loadPrivate(upvarAddress, reg);
3134 : // push ((Value *) reg)[index]
3135 :
3136 : BarrierState barrier = pushAddressMaybeBarrier(Address(reg, index * sizeof(Value)),
3137 2117 : knownPushedType(0), true);
3138 2117 : finishBarrier(barrier, REJOIN_GETTER, 0);
3139 : }
3140 2117 : END_CASE(JSOP_CALLFCSLOT)
3141 :
3142 : BEGIN_CASE(JSOP_DEFLOCALFUN)
3143 : {
3144 2453 : uint32_t slot = GET_SLOTNO(PC);
3145 2453 : JSFunction *fun = script->getFunction(GET_UINT32_INDEX(PC + SLOTNO_LEN));
3146 :
3147 : /*
3148 : * The liveness analysis will report that the value in |slot| is
3149 : * defined at the start of this opcode. However, we don't actually
3150 : * fill it in until the stub returns. This will cause a problem if
3151 : * we GC inside the stub. So we write a safe value here so that the
3152 : * GC won't crash.
3153 : */
3154 2453 : markUndefinedLocal(PC - script->code, slot);
3155 :
3156 2453 : prepareStubCall(Uses(0));
3157 2453 : masm.move(ImmPtr(fun), Registers::ArgReg1);
3158 2453 : INLINE_STUBCALL(stubs::DefLocalFun, REJOIN_DEFLOCALFUN);
3159 2453 : frame.takeReg(Registers::ReturnReg);
3160 2453 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
3161 2453 : frame.storeLocal(slot, true);
3162 2453 : frame.pop();
3163 2453 : updateVarType();
3164 : }
3165 2453 : END_CASE(JSOP_DEFLOCALFUN)
3166 :
3167 : BEGIN_CASE(JSOP_RETRVAL)
3168 1528 : emitReturn(NULL);
3169 1528 : fallthrough = false;
3170 1528 : END_CASE(JSOP_RETRVAL)
3171 :
3172 : BEGIN_CASE(JSOP_GETGNAME)
3173 : BEGIN_CASE(JSOP_CALLGNAME)
3174 : {
3175 491901 : uint32_t index = GET_UINT32_INDEX(PC);
3176 491901 : jsop_getgname(index);
3177 491901 : frame.extra(frame.peek(-1)).name = script->getName(index);
3178 : }
3179 491901 : END_CASE(JSOP_GETGNAME)
3180 :
3181 : BEGIN_CASE(JSOP_SETGNAME)
3182 : {
3183 72863 : jsbytecode *next = &PC[JSOP_SETGNAME_LENGTH];
3184 72863 : bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
3185 72863 : jsop_setgname(script->getName(GET_UINT32_INDEX(PC)), pop);
3186 : }
3187 72863 : END_CASE(JSOP_SETGNAME)
3188 :
3189 : BEGIN_CASE(JSOP_REGEXP)
3190 17761 : if (!jsop_regexp())
3191 0 : return Compile_Error;
3192 17761 : END_CASE(JSOP_REGEXP)
3193 :
3194 : BEGIN_CASE(JSOP_OBJECT)
3195 : {
3196 2700 : JSObject *object = script->getObject(GET_UINT32_INDEX(PC));
3197 2700 : RegisterID reg = frame.allocReg();
3198 2700 : masm.move(ImmPtr(object), reg);
3199 2700 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
3200 : }
3201 2700 : END_CASE(JSOP_OBJECT)
3202 :
3203 : BEGIN_CASE(JSOP_UINT24)
3204 2459 : frame.push(Value(Int32Value((int32_t) GET_UINT24(PC))));
3205 2459 : END_CASE(JSOP_UINT24)
3206 :
3207 : BEGIN_CASE(JSOP_STOP)
3208 90149 : if (script->pcCounters)
3209 0 : updatePCCounters(PC, &codeStart, &countersUpdated);
3210 90149 : emitReturn(NULL);
3211 90149 : goto done;
3212 : END_CASE(JSOP_STOP)
3213 :
3214 : BEGIN_CASE(JSOP_GETXPROP)
3215 506 : if (!jsop_xname(script->getName(GET_UINT32_INDEX(PC))))
3216 0 : return Compile_Error;
3217 506 : END_CASE(JSOP_GETXPROP)
3218 :
3219 : BEGIN_CASE(JSOP_ENTERBLOCK)
3220 : BEGIN_CASE(JSOP_ENTERLET0)
3221 : BEGIN_CASE(JSOP_ENTERLET1)
3222 28563 : enterBlock(&script->getObject(GET_UINT32_INDEX(PC))->asStaticBlock());
3223 28563 : END_CASE(JSOP_ENTERBLOCK);
3224 :
3225 : BEGIN_CASE(JSOP_LEAVEBLOCK)
3226 28996 : leaveBlock();
3227 28996 : END_CASE(JSOP_LEAVEBLOCK)
3228 :
3229 : BEGIN_CASE(JSOP_INT8)
3230 164770 : frame.push(Value(Int32Value(GET_INT8(PC))));
3231 164770 : END_CASE(JSOP_INT8)
3232 :
3233 : BEGIN_CASE(JSOP_INT32)
3234 2490 : frame.push(Value(Int32Value(GET_INT32(PC))));
3235 2490 : END_CASE(JSOP_INT32)
3236 :
3237 : BEGIN_CASE(JSOP_HOLE)
3238 2192 : frame.push(MagicValue(JS_ARRAY_HOLE));
3239 2192 : END_CASE(JSOP_HOLE)
3240 :
3241 : BEGIN_CASE(JSOP_LAMBDA_FC)
3242 : {
3243 1268 : JSFunction *fun = script->getFunction(GET_UINT32_INDEX(PC));
3244 1268 : prepareStubCall(Uses(frame.frameSlots()));
3245 1268 : masm.move(ImmPtr(fun), Registers::ArgReg1);
3246 1268 : INLINE_STUBCALL(stubs::FlatLambda, REJOIN_PUSH_OBJECT);
3247 1268 : frame.takeReg(Registers::ReturnReg);
3248 1268 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
3249 : }
3250 1268 : END_CASE(JSOP_LAMBDA_FC)
3251 :
3252 : BEGIN_CASE(JSOP_LOOPHEAD)
3253 : {
3254 60241 : if (analysis->jumpTarget(PC)) {
3255 60181 : interruptCheckHelper();
3256 60181 : recompileCheckHelper();
3257 : }
3258 : }
3259 60241 : END_CASE(JSOP_LOOPHEAD)
3260 :
3261 : BEGIN_CASE(JSOP_LOOPENTRY)
3262 59867 : END_CASE(JSOP_LOOPENTRY)
3263 :
3264 : BEGIN_CASE(JSOP_DEBUGGER)
3265 : {
3266 2083 : prepareStubCall(Uses(0));
3267 2083 : masm.move(ImmPtr(PC), Registers::ArgReg1);
3268 2083 : INLINE_STUBCALL(stubs::DebuggerStatement, REJOIN_FALLTHROUGH);
3269 : }
3270 2083 : END_CASE(JSOP_DEBUGGER)
3271 :
3272 : default:
3273 0 : JS_NOT_REACHED("Opcode not implemented");
3274 : }
3275 :
3276 : /**********************
3277 : * END COMPILER OPS *
3278 : **********************/
3279 :
3280 8523916 : if (cx->typeInferenceEnabled() && PC == lastPC + GetBytecodeLength(lastPC)) {
3281 : /*
3282 : * Inform the frame of the type sets for values just pushed. Skip
3283 : * this if we did any opcode fusions, we don't keep track of the
3284 : * associated type sets in such cases.
3285 : */
3286 1997123 : unsigned nuses = GetUseCount(script, lastPC - script->code);
3287 1997123 : unsigned ndefs = GetDefCount(script, lastPC - script->code);
3288 3738315 : for (unsigned i = 0; i < ndefs; i++) {
3289 1741192 : FrameEntry *fe = frame.getStack(opinfo->stackDepth - nuses + i);
3290 1741192 : if (fe) {
3291 : /* fe may be NULL for conditionally pushed entries, e.g. JSOP_AND */
3292 1737741 : frame.extra(fe).types = analysis->pushedTypes(lastPC - script->code, i);
3293 : }
3294 : }
3295 : }
3296 :
3297 8523916 : if (script->pcCounters) {
3298 0 : size_t length = masm.size() - masm.distanceOf(codeStart);
3299 0 : bool typesUpdated = false;
3300 :
3301 : /* Update information about the type of value pushed by arithmetic ops. */
3302 0 : if ((js_CodeSpec[op].format & JOF_ARITH) && !arithUpdated) {
3303 0 : FrameEntry *pushed = NULL;
3304 0 : if (PC == lastPC + GetBytecodeLength(lastPC))
3305 0 : pushed = frame.peek(-1);
3306 0 : updateArithCounters(lastPC, pushed, arithFirstUseType, arithSecondUseType);
3307 0 : typesUpdated = true;
3308 : }
3309 :
3310 : /* Update information about the result type of access operations. */
3311 0 : if (OpcodeCounts::accessOp(op) &&
3312 : op != JSOP_SETPROP && op != JSOP_SETMETHOD && op != JSOP_SETELEM) {
3313 0 : FrameEntry *fe = (GetDefCount(script, lastPC - script->code) == 1)
3314 0 : ? frame.peek(-1)
3315 0 : : frame.peek(-2);
3316 0 : updatePCTypes(lastPC, fe);
3317 0 : typesUpdated = true;
3318 : }
3319 :
3320 0 : if (countersUpdated || typesUpdated || length != 0) {
3321 0 : if (!countersUpdated)
3322 0 : updatePCCounters(lastPC, &codeStart, &countersUpdated);
3323 :
3324 0 : if (pcLengths) {
3325 : /* Fill in the amount of inline code generated for the op. */
3326 0 : uint32_t offset = ssa.frameLength(a->inlineIndex) + lastPC - script->code;
3327 0 : pcLengths[offset].codeLength += length;
3328 : }
3329 : }
3330 8523916 : } else if (pcLengths) {
3331 : /* Fill in the amount of inline code generated for the op. */
3332 0 : size_t length = masm.size() - masm.distanceOf(codeStart);
3333 0 : uint32_t offset = ssa.frameLength(a->inlineIndex) + lastPC - script->code;
3334 0 : pcLengths[offset].codeLength += length;
3335 : }
3336 :
3337 8523916 : frame.assertValidRegisterState();
3338 : }
3339 :
3340 : done:
3341 131842 : return Compile_Okay;
3342 : }
3343 :
3344 : #undef END_CASE
3345 : #undef BEGIN_CASE
3346 :
/*
 * Emit instrumentation crediting generated-code sizes to the op at |pc|:
 * bumps the op's METHODJIT execution counter and folds the inline-path and
 * PIC code lengths accumulated so far into the op's METHODJIT_CODE_LENGTH /
 * METHODJIT_PICS_LENGTH counts. On return, |*start| is moved past the
 * instrumentation (so callers measuring op code length exclude it) and
 * |*updated| is set to true.
 */
void
mjit::Compiler::updatePCCounters(jsbytecode *pc, Label *start, bool *updated)
{
    JS_ASSERT(script->pcCounters);

    /*
     * Bump the METHODJIT count for the opcode, read the METHODJIT_CODE_LENGTH
     * and METHODJIT_PICS_LENGTH counts, indicating the amounts of inline path
     * code and generated code, respectively, and add them to the accumulated
     * total for the op.
     */
    /* Offset of |pc| within the flattened (inlining-aware) frame layout. */
    uint32_t offset = ssa.frameLength(a->inlineIndex) + pc - script->code;

    /*
     * Base register for addresses, we can't use AbsoluteAddress in all places.
     * This may hold a live value, so write it out to the top of the stack
     * first. This cannot overflow the stack, as space is always reserved for
     * an extra callee frame.
     */
    RegisterID reg = Registers::ReturnReg;
    masm.storePtr(reg, frame.addressOfTop());

    OpcodeCounts counts = script->getCounts(pc);

    /* counter += accumulated inline code length for this op. */
    double *code = &counts.get(OpcodeCounts::BASE_METHODJIT_CODE);
    double *codeLength = &pcLengths[offset].codeLength;
    masm.addCounter(codeLength, code, reg);

    /* counter += accumulated out-of-line PIC code length for this op. */
    double *pics = &counts.get(OpcodeCounts::BASE_METHODJIT_PICS);
    double *picsLength = &pcLengths[offset].picsLength;
    masm.addCounter(picsLength, pics, reg);

    /* Execution count for the op itself. */
    double *counter = &counts.get(OpcodeCounts::BASE_METHODJIT);
    masm.bumpCounter(counter, reg);

    /* Reload the base register's original value. */
    masm.loadPtr(frame.addressOfTop(), reg);

    /* The start label should reflect the code for the op, not instrumentation. */
    *start = masm.label();
    *updated = true;
}
3389 :
3390 : static inline bool
3391 0 : HasPayloadType(types::TypeSet *types)
3392 : {
3393 0 : if (types->unknown())
3394 0 : return false;
3395 :
3396 0 : types::TypeFlags flags = types->baseFlags();
3397 0 : bool objects = !!(flags & types::TYPE_FLAG_ANYOBJECT) || !!types->getObjectCount();
3398 :
3399 0 : if (objects && !!(flags & types::TYPE_FLAG_STRING))
3400 0 : return false;
3401 :
3402 0 : flags = flags & ~(types::TYPE_FLAG_ANYOBJECT | types::TYPE_FLAG_STRING);
3403 :
3404 : return (flags == types::TYPE_FLAG_UNDEFINED)
3405 : || (flags == types::TYPE_FLAG_NULL)
3406 0 : || (flags == types::TYPE_FLAG_BOOLEAN);
3407 : }
3408 :
/*
 * Emit pc-count instrumentation classifying the type of the value |fe|
 * produced by the access op at |pc|. Bumps one ACCESS_MONOMORPHIC /
 * ACCESS_DIMORPHIC / ACCESS_POLYMORPHIC counter, one per-type ACCESS_*
 * counter for the pushed type tag, and (for JOF_TYPESET ops) the
 * barrier/no-barrier counter.
 */
void
mjit::Compiler::updatePCTypes(jsbytecode *pc, FrameEntry *fe)
{
    JS_ASSERT(script->pcCounters);

    /*
     * Get a temporary register, as for updatePCCounters. Don't overlap with
     * the backing store for the entry's type tag, if there is one.
     */
    RegisterID reg = Registers::ReturnReg;
    if (frame.peekTypeInRegister(fe) && reg == frame.tempRegForType(fe)) {
        JS_STATIC_ASSERT(Registers::ReturnReg != Registers::ArgReg1);
        reg = Registers::ArgReg1;
    }
    /* Preserve the scratch register's live value across the instrumentation. */
    masm.push(reg);

    OpcodeCounts counts = script->getCounts(pc);

    /* Update the counters for pushed type tags and possible access types. */
    if (fe->isTypeKnown()) {
        /* Type known at compile time: bump the matching counter directly. */
        masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_MONOMORPHIC), reg);
        OpcodeCounts::AccessCounts counter = OpcodeCounts::ACCESS_OBJECT;
        switch (fe->getKnownType()) {
          case JSVAL_TYPE_UNDEFINED: counter = OpcodeCounts::ACCESS_UNDEFINED; break;
          case JSVAL_TYPE_NULL: counter = OpcodeCounts::ACCESS_NULL; break;
          case JSVAL_TYPE_BOOLEAN: counter = OpcodeCounts::ACCESS_BOOLEAN; break;
          case JSVAL_TYPE_INT32: counter = OpcodeCounts::ACCESS_INT32; break;
          case JSVAL_TYPE_DOUBLE: counter = OpcodeCounts::ACCESS_DOUBLE; break;
          case JSVAL_TYPE_STRING: counter = OpcodeCounts::ACCESS_STRING; break;
          case JSVAL_TYPE_OBJECT: counter = OpcodeCounts::ACCESS_OBJECT; break;
          default:;
        }
        /*
         * NOTE(review): |counter| is initialized to ACCESS_OBJECT and the
         * switch never clears it, so this guard only filters anything if
         * ACCESS_OBJECT happens to have value zero — confirm the enum's
         * layout matches that intent.
         */
        if (counter)
            masm.bumpCounter(&counts.get(counter), reg);
    } else {
        /*
         * Type only known at runtime. Classify the degree of polymorphism
         * from the inferred type set, then test the runtime tag against each
         * type in turn, bumping the counter whose test matches.
         */
        types::TypeSet *types = frame.extra(fe).types;
        if (types && HasPayloadType(types))
            masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_DIMORPHIC), reg);
        else
            masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_POLYMORPHIC), reg);

        frame.loadTypeIntoReg(fe, reg);

        /*
         * Pattern for each type below: skip the bump unless the tag matches;
         * |reg| doubles as the bumpCounter scratch, so the tag is reloaded
         * before linking / testing the next type.
         */
        Jump j = masm.testUndefined(Assembler::NotEqual, reg);
        masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_UNDEFINED), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testNull(Assembler::NotEqual, reg);
        masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_NULL), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testBoolean(Assembler::NotEqual, reg);
        masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_BOOLEAN), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testInt32(Assembler::NotEqual, reg);
        masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_INT32), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testDouble(Assembler::NotEqual, reg);
        masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_DOUBLE), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testString(Assembler::NotEqual, reg);
        masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_STRING), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);

        j = masm.testObject(Assembler::NotEqual, reg);
        masm.bumpCounter(&counts.get(OpcodeCounts::ACCESS_OBJECT), reg);
        frame.loadTypeIntoReg(fe, reg);
        j.linkTo(masm.label(), &masm);
    }

    /* Update the counter for accesses with type barriers. */
    if (js_CodeSpec[*pc].format & JOF_TYPESET) {
        double *counter = &counts.get(hasTypeBarriers(pc)
                                      ? OpcodeCounts::ACCESS_BARRIER
                                      : OpcodeCounts::ACCESS_NOBARRIER);
        masm.bumpCounter(counter, reg);
    }

    /* Reload the base register's original value. */
    masm.pop(reg);
}
3499 :
3500 : void
3501 0 : mjit::Compiler::updateArithCounters(jsbytecode *pc, FrameEntry *fe,
3502 : JSValueType firstUseType, JSValueType secondUseType)
3503 : {
3504 0 : JS_ASSERT(script->pcCounters);
3505 :
3506 0 : RegisterID reg = Registers::ReturnReg;
3507 0 : masm.push(reg);
3508 :
3509 : /*
3510 : * What counter we bump for arithmetic expressions depend on the
3511 : * known types of its operands.
3512 : *
3513 : * ARITH_INT: operands are known ints, result is int
3514 : * ARITH_OVERFLOW: operands are known ints, result is double
3515 : * ARITH_DOUBLE: either operand is a known double, result is double
3516 : * ARITH_OTHER: operands are monomorphic but not int or double
3517 : * ARITH_UNKNOWN: operands are polymorphic
3518 : */
3519 :
3520 : OpcodeCounts::ArithCounts counter;
3521 0 : if (firstUseType == JSVAL_TYPE_INT32 && secondUseType == JSVAL_TYPE_INT32 &&
3522 0 : (!fe || fe->isNotType(JSVAL_TYPE_DOUBLE))) {
3523 0 : counter = OpcodeCounts::ARITH_INT;
3524 0 : } else if (firstUseType == JSVAL_TYPE_INT32 || firstUseType == JSVAL_TYPE_DOUBLE ||
3525 : secondUseType == JSVAL_TYPE_INT32 || secondUseType == JSVAL_TYPE_DOUBLE) {
3526 0 : counter = OpcodeCounts::ARITH_DOUBLE;
3527 0 : } else if (firstUseType != JSVAL_TYPE_UNKNOWN && secondUseType != JSVAL_TYPE_UNKNOWN &&
3528 0 : (!fe || fe->isTypeKnown())) {
3529 0 : counter = OpcodeCounts::ARITH_OTHER;
3530 : } else {
3531 0 : counter = OpcodeCounts::ARITH_UNKNOWN;
3532 : }
3533 :
3534 0 : masm.bumpCounter(&script->getCounts(pc).get(counter), reg);
3535 0 : masm.pop(reg);
3536 0 : }
3537 :
/*
 * Emit code bumping the PC counters classifying a JSOP_GETELEM/SETELEM-style
 * access: one counter for the kind of index |id|, one for the kind of base
 * object |obj|.
 */
void
mjit::Compiler::updateElemCounters(jsbytecode *pc, FrameEntry *obj, FrameEntry *id)
{
    JS_ASSERT(script->pcCounters);

    /* Scratch register for the counter bumps; saved/restored around them. */
    RegisterID reg = Registers::ReturnReg;
    masm.push(reg);

    OpcodeCounts counts = script->getCounts(pc);

    /* Classify the index: known int, known double, other known type, or unknown. */
    OpcodeCounts::ElementCounts counter;
    if (id->isTypeKnown()) {
        switch (id->getKnownType()) {
          case JSVAL_TYPE_INT32: counter = OpcodeCounts::ELEM_ID_INT; break;
          case JSVAL_TYPE_DOUBLE: counter = OpcodeCounts::ELEM_ID_DOUBLE; break;
          default: counter = OpcodeCounts::ELEM_ID_OTHER; break;
        }
    } else {
        counter = OpcodeCounts::ELEM_ID_UNKNOWN;
    }
    masm.bumpCounter(&counts.get(counter), reg);

    /*
     * Classify the base object using inferred type information: typed array,
     * packed dense array, dense array, or other. Note: the hasObjectFlags /
     * getTypedArrayType queries also register type constraints on |cx|, so
     * this ordering is not purely observational.
     */
    if (obj->mightBeType(JSVAL_TYPE_OBJECT)) {
        types::TypeSet *types = frame.extra(obj).types;
        if (types && !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_TYPED_ARRAY) &&
            types->getTypedArrayType(cx) != TypedArray::TYPE_MAX) {
            counter = OpcodeCounts::ELEM_OBJECT_TYPED;
        } else if (types && !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_DENSE_ARRAY)) {
            if (!types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_PACKED_ARRAY))
                counter = OpcodeCounts::ELEM_OBJECT_PACKED;
            else
                counter = OpcodeCounts::ELEM_OBJECT_DENSE;
        } else {
            counter = OpcodeCounts::ELEM_OBJECT_OTHER;
        }
        masm.bumpCounter(&counts.get(counter), reg);
    } else {
        /* Base is known not to be an object; lump with "other". */
        masm.bumpCounter(&counts.get(OpcodeCounts::ELEM_OBJECT_OTHER), reg);
    }

    masm.pop(reg);
}
3580 :
3581 : void
3582 0 : mjit::Compiler::bumpPropCounter(jsbytecode *pc, int counter)
3583 : {
3584 : /* Don't accumulate counts for property ops fused with other ops. */
3585 0 : if (!(js_CodeSpec[*pc].format & JOF_PROP))
3586 0 : return;
3587 0 : RegisterID reg = Registers::ReturnReg;
3588 0 : masm.push(reg);
3589 0 : masm.bumpCounter(&script->getCounts(pc).get(counter), reg);
3590 0 : masm.pop(reg);
3591 : }
3592 :
3593 : JSC::MacroAssembler::Label
3594 402469 : mjit::Compiler::labelOf(jsbytecode *pc, uint32_t inlineIndex)
3595 : {
3596 402469 : ActiveFrame *a = (inlineIndex == UINT32_MAX) ? outer : inlineFrames[inlineIndex];
3597 402469 : JS_ASSERT(uint32_t(pc - a->script->code) < a->script->length);
3598 :
3599 402469 : uint32_t offs = uint32_t(pc - a->script->code);
3600 402469 : JS_ASSERT(a->jumpMap[offs].isSet());
3601 402469 : return a->jumpMap[offs];
3602 : }
3603 :
/*
 * A jump target is "known" when it precedes the current PC: its label has
 * already been emitted, so a jump there can be linked immediately instead
 * of being recorded as a patch.
 */
bool
mjit::Compiler::knownJump(jsbytecode *pc)
{
    return pc < PC;
}
3609 :
3610 : bool
3611 288399 : mjit::Compiler::jumpInScript(Jump j, jsbytecode *pc)
3612 : {
3613 288399 : JS_ASSERT(pc >= script->code && uint32_t(pc - script->code) < script->length);
3614 :
3615 288399 : if (pc < PC) {
3616 89936 : j.linkTo(a->jumpMap[uint32_t(pc - script->code)], &masm);
3617 89936 : return true;
3618 : }
3619 198463 : return branchPatches.append(BranchPatch(j, pc, a->inlineIndex));
3620 : }
3621 :
/*
 * Emit the epilogue jump: return to the caller through the native return
 * address (ncode) stored in the stack frame.
 */
void
mjit::Compiler::emitFinalReturn(Assembler &masm)
{
    masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfNcode()), Registers::ReturnReg);
    masm.jump(Registers::ReturnReg);
}
3628 :
3629 : // Emits code to load a return value of the frame into the scripted-ABI
3630 : // type & data register pair. If the return value is in fp->rval, then |fe|
3631 : // is NULL. Otherwise, |fe| contains the return value.
3632 : //
3633 : // If reading from fp->rval, |undefined| is loaded optimistically, before
3634 : // checking if fp->rval is set in the frame flags and loading that instead.
3635 : //
3636 : // Otherwise, if |masm| is the inline path, it is loaded as efficiently as
3637 : // the FrameState can manage. If |masm| is the OOL path, the value is simply
3638 : // loaded from its slot in the frame, since the caller has guaranteed it's
3639 : // been synced.
3640 : //
3641 : void
3642 234825 : mjit::Compiler::loadReturnValue(Assembler *masm, FrameEntry *fe)
3643 : {
3644 234825 : RegisterID typeReg = JSReturnReg_Type;
3645 234825 : RegisterID dataReg = JSReturnReg_Data;
3646 :
3647 234825 : if (fe) {
3648 : // If using the OOL assembler, the caller signifies that the |fe| is
3649 : // synced, but not to rely on its register state.
3650 44002 : if (masm != &this->masm) {
3651 21656 : if (fe->isConstant()) {
3652 5838 : stubcc.masm.loadValueAsComponents(fe->getValue(), typeReg, dataReg);
3653 : } else {
3654 15818 : Address rval(frame.addressOf(fe));
3655 15818 : if (fe->isTypeKnown() && !fe->isType(JSVAL_TYPE_DOUBLE)) {
3656 3267 : stubcc.masm.loadPayload(rval, dataReg);
3657 3267 : stubcc.masm.move(ImmType(fe->getKnownType()), typeReg);
3658 : } else {
3659 12551 : stubcc.masm.loadValueAsComponents(rval, typeReg, dataReg);
3660 : }
3661 : }
3662 : } else {
3663 22346 : frame.loadForReturn(fe, typeReg, dataReg, Registers::ReturnReg);
3664 : }
3665 : } else {
3666 : // Load a return value from POPV or SETRVAL into the return registers,
3667 : // otherwise return undefined.
3668 190823 : masm->loadValueAsComponents(UndefinedValue(), typeReg, dataReg);
3669 190823 : if (analysis->usesReturnValue()) {
3670 : Jump rvalClear = masm->branchTest32(Assembler::Zero,
3671 : FrameFlagsAddress(),
3672 137027 : Imm32(StackFrame::HAS_RVAL));
3673 137027 : Address rvalAddress(JSFrameReg, StackFrame::offsetOfReturnValue());
3674 137027 : masm->loadValueAsComponents(rvalAddress, typeReg, dataReg);
3675 137027 : rvalClear.linkTo(masm->label(), masm);
3676 : }
3677 : }
3678 234825 : }
3679 :
3680 : // This ensures that constructor return values are an object. If a non-object
3681 : // is returned, either explicitly or implicitly, the newly created object is
3682 : // loaded out of the frame. Otherwise, the explicitly returned object is kept.
3683 : //
3684 : void
3685 3417 : mjit::Compiler::fixPrimitiveReturn(Assembler *masm, FrameEntry *fe)
3686 : {
3687 3417 : JS_ASSERT(isConstructing);
3688 :
3689 3417 : bool ool = (masm != &this->masm);
3690 3417 : Address thisv(JSFrameReg, StackFrame::offsetOfThis(script->function()));
3691 :
3692 : // We can just load |thisv| if either of the following is true:
3693 : // (1) There is no explicit return value, AND fp->rval is not used.
3694 : // (2) There is an explicit return value, and it's known to be primitive.
3695 3579 : if ((!fe && !analysis->usesReturnValue()) ||
3696 162 : (fe && fe->isTypeKnown() && fe->getKnownType() != JSVAL_TYPE_OBJECT))
3697 : {
3698 1562 : if (ool)
3699 758 : masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
3700 : else
3701 804 : frame.loadThisForReturn(JSReturnReg_Type, JSReturnReg_Data, Registers::ReturnReg);
3702 1562 : return;
3703 : }
3704 :
3705 : // If the type is known to be an object, just load the return value as normal.
3706 1855 : if (fe && fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT) {
3707 6 : loadReturnValue(masm, fe);
3708 6 : return;
3709 : }
3710 :
3711 : // There's a return value, and its type is unknown. Test the type and load
3712 : // |thisv| if necessary. Sync the 'this' entry before doing so, as it may
3713 : // be stored in registers if we constructed it inline.
3714 1849 : frame.syncThis();
3715 1849 : loadReturnValue(masm, fe);
3716 1849 : Jump j = masm->testObject(Assembler::Equal, JSReturnReg_Type);
3717 1849 : masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
3718 1849 : j.linkTo(masm->label(), masm);
3719 : }
3720 :
3721 : // Loads the return value into the scripted ABI register pair, such that JS
3722 : // semantics in constructors are preserved.
3723 : //
3724 : void
3725 236387 : mjit::Compiler::emitReturnValue(Assembler *masm, FrameEntry *fe)
3726 : {
3727 236387 : if (isConstructing)
3728 3417 : fixPrimitiveReturn(masm, fe);
3729 : else
3730 232970 : loadReturnValue(masm, fe);
3731 236387 : }
3732 :
/*
 * Communicate an inlined frame's return value (|fe|, or undefined if NULL)
 * back to the outer frame, either by storing it to the caller's synced slot
 * or by leaving it in a register recorded in the ActiveFrame.
 */
void
mjit::Compiler::emitInlineReturnValue(FrameEntry *fe)
{
    JS_ASSERT(!isConstructing && a->needReturnValue);

    if (a->syncReturnValue) {
        /* Needed return value with unknown type, the caller's entry is synced. */
        Address address = frame.addressForInlineReturn();
        if (fe)
            frame.storeTo(fe, address);
        else
            masm.storeValue(UndefinedValue(), address);
        return;
    }

    /*
     * For inlined functions that simply return an entry present in the outer
     * script (e.g. a loop invariant term), mark the copy and propagate it
     * after popping the frame.
     */
    if (!a->exitState && fe && fe->isCopy() && frame.isOuterSlot(fe->backing())) {
        a->returnEntry = fe->backing();
        return;
    }

    if (a->returnValueDouble) {
        /* Caller expects a double; force the entry into an FP register. */
        JS_ASSERT(fe);
        frame.ensureDouble(fe);
        /* If a return register was already chosen, we must land in exactly it. */
        Registers mask(a->returnSet
                       ? Registers::maskReg(a->returnRegister)
                       : Registers::AvailFPRegs);
        FPRegisterID fpreg;
        if (!fe->isConstant()) {
            fpreg = frame.tempRegInMaskForData(fe, mask.freeMask).fpreg();
            /* Sync and detach so the register survives the frame pop below. */
            frame.syncAndForgetFe(fe, true);
            frame.takeReg(fpreg);
        } else {
            fpreg = frame.allocReg(mask.freeMask).fpreg();
            masm.slowLoadConstantDouble(fe->getValue().toDouble(), fpreg);
        }
        JS_ASSERT_IF(a->returnSet, fpreg == a->returnRegister.fpreg());
        a->returnRegister = fpreg;
    } else {
        /* General-purpose payload return; same protocol as the double case. */
        Registers mask(a->returnSet
                       ? Registers::maskReg(a->returnRegister)
                       : Registers::AvailRegs);
        RegisterID reg;
        if (fe && !fe->isConstant()) {
            reg = frame.tempRegInMaskForData(fe, mask.freeMask).reg();
            frame.syncAndForgetFe(fe, true);
            frame.takeReg(reg);
        } else {
            reg = frame.allocReg(mask.freeMask).reg();
            /* A missing entry means an implicit |return undefined|. */
            Value val = fe ? fe->getValue() : UndefinedValue();
            masm.loadValuePayload(val, reg);
        }
        JS_ASSERT_IF(a->returnSet, reg == a->returnRegister.reg());
        a->returnRegister = reg;
    }

    a->returnSet = true;
    if (a->exitState)
        a->exitState->setUnassigned(a->returnRegister);
}
3797 :
/*
 * Compile a return from the current script (JSOP_RETURN/JSOP_STOP). |fe| is
 * the explicit return value at the top of the stack, or NULL for an implicit
 * undefined return. Handles the debug-mode epilogue, inline-frame returns,
 * and activation-object teardown for the outermost frame.
 */
void
mjit::Compiler::emitReturn(FrameEntry *fe)
{
    JS_ASSERT_IF(!script->function(), JSOp(*PC) == JSOP_STOP);

    /* Only the top of the stack can be returned. */
    JS_ASSERT_IF(fe, fe == frame.peek(-1));

    if (debugMode() || Probes::callTrackingActive(cx)) {
        /* If the return value isn't in the frame's rval slot, move it there. */
        if (fe) {
            frame.storeTo(fe, Address(JSFrameReg, StackFrame::offsetOfReturnValue()), true);

            /* Set the frame flag indicating it's there. */
            RegisterID reg = frame.allocReg();
            masm.load32(FrameFlagsAddress(), reg);
            masm.or32(Imm32(StackFrame::HAS_RVAL), reg);
            masm.store32(reg, FrameFlagsAddress());
            frame.freeReg(reg);

            /* Use the frame's return value when generating further code. */
            fe = NULL;
        }

        /* Notify the debugger of the frame's completion. */
        prepareStubCall(Uses(0));
        INLINE_STUBCALL(stubs::ScriptDebugEpilogue, REJOIN_RESUME);
    }

    if (a != outer) {
        /*
         * Returning from an inlined script. The checks we do for inlineability
         * and recompilation triggered by args object construction ensure that
         * there can't be an arguments or call object.
         */

        if (a->needReturnValue)
            emitInlineReturnValue(fe);

        if (a->exitState) {
            /*
             * Restore the register state to reflect that at the original call,
             * modulo entries which will be popped once the call finishes and any
             * entry which will be clobbered by the return value register.
             */
            frame.syncForAllocation(a->exitState, true, Uses(0));
        }

        /*
         * Simple tests to see if we are at the end of the script and will
         * fallthrough after the script body finishes, thus won't need to jump.
         */
        bool endOfScript =
            (JSOp(*PC) == JSOP_STOP) ||
            (JSOp(*PC) == JSOP_RETURN &&
             (JSOp(PC[JSOP_RETURN_LENGTH]) == JSOP_STOP &&
              !analysis->maybeCode(PC + JSOP_RETURN_LENGTH)));
        if (!endOfScript)
            a->returnJumps->append(masm.jump());

        /* The chosen return register is free for reuse after this point. */
        if (a->returnSet)
            frame.freeReg(a->returnRegister);
        return;
    }

    /*
     * Outside the mjit, activation objects (call objects and arguments objects) are put
     * by ContextStack::pop* members. For JSOP_RETURN, the interpreter only calls
     * popInlineFrame if fp != entryFrame since the VM protocol is that Invoke/Execute are
     * responsible for pushing/popping the initial frame. However, an mjit function
     * epilogue doesn't treat the initial StackFrame of its VMFrame specially: it always
     * puts activation objects. And furthermore, if the last mjit frame throws, the mjit
     * does *not* put the activation objects. So we can't assume any particular state of
     * puttedness upon exit from the mjit.
     *
     * To avoid double-putting, EnterMethodJIT calls updateEpilogueFlags to clear the
     * entry frame's hasArgsObj() and hasCallObj() flags if the given objects have already
     * been put.
     */
    if (script->function()) {
        types::TypeScriptNesting *nesting = script->nesting();
        if (script->function()->isHeavyweight() || (nesting && nesting->children)) {
            /* Always needs the epilogue stub; no point checking flags inline. */
            prepareStubCall(Uses(fe ? 1 : 0));
            INLINE_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
        } else {
            /* if (hasCallObj() || hasArgsObj()) */
            Jump putObjs = masm.branchTest32(Assembler::NonZero,
                                             Address(JSFrameReg, StackFrame::offsetOfFlags()),
                                             Imm32(StackFrame::HAS_CALL_OBJ | StackFrame::HAS_ARGS_OBJ));
            stubcc.linkExit(putObjs, Uses(frame.frameSlots()));

            stubcc.leave();
            OOL_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);

            /* The OOL path returns directly after the epilogue stub. */
            emitReturnValue(&stubcc.masm, fe);
            emitFinalReturn(stubcc.masm);

            /*
             * Do frame count balancing inline for inner functions in a nesting
             * with no children of their own.
             */
            if (nesting)
                masm.sub32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
        }
    }

    emitReturnValue(&masm, fe);
    emitFinalReturn(masm);

    /*
     * After we've placed the call object, all tracked state can be
     * thrown away. This will happen anyway because the next live opcode (if
     * any) must have an incoming edge. It's an optimization to throw it away
     * early - the tracker won't be spilled on further exits or join points.
     */
    frame.discardFrame();
}
3914 :
/*
 * Sync all frame entries to memory and kill every volatile register before a
 * stub call, keeping |uses| entries at the top of the stack live.
 */
void
mjit::Compiler::prepareStubCall(Uses uses)
{
    JaegerSpew(JSpew_Insns, " ---- STUB CALL, SYNCING FRAME ---- \n");
    frame.syncAndKill(Registers(Registers::TempAnyRegs), uses);
    JaegerSpew(JSpew_Insns, " ---- FRAME SYNCING DONE ---- \n");
}
3922 :
/*
 * Emit a fallible VM call to stub function |ptr|, returning the Call for
 * later patching. |pinline| (may be NULL) receives a patchable pointer used
 * for inline-frame bookkeeping.
 */
JSC::MacroAssembler::Call
mjit::Compiler::emitStubCall(void *ptr, DataLabelPtr *pinline)
{
    JaegerSpew(JSpew_Insns, " ---- CALLING STUB ---- \n");

    /* Bump the per-PC stub-call counter when counters are enabled. */
    masm.bumpStubCounter(script, PC, Registers::tempCallReg());

    Call cl = masm.fallibleVMCall(cx->typeInferenceEnabled(),
                                  ptr, outerPC(), pinline, frame.totalDepth());
    JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
    return cl;
}
3935 :
/*
 * Emit an inline check of the runtime's interrupt flag, with an out-of-line
 * path that syncs the frame and calls the Interrupt stub when it is set.
 */
void
mjit::Compiler::interruptCheckHelper()
{
    Jump jump;
    if (cx->runtime->gcZeal() == js::gc::ZealVerifierValue) {
        /* For barrier verification, always take the interrupt so we can verify. */
        jump = masm.jump();
    } else {
        void *interrupt = (void*) &cx->runtime->interrupt;
#if defined(JS_CPU_X86) || defined(JS_CPU_ARM) || defined(JS_CPU_MIPS)
        jump = masm.branch32(Assembler::NotEqual, AbsoluteAddress(interrupt), Imm32(0));
#else
        /* Handle processors that can't load from absolute addresses. */
        RegisterID reg = frame.allocReg();
        masm.move(ImmPtr(interrupt), reg);
        jump = masm.branchTest32(Assembler::NonZero, Address(reg, 0));
        frame.freeReg(reg);
#endif
    }

    stubcc.linkExitDirect(jump, stubcc.masm.label());

    /* OOL path: sync the frame, pass the current PC, and take the interrupt. */
    frame.sync(stubcc.masm, Uses(0));
    stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
    OOL_STUBCALL(stubs::Interrupt, REJOIN_RESUME);
    stubcc.rejoin(Changes(0));
}
3963 :
/*
 * Emit a use-count increment and a check that triggers recompilation (for
 * call inlining) once the script has run enough times. No-op unless the
 * script is a candidate: not already inlining, not in debug mode, has a
 * global, makes function calls, and type inference is enabled.
 */
void
mjit::Compiler::recompileCheckHelper()
{
    if (inlining() || debugMode() || !globalObj ||
        !analysis->hasFunctionCalls() || !cx->typeInferenceEnabled()) {
        return;
    }

    size_t *addr = script->addressOfUseCount();
    masm.add32(Imm32(1), AbsoluteAddress(addr));
#if defined(JS_CPU_X86) || defined(JS_CPU_ARM)
    Jump jump = masm.branch32(Assembler::GreaterThanOrEqual, AbsoluteAddress(addr),
                              Imm32(USES_BEFORE_INLINING));
#else
    /* Handle processors that can't load from absolute addresses. */
    RegisterID reg = frame.allocReg();
    masm.move(ImmPtr(addr), reg);
    Jump jump = masm.branch32(Assembler::GreaterThanOrEqual, Address(reg, 0),
                              Imm32(USES_BEFORE_INLINING));
    frame.freeReg(reg);
#endif
    stubcc.linkExit(jump, Uses(0));
    stubcc.leave();

    /* OOL path: request recompilation with inlining enabled, then resume. */
    OOL_STUBCALL(stubs::RecompileForInline, REJOIN_RESUME);
    stubcc.rejoin(Changes(0));
}
3991 :
3992 : void
3993 291190 : mjit::Compiler::addReturnSite()
3994 : {
3995 291190 : InternalCallSite site(masm.distanceOf(masm.label()), a->inlineIndex, PC,
3996 582380 : REJOIN_SCRIPTED, false);
3997 291190 : addCallSite(site);
3998 291190 : masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfPrev()), JSFrameReg);
3999 291190 : }
4000 :
/*
 * Compile a call (or |new|) without a call IC: invoke the Uncached{Call,New}
 * stub, jump into the returned code if the callee compiled, and rejoin with
 * the pushed return value. Used in debug mode and for new-type constructors.
 */
void
mjit::Compiler::emitUncachedCall(uint32_t argc, bool callingNew)
{
    CallPatchInfo callPatch;

    RegisterID r0 = Registers::ReturnReg;
    VoidPtrStubUInt32 stub = callingNew ? stubs::UncachedNew : stubs::UncachedCall;

    frame.syncAndKill(Uses(argc + 2));
    prepareStubCall(Uses(argc + 2));
    masm.move(Imm32(argc), Registers::ArgReg1);
    INLINE_STUBCALL(stub, REJOIN_CALL_PROLOGUE);

    /* The stub returns NULL in r0 if the callee was not JIT-compiled. */
    Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);

    /* Enter the callee: point JSFrameReg at the new frame, patch in ncode. */
    masm.loadPtr(FrameAddress(VMFrame::offsetOfRegsSp()), JSFrameReg);
    callPatch.hasFastNcode = true;
    callPatch.fastNcodePatch =
        masm.storePtrWithPatch(ImmPtr(NULL),
                               Address(JSFrameReg, StackFrame::offsetOfNcode()));

    masm.jump(r0);
    callPatch.joinPoint = masm.label();
    addReturnSite();

    /* Replace callee/this/args with the pushed return value. */
    frame.popn(argc + 2);

    frame.takeReg(JSReturnReg_Type);
    frame.takeReg(JSReturnReg_Data);
    frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data, knownPushedType(0));

    BarrierState barrier = testBarrier(JSReturnReg_Type, JSReturnReg_Data,
                                       /* testUndefined = */ false,
                                       /* testReturn = */ true);

    /* Interpreter-executed calls rejoin here from the OOL path. */
    stubcc.linkExitDirect(notCompiled, stubcc.masm.label());
    stubcc.rejoin(Changes(1));
    callPatches.append(callPatch);

    finishBarrier(barrier, REJOIN_FALLTHROUGH, 0);
}
4042 :
4043 : static bool
4044 294792 : IsLowerableFunCallOrApply(jsbytecode *pc)
4045 : {
4046 : #ifdef JS_MONOIC
4047 9184 : return (*pc == JSOP_FUNCALL && GET_ARGC(pc) >= 1) ||
4048 303976 : (*pc == JSOP_FUNAPPLY && GET_ARGC(pc) == 2);
4049 : #else
4050 : return false;
4051 : #endif
4052 : }
4053 :
/*
 * Emit the guards validating the f.call/f.apply speculation: the original
 * callee must be an object, a function, and exactly js_fun_call/js_fun_apply.
 * If any guard fails, fall to an out-of-line uncached SlowCall, whose rejoin
 * jump is returned through |uncachedCallSlowRejoin|.
 */
void
mjit::Compiler::checkCallApplySpeculation(uint32_t callImmArgc, uint32_t speculatedArgc,
                                          FrameEntry *origCallee, FrameEntry *origThis,
                                          MaybeRegisterID origCalleeType, RegisterID origCalleeData,
                                          MaybeRegisterID origThisType, RegisterID origThisData,
                                          Jump *uncachedCallSlowRejoin, CallPatchInfo *uncachedCallPatch)
{
    JS_ASSERT(IsLowerableFunCallOrApply(PC));

    /* Pick a scratch register disjoint from the pinned callee/this registers. */
    RegisterID temp;
    Registers tempRegs(Registers::AvailRegs);
    if (origCalleeType.isSet())
        tempRegs.takeReg(origCalleeType.reg());
    tempRegs.takeReg(origCalleeData);
    if (origThisType.isSet())
        tempRegs.takeReg(origThisType.reg());
    tempRegs.takeReg(origThisData);
    temp = tempRegs.takeAnyReg().reg();

    /*
     * if (origCallee.isObject() &&
     *     origCallee.toObject().isFunction &&
     *     origCallee.toObject().toFunction() == js_fun_{call,apply})
     */
    MaybeJump isObj;
    if (origCalleeType.isSet())
        isObj = masm.testObject(Assembler::NotEqual, origCalleeType.reg());
    Jump isFun = masm.testFunction(Assembler::NotEqual, origCalleeData, temp);
    Native native = *PC == JSOP_FUNCALL ? js_fun_call : js_fun_apply;
    Jump isNative = masm.branchPtr(Assembler::NotEqual,
                                   Address(origCalleeData, JSFunction::offsetOfNativeOrScript()),
                                   ImmPtr(JS_FUNC_TO_DATA_PTR(void *, native)));

    /*
     * If speculation fails, we can't use the ic, since it is compiled on the
     * assumption that speculation succeeds. Instead, just do an uncached call.
     */
    {
        if (isObj.isSet())
            stubcc.linkExitDirect(isObj.getJump(), stubcc.masm.label());
        stubcc.linkExitDirect(isFun, stubcc.masm.label());
        stubcc.linkExitDirect(isNative, stubcc.masm.label());

        /* With apply tricks, the lazily-created args object adds a stack slot. */
        int32_t frameDepthAdjust;
        if (applyTricks == LazyArgsObj) {
            OOL_STUBCALL(stubs::Arguments, REJOIN_RESUME);
            frameDepthAdjust = +1;
        } else {
            frameDepthAdjust = 0;
        }

        stubcc.masm.move(Imm32(callImmArgc), Registers::ArgReg1);
        JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
        OOL_STUBCALL_LOCAL_SLOTS(JS_FUNC_TO_DATA_PTR(void *, stubs::SlowCall),
                                 REJOIN_FALLTHROUGH, frame.totalDepth() + frameDepthAdjust);
        JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");

        /*
         * inlineCallHelper will link uncachedCallSlowRejoin to the join point
         * at the end of the ic. At that join point, the return value of the
         * call is assumed to be in registers, so load them before jumping.
         */
        JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
        Address rval = frame.addressOf(origCallee); /* vp[0] == rval */
        if (knownPushedType(0) == JSVAL_TYPE_DOUBLE)
            stubcc.masm.ensureInMemoryDouble(rval);
        stubcc.masm.loadValueAsComponents(rval, JSReturnReg_Type, JSReturnReg_Data);
        *uncachedCallSlowRejoin = stubcc.masm.jump();
        JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");
    }

    /*
     * For simplicity, we don't statically specialize calls to
     * ic::SplatApplyArgs based on applyTricks. Rather, this state is
     * communicated dynamically through the VMFrame.
     */
    if (*PC == JSOP_FUNAPPLY) {
        masm.store32(Imm32(applyTricks == LazyArgsObj),
                     FrameAddress(VMFrame::offsetOfLazyArgsObj()));
    }
}
4135 :
4136 : /* This predicate must be called before the current op mutates the FrameState. */
4137 : bool
4138 2900 : mjit::Compiler::canUseApplyTricks()
4139 : {
4140 2900 : JS_ASSERT(*PC == JSOP_ARGUMENTS);
4141 2900 : jsbytecode *nextpc = PC + JSOP_ARGUMENTS_LENGTH;
4142 : return *nextpc == JSOP_FUNAPPLY &&
4143 496 : IsLowerableFunCallOrApply(nextpc) &&
4144 496 : !analysis->jumpTarget(nextpc) &&
4145 489 : !debugMode() &&
4146 252 : !a->parent &&
4147 4633 : bytecodeInChunk(nextpc);
4148 : }
4149 :
4150 : /* See MonoIC.cpp, CallCompiler for more information on call ICs. */
4151 : bool
4152 291493 : mjit::Compiler::inlineCallHelper(uint32_t callImmArgc, bool callingNew, FrameSize &callFrameSize)
4153 : {
4154 : int32_t speculatedArgc;
4155 291493 : if (applyTricks == LazyArgsObj) {
4156 252 : frame.pop();
4157 252 : speculatedArgc = 1;
4158 : } else {
4159 : /*
4160 : * Check for interrupts on function call. We don't do this for lazy
4161 : * arguments objects as the interrupt may kick this frame into the
4162 : * interpreter, which doesn't know about the apply tricks. Instead, we
4163 : * do the interrupt check at the start of the JSOP_ARGUMENTS.
4164 : */
4165 291241 : interruptCheckHelper();
4166 :
4167 291241 : speculatedArgc = callImmArgc;
4168 : }
4169 :
4170 291493 : FrameEntry *origCallee = frame.peek(-(speculatedArgc + 2));
4171 291493 : FrameEntry *origThis = frame.peek(-(speculatedArgc + 1));
4172 :
4173 : /*
4174 : * 'this' does not need to be synced for constructing. :FIXME: is it
4175 : * possible that one of the arguments is directly copying the 'this'
4176 : * entry (something like 'new x.f(x)')?
4177 : */
4178 291493 : if (callingNew) {
4179 21243 : frame.discardFe(origThis);
4180 :
4181 : /*
4182 : * If inference is enabled, the 'this' value of the pushed frame always
4183 : * needs to be coherent. If a GC gets triggered before the callee can
4184 : * fill in the slot (i.e. the GC happens on constructing the 'new'
4185 : * object or the call object for a heavyweight callee), it needs to be
4186 : * able to read the 'this' value to tell whether newScript constraints
4187 : * will need to be regenerated afterwards.
4188 : */
4189 21243 : if (cx->typeInferenceEnabled())
4190 8853 : masm.storeValue(NullValue(), frame.addressOf(origThis));
4191 : }
4192 :
4193 291493 : if (!cx->typeInferenceEnabled()) {
4194 171676 : CompileStatus status = callArrayBuiltin(callImmArgc, callingNew);
4195 171676 : if (status != Compile_InlineAbort)
4196 303 : return (status == Compile_Okay);
4197 : }
4198 :
4199 : /*
4200 : * From the presence of JSOP_FUN{CALL,APPLY}, we speculate that we are
4201 : * going to call js_fun_{call,apply}. Normally, this call would go through
4202 : * js::Invoke to ultimately call 'this'. We can do much better by having
4203 : * the callIC cache and call 'this' directly. However, if it turns out that
4204 : * we are not actually calling js_fun_call, the callIC must act as normal.
4205 : *
4206 : * Note: do *NOT* use type information or inline state in any way when
4207 : * deciding whether to lower a CALL or APPLY. The stub calls here store
4208 : * their return values in a different slot, so when recompiling we need
4209 : * to go down the exact same path.
4210 : */
4211 291190 : bool lowerFunCallOrApply = IsLowerableFunCallOrApply(PC);
4212 :
4213 291190 : bool newType = callingNew && cx->typeInferenceEnabled() && types::UseNewType(cx, script, PC);
4214 :
4215 : #ifdef JS_MONOIC
4216 291190 : if (debugMode() || newType) {
4217 : #endif
4218 143831 : if (applyTricks == LazyArgsObj) {
4219 : /* frame.pop() above reset us to pre-JSOP_ARGUMENTS state */
4220 0 : jsop_arguments(REJOIN_RESUME);
4221 0 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
4222 : }
4223 143831 : emitUncachedCall(callImmArgc, callingNew);
4224 143831 : applyTricks = NoApplyTricks;
4225 143831 : return true;
4226 : #ifdef JS_MONOIC
4227 : }
4228 :
4229 147359 : frame.forgetMismatchedObject(origCallee);
4230 147359 : if (lowerFunCallOrApply)
4231 3106 : frame.forgetMismatchedObject(origThis);
4232 :
4233 : /* Initialized by both branches below. */
4234 147359 : CallGenInfo callIC;
4235 147359 : CallPatchInfo callPatch;
4236 147359 : MaybeRegisterID icCalleeType; /* type to test for function-ness */
4237 : RegisterID icCalleeData; /* data to call */
4238 147359 : Address icRvalAddr; /* return slot on slow-path rejoin */
4239 :
4240 : /*
4241 : * IC space must be reserved (using RESERVE_IC_SPACE or RESERVE_OOL_SPACE) between the
4242 : * following labels (as used in finishThisUp):
4243 : * - funGuard -> hotJump
4244 : * - funGuard -> joinPoint
4245 : * - funGuard -> hotPathLabel
4246 : * - slowPathStart -> oolCall
4247 : * - slowPathStart -> oolJump
4248 : * - slowPathStart -> icCall
4249 : * - slowPathStart -> slowJoinPoint
4250 : * Because the call ICs are fairly long (compared to PICs), we don't reserve the space in each
4251 : * path until the first usage of funGuard (for the in-line path) or slowPathStart (for the
4252 : * out-of-line path).
4253 : */
4254 :
4255 : /* Initialized only on lowerFunCallOrApply branch. */
4256 147359 : Jump uncachedCallSlowRejoin;
4257 147359 : CallPatchInfo uncachedCallPatch;
4258 :
4259 : {
4260 147359 : MaybeRegisterID origCalleeType, maybeOrigCalleeData;
4261 : RegisterID origCalleeData;
4262 :
4263 : /* Get the callee in registers. */
4264 147359 : frame.ensureFullRegs(origCallee, &origCalleeType, &maybeOrigCalleeData);
4265 147359 : origCalleeData = maybeOrigCalleeData.reg();
4266 294718 : PinRegAcrossSyncAndKill p1(frame, origCalleeData), p2(frame, origCalleeType);
4267 :
4268 147359 : if (lowerFunCallOrApply) {
4269 3106 : MaybeRegisterID origThisType, maybeOrigThisData;
4270 : RegisterID origThisData;
4271 : {
4272 : /* Get thisv in registers. */
4273 3106 : frame.ensureFullRegs(origThis, &origThisType, &maybeOrigThisData);
4274 3106 : origThisData = maybeOrigThisData.reg();
4275 6212 : PinRegAcrossSyncAndKill p3(frame, origThisData), p4(frame, origThisType);
4276 :
4277 : /* Leaves pinned regs untouched. */
4278 3106 : frame.syncAndKill(Uses(speculatedArgc + 2));
4279 : }
4280 :
4281 : checkCallApplySpeculation(callImmArgc, speculatedArgc,
4282 : origCallee, origThis,
4283 : origCalleeType, origCalleeData,
4284 : origThisType, origThisData,
4285 3106 : &uncachedCallSlowRejoin, &uncachedCallPatch);
4286 :
4287 3106 : icCalleeType = origThisType;
4288 3106 : icCalleeData = origThisData;
4289 3106 : icRvalAddr = frame.addressOf(origThis);
4290 :
4291 : /*
4292 : * For f.call(), since we compile the ic under the (checked)
4293 : * assumption that call == js_fun_call, we still have a static
4294 : * frame size. For f.apply(), the frame size depends on the dynamic
4295 : * length of the array passed to apply.
4296 : */
4297 3106 : if (*PC == JSOP_FUNCALL)
4298 1436 : callIC.frameSize.initStatic(frame.totalDepth(), speculatedArgc - 1);
4299 : else
4300 1670 : callIC.frameSize.initDynamic();
4301 : } else {
4302 : /* Leaves pinned regs untouched. */
4303 144253 : frame.syncAndKill(Uses(speculatedArgc + 2));
4304 :
4305 144253 : icCalleeType = origCalleeType;
4306 144253 : icCalleeData = origCalleeData;
4307 144253 : icRvalAddr = frame.addressOf(origCallee);
4308 144253 : callIC.frameSize.initStatic(frame.totalDepth(), speculatedArgc);
4309 : }
4310 : }
4311 :
4312 147359 : callFrameSize = callIC.frameSize;
4313 :
4314 147359 : callIC.typeMonitored = monitored(PC) || hasTypeBarriers(PC);
4315 :
4316 : /* Test the type if necessary. Failing this always takes a really slow path. */
4317 147359 : MaybeJump notObjectJump;
4318 147359 : if (icCalleeType.isSet())
4319 111420 : notObjectJump = masm.testObject(Assembler::NotEqual, icCalleeType.reg());
4320 :
4321 : /*
4322 : * For an optimized apply, keep icCalleeData in a callee-saved register for
4323 : * the subsequent ic::SplatApplyArgs call.
4324 : */
4325 147359 : Registers tempRegs(Registers::AvailRegs);
4326 147359 : if (callIC.frameSize.isDynamic() && !Registers::isSaved(icCalleeData)) {
4327 1365 : RegisterID x = tempRegs.takeAnyReg(Registers::SavedRegs).reg();
4328 1365 : masm.move(icCalleeData, x);
4329 1365 : icCalleeData = x;
4330 : } else {
4331 145994 : tempRegs.takeReg(icCalleeData);
4332 : }
4333 :
4334 : /* Reserve space just before initialization of funGuard. */
4335 : RESERVE_IC_SPACE(masm);
4336 :
4337 : /*
4338 : * Guard on the callee identity. This misses on the first run. If the
4339 : * callee is scripted, compiled/compilable, and argc == nargs, then this
4340 : * guard is patched, and the compiled code address is baked in.
4341 : */
4342 147359 : Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, icCalleeData, callIC.funGuard);
4343 147359 : callIC.funJump = j;
4344 :
4345 : /* Reserve space just before initialization of slowPathStart. */
4346 : RESERVE_OOL_SPACE(stubcc.masm);
4347 :
4348 147359 : Jump rejoin1, rejoin2;
4349 : {
4350 : RESERVE_OOL_SPACE(stubcc.masm);
4351 147359 : stubcc.linkExitDirect(j, stubcc.masm.label());
4352 147359 : callIC.slowPathStart = stubcc.masm.label();
4353 :
4354 147359 : RegisterID tmp = tempRegs.takeAnyReg().reg();
4355 :
4356 : /*
4357 : * Test if the callee is even a function. If this doesn't match, we
4358 : * take a _really_ slow path later.
4359 : */
4360 147359 : Jump notFunction = stubcc.masm.testFunction(Assembler::NotEqual, icCalleeData, tmp);
4361 :
4362 : /* Test if the function is scripted. */
4363 147359 : stubcc.masm.load16(Address(icCalleeData, offsetof(JSFunction, flags)), tmp);
4364 147359 : stubcc.masm.and32(Imm32(JSFUN_KINDMASK), tmp);
4365 147359 : Jump isNative = stubcc.masm.branch32(Assembler::Below, tmp, Imm32(JSFUN_INTERPRETED));
4366 147359 : tempRegs.putReg(tmp);
4367 :
4368 : /*
4369 : * N.B. After this call, the frame will have a dynamic frame size.
4370 : * Check after the function is known not to be a native so that the
4371 : * catch-all/native path has a static depth.
4372 : */
4373 147359 : if (callIC.frameSize.isDynamic())
4374 1670 : OOL_STUBCALL(ic::SplatApplyArgs, REJOIN_CALL_SPLAT);
4375 :
4376 : /*
4377 : * No-op jump that gets patched by ic::New/Call to the stub generated
4378 : * by generateFullCallStub.
4379 : */
4380 147359 : Jump toPatch = stubcc.masm.jump();
4381 147359 : toPatch.linkTo(stubcc.masm.label(), &stubcc.masm);
4382 147359 : callIC.oolJump = toPatch;
4383 147359 : callIC.icCall = stubcc.masm.label();
4384 :
4385 147359 : RejoinState rejoinState = callIC.frameSize.rejoinState(PC, false);
4386 :
4387 : /*
4388 : * At this point the function is definitely scripted, so we try to
4389 : * compile it and patch either funGuard/funJump or oolJump. This code
4390 : * is only executed once.
4391 : */
4392 147359 : callIC.addrLabel1 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4393 147359 : void *icFunPtr = JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call);
4394 147359 : if (callIC.frameSize.isStatic()) {
4395 145689 : callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, rejoinState, frame.totalDepth());
4396 : } else {
4397 1670 : callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, rejoinState, -1);
4398 : }
4399 :
4400 147359 : callIC.funObjReg = icCalleeData;
4401 :
4402 : /*
4403 : * The IC call either returns NULL, meaning call completed, or a
4404 : * function pointer to jump to.
4405 : */
4406 : rejoin1 = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
4407 147359 : Registers::ReturnReg);
4408 147359 : if (callIC.frameSize.isStatic())
4409 145689 : stubcc.masm.move(Imm32(callIC.frameSize.staticArgc()), JSParamReg_Argc);
4410 : else
4411 1670 : stubcc.masm.load32(FrameAddress(VMFrame::offsetOfDynamicArgc()), JSParamReg_Argc);
4412 147359 : stubcc.masm.loadPtr(FrameAddress(VMFrame::offsetOfRegsSp()), JSFrameReg);
4413 147359 : callPatch.hasSlowNcode = true;
4414 : callPatch.slowNcodePatch =
4415 : stubcc.masm.storePtrWithPatch(ImmPtr(NULL),
4416 147359 : Address(JSFrameReg, StackFrame::offsetOfNcode()));
4417 147359 : stubcc.masm.jump(Registers::ReturnReg);
4418 :
4419 :
4420 :
4421 : /*
4422 : * This ool path is the catch-all for everything but scripted function
4423 : * callees. For native functions, ic::NativeNew/NativeCall will repatch
4424 :      * funGuard/funJump with a fast call stub. All other cases
4425 : * (non-function callable objects and invalid callees) take the slow
4426 : * path through js::Invoke.
4427 : */
4428 147359 : if (notObjectJump.isSet())
4429 111420 : stubcc.linkExitDirect(notObjectJump.get(), stubcc.masm.label());
4430 147359 : notFunction.linkTo(stubcc.masm.label(), &stubcc.masm);
4431 147359 : isNative.linkTo(stubcc.masm.label(), &stubcc.masm);
4432 :
4433 147359 : callIC.addrLabel2 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4434 147359 : OOL_STUBCALL(callingNew ? ic::NativeNew : ic::NativeCall, rejoinState);
4435 :
4436 147359 : rejoin2 = stubcc.masm.jump();
4437 : }
4438 :
4439 : /*
4440 : * If the call site goes to a closure over the same function, it will
4441 : * generate an out-of-line stub that joins back here.
4442 : */
4443 147359 : callIC.hotPathLabel = masm.label();
4444 :
4445 147359 : uint32_t flags = 0;
4446 147359 : if (callingNew)
4447 10765 : flags |= StackFrame::CONSTRUCTING;
4448 :
4449 147359 : InlineFrameAssembler inlFrame(masm, callIC, flags);
4450 147359 : callPatch.hasFastNcode = true;
4451 147359 : callPatch.fastNcodePatch = inlFrame.assemble(NULL, PC);
4452 :
4453 147359 : callIC.hotJump = masm.jump();
4454 147359 : callIC.joinPoint = callPatch.joinPoint = masm.label();
4455 147359 : callIC.callIndex = callSites.length();
4456 147359 : addReturnSite();
4457 147359 : if (lowerFunCallOrApply)
4458 3106 : uncachedCallPatch.joinPoint = callIC.joinPoint;
4459 :
4460 : /*
4461 : * We've placed hotJump, joinPoint and hotPathLabel, and no other labels are located by offset
4462 : * in the in-line path so we can check the IC space now.
4463 : */
4464 : CHECK_IC_SPACE();
4465 :
4466 147359 : JSValueType type = knownPushedType(0);
4467 :
4468 147359 : frame.popn(speculatedArgc + 2);
4469 147359 : frame.takeReg(JSReturnReg_Type);
4470 147359 : frame.takeReg(JSReturnReg_Data);
4471 147359 : frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data, type);
4472 :
4473 : BarrierState barrier = testBarrier(JSReturnReg_Type, JSReturnReg_Data,
4474 : /* testUndefined = */ false,
4475 147359 : /* testReturn = */ true);
4476 :
4477 : /*
4478 : * Now that the frame state is set, generate the rejoin path. Note that, if
4479 : * lowerFunCallOrApply, we cannot just call 'stubcc.rejoin' since the return
4480 : * value has been placed at vp[1] which is not the stack address associated
4481 : * with frame.peek(-1).
4482 : */
4483 147359 : callIC.slowJoinPoint = stubcc.masm.label();
4484 147359 : rejoin1.linkTo(callIC.slowJoinPoint, &stubcc.masm);
4485 147359 : rejoin2.linkTo(callIC.slowJoinPoint, &stubcc.masm);
4486 147359 : JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
4487 147359 : frame.reloadEntry(stubcc.masm, icRvalAddr, frame.peek(-1));
4488 147359 : stubcc.crossJump(stubcc.masm.jump(), masm.label());
4489 147359 : JaegerSpew(JSpew_Insns, " ---- END SLOW RESTORE CODE ---- \n");
4490 :
4491 : CHECK_OOL_SPACE();
4492 :
4493 147359 : if (lowerFunCallOrApply)
4494 3106 : stubcc.crossJump(uncachedCallSlowRejoin, masm.label());
4495 :
4496 147359 : callICs.append(callIC);
4497 147359 : callPatches.append(callPatch);
4498 147359 : if (lowerFunCallOrApply)
4499 3106 : callPatches.append(uncachedCallPatch);
4500 :
4501 147359 : finishBarrier(barrier, REJOIN_FALLTHROUGH, 0);
4502 :
4503 147359 : applyTricks = NoApplyTricks;
4504 147359 : return true;
4505 : #endif
4506 : }
4507 :
4508 : CompileStatus
4509 171676 : mjit::Compiler::callArrayBuiltin(uint32_t argc, bool callingNew)
4510 : {
4511 171676 : if (!globalObj)
4512 120564 : return Compile_InlineAbort;
4513 :
4514 51112 : if (applyTricks == LazyArgsObj)
4515 34 : return Compile_InlineAbort;
4516 :
4517 51078 : FrameEntry *origCallee = frame.peek(-((int)argc + 2));
4518 51078 : if (origCallee->isNotType(JSVAL_TYPE_OBJECT))
4519 10 : return Compile_InlineAbort;
4520 :
4521 51068 : if (frame.extra(origCallee).name != cx->runtime->atomState.classAtoms[JSProto_Array])
4522 50725 : return Compile_InlineAbort;
4523 :
4524 : JSObject *arrayObj;
4525 343 : if (!js_GetClassObject(cx, globalObj, JSProto_Array, &arrayObj))
4526 0 : return Compile_Error;
4527 :
4528 343 : JSObject *arrayProto = globalObj->global().getOrCreateArrayPrototype(cx);
4529 343 : if (!arrayProto)
4530 0 : return Compile_Error;
4531 :
4532 343 : if (argc > 1)
4533 38 : return Compile_InlineAbort;
4534 305 : FrameEntry *origArg = (argc == 1) ? frame.peek(-1) : NULL;
4535 305 : if (origArg) {
4536 174 : if (origArg->isNotType(JSVAL_TYPE_INT32))
4537 2 : return Compile_InlineAbort;
4538 172 : if (origArg->isConstant() && origArg->getValue().toInt32() < 0)
4539 0 : return Compile_InlineAbort;
4540 : }
4541 :
4542 303 : if (!origCallee->isTypeKnown()) {
4543 303 : Jump notObject = frame.testObject(Assembler::NotEqual, origCallee);
4544 303 : stubcc.linkExit(notObject, Uses(argc + 2));
4545 : }
4546 :
4547 303 : RegisterID reg = frame.tempRegForData(origCallee);
4548 303 : Jump notArray = masm.branchPtr(Assembler::NotEqual, reg, ImmPtr(arrayObj));
4549 303 : stubcc.linkExit(notArray, Uses(argc + 2));
4550 :
4551 303 : int32_t knownSize = 0;
4552 303 : MaybeRegisterID sizeReg;
4553 303 : if (origArg) {
4554 172 : if (origArg->isConstant()) {
4555 103 : knownSize = origArg->getValue().toInt32();
4556 : } else {
4557 69 : if (!origArg->isTypeKnown()) {
4558 67 : Jump notInt = frame.testInt32(Assembler::NotEqual, origArg);
4559 67 : stubcc.linkExit(notInt, Uses(argc + 2));
4560 : }
4561 69 : sizeReg = frame.tempRegForData(origArg);
4562 69 : Jump belowZero = masm.branch32(Assembler::LessThan, sizeReg.reg(), Imm32(0));
4563 69 : stubcc.linkExit(belowZero, Uses(argc + 2));
4564 : }
4565 : } else {
4566 131 : knownSize = 0;
4567 : }
4568 :
4569 303 : stubcc.leave();
4570 303 : stubcc.masm.move(Imm32(argc), Registers::ArgReg1);
4571 303 : OOL_STUBCALL(callingNew ? stubs::SlowNew : stubs::SlowCall, REJOIN_FALLTHROUGH);
4572 :
4573 : {
4574 606 : PinRegAcrossSyncAndKill p1(frame, sizeReg);
4575 303 : frame.popn(argc + 2);
4576 303 : frame.syncAndKill(Uses(0));
4577 : }
4578 :
4579 303 : prepareStubCall(Uses(0));
4580 303 : masm.storePtr(ImmPtr(arrayProto), FrameAddress(offsetof(VMFrame, scratch)));
4581 303 : if (sizeReg.isSet())
4582 69 : masm.move(sizeReg.reg(), Registers::ArgReg1);
4583 : else
4584 234 : masm.move(Imm32(knownSize), Registers::ArgReg1);
4585 303 : INLINE_STUBCALL(stubs::NewDenseUnallocatedArray, REJOIN_PUSH_OBJECT);
4586 :
4587 303 : frame.takeReg(Registers::ReturnReg);
4588 303 : frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
4589 303 : frame.forgetType(frame.peek(-1));
4590 :
4591 303 : stubcc.rejoin(Changes(1));
4592 :
4593 303 : return Compile_Okay;
4594 : }
4595 :
4596 : /* Maximum number of calls we will inline at the same site. */
4597 : static const uint32_t INLINE_SITE_LIMIT = 5;
4598 :
4599 : CompileStatus
4600 59592 : mjit::Compiler::inlineScriptedFunction(uint32_t argc, bool callingNew)
4601 : {
4602 59592 : JS_ASSERT(inlining());
4603 :
4604 : /* We already know which frames we are inlining at each PC, so scan the list of inline frames. */
4605 59592 : bool calleeMultipleReturns = false;
4606 119184 : Vector<JSScript *> inlineCallees(CompilerAllocPolicy(cx, *this));
4607 280640 : for (unsigned i = 0; i < ssa.numFrames(); i++) {
4608 221048 : if (ssa.iterFrame(i).parent == a->inlineIndex && ssa.iterFrame(i).parentpc == PC) {
4609 2995 : JSScript *script = ssa.iterFrame(i).script;
4610 2995 : inlineCallees.append(script);
4611 2995 : if (script->analysis()->numReturnSites() > 1)
4612 193 : calleeMultipleReturns = true;
4613 : }
4614 : }
4615 :
4616 59592 : if (inlineCallees.empty())
4617 56765 : return Compile_InlineAbort;
4618 :
4619 2827 : JS_ASSERT(!monitored(PC));
4620 :
4621 : /*
4622 : * Remove all dead entries from the frame's tracker. We will not recognize
4623 : * them as dead after pushing the new frame.
4624 : */
4625 2827 : frame.pruneDeadEntries();
4626 :
4627 2827 : RegisterAllocation *exitState = NULL;
4628 2827 : if (inlineCallees.length() > 1 || calleeMultipleReturns) {
4629 : /*
4630 : * Multiple paths through the callees, get a register allocation for
4631 : * the various incoming edges.
4632 : */
4633 344 : exitState = frame.computeAllocation(PC + JSOP_CALL_LENGTH);
4634 : }
4635 :
4636 : /*
4637 : * If this is a polymorphic callsite, get a register for the callee too.
4638 : * After this, do not touch the register state in the current frame until
4639 : * stubs for all callees have been generated.
4640 : */
4641 2827 : FrameEntry *origCallee = frame.peek(-((int)argc + 2));
4642 2827 : FrameEntry *entrySnapshot = NULL;
4643 2827 : MaybeRegisterID calleeReg;
4644 2827 : if (inlineCallees.length() > 1) {
4645 155 : frame.forgetMismatchedObject(origCallee);
4646 155 : calleeReg = frame.tempRegForData(origCallee);
4647 :
4648 155 : entrySnapshot = frame.snapshotState();
4649 155 : if (!entrySnapshot)
4650 0 : return Compile_Error;
4651 : }
4652 2827 : MaybeJump calleePrevious;
4653 :
4654 2827 : JSValueType returnType = knownPushedType(0);
4655 :
4656 2827 : bool needReturnValue = JSOP_POP != (JSOp)*(PC + JSOP_CALL_LENGTH);
4657 2827 : bool syncReturnValue = needReturnValue && returnType == JSVAL_TYPE_UNKNOWN;
4658 :
4659 : /* Track register state after the call. */
4660 2827 : bool returnSet = false;
4661 2827 : AnyRegisterID returnRegister;
4662 2827 : const FrameEntry *returnEntry = NULL;
4663 :
4664 5654 : Vector<Jump, 4, CompilerAllocPolicy> returnJumps(CompilerAllocPolicy(cx, *this));
4665 :
4666 5822 : for (unsigned i = 0; i < inlineCallees.length(); i++) {
4667 2995 : if (entrySnapshot)
4668 323 : frame.restoreFromSnapshot(entrySnapshot);
4669 :
4670 2995 : JSScript *script = inlineCallees[i];
4671 : CompileStatus status;
4672 :
4673 2995 : status = pushActiveFrame(script, argc);
4674 2995 : if (status != Compile_Okay)
4675 0 : return status;
4676 :
4677 2995 : a->exitState = exitState;
4678 :
4679 : JaegerSpew(JSpew_Inlining, "inlining call to script (file \"%s\") (line \"%d\")\n",
4680 2995 : script->filename, script->lineno);
4681 :
4682 2995 : if (calleePrevious.isSet()) {
4683 168 : calleePrevious.get().linkTo(masm.label(), &masm);
4684 168 : calleePrevious = MaybeJump();
4685 : }
4686 :
4687 2995 : if (i + 1 != inlineCallees.length()) {
4688 : /* Guard on the callee, except when this object must be the callee. */
4689 168 : JS_ASSERT(calleeReg.isSet());
4690 168 : calleePrevious = masm.branchPtr(Assembler::NotEqual, calleeReg.reg(), ImmPtr(script->function()));
4691 : }
4692 :
4693 2995 : a->returnJumps = &returnJumps;
4694 2995 : a->needReturnValue = needReturnValue;
4695 2995 : a->syncReturnValue = syncReturnValue;
4696 2995 : a->returnValueDouble = returnType == JSVAL_TYPE_DOUBLE;
4697 2995 : if (returnSet) {
4698 91 : a->returnSet = true;
4699 91 : a->returnRegister = returnRegister;
4700 : }
4701 :
4702 : /*
4703 : * Update the argument frame entries in place if the callee has had an
4704 : * argument inferred as double but we are passing an int.
4705 : */
4706 2995 : ensureDoubleArguments();
4707 :
4708 2995 : markUndefinedLocals();
4709 :
4710 2995 : status = generateMethod();
4711 2995 : if (status != Compile_Okay) {
4712 0 : popActiveFrame();
4713 0 : if (status == Compile_Abort) {
4714 : /* The callee is uncompileable, mark it as uninlineable and retry. */
4715 0 : script->uninlineable = true;
4716 0 : types::MarkTypeObjectFlags(cx, script->function(),
4717 0 : types::OBJECT_FLAG_UNINLINEABLE);
4718 0 : return Compile_Retry;
4719 : }
4720 0 : return status;
4721 : }
4722 :
4723 2995 : if (needReturnValue && !returnSet) {
4724 2319 : if (a->returnSet) {
4725 2126 : returnSet = true;
4726 2126 : returnRegister = a->returnRegister;
4727 : } else {
4728 193 : returnEntry = a->returnEntry;
4729 : }
4730 : }
4731 :
4732 2995 : popActiveFrame();
4733 :
4734 2995 : if (i + 1 != inlineCallees.length())
4735 168 : returnJumps.append(masm.jump());
4736 : }
4737 :
4738 3276 : for (unsigned i = 0; i < returnJumps.length(); i++)
4739 449 : returnJumps[i].linkTo(masm.label(), &masm);
4740 :
4741 2827 : frame.popn(argc + 2);
4742 :
4743 2827 : if (entrySnapshot)
4744 155 : cx->array_delete(entrySnapshot);
4745 :
4746 2827 : if (exitState)
4747 344 : frame.discardForJoin(exitState, analysis->getCode(PC).stackDepth - (argc + 2));
4748 :
4749 2827 : if (returnSet) {
4750 2126 : frame.takeReg(returnRegister);
4751 2126 : if (returnRegister.isReg())
4752 2040 : frame.pushTypedPayload(returnType, returnRegister.reg());
4753 : else
4754 86 : frame.pushDouble(returnRegister.fpreg());
4755 701 : } else if (returnEntry) {
4756 20 : frame.pushCopyOf((FrameEntry *) returnEntry);
4757 : } else {
4758 681 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
4759 : }
4760 :
4761 : JaegerSpew(JSpew_Inlining, "finished inlining call to script (file \"%s\") (line \"%d\")\n",
4762 2827 : script->filename, script->lineno);
4763 :
4764 2827 : return Compile_Okay;
4765 : }
4766 :
4767 : /*
4768 : * This function must be called immediately after any instruction which could
4769 : * cause a new StackFrame to be pushed and could lead to a new debug trap
4770 : * being set. This includes any API callbacks and any scripted or native call.
4771 : */
4772 : void
4773 5053912 : mjit::Compiler::addCallSite(const InternalCallSite &site)
4774 : {
4775 5053912 : callSites.append(site);
4776 5053912 : }
4777 :
4778 : void
4779 1095555 : mjit::Compiler::inlineStubCall(void *stub, RejoinState rejoin, Uses uses)
4780 : {
4781 1095555 : DataLabelPtr inlinePatch;
4782 1095555 : Call cl = emitStubCall(stub, &inlinePatch);
4783 : InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC,
4784 1095555 : rejoin, false);
4785 1095555 : site.inlinePatch = inlinePatch;
4786 1095555 : if (loop && loop->generatingInvariants()) {
4787 1193 : Jump j = masm.jump();
4788 1193 : Label l = masm.label();
4789 1193 : loop->addInvariantCall(j, l, false, false, callSites.length(), uses);
4790 : }
4791 1095555 : addCallSite(site);
4792 1095555 : }
4793 :
4794 : bool
4795 245 : mjit::Compiler::compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const Value &rhs)
4796 : {
4797 245 : JS_ASSERT(lhs.isPrimitive());
4798 245 : JS_ASSERT(rhs.isPrimitive());
4799 :
4800 245 : if (lhs.isString() && rhs.isString()) {
4801 : int32_t cmp;
4802 86 : CompareStrings(cx, lhs.toString(), rhs.toString(), &cmp);
4803 86 : switch (op) {
4804 : case JSOP_LT:
4805 0 : return cmp < 0;
4806 : case JSOP_LE:
4807 0 : return cmp <= 0;
4808 : case JSOP_GT:
4809 0 : return cmp > 0;
4810 : case JSOP_GE:
4811 0 : return cmp >= 0;
4812 : case JSOP_EQ:
4813 55 : return cmp == 0;
4814 : case JSOP_NE:
4815 31 : return cmp != 0;
4816 : default:
4817 0 : JS_NOT_REACHED("NYI");
4818 : }
4819 : } else {
4820 : double ld, rd;
4821 :
4822 : /* These should be infallible w/ primitives. */
4823 159 : JS_ALWAYS_TRUE(ToNumber(cx, lhs, &ld));
4824 159 : JS_ALWAYS_TRUE(ToNumber(cx, rhs, &rd));
4825 159 : switch(op) {
4826 : case JSOP_LT:
4827 36 : return ld < rd;
4828 : case JSOP_LE:
4829 15 : return ld <= rd;
4830 : case JSOP_GT:
4831 44 : return ld > rd;
4832 : case JSOP_GE:
4833 12 : return ld >= rd;
4834 : case JSOP_EQ: /* fall through */
4835 : case JSOP_NE:
4836 : /* Special case null/undefined/void comparisons. */
4837 52 : if (lhs.isNullOrUndefined()) {
4838 4 : if (rhs.isNullOrUndefined())
4839 0 : return op == JSOP_EQ;
4840 4 : return op == JSOP_NE;
4841 : }
4842 48 : if (rhs.isNullOrUndefined())
4843 16 : return op == JSOP_NE;
4844 :
4845 : /* Normal return. */
4846 32 : return (op == JSOP_EQ) ? (ld == rd) : (ld != rd);
4847 : default:
4848 0 : JS_NOT_REACHED("NYI");
4849 : }
4850 : }
4851 :
4852 : JS_NOT_REACHED("NYI");
4853 : return false;
4854 : }
4855 :
4856 : bool
4857 262 : mjit::Compiler::constantFoldBranch(jsbytecode *target, bool taken)
4858 : {
4859 262 : if (taken) {
4860 167 : if (!frame.syncForBranch(target, Uses(0)))
4861 0 : return false;
4862 167 : Jump j = masm.jump();
4863 167 : if (!jumpAndRun(j, target))
4864 0 : return false;
4865 : } else {
4866 : /*
4867 : * Branch is never taken, but clean up any loop
4868 : * if this is a backedge.
4869 : */
4870 95 : if (target < PC && !finishLoop(target))
4871 0 : return false;
4872 : }
4873 262 : return true;
4874 : }
4875 :
4876 : bool
4877 3044 : mjit::Compiler::emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused)
4878 : {
4879 3044 : if (target)
4880 809 : frame.syncAndKillEverything();
4881 : else
4882 2235 : frame.syncAndKill(Uses(2));
4883 :
4884 3044 : prepareStubCall(Uses(2));
4885 3044 : INLINE_STUBCALL(stub, target ? REJOIN_BRANCH : REJOIN_PUSH_BOOLEAN);
4886 3044 : frame.popn(2);
4887 :
4888 3044 : if (!target) {
4889 2235 : frame.takeReg(Registers::ReturnReg);
4890 2235 : frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
4891 2235 : return true;
4892 : }
4893 :
4894 809 : JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE);
4895 : Jump j = masm.branchTest32(GetStubCompareCondition(fused), Registers::ReturnReg,
4896 809 : Registers::ReturnReg);
4897 809 : return jumpAndRun(j, target);
4898 : }
4899 :
4900 : void
4901 220 : mjit::Compiler::jsop_setprop_slow(PropertyName *name)
4902 : {
4903 220 : prepareStubCall(Uses(2));
4904 220 : masm.move(ImmPtr(name), Registers::ArgReg1);
4905 220 : INLINE_STUBCALL(STRICT_VARIANT(stubs::SetName), REJOIN_FALLTHROUGH);
4906 : JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);
4907 220 : frame.shimmy(1);
4908 220 : if (script->pcCounters)
4909 0 : bumpPropCounter(PC, OpcodeCounts::PROP_OTHER);
4910 220 : }
4911 :
4912 : void
4913 7192 : mjit::Compiler::jsop_getprop_slow(PropertyName *name, bool forPrototype)
4914 : {
4915 : /* See ::jsop_getprop */
4916 7192 : RejoinState rejoin = forPrototype ? REJOIN_THIS_PROTOTYPE : REJOIN_GETTER;
4917 :
4918 7192 : prepareStubCall(Uses(1));
4919 7192 : masm.move(ImmPtr(name), Registers::ArgReg1);
4920 7192 : INLINE_STUBCALL(forPrototype ? stubs::GetPropNoCache : stubs::GetProp, rejoin);
4921 :
4922 7192 : if (!forPrototype)
4923 7192 : testPushedType(rejoin, -1, /* ool = */ false);
4924 :
4925 7192 : frame.pop();
4926 7192 : frame.pushSynced(JSVAL_TYPE_UNKNOWN);
4927 :
4928 7192 : if (script->pcCounters)
4929 0 : bumpPropCounter(PC, OpcodeCounts::PROP_OTHER);
4930 7192 : }
4931 :
4932 : #ifdef JS_MONOIC
4933 : void
4934 408882 : mjit::Compiler::passMICAddress(GlobalNameICInfo &ic)
4935 : {
4936 408882 : ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4937 408882 : }
4938 : #endif
4939 :
4940 : #if defined JS_POLYIC
4941 : void
4942 850622 : mjit::Compiler::passICAddress(BaseICInfo *ic)
4943 : {
4944 850622 : ic->paramAddr = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
4945 850622 : }
4946 :
4947 : bool
4948 507954 : mjit::Compiler::jsop_getprop(PropertyName *name, JSValueType knownType,
4949 : bool doTypeCheck, bool forPrototype)
4950 : {
4951 507954 : FrameEntry *top = frame.peek(-1);
4952 :
4953 : /*
4954 : * Use a different rejoin for GETPROP computing the 'this' object, as we
4955 : * can't use the current bytecode within InternalInterpret to tell this is
4956 : * fetching the 'this' value.
4957 : */
4958 507954 : RejoinState rejoin = REJOIN_GETTER;
4959 507954 : if (forPrototype) {
4960 1616 : JS_ASSERT(top->isType(JSVAL_TYPE_OBJECT) &&
4961 1616 : name == cx->runtime->atomState.classPrototypeAtom);
4962 1616 : rejoin = REJOIN_THIS_PROTOTYPE;
4963 : }
4964 :
4965 : /* Handle length accesses on known strings without using a PIC. */
4966 537603 : if (name == cx->runtime->atomState.lengthAtom &&
4967 29064 : top->isType(JSVAL_TYPE_STRING) &&
4968 585 : (!cx->typeInferenceEnabled() || knownPushedType(0) == JSVAL_TYPE_INT32)) {
4969 213 : if (top->isConstant()) {
4970 2 : JSString *str = top->getValue().toString();
4971 : Value v;
4972 2 : v.setNumber(uint32_t(str->length()));
4973 2 : frame.pop();
4974 2 : frame.push(v);
4975 : } else {
4976 211 : RegisterID str = frame.ownRegForData(top);
4977 211 : masm.loadPtr(Address(str, JSString::offsetOfLengthAndFlags()), str);
4978 211 : masm.urshift32(Imm32(JSString::LENGTH_SHIFT), str);
4979 211 : frame.pop();
4980 211 : frame.pushTypedPayload(JSVAL_TYPE_INT32, str);
4981 : }
4982 213 : return true;
4983 : }
4984 :
4985 560972 : if (top->mightBeType(JSVAL_TYPE_OBJECT) &&
4986 28478 : JSOp(*PC) == JSOP_LENGTH && cx->typeInferenceEnabled() &&
4987 24753 : !hasTypeBarriers(PC) && knownPushedType(0) == JSVAL_TYPE_INT32) {
4988 : /* Check if this is an array we can make a loop invariant entry for. */
4989 7876 : if (loop && loop->generatingInvariants()) {
4990 235 : CrossSSAValue topv(a->inlineIndex, analysis->poppedValue(PC, 0));
4991 235 : FrameEntry *fe = loop->invariantLength(topv);
4992 235 : if (fe) {
4993 170 : frame.learnType(fe, JSVAL_TYPE_INT32, false);
4994 170 : frame.pop();
4995 170 : frame.pushCopyOf(fe);
4996 170 : if (script->pcCounters)
4997 0 : bumpPropCounter(PC, OpcodeCounts::PROP_STATIC);
4998 170 : return true;
4999 : }
5000 : }
5001 :
5002 7706 : types::TypeSet *types = analysis->poppedTypes(PC, 0);
5003 :
5004 : /*
5005 : * Check if we are accessing the 'length' property of a known dense array.
5006 : * Note that if the types are known to indicate dense arrays, their lengths
5007 : * must fit in an int32.
5008 : */
5009 7706 : if (!types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_DENSE_ARRAY)) {
5010 7180 : bool isObject = top->isTypeKnown();
5011 7180 : if (!isObject) {
5012 380 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
5013 380 : stubcc.linkExit(notObject, Uses(1));
5014 380 : stubcc.leave();
5015 380 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5016 380 : OOL_STUBCALL(stubs::GetProp, rejoin);
5017 380 : if (rejoin == REJOIN_GETTER)
5018 380 : testPushedType(rejoin, -1);
5019 : }
5020 7180 : RegisterID result = frame.allocReg();
5021 7180 : RegisterID reg = frame.tempRegForData(top);
5022 7180 : frame.pop();
5023 7180 : masm.loadPtr(Address(reg, JSObject::offsetOfElements()), result);
5024 7180 : masm.load32(Address(result, ObjectElements::offsetOfLength()), result);
5025 7180 : frame.pushTypedPayload(JSVAL_TYPE_INT32, result);
5026 7180 : if (script->pcCounters)
5027 0 : bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
5028 7180 : if (!isObject)
5029 380 : stubcc.rejoin(Changes(1));
5030 7180 : return true;
5031 : }
5032 :
5033 : /*
5034 : * Check if we're accessing the 'length' property of a typed array.
5035 : * The typed array length always fits in an int32.
5036 : */
5037 526 : if (!types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_TYPED_ARRAY)) {
5038 320 : bool isObject = top->isTypeKnown();
5039 320 : if (!isObject) {
5040 176 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
5041 176 : stubcc.linkExit(notObject, Uses(1));
5042 176 : stubcc.leave();
5043 176 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5044 176 : OOL_STUBCALL(stubs::GetProp, rejoin);
5045 176 : if (rejoin == REJOIN_GETTER)
5046 176 : testPushedType(rejoin, -1);
5047 : }
5048 320 : RegisterID reg = frame.copyDataIntoReg(top);
5049 320 : frame.pop();
5050 320 : masm.loadPayload(Address(reg, TypedArray::lengthOffset()), reg);
5051 320 : frame.pushTypedPayload(JSVAL_TYPE_INT32, reg);
5052 320 : if (script->pcCounters)
5053 0 : bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
5054 320 : if (!isObject)
5055 176 : stubcc.rejoin(Changes(1));
5056 320 : return true;
5057 : }
5058 :
5059 : /*
5060 : * Check if we are accessing the 'length' of the lazy arguments for the
5061 : * current frame.
5062 : */
5063 206 : if (types->isLazyArguments(cx)) {
5064 0 : frame.pop();
5065 0 : frame.pushWord(Address(JSFrameReg, StackFrame::offsetOfNumActual()), JSVAL_TYPE_INT32);
5066 0 : if (script->pcCounters)
5067 0 : bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
5068 0 : return true;
5069 : }
5070 : }
5071 :
5072 : /* If the access will definitely be fetching a particular value, nop it. */
5073 : bool testObject;
5074 : JSObject *singleton =
5075 500071 : (*PC == JSOP_GETPROP || *PC == JSOP_CALLPROP) ? pushedSingleton(0) : NULL;
5076 534555 : if (singleton && singleton->isFunction() && !hasTypeBarriers(PC) &&
5077 34484 : testSingletonPropertyTypes(top, ATOM_TO_JSID(name), &testObject)) {
5078 32442 : if (testObject) {
5079 1432 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
5080 1432 : stubcc.linkExit(notObject, Uses(1));
5081 1432 : stubcc.leave();
5082 1432 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5083 1432 : OOL_STUBCALL(stubs::GetProp, REJOIN_FALLTHROUGH);
5084 1432 : testPushedType(REJOIN_FALLTHROUGH, -1);
5085 : }
5086 :
5087 32442 : frame.pop();
5088 32442 : frame.push(ObjectValue(*singleton));
5089 :
5090 32442 : if (script->pcCounters && cx->typeInferenceEnabled())
5091 0 : bumpPropCounter(PC, OpcodeCounts::PROP_STATIC);
5092 :
5093 32442 : if (testObject)
5094 1432 : stubcc.rejoin(Changes(1));
5095 :
5096 32442 : return true;
5097 : }
5098 :
5099 : /* Check if this is a property access we can make a loop invariant entry for. */
5100 467629 : if (loop && loop->generatingInvariants() && !hasTypeBarriers(PC)) {
5101 936 : CrossSSAValue topv(a->inlineIndex, analysis->poppedValue(PC, 0));
5102 936 : if (FrameEntry *fe = loop->invariantProperty(topv, ATOM_TO_JSID(name))) {
5103 82 : if (knownType != JSVAL_TYPE_UNKNOWN && knownType != JSVAL_TYPE_DOUBLE)
5104 82 : frame.learnType(fe, knownType, false);
5105 82 : frame.pop();
5106 82 : frame.pushCopyOf(fe);
5107 82 : if (script->pcCounters)
5108 0 : bumpPropCounter(PC, OpcodeCounts::PROP_STATIC);
5109 82 : return true;
5110 : }
5111 : }
5112 :
5113 : /* If the incoming type will never PIC, take slow path. */
5114 467547 : if (top->isNotType(JSVAL_TYPE_OBJECT)) {
5115 7192 : jsop_getprop_slow(name, forPrototype);
5116 7192 : return true;
5117 : }
5118 :
5119 460355 : frame.forgetMismatchedObject(top);
5120 :
5121 : /*
5122 : * Check if we are accessing a known type which always has the property
5123 : * in a particular inline slot. Get the property directly in this case,
5124 : * without using an IC.
5125 : */
5126 460355 : jsid id = ATOM_TO_JSID(name);
5127 460355 : types::TypeSet *types = frame.extra(top).types;
5128 612357 : if (types && !types->unknownObject() &&
5129 58141 : types->getObjectCount() == 1 &&
5130 33082 : types->getTypeObject(0) != NULL &&
5131 30412 : !types->getTypeObject(0)->unknownProperties() &&
5132 30367 : id == types::MakeTypeId(cx, id)) {
5133 30347 : JS_ASSERT(!forPrototype);
5134 30347 : types::TypeObject *object = types->getTypeObject(0);
5135 30347 : types::TypeSet *propertyTypes = object->getProperty(cx, id, false);
5136 30347 : if (!propertyTypes)
5137 0 : return false;
5138 40094 : if (propertyTypes->isDefiniteProperty() &&
5139 9747 : !propertyTypes->isOwnProperty(cx, object, true)) {
5140 9737 : types->addFreeze(cx);
5141 9737 : uint32_t slot = propertyTypes->definiteSlot();
5142 9737 : bool isObject = top->isTypeKnown();
5143 9737 : if (!isObject) {
5144 2916 : Jump notObject = frame.testObject(Assembler::NotEqual, top);
5145 2916 : stubcc.linkExit(notObject, Uses(1));
5146 2916 : stubcc.leave();
5147 2916 : stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
5148 2916 : OOL_STUBCALL(stubs::GetProp, rejoin);
5149 2916 : if (rejoin == REJOIN_GETTER)
5150 2916 : testPushedType(rejoin, -1);
5151 : }
5152 9737 : RegisterID reg = frame.tempRegForData(top);
5153 9737 : frame.pop();
5154 :
5155 9737 : if (script->pcCounters)
5156 0 : bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
5157 :
5158 9737 : Address address(reg, JSObject::getFixedSlotOffset(slot));
5159 9737 : BarrierState barrier = pushAddressMaybeBarrier(address, knownType, false);
5160 9737 : if (!isObject)
5161 2916 : stubcc.rejoin(Changes(1));
5162 9737 : finishBarrier(barrier, rejoin, 0);
5163 :
5164 9737 : return true;
5165 : }
5166 : }
5167 :
5168 : /* Check for a dynamic dispatch. */
5169 450618 : if (cx->typeInferenceEnabled()) {
5170 55422 : if (*PC == JSOP_CALLPROP && jsop_getprop_dispatch(name))
5171 6989 : return true;
5172 : }
5173 :
5174 443629 : if (script->pcCounters)
5175 0 : bumpPropCounter(PC, OpcodeCounts::PROP_OTHER);
5176 :
5177 : /*
5178 : * These two must be loaded first. The objReg because the string path
5179 : * wants to read it, and the shapeReg because it could cause a spill that
5180 : * the string path wouldn't sink back.
5181 : */
5182 443629 : RegisterID objReg = frame.copyDataIntoReg(top);
5183 443629 : RegisterID shapeReg = frame.allocReg();
5184 :
5185 : RESERVE_IC_SPACE(masm);
5186 :
5187 443629 : PICGenInfo pic(ic::PICInfo::GET, JSOp(*PC));
5188 :
5189 : /*
5190 : * If this access has been on a shape with a getter hook, make preparations
5191 : * so that we can generate a stub to call the hook directly (rather than be
5192 : * forced to make a stub call). Sync the stack up front and kill all
5193 : * registers so that PIC stubs can contain calls, and always generate a
5194 : * type barrier if inference is enabled (known property types do not
5195 : * reflect properties with getter hooks).
5196 : */
5197 : pic.canCallHook = pic.forcedTypeBarrier =
5198 443629 : !forPrototype &&
5199 : JSOp(*PC) == JSOP_GETPROP &&
5200 443629 : analysis->getCode(PC).accessGetter;
5201 :
5202 : /* Guard that the type is an object. */
5203 443629 : Label typeCheck;
5204 443629 : if (doTypeCheck && !top->isTypeKnown()) {
5205 390137 : RegisterID reg = frame.tempRegForType(top);
5206 390137 : pic.typeReg = reg;
5207 :
5208 390137 : if (pic.canCallHook) {
5209 17110 : PinRegAcrossSyncAndKill p1(frame, reg);
5210 8555 : frame.syncAndKillEverything();
5211 : }
5212 :
5213 : /* Start the hot path where it's easy to patch it. */
5214 390137 : pic.fastPathStart = masm.label();
5215 390137 : Jump j = masm.testObject(Assembler::NotEqual, reg);
5216 390137 : typeCheck = masm.label();
5217 390137 : RETURN_IF_OOM(false);
5218 :
5219 390137 : pic.typeCheck = stubcc.linkExit(j, Uses(1));
5220 390137 : pic.hasTypeCheck = true;
5221 : } else {
5222 53492 : if (pic.canCallHook)
5223 4495 : frame.syncAndKillEverything();
5224 :
5225 53492 : pic.fastPathStart = masm.label();
5226 53492 : pic.hasTypeCheck = false;
5227 53492 : pic.typeReg = Registers::ReturnReg;
5228 : }
5229 :
5230 443629 : pic.shapeReg = shapeReg;
5231 443629 : pic.name = name;
5232 :
5233 : /* Guard on shape. */
5234 443629 : masm.loadShape(objReg, shapeReg);
5235 443629 : pic.shapeGuard = masm.label();
5236 :
5237 443629 : DataLabelPtr inlineShapeLabel;
5238 : Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, shapeReg,
5239 443629 : inlineShapeLabel, ImmPtr(NULL));
5240 443629 : Label inlineShapeJump = masm.label();
5241 :
5242 : RESERVE_OOL_SPACE(stubcc.masm);
5243 443629 : pic.slowPathStart = stubcc.linkExit(j, Uses(1));
5244 :
5245 443629 : stubcc.leave();
5246 443629 : passICAddress(&pic);
5247 443629 : pic.slowPathCall = OOL_STUBCALL(forPrototype ? ic::GetPropNoCache : ic::GetProp, rejoin);
5248 : CHECK_OOL_SPACE();
5249 443629 : if (rejoin == REJOIN_GETTER)
5250 442013 : testPushedType(rejoin, -1);
5251 :
5252 : /* Load the base slot address. */
5253 443629 : Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, JSObject::offsetOfSlots()),
5254 443629 : objReg);
5255 :
5256 : /* Copy the slot value to the expression stack. */
5257 443629 : Address slot(objReg, 1 << 24);
5258 443629 : frame.pop();
5259 :
5260 443629 : Label fastValueLoad = masm.loadValueWithAddressOffsetPatch(slot, shapeReg, objReg);
5261 443629 : pic.fastPathRejoin = masm.label();
5262 :
5263 443629 : RETURN_IF_OOM(false);
5264 :
5265 : /* Initialize op labels. */
5266 443629 : GetPropLabels &labels = pic.getPropLabels();
5267 443629 : labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel);
5268 443629 : labels.setInlineShapeData(masm, pic.shapeGuard, inlineShapeLabel);
5269 :
5270 443629 : labels.setValueLoad(masm, pic.fastPathRejoin, fastValueLoad);
5271 443629 : if (pic.hasTypeCheck)
5272 390137 : labels.setInlineTypeJump(masm, pic.fastPathStart, typeCheck);
5273 443629 : labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
5274 :
5275 : CHECK_IC_SPACE();
5276 :
5277 443629 : pic.objReg = objReg;
5278 443629 : frame.pushRegs(shapeReg, objReg, knownType);
5279 : BarrierState barrier = testBarrier(pic.shapeReg, pic.objReg, false, false,
5280 443629 : /* force = */ pic.canCallHook);
5281 :
5282 443629 : stubcc.rejoin(Changes(1));
5283 443629 : pics.append(pic);
5284 :
5285 443629 : finishBarrier(barrier, rejoin, 0);
5286 443629 : return true;
5287 : }
5288 :
5289 : bool
5290 70642 : mjit::Compiler::testSingletonProperty(JSObject *obj, jsid id)
5291 : {
5292 : /*
5293 : * We would like to completely no-op property/global accesses which can
5294 : * produce only a particular JSObject or undefined, provided we can
5295 : * determine the pushed value must not be undefined (or, if it could be
5296 : * undefined, a recompilation will be triggered).
5297 : *
5298 : * If the access definitely goes through obj, either directly or on the
5299 : * prototype chain, then if obj has a defined property now, and the
5300 : * property has a default or method shape, the only way it can produce
5301 : * undefined in the future is if it is deleted. Deletion causes type
5302 : * properties to be explicitly marked with undefined.
5303 : */
5304 :
5305 70642 : JSObject *nobj = obj;
5306 287356 : while (nobj) {
5307 146074 : if (!nobj->isNative())
5308 2 : return false;
5309 146072 : if (nobj->getClass()->ops.lookupGeneric)
5310 0 : return false;
5311 146072 : nobj = nobj->getProto();
5312 : }
5313 :
5314 : JSObject *holder;
5315 70640 : JSProperty *prop = NULL;
5316 70640 : if (!obj->lookupGeneric(cx, id, &holder, &prop))
5317 0 : return false;
5318 70640 : if (!prop)
5319 607 : return false;
5320 :
5321 70033 : Shape *shape = (Shape *) prop;
5322 70033 : if (shape->hasDefaultGetter()) {
5323 70029 : if (!shape->hasSlot())
5324 0 : return false;
5325 70029 : if (holder->getSlot(shape->slot()).isUndefined())
5326 0 : return false;
5327 4 : } else if (!shape->isMethod()) {
5328 4 : return false;
5329 : }
5330 :
5331 70029 : return true;
5332 : }
5333 :
5334 : bool
5335 34484 : mjit::Compiler::testSingletonPropertyTypes(FrameEntry *top, jsid id, bool *testObject)
5336 : {
5337 34484 : *testObject = false;
5338 :
5339 34484 : types::TypeSet *types = frame.extra(top).types;
5340 34484 : if (!types || types->unknownObject())
5341 16 : return false;
5342 :
5343 34468 : JSObject *singleton = types->getSingleton(cx);
5344 34468 : if (singleton)
5345 6765 : return testSingletonProperty(singleton, id);
5346 :
5347 27703 : if (!globalObj)
5348 0 : return false;
5349 :
5350 : JSProtoKey key;
5351 27703 : JSValueType type = types->getKnownTypeTag(cx);
5352 27703 : switch (type) {
5353 : case JSVAL_TYPE_STRING:
5354 8675 : key = JSProto_String;
5355 8675 : break;
5356 :
5357 : case JSVAL_TYPE_INT32:
5358 : case JSVAL_TYPE_DOUBLE:
5359 6 : key = JSProto_Number;
5360 6 : break;
5361 :
5362 : case JSVAL_TYPE_BOOLEAN:
5363 0 : key = JSProto_Boolean;
5364 0 : break;
5365 :
5366 : case JSVAL_TYPE_OBJECT:
5367 : case JSVAL_TYPE_UNKNOWN:
5368 19022 : if (types->getObjectCount() == 1 && !top->isNotType(JSVAL_TYPE_OBJECT)) {
5369 17515 : JS_ASSERT_IF(top->isTypeKnown(), top->isType(JSVAL_TYPE_OBJECT));
5370 17515 : types::TypeObject *object = types->getTypeObject(0);
5371 17515 : if (object && object->proto) {
5372 17515 : if (!testSingletonProperty(object->proto, id))
5373 515 : return false;
5374 17000 : types->addFreeze(cx);
5375 :
5376 : /* If we don't know this is an object, we will need a test. */
5377 17000 : *testObject = (type != JSVAL_TYPE_OBJECT) && !top->isTypeKnown();
5378 17000 : return true;
5379 : }
5380 : }
5381 1507 : return false;
5382 :
5383 : default:
5384 0 : return false;
5385 : }
5386 :
5387 : JSObject *proto;
5388 8681 : if (!js_GetClassPrototype(cx, globalObj, key, &proto, NULL))
5389 0 : return NULL;
5390 :
5391 8681 : return testSingletonProperty(proto, id);
5392 : }
5393 :
/*
 * Try to compile a CALLPROP as a type-based dynamic dispatch. For each
 * possible type of the receiver, the pushed value must be a known singleton
 * found on the type's prototype; the generated code then selects the result
 * by comparing the receiver's type pointer, avoiding a PIC entirely.
 * Returns true if the CALLPROP has been fully compiled here, false when no
 * code was generated and the caller should fall back to the generic path.
 */
bool
mjit::Compiler::jsop_getprop_dispatch(PropertyName *name)
{
    /*
     * Check for a CALLPROP which is a dynamic dispatch: every value it can
     * push is a singleton, and the pushed value is determined by the type of
     * the object being accessed. Return true if the CALLPROP has been fully
     * processed, false if no code was generated.
     */
    FrameEntry *top = frame.peek(-1);
    if (top->isNotType(JSVAL_TYPE_OBJECT))
        return false;

    /* Only normalized ids participate in type information. */
    jsid id = ATOM_TO_JSID(name);
    if (id != types::MakeTypeId(cx, id))
        return false;

    types::TypeSet *pushedTypes = pushedTypeSet(0);
    if (pushedTypes->unknownObject() || pushedTypes->baseFlags() != 0)
        return false;

    /* Check every pushed value is a singleton. */
    for (unsigned i = 0; i < pushedTypes->getObjectCount(); i++) {
        if (pushedTypes->getTypeObject(i) != NULL)
            return false;
    }

    types::TypeSet *objTypes = analysis->poppedTypes(PC, 0);
    if (objTypes->unknownObject() || objTypes->getObjectCount() == 0)
        return false;

    pushedTypes->addFreeze(cx);

    /* Map each type in the object to the resulting pushed value. */
    Vector<JSObject *> results(CompilerAllocPolicy(cx, *this));

    /*
     * For each type of the base object, check it has no 'own' property for the
     * accessed id and that its prototype does have such a property.
     */
    /* 'last' records the index of the final type with a dispatch result, so
     * codegen below can let that case fall through without an extra jump. */
    uint32_t last = 0;
    for (unsigned i = 0; i < objTypes->getObjectCount(); i++) {
        /* Singleton receivers are not handled by this dispatch scheme. */
        if (objTypes->getSingleObject(i) != NULL)
            return false;
        types::TypeObject *object = objTypes->getTypeObject(i);
        if (!object) {
            /* Keep results[] index-aligned with objTypes entries. */
            results.append((JSObject *) NULL);
            continue;
        }
        if (object->unknownProperties() || !object->proto)
            return false;
        types::TypeSet *ownTypes = object->getProperty(cx, id, false);
        if (ownTypes->isOwnProperty(cx, object, false))
            return false;

        if (!testSingletonProperty(object->proto, id))
            return false;

        if (object->proto->getType(cx)->unknownProperties())
            return false;
        types::TypeSet *protoTypes = object->proto->type()->getProperty(cx, id, false);
        if (!protoTypes)
            return false;
        JSObject *singleton = protoTypes->getSingleton(cx);
        if (!singleton)
            return false;

        results.append(singleton);
        last = i;
    }

    if (oomInVector)
        return false;

    /* Freeze so recompilation occurs if the receiver's type set grows. */
    objTypes->addFreeze(cx);

    /* Done filtering, now generate code which dispatches on the type. */

    frame.forgetMismatchedObject(top);

    if (!top->isType(JSVAL_TYPE_OBJECT)) {
        Jump notObject = frame.testObject(Assembler::NotEqual, top);
        stubcc.linkExit(notObject, Uses(1));
    }

    RegisterID reg = frame.tempRegForData(top);
    frame.pinReg(reg);
    RegisterID pushreg = frame.allocReg();
    frame.unpinReg(reg);

    Address typeAddress(reg, JSObject::offsetOfType());

    Vector<Jump> rejoins(CompilerAllocPolicy(cx, *this));
    MaybeJump lastMiss;

    for (unsigned i = 0; i < objTypes->getObjectCount(); i++) {
        types::TypeObject *object = objTypes->getTypeObject(i);
        if (!object) {
            JS_ASSERT(results[i] == NULL);
            continue;
        }
        if (lastMiss.isSet())
            lastMiss.get().linkTo(masm.label(), &masm);

        /*
         * Check that the pushed result is actually in the known pushed types
         * for the bytecode; this bytecode may have type barriers. Redirect to
         * the stub to update said pushed types.
         */
        if (!pushedTypes->hasType(types::Type::ObjectType(results[i]))) {
            JS_ASSERT(hasTypeBarriers(PC));
            if (i == last) {
                stubcc.linkExit(masm.jump(), Uses(1));
                break;
            } else {
                lastMiss.setJump(masm.branchPtr(Assembler::NotEqual, typeAddress, ImmPtr(object)));
                stubcc.linkExit(masm.jump(), Uses(1));
                continue;
            }
        }

        if (i == last) {
            /* Final case: no type compare needed, just load the result. */
            masm.move(ImmPtr(results[i]), pushreg);
            break;
        } else {
            lastMiss.setJump(masm.branchPtr(Assembler::NotEqual, typeAddress, ImmPtr(object)));
            masm.move(ImmPtr(results[i]), pushreg);
            rejoins.append(masm.jump());
        }
    }

    for (unsigned i = 0; i < rejoins.length(); i++)
        rejoins[i].linkTo(masm.label(), &masm);

    /* Out-of-line fallback: call the generic GetProp stub. */
    stubcc.leave();
    stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
    OOL_STUBCALL(stubs::GetProp, REJOIN_FALLTHROUGH);
    testPushedType(REJOIN_FALLTHROUGH, -1);

    frame.pop();
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, pushreg);

    if (script->pcCounters)
        bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);

    stubcc.rejoin(Changes(2));
    return true;
}
5542 :
/*
 * Compile a property store (SETPROP/SETNAME/SETMETHOD). Tries, in order:
 * the slow path for known non-object LHS, a direct call-object slot store
 * for non-reentrant outer-function names, a direct inline-slot store when
 * type inference pins the property to a definite slot, and finally a SET
 * PIC. 'popGuaranteed' indicates the stored value will be popped, allowing
 * the store to consume it. Returns false only on OOM while building type
 * sets; all other outcomes return true.
 */
bool
mjit::Compiler::jsop_setprop(PropertyName *name, bool popGuaranteed)
{
    FrameEntry *lhs = frame.peek(-2);
    FrameEntry *rhs = frame.peek(-1);

    /* If the incoming type will never PIC, take slow path. */
    if (lhs->isTypeKnown() && lhs->getKnownType() != JSVAL_TYPE_OBJECT) {
        jsop_setprop_slow(name);
        return true;
    }

    /*
     * If this is a SETNAME to a variable of a non-reentrant outer function,
     * set the variable's slot directly for the active call object.
     */
    if (cx->typeInferenceEnabled() && js_CodeSpec[*PC].format & JOF_NAME) {
        ScriptAnalysis::NameAccess access =
            analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
        if (access.nesting) {
            /* Use a SavedReg so it isn't clobbered by the stub call. */
            RegisterID nameReg = frame.allocReg(Registers::SavedRegs).reg();
            Address address = frame.loadNameAddress(access, nameReg);

#ifdef JSGC_INCREMENTAL_MJ
            /* Write barrier. */
            if (cx->compartment->needsBarrier()) {
                stubcc.linkExit(masm.jump(), Uses(0));
                stubcc.leave();

                /* sync() may have overwritten nameReg, so we reload its data. */
                JS_ASSERT(address.base == nameReg);
                stubcc.masm.move(ImmPtr(access.basePointer()), nameReg);
                stubcc.masm.loadPtr(Address(nameReg), nameReg);
                stubcc.masm.addPtr(Imm32(address.offset), nameReg, Registers::ArgReg1);

                OOL_STUBCALL(stubs::WriteBarrier, REJOIN_NONE);
                stubcc.rejoin(Changes(0));
            }
#endif

            frame.storeTo(rhs, address, popGuaranteed);
            frame.shimmy(1);
            frame.freeReg(address.base);
            return true;
        }
    }

    /*
     * Set the property directly if we are accessing a known object which
     * always has the property in a particular inline slot.
     */
    jsid id = ATOM_TO_JSID(name);
    types::TypeSet *types = frame.extra(lhs).types;
    if (JSOp(*PC) == JSOP_SETPROP && id == types::MakeTypeId(cx, id) &&
        types && !types->unknownObject() &&
        types->getObjectCount() == 1 &&
        types->getTypeObject(0) != NULL &&
        !types->getTypeObject(0)->unknownProperties()) {
        types::TypeObject *object = types->getTypeObject(0);
        types::TypeSet *propertyTypes = object->getProperty(cx, id, false);
        if (!propertyTypes)
            return false;
        if (propertyTypes->isDefiniteProperty() &&
            !propertyTypes->isOwnProperty(cx, object, true)) {
            /* Freeze so recompilation occurs if the type set changes. */
            types->addFreeze(cx);
            uint32_t slot = propertyTypes->definiteSlot();
            RegisterID reg = frame.tempRegForData(lhs);
            frame.pinReg(reg);
            bool isObject = lhs->isTypeKnown();
            MaybeJump notObject;
            if (!isObject)
                notObject = frame.testObject(Assembler::NotEqual, lhs);
#ifdef JSGC_INCREMENTAL_MJ
            if (cx->compartment->needsBarrier() && propertyTypes->needsBarrier(cx)) {
                /* Write barrier. */
                Jump j = masm.testGCThing(Address(reg, JSObject::getFixedSlotOffset(slot)));
                stubcc.linkExit(j, Uses(0));
                stubcc.leave();
                stubcc.masm.addPtr(Imm32(JSObject::getFixedSlotOffset(slot)),
                                   reg, Registers::ArgReg1);
                OOL_STUBCALL(stubs::GCThingWriteBarrier, REJOIN_NONE);
                stubcc.rejoin(Changes(0));
            }
#endif
            if (!isObject) {
                /* Out-of-line path when the LHS turns out not to be an object. */
                stubcc.linkExit(notObject.get(), Uses(2));
                stubcc.leave();
                stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
                OOL_STUBCALL(STRICT_VARIANT(stubs::SetName), REJOIN_FALLTHROUGH);
            }
            frame.storeTo(rhs, Address(reg, JSObject::getFixedSlotOffset(slot)), popGuaranteed);
            frame.unpinReg(reg);
            frame.shimmy(1);
            if (!isObject)
                stubcc.rejoin(Changes(1));
            if (script->pcCounters)
                bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
            return true;
        }
    }

    if (script->pcCounters)
        bumpPropCounter(PC, OpcodeCounts::PROP_OTHER);

    JSOp op = JSOp(*PC);

#ifdef JSGC_INCREMENTAL_MJ
    /* Write barrier. We don't have type information for JSOP_SETNAME. */
    if (cx->compartment->needsBarrier() &&
        (!types || op == JSOP_SETNAME || types->propertyNeedsBarrier(cx, id)))
    {
        jsop_setprop_slow(name);
        return true;
    }
#endif

    ic::PICInfo::Kind kind = (op == JSOP_SETMETHOD)
                             ? ic::PICInfo::SETMETHOD
                             : ic::PICInfo::SET;
    PICGenInfo pic(kind, op);
    pic.name = name;

    if (monitored(PC)) {
        pic.typeMonitored = true;
        types::TypeSet *types = frame.extra(rhs).types;
        if (!types) {
            /* Handle FORNAME and other compound opcodes. Yuck. */
            types = types::TypeSet::make(cx, "unknownRHS");
            if (!types)
                return false;
            types->addType(cx, types::Type::UnknownType());
        }
        pic.rhsTypes = types;
    } else {
        pic.typeMonitored = false;
        pic.rhsTypes = NULL;
    }

    RESERVE_IC_SPACE(masm);
    RESERVE_OOL_SPACE(stubcc.masm);

    /* Guard that the type is an object. */
    Jump typeCheck;
    if (!lhs->isTypeKnown()) {
        RegisterID reg = frame.tempRegForType(lhs);
        pic.typeReg = reg;

        /* Start the hot path where it's easy to patch it. */
        pic.fastPathStart = masm.label();
        Jump j = masm.testObject(Assembler::NotEqual, reg);

        pic.typeCheck = stubcc.linkExit(j, Uses(2));
        stubcc.leave();

        stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
        OOL_STUBCALL(STRICT_VARIANT(stubs::SetName), REJOIN_FALLTHROUGH);

        typeCheck = stubcc.masm.jump();
        pic.hasTypeCheck = true;
    } else {
        pic.fastPathStart = masm.label();
        pic.hasTypeCheck = false;
        pic.typeReg = Registers::ReturnReg;
    }

    frame.forgetMismatchedObject(lhs);

    /* Get the object into a mutable register. */
    RegisterID objReg = frame.copyDataIntoReg(lhs);
    pic.objReg = objReg;

    /* Get info about the RHS and pin it. */
    ValueRemat vr;
    frame.pinEntry(rhs, vr);
    pic.vr = vr;

    RegisterID shapeReg = frame.allocReg();
    pic.shapeReg = shapeReg;

    frame.unpinEntry(vr);

    /* Guard on shape. */
    masm.loadShape(objReg, shapeReg);
    pic.shapeGuard = masm.label();
    DataLabelPtr inlineShapeData;
    Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, shapeReg,
                                     inlineShapeData, ImmPtr(NULL));
    Label afterInlineShapeJump = masm.label();

    /* Slow path. */
    {
        pic.slowPathStart = stubcc.linkExit(j, Uses(2));

        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::SetProp, REJOIN_FALLTHROUGH);
        CHECK_OOL_SPACE();
    }

    /* Load dslots. */
    Label dslotsLoadLabel = masm.loadPtrWithPatchToLEA(Address(objReg, JSObject::offsetOfSlots()),
                                                       objReg);

    /* Store RHS into object slot. */
    /* The 1 << 24 offset is a placeholder patched by the PIC. */
    Address slot(objReg, 1 << 24);
    DataLabel32 inlineValueStore = masm.storeValueWithAddressOffsetPatch(vr, slot);
    pic.fastPathRejoin = masm.label();

    frame.freeReg(objReg);
    frame.freeReg(shapeReg);

    /* "Pop under", taking out object (LHS) and leaving RHS. */
    frame.shimmy(1);

    /* Finish slow path. */
    {
        if (pic.hasTypeCheck)
            typeCheck.linkTo(stubcc.masm.label(), &stubcc.masm);
        stubcc.rejoin(Changes(1));
    }

    RETURN_IF_OOM(false);

    SetPropLabels &labels = pic.setPropLabels();
    labels.setInlineShapeData(masm, pic.shapeGuard, inlineShapeData);
    labels.setDslotsLoad(masm, pic.fastPathRejoin, dslotsLoadLabel);
    labels.setInlineValueStore(masm, pic.fastPathRejoin, inlineValueStore);
    labels.setInlineShapeJump(masm, pic.shapeGuard, afterInlineShapeJump);

    pics.append(pic);
    return true;
}
5776 :
/*
 * Compile a NAME access. Uses a direct call-object slot load for names of
 * non-reentrant outer functions, otherwise emits a NAME PIC whose fast path
 * is a patchable jump (there is no inline implementation). 'type' is the
 * statically known pushed type, or JSVAL_TYPE_UNKNOWN.
 */
void
mjit::Compiler::jsop_name(PropertyName *name, JSValueType type)
{
    /*
     * If this is a NAME for a variable of a non-reentrant outer function, get
     * the variable's slot directly for the active call object. We always need
     * to check for undefined, however.
     */
    if (cx->typeInferenceEnabled()) {
        ScriptAnalysis::NameAccess access =
            analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
        if (access.nesting) {
            Address address = frame.loadNameAddress(access);
            /* NOTE(review): this local shadows the 'type' parameter. */
            JSValueType type = knownPushedType(0);
            BarrierState barrier = pushAddressMaybeBarrier(address, type, true,
                                                           /* testUndefined = */ true);
            finishBarrier(barrier, REJOIN_GETTER, 0);
            return;
        }
    }

    PICGenInfo pic(ic::PICInfo::NAME, JSOp(*PC));

    RESERVE_IC_SPACE(masm);

    pic.shapeReg = frame.allocReg();
    pic.objReg = frame.allocReg();
    pic.typeReg = Registers::ReturnReg;
    pic.name = name;
    pic.hasTypeCheck = false;
    pic.fastPathStart = masm.label();

    /* There is no inline implementation, so we always jump to the slow path or to a stub. */
    pic.shapeGuard = masm.label();
    Jump inlineJump = masm.jump();
    {
        RESERVE_OOL_SPACE(stubcc.masm);
        pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(0));
        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::Name, REJOIN_GETTER);
        CHECK_OOL_SPACE();
        testPushedType(REJOIN_GETTER, 0);
    }
    pic.fastPathRejoin = masm.label();

    /* Initialize op labels. */
    ScopeNameLabels &labels = pic.scopeNameLabels();
    labels.setInlineJump(masm, pic.fastPathStart, inlineJump);

    CHECK_IC_SPACE();

    /*
     * We can't optimize away the PIC for the NAME access itself, but if we've
     * only seen a single value pushed by this access, mark it as such and
     * recompile if a different value becomes possible.
     */
    JSObject *singleton = pushedSingleton(0);
    if (singleton) {
        frame.push(ObjectValue(*singleton));
        frame.freeReg(pic.shapeReg);
        frame.freeReg(pic.objReg);
    } else {
        frame.pushRegs(pic.shapeReg, pic.objReg, type);
    }
    BarrierState barrier = testBarrier(pic.shapeReg, pic.objReg, /* testUndefined = */ true);

    stubcc.rejoin(Changes(1));

    pics.append(pic);

    finishBarrier(barrier, REJOIN_GETTER, 0);
}
5850 :
/*
 * Compile a GETXPROP access. Handled like NAME for non-reentrant outer
 * function variables; otherwise emits an XNAME PIC. Falls back to a plain
 * GETPROP when the operand is known not to be an object.
 * Returns false only on OOM during assembly.
 */
bool
mjit::Compiler::jsop_xname(PropertyName *name)
{
    /*
     * If this is a GETXPROP for a variable of a non-reentrant outer function,
     * treat in the same way as a NAME.
     */
    if (cx->typeInferenceEnabled()) {
        ScriptAnalysis::NameAccess access =
            analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
        if (access.nesting) {
            frame.pop();
            Address address = frame.loadNameAddress(access);
            JSValueType type = knownPushedType(0);
            BarrierState barrier = pushAddressMaybeBarrier(address, type, true,
                                                           /* testUndefined = */ true);
            finishBarrier(barrier, REJOIN_GETTER, 0);
            return true;
        }
    }

    PICGenInfo pic(ic::PICInfo::XNAME, JSOp(*PC));

    FrameEntry *fe = frame.peek(-1);
    if (fe->isNotType(JSVAL_TYPE_OBJECT)) {
        /* Known non-object operand: compile as an ordinary GETPROP. */
        return jsop_getprop(name, knownPushedType(0));
    }

    if (!fe->isTypeKnown()) {
        Jump notObject = frame.testObject(Assembler::NotEqual, fe);
        stubcc.linkExit(notObject, Uses(1));
    }

    frame.forgetMismatchedObject(fe);

    RESERVE_IC_SPACE(masm);

    pic.shapeReg = frame.allocReg();
    pic.objReg = frame.copyDataIntoReg(fe);
    pic.typeReg = Registers::ReturnReg;
    pic.name = name;
    pic.hasTypeCheck = false;
    pic.fastPathStart = masm.label();

    /* There is no inline implementation, so we always jump to the slow path or to a stub. */
    pic.shapeGuard = masm.label();
    Jump inlineJump = masm.jump();
    {
        RESERVE_OOL_SPACE(stubcc.masm);
        pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(1));
        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::XName, REJOIN_GETTER);
        CHECK_OOL_SPACE();
        testPushedType(REJOIN_GETTER, -1);
    }

    pic.fastPathRejoin = masm.label();

    RETURN_IF_OOM(false);

    /* Initialize op labels. */
    ScopeNameLabels &labels = pic.scopeNameLabels();
    labels.setInlineJumpOffset(masm.differenceBetween(pic.fastPathStart, inlineJump));

    CHECK_IC_SPACE();

    frame.pop();
    frame.pushRegs(pic.shapeReg, pic.objReg, knownPushedType(0));

    BarrierState barrier = testBarrier(pic.shapeReg, pic.objReg, /* testUndefined = */ true);

    stubcc.rejoin(Changes(1));

    pics.append(pic);

    finishBarrier(barrier, REJOIN_FALLTHROUGH, 0);
    return true;
}
5930 :
/*
 * Compile a BINDNAME. For names of non-reentrant outer functions the bound
 * object is the outer function's active call object, loaded directly;
 * otherwise a BIND PIC is emitted whose fast path pushes the frame's scope
 * chain head when its base shape has no parent.
 */
void
mjit::Compiler::jsop_bindname(PropertyName *name)
{
    /*
     * If this is a BINDNAME for a variable of a non-reentrant outer function,
     * the object is definitely the outer function's active call object.
     */
    if (cx->typeInferenceEnabled()) {
        ScriptAnalysis::NameAccess access =
            analysis->resolveNameAccess(cx, ATOM_TO_JSID(name), true);
        if (access.nesting) {
            RegisterID reg = frame.allocReg();
            /* Load through the indirection: activeCall can change at runtime. */
            JSObject **pobj = &access.nesting->activeCall;
            masm.move(ImmPtr(pobj), reg);
            masm.loadPtr(Address(reg), reg);
            frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
            return;
        }
    }

    PICGenInfo pic(ic::PICInfo::BIND, JSOp(*PC));

    // This code does not check the frame flags to see if scopeChain has been
    // set. Rather, it relies on the up-front analysis statically determining
    // whether BINDNAME can be used, which reifies the scope chain at the
    // prologue.
    JS_ASSERT(analysis->usesScopeChain());

    pic.shapeReg = frame.allocReg();
    pic.objReg = frame.allocReg();
    pic.typeReg = Registers::ReturnReg;
    pic.name = name;
    pic.hasTypeCheck = false;

    RESERVE_IC_SPACE(masm);
    pic.fastPathStart = masm.label();

    /* scopeChain -> shape -> base shape -> parent. */
    masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
    masm.loadPtr(Address(pic.objReg, JSObject::offsetOfShape()), pic.shapeReg);
    masm.loadPtr(Address(pic.shapeReg, Shape::offsetOfBase()), pic.shapeReg);
    Address parent(pic.shapeReg, BaseShape::offsetOfParent());

    /* Fast path succeeds only when the parent is NULL. */
    pic.shapeGuard = masm.label();
    Jump inlineJump = masm.branchPtr(Assembler::NotEqual, parent, ImmPtr(NULL));
    {
        RESERVE_OOL_SPACE(stubcc.masm);
        pic.slowPathStart = stubcc.linkExit(inlineJump, Uses(0));
        stubcc.leave();
        passICAddress(&pic);
        pic.slowPathCall = OOL_STUBCALL(ic::BindName, REJOIN_FALLTHROUGH);
        CHECK_OOL_SPACE();
    }

    pic.fastPathRejoin = masm.label();

    /* Initialize op labels. */
    BindNameLabels &labels = pic.bindNameLabels();
    labels.setInlineJump(masm, pic.shapeGuard, inlineJump);

    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, pic.objReg);
    frame.freeReg(pic.shapeReg);

    stubcc.rejoin(Changes(1));

    pics.append(pic);
}
5997 :
5998 : #else /* !JS_POLYIC */
5999 :
/*
 * Non-PIC fallback for JSOP_NAME / call-name lookups: always go through the
 * Name/CallName stub, then push the (synced) result.
 */
void
mjit::Compiler::jsop_name(PropertyName *name, JSValueType type, bool isCall)
{
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(isCall ? stubs::CallName : stubs::Name, REJOIN_FALLTHROUGH);
    testPushedType(REJOIN_FALLTHROUGH, 0, /* ool = */ false);
    frame.pushSynced(type);
    if (isCall)
        frame.pushSynced(JSVAL_TYPE_UNKNOWN);  /* second value pushed by CallName; type unknown */
}
6010 :
6011 : bool
6012 : mjit::Compiler::jsop_xname(PropertyName *name)
6013 : {
6014 : return jsop_getprop(name, knownPushedType(0), pushedTypeSet(0));
6015 : }
6016 :
/*
 * Non-PIC fallback for property gets: unconditionally take the slow path.
 * knownType/typeSet/typecheck are accepted for signature compatibility but
 * unused in this configuration.
 */
bool
mjit::Compiler::jsop_getprop(PropertyName *name, JSValueType knownType, types::TypeSet *typeSet,
                             bool typecheck, bool forPrototype)
{
    jsop_getprop_slow(name, forPrototype);
    return true;
}
6024 :
/* Non-PIC fallback for property sets: unconditionally take the slow path. */
bool
mjit::Compiler::jsop_setprop(PropertyName *name)
{
    jsop_setprop_slow(name);
    return true;
}
6031 :
/*
 * Non-PIC fallback for JSOP_BINDNAME: push the scope chain head inline when
 * it has no parent, otherwise call the BindName stub out of line.
 */
void
mjit::Compiler::jsop_bindname(PropertyName *name)
{
    RegisterID reg = frame.allocReg();
    Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
    masm.loadPtr(scopeChain, reg);

    Address address(reg, offsetof(JSObject, parent));

    /* Fast path applies only when the scope chain head has a NULL parent. */
    Jump j = masm.branchPtr(Assembler::NotEqual, address, ImmPtr(0));

    stubcc.linkExit(j, Uses(0));
    stubcc.leave();
    stubcc.masm.move(ImmPtr(name), Registers::ArgReg1);
    OOL_STUBCALL(stubs::BindName, REJOIN_FALLTHROUGH);

    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);

    stubcc.rejoin(Changes(1));
}
6052 : #endif
6053 :
/*
 * Emit JSOP_THIS: push the frame's 'this' value, wrapping it via the This
 * stub where the visible type information does not prove it is an object.
 */
void
mjit::Compiler::jsop_this()
{
    frame.pushThis();

    /*
     * In strict mode code, we don't wrap 'this'.
     * In direct-call eval code, we wrapped 'this' before entering the eval.
     * In global code, 'this' is always an object.
     */
    if (script->function() && !script->strictModeCode) {
        FrameEntry *thisFe = frame.peek(-1);

        if (!thisFe->isType(JSVAL_TYPE_OBJECT)) {
            /*
             * Watch out for an obscure case where we don't know we are pushing
             * an object: the script has not yet had a 'this' value assigned,
             * so no pushed 'this' type has been inferred. Don't mark the type
             * as known in this case, preserving the invariant that compiler
             * types reflect inferred types.
             */
            if (cx->typeInferenceEnabled() && knownPushedType(0) != JSVAL_TYPE_OBJECT) {
                prepareStubCall(Uses(1));
                INLINE_STUBCALL(stubs::This, REJOIN_FALLTHROUGH);
                return;
            }

            JSValueType type = cx->typeInferenceEnabled()
                ? types::TypeScript::ThisTypes(script)->getKnownTypeTag(cx)
                : JSVAL_TYPE_UNKNOWN;
            if (type != JSVAL_TYPE_OBJECT) {
                /* Not provably an object: test inline, wrap out of line. */
                Jump notObj = frame.testObject(Assembler::NotEqual, thisFe);
                stubcc.linkExit(notObj, Uses(1));
                stubcc.leave();
                OOL_STUBCALL(stubs::This, REJOIN_FALLTHROUGH);
                stubcc.rejoin(Changes(1));
            }

            // Now we know that |this| is an object.
            frame.pop();
            frame.learnThisIsObject(type != JSVAL_TYPE_OBJECT);
            frame.pushThis();
        }

        JS_ASSERT(thisFe->isType(JSVAL_TYPE_OBJECT));
    }
}
6101 :
/*
 * Emit JSOP_ITER: create an iterator for the value on top of the stack.
 * The fast path tries to reuse the compartment's most recent cached native
 * iterator; all guard failures fall back to the stubs::Iter call.
 */
bool
mjit::Compiler::iter(unsigned flags)
{
    FrameEntry *fe = frame.peek(-1);

    /*
     * Stub the call if this is not a simple 'for in' loop or if the iterated
     * value is known to not be an object.
     */
    if ((flags != JSITER_ENUMERATE) || fe->isNotType(JSVAL_TYPE_OBJECT)) {
        prepareStubCall(Uses(1));
        masm.move(Imm32(flags), Registers::ArgReg1);
        INLINE_STUBCALL(stubs::Iter, REJOIN_FALLTHROUGH);
        frame.pop();
        frame.pushSynced(JSVAL_TYPE_UNKNOWN);
        return true;
    }

    if (!fe->isTypeKnown()) {
        Jump notObject = frame.testObject(Assembler::NotEqual, fe);
        stubcc.linkExit(notObject, Uses(1));
    }

    frame.forgetMismatchedObject(fe);

    RegisterID reg = frame.tempRegForData(fe);

    /* Pin the object register across the temp-register allocations. */
    frame.pinReg(reg);
    RegisterID ioreg = frame.allocReg(); /* Will hold iterator JSObject */
    RegisterID nireg = frame.allocReg(); /* Will hold NativeIterator */
    RegisterID T1 = frame.allocReg();
    RegisterID T2 = frame.allocReg();
    frame.unpinReg(reg);

    /* Fetch the most recent iterator. */
    masm.loadPtr(&script->compartment()->nativeIterCache.last, ioreg);

    /* Test for NULL. */
    Jump nullIterator = masm.branchTest32(Assembler::Zero, ioreg, ioreg);
    stubcc.linkExit(nullIterator, Uses(1));

    /* Get NativeIterator from iter obj. */
    masm.loadObjPrivate(ioreg, nireg, JSObject::ITER_CLASS_NFIXED_SLOTS);

    /* Test for active iterator. */
    Address flagsAddr(nireg, offsetof(NativeIterator, flags));
    masm.load32(flagsAddr, T1);
    Jump activeIterator = masm.branchTest32(Assembler::NonZero, T1,
                                            Imm32(JSITER_ACTIVE|JSITER_UNREUSABLE));
    stubcc.linkExit(activeIterator, Uses(1));

    /* Compare shape of object with iterator. */
    masm.loadShape(reg, T1);
    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
    masm.loadPtr(Address(T2, 0), T2);
    Jump mismatchedObject = masm.branchPtr(Assembler::NotEqual, T1, T2);
    stubcc.linkExit(mismatchedObject, Uses(1));

    /* Compare shape of object's prototype with iterator. */
    masm.loadPtr(Address(reg, JSObject::offsetOfType()), T1);
    masm.loadPtr(Address(T1, offsetof(types::TypeObject, proto)), T1);
    masm.loadShape(T1, T1);
    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
    masm.loadPtr(Address(T2, sizeof(Shape *)), T2);
    Jump mismatchedProto = masm.branchPtr(Assembler::NotEqual, T1, T2);
    stubcc.linkExit(mismatchedProto, Uses(1));

    /*
     * Compare object's prototype's prototype with NULL. The last native
     * iterator will always have a prototype chain length of one
     * (i.e. it must be a plain object), so we do not need to generate
     * a loop here.
     */
    masm.loadPtr(Address(reg, JSObject::offsetOfType()), T1);
    masm.loadPtr(Address(T1, offsetof(types::TypeObject, proto)), T1);
    masm.loadPtr(Address(T1, JSObject::offsetOfType()), T1);
    masm.loadPtr(Address(T1, offsetof(types::TypeObject, proto)), T1);
    Jump overlongChain = masm.branchPtr(Assembler::NonZero, T1, T1);
    stubcc.linkExit(overlongChain, Uses(1));

#ifdef JSGC_INCREMENTAL_MJ
    /*
     * Write barrier for stores to the iterator. We only need to take a write
     * barrier if NativeIterator::obj is actually going to change.
     */
    if (cx->compartment->needsBarrier()) {
        Jump j = masm.branchPtr(Assembler::NotEqual,
                                Address(nireg, offsetof(NativeIterator, obj)), reg);
        stubcc.linkExit(j, Uses(1));
    }
#endif

    /* Found a match with the most recent iterator. Hooray! */

    /* Mark iterator as active. */
    masm.storePtr(reg, Address(nireg, offsetof(NativeIterator, obj)));
    masm.load32(flagsAddr, T1);
    masm.or32(Imm32(JSITER_ACTIVE), T1);
    masm.store32(T1, flagsAddr);

    /* Chain onto the active iterator stack. */
    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
    masm.loadPtr(Address(T1, offsetof(JSContext, enumerators)), T2);
    masm.storePtr(T2, Address(nireg, offsetof(NativeIterator, next)));
    masm.storePtr(ioreg, Address(T1, offsetof(JSContext, enumerators)));

    frame.freeReg(nireg);
    frame.freeReg(T1);
    frame.freeReg(T2);

    /* Out-of-line path: any guard failure calls the generic Iter stub. */
    stubcc.leave();
    stubcc.masm.move(Imm32(flags), Registers::ArgReg1);
    OOL_STUBCALL(stubs::Iter, REJOIN_FALLTHROUGH);

    /* Push the iterator object. */
    frame.pop();
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, ioreg);

    stubcc.rejoin(Changes(1));

    return true;
}
6224 :
6225 : /*
6226 : * This big nasty function implements JSOP_ITERNEXT, which is used in the head
6227 : * of a for-in loop to put the next value on the stack.
6228 : */
/*
 * Emit JSOP_ITERNEXT: push the next property name from the iterator found at
 * stack depth |offset|. The fast path handles enumerate (key) iterators only;
 * value (FOREACH) iterators and non-Iterator objects go to stubs::IterNext.
 */
void
mjit::Compiler::iterNext(ptrdiff_t offset)
{
    FrameEntry *fe = frame.peek(-offset);
    RegisterID reg = frame.tempRegForData(fe);

    /* Is it worth trying to pin this longer? Prolly not. */
    frame.pinReg(reg);
    RegisterID T1 = frame.allocReg();
    frame.unpinReg(reg);

    /* Test clasp */
    Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, T1, &IteratorClass);
    stubcc.linkExit(notFast, Uses(1));

    /* Get private from iter obj. */
    masm.loadObjPrivate(reg, T1, JSObject::ITER_CLASS_NFIXED_SLOTS);

    RegisterID T3 = frame.allocReg();
    RegisterID T4 = frame.allocReg();

    /* Test for a value iterator, which could come through an Iterator object. */
    masm.load32(Address(T1, offsetof(NativeIterator, flags)), T3);
    notFast = masm.branchTest32(Assembler::NonZero, T3, Imm32(JSITER_FOREACH));
    stubcc.linkExit(notFast, Uses(1));

    RegisterID T2 = frame.allocReg();

    /* Get cursor. */
    masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);

    /* Get the next string in the iterator. */
    masm.loadPtr(T2, T3);

    /* It's safe to increase the cursor now. */
    masm.addPtr(Imm32(sizeof(JSString*)), T2, T4);
    masm.storePtr(T4, Address(T1, offsetof(NativeIterator, props_cursor)));

    frame.freeReg(T4);
    frame.freeReg(T1);
    frame.freeReg(T2);

    stubcc.leave();
    stubcc.masm.move(Imm32(offset), Registers::ArgReg1);
    OOL_STUBCALL(stubs::IterNext, REJOIN_FALLTHROUGH);

    frame.pushUntypedPayload(JSVAL_TYPE_STRING, T3);

    /* Join with the stub call. */
    stubcc.rejoin(Changes(1));
}
6280 :
/*
 * Emit JSOP_MOREITER and the following branch to |target|: test whether the
 * iterator on top of the stack has more properties (cursor < end) and jump
 * accordingly. Slow cases call stubs::IterMore and branch on its result.
 */
bool
mjit::Compiler::iterMore(jsbytecode *target)
{
    if (!frame.syncForBranch(target, Uses(1)))
        return false;

    FrameEntry *fe = frame.peek(-1);
    RegisterID reg = frame.tempRegForData(fe);
    RegisterID tempreg = frame.allocReg();

    /* Test clasp */
    Jump notFast = masm.testObjClass(Assembler::NotEqual, reg, tempreg, &IteratorClass);
    stubcc.linkExitForBranch(notFast);

    /* Get private from iter obj. */
    masm.loadObjPrivate(reg, reg, JSObject::ITER_CLASS_NFIXED_SLOTS);

    /* Test that the iterator supports fast iteration. */
    notFast = masm.branchTest32(Assembler::NonZero, Address(reg, offsetof(NativeIterator, flags)),
                                Imm32(JSITER_FOREACH));
    stubcc.linkExitForBranch(notFast);

    /* Get props_cursor, test */
    masm.loadPtr(Address(reg, offsetof(NativeIterator, props_cursor)), tempreg);
    masm.loadPtr(Address(reg, offsetof(NativeIterator, props_end)), reg);

    /* More properties remain while cursor < end. */
    Jump jFast = masm.branchPtr(Assembler::LessThan, tempreg, reg);

    stubcc.leave();
    OOL_STUBCALL(stubs::IterMore, REJOIN_BRANCH);
    /* The stub returns a boolean in the return register; branch on it. */
    Jump j = stubcc.masm.branchTest32(Assembler::NonZero, Registers::ReturnReg,
                                      Registers::ReturnReg);

    stubcc.rejoin(Changes(1));
    frame.freeReg(tempreg);

    return jumpAndRun(jFast, target, &j);
}
6319 :
6320 : void
6321 4438 : mjit::Compiler::iterEnd()
6322 : {
6323 4438 : FrameEntry *fe= frame.peek(-1);
6324 4438 : RegisterID reg = frame.tempRegForData(fe);
6325 :
6326 4438 : frame.pinReg(reg);
6327 4438 : RegisterID T1 = frame.allocReg();
6328 4438 : frame.unpinReg(reg);
6329 :
6330 : /* Test clasp */
6331 4438 : Jump notIterator = masm.testObjClass(Assembler::NotEqual, reg, T1, &IteratorClass);
6332 4438 : stubcc.linkExit(notIterator, Uses(1));
6333 :
6334 : /* Get private from iter obj. */
6335 4438 : masm.loadObjPrivate(reg, T1, JSObject::ITER_CLASS_NFIXED_SLOTS);
6336 :
6337 4438 : RegisterID T2 = frame.allocReg();
6338 :
6339 : /* Load flags. */
6340 4438 : Address flagAddr(T1, offsetof(NativeIterator, flags));
6341 4438 : masm.loadPtr(flagAddr, T2);
6342 :
6343 : /* Test for a normal enumerate iterator. */
6344 4438 : Jump notEnumerate = masm.branchTest32(Assembler::Zero, T2, Imm32(JSITER_ENUMERATE));
6345 4438 : stubcc.linkExit(notEnumerate, Uses(1));
6346 :
6347 : /* Clear active bit. */
6348 4438 : masm.and32(Imm32(~JSITER_ACTIVE), T2);
6349 4438 : masm.storePtr(T2, flagAddr);
6350 :
6351 : /* Reset property cursor. */
6352 4438 : masm.loadPtr(Address(T1, offsetof(NativeIterator, props_array)), T2);
6353 4438 : masm.storePtr(T2, Address(T1, offsetof(NativeIterator, props_cursor)));
6354 :
6355 : /* Advance enumerators list. */
6356 4438 : masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T2);
6357 4438 : masm.loadPtr(Address(T1, offsetof(NativeIterator, next)), T1);
6358 4438 : masm.storePtr(T1, Address(T2, offsetof(JSContext, enumerators)));
6359 :
6360 4438 : frame.freeReg(T1);
6361 4438 : frame.freeReg(T2);
6362 :
6363 4438 : stubcc.leave();
6364 4438 : OOL_STUBCALL(stubs::EndIter, REJOIN_FALLTHROUGH);
6365 :
6366 4438 : frame.pop();
6367 :
6368 4438 : stubcc.rejoin(Changes(1));
6369 4438 : }
6370 :
/* Slow path for global-name gets: call the generic Name stub and sync. */
void
mjit::Compiler::jsop_getgname_slow(uint32_t index)
{
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::Name, REJOIN_GETTER);
    testPushedType(REJOIN_GETTER, 0, /* ool = */ false);
    frame.pushSynced(JSVAL_TYPE_UNKNOWN);
}
6379 :
/*
 * Emit JSOP_BINDGNAME: push the global object, as a compile-time constant
 * when it is known, otherwise via the BindGlobalName stub.
 */
void
mjit::Compiler::jsop_bindgname()
{
    if (globalObj) {
        frame.push(ObjectValue(*globalObj));
        return;
    }

    /* :TODO: this is slower than it needs to be. */
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::BindGlobalName, REJOIN_NONE);
    frame.takeReg(Registers::ReturnReg);
    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
}
6394 :
/*
 * Emit JSOP_GETGNAME/JSOP_CALLGNAME: read a global variable. Tries, in
 * order: baked-in constants (undefined/NaN/Infinity), singleton objects,
 * a direct load from a known global slot (type inference), and finally a
 * GetGlobalName MonoIC.
 */
void
mjit::Compiler::jsop_getgname(uint32_t index)
{
    /* Optimize undefined, NaN and Infinity. */
    PropertyName *name = script->getName(index);
    if (name == cx->runtime->atomState.typeAtoms[JSTYPE_VOID]) {
        frame.push(UndefinedValue());
        return;
    }
    if (name == cx->runtime->atomState.NaNAtom) {
        frame.push(cx->runtime->NaNValue);
        return;
    }
    if (name == cx->runtime->atomState.InfinityAtom) {
        frame.push(cx->runtime->positiveInfinityValue);
        return;
    }

    /* Optimize singletons like Math for JSOP_CALLPROP. */
    JSObject *obj = pushedSingleton(0);
    if (obj && !hasTypeBarriers(PC) && testSingletonProperty(globalObj, ATOM_TO_JSID(name))) {
        frame.push(ObjectValue(*obj));
        return;
    }

    jsid id = ATOM_TO_JSID(name);
    JSValueType type = knownPushedType(0);
    if (cx->typeInferenceEnabled() && globalObj->isGlobal() && id == types::MakeTypeId(cx, id) &&
        !globalObj->getType(cx)->unknownProperties()) {
        types::TypeSet *propertyTypes = globalObj->getType(cx)->getProperty(cx, id, false);
        if (!propertyTypes)
            return;

        /*
         * If we are accessing a defined global which is a normal data property
         * then bake its address into the jitcode and guard against future
         * reallocation of the global object's slots.
         */
        const js::Shape *shape = globalObj->nativeLookup(cx, ATOM_TO_JSID(name));
        if (shape && shape->hasDefaultGetterOrIsMethod() && shape->hasSlot()) {
            HeapSlot *value = &globalObj->getSlotRef(shape->slot());
            if (!value->isUndefined() &&
                !propertyTypes->isOwnProperty(cx, globalObj->getType(cx), true)) {
                watchGlobalReallocation();
                RegisterID reg = frame.allocReg();
                masm.move(ImmPtr(value), reg);

                BarrierState barrier = pushAddressMaybeBarrier(Address(reg), type, true);
                finishBarrier(barrier, REJOIN_GETTER, 0);
                return;
            }
        }
    }

#if defined JS_MONOIC
    jsop_bindgname();

    FrameEntry *fe = frame.peek(-1);
    JS_ASSERT(fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT);

    GetGlobalNameICInfo ic;
    RESERVE_IC_SPACE(masm);
    RegisterID objReg;
    Jump shapeGuard;

    ic.fastPathStart = masm.label();
    if (fe->isConstant()) {
        /* Global known at compile time: guard its shape via a patchable compare. */
        JSObject *obj = &fe->getValue().toObject();
        frame.pop();
        JS_ASSERT(obj->isNative());

        objReg = frame.allocReg();

        masm.loadPtrFromImm(obj->addressOfShape(), objReg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, objReg,
                                             ic.shape, ImmPtr(NULL));
        masm.move(ImmPtr(obj), objReg);
    } else {
        objReg = frame.ownRegForData(fe);
        frame.pop();
        RegisterID reg = frame.allocReg();

        masm.loadShape(objReg, reg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, reg,
                                             ic.shape, ImmPtr(NULL));
        frame.freeReg(reg);
    }
    stubcc.linkExit(shapeGuard, Uses(0));

    stubcc.leave();
    passMICAddress(ic);
    ic.slowPathCall = OOL_STUBCALL(ic::GetGlobalName, REJOIN_GETTER);

    CHECK_IC_SPACE();

    testPushedType(REJOIN_GETTER, 0);

    /* Garbage value. */
    uint32_t slot = 1 << 24;

    masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), objReg);
    Address address(objReg, slot);

    /* Allocate any register other than objReg. */
    RegisterID treg = frame.allocReg();
    /* After dreg is loaded, it's safe to clobber objReg. */
    RegisterID dreg = objReg;

    ic.load = masm.loadValueWithAddressOffsetPatch(address, treg, dreg);

    frame.pushRegs(treg, dreg, type);

    /*
     * Note: no undefined check is needed for GNAME opcodes. These were not
     * declared with 'var', so cannot be undefined without triggering an error
     * or having been a pre-existing global whose value is undefined (which
     * type inference will know about).
     */
    BarrierState barrier = testBarrier(treg, dreg);

    stubcc.rejoin(Changes(1));

    getGlobalNames.append(ic);
    finishBarrier(barrier, REJOIN_GETTER, 0);
#else
    jsop_getgname_slow(index);
#endif

}
6524 :
/* Slow path for global-name sets: call the SetGlobalName stub and sync. */
void
mjit::Compiler::jsop_setgname_slow(PropertyName *name)
{
    prepareStubCall(Uses(2));
    masm.move(ImmPtr(name), Registers::ArgReg1);
    INLINE_STUBCALL(STRICT_VARIANT(stubs::SetGlobalName), REJOIN_FALLTHROUGH);
    frame.popn(2);
    pushSyncedEntry(0);
}
6534 :
/*
 * Emit JSOP_SETGNAME: store to a global variable. Tries a direct store to a
 * known global slot (type inference), falls back to a SetGlobalName MonoIC,
 * and uses the slow stub for monitored names or when an incremental GC write
 * barrier is required.
 */
void
mjit::Compiler::jsop_setgname(PropertyName *name, bool popGuaranteed)
{
    if (monitored(PC)) {
        /* Global accesses are monitored only for a few names like __proto__. */
        jsop_setgname_slow(name);
        return;
    }

    jsid id = ATOM_TO_JSID(name);
    if (cx->typeInferenceEnabled() && globalObj->isGlobal() && id == types::MakeTypeId(cx, id) &&
        !globalObj->getType(cx)->unknownProperties()) {
        /*
         * Note: object branding is disabled when inference is enabled. With
         * branding there is no way to ensure that a non-function property
         * can't get a function later and cause the global object to become
         * branded, requiring a shape change if it changes again.
         */
        types::TypeSet *types = globalObj->getType(cx)->getProperty(cx, id, false);
        if (!types)
            return;
        const js::Shape *shape = globalObj->nativeLookup(cx, ATOM_TO_JSID(name));
        if (shape && !shape->isMethod() && shape->hasDefaultSetter() &&
            shape->writable() && shape->hasSlot() &&
            !types->isOwnProperty(cx, globalObj->getType(cx), true)) {
            watchGlobalReallocation();
            HeapSlot *value = &globalObj->getSlotRef(shape->slot());
            RegisterID reg = frame.allocReg();
#ifdef JSGC_INCREMENTAL_MJ
            /* Write barrier. */
            if (cx->compartment->needsBarrier() && types->needsBarrier(cx)) {
                stubcc.linkExit(masm.jump(), Uses(0));
                stubcc.leave();
                stubcc.masm.move(ImmPtr(value), Registers::ArgReg1);
                OOL_STUBCALL(stubs::WriteBarrier, REJOIN_NONE);
                stubcc.rejoin(Changes(0));
            }
#endif
            /* Store straight into the baked-in slot address. */
            masm.move(ImmPtr(value), reg);
            frame.storeTo(frame.peek(-1), Address(reg), popGuaranteed);
            frame.shimmy(1);
            frame.freeReg(reg);
            return;
        }
    }

#ifdef JSGC_INCREMENTAL_MJ
    /* Write barrier. */
    if (cx->compartment->needsBarrier()) {
        jsop_setgname_slow(name);
        return;
    }
#endif

#if defined JS_MONOIC
    FrameEntry *objFe = frame.peek(-2);
    FrameEntry *fe = frame.peek(-1);
    JS_ASSERT_IF(objFe->isTypeKnown(), objFe->getKnownType() == JSVAL_TYPE_OBJECT);

    if (!fe->isConstant() && fe->isType(JSVAL_TYPE_DOUBLE))
        frame.forgetKnownDouble(fe);

    SetGlobalNameICInfo ic;

    frame.pinEntry(fe, ic.vr);
    Jump shapeGuard;

    RESERVE_IC_SPACE(masm);

    ic.fastPathStart = masm.label();
    if (objFe->isConstant()) {
        /* Global known at compile time: guard its shape via a patchable compare. */
        JSObject *obj = &objFe->getValue().toObject();
        JS_ASSERT(obj->isNative());

        ic.objReg = frame.allocReg();
        ic.shapeReg = ic.objReg;
        ic.objConst = true;

        masm.loadPtrFromImm(obj->addressOfShape(), ic.shapeReg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, ic.shapeReg,
                                             ic.shape, ImmPtr(NULL));
        masm.move(ImmPtr(obj), ic.objReg);
    } else {
        ic.objReg = frame.copyDataIntoReg(objFe);
        ic.shapeReg = frame.allocReg();
        ic.objConst = false;

        masm.loadShape(ic.objReg, ic.shapeReg);
        shapeGuard = masm.branchPtrWithPatch(Assembler::NotEqual, ic.shapeReg,
                                             ic.shape, ImmPtr(NULL));
        frame.freeReg(ic.shapeReg);
    }
    ic.shapeGuardJump = shapeGuard;
    ic.slowPathStart = stubcc.linkExit(shapeGuard, Uses(2));

    stubcc.leave();
    passMICAddress(ic);
    ic.slowPathCall = OOL_STUBCALL(ic::SetGlobalName, REJOIN_FALLTHROUGH);

    /* Garbage value. */
    uint32_t slot = 1 << 24;

    masm.loadPtr(Address(ic.objReg, JSObject::offsetOfSlots()), ic.objReg);
    Address address(ic.objReg, slot);

    /* Pick the store form matching what is known about the value. */
    if (ic.vr.isConstant()) {
        ic.store = masm.storeValueWithAddressOffsetPatch(ic.vr.value(), address);
    } else if (ic.vr.isTypeKnown()) {
        ic.store = masm.storeValueWithAddressOffsetPatch(ImmType(ic.vr.knownType()),
                                                         ic.vr.dataReg(), address);
    } else {
        ic.store = masm.storeValueWithAddressOffsetPatch(ic.vr.typeReg(), ic.vr.dataReg(), address);
    }

    frame.freeReg(ic.objReg);
    frame.unpinEntry(ic.vr);
    frame.shimmy(1);

    stubcc.rejoin(Changes(1));

    ic.fastPathRejoin = masm.label();
    setGlobalNames.append(ic);
#else
    jsop_setgname_slow(name);
#endif
}
6661 :
/* Slow path for JSOP_SETELEM: call the SetElem stub on obj/id/value. */
void
mjit::Compiler::jsop_setelem_slow()
{
    prepareStubCall(Uses(3));
    INLINE_STUBCALL(STRICT_VARIANT(stubs::SetElem), REJOIN_FALLTHROUGH);
    frame.popn(3);
    frame.pushSynced(JSVAL_TYPE_UNKNOWN);
}
6670 :
/* Slow path for JSOP_GETELEM: call the GetElem stub on obj/id. */
void
mjit::Compiler::jsop_getelem_slow()
{
    prepareStubCall(Uses(2));
    INLINE_STUBCALL(stubs::GetElem, REJOIN_FALLTHROUGH);
    testPushedType(REJOIN_FALLTHROUGH, -2, /* ool = */ false);
    frame.popn(2);
    pushSyncedEntry(0);
}
6680 :
/*
 * Emit JSOP_INSTANCEOF: inline prototype-chain walk for plain (non-bound)
 * function RHS operands; everything else falls back to the InstanceOf /
 * FastInstanceOf stubs.
 */
bool
mjit::Compiler::jsop_instanceof()
{
    FrameEntry *lhs = frame.peek(-2);
    FrameEntry *rhs = frame.peek(-1);

    // The fast path applies only when both operands are objects.
    if (rhs->isNotType(JSVAL_TYPE_OBJECT) || lhs->isNotType(JSVAL_TYPE_OBJECT)) {
        stubcc.linkExit(masm.jump(), Uses(2));
        frame.discardFe(lhs);
        frame.discardFe(rhs);
    }

    MaybeJump firstSlow;
    if (!rhs->isTypeKnown()) {
        Jump j = frame.testObject(Assembler::NotEqual, rhs);
        stubcc.linkExit(j, Uses(2));
    }

    frame.forgetMismatchedObject(lhs);
    frame.forgetMismatchedObject(rhs);

    RegisterID tmp = frame.allocReg();
    RegisterID obj = frame.tempRegForData(rhs);

    /* The RHS must be a function object for the inline path. */
    masm.loadBaseShape(obj, tmp);
    Jump notFunction = masm.branchPtr(Assembler::NotEqual,
                                      Address(tmp, BaseShape::offsetOfClass()),
                                      ImmPtr(&FunctionClass));

    stubcc.linkExit(notFunction, Uses(2));

    /* Test for bound functions. */
    Jump isBound = masm.branchTest32(Assembler::NonZero,
                                     Address(tmp, BaseShape::offsetOfFlags()),
                                     Imm32(BaseShape::BOUND_FUNCTION));
    {
        stubcc.linkExit(isBound, Uses(2));
        stubcc.leave();
        OOL_STUBCALL(stubs::InstanceOf, REJOIN_FALLTHROUGH);
        firstSlow = stubcc.masm.jump();
    }

    frame.freeReg(tmp);

    /* This is sadly necessary because the error case needs the object. */
    frame.dup();

    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, JSVAL_TYPE_UNKNOWN))
        return false;

    /* Primitive prototypes are invalid. */
    rhs = frame.peek(-1);
    Jump j = frame.testPrimitive(Assembler::Equal, rhs);
    stubcc.linkExit(j, Uses(3));

    /* Allocate registers up front, because of branchiness. */
    obj = frame.copyDataIntoReg(lhs);
    RegisterID proto = frame.copyDataIntoReg(rhs);
    RegisterID temp = frame.allocReg();

    MaybeJump isFalse;
    if (!lhs->isTypeKnown())
        isFalse = frame.testPrimitive(Assembler::Equal, lhs);

    Label loop = masm.label();

    /* Walk prototype chain, break out on NULL or hit. */
    masm.loadPtr(Address(obj, JSObject::offsetOfType()), obj);
    masm.loadPtr(Address(obj, offsetof(types::TypeObject, proto)), obj);
    Jump isFalse2 = masm.branchTestPtr(Assembler::Zero, obj, obj);
    /*
     * The 'isTrue' jump is taken while obj != proto and loops back; on a
     * match we fall through, load true, and jump over the false path.
     */
    Jump isTrue = masm.branchPtr(Assembler::NotEqual, obj, proto);
    isTrue.linkTo(loop, &masm);
    masm.move(Imm32(1), temp);
    isTrue = masm.jump();

    if (isFalse.isSet())
        isFalse.getJump().linkTo(masm.label(), &masm);
    isFalse2.linkTo(masm.label(), &masm);
    masm.move(Imm32(0), temp);
    isTrue.linkTo(masm.label(), &masm);

    frame.freeReg(proto);
    frame.freeReg(obj);

    stubcc.leave();
    OOL_STUBCALL(stubs::FastInstanceOf, REJOIN_FALLTHROUGH);

    frame.popn(3);
    frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, temp);

    if (firstSlow.isSet())
        firstSlow.getJump().linkTo(stubcc.masm.label(), &stubcc.masm);
    stubcc.rejoin(Changes(1));
    return true;
}
6777 :
/*
 * Emit JSOP_EVAL: sync everything and call the Eval stub with the argument
 * count; eval can run arbitrary code, so nothing stays in registers.
 */
void
mjit::Compiler::emitEval(uint32_t argc)
{
    /* Check for interrupts on function call */
    interruptCheckHelper();

    frame.syncAndKill(Uses(argc + 2));
    prepareStubCall(Uses(argc + 2));
    masm.move(Imm32(argc), Registers::ArgReg1);
    INLINE_STUBCALL(stubs::Eval, REJOIN_FALLTHROUGH);
    frame.popn(argc + 2);
    pushSyncedEntry(0);
}
6791 :
/* Emit an 'arguments' access via the Arguments stub with the given rejoin point. */
void
mjit::Compiler::jsop_arguments(RejoinState rejoin)
{
    prepareStubCall(Uses(0));
    INLINE_STUBCALL(stubs::Arguments, rejoin);
}
6798 :
bool
mjit::Compiler::jsop_newinit()
{
    /*
     * Compile JSOP_NEWINIT / JSOP_NEWARRAY / JSOP_NEWOBJECT: push a fresh
     * object or array. Emits an inline allocation from a template object
     * when one can be built; otherwise everything goes through a stub.
     */
    bool isArray;
    unsigned count = 0;
    JSObject *baseobj = NULL;
    switch (*PC) {
      case JSOP_NEWINIT:
        isArray = (GET_UINT8(PC) == JSProto_Array);
        break;
      case JSOP_NEWARRAY:
        isArray = true;
        count = GET_UINT24(PC);
        break;
      case JSOP_NEWOBJECT:
        /*
         * Scripts with NEWOBJECT must be compileAndGo, but treat these like
         * NEWINIT if the script's associated global is not known (or is not
         * actually a global object). This should only happen in chrome code.
         */
        isArray = false;
        baseobj = globalObj ? script->getObject(GET_UINT32_INDEX(PC)) : NULL;
        break;
      default:
        JS_NOT_REACHED("Bad op");
        return false;
    }

    /* Pick the slow-path stub and its argument up front; both paths use them. */
    void *stub, *stubArg;
    if (isArray) {
        stub = JS_FUNC_TO_DATA_PTR(void *, stubs::NewInitArray);
        stubArg = (void *) uintptr_t(count);
    } else {
        stub = JS_FUNC_TO_DATA_PTR(void *, stubs::NewInitObject);
        stubArg = (void *) baseobj;
    }

    /* Don't bake in types for non-compileAndGo scripts. */
    types::TypeObject *type = NULL;
    if (globalObj) {
        type = types::TypeScript::InitObject(cx, script, PC,
                                             isArray ? JSProto_Array : JSProto_Object);
        if (!type)
            return false;
    }

    /* Largest array that still fits entirely in a fixed-slot GC allocation. */
    size_t maxArraySlots =
        gc::GetGCKindSlots(gc::FINALIZE_OBJECT_LAST) - ObjectElements::VALUES_PER_HEADER;

    /*
     * Stub-only path: inference disabled, no known global, array too large
     * for inline allocation, or an object literal whose base object is
     * missing or requires dynamic slots.
     */
    if (!cx->typeInferenceEnabled() ||
        !globalObj ||
        (isArray && count > maxArraySlots) ||
        (!isArray && !baseobj) ||
        (!isArray && baseobj->hasDynamicSlots())) {
        prepareStubCall(Uses(0));
        /* The stub reads the new object's type from the frame's scratch slot. */
        masm.storePtr(ImmPtr(type), FrameAddress(offsetof(VMFrame, scratch)));
        masm.move(ImmPtr(stubArg), Registers::ArgReg1);
        INLINE_STUBCALL(stub, REJOIN_FALLTHROUGH);
        frame.pushSynced(JSVAL_TYPE_OBJECT);

        frame.extra(frame.peek(-1)).initArray = (*PC == JSOP_NEWARRAY);
        frame.extra(frame.peek(-1)).initObject = baseobj;

        return true;
    }

    /* Build a template object for the inline allocation path. */
    JSObject *templateObject;
    if (isArray) {
        templateObject = NewDenseUnallocatedArray(cx, count);
        if (!templateObject)
            return false;
        templateObject->setType(type);
    } else {
        templateObject = CopyInitializerObject(cx, baseobj, type);
        if (!templateObject)
            return false;
    }

    /* Inline allocation; divert to the stub if the GC free list is empty. */
    RegisterID result = frame.allocReg();
    Jump emptyFreeList = masm.getNewObject(cx, result, templateObject);

    stubcc.linkExit(emptyFreeList, Uses(0));
    stubcc.leave();

    stubcc.masm.storePtr(ImmPtr(type), FrameAddress(offsetof(VMFrame, scratch)));
    stubcc.masm.move(ImmPtr(stubArg), Registers::ArgReg1);
    OOL_STUBCALL(stub, REJOIN_FALLTHROUGH);

    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, result);

    stubcc.rejoin(Changes(1));

    frame.extra(frame.peek(-1)).initArray = (*PC == JSOP_NEWARRAY);
    frame.extra(frame.peek(-1)).initObject = baseobj;

    return true;
}
6896 :
bool
mjit::Compiler::jsop_regexp()
{
    /*
     * Compile JSOP_REGEXP: push the regexp literal's object, cloning it
     * unless analysis proves a clone would be unobservable.
     */
    JSObject *obj = script->getRegExp(GET_UINT32_INDEX(PC));
    RegExpStatics *res = globalObj ? globalObj->getRegExpStatics() : NULL;

    /*
     * Take the stub path when we can't reason about the regexp's global or
     * statics flags, or during an incremental GC mark phase (see the
     * RegExpShared comment further down for why baking in pointers is
     * unsafe then).
     */
    if (!globalObj ||
        &obj->global() != globalObj ||
        !cx->typeInferenceEnabled() ||
        analysis->localsAliasStack() ||
        types::TypeSet::HasObjectFlags(cx, globalObj->getType(cx),
                                       types::OBJECT_FLAG_REGEXP_FLAGS_SET) ||
        cx->runtime->gcIncrementalState == gc::MARK)
    {
        prepareStubCall(Uses(0));
        masm.move(ImmPtr(obj), Registers::ArgReg1);
        INLINE_STUBCALL(stubs::RegExp, REJOIN_FALLTHROUGH);
        frame.pushSynced(JSVAL_TYPE_OBJECT);
        return true;
    }

    RegExpObject *reobj = &obj->asRegExp();

    /* res is non-NULL here: the !globalObj case returned above. */
    DebugOnly<uint32_t> origFlags = reobj->getFlags();
    DebugOnly<uint32_t> staticsFlags = res->getFlags();
    JS_ASSERT((origFlags & staticsFlags) == staticsFlags);

    /*
     * JS semantics require regular expression literals to create different
     * objects every time they execute. We only need to do this cloning if the
     * script could actually observe the effect of such cloning, by getting
     * or setting properties on it. Particular RegExp and String natives take
     * regular expressions as 'this' or an argument, and do not let that
     * expression escape and be accessed by the script, so avoid cloning in
     * these cases.
     */
    analyze::SSAUseChain *uses =
        analysis->useChain(analyze::SSAValue::PushedValue(PC - script->code, 0));
    if (uses && uses->popped && !uses->next && !reobj->global() && !reobj->sticky()) {
        jsbytecode *use = script->code + uses->offset;
        uint32_t which = uses->u.which;
        if (JSOp(*use) == JSOP_CALLPROP) {
            /* e.g. 're.exec(s)' / 're.test(s)': regexp used only as 'this'. */
            JSObject *callee = analysis->pushedTypes(use, 0)->getSingleton(cx);
            if (callee && callee->isFunction()) {
                Native native = callee->toFunction()->maybeNative();
                if (native == js::regexp_exec || native == js::regexp_test) {
                    frame.push(ObjectValue(*obj));
                    return true;
                }
            }
        } else if (JSOp(*use) == JSOP_CALL && which == 0) {
            /* e.g. 'str.match(re)': regexp used only as the last argument. */
            uint32_t argc = GET_ARGC(use);
            JSObject *callee = analysis->poppedTypes(use, argc + 1)->getSingleton(cx);
            if (callee && callee->isFunction() && argc >= 1 && which == argc - 1) {
                Native native = callee->toFunction()->maybeNative();
                if (native == js::str_match ||
                    native == js::str_search ||
                    native == js::str_replace ||
                    native == js::str_split) {
                    frame.push(ObjectValue(*obj));
                    return true;
                }
            }
        }
    }

    /*
     * Force creation of the RegExpShared in the script's RegExpObject so that
     * we grab it in the getNewObject template copy. Note that JIT code is
     * discarded on every GC, which permits us to burn in the pointer to the
     * RegExpShared. We don't do this during an incremental
     * GC, since we don't discard JIT code after every marking slice.
     */
    RegExpGuard g;
    if (!reobj->getShared(cx, &g))
        return false;

    /* Inline clone of the regexp object, with an OOL stub fallback. */
    RegisterID result = frame.allocReg();
    Jump emptyFreeList = masm.getNewObject(cx, result, obj);

    stubcc.linkExit(emptyFreeList, Uses(0));
    stubcc.leave();

    stubcc.masm.move(ImmPtr(obj), Registers::ArgReg1);
    OOL_STUBCALL(stubs::RegExp, REJOIN_FALLTHROUGH);

    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, result);

    stubcc.rejoin(Changes(1));
    return true;
}
6988 :
6989 : bool
6990 33602 : mjit::Compiler::startLoop(jsbytecode *head, Jump entry, jsbytecode *entryTarget)
6991 : {
6992 33602 : JS_ASSERT(cx->typeInferenceEnabled() && script == outerScript);
6993 33602 : JS_ASSERT(shouldStartLoop(head));
6994 :
6995 33602 : if (loop) {
6996 : /*
6997 : * Convert all loop registers in the outer loop into unassigned registers.
6998 : * We don't keep track of which registers the inner loop uses, so the only
6999 : * registers that can be carried in the outer loop must be mentioned before
7000 : * the inner loop starts.
7001 : */
7002 3756 : loop->clearLoopRegisters();
7003 : }
7004 :
7005 33602 : LoopState *nloop = OffTheBooks::new_<LoopState>(cx, &ssa, this, &frame);
7006 33602 : if (!nloop || !nloop->init(head, entry, entryTarget)) {
7007 0 : js_ReportOutOfMemory(cx);
7008 0 : return false;
7009 : }
7010 :
7011 33602 : nloop->outer = loop;
7012 33602 : loop = nloop;
7013 33602 : frame.setLoop(loop);
7014 :
7015 33602 : return true;
7016 : }
7017 :
bool
mjit::Compiler::finishLoop(jsbytecode *head)
{
    /* Only loops compiled with inference and contained in this chunk are tracked. */
    if (!cx->typeInferenceEnabled() || !bytecodeInChunk(head))
        return true;

    /*
     * We're done processing the current loop. Every loop has exactly one backedge
     * at the end ('continue' statements are forward jumps to the loop test),
     * and after jumpAndRun'ing on that edge we can pop it from the frame.
     */
    JS_ASSERT(loop && loop->headOffset() == uint32_t(head - script->code));

    jsbytecode *entryTarget = script->code + loop->entryOffset();

    /*
     * Fix up the jump entering the loop. We are doing this after all code has
     * been emitted for the backedge, so that we are now in the loop's fallthrough
     * (where we will emit the entry code).
     */
    Jump fallthrough = masm.jump();

#ifdef DEBUG
    if (IsJaegerSpewChannelActive(JSpew_Regalloc)) {
        RegisterAllocation *alloc = analysis->getAllocation(head);
        JaegerSpew(JSpew_Regalloc, "loop allocation at %u:", unsigned(head - script->code));
        frame.dumpAllocation(alloc);
    }
#endif

    loop->entryJump().linkTo(masm.label(), &masm);

    /* Emission below happens at other bytecode positions; save PC to restore after. */
    jsbytecode *oldPC = PC;

    PC = entryTarget;
    {
        OOL_STUBCALL(stubs::MissedBoundsCheckEntry, REJOIN_RESUME);

        if (loop->generatingInvariants()) {
            /*
             * To do the initial load of the invariants, jump to the invariant
             * restore point after the call just emitted. :XXX: fix hackiness.
             */
            if (oomInVector)
                return false;
            Label label = callSites[callSites.length() - 1].loopJumpLabel;
            stubcc.linkExitDirect(masm.jump(), label);
        }
        stubcc.crossJump(stubcc.masm.jump(), masm.label());
    }
    PC = oldPC;

    frame.prepareForJump(entryTarget, masm, true);

    if (!jumpInScript(masm.jump(), entryTarget))
        return false;

    PC = head;
    if (!analysis->getCode(head).safePoint) {
        /*
         * Emit a stub into the OOL path which loads registers from a synced state
         * and jumps to the loop head, for rejoining from the interpreter.
         */
        LoopEntry entry;
        entry.pcOffset = head - script->code;

        OOL_STUBCALL(stubs::MissedBoundsCheckHead, REJOIN_RESUME);

        if (loop->generatingInvariants()) {
            if (oomInVector)
                return false;
            entry.label = callSites[callSites.length() - 1].loopJumpLabel;
        } else {
            entry.label = stubcc.masm.label();
        }

        /*
         * The interpreter may store integers in slots we assume are doubles,
         * make sure state is consistent before joining. Note that we don't
         * need any handling for other safe points the interpreter can enter
         * from, i.e. from switch and try blocks, as we don't assume double
         * variables are coherent in such cases.
         */
        for (uint32_t slot = ArgSlot(0); slot < TotalSlots(script); slot++) {
            if (a->varTypes[slot].getTypeTag(cx) == JSVAL_TYPE_DOUBLE) {
                FrameEntry *fe = frame.getSlotEntry(slot);
                stubcc.masm.ensureInMemoryDouble(frame.addressOf(fe));
            }
        }

        frame.prepareForJump(head, stubcc.masm, true);
        if (!stubcc.jumpInScript(stubcc.masm.jump(), head))
            return false;

        loopEntries.append(entry);
    }
    PC = oldPC;

    /* Write out loads and tests of loop invariants at all calls in the loop body. */
    loop->flushLoop(stubcc);

    /* Pop this loop off the loop stack, restoring the enclosing loop (if any). */
    LoopState *nloop = loop->outer;
    cx->delete_(loop);
    loop = nloop;
    frame.setLoop(loop);

    fallthrough.linkTo(masm.label(), &masm);

    /*
     * Clear all registers used for loop temporaries. In the case of loop
     * nesting, we do not allocate temporaries for the outer loop.
     */
    frame.clearTemporaries();

    return true;
}
7134 :
7135 : /*
7136 : * The state at the fast jump must reflect the frame's current state. If specified
7137 : * the state at the slow jump must be fully synced.
7138 : *
7139 : * The 'trampoline' argument indicates whether a trampoline was emitted into
7140 : * the OOL path loading some registers for the target. If this is the case,
7141 : * the fast path jump was redirected to the stub code's initial label, and the
7142 : * same must happen for any other fast paths for the target (i.e. paths from
7143 : * inline caches).
7144 : *
7145 : * The 'fallthrough' argument indicates this is a jump emitted for a fallthrough
7146 : * at the end of the compiled chunk. In this case the opcode may not be a
7147 : * JOF_JUMP opcode, and the compiler should not watch for fusions.
7148 : */
bool
mjit::Compiler::jumpAndRun(Jump j, jsbytecode *target, Jump *slow, bool *trampoline,
                           bool fallthrough)
{
    if (trampoline)
        *trampoline = false;

    /*
     * Jumps leaving this chunk (only possible when not inlining) are recorded
     * as outgoing edges rather than emitted directly; they are patched once
     * the target chunk exists.
     */
    if (!a->parent && !bytecodeInChunk(target)) {
        /*
         * syncForBranch() must have ensured the stack is synced. Figure out
         * the source of the jump, which may be the opcode after PC if two ops
         * were fused for a branch.
         */
        OutgoingChunkEdge edge;
        edge.source = PC - outerScript->code;
        JSOp op = JSOp(*PC);
        if (!fallthrough && !(js_CodeSpec[op].format & JOF_JUMP) && op != JSOP_TABLESWITCH)
            edge.source += GetBytecodeLength(PC);
        edge.target = target - outerScript->code;
        edge.fastJump = j;
        if (slow)
            edge.slowJump = *slow;
        chunkEdges.append(edge);
        return true;
    }

    /*
     * Unless we are coming from a branch which synced everything, syncForBranch
     * must have been called and ensured an allocation at the target.
     */
    RegisterAllocation *lvtarget = NULL;
    bool consistent = true;
    if (cx->typeInferenceEnabled()) {
        /* Lazily create a fully-synced allocation for targets without one. */
        RegisterAllocation *&alloc = analysis->getAllocation(target);
        if (!alloc) {
            alloc = cx->typeLifoAlloc().new_<RegisterAllocation>(false);
            if (!alloc) {
                js_ReportOutOfMemory(cx);
                return false;
            }
        }
        lvtarget = alloc;
        consistent = frame.consistentRegisters(target);
    }

    if (!lvtarget || lvtarget->synced()) {
        /* Fully synced target: both the fast and slow jumps can go straight there. */
        JS_ASSERT(consistent);
        if (!jumpInScript(j, target))
            return false;
        if (slow && !stubcc.jumpInScript(*slow, target))
            return false;
    } else {
        if (consistent) {
            if (!jumpInScript(j, target))
                return false;
        } else {
            /*
             * Make a trampoline to issue remaining loads for the register
             * state at target.
             */
            Label start = stubcc.masm.label();
            stubcc.linkExitDirect(j, start);
            frame.prepareForJump(target, stubcc.masm, false);
            if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
                return false;
            if (trampoline)
                *trampoline = true;
            if (pcLengths) {
                /*
                 * This is OOL code but will usually be executed, so track
                 * it in the CODE_LENGTH for the opcode.
                 */
                uint32_t offset = ssa.frameLength(a->inlineIndex) + PC - script->code;
                size_t length = stubcc.masm.size() - stubcc.masm.distanceOf(start);
                pcLengths[offset].codeLength += length;
            }
        }

        /* The slow path comes from fully-synced state and must load registers. */
        if (slow) {
            slow->linkTo(stubcc.masm.label(), &stubcc.masm);
            frame.prepareForJump(target, stubcc.masm, true);
            if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
                return false;
        }
    }

    /* A backwards jump is the loop backedge; close out the loop. */
    if (target < PC)
        return finishLoop(target);
    return true;
}
7239 :
void
mjit::Compiler::enterBlock(StaticBlockObject *block)
{
    /* For now, don't bother doing anything for this opcode. */
    frame.syncAndForgetEverything();
    masm.move(ImmPtr(block), Registers::ArgReg1);
    INLINE_STUBCALL(stubs::EnterBlock, REJOIN_NONE);
    /*
     * Only JSOP_ENTERBLOCK grows the tracked stack by the block's defs here;
     * presumably other block-entering ops manage the stack differently —
     * NOTE(review): confirm against the opcode definitions.
     */
    if (*PC == JSOP_ENTERBLOCK)
        frame.enterBlock(StackDefs(script, PC));
}
7250 :
void
mjit::Compiler::leaveBlock()
{
    /*
     * Note: After bug 535912, we can pass the block obj directly, inline
     * PutBlockObject, and do away with the muckiness in PutBlockObject.
     */
    uint32_t n = StackUses(script, PC);
    prepareStubCall(Uses(n));
    INLINE_STUBCALL(stubs::LeaveBlock, REJOIN_NONE);
    /* Pop the block's n stack slots from the tracked frame state. */
    frame.leaveBlock(n);
}
7263 :
7264 : // Creates the new object expected for constructors, and places it in |thisv|.
7265 : // It is broken down into the following operations:
7266 : // CALLEE
7267 : // GETPROP "prototype"
7268 : // IFPRIMTOP:
7269 : // NULL
//   call js_CreateThisForFunctionWithProto(...)
7271 : //
bool
mjit::Compiler::constructThis()
{
    JS_ASSERT(isConstructing);

    JSFunction *fun = script->function();

    do {
        /* Inline path requires inference and a singleton callee with known properties. */
        if (!cx->typeInferenceEnabled() ||
            !fun->hasSingletonType() ||
            fun->getType(cx)->unknownProperties())
        {
            break;
        }

        /* Look for a known singleton 'prototype' property on the callee. */
        jsid id = ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom);
        types::TypeSet *protoTypes = fun->getType(cx)->getProperty(cx, id, false);

        JSObject *proto = protoTypes->getSingleton(cx, true);
        if (!proto)
            break;

        /*
         * Generate an inline path to create a 'this' object with the given
         * prototype. Only do this if the type is actually known as a possible
         * 'this' type of the script.
         */
        types::TypeObject *type = proto->getNewType(cx, fun);
        if (!type)
            return false;
        if (!types::TypeScript::ThisTypes(script)->hasType(types::Type::ObjectType(type)))
            break;

        JSObject *templateObject = js_CreateThisForFunctionWithProto(cx, fun, proto);
        if (!templateObject)
            return false;

        /*
         * The template incorporates a shape and/or fixed slots from any
         * newScript on its type, so make sure recompilation is triggered
         * should this information change later.
         */
        if (templateObject->type()->newScript)
            types::TypeSet::WatchObjectStateChange(cx, templateObject->type());

        /* Inline allocation of 'this'; fall off to the stub on an empty free list. */
        RegisterID result = frame.allocReg();
        Jump emptyFreeList = masm.getNewObject(cx, result, templateObject);

        stubcc.linkExit(emptyFreeList, Uses(0));
        stubcc.leave();

        stubcc.masm.move(ImmPtr(proto), Registers::ArgReg1);
        OOL_STUBCALL(stubs::CreateThis, REJOIN_RESUME);

        frame.setThis(result);

        stubcc.rejoin(Changes(1));
        return true;
    } while (false);

    /* General path: fetch callee.prototype at runtime and call the stub. */

    // Load the callee.
    frame.pushCallee();

    // Get callee.prototype.
    if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, JSVAL_TYPE_UNKNOWN, false, /* forPrototype = */ true))
        return false;

    // Reach into the proto Value and grab a register for its data.
    FrameEntry *protoFe = frame.peek(-1);
    RegisterID protoReg = frame.ownRegForData(protoFe);

    // Now, get the type. If it's not an object, set protoReg to NULL.
    JS_ASSERT_IF(protoFe->isTypeKnown(), protoFe->isType(JSVAL_TYPE_OBJECT));
    if (!protoFe->isType(JSVAL_TYPE_OBJECT)) {
        Jump isNotObject = frame.testObject(Assembler::NotEqual, protoFe);
        stubcc.linkExitDirect(isNotObject, stubcc.masm.label());
        stubcc.masm.move(ImmPtr(NULL), protoReg);
        stubcc.crossJump(stubcc.masm.jump(), masm.label());
    }

    // Done with the protoFe.
    frame.pop();

    prepareStubCall(Uses(0));
    if (protoReg != Registers::ArgReg1)
        masm.move(protoReg, Registers::ArgReg1);
    INLINE_STUBCALL(stubs::CreateThis, REJOIN_RESUME);
    frame.freeReg(protoReg);
    return true;
}
7362 :
bool
mjit::Compiler::jsop_tableswitch(jsbytecode *pc)
{
#if defined JS_CPU_ARM
    JS_NOT_REACHED("Implement jump(BaseIndex) for ARM");
    return true;
#else
    jsbytecode *originalPC = pc;
    DebugOnly<JSOp> op = JSOp(*originalPC);
    JS_ASSERT(op == JSOP_TABLESWITCH);

    /* Decode the switch header: default offset, then low/high case bounds. */
    uint32_t defaultTarget = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;

    int32_t low = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;
    int32_t high = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;
    int numJumps = high + 1 - low;
    JS_ASSERT(numJumps >= 0);

    /*
     * Stub the whole switch when the operand is known not to be an int32,
     * or when the table is too large to be worth emitting inline.
     */
    FrameEntry *fe = frame.peek(-1);
    if (fe->isNotType(JSVAL_TYPE_INT32) || numJumps > 256) {
        frame.syncAndForgetEverything();
        masm.move(ImmPtr(originalPC), Registers::ArgReg1);

        /* prepareStubCall() is not needed due to forgetEverything() */
        INLINE_STUBCALL(stubs::TableSwitch, REJOIN_NONE);
        frame.pop();
        masm.jump(Registers::ReturnReg);
        return true;
    }

    /* Materialize the operand's int32 payload in a register. */
    RegisterID dataReg;
    if (fe->isConstant()) {
        JS_ASSERT(fe->isType(JSVAL_TYPE_INT32));
        dataReg = frame.allocReg();
        masm.move(Imm32(fe->getValue().toInt32()), dataReg);
    } else {
        dataReg = frame.copyDataIntoReg(fe);
    }

    RegisterID reg = frame.allocReg();
    frame.syncAndForgetEverything();

    /* If the operand's type is not known, test for int32 and stub otherwise. */
    MaybeJump notInt;
    if (!fe->isType(JSVAL_TYPE_INT32))
        notInt = masm.testInt32(Assembler::NotEqual, frame.addressOf(fe));

    /* The jump table's base address is patched in later; see jumpTables. */
    JumpTable jt;
    jt.offsetIndex = jumpTableEdges.length();
    jt.label = masm.moveWithPatch(ImmPtr(NULL), reg);
    jumpTables.append(jt);

    /* Record one edge per case; a zero offset marks a hole (use the default). */
    for (int i = 0; i < numJumps; i++) {
        uint32_t target = GET_JUMP_OFFSET(pc);
        if (!target)
            target = defaultTarget;
        JumpTableEdge edge;
        edge.source = originalPC - script->code;
        edge.target = (originalPC + target) - script->code;
        jumpTableEdges.append(edge);
        pc += JUMP_OFFSET_LEN;
    }
    /* Rebase the operand to a zero-based table index, then bounds check it. */
    if (low != 0)
        masm.sub32(Imm32(low), dataReg);
    Jump defaultCase = masm.branch32(Assembler::AboveOrEqual, dataReg, Imm32(numJumps));
    BaseIndex jumpTarget(reg, dataReg, Assembler::ScalePtr);
    masm.jump(jumpTarget);

    /* OOL path for operands that turn out not to be int32 at runtime. */
    if (notInt.isSet()) {
        stubcc.linkExitDirect(notInt.get(), stubcc.masm.label());
        stubcc.leave();
        stubcc.masm.move(ImmPtr(originalPC), Registers::ArgReg1);
        OOL_STUBCALL(stubs::TableSwitch, REJOIN_NONE);
        stubcc.masm.jump(Registers::ReturnReg);
    }
    frame.pop();
    return jumpAndRun(defaultCase, originalPC + defaultTarget);
#endif
}
7444 :
void
mjit::Compiler::jsop_toid()
{
    /* Leave integers alone, stub everything else. */
    FrameEntry *top = frame.peek(-1);

    /* Known int32: already a valid id, nothing to emit. */
    if (top->isType(JSVAL_TYPE_INT32))
        return;

    /* Known non-int32: unconditionally call the stub. */
    if (top->isNotType(JSVAL_TYPE_INT32)) {
        prepareStubCall(Uses(2));
        INLINE_STUBCALL(stubs::ToId, REJOIN_FALLTHROUGH);
        frame.pop();
        pushSyncedEntry(0);
        return;
    }

    /* Unknown type: test for int32 inline, stub only the non-int32 case. */
    frame.syncAt(-1);

    Jump j = frame.testInt32(Assembler::NotEqual, top);
    stubcc.linkExit(j, Uses(2));

    stubcc.leave();
    OOL_STUBCALL(stubs::ToId, REJOIN_FALLTHROUGH);

    frame.pop();
    pushSyncedEntry(0);

    stubcc.rejoin(Changes(1));
}
7475 :
void
mjit::Compiler::jsop_in()
{
    FrameEntry *obj = frame.peek(-1);
    FrameEntry *id = frame.peek(-2);

    /*
     * Fast path for 'id in obj' with an int32 id on a dense array: compare
     * the index against the initialized length (and check for holes unless
     * the array is known packed) instead of calling the stub.
     */
    if (cx->typeInferenceEnabled() && id->isType(JSVAL_TYPE_INT32)) {
        types::TypeSet *types = analysis->poppedTypes(PC, 0);

        if (obj->mightBeType(JSVAL_TYPE_OBJECT) &&
            !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_DENSE_ARRAY) &&
            !types::ArrayPrototypeHasIndexedProperty(cx, outerScript))
        {
            bool isPacked = !types->hasObjectFlags(cx, types::OBJECT_FLAG_NON_PACKED_ARRAY);

            /* Guard that the value really is an object if its type is unknown. */
            if (!obj->isTypeKnown()) {
                Jump guard = frame.testObject(Assembler::NotEqual, obj);
                stubcc.linkExit(guard, Uses(2));
            }

            RegisterID dataReg = frame.copyDataIntoReg(obj);

            Int32Key key = id->isConstant()
                         ? Int32Key::FromConstant(id->getValue().toInt32())
                         : Int32Key::FromRegister(frame.tempRegForData(id));

            masm.loadPtr(Address(dataReg, JSObject::offsetOfElements()), dataReg);

            // Guard on the array's initialized length.
            Jump initlenGuard = masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(),
                                                      dataReg, key, Assembler::BelowOrEqual);

            // Guard to make sure we don't have a hole. Skip it if the array is packed.
            MaybeJump holeCheck;
            if (!isPacked)
                holeCheck = masm.guardElementNotHole(dataReg, key);

            /* In range and not a hole: the result is true. */
            masm.move(Imm32(1), dataReg);
            Jump done = masm.jump();

            /* Out of range (or a hole): the result is false. */
            Label falseBranch = masm.label();
            initlenGuard.linkTo(falseBranch, &masm);
            if (!isPacked)
                holeCheck.getJump().linkTo(falseBranch, &masm);
            masm.move(Imm32(0), dataReg);

            done.linkTo(masm.label(), &masm);

            stubcc.leave();
            OOL_STUBCALL_USES(stubs::In, REJOIN_PUSH_BOOLEAN, Uses(2));

            frame.popn(2);
            /* On the stub path, move the stub's boolean result into dataReg. */
            if (dataReg != Registers::ReturnReg)
                stubcc.masm.move(Registers::ReturnReg, dataReg);

            frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, dataReg);

            stubcc.rejoin(Changes(2));

            return;
        }
    }

    /* General case: the In stub leaves its boolean result in ReturnReg. */
    prepareStubCall(Uses(2));
    INLINE_STUBCALL(stubs::In, REJOIN_PUSH_BOOLEAN);
    frame.popn(2);
    frame.takeReg(Registers::ReturnReg);
    frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
}
7545 :
7546 : /*
7547 : * For any locals or args which we know to be integers but are treated as
7548 : * doubles by the type inference, convert to double. These will be assumed to be
7549 : * doubles at control flow join points. This function must be called before
7550 : * branching to another opcode.
7551 : *
7552 : * We can only carry entries as doubles when we can track all incoming edges to
7553 : * a join point (no try blocks etc.) and when we can track all writes to the
7554 : * local/arg (the slot does not escape) and ensure the Compiler representation
7555 : * matches the inferred type for the variable's SSA value. These properties are
7556 : * both ensured by analysis->trackSlot.
7557 : */
void
mjit::Compiler::fixDoubleTypes(jsbytecode *target)
{
    if (!cx->typeInferenceEnabled())
        return;

    /*
     * Fill fixedIntToDoubleEntries with all variables that are known to be an
     * int here and a double at the branch target, and fixedDoubleToAnyEntries
     * with all variables that are known to be a double here but not at the
     * branch target.
     *
     * Per prepareInferenceTypes, the target state consists of the current
     * state plus any phi nodes or other new values introduced at the target.
     */
    JS_ASSERT(fixedIntToDoubleEntries.empty());
    JS_ASSERT(fixedDoubleToAnyEntries.empty());
    const SlotValue *newv = analysis->newValues(target);
    if (newv) {
        while (newv->slot) {
            /* Only tracked slots whose phi node sits at the target matter here. */
            if (newv->value.kind() != SSAValue::PHI ||
                newv->value.phiOffset() != uint32_t(target - script->code) ||
                !analysis->trackSlot(newv->slot)) {
                newv++;
                continue;
            }
            JS_ASSERT(newv->slot < TotalSlots(script));
            types::TypeSet *targetTypes = analysis->getValueTypes(newv->value);
            FrameEntry *fe = frame.getSlotEntry(newv->slot);
            VarType &vt = a->varTypes[newv->slot];
            JSValueType type = vt.getTypeTag(cx);
            if (targetTypes->getKnownTypeTag(cx) == JSVAL_TYPE_DOUBLE) {
                if (type == JSVAL_TYPE_INT32) {
                    /* Int here, double at the target: convert before branching. */
                    fixedIntToDoubleEntries.append(newv->slot);
                    frame.ensureDouble(fe);
                    frame.forgetLoopReg(fe);
                } else if (type == JSVAL_TYPE_UNKNOWN) {
                    /*
                     * Unknown here but a double at the target. The type
                     * set for the existing value must be empty, so this
                     * code is doomed and we can just mark the value as
                     * a double.
                     */
                    frame.ensureDouble(fe);
                } else {
                    JS_ASSERT(type == JSVAL_TYPE_DOUBLE);
                }
            } else if (type == JSVAL_TYPE_DOUBLE) {
                /* Double here but not at the target: sync and forget the entry. */
                fixedDoubleToAnyEntries.append(newv->slot);
                frame.syncAndForgetFe(fe);
                frame.forgetLoopReg(fe);
            }
            newv++;
        }
    }
}
7614 :
7615 : void
7616 122688 : mjit::Compiler::watchGlobalReallocation()
7617 : {
7618 122688 : JS_ASSERT(cx->typeInferenceEnabled());
7619 122688 : if (hasGlobalReallocation)
7620 99676 : return;
7621 23012 : types::TypeSet::WatchObjectStateChange(cx, globalObj->getType(cx));
7622 23012 : hasGlobalReallocation = true;
7623 : }
7624 :
7625 : void
7626 291734 : mjit::Compiler::updateVarType()
7627 : {
7628 291734 : if (!cx->typeInferenceEnabled())
7629 186229 : return;
7630 :
7631 : /*
7632 : * For any non-escaping variable written at the current opcode, update the
7633 : * associated type sets according to the written type, keeping the type set
7634 : * for each variable in sync with what the SSA analysis has determined
7635 : * (see prepareInferenceTypes).
7636 : */
7637 :
7638 105505 : types::TypeSet *types = pushedTypeSet(0);
7639 105505 : uint32_t slot = GetBytecodeSlot(script, PC);
7640 :
7641 105505 : if (analysis->trackSlot(slot)) {
7642 49353 : VarType &vt = a->varTypes[slot];
7643 49353 : vt.setTypes(types);
7644 :
7645 : /*
7646 : * Variables whose type has been inferred as a double need to be
7647 : * maintained by the frame as a double. We might forget the exact
7648 : * representation used by the next call to fixDoubleTypes, fix it now.
7649 : */
7650 49353 : if (vt.getTypeTag(cx) == JSVAL_TYPE_DOUBLE)
7651 834 : frame.ensureDouble(frame.getSlotEntry(slot));
7652 : }
7653 : }
7654 :
7655 : void
7656 454993 : mjit::Compiler::updateJoinVarTypes()
7657 : {
7658 454993 : if (!cx->typeInferenceEnabled())
7659 281413 : return;
7660 :
7661 : /* Update variable types for all new values at this bytecode. */
7662 173580 : const SlotValue *newv = analysis->newValues(PC);
7663 173580 : if (newv) {
7664 244095 : while (newv->slot) {
7665 135947 : if (newv->slot < TotalSlots(script)) {
7666 45100 : VarType &vt = a->varTypes[newv->slot];
7667 45100 : JSValueType type = vt.getTypeTag(cx);
7668 45100 : vt.setTypes(analysis->getValueTypes(newv->value));
7669 45100 : if (vt.getTypeTag(cx) != type) {
7670 : /*
7671 : * If the known type of a variable changes (even if the
7672 : * variable itself has not been reassigned) then we can't
7673 : * carry a loop register for the var.
7674 : */
7675 10262 : FrameEntry *fe = frame.getSlotEntry(newv->slot);
7676 10262 : frame.forgetLoopReg(fe);
7677 : }
7678 : }
7679 135947 : newv++;
7680 : }
7681 : }
7682 : }
7683 :
7684 : void
7685 577733 : mjit::Compiler::restoreVarType()
7686 : {
7687 577733 : if (!cx->typeInferenceEnabled())
7688 331443 : return;
7689 :
7690 246290 : uint32_t slot = GetBytecodeSlot(script, PC);
7691 :
7692 246290 : if (slot >= analyze::TotalSlots(script))
7693 12937 : return;
7694 :
7695 : /*
7696 : * Restore the known type of a live local or argument. We ensure that types
7697 : * of tracked variables match their inferred type (as tracked in varTypes),
7698 : * but may have forgotten it due to a branch or syncAndForgetEverything.
7699 : */
7700 233353 : JSValueType type = a->varTypes[slot].getTypeTag(cx);
7701 236645 : if (type != JSVAL_TYPE_UNKNOWN &&
7702 3292 : (type != JSVAL_TYPE_DOUBLE || analysis->trackSlot(slot))) {
7703 144593 : FrameEntry *fe = frame.getSlotEntry(slot);
7704 144593 : JS_ASSERT_IF(fe->isTypeKnown(), fe->isType(type));
7705 144593 : if (!fe->isTypeKnown())
7706 93116 : frame.learnType(fe, type, false);
7707 : }
7708 : }
7709 :
7710 : JSValueType
7711 2625092 : mjit::Compiler::knownPushedType(uint32_t pushed)
7712 : {
7713 2625092 : if (!cx->typeInferenceEnabled())
7714 2052317 : return JSVAL_TYPE_UNKNOWN;
7715 572775 : types::TypeSet *types = analysis->pushedTypes(PC, pushed);
7716 572775 : return types->getKnownTypeTag(cx);
7717 : }
7718 :
7719 : bool
7720 24592 : mjit::Compiler::mayPushUndefined(uint32_t pushed)
7721 : {
7722 24592 : JS_ASSERT(cx->typeInferenceEnabled());
7723 :
7724 : /*
7725 : * This should only be used when the compiler is checking if it is OK to push
7726 : * undefined without going to a stub that can trigger recompilation.
7727 : * If this returns false and undefined subsequently becomes a feasible
7728 : * value pushed by the bytecode, recompilation will *NOT* be triggered.
7729 : */
7730 24592 : types::TypeSet *types = analysis->pushedTypes(PC, pushed);
7731 24592 : return types->hasType(types::Type::UndefinedType());
7732 : }
7733 :
7734 : types::TypeSet *
7735 814479 : mjit::Compiler::pushedTypeSet(uint32_t pushed)
7736 : {
7737 814479 : if (!cx->typeInferenceEnabled())
7738 603227 : return NULL;
7739 211252 : return analysis->pushedTypes(PC, pushed);
7740 : }
7741 :
7742 : bool
7743 760272 : mjit::Compiler::monitored(jsbytecode *pc)
7744 : {
7745 760272 : if (!cx->typeInferenceEnabled())
7746 504658 : return false;
7747 255614 : return analysis->getCode(pc).monitoredTypes;
7748 : }
7749 :
7750 : bool
7751 569773 : mjit::Compiler::hasTypeBarriers(jsbytecode *pc)
7752 : {
7753 569773 : if (!cx->typeInferenceEnabled())
7754 85249 : return false;
7755 :
7756 484524 : return analysis->typeBarriers(cx, pc) != NULL;
7757 : }
7758 :
void
mjit::Compiler::pushSyncedEntry(uint32_t pushed)
{
    /*
     * Push a synced (in-memory) frame entry for pushed value |pushed| of the
     * current bytecode, tagged with whatever type inference knows about it.
     */
    frame.pushSynced(knownPushedType(pushed));
}
7764 :
7765 : JSObject *
7766 1888861 : mjit::Compiler::pushedSingleton(unsigned pushed)
7767 : {
7768 1888861 : if (!cx->typeInferenceEnabled())
7769 1411425 : return NULL;
7770 :
7771 477436 : types::TypeSet *types = analysis->pushedTypes(PC, pushed);
7772 477436 : return types->getSingleton(cx);
7773 : }
7774 :
7775 : /*
7776 : * Barriers overview.
7777 : *
7778 : * After a property fetch finishes, we may need to do type checks on it to make
7779 : * sure it matches the pushed type set for this bytecode. This can be either
7780 : * because there is a type barrier at the bytecode, or because we cannot rule
7781 : * out an undefined result. For such accesses, we push a register pair, and
7782 : * then use those registers to check the fetched type matches the inferred
7783 : * types for the pushed set. The flow here is tricky:
7784 : *
7785 : * frame.pushRegs(type, data, knownType);
7786 : * --- Depending on knownType, the frame's representation for the pushed entry
7787 : * may not be a register pair anymore. knownType is based on the observed
7788 : * types that have been pushed here and may not actually match type/data.
7789 : * pushRegs must not clobber either register, for the test below.
7790 : *
7791 : * testBarrier(type, data)
7792 : * --- Use the type/data regs and generate a single jump taken if the barrier
7793 : * has been violated.
7794 : *
7795 : * --- Rearrange stack, rejoin from stub paths. No code must be emitted into
7796 : * the inline path between testBarrier and finishBarrier. Since a stub path
7797 : * may be in progress we can't call finishBarrier before stubcc.rejoin,
7798 : * and since typeReg/dataReg may not be intact after the stub call rejoin
7799 : * (if knownType != JSVAL_TYPE_UNKNOWN) we can't testBarrier after calling
7800 : * stubcc.rejoin.
7801 : *
7802 : * finishBarrier()
7803 : * --- Link the barrier jump to a new stub code path which updates the pushed
7804 : * types (possibly triggering recompilation). The frame has changed since
7805 : * pushRegs to reflect the final state of the op, which is OK as no inline
7806 : * code has been emitted since the barrier jump.
7807 : */
7808 :
7809 : mjit::Compiler::BarrierState
7810 120512 : mjit::Compiler::pushAddressMaybeBarrier(Address address, JSValueType type, bool reuseBase,
7811 : bool testUndefined)
7812 : {
7813 120512 : if (!hasTypeBarriers(PC) && !testUndefined) {
7814 28852 : frame.push(address, type, reuseBase);
7815 28852 : return BarrierState();
7816 : }
7817 :
7818 : RegisterID typeReg, dataReg;
7819 91660 : frame.loadIntoRegisters(address, reuseBase, &typeReg, &dataReg);
7820 :
7821 91660 : frame.pushRegs(typeReg, dataReg, type);
7822 91660 : return testBarrier(typeReg, dataReg, testUndefined);
7823 : }
7824 :
7825 : MaybeJump
7826 203683 : mjit::Compiler::trySingleTypeTest(types::TypeSet *types, RegisterID typeReg)
7827 : {
7828 : /*
7829 : * If a type set we have a barrier on is monomorphic, generate a single
7830 : * jump taken if a type register has a match. This doesn't handle type sets
7831 : * containing objects, as these require two jumps regardless (test for
7832 : * object, then test the type of the object).
7833 : */
7834 203683 : MaybeJump res;
7835 :
7836 203683 : switch (types->getKnownTypeTag(cx)) {
7837 : case JSVAL_TYPE_INT32:
7838 10751 : res.setJump(masm.testInt32(Assembler::NotEqual, typeReg));
7839 10751 : return res;
7840 :
7841 : case JSVAL_TYPE_DOUBLE:
7842 5038 : res.setJump(masm.testNumber(Assembler::NotEqual, typeReg));
7843 5038 : return res;
7844 :
7845 : case JSVAL_TYPE_BOOLEAN:
7846 1505 : res.setJump(masm.testBoolean(Assembler::NotEqual, typeReg));
7847 1505 : return res;
7848 :
7849 : case JSVAL_TYPE_STRING:
7850 16416 : res.setJump(masm.testString(Assembler::NotEqual, typeReg));
7851 16416 : return res;
7852 :
7853 : default:
7854 169973 : return res;
7855 : }
7856 : }
7857 :
JSC::MacroAssembler::Jump
mjit::Compiler::addTypeTest(types::TypeSet *types, RegisterID typeReg, RegisterID dataReg)
{
    /*
     * Emit inline code testing whether the value in typeReg/dataReg belongs
     * to |types|. Control falls through when the value matches; the returned
     * jump is taken on mismatch.
     *
     * :TODO: It would be good to merge this with GenerateTypeCheck, but the
     * two methods have a different format for the tested value (in registers
     * vs. in memory).
     */

    /*
     * One forward jump per possible match; all are linked past the final
     * mismatch jump below. NOTE(review): append() return values are ignored;
     * presumably OOM is reported through CompilerAllocPolicy — confirm.
     */
    Vector<Jump> matches(CompilerAllocPolicy(cx, *this));

    if (types->hasType(types::Type::Int32Type()))
        matches.append(masm.testInt32(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::DoubleType()))
        matches.append(masm.testDouble(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::UndefinedType()))
        matches.append(masm.testUndefined(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::BooleanType()))
        matches.append(masm.testBoolean(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::StringType()))
        matches.append(masm.testString(Assembler::Equal, typeReg));

    if (types->hasType(types::Type::NullType()))
        matches.append(masm.testNull(Assembler::Equal, typeReg));

    /*
     * Objects: if the set admits any object, a single tag test suffices;
     * otherwise each specific object in the set must be compared.
     */
    unsigned count = 0;
    if (types->hasType(types::Type::AnyObjectType()))
        matches.append(masm.testObject(Assembler::Equal, typeReg));
    else
        count = types->getObjectCount();

    if (count != 0) {
        Jump notObject = masm.testObject(Assembler::NotEqual, typeReg);
        Address typeAddress(dataReg, JSObject::offsetOfType());

        /* Singleton objects match by identity on the payload register... */
        for (unsigned i = 0; i < count; i++) {
            if (JSObject *object = types->getSingleObject(i))
                matches.append(masm.branchPtr(Assembler::Equal, dataReg, ImmPtr(object)));
        }

        /* ...while type objects match via the object's type pointer in memory. */
        for (unsigned i = 0; i < count; i++) {
            if (types::TypeObject *object = types->getTypeObject(i))
                matches.append(masm.branchPtr(Assembler::Equal, typeAddress, ImmPtr(object)));
        }

        /* Non-object values skip the per-object comparisons and fall into the mismatch jump. */
        notObject.linkTo(masm.label(), &masm);
    }

    /* Anything that failed every test above is a type set mismatch. */
    Jump mismatch = masm.jump();

    for (unsigned i = 0; i < matches.length(); i++)
        matches[i].linkTo(masm.label(), &masm);

    return mismatch;
}
7917 :
mjit::Compiler::BarrierState
mjit::Compiler::testBarrier(RegisterID typeReg, RegisterID dataReg,
                            bool testUndefined, bool testReturn, bool force)
{
    /*
     * Emit a type barrier test on the value in typeReg/dataReg, returning the
     * (possibly unset) mismatch jump together with the registers so the
     * caller can link the barrier with finishBarrier after any stub rejoin.
     */
    BarrierState state;
    state.typeReg = typeReg;
    state.dataReg = dataReg;

    /* Barriers only apply to opcodes whose pushed values carry a type set. */
    if (!cx->typeInferenceEnabled() || !(js_CodeSpec[*PC].format & JOF_TYPESET))
        return state;

    types::TypeSet *types = analysis->bytecodeTypes(PC);
    if (types->unknown()) {
        /*
         * If the result of this opcode is already unknown, there is no way for
         * a type barrier to fail.
         */
        return state;
    }

    if (testReturn) {
        JS_ASSERT(!testUndefined);
        /* Return values only need a barrier when the call's return types are monitored. */
        if (!analysis->getCode(PC).monitoredTypesReturn)
            return state;
    } else if (!hasTypeBarriers(PC) && !force) {
        /* No barrier at this op; at most guard against an unexpected undefined. */
        if (testUndefined && !types->hasType(types::Type::UndefinedType()))
            state.jump.setJump(masm.testUndefined(Assembler::Equal, typeReg));
        return state;
    }

    /* Freeze the observed set so later changes to it trigger recompilation. */
    types->addFreeze(cx);

    /* Cannot have type barriers when the result of the operation is already unknown. */
    JS_ASSERT(!types->unknown());

    /* Prefer a single inverted test for monomorphic sets; otherwise emit the full test. */
    state.jump = trySingleTypeTest(types, typeReg);
    if (!state.jump.isSet())
        state.jump.setJump(addTypeTest(types, typeReg, dataReg));

    return state;
}
7959 :
void
mjit::Compiler::finishBarrier(const BarrierState &barrier, RejoinState rejoin, uint32_t which)
{
    /*
     * Link a barrier jump emitted by testBarrier to a new out-of-line path
     * that stores the value and calls TypeBarrierHelper to update the pushed
     * type set (possibly triggering recompilation).
     */
    if (!barrier.jump.isSet())
        return;

    stubcc.linkExitDirect(barrier.jump.get(), stubcc.masm.label());

    /*
     * Before syncing, store the entry to sp[0]. (scanInlineCalls accounted for
     * this when making sure there is enough room for all frames). The known
     * type in the frame may be wrong leading to an incorrect sync, and this
     * sync may also clobber typeReg and/or dataReg.
     */
    frame.pushSynced(JSVAL_TYPE_UNKNOWN);
    stubcc.masm.storeValueFromComponents(barrier.typeReg, barrier.dataReg,
                                         frame.addressOf(frame.peek(-1)));
    frame.pop();

    stubcc.syncExit(Uses(0));
    stubcc.leave();

    /* Pass the barriered value's index to the stub so it updates the right type set. */
    stubcc.masm.move(ImmIntPtr(intptr_t(which)), Registers::ArgReg1);
    OOL_STUBCALL(stubs::TypeBarrierHelper, rejoin);
    stubcc.rejoin(Changes(0));
}
7986 :
void
mjit::Compiler::testPushedType(RejoinState rejoin, int which, bool ool)
{
    /*
     * Emit a check, inline or out-of-line per |ool|, that the value at stack
     * depth |which| (<= 0, relative to the top of stack) matches the type set
     * inferred for the current bytecode, calling StubTypeHelper on mismatch.
     */
    if (!cx->typeInferenceEnabled() || !(js_CodeSpec[*PC].format & JOF_TYPESET))
        return;

    types::TypeSet *types = analysis->bytecodeTypes(PC);
    /* An unknown set matches any value; no check is needed. */
    if (types->unknown())
        return;

    /* Shadow the member assembler with whichever path we are emitting into. */
    Assembler &masm = ool ? stubcc.masm : this->masm;

    JS_ASSERT(which <= 0);
    Address address = (which == 0) ? frame.addressOfTop() : frame.addressOf(frame.peek(which));

    Vector<Jump> mismatches(cx);
    if (!masm.generateTypeCheck(cx, address, types, &mismatches)) {
        /* Record the OOM so compilation can be aborted by the caller. */
        oomInVector = true;
        return;
    }

    /* Matching values jump over the stub call below. */
    Jump j = masm.jump();

    for (unsigned i = 0; i < mismatches.length(); i++)
        mismatches[i].linkTo(masm.label(), &masm);

    masm.move(Imm32(which), Registers::ArgReg1);
    if (ool)
        OOL_STUBCALL(stubs::StubTypeHelper, rejoin);
    else
        INLINE_STUBCALL(stubs::StubTypeHelper, rejoin);

    j.linkTo(masm.label(), &masm);
}
|