1 : /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=4 sw=4 et tw=99:
3 : *
4 : * ***** BEGIN LICENSE BLOCK *****
5 : * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 : *
7 : * The contents of this file are subject to the Mozilla Public License Version
8 : * 1.1 (the "License"); you may not use this file except in compliance with
9 : * the License. You may obtain a copy of the License at
10 : * http://www.mozilla.org/MPL/
11 : *
12 : * Software distributed under the License is distributed on an "AS IS" basis,
13 : * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 : * for the specific language governing rights and limitations under the
15 : * License.
16 : *
17 : * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 : * May 28, 2008.
19 : *
20 : * The Initial Developer of the Original Code is
21 : * Brendan Eich <brendan@mozilla.org>
22 : *
23 : * Contributor(s):
24 : *
25 : * Alternatively, the contents of this file may be used under the terms of
26 : * either of the GNU General Public License Version 2 or later (the "GPL"),
27 : * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
28 : * in which case the provisions of the GPL or the LGPL are applicable instead
29 : * of those above. If you wish to allow use of your version of this file only
30 : * under the terms of either the GPL or the LGPL, and not to allow others to
31 : * use your version of this file under the terms of the MPL, indicate your
32 : * decision by deleting the provisions above and replace them with the notice
33 : * and other provisions required by the GPL or the LGPL. If you do not delete
34 : * the provisions above, a recipient may use your version of this file under
35 : * the terms of any one of the MPL, the GPL or the LGPL.
36 : *
37 : * ***** END LICENSE BLOCK ***** */
38 :
39 : #if !defined jsjaeger_h__ && defined JS_METHODJIT
40 : #define jsjaeger_h__
41 :
42 : #ifdef JSGC_INCREMENTAL
43 : #define JSGC_INCREMENTAL_MJ
44 : #endif
45 :
46 : #include "jscntxt.h"
47 : #include "jscompartment.h"
48 :
49 : #include "assembler/assembler/MacroAssemblerCodeRef.h"
50 : #include "assembler/assembler/CodeLocation.h"
51 :
52 : #if !defined JS_CPU_X64 && \
53 : !defined JS_CPU_X86 && \
54 : !defined JS_CPU_SPARC && \
55 : !defined JS_CPU_ARM && \
56 : !defined JS_CPU_MIPS
57 : # error "Oh no, you should define a platform so this compiles."
58 : #endif
59 :
60 : #if !defined(JS_NUNBOX32) && !defined(JS_PUNBOX64)
61 : # error "No boxing format selected."
62 : #endif
63 :
64 : namespace js {
65 :
66 : namespace mjit {
67 : struct JITChunk;
68 : struct JITScript;
69 : }
70 :
/*
 * A VMFrame is the C++ view of the native stack frame that the Jaeger
 * trampolines and JIT code maintain while method-jitted code runs.  Stub
 * calls receive a reference to the active VMFrame ("f") as their first
 * argument.  The member layout is architecture specific and must stay in
 * sync with the hand-written trampoline code; staticAssert() and the
 * offsetOf* helpers below encode the layout assumptions.  Do not reorder
 * members without updating the trampolines.
 */
struct VMFrame
{
#if defined(JS_CPU_SPARC)
    /* Spill area for the SPARC local (L) and in (I) registers. */
    void *savedL0;
    void *savedL1;
    void *savedL2;
    void *savedL3;
    void *savedL4;
    void *savedL5;
    void *savedL6;
    void *savedL7;
    void *savedI0;
    void *savedI1;
    void *savedI2;
    void *savedI3;
    void *savedI4;
    void *savedI5;
    void *savedI6;
    void *savedI7;

    /* NOTE(review): purpose not evident from this header -- confirm against the SPARC trampoline. */
    void *str_p;

    /* Outgoing parameter slots required by the SPARC calling convention. */
    void *outgoing_p0;
    void *outgoing_p1;
    void *outgoing_p2;
    void *outgoing_p3;
    void *outgoing_p4;
    void *outgoing_p5;

    void *outgoing_p6;

    /* Extra reserved slots (padding). */
    void *reserve_0;
    void *reserve_1;

#elif defined(JS_CPU_MIPS)
    /* Reserved 16 bytes for a0-a3 space in MIPS O32 ABI */
    void *unused0;
    void *unused1;
    void *unused2;
    void *unused3;
#endif

    /*
     * Scratch words JIT code hands to stubs: either up to two raw
     * pointers ('x'), or the lazy-arguments/argc words used on call
     * paths ('call').
     */
    union Arguments {
        struct {
            void *ptr;
            void *ptr2;
        } x;
        struct {
            uint32_t lazyArgsObj;
            uint32_t dynamicArgc;
        } call;
    } u;

    /* Offset of u.call.lazyArgsObj, for JIT code that stores to it directly. */
    static size_t offsetOfLazyArgsObj() {
        return offsetof(VMFrame, u.call.lazyArgsObj);
    }

    /* Offset of u.call.dynamicArgc, for JIT code that stores to it directly. */
    static size_t offsetOfDynamicArgc() {
        return offsetof(VMFrame, u.call.dynamicArgc);
    }

    VMFrame *previous;  /* Next-older active VMFrame; maintained by JaegerCompartment::push/popActiveFrame. */
    void *scratch;      /* Scratch slot; reset to NULL when the frame is pushed. */
    FrameRegs regs;     /* Current frame registers (fp/sp/pc) for the running script. */

    /* Offset of regs.sp, for direct loads/stores from JIT code. */
    static size_t offsetOfRegsSp() {
        return offsetof(VMFrame, regs.sp);
    }

    /* Offset of regs.pc, for direct loads/stores from JIT code. */
    static size_t offsetOfRegsPc() {
        return offsetof(VMFrame, regs.pc);
    }

    JSContext *cx;            /* Context this VMFrame was entered with (see runtime() below). */
    Value *stackLimit;        /* Stack limit; presumably the limit passed to EnterMethodJIT -- confirm. */
    StackFrame *entryfp;      /* Presumably the StackFrame JIT code was entered on -- confirm at push sites. */
    FrameRegs *oldregs;       /* NOTE(review): looks like the register state to restore on exit -- confirm. */
    JSRejoinState stubRejoin; /* How to rejoin if inside a call from an IC stub. */

#if defined(JS_CPU_X86)
    void *unused0, *unused1;  /* For 16 byte alignment */
#endif

#if defined(JS_CPU_X86)
    /* Callee-saved registers and return address; layout dictated by the trampoline pushes. */
    void *savedEBX;
    void *savedEDI;
    void *savedESI;
    void *savedEBP;
    void *savedEIP;

# ifdef JS_NO_FASTCALL
    /* Without FASTCALL, stub arguments go on the stack, hence the larger offset. */
    inline void** returnAddressLocation() {
        return reinterpret_cast<void**>(this) - 5;
    }
# else
    /* Slot holding the return address of the most recent stub call into C++. */
    inline void** returnAddressLocation() {
        return reinterpret_cast<void**>(this) - 1;
    }
# endif

    /* The gap between ebp and esp in JaegerTrampoline frames on X86 platforms. */
    static const uint32_t STACK_BASE_DIFFERENCE = 0x38;

#elif defined(JS_CPU_X64)
    /* Callee-saved registers and return address; layout dictated by the trampoline pushes. */
    void *savedRBX;
# ifdef _WIN64
    void *savedRSI;
    void *savedRDI;
# endif
    void *savedR15;
    void *savedR14;
    void *savedR13;
    void *savedR12;
    void *savedRBP;
    void *savedRIP;

# ifdef _WIN64
    /* Win64 offset accounts for the four-word shadow space -- confirm against trampoline. */
    inline void** returnAddressLocation() {
        return reinterpret_cast<void**>(this) - 5;
    }
# else
    inline void** returnAddressLocation() {
        return reinterpret_cast<void**>(this) - 1;
    }
# endif

#elif defined(JS_CPU_ARM)
    /* Callee-saved registers and link register saved by the trampoline. */
    void *savedR4;
    void *savedR5;
    void *savedR6;
    void *savedR7;
    void *savedR8;
    void *savedR9;
    void *savedR10;
    void *savedR11;
    void *savedLR;

    inline void** returnAddressLocation() {
        return reinterpret_cast<void**>(this) - 1;
    }
#elif defined(JS_CPU_SPARC)
    /* NOTE(review): 'topRetrunAddr' is misspelled ("Return"); renaming needs a coordinated change with its users. */
    JSStackFrame *topRetrunAddr;
    void* veneerReturn;       /* Return address slot used by the SPARC veneer (JaegerStubVeneer). */
    void* _align;
    inline void** returnAddressLocation() {
        return reinterpret_cast<void**>(&this->veneerReturn);
    }
#elif defined(JS_CPU_MIPS)
    /* Callee-saved registers, GP and return address saved by the trampoline. */
    void *savedS0;
    void *savedS1;
    void *savedS2;
    void *savedS3;
    void *savedS4;
    void *savedS5;
    void *savedS6;
    void *savedS7;
    void *savedGP;
    void *savedRA;
    void *unused4;  // For alignment.

    inline void** returnAddressLocation() {
        return reinterpret_cast<void**>(this) - 1;
    }
#else
# error "The VMFrame layout isn't defined for your processor architecture!"
#endif

    /* The runtime associated with this frame's context. */
    JSRuntime *runtime() { return cx->runtime; }

    /*
     * Get the current frame and JIT. Note that these are NOT stable in case
     * of recompilations; all code which expects these to be stable should
     * check that cx->recompilations() has not changed across a call that could
     * trigger recompilation (pretty much any time the VM is called into).
     */
    StackFrame *fp() { return regs.fp(); }
    mjit::JITScript *jit() { return fp()->jit(); }

    inline mjit::JITChunk *chunk();
    inline unsigned chunkIndex();

    /* Get the inner script/PC in case of inlining. */
    inline JSScript *script();
    inline jsbytecode *pc();

    /*
     * Byte offsets of regs.fp / regs.inlined() within VMFrame.  The leading
     * multiple of sizeof(void *) counts the words laid out before 'regs':
     * the Arguments union (two words) plus 'previous' and 'scratch', plus
     * the arch-specific spill area above them (26 extra words on SPARC,
     * 4 on MIPS).  staticAssert() checks these against the real layout.
     */
#if defined(JS_CPU_SPARC)
    static const size_t offsetOfFp = 30 * sizeof(void *) + FrameRegs::offsetOfFp;
    static const size_t offsetOfInlined = 30 * sizeof(void *) + FrameRegs::offsetOfInlined;
#elif defined(JS_CPU_MIPS)
    static const size_t offsetOfFp = 8 * sizeof(void *) + FrameRegs::offsetOfFp;
    static const size_t offsetOfInlined = 8 * sizeof(void *) + FrameRegs::offsetOfInlined;
#else
    static const size_t offsetOfFp = 4 * sizeof(void *) + FrameRegs::offsetOfFp;
    static const size_t offsetOfInlined = 4 * sizeof(void *) + FrameRegs::offsetOfInlined;
#endif

    /* Compile-time check that the hand-maintained offsets above match the compiler's layout. */
    static void staticAssert() {
        JS_STATIC_ASSERT(offsetOfFp == offsetof(VMFrame, regs) + FrameRegs::offsetOfFp);
        JS_STATIC_ASSERT(offsetOfInlined == offsetof(VMFrame, regs) + FrameRegs::offsetOfInlined);
    }
};
272 :
273 : #if defined(JS_CPU_ARM) || defined(JS_CPU_SPARC) || defined(JS_CPU_MIPS)
274 : // WARNING: Do not call this function directly from C(++) code because it is not ABI-compliant.
275 : extern "C" void JaegerStubVeneer(void);
276 : #endif
277 :
278 : namespace mjit {
279 :
280 : /*
281 : * For a C++ or scripted call made from JIT code, indicates properties of the
282 : * register and stack state after the call finishes, which js_InternalInterpret
283 : * must use to construct a coherent state for rejoining into the interpreter.
284 : */
enum RejoinState {
    /*
     * Return value of call at this bytecode is held in ReturnReg_{Data,Type}
     * and needs to be restored before starting the next bytecode. f.regs.pc
     * is *not* intact when rejoining from a scripted call (unlike all other
     * rejoin states). The pc's offset into the script is stored in the upper
     * 31 bits of the rejoin state, and the remaining values for RejoinState
     * are shifted left by one in stack frames to leave the lower bit set only
     * for scripted calls.
     */
    REJOIN_SCRIPTED = 1,  /* Must stay 1: the low bit is the scripted-call tag (see ScriptedRejoin/StubRejoin). */

    /* Recompilations and frame expansion are impossible for this call. */
    REJOIN_NONE,

    /* State is coherent for the start of the current bytecode. */
    REJOIN_RESUME,

    /*
     * State is coherent for the start of the current bytecode, which is a TRAP
     * that has already been invoked and should not be invoked again.
     */
    REJOIN_TRAP,

    /* State is coherent for the start of the next (fallthrough) bytecode. */
    REJOIN_FALLTHROUGH,

    /*
     * As for REJOIN_FALLTHROUGH, but holds a reference on the compartment's
     * orphaned native pools which needs to be reclaimed by InternalInterpret.
     * The return value needs to be adjusted if REJOIN_NATIVE_LOWERED, and
     * REJOIN_NATIVE_GETTER is for ABI calls made for property accesses.
     */
    REJOIN_NATIVE,
    REJOIN_NATIVE_LOWERED,
    REJOIN_NATIVE_GETTER,

    /*
     * Dummy rejoin stored in VMFrames to indicate they return into a native
     * stub (and their FASTCALL return address should not be observed) but
     * that they have already been patched and can be ignored.
     */
    REJOIN_NATIVE_PATCHED,

    /* Call returns a payload, which should be pushed before starting next bytecode. */
    REJOIN_PUSH_BOOLEAN,
    REJOIN_PUSH_OBJECT,

    /* Call returns an object, which should be assigned to a local per the current bytecode. */
    REJOIN_DEFLOCALFUN,

    /*
     * During the prologue of constructing scripts, after the function's
     * .prototype property has been fetched.
     */
    REJOIN_THIS_PROTOTYPE,

    /*
     * Type check on arguments failed during prologue, need stack check and
     * the rest of the JIT prologue before the script can execute.
     */
    REJOIN_CHECK_ARGUMENTS,

    /*
     * The script's jitcode was discarded after marking an outer function as
     * reentrant or due to a GC while creating a call object.
     */
    REJOIN_FUNCTION_PROLOGUE,

    /*
     * State after calling a stub which returns a JIT code pointer for a call
     * or NULL for an already-completed call.
     */
    REJOIN_CALL_PROLOGUE,
    REJOIN_CALL_PROLOGUE_LOWERED_CALL,
    REJOIN_CALL_PROLOGUE_LOWERED_APPLY,

    /* Triggered a recompilation while placing the arguments to an apply on the stack. */
    REJOIN_CALL_SPLAT,

    /* FALLTHROUGH ops which can be implemented as part of an IncOp. */
    REJOIN_GETTER,
    REJOIN_POS,
    REJOIN_BINARY,

    /*
     * For an opcode fused with IFEQ/IFNE, call returns a boolean indicating
     * the result of the comparison and whether to take or not take the branch.
     */
    REJOIN_BRANCH
};
376 :
377 : /* Get the rejoin state for a StackFrame after returning from a scripted call. */
378 : static inline JSRejoinState
379 70968 : ScriptedRejoin(uint32_t pcOffset)
380 : {
381 70968 : return REJOIN_SCRIPTED | (pcOffset << 1);
382 : }
383 :
384 : /* Get the rejoin state for a StackFrame after returning from a stub call. */
385 : static inline JSRejoinState
386 46332 : StubRejoin(RejoinState rejoin)
387 : {
388 46332 : return rejoin << 1;
389 : }
390 :
391 : /* Helper to watch for recompilation and frame expansion activity on a compartment. */
struct RecompilationMonitor
{
    JSContext *cx;

    /*
     * If either inline frame expansion or recompilation occurs, then ICs and
     * stubs should not depend on the frame or JITs being intact. The two are
     * separated for logging.
     */
    unsigned recompilations;
    unsigned frameExpansions;

    /* If a GC occurs it may discard jit code on the stack. */
    uint64_t gcNumber;

    /*
     * Snapshot the compartment's recompilation/frame-expansion counters and
     * the runtime's GC number at construction time.
     */
    RecompilationMonitor(JSContext *cx)
        : cx(cx),
          recompilations(cx->compartment->types.recompilations),
          frameExpansions(cx->compartment->types.frameExpansions),
          gcNumber(cx->runtime->gcNumber)
    {}

    /* True if any recompilation, frame expansion, or GC happened since construction. */
    bool recompiled() {
        return cx->compartment->types.recompilations != recompilations
            || cx->compartment->types.frameExpansions != frameExpansions
            || cx->runtime->gcNumber != gcNumber;
    }
};
420 :
421 : /*
422 : * Trampolines to force returns from jit code.
423 : * See also TrampolineCompiler::generateForceReturn(Fast).
424 : */
struct Trampolines {
    typedef void (*TrampolinePtr)();

    /* Trampoline forcing a return from a frame; see forceReturnFromExternC(). */
    TrampolinePtr forceReturn;
    JSC::ExecutablePool *forceReturnPool;  /* executable pool owning forceReturn's code */

#if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
    /* Separate trampoline for fastcall returns on these configurations; see forceReturnFromFastCall(). */
    TrampolinePtr forceReturnFast;
    JSC::ExecutablePool *forceReturnFastPool;
#endif
};
436 :
437 : /* Result status of executing mjit code on a frame. */
enum JaegerStatus
{
    /*
     * Value 0 doubles as the "no unfinished status" sentinel used by
     * JaegerCompartment::lastUnfinished(); keep these values explicit.
     */

    /* Entry frame finished, and is throwing an exception. */
    Jaeger_Throwing = 0,

    /* Entry frame finished, and is returning. */
    Jaeger_Returned = 1,

    /*
     * Entry frame did not finish. cx->regs reflects where to resume execution.
     * This result is only possible if 'partial' is passed as true below.
     */
    Jaeger_Unfinished = 2,

    /*
     * As for Unfinished, but stopped after a TRAP triggered recompilation.
     * The trap has been reinstalled, but should not execute again when
     * resuming execution.
     */
    Jaeger_UnfinishedAtTrap = 3,

    /*
     * An exception was thrown before entering jit code, so the caller should
     * 'goto error'.
     */
    Jaeger_ThrowBeforeEnter = 4
};
465 :
466 : static inline bool
467 4183693 : JaegerStatusToSuccess(JaegerStatus status)
468 : {
469 4183693 : JS_ASSERT(status != Jaeger_Unfinished);
470 4183693 : JS_ASSERT(status != Jaeger_UnfinishedAtTrap);
471 4183693 : return status == Jaeger_Returned;
472 : }
473 :
474 : /*
475 : * Method JIT compartment data. Currently, there is exactly one per
476 : * JS compartment. It would be safe for multiple JS compartments to
477 : * share a JaegerCompartment as long as only one thread can enter
478 : * the JaegerCompartment at a time.
479 : */
class JaegerCompartment {
    JSC::ExecutableAllocator *execAlloc_; // allocator for jit code
    Trampolines trampolines;    // force-return trampolines
    VMFrame *activeFrame_;      // current active VMFrame
    JaegerStatus lastUnfinished_;// result status of last VM frame,
                                // if unfinished

    /* Tear down trampolines/allocator; called from the destructor. */
    void Finish();

  public:
    /* Set up the executable allocator and trampolines; false on failure. */
    bool Initialize(JSContext *cx);

    JaegerCompartment();
    ~JaegerCompartment() { Finish(); }

    JSC::ExecutableAllocator *execAlloc() {
        return execAlloc_;
    }

    /* Innermost VMFrame currently executing in this compartment, or NULL. */
    VMFrame *activeFrame() {
        return activeFrame_;
    }

    /*
     * Push 'f' as the new innermost VMFrame.  Frames form a singly linked
     * stack through VMFrame::previous.  A pending unfinished status must
     * have been consumed (via lastUnfinished()) before re-entering.
     */
    void pushActiveFrame(VMFrame *f) {
        JS_ASSERT(!lastUnfinished_);
        f->previous = activeFrame_;
        f->scratch = NULL;
        activeFrame_ = f;
    }

    /* Pop the innermost VMFrame, restoring its predecessor. */
    void popActiveFrame() {
        JS_ASSERT(activeFrame_);
        activeFrame_ = activeFrame_->previous;
    }

    /* Record that the last VM frame stopped without finishing; set at most once. */
    void setLastUnfinished(JaegerStatus status) {
        JS_ASSERT(!lastUnfinished_);
        lastUnfinished_ = status;
    }

    /* Read-and-clear the unfinished status; 0 (Jaeger_Throwing) means none pending. */
    JaegerStatus lastUnfinished() {
        JaegerStatus result = lastUnfinished_;
        lastUnfinished_ = (JaegerStatus) 0;
        return result;
    }

    /*
     * To force the top StackFrame in a VMFrame to return, when that VMFrame
     * has called an extern "C" function (say, js_InternalThrow or
     * js_InternalInterpret), change the extern "C" function's return address
     * to the value this method returns.
     */
    void *forceReturnFromExternC() const {
        return JS_FUNC_TO_DATA_PTR(void *, trampolines.forceReturn);
    }

    /*
     * To force the top StackFrame in a VMFrame to return, when that VMFrame has
     * called a fastcall function (say, most stubs:: functions), change the
     * fastcall function's return address to the value this method returns.
     */
    void *forceReturnFromFastCall() const {
#if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
        return JS_FUNC_TO_DATA_PTR(void *, trampolines.forceReturnFast);
#else
        return JS_FUNC_TO_DATA_PTR(void *, trampolines.forceReturn);
#endif
    }

    /*
     * References held on pools created for native ICs, where the IC was
     * destroyed and we are waiting for the pool to finish use and jump
     * into the interpoline.
     */
    Vector<StackFrame *, 8, SystemAllocPolicy> orphanedNativeFrames;
    Vector<JSC::ExecutablePool *, 8, SystemAllocPolicy> orphanedNativePools;
};
557 :
558 : /*
559 : * Allocation policy for compiler jstl objects. The goal is to free the
560 : * compiler from having to check and propagate OOM after every time we
561 : * append to a vector. We do this by reporting OOM to the engine and
562 : * setting a flag on the compiler when OOM occurs. The compiler is required
563 : * to check for OOM only before trying to use the contents of the list.
564 : */
565 : class CompilerAllocPolicy : public TempAllocPolicy
566 : {
567 : bool *oomFlag;
568 :
569 215497 : void *checkAlloc(void *p) {
570 215497 : if (!p)
571 0 : *oomFlag = true;
572 215497 : return p;
573 : }
574 :
575 : public:
576 : CompilerAllocPolicy(JSContext *cx, bool *oomFlag)
577 : : TempAllocPolicy(cx), oomFlag(oomFlag) {}
578 : CompilerAllocPolicy(JSContext *cx, Compiler &compiler);
579 :
580 215363 : void *malloc_(size_t bytes) { return checkAlloc(TempAllocPolicy::malloc_(bytes)); }
581 134 : void *realloc_(void *p, size_t oldBytes, size_t bytes) {
582 134 : return checkAlloc(TempAllocPolicy::realloc_(p, oldBytes, bytes));
583 : }
584 : };
585 :
586 : namespace ic {
587 : # if defined JS_POLYIC
588 : struct PICInfo;
589 : struct GetElementIC;
590 : struct SetElementIC;
591 : # endif
592 : # if defined JS_MONOIC
593 : struct GetGlobalNameIC;
594 : struct SetGlobalNameIC;
595 : struct EqualityICInfo;
596 : struct CallICInfo;
597 : # endif
598 : }
599 : }
600 :
601 : typedef void (JS_FASTCALL *VoidStub)(VMFrame &);
602 : typedef void (JS_FASTCALL *VoidVpStub)(VMFrame &, Value *);
603 : typedef void (JS_FASTCALL *VoidStubUInt32)(VMFrame &, uint32_t);
604 : typedef void (JS_FASTCALL *VoidStubInt32)(VMFrame &, int32_t);
605 : typedef JSBool (JS_FASTCALL *BoolStub)(VMFrame &);
606 : typedef void * (JS_FASTCALL *VoidPtrStub)(VMFrame &);
607 : typedef void * (JS_FASTCALL *VoidPtrStubPC)(VMFrame &, jsbytecode *);
608 : typedef void * (JS_FASTCALL *VoidPtrStubUInt32)(VMFrame &, uint32_t);
609 : typedef JSObject * (JS_FASTCALL *JSObjStub)(VMFrame &);
610 : typedef JSObject * (JS_FASTCALL *JSObjStubUInt32)(VMFrame &, uint32_t);
611 : typedef JSObject * (JS_FASTCALL *JSObjStubFun)(VMFrame &, JSFunction *);
612 : typedef void (JS_FASTCALL *VoidStubFun)(VMFrame &, JSFunction *);
613 : typedef JSObject * (JS_FASTCALL *JSObjStubJSObj)(VMFrame &, JSObject *);
614 : typedef void (JS_FASTCALL *VoidStubName)(VMFrame &, PropertyName *);
615 : typedef JSString * (JS_FASTCALL *JSStrStub)(VMFrame &);
616 : typedef JSString * (JS_FASTCALL *JSStrStubUInt32)(VMFrame &, uint32_t);
617 : typedef void (JS_FASTCALL *VoidStubJSObj)(VMFrame &, JSObject *);
618 : typedef void (JS_FASTCALL *VoidStubPC)(VMFrame &, jsbytecode *);
619 : typedef JSBool (JS_FASTCALL *BoolStubUInt32)(VMFrame &f, uint32_t);
620 : #ifdef JS_MONOIC
621 : typedef void (JS_FASTCALL *VoidStubCallIC)(VMFrame &, js::mjit::ic::CallICInfo *);
622 : typedef void * (JS_FASTCALL *VoidPtrStubCallIC)(VMFrame &, js::mjit::ic::CallICInfo *);
623 : typedef void (JS_FASTCALL *VoidStubGetGlobal)(VMFrame &, js::mjit::ic::GetGlobalNameIC *);
624 : typedef void (JS_FASTCALL *VoidStubSetGlobal)(VMFrame &, js::mjit::ic::SetGlobalNameIC *);
625 : typedef JSBool (JS_FASTCALL *BoolStubEqualityIC)(VMFrame &, js::mjit::ic::EqualityICInfo *);
626 : #endif
627 : #ifdef JS_POLYIC
628 : typedef void (JS_FASTCALL *VoidStubPIC)(VMFrame &, js::mjit::ic::PICInfo *);
629 : typedef void (JS_FASTCALL *VoidStubGetElemIC)(VMFrame &, js::mjit::ic::GetElementIC *);
630 : typedef void (JS_FASTCALL *VoidStubSetElemIC)(VMFrame &f, js::mjit::ic::SetElementIC *);
631 : #endif
632 :
633 : namespace mjit {
634 :
635 : struct InlineFrame;
636 : struct CallSite;
637 :
/*
 * One bytecode -> native-code correspondence.  Arrays of these are sorted
 * by ascending bcOff (see JITChunk::nNmapPairs and bsearch_nmap below).
 */
struct NativeMapEntry {
    size_t bcOff;   /* bytecode offset in script */
    void *ncode;    /* pointer to native code */
};
642 :
/* Per-op counts of performance metrics. */
struct PCLengthEntry {
    double codeLength;  /* amount of inline code generated */
    double picsLength;  /* amount of PIC stub code generated */
    /* NOTE(review): doubles presumably allow fractional attribution per op -- confirm in the compiler. */
};
648 :
649 : /*
650 : * Pools and patch locations for managing stubs for non-FASTCALL C++ calls made
651 : * from native call and PropertyOp stubs. Ownership of these may be transferred
652 : * into the orphanedNativePools for the compartment.
653 : */
struct NativeCallStub {
    /* PC for the stub. Native call stubs cannot be added for inline frames. */
    jsbytecode *pc;

    /* Pool for the stub, NULL if it has been removed from the script. */
    JSC::ExecutablePool *pool;

    /*
     * Fallthrough jump returning to jitcode which may be patched during
     * recompilation. On x64 this is an indirect jump to avoid issues with far
     * jumps on relative branches.
     */
#ifdef JS_CPU_X64
    JSC::CodeLocationDataLabelPtr jump;
#else
    JSC::CodeLocationJump jump;
#endif
};
672 :
/* A compiled chunk of a script; owns the generated code and its side tables. */
struct JITChunk
{
    typedef JSC::MacroAssemblerCodeRef CodeRef;
    CodeRef         code;       /* pool & code addresses */

    PCLengthEntry   *pcLengths; /* lengths for outer and inline frames */

    /*
     * This struct has several variable-length sections that are allocated on
     * the end:  nmaps, MICs, callICs, etc.  To save space -- worthwhile
     * because JITScripts are common -- we only record their lengths.  We can
     * find any of the sections from the lengths because we know their order.
     * Therefore, do not change the section ordering in finishThisUp() without
     * changing nMICs() et al as well.
     */
    uint32_t        nNmapPairs;     /* The NativeMapEntrys are sorted by .bcOff.
                                       .ncode values may not be NULL. */
    uint32_t        nInlineFrames;
    uint32_t        nCallSites;
#ifdef JS_MONOIC
    uint32_t        nGetGlobalNames;
    uint32_t        nSetGlobalNames;
    uint32_t        nCallICs;
    uint32_t        nEqualityICs;
#endif
#ifdef JS_POLYIC
    uint32_t        nGetElems;
    uint32_t        nSetElems;
    uint32_t        nPICs;
#endif

#ifdef JS_MONOIC
    // Additional ExecutablePools that IC stubs were generated into.
    typedef Vector<JSC::ExecutablePool *, 0, SystemAllocPolicy> ExecPoolVector;
    ExecPoolVector execPools;
#endif

    // Additional ExecutablePools for native call and getter stubs.
    Vector<NativeCallStub, 0, SystemAllocPolicy> nativeCallStubs;

    /* Accessors for the trailing variable-length sections, in layout order. */
    NativeMapEntry *nmap() const;
    js::mjit::InlineFrame *inlineFrames() const;
    js::mjit::CallSite *callSites() const;
#ifdef JS_MONOIC
    ic::GetGlobalNameIC *getGlobalNames() const;
    ic::SetGlobalNameIC *setGlobalNames() const;
    ic::CallICInfo *callICs() const;
    ic::EqualityICInfo *equalityICs() const;
#endif
#ifdef JS_POLYIC
    ic::GetElementIC *getElems() const;
    ic::SetElementIC *setElems() const;
    ic::PICInfo *pics() const;
#endif

    /* True iff ptr points into this chunk's generated code region. */
    bool isValidCode(void *ptr) {
        char *jitcode = (char *)code.m_code.executableAddress();
        char *jcheck = (char *)ptr;
        return jcheck >= jitcode && jcheck < jitcode + code.m_size;
    }

    void nukeScriptDependentICs();

    size_t computedSizeOfIncludingThis();
    size_t sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf);

    ~JITChunk();

  private:
    /* Helpers used to navigate the variable-length sections. */
    char *commonSectionLimit() const;
    char *monoICSectionsLimit() const;
    char *polyICSectionsLimit() const;
};
747 :
748 : void
749 : SetChunkLimit(uint32_t limit);
750 :
751 : /* Information about a compilation chunk within a script. */
/* Information about a compilation chunk within a script. */
struct ChunkDescriptor
{
    /* Bytecode range of the chunk: [begin,end) */
    uint32_t begin;
    uint32_t end;

    /* Use counter for the chunk. */
    uint32_t counter;

    /* Optional compiled code for the chunk. */
    JITChunk *chunk;

    /* Zero all fields; chunk == NULL means the chunk is not compiled. */
    ChunkDescriptor() { PodZero(this); }
};
766 :
767 : /* Jump or fallthrough edge in the bytecode which crosses a chunk boundary. */
/* Jump or fallthrough edge in the bytecode which crosses a chunk boundary. */
struct CrossChunkEdge
{
    /* Bytecode offsets of the source and target of the edge. */
    uint32_t source;
    uint32_t target;

    /* Locations of the jump(s) for the source, NULL if not compiled. */
    void *sourceJump1;
    void *sourceJump2;

#ifdef JS_CPU_X64
    /*
     * Location of a trampoline for the edge to perform an indirect jump if
     * out of range, NULL if the source is not compiled.
     */
    void *sourceTrampoline;
#endif

    /* Any jump table entries along this edge. */
    typedef Vector<void**,4,SystemAllocPolicy> JumpTableEntryVector;
    JumpTableEntryVector *jumpTableEntries;

    /* Location of the label for the target, NULL if not compiled. */
    void *targetLabel;

    /*
     * Location of a shim which will transfer control to the interpreter at the
     * target bytecode. The source jumps are patched to jump to this label if
     * the source is compiled but not the target.
     */
    void *shimLabel;

    /* Zero all fields; NULL pointers mean "not compiled yet". */
    CrossChunkEdge() { PodZero(this); }
};
802 :
/*
 * Per-script method-JIT data.  The ChunkDescriptor array and CrossChunkEdge
 * array are allocated contiguously immediately after this object; see
 * chunkDescriptor() and edges() for the pointer arithmetic that locates them.
 */
struct JITScript
{
    JSScript        *script;

    void            *invokeEntry;       /* invoke address */
    void            *fastEntry;         /* cached entry, fastest */
    void            *arityCheckEntry;   /* arity check address */
    void            *argsCheckEntry;    /* arguments check address */

    /* List of inline caches jumping to the fastEntry. */
    JSCList          callers;

    uint32_t        nchunks;
    uint32_t        nedges;

    /*
     * Pool for shims which transfer control to the interpreter on cross chunk
     * edges to chunks which do not have compiled code.
     */
    JSC::ExecutablePool *shimPool;

#ifdef JS_MONOIC
    /* Inline cache at function entry for checking this/argument types. */
    JSC::CodeLocationLabel argsCheckStub;
    JSC::CodeLocationLabel argsCheckFallthrough;
    JSC::CodeLocationJump  argsCheckJump;
    JSC::ExecutablePool *argsCheckPool;
    void resetArgsCheck();
#endif

    /* The i'th descriptor, read from the array allocated directly after this JITScript. */
    ChunkDescriptor &chunkDescriptor(unsigned i) {
        JS_ASSERT(i < nchunks);
        ChunkDescriptor *descs = (ChunkDescriptor *) ((char *) this + sizeof(JITScript));
        return descs[i];
    }

    /*
     * Index of the chunk covering pc.  Chunks are ordered by bytecode range;
     * linear scan for the first chunk whose [begin,end) contains the offset.
     */
    unsigned chunkIndex(jsbytecode *pc) {
        unsigned offset = pc - script->code;
        JS_ASSERT(offset < script->length);
        for (unsigned i = 0; i < nchunks; i++) {
            const ChunkDescriptor &desc = chunkDescriptor(i);
            JS_ASSERT(desc.begin <= offset);
            if (offset < desc.end)
                return i;
        }
        JS_NOT_REACHED("Bad chunk layout");
        return 0;
    }

    /* Compiled code for the chunk covering pc, or NULL if not compiled. */
    JITChunk *chunk(jsbytecode *pc) {
        return chunkDescriptor(chunkIndex(pc)).chunk;
    }

    JITChunk *findCodeChunk(void *addr);

    /* CrossChunkEdges are laid out directly after the chunk descriptors. */
    CrossChunkEdge *edges() {
        return (CrossChunkEdge *) (&chunkDescriptor(0) + nchunks);
    }

    /* Patch any compiled sources in edge to jump to label. */
    void patchEdge(const CrossChunkEdge &edge, void *label);

    jsbytecode *nativeToPC(void *returnAddress, CallSite **pinline);

    size_t sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf);

    void destroy(JSContext *cx);
    void destroyChunk(JSContext *cx, unsigned chunkIndex, bool resetUses = true);
};
872 :
873 : /*
874 : * Execute the given mjit code. This is a low-level call and callers must
875 : * provide the same guarantees as JaegerShot/CheckStackAndEnterMethodJIT.
876 : */
877 : JaegerStatus EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit,
878 : bool partial);
879 :
880 : /* Execute a method that has been JIT compiled. */
881 : JaegerStatus JaegerShot(JSContext *cx, bool partial);
882 :
883 : /* Drop into the middle of a method at an arbitrary point, and execute. */
884 : JaegerStatus JaegerShotAtSafePoint(JSContext *cx, void *safePoint, bool partial);
885 :
enum CompileStatus
{
    Compile_Okay,        // compilation succeeded
    Compile_Abort,       // abort compilation
    Compile_InlineAbort, // inlining attempt failed, continue compilation
    Compile_Retry,       // static overflow or failed inline, try to recompile
    Compile_Error,       // OOM
    Compile_Skipped      // compilation not attempted; exact semantics at use sites -- confirm
};
895 :
896 : void JS_FASTCALL
897 : ProfileStubCall(VMFrame &f);
898 :
/* Who is requesting compilation; presumably affects heuristics in CanMethodJIT -- confirm there. */
enum CompileRequest
{
    CompileRequest_Interpreter,
    CompileRequest_JIT
};
904 :
905 : CompileStatus
906 : CanMethodJIT(JSContext *cx, JSScript *script, jsbytecode *pc,
907 : bool construct, CompileRequest request);
908 :
909 : void
910 : ReleaseScriptCode(JSContext *cx, JSScript *script, bool construct);
911 :
/* Release any JIT code held by 'script', both constructing and normal variants. */
inline void
ReleaseScriptCode(JSContext *cx, JSScript *script)
{
    if (script->jitCtor)
        mjit::ReleaseScriptCode(cx, script, true);   /* constructing JIT */
    if (script->jitNormal)
        mjit::ReleaseScriptCode(cx, script, false);  /* normal JIT */
}
920 :
921 : // Expand all stack frames inlined by the JIT within a compartment.
922 : void
923 : ExpandInlineFrames(JSCompartment *compartment);
924 :
925 : // Return all VMFrames in a compartment to the interpreter. This must be
926 : // followed by destroying all JIT code in the compartment.
927 : void
928 : ClearAllFrames(JSCompartment *compartment);
929 :
930 : // Information about a frame inlined during compilation.
/* Information about a frame inlined during compilation. */
struct InlineFrame
{
    InlineFrame *parent;        /* enclosing inline frame, or the outermost entry */
    jsbytecode *parentpc;       /* pc of the call site in the parent */
    HeapPtrFunction fun;        /* function being inlined */

    // Total distance between the start of the outer JSStackFrame and the start
    // of this frame, in multiples of sizeof(Value).
    uint32_t depth;
};
941 :
/* A rejoin point recorded in generated code: where it is, which (possibly
 * inlined) bytecode it belongs to, and how to rejoin the interpreter there. */
struct CallSite
{
    uint32_t codeOffset;    /* offset of the site within the chunk's code */
    uint32_t inlineIndex;   /* index into the chunk's inlineFrames() */
    uint32_t pcOffset;      /* bytecode offset within the (inline) script */
    RejoinState rejoin;     /* how to rejoin at this site */

    void initialize(uint32_t codeOffset, uint32_t inlineIndex, uint32_t pcOffset,
                    RejoinState rejoin) {
        this->codeOffset = codeOffset;
        this->inlineIndex = inlineIndex;
        this->pcOffset = pcOffset;
        this->rejoin = rejoin;
    }

    /* True if this site is a trap that has already fired (see REJOIN_TRAP). */
    bool isTrap() const {
        return rejoin == REJOIN_TRAP;
    }
};
961 :
962 : void
963 : DumpAllProfiles(JSContext *cx);
964 :
965 40283 : inline void * bsearch_nmap(NativeMapEntry *nmap, size_t nPairs, size_t bcOff)
966 : {
967 40283 : size_t lo = 1, hi = nPairs;
968 46933 : while (1) {
969 : /* current unsearched space is from lo-1 to hi-1, inclusive. */
970 87216 : if (lo > hi)
971 0 : return NULL; /* not found */
972 87216 : size_t mid = (lo + hi) / 2;
973 87216 : size_t bcOff_mid = nmap[mid-1].bcOff;
974 87216 : if (bcOff < bcOff_mid) {
975 17602 : hi = mid-1;
976 17602 : continue;
977 : }
978 69614 : if (bcOff > bcOff_mid) {
979 29331 : lo = mid+1;
980 29331 : continue;
981 : }
982 40283 : return nmap[mid-1].ncode;
983 : }
984 : }
985 :
986 : } /* namespace mjit */
987 :
/* The compiled chunk covering regs.pc.  NOT stable across recompilation (see fp()/jit()). */
inline mjit::JITChunk *
VMFrame::chunk()
{
    return jit()->chunk(regs.pc);
}
993 :
/* Index of the chunk covering regs.pc within the current JITScript. */
inline unsigned
VMFrame::chunkIndex()
{
    return jit()->chunkIndex(regs.pc);
}
999 :
/* The innermost script at the current point, accounting for inlined frames. */
inline JSScript *
VMFrame::script()
{
    if (regs.inlined())
        return chunk()->inlineFrames()[regs.inlined()->inlineIndex].fun->script();
    return fp()->script();
}
1007 :
/* The innermost pc at the current point, accounting for inlined frames. */
inline jsbytecode *
VMFrame::pc()
{
    if (regs.inlined())
        return script()->code + regs.inlined()->pcOffset;
    return regs.pc;
}
1015 :
1016 : } /* namespace js */
1017 :
1018 : inline void *
1019 40283 : JSScript::nativeCodeForPC(bool constructing, jsbytecode *pc)
1020 : {
1021 40283 : js::mjit::JITScript *jit = getJIT(constructing);
1022 40283 : if (!jit)
1023 0 : return NULL;
1024 40283 : js::mjit::JITChunk *chunk = jit->chunk(pc);
1025 40283 : if (!chunk)
1026 0 : return NULL;
1027 40283 : return bsearch_nmap(chunk->nmap(), chunk->nNmapPairs, (size_t)(pc - code));
1028 : }
1029 :
1030 : extern "C" void JaegerTrampolineReturn();
1031 : extern "C" void JaegerInterpoline();
1032 : extern "C" void JaegerInterpolineScripted();
1033 :
1034 : #if defined(_MSC_VER) || defined(_WIN64)
1035 : extern "C" void *JaegerThrowpoline(js::VMFrame *vmFrame);
1036 : #else
1037 : extern "C" void JaegerThrowpoline();
1038 : #endif
1039 :
1040 : #if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
1041 : extern "C" void JaegerInterpolinePatched();
1042 : #endif
1043 :
1044 : #endif /* jsjaeger_h__ */
1045 :
|