LCOV - code coverage report
Current view: js/src/methodjit/BaseAssembler.h (source / functions)
Test: app.info    Date: 2012-06-02
                  Found     Hit    Coverage
Lines:              598     557      93.1 %
Functions:          102      99      97.1 %

       1                 : /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
       2                 :  * vim: set ts=4 sw=4 et tw=99:
       3                 :  *
       4                 :  * ***** BEGIN LICENSE BLOCK *****
       5                 :  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
       6                 :  *
       7                 :  * The contents of this file are subject to the Mozilla Public License Version
       8                 :  * 1.1 (the "License"); you may not use this file except in compliance with
       9                 :  * the License. You may obtain a copy of the License at
      10                 :  * http://www.mozilla.org/MPL/
      11                 :  *
      12                 :  * Software distributed under the License is distributed on an "AS IS" basis,
      13                 :  * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
      14                 :  * for the specific language governing rights and limitations under the
      15                 :  * License.
      16                 :  *
      17                 :  * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
      18                 :  * May 28, 2008.
      19                 :  *
      20                 :  * The Initial Developer of the Original Code is
      21                 :  *   Brendan Eich <brendan@mozilla.org>
      22                 :  *
      23                 :  * Contributor(s):
      24                 :  *   David Anderson <danderson@mozilla.com>
      25                 :  *   David Mandelin <dmandelin@mozilla.com>
      26                 :  *
      27                 :  * Alternatively, the contents of this file may be used under the terms of
      28                 :  * either of the GNU General Public License Version 2 or later (the "GPL"),
      29                 :  * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
      30                 :  * in which case the provisions of the GPL or the LGPL are applicable instead
      31                 :  * of those above. If you wish to allow use of your version of this file only
      32                 :  * under the terms of either the GPL or the LGPL, and not to allow others to
      33                 :  * use your version of this file under the terms of the MPL, indicate your
      34                 :  * decision by deleting the provisions above and replace them with the notice
      35                 :  * and other provisions required by the GPL or the LGPL. If you do not delete
      36                 :  * the provisions above, a recipient may use your version of this file under
      37                 :  * the terms of any one of the MPL, the GPL or the LGPL.
      38                 :  *
      39                 :  * ***** END LICENSE BLOCK ***** */
      40                 : 
      41                 : #if !defined jsjaeger_baseassembler_h__ && defined JS_METHODJIT
      42                 : #define jsjaeger_baseassembler_h__
      43                 : 
      44                 : #include "jscntxt.h"
      45                 : #include "assembler/assembler/MacroAssemblerCodeRef.h"
      46                 : #include "assembler/assembler/MacroAssembler.h"
      47                 : #include "assembler/assembler/LinkBuffer.h"
      48                 : #include "assembler/moco/MocoStubs.h"
      49                 : #include "methodjit/MethodJIT.h"
      50                 : #include "methodjit/MachineRegs.h"
      51                 : #include "CodeGenIncludes.h"
      52                 : #include "jsobjinlines.h"
      53                 : #include "jsscopeinlines.h"
      54                 : 
      55                 : namespace js {
      56                 : namespace mjit {
      57                 : 
      58                 : // Represents an int32_t property name in generated code, which must be either
      59                 : // a RegisterID or a constant value.
      60            5692 : struct Int32Key {
      61                 :     typedef JSC::MacroAssembler::RegisterID RegisterID;
      62                 : 
      63                 :     MaybeRegisterID reg_;
      64                 :     int32_t index_;
      65                 : 
      66           68812 :     Int32Key() : index_(0) { }
      67                 : 
      68           41305 :     static Int32Key FromRegister(RegisterID reg) {
      69           41305 :         Int32Key key;
      70           41305 :         key.reg_ = reg;
      71                 :         return key;
      72                 :     }
      73           21815 :     static Int32Key FromConstant(int32_t index) {
      74           21815 :         Int32Key key;
      75           21815 :         key.index_ = index;
      76                 :         return key;
      77                 :     }
      78                 : 
      79           50267 :     int32_t index() const {
      80           50267 :         JS_ASSERT(!reg_.isSet());
      81           50267 :         return index_;
      82                 :     }
      83                 : 
      84          220319 :     RegisterID reg() const { return reg_.reg(); }
      85          227753 :     bool isConstant() const { return !reg_.isSet(); }
      86                 : };
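                          : 
                          : // A minimal usage sketch (assuming a hypothetical, already-constructed
                          : // Assembler |masm|, plus |objReg|, |idxReg|, |idx| and |haveConstantIndex|):
                          : // an Int32Key lets the same code emit either a register-based or a
                          : // constant-based bounds check.
                          : //
                          : //   Int32Key key = haveConstantIndex ? Int32Key::FromConstant(idx)
                          : //                                     : Int32Key::FromRegister(idxReg);
                          : //   Jump oob = masm.guardArrayExtent(ObjectElements::offsetOfInitializedLength(),
                          : //                                    objReg, key, Assembler::BelowOrEqual);
                          : //   // |oob| is later linked to an out-of-line slow path.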
      87                 : 
      88                 : struct FrameAddress : JSC::MacroAssembler::Address
      89                 : {
      90        21416338 :     FrameAddress(int32_t offset)
      91        21416338 :       : Address(JSC::MacroAssembler::stackPointerRegister, offset)
      92        21416338 :     { }
      93                 : };
      94                 : 
      95                 : struct ImmIntPtr : public JSC::MacroAssembler::ImmPtr
      96                 : {
      97          238383 :     ImmIntPtr(intptr_t val)
      98          238383 :       : ImmPtr(reinterpret_cast<void*>(val))
      99          238383 :     { }
     100                 : };
     101                 : 
     102                 : struct StackMarker {
     103                 :     uint32_t base;
     104                 :     uint32_t bytes;
     105                 : 
     106             216 :     StackMarker(uint32_t base, uint32_t bytes)
     107             216 :       : base(base), bytes(bytes)
     108             216 :     { }
     109                 : };
     110                 : 
     111                 : class Assembler : public ValueAssembler
     112          692621 : {
     113         7471503 :     struct CallPatch {
     114         4905615 :         CallPatch(Call cl, void *fun)
     115         4905615 :           : call(cl), fun(fun)
     116         4905615 :         { }
     117                 : 
     118                 :         Call call;
     119                 :         JSC::FunctionPtr fun;
     120                 :     };
     121                 : 
     122          582830 :     struct DoublePatch {
     123                 :         double d;
     124                 :         DataLabelPtr label;
     125                 :     };
     126                 : 
     127                 :     /* :TODO: OOM */
     128                 :     Label startLabel;
     129                 :     Vector<CallPatch, 64, SystemAllocPolicy> callPatches;
     130                 :     Vector<DoublePatch, 16, SystemAllocPolicy> doublePatches;
     131                 : 
     132                 :     // Registers that can be clobbered during a call sequence.
     133                 :     Registers   availInCall;
     134                 : 
     135                 :     // Extra number of bytes that can be used for storing structs/references
     136                 :     // across calls.
     137                 :     uint32_t    extraStackSpace;
     138                 : 
     139                 :     // Calling convention used by the currently in-progress call.
     140                 :     Registers::CallConvention callConvention;
     141                 : 
     142                 :     // Amount of stack space reserved for the currently in-progress call. This
     143                 :     // includes alignment and parameters.
     144                 :     uint32_t    stackAdjust;
     145                 : 
     146                 :     // Debug flag to make sure calls do not nest.
     147                 : #ifdef DEBUG
     148                 :     bool        callIsAligned;
     149                 : #endif
     150                 : 
     151                 :   public:
     152          692621 :     Assembler()
     153                 :       : callPatches(SystemAllocPolicy()),
     154                 :         availInCall(0),
     155                 :         extraStackSpace(0),
     156                 :         stackAdjust(0)
     157                 : #ifdef DEBUG
     158          692621 :         , callIsAligned(false)
     159                 : #endif
     160                 :     {
     161          692621 :         startLabel = label();
     162          692621 :     }
     163                 : 
     164                 :     /* Register pair storing returned type/data for calls. */
     165                 : #if defined(JS_CPU_X86) || defined(JS_CPU_X64)
     166                 : static const JSC::MacroAssembler::RegisterID JSReturnReg_Type  = JSC::X86Registers::edi;
     167                 : static const JSC::MacroAssembler::RegisterID JSReturnReg_Data  = JSC::X86Registers::esi;
     168                 : static const JSC::MacroAssembler::RegisterID JSParamReg_Argc   = JSC::X86Registers::ecx;
     169                 : #elif defined(JS_CPU_ARM)
     170                 : static const JSC::MacroAssembler::RegisterID JSReturnReg_Type  = JSC::ARMRegisters::r5;
     171                 : static const JSC::MacroAssembler::RegisterID JSReturnReg_Data  = JSC::ARMRegisters::r4;
     172                 : static const JSC::MacroAssembler::RegisterID JSParamReg_Argc   = JSC::ARMRegisters::r1;
     173                 : #elif defined(JS_CPU_SPARC)
     174                 : static const JSC::MacroAssembler::RegisterID JSReturnReg_Type = JSC::SparcRegisters::l2;
     175                 : static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::SparcRegisters::l3;
     176                 : static const JSC::MacroAssembler::RegisterID JSParamReg_Argc  = JSC::SparcRegisters::l4;
     177                 : #elif defined(JS_CPU_MIPS)
     178                 : static const JSC::MacroAssembler::RegisterID JSReturnReg_Type = JSC::MIPSRegisters::a0;
     179                 : static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::MIPSRegisters::a2;
     180                 : static const JSC::MacroAssembler::RegisterID JSParamReg_Argc  = JSC::MIPSRegisters::a1;
     181                 : #endif
     182                 : 
     183         4743475 :     size_t distanceOf(Label l) {
     184         4743475 :         return differenceBetween(startLabel, l);
     185                 :     }
     186                 : 
     187          408858 :     void loadPtrFromImm(void *ptr, RegisterID reg) {
     188          408858 :         loadPtr(ptr, reg);
     189          408858 :     }
     190                 : 
     191          709814 :     void loadShape(RegisterID obj, RegisterID shape) {
     192          709814 :         loadPtr(Address(obj, JSObject::offsetOfShape()), shape);
     193          709814 :     }
     194                 : 
     195           69647 :     Jump guardShape(RegisterID objReg, const Shape *shape) {
     196           69647 :         return branchPtr(NotEqual, Address(objReg, JSObject::offsetOfShape()), ImmPtr(shape));
     197                 :     }
     198                 : 
     199           43558 :     Jump guardShape(RegisterID objReg, JSObject *obj) {
     200           43558 :         return guardShape(objReg, obj->lastProperty());
     201                 :     }
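                          : 
                          : // Usage sketch (hypothetical |masm|, |objReg| and |obj|): guard that the
                          : // object still has the shape observed at compile time before taking an
                          : // inline fast path.
                          : //
                          : //   Jump shapeMismatch = masm.guardShape(objReg, obj);
                          : //   // ... fast path that assumes obj's layout ...
                          : //   // shapeMismatch is linked to stub code that falls back to the VM.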
     202                 : 
     203                 :     /*
     204                 :      * Finds and returns the address of a known object and slot.
     205                 :      */
     206                 :     Address objSlotRef(JSObject *obj, RegisterID reg, uint32_t slot) {
     207                 :         move(ImmPtr(obj), reg);
     208                 :         if (obj->isFixedSlot(slot)) {
     209                 :             return Address(reg, JSObject::getFixedSlotOffset(slot));
     210                 :         } else {
     211                 :             loadPtr(Address(reg, JSObject::offsetOfSlots()), reg);
     212                 :             return Address(reg, obj->dynamicSlotIndex(slot) * sizeof(Value));
     213                 :         }
     214                 :     }
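                          : 
                          : // Usage sketch (hypothetical |masm|, |obj|, |slot|, |scratchReg|,
                          : // |typeReg| and |dataReg|): materialize the slot address of a known
                          : // object, then unbox the stored value.
                          : //
                          : //   Address slotAddr = masm.objSlotRef(obj, scratchReg, slot);
                          : //   masm.loadValueAsComponents(slotAddr, typeReg, dataReg);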
     215                 : 
     216                 : #ifdef JS_CPU_X86
     217            3813 :     void idiv(RegisterID reg) {
     218            3813 :         m_assembler.cdq();
     219            3813 :         m_assembler.idivl_r(reg);
     220            3813 :     }
     221                 : 
     222         1322273 :     void fastLoadDouble(RegisterID lo, RegisterID hi, FPRegisterID fpReg) {
     223         1322273 :         JS_ASSERT(fpReg != Registers::FPConversionTemp);
     224         1322273 :         if (MacroAssemblerX86Common::getSSEState() >= HasSSE4_1) {
     225         1321980 :             m_assembler.movd_rr(lo, fpReg);
     226         1321980 :             m_assembler.pinsrd_rr(hi, fpReg);
     227                 :         } else {
     228             293 :             m_assembler.movd_rr(lo, fpReg);
     229             293 :             m_assembler.movd_rr(hi, Registers::FPConversionTemp);
     230             293 :             m_assembler.unpcklps_rr(Registers::FPConversionTemp, fpReg);
     231                 :         }
     232         1322273 :     }
     233                 : #endif
     234                 : 
     235                 :     /*
     236                 :      * Move a register pair which may indicate either an int32_t or double into fpreg,
     237                 :      * converting to double in the int32_t case.
     238                 :      */
     239           10152 :     void moveInt32OrDouble(RegisterID data, RegisterID type, Address address, FPRegisterID fpreg)
     240                 :     {
     241                 : #ifdef JS_CPU_X86
     242           10152 :         fastLoadDouble(data, type, fpreg);
     243           10152 :         Jump notInteger = testInt32(Assembler::NotEqual, type);
     244           10152 :         convertInt32ToDouble(data, fpreg);
     245           10152 :         notInteger.linkTo(label(), this);
     246                 : #else
     247                 :         Jump notInteger = testInt32(Assembler::NotEqual, type);
     248                 :         convertInt32ToDouble(data, fpreg);
     249                 :         Jump fallthrough = jump();
     250                 :         notInteger.linkTo(label(), this);
     251                 : 
     252                 :         /* Store the components, then read it back out as a double. */
     253                 :         storeValueFromComponents(type, data, address);
     254                 :         loadDouble(address, fpreg);
     255                 : 
     256                 :         fallthrough.linkTo(label(), this);
     257                 : #endif
     258           10152 :     }
     259                 : 
     260                 :     /*
     261                 :      * Move a memory address which contains either an int32_t or double into fpreg,
     262                 :      * converting to double in the int32_t case.
     263                 :      */
     264                 :     template <typename T>
     265            2205 :     void moveInt32OrDouble(T address, FPRegisterID fpreg)
     266                 :     {
     267            2205 :         Jump notInteger = testInt32(Assembler::NotEqual, address);
     268            2205 :         convertInt32ToDouble(payloadOf(address), fpreg);
     269            2205 :         Jump fallthrough = jump();
     270            2205 :         notInteger.linkTo(label(), this);
     271            2205 :         loadDouble(address, fpreg);
     272            2205 :         fallthrough.linkTo(label(), this);
     273            2205 :     }
     274                 : 
     275                 :     /* Ensure that an in-memory address is definitely a double. */
     276           20454 :     void ensureInMemoryDouble(Address address)
     277                 :     {
     278           20454 :         Jump notInteger = testInt32(Assembler::NotEqual, address);
     279           20454 :         convertInt32ToDouble(payloadOf(address), Registers::FPConversionTemp);
     280           20454 :         storeDouble(Registers::FPConversionTemp, address);
     281           20454 :         notInteger.linkTo(label(), this);
     282           20454 :     }
     283                 : 
     284            3250 :     void negateDouble(FPRegisterID fpreg)
     285                 :     {
     286                 : #if defined JS_CPU_X86 || defined JS_CPU_X64
     287                 :         static const uint64_t DoubleNegMask = 0x8000000000000000ULL;
     288            3250 :         loadDouble(&DoubleNegMask, Registers::FPConversionTemp);
     289            3250 :         xorDouble(Registers::FPConversionTemp, fpreg);
     290                 : #elif defined JS_CPU_ARM || defined JS_CPU_SPARC || defined JS_CPU_MIPS
     291                 :         negDouble(fpreg, fpreg);
     292                 : #endif
     293            3250 :     }
     294                 : 
     295                 :     /* Prepare for a call that might THROW. */
     296         4905615 :     void *getFallibleCallTarget(void *fun) {
     297                 : #ifdef JS_CPU_ARM
     298                 :         /*
     299                 :          * Insert a veneer for ARM to allow it to catch exceptions. There is no
     300                 :          * reliable way to determine the location of the return address on the
     301                 :          * stack, so a typical C(++) return address cannot be hijacked.
     302                 :          *
     303                 :          * We put the real target address into IP, as this won't conflict with
     304                 :          * the EABI argument-passing mechanism. JaegerStubVeneer is responsible
     305                 :          * for calling 'fun' (in IP) and catching exceptions.
     306                 :          *
     307                 :          * Note that we must use 'moveWithPatch' here, rather than 'move',
     308                 :          * because 'move' might try to optimize the constant load, and we need a
     309                 :          * consistent code sequence for patching.
     310                 :          */
     311                 :         moveWithPatch(Imm32(intptr_t(fun)), JSC::ARMRegisters::ip);
     312                 : 
     313                 :         return JS_FUNC_TO_DATA_PTR(void *, JaegerStubVeneer);
     314                 : #elif defined(JS_CPU_SPARC)
     315                 :         /*
      316                 :          * We could let the call return to a target address stored on the
      317                 :          * stack without a veneer: record the return address and jump to it
      318                 :          * once the call returns to JIT code. We use the veneer anyway
      319                 :          * because JIT code may be released while an exception is being
      320                 :          * handled, which would leave the call with no valid JIT code to
      321                 :          * return to.
     322                 :          */
     323                 :         moveWithPatch(Imm32(intptr_t(fun)), JSC::SparcRegisters::i0);
     324                 :         return JS_FUNC_TO_DATA_PTR(void *, JaegerStubVeneer);
     325                 : #elif defined(JS_CPU_MIPS)
     326                 :         /*
     327                 :          * For MIPS, we need to call JaegerStubVeneer by passing
     328                 :          * the real target address in v0.
     329                 :          */
     330                 :         moveWithPatch(Imm32(intptr_t(fun)), JSC::MIPSRegisters::v0);
     331                 :         return JS_FUNC_TO_DATA_PTR(void *, JaegerStubVeneer);
     332                 : #else
     333                 :         /*
     334                 :          * Architectures that push the return address to an easily-determined
     335                 :          * location on the stack can hijack C++'s return mechanism by overwriting
     336                 :          * that address, so a veneer is not required.
     337                 :          */
     338         4905615 :         return fun;
     339                 : #endif
     340                 :     }
     341                 : 
     342         4905831 :     static inline uint32_t align(uint32_t bytes, uint32_t alignment) {
     343         4905831 :         return (alignment - (bytes % alignment)) % alignment;
     344                 :     }
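                          : 
                          : // For example, align(20, 16) == (16 - (20 % 16)) % 16 == 12: a stack that
                          : // is 20 bytes deep needs 12 more bytes to reach the next 16-byte boundary.
                          : // align(32, 16) == 0 because 32 is already aligned.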
     345                 : 
     346                 :     // Specifies extra stack space that is available across a call, for storing
     347                 :     // large parameters (structs) or returning values via references. All extra
     348                 :     // stack space must be reserved up-front, and is aligned on an 8-byte
     349                 :     // boundary.
     350                 :     //
      351                 :     // Returns a StackMarker that can be used to index into this stack space.
     352             216 :     StackMarker allocStack(uint32_t bytes, uint32_t alignment = 4) {
     353             216 :         bytes += align(bytes + extraStackSpace, alignment);
     354             216 :         subPtr(Imm32(bytes), stackPointerRegister);
     355             216 :         extraStackSpace += bytes;
     356             216 :         return StackMarker(extraStackSpace, bytes);
     357                 :     }
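                          : 
                          : // Sketch of the extra-stack protocol (hypothetical |masm|, a stub |fun|
                          : // that fills a Value out-parameter, and |typeReg|/|dataReg|):
                          : //
                          : //   StackMarker vp = masm.allocStack(sizeof(Value), sizeof(double));
                          : //   masm.setupABICall(Registers::FastCall, 2);
                          : //   masm.storeArg(0, Registers::ArgReg0);
                          : //   masm.storeArgAddr(1, masm.addressOfExtra(vp));
                          : //   masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, fun), true);
                          : //   masm.loadValueAsComponents(masm.addressOfExtra(vp), typeReg, dataReg);
                          : //   masm.freeStack(vp);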
     358                 : 
     359                 :     // Similar to allocStack(), but combines it with a push().
     360             570 :     void saveReg(RegisterID reg) {
     361             570 :         push(reg);
     362             570 :         extraStackSpace += sizeof(void *);
     363             570 :     }
     364                 : 
     365                 :     // Similar to freeStack(), but combines it with a pop().
     366             570 :     void restoreReg(RegisterID reg) {
     367             570 :         JS_ASSERT(extraStackSpace >= sizeof(void *));
     368             570 :         extraStackSpace -= sizeof(void *);
     369             570 :         pop(reg);
     370             570 :     }
     371                 : 
     372                 : #if defined JS_CPU_MIPS
     373                 :     static const uint32_t StackAlignment = 8;
     374                 : #else
     375                 :     static const uint32_t StackAlignment = 16;
     376                 : #endif
     377                 : 
     378         4905615 :     static inline uint32_t alignForCall(uint32_t stackBytes) {
     379                 : #if defined(JS_CPU_X86) || defined(JS_CPU_X64) || defined(JS_CPU_MIPS)
     380                 :         // If StackAlignment is a power of two, % is just two shifts.
      381                 :         // 16 - (x % 16) gives the needed padding; the outer % 16 handles x % 16 == 0.
     382         4905615 :         return align(stackBytes, StackAlignment);
     383                 : #else
     384                 :         return 0;
     385                 : #endif
     386                 :     }
     387                 : 
     388                 :     // Some platforms require stack manipulation before making stub calls.
     389                 :     // When using THROW/V, the return address is replaced, meaning the
      390                 :     // stack de-adjustment will not have occurred. JaegerThrowpoline accounts
     391                 :     // for this. For stub calls, which are always invoked as if they use
     392                 :     // two parameters, the stack adjustment is constant.
     393                 :     //
     394                 :     // When using callWithABI() manually, for example via an IC, it might
     395                 :     // be necessary to jump directly to JaegerThrowpoline. In this case,
     396                 :     // the constant is provided here in order to appropriately adjust the
     397                 :     // stack.
     398                 : #ifdef _WIN64
     399                 :     static const uint32_t ReturnStackAdjustment = 32;
     400                 : #elif defined(JS_CPU_X86) && defined(JS_NO_FASTCALL)
     401                 :     static const uint32_t ReturnStackAdjustment = 16;
     402                 : #else
     403                 :     static const uint32_t ReturnStackAdjustment = 0;
     404                 : #endif
     405                 : 
     406           35521 :     void throwInJIT() {
     407                 :         if (ReturnStackAdjustment)
     408                 :             subPtr(Imm32(ReturnStackAdjustment), stackPointerRegister);
     409           35521 :         move(ImmPtr(JS_FUNC_TO_DATA_PTR(void *, JaegerThrowpoline)), Registers::ReturnReg);
     410           35521 :         jump(Registers::ReturnReg);
     411           35521 :     }
     412                 : 
     413                 :     // Windows x64 requires extra space in between calls.
     414                 : #ifdef _WIN64
     415                 :     static const uint32_t ShadowStackSpace = 32;
     416                 : #elif defined(JS_CPU_SPARC)
     417                 :     static const uint32_t ShadowStackSpace = 92;
     418                 : #else
     419                 :     static const uint32_t ShadowStackSpace = 0;
     420                 : #endif
     421                 : 
     422                 : #if defined(JS_CPU_SPARC)
     423                 :     static const uint32_t BaseStackSpace = 104;
     424                 : #else
     425                 :     static const uint32_t BaseStackSpace = 0;
     426                 : #endif
     427                 : 
     428                 :     // Prepare the stack for a call sequence. This must be called AFTER all
     429                 :     // volatile regs have been saved, and BEFORE pushArg() is used. The stack
      430                 :     // is assumed to be aligned to 16 bytes plus any pushes that occurred via
      431                 :     // saveReg().
     432                 :     //
     433                 :     // During a call sequence all registers are "owned" by the Assembler.
      434                 :     // Attempting loads, nested calls, or anything else that can clobber a
      435                 :     // register is asking for breakage on some platform or in some situation.
      436                 :     // Restrict yourself to storeArg() between setupABICall() and callWithABI().
     437         4905615 :     void setupABICall(Registers::CallConvention convention, uint32_t generalArgs) {
     438         4905615 :         JS_ASSERT(!callIsAligned);
     439                 : 
     440         4905615 :         uint32_t numArgRegs = Registers::numArgRegs(convention);
     441                 :         uint32_t pushCount = (generalArgs > numArgRegs)
     442                 :                            ? generalArgs - numArgRegs
     443         4905615 :                            : 0;
     444                 : 
     445                 :         // Assume all temporary regs are available to clobber.
     446         4905615 :         availInCall = Registers::TempRegs;
     447                 : 
     448                 :         // Find the total number of bytes the stack will have been adjusted by,
     449                 :         // in order to compute alignment.
     450                 :         uint32_t total = (pushCount * sizeof(void *)) +
     451         4905615 :                        extraStackSpace;
     452                 : 
     453                 :         stackAdjust = (pushCount * sizeof(void *)) +
     454         4905615 :                       alignForCall(total);
     455                 : 
     456                 : #ifdef _WIN64
     457                 :         // Windows x64 ABI requires 32 bytes of "shadow space" for the callee
     458                 :         // to spill its parameters.
     459                 :         stackAdjust += ShadowStackSpace;
     460                 : #endif
     461                 : 
     462         4905615 :         if (stackAdjust)
     463           35592 :             subPtr(Imm32(stackAdjust), stackPointerRegister);
     464                 : 
     465         4905615 :         callConvention = convention;
     466                 : #ifdef DEBUG
     467         4905615 :         callIsAligned = true;
     468                 : #endif
     469         4905615 :     }
     470                 : 
     471                 :     // Computes an interior pointer into VMFrame during a call.
     472             216 :     Address vmFrameOffset(uint32_t offs) {
     473             216 :         return Address(stackPointerRegister, stackAdjust + extraStackSpace + offs);
     474                 :     }
     475                 : 
     476                 :     // Get an Address to the extra space already allocated before the call.
     477             483 :     Address addressOfExtra(const StackMarker &marker) {
     478                 :         // Stack looks like this:
     479                 :         //   extraStackSpace
     480                 :         //   stackAdjust
     481                 :         // To get to the requested offset into extraStackSpace, we can walk
      482                 :         // up to the top of the extra stack space, then subtract the marker's base.
     483                 :         //
      484                 :         // Note that we need not be inside a call; stackAdjust can be 0.
     485             483 :         JS_ASSERT(marker.base <= extraStackSpace);
     486             483 :         return Address(stackPointerRegister, BaseStackSpace + stackAdjust + extraStackSpace - marker.base);
     487                 :     }
     488                 : 
     489                 :     // This is an internal function only for use inside a setupABICall(),
     490                 :     // callWithABI() sequence, and only for arguments known to fit in
     491                 :     // registers.
     492          107940 :     Address addressOfArg(uint32_t i) {
     493          107940 :         uint32_t numArgRegs = Registers::numArgRegs(callConvention);
     494          107940 :         JS_ASSERT(i >= numArgRegs);
     495                 : 
     496                 :         // Note that shadow space is for the callee to spill, and thus it must
     497                 :         // be skipped when writing its arguments.
     498          107940 :         int32_t spOffset = ((i - numArgRegs) * sizeof(void *)) + ShadowStackSpace;
     499          107940 :         return Address(stackPointerRegister, spOffset);
     500                 :     }
     501                 : 
     502                 :     // Push an argument for a call.
     503         9812468 :     void storeArg(uint32_t i, RegisterID reg) {
     504         9812468 :         JS_ASSERT(callIsAligned);
     505                 :         RegisterID to;
     506         9812468 :         if (Registers::regForArg(callConvention, i, &to)) {
     507         9739756 :             if (reg != to)
     508               0 :                 move(reg, to);
     509         9739756 :             availInCall.takeRegUnchecked(to);
     510                 :         } else {
     511           72712 :             storePtr(reg, addressOfArg(i));
     512                 :         }
     513         9812468 :     }
     514                 : 
     515                 :     // This variant can clobber temporary registers. However, it will NOT
     516                 :     // clobber any registers that have already been set via storeArg().
     517             216 :     void storeArg(uint32_t i, Address address) {
     518             216 :         JS_ASSERT(callIsAligned);
     519                 :         RegisterID to;
     520             216 :         if (Registers::regForArg(callConvention, i, &to)) {
     521             216 :             loadPtr(address, to);
     522             216 :             availInCall.takeRegUnchecked(to);
     523               0 :         } else if (!availInCall.empty()) {
     524                 :             // Memory-to-memory, and there is a temporary register free.
     525               0 :             RegisterID reg = availInCall.takeAnyReg().reg();
     526               0 :             loadPtr(address, reg);
     527               0 :             storeArg(i, reg);
     528               0 :             availInCall.putReg(reg);
     529                 :         } else {
     530                 :             // Memory-to-memory, but no temporary registers are free.
      531                 :             // This shouldn't happen on any platform, because
     532                 :             // (TempRegs) Union (ArgRegs) != 0
     533               0 :             JS_NOT_REACHED("too much reg pressure");
     534                 :         }
     535             216 :     }
     536                 : 
     537                 :     // This variant can clobber temporary registers. However, it will NOT
     538                 :     // clobber any registers that have already been set via storeArg().
     539             216 :     void storeArgAddr(uint32_t i, Address address) {
     540             216 :         JS_ASSERT(callIsAligned);
     541                 :         RegisterID to;
     542             216 :         if (Registers::regForArg(callConvention, i, &to)) {
     543             216 :             lea(address, to);
     544             216 :             availInCall.takeRegUnchecked(to);
     545               0 :         } else if (!availInCall.empty()) {
     546                 :             // Memory-to-memory, and there is a temporary register free.
     547               0 :             RegisterID reg = availInCall.takeAnyReg().reg();
     548               0 :             lea(address, reg);
     549               0 :             storeArg(i, reg);
     550               0 :             availInCall.putReg(reg);
     551                 :         } else {
     552                 :             // Memory-to-memory, but no temporary registers are free.
      553                 :             // This shouldn't happen on any platform, because
     554                 :             // (TempRegs) Union (ArgRegs) != 0
     555               0 :             JS_NOT_REACHED("too much reg pressure");
     556                 :         }
     557             216 :     }
     558                 : 
     559           35228 :     void storeArg(uint32_t i, ImmPtr imm) {
     560           35228 :         JS_ASSERT(callIsAligned);
     561                 :         RegisterID to;
     562           35228 :         if (Registers::regForArg(callConvention, i, &to)) {
     563               0 :             move(imm, to);
     564               0 :             availInCall.takeRegUnchecked(to);
     565                 :         } else {
     566           35228 :             storePtr(imm, addressOfArg(i));
     567                 :         }
     568           35228 :     }
     569                 : 
     570                 :     // High-level call helper, given an optional function pointer and a
     571                 :     // calling convention. setupABICall() must have been called beforehand,
     572                 :     // as well as each numbered argument stored with storeArg().
     573                 :     //
     574                 :     // After callWithABI(), the call state is reset, so a new call may begin.
     575         4905615 :     Call callWithABI(void *fun, bool canThrow) {
     576                 : #ifdef JS_CPU_ARM
      577                 :         // The repatcher requires that these instructions be adjacent in
      578                 :         // memory, so make sure that they are in fact adjacent.
      579                 :         // Theoretically this requires only 12 bytes of space; however,
      580                 :         // there are at least a couple of off-by-one errors that I've noticed
      581                 :         // that make 12 insufficient. In case 16 is also insufficient, I've
      582                 :         // bumped it to 20.
     583                 :         ensureSpace(20);
     584                 :         int initFlushCount = flushCount();
     585                 : #endif
     586                 :         // [Bug 614953]: This can only be made conditional once the ARM back-end
     587                 :         // is able to distinguish and patch both call sequences. Other
      588                 :         // architectures are unaffected regardless.
     589                 :         //if (canThrow) {
     590                 :             // Some platforms (such as ARM) require a call veneer if the target
     591                 :             // might THROW. For other platforms, getFallibleCallTarget does
     592                 :             // nothing.
     593         4905615 :             fun = getFallibleCallTarget(fun);
     594                 :         //}
     595                 : 
     596         4905615 :         JS_ASSERT(callIsAligned);
     597                 : 
     598         4905615 :         Call cl = call();
     599         4905615 :         callPatches.append(CallPatch(cl, fun));
     600                 : #ifdef JS_CPU_ARM
     601                 :         JS_ASSERT(initFlushCount == flushCount());
     602                 : #endif
     603         4905615 :         if (stackAdjust)
     604           35592 :             addPtr(Imm32(stackAdjust), stackPointerRegister);
     605                 : 
     606         4905615 :         stackAdjust = 0;
     607                 : 
     608                 : #ifdef DEBUG
     609         4905615 :         callIsAligned = false;
     610                 : #endif
     611                 :         return cl;
     612                 :     }
     613                 : 
     614                 :     // Frees stack space allocated by allocStack().
     615             216 :     void freeStack(const StackMarker &mark) {
     616             216 :         JS_ASSERT(!callIsAligned);
     617             216 :         JS_ASSERT(mark.bytes <= extraStackSpace);
     618                 : 
     619             216 :         extraStackSpace -= mark.bytes;
     620             216 :         addPtr(Imm32(mark.bytes), stackPointerRegister);
     621             216 :     }
     622                 : 
      623                 :     // Wrap AbstractMacroAssembler::getLinkerCallReturnOffset, which is protected.
     624         4762722 :     unsigned callReturnOffset(Call call) {
     625         4762722 :         return getLinkerCallReturnOffset(call);
     626                 :     }
     627                 : 
     628                 : 
     629                 : #define STUB_CALL_TYPE(type)                                                  \
     630                 :     Call callWithVMFrame(bool inlining, type stub, jsbytecode *pc,            \
     631                 :                          DataLabelPtr *pinlined, uint32_t fd) {               \
     632                 :         return fallibleVMCall(inlining, JS_FUNC_TO_DATA_PTR(void *, stub),    \
     633                 :                               pc, pinlined, fd);                              \
     634                 :     }
     635                 : 
     636                 :     STUB_CALL_TYPE(JSObjStub);
     637                 :     STUB_CALL_TYPE(VoidPtrStubUInt32);
     638                 :     STUB_CALL_TYPE(VoidStubUInt32);
     639                 :     STUB_CALL_TYPE(VoidStub);
     640                 : 
     641                 : #undef STUB_CALL_TYPE
     642                 : 
     643         4905399 :     void setupFrameDepth(int32_t frameDepth) {
      644                 :         // |frameDepth < 0| implies ic::SplatApplyArgs has been called, which
     645                 :         // means regs.sp has already been set in the VMFrame.
     646         4905399 :         if (frameDepth >= 0) {
     647                 :             // sp = fp->slots() + frameDepth
     648                 :             // regs->sp = sp
     649                 :             addPtr(Imm32(sizeof(StackFrame) + frameDepth * sizeof(jsval)),
     650                 :                    JSFrameReg,
     651         4896003 :                    Registers::ClobberInCall);
     652         4896003 :             storePtr(Registers::ClobberInCall, FrameAddress(VMFrame::offsetOfRegsSp()));
     653                 :         }
     654         4905399 :     }
     655                 : 
     656         4869878 :     void setupInfallibleVMFrame(int32_t frameDepth) {
     657         4869878 :         setupFrameDepth(frameDepth);
     658                 : 
     659                 :         // The JIT has moved Arg1 already, and we've guaranteed to not clobber
     660                 :         // it. Move ArgReg0 into place now. setupFallibleVMFrame will not
     661                 :         // clobber it either.
     662         4869878 :         move(MacroAssembler::stackPointerRegister, Registers::ArgReg0);
     663         4869878 :     }
     664                 : 
     665         4792441 :     void setupFallibleVMFrame(bool inlining, jsbytecode *pc,
     666                 :                               DataLabelPtr *pinlined, int32_t frameDepth) {
     667         4792441 :         setupInfallibleVMFrame(frameDepth);
     668                 : 
     669                 :         /* regs->fp = fp */
     670         4792441 :         storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp));
     671                 : 
     672                 :         /* PC -> regs->pc :( */
     673         4792441 :         storePtr(ImmPtr(pc), FrameAddress(VMFrame::offsetOfRegsPc()));
     674                 : 
     675         4792441 :         if (inlining) {
     676                 :             /* inlined -> regs->inlined :( */
     677                 :             DataLabelPtr ptr = storePtrWithPatch(ImmPtr(NULL),
     678         1420058 :                                                  FrameAddress(VMFrame::offsetOfInlined));
     679         1420058 :             if (pinlined)
     680         1392271 :                 *pinlined = ptr;
     681                 :         }
     682                 : 
     683         4792441 :         restoreStackBase();
     684         4792441 :     }
     685                 : 
     686           35521 :     void setupFallibleABICall(bool inlining, jsbytecode *pc, int32_t frameDepth) {
     687           35521 :         setupFrameDepth(frameDepth);
     688                 : 
     689                 :         /* Store fp and pc */
     690           35521 :         storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp));
     691           35521 :         storePtr(ImmPtr(pc), FrameAddress(VMFrame::offsetOfRegsPc()));
     692                 : 
     693           35521 :         if (inlining) {
     694                 :             /* ABI calls cannot be made from inlined frames. */
     695           11086 :             storePtr(ImmPtr(NULL), FrameAddress(VMFrame::offsetOfInlined));
     696                 :         }
     697           35521 :     }
     698                 : 
     699         4827962 :     void restoreStackBase() {
     700                 : #if defined(JS_CPU_X86)
     701                 :         /*
      702                 :          * On x86 we use %ebp (the frame pointer) to store the JSStackFrame.
     703                 :          * Restore this before calling so that debuggers can construct a
     704                 :          * coherent stack if we crash outside of JIT code.
     705                 :          */
     706                 :         JS_STATIC_ASSERT(JSFrameReg == JSC::X86Registers::ebp);
     707         4827962 :         move(JSC::X86Registers::esp, JSFrameReg);
     708         4827962 :         addPtr(Imm32(VMFrame::STACK_BASE_DIFFERENCE), JSFrameReg);
     709                 : #endif
     710         4827962 :     }
     711                 : 
     712                 :     // An infallible VM call is a stub call (taking a VMFrame & and one
     713                 :     // optional parameter) that does not need |pc| and |fp| updated, since
      714                 :     // the call is guaranteed not to fail. However, |sp| is always coherent.
     715           77437 :     Call infallibleVMCall(void *ptr, int32_t frameDepth) {
     716           77437 :         setupInfallibleVMFrame(frameDepth);
     717           77437 :         return wrapVMCall(ptr);
     718                 :     }
     719                 : 
     720                 :     // A fallible VM call is a stub call (taking a VMFrame & and one optional
     721                 :     // parameter) that needs the entire VMFrame to be coherent, meaning that
     722                 :     // |pc|, |inlined| and |fp| are guaranteed to be up-to-date.
     723         4792441 :     Call fallibleVMCall(bool inlining, void *ptr, jsbytecode *pc,
     724                 :                         DataLabelPtr *pinlined, int32_t frameDepth) {
     725         4792441 :         setupFallibleVMFrame(inlining, pc, pinlined, frameDepth);
     726         4792441 :         Call call = wrapVMCall(ptr);
     727                 : 
     728                 :         // Restore the frame pointer from the VM.
     729         4792441 :         loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
     730                 : 
     731                 :         return call;
     732                 :     }
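                          : 
                          : // Sketch of emitting a throwing stub call (hypothetical |masm|, |stub|,
                          : // current |pc| and |frameDepth|):
                          : //
                          : //   DataLabelPtr inlined;
                          : //   Call c = masm.fallibleVMCall(/* inlining = */ true,
                          : //                                JS_FUNC_TO_DATA_PTR(void *, stub),
                          : //                                pc, &inlined, frameDepth);
                          : //   // |c| is linked by finalize(); |inlined| marks the store to
                          : //   // VMFrame::inlined so it can be patched later.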
     733                 : 
     734         4869878 :     Call wrapVMCall(void *ptr) {
     735         4869878 :         JS_ASSERT(!callIsAligned);
     736                 : 
     737                 :         // Every stub call has at most two arguments.
     738         4869878 :         setupABICall(Registers::FastCall, 2);
     739                 : 
     740                 :         // On x86, if JS_NO_FASTCALL is present, these will result in actual
     741                 :         // pushes to the stack, which the caller will clean up. Otherwise,
     742                 :         // they'll be ignored because the registers fit into the calling
     743                 :         // sequence.
     744         4869878 :         storeArg(0, Registers::ArgReg0);
     745         4869878 :         storeArg(1, Registers::ArgReg1);
     746                 : 
     747                 :         // [Bug 614953]: The second argument, 'canThrow', can be set to 'false'
     748                 :         // for infallibleVMCall invocations. However, this changes the call
     749                 :         // sequence on ARM, and the ARM repatcher cannot currently distinguish
     750                 :         // between the two sequences. The argument does not affect the code
     751                 :         // generated by x86 or amd64.
     752         4869878 :         return callWithABI(ptr, true);
     753                 :     }
     754                 : 
      755                 :     // Constant doubles can't be directly moved into a register; we need to put
      756                 :     // them in memory and load them back from there.
     757          277319 :     void slowLoadConstantDouble(double d, FPRegisterID fpreg) {
     758          277319 :         DoublePatch patch;
     759          277319 :         patch.d = d;
     760          277319 :         patch.label = loadDouble(NULL, fpreg);
     761          277319 :         doublePatches.append(patch);
     762          277319 :     }
     763                 : 
     764          515308 :     size_t numDoubles() { return doublePatches.length(); }
     765                 : 
     766          574290 :     void finalize(JSC::LinkBuffer &linker, double *doubleVec = NULL) {
     767         5479395 :         for (size_t i = 0; i < callPatches.length(); i++) {
     768         4905105 :             CallPatch &patch = callPatches[i];
     769         4905105 :             linker.link(patch.call, JSC::FunctionPtr(patch.fun));
     770                 :         }
     771          851605 :         for (size_t i = 0; i < doublePatches.length(); i++) {
     772          277315 :             DoublePatch &patch = doublePatches[i];
     773          277315 :             doubleVec[i] = patch.d;
     774          277315 :             linker.patch(patch.label, &doubleVec[i]);
     775                 :         }
     776          574290 :     }
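                          : 
                          : // Sketch of the double-patching flow at the end of compilation
                          : // (hypothetical |masm|, |fpReg|, |linker|, and an allocator |allocDoubles|
                          : // whose memory lives as long as the generated code):
                          : //
                          : //   masm.slowLoadConstantDouble(3.14, fpReg);    // records a pending patch
                          : //   ...
                          : //   double *doubleVec = allocDoubles(masm.numDoubles());
                          : //   masm.finalize(linker, doubleVec);   // links calls, fills doubleVec and
                          : //                                       // points each double load at it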
     777                 : 
     778           20397 :     struct FastArrayLoadFails {
     779                 :         Jump rangeCheck;
     780                 :         Jump holeCheck;
     781                 :     };
     782                 : 
     783                 :     // Guard an extent (capacity, length or initialized length) on an array or typed array.
     784           85861 :     Jump guardArrayExtent(int offset, RegisterID reg,
     785                 :                           const Int32Key &key, Condition cond) {
     786           85861 :         Address extent(reg, offset);
     787           85861 :         if (key.isConstant())
     788           24144 :             return branch32(cond, extent, Imm32(key.index()));
     789           61717 :         return branch32(cond, extent, key.reg());
     790                 :     }
     791                 : 
     792              22 :     Jump guardElementNotHole(RegisterID elements, const Int32Key &key) {
     793              22 :         Jump jmp;
     794                 : 
     795              22 :         if (key.isConstant()) {
     796               8 :             Address slot(elements, key.index() * sizeof(Value));
     797               8 :             jmp = guardNotHole(slot);
     798                 :         } else {
     799              14 :             BaseIndex slot(elements, key.reg(), JSVAL_SCALE);
     800              14 :             jmp = guardNotHole(slot);
     801                 :         }
     802                 : 
     803                 :         return jmp;
     804                 :     }
     805                 : 
     806                 :     // Load a jsval from an array slot, given a key. |objReg| is clobbered.
     807           20397 :     FastArrayLoadFails fastArrayLoad(RegisterID objReg, const Int32Key &key,
     808                 :                                      RegisterID typeReg, RegisterID dataReg) {
     809           20397 :         JS_ASSERT(objReg != typeReg);
     810                 : 
     811           20397 :         RegisterID elementsReg = objReg;
     812           20397 :         loadPtr(Address(objReg, JSObject::offsetOfElements()), elementsReg);
     813                 : 
     814           20397 :         FastArrayLoadFails fails;
     815                 :         fails.rangeCheck = guardArrayExtent(ObjectElements::offsetOfInitializedLength(),
     816           20397 :                                             objReg, key, BelowOrEqual);
     817                 : 
     818                 :         // Load the slot out of the array.
     819           20397 :         if (key.isConstant()) {
     820           10205 :             Address slot(elementsReg, key.index() * sizeof(Value));
     821           10205 :             fails.holeCheck = fastArrayLoadSlot(slot, true, typeReg, dataReg);
     822                 :         } else {
     823           10192 :             BaseIndex slot(elementsReg, key.reg(), JSVAL_SCALE);
     824           10192 :             fails.holeCheck = fastArrayLoadSlot(slot, true, typeReg, dataReg);
     825                 :         }
     826                 : 
     827                 :         return fails;
     828                 :     }
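                          : 
                          : // Usage sketch (hypothetical |masm|, registers, and an out-of-line
                          : // |slowPathLabel|): both failure jumps funnel into the same slow path.
                          : //
                          : //   Int32Key key = Int32Key::FromRegister(idxReg);
                          : //   FastArrayLoadFails fails = masm.fastArrayLoad(objReg, key, typeReg, dataReg);
                          : //   fails.rangeCheck.linkTo(slowPathLabel, &masm);
                          : //   fails.holeCheck.linkTo(slowPathLabel, &masm);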
     829                 : 
     830           16128 :     void storeKey(const Int32Key &key, Address address) {
     831           16128 :         if (key.isConstant())
     832            2320 :             store32(Imm32(key.index()), address);
     833                 :         else
     834           13808 :             store32(key.reg(), address);
     835           16128 :     }
     836                 : 
     837           17424 :     void bumpKey(Int32Key &key, int32_t delta) {
     838           17424 :         if (key.isConstant())
     839            2320 :             key.index_ += delta;
     840                 :         else
     841           15104 :             add32(Imm32(delta), key.reg());
     842           17424 :     }
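                          : 
                          : // Sketch (hypothetical |masm|, with |elemsReg| holding an ObjectElements
                          : // pointer): after writing one element past the end, bump the key and
                          : // store it back as the new initialized length.
                          : //
                          : //   masm.bumpKey(key, 1);
                          : //   masm.storeKey(key, Address(elemsReg, ObjectElements::offsetOfInitializedLength()));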
     843                 : 
     844             536 :     void loadFrameActuals(JSFunction *fun, RegisterID reg) {
     845                 :         /* Bias for the case where there was an arguments overflow. */
     846             536 :         load32(Address(JSFrameReg, StackFrame::offsetOfNumActual()), reg);
     847             536 :         add32(Imm32(fun->nargs + 2), reg);
     848                 :         Jump overflowArgs = branchTest32(Assembler::NonZero,
     849             536 :                                          Address(JSFrameReg, StackFrame::offsetOfFlags()),
     850            1072 :                                          Imm32(StackFrame::OVERFLOW_ARGS));
     851             536 :         move(Imm32(fun->nargs), reg);
     852             536 :         overflowArgs.linkTo(label(), this);
     853             536 :         lshiftPtr(Imm32(3), reg);
     854             536 :         negPtr(reg);
     855             536 :         addPtr(JSFrameReg, reg);
     856             536 :     }
     857                 : 
     858          175022 :     void loadBaseShape(RegisterID obj, RegisterID dest) {
     859          175022 :         loadPtr(Address(obj, JSObject::offsetOfShape()), dest);
     860          175022 :         loadPtr(Address(dest, Shape::offsetOfBase()), dest);
     861          175022 :     }
     862                 : 
     863            7746 :     void loadObjClass(RegisterID obj, RegisterID dest) {
     864            7746 :         loadBaseShape(obj, dest);
     865            7746 :         loadPtr(Address(dest, BaseShape::offsetOfClass()), dest);
     866            7746 :     }
     867                 : 
     868           15066 :     Jump testClass(Condition cond, RegisterID claspReg, js::Class *clasp) {
     869           15066 :         return branchPtr(cond, claspReg, ImmPtr(clasp));
     870                 :     }
     871                 : 
     872          164863 :     Jump testObjClass(Condition cond, RegisterID obj, RegisterID temp, js::Class *clasp) {
     873          164863 :         loadBaseShape(obj, temp);
     874          164863 :         return branchPtr(cond, Address(temp, BaseShape::offsetOfClass()), ImmPtr(clasp));
     875                 :     }
     876                 : 
     877          150465 :     Jump testFunction(Condition cond, RegisterID fun, RegisterID temp) {
     878          150465 :         return testObjClass(cond, fun, temp, &js::FunctionClass);
     879                 :     }
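                          : 
                          : // Usage sketch (hypothetical |masm|, |calleeReg| and |tempReg|): bail to
                          : // a slow path unless the callee is a JSFunction.
                          : //
                          : //   Jump notFun = masm.testFunction(Assembler::NotEqual, calleeReg, tempReg);
                          : //   // ... fast path for scripted callees ...
                          : //   // notFun is linked to code that takes the generic call path.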
     880                 : 
     881            7689 :     void branchValue(Condition cond, RegisterID reg, int32_t value, RegisterID result)
     882                 :     {
     883            7689 :         if (Registers::maskReg(result) & Registers::SingleByteRegs) {
     884            7244 :             set32(cond, reg, Imm32(value), result);
     885                 :         } else {
     886             445 :             Jump j = branch32(cond, reg, Imm32(value));
     887             445 :             move(Imm32(0), result);
     888             445 :             Jump skip = jump();
     889             445 :             j.linkTo(label(), this);
     890             445 :             move(Imm32(1), result);
     891             445 :             skip.linkTo(label(), this);
     892                 :         }
     893            7689 :     }
     894                 : 
     895            1304 :     void branchValue(Condition cond, RegisterID lreg, RegisterID rreg, RegisterID result)
     896                 :     {
     897            1304 :         if (Registers::maskReg(result) & Registers::SingleByteRegs) {
     898            1087 :             set32(cond, lreg, rreg, result);
     899                 :         } else {
     900             217 :             Jump j = branch32(cond, lreg, rreg);
     901             217 :             move(Imm32(0), result);
     902             217 :             Jump skip = jump();
     903             217 :             j.linkTo(label(), this);
     904             217 :             move(Imm32(1), result);
     905             217 :             skip.linkTo(label(), this);
     906                 :         }
     907            1304 :     }
     908                 : 
     909            1179 :     void rematPayload(const StateRemat &remat, RegisterID reg) {
     910            1179 :         if (remat.inMemory())
     911             343 :             loadPayload(remat.address(), reg);
     912                 :         else
     913             836 :             move(remat.reg(), reg);
     914            1179 :     }
     915                 : 
     916          213367 :     void loadDynamicSlot(RegisterID objReg, uint32_t index,
     917                 :                          RegisterID typeReg, RegisterID dataReg) {
     918          213367 :         loadPtr(Address(objReg, JSObject::offsetOfSlots()), dataReg);
     919          213367 :         loadValueAsComponents(Address(dataReg, index * sizeof(Value)), typeReg, dataReg);
     920          213367 :     }
     921                 : 
     922          228056 :     void loadObjProp(JSObject *obj, RegisterID objReg,
     923                 :                      const js::Shape *shape,
     924                 :                      RegisterID typeReg, RegisterID dataReg)
     925                 :     {
     926          228056 :         if (obj->isFixedSlot(shape->slot()))
     927           14689 :             loadInlineSlot(objReg, shape->slot(), typeReg, dataReg);
     928                 :         else
     929          213367 :             loadDynamicSlot(objReg, obj->dynamicSlotIndex(shape->slot()), typeReg, dataReg);
     930          228056 :     }
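
loadObjProp mirrors the two places a property value can live on the C++ side. A rough sketch of the corresponding slot lookups, assuming the usual JSObject slot accessors:

    static const Value &objPropSketch(JSObject *obj, const js::Shape *shape)
    {
        uint32_t slot = shape->slot();
        if (obj->isFixedSlot(slot))
            return obj->getFixedSlot(slot);   // stored inline, at getFixedSlotOffset(slot)
        // Out-of-line slots hang off the pointer at offsetOfSlots(), indexed by
        // dynamicSlotIndex(slot), exactly as loadDynamicSlot does above.
        return obj->getSlot(slot);
    }
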
     931                 : 
     932                 : #ifdef JS_METHODJIT_TYPED_ARRAY
     933                 :     // Load a value from a typed array's packed data vector into dataReg.
     934                 :     // This function expects the following combinations of typeReg, dataReg and tempReg:
     935                 :     // 1) for all INT arrays other than UINT32:
     936                 :     //    - dataReg is a GP-register
     937                 :     //    - typeReg is optional
     938                 :     //    - tempReg is not set
     939                 :     // 2) for UINT32:
     940                 :     //    - dataReg is either a FP-register or a GP-register
     941                 :     //    - typeReg is set if dataReg is a GP-register
     942                 :     //    - tempReg is set if dataReg is a FP-register
     943                 :     // 3) for FLOAT32 and FLOAT64:
     944                 :     //    - dataReg is either a FP-register or a GP-register
     945                 :     //    - typeReg is set if dataReg is a GP-register
     946                 :     //    - tempReg is not set
     947                 :     template <typename T>
     948            1793 :     void loadFromTypedArray(int atype, T address, MaybeRegisterID typeReg,
     949                 :                             AnyRegisterID dataReg, MaybeRegisterID tempReg)
     950                 :     {
     951                 :         // If dataReg is an FP-register we don't use typeReg.
     952            1793 :         JS_ASSERT_IF(dataReg.isFPReg(), !typeReg.isSet());
     953                 : 
     954                 :         // We only need tempReg for Uint32Array and only if dataReg is an FP-register.
     955            1793 :         JS_ASSERT_IF(atype != js::TypedArray::TYPE_UINT32 || dataReg.isReg(), !tempReg.isSet());
     956                 : 
     957            1793 :         switch (atype) {
     958                 :           case js::TypedArray::TYPE_INT8:
     959             192 :             load8SignExtend(address, dataReg.reg());
     960             192 :             if (typeReg.isSet())
     961              95 :                 move(ImmType(JSVAL_TYPE_INT32), typeReg.reg());
     962             192 :             break;
     963                 :           case js::TypedArray::TYPE_UINT8:
     964                 :           case js::TypedArray::TYPE_UINT8_CLAMPED:
     965             369 :             load8ZeroExtend(address, dataReg.reg());
     966             369 :             if (typeReg.isSet())
     967             180 :                 move(ImmType(JSVAL_TYPE_INT32), typeReg.reg());
     968             369 :             break;
     969                 :           case js::TypedArray::TYPE_INT16:
     970             185 :             load16SignExtend(address, dataReg.reg());
     971             185 :             if (typeReg.isSet())
     972              95 :                 move(ImmType(JSVAL_TYPE_INT32), typeReg.reg());
     973             185 :             break;
     974                 :           case js::TypedArray::TYPE_UINT16:
     975             202 :             load16(address, dataReg.reg());
     976             202 :             if (typeReg.isSet())
     977             102 :                 move(ImmType(JSVAL_TYPE_INT32), typeReg.reg());
     978             202 :             break;
     979                 :           case js::TypedArray::TYPE_INT32:
     980             135 :             load32(address, dataReg.reg());
     981             135 :             if (typeReg.isSet())
     982              72 :                 move(ImmType(JSVAL_TYPE_INT32), typeReg.reg());
     983             135 :             break;
     984                 :           case js::TypedArray::TYPE_UINT32:
     985                 :           {
     986                 :             // For Uint32Array the result is either int32_t or double.
     987                 :             // If dataReg is a GP-register, load a double or int32_t into dataReg/typeReg.
      988                 :             // If dataReg is an FP-register, load the value as a double.
     989             195 :             if (dataReg.isReg()) {
     990             176 :                 load32(address, dataReg.reg());
     991             176 :                 move(ImmType(JSVAL_TYPE_INT32), typeReg.reg());
     992             176 :                 Jump safeInt = branch32(Assembler::Below, dataReg.reg(), Imm32(0x80000000));
     993             176 :                 convertUInt32ToDouble(dataReg.reg(), Registers::FPConversionTemp);
     994             176 :                 breakDouble(Registers::FPConversionTemp, typeReg.reg(), dataReg.reg());
     995             176 :                 safeInt.linkTo(label(), this);
     996                 :             } else {
     997              19 :                 load32(address, tempReg.reg());
     998              19 :                 convertUInt32ToDouble(tempReg.reg(), dataReg.fpreg());
     999                 :             }
    1000             195 :             break;
    1001                 :           }
    1002                 :           case js::TypedArray::TYPE_FLOAT32:
    1003                 :           case js::TypedArray::TYPE_FLOAT64:
    1004                 :           {
    1005                 :             FPRegisterID fpreg = dataReg.isReg()
    1006                 :                                ? Registers::FPConversionTemp
    1007             515 :                                : dataReg.fpreg();
    1008             515 :             if (atype == js::TypedArray::TYPE_FLOAT32)
    1009             341 :                 loadFloat(address, fpreg);
    1010                 :             else
    1011             174 :                 loadDouble(address, fpreg);
     1012                 :             // Make sure NaN gets canonicalized. If dataReg is not an FP-register,
     1013                 :             // we have to use loadStaticDouble, as we were probably called from an
     1014                 :             // IC and cannot use slowLoadConstantDouble.
    1015             515 :             Jump notNaN = branchDouble(Assembler::DoubleEqual, fpreg, fpreg);
    1016             515 :             if (dataReg.isReg())
    1017             209 :                 loadStaticDouble(&js_NaN, Registers::FPConversionTemp, dataReg.reg());
    1018                 :             else
    1019             306 :                 slowLoadConstantDouble(js_NaN, fpreg);
    1020             515 :             notNaN.linkTo(label(), this);
    1021             515 :             if (dataReg.isReg())
    1022             209 :                 breakDouble(Registers::FPConversionTemp, typeReg.reg(), dataReg.reg());
    1023             515 :             break;
    1024                 :           }
    1025                 :         }
    1026            1793 :     }
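
The register-combination rules in the comment before this function are easiest to see from the caller's side. A purely illustrative sketch (the registers are assumed to be already allocated, and the implicit MaybeRegisterID/AnyRegisterID conversions are assumed to behave as elsewhere in the method JIT):

    void typedArrayLoadExamples(Address addr, RegisterID typeReg, RegisterID dataGPR,
                                FPRegisterID dataFPR, RegisterID tempGPR)
    {
        // 1) Integer arrays other than Uint32: GP data register, optional type register.
        loadFromTypedArray(js::TypedArray::TYPE_INT16, addr, typeReg, dataGPR, MaybeRegisterID());

        // 2) Uint32 into a type/data GP pair: values >= 2^31 get boxed as doubles.
        loadFromTypedArray(js::TypedArray::TYPE_UINT32, addr, typeReg, dataGPR, MaybeRegisterID());

        //    Uint32 straight into an FP register: a GP temp is required.
        loadFromTypedArray(js::TypedArray::TYPE_UINT32, addr, MaybeRegisterID(), dataFPR, tempGPR);

        // 3) Float arrays into an FP register: neither typeReg nor tempReg is set.
        loadFromTypedArray(js::TypedArray::TYPE_FLOAT64, addr, MaybeRegisterID(), dataFPR, MaybeRegisterID());
    }
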
    1027                 : 
    1028            1793 :     void loadFromTypedArray(int atype, RegisterID objReg, Int32Key key,
    1029                 :                             MaybeRegisterID typeReg, AnyRegisterID dataReg,
    1030                 :                             MaybeRegisterID tempReg)
    1031                 :     {
    1032            1793 :         int shift = TypedArray::slotWidth(atype);
    1033                 : 
    1034            1793 :         if (key.isConstant()) {
    1035            1459 :             Address addr(objReg, key.index() * shift);
    1036            1459 :             loadFromTypedArray(atype, addr, typeReg, dataReg, tempReg);
    1037                 :         } else {
    1038             334 :             Assembler::Scale scale = Assembler::TimesOne;
    1039             334 :             switch (shift) {
    1040                 :               case 2:
    1041              75 :                 scale = Assembler::TimesTwo;
    1042              75 :                 break;
    1043                 :               case 4:
    1044             116 :                 scale = Assembler::TimesFour;
    1045             116 :                 break;
    1046                 :               case 8:
    1047              50 :                 scale = Assembler::TimesEight;
    1048              50 :                 break;
    1049                 :             }
    1050             334 :             BaseIndex addr(objReg, key.reg(), scale);
    1051             334 :             loadFromTypedArray(atype, addr, typeReg, dataReg, tempReg);
    1052                 :         }
    1053            1793 :     }
    1054                 : 
    1055                 :     template <typename S, typename T>
    1056            1139 :     void storeToTypedIntArray(int atype, S src, T address)
    1057                 :     {
    1058            1139 :         switch (atype) {
    1059                 :           case js::TypedArray::TYPE_INT8:
    1060                 :           case js::TypedArray::TYPE_UINT8:
    1061                 :           case js::TypedArray::TYPE_UINT8_CLAMPED:
    1062             598 :             store8(src, address);
    1063             598 :             break;
    1064                 :           case js::TypedArray::TYPE_INT16:
    1065                 :           case js::TypedArray::TYPE_UINT16:
    1066             260 :             store16(src, address);
    1067             260 :             break;
    1068                 :           case js::TypedArray::TYPE_INT32:
    1069                 :           case js::TypedArray::TYPE_UINT32:
    1070             281 :             store32(src, address);
    1071             281 :             break;
    1072                 :           default:
    1073               0 :             JS_NOT_REACHED("unknown int array type");
    1074                 :         }
    1075            1139 :     }
    1076                 : 
    1077                 :     template <typename S, typename T>
    1078             253 :     void storeToTypedFloatArray(int atype, S src, T address)
    1079                 :     {
    1080             253 :         if (atype == js::TypedArray::TYPE_FLOAT32)
    1081             174 :             storeFloat(src, address);
    1082                 :         else
    1083              79 :             storeDouble(src, address);
    1084             253 :     }
    1085                 : 
    1086                 :     template <typename T>
    1087             752 :     void storeToTypedArray(int atype, ValueRemat vr, T address)
    1088                 :     {
    1089             752 :         if (atype == js::TypedArray::TYPE_FLOAT32 || atype == js::TypedArray::TYPE_FLOAT64) {
    1090             262 :             if (vr.isConstant())
    1091              53 :                 storeToTypedFloatArray(atype, ImmDouble(vr.value().toDouble()), address);
    1092                 :             else
    1093              78 :                 storeToTypedFloatArray(atype, vr.fpReg(), address);
    1094                 :         } else {
    1095             621 :             if (vr.isConstant())
    1096             399 :                 storeToTypedIntArray(atype, Imm32(vr.value().toInt32()), address);
    1097                 :             else
    1098             222 :                 storeToTypedIntArray(atype, vr.dataReg(), address);
    1099                 :         }
    1100             752 :     }
    1101                 : 
    1102             752 :     void storeToTypedArray(int atype, RegisterID objReg, Int32Key key, ValueRemat vr)
    1103                 :     {
    1104             752 :         int shift = TypedArray::slotWidth(atype);
    1105             752 :         if (key.isConstant()) {
    1106             264 :             Address addr(objReg, key.index() * shift);
    1107             264 :             storeToTypedArray(atype, vr, addr);
    1108                 :         } else {
    1109             488 :             Assembler::Scale scale = Assembler::TimesOne;
    1110             488 :             switch (shift) {
    1111                 :             case 2:
    1112              70 :                 scale = Assembler::TimesTwo;
    1113              70 :                 break;
    1114                 :             case 4:
    1115             144 :                 scale = Assembler::TimesFour;
    1116             144 :                 break;
    1117                 :             case 8:
    1118              18 :                 scale = Assembler::TimesEight;
    1119              18 :                 break;
    1120                 :             }
    1121             488 :             BaseIndex addr(objReg, key.reg(), scale);
    1122             488 :             storeToTypedArray(atype, vr, addr);
    1123                 :         }
    1124             752 :     }
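
Both Int32Key overloads (load and store) form the same element address; a sketch of that computation, with illustrative names:

    static uint8_t *typedArrayElementSketch(uint8_t *data, int atype, int32_t index)
    {
        int width = TypedArray::slotWidth(atype);  // 1, 2, 4 or 8; picks TimesOne..TimesEight
        // Constant keys fold "index * width" into an Address displacement;
        // register keys express the same thing as BaseIndex(objReg, keyReg, scale).
        return data + index * width;
    }
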
    1125                 : 
    1126              52 :     void clampInt32ToUint8(RegisterID reg)
    1127                 :     {
    1128              52 :         Jump j = branch32(Assembler::GreaterThanOrEqual, reg, Imm32(0));
    1129              52 :         move(Imm32(0), reg);
    1130              52 :         Jump done = jump();
    1131              52 :         j.linkTo(label(), this);
    1132              52 :         j = branch32(Assembler::LessThanOrEqual, reg, Imm32(255));
    1133              52 :         move(Imm32(255), reg);
    1134              52 :         j.linkTo(label(), this);
    1135              52 :         done.linkTo(label(), this);
    1136              52 :     }
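
The three-way branch sequence above is just a clamp; written directly:

    static int32_t clampInt32ToUint8Sketch(int32_t v)
    {
        if (v < 0)
            return 0;
        return v > 255 ? 255 : v;
    }
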
    1137                 : 
    1138                 :     // Inline version of js_TypedArray_uint8_clamp_double.
    1139              11 :     void clampDoubleToUint8(FPRegisterID fpReg, FPRegisterID fpTemp, RegisterID reg)
    1140                 :     {
    1141              11 :         JS_ASSERT(fpTemp != Registers::FPConversionTemp);
    1142                 : 
    1143                 :         // <= 0 or NaN ==> 0
    1144              11 :         zeroDouble(fpTemp);
    1145              11 :         Jump positive = branchDouble(Assembler::DoubleGreaterThan, fpReg, fpTemp);
    1146              11 :         move(Imm32(0), reg);
    1147              11 :         Jump done1 = jump();
    1148                 : 
    1149                 :         // Add 0.5 and truncate.
    1150              11 :         positive.linkTo(label(), this);
    1151              11 :         slowLoadConstantDouble(0.5, fpTemp);
    1152              11 :         addDouble(fpReg, fpTemp);
    1153              11 :         Jump notInt = branchTruncateDoubleToInt32(fpTemp, reg);
    1154                 : 
    1155                 :         // > 255 ==> 255
    1156              11 :         Jump inRange = branch32(Assembler::BelowOrEqual, reg, Imm32(255));
    1157              11 :         notInt.linkTo(label(), this);
    1158              11 :         move(Imm32(255), reg);
    1159              11 :         Jump done2 = jump();
    1160                 : 
    1161                 :         // Check if we had a tie.
    1162              11 :         inRange.linkTo(label(), this);
    1163              11 :         convertInt32ToDouble(reg, Registers::FPConversionTemp);
    1164              11 :         Jump done3 = branchDouble(Assembler::DoubleNotEqual, fpTemp, Registers::FPConversionTemp);
    1165                 : 
    1166                 :         // It was a tie. Mask out the ones bit to get an even value.
    1167                 :         // See js_TypedArray_uint8_clamp_double for the reasoning behind this.
    1168              11 :         and32(Imm32(~1), reg);
    1169                 : 
    1170              11 :         done1.linkTo(label(), this);
    1171              11 :         done2.linkTo(label(), this);
    1172              11 :         done3.linkTo(label(), this);
    1173              11 :     }
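
The double variant clamps like the int32 version but also rounds, breaking .5 ties toward the even integer. A sketch of the intended semantics in plain C++ (mirroring the logic above, not quoting js_TypedArray_uint8_clamp_double):

    static uint8_t clampDoubleToUint8Sketch(double d)
    {
        if (!(d > 0))                     // NaN and values <= 0 clamp to 0
            return 0;
        double biased = d + 0.5;
        if (biased >= 256.0)              // too large (or truncation would fail): clamp to 255
            return 255;
        uint32_t n = uint32_t(biased);    // truncate toward zero
        if (double(n) == biased)          // exact tie: the fraction was .5, round to even
            n &= ~1u;
        return uint8_t(n);
    }
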
    1174                 : #endif /* JS_METHODJIT_TYPED_ARRAY */
    1175                 : 
    1176           17444 :     Address objPropAddress(JSObject *obj, RegisterID objReg, uint32_t slot)
    1177                 :     {
    1178           17444 :         if (obj->isFixedSlot(slot))
    1179           12272 :             return Address(objReg, JSObject::getFixedSlotOffset(slot));
    1180            5172 :         loadPtr(Address(objReg, JSObject::offsetOfSlots()), objReg);
    1181            5172 :         return Address(objReg, obj->dynamicSlotIndex(slot) * sizeof(Value));
    1182                 :     }
    1183                 : 
    1184               0 :     static uint32_t maskAddress(Address address) {
    1185               0 :         return Registers::maskReg(address.base);
    1186                 :     }
    1187                 : 
    1188              63 :     static uint32_t maskAddress(BaseIndex address) {
    1189              63 :         return Registers::maskReg(address.base) |
    1190              63 :                Registers::maskReg(address.index);
    1191                 :     }
    1192                 : 
    1193                 :     /*
     1194                 :      * Generate code testing whether the in-memory value at 'address' has a
     1195                 :      * type in the specified set. Appends any failure jumps to 'mismatches'.
     1196                 :      * Assumes that no temporary (caller-save) registers are live.
    1197                 :      */
    1198          154127 :     bool generateTypeCheck(JSContext *cx, Address address,
    1199                 :                            types::TypeSet *types, Vector<Jump> *mismatches)
    1200                 :     {
    1201          154127 :         if (types->unknown())
    1202             246 :             return true;
    1203                 : 
    1204          307762 :         Vector<Jump> matches(cx);
    1205                 : 
    1206          153881 :         if (types->hasType(types::Type::DoubleType())) {
    1207                 :             /* Type sets containing double also contain int. */
    1208           10290 :             if (!matches.append(testNumber(Assembler::Equal, address)))
    1209               0 :                 return false;
    1210          143591 :         } else if (types->hasType(types::Type::Int32Type())) {
    1211           13538 :             if (!matches.append(testInt32(Assembler::Equal, address)))
    1212               0 :                 return false;
    1213                 :         }
    1214                 : 
    1215          153881 :         if (types->hasType(types::Type::UndefinedType())) {
    1216            4169 :             if (!matches.append(testUndefined(Assembler::Equal, address)))
    1217               0 :                 return false;
    1218                 :         }
    1219                 : 
    1220          153881 :         if (types->hasType(types::Type::BooleanType())) {
    1221            3502 :             if (!matches.append(testBoolean(Assembler::Equal, address)))
    1222               0 :                 return false;
    1223                 :         }
    1224                 : 
    1225          153881 :         if (types->hasType(types::Type::StringType())) {
    1226            8383 :             if (!matches.append(testString(Assembler::Equal, address)))
    1227               0 :                 return false;
    1228                 :         }
    1229                 : 
    1230          153881 :         if (types->hasType(types::Type::NullType())) {
    1231            4213 :             if (!matches.append(testNull(Assembler::Equal, address)))
    1232               0 :                 return false;
    1233                 :         }
    1234                 : 
    1235          153881 :         unsigned count = 0;
    1236          153881 :         if (types->hasType(types::Type::AnyObjectType())) {
    1237            4282 :             if (!matches.append(testObject(Assembler::Equal, address)))
    1238               0 :                 return false;
    1239                 :         } else {
    1240          149599 :             count = types->getObjectCount();
    1241                 :         }
    1242                 : 
    1243          153881 :         if (count != 0) {
    1244           30985 :             if (!mismatches->append(testObject(Assembler::NotEqual, address)))
    1245               0 :                 return false;
    1246           30985 :             RegisterID reg = Registers::ArgReg1;
    1247                 : 
    1248           30985 :             loadPayload(address, reg);
    1249                 : 
    1250          132994 :             for (unsigned i = 0; i < count; i++) {
    1251          102009 :                 if (JSObject *object = types->getSingleObject(i)) {
    1252           25720 :                     if (!matches.append(branchPtr(Assembler::Equal, reg, ImmPtr(object))))
    1253               0 :                         return false;
    1254                 :                 }
    1255                 :             }
    1256                 : 
    1257           30985 :             loadPtr(Address(reg, JSObject::offsetOfType()), reg);
    1258                 : 
    1259          132994 :             for (unsigned i = 0; i < count; i++) {
    1260          102009 :                 if (types::TypeObject *object = types->getTypeObject(i)) {
    1261           36046 :                     if (!matches.append(branchPtr(Assembler::Equal, reg, ImmPtr(object))))
    1262               0 :                         return false;
    1263                 :                 }
    1264                 :             }
    1265                 :         }
    1266                 : 
    1267          153881 :         if (!mismatches->append(jump()))
    1268               0 :             return false;
    1269                 : 
    1270          264024 :         for (unsigned i = 0; i < matches.length(); i++)
    1271          110143 :             matches[i].linkTo(label(), this);
    1272                 : 
    1273          153881 :         return true;
    1274                 :     }
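
The predicate the emitted code evaluates is easier to follow in source form. A sketch of the run-time check, with 'v' standing for the boxed value at 'address' (the jumps appended to 'mismatches' fire exactly when this returns false):

    static bool typeCheckSketch(const Value &v, types::TypeSet *types)
    {
        if (types->unknown())
            return true;                          // no code is emitted at all
        if (types->hasType(types::Type::DoubleType())) {
            if (v.isNumber())                     // double sets also contain int32
                return true;
        } else if (types->hasType(types::Type::Int32Type())) {
            if (v.isInt32())
                return true;
        }
        if (types->hasType(types::Type::UndefinedType()) && v.isUndefined())
            return true;
        if (types->hasType(types::Type::BooleanType()) && v.isBoolean())
            return true;
        if (types->hasType(types::Type::StringType()) && v.isString())
            return true;
        if (types->hasType(types::Type::NullType()) && v.isNull())
            return true;
        if (types->hasType(types::Type::AnyObjectType()))
            return v.isObject();
        if (!v.isObject())
            return false;
        // Specific objects: match either a singleton object or its type object.
        for (unsigned i = 0; i < types->getObjectCount(); i++) {
            if (types->getSingleObject(i) == &v.toObject())
                return true;
            if (types->getTypeObject(i) == v.toObject().type())
                return true;
        }
        return false;
    }
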
    1275                 : 
    1276                 :     /*
     1277                 :      * Get a free object of the specified GC kind from the compartment, writing
     1278                 :      * it to 'result' and filling it in according to templateObject. Returns a
     1279                 :      * jump that is taken if a free thing could not be retrieved.
    1280                 :      */
    1281           19149 :     Jump getNewObject(JSContext *cx, RegisterID result, JSObject *templateObject)
    1282                 :     {
    1283           19149 :         gc::AllocKind allocKind = templateObject->getAllocKind();
    1284                 : 
    1285           19149 :         JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
    1286           19149 :         int thingSize = (int)gc::Arena::thingSize(allocKind);
    1287                 : 
    1288           19149 :         JS_ASSERT(cx->typeInferenceEnabled());
    1289           19149 :         JS_ASSERT(!templateObject->hasDynamicSlots());
    1290           19149 :         JS_ASSERT(!templateObject->hasDynamicElements());
    1291                 : 
    1292                 : #ifdef JS_GC_ZEAL
    1293           19149 :         if (cx->runtime->needZealousGC())
    1294              22 :             return jump();
    1295                 : #endif
    1296                 : 
    1297                 :         /*
    1298                 :          * Inline FreeSpan::allocate. Only the case where the current freelist
    1299                 :          * span is not empty is handled.
    1300                 :          */
    1301                 :         gc::FreeSpan *list = const_cast<gc::FreeSpan *>
    1302           19127 :                              (cx->compartment->arenas.getFreeList(allocKind));
    1303           19127 :         loadPtr(&list->first, result);
    1304                 : 
    1305           19127 :         Jump jump = branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(&list->last), result);
    1306                 : 
    1307           19127 :         addPtr(Imm32(thingSize), result);
    1308           19127 :         storePtr(result, &list->first);
    1309                 : 
    1310                 :         /*
     1311                 :          * Fill in the blank object. Order doesn't matter here; from this point
     1312                 :          * everything is infallible. Note that this bakes GC thing pointers
    1313                 :          * into the code without explicitly pinning them. With type inference
    1314                 :          * enabled, JIT code is collected on GC except when analysis or
    1315                 :          * compilation is active, in which case type objects won't be collected
    1316                 :          * but other things may be. The shape held by templateObject *must* be
    1317                 :          * pinned against GC either by the script or by some type object.
    1318                 :          */
    1319                 : 
    1320           19127 :         int elementsOffset = JSObject::offsetOfFixedElements();
    1321                 : 
    1322                 :         /*
    1323                 :          * Write out the elements pointer before readjusting the result register,
    1324                 :          * as for dense arrays we will need to get the address of the fixed
    1325                 :          * elements first.
    1326                 :          */
    1327           19127 :         if (templateObject->isDenseArray()) {
    1328            9709 :             JS_ASSERT(!templateObject->getDenseArrayInitializedLength());
    1329            9709 :             addPtr(Imm32(-thingSize + elementsOffset), result);
    1330            9709 :             storePtr(result, Address(result, -elementsOffset + JSObject::offsetOfElements()));
    1331            9709 :             addPtr(Imm32(-elementsOffset), result);
    1332                 :         } else {
    1333            9418 :             addPtr(Imm32(-thingSize), result);
    1334            9418 :             storePtr(ImmPtr(emptyObjectElements), Address(result, JSObject::offsetOfElements()));
    1335                 :         }
    1336                 : 
    1337           19127 :         storePtr(ImmPtr(templateObject->lastProperty()), Address(result, JSObject::offsetOfShape()));
    1338           19127 :         storePtr(ImmPtr(templateObject->type()), Address(result, JSObject::offsetOfType()));
    1339           19127 :         storePtr(ImmPtr(NULL), Address(result, JSObject::offsetOfSlots()));
    1340                 : 
    1341           19127 :         if (templateObject->isDenseArray()) {
    1342                 :             /* Fill in the elements header. */
    1343            9709 :             store32(Imm32(templateObject->getDenseArrayCapacity()),
    1344           19418 :                     Address(result, elementsOffset + ObjectElements::offsetOfCapacity()));
    1345            9709 :             store32(Imm32(templateObject->getDenseArrayInitializedLength()),
    1346           19418 :                     Address(result, elementsOffset + ObjectElements::offsetOfInitializedLength()));
    1347            9709 :             store32(Imm32(templateObject->getArrayLength()),
    1348           19418 :                     Address(result, elementsOffset + ObjectElements::offsetOfLength()));
    1349                 :         } else {
    1350                 :             /*
    1351                 :              * Fixed slots of non-array objects are required to be initialized;
     1352                 :              * use the values currently in the template object.
    1353                 :              */
    1354           41759 :             for (unsigned i = 0; i < templateObject->slotSpan(); i++) {
    1355           32341 :                 storeValue(templateObject->getFixedSlot(i),
    1356           64682 :                            Address(result, JSObject::getFixedSlotOffset(i)));
    1357                 :             }
    1358                 :         }
    1359                 : 
    1360           19127 :         if (templateObject->hasPrivate()) {
    1361            4923 :             uint32_t nfixed = templateObject->numFixedSlots();
    1362            4923 :             storePtr(ImmPtr(templateObject->getPrivate()),
    1363            9846 :                      Address(result, JSObject::getPrivateDataOffset(nfixed)));
    1364                 :         }
    1365                 : 
    1366           19127 :         return jump;
    1367                 :     }
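
The fast path above is an inlined free-list bump allocation. A sketch of its logic, using a stand-in struct for gc::FreeSpan's first/last fields (which the emitted code reads and writes as raw addresses):

    struct SpanSketch { uintptr_t first, last; };

    static void *allocateFromSpanSketch(SpanSketch *list, int thingSize)
    {
        uintptr_t thing = list->first;
        if (list->last <= thing)                 // span exhausted: the returned Jump is taken instead
            return NULL;
        list->first = thing + thingSize;         // bump the cursor past the newly allocated thing
        return reinterpret_cast<void *>(thing);  // caller then stores shape/type/slots/elements
    }
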
    1368                 : 
     1369                 :     /* Add the double pointed to by 'value' to the accumulator 'counter'. */
    1370               0 :     void addCounter(const double *value, double *counter, RegisterID scratch)
    1371                 :     {
    1372               0 :         loadDouble(value, Registers::FPConversionTemp);
    1373               0 :         move(ImmPtr(counter), scratch);
    1374               0 :         addDouble(Address(scratch), Registers::FPConversionTemp);
    1375               0 :         storeDouble(Registers::FPConversionTemp, Address(scratch));
    1376               0 :     }
    1377                 : 
    1378                 :     /* Add one to the accumulator 'counter'. */
    1379               0 :     void bumpCounter(double *counter, RegisterID scratch)
    1380                 :     {
    1381               0 :         addCounter(&oneDouble, counter, scratch);
    1382               0 :     }
    1383                 : 
     1384                 :     /* Bump the stub call count for script/pc if opcode counters are being collected. */
    1385         4800534 :     void bumpStubCounter(JSScript *script, jsbytecode *pc, RegisterID scratch)
    1386                 :     {
    1387         4800534 :         if (script->pcCounters) {
    1388               0 :             OpcodeCounts counts = script->getCounts(pc);
    1389               0 :             double *counter = &counts.get(OpcodeCounts::BASE_METHODJIT_STUBS);
    1390               0 :             bumpCounter(counter, scratch);
    1391                 :         }
    1392         4800534 :     }
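
At run time the counter helpers amount to a double-precision increment; as a sketch:

    static void bumpStubCounterSketch(double *counter /* the BASE_METHODJIT_STUBS slot */)
    {
        *counter += 1.0;   // addCounter adds *value; bumpCounter passes &oneDouble
    }
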
    1393                 : 
    1394                 :     static const double oneDouble;
    1395                 : };
    1396                 : 
    1397                 : /* Return f<true> if the script is strict mode code, f<false> otherwise. */
    1398                 : #define STRICT_VARIANT(f)                                                     \
    1399                 :     (FunctionTemplateConditional(script->strictModeCode,                      \
    1400                 :                                  f<true>, f<false>))
    1401                 : 
    1402                 : /* Save some typing. */
    1403                 : static const JSC::MacroAssembler::RegisterID JSReturnReg_Type = Assembler::JSReturnReg_Type;
    1404                 : static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = Assembler::JSReturnReg_Data;
    1405                 : static const JSC::MacroAssembler::RegisterID JSParamReg_Argc  = Assembler::JSParamReg_Argc;
    1406                 : 
    1407                 : struct FrameFlagsAddress : JSC::MacroAssembler::Address
    1408                 : {
    1409          295376 :     FrameFlagsAddress()
    1410          295376 :       : Address(JSFrameReg, StackFrame::offsetOfFlags())
    1411          295376 :     {}
    1412                 : };
    1413                 : 
    1414                 : class PreserveRegisters {
    1415                 :     typedef JSC::MacroAssembler::RegisterID RegisterID;
    1416                 : 
    1417                 :     Assembler   &masm;
    1418                 :     uint32_t    count;
    1419                 :     RegisterID  regs[JSC::MacroAssembler::TotalRegisters];
    1420                 : 
    1421                 :   public:
    1422            1252 :     PreserveRegisters(Assembler &masm) : masm(masm), count(0) { }
    1423            1252 :     ~PreserveRegisters() { JS_ASSERT(!count); }
    1424                 : 
    1425             272 :     void preserve(Registers mask) {
    1426             272 :         JS_ASSERT(!count);
    1427                 : 
    1428            1114 :         while (!mask.empty()) {
    1429             570 :             RegisterID reg = mask.takeAnyReg().reg();
    1430             570 :             regs[count++] = reg;
    1431             570 :             masm.saveReg(reg);
    1432                 :         }
    1433             272 :     }
    1434                 : 
    1435            1252 :     void restore() {
    1436            3074 :         while (count)
    1437             570 :             masm.restoreReg(regs[--count]);
    1438            1252 :     }
    1439                 : };
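
A minimal usage sketch for PreserveRegisters; the mask handed to preserve() and the work done in between are illustrative only:

    static inline void preserveAcrossClobberSketch(Assembler &masm, Registers liveTemps)
    {
        PreserveRegisters saved(masm);
        saved.preserve(liveTemps);    // pushes each register in the mask via masm.saveReg()
        /* ...emit code that clobbers those registers... */
        saved.restore();              // pops them back; must run before saved goes out of scope
    }
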
    1440                 : 
    1441                 : } /* namespace mjit */
    1442                 : } /* namespace js */
    1443                 : 
    1444                 : #endif
    1445                 : 

Generated by: LCOV version 1.7