LCOV - code coverage report
Current view: directory - js/src - jsgc.cpp (source / functions)
Test: app.info
Date: 2012-06-02
                 Found      Hit    Coverage
Lines:            2003     1491      74.4 %
Functions:         212      172      81.1 %

       1                 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
       2                 :  * vim: set ts=8 sw=4 et tw=78:
       3                 :  *
       4                 :  * ***** BEGIN LICENSE BLOCK *****
       5                 :  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
       6                 :  *
       7                 :  * The contents of this file are subject to the Mozilla Public License Version
       8                 :  * 1.1 (the "License"); you may not use this file except in compliance with
       9                 :  * the License. You may obtain a copy of the License at
      10                 :  * http://www.mozilla.org/MPL/
      11                 :  *
      12                 :  * Software distributed under the License is distributed on an "AS IS" basis,
      13                 :  * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
      14                 :  * for the specific language governing rights and limitations under the
      15                 :  * License.
      16                 :  *
      17                 :  * The Original Code is Mozilla Communicator client code, released
      18                 :  * March 31, 1998.
      19                 :  *
      20                 :  * The Initial Developer of the Original Code is
      21                 :  * Netscape Communications Corporation.
      22                 :  * Portions created by the Initial Developer are Copyright (C) 1998
      23                 :  * the Initial Developer. All Rights Reserved.
      24                 :  *
      25                 :  * Contributor(s):
      26                 :  *
      27                 :  * Alternatively, the contents of this file may be used under the terms of
      28                 :  * either of the GNU General Public License Version 2 or later (the "GPL"),
      29                 :  * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
      30                 :  * in which case the provisions of the GPL or the LGPL are applicable instead
      31                 :  * of those above. If you wish to allow use of your version of this file only
      32                 :  * under the terms of either the GPL or the LGPL, and not to allow others to
      33                 :  * use your version of this file under the terms of the MPL, indicate your
      34                 :  * decision by deleting the provisions above and replace them with the notice
      35                 :  * and other provisions required by the GPL or the LGPL. If you do not delete
      36                 :  * the provisions above, a recipient may use your version of this file under
      37                 :  * the terms of any one of the MPL, the GPL or the LGPL.
      38                 :  *
      39                 :  * ***** END LICENSE BLOCK ***** */
      40                 : 
      41                 : /* JS Mark-and-Sweep Garbage Collector. */
      42                 : 
      43                 : #include "mozilla/Attributes.h"
      44                 : #include "mozilla/Util.h"
      45                 : 
      46                 : /*
      47                 :  * This code implements a mark-and-sweep garbage collector. The mark phase is
      48                 :  * incremental. Most sweeping is done on a background thread. A GC is divided
      49                 :  * into slices as follows:
      50                 :  *
      51                 :  * Slice 1: Roots pushed onto the mark stack. The mark stack is processed by
      52                 :  * popping an element, marking it, and pushing its children.
      53                 :  *   ... JS code runs ...
      54                 :  * Slice 2: More mark stack processing.
      55                 :  *   ... JS code runs ...
      56                 :  * Slice n-1: More mark stack processing.
      57                 :  *   ... JS code runs ...
      58                 :  * Slice n: Mark stack is completely drained. Some sweeping is done.
      59                 :  *   ... JS code runs, remaining sweeping done on background thread ...
      60                 :  *
      61                 :  * When background sweeping finishes the GC is complete.
      62                 :  *
      63                 :  * Incremental GC requires close collaboration with the mutator (i.e., JS code):
      64                 :  *
      65                 :  * 1. During an incremental GC, if a memory location (except a root) is written
      66                 :  * to, then the value it previously held must be marked. Write barriers ensure
      67                 :  * this.
      68                 :  * 2. Any object that is allocated during incremental GC must start out marked.
      69                 :  * 3. Roots are special memory locations that don't need write
      70                 :  * barriers. However, they must be marked in the first slice. Roots are things
      71                 :  * like the C stack and the VM stack, since it would be too expensive to put
      72                 :  * barriers on them.
      73                 :  */
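
As a rough illustration of rule 1 above, here is a toy pre-write barrier sketch in self-contained C++. None of these names (ToyCell, ToyGC, preWriteBarrier) come from this file or from SpiderMonkey; they only stand in for the idea of marking the overwritten value while an incremental GC is in progress.

    #include <vector>

    /* Toy sketch of rule 1: a pre-write barrier marks the value a slot held
       before it is overwritten, so an incremental collector cannot lose it. */
    struct ToyCell { bool marked = false; };

    struct ToyGC {
        bool incrementalInProgress = false;
        std::vector<ToyCell *> markStack;

        /* Called just before a reference field is overwritten. */
        void preWriteBarrier(ToyCell *oldValue) {
            if (incrementalInProgress && oldValue && !oldValue->marked) {
                oldValue->marked = true;          /* rule 1: mark the old value */
                markStack.push_back(oldValue);    /* its children get marked later */
            }
        }
    };

    struct ToySlot {
        ToyCell *ptr = nullptr;
        void set(ToyGC &gc, ToyCell *newValue) {
            gc.preWriteBarrier(ptr);   /* barrier fires on the previous value */
            ptr = newValue;            /* then the store proceeds */
        }
    };

Rule 2 (objects allocated during an incremental GC start out marked) would be handled in the allocation path rather than in a barrier like this one.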
      74                 : 
      75                 : #include <math.h>
      76                 : #include <string.h>     /* for memset used when DEBUG */
      77                 : 
      78                 : #include "jstypes.h"
      79                 : #include "jsutil.h"
      80                 : #include "jshash.h"
      81                 : #include "jsclist.h"
      82                 : #include "jsprf.h"
      83                 : #include "jsapi.h"
      84                 : #include "jsatom.h"
      85                 : #include "jscompartment.h"
      86                 : #include "jscrashreport.h"
      87                 : #include "jscrashformat.h"
      88                 : #include "jscntxt.h"
      89                 : #include "jsversion.h"
      90                 : #include "jsdbgapi.h"
      91                 : #include "jsexn.h"
      92                 : #include "jsfun.h"
      93                 : #include "jsgc.h"
      94                 : #include "jsgcmark.h"
      95                 : #include "jsinterp.h"
      96                 : #include "jsiter.h"
      97                 : #include "jslock.h"
      98                 : #include "jsnum.h"
      99                 : #include "jsobj.h"
     100                 : #include "jsprobes.h"
     101                 : #include "jsproxy.h"
     102                 : #include "jsscope.h"
     103                 : #include "jsscript.h"
     104                 : #include "jswatchpoint.h"
     105                 : #include "jsweakmap.h"
     106                 : #if JS_HAS_XML_SUPPORT
     107                 : #include "jsxml.h"
     108                 : #endif
     109                 : 
     110                 : #include "frontend/Parser.h"
     111                 : #include "gc/Memory.h"
     112                 : #include "methodjit/MethodJIT.h"
     113                 : #include "vm/Debugger.h"
     114                 : #include "vm/String.h"
     115                 : 
     116                 : #include "jsinterpinlines.h"
     117                 : #include "jsobjinlines.h"
     118                 : 
     119                 : #include "vm/ScopeObject-inl.h"
     120                 : #include "vm/String-inl.h"
     121                 : 
     122                 : #ifdef MOZ_VALGRIND
     123                 : # define JS_VALGRIND
     124                 : #endif
     125                 : #ifdef JS_VALGRIND
     126                 : # include <valgrind/memcheck.h>
     127                 : #endif
     128                 : 
     129                 : #ifdef XP_WIN
     130                 : # include "jswin.h"
     131                 : #else
     132                 : # include <unistd.h>
     133                 : #endif
     134                 : 
     135                 : using namespace mozilla;
     136                 : using namespace js;
     137                 : using namespace js::gc;
     138                 : 
     139                 : namespace js {
     140                 : namespace gc {
     141                 : 
     142                 : /*
      143                 :  * Lower limit above which we start limiting heap growth.
     144                 :  */
     145                 : const size_t GC_ALLOCATION_THRESHOLD = 30 * 1024 * 1024;
     146                 : 
     147                 : /*
      148                 :  * A GC is triggered once the number of newly allocated arenas reaches
      149                 :  * GC_HEAP_GROWTH_FACTOR times the number of arenas that were live after the
      150                 :  * last GC, once the heap has grown past the GC_ALLOCATION_THRESHOLD lower
      151                 :  * limit. This factor is used for non-incremental GCs.
     152                 :  */
     153                 : const float GC_HEAP_GROWTH_FACTOR = 3.0f;
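
Read literally, the trigger rule above can be sketched as a small helper. This is only an illustration of the heuristic with made-up parameter names, not code from this file; the real bookkeeping uses the per-compartment gcBytes and gcTriggerBytes fields seen later in this listing.

    #include <cstddef>

    /* Illustrative only: fire a non-incremental GC once the heap is past the
       lower limit and the arenas allocated since the last GC reach
       GC_HEAP_GROWTH_FACTOR times the arenas that survived that GC. */
    inline bool ShouldTriggerFullGC(std::size_t liveArenasAfterLastGC,
                                    std::size_t arenasAllocatedSince,
                                    std::size_t heapBytes)
    {
        const std::size_t allocationThreshold = 30 * 1024 * 1024; /* GC_ALLOCATION_THRESHOLD */
        const float heapGrowthFactor = 3.0f;                      /* GC_HEAP_GROWTH_FACTOR */
        if (heapBytes < allocationThreshold)
            return false;
        return arenasAllocatedSince >=
               static_cast<std::size_t>(heapGrowthFactor * liveArenasAfterLastGC);
    }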
     154                 : 
      155                 : /* Perform a full GC every 20 seconds if MaybeGC is called. */
     156                 : static const uint64_t GC_IDLE_FULL_SPAN = 20 * 1000 * 1000;
     157                 : 
     158                 : #ifdef JS_GC_ZEAL
     159                 : static void
     160                 : StartVerifyBarriers(JSContext *cx);
     161                 : 
     162                 : static void
     163                 : EndVerifyBarriers(JSContext *cx);
     164                 : 
     165                 : void
     166                 : FinishVerifier(JSRuntime *rt);
     167                 : #endif
     168                 : 
     169                 : /* This array should be const, but that doesn't link right under GCC. */
     170                 : AllocKind slotsToThingKind[] = {
     171                 :     /* 0 */  FINALIZE_OBJECT0,  FINALIZE_OBJECT2,  FINALIZE_OBJECT2,  FINALIZE_OBJECT4,
     172                 :     /* 4 */  FINALIZE_OBJECT4,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,
     173                 :     /* 8 */  FINALIZE_OBJECT8,  FINALIZE_OBJECT12, FINALIZE_OBJECT12, FINALIZE_OBJECT12,
     174                 :     /* 12 */ FINALIZE_OBJECT12, FINALIZE_OBJECT16, FINALIZE_OBJECT16, FINALIZE_OBJECT16,
     175                 :     /* 16 */ FINALIZE_OBJECT16
     176                 : };
     177                 : 
     178                 : JS_STATIC_ASSERT(JS_ARRAY_LENGTH(slotsToThingKind) == SLOTS_TO_THING_KIND_LIMIT);
     179                 : 
     180                 : const uint32_t Arena::ThingSizes[] = {
     181                 :     sizeof(JSObject),           /* FINALIZE_OBJECT0             */
     182                 :     sizeof(JSObject),           /* FINALIZE_OBJECT0_BACKGROUND  */
     183                 :     sizeof(JSObject_Slots2),    /* FINALIZE_OBJECT2             */
     184                 :     sizeof(JSObject_Slots2),    /* FINALIZE_OBJECT2_BACKGROUND  */
     185                 :     sizeof(JSObject_Slots4),    /* FINALIZE_OBJECT4             */
     186                 :     sizeof(JSObject_Slots4),    /* FINALIZE_OBJECT4_BACKGROUND  */
     187                 :     sizeof(JSObject_Slots8),    /* FINALIZE_OBJECT8             */
     188                 :     sizeof(JSObject_Slots8),    /* FINALIZE_OBJECT8_BACKGROUND  */
     189                 :     sizeof(JSObject_Slots12),   /* FINALIZE_OBJECT12            */
     190                 :     sizeof(JSObject_Slots12),   /* FINALIZE_OBJECT12_BACKGROUND */
     191                 :     sizeof(JSObject_Slots16),   /* FINALIZE_OBJECT16            */
     192                 :     sizeof(JSObject_Slots16),   /* FINALIZE_OBJECT16_BACKGROUND */
     193                 :     sizeof(JSScript),           /* FINALIZE_SCRIPT              */
     194                 :     sizeof(Shape),              /* FINALIZE_SHAPE               */
     195                 :     sizeof(BaseShape),          /* FINALIZE_BASE_SHAPE          */
     196                 :     sizeof(types::TypeObject),  /* FINALIZE_TYPE_OBJECT         */
     197                 : #if JS_HAS_XML_SUPPORT
     198                 :     sizeof(JSXML),              /* FINALIZE_XML                 */
     199                 : #endif
     200                 :     sizeof(JSShortString),      /* FINALIZE_SHORT_STRING        */
     201                 :     sizeof(JSString),           /* FINALIZE_STRING              */
     202                 :     sizeof(JSExternalString),   /* FINALIZE_EXTERNAL_STRING     */
     203                 : };
     204                 : 
     205                 : #define OFFSET(type) uint32_t(sizeof(ArenaHeader) + (ArenaSize - sizeof(ArenaHeader)) % sizeof(type))
     206                 : 
     207                 : const uint32_t Arena::FirstThingOffsets[] = {
     208                 :     OFFSET(JSObject),           /* FINALIZE_OBJECT0             */
     209                 :     OFFSET(JSObject),           /* FINALIZE_OBJECT0_BACKGROUND  */
     210                 :     OFFSET(JSObject_Slots2),    /* FINALIZE_OBJECT2             */
     211                 :     OFFSET(JSObject_Slots2),    /* FINALIZE_OBJECT2_BACKGROUND  */
     212                 :     OFFSET(JSObject_Slots4),    /* FINALIZE_OBJECT4             */
     213                 :     OFFSET(JSObject_Slots4),    /* FINALIZE_OBJECT4_BACKGROUND  */
     214                 :     OFFSET(JSObject_Slots8),    /* FINALIZE_OBJECT8             */
     215                 :     OFFSET(JSObject_Slots8),    /* FINALIZE_OBJECT8_BACKGROUND  */
     216                 :     OFFSET(JSObject_Slots12),   /* FINALIZE_OBJECT12            */
     217                 :     OFFSET(JSObject_Slots12),   /* FINALIZE_OBJECT12_BACKGROUND */
     218                 :     OFFSET(JSObject_Slots16),   /* FINALIZE_OBJECT16            */
     219                 :     OFFSET(JSObject_Slots16),   /* FINALIZE_OBJECT16_BACKGROUND */
     220                 :     OFFSET(JSScript),           /* FINALIZE_SCRIPT              */
     221                 :     OFFSET(Shape),              /* FINALIZE_SHAPE               */
     222                 :     OFFSET(BaseShape),          /* FINALIZE_BASE_SHAPE          */
     223                 :     OFFSET(types::TypeObject),  /* FINALIZE_TYPE_OBJECT         */
     224                 : #if JS_HAS_XML_SUPPORT
     225                 :     OFFSET(JSXML),              /* FINALIZE_XML                 */
     226                 : #endif
     227                 :     OFFSET(JSShortString),      /* FINALIZE_SHORT_STRING        */
     228                 :     OFFSET(JSString),           /* FINALIZE_STRING              */
     229                 :     OFFSET(JSExternalString),   /* FINALIZE_EXTERNAL_STRING     */
     230                 : };
     231                 : 
     232                 : #undef OFFSET
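
A worked example of the OFFSET formula, using made-up sizes (the real ArenaSize and header/thing sizes differ): the offset is chosen so that the things tile the rest of the arena exactly, leaving any unusable slack right after the header.

    #include <cstddef>

    /* Hypothetical sizes, for illustration only. */
    constexpr std::size_t kArenaSize  = 4096;
    constexpr std::size_t kHeaderSize = 40;   /* stand-in for sizeof(ArenaHeader) */
    constexpr std::size_t kThingSize  = 48;   /* stand-in for sizeof(JSObject_SlotsN) */

    constexpr std::size_t kOffset = kHeaderSize + (kArenaSize - kHeaderSize) % kThingSize;

    static_assert(kOffset == 64, "40 + (4056 % 48) = 40 + 24 = 64");
    static_assert((kArenaSize - kOffset) % kThingSize == 0,
                  "the 84 things after the offset fill the arena exactly");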
     233                 : 
     234                 : class GCCompartmentsIter {
     235                 :   private:
     236                 :     JSCompartment **it, **end;
     237                 : 
     238                 :   public:
     239          467508 :     GCCompartmentsIter(JSRuntime *rt) {
     240          467508 :         if (rt->gcCurrentCompartment) {
     241            1944 :             it = &rt->gcCurrentCompartment;
     242            1944 :             end = &rt->gcCurrentCompartment + 1;
     243                 :         } else {
     244          465564 :             it = rt->compartments.begin();
     245          465564 :             end = rt->compartments.end();
     246                 :         }
     247          467508 :     }
     248                 : 
     249         3865289 :     bool done() const { return it == end; }
     250                 : 
     251         1128848 :     void next() {
     252         1128848 :         JS_ASSERT(!done());
     253         1128848 :         it++;
     254         1128848 :     }
     255                 : 
     256         1140085 :     JSCompartment *get() const {
     257         1140085 :         JS_ASSERT(!done());
     258         1140085 :         return *it;
     259                 :     }
     260                 : 
     261          122478 :     operator JSCompartment *() const { return get(); }
     262         1017607 :     JSCompartment *operator->() const { return get(); }
     263                 : };
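
The iterator follows the usual done()/next()/get() pattern, visiting either the single compartment being collected or every compartment in the runtime. A typical loop looks like the sketch below; the function name and loop body are illustrative only, not code from this file.

    static void
    VisitGCCompartments(JSRuntime *rt)
    {
        for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
            JSCompartment *comp = c;   /* implicit conversion calls get() */
            /* ... operate on comp, e.g. mark its roots or sweep its arenas ... */
            (void) comp;
        }
    }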
     264                 : 
     265                 : #ifdef DEBUG
     266                 : void
     267        16429870 : ArenaHeader::checkSynchronizedWithFreeList() const
     268                 : {
     269                 :     /*
      270                 :      * Do not allow access to the free list while its real head is still
      271                 :      * stored in FreeLists and has not been synchronized with this one.
     272                 :      */
     273        16429870 :     JS_ASSERT(allocated());
     274                 : 
     275                 :     /*
      276                 :      * We can be called from the background finalization thread, in which
      277                 :      * case the free list in the compartment can mutate at any moment and we
      278                 :      * cannot do any checks.
     279                 :      */
     280        16429870 :     if (!compartment->rt->gcRunning)
     281         2552286 :         return;
     282                 : 
     283        13877584 :     FreeSpan firstSpan = FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
     284        13877584 :     if (firstSpan.isEmpty())
     285         2507344 :         return;
     286        11370240 :     const FreeSpan *list = compartment->arenas.getFreeList(getAllocKind());
     287        11370240 :     if (list->isEmpty() || firstSpan.arenaAddress() != list->arenaAddress())
     288        10353641 :         return;
     289                 : 
     290                 :     /*
      291                 :      * Here this arena has free things, FreeList::lists[thingKind] is not
      292                 :      * empty, and it also points to this arena. Thus they must be the same.
     293                 :      */
     294         1016599 :     JS_ASSERT(firstSpan.isSameNonEmptySpan(list));
     295                 : }
     296                 : #endif
     297                 : 
     298                 : /* static */ void
     299               0 : Arena::staticAsserts()
     300                 : {
     301                 :     JS_STATIC_ASSERT(sizeof(Arena) == ArenaSize);
     302                 :     JS_STATIC_ASSERT(JS_ARRAY_LENGTH(ThingSizes) == FINALIZE_LIMIT);
     303                 :     JS_STATIC_ASSERT(JS_ARRAY_LENGTH(FirstThingOffsets) == FINALIZE_LIMIT);
     304               0 : }
     305                 : 
     306                 : template<typename T>
     307                 : inline bool
     308         5544918 : Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize, bool background)
     309                 : {
     310                 :     /* Enforce requirements on size of T. */
     311         5544918 :     JS_ASSERT(thingSize % Cell::CellSize == 0);
     312         5544918 :     JS_ASSERT(thingSize <= 255);
     313                 : 
     314         5544918 :     JS_ASSERT(aheader.allocated());
     315         5544918 :     JS_ASSERT(thingKind == aheader.getAllocKind());
     316         5544918 :     JS_ASSERT(thingSize == aheader.getThingSize());
     317         5544918 :     JS_ASSERT(!aheader.hasDelayedMarking);
     318         5544918 :     JS_ASSERT(!aheader.markOverflow);
     319         5544918 :     JS_ASSERT(!aheader.allocatedDuringIncremental);
     320                 : 
     321         5544918 :     uintptr_t thing = thingsStart(thingKind);
     322         5544918 :     uintptr_t lastByte = thingsEnd() - 1;
     323                 : 
     324         5544918 :     FreeSpan nextFree(aheader.getFirstFreeSpan());
     325         5544918 :     nextFree.checkSpan();
     326                 : 
     327         5544918 :     FreeSpan newListHead;
     328         5544918 :     FreeSpan *newListTail = &newListHead;
     329         5544918 :     uintptr_t newFreeSpanStart = 0;
     330         5544918 :     bool allClear = true;
     331        11089836 :     DebugOnly<size_t> nmarked = 0;
     332       688815542 :     for (;; thing += thingSize) {
     333       694360460 :         JS_ASSERT(thing <= lastByte + 1);
     334       694360460 :         if (thing == nextFree.first) {
     335        17254399 :             JS_ASSERT(nextFree.last <= lastByte);
     336        17254399 :             if (nextFree.last == lastByte)
     337                 :                 break;
     338        11709481 :             JS_ASSERT(Arena::isAligned(nextFree.last, thingSize));
     339        11709481 :             if (!newFreeSpanStart)
     340        10092227 :                 newFreeSpanStart = thing;
     341        11709481 :             thing = nextFree.last;
     342        11709481 :             nextFree = *nextFree.nextSpan();
     343        11709481 :             nextFree.checkSpan();
     344                 :         } else {
     345       677106061 :             T *t = reinterpret_cast<T *>(thing);
     346       677106061 :             if (t->isMarked()) {
     347       495384348 :                 allClear = false;
     348       495384348 :                 nmarked++;
     349       495384348 :                 if (newFreeSpanStart) {
     350        13528507 :                     JS_ASSERT(thing >= thingsStart(thingKind) + thingSize);
     351        13528507 :                     newListTail->first = newFreeSpanStart;
     352        13528507 :                     newListTail->last = thing - thingSize;
     353        13528507 :                     newListTail = newListTail->nextSpanUnchecked(thingSize);
     354        13528507 :                     newFreeSpanStart = 0;
     355                 :                 }
     356                 :             } else {
     357       181721713 :                 if (!newFreeSpanStart)
     358         5373690 :                     newFreeSpanStart = thing;
     359       181721713 :                 t->finalize(cx, background);
     360       181721713 :                 JS_POISON(t, JS_FREE_PATTERN, thingSize);
     361                 :             }
     362                 :         }
     363                 :     }
     364                 : 
     365         5544918 :     if (allClear) {
     366         1665873 :         JS_ASSERT(newListTail == &newListHead);
     367         1665873 :         JS_ASSERT(newFreeSpanStart == thingsStart(thingKind));
     368         1665873 :         return true;
     369                 :     }
     370                 : 
     371         3879045 :     newListTail->first = newFreeSpanStart ? newFreeSpanStart : nextFree.first;
     372         3879045 :     JS_ASSERT(Arena::isAligned(newListTail->first, thingSize));
     373         3879045 :     newListTail->last = lastByte;
     374                 : 
     375                 : #ifdef DEBUG
     376         3879045 :     size_t nfree = 0;
     377        17407552 :     for (const FreeSpan *span = &newListHead; span != newListTail; span = span->nextSpan()) {
     378        13528507 :         span->checkSpan();
     379        13528507 :         JS_ASSERT(Arena::isAligned(span->first, thingSize));
     380        13528507 :         JS_ASSERT(Arena::isAligned(span->last, thingSize));
     381        13528507 :         nfree += (span->last - span->first) / thingSize + 1;
     382        13528507 :         JS_ASSERT(nfree + nmarked <= thingsPerArena(thingSize));
     383                 :     }
     384         3879045 :     nfree += (newListTail->last + 1 - newListTail->first) / thingSize;
     385         3879045 :     JS_ASSERT(nfree + nmarked == thingsPerArena(thingSize));
     386                 : #endif
     387         3879045 :     aheader.setFirstFreeSpan(&newListHead);
     388                 : 
     389         3879045 :     return false;
     390                 : }
     391                 : 
     392                 : template<typename T>
     393                 : inline void
     394         1846804 : FinalizeTypedArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind, bool background)
     395                 : {
     396                 :     /*
     397                 :      * Release empty arenas and move non-full arenas with some free things into
      398                 :      * a separate list that we append to al after the loop to ensure that any
     399                 :      * arena before al->cursor is full.
     400                 :      */
     401         1846804 :     JS_ASSERT_IF(!al->head, al->cursor == &al->head);
     402         1846804 :     ArenaLists::ArenaList available;
     403         1846804 :     ArenaHeader **ap = &al->head;
     404         1846804 :     size_t thingSize = Arena::thingSize(thingKind);
     405        12936640 :     while (ArenaHeader *aheader = *ap) {
     406         5544918 :         bool allClear = aheader->getArena()->finalize<T>(cx, thingKind, thingSize, background);
     407         5544918 :         if (allClear) {
     408         1665873 :             *ap = aheader->next;
     409         1665873 :             aheader->chunk()->releaseArena(aheader);
     410         3879045 :         } else if (aheader->hasFreeThings()) {
     411         2212258 :             *ap = aheader->next;
     412         2212258 :             *available.cursor = aheader;
     413         2212258 :             available.cursor = &aheader->next;
     414                 :         } else {
     415         1666787 :             ap = &aheader->next;
     416                 :         }
     417                 :     }
     418                 : 
     419                 :     /* Terminate the available list and append it to al. */
     420         1846804 :     *available.cursor = NULL;
     421         1846804 :     *ap = available.head;
     422         1846804 :     al->cursor = ap;
     423         3090709 :     JS_ASSERT_IF(!al->head, al->cursor == &al->head);
     424         1846804 : }
     425                 : 
     426                 : /*
     427                 :  * Finalize the list. On return al->cursor points to the first non-empty arena
     428                 :  * after the al->head.
     429                 :  */
     430                 : static void
     431         1846804 : FinalizeArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind, bool background)
     432                 : {
     433         1846804 :     switch(thingKind) {
     434                 :       case FINALIZE_OBJECT0:
     435                 :       case FINALIZE_OBJECT0_BACKGROUND:
     436                 :       case FINALIZE_OBJECT2:
     437                 :       case FINALIZE_OBJECT2_BACKGROUND:
     438                 :       case FINALIZE_OBJECT4:
     439                 :       case FINALIZE_OBJECT4_BACKGROUND:
     440                 :       case FINALIZE_OBJECT8:
     441                 :       case FINALIZE_OBJECT8_BACKGROUND:
     442                 :       case FINALIZE_OBJECT12:
     443                 :       case FINALIZE_OBJECT12_BACKGROUND:
     444                 :       case FINALIZE_OBJECT16:
     445                 :       case FINALIZE_OBJECT16_BACKGROUND:
     446          948579 :         FinalizeTypedArenas<JSObject>(cx, al, thingKind, background);
     447          948579 :         break;
     448                 :       case FINALIZE_SCRIPT:
     449          122478 :         FinalizeTypedArenas<JSScript>(cx, al, thingKind, background);
     450          122478 :         break;
     451                 :       case FINALIZE_SHAPE:
     452          122478 :         FinalizeTypedArenas<Shape>(cx, al, thingKind, background);
     453          122478 :         break;
     454                 :       case FINALIZE_BASE_SHAPE:
     455          122478 :         FinalizeTypedArenas<BaseShape>(cx, al, thingKind, background);
     456          122478 :         break;
     457                 :       case FINALIZE_TYPE_OBJECT:
     458          122478 :         FinalizeTypedArenas<types::TypeObject>(cx, al, thingKind, background);
     459          122478 :         break;
     460                 : #if JS_HAS_XML_SUPPORT
     461                 :       case FINALIZE_XML:
     462          122478 :         FinalizeTypedArenas<JSXML>(cx, al, thingKind, background);
     463          122478 :         break;
     464                 : #endif
     465                 :       case FINALIZE_STRING:
     466          101713 :         FinalizeTypedArenas<JSString>(cx, al, thingKind, background);
     467          101713 :         break;
     468                 :       case FINALIZE_SHORT_STRING:
     469           61644 :         FinalizeTypedArenas<JSShortString>(cx, al, thingKind, background);
     470           61644 :         break;
     471                 :       case FINALIZE_EXTERNAL_STRING:
     472          122478 :         FinalizeTypedArenas<JSExternalString>(cx, al, thingKind, background);
     473          122478 :         break;
     474                 :     }
     475         1846804 : }
     476                 : 
     477                 : static inline Chunk *
     478           43620 : AllocChunk() {
     479           43620 :     return static_cast<Chunk *>(MapAlignedPages(ChunkSize, ChunkSize));
     480                 : }
     481                 : 
     482                 : static inline void
     483           43616 : FreeChunk(Chunk *p) {
     484           43616 :     UnmapPages(static_cast<void *>(p), ChunkSize);
     485           43616 : }
     486                 : 
     487                 : #ifdef JS_THREADSAFE
     488                 : inline bool
     489           46233 : ChunkPool::wantBackgroundAllocation(JSRuntime *rt) const
     490                 : {
     491                 :     /*
     492                 :      * To minimize memory waste we do not want to run the background chunk
      493                 :      * allocation if we have empty chunks or when the runtime needs just a few
     494                 :      * of them.
     495                 :      */
     496           46233 :     return rt->gcHelperThread.canBackgroundAllocate() &&
     497                 :            emptyCount == 0 &&
     498           46233 :            rt->gcChunkSet.count() >= 4;
     499                 : }
     500                 : #endif
     501                 : 
     502                 : /* Must be called with the GC lock taken. */
     503                 : inline Chunk *
     504           43467 : ChunkPool::get(JSRuntime *rt)
     505                 : {
     506           43467 :     JS_ASSERT(this == &rt->gcChunkPool);
     507                 : 
     508           43467 :     Chunk *chunk = emptyChunkListHead;
     509           43467 :     if (chunk) {
     510            2613 :         JS_ASSERT(emptyCount);
     511            2613 :         emptyChunkListHead = chunk->info.next;
     512            2613 :         --emptyCount;
     513                 :     } else {
     514           40854 :         JS_ASSERT(!emptyCount);
     515           40854 :         chunk = Chunk::allocate(rt);
     516           40854 :         if (!chunk)
     517               0 :             return NULL;
     518           40854 :         JS_ASSERT(chunk->info.numArenasFreeCommitted == ArenasPerChunk);
     519           40854 :         rt->gcNumArenasFreeCommitted += ArenasPerChunk;
     520                 :     }
     521           43467 :     JS_ASSERT(chunk->unused());
     522           43467 :     JS_ASSERT(!rt->gcChunkSet.has(chunk));
     523                 : 
     524                 : #ifdef JS_THREADSAFE
     525           43467 :     if (wantBackgroundAllocation(rt))
     526            2766 :         rt->gcHelperThread.startBackgroundAllocationIfIdle();
     527                 : #endif
     528                 : 
     529           43467 :     return chunk;
     530                 : }
     531                 : 
     532                 : /* Must be called either during the GC or with the GC lock taken. */
     533                 : inline void
     534           46229 : ChunkPool::put(Chunk *chunk)
     535                 : {
     536           46229 :     chunk->info.age = 0;
     537           46229 :     chunk->info.next = emptyChunkListHead;
     538           46229 :     emptyChunkListHead = chunk;
     539           46229 :     emptyCount++;
     540           46229 : }
     541                 : 
     542                 : /* Must be called either during the GC or with the GC lock taken. */
     543                 : Chunk *
     544           51101 : ChunkPool::expire(JSRuntime *rt, bool releaseAll)
     545                 : {
     546           51101 :     JS_ASSERT(this == &rt->gcChunkPool);
     547                 : 
     548                 :     /*
     549                 :      * Return old empty chunks to the system while preserving the order of
     550                 :      * other chunks in the list. This way, if the GC runs several times
     551                 :      * without emptying the list, the older chunks will stay at the tail
     552                 :      * and are more likely to reach the max age.
     553                 :      */
     554           51101 :     Chunk *freeList = NULL;
     555          151177 :     for (Chunk **chunkp = &emptyChunkListHead; *chunkp; ) {
     556           48975 :         JS_ASSERT(emptyCount);
     557           48975 :         Chunk *chunk = *chunkp;
     558           48975 :         JS_ASSERT(chunk->unused());
     559           48975 :         JS_ASSERT(!rt->gcChunkSet.has(chunk));
     560           48975 :         JS_ASSERT(chunk->info.age <= MAX_EMPTY_CHUNK_AGE);
     561           48975 :         if (releaseAll || chunk->info.age == MAX_EMPTY_CHUNK_AGE) {
     562           43616 :             *chunkp = chunk->info.next;
     563           43616 :             --emptyCount;
     564           43616 :             chunk->prepareToBeFreed(rt);
     565           43616 :             chunk->info.next = freeList;
     566           43616 :             freeList = chunk;
     567                 :         } else {
     568                 :             /* Keep the chunk but increase its age. */
     569            5359 :             ++chunk->info.age;
     570            5359 :             chunkp = &chunk->info.next;
     571                 :         }
     572                 :     }
     573           51101 :     JS_ASSERT_IF(releaseAll, !emptyCount);
     574           51101 :     return freeList;
     575                 : }
     576                 : 
     577                 : static void
     578           63615 : FreeChunkList(Chunk *chunkListHead)
     579                 : {
     580          107231 :     while (Chunk *chunk = chunkListHead) {
     581           43616 :         JS_ASSERT(!chunk->info.numArenasFreeCommitted);
     582           43616 :         chunkListHead = chunk->info.next;
     583           43616 :         FreeChunk(chunk);
     584                 :     }
     585           19999 : }
     586                 : 
     587                 : void
     588           19908 : ChunkPool::expireAndFree(JSRuntime *rt, bool releaseAll)
     589                 : {
     590           19908 :     FreeChunkList(expire(rt, releaseAll));
     591           19908 : }
     592                 : 
     593                 : JS_FRIEND_API(int64_t)
     594               3 : ChunkPool::countCleanDecommittedArenas(JSRuntime *rt)
     595                 : {
     596               3 :     JS_ASSERT(this == &rt->gcChunkPool);
     597                 : 
     598               3 :     int64_t numDecommitted = 0;
     599               3 :     Chunk *chunk = emptyChunkListHead;
     600               6 :     while (chunk) {
     601               0 :         for (uint32_t i = 0; i < ArenasPerChunk; ++i)
     602               0 :             if (chunk->decommittedArenas.get(i))
     603               0 :                 ++numDecommitted;
     604               0 :         chunk = chunk->info.next;
     605                 :     }
     606               3 :     return numDecommitted;
     607                 : }
     608                 : 
     609                 : /* static */ Chunk *
     610           43620 : Chunk::allocate(JSRuntime *rt)
     611                 : {
     612           43620 :     Chunk *chunk = static_cast<Chunk *>(AllocChunk());
     613           43620 :     if (!chunk)
     614               0 :         return NULL;
     615           43620 :     chunk->init();
     616           43620 :     rt->gcStats.count(gcstats::STAT_NEW_CHUNK);
     617           43620 :     return chunk;
     618                 : }
     619                 : 
     620                 : /* Must be called with the GC lock taken. */
     621                 : /* static */ inline void
     622               0 : Chunk::release(JSRuntime *rt, Chunk *chunk)
     623                 : {
     624               0 :     JS_ASSERT(chunk);
     625               0 :     chunk->prepareToBeFreed(rt);
     626               0 :     FreeChunk(chunk);
     627               0 : }
     628                 : 
     629                 : inline void
     630           43616 : Chunk::prepareToBeFreed(JSRuntime *rt)
     631                 : {
     632           43616 :     JS_ASSERT(rt->gcNumArenasFreeCommitted >= info.numArenasFreeCommitted);
     633           43616 :     rt->gcNumArenasFreeCommitted -= info.numArenasFreeCommitted;
     634           43616 :     rt->gcStats.count(gcstats::STAT_DESTROY_CHUNK);
     635                 : 
     636                 : #ifdef DEBUG
     637                 :     /*
     638                 :      * Let FreeChunkList detect a missing prepareToBeFreed call before it
      639                 :      * frees the chunk.
     640                 :      */
     641           43616 :     info.numArenasFreeCommitted = 0;
     642                 : #endif
     643           43616 : }
     644                 : 
     645                 : void
     646           43620 : Chunk::init()
     647                 : {
     648           43620 :     JS_POISON(this, JS_FREE_PATTERN, ChunkSize);
     649                 : 
     650                 :     /*
     651                 :      * We clear the bitmap to guard against xpc_IsGrayGCThing being called on
     652                 :      * uninitialized data, which would happen before the first GC cycle.
     653                 :      */
     654           43620 :     bitmap.clear();
     655                 : 
     656                 :     /* Initialize the arena tracking bitmap. */
     657           43620 :     decommittedArenas.clear(false);
     658                 : 
     659                 :     /* Initialize the chunk info. */
     660           43620 :     info.freeArenasHead = &arenas[0].aheader;
     661           43620 :     info.lastDecommittedArenaOffset = 0;
     662           43620 :     info.numArenasFree = ArenasPerChunk;
     663           43620 :     info.numArenasFreeCommitted = ArenasPerChunk;
     664           43620 :     info.age = 0;
     665                 : 
     666                 :     /* Initialize the arena header state. */
     667        11035860 :     for (unsigned i = 0; i < ArenasPerChunk; i++) {
     668        10992240 :         arenas[i].aheader.setAsNotAllocated();
     669                 :         arenas[i].aheader.next = (i + 1 < ArenasPerChunk)
     670                 :                                  ? &arenas[i + 1].aheader
     671        10992240 :                                  : NULL;
     672                 :     }
     673                 : 
     674                 :     /* The rest of info fields are initialized in PickChunk. */
     675           43620 : }
     676                 : 
     677                 : inline Chunk **
     678         2115151 : GetAvailableChunkList(JSCompartment *comp)
     679                 : {
     680         2115151 :     JSRuntime *rt = comp->rt;
     681                 :     return comp->isSystemCompartment
     682                 :            ? &rt->gcSystemAvailableChunkListHead
     683         2115151 :            : &rt->gcUserAvailableChunkListHead;
     684                 : }
     685                 : 
     686                 : inline void
     687           47684 : Chunk::addToAvailableList(JSCompartment *comp)
     688                 : {
     689           47684 :     insertToAvailableList(GetAvailableChunkList(comp));
     690           47684 : }
     691                 : 
     692                 : inline void
     693           47684 : Chunk::insertToAvailableList(Chunk **insertPoint)
     694                 : {
     695           47684 :     JS_ASSERT(hasAvailableArenas());
     696           47684 :     JS_ASSERT(!info.prevp);
     697           47684 :     JS_ASSERT(!info.next);
     698           47684 :     info.prevp = insertPoint;
     699           47684 :     Chunk *insertBefore = *insertPoint;
     700           47684 :     if (insertBefore) {
     701            3959 :         JS_ASSERT(insertBefore->info.prevp == insertPoint);
     702            3959 :         insertBefore->info.prevp = &info.next;
     703                 :     }
     704           47684 :     info.next = insertBefore;
     705           47684 :     *insertPoint = this;
     706           47684 : }
     707                 : 
     708                 : inline void
     709           47680 : Chunk::removeFromAvailableList()
     710                 : {
     711           47680 :     JS_ASSERT(info.prevp);
     712           47680 :     *info.prevp = info.next;
     713           47680 :     if (info.next) {
     714            3231 :         JS_ASSERT(info.next->info.prevp == &info.next);
     715            3231 :         info.next->info.prevp = info.prevp;
     716                 :     }
     717           47680 :     info.prevp = NULL;
     718           47680 :     info.next = NULL;
     719           47680 : }
     720                 : 
     721                 : /*
     722                 :  * Search for and return the next decommitted Arena. Our goal is to keep
     723                 :  * lastDecommittedArenaOffset "close" to a free arena. We do this by setting
     724                 :  * it to the most recently freed arena when we free, and forcing it to
     725                 :  * the last alloc + 1 when we allocate.
     726                 :  */
     727                 : uint32_t
     728               0 : Chunk::findDecommittedArenaOffset()
     729                 : {
     730                 :     /* Note: lastFreeArenaOffset can be past the end of the list. */
      731                 :     /* Note: lastDecommittedArenaOffset can be past the end of the list. */
     732               0 :         if (decommittedArenas.get(i))
     733               0 :             return i;
     734               0 :     for (unsigned i = 0; i < info.lastDecommittedArenaOffset; i++)
     735               0 :         if (decommittedArenas.get(i))
     736               0 :             return i;
     737               0 :     JS_NOT_REACHED("No decommitted arenas found.");
     738                 :     return -1;
     739                 : }
     740                 : 
     741                 : ArenaHeader *
     742               0 : Chunk::fetchNextDecommittedArena()
     743                 : {
     744               0 :     JS_ASSERT(info.numArenasFreeCommitted == 0);
     745               0 :     JS_ASSERT(info.numArenasFree > 0);
     746                 : 
     747               0 :     unsigned offset = findDecommittedArenaOffset();
     748               0 :     info.lastDecommittedArenaOffset = offset + 1;
     749               0 :     --info.numArenasFree;
     750               0 :     decommittedArenas.unset(offset);
     751                 : 
     752               0 :     Arena *arena = &arenas[offset];
     753               0 :     MarkPagesInUse(arena, ArenaSize);
     754               0 :     arena->aheader.setAsNotAllocated();
     755                 : 
     756               0 :     return &arena->aheader;
     757                 : }
     758                 : 
     759                 : inline ArenaHeader *
     760         2071567 : Chunk::fetchNextFreeArena(JSRuntime *rt)
     761                 : {
     762         2071567 :     JS_ASSERT(info.numArenasFreeCommitted > 0);
     763         2071567 :     JS_ASSERT(info.numArenasFreeCommitted <= info.numArenasFree);
     764         2071567 :     JS_ASSERT(info.numArenasFreeCommitted <= rt->gcNumArenasFreeCommitted);
     765                 : 
     766         2071567 :     ArenaHeader *aheader = info.freeArenasHead;
     767         2071567 :     info.freeArenasHead = aheader->next;
     768         2071567 :     --info.numArenasFreeCommitted;
     769         2071567 :     --info.numArenasFree;
     770         2071567 :     --rt->gcNumArenasFreeCommitted;
     771                 : 
     772         2071567 :     return aheader;
     773                 : }
     774                 : 
     775                 : ArenaHeader *
     776         2067467 : Chunk::allocateArena(JSCompartment *comp, AllocKind thingKind)
     777                 : {
     778         2067467 :     JS_ASSERT(hasAvailableArenas());
     779                 : 
     780         2067467 :     JSRuntime *rt = comp->rt;
     781         2067467 :     JS_ASSERT(rt->gcBytes <= rt->gcMaxBytes);
     782         2067467 :     if (rt->gcMaxBytes - rt->gcBytes < ArenaSize)
     783              40 :         return NULL;
     784                 : 
     785         2067427 :     ArenaHeader *aheader = JS_LIKELY(info.numArenasFreeCommitted > 0)
     786                 :                            ? fetchNextFreeArena(rt)
     787         2067427 :                            : fetchNextDecommittedArena();
     788         2067427 :     aheader->init(comp, thingKind);
     789         2067427 :     if (JS_UNLIKELY(!hasAvailableArenas()))
     790            4217 :         removeFromAvailableList();
     791                 : 
     792         2067427 :     Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes + ArenaSize);
     793         2067427 :     rt->gcBytes += ArenaSize;
     794         2067427 :     comp->gcBytes += ArenaSize;
     795         2067427 :     if (comp->gcBytes >= comp->gcTriggerBytes)
     796           97581 :         TriggerCompartmentGC(comp, gcreason::ALLOC_TRIGGER);
     797                 : 
     798         2067427 :     return aheader;
     799                 : }
     800                 : 
     801                 : inline void
     802         2067320 : Chunk::addArenaToFreeList(JSRuntime *rt, ArenaHeader *aheader)
     803                 : {
     804         2067320 :     JS_ASSERT(!aheader->allocated());
     805         2067320 :     aheader->next = info.freeArenasHead;
     806         2067320 :     info.freeArenasHead = aheader;
     807         2067320 :     ++info.numArenasFreeCommitted;
     808         2067320 :     ++info.numArenasFree;
     809         2067320 :     ++rt->gcNumArenasFreeCommitted;
     810         2067320 : }
     811                 : 
     812                 : void
     813         2067320 : Chunk::releaseArena(ArenaHeader *aheader)
     814                 : {
     815         2067320 :     JS_ASSERT(aheader->allocated());
     816         2067320 :     JS_ASSERT(!aheader->hasDelayedMarking);
     817         2067320 :     JSCompartment *comp = aheader->compartment;
     818         2067320 :     JSRuntime *rt = comp->rt;
     819                 : #ifdef JS_THREADSAFE
     820         4134640 :     AutoLockGC maybeLock;
     821         2067320 :     if (rt->gcHelperThread.sweeping())
     822          402654 :         maybeLock.lock(rt);
     823                 : #endif
     824                 : 
     825         2067320 :     Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes - ArenaSize);
     826         2067320 :     JS_ASSERT(rt->gcBytes >= ArenaSize);
     827         2067320 :     JS_ASSERT(comp->gcBytes >= ArenaSize);
     828                 : #ifdef JS_THREADSAFE
     829         2067320 :     if (rt->gcHelperThread.sweeping())
     830          402654 :         comp->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize);
     831                 : #endif
     832         2067320 :     rt->gcBytes -= ArenaSize;
     833         2067320 :     comp->gcBytes -= ArenaSize;
     834                 : 
     835         2067320 :     aheader->setAsNotAllocated();
     836         2067320 :     addArenaToFreeList(rt, aheader);
     837                 : 
     838         2067320 :     if (info.numArenasFree == 1) {
     839            4217 :         JS_ASSERT(!info.prevp);
     840            4217 :         JS_ASSERT(!info.next);
     841            4217 :         addToAvailableList(comp);
     842         2063103 :     } else if (!unused()) {
     843         2019640 :         JS_ASSERT(info.prevp);
     844                 :     } else {
     845           43463 :         rt->gcChunkSet.remove(this);
     846           43463 :         removeFromAvailableList();
     847           43463 :         rt->gcChunkPool.put(this);
     848                 :     }
     849         2067320 : }
     850                 : 
     851                 : } /* namespace gc */
     852                 : } /* namespace js */
     853                 : 
     854                 : /* The caller must hold the GC lock. */
     855                 : static Chunk *
     856         2067467 : PickChunk(JSCompartment *comp)
     857                 : {
     858         2067467 :     JSRuntime *rt = comp->rt;
     859         2067467 :     Chunk **listHeadp = GetAvailableChunkList(comp);
     860         2067467 :     Chunk *chunk = *listHeadp;
     861         2067467 :     if (chunk)
     862         2024000 :         return chunk;
     863                 : 
     864           43467 :     chunk = rt->gcChunkPool.get(rt);
     865           43467 :     if (!chunk)
     866               0 :         return NULL;
     867                 : 
     868           43467 :     rt->gcChunkAllocationSinceLastGC = true;
     869                 : 
     870                 :     /*
     871                 :      * FIXME bug 583732 - chunk is newly allocated and cannot be present in
     872                 :      * the table so using ordinary lookupForAdd is suboptimal here.
     873                 :      */
     874           43467 :     GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk);
     875           43467 :     JS_ASSERT(!p);
     876           43467 :     if (!rt->gcChunkSet.add(p, chunk)) {
     877               0 :         Chunk::release(rt, chunk);
     878               0 :         return NULL;
     879                 :     }
     880                 : 
     881           43467 :     chunk->info.prevp = NULL;
     882           43467 :     chunk->info.next = NULL;
     883           43467 :     chunk->addToAvailableList(comp);
     884                 : 
     885           43467 :     return chunk;
     886                 : }
     887                 : 
     888                 : JS_FRIEND_API(bool)
     889        99507045 : IsAboutToBeFinalized(const Cell *thing)
     890                 : {
     891        99507045 :     JSCompartment *thingCompartment = reinterpret_cast<const Cell *>(thing)->compartment();
     892        99507045 :     JSRuntime *rt = thingCompartment->rt;
     893        99507045 :     if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
     894          131427 :         return false;
     895        99375618 :     return !reinterpret_cast<const Cell *>(thing)->isMarked();
     896                 : }
     897                 : 
     898                 : bool
     899          174147 : IsAboutToBeFinalized(const Value &v)
     900                 : {
     901          174147 :     JS_ASSERT(v.isMarkable());
     902          174147 :     return IsAboutToBeFinalized((Cell *)v.toGCThing());
     903                 : }
     904                 : 
     905                 : /* Lifetime for type sets attached to scripts containing observed types. */
     906                 : static const int64_t JIT_SCRIPT_RELEASE_TYPES_INTERVAL = 60 * 1000 * 1000;
     907                 : 
     908                 : JSBool
     909           19910 : js_InitGC(JSRuntime *rt, uint32_t maxbytes)
     910                 : {
     911           19910 :     if (!rt->gcChunkSet.init(INITIAL_CHUNK_CAPACITY))
     912               0 :         return false;
     913                 : 
     914           19910 :     if (!rt->gcRootsHash.init(256))
     915               0 :         return false;
     916                 : 
     917           19910 :     if (!rt->gcLocksHash.init(256))
     918               0 :         return false;
     919                 : 
     920                 : #ifdef JS_THREADSAFE
     921           19910 :     rt->gcLock = PR_NewLock();
     922           19910 :     if (!rt->gcLock)
     923               0 :         return false;
     924           19910 :     if (!rt->gcHelperThread.init())
     925               0 :         return false;
     926                 : #endif
     927                 : 
     928                 :     /*
     929                 :      * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
     930                 :      * for default backward API compatibility.
     931                 :      */
     932           19910 :     rt->gcMaxBytes = maxbytes;
     933           19910 :     rt->setGCMaxMallocBytes(maxbytes);
     934                 : 
     935           19910 :     rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
     936           19910 :     return true;
     937                 : }
     938                 : 
     939                 : namespace js {
     940                 : 
     941                 : inline bool
     942        11421528 : InFreeList(ArenaHeader *aheader, uintptr_t addr)
     943                 : {
     944        11421528 :     if (!aheader->hasFreeThings())
     945         2469690 :         return false;
     946                 : 
     947         8951838 :     FreeSpan firstSpan(aheader->getFirstFreeSpan());
     948                 : 
     949        16808288 :     for (const FreeSpan *span = &firstSpan;;) {
      950                 :         /* If the thing comes before the current span, it's not free. */
     951        16808288 :         if (addr < span->first)
     952         8950982 :             return false;
     953                 : 
     954                 :         /*
      955                 :          * If we find it inside the span, it's dead. Here we use "<=" and not
      956                 :          * "<" even for the last span, as we know that the thing is inside the
      957                 :          * arena. Thus, for the last span, thing < span->end.
     958                 :          */
     959         7857306 :         if (addr <= span->last)
     960             856 :             return true;
     961                 : 
     962                 :         /*
      963                 :          * The last possible empty span is at the end of the arena. Here
     964                 :          * span->end < thing < thingsEnd and so we must have more spans.
     965                 :          */
     966         7856450 :         span = span->nextSpan();
     967                 :     }
     968                 : }
     969                 : 
     970                 : enum ConservativeGCTest
     971                 : {
     972                 :     CGCT_VALID,
     973                 :     CGCT_LOWBITSET, /* excluded because one of the low bits was set */
     974                 :     CGCT_NOTARENA,  /* not within arena range in a chunk */
     975                 :     CGCT_OTHERCOMPARTMENT,  /* in another compartment */
     976                 :     CGCT_NOTCHUNK,  /* not within a valid chunk */
     977                 :     CGCT_FREEARENA, /* within arena containing only free things */
     978                 :     CGCT_NOTLIVE,   /* gcthing is not allocated */
     979                 :     CGCT_END
     980                 : };
     981                 : 
     982                 : /*
     983                 :  * Tests whether w is a (possibly dead) GC thing. Returns CGCT_VALID and
     984                 :  * details about the thing if so. On failure, returns the reason for rejection.
     985                 :  */
     986                 : inline ConservativeGCTest
     987       259324077 : IsAddressableGCThing(JSRuntime *rt, uintptr_t w,
     988                 :                      gc::AllocKind *thingKindPtr, ArenaHeader **arenaHeader, void **thing)
     989                 : {
     990                 :     /*
     991                 :      * We assume that the compiler never uses sub-word alignment to store
     992                 :      * pointers and does not tag pointers on its own. Additionally, the value
     993                 :      * representation for all values and the jsid representation for GC-things
     994                 :      * do not touch the low two bits. Thus any word with the low two bits set
     995                 :      * is not a valid GC-thing.
     996                 :      */
     997                 :     JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
     998       259324077 :     if (w & 0x3)
     999        44383635 :         return CGCT_LOWBITSET;
    1000                 : 
    1001                 :     /*
    1002                 :      * An object jsid has its low bits tagged. In the value representation on
    1003                 :      * 64-bit, the high bits are tagged.
    1004                 :      */
    1005       214940442 :     const uintptr_t JSID_PAYLOAD_MASK = ~uintptr_t(JSID_TYPE_MASK);
    1006                 : #if JS_BITS_PER_WORD == 32
    1007       214940442 :     uintptr_t addr = w & JSID_PAYLOAD_MASK;
    1008                 : #elif JS_BITS_PER_WORD == 64
    1009                 :     uintptr_t addr = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
    1010                 : #endif
    1011                 : 
    1012       214940442 :     Chunk *chunk = Chunk::fromAddress(addr);
    1013                 : 
    1014       214940442 :     if (!rt->gcChunkSet.has(chunk))
    1015       203310310 :         return CGCT_NOTCHUNK;
    1016                 : 
    1017                 :     /*
    1018                 :      * We query for pointers outside the arena array after checking for an
    1019                 :      * allocated chunk. Such pointers are rare and we want to reject them
    1020                 :      * after doing more likely rejections.
    1021                 :      */
    1022        11630132 :     if (!Chunk::withinArenasRange(addr))
    1023            8163 :         return CGCT_NOTARENA;
    1024                 : 
    1025                 :     /* If the arena is not currently allocated, don't access the header. */
    1026        11621969 :     size_t arenaOffset = Chunk::arenaIndex(addr);
    1027        11621969 :     if (chunk->decommittedArenas.get(arenaOffset))
    1028               0 :         return CGCT_FREEARENA;
    1029                 : 
    1030        11621969 :     ArenaHeader *aheader = &chunk->arenas[arenaOffset].aheader;
    1031                 : 
    1032        11621969 :     if (!aheader->allocated())
    1033            4739 :         return CGCT_FREEARENA;
    1034                 : 
    1035        11617230 :     JSCompartment *curComp = rt->gcCurrentCompartment;
    1036        11617230 :     if (curComp && curComp != aheader->compartment)
    1037            4435 :         return CGCT_OTHERCOMPARTMENT;
    1038                 : 
    1039        11612795 :     AllocKind thingKind = aheader->getAllocKind();
    1040        11612795 :     uintptr_t offset = addr & ArenaMask;
    1041        11612795 :     uintptr_t minOffset = Arena::firstThingOffset(thingKind);
    1042        11612795 :     if (offset < minOffset)
    1043          191267 :         return CGCT_NOTARENA;
    1044                 : 
    1045                 :     /* addr can point inside the thing so we must align the address. */
    1046        11421528 :     uintptr_t shift = (offset - minOffset) % Arena::thingSize(thingKind);
    1047        11421528 :     addr -= shift;
    1048                 : 
    1049        11421528 :     if (thing)
    1050        11421528 :         *thing = reinterpret_cast<void *>(addr);
    1051        11421528 :     if (arenaHeader)
    1052        11421528 :         *arenaHeader = aheader;
    1053        11421528 :     if (thingKindPtr)
    1054        11421528 :         *thingKindPtr = thingKind;
    1055        11421528 :     return CGCT_VALID;
    1056                 : }
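
The last step, shift = (offset - minOffset) % thingSize, is what turns a pointer into the interior of a GC thing into the address of the thing itself. A small sketch of just that arithmetic; AlignToThingStart and the layout constants in main are hypothetical, not the real arena parameters:

    #include <cassert>
    #include <cstdint>

    /*
     * Round an address that may point into the middle of a GC thing down to the
     * start of that thing, as IsAddressableGCThing does above. arenaStart stands
     * in for the arena base address obtained from masking.
     */
    static uintptr_t AlignToThingStart(uintptr_t addr, uintptr_t arenaStart,
                                       uintptr_t firstThingOffset, uintptr_t thingSize)
    {
        uintptr_t offset = addr - arenaStart;
        uintptr_t shift = (offset - firstThingOffset) % thingSize;
        return addr - shift;
    }

    int main()
    {
        /* Hypothetical layout: 32-byte things start 64 bytes into the arena. */
        uintptr_t arena = 0x10000;
        assert(AlignToThingStart(arena + 64, arena, 64, 32) == arena + 64);       /* exact start */
        assert(AlignToThingStart(arena + 64 + 40, arena, 64, 32) == arena + 96);  /* interior pointer */
        return 0;
    }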
    1057                 : 
    1058                 : /*
    1059                 :  * Returns CGCT_VALID and marks the thing if w can be a live GC thing, setting
    1060                 :  * thingKind accordingly. Otherwise returns the reason for rejection.
    1061                 :  */
    1062                 : inline ConservativeGCTest
    1063       259324077 : MarkIfGCThingWord(JSTracer *trc, uintptr_t w)
    1064                 : {
    1065                 :     void *thing;
    1066                 :     ArenaHeader *aheader;
    1067                 :     AllocKind thingKind;
    1068       259324077 :     ConservativeGCTest status = IsAddressableGCThing(trc->runtime, w, &thingKind, &aheader, &thing);
    1069       259324077 :     if (status != CGCT_VALID)
    1070       247902549 :         return status;
    1071                 : 
    1072                 :     /*
    1073                 :      * Check if the thing is free. We must use the list of free spans as at
    1074                 :      * this point we no longer have the mark bits from the previous GC run and
    1075                 :      * we must account for newly allocated things.
    1076                 :      */
    1077        11421528 :     if (InFreeList(aheader, uintptr_t(thing)))
    1078             856 :         return CGCT_NOTLIVE;
    1079                 : 
    1080        11420672 :     JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
    1081                 : #ifdef DEBUG
    1082        11420672 :     const char pattern[] = "machine_stack %p";
    1083                 :     char nameBuf[sizeof(pattern) - 2 + sizeof(thing) * 2];
    1084        11420672 :     JS_snprintf(nameBuf, sizeof(nameBuf), pattern, thing);
    1085        11420672 :     JS_SET_TRACING_NAME(trc, nameBuf);
    1086                 : #endif
    1087        11420672 :     MarkKind(trc, thing, traceKind);
    1088                 : 
    1089                 : #ifdef DEBUG
    1090        11420672 :     if (trc->runtime->gcIncrementalState == MARK_ROOTS)
    1091          175857 :         trc->runtime->gcSavedRoots.append(JSRuntime::SavedGCRoot(thing, traceKind));
    1092                 : #endif
    1093                 : 
    1094        11420672 :     return CGCT_VALID;
    1095                 : }
    1096                 : 
    1097                 : static void
    1098       259189402 : MarkWordConservatively(JSTracer *trc, uintptr_t w)
    1099                 : {
    1100                 :     /*
    1101                 :      * The conservative scanner may access words that valgrind considers
    1102                 :      * undefined. To avoid false positives without altering valgrind's view of
    1103                 :      * the memory, we mark the argument, a copy of the original word, as
    1104                 :      * memcheck-defined. See bug 572678.
    1105                 :      */
    1106                 : #ifdef JS_VALGRIND
    1107                 :     JS_SILENCE_UNUSED_VALUE_IN_EXPR(VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w)));
    1108                 : #endif
    1109                 : 
    1110       259189402 :     MarkIfGCThingWord(trc, w);
    1111       259189402 : }
    1112                 : 
    1113                 : static void
    1114           51700 : MarkRangeConservatively(JSTracer *trc, const uintptr_t *begin, const uintptr_t *end)
    1115                 : {
    1116           51700 :     JS_ASSERT(begin <= end);
    1117       259241102 :     for (const uintptr_t *i = begin; i < end; ++i)
    1118       259189402 :         MarkWordConservatively(trc, *i);
    1119           51700 : }
    1120                 : 
    1121                 : static JS_NEVER_INLINE void
    1122           34358 : MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
    1123                 : {
    1124           34358 :     JSRuntime *rt = trc->runtime;
    1125                 : 
    1126                 : #ifdef DEBUG
    1127           34358 :     if (useSavedRoots) {
    1128          322518 :         for (JSRuntime::SavedGCRoot *root = rt->gcSavedRoots.begin();
    1129          161259 :              root != rt->gcSavedRoots.end();
    1130                 :              root++)
    1131                 :         {
    1132          160051 :             JS_SET_TRACING_NAME(trc, "cstack");
    1133          160051 :             MarkKind(trc, root->thing, root->kind);
    1134                 :         }
    1135            1208 :         return;
    1136                 :     }
    1137                 : 
    1138           33150 :     if (rt->gcIncrementalState == MARK_ROOTS)
    1139            1399 :         rt->gcSavedRoots.clearAndFree();
    1140                 : #endif
    1141                 : 
    1142           33150 :     ConservativeGCData *cgcd = &rt->conservativeGC;
    1143           33150 :     if (!cgcd->hasStackToScan()) {
    1144                 : #ifdef JS_THREADSAFE
    1145            7300 :         JS_ASSERT(!rt->suspendCount);
    1146            7300 :         JS_ASSERT(rt->requestDepth <= cgcd->requestThreshold);
    1147                 : #endif
    1148            7300 :         return;
    1149                 :     }
    1150                 : 
    1151                 :     uintptr_t *stackMin, *stackEnd;
    1152                 : #if JS_STACK_GROWTH_DIRECTION > 0
    1153                 :     stackMin = rt->nativeStackBase;
    1154                 :     stackEnd = cgcd->nativeStackTop;
    1155                 : #else
    1156           25850 :     stackMin = cgcd->nativeStackTop + 1;
    1157           25850 :     stackEnd = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
    1158                 : #endif
    1159                 : 
    1160           25850 :     JS_ASSERT(stackMin <= stackEnd);
    1161           25850 :     MarkRangeConservatively(trc, stackMin, stackEnd);
    1162                 :     MarkRangeConservatively(trc, cgcd->registerSnapshot.words,
    1163           25850 :                             ArrayEnd(cgcd->registerSnapshot.words));
    1164                 : }
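
The JS_STACK_GROWTH_DIRECTION conditional above selects the word range [stackMin, stackEnd) that is handed to MarkRangeConservatively. A sketch of that selection for both growth directions; StackScanRange is a hypothetical helper, not part of jsgc.cpp:

    #include <cassert>
    #include <cstdint>

    /*
     * Pick the half-open word range [*min, *end) to scan conservatively, given
     * the stack base recorded at runtime creation and the current stack top.
     */
    static void StackScanRange(uintptr_t *base, uintptr_t *top, bool growsUp,
                               uintptr_t **min, uintptr_t **end)
    {
        if (growsUp) {           /* JS_STACK_GROWTH_DIRECTION > 0 */
            *min = base;
            *end = top;
        } else {                 /* the common case: the stack grows downward */
            *min = top + 1;
            *end = base;
        }
    }

    int main()
    {
        uintptr_t stack[8];
        uintptr_t *min, *end;
        /* Downward growth: the base is the highest address, the top the lowest. */
        StackScanRange(&stack[7], &stack[2], false, &min, &end);
        assert(min == &stack[3] && end == &stack[7]);   /* scans stack[3..6] */
        return 0;
    }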
    1165                 : 
    1166                 : void
    1167               0 : MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
    1168                 : {
    1169                 :     /*
    1170                 :      * Normally, the drainMarkStack phase of marking will never trace outside
    1171                 :      * of the compartment currently being collected. However, conservative
    1172                 :      * scanning during drainMarkStack (as is done for generators) can break
    1173                 :      * this invariant. So we disable the compartment assertions in this
    1174                 :      * situation.
    1175                 :      */
    1176                 :     struct AutoSkipChecking {
    1177                 :         JSRuntime *runtime;
    1178                 :         JSCompartment *savedCompartment;
    1179                 : 
    1180               0 :         AutoSkipChecking(JSRuntime *rt)
    1181               0 :           : runtime(rt), savedCompartment(rt->gcCheckCompartment) {
    1182               0 :             rt->gcCheckCompartment = NULL;
    1183               0 :         }
    1184               0 :         ~AutoSkipChecking() { runtime->gcCheckCompartment = savedCompartment; }
    1185               0 :     } as(trc->runtime);
    1186                 : 
    1187               0 :     const uintptr_t *begin = beginv->payloadWord();
    1188               0 :     const uintptr_t *end = endv->payloadWord();
    1189                 : #ifdef JS_NUNBOX32
    1190                 :     /*
    1191                 :      * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
    1192                 :      * scanning only the payloads.
    1193                 :      */
    1194               0 :     JS_ASSERT(begin <= end);
    1195               0 :     for (const uintptr_t *i = begin; i < end; i += sizeof(Value) / sizeof(uintptr_t))
    1196               0 :         MarkWordConservatively(trc, *i);
    1197                 : #else
    1198                 :     MarkRangeConservatively(trc, begin, end);
    1199                 : #endif
    1200               0 : }
    1201                 : 
    1202                 : 
    1203                 : 
    1204                 : JS_NEVER_INLINE void
    1205          383410 : ConservativeGCData::recordStackTop()
    1206                 : {
    1207                 :     /* Update the native stack pointer if it points to a bigger stack. */
    1208                 :     uintptr_t dummy;
    1209          383410 :     nativeStackTop = &dummy;
    1210                 : 
    1211                 :     /*
    1212                 :      * To record and update the register snapshot for the conservative scanning
    1213                 :      * with the latest values we use setjmp.
    1214                 :      */
    1215                 : #if defined(_MSC_VER)
    1216                 : # pragma warning(push)
    1217                 : # pragma warning(disable: 4611)
    1218                 : #endif
    1219          383410 :     (void) setjmp(registerSnapshot.jmpbuf);
    1220                 : #if defined(_MSC_VER)
    1221                 : # pragma warning(pop)
    1222                 : #endif
    1223          383410 : }
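
recordStackTop uses setjmp only as a portable way to force the callee-saved registers into memory that the conservative scanner can then walk word by word. A minimal sketch of the same trick; ScanRegisters and PrintWord are hypothetical names:

    #include <csetjmp>
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    /*
     * Spill the current register state into a jmp_buf and hand every word of the
     * buffer to a scanner, mirroring how registerSnapshot.words is scanned above.
     */
    static void ScanRegisters(void (*scanWord)(uintptr_t))
    {
        jmp_buf registers;
        (void) setjmp(registers);   /* fills the buffer with register contents */

        const uintptr_t *words = reinterpret_cast<const uintptr_t *>(&registers);
        size_t n = sizeof(registers) / sizeof(uintptr_t);
        for (size_t i = 0; i < n; i++)
            scanWord(words[i]);
    }

    static void PrintWord(uintptr_t w)
    {
        printf("%p\n", reinterpret_cast<void *>(w));
    }

    int main()
    {
        ScanRegisters(PrintWord);
        return 0;
    }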
    1224                 : 
    1225                 : static void
    1226           54557 : RecordNativeStackTopForGC(JSRuntime *rt)
    1227                 : {
    1228           54557 :     ConservativeGCData *cgcd = &rt->conservativeGC;
    1229                 : 
    1230                 : #ifdef JS_THREADSAFE
    1231                 :     /* Record the stack top here only if we are called from a request. */
    1232           54557 :     JS_ASSERT(rt->requestDepth >= cgcd->requestThreshold);
    1233           54557 :     if (rt->requestDepth == cgcd->requestThreshold)
    1234           27414 :         return;
    1235                 : #endif
    1236           27143 :     cgcd->recordStackTop();
    1237                 : }
    1238                 : 
    1239                 : } /* namespace js */
    1240                 : 
    1241                 : bool
    1242               0 : js_IsAddressableGCThing(JSRuntime *rt, uintptr_t w, gc::AllocKind *thingKind, void **thing)
    1243                 : {
    1244               0 :     return js::IsAddressableGCThing(rt, w, thingKind, NULL, thing) == CGCT_VALID;
    1245                 : }
    1246                 : 
    1247                 : #ifdef DEBUG
    1248                 : static void
    1249                 : CheckLeakedRoots(JSRuntime *rt);
    1250                 : #endif
    1251                 : 
    1252                 : void
    1253           19908 : js_FinishGC(JSRuntime *rt)
    1254                 : {
    1255                 :     /*
    1256                 :      * Wait until the background finalization stops and the helper thread
    1257                 :      * shuts down before we forcefully release any remaining GC memory.
    1258                 :      */
    1259                 : #ifdef JS_THREADSAFE
    1260           19908 :     rt->gcHelperThread.finish();
    1261                 : #endif
    1262                 : 
    1263                 : #ifdef JS_GC_ZEAL
    1264                 :     /* Free memory associated with GC verification. */
    1265           19908 :     FinishVerifier(rt);
    1266                 : #endif
    1267                 : 
    1268                 :     /* Delete all remaining Compartments. */
    1269           39816 :     for (CompartmentsIter c(rt); !c.done(); c.next())
    1270           19908 :         Foreground::delete_(c.get());
    1271           19908 :     rt->compartments.clear();
    1272           19908 :     rt->atomsCompartment = NULL;
    1273                 : 
    1274           19908 :     rt->gcSystemAvailableChunkListHead = NULL;
    1275           19908 :     rt->gcUserAvailableChunkListHead = NULL;
    1276           19908 :     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
    1277               0 :         Chunk::release(rt, r.front());
    1278           19908 :     rt->gcChunkSet.clear();
    1279                 : 
    1280           19908 :     rt->gcChunkPool.expireAndFree(rt, true);
    1281                 : 
    1282                 : #ifdef DEBUG
    1283           19908 :     if (!rt->gcRootsHash.empty())
    1284               0 :         CheckLeakedRoots(rt);
    1285                 : #endif
    1286           19908 :     rt->gcRootsHash.clear();
    1287           19908 :     rt->gcLocksHash.clear();
    1288           19908 : }
    1289                 : 
    1290                 : JSBool
    1291          442479 : js_AddRoot(JSContext *cx, Value *vp, const char *name)
    1292                 : {
    1293          442479 :     JSBool ok = js_AddRootRT(cx->runtime, vp, name);
    1294          442479 :     if (!ok)
    1295               0 :         JS_ReportOutOfMemory(cx);
    1296          442479 :     return ok;
    1297                 : }
    1298                 : 
    1299                 : JSBool
    1300           30230 : js_AddGCThingRoot(JSContext *cx, void **rp, const char *name)
    1301                 : {
    1302           30230 :     JSBool ok = js_AddGCThingRootRT(cx->runtime, rp, name);
    1303           30230 :     if (!ok)
    1304               0 :         JS_ReportOutOfMemory(cx);
    1305           30230 :     return ok;
    1306                 : }
    1307                 : 
    1308                 : JS_FRIEND_API(JSBool)
    1309          448850 : js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
    1310                 : {
    1311                 :     return !!rt->gcRootsHash.put((void *)vp,
    1312          448850 :                                  RootInfo(name, JS_GC_ROOT_VALUE_PTR));
    1313                 : }
    1314                 : 
    1315                 : JS_FRIEND_API(JSBool)
    1316           30230 : js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
    1317                 : {
    1318                 :     return !!rt->gcRootsHash.put((void *)rp,
    1319           30230 :                                  RootInfo(name, JS_GC_ROOT_GCTHING_PTR));
    1320                 : }
    1321                 : 
    1322                 : JS_FRIEND_API(JSBool)
    1323          479080 : js_RemoveRoot(JSRuntime *rt, void *rp)
    1324                 : {
    1325          479080 :     rt->gcRootsHash.remove(rp);
    1326          479080 :     rt->gcPoke = JS_TRUE;
    1327          479080 :     return JS_TRUE;
    1328                 : }
    1329                 : 
    1330                 : typedef RootedValueMap::Range RootRange;
    1331                 : typedef RootedValueMap::Entry RootEntry;
    1332                 : typedef RootedValueMap::Enum RootEnum;
    1333                 : 
    1334                 : #ifdef DEBUG
    1335                 : 
    1336                 : static void
    1337               0 : CheckLeakedRoots(JSRuntime *rt)
    1338                 : {
    1339               0 :     uint32_t leakedroots = 0;
    1340                 : 
    1341                 :     /* In debug builds, warn (but don't assert) about any remaining roots. */
    1342               0 :     for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
    1343               0 :         RootEntry &entry = r.front();
    1344               0 :         leakedroots++;
    1345                 :         fprintf(stderr,
    1346                 :                 "JS engine warning: leaking GC root \'%s\' at %p\n",
    1347               0 :                 entry.value.name ? entry.value.name : "", entry.key);
    1348                 :     }
    1349                 : 
    1350               0 :     if (leakedroots > 0) {
    1351               0 :         if (leakedroots == 1) {
    1352                 :             fprintf(stderr,
    1353                 : "JS engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n"
    1354                 : "                   This root may point to freed memory. Objects reachable\n"
    1355                 : "                   through it have not been finalized.\n",
    1356               0 :                     (void *) rt);
    1357                 :         } else {
    1358                 :             fprintf(stderr,
    1359                 : "JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n"
    1360                 : "                   These roots may point to freed memory. Objects reachable\n"
    1361                 : "                   through them have not been finalized.\n",
    1362               0 :                     (unsigned long) leakedroots, (void *) rt);
    1363                 :         }
    1364                 :     }
    1365               0 : }
    1366                 : 
    1367                 : void
    1368               0 : js_DumpNamedRoots(JSRuntime *rt,
    1369                 :                   void (*dump)(const char *name, void *rp, JSGCRootType type, void *data),
    1370                 :                   void *data)
    1371                 : {
    1372               0 :     for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
    1373               0 :         RootEntry &entry = r.front();
    1374               0 :         if (const char *name = entry.value.name)
    1375               0 :             dump(name, entry.key, entry.value.type, data);
    1376                 :     }
    1377               0 : }
    1378                 : 
    1379                 : #endif /* DEBUG */
    1380                 : 
    1381                 : uint32_t
    1382               0 : js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data)
    1383                 : {
    1384               0 :     int ct = 0;
    1385               0 :     for (RootEnum e(rt->gcRootsHash); !e.empty(); e.popFront()) {
    1386               0 :         RootEntry &entry = e.front();
    1387                 : 
    1388               0 :         ct++;
    1389               0 :         int mapflags = map(entry.key, entry.value.type, entry.value.name, data);
    1390                 : 
    1391               0 :         if (mapflags & JS_MAP_GCROOT_REMOVE)
    1392               0 :             e.removeFront();
    1393               0 :         if (mapflags & JS_MAP_GCROOT_STOP)
    1394               0 :             break;
    1395                 :     }
    1396                 : 
    1397               0 :     return ct;
    1398                 : }
    1399                 : 
    1400                 : static size_t
    1401          285430 : ComputeTriggerBytes(size_t lastBytes, size_t maxBytes, JSGCInvocationKind gckind)
    1402                 : {
    1403          285430 :     size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, GC_ALLOCATION_THRESHOLD);
    1404          285430 :     float trigger = float(base) * GC_HEAP_GROWTH_FACTOR;
    1405          285430 :     return size_t(Min(float(maxBytes), trigger));
    1406                 : }
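
Assuming stand-in values for the constants (the real GC_ALLOCATION_THRESHOLD and GC_HEAP_GROWTH_FACTOR are defined elsewhere in the tree), the non-shrinking case of ComputeTriggerBytes works out as in this sketch:

    #include <algorithm>
    #include <cassert>
    #include <cstddef>

    /* Hypothetical values; the real constants live elsewhere in the source tree. */
    static const size_t kAllocationThreshold = 1 * 1024 * 1024;   /* 1 MB floor */
    static const float  kHeapGrowthFactor    = 3.0f;

    /*
     * Mirrors ComputeTriggerBytes above for a non-shrinking GC: grow from the
     * larger of the last heap size and the floor, but never past maxBytes.
     */
    static size_t TriggerBytes(size_t lastBytes, size_t maxBytes)
    {
        size_t base = std::max(lastBytes, kAllocationThreshold);
        float trigger = float(base) * kHeapGrowthFactor;
        return size_t(std::min(float(maxBytes), trigger));
    }

    int main()
    {
        const size_t MB = 1024 * 1024;
        assert(TriggerBytes(2 * MB, 64 * MB) == 6 * MB);      /* grows by the factor */
        assert(TriggerBytes(512 * 1024, 64 * MB) == 3 * MB);  /* floor kicks in */
        assert(TriggerBytes(100 * MB, 64 * MB) == 64 * MB);   /* capped at maxBytes */
        return 0;
    }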
    1407                 : 
    1408                 : void
    1409          142715 : JSCompartment::setGCLastBytes(size_t lastBytes, size_t lastMallocBytes, JSGCInvocationKind gckind)
    1410                 : {
    1411          142715 :     gcTriggerBytes = ComputeTriggerBytes(lastBytes, rt->gcMaxBytes, gckind);
    1412          142715 :     gcTriggerMallocAndFreeBytes = ComputeTriggerBytes(lastMallocBytes, SIZE_MAX, gckind);
    1413          142715 : }
    1414                 : 
    1415                 : void
    1416          402654 : JSCompartment::reduceGCTriggerBytes(size_t amount)
    1417                 : {
    1418          402654 :     JS_ASSERT(amount > 0);
    1419                 :     JS_ASSERT(gcTriggerBytes - amount >= 0);
    1420          402654 :     if (gcTriggerBytes - amount < GC_ALLOCATION_THRESHOLD * GC_HEAP_GROWTH_FACTOR)
    1421          219261 :         return;
    1422          183393 :     gcTriggerBytes -= amount;
    1423                 : }
    1424                 : 
    1425                 : namespace js {
    1426                 : namespace gc {
    1427                 : 
    1428                 : inline void
    1429            5278 : ArenaLists::prepareForIncrementalGC(JSRuntime *rt)
    1430                 : {
    1431          110838 :     for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
    1432          105560 :         FreeSpan *headSpan = &freeLists[i];
    1433          105560 :         if (!headSpan->isEmpty()) {
    1434           51808 :             ArenaHeader *aheader = headSpan->arenaHeader();
    1435           51808 :             aheader->allocatedDuringIncremental = true;
    1436           51808 :             rt->gcMarker.delayMarkingArena(aheader);
    1437                 :         }
    1438                 :     }
    1439            5278 : }
    1440                 : 
    1441                 : inline void *
    1442         2339936 : ArenaLists::allocateFromArena(JSCompartment *comp, AllocKind thingKind)
    1443                 : {
    1444         2339936 :     Chunk *chunk = NULL;
    1445                 : 
    1446         2339936 :     ArenaList *al = &arenaLists[thingKind];
    1447         4679872 :     AutoLockGC maybeLock;
    1448                 : 
    1449                 : #ifdef JS_THREADSAFE
    1450         2339936 :     volatile uintptr_t *bfs = &backgroundFinalizeState[thingKind];
    1451         2339936 :     if (*bfs != BFS_DONE) {
    1452                 :         /*
    1453                 :          * We cannot search the arena list for free things while the
    1454                 :          * background finalization is running, as it can modify head or cursor
    1455                 :          * at any moment. So we always allocate a new arena in that case.
    1456                 :          */
    1457           57233 :         maybeLock.lock(comp->rt);
    1458           57233 :         if (*bfs == BFS_RUN) {
    1459           20449 :             JS_ASSERT(!*al->cursor);
    1460           20449 :             chunk = PickChunk(comp);
    1461           20449 :             if (!chunk) {
    1462                 :                 /*
    1463                 :                  * Let the caller wait for the background allocation to
    1464                 :                  * finish and restart the allocation attempt.
    1465                 :                  */
    1466               0 :                 return NULL;
    1467                 :             }
    1468           36784 :         } else if (*bfs == BFS_JUST_FINISHED) {
    1469                 :             /* See comments before BackgroundFinalizeState definition. */
    1470           36784 :             *bfs = BFS_DONE;
    1471                 :         } else {
    1472               0 :             JS_ASSERT(*bfs == BFS_DONE);
    1473                 :         }
    1474                 :     }
    1475                 : #endif /* JS_THREADSAFE */
    1476                 : 
    1477         2339936 :     if (!chunk) {
    1478         2319487 :         if (ArenaHeader *aheader = *al->cursor) {
    1479          272469 :             JS_ASSERT(aheader->hasFreeThings());
    1480                 : 
    1481                 :             /*
    1482                 :              * Empty arenas are returned to the chunk and should not be
    1483                 :              * present on the list.
    1484                 :              */
    1485          272469 :             JS_ASSERT(!aheader->isEmpty());
    1486          272469 :             al->cursor = &aheader->next;
    1487                 : 
    1488                 :             /*
    1489                 :              * Move the free span stored in the arena to the free list and
    1490                 :              * allocate from it.
    1491                 :              */
    1492          272469 :             freeLists[thingKind] = aheader->getFirstFreeSpan();
    1493          272469 :             aheader->setAsFullyUsed();
    1494          272469 :             if (JS_UNLIKELY(comp->needsBarrier())) {
    1495              67 :                 aheader->allocatedDuringIncremental = true;
    1496              67 :                 comp->rt->gcMarker.delayMarkingArena(aheader);
    1497                 :             }
    1498          272469 :             return freeLists[thingKind].infallibleAllocate(Arena::thingSize(thingKind));
    1499                 :         }
    1500                 : 
    1501                 :         /* Make sure we hold the GC lock before we call PickChunk. */
    1502         2047018 :         if (!maybeLock.locked())
    1503         2047015 :             maybeLock.lock(comp->rt);
    1504         2047018 :         chunk = PickChunk(comp);
    1505         2047018 :         if (!chunk)
    1506               0 :             return NULL;
    1507                 :     }
    1508                 : 
    1509                 :     /*
    1510                 :      * While we still hold the GC lock, get an arena from some chunk, mark it
    1511                 :      * as full as its single free span is moved to the free lists, and insert
    1512                 :      * it into the list as a fully allocated arena.
    1513                 :      *
    1514                 :      * We add the arena before the head, not after the tail pointed to by the
    1515                 :      * cursor, so that after the GC the most recently added arena will be used
    1516                 :      * first for allocations, improving cache locality.
    1517                 :      */
    1518         2067467 :     JS_ASSERT(!*al->cursor);
    1519         2067467 :     ArenaHeader *aheader = chunk->allocateArena(comp, thingKind);
    1520         2067467 :     if (!aheader)
    1521              40 :         return NULL;
    1522                 : 
    1523         2067427 :     if (JS_UNLIKELY(comp->needsBarrier())) {
    1524             135 :         aheader->allocatedDuringIncremental = true;
    1525             135 :         comp->rt->gcMarker.delayMarkingArena(aheader);
    1526                 :     }
    1527         2067427 :     aheader->next = al->head;
    1528         2067427 :     if (!al->head) {
    1529          397627 :         JS_ASSERT(al->cursor == &al->head);
    1530          397627 :         al->cursor = &aheader->next;
    1531                 :     }
    1532         2067427 :     al->head = aheader;
    1533                 : 
    1534                 :     /* See comments before allocateFromNewArena about this assert. */
    1535         2067427 :     JS_ASSERT(!aheader->hasFreeThings());
    1536         2067427 :     uintptr_t arenaAddr = aheader->arenaAddress();
    1537         2067427 :     return freeLists[thingKind].allocateFromNewArena(arenaAddr,
    1538                 :                                                      Arena::firstThingOffset(thingKind),
    1539         4134854 :                                                      Arena::thingSize(thingKind));
    1540                 : }
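
The list manipulation at the end of allocateFromArena, putting the new fully used arena in front of the head while the cursor keeps marking the first arena with free things, can be illustrated with simplified stand-ins for ArenaHeader and ArenaList:

    #include <cassert>
    #include <cstddef>

    /*
     * Simplified stand-ins: `cursor` points at the link slot of the first arena
     * that still has free things (or at `head` when the list is empty), so
     * *cursor == NULL means "no arenas with free things remain".
     */
    struct Arena { Arena *next; };
    struct List  { Arena *head; Arena **cursor; };

    /*
     * Mirror of the insertion above: a freshly allocated, fully used arena goes
     * in front of the head so it is revisited first after the next GC.
     */
    static void PushFullArena(List *al, Arena *a)
    {
        a->next = al->head;
        if (!al->head) {
            assert(al->cursor == &al->head);
            al->cursor = &a->next;
        }
        al->head = a;
    }

    int main()
    {
        List al;
        al.head = NULL;
        al.cursor = &al.head;

        Arena a1, a2;
        PushFullArena(&al, &a1);
        PushFullArena(&al, &a2);

        assert(al.head == &a2 && a2.next == &a1);
        assert(al.cursor == &a1.next && !*al.cursor);   /* no free arenas after the cursor */
        return 0;
    }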
    1541                 : 
    1542                 : void
    1543         1469736 : ArenaLists::finalizeNow(JSContext *cx, AllocKind thingKind)
    1544                 : {
    1545                 : #ifdef JS_THREADSAFE
    1546         1469736 :     JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
    1547                 : #endif
    1548         1469736 :     FinalizeArenas(cx, &arenaLists[thingKind], thingKind, false);
    1549         1469736 : }
    1550                 : 
    1551                 : inline void
    1552          979824 : ArenaLists::finalizeLater(JSContext *cx, AllocKind thingKind)
    1553                 : {
    1554               0 :     JS_ASSERT(thingKind == FINALIZE_OBJECT0_BACKGROUND  ||
    1555                 :               thingKind == FINALIZE_OBJECT2_BACKGROUND  ||
    1556                 :               thingKind == FINALIZE_OBJECT4_BACKGROUND  ||
    1557                 :               thingKind == FINALIZE_OBJECT8_BACKGROUND  ||
    1558                 :               thingKind == FINALIZE_OBJECT12_BACKGROUND ||
    1559                 :               thingKind == FINALIZE_OBJECT16_BACKGROUND ||
    1560                 :               thingKind == FINALIZE_SHORT_STRING        ||
    1561          979824 :               thingKind == FINALIZE_STRING);
    1562                 : 
    1563                 : #ifdef JS_THREADSAFE
    1564          979824 :     JS_ASSERT(!cx->runtime->gcHelperThread.sweeping());
    1565                 : 
    1566          979824 :     ArenaList *al = &arenaLists[thingKind];
    1567          979824 :     if (!al->head) {
    1568          602756 :         JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
    1569          602756 :         JS_ASSERT(al->cursor == &al->head);
    1570          602756 :         return;
    1571                 :     }
    1572                 : 
    1573                 :     /*
    1574                 :      * The state can be just-finished if we have not allocated any GC things
    1575                 :      * from the arena list after the previous background finalization.
    1576                 :      */
    1577          559687 :     JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE ||
    1578          936755 :               backgroundFinalizeState[thingKind] == BFS_JUST_FINISHED);
    1579                 : 
    1580          377068 :     if (cx->gcBackgroundFree) {
    1581                 :         /*
    1582                 :          * To ensure the finalization order even during the background GC we
    1583                 :          * must use infallibleAppend so that arenas scheduled for background
    1584                 :          * finalization are not finalized now if the append fails.
    1585                 :          */
    1586          237329 :         cx->gcBackgroundFree->finalizeVector.infallibleAppend(al->head);
    1587          237329 :         al->clear();
    1588          237329 :         backgroundFinalizeState[thingKind] = BFS_RUN;
    1589                 :     } else {
    1590          139739 :         FinalizeArenas(cx, al, thingKind, false);
    1591          139739 :         backgroundFinalizeState[thingKind] = BFS_DONE;
    1592                 :     }
    1593                 : 
    1594                 : #else /* !JS_THREADSAFE */
    1595                 : 
    1596                 :     finalizeNow(cx, thingKind);
    1597                 : 
    1598                 : #endif
    1599                 : }
    1600                 : 
    1601                 : #ifdef JS_THREADSAFE
    1602                 : /*static*/ void
    1603          237329 : ArenaLists::backgroundFinalize(JSContext *cx, ArenaHeader *listHead)
    1604                 : {
    1605          237329 :     JS_ASSERT(listHead);
    1606          237329 :     AllocKind thingKind = listHead->getAllocKind();
    1607          237329 :     JSCompartment *comp = listHead->compartment;
    1608          237329 :     ArenaList finalized;
    1609          237329 :     finalized.head = listHead;
    1610          237329 :     FinalizeArenas(cx, &finalized, thingKind, true);
    1611                 : 
    1612                 :     /*
    1613                 :      * After we finish the finalization, al->cursor must point to the end of
    1614                 :      * the head list, as we emptied the list before the background finalization
    1615                 :      * and allocation adds new arenas before the cursor.
    1616                 :      */
    1617          237329 :     ArenaLists *lists = &comp->arenas;
    1618          237329 :     ArenaList *al = &lists->arenaLists[thingKind];
    1619                 : 
    1620          474658 :     AutoLockGC lock(cx->runtime);
    1621          237329 :     JS_ASSERT(lists->backgroundFinalizeState[thingKind] == BFS_RUN);
    1622          237329 :     JS_ASSERT(!*al->cursor);
    1623                 : 
    1624                 :     /*
    1625                 :      * We must set the state to BFS_JUST_FINISHED if we touch the arena list,
    1626                 :      * even if we only add fully allocated arenas without any free things to the
    1627                 :      * list. This ensures that the allocation thread takes the GC lock and all
    1628                 :      * writes to the free list elements are propagated. As we always take the
    1629                 :      * GC lock when allocating new arenas from the chunks we can set the state
    1630                 :      * to BFS_DONE if we have released all finalized arenas back to their
    1631                 :      * chunks.
    1632                 :      */
    1633          237329 :     if (finalized.head) {
    1634          219410 :         *al->cursor = finalized.head;
    1635          219410 :         if (finalized.cursor != &finalized.head)
    1636           62296 :             al->cursor = finalized.cursor;
    1637          219410 :         lists->backgroundFinalizeState[thingKind] = BFS_JUST_FINISHED;
    1638                 :     } else {
    1639           17919 :         lists->backgroundFinalizeState[thingKind] = BFS_DONE;
    1640                 :     }
    1641          237329 : }
    1642                 : #endif /* JS_THREADSAFE */
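
The comments above describe a small per-AllocKind state machine for background finalization. The sketch below restates those transitions with hypothetical helper functions; the real code simply assigns to backgroundFinalizeState under the GC lock:

    #include <cassert>

    /*
     * Illustrative restatement (not SpiderMonkey code) of the per-AllocKind
     * background finalization states used above and the transitions the two
     * threads perform on them.
     */
    enum BFS { BFS_DONE, BFS_RUN, BFS_JUST_FINISHED };

    /* finalizeLater: the main thread hands a non-empty list to the helper thread. */
    static BFS OnScheduleBackgroundFinalize() { return BFS_RUN; }

    /* backgroundFinalize: the helper thread finished; did any arenas survive? */
    static BFS OnBackgroundFinalizeDone(bool arenasReturnedToList)
    {
        return arenasReturnedToList ? BFS_JUST_FINISHED : BFS_DONE;
    }

    /* allocateFromArena: the main thread observes the finished state under the lock. */
    static BFS OnAllocateUnderGCLock(BFS s)
    {
        return s == BFS_JUST_FINISHED ? BFS_DONE : s;
    }

    int main()
    {
        BFS s = OnScheduleBackgroundFinalize();
        assert(s == BFS_RUN);
        s = OnBackgroundFinalizeDone(true);
        assert(s == BFS_JUST_FINISHED);
        s = OnAllocateUnderGCLock(s);
        assert(s == BFS_DONE);
        return 0;
    }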
    1643                 : 
    1644                 : void
    1645          122478 : ArenaLists::finalizeObjects(JSContext *cx)
    1646                 : {
    1647          122478 :     finalizeNow(cx, FINALIZE_OBJECT0);
    1648          122478 :     finalizeNow(cx, FINALIZE_OBJECT2);
    1649          122478 :     finalizeNow(cx, FINALIZE_OBJECT4);
    1650          122478 :     finalizeNow(cx, FINALIZE_OBJECT8);
    1651          122478 :     finalizeNow(cx, FINALIZE_OBJECT12);
    1652          122478 :     finalizeNow(cx, FINALIZE_OBJECT16);
    1653                 : 
    1654                 : #ifdef JS_THREADSAFE
    1655          122478 :     finalizeLater(cx, FINALIZE_OBJECT0_BACKGROUND);
    1656          122478 :     finalizeLater(cx, FINALIZE_OBJECT2_BACKGROUND);
    1657          122478 :     finalizeLater(cx, FINALIZE_OBJECT4_BACKGROUND);
    1658          122478 :     finalizeLater(cx, FINALIZE_OBJECT8_BACKGROUND);
    1659          122478 :     finalizeLater(cx, FINALIZE_OBJECT12_BACKGROUND);
    1660          122478 :     finalizeLater(cx, FINALIZE_OBJECT16_BACKGROUND);
    1661                 : #endif
    1662                 : 
    1663                 : #if JS_HAS_XML_SUPPORT
    1664          122478 :     finalizeNow(cx, FINALIZE_XML);
    1665                 : #endif
    1666          122478 : }
    1667                 : 
    1668                 : void
    1669          122478 : ArenaLists::finalizeStrings(JSContext *cx)
    1670                 : {
    1671          122478 :     finalizeLater(cx, FINALIZE_SHORT_STRING);
    1672          122478 :     finalizeLater(cx, FINALIZE_STRING);
    1673                 : 
    1674          122478 :     finalizeNow(cx, FINALIZE_EXTERNAL_STRING);
    1675          122478 : }
    1676                 : 
    1677                 : void
    1678          122478 : ArenaLists::finalizeShapes(JSContext *cx)
    1679                 : {
    1680          122478 :     finalizeNow(cx, FINALIZE_SHAPE);
    1681          122478 :     finalizeNow(cx, FINALIZE_BASE_SHAPE);
    1682          122478 :     finalizeNow(cx, FINALIZE_TYPE_OBJECT);
    1683          122478 : }
    1684                 : 
    1685                 : void
    1686          122478 : ArenaLists::finalizeScripts(JSContext *cx)
    1687                 : {
    1688          122478 :     finalizeNow(cx, FINALIZE_SCRIPT);
    1689          122478 : }
    1690                 : 
    1691                 : static void
    1692            9052 : RunLastDitchGC(JSContext *cx, gcreason::Reason reason)
    1693                 : {
    1694            9052 :     JSRuntime *rt = cx->runtime;
    1695                 : 
    1696                 :     /* The last ditch GC preserves all atoms. */
    1697           18104 :     AutoKeepAtoms keep(rt);
    1698            9052 :     GC(cx, rt->gcTriggerCompartment, GC_NORMAL, reason);
    1699            9052 : }
    1700                 : 
    1701                 : /* static */ void *
    1702         2339906 : ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
    1703                 : {
    1704         2339906 :     JS_ASSERT(cx->compartment->arenas.freeLists[thingKind].isEmpty());
    1705                 : 
    1706         2339906 :     JSCompartment *comp = cx->compartment;
    1707         2339906 :     JSRuntime *rt = comp->rt;
    1708         2339906 :     JS_ASSERT(!rt->gcRunning);
    1709                 : 
    1710         2339906 :     bool runGC = rt->gcIncrementalState != NO_INCREMENTAL && comp->gcBytes > comp->gcTriggerBytes;
    1711              10 :     for (;;) {
    1712         2339916 :         if (JS_UNLIKELY(runGC)) {
    1713              10 :             RunLastDitchGC(cx, gcreason::LAST_DITCH);
    1714                 : 
    1715                 :             /*
    1716                 :              * The JSGC_END callback can legitimately allocate new GC
    1717                 :              * things and populate the free list. If that happens, just
    1718                 :              * return that list head.
    1719                 :              */
    1720              10 :             size_t thingSize = Arena::thingSize(thingKind);
    1721              10 :             if (void *thing = comp->arenas.allocateFromFreeList(thingKind, thingSize))
    1722               0 :                 return thing;
    1723                 :         }
    1724                 : 
    1725                 :         /*
    1726                 :          * allocateFromArena may fail while the background finalization is
    1727                 :          * still running. In that case we want to wait for it to finish and
    1728                 :          * restart. However, checking for that is racy as the background
    1729                 :          * finalization could free some things after allocateFromArena decided
    1730                 :          * to fail, yet by this point it may have already stopped. To avoid
    1731                 :          * this race we always try to allocate twice.
    1732                 :          */
    1733         2339936 :         for (bool secondAttempt = false; ; secondAttempt = true) {
    1734         2339936 :             void *thing = comp->arenas.allocateFromArena(comp, thingKind);
    1735         2339936 :             if (JS_LIKELY(!!thing))
    1736         2339896 :                 return thing;
    1737              40 :             if (secondAttempt)
    1738                 :                 break;
    1739                 : 
    1740              40 :             AutoLockGC lock(rt);
    1741                 : #ifdef JS_THREADSAFE
    1742              20 :             rt->gcHelperThread.waitBackgroundSweepEnd();
    1743                 : #endif
    1744                 :         }
    1745                 : 
    1746                 :         /*
    1747                 :          * We failed to allocate. Run the GC if we haven't done it already.
    1748                 :          * Otherwise report OOM.
    1749                 :          */
    1750              20 :         if (runGC)
    1751                 :             break;
    1752              10 :         runGC = true;
    1753                 :     }
    1754                 : 
    1755              10 :     js_ReportOutOfMemory(cx);
    1756              10 :     return NULL;
    1757                 : }
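
The inner loop of refillFreeList encodes a try, wait for the background sweep, then try exactly once more discipline, so a racing background finalization cannot produce a spurious OOM. The same pattern in isolation, with hypothetical stand-ins for the allocator and the wait:

    #include <cstddef>

    static int attempts = 0;

    /* Hypothetical allocator that fails on the first call and succeeds after. */
    static void *FakeAllocate() { return ++attempts < 2 ? NULL : &attempts; }

    /* Hypothetical stand-in for rt->gcHelperThread.waitBackgroundSweepEnd(). */
    static void FakeWaitForBackgroundSweep() {}

    /*
     * The retry discipline from refillFreeList above, abstracted: on failure,
     * synchronize with the background sweeper once and try again; only a second
     * failure is treated as a genuine OOM.
     */
    static void *AllocateWithOneRetry(void *(*allocate)(), void (*waitForSweep)())
    {
        for (bool secondAttempt = false; ; secondAttempt = true) {
            if (void *thing = allocate())
                return thing;
            if (secondAttempt)
                return NULL;       /* report OOM to the caller */
            waitForSweep();        /* free things may have appeared meanwhile */
        }
    }

    int main()
    {
        return AllocateWithOneRetry(FakeAllocate, FakeWaitForBackgroundSweep) ? 0 : 1;
    }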
    1758                 : 
    1759                 : } /* namespace gc */
    1760                 : } /* namespace js */
    1761                 : 
    1762                 : JSGCTraceKind
    1763         3168282 : js_GetGCThingTraceKind(void *thing)
    1764                 : {
    1765         3168282 :     return GetGCThingTraceKind(thing);
    1766                 : }
    1767                 : 
    1768                 : JSBool
    1769               0 : js_LockGCThingRT(JSRuntime *rt, void *thing)
    1770                 : {
    1771               0 :     if (!thing)
    1772               0 :         return true;
    1773                 : 
    1774               0 :     if (GCLocks::Ptr p = rt->gcLocksHash.lookupWithDefault(thing, 0)) {
    1775               0 :         p->value++;
    1776               0 :         return true;
    1777                 :     }
    1778                 : 
    1779               0 :     return false;
    1780                 : }
    1781                 : 
    1782                 : void
    1783               0 : js_UnlockGCThingRT(JSRuntime *rt, void *thing)
    1784                 : {
    1785               0 :     if (!thing)
    1786               0 :         return;
    1787                 : 
    1788               0 :     if (GCLocks::Ptr p = rt->gcLocksHash.lookup(thing)) {
    1789               0 :         rt->gcPoke = true;
    1790               0 :         if (--p->value == 0)
    1791               0 :             rt->gcLocksHash.remove(p);
    1792                 :     }
    1793                 : }
    1794                 : 
    1795                 : namespace js {
    1796                 : 
    1797                 : void
    1798          862951 : InitTracer(JSTracer *trc, JSRuntime *rt, JSTraceCallback callback)
    1799                 : {
    1800          862951 :     trc->runtime = rt;
    1801          862951 :     trc->callback = callback;
    1802          862951 :     trc->debugPrinter = NULL;
    1803          862951 :     trc->debugPrintArg = NULL;
    1804          862951 :     trc->debugPrintIndex = size_t(-1);
    1805          862951 :     trc->eagerlyTraceWeakMaps = true;
    1806          862951 : }
    1807                 : 
    1808                 : /* static */ int64_t
    1809              50 : SliceBudget::TimeBudget(int64_t millis)
    1810                 : {
    1811              50 :     return millis * PRMJ_USEC_PER_MSEC;
    1812                 : }
    1813                 : 
    1814                 : /* static */ int64_t
    1815               0 : SliceBudget::WorkBudget(int64_t work)
    1816                 : {
    1817               0 :     return -work;
    1818                 : }
    1819                 : 
    1820          102504 : SliceBudget::SliceBudget()
    1821                 :   : deadline(INT64_MAX),
    1822          102504 :     counter(INTPTR_MAX)
    1823                 : {
    1824          102504 : }
    1825                 : 
    1826               0 : SliceBudget::SliceBudget(int64_t budget)
    1827                 : {
    1828               0 :     if (budget == Unlimited) {
    1829               0 :         deadline = INT64_MAX;
    1830               0 :         counter = INTPTR_MAX;
    1831               0 :     } else if (budget > 0) {
    1832               0 :         deadline = PRMJ_Now() + budget;
    1833               0 :         counter = CounterReset;
    1834                 :     } else {
    1835               0 :         deadline = 0;
    1836               0 :         counter = -budget;
    1837                 :     }
    1838               0 : }
    1839                 : 
    1840                 : bool
    1841               0 : SliceBudget::checkOverBudget()
    1842                 : {
    1843               0 :     bool over = PRMJ_Now() > deadline;
    1844               0 :     if (!over)
    1845               0 :         counter = CounterReset;
    1846               0 :     return over;
    1847                 : }
    1848                 : 
    1849           19910 : GCMarker::GCMarker()
    1850                 :   : stack(size_t(-1)),
    1851                 :     color(BLACK),
    1852                 :     started(false),
    1853                 :     unmarkedArenaStackTop(NULL),
    1854                 :     markLaterArenas(0),
    1855           19910 :     grayFailed(false)
    1856                 : {
    1857           19910 : }
    1858                 : 
    1859                 : bool
    1860           19910 : GCMarker::init()
    1861                 : {
    1862           19910 :     return stack.init(MARK_STACK_LENGTH);
    1863                 : }
    1864                 : 
    1865                 : void
    1866           52591 : GCMarker::start(JSRuntime *rt)
    1867                 : {
    1868           52591 :     InitTracer(this, rt, NULL);
    1869           52591 :     JS_ASSERT(!started);
    1870           52591 :     started = true;
    1871           52591 :     color = BLACK;
    1872                 : 
    1873           52591 :     JS_ASSERT(!unmarkedArenaStackTop);
    1874           52591 :     JS_ASSERT(markLaterArenas == 0);
    1875                 : 
    1876           52591 :     JS_ASSERT(grayRoots.empty());
    1877           52591 :     JS_ASSERT(!grayFailed);
    1878                 : 
    1879                 :     /*
    1880                 :      * The GC is recomputing the liveness of WeakMap entries, so we delay
    1881                 :      * visting entries.
    1882                 :      * visiting entries.
    1883           52591 :     eagerlyTraceWeakMaps = JS_FALSE;
    1884           52591 : }
    1885                 : 
    1886                 : void
    1887           52491 : GCMarker::stop()
    1888                 : {
    1889           52491 :     JS_ASSERT(isDrained());
    1890                 : 
    1891           52491 :     JS_ASSERT(started);
    1892           52491 :     started = false;
    1893                 : 
    1894           52491 :     JS_ASSERT(!unmarkedArenaStackTop);
    1895           52491 :     JS_ASSERT(markLaterArenas == 0);
    1896                 : 
    1897           52491 :     JS_ASSERT(grayRoots.empty());
    1898           52491 :     grayFailed = false;
    1899                 : 
    1900                 :     /* Free non-ballast stack memory. */
    1901           52491 :     stack.reset();
    1902           52491 :     grayRoots.clearAndFree();
    1903           52491 : }
    1904                 : 
    1905                 : void
    1906            1399 : GCMarker::reset()
    1907                 : {
    1908            1399 :     color = BLACK;
    1909                 : 
    1910            1399 :     stack.reset();
    1911            1399 :     JS_ASSERT(isMarkStackEmpty());
    1912                 : 
    1913           54808 :     while (unmarkedArenaStackTop) {
    1914           52010 :         ArenaHeader *aheader = unmarkedArenaStackTop;
    1915           52010 :         JS_ASSERT(aheader->hasDelayedMarking);
    1916           52010 :         JS_ASSERT(markLaterArenas);
    1917           52010 :         unmarkedArenaStackTop = aheader->getNextDelayedMarking();
    1918           52010 :         aheader->hasDelayedMarking = 0;
    1919           52010 :         aheader->markOverflow = 0;
    1920           52010 :         aheader->allocatedDuringIncremental = 0;
    1921           52010 :         markLaterArenas--;
    1922                 :     }
    1923            1399 :     JS_ASSERT(isDrained());
    1924            1399 :     JS_ASSERT(!markLaterArenas);
    1925                 : 
    1926            1399 :     grayRoots.clearAndFree();
    1927            1399 :     grayFailed = false;
    1928            1399 : }
    1929                 : 
    1930                 : /*
    1931                 :  * When the native stack is low, the GC does not call JS_TraceChildren to mark
    1932                 :  * the reachable "children" of the thing. Rather the thing is put aside and
    1933                 :  * JS_TraceChildren is called later with more space on the C stack.
    1934                 :  *
    1935                 :  * To implement such delayed marking of the children with minimal overhead for
    1936                 :  * the normal case of sufficient native stack, the code adds a field per
    1937                 :  * arena. The field markingDelay->link links all arenas with delayed things
    1938                 :  * into a stack whose top pointer is kept in
    1939                 :  * GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
    1940                 :  * arenas to the stack as necessary while markDelayedChildren pops the arenas
    1941                 :  * from the stack until it empties.
    1942                 :  */
    1943                 : 
    1944                 : inline void
    1945           52010 : GCMarker::delayMarkingArena(ArenaHeader *aheader)
    1946                 : {
    1947           52010 :     if (aheader->hasDelayedMarking) {
    1948                 :         /* Arena already scheduled to be marked later */
    1949               0 :         return;
    1950                 :     }
    1951           52010 :     aheader->setNextDelayedMarking(unmarkedArenaStackTop);
    1952           52010 :     unmarkedArenaStackTop = aheader;
    1953           52010 :     markLaterArenas++;
    1954                 : }
    1955                 : 
    1956                 : void
    1957               0 : GCMarker::delayMarkingChildren(const void *thing)
    1958                 : {
    1959               0 :     const Cell *cell = reinterpret_cast<const Cell *>(thing);
    1960               0 :     cell->arenaHeader()->markOverflow = 1;
    1961               0 :     delayMarkingArena(cell->arenaHeader());
    1962               0 : }
    1963                 : 
    1964                 : void
    1965               0 : GCMarker::markDelayedChildren(ArenaHeader *aheader)
    1966                 : {
    1967               0 :     if (aheader->markOverflow) {
    1968               0 :         bool always = aheader->allocatedDuringIncremental;
    1969               0 :         aheader->markOverflow = 0;
    1970                 : 
    1971               0 :         for (CellIterUnderGC i(aheader); !i.done(); i.next()) {
    1972               0 :             Cell *t = i.getCell();
    1973               0 :             if (always || t->isMarked()) {
    1974               0 :                 t->markIfUnmarked();
    1975               0 :                 JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
    1976                 :             }
    1977                 :         }
    1978                 :     } else {
    1979               0 :         JS_ASSERT(aheader->allocatedDuringIncremental);
    1980               0 :         PushArena(this, aheader);
    1981                 :     }
    1982               0 :     aheader->allocatedDuringIncremental = 0;
    1983               0 : }
    1984                 : 
    1985                 : bool
    1986               0 : GCMarker::markDelayedChildren(SliceBudget &budget)
    1987                 : {
    1988               0 :     gcstats::AutoPhase ap(runtime->gcStats, gcstats::PHASE_MARK_DELAYED);
    1989                 : 
    1990               0 :     JS_ASSERT(unmarkedArenaStackTop);
    1991               0 :     do {
    1992                 :         /*
    1993                 :          * If marking gets delayed at the same arena again, we must repeat
    1994                 :          * marking of its things. For that we pop the arena from the stack
    1995                 :          * and clear its hasDelayedMarking flag before we begin the marking.
    1996                 :          */
    1997               0 :         ArenaHeader *aheader = unmarkedArenaStackTop;
    1998               0 :         JS_ASSERT(aheader->hasDelayedMarking);
    1999               0 :         JS_ASSERT(markLaterArenas);
    2000               0 :         unmarkedArenaStackTop = aheader->getNextDelayedMarking();
    2001               0 :         aheader->hasDelayedMarking = 0;
    2002               0 :         markLaterArenas--;
    2003               0 :         markDelayedChildren(aheader);
    2004                 : 
    2005               0 :         if (budget.checkOverBudget())
    2006               0 :             return false;
    2007                 :     } while (unmarkedArenaStackTop);
    2008               0 :     JS_ASSERT(!markLaterArenas);
    2009                 : 
    2010               0 :     return true;
    2011                 : }
    2012                 : 
    2013                 : #ifdef DEBUG
    2014                 : void
    2015        65392819 : GCMarker::checkCompartment(void *p)
    2016                 : {
    2017        65392819 :     JS_ASSERT(started);
    2018                 : 
    2019        65392819 :     Cell *cell = static_cast<Cell *>(p);
    2020        65392819 :     if (runtime->gcRunning && runtime->gcCurrentCompartment)
    2021          123128 :         JS_ASSERT(cell->compartment() == runtime->gcCurrentCompartment);
    2022        65269691 :     else if (runtime->gcIncrementalCompartment)
    2023               0 :         JS_ASSERT(cell->compartment() == runtime->gcIncrementalCompartment);
    2024        65392819 : }
    2025                 : #endif
    2026                 : 
    2027                 : bool
    2028           51092 : GCMarker::hasBufferedGrayRoots() const
    2029                 : {
    2030           51092 :     return !grayFailed;
    2031                 : }
    2032                 : 
    2033                 : void
    2034           14728 : GCMarker::startBufferingGrayRoots()
    2035                 : {
    2036           14728 :     JS_ASSERT(!callback);
    2037           14728 :     callback = GrayCallback;
    2038           14728 :     JS_ASSERT(IS_GC_MARKING_TRACER(this));
    2039           14728 : }
    2040                 : 
    2041                 : void
    2042           14728 : GCMarker::endBufferingGrayRoots()
    2043                 : {
    2044           14728 :     JS_ASSERT(callback == GrayCallback);
    2045           14728 :     callback = NULL;
    2046           14728 :     JS_ASSERT(IS_GC_MARKING_TRACER(this));
    2047           14728 : }
    2048                 : 
    2049                 : void
    2050           51092 : GCMarker::markBufferedGrayRoots()
    2051                 : {
    2052           51092 :     JS_ASSERT(!grayFailed);
    2053                 : 
    2054          250617 :     for (GrayRoot *elem = grayRoots.begin(); elem != grayRoots.end(); elem++) {
    2055                 : #ifdef DEBUG
    2056          199525 :         debugPrinter = elem->debugPrinter;
    2057          199525 :         debugPrintArg = elem->debugPrintArg;
    2058          199525 :         debugPrintIndex = elem->debugPrintIndex;
    2059                 : #endif
    2060          199525 :         MarkKind(this, elem->thing, elem->kind);
    2061                 :     }
    2062                 : 
    2063           51092 :     grayRoots.clearAndFree();
    2064           51092 : }
    2065                 : 
    2066                 : void
    2067          199525 : GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind)
    2068                 : {
    2069          199525 :     JS_ASSERT(started);
    2070                 : 
    2071          199525 :     if (grayFailed)
    2072               0 :         return;
    2073                 : 
    2074          199525 :     GrayRoot root(thing, kind);
    2075                 : #ifdef DEBUG
    2076          199525 :     root.debugPrinter = debugPrinter;
    2077          199525 :     root.debugPrintArg = debugPrintArg;
    2078          199525 :     root.debugPrintIndex = debugPrintIndex;
    2079                 : #endif
    2080                 : 
    2081          199525 :     if (!grayRoots.append(root)) {
    2082               0 :         grayRoots.clearAndFree();
    2083               0 :         grayFailed = true;
    2084                 :     }
    2085                 : }
    2086                 : 
    2087                 : void
    2088          199525 : GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
    2089                 : {
    2090          199525 :     GCMarker *gcmarker = static_cast<GCMarker *>(trc);
    2091          199525 :     gcmarker->appendGrayRoot(*thingp, kind);
    2092          199525 : }
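/*
 * Taken together, the functions above implement two-phase handling of gray
 * roots: while roots are being marked, GrayCallback only records each
 * (thing, kind) pair via appendGrayRoot, and markBufferedGrayRoots replays
 * the buffer later, after black marking, under the gray mark color. If
 * appending ever fails, grayFailed makes hasBufferedGrayRoots() return false
 * and MarkGrayAndWeak (below) re-runs the gray roots trace op directly.
 * Roughly:
 *
 *   gcmarker->startBufferingGrayRoots();
 *   (*rt->gcGrayRootsTraceOp)(gcmarker, rt->gcGrayRootsData);  // fills grayRoots
 *   gcmarker->endBufferingGrayRoots();
 *   ...
 *   gcmarker->setMarkColorGray();
 *   gcmarker->markBufferedGrayRoots();     // or re-trace if buffering failed
 */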
    2093                 : 
    2094                 : size_t
    2095               3 : GCMarker::sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const
    2096                 : {
    2097               3 :     return stack.sizeOfExcludingThis(mallocSizeOf) +
    2098               3 :            grayRoots.sizeOfExcludingThis(mallocSizeOf);
    2099                 : }
    2100                 : 
    2101                 : void
    2102               0 : SetMarkStackLimit(JSRuntime *rt, size_t limit)
    2103                 : {
    2104               0 :     JS_ASSERT(!rt->gcRunning);
    2105               0 :     rt->gcMarker.setSizeLimit(limit);
    2106               0 : }
    2107                 : 
    2108                 : } /* namespace js */
    2109                 : 
    2110                 : #ifdef DEBUG
    2111                 : static void
    2112          134675 : EmptyMarkCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
    2113                 : {
    2114          134675 : }
    2115                 : #endif
    2116                 : 
    2117                 : static void
    2118          135049 : gc_root_traversal(JSTracer *trc, const RootEntry &entry)
    2119                 : {
    2120                 : #ifdef DEBUG
    2121                 :     void *ptr;
    2122          135049 :     if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
    2123          134797 :         ptr = *reinterpret_cast<void **>(entry.key);
    2124                 :     } else {
    2125             252 :         Value *vp = reinterpret_cast<Value *>(entry.key);
    2126             252 :         ptr = vp->isGCThing() ? vp->toGCThing() : NULL;
    2127                 :     }
    2128                 : 
    2129          135049 :     if (ptr && !trc->runtime->gcCurrentCompartment) {
    2130                 :         /*
    2131                 :          * Use the conservative machinery to check whether ptr is a valid GC thing.
    2132                 :          * We only do this during global GCs, to preserve the invariant
    2133                 :          * that mark callbacks are not in place during compartment GCs.
    2134                 :          */
    2135                 :         JSTracer checker;
    2136          134675 :         JS_TracerInit(&checker, trc->runtime, EmptyMarkCallback);
    2137          134675 :         ConservativeGCTest test = MarkIfGCThingWord(&checker, reinterpret_cast<uintptr_t>(ptr));
    2138          134675 :         if (test != CGCT_VALID && entry.value.name) {
    2139                 :             fprintf(stderr,
    2140                 : "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
    2141                 : "invalid gcthing.  This is usually caused by a missing call to JS_RemoveRoot.\n"
    2142                 : "The root's name is \"%s\".\n",
    2143               0 :                     entry.value.name);
    2144                 :         }
    2145          134675 :         JS_ASSERT(test == CGCT_VALID);
    2146                 :     }
    2147                 : #endif
    2148          135049 :     const char *name = entry.value.name ? entry.value.name : "root";
    2149          135049 :     if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
    2150          134797 :         MarkGCThingRoot(trc, *reinterpret_cast<void **>(entry.key), name);
    2151                 :     else
    2152             252 :         MarkValueRoot(trc, reinterpret_cast<Value *>(entry.key), name);
    2153          135049 : }
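/*
 * For context, the gcRootsHash entries traversed above come from the JSAPI
 * root-management calls. A minimal embedder-side sketch, assuming the
 * JS_AddNamedObjectRoot/JS_RemoveObjectRoot entry points of this API
 * generation; 'cachedObj' and both helpers are hypothetical illustrations,
 * not part of jsgc.cpp.
 */
static JSObject *cachedObj;

static JSBool
RegisterCachedObj(JSContext *cx, JSObject *obj)
{
    cachedObj = obj;
    /* The name is what the DEBUG diagnostic above prints on misuse. */
    return JS_AddNamedObjectRoot(cx, &cachedObj, "cachedObj");
}

static void
UnregisterCachedObj(JSContext *cx)
{
    /* Forgetting this call is exactly the error the DEBUG check reports. */
    JS_RemoveObjectRoot(cx, &cachedObj);
    cachedObj = NULL;
}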
    2154                 : 
    2155                 : static void
    2156               0 : gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
    2157                 : {
    2158               0 :     JS_ASSERT(entry.value >= 1);
    2159               0 :     MarkGCThingRoot(trc, entry.key, "locked object");
    2160               0 : }
    2161                 : 
    2162                 : namespace js {
    2163                 : 
    2164                 : void
    2165               0 : MarkCompartmentActive(StackFrame *fp)
    2166                 : {
    2167               0 :     if (fp->isScriptFrame())
    2168               0 :         fp->script()->compartment()->active = true;
    2169               0 : }
    2170                 : 
    2171                 : } /* namespace js */
    2172                 : 
    2173                 : void
    2174                 : AutoIdArray::trace(JSTracer *trc)
    2175                 : {
    2176                 :     JS_ASSERT(tag == IDARRAY);
    2177                 :     gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
    2178                 : }
    2179                 : 
    2180                 : void
    2181               0 : AutoEnumStateRooter::trace(JSTracer *trc)
    2182                 : {
    2183               0 :     gc::MarkObjectRoot(trc, &obj, "JS::AutoEnumStateRooter.obj");
    2184               0 : }
    2185                 : 
    2186                 : inline void
    2187           21923 : AutoGCRooter::trace(JSTracer *trc)
    2188                 : {
    2189           21923 :     switch (tag) {
    2190                 :       case JSVAL:
    2191            1583 :         MarkValueRoot(trc, &static_cast<AutoValueRooter *>(this)->val, "JS::AutoValueRooter.val");
    2192            1583 :         return;
    2193                 : 
    2194                 :       case PARSER:
    2195               9 :         static_cast<Parser *>(this)->trace(trc);
    2196               9 :         return;
    2197                 : 
    2198                 :       case ENUMERATOR:
    2199               0 :         static_cast<AutoEnumStateRooter *>(this)->trace(trc);
    2200               0 :         return;
    2201                 : 
    2202                 :       case IDARRAY: {
    2203               0 :         JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
    2204               0 :         MarkIdRange(trc, ida->length, ida->vector, "JS::AutoIdArray.idArray");
    2205               0 :         return;
    2206                 :       }
    2207                 : 
    2208                 :       case DESCRIPTORS: {
    2209                 :         PropDescArray &descriptors =
    2210               9 :             static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
    2211              18 :         for (size_t i = 0, len = descriptors.length(); i < len; i++) {
    2212               9 :             PropDesc &desc = descriptors[i];
    2213               9 :             MarkValueRoot(trc, &desc.pd, "PropDesc::pd");
    2214               9 :             MarkValueRoot(trc, &desc.value, "PropDesc::value");
    2215               9 :             MarkValueRoot(trc, &desc.get, "PropDesc::get");
    2216               9 :             MarkValueRoot(trc, &desc.set, "PropDesc::set");
    2217                 :         }
    2218               9 :         return;
    2219                 :       }
    2220                 : 
    2221                 :       case DESCRIPTOR : {
    2222               9 :         PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
    2223               9 :         if (desc.obj)
    2224               9 :             MarkObjectRoot(trc, &desc.obj, "Descriptor::obj");
    2225               9 :         MarkValueRoot(trc, &desc.value, "Descriptor::value");
    2226               9 :         if ((desc.attrs & JSPROP_GETTER) && desc.getter) {
    2227               0 :             JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, desc.getter);
    2228               0 :             MarkObjectRoot(trc, &tmp, "Descriptor::get");
    2229               0 :             desc.getter = JS_DATA_TO_FUNC_PTR(JSPropertyOp, tmp);
    2230                 :         }
    2231               9 :         if (desc.attrs & JSPROP_SETTER && desc.setter) {
    2232               0 :             JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, desc.setter);
    2233               0 :             MarkObjectRoot(trc, &tmp, "Descriptor::set");
    2234               0 :             desc.setter = JS_DATA_TO_FUNC_PTR(JSStrictPropertyOp, tmp);
    2235                 :         }
    2236               9 :         return;
    2237                 :       }
    2238                 : 
    2239                 :       case NAMESPACES: {
    2240               0 :         JSXMLArray<JSObject> &array = static_cast<AutoNamespaceArray *>(this)->array;
    2241               0 :         MarkObjectRange(trc, array.length, array.vector, "JSXMLArray.vector");
    2242               0 :         js_XMLArrayCursorTrace(trc, array.cursors);
    2243               0 :         return;
    2244                 :       }
    2245                 : 
    2246                 :       case XML:
    2247               0 :         js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
    2248               0 :         return;
    2249                 : 
    2250                 :       case OBJECT:
    2251           10284 :         if (static_cast<AutoObjectRooter *>(this)->obj)
    2252                 :             MarkObjectRoot(trc, &static_cast<AutoObjectRooter *>(this)->obj,
    2253           10284 :                            "JS::AutoObjectRooter.obj");
    2254           10284 :         return;
    2255                 : 
    2256                 :       case ID:
    2257               0 :         MarkIdRoot(trc, &static_cast<AutoIdRooter *>(this)->id_, "JS::AutoIdRooter.id_");
    2258               0 :         return;
    2259                 : 
    2260                 :       case VALVECTOR: {
    2261            6474 :         AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
    2262            6474 :         MarkValueRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
    2263            6474 :         return;
    2264                 :       }
    2265                 : 
    2266                 :       case STRING:
    2267             713 :         if (static_cast<AutoStringRooter *>(this)->str)
    2268                 :             MarkStringRoot(trc, &static_cast<AutoStringRooter *>(this)->str,
    2269             713 :                            "JS::AutoStringRooter.str");
    2270             713 :         return;
    2271                 : 
    2272                 :       case IDVECTOR: {
    2273            1639 :         AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
    2274            1639 :         MarkIdRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
    2275            1639 :         return;
    2276                 :       }
    2277                 : 
    2278                 :       case SHAPEVECTOR: {
    2279             611 :         AutoShapeVector::VectorImpl &vector = static_cast<js::AutoShapeVector *>(this)->vector;
    2280             611 :         MarkShapeRootRange(trc, vector.length(), const_cast<Shape **>(vector.begin()), 
    2281             611 :                            "js::AutoShapeVector.vector");
    2282             611 :         return;
    2283                 :       }
    2284                 : 
    2285                 :       case OBJVECTOR: {
    2286               9 :         AutoObjectVector::VectorImpl &vector = static_cast<AutoObjectVector *>(this)->vector;
    2287               9 :         MarkObjectRootRange(trc, vector.length(), vector.begin(), "js::AutoObjectVector.vector");
    2288               9 :         return;
    2289                 :       }
    2290                 : 
    2291                 :       case VALARRAY: {
    2292               0 :         AutoValueArray *array = static_cast<AutoValueArray *>(this);
    2293               0 :         MarkValueRootRange(trc, array->length(), array->start(), "js::AutoValueArray");
    2294               0 :         return;
    2295                 :       }
    2296                 : 
    2297                 :       case SCRIPTVECTOR: {
    2298               0 :         AutoScriptVector::VectorImpl &vector = static_cast<AutoScriptVector *>(this)->vector;
    2299               0 :         for (size_t i = 0; i < vector.length(); i++)
    2300               0 :             MarkScriptRoot(trc, &vector[i], "AutoScriptVector element");
    2301               0 :         return;
    2302                 :       }
    2303                 :     }
    2304                 : 
    2305             583 :     JS_ASSERT(tag >= 0);
    2306                 :     MarkValueRootRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array,
    2307             583 :                        "JS::AutoArrayRooter.array");
    2308                 : }
    2309                 : 
    2310                 : /* static */ void
    2311           55865 : AutoGCRooter::traceAll(JSTracer *trc)
    2312                 : {
    2313           77788 :     for (js::AutoGCRooter *gcr = trc->runtime->autoGCRooters; gcr; gcr = gcr->down)
    2314           21923 :         gcr->trace(trc);
    2315           55865 : }
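/*
 * For context, the rooters dispatched on in AutoGCRooter::trace are
 * stack-allocated RAII objects that link themselves onto rt->autoGCRooters,
 * which traceAll walks above. A hedged sketch of typical usage (constructor
 * shapes as in this API generation; the helper itself is hypothetical):
 */
static bool
SketchRootedWork(JSContext *cx)
{
    js::AutoObjectVector objs(cx);      /* traced via the OBJVECTOR case */

    JSObject *obj = JS_NewObject(cx, NULL, NULL, NULL);
    if (!obj || !objs.append(obj))
        return false;

    /*
     * While 'objs' is live, any GC triggered from here marks its contents
     * through AutoGCRooter::traceAll -> trace, so 'obj' cannot be swept
     * while this frame is on the stack.
     */
    return true;
}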
    2316                 : 
    2317                 : namespace js {
    2318                 : 
    2319                 : static void
    2320           54466 : MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
    2321                 : {
    2322           54466 :     JSRuntime *rt = trc->runtime;
    2323           54466 :     JS_ASSERT(trc->callback != GCMarker::GrayCallback);
    2324           54466 :     if (rt->gcCurrentCompartment) {
    2325             756 :         for (CompartmentsIter c(rt); !c.done(); c.next())
    2326             540 :             c->markCrossCompartmentWrappers(trc);
    2327             216 :         Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
    2328                 :     }
    2329                 : 
    2330           54466 :     AutoGCRooter::traceAll(trc);
    2331                 : 
    2332           54466 :     if (rt->hasContexts())
    2333           34358 :         MarkConservativeStackRoots(trc, useSavedRoots);
    2334                 : 
    2335          189515 :     for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
    2336          135049 :         gc_root_traversal(trc, r.front());
    2337                 : 
    2338           54466 :     for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
    2339               0 :         gc_lock_traversal(r.front(), trc);
    2340                 : 
    2341           54466 :     if (rt->scriptPCCounters) {
    2342               0 :         ScriptOpcodeCountsVector &vec = *rt->scriptPCCounters;
    2343               0 :         for (size_t i = 0; i < vec.length(); i++)
    2344               0 :             MarkScriptRoot(trc, &vec[i].script, "scriptPCCounters");
    2345                 :     }
    2346                 : 
    2347           54466 :     js_TraceAtomState(trc);
    2348           54466 :     rt->staticStrings.trace(trc);
    2349                 : 
    2350          108741 :     for (ContextIter acx(rt); !acx.done(); acx.next())
    2351           54275 :         acx->mark(trc);
    2352                 : 
    2353          188145 :     for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
    2354          133679 :         if (c->activeAnalysis)
    2355              36 :             c->markTypes(trc);
    2356                 : 
    2357                 :         /* During a GC, these are treated as weak pointers. */
    2358          133679 :         if (!IS_GC_MARKING_TRACER(trc)) {
    2359           11201 :             if (c->watchpointMap)
    2360               0 :                 c->watchpointMap->markAll(trc);
    2361                 :         }
    2362                 : 
    2363                 :         /* Do not discard scripts with counters while profiling. */
    2364          133679 :         if (rt->profilingScripts) {
    2365               0 :             for (CellIterUnderGC i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
    2366               0 :                 JSScript *script = i.get<JSScript>();
    2367               0 :                 if (script->pcCounters) {
    2368               0 :                     MarkScriptRoot(trc, &script, "profilingScripts");
    2369               0 :                     JS_ASSERT(script == i.get<JSScript>());
    2370                 :                 }
    2371                 :             }
    2372                 :         }
    2373                 :     }
    2374                 : 
    2375                 : #ifdef JS_METHODJIT
    2376                 :     /* We need to expand inline frames before stack scanning. */
    2377          188469 :     for (CompartmentsIter c(rt); !c.done(); c.next())
    2378          134003 :         mjit::ExpandInlineFrames(c);
    2379                 : #endif
    2380                 : 
    2381           54466 :     rt->stackSpace.mark(trc);
    2382                 : 
    2383                 :     /* The embedding can register additional roots here. */
    2384           54466 :     if (JSTraceDataOp op = rt->gcBlackRootsTraceOp)
    2385           14728 :         (*op)(trc, rt->gcBlackRootsData);
    2386                 : 
    2387                 :     /* During GC, this buffers up the gray roots and doesn't mark them. */
    2388           54466 :     if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) {
    2389           14728 :         if (IS_GC_MARKING_TRACER(trc)) {
    2390           14728 :             GCMarker *gcmarker = static_cast<GCMarker *>(trc);
    2391           14728 :             gcmarker->startBufferingGrayRoots();
    2392           14728 :             (*op)(trc, rt->gcGrayRootsData);
    2393           14728 :             gcmarker->endBufferingGrayRoots();
    2394                 :         } else {
    2395               0 :             (*op)(trc, rt->gcGrayRootsData);
    2396                 :         }
    2397                 :     }
    2398           54466 : }
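/*
 * For context, rt->gcBlackRootsTraceOp used above is the hook an embedding
 * installs to report extra strong roots. A hedged sketch, assuming the
 * JS_SetExtraGCRootsTracer/JS_CALL_OBJECT_TRACER entry points of this API
 * generation; 'MyEmbeddingRoots' and both functions are hypothetical:
 */
struct MyEmbeddingRoots {
    JSObject *cachedObject;
};

static void
TraceMyEmbeddingRoots(JSTracer *trc, void *data)
{
    MyEmbeddingRoots *roots = static_cast<MyEmbeddingRoots *>(data);
    if (roots->cachedObject)
        JS_CALL_OBJECT_TRACER(trc, roots->cachedObject, "my-embedding-cache");
}

static void
InstallMyEmbeddingRoots(JSRuntime *rt, MyEmbeddingRoots *roots)
{
    /* MarkRuntime above invokes this as rt->gcBlackRootsTraceOp. */
    JS_SetExtraGCRootsTracer(rt, TraceMyEmbeddingRoots, roots);
}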
    2399                 : 
    2400                 : void
    2401             177 : TriggerGC(JSRuntime *rt, gcreason::Reason reason)
    2402                 : {
    2403             177 :     JS_ASSERT(rt->onOwnerThread());
    2404                 : 
    2405             177 :     if (rt->gcRunning || rt->gcIsNeeded)
    2406             102 :         return;
    2407                 : 
    2408                 :     /* Trigger the GC when it is safe to call an operation callback. */
    2409              75 :     rt->gcIsNeeded = true;
    2410              75 :     rt->gcTriggerCompartment = NULL;
    2411              75 :     rt->gcTriggerReason = reason;
    2412              75 :     rt->triggerOperationCallback();
    2413                 : }
    2414                 : 
    2415                 : void
    2416           97699 : TriggerCompartmentGC(JSCompartment *comp, gcreason::Reason reason)
    2417                 : {
    2418           97699 :     JSRuntime *rt = comp->rt;
    2419           97699 :     JS_ASSERT(!rt->gcRunning);
    2420                 : 
    2421           97699 :     if (rt->gcZeal() == ZealAllocValue) {
    2422               0 :         TriggerGC(rt, reason);
    2423               0 :         return;
    2424                 :     }
    2425                 : 
    2426           97699 :     if (rt->gcMode == JSGC_MODE_GLOBAL || comp == rt->atomsCompartment) {
    2427                 :         /* We can't do a compartmental GC of the default compartment. */
    2428              31 :         TriggerGC(rt, reason);
    2429              31 :         return;
    2430                 :     }
    2431                 : 
    2432           97668 :     if (rt->gcIsNeeded) {
    2433                 :         /* If we need to GC more than one compartment, run a full GC. */
    2434           97560 :         if (rt->gcTriggerCompartment != comp)
    2435           97560 :             rt->gcTriggerCompartment = NULL;
    2436           97560 :         return;
    2437                 :     }
    2438                 : 
    2439                 :     /*
    2440                 :      * Trigger the GC when it is safe to call an operation callback on any
    2441                 :      * thread.
    2442                 :      */
    2443             108 :     rt->gcIsNeeded = true;
    2444             108 :     rt->gcTriggerCompartment = comp;
    2445             108 :     rt->gcTriggerReason = reason;
    2446             108 :     rt->triggerOperationCallback();
    2447                 : }
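/*
 * Summary of the decision ladder above: allocation zeal forces a full GC;
 * JSGC_MODE_GLOBAL and the atoms compartment can only be collected by a full
 * GC; if a GC is already pending for a different compartment, the pending
 * request is widened to a full GC; otherwise a GC of just this compartment
 * is scheduled via the operation callback.
 */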
    2448                 : 
    2449                 : void
    2450               0 : MaybeGC(JSContext *cx)
    2451                 : {
    2452               0 :     JSRuntime *rt = cx->runtime;
    2453               0 :     JS_ASSERT(rt->onOwnerThread());
    2454                 : 
    2455               0 :     if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) {
    2456               0 :         GC(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
    2457               0 :         return;
    2458                 :     }
    2459                 : 
    2460               0 :     JSCompartment *comp = cx->compartment;
    2461               0 :     if (rt->gcIsNeeded) {
    2462                 :         GCSlice(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL,
    2463               0 :                 GC_NORMAL, gcreason::MAYBEGC);
    2464               0 :         return;
    2465                 :     }
    2466                 : 
    2467               0 :     if (comp->gcBytes > 8192 &&
    2468                 :         comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4) &&
    2469                 :         rt->gcIncrementalState == NO_INCREMENTAL)
    2470                 :     {
    2471                 :         GCSlice(cx, (rt->gcMode == JSGC_MODE_COMPARTMENT) ? comp : NULL,
    2472               0 :                 GC_NORMAL, gcreason::MAYBEGC);
    2473               0 :         return;
    2474                 :     }
    2475                 : 
    2476               0 :     if (comp->gcMallocAndFreeBytes > comp->gcTriggerMallocAndFreeBytes) {
    2477               0 :         GCSlice(cx, rt->gcMode == JSGC_MODE_GLOBAL ? NULL : comp, GC_NORMAL, gcreason::MAYBEGC);
    2478               0 :         return;
    2479                 :     }
    2480                 : 
    2481                 :     /*
    2482                 :      * Access to the counters and, on 32-bit systems, setting gcNextFullGCTime
    2483                 :      * below is not atomic, and a race condition could trigger or suppress the
    2484                 :      * GC. We tolerate this.
    2485                 :      */
    2486               0 :     int64_t now = PRMJ_Now();
    2487               0 :     if (rt->gcNextFullGCTime && rt->gcNextFullGCTime <= now) {
    2488               0 :         if (rt->gcChunkAllocationSinceLastGC ||
    2489                 :             rt->gcNumArenasFreeCommitted > FreeCommittedArenasThreshold)
    2490                 :         {
    2491               0 :             GCSlice(cx, NULL, GC_SHRINK, gcreason::MAYBEGC);
    2492                 :         } else {
    2493               0 :             rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN;
    2494                 :         }
    2495                 :     }
    2496                 : }
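/*
 * A small illustration of the per-compartment heuristic above: with
 * gcTriggerBytes = 32 MB, a GC slice is requested once gcBytes reaches
 * 3 * (32 MB / 4) = 24 MB (and exceeds the 8192-byte floor). Sketch only;
 * the real predicate is written inline in MaybeGC, and 'WouldMaybeGC' is a
 * hypothetical name.
 */
static bool
WouldMaybeGC(size_t gcBytes, size_t gcTriggerBytes)
{
    return gcBytes > 8192 && gcBytes >= 3 * (gcTriggerBytes / 4);
}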
    2497                 : 
    2498                 : static void
    2499              18 : DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp)
    2500                 : {
    2501              18 :     Chunk *chunk = *availableListHeadp;
    2502              18 :     if (!chunk)
    2503               0 :         return;
    2504                 : 
    2505                 :     /*
    2506                 :      * Decommit is expensive so we avoid holding the GC lock while calling it.
    2507                 :      *
    2508                 :      * We decommit from the tail of the list to minimize interference with the
    2509                 :      * main thread that may start to allocate things at this point.
    2510                 :      *
    2511                 :      * The arena being decommitted outside the GC lock must not be available
    2512                 :      * for allocation either via the free list or via the decommittedArenas
    2513                 :      * bitmap. For that we fetch the arena from the free list before the
    2514                 :      * decommit, pretending it was allocated. If this arena is also the only
    2515                 :      * free arena in the chunk, then we must remove the chunk from the
    2516                 :      * available list before we release the lock so that the allocation
    2517                 :      * thread does not see chunks with no free arenas on the available list.
    2518                 :      *
    2519                 :      * After we retake the lock, we mark the arena as free and decommitted if
    2520                 :      * the decommit was successful. We must also add the chunk back to the
    2521                 :      * available list if we removed it previously or if the main thread has
    2522                 :      * allocated all remaining free arenas in the chunk.
    2523                 :      *
    2524                 :      * We also must make sure that the aheader is not accessed again after we
    2525                 :      * decommit the arena.
    2526                 :      */
    2527              18 :     JS_ASSERT(chunk->info.prevp == availableListHeadp);
    2528              18 :     while (Chunk *next = chunk->info.next) {
    2529               0 :         JS_ASSERT(next->info.prevp == &chunk->info.next);
    2530               0 :         chunk = next;
    2531                 :     }
    2532                 : 
    2533               0 :     for (;;) {
    2534            4176 :         while (chunk->info.numArenasFreeCommitted != 0) {
    2535            4140 :             ArenaHeader *aheader = chunk->fetchNextFreeArena(rt);
    2536                 : 
    2537            4140 :             Chunk **savedPrevp = chunk->info.prevp;
    2538            4140 :             if (!chunk->hasAvailableArenas())
    2539               0 :                 chunk->removeFromAvailableList();
    2540                 : 
    2541            4140 :             size_t arenaIndex = Chunk::arenaIndex(aheader->arenaAddress());
    2542                 :             bool ok;
    2543                 :             {
    2544                 :                 /*
    2545                 :                  * If the main thread waits for the decommit to finish, skip
    2546                 :                  * the potentially expensive unlock/lock pair on the contested
    2547                 :                  * lock.
    2548                 :                  */
    2549            8280 :                 Maybe<AutoUnlockGC> maybeUnlock;
    2550            4140 :                 if (!rt->gcRunning)
    2551            4140 :                     maybeUnlock.construct(rt);
    2552            4140 :                 ok = MarkPagesUnused(aheader->getArena(), ArenaSize);
    2553                 :             }
    2554                 : 
    2555            4140 :             if (ok) {
    2556            4140 :                 ++chunk->info.numArenasFree;
    2557            4140 :                 chunk->decommittedArenas.set(arenaIndex);
    2558                 :             } else {
    2559               0 :                 chunk->addArenaToFreeList(rt, aheader);
    2560                 :             }
    2561            4140 :             JS_ASSERT(chunk->hasAvailableArenas());
    2562            4140 :             JS_ASSERT(!chunk->unused());
    2563            4140 :             if (chunk->info.numArenasFree == 1) {
    2564                 :                 /*
    2565                 :                  * Put the chunk back on the available list either at the
    2566                 :                  * point where it was before, to preserve the available list
    2567                 :                  * that we enumerate, or, when the allocation thread has fully
    2568                 :                  * used all the previous chunks, at the beginning of the
    2569                 :                  * available list.
    2570                 :                  */
    2571               0 :                 Chunk **insertPoint = savedPrevp;
    2572               0 :                 if (savedPrevp != availableListHeadp) {
    2573               0 :                     Chunk *prev = Chunk::fromPointerToNext(savedPrevp);
    2574               0 :                     if (!prev->hasAvailableArenas())
    2575               0 :                         insertPoint = availableListHeadp;
    2576                 :                 }
    2577               0 :                 chunk->insertToAvailableList(insertPoint);
    2578                 :             } else {
    2579            4140 :                 JS_ASSERT(chunk->info.prevp);
    2580                 :             }
    2581                 : 
    2582            4140 :             if (rt->gcChunkAllocationSinceLastGC) {
    2583                 :                 /*
    2584                 :                  * The allocator thread has started to get new chunks. We should
    2585                 :                  * stop to avoid decommitting arenas in newly allocated chunks.
    2586                 :                  */
    2587               0 :                 return;
    2588                 :             }
    2589                 :         }
    2590                 : 
    2591                 :         /*
    2592                 :          * chunk->info.prevp becomes null when the allocator thread has consumed
    2593                 :          * all chunks from the available list.
    2594                 :          */
    2595              18 :         JS_ASSERT_IF(chunk->info.prevp, *chunk->info.prevp == chunk);
    2596              18 :         if (chunk->info.prevp == availableListHeadp || !chunk->info.prevp)
    2597              18 :             break;
    2598                 : 
    2599                 :         /*
    2600                 :          * prevp exists and is not the list head. It must point to the next
    2601                 :          * field of the previous chunk.
    2602                 :          */
    2603               0 :         chunk = chunk->getPrevious();
    2604                 :     }
    2605                 : }
    2606                 : 
    2607                 : static void
    2608               9 : DecommitArenas(JSRuntime *rt)
    2609                 : {
    2610               9 :     DecommitArenasFromAvailableList(rt, &rt->gcSystemAvailableChunkListHead);
    2611               9 :     DecommitArenasFromAvailableList(rt, &rt->gcUserAvailableChunkListHead);
    2612               9 : }
    2613                 : 
    2614                 : /* Must be called with the GC lock taken. */
    2615                 : static void
    2616           31193 : ExpireChunksAndArenas(JSRuntime *rt, bool shouldShrink)
    2617                 : {
    2618           31193 :     if (Chunk *toFree = rt->gcChunkPool.expire(rt, shouldShrink)) {
    2619             182 :         AutoUnlockGC unlock(rt);
    2620              91 :         FreeChunkList(toFree);
    2621                 :     }
    2622                 : 
    2623           31193 :     if (shouldShrink)
    2624               9 :         DecommitArenas(rt);
    2625           31193 : }
    2626                 : 
    2627                 : #ifdef JS_THREADSAFE
    2628                 : 
    2629                 : static unsigned
    2630           19910 : GetCPUCount()
    2631                 : {
    2632                 :     static unsigned ncpus = 0;
    2633           19910 :     if (ncpus == 0) {
    2634                 : # ifdef XP_WIN
    2635                 :         SYSTEM_INFO sysinfo;
    2636                 :         GetSystemInfo(&sysinfo);
    2637                 :         ncpus = unsigned(sysinfo.dwNumberOfProcessors);
    2638                 : # else
    2639           19811 :         long n = sysconf(_SC_NPROCESSORS_ONLN);
    2640           19811 :         ncpus = (n > 0) ? unsigned(n) : 1;
    2641                 : # endif
    2642                 :     }
    2643           19910 :     return ncpus;
    2644                 : }
    2645                 : 
    2646                 : bool
    2647           19910 : GCHelperThread::init()
    2648                 : {
    2649           19910 :     if (!(wakeup = PR_NewCondVar(rt->gcLock)))
    2650               0 :         return false;
    2651           19910 :     if (!(done = PR_NewCondVar(rt->gcLock)))
    2652               0 :         return false;
    2653                 : 
    2654                 :     thread = PR_CreateThread(PR_USER_THREAD, threadMain, this, PR_PRIORITY_NORMAL,
    2655           19910 :                              PR_LOCAL_THREAD, PR_JOINABLE_THREAD, 0);
    2656           19910 :     if (!thread)
    2657               0 :         return false;
    2658                 : 
    2659           19910 :     backgroundAllocation = (GetCPUCount() >= 2);
    2660           19910 :     return true;
    2661                 : }
    2662                 : 
    2663                 : void
    2664           19908 : GCHelperThread::finish()
    2665                 : {
    2666           19908 :     PRThread *join = NULL;
    2667                 :     {
    2668           39816 :         AutoLockGC lock(rt);
    2669           19908 :         if (thread && state != SHUTDOWN) {
    2670                 :             /*
    2671                 :              * We cannot be in the ALLOCATING or CANCEL_ALLOCATION states as
    2672                 :              * the allocations should have been stopped during the last GC.
    2673                 :              */
    2674           19908 :             JS_ASSERT(state == IDLE || state == SWEEPING);
    2675           19908 :             if (state == IDLE)
    2676           19908 :                 PR_NotifyCondVar(wakeup);
    2677           19908 :             state = SHUTDOWN;
    2678           19908 :             join = thread;
    2679                 :         }
    2680                 :     }
    2681           19908 :     if (join) {
    2682                 :         /* PR_DestroyThread is not necessary. */
    2683           19908 :         PR_JoinThread(join);
    2684                 :     }
    2685           19908 :     if (wakeup)
    2686           19908 :         PR_DestroyCondVar(wakeup);
    2687           19908 :     if (done)
    2688           19908 :         PR_DestroyCondVar(done);
    2689           19908 : }
    2690                 : 
    2691                 : /* static */
    2692                 : void
    2693           19910 : GCHelperThread::threadMain(void *arg)
    2694                 : {
    2695           19910 :     static_cast<GCHelperThread *>(arg)->threadLoop();
    2696           19908 : }
    2697                 : 
    2698                 : void
    2699           19910 : GCHelperThread::threadLoop()
    2700                 : {
    2701           39818 :     AutoLockGC lock(rt);
    2702                 : 
    2703                 :     /*
    2704                 :      * Even on the first iteration the state can be SHUTDOWN or SWEEPING if
    2705                 :      * the stop request, or a GC with its corresponding startBackgroundSweep
    2706                 :      * call, happens before this thread has a chance to run.
    2707                 :      */
    2708           87821 :     for (;;) {
    2709          107731 :         switch (state) {
    2710                 :           case SHUTDOWN:
    2711                 :             return;
    2712                 :           case IDLE:
    2713           53864 :             PR_WaitCondVar(wakeup, PR_INTERVAL_NO_TIMEOUT);
    2714           53862 :             break;
    2715                 :           case SWEEPING:
    2716           31193 :             doSweep();
    2717           31193 :             if (state == SWEEPING)
    2718           31193 :                 state = IDLE;
    2719           31193 :             PR_NotifyAllCondVar(done);
    2720           31193 :             break;
    2721                 :           case ALLOCATING:
    2722            5532 :             do {
    2723                 :                 Chunk *chunk;
    2724                 :                 {
    2725            5532 :                     AutoUnlockGC unlock(rt);
    2726            2766 :                     chunk = Chunk::allocate(rt);
    2727                 :                 }
    2728                 : 
    2729                 :                 /* OOM stops the background allocation. */
    2730            2766 :                 if (!chunk)
    2731               0 :                     break;
    2732            2766 :                 JS_ASSERT(chunk->info.numArenasFreeCommitted == ArenasPerChunk);
    2733            2766 :                 rt->gcNumArenasFreeCommitted += ArenasPerChunk;
    2734            2766 :                 rt->gcChunkPool.put(chunk);
    2735            2766 :             } while (state == ALLOCATING && rt->gcChunkPool.wantBackgroundAllocation(rt));
    2736            2766 :             if (state == ALLOCATING)
    2737            2766 :                 state = IDLE;
    2738            2766 :             break;
    2739                 :           case CANCEL_ALLOCATION:
    2740               0 :             state = IDLE;
    2741               0 :             PR_NotifyAllCondVar(done);
    2742               0 :             break;
    2743                 :         }
    2744                 :     }
    2745                 : }
    2746                 : 
    2747                 : bool
    2748           31184 : GCHelperThread::prepareForBackgroundSweep()
    2749                 : {
    2750           31184 :     JS_ASSERT(state == IDLE);
    2751           31184 :     size_t maxArenaLists = MAX_BACKGROUND_FINALIZE_KINDS * rt->compartments.length();
    2752           31184 :     return finalizeVector.reserve(maxArenaLists);
    2753                 : }
    2754                 : 
    2755                 : /* Must be called with the GC lock taken. */
    2756                 : void
    2757           31184 : GCHelperThread::startBackgroundSweep(JSContext *cx, bool shouldShrink)
    2758                 : {
    2759                 :     /* The caller takes the GC lock. */
    2760           31184 :     JS_ASSERT(state == IDLE);
    2761           31184 :     JS_ASSERT(cx);
    2762           31184 :     JS_ASSERT(!finalizationContext);
    2763           31184 :     finalizationContext = cx;
    2764           31184 :     shrinkFlag = shouldShrink;
    2765           31184 :     state = SWEEPING;
    2766           31184 :     PR_NotifyCondVar(wakeup);
    2767           31184 : }
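/*
 * Caller-side shape of the background sweep hand-off implemented above and
 * below (a hedged sketch; the real call sites are elsewhere in this file and
 * 'SketchSweepHandOff' is a hypothetical name). During the GC,
 * prepareForBackgroundSweep() reserves space and the sweep phase queues the
 * arena lists; afterwards the hand-off looks roughly like this:
 */
static void
SketchSweepHandOff(JSContext *cx, JSRuntime *rt, bool shrink)
{
    {
        AutoLockGC lock(rt);    /* startBackgroundSweep requires the GC lock */
        rt->gcHelperThread.startBackgroundSweep(cx, shrink);  /* wakes threadLoop */
    }

    /* ... the mutator runs concurrently with doSweep ... */

    {
        AutoLockGC lock(rt);    /* the wait functions also require the GC lock */
        rt->gcHelperThread.waitBackgroundSweepEnd();  /* before touching swept arenas */
    }
}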
    2768                 : 
    2769                 : /* Must be called with the GC lock taken. */
    2770                 : void
    2771               9 : GCHelperThread::startBackgroundShrink()
    2772                 : {
    2773               9 :     switch (state) {
    2774                 :       case IDLE:
    2775               9 :         JS_ASSERT(!finalizationContext);
    2776               9 :         shrinkFlag = true;
    2777               9 :         state = SWEEPING;
    2778               9 :         PR_NotifyCondVar(wakeup);
    2779               9 :         break;
    2780                 :       case SWEEPING:
    2781               0 :         shrinkFlag = true;
    2782               0 :         break;
    2783                 :       case ALLOCATING:
    2784                 :       case CANCEL_ALLOCATION:
    2785                 :         /*
    2786                 :          * If we have started background allocation there is nothing to
    2787                 :          * shrink.
    2788                 :          */
    2789               0 :         break;
    2790                 :       case SHUTDOWN:
    2791               0 :         JS_NOT_REACHED("No shrink on shutdown");
    2792                 :     }
    2793               9 : }
    2794                 : 
    2795                 : /* Must be called with the GC lock taken. */
    2796                 : void
    2797           46729 : GCHelperThread::waitBackgroundSweepEnd()
    2798                 : {
    2799           99001 :     while (state == SWEEPING)
    2800            5543 :         PR_WaitCondVar(done, PR_INTERVAL_NO_TIMEOUT);
    2801           46729 : }
    2802                 : 
    2803                 : /* Must be called with the GC lock taken. */
    2804                 : void
    2805           54005 : GCHelperThread::waitBackgroundSweepOrAllocEnd()
    2806                 : {
    2807           54005 :     if (state == ALLOCATING)
    2808               0 :         state = CANCEL_ALLOCATION;
    2809          113677 :     while (state == SWEEPING || state == CANCEL_ALLOCATION)
    2810            5667 :         PR_WaitCondVar(done, PR_INTERVAL_NO_TIMEOUT);
    2811           54005 : }
    2812                 : 
    2813                 : /* Must be called with the GC lock taken. */
    2814                 : inline void
    2815            2766 : GCHelperThread::startBackgroundAllocationIfIdle()
    2816                 : {
    2817            2766 :     if (state == IDLE) {
    2818            2766 :         state = ALLOCATING;
    2819            2766 :         PR_NotifyCondVar(wakeup);
    2820                 :     }
    2821            2766 : }
    2822                 : 
    2823                 : JS_FRIEND_API(void)
    2824           22779 : GCHelperThread::replenishAndFreeLater(void *ptr)
    2825                 : {
    2826           22779 :     JS_ASSERT(freeCursor == freeCursorEnd);
    2827                 :     do {
    2828           22779 :         if (freeCursor && !freeVector.append(freeCursorEnd - FREE_ARRAY_LENGTH))
    2829               0 :             break;
    2830           22779 :         freeCursor = (void **) OffTheBooks::malloc_(FREE_ARRAY_SIZE);
    2831           22779 :         if (!freeCursor) {
    2832               0 :             freeCursorEnd = NULL;
    2833               0 :             break;
    2834                 :         }
    2835           22779 :         freeCursorEnd = freeCursor + FREE_ARRAY_LENGTH;
    2836           22779 :         *freeCursor++ = ptr;
    2837           22779 :         return;
    2838                 :     } while (false);
    2839               0 :     Foreground::free_(ptr);
    2840                 : }
    2841                 : 
    2842                 : /* Must be called with the GC lock taken. */
    2843                 : void
    2844           31193 : GCHelperThread::doSweep()
    2845                 : {
    2846           31193 :     if (JSContext *cx = finalizationContext) {
    2847           31184 :         finalizationContext = NULL;
    2848           62368 :         AutoUnlockGC unlock(rt);
    2849                 : 
    2850                 :         /*
    2851                 :          * We must finalize in insertion order; see the comments in
    2852                 :          * finalizeObjects.
    2853                 :          */
    2854          268513 :         for (ArenaHeader **i = finalizeVector.begin(); i != finalizeVector.end(); ++i)
    2855          237329 :             ArenaLists::backgroundFinalize(cx, *i);
    2856           31184 :         finalizeVector.resize(0);
    2857                 : 
    2858           31184 :         if (freeCursor) {
    2859           22774 :             void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
    2860           22774 :             freeElementsAndArray(array, freeCursor);
    2861           22774 :             freeCursor = freeCursorEnd = NULL;
    2862                 :         } else {
    2863            8410 :             JS_ASSERT(!freeCursorEnd);
    2864                 :         }
    2865           31189 :         for (void ***iter = freeVector.begin(); iter != freeVector.end(); ++iter) {
    2866               5 :             void **array = *iter;
    2867               5 :             freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
    2868                 :         }
    2869           31184 :         freeVector.resize(0);
    2870                 :     }
    2871                 : 
    2872           31193 :     bool shrinking = shrinkFlag;
    2873           31193 :     ExpireChunksAndArenas(rt, shrinking);
    2874                 : 
    2875                 :     /*
    2876                 :      * The main thread may have called ShrinkGCBuffers while
    2877                 :      * ExpireChunksAndArenas(rt, false) was running, so we recheck the flag
    2878                 :      * afterwards.
    2879                 :      */
    2880           31193 :     if (!shrinking && shrinkFlag) {
    2881               0 :         shrinkFlag = false;
    2882               0 :         ExpireChunksAndArenas(rt, true);
    2883                 :     }
    2884           31193 : }
    2885                 : 
    2886                 : #endif /* JS_THREADSAFE */
    2887                 : 
    2888                 : } /* namespace js */
    2889                 : 
    2890                 : static bool
    2891           50876 : ReleaseObservedTypes(JSRuntime *rt)
    2892                 : {
    2893           50876 :     bool releaseTypes = false;
    2894           50876 :     int64_t now = PRMJ_Now();
    2895           50876 :     if (now >= rt->gcJitReleaseTime) {
    2896               3 :         releaseTypes = true;
    2897               3 :         rt->gcJitReleaseTime = now + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
    2898                 :     }
    2899                 : 
    2900           50876 :     return releaseTypes;
    2901                 : }
    2902                 : 
    2903                 : static void
    2904           50876 : SweepCompartments(JSContext *cx, JSGCInvocationKind gckind)
    2905                 : {
    2906           50876 :     JSRuntime *rt = cx->runtime;
    2907           50876 :     JSCompartmentCallback callback = rt->compartmentCallback;
    2908                 : 
    2909                 :     /* Skip the atomsCompartment. */
    2910           50876 :     JSCompartment **read = rt->compartments.begin() + 1;
    2911           50876 :     JSCompartment **end = rt->compartments.end();
    2912           50876 :     JSCompartment **write = read;
    2913           50876 :     JS_ASSERT(rt->compartments.length() >= 1);
    2914           50876 :     JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);
    2915                 : 
    2916          173138 :     while (read < end) {
    2917           71386 :         JSCompartment *compartment = *read++;
    2918                 : 
    2919          188505 :         if (!compartment->hold &&
    2920          117119 :             (compartment->arenas.arenaListsAreEmpty() || !rt->hasContexts()))
    2921                 :         {
    2922           25663 :             compartment->arenas.checkEmptyFreeLists();
    2923           25663 :             if (callback)
    2924            3647 :                 JS_ALWAYS_TRUE(callback(cx, compartment, JSCOMPARTMENT_DESTROY));
    2925           25663 :             if (compartment->principals)
    2926            3370 :                 JS_DropPrincipals(rt, compartment->principals);
    2927           25663 :             cx->delete_(compartment);
    2928           25663 :             continue;
    2929                 :         }
    2930           45723 :         *write++ = compartment;
    2931                 :     }
    2932           50876 :     rt->compartments.resize(write - rt->compartments.begin());
    2933           50876 : }
    2934                 : 
    2935                 : static void
    2936           52591 : PurgeRuntime(JSRuntime *rt)
    2937                 : {
    2938          180347 :     for (GCCompartmentsIter c(rt); !c.done(); c.next())
    2939          127756 :         c->purge();
    2940                 : 
    2941           52591 :     rt->tempLifoAlloc.freeUnused();
    2942           52591 :     rt->gsnCache.purge();
    2943                 : 
    2944                 :     /* FIXME: bug 506341 */
    2945           52591 :     rt->propertyCache.purge(rt);
    2946                 : 
    2947          105091 :     for (ContextIter acx(rt); !acx.done(); acx.next())
    2948           52500 :         acx->purge();
    2949           52591 : }
    2950                 : 
    2951                 : static void
    2952           51092 : BeginMarkPhase(JSRuntime *rt)
    2953                 : {
    2954           51092 :     GCMarker *gcmarker = &rt->gcMarker;
    2955                 : 
    2956           51092 :     rt->gcStartNumber = rt->gcNumber;
    2957                 : 
    2958                 :     /* Reset weak map list. */
    2959           51092 :     WeakMapBase::resetWeakMapList(rt);
    2960                 : 
    2961                 :     /*
    2962                 :      * We must purge the runtime at the beginning of an incremental GC. The
    2963                 :      * danger if we purge later is that the snapshot invariant of incremental
    2964                 :      * GC will be broken, as follows. If some object is reachable only through
    2965                 :      * some cache (say the dtoaCache) then it will not be part of the snapshot.
    2966                 :      * If we purge after root marking, then the mutator could obtain a pointer
    2967                 :      * to the object and start using it. This object might never be marked, so
    2968                 :      * a GC hazard would exist.
    2969                 :      */
    2970           51092 :     PurgeRuntime(rt);
    2971                 : 
    2972                 :     /*
    2973                 :      * Mark phase.
    2974                 :      */
    2975          102184 :     gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK);
    2976          102184 :     gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_ROOTS);
    2977                 : 
    2978          162509 :     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
    2979          111417 :         r.front()->bitmap.clear();
    2980                 : 
    2981           51092 :     MarkRuntime(gcmarker);
    2982           51092 : }
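/*
 * The hazard that the purge comment in BeginMarkPhase describes, spelled out
 * as a rough timeline (illustrative, not engine code):
 *
 *   1. An incremental GC takes its "snapshot": everything reachable from the
 *      roots at the start of marking will be kept alive. An object held only
 *      by a cache such as the dtoaCache is not part of that snapshot.
 *   2. Between slices the mutator fetches the object from the cache and
 *      stores it into an already-marked object.
 *   3. Neither the cache nor the already-marked object is scanned again, so
 *      the object is swept while still live.
 *
 * Purging every such cache before root marking removes step 1, which is why
 * PurgeRuntime must run at the start of the GC rather than later.
 */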
    2983                 : 
    2984                 : void
    2985          102184 : MarkWeakReferences(GCMarker *gcmarker)
    2986                 : {
    2987          102184 :     JS_ASSERT(gcmarker->isDrained());
    2988          409448 :     while (WatchpointMap::markAllIteratively(gcmarker) ||
    2989          102504 :            WeakMapBase::markAllIteratively(gcmarker) ||
    2990          102256 :            Debugger::markAllIteratively(gcmarker))
    2991                 :     {
    2992             320 :         SliceBudget budget;
    2993             320 :         gcmarker->drainMarkStack(budget);
    2994                 :     }
    2995          102184 :     JS_ASSERT(gcmarker->isDrained());
    2996          102184 : }
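
/*
 * Editor's note: a minimal sketch of the fixpoint pattern used by
 * MarkWeakReferences above. WorkList and the three mark* stubs are
 * hypothetical stand-ins, not SpiderMonkey API; the point is only the shape
 * of the loop, which alternates "discover new work" passes with draining
 * until no pass finds anything, matching the JS_ASSERT(isDrained())
 * pre/postconditions.
 */
#include <vector>

namespace fixpoint_sketch {

struct WorkList {
    std::vector<int> items;
    bool isDrained() const { return items.empty(); }
    void drain() { items.clear(); }       /* stands in for drainMarkStack */
};

/* Stub passes: a real pass would push new items and return true when it did. */
static bool markWatchpoints(WorkList &) { return false; }
static bool markWeakMaps(WorkList &)    { return false; }
static bool markDebugger(WorkList &)    { return false; }

static void markWeakReferences(WorkList &work)
{
    while (markWatchpoints(work) || markWeakMaps(work) || markDebugger(work))
        work.drain();
}

} /* namespace fixpoint_sketch */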
    2997                 : 
    2998                 : static void
    2999           51092 : MarkGrayAndWeak(JSRuntime *rt)
    3000                 : {
    3001           51092 :     GCMarker *gcmarker = &rt->gcMarker;
    3002                 : 
    3003           51092 :     JS_ASSERT(gcmarker->isDrained());
    3004           51092 :     MarkWeakReferences(gcmarker);
    3005                 : 
    3006           51092 :     gcmarker->setMarkColorGray();
    3007           51092 :     if (gcmarker->hasBufferedGrayRoots()) {
    3008           51092 :         gcmarker->markBufferedGrayRoots();
    3009                 :     } else {
    3010               0 :         if (JSTraceDataOp op = rt->gcGrayRootsTraceOp)
    3011               0 :             (*op)(gcmarker, rt->gcGrayRootsData);
    3012                 :     }
    3013           51092 :     SliceBudget budget;
    3014           51092 :     gcmarker->drainMarkStack(budget);
    3015           51092 :     MarkWeakReferences(gcmarker);
    3016           51092 :     JS_ASSERT(gcmarker->isDrained());
    3017           51092 : }
    3018                 : 
    3019                 : #ifdef DEBUG
    3020                 : static void
    3021                 : ValidateIncrementalMarking(JSContext *cx);
    3022                 : #endif
    3023                 : 
    3024                 : static void
    3025           51092 : EndMarkPhase(JSContext *cx)
    3026                 : {
    3027           51092 :     JSRuntime *rt = cx->runtime;
    3028                 : 
    3029                 :     {
    3030          102184 :         gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK);
    3031          102184 :         gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_OTHER);
    3032           51092 :         MarkGrayAndWeak(rt);
    3033                 :     }
    3034                 : 
    3035           51092 :     JS_ASSERT(rt->gcMarker.isDrained());
    3036                 : 
    3037                 : #ifdef DEBUG
    3038           51092 :     if (rt->gcIncrementalState != NO_INCREMENTAL)
    3039               0 :         ValidateIncrementalMarking(cx);
    3040                 : #endif
    3041                 : 
    3042                 : #ifdef DEBUG
    3043                 :     /* Make sure that we didn't mark an object in another compartment */
    3044           51092 :     if (rt->gcCurrentCompartment) {
    3045             756 :         for (CompartmentsIter c(rt); !c.done(); c.next()) {
    3046             972 :             JS_ASSERT_IF(c != rt->gcCurrentCompartment && c != rt->atomsCompartment,
    3047             972 :                          c->arenas.checkArenaListAllUnmarked());
    3048                 :         }
    3049                 :     }
    3050                 : #endif
    3051           51092 : }
    3052                 : 
    3053                 : #ifdef DEBUG
    3054                 : static void
    3055               0 : ValidateIncrementalMarking(JSContext *cx)
    3056                 : {
    3057                 :     typedef HashMap<Chunk *, uintptr_t *, GCChunkHasher, SystemAllocPolicy> BitmapMap;
    3058               0 :     BitmapMap map;
    3059               0 :     if (!map.init())
    3060                 :         return;
    3061                 : 
    3062               0 :     JSRuntime *rt = cx->runtime;
    3063               0 :     GCMarker *gcmarker = &rt->gcMarker;
    3064                 : 
    3065                 :     /* Save existing mark bits. */
    3066               0 :     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
    3067               0 :         ChunkBitmap *bitmap = &r.front()->bitmap;
    3068               0 :         uintptr_t *entry = (uintptr_t *)js_malloc(sizeof(bitmap->bitmap));
    3069               0 :         if (!entry)
    3070                 :             return;
    3071                 : 
    3072               0 :         memcpy(entry, bitmap->bitmap, sizeof(bitmap->bitmap));
    3073               0 :         if (!map.putNew(r.front(), entry))
    3074                 :             return;
    3075                 :     }
    3076                 : 
    3077                 :     /* Save the existing weakmaps. */
    3078               0 :     WeakMapVector weakmaps;
    3079               0 :     if (!WeakMapBase::saveWeakMapList(rt, weakmaps))
    3080                 :         return;
    3081                 : 
    3082                 :     /*
    3083                 :      * After this point, the function should run to completion, so we shouldn't
    3084                 :      * do anything fallible.
    3085                 :      */
    3086                 : 
    3087                 :     /* Re-do all the marking, but non-incrementally. */
    3088               0 :     js::gc::State state = rt->gcIncrementalState;
    3089               0 :     rt->gcIncrementalState = NO_INCREMENTAL;
    3090                 : 
    3091                 :     /* As we're re-doing marking, we need to reset the weak map list. */
    3092               0 :     WeakMapBase::resetWeakMapList(rt);
    3093                 : 
    3094               0 :     JS_ASSERT(gcmarker->isDrained());
    3095               0 :     gcmarker->reset();
    3096                 : 
    3097               0 :     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
    3098               0 :         r.front()->bitmap.clear();
    3099                 : 
    3100               0 :     MarkRuntime(gcmarker, true);
    3101               0 :     SliceBudget budget;
    3102               0 :     rt->gcMarker.drainMarkStack(budget);
    3103               0 :     MarkGrayAndWeak(rt);
    3104                 : 
    3105                 :     /* Check that incremental marking marked everything the re-marking finds. */
    3106               0 :     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
    3107               0 :         Chunk *chunk = r.front();
    3108               0 :         ChunkBitmap *bitmap = &chunk->bitmap;
    3109               0 :         uintptr_t *entry = map.lookup(r.front())->value;
    3110                 :         ChunkBitmap incBitmap;
    3111                 : 
    3112               0 :         memcpy(incBitmap.bitmap, entry, sizeof(incBitmap.bitmap));
    3113               0 :         js_free(entry);
    3114                 : 
    3115               0 :         for (size_t i = 0; i < ArenasPerChunk; i++) {
    3116               0 :             Arena *arena = &chunk->arenas[i];
    3117               0 :             if (!arena->aheader.allocated())
    3118               0 :                 continue;
    3119               0 :             if (rt->gcCurrentCompartment && arena->aheader.compartment != rt->gcCurrentCompartment)
    3120               0 :                 continue;
    3121               0 :             if (arena->aheader.allocatedDuringIncremental)
    3122               0 :                 continue;
    3123                 : 
    3124               0 :             AllocKind kind = arena->aheader.getAllocKind();
    3125               0 :             uintptr_t thing = arena->thingsStart(kind);
    3126               0 :             uintptr_t end = arena->thingsEnd();
    3127               0 :             while (thing < end) {
    3128               0 :                 Cell *cell = (Cell *)thing;
    3129               0 :                 if (bitmap->isMarked(cell, BLACK) && !incBitmap.isMarked(cell, BLACK)) {
    3130               0 :                     JS_DumpHeap(rt, stdout, NULL, JSGCTraceKind(0), NULL, 100000, NULL);
    3131               0 :                     printf("Assertion cell: %p (%d)\n", (void *)cell, cell->getAllocKind());
    3132                 :                 }
    3133               0 :                 JS_ASSERT_IF(bitmap->isMarked(cell, BLACK), incBitmap.isMarked(cell, BLACK));
    3134               0 :                 thing += Arena::thingSize(kind);
    3135                 :             }
    3136                 :         }
    3137                 : 
    3138               0 :         memcpy(bitmap->bitmap, incBitmap.bitmap, sizeof(incBitmap.bitmap));
    3139                 :     }
    3140                 : 
    3141                 :     /* Restore the weak map list. */
    3142               0 :     WeakMapBase::resetWeakMapList(rt);
    3143               0 :     WeakMapBase::restoreWeakMapList(rt, weakmaps);
    3144                 : 
    3145               0 :     rt->gcIncrementalState = state;
    3146                 : }
    3147                 : #endif
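
/*
 * Editor's note: the heart of ValidateIncrementalMarking above is a subset
 * check between two mark bitmaps: every cell marked by the fresh
 * non-incremental re-marking must also have been marked by the incremental
 * run (incremental marking may conservatively mark more, never less). A toy
 * version of that check using std::bitset; this is not SpiderMonkey code.
 */
#include <bitset>
#include <cassert>

namespace validate_sketch {

static void validateMarking(const std::bitset<256> &nonIncremental,
                            const std::bitset<256> &incremental)
{
    /* Anything set by the non-incremental run must also be set incrementally. */
    assert((nonIncremental & ~incremental).none());
}

} /* namespace validate_sketch */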
    3148                 : 
    3149                 : static void
    3150           51092 : SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
    3151                 : {
    3152           51092 :     JSRuntime *rt = cx->runtime;
    3153                 : 
    3154                 : #ifdef JS_THREADSAFE
    3155           51092 :     if (rt->hasContexts() && rt->gcHelperThread.prepareForBackgroundSweep())
    3156           31184 :         cx->gcBackgroundFree = &rt->gcHelperThread;
    3157                 : #endif
    3158                 : 
    3159                 :     /* Purge the ArenaLists before sweeping. */
    3160          173570 :     for (GCCompartmentsIter c(rt); !c.done(); c.next())
    3161          122478 :         c->arenas.purge();
    3162                 : 
    3163           51092 :     if (rt->gcFinalizeCallback)
    3164           14730 :         rt->gcFinalizeCallback(cx, JSFINALIZE_START);
    3165                 : 
    3166                 :     /*
    3167                 :      * Sweep phase.
    3168                 :      *
    3169                 :      * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
    3170                 :      * so that any attempt to allocate a GC-thing from a finalizer will fail,
    3171                 :      * rather than nest badly and leave the unmarked newborn to be swept.
    3172                 :      *
    3173                 :      * We first sweep atom state so we can use IsAboutToBeFinalized on
    3174                 :      * JSString held in a hashtable to check if the hashtable entry can be
    3175                 :      * freed. Note that even after the entry is freed, JSObject finalizers can
    3176                 :      * continue to access the corresponding JSString* assuming that they are
    3177                 :      * unique. This works since the atomization API must not be called during
    3178                 :      * the GC.
    3179                 :      */
    3180          102184 :     gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP);
    3181                 : 
    3182                 :     /* Finalize unreachable (key,value) pairs in all weak maps. */
    3183           51092 :     WeakMapBase::sweepAll(&rt->gcMarker);
    3184                 : 
    3185           51092 :     js_SweepAtomState(rt);
    3186                 : 
    3187                 :     /* Collect watch points associated with unreachable objects. */
    3188           51092 :     WatchpointMap::sweepAll(rt);
    3189                 : 
    3190           51092 :     if (!rt->gcCurrentCompartment)
    3191           50876 :         Debugger::sweepAll(cx);
    3192                 : 
    3193           51092 :     bool releaseTypes = !rt->gcCurrentCompartment && ReleaseObservedTypes(rt);
    3194          173570 :     for (GCCompartmentsIter c(rt); !c.done(); c.next())
    3195          122478 :         c->sweep(cx, releaseTypes);
    3196                 : 
    3197                 :     {
    3198          102184 :         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_OBJECT);
    3199                 : 
    3200                 :         /*
    3201                 :          * We finalize objects before other GC things to ensure that the object's
    3202                 :          * finalizer can access the other things even if they will be freed.
    3203                 :          */
    3204          173570 :         for (GCCompartmentsIter c(rt); !c.done(); c.next())
    3205          122478 :             c->arenas.finalizeObjects(cx);
    3206                 :     }
    3207                 : 
    3208                 :     {
    3209          102184 :         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_STRING);
    3210          173570 :         for (GCCompartmentsIter c(rt); !c.done(); c.next())
    3211          122478 :             c->arenas.finalizeStrings(cx);
    3212                 :     }
    3213                 : 
    3214                 :     {
    3215          102184 :         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_SCRIPT);
    3216          173570 :         for (GCCompartmentsIter c(rt); !c.done(); c.next())
    3217          122478 :             c->arenas.finalizeScripts(cx);
    3218                 :     }
    3219                 : 
    3220                 :     {
    3221          102184 :         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_SHAPE);
    3222          173570 :         for (GCCompartmentsIter c(rt); !c.done(); c.next())
    3223          122478 :             c->arenas.finalizeShapes(cx);
    3224                 :     }
    3225                 : 
    3226                 : #ifdef DEBUG
    3227           51092 :     PropertyTree::dumpShapes(cx);
    3228                 : #endif
    3229                 : 
    3230                 :     {
    3231          102184 :         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DESTROY);
    3232                 : 
    3233                 :         /*
    3234                 :          * Sweep script filenames after sweeping functions in the generic loop
    3235                 :          * above. In this way when a scripted function's finalizer destroys the
    3236                 :          * script and calls rt->destroyScriptHook, the hook can still access the
    3237                 :          * script's filename. See bug 323267.
    3238                 :          */
    3239          173570 :         for (GCCompartmentsIter c(rt); !c.done(); c.next())
    3240          122478 :             js_SweepScriptFilenames(c);
    3241                 : 
    3242                 :         /*
    3243                 :          * This removes compartments from rt->compartments, so we do it last to make
    3244                 :          * sure we don't miss sweeping any compartments.
    3245                 :          */
    3246           51092 :         if (!rt->gcCurrentCompartment)
    3247           50876 :             SweepCompartments(cx, gckind);
    3248                 : 
    3249                 : #ifndef JS_THREADSAFE
    3250                 :         /*
    3251                 :          * Destroy arenas after we finished the sweeping so finalizers can safely
    3252                 :          * use IsAboutToBeFinalized().
    3253                 :          * This is done on the GCHelperThread if JS_THREADSAFE is defined.
    3254                 :          */
    3255                 :         ExpireChunksAndArenas(rt, gckind == GC_SHRINK);
    3256                 : #endif
    3257                 :     }
    3258                 : 
    3259                 :     {
    3260          102184 :         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_XPCONNECT);
    3261           51092 :         if (rt->gcFinalizeCallback)
    3262           14730 :             rt->gcFinalizeCallback(cx, JSFINALIZE_END);
    3263                 :     }
    3264                 : 
    3265          148231 :     for (CompartmentsIter c(rt); !c.done(); c.next())
    3266           97139 :         c->setGCLastBytes(c->gcBytes, c->gcMallocAndFreeBytes, gckind);
    3267           51092 : }
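
/*
 * Editor's note: SweepPhase brackets each sub-phase with gcstats::AutoPhase,
 * an RAII object that charges the elapsed time of its scope to a phase
 * bucket. Below is a stand-alone sketch of that pattern; Phase, Stats and
 * AutoPhase here are simplified stand-ins, not the real gcstats types.
 */
#include <chrono>

namespace phase_sketch {

enum Phase { PHASE_SWEEP_OBJECT, PHASE_SWEEP_STRING, PHASE_LIMIT };

struct Stats {
    double totalMs[PHASE_LIMIT];
    Stats() { for (int i = 0; i < PHASE_LIMIT; i++) totalMs[i] = 0; }
};

class AutoPhase {
    Stats &stats;
    Phase phase;
    std::chrono::steady_clock::time_point start;

  public:
    AutoPhase(Stats &s, Phase p)
      : stats(s), phase(p), start(std::chrono::steady_clock::now()) {}
    ~AutoPhase() {
        std::chrono::duration<double, std::milli> elapsed =
            std::chrono::steady_clock::now() - start;
        stats.totalMs[phase] += elapsed.count();
    }
};

/* Usage: { AutoPhase ap(stats, PHASE_SWEEP_OBJECT); ...finalize objects... } */

} /* namespace phase_sketch */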
    3268                 : 
    3269                 : /* Perform mark-and-sweep GC. If comp is set, we perform a single-compartment GC. */
    3270                 : static void
    3271           51092 : MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind)
    3272                 : {
    3273           51092 :     JSRuntime *rt = cx->runtime;
    3274                 : 
    3275          102184 :     AutoUnlockGC unlock(rt);
    3276                 : 
    3277           51092 :     rt->gcMarker.start(rt);
    3278           51092 :     JS_ASSERT(!rt->gcMarker.callback);
    3279                 : 
    3280           51092 :     BeginMarkPhase(rt);
    3281                 :     {
    3282          102184 :         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK);
    3283           51092 :         SliceBudget budget;
    3284           51092 :         rt->gcMarker.drainMarkStack(budget);
    3285                 :     }
    3286           51092 :     EndMarkPhase(cx);
    3287           51092 :     SweepPhase(cx, gckind);
    3288                 : 
    3289           51092 :     rt->gcMarker.stop();
    3290           51092 : }
    3291                 : 
    3292                 : /*
    3293                 :  * This class should be used by any code that needs exclusive access to the
    3294                 :  * heap in order to trace through it...
    3295                 :  */
    3296                 : class AutoHeapSession {
    3297                 :   public:
    3298                 :     explicit AutoHeapSession(JSRuntime *rt);
    3299                 :     ~AutoHeapSession();
    3300                 : 
    3301                 :   protected:
    3302                 :     JSRuntime *runtime;
    3303                 : 
    3304                 :   private:
    3305                 :     AutoHeapSession(const AutoHeapSession&) MOZ_DELETE;
    3306                 :     void operator=(const AutoHeapSession&) MOZ_DELETE;
    3307                 : };
    3308                 : 
    3309                 : /* ...while this class is to be used only for garbage collection. */
    3310                 : class AutoGCSession : AutoHeapSession {
    3311                 :   public:
    3312                 :     explicit AutoGCSession(JSRuntime *rt, JSCompartment *comp);
    3313                 :     ~AutoGCSession();
    3314                 : };
    3315                 : 
    3316                 : /* Start a new heap session. */
    3317           54694 : AutoHeapSession::AutoHeapSession(JSRuntime *rt)
    3318           54694 :   : runtime(rt)
    3319                 : {
    3320           54694 :     JS_ASSERT(!rt->noGCOrAllocationCheck);
    3321           54694 :     JS_ASSERT(!rt->gcRunning);
    3322           54694 :     rt->gcRunning = true;
    3323           54694 : }
    3324                 : 
    3325           54694 : AutoHeapSession::~AutoHeapSession()
    3326                 : {
    3327           54694 :     JS_ASSERT(runtime->gcRunning);
    3328           54694 :     runtime->gcRunning = false;
    3329           54694 : }
    3330                 : 
    3331           51092 : AutoGCSession::AutoGCSession(JSRuntime *rt, JSCompartment *comp)
    3332           51092 :   : AutoHeapSession(rt)
    3333                 : {
    3334           51092 :     JS_ASSERT(!runtime->gcCurrentCompartment);
    3335           51092 :     runtime->gcCurrentCompartment = comp;
    3336                 : 
    3337           51092 :     runtime->gcIsNeeded = false;
    3338           51092 :     runtime->gcTriggerCompartment = NULL;
    3339           51092 :     runtime->gcInterFrameGC = true;
    3340                 : 
    3341           51092 :     runtime->gcNumber++;
    3342                 : 
    3343           51092 :     runtime->resetGCMallocBytes();
    3344                 : 
    3345                 :     /* Clear gcMallocBytes for all compartments */
    3346          173894 :     for (CompartmentsIter c(runtime); !c.done(); c.next())
    3347          122802 :         c->resetGCMallocBytes();
    3348           51092 : }
    3349                 : 
    3350          102184 : AutoGCSession::~AutoGCSession()
    3351                 : {
    3352           51092 :     runtime->gcCurrentCompartment = NULL;
    3353           51092 :     runtime->gcNextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
    3354           51092 :     runtime->gcChunkAllocationSinceLastGC = false;
    3355           51092 : }
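
/*
 * Editor's note: AutoHeapSession/AutoGCSession are RAII guards around the
 * rt->gcRunning flag (plus per-collection bookkeeping in the derived class).
 * A reduced sketch of the base pattern, with a hypothetical Runtime struct:
 */
#include <cassert>

namespace session_sketch {

struct Runtime { bool gcRunning; Runtime() : gcRunning(false) {} };

class AutoHeapSession {
  protected:
    Runtime *runtime;

  public:
    explicit AutoHeapSession(Runtime *rt) : runtime(rt) {
        assert(!rt->gcRunning);        /* sessions must not nest */
        rt->gcRunning = true;
    }
    ~AutoHeapSession() {
        assert(runtime->gcRunning);
        runtime->gcRunning = false;
    }

  private:
    AutoHeapSession(const AutoHeapSession &);     /* not copyable */
    void operator=(const AutoHeapSession &);
};

} /* namespace session_sketch */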
    3356                 : 
    3357                 : static void
    3358           51092 : ResetIncrementalGC(JSRuntime *rt, const char *reason)
    3359                 : {
    3360           51092 :     if (rt->gcIncrementalState == NO_INCREMENTAL)
    3361           51092 :         return;
    3362                 : 
    3363               0 :     for (CompartmentsIter c(rt); !c.done(); c.next()) {
    3364               0 :         if (!rt->gcIncrementalCompartment || rt->gcIncrementalCompartment == c)
    3365               0 :             c->needsBarrier_ = false;
    3366                 : 
    3367               0 :         JS_ASSERT(!c->needsBarrier_);
    3368                 :     }
    3369                 : 
    3370               0 :     rt->gcIncrementalCompartment = NULL;
    3371               0 :     rt->gcMarker.reset();
    3372               0 :     rt->gcMarker.stop();
    3373               0 :     rt->gcIncrementalState = NO_INCREMENTAL;
    3374                 : 
    3375               0 :     rt->gcStats.reset(reason);
    3376                 : }
    3377                 : 
    3378                 : class AutoGCSlice {
    3379                 :   public:
    3380                 :     AutoGCSlice(JSContext *cx);
    3381                 :     ~AutoGCSlice();
    3382                 : 
    3383                 :   private:
    3384                 :     JSContext *context;
    3385                 : };
    3386                 : 
    3387               0 : AutoGCSlice::AutoGCSlice(JSContext *cx)
    3388               0 :   : context(cx)
    3389                 : {
    3390               0 :     JSRuntime *rt = context->runtime;
    3391                 : 
    3392                 :     /*
    3393                 :      * During incremental GC, the compartment's active flag determines whether
    3394                 :      * there are stack frames active for any of its scripts. Normally this flag
    3395                 :      * is set at the beginning of the mark phase. During incremental GC, we also
    3396                 :      * set it at the start of every phase.
    3397                 :      */
    3398               0 :     rt->stackSpace.markActiveCompartments();
    3399                 : 
    3400               0 :     for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
    3401                 :         /* Clear this early so we don't do any write barriers during GC. */
    3402               0 :         if (rt->gcIncrementalState == MARK)
    3403               0 :             c->needsBarrier_ = false;
    3404                 :         else
    3405               0 :             JS_ASSERT(!c->needsBarrier_);
    3406                 :     }
    3407               0 : }
    3408                 : 
    3409               0 : AutoGCSlice::~AutoGCSlice()
    3410                 : {
    3411               0 :     JSRuntime *rt = context->runtime;
    3412                 : 
    3413               0 :     for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
    3414               0 :         if (rt->gcIncrementalState == MARK) {
    3415               0 :             c->needsBarrier_ = true;
    3416               0 :             c->arenas.prepareForIncrementalGC(rt);
    3417                 :         } else {
    3418               0 :             JS_ASSERT(rt->gcIncrementalState == NO_INCREMENTAL);
    3419                 : 
    3420               0 :             c->needsBarrier_ = false;
    3421                 :         }
    3422                 :     }
    3423               0 : }
    3424                 : 
    3425                 : class AutoCopyFreeListToArenas {
    3426                 :     JSRuntime *rt;
    3427                 : 
    3428                 :   public:
    3429           54560 :     AutoCopyFreeListToArenas(JSRuntime *rt)
    3430           54560 :       : rt(rt) {
    3431          189042 :         for (CompartmentsIter c(rt); !c.done(); c.next())
    3432          134482 :             c->arenas.copyFreeListsToArenas();
    3433           54560 :     }
    3434                 : 
    3435           54560 :     ~AutoCopyFreeListToArenas() {
    3436          163379 :         for (CompartmentsIter c(rt); !c.done(); c.next())
    3437          108819 :             c->arenas.clearFreeListsInArenas();
    3438           54560 :     }
    3439                 : };
    3440                 : 
    3441                 : static void
    3442               0 : IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
    3443                 : {
    3444               0 :     JSRuntime *rt = cx->runtime;
    3445                 : 
    3446               0 :     AutoUnlockGC unlock(rt);
    3447               0 :     AutoGCSlice slice(cx);
    3448                 : 
    3449               0 :     gc::State initialState = rt->gcIncrementalState;
    3450                 : 
    3451               0 :     if (rt->gcIncrementalState == NO_INCREMENTAL) {
    3452               0 :         JS_ASSERT(!rt->gcIncrementalCompartment);
    3453               0 :         rt->gcIncrementalCompartment = rt->gcCurrentCompartment;
    3454               0 :         rt->gcIncrementalState = MARK_ROOTS;
    3455               0 :         rt->gcLastMarkSlice = false;
    3456                 :     }
    3457                 : 
    3458               0 :     if (rt->gcIncrementalState == MARK_ROOTS) {
    3459               0 :         rt->gcMarker.start(rt);
    3460               0 :         JS_ASSERT(IS_GC_MARKING_TRACER(&rt->gcMarker));
    3461                 : 
    3462               0 :         for (GCCompartmentsIter c(rt); !c.done(); c.next())
    3463               0 :             c->discardJitCode(cx);
    3464                 : 
    3465               0 :         BeginMarkPhase(rt);
    3466                 : 
    3467               0 :         rt->gcIncrementalState = MARK;
    3468                 :     }
    3469                 : 
    3470               0 :     if (rt->gcIncrementalState == MARK) {
    3471               0 :         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK);
    3472               0 :         SliceBudget sliceBudget(budget);
    3473                 : 
    3474                 :         /* If we needed delayed marking for gray roots, then collect until done. */
    3475               0 :         if (!rt->gcMarker.hasBufferedGrayRoots())
    3476               0 :             sliceBudget.reset();
    3477                 : 
    3478               0 :         bool finished = rt->gcMarker.drainMarkStack(sliceBudget);
    3479                 : 
    3480               0 :         if (finished) {
    3481               0 :             JS_ASSERT(rt->gcMarker.isDrained());
    3482               0 :             if (initialState == MARK && !rt->gcLastMarkSlice)
    3483               0 :                 rt->gcLastMarkSlice = true;
    3484                 :             else
    3485               0 :                 rt->gcIncrementalState = SWEEP;
    3486                 :         }
    3487                 :     }
    3488                 : 
    3489               0 :     if (rt->gcIncrementalState == SWEEP) {
    3490               0 :         EndMarkPhase(cx);
    3491               0 :         SweepPhase(cx, gckind);
    3492                 : 
    3493               0 :         rt->gcMarker.stop();
    3494                 : 
    3495                 :         /* JIT code was already discarded during sweeping. */
    3496                 : 
    3497               0 :         rt->gcIncrementalCompartment = NULL;
    3498                 : 
    3499               0 :         rt->gcIncrementalState = NO_INCREMENTAL;
    3500                 :     }
    3501               0 : }
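
/*
 * Editor's note: IncrementalGCSlice above advances a small state machine one
 * step per slice: NO_INCREMENTAL -> MARK_ROOTS -> MARK (possibly over many
 * slices) -> SWEEP -> NO_INCREMENTAL. A schematic version with hypothetical
 * names and no real GC work, kept only to show the control flow:
 */
namespace slice_sketch {

enum State { NO_INCREMENTAL, MARK_ROOTS, MARK, SWEEP };

struct Runtime {
    State state;
    bool lastMarkSlice;
    Runtime() : state(NO_INCREMENTAL), lastMarkSlice(false) {}
};

/* markSliceFinished: whether the mark stack drained within this slice's budget. */
static void incrementalSlice(Runtime &rt, bool markSliceFinished)
{
    State initialState = rt.state;

    if (rt.state == NO_INCREMENTAL) {
        rt.state = MARK_ROOTS;
        rt.lastMarkSlice = false;
    }
    if (rt.state == MARK_ROOTS) {
        /* ...mark roots... */
        rt.state = MARK;
    }
    if (rt.state == MARK && markSliceFinished) {
        /*
         * If marking finished in a slice that began in MARK, take one extra
         * mark slice before sweeping (mirrors gcLastMarkSlice above).
         */
        if (initialState == MARK && !rt.lastMarkSlice)
            rt.lastMarkSlice = true;
        else
            rt.state = SWEEP;
    }
    if (rt.state == SWEEP) {
        /* ...sweep, finalize, and reset... */
        rt.state = NO_INCREMENTAL;
    }
}

} /* namespace slice_sketch */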
    3502                 : 
    3503                 : class IncrementalSafety
    3504                 : {
    3505                 :     const char *reason_;
    3506                 : 
    3507            3023 :     IncrementalSafety(const char *reason) : reason_(reason) {}
    3508                 : 
    3509                 :   public:
    3510            2807 :     static IncrementalSafety Safe() { return IncrementalSafety(NULL); }
    3511             216 :     static IncrementalSafety Unsafe(const char *reason) { return IncrementalSafety(reason); }
    3512                 : 
    3513                 :     typedef void (IncrementalSafety::* ConvertibleToBool)();
    3514               0 :     void nonNull() {}
    3515                 : 
    3516            3023 :     operator ConvertibleToBool() const {
    3517            3023 :         return reason_ == NULL ? &IncrementalSafety::nonNull : 0;
    3518                 :     }
    3519                 : 
    3520               0 :     const char *reason() {
    3521               0 :         JS_ASSERT(reason_);
    3522               0 :         return reason_;
    3523                 :     }
    3524                 : };
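
/*
 * Editor's note: IncrementalSafety above uses the pre-C++11 "safe bool"
 * idiom. It converts to a pointer-to-member-function instead of to bool, so
 * the value can be tested in an if() or with !, but accidental arithmetic or
 * comparisons against unrelated integers will not compile. A stripped-down
 * copy of the idiom (Safety is a hypothetical name, not part of the engine):
 */
#include <cassert>

namespace safebool_sketch {

class Safety {
    const char *reason_;
    Safety(const char *reason) : reason_(reason) {}
    void nonNull() {}

  public:
    static Safety Safe() { return Safety(0); }
    static Safety Unsafe(const char *reason) { return Safety(reason); }

    typedef void (Safety::*ConvertibleToBool)();
    operator ConvertibleToBool() const {
        return reason_ == 0 ? &Safety::nonNull : 0;
    }

    const char *reason() const { assert(reason_); return reason_; }
};

/* Usage mirrors IsIncrementalGCSafe/BudgetIncrementalGC below: */
static bool demo() {
    Safety s = Safety::Unsafe("gcKeepAtoms set");
    return !s;                 /* true: the value reports "unsafe" */
}

} /* namespace safebool_sketch */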
    3525                 : 
    3526                 : static IncrementalSafety
    3527            3023 : IsIncrementalGCSafe(JSRuntime *rt)
    3528                 : {
    3529            3023 :     if (rt->gcCompartmentCreated) {
    3530              48 :         rt->gcCompartmentCreated = false;
    3531              48 :         return IncrementalSafety::Unsafe("compartment created");
    3532                 :     }
    3533                 : 
    3534            2975 :     if (rt->gcKeepAtoms)
    3535             168 :         return IncrementalSafety::Unsafe("gcKeepAtoms set");
    3536                 : 
    3537           12874 :     for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
    3538           10067 :         if (c->activeAnalysis)
    3539               0 :             return IncrementalSafety::Unsafe("activeAnalysis set");
    3540                 :     }
    3541                 : 
    3542            2807 :     if (!rt->gcIncrementalEnabled)
    3543               0 :         return IncrementalSafety::Unsafe("incremental permanently disabled");
    3544                 : 
    3545            2807 :     return IncrementalSafety::Safe();
    3546                 : }
    3547                 : 
    3548                 : static void
    3549               0 : BudgetIncrementalGC(JSRuntime *rt, int64_t *budget)
    3550                 : {
    3551               0 :     IncrementalSafety safe = IsIncrementalGCSafe(rt);
    3552               0 :     if (!safe) {
    3553               0 :         ResetIncrementalGC(rt, safe.reason());
    3554               0 :         *budget = SliceBudget::Unlimited;
    3555               0 :         rt->gcStats.nonincremental(safe.reason());
    3556               0 :         return;
    3557                 :     }
    3558                 : 
    3559               0 :     if (rt->gcMode != JSGC_MODE_INCREMENTAL) {
    3560               0 :         ResetIncrementalGC(rt, "GC mode change");
    3561               0 :         *budget = SliceBudget::Unlimited;
    3562               0 :         rt->gcStats.nonincremental("GC mode");
    3563               0 :         return;
    3564                 :     }
    3565                 : 
    3566                 : #ifdef ANDROID
    3567                 :     JS_ASSERT(rt->gcIncrementalState == NO_INCREMENTAL);
    3568                 :     *budget = SliceBudget::Unlimited;
    3569                 :     rt->gcStats.nonincremental("Android");
    3570                 :     return;
    3571                 : #endif
    3572                 : 
    3573               0 :     if (rt->gcIncrementalState != NO_INCREMENTAL &&
    3574                 :         rt->gcCurrentCompartment != rt->gcIncrementalCompartment)
    3575                 :     {
    3576               0 :         ResetIncrementalGC(rt, "compartment change");
    3577               0 :         return;
    3578                 :     }
    3579                 : 
    3580               0 :     for (CompartmentsIter c(rt); !c.done(); c.next()) {
    3581               0 :         if (c->gcBytes > c->gcTriggerBytes) {
    3582               0 :             *budget = SliceBudget::Unlimited;
    3583               0 :             rt->gcStats.nonincremental("allocation trigger");
    3584               0 :             return;
    3585                 :         }
    3586                 :     }
    3587                 : }
    3588                 : 
    3589                 : /*
    3590                 :  * GC, repeatedly if necessary, until we think we have not created any new
    3591                 :  * garbage. We disable inlining to ensure that the bottom of the stack with
    3592                 :  * possible GC roots recorded in js_GC excludes any pointers we use during the
    3593                 :  * marking implementation.
    3594                 :  */
    3595                 : static JS_NEVER_INLINE void
    3596           51092 : GCCycle(JSContext *cx, JSCompartment *comp, int64_t budget, JSGCInvocationKind gckind)
    3597                 : {
    3598           51092 :     JSRuntime *rt = cx->runtime;
    3599                 : 
    3600           51092 :     JS_ASSERT_IF(comp, comp != rt->atomsCompartment);
    3601           51092 :     JS_ASSERT_IF(comp, rt->gcMode != JSGC_MODE_GLOBAL);
    3602                 : 
    3603                 :     /* Recursive GC is a no-op. */
    3604           51092 :     if (rt->gcRunning)
    3605               0 :         return;
    3606                 : 
    3607          102184 :     AutoGCSession gcsession(rt, comp);
    3608                 : 
    3609                 :     /* Don't GC if we are reporting an OOM. */
    3610           51092 :     if (rt->inOOMReport)
    3611                 :         return;
    3612                 : 
    3613                 : #ifdef JS_THREADSAFE
    3614                 :     /*
    3615                 :      * As we are about to purge caches and clear the mark bits, we must wait for
    3616                 :      * any background finalization to finish. We must also wait for the
    3617                 :      * background allocation to finish so we can avoid taking the GC lock
    3618                 :      * when manipulating the chunks during the GC.
    3619                 :      */
    3620           51092 :     JS_ASSERT(!cx->gcBackgroundFree);
    3621           51092 :     rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
    3622                 : #endif
    3623                 : 
    3624           51092 :     if (budget == SliceBudget::Unlimited) {
    3625                 :         /* If non-incremental GC was requested, reset incremental GC. */
    3626           51092 :         ResetIncrementalGC(rt, "requested");
    3627           51092 :         rt->gcStats.nonincremental("requested");
    3628                 :     } else {
    3629               0 :         BudgetIncrementalGC(rt, &budget);
    3630                 :     }
    3631                 : 
    3632          102184 :     AutoCopyFreeListToArenas copy(rt);
    3633                 : 
    3634           51092 :     if (budget == SliceBudget::Unlimited && rt->gcIncrementalState == NO_INCREMENTAL)
    3635           51092 :         MarkAndSweep(cx, gckind);
    3636                 :     else
    3637               0 :         IncrementalGCSlice(cx, budget, gckind);
    3638                 : 
    3639                 : #ifdef DEBUG
    3640           51092 :     if (rt->gcIncrementalState == NO_INCREMENTAL) {
    3641          148231 :         for (CompartmentsIter c(rt); !c.done(); c.next())
    3642           97139 :             JS_ASSERT(!c->needsBarrier_);
    3643                 :     }
    3644                 : #endif
    3645                 : #ifdef JS_THREADSAFE
    3646           51092 :     if (rt->gcIncrementalState == NO_INCREMENTAL) {
    3647           51092 :         if (cx->gcBackgroundFree) {
    3648           31184 :             JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
    3649           31184 :             cx->gcBackgroundFree = NULL;
    3650           31184 :             rt->gcHelperThread.startBackgroundSweep(cx, gckind == GC_SHRINK);
    3651                 :         }
    3652                 :     }
    3653                 : #endif
    3654                 : }
    3655                 : 
    3656                 : #ifdef JS_GC_ZEAL
    3657                 : static bool
    3658               0 : IsDeterministicGCReason(gcreason::Reason reason)
    3659                 : {
    3660               0 :     if (reason > gcreason::DEBUG_GC && reason != gcreason::CC_FORCED)
    3661               0 :         return false;
    3662                 : 
    3663               0 :     if (reason == gcreason::MAYBEGC)
    3664               0 :         return false;
    3665                 : 
    3666               0 :     return true;
    3667                 : }
    3668                 : #endif
    3669                 : 
    3670                 : static void
    3671           51092 : Collect(JSContext *cx, JSCompartment *comp, int64_t budget,
    3672                 :         JSGCInvocationKind gckind, gcreason::Reason reason)
    3673                 : {
    3674           51092 :     JSRuntime *rt = cx->runtime;
    3675           51092 :     JS_AbortIfWrongThread(rt);
    3676                 : 
    3677                 : #ifdef JS_GC_ZEAL
    3678           51092 :     if (rt->gcDeterministicOnly && !IsDeterministicGCReason(reason))
    3679               0 :         return;
    3680                 : #endif
    3681                 : 
    3682                 :     JS_ASSERT_IF(budget != SliceBudget::Unlimited, JSGC_INCREMENTAL);
    3683                 : 
    3684                 : #ifdef JS_GC_ZEAL
    3685                 :     struct AutoVerifyBarriers {
    3686                 :         JSContext *cx;
    3687                 :         bool inVerify;
    3688           51092 :         AutoVerifyBarriers(JSContext *cx) : cx(cx), inVerify(cx->runtime->gcVerifyData) {
    3689           51092 :             if (inVerify) EndVerifyBarriers(cx);
    3690           51092 :         }
    3691           51092 :         ~AutoVerifyBarriers() { if (inVerify) StartVerifyBarriers(cx); }
    3692          102184 :     } av(cx);
    3693                 : #endif
    3694                 : 
    3695           51092 :     RecordNativeStackTopForGC(rt);
    3696                 : 
    3697                 :     /* This is a heuristic to avoid resets. */
    3698           51092 :     if (rt->gcIncrementalState != NO_INCREMENTAL && !rt->gcIncrementalCompartment)
    3699               0 :         comp = NULL;
    3700                 : 
    3701          102184 :     gcstats::AutoGCSlice agc(rt->gcStats, comp, reason);
    3702                 : 
    3703           51092 :     do {
    3704                 :         /*
    3705                 :          * Let the API user decide to defer a GC if it wants to (unless this
    3706                 :          * is the last context). Invoke the callback regardless.
    3707                 :          */
    3708           51092 :         if (rt->gcIncrementalState == NO_INCREMENTAL) {
    3709           51092 :             if (JSGCCallback callback = rt->gcCallback)
    3710           14728 :                 callback(rt, JSGC_BEGIN);
    3711                 :         }
    3712                 : 
    3713                 :         {
    3714                 :             /* Lock out other GC allocator and collector invocations. */
    3715          102184 :             AutoLockGC lock(rt);
    3716           51092 :             rt->gcPoke = false;
    3717           51092 :             GCCycle(cx, comp, budget, gckind);
    3718                 :         }
    3719                 : 
    3720           51092 :         if (rt->gcIncrementalState == NO_INCREMENTAL) {
    3721           51092 :             if (JSGCCallback callback = rt->gcCallback)
    3722           14728 :                 callback(rt, JSGC_END);
    3723                 :         }
    3724                 : 
    3725                 :         /*
    3726                 :          * On shutdown, iterate until finalizers or the JSGC_END callback
    3727                 :          * stop creating garbage.
    3728                 :          */
    3729           51092 :     } while (!rt->hasContexts() && rt->gcPoke);
    3730                 : }
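
/*
 * Editor's note: the do/while at the end of Collect implements "on shutdown,
 * keep collecting while finalizers or the JSGC_END callback keep poking new
 * garbage into existence". Schematically (runOneCycle and the flags below are
 * hypothetical stand-ins for GCCycle, hasContexts() and gcPoke):
 */
namespace shutdown_sketch {

struct Runtime {
    bool hasContexts;   /* false once the last context is gone (shutdown) */
    bool gcPoke;        /* set whenever new garbage may have been created */
    Runtime() : hasContexts(true), gcPoke(false) {}
};

static void runOneCycle(Runtime &rt)
{
    rt.gcPoke = false;
    /* ...mark, sweep, run finalizers; finalizers may set rt.gcPoke again... */
}

static void collect(Runtime &rt)
{
    do {
        runOneCycle(rt);
    } while (!rt.hasContexts && rt.gcPoke);
}

} /* namespace shutdown_sketch */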
    3731                 : 
    3732                 : namespace js {
    3733                 : 
    3734                 : void
    3735           50917 : GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
    3736                 : {
    3737           50917 :     Collect(cx, comp, SliceBudget::Unlimited, gckind, reason);
    3738           50917 : }
    3739                 : 
    3740                 : void
    3741             175 : GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
    3742                 : {
    3743             175 :     Collect(cx, comp, cx->runtime->gcSliceBudget, gckind, reason);
    3744             175 : }
    3745                 : 
    3746                 : void
    3747               0 : GCDebugSlice(JSContext *cx, int64_t objCount)
    3748                 : {
    3749               0 :     Collect(cx, NULL, SliceBudget::WorkBudget(objCount), GC_NORMAL, gcreason::API);
    3750               0 : }
    3751                 : 
    3752                 : void
    3753               9 : ShrinkGCBuffers(JSRuntime *rt)
    3754                 : {
    3755              18 :     AutoLockGC lock(rt);
    3756               9 :     JS_ASSERT(!rt->gcRunning);
    3757                 : #ifndef JS_THREADSAFE
    3758                 :     ExpireChunksAndArenas(rt, true);
    3759                 : #else
    3760               9 :     rt->gcHelperThread.startBackgroundShrink();
    3761                 : #endif
    3762               9 : }
    3763                 : 
    3764                 : void
    3765             567 : TraceRuntime(JSTracer *trc)
    3766                 : {
    3767             567 :     JS_ASSERT(!IS_GC_MARKING_TRACER(trc));
    3768                 : 
    3769                 : #ifdef JS_THREADSAFE
    3770                 :     {
    3771             567 :         JSRuntime *rt = trc->runtime;
    3772             567 :         if (!rt->gcRunning) {
    3773            1134 :             AutoLockGC lock(rt);
    3774            1134 :             AutoHeapSession session(rt);
    3775                 : 
    3776             567 :             rt->gcHelperThread.waitBackgroundSweepEnd();
    3777            1134 :             AutoUnlockGC unlock(rt);
    3778                 : 
    3779            1134 :             AutoCopyFreeListToArenas copy(rt);
    3780             567 :             RecordNativeStackTopForGC(rt);
    3781             567 :             MarkRuntime(trc);
    3782                 :             return;
    3783                 :         }
    3784                 :     }
    3785                 : #else
    3786                 :     AutoCopyFreeListToArenas copy(trc->runtime);
    3787                 :     RecordNativeStackTopForGC(trc->runtime);
    3788                 : #endif
    3789                 : 
    3790                 :     /*
    3791                 :      * Calls from inside a normal GC or recursive calls are OK and do not
    3792                 :      * require session setup.
    3793                 :      */
    3794               0 :     MarkRuntime(trc);
    3795                 : }
    3796                 : 
    3797                 : struct IterateArenaCallbackOp
    3798                 : {
    3799                 :     JSRuntime *rt;
    3800                 :     void *data;
    3801                 :     IterateArenaCallback callback;
    3802                 :     JSGCTraceKind traceKind;
    3803                 :     size_t thingSize;
    3804             180 :     IterateArenaCallbackOp(JSRuntime *rt, void *data, IterateArenaCallback callback,
    3805                 :                            JSGCTraceKind traceKind, size_t thingSize)
    3806             180 :         : rt(rt), data(data), callback(callback), traceKind(traceKind), thingSize(thingSize)
    3807             180 :     {}
    3808            1089 :     void operator()(Arena *arena) { (*callback)(rt, data, arena, traceKind, thingSize); }
    3809                 : };
    3810                 : 
    3811                 : struct IterateCellCallbackOp
    3812                 : {
    3813                 :     JSRuntime *rt;
    3814                 :     void *data;
    3815                 :     IterateCellCallback callback;
    3816                 :     JSGCTraceKind traceKind;
    3817                 :     size_t thingSize;
    3818             180 :     IterateCellCallbackOp(JSRuntime *rt, void *data, IterateCellCallback callback,
    3819                 :                           JSGCTraceKind traceKind, size_t thingSize)
    3820             180 :         : rt(rt), data(data), callback(callback), traceKind(traceKind), thingSize(thingSize)
    3821             180 :     {}
    3822          142578 :     void operator()(Cell *cell) { (*callback)(rt, data, cell, traceKind, thingSize); }
    3823                 : };
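
/*
 * Editor's note: IterateArenaCallbackOp and IterateCellCallbackOp just bundle
 * a C-style callback with the extra arguments it needs, exposing operator()
 * so a generic template such as ForEachArenaAndCell can invoke either one
 * uniformly. The same shape in standard C++ (all names here are illustrative):
 */
#include <cstddef>

namespace iterate_sketch {

typedef void (*CellCallback)(void *data, void *cell, size_t thingSize);

struct CellCallbackOp {
    void *data;
    CellCallback callback;
    size_t thingSize;

    CellCallbackOp(void *data, CellCallback callback, size_t thingSize)
      : data(data), callback(callback), thingSize(thingSize) {}
    void operator()(void *cell) { (*callback)(data, cell, thingSize); }
};

template <typename CellOp>
static void forEachCell(void **cells, size_t count, CellOp op)
{
    for (size_t i = 0; i < count; i++)
        op(cells[i]);
}

} /* namespace iterate_sketch */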
    3824                 : 
    3825                 : void
    3826               3 : IterateCompartmentsArenasCells(JSRuntime *rt, void *data,
    3827                 :                                JSIterateCompartmentCallback compartmentCallback,
    3828                 :                                IterateArenaCallback arenaCallback,
    3829                 :                                IterateCellCallback cellCallback)
    3830                 : {
    3831               3 :     JS_ASSERT(!rt->gcRunning);
    3832                 : 
    3833               6 :     AutoLockGC lock(rt);
    3834               6 :     AutoHeapSession session(rt);
    3835                 : #ifdef JS_THREADSAFE
    3836               3 :     rt->gcHelperThread.waitBackgroundSweepEnd();
    3837                 : #endif
    3838               6 :     AutoUnlockGC unlock(rt);
    3839                 : 
    3840               6 :     AutoCopyFreeListToArenas copy(rt);
    3841              12 :     for (CompartmentsIter c(rt); !c.done(); c.next()) {
    3842               9 :         (*compartmentCallback)(rt, data, c);
    3843                 : 
    3844             189 :         for (size_t thingKind = 0; thingKind != FINALIZE_LIMIT; thingKind++) {
    3845             180 :             JSGCTraceKind traceKind = MapAllocToTraceKind(AllocKind(thingKind));
    3846             180 :             size_t thingSize = Arena::thingSize(AllocKind(thingKind));
    3847             180 :             IterateArenaCallbackOp arenaOp(rt, data, arenaCallback, traceKind, thingSize);
    3848             180 :             IterateCellCallbackOp cellOp(rt, data, cellCallback, traceKind, thingSize);
    3849             180 :             ForEachArenaAndCell(c, AllocKind(thingKind), arenaOp, cellOp);
    3850                 :         }
    3851                 :     }
    3852               3 : }
    3853                 : 
    3854                 : void
    3855               3 : IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback)
    3856                 : {
    3857                 :     /* :XXX: Any way to common this preamble with IterateCompartmentsArenasCells? */
    3858               3 :     JS_ASSERT(!rt->gcRunning);
    3859                 : 
    3860               6 :     AutoLockGC lock(rt);
    3861               6 :     AutoHeapSession session(rt);
    3862                 : #ifdef JS_THREADSAFE
    3863               3 :     rt->gcHelperThread.waitBackgroundSweepEnd();
    3864                 : #endif
    3865               6 :     AutoUnlockGC unlock(rt);
    3866                 : 
    3867              12 :     for (js::GCChunkSet::Range r = rt->gcChunkSet.all(); !r.empty(); r.popFront())
    3868               9 :         chunkCallback(rt, data, r.front());
    3869               3 : }
    3870                 : 
    3871                 : void
    3872               0 : IterateCells(JSRuntime *rt, JSCompartment *compartment, AllocKind thingKind,
    3873                 :              void *data, IterateCellCallback cellCallback)
    3874                 : {
    3875                 :     /* :XXX: Any way to common this preamble with IterateCompartmentsArenasCells? */
    3876               0 :     JS_ASSERT(!rt->gcRunning);
    3877                 : 
    3878               0 :     AutoLockGC lock(rt);
    3879               0 :     AutoHeapSession session(rt);
    3880                 : #ifdef JS_THREADSAFE
    3881               0 :     rt->gcHelperThread.waitBackgroundSweepEnd();
    3882                 : #endif
    3883               0 :     AutoUnlockGC unlock(rt);
    3884                 : 
    3885               0 :     AutoCopyFreeListToArenas copy(rt);
    3886                 : 
    3887               0 :     JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
    3888               0 :     size_t thingSize = Arena::thingSize(thingKind);
    3889                 : 
    3890               0 :     if (compartment) {
    3891               0 :         for (CellIterUnderGC i(compartment, thingKind); !i.done(); i.next())
    3892               0 :             cellCallback(rt, data, i.getCell(), traceKind, thingSize);
    3893                 :     } else {
    3894               0 :         for (CompartmentsIter c(rt); !c.done(); c.next()) {
    3895               0 :             for (CellIterUnderGC i(c, thingKind); !i.done(); i.next())
    3896               0 :                 cellCallback(rt, data, i.getCell(), traceKind, thingSize);
    3897                 :         }
    3898                 :     }
    3899               0 : }
    3900                 : 
    3901                 : namespace gc {
    3902                 : 
    3903                 : JSCompartment *
    3904           25666 : NewCompartment(JSContext *cx, JSPrincipals *principals)
    3905                 : {
    3906           25666 :     JSRuntime *rt = cx->runtime;
    3907           25666 :     JS_AbortIfWrongThread(rt);
    3908                 : 
    3909           25666 :     JSCompartment *compartment = cx->new_<JSCompartment>(rt);
    3910           25666 :     if (compartment && compartment->init(cx)) {
    3911                 :         // Any compartment with the trusted principals -- and there can be
    3912                 :         // multiple -- is a system compartment.
    3913           25666 :         compartment->isSystemCompartment = principals && rt->trustedPrincipals() == principals;
    3914           25666 :         if (principals) {
    3915            3372 :             compartment->principals = principals;
    3916            3372 :             JS_HoldPrincipals(principals);
    3917                 :         }
    3918                 : 
    3919           25666 :         compartment->setGCLastBytes(8192, 8192, GC_NORMAL);
    3920                 : 
    3921                 :         /*
    3922                 :          * Before reporting the OOM condition, |lock| needs to be cleaned up,
    3923                 :          * hence the scoping.
    3924                 :          */
    3925                 :         {
    3926           51332 :             AutoLockGC lock(rt);
    3927                 : 
    3928                 :             /*
    3929                 :              * If we're in the middle of an incremental GC, we cancel
    3930                 :              * it. Otherwise we might fail to mark the newly created
    3931                 :              * compartment fully.
    3932                 :              */
    3933           25666 :             if (rt->gcIncrementalState == MARK)
    3934              81 :                 rt->gcCompartmentCreated = true;
    3935                 : 
    3936           25666 :             if (rt->compartments.append(compartment))
    3937           25666 :                 return compartment;
    3938                 :         }
    3939                 : 
    3940               0 :         js_ReportOutOfMemory(cx);
    3941                 :     }
    3942               0 :     Foreground::delete_(compartment);
    3943               0 :     return NULL;
    3944                 : }
    3945                 : 
    3946                 : void
    3947            9042 : RunDebugGC(JSContext *cx)
    3948                 : {
    3949                 : #ifdef JS_GC_ZEAL
    3950            9042 :     JSRuntime *rt = cx->runtime;
    3951                 : 
    3952                 :     /*
    3953                 :      * If rt->gcDebugCompartmentGC is true, only GC the current
    3954                 :      * compartment. But don't GC the atoms compartment.
    3955                 :      */
    3956            9042 :     rt->gcTriggerCompartment = rt->gcDebugCompartmentGC ? cx->compartment : NULL;
    3957            9042 :     if (rt->gcTriggerCompartment == rt->atomsCompartment)
    3958               0 :         rt->gcTriggerCompartment = NULL;
    3959                 : 
    3960            9042 :     RunLastDitchGC(cx, gcreason::DEBUG_GC);
    3961                 : #endif
    3962            9042 : }
    3963                 : 
    3964                 : void
    3965               0 : SetDeterministicGC(JSContext *cx, bool enabled)
    3966                 : {
    3967                 : #ifdef JS_GC_ZEAL
    3968               0 :     JSRuntime *rt = cx->runtime;
    3969               0 :     rt->gcDeterministicOnly = enabled;
    3970                 : #endif
    3971               0 : }
    3972                 : 
    3973                 : #if defined(DEBUG) && defined(JSGC_ROOT_ANALYSIS) && !defined(JS_THREADSAFE)
    3974                 : 
    3975                 : static void
    3976                 : CheckStackRoot(JSTracer *trc, uintptr_t *w)
    3977                 : {
    3978                 :     /* Mark memory as defined for valgrind, as in MarkWordConservatively. */
    3979                 : #ifdef JS_VALGRIND
    3980                 :     VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
    3981                 : #endif
    3982                 : 
    3983                 :     ConservativeGCTest test = MarkIfGCThingWord(trc, *w, DONT_MARK_THING);
    3984                 : 
    3985                 :     if (test == CGCT_VALID) {
    3986                 :         JSContext *iter = NULL;
    3987                 :         bool matched = false;
    3988                 :         JSRuntime *rt = trc->runtime;
    3989                 :         for (unsigned i = 0; i < THING_ROOT_COUNT; i++) {
    3990                 :             Root<Cell*> *rooter = rt->thingGCRooters[i];
    3991                 :             while (rooter) {
    3992                 :                 if (rooter->address() == (Cell **) w)
    3993                 :                     matched = true;
    3994                 :                 rooter = rooter->previous();
    3995                 :             }
    3996                 :         }
    3997                 :         CheckRoot *check = rt->checkGCRooters;
    3998                 :         while (check) {
    3999                 :             if (check->contains(static_cast<uint8_t*>(w), sizeof(w)))
    4000                 :                 matched = true;
    4001                 :             check = check->previous();
    4002                 :         }
    4003                 :         if (!matched) {
    4004                 :             /*
    4005                 :              * Only poison the last byte in the word. It is easy to get
    4006                 :              * accidental collisions when a value that does not occupy a full
    4007                 :              * word is used to overwrite a now-dead GC thing pointer. In this
    4008                 :              * case we want to avoid damaging the smaller value.
    4009                 :              */
    4010                 :             PoisonPtr(w);
    4011                 :         }
    4012                 :     }
    4013                 : }
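                 : 
                 : /*
                 :  * Editor's illustrative sketch, not part of the original source: one way the
                 :  * "poison only the last byte" idea above could be written. The function name
                 :  * and the 0xDA poison value are hypothetical; the engine's real PoisonPtr is
                 :  * defined elsewhere.
                 :  */
                 : static inline void
                 : PoisonOneByteSketch(uintptr_t *w)
                 : {
                 :     /*
                 :      * Overwrite only the highest-addressed byte of the word so a conservative
                 :      * scan no longer sees a plausible GC-thing pointer, while leaving the
                 :      * other bytes, where a smaller overwriting value typically lives,
                 :      * untouched.
                 :      */
                 :     reinterpret_cast<uint8_t *>(w)[sizeof(uintptr_t) - 1] = 0xDA;
                 : }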
    4014                 : 
    4015                 : static void
    4016                 : CheckStackRootsRange(JSTracer *trc, uintptr_t *begin, uintptr_t *end)
    4017                 : {
    4018                 :     JS_ASSERT(begin <= end);
    4019                 :     for (uintptr_t *i = begin; i != end; ++i)
    4020                 :         CheckStackRoot(trc, i);
    4021                 : }
    4022                 : 
    4023                 : void
    4024                 : CheckStackRoots(JSContext *cx)
    4025                 : {
    4026                 :     AutoCopyFreeListToArenas copy(cx->runtime);
    4027                 : 
    4028                 :     JSTracer checker;
    4029                 :     JS_TracerInit(&checker, cx, EmptyMarkCallback);
    4030                 : 
    4031                 :     ThreadData *td = JS_THREAD_DATA(cx);
    4032                 : 
    4033                 :     ConservativeGCThreadData *ctd = &td->conservativeGC;
    4034                 :     ctd->recordStackTop();
    4035                 : 
    4036                 :     JS_ASSERT(ctd->hasStackToScan());
    4037                 :     uintptr_t *stackMin, *stackEnd;
    4038                 : #if JS_STACK_GROWTH_DIRECTION > 0
    4039                 :     stackMin = td->nativeStackBase;
    4040                 :     stackEnd = ctd->nativeStackTop;
    4041                 : #else
    4042                 :     stackMin = ctd->nativeStackTop + 1;
    4043                 :     stackEnd = td->nativeStackBase;
    4044                 : #endif
    4045                 : 
    4046                 :     JS_ASSERT(stackMin <= stackEnd);
    4047                 :     CheckStackRootsRange(&checker, stackMin, stackEnd);
    4048                 :     CheckStackRootsRange(&checker, ctd->registerSnapshot.words,
    4049                 :                          ArrayEnd(ctd->registerSnapshot.words));
    4050                 : }
    4051                 : 
    4052                 : #endif /* DEBUG && JSGC_ROOT_ANALYSIS && !JS_THREADSAFE */
    4053                 : 
    4054                 : #ifdef JS_GC_ZEAL
    4055                 : 
    4056                 : /*
    4057                 :  * Write barrier verification
    4058                 :  *
    4059                 :  * The next few functions are for incremental write barrier verification. When
    4060                 :  * StartVerifyBarriers is called, a snapshot is taken of all objects in the GC
    4061                 :  * heap and saved in an explicit graph data structure. Later, EndVerifyBarriers
    4062                 :  * traverses the heap again. Any pointer values that were in the snapshot and
    4063                 :  * are no longer found must be marked; otherwise an assertion triggers. Note
    4064                 :  * that we must not GC in between starting and finishing a verification phase.
    4065                 :  *
     4066                 :  * The VerifyBarriers function is a shorthand: if a verification phase is
     4067                 :  * currently running, it ends that phase; otherwise it starts a new one.
     4068                 :  *
     4069                 :  * MaybeVerifyBarriers is the frequency-gated variant. The user can adjust the
     4070                 :  * zeal frequency so that it is a no-op for all but one out of every N calls.
     4071                 :  * When it does fire, or when its |always| parameter is true, it ends any
     4072                 :  * running phase and starts a new one.
     4073                 :  */
    4074                 : 
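                 : /*
                 :  * Editor's illustrative sketch, not part of the original source: the invariant
                 :  * the verifier enforces, written out directly. SnapshotEdge, GetSnapshotEdge,
                 :  * NumSnapshotEdges and HeapStillHasEdge are hypothetical helpers; the real
                 :  * EndVerifyBarriers below implements the same check by cancelling still-present
                 :  * edges out of the snapshot and asserting on whatever remains.
                 :  */
                 : #if 0 /* sketch only; the helpers named above are hypothetical */
                 : static void
                 : CheckSnapshotInvariantSketch(JSRuntime *rt)
                 : {
                 :     for (size_t i = 0; i < NumSnapshotEdges(rt); i++) {
                 :         SnapshotEdge e = GetSnapshotEdge(rt, i);
                 :         if (!HeapStillHasEdge(e.source, e.target)) {
                 :             /*
                 :              * The edge was cut after the snapshot was taken, so the pre-write
                 :              * barrier must have marked the old target.
                 :              */
                 :             JS_ASSERT(IsMarkedOrAllocated(e.target));
                 :         }
                 :     }
                 : }
                 : #endif
                 : 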
    4075                 : struct EdgeValue
    4076                 : {
    4077                 :     void *thing;
    4078                 :     JSGCTraceKind kind;
    4079                 :     char *label;
    4080                 : };
    4081                 : 
    4082                 : struct VerifyNode
    4083                 : {
    4084                 :     void *thing;
    4085                 :     JSGCTraceKind kind;
    4086                 :     uint32_t count;
    4087                 :     EdgeValue edges[1];
    4088                 : };
    4089                 : 
    4090                 : typedef HashMap<void *, VerifyNode *, DefaultHasher<void *>, SystemAllocPolicy> NodeMap;
    4091                 : 
    4092                 : /*
    4093                 :  * The verifier data structures are simple. The entire graph is stored in a
    4094                 :  * single block of memory. At the beginning is a VerifyNode for the root
    4095                 :  * node. It is followed by a sequence of EdgeValues--the exact number is given
    4096                 :  * in the node. After the edges come more nodes and their edges.
    4097                 :  *
    4098                 :  * The edgeptr and term fields are used to allocate out of the block of memory
    4099                 :  * for the graph. If we run out of memory (i.e., if edgeptr goes beyond term),
    4100                 :  * we just abandon the verification.
    4101                 :  *
    4102                 :  * The nodemap field is a hashtable that maps from the address of the GC thing
    4103                 :  * to the VerifyNode that represents it.
    4104                 :  */
    4105                 : struct VerifyTracer : JSTracer {
    4106                 :     /* The gcNumber when the verification began. */
    4107                 :     uint64_t number;
    4108                 : 
     4109                 :     /* This counts up to the runtime's gcZealFrequency to decide whether to verify. */
    4110                 :     uint32_t count;
    4111                 : 
    4112                 :     /* This graph represents the initial GC "snapshot". */
    4113                 :     VerifyNode *curnode;
    4114                 :     VerifyNode *root;
    4115                 :     char *edgeptr;
    4116                 :     char *term;
    4117                 :     NodeMap nodemap;
    4118                 : 
    4119            1499 :     VerifyTracer() : root(NULL) {}
    4120            1499 :     ~VerifyTracer() { js_free(root); }
    4121                 : };
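                 : 
                 : /*
                 :  * Editor's illustrative sketch, not part of the original source: the
                 :  * bump-allocation arithmetic implied by the layout described above. A
                 :  * VerifyNode embeds its first EdgeValue, so a node with |count| edges spans
                 :  * sizeof(VerifyNode) - sizeof(EdgeValue) + count * sizeof(EdgeValue) bytes,
                 :  * which is exactly the stride that NextNode() computes further down.
                 :  */
                 : static inline size_t
                 : VerifyNodeStride(uint32_t count)
                 : {
                 :     size_t header = sizeof(VerifyNode) - sizeof(EdgeValue); /* node without its embedded edge */
                 :     return header + count * sizeof(EdgeValue);              /* plus one slot per edge */
                 : }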
    4122                 : 
    4123                 : /*
    4124                 :  * This function builds up the heap snapshot by adding edges to the current
    4125                 :  * node.
    4126                 :  */
    4127                 : static void
    4128        18878894 : AccumulateEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
    4129                 : {
    4130        18878894 :     VerifyTracer *trc = (VerifyTracer *)jstrc;
    4131                 : 
    4132        18878894 :     trc->edgeptr += sizeof(EdgeValue);
    4133        18878894 :     if (trc->edgeptr >= trc->term) {
    4134               0 :         trc->edgeptr = trc->term;
    4135               0 :         return;
    4136                 :     }
    4137                 : 
    4138        18878894 :     VerifyNode *node = trc->curnode;
    4139        18878894 :     uint32_t i = node->count;
    4140                 : 
    4141        18878894 :     node->edges[i].thing = *thingp;
    4142        18878894 :     node->edges[i].kind = kind;
    4143        18878894 :     node->edges[i].label = trc->debugPrinter ? NULL : (char *)trc->debugPrintArg;
    4144        18878894 :     node->count++;
    4145                 : }
    4146                 : 
    4147                 : static VerifyNode *
    4148        18880393 : MakeNode(VerifyTracer *trc, void *thing, JSGCTraceKind kind)
    4149                 : {
    4150        18880393 :     NodeMap::AddPtr p = trc->nodemap.lookupForAdd(thing);
    4151        18880393 :     if (!p) {
    4152        11393978 :         VerifyNode *node = (VerifyNode *)trc->edgeptr;
    4153        11393978 :         trc->edgeptr += sizeof(VerifyNode) - sizeof(EdgeValue);
    4154        11393978 :         if (trc->edgeptr >= trc->term) {
    4155               0 :             trc->edgeptr = trc->term;
    4156               0 :             return NULL;
    4157                 :         }
    4158                 : 
    4159        11393978 :         node->thing = thing;
    4160        11393978 :         node->count = 0;
    4161        11393978 :         node->kind = kind;
    4162        11393978 :         trc->nodemap.add(p, thing, node);
    4163        11393978 :         return node;
    4164                 :     }
    4165         7486415 :     return NULL;
    4166                 : }
    4167                 : 
    4168                 : static
    4169                 : VerifyNode *
    4170        21558815 : NextNode(VerifyNode *node)
    4171                 : {
    4172        21558815 :     if (node->count == 0)
    4173        14505682 :         return (VerifyNode *)((char *)node + sizeof(VerifyNode) - sizeof(EdgeValue));
    4174                 :     else
    4175                 :         return (VerifyNode *)((char *)node + sizeof(VerifyNode) +
    4176         7053133 :                               sizeof(EdgeValue)*(node->count - 1));
    4177                 : }
    4178                 : 
    4179                 : static void
    4180            1624 : StartVerifyBarriers(JSContext *cx)
    4181                 : {
    4182            1624 :     JSRuntime *rt = cx->runtime;
    4183                 : 
    4184            1624 :     if (rt->gcVerifyData || rt->gcIncrementalState != NO_INCREMENTAL)
    4185               0 :         return;
    4186                 : 
    4187            3248 :     AutoLockGC lock(rt);
    4188            3248 :     AutoHeapSession session(rt);
    4189                 : 
    4190            1624 :     if (!IsIncrementalGCSafe(rt))
    4191                 :         return;
    4192                 : 
    4193                 : #ifdef JS_THREADSAFE
    4194            1499 :     rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
    4195                 : #endif
    4196                 : 
    4197            2998 :     AutoUnlockGC unlock(rt);
    4198                 : 
    4199            2998 :     AutoCopyFreeListToArenas copy(rt);
    4200            1499 :     RecordNativeStackTopForGC(rt);
    4201                 : 
    4202            4397 :     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
    4203            2898 :         r.front()->bitmap.clear();
    4204                 : 
    4205            6777 :     for (CompartmentsIter c(rt); !c.done(); c.next())
    4206            5278 :         c->discardJitCode(cx);
    4207                 : 
    4208            1499 :     PurgeRuntime(rt);
    4209                 : 
    4210            1499 :     VerifyTracer *trc = new (js_malloc(sizeof(VerifyTracer))) VerifyTracer;
    4211                 : 
    4212            1499 :     rt->gcNumber++;
    4213            1499 :     trc->number = rt->gcNumber;
    4214            1499 :     trc->count = 0;
    4215                 : 
    4216            1499 :     JS_TracerInit(trc, rt, AccumulateEdge);
    4217                 : 
    4218            1499 :     const size_t size = 64 * 1024 * 1024;
    4219            1499 :     trc->root = (VerifyNode *)js_malloc(size);
    4220            1499 :     JS_ASSERT(trc->root);
    4221            1499 :     trc->edgeptr = (char *)trc->root;
    4222            1499 :     trc->term = trc->edgeptr + size;
    4223                 : 
    4224            1499 :     trc->nodemap.init();
    4225                 : 
    4226                 :     /* Create the root node. */
    4227            1499 :     trc->curnode = MakeNode(trc, NULL, JSGCTraceKind(0));
    4228                 : 
    4229                 :     /* We want MarkRuntime to save the roots to gcSavedRoots. */
    4230            1499 :     rt->gcIncrementalState = MARK_ROOTS;
    4231                 : 
    4232                 :     /* Make all the roots be edges emanating from the root node. */
    4233            1499 :     MarkRuntime(trc);
    4234                 : 
    4235            1499 :     VerifyNode *node = trc->curnode;
    4236            1499 :     if (trc->edgeptr == trc->term)
    4237               0 :         goto oom;
    4238                 : 
    4239                 :     /* For each edge, make a node for it if one doesn't already exist. */
    4240        11396976 :     while ((char *)node < trc->edgeptr) {
    4241        30272872 :         for (uint32_t i = 0; i < node->count; i++) {
    4242        18878894 :             EdgeValue &e = node->edges[i];
    4243        18878894 :             VerifyNode *child = MakeNode(trc, e.thing, e.kind);
    4244        18878894 :             if (child) {
    4245        11392479 :                 trc->curnode = child;
    4246        11392479 :                 JS_TraceChildren(trc, e.thing, e.kind);
    4247                 :             }
    4248        18878894 :             if (trc->edgeptr == trc->term)
    4249               0 :                 goto oom;
    4250                 :         }
    4251                 : 
    4252        11393978 :         node = NextNode(node);
    4253                 :     }
    4254                 : 
    4255            1499 :     rt->gcVerifyData = trc;
    4256            1499 :     rt->gcIncrementalState = MARK;
    4257            1499 :     rt->gcMarker.start(rt);
    4258            6777 :     for (CompartmentsIter c(rt); !c.done(); c.next()) {
    4259            5278 :         c->needsBarrier_ = true;
    4260            5278 :         c->arenas.prepareForIncrementalGC(rt);
    4261                 :     }
    4262                 : 
    4263                 :     return;
    4264                 : 
    4265                 : oom:
    4266               0 :     rt->gcIncrementalState = NO_INCREMENTAL;
    4267               0 :     trc->~VerifyTracer();
    4268            1499 :     js_free(trc);
    4269                 : }
    4270                 : 
    4271                 : static void
    4272             759 : MarkFromAutorooter(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
    4273                 : {
    4274             759 :     static_cast<Cell *>(*thingp)->markIfUnmarked();
    4275             759 : }
    4276                 : 
    4277                 : static bool
    4278            2519 : IsMarkedOrAllocated(Cell *cell)
    4279                 : {
    4280            2519 :     return cell->isMarked() || cell->arenaHeader()->allocatedDuringIncremental;
    4281                 : }
    4282                 : 
     4283                 : static const uint32_t MAX_VERIFIER_EDGES = 1000;
    4284                 : 
    4285                 : /*
    4286                 :  * This function is called by EndVerifyBarriers for every heap edge. If the edge
    4287                 :  * already existed in the original snapshot, we "cancel it out" by overwriting
    4288                 :  * it with NULL. EndVerifyBarriers later asserts that the remaining non-NULL
    4289                 :  * edges (i.e., the ones from the original snapshot that must have been
    4290                 :  * modified) must point to marked objects.
    4291                 :  */
    4292                 : static void
    4293        10642730 : CheckEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
    4294                 : {
    4295        10642730 :     VerifyTracer *trc = (VerifyTracer *)jstrc;
    4296        10642730 :     VerifyNode *node = trc->curnode;
    4297                 : 
    4298                 :     /* Avoid n^2 behavior. */
    4299        10642730 :     if (node->count > MAX_VERIFIER_EDGES)
    4300               0 :         return;
    4301                 : 
    4302        49199592 :     for (uint32_t i = 0; i < node->count; i++) {
    4303        49197752 :         if (node->edges[i].thing == *thingp) {
    4304        10640890 :             JS_ASSERT(node->edges[i].kind == kind);
    4305        10640890 :             node->edges[i].thing = NULL;
    4306        10640890 :             return;
    4307                 :         }
    4308                 :     }
    4309                 : 
    4310                 :     /*
    4311                 :      * Anything that is reachable now should have been reachable before, or else
    4312                 :      * it should be marked.
    4313                 :      */
    4314            1840 :     NodeMap::Ptr p = trc->nodemap.lookup(*thingp);
    4315            1840 :     JS_ASSERT_IF(!p, IsMarkedOrAllocated(static_cast<Cell *>(*thingp)));
    4316                 : }
    4317                 : 
    4318                 : static void
    4319         6401906 : CheckReachable(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
    4320                 : {
    4321         6401906 :     VerifyTracer *trc = (VerifyTracer *)jstrc;
    4322         6401906 :     NodeMap::Ptr p = trc->nodemap.lookup(*thingp);
    4323         6401906 :     JS_ASSERT_IF(!p, IsMarkedOrAllocated(static_cast<Cell *>(*thingp)));
    4324         6401906 : }
    4325                 : 
    4326                 : static void
    4327            1399 : EndVerifyBarriers(JSContext *cx)
    4328                 : {
    4329            1399 :     JSRuntime *rt = cx->runtime;
    4330                 : 
    4331            2798 :     AutoLockGC lock(rt);
    4332            2798 :     AutoHeapSession session(rt);
    4333                 : 
    4334                 : #ifdef JS_THREADSAFE
    4335            1399 :     rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
    4336                 : #endif
    4337                 : 
    4338            2798 :     AutoUnlockGC unlock(rt);
    4339                 : 
    4340            2798 :     AutoCopyFreeListToArenas copy(rt);
    4341            1399 :     RecordNativeStackTopForGC(rt);
    4342                 : 
    4343            1399 :     VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData;
    4344                 : 
    4345            1399 :     if (!trc)
    4346                 :         return;
    4347                 : 
    4348                 :     /*
    4349                 :      * We need to bump gcNumber so that the methodjit knows that jitcode has
    4350                 :      * been discarded.
    4351                 :      */
    4352            1399 :     JS_ASSERT(trc->number == rt->gcNumber);
    4353            1399 :     rt->gcNumber++;
    4354                 : 
    4355                 :     /* We need to disable barriers before tracing, which may invoke barriers. */
    4356            6658 :     for (CompartmentsIter c(rt); !c.done(); c.next())
    4357            5259 :         c->needsBarrier_ = false;
    4358                 : 
    4359            6658 :     for (CompartmentsIter c(rt); !c.done(); c.next())
    4360            5259 :         c->discardJitCode(cx);
    4361                 : 
    4362            1399 :     rt->gcVerifyData = NULL;
    4363            1399 :     rt->gcIncrementalState = NO_INCREMENTAL;
    4364                 : 
    4365            1399 :     JS_TracerInit(trc, rt, MarkFromAutorooter);
    4366                 : 
    4367            1399 :     AutoGCRooter::traceAll(trc);
    4368                 : 
    4369            1399 :     if (IsIncrementalGCSafe(rt)) {
    4370                 :         /*
    4371                 :          * Verify that all the current roots were reachable previously, or else
    4372                 :          * are marked.
    4373                 :          */
    4374            1308 :         JS_TracerInit(trc, rt, CheckReachable);
    4375            1308 :         MarkRuntime(trc, true);
    4376                 : 
    4377            1308 :         JS_TracerInit(trc, rt, CheckEdge);
    4378                 : 
    4379                 :         /* Start after the roots. */
    4380            1308 :         VerifyNode *node = NextNode(trc->root);
    4381        10166145 :         while ((char *)node < trc->edgeptr) {
    4382        10163529 :             trc->curnode = node;
    4383        10163529 :             JS_TraceChildren(trc, node->thing, node->kind);
    4384                 : 
    4385        10163529 :             if (node->count <= MAX_VERIFIER_EDGES) {
    4386        20805311 :                 for (uint32_t i = 0; i < node->count; i++) {
    4387        10641782 :                     void *thing = node->edges[i].thing;
    4388        10641782 :                     JS_ASSERT_IF(thing, IsMarkedOrAllocated(static_cast<Cell *>(thing)));
    4389                 :                 }
    4390                 :             }
    4391                 : 
    4392        10163529 :             node = NextNode(node);
    4393                 :         }
    4394                 :     }
    4395                 : 
    4396            1399 :     rt->gcMarker.reset();
    4397            1399 :     rt->gcMarker.stop();
    4398                 : 
    4399            1399 :     trc->~VerifyTracer();
    4400            2798 :     js_free(trc);
    4401                 : }
    4402                 : 
    4403                 : void
    4404           19908 : FinishVerifier(JSRuntime *rt)
    4405                 : {
    4406           19908 :     if (VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData) {
    4407             100 :         trc->~VerifyTracer();
    4408             100 :         js_free(trc);
    4409                 :     }
    4410           19908 : }
    4411                 : 
    4412                 : void
    4413              18 : VerifyBarriers(JSContext *cx)
    4414                 : {
    4415              18 :     JSRuntime *rt = cx->runtime;
    4416              18 :     if (rt->gcVerifyData)
    4417               9 :         EndVerifyBarriers(cx);
    4418                 :     else
    4419               9 :         StartVerifyBarriers(cx);
    4420              18 : }
    4421                 : 
    4422                 : void
    4423     -1793801277 : MaybeVerifyBarriers(JSContext *cx, bool always)
    4424                 : {
    4425     -1793801277 :     if (cx->runtime->gcZeal() != ZealVerifierValue)
    4426     -1793810139 :         return;
    4427                 : 
    4428            8862 :     uint32_t freq = cx->runtime->gcZealFrequency;
    4429                 : 
    4430            8862 :     JSRuntime *rt = cx->runtime;
    4431            8862 :     if (VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData) {
    4432            8637 :         if (++trc->count < freq && !always)
    4433            7372 :             return;
    4434                 : 
    4435            1265 :         EndVerifyBarriers(cx);
    4436                 :     }
    4437            1490 :     StartVerifyBarriers(cx);
    4438                 : }
    4439                 : 
    4440                 : #endif /* JS_GC_ZEAL */
    4441                 : 
    4442                 : } /* namespace gc */
    4443                 : 
    4444               0 : static void ReleaseAllJITCode(JSContext *cx)
    4445                 : {
    4446                 : #ifdef JS_METHODJIT
    4447               0 :     for (GCCompartmentsIter c(cx->runtime); !c.done(); c.next()) {
    4448               0 :         mjit::ClearAllFrames(c);
    4449               0 :         for (CellIter i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
    4450               0 :             JSScript *script = i.get<JSScript>();
    4451               0 :             mjit::ReleaseScriptCode(cx, script);
    4452                 :         }
    4453                 :     }
    4454                 : #endif
    4455               0 : }
    4456                 : 
    4457                 : /*
    4458                 :  * There are three possible PCCount profiling states:
    4459                 :  *
    4460                 :  * 1. None: Neither scripts nor the runtime have counter information.
    4461                 :  * 2. Profile: Active scripts have counter information, the runtime does not.
    4462                 :  * 3. Query: Scripts do not have counter information, the runtime does.
    4463                 :  *
    4464                 :  * When starting to profile scripts, counting begins immediately, with all JIT
    4465                 :  * code discarded and recompiled with counters as necessary. Active interpreter
    4466                 :  * frames will not begin profiling until they begin executing another script
    4467                 :  * (via a call or return).
    4468                 :  *
     4469                 :  * The API functions below manage transitions between these states according
     4470                 :  * to the following table.
    4471                 :  *
    4472                 :  *                                  Old State
    4473                 :  *                          -------------------------
    4474                 :  * Function                 None      Profile   Query
    4475                 :  * --------
    4476                 :  * StartPCCountProfiling    Profile   Profile   Profile
    4477                 :  * StopPCCountProfiling     None      Query     Query
    4478                 :  * PurgePCCounts            None      None      None
    4479                 :  */
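                 : 
                 : /*
                 :  * Editor's illustrative sketch, not part of the original source: one walk
                 :  * through the state table above using the API functions defined below in this
                 :  * file. ExercisePCCountStates is a hypothetical driver; each call is annotated
                 :  * with the transition it performs.
                 :  */
                 : #if 0 /* sketch only; the functions it calls are defined later in this file */
                 : static void
                 : ExercisePCCountStates(JSContext *cx)
                 : {
                 :     StartPCCountProfiling(cx); /* None -> Profile: discard JIT code, start counting */
                 :     StopPCCountProfiling(cx);  /* Profile -> Query: move counters into the runtime */
                 :     StartPCCountProfiling(cx); /* Query -> Profile: release stored counters, count again */
                 :     StopPCCountProfiling(cx);  /* Profile -> Query */
                 :     PurgePCCounts(cx);         /* Query -> None: drop the runtime's counters */
                 :     PurgePCCounts(cx);         /* None -> None: no-op */
                 : }
                 : #endif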
    4480                 : 
    4481                 : static void
    4482               0 : ReleaseScriptPCCounters(JSContext *cx)
    4483                 : {
    4484               0 :     JSRuntime *rt = cx->runtime;
    4485               0 :     JS_ASSERT(rt->scriptPCCounters);
    4486                 : 
    4487               0 :     ScriptOpcodeCountsVector &vec = *rt->scriptPCCounters;
    4488                 : 
    4489               0 :     for (size_t i = 0; i < vec.length(); i++)
    4490               0 :         vec[i].counters.destroy(cx);
    4491                 : 
    4492               0 :     cx->delete_(rt->scriptPCCounters);
    4493               0 :     rt->scriptPCCounters = NULL;
    4494               0 : }
    4495                 : 
    4496                 : JS_FRIEND_API(void)
    4497               0 : StartPCCountProfiling(JSContext *cx)
    4498                 : {
    4499               0 :     JSRuntime *rt = cx->runtime;
    4500                 : 
    4501               0 :     if (rt->profilingScripts)
    4502               0 :         return;
    4503                 : 
    4504               0 :     if (rt->scriptPCCounters)
    4505               0 :         ReleaseScriptPCCounters(cx);
    4506                 : 
    4507               0 :     ReleaseAllJITCode(cx);
    4508                 : 
    4509               0 :     rt->profilingScripts = true;
    4510                 : }
    4511                 : 
    4512                 : JS_FRIEND_API(void)
    4513               0 : StopPCCountProfiling(JSContext *cx)
    4514                 : {
    4515               0 :     JSRuntime *rt = cx->runtime;
    4516                 : 
    4517               0 :     if (!rt->profilingScripts)
    4518               0 :         return;
    4519               0 :     JS_ASSERT(!rt->scriptPCCounters);
    4520                 : 
    4521               0 :     ReleaseAllJITCode(cx);
    4522                 : 
    4523               0 :     ScriptOpcodeCountsVector *vec = cx->new_<ScriptOpcodeCountsVector>(SystemAllocPolicy());
    4524               0 :     if (!vec)
    4525               0 :         return;
    4526                 : 
    4527               0 :     for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
    4528               0 :         for (CellIter i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
    4529               0 :             JSScript *script = i.get<JSScript>();
    4530               0 :             if (script->pcCounters && script->types) {
    4531               0 :                 ScriptOpcodeCountsPair info;
    4532               0 :                 info.script = script;
    4533               0 :                 info.counters.steal(script->pcCounters);
    4534               0 :                 if (!vec->append(info))
    4535               0 :                     info.counters.destroy(cx);
    4536                 :             }
    4537                 :         }
    4538                 :     }
    4539                 : 
    4540               0 :     rt->profilingScripts = false;
    4541               0 :     rt->scriptPCCounters = vec;
    4542                 : }
    4543                 : 
    4544                 : JS_FRIEND_API(void)
    4545               0 : PurgePCCounts(JSContext *cx)
    4546                 : {
    4547               0 :     JSRuntime *rt = cx->runtime;
    4548                 : 
    4549               0 :     if (!rt->scriptPCCounters)
    4550               0 :         return;
    4551               0 :     JS_ASSERT(!rt->profilingScripts);
    4552                 : 
    4553               0 :     ReleaseScriptPCCounters(cx);
    4554                 : }
    4555                 : 
    4556                 : } /* namespace js */
    4557                 : 
    4558                 : JS_PUBLIC_API(void)
    4559               6 : JS_IterateCompartments(JSRuntime *rt, void *data,
    4560                 :                        JSIterateCompartmentCallback compartmentCallback)
    4561                 : {
    4562               6 :     JS_ASSERT(!rt->gcRunning);
    4563                 : 
    4564              12 :     AutoLockGC lock(rt);
    4565              12 :     AutoHeapSession session(rt);
    4566                 : #ifdef JS_THREADSAFE
    4567               6 :     rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
    4568                 : #endif
    4569              12 :     AutoUnlockGC unlock(rt);
    4570                 : 
    4571              24 :     for (CompartmentsIter c(rt); !c.done(); c.next())
    4572              18 :         (*compartmentCallback)(rt, data, c);
    4573               6 : }
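                 : 
                 : /*
                 :  * Editor's illustrative usage sketch, not part of the original source: counting
                 :  * compartments with JS_IterateCompartments. CountCompartmentCallback and
                 :  * CountCompartments are hypothetical names; the callback's shape follows the
                 :  * (rt, data, compartment) invocation above.
                 :  */
                 : #if 0 /* sketch only; assumes the JSIterateCompartmentCallback typedef matches */
                 : static void
                 : CountCompartmentCallback(JSRuntime *rt, void *data, JSCompartment *compartment)
                 : {
                 :     ++*static_cast<size_t *>(data);
                 : }
                 : 
                 : static size_t
                 : CountCompartments(JSRuntime *rt)
                 : {
                 :     size_t count = 0;
                 :     JS_IterateCompartments(rt, &count, CountCompartmentCallback);
                 :     return count;
                 : }
                 : #endif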
    4574                 : 
    4575                 : #if JS_HAS_XML_SUPPORT
    4576                 : extern size_t sE4XObjectsCreated;
    4577                 : 
    4578                 : JSXML *
    4579         4727800 : js_NewGCXML(JSContext *cx)
    4580                 : {
    4581         4727800 :     if (!cx->runningWithTrustedPrincipals())
    4582         4727677 :         ++sE4XObjectsCreated;
    4583                 : 
    4584         4727800 :     return NewGCThing<JSXML>(cx, js::gc::FINALIZE_XML, sizeof(JSXML));
    4585                 : }
    4586                 : #endif
    4587                 : 

Generated by: LCOV version 1.7