/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=78:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
/*
 * JS Mark-and-Sweep Garbage Collector.
 *
 * This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see
 * jsgc.h). It allocates from a special GC arena pool with each arena allocated
 * using malloc. It uses an ideally parallel array of flag bytes to hold the
 * mark bit, finalizer type index, etc.
 *
 * XXX swizzle page to freelist for better locality of reference
 */
#include <stdlib.h>     /* for free */
#include <string.h>     /* for memset used when DEBUG */

#include "jsversion.h"
#include "jsgcchunk.h"
#include "jsstaticcheck.h"
#include "methodjit/MethodJIT.h"

#if JS_HAS_XML_SUPPORT
#include "jsxml.h"
#endif

#include "jscntxtinlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jshashtable.h"
#include "jsstrinlines.h"
#include "jscompartment.h"

# include <valgrind/memcheck.h>
using namespace js::gc;

/*
 * Check that JSTRACE_XML follows JSTRACE_OBJECT and JSTRACE_STRING.
 */
JS_STATIC_ASSERT(JSTRACE_OBJECT == 0);
JS_STATIC_ASSERT(JSTRACE_STRING == 1);
JS_STATIC_ASSERT(JSTRACE_XML == 2);

/*
 * JS_IS_VALID_TRACE_KIND assumes that JSTRACE_STRING is the last non-xml
 * trace kind when JS_HAS_XML_SUPPORT is false.
 */
JS_STATIC_ASSERT(JSTRACE_STRING + 1 == JSTRACE_XML);
/*
 * Everything we store in the heap must be a multiple of the cell size.
 */
JS_STATIC_ASSERT(sizeof(JSString) % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSShortString) % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSObject) % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSFunction) % sizeof(FreeCell) == 0);
JS_STATIC_ASSERT(sizeof(JSXML) % sizeof(FreeCell) == 0);

/*
 * All arenas must be exactly 4k.
 */
JS_STATIC_ASSERT(sizeof(Arena<JSString>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSExternalString>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSShortString>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSObject>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSFunction>) == 4096);
JS_STATIC_ASSERT(sizeof(Arena<JSXML>) == 4096);
#ifdef JS_GCMETER
# define METER(x)               ((void) (x))
# define METER_IF(condition, x) ((void) ((condition) && (x)))
#else
# define METER(x)               ((void) 0)
# define METER_IF(condition, x) ((void) 0)
#endif

# define METER_UPDATE_MAX(maxLval, rval)                                      \
    METER_IF((maxLval) < (rval), (maxLval) = (rval))
/* This array should be const, but that doesn't link right under GCC. */
FinalizeKind slotsToThingKind[] = {
    /*  0 */ FINALIZE_OBJECT0,  FINALIZE_OBJECT2,  FINALIZE_OBJECT2,  FINALIZE_OBJECT4,
    /*  4 */ FINALIZE_OBJECT4,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,
    /*  8 */ FINALIZE_OBJECT8,  FINALIZE_OBJECT12, FINALIZE_OBJECT12, FINALIZE_OBJECT12,
    /* 12 */ FINALIZE_OBJECT12, FINALIZE_OBJECT16, FINALIZE_OBJECT16, FINALIZE_OBJECT16,
    /* 16 */ FINALIZE_OBJECT16
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(slotsToThingKind) == SLOTS_TO_THING_KIND_LIMIT);
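
/*
 * For example, an object that needs 5 fixed slots maps through entry 5 above
 * to FINALIZE_OBJECT8: slot counts round up to the next supported arena kind.
 */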
/* Initialize the arena and setup the free list. */
template <typename T>
void
Arena<T>::init(JSCompartment *compartment, unsigned thingKind)
{
    aheader.compartment = compartment;
    aheader.thingKind = thingKind;
    aheader.freeList = &t.things[0].cell;
    aheader.thingSize = sizeof(T);
    aheader.isUsed = true;
    JS_ASSERT(sizeof(T) == sizeof(ThingOrCell<T>));
    ThingOrCell<T> *thing = &t.things[0];
    ThingOrCell<T> *last = &t.things[JS_ARRAY_LENGTH(t.things) - 1];
    while (thing < last) {
        thing->cell.link = &(thing + 1)->cell;
        ++thing;
    }
    last->cell.link = NULL;
    aheader.hasFreeThings = true;
}
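
/*
 * Note that init() threads the free cells in increasing address order, which
 * is what lets inFreeList() below assert cursor < cursor->link and stop
 * scanning once the cursor has moved past the queried thing.
 */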
template <typename T>
bool
Arena<T>::inFreeList(void *thing) const
{
    FreeCell *cursor = aheader.freeList;
    while (cursor) {
        JS_ASSERT(aheader.thingSize == sizeof(T));
        JS_ASSERT(!cursor->isMarked());

        /* If the cursor moves past the thing, it's not in the freelist. */
        if (thing < cursor)
            break;

        /* If we find it on the freelist, it's dead. */
        if (thing == cursor)
            return true;

        JS_ASSERT_IF(cursor->link, cursor < cursor->link);
        cursor = cursor->link;
    }
    return false;
}
template <typename T>
inline ConservativeGCTest
Arena<T>::mark(T *thing, JSTracer *trc)
{
    JS_ASSERT(sizeof(T) == aheader.thingSize);

    T *alignedThing = getAlignedThing(thing);

    if (alignedThing > &t.things[ThingsPerArena-1].t || alignedThing < &t.things[0].t)
        return CGCT_NOTARENA;

    if (!aheader.isUsed || inFreeList(alignedThing))
        return CGCT_NOTLIVE;

    JS_SET_TRACING_NAME(trc, "machine stack");
    Mark(trc, alignedThing);

#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    if (alignedThing != thing)
        return CGCT_VALIDWITHOFFSET;
#endif
    return CGCT_VALID;
}
static bool
checkArenaListsForThing(JSCompartment *comp, void *thing) {
    if (comp->arenas[FINALIZE_OBJECT0].arenasContainThing<JSObject>(thing) ||
        comp->arenas[FINALIZE_OBJECT2].arenasContainThing<JSObject_Slots2>(thing) ||
        comp->arenas[FINALIZE_OBJECT4].arenasContainThing<JSObject_Slots4>(thing) ||
        comp->arenas[FINALIZE_OBJECT8].arenasContainThing<JSObject_Slots8>(thing) ||
        comp->arenas[FINALIZE_OBJECT12].arenasContainThing<JSObject_Slots12>(thing) ||
        comp->arenas[FINALIZE_OBJECT16].arenasContainThing<JSObject_Slots16>(thing) ||
        comp->arenas[FINALIZE_FUNCTION].arenasContainThing<JSFunction>(thing) ||
#if JS_HAS_XML_SUPPORT
        comp->arenas[FINALIZE_XML].arenasContainThing<JSXML>(thing) ||
#endif
        comp->arenas[FINALIZE_STRING].arenasContainThing<JSString>(thing) ||
        comp->arenas[FINALIZE_EXTERNAL_STRING].arenasContainThing<JSExternalString>(thing) ||
        comp->arenas[FINALIZE_SHORT_STRING].arenasContainThing<JSShortString>(thing)) {
        return true;
    }

    return false;
}
static bool
checkArenaListAllUnmarked(JSCompartment *comp) {
    for (unsigned i = 0; i < FINALIZE_LIMIT; i++) {
        if (comp->arenas[i].markedThingsInArenaList())
            return false;
    }
    return true;
}
void
JSCompartment::finishArenaLists()
{
    for (int i = 0; i < FINALIZE_LIMIT; i++)
        arenas[i].releaseAll();
}
void
Chunk::clearMarkBitmap()
{
    PodZero(&bitmaps[0], ArenasPerChunk);
}
void
Chunk::init(JSRuntime *rt)
{
    info.runtime = rt;
    info.age = 0;
    info.emptyArenaLists.init();
    info.emptyArenaLists.cellFreeList = &arenas[0];
    Arena<FreeCell> *arena = &arenas[0];
    Arena<FreeCell> *last = &arenas[JS_ARRAY_LENGTH(arenas) - 1];
    while (arena < last) {
        arena->header()->next = arena + 1;
        arena->header()->isUsed = false;
        ++arena;
    }
    last->header()->next = NULL;
    last->header()->isUsed = false;
    info.numFree = ArenasPerChunk;
}
bool
Chunk::unused()
{
    return info.numFree == ArenasPerChunk;
}

bool
Chunk::hasAvailableArenas()
{
    return info.numFree > 0;
}

bool
Chunk::withinArenasRange(Cell *cell)
{
    uintptr_t addr = uintptr_t(cell);
    if (addr >= uintptr_t(&arenas[0]) && addr < uintptr_t(&arenas[ArenasPerChunk]))
        return true;
    return false;
}
template <typename T>
Arena<T> *
Chunk::allocateArena(JSCompartment *comp, unsigned thingKind)
{
    JS_ASSERT(hasAvailableArenas());
    Arena<T> *arena = info.emptyArenaLists.getNext<T>(comp, thingKind);
    JS_ASSERT(arena);
    JS_ASSERT(arena->header()->isUsed);
    --info.numFree;

    JSRuntime *rt = info.runtime;
    rt->gcBytes += sizeof(Arena<T>);
    comp->gcBytes += sizeof(Arena<T>);
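    /*
     * Arena allocation is where allocation pressure becomes visible, so this
     * is also where we check the compartment's GC trigger (gcTriggerBytes is
     * computed in JSCompartment::setGCLastBytes below).
     */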
    if (comp->gcBytes >= comp->gcTriggerBytes)
        TriggerCompartmentGC(comp);
    METER(rt->gcStats.nallarenas++);
    return arena;
}
template <typename T>
void
Chunk::releaseArena(Arena<T> *arena)
{
    JSRuntime *rt = info.runtime;
    JSCompartment *comp = arena->header()->compartment;
    METER(rt->gcStats.afree++);
    JS_ASSERT(rt->gcStats.nallarenas != 0);
    METER(rt->gcStats.nallarenas--);
    JS_ASSERT(rt->gcBytes >= sizeof(Arena<T>));
    JS_ASSERT(comp->gcBytes >= sizeof(Arena<T>));

    rt->gcBytes -= sizeof(Arena<T>);
    comp->gcBytes -= sizeof(Arena<T>);
    info.emptyArenaLists.insert((Arena<Cell> *)arena);
    arena->header()->isUsed = false;
    ++info.numFree;
}
static jsuword
GetGCChunk(JSRuntime *rt)
{
    void *p = rt->gcChunkAllocator->alloc();
#ifdef MOZ_GCTIMER
    if (p)
        JS_ATOMIC_INCREMENT(&newChunkCount);
#endif
    METER_IF(p, rt->gcStats.nchunks++);
    METER_UPDATE_MAX(rt->gcStats.maxnchunks, rt->gcStats.nchunks);
    return reinterpret_cast<jsuword>(p);
}
static void
ReleaseGCChunk(JSRuntime *rt, jsuword chunk)
{
    void *p = reinterpret_cast<void *>(chunk);
    JS_ASSERT(p);
#ifdef MOZ_GCTIMER
    JS_ATOMIC_INCREMENT(&destroyChunkCount);
#endif
    JS_ASSERT(rt->gcStats.nchunks != 0);
    METER(rt->gcStats.nchunks--);
    rt->gcChunkAllocator->free(p);
}
static Chunk *
AllocateGCChunk(JSRuntime *rt)
{
    Chunk *p = (Chunk *)rt->gcChunkAllocator->alloc();
#ifdef MOZ_GCTIMER
    if (p)
        JS_ATOMIC_INCREMENT(&newChunkCount);
#endif
    METER_IF(p, rt->gcStats.nchunks++);
    return p;
}
static void
ReleaseGCChunk(JSRuntime *rt, Chunk *p)
{
    JS_ASSERT(p);
#ifdef MOZ_GCTIMER
    JS_ATOMIC_INCREMENT(&destroyChunkCount);
#endif
    JS_ASSERT(rt->gcStats.nchunks != 0);
    METER(rt->gcStats.nchunks--);
    rt->gcChunkAllocator->free(p);
}
static Chunk *
PickChunk(JSRuntime *rt)
{
    Chunk *chunk;
    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
        if (r.front()->hasAvailableArenas())
            return r.front();
    }

    chunk = AllocateGCChunk(rt);
    if (!chunk)
        return NULL;

    /*
     * FIXME bug 583732 - chunk is newly allocated and cannot be present in
     * the table so using ordinary lookupForAdd is suboptimal here.
     */
    GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk);
    JS_ASSERT(!p);
    if (!rt->gcChunkSet.add(p, chunk)) {
        ReleaseGCChunk(rt, chunk);
        return NULL;
    }

    chunk->init(rt);
    return chunk;
}
static void
ExpireGCChunks(JSRuntime *rt)
{
    static const size_t MaxAge = 3;
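    /*
     * A chunk is released only after it has stayed completely unused for more
     * than MaxAge consecutive GC cycles; until then it is merely counted in
     * gcChunksWaitingToExpire.
     */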
    /* Remove unused chunks. */
    AutoLockGC lock(rt);

    rt->gcChunksWaitingToExpire = 0;
    for (GCChunkSet::Enum e(rt->gcChunkSet); !e.empty(); e.popFront()) {
        Chunk *chunk = e.front();
        JS_ASSERT(chunk->info.runtime == rt);
        if (chunk->unused()) {
            if (chunk->info.age++ > MaxAge) {
                e.removeFront();
                ReleaseGCChunk(rt, chunk);
                continue;
            }
            rt->gcChunksWaitingToExpire++;
        }
    }
}
template <typename T>
static Arena<T> *
AllocateArena(JSContext *cx, unsigned thingKind)
{
    JSRuntime *rt = cx->runtime;
    AutoLockGC lock(rt);
    Chunk *chunk = cx->compartment->chunk;
    if (!chunk || !chunk->hasAvailableArenas()) {
        chunk = PickChunk(rt);
        if (!chunk)
            return NULL;
        cx->compartment->chunk = chunk;
    }
    return chunk->allocateArena<T>(cx->compartment, thingKind);
}
bool
IsAboutToBeFinalized(JSContext *cx, void *thing)
{
    if (JSString::isStatic(thing))
        return false;

    JSCompartment *thingCompartment = reinterpret_cast<Cell *>(thing)->compartment();
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt == thingCompartment->rt);
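    /*
     * During a per-compartment GC only things in rt->gcCurrentCompartment can
     * be finalized; anything that lives in another compartment is treated as
     * live.
     */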
    if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
        return false;

    return !reinterpret_cast<Cell *>(thing)->isMarked();
}
bool
js_GCThingIsMarked(void *thing, uintN color = BLACK)
{
    JS_ASSERT(thing);
    AssertValidColor(thing, color);
    return reinterpret_cast<Cell *>(thing)->isMarked(color);
}
/*
 * 1/8 life for JIT code. After this number of microseconds have passed, 1/8 of all
 * JIT code is discarded in inactive compartments, regardless of how often that
 * code runs.
 */
static const int64 JIT_SCRIPT_EIGHTH_LIFETIME = 120 * 1000 * 1000;
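
/* That is 120 * 1000 * 1000 microseconds = 120 seconds per eighth-life. */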
JSBool
js_InitGC(JSRuntime *rt, uint32 maxbytes)
{
    /*
     * Make room for at least 16 chunks so the table will not grow before
     * the browser starts up.
     */
    if (!rt->gcChunkSet.init(16))
        return false;
    if (!rt->gcRootsHash.init(256))
        return false;
    if (!rt->gcLocksHash.init(256))
        return false;

#ifdef JS_THREADSAFE
    rt->gcLock = JS_NEW_LOCK();
    if (!rt->gcLock)
        return false;
    rt->gcDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->gcDone)
        return false;
    rt->requestDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->requestDone)
        return false;
    if (!rt->gcHelperThread.init(rt))
        return false;
#endif

    /*
     * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
     * for default backward API compatibility.
     */
    rt->gcMaxBytes = maxbytes;
    rt->setGCMaxMallocBytes(maxbytes);
    rt->gcEmptyArenaPoolLifespan = 30000;
    rt->gcTriggerFactor = uint32(100.0f * GC_HEAP_GROWTH_FACTOR);
    rt->atomsCompartment->setGCLastBytes(8192);

    /*
     * The assigned value prevents GC from running when GC memory is too low
     * (during JS engine start).
     */
    rt->setGCLastBytes(8192);
    rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_EIGHTH_LIFETIME;

    METER(PodZero(&rt->gcStats));
    return true;
}
template <typename T>
static inline ConservativeGCTest
MarkCell(Cell *cell, JSTracer *trc)
{
    return GetArena<T>(cell)->mark((T *)cell, trc);
}
/*
 * Returns CGCT_VALID or CGCT_VALIDWITHOFFSET and marks the thing if w can be
 * a live GC thing, setting thingKind accordingly. Otherwise it returns the
 * reason for rejection.
 */
inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, jsuword w, uint32 &thingKind)
{
    JSRuntime *rt = trc->context->runtime;
    /*
     * The conservative scanner may access words that valgrind considers
     * undefined. To avoid false positives and to avoid altering valgrind's
     * view of the memory, we make the argument, a copy of the original word,
     * memcheck-defined. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    jsuword payload = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    Cell *cell = reinterpret_cast<Cell *>(payload);
    Chunk *chunk = cell->chunk();

    if (!rt->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    if (!chunk->withinArenasRange(cell))
        return CGCT_NOTARENA;

    ArenaHeader *aheader = cell->arena()->header();

    if (!aheader->isUsed)
        return CGCT_FREEARENA;
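    /*
     * If we get here, w survived the cheap rejection tests: its low tag bits
     * are clear, its payload points into a known GC chunk, within that
     * chunk's arena range, and into an arena that is in use. The per-kind
     * mark() call below still rejects misaligned and free things.
     */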
    ConservativeGCTest test;
    thingKind = aheader->thingKind;
    switch (thingKind) {
      case FINALIZE_OBJECT0:
        test = MarkCell<JSObject>(cell, trc);
        break;
      case FINALIZE_OBJECT2:
        test = MarkCell<JSObject_Slots2>(cell, trc);
        break;
      case FINALIZE_OBJECT4:
        test = MarkCell<JSObject_Slots4>(cell, trc);
        break;
      case FINALIZE_OBJECT8:
        test = MarkCell<JSObject_Slots8>(cell, trc);
        break;
      case FINALIZE_OBJECT12:
        test = MarkCell<JSObject_Slots12>(cell, trc);
        break;
      case FINALIZE_OBJECT16:
        test = MarkCell<JSObject_Slots16>(cell, trc);
        break;
      case FINALIZE_STRING:
        test = MarkCell<JSString>(cell, trc);
        break;
      case FINALIZE_EXTERNAL_STRING:
        test = MarkCell<JSExternalString>(cell, trc);
        break;
      case FINALIZE_SHORT_STRING:
        test = MarkCell<JSShortString>(cell, trc);
        break;
      case FINALIZE_FUNCTION:
        test = MarkCell<JSFunction>(cell, trc);
        break;
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        test = MarkCell<JSXML>(cell, trc);
        break;
#endif
      default:
        test = CGCT_WRONGTAG;
        JS_NOT_REACHED("wrong tag");
    }

    return test;
}
inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, jsuword w)
{
    uint32 thingKind;
    return MarkIfGCThingWord(trc, w, thingKind);
}
static void
MarkWordConservatively(JSTracer *trc, jsuword w)
{
    /*
     * The conservative scanner may access words that valgrind considers
     * undefined. To avoid false positives and to avoid altering valgrind's
     * view of the memory, we make the argument, a copy of the original word,
     * memcheck-defined. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    uint32 thingKind;
#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
    ConservativeGCTest test =
#endif
    MarkIfGCThingWord(trc, w, thingKind);

#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    if (test == CGCT_VALID || test == CGCT_VALIDWITHOFFSET) {
        if (IS_GC_MARKING_TRACER(trc) && static_cast<GCMarker *>(trc)->conservativeDumpFileName) {
            const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
            jsuword payload = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
            jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif
            void *thing = (test == CGCT_VALIDWITHOFFSET)
                          ? GetAlignedThing((void *)payload, thingKind)
                          : (void *)payload;
            GCMarker::ConservativeRoot root = {thing, thingKind};
            static_cast<GCMarker *>(trc)->conservativeRoots.append(root);
        }
    }
#endif

#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
    if (IS_GC_MARKING_TRACER(trc))
        static_cast<GCMarker *>(trc)->conservativeStats.counter[test]++;
#endif
}
static void
MarkRangeConservatively(JSTracer *trc, const jsuword *begin, const jsuword *end)
{
    JS_ASSERT(begin <= end);
    for (const jsuword *i = begin; i != end; ++i)
        MarkWordConservatively(trc, *i);
}
static void
MarkThreadDataConservatively(JSTracer *trc, JSThreadData *td)
{
    ConservativeGCThreadData *ctd = &td->conservativeGC;
    JS_ASSERT(ctd->hasStackToScan());
    jsuword *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
    stackMin = td->nativeStackBase;
    stackEnd = ctd->nativeStackTop;
#else
    stackMin = ctd->nativeStackTop + 1;
    stackEnd = td->nativeStackBase;
#endif
    JS_ASSERT(stackMin <= stackEnd);
    MarkRangeConservatively(trc, stackMin, stackEnd);
    MarkRangeConservatively(trc, ctd->registerSnapshot.words,
                            JS_ARRAY_END(ctd->registerSnapshot.words));
}
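
/*
 * Scanning registerSnapshot.words in addition to the stack covers GC-thing
 * pointers that currently live only in machine registers; the snapshot is
 * filled in by ConservativeGCThreadData::recordStackTop below.
 */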
void
MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
{
    const jsuword *begin = beginv->payloadWord();
    const jsuword *end = endv->payloadWord();
#ifdef JS_NUNBOX32
    /*
     * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
     * scanning only the payloads.
     */
    JS_ASSERT(begin <= end);
    for (const jsuword *i = begin; i != end; i += sizeof(Value)/sizeof(jsuword))
        MarkWordConservatively(trc, *i);
#else
    MarkRangeConservatively(trc, begin, end);
#endif
}
void
MarkConservativeStackRoots(JSTracer *trc)
{
#ifdef JS_THREADSAFE
    for (JSThread::Map::Range r = trc->context->runtime->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        ConservativeGCThreadData *ctd = &thread->data.conservativeGC;
        if (ctd->hasStackToScan()) {
            JS_ASSERT_IF(!thread->data.requestDepth, thread->suspendCount);
            MarkThreadDataConservatively(trc, &thread->data);
        } else {
            JS_ASSERT(!thread->suspendCount);
            JS_ASSERT(thread->data.requestDepth <= ctd->requestThreshold);
        }
    }
#else
    MarkThreadDataConservatively(trc, &trc->context->runtime->threadData);
#endif
}
void
ConservativeGCThreadData::recordStackTop()
{
    /* Update the native stack pointer if it points to a bigger stack. */
    jsuword dummy;
    nativeStackTop = &dummy;

    /*
     * To record and update the register snapshot for the conservative
     * scanning with the latest values we use setjmp.
     */
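    /*
     * A sketch of why this works: setjmp spills the callee-saved registers
     * into registerSnapshot.jmpbuf, so any GC-thing pointer that currently
     * lives only in a register becomes visible to the conservative scan of
     * registerSnapshot.words. The MSVC pragma below silences warning 4611
     * about setjmp interacting with C++ object destruction.
     */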
#if defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4611)
#endif
    (void) setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}
void
RecordNativeStackTopForGC(JSContext *cx)
{
    ConservativeGCThreadData *ctd = &JS_THREAD_DATA(cx)->conservativeGC;

#ifdef JS_THREADSAFE
    /* Record the stack top here only if we are called from a request. */
    JS_ASSERT(cx->thread->data.requestDepth >= ctd->requestThreshold);
    if (cx->thread->data.requestDepth == ctd->requestThreshold)
        return;
#endif
    ctd->recordStackTop();
}
static void
CheckLeakedRoots(JSRuntime *rt);
void
js_FinishGC(JSRuntime *rt)
{
#ifdef JS_ARENAMETER
    JS_DumpArenaStats(stdout);
#endif
#ifdef JS_GCMETER
    if (JS_WANT_GC_METER_PRINT)
        js_DumpGCStats(rt, stdout);
#endif

    /* Delete all remaining Compartments. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
        JSCompartment *comp = *c;
        comp->finishArenaLists();
        js_delete(comp);
    }
    rt->compartments.clear();
    rt->atomsCompartment = NULL;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        ReleaseGCChunk(rt, r.front());
    rt->gcChunkSet.clear();

#ifdef JS_THREADSAFE
    rt->gcHelperThread.finish(rt);
#endif

#ifdef DEBUG
    if (!rt->gcRootsHash.empty())
        CheckLeakedRoots(rt);
#endif
    rt->gcRootsHash.clear();
    rt->gcLocksHash.clear();
}
JSBool
js_AddRoot(JSContext *cx, Value *vp, const char *name)
{
    JSBool ok = js_AddRootRT(cx->runtime, Jsvalify(vp), name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}

JSBool
js_AddGCThingRoot(JSContext *cx, void **rp, const char *name)
{
    JSBool ok = js_AddGCThingRootRT(cx->runtime, rp, name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}
JS_FRIEND_API(JSBool)
js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    return !!rt->gcRootsHash.put((void *)vp,
                                 RootInfo(name, JS_GC_ROOT_VALUE_PTR));
}
JS_FRIEND_API(JSBool)
js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    return !!rt->gcRootsHash.put((void *)rp,
                                 RootInfo(name, JS_GC_ROOT_GCTHING_PTR));
}
JS_FRIEND_API(JSBool)
js_RemoveRoot(JSRuntime *rt, void *rp)
{
    /*
     * Due to the JS_RemoveRootRT API, we may be called outside of a request.
     * Same synchronization drill as above in js_AddRoot.
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);
    rt->gcRootsHash.remove(rp);
    rt->gcPoke = JS_TRUE;
    return JS_TRUE;
}
typedef RootedValueMap::Range RootRange;
typedef RootedValueMap::Entry RootEntry;
typedef RootedValueMap::Enum RootEnum;
#ifdef DEBUG

static void
CheckLeakedRoots(JSRuntime *rt)
{
    uint32 leakedroots = 0;

    /* Warn (but don't assert) debug builds of any remaining roots. */
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        leakedroots++;
        fprintf(stderr,
                "JS engine warning: leaking GC root '%s' at %p\n",
                entry.value.name ? entry.value.name : "", entry.key);
    }

    if (leakedroots > 0) {
        if (leakedroots == 1) {
            fprintf(stderr,
                    "JS engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n"
                    "                   This root may point to freed memory. Objects reachable\n"
                    "                   through it have not been finalized.\n",
                    (void *) rt);
        } else {
            fprintf(stderr,
                    "JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n"
                    "                   These roots may point to freed memory. Objects reachable\n"
                    "                   through them have not been finalized.\n",
                    (unsigned long) leakedroots, (void *) rt);
        }
    }
}
void
js_DumpNamedRoots(JSRuntime *rt,
                  void (*dump)(const char *name, void *rp, JSGCRootType type, void *data),
                  void *data)
{
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        if (const char *name = entry.value.name)
            dump(name, entry.key, entry.value.type, data);
    }
}

#endif /* DEBUG */
uint32
js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data)
{
    AutoLockGC lock(rt);

    uint32 i = 0;
    for (RootEnum e(rt->gcRootsHash); !e.empty(); e.popFront()) {
        RootEntry &entry = e.front();
        intN mapflags = map(entry.key, entry.value.type, entry.value.name, data);
        if (mapflags & JS_MAP_GCROOT_REMOVE)
            e.removeFront();
        if (mapflags & JS_MAP_GCROOT_STOP)
            break;
        ++i;
    }
    return i;
}
void
JSRuntime::setGCTriggerFactor(uint32 factor)
{
    JS_ASSERT(factor >= 100);

    gcTriggerFactor = factor;
    setGCLastBytes(gcLastBytes);

    for (JSCompartment **c = compartments.begin(); c != compartments.end(); ++c)
        (*c)->setGCLastBytes(gcLastBytes);
}
void
JSRuntime::setGCLastBytes(size_t lastBytes)
{
    gcLastBytes = lastBytes;

    /* FIXME bug 603916 - we should unify the triggers here. */
    float trigger1 = float(lastBytes) * float(gcTriggerFactor) / 100.0f;
    float trigger2 = float(Max(lastBytes, GC_ARENA_ALLOCATION_TRIGGER)) *
                     GC_HEAP_GROWTH_FACTOR;
    float maxtrigger = Max(trigger1, trigger2);
    gcTriggerBytes = (float(gcMaxBytes) < maxtrigger) ? gcMaxBytes : size_t(maxtrigger);
}
void
JSCompartment::setGCLastBytes(size_t lastBytes)
{
    gcLastBytes = lastBytes;

    /* FIXME bug 603916 - we should unify the triggers here. */
    float trigger1 = float(lastBytes) * float(rt->gcTriggerFactor) / 100.0f;
    float trigger2 = float(Max(lastBytes, GC_ARENA_ALLOCATION_TRIGGER)) *
                     GC_HEAP_GROWTH_FACTOR;
    float maxtrigger = Max(trigger1, trigger2);
    gcTriggerBytes = (float(rt->gcMaxBytes) < maxtrigger) ? rt->gcMaxBytes : size_t(maxtrigger);
}
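
/*
 * Illustrative arithmetic, assuming GC_HEAP_GROWTH_FACTOR is 3 so that
 * js_InitGC sets gcTriggerFactor to 300: with lastBytes = 1 MB we get
 * trigger1 = 1 MB * 300 / 100 = 3 MB, i.e. the next GC triggers roughly when
 * the heap triples, capped at gcMaxBytes.
 */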
void
FreeLists::purge()
{
    /*
     * Return the free list back to the arena so GC finalization will not run
     * the finalizers over uninitialized bytes from free things.
     */
    for (FreeCell ***p = finalizables; p != JS_ARRAY_END(finalizables); ++p)
        *p = NULL;
}
class JSShortString;

static inline ArenaList *
GetFinalizableArenaList(JSCompartment *c, unsigned thingKind) {
    JS_ASSERT(thingKind < FINALIZE_LIMIT);
    return &c->arenas[thingKind];
}
#ifdef DEBUG
bool
CheckAllocation(JSContext *cx)
{
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread);
#endif
    JS_ASSERT(!cx->runtime->gcRunning);
    return true;
}
#endif
inline bool
NeedLastDitchGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
#ifdef JS_GC_ZEAL
    if (rt->gcZeal >= 1)
        return true;
#endif
    return rt->gcIsNeeded;
}
/*
 * Return false only if the GC ran but could not bring its memory usage under
 * JSRuntime::gcMaxBytes.
 */
static bool
RunLastDitchGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    METER(rt->gcStats.lastditch++);
#ifdef JS_THREADSAFE
    Conditionally<AutoUnlockAtomsCompartment>
        unlockAtomsCompartmentIf(cx->compartment == rt->atomsCompartment &&
                                 rt->atomsCompartmentIsLocked, cx);
#endif
    /* The last ditch GC preserves all atoms. */
    AutoKeepAtoms keep(rt);
    js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL);

    return rt->gcBytes < rt->gcMaxBytes;
}
template <typename T>
static bool
RefillTypedFreeList(JSContext *cx, unsigned thingKind)
{
    JSCompartment *compartment = cx->compartment;
    JS_ASSERT_IF(compartment->freeLists.finalizables[thingKind],
                 !*compartment->freeLists.finalizables[thingKind]);

    JS_ASSERT(!cx->runtime->gcRunning);
    if (cx->runtime->gcRunning)
        return false;

    bool canGC = !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota;
    do {
        if (canGC && JS_UNLIKELY(NeedLastDitchGC(cx))) {
            if (!RunLastDitchGC(cx))
                break;

            /*
             * The JSGC_END callback can legitimately allocate new GC
             * things and populate the free list. If that happens, just
             * return that list head.
             */
            if (compartment->freeLists.finalizables[thingKind])
                return true;
        }

        ArenaList *arenaList = GetFinalizableArenaList(compartment, thingKind);
        Arena<T> *a = reinterpret_cast<Arena<T> *>(arenaList->getNextWithFreeList());
        if (a) {
            JS_ASSERT(a->header()->freeList);
            JS_ASSERT(sizeof(T) == a->header()->thingSize);
            compartment->freeLists.populate(a, thingKind);
            return true;
        }

        /*
         * If the allocation fails rt->gcIsNeeded will be set and we will run
         * the GC on the next loop iteration if the last ditch GC is allowed.
         */
        a = AllocateArena<T>(cx, thingKind);
        if (a) {
            compartment->freeLists.populate(a, thingKind);
            arenaList->insert((Arena<FreeCell> *) a);
            a->getMarkingDelay()->init();
            return true;
        }
    } while (canGC);

    METER(cx->runtime->gcStats.fail++);
    js_ReportOutOfMemory(cx);
    return false;
}
bool
RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
{
    switch (thingKind) {
      case FINALIZE_OBJECT0:
        return RefillTypedFreeList<JSObject>(cx, thingKind);
      case FINALIZE_OBJECT2:
        return RefillTypedFreeList<JSObject_Slots2>(cx, thingKind);
      case FINALIZE_OBJECT4:
        return RefillTypedFreeList<JSObject_Slots4>(cx, thingKind);
      case FINALIZE_OBJECT8:
        return RefillTypedFreeList<JSObject_Slots8>(cx, thingKind);
      case FINALIZE_OBJECT12:
        return RefillTypedFreeList<JSObject_Slots12>(cx, thingKind);
      case FINALIZE_OBJECT16:
        return RefillTypedFreeList<JSObject_Slots16>(cx, thingKind);
      case FINALIZE_STRING:
        return RefillTypedFreeList<JSString>(cx, thingKind);
      case FINALIZE_EXTERNAL_STRING:
        return RefillTypedFreeList<JSExternalString>(cx, thingKind);
      case FINALIZE_SHORT_STRING:
        return RefillTypedFreeList<JSShortString>(cx, thingKind);
      case FINALIZE_FUNCTION:
        return RefillTypedFreeList<JSFunction>(cx, thingKind);
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        return RefillTypedFreeList<JSXML>(cx, thingKind);
#endif
      default:
        JS_NOT_REACHED("bad finalize kind");
        return false;
    }
}
intN
js_GetExternalStringGCType(JSString *str) {
    return GetExternalStringGCType((JSExternalString *)str);
}

uint32
js_GetGCThingTraceKind(void *thing) {
    return GetGCThingTraceKind(thing);
}
JSBool
js_LockGCThingRT(JSRuntime *rt, void *thing)
{
    GCLocks *locks;

    if (!thing)
        return true;
    locks = &rt->gcLocksHash;
    AutoLockGC lock(rt);
    GCLocks::AddPtr p = locks->lookupForAdd(thing);

    if (!p) {
        if (!locks->add(p, thing, 1))
            return false;
    } else {
        JS_ASSERT(p->value >= 1);
        p->value++;
    }

    METER(rt->gcStats.lock++);
    return true;
}
void
js_UnlockGCThingRT(JSRuntime *rt, void *thing)
{
    if (!thing)
        return;

    AutoLockGC lock(rt);
    GCLocks::Ptr p = rt->gcLocksHash.lookup(thing);

    if (p) {
        if (--p->value == 0)
            rt->gcLocksHash.remove(p);

        METER(rt->gcStats.unlock++);
    }
}
JS_PUBLIC_API(void)
JS_TraceChildren(JSTracer *trc, void *thing, uint32 kind)
{
    switch (kind) {
      case JSTRACE_OBJECT: {
        MarkChildren(trc, (JSObject *)thing);
        break;
      }

      case JSTRACE_STRING: {
        MarkChildren(trc, (JSString *)thing);
        break;
      }

#if JS_HAS_XML_SUPPORT
      case JSTRACE_XML:
        MarkChildren(trc, (JSXML *)thing);
        break;
#endif
    }
}
/*
 * When the native stack is low, the GC does not call JS_TraceChildren to mark
 * the reachable "children" of the thing. Rather the thing is put aside and
 * JS_TraceChildren is called later with more space on the C stack.
 *
 * To implement such delayed marking of the children with minimal overhead for
 * the normal case of sufficient native stack, the code adds a field per
 * arena. The field markingDelay->link links all arenas with delayed things
 * into a stack list with the pointer to stack top in
 * GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
 * arenas to the stack as necessary while markDelayedChildren pops the arenas
 * from the stack until it empties.
 */
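
/*
 * Illustrative shape of the stack after three arenas delayed marking:
 *
 *   unmarkedArenaStackTop -> arenaC -> arenaB -> arenaA -> arenaA
 *
 * The bottom arena links to itself so every pushed arena has a non-NULL
 * link; markDelayedChildren detects the bottom via link == arena.
 */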
GCMarker::GCMarker(JSContext *cx)
  : color(0), stackLimit(0), unmarkedArenaStackTop(NULL)
{
    JS_TRACER_INIT(this, cx, NULL);
    markLaterCount = 0;
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS");
    memset(&conservativeStats, 0, sizeof(conservativeStats));
#endif
}
GCMarker::~GCMarker()
{
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    dumpConservativeRoots();
#endif
#ifdef JS_GCMETER
    /* Update total stats. */
    context->runtime->gcStats.conservative.add(conservativeStats);
#endif
}
void
GCMarker::delayMarkingChildren(void *thing)
{
    Cell *cell = reinterpret_cast<Cell *>(thing);
    Arena<Cell> *a = cell->arena();
    JS_ASSERT(cell->isMarked());
    METER(cell->compartment()->rt->gcStats.unmarked++);
    MarkingDelay *markingDelay = a->getMarkingDelay();

    if (markingDelay->link) {
        if (markingDelay->start > (jsuword)cell)
            markingDelay->start = (jsuword)cell;
        /* Arena already scheduled to be marked again. */
        return;
    }
    markingDelay->start = (jsuword)cell;
    Arena<Cell> *tos = unmarkedArenaStackTop;
    markingDelay->link = tos ? tos : a;
    unmarkedArenaStackTop = a;
#ifdef DEBUG
    JSCompartment *comp = cell->compartment();
    markLaterCount += Arena<FreeCell>::ThingsPerArena;
    METER_UPDATE_MAX(comp->rt->gcStats.maxunmarked, markLaterCount);
#endif
}
template<typename T>
void
Arena<T>::markDelayedChildren(JSTracer *trc)
{
    T *thing = (T *)getMarkingDelay()->start;
    T *thingsEnd = &t.things[ThingsPerArena-1].t;
    JS_ASSERT(thing == getAlignedThing(thing));
    while (thing <= thingsEnd) {
        if (thing->asCell()->isMarked())
            MarkChildren(trc, thing);
        thing++;
    }
}
void
GCMarker::markDelayedChildren()
{
    while (Arena<Cell> *a = unmarkedArenaStackTop) {
        /*
         * markingDelay->link == current arena indicates the last arena on the
         * stack. If marking gets delayed at the same arena again, the arena
         * is pushed again in delayMarkingChildren. markingDelay->link has to
         * be cleared, otherwise the arena is not pushed again.
         */
        MarkingDelay *markingDelay = a->getMarkingDelay();
        unmarkedArenaStackTop = (markingDelay->link != a)
                                ? markingDelay->link
                                : NULL;
        markingDelay->link = NULL;
#ifdef DEBUG
        markLaterCount -= Arena<FreeCell>::ThingsPerArena;
#endif

        switch (a->header()->thingKind) {
          case FINALIZE_OBJECT0:
            reinterpret_cast<Arena<JSObject> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT2:
            reinterpret_cast<Arena<JSObject_Slots2> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT4:
            reinterpret_cast<Arena<JSObject_Slots4> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT8:
            reinterpret_cast<Arena<JSObject_Slots8> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT12:
            reinterpret_cast<Arena<JSObject_Slots12> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_OBJECT16:
            reinterpret_cast<Arena<JSObject_Slots16> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_STRING:
            reinterpret_cast<Arena<JSString> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_EXTERNAL_STRING:
            reinterpret_cast<Arena<JSExternalString> *>(a)->markDelayedChildren(this);
            break;
          case FINALIZE_SHORT_STRING:
            break;
          case FINALIZE_FUNCTION:
            reinterpret_cast<Arena<JSFunction> *>(a)->markDelayedChildren(this);
            break;
#if JS_HAS_XML_SUPPORT
          case FINALIZE_XML:
            reinterpret_cast<Arena<JSXML> *>(a)->markDelayedChildren(this);
            break;
#endif
          default:
            JS_NOT_REACHED("wrong thingkind");
        }
    }
    JS_ASSERT(markLaterCount == 0);
    JS_ASSERT(!unmarkedArenaStackTop);
}
} /* namespace js */
static void
gc_root_traversal(JSTracer *trc, const RootEntry &entry)
{
#ifdef DEBUG
    void *ptr;
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
        ptr = *reinterpret_cast<void **>(entry.key);
    } else {
        Value *vp = reinterpret_cast<Value *>(entry.key);
        ptr = vp->isGCThing() ? vp->toGCThing() : NULL;
    }

    if (ptr) {
        if (!JSString::isStatic(ptr)) {
            bool root_points_to_gcArenaList = false;
            JSCompartment **c = trc->context->runtime->compartments.begin();
            for (; c != trc->context->runtime->compartments.end(); ++c) {
                JSCompartment *comp = *c;
                if (checkArenaListsForThing(comp, ptr)) {
                    root_points_to_gcArenaList = true;
                    break;
                }
            }
            if (!root_points_to_gcArenaList && entry.value.name) {
                fprintf(stderr,
                        "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
                        "invalid gcthing. This is usually caused by a missing call to JS_RemoveRoot.\n"
                        "The root's name is \"%s\".\n",
                        entry.value.name);
            }
            JS_ASSERT(root_points_to_gcArenaList);
        }
    }
#endif
    JS_SET_TRACING_NAME(trc, entry.value.name ? entry.value.name : "root");
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
        MarkGCThing(trc, *reinterpret_cast<void **>(entry.key));
    else
        MarkValueRaw(trc, *reinterpret_cast<Value *>(entry.key));
}
static void
gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
{
    JS_ASSERT(entry.value >= 1);
    MarkGCThing(trc, entry.key, "locked object");
}
void
js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp)
{
    MarkObject(trc, fp->scopeChain(), "scope chain");
    if (fp->isDummyFrame())
        return;

    if (fp->hasCallObj())
        MarkObject(trc, fp->callObj(), "call");
    if (fp->hasArgsObj())
        MarkObject(trc, fp->argsObj(), "arguments");
    if (fp->isScriptFrame()) {
        js_TraceScript(trc, fp->script());
        fp->script()->compartment->active = true;
    }

    MarkValue(trc, fp->returnValue(), "rval");
}
void
AutoIdArray::trace(JSTracer *trc)
{
    JS_ASSERT(tag == IDARRAY);
    gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
}

void
AutoEnumStateRooter::trace(JSTracer *trc)
{
    js::gc::MarkObject(trc, *obj, "js::AutoEnumStateRooter.obj");
}
void
AutoGCRooter::trace(JSTracer *trc)
{
    switch (tag) {
      case JSVAL:
        MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
        return;
      case SHAPE:
        static_cast<AutoShapeRooter *>(this)->shape->trace(trc);
        return;
      case PARSER:
        static_cast<Parser *>(this)->trace(trc);
        return;
      case SCRIPT:
        if (JSScript *script = static_cast<AutoScriptRooter *>(this)->script)
            js_TraceScript(trc, script);
        return;
      case ENUMERATOR:
        static_cast<AutoEnumStateRooter *>(this)->trace(trc);
        return;
      case IDARRAY: {
        JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
        MarkIdRange(trc, ida->length, ida->vector, "js::AutoIdArray.idArray");
        return;
      }
      case DESCRIPTORS: {
        PropDescArray &descriptors =
            static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
        for (size_t i = 0, len = descriptors.length(); i < len; i++) {
            PropDesc &desc = descriptors[i];
            MarkValue(trc, desc.pd, "PropDesc::pd");
            MarkValue(trc, desc.value, "PropDesc::value");
            MarkValue(trc, desc.get, "PropDesc::get");
            MarkValue(trc, desc.set, "PropDesc::set");
            MarkId(trc, desc.id, "PropDesc::id");
        }
        return;
      }
      case DESCRIPTOR: {
        PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
        if (desc.obj)
            MarkObject(trc, *desc.obj, "Descriptor::obj");
        MarkValue(trc, desc.value, "Descriptor::value");
        if ((desc.attrs & JSPROP_GETTER) && desc.getter)
            MarkObject(trc, *CastAsObject(desc.getter), "Descriptor::get");
        if (desc.attrs & JSPROP_SETTER && desc.setter)
            MarkObject(trc, *CastAsObject(desc.setter), "Descriptor::set");
        return;
      }
      case NAMESPACES: {
        JSXMLArray &array = static_cast<AutoNamespaceArray *>(this)->array;
        MarkObjectRange(trc, array.length, reinterpret_cast<JSObject **>(array.vector),
                        "JSXMLArray.vector");
        array.cursors->trace(trc);
        return;
      }
      case XML:
        js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
        return;
      case OBJECT:
        if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
            MarkObject(trc, *obj, "js::AutoObjectRooter.obj");
        return;
      case ID:
        MarkId(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.val");
        return;
      case VALVECTOR: {
        Vector<Value, 8> &vector = static_cast<js::AutoValueVector *>(this)->vector;
        MarkValueRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
        return;
      }
      case STRING:
        if (JSString *str = static_cast<js::AutoStringRooter *>(this)->str)
            MarkString(trc, str, "js::AutoStringRooter.str");
        return;
      case IDVECTOR: {
        Vector<jsid, 8> &vector = static_cast<js::AutoIdVector *>(this)->vector;
        MarkIdRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
        return;
      }
      case SHAPEVECTOR: {
        Vector<const Shape *, 8> &vector = static_cast<js::AutoShapeVector *>(this)->vector;
        MarkShapeRange(trc, vector.length(), vector.begin(), "js::AutoShapeVector.vector");
        return;
      }
      case BINDINGS:
        static_cast<js::AutoBindingsRooter *>(this)->bindings.trace(trc);
        return;
    }

    JS_ASSERT(tag >= 0);
    MarkValueRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array, "js::AutoArrayRooter.array");
}
void
MarkContext(JSTracer *trc, JSContext *acx)
{
    /* Stack frames and slots are traced by StackSpace::mark. */

    /* Mark other roots-by-definition in acx. */
    if (acx->globalObject && !acx->hasRunOption(JSOPTION_UNROOTED_GLOBAL))
        MarkObject(trc, *acx->globalObject, "global object");
    if (acx->isExceptionPending())
        MarkValue(trc, acx->getPendingException(), "exception");

    for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
        gcr->trace(trc);

    if (acx->sharpObjectMap.depth > 0)
        js_TraceSharpMap(trc, &acx->sharpObjectMap);

    MarkValue(trc, acx->iterValue, "iterValue");

    if (acx->compartment)
        acx->compartment->mark(trc);
}
JS_REQUIRES_STACK void
MarkRuntime(JSTracer *trc)
{
    JSRuntime *rt = trc->context->runtime;

    if (rt->state != JSRTS_LANDING)
        MarkConservativeStackRoots(trc);

    /*
     * Verify that we do not have at this point unmarked GC things stored in
     * autorooters. To maximize test coverage we abort even in non-debug
     * builds for now, see bug 574313.
     */
    JSContext *iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
        for (AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down) {
#ifdef JS_THREADSAFE
            JS_ASSERT_IF(!acx->thread->data.requestDepth, acx->thread->suspendCount);
#endif
            JS_ASSERT(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan());
            void *thing;
            switch (gcr->tag) {
              case AutoGCRooter::JSVAL: {
                const Value &v = static_cast<AutoValueRooter *>(gcr)->val;
                if (!v.isMarkable())
                    continue;
                thing = v.toGCThing();
                break;
              }
              case AutoGCRooter::XML:
                thing = static_cast<AutoXMLRooter *>(gcr)->xml;
                break;
              case AutoGCRooter::OBJECT:
                thing = static_cast<AutoObjectRooter *>(gcr)->obj;
                if (!thing)
                    continue;
                break;
              case AutoGCRooter::ID: {
                jsid id = static_cast<AutoIdRooter *>(gcr)->id();
                if (!JSID_IS_GCTHING(id))
                    continue;
                thing = JSID_TO_GCTHING(id);
                break;
              }
              default:
                continue;
            }

            if (JSString::isStatic(thing))
                continue;

            if (!reinterpret_cast<Cell *>(thing)->isMarked()) {
                ConservativeGCTest test = MarkIfGCThingWord(trc, reinterpret_cast<jsuword>(thing));
                fprintf(stderr,
                        "Conservative GC scanner has missed the root 0x%p with tag %ld"
                        " on the stack due to %d. The root location 0x%p, distance from"
                        " the stack base %ld, conservative gc span %ld."
                        " Conservative GC status for the thread %d."
                        " Aborting.\n",
                        thing, (long) gcr->tag, int(test), (void *) gcr,
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase - (jsword) gcr),
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase -
                                (jsword) JS_THREAD_DATA(acx)->conservativeGC.nativeStackTop),
                        int(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan()));
            }
        }
    }

    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
        gc_root_traversal(trc, r.front());

    for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
        gc_lock_traversal(r.front(), trc);

    js_TraceAtomState(trc);

    iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
        MarkContext(trc, acx);

    rt->atomsCompartment->mark(trc);

#ifdef JS_TRACER
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->traceMonitor.mark(trc);
#endif

    for (ThreadDataIter i(rt); !i.empty(); i.popFront())
        i.threadData()->mark(trc);

    /*
     * We mark extra roots as the last thing so they can use additional
     * colors to implement cycle collection.
     */
    if (rt->gcExtraRootsTraceOp)
        rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);

#ifdef JS_FUNCTION_METERING
    if (rt->functionMeterFilename) {
        for (int k = 0; k < 2; k++) {
            typedef JSRuntime::FunctionCountMap HM;
            HM &h = (k == 0) ? rt->methodReadBarrierCountMap : rt->unjoinedFunctionCountMap;
            for (HM::Range r = h.all(); !r.empty(); r.popFront()) {
                JSFunction *fun = r.front().key;
                JS_CALL_OBJECT_TRACER(trc, fun, "FunctionCountMap key");
            }
        }
    }
#endif
}
void
TriggerGC(JSRuntime *rt)
{
    JS_ASSERT(!rt->gcRunning);
    if (rt->gcIsNeeded)
        return;

    /*
     * Trigger the GC when it is safe to call an operation callback on any
     * thread.
     */
    rt->gcIsNeeded = true;
    rt->gcTriggerCompartment = NULL;
    TriggerAllOperationCallbacks(rt);
}
void
TriggerCompartmentGC(JSCompartment *comp)
{
    JSRuntime *rt = comp->rt;
    JS_ASSERT(!rt->gcRunning);

#ifdef JS_GC_ZEAL
    if (rt->gcZeal >= 1) {
        TriggerGC(rt);
        return;
    }
#endif

    if (rt->gcMode != JSGC_MODE_COMPARTMENT || comp == rt->atomsCompartment) {
        /* We can't do a compartmental GC of the default compartment. */
        TriggerGC(rt);
        return;
    }

    if (rt->gcIsNeeded) {
        /* If we need to GC more than one compartment, run a full GC. */
        if (rt->gcTriggerCompartment != comp)
            rt->gcTriggerCompartment = NULL;
        return;
    }

    if (rt->gcBytes > 8192 && rt->gcBytes >= 3 * (rt->gcTriggerBytes / 2)) {
        /* If we're using significantly more than our quota, do a full GC. */
        TriggerGC(rt);
        return;
    }

    /*
     * Trigger the GC when it is safe to call an operation callback on any
     * thread.
     */
    rt->gcIsNeeded = true;
    rt->gcTriggerCompartment = comp;
    TriggerAllOperationCallbacks(comp->rt);
}
void
MaybeGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;

#ifdef JS_GC_ZEAL
    if (rt->gcZeal > 0) {
        js_GC(cx, NULL, GC_NORMAL);
        return;
    }
#endif

    JSCompartment *comp = cx->compartment;
    if (rt->gcIsNeeded) {
        js_GC(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL, GC_NORMAL);
        return;
    }
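    /*
     * Note that 3 * (gcTriggerBytes / 4) is 75% of the trigger, so the check
     * below starts a GC somewhat before the compartment reaches its
     * threshold.
     */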
    if (comp->gcBytes > 8192 && comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4))
        js_GC(cx, (rt->gcMode == JSGC_MODE_COMPARTMENT) ? comp : NULL, GC_NORMAL);
}
} /* namespace js */
void
js_DestroyScriptsToGC(JSContext *cx, JSCompartment *comp)
{
    JSScript **listp, *script;

    for (size_t i = 0; i != JS_ARRAY_LENGTH(comp->scriptsToGC); ++i) {
        listp = &comp->scriptsToGC[i];
        while ((script = *listp) != NULL) {
            *listp = script->u.nextToGC;
            script->u.nextToGC = NULL;
            js_DestroyCachedScript(cx, script);
        }
    }
}
/*
 * This function is called from js_FinishAtomState to force the finalization
 * of the permanently interned strings when cx is not available.
 */
void
js_FinalizeStringRT(JSRuntime *rt, JSString *str)
{
    JS_RUNTIME_UNMETER(rt, liveStrings);
    JS_ASSERT(!JSString::isStatic(str));
    JS_ASSERT(!str->isRope());

    if (str->isDependent()) {
        /* A dependent string cannot be external and must be valid. */
        JS_ASSERT(str->asCell()->arena()->header()->thingKind == FINALIZE_STRING);
        JS_ASSERT(str->dependentBase());
        JS_RUNTIME_UNMETER(rt, liveDependentStrings);
    } else {
        unsigned thingKind = str->asCell()->arena()->header()->thingKind;
        JS_ASSERT(IsFinalizableStringKind(thingKind));

        /* A stillborn string has null chars, so is not valid. */
        jschar *chars = const_cast<jschar *>(str->flatChars());
        if (!chars)
            return;
        if (thingKind == FINALIZE_STRING) {
            rt->stringMemoryUsed -= str->length() * 2;
            rt->free(chars);
        } else if (thingKind == FINALIZE_EXTERNAL_STRING) {
            ((JSExternalString *)str)->finalize();
        }
    }
}
template<typename T>
static void
FinalizeArenaList(JSCompartment *comp, JSContext *cx, unsigned thingKind)
{
    JS_STATIC_ASSERT(!(sizeof(T) & Cell::CellMask));
    ArenaList *arenaList = GetFinalizableArenaList(comp, thingKind);
    Arena<FreeCell> **ap = &arenaList->head;
    Arena<T> *a = (Arena<T> *) *ap;
    if (!a)
        return;
    JS_ASSERT(sizeof(T) == arenaList->head->header()->thingSize);

#ifdef JS_GCMETER
    uint32 nlivearenas = 0, nkilledarenas = 0, nthings = 0;
#endif
    for (;;) {
        ArenaHeader *header = a->header();
        JS_ASSERT_IF(header->hasFreeThings, header->freeList);
        JS_ASSERT(header->thingKind == thingKind);
        JS_ASSERT(!a->getMarkingDelay()->link);
        JS_ASSERT(a->getMarkingDelay()->unmarkedChildren == 0);
        JS_ASSERT(a->header()->isUsed);

        FreeCell *nextFree = header->freeList;
        FreeCell *freeList = NULL;
        FreeCell **tailp = &freeList;
        bool allClear = true;

        T *thingsEnd = &a->t.things[a->ThingsPerArena-1].t;
        T *thing = &a->t.things[0].t;
        thingsEnd++;

        if (!nextFree) {
            nextFree = thingsEnd->asFreeCell();
        } else {
            JS_ASSERT(thing->asCell() <= nextFree);
            JS_ASSERT(nextFree < thingsEnd->asCell());
        }

        for (;; thing++) {
            if (thing->asCell() == nextFree) {
                if (thing == thingsEnd)
                    break;
                nextFree = nextFree->link;
                if (!nextFree) {
                    nextFree = thingsEnd->asFreeCell();
                } else {
                    JS_ASSERT(thing->asCell() < nextFree);
                    JS_ASSERT(nextFree < thingsEnd->asFreeCell());
                }
            } else if (thing->asCell()->isMarked()) {
                allClear = false;
                METER(nthings++);
                continue;
            } else {
                thing->finalize(cx);
#ifdef DEBUG
                memset(thing, JS_FREE_PATTERN, sizeof(T));
#endif
            }
            FreeCell *t = thing->asFreeCell();
            *tailp = t;
            tailp = &t->link;
        }

#ifdef DEBUG
        /* Check that the free list is consistent. */
        unsigned nfree = 0;
        if (freeList) {
            JS_ASSERT(tailp != &freeList);
            FreeCell *t = freeList;
            for (;;) {
                ++nfree;
                if (&t->link == tailp)
                    break;
                JS_ASSERT(t < t->link);
                t = t->link;
            }
        }
#endif
        if (allClear) {
            /*
             * Forget the just-assembled free list head for the arena and
             * add the arena itself to the destroy list.
             */
            JS_ASSERT(nfree == a->ThingsPerArena);
            JS_ASSERT((T *)tailp == &a->t.things[a->ThingsPerArena-1].t);
            *tailp = NULL;
            header->freeList = freeList;
            header->hasFreeThings = true;
            *ap = (header->next);
            JS_ASSERT((T *)header->freeList == &a->t.things[0].t);
            a->chunk()->releaseArena(a);
            METER(nkilledarenas++);
        } else {
            JS_ASSERT(nfree < a->ThingsPerArena);
            *tailp = NULL;
            header->freeList = freeList;
            header->hasFreeThings = (nfree == 0) ? false : true;
            ap = &header->next;
            METER(nlivearenas++);
        }
        if (!(a = (Arena<T> *) *ap))
            break;
    }
    arenaList->cursor = arenaList->head;
    METER(UpdateCompartmentStats(comp, thingKind, nlivearenas, nkilledarenas, nthings));
}
void
JSCompartment::finalizeObjectArenaLists(JSContext *cx)
{
    FinalizeArenaList<JSObject>(this, cx, FINALIZE_OBJECT0);
    FinalizeArenaList<JSObject_Slots2>(this, cx, FINALIZE_OBJECT2);
    FinalizeArenaList<JSObject_Slots4>(this, cx, FINALIZE_OBJECT4);
    FinalizeArenaList<JSObject_Slots8>(this, cx, FINALIZE_OBJECT8);
    FinalizeArenaList<JSObject_Slots12>(this, cx, FINALIZE_OBJECT12);
    FinalizeArenaList<JSObject_Slots16>(this, cx, FINALIZE_OBJECT16);
    FinalizeArenaList<JSFunction>(this, cx, FINALIZE_FUNCTION);
#if JS_HAS_XML_SUPPORT
    FinalizeArenaList<JSXML>(this, cx, FINALIZE_XML);
#endif
}
void
JSCompartment::finalizeStringArenaLists(JSContext *cx)
{
    FinalizeArenaList<JSShortString>(this, cx, FINALIZE_SHORT_STRING);
    FinalizeArenaList<JSString>(this, cx, FINALIZE_STRING);
    FinalizeArenaList<JSExternalString>(this, cx, FINALIZE_EXTERNAL_STRING);
}
#ifdef JS_THREADSAFE

bool
GCHelperThread::init(JSRuntime *rt)
{
    if (!(wakeup = PR_NewCondVar(rt->gcLock)))
        return false;
    if (!(sweepingDone = PR_NewCondVar(rt->gcLock)))
        return false;

    thread = PR_CreateThread(PR_USER_THREAD, threadMain, rt, PR_PRIORITY_NORMAL,
                             PR_LOCAL_THREAD, PR_JOINABLE_THREAD, 0);
    return !!thread;
}

void
GCHelperThread::finish(JSRuntime *rt)
{
    PRThread *join = NULL;
    {
        AutoLockGC lock(rt);
        if (thread && !shutdown) {
            shutdown = true;
            PR_NotifyCondVar(wakeup);
            join = thread;
        }
    }
    if (join) {
        /* PR_DestroyThread is not necessary. */
        PR_JoinThread(join);
    }
    if (wakeup)
        PR_DestroyCondVar(wakeup);
    if (sweepingDone)
        PR_DestroyCondVar(sweepingDone);
}

void
GCHelperThread::threadMain(void *arg)
{
    JSRuntime *rt = static_cast<JSRuntime *>(arg);
    rt->gcHelperThread.threadLoop(rt);
}

void
GCHelperThread::threadLoop(JSRuntime *rt)
{
    AutoLockGC lock(rt);
    while (!shutdown) {
        /*
         * Sweeping can be true here on the first iteration if a GC and the
         * corresponding startBackgroundSweep call happen before this thread
         * has a chance to run.
         */
        if (!sweeping)
            PR_WaitCondVar(wakeup, PR_INTERVAL_NO_TIMEOUT);
        if (sweeping) {
            AutoUnlockGC unlock(rt);
            doSweep();
        }
        sweeping = false;
        PR_NotifyAllCondVar(sweepingDone);
    }
}

void
GCHelperThread::startBackgroundSweep(JSRuntime *rt)
{
    /* The caller takes the GC lock. */
    JS_ASSERT(!sweeping);
    sweeping = true;
    PR_NotifyCondVar(wakeup);
}

void
GCHelperThread::waitBackgroundSweepEnd(JSRuntime *rt)
{
    AutoLockGC lock(rt);
    while (sweeping)
        PR_WaitCondVar(sweepingDone, PR_INTERVAL_NO_TIMEOUT);
}
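
/*
 * Pointers passed to replenishAndFreeLater are batched into arrays of
 * FREE_ARRAY_LENGTH entries (accumulated in freeVector) so that doSweep can
 * release them in bulk on the background thread instead of paying the
 * deallocation cost once per GC thing on the main thread.
 */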
void
GCHelperThread::replenishAndFreeLater(void *ptr)
{
    JS_ASSERT(freeCursor == freeCursorEnd);
    do {
        if (freeCursor && !freeVector.append(freeCursorEnd - FREE_ARRAY_LENGTH))
            break;
        freeCursor = (void **) js_malloc(FREE_ARRAY_SIZE);
        if (!freeCursor) {
            freeCursorEnd = NULL;
            break;
        }
        freeCursorEnd = freeCursor + FREE_ARRAY_LENGTH;
        *freeCursor++ = ptr;
        return;
    } while (false);
    js_free(ptr);
}

void
GCHelperThread::doSweep()
{
    if (freeCursor) {
        void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
        freeElementsAndArray(array, freeCursor);
        freeCursor = freeCursorEnd = NULL;
    } else {
        JS_ASSERT(!freeCursorEnd);
    }
    for (void ***iter = freeVector.begin(); iter != freeVector.end(); ++iter) {
        void **array = *iter;
        freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
    }
    freeVector.resize(0);
}

#endif /* JS_THREADSAFE */
static void
SweepCrossCompartmentWrappers(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    /*
     * Figure out how much JIT code should be released from inactive compartments.
     * If multiple eighth-lives have passed, compound the release interval linearly;
     * if enough time has passed, all inactive JIT code will be released.
     */
    uint32 releaseInterval = 0;
    int64 now = PRMJ_Now();
    if (now >= rt->gcJitReleaseTime) {
        releaseInterval = 8;
        while (now >= rt->gcJitReleaseTime) {
            if (--releaseInterval == 1)
                rt->gcJitReleaseTime = now;
            rt->gcJitReleaseTime += JIT_SCRIPT_EIGHTH_LIFETIME;
        }
    }
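    /*
     * Example: with three eighth-lives elapsed the loop runs three times and
     * releaseInterval drops from 8 to 5; smaller intervals make the sweep
     * release a larger share of inactive JIT code, and 1 releases all of it.
     */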
    /* Remove dead wrappers from the compartment map. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->sweep(cx, releaseInterval);
}
static void
SweepCompartments(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;
    JSCompartmentCallback callback = rt->compartmentCallback;

    /* Skip the atomsCompartment. */
    JSCompartment **read = rt->compartments.begin() + 1;
    JSCompartment **end = rt->compartments.end();
    JSCompartment **write = read;
    JS_ASSERT(rt->compartments.length() >= 1);
    JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);

    while (read < end) {
        JSCompartment *compartment = *read++;

        /*
         * Unmarked compartments containing marked objects don't get deleted,
         * except when LAST_CONTEXT GC is performed.
         */
        if ((!compartment->isMarked() && compartment->arenaListsAreEmpty()) ||
            gckind == GC_LAST_CONTEXT)
        {
            JS_ASSERT(compartment->freeLists.isEmpty());
            if (callback)
                (void) callback(cx, compartment, JSCOMPARTMENT_DESTROY);
            if (compartment->principals)
                JSPRINCIPALS_DROP(cx, compartment->principals);
            js_delete(compartment);
            continue;
        }
        *write++ = compartment;
    }
    rt->compartments.resize(write - rt->compartments.begin());
}
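
/*
 * The read/write cursors above implement an in-place filter: surviving
 * compartments are compacted toward the front of the vector and the resize
 * drops the dead tail, so sweeping needs no temporary vector.
 */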
/*
 * Common cache invalidation and so forth that must be done before GC. Even if
 * GCUntilDone calls GC several times, this work needs to be done only once.
 */
static void
PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
    rt->gcIsNeeded = false;
    rt->gcTriggerCompartment = NULL;

    /* Reset malloc counter. */
    rt->resetGCMallocBytes();

#ifdef JS_DUMP_SCOPE_METERS
    {
        extern void js_DumpScopeMeters(JSRuntime *rt);
        js_DumpScopeMeters(rt);
    }
#endif

    /*
     * Reset the property cache's type id generator so we can compress ids.
     * Same for the protoHazardShape proxy-shape standing in for all object
     * prototypes having readonly or setter properties.
     */
    if (rt->shapeGen & SHAPE_OVERFLOW_BIT
#ifdef JS_GC_ZEAL
        || rt->gcZeal >= 1
#endif
        ) {
        rt->gcRegenShapes = true;
        rt->shapeGen = Shape::LAST_RESERVED_SHAPE;
        rt->protoHazardShape = 0;
    }

    if (rt->gcCurrentCompartment) {
        rt->gcCurrentCompartment->purge(cx);
    } else {
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            (*c)->purge(cx);
    }

    js_PurgeThreads(cx);

    JSContext *iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
        acx->purge();
}
static void
MarkAndSweepCompartment(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    JSRuntime *rt = cx->runtime;

    JS_ASSERT(!rt->gcRegenShapes);
    JS_ASSERT(gckind != GC_LAST_CONTEXT);
    JS_ASSERT(comp != rt->atomsCompartment);
    JS_ASSERT(!comp->isMarked());
    JS_ASSERT(comp->rt->gcMode == JSGC_MODE_COMPARTMENT);

    /* Mark phase. */
    GCMarker gcmarker(cx);
    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
    rt->gcMarkingTracer = &gcmarker;
    gcmarker.stackLimit = cx->stackLimit;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        r.front()->clearMarkBitmap();

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->markCrossCompartment(&gcmarker);

    comp->mark(&gcmarker);

    MarkRuntime(&gcmarker);

    /*
     * Mark children of things that caused too deep recursion during the
     * above tracing.
     */
    gcmarker.markDelayedChildren();

    rt->gcMarkingTracer = NULL;

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_MARK_END);

#ifdef JS_THREADSAFE
    /*
     * cx->gcBackgroundFree is set if we need several mark-and-sweep loops
     * to finish the GC.
     */
    if (!cx->gcBackgroundFree) {
        /* Wait until the sweeping from the previous GC finishes. */
        rt->gcHelperThread.waitBackgroundSweepEnd(rt);
        cx->gcBackgroundFree = &rt->gcHelperThread;
    }
#endif

    /* Make sure that we didn't mark an object in another compartment. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        JS_ASSERT_IF(*c != comp, checkArenaListAllUnmarked(*c));

    /*
     * Sweep phase.
     *
     * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
     * so that any attempt to allocate a GC-thing from a finalizer will fail,
     * rather than nest badly and leave the unmarked newborn to be swept.
     *
     * We first sweep atom state so we can use js_IsAboutToBeFinalized on
     * JSString held in a hashtable to check if the hashtable entry can be
     * freed. Note that even after the entry is freed, JSObject finalizers
     * can continue to access the corresponding JSString* assuming that they
     * are unique. This works since the atomization API must not be called
     * during the GC.
     */
    TIMESTAMP(startSweep);
    js_SweepAtomState(cx);

    /* Finalize watch points associated with unreachable objects. */
    js_SweepWatchPoints(cx);

#ifdef DEBUG
    /* Save the pre-sweep count of scope-mapped properties. */
    rt->liveObjectPropsPreSweep = rt->liveObjectProps;
#endif

    /*
     * We finalize iterators before other objects so an iterator can use the
     * object whose properties it enumerates to finalize the enumeration
     * state. We finalize objects before other GC things to ensure that an
     * object's finalizer can access the other things even if they will be
     * freed.
     */
    comp->finalizeObjectArenaLists(cx);
    TIMESTAMP(sweepObjectEnd);

    comp->finalizeStringArenaLists(cx);
    TIMESTAMP(sweepStringEnd);

    /*
     * Unmark all shapes. Even a per-compartment GC can mark shapes in other
     * compartments, and we need to clear these bits. See bug 635873. This
     * will be fixed in bug 569422.
     */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->propertyTree.unmarkShapes(cx);

    PropertyTree::dumpShapes(cx);
    TIMESTAMP(sweepShapeEnd);

    /*
     * Destroy arenas after we finished the sweeping so finalizers can safely
     * use js_IsAboutToBeFinalized().
     */
    ExpireGCChunks(rt);
    TIMESTAMP(sweepDestroyEnd);

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
}
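
/*
 * Illustration of the ordering guarantee above (hypothetical weak-table
 * helpers; js_IsAboutToBeFinalized's exact signature varies by version):
 * during the sweep phase a finalizer holding a weak reference may ask
 * whether its referent is dying and drop the entry, e.g.
 *
 *   if (js_IsAboutToBeFinalized(cx, entry->str))
 *       map->remove(entry);
 */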
/*
 * Perform mark-and-sweep GC.
 *
 * In a JS_THREADSAFE build, the calling thread must be rt->gcThread and each
 * other thread must be either outside all requests or blocked waiting for GC
 * to finish. Note that the caller does not hold rt->gcLock.
 */
static void
MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    JSRuntime *rt = cx->runtime;

    /* Mark phase. */
    GCMarker gcmarker(cx);
    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
    rt->gcMarkingTracer = &gcmarker;
    gcmarker.stackLimit = cx->stackLimit;

    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        r.front()->clearMarkBitmap();

    MarkRuntime(&gcmarker);
    js_MarkScriptFilenames(rt);

    /*
     * Mark children of things that caused too deep recursion during the
     * above tracing.
     */
    gcmarker.markDelayedChildren();

    rt->gcMarkingTracer = NULL;

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_MARK_END);

#ifdef JS_THREADSAFE
    /*
     * cx->gcBackgroundFree is set if we need several mark-and-sweep loops
     * to finish the GC.
     */
    if (!cx->gcBackgroundFree) {
        /* Wait until the sweeping from the previous GC finishes. */
        rt->gcHelperThread.waitBackgroundSweepEnd(rt);
        cx->gcBackgroundFree = &rt->gcHelperThread;
    }
#endif

    /*
     * Sweep phase.
     *
     * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
     * so that any attempt to allocate a GC-thing from a finalizer will fail,
     * rather than nest badly and leave the unmarked newborn to be swept.
     *
     * We first sweep atom state so we can use js_IsAboutToBeFinalized on
     * JSString held in a hashtable to check if the hashtable entry can be
     * freed. Note that even after the entry is freed, JSObject finalizers
     * can continue to access the corresponding JSString* assuming that they
     * are unique. This works since the atomization API must not be called
     * during the GC.
     */
    TIMESTAMP(startSweep);
    js_SweepAtomState(cx);

    /* Finalize watch points associated with unreachable objects. */
    js_SweepWatchPoints(cx);

#ifdef DEBUG
    /* Save the pre-sweep count of scope-mapped properties. */
    rt->liveObjectPropsPreSweep = rt->liveObjectProps;
#endif

    SweepCrossCompartmentWrappers(cx);

    /*
     * We finalize iterators before other objects so an iterator can use the
     * object whose properties it enumerates to finalize the enumeration
     * state. We finalize objects before other GC things to ensure that an
     * object's finalizer can access the other things even if they will be
     * freed.
     */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->finalizeObjectArenaLists(cx);

    TIMESTAMP(sweepObjectEnd);

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->finalizeStringArenaLists(cx);

    TIMESTAMP(sweepStringEnd);

    /*
     * Sweep the runtime's property trees after finalizing objects, in case
     * any had watchpoints referencing tree nodes.
     *
     * Do this before sweeping compartments, so that we sweep all shapes in
     * unreachable compartments.
     */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->propertyTree.sweepShapes(cx);

    PropertyTree::dumpShapes(cx);
    TIMESTAMP(sweepShapeEnd);

    SweepCompartments(cx, gckind);

    /*
     * Sweep script filenames after sweeping functions in the generic loop
     * above. In this way when a scripted function's finalizer destroys the
     * script and calls rt->destroyScriptHook, the hook can still access the
     * script's filename. See bug 323267.
     */
    js_SweepScriptFilenames(rt);

    /*
     * Destroy arenas after we finished the sweeping so finalizers can safely
     * use js_IsAboutToBeFinalized().
     */
    ExpireGCChunks(rt);
    TIMESTAMP(sweepDestroyEnd);
    /*
     * Clear the per-compartment mark flags set during cross-compartment
     * marking (cf. the JSCompartment::isMarked asserts above); the helper
     * name used here is an assumption of this excerpt.
     */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->unmark();

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
#ifdef DEBUG_srcnotesize
    {
        extern void DumpSrcNoteSizeHist();
        DumpSrcNoteSizeHist();
        printf("GC HEAP SIZE %lu\n", (unsigned long)rt->gcBytes);
    }
#endif

#ifdef JS_SCOPE_DEPTH_METER
    DumpScopeDepthMeter(rt);
#endif

#ifdef JS_DUMP_LOOP_STATS
    DumpLoopStats(rt);
#endif
}
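
/*
 * Sweep-order summary for the full GC above; each step may still consult
 * data that a later step destroys: atoms, watchpoints, cross-compartment
 * wrappers, objects, strings, shapes, dead compartments, script filenames,
 * and finally the empty arenas themselves.
 */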
#ifdef JS_THREADSAFE

/*
 * If the GC is running and we're called on another thread, wait for this GC
 * activation to finish. We can safely wait here without fear of deadlock (in
 * the case where we are called within a request on another thread's context)
 * because the GC doesn't set rt->gcRunning until after it has waited for all
 * active requests to end.
 *
 * We check rt->gcRunning first to avoid the relatively expensive call to
 * js_CurrentThreadId() when the GC is not running.
 */
void
js_WaitForGC(JSRuntime *rt)
{
    if (rt->gcRunning && rt->gcThread->id != js_CurrentThreadId()) {
        do {
            JS_AWAIT_GC_DONE(rt);
        } while (rt->gcRunning);
    }
}
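
/*
 * Note on the loop above: JS_AWAIT_GC_DONE blocks on the runtime's GC
 * condition variable, and condition variables may wake spuriously, so
 * rt->gcRunning must be re-tested after every wakeup rather than assumed
 * false.
 */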
/*
 * GC is running on another thread. Temporarily suspend all requests running
 * on the current thread and wait until the GC is done.
 */
static void
LetOtherGCFinish(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt->gcThread);
    JS_ASSERT(cx->thread != rt->gcThread);

    size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
    JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx));

    if (requestDebit != 0) {
        if (JS_ON_TRACE(cx)) {
            /*
             * Leave trace before we decrease rt->requestCount and notify the
             * GC. Otherwise the GC may start immediately after we unlock
             * while this thread is still on trace.
             */
            AutoUnlockGC unlock(rt);
            LeaveTrace(cx);
        }

        rt->requestCount -= requestDebit;
        if (rt->requestCount == 0)
            JS_NOTIFY_REQUEST_DONE(rt);
    }

    /*
     * Check that we did not release the GC lock above and let the GC finish
     * before we wait.
     */
    JS_ASSERT(rt->gcThread);

    /*
     * Wait for the GC to finish on the other thread, even if requestDebit is
     * 0 and even if GC has not started yet because the gcThread is waiting
     * in AutoGCSession. This ensures that js_GC never returns without a full
     * GC cycle happening.
     */
    do {
        JS_AWAIT_GC_DONE(rt);
    } while (rt->gcThread);

    rt->requestCount += requestDebit;
}
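
/*
 * The debit/credit dance above keeps rt->requestCount consistent: a thread
 * inside a request contributes exactly one unit regardless of its nesting
 * depth, so we subtract our unit to let the collector's wait for zero
 * complete, park on the GC condition variable, and restore the unit once
 * rt->gcThread is clear.
 */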
/* RAII helper managing exclusive "GC session" ownership of the runtime. */
class AutoGCSession {
  public:
    explicit AutoGCSession(JSContext *cx);
    ~AutoGCSession();

  private:
    JSContext *context;

    /* Disable copy constructor or assignments. */
    AutoGCSession(const AutoGCSession&);
    void operator=(const AutoGCSession&);
};
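
/*
 * Usage sketch, matching the call sites later in this file: take the GC
 * lock, open the session, then drop the lock for the session's body:
 *
 *   AutoLockGC lock(rt);
 *   AutoGCSession gcsession(cx);
 *   AutoUnlockGC unlock(rt);
 *   ... stop-the-world work ...
 */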
/*
 * Start a new GC session. Together with LetOtherGCFinish this function
 * contains the rendezvous algorithm by which we stop the world for GC.
 *
 * This thread becomes the GC thread. Wait for all other threads to quiesce.
 * Then set rt->gcRunning and return.
 */
AutoGCSession::AutoGCSession(JSContext *cx)
  : context(cx)
{
    JSRuntime *rt = cx->runtime;

#ifdef JS_THREADSAFE
    if (rt->gcThread && rt->gcThread != cx->thread)
        LetOtherGCFinish(cx);
#endif

    JS_ASSERT(!rt->gcRunning);

#ifdef JS_THREADSAFE
    /* No other thread is in GC, so indicate that we're now in GC. */
    JS_ASSERT(!rt->gcThread);
    rt->gcThread = cx->thread;

    /*
     * Notify operation callbacks on other threads, which will give them a
     * chance to yield their requests. Threads without requests perform their
     * callback at some later point, which then will be unnecessary, but
     * harmless.
     */
    for (JSThread::Map::Range r = rt->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        if (thread != cx->thread)
            thread->data.triggerOperationCallback(rt);
    }

    /*
     * Discount the request on the current thread from contributing to
     * rt->requestCount before we wait for all other requests to finish.
     * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on
     * rt->requestCount transitions to 0.
     */
    size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
    if (requestDebit != rt->requestCount) {
        rt->requestCount -= requestDebit;
        do {
            JS_AWAIT_REQUEST_DONE(rt);
        } while (rt->requestCount > 0);
        rt->requestCount += requestDebit;
    }
#endif /* JS_THREADSAFE */

    /*
     * Set rt->gcRunning here within the GC lock, and after waiting for any
     * active requests to end. This way js_WaitForGC called outside a request
     * would not block on the GC that is waiting for other requests to finish
     * with rt->gcThread set, while JS_BeginRequest would do such a wait.
     */
    rt->gcRunning = true;
}
/* End the current GC session and allow other threads to proceed. */
AutoGCSession::~AutoGCSession()
{
    JSRuntime *rt = context->runtime;
    rt->gcRunning = false;
#ifdef JS_THREADSAFE
    JS_ASSERT(rt->gcThread == context->thread);
    rt->gcThread = NULL;
    JS_NOTIFY_GC_DONE(rt);
#endif
}
/*
 * GC, repeatedly if necessary, until we think we have not created any new
 * garbage and no other threads are demanding more GC.
 */
static void
GCUntilDone(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    if (JS_ON_TRACE(cx))
        return;

    JSRuntime *rt = cx->runtime;

    /* Recursive GC or a call from another thread restarts the GC cycle. */
    if (rt->gcMarkAndSweep) {
        rt->gcPoke = true;
#ifdef JS_THREADSAFE
        JS_ASSERT(rt->gcThread);
        if (rt->gcThread != cx->thread) {
            /* We do not return until another GC finishes. */
            LetOtherGCFinish(cx);
        }
#endif
        return;
    }

    AutoGCSession gcsession(cx);

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        JS_ASSERT(!(*c)->isMarked());

    /*
     * We should not be depending on cx->compartment in the GC, so set it to
     * NULL to look for violations.
     */
    SwitchToCompartment(cx, (JSCompartment *)NULL);

    JS_ASSERT(!rt->gcCurrentCompartment);
    rt->gcCurrentCompartment = comp;

    METER(rt->gcStats.poke++);

    bool firstRun = true;
    rt->gcMarkAndSweep = true;
#ifdef JS_THREADSAFE
    JS_ASSERT(!cx->gcBackgroundFree);
#endif

    do {
        rt->gcPoke = false;

        AutoUnlockGC unlock(rt);
        if (firstRun) {
            PreGCCleanup(cx, gckind);
            TIMESTAMP(startMark);
            firstRun = false;
        }

        if (comp)
            MarkAndSweepCompartment(cx, comp, gckind GCTIMER_ARG);
        else
            MarkAndSweep(cx, gckind GCTIMER_ARG);

        // GC again if:
        //   - another thread, not in a request, called js_GC
        //   - js_GC was called recursively
        //   - a finalizer called js_RemoveRoot or js_UnlockGCThingRT.
    } while (rt->gcPoke);

#ifdef JS_THREADSAFE
    JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
    cx->gcBackgroundFree = NULL;
    rt->gcHelperThread.startBackgroundSweep(rt);
#endif

    rt->gcMarkAndSweep = false;
    rt->gcRegenShapes = false;
    rt->setGCLastBytes(rt->gcBytes);
    rt->gcCurrentCompartment = NULL;

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->setGCLastBytes((*c)->gcBytes);
}
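
/*
 * Note on the background-sweep handoff above: the first mark-and-sweep pass
 * of a session sets cx->gcBackgroundFree (after waiting out the previous
 * session's sweep), and it is cleared here, so exactly one
 * startBackgroundSweep call follows however many GC loops ran.
 */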
void
js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /*
     * Don't collect garbage if the runtime isn't up, and cx is not the last
     * context in the runtime. The last context must force a GC, and nothing
     * should suppress that final collection or there may be shutdown leaks,
     * or runtime bloat until the next context is created.
     */
    if (rt->state != JSRTS_UP && gckind != GC_LAST_CONTEXT)
        return;

    RecordNativeStackTopForGC(cx);

#ifdef DEBUG
    int stackDummy;
# if JS_STACK_GROWTH_DIRECTION > 0
    /* cx->stackLimit is set to jsuword(-1) by default. */
    JS_ASSERT_IF(cx->stackLimit != jsuword(-1),
                 JS_CHECK_STACK_SIZE(cx->stackLimit + (1 << 14), &stackDummy));
# else
    /* -16k because it is possible to perform a GC during an overrecursion report. */
    JS_ASSERT_IF(cx->stackLimit, JS_CHECK_STACK_SIZE(cx->stackLimit - (1 << 14), &stackDummy));
# endif
#endif

    GCTIMER_BEGIN();

    do {
        /*
         * Let the API user decide to defer a GC if it wants to (unless this
         * is the last context). Invoke the callback regardless. Sample the
         * callback in case we are freely racing with a JS_SetGCCallback{,RT}
         * on another thread.
         */
        if (JSGCCallback callback = rt->gcCallback) {
            if (!callback(cx, JSGC_BEGIN) && gckind != GC_LAST_CONTEXT)
                return;
        }

        {
            /* Lock out other GC allocator and collector invocations. */
            AutoLockGC lock(rt);

            GCUntilDone(cx, comp, gckind GCTIMER_ARG);
        }

        /* We re-sample the callback again as the finalizers can change it. */
        if (JSGCCallback callback = rt->gcCallback)
            (void) callback(cx, JSGC_END);

        /*
         * On shutdown, iterate until the JSGC_END callback stops creating
         * garbage.
         */
    } while (gckind == GC_LAST_CONTEXT && rt->gcPoke);

#ifdef JS_GCMETER
    js_DumpGCStats(cx->runtime, stderr);
#endif
    GCTIMER_END(gckind == GC_LAST_CONTEXT);
}
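
/*
 * For orientation: the public JSAPI funnels into js_GC, roughly as below
 * (see jsapi.cpp for the authoritative wrappers):
 *
 *   JS_GC(cx)                  -> js_GC(cx, NULL, GC_NORMAL)
 *   JS_CompartmentGC(cx, comp) -> js_GC(cx, comp, GC_NORMAL)
 *   JS_MaybeGC(cx)             -> js_GC only when heuristics warrant it
 */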
namespace js {

bool
SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto)
{
    /*
     * This function cannot be called during the GC and always requires a
     * request.
     */
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread->data.requestDepth);

    /*
     * This is only necessary if AutoGCSession below would wait for GC to
     * finish on another thread, but to capture the minimal stack space and
     * for code simplicity we do it here unconditionally.
     */
    RecordNativeStackTopForGC(cx);
#endif

    JSRuntime *rt = cx->runtime;
    AutoLockGC lock(rt);
    AutoGCSession gcsession(cx);
    AutoUnlockGC unlock(rt);

    bool cycle = false;
    for (JSObject *obj2 = proto; obj2;) {
        if (obj2 == obj) {
            cycle = true;
            break;
        }
        obj2 = obj2->getProto();
    }
    if (!cycle)
        obj->setProto(proto);

    return !cycle;
}
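
/*
 * Example of the check above: with a proto chain a -> b -> NULL,
 * SetProtoCheckingForCycles(cx, a, b) succeeds, while
 * SetProtoCheckingForCycles(cx, b, a) walks a -> b, hits b == obj, and
 * returns false, so scripts cannot build __proto__ cycles.
 */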
namespace gc {

JSCompartment *
NewCompartment(JSContext *cx, JSPrincipals *principals)
{
    JSRuntime *rt = cx->runtime;
    JSCompartment *compartment = js_new<JSCompartment>(rt);
    if (!compartment || !compartment->init()) {
        js_delete(compartment);
        JS_ReportOutOfMemory(cx);
        return NULL;
    }

    if (principals) {
        compartment->principals = principals;
        JSPRINCIPALS_HOLD(cx, principals);
    }

    compartment->setGCLastBytes(8192);

    {
        AutoLockGC lock(rt);

        if (!rt->compartments.append(compartment)) {
            AutoUnlockGC unlock(rt);
            JS_ReportOutOfMemory(cx);
            return NULL;
        }
    }

    JSCompartmentCallback callback = rt->compartmentCallback;
    if (callback && !callback(cx, compartment, JSCOMPARTMENT_NEW)) {
        AutoLockGC lock(rt);
        rt->compartments.popBack();
        return NULL;
    }

    return compartment;
}
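
/*
 * Embedders do not usually call NewCompartment directly; the JSAPI entry
 * point of this era, JS_NewCompartmentAndGlobalObject, creates the
 * compartment together with its global object.
 */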
} /* namespace gc */

void
TraceRuntime(JSTracer *trc)
{
    LeaveTrace(trc->context);

#ifdef JS_THREADSAFE
    {
        JSContext *cx = trc->context;
        JSRuntime *rt = cx->runtime;
        AutoLockGC lock(rt);

        if (rt->gcThread != cx->thread) {
            AutoGCSession gcsession(cx);
            AutoUnlockGC unlock(rt);
            RecordNativeStackTopForGC(trc->context);
            MarkRuntime(trc);
            return;
        }
    }
#else
    RecordNativeStackTopForGC(trc->context);
#endif

    /*
     * Calls from inside a normal GC or recursive calls are OK and do not
     * require session setup.
     */
    MarkRuntime(trc);
}

} /* namespace js */
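
/*
 * Illustration (hypothetical callback; see JSTraceCallback in jspubtd.h for
 * the real signature): TraceRuntime lets tools walk every root, e.g. a
 * crude heap counter:
 *
 *   static size_t count = 0;
 *
 *   static void
 *   CountThing(JSTracer *trc, void *thing, uint32 kind)
 *   {
 *       count++;
 *   }
 *
 *   JSTracer trc;
 *   JS_TRACER_INIT(&trc, cx, CountThing);
 *   TraceRuntime(&trc);
 */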