Diffstat (limited to 'js/src/gc')
-rw-r--r-- | js/src/gc/GC.cpp | 50
-rw-r--r-- | js/src/gc/GC.h | 3
-rw-r--r-- | js/src/gc/GCEnum.h | 1
-rw-r--r-- | js/src/gc/GCLock.h | 21
-rw-r--r-- | js/src/gc/GCMarker.h | 13
-rw-r--r-- | js/src/gc/GCParallelTask.cpp | 6
-rw-r--r-- | js/src/gc/GCParallelTask.h | 5
-rw-r--r-- | js/src/gc/GCRuntime.h | 1
-rw-r--r-- | js/src/gc/Marking-inl.h | 53
-rw-r--r-- | js/src/gc/Marking.cpp | 175
-rw-r--r-- | js/src/gc/Memory.cpp | 1
-rw-r--r-- | js/src/gc/Nursery.cpp | 5
-rw-r--r-- | js/src/gc/Scheduling.h | 3
-rw-r--r-- | js/src/gc/Tenuring.cpp | 8
-rw-r--r-- | js/src/gc/Tracer.h | 6
-rw-r--r-- | js/src/gc/WeakMap-inl.h | 36
-rw-r--r-- | js/src/gc/WeakMap.cpp | 59
-rw-r--r-- | js/src/gc/WeakMap.h | 32
-rw-r--r-- | js/src/gc/Zone.cpp | 88
-rw-r--r-- | js/src/gc/Zone.h | 7
20 files changed, 295 insertions, 278 deletions
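The GC.cpp, GC.h, GCRuntime.h and Scheduling.h hunks below replace the writable JSGC_MARKING_THREAD_COUNT parameter with a writable JSGC_MAX_MARKING_THREADS cap, keeping JSGC_MARKING_THREAD_COUNT as a read-only report of the count the runtime derives for itself. A minimal embedder-side sketch of that split, assuming the usual JS_SetGCParameter/JS_GetGCParameter API; the header layout is an assumption and this snippet is not part of the patch:

    #include <cstdint>
    #include "jsapi.h"     // JS_SetGCParameter / JS_GetGCParameter
    #include "js/GCAPI.h"  // JSGCParamKey values (assumed location)

    // Cap parallel marking at four threads; the runtime then derives the
    // actual markingThreadCount from the CPU count, this cap and the helper
    // threads that are really available.
    static void ConfigureParallelMarking(JSContext* cx) {
      JS_SetGCParameter(cx, JSGC_MAX_MARKING_THREADS, 4);

      // JSGC_MARKING_THREAD_COUNT is now read-only and reports the derived value.
      uint32_t markers = JS_GetGCParameter(cx, JSGC_MARKING_THREAD_COUNT);
      (void)markers;
    }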
diff --git a/js/src/gc/GC.cpp b/js/src/gc/GC.cpp index 68dd66898c..bdd252907b 100644 --- a/js/src/gc/GC.cpp +++ b/js/src/gc/GC.cpp @@ -411,6 +411,8 @@ GCRuntime::GCRuntime(JSRuntime* rt) helperThreadRatio(TuningDefaults::HelperThreadRatio), maxHelperThreads(TuningDefaults::MaxHelperThreads), helperThreadCount(1), + maxMarkingThreads(TuningDefaults::MaxMarkingThreads), + markingThreadCount(1), createBudgetCallback(nullptr), minEmptyChunkCount_(TuningDefaults::MinEmptyChunkCount), maxEmptyChunkCount_(TuningDefaults::MaxEmptyChunkCount), @@ -1049,7 +1051,7 @@ bool GCRuntime::setParameter(JSContext* cx, JSGCParamKey key, uint32_t value) { static bool IsGCThreadParameter(JSGCParamKey key) { return key == JSGC_HELPER_THREAD_RATIO || key == JSGC_MAX_HELPER_THREADS || - key == JSGC_MARKING_THREAD_COUNT; + key == JSGC_MAX_MARKING_THREADS; } bool GCRuntime::setParameter(JSGCParamKey key, uint32_t value, @@ -1120,8 +1122,8 @@ bool GCRuntime::setThreadParameter(JSGCParamKey key, uint32_t value, } maxHelperThreads = value; break; - case JSGC_MARKING_THREAD_COUNT: - markingThreadCount = std::min(size_t(value), MaxParallelWorkers); + case JSGC_MAX_MARKING_THREADS: + maxMarkingThreads = std::min(size_t(value), MaxParallelWorkers); break; default: MOZ_CRASH("Unexpected parameter key"); @@ -1201,8 +1203,8 @@ void GCRuntime::resetThreadParameter(JSGCParamKey key, AutoLockGC& lock) { case JSGC_MAX_HELPER_THREADS: maxHelperThreads = TuningDefaults::MaxHelperThreads; break; - case JSGC_MARKING_THREAD_COUNT: - markingThreadCount = 0; + case JSGC_MAX_MARKING_THREADS: + maxMarkingThreads = TuningDefaults::MaxMarkingThreads; break; default: MOZ_CRASH("Unexpected parameter key"); @@ -1265,6 +1267,8 @@ uint32_t GCRuntime::getParameter(JSGCParamKey key, const AutoLockGC& lock) { return maxHelperThreads; case JSGC_HELPER_THREAD_COUNT: return helperThreadCount; + case JSGC_MAX_MARKING_THREADS: + return maxMarkingThreads; case JSGC_MARKING_THREAD_COUNT: return markingThreadCount; case JSGC_SYSTEM_PAGE_SIZE_KB: @@ -1316,8 +1320,12 @@ void GCRuntime::updateHelperThreadCount() { std::clamp(size_t(double(cpuCount) * helperThreadRatio.ref()), size_t(1), maxHelperThreads.ref()); + // Calculate the target thread count for parallel marking, which uses separate + // parameters to let us adjust this independently. + markingThreadCount = std::min(cpuCount / 2, maxMarkingThreads.ref()); + // Calculate the overall target thread count taking into account the separate - // parameter for parallel marking threads. Add spare threads to avoid blocking + // target for parallel marking threads. Add spare threads to avoid blocking // parallel marking when there is other GC work happening. size_t targetCount = std::max(helperThreadCount.ref(), @@ -1334,9 +1342,13 @@ void GCRuntime::updateHelperThreadCount() { MOZ_ASSERT(availableThreadCount != 0); targetCount = std::min(targetCount, availableThreadCount); helperThreadCount = std::min(helperThreadCount.ref(), availableThreadCount); - markingThreadCount = - std::min(markingThreadCount.ref(), - availableThreadCount - SpareThreadsDuringParallelMarking); + if (availableThreadCount < SpareThreadsDuringParallelMarking) { + markingThreadCount = 1; + } else { + markingThreadCount = + std::min(markingThreadCount.ref(), + availableThreadCount - SpareThreadsDuringParallelMarking); + } // Update the maximum number of threads that will be used for GC work. 
maxParallelThreads = targetCount; @@ -2948,6 +2960,10 @@ void GCRuntime::beginMarkPhase(AutoGCSession& session) { checkNoRuntimeRoots(session); } else { AutoUpdateLiveCompartments updateLive(this); +#ifdef DEBUG + AutoSetThreadIsMarking threadIsMarking; +#endif // DEBUG + marker().setRootMarkingMode(true); traceRuntimeForMajorGC(marker().tracer(), session); marker().setRootMarkingMode(false); @@ -3093,6 +3109,10 @@ IncrementalProgress GCRuntime::markUntilBudgetExhausted( } } +#ifdef DEBUG + AutoSetThreadIsMarking threadIsMarking; +#endif // DEBUG + if (processTestMarkQueue() == QueueYielded) { return NotFinished; } @@ -3112,10 +3132,6 @@ IncrementalProgress GCRuntime::markUntilBudgetExhausted( return Finished; } -#ifdef DEBUG - AutoSetThreadIsMarking threadIsMarking; -#endif // DEBUG - return marker().markUntilBudgetExhausted(sliceBudget, reportTime) ? Finished : NotFinished; @@ -5054,7 +5070,7 @@ void GCRuntime::checkHashTablesAfterMovingGC() { } for (ZonesIter zone(this, SkipAtoms); !zone.done(); zone.next()) { zone->checkUniqueIdTableAfterMovingGC(); - zone->shapeZone().checkTablesAfterMovingGC(); + zone->shapeZone().checkTablesAfterMovingGC(zone); zone->checkAllCrossCompartmentWrappersAfterMovingGC(); zone->checkScriptMapsAfterMovingGC(); @@ -5063,15 +5079,17 @@ void GCRuntime::checkHashTablesAfterMovingGC() { for (auto map = zone->cellIterUnsafe<NormalPropMap>(); !map.done(); map.next()) { if (PropMapTable* table = map->asLinked()->maybeTable(nogc)) { - table->checkAfterMovingGC(); + table->checkAfterMovingGC(zone); } } for (auto map = zone->cellIterUnsafe<DictionaryPropMap>(); !map.done(); map.next()) { if (PropMapTable* table = map->asLinked()->maybeTable(nogc)) { - table->checkAfterMovingGC(); + table->checkAfterMovingGC(zone); } } + + WeakMapBase::checkWeakMapsAfterMovingGC(zone); } for (CompartmentsIter c(this); !c.done(); c.next()) { diff --git a/js/src/gc/GC.h b/js/src/gc/GC.h index 7f603b066f..b907b0cddb 100644 --- a/js/src/gc/GC.h +++ b/js/src/gc/GC.h @@ -82,7 +82,8 @@ class TenuredChunk; _("helperThreadRatio", JSGC_HELPER_THREAD_RATIO, true) \ _("maxHelperThreads", JSGC_MAX_HELPER_THREADS, true) \ _("helperThreadCount", JSGC_HELPER_THREAD_COUNT, false) \ - _("markingThreadCount", JSGC_MARKING_THREAD_COUNT, true) \ + _("maxMarkingThreads", JSGC_MAX_MARKING_THREADS, true) \ + _("markingThreadCount", JSGC_MARKING_THREAD_COUNT, false) \ _("systemPageSizeKB", JSGC_SYSTEM_PAGE_SIZE_KB, false) \ _("semispaceNurseryEnabled", JSGC_SEMISPACE_NURSERY_ENABLED, true) diff --git a/js/src/gc/GCEnum.h b/js/src/gc/GCEnum.h index d60cfaea76..572d5f34a6 100644 --- a/js/src/gc/GCEnum.h +++ b/js/src/gc/GCEnum.h @@ -122,6 +122,7 @@ enum class GCAbortReason { _(ModuleCyclicFields) \ _(ModuleSyntheticFields) \ _(ModuleExports) \ + _(ModuleImportAttributes) \ _(BaselineScript) \ _(IonScript) \ _(ArgumentsData) \ diff --git a/js/src/gc/GCLock.h b/js/src/gc/GCLock.h index 64c28ac544..86429106d2 100644 --- a/js/src/gc/GCLock.h +++ b/js/src/gc/GCLock.h @@ -15,8 +15,6 @@ namespace js { -class AutoUnlockGC; - /* * RAII class that takes the GC lock while it is live. 
* @@ -32,7 +30,7 @@ class MOZ_RAII AutoLockGC { ~AutoLockGC() { lockGuard_.reset(); } - js::LockGuard<js::Mutex>& guard() { return lockGuard_.ref(); } + LockGuard<Mutex>& guard() { return lockGuard_.ref(); } protected: void lock() { @@ -48,12 +46,12 @@ class MOZ_RAII AutoLockGC { gc::GCRuntime* const gc; private: - mozilla::Maybe<js::LockGuard<js::Mutex>> lockGuard_; + mozilla::Maybe<LockGuard<Mutex>> lockGuard_; AutoLockGC(const AutoLockGC&) = delete; AutoLockGC& operator=(const AutoLockGC&) = delete; - friend class AutoUnlockGC; // For lock/unlock. + friend class UnlockGuard<AutoLockGC>; // For lock/unlock. }; /* @@ -92,18 +90,7 @@ class MOZ_RAII AutoLockGCBgAlloc : public AutoLockGC { bool startBgAlloc = false; }; -class MOZ_RAII AutoUnlockGC { - public: - explicit AutoUnlockGC(AutoLockGC& lock) : lock(lock) { lock.unlock(); } - - ~AutoUnlockGC() { lock.lock(); } - - private: - AutoLockGC& lock; - - AutoUnlockGC(const AutoUnlockGC&) = delete; - AutoUnlockGC& operator=(const AutoUnlockGC&) = delete; -}; +using AutoUnlockGC = UnlockGuard<AutoLockGC>; } // namespace js diff --git a/js/src/gc/GCMarker.h b/js/src/gc/GCMarker.h index 9d34d0a0dc..898f458c02 100644 --- a/js/src/gc/GCMarker.h +++ b/js/src/gc/GCMarker.h @@ -124,6 +124,7 @@ class MarkStack { public: TaggedPtr() = default; TaggedPtr(Tag tag, Cell* ptr); + uintptr_t asBits() const; Tag tag() const; uintptr_t tagUnchecked() const; template <typename T> @@ -144,6 +145,7 @@ class MarkStack { TaggedPtr ptr() const; void setStart(size_t newStart); + void setEmpty(); private: static constexpr size_t StartShift = 2; @@ -385,12 +387,6 @@ class GCMarker { // structures. void abortLinearWeakMarking(); - // 'delegate' is no longer the delegate of 'key'. - void severWeakDelegate(JSObject* key, JSObject* delegate); - - // 'delegate' is now the delegate of 'key'. Update weakmap marking state. - void restoreWeakDelegate(JSObject* key, JSObject* delegate); - #ifdef DEBUG // We can't check atom marking if the helper thread lock is already held by // the current thread. This allows us to disable the check. @@ -429,7 +425,7 @@ class GCMarker { void markAndTraverse(T* thing); template <typename T> - void markImplicitEdges(T* oldThing); + void markImplicitEdges(T* markedThing); private: /* @@ -524,9 +520,6 @@ class GCMarker { inline void pushValueRange(JSObject* obj, SlotsOrElementsKind kind, size_t start, size_t end); - template <typename T> - void markImplicitEdgesHelper(T markedThing); - // Mark through edges whose target color depends on the colors of two source // entities (eg a WeakMap and one of its keys), and push the target onto the // mark stack. diff --git a/js/src/gc/GCParallelTask.cpp b/js/src/gc/GCParallelTask.cpp index 27cb39df36..f33174ea2e 100644 --- a/js/src/gc/GCParallelTask.cpp +++ b/js/src/gc/GCParallelTask.cpp @@ -100,6 +100,12 @@ void js::GCParallelTask::joinWithLockHeld(AutoLockHelperThreadState& lock, return; } + if (lock.hasQueuedTasks()) { + // Unlock to allow task dispatch without lock held, otherwise we could wait + // forever. + AutoUnlockHelperThreadState unlock(lock); + } + if (isNotYetRunning(lock) && deadline.isNothing()) { // If the task was dispatched but has not yet started then cancel the task // and run it from the main thread. 
This stops us from blocking here when diff --git a/js/src/gc/GCParallelTask.h b/js/src/gc/GCParallelTask.h index 9ac51d02be..88e0ad6255 100644 --- a/js/src/gc/GCParallelTask.h +++ b/js/src/gc/GCParallelTask.h @@ -161,8 +161,6 @@ class GCParallelTask : private mozilla::LinkedListElement<GCParallelTask>, void joinWithLockHeld( AutoLockHelperThreadState& lock, mozilla::Maybe<mozilla::TimeStamp> deadline = mozilla::Nothing()); - void joinNonIdleTask(mozilla::Maybe<mozilla::TimeStamp> deadline, - AutoLockHelperThreadState& lock); // Instead of dispatching to a helper, run the task on the current thread. void runFromMainThread(); @@ -247,6 +245,9 @@ class GCParallelTask : private mozilla::LinkedListElement<GCParallelTask>, } friend class gc::GCRuntime; + void joinNonIdleTask(mozilla::Maybe<mozilla::TimeStamp> deadline, + AutoLockHelperThreadState& lock); + void runTask(JS::GCContext* gcx, AutoLockHelperThreadState& lock); // Implement the HelperThreadTask interface. diff --git a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h index 851e477359..6b85223e9e 100644 --- a/js/src/gc/GCRuntime.h +++ b/js/src/gc/GCRuntime.h @@ -1027,6 +1027,7 @@ class GCRuntime { MainThreadData<double> helperThreadRatio; MainThreadData<size_t> maxHelperThreads; MainThreadOrGCTaskData<size_t> helperThreadCount; + MainThreadData<size_t> maxMarkingThreads; MainThreadData<size_t> markingThreadCount; // Per-runtime helper thread task queue. Can be accessed from helper threads diff --git a/js/src/gc/Marking-inl.h b/js/src/gc/Marking-inl.h index 4afc9f7c8c..90f6337657 100644 --- a/js/src/gc/Marking-inl.h +++ b/js/src/gc/Marking-inl.h @@ -188,9 +188,40 @@ inline void PreWriteBarrierDuringFlattening(JSString* str) { #ifdef JSGC_HASH_TABLE_CHECKS +// Moving GC things whose pointers are used in hash table keys has the potential +// to break hash tables in subtle and terrifying ways. For example, a key might +// be reported as not present but iterating the table could still return it. +// +// Check that a table is correct following a moving GC, ensuring that nothing is +// present in the table that points into the nursery or that has not been moved, +// and that the hash table entries are discoverable. +// +// |checkEntryAndGetLookup| should check any GC thing pointers in the entry are +// valid and return the lookup required to get this entry from the table. 
+ +template <typename Table, typename Range, typename Lookup> +void CheckTableEntryAfterMovingGC(const Table& table, const Range& r, + const Lookup& lookup) { + auto ptr = table.lookup(lookup); + MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front()); +} + +template <typename Table, typename F> +void CheckTableAfterMovingGC(const Table& table, F&& checkEntryAndGetLookup) { + for (auto r = table.all(); !r.empty(); r.popFront()) { + auto lookup = checkEntryAndGetLookup(r.front()); + CheckTableEntryAfterMovingGC(table, r, lookup); + } +} + template <typename T> inline bool IsGCThingValidAfterMovingGC(T* t) { - return !IsInsideNursery(t) && !t->isForwarded(); + if (!t->isTenured()) { + return false; + } + + TenuredCell* cell = &t->asTenured(); + return cell->arena()->allocated() && !cell->isForwarded(); } template <typename T> @@ -201,8 +232,24 @@ inline void CheckGCThingAfterMovingGC(T* t) { } template <typename T> -inline void CheckGCThingAfterMovingGC(const WeakHeapPtr<T*>& t) { - CheckGCThingAfterMovingGC(t.unbarrieredGet()); +inline void CheckGCThingAfterMovingGC(T* t, JS::Zone* expectedZone) { + if (t) { + MOZ_RELEASE_ASSERT(IsGCThingValidAfterMovingGC(t)); + JS::Zone* zone = t->zoneFromAnyThread(); + MOZ_RELEASE_ASSERT(zone == expectedZone || zone->isAtomsZone()); + } +} + +template <typename T> +inline void CheckGCThingAfterMovingGC(const WeakHeapPtr<T*>& t, + JS::Zone* expectedZone) { + CheckGCThingAfterMovingGC(t.unbarrieredGet(), expectedZone); +} + +inline void CheckProtoAfterMovingGC(const TaggedProto& proto, JS::Zone* zone) { + if (proto.isObject()) { + CheckGCThingAfterMovingGC(proto.toObject(), zone); + } } #endif // JSGC_HASH_TABLE_CHECKS diff --git a/js/src/gc/Marking.cpp b/js/src/gc/Marking.cpp index 6b8742c980..6bec46940f 100644 --- a/js/src/gc/Marking.cpp +++ b/js/src/gc/Marking.cpp @@ -331,12 +331,20 @@ static bool ShouldTraceCrossCompartment(JSTracer* trc, JSObject* src, #ifdef DEBUG -inline void js::gc::AssertShouldMarkInZone(GCMarker* marker, Cell* thing) { - if (!thing->isMarkedBlack()) { - Zone* zone = thing->zone(); - MOZ_ASSERT(zone->isAtomsZone() || - zone->shouldMarkInZone(marker->markColor())); +template <typename T> +void js::gc::AssertShouldMarkInZone(GCMarker* marker, T* thing) { + if (thing->isMarkedBlack()) { + return; } + + // Allow marking atoms if we're not collecting the atoms zone, except + // for symbols which may entrain other GC things if they're used as weakmap + // keys. + bool allowAtoms = !std::is_same_v<T, JS::Symbol>; + + Zone* zone = thing->zone(); + MOZ_ASSERT(zone->shouldMarkInZone(marker->markColor()) || + (allowAtoms && zone->isAtomsZone())); } void js::gc::AssertRootMarkingPhase(JSTracer* trc) { @@ -718,26 +726,6 @@ void js::gc::TraceRangeInternal(JSTracer* trc, size_t len, T* vec, namespace js { -using HasNoImplicitEdgesType = bool; - -template <typename T> -struct ImplicitEdgeHolderType { - using Type = HasNoImplicitEdgesType; -}; - -// For now, we only handle JSObject* and BaseScript* keys, but the linear time -// algorithm can be easily extended by adding in more types here, then making -// GCMarker::traverse<T> call markImplicitEdges. -template <> -struct ImplicitEdgeHolderType<JSObject*> { - using Type = JSObject*; -}; - -template <> -struct ImplicitEdgeHolderType<BaseScript*> { - using Type = BaseScript*; -}; - void GCMarker::markEphemeronEdges(EphemeronEdgeVector& edges, gc::MarkColor srcColor) { // This is called as part of GC weak marking or by barriers outside of GC.
@@ -771,86 +759,21 @@ void GCMarker::markEphemeronEdges(EphemeronEdgeVector& edges, } } -// 'delegate' is no longer the delegate of 'key'. -void GCMarker::severWeakDelegate(JSObject* key, JSObject* delegate) { - MOZ_ASSERT(CurrentThreadIsMainThread()); - - JS::Zone* zone = delegate->zone(); - MOZ_ASSERT(zone->needsIncrementalBarrier()); - - auto* p = zone->gcEphemeronEdges(delegate).get(delegate); - if (!p) { - return; - } - - // We are losing 3 edges here: key -> delegate, delegate -> key, and - // <delegate, map> -> value. Maintain snapshot-at-beginning (hereafter, - // S-A-B) by conservatively assuming the delegate will end up black and - // marking through the latter 2 edges. - // - // Note that this does not fully give S-A-B: - // - // 1. If the map is gray, then the value will only be marked gray here even - // though the map could later be discovered to be black. - // - // 2. If the map has not yet been marked, we won't have any entries to mark - // here in the first place. - // - // 3. We're not marking the delegate, since that would cause eg nukeAllCCWs - // to keep everything alive for another collection. - // - // We can't even assume that the delegate passed in here is live, because we - // could have gotten here from nukeAllCCWs, which iterates over all CCWs - // including dead ones. - // - // This is ok because S-A-B is only needed to prevent the case where an - // unmarked object is removed from the graph and then re-inserted where it is - // reachable only by things that have already been marked. None of the 3 - // target objects will be re-inserted anywhere as a result of this action. - - EphemeronEdgeVector& edges = p->value; - MOZ_ASSERT(markColor() == MarkColor::Black); - markEphemeronEdges(edges, MarkColor::Black); -} - -// 'delegate' is now the delegate of 'key'. Update weakmap marking state. -void GCMarker::restoreWeakDelegate(JSObject* key, JSObject* delegate) { - MOZ_ASSERT(CurrentThreadIsMainThread()); - - MOZ_ASSERT(key->zone()->needsIncrementalBarrier()); - - if (!delegate->zone()->needsIncrementalBarrier()) { - // Normally we should not have added the key -> value edge if the delegate - // zone is not marking (because the delegate would have been seen as black, - // so we would mark the key immediately instead). But if there wasn't a - // delegate (the key was nuked), then we won't have consulted it. So we - // can't do the same assertion as above. - // - // Specifically, the sequence would be: - // 1. Nuke the key. - // 2. Start the incremental GC. - // 3. Mark the WeakMap. Insert a key->value edge with a DeadObjectProxy key. - // 4. Un-nuke the key with a delegate in a nonmarking Zone. - // - // The result is an ephemeron edge (from <map,key> to value, but stored - // as key to value) involving a key with a delegate in a nonmarking Zone, - // something that ordinarily would not happen. - return; - } +template <typename T> +struct TypeCanHaveImplicitEdges : std::false_type {}; +template <> +struct TypeCanHaveImplicitEdges<JSObject> : std::true_type {}; +template <> +struct TypeCanHaveImplicitEdges<BaseScript> : std::true_type {}; +template <> +struct TypeCanHaveImplicitEdges<JS::Symbol> : std::true_type {}; - auto* p = key->zone()->gcEphemeronEdges(key).get(key); - if (!p) { +template <typename T> +void GCMarker::markImplicitEdges(T* markedThing) { + if constexpr (!TypeCanHaveImplicitEdges<T>::value) { return; } - // Similar to severWeakDelegate above, mark through the key -> value edge. 
- EphemeronEdgeVector& edges = p->value; - MOZ_ASSERT(markColor() == MarkColor::Black); - markEphemeronEdges(edges, MarkColor::Black); -} - -template <typename T> -void GCMarker::markImplicitEdgesHelper(T markedThing) { if (!isWeakMarking()) { return; } @@ -859,30 +782,34 @@ void GCMarker::markImplicitEdgesHelper(T markedThing) { MOZ_ASSERT(zone->isGCMarking()); MOZ_ASSERT(!zone->isGCSweeping()); - auto p = zone->gcEphemeronEdges().get(markedThing); + auto& ephemeronTable = zone->gcEphemeronEdges(); + auto* p = ephemeronTable.get(markedThing); if (!p) { return; } + EphemeronEdgeVector& edges = p->value; // markedThing might be a key in a debugger weakmap, which can end up marking // values that are in a different compartment. AutoClearTracingSource acts(tracer()); - CellColor thingColor = gc::detail::GetEffectiveColor(this, markedThing); - markEphemeronEdges(edges, AsMarkColor(thingColor)); -} + MarkColor thingColor = markColor(); + MOZ_ASSERT(CellColor(thingColor) == + gc::detail::GetEffectiveColor(this, markedThing)); -template <> -void GCMarker::markImplicitEdgesHelper(HasNoImplicitEdgesType) {} + markEphemeronEdges(edges, thingColor); -template <typename T> -void GCMarker::markImplicitEdges(T* thing) { - markImplicitEdgesHelper<typename ImplicitEdgeHolderType<T*>::Type>(thing); + if (edges.empty()) { + ephemeronTable.remove(p); + } } template void GCMarker::markImplicitEdges(JSObject*); template void GCMarker::markImplicitEdges(BaseScript*); +#ifdef NIGHTLY_BUILD +template void GCMarker::markImplicitEdges(JS::Symbol*); +#endif } // namespace js @@ -959,6 +886,9 @@ static void TraceEdgeForBarrier(GCMarker* gcmarker, TenuredCell* thing, MOZ_ASSERT(ShouldMark(gcmarker, thing)); CheckTracedThing(gcmarker->tracer(), thing); AutoClearTracingSource acts(gcmarker->tracer()); +#ifdef DEBUG + AutoSetThreadIsMarking threadIsMarking; +#endif // DEBUG gcmarker->markAndTraverse<NormalMarkingOptions>(thing); }); } @@ -1100,6 +1030,11 @@ void GCMarker::traverse(GetterSetter* thing) { } template <uint32_t opts> void GCMarker::traverse(JS::Symbol* thing) { +#ifdef NIGHTLY_BUILD + if constexpr (bool(opts & MarkingOptions::MarkImplicitEdges)) { + markImplicitEdges(thing); + } +#endif traceChildren<opts>(thing); } template <uint32_t opts> @@ -1253,6 +1188,13 @@ bool js::GCMarker::mark(T* thing) { return false; } + // Don't mark symbols if we're not collecting the atoms zone. 
+ if constexpr (std::is_same_v<T, JS::Symbol>) { + if (!thing->zone()->isGCMarkingOrVerifyingPreBarriers()) { + return false; + } + } + AssertShouldMarkInZone(this, thing); MarkColor color = @@ -1465,7 +1407,10 @@ void GCMarker::updateRangesAtStartOfSlice() { for (MarkStackIter iter(stack); !iter.done(); iter.next()) { if (iter.isSlotsOrElementsRange()) { MarkStack::SlotsOrElementsRange& range = iter.slotsOrElementsRange(); - if (range.kind() == SlotsOrElementsKind::Elements) { + JSObject* obj = range.ptr().asRangeObject(); + if (!obj->is<NativeObject>()) { + range.setEmpty(); + } else if (range.kind() == SlotsOrElementsKind::Elements) { NativeObject* obj = &range.ptr().asRangeObject()->as<NativeObject>(); size_t index = range.start(); size_t numShifted = obj->getElementsHeader()->numShiftedElements(); @@ -1736,6 +1681,8 @@ inline MarkStack::TaggedPtr::TaggedPtr(Tag tag, Cell* ptr) assertValid(); } +inline uintptr_t MarkStack::TaggedPtr::asBits() const { return bits; } + inline uintptr_t MarkStack::TaggedPtr::tagUnchecked() const { return bits & TagMask; } @@ -1801,6 +1748,12 @@ inline void MarkStack::SlotsOrElementsRange::setStart(size_t newStart) { MOZ_ASSERT(start() == newStart); } +inline void MarkStack::SlotsOrElementsRange::setEmpty() { + TaggedPtr entry = TaggedPtr(ObjectTag, ptr().asRangeObject()); + ptr_ = entry; + startAndKind_ = entry.asBits(); +} + inline MarkStack::TaggedPtr MarkStack::SlotsOrElementsRange::ptr() const { return ptr_; } diff --git a/js/src/gc/Memory.cpp b/js/src/gc/Memory.cpp index 7d7e22640d..8f35f833ce 100644 --- a/js/src/gc/Memory.cpp +++ b/js/src/gc/Memory.cpp @@ -437,6 +437,7 @@ void* MapAlignedPages(size_t length, size_t alignment) { void* region = nullptr; if (int err = posix_memalign(®ion, alignment, length)) { MOZ_ASSERT(err == ENOMEM); + (void)err; return nullptr; } MOZ_ASSERT(region != nullptr); diff --git a/js/src/gc/Nursery.cpp b/js/src/gc/Nursery.cpp index 4753848c56..e3afd5d055 100644 --- a/js/src/gc/Nursery.cpp +++ b/js/src/gc/Nursery.cpp @@ -613,6 +613,9 @@ void js::Nursery::leaveZealMode() { MOZ_ASSERT(isEmpty()); + // Reset the nursery size. + setCapacity(minSpaceSize()); + toSpace.moveToStartOfChunk(this, 0); toSpace.setStartToCurrentPosition(); @@ -1810,7 +1813,7 @@ void Nursery::requestMinorGC(JS::GCReason reason) { } else if (heapState == JS::HeapState::MajorCollecting) { // The GC runs sweeping tasks that may access the storebuffer in parallel // and these require taking the store buffer lock. 
- MOZ_ASSERT(CurrentThreadIsGCSweeping()); + MOZ_ASSERT(!CurrentThreadIsGCMarking()); runtime()->gc.assertCurrentThreadHasLockedStoreBuffer(); } else { MOZ_CRASH("Unexpected heap state"); diff --git a/js/src/gc/Scheduling.h b/js/src/gc/Scheduling.h index c5ed56dd5f..a48f5397fd 100644 --- a/js/src/gc/Scheduling.h +++ b/js/src/gc/Scheduling.h @@ -545,6 +545,9 @@ static const double HelperThreadRatio = 0.5; /* JSGC_MAX_HELPER_THREADS */ static const size_t MaxHelperThreads = 8; +/* JSGC_MAX_MARKING_THREADS */ +static const size_t MaxMarkingThreads = 2; + } // namespace TuningDefaults /* diff --git a/js/src/gc/Tenuring.cpp b/js/src/gc/Tenuring.cpp index d38a374599..bcd2bd3304 100644 --- a/js/src/gc/Tenuring.cpp +++ b/js/src/gc/Tenuring.cpp @@ -521,7 +521,7 @@ void JSDependentString::sweepTypedAfterMinorGC() { const CharT* newBaseChars = tenuredBase->JSString::nonInlineCharsRaw<CharT>(); relocateNonInlineChars(newBaseChars, offset); - + MOZ_ASSERT(tenuredBase->assertIsValidBase()); d.s.u3.base = tenuredBase; } @@ -1051,6 +1051,8 @@ void js::gc::TenuringTracer::relocateDependentStringChars( tenuredDependentStr->relocateNonInlineChars<const CharT*>( tenuredRootBase->nonInlineChars<CharT>(nogc), *offset); tenuredDependentStr->setBase(tenuredRootBase); + MOZ_ASSERT(tenuredRootBase->assertIsValidBase()); + if (tenuredDependentStr->isTenured() && !tenuredRootBase->isTenured()) { runtime()->gc.storeBuffer().putWholeCell(tenuredDependentStr); } @@ -1077,6 +1079,7 @@ void js::gc::TenuringTracer::relocateDependentStringChars( } tenuredDependentStr->setBase(*rootBase); + MOZ_ASSERT((*rootBase)->assertIsValidBase()); return; } @@ -1138,7 +1141,7 @@ void js::gc::TenuringTracer::collectToStringFixedPoint() { bool rootBaseNotYetForwarded = false; JSLinearString* rootBase = nullptr; - if (str->isDependent()) { + if (str->isDependent() && !str->isAtomRef()) { if (str->hasTwoByteChars()) { relocateDependentStringChars<char16_t>( &str->asDependent(), p->savedNurseryBaseOrRelocOverlay(), &offset, @@ -1173,6 +1176,7 @@ void js::gc::TenuringTracer::collectToStringFixedPoint() { } str->setBase(tenuredRootBase); + MOZ_ASSERT(tenuredRootBase->assertIsValidBase()); if (str->isTenured() && !tenuredRootBase->isTenured()) { runtime()->gc.storeBuffer().putWholeCell(str); } diff --git a/js/src/gc/Tracer.h b/js/src/gc/Tracer.h index 52c60cd27f..186a87d9c5 100644 --- a/js/src/gc/Tracer.h +++ b/js/src/gc/Tracer.h @@ -122,10 +122,12 @@ bool TraceWeakMapKeyInternal(JSTracer* trc, Zone* zone, T* thingp, #ifdef DEBUG void AssertRootMarkingPhase(JSTracer* trc); -void AssertShouldMarkInZone(GCMarker* marker, gc::Cell* thing); +template <typename T> +void AssertShouldMarkInZone(GCMarker* marker, T* thing); #else inline void AssertRootMarkingPhase(JSTracer* trc) {} -inline void AssertShouldMarkInZone(GCMarker* marker, gc::Cell* thing) {} +template <typename T> +void AssertShouldMarkInZone(GCMarker* marker, T* thing) {} #endif } // namespace gc diff --git a/js/src/gc/WeakMap-inl.h b/js/src/gc/WeakMap-inl.h index d7b5feb5a6..b1e52aacc8 100644 --- a/js/src/gc/WeakMap-inl.h +++ b/js/src/gc/WeakMap-inl.h @@ -22,6 +22,7 @@ #include "js/TraceKind.h" #include "vm/JSContext.h" +#include "gc/Marking-inl.h" #include "gc/StableCellHasher-inl.h" namespace js { @@ -184,23 +185,24 @@ bool WeakMap<K, V>::markEntry(GCMarker* marker, gc::CellColor mapColor, K& key, } if (populateWeakKeysTable) { + MOZ_ASSERT(trc->weakMapAction() == JS::WeakMapTraceAction::Expand); + // Note that delegateColor >= keyColor because marking a key marks its // delegate, so 
we only need to check whether keyColor < mapColor to tell // this. - if (keyColor < mapColor) { - MOZ_ASSERT(trc->weakMapAction() == JS::WeakMapTraceAction::Expand); - // The final color of the key is not yet known. Record this weakmap and - // the lookup key in the list of weak keys. If the key has a delegate, - // then the lookup key is the delegate (because marking the key will end - // up marking the delegate and thereby mark the entry.) + // The final color of the key is not yet known. Add an edge to the + // relevant ephemerons table to ensure that the value will be marked if + // the key is marked. If the key has a delegate, also add an edge to + // ensure the key is marked if the delegate is marked. + gc::TenuredCell* tenuredValue = nullptr; if (cellValue && cellValue->isTenured()) { tenuredValue = &cellValue->asTenured(); } - if (!this->addImplicitEdges(AsMarkColor(mapColor), keyCell, delegate, - tenuredValue)) { + if (!this->addEphemeronEdgesForEntry(AsMarkColor(mapColor), keyCell, + delegate, tenuredValue)) { marker->abortLinearWeakMarking(); } } @@ -392,6 +394,24 @@ bool WeakMap<K, V>::checkMarking() const { } #endif +#ifdef JSGC_HASH_TABLE_CHECKS +template <class K, class V> +void WeakMap<K, V>::checkAfterMovingGC() const { + for (Range r = all(); !r.empty(); r.popFront()) { + gc::Cell* key = gc::ToMarkable(r.front().key()); + gc::Cell* value = gc::ToMarkable(r.front().value()); + CheckGCThingAfterMovingGC(key); + if (!allowKeysInOtherZones()) { + Zone* keyZone = key->zoneFromAnyThread(); + MOZ_RELEASE_ASSERT(keyZone == zone() || keyZone->isAtomsZone()); + } + CheckGCThingAfterMovingGC(value, zone()); + auto ptr = lookupUnbarriered(r.front().key()); + MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front()); + } +} +#endif // JSGC_HASH_TABLE_CHECKS + inline HashNumber GetHash(JS::Symbol* sym) { return sym->hash(); } inline bool HashMatch(JS::Symbol* key, JS::Symbol* lookup) { diff --git a/js/src/gc/WeakMap.cpp b/js/src/gc/WeakMap.cpp index 749ea52937..61d6b14e6d 100644 --- a/js/src/gc/WeakMap.cpp +++ b/js/src/gc/WeakMap.cpp @@ -72,42 +72,27 @@ bool WeakMapBase::markMap(MarkColor markColor) { } } -bool WeakMapBase::addImplicitEdges(MarkColor mapColor, Cell* key, - Cell* delegate, TenuredCell* value) { - if (delegate) { - return addEphemeronTableEntries(mapColor, delegate, key, value); +bool WeakMapBase::addEphemeronEdgesForEntry(MarkColor mapColor, Cell* key, + Cell* delegate, + TenuredCell* value) { + if (delegate && !addEphemeronEdge(mapColor, delegate, key)) { + return false; } - if (value) { - return addEphemeronTableEntries(mapColor, key, value, nullptr); + if (value && !addEphemeronEdge(mapColor, key, value)) { + return false; } return true; } -bool WeakMapBase::addEphemeronTableEntries(MarkColor mapColor, gc::Cell* key, - gc::Cell* value1, - gc::Cell* maybeValue2) { - // Add implicit edges from |key| to |value1| and |maybeValue2| if supplied. - // - // Note the key and values do not necessarily correspond to the weak map - // entry's key and value at this point. - - auto& edgeTable = key->zone()->gcEphemeronEdges(key); - auto* ptr = edgeTable.getOrAdd(key); - if (!ptr) { - return false; - } - - if (!ptr->value.emplaceBack(mapColor, value1)) { - return false; - } - - if (maybeValue2 && !ptr->value.emplaceBack(mapColor, maybeValue2)) { - return false; - } +bool WeakMapBase::addEphemeronEdge(MarkColor color, gc::Cell* src, + gc::Cell* dst) { + // Add an implicit edge from |src| to |dst|. 
- return true; + auto& edgeTable = src->zone()->gcEphemeronEdges(src); + auto* ptr = edgeTable.getOrAdd(src); + return ptr && ptr->value.emplaceBack(color, dst); } #if defined(JS_GC_ZEAL) || defined(DEBUG) @@ -126,6 +111,15 @@ bool WeakMapBase::checkMarkingForZone(JS::Zone* zone) { } #endif +#ifdef JSGC_HASH_TABLE_CHECKS +/* static */ +void WeakMapBase::checkWeakMapsAfterMovingGC(JS::Zone* zone) { + for (WeakMapBase* map : zone->gcWeakMapList()) { + map->checkAfterMovingGC(); + } +} +#endif + bool WeakMapBase::markZoneIteratively(JS::Zone* zone, GCMarker* marker) { bool markedAny = false; for (WeakMapBase* m : zone->gcWeakMapList()) { @@ -228,12 +222,3 @@ void ObjectWeakMap::trace(JSTracer* trc) { map.trace(trc); } size_t ObjectWeakMap::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) { return map.shallowSizeOfExcludingThis(mallocSizeOf); } - -#ifdef JSGC_HASH_TABLE_CHECKS -void ObjectWeakMap::checkAfterMovingGC() { - for (ObjectValueWeakMap::Range r = map.all(); !r.empty(); r.popFront()) { - CheckGCThingAfterMovingGC(r.front().key().get()); - CheckGCThingAfterMovingGC(&r.front().value().toObject()); - } -} -#endif // JSGC_HASH_TABLE_CHECKS diff --git a/js/src/gc/WeakMap.h b/js/src/gc/WeakMap.h index 959a6fa57e..76c40749d7 100644 --- a/js/src/gc/WeakMap.h +++ b/js/src/gc/WeakMap.h @@ -140,6 +140,10 @@ class WeakMapBase : public mozilla::LinkedListElement<WeakMapBase> { static bool checkMarkingForZone(JS::Zone* zone); #endif +#ifdef JSGC_HASH_TABLE_CHECKS + static void checkWeakMapsAfterMovingGC(JS::Zone* zone); +#endif + protected: // Instance member functions called by the above. Instantiations of WeakMap // override these with definitions appropriate for their Key and Value types. @@ -150,14 +154,14 @@ class WeakMapBase : public mozilla::LinkedListElement<WeakMapBase> { virtual void clearAndCompact() = 0; // We have a key that, if it or its delegate is marked, may lead to a WeakMap - // value getting marked. Insert it or its delegate (if any) into the - // appropriate zone's gcEphemeronEdges or gcNurseryEphemeronEdges. - [[nodiscard]] bool addImplicitEdges(gc::MarkColor mapColor, gc::Cell* key, - gc::Cell* delegate, - gc::TenuredCell* value); - [[nodiscard]] bool addEphemeronTableEntries(gc::MarkColor mapColor, - gc::Cell* key, gc::Cell* value, - gc::Cell* maybeValue); + // value getting marked. Insert the necessary edges into the appropriate + // zone's gcEphemeronEdges or gcNurseryEphemeronEdges tables. + [[nodiscard]] bool addEphemeronEdgesForEntry(gc::MarkColor mapColor, + gc::Cell* key, + gc::Cell* delegate, + gc::TenuredCell* value); + [[nodiscard]] bool addEphemeronEdge(gc::MarkColor color, gc::Cell* src, + gc::Cell* dst); virtual bool markEntries(GCMarker* marker) = 0; @@ -172,6 +176,10 @@ class WeakMapBase : public mozilla::LinkedListElement<WeakMapBase> { gc::Cell*); #endif +#ifdef JSGC_HASH_TABLE_CHECKS + virtual void checkAfterMovingGC() const = 0; +#endif + // Object that this weak map is part of, if any. HeapPtr<JSObject*> memberOf; @@ -329,6 +337,10 @@ class WeakMap #ifdef JS_GC_ZEAL bool checkMarking() const override; #endif + +#ifdef JSGC_HASH_TABLE_CHECKS + void checkAfterMovingGC() const override; +#endif }; using ObjectValueWeakMap = WeakMap<HeapPtr<JSObject*>, HeapPtr<Value>>; @@ -355,10 +367,6 @@ class ObjectWeakMap { } ObjectValueWeakMap& valueMap() { return map; } - -#ifdef JSGC_HASH_TABLE_CHECKS - void checkAfterMovingGC(); -#endif }; // Get the hash from the Symbol. 
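The WeakMap.cpp and WeakMap.h hunks above replace addImplicitEdges/addEphemeronTableEntries with addEphemeronEdgesForEntry/addEphemeronEdge: for each entry, the map records a delegate -> key edge (when the key has a delegate) and a key -> value edge in the source cell's per-zone gcEphemeronEdges table, each tagged with the map's mark color. The conceptual model below uses standard containers; Cell, Edge and EdgeTable are stand-ins for the real SpiderMonkey types and only illustrate the shape of the bookkeeping, not the actual implementation:

    #include <unordered_map>
    #include <vector>

    enum class MarkColor { Gray, Black };

    struct Cell {};  // stand-in for js::gc::Cell

    struct Edge {
      MarkColor color;  // color contributed by the map
      Cell* target;     // thing to mark if the source cell gets marked
    };

    using EdgeTable = std::unordered_map<Cell*, std::vector<Edge>>;

    // Mirrors addEphemeronEdge: record that marking |src| should also mark
    // |dst|, capped at the map's color. (The real version can fail on OOM.)
    static bool AddEphemeronEdge(EdgeTable& table, MarkColor color, Cell* src,
                                 Cell* dst) {
      table[src].push_back(Edge{color, dst});
      return true;
    }

    // Mirrors addEphemeronEdgesForEntry: delegate -> key, then key -> value.
    static bool AddEdgesForEntry(EdgeTable& table, MarkColor mapColor, Cell* key,
                                 Cell* delegate, Cell* value) {
      if (delegate && !AddEphemeronEdge(table, mapColor, delegate, key)) {
        return false;
      }
      if (value && !AddEphemeronEdge(table, mapColor, key, value)) {
        return false;
      }
      return true;
    }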
diff --git a/js/src/gc/Zone.cpp b/js/src/gc/Zone.cpp index 6437f6f4c3..13324d7e43 100644 --- a/js/src/gc/Zone.cpp +++ b/js/src/gc/Zone.cpp @@ -27,6 +27,7 @@ #include "gc/GC-inl.h" #include "gc/Marking-inl.h" #include "gc/Nursery-inl.h" +#include "gc/StableCellHasher-inl.h" #include "gc/WeakMap-inl.h" #include "vm/JSScript-inl.h" #include "vm/Realm-inl.h" @@ -367,17 +368,12 @@ void Zone::checkAllCrossCompartmentWrappersAfterMovingGC() { } void Zone::checkStringWrappersAfterMovingGC() { - for (StringWrapperMap::Enum e(crossZoneStringWrappers()); !e.empty(); - e.popFront()) { - // Assert that the postbarriers have worked and that nothing is left in the - // wrapper map that points into the nursery, and that the hash table entries - // are discoverable. - auto key = e.front().key(); - CheckGCThingAfterMovingGC(key.get()); - - auto ptr = crossZoneStringWrappers().lookup(key); - MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &e.front()); - } + CheckTableAfterMovingGC(crossZoneStringWrappers(), [this](const auto& entry) { + JSString* key = entry.key().get(); + CheckGCThingAfterMovingGC(key); // Keys may be in a different zone. + CheckGCThingAfterMovingGC(entry.value().unbarrieredGet(), this); + return key; + }); } #endif @@ -546,25 +542,24 @@ void JS::Zone::traceWeakJitScripts(JSTracer* trc) { void JS::Zone::beforeClearDelegateInternal(JSObject* wrapper, JSObject* delegate) { + // 'delegate' is no longer the delegate of 'wrapper'. MOZ_ASSERT(js::gc::detail::GetDelegate(wrapper) == delegate); MOZ_ASSERT(needsIncrementalBarrier()); MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(this)); - runtimeFromMainThread()->gc.marker().severWeakDelegate(wrapper, delegate); -} -void JS::Zone::afterAddDelegateInternal(JSObject* wrapper) { - MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(this)); - JSObject* delegate = js::gc::detail::GetDelegate(wrapper); - if (delegate) { - runtimeFromMainThread()->gc.marker().restoreWeakDelegate(wrapper, delegate); + // If |wrapper| might be a key in a weak map, trigger a barrier to account for + // the removal of the automatically added edge from delegate to wrapper. + if (HasUniqueId(wrapper)) { + PreWriteBarrier(wrapper); } } #ifdef JSGC_HASH_TABLE_CHECKS void JS::Zone::checkUniqueIdTableAfterMovingGC() { - for (auto r = uniqueIds().all(); !r.empty(); r.popFront()) { - js::gc::CheckGCThingAfterMovingGC(r.front().key()); - } + CheckTableAfterMovingGC(uniqueIds(), [this](const auto& entry) { + js::gc::CheckGCThingAfterMovingGC(entry.key(), this); + return entry.key(); + }); } #endif @@ -862,47 +857,42 @@ void Zone::fixupScriptMapsAfterMovingGC(JSTracer* trc) { #ifdef JSGC_HASH_TABLE_CHECKS void Zone::checkScriptMapsAfterMovingGC() { + // |debugScriptMap| is checked automatically because it is a WeakMap.
+ if (scriptCountsMap) { - for (auto r = scriptCountsMap->all(); !r.empty(); r.popFront()) { - BaseScript* script = r.front().key(); - MOZ_ASSERT(script->zone() == this); - CheckGCThingAfterMovingGC(script); - auto ptr = scriptCountsMap->lookup(script); - MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front()); - } + CheckTableAfterMovingGC(*scriptCountsMap, [this](const auto& entry) { + BaseScript* script = entry.key(); + CheckGCThingAfterMovingGC(script, this); + return script; + }); } if (scriptLCovMap) { - for (auto r = scriptLCovMap->all(); !r.empty(); r.popFront()) { - BaseScript* script = r.front().key(); - MOZ_ASSERT(script->zone() == this); - CheckGCThingAfterMovingGC(script); - auto ptr = scriptLCovMap->lookup(script); - MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front()); - } + CheckTableAfterMovingGC(*scriptLCovMap, [this](const auto& entry) { + BaseScript* script = entry.key(); + CheckGCThingAfterMovingGC(script, this); + return script; + }); } # ifdef MOZ_VTUNE if (scriptVTuneIdMap) { - for (auto r = scriptVTuneIdMap->all(); !r.empty(); r.popFront()) { - BaseScript* script = r.front().key(); - MOZ_ASSERT(script->zone() == this); - CheckGCThingAfterMovingGC(script); - auto ptr = scriptVTuneIdMap->lookup(script); - MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front()); - } + CheckTableAfterMovingGC(*scriptVTuneIdMap, [this](const auto& entry) { + BaseScript* script = entry.key(); + CheckGCThingAfterMovingGC(script, this); + return script; + }); } # endif // MOZ_VTUNE # ifdef JS_CACHEIR_SPEW if (scriptFinalWarmUpCountMap) { - for (auto r = scriptFinalWarmUpCountMap->all(); !r.empty(); r.popFront()) { - BaseScript* script = r.front().key(); - MOZ_ASSERT(script->zone() == this); - CheckGCThingAfterMovingGC(script); - auto ptr = scriptFinalWarmUpCountMap->lookup(script); - MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &r.front()); - } + CheckTableAfterMovingGC(*scriptFinalWarmUpCountMap, + [this](const auto& entry) { + BaseScript* script = entry.key(); + CheckGCThingAfterMovingGC(script, this); + return script; + }); } # endif // JS_CACHEIR_SPEW } diff --git a/js/src/gc/Zone.h b/js/src/gc/Zone.h index a5ce161cc4..b47d5b186f 100644 --- a/js/src/gc/Zone.h +++ b/js/src/gc/Zone.h @@ -715,14 +715,7 @@ class Zone : public js::ZoneAllocator, public js::gc::GraphNodeBase<JS::Zone> { } } - void afterAddDelegate(JSObject* wrapper) { - if (needsIncrementalBarrier()) { - afterAddDelegateInternal(wrapper); - } - } - void beforeClearDelegateInternal(JSObject* wrapper, JSObject* delegate); - void afterAddDelegateInternal(JSObject* wrapper); js::gc::EphemeronEdgeTable& gcEphemeronEdges() { return gcEphemeronEdges_.ref(); } |
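For reference, the updateHelperThreadCount() change at the top of the diff derives the marking thread count as min(cpuCount / 2, maxMarkingThreads) and then clamps it against the helper threads actually available, keeping SpareThreadsDuringParallelMarking free for other GC work. A small worked example of that arithmetic with made-up numbers; the spare-thread constant is assumed to be 1 here purely for illustration:

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    int main() {
      std::size_t cpuCount = 8;
      std::size_t maxMarkingThreads = 2;     // the new JSGC_MAX_MARKING_THREADS cap
      std::size_t availableThreadCount = 4;  // helper threads the runtime may use
      std::size_t spare = 1;                 // SpareThreadsDuringParallelMarking (assumed)

      // Target: half the cores, capped by the parameter.
      std::size_t markingThreadCount = std::min(cpuCount / 2, maxMarkingThreads);

      // Clamp so other GC work always has spare helper threads to run on.
      if (availableThreadCount < spare) {
        markingThreadCount = 1;
      } else {
        markingThreadCount =
            std::min(markingThreadCount, availableThreadCount - spare);
      }

      printf("markingThreadCount = %zu\n", markingThreadCount);  // prints 2
      return 0;
    }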
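The GCLock.h hunk drops the hand-written AutoUnlockGC in favour of "using AutoUnlockGC = UnlockGuard<AutoLockGC>;" and befriends UnlockGuard<AutoLockGC> so it can reach the private lock()/unlock() methods. The real UnlockGuard lives elsewhere in the tree; the sketch below only shows the shape of the RAII pattern being reused and is not the actual implementation:

    // Minimal sketch, assuming a Lock type with private lock()/unlock() that
    // befriends UnlockGuard, mirroring the removed AutoUnlockGC class.
    template <typename Lock>
    class UnlockGuard {
     public:
      explicit UnlockGuard(Lock& lock) : lock_(lock) { lock_.unlock(); }
      ~UnlockGuard() { lock_.lock(); }

      UnlockGuard(const UnlockGuard&) = delete;
      UnlockGuard& operator=(const UnlockGuard&) = delete;

     private:
      Lock& lock_;
    };

    // Usage matches the old class: UnlockGuard<AutoLockGC> unlock(lock);
    // temporarily releases the GC lock for the enclosing scope.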