1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
|
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: set ts=8 sts=2 et sw=2 tw=80:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef gc_GCMarker_h
#define gc_GCMarker_h
#include "mozilla/Maybe.h"
#include "mozilla/Unused.h"
#include "ds/OrderedHashTable.h"
#include "gc/Barrier.h"
#include "js/SliceBudget.h"
#include "js/TracingAPI.h"
#include "js/TypeDecls.h"
class JSRope;
namespace js {
class AutoAccessAtomsZone;
class WeakMapBase;
// Base capacities (in entries) for the mark stacks. Which one applies depends
// on the stack type and on whether incremental GC is enabled; see
// MarkStack::setStackCapacity().
static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096;
static const size_t INCREMENTAL_MARK_STACK_BASE_CAPACITY = 32768;
// Base capacity for the small auxiliary stack.
static const size_t SMALL_MARK_STACK_BASE_CAPACITY = 256;

// Which kind of object storage a SlotsOrElementsRange mark stack entry
// refers to.
enum class SlotsOrElementsKind { Elements, FixedSlots, DynamicSlots };
namespace gc {
enum IncrementalProgress { NotFinished = 0, Finished };
struct Cell;
// Hash policy for the weak keys table: keys are Cell pointers, hashed through
// a runtime scrambler, with nullptr reserved as the "empty" sentinel.
struct WeakKeyTableHashPolicy {
  using Lookup = Cell*;

  static HashNumber hash(const Lookup& cell,
                         const mozilla::HashCodeScrambler& scrambler) {
    return scrambler.scramble(mozilla::HashGeneric(cell));
  }

  // Keys match on pointer identity.
  static bool match(Cell* const& key, const Lookup& lookup) {
    return key == lookup;
  }

  static bool isEmpty(Cell* const& entry) { return entry == nullptr; }
  static void makeEmpty(Cell** entry) { *entry = nullptr; }
};
// A (weakmap, key) pair. Compared member-wise; two WeakMarkables are equal
// only when both the weakmap and the key pointers match.
struct WeakMarkable {
  WeakMapBase* weakmap;
  Cell* key;

  WeakMarkable(WeakMapBase* map, Cell* cell) : weakmap(map), key(cell) {}

  bool operator==(const WeakMarkable& rhs) const {
    return weakmap == rhs.weakmap && key == rhs.key;
  }
};
// The weakmap entries associated with a single key cell.
using WeakEntryVector = Vector<WeakMarkable, 2, js::SystemAllocPolicy>;

// Maps a cell (presumably a weakmap key or its delegate — confirm against
// users) to the weakmap entries that mention it.
using WeakKeyTable =
    OrderedHashMap<Cell*, WeakEntryVector, WeakKeyTableHashPolicy,
                   js::SystemAllocPolicy>;
/*
* When the mark stack is full, the GC does not call js::TraceChildren to mark
* the reachable "children" of the thing. Rather the thing is put aside and
* js::TraceChildren is called later when the mark stack is empty.
*
* To implement such delayed marking of the children with minimal overhead for
* the normal case of sufficient stack, we link arenas into a list using
* Arena::setNextDelayedMarkingArena(). The head of the list is stored in
* GCMarker::delayedMarkingList. GCMarker::delayMarkingChildren() adds arenas
* to the list as necessary while markAllDelayedChildren() pops the arenas from
 * the list until it is empty.
*/
/*
 * The stack of GC things still to be marked, used by GCMarker.
 *
 * Entries are tagged pointers; a SlotsOrElementsRange entry additionally
 * carries a start index and kind packed into a second word.
 */
class MarkStack {
 public:
  /*
   * We use a common mark stack to mark GC things of different types and use
   * the explicit tags to distinguish them when it cannot be deduced from
   * the context of push or pop operation.
   */
  enum Tag {
    SlotsOrElementsRangeTag,
    ObjectTag,
    GroupTag,
    JitCodeTag,
    ScriptTag,
    TempRopeTag,
    LastTag = TempRopeTag
  };

  // Tags live in the low bits of the pointer, so the mask must cover every
  // tag value yet still fit in a Cell's alignment padding.
  static const uintptr_t TagMask = 7;
  static_assert(TagMask >= uintptr_t(LastTag),
                "The tag mask must subsume the tags.");
  static_assert(TagMask <= gc::CellAlignMask,
                "The tag mask must be embeddable in a Cell*.");

  // A Cell* with a Tag packed into its low bits.
  class TaggedPtr {
    uintptr_t bits;

    Cell* ptr() const;

   public:
    TaggedPtr() = default;
    TaggedPtr(Tag tag, Cell* ptr);
    Tag tag() const;
    template <typename T>
    T* as() const;

    JSObject* asRangeObject() const;
    JSRope* asTempRope() const;

    void assertValid() const;
  };

  // Describes a range of an object's slots or elements still to be marked.
  // The start index and the SlotsOrElementsKind share one word: the kind in
  // the low KindMask bits, the start index shifted up by StartShift.
  struct SlotsOrElementsRange {
    SlotsOrElementsRange(SlotsOrElementsKind kind, JSObject* obj, size_t start);
    void assertValid() const;

    SlotsOrElementsKind kind() const;
    size_t start() const;
    TaggedPtr ptr() const;

    static constexpr size_t StartShift = 2;
    static constexpr size_t KindMask = (1 << StartShift) - 1;

   private:
    uintptr_t startAndKind_;
    TaggedPtr ptr_;
  };

  explicit MarkStack(size_t maxCapacity = DefaultCapacity);
  ~MarkStack();

  // By default the stack may grow without bound (up to allocation failure).
  static const size_t DefaultCapacity = SIZE_MAX;

  // The unit for MarkStack::capacity() is mark stack entries.
  //
  // Const-qualified: this is a read-only query and a const overload of
  // stack() is available, matching position()/isEmpty()/maxCapacity().
  size_t capacity() const { return stack().length(); }

  size_t position() const { return topIndex_; }

  enum StackType { MainStack, AuxiliaryStack };
  MOZ_MUST_USE bool init(StackType which, bool incrementalGCEnabled);
  MOZ_MUST_USE bool setStackCapacity(StackType which,
                                     bool incrementalGCEnabled);

  size_t maxCapacity() const { return maxCapacity_; }
  void setMaxCapacity(size_t maxCapacity);

  template <typename T>
  MOZ_MUST_USE bool push(T* ptr);

  MOZ_MUST_USE bool push(JSObject* obj, SlotsOrElementsKind kind, size_t start);
  MOZ_MUST_USE bool push(const SlotsOrElementsRange& array);

  // GCMarker::eagerlyMarkChildren uses unused marking stack as temporary
  // storage to hold rope pointers.
  MOZ_MUST_USE bool pushTempRope(JSRope* ptr);

  bool isEmpty() const { return topIndex_ == 0; }

  Tag peekTag() const;
  TaggedPtr popPtr();
  SlotsOrElementsRange popSlotsOrElementsRange();

  void clear() {
    // Fall back to the smaller initial capacity so we don't hold on to excess
    // memory between GCs.
    stack().clearAndFree();
    mozilla::Unused << stack().resize(NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY);
    topIndex_ = 0;
  }

  void poisonUnused();

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;

 private:
  using StackVector = Vector<TaggedPtr, 0, SystemAllocPolicy>;
  const StackVector& stack() const { return stack_.ref(); }
  StackVector& stack() { return stack_.ref(); }

  MOZ_MUST_USE bool ensureSpace(size_t count);

  /* Grow the stack, ensuring there is space for at least count elements. */
  MOZ_MUST_USE bool enlarge(size_t count);

  MOZ_MUST_USE bool resize(size_t newCapacity);

  TaggedPtr* topPtr();

  const TaggedPtr& peekPtr() const;
  MOZ_MUST_USE bool pushTaggedPtr(Tag tag, Cell* ptr);

  // Index of the top of the stack.
  MainThreadOrGCTaskData<size_t> topIndex_;

  // The maximum stack capacity to grow to.
  MainThreadOrGCTaskData<size_t> maxCapacity_;

  // Vector containing allocated stack memory. Unused beyond topIndex_.
  MainThreadOrGCTaskData<StackVector> stack_;

#ifdef DEBUG
  // Number of live MarkStackIter instances over this stack (debug only).
  mutable size_t iteratorCount_;
#endif

  friend class MarkStackIter;
};
// Non-destructive iterator over a MarkStack's entries. Construction and
// destruction presumably maintain the stack's debug iteratorCount_ (it is a
// friend of MarkStack) — bodies are not visible here; confirm in the .cpp.
class MarkStackIter {
  MarkStack& stack_;
  size_t pos_;

 public:
  explicit MarkStackIter(MarkStack& stack);
  ~MarkStackIter();

  // True once every entry has been visited.
  bool done() const;
  MarkStack::Tag peekTag() const;
  MarkStack::TaggedPtr peekPtr() const;
  MarkStack::SlotsOrElementsRange peekSlotsOrElementsRange() const;
  // Advance past the current entry, whichever kind it is.
  void next();
  // Advance past a single tagged-pointer entry.
  void nextPtr();
  // Advance past a slots/elements range entry.
  void nextArray();

 private:
  size_t position() const;
};
} /* namespace gc */
// Overall state of the marker; tracked by GCMarker::state and queried by
// GCMarker::isActive() / isWeakMarking().
enum MarkingState : uint8_t {
  // Have not yet started marking.
  NotActive,

  // Main marking mode. Weakmap marking will be populating the weakKeys tables
  // but not consulting them. The state will transition to WeakMarking until it
  // is done, then back to RegularMarking.
  RegularMarking,

  // Same as RegularMarking except now every marked obj/script is immediately
  // looked up in the weakKeys table to see if it is a weakmap key, and
  // therefore might require marking its value. Transitions back to
  // RegularMarking when done.
  WeakMarking,

  // Same as RegularMarking, but we OOMed (or obeyed a directive in the test
  // marking queue) and fell back to iterating until the next GC.
  IterativeMarking
};
/*
 * The tracer used for GC marking. Owns the main and auxiliary mark stacks and
 * the delayed-marking arena list, and tracks the current mark color and the
 * weak (ephemeron) marking state.
 */
class GCMarker final : public JSTracer {
 public:
  explicit GCMarker(JSRuntime* rt);
  MOZ_MUST_USE bool init();

  // Forwarded to the main mark stack's capacity limit.
  void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); }
  size_t maxCapacity() const { return stack.maxCapacity(); }

  // True between start() and stop(), i.e. whenever marking is underway.
  bool isActive() const { return state != MarkingState::NotActive; }

  void start();
  void stop();
  void reset();

  // Mark the given GC thing and traverse its children at some point.
  template <typename T>
  void traverse(T thing);

  // Calls traverse on target after making additional assertions.
  template <typename S, typename T>
  void traverseEdge(S source, T* target);
  template <typename S, typename T>
  void traverseEdge(S source, const T& target);

  // Helper methods that coerce their second argument to the base pointer
  // type.
  template <typename S>
  void traverseObjectEdge(S source, JSObject* target) {
    traverseEdge(source, target);
  }
  template <typename S>
  void traverseStringEdge(S source, JSString* target) {
    traverseEdge(source, target);
  }

  template <typename S, typename T>
  void checkTraversedEdge(S source, T* target);

#ifdef DEBUG
  // We can't check atom marking if the helper thread lock is already held by
  // the current thread. This allows us to disable the check.
  void setCheckAtomMarking(bool check);
#endif

  /*
   * Care must be taken changing the mark color from gray to black. The cycle
   * collector depends on the invariant that there are no black to gray edges
   * in the GC heap. This invariant lets the CC not trace through black
   * objects. If this invariant is violated, the cycle collector may free
   * objects that are still reachable.
   */
  void setMarkColor(gc::MarkColor newColor);
  void setMarkColorUnchecked(gc::MarkColor newColor);
  gc::MarkColor markColor() const { return color; }

  // Declare which color the main mark stack will be used for. The whole stack
  // must be empty when this is called.
  void setMainStackColor(gc::MarkColor newColor);

  // Enter/exit the linear-time weakmap marking mode (MarkingState::WeakMarking).
  bool enterWeakMarkingMode();
  void leaveWeakMarkingMode();

  // Do not use linear-time weak marking for the rest of this collection.
  // Currently, this will only be triggered by an OOM when updating needed data
  // structures.
  void abortLinearWeakMarking() {
    if (state == MarkingState::WeakMarking) {
      leaveWeakMarkingMode();
    }
    state = MarkingState::IterativeMarking;
  }

  // Defer marking of |cell|'s children (used when the mark stack is full;
  // see the delayed-marking comment above MarkStack).
  void delayMarkingChildren(gc::Cell* cell);

  // Remove <map,toRemove> from the weak keys table indexed by 'key'.
  void forgetWeakKey(js::gc::WeakKeyTable& weakKeys, WeakMapBase* map,
                     gc::Cell* keyOrDelegate, gc::Cell* keyToRemove);

  // Purge all mention of 'map' from the weak keys table.
  void forgetWeakMap(WeakMapBase* map, Zone* zone);

  // 'delegate' is no longer the delegate of 'key'.
  void severWeakDelegate(JSObject* key, JSObject* delegate);

  // 'delegate' is now the delegate of 'key'. Update weakmap marking state.
  void restoreWeakDelegate(JSObject* key, JSObject* delegate);

  // Nothing left to do: both mark stacks are empty and no arenas await
  // delayed marking.
  bool isDrained() { return isMarkStackEmpty() && !delayedMarkingList; }

  // The mark queue is a testing-only feature for controlling mark ordering and
  // yield timing.
  enum MarkQueueProgress {
    QueueYielded,   // End this incremental GC slice, if possible
    QueueComplete,  // Done with the queue
    QueueSuspended  // Continue the GC without ending the slice
  };
  MarkQueueProgress processMarkQueue();

  enum ShouldReportMarkTime : bool {
    ReportMarkTime = true,
    DontReportMarkTime = false
  };
  MOZ_MUST_USE bool markUntilBudgetExhausted(
      SliceBudget& budget, ShouldReportMarkTime reportTime = ReportMarkTime);

  void setIncrementalGCEnabled(bool enabled) {
    // Ignore failure to resize the stack and keep using the existing stack.
    mozilla::Unused << stack.setStackCapacity(gc::MarkStack::MainStack,
                                              enabled);
  }

  size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;

#ifdef DEBUG
  bool shouldCheckCompartments() { return strictCompartmentChecking; }
#endif

  // Mark the values whose keys are |markedCell| in the given weak entries.
  void markEphemeronValues(gc::Cell* markedCell, gc::WeakEntryVector& entry);

  size_t getMarkCount() const { return markCount; }
  void clearMarkCount() { markCount = 0; }

  // Downcast from a JSTracer known (and asserted) to be a marking tracer.
  static GCMarker* fromTracer(JSTracer* trc) {
    MOZ_ASSERT(trc->isMarkingTracer());
    return static_cast<GCMarker*>(trc);
  }

  template <typename T>
  void markImplicitEdges(T* oldThing);

  bool isWeakMarking() const { return state == MarkingState::WeakMarking; }

 private:
#ifdef DEBUG
  void checkZone(void* p);
#else
  // No-op outside debug builds.
  void checkZone(void* p) {}
#endif

  // Push an object onto the stack for later tracing and assert that it has
  // already been marked.
  inline void repush(JSObject* obj);

  template <typename T>
  void markAndTraceChildren(T* thing);
  template <typename T>
  void markAndPush(T* thing);
  template <typename T>
  void markAndScan(T* thing);
  template <typename T>
  void markImplicitEdgesHelper(T oldThing);
  // Per-type eager marking: children are traced immediately rather than being
  // pushed onto the mark stack.
  void eagerlyMarkChildren(JSLinearString* str);
  void eagerlyMarkChildren(JSRope* rope);
  void eagerlyMarkChildren(JSString* str);
  void eagerlyMarkChildren(Shape* shape);
  void eagerlyMarkChildren(Scope* scope);
  void lazilyMarkChildren(ObjectGroup* group);

  // We may not have concrete types yet, so this has to be outside the header.
  template <typename T>
  void dispatchToTraceChildren(T* thing);

  // Mark the given GC thing, but do not trace its children. Return true
  // if the thing became marked.
  template <typename T>
  MOZ_MUST_USE bool mark(T* thing);

  template <typename T>
  inline void pushTaggedPtr(T* ptr);

  inline void pushValueRange(JSObject* obj, SlotsOrElementsKind kind,
                             size_t start, size_t end);

  // Drained only when both the main and the auxiliary stacks are empty.
  bool isMarkStackEmpty() { return stack.isEmpty() && auxStack.isEmpty(); }

  bool hasBlackEntries() const {
    return !getStack(gc::MarkColor::Black).isEmpty();
  }

  bool hasGrayEntries() const {
    return !getStack(gc::MarkColor::Gray).isEmpty();
  }

  inline void processMarkStackTop(SliceBudget& budget);

  // Delayed-marking machinery; see the comment above MarkStack.
  void markDelayedChildren(gc::Arena* arena, gc::MarkColor color);
  MOZ_MUST_USE bool markAllDelayedChildren(SliceBudget& budget);
  bool processDelayedMarkingList(gc::MarkColor color, SliceBudget& budget);
  bool hasDelayedChildren() const { return !!delayedMarkingList; }
  void rebuildDelayedMarkingList();
  void appendToDelayedMarkingList(gc::Arena** listTail, gc::Arena* arena);

  template <typename F>
  void forEachDelayedMarkingArena(F&& f);

  /*
   * The mark stack. Pointers in this stack are "gray" in the GC sense, but may
   * mark the contained items either black or gray (in the CC sense) depending
   * on mainStackColor.
   */
  gc::MarkStack stack;

  /*
   * A smaller, auxiliary stack, currently only used to accumulate the rare
   * objects that need to be marked black during gray marking.
   */
  gc::MarkStack auxStack;

  /* The color is only applied to objects and functions. */
  MainThreadOrGCTaskData<gc::MarkColor> color;

  /* Which color the main stack holds; getStack() keys off this. */
  MainThreadOrGCTaskData<gc::MarkColor> mainStackColor;

  /* The stack currently in use; see currentStack(). */
  MainThreadOrGCTaskData<gc::MarkStack*> currentStackPtr;

  // Select main or auxiliary stack by comparing |which| to mainStackColor.
  gc::MarkStack& getStack(gc::MarkColor which) {
    return which == mainStackColor ? stack : auxStack;
  }
  const gc::MarkStack& getStack(gc::MarkColor which) const {
    return which == mainStackColor ? stack : auxStack;
  }

  gc::MarkStack& currentStack() {
    MOZ_ASSERT(currentStackPtr);
    return *currentStackPtr;
  }

  /* Pointer to the top of the stack of arenas we are delaying marking on. */
  MainThreadOrGCTaskData<js::gc::Arena*> delayedMarkingList;

  /* Whether more work has been added to the delayed marking list. */
  MainThreadOrGCTaskData<bool> delayedMarkingWorkAdded;

  /* The count of marked objects during GC. */
  size_t markCount;

  /* Track the state of marking. */
  MainThreadOrGCTaskData<MarkingState> state;

 public:
  /*
   * Whether weakmaps can be marked incrementally.
   *
   * JSGC_INCREMENTAL_WEAKMAP_ENABLED
   * pref: javascript.options.mem.incremental_weakmap
   */
  MainThreadOrGCTaskData<bool> incrementalWeakMapMarkingEnabled;

#ifdef DEBUG
 private:
  /* Count of arenas that are currently in the stack. */
  MainThreadOrGCTaskData<size_t> markLaterArenas;

  /* Assert that start and stop are called with correct ordering. */
  MainThreadOrGCTaskData<bool> started;

  /*
   * Whether to check that atoms traversed are present in atom marking
   * bitmap.
   */
  MainThreadOrGCTaskData<bool> checkAtomMarking;

  /* The test marking queue might want to be marking a particular color. */
  mozilla::Maybe<js::gc::MarkColor> queueMarkColor;

  /*
   * If this is true, all marked objects must belong to a compartment being
   * GCed. This is used to look for compartment bugs.
   */
  MainThreadOrGCTaskData<bool> strictCompartmentChecking;

 public:
  /*
   * The compartment and zone of the object whose trace hook is currently being
   * called, if any. Used to catch cross-compartment edges traced without use of
   * TraceCrossCompartmentEdge.
   */
  MainThreadOrGCTaskData<Compartment*> tracingCompartment;
  MainThreadOrGCTaskData<Zone*> tracingZone;

  /*
   * List of objects to mark at the beginning of a GC. May also contains string
   * directives to change mark color or wait until different phases of the GC.
   *
   * This is a WeakCache because not everything in this list is guaranteed to
   * end up marked (eg if you insert an object from an already-processed sweep
   * group in the middle of an incremental GC). Also, the mark queue is not
   * used during shutdown GCs. In either case, unmarked objects may need to be
   * discarded.
   */
  JS::WeakCache<GCVector<JS::Heap<JS::Value>, 0, SystemAllocPolicy>> markQueue;

  /* Position within the test mark queue. */
  size_t queuePos;
#endif  // DEBUG
};
namespace gc {
/*
* Temporarily change the mark color while this class is on the stack.
*
* During incremental sweeping this also transitions zones in the
* current sweep group into the Mark or MarkGray state as appropriate.
*/
class MOZ_RAII AutoSetMarkColor {
GCMarker& marker_;
MarkColor initialColor_;
public:
AutoSetMarkColor(GCMarker& marker, MarkColor newColor)
: marker_(marker), initialColor_(marker.markColor()) {
marker_.setMarkColor(newColor);
}
AutoSetMarkColor(GCMarker& marker, CellColor newColor)
: AutoSetMarkColor(marker, newColor.asMarkColor()) {}
~AutoSetMarkColor() { marker_.setMarkColor(initialColor_); }
};
} /* namespace gc */
} /* namespace js */
#endif /* gc_GCMarker_h */
|