1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
|
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: set ts=8 sts=2 et sw=2 tw=80:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef gc_Cell_h
#define gc_Cell_h
#include "mozilla/Atomics.h"
#include "mozilla/EndianUtils.h"
#include <type_traits>
#include "gc/GCEnum.h"
#include "gc/Heap.h"
#include "js/GCAnnotations.h"
#include "js/shadow/Zone.h" // JS::shadow::Zone
#include "js/TraceKind.h"
#include "js/TypeDecls.h"
namespace JS {
enum class TraceKind;
} /* namespace JS */
namespace js {
class GenericPrinter;
extern bool RuntimeFromMainThreadIsHeapMajorCollecting(
JS::shadow::Zone* shadowZone);
#ifdef DEBUG
// Barriers can't be triggered during backend Ion compilation, which may run on
// a helper thread.
extern bool CurrentThreadIsIonCompiling();
extern bool CurrentThreadIsGCMarking();
extern bool CurrentThreadIsGCSweeping();
extern bool CurrentThreadIsGCFinalizing();
extern bool RuntimeIsVerifyingPreBarriers(JSRuntime* runtime);
#endif
extern void TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc,
gc::Cell** thingp,
const char* name);
namespace gc {
class Arena;
enum class AllocKind : uint8_t;
class StoreBuffer;
class TenuredCell;
extern void UnmarkGrayGCThingRecursively(TenuredCell* cell);
// Like gc::MarkColor but allows the possibility of the cell being unmarked.
//
// This class mimics an enum class, but supports operator overloading.
// This class mimics an enum class, but supports operator overloading.
class CellColor {
 public:
  enum Color { White = 0, Gray = 1, Black = 2 };

  // Default-construct as unmarked.
  CellColor() : color(White) {}

  // MarkColor has no notion of 'unmarked', so this maps onto Gray/Black only.
  MOZ_IMPLICIT CellColor(MarkColor markColor)
      : color(markColor == MarkColor::Black ? Black : Gray) {}

  MOZ_IMPLICIT constexpr CellColor(Color c) : color(c) {}

  // Convert back to a MarkColor. Not valid for unmarked (white) cells.
  MarkColor asMarkColor() const {
    MOZ_ASSERT(color != White);
    if (color == Black) {
      return MarkColor::Black;
    }
    return MarkColor::Gray;
  }

  // Implement a total ordering for CellColor, with white being 'least marked'
  // and black being 'most marked'. All comparisons are expressed in terms of
  // the underlying enum's ordering.
  bool operator<(const CellColor other) const { return color < other.color; }
  bool operator>(const CellColor other) const { return other.color < color; }
  bool operator<=(const CellColor other) const { return !(other.color < color); }
  bool operator>=(const CellColor other) const { return !(color < other.color); }
  bool operator==(const CellColor other) const { return color == other.color; }
  bool operator!=(const CellColor other) const { return !(color == other.color); }

  // True for any marked color (gray or black).
  explicit operator bool() const { return color != White; }

#if defined(JS_GC_ZEAL) || defined(DEBUG)
  // Human-readable color name, for debug output.
  const char* name() const {
    if (color == White) {
      return "white";
    }
    if (color == Gray) {
      return "gray";
    }
    if (color == Black) {
      return "black";
    }
    MOZ_CRASH("Unexpected cell color");
  }
#endif

 private:
  Color color;
};
// [SMDOC] GC Cell
//
// A GC cell is the ultimate base class for all GC things. All types allocated
// on the GC heap extend either gc::Cell or gc::TenuredCell. If a type is always
// tenured, prefer the TenuredCell class as base.
//
// The first word of Cell is a uintptr_t that reserves the low three bits for GC
// purposes. The remaining bits are available to sub-classes and can be used to
// store a pointer to another gc::Cell. It can also be used for temporary
// storage (see setTemporaryGCUnsafeData). To make use of the remaining space,
// sub-classes derive from a helper class such as TenuredCellWithNonGCPointer.
//
// During moving GC operation a Cell may be marked as forwarded. This indicates
// that a gc::RelocationOverlay is currently stored in the Cell's memory and
// should be used to find the new location of the Cell.
struct Cell {
 protected:
  // Cell header word. Stores GC flags and derived class data.
  //
  // This is atomic since it can be read from and written to by different
  // threads during compacting GC, in a limited way. Specifically, writes that
  // update the derived class data can race with reads that check the forwarded
  // flag. The writes do not change the forwarded flag (which is always false in
  // this situation).
  mozilla::Atomic<uintptr_t, mozilla::MemoryOrdering::Relaxed> header_;

 public:
  static_assert(gc::CellFlagBitsReservedForGC >= 3,
                "Not enough flag bits reserved for GC");

  // Mask selecting the low header bits that are reserved for GC use.
  static constexpr uintptr_t RESERVED_MASK =
      BitMask(gc::CellFlagBitsReservedForGC);

  // Indicates whether the cell has been forwarded (moved) by generational or
  // compacting GC and is now a RelocationOverlay.
  static constexpr uintptr_t FORWARD_BIT = Bit(0);

  // Bits 1 and 2 are currently unused.

  bool isForwarded() const { return header_ & FORWARD_BIT; }

  // The GC-reserved low bits of the header word.
  uintptr_t flags() const { return header_ & RESERVED_MASK; }

  // A cell is tenured iff it is not stored in the nursery.
  MOZ_ALWAYS_INLINE bool isTenured() const { return !IsInsideNursery(this); }

  // Downcast to TenuredCell. Asserts the cell is not in the nursery.
  MOZ_ALWAYS_INLINE const TenuredCell& asTenured() const;
  MOZ_ALWAYS_INLINE TenuredCell& asTenured();

  // Mark state queries. The out-of-line definitions below treat nursery cells
  // as marked black.
  MOZ_ALWAYS_INLINE bool isMarkedAny() const;
  MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
  MOZ_ALWAYS_INLINE bool isMarkedGray() const;
  MOZ_ALWAYS_INLINE bool isMarked(gc::MarkColor color) const;
  MOZ_ALWAYS_INLINE bool isMarkedAtLeast(gc::MarkColor color) const;

  // Current color (white/gray/black), derived from the mark state.
  MOZ_ALWAYS_INLINE CellColor color() const {
    return isMarkedBlack()  ? CellColor::Black
           : isMarkedGray() ? CellColor::Gray
                            : CellColor::White;
  }

  inline JSRuntime* runtimeFromMainThread() const;

  // Note: Unrestricted access to the runtime of a GC thing from an arbitrary
  // thread can easily lead to races. Use this method very carefully.
  inline JSRuntime* runtimeFromAnyThread() const;

  // May be overridden by GC thing kinds that have a compartment pointer.
  inline JS::Compartment* maybeCompartment() const { return nullptr; }

  // The StoreBuffer used to record incoming pointers from the tenured heap.
  // This will return nullptr for a tenured cell.
  inline StoreBuffer* storeBuffer() const;

  inline JS::TraceKind getTraceKind() const;

  // Whether writes into |zone| currently require a pre-write barrier.
  static MOZ_ALWAYS_INLINE bool needPreWriteBarrier(JS::Zone* zone);

  // Trace-kind-checked type test and downcasts.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline bool is() const {
    return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
  }

  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline T* as() {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<T*>(this);
  }

  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline const T* as() const {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<const T*>(this);
  }

  inline JS::Zone* zone() const;
  inline JS::Zone* zoneFromAnyThread() const;

  // Get the zone for a cell known to be in the nursery.
  inline JS::Zone* nurseryZone() const;
  inline JS::Zone* nurseryZoneFromAnyThread() const;

  // Default implementation for kinds that cannot be permanent. This may be
  // overridden by derived classes.
  MOZ_ALWAYS_INLINE bool isPermanentAndMayBeShared() const { return false; }

#ifdef DEBUG
  static inline void assertThingIsNotGray(Cell* cell);
  inline bool isAligned() const;
  void dump(GenericPrinter& out) const;
  void dump() const;
#endif

 protected:
  uintptr_t address() const;
  inline TenuredChunk* chunk() const;

 private:
  // Cells are destroyed by the GC. Do not delete them directly.
  void operator delete(void*) = delete;
} JS_HAZ_GC_THING;
// A GC TenuredCell gets behaviors that are valid for things in the Tenured
// heap, such as access to the arena and mark bits.
// A GC TenuredCell gets behaviors that are valid for things in the Tenured
// heap, such as access to the arena and mark bits.
class TenuredCell : public Cell {
 public:
  // A TenuredCell is never in the nursery, by definition.
  MOZ_ALWAYS_INLINE bool isTenured() const {
    MOZ_ASSERT(!IsInsideNursery(this));
    return true;
  }

  // Mark bit management.
  MOZ_ALWAYS_INLINE bool isMarkedAny() const;
  MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
  MOZ_ALWAYS_INLINE bool isMarkedGray() const;

  // Same as Cell::color, but skips nursery checks.
  MOZ_ALWAYS_INLINE CellColor color() const {
    return isMarkedBlack()  ? CellColor::Black
           : isMarkedGray() ? CellColor::Gray
                            : CellColor::White;
  }

  // The return value indicates if the cell went from unmarked to marked.
  MOZ_ALWAYS_INLINE bool markIfUnmarked(
      MarkColor color = MarkColor::Black) const;
  MOZ_ALWAYS_INLINE void markBlack() const;
  MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const TenuredCell* src);
  MOZ_ALWAYS_INLINE void unmark();

  // Access to the arena this cell is allocated in, and properties derived
  // from it (alloc kind, trace kind, zone).
  inline Arena* arena() const;
  inline AllocKind getAllocKind() const;
  inline JS::TraceKind getTraceKind() const;
  inline JS::Zone* zone() const;
  inline JS::Zone* zoneFromAnyThread() const;
  inline bool isInsideZone(JS::Zone* zone) const;

  MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZone() const {
    return JS::shadow::Zone::from(zone());
  }
  MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZoneFromAnyThread() const {
    return JS::shadow::Zone::from(zoneFromAnyThread());
  }

  // Trace-kind-checked type test and downcasts.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline bool is() const {
    return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
  }
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline T* as() {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<T*>(this);
  }
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  inline const T* as() const {
    // |this|-qualify the |is| call below to avoid compile errors with even
    // fairly recent versions of gcc, e.g. 7.1.1 according to bz.
    MOZ_ASSERT(this->is<T>());
    return static_cast<const T*>(this);
  }

  // Default implementation for kinds that don't require fixup.
  void fixupAfterMovingGC() {}

#ifdef DEBUG
  inline bool isAligned() const;
#endif
};
// Downcast to TenuredCell; only valid when the cell is not in the nursery.
MOZ_ALWAYS_INLINE const TenuredCell& Cell::asTenured() const {
  MOZ_ASSERT(isTenured());
  const auto* tenured = static_cast<const TenuredCell*>(this);
  return *tenured;
}

MOZ_ALWAYS_INLINE TenuredCell& Cell::asTenured() {
  MOZ_ASSERT(isTenured());
  auto* tenured = static_cast<TenuredCell*>(this);
  return *tenured;
}
// Mark state queries. Nursery cells are treated as marked black; only
// tenured cells consult the mark bitmap.
MOZ_ALWAYS_INLINE bool Cell::isMarkedAny() const {
  if (!isTenured()) {
    return true;
  }
  return asTenured().isMarkedAny();
}

MOZ_ALWAYS_INLINE bool Cell::isMarkedBlack() const {
  if (!isTenured()) {
    return true;
  }
  return asTenured().isMarkedBlack();
}

MOZ_ALWAYS_INLINE bool Cell::isMarkedGray() const {
  if (!isTenured()) {
    return false;
  }
  return asTenured().isMarkedGray();
}

MOZ_ALWAYS_INLINE bool Cell::isMarked(gc::MarkColor color) const {
  if (color == MarkColor::Gray) {
    return isMarkedGray();
  }
  return isMarkedBlack();
}

MOZ_ALWAYS_INLINE bool Cell::isMarkedAtLeast(gc::MarkColor color) const {
  if (color == MarkColor::Gray) {
    return isMarkedAny();
  }
  return isMarkedBlack();
}
// Runtime access. The main-thread variant asserts the calling thread is
// allowed to touch the runtime; the any-thread variant performs no check.
inline JSRuntime* Cell::runtimeFromMainThread() const {
  JSRuntime* runtime = chunk()->runtime;
  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime));
  return runtime;
}

inline JSRuntime* Cell::runtimeFromAnyThread() const {
  return chunk()->runtime;
}

// The cell's address, asserted to be cell-aligned and within a valid chunk.
inline uintptr_t Cell::address() const {
  uintptr_t self = uintptr_t(this);
  MOZ_ASSERT(self % CellAlignBytes == 0);
  MOZ_ASSERT(TenuredChunk::withinValidRange(self));
  return self;
}
// The chunk containing this cell, found by masking off the low address bits.
TenuredChunk* Cell::chunk() const {
  uintptr_t cellAddr = uintptr_t(this);
  MOZ_ASSERT(cellAddr % CellAlignBytes == 0);
  uintptr_t chunkAddr = cellAddr & ~ChunkMask;
  return reinterpret_cast<TenuredChunk*>(chunkAddr);
}

inline StoreBuffer* Cell::storeBuffer() const {
  return chunk()->storeBuffer;
}
// Zone access, dispatching on whether the cell is tenured or in the nursery.
JS::Zone* Cell::zone() const {
  return isTenured() ? asTenured().zone() : nurseryZone();
}

JS::Zone* Cell::zoneFromAnyThread() const {
  return isTenured() ? asTenured().zoneFromAnyThread()
                     : nurseryZoneFromAnyThread();
}

JS::Zone* Cell::nurseryZone() const {
  JS::Zone* result = nurseryZoneFromAnyThread();
  MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(result));
  return result;
}

// For nursery cells the zone is stored in the NurseryCellHeader preceding
// the cell.
JS::Zone* Cell::nurseryZoneFromAnyThread() const {
  return NurseryCellHeader::from(this)->zone();
}
#ifdef DEBUG
extern Cell* UninlinedForwarded(const Cell* cell);
#endif
// The cell's trace kind: from the arena for tenured cells, otherwise from
// the NurseryCellHeader.
inline JS::TraceKind Cell::getTraceKind() const {
  if (!isTenured()) {
    return NurseryCellHeader::from(this)->traceKind();
  }
  // If this cell has been forwarded, its relocated copy must report the same
  // trace kind.
  MOZ_ASSERT_IF(isForwarded(), UninlinedForwarded(this)->getTraceKind() ==
                                   asTenured().getTraceKind());
  return asTenured().getTraceKind();
}

/* static */ MOZ_ALWAYS_INLINE bool Cell::needPreWriteBarrier(JS::Zone* zone) {
  JS::shadow::Zone* shadowZone = JS::shadow::Zone::from(zone);
  return shadowZone->needsIncrementalBarrier();
}
bool TenuredCell::isMarkedAny() const {
MOZ_ASSERT(arena()->allocated());
return chunk()->markBits.isMarkedAny(this);
}
bool TenuredCell::isMarkedBlack() const {
MOZ_ASSERT(arena()->allocated());
return chunk()->markBits.isMarkedBlack(this);
}
bool TenuredCell::isMarkedGray() const {
MOZ_ASSERT(arena()->allocated());
return chunk()->markBits.isMarkedGray(this);
}
bool TenuredCell::markIfUnmarked(MarkColor color /* = Black */) const {
return chunk()->markBits.markIfUnmarked(this, color);
}
void TenuredCell::markBlack() const { chunk()->markBits.markBlack(this); }
void TenuredCell::copyMarkBitsFrom(const TenuredCell* src) {
MarkBitmap& markBits = chunk()->markBits;
markBits.copyMarkBit(this, src, ColorBit::BlackBit);
markBits.copyMarkBit(this, src, ColorBit::GrayOrBlackBit);
}
void TenuredCell::unmark() { chunk()->markBits.unmark(this); }
// The arena containing this cell, found by masking off the low address bits.
inline Arena* TenuredCell::arena() const {
  MOZ_ASSERT(isTenured());
  uintptr_t arenaAddr = address() & ~ArenaMask;
  return reinterpret_cast<Arena*>(arenaAddr);
}

AllocKind TenuredCell::getAllocKind() const {
  return arena()->getAllocKind();
}

JS::TraceKind TenuredCell::getTraceKind() const {
  return MapAllocToTraceKind(getAllocKind());
}

JS::Zone* TenuredCell::zone() const {
  JS::Zone* result = arena()->zone;
  MOZ_ASSERT(CurrentThreadIsGCMarking() || CurrentThreadCanAccessZone(result));
  return result;
}

JS::Zone* TenuredCell::zoneFromAnyThread() const {
  return arena()->zone;
}

bool TenuredCell::isInsideZone(JS::Zone* zone) const {
  return arena()->zone == zone;
}
// Read barrier and pre-write barrier implementation for GC cells.
// Read barrier entry point for GC cells. Skips null pointers and permanent
// shared things; otherwise dispatches to the Cell/TenuredCell overload.
template <typename T>
MOZ_ALWAYS_INLINE void ReadBarrier(T* thing) {
  static_assert(std::is_base_of_v<Cell, T>);
  static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);

  if (!thing || thing->isPermanentAndMayBeShared()) {
    return;
  }
  ReadBarrierImpl(thing);
}
// Read barrier for tenured cells.
//
// While the zone's incremental barrier is enabled, the cell is traced via the
// zone's barrier tracer. Otherwise, if the cell is marked gray it is unmarked
// recursively (gray cells must not be handed out while unmarked).
MOZ_ALWAYS_INLINE void ReadBarrierImpl(TenuredCell* thing) {
  MOZ_ASSERT(!CurrentThreadIsIonCompiling());
  MOZ_ASSERT(!CurrentThreadIsGCMarking());
  MOZ_ASSERT(thing);
  MOZ_ASSERT(CurrentThreadCanAccessZone(thing->zoneFromAnyThread()));

  // Barriers should not be triggered on main thread while collecting.
  mozilla::DebugOnly<JSRuntime*> runtime = thing->runtimeFromAnyThread();
  MOZ_ASSERT_IF(CurrentThreadCanAccessRuntime(runtime),
                !JS::RuntimeHeapIsCollecting());

  JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
  if (shadowZone->needsIncrementalBarrier()) {
    // We should only observe barriers being enabled on the main thread.
    MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime));
    Cell* tmp = thing;
    TraceManuallyBarrieredGenericPointerEdge(shadowZone->barrierTracer(), &tmp,
                                             "read barrier");
    // Tracing the edge must not have moved the cell.
    MOZ_ASSERT(tmp == thing);
    return;
  }

  if (thing->isMarkedGray()) {
    // There shouldn't be anything marked gray unless we're on the main thread.
    MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime));
    UnmarkGrayGCThingRecursively(thing);
  }
}
// Read barrier for cells that may be in the nursery: only tenured cells
// require any read-barrier work.
MOZ_ALWAYS_INLINE void ReadBarrierImpl(Cell* thing) {
  MOZ_ASSERT(!CurrentThreadIsGCMarking());
  if (!thing->isTenured()) {
    return;
  }
  ReadBarrierImpl(&thing->asTenured());
}
// Pre-write barrier for tenured cells: while the zone's incremental barrier
// is enabled, trace the old value of an edge before it is overwritten.
MOZ_ALWAYS_INLINE void PreWriteBarrierImpl(TenuredCell* thing) {
  MOZ_ASSERT(!CurrentThreadIsIonCompiling());
  MOZ_ASSERT(!CurrentThreadIsGCMarking());

  if (!thing) {
    return;
  }

  // Barriers can be triggered on the main thread while collecting, but are
  // disabled. For example, this happens when destroying HeapPtr wrappers.

  JS::shadow::Zone* zone = thing->shadowZoneFromAnyThread();
  if (!zone->needsIncrementalBarrier()) {
    return;
  }

  // Barriers can be triggered on off the main thread in two situations:
  //  - background finalization of HeapPtrs to the atoms zone
  //  - while we are verifying pre-barriers for a worker runtime
  // The barrier is not required in either case.
  bool checkThread = zone->isAtomsZone();
#ifdef JS_GC_ZEAL
  checkThread = checkThread || zone->isSelfHostingZone();
#endif
  JSRuntime* runtime = thing->runtimeFromAnyThread();
  if (checkThread && !CurrentThreadCanAccessRuntime(runtime)) {
    MOZ_ASSERT(CurrentThreadIsGCFinalizing() ||
               RuntimeIsVerifyingPreBarriers(runtime));
    return;
  }

  MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime));
  MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(zone));

  Cell* tmp = thing;
  TraceManuallyBarrieredGenericPointerEdge(zone->barrierTracer(), &tmp,
                                           "pre barrier");
  // Tracing the edge must not have moved the cell.
  MOZ_ASSERT(tmp == thing);
}
// Pre-write barrier for cells that may be in the nursery: only tenured cells
// require any pre-write-barrier work.
MOZ_ALWAYS_INLINE void PreWriteBarrierImpl(Cell* thing) {
  MOZ_ASSERT(!CurrentThreadIsGCMarking());
  if (!thing) {
    return;
  }
  if (thing->isTenured()) {
    PreWriteBarrierImpl(&thing->asTenured());
  }
}
// Pre-write barrier entry point for GC cells. Skips null pointers and
// permanent shared things; otherwise dispatches to the Cell/TenuredCell
// overload.
template <typename T>
MOZ_ALWAYS_INLINE void PreWriteBarrier(T* thing) {
  static_assert(std::is_base_of_v<Cell, T>);
  static_assert(!std::is_same_v<Cell, T> && !std::is_same_v<TenuredCell, T>);

  if (!thing || thing->isPermanentAndMayBeShared()) {
    return;
  }
  PreWriteBarrierImpl(thing);
}
#ifdef DEBUG
// Debug-only helpers.
/* static */ void Cell::assertThingIsNotGray(Cell* cell) {
  JS::AssertCellIsNotGray(cell);
}

// Nursery cells are always considered aligned; tenured cells defer to the
// arena-based check.
bool Cell::isAligned() const {
  return !isTenured() || asTenured().isAligned();
}

bool TenuredCell::isAligned() const {
  auto thingSize = arena()->getThingSize();
  return Arena::isAligned(address(), thingSize);
}
#endif
// Base class for nursery-allocatable GC things that have 32-bit length and
// 32-bit flags (currently JSString and BigInt).
//
// This tries to store both in Cell::header_, but if that isn't large enough the
// length is stored separately.
//
// 32 0
// ------------------
// | Length | Flags |
// ------------------
//
// The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
// for GC. Derived classes must ensure they don't use these flags for non-GC
// purposes.
class alignas(gc::CellAlignBytes) CellWithLengthAndFlags : public Cell {
#if JS_BITS_PER_WORD == 32
  // Additional storage for length if |header_| is too small to fit both.
  uint32_t length_;
#endif

 protected:
  // The 32-bit length field.
  uint32_t headerLengthField() const {
#if JS_BITS_PER_WORD == 32
    return length_;
#else
    // On 64-bit the length is packed into the high half of the header word.
    return uint32_t(header_ >> 32);
#endif
  }

  // The 32-bit flags field (the low half of the header word).
  uint32_t headerFlagsField() const { return uint32_t(header_); }

  void setHeaderFlagBit(uint32_t flag) { header_ |= uintptr_t(flag); }
  void clearHeaderFlagBit(uint32_t flag) { header_ &= ~uintptr_t(flag); }
  void toggleHeaderFlagBit(uint32_t flag) { header_ ^= uintptr_t(flag); }

  // Set both fields at once, handling the split storage on 32-bit builds.
  void setHeaderLengthAndFlags(uint32_t len, uint32_t flags) {
#if JS_BITS_PER_WORD == 32
    header_ = flags;
    length_ = len;
#else
    header_ = (uint64_t(len) << 32) | uint64_t(flags);
#endif
  }

  // Sub classes can store temporary data in the flags word. This is not GC safe
  // and users must ensure flags/length are never checked (including by asserts)
  // while this data is stored. Use of this method is strongly discouraged!
  void setTemporaryGCUnsafeData(uintptr_t data) { header_ = data; }

  // To get back the data, values to safely re-initialize clobbered flags
  // must be provided.
  uintptr_t unsetTemporaryGCUnsafeData(uint32_t len, uint32_t flags) {
    uintptr_t data = header_;
    setHeaderLengthAndFlags(len, flags);
    return data;
  }

 public:
  // Returns the offset of header_. JIT code should use offsetOfFlags
  // below.
  static constexpr size_t offsetOfRawHeaderFlagsField() {
    return offsetof(CellWithLengthAndFlags, header_);
  }

  // Offsets for direct field access from jit code. A number of places directly
  // access 32-bit length and flags fields so do endian trickery here.
#if JS_BITS_PER_WORD == 32
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, length_);
  }
#elif MOZ_LITTLE_ENDIAN()
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
  }
#else
  static constexpr size_t offsetOfHeaderFlags() {
    return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
  }
  static constexpr size_t offsetOfHeaderLength() {
    return offsetof(CellWithLengthAndFlags, header_);
  }
#endif
};
// Base class for non-nursery-allocatable GC things that allows storing a non-GC
// thing pointer in the first word.
//
// The low bits of the word (see CellFlagBitsReservedForGC) are reserved for GC.
template <class PtrT>
class alignas(gc::CellAlignBytes) TenuredCellWithNonGCPointer
    : public TenuredCell {
  static_assert(!std::is_pointer_v<PtrT>,
                "PtrT should be the type of the referent, not of the pointer");
  static_assert(
      !std::is_base_of_v<Cell, PtrT>,
      "Don't use TenuredCellWithNonGCPointer for pointers to GC things");

 protected:
  TenuredCellWithNonGCPointer() = default;

  // Store |initial| in the header word; it must not clash with the
  // GC-reserved low bits.
  explicit TenuredCellWithNonGCPointer(PtrT* initial) {
    uintptr_t data = uintptr_t(initial);
    MOZ_ASSERT((data & RESERVED_MASK) == 0);
    header_ = data;
  }

  PtrT* headerPtr() const {
    // Currently we never observe any flags set here because this base class is
    // only used for JSObject (for which the nursery kind flags are always
    // clear) or GC things that are always tenured (for which the nursery kind
    // flags are also always clear). This means we don't need to use masking to
    // get and set the pointer.
    MOZ_ASSERT(flags() == 0);
    return reinterpret_cast<PtrT*>(uintptr_t(header_));
  }

  void setHeaderPtr(PtrT* newValue) {
    // As above, no flags are expected to be set here.
    uintptr_t data = uintptr_t(newValue);
    MOZ_ASSERT(flags() == 0);
    MOZ_ASSERT((data & RESERVED_MASK) == 0);
    header_ = data;
  }

 public:
  // Offset of the header word, for JIT access to the stored pointer.
  static constexpr size_t offsetOfHeaderPtr() {
    return offsetof(TenuredCellWithNonGCPointer, header_);
  }
};
// Base class for GC things that have a tenured GC pointer as their first word.
//
// The low bits of the first word (see CellFlagBitsReservedForGC) are reserved
// for GC.
//
// This includes a pre write barrier when the pointer is updated. No post
// barrier
// is necessary as the pointer is always tenured.
template <class BaseCell, class PtrT>
class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
  static void staticAsserts() {
    // These static asserts are not in class scope because the PtrT may not be
    // defined when this class template is instantiated.
    static_assert(
        std::is_same_v<BaseCell, Cell> || std::is_same_v<BaseCell, TenuredCell>,
        "BaseCell must be either Cell or TenuredCell");
    static_assert(
        !std::is_pointer_v<PtrT>,
        "PtrT should be the type of the referent, not of the pointer");
    static_assert(
        std::is_base_of_v<Cell, PtrT>,
        "Only use CellWithTenuredGCPointer for pointers to GC things");
  }

 protected:
  CellWithTenuredGCPointer() = default;
  explicit CellWithTenuredGCPointer(PtrT* initial) { initHeaderPtr(initial); }

  // Initialize the header word with |initial|, which must be tenured and
  // must not clash with the GC-reserved low bits.
  void initHeaderPtr(PtrT* initial) {
    MOZ_ASSERT(!IsInsideNursery(initial));
    uintptr_t data = uintptr_t(initial);
    MOZ_ASSERT((data & Cell::RESERVED_MASK) == 0);
    this->header_ = data;
  }

  // Update the stored pointer, applying a pre-write barrier to the old value.
  void setHeaderPtr(PtrT* newValue) {
    // As above, no flags are expected to be set here.
    MOZ_ASSERT(!IsInsideNursery(newValue));
    PreWriteBarrier(headerPtr());
    unbarrieredSetHeaderPtr(newValue);
  }

 public:
  PtrT* headerPtr() const {
    // Currently we never observe any flags set here because this base class is
    // only used for GC things that are always tenured (for which the nursery
    // kind flags are also always clear). This means we don't need to use
    // masking to get and set the pointer.
    staticAsserts();
    MOZ_ASSERT(this->flags() == 0);
    return reinterpret_cast<PtrT*>(uintptr_t(this->header_));
  }

  // Update the stored pointer without a pre-write barrier. Callers must
  // ensure the barrier is not required.
  void unbarrieredSetHeaderPtr(PtrT* newValue) {
    uintptr_t data = uintptr_t(newValue);
    MOZ_ASSERT(this->flags() == 0);
    MOZ_ASSERT((data & Cell::RESERVED_MASK) == 0);
    this->header_ = data;
  }

  // Offset of the header word, for JIT access to the stored pointer.
  static constexpr size_t offsetOfHeaderPtr() {
    return offsetof(CellWithTenuredGCPointer, header_);
  }
};
} /* namespace gc */
} /* namespace js */
#endif /* gc_Cell_h */
|