author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-17 09:03:13 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-17 09:03:13 +0000
commit     0681b3ac9a6ab4879ca2fbfcf8aa9d00a67b8365 (patch)
tree       1437375a1c16af40bb2982577c25eb9608e17566 /js
parent     Adding debian version 115.11.0esr-1~deb12u1. (diff)
Merging upstream version 115.12.0esr.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'js')
-rw-r--r--  js/src/gc/GC.cpp           |  16
-rw-r--r--  js/src/gc/GCMarker.h       |  43
-rw-r--r--  js/src/gc/Marking.cpp      | 137
-rw-r--r--  js/src/jit/Bailouts.cpp    |   2
-rw-r--r--  js/src/vm/JSObject.cpp     |   8
-rw-r--r--  js/src/wasm/WasmStubs.cpp  |  45
6 files changed, 224 insertions(+), 27 deletions(-)
diff --git a/js/src/gc/GC.cpp b/js/src/gc/GC.cpp
index b8e1d21f2a..33665e9b45 100644
--- a/js/src/gc/GC.cpp
+++ b/js/src/gc/GC.cpp
@@ -3100,20 +3100,14 @@ GCRuntime::MarkQueueProgress GCRuntime::processTestMarkQueue() {
return QueueSuspended;
}
- // Mark the object and push it onto the stack.
- size_t oldPosition = marker().stack.position();
- marker().markAndTraverse<NormalMarkingOptions>(obj);
-
- // If we overflow the stack here and delay marking, then we won't be
- // testing what we think we're testing.
- if (marker().stack.position() == oldPosition) {
+ // Mark the object.
+ AutoEnterOOMUnsafeRegion oomUnsafe;
+ if (!marker().markOneObjectForTest(obj)) {
+ // If we overflowed the stack here and delayed marking, then we won't be
+ // testing what we think we're testing.
MOZ_ASSERT(obj->asTenured().arena()->onDelayedMarkingList());
- AutoEnterOOMUnsafeRegion oomUnsafe;
oomUnsafe.crash("Overflowed stack while marking test queue");
}
-
- SliceBudget unlimited = SliceBudget::unlimited();
- marker().processMarkStackTop<NormalMarkingOptions>(unlimited);
} else if (val.isString()) {
JSLinearString* str = &val.toString()->asLinear();
if (js::StringEqualsLiteral(str, "yield") && isIncrementalGc()) {
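This hunk replaces the open-coded mark / check-stack-position / drain sequence with a single debug-only helper, markOneObjectForTest, whose implementation is added in the Marking.cpp hunks further down. A minimal sketch of the contract the call site relies on (the wrapper function and its setup are illustrative; marker(), obj and the OOM-unsafe region come from the surrounding GCRuntime code above):

    // markOneObjectForTest(obj) returns false only if the mark stack
    // overflowed and marking of obj was delayed to the arena's
    // delayed-marking list; the test queue then crashes deliberately rather
    // than silently testing something other than what it intends to test.
    void markQueuedObjectOrCrash(js::GCMarker& marker, JSObject* obj) {
      js::AutoEnterOOMUnsafeRegion oomUnsafe;
      if (!marker.markOneObjectForTest(obj)) {
        oomUnsafe.crash("Overflowed stack while marking test queue");
      }
    }
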
diff --git a/js/src/gc/GCMarker.h b/js/src/gc/GCMarker.h
index 053ba90e18..1baec84e49 100644
--- a/js/src/gc/GCMarker.h
+++ b/js/src/gc/GCMarker.h
@@ -38,7 +38,9 @@ namespace gc {
enum IncrementalProgress { NotFinished = 0, Finished };
class AutoSetMarkColor;
+class AutoUpdateMarkStackRanges;
struct Cell;
+class MarkStackIter;
class ParallelMarker;
class UnmarkGrayTracer;
@@ -117,6 +119,7 @@ class MarkStack {
public:
TaggedPtr() = default;
TaggedPtr(Tag tag, Cell* ptr);
+ uintptr_t asBits() const;
Tag tag() const;
uintptr_t tagUnchecked() const;
template <typename T>
@@ -136,10 +139,13 @@ class MarkStack {
size_t start() const;
TaggedPtr ptr() const;
+ void setStart(size_t newStart);
+ void setEmpty();
+
+ private:
static constexpr size_t StartShift = 2;
static constexpr size_t KindMask = (1 << StartShift) - 1;
- private:
uintptr_t startAndKind_;
TaggedPtr ptr_;
};
@@ -224,6 +230,13 @@ class MarkStack {
// The maximum stack capacity to grow to.
MainThreadOrGCTaskData<size_t> maxCapacity_{SIZE_MAX};
#endif
+
+#ifdef DEBUG
+ MainThreadOrGCTaskData<bool> elementsRangesAreValid;
+ friend class js::GCMarker;
+#endif
+
+ friend class MarkStackIter;
};
static_assert(unsigned(SlotsOrElementsKind::Unused) ==
@@ -232,6 +245,25 @@ static_assert(unsigned(SlotsOrElementsKind::Unused) ==
"difference between SlotsOrElementsRange::startAndKind_ and a "
"tagged SlotsOrElementsRange");
+class MOZ_STACK_CLASS MarkStackIter {
+ MarkStack& stack_;
+ size_t pos_;
+
+ public:
+ explicit MarkStackIter(MarkStack& stack);
+
+ bool done() const;
+ void next();
+
+ MarkStack::Tag peekTag() const;
+ bool isSlotsOrElementsRange() const;
+ MarkStack::SlotsOrElementsRange& slotsOrElementsRange();
+
+ private:
+ size_t position() const;
+ MarkStack::TaggedPtr peekPtr() const;
+};
+
// Bitmask of options to parameterize MarkingTracerT.
namespace MarkingOptions {
enum : uint32_t {
@@ -361,6 +393,8 @@ class GCMarker {
void setCheckAtomMarking(bool check);
bool shouldCheckCompartments() { return strictCompartmentChecking; }
+
+ bool markOneObjectForTest(JSObject* obj);
#endif
bool markCurrentColorInParallel(SliceBudget& budget);
@@ -403,6 +437,13 @@ class GCMarker {
template <typename Tracer>
void setMarkingStateAndTracer(MarkingState prev, MarkingState next);
+ // The mutator can shift object elements which could invalidate any elements
+ // index on the mark stack. Change the index to be relative to the elements
+ // allocation (to ignore shifted elements) while the mutator is running.
+ void updateRangesAtStartOfSlice();
+ void updateRangesAtEndOfSlice();
+ friend class gc::AutoUpdateMarkStackRanges;
+
template <uint32_t markingOptions>
bool processMarkStackTop(SliceBudget& budget);
friend class gc::GCRuntime;
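The comment added to GCMarker above is the core idea of this patch: the mutator can shift dense elements (for example, shifting an element off the front of an array advances the elements pointer and bumps numShiftedElements instead of moving memory), so an elements index saved on the mark stack must be stored relative to the elements allocation while the mutator may run. A standalone sketch of the two index spaces and the conversions performed at slice boundaries (std-only code with illustrative names; the real conversions are updateRangesAtStartOfSlice and updateRangesAtEndOfSlice in Marking.cpp below):

    #include <algorithm>
    #include <cstddef>

    // Between slices, while the mutator may shift elements, store the index
    // relative to the elements allocation, which is stable across shifts.
    std::size_t toAllocationRelative(std::size_t denseIndex,
                                     std::size_t numShifted) {
      return denseIndex + numShifted;
    }

    // At the start of a slice, convert back to an index into the current
    // dense elements, clamping in case more elements were shifted since it
    // was saved (elements shifted away no longer need marking from here).
    std::size_t toDenseRelative(std::size_t allocIndex,
                                std::size_t numShifted) {
      return allocIndex - std::min(numShifted, allocIndex);
    }

    // Example: a range paused at dense index 4 with 2 elements already
    // shifted is stored as allocation index 6; if the mutator shifts one
    // more element, the next slice resumes at dense index 6 - 3 = 3, which
    // names exactly the same elements as before.
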
diff --git a/js/src/gc/Marking.cpp b/js/src/gc/Marking.cpp
index b92cd5f3ac..0201e20aa7 100644
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -1307,9 +1307,20 @@ bool GCMarker::doMarking(SliceBudget& budget, ShouldReportMarkTime reportTime) {
return true;
}
+class MOZ_RAII gc::AutoUpdateMarkStackRanges {
+ GCMarker& marker_;
+
+ public:
+ explicit AutoUpdateMarkStackRanges(GCMarker& marker) : marker_(marker) {
+ marker_.updateRangesAtStartOfSlice();
+ }
+ ~AutoUpdateMarkStackRanges() { marker_.updateRangesAtEndOfSlice(); }
+};
+
template <uint32_t opts, MarkColor color>
bool GCMarker::markOneColor(SliceBudget& budget) {
AutoSetMarkColor setColor(*this, color);
+ AutoUpdateMarkStackRanges updateRanges(*this);
while (processMarkStackTop<opts>(budget)) {
if (stack.isEmpty()) {
@@ -1321,6 +1332,8 @@ bool GCMarker::markOneColor(SliceBudget& budget) {
}
bool GCMarker::markCurrentColorInParallel(SliceBudget& budget) {
+ AutoUpdateMarkStackRanges updateRanges(*this);
+
ParallelMarker::AtomicCount& waitingTaskCount =
parallelMarker_->waitingTaskCountRef();
@@ -1340,6 +1353,26 @@ bool GCMarker::markCurrentColorInParallel(SliceBudget& budget) {
return false;
}
+#ifdef DEBUG
+bool GCMarker::markOneObjectForTest(JSObject* obj) {
+ MOZ_ASSERT(obj->zone()->isGCMarking());
+ MOZ_ASSERT(!obj->isMarked(markColor()));
+
+ size_t oldPosition = stack.position();
+ markAndTraverse<NormalMarkingOptions>(obj);
+ if (stack.position() == oldPosition) {
+ return false;
+ }
+
+ AutoUpdateMarkStackRanges updateRanges(*this);
+
+ SliceBudget unlimited = SliceBudget::unlimited();
+ processMarkStackTop<NormalMarkingOptions>(unlimited);
+
+ return true;
+}
+#endif
+
static inline void CheckForCompartmentMismatch(JSObject* obj, JSObject* obj2) {
#ifdef DEBUG
if (MOZ_UNLIKELY(obj->compartment() != obj2->compartment())) {
@@ -1366,6 +1399,47 @@ static inline size_t NumUsedDynamicSlots(NativeObject* obj) {
return nslots - nfixed;
}
+void GCMarker::updateRangesAtStartOfSlice() {
+ for (MarkStackIter iter(stack); !iter.done(); iter.next()) {
+ if (iter.isSlotsOrElementsRange()) {
+ MarkStack::SlotsOrElementsRange& range = iter.slotsOrElementsRange();
+ JSObject* obj = range.ptr().asRangeObject();
+ if (!obj->is<NativeObject>()) {
+ range.setEmpty();
+ } else if (range.kind() == SlotsOrElementsKind::Elements) {
+ NativeObject* obj = &range.ptr().asRangeObject()->as<NativeObject>();
+ size_t index = range.start();
+ size_t numShifted = obj->getElementsHeader()->numShiftedElements();
+ index -= std::min(numShifted, index);
+ range.setStart(index);
+ }
+ }
+ }
+
+#ifdef DEBUG
+ MOZ_ASSERT(!stack.elementsRangesAreValid);
+ stack.elementsRangesAreValid = true;
+#endif
+}
+
+void GCMarker::updateRangesAtEndOfSlice() {
+ for (MarkStackIter iter(stack); !iter.done(); iter.next()) {
+ if (iter.isSlotsOrElementsRange()) {
+ MarkStack::SlotsOrElementsRange& range = iter.slotsOrElementsRange();
+ if (range.kind() == SlotsOrElementsKind::Elements) {
+ NativeObject* obj = &range.ptr().asRangeObject()->as<NativeObject>();
+ size_t numShifted = obj->getElementsHeader()->numShiftedElements();
+ range.setStart(range.start() + numShifted);
+ }
+ }
+ }
+
+#ifdef DEBUG
+ MOZ_ASSERT(stack.elementsRangesAreValid);
+ stack.elementsRangesAreValid = false;
+#endif
+}
+
template <uint32_t opts>
inline bool GCMarker::processMarkStackTop(SliceBudget& budget) {
/*
@@ -1379,6 +1453,7 @@ inline bool GCMarker::processMarkStackTop(SliceBudget& budget) {
*/
MOZ_ASSERT(!stack.isEmpty());
+ MOZ_ASSERT(stack.elementsRangesAreValid);
MOZ_ASSERT_IF(markColor() == MarkColor::Gray, !hasBlackEntries());
JSObject* obj; // The object being scanned.
@@ -1409,12 +1484,7 @@ inline bool GCMarker::processMarkStackTop(SliceBudget& budget) {
case SlotsOrElementsKind::Elements: {
base = nobj->getDenseElements();
-
- // Account for shifted elements.
- size_t numShifted = nobj->getElementsHeader()->numShiftedElements();
- size_t initlen = nobj->getDenseInitializedLength();
- index = std::max(index, numShifted) - numShifted;
- end = initlen;
+ end = nobj->getDenseInitializedLength();
break;
}
@@ -1590,17 +1660,17 @@ struct MapTypeToMarkStackTag<BaseScript*> {
static const auto value = MarkStack::ScriptTag;
};
-#ifdef DEBUG
static inline bool TagIsRangeTag(MarkStack::Tag tag) {
return tag == MarkStack::SlotsOrElementsRangeTag;
}
-#endif
inline MarkStack::TaggedPtr::TaggedPtr(Tag tag, Cell* ptr)
: bits(tag | uintptr_t(ptr)) {
assertValid();
}
+inline uintptr_t MarkStack::TaggedPtr::asBits() const { return bits; }
+
inline uintptr_t MarkStack::TaggedPtr::tagUnchecked() const {
return bits & TagMask;
}
@@ -1661,6 +1731,17 @@ inline size_t MarkStack::SlotsOrElementsRange::start() const {
return startAndKind_ >> StartShift;
}
+inline void MarkStack::SlotsOrElementsRange::setStart(size_t newStart) {
+ startAndKind_ = (newStart << StartShift) | uintptr_t(kind());
+ MOZ_ASSERT(start() == newStart);
+}
+
+inline void MarkStack::SlotsOrElementsRange::setEmpty() {
+ TaggedPtr entry = TaggedPtr(ObjectTag, ptr().asRangeObject());
+ ptr_ = entry;
+ startAndKind_ = entry.asBits();
+}
+
inline MarkStack::TaggedPtr MarkStack::SlotsOrElementsRange::ptr() const {
return ptr_;
}
@@ -1931,6 +2012,45 @@ size_t MarkStack::sizeOfExcludingThis(
return stack().sizeOfExcludingThis(mallocSizeOf);
}
+MarkStackIter::MarkStackIter(MarkStack& stack)
+ : stack_(stack), pos_(stack.position()) {}
+
+inline size_t MarkStackIter::position() const { return pos_; }
+
+inline bool MarkStackIter::done() const { return position() == 0; }
+
+inline void MarkStackIter::next() {
+ if (isSlotsOrElementsRange()) {
+ MOZ_ASSERT(position() >= ValueRangeWords);
+ pos_ -= ValueRangeWords;
+ return;
+ }
+
+ MOZ_ASSERT(!done());
+ pos_--;
+}
+
+inline bool MarkStackIter::isSlotsOrElementsRange() const {
+ return TagIsRangeTag(peekTag());
+}
+
+inline MarkStack::Tag MarkStackIter::peekTag() const { return peekPtr().tag(); }
+
+inline MarkStack::TaggedPtr MarkStackIter::peekPtr() const {
+ MOZ_ASSERT(!done());
+ return stack_.stack()[pos_ - 1];
+}
+
+inline MarkStack::SlotsOrElementsRange& MarkStackIter::slotsOrElementsRange() {
+ MOZ_ASSERT(TagIsRangeTag(peekTag()));
+ MOZ_ASSERT(position() >= ValueRangeWords);
+
+ MarkStack::TaggedPtr* ptr = &stack_.stack()[pos_ - ValueRangeWords];
+ auto& range = *reinterpret_cast<MarkStack::SlotsOrElementsRange*>(ptr);
+ range.assertValid();
+ return range;
+}
+
/*** GCMarker ***************************************************************/
/*
@@ -2244,6 +2364,7 @@ void GCRuntime::processDelayedMarkingList(MarkColor color) {
// were added.
AutoSetMarkColor setColor(marker(), color);
+ AutoUpdateMarkStackRanges updateRanges(marker());
do {
delayedMarkingWorkAdded = false;
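The MarkStackIter added above walks the stack from the top and steps over a SlotsOrElementsRange entry as a unit, because a range occupies ValueRangeWords (two) words while every other entry is a single tagged word. A standalone model of that walk, using plain containers rather than SpiderMonkey types, to show why next() decrements by two over a range:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    enum Tag : std::uint8_t { ObjectTag, RangeTag /* , ... */ };

    struct Word {
      Tag tag;
      std::uintptr_t payload;  // tagged pointer or start-and-kind word
    };

    // Count range entries by walking from the top, mirroring MarkStackIter;
    // assumes a well-formed stack (a RangeTag word always has its companion
    // start-and-kind word directly beneath it).
    std::size_t countRanges(const std::vector<Word>& stack) {
      std::size_t ranges = 0;
      std::size_t pos = stack.size();
      while (pos != 0) {
        if (stack[pos - 1].tag == RangeTag) {
          ranges++;
          pos -= 2;  // a range occupies two words (ValueRangeWords)
        } else {
          pos -= 1;  // every other entry is one word
        }
      }
      return ranges;
    }
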
diff --git a/js/src/jit/Bailouts.cpp b/js/src/jit/Bailouts.cpp
index 3730d8997a..1d2657c399 100644
--- a/js/src/jit/Bailouts.cpp
+++ b/js/src/jit/Bailouts.cpp
@@ -54,9 +54,11 @@ class js::jit::BailoutStack {
# pragma pack(pop)
#endif
+#if !defined(JS_CODEGEN_NONE)
// Make sure the compiler doesn't add extra padding on 32-bit platforms.
static_assert((sizeof(BailoutStack) % 8) == 0,
"BailoutStack should be 8-byte aligned.");
+#endif
BailoutFrameInfo::BailoutFrameInfo(const JitActivationIterator& activations,
BailoutStack* bailout)
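The 8-byte-size assertion on BailoutStack is now compiled only when a real code generator is configured; under JS_CODEGEN_NONE there is no JIT backend and the check is skipped. For reference, a standalone illustration of this kind of padding check (illustrative struct, not the real BailoutStack layout):

    #include <cstdint>

    struct Example {
      std::uint64_t machineState;    // 8 bytes
      std::uint32_t frameSize;       // 4 bytes
      std::uint32_t snapshotOffset;  // 4 bytes
    };

    // Fails to compile if the compiler inserts padding that breaks the
    // expected multiple of 8, on 32-bit as well as 64-bit targets.
    static_assert((sizeof(Example) % 8) == 0,
                  "Example should be 8-byte aligned.");
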
diff --git a/js/src/vm/JSObject.cpp b/js/src/vm/JSObject.cpp
index 292971cf3e..4398725fde 100644
--- a/js/src/vm/JSObject.cpp
+++ b/js/src/vm/JSObject.cpp
@@ -1221,6 +1221,10 @@ void JSObject::swap(JSContext* cx, HandleObject a, HandleObject b,
MOZ_RELEASE_ASSERT(js::ObjectMayBeSwapped(a));
MOZ_RELEASE_ASSERT(js::ObjectMayBeSwapped(b));
+ // Don't allow a GC which may observe intermediate state or run before we
+ // execute all necessary barriers.
+ gc::AutoSuppressGC nogc(cx);
+
if (!Watchtower::watchObjectSwap(cx, a, b)) {
oomUnsafe.crash("watchObjectSwap");
}
@@ -1311,10 +1315,6 @@ void JSObject::swap(JSContext* cx, HandleObject a, HandleObject b,
a->as<ProxyObject>().setInlineValueArray();
}
} else {
- // Avoid GC in here to avoid confusing the tracing code with our
- // intermediate state.
- gc::AutoSuppressGC suppress(cx);
-
// When the objects have different sizes, they will have different numbers
// of fixed slots before and after the swap, so the slots for native objects
// will need to be rearranged. Remember the original values from the
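The GC suppression that previously covered only the different-sizes branch now covers the whole swap, so no collection can observe the objects mid-rearrangement or run before all post-swap barriers have executed. A standalone model of the RAII scope being widened (not SpiderMonkey's AutoSuppressGC, just the shape of the change):

    // Minimal RAII guard: suppression lasts exactly as long as the object.
    struct AutoSuppress {
      bool& flag;
      bool prev;
      explicit AutoSuppress(bool& f) : flag(f), prev(f) { flag = true; }
      ~AutoSuppress() { flag = prev; }
    };

    bool gcSuppressed = false;

    void swapObjects(/* HandleObject a, HandleObject b */) {
      AutoSuppress nogc(gcSuppressed);  // whole-function scope, as in the patch
      // ... notify watchers, rearrange slots, swap contents, run barriers ...
    }  // collections become possible again only once the objects are consistent
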
diff --git a/js/src/wasm/WasmStubs.cpp b/js/src/wasm/WasmStubs.cpp
index 7fc61381b9..9c8b93a7d7 100644
--- a/js/src/wasm/WasmStubs.cpp
+++ b/js/src/wasm/WasmStubs.cpp
@@ -1937,6 +1937,39 @@ static void FillArgumentArrayForJitExit(MacroAssembler& masm, Register instance,
GenPrintf(DebugChannel::Import, masm, "\n");
}
+static bool AddStackCheckForImportFunctionEntry(jit::MacroAssembler& masm,
+ unsigned reserve,
+ const FuncType& funcType,
+ StackMaps* stackMaps) {
+ std::pair<CodeOffset, uint32_t> pair =
+ masm.wasmReserveStackChecked(reserve, BytecodeOffset(0));
+
+ // Attempt to create stack maps for masm.wasmReserveStackChecked.
+ ArgTypeVector argTypes(funcType);
+ RegisterOffsets trapExitLayout;
+ size_t trapExitLayoutNumWords;
+ GenerateTrapExitRegisterOffsets(&trapExitLayout, &trapExitLayoutNumWords);
+ CodeOffset trapInsnOffset = pair.first;
+ size_t nBytesReservedBeforeTrap = pair.second;
+ size_t nInboundStackArgBytes = StackArgAreaSizeUnaligned(argTypes);
+ wasm::StackMap* stackMap = nullptr;
+ if (!CreateStackMapForFunctionEntryTrap(
+ argTypes, trapExitLayout, trapExitLayoutNumWords,
+ nBytesReservedBeforeTrap, nInboundStackArgBytes, &stackMap)) {
+ return false;
+ }
+
+ // In debug builds, we'll always have a stack map, even if there are no
+ // refs to track.
+ MOZ_ASSERT(stackMap);
+ if (stackMap &&
+ !stackMaps->add((uint8_t*)(uintptr_t)trapInsnOffset.offset(), stackMap)) {
+ stackMap->destroy();
+ return false;
+ }
+ return true;
+}
+
// Generate a wrapper function with the standard intra-wasm call ABI which
// simply calls an import. This wrapper function allows any import to be treated
// like a normal wasm function for the purposes of exports and table calls. In
@@ -1948,7 +1981,7 @@ static bool GenerateImportFunction(jit::MacroAssembler& masm,
const FuncImport& fi,
const FuncType& funcType,
CallIndirectId callIndirectId,
- FuncOffsets* offsets) {
+ FuncOffsets* offsets, StackMaps* stackMaps) {
AutoCreatedBy acb(masm, "wasm::GenerateImportFunction");
AssertExpectedSP(masm);
@@ -1961,7 +1994,12 @@ static bool GenerateImportFunction(jit::MacroAssembler& masm,
WasmStackAlignment,
sizeof(Frame), // pushed by prologue
StackArgBytesForWasmABI(funcType) + sizeOfInstanceSlot);
- masm.wasmReserveStackChecked(framePushed, BytecodeOffset(0));
+
+ if (!AddStackCheckForImportFunctionEntry(masm, framePushed, funcType,
+ stackMaps)) {
+ return false;
+ }
+
MOZ_ASSERT(masm.framePushed() == framePushed);
masm.storePtr(InstanceReg, Address(masm.getStackPointer(),
@@ -2025,7 +2063,8 @@ bool wasm::GenerateImportFunctions(const ModuleEnvironment& env,
CallIndirectId callIndirectId = CallIndirectId::forFunc(env, funcIndex);
FuncOffsets offsets;
- if (!GenerateImportFunction(masm, fi, funcType, callIndirectId, &offsets)) {
+ if (!GenerateImportFunction(masm, fi, funcType, callIndirectId, &offsets,
+ &code->stackMaps)) {
return false;
}
if (!code->codeRanges.emplaceBack(funcIndex, /* bytecodeOffset = */ 0,
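The new AddStackCheckForImportFunctionEntry pairs the stack-limit check emitted at the import wrapper's entry with a stack map keyed by the offset of the instruction that can trap, so that live GC references in the inbound arguments remain visible to the collector if the check fires. A standalone model of that bookkeeping (plain containers and invented names, not the real StackMaps API):

    #include <cstdint>
    #include <map>

    // Model: describe which stack words hold GC references at a trap site.
    struct FakeStackMap {
      std::uint32_t numMappedWords = 0;
      // ... a bit set of reference slots would live here ...
    };

    using TrapSiteMaps =
        std::map<std::uint32_t /* trapInsnOffset */, FakeStackMap>;

    // As in the patch, a failed registration must propagate so the caller can
    // fail wrapper generation instead of running without a map.
    bool registerEntryTrapMap(TrapSiteMaps& maps, std::uint32_t trapInsnOffset,
                              const FakeStackMap& map) {
      return maps.emplace(trapInsnOffset, map).second;
    }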