author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 01:14:29 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 01:14:29 +0000
commit     fbaf0bb26397aa498eb9156f06d5a6fe34dd7dd8 (patch)
tree       4c1ccaf5486d4f2009f9a338a98a83e886e29c97 /js/src/jit/Lowering.cpp
parent     Releasing progress-linux version 124.0.1-1~progress7.99u1. (diff)
Merging upstream version 125.0.1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'js/src/jit/Lowering.cpp')
-rw-r--r--  js/src/jit/Lowering.cpp  139
1 file changed, 123 insertions(+), 16 deletions(-)
diff --git a/js/src/jit/Lowering.cpp b/js/src/jit/Lowering.cpp
index 8a28ea123c..b0007a114d 100644
--- a/js/src/jit/Lowering.cpp
+++ b/js/src/jit/Lowering.cpp
@@ -3828,6 +3828,20 @@ void LIRGenerator::visitGetNextEntryForIterator(MGetNextEntryForIterator* ins) {
define(lir, ins);
}
+static auto SynchronizeLoad(MemoryBarrierRequirement requiresBarrier) {
+ if (requiresBarrier == MemoryBarrierRequirement::Required) {
+ return Synchronization::Load();
+ }
+ return Synchronization::None();
+}
+
+static auto SynchronizeStore(MemoryBarrierRequirement requiresBarrier) {
+ if (requiresBarrier == MemoryBarrierRequirement::Required) {
+ return Synchronization::Store();
+ }
+ return Synchronization::None();
+}
+
void LIRGenerator::visitArrayBufferByteLength(MArrayBufferByteLength* ins) {
MOZ_ASSERT(ins->object()->type() == MIRType::Object);
MOZ_ASSERT(ins->type() == MIRType::IntPtr);
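
For context, a minimal usage sketch of the two helpers added above (not part of the patch itself; it mirrors the visitLoadUnboxedScalar changes further down, and `ins` stands for any MIR access node exposing requiresMemoryBarrier()):

  // Map the node's barrier requirement onto a Synchronization value once...
  auto sync = SynchronizeLoad(ins->requiresMemoryBarrier());
  // ...and let that single value gate both fences around the lowered load.
  if (!sync.isNone()) {
    add(new (alloc()) LMemoryBarrier(sync.barrierBefore), ins);
  }
  // ... emit the load itself ...
  if (!sync.isNone()) {
    add(new (alloc()) LMemoryBarrier(sync.barrierAfter), ins);
  }
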
@@ -3870,6 +3884,70 @@ void LIRGenerator::visitTypedArrayElementSize(MTypedArrayElementSize* ins) {
ins);
}
+void LIRGenerator::visitResizableTypedArrayLength(
+ MResizableTypedArrayLength* ins) {
+ MOZ_ASSERT(ins->object()->type() == MIRType::Object);
+ MOZ_ASSERT(ins->type() == MIRType::IntPtr);
+
+ auto sync = SynchronizeLoad(ins->requiresMemoryBarrier());
+ auto* lir = new (alloc())
+ LResizableTypedArrayLength(useRegister(ins->object()), temp(), sync);
+ define(lir, ins);
+}
+
+void LIRGenerator::visitResizableTypedArrayByteOffsetMaybeOutOfBounds(
+ MResizableTypedArrayByteOffsetMaybeOutOfBounds* ins) {
+ MOZ_ASSERT(ins->object()->type() == MIRType::Object);
+ MOZ_ASSERT(ins->type() == MIRType::IntPtr);
+
+ auto* lir = new (alloc()) LResizableTypedArrayByteOffsetMaybeOutOfBounds(
+ useRegister(ins->object()), temp());
+ define(lir, ins);
+}
+
+void LIRGenerator::visitResizableDataViewByteLength(
+ MResizableDataViewByteLength* ins) {
+ MOZ_ASSERT(ins->object()->type() == MIRType::Object);
+ MOZ_ASSERT(ins->type() == MIRType::IntPtr);
+
+ auto sync = SynchronizeLoad(ins->requiresMemoryBarrier());
+ auto* lir = new (alloc())
+ LResizableDataViewByteLength(useRegister(ins->object()), temp(), sync);
+ define(lir, ins);
+}
+
+void LIRGenerator::visitGrowableSharedArrayBufferByteLength(
+ MGrowableSharedArrayBufferByteLength* ins) {
+ MOZ_ASSERT(ins->object()->type() == MIRType::Object);
+ MOZ_ASSERT(ins->type() == MIRType::IntPtr);
+
+ auto* lir = new (alloc())
+ LGrowableSharedArrayBufferByteLength(useRegisterAtStart(ins->object()));
+ define(lir, ins);
+}
+
+void LIRGenerator::visitGuardResizableArrayBufferViewInBounds(
+ MGuardResizableArrayBufferViewInBounds* ins) {
+ MOZ_ASSERT(ins->object()->type() == MIRType::Object);
+
+ auto* lir = new (alloc()) LGuardResizableArrayBufferViewInBounds(
+ useRegister(ins->object()), temp());
+ assignSnapshot(lir, ins->bailoutKind());
+ add(lir, ins);
+ redefine(ins, ins->object());
+}
+
+void LIRGenerator::visitGuardResizableArrayBufferViewInBoundsOrDetached(
+ MGuardResizableArrayBufferViewInBoundsOrDetached* ins) {
+ MOZ_ASSERT(ins->object()->type() == MIRType::Object);
+
+ auto* lir = new (alloc()) LGuardResizableArrayBufferViewInBoundsOrDetached(
+ useRegister(ins->object()), temp());
+ assignSnapshot(lir, ins->bailoutKind());
+ add(lir, ins);
+ redefine(ins, ins->object());
+}
+
void LIRGenerator::visitGuardHasAttachedArrayBuffer(
MGuardHasAttachedArrayBuffer* ins) {
MOZ_ASSERT(ins->object()->type() == MIRType::Object);
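
The resizable-view guards added above all follow the same lowering shape; a generic sketch of that pattern (LGuardSomething is a placeholder name, not a real LIR class):

  auto* lir = new (alloc()) LGuardSomething(useRegister(ins->object()), temp());
  assignSnapshot(lir, ins->bailoutKind());  // bail out if the guard fails
  add(lir, ins);                            // effectful op with no own output
  redefine(ins, ins->object());             // on success, result aliases the input
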
@@ -4298,8 +4376,9 @@ void LIRGenerator::visitLoadUnboxedScalar(MLoadUnboxedScalar* ins) {
MOZ_ASSERT(ins->index()->type() == MIRType::IntPtr);
MOZ_ASSERT(IsNumericType(ins->type()) || ins->type() == MIRType::Boolean);
- if (Scalar::isBigIntType(ins->storageType()) &&
- ins->requiresMemoryBarrier()) {
+ auto sync = SynchronizeLoad(ins->requiresMemoryBarrier());
+
+ if (Scalar::isBigIntType(ins->storageType()) && !sync.isNone()) {
lowerAtomicLoad64(ins);
return;
}
@@ -4310,8 +4389,7 @@ void LIRGenerator::visitLoadUnboxedScalar(MLoadUnboxedScalar* ins) {
// NOTE: the generated code must match the assembly code in gen_load in
// GenerateAtomicOperations.py
- Synchronization sync = Synchronization::Load();
- if (ins->requiresMemoryBarrier()) {
+ if (!sync.isNone()) {
LMemoryBarrier* fence = new (alloc()) LMemoryBarrier(sync.barrierBefore);
add(fence, ins);
}
@@ -4338,7 +4416,7 @@ void LIRGenerator::visitLoadUnboxedScalar(MLoadUnboxedScalar* ins) {
assignSafepoint(lir, ins);
}
- if (ins->requiresMemoryBarrier()) {
+ if (!sync.isNone()) {
LMemoryBarrier* fence = new (alloc()) LMemoryBarrier(sync.barrierAfter);
add(fence, ins);
}
@@ -4431,29 +4509,32 @@ void LIRGenerator::visitClampToUint8(MClampToUint8* ins) {
void LIRGenerator::visitLoadTypedArrayElementHole(
MLoadTypedArrayElementHole* ins) {
- MOZ_ASSERT(ins->object()->type() == MIRType::Object);
+ MOZ_ASSERT(ins->elements()->type() == MIRType::Elements);
MOZ_ASSERT(ins->index()->type() == MIRType::IntPtr);
+ MOZ_ASSERT(ins->length()->type() == MIRType::IntPtr);
MOZ_ASSERT(ins->type() == MIRType::Value);
- const LUse object = useRegister(ins->object());
+ const LUse elements = useRegister(ins->elements());
const LAllocation index = useRegister(ins->index());
+ const LAllocation length = useRegister(ins->length());
if (!Scalar::isBigIntType(ins->arrayType())) {
- auto* lir = new (alloc()) LLoadTypedArrayElementHole(object, index, temp());
+ auto* lir =
+ new (alloc()) LLoadTypedArrayElementHole(elements, index, length);
if (ins->fallible()) {
assignSnapshot(lir, ins->bailoutKind());
}
defineBox(lir, ins);
} else {
#ifdef JS_CODEGEN_X86
- LDefinition tmp = LDefinition::BogusTemp();
+ LInt64Definition temp64 = LInt64Definition::BogusTemp();
#else
- LDefinition tmp = temp();
+ LInt64Definition temp64 = tempInt64();
#endif
- auto* lir = new (alloc())
- LLoadTypedArrayElementHoleBigInt(object, index, tmp, tempInt64());
+ auto* lir = new (alloc()) LLoadTypedArrayElementHoleBigInt(
+ elements, index, length, temp(), temp64);
defineBox(lir, ins);
assignSafepoint(lir, ins);
}
@@ -4474,7 +4555,9 @@ void LIRGenerator::visitStoreUnboxedScalar(MStoreUnboxedScalar* ins) {
MOZ_ASSERT(ins->value()->type() == MIRType::Int32);
}
- if (ins->isBigIntWrite() && ins->requiresMemoryBarrier()) {
+ auto sync = SynchronizeStore(ins->requiresMemoryBarrier());
+
+ if (ins->isBigIntWrite() && !sync.isNone()) {
lowerAtomicStore64(ins);
return;
}
@@ -4500,8 +4583,7 @@ void LIRGenerator::visitStoreUnboxedScalar(MStoreUnboxedScalar* ins) {
//
// NOTE: the generated code must match the assembly code in gen_store in
// GenerateAtomicOperations.py
- Synchronization sync = Synchronization::Store();
- if (ins->requiresMemoryBarrier()) {
+ if (!sync.isNone()) {
LMemoryBarrier* fence = new (alloc()) LMemoryBarrier(sync.barrierBefore);
add(fence, ins);
}
@@ -4511,7 +4593,7 @@ void LIRGenerator::visitStoreUnboxedScalar(MStoreUnboxedScalar* ins) {
add(new (alloc()) LStoreUnboxedBigInt(elements, index, value, tempInt64()),
ins);
}
- if (ins->requiresMemoryBarrier()) {
+ if (!sync.isNone()) {
LMemoryBarrier* fence = new (alloc()) LMemoryBarrier(sync.barrierAfter);
add(fence, ins);
}
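
The store side gets the same treatment; the sketch below (again not part of the patch) shows the dispatch that visitStoreUnboxedScalar now performs, with atomic BigInt writes diverted to a dedicated 64-bit lowering before any fences are emitted:

  auto sync = SynchronizeStore(ins->requiresMemoryBarrier());
  if (ins->isBigIntWrite() && !sync.isNone()) {
    lowerAtomicStore64(ins);  // dedicated path; the fence-wrapping below is skipped
    return;
  }
  if (!sync.isNone()) {
    add(new (alloc()) LMemoryBarrier(sync.barrierBefore), ins);
  }
  // ... emit the store itself ...
  if (!sync.isNone()) {
    add(new (alloc()) LMemoryBarrier(sync.barrierAfter), ins);
  }
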
@@ -5154,6 +5236,17 @@ void LIRGenerator::visitGuardIsFixedLengthTypedArray(
redefine(ins, ins->object());
}
+void LIRGenerator::visitGuardIsResizableTypedArray(
+ MGuardIsResizableTypedArray* ins) {
+ MOZ_ASSERT(ins->object()->type() == MIRType::Object);
+
+ auto* lir = new (alloc())
+ LGuardIsResizableTypedArray(useRegister(ins->object()), temp());
+ assignSnapshot(lir, ins->bailoutKind());
+ add(lir, ins);
+ redefine(ins, ins->object());
+}
+
void LIRGenerator::visitGuardHasProxyHandler(MGuardHasProxyHandler* ins) {
MOZ_ASSERT(ins->object()->type() == MIRType::Object);
@@ -5694,6 +5787,15 @@ void LIRGenerator::visitGuardToClass(MGuardToClass* ins) {
defineReuseInput(lir, ins, 0);
}
+void LIRGenerator::visitGuardToEitherClass(MGuardToEitherClass* ins) {
+ MOZ_ASSERT(ins->object()->type() == MIRType::Object);
+ MOZ_ASSERT(ins->type() == MIRType::Object);
+ auto* lir = new (alloc())
+ LGuardToEitherClass(useRegisterAtStart(ins->object()), temp());
+ assignSnapshot(lir, ins->bailoutKind());
+ defineReuseInput(lir, ins, 0);
+}
+
void LIRGenerator::visitGuardToFunction(MGuardToFunction* ins) {
MOZ_ASSERT(ins->object()->type() == MIRType::Object);
MOZ_ASSERT(ins->type() == MIRType::Object);
@@ -7018,6 +7120,11 @@ void LIRGenerator::visitMapObjectSize(MMapObjectSize* ins) {
define(lir, ins);
}
+void LIRGenerator::visitPostIntPtrConversion(MPostIntPtrConversion* ins) {
+ // This operation is a no-op.
+ redefine(ins, ins->input());
+}
+
void LIRGenerator::visitConstant(MConstant* ins) {
if (!IsFloatingPointType(ins->type()) && ins->canEmitAtUses()) {
emitAtUses(ins);