Diffstat (limited to 'js/src/jit/mips64/MacroAssembler-mips64.cpp')
-rw-r--r--  js/src/jit/mips64/MacroAssembler-mips64.cpp | 42
1 file changed, 20 insertions(+), 22 deletions(-)
diff --git a/js/src/jit/mips64/MacroAssembler-mips64.cpp b/js/src/jit/mips64/MacroAssembler-mips64.cpp
index cbf66ccac4..1530bfcbc8 100644
--- a/js/src/jit/mips64/MacroAssembler-mips64.cpp
+++ b/js/src/jit/mips64/MacroAssembler-mips64.cpp
@@ -2611,7 +2611,7 @@ void MacroAssemblerMIPS64Compat::wasmStoreI64Impl(
template <typename T>
static void CompareExchange64(MacroAssembler& masm,
const wasm::MemoryAccessDesc* access,
- const Synchronization& sync, const T& mem,
+ Synchronization sync, const T& mem,
Register64 expect, Register64 replace,
Register64 output) {
MOZ_ASSERT(expect != output && replace != output);
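This hunk (and the rest of the patch) switches Synchronization from pass-by-const-reference to pass-by-value. As a rough illustration of why that is cheap, assume Synchronization is a small trivially copyable struct along these lines; this is a sketch inferred from its use here, not the actual jit header definition:

// Sketch only: assumed shape of Synchronization, two barrier bit-sets.
struct MemoryBarrierBits { uint32_t bits = 0; };

struct Synchronization {
  MemoryBarrierBits barrierBefore;  // barrier emitted before the access
  MemoryBarrierBits barrierAfter;   // barrier emitted after the access
};
// A struct this small fits in a register, so passing it by value costs no
// more than passing a const reference and avoids the extra indirection.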
@@ -2658,13 +2658,13 @@ void MacroAssembler::wasmCompareExchange64(const wasm::MemoryAccessDesc& access,
output);
}
-void MacroAssembler::compareExchange64(const Synchronization& sync,
- const Address& mem, Register64 expect,
- Register64 replace, Register64 output) {
+void MacroAssembler::compareExchange64(Synchronization sync, const Address& mem,
+ Register64 expect, Register64 replace,
+ Register64 output) {
CompareExchange64(*this, nullptr, sync, mem, expect, replace, output);
}
-void MacroAssembler::compareExchange64(const Synchronization& sync,
+void MacroAssembler::compareExchange64(Synchronization sync,
const BaseIndex& mem, Register64 expect,
Register64 replace, Register64 output) {
CompareExchange64(*this, nullptr, sync, mem, expect, replace, output);
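With the new by-value signatures, a call site would look something like the following; the register choices and the use of Synchronization::Full() are illustrative assumptions, not taken from this patch:

// Hypothetical caller: full-barrier 64-bit CAS at [a0 + 0].
masm.compareExchange64(Synchronization::Full(), Address(a0, 0),
                       /* expect  */ Register64(a2),
                       /* replace */ Register64(a3),
                       /* output  */ Register64(v0));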
@@ -2673,7 +2673,7 @@ void MacroAssembler::compareExchange64(const Synchronization& sync,
template <typename T>
static void AtomicExchange64(MacroAssembler& masm,
const wasm::MemoryAccessDesc* access,
- const Synchronization& sync, const T& mem,
+ Synchronization sync, const T& mem,
Register64 value, Register64 output) {
MOZ_ASSERT(value != output);
masm.computeEffectiveAddress(mem, SecondScratchReg);
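The body that follows is unchanged and therefore not shown in the hunk; it is the usual MIPS64 load-linked/store-conditional retry loop. A hedged sketch of that pattern, using the same lld/scd emitters the later hunk shows, with the remaining helper names assumed:

// Sketch of the LL/SC exchange loop; not copied verbatim from the file.
Label tryAgain;
masm.memoryBarrierBefore(sync);
masm.bind(&tryAgain);
masm.as_lld(output.reg, SecondScratchReg, 0);       // load-linked old value
masm.movePtr(value.reg, ScratchRegister);           // scd clobbers its source
masm.as_scd(ScratchRegister, SecondScratchReg, 0);  // store-conditional
// scd leaves 0 in its source register on failure: retry until it sticks.
masm.ma_b(ScratchRegister, Imm32(0), &tryAgain, Assembler::Equal, ShortJump);
masm.memoryBarrierAfter(sync);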
@@ -2717,13 +2717,12 @@ void MacroAssembler::wasmAtomicExchange64(const wasm::MemoryAccessDesc& access,
WasmAtomicExchange64(*this, access, mem, src, output);
}
-void MacroAssembler::atomicExchange64(const Synchronization& sync,
- const Address& mem, Register64 value,
- Register64 output) {
+void MacroAssembler::atomicExchange64(Synchronization sync, const Address& mem,
+ Register64 value, Register64 output) {
AtomicExchange64(*this, nullptr, sync, mem, value, output);
}
-void MacroAssembler::atomicExchange64(const Synchronization& sync,
+void MacroAssembler::atomicExchange64(Synchronization sync,
const BaseIndex& mem, Register64 value,
Register64 output) {
AtomicExchange64(*this, nullptr, sync, mem, value, output);
@@ -2732,9 +2731,8 @@ void MacroAssembler::atomicExchange64(const Synchronization& sync,
template <typename T>
static void AtomicFetchOp64(MacroAssembler& masm,
const wasm::MemoryAccessDesc* access,
- const Synchronization& sync, AtomicOp op,
- Register64 value, const T& mem, Register64 temp,
- Register64 output) {
+ Synchronization sync, AtomicOp op, Register64 value,
+ const T& mem, Register64 temp, Register64 output) {
MOZ_ASSERT(value != output);
MOZ_ASSERT(value != temp);
masm.computeEffectiveAddress(mem, SecondScratchReg);
@@ -2751,19 +2749,19 @@ static void AtomicFetchOp64(MacroAssembler& masm,
masm.as_lld(output.reg, SecondScratchReg, 0);
switch (op) {
- case AtomicFetchAddOp:
+ case AtomicOp::Add:
masm.as_daddu(temp.reg, output.reg, value.reg);
break;
- case AtomicFetchSubOp:
+ case AtomicOp::Sub:
masm.as_dsubu(temp.reg, output.reg, value.reg);
break;
- case AtomicFetchAndOp:
+ case AtomicOp::And:
masm.as_and(temp.reg, output.reg, value.reg);
break;
- case AtomicFetchOrOp:
+ case AtomicOp::Or:
masm.as_or(temp.reg, output.reg, value.reg);
break;
- case AtomicFetchXorOp:
+ case AtomicOp::Xor:
masm.as_xor(temp.reg, output.reg, value.reg);
break;
default:
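The case labels are the only change in this hunk: the old unscoped AtomicFetchAddOp-style constants become scoped enumerators. A minimal sketch of the assumed new declaration, inferred from the case labels; the real jit/AtomicOp.h may list more members:

// Assumed shape of the scoped enum referenced by the new case labels.
enum class AtomicOp {
  Add,
  Sub,
  And,
  Or,
  Xor,
};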
@@ -2790,25 +2788,25 @@ void MacroAssembler::wasmAtomicFetchOp64(const wasm::MemoryAccessDesc& access,
AtomicFetchOp64(*this, &access, access.sync(), op, value, mem, temp, output);
}
-void MacroAssembler::atomicFetchOp64(const Synchronization& sync, AtomicOp op,
+void MacroAssembler::atomicFetchOp64(Synchronization sync, AtomicOp op,
Register64 value, const Address& mem,
Register64 temp, Register64 output) {
AtomicFetchOp64(*this, nullptr, sync, op, value, mem, temp, output);
}
-void MacroAssembler::atomicFetchOp64(const Synchronization& sync, AtomicOp op,
+void MacroAssembler::atomicFetchOp64(Synchronization sync, AtomicOp op,
Register64 value, const BaseIndex& mem,
Register64 temp, Register64 output) {
AtomicFetchOp64(*this, nullptr, sync, op, value, mem, temp, output);
}
-void MacroAssembler::atomicEffectOp64(const Synchronization& sync, AtomicOp op,
+void MacroAssembler::atomicEffectOp64(Synchronization sync, AtomicOp op,
Register64 value, const Address& mem,
Register64 temp) {
AtomicFetchOp64(*this, nullptr, sync, op, value, mem, temp, temp);
}
-void MacroAssembler::atomicEffectOp64(const Synchronization& sync, AtomicOp op,
+void MacroAssembler::atomicEffectOp64(Synchronization sync, AtomicOp op,
Register64 value, const BaseIndex& mem,
Register64 temp) {
AtomicFetchOp64(*this, nullptr, sync, op, value, mem, temp, temp);
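Both atomicEffectOp64 overloads forward to AtomicFetchOp64 with temp passed as both the temp and the output register, since the fetched value is discarded. A hypothetical call site, with illustrative register choices and Synchronization::Full() assumed:

// Sketch only: 64-bit atomic add whose old value is not needed.
masm.atomicEffectOp64(Synchronization::Full(), AtomicOp::Add,
                      /* value */ Register64(a2), Address(a0, 0),
                      /* temp  */ Register64(a3));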