Diffstat (limited to 'js')
-rw-r--r--  js/loader/LoadContextBase.h | 3
-rw-r--r--  js/loader/LoadedScript.cpp | 60
-rw-r--r--  js/loader/LoadedScript.h | 32
-rw-r--r--  js/loader/ModuleLoadRequest.cpp | 4
-rw-r--r--  js/loader/ModuleLoaderBase.cpp | 109
-rw-r--r--  js/loader/ModuleLoaderBase.h | 28
-rw-r--r--  js/moz.configure | 25
-rw-r--r--  js/public/CallArgs.h | 7
-rw-r--r--  js/public/CallNonGenericMethod.h | 2
-rw-r--r--  js/public/GCAPI.h | 14
-rw-r--r--  js/public/GCHashTable.h | 3
-rw-r--r--  js/public/HeapAPI.h | 49
-rw-r--r--  js/public/Modules.h | 2
-rw-r--r--  js/public/Promise.h | 6
-rw-r--r--  js/public/PropertySpec.h | 20
-rw-r--r--  js/public/RealmOptions.h | 11
-rw-r--r--  js/public/RootingAPI.h | 2
-rw-r--r--  js/public/ValueArray.h | 3
-rw-r--r--  js/public/experimental/JitInfo.h | 6
-rw-r--r--  js/public/friend/ErrorNumbers.msg | 21
-rw-r--r--  js/src/build/moz.build | 10
-rw-r--r--  js/src/builtin/Array.cpp | 572
-rw-r--r--  js/src/builtin/Array.h | 151
-rw-r--r--  js/src/builtin/Array.js | 97
-rw-r--r--  js/src/builtin/JSON.cpp | 268
-rw-r--r--  js/src/builtin/MapObject.cpp | 160
-rw-r--r--  js/src/builtin/MapObject.h | 20
-rw-r--r--  js/src/builtin/ModuleObject.cpp | 11
-rw-r--r--  js/src/builtin/ModuleObject.h | 1
-rw-r--r--  js/src/builtin/ParseRecordObject.cpp | 16
-rw-r--r--  js/src/builtin/ParseRecordObject.h | 21
-rw-r--r--  js/src/builtin/Promise.cpp | 52
-rw-r--r--  js/src/builtin/RawJSONObject.cpp | 41
-rw-r--r--  js/src/builtin/RawJSONObject.h | 27
-rw-r--r--  js/src/builtin/Reflect.js | 2
-rw-r--r--  js/src/builtin/Sorting.js | 97
-rw-r--r--  js/src/builtin/TestingFunctions.cpp | 154
-rw-r--r--  js/src/builtin/TestingUtility.cpp | 15
-rw-r--r--  js/src/builtin/TestingUtility.h | 2
-rw-r--r--  js/src/builtin/Tuple.js | 2
-rw-r--r--  js/src/builtin/WeakMapObject-inl.h | 15
-rw-r--r--  js/src/builtin/WeakMapObject.cpp | 8
-rw-r--r--  js/src/builtin/WeakSetObject.cpp | 9
-rw-r--r--  js/src/builtin/embedjs.py | 4
-rw-r--r--  js/src/ctypes/CTypes.cpp | 4
-rw-r--r--  js/src/ctypes/typedefs.h | 6
-rw-r--r--  js/src/debugger/DebugAPI-inl.h | 8
-rw-r--r--  js/src/debugger/DebugAPI.h | 5
-rw-r--r--  js/src/debugger/Debugger.cpp | 18
-rw-r--r--  js/src/debugger/Environment.cpp | 18
-rw-r--r--  js/src/debugger/Frame.cpp | 18
-rw-r--r--  js/src/debugger/Frame.h | 1
-rw-r--r--  js/src/devtools/automation/variants/rootanalysis | 2
-rw-r--r--  js/src/ds/BitArray.h | 6
-rw-r--r--  js/src/ds/Bitmap.h | 22
-rw-r--r--  js/src/ds/OrderedHashTable.h | 71
-rw-r--r--  js/src/frontend/Parser.cpp | 22
-rw-r--r--  js/src/frontend/Stencil.cpp | 27
-rw-r--r--  js/src/frontend/TokenStream.cpp | 3
-rw-r--r--  js/src/fuzz-tests/gluesmith/Cargo.toml | 2
-rw-r--r--  js/src/fuzz-tests/gluesmith/src/lib.rs | 1
-rw-r--r--  js/src/gc/Allocator.cpp | 13
-rw-r--r--  js/src/gc/Allocator.h | 2
-rw-r--r--  js/src/gc/AtomMarking.cpp | 57
-rw-r--r--  js/src/gc/AtomMarking.h | 17
-rw-r--r--  js/src/gc/Cell.h | 3
-rw-r--r--  js/src/gc/Compacting.cpp | 2
-rw-r--r--  js/src/gc/GC.cpp | 138
-rw-r--r--  js/src/gc/GC.h | 3
-rw-r--r--  js/src/gc/GCAPI.cpp | 12
-rw-r--r--  js/src/gc/GCInternals.h | 2
-rw-r--r--  js/src/gc/GCRuntime.h | 3
-rw-r--r--  js/src/gc/Heap-inl.h | 4
-rw-r--r--  js/src/gc/MallocedBlockCache.h | 23
-rw-r--r--  js/src/gc/Nursery-inl.h | 92
-rw-r--r--  js/src/gc/Nursery.cpp | 876
-rw-r--r--  js/src/gc/Nursery.h | 296
-rw-r--r--  js/src/gc/NurseryAwareHashMap.h | 64
-rw-r--r--  js/src/gc/Pretenuring.cpp | 22
-rw-r--r--  js/src/gc/Pretenuring.h | 33
-rw-r--r--  js/src/gc/PublicIterators.h | 2
-rw-r--r--  js/src/gc/Scheduling.h | 3
-rw-r--r--  js/src/gc/StableCellHasher-inl.h | 1
-rw-r--r--  js/src/gc/StoreBuffer-inl.h | 5
-rw-r--r--  js/src/gc/StoreBuffer.cpp | 32
-rw-r--r--  js/src/gc/StoreBuffer.h | 54
-rw-r--r--  js/src/gc/Sweeping.cpp | 25
-rw-r--r--  js/src/gc/Tenuring.cpp | 710
-rw-r--r--  js/src/gc/Tenuring.h | 87
-rw-r--r--  js/src/gc/TraceMethods-inl.h | 17
-rw-r--r--  js/src/gc/WeakMap-inl.h | 54
-rw-r--r--  js/src/gc/Zone.cpp | 19
-rw-r--r--  js/src/gc/Zone.h | 170
-rw-r--r--  js/src/jit-test/lib/pretenure.js | 18
-rw-r--r--  js/src/jit-test/tests/arrays/sort-trampoline.js | 153
-rw-r--r--  js/src/jit-test/tests/basic/bug1875795.js | 7
-rw-r--r--  js/src/jit-test/tests/basic/bug1888746.js | 12
-rw-r--r--  js/src/jit-test/tests/basic/bug1890200.js | 12
-rw-r--r--  js/src/jit-test/tests/cacheir/bug1888346.js | 8
-rw-r--r--  js/src/jit-test/tests/collections/bug-1884927.js | 10
-rw-r--r--  js/src/jit-test/tests/collections/bug-1885775.js | 12
-rw-r--r--  js/src/jit-test/tests/collections/bug-1887939-1.js | 7
-rw-r--r--  js/src/jit-test/tests/collections/bug-1887939-2.js | 7
-rw-r--r--  js/src/jit-test/tests/debug/Debugger-onNativeCall-03.js | 18
-rw-r--r--  js/src/jit-test/tests/debug/Environment-methods-toPrimitive.js | 21
-rw-r--r--  js/src/jit-test/tests/debug/Frame-onStep-21.js | 19
-rw-r--r--  js/src/jit-test/tests/debug/private-methods-eval-in-frame.js | 9
-rw-r--r--  js/src/jit-test/tests/errors/bug-1886940-2.js | 6
-rw-r--r--  js/src/jit-test/tests/errors/bug-1886940.js | 2
-rw-r--r--  js/src/jit-test/tests/fuses/optimized-getiterator-invalidation.js | 37
-rw-r--r--  js/src/jit-test/tests/gc/alllcation-metadata-builder-over-recursion.js | 22
-rw-r--r--  js/src/jit-test/tests/gc/bug-1568740.js | 2
-rw-r--r--  js/src/jit-test/tests/gc/bug-1569840.js | 2
-rw-r--r--  js/src/jit-test/tests/gc/bug-1885819-2.js | 12
-rw-r--r--  js/src/jit-test/tests/gc/bug-1885819.js | 10
-rw-r--r--  js/src/jit-test/tests/gc/bug-1886466.js | 5
-rw-r--r--  js/src/jit-test/tests/gc/bug-1888717.js | 3
-rw-r--r--  js/src/jit-test/tests/gc/dedupe-03.js | 66
-rw-r--r--  js/src/jit-test/tests/gc/deduplicateTenuringStrings.js | 1
-rw-r--r--  js/src/jit-test/tests/gc/gcparam.js | 1
-rw-r--r--  js/src/jit-test/tests/gc/pretenuring.js | 1
-rw-r--r--  js/src/jit-test/tests/heap-analysis/byteSize-of-string.js | 87
-rw-r--r--  js/src/jit-test/tests/ion/apply-native-arguments-object.js | 46
-rw-r--r--  js/src/jit-test/tests/ion/apply-native-arguments.js | 39
-rw-r--r--  js/src/jit-test/tests/ion/apply-native-array.js | 39
-rw-r--r--  js/src/jit-test/tests/ion/apply-native-spreadcall-arguments.js | 39
-rw-r--r--  js/src/jit-test/tests/ion/apply-native-spreadcall-array.js | 39
-rw-r--r--  js/src/jit-test/tests/ion/apply-native-spreadcall-rest.js | 39
-rw-r--r--  js/src/jit-test/tests/ion/apply-native-spreadnew-arguments.js | 39
-rw-r--r--  js/src/jit-test/tests/ion/apply-native-spreadnew-array.js | 39
-rw-r--r--  js/src/jit-test/tests/ion/apply-native-spreadnew-newtarget.js | 66
-rw-r--r--  js/src/jit-test/tests/ion/apply-native-spreadnew-rest.js | 39
-rw-r--r--  js/src/jit-test/tests/ion/recover-atomics-islockfree.js | 25
-rw-r--r--  js/src/jit-test/tests/ion/recover-string-from-charcode.js | 13
-rw-r--r--  js/src/jit-test/tests/modules/bug-1888902.js | 16
-rw-r--r--  js/src/jit-test/tests/modules/dynamic-import-error.js | 2
-rw-r--r--  js/src/jit-test/tests/modules/dynamic-import-module.js | 2
-rw-r--r--  js/src/jit-test/tests/modules/inline-data-2.js | 12
-rw-r--r--  js/src/jit-test/tests/modules/inline-data.js | 13
-rw-r--r--  js/src/jit-test/tests/modules/shell-wrapper.js | 7
-rw-r--r--  js/src/jit-test/tests/parser/bug1887176.js | 46
-rw-r--r--  js/src/jit-test/tests/parser/dumpStencil-02.js | 8
-rw-r--r--  js/src/jit-test/tests/parser/module-filename.js | 13
-rw-r--r--  js/src/jit-test/tests/profiler/native-trampoline-2.js | 7
-rw-r--r--  js/src/jit-test/tests/profiler/native-trampoline-3.js | 32
-rw-r--r--  js/src/jit-test/tests/profiler/native-trampoline.js | 40
-rw-r--r--  js/src/jit-test/tests/profiler/wasm-to-js-1.js | 20
-rw-r--r--  js/src/jit-test/tests/profiler/wasm-to-js-2.js | 19
-rw-r--r--  js/src/jit-test/tests/promise/allSettled-dead.js | 20
-rw-r--r--  js/src/jit-test/tests/promise/jobqueue-interrupt-01.js | 23
-rw-r--r--  js/src/jit-test/tests/promise/jobqueue-interrupt-02.js | 14
-rw-r--r--  js/src/jit-test/tests/proxy/bug1885774.js | 25
-rw-r--r--  js/src/jit-test/tests/structured-clone/bug1888727.js | 21
-rw-r--r--  js/src/jit-test/tests/structured-clone/tenuring.js | 3
-rw-r--r--  js/src/jit-test/tests/typedarray/resizable-typedarray-from-pinned-buffer.js | 9
-rw-r--r--  js/src/jit-test/tests/warp/bug1876425.js | 62
-rw-r--r--  js/src/jit-test/tests/wasm/directiveless/bug1877358.js | 2
-rw-r--r--  js/src/jit-test/tests/wasm/gc/casting.js | 69
-rw-r--r--  js/src/jit-test/tests/wasm/gc/i31ref.js | 18
-rw-r--r--  js/src/jit-test/tests/wasm/regress/bug1886870.js | 8
-rw-r--r--  js/src/jit-test/tests/wasm/regress/bug1887535.js | 25
-rw-r--r--  js/src/jit-test/tests/wasm/regress/bug1887596.js | 14
-rw-r--r--  js/src/jit/ABIFunctionList-inl.h | 2
-rw-r--r--  js/src/jit/BaselineBailouts.cpp | 26
-rw-r--r--  js/src/jit/BaselineFrame.h | 4
-rw-r--r--  js/src/jit/CacheIR.cpp | 15
-rw-r--r--  js/src/jit/CacheIR.h | 6
-rw-r--r--  js/src/jit/CacheIRCompiler.cpp | 39
-rw-r--r--  js/src/jit/CacheIROps.yaml | 3
-rw-r--r--  js/src/jit/CacheIRReader.h | 24
-rw-r--r--  js/src/jit/CodeGenerator.cpp | 552
-rw-r--r--  js/src/jit/CodeGenerator.h | 38
-rw-r--r--  js/src/jit/Ion.cpp | 16
-rw-r--r--  js/src/jit/JSJitFrameIter.cpp | 93
-rw-r--r--  js/src/jit/JSJitFrameIter.h | 49
-rw-r--r--  js/src/jit/JitFrames.cpp | 68
-rw-r--r--  js/src/jit/JitFrames.h | 11
-rw-r--r--  js/src/jit/JitOptions.cpp | 3
-rw-r--r--  js/src/jit/JitRuntime.h | 30
-rw-r--r--  js/src/jit/LIROps.yaml | 18
-rw-r--r--  js/src/jit/Lowering.cpp | 134
-rw-r--r--  js/src/jit/MIR.cpp | 65
-rw-r--r--  js/src/jit/MIR.h | 3
-rw-r--r--  js/src/jit/MIROps.yaml | 12
-rw-r--r--  js/src/jit/MacroAssembler.cpp | 20
-rw-r--r--  js/src/jit/MacroAssembler.h | 1
-rw-r--r--  js/src/jit/PerfSpewer.cpp | 6
-rw-r--r--  js/src/jit/ProcessExecutableMemory.cpp | 12
-rw-r--r--  js/src/jit/Recover.cpp | 370
-rw-r--r--  js/src/jit/Recover.h | 9
-rw-r--r--  js/src/jit/Trampoline.cpp | 36
-rw-r--r--  js/src/jit/TrampolineNatives.cpp | 274
-rw-r--r--  js/src/jit/TrampolineNatives.h | 60
-rw-r--r--  js/src/jit/VMFunctionList-inl.h | 1
-rw-r--r--  js/src/jit/VMFunctions.cpp | 35
-rw-r--r--  js/src/jit/VMFunctions.h | 4
-rw-r--r--  js/src/jit/WarpCacheIRTranspiler.cpp | 10
-rw-r--r--  js/src/jit/arm/Architecture-arm.h | 2
-rw-r--r--  js/src/jit/arm64/Architecture-arm64.h | 2
-rw-r--r--  js/src/jit/arm64/vixl/Cpu-vixl.cpp | 2
-rw-r--r--  js/src/jit/loong64/Architecture-loong64.h | 2
-rw-r--r--  js/src/jit/mips32/Architecture-mips32.h | 2
-rw-r--r--  js/src/jit/mips64/Architecture-mips64.h | 2
-rw-r--r--  js/src/jit/moz.build | 1
-rw-r--r--  js/src/jit/none/Architecture-none.h | 2
-rw-r--r--  js/src/jit/riscv64/Architecture-riscv64.h | 2
-rw-r--r--  js/src/jit/shared/LIR-shared.h | 182
-rw-r--r--  js/src/jit/wasm32/Architecture-wasm32.h | 2
-rw-r--r--  js/src/jit/x86-shared/Architecture-x86-shared.h | 4
-rw-r--r--  js/src/jsapi-tests/moz.build | 1
-rw-r--r--  js/src/jsapi-tests/testDeduplication.cpp | 2
-rw-r--r--  js/src/jsapi-tests/testGCHeapBarriers.cpp | 1
-rw-r--r--  js/src/jsapi-tests/testGCMarking.cpp | 1
-rw-r--r--  js/src/jsapi-tests/testGCUniqueId.cpp | 1
-rw-r--r--  js/src/jsapi-tests/testIsInsideNursery.cpp | 83
-rw-r--r--  js/src/jsapi-tests/testSparseBitmap.cpp | 47
-rw-r--r--  js/src/jsapi-tests/testWasmEncoder.cpp | 117
-rw-r--r--  js/src/jsapi.cpp | 13
-rw-r--r--  js/src/jsapi.h | 2
-rw-r--r--  js/src/jsdate.cpp | 11
-rw-r--r--  js/src/jsfriendapi.cpp | 17
-rw-r--r--  js/src/jsfriendapi.h | 10
-rwxr-xr-x  js/src/make-source-package.py | 2
-rw-r--r--  js/src/moz.build | 1
-rw-r--r--  js/src/old-configure.in | 22
-rw-r--r--  js/src/rust/Cargo.toml | 2
-rw-r--r--  js/src/shell/js.cpp | 82
-rw-r--r--  js/src/shell/jsshell.h | 3
-rw-r--r--  js/src/tests/jstests.list | 10
-rwxr-xr-x  js/src/tests/lib/jittests.py | 7
-rw-r--r--  js/src/tests/non262/Date/parse-day-of-week.js | 2
-rw-r--r--  js/src/tests/non262/Iterator/proto.js | 9
-rw-r--r--  js/src/tests/non262/JSON/parse-with-source.js | 112
-rw-r--r--  js/src/tests/non262/PrivateName/nested-class-in-computed-property-key.js | 17
-rw-r--r--  js/src/tests/non262/module/bug1689499.js | 2
-rwxr-xr-x  js/src/tests/test262-update.py | 5
-rw-r--r--  js/src/tests/test262/prs/3994/browser.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/browser.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/alphabet.js | 25
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/browser.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/descriptor.js | 18
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/ignores-receiver.js | 22
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/illegal-characters.js | 26
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/last-chunk-handling.js | 67
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/length.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/name.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/nonconstructor.js | 18
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/option-coercion.js | 62
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/results.js | 30
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/shell.js | 24
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/string-coercion.js | 44
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/whitespace.js | 25
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/browser.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/descriptor.js | 18
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/ignores-receiver.js | 22
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/illegal-characters.js | 28
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/length.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/name.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/nonconstructor.js | 18
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/odd-length-input.js | 14
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/results.js | 31
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/shell.js | 24
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/string-coercion.js | 23
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/browser.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/alphabet.js | 44
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/browser.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/descriptor.js | 18
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/detached-buffer.js | 32
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/illegal-characters.js | 27
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/last-chunk-handling.js | 156
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/length.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/name.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/nonconstructor.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/option-coercion.js | 72
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/results.js | 33
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/shell.js | 42
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/string-coercion.js | 46
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/subarray.js | 20
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/target-size.js | 67
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/whitespace.js | 26
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/browser.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/descriptor.js | 18
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/detached-buffer.js | 17
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/illegal-characters.js | 29
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/length.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/name.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/nonconstructor.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/results.js | 34
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/shell.js | 42
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/string-coercion.js | 24
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/subarray.js | 20
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/target-size.js | 32
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/shell.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/alphabet.js | 20
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/browser.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/descriptor.js | 18
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/detached-buffer.js | 42
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/length.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/name.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/nonconstructor.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/option-coercion.js | 51
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/receiver-not-uint8array.js | 36
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/results.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/shell.js | 203
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/browser.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/descriptor.js | 18
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/detached-buffer.js | 18
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/length.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/name.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/nonconstructor.js | 19
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/receiver-not-uint8array.js | 29
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/results.js | 18
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/shell.js | 203
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/Uint8Array/shell.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/browser.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/built-ins/shell.js | 0
-rw-r--r--  js/src/tests/test262/prs/3994/shell.js | 723
-rw-r--r--  js/src/tests/test262/staging/JSON/json-parse-with-source-snapshot.js | 2
-rw-r--r--  js/src/tests/test262/staging/JSON/json-parse-with-source.js | 2
-rw-r--r--  js/src/util/StructuredSpewer.cpp | 2
-rw-r--r--  js/src/vm/ArrayBufferObject.cpp | 50
-rw-r--r--  js/src/vm/ArrayBufferObject.h | 19
-rw-r--r--  js/src/vm/ArrayBufferViewObject.cpp | 1
-rw-r--r--  js/src/vm/AsyncFunction.cpp | 2
-rw-r--r--  js/src/vm/AsyncIteration.cpp | 2
-rw-r--r--  js/src/vm/AtomsTable.h | 76
-rw-r--r--  js/src/vm/BigIntType.h | 2
-rw-r--r--  js/src/vm/CommonPropertyNames.h | 12
-rw-r--r--  js/src/vm/FrameIter.cpp | 7
-rw-r--r--  js/src/vm/FunctionFlags.h | 41
-rw-r--r--  js/src/vm/GeckoProfiler.cpp | 8
-rw-r--r--  js/src/vm/GeneratorObject.cpp | 14
-rw-r--r--  js/src/vm/GeneratorObject.h | 10
-rw-r--r--  js/src/vm/GlobalObject.h | 5
-rw-r--r--  js/src/vm/Interpreter.cpp | 4
-rw-r--r--  js/src/vm/InvalidatingFuse.cpp | 20
-rw-r--r--  js/src/vm/InvalidatingFuse.h | 24
-rw-r--r--  js/src/vm/Iteration.cpp | 118
-rw-r--r--  js/src/vm/Iteration.h | 2
-rw-r--r--  js/src/vm/JSAtomUtils.cpp | 159
-rw-r--r--  js/src/vm/JSContext.cpp | 19
-rw-r--r--  js/src/vm/JSContext.h | 14
-rw-r--r--  js/src/vm/JSFunction.h | 22
-rw-r--r--  js/src/vm/JSONParser.cpp | 198
-rw-r--r--  js/src/vm/JSONParser.h | 153
-rw-r--r--  js/src/vm/JSONPrinter.cpp | 4
-rw-r--r--  js/src/vm/JSONPrinter.h | 2
-rw-r--r--  js/src/vm/JSObject-inl.h | 5
-rw-r--r--  js/src/vm/JSObject.cpp | 44
-rw-r--r--  js/src/vm/Modules.cpp | 223
-rw-r--r--  js/src/vm/Modules.h | 34
-rw-r--r--  js/src/vm/NativeObject.cpp | 26
-rw-r--r--  js/src/vm/NativeObject.h | 4
-rw-r--r--  js/src/vm/PortableBaselineInterpret.cpp | 6
-rw-r--r--  js/src/vm/RealmFuses.cpp | 21
-rw-r--r--  js/src/vm/RealmFuses.h | 17
-rw-r--r--  js/src/vm/Runtime.cpp | 2
-rw-r--r--  js/src/vm/SelfHosting.cpp | 5
-rw-r--r--  js/src/vm/Stack.cpp | 6
-rw-r--r--  js/src/vm/StringType-inl.h | 28
-rw-r--r--  js/src/vm/StringType.cpp | 60
-rw-r--r--  js/src/vm/StringType.h | 40
-rw-r--r--  js/src/vm/StructuredClone.cpp | 2
-rw-r--r--  js/src/vm/TypedArrayObject-inl.h | 20
-rw-r--r--  js/src/vm/TypedArrayObject.cpp | 1135
-rw-r--r--  js/src/vm/TypedArrayObject.h | 1
-rw-r--r--  js/src/wasm/AsmJS.cpp | 6
-rw-r--r--  js/src/wasm/WasmAnyRef.h | 7
-rw-r--r--  js/src/wasm/WasmBCClass.h | 3
-rw-r--r--  js/src/wasm/WasmBaselineCompile.cpp | 100
-rw-r--r--  js/src/wasm/WasmBuiltins.cpp | 24
-rw-r--r--  js/src/wasm/WasmCodegenTypes.h | 60
-rw-r--r--  js/src/wasm/WasmFrame.h | 27
-rw-r--r--  js/src/wasm/WasmGC.h | 2
-rw-r--r--  js/src/wasm/WasmGcObject.cpp | 31
-rw-r--r--  js/src/wasm/WasmInstance.cpp | 7
-rw-r--r--  js/src/wasm/WasmInstance.h | 2
-rw-r--r--  js/src/wasm/WasmJS.cpp | 25
-rw-r--r--  js/src/wasm/WasmModule.cpp | 4
-rw-r--r--  js/src/wasm/WasmStubs.cpp | 15
-rw-r--r--  js/src/wasm/WasmTypeDef.cpp | 15
-rw-r--r--  js/src/wasm/WasmTypeDef.h | 2
-rw-r--r--  js/src/wasm/WasmValidate.cpp | 43
-rw-r--r--  js/src/wasm/WasmValidate.h | 9
-rw-r--r--  js/src/wasm/WasmValue.h | 6
-rw-r--r--  js/xpconnect/idl/xpccomponents.idl | 12
-rw-r--r--  js/xpconnect/loader/mozJSModuleLoader.cpp | 28
-rw-r--r--  js/xpconnect/loader/mozJSModuleLoader.h | 1
-rw-r--r--  js/xpconnect/src/Sandbox.cpp | 4
-rw-r--r--  js/xpconnect/src/XPCJSRuntime.cpp | 3
-rw-r--r--  js/xpconnect/tests/mochitest/mochitest.toml | 2
-rw-r--r--  js/xpconnect/tests/mochitest/test_bug871887.html | 43
-rw-r--r--  js/xpconnect/tests/unit/test_import_global_current.js | 2
-rw-r--r--  js/xpconnect/tests/unit/test_scriptable_nsIClassInfo.js | 41
-rw-r--r--  js/xpconnect/tests/unit/xpcshell.toml | 2
-rw-r--r--  js/xpconnect/wrappers/XrayWrapper.cpp | 24
396 files changed, 13147 insertions, 2771 deletions
diff --git a/js/loader/LoadContextBase.h b/js/loader/LoadContextBase.h
index a8ce045546..5dc78070cb 100644
--- a/js/loader/LoadContextBase.h
+++ b/js/loader/LoadContextBase.h
@@ -52,6 +52,9 @@ class LoadContextBase : public nsISupports {
// Used to output a string for the Gecko Profiler.
virtual void GetProfilerLabel(nsACString& aOutString);
+ // Whether this is a preload, for contexts that support them.
+ virtual bool IsPreload() const { return false; }
+
// Casting to the different contexts
bool IsWindowContext() const { return mKind == ContextKind::Window; }
mozilla::dom::ScriptLoadContext* AsWindowContext();
diff --git a/js/loader/LoadedScript.cpp b/js/loader/LoadedScript.cpp
index 7ddd04168e..c84ed3ac55 100644
--- a/js/loader/LoadedScript.cpp
+++ b/js/loader/LoadedScript.cpp
@@ -20,6 +20,8 @@ namespace JS::loader {
// LoadedScript
//////////////////////////////////////////////////////////////
+MOZ_DEFINE_MALLOC_SIZE_OF(LoadedScriptMallocSizeOf)
+
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(LoadedScript)
NS_INTERFACE_MAP_ENTRY(nsISupports)
NS_INTERFACE_MAP_END
@@ -52,7 +54,59 @@ LoadedScript::LoadedScript(ScriptKind aKind,
MOZ_ASSERT(mURI);
}
-LoadedScript::~LoadedScript() { mozilla::DropJSObjects(this); }
+LoadedScript::~LoadedScript() {
+ mozilla::UnregisterWeakMemoryReporter(this);
+ mozilla::DropJSObjects(this);
+}
+
+void LoadedScript::RegisterMemoryReport() {
+ mozilla::RegisterWeakMemoryReporter(this);
+}
+
+NS_IMETHODIMP
+LoadedScript::CollectReports(nsIHandleReportCallback* aHandleReport,
+ nsISupports* aData, bool aAnonymize) {
+#define COLLECT_REPORT(path, kind) \
+ MOZ_COLLECT_REPORT(path, KIND_HEAP, UNITS_BYTES, \
+ SizeOfIncludingThis(LoadedScriptMallocSizeOf), \
+ "Memory used for LoadedScript to hold on " kind \
+ " across documents")
+
+ switch (mKind) {
+ case ScriptKind::eClassic:
+ COLLECT_REPORT("explicit/js/script/loaded-script/classic", "scripts");
+ break;
+ case ScriptKind::eImportMap:
+ COLLECT_REPORT("explicit/js/script/loaded-script/import-map",
+ "import-maps");
+ break;
+ case ScriptKind::eModule:
+ COLLECT_REPORT("explicit/js/script/loaded-script/module", "modules");
+ break;
+ case ScriptKind::eEvent:
+ COLLECT_REPORT("explicit/js/script/loaded-script/event", "event scripts");
+ break;
+ }
+
+#undef COLLECT_REPORT
+ return NS_OK;
+}
+
+size_t LoadedScript::SizeOfIncludingThis(
+ mozilla::MallocSizeOf aMallocSizeOf) const {
+ size_t bytes = aMallocSizeOf(this);
+
+ if (IsTextSource()) {
+ if (IsUTF16Text()) {
+ bytes += ScriptText<char16_t>().sizeOfExcludingThis(aMallocSizeOf);
+ } else {
+ bytes += ScriptText<Utf8Unit>().sizeOfExcludingThis(aMallocSizeOf);
+ }
+ }
+
+ bytes += mScriptBytecode.sizeOfExcludingThis(aMallocSizeOf);
+ return bytes;
+}
void LoadedScript::AssociateWithScript(JSScript* aScript) {
// Verify that the rewritten URL is available when manipulating LoadedScript.
@@ -207,6 +261,7 @@ NS_IMPL_CYCLE_COLLECTION_TRACE_END
ModuleScript::ModuleScript(mozilla::dom::ReferrerPolicy aReferrerPolicy,
ScriptFetchOptions* aFetchOptions, nsIURI* aURI)
: LoadedScript(ScriptKind::eModule, aReferrerPolicy, aFetchOptions, aURI),
+ mHadImportMap(false),
mDebuggerDataInitialized(false) {
MOZ_ASSERT(!ModuleRecord());
MOZ_ASSERT(!HasParseError());
@@ -279,6 +334,9 @@ void ModuleScript::SetErrorToRethrow(const JS::Value& aError) {
mErrorToRethrow = aError;
}
+void ModuleScript::SetForPreload(bool aValue) { mForPreload = aValue; }
+void ModuleScript::SetHadImportMap(bool aValue) { mHadImportMap = aValue; }
+
void ModuleScript::SetDebuggerDataInitialized() {
MOZ_ASSERT(ModuleRecord());
MOZ_ASSERT(!mDebuggerDataInitialized);
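
The change above turns LoadedScript into a weak memory reporter so that scripts shared across documents show up under explicit/js/script/loaded-script/<kind>. A minimal sketch of how an owner might use the two new entry points; the helper names and call sites are illustrative, not part of the patch:

    // Illustrative only: an owner that shares a script opts it into memory
    // reporting and folds it into its own SizeOf accounting.
    void ShareScript(JS::loader::LoadedScript* aScript) {
      aScript->RegisterMemoryReport();  // weak reporter; unregistered in ~LoadedScript
    }

    size_t SizeOfSharedScript(mozilla::MallocSizeOf aMallocSizeOf,
                              const JS::loader::LoadedScript* aScript) {
      // Covers the object, its UTF-8/UTF-16 source, and any bytecode,
      // matching LoadedScript::SizeOfIncludingThis above.
      return aScript->SizeOfIncludingThis(aMallocSizeOf);
    }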
diff --git a/js/loader/LoadedScript.h b/js/loader/LoadedScript.h
index ca6d1fc179..5759800c27 100644
--- a/js/loader/LoadedScript.h
+++ b/js/loader/LoadedScript.h
@@ -12,12 +12,14 @@
#include "mozilla/Maybe.h"
#include "mozilla/MaybeOneOf.h"
+#include "mozilla/MemoryReporting.h"
#include "mozilla/Utf8.h" // mozilla::Utf8Unit
#include "mozilla/Variant.h"
#include "mozilla/Vector.h"
#include "nsCOMPtr.h"
#include "nsCycleCollectionParticipant.h"
+#include "nsIMemoryReporter.h"
#include "jsapi.h"
#include "ScriptKind.h"
@@ -39,7 +41,17 @@ class ModuleScript;
class EventScript;
class LoadContextBase;
-class LoadedScript : public nsISupports {
+// A LoadedScript is a place where the Script is stored once it is loaded. It is
+// not unique to a load, and can be shared across loads as long as it is
+// properly ref-counted by each load instance.
+//
+// Before the load is performed, the URI represents the resource to be loaded;
+// it is replaced by the absolute resource location once the load completes.
+//
+// As the LoadedScript can be shared, using the SharedSubResourceCache, it is
+// exposed to the memory reporter such that sharing might be accounted for
+// properly.
+class LoadedScript : public nsIMemoryReporter {
ScriptKind mKind;
const mozilla::dom::ReferrerPolicy mReferrerPolicy;
RefPtr<ScriptFetchOptions> mFetchOptions;
@@ -53,7 +65,17 @@ class LoadedScript : public nsISupports {
virtual ~LoadedScript();
public:
- NS_DECL_CYCLE_COLLECTING_ISUPPORTS
+ // When the memory should be reported, register it using RegisterMemoryReport,
+ // and make sure to call SizeOfIncludingThis in the enclosing container.
+ //
+ // Each reported script would be listed under
+ // `explicit/js/script/loaded-script/<kind>`.
+ void RegisterMemoryReport();
+ size_t SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const;
+
+ public:
+ NS_DECL_CYCLE_COLLECTING_ISUPPORTS;
+ NS_DECL_NSIMEMORYREPORTER;
NS_DECL_CYCLE_COLLECTION_CLASS(LoadedScript)
bool IsClassicScript() const { return mKind == ScriptKind::eClassic; }
@@ -332,6 +354,8 @@ class ModuleScript final : public LoadedScript {
JS::Heap<JSObject*> mModuleRecord;
JS::Heap<JS::Value> mParseError;
JS::Heap<JS::Value> mErrorToRethrow;
+ bool mForPreload;
+ bool mHadImportMap;
bool mDebuggerDataInitialized;
~ModuleScript();
@@ -352,6 +376,8 @@ class ModuleScript final : public LoadedScript {
void SetModuleRecord(JS::Handle<JSObject*> aModuleRecord);
void SetParseError(const JS::Value& aError);
void SetErrorToRethrow(const JS::Value& aError);
+ void SetForPreload(bool aValue);
+ void SetHadImportMap(bool aValue);
void SetDebuggerDataInitialized();
JSObject* ModuleRecord() const { return mModuleRecord; }
@@ -360,6 +386,8 @@ class ModuleScript final : public LoadedScript {
JS::Value ErrorToRethrow() const { return mErrorToRethrow; }
bool HasParseError() const { return !mParseError.isUndefined(); }
bool HasErrorToRethrow() const { return !mErrorToRethrow.isUndefined(); }
+ bool ForPreload() const { return mForPreload; }
+ bool HadImportMap() const { return mHadImportMap; }
bool DebuggerDataInitialized() const { return mDebuggerDataInitialized; }
void Shutdown();
diff --git a/js/loader/ModuleLoadRequest.cpp b/js/loader/ModuleLoadRequest.cpp
index 7e188160fc..7313563df9 100644
--- a/js/loader/ModuleLoadRequest.cpp
+++ b/js/loader/ModuleLoadRequest.cpp
@@ -205,6 +205,7 @@ void ModuleLoadRequest::CheckModuleDependenciesLoaded() {
if (!mModuleScript || mModuleScript->HasParseError()) {
return;
}
+
for (const auto& childRequest : mImports) {
ModuleScript* childScript = childRequest->mModuleScript;
if (!childScript) {
@@ -213,6 +214,9 @@ void ModuleLoadRequest::CheckModuleDependenciesLoaded() {
childRequest.get()));
return;
}
+
+ MOZ_DIAGNOSTIC_ASSERT(mModuleScript->HadImportMap() ==
+ childScript->HadImportMap());
}
LOG(("ScriptLoadRequest (%p): all ok", this));
diff --git a/js/loader/ModuleLoaderBase.cpp b/js/loader/ModuleLoaderBase.cpp
index 228c96ad69..020542754d 100644
--- a/js/loader/ModuleLoaderBase.cpp
+++ b/js/loader/ModuleLoaderBase.cpp
@@ -55,17 +55,17 @@ mozilla::LazyLogModule ModuleLoaderBase::gModuleLoaderBaseLog(
MOZ_LOG_TEST(ModuleLoaderBase::gModuleLoaderBaseLog, mozilla::LogLevel::Debug)
//////////////////////////////////////////////////////////////
-// ModuleLoaderBase::WaitingRequests
+// ModuleLoaderBase::LoadingRequest
//////////////////////////////////////////////////////////////
-NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(ModuleLoaderBase::WaitingRequests)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(ModuleLoaderBase::LoadingRequest)
NS_INTERFACE_MAP_ENTRY(nsISupports)
NS_INTERFACE_MAP_END
-NS_IMPL_CYCLE_COLLECTION(ModuleLoaderBase::WaitingRequests, mWaiting)
+NS_IMPL_CYCLE_COLLECTION(ModuleLoaderBase::LoadingRequest, mRequest, mWaiting)
-NS_IMPL_CYCLE_COLLECTING_ADDREF(ModuleLoaderBase::WaitingRequests)
-NS_IMPL_CYCLE_COLLECTING_RELEASE(ModuleLoaderBase::WaitingRequests)
+NS_IMPL_CYCLE_COLLECTING_ADDREF(ModuleLoaderBase::LoadingRequest)
+NS_IMPL_CYCLE_COLLECTING_RELEASE(ModuleLoaderBase::LoadingRequest)
//////////////////////////////////////////////////////////////
// ModuleLoaderBase
@@ -308,6 +308,12 @@ bool ModuleLoaderBase::HostImportModuleDynamically(
return false;
}
+ if (!loader->IsDynamicImportSupported()) {
+ JS_ReportErrorNumberASCII(aCx, js::GetErrorMessage, nullptr,
+ JSMSG_DYNAMIC_IMPORT_NOT_SUPPORTED);
+ return false;
+ }
+
auto result = loader->ResolveModuleSpecifier(script, specifier);
if (result.isErr()) {
JS::Rooted<JS::Value> error(aCx);
@@ -509,7 +515,9 @@ void ModuleLoaderBase::SetModuleFetchStarted(ModuleLoadRequest* aRequest) {
MOZ_ASSERT(aRequest->IsFetching() || aRequest->IsPendingFetchingError());
MOZ_ASSERT(!ModuleMapContainsURL(aRequest->mURI));
- mFetchingModules.InsertOrUpdate(aRequest->mURI, nullptr);
+ RefPtr<LoadingRequest> loadingRequest = new LoadingRequest();
+ loadingRequest->mRequest = aRequest;
+ mFetchingModules.InsertOrUpdate(aRequest->mURI, loadingRequest);
}
void ModuleLoaderBase::SetModuleFetchFinishedAndResumeWaitingRequests(
@@ -526,9 +534,8 @@ void ModuleLoaderBase::SetModuleFetchFinishedAndResumeWaitingRequests(
"%u)",
aRequest, aRequest->mModuleScript.get(), unsigned(aResult)));
- RefPtr<WaitingRequests> waitingRequests;
- if (!mFetchingModules.Remove(aRequest->mURI,
- getter_AddRefs(waitingRequests))) {
+ auto entry = mFetchingModules.Lookup(aRequest->mURI);
+ if (!entry) {
LOG(
("ScriptLoadRequest (%p): Key not found in mFetchingModules, "
"assuming we have an inline module or have finished fetching already",
@@ -536,20 +543,35 @@ void ModuleLoaderBase::SetModuleFetchFinishedAndResumeWaitingRequests(
return;
}
+ // It's possible for a request to be cancelled and removed from the fetching
+ // modules map and a new request started for the same URI and added to the
+ // map. In this case we don't want the first cancelled request to complete the
+ // later request (which will cause it to fail) so we ignore it.
+ RefPtr<LoadingRequest> loadingRequest = entry.Data();
+ if (loadingRequest->mRequest != aRequest) {
+ MOZ_ASSERT(aRequest->IsCanceled());
+ LOG(
+ ("ScriptLoadRequest (%p): Ignoring completion of cancelled request "
+ "that was removed from the map",
+ aRequest));
+ return;
+ }
+
+ MOZ_ALWAYS_TRUE(mFetchingModules.Remove(aRequest->mURI));
+
RefPtr<ModuleScript> moduleScript(aRequest->mModuleScript);
MOZ_ASSERT(NS_FAILED(aResult) == !moduleScript);
mFetchedModules.InsertOrUpdate(aRequest->mURI, RefPtr{moduleScript});
- if (waitingRequests) {
- LOG(("ScriptLoadRequest (%p): Resuming waiting requests", aRequest));
- ResumeWaitingRequests(waitingRequests, bool(moduleScript));
- }
+ LOG(("ScriptLoadRequest (%p): Resuming waiting requests", aRequest));
+ MOZ_ASSERT(loadingRequest->mRequest == aRequest);
+ ResumeWaitingRequests(loadingRequest, bool(moduleScript));
}
-void ModuleLoaderBase::ResumeWaitingRequests(WaitingRequests* aWaitingRequests,
+void ModuleLoaderBase::ResumeWaitingRequests(LoadingRequest* aLoadingRequest,
bool aSuccess) {
- for (ModuleLoadRequest* request : aWaitingRequests->mWaiting) {
+ for (ModuleLoadRequest* request : aLoadingRequest->mWaiting) {
ResumeWaitingRequest(request, aSuccess);
}
}
@@ -568,13 +590,8 @@ void ModuleLoaderBase::WaitForModuleFetch(ModuleLoadRequest* aRequest) {
MOZ_ASSERT(ModuleMapContainsURL(uri));
if (auto entry = mFetchingModules.Lookup(uri)) {
- RefPtr<WaitingRequests> waitingRequests = entry.Data();
- if (!waitingRequests) {
- waitingRequests = new WaitingRequests();
- mFetchingModules.InsertOrUpdate(uri, waitingRequests);
- }
-
- waitingRequests->mWaiting.AppendElement(aRequest);
+ RefPtr<LoadingRequest> loadingRequest = entry.Data();
+ loadingRequest->mWaiting.AppendElement(aRequest);
return;
}
@@ -678,6 +695,9 @@ nsresult ModuleLoaderBase::CreateModuleScript(ModuleLoadRequest* aRequest) {
aRequest->mLoadedScript->AsModuleScript();
aRequest->mModuleScript = moduleScript;
+ moduleScript->SetForPreload(aRequest->mLoadContext->IsPreload());
+ moduleScript->SetHadImportMap(HasImportMapRegistered());
+
if (!module) {
LOG(("ScriptLoadRequest (%p): compilation failed (%d)", aRequest,
unsigned(rv)));
@@ -769,6 +789,7 @@ ResolveResult ModuleLoaderBase::ResolveModuleSpecifier(
// Import Maps are not supported on workers/worklets.
// See https://github.com/WICG/import-maps/issues/2
MOZ_ASSERT_IF(!NS_IsMainThread(), mImportMap == nullptr);
+
// Forward to the updated 'Resolve a module specifier' algorithm defined in
// the Import Maps spec.
return ImportMap::ResolveModuleSpecifier(mImportMap.get(), mLoader, aScript,
@@ -1034,9 +1055,9 @@ void ModuleLoaderBase::Shutdown() {
CancelAndClearDynamicImports();
for (const auto& entry : mFetchingModules) {
- RefPtr<WaitingRequests> waitingRequests(entry.GetData());
- if (waitingRequests) {
- ResumeWaitingRequests(waitingRequests, false);
+ RefPtr<LoadingRequest> loadingRequest(entry.GetData());
+ if (loadingRequest) {
+ ResumeWaitingRequests(loadingRequest, false);
}
}
@@ -1100,6 +1121,9 @@ JS::Value ModuleLoaderBase::FindFirstParseError(ModuleLoadRequest* aRequest) {
}
for (ModuleLoadRequest* childRequest : aRequest->mImports) {
+ MOZ_DIAGNOSTIC_ASSERT(moduleScript->HadImportMap() ==
+ childRequest->mModuleScript->HadImportMap());
+
JS::Value error = FindFirstParseError(childRequest);
if (!error.isUndefined()) {
return error;
@@ -1393,6 +1417,41 @@ void ModuleLoaderBase::RegisterImportMap(UniquePtr<ImportMap> aImportMap) {
// Step 3. Set global's import map to result's import map.
mImportMap = std::move(aImportMap);
+
+ // Any import resolution has been invalidated by the addition of the import
+ // map. If speculative preloading is currently fetching any modules then
+ // cancel their requests and remove them from the map.
+ //
+ // The cancelled requests will still complete later so we have to check this
+ // in SetModuleFetchFinishedAndResumeWaitingRequests.
+ for (const auto& entry : mFetchingModules) {
+ LoadingRequest* loadingRequest = entry.GetData();
+ MOZ_DIAGNOSTIC_ASSERT(loadingRequest->mRequest->mLoadContext->IsPreload());
+ loadingRequest->mRequest->Cancel();
+ for (const auto& request : loadingRequest->mWaiting) {
+ MOZ_DIAGNOSTIC_ASSERT(request->mLoadContext->IsPreload());
+ request->Cancel();
+ }
+ }
+ mFetchingModules.Clear();
+
+ // If speculative preloading has added modules to the module map, remove
+ // them.
+ for (const auto& entry : mFetchedModules) {
+ ModuleScript* script = entry.GetData();
+ if (script) {
+ MOZ_DIAGNOSTIC_ASSERT(
+ script->ForPreload(),
+ "Non-preload module loads should block import maps");
+ MOZ_DIAGNOSTIC_ASSERT(!script->HadImportMap(),
+ "Only one import map can be registered");
+ if (JSObject* module = script->ModuleRecord()) {
+ MOZ_DIAGNOSTIC_ASSERT(!JS::ModuleIsLinked(module));
+ }
+ script->Shutdown();
+ }
+ }
+ mFetchedModules.Clear();
}
void ModuleLoaderBase::CopyModulesTo(ModuleLoaderBase* aDest) {
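
The WaitingRequests-to-LoadingRequest change means each in-flight fetch now records the request that started it, which is what lets SetModuleFetchFinishedAndResumeWaitingRequests ignore completions from requests that were cancelled (for example by RegisterImportMap above). A condensed restatement of the map discipline; StartOrWait is an illustrative name, the real entry points are SetModuleFetchStarted and WaitForModuleFetch:

    // Sketch only, paraphrasing the patch: the first request for a URI owns the
    // LoadingRequest and the fetch; later requests for the same URI just wait.
    void StartOrWait(ModuleLoadRequest* aRequest) {
      if (auto entry = mFetchingModules.Lookup(aRequest->mURI)) {
        entry.Data()->mWaiting.AppendElement(aRequest);  // piggy-back on in-flight load
        return;
      }
      RefPtr<LoadingRequest> loading = new LoadingRequest();
      loading->mRequest = aRequest;  // completions from any other request are ignored
      mFetchingModules.InsertOrUpdate(aRequest->mURI, loading);
    }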
diff --git a/js/loader/ModuleLoaderBase.h b/js/loader/ModuleLoaderBase.h
index 2c2c385a30..89d23e12bc 100644
--- a/js/loader/ModuleLoaderBase.h
+++ b/js/loader/ModuleLoaderBase.h
@@ -165,20 +165,30 @@ class ScriptLoaderInterface : public nsISupports {
*/
class ModuleLoaderBase : public nsISupports {
/*
- * The set of requests that are waiting for an ongoing fetch to complete.
+ * Represents an ongoing load operation for a URI initiated for one request
+ * and which may have other requests waiting for it to complete.
+ *
+ * These are tracked in the mFetchingModules map.
*/
- class WaitingRequests final : public nsISupports {
- virtual ~WaitingRequests() = default;
+ class LoadingRequest final : public nsISupports {
+ virtual ~LoadingRequest() = default;
public:
NS_DECL_CYCLE_COLLECTING_ISUPPORTS
- NS_DECL_CYCLE_COLLECTION_CLASS(WaitingRequests)
+ NS_DECL_CYCLE_COLLECTION_CLASS(LoadingRequest)
+
+ // The request that initiated the load and which is currently fetching or
+ // being compiled.
+ RefPtr<ModuleLoadRequest> mRequest;
+ // A list of any other requests for the same URI that are waiting for the
+ // initial load to complete. These will be resumed by ResumeWaitingRequests
+ // when that happens.
nsTArray<RefPtr<ModuleLoadRequest>> mWaiting;
};
// Module map
- nsRefPtrHashtable<nsURIHashKey, WaitingRequests> mFetchingModules;
+ nsRefPtrHashtable<nsURIHashKey, LoadingRequest> mFetchingModules;
nsRefPtrHashtable<nsURIHashKey, ModuleScript> mFetchedModules;
// List of dynamic imports that are currently being loaded.
@@ -241,6 +251,8 @@ class ModuleLoaderBase : public nsISupports {
JSContext* aCx, nsIURI* aURI, LoadedScript* aMaybeActiveScript,
JS::Handle<JSString*> aSpecifier, JS::Handle<JSObject*> aPromise) = 0;
+ virtual bool IsDynamicImportSupported() { return true; }
+
// Called when dynamic import started successfully.
virtual void OnDynamicImportStarted(ModuleLoadRequest* aRequest) {}
@@ -333,10 +345,6 @@ class ModuleLoaderBase : public nsISupports {
nsresult GetFetchedModuleURLs(nsTArray<nsCString>& aURLs);
- // Removed a fetched module from the module map. Asserts that the module is
- // unlinked. Extreme care should be taken when calling this method.
- bool RemoveFetchedModule(nsIURI* aURL);
-
// Override the module loader with given loader until ResetOverride is called.
// While overridden, ModuleLoaderBase::GetCurrentModuleLoader returns aLoader.
//
@@ -421,7 +429,7 @@ class ModuleLoaderBase : public nsISupports {
void SetModuleFetchFinishedAndResumeWaitingRequests(
ModuleLoadRequest* aRequest, nsresult aResult);
- void ResumeWaitingRequests(WaitingRequests* aWaitingRequests, bool aSuccess);
+ void ResumeWaitingRequests(LoadingRequest* aLoadingRequest, bool aSuccess);
void ResumeWaitingRequest(ModuleLoadRequest* aRequest, bool aSuccess);
void StartFetchingModuleDependencies(ModuleLoadRequest* aRequest);
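
The new IsDynamicImportSupported() hook defaults to true; a loader that cannot support import() overrides it, and HostImportModuleDynamically then reports JSMSG_DYNAMIC_IMPORT_NOT_SUPPORTED instead of attempting the load. A hypothetical subclass fragment (the class name is illustrative and the remaining pure-virtual overrides are elided):

    // Illustrative fragment: opting a loader out of dynamic import entirely.
    class AuditWorkletModuleLoader final : public JS::loader::ModuleLoaderBase {
      // ... other required overrides elided ...
      bool IsDynamicImportSupported() override { return false; }
    };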
diff --git a/js/moz.configure b/js/moz.configure
index cbcaf38f01..675736a797 100644
--- a/js/moz.configure
+++ b/js/moz.configure
@@ -173,18 +173,12 @@ def enable_decorators(value):
set_config("ENABLE_DECORATORS", enable_decorators)
set_define("ENABLE_DECORATORS", enable_decorators)
+
# Enable JSON.parse with source
# ===================================================
-option(
- "--enable-json-parse-with-source",
- default=False,
- help="Enable experimental JS JSON.parse with source support",
-)
-
-
-@depends("--enable-json-parse-with-source")
-def enable_json_parse_with_source(value):
- if value:
+@depends(milestone.is_nightly)
+def enable_json_parse_with_source(is_nightly):
+ if is_nightly:
return True
@@ -597,13 +591,14 @@ set_define("MOZ_AARCH64_JSCVT", aarch64_jscvt)
@depends(target)
-def has_pthread_jit_write_protect_np(target):
- return target.os == "OSX" and target.cpu == "aarch64"
+def has_apple_fast_wx(target):
+ return target.kernel == "Darwin" and target.cpu == "aarch64"
-# On Apple Silicon we use MAP_JIT with pthread_jit_write_protect_np to implement
-# JIT code write protection.
-set_define("JS_USE_APPLE_FAST_WX", True, when=has_pthread_jit_write_protect_np)
+# On Apple Silicon macOS we use MAP_JIT with pthread_jit_write_protect_np to
+# implement JIT code write protection, while on iOS we use MAP_JIT with
+# be_memory_inline_jit_restrict_*.
+set_define("JS_USE_APPLE_FAST_WX", True, when=has_apple_fast_wx)
# CTypes
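
JS_USE_APPLE_FAST_WX now covers any Darwin/aarch64 target rather than macOS alone; on macOS the underlying primitive is pthread_jit_write_protect_np, while iOS uses the be_memory_inline_jit_restrict_* calls from the BrowserEngineCore framework linked in js/src/build/moz.build below. A minimal sketch of the macOS-side technique this define selects, assuming a MAP_JIT mapping already exists (this is the general pattern, not the engine's actual helper, which lives in ProcessExecutableMemory.cpp):

    #include <pthread.h>  // pthread_jit_write_protect_np (Apple Silicon macOS)
    #include <string.h>   // memcpy

    // Illustrative only: flip JIT pages writable for this thread, emit code,
    // then flip them back to executable before running it.
    void PatchJitCode(void* code, const void* src, size_t len) {
      pthread_jit_write_protect_np(0);  // 0 = writable for the calling thread
      memcpy(code, src, len);
      pthread_jit_write_protect_np(1);  // 1 = executable, write-protected again
    }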
diff --git a/js/public/CallArgs.h b/js/public/CallArgs.h
index 16f8d6f904..8be1b382f2 100644
--- a/js/public/CallArgs.h
+++ b/js/public/CallArgs.h
@@ -112,7 +112,7 @@ class MOZ_STACK_CLASS NoUsedRval {
};
template <class WantUsedRval>
-class MOZ_STACK_CLASS CallArgsBase {
+class MOZ_STACK_CLASS MOZ_NON_PARAM CallArgsBase {
static_assert(std::is_same_v<WantUsedRval, IncludeUsedRval> ||
std::is_same_v<WantUsedRval, NoUsedRval>,
"WantUsedRval can only be IncludeUsedRval or NoUsedRval");
@@ -294,7 +294,7 @@ class MOZ_STACK_CLASS CallArgsBase {
} // namespace detail
-class MOZ_STACK_CLASS CallArgs
+class MOZ_STACK_CLASS MOZ_NON_PARAM CallArgs
: public detail::CallArgsBase<detail::IncludeUsedRval> {
private:
friend CallArgs CallArgsFromVp(unsigned argc, Value* vp);
@@ -315,6 +315,9 @@ class MOZ_STACK_CLASS CallArgs
for (unsigned i = 0; i < argc; ++i) {
AssertValueIsNotGray(argv[i]);
}
+ if (constructing) {
+ AssertValueIsNotGray(args.newTarget());
+ }
#endif
return args;
}
diff --git a/js/public/CallNonGenericMethod.h b/js/public/CallNonGenericMethod.h
index 80359f7a6f..73613260b9 100644
--- a/js/public/CallNonGenericMethod.h
+++ b/js/public/CallNonGenericMethod.h
@@ -60,7 +60,7 @@ extern JS_PUBLIC_API bool CallMethodIfWrapped(JSContext* cx,
// its interface is the same as that of JSNative.
//
// static bool
-// answer_getAnswer_impl(JSContext* cx, JS::CallArgs args)
+// answer_getAnswer_impl(JSContext* cx, const JS::CallArgs& args)
// {
// args.rval().setInt32(42);
// return true;
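
The comment fix above brings the example in line with the actual CallNonGenericMethod signature, which takes the CallArgs by const reference. For context, the full pattern the header documents looks like this; the guard below only checks isObject() for brevity, where real code would also test the object's class:

    // Sketch of the documented CallNonGenericMethod pattern (abbreviated from
    // the header's "answer" example).
    static bool IsAnswer(JS::HandleValue v) { return v.isObject(); }

    static bool answer_getAnswer_impl(JSContext* cx, const JS::CallArgs& args) {
      args.rval().setInt32(42);
      return true;
    }

    // The JSNative wrapper: retries the impl on the unwrapped this value when
    // the receiver is a cross-compartment wrapper.
    static bool answer_getAnswer(JSContext* cx, unsigned argc, JS::Value* vp) {
      JS::CallArgs args = JS::CallArgsFromVp(argc, vp);
      return JS::CallNonGenericMethod<IsAnswer, answer_getAnswer_impl>(cx, args);
    }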
diff --git a/js/public/GCAPI.h b/js/public/GCAPI.h
index 9bdaca9661..5773838e46 100644
--- a/js/public/GCAPI.h
+++ b/js/public/GCAPI.h
@@ -459,6 +459,15 @@ typedef enum JSGCParamKey {
* Default: ParallelMarkingThresholdMB
*/
JSGC_PARALLEL_MARKING_THRESHOLD_MB = 50,
+
+ /**
+ * Whether the semispace nursery is enabled.
+ *
+ * Pref: javascript.options.mem.gc_experimental_semispace_nursery
+ * Default: SemispaceNurseryEnabled
+ */
+ JSGC_SEMISPACE_NURSERY_ENABLED = 51,
+
} JSGCParamKey;
/*
@@ -1344,6 +1353,11 @@ namespace gc {
extern JS_PUBLIC_API JSObject* NewMemoryInfoObject(JSContext* cx);
/*
+ * Return whether |obj| is a dead nursery object during a minor GC.
+ */
+JS_PUBLIC_API bool IsDeadNurseryObject(JSObject* obj);
+
+/*
* Run the finalizer of a nursery-allocated JSObject that is known to be dead.
*
* This is a dangerous operation - only use this if you know what you're doing!
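
JSGC_SEMISPACE_NURSERY_ENABLED is an ordinary GC parameter, so an embedder (or the pref plumbing for javascript.options.mem.gc_experimental_semispace_nursery) flips it through JS_SetGCParameter; a minimal sketch:

    #include "jsapi.h"  // JS_SetGCParameter

    // Illustrative only: toggle the experimental semispace nursery on a context.
    void SetSemispaceNursery(JSContext* cx, bool enable) {
      JS_SetGCParameter(cx, JSGC_SEMISPACE_NURSERY_ENABLED, enable ? 1 : 0);
    }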
diff --git a/js/public/GCHashTable.h b/js/public/GCHashTable.h
index 69487b97a3..2c9a15072a 100644
--- a/js/public/GCHashTable.h
+++ b/js/public/GCHashTable.h
@@ -62,7 +62,8 @@ class GCHashMap : public js::HashMap<Key, Value, HashPolicy, AllocPolicy> {
public:
using EntryGCPolicy = MapEntryGCPolicy;
- explicit GCHashMap(AllocPolicy a = AllocPolicy()) : Base(std::move(a)) {}
+ explicit GCHashMap() : Base(AllocPolicy()) {}
+ explicit GCHashMap(AllocPolicy a) : Base(std::move(a)) {}
explicit GCHashMap(size_t length) : Base(length) {}
GCHashMap(AllocPolicy a, size_t length) : Base(std::move(a), length) {}
diff --git a/js/public/HeapAPI.h b/js/public/HeapAPI.h
index 26cca9e1c3..cabadeb75d 100644
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -45,7 +45,7 @@ const size_t ArenaShift = 12;
const size_t ArenaSize = size_t(1) << ArenaShift;
const size_t ArenaMask = ArenaSize - 1;
-#if defined(XP_MACOSX) && defined(__aarch64__)
+#if defined(XP_DARWIN) && defined(__aarch64__)
const size_t PageShift = 14;
#else
const size_t PageShift = 12;
@@ -82,28 +82,61 @@ const size_t ArenaBitmapBits = ArenaSize / CellBytesPerMarkBit;
const size_t ArenaBitmapBytes = HowMany(ArenaBitmapBits, 8);
const size_t ArenaBitmapWords = HowMany(ArenaBitmapBits, JS_BITS_PER_WORD);
+enum class ChunkKind : uint8_t {
+ Invalid = 0,
+ TenuredHeap,
+ NurseryToSpace,
+ NurseryFromSpace
+};
+
// The base class for all GC chunks, either in the nursery or in the tenured
// heap memory. This structure is locatable from any GC pointer by aligning to
// the chunk size.
-class alignas(CellAlignBytes) ChunkBase {
+class ChunkBase {
protected:
- ChunkBase(JSRuntime* rt, StoreBuffer* sb) {
+ // Initialize a tenured heap chunk.
+ explicit ChunkBase(JSRuntime* rt) {
MOZ_ASSERT((uintptr_t(this) & ChunkMask) == 0);
- initBase(rt, sb);
+ initBaseForTenuredChunk(rt);
}
- void initBase(JSRuntime* rt, StoreBuffer* sb) {
+ void initBaseForTenuredChunk(JSRuntime* rt) {
runtime = rt;
- storeBuffer = sb;
+ storeBuffer = nullptr;
+ kind = ChunkKind::TenuredHeap;
+ nurseryChunkIndex = UINT8_MAX;
+ }
+
+ // Initialize a nursery chunk.
+ ChunkBase(JSRuntime* rt, StoreBuffer* sb, ChunkKind kind, uint8_t chunkIndex)
+ : storeBuffer(sb),
+ runtime(rt),
+ kind(kind),
+ nurseryChunkIndex(chunkIndex) {
+ MOZ_ASSERT(kind == ChunkKind::NurseryFromSpace ||
+ kind == ChunkKind::NurseryToSpace);
+ MOZ_ASSERT((uintptr_t(this) & ChunkMask) == 0);
+ MOZ_ASSERT(storeBuffer);
}
public:
+ ChunkKind getKind() const {
+ MOZ_ASSERT_IF(storeBuffer, kind == ChunkKind::NurseryToSpace ||
+ kind == ChunkKind::NurseryFromSpace);
+ MOZ_ASSERT_IF(!storeBuffer, kind == ChunkKind::TenuredHeap);
+ return kind;
+ }
+
// The store buffer for pointers from tenured things to things in this
// chunk. Will be non-null if and only if this is a nursery chunk.
StoreBuffer* storeBuffer;
// Provide quick access to the runtime from absolutely anywhere.
JSRuntime* runtime;
+
+ ChunkKind kind;
+
+ uint8_t nurseryChunkIndex;
};
// Information about tenured heap chunks.
@@ -234,7 +267,7 @@ class TenuredChunkBase : public ChunkBase {
ChunkPageBitmap decommittedPages;
protected:
- explicit TenuredChunkBase(JSRuntime* runtime) : ChunkBase(runtime, nullptr) {
+ explicit TenuredChunkBase(JSRuntime* runtime) : ChunkBase(runtime) {
info.numArenasFree = ArenasPerChunk;
}
@@ -523,6 +556,7 @@ static MOZ_ALWAYS_INLINE ChunkBase* GetCellChunkBase(const Cell* cell) {
MOZ_ASSERT(cell);
auto* chunk = reinterpret_cast<ChunkBase*>(uintptr_t(cell) & ~ChunkMask);
MOZ_ASSERT(chunk->runtime);
+ MOZ_ASSERT(chunk->kind != ChunkKind::Invalid);
return chunk;
}
@@ -532,6 +566,7 @@ static MOZ_ALWAYS_INLINE TenuredChunkBase* GetCellChunkBase(
auto* chunk =
reinterpret_cast<TenuredChunkBase*>(uintptr_t(cell) & ~ChunkMask);
MOZ_ASSERT(chunk->runtime);
+ MOZ_ASSERT(chunk->kind == ChunkKind::TenuredHeap);
return chunk;
}
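
With the ChunkKind tag and nursery chunk index stored in every chunk header, code that already has a cell pointer can classify it straight from the chunk, without consulting the nursery. A sketch built from the helpers shown above (GetCellChunkBase and ChunkKind are the patch's names; the wrapper itself is illustrative):

    // Illustrative helper: classify a cell by the header of its GC chunk.
    // storeBuffer is non-null exactly for nursery chunks, as documented above.
    static bool IsInNurseryFromSpace(const js::gc::Cell* cell) {
      js::gc::ChunkBase* chunk = js::gc::GetCellChunkBase(cell);
      return chunk->storeBuffer &&
             chunk->getKind() == js::gc::ChunkKind::NurseryFromSpace;
    }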
diff --git a/js/public/Modules.h b/js/public/Modules.h
index 2e7192e120..4a8d45acb0 100644
--- a/js/public/Modules.h
+++ b/js/public/Modules.h
@@ -299,6 +299,8 @@ extern JS_PUBLIC_API JSObject* GetModuleEnvironment(
*/
extern JS_PUBLIC_API void ClearModuleEnvironment(JSObject* moduleObj);
+extern JS_PUBLIC_API bool ModuleIsLinked(JSObject* moduleObj);
+
} // namespace JS
#endif // js_Modules_h
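
JS::ModuleIsLinked is the small query RegisterImportMap uses above to assert that only never-linked, preload-only modules are dropped when an import map arrives. A hypothetical caller:

    // Illustrative only: refuse to evict a module record that has already been
    // linked, since other modules may hold bindings into its environment.
    bool CanSafelyEvict(JSObject* moduleRecord) {
      return !JS::ModuleIsLinked(moduleRecord);
    }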
diff --git a/js/public/Promise.h b/js/public/Promise.h
index 221d6fdfee..75bc7fd837 100644
--- a/js/public/Promise.h
+++ b/js/public/Promise.h
@@ -80,6 +80,12 @@ class JS_PUBLIC_API JobQueue {
*/
virtual bool empty() const = 0;
+ /**
+ * Returns true if the job queue stops draining, which results in `empty()`
+ * being false after `runJobs()`.
+ */
+ virtual bool isDrainingStopped() const = 0;
+
protected:
friend class AutoDebuggerJobQueueInterruption;
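
The new hook lets callers distinguish "queue drained" from "draining was stopped" (for example by an interrupt), which is why empty() can legitimately be false after runJobs(). A hypothetical embedder-side check:

    // Illustrative only: after asking the queue to drain, a non-empty queue is
    // acceptable only when draining was deliberately stopped.
    void CheckQueueAfterDrain(JS::JobQueue* queue) {
      if (!queue->empty()) {
        MOZ_ASSERT(queue->isDrainingStopped());
      }
    }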
diff --git a/js/public/PropertySpec.h b/js/public/PropertySpec.h
index 9d8115508d..0c37e598fb 100644
--- a/js/public/PropertySpec.h
+++ b/js/public/PropertySpec.h
@@ -373,6 +373,10 @@ constexpr uint8_t CheckAccessorAttrs() {
JSPropertySpec::nativeAccessors(::JS::SymbolCode::symbol, \
CheckAccessorAttrs<attributes>(), getter, \
nullptr)
+#define JS_SYM_GETSET(symbol, getter, setter, attributes) \
+ JSPropertySpec::nativeAccessors(::JS::SymbolCode::symbol, \
+ CheckAccessorAttrs<attributes>(), getter, \
+ nullptr, setter, nullptr)
#define JS_SELF_HOSTED_GET(name, getterName, attributes) \
JSPropertySpec::selfHostedAccessors(name, CheckAccessorAttrs<attributes>(), \
getterName)
@@ -417,11 +421,15 @@ struct JSFunctionSpec {
#define JS_FS_END JS_FN(nullptr, nullptr, 0, 0)
/*
- * Initializer macros for a JSFunctionSpec array element. JS_FNINFO allows the
- * simple adding of JSJitInfos. JS_SELF_HOSTED_FN declares a self-hosted
- * function. JS_INLINABLE_FN allows specifying an InlinableNative enum value for
- * natives inlined or specialized by the JIT. Finally JS_FNSPEC has slots for
- * all the fields.
+ * Initializer macros for a JSFunctionSpec array element.
+ *
+ * - JS_FNINFO allows the simple adding of JSJitInfos.
+ * - JS_SELF_HOSTED_FN declares a self-hosted function.
+ * - JS_INLINABLE_FN allows specifying an InlinableNative enum value for natives
+ * inlined or specialized by the JIT.
+ * - JS_TRAMPOLINE_FN allows specifying a TrampolineNative enum value for
+ * natives that have a JitEntry trampoline.
+ * - JS_FNSPEC has slots for all the fields.
*
* The _SYM variants allow defining a function with a symbol key rather than a
* string key. For example, use JS_SYM_FN(iterator, ...) to define an
@@ -431,6 +439,8 @@ struct JSFunctionSpec {
JS_FNSPEC(name, call, nullptr, nargs, flags, nullptr)
#define JS_INLINABLE_FN(name, call, nargs, flags, native) \
JS_FNSPEC(name, call, &js::jit::JitInfo_##native, nargs, flags, nullptr)
+#define JS_TRAMPOLINE_FN(name, call, nargs, flags, native) \
+ JS_FNSPEC(name, call, &js::jit::JitInfo_##native, nargs, flags, nullptr)
#define JS_SYM_FN(symbol, call, nargs, flags) \
JS_SYM_FNSPEC(symbol, call, nullptr, nargs, flags, nullptr)
#define JS_FNINFO(name, call, info, nargs, flags) \
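
JS_TRAMPOLINE_FN expands to a JSFunctionSpec whose JSJitInfo is js::jit::JitInfo_##native, exactly like JS_INLINABLE_FN, but the named enum is a TrampolineNative (a native with a JitEntry trampoline; Array.prototype.sort is the case this patch adds). A sketch of how such a spec entry would look; the function and enum names here follow the existing Array natives but are assumed for illustration:

    // Illustrative spec array: `sort` gets a trampoline, `join` stays a plain native.
    static const JSFunctionSpec array_methods[] = {
        JS_TRAMPOLINE_FN("sort", array_sort, 1, 0, ArraySort),  // &JitInfo_ArraySort
        JS_FN("join", array_join, 1, 0),
        JS_FS_END,
    };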
diff --git a/js/public/RealmOptions.h b/js/public/RealmOptions.h
index 0a0c1faeff..b3ab20915a 100644
--- a/js/public/RealmOptions.h
+++ b/js/public/RealmOptions.h
@@ -178,14 +178,6 @@ class JS_PUBLIC_API RealmCreationOptions {
return *this;
}
-#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
- bool getJSONParseWithSource() const { return jsonParseWithSource; }
- RealmCreationOptions& setJSONParseWithSource(bool flag) {
- jsonParseWithSource = flag;
- return *this;
- }
-#endif
-
// This flag doesn't affect JS engine behavior. It is used by Gecko to
// mark whether content windows and workers are "Secure Context"s. See
// https://w3c.github.io/webappsec-secure-contexts/
@@ -246,9 +238,6 @@ class JS_PUBLIC_API RealmCreationOptions {
bool defineSharedArrayBufferConstructor_ = true;
bool coopAndCoep_ = false;
bool toSource_ = false;
-#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
- bool jsonParseWithSource = false;
-#endif
bool secureContext_ = false;
bool freezeBuiltins_ = false;
diff --git a/js/public/RootingAPI.h b/js/public/RootingAPI.h
index e35a2c5bc8..eaae2db1b7 100644
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -232,7 +232,7 @@ struct SafelyInitialized {
// |T| per C++11 [expr.type.conv]p2 -- will produce a safely-initialized,
// safely-usable T that it can return.
-#if defined(XP_WIN) || defined(XP_MACOSX) || \
+#if defined(XP_WIN) || defined(XP_DARWIN) || \
(defined(XP_UNIX) && !defined(__clang__))
// That presumption holds for pointers, where value initialization produces
diff --git a/js/public/ValueArray.h b/js/public/ValueArray.h
index 0d520fb9b6..734d8aca65 100644
--- a/js/public/ValueArray.h
+++ b/js/public/ValueArray.h
@@ -57,6 +57,9 @@ class HandleValueArray {
MOZ_IMPLICIT HandleValueArray(const RootedVector<Value>& values)
: length_(values.length()), elements_(values.begin()) {}
+ MOZ_IMPLICIT HandleValueArray(const PersistentRootedVector<Value>& values)
+ : length_(values.length()), elements_(values.begin()) {}
+
template <size_t N>
MOZ_IMPLICIT HandleValueArray(const RootedValueArray<N>& values)
: length_(N), elements_(values.begin()) {}
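
The new overload means a PersistentRootedVector can be passed anywhere a HandleValueArray is expected, just as a RootedVector already could. A minimal usage sketch; the callee is JS_CallFunctionName and the property name is illustrative:

    // Illustrative only: values kept alive across events in a persistent vector
    // can now be handed to call APIs directly.
    bool CallWithPersistentArgs(JSContext* cx, JS::HandleObject obj) {
      JS::PersistentRootedVector<JS::Value> vals(cx);
      if (!vals.append(JS::Int32Value(1)) || !vals.append(JS::NullValue())) {
        return false;
      }
      JS::RootedValue rval(cx);
      return JS_CallFunctionName(cx, obj, "handler", JS::HandleValueArray(vals),
                                 &rval);
    }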
diff --git a/js/public/experimental/JitInfo.h b/js/public/experimental/JitInfo.h
index 1b070021bd..896b5fef68 100644
--- a/js/public/experimental/JitInfo.h
+++ b/js/public/experimental/JitInfo.h
@@ -8,6 +8,7 @@
#define js_experimental_JitInfo_h
#include "mozilla/Assertions.h" // MOZ_ASSERT
+#include "mozilla/Attributes.h" // MOZ_NON_PARAM
#include <stddef.h> // size_t
#include <stdint.h> // uint16_t, uint32_t
@@ -21,6 +22,7 @@ namespace js {
namespace jit {
enum class InlinableNative : uint16_t;
+enum class TrampolineNative : uint16_t;
} // namespace jit
@@ -72,7 +74,7 @@ struct JSJitMethodCallArgsTraits;
* A class, expected to be passed by reference, which represents the CallArgs
* for a JSJitMethodOp.
*/
-class JSJitMethodCallArgs
+class MOZ_NON_PARAM JSJitMethodCallArgs
: protected JS::detail::CallArgsBase<JS::detail::NoUsedRval> {
private:
using Base = JS::detail::CallArgsBase<JS::detail::NoUsedRval>;
@@ -137,6 +139,7 @@ class JSJitInfo {
Method,
StaticMethod,
InlinableNative,
+ TrampolineNative,
IgnoresReturnValueNative,
// Must be last
OpTypeCount
@@ -226,6 +229,7 @@ class JSJitInfo {
union {
uint16_t protoID;
js::jit::InlinableNative inlinableNative;
+ js::jit::TrampolineNative trampolineNative;
};
union {
diff --git a/js/public/friend/ErrorNumbers.msg b/js/public/friend/ErrorNumbers.msg
index e75e7b973c..42f9a6615d 100644
--- a/js/public/friend/ErrorNumbers.msg
+++ b/js/public/friend/ErrorNumbers.msg
@@ -80,6 +80,8 @@ MSG_DEF(JSMSG_SPREAD_TOO_LARGE, 0, JSEXN_RANGEERR, "array too large due t
MSG_DEF(JSMSG_BAD_WEAKMAP_KEY, 0, JSEXN_TYPEERR, "cannot use the given object as a weak map key")
MSG_DEF(JSMSG_WEAKMAP_KEY_CANT_BE_HELD_WEAKLY, 1, JSEXN_TYPEERR, "WeakMap key {0} must be an object or an unregistered symbol")
MSG_DEF(JSMSG_WEAKSET_VAL_CANT_BE_HELD_WEAKLY, 1, JSEXN_TYPEERR, "WeakSet value {0} must be an object or an unregistered symbol")
+MSG_DEF(JSMSG_WEAKMAP_KEY_MUST_BE_AN_OBJECT, 1, JSEXN_TYPEERR, "WeakMap key {0} must be an object")
+MSG_DEF(JSMSG_WEAKSET_VAL_MUST_BE_AN_OBJECT, 1, JSEXN_TYPEERR, "WeakSet value {0} must be an object")
MSG_DEF(JSMSG_BAD_GETTER_OR_SETTER, 1, JSEXN_TYPEERR, "invalid {0} usage")
MSG_DEF(JSMSG_BAD_ARRAY_LENGTH, 0, JSEXN_RANGEERR, "invalid array length")
MSG_DEF(JSMSG_SOURCE_ARRAY_TOO_LONG, 0, JSEXN_RANGEERR, "source array is too long")
@@ -137,6 +139,9 @@ MSG_DEF(JSMSG_CONSTRUCTOR_DISABLED, 1, JSEXN_TYPEERR, "{0} is disabled")
// JSON
MSG_DEF(JSMSG_JSON_BAD_PARSE, 3, JSEXN_SYNTAXERR, "JSON.parse: {0} at line {1} column {2} of the JSON data")
MSG_DEF(JSMSG_JSON_CYCLIC_VALUE, 0, JSEXN_TYPEERR, "cyclic object value")
+MSG_DEF(JSMSG_JSON_RAW_EMPTY, 0, JSEXN_SYNTAXERR, "JSON.rawJSON: argument cannot be empty when converted to string")
+MSG_DEF(JSMSG_JSON_RAW_ARRAY_OR_OBJECT, 0, JSEXN_SYNTAXERR, "JSON.rawJSON: outermost value must not be an array or object")
+MSG_DEF(JSMSG_JSON_RAW_WHITESPACE, 0, JSEXN_SYNTAXERR, "JSON.rawJSON: first or last character cannot be any of tab, line feed, carriage return, or space")
// Runtime errors
MSG_DEF(JSMSG_ASSIGN_TO_CALL, 0, JSEXN_REFERENCEERR, "cannot assign to function call")
@@ -669,6 +674,15 @@ MSG_DEF(JSMSG_TYPED_ARRAY_CONSTRUCT_OFFSET_MISALIGNED, 2, JSEXN_RANGEERR, "buffe
MSG_DEF(JSMSG_TYPED_ARRAY_CONSTRUCT_OFFSET_LENGTH_BOUNDS, 1, JSEXN_RANGEERR, "size of buffer is too small for {0}Array with byteOffset")
MSG_DEF(JSMSG_TYPED_ARRAY_CONSTRUCT_ARRAY_LENGTH_BOUNDS, 1, JSEXN_RANGEERR, "attempting to construct out-of-bounds {0}Array on ArrayBuffer")
MSG_DEF(JSMSG_TYPED_ARRAY_CONSTRUCT_TOO_LARGE, 1, JSEXN_RANGEERR, "{0}Array too large")
+MSG_DEF(JSMSG_TYPED_ARRAY_BAD_HEX_STRING_LENGTH, 0, JSEXN_SYNTAXERR, "hex-string must have an even number of characters")
+MSG_DEF(JSMSG_TYPED_ARRAY_BAD_HEX_DIGIT, 1, JSEXN_SYNTAXERR, "'{0}' is not a valid hex-digit")
+MSG_DEF(JSMSG_TYPED_ARRAY_BAD_BASE64_ALPHABET, 0, JSEXN_TYPEERR, "\"alphabet\" option must be one of \"base64\" or \"base64url\"")
+MSG_DEF(JSMSG_TYPED_ARRAY_BAD_BASE64_LAST_CHUNK_HANDLING, 0, JSEXN_TYPEERR, "\"lastChunkHandling\" option must be one of \"loose\", \"strict\", or \"stop-before-partial\"")
+MSG_DEF(JSMSG_TYPED_ARRAY_BAD_BASE64_CHAR, 1, JSEXN_SYNTAXERR, "'{0}' is not a valid base64 character")
+MSG_DEF(JSMSG_TYPED_ARRAY_BAD_INCOMPLETE_CHUNK, 0, JSEXN_SYNTAXERR, "unexpected incomplete base64 chunk")
+MSG_DEF(JSMSG_TYPED_ARRAY_BAD_BASE64_AFTER_PADDING, 1, JSEXN_SYNTAXERR, "unexpected '{0}' after base64 padding")
+MSG_DEF(JSMSG_TYPED_ARRAY_MISSING_BASE64_PADDING, 0, JSEXN_SYNTAXERR, "missing padding '=' character")
+MSG_DEF(JSMSG_TYPED_ARRAY_EXTRA_BASE64_BITS, 0, JSEXN_SYNTAXERR, "unexpected extra bits in last character")
MSG_DEF(JSMSG_TYPED_ARRAY_CALL_OR_CONSTRUCT, 1, JSEXN_TYPEERR, "cannot directly {0} builtin %TypedArray%")
MSG_DEF(JSMSG_NON_TYPED_ARRAY_RETURNED, 0, JSEXN_TYPEERR, "constructor didn't return TypedArray object")
@@ -714,10 +728,9 @@ MSG_DEF(JSMSG_CANT_DELETE_SUPER, 0, JSEXN_REFERENCEERR, "invalid delete involvin
MSG_DEF(JSMSG_REINIT_THIS, 0, JSEXN_REFERENCEERR, "super() called twice in derived class constructor")
// Modules
-MSG_DEF(JSMSG_MISSING_INDIRECT_EXPORT, 0, JSEXN_SYNTAXERR, "indirect export not found")
-MSG_DEF(JSMSG_AMBIGUOUS_INDIRECT_EXPORT, 0, JSEXN_SYNTAXERR, "ambiguous indirect export")
-MSG_DEF(JSMSG_MISSING_IMPORT, 0, JSEXN_SYNTAXERR, "import not found")
-MSG_DEF(JSMSG_AMBIGUOUS_IMPORT, 0, JSEXN_SYNTAXERR, "ambiguous import")
+MSG_DEF(JSMSG_MODULE_NO_EXPORT, 2, JSEXN_SYNTAXERR, "The requested module '{0}' doesn't provide an export named: '{1}'")
+MSG_DEF(JSMSG_MODULE_CIRCULAR_IMPORT, 2, JSEXN_SYNTAXERR, "The requested module '{0}' contains circular import: '{1}'")
+MSG_DEF(JSMSG_MODULE_AMBIGUOUS, 4, JSEXN_SYNTAXERR, "The requested module '{0}' contains ambiguous star export: '{1}' from '{2}' and '{3}'")
MSG_DEF(JSMSG_MISSING_EXPORT, 1, JSEXN_SYNTAXERR, "local binding for export '{0}' not found")
MSG_DEF(JSMSG_BAD_MODULE_STATUS, 1, JSEXN_INTERNALERR, "module record has unexpected status: {0}")
MSG_DEF(JSMSG_DYNAMIC_IMPORT_FAILED, 1, JSEXN_TYPEERR, "error loading dynamically imported module: {0}")
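For illustration only (not part of the patch), a minimal sketch of how the reworked module messages and the new typed-array messages surface from script; Uint8Array.fromHex is assumed here to be the consumer of the new hex/base64 messages, since its implementation is not shown in this changeset:

    // Assumes ./mod.js exists and has no export named "missing".
    import { missing } from "./mod.js";
    // SyntaxError: The requested module './mod.js' doesn't provide an export named: 'missing'

    // Assumes the Uint8Array.fromHex proposal consumes the new hex-digit messages.
    Uint8Array.fromHex("0a1");
    // SyntaxError: hex-string must have an even number of characters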
diff --git a/js/src/build/moz.build b/js/src/build/moz.build
index 089ef37b05..b38f848c7a 100644
--- a/js/src/build/moz.build
+++ b/js/src/build/moz.build
@@ -82,10 +82,20 @@ if CONFIG["OS_ARCH"] == "WINNT":
"bcrypt",
"ntdll",
]
+ # Version string comparison is generally wrong, but by the time it would
+ # actually matter, either bug 1489995 would be fixed, or the build would
+ # require version >= 1.78.
+ if CONFIG["RUSTC_VERSION"] and CONFIG["RUSTC_VERSION"] >= "1.78.0":
+ OS_LIBS += [
+ "synchronization",
+ ]
if CONFIG["MOZ_NEEDS_LIBATOMIC"]:
OS_LIBS += ["atomic"]
+if CONFIG["TARGET_OS"] == "iOS":
+ OS_LIBS += ["-framework BrowserEngineCore"]
+
OS_LIBS += CONFIG["REALTIME_LIBS"]
NO_EXPAND_LIBS = True
diff --git a/js/src/builtin/Array.cpp b/js/src/builtin/Array.cpp
index 76493eee7f..2ced07b6b9 100644
--- a/js/src/builtin/Array.cpp
+++ b/js/src/builtin/Array.cpp
@@ -10,6 +10,7 @@
#include "mozilla/DebugOnly.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/Maybe.h"
+#include "mozilla/ScopeExit.h"
#include "mozilla/SIMD.h"
#include "mozilla/TextUtils.h"
@@ -23,6 +24,7 @@
#include "ds/Sort.h"
#include "jit/InlinableNatives.h"
+#include "jit/TrampolineNatives.h"
#include "js/Class.h"
#include "js/Conversions.h"
#include "js/experimental/JitInfo.h" // JSJitGetterOp, JSJitInfo
@@ -2026,57 +2028,10 @@ static bool FillWithUndefined(JSContext* cx, HandleObject obj, uint32_t start,
return true;
}
-static bool ArrayNativeSortImpl(JSContext* cx, Handle<JSObject*> obj,
- Handle<Value> fval, ComparatorMatchResult comp);
-
-bool js::intrinsic_ArrayNativeSort(JSContext* cx, unsigned argc, Value* vp) {
- // This function is called from the self-hosted Array.prototype.sort
- // implementation. It returns |true| if the array was sorted, otherwise it
- // returns |false| to notify the self-hosted code to perform the sorting.
- CallArgs args = CallArgsFromVp(argc, vp);
- MOZ_ASSERT(args.length() == 1);
-
- HandleValue fval = args[0];
- MOZ_ASSERT(fval.isUndefined() || IsCallable(fval));
-
- ComparatorMatchResult comp;
- if (fval.isObject()) {
- comp = MatchNumericComparator(cx, &fval.toObject());
- if (comp == Match_Failure) {
- return false;
- }
-
- if (comp == Match_None) {
- // Non-optimized user supplied comparators perform much better when
- // called from within a self-hosted sorting function.
- args.rval().setBoolean(false);
- return true;
- }
- } else {
- comp = Match_None;
- }
-
- Rooted<JSObject*> obj(cx, &args.thisv().toObject());
-
- if (!ArrayNativeSortImpl(cx, obj, fval, comp)) {
- return false;
- }
-
- args.rval().setBoolean(true);
- return true;
-}
-
-static bool ArrayNativeSortImpl(JSContext* cx, Handle<JSObject*> obj,
- Handle<Value> fval,
- ComparatorMatchResult comp) {
- uint64_t length;
- if (!GetLengthPropertyInlined(cx, obj, &length)) {
- return false;
- }
- if (length < 2) {
- /* [] and [a] remain unchanged when sorted. */
- return true;
- }
+static bool ArraySortWithoutComparator(JSContext* cx, Handle<JSObject*> obj,
+ uint64_t length,
+ ComparatorMatchResult comp) {
+ MOZ_ASSERT(length > 1);
if (length > UINT32_MAX) {
ReportAllocationOverflow(cx);
@@ -2225,6 +2180,519 @@ static bool ArrayNativeSortImpl(JSContext* cx, Handle<JSObject*> obj,
return true;
}
+void ArraySortData::init(JSObject* obj, JSObject* comparator, ValueVector&& vec,
+ uint32_t length, uint32_t denseLen) {
+ MOZ_ASSERT(!vec.empty(), "must have items to sort");
+ MOZ_ASSERT(denseLen <= length);
+
+ obj_ = obj;
+ comparator_ = comparator;
+
+ this->length = length;
+ this->denseLen = denseLen;
+ this->vec = std::move(vec);
+
+ auto getComparatorKind = [](JSContext* cx, JSObject* comparator) {
+ if (!comparator->is<JSFunction>()) {
+ return ComparatorKind::Unoptimized;
+ }
+ JSFunction* fun = &comparator->as<JSFunction>();
+ if (!fun->hasJitEntry() || fun->isClassConstructor()) {
+ return ComparatorKind::Unoptimized;
+ }
+ if (fun->realm() == cx->realm() && fun->nargs() <= ComparatorActualArgs) {
+ return ComparatorKind::JSSameRealmNoRectifier;
+ }
+ return ComparatorKind::JS;
+ };
+ comparatorKind_ = getComparatorKind(cx(), comparator);
+}
+
+// This function handles sorting without a comparator function (or with a
+// trivial comparator function that we can pattern match) by calling
+// ArraySortWithoutComparator.
+//
+// If there's a non-trivial comparator function, it initializes the
+// ArraySortData struct for ArraySortData::sortWithComparator, which must then
+// be called to perform the sorting.
+//
+// This is separate from ArraySortData::sortWithComparator because it lets the
+// compiler generate better code for ArraySortData::sortWithComparator.
+//
+// https://tc39.es/ecma262/#sec-array.prototype.sort
+// 23.1.3.30 Array.prototype.sort ( comparefn )
+static MOZ_ALWAYS_INLINE bool ArraySortPrologue(JSContext* cx,
+ Handle<Value> thisv,
+ Handle<Value> comparefn,
+ ArraySortData* d, bool* done) {
+ // Step 1.
+ if (MOZ_UNLIKELY(!comparefn.isUndefined() && !IsCallable(comparefn))) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_BAD_SORT_ARG);
+ return false;
+ }
+
+ // Step 2.
+ Rooted<JSObject*> obj(cx, ToObject(cx, thisv));
+ if (!obj) {
+ return false;
+ }
+
+ // Step 3.
+ uint64_t length;
+ if (MOZ_UNLIKELY(!GetLengthPropertyInlined(cx, obj, &length))) {
+ return false;
+ }
+
+ // Arrays with less than two elements remain unchanged when sorted.
+ if (length <= 1) {
+ d->setReturnValue(obj);
+ *done = true;
+ return true;
+ }
+
+ // Use a fast path if there's no comparator or if the comparator is a function
+ // that we can pattern match.
+ do {
+ ComparatorMatchResult comp = Match_None;
+ if (comparefn.isObject()) {
+ comp = MatchNumericComparator(cx, &comparefn.toObject());
+ if (comp == Match_Failure) {
+ return false;
+ }
+ if (comp == Match_None) {
+ // Pattern matching failed.
+ break;
+ }
+ }
+ if (!ArraySortWithoutComparator(cx, obj, length, comp)) {
+ return false;
+ }
+ d->setReturnValue(obj);
+ *done = true;
+ return true;
+ } while (false);
+
+ // Ensure length * 2 (used below) doesn't overflow UINT32_MAX.
+ if (MOZ_UNLIKELY(length > UINT32_MAX / 2)) {
+ ReportAllocationOverflow(cx);
+ return false;
+ }
+ uint32_t len = uint32_t(length);
+
+ // Merge sort requires extra scratch space.
+ bool needsScratchSpace = len >= ArraySortData::InsertionSortLimit;
+
+ Rooted<ArraySortData::ValueVector> vec(cx);
+ if (MOZ_UNLIKELY(!vec.reserve(needsScratchSpace ? (2 * len) : len))) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
+
+ // Append elements to the vector. Skip holes.
+ if (IsPackedArray(obj)) {
+ Handle<ArrayObject*> array = obj.as<ArrayObject>();
+ const Value* elements = array->getDenseElements();
+ vec.infallibleAppend(elements, len);
+ } else {
+ RootedValue v(cx);
+ for (uint32_t i = 0; i < len; i++) {
+ if (!CheckForInterrupt(cx)) {
+ return false;
+ }
+
+ bool hole;
+ if (!HasAndGetElement(cx, obj, i, &hole, &v)) {
+ return false;
+ }
+ if (hole) {
+ continue;
+ }
+ vec.infallibleAppend(v);
+ }
+ // If there are only holes, the object is already sorted.
+ if (vec.empty()) {
+ d->setReturnValue(obj);
+ *done = true;
+ return true;
+ }
+ }
+
+ uint32_t denseLen = vec.length();
+ if (needsScratchSpace) {
+ MOZ_ALWAYS_TRUE(vec.resize(denseLen * 2));
+ }
+ d->init(obj, &comparefn.toObject(), std::move(vec.get()), len, denseLen);
+
+ // Continue in ArraySortData::sortWithComparator.
+ MOZ_ASSERT(!*done);
+ return true;
+}
+
+static ArraySortResult CallComparatorSlow(ArraySortData* d, const Value& x,
+ const Value& y) {
+ JSContext* cx = d->cx();
+ FixedInvokeArgs<2> callArgs(cx);
+ callArgs[0].set(x);
+ callArgs[1].set(y);
+ Rooted<Value> comparefn(cx, ObjectValue(*d->comparator()));
+ Rooted<Value> rval(cx);
+ if (!js::Call(cx, comparefn, UndefinedHandleValue, callArgs, &rval)) {
+ return ArraySortResult::Failure;
+ }
+ d->setComparatorReturnValue(rval);
+ return ArraySortResult::Done;
+}
+
+static MOZ_ALWAYS_INLINE ArraySortResult
+MaybeYieldToComparator(ArraySortData* d, const Value& x, const Value& y) {
+ // https://tc39.es/ecma262/#sec-comparearrayelements
+ // 23.1.3.30.2 CompareArrayElements ( x, y, comparefn )
+
+ // Steps 1-2.
+ if (x.isUndefined()) {
+ d->setComparatorReturnValue(Int32Value(y.isUndefined() ? 0 : 1));
+ return ArraySortResult::Done;
+ }
+
+ // Step 3.
+ if (y.isUndefined()) {
+ d->setComparatorReturnValue(Int32Value(-1));
+ return ArraySortResult::Done;
+ }
+
+ // Step 4. Yield to the JIT trampoline (or js::array_sort) if the comparator
+ // is a JS function we can call more efficiently from JIT code.
+ auto kind = d->comparatorKind();
+ if (MOZ_LIKELY(kind != ArraySortData::ComparatorKind::Unoptimized)) {
+ d->setComparatorArgs(x, y);
+ return (kind == ArraySortData::ComparatorKind::JSSameRealmNoRectifier)
+ ? ArraySortResult::CallJSSameRealmNoRectifier
+ : ArraySortResult::CallJS;
+ }
+ return CallComparatorSlow(d, x, y);
+}
+
+static MOZ_ALWAYS_INLINE bool RvalIsLessOrEqual(ArraySortData* data,
+ bool* lessOrEqual) {
+ // https://tc39.es/ecma262/#sec-comparearrayelements
+ // 23.1.3.30.2 CompareArrayElements ( x, y, comparefn )
+
+ // Fast path for int32 return values.
+ Value rval = data->comparatorReturnValue();
+ if (MOZ_LIKELY(rval.isInt32())) {
+ *lessOrEqual = rval.toInt32() <= 0;
+ return true;
+ }
+
+ // Step 4.a.
+ Rooted<Value> rvalRoot(data->cx(), rval);
+ double d;
+ if (MOZ_UNLIKELY(!ToNumber(data->cx(), rvalRoot, &d))) {
+ return false;
+ }
+
+ // Step 4.b-c.
+ *lessOrEqual = std::isnan(d) ? true : (d <= 0);
+ return true;
+}
+
+static void CopyValues(Value* out, const Value* list, uint32_t start,
+ uint32_t end) {
+ for (uint32_t i = start; i <= end; i++) {
+ out[i] = list[i];
+ }
+}
+
+// static
+ArraySortResult ArraySortData::sortWithComparator(ArraySortData* d) {
+ JSContext* cx = d->cx();
+ auto& vec = d->vec;
+
+ // This function is like a generator that is called repeatedly from the JIT
+ // trampoline or js::array_sort. Resume the sorting algorithm where we left
+ // off before calling the comparator.
+ switch (d->state) {
+ case State::Initial:
+ break;
+ case State::InsertionSortCall1:
+ goto insertion_sort_call1;
+ case State::InsertionSortCall2:
+ goto insertion_sort_call2;
+ case State::MergeSortCall1:
+ goto merge_sort_call1;
+ case State::MergeSortCall2:
+ goto merge_sort_call2;
+ }
+
+ d->list = vec.begin();
+
+ // Use insertion sort for small arrays.
+ if (d->denseLen < InsertionSortLimit) {
+ for (d->i = 1; d->i < d->denseLen; d->i++) {
+ d->item = vec[d->i];
+ d->j = d->i - 1;
+ do {
+ {
+ ArraySortResult res = MaybeYieldToComparator(d, vec[d->j], d->item);
+ if (res != ArraySortResult::Done) {
+ d->state = State::InsertionSortCall1;
+ return res;
+ }
+ }
+ insertion_sort_call1:
+ bool lessOrEqual;
+ if (!RvalIsLessOrEqual(d, &lessOrEqual)) {
+ return ArraySortResult::Failure;
+ }
+ if (lessOrEqual) {
+ break;
+ }
+ vec[d->j + 1] = vec[d->j];
+ } while (d->j-- > 0);
+ vec[d->j + 1] = d->item;
+ }
+ } else {
+ static constexpr size_t InitialWindowSize = 4;
+
+ // Use insertion sort for initial ranges.
+ for (d->start = 0; d->start < d->denseLen - 1;
+ d->start += InitialWindowSize) {
+ d->end =
+ std::min<uint32_t>(d->start + InitialWindowSize - 1, d->denseLen - 1);
+ for (d->i = d->start + 1; d->i <= d->end; d->i++) {
+ d->item = vec[d->i];
+ d->j = d->i - 1;
+ do {
+ {
+ ArraySortResult res = MaybeYieldToComparator(d, vec[d->j], d->item);
+ if (res != ArraySortResult::Done) {
+ d->state = State::InsertionSortCall2;
+ return res;
+ }
+ }
+ insertion_sort_call2:
+ bool lessOrEqual;
+ if (!RvalIsLessOrEqual(d, &lessOrEqual)) {
+ return ArraySortResult::Failure;
+ }
+ if (lessOrEqual) {
+ break;
+ }
+ vec[d->j + 1] = vec[d->j];
+ } while (d->j-- > d->start);
+ vec[d->j + 1] = d->item;
+ }
+ }
+
+ // Merge sort. Set d->out to scratch space initially.
+ d->out = vec.begin() + d->denseLen;
+ for (d->windowSize = InitialWindowSize; d->windowSize < d->denseLen;
+ d->windowSize *= 2) {
+ for (d->start = 0; d->start < d->denseLen;
+ d->start += 2 * d->windowSize) {
+ // The midpoint between the two subarrays.
+ d->mid = d->start + d->windowSize - 1;
+
+ // To keep from going over the edge.
+ d->end = std::min<uint32_t>(d->start + 2 * d->windowSize - 1,
+ d->denseLen - 1);
+
+ // Merge comparator-sorted slices list[start..<=mid] and
+ // list[mid+1..<=end], storing the merged sequence in out[start..<=end].
+
+ // Skip lopsided runs to avoid doing useless work.
+ if (d->mid >= d->end) {
+ CopyValues(d->out, d->list, d->start, d->end);
+ continue;
+ }
+
+ // Skip calling the comparator if the sub-list is already sorted.
+ {
+ ArraySortResult res =
+ MaybeYieldToComparator(d, d->list[d->mid], d->list[d->mid + 1]);
+ if (res != ArraySortResult::Done) {
+ d->state = State::MergeSortCall1;
+ return res;
+ }
+ }
+ merge_sort_call1:
+ bool lessOrEqual;
+ if (!RvalIsLessOrEqual(d, &lessOrEqual)) {
+ return ArraySortResult::Failure;
+ }
+ if (lessOrEqual) {
+ CopyValues(d->out, d->list, d->start, d->end);
+ continue;
+ }
+
+ d->i = d->start;
+ d->j = d->mid + 1;
+ d->k = d->start;
+
+ while (d->i <= d->mid && d->j <= d->end) {
+ {
+ ArraySortResult res =
+ MaybeYieldToComparator(d, d->list[d->i], d->list[d->j]);
+ if (res != ArraySortResult::Done) {
+ d->state = State::MergeSortCall2;
+ return res;
+ }
+ }
+ merge_sort_call2:
+ bool lessOrEqual;
+ if (!RvalIsLessOrEqual(d, &lessOrEqual)) {
+ return ArraySortResult::Failure;
+ }
+ d->out[d->k++] = lessOrEqual ? d->list[d->i++] : d->list[d->j++];
+ }
+
+ // Empty out any remaining elements. Use local variables to let the
+ // compiler generate more efficient code.
+ Value* out = d->out;
+ Value* list = d->list;
+ uint32_t k = d->k;
+ uint32_t mid = d->mid;
+ uint32_t end = d->end;
+ for (uint32_t i = d->i; i <= mid; i++) {
+ out[k++] = list[i];
+ }
+ for (uint32_t j = d->j; j <= end; j++) {
+ out[k++] = list[j];
+ }
+ }
+
+ // Swap both lists.
+ std::swap(d->list, d->out);
+ }
+ }
+
+ // Copy elements to the array.
+ Rooted<JSObject*> obj(cx, d->obj_);
+ if (!SetArrayElements(cx, obj, 0, d->denseLen, d->list)) {
+ return ArraySortResult::Failure;
+ }
+
+ // Re-create any holes that sorted to the end of the array.
+ for (uint32_t i = d->denseLen; i < d->length; i++) {
+ if (!CheckForInterrupt(cx) || !DeletePropertyOrThrow(cx, obj, i)) {
+ return ArraySortResult::Failure;
+ }
+ }
+
+ d->freeMallocData();
+ d->setReturnValue(obj);
+ return ArraySortResult::Done;
+}
+
+// https://tc39.es/ecma262/#sec-array.prototype.sort
+// 23.1.3.30 Array.prototype.sort ( comparefn )
+bool js::array_sort(JSContext* cx, unsigned argc, Value* vp) {
+ AutoJSMethodProfilerEntry pseudoFrame(cx, "Array.prototype", "sort");
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // If we have a comparator argument, use the JIT trampoline implementation
+ // instead. This avoids a performance cliff (especially with large arrays)
+ // because C++ => JIT calls are much slower than Trampoline => JIT calls.
+ if (args.hasDefined(0) && jit::IsBaselineInterpreterEnabled()) {
+ return CallTrampolineNativeJitCode(cx, jit::TrampolineNative::ArraySort,
+ args);
+ }
+
+ Rooted<ArraySortData> data(cx, cx);
+
+ // On all return paths other than ArraySortData::sortWithComparator returning
+ // Done, we call freeMallocData so that debug assertions don't fail. This
+ // matches the JIT trampoline, where we can't rely on C++ destructors.
+ auto freeData =
+ mozilla::MakeScopeExit([&]() { data.get().freeMallocData(); });
+
+ bool done = false;
+ if (!ArraySortPrologue(cx, args.thisv(), args.get(0), data.address(),
+ &done)) {
+ return false;
+ }
+ if (done) {
+ args.rval().set(data.get().returnValue());
+ return true;
+ }
+
+ FixedInvokeArgs<2> callArgs(cx);
+ Rooted<Value> rval(cx);
+
+ while (true) {
+ ArraySortResult res = ArraySortData::sortWithComparator(data.address());
+ switch (res) {
+ case ArraySortResult::Failure:
+ return false;
+
+ case ArraySortResult::Done:
+ freeData.release();
+ args.rval().set(data.get().returnValue());
+ return true;
+
+ case ArraySortResult::CallJS:
+ case ArraySortResult::CallJSSameRealmNoRectifier:
+ MOZ_ASSERT(data.get().comparatorThisValue().isUndefined());
+ MOZ_ASSERT(&args[0].toObject() == data.get().comparator());
+ callArgs[0].set(data.get().comparatorArg(0));
+ callArgs[1].set(data.get().comparatorArg(1));
+ if (!js::Call(cx, args[0], UndefinedHandleValue, callArgs, &rval)) {
+ return false;
+ }
+ data.get().setComparatorReturnValue(rval);
+ break;
+ }
+ }
+}
+
+ArraySortResult js::ArraySortFromJit(JSContext* cx,
+ jit::TrampolineNativeFrameLayout* frame) {
+ // Initialize the ArraySortData class stored in the trampoline frame.
+ void* dataUninit = frame->getFrameData<ArraySortData>();
+ auto* data = new (dataUninit) ArraySortData(cx);
+
+ Rooted<Value> thisv(cx, frame->thisv());
+ Rooted<Value> comparefn(cx);
+ if (frame->numActualArgs() > 0) {
+ comparefn = frame->actualArgs()[0];
+ }
+
+ bool done = false;
+ if (!ArraySortPrologue(cx, thisv, comparefn, data, &done)) {
+ return ArraySortResult::Failure;
+ }
+ if (done) {
+ data->freeMallocData();
+ return ArraySortResult::Done;
+ }
+
+ return ArraySortData::sortWithComparator(data);
+}
+
+ArraySortData::ArraySortData(JSContext* cx) : cx_(cx) {
+#ifdef DEBUG
+ cx_->liveArraySortDataInstances++;
+#endif
+}
+
+void ArraySortData::freeMallocData() {
+ vec.clearAndFree();
+#ifdef DEBUG
+ MOZ_ASSERT(cx_->liveArraySortDataInstances > 0);
+ cx_->liveArraySortDataInstances--;
+#endif
+}
+
+void ArraySortData::trace(JSTracer* trc) {
+ TraceNullableRoot(trc, &comparator_, "comparator_");
+ TraceRoot(trc, &thisv, "thisv");
+ TraceRoot(trc, &callArgs[0], "callArgs0");
+ TraceRoot(trc, &callArgs[1], "callArgs1");
+ vec.trace(trc);
+ TraceRoot(trc, &item, "item");
+ TraceNullableRoot(trc, &obj_, "obj");
+}
+
bool js::NewbornArrayPush(JSContext* cx, HandleObject obj, const Value& v) {
Handle<ArrayObject*> arr = obj.as<ArrayObject>();
@@ -4846,7 +5314,7 @@ static const JSFunctionSpec array_methods[] = {
/* Perl-ish methods. */
JS_INLINABLE_FN("join", array_join, 1, 0, ArrayJoin),
JS_FN("reverse", array_reverse, 0, 0),
- JS_SELF_HOSTED_FN("sort", "ArraySort", 1, 0),
+ JS_TRAMPOLINE_FN("sort", array_sort, 1, 0, ArraySort),
JS_INLINABLE_FN("push", array_push, 1, 0, ArrayPush),
JS_INLINABLE_FN("pop", array_pop, 0, 0, ArrayPop),
JS_INLINABLE_FN("shift", array_shift, 0, 0, ArrayShift),
diff --git a/js/src/builtin/Array.h b/js/src/builtin/Array.h
index f0de034998..f861505e7b 100644
--- a/js/src/builtin/Array.h
+++ b/js/src/builtin/Array.h
@@ -15,6 +15,10 @@
namespace js {
+namespace jit {
+class TrampolineNativeFrameLayout;
+}
+
class ArrayObject;
MOZ_ALWAYS_INLINE bool IdIsIndex(jsid id, uint32_t* indexp) {
@@ -107,16 +111,12 @@ extern bool GetElements(JSContext* cx, HandleObject aobj, uint32_t length,
/* Natives exposed for optimization by the interpreter and JITs. */
-extern bool intrinsic_ArrayNativeSort(JSContext* cx, unsigned argc,
- js::Value* vp);
-
extern bool array_includes(JSContext* cx, unsigned argc, js::Value* vp);
extern bool array_indexOf(JSContext* cx, unsigned argc, js::Value* vp);
extern bool array_lastIndexOf(JSContext* cx, unsigned argc, js::Value* vp);
-
extern bool array_pop(JSContext* cx, unsigned argc, js::Value* vp);
-
extern bool array_join(JSContext* cx, unsigned argc, js::Value* vp);
+extern bool array_sort(JSContext* cx, unsigned argc, js::Value* vp);
extern void ArrayShiftMoveElements(ArrayObject* arr);
@@ -172,6 +172,147 @@ extern bool ArrayLengthGetter(JSContext* cx, HandleObject obj, HandleId id,
extern bool ArrayLengthSetter(JSContext* cx, HandleObject obj, HandleId id,
HandleValue v, ObjectOpResult& result);
+// Note: we use uint32_t because the JIT code uses branch32.
+enum class ArraySortResult : uint32_t {
+ Failure,
+ Done,
+ CallJS,
+ CallJSSameRealmNoRectifier
+};
+
+// We use a JIT trampoline to optimize sorting with a comparator function. The
+// trampoline frame has an ArraySortData instance that contains all state used
+// by the sorting algorithm. The sorting algorithm is implemented as a C++
+// "generator" that can yield to the trampoline to perform a fast JIT => JIT
+// call to the comparator function. When the comparator function returns, the
+// trampoline calls back into C++ to resume the sorting algorithm.
+//
+// ArraySortData stores the JS Values in a js::Vector. To ensure we don't leak
+// its memory, we have debug assertions to check that for each C++ constructor
+// call we call |freeMallocData| exactly once. C++ code calls |freeMallocData|
+// when it's done sorting and the JIT exception handler calls it when unwinding
+// the trampoline frame.
+class ArraySortData {
+ public:
+ enum class ComparatorKind : uint8_t {
+ Unoptimized,
+ JS,
+ JSSameRealmNoRectifier,
+ };
+
+ static constexpr size_t ComparatorActualArgs = 2;
+
+ // Insertion sort is used if the length is < InsertionSortLimit.
+ static constexpr size_t InsertionSortLimit = 24;
+
+ using ValueVector = GCVector<Value, 8, SystemAllocPolicy>;
+
+ protected: // Silence Clang warning about unused private fields.
+ // Data for the comparator call. These fields must match the JitFrameLayout
+ // to let us perform efficient calls to the comparator from JIT code.
+ // This is asserted in the JIT trampoline code.
+ // callArgs[0] is also used to store the return value of the sort function and
+ // the comparator.
+ uintptr_t descriptor_;
+ JSObject* comparator_ = nullptr;
+ Value thisv;
+ Value callArgs[ComparatorActualArgs];
+
+ private:
+ ValueVector vec;
+ Value item;
+ JSContext* cx_;
+ JSObject* obj_ = nullptr;
+
+ Value* list;
+ Value* out;
+
+ // The value of the .length property.
+ uint32_t length;
+
+ // The number of items to sort. Can be less than |length| if the object has
+ // holes.
+ uint32_t denseLen;
+
+ uint32_t windowSize;
+ uint32_t start;
+ uint32_t mid;
+ uint32_t end;
+ uint32_t i, j, k;
+
+ // The state value determines where we resume in sortWithComparator.
+ enum class State : uint8_t {
+ Initial,
+ InsertionSortCall1,
+ InsertionSortCall2,
+ MergeSortCall1,
+ MergeSortCall2
+ };
+ State state = State::Initial;
+ ComparatorKind comparatorKind_;
+
+ // Optional padding to ensure proper alignment of the comparator JIT frame.
+#if !defined(JS_64BIT) && !defined(DEBUG)
+ protected: // Silence Clang warning about unused private field.
+ size_t padding;
+#endif
+
+ public:
+ explicit ArraySortData(JSContext* cx);
+
+ void MOZ_ALWAYS_INLINE init(JSObject* obj, JSObject* comparator,
+ ValueVector&& vec, uint32_t length,
+ uint32_t denseLen);
+
+ JSContext* cx() const { return cx_; }
+
+ JSObject* comparator() const {
+ MOZ_ASSERT(comparator_);
+ return comparator_;
+ }
+
+ Value returnValue() const { return callArgs[0]; }
+ void setReturnValue(JSObject* obj) { callArgs[0].setObject(*obj); }
+
+ Value comparatorArg(size_t index) {
+ MOZ_ASSERT(index < ComparatorActualArgs);
+ return callArgs[index];
+ }
+ Value comparatorThisValue() const { return thisv; }
+ Value comparatorReturnValue() const { return callArgs[0]; }
+ void setComparatorArgs(const Value& x, const Value& y) {
+ callArgs[0] = x;
+ callArgs[1] = y;
+ }
+ void setComparatorReturnValue(const Value& v) { callArgs[0] = v; }
+
+ ComparatorKind comparatorKind() const { return comparatorKind_; }
+
+ static ArraySortResult sortWithComparator(ArraySortData* d);
+
+ void freeMallocData();
+ void trace(JSTracer* trc);
+
+ static constexpr int32_t offsetOfDescriptor() {
+ return offsetof(ArraySortData, descriptor_);
+ }
+ static constexpr int32_t offsetOfComparator() {
+ return offsetof(ArraySortData, comparator_);
+ }
+ static constexpr int32_t offsetOfComparatorReturnValue() {
+ return offsetof(ArraySortData, callArgs[0]);
+ }
+ static constexpr int32_t offsetOfComparatorThis() {
+ return offsetof(ArraySortData, thisv);
+ }
+ static constexpr int32_t offsetOfComparatorArgs() {
+ return offsetof(ArraySortData, callArgs);
+ }
+};
+
+extern ArraySortResult ArraySortFromJit(
+ JSContext* cx, jit::TrampolineNativeFrameLayout* frame);
+
class MOZ_NON_TEMPORARY_CLASS ArraySpeciesLookup final {
/*
* An ArraySpeciesLookup holds the following:
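As a quick illustration (not part of the patch): whichever internal path ArraySortData takes, insertion sort below InsertionSortLimit or merge sort above it, the result is stable, which is observable from script:

    var items = [{ k: 1, tag: "a" }, { k: 0, tag: "b" }, { k: 1, tag: "c" }];
    items.sort((x, y) => x.k - y.k);
    // ties keep their original order: "b", "a", "c"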
diff --git a/js/src/builtin/Array.js b/js/src/builtin/Array.js
index f831615041..b62edef7e9 100644
--- a/js/src/builtin/Array.js
+++ b/js/src/builtin/Array.js
@@ -76,85 +76,6 @@ function ArraySome(callbackfn /*, thisArg*/) {
// Inlining this enables inlining of the callback function.
SetIsInlinableLargeFunction(ArraySome);
-// ES2023 draft rev cb4224156c54156f30c18c50784c1b0148ebfae5
-// 23.1.3.30 Array.prototype.sort ( comparefn )
-function ArraySortCompare(comparefn) {
- return function(x, y) {
- // Steps 4.a-c.
- if (x === undefined) {
- if (y === undefined) {
- return 0;
- }
- return 1;
- }
- if (y === undefined) {
- return -1;
- }
-
- // Step 4.d.i.
- var v = ToNumber(callContentFunction(comparefn, undefined, x, y));
-
- // Steps 4.d.ii-iii.
- return v !== v ? 0 : v;
- };
-}
-
-// ES2023 draft rev cb4224156c54156f30c18c50784c1b0148ebfae5
-// 23.1.3.30 Array.prototype.sort ( comparefn )
-function ArraySort(comparefn) {
- // Step 1.
- if (comparefn !== undefined) {
- if (!IsCallable(comparefn)) {
- ThrowTypeError(JSMSG_BAD_SORT_ARG);
- }
- }
-
- // Step 2.
- var O = ToObject(this);
-
- // First try to sort the array in native code, if that fails, indicated by
- // returning |false| from ArrayNativeSort, sort it in self-hosted code.
- if (callFunction(ArrayNativeSort, O, comparefn)) {
- return O;
- }
-
- // Step 3.
- var len = ToLength(O.length);
-
- // Arrays with less than two elements remain unchanged when sorted.
- if (len <= 1) {
- return O;
- }
-
- // Step 4.
- var wrappedCompareFn = ArraySortCompare(comparefn);
-
- // Step 5.
- // To save effort we will do all of our work on a dense list, then create
- // holes at the end.
- var denseList = [];
- var denseLen = 0;
-
- for (var i = 0; i < len; i++) {
- if (i in O) {
- DefineDataProperty(denseList, denseLen++, O[i]);
- }
- }
-
- if (denseLen < 1) {
- return O;
- }
-
- var sorted = MergeSort(denseList, denseLen, wrappedCompareFn);
-
- assert(IsPackedArray(sorted), "sorted is a packed array");
- assert(sorted.length === denseLen, "sorted array has the correct length");
-
- MoveHoles(O, len, sorted, denseLen);
-
- return O;
-}
-
/* ES5 15.4.4.18. */
function ArrayForEach(callbackfn /*, thisArg*/) {
/* Step 1. */
@@ -1335,22 +1256,8 @@ function ArrayToSorted(comparefn) {
return items;
}
- // First try to sort the array in native code, if that fails, indicated by
- // returning |false| from ArrayNativeSort, sort it in self-hosted code.
- if (callFunction(ArrayNativeSort, items, comparefn)) {
- return items;
- }
-
- // Step 5.
- var wrappedCompareFn = ArraySortCompare(comparefn);
-
- // Steps 6-9.
- var sorted = MergeSort(items, len, wrappedCompareFn);
-
- assert(IsPackedArray(sorted), "sorted is a packed array");
- assert(sorted.length === len, "sorted array has the correct length");
-
- return sorted;
+ // Steps 5-9.
+ return callFunction(std_Array_sort, items, comparefn);
}
// https://github.com/tc39/proposal-array-find-from-last
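A small sketch (not part of the patch) of the unchanged ArrayToSorted behavior now that steps 5-9 are delegated to the native sort:

    var orig = [3, 1, 2];
    var copy = orig.toSorted((a, b) => a - b);
    // copy is [1, 2, 3]; orig is still [3, 1, 2]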
diff --git a/js/src/builtin/JSON.cpp b/js/src/builtin/JSON.cpp
index dbfab8b43a..1423595937 100644
--- a/js/src/builtin/JSON.cpp
+++ b/js/src/builtin/JSON.cpp
@@ -20,15 +20,18 @@
#include "builtin/Array.h"
#include "builtin/BigInt.h"
#include "builtin/ParseRecordObject.h"
+#include "builtin/RawJSONObject.h"
#include "js/friend/ErrorMessages.h" // js::GetErrorMessage, JSMSG_*
#include "js/friend/StackLimits.h" // js::AutoCheckRecursionLimit
#include "js/Object.h" // JS::GetBuiltinClass
+#include "js/Prefs.h" // JS::Prefs
#include "js/PropertySpec.h"
#include "js/StableStringChars.h"
#include "js/TypeDecls.h"
#include "js/Value.h"
#include "util/StringBuffer.h"
-#include "vm/BooleanObject.h" // js::BooleanObject
+#include "vm/BooleanObject.h" // js::BooleanObject
+#include "vm/EqualityOperations.h" // js::SameValue
#include "vm/Interpreter.h"
#include "vm/Iteration.h"
#include "vm/JSAtomUtils.h" // ToAtom
@@ -436,6 +439,16 @@ class CycleDetector {
bool appended_;
};
+static inline JSString* MaybeGetRawJSON(JSContext* cx, JSObject* obj) {
+ if (!obj->is<RawJSONObject>()) {
+ return nullptr;
+ }
+
+ JSString* rawJSON = obj->as<js::RawJSONObject>().rawJSON(cx);
+ MOZ_ASSERT(rawJSON);
+ return rawJSON;
+}
+
#ifdef ENABLE_RECORD_TUPLE
enum class JOType { Record, Object };
template <JOType type = JOType::Object>
@@ -783,6 +796,12 @@ static bool SerializeJSONProperty(JSContext* cx, const Value& v,
MOZ_ASSERT(v.hasObjectPayload());
RootedObject obj(cx, &v.getObjectPayload());
+ /* https://tc39.es/proposal-json-parse-with-source/#sec-serializejsonproperty
+ * Step 4a. */
+ if (JSString* rawJSON = MaybeGetRawJSON(cx, obj)) {
+ return scx->sb.append(rawJSON);
+ }
+
MOZ_ASSERT(
!scx->maybeSafely || obj->is<PlainObject>() || obj->is<ArrayObject>(),
"input to JS::ToJSONMaybeSafely must not include reachable "
@@ -1233,6 +1252,10 @@ static bool FastSerializeJSONProperty(JSContext* cx, Handle<Value> v,
MOZ_ASSERT(*whySlow == BailReason::NO_REASON);
MOZ_ASSERT(v.isObject());
+ if (JSString* rawJSON = MaybeGetRawJSON(cx, &v.toObject())) {
+ return scx->sb.append(rawJSON);
+ }
+
/*
* FastSerializeJSONProperty is an optimistic fast path for the
* SerializeJSONProperty algorithm that applies in limited situations. It
@@ -1356,19 +1379,24 @@ static bool FastSerializeJSONProperty(JSContext* cx, Handle<Value> v,
}
if (val.isObject()) {
- if (stack.length() >= MAX_STACK_DEPTH - 1) {
- *whySlow = BailReason::DEEP_RECURSION;
- return true;
+ if (JSString* rawJSON = MaybeGetRawJSON(cx, &val.toObject())) {
+ if (!scx->sb.append(rawJSON)) {
+ return false;
+ }
+ } else {
+ if (stack.length() >= MAX_STACK_DEPTH - 1) {
+ *whySlow = BailReason::DEEP_RECURSION;
+ return true;
+ }
+ // Save the current iterator position on the stack and
+ // switch to processing the nested value.
+ stack.infallibleAppend(std::move(top));
+ top = FastStackEntry(&val.toObject().as<NativeObject>());
+ wroteMember = false;
+ nestedObject = true; // Break out to the outer loop.
+ break;
}
- // Save the current iterator position on the stack and
- // switch to processing the nested value.
- stack.infallibleAppend(std::move(top));
- top = FastStackEntry(&val.toObject().as<NativeObject>());
- wroteMember = false;
- nestedObject = true; // Break out to the outer loop.
- break;
- }
- if (!EmitSimpleValue(cx, scx->sb, val)) {
+ } else if (!EmitSimpleValue(cx, scx->sb, val)) {
return false;
}
}
@@ -1433,19 +1461,24 @@ static bool FastSerializeJSONProperty(JSContext* cx, Handle<Value> v,
return false;
}
if (val.isObject()) {
- if (stack.length() >= MAX_STACK_DEPTH - 1) {
- *whySlow = BailReason::DEEP_RECURSION;
- return true;
+ if (JSString* rawJSON = MaybeGetRawJSON(cx, &val.toObject())) {
+ if (!scx->sb.append(rawJSON)) {
+ return false;
+ }
+ } else {
+ if (stack.length() >= MAX_STACK_DEPTH - 1) {
+ *whySlow = BailReason::DEEP_RECURSION;
+ return true;
+ }
+ // Save the current iterator position on the stack and
+ // switch to processing the nested value.
+ stack.infallibleAppend(std::move(top));
+ top = FastStackEntry(&val.toObject().as<NativeObject>());
+ wroteMember = false;
+ nesting = true; // Break out to the outer loop.
+ break;
}
- // Save the current iterator position on the stack and
- // switch to processing the nested value.
- stack.infallibleAppend(std::move(top));
- top = FastStackEntry(&val.toObject().as<NativeObject>());
- wroteMember = false;
- nesting = true; // Break out to the outer loop.
- break;
- }
- if (!EmitSimpleValue(cx, scx->sb, val)) {
+ } else if (!EmitSimpleValue(cx, scx->sb, val)) {
return false;
}
}
@@ -1733,6 +1766,41 @@ static bool InternalizeJSONProperty(
return false;
}
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ RootedObject context(cx);
+ Rooted<UniquePtr<ParseRecordObject::EntryMap>> entries(cx);
+ if (JS::Prefs::experimental_json_parse_with_source()) {
+ // https://tc39.es/proposal-json-parse-with-source/#sec-internalizejsonproperty
+ bool sameVal = false;
+ Rooted<Value> parsedValue(cx, parseRecord.get().value);
+ if (!SameValue(cx, parsedValue, val, &sameVal)) {
+ return false;
+ }
+ if (!parseRecord.get().isEmpty() && sameVal) {
+ if (parseRecord.get().parseNode) {
+ MOZ_ASSERT(!val.isObject());
+ Rooted<IdValueVector> props(cx, cx);
+ if (!props.emplaceBack(
+ IdValuePair(NameToId(cx->names().source),
+ StringValue(parseRecord.get().parseNode)))) {
+ return false;
+ }
+ context = NewPlainObjectWithUniqueNames(cx, props);
+ if (!context) {
+ return false;
+ }
+ }
+ entries = std::move(parseRecord.get().entries);
+ }
+ if (!context) {
+ context = NewPlainObject(cx);
+ if (!context) {
+ return false;
+ }
+ }
+ }
+#endif
+
/* Step 2. */
if (val.isObject()) {
RootedObject obj(cx, &val.toObject());
@@ -1763,6 +1831,13 @@ static bool InternalizeJSONProperty(
/* Step 2a(iii)(1). */
Rooted<ParseRecordObject> elementRecord(cx);
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ if (entries) {
+ if (auto entry = entries->lookup(id)) {
+ elementRecord = std::move(entry->value());
+ }
+ }
+#endif
if (!InternalizeJSONProperty(cx, obj, id, reviver, &elementRecord,
&newElement)) {
return false;
@@ -1804,6 +1879,13 @@ static bool InternalizeJSONProperty(
/* Step 2c(ii)(1). */
id = keys[i];
Rooted<ParseRecordObject> entryRecord(cx);
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ if (entries) {
+ if (auto entry = entries->lookup(id)) {
+ entryRecord = std::move(entry->value());
+ }
+ }
+#endif
if (!InternalizeJSONProperty(cx, obj, id, reviver, &entryRecord,
&newElement)) {
return false;
@@ -1838,15 +1920,7 @@ static bool InternalizeJSONProperty(
RootedValue keyVal(cx, StringValue(key));
#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
- if (cx->realm()->creationOptions().getJSONParseWithSource()) {
- RootedObject context(cx, NewPlainObject(cx));
- if (!context) {
- return false;
- }
- Rooted<Value> parseNode(cx, StringValue(parseRecord.get().parseNode));
- if (!DefineDataProperty(cx, context, cx->names().source, parseNode)) {
- return false;
- }
+ if (JS::Prefs::experimental_json_parse_with_source()) {
RootedValue contextVal(cx, ObjectValue(*context));
return js::Call(cx, reviver, holder, keyVal, val, contextVal, vp);
}
@@ -1867,7 +1941,7 @@ static bool Revive(JSContext* cx, HandleValue reviver,
}
#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
- MOZ_ASSERT_IF(cx->realm()->creationOptions().getJSONParseWithSource(),
+ MOZ_ASSERT_IF(JS::Prefs::experimental_json_parse_with_source(),
pro.get().value == vp.get());
#endif
Rooted<jsid> id(cx, NameToId(cx->names().empty_));
@@ -1876,10 +1950,10 @@ static bool Revive(JSContext* cx, HandleValue reviver,
template <typename CharT>
bool ParseJSON(JSContext* cx, const mozilla::Range<const CharT> chars,
- MutableHandleValue vp, MutableHandle<ParseRecordObject> pro) {
+ MutableHandleValue vp) {
Rooted<JSONParser<CharT>> parser(cx, cx, chars,
JSONParser<CharT>::ParseType::JSONParse);
- return parser.parse(vp, pro);
+ return parser.parse(vp);
}
template <typename CharT>
@@ -1888,7 +1962,15 @@ bool js::ParseJSONWithReviver(JSContext* cx,
HandleValue reviver, MutableHandleValue vp) {
/* https://262.ecma-international.org/14.0/#sec-json.parse steps 2-10. */
Rooted<ParseRecordObject> pro(cx);
- if (!ParseJSON(cx, chars, vp, &pro)) {
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ if (JS::Prefs::experimental_json_parse_with_source() && IsCallable(reviver)) {
+ Rooted<JSONReviveParser<CharT>> parser(cx, cx, chars);
+ if (!parser.get().parse(vp, &pro)) {
+ return false;
+ }
+ } else
+#endif
+ if (!ParseJSON(cx, chars, vp)) {
return false;
}
@@ -2086,14 +2168,13 @@ static bool json_parseImmutable(JSContext* cx, unsigned argc, Value* vp) {
HandleValue reviver = args.get(1);
RootedValue unfiltered(cx);
- Rooted<ParseRecordObject> pro(cx);
if (linearChars.isLatin1()) {
- if (!ParseJSON(cx, linearChars.latin1Range(), &unfiltered, &pro)) {
+ if (!ParseJSON(cx, linearChars.latin1Range(), &unfiltered)) {
return false;
}
} else {
- if (!ParseJSON(cx, linearChars.twoByteRange(), &unfiltered, &pro)) {
+ if (!ParseJSON(cx, linearChars.twoByteRange(), &unfiltered)) {
return false;
}
}
@@ -2103,6 +2184,104 @@ static bool json_parseImmutable(JSContext* cx, unsigned argc, Value* vp) {
}
#endif
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+/* https://tc39.es/proposal-json-parse-with-source/#sec-json.israwjson */
+static bool json_isRawJSON(JSContext* cx, unsigned argc, Value* vp) {
+ AutoJSMethodProfilerEntry pseudoFrame(cx, "JSON", "isRawJSON");
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ /* Step 1. */
+ if (args.get(0).isObject()) {
+ Rooted<JSObject*> obj(cx, &args[0].toObject());
+# ifdef DEBUG
+ if (obj->is<RawJSONObject>()) {
+ bool objIsFrozen = false;
+ MOZ_ASSERT(js::TestIntegrityLevel(cx, obj, IntegrityLevel::Frozen,
+ &objIsFrozen));
+ MOZ_ASSERT(objIsFrozen);
+ }
+# endif // DEBUG
+ args.rval().setBoolean(obj->is<RawJSONObject>());
+ return true;
+ }
+
+ /* Step 2. */
+ args.rval().setBoolean(false);
+ return true;
+}
+
+static inline bool IsJSONWhitespace(char16_t ch) {
+ return ch == '\t' || ch == '\n' || ch == '\r' || ch == ' ';
+}
+
+/* https://tc39.es/proposal-json-parse-with-source/#sec-json.rawjson */
+static bool json_rawJSON(JSContext* cx, unsigned argc, Value* vp) {
+ AutoJSMethodProfilerEntry pseudoFrame(cx, "JSON", "rawJSON");
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ /* Step 1. */
+ JSString* jsonString = ToString<CanGC>(cx, args.get(0));
+ if (!jsonString) {
+ return false;
+ }
+
+ Rooted<JSLinearString*> linear(cx, jsonString->ensureLinear(cx));
+ if (!linear) {
+ return false;
+ }
+
+ AutoStableStringChars linearChars(cx);
+ if (!linearChars.init(cx, linear)) {
+ return false;
+ }
+
+ /* Step 2. */
+ if (linear->empty()) {
+ JS_ReportErrorNumberASCII(cx, js::GetErrorMessage, nullptr,
+ JSMSG_JSON_RAW_EMPTY);
+ return false;
+ }
+ if (IsJSONWhitespace(linear->latin1OrTwoByteChar(0)) ||
+ IsJSONWhitespace(linear->latin1OrTwoByteChar(linear->length() - 1))) {
+ JS_ReportErrorNumberASCII(cx, js::GetErrorMessage, nullptr,
+ JSMSG_JSON_RAW_WHITESPACE);
+ return false;
+ }
+
+ /* Step 3. */
+ RootedValue parsedValue(cx);
+ if (linearChars.isLatin1()) {
+ if (!ParseJSON(cx, linearChars.latin1Range(), &parsedValue)) {
+ return false;
+ }
+ } else {
+ if (!ParseJSON(cx, linearChars.twoByteRange(), &parsedValue)) {
+ return false;
+ }
+ }
+
+ if (parsedValue.isObject()) {
+ JS_ReportErrorNumberASCII(cx, js::GetErrorMessage, nullptr,
+ JSMSG_JSON_RAW_ARRAY_OR_OBJECT);
+ return false;
+ }
+
+ /* Steps 4-6. */
+ Rooted<RawJSONObject*> obj(cx, RawJSONObject::create(cx, linear));
+ if (!obj) {
+ return false;
+ }
+
+ /* Step 7. */
+ if (!js::FreezeObject(cx, obj)) {
+ return false;
+ }
+
+ args.rval().setObject(*obj);
+ return true;
+}
+#endif // ENABLE_JSON_PARSE_WITH_SOURCE
+
/* https://262.ecma-international.org/14.0/#sec-json.stringify */
bool json_stringify(JSContext* cx, unsigned argc, Value* vp) {
AutoJSMethodProfilerEntry pseudoFrame(cx, "JSON", "stringify");
@@ -2141,11 +2320,16 @@ bool json_stringify(JSContext* cx, unsigned argc, Value* vp) {
}
static const JSFunctionSpec json_static_methods[] = {
- JS_FN("toSource", json_toSource, 0, 0), JS_FN("parse", json_parse, 2, 0),
+ JS_FN("toSource", json_toSource, 0, 0),
+ JS_FN("parse", json_parse, 2, 0),
JS_FN("stringify", json_stringify, 3, 0),
#ifdef ENABLE_RECORD_TUPLE
JS_FN("parseImmutable", json_parseImmutable, 2, 0),
#endif
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ JS_FN("isRawJSON", json_isRawJSON, 1, 0),
+ JS_FN("rawJSON", json_rawJSON, 1, 0),
+#endif
JS_FS_END};
static const JSPropertySpec json_static_properties[] = {
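A hedged sketch (not part of the patch) of the new entry points when the experimental json_parse_with_source pref is enabled; names follow the TC39 proposal:

    var big = JSON.rawJSON("9007199254740993");
    JSON.isRawJSON(big);             // true
    Object.isFrozen(big);            // true
    JSON.stringify({ value: big });  // '{"value":9007199254740993}'

    JSON.rawJSON("");                // SyntaxError: argument cannot be empty
    JSON.rawJSON(" 1");              // SyntaxError: leading/trailing JSON whitespace
    JSON.rawJSON("{}");              // SyntaxError: outermost value must not be an array or object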
diff --git a/js/src/builtin/MapObject.cpp b/js/src/builtin/MapObject.cpp
index fd35b8e776..cd4ae7f46a 100644
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -333,23 +333,42 @@ size_t MapIteratorObject::objectMoved(JSObject* obj, JSObject* old) {
Nursery& nursery = iter->runtimeFromMainThread()->gc.nursery();
if (!nursery.isInside(range)) {
nursery.removeMallocedBufferDuringMinorGC(range);
- return 0;
}
+ size_t size = RoundUp(sizeof(ValueMap::Range), gc::CellAlignBytes);
AutoEnterOOMUnsafeRegion oomUnsafe;
- auto newRange = iter->zone()->new_<ValueMap::Range>(*range);
- if (!newRange) {
- oomUnsafe.crash(
- "MapIteratorObject failed to allocate Range data while tenuring.");
+ void* buffer = nursery.allocateBufferSameLocation(obj, size, js::MallocArena);
+ if (!buffer) {
+ oomUnsafe.crash("MapIteratorObject::objectMoved");
}
+ bool iteratorIsInNursery = IsInsideNursery(obj);
+ MOZ_ASSERT(iteratorIsInNursery == nursery.isInside(buffer));
+ auto* newRange = new (buffer) ValueMap::Range(*range, iteratorIsInNursery);
range->~Range();
iter->setReservedSlot(MapIteratorObject::RangeSlot, PrivateValue(newRange));
- return sizeof(ValueMap::Range);
+
+ if (iteratorIsInNursery && iter->target()) {
+ SetHasNurseryMemory(iter->target(), true);
+ }
+
+ return size;
+}
+
+MapObject* MapIteratorObject::target() const {
+ Value value = getFixedSlot(TargetSlot);
+ if (value.isUndefined()) {
+ return nullptr;
+ }
+
+ return &MaybeForwarded(&value.toObject())->as<MapObject>();
}
template <typename Range>
static void DestroyRange(JSObject* iterator, Range* range) {
+ MOZ_ASSERT(IsInsideNursery(iterator) ==
+ iterator->runtimeFromMainThread()->gc.nursery().isInside(range));
+
range->~Range();
if (!IsInsideNursery(iterator)) {
js_free(range);
@@ -533,7 +552,7 @@ void MapObject::trace(JSTracer* trc, JSObject* obj) {
}
}
-using NurseryKeysVector = mozilla::Vector<Value, 0, SystemAllocPolicy>;
+using NurseryKeysVector = GCVector<Value, 0, SystemAllocPolicy>;
template <typename TableObject>
static NurseryKeysVector* GetNurseryKeys(TableObject* t) {
@@ -577,17 +596,34 @@ class js::OrderedHashTableRef : public gc::BufferableRef {
reinterpret_cast<typename ObjectT::UnbarrieredTable*>(realTable);
NurseryKeysVector* keys = GetNurseryKeys(object);
MOZ_ASSERT(keys);
- for (Value key : *keys) {
- MOZ_ASSERT(unbarrieredTable->hash(key) ==
- realTable->hash(*reinterpret_cast<HashableValue*>(&key)));
- // Note: we use a lambda to avoid tenuring keys that have been removed
- // from the Map or Set.
- unbarrieredTable->rekeyOneEntry(key, [trc](const Value& prior) {
- Value key = prior;
- TraceManuallyBarrieredEdge(trc, &key, "ordered hash table key");
- return key;
- });
+
+ keys->mutableEraseIf([&](Value& key) {
+ MOZ_ASSERT(
+ unbarrieredTable->hash(key) ==
+ realTable->hash(*reinterpret_cast<const HashableValue*>(&key)));
+ MOZ_ASSERT(IsInsideNursery(key.toGCThing()));
+
+ auto result =
+ unbarrieredTable->rekeyOneEntry(key, [trc](const Value& prior) {
+ Value key = prior;
+ TraceManuallyBarrieredEdge(trc, &key, "ordered hash table key");
+ return key;
+ });
+
+ if (result.isNothing()) {
+ return true; // Key removed.
+ }
+
+ key = result.value();
+ return !IsInsideNursery(key.toGCThing());
+ });
+
+ if (!keys->empty()) {
+ trc->runtime()->gc.storeBuffer().putGeneric(
+ OrderedHashTableRef<ObjectT>(object));
+ return;
}
+
DeleteNurseryKeys(object);
}
};
@@ -739,6 +775,8 @@ void MapObject::finalize(JS::GCContext* gcx, JSObject* obj) {
return;
}
+ MOZ_ASSERT_IF(obj->isTenured(), !table->hasNurseryRanges());
+
bool needsPostBarriers = obj->isTenured();
if (needsPostBarriers) {
// Use the ValueMap representation which has post barriers.
@@ -750,21 +788,36 @@ void MapObject::finalize(JS::GCContext* gcx, JSObject* obj) {
}
}
+void MapObject::clearNurseryRangesBeforeMinorGC() {
+ getTableUnchecked()->destroyNurseryRanges();
+ SetHasNurseryMemory(this, false);
+}
+
/* static */
-void MapObject::sweepAfterMinorGC(JS::GCContext* gcx, MapObject* mapobj) {
- bool wasInsideNursery = IsInsideNursery(mapobj);
- if (wasInsideNursery && !IsForwarded(mapobj)) {
+MapObject* MapObject::sweepAfterMinorGC(JS::GCContext* gcx, MapObject* mapobj) {
+ Nursery& nursery = gcx->runtime()->gc.nursery();
+ bool wasInCollectedRegion = nursery.inCollectedRegion(mapobj);
+ if (wasInCollectedRegion && !IsForwarded(mapobj)) {
finalize(gcx, mapobj);
- return;
+ return nullptr;
}
mapobj = MaybeForwarded(mapobj);
- mapobj->getTableUnchecked()->destroyNurseryRanges();
- SetHasNurseryMemory(mapobj, false);
- if (wasInsideNursery) {
+ bool insideNursery = IsInsideNursery(mapobj);
+ if (insideNursery) {
+ SetHasNurseryMemory(mapobj, true);
+ }
+
+ if (wasInCollectedRegion && mapobj->isTenured()) {
AddCellMemory(mapobj, sizeof(ValueMap), MemoryUse::MapObjectTable);
}
+
+ if (!HasNurseryMemory(mapobj)) {
+ return nullptr;
+ }
+
+ return mapobj;
}
bool MapObject::construct(JSContext* cx, unsigned argc, Value* vp) {
@@ -1199,19 +1252,36 @@ size_t SetIteratorObject::objectMoved(JSObject* obj, JSObject* old) {
Nursery& nursery = iter->runtimeFromMainThread()->gc.nursery();
if (!nursery.isInside(range)) {
nursery.removeMallocedBufferDuringMinorGC(range);
- return 0;
}
+ size_t size = RoundUp(sizeof(ValueSet::Range), gc::CellAlignBytes);
AutoEnterOOMUnsafeRegion oomUnsafe;
- auto newRange = iter->zone()->new_<ValueSet::Range>(*range);
- if (!newRange) {
- oomUnsafe.crash(
- "SetIteratorObject failed to allocate Range data while tenuring.");
+ void* buffer = nursery.allocateBufferSameLocation(obj, size, js::MallocArena);
+ if (!buffer) {
+ oomUnsafe.crash("SetIteratorObject::objectMoved");
}
+ bool iteratorIsInNursery = IsInsideNursery(obj);
+ MOZ_ASSERT(iteratorIsInNursery == nursery.isInside(buffer));
+ auto* newRange = new (buffer) ValueSet::Range(*range, iteratorIsInNursery);
range->~Range();
iter->setReservedSlot(SetIteratorObject::RangeSlot, PrivateValue(newRange));
- return sizeof(ValueSet::Range);
+
+ if (iteratorIsInNursery && iter->target()) {
+ SetHasNurseryMemory(iter->target(), true);
+ }
+
+ return size;
+}
+
+SetObject* SetIteratorObject::target() const {
+ Value value = getFixedSlot(TargetSlot);
+ if (value.isUndefined()) {
+ return nullptr;
+ }
+
+ return &MaybeForwarded(&value.toObject())->as<SetObject>();
}
bool SetIteratorObject::next(SetIteratorObject* setIterator,
@@ -1449,25 +1519,41 @@ void SetObject::finalize(JS::GCContext* gcx, JSObject* obj) {
MOZ_ASSERT(gcx->onMainThread());
SetObject* setobj = static_cast<SetObject*>(obj);
if (ValueSet* set = setobj->getData()) {
+ MOZ_ASSERT_IF(obj->isTenured(), !set->hasNurseryRanges());
gcx->delete_(obj, set, MemoryUse::MapObjectTable);
}
}
+void SetObject::clearNurseryRangesBeforeMinorGC() {
+ getTableUnchecked()->destroyNurseryRanges();
+ SetHasNurseryMemory(this, false);
+}
+
/* static */
-void SetObject::sweepAfterMinorGC(JS::GCContext* gcx, SetObject* setobj) {
- bool wasInsideNursery = IsInsideNursery(setobj);
- if (wasInsideNursery && !IsForwarded(setobj)) {
+SetObject* SetObject::sweepAfterMinorGC(JS::GCContext* gcx, SetObject* setobj) {
+ Nursery& nursery = gcx->runtime()->gc.nursery();
+ bool wasInCollectedRegion = nursery.inCollectedRegion(setobj);
+ if (wasInCollectedRegion && !IsForwarded(setobj)) {
finalize(gcx, setobj);
- return;
+ return nullptr;
}
setobj = MaybeForwarded(setobj);
- setobj->getData()->destroyNurseryRanges();
- SetHasNurseryMemory(setobj, false);
- if (wasInsideNursery) {
+ bool insideNursery = IsInsideNursery(setobj);
+ if (insideNursery) {
+ SetHasNurseryMemory(setobj, true);
+ }
+
+ if (wasInCollectedRegion && setobj->isTenured()) {
AddCellMemory(setobj, sizeof(ValueSet), MemoryUse::MapObjectTable);
}
+
+ if (!HasNurseryMemory(setobj)) {
+ return nullptr;
+ }
+
+ return setobj;
}
bool SetObject::isBuiltinAdd(HandleValue add) {
diff --git a/js/src/builtin/MapObject.h b/js/src/builtin/MapObject.h
index ef37b9912e..d47797eff7 100644
--- a/js/src/builtin/MapObject.h
+++ b/js/src/builtin/MapObject.h
@@ -168,7 +168,14 @@ class MapObject : public NativeObject {
OrderedHashMap<Value, Value, UnbarrieredHashPolicy, CellAllocPolicy>;
friend class OrderedHashTableRef<MapObject>;
- static void sweepAfterMinorGC(JS::GCContext* gcx, MapObject* mapobj);
+ void clearNurseryRangesBeforeMinorGC();
+
+ // Sweeps a map that had nursery memory associated with it after a minor
+ // GC. This may finalize the map if it was in the nursery and has died.
+ //
+ // Returns a pointer to the map if it still has nursery memory associated with
+ // it, or nullptr.
+ static MapObject* sweepAfterMinorGC(JS::GCContext* gcx, MapObject* mapobj);
size_t sizeOfData(mozilla::MallocSizeOf mallocSizeOf);
@@ -276,6 +283,7 @@ class MapIteratorObject : public NativeObject {
private:
inline MapObject::IteratorKind kind() const;
+ MapObject* target() const;
};
class SetObject : public NativeObject {
@@ -326,7 +334,14 @@ class SetObject : public NativeObject {
OrderedHashSet<Value, UnbarrieredHashPolicy, CellAllocPolicy>;
friend class OrderedHashTableRef<SetObject>;
- static void sweepAfterMinorGC(JS::GCContext* gcx, SetObject* setobj);
+ void clearNurseryRangesBeforeMinorGC();
+
+ // Sweeps a set that had nursery memory associated with it after a minor
+ // GC. This may finalize the set if it was in the nursery and has died.
+ //
+ // Returns a pointer to the set if it still has nursery memory associated with
+ // it, or nullptr.
+ static SetObject* sweepAfterMinorGC(JS::GCContext* gcx, SetObject* setobj);
size_t sizeOfData(mozilla::MallocSizeOf mallocSizeOf);
@@ -414,6 +429,7 @@ class SetIteratorObject : public NativeObject {
private:
inline SetObject::IteratorKind kind() const;
+ SetObject* target() const;
};
using SetInitGetPrototypeOp = NativeObject* (*)(JSContext*,
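The nursery-range changes above are internal; as a shell-only sanity check (a sketch that assumes the SpiderMonkey shell's minorgc() testing function), live iterators and table contents must survive the minor GC that relocates their ranges and keys:

    var m = new Map([[{}, 1], [{}, 2]]);
    var it = m.entries();
    it.next();
    minorgc();                 // collect the nursery; the map, its keys, and the iterator's Range are evacuated
    it.next().value[1];        // 2: the Range was relocated, not lost
    m.size;                    // 2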
diff --git a/js/src/builtin/ModuleObject.cpp b/js/src/builtin/ModuleObject.cpp
index b9db9bf02d..0365f744a6 100644
--- a/js/src/builtin/ModuleObject.cpp
+++ b/js/src/builtin/ModuleObject.cpp
@@ -1109,6 +1109,17 @@ JSScript* ModuleObject::script() const {
return ptr;
}
+const char* ModuleObject::filename() const {
+ // The ScriptSlot will be cleared once the module is evaluated, so we try to
+ // get the filename from cyclicModuleFields().
+
+ // TODO: Bug 1885483: Provide filename for JSON modules
+ if (!hasCyclicModuleFields()) {
+ return "(JSON module)";
+ }
+ return cyclicModuleFields()->scriptSourceObject->source()->filename();
+}
+
static inline void AssertValidModuleStatus(ModuleStatus status) {
MOZ_ASSERT(status >= ModuleStatus::Unlinked &&
status <= ModuleStatus::Evaluated_Error);
diff --git a/js/src/builtin/ModuleObject.h b/js/src/builtin/ModuleObject.h
index d39d65b18c..cb74d67c40 100644
--- a/js/src/builtin/ModuleObject.h
+++ b/js/src/builtin/ModuleObject.h
@@ -360,6 +360,7 @@ class ModuleObject : public NativeObject {
JSScript* maybeScript() const;
JSScript* script() const;
+ const char* filename() const;
ModuleEnvironmentObject& initialEnvironment() const;
ModuleEnvironmentObject* environment() const;
ModuleNamespaceObject* namespace_();
diff --git a/js/src/builtin/ParseRecordObject.cpp b/js/src/builtin/ParseRecordObject.cpp
index a453c30c0e..efb05d1845 100644
--- a/js/src/builtin/ParseRecordObject.cpp
+++ b/js/src/builtin/ParseRecordObject.cpp
@@ -19,8 +19,24 @@ ParseRecordObject::ParseRecordObject(Handle<js::JSONParseNode*> parseNode,
const Value& val)
: parseNode(parseNode), key(JS::PropertyKey::Void()), value(val) {}
+bool ParseRecordObject::addEntries(JSContext* cx, EntryMap&& appendEntries) {
+ if (!entries) {
+ entries = js::MakeUnique<EntryMap>(std::move(appendEntries));
+ return !!entries;
+ }
+ for (auto iter = appendEntries.iter(); !iter.done(); iter.next()) {
+ if (!entries->put(iter.get().key(), std::move(iter.get().value()))) {
+ return false;
+ }
+ }
+ return true;
+}
+
void ParseRecordObject::trace(JSTracer* trc) {
JS::TraceRoot(trc, &parseNode, "ParseRecordObject parse node");
JS::TraceRoot(trc, &key, "ParseRecordObject key");
JS::TraceRoot(trc, &value, "ParseRecordObject value");
+ if (entries) {
+ entries->trace(trc);
+ }
}
diff --git a/js/src/builtin/ParseRecordObject.h b/js/src/builtin/ParseRecordObject.h
index 60a902f19b..7e3176a23f 100644
--- a/js/src/builtin/ParseRecordObject.h
+++ b/js/src/builtin/ParseRecordObject.h
@@ -7,7 +7,8 @@
#ifndef builtin_ParseRecordObject_h
#define builtin_ParseRecordObject_h
-#include "js/TraceKind.h"
+#include "js/HashTable.h"
+#include "js/TracingAPI.h"
#include "vm/JSContext.h"
namespace js {
@@ -16,24 +17,40 @@ using JSONParseNode = JSString;
class ParseRecordObject {
public:
+ using EntryMap = js::GCHashMap<PropertyKey, ParseRecordObject>;
+
+ // The source text that was parsed for this record. According to the spec, we
+ // don't track this for objects and arrays, so it will be a null pointer.
JSONParseNode* parseNode;
+ // For object members, the member key. For arrays, the index. For JSON
+ // primitives, it will be undefined.
JS::PropertyKey key;
+ // The original value corresponding to this record, used to determine if the
+ // reviver function has modified it.
Value value;
+ // For objects and arrays, the records for the members and elements
+ // (respectively). If there are none, or for JSON primitives, we won't
+ // allocate an EntryMap.
+ UniquePtr<EntryMap> entries;
ParseRecordObject();
ParseRecordObject(Handle<js::JSONParseNode*> parseNode, const Value& val);
ParseRecordObject(ParseRecordObject&& other)
: parseNode(std::move(other.parseNode)),
key(std::move(other.key)),
- value(std::move(other.value)) {}
+ value(std::move(other.value)),
+ entries(std::move(other.entries)) {}
bool isEmpty() const { return value.isUndefined(); }
+ bool addEntries(JSContext* cx, EntryMap&& appendEntries);
+
// move assignment
ParseRecordObject& operator=(ParseRecordObject&& other) noexcept {
parseNode = other.parseNode;
key = other.key;
value = other.value;
+ entries = std::move(other.entries);
return *this;
}
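A hedged sketch (not part of the patch) of how the EntryMap feeds the reviver's context argument when the json_parse_with_source pref is enabled; per the comments above, source text is only recorded for primitives:

    JSON.parse('{"a": 1.00, "b": [true]}', function (key, value, context) {
      // key "a":            context.source is "1.00"
      // key "0" (in "b"):   context.source is "true"
      // keys "b" and "":    context.source is undefined (objects/arrays are not tracked)
      return value;
    });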
diff --git a/js/src/builtin/Promise.cpp b/js/src/builtin/Promise.cpp
index bd40add77f..92e72b9cb9 100644
--- a/js/src/builtin/Promise.cpp
+++ b/js/src/builtin/Promise.cpp
@@ -940,13 +940,7 @@ static JSFunction* GetRejectFunctionFromResolve(JSFunction* resolve);
/**
* Returns Promise Resolve Function's [[AlreadyResolved]].[[Value]].
*/
-static bool IsAlreadyResolvedMaybeWrappedResolveFunction(
- JSObject* resolveFunObj) {
- if (IsWrapper(resolveFunObj)) {
- resolveFunObj = UncheckedUnwrap(resolveFunObj);
- }
-
- JSFunction* resolveFun = &resolveFunObj->as<JSFunction>();
+static bool IsAlreadyResolvedResolveFunction(JSFunction* resolveFun) {
MOZ_ASSERT(resolveFun->maybeNative() == ResolvePromiseFunction);
bool alreadyResolved =
@@ -970,13 +964,7 @@ static bool IsAlreadyResolvedMaybeWrappedResolveFunction(
/**
* Returns Promise Reject Function's [[AlreadyResolved]].[[Value]].
*/
-static bool IsAlreadyResolvedMaybeWrappedRejectFunction(
- JSObject* rejectFunObj) {
- if (IsWrapper(rejectFunObj)) {
- rejectFunObj = UncheckedUnwrap(rejectFunObj);
- }
-
- JSFunction* rejectFun = &rejectFunObj->as<JSFunction>();
+static bool IsAlreadyResolvedRejectFunction(JSFunction* rejectFun) {
MOZ_ASSERT(rejectFun->maybeNative() == RejectPromiseFunction);
bool alreadyResolved =
@@ -1023,8 +1011,8 @@ static void SetAlreadyResolvedResolutionFunction(JSFunction* resolutionFun) {
reject->setExtendedSlot(RejectFunctionSlot_Promise, UndefinedValue());
reject->setExtendedSlot(RejectFunctionSlot_ResolveFunction, UndefinedValue());
- MOZ_ASSERT(IsAlreadyResolvedMaybeWrappedResolveFunction(resolve));
- MOZ_ASSERT(IsAlreadyResolvedMaybeWrappedRejectFunction(reject));
+ MOZ_ASSERT(IsAlreadyResolvedResolveFunction(resolve));
+ MOZ_ASSERT(IsAlreadyResolvedRejectFunction(reject));
}
/**
@@ -1132,8 +1120,8 @@ void js::SetAlreadyResolvedPromiseWithDefaultResolvingFunction(
rejectFun->initExtendedSlot(RejectFunctionSlot_ResolveFunction,
ObjectValue(*resolveFun));
- MOZ_ASSERT(!IsAlreadyResolvedMaybeWrappedResolveFunction(resolveFun));
- MOZ_ASSERT(!IsAlreadyResolvedMaybeWrappedRejectFunction(rejectFun));
+ MOZ_ASSERT(!IsAlreadyResolvedResolveFunction(resolveFun));
+ MOZ_ASSERT(!IsAlreadyResolvedRejectFunction(rejectFun));
// Step 12. Return the Record { [[Resolve]]: resolve, [[Reject]]: reject }.
return true;
@@ -1181,8 +1169,7 @@ static bool RejectPromiseFunction(JSContext* cx, unsigned argc, Value* vp) {
// If the Promise isn't available anymore, it has been resolved and the
// reference to it removed to make it eligible for collection.
bool alreadyResolved = promiseVal.isUndefined();
- MOZ_ASSERT(IsAlreadyResolvedMaybeWrappedRejectFunction(reject) ==
- alreadyResolved);
+ MOZ_ASSERT(IsAlreadyResolvedRejectFunction(reject) == alreadyResolved);
if (alreadyResolved) {
args.rval().setUndefined();
return true;
@@ -1362,8 +1349,7 @@ static bool ResolvePromiseFunction(JSContext* cx, unsigned argc, Value* vp) {
//
// NOTE: We use the reference to the reject function as [[AlreadyResolved]].
bool alreadyResolved = promiseVal.isUndefined();
- MOZ_ASSERT(IsAlreadyResolvedMaybeWrappedResolveFunction(resolve) ==
- alreadyResolved);
+ MOZ_ASSERT(IsAlreadyResolvedResolveFunction(resolve) == alreadyResolved);
if (alreadyResolved) {
args.rval().setUndefined();
return true;
@@ -3164,7 +3150,8 @@ static bool PromiseAllResolveElementFunction(JSContext* cx, unsigned argc,
for (size_t i = 0, len = promises.length(); i < len; i++) {
JSObject* obj = promises[i];
cx->check(obj);
- MOZ_ASSERT(UncheckedUnwrap(obj)->is<PromiseObject>());
+ JSObject* unwrapped = UncheckedUnwrap(obj);
+ MOZ_ASSERT(unwrapped->is<PromiseObject>() || JS_IsDeadWrapper(unwrapped));
}
#endif
@@ -3265,7 +3252,13 @@ static bool PromiseAllResolveElementFunction(JSContext* cx, unsigned argc,
// compartments with principals inaccessible from the current
// compartment. To make that work, it unwraps promises with
// UncheckedUnwrap,
- nextPromise = &UncheckedUnwrap(nextPromiseObj)->as<PromiseObject>();
+ JSObject* unwrapped = UncheckedUnwrap(nextPromiseObj);
+ if (JS_IsDeadWrapper(unwrapped)) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_DEAD_OBJECT);
+ return nullptr;
+ }
+ nextPromise = &unwrapped->as<PromiseObject>();
if (!PerformPromiseThen(cx, nextPromise, resolveFunVal, rejectFunVal,
resultCapabilityWithoutResolving)) {
@@ -5036,7 +5029,8 @@ static PromiseReactionRecord* NewReactionRecord(
// This is the only case where we allow `resolve` and `reject` to
// be null when the `promise` field is not a PromiseObject.
JSObject* unwrappedPromise = UncheckedUnwrap(resultCapability.promise());
- MOZ_ASSERT(unwrappedPromise->is<PromiseObject>());
+ MOZ_ASSERT(unwrappedPromise->is<PromiseObject>() ||
+ JS_IsDeadWrapper(unwrappedPromise));
MOZ_ASSERT(!resultCapability.resolve());
MOZ_ASSERT(!resultCapability.reject());
}
@@ -6218,6 +6212,11 @@ bool js::Promise_then(JSContext* cx, unsigned argc, Value* vp) {
return true;
}
+ if (JS_IsDeadWrapper(UncheckedUnwrap(dependentPromise))) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_DEAD_OBJECT);
+ return false;
+ }
+
// `dependentPromise` should be a maybe-wrapped Promise.
MOZ_ASSERT(UncheckedUnwrap(dependentPromise)->is<PromiseObject>());
@@ -6934,7 +6933,8 @@ JS::AutoDebuggerJobQueueInterruption::AutoDebuggerJobQueueInterruption()
: cx(nullptr) {}
JS::AutoDebuggerJobQueueInterruption::~AutoDebuggerJobQueueInterruption() {
- MOZ_ASSERT_IF(initialized(), cx->jobQueue->empty());
+ MOZ_ASSERT_IF(initialized() && !cx->jobQueue->isDrainingStopped(),
+ cx->jobQueue->empty());
}
bool JS::AutoDebuggerJobQueueInterruption::init(JSContext* cx) {
diff --git a/js/src/builtin/RawJSONObject.cpp b/js/src/builtin/RawJSONObject.cpp
new file mode 100644
index 0000000000..08a98835c1
--- /dev/null
+++ b/js/src/builtin/RawJSONObject.cpp
@@ -0,0 +1,41 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * vim: set ts=8 sts=2 et sw=2 tw=80:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "builtin/RawJSONObject.h"
+#include "js/PropertyDescriptor.h"
+
+#include "vm/JSObject-inl.h"
+
+using namespace js;
+
+const JSClass RawJSONObject::class_ = {"RawJSON",
+ JSCLASS_HAS_RESERVED_SLOTS(SlotCount)};
+
+/* static */
+RawJSONObject* RawJSONObject::create(JSContext* cx,
+ Handle<JSString*> jsonString) {
+ Rooted<RawJSONObject*> obj(
+ cx, NewObjectWithGivenProto<RawJSONObject>(cx, nullptr));
+ if (!obj) {
+ return nullptr;
+ }
+ Rooted<PropertyKey> id(cx, NameToId(cx->names().rawJSON));
+ Rooted<Value> jsonStringVal(cx, StringValue(jsonString));
+ if (!NativeDefineDataProperty(cx, obj, id, jsonStringVal, 0)) {
+ return nullptr;
+ }
+ return obj;
+}
+
+JSString* RawJSONObject::rawJSON(JSContext* cx) {
+ // RawJSONObjects are frozen on creation, so must always have a rawJSON string
+ // property.
+ PropertyKey id(NameToId(cx->names().rawJSON));
+ JS::Value vp;
+ MOZ_ALWAYS_TRUE(GetPropertyNoGC(cx, this, ObjectValue(*this), id, &vp));
+ MOZ_ASSERT(vp.isString());
+ return vp.toString();
+}
diff --git a/js/src/builtin/RawJSONObject.h b/js/src/builtin/RawJSONObject.h
new file mode 100644
index 0000000000..cf8821a844
--- /dev/null
+++ b/js/src/builtin/RawJSONObject.h
@@ -0,0 +1,27 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * vim: set ts=8 sts=2 et sw=2 tw=80:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef builtin_RawJSONObject_h
+#define builtin_RawJSONObject_h
+
+#include "vm/NativeObject.h"
+
+namespace js {
+
+class RawJSONObject : public NativeObject {
+ enum { SlotCount = 0 };
+
+ public:
+ static const JSClass class_;
+
+ static RawJSONObject* create(JSContext* cx, Handle<JSString*> jsonString);
+
+ JSString* rawJSON(JSContext* cx);
+};
+
+} // namespace js
+
+#endif /* builtin_RawJSONObject_h */
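A minimal script-level sketch, assuming the JSON parse-with-source proposal's JSON.rawJSON entry point is wired up to RawJSONObject in this build (not part of the patch itself):
var raw = JSON.rawJSON("1e1000");                         // preserves the exact source text
assertEq(JSON.stringify({ big: raw }), '{"big":1e1000}');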
diff --git a/js/src/builtin/Reflect.js b/js/src/builtin/Reflect.js
index f56d603ca3..34ca34af60 100644
--- a/js/src/builtin/Reflect.js
+++ b/js/src/builtin/Reflect.js
@@ -52,6 +52,8 @@ function Reflect_apply(target, thisArgument, argumentsList) {
// Steps 2-4.
return callFunction(std_Function_apply, target, thisArgument, argumentsList);
}
+// This function is only barely too long for normal inlining.
+SetIsInlinableLargeFunction(Reflect_apply);
// ES2017 draft rev a785b0832b071f505a694e1946182adeab84c972
// 26.1.2 Reflect.construct ( target, argumentsList [ , newTarget ] )
diff --git a/js/src/builtin/Sorting.js b/js/src/builtin/Sorting.js
index 4530a82818..175b506192 100644
--- a/js/src/builtin/Sorting.js
+++ b/js/src/builtin/Sorting.js
@@ -6,7 +6,7 @@
// consolidated here to avoid confusion and re-implementation of existing
// algorithms.
-// For sorting small arrays.
+// For sorting small typed arrays.
function InsertionSort(array, from, to, comparefn) {
var item, swap, i, j;
for (i = from + 1; i <= to; i++) {
@@ -22,101 +22,6 @@ function InsertionSort(array, from, to, comparefn) {
}
}
-// A helper function for MergeSort.
-//
-// Merge comparefn-sorted slices list[start..<=mid] and list[mid+1..<=end],
-// storing the merged sequence in out[start..<=end].
-function Merge(list, out, start, mid, end, comparefn) {
- // Skip lopsided runs to avoid doing useless work.
- // Skip calling the comparator if the sub-list is already sorted.
- if (
- mid >= end ||
- callContentFunction(comparefn, undefined, list[mid], list[mid + 1]) <= 0
- ) {
- for (var i = start; i <= end; i++) {
- DefineDataProperty(out, i, list[i]);
- }
- return;
- }
-
- var i = start;
- var j = mid + 1;
- var k = start;
- while (i <= mid && j <= end) {
- var lvalue = list[i];
- var rvalue = list[j];
- if (callContentFunction(comparefn, undefined, lvalue, rvalue) <= 0) {
- DefineDataProperty(out, k++, lvalue);
- i++;
- } else {
- DefineDataProperty(out, k++, rvalue);
- j++;
- }
- }
-
- // Empty out any remaining elements.
- while (i <= mid) {
- DefineDataProperty(out, k++, list[i++]);
- }
- while (j <= end) {
- DefineDataProperty(out, k++, list[j++]);
- }
-}
-
-// Helper function for overwriting a sparse array with a
-// dense array, filling remaining slots with holes.
-function MoveHoles(sparse, sparseLen, dense, denseLen) {
- for (var i = 0; i < denseLen; i++) {
- sparse[i] = dense[i];
- }
- for (var j = denseLen; j < sparseLen; j++) {
- delete sparse[j];
- }
-}
-
-// Iterative, bottom up, mergesort.
-function MergeSort(array, len, comparefn) {
- assert(IsPackedArray(array), "array is packed");
- assert(array.length === len, "length mismatch");
- assert(len > 0, "array should be non-empty");
-
- // Insertion sort for small arrays, where "small" is defined by performance
- // testing.
- if (len < 24) {
- InsertionSort(array, 0, len - 1, comparefn);
- return array;
- }
-
- // We do all of our allocating up front
- var lBuffer = array;
- var rBuffer = [];
-
- // Use insertion sort for initial ranges.
- var windowSize = 4;
- for (var start = 0; start < len - 1; start += windowSize) {
- var end = std_Math_min(start + windowSize - 1, len - 1);
- InsertionSort(lBuffer, start, end, comparefn);
- }
-
- for (; windowSize < len; windowSize = 2 * windowSize) {
- for (var start = 0; start < len; start += 2 * windowSize) {
- // The midpoint between the two subarrays.
- var mid = start + windowSize - 1;
-
- // To keep from going over the edge.
- var end = std_Math_min(start + 2 * windowSize - 1, len - 1);
-
- Merge(lBuffer, rBuffer, start, mid, end, comparefn);
- }
-
- // Swap both lists.
- var swap = lBuffer;
- lBuffer = rBuffer;
- rBuffer = swap;
- }
- return lBuffer;
-}
-
// A helper function for MergeSortTypedArray.
//
// Merge comparefn-sorted slices list[start..<=mid] and list[mid+1..<=end],
diff --git a/js/src/builtin/TestingFunctions.cpp b/js/src/builtin/TestingFunctions.cpp
index 498fa1746d..da7efd2fcc 100644
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -604,15 +604,6 @@ static bool GetBuildConfiguration(JSContext* cx, unsigned argc, Value* vp) {
return false;
}
-#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
- value = BooleanValue(true);
-#else
- value = BooleanValue(false);
-#endif
- if (!JS_SetProperty(cx, info, "json-parse-with-source", value)) {
- return false;
- }
-
#ifdef FUZZING
value = BooleanValue(true);
#else
@@ -1620,7 +1611,7 @@ static bool WasmLosslessInvoke(JSContext* cx, unsigned argc, Value* vp) {
if (!wasmCallFrame.resize(len)) {
return false;
}
- wasmCallFrame[0].set(args.calleev());
+ wasmCallFrame[0].set(ObjectValue(*func));
wasmCallFrame[1].set(args.thisv());
// Copy over the arguments needed to invoke the provided wasm function,
// skipping the wasm function we're calling that is at `args.get(0)`.
@@ -3714,6 +3705,85 @@ static bool NewString(JSContext* cx, unsigned argc, Value* vp) {
return true;
}
+static bool NewDependentString(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ RootedString src(cx, ToString(cx, args.get(0)));
+ if (!src) {
+ return false;
+ }
+
+ uint64_t indexStart = 0;
+ mozilla::Maybe<uint64_t> indexEnd;
+ gc::Heap heap = gc::Heap::Default;
+ mozilla::Maybe<gc::Heap> requiredHeap;
+
+ if (!ToIndex(cx, args.get(1), &indexStart)) {
+ return false;
+ }
+
+ Rooted<Value> options(cx);
+ if (args.get(2).isObject()) {
+ options = args[2];
+ } else {
+ uint64_t idx;
+ if (args.hasDefined(2)) {
+ if (!ToIndex(cx, args.get(2), &idx)) {
+ return false;
+ }
+ indexEnd.emplace(idx);
+ }
+ options = args.get(3);
+ }
+
+ if (options.isObject()) {
+ Rooted<Value> v(cx);
+ Rooted<JSObject*> optObj(cx, &options.toObject());
+ if (!JS_GetProperty(cx, optObj, "tenured", &v)) {
+ return false;
+ }
+ if (v.isBoolean()) {
+ requiredHeap.emplace(v.toBoolean() ? gc::Heap::Tenured
+ : gc::Heap::Default);
+ heap = *requiredHeap;
+ }
+ }
+
+ if (indexEnd.isNothing()) {
+ // Read the length now that no more JS code can run.
+ indexEnd.emplace(src->length());
+ }
+ if (indexStart > src->length() || *indexEnd > src->length() ||
+ indexStart >= *indexEnd) {
+ JS_ReportErrorASCII(cx, "invalid dependent string bounds");
+ return false;
+ }
+ if (!src->ensureLinear(cx)) {
+ return false;
+ }
+ Rooted<JSString*> result(
+ cx, js::NewDependentString(cx, src, indexStart, *indexEnd - indexStart,
+ heap));
+ if (!result) {
+ return false;
+ }
+ if (!result->isDependent()) {
+ JS_ReportErrorASCII(cx, "resulting string is not dependent (too short?)");
+ return false;
+ }
+
+ if (requiredHeap.isSome()) {
+ MOZ_ASSERT_IF(*requiredHeap == gc::Heap::Tenured, result->isTenured());
+ if ((*requiredHeap == gc::Heap::Default) && result->isTenured()) {
+ JS_ReportErrorASCII(cx, "nursery string created in tenured heap");
+ return false;
+ }
+ }
+
+ args.rval().setString(result);
+ return true;
+}
+
// Warning! This will let you create ropes that I'm not sure would be possible
// otherwise, specifically:
//
@@ -7183,7 +7253,7 @@ static bool SetImmutablePrototype(JSContext* cx, unsigned argc, Value* vp) {
return true;
}
-#ifdef DEBUG
+#if defined(DEBUG) || defined(JS_JITSPEW)
static bool DumpStringRepresentation(JSContext* cx, unsigned argc, Value* vp) {
CallArgs args = CallArgsFromVp(argc, vp);
@@ -7221,7 +7291,6 @@ static bool GetStringRepresentation(JSContext* cx, unsigned argc, Value* vp) {
args.rval().setString(rep);
return true;
}
-
#endif
static bool ParseCompileOptionsForModule(JSContext* cx,
@@ -7237,9 +7306,7 @@ static bool ParseCompileOptionsForModule(JSContext* cx,
options.setModule();
isModule = true;
- // js::ParseCompileOptions should already be called.
- if (options.lineno == 0) {
- JS_ReportErrorASCII(cx, "Module cannot be compiled with lineNumber == 0");
+ if (!ValidateModuleCompileOptions(cx, options)) {
return false;
}
} else {
@@ -9435,6 +9502,15 @@ static const JSFunctionSpecWithHelp TestingFunctions[] = {
" - maybeExternal: create an external string, unless the data fits within an\n"
" inline string. Inline strings may be nursery-allocated."),
+ JS_FN_HELP("newDependentString", NewDependentString, 2, 0,
+"newDependentString(str, indexStart[, indexEnd] [, options])",
+" Essentially the same as str.substring() but insist on\n"
+" creating a dependent string and failing if not. Also has options to\n"
+" control the heap the string object is allocated into:\n"
+" \n"
+" - tenured: if true, allocate in the tenured heap or throw. If false,\n"
+" allocate in the nursery or throw."),
+
JS_FN_HELP("ensureLinearString", EnsureLinearString, 1, 0,
"ensureLinearString(str)",
" Ensures str is a linear (non-rope) string and returns it."),
@@ -10058,20 +10134,6 @@ JS_FOR_WASM_FEATURES(WASM_FEATURE)
"wasmMetadataAnalysis(wasmObject)",
" Prints an analysis of the size of metadata on this wasm object.\n"),
-#if defined(DEBUG) || defined(JS_JITSPEW)
- JS_FN_HELP("dumpObject", DumpObject, 1, 0,
-"dumpObject(obj)",
-" Dump an internal representation of an object."),
-
- JS_FN_HELP("dumpValue", DumpValue, 1, 0,
-"dumpValue(v)",
-" Dump an internal representation of a value."),
-
- JS_FN_HELP("dumpValueToString", DumpValueToString, 1, 0,
-"dumpValue(v)",
-" Return a dump of an internal representation of a value."),
-#endif
-
JS_FN_HELP("sharedMemoryEnabled", SharedMemoryEnabled, 0, 0,
"sharedMemoryEnabled()",
" Return true if SharedArrayBuffer and Atomics are enabled"),
@@ -10129,17 +10191,6 @@ JS_FOR_WASM_FEATURES(WASM_FEATURE)
" of internal error, or if the operation doesn't even make sense (for example,\n"
" because the object is a revoked proxy)."),
-#ifdef DEBUG
- JS_FN_HELP("dumpStringRepresentation", DumpStringRepresentation, 1, 0,
-"dumpStringRepresentation(str)",
-" Print a human-readable description of how the string |str| is represented.\n"),
-
- JS_FN_HELP("stringRepresentation", GetStringRepresentation, 1, 0,
-"stringRepresentation(str)",
-" Return a human-readable description of how the string |str| is represented.\n"),
-
-#endif
-
JS_FN_HELP("allocationMarker", AllocationMarker, 0, 0,
"allocationMarker([options])",
" Return a freshly allocated object whose [[Class]] name is\n"
@@ -10428,6 +10479,29 @@ JS_FN_HELP("getEnvironmentObjectType", GetEnvironmentObjectType, 1, 0,
" Return an object describing the calling realm's fuse state, "
"as well as the state of any runtime fuses."),
+#if defined(DEBUG) || defined(JS_JITSPEW)
+ JS_FN_HELP("dumpObject", DumpObject, 1, 0,
+"dumpObject(obj)",
+" Dump an internal representation of an object."),
+
+ JS_FN_HELP("dumpValue", DumpValue, 1, 0,
+"dumpValue(v)",
+" Dump an internal representation of a value."),
+
+ JS_FN_HELP("dumpValueToString", DumpValueToString, 1, 0,
+"dumpValue(v)",
+" Return a dump of an internal representation of a value."),
+
+ JS_FN_HELP("dumpStringRepresentation", DumpStringRepresentation, 1, 0,
+"dumpStringRepresentation(str)",
+" Print a human-readable description of how the string |str| is represented.\n"),
+
+ JS_FN_HELP("stringRepresentation", GetStringRepresentation, 1, 0,
+"stringRepresentation(str)",
+" Return a human-readable description of how the string |str| is represented.\n"),
+
+#endif
+
JS_FS_HELP_END
};
// clang-format on
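A minimal usage sketch of the newDependentString helper defined above, assuming a shell built with these testing functions; the base string and bounds are arbitrary, chosen long enough that the substring cannot be stored inline:
var base = "abcdefghijklmnopqrstuvwxyz0123456789".repeat(4);
var dep = newDependentString(base, 8, 80);                // throws if the result is not a dependent string
assertEq(dep, base.substring(8, 80));
var tenured = newDependentString(base, 0, 64, { tenured: true });  // tenured-heap allocation or throw
assertEq(tenured.length, 64);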
diff --git a/js/src/builtin/TestingUtility.cpp b/js/src/builtin/TestingUtility.cpp
index 12a99c7f08..9d4af5897c 100644
--- a/js/src/builtin/TestingUtility.cpp
+++ b/js/src/builtin/TestingUtility.cpp
@@ -308,3 +308,18 @@ bool js::ValidateLazinessOfStencilAndGlobal(
return true;
}
+
+bool js::ValidateModuleCompileOptions(JSContext* cx,
+ JS::CompileOptions& options) {
+ if (options.lineno == 0) {
+ JS_ReportErrorASCII(cx, "Module cannot be compiled with lineNumber == 0");
+ return false;
+ }
+
+ if (!options.filename()) {
+ JS_ReportErrorASCII(cx, "Module should have filename");
+ return false;
+ }
+
+ return true;
+}
diff --git a/js/src/builtin/TestingUtility.h b/js/src/builtin/TestingUtility.h
index 4c666540f7..82225068a1 100644
--- a/js/src/builtin/TestingUtility.h
+++ b/js/src/builtin/TestingUtility.h
@@ -72,6 +72,8 @@ JSObject* CreateScriptPrivate(JSContext* cx,
bool ValidateLazinessOfStencilAndGlobal(
JSContext* cx, const frontend::CompilationStencil& stencil);
+bool ValidateModuleCompileOptions(JSContext* cx, JS::CompileOptions& options);
+
} /* namespace js */
#endif /* builtin_TestingUtility_h */
diff --git a/js/src/builtin/Tuple.js b/js/src/builtin/Tuple.js
index 98177ac35d..bade1739d0 100644
--- a/js/src/builtin/Tuple.js
+++ b/js/src/builtin/Tuple.js
@@ -25,7 +25,7 @@ function TupleToSorted(comparefn) {
/* Step 3. */
var items = TupleToArray(T);
- var sorted = callFunction(ArraySort, items, comparefn);
+ var sorted = callFunction(std_Array_sort, items, comparefn);
return std_Tuple_unchecked(sorted);
}
diff --git a/js/src/builtin/WeakMapObject-inl.h b/js/src/builtin/WeakMapObject-inl.h
index 28ab1abeb2..3c5f2ceb27 100644
--- a/js/src/builtin/WeakMapObject-inl.h
+++ b/js/src/builtin/WeakMapObject-inl.h
@@ -92,6 +92,21 @@ static MOZ_ALWAYS_INLINE bool CanBeHeldWeakly(JSContext* cx,
return false;
}
+static unsigned GetErrorNumber(bool isWeakMap) {
+#ifdef NIGHTLY_BUILD
+ bool symbolsAsWeakMapKeysEnabled =
+ JS::Prefs::experimental_symbols_as_weakmap_keys();
+
+ if (symbolsAsWeakMapKeysEnabled) {
+ return isWeakMap ? JSMSG_WEAKMAP_KEY_CANT_BE_HELD_WEAKLY
+ : JSMSG_WEAKSET_VAL_CANT_BE_HELD_WEAKLY;
+ }
+#endif
+
+ return isWeakMap ? JSMSG_WEAKMAP_KEY_MUST_BE_AN_OBJECT
+ : JSMSG_WEAKSET_VAL_MUST_BE_AN_OBJECT;
+}
+
} // namespace js
#endif /* builtin_WeakMapObject_inl_h */
diff --git a/js/src/builtin/WeakMapObject.cpp b/js/src/builtin/WeakMapObject.cpp
index 680f49bc3e..a9d7dcdedc 100644
--- a/js/src/builtin/WeakMapObject.cpp
+++ b/js/src/builtin/WeakMapObject.cpp
@@ -122,8 +122,8 @@ bool WeakMapObject::delete_(JSContext* cx, unsigned argc, Value* vp) {
MOZ_ASSERT(WeakMapObject::is(args.thisv()));
if (!CanBeHeldWeakly(cx, args.get(0))) {
- ReportValueError(cx, JSMSG_WEAKMAP_KEY_CANT_BE_HELD_WEAKLY,
- JSDVG_IGNORE_STACK, args.get(0), nullptr);
+ unsigned errorNum = GetErrorNumber(true);
+ ReportValueError(cx, errorNum, JSDVG_IGNORE_STACK, args.get(0), nullptr);
return false;
}
@@ -238,8 +238,8 @@ JS_PUBLIC_API bool JS::SetWeakMapEntry(JSContext* cx, HandleObject mapObj,
CHECK_THREAD(cx);
cx->check(key, val);
if (!CanBeHeldWeakly(cx, key)) {
- ReportValueError(cx, JSMSG_WEAKMAP_KEY_CANT_BE_HELD_WEAKLY,
- JSDVG_IGNORE_STACK, key, nullptr);
+ unsigned errorNum = GetErrorNumber(true);
+ ReportValueError(cx, errorNum, JSDVG_IGNORE_STACK, key, nullptr);
return false;
}
diff --git a/js/src/builtin/WeakSetObject.cpp b/js/src/builtin/WeakSetObject.cpp
index 06e190f51e..3705e94218 100644
--- a/js/src/builtin/WeakSetObject.cpp
+++ b/js/src/builtin/WeakSetObject.cpp
@@ -31,8 +31,8 @@ using namespace js;
// Step 4.
if (!CanBeHeldWeakly(cx, args.get(0))) {
- ReportValueError(cx, JSMSG_WEAKSET_VAL_CANT_BE_HELD_WEAKLY,
- JSDVG_IGNORE_STACK, args.get(0), nullptr);
+ unsigned errorNum = GetErrorNumber(false);
+ ReportValueError(cx, errorNum, JSDVG_IGNORE_STACK, args.get(0), nullptr);
return false;
}
@@ -198,8 +198,9 @@ bool WeakSetObject::construct(JSContext* cx, unsigned argc, Value* vp) {
MOZ_ASSERT(!keyVal.isMagic(JS_ELEMENTS_HOLE));
if (!CanBeHeldWeakly(cx, keyVal)) {
- ReportValueError(cx, JSMSG_WEAKSET_VAL_CANT_BE_HELD_WEAKLY,
- JSDVG_IGNORE_STACK, keyVal, nullptr);
+ unsigned errorNum = GetErrorNumber(false);
+ ReportValueError(cx, errorNum, JSDVG_IGNORE_STACK, args.get(0),
+ nullptr);
return false;
}
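A small behavioural sketch of what GetErrorNumber selects between, from shell JS; the exact wording comes from friend/ErrorNumbers.msg and depends on the Nightly symbols-as-weakmap-keys pref:
var caught = null;
try {
  new WeakSet([123]);                                     // a number can never be held weakly
} catch (e) {
  caught = e;
}
assertEq(caught instanceof TypeError, true);
// With the pref enabled the "can't be held weakly" message is used,
// otherwise the older "must be an object" wording.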
diff --git a/js/src/builtin/embedjs.py b/js/src/builtin/embedjs.py
index 6a2c25b999..685f511e49 100644
--- a/js/src/builtin/embedjs.py
+++ b/js/src/builtin/embedjs.py
@@ -142,7 +142,9 @@ def preprocess(cxx, preprocessorOption, source, args=[]):
with open(tmpIn, "wb") as input:
input.write(source.encode("utf-8"))
- print(" ".join(cxx + outputArg + args + [tmpIn]))
+
+ if os.environ.get("BUILD_VERBOSE_LOG"):
+ print("Executing:", " ".join(cxx + outputArg + args + [tmpIn]))
result = subprocess.Popen(cxx + outputArg + args + [tmpIn]).wait()
if result != 0:
sys.exit(result)
diff --git a/js/src/ctypes/CTypes.cpp b/js/src/ctypes/CTypes.cpp
index 1296e6cbb9..c684d4ee22 100644
--- a/js/src/ctypes/CTypes.cpp
+++ b/js/src/ctypes/CTypes.cpp
@@ -26,9 +26,7 @@
#include <iterator>
#include <limits>
#include <stdint.h>
-#ifdef HAVE_SSIZE_T
-# include <sys/types.h>
-#endif
+#include <sys/types.h>
#include <type_traits>
#include "jsapi.h"
diff --git a/js/src/ctypes/typedefs.h b/js/src/ctypes/typedefs.h
index 51b020a09f..062c027496 100644
--- a/js/src/ctypes/typedefs.h
+++ b/js/src/ctypes/typedefs.h
@@ -34,10 +34,10 @@
#define ctypes_typedefs_h
// MSVC doesn't have ssize_t. Help it along a little.
-#ifdef HAVE_SSIZE_T
-# define CTYPES_SSIZE_T ssize_t
-#else
+#ifdef _WIN32
# define CTYPES_SSIZE_T intptr_t
+#else
+# define CTYPES_SSIZE_T ssize_t
#endif
// Some #defines to make handling of types whose length varies by platform
diff --git a/js/src/debugger/DebugAPI-inl.h b/js/src/debugger/DebugAPI-inl.h
index 3762fcf05e..77f81b800f 100644
--- a/js/src/debugger/DebugAPI-inl.h
+++ b/js/src/debugger/DebugAPI-inl.h
@@ -96,6 +96,14 @@ bool DebugAPI::onNewGenerator(JSContext* cx, AbstractFramePtr frame,
}
/* static */
+void DebugAPI::onGeneratorClosed(JSContext* cx,
+ AbstractGeneratorObject* genObj) {
+ if (cx->realm()->isDebuggee()) {
+ slowPathOnGeneratorClosed(cx, genObj);
+ }
+}
+
+/* static */
bool DebugAPI::checkNoExecute(JSContext* cx, HandleScript script) {
if (!cx->realm()->isDebuggee() || !cx->noExecuteDebuggerTop) {
return true;
diff --git a/js/src/debugger/DebugAPI.h b/js/src/debugger/DebugAPI.h
index c821dd412f..df082ab5ba 100644
--- a/js/src/debugger/DebugAPI.h
+++ b/js/src/debugger/DebugAPI.h
@@ -342,6 +342,9 @@ class DebugAPI {
JSContext* cx, AbstractFramePtr frame,
Handle<AbstractGeneratorObject*> genObj);
+ static inline void onGeneratorClosed(JSContext* cx,
+ AbstractGeneratorObject* genObj);
+
// If necessary, record an object that was just allocated for any observing
// debuggers.
[[nodiscard]] static inline bool onLogAllocationSite(
@@ -371,6 +374,8 @@ class DebugAPI {
[[nodiscard]] static bool slowPathOnNewGenerator(
JSContext* cx, AbstractFramePtr frame,
Handle<AbstractGeneratorObject*> genObj);
+ static void slowPathOnGeneratorClosed(JSContext* cx,
+ AbstractGeneratorObject* genObj);
[[nodiscard]] static bool slowPathCheckNoExecute(JSContext* cx,
HandleScript script);
[[nodiscard]] static bool slowPathOnEnterFrame(JSContext* cx,
diff --git a/js/src/debugger/Debugger.cpp b/js/src/debugger/Debugger.cpp
index 8a99f8d4fe..37c1e79a9d 100644
--- a/js/src/debugger/Debugger.cpp
+++ b/js/src/debugger/Debugger.cpp
@@ -1794,7 +1794,7 @@ static bool CheckResumptionValue(JSContext* cx, AbstractFramePtr frame,
}
// 2. The generator must be closed.
- genObj->setClosed();
+ genObj->setClosed(cx);
// Async generators have additional bookkeeping which must be adjusted
// when switching over to the closed state.
@@ -1823,7 +1823,7 @@ static bool CheckResumptionValue(JSContext* cx, AbstractFramePtr frame,
vp.setObject(*promise);
// 2. The generator must be closed.
- generator->setClosed();
+ generator->setClosed(cx);
} else {
// We're before entering the actual function code.
@@ -2855,6 +2855,20 @@ void DebugAPI::slowPathOnNewGlobalObject(JSContext* cx,
}
/* static */
+void DebugAPI::slowPathOnGeneratorClosed(JSContext* cx,
+ AbstractGeneratorObject* genObj) {
+ JS::AutoAssertNoGC nogc;
+ for (Realm::DebuggerVectorEntry& entry : cx->global()->getDebuggers(nogc)) {
+ Debugger* dbg = entry.dbg;
+ if (Debugger::GeneratorWeakMap::Ptr frameEntry =
+ dbg->generatorFrames.lookup(genObj)) {
+ DebuggerFrame* frameObj = frameEntry->value();
+ frameObj->onGeneratorClosed(cx->gcContext());
+ }
+ }
+}
+
+/* static */
void DebugAPI::slowPathNotifyParticipatesInGC(uint64_t majorGCNumber,
Realm::DebuggerVector& dbgs,
const JS::AutoRequireNoGC& nogc) {
diff --git a/js/src/debugger/Environment.cpp b/js/src/debugger/Environment.cpp
index 9c90573c25..8549b47471 100644
--- a/js/src/debugger/Environment.cpp
+++ b/js/src/debugger/Environment.cpp
@@ -289,12 +289,12 @@ bool DebuggerEnvironment::CallData::findMethod() {
return false;
}
- if (!environment->requireDebuggee(cx)) {
+ RootedId id(cx);
+ if (!ValueToIdentifier(cx, args[0], &id)) {
return false;
}
- RootedId id(cx);
- if (!ValueToIdentifier(cx, args[0], &id)) {
+ if (!environment->requireDebuggee(cx)) {
return false;
}
@@ -312,12 +312,12 @@ bool DebuggerEnvironment::CallData::getVariableMethod() {
return false;
}
- if (!environment->requireDebuggee(cx)) {
+ RootedId id(cx);
+ if (!ValueToIdentifier(cx, args[0], &id)) {
return false;
}
- RootedId id(cx);
- if (!ValueToIdentifier(cx, args[0], &id)) {
+ if (!environment->requireDebuggee(cx)) {
return false;
}
@@ -329,12 +329,12 @@ bool DebuggerEnvironment::CallData::setVariableMethod() {
return false;
}
- if (!environment->requireDebuggee(cx)) {
+ RootedId id(cx);
+ if (!ValueToIdentifier(cx, args[0], &id)) {
return false;
}
- RootedId id(cx);
- if (!ValueToIdentifier(cx, args[0], &id)) {
+ if (!environment->requireDebuggee(cx)) {
return false;
}
diff --git a/js/src/debugger/Frame.cpp b/js/src/debugger/Frame.cpp
index 9767e2e7ed..63978b90ab 100644
--- a/js/src/debugger/Frame.cpp
+++ b/js/src/debugger/Frame.cpp
@@ -439,6 +439,24 @@ void DebuggerFrame::terminate(JS::GCContext* gcx, AbstractFramePtr frame) {
gcx->delete_(this, info, MemoryUse::DebuggerFrameGeneratorInfo);
}
+void DebuggerFrame::onGeneratorClosed(JS::GCContext* gcx) {
+ GeneratorInfo* info = generatorInfo();
+
+ // If the generator is closed, eagerly drop the onStep handler, to make sure
+ // the stepper counter matches in the assertion in DebugAPI::onSingleStep.
+ // Also clear the slot in order to suppress the decrementStepperCounter in
+ // DebuggerFrame::terminate.
+ if (!info->isGeneratorScriptAboutToBeFinalized()) {
+ JSScript* generatorScript = info->generatorScript();
+ OnStepHandler* handler = onStepHandler();
+ if (handler) {
+ decrementStepperCounter(gcx, generatorScript);
+ setReservedSlot(ONSTEP_HANDLER_SLOT, UndefinedValue());
+ handler->drop(gcx, this);
+ }
+ }
+}
+
void DebuggerFrame::suspend(JS::GCContext* gcx) {
// There must be generator info because otherwise this would be the same
// overall behavior as terminate() except that here we do not properly
diff --git a/js/src/debugger/Frame.h b/js/src/debugger/Frame.h
index 675accbcf7..44ee39d9db 100644
--- a/js/src/debugger/Frame.h
+++ b/js/src/debugger/Frame.h
@@ -287,6 +287,7 @@ class DebuggerFrame : public NativeObject {
FrameIter getFrameIter(JSContext* cx);
void terminate(JS::GCContext* gcx, AbstractFramePtr frame);
+ void onGeneratorClosed(JS::GCContext* gcx);
void suspend(JS::GCContext* gcx);
[[nodiscard]] bool replaceFrameIterData(JSContext* cx, const FrameIter&);
diff --git a/js/src/devtools/automation/variants/rootanalysis b/js/src/devtools/automation/variants/rootanalysis
index 7c0fb5242b..89da2eaf1a 100644
--- a/js/src/devtools/automation/variants/rootanalysis
+++ b/js/src/devtools/automation/variants/rootanalysis
@@ -4,6 +4,6 @@
"debug": true,
"env": {
"JS_GC_ZEAL": "GenerationalGC",
- "JSTESTS_EXTRA_ARGS": "--jitflags=debug"
+ "JSTESTS_EXTRA_ARGS": "--jitflags=debug --args='--gc-param=semispaceNurseryEnabled=1'"
}
}
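The same switch can also be flipped at runtime; a sketch, assuming the parameter is exposed to the shell under the name used by --gc-param above:
gcparam("semispaceNurseryEnabled", 1);                    // enable the semispace nursery
assertEq(gcparam("semispaceNurseryEnabled"), 1);
gc();                                                     // run a collection with it enabled
gcparam("semispaceNurseryEnabled", 0);                    // turn it off again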
diff --git a/js/src/ds/BitArray.h b/js/src/ds/BitArray.h
index bdd78873fd..f8a6aad455 100644
--- a/js/src/ds/BitArray.h
+++ b/js/src/ds/BitArray.h
@@ -99,6 +99,12 @@ class BitArray {
return map[elementIndex];
}
+ // Update a word at a time.
+ void setWord(size_t elementIndex, WordT value) {
+ MOZ_ASSERT(elementIndex < nbits);
+ map[elementIndex] = value;
+ }
+
static void getIndexAndMask(size_t offset, size_t* indexp, WordT* maskp) {
MOZ_ASSERT(offset < nbits);
static_assert(bitsPerElement == 32, "unexpected bitsPerElement value");
diff --git a/js/src/ds/Bitmap.h b/js/src/ds/Bitmap.h
index 6770585a61..f564e2a328 100644
--- a/js/src/ds/Bitmap.h
+++ b/js/src/ds/Bitmap.h
@@ -162,8 +162,26 @@ class SparseBitmap {
void bitwiseOrWith(const SparseBitmap& other);
void bitwiseOrInto(DenseBitmap& other) const;
- // Currently, this API only supports a range of words that is in a single bit
- // block.
+  // Currently, the following APIs only support a range of words that is in a
+ // single bit block.
+
+ template <typename T>
+ typename std::enable_if_t<std::is_convertible_v<T, uintptr_t>, void>
+ bitwiseAndRangeWith(size_t wordStart, size_t numWords, T* source) {
+ size_t blockWord = blockStartWord(wordStart);
+
+ // We only support using a single bit block in this API.
+ MOZ_ASSERT(numWords &&
+ (blockWord == blockStartWord(wordStart + numWords - 1)));
+
+ BitBlock* block = getBlock(blockWord / WordsInBlock);
+ if (block) {
+ for (size_t i = 0; i < numWords; i++) {
+ (*block)[wordStart - blockWord + i] &= source[i];
+ }
+ }
+ }
+
template <typename T>
typename std::enable_if_t<std::is_convertible_v<T, uintptr_t>, void>
bitwiseOrRangeInto(size_t wordStart, size_t numWords, T* target) const {
diff --git a/js/src/ds/OrderedHashTable.h b/js/src/ds/OrderedHashTable.h
index 3ef366abc3..75c22bd123 100644
--- a/js/src/ds/OrderedHashTable.h
+++ b/js/src/ds/OrderedHashTable.h
@@ -39,6 +39,7 @@
#include "mozilla/HashFunctions.h"
#include "mozilla/Likely.h"
+#include "mozilla/Maybe.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/TemplateLib.h"
@@ -85,9 +86,16 @@ class OrderedHashTable {
uint32_t dataCapacity; // size of data, in elements
uint32_t liveCount; // dataLength less empty (removed) entries
uint32_t hashShift; // multiplicative hash shift
- Range* ranges; // list of all live Ranges on this table in malloc memory
- Range*
- nurseryRanges; // list of all live Ranges on this table in the GC nursery
+
+ // List of all live Ranges on this table in malloc memory. Populated when
+ // ranges are created.
+ Range* ranges;
+
+ // List of all live Ranges on this table in the GC nursery. Populated when
+ // ranges are created. This is cleared at the start of minor GC and rebuilt
+ // when ranges are moved.
+ Range* nurseryRanges;
+
AllocPolicy alloc;
mozilla::HashCodeScrambler hcs; // don't reveal pointer hash codes
@@ -382,22 +390,28 @@ class OrderedHashTable {
next->prevp = &next;
}
seek();
+ MOZ_ASSERT(valid());
}
public:
- Range(const Range& other)
+ Range(const Range& other, bool inNursery)
: ht(other.ht),
i(other.i),
count(other.count),
- prevp(&ht->ranges),
- next(ht->ranges) {
+ prevp(inNursery ? &ht->nurseryRanges : &ht->ranges),
+ next(*prevp) {
*prevp = this;
if (next) {
next->prevp = &next;
}
+ MOZ_ASSERT(valid());
}
~Range() {
+ if (!prevp) {
+ // Head of removed nursery ranges.
+ return;
+ }
*prevp = next;
if (next) {
next->prevp = prevp;
@@ -446,12 +460,15 @@ class OrderedHashTable {
i = count = 0;
}
- bool valid() const { return next != this; }
+#ifdef DEBUG
+ bool valid() const { return /* *prevp == this && */ next != this; }
+#endif
void onTableDestroyed() {
MOZ_ASSERT(valid());
prevp = &next;
next = this;
+ MOZ_ASSERT(!valid());
}
public:
@@ -559,9 +576,6 @@ class OrderedHashTable {
/*
* Allocate a new Range, possibly in nursery memory. The buffer must be
* large enough to hold a Range object.
- *
- * All nursery-allocated ranges can be freed in one go by calling
- * destroyNurseryRanges().
*/
Range* createRange(void* buffer, bool inNursery) const {
auto* self = const_cast<OrderedHashTable*>(this);
@@ -570,7 +584,16 @@ class OrderedHashTable {
return static_cast<Range*>(buffer);
}
- void destroyNurseryRanges() { nurseryRanges = nullptr; }
+ void destroyNurseryRanges() {
+ if (nurseryRanges) {
+ nurseryRanges->prevp = nullptr;
+ }
+ nurseryRanges = nullptr;
+ }
+
+#ifdef DEBUG
+ bool hasNurseryRanges() const { return nurseryRanges; }
+#endif
/*
* Change the value of the given key.
@@ -959,13 +982,17 @@ class OrderedHashMap {
HashNumber hash(const Lookup& key) const { return impl.prepareHash(key); }
template <typename GetNewKey>
- void rekeyOneEntry(const Lookup& current, const GetNewKey& getNewKey) {
+ mozilla::Maybe<Key> rekeyOneEntry(Lookup& current, GetNewKey&& getNewKey) {
+ // TODO: This is inefficient because we also look up the entry in
+ // impl.rekeyOneEntry below.
const Entry* e = get(current);
if (!e) {
- return;
+ return mozilla::Nothing();
}
+
Key newKey = getNewKey(current);
- return impl.rekeyOneEntry(current, newKey, Entry(newKey, e->value));
+ impl.rekeyOneEntry(current, newKey, Entry(newKey, e->value));
+ return mozilla::Some(newKey);
}
Range* createRange(void* buffer, bool inNursery) const {
@@ -973,6 +1000,9 @@ class OrderedHashMap {
}
void destroyNurseryRanges() { impl.destroyNurseryRanges(); }
+#ifdef DEBUG
+ bool hasNurseryRanges() const { return impl.hasNurseryRanges(); }
+#endif
void trace(JSTracer* trc) { impl.trace(trc); }
@@ -1048,12 +1078,16 @@ class OrderedHashSet {
HashNumber hash(const Lookup& value) const { return impl.prepareHash(value); }
template <typename GetNewKey>
- void rekeyOneEntry(const Lookup& current, const GetNewKey& getNewKey) {
+ mozilla::Maybe<T> rekeyOneEntry(Lookup& current, GetNewKey&& getNewKey) {
+ // TODO: This is inefficient because we also look up the entry in
+ // impl.rekeyOneEntry below.
if (!has(current)) {
- return;
+ return mozilla::Nothing();
}
+
T newKey = getNewKey(current);
- return impl.rekeyOneEntry(current, newKey, newKey);
+ impl.rekeyOneEntry(current, newKey, newKey);
+ return mozilla::Some(newKey);
}
Range* createRange(void* buffer, bool inNursery) const {
@@ -1061,6 +1095,9 @@ class OrderedHashSet {
}
void destroyNurseryRanges() { impl.destroyNurseryRanges(); }
+#ifdef DEBUG
+ bool hasNurseryRanges() const { return impl.hasNurseryRanges(); }
+#endif
void trace(JSTracer* trc) { impl.trace(trc); }
diff --git a/js/src/frontend/Parser.cpp b/js/src/frontend/Parser.cpp
index cd586ad2a7..997b7ce8ee 100644
--- a/js/src/frontend/Parser.cpp
+++ b/js/src/frontend/Parser.cpp
@@ -811,11 +811,18 @@ bool GeneralParser<ParseHandler, Unit>::noteDeclaredPrivateName(
AddDeclaredNamePtr p = scope->lookupDeclaredNameForAdd(name);
DeclarationKind declKind = DeclarationKind::PrivateName;
- ClosedOver closedOver = ClosedOver::No;
+
+ // Our strategy for enabling debugger functionality is to mark names as closed
+ // over, even if they don't necessarily need to be, to ensure that they are
+ // included in the environment object. This allows us to easily look them up
+ // by name when needed, even if there is no corresponding property on an
+  // object, as is the case with getters, setters, and private methods.
+ ClosedOver closedOver = ClosedOver::Yes;
PrivateNameKind kind;
switch (propType) {
case PropertyType::Field:
kind = PrivateNameKind::Field;
+ closedOver = ClosedOver::No;
break;
case PropertyType::FieldWithAccessor:
// In this case, we create a new private field for the underlying storage,
@@ -831,11 +838,6 @@ bool GeneralParser<ParseHandler, Unit>::noteDeclaredPrivateName(
// DeclarationKind::Synthetic.
declKind = DeclarationKind::PrivateMethod;
}
-
- // Methods must be marked closed-over so that
- // EmitterScope::lookupPrivate() works even if the method is used, but not
- // within any method (from a computed property name, or debugger frame)
- closedOver = ClosedOver::Yes;
kind = PrivateNameKind::Method;
break;
case PropertyType::Getter:
@@ -845,7 +847,7 @@ bool GeneralParser<ParseHandler, Unit>::noteDeclaredPrivateName(
kind = PrivateNameKind::Setter;
break;
default:
- kind = PrivateNameKind::None;
+      MOZ_CRASH("Invalid Property Type for noteDeclaredPrivateName");
}
if (p) {
@@ -8004,7 +8006,11 @@ GeneralParser<ParseHandler, Unit>::classDefinition(
// position in order to provide it for the nodes created later.
TokenPos namePos = pos();
- bool isInClass = pc_->sc()->inClass();
+ auto isClass = [](ParseContext::Statement* stmt) {
+ return stmt->kind() == StatementKind::Class;
+ };
+
+ bool isInClass = pc_->sc()->inClass() || pc_->findInnermostStatement(isClass);
// Push a ParseContext::ClassStatement to keep track of the constructor
// funbox.
diff --git a/js/src/frontend/Stencil.cpp b/js/src/frontend/Stencil.cpp
index 30d1588415..67ed2a90ca 100644
--- a/js/src/frontend/Stencil.cpp
+++ b/js/src/frontend/Stencil.cpp
@@ -759,6 +759,29 @@ void ScopeContext::cacheEnclosingScope(const InputScope& enclosingScope) {
MOZ_CRASH("Malformed scope chain");
}
+// Given an input scope, possibly refine this to a more precise scope.
+// This is used during eval in the debugger to provide the appropriate scope and
+// ThisBinding kind and environment, which is key to making private field eval
+// work correctly.
+//
+// The trick here is that an eval may have a non-syntactic scope but nevertheless
+// have an 'interesting' environment which can be traversed to find the
+// appropriate scope for the eval to function as desired. See the diagram below.
+//
+// Eval Scope Eval Env Frame Env Frame Scope
+// ============ ============= ========= =============
+//
+// NonSyntactic
+// |
+// v
+// null DebugEnvProxy LexicalScope
+// | |
+// v v
+// DebugEnvProxy --> CallObj --> FunctionScope
+// | | |
+// v v v
+// ... ... ...
+//
InputScope ScopeContext::determineEffectiveScope(InputScope& scope,
JSObject* environment) {
MOZ_ASSERT(effectiveScopeHops == 0);
@@ -4286,8 +4309,8 @@ void js::DumpFunctionFlagsItems(js::JSONPrinter& json,
case FunctionFlags::Flags::LAMBDA:
json.value("LAMBDA");
break;
- case FunctionFlags::Flags::WASM_JIT_ENTRY:
- json.value("WASM_JIT_ENTRY");
+ case FunctionFlags::Flags::NATIVE_JIT_ENTRY:
+ json.value("NATIVE_JIT_ENTRY");
break;
case FunctionFlags::Flags::HAS_INFERRED_NAME:
json.value("HAS_INFERRED_NAME");
diff --git a/js/src/frontend/TokenStream.cpp b/js/src/frontend/TokenStream.cpp
index 2134972bf4..7060a6edb1 100644
--- a/js/src/frontend/TokenStream.cpp
+++ b/js/src/frontend/TokenStream.cpp
@@ -1487,9 +1487,8 @@ bool TokenStreamAnyChars::fillExceptingContext(ErrorMetadata* err,
err->filename = JS::ConstUTF8CharsZ(iter.filename());
JS::TaggedColumnNumberOneOrigin columnNumber;
err->lineNumber = iter.computeLine(&columnNumber);
- // NOTE: Wasm frame cannot appear here.
err->columnNumber =
- JS::ColumnNumberOneOrigin(columnNumber.toLimitedColumnNumber());
+ JS::ColumnNumberOneOrigin(columnNumber.oneOriginValue());
return false;
}
}
diff --git a/js/src/fuzz-tests/gluesmith/Cargo.toml b/js/src/fuzz-tests/gluesmith/Cargo.toml
index 7bd7f6652a..c5f7118ba4 100644
--- a/js/src/fuzz-tests/gluesmith/Cargo.toml
+++ b/js/src/fuzz-tests/gluesmith/Cargo.toml
@@ -5,6 +5,6 @@ authors = ["Christian Holler"]
license = "MPL-2.0"
[dependencies]
-wasm-smith = "0.15.0"
+wasm-smith = "0.201.0"
arbitrary = { version = "1.0.0", features = ["derive"] }
libc = "0.2"
diff --git a/js/src/fuzz-tests/gluesmith/src/lib.rs b/js/src/fuzz-tests/gluesmith/src/lib.rs
index 41aac369a0..83d9c9859d 100644
--- a/js/src/fuzz-tests/gluesmith/src/lib.rs
+++ b/js/src/fuzz-tests/gluesmith/src/lib.rs
@@ -41,6 +41,7 @@ pub unsafe extern "C" fn gluesmith(
simd_enabled: true,
tail_call_enabled: true,
threads_enabled: true,
+ gc_enabled: true,
..Config::default()
};
let module = match Module::new(config, &mut u) {
diff --git a/js/src/gc/Allocator.cpp b/js/src/gc/Allocator.cpp
index c070ac1eef..9e7f16cc3f 100644
--- a/js/src/gc/Allocator.cpp
+++ b/js/src/gc/Allocator.cpp
@@ -77,9 +77,10 @@ MOZ_NEVER_INLINE void* CellAllocator::RetryNurseryAlloc(JSContext* cx,
size_t thingSize,
AllocSite* site) {
MOZ_ASSERT(cx->isNurseryAllocAllowed());
- MOZ_ASSERT(cx->zone() == site->zone());
- MOZ_ASSERT(!cx->zone()->isAtomsZone());
- MOZ_ASSERT(cx->zone()->allocKindInNursery(traceKind));
+
+ Zone* zone = site->zone();
+ MOZ_ASSERT(!zone->isAtomsZone());
+ MOZ_ASSERT(zone->allocKindInNursery(traceKind));
Nursery& nursery = cx->nursery();
JS::GCReason reason = nursery.handleAllocationFailure();
@@ -102,7 +103,7 @@ MOZ_NEVER_INLINE void* CellAllocator::RetryNurseryAlloc(JSContext* cx,
cx->runtime()->gc.minorGC(reason);
// Exceeding gcMaxBytes while tenuring can disable the Nursery.
- if (cx->zone()->allocKindInNursery(traceKind)) {
+ if (zone->allocKindInNursery(traceKind)) {
void* ptr = cx->nursery().allocateCell(site, thingSize, traceKind);
if (ptr) {
return ptr;
@@ -291,7 +292,7 @@ void CellAllocator::CheckIncrementalZoneState(JSContext* cx, void* ptr) {
}
#endif
-void* js::gc::AllocateCellInGC(Zone* zone, AllocKind thingKind) {
+void* js::gc::AllocateTenuredCellInGC(Zone* zone, AllocKind thingKind) {
void* ptr = zone->arenas.allocateFromFreeList(thingKind);
if (!ptr) {
AutoEnterOOMUnsafeRegion oomUnsafe;
@@ -541,7 +542,7 @@ TenuredChunk* GCRuntime::getOrAllocChunk(AutoLockGCBgAlloc& lock) {
// Reinitialize ChunkBase; arenas are all free and may or may not be
// committed.
SetMemCheckKind(chunk, sizeof(ChunkBase), MemCheckKind::MakeUndefined);
- chunk->initBase(rt, nullptr);
+ chunk->initBaseForTenuredChunk(rt);
MOZ_ASSERT(chunk->unused());
} else {
void* ptr = TenuredChunk::allocate(this);
diff --git a/js/src/gc/Allocator.h b/js/src/gc/Allocator.h
index 3b1566e7f5..c317bfb10b 100644
--- a/js/src/gc/Allocator.h
+++ b/js/src/gc/Allocator.h
@@ -21,6 +21,7 @@ namespace gc {
class AllocSite;
struct Cell;
class TenuredCell;
+class TenuringTracer;
// Allocator implementation functions. SpiderMonkey code outside this file
// should use:
@@ -82,6 +83,7 @@ class CellAllocator {
static void* AllocNurseryOrTenuredCell(JSContext* cx, gc::AllocKind allocKind,
size_t thingSize, gc::Heap heap,
AllocSite* site);
+ friend class TenuringTracer;
// Allocate a cell in the tenured heap.
template <AllowGC allowGC>
diff --git a/js/src/gc/AtomMarking.cpp b/js/src/gc/AtomMarking.cpp
index eb30758263..a5b7eb1acf 100644
--- a/js/src/gc/AtomMarking.cpp
+++ b/js/src/gc/AtomMarking.cpp
@@ -12,6 +12,7 @@
#include "gc/GC-inl.h"
#include "gc/Heap-inl.h"
+#include "gc/PrivateIterators-inl.h"
namespace js {
namespace gc {
@@ -71,7 +72,38 @@ void AtomMarkingRuntime::unregisterArena(Arena* arena, const AutoLockGC& lock) {
(void)freeArenaIndexes.ref().emplaceBack(arena->atomBitmapStart());
}
-bool AtomMarkingRuntime::computeBitmapFromChunkMarkBits(JSRuntime* runtime,
+void AtomMarkingRuntime::refineZoneBitmapsForCollectedZones(
+ GCRuntime* gc, size_t collectedZones) {
+ // If there is more than one zone to update, copy the chunk mark bits into a
+ // bitmap and AND that into the atom marking bitmap for each zone.
+ DenseBitmap marked;
+ if (collectedZones > 1 && computeBitmapFromChunkMarkBits(gc, marked)) {
+ for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
+ refineZoneBitmapForCollectedZone(zone, marked);
+ }
+ return;
+ }
+
+ // If there's only one zone (or on OOM), AND the mark bits for each arena into
+ // the zones' atom marking bitmaps directly.
+ for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
+ if (zone->isAtomsZone()) {
+ continue;
+ }
+
+ for (auto thingKind : AllAllocKinds()) {
+ for (ArenaIterInGC aiter(gc->atomsZone(), thingKind); !aiter.done();
+ aiter.next()) {
+ Arena* arena = aiter.get();
+ MarkBitmapWord* chunkWords = arena->chunk()->markBits.arenaBits(arena);
+ zone->markedAtoms().bitwiseAndRangeWith(arena->atomBitmapStart(),
+ ArenaBitmapWords, chunkWords);
+ }
+ }
+ }
+}
+
+bool AtomMarkingRuntime::computeBitmapFromChunkMarkBits(GCRuntime* gc,
DenseBitmap& bitmap) {
MOZ_ASSERT(CurrentThreadIsPerformingGC());
@@ -79,7 +111,7 @@ bool AtomMarkingRuntime::computeBitmapFromChunkMarkBits(JSRuntime* runtime,
return false;
}
- Zone* atomsZone = runtime->unsafeAtomsZone();
+ Zone* atomsZone = gc->atomsZone();
for (auto thingKind : AllAllocKinds()) {
for (ArenaIterInGC aiter(atomsZone, thingKind); !aiter.done();
aiter.next()) {
@@ -109,13 +141,12 @@ void AtomMarkingRuntime::refineZoneBitmapForCollectedZone(
// Set any bits in the chunk mark bitmaps for atoms which are marked in bitmap.
template <typename Bitmap>
-static void BitwiseOrIntoChunkMarkBits(JSRuntime* runtime, Bitmap& bitmap) {
+static void BitwiseOrIntoChunkMarkBits(Zone* atomsZone, Bitmap& bitmap) {
// Make sure that by copying the mark bits for one arena in word sizes we
// do not affect the mark bits for other arenas.
static_assert(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
"ArenaBitmapWords must evenly divide ArenaBitmapBits");
- Zone* atomsZone = runtime->unsafeAtomsZone();
for (auto thingKind : AllAllocKinds()) {
for (ArenaIterInGC aiter(atomsZone, thingKind); !aiter.done();
aiter.next()) {
@@ -127,16 +158,10 @@ static void BitwiseOrIntoChunkMarkBits(JSRuntime* runtime, Bitmap& bitmap) {
}
}
-void AtomMarkingRuntime::markAtomsUsedByUncollectedZones(JSRuntime* runtime) {
+void AtomMarkingRuntime::markAtomsUsedByUncollectedZones(
+ GCRuntime* gc, size_t uncollectedZones) {
MOZ_ASSERT(CurrentThreadIsPerformingGC());
- size_t uncollectedZones = 0;
- for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
- if (!zone->isCollecting()) {
- uncollectedZones++;
- }
- }
-
// If there are no uncollected non-atom zones then there's no work to do.
if (uncollectedZones == 0) {
return;
@@ -149,15 +174,15 @@ void AtomMarkingRuntime::markAtomsUsedByUncollectedZones(JSRuntime* runtime) {
DenseBitmap markedUnion;
if (uncollectedZones == 1 || !markedUnion.ensureSpace(allocatedWords)) {
- for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
+ for (ZonesIter zone(gc, SkipAtoms); !zone.done(); zone.next()) {
if (!zone->isCollecting()) {
- BitwiseOrIntoChunkMarkBits(runtime, zone->markedAtoms());
+ BitwiseOrIntoChunkMarkBits(gc->atomsZone(), zone->markedAtoms());
}
}
return;
}
- for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
+ for (ZonesIter zone(gc, SkipAtoms); !zone.done(); zone.next()) {
// We only need to update the chunk mark bits for zones which were
// not collected in the current GC. Atoms which are referenced by
// collected zones have already been marked.
@@ -166,7 +191,7 @@ void AtomMarkingRuntime::markAtomsUsedByUncollectedZones(JSRuntime* runtime) {
}
}
- BitwiseOrIntoChunkMarkBits(runtime, markedUnion);
+ BitwiseOrIntoChunkMarkBits(gc->atomsZone(), markedUnion);
}
template <typename T>
diff --git a/js/src/gc/AtomMarking.h b/js/src/gc/AtomMarking.h
index e7e97fb389..e5d8ef8418 100644
--- a/js/src/gc/AtomMarking.h
+++ b/js/src/gc/AtomMarking.h
@@ -19,6 +19,7 @@ class DenseBitmap;
namespace gc {
class Arena;
+class GCRuntime;
// This class manages state used for marking atoms during GCs.
// See AtomMarking.cpp for details.
@@ -42,19 +43,25 @@ class AtomMarkingRuntime {
// Mark an arena as no longer holding things in the atoms zone.
void unregisterArena(Arena* arena, const AutoLockGC& lock);
+ // Update the atom marking bitmaps in all collected zones according to the
+ // atoms zone mark bits.
+ void refineZoneBitmapsForCollectedZones(GCRuntime* gc, size_t collectedZones);
+
+ // Set any bits in the chunk mark bitmaps for atoms which are marked in any
+ // uncollected zone in the runtime.
+ void markAtomsUsedByUncollectedZones(GCRuntime* gc, size_t uncollectedZones);
+
+ private:
// Fill |bitmap| with an atom marking bitmap based on the things that are
// currently marked in the chunks used by atoms zone arenas. This returns
// false on an allocation failure (but does not report an exception).
- bool computeBitmapFromChunkMarkBits(JSRuntime* runtime, DenseBitmap& bitmap);
+ bool computeBitmapFromChunkMarkBits(GCRuntime* gc, DenseBitmap& bitmap);
// Update the atom marking bitmap in |zone| according to another
// overapproximation of the reachable atoms in |bitmap|.
void refineZoneBitmapForCollectedZone(Zone* zone, const DenseBitmap& bitmap);
- // Set any bits in the chunk mark bitmaps for atoms which are marked in any
- // uncollected zone in the runtime.
- void markAtomsUsedByUncollectedZones(JSRuntime* runtime);
-
+ public:
// Mark an atom or id as being newly reachable by the context's zone.
template <typename T>
void markAtom(JSContext* cx, T* thing);
diff --git a/js/src/gc/Cell.h b/js/src/gc/Cell.h
index 5a89ad794d..f91163e2f5 100644
--- a/js/src/gc/Cell.h
+++ b/js/src/gc/Cell.h
@@ -209,6 +209,8 @@ struct Cell {
inline JS::Zone* nurseryZone() const;
inline JS::Zone* nurseryZoneFromAnyThread() const;
+ inline ChunkBase* chunk() const;
+
// Default implementation for kinds that cannot be permanent. This may be
// overriden by derived classes.
MOZ_ALWAYS_INLINE bool isPermanentAndMayBeShared() const { return false; }
@@ -222,7 +224,6 @@ struct Cell {
protected:
uintptr_t address() const;
- inline ChunkBase* chunk() const;
private:
// Cells are destroyed by the GC. Do not delete them directly.
diff --git a/js/src/gc/Compacting.cpp b/js/src/gc/Compacting.cpp
index 442fa3fe47..79e8e0b71d 100644
--- a/js/src/gc/Compacting.cpp
+++ b/js/src/gc/Compacting.cpp
@@ -215,7 +215,7 @@ static void RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind,
// Allocate a new cell.
MOZ_ASSERT(zone == src->zone());
TenuredCell* dst =
- reinterpret_cast<TenuredCell*>(AllocateCellInGC(zone, thingKind));
+ reinterpret_cast<TenuredCell*>(AllocateTenuredCellInGC(zone, thingKind));
// Copy source cell contents to destination.
memcpy(dst, src, thingSize);
diff --git a/js/src/gc/GC.cpp b/js/src/gc/GC.cpp
index c01dfe3660..68dd66898c 100644
--- a/js/src/gc/GC.cpp
+++ b/js/src/gc/GC.cpp
@@ -444,7 +444,9 @@ GCRuntime::GCRuntime(JSRuntime* rt)
#endif
requestSliceAfterBackgroundTask(false),
lifoBlocksToFree((size_t)JSContext::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
- lifoBlocksToFreeAfterMinorGC(
+ lifoBlocksToFreeAfterFullMinorGC(
+ (size_t)JSContext::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
+ lifoBlocksToFreeAfterNextMinorGC(
(size_t)JSContext::TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
sweepGroupIndex(0),
sweepGroups(nullptr),
@@ -658,7 +660,7 @@ void GCRuntime::setZeal(uint8_t zeal, uint32_t frequency) {
if (zeal == 0) {
if (hasZealMode(ZealMode::GenerationalGC)) {
- evictNursery(JS::GCReason::DEBUG_GC);
+ evictNursery();
nursery().leaveZealMode();
}
@@ -669,7 +671,7 @@ void GCRuntime::setZeal(uint8_t zeal, uint32_t frequency) {
ZealMode zealMode = ZealMode(zeal);
if (zealMode == ZealMode::GenerationalGC) {
- evictNursery(JS::GCReason::DEBUG_GC);
+ evictNursery(JS::GCReason::EVICT_NURSERY);
nursery().enterZealMode();
}
@@ -704,7 +706,7 @@ void GCRuntime::unsetZeal(uint8_t zeal) {
}
if (zealMode == ZealMode::GenerationalGC) {
- evictNursery(JS::GCReason::DEBUG_GC);
+ evictNursery();
nursery().leaveZealMode();
}
@@ -1073,6 +1075,11 @@ bool GCRuntime::setParameter(JSGCParamKey key, uint32_t value,
marker->incrementalWeakMapMarkingEnabled = value != 0;
}
break;
+ case JSGC_SEMISPACE_NURSERY_ENABLED: {
+ AutoUnlockGC unlock(lock);
+ nursery().setSemispaceEnabled(value);
+ break;
+ }
case JSGC_MIN_EMPTY_CHUNK_COUNT:
setMinEmptyChunkCount(value, lock);
break;
@@ -1160,6 +1167,11 @@ void GCRuntime::resetParameter(JSGCParamKey key, AutoLockGC& lock) {
TuningDefaults::IncrementalWeakMapMarkingEnabled;
}
break;
+ case JSGC_SEMISPACE_NURSERY_ENABLED: {
+ AutoUnlockGC unlock(lock);
+ nursery().setSemispaceEnabled(TuningDefaults::SemispaceNurseryEnabled);
+ break;
+ }
case JSGC_MIN_EMPTY_CHUNK_COUNT:
setMinEmptyChunkCount(TuningDefaults::MinEmptyChunkCount, lock);
break;
@@ -1241,6 +1253,8 @@ uint32_t GCRuntime::getParameter(JSGCParamKey key, const AutoLockGC& lock) {
return parallelMarkingEnabled;
case JSGC_INCREMENTAL_WEAKMAP_ENABLED:
return marker().incrementalWeakMapMarkingEnabled;
+ case JSGC_SEMISPACE_NURSERY_ENABLED:
+ return nursery().semispaceEnabled();
case JSGC_CHUNK_BYTES:
return ChunkSize;
case JSGC_HELPER_THREAD_RATIO:
@@ -2135,7 +2149,7 @@ void GCRuntime::queueUnusedLifoBlocksForFree(LifoAlloc* lifo) {
}
void GCRuntime::queueAllLifoBlocksForFreeAfterMinorGC(LifoAlloc* lifo) {
- lifoBlocksToFreeAfterMinorGC.ref().transferFrom(lifo);
+ lifoBlocksToFreeAfterFullMinorGC.ref().transferFrom(lifo);
}
void GCRuntime::queueBuffersForFreeAfterMinorGC(Nursery::BufferSet& buffers) {
@@ -2741,24 +2755,7 @@ void GCRuntime::endPreparePhase(JS::GCReason reason) {
MOZ_ASSERT(unmarkTask.isIdle());
for (GCZonesIter zone(this); !zone.done(); zone.next()) {
- /*
- * In an incremental GC, clear the area free lists to ensure that subsequent
- * allocations refill them and end up marking new cells back. See
- * arenaAllocatedDuringGC().
- */
- zone->arenas.clearFreeLists();
-
zone->setPreservingCode(false);
-
-#ifdef JS_GC_ZEAL
- if (hasZealMode(ZealMode::YieldBeforeRootMarking)) {
- for (auto kind : AllAllocKinds()) {
- for (ArenaIterInGC arena(zone, kind); !arena.done(); arena.next()) {
- arena->checkNoMarkedCells();
- }
- }
- }
-#endif
}
// Discard JIT code more aggressively if the process is approaching its
@@ -2800,7 +2797,7 @@ void GCRuntime::endPreparePhase(JS::GCReason reason) {
*/
{
- gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::PREPARE);
+ gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::PREPARE);
AutoLockHelperThreadState helperLock;
@@ -2817,20 +2814,6 @@ void GCRuntime::endPreparePhase(JS::GCReason reason) {
haveDiscardedJITCodeThisSlice = true;
/*
- * Relazify functions after discarding JIT code (we can't relazify
- * functions with JIT code) and before the actual mark phase, so that
- * the current GC can collect the JSScripts we're unlinking here. We do
- * this only when we're performing a shrinking GC, as too much
- * relazification can cause performance issues when we have to reparse
- * the same functions over and over.
- */
- if (isShrinkingGC()) {
- relazifyFunctionsForShrinkingGC();
- purgePropMapTablesForShrinkingGC();
- purgeSourceURLsForShrinkingGC();
- }
-
- /*
* We must purge the runtime at the beginning of an incremental GC. The
* danger if we purge later is that the snapshot invariant of
* incremental GC will be broken, as follows. If some object is
@@ -2840,8 +2823,25 @@ void GCRuntime::endPreparePhase(JS::GCReason reason) {
* it. This object might never be marked, so a GC hazard would exist.
*/
purgeRuntime();
+ }
+
+ // This will start background free for lifo blocks queued by purgeRuntime,
+ // even if there's nothing in the nursery.
+ collectNurseryFromMajorGC(reason);
- startBackgroundFreeAfterMinorGC();
+ {
+ gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::PREPARE);
+ // Relazify functions after discarding JIT code (we can't relazify functions
+ // with JIT code) and before the actual mark phase, so that the current GC
+ // can collect the JSScripts we're unlinking here. We do this only when
+ // we're performing a shrinking GC, as too much relazification can cause
+ // performance issues when we have to reparse the same functions over and
+ // over.
+ if (isShrinkingGC()) {
+ relazifyFunctionsForShrinkingGC();
+ purgePropMapTablesForShrinkingGC();
+ purgeSourceURLsForShrinkingGC();
+ }
if (isShutdownGC()) {
/* Clear any engine roots that may hold external data live. */
@@ -2903,6 +2903,21 @@ void GCRuntime::beginMarkPhase(AutoGCSession& session) {
#endif
for (GCZonesIter zone(this); !zone.done(); zone.next()) {
+ // In an incremental GC, clear the arena free lists to ensure that
+ // subsequent allocations refill them and end up marking new cells black.
+ // See arenaAllocatedDuringGC().
+ zone->arenas.clearFreeLists();
+
+#ifdef JS_GC_ZEAL
+ if (hasZealMode(ZealMode::YieldBeforeRootMarking)) {
+ for (auto kind : AllAllocKinds()) {
+ for (ArenaIter arena(zone, kind); !arena.done(); arena.next()) {
+ arena->checkNoMarkedCells();
+ }
+ }
+ }
+#endif
+
// Incremental marking barriers are enabled at this point.
zone->changeGCState(Zone::Prepare, zone->initialMarkingState());
@@ -3715,11 +3730,8 @@ void GCRuntime::incrementalSlice(SliceBudget& budget, JS::GCReason reason,
[[fallthrough]];
case State::MarkRoots:
- if (NeedToCollectNursery(this)) {
- collectNurseryFromMajorGC(reason);
- }
-
endPreparePhase(reason);
+
beginMarkPhase(session);
incrementalState = State::Mark;
@@ -3878,8 +3890,11 @@ void GCRuntime::incrementalSlice(SliceBudget& budget, JS::GCReason reason,
}
void GCRuntime::collectNurseryFromMajorGC(JS::GCReason reason) {
- collectNursery(gcOptions(), reason,
+ collectNursery(gcOptions(), JS::GCReason::EVICT_NURSERY,
gcstats::PhaseKind::EVICT_NURSERY_FOR_MAJOR_GC);
+
+ MOZ_ASSERT(nursery().isEmpty());
+ MOZ_ASSERT(storeBuffer().isEmpty());
}
bool GCRuntime::hasForegroundWork() const {
@@ -4733,26 +4748,43 @@ void GCRuntime::collectNursery(JS::GCOptions options, JS::GCReason reason,
gcstats::AutoPhase ap(stats(), phase);
nursery().collect(options, reason);
- MOZ_ASSERT(nursery().isEmpty());
startBackgroundFreeAfterMinorGC();
+
+ // We ignore gcMaxBytes when allocating for minor collection. However, if we
+ // overflowed, we disable the nursery. The next time we allocate, we'll fail
+ // because bytes >= gcMaxBytes.
+ if (heapSize.bytes() >= tunables.gcMaxBytes()) {
+ if (!nursery().isEmpty()) {
+ nursery().collect(options, JS::GCReason::DISABLE_GENERATIONAL_GC);
+ MOZ_ASSERT(nursery().isEmpty());
+ startBackgroundFreeAfterMinorGC();
+ }
+ nursery().disable();
+ }
}
void GCRuntime::startBackgroundFreeAfterMinorGC() {
- MOZ_ASSERT(nursery().isEmpty());
+ // Called after nursery collection. Free whatever blocks are safe to free now.
- {
- AutoLockHelperThreadState lock;
+ AutoLockHelperThreadState lock;
- lifoBlocksToFree.ref().transferFrom(&lifoBlocksToFreeAfterMinorGC.ref());
+ lifoBlocksToFree.ref().transferFrom(&lifoBlocksToFreeAfterNextMinorGC.ref());
- if (lifoBlocksToFree.ref().isEmpty() &&
- buffersToFreeAfterMinorGC.ref().empty()) {
- return;
- }
+ if (nursery().tenuredEverything) {
+ lifoBlocksToFree.ref().transferFrom(
+ &lifoBlocksToFreeAfterFullMinorGC.ref());
+ } else {
+ lifoBlocksToFreeAfterNextMinorGC.ref().transferFrom(
+ &lifoBlocksToFreeAfterFullMinorGC.ref());
+ }
+
+ if (lifoBlocksToFree.ref().isEmpty() &&
+ buffersToFreeAfterMinorGC.ref().empty()) {
+ return;
}
- startBackgroundFree();
+ freeTask.startOrRunIfIdle(lock);
}
bool GCRuntime::gcIfRequestedImpl(bool eagerOk) {
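The GC.cpp changes above split the former lifoBlocksToFreeAfterMinorGC queue into lifoBlocksToFreeAfterFullMinorGC and lifoBlocksToFreeAfterNextMinorGC, and move the gcMaxBytes overflow handling from Nursery::collect() into GCRuntime::collectNursery(). A condensed sketch of the resulting drain logic, using the names introduced by this patch (an illustrative restatement, not the patched code verbatim):

    void GCRuntime::startBackgroundFreeAfterMinorGC() {
      AutoLockHelperThreadState lock;

      // Blocks deferred until "the next minor GC" are always safe to free now.
      lifoBlocksToFree.ref().transferFrom(&lifoBlocksToFreeAfterNextMinorGC.ref());

      if (nursery().tenuredEverything) {
        // Everything was evicted from the nursery, so blocks waiting for a
        // full minor GC can be freed as well.
        lifoBlocksToFree.ref().transferFrom(&lifoBlocksToFreeAfterFullMinorGC.ref());
      } else {
        // Some cells stayed in the nursery; defer these blocks one more
        // collection.
        lifoBlocksToFreeAfterNextMinorGC.ref().transferFrom(
            &lifoBlocksToFreeAfterFullMinorGC.ref());
      }

      if (!lifoBlocksToFree.ref().isEmpty() ||
          !buffersToFreeAfterMinorGC.ref().empty()) {
        freeTask.startOrRunIfIdle(lock);
      }
    }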
diff --git a/js/src/gc/GC.h b/js/src/gc/GC.h
index 4e4634d804..7f603b066f 100644
--- a/js/src/gc/GC.h
+++ b/js/src/gc/GC.h
@@ -83,7 +83,8 @@ class TenuredChunk;
_("maxHelperThreads", JSGC_MAX_HELPER_THREADS, true) \
_("helperThreadCount", JSGC_HELPER_THREAD_COUNT, false) \
_("markingThreadCount", JSGC_MARKING_THREAD_COUNT, true) \
- _("systemPageSizeKB", JSGC_SYSTEM_PAGE_SIZE_KB, false)
+ _("systemPageSizeKB", JSGC_SYSTEM_PAGE_SIZE_KB, false) \
+ _("semispaceNurseryEnabled", JSGC_SEMISPACE_NURSERY_ENABLED, true)
// Get the key and writability given a GC parameter name.
extern bool GetGCParameterInfo(const char* name, JSGCParamKey* keyOut,
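The new table entry above exposes the semispace setting through the ordinary GC parameter machinery, so it can be toggled like the other boolean parameters. A usage sketch, assuming the standard JSAPI entry points and the shell's gcparam helper (not part of this patch):

    // From a C++ embedder:
    JS_SetGCParameter(cx, JSGC_SEMISPACE_NURSERY_ENABLED, 1);
    uint32_t enabled = JS_GetGCParameter(cx, JSGC_SEMISPACE_NURSERY_ENABLED);

    // From the JS shell:
    //   gcparam("semispaceNurseryEnabled", 1);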
diff --git a/js/src/gc/GCAPI.cpp b/js/src/gc/GCAPI.cpp
index 293bfce80d..d2eab44dea 100644
--- a/js/src/gc/GCAPI.cpp
+++ b/js/src/gc/GCAPI.cpp
@@ -487,8 +487,7 @@ JS_PUBLIC_API bool JS::WasIncrementalGC(JSRuntime* rt) {
bool js::gc::CreateUniqueIdForNativeObject(NativeObject* nobj, uint64_t* uidp) {
JSRuntime* runtime = nobj->runtimeFromMainThread();
*uidp = NextCellUniqueId(runtime);
- JSContext* cx = runtime->mainContextFromOwnThread();
- return nobj->setUniqueId(cx, *uidp);
+ return nobj->setUniqueId(runtime, *uidp);
}
bool js::gc::CreateUniqueIdForNonNativeObject(Cell* cell,
@@ -793,6 +792,15 @@ const char* CellColorName(CellColor color) {
} /* namespace gc */
} /* namespace js */
+JS_PUBLIC_API bool js::gc::IsDeadNurseryObject(JSObject* obj) {
+ MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());
+ MOZ_ASSERT(obj);
+ MOZ_ASSERT(IsInsideNursery(obj));
+ MOZ_ASSERT(!IsForwarded(obj));
+
+ return obj->runtimeFromMainThread()->gc.nursery().inCollectedRegion(obj);
+}
+
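IsDeadNurseryObject gives callers a way to ask, during a minor collection, whether a nursery object that has not been forwarded lies in the region being collected and therefore did not survive. A hypothetical caller, respecting the assertions above (illustrative only):

    // During a minor GC, for |obj| known to be in the nursery and not yet
    // forwarded:
    if (js::gc::IsDeadNurseryObject(obj)) {
      // |obj| is in the collected region and was not promoted; drop weak
      // references to it or run its finalization now.
    }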
JS_PUBLIC_API void js::gc::FinalizeDeadNurseryObject(JSContext* cx,
JSObject* obj) {
CHECK_THREAD(cx);
diff --git a/js/src/gc/GCInternals.h b/js/src/gc/GCInternals.h
index c234ad4b2b..9a3edc2c9d 100644
--- a/js/src/gc/GCInternals.h
+++ b/js/src/gc/GCInternals.h
@@ -329,7 +329,7 @@ inline bool IsOOMReason(JS::GCReason reason) {
reason == JS::GCReason::MEM_PRESSURE;
}
-void* AllocateCellInGC(JS::Zone* zone, AllocKind thingKind);
+void* AllocateTenuredCellInGC(JS::Zone* zone, AllocKind thingKind);
void ReadProfileEnv(const char* envName, const char* helpText, bool* enableOut,
bool* workersOut, mozilla::TimeDuration* thresholdOut);
diff --git a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h
index c9f660b4d7..851e477359 100644
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -1205,7 +1205,8 @@ class GCRuntime {
* a background thread.
*/
HelperThreadLockData<LifoAlloc> lifoBlocksToFree;
- MainThreadData<LifoAlloc> lifoBlocksToFreeAfterMinorGC;
+ MainThreadData<LifoAlloc> lifoBlocksToFreeAfterFullMinorGC;
+ MainThreadData<LifoAlloc> lifoBlocksToFreeAfterNextMinorGC;
HelperThreadLockData<Nursery::BufferSet> buffersToFreeAfterMinorGC;
/* Index of current sweep group (for stats). */
diff --git a/js/src/gc/Heap-inl.h b/js/src/gc/Heap-inl.h
index 95bd841cae..30937a7236 100644
--- a/js/src/gc/Heap-inl.h
+++ b/js/src/gc/Heap-inl.h
@@ -44,6 +44,10 @@ inline void js::gc::Arena::init(JS::Zone* zoneArg, AllocKind kind,
}
setAsFullyUnused();
+
+#ifdef DEBUG
+ checkNoMarkedCells();
+#endif
}
inline void js::gc::Arena::release(const AutoLockGC& lock) {
diff --git a/js/src/gc/MallocedBlockCache.h b/js/src/gc/MallocedBlockCache.h
index cd7d1e1064..6fc577044e 100644
--- a/js/src/gc/MallocedBlockCache.h
+++ b/js/src/gc/MallocedBlockCache.h
@@ -70,6 +70,8 @@ class MallocedBlockCache {
~MallocedBlockCache();
+ static inline size_t listIDForSize(size_t size);
+
// Allocation and freeing. Use `alloc` to allocate. `allocSlow` is
// `alloc`s fallback path. Do not call it directly, since it doesn't handle
// all cases by itself.
@@ -89,7 +91,8 @@ class MallocedBlockCache {
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
};
-inline PointerAndUint7 MallocedBlockCache::alloc(size_t size) {
+/* static */
+inline size_t MallocedBlockCache::listIDForSize(size_t size) {
// Figure out which free list can give us a block of size `size`, after it
// has been rounded up to a multiple of `step`.
//
@@ -122,11 +125,23 @@ inline PointerAndUint7 MallocedBlockCache::alloc(size_t size) {
size_t i = size / STEP;
MOZ_ASSERT(i > 0);
+ if (i >= NUM_LISTS) {
+ return OVERSIZE_BLOCK_LIST_ID;
+ }
+
+ return i;
+}
+
+inline PointerAndUint7 MallocedBlockCache::alloc(size_t size) {
+ size_t i = listIDForSize(size);
+ MOZ_ASSERT(i < NUM_LISTS);
+
// Fast path: try to pull a block from the relevant list.
- if (MOZ_LIKELY(i < NUM_LISTS && // "block is small enough to cache"
- !lists[i].empty())) { // "a cached block is available"
+ if (MOZ_LIKELY(
+ i != OVERSIZE_BLOCK_LIST_ID && // "block is small enough to cache"
+ !lists[i].empty())) { // "a cached block is available"
// Check that i is the right list
- MOZ_ASSERT(i * STEP == size);
+ MOZ_ASSERT(i * STEP == js::RoundUp(size, STEP));
void* block = lists[i].popCopy();
return PointerAndUint7(block, i);
}
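Factoring the size-to-free-list mapping out into listIDForSize lets other code, for example trackTrailerOnPromotion later in this patch, recompute a block's list id from its size alone. A condensed sketch of the mapping, assuming the constants referenced above (STEP, NUM_LISTS, OVERSIZE_BLOCK_LIST_ID); the concrete values mentioned below are illustrative:

    size_t MallocedBlockCache::listIDForSize(size_t size) {
      size = js::RoundUp(size, STEP);  // list i holds blocks of size i * STEP
      size_t i = size / STEP;
      // E.g. if STEP were 16, a 40-byte request rounds up to 48 and maps to
      // list 3; anything too large for the cached lists is "oversize".
      return i < NUM_LISTS ? i : OVERSIZE_BLOCK_LIST_ID;
    }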
diff --git a/js/src/gc/Nursery-inl.h b/js/src/gc/Nursery-inl.h
index 3a29d76c07..3be063697a 100644
--- a/js/src/gc/Nursery-inl.h
+++ b/js/src/gc/Nursery-inl.h
@@ -16,6 +16,12 @@
#include "vm/JSContext.h"
#include "vm/NativeObject.h"
+namespace js {
+namespace gc {
+struct Cell;
+} // namespace gc
+} // namespace js
+
inline JSRuntime* js::Nursery::runtime() const { return gc->rt; }
template <typename T>
@@ -23,6 +29,50 @@ bool js::Nursery::isInside(const SharedMem<T>& p) const {
return isInside(p.unwrap(/*safe - used for value in comparison above*/));
}
+inline bool js::Nursery::shouldTenure(gc::Cell* cell) {
+ MOZ_ASSERT(semispaceEnabled());
+ MOZ_ASSERT(inCollectedRegion(cell));
+
+ size_t offset = fromSpace.offsetFromAddress(uintptr_t(cell));
+ MOZ_ASSERT(offset >=
+ fromSpace.offsetFromExclusiveAddress(fromSpace.startPosition_));
+ return offset <= tenureThreshold_;
+}
+
+inline bool js::Nursery::inCollectedRegion(gc::Cell* cell) const {
+ gc::ChunkBase* chunk = gc::detail::GetCellChunkBase(cell);
+ return chunk->getKind() == gc::ChunkKind::NurseryFromSpace;
+}
+
+inline bool js::Nursery::inCollectedRegion(void* ptr) const {
+ if (!semispaceEnabled()) {
+ return toSpace.isInside(ptr);
+ }
+
+ return fromSpace.isInside(ptr);
+}
+
+inline size_t js::Nursery::Space::offsetFromExclusiveAddress(
+ uintptr_t addr) const {
+ if ((addr & gc::ChunkMask) == 0) {
+ // |addr| points one past the end of the previous chunk.
+ return offsetFromAddress(addr - 1) + 1;
+ }
+
+ return offsetFromAddress(addr);
+}
+
+inline size_t js::Nursery::Space::offsetFromAddress(uintptr_t addr) const {
+ gc::ChunkBase* chunk =
+ gc::detail::GetCellChunkBase(reinterpret_cast<gc::Cell*>(addr));
+ MOZ_ASSERT(chunk->getKind() == kind);
+ MOZ_ASSERT(findChunkIndex(addr & ~gc::ChunkMask) == chunk->nurseryChunkIndex);
+
+ uint32_t offset = addr & gc::ChunkMask;
+ MOZ_ASSERT(offset >= sizeof(gc::ChunkBase));
+ return (chunk->nurseryChunkIndex << gc::ChunkShift) | offset;
+}
+
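The offset helpers above pack a cell's position into a single value, with the nursery chunk index in the high bits and the offset within that chunk in the low bits, so positions compare in allocation order across chunks. shouldTenure then promotes exactly the from-space cells at or below tenureThreshold_, which (as set in Nursery.cpp below) marks the end of the data copied by the previous semispace collection. A minimal sketch of the packing, relying only on ChunkShift and ChunkMask as used above:

    size_t packedOffset(uint8_t chunkIndex, uintptr_t addr) {
      // An earlier chunk, or an earlier address within the same chunk, always
      // produces a smaller value.
      return (size_t(chunkIndex) << js::gc::ChunkShift) |
             (addr & js::gc::ChunkMask);
    }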
MOZ_ALWAYS_INLINE /* static */ bool js::Nursery::getForwardedPointer(
js::gc::Cell** ref) {
js::gc::Cell* cell = (*ref);
@@ -80,7 +130,7 @@ inline void js::Nursery::setForwardingPointer(void* oldData, void* newData,
inline void js::Nursery::setDirectForwardingPointer(void* oldData,
void* newData) {
MOZ_ASSERT(isInside(oldData));
- MOZ_ASSERT(!isInside(newData));
+ MOZ_ASSERT_IF(isInside(newData), !inCollectedRegion(newData));
new (oldData) BufferRelocationOverlay{newData};
}
@@ -123,8 +173,8 @@ inline void* js::Nursery::tryAllocateCell(gc::AllocSite* site, size_t size,
inline void* js::Nursery::tryAllocate(size_t size) {
MOZ_ASSERT(isEnabled());
- MOZ_ASSERT(!JS::RuntimeHeapIsBusy());
- MOZ_ASSERT_IF(currentChunk_ == startChunk_, position() >= startPosition_);
+ MOZ_ASSERT_IF(JS::RuntimeHeapIsBusy(), JS::RuntimeHeapIsMinorCollecting());
+ MOZ_ASSERT_IF(currentChunk() == startChunk(), position() >= startPosition());
MOZ_ASSERT(size % gc::CellAlignBytes == 0);
MOZ_ASSERT(position() % gc::CellAlignBytes == 0);
@@ -138,7 +188,7 @@ inline void* js::Nursery::tryAllocate(size_t size) {
"Successful allocation cannot result in nullptr");
}
- position_ = position() + size;
+ toSpace.position_ = position() + size;
DebugOnlyPoison(ptr, JS_ALLOCATED_NURSERY_PATTERN, size,
MemCheckKind::MakeUndefined);
@@ -148,30 +198,33 @@ inline void* js::Nursery::tryAllocate(size_t size) {
inline bool js::Nursery::registerTrailer(PointerAndUint7 blockAndListID,
size_t nBytes) {
- MOZ_ASSERT(trailersAdded_.length() == trailersRemoved_.length());
+ MOZ_ASSERT(toSpace.trailersAdded_.length() ==
+ toSpace.trailersRemoved_.length());
MOZ_ASSERT(nBytes > 0);
- if (MOZ_UNLIKELY(!trailersAdded_.append(blockAndListID))) {
+ if (MOZ_UNLIKELY(!toSpace.trailersAdded_.append(blockAndListID))) {
return false;
}
- if (MOZ_UNLIKELY(!trailersRemoved_.append(nullptr))) {
- trailersAdded_.popBack();
+ if (MOZ_UNLIKELY(!toSpace.trailersRemoved_.append(nullptr))) {
+ toSpace.trailersAdded_.popBack();
return false;
}
// This is a clone of the logic in ::registerMallocedBuffer. It may be
// that some other heuristic is better, once we know more about the
// typical behaviour of wasm-GC applications.
- trailerBytes_ += nBytes;
- if (MOZ_UNLIKELY(trailerBytes_ > capacity() * 8)) {
+ toSpace.trailerBytes_ += nBytes;
+ if (MOZ_UNLIKELY(toSpace.trailerBytes_ > capacity() * 8)) {
requestMinorGC(JS::GCReason::NURSERY_TRAILERS);
}
return true;
}
inline void js::Nursery::unregisterTrailer(void* block) {
- MOZ_ASSERT(trailersRemovedUsed_ < trailersRemoved_.length());
- trailersRemoved_[trailersRemovedUsed_] = block;
- trailersRemovedUsed_++;
+ // Unlike removeMallocedBuffer, this is only called during minor GC.
+ MOZ_ASSERT(fromSpace.trailersRemovedUsed_ <
+ fromSpace.trailersRemoved_.length());
+ fromSpace.trailersRemoved_[fromSpace.trailersRemovedUsed_] = block;
+ fromSpace.trailersRemovedUsed_++;
}
namespace js {
@@ -181,13 +234,20 @@ namespace js {
// instead.
template <typename T>
-static inline T* AllocateCellBuffer(JSContext* cx, gc::Cell* cell,
+static inline T* AllocateCellBuffer(Nursery& nursery, gc::Cell* cell,
uint32_t count) {
size_t nbytes = RoundUp(count * sizeof(T), sizeof(Value));
- auto* buffer = static_cast<T*>(cx->nursery().allocateBuffer(
- cell->zone(), cell, nbytes, js::MallocArena));
+ return static_cast<T*>(
+ nursery.allocateBuffer(cell->zone(), cell, nbytes, js::MallocArena));
+}
+
+template <typename T>
+static inline T* AllocateCellBuffer(JSContext* cx, gc::Cell* cell,
+ uint32_t count) {
+ T* buffer = AllocateCellBuffer<T>(cx->nursery(), cell, count);
if (!buffer) {
ReportOutOfMemory(cx);
+ return nullptr;
}
return buffer;
diff --git a/js/src/gc/Nursery.cpp b/js/src/gc/Nursery.cpp
index 660daa8d4c..4753848c56 100644
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -38,6 +38,7 @@
#include "gc/Heap-inl.h"
#include "gc/Marking-inl.h"
#include "gc/StableCellHasher-inl.h"
+#include "gc/StoreBuffer-inl.h"
#include "vm/GeckoProfiler-inl.h"
using namespace js;
@@ -50,15 +51,22 @@ using mozilla::TimeStamp;
namespace js {
+static constexpr size_t NurseryChunkHeaderSize =
+ RoundUp(sizeof(ChunkBase), CellAlignBytes);
+
+// The amount of space in a nursery chunk available to allocations.
+static constexpr size_t NurseryChunkUsableSize =
+ ChunkSize - NurseryChunkHeaderSize;
+
struct NurseryChunk : public ChunkBase {
- char data[Nursery::NurseryChunkUsableSize];
+ alignas(CellAlignBytes) uint8_t data[NurseryChunkUsableSize];
- static NurseryChunk* fromChunk(gc::TenuredChunk* chunk);
+ static NurseryChunk* fromChunk(TenuredChunk* chunk, ChunkKind kind,
+ uint8_t index);
- explicit NurseryChunk(JSRuntime* runtime)
- : ChunkBase(runtime, &runtime->gc.storeBuffer()) {}
+ explicit NurseryChunk(JSRuntime* runtime, ChunkKind kind, uint8_t chunkIndex)
+ : ChunkBase(runtime, &runtime->gc.storeBuffer(), kind, chunkIndex) {}
- void poisonAndInit(JSRuntime* rt, size_t size = ChunkSize);
void poisonRange(size_t start, size_t end, uint8_t value,
MemCheckKind checkKind);
void poisonAfterEvict(size_t extent = ChunkSize);
@@ -75,22 +83,29 @@ struct NurseryChunk : public ChunkBase {
uintptr_t start() const { return uintptr_t(&data); }
uintptr_t end() const { return uintptr_t(this) + ChunkSize; }
};
-static_assert(sizeof(js::NurseryChunk) == gc::ChunkSize,
- "Nursery chunk size must match gc::Chunk size.");
+static_assert(sizeof(NurseryChunk) == ChunkSize,
+ "Nursery chunk size must match Chunk size.");
+static_assert(offsetof(NurseryChunk, data) == NurseryChunkHeaderSize);
class NurseryDecommitTask : public GCParallelTask {
public:
explicit NurseryDecommitTask(gc::GCRuntime* gc);
- bool reserveSpaceForBytes(size_t nbytes);
+ bool reserveSpaceForChunks(size_t nchunks);
bool isEmpty(const AutoLockHelperThreadState& lock) const;
void queueChunk(NurseryChunk* chunk, const AutoLockHelperThreadState& lock);
- void queueRange(size_t newCapacity, NurseryChunk& chunk,
+ void queueRange(size_t newCapacity, NurseryChunk* chunk,
const AutoLockHelperThreadState& lock);
private:
+ struct Region {
+ NurseryChunk* chunk;
+ size_t startOffset;
+ };
+
using NurseryChunkVector = Vector<NurseryChunk*, 0, SystemAllocPolicy>;
+ using RegionVector = Vector<Region, 2, SystemAllocPolicy>;
void run(AutoLockHelperThreadState& lock) override;
@@ -98,25 +113,21 @@ class NurseryDecommitTask : public GCParallelTask {
const NurseryChunkVector& chunksToDecommit() const {
return chunksToDecommit_.ref();
}
+ RegionVector& regionsToDecommit() { return regionsToDecommit_.ref(); }
+ const RegionVector& regionsToDecommit() const {
+ return regionsToDecommit_.ref();
+ }
MainThreadOrGCTaskData<NurseryChunkVector> chunksToDecommit_;
-
- MainThreadOrGCTaskData<NurseryChunk*> partialChunk;
- MainThreadOrGCTaskData<size_t> partialCapacity;
+ MainThreadOrGCTaskData<RegionVector> regionsToDecommit_;
};
} // namespace js
-inline void js::NurseryChunk::poisonAndInit(JSRuntime* rt, size_t size) {
- MOZ_ASSERT(size >= sizeof(ChunkBase));
- MOZ_ASSERT(size <= ChunkSize);
- poisonRange(0, size, JS_FRESH_NURSERY_PATTERN, MemCheckKind::MakeUndefined);
- new (this) NurseryChunk(rt);
-}
-
inline void js::NurseryChunk::poisonRange(size_t start, size_t end,
uint8_t value,
MemCheckKind checkKind) {
+ MOZ_ASSERT(start >= NurseryChunkHeaderSize);
MOZ_ASSERT((start % gc::CellAlignBytes) == 0);
MOZ_ASSERT((end % gc::CellAlignBytes) == 0);
MOZ_ASSERT(end >= start);
@@ -132,12 +143,12 @@ inline void js::NurseryChunk::poisonRange(size_t start, size_t end,
}
inline void js::NurseryChunk::poisonAfterEvict(size_t extent) {
- poisonRange(sizeof(ChunkBase), extent, JS_SWEPT_NURSERY_PATTERN,
+ poisonRange(NurseryChunkHeaderSize, extent, JS_SWEPT_NURSERY_PATTERN,
MemCheckKind::MakeNoAccess);
}
inline void js::NurseryChunk::markPagesUnusedHard(size_t startOffset) {
- MOZ_ASSERT(startOffset >= sizeof(ChunkBase)); // Don't touch the header.
+ MOZ_ASSERT(startOffset >= NurseryChunkHeaderSize); // Don't touch the header.
MOZ_ASSERT(startOffset >= SystemPageSize());
MOZ_ASSERT(startOffset <= ChunkSize);
uintptr_t start = uintptr_t(this) + startOffset;
@@ -146,7 +157,7 @@ inline void js::NurseryChunk::markPagesUnusedHard(size_t startOffset) {
}
inline bool js::NurseryChunk::markPagesInUseHard(size_t endOffset) {
- MOZ_ASSERT(endOffset >= sizeof(ChunkBase));
+ MOZ_ASSERT(endOffset >= NurseryChunkHeaderSize);
MOZ_ASSERT(endOffset >= SystemPageSize());
MOZ_ASSERT(endOffset <= ChunkSize);
uintptr_t start = uintptr_t(this) + SystemPageSize();
@@ -155,23 +166,25 @@ inline bool js::NurseryChunk::markPagesInUseHard(size_t endOffset) {
}
// static
-inline js::NurseryChunk* js::NurseryChunk::fromChunk(TenuredChunk* chunk) {
- return reinterpret_cast<NurseryChunk*>(chunk);
+inline js::NurseryChunk* js::NurseryChunk::fromChunk(TenuredChunk* chunk,
+ ChunkKind kind,
+ uint8_t index) {
+ return new (chunk) NurseryChunk(chunk->runtime, kind, index);
}
js::NurseryDecommitTask::NurseryDecommitTask(gc::GCRuntime* gc)
: GCParallelTask(gc, gcstats::PhaseKind::NONE) {
// This can occur outside GCs so doesn't have a stats phase.
+ MOZ_ALWAYS_TRUE(regionsToDecommit().reserve(2));
}
bool js::NurseryDecommitTask::isEmpty(
const AutoLockHelperThreadState& lock) const {
- return chunksToDecommit().empty() && !partialChunk;
+ return chunksToDecommit().empty() && regionsToDecommit().empty();
}
-bool js::NurseryDecommitTask::reserveSpaceForBytes(size_t nbytes) {
+bool js::NurseryDecommitTask::reserveSpaceForChunks(size_t nchunks) {
MOZ_ASSERT(isIdle());
- size_t nchunks = HowMany(nbytes, ChunkSize);
return chunksToDecommit().reserve(nchunks);
}
@@ -182,15 +195,14 @@ void js::NurseryDecommitTask::queueChunk(
}
void js::NurseryDecommitTask::queueRange(
- size_t newCapacity, NurseryChunk& newChunk,
+ size_t newCapacity, NurseryChunk* chunk,
const AutoLockHelperThreadState& lock) {
MOZ_ASSERT(isIdle(lock));
- MOZ_ASSERT(!partialChunk);
+ MOZ_ASSERT(regionsToDecommit_.ref().length() < 2);
MOZ_ASSERT(newCapacity < ChunkSize);
MOZ_ASSERT(newCapacity % SystemPageSize() == 0);
- partialChunk = &newChunk;
- partialCapacity = newCapacity;
+ regionsToDecommit().infallibleAppend(Region{chunk, newCapacity});
}
void js::NurseryDecommitTask::run(AutoLockHelperThreadState& lock) {
@@ -204,25 +216,20 @@ void js::NurseryDecommitTask::run(AutoLockHelperThreadState& lock) {
gc->recycleChunk(tenuredChunk, lock);
}
- if (partialChunk) {
- {
- AutoUnlockHelperThreadState unlock(lock);
- partialChunk->markPagesUnusedHard(partialCapacity);
- }
- partialChunk = nullptr;
- partialCapacity = 0;
+ while (!regionsToDecommit().empty()) {
+ Region region = regionsToDecommit().popCopy();
+ AutoUnlockHelperThreadState unlock(lock);
+ region.chunk->markPagesUnusedHard(region.startOffset);
}
}
js::Nursery::Nursery(GCRuntime* gc)
- : position_(0),
- currentEnd_(0),
+ : toSpace(ChunkKind::NurseryToSpace),
+ fromSpace(ChunkKind::NurseryFromSpace),
gc(gc),
- currentChunk_(0),
- startChunk_(0),
- startPosition_(0),
capacity_(0),
enableProfiling_(false),
+ semispaceEnabled_(gc::TuningDefaults::SemispaceNurseryEnabled),
canAllocateStrings_(true),
canAllocateBigInts_(true),
reportDeduplications_(false),
@@ -232,6 +239,11 @@ js::Nursery::Nursery(GCRuntime* gc)
prevPosition_(0),
hasRecentGrowthData(false),
smoothedTargetSize(0.0) {
+ // Try to keep the fields used by the allocation fast path together at the
+ // start of the nursery.
+ static_assert(offsetof(Nursery, toSpace.position_) < TypicalCacheLineSize);
+ static_assert(offsetof(Nursery, toSpace.currentEnd_) < TypicalCacheLineSize);
+
const char* env = getenv("MOZ_NURSERY_STRINGS");
if (env && *env) {
canAllocateStrings_ = (*env == '1');
@@ -316,12 +328,13 @@ bool js::Nursery::init(AutoLockGCBgAlloc& lock) {
js::Nursery::~Nursery() { disable(); }
void js::Nursery::enable() {
- MOZ_ASSERT(isEmpty());
- MOZ_ASSERT(!gc->isVerifyPreBarriersEnabled());
if (isEnabled()) {
return;
}
+ MOZ_ASSERT(isEmpty());
+ MOZ_ASSERT(!gc->isVerifyPreBarriersEnabled());
+
{
AutoLockGCBgAlloc lock(gc);
if (!initFirstChunk(lock)) {
@@ -344,25 +357,60 @@ void js::Nursery::enable() {
bool js::Nursery::initFirstChunk(AutoLockGCBgAlloc& lock) {
MOZ_ASSERT(!isEnabled());
+ MOZ_ASSERT(toSpace.chunks_.length() == 0);
+ MOZ_ASSERT(fromSpace.chunks_.length() == 0);
- capacity_ = tunables().gcMinNurseryBytes();
+ setCapacity(minSpaceSize());
- if (!decommitTask->reserveSpaceForBytes(capacity_) ||
- !allocateNextChunk(0, lock)) {
- capacity_ = 0;
+ size_t nchunks = toSpace.maxChunkCount_ + fromSpace.maxChunkCount_;
+ if (!decommitTask->reserveSpaceForChunks(nchunks) ||
+ !allocateNextChunk(lock)) {
+ setCapacity(0);
+ MOZ_ASSERT(toSpace.isEmpty());
+ MOZ_ASSERT(fromSpace.isEmpty());
return false;
}
- moveToStartOfChunk(0);
- setStartToCurrentPosition();
+ toSpace.moveToStartOfChunk(this, 0);
+ toSpace.setStartToCurrentPosition();
+
+ if (semispaceEnabled_) {
+ fromSpace.moveToStartOfChunk(this, 0);
+ fromSpace.setStartToCurrentPosition();
+ }
+
+ MOZ_ASSERT(toSpace.isEmpty());
+ MOZ_ASSERT(fromSpace.isEmpty());
+
poisonAndInitCurrentChunk();
// Clear any information about previous collections.
clearRecentGrowthData();
+ tenureThreshold_ = 0;
+
+#ifdef DEBUG
+ toSpace.checkKind(ChunkKind::NurseryToSpace);
+ fromSpace.checkKind(ChunkKind::NurseryFromSpace);
+#endif
+
return true;
}
+size_t RequiredChunkCount(size_t nbytes) {
+ return nbytes <= ChunkSize ? 1 : nbytes / ChunkSize;
+}
+
+void js::Nursery::setCapacity(size_t newCapacity) {
+ MOZ_ASSERT(newCapacity == roundSize(newCapacity));
+ capacity_ = newCapacity;
+ size_t count = RequiredChunkCount(newCapacity);
+ toSpace.maxChunkCount_ = count;
+ if (semispaceEnabled_) {
+ fromSpace.maxChunkCount_ = count;
+ }
+}
+
void js::Nursery::disable() {
MOZ_ASSERT(isEmpty());
if (!isEnabled()) {
@@ -371,15 +419,19 @@ void js::Nursery::disable() {
// Free all chunks.
decommitTask->join();
- freeChunksFrom(0);
+ freeChunksFrom(toSpace, 0);
+ freeChunksFrom(fromSpace, 0);
decommitTask->runFromMainThread();
- capacity_ = 0;
+ setCapacity(0);
// We must reset currentEnd_ so that there is no space for anything in the
// nursery. JIT'd code uses this even if the nursery is disabled.
- currentEnd_ = 0;
- position_ = 0;
+ toSpace = Space(ChunkKind::NurseryToSpace);
+ fromSpace = Space(ChunkKind::NurseryFromSpace);
+ MOZ_ASSERT(toSpace.isEmpty());
+ MOZ_ASSERT(fromSpace.isEmpty());
+
gc->storeBuffer().disable();
if (gc->wasInitialized()) {
@@ -464,16 +516,59 @@ void js::Nursery::discardCodeAndSetJitFlagsForZone(JS::Zone* zone) {
}
}
+void js::Nursery::setSemispaceEnabled(bool enabled) {
+ if (semispaceEnabled() == enabled) {
+ return;
+ }
+
+ bool wasEnabled = isEnabled();
+ if (wasEnabled) {
+ if (!isEmpty()) {
+ gc->minorGC(JS::GCReason::EVICT_NURSERY);
+ }
+ disable();
+ }
+
+ semispaceEnabled_ = enabled;
+
+ if (wasEnabled) {
+ enable();
+ }
+}
+
bool js::Nursery::isEmpty() const {
+ MOZ_ASSERT(fromSpace.isEmpty());
+
if (!isEnabled()) {
return true;
}
if (!gc->hasZealMode(ZealMode::GenerationalGC)) {
- MOZ_ASSERT(startChunk_ == 0);
- MOZ_ASSERT(startPosition_ == chunk(0).start());
+ MOZ_ASSERT(startChunk() == 0);
+ MOZ_ASSERT(startPosition() == chunk(0).start());
}
- return position() == startPosition_;
+
+ return toSpace.isEmpty();
+}
+
+bool js::Nursery::Space::isEmpty() const { return position_ == startPosition_; }
+
+static size_t AdjustSizeForSemispace(size_t size, bool semispaceEnabled) {
+ if (!semispaceEnabled) {
+ return size;
+ }
+
+ return Nursery::roundSize(size / 2);
+}
+
+size_t js::Nursery::maxSpaceSize() const {
+ return AdjustSizeForSemispace(tunables().gcMaxNurseryBytes(),
+ semispaceEnabled_);
+}
+
+size_t js::Nursery::minSpaceSize() const {
+ return AdjustSizeForSemispace(tunables().gcMinNurseryBytes(),
+ semispaceEnabled_);
}
#ifdef JS_GC_ZEAL
@@ -501,9 +596,10 @@ void js::Nursery::enterZealMode() {
MemCheckKind::MakeUndefined);
}
- capacity_ = RoundUp(tunables().gcMaxNurseryBytes(), ChunkSize);
+ setCapacity(maxSpaceSize());
- if (!decommitTask->reserveSpaceForBytes(capacity_)) {
+ size_t nchunks = toSpace.maxChunkCount_ + fromSpace.maxChunkCount_;
+ if (!decommitTask->reserveSpaceForChunks(nchunks)) {
oomUnsafe.crash("Nursery::enterZealMode");
}
@@ -517,8 +613,14 @@ void js::Nursery::leaveZealMode() {
MOZ_ASSERT(isEmpty());
- moveToStartOfChunk(0);
- setStartToCurrentPosition();
+ toSpace.moveToStartOfChunk(this, 0);
+ toSpace.setStartToCurrentPosition();
+
+ if (semispaceEnabled_) {
+ fromSpace.moveToStartOfChunk(this, 0);
+ fromSpace.setStartToCurrentPosition();
+ }
+
poisonAndInitCurrentChunk();
}
#endif // JS_GC_ZEAL
@@ -573,7 +675,7 @@ MOZ_NEVER_INLINE JS::GCReason Nursery::handleAllocationFailure() {
}
bool Nursery::moveToNextChunk() {
- unsigned chunkno = currentChunk_ + 1;
+ unsigned chunkno = currentChunk() + 1;
MOZ_ASSERT(chunkno <= maxChunkCount());
MOZ_ASSERT(chunkno <= allocatedChunkCount());
if (chunkno == maxChunkCount()) {
@@ -584,7 +686,7 @@ bool Nursery::moveToNextChunk() {
TimeStamp start = TimeStamp::Now();
{
AutoLockGCBgAlloc lock(gc);
- if (!allocateNextChunk(chunkno, lock)) {
+ if (!allocateNextChunk(lock)) {
return false;
}
}
@@ -688,16 +790,16 @@ void* js::Nursery::reallocateBuffer(Zone* zone, Cell* cell, void* oldBuffer,
}
if (!isInside(oldBuffer)) {
- MOZ_ASSERT(mallocedBufferBytes >= oldBytes);
+ MOZ_ASSERT(toSpace.mallocedBufferBytes >= oldBytes);
void* newBuffer =
zone->pod_realloc<uint8_t>((uint8_t*)oldBuffer, oldBytes, newBytes);
if (newBuffer) {
if (oldBuffer != newBuffer) {
MOZ_ALWAYS_TRUE(
- mallocedBuffers.rekeyAs(oldBuffer, newBuffer, newBuffer));
+ toSpace.mallocedBuffers.rekeyAs(oldBuffer, newBuffer, newBuffer));
}
- mallocedBufferBytes -= oldBytes;
- mallocedBufferBytes += newBytes;
+ toSpace.mallocedBufferBytes -= oldBytes;
+ toSpace.mallocedBufferBytes += newBytes;
}
return newBuffer;
}
@@ -723,27 +825,21 @@ void js::Nursery::freeBuffer(void* buffer, size_t nbytes) {
#ifdef DEBUG
/* static */
-inline bool Nursery::checkForwardingPointerLocation(void* ptr,
- bool expectedInside) {
- if (isInside(ptr) == expectedInside) {
- return true;
- }
-
+inline bool Nursery::checkForwardingPointerInsideNursery(void* ptr) {
// If a zero-capacity elements header lands right at the end of a chunk then
// elements data will appear to be in the next chunk. If we have a pointer to
// the very start of a chunk, check the previous chunk.
- if ((uintptr_t(ptr) & ChunkMask) == 0 &&
- isInside(reinterpret_cast<uint8_t*>(ptr) - 1) == expectedInside) {
- return true;
+ if ((uintptr_t(ptr) & ChunkMask) == 0) {
+ return isInside(reinterpret_cast<uint8_t*>(ptr) - 1);
}
- return false;
+ return isInside(ptr);
}
#endif
void Nursery::setIndirectForwardingPointer(void* oldData, void* newData) {
- MOZ_ASSERT(checkForwardingPointerLocation(oldData, true));
- MOZ_ASSERT(checkForwardingPointerLocation(newData, false));
+ MOZ_ASSERT(checkForwardingPointerInsideNursery(oldData));
+ // |newData| may be either in the nursery or in the malloc heap.
AutoEnterOOMUnsafeRegion oomUnsafe;
#ifdef DEBUG
@@ -791,7 +887,7 @@ void js::Nursery::forwardBufferPointer(uintptr_t* pSlotsElems) {
MOZ_ASSERT(IsWriteableAddress(buffer));
}
- MOZ_ASSERT(!isInside(buffer));
+ MOZ_ASSERT_IF(isInside(buffer), !inCollectedRegion(buffer));
*pSlotsElems = reinterpret_cast<uintptr_t>(buffer);
}
@@ -1063,7 +1159,7 @@ bool js::Nursery::wantEagerCollection() const {
return false;
}
- if (isEmpty() && capacity() == tunables().gcMinNurseryBytes()) {
+ if (isEmpty() && capacity() == minSpaceSize()) {
return false;
}
@@ -1108,7 +1204,7 @@ inline bool js::Nursery::isUnderused() const {
return false;
}
- if (capacity() == tunables().gcMinNurseryBytes()) {
+ if (capacity() == minSpaceSize()) {
return false;
}
@@ -1126,8 +1222,8 @@ void js::Nursery::collect(JS::GCOptions options, JS::GCReason reason) {
MOZ_ASSERT(!rt->mainContextFromOwnThread()->suppressGC);
if (minorGCRequested()) {
- MOZ_ASSERT(position_ == chunk(currentChunk_).end());
- position_ = prevPosition_;
+ MOZ_ASSERT(position() == chunk(currentChunk()).end());
+ toSpace.position_ = prevPosition_;
prevPosition_ = 0;
minorGCTriggerReason_ = JS::GCReason::NO_REASON;
rt->mainContextFromOwnThread()->clearPendingInterrupt(
@@ -1141,7 +1237,7 @@ void js::Nursery::collect(JS::GCOptions options, JS::GCReason reason) {
// freed after this point.
gc->storeBuffer().clear();
- MOZ_ASSERT(!pretenuringNursery.hasAllocatedSites());
+ MOZ_ASSERT_IF(!semispaceEnabled_, !pretenuringNursery.hasAllocatedSites());
}
if (!isEnabled()) {
@@ -1162,10 +1258,11 @@ void js::Nursery::collect(JS::GCOptions options, JS::GCReason reason) {
previousGC.reason = JS::GCReason::NO_REASON;
previousGC.nurseryUsedBytes = usedSpace();
previousGC.nurseryCapacity = capacity();
- previousGC.nurseryCommitted = committed();
- previousGC.nurseryUsedChunkCount = currentChunk_ + 1;
+ previousGC.nurseryCommitted = totalCommitted();
+ previousGC.nurseryUsedChunkCount = currentChunk() + 1;
previousGC.tenuredBytes = 0;
previousGC.tenuredCells = 0;
+ tenuredEverything = true;
// If it isn't empty, it will call doCollection, and possibly after that
// isEmpty() will become true, so use another variable to keep track of the
@@ -1177,29 +1274,19 @@ void js::Nursery::collect(JS::GCOptions options, JS::GCReason reason) {
// space does not represent data that can be tenured
MOZ_ASSERT(result.tenuredBytes <=
(previousGC.nurseryUsedBytes -
- (sizeof(ChunkBase) * previousGC.nurseryUsedChunkCount)));
+ (NurseryChunkHeaderSize * previousGC.nurseryUsedChunkCount)));
previousGC.reason = reason;
previousGC.tenuredBytes = result.tenuredBytes;
previousGC.tenuredCells = result.tenuredCells;
- previousGC.nurseryUsedChunkCount = currentChunk_ + 1;
+ previousGC.nurseryUsedChunkCount = currentChunk() + 1;
}
// Resize the nursery.
maybeResizeNursery(options, reason);
- // Poison/initialise the first chunk.
- if (previousGC.nurseryUsedBytes) {
- // In most cases Nursery::clear() has not poisoned this chunk or marked it
- // as NoAccess; so we only need to poison the region used during the last
- // cycle. Also, if the heap was recently expanded we don't want to
- // re-poison the new memory. In both cases we only need to poison until
- // previousGC.nurseryUsedBytes.
- //
- // In cases where this is not true, like generational zeal mode or subchunk
- // mode, poisonAndInitCurrentChunk() will ignore its parameter. It will
- // also clamp the parameter.
- poisonAndInitCurrentChunk(previousGC.nurseryUsedBytes);
+ if (!semispaceEnabled()) {
+ poisonAndInitCurrentChunk();
}
bool validPromotionRate;
@@ -1207,19 +1294,10 @@ void js::Nursery::collect(JS::GCOptions options, JS::GCReason reason) {
startProfile(ProfileKey::Pretenure);
size_t sitesPretenured = 0;
- if (!wasEmpty) {
- sitesPretenured =
- doPretenuring(rt, reason, validPromotionRate, promotionRate);
- }
+ sitesPretenured =
+ doPretenuring(rt, reason, validPromotionRate, promotionRate);
endProfile(ProfileKey::Pretenure);
- // We ignore gcMaxBytes when allocating for minor collection. However, if we
- // overflowed, we disable the nursery. The next time we allocate, we'll fail
- // because bytes >= gcMaxBytes.
- if (gc->heapSize.bytes() >= tunables().gcMaxBytes()) {
- disable();
- }
-
previousGC.endTime =
TimeStamp::Now(); // Must happen after maybeResizeNursery.
endProfile(ProfileKey::Total);
@@ -1268,7 +1346,7 @@ void js::Nursery::sendTelemetry(JS::GCReason reason, TimeDuration totalTime,
rt->metrics().GC_MINOR_REASON_LONG(uint32_t(reason));
}
rt->metrics().GC_MINOR_US(totalTime);
- rt->metrics().GC_NURSERY_BYTES_2(committed());
+ rt->metrics().GC_NURSERY_BYTES_2(totalCommitted());
if (!wasEmpty) {
rt->metrics().GC_PRETENURE_COUNT_2(sitesPretenured);
@@ -1289,14 +1367,30 @@ void js::Nursery::printDeduplicationData(js::StringStats& prev,
}
}
-void js::Nursery::freeTrailerBlocks(void) {
+void js::Nursery::freeTrailerBlocks(JS::GCOptions options,
+ JS::GCReason reason) {
+ fromSpace.freeTrailerBlocks(mallocedBlockCache_);
+
+ if (options == JS::GCOptions::Shrink || gc::IsOOMReason(reason)) {
+ mallocedBlockCache_.clear();
+ return;
+ }
+
+ // Discard blocks from the cache at 0.05% per megabyte of nursery capacity,
+ // that is, 0.8% of blocks for a 16-megabyte nursery. This allows the cache
+ // to gradually discard unneeded blocks in long running applications.
+ mallocedBlockCache_.preen(0.05 * double(capacity()) / (1024.0 * 1024.0));
+}
+
+void js::Nursery::Space::freeTrailerBlocks(
+ MallocedBlockCache& mallocedBlockCache) {
// This routine frees those blocks denoted by the set
//
// trailersAdded_ (all of it)
// - trailersRemoved_ (entries with index below trailersRemovedUsed_)
//
// For each block, places it back on the nursery's small-malloced-block pool
- // by calling mallocedBlockCache_.free.
+ // by calling mallocedBlockCache.free.
MOZ_ASSERT(trailersAdded_.length() == trailersRemoved_.length());
MOZ_ASSERT(trailersRemovedUsed_ <= trailersRemoved_.length());
@@ -1321,7 +1415,7 @@ void js::Nursery::freeTrailerBlocks(void) {
if (!std::binary_search(trailersRemoved_.begin(),
trailersRemoved_.begin() + trailersRemovedUsed_,
blockPointer)) {
- mallocedBlockCache_.free(block);
+ mallocedBlockCache.free(block);
}
}
} else {
@@ -1348,7 +1442,7 @@ void js::Nursery::freeTrailerBlocks(void) {
const PointerAndUint7 blockAdded = trailersAdded_[iAdded];
const void* blockRemoved = trailersRemoved_[iRemoved];
if (blockAdded.pointer() < blockRemoved) {
- mallocedBlockCache_.free(blockAdded);
+ mallocedBlockCache.free(blockAdded);
continue;
}
// If this doesn't hold
@@ -1362,7 +1456,7 @@ void js::Nursery::freeTrailerBlocks(void) {
// added set.
for (/*keep going*/; iAdded < nAdded; iAdded++) {
const PointerAndUint7 block = trailersAdded_[iAdded];
- mallocedBlockCache_.free(block);
+ mallocedBlockCache.free(block);
}
}
@@ -1371,17 +1465,14 @@ void js::Nursery::freeTrailerBlocks(void) {
trailersRemoved_.clear();
trailersRemovedUsed_ = 0;
trailerBytes_ = 0;
-
- // Discard blocks from the cache at 0.05% per megabyte of nursery capacity,
- // that is, 0.8% of blocks for a 16-megabyte nursery. This allows the cache
- // to gradually discard unneeded blocks in long running applications.
- mallocedBlockCache_.preen(0.05 * double(capacity()) / (1024.0 * 1024.0));
}
size_t Nursery::sizeOfTrailerBlockSets(
mozilla::MallocSizeOf mallocSizeOf) const {
- return trailersAdded_.sizeOfExcludingThis(mallocSizeOf) +
- trailersRemoved_.sizeOfExcludingThis(mallocSizeOf);
+ MOZ_ASSERT(fromSpace.trailersAdded_.empty());
+ MOZ_ASSERT(fromSpace.trailersRemoved_.empty());
+ return toSpace.trailersAdded_.sizeOfExcludingThis(mallocSizeOf) +
+ toSpace.trailersRemoved_.sizeOfExcludingThis(mallocSizeOf);
}
js::Nursery::CollectionResult js::Nursery::doCollection(AutoGCSession& session,
@@ -1393,8 +1484,19 @@ js::Nursery::CollectionResult js::Nursery::doCollection(AutoGCSession& session,
AutoDisableProxyCheck disableStrictProxyChecking;
mozilla::DebugOnly<AutoEnterOOMUnsafeRegion> oomUnsafeRegion;
+ // Swap nursery spaces.
+ swapSpaces();
+ MOZ_ASSERT(toSpace.isEmpty());
+ MOZ_ASSERT(toSpace.mallocedBuffers.empty());
+ if (semispaceEnabled_) {
+ poisonAndInitCurrentChunk();
+ }
+
+ clearMapAndSetNurseryRanges();
+
// Move objects pointed to by roots from the nursery to the major heap.
- TenuringTracer mover(rt, this);
+ tenuredEverything = shouldTenureEverything(reason);
+ TenuringTracer mover(rt, this, tenuredEverything);
// Trace everything considered as a root by a minor GC.
traceRoots(session, mover);
@@ -1433,17 +1535,14 @@ js::Nursery::CollectionResult js::Nursery::doCollection(AutoGCSession& session,
// Sweep.
startProfile(ProfileKey::FreeMallocedBuffers);
- gc->queueBuffersForFreeAfterMinorGC(mallocedBuffers);
- mallocedBufferBytes = 0;
+ gc->queueBuffersForFreeAfterMinorGC(fromSpace.mallocedBuffers);
+ fromSpace.mallocedBufferBytes = 0;
endProfile(ProfileKey::FreeMallocedBuffers);
// Give trailer blocks associated with non-tenured Wasm{Struct,Array}Objects
// back to our `mallocedBlockCache_`.
startProfile(ProfileKey::FreeTrailerBlocks);
- freeTrailerBlocks();
- if (options == JS::GCOptions::Shrink || gc::IsOOMReason(reason)) {
- mallocedBlockCache_.clear();
- }
+ freeTrailerBlocks(options, reason);
endProfile(ProfileKey::FreeTrailerBlocks);
startProfile(ProfileKey::ClearNursery);
@@ -1466,7 +1565,28 @@ js::Nursery::CollectionResult js::Nursery::doCollection(AutoGCSession& session,
#endif
endProfile(ProfileKey::CheckHashTables);
- return {mover.getTenuredSize(), mover.getTenuredCells()};
+ if (semispaceEnabled_) {
+ // On the next collection, tenure everything before |tenureThreshold_|.
+ tenureThreshold_ = toSpace.offsetFromExclusiveAddress(position());
+ } else {
+ // Swap nursery spaces back because we only use one.
+ swapSpaces();
+ MOZ_ASSERT(toSpace.isEmpty());
+ }
+
+ MOZ_ASSERT(fromSpace.isEmpty());
+
+ if (semispaceEnabled_) {
+ poisonAndInitCurrentChunk();
+ }
+
+ return {mover.getPromotedSize(), mover.getPromotedCells()};
+}
+
+void js::Nursery::swapSpaces() {
+ std::swap(toSpace, fromSpace);
+ toSpace.setKind(ChunkKind::NurseryToSpace);
+ fromSpace.setKind(ChunkKind::NurseryFromSpace);
}
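Taken together, the doCollection and swapSpaces changes give the semispace minor GC roughly the following shape (a condensed, illustrative restatement using the names from this patch, not the literal code):

    // Semispace minor GC, in outline:
    swapSpaces();                        // old to-space becomes from-space
    tenuredEverything = shouldTenureEverything(reason);
    TenuringTracer mover(rt, this, tenuredEverything);
    traceRoots(session, mover);          // tenure cells below the threshold,
                                         // copy younger survivors to to-space
    // ... sweep, hand from-space buffers/trailers back, clear from-space ...
    tenureThreshold_ = toSpace.offsetFromExclusiveAddress(position());
    // The next collection tenures everything at or below this threshold.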
void js::Nursery::traceRoots(AutoGCSession& session, TenuringTracer& mover) {
@@ -1490,14 +1610,12 @@ void js::Nursery::traceRoots(AutoGCSession& session, TenuringTracer& mover) {
MOZ_ASSERT(gc->storeBuffer().isEnabled());
MOZ_ASSERT(gc->storeBuffer().isEmpty());
- // Strings in the whole cell buffer must be traced first, in order to mark
- // tenured dependent strings' bases as non-deduplicatable. The rest of
- // nursery collection (whole non-string cells, edges, etc.) can happen
- // later.
startProfile(ProfileKey::TraceWholeCells);
sb.traceWholeCells(mover);
endProfile(ProfileKey::TraceWholeCells);
+ cellsToSweep = sb.releaseCellSweepSet();
+
startProfile(ProfileKey::TraceValues);
sb.traceValues(mover);
endProfile(ProfileKey::TraceValues);
@@ -1523,8 +1641,6 @@ void js::Nursery::traceRoots(AutoGCSession& session, TenuringTracer& mover) {
endProfile(ProfileKey::MarkRuntime);
}
- MOZ_ASSERT(gc->storeBuffer().isEmpty());
-
startProfile(ProfileKey::MarkDebugger);
{
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_ROOTS);
@@ -1533,6 +1649,15 @@ void js::Nursery::traceRoots(AutoGCSession& session, TenuringTracer& mover) {
endProfile(ProfileKey::MarkDebugger);
}
+bool js::Nursery::shouldTenureEverything(JS::GCReason reason) {
+ if (!semispaceEnabled()) {
+ return true;
+ }
+
+ return reason == JS::GCReason::EVICT_NURSERY ||
+ reason == JS::GCReason::DISABLE_GENERATIONAL_GC;
+}
+
size_t js::Nursery::doPretenuring(JSRuntime* rt, JS::GCReason reason,
bool validPromotionRate,
double promotionRate) {
@@ -1590,12 +1715,13 @@ bool js::Nursery::registerMallocedBuffer(void* buffer, size_t nbytes) {
MOZ_ASSERT(buffer);
MOZ_ASSERT(nbytes > 0);
MOZ_ASSERT(!isInside(buffer));
- if (!mallocedBuffers.putNew(buffer)) {
+
+ if (!toSpace.mallocedBuffers.putNew(buffer)) {
return false;
}
- mallocedBufferBytes += nbytes;
- if (MOZ_UNLIKELY(mallocedBufferBytes > capacity() * 8)) {
+ toSpace.mallocedBufferBytes += nbytes;
+ if (MOZ_UNLIKELY(toSpace.mallocedBufferBytes > capacity() * 8)) {
requestMinorGC(JS::GCReason::NURSERY_MALLOC_BUFFERS);
}
@@ -1613,21 +1739,19 @@ bool js::Nursery::registerMallocedBuffer(void* buffer, size_t nbytes) {
Nursery::WasBufferMoved js::Nursery::maybeMoveRawBufferOnPromotion(
void** bufferp, gc::Cell* owner, size_t nbytes, MemoryUse use,
arena_id_t arena) {
- MOZ_ASSERT(!IsInsideNursery(owner));
-
void* buffer = *bufferp;
if (!isInside(buffer)) {
- // This is a malloced buffer. Remove it from the nursery's list of buffers
- // so we don't free it and add it to the memory accounting for the zone
+ // This is a malloced buffer. Remove it from the nursery's previous list of
+ // buffers so we don't free it.
removeMallocedBufferDuringMinorGC(buffer);
- AddCellMemory(owner, nbytes, use);
+ trackMallocedBufferOnPromotion(buffer, owner, nbytes, use);
return BufferNotMoved;
}
// Copy the nursery-allocated buffer into a new malloc allocation.
AutoEnterOOMUnsafeRegion oomUnsafe;
- Zone* zone = owner->asTenured().zone();
+ Zone* zone = owner->zone();
void* movedBuffer = zone->pod_arena_malloc<uint8_t>(arena, nbytes);
if (!movedBuffer) {
oomUnsafe.crash("Nursery::updateBufferOnPromotion");
@@ -1635,38 +1759,111 @@ Nursery::WasBufferMoved js::Nursery::maybeMoveRawBufferOnPromotion(
memcpy(movedBuffer, buffer, nbytes);
- AddCellMemory(owner, nbytes, use);
+ trackMallocedBufferOnPromotion(movedBuffer, owner, nbytes, use);
*bufferp = movedBuffer;
return BufferMoved;
}
+void js::Nursery::trackMallocedBufferOnPromotion(void* buffer, gc::Cell* owner,
+ size_t nbytes, MemoryUse use) {
+ if (owner->isTenured()) {
+ // If we tenured the owner then account for the memory.
+ AddCellMemory(owner, nbytes, use);
+ return;
+ }
+
+ // Otherwise add it to the nursery's new buffer list.
+ AutoEnterOOMUnsafeRegion oomUnsafe;
+ if (!registerMallocedBuffer(buffer, nbytes)) {
+ oomUnsafe.crash("Nursery::trackMallocedBufferOnPromotion");
+ }
+}
+
+void js::Nursery::trackTrailerOnPromotion(void* buffer, gc::Cell* owner,
+ size_t nbytes, size_t overhead,
+ MemoryUse use) {
+ MOZ_ASSERT(!isInside(buffer));
+ unregisterTrailer(buffer);
+
+ if (owner->isTenured()) {
+ // If we tenured the owner then account for the memory.
+ AddCellMemory(owner, nbytes + overhead, use);
+ return;
+ }
+
+ // Otherwise add it to the nursery's new buffer list.
+ PointerAndUint7 blockAndListID(buffer,
+ MallocedBlockCache::listIDForSize(nbytes));
+ AutoEnterOOMUnsafeRegion oomUnsafe;
+ if (!registerTrailer(blockAndListID, nbytes)) {
+ oomUnsafe.crash("Nursery::trackTrailerOnPromotion");
+ }
+}
+
void Nursery::requestMinorGC(JS::GCReason reason) {
+ JS::HeapState heapState = runtime()->heapState();
+#ifdef DEBUG
+ if (heapState == JS::HeapState::Idle ||
+ heapState == JS::HeapState::MinorCollecting) {
+ MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
+ } else if (heapState == JS::HeapState::MajorCollecting) {
+ // The GC runs sweeping tasks that may access the store buffer in parallel,
+ // and these require taking the store buffer lock.
+ MOZ_ASSERT(CurrentThreadIsGCSweeping());
+ runtime()->gc.assertCurrentThreadHasLockedStoreBuffer();
+ } else {
+ MOZ_CRASH("Unexpected heap state");
+ }
+#endif
+
MOZ_ASSERT(reason != JS::GCReason::NO_REASON);
- MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
MOZ_ASSERT(isEnabled());
if (minorGCRequested()) {
return;
}
+ if (heapState == JS::HeapState::MinorCollecting) {
+ // This can happen when we promote a lot of data to the second generation in
+ // a semispace collection. This can trigger a GC due to the number of store
+ // buffer entries added.
+ return;
+ }
+
// Set position to end of chunk to block further allocation.
MOZ_ASSERT(prevPosition_ == 0);
- prevPosition_ = position_;
- position_ = chunk(currentChunk_).end();
+ prevPosition_ = position();
+ toSpace.position_ = chunk(currentChunk()).end();
minorGCTriggerReason_ = reason;
- runtime()->mainContextFromOwnThread()->requestInterrupt(
+ runtime()->mainContextFromAnyThread()->requestInterrupt(
InterruptReason::MinorGC);
}
+size_t SemispaceSizeFactor(bool semispaceEnabled) {
+ return semispaceEnabled ? 2 : 1;
+}
+
+size_t js::Nursery::totalCapacity() const {
+ return capacity() * SemispaceSizeFactor(semispaceEnabled_);
+}
+
+size_t js::Nursery::totalCommitted() const {
+ size_t size = std::min(capacity_, allocatedChunkCount() * gc::ChunkSize);
+ return size * SemispaceSizeFactor(semispaceEnabled_);
+}
+
size_t Nursery::sizeOfMallocedBuffers(
mozilla::MallocSizeOf mallocSizeOf) const {
+ MOZ_ASSERT(fromSpace.mallocedBuffers.empty());
+
size_t total = 0;
- for (BufferSet::Range r = mallocedBuffers.all(); !r.empty(); r.popFront()) {
+ for (BufferSet::Range r = toSpace.mallocedBuffers.all(); !r.empty();
+ r.popFront()) {
total += mallocSizeOf(r.front());
}
- total += mallocedBuffers.shallowSizeOfExcludingThis(mallocSizeOf);
+ total += toSpace.mallocedBuffers.shallowSizeOfExcludingThis(mallocSizeOf);
return total;
}
@@ -1680,121 +1877,171 @@ void js::Nursery::sweep() {
// Sweep unique IDs first before we sweep any tables that may be keyed based
// on them.
- for (Cell* cell : cellsWithUid_) {
+ cellsWithUid_.mutableEraseIf([](Cell*& cell) {
auto* obj = static_cast<JSObject*>(cell);
if (!IsForwarded(obj)) {
gc::RemoveUniqueId(obj);
- } else {
- JSObject* dst = Forwarded(obj);
- gc::TransferUniqueId(dst, obj);
+ return true;
}
- }
- cellsWithUid_.clear();
+
+ JSObject* dst = Forwarded(obj);
+ gc::TransferUniqueId(dst, obj);
+
+ if (!IsInsideNursery(dst)) {
+ return true;
+ }
+
+ cell = dst;
+ return false;
+ });
for (ZonesIter zone(runtime(), SkipAtoms); !zone.done(); zone.next()) {
zone->sweepAfterMinorGC(&trc);
}
sweepMapAndSetObjects();
+ cellsToSweep.sweep();
+ CellSweepSet empty;
+ std::swap(cellsToSweep, empty);
runtime()->caches().sweepAfterMinorGC(&trc);
}
+void gc::CellSweepSet::sweep() {
+ if (head_) {
+ head_->sweepDependentStrings();
+ head_ = nullptr;
+ }
+ if (storage_) {
+ storage_->freeAll();
+ }
+}
+
void js::Nursery::clear() {
+ fromSpace.clear(this);
+ MOZ_ASSERT(fromSpace.isEmpty());
+}
+
+void js::Nursery::Space::clear(Nursery* nursery) {
+ GCRuntime* gc = nursery->gc;
+
// Poison the nursery contents so touching a freed object will crash.
unsigned firstClearChunk;
- if (gc->hasZealMode(ZealMode::GenerationalGC)) {
- // Poison all the chunks used in this cycle. The new start chunk is
- // reposioned in Nursery::collect() but there's no point optimising that in
- // this case.
+ if (gc->hasZealMode(ZealMode::GenerationalGC) || nursery->semispaceEnabled_) {
+ // Poison all the chunks used in this cycle.
firstClearChunk = startChunk_;
} else {
- // In normal mode we start at the second chunk, the first one will be used
+ // Poison from the second chunk onwards as the first one will be used
// in the next cycle and poisoned in Nursery::collect();
MOZ_ASSERT(startChunk_ == 0);
firstClearChunk = 1;
}
for (unsigned i = firstClearChunk; i < currentChunk_; ++i) {
- chunk(i).poisonAfterEvict();
+ chunks_[i]->poisonAfterEvict();
}
// Clear only the used part of the chunk because that's the part we touched,
// but only if it's not going to be re-used immediately (>= firstClearChunk).
if (currentChunk_ >= firstClearChunk) {
- chunk(currentChunk_)
- .poisonAfterEvict(position() - chunk(currentChunk_).start());
+ size_t usedBytes = position_ - chunks_[currentChunk_]->start();
+ chunks_[currentChunk_]->poisonAfterEvict(NurseryChunkHeaderSize +
+ usedBytes);
}
// Reset the start chunk & position if we're not in this zeal mode, or we're
// in it and close to the end of the nursery.
- MOZ_ASSERT(maxChunkCount() > 0);
+ MOZ_ASSERT(maxChunkCount_ > 0);
if (!gc->hasZealMode(ZealMode::GenerationalGC) ||
- (gc->hasZealMode(ZealMode::GenerationalGC) &&
- currentChunk_ + 1 == maxChunkCount())) {
- moveToStartOfChunk(0);
+ currentChunk_ + 1 == maxChunkCount_) {
+ moveToStartOfChunk(nursery, 0);
}
// Set current start position for isEmpty checks.
setStartToCurrentPosition();
}
-MOZ_ALWAYS_INLINE void js::Nursery::moveToStartOfChunk(unsigned chunkno) {
- MOZ_ASSERT(chunkno < allocatedChunkCount());
+void js::Nursery::moveToStartOfChunk(unsigned chunkno) {
+ toSpace.moveToStartOfChunk(this, chunkno);
+}
+
+void js::Nursery::Space::moveToStartOfChunk(Nursery* nursery,
+ unsigned chunkno) {
+ MOZ_ASSERT(chunkno < chunks_.length());
currentChunk_ = chunkno;
- position_ = chunk(chunkno).start();
- setCurrentEnd();
+ position_ = chunks_[chunkno]->start();
+ setCurrentEnd(nursery);
MOZ_ASSERT(position_ != 0);
MOZ_ASSERT(currentEnd_ > position_); // Check this cannot wrap.
}
-void js::Nursery::poisonAndInitCurrentChunk(size_t extent) {
- if (gc->hasZealMode(ZealMode::GenerationalGC) || !isSubChunkMode()) {
- chunk(currentChunk_).poisonAndInit(runtime());
- } else {
- extent = std::min(capacity_, extent);
- chunk(currentChunk_).poisonAndInit(runtime(), extent);
- }
+void js::Nursery::poisonAndInitCurrentChunk() {
+ NurseryChunk& chunk = this->chunk(currentChunk());
+ size_t start = position() - uintptr_t(&chunk);
+ size_t end = isSubChunkMode() ? capacity_ : ChunkSize;
+ chunk.poisonRange(start, end, JS_FRESH_NURSERY_PATTERN,
+ MemCheckKind::MakeUndefined);
+ new (&chunk)
+ NurseryChunk(runtime(), ChunkKind::NurseryToSpace, currentChunk());
}
-MOZ_ALWAYS_INLINE void js::Nursery::setCurrentEnd() {
- MOZ_ASSERT_IF(isSubChunkMode(),
- currentChunk_ == 0 && currentEnd_ <= chunk(0).end());
- currentEnd_ =
- uintptr_t(&chunk(currentChunk_)) + std::min(capacity_, ChunkSize);
+void js::Nursery::setCurrentEnd() { toSpace.setCurrentEnd(this); }
- MOZ_ASSERT_IF(!isSubChunkMode(), currentEnd_ == chunk(currentChunk_).end());
- MOZ_ASSERT(currentEnd_ != chunk(currentChunk_).start());
+void js::Nursery::Space::setCurrentEnd(Nursery* nursery) {
+ currentEnd_ = uintptr_t(chunks_[currentChunk_]) +
+ std::min(nursery->capacity(), ChunkSize);
}
-bool js::Nursery::allocateNextChunk(const unsigned chunkno,
- AutoLockGCBgAlloc& lock) {
- const unsigned priorCount = allocatedChunkCount();
+bool js::Nursery::allocateNextChunk(AutoLockGCBgAlloc& lock) {
+ // Allocate a new nursery chunk. If semispace collection is enabled, we have
+ // to allocate one for both spaces.
+
+ const unsigned priorCount = toSpace.chunks_.length();
const unsigned newCount = priorCount + 1;
- MOZ_ASSERT((chunkno == currentChunk_ + 1) ||
- (chunkno == 0 && allocatedChunkCount() == 0));
- MOZ_ASSERT(chunkno == allocatedChunkCount());
- MOZ_ASSERT(chunkno < HowMany(capacity(), ChunkSize));
+ MOZ_ASSERT(newCount <= maxChunkCount());
+ MOZ_ASSERT(fromSpace.chunks_.length() ==
+ (semispaceEnabled_ ? priorCount : 0));
+
+ if (!toSpace.chunks_.reserve(newCount) ||
+ (semispaceEnabled_ && !fromSpace.chunks_.reserve(newCount))) {
+ return false;
+ }
- if (!chunks_.resize(newCount)) {
+ TenuredChunk* toSpaceChunk = gc->getOrAllocChunk(lock);
+ if (!toSpaceChunk) {
return false;
}
- TenuredChunk* newChunk;
- newChunk = gc->getOrAllocChunk(lock);
- if (!newChunk) {
- chunks_.shrinkTo(priorCount);
+ TenuredChunk* fromSpaceChunk = nullptr;
+ if (semispaceEnabled_ && !(fromSpaceChunk = gc->getOrAllocChunk(lock))) {
+ gc->recycleChunk(toSpaceChunk, lock);
return false;
}
- chunks_[chunkno] = NurseryChunk::fromChunk(newChunk);
+ uint8_t index = toSpace.chunks_.length();
+ NurseryChunk* nurseryChunk =
+ NurseryChunk::fromChunk(toSpaceChunk, ChunkKind::NurseryToSpace, index);
+ toSpace.chunks_.infallibleAppend(nurseryChunk);
+
+ if (semispaceEnabled_) {
+ MOZ_ASSERT(index == fromSpace.chunks_.length());
+ nurseryChunk = NurseryChunk::fromChunk(fromSpaceChunk,
+ ChunkKind::NurseryFromSpace, index);
+ fromSpace.chunks_.infallibleAppend(nurseryChunk);
+ }
+
return true;
}
-MOZ_ALWAYS_INLINE void js::Nursery::setStartToCurrentPosition() {
+void js::Nursery::setStartToCurrentPosition() {
+ toSpace.setStartToCurrentPosition();
+}
+
+void js::Nursery::Space::setStartToCurrentPosition() {
startChunk_ = currentChunk_;
- startPosition_ = position();
+ startPosition_ = position_;
+ MOZ_ASSERT(isEmpty());
}
void js::Nursery::maybeResizeNursery(JS::GCOptions options,
@@ -1809,8 +2056,7 @@ void js::Nursery::maybeResizeNursery(JS::GCOptions options,
decommitTask->join();
size_t newCapacity = mozilla::Clamp(targetSize(options, reason),
- tunables().gcMinNurseryBytes(),
- tunables().gcMaxNurseryBytes());
+ minSpaceSize(), maxSpaceSize());
MOZ_ASSERT(roundSize(newCapacity) == newCapacity);
MOZ_ASSERT(newCapacity >= SystemPageSize());
@@ -1862,7 +2108,7 @@ size_t js::Nursery::targetSize(JS::GCOptions options, JS::GCReason reason) {
TimeStamp now = TimeStamp::Now();
if (reason == JS::GCReason::PREPARE_FOR_PAGELOAD) {
- return roundSize(tunables().gcMaxNurseryBytes());
+ return roundSize(maxSpaceSize());
}
// If the nursery is completely unused then minimise it.
@@ -1960,38 +2206,58 @@ size_t js::Nursery::roundSize(size_t size) {
}
void js::Nursery::growAllocableSpace(size_t newCapacity) {
- MOZ_ASSERT_IF(!isSubChunkMode(), newCapacity > currentChunk_ * ChunkSize);
- MOZ_ASSERT(newCapacity <= tunables().gcMaxNurseryBytes());
+ MOZ_ASSERT_IF(!isSubChunkMode(), newCapacity > currentChunk() * ChunkSize);
+ MOZ_ASSERT(newCapacity <= maxSpaceSize());
MOZ_ASSERT(newCapacity > capacity());
- if (!decommitTask->reserveSpaceForBytes(newCapacity)) {
+ size_t nchunks =
+ RequiredChunkCount(newCapacity) * SemispaceSizeFactor(semispaceEnabled_);
+ if (!decommitTask->reserveSpaceForChunks(nchunks)) {
return;
}
if (isSubChunkMode()) {
- MOZ_ASSERT(currentChunk_ == 0);
-
- // The remainder of the chunk may have been decommitted.
- if (!chunk(0).markPagesInUseHard(std::min(newCapacity, ChunkSize))) {
- // The OS won't give us the memory we need, we can't grow.
+ if (!toSpace.commitSubChunkRegion(capacity(), newCapacity) ||
+ (semispaceEnabled_ &&
+ !fromSpace.commitSubChunkRegion(capacity(), newCapacity))) {
return;
}
+ }
- // The capacity has changed and since we were in sub-chunk mode we need to
- // update the poison values / asan information for the now-valid region of
- // this chunk.
- size_t end = std::min(newCapacity, ChunkSize);
- chunk(0).poisonRange(capacity(), end, JS_FRESH_NURSERY_PATTERN,
- MemCheckKind::MakeUndefined);
+ setCapacity(newCapacity);
+
+ toSpace.setCurrentEnd(this);
+ if (semispaceEnabled_) {
+ fromSpace.setCurrentEnd(this);
}
+}
- capacity_ = newCapacity;
+bool js::Nursery::Space::commitSubChunkRegion(size_t oldCapacity,
+ size_t newCapacity) {
+ MOZ_ASSERT(currentChunk_ == 0);
+ MOZ_ASSERT(oldCapacity < ChunkSize);
+ MOZ_ASSERT(newCapacity > oldCapacity);
- setCurrentEnd();
+ size_t newChunkEnd = std::min(newCapacity, ChunkSize);
+
+ // The remainder of the chunk may have been decommitted.
+ if (!chunks_[0]->markPagesInUseHard(newChunkEnd)) {
+ // The OS won't give us the memory we need, we can't grow.
+ return false;
+ }
+
+ // The capacity has changed and since we were in sub-chunk mode we need to
+ // update the poison values / asan information for the now-valid region of
+ // this chunk.
+ chunks_[0]->poisonRange(oldCapacity, newChunkEnd, JS_FRESH_NURSERY_PATTERN,
+ MemCheckKind::MakeUndefined);
+ return true;
}
-void js::Nursery::freeChunksFrom(const unsigned firstFreeChunk) {
- MOZ_ASSERT(firstFreeChunk < chunks_.length());
+void js::Nursery::freeChunksFrom(Space& space, const unsigned firstFreeChunk) {
+ if (firstFreeChunk >= space.chunks_.length()) {
+ return;
+ }
// The loop below may need to skip the first chunk, so we may use this so we
// can modify it.
@@ -2000,61 +2266,112 @@ void js::Nursery::freeChunksFrom(const unsigned firstFreeChunk) {
if ((firstChunkToDecommit == 0) && isSubChunkMode()) {
// Part of the first chunk may be hard-decommitted, un-decommit it so that
// the GC's normal chunk-handling doesn't segfault.
- MOZ_ASSERT(currentChunk_ == 0);
- if (!chunk(0).markPagesInUseHard(ChunkSize)) {
+ MOZ_ASSERT(space.currentChunk_ == 0);
+ if (!space.chunks_[0]->markPagesInUseHard(ChunkSize)) {
// Free the chunk if we can't allocate its pages.
- UnmapPages(static_cast<void*>(&chunk(0)), ChunkSize);
+ UnmapPages(space.chunks_[0], ChunkSize);
firstChunkToDecommit = 1;
}
}
{
AutoLockHelperThreadState lock;
- for (size_t i = firstChunkToDecommit; i < chunks_.length(); i++) {
- decommitTask->queueChunk(chunks_[i], lock);
+ for (size_t i = firstChunkToDecommit; i < space.chunks_.length(); i++) {
+ decommitTask->queueChunk(space.chunks_[i], lock);
}
}
- chunks_.shrinkTo(firstFreeChunk);
+ space.chunks_.shrinkTo(firstFreeChunk);
}
void js::Nursery::shrinkAllocableSpace(size_t newCapacity) {
-#ifdef JS_GC_ZEAL
- if (gc->hasZealMode(ZealMode::GenerationalGC)) {
- return;
- }
-#endif
+ MOZ_ASSERT(!gc->hasZealMode(ZealMode::GenerationalGC));
+ MOZ_ASSERT(newCapacity < capacity_);
- // Don't shrink the nursery to zero (use Nursery::disable() instead)
- // This can't happen due to the rounding-down performed above because of the
- // clamping in maybeResizeNursery().
- MOZ_ASSERT(newCapacity != 0);
- // Don't attempt to shrink it to the same size.
- if (newCapacity == capacity_) {
+ if (semispaceEnabled() && usedSpace() >= newCapacity) {
+ // Can't shrink below what we've already used.
return;
}
- MOZ_ASSERT(newCapacity < capacity_);
unsigned newCount = HowMany(newCapacity, ChunkSize);
if (newCount < allocatedChunkCount()) {
- freeChunksFrom(newCount);
+ freeChunksFrom(toSpace, newCount);
+ freeChunksFrom(fromSpace, newCount);
}
size_t oldCapacity = capacity_;
- capacity_ = newCapacity;
+ setCapacity(newCapacity);
- setCurrentEnd();
+ toSpace.setCurrentEnd(this);
+ if (semispaceEnabled_) {
+ fromSpace.setCurrentEnd(this);
+ }
if (isSubChunkMode()) {
- MOZ_ASSERT(currentChunk_ == 0);
- size_t end = std::min(oldCapacity, ChunkSize);
- chunk(0).poisonRange(newCapacity, end, JS_SWEPT_NURSERY_PATTERN,
- MemCheckKind::MakeNoAccess);
+ toSpace.decommitSubChunkRegion(this, oldCapacity, newCapacity);
+ if (semispaceEnabled_) {
+ fromSpace.decommitSubChunkRegion(this, oldCapacity, newCapacity);
+ }
+ }
+}
- AutoLockHelperThreadState lock;
- decommitTask->queueRange(capacity_, chunk(0), lock);
+void js::Nursery::Space::decommitSubChunkRegion(Nursery* nursery,
+ size_t oldCapacity,
+ size_t newCapacity) {
+ MOZ_ASSERT(currentChunk_ == 0);
+ MOZ_ASSERT(newCapacity < ChunkSize);
+ MOZ_ASSERT(newCapacity < oldCapacity);
+
+ size_t oldChunkEnd = std::min(oldCapacity, ChunkSize);
+ chunks_[0]->poisonRange(newCapacity, oldChunkEnd, JS_SWEPT_NURSERY_PATTERN,
+ MemCheckKind::MakeNoAccess);
+
+ AutoLockHelperThreadState lock;
+ nursery->decommitTask->queueRange(newCapacity, chunks_[0], lock);
+}
+
+js::Nursery::Space::Space(gc::ChunkKind kind) : kind(kind) {
+ MOZ_ASSERT(kind == ChunkKind::NurseryFromSpace ||
+ kind == ChunkKind::NurseryToSpace);
+}
+
+void js::Nursery::Space::setKind(ChunkKind newKind) {
+#ifdef DEBUG
+ MOZ_ASSERT(newKind == ChunkKind::NurseryFromSpace ||
+ newKind == ChunkKind::NurseryToSpace);
+ checkKind(kind);
+#endif
+
+ kind = newKind;
+ for (NurseryChunk* chunk : chunks_) {
+ chunk->kind = newKind;
+ }
+
+#ifdef DEBUG
+ checkKind(newKind);
+#endif
+}
+
+#ifdef DEBUG
+void js::Nursery::Space::checkKind(ChunkKind expected) const {
+ MOZ_ASSERT(kind == expected);
+ for (NurseryChunk* chunk : chunks_) {
+ MOZ_ASSERT(chunk->getKind() == expected);
+ }
+}
+#endif
+
+#ifdef DEBUG
+size_t js::Nursery::Space::findChunkIndex(uintptr_t chunkAddr) const {
+ for (size_t i = 0; i < chunks_.length(); i++) {
+ if (uintptr_t(chunks_[i]) == chunkAddr) {
+ return i;
+ }
}
+
+ MOZ_CRASH("Nursery chunk not found");
}
+#endif
gcstats::Statistics& js::Nursery::stats() const { return gc->stats(); }
@@ -2067,18 +2384,55 @@ bool js::Nursery::isSubChunkMode() const {
return capacity() <= NurseryChunkUsableSize;
}
+void js::Nursery::clearMapAndSetNurseryRanges() {
+ // Clears the lists of nursery ranges used by map and set iterators. These
+ // lists are cleared at the start of minor GC and rebuilt when iterators are
+ // promoted during minor GC.
+ for (auto* map : mapsWithNurseryMemory_) {
+ map->clearNurseryRangesBeforeMinorGC();
+ }
+ for (auto* set : setsWithNurseryMemory_) {
+ set->clearNurseryRangesBeforeMinorGC();
+ }
+}
+
void js::Nursery::sweepMapAndSetObjects() {
+ // This processes all Map and Set objects that are known to have associated
+ // nursery memory (either they are nursery allocated themselves or they have
+ // iterator objects that are nursery allocated).
+ //
+ // These objects may die and be finalized; if not, their internal state and
+ // memory tracking are updated.
+ //
+ // Finally the lists themselves are rebuilt so as to remove objects that are
+ // no longer associated with nursery memory (either because they died or
+ // because the nursery object was promoted to the tenured heap).
+
auto* gcx = runtime()->gcContext();
- for (auto* mapobj : mapsWithNurseryMemory_) {
- MapObject::sweepAfterMinorGC(gcx, mapobj);
+ AutoEnterOOMUnsafeRegion oomUnsafe;
+
+ MapObjectVector maps;
+ std::swap(mapsWithNurseryMemory_, maps);
+ for (auto* mapobj : maps) {
+ mapobj = MapObject::sweepAfterMinorGC(gcx, mapobj);
+ if (mapobj) {
+ if (!mapsWithNurseryMemory_.append(mapobj)) {
+ oomUnsafe.crash("sweepAfterMinorGC");
+ }
+ }
}
- mapsWithNurseryMemory_.clearAndFree();
- for (auto* setobj : setsWithNurseryMemory_) {
- SetObject::sweepAfterMinorGC(gcx, setobj);
+ SetObjectVector sets;
+ std::swap(setsWithNurseryMemory_, sets);
+ for (auto* setobj : sets) {
+ setobj = SetObject::sweepAfterMinorGC(gcx, setobj);
+ if (setobj) {
+ if (!setsWithNurseryMemory_.append(setobj)) {
+ oomUnsafe.crash("sweepAfterMinorGC");
+ }
+ }
}
- setsWithNurseryMemory_.clearAndFree();
}
void js::Nursery::joinDecommitTask() { decommitTask->join(); }
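The new sweepMapAndSetObjects() above uses a swap-and-rebuild pattern: the list is moved aside, every entry is swept, and only objects that still need tracking are re-appended. A minimal standalone sketch of that pattern, using hypothetical stand-in types rather than the real MapObject and Vector classes:

    #include <vector>

    struct FakeMapObject {
      bool dead = false;  // stands in for "died or no longer has nursery memory"
    };

    // Returns nullptr when the object no longer needs tracking, otherwise the
    // (possibly updated) object pointer, mirroring the sweepAfterMinorGC shape.
    static FakeMapObject* SweepAfterMinorGC(FakeMapObject* obj) {
      return obj->dead ? nullptr : obj;
    }

    static void SweepList(std::vector<FakeMapObject*>& list) {
      std::vector<FakeMapObject*> old;
      old.swap(list);                    // move the old list aside
      for (FakeMapObject* obj : old) {
        if (FakeMapObject* live = SweepAfterMinorGC(obj)) {
          list.push_back(live);          // rebuild the list with survivors only
        }
      }
    }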
diff --git a/js/src/gc/Nursery.h b/js/src/gc/Nursery.h
index 0d7b607ff8..647dbbb24f 100644
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -13,6 +13,7 @@
#include <tuple>
+#include "ds/LifoAlloc.h"
#include "gc/GCEnum.h"
#include "gc/GCProbes.h"
#include "gc/Heap.h"
@@ -21,6 +22,8 @@
#include "js/AllocPolicy.h"
#include "js/Class.h"
#include "js/GCAPI.h"
+#include "js/GCVector.h"
+#include "js/HeapAPI.h"
#include "js/TypeDecls.h"
#include "js/UniquePtr.h"
#include "js/Utility.h"
@@ -69,7 +72,20 @@ namespace gc {
class AutoGCSession;
struct Cell;
class GCSchedulingTunables;
+class StoreBuffer;
class TenuringTracer;
+
+// A set of cells that need to be swept at the end of a minor GC,
+// represented as a linked list of ArenaCellSet structs extracted from a
+// WholeCellBuffer.
+struct CellSweepSet {
+ UniquePtr<LifoAlloc> storage_;
+ ArenaCellSet* head_ = nullptr;
+
+ // Fix up the tenured dependent strings stored in the ArenaCellSet list.
+ void sweep();
+};
+
} // namespace gc
class Nursery {
@@ -79,18 +95,6 @@ class Nursery {
[[nodiscard]] bool init(AutoLockGCBgAlloc& lock);
- // Number of allocated (ready to use) chunks.
- unsigned allocatedChunkCount() const { return chunks_.length(); }
-
- // Total number of chunks and the capacity of the nursery. Chunks will be
- // lazilly allocated and added to the chunks array up to this limit, after
- // that the nursery must be collected, this limit may be raised during
- // collection.
- unsigned maxChunkCount() const {
- MOZ_ASSERT(capacity());
- return HowMany(capacity(), gc::ChunkSize);
- }
-
void enable();
void disable();
bool isEnabled() const { return capacity() != 0; }
@@ -103,20 +107,16 @@ class Nursery {
void disableBigInts();
bool canAllocateBigInts() const { return canAllocateBigInts_; }
+ void setSemispaceEnabled(bool enabled);
+ bool semispaceEnabled() const { return semispaceEnabled_; }
+
// Return true if no allocations have been made since the last collection.
bool isEmpty() const;
// Check whether an arbitrary pointer is within the nursery. This is
// slower than IsInsideNursery(Cell*), but works on all types of pointers.
- MOZ_ALWAYS_INLINE bool isInside(gc::Cell* cellp) const = delete;
- MOZ_ALWAYS_INLINE bool isInside(const void* p) const {
- for (auto* chunk : chunks_) {
- if (uintptr_t(p) - uintptr_t(chunk) < gc::ChunkSize) {
- return true;
- }
- }
- return false;
- }
+ bool isInside(gc::Cell* cellp) const = delete;
+ inline bool isInside(const void* p) const;
template <typename T>
inline bool isInside(const SharedMem<T>& p) const;
@@ -223,11 +223,12 @@ class Nursery {
// Mark a malloced buffer as no longer needing to be freed.
void removeMallocedBuffer(void* buffer, size_t nbytes) {
- MOZ_ASSERT(mallocedBuffers.has(buffer));
+ MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
+ MOZ_ASSERT(toSpace.mallocedBuffers.has(buffer));
MOZ_ASSERT(nbytes > 0);
- MOZ_ASSERT(mallocedBufferBytes >= nbytes);
- mallocedBuffers.remove(buffer);
- mallocedBufferBytes -= nbytes;
+ MOZ_ASSERT(toSpace.mallocedBufferBytes >= nbytes);
+ toSpace.mallocedBuffers.remove(buffer);
+ toSpace.mallocedBufferBytes -= nbytes;
}
// Mark a malloced buffer as no longer needing to be freed during minor
@@ -235,8 +236,8 @@ class Nursery {
// buffers will soon be freed.
void removeMallocedBufferDuringMinorGC(void* buffer) {
MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());
- MOZ_ASSERT(mallocedBuffers.has(buffer));
- mallocedBuffers.remove(buffer);
+ MOZ_ASSERT(fromSpace.mallocedBuffers.has(buffer));
+ fromSpace.mallocedBuffers.remove(buffer);
}
[[nodiscard]] bool addedUniqueIdToCell(gc::Cell* cell) {
@@ -277,26 +278,8 @@ class Nursery {
inline void unregisterTrailer(void* block);
size_t sizeOfTrailerBlockSets(mozilla::MallocSizeOf mallocSizeOf) const;
- size_t capacity() const { return capacity_; }
- size_t committed() const {
- return std::min(capacity_, allocatedChunkCount() * gc::ChunkSize);
- }
-
- // Used and free space both include chunk headers for that part of the
- // nursery.
- //
- // usedSpace() + freeSpace() == capacity()
- //
- MOZ_ALWAYS_INLINE size_t usedSpace() const {
- return capacity() - freeSpace();
- }
- MOZ_ALWAYS_INLINE size_t freeSpace() const {
- MOZ_ASSERT(isEnabled());
- MOZ_ASSERT(currentEnd_ - position_ <= NurseryChunkUsableSize);
- MOZ_ASSERT(currentChunk_ < maxChunkCount());
- return (currentEnd_ - position_) +
- (maxChunkCount() - currentChunk_ - 1) * gc::ChunkSize;
- }
+ size_t totalCapacity() const;
+ size_t totalCommitted() const;
#ifdef JS_GC_ZEAL
void enterZealMode();
@@ -312,9 +295,10 @@ class Nursery {
// Print total profile times on shutdown.
void printTotalProfileTimes();
- void* addressOfPosition() const { return (void**)&position_; }
+ void* addressOfPosition() const { return (void**)&toSpace.position_; }
static constexpr int32_t offsetOfCurrentEndFromPosition() {
- return offsetof(Nursery, currentEnd_) - offsetof(Nursery, position_);
+ return offsetof(Nursery, toSpace.currentEnd_) -
+ offsetof(Nursery, toSpace.position_);
}
void* addressOfNurseryAllocatedSites() {
@@ -343,10 +327,6 @@ class Nursery {
return setsWithNurseryMemory_.append(obj);
}
- // The amount of space in the mapped nursery available to allocations.
- static const size_t NurseryChunkUsableSize =
- gc::ChunkSize - sizeof(gc::ChunkBase);
-
void joinDecommitTask();
mozilla::TimeStamp collectionStartTime() {
@@ -362,6 +342,16 @@ class Nursery {
void setAllocFlagsForZone(JS::Zone* zone);
+ bool shouldTenureEverything(JS::GCReason reason);
+
+ inline bool inCollectedRegion(gc::Cell* cell) const;
+ inline bool inCollectedRegion(void* ptr) const;
+
+ void trackMallocedBufferOnPromotion(void* buffer, gc::Cell* owner,
+ size_t nbytes, MemoryUse use);
+ void trackTrailerOnPromotion(void* buffer, gc::Cell* owner, size_t nbytes,
+ size_t overhead, MemoryUse use);
+
// Round a size in bytes to the nearest valid nursery size.
static size_t roundSize(size_t size);
@@ -374,6 +364,8 @@ class Nursery {
mozilla::TimeStamp lastCollectionEndTime() const;
private:
+ struct Space;
+
enum class ProfileKey {
#define DEFINE_TIME_KEY(name, text) name,
FOR_EACH_NURSERY_PROFILE_TIME(DEFINE_TIME_KEY)
@@ -387,6 +379,36 @@ class Nursery {
mozilla::EnumeratedArray<ProfileKey, mozilla::TimeDuration,
size_t(ProfileKey::KeyCount)>;
+ size_t capacity() const { return capacity_; }
+
+ // Total number of chunks and the capacity of the current nursery
+ // space. Chunks will be lazily allocated and added to the chunks array up to
+ // this limit. After that the nursery must be collected. This limit may be
+ // changed at the end of collection by maybeResizeNursery.
+ uint32_t maxChunkCount() const {
+ MOZ_ASSERT(toSpace.maxChunkCount_);
+ return toSpace.maxChunkCount_;
+ }
+
+ // Number of allocated (ready to use) chunks.
+ unsigned allocatedChunkCount() const { return toSpace.chunks_.length(); }
+
+ uint32_t currentChunk() const { return toSpace.currentChunk_; }
+ uint32_t startChunk() const { return toSpace.startChunk_; }
+ uintptr_t startPosition() const { return toSpace.startPosition_; }
+
+ // Used and free space both include chunk headers for that part of the
+ // nursery.
+ MOZ_ALWAYS_INLINE size_t usedSpace() const {
+ return capacity() - freeSpace();
+ }
+ MOZ_ALWAYS_INLINE size_t freeSpace() const {
+ MOZ_ASSERT(isEnabled());
+ MOZ_ASSERT(currentChunk() < maxChunkCount());
+ return (currentEnd() - position()) +
+ (maxChunkCount() - currentChunk() - 1) * gc::ChunkSize;
+ }
+
// Calculate the promotion rate of the most recent minor GC.
// The valid_for_tenuring parameter is used to return whether this
// promotion rate is accurate enough (the nursery was full enough) to be
@@ -395,31 +417,27 @@ class Nursery {
// Must only be called if the previousGC data is initialised.
double calcPromotionRate(bool* validForTenuring) const;
- void freeTrailerBlocks();
+ void freeTrailerBlocks(JS::GCOptions options, JS::GCReason reason);
- NurseryChunk& chunk(unsigned index) const { return *chunks_[index]; }
+ NurseryChunk& chunk(unsigned index) const { return *toSpace.chunks_[index]; }
// Set the allocation position to the start of a chunk. This sets
// currentChunk_, position_ and currentEnd_ values as appropriate.
void moveToStartOfChunk(unsigned chunkno);
bool initFirstChunk(AutoLockGCBgAlloc& lock);
+ void setCapacity(size_t newCapacity);
- // extent is advisory, it will be ignored in sub-chunk and generational zeal
- // modes. It will be clamped to Min(NurseryChunkUsableSize, capacity_).
- void poisonAndInitCurrentChunk(size_t extent = gc::ChunkSize);
+ void poisonAndInitCurrentChunk();
void setCurrentEnd();
void setStartToCurrentPosition();
- // Allocate the next chunk, or the first chunk for initialization.
- // Callers will probably want to call moveToStartOfChunk(0) next.
- [[nodiscard]] bool allocateNextChunk(unsigned chunkno,
- AutoLockGCBgAlloc& lock);
+ // Allocate another chunk.
+ [[nodiscard]] bool allocateNextChunk(AutoLockGCBgAlloc& lock);
- uintptr_t currentEnd() const { return currentEnd_; }
-
- uintptr_t position() const { return position_; }
+ uintptr_t position() const { return toSpace.position_; }
+ uintptr_t currentEnd() const { return toSpace.currentEnd_; }
MOZ_ALWAYS_INLINE bool isSubChunkMode() const;
@@ -451,6 +469,7 @@ class Nursery {
};
CollectionResult doCollection(gc::AutoGCSession& session,
JS::GCOptions options, JS::GCReason reason);
+ void swapSpaces();
void traceRoots(gc::AutoGCSession& session, gc::TenuringTracer& mover);
size_t doPretenuring(JSRuntime* rt, JS::GCReason reason,
@@ -469,22 +488,30 @@ class Nursery {
uint32_t capacity);
#ifdef DEBUG
- bool checkForwardingPointerLocation(void* ptr, bool expectedInside);
+ bool checkForwardingPointerInsideNursery(void* ptr);
#endif
// Updates pointers to nursery objects that have been tenured and discards
// pointers to objects that have been freed.
void sweep();
- // Reset the current chunk and position after a minor collection. Also poison
+ // In a minor GC, resets the start and end positions, the current chunk and
+ // current position.
+ void setNewExtentAndPosition();
+
// the nursery on debug & nightly builds.
void clear();
+ void clearMapAndSetNurseryRanges();
void sweepMapAndSetObjects();
// Allocate a buffer for a given zone, using the nursery if possible.
void* allocateBuffer(JS::Zone* zone, size_t nbytes);
+ // Get per-space size limits.
+ size_t maxSpaceSize() const;
+ size_t minSpaceSize() const;
+
// Change the allocable space provided by the nursery.
void maybeResizeNursery(JS::GCOptions options, JS::GCReason reason);
size_t targetSize(JS::GCOptions options, JS::GCReason reason);
@@ -495,7 +522,9 @@ class Nursery {
// Free the chunks starting at firstFreeChunk until the end of the chunks
// vector. Shrinks the vector but does not update maxChunkCount().
- void freeChunksFrom(unsigned firstFreeChunk);
+ void freeChunksFrom(Space& space, unsigned firstFreeChunk);
+
+ inline bool shouldTenure(gc::Cell* cell);
void sendTelemetry(JS::GCReason reason, mozilla::TimeDuration totalTime,
bool wasEmpty, double promotionRate,
@@ -514,35 +543,87 @@ class Nursery {
mozilla::TimeStamp collectionStartTime() const;
private:
- // Fields used during allocation fast path are grouped first:
+ using BufferRelocationOverlay = void*;
+ using BufferSet = HashSet<void*, PointerHasher<void*>, SystemAllocPolicy>;
- // Pointer to the first unallocated byte in the nursery.
- uintptr_t position_;
+ struct Space {
+ // Fields used during allocation fast path go first:
- // Pointer to the last byte of space in the current chunk.
- uintptr_t currentEnd_;
+ // Pointer to the first unallocated byte in the nursery.
+ uintptr_t position_ = 0;
- // Other fields not necessarily used during allocation follow:
+ // Pointer to the last byte of space in the current chunk.
+ uintptr_t currentEnd_ = 0;
- gc::GCRuntime* const gc;
+ // Vector of allocated chunks to allocate from.
+ Vector<NurseryChunk*, 0, SystemAllocPolicy> chunks_;
+
+ // The index of the chunk that is currently being allocated from.
+ uint32_t currentChunk_ = 0;
+
+ // The maximum number of chunks to allocate based on capacity_.
+ uint32_t maxChunkCount_ = 0;
+
+ // These fields refer to the beginning of the nursery. They're normally 0
+ // and chunk(0).start() respectively. Except when a generational GC zeal
+ // mode is active, then they may be arbitrary (see Nursery::clear()).
+ uint32_t startChunk_ = 0;
+ uintptr_t startPosition_ = 0;
+
+ // The set of malloc-allocated buffers owned by nursery objects. Any
+ // buffers that do not belong to a promoted thing at the end of a minor GC
+ // must be freed.
+ BufferSet mallocedBuffers;
+ size_t mallocedBufferBytes = 0;
+
+ // Wasm "trailer" (C++-heap-allocated) blocks. See comments above on
+ // ::registerTrailer and ::unregisterTrailer.
+ Vector<PointerAndUint7, 0, SystemAllocPolicy> trailersAdded_;
+ Vector<void*, 0, SystemAllocPolicy> trailersRemoved_;
+ size_t trailersRemovedUsed_ = 0;
+ size_t trailerBytes_ = 0;
+
+ gc::ChunkKind kind;
+
+ explicit Space(gc::ChunkKind kind);
+
+ inline bool isEmpty() const;
+ inline bool isInside(const void* p) const;
- // Vector of allocated chunks to allocate from.
- Vector<NurseryChunk*, 0, SystemAllocPolicy> chunks_;
+ // Return the logical offset within the nursery of an address in a nursery
+ // chunk (chunks are discontiguous in memory).
+ inline size_t offsetFromAddress(uintptr_t addr) const;
+ inline size_t offsetFromExclusiveAddress(uintptr_t addr) const;
- // The index of the chunk that is currently being allocated from.
- uint32_t currentChunk_;
+ void setKind(gc::ChunkKind newKind);
- // These fields refer to the beginning of the nursery. They're normally 0
- // and chunk(0).start() respectively. Except when a generational GC zeal
- // mode is active, then they may be arbitrary (see Nursery::clear()).
- uint32_t startChunk_;
- uintptr_t startPosition_;
+ void clear(Nursery* nursery);
+ void moveToStartOfChunk(Nursery* nursery, unsigned chunkno);
+ void setCurrentEnd(Nursery* nursery);
+ void setStartToCurrentPosition();
+ bool commitSubChunkRegion(size_t oldCapacity, size_t newCapacity);
+ void decommitSubChunkRegion(Nursery* nursery, size_t oldCapacity,
+ size_t newCapacity);
+ void freeTrailerBlocks(gc::MallocedBlockCache& mallocedBlockCache);
+
+#ifdef DEBUG
+ void checkKind(gc::ChunkKind expected) const;
+ size_t findChunkIndex(uintptr_t chunkAddr) const;
+#endif
+ };
+
+ Space toSpace;
+ Space fromSpace;
+
+ gc::GCRuntime* const gc;
// The current nursery capacity measured in bytes. It may grow up to this
// value without a collection, allocating chunks on demand. This limit may be
// changed by maybeResizeNursery() each collection. It includes chunk headers.
size_t capacity_;
+ uintptr_t tenureThreshold_ = 0;
+
gc::PretenuringNursery pretenuringNursery;
mozilla::TimeDuration timeInChunkAlloc_;
@@ -553,6 +634,9 @@ class Nursery {
mozilla::TimeDuration profileThreshold_;
+ // Whether to use semispace collection.
+ bool semispaceEnabled_;
+
// Whether we will nursery-allocate strings.
bool canAllocateStrings_;
@@ -595,21 +679,6 @@ class Nursery {
bool hasRecentGrowthData;
double smoothedTargetSize;
- // The set of externally malloced buffers potentially kept live by objects
- // stored in the nursery. Any external buffers that do not belong to a
- // tenured thing at the end of a minor GC must be freed.
- using BufferRelocationOverlay = void*;
- using BufferSet = HashSet<void*, PointerHasher<void*>, SystemAllocPolicy>;
- BufferSet mallocedBuffers;
- size_t mallocedBufferBytes = 0;
-
- // Wasm "trailer" (C++-heap-allocated) blocks. See comments above on
- // ::registerTrailer and ::unregisterTrailer.
- Vector<PointerAndUint7, 0, SystemAllocPolicy> trailersAdded_;
- Vector<void*, 0, SystemAllocPolicy> trailersRemoved_;
- size_t trailersRemovedUsed_ = 0;
- size_t trailerBytes_ = 0;
-
// During a collection most hoisted slot and element buffers indicate their
// new location with a forwarding pointer at the base. This does not work
// for buffers whose length is less than pointer width, or when different
@@ -619,6 +688,8 @@ class Nursery {
HashMap<void*, void*, PointerHasher<void*>, SystemAllocPolicy>;
ForwardedBufferMap forwardedBuffers;
+ gc::CellSweepSet cellsToSweep;
+
// When we assign a unique id to cell in the nursery, that almost always
// means that the cell will be in a hash table, and thus, held live,
// automatically moving the uid from the nursery to its new home in
@@ -629,13 +700,15 @@ class Nursery {
// Note: we store the pointers as Cell* here, resulting in an ugly cast in
// sweep. This is because this structure is used to help implement
// stable object hashing and we have to break the cycle somehow.
- using CellsWithUniqueIdVector = Vector<gc::Cell*, 8, SystemAllocPolicy>;
+ using CellsWithUniqueIdVector = JS::GCVector<gc::Cell*, 8, SystemAllocPolicy>;
CellsWithUniqueIdVector cellsWithUid_;
// Lists of map and set objects allocated in the nursery or with iterators
// allocated there. Such objects need to be swept after minor GC.
- Vector<MapObject*, 0, SystemAllocPolicy> mapsWithNurseryMemory_;
- Vector<SetObject*, 0, SystemAllocPolicy> setsWithNurseryMemory_;
+ using MapObjectVector = Vector<MapObject*, 0, SystemAllocPolicy>;
+ MapObjectVector mapsWithNurseryMemory_;
+ using SetObjectVector = Vector<SetObject*, 0, SystemAllocPolicy>;
+ SetObjectVector setsWithNurseryMemory_;
UniquePtr<NurseryDecommitTask> decommitTask;
@@ -648,11 +721,30 @@ class Nursery {
// no correctness impact, only a performance impact.
gc::MallocedBlockCache mallocedBlockCache_;
+ // Whether the previous collection tenured everything. This may be false if
+ // semispace is in use.
+ bool tenuredEverything;
+
friend class gc::GCRuntime;
friend class gc::TenuringTracer;
friend struct NurseryChunk;
};
+MOZ_ALWAYS_INLINE bool Nursery::isInside(const void* p) const {
+ // TODO: Split this into separate methods.
+ // TODO: Do we ever need to check both?
+ return toSpace.isInside(p) || fromSpace.isInside(p);
+}
+
+MOZ_ALWAYS_INLINE bool Nursery::Space::isInside(const void* p) const {
+ for (auto* chunk : chunks_) {
+ if (uintptr_t(p) - uintptr_t(chunk) < gc::ChunkSize) {
+ return true;
+ }
+ }
+ return false;
+}
+
} // namespace js
#endif // gc_Nursery_h
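Nursery.h now holds two Space instances (toSpace and fromSpace) plus a swapSpaces() hook for the optional semispace mode. The following is a conceptual sketch only, with invented field names, of what exchanging the roles of the two spaces looks like; it is not the actual implementation:

    #include <cstdint>
    #include <utility>

    struct SpaceSketch {
      uintptr_t position = 0;    // next free byte in the space
      uintptr_t currentEnd = 0;  // end of the current chunk
    };

    struct NurserySketch {
      SpaceSketch toSpace;    // where new allocations go
      SpaceSketch fromSpace;  // the space being evacuated during a minor GC
      bool semispaceEnabled = false;

      void swapSpaces() {
        // Exchange the roles of the two spaces; allocation continues from the
        // space that was previously being collected.
        std::swap(toSpace, fromSpace);
      }
    };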
diff --git a/js/src/gc/NurseryAwareHashMap.h b/js/src/gc/NurseryAwareHashMap.h
index 35c2bebcea..4df50c1627 100644
--- a/js/src/gc/NurseryAwareHashMap.h
+++ b/js/src/gc/NurseryAwareHashMap.h
@@ -11,7 +11,8 @@
#include "gc/Tracer.h"
#include "js/GCHashTable.h"
#include "js/GCPolicyAPI.h"
-#include "js/HashTable.h"
+#include "js/GCVector.h"
+#include "js/Utility.h"
namespace js {
@@ -81,7 +82,8 @@ class NurseryAwareHashMap {
// Keep a list of all keys for which key->isTenured() is false. This lets us
// avoid a full traversal of the map on each minor GC, keeping the minor GC
// times proportional to the nursery heap size.
- Vector<Key, 0, AllocPolicy> nurseryEntries;
+ using KeyVector = GCVector<Key, 0, AllocPolicy>;
+ KeyVector nurseryEntries;
public:
using Lookup = typename MapType::Lookup;
@@ -127,16 +129,16 @@ class NurseryAwareHashMap {
}
void sweepAfterMinorGC(JSTracer* trc) {
- for (auto& key : nurseryEntries) {
+ nurseryEntries.mutableEraseIf([this, trc](Key& key) {
auto p = map.lookup(key);
if (!p) {
- continue;
+ return true;
}
// Drop the entry if the value is not marked.
if (!JS::GCPolicy<MapValue>::traceWeak(trc, &p->value())) {
map.remove(p);
- continue;
+ return true;
}
// Update and relocate the key, if the value is still needed.
@@ -147,33 +149,65 @@ class NurseryAwareHashMap {
// wrappee, as they are just copies. The wrapper map entry is merely used
// as a cache to avoid re-copying the string, and currently that entire
// cache is flushed on major GC.
- MapKey copy(key);
- if (!JS::GCPolicy<MapKey>::traceWeak(trc, &copy)) {
+ //
+ // Since |key| is a reference, this updates the content of the
+ // nurseryEntries vector.
+ Key prior = key;
+ if (!TraceManuallyBarrieredWeakEdge(trc, &key,
+ "NurseryAwareHashMap key")) {
map.remove(p);
- continue;
+ return true;
}
- if (AllowDuplicates) {
+ bool valueIsTenured = p->value().unbarrieredGet()->isTenured();
+
+ if constexpr (AllowDuplicates) {
// Drop duplicated keys.
//
// A key can be forwarded to another place. In this case, rekey the
// item. If two or more different keys are forwarded to the same new
// key, simply drop the later ones.
- if (key == copy) {
+ if (key == prior) {
// No rekey needed.
- } else if (map.has(copy)) {
+ } else if (map.has(key)) {
// Key was forwarded to the same place that another key was already
// forwarded to.
map.remove(p);
+ return true;
} else {
- map.rekeyAs(key, copy, copy);
+ map.rekeyAs(prior, key, key);
}
} else {
- MOZ_ASSERT(key == copy || !map.has(copy));
- map.rekeyIfMoved(key, copy);
+ MOZ_ASSERT(key == prior || !map.has(key));
+ map.rekeyIfMoved(prior, key);
+ }
+
+ return key->isTenured() && valueIsTenured;
+ });
+
+ checkNurseryEntries();
+ }
+
+ void checkNurseryEntries() const {
+#ifdef DEBUG
+ AutoEnterOOMUnsafeRegion oomUnsafe;
+ HashSet<Key, DefaultHasher<Key>, SystemAllocPolicy> set;
+ for (const auto& key : nurseryEntries) {
+ if (!set.put(key)) {
+ oomUnsafe.crash("NurseryAwareHashMap::checkNurseryEntries");
}
}
- nurseryEntries.clear();
+
+ for (auto i = map.iter(); !i.done(); i.next()) {
+ Key key = i.get().key().get();
+ MOZ_ASSERT(gc::IsCellPointerValid(key));
+ MOZ_ASSERT_IF(IsInsideNursery(key), set.has(key));
+
+ Value value = i.get().value().unbarrieredGet();
+ MOZ_ASSERT(gc::IsCellPointerValid(value));
+ MOZ_ASSERT_IF(IsInsideNursery(value), set.has(key));
+ }
+#endif
}
void traceWeak(JSTracer* trc) { map.traceWeak(trc); }
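sweepAfterMinorGC() above now drives the sweep through nurseryEntries.mutableEraseIf, whose predicate may rewrite the key in place and returns true to drop the entry. A small sketch of those semantics over std::vector (an approximation, not the real GCVector API):

    #include <cstddef>
    #include <utility>
    #include <vector>

    // Approximate mutableEraseIf: the predicate may update the element in
    // place; returning true removes it, returning false keeps the updated
    // element.
    template <typename T, typename Pred>
    void MutableEraseIf(std::vector<T>& vec, Pred pred) {
      size_t out = 0;
      for (size_t i = 0; i < vec.size(); i++) {
        if (!pred(vec[i])) {
          vec[out++] = std::move(vec[i]);
        }
      }
      vec.resize(out);
    }

    // Example: drop non-positive values and halve the rest in the same pass.
    // std::vector<int> v{4, -1, 8};
    // MutableEraseIf(v, [](int& x) { x /= 2; return x <= 0; });  // v == {2, 4}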
diff --git a/js/src/gc/Pretenuring.cpp b/js/src/gc/Pretenuring.cpp
index a191dbba90..b44bbc901f 100644
--- a/js/src/gc/Pretenuring.cpp
+++ b/js/src/gc/Pretenuring.cpp
@@ -49,12 +49,12 @@ static constexpr double LowYoungSurvivalThreshold = 0.05;
// that must occur before recovery is attempted.
static constexpr size_t LowYoungSurvivalCountBeforeRecovery = 2;
-// The proportion of the nursery that must be tenured above which a minor
+// The proportion of the nursery that must be promoted above which a minor
// collection may be determined to have a high nursery survival rate.
static constexpr double HighNurserySurvivalPromotionThreshold = 0.6;
// The number of nursery allocations made by optimized JIT code that must be
-// tenured above which a minor collection may be determined to have a high
+// promoted above which a minor collection may be determined to have a high
// nursery survival rate.
static constexpr size_t HighNurserySurvivalOptimizedAllocThreshold = 10000;
@@ -95,7 +95,7 @@ size_t PretenuringNursery::doPretenuring(GCRuntime* gc, JS::GCReason reason,
for (ZonesIter zone(gc, SkipAtoms); !zone.done(); zone.next()) {
bool highNurserySurvivalRate =
promotionRate > HighNurserySurvivalPromotionThreshold &&
- zone->optimizedAllocSite()->nurseryTenuredCount >=
+ zone->optimizedAllocSite()->nurseryPromotedCount >=
HighNurserySurvivalOptimizedAllocThreshold;
zone->pretenuring.noteHighNurserySurvivalRate(highNurserySurvivalRate);
if (highNurserySurvivalRate) {
@@ -136,6 +136,7 @@ size_t PretenuringNursery::doPretenuring(GCRuntime* gc, JS::GCReason reason,
// Catch-all sites don't end up on the list if they are only used from
// optimized JIT code, so process them here.
+
for (ZonesIter zone(gc, SkipAtoms); !zone.done(); zone.next()) {
for (auto& site : zone->pretenuring.unknownAllocSites) {
updateTotalAllocCounts(&site);
@@ -150,6 +151,11 @@ size_t PretenuringNursery::doPretenuring(GCRuntime* gc, JS::GCReason reason,
updateTotalAllocCounts(zone->optimizedAllocSite());
zone->optimizedAllocSite()->processCatchAllSite(reportInfo,
reportThreshold);
+
+ // The data from the promoted alloc sites is never used, so clear it here.
+ for (AllocSite& site : zone->pretenuring.promotedAllocSites) {
+ site.resetNurseryAllocations();
+ }
}
if (reportInfo) {
@@ -171,7 +177,7 @@ AllocSite::SiteResult AllocSite::processSite(GCRuntime* gc,
bool reportInfo,
size_t reportThreshold) {
MOZ_ASSERT(kind() != Kind::Optimized);
- MOZ_ASSERT(nurseryAllocCount >= nurseryTenuredCount);
+ MOZ_ASSERT(nurseryAllocCount >= nurseryPromotedCount);
SiteResult result = NoChange;
@@ -180,7 +186,7 @@ AllocSite::SiteResult AllocSite::processSite(GCRuntime* gc,
bool wasInvalidated = false;
if (nurseryAllocCount > attentionThreshold) {
- promotionRate = double(nurseryTenuredCount) / double(nurseryAllocCount);
+ promotionRate = double(nurseryPromotedCount) / double(nurseryAllocCount);
hasPromotionRate = true;
AllocSite::State prevState = state();
@@ -402,7 +408,7 @@ bool PretenuringZone::shouldResetPretenuredAllocSites() {
/* static */
void AllocSite::printInfoHeader(JS::GCReason reason, double promotionRate) {
fprintf(stderr, " %-16s %-16s %-20s %-8s %-8s %-6s %-10s\n", "site", "zone",
- "script/kind", "nallocs", "tenures", "prate", "state");
+ "script/kind", "nallocs", "promotes", "prate", "state");
}
/* static */
@@ -455,8 +461,8 @@ void AllocSite::printInfo(bool hasPromotionRate, double promotionRate,
}
fprintf(stderr, " %8s", buffer);
- // Nursery tenure count.
- fprintf(stderr, " %8" PRIu32, nurseryTenuredCount);
+ // Nursery promotion count.
+ fprintf(stderr, " %8" PRIu32, nurseryPromotedCount);
// Promotion rate, if there were enough allocations.
buffer[0] = '\0';
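The per-site decision in processSite() hinges on the ratio of promoted to allocated nursery cells. A tiny worked example of that arithmetic with made-up counts (the threshold comparison itself depends on the site kind and is omitted here):

    #include <cstdint>
    #include <cstdio>

    int main() {
      // Hypothetical counts for one allocation site.
      uint32_t nurseryAllocCount = 12000;
      uint32_t nurseryPromotedCount = 9000;

      double promotionRate =
          double(nurseryPromotedCount) / double(nurseryAllocCount);
      std::printf("promotion rate: %.2f\n", promotionRate);  // prints 0.75
      return 0;
    }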
diff --git a/js/src/gc/Pretenuring.h b/js/src/gc/Pretenuring.h
index 5aeb5c50d7..93677532e4 100644
--- a/js/src/gc/Pretenuring.h
+++ b/js/src/gc/Pretenuring.h
@@ -83,7 +83,7 @@ class AllocSite {
uint32_t nurseryAllocCount = 0;
// Number of nursery allocations that survived. Used during collection.
- uint32_t nurseryTenuredCount : 24;
+ uint32_t nurseryPromotedCount : 24;
// Number of times the script has been invalidated.
uint32_t invalidationCount : 4;
@@ -103,12 +103,12 @@ class AllocSite {
uintptr_t rawScript() const { return scriptAndState & ~STATE_MASK; }
public:
- AllocSite() : nurseryTenuredCount(0), invalidationCount(0), traceKind_(0) {}
+ AllocSite() : nurseryPromotedCount(0), invalidationCount(0), traceKind_(0) {}
// Create a dummy site to use for unknown allocations.
explicit AllocSite(JS::Zone* zone, JS::TraceKind kind)
: zone_(zone),
- nurseryTenuredCount(0),
+ nurseryPromotedCount(0),
invalidationCount(0),
traceKind_(uint32_t(kind)) {
MOZ_ASSERT(traceKind_ < NurseryTraceKinds);
@@ -126,7 +126,7 @@ class AllocSite {
void initUnknownSite(JS::Zone* zone, JS::TraceKind kind) {
MOZ_ASSERT(!zone_ && scriptAndState == uintptr_t(State::Unknown));
zone_ = zone;
- nurseryTenuredCount = 0;
+ nurseryPromotedCount = 0;
invalidationCount = 0;
traceKind_ = uint32_t(kind);
MOZ_ASSERT(traceKind_ < NurseryTraceKinds);
@@ -137,7 +137,7 @@ class AllocSite {
MOZ_ASSERT(!zone_ && scriptAndState == uintptr_t(State::Unknown));
zone_ = zone;
setScript(WasmScript);
- nurseryTenuredCount = 0;
+ nurseryPromotedCount = 0;
invalidationCount = 0;
traceKind_ = uint32_t(JS::TraceKind::Object);
}
@@ -179,24 +179,24 @@ class AllocSite {
}
bool hasNurseryAllocations() const {
- return nurseryAllocCount != 0 || nurseryTenuredCount != 0;
+ return nurseryAllocCount != 0 || nurseryPromotedCount != 0;
}
void resetNurseryAllocations() {
nurseryAllocCount = 0;
- nurseryTenuredCount = 0;
+ nurseryPromotedCount = 0;
}
uint32_t incAllocCount() { return ++nurseryAllocCount; }
uint32_t* nurseryAllocCountAddress() { return &nurseryAllocCount; }
- void incTenuredCount() {
+ void incPromotedCount() {
// The nursery is not large enough for this to overflow.
- nurseryTenuredCount++;
- MOZ_ASSERT(nurseryTenuredCount != 0);
+ nurseryPromotedCount++;
+ MOZ_ASSERT(nurseryPromotedCount != 0);
}
size_t allocCount() const {
- return std::max(nurseryAllocCount, nurseryTenuredCount);
+ return std::max(nurseryAllocCount, nurseryPromotedCount);
}
// Called for every active alloc site after minor GC.
@@ -259,6 +259,10 @@ class PretenuringZone {
// not recorded by optimized JIT code.
AllocSite optimizedAllocSite;
+ // Allocation sites used for nursery cells promoted to the next nursery
+ // generation that didn't come from optimized alloc sites.
+ AllocSite promotedAllocSites[NurseryTraceKinds];
+
// Count of tenured cell allocations made between each major collection and
// how many survived.
uint32_t allocCountInNewlyCreatedArenas = 0;
@@ -282,6 +286,7 @@ class PretenuringZone {
: optimizedAllocSite(zone, JS::TraceKind::Object) {
for (uint32_t i = 0; i < NurseryTraceKinds; i++) {
unknownAllocSites[i].initUnknownSite(zone, JS::TraceKind(i));
+ promotedAllocSites[i].initUnknownSite(zone, JS::TraceKind(i));
}
}
@@ -291,6 +296,12 @@ class PretenuringZone {
return unknownAllocSites[i];
}
+ AllocSite& promotedAllocSite(JS::TraceKind kind) {
+ size_t i = size_t(kind);
+ MOZ_ASSERT(i < NurseryTraceKinds);
+ return promotedAllocSites[i];
+ }
+
void clearCellCountsInNewlyCreatedArenas() {
allocCountInNewlyCreatedArenas = 0;
survivorCountInNewlyCreatedArenas = 0;
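PretenuringZone now keeps a catch-all AllocSite per nursery trace kind for cells promoted to the next nursery generation, indexed by the kind and reset each cycle (see the doPretenuring change above). A simplified sketch of that per-kind table, with illustrative kinds and counters rather than the real AllocSite layout:

    #include <cstddef>
    #include <cstdint>

    enum class TraceKindSketch : size_t { Object, String, BigInt, Count };

    struct AllocSiteSketch {
      uint32_t nurseryAllocCount = 0;
      uint32_t nurseryPromotedCount = 0;
      void resetNurseryAllocations() {
        nurseryAllocCount = 0;
        nurseryPromotedCount = 0;
      }
    };

    struct PretenuringZoneSketch {
      AllocSiteSketch promotedAllocSites[size_t(TraceKindSketch::Count)];

      AllocSiteSketch& promotedAllocSite(TraceKindSketch kind) {
        return promotedAllocSites[size_t(kind)];
      }

      // The promoted-site counts are not consumed, so they are simply reset
      // after each pretenuring pass.
      void resetPromotedSites() {
        for (AllocSiteSketch& site : promotedAllocSites) {
          site.resetNurseryAllocations();
        }
      }
    };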
diff --git a/js/src/gc/PublicIterators.h b/js/src/gc/PublicIterators.h
index d1072cfe98..7ef6271f63 100644
--- a/js/src/gc/PublicIterators.h
+++ b/js/src/gc/PublicIterators.h
@@ -35,7 +35,7 @@ class ZonesIter {
public:
ZonesIter(gc::GCRuntime* gc, ZoneSelector selector)
: iterMarker(gc), it(gc->zones().begin()), end(gc->zones().end()) {
- if (selector == SkipAtoms) {
+ if (selector == SkipAtoms && !done()) {
MOZ_ASSERT(get()->isAtomsZone());
next();
}
diff --git a/js/src/gc/Scheduling.h b/js/src/gc/Scheduling.h
index cbaeb1f353..c5ed56dd5f 100644
--- a/js/src/gc/Scheduling.h
+++ b/js/src/gc/Scheduling.h
@@ -536,6 +536,9 @@ static const bool ParallelMarkingEnabled = false;
/* JSGC_INCREMENTAL_WEAKMAP_ENABLED */
static const bool IncrementalWeakMapMarkingEnabled = true;
+/* JSGC_SEMISPACE_NURSERY_ENABLED */
+static const bool SemispaceNurseryEnabled = false;
+
/* JSGC_HELPER_THREAD_RATIO */
static const double HelperThreadRatio = 0.5;
diff --git a/js/src/gc/StableCellHasher-inl.h b/js/src/gc/StableCellHasher-inl.h
index af0caaad89..34a8827cfb 100644
--- a/js/src/gc/StableCellHasher-inl.h
+++ b/js/src/gc/StableCellHasher-inl.h
@@ -132,7 +132,6 @@ inline bool HasUniqueId(Cell* cell) {
inline void TransferUniqueId(Cell* tgt, Cell* src) {
MOZ_ASSERT(src != tgt);
- MOZ_ASSERT(!IsInsideNursery(tgt));
MOZ_ASSERT(CurrentThreadCanAccessRuntime(tgt->runtimeFromAnyThread()));
MOZ_ASSERT(src->zone() == tgt->zone());
diff --git a/js/src/gc/StoreBuffer-inl.h b/js/src/gc/StoreBuffer-inl.h
index 1c97654761..343b953e3a 100644
--- a/js/src/gc/StoreBuffer-inl.h
+++ b/js/src/gc/StoreBuffer-inl.h
@@ -49,9 +49,10 @@ inline void ArenaCellSet::check() const {
MOZ_ASSERT(isEmpty() == !arena);
if (!isEmpty()) {
MOZ_ASSERT(IsCellPointerValid(arena));
- MOZ_ASSERT(arena->bufferedCells() == this);
JSRuntime* runtime = arena->zone->runtimeFromMainThread();
- MOZ_ASSERT(runtime->gc.minorGCCount() == minorGCNumberAtCreation);
+ uint64_t minorGCCount = runtime->gc.minorGCCount();
+ MOZ_ASSERT(minorGCCount == minorGCNumberAtCreation ||
+ minorGCCount == minorGCNumberAtCreation + 1);
}
#endif
}
diff --git a/js/src/gc/StoreBuffer.cpp b/js/src/gc/StoreBuffer.cpp
index d637eb62b1..1f8023bc62 100644
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -20,7 +20,8 @@ void StoreBuffer::checkAccess() const {
// The GC runs tasks that may access the storebuffer in parallel and so must
// take a lock. The mutator may only access the storebuffer from the main
// thread.
- if (runtime_->heapState() != JS::HeapState::Idle) {
+ if (runtime_->heapState() != JS::HeapState::Idle &&
+ runtime_->heapState() != JS::HeapState::MinorCollecting) {
MOZ_ASSERT(!CurrentThreadIsGCMarking());
runtime_->gc.assertCurrentThreadHasLockedStoreBuffer();
} else {
@@ -30,8 +31,7 @@ void StoreBuffer::checkAccess() const {
#endif
bool StoreBuffer::WholeCellBuffer::init() {
- MOZ_ASSERT(!stringHead_);
- MOZ_ASSERT(!nonStringHead_);
+ MOZ_ASSERT(!head_);
if (!storage_) {
storage_ = MakeUnique<LifoAlloc>(LifoAllocBlockSize);
// This prevents LifoAlloc::Enum from crashing with a release
@@ -75,8 +75,7 @@ StoreBuffer::StoreBuffer(JSRuntime* rt)
mayHavePointersToDeadCells_(false)
#ifdef DEBUG
,
- mEntered(false),
- markingNondeduplicatable(false)
+ mEntered(false)
#endif
{
}
@@ -97,13 +96,11 @@ StoreBuffer::StoreBuffer(StoreBuffer&& other)
mayHavePointersToDeadCells_(other.mayHavePointersToDeadCells_)
#ifdef DEBUG
,
- mEntered(other.mEntered),
- markingNondeduplicatable(other.markingNondeduplicatable)
+ mEntered(other.mEntered)
#endif
{
MOZ_ASSERT(enabled_);
MOZ_ASSERT(!mEntered);
- MOZ_ASSERT(!markingNondeduplicatable);
other.disable();
}
@@ -213,21 +210,14 @@ ArenaCellSet* StoreBuffer::WholeCellBuffer::allocateCellSet(Arena* arena) {
return nullptr;
}
- // Maintain separate lists for strings and non-strings, so that all buffered
- // string whole cells will be processed before anything else (to prevent them
- // from being deduplicated when their chars are used by a tenured string.)
- bool isString =
- MapAllocToTraceKind(arena->getAllocKind()) == JS::TraceKind::String;
-
AutoEnterOOMUnsafeRegion oomUnsafe;
- ArenaCellSet*& head = isString ? stringHead_ : nonStringHead_;
- auto* cells = storage_->new_<ArenaCellSet>(arena, head);
+ auto* cells = storage_->new_<ArenaCellSet>(arena, head_);
if (!cells) {
oomUnsafe.crash("Failed to allocate ArenaCellSet");
}
arena->bufferedCells() = cells;
- head = cells;
+ head_ = cells;
if (isAboutToOverflow()) {
rt->gc.storeBuffer().setAboutToOverflow(
@@ -243,12 +233,10 @@ void gc::CellHeaderPostWriteBarrier(JSObject** ptr, JSObject* prev,
}
void StoreBuffer::WholeCellBuffer::clear() {
- for (auto** headPtr : {&stringHead_, &nonStringHead_}) {
- for (auto* set = *headPtr; set; set = set->next) {
- set->arena->bufferedCells() = &ArenaCellSet::Empty;
- }
- *headPtr = nullptr;
+ for (auto* set = head_; set; set = set->next) {
+ set->arena->bufferedCells() = &ArenaCellSet::Empty;
}
+ head_ = nullptr;
if (storage_) {
storage_->used() ? storage_->releaseAll() : storage_->freeAll();
diff --git a/js/src/gc/StoreBuffer.h b/js/src/gc/StoreBuffer.h
index 2e117f02c8..0b1ca8af9b 100644
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -169,8 +169,7 @@ class StoreBuffer {
struct WholeCellBuffer {
UniquePtr<LifoAlloc> storage_;
- ArenaCellSet* stringHead_ = nullptr;
- ArenaCellSet* nonStringHead_ = nullptr;
+ ArenaCellSet* head_ = nullptr;
const Cell* last_ = nullptr;
WholeCellBuffer() = default;
@@ -180,11 +179,9 @@ class StoreBuffer {
WholeCellBuffer(WholeCellBuffer&& other)
: storage_(std::move(other.storage_)),
- stringHead_(other.stringHead_),
- nonStringHead_(other.nonStringHead_),
+ head_(other.head_),
last_(other.last_) {
- other.stringHead_ = nullptr;
- other.nonStringHead_ = nullptr;
+ other.head_ = nullptr;
other.last_ = nullptr;
}
WholeCellBuffer& operator=(WholeCellBuffer&& other) {
@@ -214,13 +211,20 @@ class StoreBuffer {
}
bool isEmpty() const {
- MOZ_ASSERT_IF(!stringHead_ && !nonStringHead_,
- !storage_ || storage_->isEmpty());
- return !stringHead_ && !nonStringHead_;
+ MOZ_ASSERT_IF(!head_, !storage_ || storage_->isEmpty());
+ return !head_;
}
const Cell** lastBufferedPtr() { return &last_; }
+ CellSweepSet releaseCellSweepSet() {
+ CellSweepSet set;
+ std::swap(storage_, set.storage_);
+ std::swap(head_, set.head_);
+ last_ = nullptr;
+ return set;
+ }
+
private:
ArenaCellSet* allocateCellSet(Arena* arena);
};
@@ -339,9 +343,10 @@ class StoreBuffer {
bool operator==(const ValueEdge& other) const { return edge == other.edge; }
bool operator!=(const ValueEdge& other) const { return edge != other.edge; }
+ bool isGCThing() const { return edge->isGCThing(); }
+
Cell* deref() const {
- return edge->isGCThing() ? static_cast<Cell*>(edge->toGCThing())
- : nullptr;
+ return isGCThing() ? static_cast<Cell*>(edge->toGCThing()) : nullptr;
}
bool maybeInRememberedSet(const Nursery& nursery) const {
@@ -449,10 +454,10 @@ class StoreBuffer {
return edge != other.edge;
}
+ bool isGCThing() const { return edge->isGCThing(); }
+
Cell* deref() const {
- return edge->isGCThing() ? static_cast<Cell*>(edge->toGCThing())
- : nullptr;
- return nullptr;
+ return isGCThing() ? static_cast<Cell*>(edge->toGCThing()) : nullptr;
}
bool maybeInRememberedSet(const Nursery& nursery) const {
@@ -525,10 +530,6 @@ class StoreBuffer {
#endif
public:
-#ifdef DEBUG
- bool markingNondeduplicatable;
-#endif
-
explicit StoreBuffer(JSRuntime* rt);
StoreBuffer(const StoreBuffer& other) = delete;
@@ -629,6 +630,10 @@ class StoreBuffer {
}
void traceGenericEntries(JSTracer* trc) { bufferGeneric.trace(trc, this); }
+ gc::CellSweepSet releaseCellSweepSet() {
+ return bufferWholeCell.releaseCellSweepSet();
+ }
+
/* For use by our owned buffers and for testing. */
void setAboutToOverflow(JS::GCReason);
@@ -639,6 +644,7 @@ class StoreBuffer {
};
// A set of cells in an arena used to implement the whole cell store buffer.
+// Also used to store a set of cells that need to be swept.
class ArenaCellSet {
friend class StoreBuffer;
@@ -693,7 +699,17 @@ class ArenaCellSet {
WordT getWord(size_t wordIndex) const { return bits.getWord(wordIndex); }
- void trace(TenuringTracer& mover);
+ void setWord(size_t wordIndex, WordT value) {
+ bits.setWord(wordIndex, value);
+ }
+
+ // Returns the list of ArenaCellSets that need to be swept.
+ ArenaCellSet* trace(TenuringTracer& mover);
+
+ // At the end of a minor GC, sweep through all tenured dependent strings that
+ // may point to nursery-allocated chars to update their pointers in case the
+ // base string moved its chars.
+ void sweepDependentStrings();
// Sentinel object used for all empty sets.
//
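releaseCellSweepSet() above hands the whole-cell buffer's storage and list head over to a CellSweepSet so the nursery can sweep the recorded cells after tracing and then free everything at once. A simplified sketch of that ownership transfer, using stand-in types for LifoAlloc and ArenaCellSet:

    #include <memory>
    #include <utility>

    struct ArenaCellSetSketch {
      ArenaCellSetSketch* next = nullptr;
      void sweepDependentStrings() { /* fix up chars pointers here */ }
    };

    struct CellSweepSetSketch {
      std::unique_ptr<char[]> storage;   // stands in for UniquePtr<LifoAlloc>
      ArenaCellSetSketch* head = nullptr;

      void sweep() {
        for (auto* set = head; set; set = set->next) {
          set->sweepDependentStrings();
        }
        head = nullptr;
        storage.reset();  // releasing the storage frees all the cell sets
      }
    };

    struct WholeCellBufferSketch {
      std::unique_ptr<char[]> storage;
      ArenaCellSetSketch* head = nullptr;

      CellSweepSetSketch releaseCellSweepSet() {
        CellSweepSetSketch out;
        std::swap(storage, out.storage);  // buffer is left empty
        std::swap(head, out.head);
        return out;
      }
    };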
diff --git a/js/src/gc/Sweeping.cpp b/js/src/gc/Sweeping.cpp
index 123b2c9650..2cd6a2c662 100644
--- a/js/src/gc/Sweeping.cpp
+++ b/js/src/gc/Sweeping.cpp
@@ -441,6 +441,12 @@ void GCRuntime::waitBackgroundSweepEnd() {
void GCRuntime::startBackgroundFree() {
AutoLockHelperThreadState lock;
+
+ if (lifoBlocksToFree.ref().isEmpty() &&
+ buffersToFreeAfterMinorGC.ref().empty()) {
+ return;
+ }
+
freeTask.startOrRunIfIdle(lock);
}
@@ -1194,18 +1200,19 @@ class ImmediateSweepWeakCacheTask : public GCParallelTask {
};
void GCRuntime::updateAtomsBitmap() {
- DenseBitmap marked;
- if (atomMarking.computeBitmapFromChunkMarkBits(rt, marked)) {
- for (GCZonesIter zone(this); !zone.done(); zone.next()) {
- atomMarking.refineZoneBitmapForCollectedZone(zone, marked);
+ size_t collectedZones = 0;
+ size_t uncollectedZones = 0;
+ for (ZonesIter zone(this, SkipAtoms); !zone.done(); zone.next()) {
+ if (zone->isCollecting()) {
+ collectedZones++;
+ } else {
+ uncollectedZones++;
}
- } else {
- // Ignore OOM in computeBitmapFromChunkMarkBits. The
- // refineZoneBitmapForCollectedZone call can only remove atoms from the
- // zone bitmap, so it is conservative to just not call it.
}
- atomMarking.markAtomsUsedByUncollectedZones(rt);
+ atomMarking.refineZoneBitmapsForCollectedZones(this, collectedZones);
+
+ atomMarking.markAtomsUsedByUncollectedZones(this, uncollectedZones);
// For convenience sweep these tables non-incrementally as part of bitmap
// sweeping; they are likely to be much smaller than the main atoms table.
diff --git a/js/src/gc/Tenuring.cpp b/js/src/gc/Tenuring.cpp
index a9506cfa14..d38a374599 100644
--- a/js/src/gc/Tenuring.cpp
+++ b/js/src/gc/Tenuring.cpp
@@ -18,6 +18,7 @@
#include "gc/Pretenuring.h"
#include "gc/Zone.h"
#include "jit/JitCode.h"
+#include "js/TypeDecls.h"
#include "proxy/Proxy.h"
#include "vm/BigIntType.h"
#include "vm/JSScript.h"
@@ -44,104 +45,120 @@ using mozilla::PodCopy;
constexpr size_t MAX_DEDUPLICATABLE_STRING_LENGTH = 500;
-TenuringTracer::TenuringTracer(JSRuntime* rt, Nursery* nursery)
+TenuringTracer::TenuringTracer(JSRuntime* rt, Nursery* nursery,
+ bool tenureEverything)
: JSTracer(rt, JS::TracerKind::Tenuring,
JS::WeakMapTraceAction::TraceKeysAndValues),
- nursery_(*nursery) {
+ nursery_(*nursery),
+ tenureEverything(tenureEverything) {
stringDeDupSet.emplace();
}
-size_t TenuringTracer::getTenuredSize() const {
- return tenuredSize + tenuredCells * sizeof(NurseryCellHeader);
+size_t TenuringTracer::getPromotedSize() const {
+ return promotedSize + promotedCells * sizeof(NurseryCellHeader);
}
-size_t TenuringTracer::getTenuredCells() const { return tenuredCells; }
-
-static inline void UpdateAllocSiteOnTenure(Cell* cell) {
- AllocSite* site = NurseryCellHeader::from(cell)->allocSite();
- site->incTenuredCount();
-}
+size_t TenuringTracer::getPromotedCells() const { return promotedCells; }
void TenuringTracer::onObjectEdge(JSObject** objp, const char* name) {
JSObject* obj = *objp;
- if (!IsInsideNursery(obj)) {
+ if (!nursery_.inCollectedRegion(obj)) {
+ MOZ_ASSERT(!obj->isForwarded());
return;
}
+ *objp = promoteOrForward(obj);
+ MOZ_ASSERT(!(*objp)->isForwarded());
+}
+
+JSObject* TenuringTracer::promoteOrForward(JSObject* obj) {
+ MOZ_ASSERT(nursery_.inCollectedRegion(obj));
+
if (obj->isForwarded()) {
const gc::RelocationOverlay* overlay = gc::RelocationOverlay::fromCell(obj);
- *objp = static_cast<JSObject*>(overlay->forwardingAddress());
- return;
+ obj = static_cast<JSObject*>(overlay->forwardingAddress());
+ if (IsInsideNursery(obj)) {
+ promotedToNursery = true;
+ }
+ return obj;
}
- onNonForwardedNurseryObjectEdge(objp);
+ return onNonForwardedNurseryObject(obj);
}
-void TenuringTracer::onNonForwardedNurseryObjectEdge(JSObject** objp) {
- JSObject* obj = *objp;
+JSObject* TenuringTracer::onNonForwardedNurseryObject(JSObject* obj) {
MOZ_ASSERT(IsInsideNursery(obj));
MOZ_ASSERT(!obj->isForwarded());
- UpdateAllocSiteOnTenure(obj);
-
- // Take a fast path for tenuring a plain object which is by far the most
+ // Take a fast path for promoting a plain object as this is by far the most
// common case.
if (obj->is<PlainObject>()) {
- *objp = movePlainObjectToTenured(&obj->as<PlainObject>());
- return;
+ return promotePlainObject(&obj->as<PlainObject>());
}
- *objp = moveToTenuredSlow(obj);
+ return promoteObjectSlow(obj);
}
void TenuringTracer::onStringEdge(JSString** strp, const char* name) {
JSString* str = *strp;
- if (!IsInsideNursery(str)) {
+ if (!nursery_.inCollectedRegion(str)) {
return;
}
+ *strp = promoteOrForward(str);
+}
+
+JSString* TenuringTracer::promoteOrForward(JSString* str) {
+ MOZ_ASSERT(nursery_.inCollectedRegion(str));
+
if (str->isForwarded()) {
const gc::RelocationOverlay* overlay = gc::RelocationOverlay::fromCell(str);
- *strp = static_cast<JSString*>(overlay->forwardingAddress());
- return;
+ str = static_cast<JSString*>(overlay->forwardingAddress());
+ if (IsInsideNursery(str)) {
+ promotedToNursery = true;
+ }
+ return str;
}
- onNonForwardedNurseryStringEdge(strp);
+ return onNonForwardedNurseryString(str);
}
-void TenuringTracer::onNonForwardedNurseryStringEdge(JSString** strp) {
- JSString* str = *strp;
+JSString* TenuringTracer::onNonForwardedNurseryString(JSString* str) {
MOZ_ASSERT(IsInsideNursery(str));
MOZ_ASSERT(!str->isForwarded());
- UpdateAllocSiteOnTenure(str);
-
- *strp = moveToTenured(str);
+ return promoteString(str);
}
void TenuringTracer::onBigIntEdge(JS::BigInt** bip, const char* name) {
JS::BigInt* bi = *bip;
- if (!IsInsideNursery(bi)) {
+ if (!nursery_.inCollectedRegion(bi)) {
return;
}
+ *bip = promoteOrForward(bi);
+}
+
+JS::BigInt* TenuringTracer::promoteOrForward(JS::BigInt* bi) {
+ MOZ_ASSERT(nursery_.inCollectedRegion(bi));
+
if (bi->isForwarded()) {
const gc::RelocationOverlay* overlay = gc::RelocationOverlay::fromCell(bi);
- *bip = static_cast<JS::BigInt*>(overlay->forwardingAddress());
- return;
+ bi = static_cast<JS::BigInt*>(overlay->forwardingAddress());
+ if (IsInsideNursery(bi)) {
+ promotedToNursery = true;
+ }
+ return bi;
}
- onNonForwardedNurseryBigIntEdge(bip);
+ return onNonForwardedNurseryBigInt(bi);
}
-void TenuringTracer::onNonForwardedNurseryBigIntEdge(JS::BigInt** bip) {
- JS::BigInt* bi = *bip;
+JS::BigInt* TenuringTracer::onNonForwardedNurseryBigInt(JS::BigInt* bi) {
MOZ_ASSERT(IsInsideNursery(bi));
MOZ_ASSERT(!bi->isForwarded());
- UpdateAllocSiteOnTenure(bi);
-
- *bip = moveToTenured(bi);
+ return promoteBigInt(bi);
}
void TenuringTracer::onSymbolEdge(JS::Symbol** symp, const char* name) {}
@@ -156,7 +173,7 @@ void TenuringTracer::onJitCodeEdge(jit::JitCode** codep, const char* name) {}
void TenuringTracer::onScopeEdge(Scope** scopep, const char* name) {}
void TenuringTracer::traverse(JS::Value* thingp) {
- MOZ_ASSERT(!nursery().isInside(thingp));
+ MOZ_ASSERT(!nursery().inCollectedRegion(thingp));
Value value = *thingp;
CheckTracedThing(this, value);
@@ -166,66 +183,71 @@ void TenuringTracer::traverse(JS::Value* thingp) {
}
Cell* cell = value.toGCThing();
- if (!IsInsideNursery(cell)) {
+ if (!nursery_.inCollectedRegion(cell)) {
return;
}
if (cell->isForwarded()) {
const gc::RelocationOverlay* overlay =
gc::RelocationOverlay::fromCell(cell);
- thingp->changeGCThingPayload(overlay->forwardingAddress());
+ Cell* target = overlay->forwardingAddress();
+ thingp->changeGCThingPayload(target);
+ if (IsInsideNursery(target)) {
+ promotedToNursery = true;
+ }
return;
}
// We only care about a few kinds of GC thing here and this generates much
// tighter code than using MapGCThingTyped.
if (value.isObject()) {
- JSObject* obj = &value.toObject();
- onNonForwardedNurseryObjectEdge(&obj);
+ JSObject* obj = onNonForwardedNurseryObject(&value.toObject());
MOZ_ASSERT(obj != &value.toObject());
*thingp = JS::ObjectValue(*obj);
return;
}
#ifdef ENABLE_RECORD_TUPLE
if (value.isExtendedPrimitive()) {
- JSObject* obj = &value.toExtendedPrimitive();
- onNonForwardedNurseryObjectEdge(&obj);
+ JSObject* obj = onNonForwardedNurseryObject(&value.toExtendedPrimitive());
MOZ_ASSERT(obj != &value.toExtendedPrimitive());
*thingp = JS::ExtendedPrimitiveValue(*obj);
return;
}
#endif
if (value.isString()) {
- JSString* str = value.toString();
- onNonForwardedNurseryStringEdge(&str);
+ JSString* str = onNonForwardedNurseryString(value.toString());
MOZ_ASSERT(str != value.toString());
*thingp = JS::StringValue(str);
return;
}
MOZ_ASSERT(value.isBigInt());
- JS::BigInt* bi = value.toBigInt();
- onNonForwardedNurseryBigIntEdge(&bi);
+ JS::BigInt* bi = onNonForwardedNurseryBigInt(value.toBigInt());
MOZ_ASSERT(bi != value.toBigInt());
*thingp = JS::BigIntValue(bi);
}
void TenuringTracer::traverse(wasm::AnyRef* thingp) {
- MOZ_ASSERT(!nursery().isInside(thingp));
+ MOZ_ASSERT(!nursery().inCollectedRegion(thingp));
wasm::AnyRef value = *thingp;
CheckTracedThing(this, value);
+ Cell* cell = value.toGCThing();
+ if (!nursery_.inCollectedRegion(cell)) {
+ return;
+ }
+
wasm::AnyRef post = wasm::AnyRef::invalid();
switch (value.kind()) {
case wasm::AnyRefKind::Object: {
- JSObject* obj = &value.toJSObject();
- onObjectEdge(&obj, "value");
+ JSObject* obj = promoteOrForward(&value.toJSObject());
+ MOZ_ASSERT(obj != &value.toJSObject());
post = wasm::AnyRef::fromJSObject(*obj);
break;
}
case wasm::AnyRefKind::String: {
- JSString* str = value.toJSString();
- onStringEdge(&str, "string");
+ JSString* str = promoteOrForward(value.toJSString());
+ MOZ_ASSERT(str != value.toJSString());
post = wasm::AnyRef::fromJSString(str);
break;
}
@@ -236,11 +258,20 @@ void TenuringTracer::traverse(wasm::AnyRef* thingp) {
}
}
- if (post != value) {
- *thingp = post;
- }
+ *thingp = post;
}
+class MOZ_RAII TenuringTracer::AutoPromotedAnyToNursery {
+ public:
+ explicit AutoPromotedAnyToNursery(TenuringTracer& trc) : trc_(trc) {
+ trc.promotedToNursery = false;
+ }
+ explicit operator bool() const { return trc_.promotedToNursery; }
+
+ private:
+ TenuringTracer& trc_;
+};
+
template <typename T>
void js::gc::StoreBuffer::MonoTypeBuffer<T>::trace(TenuringTracer& mover,
StoreBuffer* owner) {
@@ -279,6 +310,8 @@ void js::gc::StoreBuffer::SlotsEdge::trace(TenuringTracer& mover) const {
MOZ_ASSERT(!IsInsideNursery(obj), "obj shouldn't live in nursery.");
+ TenuringTracer::AutoPromotedAnyToNursery promotedToNursery(mover);
+
if (kind() == ElementKind) {
uint32_t initLen = obj->getDenseInitializedLength();
uint32_t numShifted = obj->getElementsHeader()->numShiftedElements();
@@ -289,99 +322,60 @@ void js::gc::StoreBuffer::SlotsEdge::trace(TenuringTracer& mover) const {
clampedEnd = numShifted < clampedEnd ? clampedEnd - numShifted : 0;
clampedEnd = std::min(clampedEnd, initLen);
MOZ_ASSERT(clampedStart <= clampedEnd);
- mover.traceSlots(
- static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart)
- ->unbarrieredAddress(),
- clampedEnd - clampedStart);
+ auto* slotStart =
+ static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart);
+ uint32_t nslots = clampedEnd - clampedStart;
+ mover.traceObjectElements(slotStart->unbarrieredAddress(), nslots);
} else {
uint32_t start = std::min(start_, obj->slotSpan());
uint32_t end = std::min(start_ + count_, obj->slotSpan());
MOZ_ASSERT(start <= end);
mover.traceObjectSlots(obj, start, end);
}
+
+ if (promotedToNursery) {
+ mover.runtime()->gc.storeBuffer().putSlot(obj, kind(), start_, count_);
+ }
}
static inline void TraceWholeCell(TenuringTracer& mover, JSObject* object) {
- MOZ_ASSERT_IF(object->storeBuffer(),
- !object->storeBuffer()->markingNondeduplicatable);
mover.traceObject(object);
}
-// Non-deduplicatable marking is necessary because of the following 2 reasons:
+// Return whether the string needs to be swept.
//
-// 1. Tenured string chars cannot be updated:
+// We can break down the relevant dependency chains as follows:
//
-// If any of the tenured string's bases were deduplicated during tenuring,
-// the tenured string's chars pointer would need to be adjusted. This would
-// then require updating any other tenured strings that are dependent on the
-// first tenured string, and we have no way to find them without scanning
-// the entire tenured heap.
+// T -> T2 : will not be swept, but safe because T2.chars is fixed.
+// T -> N1 -> ... -> T2 : safe because T2.chars is fixed
+// T -> N1 -> ... -> N2 : update T.chars += tenured(N2).chars - N2.chars
//
-// 2. Tenured string cannot store its nursery base or base's chars:
+// Collapse the base chain down to simply T -> T2 or T -> N2. The pointer update
+// will happen during sweeping.
//
-// Tenured strings have no place to stash a pointer to their nursery base or
-// its chars. You need to be able to traverse any dependent string's chain
-// of bases up to a nursery "root base" that points to the malloced chars
-// that the dependent strings started out pointing to, so that you can
-// calculate the offset of any dependent string and update the ptr+offset if
-// the root base gets deduplicated to a different allocation. Tenured
-// strings in this base chain will stop you from reaching the nursery
-// version of the root base; you can only get to the tenured version, and it
-// has no place to store the original chars pointer.
-static inline void PreventDeduplicationOfReachableStrings(JSString* str) {
- MOZ_ASSERT(str->isTenured());
- MOZ_ASSERT(!str->isForwarded());
-
- JSLinearString* baseOrRelocOverlay = str->nurseryBaseOrRelocOverlay();
-
- // Walk along the chain of dependent strings' base string pointers
- // to mark them all non-deduplicatable.
- while (true) {
- // baseOrRelocOverlay can be one of the three cases:
- // 1. forwarded nursery string:
- // The forwarded string still retains the flag that can tell whether
- // this string is a dependent string with a base. Its
- // StringRelocationOverlay holds a saved pointer to its base in the
- // nursery.
- // 2. not yet forwarded nursery string:
- // Retrieve the base field directly from the string.
- // 3. tenured string:
- // The nursery base chain ends here, so stop traversing.
- if (baseOrRelocOverlay->isForwarded()) {
- JSLinearString* tenuredBase = Forwarded(baseOrRelocOverlay);
- if (!tenuredBase->hasBase()) {
- break;
- }
- baseOrRelocOverlay = StringRelocationOverlay::fromCell(baseOrRelocOverlay)
- ->savedNurseryBaseOrRelocOverlay();
- } else {
- JSLinearString* base = baseOrRelocOverlay;
- if (base->isTenured()) {
- break;
- }
- if (base->isDeduplicatable()) {
- base->setNonDeduplicatable();
- }
- if (!base->hasBase()) {
- break;
- }
- baseOrRelocOverlay = base->nurseryBaseOrRelocOverlay();
- }
- }
-}
-
-static inline void TraceWholeCell(TenuringTracer& mover, JSString* str) {
- MOZ_ASSERT_IF(str->storeBuffer(),
- str->storeBuffer()->markingNondeduplicatable);
-
- // Mark all strings reachable from the tenured string `str` as
- // non-deduplicatable. These strings are the bases of the tenured dependent
- // string.
+// Note that in cases like T -> N1 -> T2 -> T3 -> N2, both T -> N1 and T3 -> N2
+// will be processed by the whole cell buffer (or rather, only T and T3 will
+// be in the store buffer). The order in which these strings are visited does
+// not matter because the nursery bases are left alone until sweeping.
+static inline bool TraceWholeCell(TenuringTracer& mover, JSString* str) {
if (str->hasBase()) {
- PreventDeduplicationOfReachableStrings(str);
+ // For tenured dependent strings -> nursery string edges, sweep the
+ // (tenured) strings at the end of nursery marking to update chars pointers
+    // that were in the nursery. Rather than updating the base pointer to point
+    // directly at the base's tenured version, we leave it pointing at the
+    // nursery Cell (which will become a StringRelocationOverlay during the
+    // minor GC).
+ JSLinearString* base = str->nurseryBaseOrRelocOverlay();
+ if (IsInsideNursery(base)) {
+ str->traceBaseFromStoreBuffer(&mover);
+ return IsInsideNursery(str->nurseryBaseOrRelocOverlay());
+ }
}
str->traceChildren(&mover);
+
+ return false;
}
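As a rough illustration of the chain-collapse step described in the comment above this function, here is a minimal sketch with simplified stand-in types (DemoString is not the real JSString/JSLinearString API):

// Minimal sketch only: DemoString stands in for a dependent linear string.
struct DemoString {
  DemoString* base = nullptr;   // nullptr means this string owns its chars
  const char* chars = nullptr;  // points into the root base's buffer
};

// Walk T -> N1 -> ... -> X and repoint T directly at the root base X, so the
// later sweep only ever has to handle a single T -> N2 (or T -> T2) link.
inline DemoString* CollapseBaseChain(DemoString* str) {
  DemoString* root = str->base;  // str is assumed dependent: base != nullptr
  while (root->base) {
    root = root->base;
  }
  str->base = root;
  return root;
}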
static inline void TraceWholeCell(TenuringTracer& mover, BaseScript* script) {
@@ -394,70 +388,173 @@ static inline void TraceWholeCell(TenuringTracer& mover,
}
template <typename T>
-static void TraceBufferedCells(TenuringTracer& mover, Arena* arena,
- ArenaCellSet* cells) {
+bool TenuringTracer::traceBufferedCells(Arena* arena, ArenaCellSet* cells) {
for (size_t i = 0; i < MaxArenaCellIndex; i += cells->BitsPerWord) {
ArenaCellSet::WordT bitset = cells->getWord(i / cells->BitsPerWord);
while (bitset) {
size_t bit = i + js::detail::CountTrailingZeroes(bitset);
+ bitset &= bitset - 1; // Clear the low bit.
+
auto cell =
reinterpret_cast<T*>(uintptr_t(arena) + ArenaCellIndexBytes * bit);
- TraceWholeCell(mover, cell);
- bitset &= bitset - 1; // Clear the low bit.
+
+ TenuringTracer::AutoPromotedAnyToNursery promotedToNursery(*this);
+
+ TraceWholeCell(*this, cell);
+
+ if (promotedToNursery) {
+ runtime()->gc.storeBuffer().putWholeCell(cell);
+ }
}
}
+
+ return false;
}
-void ArenaCellSet::trace(TenuringTracer& mover) {
- for (ArenaCellSet* cells = this; cells; cells = cells->next) {
+template <>
+bool TenuringTracer::traceBufferedCells<JSString>(Arena* arena,
+ ArenaCellSet* cells) {
+ bool needsSweep = false;
+ for (size_t i = 0; i < MaxArenaCellIndex; i += cells->BitsPerWord) {
+ ArenaCellSet::WordT bitset = cells->getWord(i / cells->BitsPerWord);
+ ArenaCellSet::WordT tosweep = bitset;
+ while (bitset) {
+ size_t bit = i + js::detail::CountTrailingZeroes(bitset);
+ auto* cell = reinterpret_cast<JSString*>(uintptr_t(arena) +
+ ArenaCellIndexBytes * bit);
+ TenuringTracer::AutoPromotedAnyToNursery promotedToNursery(*this);
+      bool cellNeedsSweep = TraceWholeCell(*this, cell);
+      if (promotedToNursery) {
+        runtime()->gc.storeBuffer().putWholeCell(cell);
+      }
+      ArenaCellSet::WordT mask = bitset - 1;
+      bitset &= mask;
+      if (!cellNeedsSweep) {
+ tosweep &= mask;
+ }
+ }
+
+ cells->setWord(i / cells->BitsPerWord, tosweep);
+ if (tosweep) {
+ needsSweep = true;
+ }
+ }
+
+ return needsSweep;
+}
+
+ArenaCellSet* ArenaCellSet::trace(TenuringTracer& mover) {
+ ArenaCellSet* head = nullptr;
+
+ ArenaCellSet* cells = this;
+ while (cells) {
cells->check();
Arena* arena = cells->arena;
arena->bufferedCells() = &ArenaCellSet::Empty;
JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
+ bool needsSweep;
switch (kind) {
case JS::TraceKind::Object:
- TraceBufferedCells<JSObject>(mover, arena, cells);
+ needsSweep = mover.traceBufferedCells<JSObject>(arena, cells);
break;
case JS::TraceKind::String:
- TraceBufferedCells<JSString>(mover, arena, cells);
+ needsSweep = mover.traceBufferedCells<JSString>(arena, cells);
break;
case JS::TraceKind::Script:
- TraceBufferedCells<BaseScript>(mover, arena, cells);
+ needsSweep = mover.traceBufferedCells<BaseScript>(arena, cells);
break;
case JS::TraceKind::JitCode:
- TraceBufferedCells<jit::JitCode>(mover, arena, cells);
+ needsSweep = mover.traceBufferedCells<jit::JitCode>(arena, cells);
break;
default:
MOZ_CRASH("Unexpected trace kind");
}
+
+ ArenaCellSet* next = cells->next;
+ if (needsSweep) {
+ cells->next = head;
+ head = cells;
+ }
+
+ cells = next;
}
+
+ return head;
}
void js::gc::StoreBuffer::WholeCellBuffer::trace(TenuringTracer& mover,
StoreBuffer* owner) {
MOZ_ASSERT(owner->isEnabled());
-#ifdef DEBUG
- // Verify that all string whole cells are traced first before any other
- // strings are visited for any reason.
- MOZ_ASSERT(!owner->markingNondeduplicatable);
- owner->markingNondeduplicatable = true;
-#endif
- // Trace all of the strings to mark the non-deduplicatable bits, then trace
- // all other whole cells.
- if (stringHead_) {
- stringHead_->trace(mover);
+ if (head_) {
+ head_ = head_->trace(mover);
}
-#ifdef DEBUG
- owner->markingNondeduplicatable = false;
-#endif
- if (nonStringHead_) {
- nonStringHead_->trace(mover);
+}
+
+// Sweep a tenured dependent string with a nursery base. The base chain will
+// have been collapsed to a single link before this string was added to the
+// sweep set, so only the simple case of a tenured dependent string with a
+// nursery base needs to be considered.
+template <typename CharT>
+void JSDependentString::sweepTypedAfterMinorGC() {
+ MOZ_ASSERT(isTenured());
+ MOZ_ASSERT(IsInsideNursery(nurseryBaseOrRelocOverlay()));
+
+ JSLinearString* base = nurseryBaseOrRelocOverlay();
+ MOZ_ASSERT(IsInsideNursery(base));
+  MOZ_ASSERT(base->isForwarded(), "root base should be kept alive");
+  MOZ_ASSERT(!Forwarded(base)->hasBase(), "base chain should be collapsed");
+ auto* baseOverlay = js::gc::StringRelocationOverlay::fromCell(base);
+ const CharT* oldBaseChars = baseOverlay->savedNurseryChars<CharT>();
+
+ // We have the base's original chars pointer and its current chars pointer.
+ // Update our chars pointer, which is an offset from the original base
+ // chars, and make it point to the same offset within the root's chars.
+ // (Most of the time, the base chars didn't move and so this has no
+ // effect.)
+ const CharT* oldChars = JSString::nonInlineCharsRaw<CharT>();
+ size_t offset = oldChars - oldBaseChars;
+ JSLinearString* tenuredBase = Forwarded(base);
+ MOZ_ASSERT(offset < tenuredBase->length());
+
+ const CharT* newBaseChars = tenuredBase->JSString::nonInlineCharsRaw<CharT>();
+ relocateNonInlineChars(newBaseChars, offset);
+
+ d.s.u3.base = tenuredBase;
+}
+
+inline void JSDependentString::sweepAfterMinorGC() {
+ if (hasTwoByteChars()) {
+ sweepTypedAfterMinorGC<char16_t>();
+ } else {
+ sweepTypedAfterMinorGC<JS::Latin1Char>();
+ }
+}
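The fix-up above reduces to offset arithmetic: the dependent string's chars pointer is always the base's chars plus a fixed offset, so the same offset is applied to the base's new buffer. A minimal sketch, assuming only that invariant (simplified signature, not the real API):

// Illustrative only: re-derive a dependent string's chars pointer from the
// base's relocated buffer using the offset preserved from the old buffer.
template <typename CharT>
const CharT* RelocatedChars(const CharT* oldChars, const CharT* oldBaseChars,
                            const CharT* newBaseChars) {
  size_t offset = oldChars - oldBaseChars;  // position within the old buffer
  return newBaseChars + offset;             // same position in the new buffer
}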
+
+static void SweepDependentStrings(Arena* arena, ArenaCellSet* cells) {
+ for (size_t i = 0; i < MaxArenaCellIndex; i += cells->BitsPerWord) {
+ ArenaCellSet::WordT bitset = cells->getWord(i / cells->BitsPerWord);
+ while (bitset) {
+ size_t bit = i + js::detail::CountTrailingZeroes(bitset);
+ auto* str = reinterpret_cast<JSString*>(uintptr_t(arena) +
+ ArenaCellIndexBytes * bit);
+ MOZ_ASSERT(str->isTenured());
+ str->asDependent().sweepAfterMinorGC();
+ bitset &= bitset - 1; // Clear the low bit.
+ }
}
+}
- stringHead_ = nonStringHead_ = nullptr;
+void ArenaCellSet::sweepDependentStrings() {
+ for (ArenaCellSet* cells = this; cells; cells = cells->next) {
+ Arena* arena = cells->arena;
+ arena->bufferedCells() = &ArenaCellSet::Empty;
+ MOZ_ASSERT(MapAllocToTraceKind(arena->getAllocKind()) ==
+ JS::TraceKind::String);
+ SweepDependentStrings(arena, cells);
+ }
}
template <typename T>
@@ -481,18 +578,42 @@ void js::gc::StoreBuffer::CellPtrEdge<T>::trace(TenuringTracer& mover) const {
edge, JS::TraceKind::String));
}
- DispatchToOnEdge(&mover, edge, "CellPtrEdge");
+ if (!mover.nursery().inCollectedRegion(thing)) {
+ return;
+ }
+
+ *edge = mover.promoteOrForward(thing);
+
+ if (IsInsideNursery(*edge)) {
+ mover.runtime()->gc.storeBuffer().putCell(edge);
+ }
}
void js::gc::StoreBuffer::ValueEdge::trace(TenuringTracer& mover) const {
- if (deref()) {
- mover.traverse(edge);
+ if (!isGCThing()) {
+ return;
+ }
+
+ TenuringTracer::AutoPromotedAnyToNursery promotedToNursery(mover);
+
+ mover.traverse(edge);
+
+ if (promotedToNursery) {
+ mover.runtime()->gc.storeBuffer().putValue(edge);
}
}
void js::gc::StoreBuffer::WasmAnyRefEdge::trace(TenuringTracer& mover) const {
- if (deref()) {
- mover.traverse(edge);
+ if (!isGCThing()) {
+ return;
+ }
+
+ TenuringTracer::AutoPromotedAnyToNursery promotedToNursery(mover);
+
+ mover.traverse(edge);
+
+ if (promotedToNursery) {
+ mover.runtime()->gc.storeBuffer().putWasmAnyRef(edge);
}
}
@@ -513,7 +634,7 @@ void js::gc::TenuringTracer::traceObject(JSObject* obj) {
if (!nobj->hasEmptyElements()) {
HeapSlotArray elements = nobj->getDenseElements();
Value* elems = elements.begin()->unbarrieredAddress();
- traceSlots(elems, elems + nobj->getDenseInitializedLength());
+ traceObjectElements(elems, nobj->getDenseInitializedLength());
}
traceObjectSlots(nobj, 0, nobj->slotSpan());
@@ -527,16 +648,17 @@ void js::gc::TenuringTracer::traceObjectSlots(NativeObject* nobj,
nobj->forEachSlotRange(start, end, traceRange);
}
+void js::gc::TenuringTracer::traceObjectElements(JS::Value* vp,
+ uint32_t count) {
+ traceSlots(vp, vp + count);
+}
+
void js::gc::TenuringTracer::traceSlots(Value* vp, Value* end) {
for (; vp != end; ++vp) {
traverse(vp);
}
}
-inline void js::gc::TenuringTracer::traceSlots(JS::Value* vp, uint32_t nslots) {
- traceSlots(vp, vp + nslots);
-}
-
void js::gc::TenuringTracer::traceString(JSString* str) {
str->traceChildren(this);
}
@@ -564,25 +686,73 @@ inline void js::gc::TenuringTracer::insertIntoObjectFixupList(
}
template <typename T>
-inline T* js::gc::TenuringTracer::allocTenured(Zone* zone, AllocKind kind) {
- return static_cast<T*>(static_cast<Cell*>(AllocateCellInGC(zone, kind)));
+inline T* js::gc::TenuringTracer::alloc(Zone* zone, AllocKind kind, Cell* src) {
+ AllocSite* site = NurseryCellHeader::from(src)->allocSite();
+ site->incPromotedCount();
+
+ void* ptr = allocCell<T::TraceKind>(zone, kind, site, src);
+ auto* cell = reinterpret_cast<T*>(ptr);
+ if (IsInsideNursery(cell)) {
+ MOZ_ASSERT(!nursery().inCollectedRegion(cell));
+ promotedToNursery = true;
+ }
+
+ return cell;
}
-JSString* js::gc::TenuringTracer::allocTenuredString(JSString* src, Zone* zone,
- AllocKind dstKind) {
- JSString* dst = allocTenured<JSString>(zone, dstKind);
- tenuredSize += moveStringToTenured(dst, src, dstKind);
- tenuredCells++;
+template <JS::TraceKind traceKind>
+void* js::gc::TenuringTracer::allocCell(Zone* zone, AllocKind allocKind,
+ AllocSite* site, Cell* src) {
+ MOZ_ASSERT(zone == src->zone());
+
+ if (!shouldTenure(zone, traceKind, src)) {
+    // Allocations from an optimized alloc site continue to use that site;
+    // otherwise a special promoted alloc site is used.
+ if (site->kind() != AllocSite::Kind::Optimized) {
+ site = &zone->pretenuring.promotedAllocSite(traceKind);
+ }
+
+ size_t thingSize = Arena::thingSize(allocKind);
+ void* ptr = nursery_.tryAllocateCell(site, thingSize, traceKind);
+ if (MOZ_LIKELY(ptr)) {
+ return ptr;
+ }
+
+ JSContext* cx = runtime()->mainContextFromOwnThread();
+ ptr = CellAllocator::RetryNurseryAlloc<NoGC>(cx, traceKind, allocKind,
+ thingSize, site);
+ if (MOZ_LIKELY(ptr)) {
+ return ptr;
+ }
+
+ // The nursery is full. This is unlikely but can happen. Fall through to
+ // the tenured allocation path.
+ }
+
+ return AllocateTenuredCellInGC(zone, allocKind);
+}
+
+JSString* js::gc::TenuringTracer::allocString(JSString* src, Zone* zone,
+ AllocKind dstKind) {
+ JSString* dst = alloc<JSString>(zone, dstKind, src);
+ promotedSize += moveString(dst, src, dstKind);
+ promotedCells++;
return dst;
}
-JSObject* js::gc::TenuringTracer::moveToTenuredSlow(JSObject* src) {
+bool js::gc::TenuringTracer::shouldTenure(Zone* zone, JS::TraceKind traceKind,
+ Cell* cell) {
+ return tenureEverything || !zone->allocKindInNursery(traceKind) ||
+ nursery_.shouldTenure(cell);
+}
+
+JSObject* js::gc::TenuringTracer::promoteObjectSlow(JSObject* src) {
MOZ_ASSERT(IsInsideNursery(src));
MOZ_ASSERT(!src->is<PlainObject>());
AllocKind dstKind = src->allocKindForTenure(nursery());
- auto* dst = allocTenured<JSObject>(src->nurseryZone(), dstKind);
+ auto* dst = alloc<JSObject>(src->nurseryZone(), dstKind, src);
size_t srcSize = Arena::thingSize(dstKind);
@@ -590,8 +760,8 @@ JSObject* js::gc::TenuringTracer::moveToTenuredSlow(JSObject* src) {
// and dst. We deal with this by copying elements manually, possibly
// re-inlining them if there is adequate room inline in dst.
//
- // For Arrays and Tuples we're reducing tenuredSize to the smaller srcSize
- // because moveElementsToTenured() accounts for all Array or Tuple elements,
+ // For Arrays and Tuples we're reducing promotedSize to the smaller srcSize
+ // because moveElements() accounts for all Array or Tuple elements,
// even if they are inlined.
if (src->is<FixedLengthTypedArrayObject>()) {
auto* tarray = &src->as<FixedLengthTypedArrayObject>();
@@ -613,8 +783,8 @@ JSObject* js::gc::TenuringTracer::moveToTenuredSlow(JSObject* src) {
srcSize = sizeof(NativeObject);
}
- tenuredSize += srcSize;
- tenuredCells++;
+ promotedSize += srcSize;
+ promotedCells++;
// Copy the Cell contents.
MOZ_ASSERT(OffsetFromChunkStart(src) >= sizeof(ChunkBase));
@@ -625,8 +795,8 @@ JSObject* js::gc::TenuringTracer::moveToTenuredSlow(JSObject* src) {
if (src->is<NativeObject>()) {
NativeObject* ndst = &dst->as<NativeObject>();
NativeObject* nsrc = &src->as<NativeObject>();
- tenuredSize += moveSlotsToTenured(ndst, nsrc);
- tenuredSize += moveElementsToTenured(ndst, nsrc, dstKind);
+ promotedSize += moveSlots(ndst, nsrc);
+ promotedSize += moveElements(ndst, nsrc, dstKind);
}
JSObjectMovedOp op = dst->getClass()->extObjectMovedOp();
@@ -634,7 +804,7 @@ JSObject* js::gc::TenuringTracer::moveToTenuredSlow(JSObject* src) {
if (op) {
// Tell the hazard analysis that the object moved hook can't GC.
JS::AutoSuppressGCAnalysis nogc;
- tenuredSize += op(dst, src);
+ promotedSize += op(dst, src);
} else {
MOZ_ASSERT_IF(src->getClass()->hasFinalize(),
CanNurseryAllocateFinalizedClass(src->getClass()));
@@ -647,18 +817,17 @@ JSObject* js::gc::TenuringTracer::moveToTenuredSlow(JSObject* src) {
return dst;
}
-inline JSObject* js::gc::TenuringTracer::movePlainObjectToTenured(
- PlainObject* src) {
- // Fast path version of moveToTenuredSlow() for specialized for PlainObject.
+inline JSObject* js::gc::TenuringTracer::promotePlainObject(PlainObject* src) {
+  // Fast path version of promoteObjectSlow() specialized for PlainObject.
MOZ_ASSERT(IsInsideNursery(src));
AllocKind dstKind = src->allocKindForTenure();
- auto* dst = allocTenured<PlainObject>(src->nurseryZone(), dstKind);
+ auto* dst = alloc<PlainObject>(src->nurseryZone(), dstKind, src);
size_t srcSize = Arena::thingSize(dstKind);
- tenuredSize += srcSize;
- tenuredCells++;
+ promotedSize += srcSize;
+ promotedCells++;
// Copy the Cell contents.
MOZ_ASSERT(OffsetFromChunkStart(src) >= sizeof(ChunkBase));
@@ -666,8 +835,8 @@ inline JSObject* js::gc::TenuringTracer::movePlainObjectToTenured(
js_memcpy(dst, src, srcSize);
// Move the slots and elements.
- tenuredSize += moveSlotsToTenured(dst, src);
- tenuredSize += moveElementsToTenured(dst, src, dstKind);
+ promotedSize += moveSlots(dst, src);
+ promotedSize += moveElements(dst, src, dstKind);
MOZ_ASSERT(!dst->getClass()->extObjectMovedOp());
@@ -678,8 +847,7 @@ inline JSObject* js::gc::TenuringTracer::movePlainObjectToTenured(
return dst;
}
-size_t js::gc::TenuringTracer::moveSlotsToTenured(NativeObject* dst,
- NativeObject* src) {
+size_t js::gc::TenuringTracer::moveSlots(NativeObject* dst, NativeObject* src) {
/* Fixed slots have already been copied over. */
if (!src->hasDynamicSlots()) {
return 0;
@@ -702,9 +870,9 @@ size_t js::gc::TenuringTracer::moveSlotsToTenured(NativeObject* dst,
return allocSize;
}
-size_t js::gc::TenuringTracer::moveElementsToTenured(NativeObject* dst,
- NativeObject* src,
- AllocKind dstKind) {
+size_t js::gc::TenuringTracer::moveElements(NativeObject* dst,
+ NativeObject* src,
+ AllocKind dstKind) {
if (src->hasEmptyElements()) {
return 0;
}
@@ -751,7 +919,7 @@ inline void js::gc::TenuringTracer::insertIntoStringFixupList(
stringHead = entry;
}
-JSString* js::gc::TenuringTracer::moveToTenured(JSString* src) {
+JSString* js::gc::TenuringTracer::promoteString(JSString* src) {
MOZ_ASSERT(IsInsideNursery(src));
MOZ_ASSERT(!src->isExternal());
@@ -762,29 +930,32 @@ JSString* js::gc::TenuringTracer::moveToTenured(JSString* src) {
// the atom. Don't do this for dependent strings because they're more
// complicated. See StringRelocationOverlay and DeduplicationStringHasher
// comments.
+ MOZ_ASSERT(!src->isAtom());
if (src->isLinear() && src->inStringToAtomCache() &&
src->isDeduplicatable() && !src->hasBase()) {
JSLinearString* linear = &src->asLinear();
JSAtom* atom = runtime()->caches().stringToAtomCache.lookupInMap(linear);
- MOZ_ASSERT(atom, "Why was the cache purged before minor GC?");
-
- // Only deduplicate if both strings have the same encoding, to not confuse
- // dependent strings.
- if (src->hasTwoByteChars() == atom->hasTwoByteChars()) {
- // The StringToAtomCache isn't used for inline strings (due to the minimum
- // length) so canOwnDependentChars must be true for both src and atom.
- // This means if there are dependent strings floating around using str's
- // chars, they will be able to use the chars from the atom.
- static_assert(StringToAtomCache::MinStringLength >
- JSFatInlineString::MAX_LENGTH_LATIN1);
- static_assert(StringToAtomCache::MinStringLength >
- JSFatInlineString::MAX_LENGTH_TWO_BYTE);
- MOZ_ASSERT(src->canOwnDependentChars());
- MOZ_ASSERT(atom->canOwnDependentChars());
-
- StringRelocationOverlay::forwardCell(src, atom);
- gcprobes::PromoteToTenured(src, atom);
- return atom;
+ // The string will not be present in the cache if it was previously promoted
+ // to the second nursery generation.
+ if (atom) {
+ // Only deduplicate if both strings have the same encoding, to not confuse
+ // dependent strings.
+ if (src->hasTwoByteChars() == atom->hasTwoByteChars()) {
+ // The StringToAtomCache isn't used for inline strings (due to the
+ // minimum length) so canOwnDependentChars must be true for both src and
+ // atom. This means if there are dependent strings floating around using
+ // str's chars, they will be able to use the chars from the atom.
+ static_assert(StringToAtomCache::MinStringLength >
+ JSFatInlineString::MAX_LENGTH_LATIN1);
+ static_assert(StringToAtomCache::MinStringLength >
+ JSFatInlineString::MAX_LENGTH_TWO_BYTE);
+ MOZ_ASSERT(src->canOwnDependentChars());
+ MOZ_ASSERT(atom->canOwnDependentChars());
+
+ StringRelocationOverlay::forwardCell(src, atom);
+ gcprobes::PromoteToTenured(src, atom);
+ return atom;
+ }
}
}
@@ -798,9 +969,12 @@ JSString* js::gc::TenuringTracer::moveToTenured(JSString* src) {
// 3. It is deduplicatable:
// The JSString NON_DEDUP_BIT flag is unset.
// 4. It matches an entry in stringDeDupSet.
+ // 5. It is moved to the tenured heap.
- if (src->length() < MAX_DEDUPLICATABLE_STRING_LENGTH && src->isLinear() &&
+ if (shouldTenure(zone, JS::TraceKind::String, src) &&
+ src->length() < MAX_DEDUPLICATABLE_STRING_LENGTH && src->isLinear() &&
src->isDeduplicatable() && stringDeDupSet.isSome()) {
+ src->clearBitsOnTenure();
auto p = stringDeDupSet->lookupForAdd(src);
if (p) {
// Deduplicate to the looked-up string!
@@ -811,7 +985,7 @@ JSString* js::gc::TenuringTracer::moveToTenured(JSString* src) {
return dst;
}
- dst = allocTenuredString(src, zone, dstKind);
+ dst = allocString(src, zone, dstKind);
using DedupHasher [[maybe_unused]] = DeduplicationStringHasher<JSString*>;
MOZ_ASSERT(DedupHasher::hash(src) == DedupHasher::hash(dst),
@@ -823,14 +997,17 @@ JSString* js::gc::TenuringTracer::moveToTenured(JSString* src) {
stringDeDupSet.reset();
}
} else {
- dst = allocTenuredString(src, zone, dstKind);
- dst->clearNonDeduplicatable();
+ dst = allocString(src, zone, dstKind);
+ if (dst->isTenured()) {
+ src->clearBitsOnTenure();
+ dst->clearBitsOnTenure();
+ }
}
zone->stringStats.ref().noteTenured(src->allocSize());
auto* overlay = StringRelocationOverlay::forwardCell(src, dst);
- MOZ_ASSERT(dst->isDeduplicatable());
+ MOZ_ASSERT_IF(dst->isTenured() && dst->isLinear(), dst->isDeduplicatable());
if (dst->hasBase() || dst->isRope()) {
// dst or one of its leaves might have a base that will be deduplicated.
@@ -874,6 +1051,9 @@ void js::gc::TenuringTracer::relocateDependentStringChars(
tenuredDependentStr->relocateNonInlineChars<const CharT*>(
tenuredRootBase->nonInlineChars<CharT>(nogc), *offset);
tenuredDependentStr->setBase(tenuredRootBase);
+ if (tenuredDependentStr->isTenured() && !tenuredRootBase->isTenured()) {
+ runtime()->gc.storeBuffer().putWholeCell(tenuredDependentStr);
+ }
return;
}
@@ -889,7 +1069,7 @@ void js::gc::TenuringTracer::relocateDependentStringChars(
// The root base can be in either the nursery or the tenured heap.
// dependentStr chars needs to be relocated after traceString if the
// root base is in the nursery.
- if (!(*rootBase)->isTenured()) {
+ if (nursery().inCollectedRegion(*rootBase)) {
*rootBaseNotYetForwarded = true;
const CharT* rootBaseChars = (*rootBase)->nonInlineChars<CharT>(nogc);
*offset = dependentStrChars - rootBaseChars;
@@ -906,15 +1086,15 @@ void js::gc::TenuringTracer::relocateDependentStringChars(
}
}
-JS::BigInt* js::gc::TenuringTracer::moveToTenured(JS::BigInt* src) {
+JS::BigInt* js::gc::TenuringTracer::promoteBigInt(JS::BigInt* src) {
MOZ_ASSERT(IsInsideNursery(src));
AllocKind dstKind = src->getAllocKind();
Zone* zone = src->nurseryZone();
- JS::BigInt* dst = allocTenured<JS::BigInt>(zone, dstKind);
- tenuredSize += moveBigIntToTenured(dst, src, dstKind);
- tenuredCells++;
+ JS::BigInt* dst = alloc<JS::BigInt>(zone, dstKind, src);
+ promotedSize += moveBigInt(dst, src, dstKind);
+ promotedCells++;
RelocationOverlay::forwardCell(src, dst);
@@ -924,41 +1104,57 @@ JS::BigInt* js::gc::TenuringTracer::moveToTenured(JS::BigInt* src) {
void js::gc::TenuringTracer::collectToObjectFixedPoint() {
while (RelocationOverlay* p = objHead) {
+ MOZ_ASSERT(nursery().inCollectedRegion(p));
objHead = objHead->next();
auto* obj = static_cast<JSObject*>(p->forwardingAddress());
+
+ MOZ_ASSERT_IF(IsInsideNursery(obj), !nursery().inCollectedRegion(obj));
+
+ AutoPromotedAnyToNursery promotedAnyToNursery(*this);
traceObject(obj);
+ if (obj->isTenured() && promotedAnyToNursery) {
+ runtime()->gc.storeBuffer().putWholeCell(obj);
+ }
}
}
void js::gc::TenuringTracer::collectToStringFixedPoint() {
while (StringRelocationOverlay* p = stringHead) {
+ MOZ_ASSERT(nursery().inCollectedRegion(p));
stringHead = stringHead->next();
- auto* tenuredStr = static_cast<JSString*>(p->forwardingAddress());
+ auto* str = static_cast<JSString*>(p->forwardingAddress());
+ MOZ_ASSERT_IF(IsInsideNursery(str), !nursery().inCollectedRegion(str));
+
// To ensure the NON_DEDUP_BIT was reset properly.
- MOZ_ASSERT(tenuredStr->isDeduplicatable());
+ MOZ_ASSERT(!str->isAtom());
+ MOZ_ASSERT_IF(str->isTenured() && str->isLinear(), str->isDeduplicatable());
// The nursery root base might not be forwarded before
- // traceString(tenuredStr). traceString(tenuredStr) will forward the root
+ // traceString(str). traceString(str) will forward the root
// base if that's the case. Dependent string chars needs to be relocated
// after traceString if root base was not forwarded.
size_t offset = 0;
bool rootBaseNotYetForwarded = false;
JSLinearString* rootBase = nullptr;
- if (tenuredStr->isDependent()) {
- if (tenuredStr->hasTwoByteChars()) {
+ if (str->isDependent()) {
+ if (str->hasTwoByteChars()) {
relocateDependentStringChars<char16_t>(
- &tenuredStr->asDependent(), p->savedNurseryBaseOrRelocOverlay(),
- &offset, &rootBaseNotYetForwarded, &rootBase);
+ &str->asDependent(), p->savedNurseryBaseOrRelocOverlay(), &offset,
+ &rootBaseNotYetForwarded, &rootBase);
} else {
relocateDependentStringChars<JS::Latin1Char>(
- &tenuredStr->asDependent(), p->savedNurseryBaseOrRelocOverlay(),
- &offset, &rootBaseNotYetForwarded, &rootBase);
+ &str->asDependent(), p->savedNurseryBaseOrRelocOverlay(), &offset,
+ &rootBaseNotYetForwarded, &rootBase);
}
}
- traceString(tenuredStr);
+ AutoPromotedAnyToNursery promotedAnyToNursery(*this);
+ traceString(str);
+ if (str->isTenured() && promotedAnyToNursery) {
+ runtime()->gc.storeBuffer().putWholeCell(str);
+ }
if (rootBaseNotYetForwarded) {
MOZ_ASSERT(rootBase->isForwarded(),
@@ -968,25 +1164,34 @@ void js::gc::TenuringTracer::collectToStringFixedPoint() {
JSLinearString* tenuredRootBase = Forwarded(rootBase);
MOZ_ASSERT(offset < tenuredRootBase->length());
- if (tenuredStr->hasTwoByteChars()) {
- tenuredStr->asDependent().relocateNonInlineChars<const char16_t*>(
+ if (str->hasTwoByteChars()) {
+ str->asDependent().relocateNonInlineChars<const char16_t*>(
tenuredRootBase->twoByteChars(nogc), offset);
} else {
- tenuredStr->asDependent().relocateNonInlineChars<const JS::Latin1Char*>(
+ str->asDependent().relocateNonInlineChars<const JS::Latin1Char*>(
tenuredRootBase->latin1Chars(nogc), offset);
}
- tenuredStr->setBase(tenuredRootBase);
+
+ str->setBase(tenuredRootBase);
+ if (str->isTenured() && !tenuredRootBase->isTenured()) {
+ runtime()->gc.storeBuffer().putWholeCell(str);
+ }
+ }
+
+ if (str->hasBase()) {
+ MOZ_ASSERT(!str->base()->isForwarded());
+ MOZ_ASSERT_IF(!str->base()->isTenured(),
+ !nursery().inCollectedRegion(str->base()));
}
}
}
-size_t js::gc::TenuringTracer::moveStringToTenured(JSString* dst, JSString* src,
- AllocKind dstKind) {
+size_t js::gc::TenuringTracer::moveString(JSString* dst, JSString* src,
+ AllocKind dstKind) {
size_t size = Arena::thingSize(dstKind);
- // At the moment, strings always have the same AllocKind between src and
- // dst. This may change in the future.
- MOZ_ASSERT(dst->asTenured().getAllocKind() == src->getAllocKind());
+ MOZ_ASSERT_IF(dst->isTenured(),
+ dst->asTenured().getAllocKind() == src->getAllocKind());
// Copy the Cell contents.
MOZ_ASSERT(OffsetToChunkEnd(src) >= size);
@@ -999,7 +1204,8 @@ size_t js::gc::TenuringTracer::moveStringToTenured(JSString* dst, JSString* src,
if (src->ownsMallocedChars()) {
void* chars = src->asLinear().nonInlineCharsRaw();
nursery().removeMallocedBufferDuringMinorGC(chars);
- AddCellMemory(dst, dst->asLinear().allocSize(), MemoryUse::StringContents);
+ nursery().trackMallocedBufferOnPromotion(
+ chars, dst, dst->asLinear().allocSize(), MemoryUse::StringContents);
return size;
}
@@ -1016,14 +1222,12 @@ size_t js::gc::TenuringTracer::moveStringToTenured(JSString* dst, JSString* src,
return size;
}
-size_t js::gc::TenuringTracer::moveBigIntToTenured(JS::BigInt* dst,
- JS::BigInt* src,
- AllocKind dstKind) {
+size_t js::gc::TenuringTracer::moveBigInt(JS::BigInt* dst, JS::BigInt* src,
+ AllocKind dstKind) {
size_t size = Arena::thingSize(dstKind);
- // At the moment, BigInts always have the same AllocKind between src and
- // dst. This may change in the future.
- MOZ_ASSERT(dst->asTenured().getAllocKind() == src->getAllocKind());
+ MOZ_ASSERT_IF(dst->isTenured(),
+ dst->asTenured().getAllocKind() == src->getAllocKind());
// Copy the Cell contents.
MOZ_ASSERT(OffsetToChunkEnd(src) >= size);
diff --git a/js/src/gc/Tenuring.h b/js/src/gc/Tenuring.h
index 3eca5f4bc3..8bb9efd4f8 100644
--- a/js/src/gc/Tenuring.h
+++ b/js/src/gc/Tenuring.h
@@ -26,6 +26,8 @@ class AnyRef;
namespace gc {
+class AllocSite;
+class ArenaCellSet;
class RelocationOverlay;
class StringRelocationOverlay;
@@ -39,10 +41,10 @@ struct DeduplicationStringHasher {
class TenuringTracer final : public JSTracer {
Nursery& nursery_;
- // Amount of data moved to the tenured generation during collection.
- size_t tenuredSize = 0;
- // Number of cells moved to the tenured generation.
- size_t tenuredCells = 0;
+ // Size of data promoted during collection.
+ size_t promotedSize = 0;
+ // Number of cells promoted during collection.
+ size_t promotedCells = 0;
// These lists are threaded through the Nursery using the space from
// already moved things. The lists are used to fix up the moved things and
@@ -59,27 +61,34 @@ class TenuringTracer final : public JSTracer {
// collection when out of memory to insert new entries.
mozilla::Maybe<StringDeDupSet> stringDeDupSet;
+ bool tenureEverything;
+
+ // A flag set when a GC thing is promoted to the next nursery generation (as
+ // opposed to the tenured heap). This is used to check when we need to add an
+ // edge to the remembered set during nursery collection.
+ bool promotedToNursery = false;
+
#define DEFINE_ON_EDGE_METHOD(name, type, _1, _2) \
void on##name##Edge(type** thingp, const char* name) override;
JS_FOR_EACH_TRACEKIND(DEFINE_ON_EDGE_METHOD)
#undef DEFINE_ON_EDGE_METHOD
public:
- TenuringTracer(JSRuntime* rt, Nursery* nursery);
+ TenuringTracer(JSRuntime* rt, Nursery* nursery, bool tenureEverything);
Nursery& nursery() { return nursery_; }
- // Move all objects and everything they can reach to the tenured heap. Called
- // after all roots have been traced.
+ // Promote all live objects and everything they can reach. Called after all
+ // roots have been traced.
void collectToObjectFixedPoint();
- // Move all strings and all strings they can reach to the tenured heap, and
- // additionally do any fixups for when strings are pointing into memory that
- // was deduplicated. Called after collectToObjectFixedPoint().
+ // Promote all live strings and all strings they can reach, and additionally
+ // do any fixups for when strings are pointing into memory that was
+ // deduplicated. Called after collectToObjectFixedPoint().
void collectToStringFixedPoint();
- size_t getTenuredSize() const;
- size_t getTenuredCells() const;
+ size_t getPromotedSize() const;
+ size_t getPromotedCells() const;
void traverse(JS::Value* thingp);
void traverse(wasm::AnyRef* thingp);
@@ -87,14 +96,26 @@ class TenuringTracer final : public JSTracer {
// The store buffers need to be able to call these directly.
void traceObject(JSObject* obj);
void traceObjectSlots(NativeObject* nobj, uint32_t start, uint32_t end);
- void traceSlots(JS::Value* vp, uint32_t nslots);
+ void traceObjectElements(JS::Value* vp, uint32_t count);
void traceString(JSString* str);
void traceBigInt(JS::BigInt* bi);
+ // Methods to promote a live cell or get the pointer to its new location if
+ // that has already happened. The store buffers call these.
+ JSObject* promoteOrForward(JSObject* obj);
+ JSString* promoteOrForward(JSString* str);
+ JS::BigInt* promoteOrForward(JS::BigInt* bip);
+
+ // Returns whether any cells in the arena require sweeping.
+ template <typename T>
+ bool traceBufferedCells(Arena* arena, ArenaCellSet* cells);
+
+ class AutoPromotedAnyToNursery;
+
private:
- MOZ_ALWAYS_INLINE void onNonForwardedNurseryObjectEdge(JSObject** objp);
- MOZ_ALWAYS_INLINE void onNonForwardedNurseryStringEdge(JSString** strp);
- MOZ_ALWAYS_INLINE void onNonForwardedNurseryBigIntEdge(JS::BigInt** bip);
+ MOZ_ALWAYS_INLINE JSObject* onNonForwardedNurseryObject(JSObject* obj);
+ MOZ_ALWAYS_INLINE JSString* onNonForwardedNurseryString(JSString* str);
+ MOZ_ALWAYS_INLINE JS::BigInt* onNonForwardedNurseryBigInt(JS::BigInt* bi);
// The dependent string chars needs to be relocated if the base which it's
// using chars from has been deduplicated.
@@ -109,22 +130,24 @@ class TenuringTracer final : public JSTracer {
inline void insertIntoStringFixupList(gc::StringRelocationOverlay* entry);
template <typename T>
- inline T* allocTenured(JS::Zone* zone, gc::AllocKind kind);
- JSString* allocTenuredString(JSString* src, JS::Zone* zone,
- gc::AllocKind dstKind);
-
- inline JSObject* movePlainObjectToTenured(PlainObject* src);
- JSObject* moveToTenuredSlow(JSObject* src);
- JSString* moveToTenured(JSString* src);
- JS::BigInt* moveToTenured(JS::BigInt* src);
-
- size_t moveElementsToTenured(NativeObject* dst, NativeObject* src,
- gc::AllocKind dstKind);
- size_t moveSlotsToTenured(NativeObject* dst, NativeObject* src);
- size_t moveStringToTenured(JSString* dst, JSString* src,
- gc::AllocKind dstKind);
- size_t moveBigIntToTenured(JS::BigInt* dst, JS::BigInt* src,
- gc::AllocKind dstKind);
+ T* alloc(JS::Zone* zone, gc::AllocKind kind, gc::Cell* src);
+ template <JS::TraceKind traceKind>
+ void* allocCell(JS::Zone* zone, gc::AllocKind allocKind, gc::AllocSite* site,
+ gc::Cell* src);
+ JSString* allocString(JSString* src, JS::Zone* zone, gc::AllocKind dstKind);
+
+ bool shouldTenure(Zone* zone, JS::TraceKind traceKind, Cell* cell);
+
+ inline JSObject* promotePlainObject(PlainObject* src);
+ JSObject* promoteObjectSlow(JSObject* src);
+ JSString* promoteString(JSString* src);
+ JS::BigInt* promoteBigInt(JS::BigInt* src);
+
+ size_t moveElements(NativeObject* dst, NativeObject* src,
+ gc::AllocKind dstKind);
+ size_t moveSlots(NativeObject* dst, NativeObject* src);
+ size_t moveString(JSString* dst, JSString* src, gc::AllocKind dstKind);
+ size_t moveBigInt(JS::BigInt* dst, JS::BigInt* src, gc::AllocKind dstKind);
void traceSlots(JS::Value* vp, JS::Value* end);
};
diff --git a/js/src/gc/TraceMethods-inl.h b/js/src/gc/TraceMethods-inl.h
index 37da24fe6e..e7066c5ebc 100644
--- a/js/src/gc/TraceMethods-inl.h
+++ b/js/src/gc/TraceMethods-inl.h
@@ -30,6 +30,9 @@
#include "vm/SymbolType.h"
#include "wasm/WasmJS.h"
+#include "gc/Marking-inl.h"
+#include "vm/StringType-inl.h"
+
inline void js::BaseScript::traceChildren(JSTracer* trc) {
TraceNullableEdge(trc, &function_, "function");
TraceEdge(trc, &sourceObject_, "sourceObject");
@@ -76,6 +79,20 @@ inline void JSString::traceChildren(JSTracer* trc) {
asRope().traceChildren(trc);
}
}
+inline void JSString::traceBaseFromStoreBuffer(JSTracer* trc) {
+ MOZ_ASSERT(!d.s.u3.base->isTenured());
+
+ // Contract the base chain so that this tenured dependent string points
+ // directly at the root base that owns its chars.
+ JSLinearString* root = asDependent().rootBaseDuringMinorGC();
+ d.s.u3.base = root;
+ if (!root->isTenured()) {
+ js::TraceManuallyBarrieredEdge(trc, &root, "base");
+ // Do not update the actual base to the tenured string yet. This string will
+ // need to be swept in order to update its chars ptr to be relative to the
+ // root, and that update requires information from the overlay.
+ }
+}
template <uint32_t opts>
void js::GCMarker::eagerlyMarkChildren(JSString* str) {
if (str->isLinear()) {
diff --git a/js/src/gc/WeakMap-inl.h b/js/src/gc/WeakMap-inl.h
index 015e5071ed..d7b5feb5a6 100644
--- a/js/src/gc/WeakMap-inl.h
+++ b/js/src/gc/WeakMap-inl.h
@@ -18,6 +18,7 @@
#include "gc/GCLock.h"
#include "gc/Marking.h"
#include "gc/Zone.h"
+#include "js/Prefs.h"
#include "js/TraceKind.h"
#include "vm/JSContext.h"
@@ -71,6 +72,16 @@ static inline JSObject* GetDelegate(const T& key) {
template <>
inline JSObject* GetDelegate(gc::Cell* const&) = delete;
+template <typename T>
+static inline bool IsSymbol(const T& key) {
+ return false;
+}
+
+template <>
+inline bool IsSymbol(const HeapPtr<JS::Value>& key) {
+ return key.isSymbol();
+}
+
} // namespace gc::detail
// Weakmap entry -> value edges are only visible if the map is traced, which
@@ -303,25 +314,42 @@ bool WeakMap<K, V>::findSweepGroupEdges() {
for (Range r = all(); !r.empty(); r.popFront()) {
const K& key = r.front().key();
- // If the key type doesn't have delegates, then this will always return
- // nullptr and the optimizer can remove the entire body of this function.
JSObject* delegate = gc::detail::GetDelegate(key);
- if (!delegate) {
+ if (delegate) {
+ // Marking a WeakMap key's delegate will mark the key, so process the
+ // delegate zone no later than the key zone.
+ Zone* delegateZone = delegate->zone();
+ gc::Cell* keyCell = gc::ToMarkable(key);
+ MOZ_ASSERT(keyCell);
+ Zone* keyZone = keyCell->zone();
+ if (delegateZone != keyZone && delegateZone->isGCMarking() &&
+ keyZone->isGCMarking()) {
+ if (!delegateZone->addSweepGroupEdgeTo(keyZone)) {
+ return false;
+ }
+ }
+ }
+
+#ifdef NIGHTLY_BUILD
+ bool symbolsAsWeakMapKeysEnabled =
+ JS::Prefs::experimental_symbols_as_weakmap_keys();
+ if (!symbolsAsWeakMapKeysEnabled) {
continue;
}
- // Marking a WeakMap key's delegate will mark the key, so process the
- // delegate zone no later than the key zone.
- Zone* delegateZone = delegate->zone();
- gc::Cell* keyCell = gc::ToMarkable(key);
- MOZ_ASSERT(keyCell);
- Zone* keyZone = keyCell->zone();
- if (delegateZone != keyZone && delegateZone->isGCMarking() &&
- keyZone->isGCMarking()) {
- if (!delegateZone->addSweepGroupEdgeTo(keyZone)) {
- return false;
+ bool isSym = gc::detail::IsSymbol(key);
+ if (isSym) {
+ gc::Cell* keyCell = gc::ToMarkable(key);
+ Zone* keyZone = keyCell->zone();
+ MOZ_ASSERT(keyZone->isAtomsZone());
+
+ if (zone()->isGCMarking() && keyZone->isGCMarking()) {
+ if (!keyZone->addSweepGroupEdgeTo(zone())) {
+ return false;
+ }
}
}
+#endif
}
return true;
}
diff --git a/js/src/gc/Zone.cpp b/js/src/gc/Zone.cpp
index d0586d5d56..6437f6f4c3 100644
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -622,7 +622,7 @@ void Zone::fixupAfterMovingGC() {
}
void Zone::purgeAtomCache() {
- atomCache().clearAndCompact();
+ atomCache_.ref().reset();
// Also purge the dtoa caches so that subsequent lookups populate atom
// cache too.
@@ -981,20 +981,3 @@ bool Zone::registerObjectWithWeakPointers(JSObject* obj) {
MOZ_ASSERT(!IsInsideNursery(obj));
return objectsWithWeakPointers.ref().append(obj);
}
-
-js::DependentScriptSet* Zone::getOrCreateDependentScriptSet(
- JSContext* cx, js::InvalidatingFuse* fuse) {
- for (auto& dss : fuseDependencies) {
- if (dss.associatedFuse == fuse) {
- return &dss;
- }
- }
-
- if (!fuseDependencies.emplaceBack(cx, fuse)) {
- return nullptr;
- }
-
- auto& dss = fuseDependencies.back();
- MOZ_ASSERT(dss.associatedFuse == fuse);
- return &dss;
-}
diff --git a/js/src/gc/Zone.h b/js/src/gc/Zone.h
index fd91de8626..a5ce161cc4 100644
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -15,6 +15,8 @@
#include "mozilla/PodOperations.h"
#include "mozilla/TimeStamp.h"
+#include <array>
+
#include "jstypes.h"
#include "ds/Bitmap.h"
@@ -139,6 +141,148 @@ class MOZ_NON_TEMPORARY_CLASS FunctionToStringCache {
MOZ_ALWAYS_INLINE void put(BaseScript* script, JSString* string);
};
+// HashAndLength is a simple class encapsulating the combination of a HashNumber
+// and a (string) length into a single 64-bit value. Having them bundled
+// together like this enables us to compare pairs of hashes and lengths with a
+// single 64-bit comparison.
+class HashAndLength {
+ public:
+ MOZ_ALWAYS_INLINE explicit HashAndLength(uint64_t initialValue = unsetValue())
+ : mHashAndLength(initialValue) {}
+ MOZ_ALWAYS_INLINE HashAndLength(HashNumber hash, uint32_t length)
+ : mHashAndLength(uint64FromHashAndLength(hash, length)) {}
+
+ void MOZ_ALWAYS_INLINE set(HashNumber hash, uint32_t length) {
+ mHashAndLength = uint64FromHashAndLength(hash, length);
+ }
+
+ constexpr MOZ_ALWAYS_INLINE HashNumber hash() const {
+ return hashFromUint64(mHashAndLength);
+ }
+ constexpr MOZ_ALWAYS_INLINE uint32_t length() const {
+ return lengthFromUint64(mHashAndLength);
+ }
+
+ constexpr MOZ_ALWAYS_INLINE bool isEqual(HashNumber hash,
+ uint32_t length) const {
+ return mHashAndLength == uint64FromHashAndLength(hash, length);
+ }
+
+  // This function is used at compile time to verify that we pack and unpack
+ // hash and length values consistently.
+ static constexpr bool staticChecks() {
+ std::array<HashNumber, 5> hashes{0x00000000, 0xffffffff, 0xf0f0f0f0,
+ 0x0f0f0f0f, 0x73737373};
+ std::array<uint32_t, 6> lengths{0, 1, 2, 3, 11, 56};
+
+ for (const HashNumber hash : hashes) {
+ for (const uint32_t length : lengths) {
+ const uint64_t lengthAndHash = uint64FromHashAndLength(hash, length);
+ if (hashFromUint64(lengthAndHash) != hash) {
+ return false;
+ }
+ if (lengthFromUint64(lengthAndHash) != length) {
+ return false;
+ }
+ }
+ }
+
+ return true;
+ }
+
+ static constexpr MOZ_ALWAYS_INLINE uint64_t unsetValue() {
+ // This needs to be a combination of hash and length that would never occur
+ // together. There is only one string of length zero, and its hash is zero,
+ // so the hash here can be anything except zero.
+ return uint64FromHashAndLength(0xffffffff, 0);
+ }
+
+ private:
+ uint64_t mHashAndLength;
+
+ static constexpr MOZ_ALWAYS_INLINE uint64_t
+ uint64FromHashAndLength(HashNumber hash, uint32_t length) {
+ return (static_cast<uint64_t>(length) << 32) | hash;
+ }
+
+ static constexpr MOZ_ALWAYS_INLINE uint32_t
+ lengthFromUint64(uint64_t hashAndLength) {
+ return static_cast<uint32_t>(hashAndLength >> 32);
+ }
+
+ static constexpr MOZ_ALWAYS_INLINE HashNumber
+ hashFromUint64(uint64_t hashAndLength) {
+ return hashAndLength & 0xffffffff;
+ }
+};
+
+static_assert(HashAndLength::staticChecks());
+
+class AtomCacheHashTable {
+ public:
+ MOZ_ALWAYS_INLINE AtomCacheHashTable() { clear(); }
+
+ MOZ_ALWAYS_INLINE void clear() {
+ mEntries.fill({HashAndLength{HashAndLength::unsetValue()}, nullptr});
+ }
+
+ static MOZ_ALWAYS_INLINE constexpr uint32_t computeIndexFromHash(
+ const HashNumber hash) {
+ // Simply use the low bits of the hash value as the cache index.
+ return hash & (sSize - 1);
+ }
+
+ MOZ_ALWAYS_INLINE JSAtom* lookupForAdd(
+ const AtomHasher::Lookup& lookup) const {
+ MOZ_ASSERT(lookup.atom == nullptr, "Lookup by atom is not supported");
+
+ const uint32_t index = computeIndexFromHash(lookup.hash);
+
+ JSAtom* const atom = mEntries[index].mAtom;
+
+ if (!mEntries[index].mHashAndLength.isEqual(lookup.hash, lookup.length)) {
+ return nullptr;
+ }
+
+ // This is annotated with MOZ_UNLIKELY because it virtually never happens
+ // that, after matching the hash and the length, the string isn't a match.
+ if (MOZ_UNLIKELY(!lookup.StringsMatch(*atom))) {
+ return nullptr;
+ }
+
+ return atom;
+ }
+
+ MOZ_ALWAYS_INLINE void add(const HashNumber hash, JSAtom* atom) {
+ const uint32_t index = computeIndexFromHash(hash);
+
+ mEntries[index].set(hash, atom->length(), atom);
+ }
+
+ private:
+ struct Entry {
+ MOZ_ALWAYS_INLINE void set(const HashNumber hash, const uint32_t length,
+ JSAtom* const atom) {
+ mHashAndLength.set(hash, length);
+ mAtom = atom;
+ }
+
+    // Hash and length are also available from JSAtom and JSString
+ // respectively, but are cached here to avoid likely cache misses in the
+ // frequent case of a missed lookup.
+ HashAndLength mHashAndLength;
+ // No read barrier is required here because the table is cleared at the
+ // start of GC.
+ JSAtom* mAtom;
+ };
+
+ // This value was picked empirically based on performance testing using SP2
+ // and SP3. 4k was better than 2k but 8k was not much better than 4k.
+ static constexpr uint32_t sSize = 4 * 1024;
+ static_assert(mozilla::IsPowerOfTwo(sSize));
+ std::array<Entry, sSize> mEntries;
+};
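A hypothetical call-site sketch of the cache's contract (AtomizeSlowPath is a placeholder, not part of this patch): a miss means falling back to the full atoms table, and add() may overwrite whichever colliding entry currently occupies the direct-mapped slot.

// Hypothetical caller; only lookupForAdd()/add() come from the class above.
static JSAtom* LookupOrAtomize(js::AtomCacheHashTable& cache,
                               const js::AtomHasher::Lookup& lookup) {
  if (JSAtom* atom = cache.lookupForAdd(lookup)) {
    return atom;  // fast path: hash, length and chars all matched
  }
  JSAtom* atom = AtomizeSlowPath(lookup);  // placeholder for the uncached path
  cache.add(lookup.hash, atom);            // may evict a colliding entry
  return atom;
}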
+
} // namespace js
namespace JS {
@@ -281,7 +425,7 @@ class Zone : public js::ZoneAllocator, public js::gc::GraphNodeBase<JS::Zone> {
js::MainThreadOrGCTaskData<js::SparseBitmap> markedAtoms_;
// Set of atoms recently used by this Zone. Purged on GC.
- js::MainThreadOrGCTaskData<js::AtomSet> atomCache_;
+ js::MainThreadOrGCTaskData<js::UniquePtr<js::AtomCacheHashTable>> atomCache_;
// Cache storing allocated external strings. Purged on GC.
js::MainThreadOrGCTaskData<js::ExternalStringCache> externalStringCache_;
@@ -613,7 +757,16 @@ class Zone : public js::ZoneAllocator, public js::gc::GraphNodeBase<JS::Zone> {
js::SparseBitmap& markedAtoms() { return markedAtoms_.ref(); }
- js::AtomSet& atomCache() { return atomCache_.ref(); }
+ // The atom cache is "allocate-on-demand". This function can return nullptr if
+ // the allocation failed.
+ js::AtomCacheHashTable* atomCache() {
+ if (atomCache_.ref()) {
+ return atomCache_.ref().get();
+ }
+
+ atomCache_ = js::MakeUnique<js::AtomCacheHashTable>();
+ return atomCache_.ref().get();
+ }
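Because the table is allocated on demand, callers must tolerate a nullptr return. A minimal caller-side sketch (hypothetical call site):

// Hypothetical call site: the cache is best-effort, so a failed allocation
// simply means the lookup proceeds without it.
static JSAtom* TryCachedAtomLookup(JS::Zone* zone,
                                   const js::AtomHasher::Lookup& lookup) {
  if (js::AtomCacheHashTable* cache = zone->atomCache()) {
    return cache->lookupForAdd(lookup);  // may still be nullptr on a miss
  }
  return nullptr;  // no cache; caller falls back to the full atoms table
}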
void purgeAtomCache();
@@ -677,18 +830,7 @@ class Zone : public js::ZoneAllocator, public js::gc::GraphNodeBase<JS::Zone> {
#endif
// Support for invalidating fuses
-
- // A dependent script set pairs a fuse with a set of scripts which depend
- // on said fuse; this is a vector of script sets because the expectation for
- // now is that the number of runtime wide invalidating fuses will be small.
- // This will need to be revisited (convert to HashMap?) should that no
- // longer be the case
- //
- // Note: This isn't traced through the zone, but rather through the use
- // of JS::WeakCache.
- js::Vector<js::DependentScriptSet, 1, js::SystemAllocPolicy> fuseDependencies;
- js::DependentScriptSet* getOrCreateDependentScriptSet(
- JSContext* cx, js::InvalidatingFuse* fuse);
+ js::DependentScriptGroup fuseDependencies;
private:
js::jit::JitZone* createJitZone(JSContext* cx);
diff --git a/js/src/jit-test/lib/pretenure.js b/js/src/jit-test/lib/pretenure.js
index 214f1d44d1..c1184fefc4 100644
--- a/js/src/jit-test/lib/pretenure.js
+++ b/js/src/jit-test/lib/pretenure.js
@@ -22,8 +22,12 @@ function setupPretenureTest() {
gczeal(0);
// Restrict nursery size so we can fill it quicker, and ensure it is resized.
- gcparam("minNurseryBytes", 1024 * 1024);
- gcparam("maxNurseryBytes", 1024 * 1024);
+ let size = 1024 * 1024;
+ if (gcparam("semispaceNurseryEnabled")) {
+ size *= 2;
+ }
+ gcparam("minNurseryBytes", size);
+ gcparam("maxNurseryBytes", size);
// Limit allocation threshold so we trigger major GCs sooner.
gcparam("allocationThreshold", 1 /* MB */);
@@ -67,8 +71,14 @@ function allocateArrays(count, longLived) {
}
function gcCounts() {
- return { minor: gcparam("minorGCNumber"),
- major: gcparam("majorGCNumber") };
+  let major = gcparam("majorGCNumber");
+ let minor = gcparam("minorGCNumber");
+
+ // Only report minor collections that didn't happen as part of a major GC.
+ assertEq(minor >= major, true);
+ minor -= major;
+
+ return { minor, major };
}
function runTestAndCountCollections(thunk) {
diff --git a/js/src/jit-test/tests/arrays/sort-trampoline.js b/js/src/jit-test/tests/arrays/sort-trampoline.js
new file mode 100644
index 0000000000..b81d4628b0
--- /dev/null
+++ b/js/src/jit-test/tests/arrays/sort-trampoline.js
@@ -0,0 +1,153 @@
+function testGC() {
+ var arr = [{n: 1}, {n: 3}, {n: 0}, {n: 5}];
+ for (var i = 0; i < 20; i++) {
+ arr.sort((x, y) => {
+ if (i === 17) {
+ gc();
+ }
+ return x.n - y.n;
+ });
+ }
+ assertEq(arr.map(x => x.n).join(""), "0135");
+}
+testGC();
+
+function testException() {
+ var arr = [{n: 1}, {n: 3}, {n: 0}, {n: 5}];
+ var ex;
+ try {
+ for (var i = 0; i < 20; i++) {
+ arr.sort((x, y) => {
+ if (i === 17) {
+ throw "fit";
+ }
+ return x.n - y.n;
+ });
+ }
+ } catch (e) {
+ ex = e;
+ }
+ assertEq(ex, "fit");
+ assertEq(i, 17);
+}
+testException();
+
+function testRectifier() {
+ var arr = [{n: 1}, {n: 3}, {n: 0}, {n: 5}];
+ for (var i = 0; i < 20; i++) {
+ arr.sort(function(x, y, a) {
+ assertEq(arguments.length, 2);
+ assertEq(a, undefined);
+ return y.n - x.n;
+ });
+ }
+ assertEq(arr.map(x => x.n).join(""), "5310");
+}
+testRectifier();
+
+function testClassConstructor() {
+ var normal = (x, y) => x.n - y.n;
+ var dummy = {};
+ var ctor = (class { constructor(x, y) {
+ assertEq(x, dummy);
+ }});
+ // Warm up the constructor.
+ for (var i = 0; i < 20; i++) {
+ new ctor(dummy, dummy);
+ }
+ for (var i = 0; i < 20; i++) {
+ var arr = [{n: 1}, {n: 3}, {n: 0}, {n: 5}];
+ var ex;
+ try {
+ arr.sort(i < 17 ? normal : ctor);
+ } catch (e) {
+ ex = e;
+ }
+ assertEq(ex instanceof TypeError, i >= 17);
+ assertEq(arr.map(x => x.n).join(""), i >= 17 ? "1305" : "0135");
+ }
+}
+testClassConstructor();
+
+function testSwitchRealms() {
+ var arr = [{n: 1}, {n: 3}, {n: 0}, {n: 5}];
+ var g = newGlobal({sameCompartmentAs: this});
+ g.foo = 123;
+ var comp = g.evaluate(`((x, y) => {
+ assertEq(foo, 123);
+ return x.n - y.n;
+ })`);
+ for (var i = 0; i < 20; i++) {
+ arr.sort(comp);
+ }
+ assertEq(arr.map(x => x.n).join(""), "0135");
+}
+testSwitchRealms();
+
+function testCrossCompartment() {
+ var arr = [{n: 1}, {n: 3}, {n: 0}, {n: 5}];
+ var g = newGlobal({newCompartment: true});
+ var wrapper = g.evaluate(`((x, y) => {
+ return x.n - y.n;
+ })`);
+ for (var i = 0; i < 20; i++) {
+ arr.sort(wrapper);
+ }
+ assertEq(arr.map(x => x.n).join(""), "0135");
+}
+testCrossCompartment();
+
+function testBound() {
+ var arr = [{n: 1}, {n: 3}, {n: 0}, {n: 5}];
+ var fun = (function(a, x, y) {
+ "use strict";
+ assertEq(this, null);
+ assertEq(a, 1);
+ return x.n - y.n;
+ }).bind(null, 1);
+ for (var i = 0; i < 20; i++) {
+ arr.sort(fun);
+ }
+ assertEq(arr.map(x => x.n).join(""), "0135");
+}
+testBound();
+
+function testExtraArgs() {
+ var arr = [{n: 1}, {n: 3}, {n: 0}, {n: 5}];
+ var cmp = (x, y) => x.n - y.n;
+ for (var i = 0; i < 20; i++) {
+ arr.sort(cmp, cmp, cmp, cmp, cmp, cmp, cmp);
+ }
+ assertEq(arr.map(x => x.n).join(""), "0135");
+}
+testExtraArgs();
+
+function testBailout() {
+ var arr = [{n: 1}, {n: 3}, {n: 0}, {n: 5}];
+ for (var i = 0; i < 110; i++) {
+ arr.sort(function(x, y) {
+ if (i === 108) {
+ bailout();
+ }
+ return x.n - y.n;
+ });
+ }
+ assertEq(arr.map(x => x.n).join(""), "0135");
+}
+testBailout();
+
+function testExceptionHandlerSwitchRealm() {
+ var g = newGlobal({sameCompartmentAs: this});
+ for (var i = 0; i < 25; i++) {
+ var ex = null;
+ try {
+ g.Array.prototype.toSorted.call([2, 3], () => {
+ throw "fit";
+ });
+ } catch (e) {
+ ex = e;
+ }
+ assertEq(ex, "fit");
+ }
+}
+testExceptionHandlerSwitchRealm();
diff --git a/js/src/jit-test/tests/basic/bug1875795.js b/js/src/jit-test/tests/basic/bug1875795.js
new file mode 100644
index 0000000000..1a5b54acfe
--- /dev/null
+++ b/js/src/jit-test/tests/basic/bug1875795.js
@@ -0,0 +1,7 @@
+// |jit-test| --fast-warmup; --no-threads; skip-if: !('oomTest' in this)
+oomTest(function() {
+ var o = {};
+ for (var p in this) {
+ o[p] = 1;
+ }
+});
diff --git a/js/src/jit-test/tests/basic/bug1888746.js b/js/src/jit-test/tests/basic/bug1888746.js
new file mode 100644
index 0000000000..8e6d0cd0b9
--- /dev/null
+++ b/js/src/jit-test/tests/basic/bug1888746.js
@@ -0,0 +1,12 @@
+function comparator(x, y) {
+ saveStack();
+ return {valueOf: function() {
+ saveStack();
+ return x - y;
+ }};
+}
+for (let i = 0; i < 20; i++) {
+ let arr = [3, 1, 2];
+ arr.sort(comparator);
+ assertEq(arr.toString(), "1,2,3");
+}
diff --git a/js/src/jit-test/tests/basic/bug1890200.js b/js/src/jit-test/tests/basic/bug1890200.js
new file mode 100644
index 0000000000..caa97fcece
--- /dev/null
+++ b/js/src/jit-test/tests/basic/bug1890200.js
@@ -0,0 +1,12 @@
+let triggerGC = false;
+let proxy = new Proxy({}, {get: function(target, key) {
+ if (key === "sameCompartmentAs" || key === "sameZoneAs") {
+ triggerGC = true;
+ return newGlobal({newCompartment: true});
+ }
+ if (triggerGC) {
+ gc();
+ triggerGC = false;
+ }
+}});
+newGlobal(proxy);
diff --git a/js/src/jit-test/tests/cacheir/bug1888346.js b/js/src/jit-test/tests/cacheir/bug1888346.js
new file mode 100644
index 0000000000..8e63d86089
--- /dev/null
+++ b/js/src/jit-test/tests/cacheir/bug1888346.js
@@ -0,0 +1,8 @@
+setJitCompilerOption("ion.frequent-bailout-threshold", 1);
+for (let i = 0; i < 49; i++) {
+ (function () {
+ let x = new (function () {})();
+ Object.defineProperty(x, "z", {});
+ x.z;
+ })();
+}
diff --git a/js/src/jit-test/tests/collections/bug-1884927.js b/js/src/jit-test/tests/collections/bug-1884927.js
new file mode 100644
index 0000000000..263d9df8a0
--- /dev/null
+++ b/js/src/jit-test/tests/collections/bug-1884927.js
@@ -0,0 +1,10 @@
+// |jit-test| --enable-symbols-as-weakmap-keys; skip-if: getBuildConfiguration("release_or_beta")
+for (x=0; x<10000; ++x) {
+ try {
+ m13 = new WeakMap;
+ sym = Symbol();
+ m13.set(sym, new Debugger);
+ startgc(1, );
+ } catch (exc) {}
+}
+
diff --git a/js/src/jit-test/tests/collections/bug-1885775.js b/js/src/jit-test/tests/collections/bug-1885775.js
new file mode 100644
index 0000000000..bc14c6d58b
--- /dev/null
+++ b/js/src/jit-test/tests/collections/bug-1885775.js
@@ -0,0 +1,12 @@
+// |jit-test| --enable-symbols-as-weakmap-keys; skip-if: getBuildConfiguration("release_or_beta")
+var code = `
+var m58 = new WeakMap;
+var sym = Symbol();
+m58.set(sym, ({ entry16: 0, length: 1 }));
+function testCompacting() {
+ gcslice(50000);
+}
+testCompacting(2, 100000, 50000);
+`;
+for (x = 0; x < 10000; ++x)
+ evaluate(code);
diff --git a/js/src/jit-test/tests/collections/bug-1887939-1.js b/js/src/jit-test/tests/collections/bug-1887939-1.js
new file mode 100644
index 0000000000..292c44d492
--- /dev/null
+++ b/js/src/jit-test/tests/collections/bug-1887939-1.js
@@ -0,0 +1,7 @@
+var map = new WeakMap();
+var sym = Symbol();
+try {
+ map.set(sym, 1);
+} catch (e) {
+ assertEq(!!e.message.match(/an unregistered symbol/), false);
+}
diff --git a/js/src/jit-test/tests/collections/bug-1887939-2.js b/js/src/jit-test/tests/collections/bug-1887939-2.js
new file mode 100644
index 0000000000..2ec4e4c585
--- /dev/null
+++ b/js/src/jit-test/tests/collections/bug-1887939-2.js
@@ -0,0 +1,7 @@
+// |jit-test| --enable-symbols-as-weakmap-keys; skip-if: getBuildConfiguration("release_or_beta")
+var map = new WeakMap();
+try {
+ map.set(1, 1);
+} catch (e) {
+ assertEq(!!e.message.match(/an unregistered symbol/), true);
+}
diff --git a/js/src/jit-test/tests/debug/Debugger-onNativeCall-03.js b/js/src/jit-test/tests/debug/Debugger-onNativeCall-03.js
index 7e9c0b280a..27ab5b2b23 100644
--- a/js/src/jit-test/tests/debug/Debugger-onNativeCall-03.js
+++ b/js/src/jit-test/tests/debug/Debugger-onNativeCall-03.js
@@ -1,4 +1,5 @@
-// Test onNativeCall's behavior when used with self-hosted functions.
+// Test onNativeCall's behavior when used with self-hosted functions
+// and trampoline natives.
load(libdir + 'eqArrayHelper.js');
@@ -18,13 +19,22 @@ dbg.onNativeCall = f => {
gdbg.executeInGlobal(`
var x = [1,3,2];
+ x.forEach((a) => {print(a)});
x.sort((a, b) => {print(a)});
+ x.sort(print);
`);
assertEqArray(rv, [
- "EnterFrame", "sort",
- "ArraySortCompare/<",
+ "EnterFrame", "forEach",
"EnterFrame", "print",
- "ArraySortCompare/<",
"EnterFrame", "print",
+ "EnterFrame", "print",
+
+ "sort",
+ "EnterFrame","print",
+ "EnterFrame","print",
+
+ "sort",
+ "print",
+ "print"
]);
diff --git a/js/src/jit-test/tests/debug/Environment-methods-toPrimitive.js b/js/src/jit-test/tests/debug/Environment-methods-toPrimitive.js
new file mode 100644
index 0000000000..106728901d
--- /dev/null
+++ b/js/src/jit-test/tests/debug/Environment-methods-toPrimitive.js
@@ -0,0 +1,21 @@
+// removeDebuggee can be called through ToPrimitive while converting the argument
+// passed to Debugger.Environment.{find,getVariable,setVariable} to string.
+
+var g = newGlobal({newCompartment: true});
+g.eval("function f() { debugger; }");
+var dbg = new Debugger();
+var oddball = {[Symbol.toPrimitive]: () => dbg.removeDebuggee(g)};
+
+for (var method of ["find", "getVariable", "setVariable"]) {
+ dbg.addDebuggee(g);
+ dbg.onDebuggerStatement = frame => {
+ var ex;
+ try {
+ frame.environment[method](oddball, oddball);
+ } catch (e) {
+ ex = e;
+ }
+ assertEq(ex.message, "Debugger.Environment is not a debuggee environment");
+ };
+ g.f();
+}
diff --git a/js/src/jit-test/tests/debug/Frame-onStep-21.js b/js/src/jit-test/tests/debug/Frame-onStep-21.js
new file mode 100644
index 0000000000..7bea2e3a95
--- /dev/null
+++ b/js/src/jit-test/tests/debug/Frame-onStep-21.js
@@ -0,0 +1,19 @@
+// |jit-test| error: too much recursion
+
+// A generator closed due to over-recursion shouldn't cause a crash around onStep.
+
+async function* foo() {
+ const g = this.newGlobal({sameZoneAs: this});
+ g.Debugger(this).getNewestFrame().onStep = g.evaluate(`(function() {})`);
+ return {};
+}
+function f() {
+ try {
+ f.apply(undefined, f);
+ } catch {
+ drainJobQueue();
+ foo().next();
+ }
+}
+foo().next();
+f();
diff --git a/js/src/jit-test/tests/debug/private-methods-eval-in-frame.js b/js/src/jit-test/tests/debug/private-methods-eval-in-frame.js
index 5122cfa56b..318a36f614 100644
--- a/js/src/jit-test/tests/debug/private-methods-eval-in-frame.js
+++ b/js/src/jit-test/tests/debug/private-methods-eval-in-frame.js
@@ -150,13 +150,6 @@ if ('dis' in this) {
assertEq(b.ef(`var x = () => { return this.#priv(); }; x()`), 12);
assertEq(b.ef(`function x(o) { function y(o) { return o.#priv(); }; return y(o); } x(this)`), 12);
-assertEq(b.ef("B.#smethod()"), 14)
-assertEq(b.ef("B.#unusedmethod()"), 19);
-assertEq(b.ef("B.#unusedgetter"), 10);
-
-b.ef("B.#unusedsetter = 19");
-assertEq(B.setter, 19);
-
assertEq(B.f(), 14);
assertEq(B.sef(`this.#smethod()`), 14);
assertEq(B.sLayerEf(`this.#smethod()`), 14);
@@ -215,4 +208,4 @@ var x = () => {
})();
};
x()
-`), 12); \ No newline at end of file
+`), 12);
diff --git a/js/src/jit-test/tests/errors/bug-1886940-2.js b/js/src/jit-test/tests/errors/bug-1886940-2.js
new file mode 100644
index 0000000000..654071be04
--- /dev/null
+++ b/js/src/jit-test/tests/errors/bug-1886940-2.js
@@ -0,0 +1,6 @@
+oomTest(function () {
+ (function () {
+ var x = [disassemble, new Int8Array(2 ** 8 + 1)];
+ x.shift().apply([], x);
+ })();
+});
diff --git a/js/src/jit-test/tests/errors/bug-1886940.js b/js/src/jit-test/tests/errors/bug-1886940.js
new file mode 100644
index 0000000000..f8d3020d8c
--- /dev/null
+++ b/js/src/jit-test/tests/errors/bug-1886940.js
@@ -0,0 +1,2 @@
+// |jit-test| error: RangeError
+[].with(Symbol.hasInstance);
diff --git a/js/src/jit-test/tests/fuses/optimized-getiterator-invalidation.js b/js/src/jit-test/tests/fuses/optimized-getiterator-invalidation.js
new file mode 100644
index 0000000000..6505f8d023
--- /dev/null
+++ b/js/src/jit-test/tests/fuses/optimized-getiterator-invalidation.js
@@ -0,0 +1,37 @@
+
+const ITERS = 1000;
+
+// A function which, when warp-compiled, should use OptimizedGetIterator
+// elision and rely on invalidation.
+function f(x) {
+ let sum = 0;
+ for (let i = 0; i < ITERS; i++) {
+ const [a, b, c] = x
+ sum = a + b + c;
+ }
+ return sum
+}
+
+// Run the function f 1000 times to warp compile it. Use 4 elements here to ensure
+// the return property of the ArrayIteratorPrototype is called.
+let arr = [1, 2, 3, 4];
+for (let i = 0; i < 1000; i++) {
+ f(arr);
+}
+
+// Initialize the globally scoped counter
+let counter = 0;
+const ArrayIteratorPrototype = Object.getPrototypeOf([][Symbol.iterator]());
+
+// Setting the return property should invalidate the warp script here.
+ArrayIteratorPrototype.return = function () {
+ counter++;
+ return { done: true };
+};
+
+
+// Call f one more time
+f(arr);
+
+// Use assertEq to check the value of counter.
+assertEq(counter, ITERS);
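
For reference, a minimal sketch (plain ECMAScript iteration semantics, independent of the warp details above) of why destructuring a 3-element pattern from a 4-element array invokes the iterator's return hook on every pass:

    const proto = Object.getPrototypeOf([][Symbol.iterator]());
    let closed = 0;
    proto.return = function () { closed++; return { done: true }; };
    const [a, b, c] = [1, 2, 3, 4];  // pattern ends before the iterator is exhausted
    assertEq(closed, 1);             // IteratorClose called return() once
    delete proto.return;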
diff --git a/js/src/jit-test/tests/gc/alllcation-metadata-builder-over-recursion.js b/js/src/jit-test/tests/gc/alllcation-metadata-builder-over-recursion.js
new file mode 100644
index 0000000000..67093026b4
--- /dev/null
+++ b/js/src/jit-test/tests/gc/alllcation-metadata-builder-over-recursion.js
@@ -0,0 +1,22 @@
+// |jit-test| allow-unhandlable-oom
+
+// Over-recursion should suppress the allocation metadata builder, to avoid another
+// over-recursion while generating an error object for the first over-recursion.
+//
+// This test should catch the error for the "load" testing function's arguments,
+// or crash with unhandlable OOM inside allocation metadata builder.
+
+const g = newGlobal();
+g.enableShellAllocationMetadataBuilder();
+function run() {
+ const g_load = g.load;
+ g_load.toString = run;
+ return g_load(g_load);
+}
+let caught = false;
+try {
+ run();
+} catch (e) {
+ caught = true;
+}
+assertEq(caught, true);
diff --git a/js/src/jit-test/tests/gc/bug-1568740.js b/js/src/jit-test/tests/gc/bug-1568740.js
index 6cc003cb94..5c311b855d 100644
--- a/js/src/jit-test/tests/gc/bug-1568740.js
+++ b/js/src/jit-test/tests/gc/bug-1568740.js
@@ -1,11 +1,11 @@
gczeal(0);
+gcparam("semispaceNurseryEnabled", 0);
function setAndTest(param, value) {
gcparam(param, value);
assertEq(gcparam(param), value);
}
-
// Set a large nursery size.
setAndTest("maxNurseryBytes", 1024*1024);
setAndTest("minNurseryBytes", 1024*1024);
diff --git a/js/src/jit-test/tests/gc/bug-1569840.js b/js/src/jit-test/tests/gc/bug-1569840.js
index 70d28add73..45df339405 100644
--- a/js/src/jit-test/tests/gc/bug-1569840.js
+++ b/js/src/jit-test/tests/gc/bug-1569840.js
@@ -1,5 +1,5 @@
-
gczeal(0);
+gcparam("semispaceNurseryEnabled", 0);
gcparam("maxNurseryBytes", 1024*1024);
gcparam("minNurseryBytes", 1024*1024);
diff --git a/js/src/jit-test/tests/gc/bug-1885819-2.js b/js/src/jit-test/tests/gc/bug-1885819-2.js
new file mode 100644
index 0000000000..a87e4c701a
--- /dev/null
+++ b/js/src/jit-test/tests/gc/bug-1885819-2.js
@@ -0,0 +1,12 @@
+let g = newGlobal();
+function f() {
+ var o = {};
+ o["prop" + Date.now()] = 1;
+ gc();
+ schedulezone("atoms");
+ schedulezone(g);
+ gc("zone");
+ let [x] = [0];
+}
+f();
+oomTest(f);
diff --git a/js/src/jit-test/tests/gc/bug-1885819.js b/js/src/jit-test/tests/gc/bug-1885819.js
new file mode 100644
index 0000000000..8341c3ff52
--- /dev/null
+++ b/js/src/jit-test/tests/gc/bug-1885819.js
@@ -0,0 +1,10 @@
+function f() {
+ var o = {};
+ o["prop" + Date.now()] = 1;
+ gc();
+ schedulezone("atoms");
+ gc("zone");
+ let [x] = [0];
+}
+f();
+oomTest(f);
diff --git a/js/src/jit-test/tests/gc/bug-1886466.js b/js/src/jit-test/tests/gc/bug-1886466.js
new file mode 100644
index 0000000000..4347ea3e6b
--- /dev/null
+++ b/js/src/jit-test/tests/gc/bug-1886466.js
@@ -0,0 +1,5 @@
+gczeal(7, 6)
+a = new WeakSet
+for (let i = 0; i < 200000; i++) {
+ a.add({})
+}
diff --git a/js/src/jit-test/tests/gc/bug-1888717.js b/js/src/jit-test/tests/gc/bug-1888717.js
new file mode 100644
index 0000000000..7e54543994
--- /dev/null
+++ b/js/src/jit-test/tests/gc/bug-1888717.js
@@ -0,0 +1,3 @@
+// |jit-test| --no-ggc
+gcparam("semispaceNurseryEnabled", 1);
+let o = {};
diff --git a/js/src/jit-test/tests/gc/dedupe-03.js b/js/src/jit-test/tests/gc/dedupe-03.js
new file mode 100644
index 0000000000..4e9b4c1bbc
--- /dev/null
+++ b/js/src/jit-test/tests/gc/dedupe-03.js
@@ -0,0 +1,66 @@
+// |jit-test| skip-if: !hasFunction.stringRepresentation
+
+// Test handling of tenured dependent strings pointing to nursery base strings.
+
+gczeal(0);
+
+function makeExtensibleStrFrom(str) {
+ var left = str.substr(0, str.length/2);
+ var right = str.substr(str.length/2, str.length);
+ var ropeStr = left + right;
+ return ensureLinearString(ropeStr);
+}
+
+function repr(s) {
+ return JSON.parse(stringRepresentation(s));
+}
+
+function dependsOn(s1, s2) {
+ const rep1 = JSON.parse(stringRepresentation(s1));
+ const rep2 = JSON.parse(stringRepresentation(s2));
+ return rep1.base && rep1.base.address == rep2.address;
+}
+
+// Make a string to deduplicate to.
+var original = makeExtensibleStrFrom('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklm');
+
+// Construct T1 -> Nbase.
+var Nbase = makeExtensibleStrFrom('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklm');
+var T1 = newDependentString(Nbase, 0, 60, { tenured: true });
+
+// The engine refuses to create T2 -> T1 -> Nbase
+// (it creates T2 -> Nbase instead, to avoid dependency chains).
+var T2 = newDependentString(T1, 30, { tenured: true });
+
+assertEq(dependsOn(T2, Nbase), true, "expect: T2 -> base");
+
+// Construct T1 -> Ndep1 (was Nbase) -> Nbase2.
+var Nbase2 = newRope(Nbase, "ABC");
+ensureLinearString(Nbase2);
+var Ndep1 = Nbase;
+
+assertEq(dependsOn(T1, Ndep1), true, "expect: T1 -> Ndep1");
+assertEq(dependsOn(Ndep1, Nbase2), true, "expect: Ndep1 -> Nbase2");
+
+// Fail to construct T3 -> Tbase3 -> Nbase4. It will refuse because T3 would be using
+// chars from Nbase4 that can't be updated since T3 is not in the store buffer. Instead,
+// it will allocate a new buffer for the rope root, leaving Tbase3 alone and keeping
+// T3 -> Tbase3.
+var Tbase3 = makeExtensibleStrFrom('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklm');
+minorgc();
+var T3 = newDependentString(Tbase3, 0, 30, { tenured: true });
+var Nbase4 = newRope(Tbase3, "DEF");
+ensureLinearString(Nbase4);
+assertEq(repr(Tbase3).isTenured, true, "Tbase3 is tenured");
+assertEq(repr(Tbase3).flags.includes("EXTENSIBLE"), true, "Tbase3 is extensible");
+assertEq(repr(Nbase4).flags.includes("DEPENDENT_BIT"), false, "expect: Nbase4 is not a dependent string")
+assertEq(repr(T3).flags.includes("DEPENDENT_BIT"), true, "expect: T3 is a dependent string")
+assertEq(dependsOn(T3, Tbase3), true, "expect: T3 -> Tbase3");
+
+function bug1879918() {
+ const s = JSON.parse('["abcdefabcdefabcdefabcdefabcdefabcdefabcdef"]')[0];
+ const dep = newDependentString(s, 1, { tenured: true });
+ minorgc();
+ assertEq(dep, "bcdefabcdefabcdefabcdefabcdefabcdefabcdef");
+}
+bug1879918();
diff --git a/js/src/jit-test/tests/gc/deduplicateTenuringStrings.js b/js/src/jit-test/tests/gc/deduplicateTenuringStrings.js
index 1b8259cc15..de2fb0c028 100644
--- a/js/src/jit-test/tests/gc/deduplicateTenuringStrings.js
+++ b/js/src/jit-test/tests/gc/deduplicateTenuringStrings.js
@@ -13,6 +13,7 @@
// We require predictable GC timing to make sure the correct
// strings are tenured together.
gczeal(0);
+gcparam('semispaceNurseryEnabled', 0);
var helperCode = `
function makeInlineStr(isLatin1) {
diff --git a/js/src/jit-test/tests/gc/gcparam.js b/js/src/jit-test/tests/gc/gcparam.js
index 05e0359088..c57b400642 100644
--- a/js/src/jit-test/tests/gc/gcparam.js
+++ b/js/src/jit-test/tests/gc/gcparam.js
@@ -60,3 +60,4 @@ testChangeParam("mallocThresholdBase");
testChangeParam("urgentThreshold");
testChangeParam("helperThreadRatio");
testChangeParam("maxHelperThreads");
+testChangeParam("semispaceNurseryEnabled");
diff --git a/js/src/jit-test/tests/gc/pretenuring.js b/js/src/jit-test/tests/gc/pretenuring.js
index 6f20706e9b..30156b0e98 100644
--- a/js/src/jit-test/tests/gc/pretenuring.js
+++ b/js/src/jit-test/tests/gc/pretenuring.js
@@ -1,6 +1,7 @@
// Test nursery string allocation and pretenuring.
gczeal(0);
+gcparam("semispaceNurseryEnabled", 0);
gcparam("minNurseryBytes", 4096 * 1024);
gcparam("maxNurseryBytes", 4096 * 1024);
diff --git a/js/src/jit-test/tests/heap-analysis/byteSize-of-string.js b/js/src/jit-test/tests/heap-analysis/byteSize-of-string.js
index aaac0c4f1f..b4cfdffb04 100644
--- a/js/src/jit-test/tests/heap-analysis/byteSize-of-string.js
+++ b/js/src/jit-test/tests/heap-analysis/byteSize-of-string.js
@@ -12,6 +12,7 @@
// stable.
gczeal(0); // Need to control when tenuring happens
+gcparam('semispaceNurseryEnabled', 0);
// Hack to skip this test if strings are not allocated in the nursery.
{
@@ -75,16 +76,20 @@ function tByteSize(str) {
// JSExternalString - limited by MaxStringLength - E
// JSThinInlineString 8 4 16 8 T
// JSFatInlineString 24 12 24 12 F
+// ThinInlineAtom 12 6 20 10 T
+// FatInlineAtom 20 10 20 10 F
// JSExtensibleString - limited by MaxStringLength - X
// Notes:
// - labels are suffixed with A for atoms and N for non-atoms
-// - atoms are 8 bytes larger than non-atoms, to store the atom's hash code.
+// - atoms store a 4 byte hash code, and some add to the size to adjust
// - Nursery-allocated strings require a header that stores the zone.
// Expected sizes based on type of string
const m32 = (getBuildConfiguration("pointer-byte-size") == 4);
-const TA = m32 ? 24 : 32; // ThinInlineString atom, includes a hash value
+const TA = m32 ? 24 : 32; // ThinInlineAtom (includes a hash value)
+const FA = m32 ? 32 : 32; // FatInlineAtom (includes a hash value)
+const NA = m32 ? 24 : 32; // NormalAtom
const TN = m32 ? 16 : 24; // ThinInlineString
const FN = m32 ? 32 : 32; // FatInlineString
const XN = m32 ? 16 : 24; // ExtensibleString, has additional storage buffer
@@ -95,8 +100,8 @@ const EN = m32 ? 16 : 24; // ExternalString
// A function that pads out a tenured size to the nursery size. We store a zone
// pointer in the nursery just before the string (4 bytes on 32-bit, 8 bytes on
// 64-bit), and the string struct itself must be 8-byte aligned (resulting in
-// +4 bytes on 32-bit, +0 bytes on 64-bit). The end result? Nursery strings are
-// 8 bytes larger.
+// +4 bytes on 32-bit, +0 bytes on 64-bit). The end result is that nursery
+// strings are 8 bytes larger.
const Nursery = m32 ? s => s + 4 + 4 : s => s + 8 + 0;
// Latin-1
@@ -130,6 +135,23 @@ assertEq(nByteSize("123456789.123456789.123456789.1"), s(Nursery(
assertEq(nByteSize("123456789.123456789.123456789.12"), s(Nursery(XN)+32,Nursery(XN)+32));
assertEq(nByteSize("123456789.123456789.123456789.123"), s(Nursery(XN)+64,Nursery(XN)+64));
+function Atom(s) { return Object.keys({ [s]: true })[0]; }
+assertEq(byteSize(Atom("1234567")), s(TA, TA));
+assertEq(byteSize(Atom("12345678")), s(TA, FA));
+assertEq(byteSize(Atom("123456789.12")), s(TA, FA));
+assertEq(byteSize(Atom("123456789.123")), s(FA, FA));
+assertEq(byteSize(Atom("123456789.12345")), s(FA, FA));
+assertEq(byteSize(Atom("123456789.123456")), s(FA, FA));
+assertEq(byteSize(Atom("123456789.1234567")), s(FA, FA));
+assertEq(byteSize(Atom("123456789.123456789.")), s(FA, FA));
+assertEq(byteSize(Atom("123456789.123456789.1")), s(NA+32, NA+32));
+assertEq(byteSize(Atom("123456789.123456789.123")), s(NA+32, NA+32));
+assertEq(byteSize(Atom("123456789.123456789.1234")), s(NA+32, NA+32));
+assertEq(byteSize(Atom("123456789.123456789.12345")), s(NA+32, NA+32));
+assertEq(byteSize(Atom("123456789.123456789.123456789.1")), s(NA+32, NA+32));
+assertEq(byteSize(Atom("123456789.123456789.123456789.12")), s(NA+32, NA+32));
+assertEq(byteSize(Atom("123456789.123456789.123456789.123")), s(NA+48, NA+48));
+
// Inline char16_t atoms.
// "Impassionate gods have never seen the red that is the Tatsuta River."
// - Ariwara no Narihira
@@ -183,20 +205,43 @@ assertEq(byteSize(rope8), s(Nurser
minorgc();
assertEq(byteSize(rope8), s(RN, RN));
var matches8 = rope8.match(/(de cuyo nombre no quiero acordarme)/);
-assertEq(byteSize(rope8), s(XN + 65536, XN + 65536));
+assertEq(byteSize(rope8), s(XN + 64 * 1024, XN + 64 * 1024));
+var ext8 = rope8; // Stop calling it what it's not (though it'll change again soon.)
// Test extensible strings.
//
// Appending another copy of the fragment should yield another rope.
//
-// Flatting that should turn the original rope into a dependent string, and
+// Flattening that should turn the original rope into a dependent string, and
// yield a new linear string, of the same size as the original.
-rope8a = rope8 + fragment8;
+var rope8a = ext8 + fragment8;
assertEq(byteSize(rope8a), s(Nursery(RN), Nursery(RN)));
rope8a.match(/x/, function() { assertEq(true, false); });
assertEq(byteSize(rope8a), s(Nursery(XN) + 65536, Nursery(XN) + 65536));
+assertEq(byteSize(ext8), s(DN, DN));
+
+// Latin-1 dependent strings in the nursery.
+assertEq(byteSize(ext8.substr(1000, 2000)), s(Nursery(DN), Nursery(DN)));
+assertEq(byteSize(matches8[0]), s(Nursery(DN), Nursery(DN)));
+assertEq(byteSize(matches8[1]), s(Nursery(DN), Nursery(DN)));
+
+// Tenure everything and do it again.
+ext8 = copyString(ext8);
+rope8a = ext8 + fragment8;
+minorgc();
+assertEq(byteSize(rope8a), s(RN, RN));
+rope8a.match(/x/, function() { assertEq(true, false); });
+assertEq(byteSize(rope8a), s(XN + 65536, XN + 65536));
assertEq(byteSize(rope8), s(RN, RN));
+// Latin-1 tenured dependent strings.
+function tenure(s) {
+ minorgc();
+ return s;
+}
+assertEq(byteSize(tenure(rope8.substr(1000, 2000))), s(DN, DN));
+assertEq(byteSize(matches8[0]), s(DN, DN));
+assertEq(byteSize(matches8[1]), s(DN, DN));
// A char16_t rope. This changes size when flattened.
// "From the Heliconian Muses let us begin to sing"
@@ -207,13 +252,11 @@ for (var i = 0; i < 10; i++) // 1024 repetitions
rope16 = rope16 + rope16;
assertEq(byteSize(rope16), s(Nursery(RN), Nursery(RN)));
let matches16 = rope16.match(/(Ἑλικωνιάδων ἀρχώμεθ᾽)/);
-assertEq(byteSize(rope16), s(Nursery(RN) + 131072, Nursery(RN) + 131072));
+assertEq(byteSize(rope16), s(Nursery(XN) + 128 * 1024, Nursery(XN) + 128 * 1024));
+var ext16 = rope16;
-// Latin-1 and char16_t dependent strings.
-assertEq(byteSize(rope8.substr(1000, 2000)), s(Nursery(DN), Nursery(DN)));
-assertEq(byteSize(rope16.substr(1000, 2000)), s(Nursery(DN), Nursery(DN)));
-assertEq(byteSize(matches8[0]), s(Nursery(DN), Nursery(DN)));
-assertEq(byteSize(matches8[1]), s(Nursery(DN), Nursery(DN)));
+// char16_t dependent strings in the nursery.
+assertEq(byteSize(ext16.substr(1000, 2000)), s(Nursery(DN), Nursery(DN)));
assertEq(byteSize(matches16[0]), s(Nursery(DN), Nursery(DN)));
assertEq(byteSize(matches16[1]), s(Nursery(DN), Nursery(DN)));
@@ -221,13 +264,23 @@ assertEq(byteSize(matches16[1]), s(Nurser
//
// Appending another copy of the fragment should yield another rope.
//
-// Flatting that should turn the original rope into a dependent string, and
+// Flattening that should turn the original rope into a dependent string, and
// yield a new linear string, of the same size as the original.
-rope16a = rope16 + fragment16;
+rope16a = ext16 + fragment16;
assertEq(byteSize(rope16a), s(Nursery(RN), Nursery(RN)));
rope16a.match(/x/, function() { assertEq(true, false); });
-assertEq(byteSize(rope16a), s(Nursery(XN) + 131072, Nursery(XN) + 131072));
-assertEq(byteSize(rope16), s(Nursery(XN), Nursery(XN)));
+assertEq(byteSize(rope16a), s(Nursery(XN) + 128 * 1024, Nursery(XN) + 128 * 1024));
+assertEq(byteSize(ext16), s(Nursery(DN), Nursery(DN)));
+
+// Tenure everything and try again. This time it should steal the extensible
+// characters and convert the root into an extensible string using them.
+ext16 = copyString(ext16);
+rope16a = ext16 + fragment16;
+minorgc();
+assertEq(byteSize(rope16a), s(RN, RN));
+rope16a.match(/x/, function() { assertEq(true, false); });
+assertEq(byteSize(rope16a), s(XN + 128 * 1024, XN + 128 * 1024));
+assertEq(byteSize(ext16), s(RN, RN));
// Test external strings.
//
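
A quick worked example of the Nursery() padding helper defined earlier in this test (arithmetic only, using the sizes from the table above): on 64-bit builds a nursery string is preceded by an 8-byte zone pointer and needs no extra alignment padding, so Nursery(TN) = 24 + 8 + 0 = 32; on 32-bit builds the zone pointer is 4 bytes and another 4 bytes of padding keep the string 8-byte aligned, so Nursery(TN) = 16 + 4 + 4 = 24.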
diff --git a/js/src/jit-test/tests/ion/apply-native-arguments-object.js b/js/src/jit-test/tests/ion/apply-native-arguments-object.js
new file mode 100644
index 0000000000..e06a5e0965
--- /dev/null
+++ b/js/src/jit-test/tests/ion/apply-native-arguments-object.js
@@ -0,0 +1,46 @@
+load(libdir + "array-compare.js");
+
+const xs = [
+ // Zero arguments.
+ [],
+
+ // Single argument.
+ [1],
+
+ // Few arguments. Even number of arguments.
+ [1, 2],
+
+ // Few arguments. Odd number of arguments.
+ [1, 2, 3],
+
+ // Many arguments. Even number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
+
+ // Many arguments. Odd number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9],
+];
+
+function escape() {
+ with ({}) ;
+}
+
+function f() {
+ // Let |arguments| escape to force the allocation of an arguments object.
+ escape(arguments);
+
+ // FunApply to a native function with an arguments object.
+ return Array.apply(null, arguments);
+}
+
+// Don't inline |f| into the top-level script.
+with ({}) ;
+
+for (let i = 0; i < 400; ++i) {
+ let x = xs[i % xs.length];
+
+ // NB: Array(1) creates the array `[,]`.
+ let expected = x.length !== 1 ? x : [,];
+
+ let result = f.apply(null, x);
+ assertEq(arraysEqual(result, expected), true);
+}
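
As a reminder of the Array-constructor quirk the `[,]` special case in these apply-native tests guards against (plain ECMAScript behaviour, not specific to this patch):

    assertEq(Array(3).length, 3);         // a single number argument sets the length
    assertEq(0 in Array(3), false);       // ...and creates only holes, no elements
    assertEq(Array(1, 2).join(), "1,2");  // two or more arguments become elements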
diff --git a/js/src/jit-test/tests/ion/apply-native-arguments.js b/js/src/jit-test/tests/ion/apply-native-arguments.js
new file mode 100644
index 0000000000..3d6729ca76
--- /dev/null
+++ b/js/src/jit-test/tests/ion/apply-native-arguments.js
@@ -0,0 +1,39 @@
+load(libdir + "array-compare.js");
+
+const xs = [
+ // Zero arguments.
+ [],
+
+ // Single argument.
+ [1],
+
+ // Few arguments. Even number of arguments.
+ [1, 2],
+
+ // Few arguments. Odd number of arguments.
+ [1, 2, 3],
+
+ // Many arguments. Even number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
+
+ // Many arguments. Odd number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9],
+];
+
+function f() {
+ // FunApply to a native function with frame arguments.
+ return Array.apply(null, arguments);
+}
+
+// Don't inline |f| into the top-level script.
+with ({}) ;
+
+for (let i = 0; i < 400; ++i) {
+ let x = xs[i % xs.length];
+
+ // NB: Array(1) creates the array `[,]`.
+ let expected = x.length !== 1 ? x : [,];
+
+ let result = f.apply(null, x);
+ assertEq(arraysEqual(result, expected), true);
+}
diff --git a/js/src/jit-test/tests/ion/apply-native-array.js b/js/src/jit-test/tests/ion/apply-native-array.js
new file mode 100644
index 0000000000..0dfa2df947
--- /dev/null
+++ b/js/src/jit-test/tests/ion/apply-native-array.js
@@ -0,0 +1,39 @@
+load(libdir + "array-compare.js");
+
+const xs = [
+ // Zero arguments.
+ [],
+
+ // Single argument.
+ [1],
+
+ // Few arguments. Even number of arguments.
+ [1, 2],
+
+ // Few arguments. Odd number of arguments.
+ [1, 2, 3],
+
+ // Many arguments. Even number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
+
+ // Many arguments. Odd number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9],
+];
+
+function f(x) {
+ // FunApply to a native function with an array.
+ return Array.apply(null, x);
+}
+
+// Don't inline |f| into the top-level script.
+with ({}) ;
+
+for (let i = 0; i < 400; ++i) {
+ let x = xs[i % xs.length];
+
+ // NB: Array(1) creates the array `[,]`.
+ let expected = x.length !== 1 ? x : [,];
+
+ let result = f(x);
+ assertEq(arraysEqual(result, expected), true);
+}
diff --git a/js/src/jit-test/tests/ion/apply-native-spreadcall-arguments.js b/js/src/jit-test/tests/ion/apply-native-spreadcall-arguments.js
new file mode 100644
index 0000000000..9f769e4a59
--- /dev/null
+++ b/js/src/jit-test/tests/ion/apply-native-spreadcall-arguments.js
@@ -0,0 +1,39 @@
+load(libdir + "array-compare.js");
+
+const xs = [
+ // Zero arguments.
+ [],
+
+ // Single argument.
+ [1],
+
+ // Few arguments. Even number of arguments.
+ [1, 2],
+
+ // Few arguments. Odd number of arguments.
+ [1, 2, 3],
+
+ // Many arguments. Even number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
+
+ // Many arguments. Odd number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9],
+];
+
+function f() {
+ // SpreadCall to a native function with frame arguments.
+ return Array(...arguments);
+}
+
+// Don't inline |f| into the top-level script.
+with ({}) ;
+
+for (let i = 0; i < 400; ++i) {
+ let x = xs[i % xs.length];
+
+ // NB: Array(1) creates the array `[,]`.
+ let expected = x.length !== 1 ? x : [,];
+
+ let result = f.apply(null, x);
+ assertEq(arraysEqual(result, expected), true);
+}
diff --git a/js/src/jit-test/tests/ion/apply-native-spreadcall-array.js b/js/src/jit-test/tests/ion/apply-native-spreadcall-array.js
new file mode 100644
index 0000000000..24e5621484
--- /dev/null
+++ b/js/src/jit-test/tests/ion/apply-native-spreadcall-array.js
@@ -0,0 +1,39 @@
+load(libdir + "array-compare.js");
+
+const xs = [
+ // Zero arguments.
+ [],
+
+ // Single argument.
+ [1],
+
+ // Few arguments. Even number of arguments.
+ [1, 2],
+
+ // Few arguments. Odd number of arguments.
+ [1, 2, 3],
+
+ // Many arguments. Even number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
+
+ // Many arguments. Odd number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9],
+];
+
+function f(x) {
+ // SpreadCall to a native function with an array.
+ return Array(...x);
+}
+
+// Don't inline |f| into the top-level script.
+with ({}) ;
+
+for (let i = 0; i < 400; ++i) {
+ let x = xs[i % xs.length];
+
+ // NB: Array(1) creates the array `[,]`.
+ let expected = x.length !== 1 ? x : [,];
+
+ let result = f(x);
+ assertEq(arraysEqual(result, expected), true);
+}
diff --git a/js/src/jit-test/tests/ion/apply-native-spreadcall-rest.js b/js/src/jit-test/tests/ion/apply-native-spreadcall-rest.js
new file mode 100644
index 0000000000..ba7038244d
--- /dev/null
+++ b/js/src/jit-test/tests/ion/apply-native-spreadcall-rest.js
@@ -0,0 +1,39 @@
+load(libdir + "array-compare.js");
+
+const xs = [
+ // Zero arguments.
+ [],
+
+ // Single argument.
+ [1],
+
+ // Few arguments. Even number of arguments.
+ [1, 2],
+
+ // Few arguments. Odd number of arguments.
+ [1, 2, 3],
+
+ // Many arguments. Even number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
+
+ // Many arguments. Odd number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9],
+];
+
+function f(...x) {
+ // SpreadCall to a native function with rest-args.
+ return Array(...x);
+}
+
+// Don't inline |f| into the top-level script.
+with ({}) ;
+
+for (let i = 0; i < 400; ++i) {
+ let x = xs[i % xs.length];
+
+ // NB: Array(1) creates the array `[,]`.
+ let expected = x.length !== 1 ? x : [,];
+
+ let result = f.apply(null, x);
+ assertEq(arraysEqual(result, expected), true);
+}
diff --git a/js/src/jit-test/tests/ion/apply-native-spreadnew-arguments.js b/js/src/jit-test/tests/ion/apply-native-spreadnew-arguments.js
new file mode 100644
index 0000000000..7e31cdcbd6
--- /dev/null
+++ b/js/src/jit-test/tests/ion/apply-native-spreadnew-arguments.js
@@ -0,0 +1,39 @@
+load(libdir + "array-compare.js");
+
+const xs = [
+ // Zero arguments.
+ [],
+
+ // Single argument.
+ [1],
+
+ // Few arguments. Even number of arguments.
+ [1, 2],
+
+ // Few arguments. Odd number of arguments.
+ [1, 2, 3],
+
+ // Many arguments. Even number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
+
+ // Many arguments. Odd number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9],
+];
+
+function f() {
+ // SpreadNew to a native function with frame arguments.
+ return new Array(...arguments);
+}
+
+// Don't inline |f| into the top-level script.
+with ({}) ;
+
+for (let i = 0; i < 400; ++i) {
+ let x = xs[i % xs.length];
+
+ // NB: Array(1) creates the array `[,]`.
+ let expected = x.length !== 1 ? x : [,];
+
+ let result = f.apply(null, x);
+ assertEq(arraysEqual(result, expected), true);
+}
diff --git a/js/src/jit-test/tests/ion/apply-native-spreadnew-array.js b/js/src/jit-test/tests/ion/apply-native-spreadnew-array.js
new file mode 100644
index 0000000000..5c716f48b4
--- /dev/null
+++ b/js/src/jit-test/tests/ion/apply-native-spreadnew-array.js
@@ -0,0 +1,39 @@
+load(libdir + "array-compare.js");
+
+const xs = [
+ // Zero arguments.
+ [],
+
+ // Single argument.
+ [1],
+
+ // Few arguments. Even number of arguments.
+ [1, 2],
+
+ // Few arguments. Odd number of arguments.
+ [1, 2, 3],
+
+ // Many arguments. Even number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
+
+ // Many arguments. Odd number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9],
+];
+
+function f(x) {
+ // SpreadNew to a native function with an array.
+ return new Array(...x);
+}
+
+// Don't inline |f| into the top-level script.
+with ({}) ;
+
+for (let i = 0; i < 400; ++i) {
+ let x = xs[i % xs.length];
+
+ // NB: Array(1) creates the array `[,]`.
+ let expected = x.length !== 1 ? x : [,];
+
+ let result = f(x);
+ assertEq(arraysEqual(result, expected), true);
+}
diff --git a/js/src/jit-test/tests/ion/apply-native-spreadnew-newtarget.js b/js/src/jit-test/tests/ion/apply-native-spreadnew-newtarget.js
new file mode 100644
index 0000000000..9ffe53277b
--- /dev/null
+++ b/js/src/jit-test/tests/ion/apply-native-spreadnew-newtarget.js
@@ -0,0 +1,66 @@
+load(libdir + "array-compare.js");
+
+const xs = [
+ // Zero arguments.
+ [],
+
+ // Single argument.
+ [1],
+
+ // Few arguments. Even number of arguments.
+ [1, 2],
+
+ // Few arguments. Odd number of arguments.
+ [1, 2, 3],
+
+ // Many arguments. Even number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
+
+ // Many arguments. Odd number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9],
+];
+
+class ArrayWithExplicitConstructor extends Array {
+ constructor(...args) {
+ super(...args);
+ }
+}
+
+class ArrayWithImplicitConstructor extends Array {
+ constructor(...args) {
+ super(...args);
+ }
+}
+
+function f(...x) {
+ return new ArrayWithExplicitConstructor(...x);
+}
+
+function g(...x) {
+ return new ArrayWithImplicitConstructor(...x);
+}
+
+// Don't inline |f| and |g| into the top-level script.
+with ({}) ;
+
+for (let i = 0; i < 400; ++i) {
+ let x = xs[i % xs.length];
+
+ // NB: Array(1) creates the array `[,]`.
+ let expected = x.length !== 1 ? x : [,];
+
+ let result = f.apply(null, x);
+ assertEq(arraysEqual(result, expected), true);
+ assertEq(Object.getPrototypeOf(result), ArrayWithExplicitConstructor.prototype);
+}
+
+for (let i = 0; i < 400; ++i) {
+ let x = xs[i % xs.length];
+
+ // NB: Array(1) creates the array `[,]`.
+ let expected = x.length !== 1 ? x : [,];
+
+ let result = g.apply(null, x);
+ assertEq(arraysEqual(result, expected), true);
+ assertEq(Object.getPrototypeOf(result), ArrayWithImplicitConstructor.prototype);
+}
diff --git a/js/src/jit-test/tests/ion/apply-native-spreadnew-rest.js b/js/src/jit-test/tests/ion/apply-native-spreadnew-rest.js
new file mode 100644
index 0000000000..58de8fa239
--- /dev/null
+++ b/js/src/jit-test/tests/ion/apply-native-spreadnew-rest.js
@@ -0,0 +1,39 @@
+load(libdir + "array-compare.js");
+
+const xs = [
+ // Zero arguments.
+ [],
+
+ // Single argument.
+ [1],
+
+ // Few arguments. Even number of arguments.
+ [1, 2],
+
+ // Few arguments. Odd number of arguments.
+ [1, 2, 3],
+
+ // Many arguments. Even number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
+
+ // Many arguments. Odd number of arguments.
+ [1, 2, 3, 4, 5, 6, 7, 8, 9],
+];
+
+function f(...x) {
+ // SpreadNew to a native function with rest-args.
+ return new Array(...x);
+}
+
+// Don't inline |f| into the top-level script.
+with ({}) ;
+
+for (let i = 0; i < 400; ++i) {
+ let x = xs[i % xs.length];
+
+ // NB: Array(1) creates the array `[,]`.
+ let expected = x.length !== 1 ? x : [,];
+
+ let result = f.apply(null, x);
+ assertEq(arraysEqual(result, expected), true);
+}
diff --git a/js/src/jit-test/tests/ion/recover-atomics-islockfree.js b/js/src/jit-test/tests/ion/recover-atomics-islockfree.js
new file mode 100644
index 0000000000..2a57afd49b
--- /dev/null
+++ b/js/src/jit-test/tests/ion/recover-atomics-islockfree.js
@@ -0,0 +1,25 @@
+// |jit-test| --fast-warmup; --ion-offthread-compile=off
+
+function foo(n, trigger) {
+ let result = Atomics.isLockFree(n * -1);
+ if (trigger) {
+ assertEq(result, false);
+ }
+}
+
+for (var i = 0; i < 100; i++) {
+ foo(-50, false);
+}
+foo(0, true);
+
+function bar(n, trigger) {
+ let result = Atomics.isLockFree(n * 4);
+ if (trigger) {
+ assertEq(result, false);
+ }
+}
+
+for (var i = 0; i < 100; i++) {
+ bar(1, false);
+}
+bar(0x40000001, true);
diff --git a/js/src/jit-test/tests/ion/recover-string-from-charcode.js b/js/src/jit-test/tests/ion/recover-string-from-charcode.js
new file mode 100644
index 0000000000..be060be8e7
--- /dev/null
+++ b/js/src/jit-test/tests/ion/recover-string-from-charcode.js
@@ -0,0 +1,13 @@
+// |jit-test| --fast-warmup; --ion-offthread-compile=off
+
+function foo(n, trigger) {
+ let result = String.fromCharCode(n * -1);
+ if (trigger) {
+ assertEq(result, "\0");
+ }
+}
+
+for (var i = 0; i < 100; i++) {
+ foo(-50, false);
+}
+foo(0, true);
diff --git a/js/src/jit-test/tests/modules/bug-1888902.js b/js/src/jit-test/tests/modules/bug-1888902.js
new file mode 100644
index 0000000000..7804bef98a
--- /dev/null
+++ b/js/src/jit-test/tests/modules/bug-1888902.js
@@ -0,0 +1,16 @@
+// |jit-test| error:Error
+
+const v0 = `
+ function F1() {
+ const v11 = registerModule("module1", parseModule(\`import {} from "module2";
+ import {} from "module3";\`));
+ const v13 = "await 1;";
+ drainJobQueue();
+ registerModule("module2", parseModule(v13));
+ registerModule("module3", parseModule(v0));
+ moduleLink(v11);
+ moduleEvaluate(v11);
+ }
+ F1();
+`;
+eval(v0);
diff --git a/js/src/jit-test/tests/modules/dynamic-import-error.js b/js/src/jit-test/tests/modules/dynamic-import-error.js
index 98a6af75d0..56713c8485 100644
--- a/js/src/jit-test/tests/modules/dynamic-import-error.js
+++ b/js/src/jit-test/tests/modules/dynamic-import-error.js
@@ -1,5 +1,3 @@
-// |jit-test| module
-
let result = null;
let error = null;
let promise = import("nonexistent.js");
diff --git a/js/src/jit-test/tests/modules/dynamic-import-module.js b/js/src/jit-test/tests/modules/dynamic-import-module.js
index 3c004258a3..fa19b74303 100644
--- a/js/src/jit-test/tests/modules/dynamic-import-module.js
+++ b/js/src/jit-test/tests/modules/dynamic-import-module.js
@@ -1,5 +1,3 @@
-// |jit-test| module
-
function testImport(path, name, value) {
let result = null;
let error = null;
diff --git a/js/src/jit-test/tests/modules/inline-data-2.js b/js/src/jit-test/tests/modules/inline-data-2.js
new file mode 100644
index 0000000000..8dbf92574d
--- /dev/null
+++ b/js/src/jit-test/tests/modules/inline-data-2.js
@@ -0,0 +1,12 @@
+let result = null;
+let error = null;
+let promise = import("javascript: export let b = 100;");
+promise.then((ns) => {
+ result = ns;
+}).catch((e) => {
+ error = e;
+});
+
+drainJobQueue();
+assertEq(error, null);
+assertEq(result.b, 100);
diff --git a/js/src/jit-test/tests/modules/inline-data.js b/js/src/jit-test/tests/modules/inline-data.js
index 9c56856f8d..d81da0efe4 100644
--- a/js/src/jit-test/tests/modules/inline-data.js
+++ b/js/src/jit-test/tests/modules/inline-data.js
@@ -2,16 +2,3 @@
import { a } from "javascript: export let a = 42;";
assertEq(a, 42);
-
-let result = null;
-let error = null;
-let promise = import("javascript: export let b = 100;");
-promise.then((ns) => {
- result = ns;
-}).catch((e) => {
- error = e;
-});
-
-drainJobQueue();
-assertEq(error, null);
-assertEq(result.b, 100);
diff --git a/js/src/jit-test/tests/modules/shell-wrapper.js b/js/src/jit-test/tests/modules/shell-wrapper.js
index 1be1c486c6..058e574d4e 100644
--- a/js/src/jit-test/tests/modules/shell-wrapper.js
+++ b/js/src/jit-test/tests/modules/shell-wrapper.js
@@ -1,4 +1,3 @@
-// |jit-test| module
// Test shell ModuleObject wrapper's accessors and methods
load(libdir + "asserts.js");
@@ -49,10 +48,8 @@ const d = registerModule('d', parseModule(`
f();
`));
moduleLink(d);
-try {
- await moduleEvaluate(d);
-} catch (e) {
-}
+moduleEvaluate(d).catch(e => undefined);
+drainJobQueue();
assertEq(d.evaluationError instanceof ReferenceError, true);
testGetter(d, "evaluationError");
diff --git a/js/src/jit-test/tests/parser/bug1887176.js b/js/src/jit-test/tests/parser/bug1887176.js
new file mode 100644
index 0000000000..bea2db519b
--- /dev/null
+++ b/js/src/jit-test/tests/parser/bug1887176.js
@@ -0,0 +1,46 @@
+
+// This tests a case where TokenStreamAnyChars::fillExceptingContext
+// mishandled a wasm frame, leading to an assertion failure.
+
+if (!wasmIsSupported())
+ quit();
+
+const v0 = `
+ const o6 = {
+ f() {
+ function F2() {
+ if (!new.target) { throw 'must be called with new'; }
+ }
+ return F2();
+ return {}; // This can be anything, but it must be present
+ },
+ };
+
+ const o7 = {
+ "main": o6,
+ };
+
+ const v15 = new WebAssembly.Module(wasmTextToBinary(\`
+ (module
+ (import "main" "f" (func))
+ (func (export "go")
+ call 0
+ )
+ )\`));
+ const v16 = new WebAssembly.Instance(v15, o7);
+ v16.exports.go();
+`;
+
+const o27 = {
+ // Both "fileName" and null are necessary
+ "fileName": null,
+};
+
+let caught = false;
+try {
+ evaluate(v0, o27);
+} catch (e) {
+ assertEq(e, "must be called with new");
+ caught = true;
+}
+assertEq(caught, true);
diff --git a/js/src/jit-test/tests/parser/dumpStencil-02.js b/js/src/jit-test/tests/parser/dumpStencil-02.js
new file mode 100644
index 0000000000..e21962c36b
--- /dev/null
+++ b/js/src/jit-test/tests/parser/dumpStencil-02.js
@@ -0,0 +1,8 @@
+let caught = false;
+try {
+ dumpStencil("export var z;", { module : true, lineNumber: 0 });
+} catch (e) {
+ caught = true;
+ assertEq(e.message.includes("Module cannot be compiled with lineNumber == 0"), true);
+}
+assertEq(caught, true);
diff --git a/js/src/jit-test/tests/parser/module-filename.js b/js/src/jit-test/tests/parser/module-filename.js
new file mode 100644
index 0000000000..59017dd674
--- /dev/null
+++ b/js/src/jit-test/tests/parser/module-filename.js
@@ -0,0 +1,13 @@
+load(libdir + "asserts.js");
+
+compileToStencil("", { fileName: "", module: true });
+assertThrowsInstanceOf(() => {
+ compileToStencil("", { fileName: null, module: true });
+}, Error);
+
+if (helperThreadCount() > 0) {
+ offThreadCompileModuleToStencil("", { fileName: "", module: true });
+ assertThrowsInstanceOf(() => {
+ offThreadCompileModuleToStencil("", { fileName: null, module: true });
+ }, Error);
+}
diff --git a/js/src/jit-test/tests/profiler/native-trampoline-2.js b/js/src/jit-test/tests/profiler/native-trampoline-2.js
new file mode 100644
index 0000000000..a85913431b
--- /dev/null
+++ b/js/src/jit-test/tests/profiler/native-trampoline-2.js
@@ -0,0 +1,7 @@
+let arr = [1, 2, 3, 4, 5, 6, 7, 8];
+arr.sort((x, y) => {
+ enableGeckoProfilingWithSlowAssertions();
+ readGeckoProfilingStack();
+ return y - x;
+});
+assertEq(arr.toString(), "8,7,6,5,4,3,2,1");
diff --git a/js/src/jit-test/tests/profiler/native-trampoline-3.js b/js/src/jit-test/tests/profiler/native-trampoline-3.js
new file mode 100644
index 0000000000..16fe547051
--- /dev/null
+++ b/js/src/jit-test/tests/profiler/native-trampoline-3.js
@@ -0,0 +1,32 @@
+// |jit-test| skip-if: !wasmIsSupported()
+
+// Use a Wasm module to get the following stack frames:
+//
+// .. => array sort trampoline => wasmfunc comparator (Wasm) => comparator (JS)
+
+let binary = wasmTextToBinary(`
+(module
+ (import "" "comparator" (func $comparator (param i32) (param i32) (result i32)))
+ (func $wasmfunc
+ (export "wasmfunc")
+ (param $x i32)
+ (param $y i32)
+ (result i32)
+ (return (call $comparator (local.get $x) (local.get $y)))
+ )
+)`);
+let mod = new WebAssembly.Module(binary);
+let instance = new WebAssembly.Instance(mod, {"": {comparator}});
+
+function comparator(x, y) {
+ readGeckoProfilingStack();
+ return y - x;
+}
+
+enableGeckoProfilingWithSlowAssertions();
+
+for (let i = 0; i < 20; i++) {
+ let arr = [3, 1, 2, -1, 0, 4];
+ arr.sort(instance.exports.wasmfunc);
+ assertEq(arr.toString(), "4,3,2,1,0,-1");
+}
diff --git a/js/src/jit-test/tests/profiler/native-trampoline.js b/js/src/jit-test/tests/profiler/native-trampoline.js
new file mode 100644
index 0000000000..e140874a15
--- /dev/null
+++ b/js/src/jit-test/tests/profiler/native-trampoline.js
@@ -0,0 +1,40 @@
+enableGeckoProfilingWithSlowAssertions();
+
+function testBasic() {
+ var arr = [2, -1];
+ var cmp = function(x, y) {
+ readGeckoProfilingStack();
+ return x - y;
+ };
+ for (var i = 0; i < 20; i++) {
+ arr.sort(cmp);
+ }
+}
+testBasic();
+
+function testRectifierFrame() {
+ var arr = [2, -1];
+ var cmp = function(x, y, z, a) {
+ readGeckoProfilingStack();
+ return x - y;
+ };
+ for (var i = 0; i < 20; i++) {
+ arr.sort(cmp);
+ }
+}
+testRectifierFrame();
+
+function testRectifierFrameCaller() {
+ var o = {};
+ var calls = 0;
+ Object.defineProperty(o, "length", {get: function() {
+ calls++;
+ readGeckoProfilingStack();
+ return 0;
+ }});
+ for (var i = 0; i < 20; i++) {
+ Array.prototype.sort.call(o);
+ }
+ assertEq(calls, 20);
+}
+testRectifierFrameCaller();
diff --git a/js/src/jit-test/tests/profiler/wasm-to-js-1.js b/js/src/jit-test/tests/profiler/wasm-to-js-1.js
new file mode 100644
index 0000000000..2ce48f391c
--- /dev/null
+++ b/js/src/jit-test/tests/profiler/wasm-to-js-1.js
@@ -0,0 +1,20 @@
+// |jit-test| skip-if: !wasmIsSupported(); --fast-warmup
+function sample() {
+ enableGeckoProfiling();
+ readGeckoProfilingStack();
+ disableGeckoProfiling();
+}
+const text = `(module
+ (import "m" "f" (func $f))
+ (func (export "test")
+ (call $f)
+))`;
+const bytes = wasmTextToBinary(text);
+const mod = new WebAssembly.Module(bytes);
+const imports = {"m": {"f": sample}};
+const instance = new WebAssembly.Instance(mod, imports);
+sample();
+for (let i = 0; i < 5; i++) {
+ gc(this, "shrinking");
+ instance.exports.test();
+}
diff --git a/js/src/jit-test/tests/profiler/wasm-to-js-2.js b/js/src/jit-test/tests/profiler/wasm-to-js-2.js
new file mode 100644
index 0000000000..3949c3a587
--- /dev/null
+++ b/js/src/jit-test/tests/profiler/wasm-to-js-2.js
@@ -0,0 +1,19 @@
+// |jit-test| skip-if: !wasmIsSupported()
+// Ensure readGeckoProfilingStack finds at least 1 Wasm frame on the stack.
+function calledFromWasm() {
+ let frames = readGeckoProfilingStack().flat();
+ assertEq(frames.filter(f => f.kind === "wasm").length >= 1, true);
+}
+enableGeckoProfiling();
+const text = `(module
+ (import "m" "f" (func $f))
+ (func (export "test")
+ (call $f)
+))`;
+const bytes = wasmTextToBinary(text);
+const mod = new WebAssembly.Module(bytes);
+const imports = {"m": {"f": calledFromWasm}};
+const instance = new WebAssembly.Instance(mod, imports);
+for (let i = 0; i < 150; i++) {
+ instance.exports.test();
+}
diff --git a/js/src/jit-test/tests/promise/allSettled-dead.js b/js/src/jit-test/tests/promise/allSettled-dead.js
new file mode 100644
index 0000000000..8ae8e53d6b
--- /dev/null
+++ b/js/src/jit-test/tests/promise/allSettled-dead.js
@@ -0,0 +1,20 @@
+newGlobal();
+const g = newGlobal({
+ "newCompartment": true,
+});
+const p1 = g.eval(`
+Promise.resolve();
+`);
+const p2 = p1.then();
+nukeAllCCWs();
+ignoreUnhandledRejections();
+Promise.resolve = function() {
+ return p2;
+};
+let caught = false;
+Promise.allSettled([1]).catch(e => {
+ caught = true;
+ assertEq(e.message.includes("dead object"), true);
+});
+drainJobQueue();
+assertEq(caught, true);
diff --git a/js/src/jit-test/tests/promise/jobqueue-interrupt-01.js b/js/src/jit-test/tests/promise/jobqueue-interrupt-01.js
new file mode 100644
index 0000000000..758680e031
--- /dev/null
+++ b/js/src/jit-test/tests/promise/jobqueue-interrupt-01.js
@@ -0,0 +1,23 @@
+// catchTermination should undo the quit() operation and let the remaining jobs
+// run.
+
+evaluate(`
+ quit();
+`, {
+ catchTermination : true
+});
+
+const global = newGlobal({ newCompartment: true });
+
+let called = false;
+const dbg = new Debugger(global);
+dbg.onDebuggerStatement = function (frame) {
+ Promise.resolve(42).then(v => { called = true; });
+};
+global.eval(`
+ debugger;
+`);
+
+drainJobQueue();
+
+assertEq(called, true);
diff --git a/js/src/jit-test/tests/promise/jobqueue-interrupt-02.js b/js/src/jit-test/tests/promise/jobqueue-interrupt-02.js
new file mode 100644
index 0000000000..8d8f27ef91
--- /dev/null
+++ b/js/src/jit-test/tests/promise/jobqueue-interrupt-02.js
@@ -0,0 +1,14 @@
+// quit() while draining job queue leaves the remaining jobs untouched.
+
+const global = newGlobal({ newCompartment:true });
+const dbg = Debugger(global);
+dbg.onDebuggerStatement = function() {
+ Promise.resolve().then(() => {
+ quit();
+ });
+ Promise.resolve().then(() => {
+ // This shouldn't be called.
+ assertEq(true, false);
+ });
+};
+global.eval("debugger");
diff --git a/js/src/jit-test/tests/proxy/bug1885774.js b/js/src/jit-test/tests/proxy/bug1885774.js
new file mode 100644
index 0000000000..fa88cbf823
--- /dev/null
+++ b/js/src/jit-test/tests/proxy/bug1885774.js
@@ -0,0 +1,25 @@
+// |jit-test| --no-threads; --fast-warmup
+
+var {proxy, revoke} = Proxy.revocable({x:1}, {});
+
+function foo(o) {
+ var res = 0;
+ for (var i = 0; i < 2; i++) {
+ res += o.x;
+ }
+ return res;
+}
+
+with ({}) {}
+for (var i = 0; i < 100; i++) {
+ assertEq(foo(proxy), 2);
+}
+
+revoke();
+var caught = false;
+try {
+ foo(proxy);
+} catch {
+ caught = true;
+}
+assertEq(caught, true);
diff --git a/js/src/jit-test/tests/structured-clone/bug1888727.js b/js/src/jit-test/tests/structured-clone/bug1888727.js
new file mode 100644
index 0000000000..7958781c92
--- /dev/null
+++ b/js/src/jit-test/tests/structured-clone/bug1888727.js
@@ -0,0 +1,21 @@
+function test() {
+ // Construct a structured clone of a random BigInt value.
+ const n = 0xfeeddeadbeef2dadfeeddeadbeef2dadfeeddeadbeef2dadfeeddeadbeef2dadn;
+ const s = serialize(n, [], {scope: 'DifferentProcess'});
+ assertEq(deserialize(s), n);
+
+ // Truncate it by chopping off the last 8 bytes.
+ s.clonebuffer = s.arraybuffer.slice(0, -8);
+
+ // Deserialization should now throw a catchable exception.
+ try {
+ deserialize(s);
+ // The bug was throwing an uncatchable error, so this next assertion won't
+ // be reached in either the buggy or fixed code.
+ assertEq(true, false, "should have thrown truncation error");
+ } catch (e) {
+ assertEq(e.message.includes("truncated"), true);
+ }
+}
+
+test();
diff --git a/js/src/jit-test/tests/structured-clone/tenuring.js b/js/src/jit-test/tests/structured-clone/tenuring.js
index 0fffa064fa..cec53a6956 100644
--- a/js/src/jit-test/tests/structured-clone/tenuring.js
+++ b/js/src/jit-test/tests/structured-clone/tenuring.js
@@ -1,4 +1,4 @@
-// Check that we switch to allocating in the tenure heap after the first
+// Check that we switch to allocating in the tenured heap after the first
// nursery collection.
function buildObjectTree(depth) {
@@ -82,6 +82,7 @@ function countHeapLocations(tree, objectTree, counts) {
gczeal(0);
gcparam('minNurseryBytes', 1024 * 1024);
gcparam('maxNurseryBytes', 1024 * 1024);
+gcparam('semispaceNurseryEnabled', 0);
gc();
testRoundTrip(1, true, true);
diff --git a/js/src/jit-test/tests/typedarray/resizable-typedarray-from-pinned-buffer.js b/js/src/jit-test/tests/typedarray/resizable-typedarray-from-pinned-buffer.js
new file mode 100644
index 0000000000..b17c7c0157
--- /dev/null
+++ b/js/src/jit-test/tests/typedarray/resizable-typedarray-from-pinned-buffer.js
@@ -0,0 +1,9 @@
+// |jit-test| --enable-arraybuffer-resizable
+
+let ab = new ArrayBuffer(8, {maxByteLength: 10});
+
+pinArrayBufferOrViewLength(ab);
+
+let ta = new Int8Array(ab);
+
+assertEq(ta.length, 8);
diff --git a/js/src/jit-test/tests/warp/bug1876425.js b/js/src/jit-test/tests/warp/bug1876425.js
new file mode 100644
index 0000000000..aca528aac6
--- /dev/null
+++ b/js/src/jit-test/tests/warp/bug1876425.js
@@ -0,0 +1,62 @@
+// 1) Trial inline f1 => g (g1) => h.
+// 2) Set g to g2, to fail the f1 => g1 call site.
+// 3) Set g to g1 again.
+// 4) Make g1's generic ICScript trial inline a different callee, h2.
+// 5) Bail out from f1 => g1 => h.
+//
+// The bailout must not confuse the ICScripts of h1 and h2.
+
+function noninlined1(x) {
+ with (this) {};
+ if (x === 4002) {
+ // Step 4.
+ f2();
+ // Step 5.
+ return true;
+ }
+ return false;
+}
+function noninlined2(x) {
+ with (this) {};
+ if (x === 4000) {
+ // Step 2.
+ g = (h, x) => {
+ return x + 1;
+ };
+ }
+ if (x === 4001) {
+ // Step 3.
+ g = g1;
+ }
+}
+var h = function(x) {
+ if (noninlined1(x)) {
+ // Step 5.
+ bailout();
+ }
+ return x + 1;
+};
+var g = function(callee, x) {
+ return callee(x) + 1;
+};
+var g1 = g;
+
+function f2() {
+ var h2 = x => x + 1;
+ for (var i = 0; i < 300; i++) {
+ var x = (i % 2 === 0) ? "foo" : i; // Force trial inlining.
+ g1(h2, x);
+ }
+}
+
+function f1() {
+ for (var i = 0; i < 4200; i++) {
+ var x = (i < 900 && i % 2 === 0) ? "foo" : i; // Force trial inlining.
+ g(h, x);
+ noninlined2(i);
+ if (i === 200) {
+ trialInline();
+ }
+ }
+}
+f1();
diff --git a/js/src/jit-test/tests/wasm/directiveless/bug1877358.js b/js/src/jit-test/tests/wasm/directiveless/bug1877358.js
index 10cb54398a..8d512efcfe 100644
--- a/js/src/jit-test/tests/wasm/directiveless/bug1877358.js
+++ b/js/src/jit-test/tests/wasm/directiveless/bug1877358.js
@@ -1,4 +1,4 @@
-// |jit-test| -P wasm_exceptions=false; include:wasm.js
+// |jit-test| include:wasm.js
let {test} = wasmEvalText(`(module
(func $m (import "" "m"))
diff --git a/js/src/jit-test/tests/wasm/gc/casting.js b/js/src/jit-test/tests/wasm/gc/casting.js
index a71a589db8..3b550e6415 100644
--- a/js/src/jit-test/tests/wasm/gc/casting.js
+++ b/js/src/jit-test/tests/wasm/gc/casting.js
@@ -114,3 +114,72 @@ function testAllCasts(types) {
}
}
testAllCasts(TYPES);
+
+// Test that combinations of ref.test and ref.cast compile correctly.
+// (These can be optimized together.)
+{
+ const { make, test1, test2, test3, test4 } = wasmEvalText(`(module
+ (type $a (array i32))
+ (func (export "make") (param i32) (result anyref)
+ local.get 0
+ local.get 0
+ array.new_fixed $a 2
+ )
+ (func (export "test1") (param anyref) (result i32)
+ (if (ref.test (ref $a) (local.get 0))
+ (then
+ (ref.cast (ref $a) (local.get 0))
+ (array.get $a (i32.const 0))
+ return
+ )
+ )
+ i32.const -1
+ )
+ (func (export "test2") (param anyref) (result i32)
+ (if (ref.test (ref $a) (local.get 0))
+ (then)
+ (else
+ (ref.cast (ref $a) (local.get 0))
+ (array.get $a (i32.const 0))
+ return
+ )
+ )
+ i32.const -1
+ )
+ (func (export "test3") (param anyref) (result i32)
+ (if (ref.test (ref $a) (local.get 0))
+ (then
+ (if (ref.test (ref $a) (local.get 0))
+ (then)
+ (else
+ (ref.cast (ref $a) (local.get 0))
+ (array.get $a (i32.const 0))
+ return
+ )
+ )
+ )
+ )
+ i32.const -1
+ )
+ (func (export "test4") (param anyref) (result i32)
+ (if (ref.test (ref $a) (local.get 0))
+ (then
+ (if (ref.test (ref $a) (local.get 0))
+ (then
+ local.get 0
+ ref.cast (ref $a)
+ ref.cast (ref $a)
+ (array.get $a (i32.const 0))
+ return
+ )
+ )
+ )
+ )
+ i32.const -1
+ )
+ )`).exports;
+ assertEq(test1(make(99)), 99);
+ assertEq(test2(make(99)), -1);
+ assertEq(test3(make(99)), -1);
+ assertEq(test4(make(99)), 99);
+}
diff --git a/js/src/jit-test/tests/wasm/gc/i31ref.js b/js/src/jit-test/tests/wasm/gc/i31ref.js
index 65f2fccc3f..298447e848 100644
--- a/js/src/jit-test/tests/wasm/gc/i31ref.js
+++ b/js/src/jit-test/tests/wasm/gc/i31ref.js
@@ -149,6 +149,24 @@ for (const {input, expected} of bigI32Tests) {
assertEq(getElem(), expected);
}
+// Test that (ref.i31 (i32 const value)) optimization is correct
+for (let value of WasmI31refValues) {
+ let {compare} = wasmEvalText(`(module
+ (func $innerCompare (param i32) (param i31ref) (result i32)
+ (ref.eq
+ (ref.i31 local.get 0)
+ local.get 1
+ )
+ )
+ (func (export "compare") (result i32)
+ i32.const ${value}
+ (ref.i31 i32.const ${value})
+ call $innerCompare
+ )
+)`).exports;
+ assertEq(compare(value), 1);
+}
+
const { i31GetU_null, i31GetS_null } = wasmEvalText(`(module
(func (export "i31GetU_null") (result i32)
ref.null i31
diff --git a/js/src/jit-test/tests/wasm/regress/bug1886870.js b/js/src/jit-test/tests/wasm/regress/bug1886870.js
new file mode 100644
index 0000000000..a4947bd91a
--- /dev/null
+++ b/js/src/jit-test/tests/wasm/regress/bug1886870.js
@@ -0,0 +1,8 @@
+// Check proper handling of OOM after toQuotedString().
+
+oomTest(function () {
+ new WebAssembly.Instance(
+ new WebAssembly.Module(wasmTextToBinary('(import "m" "f" (func $f))')),
+ {}
+ );
+});
diff --git a/js/src/jit-test/tests/wasm/regress/bug1887535.js b/js/src/jit-test/tests/wasm/regress/bug1887535.js
new file mode 100644
index 0000000000..e2793831bf
--- /dev/null
+++ b/js/src/jit-test/tests/wasm/regress/bug1887535.js
@@ -0,0 +1,25 @@
+// |jit-test| slow;
+
+// Tests that exception handling works during a stack overflow.
+const v1 = newGlobal({sameZoneAs: this});
+class C2 {
+ static { }
+}
+
+function f() { v1.constructor; }
+
+const { test } = wasmEvalText(`
+(module
+ (import "" "f" (func $f))
+ (export "test" (func $f))
+)`, { "": { f, },}).exports;
+
+
+function f4() {
+ try {
+ f4();
+ } catch(_) {
+ test(); test();
+ }
+}
+f4();
diff --git a/js/src/jit-test/tests/wasm/regress/bug1887596.js b/js/src/jit-test/tests/wasm/regress/bug1887596.js
new file mode 100644
index 0000000000..8ff579fc35
--- /dev/null
+++ b/js/src/jit-test/tests/wasm/regress/bug1887596.js
@@ -0,0 +1,14 @@
+const t = `
+ (module
+ (func $f (result f32)
+ f32.const 1.25
+ )
+ (table (export "table") 10 funcref)
+ (elem (i32.const 0) $f)
+ )`;
+const i = new WebAssembly.Instance(new WebAssembly.Module(wasmTextToBinary(t)));
+const f = i.exports.table.get(0);
+
+// These FP equality comparisons are safe because 1.25 is representable exactly.
+assertEq(1.25, f());
+assertEq(1.25, this.wasmLosslessInvoke(f).value);
diff --git a/js/src/jit/ABIFunctionList-inl.h b/js/src/jit/ABIFunctionList-inl.h
index f8a52beeff..eb2123f7a2 100644
--- a/js/src/jit/ABIFunctionList-inl.h
+++ b/js/src/jit/ABIFunctionList-inl.h
@@ -103,6 +103,8 @@ namespace jit {
_(js::ArgumentsObject::finishForIonPure) \
_(js::ArgumentsObject::finishInlineForIonPure) \
_(js::ArrayShiftMoveElements) \
+ _(js::ArraySortData::sortWithComparator) \
+ _(js::ArraySortFromJit) \
_(js::ecmaAtan2) \
_(js::ecmaHypot) \
_(js::ecmaPow) \
diff --git a/js/src/jit/BaselineBailouts.cpp b/js/src/jit/BaselineBailouts.cpp
index d916d000ed..150e16b618 100644
--- a/js/src/jit/BaselineBailouts.cpp
+++ b/js/src/jit/BaselineBailouts.cpp
@@ -517,7 +517,12 @@ void BaselineStackBuilder::setNextCallee(
//
// Also use the callee's own ICScript if we purged callee ICScripts.
icScript_ = nextCallee->nonLazyScript()->jitScript()->icScript();
+
if (trialInliningState != TrialInliningState::MonomorphicInlined) {
+ // Don't use specialized ICScripts for any of the callees if we had an
+ // inlining failure. We're now using the generic ICScript but compilation
+ // might have used the trial-inlined ICScript and these can have very
+ // different inlining graphs.
canUseTrialInlinedICScripts_ = false;
}
}
@@ -1567,6 +1572,7 @@ bool jit::BailoutIonToBaseline(JSContext* cx, JitActivation* activation,
prevFrameType == FrameType::IonJS ||
prevFrameType == FrameType::BaselineStub ||
prevFrameType == FrameType::Rectifier ||
+ prevFrameType == FrameType::TrampolineNative ||
prevFrameType == FrameType::IonICCall ||
prevFrameType == FrameType::BaselineJS ||
prevFrameType == FrameType::BaselineInterpreterEntry);
@@ -1965,14 +1971,6 @@ bool jit::FinishBailoutToBaseline(BaselineBailoutInfo* bailoutInfoArg) {
UnwindEnvironment(cx, ei, bailoutInfo->tryPC);
}
- // Check for interrupts now because we might miss an interrupt check in JIT
- // code when resuming in the prologue, after the stack/interrupt check.
- if (!cx->isExceptionPending()) {
- if (!CheckForInterrupt(cx)) {
- return false;
- }
- }
-
BailoutKind bailoutKind = *bailoutInfo->bailoutKind;
JitSpew(JitSpew_BaselineBailouts,
" Restored outerScript=(%s:%u:%u,%u) innerScript=(%s:%u:%u,%u) "
@@ -2169,7 +2167,17 @@ bool jit::FinishBailoutToBaseline(BaselineBailoutInfo* bailoutInfoArg) {
ionScript->incNumFixableBailouts();
if (ionScript->shouldInvalidate()) {
#ifdef DEBUG
- if (saveFailedICHash && !JitOptions.disableBailoutLoopCheck) {
+ // To detect bailout loops, we save a hash of the CacheIR used to
+ // compile this script, and assert that we don't recompile with the
+ // exact same inputs. Some of our bailout detection strategies, like
+ // LICM and stub folding, rely on bailing out, updating some state
+ // when we hit the baseline fallback, and using that information when
+ // we invalidate. If the frequentBailoutThreshold is set too low, we
+ // will instead invalidate the first time we bail out, so we don't
+ // have the chance to make those decisions. That doesn't happen in
+ // regular code, so we just skip bailout loop detection in that case.
+ if (saveFailedICHash && !JitOptions.disableBailoutLoopCheck &&
+ JitOptions.frequentBailoutThreshold > 1) {
outerScript->jitScript()->setFailedICHash(ionScript->icHash());
}
#endif
diff --git a/js/src/jit/BaselineFrame.h b/js/src/jit/BaselineFrame.h
index 6138332c81..f2a6811177 100644
--- a/js/src/jit/BaselineFrame.h
+++ b/js/src/jit/BaselineFrame.h
@@ -109,7 +109,9 @@ class BaselineFrame {
bool isConstructing() const {
return CalleeTokenIsConstructing(calleeToken());
}
- JSScript* script() const { return ScriptFromCalleeToken(calleeToken()); }
+ JSScript* script() const {
+ return MaybeForwardedScriptFromCalleeToken(calleeToken());
+ }
JSFunction* callee() const { return CalleeTokenToFunction(calleeToken()); }
Value calleev() const { return ObjectValue(*callee()); }
diff --git a/js/src/jit/CacheIR.cpp b/js/src/jit/CacheIR.cpp
index 68dbd6bfee..03eae14140 100644
--- a/js/src/jit/CacheIR.cpp
+++ b/js/src/jit/CacheIR.cpp
@@ -1199,7 +1199,8 @@ static ObjOperandId GuardAndLoadWindowProxyWindow(CacheIRWriter& writer,
ObjOperandId objId,
GlobalObject* windowObj) {
writer.guardClass(objId, GuardClassKind::WindowProxy);
- ObjOperandId windowObjId = writer.loadWrapperTarget(objId);
+ ObjOperandId windowObjId = writer.loadWrapperTarget(objId,
+ /*fallible = */ false);
writer.guardSpecificObject(windowObjId, windowObj);
return windowObjId;
}
@@ -1357,7 +1358,8 @@ AttachDecision GetPropIRGenerator::tryAttachCrossCompartmentWrapper(
writer.guardHasProxyHandler(objId, Wrapper::wrapperHandler(obj));
// Load the object wrapped by the CCW
- ObjOperandId wrapperTargetId = writer.loadWrapperTarget(objId);
+ ObjOperandId wrapperTargetId =
+ writer.loadWrapperTarget(objId, /*fallible = */ false);
// If the compartment of the wrapped object is different we should fail.
writer.guardCompartment(wrapperTargetId, wrappedTargetGlobal,
@@ -1468,7 +1470,8 @@ AttachDecision GetPropIRGenerator::tryAttachXrayCrossCompartmentWrapper(
writer.guardHasProxyHandler(objId, GetProxyHandler(obj));
// Load the object wrapped by the CCW
- ObjOperandId wrapperTargetId = writer.loadWrapperTarget(objId);
+ ObjOperandId wrapperTargetId =
+ writer.loadWrapperTarget(objId, /*fallible = */ false);
// Test the wrapped object's class. The properties held by xrays or their
// prototypes will be invariant for objects of a given class, except for
@@ -1578,9 +1581,9 @@ AttachDecision GetPropIRGenerator::tryAttachScriptedProxy(
writer.guardIsProxy(objId);
writer.guardHasProxyHandler(objId, &ScriptedProxyHandler::singleton);
- ValOperandId handlerValId = writer.loadScriptedProxyHandler(objId);
- ObjOperandId handlerObjId = writer.guardToObject(handlerValId);
- ObjOperandId targetObjId = writer.loadWrapperTarget(objId);
+ ObjOperandId handlerObjId = writer.loadScriptedProxyHandler(objId);
+ ObjOperandId targetObjId =
+ writer.loadWrapperTarget(objId, /*fallible =*/true);
writer.guardIsNativeObject(targetObjId);
diff --git a/js/src/jit/CacheIR.h b/js/src/jit/CacheIR.h
index 9bedbb7ddc..132070d535 100644
--- a/js/src/jit/CacheIR.h
+++ b/js/src/jit/CacheIR.h
@@ -321,6 +321,12 @@ class CallFlags {
CallFlags() = default;
explicit CallFlags(ArgFormat format) : argFormat_(format) {}
+ CallFlags(ArgFormat format, bool isConstructing, bool isSameRealm,
+ bool needsUninitializedThis)
+ : argFormat_(format),
+ isConstructing_(isConstructing),
+ isSameRealm_(isSameRealm),
+ needsUninitializedThis_(needsUninitializedThis) {}
CallFlags(bool isConstructing, bool isSpread, bool isSameRealm = false,
bool needsUninitializedThis = false)
: argFormat_(isSpread ? Spread : Standard),
diff --git a/js/src/jit/CacheIRCompiler.cpp b/js/src/jit/CacheIRCompiler.cpp
index 1467cebe08..9a26b0816c 100644
--- a/js/src/jit/CacheIRCompiler.cpp
+++ b/js/src/jit/CacheIRCompiler.cpp
@@ -2379,19 +2379,23 @@ bool CacheIRCompiler::emitGuardDynamicSlotValue(ObjOperandId objId,
return true;
}
-bool CacheIRCompiler::emitLoadScriptedProxyHandler(ValOperandId resultId,
+bool CacheIRCompiler::emitLoadScriptedProxyHandler(ObjOperandId resultId,
ObjOperandId objId) {
JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
Register obj = allocator.useRegister(masm, objId);
- ValueOperand output = allocator.defineValueRegister(masm, resultId);
+ Register output = allocator.defineRegister(masm, resultId);
+
+ FailurePath* failure;
+ if (!addFailurePath(&failure)) {
+ return false;
+ }
+
+ masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), output);
+ Address handlerAddr(output, js::detail::ProxyReservedSlots::offsetOfSlot(
+ ScriptedProxyHandler::HANDLER_EXTRA));
+ masm.fallibleUnboxObject(handlerAddr, output, failure->label());
- masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()),
- output.scratchReg());
- masm.loadValue(
- Address(output.scratchReg(), js::detail::ProxyReservedSlots::offsetOfSlot(
- ScriptedProxyHandler::HANDLER_EXTRA)),
- output);
return true;
}
@@ -2937,14 +2941,27 @@ bool CacheIRCompiler::emitLoadEnclosingEnvironment(ObjOperandId objId,
}
bool CacheIRCompiler::emitLoadWrapperTarget(ObjOperandId objId,
- ObjOperandId resultId) {
+ ObjOperandId resultId,
+ bool fallible) {
JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
Register obj = allocator.useRegister(masm, objId);
Register reg = allocator.defineRegister(masm, resultId);
+ FailurePath* failure;
+ if (fallible && !addFailurePath(&failure)) {
+ return false;
+ }
+
masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), reg);
- masm.unboxObject(
- Address(reg, js::detail::ProxyReservedSlots::offsetOfPrivateSlot()), reg);
+
+ Address targetAddr(reg,
+ js::detail::ProxyReservedSlots::offsetOfPrivateSlot());
+ if (fallible) {
+ masm.fallibleUnboxObject(targetAddr, reg, failure->label());
+ } else {
+ masm.unboxObject(targetAddr, reg);
+ }
+
return true;
}
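
A minimal sketch of the fallible/infallible split introduced here, with stand-in types: the scripted-proxy path loads the wrapper target fallibly because a revoked proxy no longer stores an object in its private slot, while the window-proxy and cross-compartment-wrapper paths keep the infallible unbox.

#include <optional>

struct PrivateSlot {
  bool isObject;
  void* object;
};

static std::optional<void*> loadWrapperTarget(const PrivateSlot& slot, bool fallible) {
  if (fallible && !slot.isObject) {
    return std::nullopt;  // corresponds to the jump to the failure path
  }
  // The infallible variant assumes the slot always holds an object.
  return slot.object;
}
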
diff --git a/js/src/jit/CacheIROps.yaml b/js/src/jit/CacheIROps.yaml
index 974404d5c0..2f3097dfd8 100644
--- a/js/src/jit/CacheIROps.yaml
+++ b/js/src/jit/CacheIROps.yaml
@@ -708,7 +708,7 @@
transpile: true
cost_estimate: 1
args:
- result: ValId
+ result: ObjId
obj: ObjId
- name: IdToStringOrSymbol
@@ -837,6 +837,7 @@
args:
obj: ObjId
result: ObjId
+ fallible: BoolImm
- name: LoadValueTag
shared: true
diff --git a/js/src/jit/CacheIRReader.h b/js/src/jit/CacheIRReader.h
index 54b298c999..59483424a3 100644
--- a/js/src/jit/CacheIRReader.h
+++ b/js/src/jit/CacheIRReader.h
@@ -129,21 +129,15 @@ class MOZ_RAII CacheIRReader {
bool isSameRealm = encoded & CallFlags::IsSameRealm;
bool needsUninitializedThis = encoded & CallFlags::NeedsUninitializedThis;
MOZ_ASSERT_IF(needsUninitializedThis, isConstructing);
- switch (format) {
- case CallFlags::Unknown:
- MOZ_CRASH("Unexpected call flags");
- case CallFlags::Standard:
- return CallFlags(isConstructing, /*isSpread =*/false, isSameRealm,
- needsUninitializedThis);
- case CallFlags::Spread:
- return CallFlags(isConstructing, /*isSpread =*/true, isSameRealm,
- needsUninitializedThis);
- default:
- // The existing non-standard argument formats (FunCall and FunApply)
- // can't be constructors.
- MOZ_ASSERT(!isConstructing);
- return CallFlags(format);
- }
+
+ // FunCall and FunApply can't be constructors.
+ MOZ_ASSERT_IF(format == CallFlags::FunCall, !isConstructing);
+ MOZ_ASSERT_IF(format == CallFlags::FunApplyArgsObj, !isConstructing);
+ MOZ_ASSERT_IF(format == CallFlags::FunApplyArray, !isConstructing);
+ MOZ_ASSERT_IF(format == CallFlags::FunApplyNullUndefined, !isConstructing);
+
+ return CallFlags(format, isConstructing, isSameRealm,
+ needsUninitializedThis);
}
uint8_t readByte() { return buffer_.readByte(); }
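
The new CallFlags constructor lets readCallFlags rebuild the flags in one step instead of switching on the format. The standalone sketch below shows the shape of that decode; the bit positions are assumptions for illustration, not the engine's actual encoding.

#include <cstdint>

enum class Format : uint8_t { Unknown, Standard, Spread, FunCall, FunApplyArgsObj };

struct Flags {
  Format format;
  bool isConstructing;
  bool isSameRealm;
  bool needsUninitializedThis;
};

static Flags decodeCallFlags(uint8_t encoded) {
  constexpr uint8_t kFormatMask = 0x0f;        // assumed layout
  constexpr uint8_t kIsConstructing = 1 << 4;  // assumed bit positions
  constexpr uint8_t kIsSameRealm = 1 << 5;
  constexpr uint8_t kNeedsUninitializedThis = 1 << 6;
  return Flags{Format(encoded & kFormatMask), bool(encoded & kIsConstructing),
               bool(encoded & kIsSameRealm),
               bool(encoded & kNeedsUninitializedThis)};
}
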
diff --git a/js/src/jit/CodeGenerator.cpp b/js/src/jit/CodeGenerator.cpp
index 10a69f0cb3..559ac50cc7 100644
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -4205,13 +4205,23 @@ void CodeGenerator::visitGuardShape(LGuardShape* guard) {
}
void CodeGenerator::visitGuardFuse(LGuardFuse* guard) {
+ auto fuseIndex = guard->mir()->fuseIndex();
+ switch (fuseIndex) {
+ case RealmFuses::FuseIndex::OptimizeGetIteratorFuse:
+ addOptimizeGetIteratorFuseDependency();
+ return;
+ default:
+ // validateAndRegisterFuseDependencies doesn't have
+      // handling for this yet, so actively check the fuse instead.
+ break;
+ }
+
Register temp = ToRegister(guard->temp0());
Label bail;
// Bake specific fuse address for Ion code, because we won't share this code
// across realms.
- GuardFuse* fuse =
- mirGen().realm->realmFuses().getFuseByIndex(guard->mir()->fuseIndex());
+ GuardFuse* fuse = mirGen().realm->realmFuses().getFuseByIndex(fuseIndex);
masm.loadPtr(AbsoluteAddress(fuse->fuseRef()), temp);
masm.branchPtr(Assembler::NotEqual, temp, ImmPtr(nullptr), &bail);
@@ -6269,7 +6279,8 @@ void CodeGenerator::visitCallKnown(LCallKnown* call) {
UnusedStackBytesForCall(call->mir()->paddedNumStackArgs());
WrappedFunction* target = call->getSingleTarget();
- // Native single targets (except wasm) are handled by LCallNative.
+ // Native single targets (except Wasm and TrampolineNative functions) are
+ // handled by LCallNative.
MOZ_ASSERT(target->hasJitEntry());
// Missing arguments must have been explicitly appended by WarpBuilder.
@@ -6341,12 +6352,7 @@ void CodeGenerator::visitCallKnown(LCallKnown* call) {
template <typename T>
void CodeGenerator::emitCallInvokeFunction(T* apply) {
- Register objreg = ToRegister(apply->getTempObject());
-
- // Push the space used by the arguments.
- masm.moveStackPtrTo(objreg);
-
- pushArg(objreg); // argv.
+ pushArg(masm.getStackPointer()); // argv.
pushArg(ToRegister(apply->getArgc())); // argc.
pushArg(Imm32(apply->mir()->ignoresReturnValue())); // ignoresReturnValue.
pushArg(Imm32(apply->mir()->isConstructing())); // isConstructing.
@@ -6370,7 +6376,13 @@ void CodeGenerator::emitAllocateSpaceForApply(Register argcreg,
"Stack padding assumes that the frameSize is correct");
MOZ_ASSERT(JitStackValueAlignment == 2);
Label noPaddingNeeded;
- // if the number of arguments is odd, then we do not need any padding.
+ // If the number of arguments is odd, then we do not need any padding.
+ //
+ // Note: The |JitStackValueAlignment == 2| condition requires that the
+ // overall number of values on the stack is even. When we have an odd number
+ // of arguments, we don't need any padding, because the |thisValue| is
+ // pushed after the arguments, so the overall number of values on the stack
+ // is even.
masm.branchTestPtr(Assembler::NonZero, argcreg, Imm32(1), &noPaddingNeeded);
masm.addPtr(Imm32(1), scratch);
masm.bind(&noPaddingNeeded);
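
A small arithmetic sketch of the padding rule described in the comment above, assuming JitStackValueAlignment == 2 and that |this| is pushed after the arguments:

#include <cstdint>

static uint32_t valueSlotsForApply(uint32_t argc) {
  uint32_t total = argc + 1;   // arguments plus |this|
  return total + (total & 1);  // round up to an even number of value slots
}
// valueSlotsForApply(3) == 4: odd argc, no padding slot needed.
// valueSlotsForApply(4) == 6: even argc, one padding slot added.
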
@@ -6382,13 +6394,13 @@ void CodeGenerator::emitAllocateSpaceForApply(Register argcreg,
masm.subFromStackPtr(scratch);
#ifdef DEBUG
- // Put a magic value in the space reserved for padding. Note, this code
- // cannot be merged with the previous test, as not all architectures can
- // write below their stack pointers.
+ // Put a magic value in the space reserved for padding. Note, this code cannot
+ // be merged with the previous test, as not all architectures can write below
+ // their stack pointers.
if (JitStackValueAlignment > 1) {
MOZ_ASSERT(JitStackValueAlignment == 2);
Label noPaddingNeeded;
- // if the number of arguments is odd, then we do not need any padding.
+ // If the number of arguments is odd, then we do not need any padding.
masm.branchTestPtr(Assembler::NonZero, argcreg, Imm32(1), &noPaddingNeeded);
BaseValueIndex dstPtr(masm.getStackPointer(), argcreg);
masm.storeValue(MagicValue(JS_ARG_POISON), dstPtr);
@@ -6403,8 +6415,8 @@ void CodeGenerator::emitAllocateSpaceForConstructAndPushNewTarget(
Register argcreg, Register newTargetAndScratch) {
// Align the JitFrameLayout on the JitStackAlignment. Contrary to
// |emitAllocateSpaceForApply()|, we're always pushing a magic value, because
- // we can't write to |newTargetAndScratch| before |new.target| has
- // been pushed onto the stack.
+ // we can't write to |newTargetAndScratch| before |new.target| has been pushed
+ // onto the stack.
if (JitStackValueAlignment > 1) {
MOZ_ASSERT(frameSize() % JitStackAlignment == 0,
"Stack padding assumes that the frameSize is correct");
@@ -6412,6 +6424,12 @@ void CodeGenerator::emitAllocateSpaceForConstructAndPushNewTarget(
Label noPaddingNeeded;
// If the number of arguments is even, then we do not need any padding.
+ //
+ // Note: The |JitStackValueAlignment == 2| condition requires that the
+ // overall number of values on the stack is even. When we have an even
+ // number of arguments, we don't need any padding, because |new.target| is
+    // pushed before the arguments and |thisValue| is pushed after all
+ // arguments, so the overall number of values on the stack is even.
masm.branchTestPtr(Assembler::Zero, argcreg, Imm32(1), &noPaddingNeeded);
masm.pushValue(MagicValue(JS_ARG_POISON));
masm.bind(&noPaddingNeeded);
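
The constructor variant follows the same reasoning with one extra slot for |new.target|, so the parity flips (same assumptions as the sketch above):

#include <cstdint>

static uint32_t valueSlotsForConstruct(uint32_t argc) {
  uint32_t total = argc + 2;   // new.target, arguments, |this|
  return total + (total & 1);
}
// valueSlotsForConstruct(4) == 6: even argc, no padding slot needed.
// valueSlotsForConstruct(3) == 6: odd argc, one padding slot added.
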
@@ -6437,9 +6455,8 @@ void CodeGenerator::emitCopyValuesForApply(Register argvSrcBase,
Label loop;
masm.bind(&loop);
- // As argvIndex is off by 1, and we use the decBranchPtr instruction
- // to loop back, we have to substract the size of the word which are
- // copied.
+ // As argvIndex is off by 1, and we use the decBranchPtr instruction to loop
+  // back, we have to subtract the size of the copied word.
BaseValueIndex srcPtr(argvSrcBase, argvIndex,
int32_t(argvSrcOffset) - sizeof(void*));
BaseValueIndex dstPtr(masm.getStackPointer(), argvIndex,
@@ -6488,6 +6505,9 @@ void CodeGenerator::emitPushArguments(Register argcreg, Register scratch,
// clang-format on
// Compute the source and destination offsets into the stack.
+ //
+ // The |extraFormals| parameter is used when copying rest-parameters and
+  // allows skipping the initial parameters before the actual rest-parameters.
Register argvSrcBase = FramePointer;
size_t argvSrcOffset =
JitFrameLayout::offsetOfActualArgs() + extraFormals * sizeof(JS::Value);
@@ -6500,17 +6520,18 @@ void CodeGenerator::emitPushArguments(Register argcreg, Register scratch,
emitCopyValuesForApply(argvSrcBase, argvIndex, copyreg, argvSrcOffset,
argvDstOffset);
- // Join with all arguments copied and the extra stack usage computed.
+ // Join with all arguments copied.
masm.bind(&end);
}
-void CodeGenerator::emitPushArguments(LApplyArgsGeneric* apply,
- Register scratch) {
- // Holds the function nargs. Initially the number of args to the caller.
+void CodeGenerator::emitPushArguments(LApplyArgsGeneric* apply) {
+ // Holds the function nargs.
Register argcreg = ToRegister(apply->getArgc());
Register copyreg = ToRegister(apply->getTempObject());
+ Register scratch = ToRegister(apply->getTempForArgCopy());
uint32_t extraFormals = apply->numExtraFormals();
+ // Allocate space on the stack for arguments.
emitAllocateSpaceForApply(argcreg, scratch);
emitPushArguments(argcreg, scratch, copyreg, extraFormals);
@@ -6519,22 +6540,21 @@ void CodeGenerator::emitPushArguments(LApplyArgsGeneric* apply,
masm.pushValue(ToValue(apply, LApplyArgsGeneric::ThisIndex));
}
-void CodeGenerator::emitPushArguments(LApplyArgsObj* apply, Register scratch) {
- // argc and argsObj are mapped to the same calltemp register.
- MOZ_ASSERT(apply->getArgsObj() == apply->getArgc());
-
- Register tmpArgc = ToRegister(apply->getTempObject());
+void CodeGenerator::emitPushArguments(LApplyArgsObj* apply) {
Register argsObj = ToRegister(apply->getArgsObj());
+ Register tmpArgc = ToRegister(apply->getTempObject());
+ Register scratch = ToRegister(apply->getTempForArgCopy());
+
+ // argc and argsObj are mapped to the same calltemp register.
+ MOZ_ASSERT(argsObj == ToRegister(apply->getArgc()));
// Load argc into tmpArgc.
- Address lengthAddr(argsObj, ArgumentsObject::getInitialLengthSlotOffset());
- masm.unboxInt32(lengthAddr, tmpArgc);
- masm.rshift32(Imm32(ArgumentsObject::PACKED_BITS_COUNT), tmpArgc);
+ masm.loadArgumentsObjectLength(argsObj, tmpArgc);
- // Allocate space on the stack for arguments. This modifies scratch.
+ // Allocate space on the stack for arguments.
emitAllocateSpaceForApply(tmpArgc, scratch);
- // Load arguments data
+ // Load arguments data.
masm.loadPrivate(Address(argsObj, ArgumentsObject::getDataSlotOffset()),
argsObj);
size_t argsSrcOffset = ArgumentsData::offsetOfArgs();
@@ -6543,6 +6563,7 @@ void CodeGenerator::emitPushArguments(LApplyArgsObj* apply, Register scratch) {
// After this call, the argsObj register holds the argument count instead.
emitPushArrayAsArguments(tmpArgc, argsObj, scratch, argsSrcOffset);
+ // Push |this|.
masm.pushValue(ToValue(apply, LApplyArgsObj::ThisIndex));
}
@@ -6566,69 +6587,72 @@ void CodeGenerator::emitPushArrayAsArguments(Register tmpArgc,
// Skip the copy of arguments if there are none.
masm.branchTestPtr(Assembler::Zero, tmpArgc, tmpArgc, &noCopy);
+ {
+ // Copy the values. This code is skipped entirely if there are no values.
+ size_t argvDstOffset = 0;
- // Copy the values. This code is skipped entirely if there are
- // no values.
- size_t argvDstOffset = 0;
-
- Register argvSrcBase = srcBaseAndArgc;
- Register copyreg = scratch;
-
- masm.push(tmpArgc);
- Register argvIndex = tmpArgc;
- argvDstOffset += sizeof(void*);
+ Register argvSrcBase = srcBaseAndArgc;
- // Copy
- emitCopyValuesForApply(argvSrcBase, argvIndex, copyreg, argvSrcOffset,
- argvDstOffset);
+ // Stash away |tmpArgc| and adjust argvDstOffset accordingly.
+ masm.push(tmpArgc);
+ Register argvIndex = tmpArgc;
+ argvDstOffset += sizeof(void*);
- // Restore.
- masm.pop(srcBaseAndArgc); // srcBaseAndArgc now contains argc.
- masm.jump(&epilogue);
+ // Copy
+ emitCopyValuesForApply(argvSrcBase, argvIndex, scratch, argvSrcOffset,
+ argvDstOffset);
- // Clear argc if we skipped the copy step.
+ // Restore.
+ masm.pop(srcBaseAndArgc); // srcBaseAndArgc now contains argc.
+ masm.jump(&epilogue);
+ }
masm.bind(&noCopy);
- masm.movePtr(ImmWord(0), srcBaseAndArgc);
+ {
+ // Clear argc if we skipped the copy step.
+ masm.movePtr(ImmWord(0), srcBaseAndArgc);
+ }
- // Join with all arguments copied and the extra stack usage computed.
+ // Join with all arguments copied.
// Note, "srcBase" has become "argc".
masm.bind(&epilogue);
}
-void CodeGenerator::emitPushArguments(LApplyArrayGeneric* apply,
- Register scratch) {
+void CodeGenerator::emitPushArguments(LApplyArrayGeneric* apply) {
+ Register elements = ToRegister(apply->getElements());
Register tmpArgc = ToRegister(apply->getTempObject());
- Register elementsAndArgc = ToRegister(apply->getElements());
+ Register scratch = ToRegister(apply->getTempForArgCopy());
+
+ // argc and elements are mapped to the same calltemp register.
+ MOZ_ASSERT(elements == ToRegister(apply->getArgc()));
// Invariants guarded in the caller:
// - the array is not too long
// - the array length equals its initialized length
// The array length is our argc for the purposes of allocating space.
- Address length(ToRegister(apply->getElements()),
- ObjectElements::offsetOfLength());
- masm.load32(length, tmpArgc);
+ masm.load32(Address(elements, ObjectElements::offsetOfLength()), tmpArgc);
// Allocate space for the values.
emitAllocateSpaceForApply(tmpArgc, scratch);
// After this call "elements" has become "argc".
size_t elementsOffset = 0;
- emitPushArrayAsArguments(tmpArgc, elementsAndArgc, scratch, elementsOffset);
+ emitPushArrayAsArguments(tmpArgc, elements, scratch, elementsOffset);
// Push |this|.
masm.pushValue(ToValue(apply, LApplyArrayGeneric::ThisIndex));
}
-void CodeGenerator::emitPushArguments(LConstructArgsGeneric* construct,
- Register scratch) {
- MOZ_ASSERT(scratch == ToRegister(construct->getNewTarget()));
-
- // Holds the function nargs. Initially the number of args to the caller.
+void CodeGenerator::emitPushArguments(LConstructArgsGeneric* construct) {
+ // Holds the function nargs.
Register argcreg = ToRegister(construct->getArgc());
Register copyreg = ToRegister(construct->getTempObject());
+ Register scratch = ToRegister(construct->getTempForArgCopy());
uint32_t extraFormals = construct->numExtraFormals();
+ // newTarget and scratch are mapped to the same calltemp register.
+ MOZ_ASSERT(scratch == ToRegister(construct->getNewTarget()));
+
// Allocate space for the values.
// After this call "newTarget" has become "scratch".
emitAllocateSpaceForConstructAndPushNewTarget(argcreg, scratch);
@@ -6639,29 +6663,31 @@ void CodeGenerator::emitPushArguments(LConstructArgsGeneric* construct,
masm.pushValue(ToValue(construct, LConstructArgsGeneric::ThisIndex));
}
-void CodeGenerator::emitPushArguments(LConstructArrayGeneric* construct,
- Register scratch) {
- MOZ_ASSERT(scratch == ToRegister(construct->getNewTarget()));
-
+void CodeGenerator::emitPushArguments(LConstructArrayGeneric* construct) {
+ Register elements = ToRegister(construct->getElements());
Register tmpArgc = ToRegister(construct->getTempObject());
- Register elementsAndArgc = ToRegister(construct->getElements());
+ Register scratch = ToRegister(construct->getTempForArgCopy());
+
+ // argc and elements are mapped to the same calltemp register.
+ MOZ_ASSERT(elements == ToRegister(construct->getArgc()));
+
+ // newTarget and scratch are mapped to the same calltemp register.
+ MOZ_ASSERT(scratch == ToRegister(construct->getNewTarget()));
// Invariants guarded in the caller:
// - the array is not too long
// - the array length equals its initialized length
// The array length is our argc for the purposes of allocating space.
- Address length(ToRegister(construct->getElements()),
- ObjectElements::offsetOfLength());
- masm.load32(length, tmpArgc);
+ masm.load32(Address(elements, ObjectElements::offsetOfLength()), tmpArgc);
// Allocate space for the values.
+ // After this call "newTarget" has become "scratch".
emitAllocateSpaceForConstructAndPushNewTarget(tmpArgc, scratch);
- // After this call "elements" has become "argc" and "newTarget" has become
- // "scratch".
+ // After this call "elements" has become "argc".
size_t elementsOffset = 0;
- emitPushArrayAsArguments(tmpArgc, elementsAndArgc, scratch, elementsOffset);
+ emitPushArrayAsArguments(tmpArgc, elements, scratch, elementsOffset);
// Push |this|.
masm.pushValue(ToValue(construct, LConstructArrayGeneric::ThisIndex));
@@ -6682,43 +6708,24 @@ void CodeGenerator::emitApplyGeneric(T* apply) {
// Copy the arguments of the current function.
//
- // In the case of ApplyArray, ConstructArray, or ApplyArgsObj, also
- // compute argc. The argc register and the elements/argsObj register
- // are the same; argc must not be referenced before the call to
- // emitPushArguments() and elements/argsObj must not be referenced
- // after it returns.
+ // In the case of ApplyArray, ConstructArray, or ApplyArgsObj, also compute
+ // argc. The argc register and the elements/argsObj register are the same;
+ // argc must not be referenced before the call to emitPushArguments() and
+ // elements/argsObj must not be referenced after it returns.
//
- // In the case of ConstructArray or ConstructArgs, also overwrite newTarget
- // with scratch; newTarget must not be referenced after this point.
+ // In the case of ConstructArray or ConstructArgs, also overwrite newTarget;
+ // newTarget must not be referenced after this point.
//
// objreg is dead across this call.
- emitPushArguments(apply, scratch);
+ emitPushArguments(apply);
masm.checkStackAlignment();
bool constructing = apply->mir()->isConstructing();
- // If the function is native, only emit the call to InvokeFunction.
- if (apply->hasSingleTarget() &&
- apply->getSingleTarget()->isNativeWithoutJitEntry()) {
- emitCallInvokeFunction(apply);
-
-#ifdef DEBUG
- // Native constructors are guaranteed to return an Object value, so we never
- // have to replace a primitive result with the previously allocated Object
- // from CreateThis.
- if (constructing) {
- Label notPrimitive;
- masm.branchTestPrimitive(Assembler::NotEqual, JSReturnOperand,
- &notPrimitive);
- masm.assumeUnreachable("native constructors don't return primitives");
- masm.bind(&notPrimitive);
- }
-#endif
-
- emitRestoreStackPointerFromFP();
- return;
- }
+ // If the function is native, the call is compiled through emitApplyNative.
+ MOZ_ASSERT_IF(apply->hasSingleTarget(),
+ !apply->getSingleTarget()->isNativeWithoutJitEntry());
Label end, invoke;
@@ -6812,8 +6819,8 @@ void CodeGenerator::emitApplyGeneric(T* apply) {
masm.bind(&end);
- // If the return value of the constructing function is Primitive,
- // replace the return value with the Object from CreateThis.
+ // If the return value of the constructing function is Primitive, replace the
+ // return value with the Object from CreateThis.
if (constructing) {
Label notPrimitive;
masm.branchTestPrimitive(Assembler::NotEqual, JSReturnOperand,
@@ -6833,17 +6840,200 @@ void CodeGenerator::emitApplyGeneric(T* apply) {
emitRestoreStackPointerFromFP();
}
-void CodeGenerator::visitApplyArgsGeneric(LApplyArgsGeneric* apply) {
+template <typename T>
+void CodeGenerator::emitCallInvokeNativeFunction(T* apply) {
+ pushArg(masm.getStackPointer()); // argv.
+ pushArg(ToRegister(apply->getArgc())); // argc.
+ pushArg(Imm32(apply->mir()->ignoresReturnValue())); // ignoresReturnValue.
+ pushArg(Imm32(apply->mir()->isConstructing())); // isConstructing.
+
+ using Fn =
+ bool (*)(JSContext*, bool, bool, uint32_t, Value*, MutableHandleValue);
+ callVM<Fn, jit::InvokeNativeFunction>(apply);
+}
+
+template <typename T>
+void CodeGenerator::emitPushNativeArguments(T* apply) {
+ Register argc = ToRegister(apply->getArgc());
+ Register tmpArgc = ToRegister(apply->getTempObject());
+ Register scratch = ToRegister(apply->getTempForArgCopy());
+ uint32_t extraFormals = apply->numExtraFormals();
+
+ // Push arguments.
+ Label noCopy;
+ masm.branchTestPtr(Assembler::Zero, argc, argc, &noCopy);
+ {
+ // Use scratch register to calculate stack space (no padding needed).
+ masm.movePtr(argc, scratch);
+
+ // Reserve space for copying the arguments.
+ NativeObject::elementsSizeMustNotOverflow();
+ masm.lshiftPtr(Imm32(ValueShift), scratch);
+ masm.subFromStackPtr(scratch);
+
+ // Compute the source and destination offsets into the stack.
+ Register argvSrcBase = FramePointer;
+ size_t argvSrcOffset =
+ JitFrameLayout::offsetOfActualArgs() + extraFormals * sizeof(JS::Value);
+ size_t argvDstOffset = 0;
+
+ Register argvIndex = tmpArgc;
+ masm.move32(argc, argvIndex);
+
+ // Copy arguments.
+ emitCopyValuesForApply(argvSrcBase, argvIndex, scratch, argvSrcOffset,
+ argvDstOffset);
+ }
+ masm.bind(&noCopy);
+}
+
+template <typename T>
+void CodeGenerator::emitPushArrayAsNativeArguments(T* apply) {
+ Register argc = ToRegister(apply->getArgc());
+ Register elements = ToRegister(apply->getElements());
+ Register tmpArgc = ToRegister(apply->getTempObject());
+ Register scratch = ToRegister(apply->getTempForArgCopy());
+
+ // NB: argc and elements are mapped to the same register.
+ MOZ_ASSERT(argc == elements);
+
+ // Invariants guarded in the caller:
+ // - the array is not too long
+ // - the array length equals its initialized length
+
+ // The array length is our argc.
+ masm.load32(Address(elements, ObjectElements::offsetOfLength()), tmpArgc);
+
+ // Skip the copy of arguments if there are none.
+ Label noCopy;
+ masm.branchTestPtr(Assembler::Zero, tmpArgc, tmpArgc, &noCopy);
+ {
+ // |tmpArgc| is off-by-one, so adjust the offset accordingly.
+ BaseObjectElementIndex srcPtr(elements, tmpArgc,
+ -int32_t(sizeof(JS::Value)));
+
+ Label loop;
+ masm.bind(&loop);
+ masm.pushValue(srcPtr, scratch);
+ masm.decBranchPtr(Assembler::NonZero, tmpArgc, Imm32(1), &loop);
+ }
+ masm.bind(&noCopy);
+
+ // Set argc in preparation for emitCallInvokeNativeFunction.
+ masm.load32(Address(elements, ObjectElements::offsetOfLength()), argc);
+}
+
+void CodeGenerator::emitPushArguments(LApplyArgsNative* apply) {
+ emitPushNativeArguments(apply);
+}
+
+void CodeGenerator::emitPushArguments(LApplyArrayNative* apply) {
+ emitPushArrayAsNativeArguments(apply);
+}
+
+void CodeGenerator::emitPushArguments(LConstructArgsNative* construct) {
+ emitPushNativeArguments(construct);
+}
+
+void CodeGenerator::emitPushArguments(LConstructArrayNative* construct) {
+ emitPushArrayAsNativeArguments(construct);
+}
+
+void CodeGenerator::emitPushArguments(LApplyArgsObjNative* apply) {
+ Register argc = ToRegister(apply->getArgc());
+ Register argsObj = ToRegister(apply->getArgsObj());
+ Register tmpArgc = ToRegister(apply->getTempObject());
+ Register scratch = ToRegister(apply->getTempForArgCopy());
+
+ // NB: argc and argsObj are mapped to the same register.
+ MOZ_ASSERT(argc == argsObj);
+
+ // Load argc into tmpArgc.
+ masm.loadArgumentsObjectLength(argsObj, tmpArgc);
+
+ // Push arguments.
+ Label noCopy, epilogue;
+ masm.branchTestPtr(Assembler::Zero, tmpArgc, tmpArgc, &noCopy);
+ {
+ // Use scratch register to calculate stack space (no padding needed).
+ masm.movePtr(tmpArgc, scratch);
+
+ // Reserve space for copying the arguments.
+ NativeObject::elementsSizeMustNotOverflow();
+ masm.lshiftPtr(Imm32(ValueShift), scratch);
+ masm.subFromStackPtr(scratch);
+
+ // Load arguments data.
+ Register argvSrcBase = argsObj;
+ masm.loadPrivate(Address(argsObj, ArgumentsObject::getDataSlotOffset()),
+ argvSrcBase);
+ size_t argvSrcOffset = ArgumentsData::offsetOfArgs();
+ size_t argvDstOffset = 0;
+
+ // Stash away |tmpArgc| and adjust argvDstOffset accordingly.
+ masm.push(tmpArgc);
+ argvDstOffset += sizeof(void*);
+
+ // Copy the values.
+ emitCopyValuesForApply(argvSrcBase, tmpArgc, scratch, argvSrcOffset,
+ argvDstOffset);
+
+ // Set argc in preparation for emitCallInvokeNativeFunction.
+ masm.pop(argc);
+ masm.jump(&epilogue);
+ }
+ masm.bind(&noCopy);
+ {
+ // Set argc in preparation for emitCallInvokeNativeFunction.
+ masm.movePtr(ImmWord(0), argc);
+ }
+ masm.bind(&epilogue);
+}
+
+template <typename T>
+void CodeGenerator::emitApplyNative(T* apply) {
+ MOZ_ASSERT(apply->mir()->getSingleTarget()->isNativeWithoutJitEntry());
+
+ constexpr bool isConstructing = T::isConstructing();
+ MOZ_ASSERT(isConstructing == apply->mir()->isConstructing(),
+ "isConstructing condition must be consistent");
+
+ // Push newTarget.
+ if constexpr (isConstructing) {
+ masm.pushValue(JSVAL_TYPE_OBJECT, ToRegister(apply->getNewTarget()));
+ }
+
+ // Push arguments.
+ emitPushArguments(apply);
+
+ // Push |this|.
+ if constexpr (isConstructing) {
+ masm.pushValue(MagicValue(JS_IS_CONSTRUCTING));
+ } else {
+ masm.pushValue(ToValue(apply, T::ThisIndex));
+ }
+
+ // Push callee.
+ masm.pushValue(JSVAL_TYPE_OBJECT, ToRegister(apply->getFunction()));
+
+ // Call the native function.
+ emitCallInvokeNativeFunction(apply);
+
+ // Pop arguments and continue.
+ emitRestoreStackPointerFromFP();
+}
+
+template <typename T>
+void CodeGenerator::emitApplyArgsGuard(T* apply) {
LSnapshot* snapshot = apply->snapshot();
Register argcreg = ToRegister(apply->getArgc());
// Ensure that we have a reasonable number of arguments.
bailoutCmp32(Assembler::Above, argcreg, Imm32(JIT_ARGS_LENGTH_MAX), snapshot);
-
- emitApplyGeneric(apply);
}
-void CodeGenerator::visitApplyArgsObj(LApplyArgsObj* apply) {
+template <typename T>
+void CodeGenerator::emitApplyArgsObjGuard(T* apply) {
Register argsObj = ToRegister(apply->getArgsObj());
Register temp = ToRegister(apply->getTempObject());
@@ -6851,16 +7041,15 @@ void CodeGenerator::visitApplyArgsObj(LApplyArgsObj* apply) {
masm.loadArgumentsObjectLength(argsObj, temp, &bail);
masm.branch32(Assembler::Above, temp, Imm32(JIT_ARGS_LENGTH_MAX), &bail);
bailoutFrom(&bail, apply->snapshot());
-
- emitApplyGeneric(apply);
}
-void CodeGenerator::visitApplyArrayGeneric(LApplyArrayGeneric* apply) {
+template <typename T>
+void CodeGenerator::emitApplyArrayGuard(T* apply) {
LSnapshot* snapshot = apply->snapshot();
+ Register elements = ToRegister(apply->getElements());
Register tmp = ToRegister(apply->getTempObject());
- Address length(ToRegister(apply->getElements()),
- ObjectElements::offsetOfLength());
+ Address length(elements, ObjectElements::offsetOfLength());
masm.load32(length, tmp);
// Ensure that we have a reasonable number of arguments.
@@ -6868,43 +7057,60 @@ void CodeGenerator::visitApplyArrayGeneric(LApplyArrayGeneric* apply) {
// Ensure that the array does not contain an uninitialized tail.
- Address initializedLength(ToRegister(apply->getElements()),
+ Address initializedLength(elements,
ObjectElements::offsetOfInitializedLength());
masm.sub32(initializedLength, tmp);
bailoutCmp32(Assembler::NotEqual, tmp, Imm32(0), snapshot);
+}
+void CodeGenerator::visitApplyArgsGeneric(LApplyArgsGeneric* apply) {
+ emitApplyArgsGuard(apply);
emitApplyGeneric(apply);
}
-void CodeGenerator::visitConstructArgsGeneric(LConstructArgsGeneric* lir) {
- LSnapshot* snapshot = lir->snapshot();
- Register argcreg = ToRegister(lir->getArgc());
+void CodeGenerator::visitApplyArgsObj(LApplyArgsObj* apply) {
+ emitApplyArgsObjGuard(apply);
+ emitApplyGeneric(apply);
+}
- // Ensure that we have a reasonable number of arguments.
- bailoutCmp32(Assembler::Above, argcreg, Imm32(JIT_ARGS_LENGTH_MAX), snapshot);
+void CodeGenerator::visitApplyArrayGeneric(LApplyArrayGeneric* apply) {
+ emitApplyArrayGuard(apply);
+ emitApplyGeneric(apply);
+}
+void CodeGenerator::visitConstructArgsGeneric(LConstructArgsGeneric* lir) {
+ emitApplyArgsGuard(lir);
emitApplyGeneric(lir);
}
void CodeGenerator::visitConstructArrayGeneric(LConstructArrayGeneric* lir) {
- LSnapshot* snapshot = lir->snapshot();
- Register tmp = ToRegister(lir->getTempObject());
+ emitApplyArrayGuard(lir);
+ emitApplyGeneric(lir);
+}
- Address length(ToRegister(lir->getElements()),
- ObjectElements::offsetOfLength());
- masm.load32(length, tmp);
+void CodeGenerator::visitApplyArgsNative(LApplyArgsNative* lir) {
+ emitApplyArgsGuard(lir);
+ emitApplyNative(lir);
+}
- // Ensure that we have a reasonable number of arguments.
- bailoutCmp32(Assembler::Above, tmp, Imm32(JIT_ARGS_LENGTH_MAX), snapshot);
+void CodeGenerator::visitApplyArgsObjNative(LApplyArgsObjNative* lir) {
+ emitApplyArgsObjGuard(lir);
+ emitApplyNative(lir);
+}
- // Ensure that the array does not contain an uninitialized tail.
+void CodeGenerator::visitApplyArrayNative(LApplyArrayNative* lir) {
+ emitApplyArrayGuard(lir);
+ emitApplyNative(lir);
+}
- Address initializedLength(ToRegister(lir->getElements()),
- ObjectElements::offsetOfInitializedLength());
- masm.sub32(initializedLength, tmp);
- bailoutCmp32(Assembler::NotEqual, tmp, Imm32(0), snapshot);
+void CodeGenerator::visitConstructArgsNative(LConstructArgsNative* lir) {
+ emitApplyArgsGuard(lir);
+ emitApplyNative(lir);
+}
- emitApplyGeneric(lir);
+void CodeGenerator::visitConstructArrayNative(LConstructArrayNative* lir) {
+ emitApplyArrayGuard(lir);
+ emitApplyNative(lir);
}
void CodeGenerator::visitBail(LBail* lir) { bailout(lir->snapshot()); }
@@ -15460,15 +15666,37 @@ void CodeGenerator::validateAndRegisterFuseDependencies(JSContext* cx,
if (!hasSeenObjectEmulateUndefinedFuse.intact()) {
JitSpew(JitSpew_Codegen,
- "tossing compilation; fuse dependency no longer valid\n");
+ "tossing compilation; hasSeenObjectEmulateUndefinedFuse fuse "
+ "dependency no longer valid\n");
*isValid = false;
return;
}
if (!hasSeenObjectEmulateUndefinedFuse.addFuseDependency(cx, script)) {
- JitSpew(
- JitSpew_Codegen,
- "tossing compilation; failed to register script dependency\n");
+ JitSpew(JitSpew_Codegen,
+ "tossing compilation; failed to register "
+ "hasSeenObjectEmulateUndefinedFuse script dependency\n");
+ *isValid = false;
+ return;
+ }
+ break;
+ }
+
+ case FuseDependencyKind::OptimizeGetIteratorFuse: {
+ auto& optimizeGetIteratorFuse =
+ cx->realm()->realmFuses.optimizeGetIteratorFuse;
+ if (!optimizeGetIteratorFuse.intact()) {
+ JitSpew(JitSpew_Codegen,
+ "tossing compilation; optimizeGetIteratorFuse fuse "
+ "dependency no longer valid\n");
+ *isValid = false;
+ return;
+ }
+
+ if (!optimizeGetIteratorFuse.addFuseDependency(cx, script)) {
+ JitSpew(JitSpew_Codegen,
+ "tossing compilation; failed to register "
+ "optimizeGetIteratorFuse script dependency\n");
*isValid = false;
return;
}
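
Both fuse kinds follow the same check-then-register pattern. A simplified standalone sketch (the Fuse type is a stand-in for the engine's class; addDependency is assumed to report failure, e.g. on OOM, by returning false):

struct Fuse {
  bool popped = false;
  bool intact() const { return !popped; }
  bool addDependency() { return true; }  // stand-in; may fail in the real engine
};

// Sets *isValid to false when the compilation must be tossed: either the fuse
// has already popped, or the script dependency could not be registered.
static void validateFuseDependency(Fuse& fuse, bool* isValid) {
  if (!fuse.intact() || !fuse.addDependency()) {
    *isValid = false;
  }
}
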
@@ -15837,15 +16065,16 @@ void CodeGenerator::visitMegamorphicSetElement(LMegamorphicSetElement* lir) {
void CodeGenerator::visitLoadScriptedProxyHandler(
LLoadScriptedProxyHandler* ins) {
- const Register obj = ToRegister(ins->getOperand(0));
- ValueOperand output = ToOutValue(ins);
+ Register obj = ToRegister(ins->getOperand(0));
+ Register output = ToRegister(ins->output());
- masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()),
- output.scratchReg());
- masm.loadValue(
- Address(output.scratchReg(), js::detail::ProxyReservedSlots::offsetOfSlot(
- ScriptedProxyHandler::HANDLER_EXTRA)),
- output);
+ masm.loadPtr(Address(obj, ProxyObject::offsetOfReservedSlots()), output);
+
+ Label bail;
+ Address handlerAddr(output, js::detail::ProxyReservedSlots::offsetOfSlot(
+ ScriptedProxyHandler::HANDLER_EXTRA));
+ masm.fallibleUnboxObject(handlerAddr, output, &bail);
+ bailoutFrom(&bail, ins->snapshot());
}
#ifdef JS_PUNBOX64
@@ -19861,9 +20090,17 @@ void CodeGenerator::visitLoadWrapperTarget(LLoadWrapperTarget* lir) {
Register output = ToRegister(lir->output());
masm.loadPtr(Address(object, ProxyObject::offsetOfReservedSlots()), output);
- masm.unboxObject(
- Address(output, js::detail::ProxyReservedSlots::offsetOfPrivateSlot()),
- output);
+
+ // Bail for revoked proxies.
+ Label bail;
+ Address targetAddr(output,
+ js::detail::ProxyReservedSlots::offsetOfPrivateSlot());
+ if (lir->mir()->fallible()) {
+ masm.fallibleUnboxObject(targetAddr, output, &bail);
+ bailoutFrom(&bail, lir->snapshot());
+ } else {
+ masm.unboxObject(targetAddr, output);
+ }
}
void CodeGenerator::visitGuardHasGetterSetter(LGuardHasGetterSetter* lir) {
@@ -20642,9 +20879,20 @@ void CodeGenerator::visitWasmAnyRefFromJSString(LWasmAnyRefFromJSString* lir) {
}
void CodeGenerator::visitWasmNewI31Ref(LWasmNewI31Ref* lir) {
- Register value = ToRegister(lir->value());
- Register output = ToRegister(lir->output());
- masm.truncate32ToWasmI31Ref(value, output);
+ if (lir->value()->isConstant()) {
+ // i31ref are often created with constants. If that's the case we will
+    // i31refs are often created with constants. If that's the case we will
+ // in masm.truncate32ToWasmI31Ref.
+ Register output = ToRegister(lir->output());
+ uint32_t value =
+ static_cast<uint32_t>(lir->value()->toConstant()->toInt32());
+ uintptr_t ptr = wasm::AnyRef::fromUint32Truncate(value).rawValue();
+ masm.movePtr(ImmWord(ptr), output);
+ } else {
+ Register value = ToRegister(lir->value());
+ Register output = ToRegister(lir->output());
+ masm.truncate32ToWasmI31Ref(value, output);
+ }
}
void CodeGenerator::visitWasmI31RefGet(LWasmI31RefGet* lir) {
diff --git a/js/src/jit/CodeGenerator.h b/js/src/jit/CodeGenerator.h
index 274c876e4d..282771a79e 100644
--- a/js/src/jit/CodeGenerator.h
+++ b/js/src/jit/CodeGenerator.h
@@ -239,11 +239,34 @@ class CodeGenerator final : public CodeGeneratorSpecific {
uint32_t extraFormals);
void emitPushArrayAsArguments(Register tmpArgc, Register srcBaseAndArgc,
Register scratch, size_t argvSrcOffset);
- void emitPushArguments(LApplyArgsGeneric* apply, Register scratch);
- void emitPushArguments(LApplyArgsObj* apply, Register scratch);
- void emitPushArguments(LApplyArrayGeneric* apply, Register scratch);
- void emitPushArguments(LConstructArgsGeneric* construct, Register scratch);
- void emitPushArguments(LConstructArrayGeneric* construct, Register scratch);
+ void emitPushArguments(LApplyArgsGeneric* apply);
+ void emitPushArguments(LApplyArgsObj* apply);
+ void emitPushArguments(LApplyArrayGeneric* apply);
+ void emitPushArguments(LConstructArgsGeneric* construct);
+ void emitPushArguments(LConstructArrayGeneric* construct);
+
+ template <typename T>
+ void emitApplyNative(T* apply);
+ template <typename T>
+ void emitCallInvokeNativeFunction(T* apply);
+ template <typename T>
+ void emitPushNativeArguments(T* apply);
+ template <typename T>
+ void emitPushArrayAsNativeArguments(T* apply);
+ void emitPushArguments(LApplyArgsNative* apply);
+ void emitPushArguments(LApplyArgsObjNative* apply);
+ void emitPushArguments(LApplyArrayNative* apply);
+ void emitPushArguments(LConstructArgsNative* construct);
+ void emitPushArguments(LConstructArrayNative* construct);
+
+ template <typename T>
+ void emitApplyArgsGuard(T* apply);
+
+ template <typename T>
+ void emitApplyArgsObjGuard(T* apply);
+
+ template <typename T>
+ void emitApplyArrayGuard(T* apply);
template <class GetInlinedArgument>
void emitGetInlinedArgument(GetInlinedArgument* lir, Register index,
@@ -439,6 +462,7 @@ class CodeGenerator final : public CodeGeneratorSpecific {
// be mapped to an actual fuse by validateAndRegisterFuseDependencies.
enum class FuseDependencyKind {
HasSeenObjectEmulateUndefinedFuse,
+ OptimizeGetIteratorFuse,
};
// The set of fuses this code generation depends on.
@@ -449,6 +473,10 @@ class CodeGenerator final : public CodeGeneratorSpecific {
fuseDependencies += FuseDependencyKind::HasSeenObjectEmulateUndefinedFuse;
}
+ void addOptimizeGetIteratorFuseDependency() {
+ fuseDependencies += FuseDependencyKind::OptimizeGetIteratorFuse;
+ }
+
// Called during linking on main-thread: Ensures that the fuses are still
// intact, and registers a script dependency on a specific fuse before
// finishing compilation.
diff --git a/js/src/jit/Ion.cpp b/js/src/jit/Ion.cpp
index 85008006e1..e209ace846 100644
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -253,6 +253,10 @@ bool JitRuntime::generateTrampolines(JSContext* cx) {
generateIonGenericCallStub(masm, IonGenericCallKind::Construct);
rangeRecorder.recordOffset("Trampoline: IonGenericConstruct");
+ JitSpew(JitSpew_Codegen, "# Emitting trampoline natives");
+ TrampolineNativeJitEntryOffsets nativeOffsets;
+ generateTrampolineNatives(masm, nativeOffsets, rangeRecorder);
+
Linker linker(masm);
trampolineCode_ = linker.newCode(cx, CodeKind::Other);
if (!trampolineCode_) {
@@ -264,6 +268,14 @@ bool JitRuntime::generateTrampolines(JSContext* cx) {
vtune::MarkStub(trampolineCode_, "Trampolines");
#endif
+ // Initialize TrampolineNative JitEntry array.
+ for (size_t i = 0; i < size_t(TrampolineNative::Count); i++) {
+ TrampolineNative native = TrampolineNative(i);
+ uint32_t offset = nativeOffsets[native];
+ MOZ_ASSERT(offset > 0 && offset < trampolineCode_->instructionsSize());
+ trampolineNativeJitEntries_[native] = trampolineCode_->raw() + offset;
+ }
+
return true;
}
@@ -2346,6 +2358,10 @@ static void InvalidateActivation(JS::GCContext* gcx,
JitSpew(JitSpew_IonInvalidate, "#%zu rectifier frame @ %p", frameno,
frame.fp());
break;
+ case FrameType::TrampolineNative:
+ JitSpew(JitSpew_IonInvalidate, "#%zu TrampolineNative frame @ %p",
+ frameno, frame.fp());
+ break;
case FrameType::IonICCall:
JitSpew(JitSpew_IonInvalidate, "#%zu ion IC call frame @ %p", frameno,
frame.fp());
diff --git a/js/src/jit/JSJitFrameIter.cpp b/js/src/jit/JSJitFrameIter.cpp
index 89d3de3128..fbfef8f210 100644
--- a/js/src/jit/JSJitFrameIter.cpp
+++ b/js/src/jit/JSJitFrameIter.cpp
@@ -78,7 +78,7 @@ CalleeToken JSJitFrameIter::calleeToken() const {
}
JSFunction* JSJitFrameIter::callee() const {
- MOZ_ASSERT(isScripted());
+ MOZ_ASSERT(isScripted() || isTrampolineNative());
MOZ_ASSERT(isFunctionFrame());
return CalleeTokenToFunction(calleeToken());
}
@@ -110,7 +110,7 @@ bool JSJitFrameIter::isFunctionFrame() const {
JSScript* JSJitFrameIter::script() const {
MOZ_ASSERT(isScripted());
- JSScript* script = ScriptFromCalleeToken(calleeToken());
+ JSScript* script = MaybeForwardedScriptFromCalleeToken(calleeToken());
MOZ_ASSERT(script);
return script;
}
@@ -383,6 +383,10 @@ void JSJitFrameIter::dump() const {
fprintf(stderr, " Rectifier frame\n");
fprintf(stderr, " Caller frame ptr: %p\n", current()->callerFramePtr());
break;
+ case FrameType::TrampolineNative:
+ fprintf(stderr, " TrampolineNative frame\n");
+ fprintf(stderr, " Caller frame ptr: %p\n", current()->callerFramePtr());
+ break;
case FrameType::IonICCall:
fprintf(stderr, " Ion IC call\n");
fprintf(stderr, " Caller frame ptr: %p\n", current()->callerFramePtr());
@@ -707,47 +711,47 @@ void JSJitProfilingFrameIterator::moveToNextFrame(CommonFrameLayout* frame) {
* |
* ^--- WasmToJSJit <---- (other wasm frames, not handled by this iterator)
* |
- * ^--- Arguments Rectifier
- * | ^
- * | |
- * | ^--- Ion
- * | |
- * | ^--- Baseline Stub <---- Baseline
- * | |
- * | ^--- WasmToJSJit <--- (other wasm frames)
- * | |
- * | ^--- Entry Frame (CppToJSJit)
+ * ^--- Entry Frame (BaselineInterpreter) (unwrapped)
* |
- * ^--- Entry Frame (CppToJSJit)
+ * ^--- Arguments Rectifier (unwrapped)
+ * |
+ * ^--- Trampoline Native (unwrapped)
* |
- * ^--- Entry Frame (BaselineInterpreter)
- * | ^
- * | |
- * | ^--- Ion
- * | |
- * | ^--- Baseline Stub <---- Baseline
- * | |
- * | ^--- WasmToJSJit <--- (other wasm frames)
- * | |
- * | ^--- Entry Frame (CppToJSJit)
- * | |
- * | ^--- Arguments Rectifier
+ * ^--- Entry Frame (CppToJSJit)
*
* NOTE: Keep this in sync with JitRuntime::generateProfilerExitFrameTailStub!
*/
- // Unwrap baseline interpreter entry frame.
- if (frame->prevType() == FrameType::BaselineInterpreterEntry) {
- frame = GetPreviousRawFrame<BaselineInterpreterEntryFrameLayout*>(frame);
- }
+ while (true) {
+ // Unwrap baseline interpreter entry frame.
+ if (frame->prevType() == FrameType::BaselineInterpreterEntry) {
+ frame = GetPreviousRawFrame<BaselineInterpreterEntryFrameLayout*>(frame);
+ continue;
+ }
+
+ // Unwrap rectifier frames.
+ if (frame->prevType() == FrameType::Rectifier) {
+ frame = GetPreviousRawFrame<RectifierFrameLayout*>(frame);
+ MOZ_ASSERT(frame->prevType() == FrameType::IonJS ||
+ frame->prevType() == FrameType::BaselineStub ||
+ frame->prevType() == FrameType::TrampolineNative ||
+ frame->prevType() == FrameType::WasmToJSJit ||
+ frame->prevType() == FrameType::CppToJSJit);
+ continue;
+ }
- // Unwrap rectifier frames.
- if (frame->prevType() == FrameType::Rectifier) {
- frame = GetPreviousRawFrame<RectifierFrameLayout*>(frame);
- MOZ_ASSERT(frame->prevType() == FrameType::IonJS ||
- frame->prevType() == FrameType::BaselineStub ||
- frame->prevType() == FrameType::WasmToJSJit ||
- frame->prevType() == FrameType::CppToJSJit);
+ // Unwrap TrampolineNative frames.
+ if (frame->prevType() == FrameType::TrampolineNative) {
+ frame = GetPreviousRawFrame<TrampolineNativeFrameLayout*>(frame);
+ MOZ_ASSERT(frame->prevType() == FrameType::IonJS ||
+ frame->prevType() == FrameType::BaselineStub ||
+ frame->prevType() == FrameType::Rectifier ||
+ frame->prevType() == FrameType::WasmToJSJit ||
+ frame->prevType() == FrameType::CppToJSJit);
+ continue;
+ }
+
+ break;
}
FrameType prevType = frame->prevType();
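
The new loop generalizes the old single-step unwrapping: it keeps skipping wrapper frames (baseline interpreter entry, rectifier, trampoline native) until the previous frame is one the profiler can attribute. A simplified self-contained sketch with stand-in types:

enum class SimpleFrameType {
  IonJS, BaselineJS, BaselineStub, Rectifier, TrampolineNative,
  BaselineInterpreterEntry, WasmToJSJit, CppToJSJit
};

struct SimpleFrame {
  SimpleFrameType prevType;
  SimpleFrame* prev;
};

static SimpleFrame* unwrapWrapperFrames(SimpleFrame* frame) {
  while (frame->prevType == SimpleFrameType::BaselineInterpreterEntry ||
         frame->prevType == SimpleFrameType::Rectifier ||
         frame->prevType == SimpleFrameType::TrampolineNative) {
    frame = frame->prev;
  }
  return frame;
}
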
@@ -773,24 +777,31 @@ void JSJitProfilingFrameIterator::moveToNextFrame(CommonFrameLayout* frame) {
}
case FrameType::WasmToJSJit:
- // No previous js jit frame, this is a transition frame, used to
- // pass a wasm iterator the correct value of FP.
+ // No previous JS JIT frame. Set fp_ to nullptr to indicate the
+ // JSJitProfilingFrameIterator is done(). Also set wasmCallerFP_ so that
+ // the caller can pass it to a Wasm frame iterator.
resumePCinCurrentFrame_ = nullptr;
- fp_ = GetPreviousRawFrame<uint8_t*>(frame);
+ fp_ = nullptr;
type_ = FrameType::WasmToJSJit;
- MOZ_ASSERT(!done());
+ MOZ_ASSERT(!wasmCallerFP_);
+ wasmCallerFP_ = GetPreviousRawFrame<uint8_t*>(frame);
+ MOZ_ASSERT(wasmCallerFP_);
+ MOZ_ASSERT(done());
return;
case FrameType::CppToJSJit:
- // No previous frame, set to nullptr to indicate that
+ // No previous JS JIT frame. Set fp_ to nullptr to indicate the
// JSJitProfilingFrameIterator is done().
resumePCinCurrentFrame_ = nullptr;
fp_ = nullptr;
type_ = FrameType::CppToJSJit;
+ MOZ_ASSERT(!wasmCallerFP_);
+ MOZ_ASSERT(done());
return;
case FrameType::BaselineInterpreterEntry:
case FrameType::Rectifier:
+ case FrameType::TrampolineNative:
case FrameType::Exit:
case FrameType::Bailout:
case FrameType::JSJitToWasm:
diff --git a/js/src/jit/JSJitFrameIter.h b/js/src/jit/JSJitFrameIter.h
index d40a533a20..03fd06852e 100644
--- a/js/src/jit/JSJitFrameIter.h
+++ b/js/src/jit/JSJitFrameIter.h
@@ -73,6 +73,10 @@ enum class FrameType {
// wasm, and is a special kind of exit frame that doesn't have the exit
// footer. From the point of view of the jit, it can be skipped as an exit.
JSJitToWasm,
+
+ // Frame for a TrampolineNative, a JS builtin implemented with a JIT
+ // trampoline. See jit/TrampolineNatives.h.
+ TrampolineNative,
};
enum class ReadFrameArgsBehavior {
@@ -173,6 +177,9 @@ class JSJitFrameIter {
return type_ == FrameType::BaselineInterpreterEntry;
}
bool isRectifier() const { return type_ == FrameType::Rectifier; }
+ bool isTrampolineNative() const {
+ return type_ == FrameType::TrampolineNative;
+ }
bool isBareExit() const;
bool isUnwoundJitExit() const;
template <typename T>
@@ -263,6 +270,7 @@ class JitcodeGlobalTable;
class JSJitProfilingFrameIterator {
uint8_t* fp_;
+ uint8_t* wasmCallerFP_ = nullptr;
// See JS::ProfilingFrameIterator::endStackAddress_ comment.
void* endStackAddress_ = nullptr;
FrameType type_;
@@ -290,6 +298,11 @@ class JSJitProfilingFrameIterator {
MOZ_ASSERT(!done());
return fp_;
}
+ void* wasmCallerFP() const {
+ MOZ_ASSERT(done());
+ MOZ_ASSERT(bool(wasmCallerFP_) == (type_ == FrameType::WasmToJSJit));
+ return wasmCallerFP_;
+ }
inline JitFrameLayout* framePtr() const;
void* stackAddress() const { return fp(); }
FrameType frameType() const {
@@ -491,6 +504,42 @@ class SnapshotIterator {
Value read() { return allocationValue(readAllocation()); }
+ int32_t readInt32() {
+ Value val = read();
+ MOZ_RELEASE_ASSERT(val.isInt32());
+ return val.toInt32();
+ }
+
+ double readNumber() {
+ Value val = read();
+ MOZ_RELEASE_ASSERT(val.isNumber());
+ return val.toNumber();
+ }
+
+ JSString* readString() {
+ Value val = read();
+ MOZ_RELEASE_ASSERT(val.isString());
+ return val.toString();
+ }
+
+ JS::BigInt* readBigInt() {
+ Value val = read();
+ MOZ_RELEASE_ASSERT(val.isBigInt());
+ return val.toBigInt();
+ }
+
+ JSObject* readObject() {
+ Value val = read();
+ MOZ_RELEASE_ASSERT(val.isObject());
+ return &val.toObject();
+ }
+
+ JS::GCCellPtr readGCCellPtr() {
+ Value val = read();
+ MOZ_RELEASE_ASSERT(val.isGCThing());
+ return val.toGCCellPtr();
+ }
+
// Read the |Normal| value unless it is not available and that the snapshot
// provides a |Default| value. This is useful to avoid invalidations of the
// frame while we are only interested in a few properties which are provided
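
The typed readers added above all share one shape: read the next recovered Value and release-assert its type before unwrapping it. A trivial standalone analogue with a stand-in value type:

#include <cassert>
#include <cstdint>
#include <variant>

using SimpleValue = std::variant<int32_t, double>;

static int32_t readInt32(const SimpleValue& v) {
  assert(std::holds_alternative<int32_t>(v));  // MOZ_RELEASE_ASSERT in the engine
  return std::get<int32_t>(v);
}
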
diff --git a/js/src/jit/JitFrames.cpp b/js/src/jit/JitFrames.cpp
index 176b988e05..45ac1f5def 100644
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -83,6 +83,27 @@ static uint32_t NumArgAndLocalSlots(const InlineFrameIterator& frame) {
return CountArgSlots(script, frame.maybeCalleeTemplate()) + script->nfixed();
}
+static TrampolineNative TrampolineNativeForFrame(
+ JSRuntime* rt, TrampolineNativeFrameLayout* layout) {
+ JSFunction* nativeFun = CalleeTokenToFunction(layout->calleeToken());
+ MOZ_ASSERT(nativeFun->isBuiltinNative());
+ void** jitEntry = nativeFun->nativeJitEntry();
+ return rt->jitRuntime()->trampolineNativeForJitEntry(jitEntry);
+}
+
+static void UnwindTrampolineNativeFrame(JSRuntime* rt,
+ const JSJitFrameIter& frame) {
+ auto* layout = (TrampolineNativeFrameLayout*)frame.fp();
+ TrampolineNative native = TrampolineNativeForFrame(rt, layout);
+ switch (native) {
+ case TrampolineNative::ArraySort:
+ layout->getFrameData<ArraySortData>()->freeMallocData();
+ break;
+ case TrampolineNative::Count:
+ MOZ_CRASH("Invalid value");
+ }
+}
+
static void CloseLiveIteratorIon(JSContext* cx,
const InlineFrameIterator& frame,
const TryNote* tn) {
@@ -739,7 +760,7 @@ void HandleException(ResumeFromException* rfe) {
// JIT code can enter same-compartment realms, so reset cx->realm to
// this frame's realm.
- if (frame.isScripted()) {
+ if (frame.isScripted() || frame.isTrampolineNative()) {
cx->setRealmForJitExceptionHandler(iter.realm());
}
@@ -809,6 +830,8 @@ void HandleException(ResumeFromException* rfe) {
if (rfe->kind == ExceptionResumeKind::ForcedReturnBaseline) {
return;
}
+ } else if (frame.isTrampolineNative()) {
+ UnwindTrampolineNativeFrame(cx->runtime(), frame);
}
prevJitFrame = frame.current();
@@ -910,12 +933,15 @@ static inline uintptr_t ReadAllocation(const JSJitFrameIter& frame,
static void TraceThisAndArguments(JSTracer* trc, const JSJitFrameIter& frame,
JitFrameLayout* layout) {
- // Trace |this| and any extra actual arguments for an Ion frame. Tracing
- // of formal arguments is taken care of by the frame's safepoint/snapshot,
- // except when the script might have lazy arguments or rest, in which case
- // we trace them as well. We also have to trace formals if we have a
- // LazyLink frame or an InterpreterStub frame or a special JSJit to wasm
- // frame (since wasm doesn't use snapshots).
+ // Trace |this| and the actual and formal arguments of a JIT frame.
+ //
+ // Tracing of formal arguments of an Ion frame is taken care of by the frame's
+ // safepoint/snapshot. We skip tracing formal arguments if the script doesn't
+ // use |arguments| or rest, because the register allocator can spill values to
+ // argument slots in this case.
+ //
+ // For other frames such as LazyLink frames or InterpreterStub frames, we
+ // always trace all actual and formal arguments.
if (!CalleeTokenIsFunction(layout->calleeToken())) {
return;
@@ -927,8 +953,7 @@ static void TraceThisAndArguments(JSTracer* trc, const JSJitFrameIter& frame,
size_t numArgs = std::max(layout->numActualArgs(), numFormals);
size_t firstArg = 0;
- if (frame.type() != FrameType::JSJitToWasm &&
- !frame.isExitFrameLayout<CalledFromJitExitFrameLayout>() &&
+ if (frame.isIonScripted() &&
!fun->nonLazyScript()->mayReadFrameArgsDirectly()) {
firstArg = numFormals;
}
@@ -936,17 +961,17 @@ static void TraceThisAndArguments(JSTracer* trc, const JSJitFrameIter& frame,
Value* argv = layout->thisAndActualArgs();
// Trace |this|.
- TraceRoot(trc, argv, "ion-thisv");
+ TraceRoot(trc, argv, "jit-thisv");
// Trace arguments. Note + 1 for thisv.
for (size_t i = firstArg; i < numArgs; i++) {
- TraceRoot(trc, &argv[i + 1], "ion-argv");
+ TraceRoot(trc, &argv[i + 1], "jit-argv");
}
// Always trace the new.target from the frame. It's not in the snapshots.
// +1 to pass |this|
if (CalleeTokenIsConstructing(layout->calleeToken())) {
- TraceRoot(trc, &argv[1 + numArgs], "ion-newTarget");
+ TraceRoot(trc, &argv[1 + numArgs], "jit-newTarget");
}
}
@@ -1397,6 +1422,22 @@ static void TraceJSJitToWasmFrame(JSTracer* trc, const JSJitFrameIter& frame) {
TraceThisAndArguments(trc, frame, layout);
}
+static void TraceTrampolineNativeFrame(JSTracer* trc,
+ const JSJitFrameIter& frame) {
+ auto* layout = (TrampolineNativeFrameLayout*)frame.fp();
+ layout->replaceCalleeToken(TraceCalleeToken(trc, layout->calleeToken()));
+ TraceThisAndArguments(trc, frame, layout);
+
+ TrampolineNative native = TrampolineNativeForFrame(trc->runtime(), layout);
+ switch (native) {
+ case TrampolineNative::ArraySort:
+ layout->getFrameData<ArraySortData>()->trace(trc);
+ break;
+ case TrampolineNative::Count:
+ MOZ_CRASH("Invalid value");
+ }
+}
+
static void TraceJitActivation(JSTracer* trc, JitActivation* activation) {
#ifdef CHECK_OSIPOINT_REGISTERS
if (JitOptions.checkOsiPointRegisters) {
@@ -1439,6 +1480,9 @@ static void TraceJitActivation(JSTracer* trc, JitActivation* activation) {
case FrameType::Rectifier:
TraceRectifierFrame(trc, jitFrame);
break;
+ case FrameType::TrampolineNative:
+ TraceTrampolineNativeFrame(trc, jitFrame);
+ break;
case FrameType::IonICCall:
TraceIonICCallFrame(trc, jitFrame);
break;
diff --git a/js/src/jit/JitFrames.h b/js/src/jit/JitFrames.h
index ab882e7986..47c176492b 100644
--- a/js/src/jit/JitFrames.h
+++ b/js/src/jit/JitFrames.h
@@ -299,6 +299,17 @@ class RectifierFrameLayout : public JitFrameLayout {
static inline size_t Size() { return sizeof(RectifierFrameLayout); }
};
+class TrampolineNativeFrameLayout : public JitFrameLayout {
+ public:
+ static inline size_t Size() { return sizeof(TrampolineNativeFrameLayout); }
+
+ template <typename T>
+ T* getFrameData() {
+ uint8_t* raw = reinterpret_cast<uint8_t*>(this) - sizeof(T);
+ return reinterpret_cast<T*>(raw);
+ }
+};
+
class WasmToJSJitFrameLayout : public JitFrameLayout {
public:
static inline size_t Size() { return sizeof(WasmToJSJitFrameLayout); }
diff --git a/js/src/jit/JitOptions.cpp b/js/src/jit/JitOptions.cpp
index e9d389cf60..053cf868a7 100644
--- a/js/src/jit/JitOptions.cpp
+++ b/js/src/jit/JitOptions.cpp
@@ -447,7 +447,8 @@ void DefaultJitOptions::resetNormalIonWarmUpThreshold() {
void DefaultJitOptions::maybeSetWriteProtectCode(bool val) {
#ifdef JS_USE_APPLE_FAST_WX
- // On Apple Silicon we always use pthread_jit_write_protect_np.
+ // On Apple Silicon we always use pthread_jit_write_protect_np, or
+ // be_memory_inline_jit_restrict_*.
MOZ_ASSERT(!writeProtectCode);
#else
writeProtectCode = val;
diff --git a/js/src/jit/JitRuntime.h b/js/src/jit/JitRuntime.h
index 7d038ed0e2..383efca437 100644
--- a/js/src/jit/JitRuntime.h
+++ b/js/src/jit/JitRuntime.h
@@ -27,6 +27,7 @@
#include "jit/JitCode.h"
#include "jit/JitHints.h"
#include "jit/shared/Assembler-shared.h"
+#include "jit/TrampolineNatives.h"
#include "js/AllocPolicy.h"
#include "js/ProfilingFrameIterator.h"
#include "js/TypeDecls.h"
@@ -234,6 +235,13 @@ class JitRuntime {
MainThreadData<IonCompileTaskList> ionLazyLinkList_;
MainThreadData<size_t> ionLazyLinkListSize_{0};
+ // Pointer to trampoline code for each TrampolineNative. The JSFunction has
+ // a JitEntry pointer that points to an item in this array.
+ using TrampolineNativeJitEntryArray =
+ mozilla::EnumeratedArray<TrampolineNative, void*,
+ size_t(TrampolineNative::Count)>;
+ TrampolineNativeJitEntryArray trampolineNativeJitEntries_{};
+
#ifdef DEBUG
// Flag that can be set from JIT code to indicate it's invalid to call
// arbitrary JS code in a particular region. This is checked in RunScript.
@@ -293,6 +301,14 @@ class JitRuntime {
void generateBaselineInterpreterEntryTrampoline(MacroAssembler& masm);
void generateInterpreterEntryTrampoline(MacroAssembler& masm);
+ using TrampolineNativeJitEntryOffsets =
+ mozilla::EnumeratedArray<TrampolineNative, uint32_t,
+ size_t(TrampolineNative::Count)>;
+ void generateTrampolineNatives(MacroAssembler& masm,
+ TrampolineNativeJitEntryOffsets& offsets,
+ PerfSpewerRangeRecorder& rangeRecorder);
+ uint32_t generateArraySortTrampoline(MacroAssembler& masm);
+
void bindLabelToOffset(Label* label, uint32_t offset) {
MOZ_ASSERT(!trampolineCode_);
label->bind(offset);
@@ -418,6 +434,20 @@ class JitRuntime {
return trampolineCode(ionGenericCallStubOffset_[kind]);
}
+ void** trampolineNativeJitEntry(TrampolineNative native) {
+ void** jitEntry = &trampolineNativeJitEntries_[native];
+ MOZ_ASSERT(*jitEntry >= trampolineCode_->raw());
+ MOZ_ASSERT(*jitEntry <
+ trampolineCode_->raw() + trampolineCode_->instructionsSize());
+ return jitEntry;
+ }
+ TrampolineNative trampolineNativeForJitEntry(void** entry) {
+ MOZ_RELEASE_ASSERT(entry >= trampolineNativeJitEntries_.begin());
+ size_t index = entry - trampolineNativeJitEntries_.begin();
+ MOZ_RELEASE_ASSERT(index < size_t(TrampolineNative::Count));
+ return TrampolineNative(index);
+ }
+
bool hasJitcodeGlobalTable() const { return jitcodeGlobalTable_ != nullptr; }
JitcodeGlobalTable* getJitcodeGlobalTable() {
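
The jit-entry mapping works both ways because each TrampolineNative owns exactly one slot in a fixed array: the slot's address serves as the function's JitEntry pointer, and pointer arithmetic recovers the enum value. A simplified sketch with stand-in names:

#include <cstddef>

enum class SimpleTrampolineNative : size_t { ArraySort, Count };

static void* gTrampolineEntries[size_t(SimpleTrampolineNative::Count)] = {};

static void** entryFor(SimpleTrampolineNative native) {
  return &gTrampolineEntries[size_t(native)];
}

static SimpleTrampolineNative nativeForEntry(void** entry) {
  size_t index = size_t(entry - &gTrampolineEntries[0]);
  return SimpleTrampolineNative(index);
}
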
diff --git a/js/src/jit/LIROps.yaml b/js/src/jit/LIROps.yaml
index f13c4b0745..880e756f74 100644
--- a/js/src/jit/LIROps.yaml
+++ b/js/src/jit/LIROps.yaml
@@ -632,6 +632,21 @@
- name: ConstructArrayGeneric
gen_boilerplate: false
+- name: ApplyArgsNative
+ gen_boilerplate: false
+
+- name: ApplyArgsObjNative
+ gen_boilerplate: false
+
+- name: ApplyArrayNative
+ gen_boilerplate: false
+
+- name: ConstructArgsNative
+ gen_boilerplate: false
+
+- name: ConstructArrayNative
+ gen_boilerplate: false
+
- name: TestIAndBranch
gen_boilerplate: false
@@ -2189,7 +2204,7 @@
mir_op: ClampToUint8
- name: LoadScriptedProxyHandler
- result_type: BoxedValue
+ result_type: WordSized
operands:
object: WordSized
mir_op: true
@@ -3694,6 +3709,7 @@
result_type: WordSized
operands:
object: WordSized
+ mir_op: true
- name: GuardHasGetterSetter
operands:
diff --git a/js/src/jit/Lowering.cpp b/js/src/jit/Lowering.cpp
index b0007a114d..f7b898f240 100644
--- a/js/src/jit/Lowering.cpp
+++ b/js/src/jit/Lowering.cpp
@@ -654,12 +654,23 @@ void LIRGenerator::visitApplyArgs(MApplyArgs* apply) {
static_assert(CallTempReg2 != JSReturnReg_Type);
static_assert(CallTempReg2 != JSReturnReg_Data);
- LApplyArgsGeneric* lir = new (alloc()) LApplyArgsGeneric(
- useFixedAtStart(apply->getFunction(), CallTempReg3),
- useFixedAtStart(apply->getArgc(), CallTempReg0),
- useBoxFixedAtStart(apply->getThis(), CallTempReg4, CallTempReg5),
- tempFixed(CallTempReg1), // object register
- tempFixed(CallTempReg2)); // stack counter register
+ auto function = useFixedAtStart(apply->getFunction(), CallTempReg3);
+ auto argc = useFixedAtStart(apply->getArgc(), CallTempReg0);
+ auto thisValue =
+ useBoxFixedAtStart(apply->getThis(), CallTempReg4, CallTempReg5);
+ auto tempObj = tempFixed(CallTempReg1); // object register
+ auto tempCopy = tempFixed(CallTempReg2); // copy register
+
+ auto* target = apply->getSingleTarget();
+
+ LInstruction* lir;
+ if (target && target->isNativeWithoutJitEntry()) {
+ lir = new (alloc())
+ LApplyArgsNative(function, argc, thisValue, tempObj, tempCopy);
+ } else {
+ lir = new (alloc())
+ LApplyArgsGeneric(function, argc, thisValue, tempObj, tempCopy);
+ }
// Bailout is needed in the case of too many values in the arguments array.
assignSnapshot(lir, apply->bailoutKind());
@@ -675,12 +686,23 @@ void LIRGenerator::visitApplyArgsObj(MApplyArgsObj* apply) {
static_assert(CallTempReg2 != JSReturnReg_Type);
static_assert(CallTempReg2 != JSReturnReg_Data);
- LApplyArgsObj* lir = new (alloc()) LApplyArgsObj(
- useFixedAtStart(apply->getFunction(), CallTempReg3),
- useFixedAtStart(apply->getArgsObj(), CallTempReg0),
- useBoxFixedAtStart(apply->getThis(), CallTempReg4, CallTempReg5),
- tempFixed(CallTempReg1), // object register
- tempFixed(CallTempReg2)); // stack counter register
+ auto function = useFixedAtStart(apply->getFunction(), CallTempReg3);
+ auto argsObj = useFixedAtStart(apply->getArgsObj(), CallTempReg0);
+ auto thisValue =
+ useBoxFixedAtStart(apply->getThis(), CallTempReg4, CallTempReg5);
+ auto tempObj = tempFixed(CallTempReg1); // object register
+ auto tempCopy = tempFixed(CallTempReg2); // copy register
+
+ auto* target = apply->getSingleTarget();
+
+ LInstruction* lir;
+ if (target && target->isNativeWithoutJitEntry()) {
+ lir = new (alloc())
+ LApplyArgsObjNative(function, argsObj, thisValue, tempObj, tempCopy);
+ } else {
+ lir = new (alloc())
+ LApplyArgsObj(function, argsObj, thisValue, tempObj, tempCopy);
+ }
// Bailout is needed in the case of too many values in the arguments array.
assignSnapshot(lir, apply->bailoutKind());
@@ -696,12 +718,23 @@ void LIRGenerator::visitApplyArray(MApplyArray* apply) {
static_assert(CallTempReg2 != JSReturnReg_Type);
static_assert(CallTempReg2 != JSReturnReg_Data);
- LApplyArrayGeneric* lir = new (alloc()) LApplyArrayGeneric(
- useFixedAtStart(apply->getFunction(), CallTempReg3),
- useFixedAtStart(apply->getElements(), CallTempReg0),
- useBoxFixedAtStart(apply->getThis(), CallTempReg4, CallTempReg5),
- tempFixed(CallTempReg1), // object register
- tempFixed(CallTempReg2)); // stack counter register
+ auto function = useFixedAtStart(apply->getFunction(), CallTempReg3);
+ auto elements = useFixedAtStart(apply->getElements(), CallTempReg0);
+ auto thisValue =
+ useBoxFixedAtStart(apply->getThis(), CallTempReg4, CallTempReg5);
+ auto tempObj = tempFixed(CallTempReg1); // object register
+ auto tempCopy = tempFixed(CallTempReg2); // copy register
+
+ auto* target = apply->getSingleTarget();
+
+ LInstruction* lir;
+ if (target && target->isNativeWithoutJitEntry()) {
+ lir = new (alloc())
+ LApplyArrayNative(function, elements, thisValue, tempObj, tempCopy);
+ } else {
+ lir = new (alloc())
+ LApplyArrayGeneric(function, elements, thisValue, tempObj, tempCopy);
+ }
// Bailout is needed in the case of too many values in the array, or empty
// space at the end of the array.
@@ -721,12 +754,26 @@ void LIRGenerator::visitConstructArgs(MConstructArgs* mir) {
static_assert(CallTempReg2 != JSReturnReg_Type);
static_assert(CallTempReg2 != JSReturnReg_Data);
- auto* lir = new (alloc()) LConstructArgsGeneric(
- useFixedAtStart(mir->getFunction(), CallTempReg3),
- useFixedAtStart(mir->getArgc(), CallTempReg0),
- useFixedAtStart(mir->getNewTarget(), CallTempReg1),
- useBoxFixedAtStart(mir->getThis(), CallTempReg4, CallTempReg5),
- tempFixed(CallTempReg2));
+ auto function = useFixedAtStart(mir->getFunction(), CallTempReg3);
+ auto argc = useFixedAtStart(mir->getArgc(), CallTempReg0);
+ auto newTarget = useFixedAtStart(mir->getNewTarget(), CallTempReg1);
+ auto temp = tempFixed(CallTempReg2);
+
+ auto* target = mir->getSingleTarget();
+
+ LInstruction* lir;
+ if (target && target->isNativeWithoutJitEntry()) {
+ auto temp2 = tempFixed(CallTempReg4);
+
+ lir = new (alloc())
+ LConstructArgsNative(function, argc, newTarget, temp, temp2);
+ } else {
+ auto thisValue =
+ useBoxFixedAtStart(mir->getThis(), CallTempReg4, CallTempReg5);
+
+ lir = new (alloc())
+ LConstructArgsGeneric(function, argc, newTarget, thisValue, temp);
+ }
// Bailout is needed in the case of too many values in the arguments array.
assignSnapshot(lir, mir->bailoutKind());
@@ -745,12 +792,26 @@ void LIRGenerator::visitConstructArray(MConstructArray* mir) {
static_assert(CallTempReg2 != JSReturnReg_Type);
static_assert(CallTempReg2 != JSReturnReg_Data);
- auto* lir = new (alloc()) LConstructArrayGeneric(
- useFixedAtStart(mir->getFunction(), CallTempReg3),
- useFixedAtStart(mir->getElements(), CallTempReg0),
- useFixedAtStart(mir->getNewTarget(), CallTempReg1),
- useBoxFixedAtStart(mir->getThis(), CallTempReg4, CallTempReg5),
- tempFixed(CallTempReg2));
+ auto function = useFixedAtStart(mir->getFunction(), CallTempReg3);
+ auto elements = useFixedAtStart(mir->getElements(), CallTempReg0);
+ auto newTarget = useFixedAtStart(mir->getNewTarget(), CallTempReg1);
+ auto temp = tempFixed(CallTempReg2);
+
+ auto* target = mir->getSingleTarget();
+
+ LInstruction* lir;
+ if (target && target->isNativeWithoutJitEntry()) {
+ auto temp2 = tempFixed(CallTempReg4);
+
+ lir = new (alloc())
+ LConstructArrayNative(function, elements, newTarget, temp, temp2);
+ } else {
+ auto thisValue =
+ useBoxFixedAtStart(mir->getThis(), CallTempReg4, CallTempReg5);
+
+ lir = new (alloc())
+ LConstructArrayGeneric(function, elements, newTarget, thisValue, temp);
+ }
// Bailout is needed in the case of too many values in the array, or empty
// space at the end of the array.
@@ -3241,7 +3302,9 @@ void LIRGenerator::visitWasmAnyRefFromJSString(MWasmAnyRefFromJSString* ins) {
}
void LIRGenerator::visitWasmNewI31Ref(MWasmNewI31Ref* ins) {
- LWasmNewI31Ref* lir = new (alloc()) LWasmNewI31Ref(useRegister(ins->input()));
+ // If it's a constant, it will be put directly into the register.
+ LWasmNewI31Ref* lir =
+ new (alloc()) LWasmNewI31Ref(useRegisterOrConstant(ins->input()));
define(lir, ins);
}
@@ -4686,7 +4749,8 @@ void LIRGenerator::visitLoadScriptedProxyHandler(
MLoadScriptedProxyHandler* ins) {
LLoadScriptedProxyHandler* lir = new (alloc())
LLoadScriptedProxyHandler(useRegisterAtStart(ins->object()));
- defineBox(lir, ins);
+ assignSnapshot(lir, ins->bailoutKind());
+ define(lir, ins);
}
void LIRGenerator::visitIdToStringOrSymbol(MIdToStringOrSymbol* ins) {
@@ -6750,7 +6814,11 @@ void LIRGenerator::visitLoadWrapperTarget(MLoadWrapperTarget* ins) {
MDefinition* object = ins->object();
MOZ_ASSERT(object->type() == MIRType::Object);
- define(new (alloc()) LLoadWrapperTarget(useRegisterAtStart(object)), ins);
+ auto* lir = new (alloc()) LLoadWrapperTarget(useRegisterAtStart(object));
+ if (ins->fallible()) {
+ assignSnapshot(lir, ins->bailoutKind());
+ }
+ define(lir, ins);
}
void LIRGenerator::visitGuardHasGetterSetter(MGuardHasGetterSetter* ins) {
diff --git a/js/src/jit/MIR.cpp b/js/src/jit/MIR.cpp
index c6daecb166..a74406567b 100644
--- a/js/src/jit/MIR.cpp
+++ b/js/src/jit/MIR.cpp
@@ -689,7 +689,62 @@ MDefinition* MTest::foldsNeedlessControlFlow(TempAllocator& alloc) {
return MGoto::New(alloc, ifTrue());
}
+// If a test is dominated by either the true or false path of a previous test of
+// the same condition, then the test is redundant and can be converted into a
+// goto true or goto false, respectively.
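+//
+// For example, in |if (c) { if (c) { A } else { B } }|, the inner test is
+// dominated by the outer test's true branch and folds to a goto of its true
+// successor (A); a negated condition (|!c|) just flips the direction.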
+MDefinition* MTest::foldsRedundantTest(TempAllocator& alloc) {
+ MBasicBlock* myBlock = this->block();
+ MDefinition* originalInput = getOperand(0);
+
+ // Handle single and double negatives. This ensures that we do not miss a
+ // folding opportunity due to a condition being inverted.
+ MDefinition* newInput = input();
+ bool inverted = false;
+ if (originalInput->isNot()) {
+ newInput = originalInput->toNot()->input();
+ inverted = true;
+ if (originalInput->toNot()->input()->isNot()) {
+ newInput = originalInput->toNot()->input()->toNot()->input();
+ inverted = false;
+ }
+ }
+
+ // The specific order of traversal does not matter. If there are multiple
+ // dominating redundant tests, they will either agree on direction (in which
+ // case we prune the same way regardless of order), or they will disagree,
+ // in which case this block will eventually be marked entirely dead by the
+ // folding of the redundant parent.
+ for (MUseIterator i(newInput->usesBegin()), e(newInput->usesEnd()); i != e;
+ ++i) {
+ if (!i->consumer()->isDefinition()) {
+ continue;
+ }
+ if (!i->consumer()->toDefinition()->isTest()) {
+ continue;
+ }
+ MTest* otherTest = i->consumer()->toDefinition()->toTest();
+ if (otherTest == this) {
+ continue;
+ }
+
+ if (otherTest->ifFalse()->dominates(myBlock)) {
+ // This test cannot be true, so fold to a goto false.
+ return MGoto::New(alloc, inverted ? ifTrue() : ifFalse());
+ }
+ if (otherTest->ifTrue()->dominates(myBlock)) {
+ // This test cannot be false, so fold to a goto true.
+ return MGoto::New(alloc, inverted ? ifFalse() : ifTrue());
+ }
+ }
+
+ return nullptr;
+}
+
MDefinition* MTest::foldsTo(TempAllocator& alloc) {
+ if (MDefinition* def = foldsRedundantTest(alloc)) {
+ return def;
+ }
+
if (MDefinition* def = foldsDoubleNegation(alloc)) {
return def;
}
@@ -7187,6 +7242,16 @@ AliasSet MLoadWrapperTarget::getAliasSet() const {
return AliasSet::Load(AliasSet::Any);
}
+bool MLoadWrapperTarget::congruentTo(const MDefinition* ins) const {
+ if (!ins->isLoadWrapperTarget()) {
+ return false;
+ }
+ if (ins->toLoadWrapperTarget()->fallible() != fallible()) {
+ return false;
+ }
+ return congruentIfOperandsEqual(ins);
+}
+
AliasSet MGuardHasGetterSetter::getAliasSet() const {
return AliasSet::Load(AliasSet::ObjectFields);
}
diff --git a/js/src/jit/MIR.h b/js/src/jit/MIR.h
index d882665a65..c672092f04 100644
--- a/js/src/jit/MIR.h
+++ b/js/src/jit/MIR.h
@@ -1890,6 +1890,7 @@ class MTest : public MAryControlInstruction<1, 2>, public TestPolicy::Data {
MDefinition* foldsConstant(TempAllocator& alloc);
MDefinition* foldsTypes(TempAllocator& alloc);
MDefinition* foldsNeedlessControlFlow(TempAllocator& alloc);
+ MDefinition* foldsRedundantTest(TempAllocator& alloc);
MDefinition* foldsTo(TempAllocator& alloc) override;
#ifdef DEBUG
@@ -2309,7 +2310,7 @@ class WrappedFunction : public TempObject {
return nativeFun_->nativeUnchecked();
}
bool hasJitInfo() const {
- return flags_.isBuiltinNative() && nativeFun_->jitInfoUnchecked();
+ return flags_.canHaveJitInfo() && nativeFun_->jitInfoUnchecked();
}
const JSJitInfo* jitInfo() const {
MOZ_ASSERT(hasJitInfo());
diff --git a/js/src/jit/MIROps.yaml b/js/src/jit/MIROps.yaml
index 7f0df52742..78ab989221 100644
--- a/js/src/jit/MIROps.yaml
+++ b/js/src/jit/MIROps.yaml
@@ -539,7 +539,8 @@
- name: LoadScriptedProxyHandler
operands:
object: Object
- result_type: Value
+ result_type: Object
+ guard: true
congruent_to: if_operands_equal
alias_set: none
@@ -1421,8 +1422,6 @@
index: Int32
type_policy: none
alias_set: custom
- # By default no, unless built as a recovered instruction.
- can_recover: custom
# Load the function length. Bails for functions with lazy scripts or a
# resolved "length" property.
@@ -2810,13 +2809,16 @@
alias_set: none
# Load the target object from a proxy wrapper. The target is stored in the
-# proxy object's private slot.
+# proxy object's private slot. This operation is fallible if the proxy can
+# be revoked.
- name: LoadWrapperTarget
operands:
object: Object
+ arguments:
+ fallible: bool
result_type: Object
movable: true
- congruent_to: if_operands_equal
+ congruent_to: custom
# Can't use |AliasSet::None| because the target changes on navigation.
# TODO: Investigate using a narrower or a custom alias set.
alias_set: custom
diff --git a/js/src/jit/MacroAssembler.cpp b/js/src/jit/MacroAssembler.cpp
index 3b094d49dc..9fc4b96830 100644
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -3169,6 +3169,8 @@ void MacroAssembler::emitMegamorphicCachedSetSlot(
passABIArg(scratch2);
callWithABI<Fn, NativeObject::growSlotsPure>();
storeCallPointerResult(scratch2);
+
+ MOZ_ASSERT(!save.has(scratch2));
PopRegsInMask(save);
branchIfFalseBool(scratch2, &cacheMiss);
@@ -7803,6 +7805,24 @@ void MacroAssembler::loadArgumentsObjectLength(Register obj, Register output,
rshift32(Imm32(ArgumentsObject::PACKED_BITS_COUNT), output);
}
+void MacroAssembler::loadArgumentsObjectLength(Register obj, Register output) {
+ // Get initial length value.
+ unboxInt32(Address(obj, ArgumentsObject::getInitialLengthSlotOffset()),
+ output);
+
+#ifdef DEBUG
+ // Assert length hasn't been overridden.
+ Label ok;
+ branchTest32(Assembler::Zero, output,
+ Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT), &ok);
+ assumeUnreachable("arguments object length has been overridden");
+ bind(&ok);
+#endif
+
+ // Shift out arguments length and return it.
+ rshift32(Imm32(ArgumentsObject::PACKED_BITS_COUNT), output);
+}
+
void MacroAssembler::branchTestArgumentsObjectFlags(Register obj, Register temp,
uint32_t flags,
Condition cond,
diff --git a/js/src/jit/MacroAssembler.h b/js/src/jit/MacroAssembler.h
index 361de3ac5f..114aaa47d7 100644
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -5291,6 +5291,7 @@ class MacroAssembler : public MacroAssemblerSpecific {
Label* fail);
void loadArgumentsObjectLength(Register obj, Register output, Label* fail);
+ void loadArgumentsObjectLength(Register obj, Register output);
void branchTestArgumentsObjectFlags(Register obj, Register temp,
uint32_t flags, Condition cond,
diff --git a/js/src/jit/PerfSpewer.cpp b/js/src/jit/PerfSpewer.cpp
index 81954f3d92..c9d9cc8d88 100644
--- a/js/src/jit/PerfSpewer.cpp
+++ b/js/src/jit/PerfSpewer.cpp
@@ -23,7 +23,7 @@
# define gettid() static_cast<pid_t>(syscall(__NR_gettid))
#endif
-#if defined(JS_ION_PERF) && (defined(ANDROID) || defined(XP_MACOSX))
+#if defined(JS_ION_PERF) && (defined(ANDROID) || defined(XP_DARWIN))
# include <limits.h>
# include <stdlib.h>
# include <unistd.h>
@@ -42,7 +42,7 @@ char* get_current_dir_name() {
}
#endif
-#if defined(JS_ION_PERF) && defined(XP_MACOSX)
+#if defined(JS_ION_PERF) && defined(XP_DARWIN)
# include <pthread.h>
# include <unistd.h>
@@ -128,7 +128,7 @@ static uint64_t GetMonotonicTimestamp() {
return TimeStamp::Now().RawClockMonotonicNanosecondsSinceBoot();
# elif XP_WIN
return TimeStamp::Now().RawQueryPerformanceCounterValue().value();
-# elif XP_MACOSX
+# elif XP_DARWIN
return TimeStamp::Now().RawMachAbsoluteTimeNanoseconds();
# else
MOZ_CRASH("no timestamp");
diff --git a/js/src/jit/ProcessExecutableMemory.cpp b/js/src/jit/ProcessExecutableMemory.cpp
index 830d15f7fb..0c00b17c73 100644
--- a/js/src/jit/ProcessExecutableMemory.cpp
+++ b/js/src/jit/ProcessExecutableMemory.cpp
@@ -46,6 +46,10 @@
# include <valgrind/valgrind.h>
#endif
+#if defined(XP_IOS)
+# include <BrowserEngineCore/BEMemory.h>
+#endif
+
using namespace js;
using namespace js::jit;
@@ -990,11 +994,19 @@ bool js::jit::ReprotectRegion(void* start, size_t size,
#ifdef JS_USE_APPLE_FAST_WX
void js::jit::AutoMarkJitCodeWritableForThread::markExecutable(
bool executable) {
+# if defined(XP_IOS)
+ if (executable) {
+ be_memory_inline_jit_restrict_rwx_to_rx_with_witness();
+ } else {
+ be_memory_inline_jit_restrict_rwx_to_rw_with_witness();
+ }
+# else
if (__builtin_available(macOS 11.0, *)) {
pthread_jit_write_protect_np(executable);
} else {
MOZ_CRASH("pthread_jit_write_protect_np must be available");
}
+# endif
}
#endif
diff --git a/js/src/jit/Recover.cpp b/js/src/jit/Recover.cpp
index 220ffe7bb2..4c1ff56436 100644
--- a/js/src/jit/Recover.cpp
+++ b/js/src/jit/Recover.cpp
@@ -6,6 +6,8 @@
#include "jit/Recover.h"
+#include "mozilla/Casting.h"
+
#include "jsmath.h"
#include "builtin/Object.h"
@@ -495,16 +497,15 @@ bool MBigIntAdd::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntAdd::RBigIntAdd(CompactBufferReader& reader) {}
bool RBigIntAdd::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue lhs(cx, iter.read());
- RootedValue rhs(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> lhs(cx, iter.readBigInt());
+ Rooted<BigInt*> rhs(cx, iter.readBigInt());
- MOZ_ASSERT(lhs.isBigInt() && rhs.isBigInt());
- if (!js::AddValues(cx, &lhs, &rhs, &result)) {
+ BigInt* result = BigInt::add(cx, lhs, rhs);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -517,16 +518,15 @@ bool MBigIntSub::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntSub::RBigIntSub(CompactBufferReader& reader) {}
bool RBigIntSub::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue lhs(cx, iter.read());
- RootedValue rhs(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> lhs(cx, iter.readBigInt());
+ Rooted<BigInt*> rhs(cx, iter.readBigInt());
- MOZ_ASSERT(lhs.isBigInt() && rhs.isBigInt());
- if (!js::SubValues(cx, &lhs, &rhs, &result)) {
+ BigInt* result = BigInt::sub(cx, lhs, rhs);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -539,16 +539,15 @@ bool MBigIntMul::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntMul::RBigIntMul(CompactBufferReader& reader) {}
bool RBigIntMul::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue lhs(cx, iter.read());
- RootedValue rhs(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> lhs(cx, iter.readBigInt());
+ Rooted<BigInt*> rhs(cx, iter.readBigInt());
- MOZ_ASSERT(lhs.isBigInt() && rhs.isBigInt());
- if (!js::MulValues(cx, &lhs, &rhs, &result)) {
+ BigInt* result = BigInt::mul(cx, lhs, rhs);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -561,18 +560,17 @@ bool MBigIntDiv::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntDiv::RBigIntDiv(CompactBufferReader& reader) {}
bool RBigIntDiv::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue lhs(cx, iter.read());
- RootedValue rhs(cx, iter.read());
- RootedValue result(cx);
-
- MOZ_ASSERT(lhs.isBigInt() && rhs.isBigInt());
- MOZ_ASSERT(!rhs.toBigInt()->isZero(),
+ Rooted<BigInt*> lhs(cx, iter.readBigInt());
+ Rooted<BigInt*> rhs(cx, iter.readBigInt());
+ MOZ_ASSERT(!rhs->isZero(),
"division by zero throws and therefore can't be recovered");
- if (!js::DivValues(cx, &lhs, &rhs, &result)) {
+
+ BigInt* result = BigInt::div(cx, lhs, rhs);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -585,18 +583,17 @@ bool MBigIntMod::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntMod::RBigIntMod(CompactBufferReader& reader) {}
bool RBigIntMod::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue lhs(cx, iter.read());
- RootedValue rhs(cx, iter.read());
- RootedValue result(cx);
-
- MOZ_ASSERT(lhs.isBigInt() && rhs.isBigInt());
- MOZ_ASSERT(!rhs.toBigInt()->isZero(),
+ Rooted<BigInt*> lhs(cx, iter.readBigInt());
+ Rooted<BigInt*> rhs(cx, iter.readBigInt());
+ MOZ_ASSERT(!rhs->isZero(),
"division by zero throws and therefore can't be recovered");
- if (!js::ModValues(cx, &lhs, &rhs, &result)) {
+
+ BigInt* result = BigInt::mod(cx, lhs, rhs);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -609,18 +606,17 @@ bool MBigIntPow::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntPow::RBigIntPow(CompactBufferReader& reader) {}
bool RBigIntPow::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue lhs(cx, iter.read());
- RootedValue rhs(cx, iter.read());
- RootedValue result(cx);
-
- MOZ_ASSERT(lhs.isBigInt() && rhs.isBigInt());
- MOZ_ASSERT(!rhs.toBigInt()->isNegative(),
+ Rooted<BigInt*> lhs(cx, iter.readBigInt());
+ Rooted<BigInt*> rhs(cx, iter.readBigInt());
+ MOZ_ASSERT(!rhs->isNegative(),
"negative exponent throws and therefore can't be recovered");
- if (!js::PowValues(cx, &lhs, &rhs, &result)) {
+
+ BigInt* result = BigInt::pow(cx, lhs, rhs);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -633,16 +629,15 @@ bool MBigIntBitAnd::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntBitAnd::RBigIntBitAnd(CompactBufferReader& reader) {}
bool RBigIntBitAnd::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue lhs(cx, iter.read());
- RootedValue rhs(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> lhs(cx, iter.readBigInt());
+ Rooted<BigInt*> rhs(cx, iter.readBigInt());
- MOZ_ASSERT(lhs.isBigInt() && rhs.isBigInt());
- if (!js::BitAnd(cx, &lhs, &rhs, &result)) {
+ BigInt* result = BigInt::bitAnd(cx, lhs, rhs);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -655,16 +650,15 @@ bool MBigIntBitOr::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntBitOr::RBigIntBitOr(CompactBufferReader& reader) {}
bool RBigIntBitOr::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue lhs(cx, iter.read());
- RootedValue rhs(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> lhs(cx, iter.readBigInt());
+ Rooted<BigInt*> rhs(cx, iter.readBigInt());
- MOZ_ASSERT(lhs.isBigInt() && rhs.isBigInt());
- if (!js::BitOr(cx, &lhs, &rhs, &result)) {
+ BigInt* result = BigInt::bitOr(cx, lhs, rhs);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -677,16 +671,15 @@ bool MBigIntBitXor::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntBitXor::RBigIntBitXor(CompactBufferReader& reader) {}
bool RBigIntBitXor::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue lhs(cx, iter.read());
- RootedValue rhs(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> lhs(cx, iter.readBigInt());
+ Rooted<BigInt*> rhs(cx, iter.readBigInt());
- MOZ_ASSERT(lhs.isBigInt() && rhs.isBigInt());
- if (!js::BitXor(cx, &lhs, &rhs, &result)) {
+ BigInt* result = BigInt::bitXor(cx, lhs, rhs);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -699,16 +692,15 @@ bool MBigIntLsh::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntLsh::RBigIntLsh(CompactBufferReader& reader) {}
bool RBigIntLsh::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue lhs(cx, iter.read());
- RootedValue rhs(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> lhs(cx, iter.readBigInt());
+ Rooted<BigInt*> rhs(cx, iter.readBigInt());
- MOZ_ASSERT(lhs.isBigInt() && rhs.isBigInt());
- if (!js::BitLsh(cx, &lhs, &rhs, &result)) {
+ BigInt* result = BigInt::lsh(cx, lhs, rhs);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -721,16 +713,15 @@ bool MBigIntRsh::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntRsh::RBigIntRsh(CompactBufferReader& reader) {}
bool RBigIntRsh::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue lhs(cx, iter.read());
- RootedValue rhs(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> lhs(cx, iter.readBigInt());
+ Rooted<BigInt*> rhs(cx, iter.readBigInt());
- MOZ_ASSERT(lhs.isBigInt() && rhs.isBigInt());
- if (!js::BitRsh(cx, &lhs, &rhs, &result)) {
+ BigInt* result = BigInt::rsh(cx, lhs, rhs);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -743,15 +734,14 @@ bool MBigIntIncrement::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntIncrement::RBigIntIncrement(CompactBufferReader& reader) {}
bool RBigIntIncrement::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue operand(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> operand(cx, iter.readBigInt());
- MOZ_ASSERT(operand.isBigInt());
- if (!js::IncOperation(cx, operand, &result)) {
+ BigInt* result = BigInt::inc(cx, operand);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -764,15 +754,14 @@ bool MBigIntDecrement::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntDecrement::RBigIntDecrement(CompactBufferReader& reader) {}
bool RBigIntDecrement::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue operand(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> operand(cx, iter.readBigInt());
- MOZ_ASSERT(operand.isBigInt());
- if (!js::DecOperation(cx, operand, &result)) {
+ BigInt* result = BigInt::dec(cx, operand);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -785,15 +774,14 @@ bool MBigIntNegate::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntNegate::RBigIntNegate(CompactBufferReader& reader) {}
bool RBigIntNegate::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue operand(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> operand(cx, iter.readBigInt());
- MOZ_ASSERT(operand.isBigInt());
- if (!js::NegOperation(cx, &operand, &result)) {
+ BigInt* result = BigInt::neg(cx, operand);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -806,15 +794,14 @@ bool MBigIntBitNot::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntBitNot::RBigIntBitNot(CompactBufferReader& reader) {}
bool RBigIntBitNot::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedValue operand(cx, iter.read());
- RootedValue result(cx);
+ Rooted<BigInt*> operand(cx, iter.readBigInt());
- MOZ_ASSERT(operand.isBigInt());
- if (!js::BitNot(cx, &operand, &result)) {
+ BigInt* result = BigInt::bitNot(cx, operand);
+ if (!result) {
return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(BigIntValue(result));
return true;
}
@@ -910,7 +897,7 @@ bool RConcat::recover(JSContext* cx, SnapshotIterator& iter) const {
RStringLength::RStringLength(CompactBufferReader& reader) {}
bool RStringLength::recover(JSContext* cx, SnapshotIterator& iter) const {
- JSString* string = iter.read().toString();
+ JSString* string = iter.readString();
static_assert(JSString::MAX_LENGTH <= INT32_MAX,
"Can cast string length to int32_t");
@@ -953,7 +940,7 @@ bool MFloor::writeRecoverData(CompactBufferWriter& writer) const {
RFloor::RFloor(CompactBufferReader& reader) {}
bool RFloor::recover(JSContext* cx, SnapshotIterator& iter) const {
- double num = iter.read().toNumber();
+ double num = iter.readNumber();
double result = js::math_floor_impl(num);
iter.storeInstructionResult(NumberValue(result));
@@ -969,7 +956,7 @@ bool MCeil::writeRecoverData(CompactBufferWriter& writer) const {
RCeil::RCeil(CompactBufferReader& reader) {}
bool RCeil::recover(JSContext* cx, SnapshotIterator& iter) const {
- double num = iter.read().toNumber();
+ double num = iter.readNumber();
double result = js::math_ceil_impl(num);
iter.storeInstructionResult(NumberValue(result));
@@ -985,7 +972,7 @@ bool MRound::writeRecoverData(CompactBufferWriter& writer) const {
RRound::RRound(CompactBufferReader& reader) {}
bool RRound::recover(JSContext* cx, SnapshotIterator& iter) const {
- double num = iter.read().toNumber();
+ double num = iter.readNumber();
double result = js::math_round_impl(num);
iter.storeInstructionResult(NumberValue(result));
@@ -1001,7 +988,7 @@ bool MTrunc::writeRecoverData(CompactBufferWriter& writer) const {
RTrunc::RTrunc(CompactBufferReader& reader) {}
bool RTrunc::recover(JSContext* cx, SnapshotIterator& iter) const {
- double num = iter.read().toNumber();
+ double num = iter.readNumber();
double result = js::math_trunc_impl(num);
iter.storeInstructionResult(NumberValue(result));
@@ -1017,21 +1004,18 @@ bool MCharCodeAt::writeRecoverData(CompactBufferWriter& writer) const {
RCharCodeAt::RCharCodeAt(CompactBufferReader& reader) {}
bool RCharCodeAt::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedString string(cx, iter.read().toString());
- int32_t index = iter.read().toInt32();
+ JSString* string = iter.readString();
- RootedValue result(cx);
- if (0 <= index && size_t(index) < string->length()) {
- char16_t c;
- if (!string->getChar(cx, index, &c)) {
- return false;
- }
- result.setInt32(c);
- } else {
- result.setNaN();
+ // Int32 because |index| is computed from MBoundsCheck.
+ int32_t index = iter.readInt32();
+ MOZ_RELEASE_ASSERT(0 <= index && size_t(index) < string->length());
+
+ char16_t c;
+ if (!string->getChar(cx, index, &c)) {
+ return false;
}
- iter.storeInstructionResult(result);
+ iter.storeInstructionResult(Int32Value(c));
return true;
}
@@ -1044,7 +1028,8 @@ bool MFromCharCode::writeRecoverData(CompactBufferWriter& writer) const {
RFromCharCode::RFromCharCode(CompactBufferReader& reader) {}
bool RFromCharCode::recover(JSContext* cx, SnapshotIterator& iter) const {
- int32_t charCode = iter.read().toInt32();
+ // Number because |charCode| is computed from (recoverable) user input.
+ int32_t charCode = JS::ToInt32(iter.readNumber());
JSString* str = StringFromCharCode(cx, charCode);
if (!str) {
@@ -1068,7 +1053,8 @@ RFromCharCodeEmptyIfNegative::RFromCharCodeEmptyIfNegative(
bool RFromCharCodeEmptyIfNegative::recover(JSContext* cx,
SnapshotIterator& iter) const {
- int32_t charCode = iter.read().toInt32();
+ // Int32 because |charCode| is computed from MCharCodeAtOrNegative.
+ int32_t charCode = iter.readInt32();
JSString* str;
if (charCode < 0) {
@@ -1093,8 +1079,8 @@ bool MPow::writeRecoverData(CompactBufferWriter& writer) const {
RPow::RPow(CompactBufferReader& reader) {}
bool RPow::recover(JSContext* cx, SnapshotIterator& iter) const {
- double base = iter.read().toNumber();
- double power = iter.read().toNumber();
+ double base = iter.readNumber();
+ double power = iter.readNumber();
double result = ecmaPow(base, power);
iter.storeInstructionResult(NumberValue(result));
@@ -1110,7 +1096,7 @@ bool MPowHalf::writeRecoverData(CompactBufferWriter& writer) const {
RPowHalf::RPowHalf(CompactBufferReader& reader) {}
bool RPowHalf::recover(JSContext* cx, SnapshotIterator& iter) const {
- double base = iter.read().toNumber();
+ double base = iter.readNumber();
double power = 0.5;
double result = ecmaPow(base, power);
@@ -1128,8 +1114,8 @@ bool MMinMax::writeRecoverData(CompactBufferWriter& writer) const {
RMinMax::RMinMax(CompactBufferReader& reader) { isMax_ = reader.readByte(); }
bool RMinMax::recover(JSContext* cx, SnapshotIterator& iter) const {
- double x = iter.read().toNumber();
- double y = iter.read().toNumber();
+ double x = iter.readNumber();
+ double y = iter.readNumber();
double result;
if (isMax_) {
@@ -1151,7 +1137,7 @@ bool MAbs::writeRecoverData(CompactBufferWriter& writer) const {
RAbs::RAbs(CompactBufferReader& reader) {}
bool RAbs::recover(JSContext* cx, SnapshotIterator& iter) const {
- double num = iter.read().toNumber();
+ double num = iter.readNumber();
double result = js::math_abs_impl(num);
iter.storeInstructionResult(NumberValue(result));
@@ -1170,7 +1156,7 @@ RSqrt::RSqrt(CompactBufferReader& reader) {
}
bool RSqrt::recover(JSContext* cx, SnapshotIterator& iter) const {
- double num = iter.read().toNumber();
+ double num = iter.readNumber();
double result = js::math_sqrt_impl(num);
// MIRType::Float32 is a specialization embedding the fact that the result is
@@ -1192,8 +1178,8 @@ bool MAtan2::writeRecoverData(CompactBufferWriter& writer) const {
RAtan2::RAtan2(CompactBufferReader& reader) {}
bool RAtan2::recover(JSContext* cx, SnapshotIterator& iter) const {
- double y = iter.read().toNumber();
- double x = iter.read().toNumber();
+ double y = iter.readNumber();
+ double x = iter.readNumber();
double result = js::ecmaAtan2(y, x);
iter.storeInstructionResult(DoubleValue(result));
@@ -1218,7 +1204,7 @@ bool RHypot::recover(JSContext* cx, SnapshotIterator& iter) const {
}
for (uint32_t i = 0; i < numOperands_; ++i) {
- vec.infallibleAppend(iter.read());
+ vec.infallibleAppend(NumberValue(iter.readNumber()));
}
RootedValue result(cx);
@@ -1265,7 +1251,7 @@ bool MSign::writeRecoverData(CompactBufferWriter& writer) const {
RSign::RSign(CompactBufferReader& reader) {}
bool RSign::recover(JSContext* cx, SnapshotIterator& iter) const {
- double num = iter.read().toNumber();
+ double num = iter.readNumber();
double result = js::math_sign_impl(num);
iter.storeInstructionResult(NumberValue(result));
@@ -1322,7 +1308,7 @@ RMathFunction::RMathFunction(CompactBufferReader& reader) {
}
bool RMathFunction::recover(JSContext* cx, SnapshotIterator& iter) const {
- double num = iter.read().toNumber();
+ double num = iter.readNumber();
double result;
switch (function_) {
@@ -1431,8 +1417,8 @@ bool MStringSplit::writeRecoverData(CompactBufferWriter& writer) const {
RStringSplit::RStringSplit(CompactBufferReader& reader) {}
bool RStringSplit::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedString str(cx, iter.read().toString());
- RootedString sep(cx, iter.read().toString());
+ RootedString str(cx, iter.readString());
+ RootedString sep(cx, iter.readString());
JSObject* res = StringSplitString(cx, str, sep, INT32_MAX);
if (!res) {
@@ -1452,7 +1438,7 @@ bool MNaNToZero::writeRecoverData(CompactBufferWriter& writer) const {
RNaNToZero::RNaNToZero(CompactBufferReader& reader) {}
bool RNaNToZero::recover(JSContext* cx, SnapshotIterator& iter) const {
- double v = iter.read().toNumber();
+ double v = iter.readNumber();
if (std::isnan(v) || mozilla::IsNegativeZero(v)) {
v = 0.0;
}
@@ -1470,9 +1456,11 @@ bool MRegExpMatcher::writeRecoverData(CompactBufferWriter& writer) const {
RRegExpMatcher::RRegExpMatcher(CompactBufferReader& reader) {}
bool RRegExpMatcher::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedObject regexp(cx, &iter.read().toObject());
- RootedString input(cx, iter.read().toString());
- int32_t lastIndex = iter.read().toInt32();
+ RootedObject regexp(cx, iter.readObject());
+ RootedString input(cx, iter.readString());
+
+ // Int32 because |lastIndex| is computed from a transpiled self-hosted call.
+ int32_t lastIndex = iter.readInt32();
RootedValue result(cx);
if (!RegExpMatcherRaw(cx, regexp, input, lastIndex, nullptr, &result)) {
@@ -1507,7 +1495,8 @@ bool MTypeOfName::writeRecoverData(CompactBufferWriter& writer) const {
RTypeOfName::RTypeOfName(CompactBufferReader& reader) {}
bool RTypeOfName::recover(JSContext* cx, SnapshotIterator& iter) const {
- int32_t type = iter.read().toInt32();
+ // Int32 because |type| is computed from MTypeOf.
+ int32_t type = iter.readInt32();
MOZ_ASSERT(JSTYPE_UNDEFINED <= type && type < JSTYPE_LIMIT);
JSString* name = TypeName(JSType(type), *cx->runtime()->commonNames);
@@ -1548,7 +1537,7 @@ bool MToFloat32::writeRecoverData(CompactBufferWriter& writer) const {
RToFloat32::RToFloat32(CompactBufferReader& reader) {}
bool RToFloat32::recover(JSContext* cx, SnapshotIterator& iter) const {
- double num = iter.read().toNumber();
+ double num = iter.readNumber();
double result = js::RoundFloat32(num);
iter.storeInstructionResult(DoubleValue(result));
@@ -1588,7 +1577,7 @@ bool MNewObject::writeRecoverData(CompactBufferWriter& writer) const {
RNewObject::RNewObject(CompactBufferReader& reader) {}
bool RNewObject::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedObject templateObject(cx, &iter.read().toObject());
+ RootedObject templateObject(cx, iter.readObject());
// See CodeGenerator::visitNewObjectVMCall.
// Note that recover instructions are only used if mode == ObjectCreate.
@@ -1622,8 +1611,7 @@ RNewPlainObject::RNewPlainObject(CompactBufferReader& reader) {
}
bool RNewPlainObject::recover(JSContext* cx, SnapshotIterator& iter) const {
- Rooted<SharedShape*> shape(cx,
- &iter.read().toGCCellPtr().as<Shape>().asShared());
+ Rooted<SharedShape*> shape(cx, &iter.readGCCellPtr().as<Shape>().asShared());
// See CodeGenerator::visitNewPlainObject.
JSObject* resultObject =
@@ -1676,7 +1664,7 @@ bool MNewTypedArray::writeRecoverData(CompactBufferWriter& writer) const {
RNewTypedArray::RNewTypedArray(CompactBufferReader& reader) {}
bool RNewTypedArray::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedObject templateObject(cx, &iter.read().toObject());
+ RootedObject templateObject(cx, iter.readObject());
size_t length = templateObject.as<FixedLengthTypedArrayObject>()->length();
MOZ_ASSERT(length <= INT32_MAX,
@@ -1704,7 +1692,7 @@ RNewArray::RNewArray(CompactBufferReader& reader) {
}
bool RNewArray::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedObject templateObject(cx, &iter.read().toObject());
+ RootedObject templateObject(cx, iter.readObject());
Rooted<Shape*> shape(cx, templateObject->shape());
ArrayObject* resultObject = NewArrayWithShape(cx, count_, shape);
@@ -1728,7 +1716,7 @@ RNewIterator::RNewIterator(CompactBufferReader& reader) {
}
bool RNewIterator::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedObject templateObject(cx, &iter.read().toObject());
+ RootedObject templateObject(cx, iter.readObject());
JSObject* resultObject = nullptr;
switch (MNewIterator::Type(type_)) {
@@ -1760,8 +1748,8 @@ bool MLambda::writeRecoverData(CompactBufferWriter& writer) const {
RLambda::RLambda(CompactBufferReader& reader) {}
bool RLambda::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedObject scopeChain(cx, &iter.read().toObject());
- RootedFunction fun(cx, &iter.read().toObject().as<JSFunction>());
+ RootedObject scopeChain(cx, iter.readObject());
+ RootedFunction fun(cx, &iter.readObject()->as<JSFunction>());
JSObject* resultObject = js::Lambda(cx, fun, scopeChain);
if (!resultObject) {
@@ -1781,9 +1769,9 @@ bool MFunctionWithProto::writeRecoverData(CompactBufferWriter& writer) const {
RFunctionWithProto::RFunctionWithProto(CompactBufferReader& reader) {}
bool RFunctionWithProto::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedObject scopeChain(cx, &iter.read().toObject());
- RootedObject prototype(cx, &iter.read().toObject());
- RootedFunction fun(cx, &iter.read().toObject().as<JSFunction>());
+ RootedObject scopeChain(cx, iter.readObject());
+ RootedObject prototype(cx, iter.readObject());
+ RootedFunction fun(cx, &iter.readObject()->as<JSFunction>());
JSObject* resultObject =
js::FunWithProtoOperation(cx, fun, scopeChain, prototype);
@@ -1804,7 +1792,7 @@ bool MNewCallObject::writeRecoverData(CompactBufferWriter& writer) const {
RNewCallObject::RNewCallObject(CompactBufferReader& reader) {}
bool RNewCallObject::recover(JSContext* cx, SnapshotIterator& iter) const {
- Rooted<CallObject*> templateObj(cx, &iter.read().toObject().as<CallObject>());
+ Rooted<CallObject*> templateObj(cx, &iter.readObject()->as<CallObject>());
Rooted<SharedShape*> shape(cx, templateObj->sharedShape());
@@ -1832,7 +1820,7 @@ bool MObjectKeys::writeRecoverData(CompactBufferWriter& writer) const {
RObjectKeys::RObjectKeys(CompactBufferReader& reader) {}
bool RObjectKeys::recover(JSContext* cx, SnapshotIterator& iter) const {
- Rooted<JSObject*> obj(cx, &iter.read().toObject());
+ Rooted<JSObject*> obj(cx, iter.readObject());
JSObject* resultKeys = ObjectKeys(cx, obj);
if (!resultKeys) {
@@ -1855,7 +1843,7 @@ RObjectState::RObjectState(CompactBufferReader& reader) {
}
bool RObjectState::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedObject object(cx, &iter.read().toObject());
+ RootedObject object(cx, iter.readObject());
Handle<NativeObject*> nativeObject = object.as<NativeObject>();
MOZ_ASSERT(!Watchtower::watchesPropertyModification(nativeObject));
MOZ_ASSERT(nativeObject->slotSpan() == numSlots());
@@ -1881,8 +1869,10 @@ RArrayState::RArrayState(CompactBufferReader& reader) {
}
bool RArrayState::recover(JSContext* cx, SnapshotIterator& iter) const {
- ArrayObject* object = &iter.read().toObject().as<ArrayObject>();
- uint32_t initLength = iter.read().toInt32();
+ ArrayObject* object = &iter.readObject()->as<ArrayObject>();
+
+ // Int32 because |initLength| is computed from MConstant.
+ uint32_t initLength = iter.readInt32();
MOZ_ASSERT(object->getDenseInitializedLength() == 0,
"initDenseElement call below relies on this");
@@ -1903,37 +1893,6 @@ bool RArrayState::recover(JSContext* cx, SnapshotIterator& iter) const {
return true;
}
-bool MSetArrayLength::writeRecoverData(CompactBufferWriter& writer) const {
- MOZ_ASSERT(canRecoverOnBailout());
- // For simplicity, we capture directly the object instead of the elements
- // pointer.
- MOZ_ASSERT(elements()->type() != MIRType::Elements);
- writer.writeUnsigned(uint32_t(RInstruction::Recover_SetArrayLength));
- return true;
-}
-
-bool MSetArrayLength::canRecoverOnBailout() const {
- return isRecoveredOnBailout();
-}
-
-RSetArrayLength::RSetArrayLength(CompactBufferReader& reader) {}
-
-bool RSetArrayLength::recover(JSContext* cx, SnapshotIterator& iter) const {
- Rooted<ArrayObject*> obj(cx, &iter.read().toObject().as<ArrayObject>());
- RootedValue len(cx, iter.read());
-
- RootedId id(cx, NameToId(cx->names().length));
- Rooted<PropertyDescriptor> desc(
- cx, PropertyDescriptor::Data(len, JS::PropertyAttribute::Writable));
- ObjectOpResult error;
- if (!ArraySetLength(cx, obj, id, desc, error)) {
- return false;
- }
-
- iter.storeInstructionResult(ObjectValue(*obj));
- return true;
-}
-
bool MAssertRecoveredOnBailout::writeRecoverData(
CompactBufferWriter& writer) const {
MOZ_ASSERT(canRecoverOnBailout());
@@ -1966,9 +1925,9 @@ RStringReplace::RStringReplace(CompactBufferReader& reader) {
}
bool RStringReplace::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedString string(cx, iter.read().toString());
- RootedString pattern(cx, iter.read().toString());
- RootedString replace(cx, iter.read().toString());
+ RootedString string(cx, iter.readString());
+ RootedString pattern(cx, iter.readString());
+ RootedString replace(cx, iter.readString());
JSString* result =
isFlatReplacement_
@@ -1992,9 +1951,20 @@ bool MSubstr::writeRecoverData(CompactBufferWriter& writer) const {
RSubstr::RSubstr(CompactBufferReader& reader) {}
bool RSubstr::recover(JSContext* cx, SnapshotIterator& iter) const {
- RootedString str(cx, iter.read().toString());
- int32_t begin = iter.read().toInt32();
- int32_t length = iter.read().toInt32();
+ RootedString str(cx, iter.readString());
+
+ // Int32 because |begin| is computed from MStringTrimStartIndex, MConstant,
+ // or CallSubstringKernelResult.
+ int32_t begin = iter.readInt32();
+
+ // |length| is computed from MSub(truncated), MStringTrimEndIndex, or
+ // CallSubstringKernelResult. The current MSub inputs won't overflow, so when
+ // RSub recovers the MSub instruction, the input will be representable as an
+ // Int32. This is only true as long as RSub calls |js::SubOperation|, which in
+ // turn calls |JS::Value::setNumber|. We don't want to rely on this exact call
+ // sequence, so instead use |readNumber| here and then release-assert the
+ // number is exactly representable as an Int32.
+ int32_t length = mozilla::ReleaseAssertedCast<int32_t>(iter.readNumber());
JSString* result = SubstringKernel(cx, str, begin, length);
if (!result) {
@@ -2014,10 +1984,11 @@ bool MAtomicIsLockFree::writeRecoverData(CompactBufferWriter& writer) const {
RAtomicIsLockFree::RAtomicIsLockFree(CompactBufferReader& reader) {}
bool RAtomicIsLockFree::recover(JSContext* cx, SnapshotIterator& iter) const {
- Value operand = iter.read();
- MOZ_ASSERT(operand.isInt32());
+ double dsize = JS::ToInteger(iter.readNumber());
- bool result = AtomicOperations::isLockfreeJS(operand.toInt32());
+ int32_t size;
+ bool result = mozilla::NumberEqualsInt32(dsize, &size) &&
+ AtomicOperations::isLockfreeJS(size);
iter.storeInstructionResult(BooleanValue(result));
return true;
}
@@ -2031,10 +2002,12 @@ bool MBigIntAsIntN::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntAsIntN::RBigIntAsIntN(CompactBufferReader& reader) {}
bool RBigIntAsIntN::recover(JSContext* cx, SnapshotIterator& iter) const {
- int32_t bits = iter.read().toInt32();
- RootedBigInt input(cx, iter.read().toBigInt());
-
+ // Int32 because |bits| is computed from MGuardInt32IsNonNegative.
+ int32_t bits = iter.readInt32();
MOZ_ASSERT(bits >= 0);
+
+ RootedBigInt input(cx, iter.readBigInt());
+
BigInt* result = BigInt::asIntN(cx, input, bits);
if (!result) {
return false;
@@ -2053,10 +2026,12 @@ bool MBigIntAsUintN::writeRecoverData(CompactBufferWriter& writer) const {
RBigIntAsUintN::RBigIntAsUintN(CompactBufferReader& reader) {}
bool RBigIntAsUintN::recover(JSContext* cx, SnapshotIterator& iter) const {
- int32_t bits = iter.read().toInt32();
- RootedBigInt input(cx, iter.read().toBigInt());
-
+ // Int32 because |bits| is computed from MGuardInt32IsNonNegative.
+ int32_t bits = iter.readInt32();
MOZ_ASSERT(bits >= 0);
+
+ RootedBigInt input(cx, iter.readBigInt());
+
BigInt* result = BigInt::asUintN(cx, input, bits);
if (!result) {
return false;
@@ -2077,7 +2052,7 @@ RCreateArgumentsObject::RCreateArgumentsObject(CompactBufferReader& reader) {}
bool RCreateArgumentsObject::recover(JSContext* cx,
SnapshotIterator& iter) const {
- RootedObject callObject(cx, &iter.read().toObject());
+ RootedObject callObject(cx, iter.readObject());
RootedObject result(
cx, ArgumentsObject::createForIon(cx, iter.frame(), callObject));
if (!result) {
@@ -2104,8 +2079,8 @@ RCreateInlinedArgumentsObject::RCreateInlinedArgumentsObject(
bool RCreateInlinedArgumentsObject::recover(JSContext* cx,
SnapshotIterator& iter) const {
- RootedObject callObject(cx, &iter.read().toObject());
- RootedFunction callee(cx, &iter.read().toObject().as<JSFunction>());
+ RootedObject callObject(cx, iter.readObject());
+ RootedFunction callee(cx, &iter.readObject()->as<JSFunction>());
JS::RootedValueArray<ArgumentsObject::MaxInlinedArgs> argsArray(cx);
for (uint32_t i = 0; i < numActuals_; i++) {
@@ -2136,7 +2111,8 @@ RRest::RRest(CompactBufferReader& reader) {
bool RRest::recover(JSContext* cx, SnapshotIterator& iter) const {
JitFrameLayout* frame = iter.frame();
- uint32_t numActuals = iter.read().toInt32();
+ // Int32 because |numActuals| is computed from MArgumentsLength.
+ uint32_t numActuals = iter.readInt32();
MOZ_ASSERT(numActuals == frame->numActualArgs());
uint32_t numFormals = numFormals_;
diff --git a/js/src/jit/Recover.h b/js/src/jit/Recover.h
index 7cc46c636d..878204de83 100644
--- a/js/src/jit/Recover.h
+++ b/js/src/jit/Recover.h
@@ -129,7 +129,6 @@ namespace jit {
_(ObjectKeys) \
_(ObjectState) \
_(ArrayState) \
- _(SetArrayLength) \
_(AtomicIsLockFree) \
_(BigIntAsIntN) \
_(BigIntAsUintN) \
@@ -882,14 +881,6 @@ class RArrayState final : public RInstruction {
SnapshotIterator& iter) const override;
};
-class RSetArrayLength final : public RInstruction {
- public:
- RINSTRUCTION_HEADER_NUM_OP_(SetArrayLength, 2)
-
- [[nodiscard]] bool recover(JSContext* cx,
- SnapshotIterator& iter) const override;
-};
-
class RAtomicIsLockFree final : public RInstruction {
public:
RINSTRUCTION_HEADER_NUM_OP_(AtomicIsLockFree, 1)
diff --git a/js/src/jit/Trampoline.cpp b/js/src/jit/Trampoline.cpp
index 85661784a7..e6d0cd31c9 100644
--- a/js/src/jit/Trampoline.cpp
+++ b/js/src/jit/Trampoline.cpp
@@ -96,18 +96,13 @@ void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm,
// |
// ^--- IonICCall <---- Ion
// |
- // ^--- Arguments Rectifier
- // | ^
- // | |
- // | ^--- Ion
- // | |
- // | ^--- Baseline Stub <---- Baseline
- // | |
- // | ^--- Entry Frame (CppToJSJit or WasmToJSJit)
+ // ^--- Entry Frame (BaselineInterpreter) (unwrapped)
// |
- // ^--- Entry Frame (CppToJSJit or WasmToJSJit)
+ // ^--- Arguments Rectifier (unwrapped)
+ // |
+ // ^--- Trampoline Native (unwrapped)
// |
- // ^--- Entry Frame (BaselineInterpreter)
+ // ^--- Entry Frame (CppToJSJit or WasmToJSJit)
//
// NOTE: Keep this in sync with JSJitProfilingFrameIterator::moveToNextFrame!
@@ -153,6 +148,7 @@ void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm,
Label handle_BaselineOrIonJS;
Label handle_BaselineStub;
Label handle_Rectifier;
+ Label handle_TrampolineNative;
Label handle_BaselineInterpreterEntry;
Label handle_IonICCall;
Label handle_Entry;
@@ -176,6 +172,8 @@ void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm,
&handle_BaselineOrIonJS);
masm.branch32(Assembler::Equal, scratch, Imm32(FrameType::IonICCall),
&handle_IonICCall);
+ masm.branch32(Assembler::Equal, scratch, Imm32(FrameType::TrampolineNative),
+ &handle_TrampolineNative);
masm.branch32(Assembler::Equal, scratch, Imm32(FrameType::WasmToJSJit),
&handle_Entry);
@@ -237,9 +235,21 @@ void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler& masm,
// There can be multiple previous frame types so just "unwrap" the arguments
// rectifier frame and try again.
masm.loadPtr(Address(fpScratch, CallerFPOffset), fpScratch);
- emitAssertPrevFrameType(fpScratch, scratch,
- {FrameType::IonJS, FrameType::BaselineStub,
- FrameType::CppToJSJit, FrameType::WasmToJSJit});
+ emitAssertPrevFrameType(
+ fpScratch, scratch,
+ {FrameType::IonJS, FrameType::BaselineStub, FrameType::TrampolineNative,
+ FrameType::CppToJSJit, FrameType::WasmToJSJit});
+ masm.jump(&again);
+ }
+
+ masm.bind(&handle_TrampolineNative);
+ {
+ // Unwrap this frame, similar to arguments rectifier frames.
+ masm.loadPtr(Address(fpScratch, CallerFPOffset), fpScratch);
+ emitAssertPrevFrameType(
+ fpScratch, scratch,
+ {FrameType::IonJS, FrameType::BaselineStub, FrameType::Rectifier,
+ FrameType::CppToJSJit, FrameType::WasmToJSJit});
masm.jump(&again);
}
diff --git a/js/src/jit/TrampolineNatives.cpp b/js/src/jit/TrampolineNatives.cpp
new file mode 100644
index 0000000000..0bde6d9985
--- /dev/null
+++ b/js/src/jit/TrampolineNatives.cpp
@@ -0,0 +1,274 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * vim: set ts=8 sts=2 et sw=2 tw=80:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "jit/TrampolineNatives.h"
+
+#include "jit/CalleeToken.h"
+#include "jit/Ion.h"
+#include "jit/JitCommon.h"
+#include "jit/JitRuntime.h"
+#include "jit/MacroAssembler.h"
+#include "jit/PerfSpewer.h"
+#include "js/CallArgs.h"
+#include "js/experimental/JitInfo.h"
+
+#include "jit/MacroAssembler-inl.h"
+#include "vm/Activation-inl.h"
+
+using namespace js;
+using namespace js::jit;
+
+#define ADD_NATIVE(native) \
+ const JSJitInfo js::jit::JitInfo_##native{ \
+ {nullptr}, \
+ {uint16_t(TrampolineNative::native)}, \
+ {0}, \
+ JSJitInfo::TrampolineNative};
+TRAMPOLINE_NATIVE_LIST(ADD_NATIVE)
+#undef ADD_NATIVE
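+
+// For example, for ArraySort the macro above expands (roughly) to:
+//   const JSJitInfo js::jit::JitInfo_ArraySort{
+//       {nullptr}, {uint16_t(TrampolineNative::ArraySort)}, {0},
+//       JSJitInfo::TrampolineNative};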
+
+void js::jit::SetTrampolineNativeJitEntry(JSContext* cx, JSFunction* fun,
+ TrampolineNative native) {
+ if (!cx->runtime()->jitRuntime()) {
+ // No JIT support so there's no trampoline.
+ return;
+ }
+ void** entry = cx->runtime()->jitRuntime()->trampolineNativeJitEntry(native);
+ MOZ_ASSERT(entry);
+ MOZ_ASSERT(*entry);
+ fun->setTrampolineNativeJitEntry(entry);
+}
+
+uint32_t JitRuntime::generateArraySortTrampoline(MacroAssembler& masm) {
+ AutoCreatedBy acb(masm, "JitRuntime::generateArraySortTrampoline");
+
+ const uint32_t offset = startTrampolineCode(masm);
+
+ // The stack for the trampoline frame will look like this:
+ //
+ // [TrampolineNativeFrameLayout]
+ // * this and arguments passed by the caller
+ // * CalleeToken
+ // * Descriptor
+ // * Return Address
+ // * Saved frame pointer <= FramePointer
+ // [ArraySortData]
+ // * ...
+ // * Comparator this + argument Values --+ -> comparator JitFrameLayout
+ // * Comparator (CalleeToken) |
+ // * Descriptor ----+ <= StackPointer
+ //
+ // The call to the comparator pushes the return address and the frame pointer,
+ // so we check the alignment after pushing these two pointers.
+ constexpr size_t FrameSize = sizeof(ArraySortData);
+ constexpr size_t PushedByCall = 2 * sizeof(void*);
+ static_assert((FrameSize + PushedByCall) % JitStackAlignment == 0);
+
+ // Assert ArraySortData comparator data matches JitFrameLayout.
+ static_assert(PushedByCall + ArraySortData::offsetOfDescriptor() ==
+ JitFrameLayout::offsetOfDescriptor());
+ static_assert(PushedByCall + ArraySortData::offsetOfComparator() ==
+ JitFrameLayout::offsetOfCalleeToken());
+ static_assert(PushedByCall + ArraySortData::offsetOfComparatorThis() ==
+ JitFrameLayout::offsetOfThis());
+ static_assert(PushedByCall + ArraySortData::offsetOfComparatorArgs() ==
+ JitFrameLayout::offsetOfActualArgs());
+ static_assert(CalleeToken_Function == 0,
+ "JSFunction* is valid CalleeToken for non-constructor calls");
+
+ // Compute offsets from FramePointer.
+ constexpr int32_t ComparatorOffset =
+ -int32_t(FrameSize) + ArraySortData::offsetOfComparator();
+ constexpr int32_t RvalOffset =
+ -int32_t(FrameSize) + ArraySortData::offsetOfComparatorReturnValue();
+ constexpr int32_t DescriptorOffset =
+ -int32_t(FrameSize) + ArraySortData::offsetOfDescriptor();
+
+#ifdef JS_USE_LINK_REGISTER
+ masm.pushReturnAddress();
+#endif
+ masm.push(FramePointer);
+ masm.moveStackPtrTo(FramePointer);
+
+ AllocatableGeneralRegisterSet regs(GeneralRegisterSet::All());
+ regs.takeUnchecked(ReturnReg);
+ regs.takeUnchecked(JSReturnOperand);
+ Register temp0 = regs.takeAny();
+ Register temp1 = regs.takeAny();
+ Register temp2 = regs.takeAny();
+
+ // Reserve space and check alignment of the comparator frame.
+ masm.reserveStack(FrameSize);
+ masm.assertStackAlignment(JitStackAlignment, PushedByCall);
+
+ // Trampoline control flow looks like this:
+ //
+ // call ArraySortFromJit
+ // goto checkReturnValue
+ // call_comparator:
+ // call comparator
+ // call ArraySortData::sortWithComparator
+ // checkReturnValue:
+ // check return value, jump to call_comparator if needed
+ // return rval
+
+ auto pushExitFrame = [&](Register cxReg, Register scratchReg) {
+ MOZ_ASSERT(masm.framePushed() == FrameSize);
+ masm.PushFrameDescriptor(FrameType::TrampolineNative);
+ masm.Push(ImmWord(0)); // Fake return address.
+ masm.Push(FramePointer);
+ masm.enterFakeExitFrame(cxReg, scratchReg, ExitFrameType::Bare);
+ };
+
+ // Call ArraySortFromJit.
+ using Fn1 = ArraySortResult (*)(JSContext* cx,
+ jit::TrampolineNativeFrameLayout* frame);
+ masm.loadJSContext(temp0);
+ pushExitFrame(temp0, temp1);
+ masm.setupAlignedABICall();
+ masm.passABIArg(temp0);
+ masm.passABIArg(FramePointer);
+ masm.callWithABI<Fn1, ArraySortFromJit>(
+ ABIType::General, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
+
+ // Check return value.
+ Label checkReturnValue;
+ masm.jump(&checkReturnValue);
+ masm.setFramePushed(FrameSize);
+
+ // Call the comparator. Store the frame descriptor before each call to ensure
+ // the HASCACHEDSAVEDFRAME_BIT flag from a previous call is cleared.
+ uintptr_t jitCallDescriptor = MakeFrameDescriptorForJitCall(
+ jit::FrameType::TrampolineNative, ArraySortData::ComparatorActualArgs);
+ Label callDone, jitCallFast, jitCallSlow;
+ masm.bind(&jitCallFast);
+ {
+ masm.storePtr(ImmWord(jitCallDescriptor),
+ Address(FramePointer, DescriptorOffset));
+ masm.loadPtr(Address(FramePointer, ComparatorOffset), temp0);
+ masm.loadJitCodeRaw(temp0, temp1);
+ masm.callJit(temp1);
+ masm.jump(&callDone);
+ }
+ masm.bind(&jitCallSlow);
+ {
+ masm.storePtr(ImmWord(jitCallDescriptor),
+ Address(FramePointer, DescriptorOffset));
+ masm.loadPtr(Address(FramePointer, ComparatorOffset), temp0);
+ masm.loadJitCodeRaw(temp0, temp1);
+ masm.switchToObjectRealm(temp0, temp2);
+
+ // Handle arguments underflow.
+ Label noUnderflow, restoreRealm;
+ masm.loadFunctionArgCount(temp0, temp0);
+ masm.branch32(Assembler::BelowOrEqual, temp0,
+ Imm32(ArraySortData::ComparatorActualArgs), &noUnderflow);
+ {
+ Label rectifier;
+ bindLabelToOffset(&rectifier, argumentsRectifierOffset_);
+ masm.call(&rectifier);
+ masm.jump(&restoreRealm);
+ }
+ masm.bind(&noUnderflow);
+ masm.callJit(temp1);
+
+ masm.bind(&restoreRealm);
+ Address calleeToken(FramePointer,
+ TrampolineNativeFrameLayout::offsetOfCalleeToken());
+ masm.loadFunctionFromCalleeToken(calleeToken, temp0);
+ masm.switchToObjectRealm(temp0, temp1);
+ }
+
+ // Store the comparator's return value.
+ masm.bind(&callDone);
+ masm.storeValue(JSReturnOperand, Address(FramePointer, RvalOffset));
+
+ // Call ArraySortData::sortWithComparator.
+ using Fn2 = ArraySortResult (*)(ArraySortData* data);
+ masm.moveStackPtrTo(temp2);
+ masm.loadJSContext(temp0);
+ pushExitFrame(temp0, temp1);
+ masm.setupAlignedABICall();
+ masm.passABIArg(temp2);
+ masm.callWithABI<Fn2, ArraySortData::sortWithComparator>(
+ ABIType::General, CheckUnsafeCallWithABI::DontCheckHasExitFrame);
+
+ // Check return value.
+ masm.bind(&checkReturnValue);
+ masm.branch32(Assembler::Equal, ReturnReg,
+ Imm32(int32_t(ArraySortResult::Failure)), masm.failureLabel());
+ masm.freeStack(ExitFrameLayout::SizeWithFooter());
+ masm.branch32(Assembler::Equal, ReturnReg,
+ Imm32(int32_t(ArraySortResult::CallJSSameRealmNoRectifier)),
+ &jitCallFast);
+ masm.branch32(Assembler::Equal, ReturnReg,
+ Imm32(int32_t(ArraySortResult::CallJS)), &jitCallSlow);
+#ifdef DEBUG
+ Label ok;
+ masm.branch32(Assembler::Equal, ReturnReg,
+ Imm32(int32_t(ArraySortResult::Done)), &ok);
+ masm.assumeUnreachable("Unexpected return value");
+ masm.bind(&ok);
+#endif
+
+ masm.loadValue(Address(FramePointer, RvalOffset), JSReturnOperand);
+ masm.moveToStackPtr(FramePointer);
+ masm.pop(FramePointer);
+ masm.ret();
+
+ return offset;
+}
+
+void JitRuntime::generateTrampolineNatives(
+ MacroAssembler& masm, TrampolineNativeJitEntryOffsets& offsets,
+ PerfSpewerRangeRecorder& rangeRecorder) {
+ offsets[TrampolineNative::ArraySort] = generateArraySortTrampoline(masm);
+ rangeRecorder.recordOffset("Trampoline: ArraySort");
+}
+
+bool jit::CallTrampolineNativeJitCode(JSContext* cx, TrampolineNative native,
+ CallArgs& args) {
+ // Use the EnterJit trampoline to enter the native's trampoline code.
+
+ AutoCheckRecursionLimit recursion(cx);
+ if (!recursion.check(cx)) {
+ return false;
+ }
+
+ MOZ_ASSERT(!args.isConstructing());
+ CalleeToken calleeToken = CalleeToToken(&args.callee().as<JSFunction>(),
+ /* constructing = */ false);
+
+ Value* maxArgv = args.array() - 1; // -1 to include |this|
+ size_t maxArgc = args.length() + 1;
+
+ Rooted<Value> result(cx, Int32Value(args.length()));
+
+ AssertRealmUnchanged aru(cx);
+ ActivationEntryMonitor entryMonitor(cx, calleeToken);
+ JitActivation activation(cx);
+
+ EnterJitCode enter = cx->runtime()->jitRuntime()->enterJit();
+ void* code = *cx->runtime()->jitRuntime()->trampolineNativeJitEntry(native);
+
+ CALL_GENERATED_CODE(enter, code, maxArgc, maxArgv, /* osrFrame = */ nullptr,
+ calleeToken, /* envChain = */ nullptr,
+ /* osrNumStackValues = */ 0, result.address());
+
+ // Ensure the counter was reset to zero after exiting from JIT code.
+ MOZ_ASSERT(!cx->isInUnsafeRegion());
+
+ // Release temporary buffer used for OSR into Ion.
+ cx->runtime()->jitRuntime()->freeIonOsrTempData();
+
+ if (result.isMagic()) {
+ MOZ_ASSERT(result.isMagic(JS_ION_ERROR));
+ return false;
+ }
+
+ args.rval().set(result);
+ return true;
+}
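For orientation, here is a C++-level sketch of the state machine the trampoline above implements, driven by ArraySortResult. This is illustrative pseudocode, not the emitted assembly; callComparatorViaJit and the data/frame field names are placeholders for the jitCallFast/jitCallSlow paths and the ArraySortData slots.

  // Sketch only: the real control flow is generated by
  // generateArraySortTrampoline above.
  ArraySortResult res = ArraySortFromJit(cx, frame);
  while (res == ArraySortResult::CallJS ||
         res == ArraySortResult::CallJSSameRealmNoRectifier) {
    // The "same realm, no rectifier" result takes the fast path that skips
    // the realm switch and the arguments rectifier.
    bool fast = (res == ArraySortResult::CallJSSameRealmNoRectifier);
    data->comparatorReturnValue = callComparatorViaJit(data, fast);
    res = ArraySortData::sortWithComparator(data);
  }
  if (res == ArraySortResult::Failure) {
    return failure;                    // branch to masm.failureLabel()
  }
  MOZ_ASSERT(res == ArraySortResult::Done);
  return valueStoredAtRvalOffset;      // loaded into JSReturnOperand at the end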
diff --git a/js/src/jit/TrampolineNatives.h b/js/src/jit/TrampolineNatives.h
new file mode 100644
index 0000000000..f71a3b707d
--- /dev/null
+++ b/js/src/jit/TrampolineNatives.h
@@ -0,0 +1,60 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * vim: set ts=8 sts=2 et sw=2 tw=80:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef jit_TrampolineNatives_h
+#define jit_TrampolineNatives_h
+
+#include <stdint.h>
+
+#include "js/TypeDecls.h"
+
+// [SMDOC] Trampoline Natives
+//
+// Trampoline natives are JS builtin functions that use the NATIVE_JIT_ENTRY
+// mechanism. This means they have two implementations: the usual native C++
+// implementation and a generated JIT trampoline that JIT callers can call
+// directly using the JIT ABI calling convention. (This is very similar to how
+// calls from JS to WebAssembly are optimized in the JITs.)
+//
+// The JIT trampoline lets us implement some natives in a more efficient way. In
+// particular, it's much faster to call (other) JS functions with JIT code from
+// a JIT trampoline than from C++ code.
+//
+// Trampoline frames use FrameType::TrampolineNative.
+
+class JSJitInfo;
+
+namespace JS {
+class CallArgs;
+} // namespace JS
+
+// List of all trampoline natives.
+#define TRAMPOLINE_NATIVE_LIST(_) _(ArraySort)
+
+namespace js {
+namespace jit {
+
+enum class TrampolineNative : uint16_t {
+#define ADD_NATIVE(native) native,
+ TRAMPOLINE_NATIVE_LIST(ADD_NATIVE)
+#undef ADD_NATIVE
+ Count
+};
+
+#define ADD_NATIVE(native) extern const JSJitInfo JitInfo_##native;
+TRAMPOLINE_NATIVE_LIST(ADD_NATIVE)
+#undef ADD_NATIVE
+
+void SetTrampolineNativeJitEntry(JSContext* cx, JSFunction* fun,
+ TrampolineNative native);
+
+bool CallTrampolineNativeJitCode(JSContext* cx, TrampolineNative native,
+ JS::CallArgs& args);
+
+} // namespace jit
+} // namespace js
+
+#endif /* jit_TrampolineNatives_h */
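As a reading aid, the X-macro above expands (with the single ArraySort entry) roughly as follows; this expansion is inferred from the definitions in this header and is not part of the patch.

  // Inferred expansion of TRAMPOLINE_NATIVE_LIST(_) with only ArraySort:
  enum class TrampolineNative : uint16_t {
    ArraySort,  // from ADD_NATIVE(ArraySort)
    Count       // one past the last trampoline native
  };

  extern const JSJitInfo JitInfo_ArraySort;  // from ADD_NATIVE(ArraySort)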
diff --git a/js/src/jit/VMFunctionList-inl.h b/js/src/jit/VMFunctionList-inl.h
index d87b010df6..99b98f17ed 100644
--- a/js/src/jit/VMFunctionList-inl.h
+++ b/js/src/jit/VMFunctionList-inl.h
@@ -211,6 +211,7 @@ namespace jit {
_(InterpretResume, js::jit::InterpretResume) \
_(InterruptCheck, js::jit::InterruptCheck) \
_(InvokeFunction, js::jit::InvokeFunction) \
+ _(InvokeNativeFunction, js::jit::InvokeNativeFunction) \
_(IonBinaryArithICUpdate, js::jit::IonBinaryArithIC::update) \
_(IonBindNameICUpdate, js::jit::IonBindNameIC::update) \
_(IonCheckPrivateFieldICUpdate, js::jit::IonCheckPrivateFieldIC::update) \
diff --git a/js/src/jit/VMFunctions.cpp b/js/src/jit/VMFunctions.cpp
index ed3f63c88c..3ec85a72c2 100644
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -545,6 +545,39 @@ bool InvokeFunction(JSContext* cx, HandleObject obj, bool constructing,
return Call(cx, fval, thisv, args, rval);
}
+bool InvokeNativeFunction(JSContext* cx, bool constructing,
+ bool ignoresReturnValue, uint32_t argc, Value* argv,
+ MutableHandleValue rval) {
+ // Ensure argv array is rooted - we may GC in here.
+ size_t numValues = argc + 2 + constructing;
+ RootedExternalValueArray argvRoot(cx, numValues, argv);
+
+ // Data in the argument vector is arranged for a JIT -> C++ call.
+ CallArgs callArgs = CallArgsFromSp(argc + constructing, argv + numValues,
+ constructing, ignoresReturnValue);
+
+ // This function is only called when the callee is a native function.
+ MOZ_ASSERT(callArgs.callee().as<JSFunction>().isNativeWithoutJitEntry());
+
+ if (constructing) {
+ MOZ_ASSERT(callArgs.thisv().isMagic(JS_IS_CONSTRUCTING));
+
+ if (!ConstructFromStack(cx, callArgs)) {
+ return false;
+ }
+
+ MOZ_ASSERT(callArgs.rval().isObject(),
+ "native constructors don't return primitives");
+ } else {
+ if (!CallFromStack(cx, callArgs)) {
+ return false;
+ }
+ }
+
+ rval.set(callArgs.rval());
+ return true;
+}
+
void* GetContextSensitiveInterpreterStub() {
return TlsContext.get()->runtime()->jitRuntime()->interpreterStub().value;
}
@@ -1111,7 +1144,7 @@ bool NormalSuspend(JSContext* cx, HandleObject obj, BaselineFrame* frame,
bool FinalSuspend(JSContext* cx, HandleObject obj, const jsbytecode* pc) {
MOZ_ASSERT(JSOp(*pc) == JSOp::FinalYieldRval);
- AbstractGeneratorObject::finalSuspend(obj);
+ AbstractGeneratorObject::finalSuspend(cx, obj);
return true;
}
diff --git a/js/src/jit/VMFunctions.h b/js/src/jit/VMFunctions.h
index a68dd8279f..b5ac5d700b 100644
--- a/js/src/jit/VMFunctions.h
+++ b/js/src/jit/VMFunctions.h
@@ -354,6 +354,10 @@ struct LastArg<HeadType, TailTypes...> {
uint32_t argc, Value* argv,
MutableHandleValue rval);
+[[nodiscard]] bool InvokeNativeFunction(JSContext* cx, bool constructing,
+ bool ignoresReturnValue, uint32_t argc,
+ Value* argv, MutableHandleValue rval);
+
bool InvokeFromInterpreterStub(JSContext* cx,
InterpreterStubExitFrameLayout* frame);
void* GetContextSensitiveInterpreterStub();
diff --git a/js/src/jit/WarpCacheIRTranspiler.cpp b/js/src/jit/WarpCacheIRTranspiler.cpp
index 9a99e0f5c3..fdaafd00b3 100644
--- a/js/src/jit/WarpCacheIRTranspiler.cpp
+++ b/js/src/jit/WarpCacheIRTranspiler.cpp
@@ -977,7 +977,7 @@ bool WarpCacheIRTranspiler::emitGuardDynamicSlotValue(ObjOperandId objId,
return true;
}
-bool WarpCacheIRTranspiler::emitLoadScriptedProxyHandler(ValOperandId resultId,
+bool WarpCacheIRTranspiler::emitLoadScriptedProxyHandler(ObjOperandId resultId,
ObjOperandId objId) {
MDefinition* obj = getOperand(objId);
@@ -5216,10 +5216,14 @@ bool WarpCacheIRTranspiler::emitLoadOperandResult(ValOperandId inputId) {
}
bool WarpCacheIRTranspiler::emitLoadWrapperTarget(ObjOperandId objId,
- ObjOperandId resultId) {
+ ObjOperandId resultId,
+ bool fallible) {
MDefinition* obj = getOperand(objId);
- auto* ins = MLoadWrapperTarget::New(alloc(), obj);
+ auto* ins = MLoadWrapperTarget::New(alloc(), obj, fallible);
+ if (fallible) {
+ ins->setGuard();
+ }
add(ins);
return defineOperand(resultId, ins);
diff --git a/js/src/jit/arm/Architecture-arm.h b/js/src/jit/arm/Architecture-arm.h
index fa2ae8e0ed..00edac33da 100644
--- a/js/src/jit/arm/Architecture-arm.h
+++ b/js/src/jit/arm/Architecture-arm.h
@@ -32,7 +32,7 @@ namespace jit {
static const int32_t NUNBOX32_TYPE_OFFSET = 4;
static const int32_t NUNBOX32_PAYLOAD_OFFSET = 0;
-static const uint32_t ShadowStackSpace = 0;
+static constexpr uint32_t ShadowStackSpace = 0;
// How far forward/back can a jump go? Provide a generous buffer for thunks.
static const uint32_t JumpImmediateRange = 20 * 1024 * 1024;
diff --git a/js/src/jit/arm64/Architecture-arm64.h b/js/src/jit/arm64/Architecture-arm64.h
index 96bbc63848..7101709f18 100644
--- a/js/src/jit/arm64/Architecture-arm64.h
+++ b/js/src/jit/arm64/Architecture-arm64.h
@@ -551,7 +551,7 @@ static const uint32_t SpillSlotSize =
std::max(sizeof(Registers::RegisterContent),
sizeof(FloatRegisters::RegisterContent));
-static const uint32_t ShadowStackSpace = 0;
+static constexpr uint32_t ShadowStackSpace = 0;
// When our only strategy for far jumps is to encode the offset directly, and
// not insert any jump islands during assembly for even further jumps, then the
diff --git a/js/src/jit/arm64/vixl/Cpu-vixl.cpp b/js/src/jit/arm64/vixl/Cpu-vixl.cpp
index 12244e73e4..b425b286ee 100644
--- a/js/src/jit/arm64/vixl/Cpu-vixl.cpp
+++ b/js/src/jit/arm64/vixl/Cpu-vixl.cpp
@@ -214,7 +214,7 @@ CPUFeatures CPU::InferCPUFeaturesFromOS(
for (size_t i = 0; i < kFeatureBitCount; i++) {
if (auxv & (1UL << i)) features.Combine(kFeatureBits[i]);
}
-#elif defined(XP_MACOSX)
+#elif defined(XP_DARWIN)
// Apple processors have kJSCVT, kDotProduct, and kAtomics features.
features.Combine(CPUFeatures::kJSCVT, CPUFeatures::kDotProduct,
CPUFeatures::kAtomics);
diff --git a/js/src/jit/loong64/Architecture-loong64.h b/js/src/jit/loong64/Architecture-loong64.h
index 48745ee37a..29da43272f 100644
--- a/js/src/jit/loong64/Architecture-loong64.h
+++ b/js/src/jit/loong64/Architecture-loong64.h
@@ -335,7 +335,7 @@ static const uint32_t SpillSlotSize =
std::max(sizeof(Registers::RegisterContent),
sizeof(FloatRegisters::RegisterContent));
-static const uint32_t ShadowStackSpace = 0;
+static constexpr uint32_t ShadowStackSpace = 0;
static const uint32_t SizeOfReturnAddressAfterCall = 0;
// When our only strategy for far jumps is to encode the offset directly, and
diff --git a/js/src/jit/mips32/Architecture-mips32.h b/js/src/jit/mips32/Architecture-mips32.h
index 8e186d2c9c..4ce68032b2 100644
--- a/js/src/jit/mips32/Architecture-mips32.h
+++ b/js/src/jit/mips32/Architecture-mips32.h
@@ -20,7 +20,7 @@
namespace js {
namespace jit {
-static const uint32_t ShadowStackSpace = 4 * sizeof(uintptr_t);
+static constexpr uint32_t ShadowStackSpace = 4 * sizeof(uintptr_t);
// These offsets are specific to nunboxing, and capture offsets into the
// components of a js::Value.
diff --git a/js/src/jit/mips64/Architecture-mips64.h b/js/src/jit/mips64/Architecture-mips64.h
index d3db37ea2c..7bf6054a72 100644
--- a/js/src/jit/mips64/Architecture-mips64.h
+++ b/js/src/jit/mips64/Architecture-mips64.h
@@ -20,7 +20,7 @@ namespace js {
namespace jit {
// Shadow stack space is not required on MIPS64.
-static const uint32_t ShadowStackSpace = 0;
+static constexpr uint32_t ShadowStackSpace = 0;
// MIPS64 have 64 bit floating-point coprocessor. There are 32 double
// precision register which can also be used as single precision registers.
diff --git a/js/src/jit/moz.build b/js/src/jit/moz.build
index c0d2d5f2df..c49b4fcd9f 100644
--- a/js/src/jit/moz.build
+++ b/js/src/jit/moz.build
@@ -87,6 +87,7 @@ UNIFIED_SOURCES += [
"Sink.cpp",
"Snapshots.cpp",
"Trampoline.cpp",
+ "TrampolineNatives.cpp",
"TrialInlining.cpp",
"TypePolicy.cpp",
"ValueNumbering.cpp",
diff --git a/js/src/jit/none/Architecture-none.h b/js/src/jit/none/Architecture-none.h
index 2433234fbf..9218404992 100644
--- a/js/src/jit/none/Architecture-none.h
+++ b/js/src/jit/none/Architecture-none.h
@@ -157,7 +157,7 @@ struct FloatRegister {
inline bool hasUnaliasedDouble() { MOZ_CRASH(); }
inline bool hasMultiAlias() { MOZ_CRASH(); }
-static const uint32_t ShadowStackSpace = 0;
+static constexpr uint32_t ShadowStackSpace = 0;
static const uint32_t JumpImmediateRange = INT32_MAX;
#ifdef JS_NUNBOX32
diff --git a/js/src/jit/riscv64/Architecture-riscv64.h b/js/src/jit/riscv64/Architecture-riscv64.h
index c75bd05ff1..8d02e6e806 100644
--- a/js/src/jit/riscv64/Architecture-riscv64.h
+++ b/js/src/jit/riscv64/Architecture-riscv64.h
@@ -494,7 +494,7 @@ FloatRegister::LiveAsIndexableSet<RegTypeName::Any>(SetType set) {
inline bool hasUnaliasedDouble() { return false; }
inline bool hasMultiAlias() { return false; }
-static const uint32_t ShadowStackSpace = 0;
+static constexpr uint32_t ShadowStackSpace = 0;
static const uint32_t JumpImmediateRange = INT32_MAX;
#ifdef JS_NUNBOX32
diff --git a/js/src/jit/shared/LIR-shared.h b/js/src/jit/shared/LIR-shared.h
index d8b5693d85..74c11bd91b 100644
--- a/js/src/jit/shared/LIR-shared.h
+++ b/js/src/jit/shared/LIR-shared.h
@@ -649,14 +649,14 @@ class LApplyArgsGeneric
LIR_HEADER(ApplyArgsGeneric)
LApplyArgsGeneric(const LAllocation& func, const LAllocation& argc,
- const LBoxAllocation& thisv, const LDefinition& tmpobjreg,
- const LDefinition& tmpcopy)
+ const LBoxAllocation& thisv, const LDefinition& tmpObjReg,
+ const LDefinition& tmpCopy)
: LCallInstructionHelper(classOpcode) {
setOperand(0, func);
setOperand(1, argc);
setBoxOperand(ThisIndex, thisv);
- setTemp(0, tmpobjreg);
- setTemp(1, tmpcopy);
+ setTemp(0, tmpObjReg);
+ setTemp(1, tmpCopy);
}
MApplyArgs* mir() const { return mir_->toApplyArgs(); }
@@ -712,14 +712,14 @@ class LApplyArrayGeneric
LIR_HEADER(ApplyArrayGeneric)
LApplyArrayGeneric(const LAllocation& func, const LAllocation& elements,
- const LBoxAllocation& thisv, const LDefinition& tmpobjreg,
- const LDefinition& tmpcopy)
+ const LBoxAllocation& thisv, const LDefinition& tmpObjReg,
+ const LDefinition& tmpCopy)
: LCallInstructionHelper(classOpcode) {
setOperand(0, func);
setOperand(1, elements);
setBoxOperand(ThisIndex, thisv);
- setTemp(0, tmpobjreg);
- setTemp(1, tmpcopy);
+ setTemp(0, tmpObjReg);
+ setTemp(1, tmpCopy);
}
MApplyArray* mir() const { return mir_->toApplyArray(); }
@@ -746,13 +746,13 @@ class LConstructArgsGeneric
LConstructArgsGeneric(const LAllocation& func, const LAllocation& argc,
const LAllocation& newTarget,
const LBoxAllocation& thisv,
- const LDefinition& tmpobjreg)
+ const LDefinition& tmpObjReg)
: LCallInstructionHelper(classOpcode) {
setOperand(0, func);
setOperand(1, argc);
setOperand(2, newTarget);
setBoxOperand(ThisIndex, thisv);
- setTemp(0, tmpobjreg);
+ setTemp(0, tmpObjReg);
}
MConstructArgs* mir() const { return mir_->toConstructArgs(); }
@@ -784,13 +784,13 @@ class LConstructArrayGeneric
LConstructArrayGeneric(const LAllocation& func, const LAllocation& elements,
const LAllocation& newTarget,
const LBoxAllocation& thisv,
- const LDefinition& tmpobjreg)
+ const LDefinition& tmpObjReg)
: LCallInstructionHelper(classOpcode) {
setOperand(0, func);
setOperand(1, elements);
setOperand(2, newTarget);
setBoxOperand(ThisIndex, thisv);
- setTemp(0, tmpobjreg);
+ setTemp(0, tmpObjReg);
}
MConstructArray* mir() const { return mir_->toConstructArray(); }
@@ -816,6 +816,164 @@ class LConstructArrayGeneric
const LAllocation* getTempForArgCopy() { return getOperand(2); }
};
+class LApplyArgsNative
+ : public LCallInstructionHelper<BOX_PIECES, BOX_PIECES + 2, 2> {
+ public:
+ LIR_HEADER(ApplyArgsNative)
+
+ LApplyArgsNative(const LAllocation& func, const LAllocation& argc,
+ const LBoxAllocation& thisv, const LDefinition& tmpObjReg,
+ const LDefinition& tmpCopy)
+ : LCallInstructionHelper(classOpcode) {
+ setOperand(0, func);
+ setOperand(1, argc);
+ setBoxOperand(ThisIndex, thisv);
+ setTemp(0, tmpObjReg);
+ setTemp(1, tmpCopy);
+ }
+
+ static constexpr bool isConstructing() { return false; }
+
+ MApplyArgs* mir() const { return mir_->toApplyArgs(); }
+
+ uint32_t numExtraFormals() const { return mir()->numExtraFormals(); }
+
+ const LAllocation* getFunction() { return getOperand(0); }
+ const LAllocation* getArgc() { return getOperand(1); }
+
+ static const size_t ThisIndex = 2;
+
+ const LDefinition* getTempObject() { return getTemp(0); }
+ const LDefinition* getTempForArgCopy() { return getTemp(1); }
+};
+
+class LApplyArgsObjNative
+ : public LCallInstructionHelper<BOX_PIECES, BOX_PIECES + 2, 2> {
+ public:
+ LIR_HEADER(ApplyArgsObjNative)
+
+ LApplyArgsObjNative(const LAllocation& func, const LAllocation& argsObj,
+ const LBoxAllocation& thisv, const LDefinition& tmpObjReg,
+ const LDefinition& tmpCopy)
+ : LCallInstructionHelper(classOpcode) {
+ setOperand(0, func);
+ setOperand(1, argsObj);
+ setBoxOperand(ThisIndex, thisv);
+ setTemp(0, tmpObjReg);
+ setTemp(1, tmpCopy);
+ }
+
+ static constexpr bool isConstructing() { return false; }
+
+ MApplyArgsObj* mir() const { return mir_->toApplyArgsObj(); }
+
+ const LAllocation* getFunction() { return getOperand(0); }
+ const LAllocation* getArgsObj() { return getOperand(1); }
+
+ static const size_t ThisIndex = 2;
+
+ const LDefinition* getTempObject() { return getTemp(0); }
+ const LDefinition* getTempForArgCopy() { return getTemp(1); }
+
+ // argc is mapped to the same register as argsObj: argc becomes live as
+ // argsObj is dying, all registers are calltemps.
+ const LAllocation* getArgc() { return getOperand(1); }
+};
+
+class LApplyArrayNative
+ : public LCallInstructionHelper<BOX_PIECES, BOX_PIECES + 2, 2> {
+ public:
+ LIR_HEADER(ApplyArrayNative)
+
+ LApplyArrayNative(const LAllocation& func, const LAllocation& elements,
+ const LBoxAllocation& thisv, const LDefinition& tmpObjReg,
+ const LDefinition& tmpCopy)
+ : LCallInstructionHelper(classOpcode) {
+ setOperand(0, func);
+ setOperand(1, elements);
+ setBoxOperand(ThisIndex, thisv);
+ setTemp(0, tmpObjReg);
+ setTemp(1, tmpCopy);
+ }
+
+ static constexpr bool isConstructing() { return false; }
+
+ MApplyArray* mir() const { return mir_->toApplyArray(); }
+
+ const LAllocation* getFunction() { return getOperand(0); }
+ const LAllocation* getElements() { return getOperand(1); }
+
+ static const size_t ThisIndex = 2;
+
+ const LDefinition* getTempObject() { return getTemp(0); }
+ const LDefinition* getTempForArgCopy() { return getTemp(1); }
+
+ // argc is mapped to the same register as elements: argc becomes live as
+ // elements is dying, all registers are calltemps.
+ const LAllocation* getArgc() { return getOperand(1); }
+};
+
+class LConstructArgsNative : public LCallInstructionHelper<BOX_PIECES, 3, 2> {
+ public:
+ LIR_HEADER(ConstructArgsNative)
+
+ LConstructArgsNative(const LAllocation& func, const LAllocation& argc,
+ const LAllocation& newTarget,
+ const LDefinition& tmpObjReg, const LDefinition& tmpCopy)
+ : LCallInstructionHelper(classOpcode) {
+ setOperand(0, func);
+ setOperand(1, argc);
+ setOperand(2, newTarget);
+ setTemp(0, tmpObjReg);
+ setTemp(1, tmpCopy);
+ }
+
+ static constexpr bool isConstructing() { return true; }
+
+ MConstructArgs* mir() const { return mir_->toConstructArgs(); }
+
+ uint32_t numExtraFormals() const { return mir()->numExtraFormals(); }
+
+ const LAllocation* getFunction() { return getOperand(0); }
+ const LAllocation* getArgc() { return getOperand(1); }
+ const LAllocation* getNewTarget() { return getOperand(2); }
+
+ const LDefinition* getTempObject() { return getTemp(0); }
+ const LDefinition* getTempForArgCopy() { return getTemp(1); }
+};
+
+class LConstructArrayNative : public LCallInstructionHelper<BOX_PIECES, 3, 2> {
+ public:
+ LIR_HEADER(ConstructArrayNative)
+
+ LConstructArrayNative(const LAllocation& func, const LAllocation& elements,
+ const LAllocation& newTarget,
+ const LDefinition& tmpObjReg,
+ const LDefinition& tmpCopy)
+ : LCallInstructionHelper(classOpcode) {
+ setOperand(0, func);
+ setOperand(1, elements);
+ setOperand(2, newTarget);
+ setTemp(0, tmpObjReg);
+ setTemp(1, tmpCopy);
+ }
+
+ static constexpr bool isConstructing() { return true; }
+
+ MConstructArray* mir() const { return mir_->toConstructArray(); }
+
+ const LAllocation* getFunction() { return getOperand(0); }
+ const LAllocation* getElements() { return getOperand(1); }
+ const LAllocation* getNewTarget() { return getOperand(2); }
+
+ const LDefinition* getTempObject() { return getTemp(0); }
+ const LDefinition* getTempForArgCopy() { return getTemp(1); }
+
+ // argc is mapped to the same register as elements: argc becomes live as
+ // elements is dying, all registers are calltemps.
+ const LAllocation* getArgc() { return getOperand(1); }
+};
+
// Takes in either an integer or boolean input and tests it for truthiness.
class LTestIAndBranch : public LControlInstructionHelper<2, 1, 0> {
public:
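A short note on the helper template used by the new *Native LIR classes above: the reading <defs, operands, temps> is the usual LCallInstructionHelper convention and is stated here as an assumption rather than taken from the patch.

  // Assumed reading of the template arguments:
  //   LApplyArgsNative / LApplyArgsObjNative / LApplyArrayNative
  //     : <BOX_PIECES, BOX_PIECES + 2, 2>
  //     defs     = BOX_PIECES      boxed return value
  //     operands = BOX_PIECES + 2  func, argc (or argsObj/elements), boxed |this|
  //     temps    = 2               tmpObjReg, tmpCopy
  //   LConstructArgsNative / LConstructArrayNative : <BOX_PIECES, 3, 2>
  //     operands = 3               func, argc (or elements), newTarget; the
  //                                construct variants take no boxed |this| operand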
diff --git a/js/src/jit/wasm32/Architecture-wasm32.h b/js/src/jit/wasm32/Architecture-wasm32.h
index d7726eaa5f..2419591664 100644
--- a/js/src/jit/wasm32/Architecture-wasm32.h
+++ b/js/src/jit/wasm32/Architecture-wasm32.h
@@ -161,7 +161,7 @@ struct FloatRegister {
inline bool hasUnaliasedDouble() { MOZ_CRASH(); }
inline bool hasMultiAlias() { MOZ_CRASH(); }
-static const uint32_t ShadowStackSpace = 0;
+static constexpr uint32_t ShadowStackSpace = 0;
static const uint32_t JumpImmediateRange = INT32_MAX;
#ifdef JS_NUNBOX32
diff --git a/js/src/jit/x86-shared/Architecture-x86-shared.h b/js/src/jit/x86-shared/Architecture-x86-shared.h
index b4701af284..72055efb7d 100644
--- a/js/src/jit/x86-shared/Architecture-x86-shared.h
+++ b/js/src/jit/x86-shared/Architecture-x86-shared.h
@@ -31,9 +31,9 @@ static const int32_t NUNBOX32_PAYLOAD_OFFSET = 0;
#endif
#if defined(JS_CODEGEN_X64) && defined(_WIN64)
-static const uint32_t ShadowStackSpace = 32;
+static constexpr uint32_t ShadowStackSpace = 32;
#else
-static const uint32_t ShadowStackSpace = 0;
+static constexpr uint32_t ShadowStackSpace = 0;
#endif
static const uint32_t JumpImmediateRange = INT32_MAX;
diff --git a/js/src/jsapi-tests/moz.build b/js/src/jsapi-tests/moz.build
index 9e2c99cfa6..f6cc3b3f70 100644
--- a/js/src/jsapi-tests/moz.build
+++ b/js/src/jsapi-tests/moz.build
@@ -138,6 +138,7 @@ UNIFIED_SOURCES += [
"testUbiNode.cpp",
"testUncaughtSymbol.cpp",
"testUTF8.cpp",
+ "testWasmEncoder.cpp",
"testWasmLEB128.cpp",
"testWasmReturnCalls.cpp",
"testWeakMap.cpp",
diff --git a/js/src/jsapi-tests/testDeduplication.cpp b/js/src/jsapi-tests/testDeduplication.cpp
index 6ee3cb989d..7f5eb68e37 100644
--- a/js/src/jsapi-tests/testDeduplication.cpp
+++ b/js/src/jsapi-tests/testDeduplication.cpp
@@ -33,6 +33,8 @@ static bool SameChars(JSContext* cx, JSString* str1, JSString* str2,
}
BEGIN_TEST(testDeduplication_ASSC) {
+ AutoGCParameter disableSemispace(cx, JSGC_SEMISPACE_NURSERY_ENABLED, 0);
+
// Test with a long enough string to avoid inline chars allocation.
const char text[] =
"Andthebeastshallcomeforthsurroundedbyaroilingcloudofvengeance."
diff --git a/js/src/jsapi-tests/testGCHeapBarriers.cpp b/js/src/jsapi-tests/testGCHeapBarriers.cpp
index 9493049e16..a23e41ae6e 100644
--- a/js/src/jsapi-tests/testGCHeapBarriers.cpp
+++ b/js/src/jsapi-tests/testGCHeapBarriers.cpp
@@ -169,6 +169,7 @@ static void MakeGray(const JS::ArrayBufferOrView& view) {
// - WeakHeapPtr
BEGIN_TEST(testGCHeapPostBarriers) {
AutoLeaveZeal nozeal(cx);
+ AutoGCParameter disableSemispace(cx, JSGC_SEMISPACE_NURSERY_ENABLED, 0);
/* Sanity check - objects start in the nursery and then become tenured. */
JS_GC(cx);
diff --git a/js/src/jsapi-tests/testGCMarking.cpp b/js/src/jsapi-tests/testGCMarking.cpp
index dc2f1e0f4d..0a51bf21bc 100644
--- a/js/src/jsapi-tests/testGCMarking.cpp
+++ b/js/src/jsapi-tests/testGCMarking.cpp
@@ -344,6 +344,7 @@ BEGIN_TEST(testIncrementalRoots) {
// Tenure everything so intentionally unrooted objects don't move before we
// can use them.
+ AutoGCParameter disableSemispace(cx, JSGC_SEMISPACE_NURSERY_ENABLED, 0);
cx->runtime()->gc.minorGC(JS::GCReason::API);
// Release all roots except for the RootedObjectVector.
diff --git a/js/src/jsapi-tests/testGCUniqueId.cpp b/js/src/jsapi-tests/testGCUniqueId.cpp
index 1c5652f280..577582837d 100644
--- a/js/src/jsapi-tests/testGCUniqueId.cpp
+++ b/js/src/jsapi-tests/testGCUniqueId.cpp
@@ -23,6 +23,7 @@ static void MinimizeHeap(JSContext* cx) {
BEGIN_TEST(testGCUID) {
AutoLeaveZeal nozeal(cx);
+ AutoGCParameter gcparam(cx, JSGC_SEMISPACE_NURSERY_ENABLED, 0);
uint64_t uid = 0;
uint64_t tmp = 0;
diff --git a/js/src/jsapi-tests/testIsInsideNursery.cpp b/js/src/jsapi-tests/testIsInsideNursery.cpp
index 793a1ed3cc..c7383f460b 100644
--- a/js/src/jsapi-tests/testIsInsideNursery.cpp
+++ b/js/src/jsapi-tests/testIsInsideNursery.cpp
@@ -9,6 +9,8 @@
#include "vm/JSContext-inl.h"
+using namespace js;
+
BEGIN_TEST(testIsInsideNursery) {
/* Non-GC things are never inside the nursery. */
CHECK(!cx->nursery().isInside(cx));
@@ -30,9 +32,9 @@ BEGIN_TEST(testIsInsideNursery) {
JS::Rooted<JSString*> string(cx, JS_NewStringCopyZ(cx, oolstr));
/* Objects are initially allocated in the nursery. */
- CHECK(js::gc::IsInsideNursery(object));
+ CHECK(gc::IsInsideNursery(object));
/* As are strings. */
- CHECK(js::gc::IsInsideNursery(string));
+ CHECK(gc::IsInsideNursery(string));
/* And their contents. */
{
JS::AutoCheckCannotGC nogc;
@@ -44,8 +46,8 @@ BEGIN_TEST(testIsInsideNursery) {
JS_GC(cx);
/* And are tenured if still live after a GC. */
- CHECK(!js::gc::IsInsideNursery(object));
- CHECK(!js::gc::IsInsideNursery(string));
+ CHECK(!gc::IsInsideNursery(object));
+ CHECK(!gc::IsInsideNursery(string));
{
JS::AutoCheckCannotGC nogc;
const JS::Latin1Char* strdata =
@@ -56,3 +58,76 @@ BEGIN_TEST(testIsInsideNursery) {
return true;
}
END_TEST(testIsInsideNursery)
+
+BEGIN_TEST(testSemispaceNursery) {
+ AutoGCParameter enableSemispace(cx, JSGC_SEMISPACE_NURSERY_ENABLED, 1);
+
+ JS_GC(cx);
+
+ /* Objects are initially allocated in the nursery. */
+ RootedObject object(cx, JS_NewPlainObject(cx));
+ CHECK(gc::IsInsideNursery(object));
+
+ /* Minor GC with the evict reason tenures them. */
+ cx->minorGC(JS::GCReason::EVICT_NURSERY);
+ CHECK(!gc::IsInsideNursery(object));
+
+ /* Minor GC with other reasons gives them a second chance. */
+ object = JS_NewPlainObject(cx);
+ void* ptr = object;
+ CHECK(gc::IsInsideNursery(object));
+ cx->minorGC(JS::GCReason::API);
+ CHECK(ptr != object);
+ CHECK(gc::IsInsideNursery(object));
+ ptr = object;
+ cx->minorGC(JS::GCReason::API);
+ CHECK(!gc::IsInsideNursery(object));
+ CHECK(ptr != object);
+
+ CHECK(testReferencesBetweenGenerations(0));
+ CHECK(testReferencesBetweenGenerations(1));
+ CHECK(testReferencesBetweenGenerations(2));
+
+ return true;
+}
+
+bool testReferencesBetweenGenerations(size_t referrerGeneration) {
+ MOZ_ASSERT(referrerGeneration <= 2);
+
+ RootedObject referrer(cx, JS_NewPlainObject(cx));
+ CHECK(referrer);
+ CHECK(gc::IsInsideNursery(referrer));
+
+ for (size_t i = 0; i < referrerGeneration; i++) {
+ cx->minorGC(JS::GCReason::API);
+ }
+ MOZ_ASSERT(IsInsideNursery(referrer) == (referrerGeneration < 2));
+
+ RootedObject object(cx, JS_NewPlainObject(cx));
+ CHECK(gc::IsInsideNursery(object));
+ RootedValue value(cx, JS::ObjectValue(*object));
+ CHECK(JS_DefineProperty(cx, referrer, "prop", value, 0));
+ CHECK(JS_GetProperty(cx, referrer, "prop", &value));
+ CHECK(&value.toObject() == object);
+ CHECK(JS_SetElement(cx, referrer, 0, value));
+ CHECK(JS_GetElement(cx, referrer, 0, &value));
+ CHECK(&value.toObject() == object);
+
+ cx->minorGC(JS::GCReason::API);
+ CHECK(gc::IsInsideNursery(object));
+ CHECK(JS_GetProperty(cx, referrer, "prop", &value));
+ CHECK(&value.toObject() == object);
+ CHECK(JS_GetElement(cx, referrer, 0, &value));
+ CHECK(&value.toObject() == object);
+
+ cx->minorGC(JS::GCReason::API);
+ CHECK(!gc::IsInsideNursery(object));
+ CHECK(JS_GetProperty(cx, referrer, "prop", &value));
+ CHECK(&value.toObject() == object);
+ CHECK(JS_GetElement(cx, referrer, 0, &value));
+ CHECK(&value.toObject() == object);
+
+ return true;
+}
+
+END_TEST(testSemispaceNursery)
diff --git a/js/src/jsapi-tests/testSparseBitmap.cpp b/js/src/jsapi-tests/testSparseBitmap.cpp
index bd5e7fee95..936132ab0c 100644
--- a/js/src/jsapi-tests/testSparseBitmap.cpp
+++ b/js/src/jsapi-tests/testSparseBitmap.cpp
@@ -111,5 +111,50 @@ BEGIN_TEST(testSparseBitmapExternalOR) {
return true;
}
-
END_TEST(testSparseBitmapExternalOR)
+
+BEGIN_TEST(testSparseBitmapExternalAND) {
+ // Testing ANDing data from an external array.
+
+ const size_t wordCount = 4;
+
+ // Create a bitmap with two bits set per word based on the word index.
+ SparseBitmap bitmap;
+ for (size_t i = 0; i < wordCount; i++) {
+ bitmap.setBit(i * JS_BITS_PER_WORD + i);
+ bitmap.setBit(i * JS_BITS_PER_WORD + i + 1);
+ }
+
+ // Update a single word, clearing one of the bits.
+ uintptr_t source[wordCount];
+ CHECK(bitmap.getBit(0));
+ CHECK(bitmap.getBit(1));
+ source[0] = ~(1 << 1);
+ bitmap.bitwiseAndRangeWith(0, 1, source);
+ CHECK(bitmap.getBit(0));
+ CHECK(!bitmap.getBit(1));
+
+ // Update a word at an offset.
+ CHECK(bitmap.getBit(JS_BITS_PER_WORD + 1));
+ CHECK(bitmap.getBit(JS_BITS_PER_WORD + 2));
+ source[0] = ~(1 << 2);
+ bitmap.bitwiseAndRangeWith(1, 1, source);
+ CHECK(bitmap.getBit(JS_BITS_PER_WORD + 1));
+ CHECK(!bitmap.getBit(JS_BITS_PER_WORD + 2));
+
+ // Update multiple words at an offset.
+ CHECK(bitmap.getBit(JS_BITS_PER_WORD * 2 + 2));
+ CHECK(bitmap.getBit(JS_BITS_PER_WORD * 2 + 3));
+ CHECK(bitmap.getBit(JS_BITS_PER_WORD * 3 + 3));
+ CHECK(bitmap.getBit(JS_BITS_PER_WORD * 3 + 4));
+ source[0] = ~(1 << 3);
+ source[1] = ~(1 << 4);
+ bitmap.bitwiseAndRangeWith(2, 2, source);
+ CHECK(bitmap.getBit(JS_BITS_PER_WORD * 2 + 2));
+ CHECK(!bitmap.getBit(JS_BITS_PER_WORD * 2 + 3));
+ CHECK(bitmap.getBit(JS_BITS_PER_WORD * 3 + 3));
+ CHECK(!bitmap.getBit(JS_BITS_PER_WORD * 3 + 4));
+
+ return true;
+}
+END_TEST(testSparseBitmapExternalAND)
diff --git a/js/src/jsapi-tests/testWasmEncoder.cpp b/js/src/jsapi-tests/testWasmEncoder.cpp
new file mode 100644
index 0000000000..a90249264c
--- /dev/null
+++ b/js/src/jsapi-tests/testWasmEncoder.cpp
@@ -0,0 +1,117 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * vim: set ts=8 sts=2 et sw=2 tw=80:
+ */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "jit/MacroAssembler.h"
+
+#include "jsapi-tests/tests.h"
+#include "jsapi-tests/testsJit.h"
+
+#include "wasm/WasmConstants.h"
+#include "wasm/WasmFeatures.h" // AnyCompilerAvailable
+#include "wasm/WasmGenerator.h"
+#include "wasm/WasmSignalHandlers.h" // EnsureFullSignalHandlers
+#include "wasm/WasmValType.h"
+
+using namespace js;
+using namespace js::jit;
+using namespace js::wasm;
+
+static bool TestTruncFn(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+ double d = args[0].toDouble();
+ args.rval().setInt32((int)d);
+ return true;
+}
+
+// Check if wasm modules can be encoded in C++ and run.
+BEGIN_TEST(testWasmEncodeBasic) {
+ if (!AnyCompilerAvailable(cx)) {
+ knownFail = true;
+ return false;
+ }
+
+ EnsureFullSignalHandlers(cx);
+
+ FeatureOptions options;
+ ScriptedCaller scriptedCaller;
+ SharedCompileArgs compileArgs =
+ CompileArgs::buildAndReport(cx, std::move(scriptedCaller), options);
+
+ ModuleEnvironment moduleEnv(compileArgs->features);
+ CompilerEnvironment compilerEnv(CompileMode::Once, Tier::Optimized,
+ DebugEnabled::False);
+ compilerEnv.computeParameters();
+ MOZ_ALWAYS_TRUE(moduleEnv.init());
+
+ ValTypeVector paramsImp, resultsImp;
+ MOZ_ALWAYS_TRUE(paramsImp.emplaceBack(ValType::F64) &&
+ resultsImp.emplaceBack(ValType::I32));
+
+ CacheableName ns;
+ CacheableName impName;
+ MOZ_ALWAYS_TRUE(CacheableName::fromUTF8Chars("t", &impName));
+ MOZ_ALWAYS_TRUE(moduleEnv.addImportedFunc(std::move(paramsImp),
+ std::move(resultsImp),
+ std::move(ns), std::move(impName)));
+
+ ValTypeVector params, results;
+ MOZ_ALWAYS_TRUE(results.emplaceBack(ValType::I32));
+ CacheableName expName;
+ MOZ_ALWAYS_TRUE(CacheableName::fromUTF8Chars("r", &expName));
+ MOZ_ALWAYS_TRUE(moduleEnv.addDefinedFunc(std::move(params),
+ std::move(results), true,
+ mozilla::Some(std::move(expName))));
+
+ ModuleGenerator mg(*compileArgs, &moduleEnv, &compilerEnv, nullptr, nullptr,
+ nullptr);
+ MOZ_ALWAYS_TRUE(mg.init(nullptr));
+
+ // Build function and keep bytecode around until the end.
+ Bytes bytecode;
+ {
+ Encoder encoder(bytecode);
+ MOZ_ALWAYS_TRUE(EncodeLocalEntries(encoder, ValTypeVector()));
+ MOZ_ALWAYS_TRUE(encoder.writeOp(Op::F64Const) &&
+ encoder.writeFixedF64(42.3));
+ MOZ_ALWAYS_TRUE(encoder.writeOp(Op::Call) && encoder.writeVarU32(0));
+ MOZ_ALWAYS_TRUE(encoder.writeOp(Op::End));
+ }
+ MOZ_ALWAYS_TRUE(mg.compileFuncDef(1, 0, bytecode.begin(),
+ bytecode.begin() + bytecode.length()));
+ MOZ_ALWAYS_TRUE(mg.finishFuncDefs());
+
+ SharedBytes shareableBytes = js_new<ShareableBytes>();
+ MOZ_ALWAYS_TRUE(shareableBytes);
+ SharedModule module = mg.finishModule(*shareableBytes);
+ MOZ_ALWAYS_TRUE(module);
+
+ MOZ_ASSERT(module->imports().length() == 1);
+ MOZ_ASSERT(module->exports().length() == 1);
+
+ // Instantiate and run.
+ {
+ Rooted<ImportValues> imports(cx);
+ RootedFunction func(cx, NewNativeFunction(cx, TestTruncFn, 0, nullptr));
+ MOZ_ALWAYS_TRUE(func);
+ MOZ_ALWAYS_TRUE(imports.get().funcs.append(func));
+
+ Rooted<WasmInstanceObject*> instance(cx);
+ MOZ_ALWAYS_TRUE(module->instantiate(cx, imports.get(), nullptr, &instance));
+ RootedFunction wasmFunc(cx);
+ MOZ_ALWAYS_TRUE(
+ WasmInstanceObject::getExportedFunction(cx, instance, 1, &wasmFunc));
+
+ JSAutoRealm ar(cx, wasmFunc);
+ RootedValue rval(cx);
+ RootedValue fval(cx);
+ MOZ_ALWAYS_TRUE(
+ JS::Call(cx, fval, wasmFunc, JS::HandleValueArray::empty(), &rval));
+ MOZ_RELEASE_ASSERT(rval.toInt32() == 42);
+ }
+ return true;
+}
+END_TEST(testWasmEncodeBasic)
diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp
index 77c3ae5f09..8dda64d4f1 100644
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -40,6 +40,7 @@
#include "gc/Marking.h"
#include "gc/PublicIterators.h"
#include "jit/JitSpewer.h"
+#include "jit/TrampolineNatives.h"
#include "js/CallAndConstruct.h" // JS::IsCallable
#include "js/CharacterEncoding.h"
#include "js/ColumnNumber.h" // JS::TaggedColumnNumberOneOrigin, JS::ColumnNumberOneOrigin
@@ -49,6 +50,7 @@
#include "js/Date.h" // JS::GetReduceMicrosecondTimePrecisionCallback
#include "js/ErrorInterceptor.h"
#include "js/ErrorReport.h" // JSErrorBase
+#include "js/experimental/JitInfo.h" // JSJitInfo
#include "js/friend/ErrorMessages.h" // js::GetErrorMessage, JSMSG_*
#include "js/friend/StackLimits.h" // js::AutoCheckRecursionLimit
#include "js/GlobalObject.h"
@@ -1602,7 +1604,8 @@ JS_PUBLIC_API bool JS::ToPrimitive(JSContext* cx, HandleObject obj, JSType hint,
return ToPrimitiveSlow(cx, hint, vp);
}
-JS_PUBLIC_API bool JS::GetFirstArgumentAsTypeHint(JSContext* cx, CallArgs args,
+JS_PUBLIC_API bool JS::GetFirstArgumentAsTypeHint(JSContext* cx,
+ const CallArgs& args,
JSType* result) {
if (!args.get(0).isString()) {
JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
@@ -2335,8 +2338,12 @@ JS_PUBLIC_API JSFunction* JS::NewFunctionFromSpec(JSContext* cx,
return nullptr;
}
- if (fs->call.info) {
- fun->setJitInfo(fs->call.info);
+ if (auto* jitInfo = fs->call.info) {
+ if (jitInfo->type() == JSJitInfo::OpType::TrampolineNative) {
+ jit::SetTrampolineNativeJitEntry(cx, fun, jitInfo->trampolineNative);
+ } else {
+ fun->setJitInfo(jitInfo);
+ }
}
return fun;
}
diff --git a/js/src/jsapi.h b/js/src/jsapi.h
index 3597e52582..7fe58c250f 100644
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -303,7 +303,7 @@ extern JS_PUBLIC_API bool ToPrimitive(JSContext* cx, JS::HandleObject obj,
* This can be useful in implementing a @@toPrimitive method.
*/
extern JS_PUBLIC_API bool GetFirstArgumentAsTypeHint(JSContext* cx,
- CallArgs args,
+ const CallArgs& args,
JSType* result);
} /* namespace JS */
diff --git a/js/src/jsdate.cpp b/js/src/jsdate.cpp
index 7040213f56..a6f8b42040 100644
--- a/js/src/jsdate.cpp
+++ b/js/src/jsdate.cpp
@@ -1387,11 +1387,12 @@ static bool ParseDate(DateTimeInfo::ForceUTC forceUTC, const CharT* s,
if (IsAsciiDigit(s[index])) {
break;
}
- } else {
- // Reject numbers directly after letters e.g. foo2
- if (IsAsciiDigit(s[index]) && IsAsciiAlpha(s[index - 1])) {
- return false;
- }
+ } else if (!strchr(" ,.-/", s[index])) {
+ // We're only allowing the above delimiters after the day of
+ // week to prevent things such as "foo_1" from being parsed
+ // as a date, which may break software that uses this function
+ // to determine whether or not something is a date.
+ return false;
}
}
diff --git a/js/src/jsfriendapi.cpp b/js/src/jsfriendapi.cpp
index 694058f6c9..574e61248e 100644
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -420,6 +420,23 @@ JS_PUBLIC_API JSFunction* js::NewFunctionByIdWithReserved(
gc::AllocKind::FUNCTION_EXTENDED);
}
+JS_PUBLIC_API JSFunction* js::NewFunctionByIdWithReservedAndProto(
+ JSContext* cx, JSNative native, HandleObject proto, unsigned nargs,
+ unsigned flags, jsid id) {
+ MOZ_ASSERT(id.isAtom());
+ MOZ_ASSERT(!cx->zone()->isAtomsZone());
+ MOZ_ASSERT(native);
+ CHECK_THREAD(cx);
+ cx->check(id);
+
+ Rooted<JSAtom*> atom(cx, id.toAtom());
+ FunctionFlags funflags = (flags & JSFUN_CONSTRUCTOR)
+ ? FunctionFlags::NATIVE_CTOR
+ : FunctionFlags::NATIVE_FUN;
+ return NewFunctionWithProto(cx, native, nargs, funflags, nullptr, atom, proto,
+ gc::AllocKind::FUNCTION_EXTENDED, TenuredObject);
+}
+
JS_PUBLIC_API const Value& js::GetFunctionNativeReserved(JSObject* fun,
size_t which) {
MOZ_ASSERT(fun->as<JSFunction>().isNativeFun());
diff --git a/js/src/jsfriendapi.h b/js/src/jsfriendapi.h
index 046045ac53..6ad8140d91 100644
--- a/js/src/jsfriendapi.h
+++ b/js/src/jsfriendapi.h
@@ -246,6 +246,12 @@ extern JS_PUBLIC_API bool EnqueueJob(JSContext* cx, JS::HandleObject job);
*/
extern JS_PUBLIC_API void StopDrainingJobQueue(JSContext* cx);
+/**
+ * Instruct the runtime to restart draining the internal job queue after
+ * stopping it with StopDrainingJobQueue.
+ */
+extern JS_PUBLIC_API void RestartDrainingJobQueue(JSContext* cx);
+
extern JS_PUBLIC_API void RunJobs(JSContext* cx);
extern JS_PUBLIC_API JS::Zone* GetRealmZone(JS::Realm* realm);
@@ -389,6 +395,10 @@ JS_PUBLIC_API JSFunction* NewFunctionByIdWithReserved(JSContext* cx,
unsigned nargs,
unsigned flags, jsid id);
+JS_PUBLIC_API JSFunction* NewFunctionByIdWithReservedAndProto(
+ JSContext* cx, JSNative native, JS::Handle<JSObject*> proto, unsigned nargs,
+ unsigned flags, jsid id);
+
/**
* Get or set function's reserved slot value.
* `fun` should be a function created with `*WithReserved` API above.
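A hedged usage sketch of the new NewFunctionByIdWithReservedAndProto API, based only on the signature declared above; SomeNative, MakeFn, atomizedId, and protoObj are made-up names for illustration. Per the MOZ_ASSERT in the implementation, the id must be an atom.

  // Hypothetical caller of the new friend API (illustrative only).
  static bool SomeNative(JSContext* cx, unsigned argc, JS::Value* vp) {
    JS::CallArgs args = JS::CallArgsFromVp(argc, vp);
    args.rval().setUndefined();
    return true;
  }

  static JSFunction* MakeFn(JSContext* cx, JS::Handle<jsid> atomizedId,
                            JS::Handle<JSObject*> protoObj) {
    return js::NewFunctionByIdWithReservedAndProto(
        cx, SomeNative, protoObj, /* nargs = */ 0, /* flags = */ 0, atomizedId);
  }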
diff --git a/js/src/make-source-package.py b/js/src/make-source-package.py
index 3ea6b7e310..d24b47d667 100755
--- a/js/src/make-source-package.py
+++ b/js/src/make-source-package.py
@@ -179,7 +179,7 @@ rsync_filter_list = """
+ /config/**
+ /python/**
-+ /.cargo/config.in
++ /.cargo/config.toml.in
+ /third_party/function2/**
- /third_party/python/gyp
diff --git a/js/src/moz.build b/js/src/moz.build
index 3ec9c6f489..63d0d75509 100644
--- a/js/src/moz.build
+++ b/js/src/moz.build
@@ -311,6 +311,7 @@ UNIFIED_SOURCES += [
"builtin/ParseRecordObject.cpp",
"builtin/Profilers.cpp",
"builtin/Promise.cpp",
+ "builtin/RawJSONObject.cpp",
"builtin/Reflect.cpp",
"builtin/ReflectParse.cpp",
"builtin/ShadowRealm.cpp",
diff --git a/js/src/old-configure.in b/js/src/old-configure.in
index 6861d2bbac..b9152f974d 100644
--- a/js/src/old-configure.in
+++ b/js/src/old-configure.in
@@ -436,33 +436,11 @@ fi
if test -z "$SKIP_COMPILER_CHECKS"; then
dnl Checks for typedefs, structures, and compiler characteristics.
dnl ========================================================
-AC_TYPE_MODE_T
-AC_TYPE_OFF_T
-AC_TYPE_PID_T
-
-AC_LANG_C
-AC_MSG_CHECKING(for ssize_t)
-AC_CACHE_VAL(ac_cv_type_ssize_t,
- [AC_TRY_COMPILE([#include <stdio.h>
- #include <sys/types.h>],
- [ssize_t foo = 0;],
- [ac_cv_type_ssize_t=true],
- [ac_cv_type_ssize_t=false])])
-if test "$ac_cv_type_ssize_t" = true ; then
- AC_DEFINE(HAVE_SSIZE_T)
- AC_MSG_RESULT(yes)
-else
- AC_MSG_RESULT(no)
-fi
AC_LANG_CPLUSPLUS
MOZ_CXX11
-dnl Checks for header files.
-dnl ========================================================
-AC_HEADER_DIRENT
-
dnl Checks for libraries.
dnl ========================================================
AC_CHECK_LIB(c_r, gethostbyname_r)
diff --git a/js/src/rust/Cargo.toml b/js/src/rust/Cargo.toml
index 0ea4a5c965..6624747fe9 100644
--- a/js/src/rust/Cargo.toml
+++ b/js/src/rust/Cargo.toml
@@ -21,4 +21,4 @@ mozilla-central-workspace-hack = { version = "0.1", features = ["jsrust"], optio
jsrust_shared = { path = "./shared" }
# Workaround for https://github.com/rust-lang/rust/issues/58393
mozglue-static = { path = "../../../mozglue/static/rust" }
-wast = "70.0.1"
+wast = "201.0.0"
diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp
index 624b8217c3..178c394e1d 100644
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -729,9 +729,6 @@ bool shell::enableSourcePragmas = true;
bool shell::enableAsyncStacks = false;
bool shell::enableAsyncStackCaptureDebuggeeOnly = false;
bool shell::enableToSource = false;
-#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
-bool shell::enableJSONParseWithSource = false;
-#endif
bool shell::enableImportAttributes = false;
bool shell::enableImportAttributesAssertSyntax = false;
#ifdef JS_GC_ZEAL
@@ -1316,6 +1313,13 @@ static bool DrainJobQueue(JSContext* cx, unsigned argc, Value* vp) {
return false;
}
+ if (cx->isEvaluatingModule != 0) {
+ JS_ReportErrorASCII(
+ cx,
+ "Can't drain the job queue when executing the top level of a module");
+ return false;
+ }
+
RunShellJobs(cx);
if (GetShellContext(cx)->quitting) {
@@ -2428,6 +2432,20 @@ static bool ConvertTranscodeResultToJSException(JSContext* cx,
}
}
+static void SetQuitting(JSContext* cx, int32_t code) {
+ ShellContext* sc = GetShellContext(cx);
+ js::StopDrainingJobQueue(cx);
+ sc->exitCode = code;
+ sc->quitting = true;
+}
+
+static void UnsetQuitting(JSContext* cx) {
+ ShellContext* sc = GetShellContext(cx);
+ js::RestartDrainingJobQueue(cx);
+ sc->exitCode = 0;
+ sc->quitting = false;
+}
+
static bool Evaluate(JSContext* cx, unsigned argc, Value* vp) {
CallArgs args = CallArgsFromVp(argc, vp);
@@ -2716,6 +2734,11 @@ static bool Evaluate(JSContext* cx, unsigned argc, Value* vp) {
? JS_ExecuteScript(cx, script, args.rval())
: JS_ExecuteScript(cx, envChain, script, args.rval()))) {
if (catchTermination && !JS_IsExceptionPending(cx)) {
+ ShellContext* sc = GetShellContext(cx);
+ if (sc->quitting) {
+ UnsetQuitting(cx);
+ }
+
JSAutoRealm ar1(cx, callerGlobal);
JSString* str = JS_NewStringCopyZ(cx, "terminated");
if (!str) {
@@ -3165,8 +3188,6 @@ static bool PrintErr(JSContext* cx, unsigned argc, Value* vp) {
static bool Help(JSContext* cx, unsigned argc, Value* vp);
static bool Quit(JSContext* cx, unsigned argc, Value* vp) {
- ShellContext* sc = GetShellContext(cx);
-
// Print a message to stderr in differential testing to help jsfunfuzz
// find uncatchable-exception bugs.
if (js::SupportDifferentialTesting()) {
@@ -3189,9 +3210,7 @@ static bool Quit(JSContext* cx, unsigned argc, Value* vp) {
return false;
}
- js::StopDrainingJobQueue(cx);
- sc->exitCode = code;
- sc->quitting = true;
+ SetQuitting(cx, code);
return false;
}
@@ -4108,11 +4127,7 @@ static void SetStandardRealmOptions(JS::RealmOptions& options) {
options.creationOptions()
.setSharedMemoryAndAtomicsEnabled(enableSharedMemory)
.setCoopAndCoepEnabled(false)
- .setToSourceEnabled(enableToSource)
-#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
- .setJSONParseWithSource(enableJSONParseWithSource)
-#endif
- ;
+ .setToSourceEnabled(enableToSource);
}
[[nodiscard]] static bool CheckRealmOptions(JSContext* cx,
@@ -5448,7 +5463,7 @@ static bool ModuleLink(JSContext* cx, unsigned argc, Value* vp) {
Rooted<ModuleObject*> module(cx,
object->as<ShellModuleObjectWrapper>().get());
- if (!js::ModuleLink(cx, module)) {
+ if (!JS::ModuleLink(cx, module)) {
return false;
}
@@ -5477,7 +5492,7 @@ static bool ModuleEvaluate(JSContext* cx, unsigned argc, Value* vp) {
Rooted<ModuleObject*> module(cx,
object->as<ShellModuleObjectWrapper>().get());
- if (!js::ModuleEvaluate(cx, module, args.rval())) {
+ if (!JS::ModuleEvaluate(cx, module, args.rval())) {
return false;
}
}
@@ -5726,6 +5741,11 @@ static bool FrontendTest(JSContext* cx, unsigned argc, Value* vp,
return false;
}
+ if (goal == frontend::ParseGoal::Module && options.lineno == 0) {
+ JS_ReportErrorASCII(cx, "Module cannot be compiled with lineNumber == 0");
+ return false;
+ }
+
#ifdef JS_ENABLE_SMOOSH
bool found = false;
if (!JS_HasProperty(cx, objOptions, "rustFrontend", &found)) {
@@ -6143,8 +6163,7 @@ static bool OffThreadCompileModuleToStencil(JSContext* cx, unsigned argc,
return false;
}
- if (options.lineno == 0) {
- JS_ReportErrorASCII(cx, "Module cannot be compiled with lineNumber == 0");
+ if (!ValidateModuleCompileOptions(cx, options)) {
return false;
}
}
@@ -6794,6 +6813,10 @@ static bool NewGlobal(JSContext* cx, unsigned argc, Value* vp) {
creationOptions.setNewCompartmentAndZone();
}
+ // Ensure the target compartment/zone is kept alive when sameCompartmentAs or
+ // sameZoneAs is used.
+ Rooted<JSObject*> compartmentRoot(cx);
+
JS::AutoHoldPrincipals principals(cx);
if (args.length() == 1 && args[0].isObject()) {
@@ -6811,15 +6834,16 @@ static bool NewGlobal(JSContext* cx, unsigned argc, Value* vp) {
return false;
}
if (v.isObject()) {
- creationOptions.setNewCompartmentInExistingZone(
- UncheckedUnwrap(&v.toObject()));
+ compartmentRoot = UncheckedUnwrap(&v.toObject());
+ creationOptions.setNewCompartmentInExistingZone(compartmentRoot);
}
if (!JS_GetProperty(cx, opts, "sameCompartmentAs", &v)) {
return false;
}
if (v.isObject()) {
- creationOptions.setExistingCompartment(UncheckedUnwrap(&v.toObject()));
+ compartmentRoot = UncheckedUnwrap(&v.toObject());
+ creationOptions.setExistingCompartment(compartmentRoot);
}
if (!JS_GetProperty(cx, opts, "newCompartment", &v)) {
@@ -12000,10 +12024,8 @@ bool InitOptionParser(OptionParser& op) {
"property of null or undefined") ||
!op.addBoolOption('\0', "enable-iterator-helpers",
"Enable iterator helpers") ||
-#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
!op.addBoolOption('\0', "enable-json-parse-with-source",
"Enable JSON.parse with source") ||
-#endif
!op.addBoolOption('\0', "enable-shadow-realms", "Enable ShadowRealms") ||
!op.addBoolOption('\0', "disable-array-grouping",
"Disable Object.groupBy and Map.groupBy") ||
@@ -12019,6 +12041,8 @@ bool InitOptionParser(OptionParser& op) {
!op.addBoolOption(
'\0', "enable-arraybuffer-resizable",
"Enable resizable ArrayBuffers and growable SharedArrayBuffers") ||
+ !op.addBoolOption('\0', "enable-uint8array-base64",
+ "Enable Uint8Array base64/hex methods") ||
!op.addBoolOption('\0', "enable-top-level-await",
"Enable top-level await") ||
!op.addBoolOption('\0', "enable-class-static-blocks",
@@ -12407,6 +12431,17 @@ bool SetGlobalOptionsPreJSInit(const OptionParser& op) {
if (op.getBoolOption("enable-symbols-as-weakmap-keys")) {
JS::Prefs::setAtStartup_experimental_symbols_as_weakmap_keys(true);
}
+ if (op.getBoolOption("enable-uint8array-base64")) {
+ JS::Prefs::setAtStartup_experimental_uint8array_base64(true);
+ }
+#endif
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ JS::Prefs::setAtStartup_experimental_json_parse_with_source(
+ op.getBoolOption("enable-json-parse-with-source"));
+#else
+ if (op.getBoolOption("enable-json-parse-with-source")) {
+ fprintf(stderr, "JSON.parse with source is not enabled on this build.\n");
+ }
#endif
if (op.getBoolOption("disable-weak-refs")) {
@@ -12627,9 +12662,6 @@ bool SetContextOptions(JSContext* cx, const OptionParser& op) {
enableAsyncStackCaptureDebuggeeOnly =
op.getBoolOption("async-stacks-capture-debuggee-only");
enableToSource = !op.getBoolOption("disable-tosource");
-#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
- enableJSONParseWithSource = op.getBoolOption("enable-json-parse-with-source");
-#endif
enableImportAttributesAssertSyntax =
op.getBoolOption("enable-import-assertions");
enableImportAttributes = op.getBoolOption("enable-import-attributes") ||
diff --git a/js/src/shell/jsshell.h b/js/src/shell/jsshell.h
index 9cbf4505f9..67536a8f60 100644
--- a/js/src/shell/jsshell.h
+++ b/js/src/shell/jsshell.h
@@ -127,9 +127,6 @@ extern bool enableWellFormedUnicodeStrings;
extern bool enableArrayBufferTransfer;
extern bool enableArrayBufferResizable;
extern bool enableSymbolsAsWeakMapKeys;
-#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
-extern bool enableJSONParseWithSource;
-#endif
extern bool enableNewSetMethods;
extern bool enableImportAttributes;
extern bool enableImportAttributesAssertSyntax;
diff --git a/js/src/tests/jstests.list b/js/src/tests/jstests.list
index 55ffc65f6f..dbe3bd555e 100644
--- a/js/src/tests/jstests.list
+++ b/js/src/tests/jstests.list
@@ -1368,16 +1368,6 @@ skip script test262/built-ins/Temporal/ZonedDateTime/prototype/since/normalized-
skip script test262/built-ins/Temporal/ZonedDateTime/prototype/hoursInDay/precision-exact-mathematical-values-2.js
##############################################
-# Iterator helper tests #
-##############################################
-
-# Failures with --enable-iterator-helpers, see bug 1877831.
-shell-option(--enable-iterator-helpers) skip script test262/built-ins/Iterator/prototype/constructor/prop-desc.js
-shell-option(--enable-iterator-helpers) skip script test262/built-ins/Iterator/prototype/constructor/weird-setter.js
-shell-option(--enable-iterator-helpers) skip script test262/built-ins/Iterator/prototype/Symbol.toStringTag/prop-desc.js
-shell-option(--enable-iterator-helpers) skip script test262/built-ins/Iterator/prototype/Symbol.toStringTag/weird-setter.js
-
-##############################################
# AsyncFromSyncIteratorPrototype tests #
##############################################
diff --git a/js/src/tests/lib/jittests.py b/js/src/tests/lib/jittests.py
index 4e7be87255..7d79ba9f2a 100755
--- a/js/src/tests/lib/jittests.py
+++ b/js/src/tests/lib/jittests.py
@@ -343,6 +343,13 @@ class JitTest:
elif name.startswith("--"):
# // |jit-test| --ion-gvn=off; --no-sse4
test.jitflags.append(name)
+ elif name.startswith("-P"):
+ prefAndValue = name.split()
+ assert (
+ len(prefAndValue) == 2
+ ), f"{name}: failed to parse preference"
+ # // |jit-test| -P pref(=value)?
+ test.jitflags.append("--setpref=" + prefAndValue[1])
else:
print(
"{}: warning: unrecognized |jit-test| attribute"
diff --git a/js/src/tests/non262/Date/parse-day-of-week.js b/js/src/tests/non262/Date/parse-day-of-week.js
index 08bcd3ee05..52a8c6339a 100644
--- a/js/src/tests/non262/Date/parse-day-of-week.js
+++ b/js/src/tests/non262/Date/parse-day-of-week.js
@@ -63,6 +63,8 @@ const rejected = [
"Sep 26 1995 foo",
"1995 foo Sep 26",
"foo2 Sep 26 1995",
+ "Tuesday_Sep 26 1995",
+ "foo_12",
];
for (const format of formats) {
diff --git a/js/src/tests/non262/Iterator/proto.js b/js/src/tests/non262/Iterator/proto.js
index 642e48d171..5b32f5d34b 100644
--- a/js/src/tests/non262/Iterator/proto.js
+++ b/js/src/tests/non262/Iterator/proto.js
@@ -10,14 +10,5 @@ assertEq(propDesc.writable, false);
assertEq(propDesc.enumerable, false);
assertEq(propDesc.configurable, false);
-// Make sure @@toStringTag has been set.
-const toStringTagDesc = Reflect.getOwnPropertyDescriptor(Iterator.prototype, Symbol.toStringTag);
-assertDeepEq(toStringTagDesc, {
- value: "Iterator",
- writable: true,
- enumerable: false,
- configurable: true,
-});
-
if (typeof reportCompare === 'function')
reportCompare(0, 0);
diff --git a/js/src/tests/non262/JSON/parse-with-source.js b/js/src/tests/non262/JSON/parse-with-source.js
index 96e9550706..5e144bd6a4 100644
--- a/js/src/tests/non262/JSON/parse-with-source.js
+++ b/js/src/tests/non262/JSON/parse-with-source.js
@@ -1,49 +1,103 @@
-// |reftest| skip-if(!(this.hasOwnProperty('getBuildConfiguration')&&getBuildConfiguration('json-parse-with-source'))) shell-option(--enable-json-parse-with-source)
+// |reftest| shell-option(--enable-json-parse-with-source) skip-if(!JSON.hasOwnProperty('isRawJSON')||!xulRuntime.shell)
var actual;
function reviver(key, value, context) {
assertEq(arguments.length, 3);
- assertEq("source" in context, true);
- actual = context["source"];
+ if ("source" in context) {
+ actual.push(context["source"]);
+ } else { // objects and arrays have no "source"
+ actual.push(null);
+ }
}
let tests = [
// STRINGS
- {input: '""', expected: '""'},
- {input: '"str"', expected: '"str"'},
- {input: '"str" ', expected: '"str"'},
- {input: ' "str" ', expected: '"str"'},
- {input: ' " str"', expected: '" str"'},
- {input: '"\uD83D\uDE0A\u2764\u2FC1"', expected: '"\uD83D\uDE0A\u2764\u2FC1"'},
+ {input: '""', expected: ['""']},
+ {input: '"str"', expected: ['"str"']},
+ {input: '"str" ', expected: ['"str"']},
+ {input: ' "str" ', expected: ['"str"']},
+ {input: ' " str"', expected: ['" str"']},
+ {input: '"\uD83D\uDE0A\u2764\u2FC1"', expected: ['"\uD83D\uDE0A\u2764\u2FC1"']},
// NUMBERS
- {input: '1', expected: '1'},
- {input: ' 1', expected: '1'},
- {input: '4.2', expected: '4.2'},
- {input: '4.2 ', expected: '4.2'},
- {input: '4.2000 ', expected: '4.2000'},
- {input: '4e2000 ', expected: '4e2000'},
- {input: '4.4e2000 ', expected: '4.4e2000'},
- {input: '9007199254740999', expected: '9007199254740999'},
- {input: '-31', expected: '-31'},
- {input: '-3.1', expected: '-3.1'},
- {input: ' -31 ', expected: '-31'},
+ {input: '1', expected: ['1']},
+ {input: ' 1', expected: ['1']},
+ {input: '4.2', expected: ['4.2']},
+ {input: '4.2 ', expected: ['4.2']},
+ {input: '4.2000 ', expected: ['4.2000']},
+ {input: '4e2000 ', expected: ['4e2000']},
+ {input: '4.4e2000 ', expected: ['4.4e2000']},
+ {input: '9007199254740999', expected: ['9007199254740999']},
+ {input: '-31', expected: ['-31']},
+ {input: '-3.1', expected: ['-3.1']},
+ {input: ' -31 ', expected: ['-31']},
// BOOLEANS
- {input: 'true', expected: 'true'},
- {input: 'true ', expected: 'true'},
- {input: 'false', expected: 'false'},
- {input: ' false', expected: 'false'},
+ {input: 'true', expected: ['true']},
+ {input: 'true ', expected: ['true']},
+ {input: 'false', expected: ['false']},
+ {input: ' false', expected: ['false']},
// NULL
- {input: 'null', expected: 'null'},
- {input: ' null', expected: 'null'},
- {input: '\tnull ', expected: 'null'},
- {input: 'null\t', expected: 'null'},
+ {input: 'null', expected: ['null']},
+ {input: ' null', expected: ['null']},
+ {input: '\tnull ', expected: ['null']},
+ {input: 'null\t', expected: ['null']},
+ // OBJECTS
+ {input: '{ }', expected: [null]},
+ {input: '{ "4": 1 }', expected: ['1', null]},
+ {input: '{ "a": 1 }', expected: ['1', null]},
+ {input: '{ "b": 2, "a": 1 }', expected: ['2', '1', null]},
+ {input: '{ "b": 2, "1": 1 }', expected: ['1', '2', null]},
+ {input: '{ "b": 2, "c": null }', expected: ['2', 'null', null]},
+ {input: '{ "b": 2, "b": 1, "b": 4 }', expected: ['4', null]},
+ {input: '{ "b": 2, "a": "1" }', expected: ['2', '"1"', null]},
+ {input: '{ "b": { "c": 3 }, "a": 1 }', expected: ['3', null, '1', null]},
+ // ARRAYS
+ {input: '[]', expected: [null]},
+ {input: '[1, 5, 2]', expected: ['1', '5', '2', null]},
+ {input: '[1, null, 2]', expected: ['1', 'null', '2', null]},
+ {input: '[1, {"a":2}, "7"]', expected: ['1', '2', null, '"7"', null]},
+ {input: '[1, [2, [3, [4, 5], [6, 7], 8], 9], 10]', expected: ['1', '2', '3', '4', '5', null, '6', '7', null, '8', null, '9', null, '10', null]},
+ {input: '[1, [2, [3, [4, 5, 6, 7, 8, 9, 10], []]]]', expected: ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', null, null, null, null, null]},
+ {input: '{"a": [1, {"b":2}, "7"], "c": 8}', expected: ['1', '2', null, '"7"', null, '8', null]},
];
for (const test of tests) {
print("Testing " + JSON.stringify(test));
+ actual = [];
JSON.parse(test.input, reviver);
- assertEq(actual, test.expected);
+ assertDeepEq(actual, test.expected);
}
+// If the constructed object is modified but the result of the modification is
+// the same as the original, we should still provide the source
+function replace_c_with_same_val(key, value, context) {
+ if (key === "a") {
+ this["c"] = "ABCDEABCDE";
+ }
+ if (key) {
+ assertEq("source" in context, true);
+ }
+ return value;
+}
+JSON.parse('{ "a": "b", "c": "ABCDEABCDE" }', replace_c_with_same_val);
+
+// rawJSON
+function assertIsRawJson(rawJson, expectedRawJsonValue) {
+ assertEq(null, Object.getPrototypeOf(rawJson));
+ assertEq(true, Object.hasOwn(rawJson, 'rawJSON'));
+ assertDeepEq(['rawJSON'], Object.getOwnPropertyNames(rawJson));
+ assertDeepEq([], Object.getOwnPropertySymbols(rawJson));
+ assertEq(expectedRawJsonValue, rawJson.rawJSON);
+}
+
+assertEq(true, Object.isFrozen(JSON.rawJSON('"shouldBeFrozen"')));
+assertThrowsInstanceOf(() => JSON.rawJSON(), SyntaxError);
+assertIsRawJson(JSON.rawJSON(1, 2), '1');
+
+// isRawJSON
+assertEq(false, JSON.isRawJSON());
+assertEq(false, JSON.isRawJSON({}, {}));
+assertEq(false, JSON.isRawJSON({}, JSON.rawJSON(2)));
+assertEq(true, JSON.isRawJSON(JSON.rawJSON(1), JSON.rawJSON(2)));
+
if (typeof reportCompare == 'function')
reportCompare(0, 0);
\ No newline at end of file
diff --git a/js/src/tests/non262/PrivateName/nested-class-in-computed-property-key.js b/js/src/tests/non262/PrivateName/nested-class-in-computed-property-key.js
new file mode 100644
index 0000000000..7596dfe542
--- /dev/null
+++ b/js/src/tests/non262/PrivateName/nested-class-in-computed-property-key.js
@@ -0,0 +1,17 @@
+let capturedPrivateAccess;
+class A {
+ // Declare private name in outer class.
+ static #x = 42;
+
+ static [(
+ // Inner class in computed property key.
+ class {},
+
+ // Access to private name from outer class.
+ capturedPrivateAccess = () => A.#x
+ )];
+}
+assertEq(capturedPrivateAccess(), 42);
+
+if (typeof reportCompare === 'function')
+ reportCompare(0, 0);
diff --git a/js/src/tests/non262/module/bug1689499.js b/js/src/tests/non262/module/bug1689499.js
index 859cd6f64f..2547c88d16 100644
--- a/js/src/tests/non262/module/bug1689499.js
+++ b/js/src/tests/non262/module/bug1689499.js
@@ -1,4 +1,4 @@
-// |reftest| skip-if(!xulRuntime.shell) module async -- needs drainJobQueue
+// |reftest| skip-if(!xulRuntime.shell) async -- needs drainJobQueue
async function test() {
try {
diff --git a/js/src/tests/test262-update.py b/js/src/tests/test262-update.py
index fc6fa62c45..d45d639ebb 100755
--- a/js/src/tests/test262-update.py
+++ b/js/src/tests/test262-update.py
@@ -24,7 +24,6 @@ UNSUPPORTED_FEATURES = set(
"Atomics.waitAsync", # Bug 1467846
"legacy-regexp", # Bug 1306461
"regexp-duplicate-named-groups", # Bug 1773135
- "json-parse-with-source", # Bug 1658310
"set-methods", # Bug 1805038
]
)
@@ -38,6 +37,8 @@ FEATURE_CHECK_NEEDED = {
"iterator-helpers": "!this.hasOwnProperty('Iterator')", # Bug 1568906
"Intl.Segmenter": "!Intl.Segmenter", # Bug 1423593
"resizable-arraybuffer": "!ArrayBuffer.prototype.resize", # Bug 1670026
+ "uint8array-base64": "!Uint8Array.fromBase64", # Bug 1862220
+ "json-parse-with-source": "!JSON.hasOwnProperty('isRawJSON')", # Bug 1658310
}
RELEASE_OR_BETA = set(
[
@@ -51,6 +52,8 @@ SHELL_OPTIONS = {
"iterator-helpers": "--enable-iterator-helpers",
"symbols-as-weakmap-keys": "--enable-symbols-as-weakmap-keys",
"resizable-arraybuffer": "--enable-arraybuffer-resizable",
+ "uint8array-base64": "--enable-uint8array-base64",
+ "json-parse-with-source": "--enable-json-parse-with-source",
}
diff --git a/js/src/tests/test262/prs/3994/browser.js b/js/src/tests/test262/prs/3994/browser.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/browser.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/browser.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/browser.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/browser.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/alphabet.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/alphabet.js
new file mode 100644
index 0000000000..c893bf952a
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/alphabet.js
@@ -0,0 +1,25 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: Conversion of base64 strings to Uint8Arrays exercising the alphabet option
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+assert.compareArray(Uint8Array.fromBase64('x+/y'), [199, 239, 242]);
+assert.compareArray(Uint8Array.fromBase64('x+/y', { alphabet: 'base64' }), [199, 239, 242]);
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('x+/y', { alphabet: 'base64url' });
+});
+
+assert.compareArray(Uint8Array.fromBase64('x-_y', { alphabet: 'base64url' }), [199, 239, 242]);
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('x-_y');
+});
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('x-_y', { alphabet: 'base64' });
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/browser.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/browser.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/browser.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/descriptor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/descriptor.js
new file mode 100644
index 0000000000..a0e843cfaf
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/descriptor.js
@@ -0,0 +1,18 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: >
+ Uint8Array.fromBase64 has default data property attributes.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array, 'fromBase64', {
+ enumerable: false,
+ writable: true,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/ignores-receiver.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/ignores-receiver.js
new file mode 100644
index 0000000000..0847653d63
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/ignores-receiver.js
@@ -0,0 +1,22 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: Uint8Array.fromBase64 ignores its receiver
+features: [uint8array-base64, TypedArray]
+---*/
+
+var fromBase64 = Uint8Array.fromBase64;
+var noReceiver = fromBase64("Zg==");
+assert.sameValue(Object.getPrototypeOf(noReceiver), Uint8Array.prototype);
+
+class Subclass extends Uint8Array {
+ constructor() {
+ throw new Test262Error("subclass constructor called");
+ }
+}
+var fromSubclass = Subclass.fromBase64("Zg==");
+assert.sameValue(Object.getPrototypeOf(fromSubclass), Uint8Array.prototype);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/illegal-characters.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/illegal-characters.js
new file mode 100644
index 0000000000..6c2d6eef4a
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/illegal-characters.js
@@ -0,0 +1,26 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: Uint8Array.fromBase64 throws a SyntaxError when input has non-base64, non-ascii-whitespace characters
+features: [uint8array-base64, TypedArray]
+---*/
+
+var illegal = [
+ 'Zm.9v',
+ 'Zm9v^',
+ 'Zg==&',
+ 'Z−==', // U+2212 'Minus Sign'
+ 'Z＋==', // U+FF0B 'Fullwidth Plus Sign'
+ 'Zg\u00A0==', // nbsp
+ 'Zg\u2009==', // thin space
+ 'Zg\u2028==', // line separator
+];
+illegal.forEach(function(value) {
+ assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64(value)
+ });
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/last-chunk-handling.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/last-chunk-handling.js
new file mode 100644
index 0000000000..c78d15e869
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/last-chunk-handling.js
@@ -0,0 +1,67 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: Handling of final chunks in Uint8Array.fromBase64
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+// padding
+assert.compareArray(Uint8Array.fromBase64('ZXhhZg=='), [101, 120, 97, 102]);
+assert.compareArray(Uint8Array.fromBase64('ZXhhZg==', { lastChunkHandling: 'loose' }), [101, 120, 97, 102]);
+assert.compareArray(Uint8Array.fromBase64('ZXhhZg==', { lastChunkHandling: 'stop-before-partial' }), [101, 120, 97, 102]);
+assert.compareArray(Uint8Array.fromBase64('ZXhhZg==', { lastChunkHandling: 'strict' }), [101, 120, 97, 102]);
+
+// no padding
+assert.compareArray(Uint8Array.fromBase64('ZXhhZg'), [101, 120, 97, 102]);
+assert.compareArray(Uint8Array.fromBase64('ZXhhZg', { lastChunkHandling: 'loose' }), [101, 120, 97, 102]);
+assert.compareArray(Uint8Array.fromBase64('ZXhhZg', { lastChunkHandling: 'stop-before-partial' }), [101, 120, 97]);
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('ZXhhZg', { lastChunkHandling: 'strict' });
+});
+
+// non-zero padding bits
+assert.compareArray(Uint8Array.fromBase64('ZXhhZh=='), [101, 120, 97, 102]);
+assert.compareArray(Uint8Array.fromBase64('ZXhhZh==', { lastChunkHandling: 'loose' }), [101, 120, 97, 102]);
+assert.compareArray(Uint8Array.fromBase64('ZXhhZh==', { lastChunkHandling: 'stop-before-partial' }), [101, 120, 97, 102]);
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('ZXhhZh==', { lastChunkHandling: 'strict' });
+});
+
+// non-zero padding bits, no padding
+assert.compareArray(Uint8Array.fromBase64('ZXhhZh'), [101, 120, 97, 102]);
+assert.compareArray(Uint8Array.fromBase64('ZXhhZh', { lastChunkHandling: 'loose' }), [101, 120, 97, 102]);
+assert.compareArray(Uint8Array.fromBase64('ZXhhZh', { lastChunkHandling: 'stop-before-partial' }), [101, 120, 97]);
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('ZXhhZh', { lastChunkHandling: 'strict' });
+});
+
+// partial padding
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('ZXhhZg=');
+});
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('ZXhhZg=', { lastChunkHandling: 'loose' });
+});
+assert.compareArray(Uint8Array.fromBase64('ZXhhZg=', { lastChunkHandling: 'stop-before-partial' }), [101, 120, 97]);
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('ZXhhZg=', { lastChunkHandling: 'strict' });
+});
+
+// excess padding
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('ZXhhZg===');
+});
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('ZXhhZg===', { lastChunkHandling: 'loose' });
+});
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('ZXhhZg===', { lastChunkHandling: 'stop-before-partial' });
+});
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromBase64('ZXhhZg===', { lastChunkHandling: 'strict' });
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/length.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/length.js
new file mode 100644
index 0000000000..dab9b49f61
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/length.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: >
+ Uint8Array.fromBase64.length is 1.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.fromBase64, 'length', {
+ value: 1,
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/name.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/name.js
new file mode 100644
index 0000000000..2bd6716863
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/name.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: >
+ Uint8Array.fromBase64.name is "fromBase64".
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.fromBase64, 'name', {
+ value: 'fromBase64',
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/nonconstructor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/nonconstructor.js
new file mode 100644
index 0000000000..7546b12b6b
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/nonconstructor.js
@@ -0,0 +1,18 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: >
+ Uint8Array.fromBase64 is not a constructor function.
+includes: [isConstructor.js]
+features: [uint8array-base64, TypedArray, Reflect.construct]
+---*/
+
+assert(!isConstructor(Uint8Array.fromBase64), "Uint8Array.fromBase64 is not a constructor");
+
+assert.throws(TypeError, function() {
+ new Uint8Array.fromBase64('');
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/option-coercion.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/option-coercion.js
new file mode 100644
index 0000000000..e314a8f16f
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/option-coercion.js
@@ -0,0 +1,62 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: Uint8Array.fromBase64 triggers effects of the "alphabet" and "lastChunkHandling" getters, but does not perform toString on the results
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+assert.throws(TypeError, function() {
+ Uint8Array.fromBase64("Zg==", { alphabet: Object("base64") });
+});
+
+assert.throws(TypeError, function() {
+ Uint8Array.fromBase64("Zg==", { lastChunkHandling: Object("loose") });
+});
+
+
+var toStringCalls = 0;
+var throwyToString = {
+ toString: function() {
+ toStringCalls += 1;
+ throw new Test262Error("toString called");
+ }
+};
+assert.throws(TypeError, function() {
+ Uint8Array.fromBase64("Zg==", { alphabet: throwyToString });
+});
+assert.sameValue(toStringCalls, 0);
+
+assert.throws(TypeError, function() {
+ Uint8Array.fromBase64("Zg==", { lastChunkHandling: throwyToString });
+});
+assert.sameValue(toStringCalls, 0);
+
+
+var alphabetAccesses = 0;
+var base64UrlOptions = {};
+Object.defineProperty(base64UrlOptions, "alphabet", {
+ get: function() {
+ alphabetAccesses += 1;
+ return "base64url";
+ }
+});
+var arr = Uint8Array.fromBase64("x-_y", base64UrlOptions);
+assert.compareArray(arr, [199, 239, 242]);
+assert.sameValue(alphabetAccesses, 1);
+
+var lastChunkHandlingAccesses = 0;
+var strictOptions = {};
+Object.defineProperty(strictOptions, "lastChunkHandling", {
+ get: function() {
+ lastChunkHandlingAccesses += 1;
+ return "strict";
+ }
+});
+var arr = Uint8Array.fromBase64("Zg==", strictOptions);
+assert.compareArray(arr, [102]);
+assert.sameValue(lastChunkHandlingAccesses, 1);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/results.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/results.js
new file mode 100644
index 0000000000..9d19507f9a
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/results.js
@@ -0,0 +1,30 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: Conversion of base64 strings to Uint8Arrays
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+// standard test vectors from https://datatracker.ietf.org/doc/html/rfc4648#section-10
+var standardBase64Vectors = [
+ ["", []],
+ ["Zg==", [102]],
+ ["Zm8=", [102, 111]],
+ ["Zm9v", [102, 111, 111]],
+ ["Zm9vYg==", [102, 111, 111, 98]],
+ ["Zm9vYmE=", [102, 111, 111, 98, 97]],
+ ["Zm9vYmFy", [102, 111, 111, 98, 97, 114]],
+];
+
+standardBase64Vectors.forEach(function (pair) {
+ var arr = Uint8Array.fromBase64(pair[0]);
+ assert.sameValue(Object.getPrototypeOf(arr), Uint8Array.prototype, "decoding " + pair[0]);
+ assert.sameValue(arr.length, pair[1].length, "decoding " + pair[0]);
+ assert.sameValue(arr.buffer.byteLength, pair[1].length, "decoding " + pair[0]);
+ assert.compareArray(arr, pair[1], "decoding " + pair[0]);
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/shell.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/shell.js
new file mode 100644
index 0000000000..eda1477282
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/shell.js
@@ -0,0 +1,24 @@
+// GENERATED, DO NOT EDIT
+// file: isConstructor.js
+// Copyright (C) 2017 André Bargull. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+
+/*---
+description: |
+ Test if a given function is a constructor function.
+defines: [isConstructor]
+features: [Reflect.construct]
+---*/
+
+function isConstructor(f) {
+ if (typeof f !== "function") {
+ throw new Test262Error("isConstructor invoked with a non-function value");
+ }
+
+ try {
+ Reflect.construct(function(){}, [], f);
+ } catch (e) {
+ return false;
+ }
+ return true;
+}
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/string-coercion.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/string-coercion.js
new file mode 100644
index 0000000000..57292f8a98
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/string-coercion.js
@@ -0,0 +1,44 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: Uint8Array.fromBase64 throws if its argument is not a string
+features: [uint8array-base64, TypedArray]
+---*/
+
+var toStringCalls = 0;
+var throwyToString = {
+ toString: function() {
+ toStringCalls += 1;
+ throw new Test262Error("toString called");
+ }
+};
+
+assert.throws(TypeError, function() {
+ Uint8Array.fromBase64(throwyToString);
+});
+assert.sameValue(toStringCalls, 0);
+
+
+var optionAccesses = 0;
+var touchyOptions = {};
+Object.defineProperty(touchyOptions, "alphabet", {
+ get: function() {
+ optionAccesses += 1;
+ throw new Test262Error("alphabet accessed");
+ }
+});
+Object.defineProperty(touchyOptions, "lastChunkHandling", {
+ get: function() {
+ optionAccesses += 1;
+ throw new Test262Error("lastChunkHandling accessed");
+ }
+});
+assert.throws(TypeError, function() {
+ Uint8Array.fromBase64(throwyToString, touchyOptions);
+});
+assert.sameValue(toStringCalls, 0);
+assert.sameValue(optionAccesses, 0);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/whitespace.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/whitespace.js
new file mode 100644
index 0000000000..87c79a6563
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromBase64/whitespace.js
@@ -0,0 +1,25 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.frombase64
+description: Uint8Array.fromBase64 ignores ASCII whitespace in the input
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var whitespaceKinds = [
+ ["Z g==", "space"],
+ ["Z\tg==", "tab"],
+ ["Z\x0Ag==", "LF"],
+ ["Z\x0Cg==", "FF"],
+ ["Z\x0Dg==", "CR"],
+];
+whitespaceKinds.forEach(function(pair) {
+ var arr = Uint8Array.fromBase64(pair[0]);
+ assert.sameValue(arr.length, 1);
+ assert.sameValue(arr.buffer.byteLength, 1);
+ assert.compareArray(arr, [102], "ascii whitespace: " + pair[1]);
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/browser.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/browser.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/browser.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/descriptor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/descriptor.js
new file mode 100644
index 0000000000..3d53652054
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/descriptor.js
@@ -0,0 +1,18 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.fromhex
+description: >
+ Uint8Array.fromHex has default data property attributes.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array, 'fromHex', {
+ enumerable: false,
+ writable: true,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/ignores-receiver.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/ignores-receiver.js
new file mode 100644
index 0000000000..421a0278d7
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/ignores-receiver.js
@@ -0,0 +1,22 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.fromhex
+description: Uint8Array.fromHex ignores its receiver
+features: [uint8array-base64, TypedArray]
+---*/
+
+var fromHex = Uint8Array.fromHex;
+var noReceiver = fromHex("aa");
+assert.sameValue(Object.getPrototypeOf(noReceiver), Uint8Array.prototype);
+
+class Subclass extends Uint8Array {
+ constructor() {
+ throw new Test262Error("subclass constructor called");
+ }
+}
+var fromSubclass = Subclass.fromHex("aa");
+assert.sameValue(Object.getPrototypeOf(fromSubclass), Uint8Array.prototype);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/illegal-characters.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/illegal-characters.js
new file mode 100644
index 0000000000..718801e4c7
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/illegal-characters.js
@@ -0,0 +1,28 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.fromhex
+description: Uint8Array.fromHex throws a SyntaxError when input has non-hex characters
+features: [uint8array-base64, TypedArray]
+---*/
+
+var illegal = [
+ 'a.a',
+ 'aa^',
+ 'a a',
+ 'a\ta',
+ 'a\x0Aa',
+ 'a\x0Ca',
+ 'a\x0Da',
+ 'a\u00A0a', // nbsp
+ 'a\u2009a', // thin space
+ 'a\u2028a', // line separator
+];
+illegal.forEach(function(value) {
+ assert.throws(SyntaxError, function() {
+ Uint8Array.fromHex(value)
+ });
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/length.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/length.js
new file mode 100644
index 0000000000..9108a1c2c1
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/length.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.fromhex
+description: >
+ Uint8Array.fromHex.length is 1.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.fromHex, 'length', {
+ value: 1,
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/name.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/name.js
new file mode 100644
index 0000000000..7d4e145349
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/name.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.fromhex
+description: >
+ Uint8Array.fromHex.name is "fromHex".
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.fromHex, 'name', {
+ value: 'fromHex',
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/nonconstructor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/nonconstructor.js
new file mode 100644
index 0000000000..6e34171e77
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/nonconstructor.js
@@ -0,0 +1,18 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.fromhex
+description: >
+ Uint8Array.fromHex is not a constructor function.
+includes: [isConstructor.js]
+features: [uint8array-base64, TypedArray, Reflect.construct]
+---*/
+
+assert(!isConstructor(Uint8Array.fromHex), "Uint8Array.fromHex is not a constructor");
+
+assert.throws(TypeError, function() {
+ new Uint8Array.fromHex('');
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/odd-length-input.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/odd-length-input.js
new file mode 100644
index 0000000000..fe7860a083
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/odd-length-input.js
@@ -0,0 +1,14 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.fromhex
+description: Uint8Array.fromHex throws if given an odd number of input hex characters
+features: [uint8array-base64, TypedArray]
+---*/
+
+assert.throws(SyntaxError, function() {
+ Uint8Array.fromHex('a');
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/results.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/results.js
new file mode 100644
index 0000000000..d40beab7cd
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/results.js
@@ -0,0 +1,31 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.fromhex
+description: Conversion of hex strings to Uint8Arrays
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var cases = [
+ ["", []],
+ ["66", [102]],
+ ["666f", [102, 111]],
+ ["666F", [102, 111]],
+ ["666f6f", [102, 111, 111]],
+ ["666F6f", [102, 111, 111]],
+ ["666f6f62", [102, 111, 111, 98]],
+ ["666f6f6261", [102, 111, 111, 98, 97]],
+ ["666f6f626172", [102, 111, 111, 98, 97, 114]],
+];
+
+cases.forEach(function (pair) {
+ var arr = Uint8Array.fromHex(pair[0]);
+ assert.sameValue(Object.getPrototypeOf(arr), Uint8Array.prototype, "decoding " + pair[0]);
+ assert.sameValue(arr.length, pair[1].length, "decoding " + pair[0]);
+ assert.sameValue(arr.buffer.byteLength, pair[1].length, "decoding " + pair[0]);
+ assert.compareArray(arr, pair[1], "decoding " + pair[0]);
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/shell.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/shell.js
new file mode 100644
index 0000000000..eda1477282
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/shell.js
@@ -0,0 +1,24 @@
+// GENERATED, DO NOT EDIT
+// file: isConstructor.js
+// Copyright (C) 2017 André Bargull. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+
+/*---
+description: |
+ Test if a given function is a constructor function.
+defines: [isConstructor]
+features: [Reflect.construct]
+---*/
+
+function isConstructor(f) {
+ if (typeof f !== "function") {
+ throw new Test262Error("isConstructor invoked with a non-function value");
+ }
+
+ try {
+ Reflect.construct(function(){}, [], f);
+ } catch (e) {
+ return false;
+ }
+ return true;
+}
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/string-coercion.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/string-coercion.js
new file mode 100644
index 0000000000..fea57227fc
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/fromHex/string-coercion.js
@@ -0,0 +1,23 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.fromhex
+description: Uint8Array.fromHex throws if its argument is not a string
+features: [uint8array-base64, TypedArray]
+---*/
+
+var toStringCalls = 0;
+var throwyToString = {
+ toString: function() {
+ toStringCalls += 1;
+ throw new Test262Error("toString called");
+ }
+};
+
+assert.throws(TypeError, function() {
+ Uint8Array.fromHex(throwyToString);
+});
+assert.sameValue(toStringCalls, 0);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/browser.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/browser.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/browser.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/alphabet.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/alphabet.js
new file mode 100644
index 0000000000..bb4acdb8f2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/alphabet.js
@@ -0,0 +1,44 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: Conversion of base64 strings to Uint8Arrays exercising the alphabet option
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var target = new Uint8Array([255, 255, 255, 255]);
+var result = target.setFromBase64('x+/y');
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [199, 239, 242, 255]);
+
+var target = new Uint8Array([255, 255, 255, 255]);
+var result = target.setFromBase64('x+/y', { alphabet: 'base64' });
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [199, 239, 242, 255]);
+
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255]);
+ target.setFromBase64('x+/y', { alphabet: 'base64url' });
+});
+
+
+var target = new Uint8Array([255, 255, 255, 255]);
+var result = target.setFromBase64('x-_y', { alphabet: 'base64url' });
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [199, 239, 242, 255]);
+
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255]);
+ target.setFromBase64('x-_y');
+});
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255]);
+ target.setFromBase64('x-_y', { alphabet: 'base64' });
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/browser.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/browser.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/browser.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/descriptor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/descriptor.js
new file mode 100644
index 0000000000..bd319f7d43
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/descriptor.js
@@ -0,0 +1,18 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: >
+ Uint8Array.prototype.setFromBase64 has default data property attributes.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype, 'setFromBase64', {
+ enumerable: false,
+ writable: true,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/detached-buffer.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/detached-buffer.js
new file mode 100644
index 0000000000..3dd042bb02
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/detached-buffer.js
@@ -0,0 +1,32 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: Uint8Array.prototype.setFromBase64 throws on detached buffers
+includes: [detachArrayBuffer.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var target = new Uint8Array([255, 255, 255]);
+$DETACHBUFFER(target.buffer);
+assert.throws(TypeError, function() {
+ target.setFromBase64('Zg==');
+});
+
+var getterCalls = 0;
+var targetDetachingOptions = {};
+Object.defineProperty(targetDetachingOptions, 'alphabet', {
+ get: function() {
+ getterCalls += 1;
+ $DETACHBUFFER(target.buffer);
+ return "base64";
+ }
+});
+var target = new Uint8Array([255, 255, 255]);
+assert.throws(TypeError, function() {
+ target.setFromBase64('Zg==', targetDetachingOptions);
+});
+assert.sameValue(getterCalls, 1);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/illegal-characters.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/illegal-characters.js
new file mode 100644
index 0000000000..9c6b827df8
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/illegal-characters.js
@@ -0,0 +1,27 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: Uint8Array.prototype.setFromBase64 throws a SyntaxError when input has non-base64, non-ascii-whitespace characters
+features: [uint8array-base64, TypedArray]
+---*/
+
+var illegal = [
+ 'Zm.9v',
+ 'Zm9v^',
+ 'Zg==&',
+ 'Z−==', // U+2212 'Minus Sign'
+ 'Z＋==', // U+FF0B 'Fullwidth Plus Sign'
+ 'Zg\u00A0==', // nbsp
+ 'Zg\u2009==', // thin space
+ 'Zg\u2028==', // line separator
+];
+illegal.forEach(function(value) {
+ assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255]);
+ target.setFromBase64(value);
+ });
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/last-chunk-handling.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/last-chunk-handling.js
new file mode 100644
index 0000000000..958a89b10e
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/last-chunk-handling.js
@@ -0,0 +1,156 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: Handling of final chunks in target.setFromBase64
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+// padding
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZg==');
+assert.sameValue(result.read, 8);
+assert.sameValue(result.written, 4);
+assert.compareArray(target, [101, 120, 97, 102, 255, 255]);
+
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZg==', { lastChunkHandling: 'loose' });
+assert.sameValue(result.read, 8);
+assert.sameValue(result.written, 4);
+assert.compareArray(target, [101, 120, 97, 102, 255, 255]);
+
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZg==', { lastChunkHandling: 'stop-before-partial' });
+assert.sameValue(result.read, 8);
+assert.sameValue(result.written, 4);
+assert.compareArray(target, [101, 120, 97, 102, 255, 255]);
+
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZg==', { lastChunkHandling: 'strict' });
+assert.sameValue(result.read, 8);
+assert.sameValue(result.written, 4);
+assert.compareArray(target, [101, 120, 97, 102, 255, 255]);
+
+
+// no padding
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZg');
+assert.sameValue(result.read, 6);
+assert.sameValue(result.written, 4);
+assert.compareArray(target, [101, 120, 97, 102, 255, 255]);
+
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZg', { lastChunkHandling: 'loose' });
+assert.sameValue(result.read, 6);
+assert.sameValue(result.written, 4);
+assert.compareArray(target, [101, 120, 97, 102, 255, 255]);
+
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZg', { lastChunkHandling: 'stop-before-partial' });
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [101, 120, 97, 255, 255, 255]);
+
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+ target.setFromBase64('ZXhhZg', { lastChunkHandling: 'strict' });
+});
+
+
+// non-zero padding bits
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZh==');
+assert.sameValue(result.read, 8);
+assert.sameValue(result.written, 4);
+assert.compareArray(target, [101, 120, 97, 102, 255, 255]);
+
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZh==', { lastChunkHandling: 'loose' });
+assert.sameValue(result.read, 8);
+assert.sameValue(result.written, 4);
+assert.compareArray(target, [101, 120, 97, 102, 255, 255]);
+
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZh==', { lastChunkHandling: 'stop-before-partial' });
+assert.sameValue(result.read, 8);
+assert.sameValue(result.written, 4);
+assert.compareArray(target, [101, 120, 97, 102, 255, 255]);
+
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+ target.setFromBase64('ZXhhZh==', { lastChunkHandling: 'strict' });
+});
+
+
+// non-zero padding bits, no padding
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZh');
+assert.sameValue(result.read, 6);
+assert.sameValue(result.written, 4);
+assert.compareArray(target, [101, 120, 97, 102, 255, 255]);
+
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZh', { lastChunkHandling: 'loose' });
+assert.sameValue(result.read, 6);
+assert.sameValue(result.written, 4);
+assert.compareArray(target, [101, 120, 97, 102, 255, 255]);
+
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZh', { lastChunkHandling: 'stop-before-partial' });
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [101, 120, 97, 255, 255, 255]);
+
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+ target.setFromBase64('ZXhhZh', { lastChunkHandling: 'strict' });
+});
+
+
+// partial padding
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+ target.setFromBase64('ZXhhZg=');
+});
+
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+ target.setFromBase64('ZXhhZg=', { lastChunkHandling: 'loose' });
+});
+
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('ZXhhZg=', { lastChunkHandling: 'stop-before-partial' });
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [101, 120, 97, 255, 255, 255]);
+
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+ target.setFromBase64('ZXhhZg=', { lastChunkHandling: 'strict' });
+});
+
+
+// excess padding
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+ target.setFromBase64('ZXhhZg===');
+});
+
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+ target.setFromBase64('ZXhhZg===', { lastChunkHandling: 'loose' });
+});
+
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+ target.setFromBase64('ZXhhZg===', { lastChunkHandling: 'stop-before-partial' });
+});
+
+assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+ target.setFromBase64('ZXhhZg===', { lastChunkHandling: 'strict' });
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/length.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/length.js
new file mode 100644
index 0000000000..df97101a14
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/length.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: >
+ Uint8Array.prototype.setFromBase64.length is 1.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype.setFromBase64, 'length', {
+ value: 1,
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/name.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/name.js
new file mode 100644
index 0000000000..c38be74532
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/name.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: >
+ Uint8Array.prototype.setFromBase64.name is "setFromBase64".
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype.setFromBase64, 'name', {
+ value: 'setFromBase64',
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/nonconstructor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/nonconstructor.js
new file mode 100644
index 0000000000..46f5078ff5
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/nonconstructor.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: >
+ Uint8Array.prototype.setFromBase64 is not a constructor function.
+includes: [isConstructor.js]
+features: [uint8array-base64, TypedArray, Reflect.construct]
+---*/
+
+assert(!isConstructor(Uint8Array.prototype.setFromBase64), "Uint8Array.prototype.setFromBase64 is not a constructor");
+
+assert.throws(TypeError, function() {
+ var target = new Uint8Array(10);
+ new target.setFromBase64('');
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/option-coercion.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/option-coercion.js
new file mode 100644
index 0000000000..90bd1e330d
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/option-coercion.js
@@ -0,0 +1,72 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: Uint8Array.prototype.setFromBase64 triggers effects of the "alphabet" and "lastChunkHandling" getters, but does not perform toString on the results
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+assert.throws(TypeError, function() {
+ var target = new Uint8Array([255, 255, 255]);
+ target.setFromBase64("Zg==", { alphabet: Object("base64") });
+});
+
+assert.throws(TypeError, function() {
+ var target = new Uint8Array([255, 255, 255]);
+ target.setFromBase64("Zg==", { lastChunkHandling: Object("strict") });
+});
+
+
+var toStringCalls = 0;
+var throwyToString = {
+ toString: function() {
+ toStringCalls += 1;
+ throw new Test262Error("toString called");
+ }
+};
+assert.throws(TypeError, function() {
+ var target = new Uint8Array([255, 255, 255]);
+ target.setFromBase64("Zg==", { alphabet: throwyToString });
+});
+assert.sameValue(toStringCalls, 0);
+
+assert.throws(TypeError, function() {
+ var target = new Uint8Array([255, 255, 255]);
+ target.setFromBase64("Zg==", { lastChunkHandling: throwyToString });
+});
+assert.sameValue(toStringCalls, 0);
+
+
+var alphabetAccesses = 0;
+var base64UrlOptions = {};
+Object.defineProperty(base64UrlOptions, "alphabet", {
+ get: function() {
+ alphabetAccesses += 1;
+ return "base64url";
+ }
+});
+var target = new Uint8Array([255, 255, 255, 255]);
+var result = target.setFromBase64("x-_y", base64UrlOptions);
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [199, 239, 242, 255]);
+assert.sameValue(alphabetAccesses, 1);
+
+var lastChunkHandlingAccesses = 0;
+var strictOptions = {};
+Object.defineProperty(strictOptions, "lastChunkHandling", {
+ get: function() {
+ lastChunkHandlingAccesses += 1;
+ return "strict";
+ }
+});
+var target = new Uint8Array([255, 255, 255, 255]);
+var result = target.setFromBase64("Zg==", strictOptions);
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 1);
+assert.compareArray(target, [102, 255, 255, 255]);
+assert.sameValue(lastChunkHandlingAccesses, 1);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/results.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/results.js
new file mode 100644
index 0000000000..2c39bab318
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/results.js
@@ -0,0 +1,33 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: Conversion of base64 strings to Uint8Arrays
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+// standard test vectors from https://datatracker.ietf.org/doc/html/rfc4648#section-10
+var standardBase64Vectors = [
+ ["", []],
+ ["Zg==", [102]],
+ ["Zm8=", [102, 111]],
+ ["Zm9v", [102, 111, 111]],
+ ["Zm9vYg==", [102, 111, 111, 98]],
+ ["Zm9vYmE=", [102, 111, 111, 98, 97]],
+ ["Zm9vYmFy", [102, 111, 111, 98, 97, 114]],
+];
+
+standardBase64Vectors.forEach(function (pair) {
+ var allFF = [255, 255, 255, 255, 255, 255, 255, 255];
+ var target = new Uint8Array(allFF);
+ var result = target.setFromBase64(pair[0]);
+ assert.sameValue(result.read, pair[0].length);
+ assert.sameValue(result.written, pair[1].length);
+
+ var expected = pair[1].concat(allFF.slice(pair[1].length))
+ assert.compareArray(target, expected, "decoding " + pair[0]);
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/shell.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/shell.js
new file mode 100644
index 0000000000..a7590326c3
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/shell.js
@@ -0,0 +1,42 @@
+// GENERATED, DO NOT EDIT
+// file: detachArrayBuffer.js
+// Copyright (C) 2016 the V8 project authors. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+description: |
+ A function used in the process of asserting correctness of TypedArray objects.
+
+ $262.detachArrayBuffer is defined by a host.
+defines: [$DETACHBUFFER]
+---*/
+
+function $DETACHBUFFER(buffer) {
+ if (!$262 || typeof $262.detachArrayBuffer !== "function") {
+ throw new Test262Error("No method available to detach an ArrayBuffer");
+ }
+ $262.detachArrayBuffer(buffer);
+}
+
+// file: isConstructor.js
+// Copyright (C) 2017 André Bargull. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+
+/*---
+description: |
+ Test if a given function is a constructor function.
+defines: [isConstructor]
+features: [Reflect.construct]
+---*/
+
+function isConstructor(f) {
+ if (typeof f !== "function") {
+ throw new Test262Error("isConstructor invoked with a non-function value");
+ }
+
+ try {
+ Reflect.construct(function(){}, [], f);
+ } catch (e) {
+ return false;
+ }
+ return true;
+}
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/string-coercion.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/string-coercion.js
new file mode 100644
index 0000000000..7c479a78cd
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/string-coercion.js
@@ -0,0 +1,46 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: Uint8Array.prototype.setFromBase64 throws if its first argument is not a string
+features: [uint8array-base64, TypedArray]
+---*/
+
+var toStringCalls = 0;
+var throwyToString = {
+ toString: function() {
+ toStringCalls += 1;
+ throw new Test262Error("toString called");
+ }
+};
+
+assert.throws(TypeError, function() {
+ var target = new Uint8Array(10);
+ target.setFromBase64(throwyToString);
+});
+assert.sameValue(toStringCalls, 0);
+
+
+var optionAccesses = 0;
+var touchyOptions = {};
+Object.defineProperty(touchyOptions, "alphabet", {
+ get: function() {
+ optionAccesses += 1;
+ throw new Test262Error("alphabet accessed");
+ }
+});
+Object.defineProperty(touchyOptions, "lastChunkHandling", {
+ get: function() {
+ optionAccesses += 1;
+ throw new Test262Error("lastChunkHandling accessed");
+ }
+});
+assert.throws(TypeError, function() {
+ var target = new Uint8Array(10);
+ target.setFromBase64(throwyToString, touchyOptions);
+});
+assert.sameValue(toStringCalls, 0);
+assert.sameValue(optionAccesses, 0);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/subarray.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/subarray.js
new file mode 100644
index 0000000000..ffffd227b9
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/subarray.js
@@ -0,0 +1,20 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: Uint8Array.prototype.setFromBase64 takes into account the offset of the target Uint8Array
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var base = new Uint8Array([255, 255, 255, 255, 255, 255, 255]);
+var subarray = base.subarray(2, 5);
+
+var result = subarray.setFromBase64('Zm9vYmFy');
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 3);
+assert.compareArray(subarray, [102, 111, 111]);
+assert.compareArray(base, [255, 255, 102, 111, 111, 255, 255]);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/target-size.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/target-size.js
new file mode 100644
index 0000000000..4e38423621
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/target-size.js
@@ -0,0 +1,67 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: Uint8Array.prototype.setFromBase64 behavior when target buffer is small
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+// buffer too small
+var target = new Uint8Array([255, 255, 255, 255, 255]);
+var result = target.setFromBase64('Zm9vYmFy');
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [102, 111, 111, 255, 255]);
+
+// buffer too small, padded
+var target = new Uint8Array([255, 255, 255, 255]);
+var result = target.setFromBase64('Zm9vYmE=');
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [102, 111, 111, 255]);
+
+// buffer exact
+var target = new Uint8Array([255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('Zm9vYmFy');
+assert.sameValue(result.read, 8);
+assert.sameValue(result.written, 6);
+assert.compareArray(target, [102, 111, 111, 98, 97, 114]);
+
+// buffer exact, padded
+var target = new Uint8Array([255, 255, 255, 255, 255]);
+var result = target.setFromBase64('Zm9vYmE=');
+assert.sameValue(result.read, 8);
+assert.sameValue(result.written, 5);
+assert.compareArray(target, [102, 111, 111, 98, 97]);
+
+// buffer exact, not padded
+var target = new Uint8Array([255, 255, 255, 255, 255]);
+var result = target.setFromBase64('Zm9vYmE');
+assert.sameValue(result.read, 7);
+assert.sameValue(result.written, 5);
+assert.compareArray(target, [102, 111, 111, 98, 97]);
+
+// buffer exact, padded, stop-before-partial
+var target = new Uint8Array([255, 255, 255, 255, 255]);
+var result = target.setFromBase64('Zm9vYmE=', { lastChunkHandling: 'stop-before-partial' });
+assert.sameValue(result.read, 8);
+assert.sameValue(result.written, 5);
+assert.compareArray(target, [102, 111, 111, 98, 97]);
+
+// buffer exact, not padded, stop-before-partial
+var target = new Uint8Array([255, 255, 255, 255, 255]);
+var result = target.setFromBase64('Zm9vYmE', { lastChunkHandling: 'stop-before-partial' });
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [102, 111, 111, 255, 255]);
+
+// buffer too large
+var target = new Uint8Array([255, 255, 255, 255, 255, 255, 255]);
+var result = target.setFromBase64('Zm9vYmFy');
+assert.sameValue(result.read, 8);
+assert.sameValue(result.written, 6);
+assert.compareArray(target, [102, 111, 111, 98, 97, 114, 255]);
+
+reportCompare(0, 0);
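
The { read, written } result exercised above is what allows decoding a long string through a small, fixed scratch buffer. A minimal illustrative sketch, not part of the patch, assuming the proposal is enabled (decodeThroughScratch is a hypothetical helper name):

    function decodeThroughScratch(b64, scratch) {
      var out = [];
      var offset = 0;
      while (offset < b64.length) {
        var res = scratch.setFromBase64(b64.slice(offset),
                                        { lastChunkHandling: "stop-before-partial" });
        if (res.written === 0) break;  // only a partial chunk (or nothing decodable) is left
        for (var i = 0; i < res.written; i++) out.push(scratch[i]);
        offset += res.read;
      }
      return out;
    }
    // decodeThroughScratch("Zm9vYmFy", new Uint8Array(3)) yields [102, 111, 111, 98, 97, 114]
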
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/whitespace.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/whitespace.js
new file mode 100644
index 0000000000..a09673d2bd
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromBase64/whitespace.js
@@ -0,0 +1,26 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfrombase64
+description: Uint8Array.prototype.setFromBase64 ignores ASCII whitespace in the input
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var whitespaceKinds = [
+ ["Z g==", "space"],
+ ["Z\tg==", "tab"],
+ ["Z\x0Ag==", "LF"],
+ ["Z\x0Cg==", "FF"],
+ ["Z\x0Dg==", "CR"],
+];
+whitespaceKinds.forEach(function(pair) {
+ var target = new Uint8Array([255, 255, 255]);
+ var result = target.setFromBase64(pair[0]);
+ assert.sameValue(result.read, 5);
+ assert.sameValue(result.written, 1);
+ assert.compareArray(target, [102, 255, 255], "ascii whitespace: " + pair[1]);
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/browser.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/browser.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/browser.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/descriptor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/descriptor.js
new file mode 100644
index 0000000000..3d1a4c996a
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/descriptor.js
@@ -0,0 +1,18 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfromhex
+description: >
+ Uint8Array.prototype.setFromHex has default data property attributes.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype, 'setFromHex', {
+ enumerable: false,
+ writable: true,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/detached-buffer.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/detached-buffer.js
new file mode 100644
index 0000000000..5ba8e15727
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/detached-buffer.js
@@ -0,0 +1,17 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfromhex
+description: Uint8Array.prototype.setFromHex throws on detached buffers
+includes: [detachArrayBuffer.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var target = new Uint8Array([255, 255, 255]);
+$DETACHBUFFER(target.buffer);
+assert.throws(TypeError, function() {
+ target.setFromHex('aa');
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/illegal-characters.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/illegal-characters.js
new file mode 100644
index 0000000000..1c30b4cffd
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/illegal-characters.js
@@ -0,0 +1,29 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfromhex
+description: Uint8Array.prototype.setFromHex throws a SyntaxError when input has non-hex characters
+features: [uint8array-base64, TypedArray]
+---*/
+
+var illegal = [
+ 'a.a',
+ 'aa^',
+ 'a a',
+ 'a\ta',
+ 'a\x0Aa',
+ 'a\x0Ca',
+ 'a\x0Da',
+ 'a\u00A0a', // nbsp
+ 'a\u2009a', // thin space
+ 'a\u2028a', // line separator
+];
+illegal.forEach(function(value) {
+ assert.throws(SyntaxError, function() {
+ var target = new Uint8Array([255, 255, 255, 255, 255]);
+ target.setFromHex(value);
+ });
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/length.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/length.js
new file mode 100644
index 0000000000..b0b56ef761
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/length.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfromhex
+description: >
+ Uint8Array.prototype.setFromHex.length is 1.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype.setFromHex, 'length', {
+ value: 1,
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/name.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/name.js
new file mode 100644
index 0000000000..cbc06e6f3d
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/name.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfromhex
+description: >
+ Uint8Array.prototype.setFromHex.name is "setFromHex".
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype.setFromHex, 'name', {
+ value: 'setFromHex',
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/nonconstructor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/nonconstructor.js
new file mode 100644
index 0000000000..196b962bdf
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/nonconstructor.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfromhex
+description: >
+ Uint8Array.prototype.setFromHex is not a constructor function.
+includes: [isConstructor.js]
+features: [uint8array-base64, TypedArray, Reflect.construct]
+---*/
+
+assert(!isConstructor(Uint8Array.prototype.setFromHex), "Uint8Array.prototype.setFromHex is not a constructor");
+
+assert.throws(TypeError, function() {
+ var target = new Uint8Array(10);
+ new target.setFromHex('');
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/results.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/results.js
new file mode 100644
index 0000000000..c7749c6257
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/results.js
@@ -0,0 +1,34 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfromhex
+description: Conversion of hex strings to Uint8Arrays
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var cases = [
+ ["", []],
+ ["66", [102]],
+ ["666f", [102, 111]],
+ ["666F", [102, 111]],
+ ["666f6f", [102, 111, 111]],
+ ["666F6f", [102, 111, 111]],
+ ["666f6f62", [102, 111, 111, 98]],
+ ["666f6f6261", [102, 111, 111, 98, 97]],
+ ["666f6f626172", [102, 111, 111, 98, 97, 114]],
+];
+
+cases.forEach(function (pair) {
+ var allFF = [255, 255, 255, 255, 255, 255, 255, 255];
+ var target = new Uint8Array(allFF);
+ var result = target.setFromHex(pair[0]);
+ assert.sameValue(result.read, pair[0].length);
+ assert.sameValue(result.written, pair[1].length);
+
+ var expected = pair[1].concat(allFF.slice(pair[1].length))
+ assert.compareArray(target, expected, "decoding " + pair[0]);
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/shell.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/shell.js
new file mode 100644
index 0000000000..a7590326c3
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/shell.js
@@ -0,0 +1,42 @@
+// GENERATED, DO NOT EDIT
+// file: detachArrayBuffer.js
+// Copyright (C) 2016 the V8 project authors. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+description: |
+ A function used in the process of asserting correctness of TypedArray objects.
+
+ $262.detachArrayBuffer is defined by a host.
+defines: [$DETACHBUFFER]
+---*/
+
+function $DETACHBUFFER(buffer) {
+ if (!$262 || typeof $262.detachArrayBuffer !== "function") {
+ throw new Test262Error("No method available to detach an ArrayBuffer");
+ }
+ $262.detachArrayBuffer(buffer);
+}
+
+// file: isConstructor.js
+// Copyright (C) 2017 André Bargull. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+
+/*---
+description: |
+ Test if a given function is a constructor function.
+defines: [isConstructor]
+features: [Reflect.construct]
+---*/
+
+function isConstructor(f) {
+ if (typeof f !== "function") {
+ throw new Test262Error("isConstructor invoked with a non-function value");
+ }
+
+ try {
+ Reflect.construct(function(){}, [], f);
+ } catch (e) {
+ return false;
+ }
+ return true;
+}
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/string-coercion.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/string-coercion.js
new file mode 100644
index 0000000000..9a6f5f84c0
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/string-coercion.js
@@ -0,0 +1,24 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfromhex
+description: Uint8Array.prototype.setFromHex throws if its first argument is not a string
+features: [uint8array-base64, TypedArray]
+---*/
+
+var toStringCalls = 0;
+var throwyToString = {
+ toString: function() {
+ toStringCalls += 1;
+ throw new Test262Error("toString called");
+ }
+};
+
+assert.throws(TypeError, function() {
+ var target = new Uint8Array(10);
+ target.setFromHex(throwyToString);
+});
+assert.sameValue(toStringCalls, 0);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/subarray.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/subarray.js
new file mode 100644
index 0000000000..45879d8cb3
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/subarray.js
@@ -0,0 +1,20 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfromhex
+description: Uint8Array.prototype.setFromHex takes into account the offset of the target Uint8Array
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var base = new Uint8Array([255, 255, 255, 255, 255, 255, 255]);
+var subarray = base.subarray(2, 5);
+
+var result = subarray.setFromHex('aabbcc');
+assert.sameValue(result.read, 6);
+assert.sameValue(result.written, 3);
+assert.compareArray(subarray, [170, 187, 204]);
+assert.compareArray(base, [255, 255, 170, 187, 204, 255, 255]);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/target-size.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/target-size.js
new file mode 100644
index 0000000000..d73fa1749d
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/setFromHex/target-size.js
@@ -0,0 +1,32 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.setfromhex
+description: Uint8Array.prototype.setFromHex behavior when target buffer is small
+includes: [compareArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+// buffer too small
+var target = new Uint8Array([255, 255]);
+var result = target.setFromHex('aabbcc');
+assert.sameValue(result.read, 4);
+assert.sameValue(result.written, 2);
+assert.compareArray(target, [170, 187]);
+
+// buffer exact
+var target = new Uint8Array([255, 255, 255]);
+var result = target.setFromHex('aabbcc');
+assert.sameValue(result.read, 6);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [170, 187, 204]);
+
+// buffer too large
+var target = new Uint8Array([255, 255, 255, 255]);
+var result = target.setFromHex('aabbcc');
+assert.sameValue(result.read, 6);
+assert.sameValue(result.written, 3);
+assert.compareArray(target, [170, 187, 204, 255]);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/shell.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/shell.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/shell.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/alphabet.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/alphabet.js
new file mode 100644
index 0000000000..7edf0a379e
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/alphabet.js
@@ -0,0 +1,20 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tobase64
+description: Conversion of Uint8Arrays to base64 strings exercising the alphabet option
+features: [uint8array-base64, TypedArray]
+---*/
+
+assert.sameValue((new Uint8Array([199, 239, 242])).toBase64(), "x+/y");
+
+assert.sameValue((new Uint8Array([199, 239, 242])).toBase64({ alphabet: 'base64' }), "x+/y");
+
+assert.sameValue((new Uint8Array([199, 239, 242])).toBase64({ alphabet: 'base64url' }), "x-_y");
+
+assert.throws(TypeError, function() {
+ (new Uint8Array([199, 239, 242])).toBase64({ alphabet: 'other' });
+});
+
+reportCompare(0, 0);
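
The two alphabets differ only in the characters used for values 62 and 63 ("+" and "/" versus "-" and "_"), so a round trip has to name the same alphabet on both sides. A minimal illustrative sketch, not part of the patch, assuming the proposal is enabled:

    var bytes = new Uint8Array([199, 239, 242]);
    var std = bytes.toBase64();                           // "x+/y"
    var url = bytes.toBase64({ alphabet: "base64url" });  // "x-_y"
    var back = Uint8Array.fromBase64(url, { alphabet: "base64url" });
    // back holds [199, 239, 242]; decoding "x-_y" with the default alphabet
    // would instead reject the "-" and "_" characters.
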
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/browser.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/browser.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/browser.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/descriptor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/descriptor.js
new file mode 100644
index 0000000000..9c34719bf1
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/descriptor.js
@@ -0,0 +1,18 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tobase64
+description: >
+ Uint8Array.prototype.toBase64 has default data property attributes.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype, 'toBase64', {
+ enumerable: false,
+ writable: true,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/detached-buffer.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/detached-buffer.js
new file mode 100644
index 0000000000..0904454743
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/detached-buffer.js
@@ -0,0 +1,42 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tobase64
+description: Uint8Array.prototype.toBase64 checks for a detached buffer only after option side-effects have finished
+includes: [detachArrayBuffer.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var array = new Uint8Array(2);
+var getterCalls = 0;
+var receiverDetachingOptions = {};
+Object.defineProperty(receiverDetachingOptions, "alphabet", {
+ get: function() {
+ getterCalls += 1;
+ $DETACHBUFFER(array.buffer);
+ return "base64";
+ }
+});
+assert.throws(TypeError, function() {
+ array.toBase64(receiverDetachingOptions);
+});
+assert.sameValue(getterCalls, 1);
+
+
+var detached = new Uint8Array(2);
+$DETACHBUFFER(detached.buffer);
+var getterCalls = 0;
+var sideEffectingOptions = {};
+Object.defineProperty(sideEffectingOptions, "alphabet", {
+ get: function() {
+ getterCalls += 1;
+ return "base64";
+ }
+});
+assert.throws(TypeError, function() {
+ detached.toBase64(sideEffectingOptions);
+});
+assert.sameValue(getterCalls, 1);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/length.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/length.js
new file mode 100644
index 0000000000..9e4aabb9ed
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/length.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tobase64
+description: >
+ Uint8Array.prototype.toBase64.length is 0.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype.toBase64, 'length', {
+ value: 0,
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/name.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/name.js
new file mode 100644
index 0000000000..2510648b76
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/name.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tobase64
+description: >
+ Uint8Array.prototype.toBase64.name is "toBase64".
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype.toBase64, 'name', {
+ value: 'toBase64',
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/nonconstructor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/nonconstructor.js
new file mode 100644
index 0000000000..79280e2d5d
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/nonconstructor.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tobase64
+description: >
+ Uint8Array.prototype.toBase64 is not a constructor function.
+includes: [isConstructor.js]
+features: [uint8array-base64, TypedArray, Reflect.construct]
+---*/
+
+assert(!isConstructor(Uint8Array.prototype.toBase64), "Uint8Array.prototype.toBase64 is not a constructor");
+
+var uint8Array = new Uint8Array(8);
+assert.throws(TypeError, function() {
+ new uint8Array.toBase64();
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/option-coercion.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/option-coercion.js
new file mode 100644
index 0000000000..054a3a5e0d
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/option-coercion.js
@@ -0,0 +1,51 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tobase64
+description: Uint8Array.prototype.toBase64 triggers effects of the "alphabet" getter, but does not perform toString on the result
+features: [uint8array-base64, TypedArray]
+---*/
+
+assert.throws(TypeError, function() {
+ (new Uint8Array(2)).toBase64({ alphabet: Object("base64") });
+});
+
+
+var toStringCalls = 0;
+var throwyToString = {
+ toString: function() {
+ toStringCalls += 1;
+ throw new Test262Error("toString called on alphabet value");
+ }
+};
+assert.throws(TypeError, function() {
+ (new Uint8Array(2)).toBase64({ alphabet: throwyToString });
+});
+assert.sameValue(toStringCalls, 0);
+
+var alphabetAccesses = 0;
+var base64UrlOptions = {};
+Object.defineProperty(base64UrlOptions, "alphabet", {
+ get: function() {
+ alphabetAccesses += 1;
+ return "base64url";
+ }
+});
+assert.sameValue((new Uint8Array([199, 239, 242])).toBase64(base64UrlOptions), "x-_y");
+assert.sameValue(alphabetAccesses, 1);
+
+// side-effects from the getter on the receiver are reflected in the result
+var array = new Uint8Array([0]);
+var receiverMutatingOptions = {};
+Object.defineProperty(receiverMutatingOptions, "alphabet", {
+ get: function() {
+ array[0] = 255;
+ return "base64";
+ }
+});
+var result = array.toBase64(receiverMutatingOptions);
+assert.sameValue(result, "/w==");
+assert.sameValue(array[0], 255);
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/receiver-not-uint8array.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/receiver-not-uint8array.js
new file mode 100644
index 0000000000..53352ce4a5
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/receiver-not-uint8array.js
@@ -0,0 +1,36 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tobase64
+description: Uint8Array.prototype.toBase64 throws if the receiver is not a Uint8Array
+includes: [testTypedArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var toBase64 = Uint8Array.prototype.toBase64;
+
+var options = {};
+Object.defineProperty(options, "alphabet", {
+ get: function() {
+ throw new Test262Error("options.alphabet accessed despite incompatible receiver");
+ }
+});
+
+testWithTypedArrayConstructors(function(TA) {
+ if (TA === Uint8Array) return;
+ var sample = new TA(2);
+ assert.throws(TypeError, function() {
+ Uint8Array.prototype.toBase64.call(sample, options);
+ });
+});
+
+assert.throws(TypeError, function() {
+ Uint8Array.prototype.toBase64.call([], options);
+});
+
+assert.throws(TypeError, function() {
+ toBase64(options);
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/results.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/results.js
new file mode 100644
index 0000000000..5ce75170b7
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/results.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tobase64
+description: Conversion of Uint8Arrays to base64 strings
+features: [uint8array-base64, TypedArray]
+---*/
+
+// standard test vectors from https://datatracker.ietf.org/doc/html/rfc4648#section-10
+assert.sameValue((new Uint8Array([])).toBase64(), "");
+assert.sameValue((new Uint8Array([102])).toBase64(), "Zg==");
+assert.sameValue((new Uint8Array([102, 111])).toBase64(), "Zm8=");
+assert.sameValue((new Uint8Array([102, 111, 111])).toBase64(), "Zm9v");
+assert.sameValue((new Uint8Array([102, 111, 111, 98])).toBase64(), "Zm9vYg==");
+assert.sameValue((new Uint8Array([102, 111, 111, 98, 97])).toBase64(), "Zm9vYmE=");
+assert.sameValue((new Uint8Array([102, 111, 111, 98, 97, 114])).toBase64(), "Zm9vYmFy");
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/shell.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/shell.js
new file mode 100644
index 0000000000..2af708ed59
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toBase64/shell.js
@@ -0,0 +1,203 @@
+// GENERATED, DO NOT EDIT
+// file: detachArrayBuffer.js
+// Copyright (C) 2016 the V8 project authors. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+description: |
+ A function used in the process of asserting correctness of TypedArray objects.
+
+ $262.detachArrayBuffer is defined by a host.
+defines: [$DETACHBUFFER]
+---*/
+
+function $DETACHBUFFER(buffer) {
+ if (!$262 || typeof $262.detachArrayBuffer !== "function") {
+ throw new Test262Error("No method available to detach an ArrayBuffer");
+ }
+ $262.detachArrayBuffer(buffer);
+}
+
+// file: isConstructor.js
+// Copyright (C) 2017 André Bargull. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+
+/*---
+description: |
+ Test if a given function is a constructor function.
+defines: [isConstructor]
+features: [Reflect.construct]
+---*/
+
+function isConstructor(f) {
+ if (typeof f !== "function") {
+ throw new Test262Error("isConstructor invoked with a non-function value");
+ }
+
+ try {
+ Reflect.construct(function(){}, [], f);
+ } catch (e) {
+ return false;
+ }
+ return true;
+}
+
+// file: testTypedArray.js
+// Copyright (C) 2015 André Bargull. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+description: |
+ Collection of functions used to assert the correctness of TypedArray objects.
+defines:
+ - floatArrayConstructors
+ - nonClampedIntArrayConstructors
+ - intArrayConstructors
+ - typedArrayConstructors
+ - TypedArray
+ - testWithTypedArrayConstructors
+ - nonAtomicsFriendlyTypedArrayConstructors
+ - testWithAtomicsFriendlyTypedArrayConstructors
+ - testWithNonAtomicsFriendlyTypedArrayConstructors
+ - testTypedArrayConversions
+---*/
+
+var floatArrayConstructors = [
+ Float64Array,
+ Float32Array
+];
+
+var nonClampedIntArrayConstructors = [
+ Int32Array,
+ Int16Array,
+ Int8Array,
+ Uint32Array,
+ Uint16Array,
+ Uint8Array
+];
+
+var intArrayConstructors = nonClampedIntArrayConstructors.concat([Uint8ClampedArray]);
+
+// Float16Array is a newer feature
+// adding it to this list unconditionally would cause implementations lacking it to fail every test which uses it
+if (typeof Float16Array !== 'undefined') {
+ floatArrayConstructors.push(Float16Array);
+}
+
+/**
+ * Array containing every non-bigint typed array constructor.
+ */
+
+var typedArrayConstructors = floatArrayConstructors.concat(intArrayConstructors);
+
+/**
+ * The %TypedArray% intrinsic constructor function.
+ */
+var TypedArray = Object.getPrototypeOf(Int8Array);
+
+/**
+ * Callback for testing a typed array constructor.
+ *
+ * @callback typedArrayConstructorCallback
+ * @param {Function} Constructor the constructor object to test with.
+ */
+
+/**
+ * Calls the provided function for every typed array constructor.
+ *
+ * @param {typedArrayConstructorCallback} f - the function to call for each typed array constructor.
+ * @param {Array} selected - An optional Array with filtered typed arrays
+ */
+function testWithTypedArrayConstructors(f, selected) {
+ var constructors = selected || typedArrayConstructors;
+ for (var i = 0; i < constructors.length; ++i) {
+ var constructor = constructors[i];
+ try {
+ f(constructor);
+ } catch (e) {
+ e.message += " (Testing with " + constructor.name + ".)";
+ throw e;
+ }
+ }
+}
+
+var nonAtomicsFriendlyTypedArrayConstructors = floatArrayConstructors.concat([Uint8ClampedArray]);
+/**
+ * Calls the provided function for every non-"Atomics Friendly" typed array constructor.
+ *
+ * @param {typedArrayConstructorCallback} f - the function to call for each typed array constructor.
+ * @param {Array} selected - An optional Array with filtered typed arrays
+ */
+function testWithNonAtomicsFriendlyTypedArrayConstructors(f) {
+ testWithTypedArrayConstructors(f, nonAtomicsFriendlyTypedArrayConstructors);
+}
+
+/**
+ * Calls the provided function for every "Atomics Friendly" typed array constructor.
+ *
+ * @param {typedArrayConstructorCallback} f - the function to call for each typed array constructor.
+ * @param {Array} selected - An optional Array with filtered typed arrays
+ */
+function testWithAtomicsFriendlyTypedArrayConstructors(f) {
+ testWithTypedArrayConstructors(f, [
+ Int32Array,
+ Int16Array,
+ Int8Array,
+ Uint32Array,
+ Uint16Array,
+ Uint8Array,
+ ]);
+}
+
+/**
+ * Helper for conversion operations on TypedArrays, the expected values
+ * properties are indexed in order to match the respective value for each
+ * TypedArray constructor
+ * @param {Function} fn - the function to call for each constructor and value.
+ * will be called with the constructor, value, expected
+ *                        value, and an initial value that can be used to avoid
+ * a false positive with an equivalent expected value.
+ */
+function testTypedArrayConversions(byteConversionValues, fn) {
+ var values = byteConversionValues.values;
+ var expected = byteConversionValues.expected;
+
+ testWithTypedArrayConstructors(function(TA) {
+ var name = TA.name.slice(0, -5);
+
+ return values.forEach(function(value, index) {
+ var exp = expected[name][index];
+ var initial = 0;
+ if (exp === 0) {
+ initial = 1;
+ }
+ fn(TA, value, exp, initial);
+ });
+ });
+}
+
+/**
+ * Checks if the given argument is one of the float-based TypedArray constructors.
+ *
+ * @param {constructor} arg - the value to check
+ * @returns {boolean}
+ */
+function isFloatTypedArrayConstructor(arg) {
+ return floatArrayConstructors.indexOf(arg) !== -1;
+}
+
+/**
+ * Determines the precision of the given float-based TypedArray constructor.
+ *
+ * @param {constructor} FA - the value to check
+ * @returns {string} "half", "single", or "double" for Float16Array, Float32Array, and Float64Array respectively.
+ */
+function floatTypedArrayConstructorPrecision(FA) {
+ if (typeof Float16Array !== "undefined" && FA === Float16Array) {
+ return "half";
+ } else if (FA === Float32Array) {
+ return "single";
+ } else if (FA === Float64Array) {
+ return "double";
+ } else {
+ throw new Error("Malformed test - floatTypedArrayConstructorPrecision called with non-float TypedArray");
+ }
+}
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/browser.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/browser.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/browser.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/descriptor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/descriptor.js
new file mode 100644
index 0000000000..fa1eba15de
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/descriptor.js
@@ -0,0 +1,18 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tohex
+description: >
+ Uint8Array.prototype.toHex has default data property attributes.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype, 'toHex', {
+ enumerable: false,
+ writable: true,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/detached-buffer.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/detached-buffer.js
new file mode 100644
index 0000000000..e0126f6725
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/detached-buffer.js
@@ -0,0 +1,18 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tohex
+description: Uint8Array.prototype.toHex throws if called on a detached buffer
+includes: [detachArrayBuffer.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var array = new Uint8Array(2);
+$DETACHBUFFER(array.buffer);
+assert.throws(TypeError, function() {
+ array.toHex();
+});
+
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/length.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/length.js
new file mode 100644
index 0000000000..9c7d00d693
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/length.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tohex
+description: >
+ Uint8Array.prototype.toHex.length is 0.
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype.toHex, 'length', {
+ value: 0,
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/name.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/name.js
new file mode 100644
index 0000000000..af2e1ef1d4
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/name.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tohex
+description: >
+ Uint8Array.prototype.toHex.name is "toHex".
+includes: [propertyHelper.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+verifyProperty(Uint8Array.prototype.toHex, 'name', {
+ value: 'toHex',
+ enumerable: false,
+ writable: false,
+ configurable: true
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/nonconstructor.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/nonconstructor.js
new file mode 100644
index 0000000000..2d0f9fc00f
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/nonconstructor.js
@@ -0,0 +1,19 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tohex
+description: >
+ Uint8Array.prototype.toHex is not a constructor function.
+includes: [isConstructor.js]
+features: [uint8array-base64, TypedArray, Reflect.construct]
+---*/
+
+assert(!isConstructor(Uint8Array.prototype.toHex), "Uint8Array.prototype.toHex is not a constructor");
+
+var uint8Array = new Uint8Array(8);
+assert.throws(TypeError, function() {
+ new uint8Array.toHex();
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/receiver-not-uint8array.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/receiver-not-uint8array.js
new file mode 100644
index 0000000000..1409cca969
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/receiver-not-uint8array.js
@@ -0,0 +1,29 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tohex
+description: Uint8Array.prototype.toHex throws if the receiver is not a Uint8Array
+includes: [testTypedArray.js]
+features: [uint8array-base64, TypedArray]
+---*/
+
+var toHex = Uint8Array.prototype.toHex;
+
+testWithTypedArrayConstructors(function(TA) {
+ if (TA === Uint8Array) return;
+ var sample = new TA(2);
+ assert.throws(TypeError, function() {
+ Uint8Array.prototype.toHex.call(sample);
+ });
+});
+
+assert.throws(TypeError, function() {
+ Uint8Array.prototype.toHex.call([]);
+});
+
+assert.throws(TypeError, function() {
+ toHex();
+});
+
+reportCompare(0, 0);
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/results.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/results.js
new file mode 100644
index 0000000000..fb8c31096f
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/results.js
@@ -0,0 +1,18 @@
+// |reftest| shell-option(--enable-uint8array-base64) skip-if(!Uint8Array.fromBase64||!xulRuntime.shell) -- uint8array-base64 is not enabled unconditionally, requires shell-options
+// Copyright (C) 2024 Kevin Gibbons. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+esid: sec-uint8array.prototype.tohex
+description: Conversion of Uint8Arrays to hex strings
+features: [uint8array-base64, TypedArray]
+---*/
+
+assert.sameValue((new Uint8Array([])).toHex(), "");
+assert.sameValue((new Uint8Array([102])).toHex(), "66");
+assert.sameValue((new Uint8Array([102, 111])).toHex(), "666f");
+assert.sameValue((new Uint8Array([102, 111, 111])).toHex(), "666f6f");
+assert.sameValue((new Uint8Array([102, 111, 111, 98])).toHex(), "666f6f62");
+assert.sameValue((new Uint8Array([102, 111, 111, 98, 97])).toHex(), "666f6f6261");
+assert.sameValue((new Uint8Array([102, 111, 111, 98, 97, 114])).toHex(), "666f6f626172");
+
+reportCompare(0, 0);
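
The hex pair takes no options; for orientation, a minimal round-trip sketch, not part of the patch, assuming the same proposal (which also defines Uint8Array.fromHex):

    var bytes = new Uint8Array([102, 111, 111]);
    var hex = bytes.toHex();                // "666f6f" (lowercase, as in the vectors above)
    var copy = Uint8Array.fromHex(hex);     // new Uint8Array [102, 111, 111]
    var target = new Uint8Array(3);
    var res = target.setFromHex(hex);       // decodes in place
    // res.read === 6, res.written === 3, target is now [102, 111, 111]
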
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/shell.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/shell.js
new file mode 100644
index 0000000000..2af708ed59
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/prototype/toHex/shell.js
@@ -0,0 +1,203 @@
+// GENERATED, DO NOT EDIT
+// file: detachArrayBuffer.js
+// Copyright (C) 2016 the V8 project authors. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+description: |
+ A function used in the process of asserting correctness of TypedArray objects.
+
+ $262.detachArrayBuffer is defined by a host.
+defines: [$DETACHBUFFER]
+---*/
+
+function $DETACHBUFFER(buffer) {
+ if (!$262 || typeof $262.detachArrayBuffer !== "function") {
+ throw new Test262Error("No method available to detach an ArrayBuffer");
+ }
+ $262.detachArrayBuffer(buffer);
+}
+
+// file: isConstructor.js
+// Copyright (C) 2017 André Bargull. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+
+/*---
+description: |
+ Test if a given function is a constructor function.
+defines: [isConstructor]
+features: [Reflect.construct]
+---*/
+
+function isConstructor(f) {
+ if (typeof f !== "function") {
+ throw new Test262Error("isConstructor invoked with a non-function value");
+ }
+
+ try {
+ Reflect.construct(function(){}, [], f);
+ } catch (e) {
+ return false;
+ }
+ return true;
+}
+
+// file: testTypedArray.js
+// Copyright (C) 2015 André Bargull. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+description: |
+ Collection of functions used to assert the correctness of TypedArray objects.
+defines:
+ - floatArrayConstructors
+ - nonClampedIntArrayConstructors
+ - intArrayConstructors
+ - typedArrayConstructors
+ - TypedArray
+ - testWithTypedArrayConstructors
+ - nonAtomicsFriendlyTypedArrayConstructors
+ - testWithAtomicsFriendlyTypedArrayConstructors
+ - testWithNonAtomicsFriendlyTypedArrayConstructors
+ - testTypedArrayConversions
+---*/
+
+var floatArrayConstructors = [
+ Float64Array,
+ Float32Array
+];
+
+var nonClampedIntArrayConstructors = [
+ Int32Array,
+ Int16Array,
+ Int8Array,
+ Uint32Array,
+ Uint16Array,
+ Uint8Array
+];
+
+var intArrayConstructors = nonClampedIntArrayConstructors.concat([Uint8ClampedArray]);
+
+// Float16Array is a newer feature
+// adding it to this list unconditionally would cause implementations lacking it to fail every test which uses it
+if (typeof Float16Array !== 'undefined') {
+ floatArrayConstructors.push(Float16Array);
+}
+
+/**
+ * Array containing every non-bigint typed array constructor.
+ */
+
+var typedArrayConstructors = floatArrayConstructors.concat(intArrayConstructors);
+
+/**
+ * The %TypedArray% intrinsic constructor function.
+ */
+var TypedArray = Object.getPrototypeOf(Int8Array);
+
+/**
+ * Callback for testing a typed array constructor.
+ *
+ * @callback typedArrayConstructorCallback
+ * @param {Function} Constructor the constructor object to test with.
+ */
+
+/**
+ * Calls the provided function for every typed array constructor.
+ *
+ * @param {typedArrayConstructorCallback} f - the function to call for each typed array constructor.
+ * @param {Array} selected - An optional Array with filtered typed arrays
+ */
+function testWithTypedArrayConstructors(f, selected) {
+ var constructors = selected || typedArrayConstructors;
+ for (var i = 0; i < constructors.length; ++i) {
+ var constructor = constructors[i];
+ try {
+ f(constructor);
+ } catch (e) {
+ e.message += " (Testing with " + constructor.name + ".)";
+ throw e;
+ }
+ }
+}
+
+var nonAtomicsFriendlyTypedArrayConstructors = floatArrayConstructors.concat([Uint8ClampedArray]);
+/**
+ * Calls the provided function for every non-"Atomics Friendly" typed array constructor.
+ *
+ * @param {typedArrayConstructorCallback} f - the function to call for each typed array constructor.
+ * @param {Array} selected - An optional Array with filtered typed arrays
+ */
+function testWithNonAtomicsFriendlyTypedArrayConstructors(f) {
+ testWithTypedArrayConstructors(f, nonAtomicsFriendlyTypedArrayConstructors);
+}
+
+/**
+ * Calls the provided function for every "Atomics Friendly" typed array constructor.
+ *
+ * @param {typedArrayConstructorCallback} f - the function to call for each typed array constructor.
+ * @param {Array} selected - An optional Array with filtered typed arrays
+ */
+function testWithAtomicsFriendlyTypedArrayConstructors(f) {
+ testWithTypedArrayConstructors(f, [
+ Int32Array,
+ Int16Array,
+ Int8Array,
+ Uint32Array,
+ Uint16Array,
+ Uint8Array,
+ ]);
+}
+
+/**
+ * Helper for conversion operations on TypedArrays, the expected values
+ * properties are indexed in order to match the respective value for each
+ * TypedArray constructor
+ * @param {Function} fn - the function to call for each constructor and value.
+ * will be called with the constructor, value, expected
+ *                        value, and an initial value that can be used to avoid
+ * a false positive with an equivalent expected value.
+ */
+function testTypedArrayConversions(byteConversionValues, fn) {
+ var values = byteConversionValues.values;
+ var expected = byteConversionValues.expected;
+
+ testWithTypedArrayConstructors(function(TA) {
+ var name = TA.name.slice(0, -5);
+
+ return values.forEach(function(value, index) {
+ var exp = expected[name][index];
+ var initial = 0;
+ if (exp === 0) {
+ initial = 1;
+ }
+ fn(TA, value, exp, initial);
+ });
+ });
+}
+
+/**
+ * Checks if the given argument is one of the float-based TypedArray constructors.
+ *
+ * @param {constructor} arg - the value to check
+ * @returns {boolean}
+ */
+function isFloatTypedArrayConstructor(arg) {
+ return floatArrayConstructors.indexOf(arg) !== -1;
+}
+
+/**
+ * Determines the precision of the given float-based TypedArray constructor.
+ *
+ * @param {constructor} FA - the value to check
+ * @returns {string} "half", "single", or "double" for Float16Array, Float32Array, and Float64Array respectively.
+ */
+function floatTypedArrayConstructorPrecision(FA) {
+ if (typeof Float16Array !== "undefined" && FA === Float16Array) {
+ return "half";
+ } else if (FA === Float32Array) {
+ return "single";
+ } else if (FA === Float64Array) {
+ return "double";
+ } else {
+ throw new Error("Malformed test - floatTypedArrayConstructorPrecision called with non-float TypedArray");
+ }
+}
diff --git a/js/src/tests/test262/prs/3994/built-ins/Uint8Array/shell.js b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/shell.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/Uint8Array/shell.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/browser.js b/js/src/tests/test262/prs/3994/built-ins/browser.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/browser.js
diff --git a/js/src/tests/test262/prs/3994/built-ins/shell.js b/js/src/tests/test262/prs/3994/built-ins/shell.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/built-ins/shell.js
diff --git a/js/src/tests/test262/prs/3994/shell.js b/js/src/tests/test262/prs/3994/shell.js
new file mode 100644
index 0000000000..9adb7aa914
--- /dev/null
+++ b/js/src/tests/test262/prs/3994/shell.js
@@ -0,0 +1,723 @@
+// GENERATED, DO NOT EDIT
+// file: assert.js
+// Copyright (C) 2017 Ecma International. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+description: |
+ Collection of assertion functions used throughout test262
+defines: [assert]
+---*/
+
+
+function assert(mustBeTrue, message) {
+ if (mustBeTrue === true) {
+ return;
+ }
+
+ if (message === undefined) {
+ message = 'Expected true but got ' + assert._toString(mustBeTrue);
+ }
+ throw new Test262Error(message);
+}
+
+assert._isSameValue = function (a, b) {
+ if (a === b) {
+ // Handle +/-0 vs. -/+0
+ return a !== 0 || 1 / a === 1 / b;
+ }
+
+ // Handle NaN vs. NaN
+ return a !== a && b !== b;
+};
+
+assert.sameValue = function (actual, expected, message) {
+ try {
+ if (assert._isSameValue(actual, expected)) {
+ return;
+ }
+ } catch (error) {
+ throw new Test262Error(message + ' (_isSameValue operation threw) ' + error);
+ return;
+ }
+
+ if (message === undefined) {
+ message = '';
+ } else {
+ message += ' ';
+ }
+
+ message += 'Expected SameValue(«' + assert._toString(actual) + '», «' + assert._toString(expected) + '») to be true';
+
+ throw new Test262Error(message);
+};
+
+assert.notSameValue = function (actual, unexpected, message) {
+ if (!assert._isSameValue(actual, unexpected)) {
+ return;
+ }
+
+ if (message === undefined) {
+ message = '';
+ } else {
+ message += ' ';
+ }
+
+ message += 'Expected SameValue(«' + assert._toString(actual) + '», «' + assert._toString(unexpected) + '») to be false';
+
+ throw new Test262Error(message);
+};
+
+assert.throws = function (expectedErrorConstructor, func, message) {
+ var expectedName, actualName;
+ if (typeof func !== "function") {
+ throw new Test262Error('assert.throws requires two arguments: the error constructor ' +
+ 'and a function to run');
+ return;
+ }
+ if (message === undefined) {
+ message = '';
+ } else {
+ message += ' ';
+ }
+
+ try {
+ func();
+ } catch (thrown) {
+ if (typeof thrown !== 'object' || thrown === null) {
+ message += 'Thrown value was not an object!';
+ throw new Test262Error(message);
+ } else if (thrown.constructor !== expectedErrorConstructor) {
+ expectedName = expectedErrorConstructor.name;
+ actualName = thrown.constructor.name;
+ if (expectedName === actualName) {
+ message += 'Expected a ' + expectedName + ' but got a different error constructor with the same name';
+ } else {
+ message += 'Expected a ' + expectedName + ' but got a ' + actualName;
+ }
+ throw new Test262Error(message);
+ }
+ return;
+ }
+
+ message += 'Expected a ' + expectedErrorConstructor.name + ' to be thrown but no exception was thrown at all';
+ throw new Test262Error(message);
+};
+
+assert._toString = function (value) {
+ try {
+ if (value === 0 && 1 / value === -Infinity) {
+ return '-0';
+ }
+
+ return String(value);
+ } catch (err) {
+ if (err.name === 'TypeError') {
+ return Object.prototype.toString.call(value);
+ }
+
+ throw err;
+ }
+};
+
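As a quick orientation, a sketch of how tests typically call the assertion API defined above (editorial example, not part of the generated file):

assert(1 + 1 === 2, "basic arithmetic");
assert.sameValue(-0, -0);            // SameValue semantics: -0 matches -0 ...
assert.notSameValue(0, -0);          // ... but +0 and -0 are distinguishable
assert.throws(TypeError, function () {
  null.foo;                          // must throw exactly a TypeError
}, "property access on null");
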
+// file: compareArray.js
+// Copyright (C) 2017 Ecma International. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+description: |
+ Compare the contents of two arrays
+defines: [compareArray]
+---*/
+
+function compareArray(a, b) {
+ if (b.length !== a.length) {
+ return false;
+ }
+
+ for (var i = 0; i < a.length; i++) {
+ if (!compareArray.isSameValue(b[i], a[i])) {
+ return false;
+ }
+ }
+ return true;
+}
+
+compareArray.isSameValue = function(a, b) {
+ if (a === 0 && b === 0) return 1 / a === 1 / b;
+ if (a !== a && b !== b) return true;
+
+ return a === b;
+};
+
+compareArray.format = function(arrayLike) {
+ return `[${[].map.call(arrayLike, String).join(', ')}]`;
+};
+
+assert.compareArray = function(actual, expected, message) {
+ message = message === undefined ? '' : message;
+
+ if (typeof message === 'symbol') {
+ message = message.toString();
+ }
+
+ assert(actual != null, `First argument shouldn't be nullish. ${message}`);
+ assert(expected != null, `Second argument shouldn't be nullish. ${message}`);
+ var format = compareArray.format;
+ var result = compareArray(actual, expected);
+
+ // The following prevents actual and expected from being iterated and evaluated
+ // more than once unless absolutely necessary.
+ if (!result) {
+ assert(false, `Expected ${format(actual)} and ${format(expected)} to have the same contents. ${message}`);
+ }
+};
+
+// file: propertyHelper.js
+// Copyright (C) 2017 Ecma International. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+description: |
+ Collection of functions used to safely verify the correctness of
+ property descriptors.
+defines:
+ - verifyProperty
+ - verifyEqualTo # deprecated
+ - verifyWritable # deprecated
+ - verifyNotWritable # deprecated
+ - verifyEnumerable # deprecated
+ - verifyNotEnumerable # deprecated
+ - verifyConfigurable # deprecated
+ - verifyNotConfigurable # deprecated
+---*/
+
+// @ts-check
+
+/**
+ * @param {object} obj
+ * @param {string|symbol} name
+ * @param {PropertyDescriptor|undefined} desc
+ * @param {object} [options]
+ * @param {boolean} [options.restore]
+ */
+function verifyProperty(obj, name, desc, options) {
+ assert(
+ arguments.length > 2,
+ 'verifyProperty should receive at least 3 arguments: obj, name, and descriptor'
+ );
+
+ var originalDesc = Object.getOwnPropertyDescriptor(obj, name);
+ var nameStr = String(name);
+
+ // Allows checking for undefined descriptor if it's explicitly given.
+ if (desc === undefined) {
+ assert.sameValue(
+ originalDesc,
+ undefined,
+ "obj['" + nameStr + "'] descriptor should be undefined"
+ );
+
+ // desc and originalDesc are both undefined, problem solved;
+ return true;
+ }
+
+ assert(
+ Object.prototype.hasOwnProperty.call(obj, name),
+ "obj should have an own property " + nameStr
+ );
+
+ assert.notSameValue(
+ desc,
+ null,
+ "The desc argument should be an object or undefined, null"
+ );
+
+ assert.sameValue(
+ typeof desc,
+ "object",
+ "The desc argument should be an object or undefined, " + String(desc)
+ );
+
+ var names = Object.getOwnPropertyNames(desc);
+ for (var i = 0; i < names.length; i++) {
+ assert(
+ names[i] === "value" ||
+ names[i] === "writable" ||
+ names[i] === "enumerable" ||
+ names[i] === "configurable" ||
+ names[i] === "get" ||
+ names[i] === "set",
+ "Invalid descriptor field: " + names[i],
+ );
+ }
+
+ var failures = [];
+
+ if (Object.prototype.hasOwnProperty.call(desc, 'value')) {
+ if (!isSameValue(desc.value, originalDesc.value)) {
+ failures.push("descriptor value should be " + desc.value);
+ }
+ if (!isSameValue(desc.value, obj[name])) {
+ failures.push("object value should be " + desc.value);
+ }
+ }
+
+ if (Object.prototype.hasOwnProperty.call(desc, 'enumerable')) {
+ if (desc.enumerable !== originalDesc.enumerable ||
+ desc.enumerable !== isEnumerable(obj, name)) {
+ failures.push('descriptor should ' + (desc.enumerable ? '' : 'not ') + 'be enumerable');
+ }
+ }
+
+ if (Object.prototype.hasOwnProperty.call(desc, 'writable')) {
+ if (desc.writable !== originalDesc.writable ||
+ desc.writable !== isWritable(obj, name)) {
+ failures.push('descriptor should ' + (desc.writable ? '' : 'not ') + 'be writable');
+ }
+ }
+
+ if (Object.prototype.hasOwnProperty.call(desc, 'configurable')) {
+ if (desc.configurable !== originalDesc.configurable ||
+ desc.configurable !== isConfigurable(obj, name)) {
+ failures.push('descriptor should ' + (desc.configurable ? '' : 'not ') + 'be configurable');
+ }
+ }
+
+ assert(!failures.length, failures.join('; '));
+
+ if (options && options.restore) {
+ Object.defineProperty(obj, name, originalDesc);
+ }
+
+ return true;
+}
+
+function isConfigurable(obj, name) {
+ var hasOwnProperty = Object.prototype.hasOwnProperty;
+ try {
+ delete obj[name];
+ } catch (e) {
+ if (!(e instanceof TypeError)) {
+ throw new Test262Error("Expected TypeError, got " + e);
+ }
+ }
+ return !hasOwnProperty.call(obj, name);
+}
+
+function isEnumerable(obj, name) {
+ var stringCheck = false;
+
+ if (typeof name === "string") {
+ for (var x in obj) {
+ if (x === name) {
+ stringCheck = true;
+ break;
+ }
+ }
+ } else {
+ // skip it if name is not string, works for Symbol names.
+ stringCheck = true;
+ }
+
+ return stringCheck &&
+ Object.prototype.hasOwnProperty.call(obj, name) &&
+ Object.prototype.propertyIsEnumerable.call(obj, name);
+}
+
+function isSameValue(a, b) {
+ if (a === 0 && b === 0) return 1 / a === 1 / b;
+ if (a !== a && b !== b) return true;
+
+ return a === b;
+}
+
+var __isArray = Array.isArray;
+function isWritable(obj, name, verifyProp, value) {
+ var unlikelyValue = __isArray(obj) && name === "length" ?
+ Math.pow(2, 32) - 1 :
+ "unlikelyValue";
+ var newValue = value || unlikelyValue;
+ var hadValue = Object.prototype.hasOwnProperty.call(obj, name);
+ var oldValue = obj[name];
+ var writeSucceeded;
+
+ try {
+ obj[name] = newValue;
+ } catch (e) {
+ if (!(e instanceof TypeError)) {
+ throw new Test262Error("Expected TypeError, got " + e);
+ }
+ }
+
+ writeSucceeded = isSameValue(obj[verifyProp || name], newValue);
+
+ // Revert the change only if it was successful (in other cases, reverting
+ // is unnecessary and may trigger exceptions for certain property
+ // configurations)
+ if (writeSucceeded) {
+ if (hadValue) {
+ obj[name] = oldValue;
+ } else {
+ delete obj[name];
+ }
+ }
+
+ return writeSucceeded;
+}
+
+/**
+ * Deprecated; please use `verifyProperty` in new tests.
+ */
+function verifyEqualTo(obj, name, value) {
+ if (!isSameValue(obj[name], value)) {
+ throw new Test262Error("Expected obj[" + String(name) + "] to equal " + value +
+ ", actually " + obj[name]);
+ }
+}
+
+/**
+ * Deprecated; please use `verifyProperty` in new tests.
+ */
+function verifyWritable(obj, name, verifyProp, value) {
+ if (!verifyProp) {
+ assert(Object.getOwnPropertyDescriptor(obj, name).writable,
+ "Expected obj[" + String(name) + "] to have writable:true.");
+ }
+ if (!isWritable(obj, name, verifyProp, value)) {
+ throw new Test262Error("Expected obj[" + String(name) + "] to be writable, but was not.");
+ }
+}
+
+/**
+ * Deprecated; please use `verifyProperty` in new tests.
+ */
+function verifyNotWritable(obj, name, verifyProp, value) {
+ if (!verifyProp) {
+ assert(!Object.getOwnPropertyDescriptor(obj, name).writable,
+ "Expected obj[" + String(name) + "] to have writable:false.");
+ }
+ if (isWritable(obj, name, verifyProp)) {
+ throw new Test262Error("Expected obj[" + String(name) + "] NOT to be writable, but was.");
+ }
+}
+
+/**
+ * Deprecated; please use `verifyProperty` in new tests.
+ */
+function verifyEnumerable(obj, name) {
+ assert(Object.getOwnPropertyDescriptor(obj, name).enumerable,
+ "Expected obj[" + String(name) + "] to have enumerable:true.");
+ if (!isEnumerable(obj, name)) {
+ throw new Test262Error("Expected obj[" + String(name) + "] to be enumerable, but was not.");
+ }
+}
+
+/**
+ * Deprecated; please use `verifyProperty` in new tests.
+ */
+function verifyNotEnumerable(obj, name) {
+ assert(!Object.getOwnPropertyDescriptor(obj, name).enumerable,
+ "Expected obj[" + String(name) + "] to have enumerable:false.");
+ if (isEnumerable(obj, name)) {
+ throw new Test262Error("Expected obj[" + String(name) + "] NOT to be enumerable, but was.");
+ }
+}
+
+/**
+ * Deprecated; please use `verifyProperty` in new tests.
+ */
+function verifyConfigurable(obj, name) {
+ assert(Object.getOwnPropertyDescriptor(obj, name).configurable,
+ "Expected obj[" + String(name) + "] to have configurable:true.");
+ if (!isConfigurable(obj, name)) {
+ throw new Test262Error("Expected obj[" + String(name) + "] to be configurable, but was not.");
+ }
+}
+
+/**
+ * Deprecated; please use `verifyProperty` in new tests.
+ */
+function verifyNotConfigurable(obj, name) {
+ assert(!Object.getOwnPropertyDescriptor(obj, name).configurable,
+ "Expected obj[" + String(name) + "] to have configurable:false.");
+ if (isConfigurable(obj, name)) {
+ throw new Test262Error("Expected obj[" + String(name) + "] NOT to be configurable, but was.");
+ }
+}
+
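A sketch of the preferred verifyProperty entry point in use (editorial example; the {restore: true} option matters because the isConfigurable check deletes the property as a side effect):

var obj = {};
Object.defineProperty(obj, "m", {
  value: 42, writable: true, enumerable: false, configurable: true,
});
verifyProperty(obj, "m", {
  value: 42, writable: true, enumerable: false, configurable: true,
}, { restore: true });  // re-defines the original descriptor after the checks
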
+// file: sta.js
+// Copyright (c) 2012 Ecma International. All rights reserved.
+// This code is governed by the BSD license found in the LICENSE file.
+/*---
+description: |
+ Provides both:
+
+ - An error class to avoid false positives when testing for thrown exceptions
+ - A function to explicitly throw an exception using the Test262Error class
+defines: [Test262Error, $DONOTEVALUATE]
+---*/
+
+
+function Test262Error(message) {
+ this.message = message || "";
+}
+
+Test262Error.prototype.toString = function () {
+ return "Test262Error: " + this.message;
+};
+
+Test262Error.thrower = function (message) {
+ throw new Test262Error(message);
+};
+
+function $DONOTEVALUATE() {
+ throw "Test262: This statement should not be evaluated.";
+}
+
+// file: test262-host.js
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+// https://github.com/tc39/test262/blob/main/INTERPRETING.md#host-defined-functions
+;(function createHostObject(global) {
+ "use strict";
+
+ // Save built-in functions and constructors.
+ var FunctionToString = global.Function.prototype.toString;
+ var ReflectApply = global.Reflect.apply;
+ var Atomics = global.Atomics;
+ var Error = global.Error;
+ var SharedArrayBuffer = global.SharedArrayBuffer;
+ var Int32Array = global.Int32Array;
+
+ // Save built-in shell functions.
+ var NewGlobal = global.newGlobal;
+ var setSharedArrayBuffer = global.setSharedArrayBuffer;
+ var getSharedArrayBuffer = global.getSharedArrayBuffer;
+ var evalInWorker = global.evalInWorker;
+ var monotonicNow = global.monotonicNow;
+ var gc = global.gc;
+ var clearKeptObjects = global.clearKeptObjects;
+
+ var hasCreateIsHTMLDDA = "createIsHTMLDDA" in global;
+ var hasThreads = ("helperThreadCount" in global ? global.helperThreadCount() > 0 : true);
+ var hasMailbox = typeof setSharedArrayBuffer === "function" && typeof getSharedArrayBuffer === "function";
+ var hasEvalInWorker = typeof evalInWorker === "function";
+
+ if (!hasCreateIsHTMLDDA && !("document" in global && "all" in global.document))
+ throw new Error("no [[IsHTMLDDA]] object available for testing");
+
+ var IsHTMLDDA = hasCreateIsHTMLDDA
+ ? global.createIsHTMLDDA()
+ : global.document.all;
+
+ // The $262.agent framework is not appropriate for browsers yet, and some
+ // test cases can't work in browsers (they block the main thread).
+
+ var shellCode = hasMailbox && hasEvalInWorker;
+ var sabTestable = Atomics && SharedArrayBuffer && hasThreads && shellCode;
+
+ global.$262 = {
+ __proto__: null,
+ createRealm() {
+ var newGlobalObject = NewGlobal();
+ var createHostObjectFn = ReflectApply(FunctionToString, createHostObject, []);
+ newGlobalObject.Function(`${createHostObjectFn} createHostObject(this);`)();
+ return newGlobalObject.$262;
+ },
+ detachArrayBuffer: global.detachArrayBuffer,
+ evalScript: global.evaluateScript || global.evaluate,
+ global,
+ IsHTMLDDA,
+ gc() {
+ gc();
+ },
+ clearKeptObjects() {
+ clearKeptObjects();
+ },
+ agent: (function () {
+
+ // SpiderMonkey complication: With run-time argument --no-threads
+ // our test runner will not properly filter test cases that can't be
+ // run because agents can't be started, and so we do a little
+ // filtering here: We will quietly succeed and exit if an agent test
+ // should not have been run because threads cannot be started.
+ //
+ // Firefox complication: The test cases that use $262.agent can't
+ // currently work in the browser, so for now we rely on them not
+ // being run at all.
+
+ if (!sabTestable) {
+ let {reportCompare, quit} = global;
+
+ function notAvailable() {
+ // See comment above.
+ if (!hasThreads && shellCode) {
+ reportCompare(0, 0);
+ quit(0);
+ }
+ throw new Error("Agents not available");
+ }
+
+ return {
+ start(script) { notAvailable() },
+ broadcast(sab, id) { notAvailable() },
+ getReport() { notAvailable() },
+ sleep(s) { notAvailable() },
+ monotonicNow,
+ }
+ }
+
+ // The SpiderMonkey implementation uses a designated shared buffer _ia
+ // for coordination, and spinlocks for everything except sleeping.
+
+ var _MSG_LOC = 0; // Low bit set: broadcast available; High bits: seq #
+ var _ID_LOC = 1; // ID sent with broadcast
+ var _ACK_LOC = 2; // Worker increments this to ack that broadcast was received
+ var _RDY_LOC = 3; // Worker increments this to ack that worker is up and running
+ var _LOCKTXT_LOC = 4; // Writer lock for the text buffer: 0=open, 1=closed
+ var _NUMTXT_LOC = 5; // Count of messages in text buffer
+ var _NEXT_LOC = 6; // First free location in the buffer
+ var _SLEEP_LOC = 7; // Used for sleeping
+
+ var _FIRST = 10; // First location of first message
+
+ var _ia = new Int32Array(new SharedArrayBuffer(65536));
+ _ia[_NEXT_LOC] = _FIRST;
+
+ var _worker_prefix =
+// BEGIN WORKER PREFIX
+`if (typeof $262 === 'undefined')
+ $262 = {};
+$262.agent = (function (global) {
+ var ReflectApply = global.Reflect.apply;
+ var StringCharCodeAt = global.String.prototype.charCodeAt;
+ var {
+ add: Atomics_add,
+ compareExchange: Atomics_compareExchange,
+ load: Atomics_load,
+ store: Atomics_store,
+ wait: Atomics_wait,
+ } = global.Atomics;
+
+ var {getSharedArrayBuffer} = global;
+
+ var _ia = new Int32Array(getSharedArrayBuffer());
+ var agent = {
+ receiveBroadcast(receiver) {
+ var k;
+ while (((k = Atomics_load(_ia, ${_MSG_LOC})) & 1) === 0)
+ ;
+ var received_sab = getSharedArrayBuffer();
+ var received_id = Atomics_load(_ia, ${_ID_LOC});
+ Atomics_add(_ia, ${_ACK_LOC}, 1);
+ while (Atomics_load(_ia, ${_MSG_LOC}) === k)
+ ;
+ receiver(received_sab, received_id);
+ },
+
+ report(msg) {
+ while (Atomics_compareExchange(_ia, ${_LOCKTXT_LOC}, 0, 1) === 1)
+ ;
+ msg = "" + msg;
+ var i = _ia[${_NEXT_LOC}];
+ _ia[i++] = msg.length;
+ for ( let j=0 ; j < msg.length ; j++ )
+ _ia[i++] = ReflectApply(StringCharCodeAt, msg, [j]);
+ _ia[${_NEXT_LOC}] = i;
+ Atomics_add(_ia, ${_NUMTXT_LOC}, 1);
+ Atomics_store(_ia, ${_LOCKTXT_LOC}, 0);
+ },
+
+ sleep(s) {
+ Atomics_wait(_ia, ${_SLEEP_LOC}, 0, s);
+ },
+
+ leaving() {},
+
+ monotonicNow: global.monotonicNow,
+ };
+ Atomics_add(_ia, ${_RDY_LOC}, 1);
+ return agent;
+})(this);`;
+// END WORKER PREFIX
+
+ var _numWorkers = 0;
+ var _numReports = 0;
+ var _reportPtr = _FIRST;
+ var {
+ add: Atomics_add,
+ load: Atomics_load,
+ store: Atomics_store,
+ wait: Atomics_wait,
+ } = Atomics;
+ var StringFromCharCode = global.String.fromCharCode;
+
+ return {
+ start(script) {
+ setSharedArrayBuffer(_ia.buffer);
+ var oldrdy = Atomics_load(_ia, _RDY_LOC);
+ evalInWorker(_worker_prefix + script);
+ while (Atomics_load(_ia, _RDY_LOC) === oldrdy)
+ ;
+ _numWorkers++;
+ },
+
+ broadcast(sab, id) {
+ setSharedArrayBuffer(sab);
+ Atomics_store(_ia, _ID_LOC, id);
+ Atomics_store(_ia, _ACK_LOC, 0);
+ Atomics_add(_ia, _MSG_LOC, 1);
+ while (Atomics_load(_ia, _ACK_LOC) < _numWorkers)
+ ;
+ Atomics_add(_ia, _MSG_LOC, 1);
+ },
+
+ getReport() {
+ if (_numReports === Atomics_load(_ia, _NUMTXT_LOC))
+ return null;
+ var s = "";
+ var i = _reportPtr;
+ var len = _ia[i++];
+ for ( let j=0 ; j < len ; j++ )
+ s += StringFromCharCode(_ia[i++]);
+ _reportPtr = i;
+ _numReports++;
+ return s;
+ },
+
+ sleep(s) {
+ Atomics_wait(_ia, _SLEEP_LOC, 0, s);
+ },
+
+ monotonicNow,
+ };
+ })()
+ };
+})(this);
+
+var $mozAsyncTestDone = false;
+function $DONE(failure) {
+ // This function is generally called from within a Promise handler, so any
+ // exception thrown by this method will be swallowed and most likely
+ // ignored by the Promise machinery.
+ if ($mozAsyncTestDone) {
+ reportFailure("$DONE() already called");
+ return;
+ }
+ $mozAsyncTestDone = true;
+
+ if (failure)
+ reportFailure(failure);
+ else
+ reportCompare(0, 0);
+
+ if (typeof jsTestDriverEnd === "function") {
+ gDelayTestDriverEnd = false;
+ jsTestDriverEnd();
+ }
+}
+
+// Some tests in test262 leave promise rejections unhandled.
+if ("ignoreUnhandledRejections" in this) {
+ ignoreUnhandledRejections();
+}
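
Two editorial sketches of how tests consume the host hooks wired up above: the $DONE callback for async tests, and the $262.agent protocol where SharedArrayBuffer and worker threads are available (where they are not, start() falls into the notAvailable() path shown earlier).

// Async test pattern: report success or failure exactly once via $DONE.
Promise.resolve(42).then(function (v) {
  if (v !== 42) throw new Test262Error("unexpected value");
}).then($DONE, $DONE);

// Agent pattern (only meaningful when sabTestable was true above).
if (typeof SharedArrayBuffer === "function") {
  $262.agent.start(`
    $262.agent.receiveBroadcast(function (sab, id) {
      new Int32Array(sab)[0] = id;
      $262.agent.report("got " + id);
      $262.agent.leaving();
    });
  `);
  var sab = new SharedArrayBuffer(4);
  $262.agent.broadcast(sab, 7);
  var report;
  while ((report = $262.agent.getReport()) === null) {
    $262.agent.sleep(10);
  }
}
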
diff --git a/js/src/tests/test262/staging/JSON/json-parse-with-source-snapshot.js b/js/src/tests/test262/staging/JSON/json-parse-with-source-snapshot.js
index 45465333d4..7ef4f88dd8 100644
--- a/js/src/tests/test262/staging/JSON/json-parse-with-source-snapshot.js
+++ b/js/src/tests/test262/staging/JSON/json-parse-with-source-snapshot.js
@@ -1,4 +1,4 @@
-// |reftest| skip -- json-parse-with-source is not supported
+// |reftest| shell-option(--enable-json-parse-with-source) skip-if(!JSON.hasOwnProperty('isRawJSON')||!xulRuntime.shell) -- json-parse-with-source is not enabled unconditionally, requires shell-options
// Copyright (C) 2023 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
diff --git a/js/src/tests/test262/staging/JSON/json-parse-with-source.js b/js/src/tests/test262/staging/JSON/json-parse-with-source.js
index f8674511ef..c6b6326423 100644
--- a/js/src/tests/test262/staging/JSON/json-parse-with-source.js
+++ b/js/src/tests/test262/staging/JSON/json-parse-with-source.js
@@ -1,4 +1,4 @@
-// |reftest| skip -- json-parse-with-source is not supported
+// |reftest| shell-option(--enable-json-parse-with-source) skip-if(!JSON.hasOwnProperty('isRawJSON')||!xulRuntime.shell) -- json-parse-with-source is not enabled unconditionally, requires shell-options
// Copyright (C) 2023 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
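
For context, a hedged sketch of the JSON parse-with-source surface these tests exercise once the shell option above is enabled (API as in the TC39 proposal; exact availability depends on the build):

// The reviver receives a third `context` argument whose `source` holds the
// exact primitive source text, so large integers can be recovered losslessly.
const parsed = JSON.parse('{"big": 9007199254740993}', (key, value, context) => {
  return typeof value === "number" ? BigInt(context.source) : value;
});
// parsed.big === 9007199254740993n

// Raw JSON values round-trip through stringify verbatim.
const raw = JSON.rawJSON("9007199254740993");
JSON.isRawJSON(raw);           // true
JSON.stringify({ big: raw });  // '{"big":9007199254740993}'
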
diff --git a/js/src/util/StructuredSpewer.cpp b/js/src/util/StructuredSpewer.cpp
index 72f483c6b5..ef59f79b87 100644
--- a/js/src/util/StructuredSpewer.cpp
+++ b/js/src/util/StructuredSpewer.cpp
@@ -158,7 +158,7 @@ void StructuredSpewer::spew(JSContext* cx, SpewChannel channel, const char* fmt,
json.beginObject();
json.property("channel", getName(channel));
- json.formatProperty("message", fmt, ap);
+ json.formatPropertyVA("message", fmt, ap);
json.endObject();
va_end(ap);
diff --git a/js/src/vm/ArrayBufferObject.cpp b/js/src/vm/ArrayBufferObject.cpp
index 2fe4f01f8d..14039af574 100644
--- a/js/src/vm/ArrayBufferObject.cpp
+++ b/js/src/vm/ArrayBufferObject.cpp
@@ -2830,7 +2830,7 @@ bool InnerViewTable::addView(JSContext* cx, ArrayBufferObject* buffer,
if (isNurseryView && !hadNurseryViews && nurseryKeysValid) {
#ifdef DEBUG
if (nurseryKeys.length() < 100) {
- for (auto* key : nurseryKeys) {
+ for (const auto& key : nurseryKeys) {
MOZ_ASSERT(key != buffer);
}
}
@@ -2859,31 +2859,53 @@ void InnerViewTable::removeViews(ArrayBufferObject* buffer) {
map.remove(ptr);
}
-bool InnerViewTable::traceWeak(JSTracer* trc) { return map.traceWeak(trc); }
+bool InnerViewTable::traceWeak(JSTracer* trc) {
+ nurseryKeys.traceWeak(trc);
+ map.traceWeak(trc);
+ return true;
+}
void InnerViewTable::sweepAfterMinorGC(JSTracer* trc) {
MOZ_ASSERT(needsSweepAfterMinorGC());
- if (nurseryKeysValid) {
- for (size_t i = 0; i < nurseryKeys.length(); i++) {
- ArrayBufferObject* buffer = nurseryKeys[i];
+ NurseryKeysVector keys;
+ bool valid = true;
+ std::swap(nurseryKeys, keys);
+ std::swap(nurseryKeysValid, valid);
+
+ // Use nursery keys vector if possible.
+ if (valid) {
+ for (ArrayBufferObject* buffer : keys) {
MOZ_ASSERT(!gc::IsInsideNursery(buffer));
auto ptr = map.lookup(buffer);
- if (ptr && !ptr->value().sweepAfterMinorGC(trc)) {
+ if (ptr && !sweepViewsAfterMinorGC(trc, buffer, ptr->value())) {
map.remove(ptr);
}
}
- } else {
- for (ArrayBufferViewMap::Enum e(map); !e.empty(); e.popFront()) {
- MOZ_ASSERT(!gc::IsInsideNursery(e.front().key()));
- if (!e.front().value().sweepAfterMinorGC(trc)) {
- e.removeFront();
- }
+ return;
+ }
+
+ // Otherwise look at every map entry.
+ for (ArrayBufferViewMap::Enum e(map); !e.empty(); e.popFront()) {
+ MOZ_ASSERT(!gc::IsInsideNursery(e.front().key()));
+ if (!sweepViewsAfterMinorGC(trc, e.front().key(), e.front().value())) {
+ e.removeFront();
}
}
+}
+
+bool InnerViewTable::sweepViewsAfterMinorGC(JSTracer* trc,
+ ArrayBufferObject* buffer,
+ Views& views) {
+ if (!views.sweepAfterMinorGC(trc)) {
+ return false; // No more views.
+ }
- nurseryKeys.clear();
- nurseryKeysValid = true;
+ if (views.hasNurseryViews() && !nurseryKeys.append(buffer)) {
+ nurseryKeysValid = false;
+ }
+
+ return true;
}
size_t InnerViewTable::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
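
A schematic of the new sweep flow above, written as plain JS pseudo-code rather than engine code (sweepViews and hasNurseryViews stand in for the Views methods): the pending-key list is taken over wholesale, then rebuilt with only the buffers that still have nursery views.

function sweepAfterMinorGC(table) {
  const keys = table.nurseryKeys;
  const valid = table.nurseryKeysValid;
  table.nurseryKeys = [];
  table.nurseryKeysValid = true;

  // Use the nursery-key list when it is complete, otherwise walk every entry.
  const buffers = valid ? keys : Array.from(table.map.keys());
  for (const buffer of buffers) {
    const views = table.map.get(buffer);
    if (!views) continue;
    if (!sweepViews(views)) {
      table.map.delete(buffer);          // no live views remain
    } else if (views.hasNurseryViews) {
      table.nurseryKeys.push(buffer);    // still needs sweeping after the next minor GC
    }
  }
}
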
diff --git a/js/src/vm/ArrayBufferObject.h b/js/src/vm/ArrayBufferObject.h
index 5aa96bf887..ce78b26cb2 100644
--- a/js/src/vm/ArrayBufferObject.h
+++ b/js/src/vm/ArrayBufferObject.h
@@ -760,16 +760,20 @@ class InnerViewTable {
StableCellHasher<JSObject*>, ZoneAllocPolicy>;
ArrayBufferViewMap map;
- // List of keys from innerViews where either the source or at least one
- // target is in the nursery. The raw pointer to a JSObject is allowed here
- // because this vector is cleared after every minor collection. Users in
- // sweepAfterMinorCollection must be careful to use MaybeForwarded before
- // touching these pointers.
- Vector<ArrayBufferObject*, 0, SystemAllocPolicy> nurseryKeys;
+ // List of keys from map where either the source or at least one target is in
+ // the nursery. The raw pointer to a JSObject is allowed here because this
+ // vector is cleared after every minor collection. Users in sweepAfterMinorGC
+ // must be careful to use MaybeForwarded before touching these pointers.
+ using NurseryKeysVector =
+ GCVector<UnsafeBarePtr<ArrayBufferObject*>, 0, SystemAllocPolicy>;
+ NurseryKeysVector nurseryKeys;
// Whether nurseryKeys is a complete list.
bool nurseryKeysValid = true;
+ bool sweepMapEntryAfterMinorGC(UnsafeBarePtr<JSObject*>& buffer,
+ ViewVector& views);
+
public:
explicit InnerViewTable(Zone* zone) : map(zone) {}
@@ -793,6 +797,9 @@ class InnerViewTable {
ArrayBufferViewObject* view);
ViewVector* maybeViewsUnbarriered(ArrayBufferObject* buffer);
void removeViews(ArrayBufferObject* buffer);
+
+ bool sweepViewsAfterMinorGC(JSTracer* trc, ArrayBufferObject* buffer,
+ Views& views);
};
template <typename Wrapper>
diff --git a/js/src/vm/ArrayBufferViewObject.cpp b/js/src/vm/ArrayBufferViewObject.cpp
index 27004f3e2a..37272a94af 100644
--- a/js/src/vm/ArrayBufferViewObject.cpp
+++ b/js/src/vm/ArrayBufferViewObject.cpp
@@ -221,7 +221,6 @@ void ArrayBufferViewObject::computeResizableLengthAndByteOffset(
size_t bytesPerElement) {
MOZ_ASSERT(!isSharedMemory());
MOZ_ASSERT(hasBuffer());
- MOZ_ASSERT(!bufferUnshared()->isLengthPinned());
MOZ_ASSERT(bufferUnshared()->isResizable());
size_t byteOffsetStart = initialByteOffset();
diff --git a/js/src/vm/AsyncFunction.cpp b/js/src/vm/AsyncFunction.cpp
index ce523fe409..f6ed33c738 100644
--- a/js/src/vm/AsyncFunction.cpp
+++ b/js/src/vm/AsyncFunction.cpp
@@ -151,7 +151,7 @@ static bool AsyncFunctionResume(JSContext* cx,
if (!CallSelfHostedFunction(cx, funName, generatorOrValue, args,
&generatorOrValue)) {
if (!generator->isClosed()) {
- generator->setClosed();
+ generator->setClosed(cx);
}
// Handle the OOM case mentioned above.
diff --git a/js/src/vm/AsyncIteration.cpp b/js/src/vm/AsyncIteration.cpp
index 8f8902a64a..33546dc9dd 100644
--- a/js/src/vm/AsyncIteration.cpp
+++ b/js/src/vm/AsyncIteration.cpp
@@ -1071,7 +1071,7 @@ bool js::AsyncGeneratorThrow(JSContext* cx, unsigned argc, Value* vp) {
if (!CallSelfHostedFunction(cx, funName, thisOrRval, args, &thisOrRval)) {
// 25.5.3.2, steps 5.f, 5.g.
if (!generator->isClosed()) {
- generator->setClosed();
+ generator->setClosed(cx);
}
return AsyncGeneratorThrown(cx, generator);
}
diff --git a/js/src/vm/AtomsTable.h b/js/src/vm/AtomsTable.h
index aae7728fe5..28e448ce77 100644
--- a/js/src/vm/AtomsTable.h
+++ b/js/src/vm/AtomsTable.h
@@ -35,6 +35,82 @@ struct AtomHasher {
}
};
+struct js::AtomHasher::Lookup {
+ union {
+ const JS::Latin1Char* latin1Chars;
+ const char16_t* twoByteChars;
+ const char* utf8Bytes;
+ };
+ enum { TwoByteChar, Latin1, UTF8 } type;
+ size_t length;
+ size_t byteLength;
+ const JSAtom* atom; /* Optional. */
+ JS::AutoCheckCannotGC nogc;
+
+ HashNumber hash;
+
+ MOZ_ALWAYS_INLINE Lookup(const char* utf8Bytes, size_t byteLen, size_t length,
+ HashNumber hash)
+ : utf8Bytes(utf8Bytes),
+ type(UTF8),
+ length(length),
+ byteLength(byteLen),
+ atom(nullptr),
+ hash(hash) {}
+
+ MOZ_ALWAYS_INLINE Lookup(const char16_t* chars, size_t length)
+ : twoByteChars(chars),
+ type(TwoByteChar),
+ length(length),
+ atom(nullptr),
+ hash(mozilla::HashString(chars, length)) {}
+
+ MOZ_ALWAYS_INLINE Lookup(const JS::Latin1Char* chars, size_t length)
+ : latin1Chars(chars),
+ type(Latin1),
+ length(length),
+ atom(nullptr),
+ hash(mozilla::HashString(chars, length)) {}
+
+ MOZ_ALWAYS_INLINE Lookup(HashNumber hash, const char16_t* chars,
+ size_t length)
+ : twoByteChars(chars),
+ type(TwoByteChar),
+ length(length),
+ atom(nullptr),
+ hash(hash) {
+ MOZ_ASSERT(hash == mozilla::HashString(chars, length));
+ }
+
+ MOZ_ALWAYS_INLINE Lookup(HashNumber hash, const JS::Latin1Char* chars,
+ size_t length)
+ : latin1Chars(chars),
+ type(Latin1),
+ length(length),
+ atom(nullptr),
+ hash(hash) {
+ MOZ_ASSERT(hash == mozilla::HashString(chars, length));
+ }
+
+ inline explicit Lookup(const JSAtom* atom)
+ : type(atom->hasLatin1Chars() ? Latin1 : TwoByteChar),
+ length(atom->length()),
+ atom(atom),
+ hash(atom->hash()) {
+ if (type == Latin1) {
+ latin1Chars = atom->latin1Chars(nogc);
+ MOZ_ASSERT(mozilla::HashString(latin1Chars, length) == hash);
+ } else {
+ MOZ_ASSERT(type == TwoByteChar);
+ twoByteChars = atom->twoByteChars(nogc);
+ MOZ_ASSERT(mozilla::HashString(twoByteChars, length) == hash);
+ }
+ }
+
+ // Return: true iff the string in |atom| matches the string in this Lookup.
+ bool StringsMatch(const JSAtom& atom) const;
+};
+
// Note: Use a 'class' here to make forward declarations easier to use.
class AtomSet : public JS::GCHashSet<WeakHeapPtr<JSAtom*>, AtomHasher,
SystemAllocPolicy> {
diff --git a/js/src/vm/BigIntType.h b/js/src/vm/BigIntType.h
index fb9f4085e6..8959f93c77 100644
--- a/js/src/vm/BigIntType.h
+++ b/js/src/vm/BigIntType.h
@@ -419,7 +419,7 @@ class BigInt final : public js::gc::CellWithLengthAndFlags {
static JSLinearString* toStringGeneric(JSContext* cx, Handle<BigInt*>,
unsigned radix);
- friend struct ::JSStructuredCloneReader; // So it can call the following:
+ friend struct ::JSStructuredCloneReader; // So it can call the following:
static BigInt* destructivelyTrimHighZeroDigits(JSContext* cx, BigInt* x);
bool absFitsInUint64() const { return digitLength() <= 64 / DigitBits; }
diff --git a/js/src/vm/CommonPropertyNames.h b/js/src/vm/CommonPropertyNames.h
index d4376ec6a4..5fa3f2b633 100644
--- a/js/src/vm/CommonPropertyNames.h
+++ b/js/src/vm/CommonPropertyNames.h
@@ -29,6 +29,7 @@
MACRO_(allowContentIter, "allowContentIter") \
MACRO_(allowContentIterWith, "allowContentIterWith") \
MACRO_(allowContentIterWithNext, "allowContentIterWithNext") \
+ MACRO_(alphabet, "alphabet") \
MACRO_(ambiguous, "ambiguous") \
MACRO_(anonymous, "anonymous") \
MACRO_(Any, "Any") \
@@ -214,6 +215,8 @@
MACRO_(frame, "frame") \
MACRO_(from, "from") \
MACRO_(fromAsync, "fromAsync") \
+ MACRO_(fromBase64, "fromBase64") \
+ MACRO_(fromHex, "fromHex") \
MACRO_(fulfilled, "fulfilled") \
MACRO_(GatherAsyncParentCompletions, "GatherAsyncParentCompletions") \
MACRO_(gcCycleNumber, "gcCycleNumber") \
@@ -319,6 +322,7 @@
MACRO_(isoMinute, "isoMinute") \
MACRO_(isoMonth, "isoMonth") \
MACRO_(isoNanosecond, "isoNanosecond") \
+ MACRO_(isRawJSON, "isRawJSON") \
MACRO_(isoSecond, "isoSecond") \
MACRO_(isoYear, "isoYear") \
MACRO_(isStepStart, "isStepStart") \
@@ -338,6 +342,7 @@
MACRO_(label, "label") \
MACRO_(language, "language") \
MACRO_(largestUnit, "largestUnit") \
+ MACRO_(lastChunkHandling, "lastChunkHandling") \
MACRO_(lastIndex, "lastIndex") \
MACRO_(length, "length") \
MACRO_(let, "let") \
@@ -467,6 +472,8 @@
MACRO_(pull, "pull") \
MACRO_(quarter, "quarter") \
MACRO_(raw, "raw") \
+ MACRO_(rawJSON, "rawJSON") \
+ MACRO_(read, "read") \
MACRO_(reason, "reason") \
MACRO_(RegExp_String_Iterator_, "RegExp String Iterator") \
MACRO_(RegExp_prototype_Exec, "RegExp_prototype_Exec") \
@@ -503,6 +510,8 @@
MACRO_(SetConstructorInit, "SetConstructorInit") \
MACRO_(SetIsInlinableLargeFunction, "SetIsInlinableLargeFunction") \
MACRO_(Set_Iterator_, "Set Iterator") \
+ MACRO_(setFromBase64, "setFromBase64") \
+ MACRO_(setFromHex, "setFromHex") \
MACRO_(setPrototypeOf, "setPrototypeOf") \
MACRO_(shape, "shape") \
MACRO_(shared, "shared") \
@@ -540,7 +549,9 @@
MACRO_(timeStyle, "timeStyle") \
MACRO_(timeZone, "timeZone") \
MACRO_(timeZoneName, "timeZoneName") \
+ MACRO_(toBase64, "toBase64") \
MACRO_(toGMTString, "toGMTString") \
+ MACRO_(toHex, "toHex") \
MACRO_(toISOString, "toISOString") \
MACRO_(toJSON, "toJSON") \
MACRO_(toLocaleString, "toLocaleString") \
@@ -612,6 +623,7 @@
MACRO_(weeks, "weeks") \
MACRO_(while_, "while") \
MACRO_(with, "with") \
+ MACRO_(written, "written") \
MACRO_(toReversed, "toReversed") \
MACRO_(toSorted, "toSorted") \
MACRO_(toSpliced, "toSpliced") \
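
These property names back the Uint8Array base64/hex proposal; a hedged sketch of the surface they support follows (availability depends on the relevant shell/build options):

const bytes = Uint8Array.fromBase64("SGVsbG8=");  // Uint8Array [72, 101, 108, 108, 111]
bytes.toHex();                                    // "48656c6c6f"
Uint8Array.fromHex("48656c6c6f").toBase64();      // "SGVsbG8="

const target = new Uint8Array(8);
const { read, written } = target.setFromBase64("SGVsbG8=", {
  alphabet: "base64",
  lastChunkHandling: "loose",
});
// read: input characters consumed; written: bytes stored into `target`
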
diff --git a/js/src/vm/FrameIter.cpp b/js/src/vm/FrameIter.cpp
index 3dd50c2fcf..9a665e6b9d 100644
--- a/js/src/vm/FrameIter.cpp
+++ b/js/src/vm/FrameIter.cpp
@@ -124,7 +124,12 @@ JS::Realm* JitFrameIter::realm() const {
return asWasm().instance()->realm();
}
- return asJSJit().script()->realm();
+ if (asJSJit().isScripted()) {
+ return asJSJit().script()->realm();
+ }
+
+ MOZ_RELEASE_ASSERT(asJSJit().isTrampolineNative());
+ return asJSJit().callee()->realm();
}
uint8_t* JitFrameIter::resumePCinCurrentFrame() const {
diff --git a/js/src/vm/FunctionFlags.h b/js/src/vm/FunctionFlags.h
index d927056230..27d51c214a 100644
--- a/js/src/vm/FunctionFlags.h
+++ b/js/src/vm/FunctionFlags.h
@@ -116,9 +116,10 @@ class FunctionFlags {
// This flag is used only by scripted functions and AsmJS.
LAMBDA = 1 << 9,
- // The WASM function has a JIT entry which emulates the
- // js::BaseScript::jitCodeRaw mechanism.
- WASM_JIT_ENTRY = 1 << 10,
+ // This Native function has a JIT entry which emulates the
+ // js::BaseScript::jitCodeRaw mechanism. Used for Wasm functions and
+ // TrampolineNative builtins.
+ NATIVE_JIT_ENTRY = 1 << 10,
// Function had no explicit name, but a name was set by SetFunctionName at
// compile time or SetFunctionName at runtime.
@@ -238,7 +239,7 @@ class FunctionFlags {
switch (kind()) {
case FunctionKind::NormalFunction:
MOZ_ASSERT(!hasFlags(LAZY_ACCESSOR_NAME));
- MOZ_ASSERT(!hasFlags(WASM_JIT_ENTRY));
+ MOZ_ASSERT(!hasFlags(NATIVE_JIT_ENTRY));
break;
case FunctionKind::Arrow:
@@ -246,38 +247,38 @@ class FunctionFlags {
MOZ_ASSERT(!hasFlags(CONSTRUCTOR));
MOZ_ASSERT(!hasFlags(LAZY_ACCESSOR_NAME));
MOZ_ASSERT(hasFlags(LAMBDA));
- MOZ_ASSERT(!hasFlags(WASM_JIT_ENTRY));
+ MOZ_ASSERT(!hasFlags(NATIVE_JIT_ENTRY));
break;
case FunctionKind::Method:
MOZ_ASSERT(hasFlags(BASESCRIPT) || hasFlags(SELFHOSTLAZY));
MOZ_ASSERT(!hasFlags(CONSTRUCTOR));
MOZ_ASSERT(!hasFlags(LAZY_ACCESSOR_NAME));
MOZ_ASSERT(!hasFlags(LAMBDA));
- MOZ_ASSERT(!hasFlags(WASM_JIT_ENTRY));
+ MOZ_ASSERT(!hasFlags(NATIVE_JIT_ENTRY));
break;
case FunctionKind::ClassConstructor:
MOZ_ASSERT(hasFlags(BASESCRIPT) || hasFlags(SELFHOSTLAZY));
MOZ_ASSERT(hasFlags(CONSTRUCTOR));
MOZ_ASSERT(!hasFlags(LAZY_ACCESSOR_NAME));
MOZ_ASSERT(!hasFlags(LAMBDA));
- MOZ_ASSERT(!hasFlags(WASM_JIT_ENTRY));
+ MOZ_ASSERT(!hasFlags(NATIVE_JIT_ENTRY));
break;
case FunctionKind::Getter:
MOZ_ASSERT(!hasFlags(CONSTRUCTOR));
MOZ_ASSERT(!hasFlags(LAMBDA));
- MOZ_ASSERT(!hasFlags(WASM_JIT_ENTRY));
+ MOZ_ASSERT(!hasFlags(NATIVE_JIT_ENTRY));
break;
case FunctionKind::Setter:
MOZ_ASSERT(!hasFlags(CONSTRUCTOR));
MOZ_ASSERT(!hasFlags(LAMBDA));
- MOZ_ASSERT(!hasFlags(WASM_JIT_ENTRY));
+ MOZ_ASSERT(!hasFlags(NATIVE_JIT_ENTRY));
break;
case FunctionKind::AsmJS:
MOZ_ASSERT(!hasFlags(BASESCRIPT));
MOZ_ASSERT(!hasFlags(SELFHOSTLAZY));
MOZ_ASSERT(!hasFlags(LAZY_ACCESSOR_NAME));
- MOZ_ASSERT(!hasFlags(WASM_JIT_ENTRY));
+ MOZ_ASSERT(!hasFlags(NATIVE_JIT_ENTRY));
break;
case FunctionKind::Wasm:
MOZ_ASSERT(!hasFlags(BASESCRIPT));
@@ -316,10 +317,11 @@ class FunctionFlags {
MOZ_ASSERT_IF(kind() == Wasm, isNativeFun());
return kind() == Wasm;
}
- bool isWasmWithJitEntry() const {
- MOZ_ASSERT_IF(hasFlags(WASM_JIT_ENTRY), isWasm());
- return hasFlags(WASM_JIT_ENTRY);
+ bool isNativeWithJitEntry() const {
+ MOZ_ASSERT_IF(hasFlags(NATIVE_JIT_ENTRY), isNativeFun());
+ return hasFlags(NATIVE_JIT_ENTRY);
}
+ bool isWasmWithJitEntry() const { return isWasm() && isNativeWithJitEntry(); }
bool isNativeWithoutJitEntry() const {
MOZ_ASSERT_IF(!hasJitEntry(), isNativeFun());
return !hasJitEntry();
@@ -328,7 +330,14 @@ class FunctionFlags {
return isNativeFun() && !isAsmJSNative() && !isWasm();
}
bool hasJitEntry() const {
- return hasBaseScript() || hasSelfHostedLazyScript() || isWasmWithJitEntry();
+ return hasBaseScript() || hasSelfHostedLazyScript() ||
+ isNativeWithJitEntry();
+ }
+
+ bool canHaveJitInfo() const {
+ // A native builtin can have a pointer to either its JitEntry or JSJitInfo,
+ // but not both.
+ return isBuiltinNative() && !isNativeWithJitEntry();
}
/* Possible attributes of an interpreted function: */
@@ -417,7 +426,7 @@ class FunctionFlags {
return clearFlags(LAZY_ACCESSOR_NAME);
}
- FunctionFlags& setWasmJitEntry() { return setFlags(WASM_JIT_ENTRY); }
+ FunctionFlags& setNativeJitEntry() { return setFlags(NATIVE_JIT_ENTRY); }
bool isExtended() const { return hasFlags(EXTENDED); }
FunctionFlags& setIsExtended() { return setFlags(EXTENDED); }
@@ -430,7 +439,7 @@ class FunctionFlags {
static uint16_t HasJitEntryFlags(bool isConstructing) {
uint16_t flags = BASESCRIPT | SELFHOSTLAZY;
if (!isConstructing) {
- flags |= WASM_JIT_ENTRY;
+ flags |= NATIVE_JIT_ENTRY;
}
return flags;
}
diff --git a/js/src/vm/GeckoProfiler.cpp b/js/src/vm/GeckoProfiler.cpp
index 330bcd1fa6..dbf1eb9081 100644
--- a/js/src/vm/GeckoProfiler.cpp
+++ b/js/src/vm/GeckoProfiler.cpp
@@ -57,9 +57,15 @@ static jit::JitFrameLayout* GetTopProfilingJitFrame(jit::JitActivation* act) {
return nullptr;
}
+ // Skip if the activation has no JS frames. This can happen if there's only a
+ // TrampolineNative frame because these are skipped by the profiling frame
+ // iterator.
jit::JSJitProfilingFrameIterator jitIter(
(jit::CommonFrameLayout*)iter.frame().fp());
- MOZ_ASSERT(!jitIter.done());
+ if (jitIter.done()) {
+ return nullptr;
+ }
+
return jitIter.framePtr();
}
diff --git a/js/src/vm/GeneratorObject.cpp b/js/src/vm/GeneratorObject.cpp
index 4f8d807df5..4a23c4ca9e 100644
--- a/js/src/vm/GeneratorObject.cpp
+++ b/js/src/vm/GeneratorObject.cpp
@@ -194,10 +194,10 @@ void AbstractGeneratorObject::dump() const {
}
#endif
-void AbstractGeneratorObject::finalSuspend(HandleObject obj) {
+void AbstractGeneratorObject::finalSuspend(JSContext* cx, HandleObject obj) {
auto* genObj = &obj->as<AbstractGeneratorObject>();
MOZ_ASSERT(genObj->isRunning());
- genObj->setClosed();
+ genObj->setClosed(cx);
}
static AbstractGeneratorObject* GetGeneratorObjectForCall(JSContext* cx,
@@ -442,6 +442,16 @@ void AbstractGeneratorObject::setUnaliasedLocal(uint32_t slot,
return stackStorage().setDenseElement(slot, value);
}
+void AbstractGeneratorObject::setClosed(JSContext* cx) {
+ setFixedSlot(CALLEE_SLOT, NullValue());
+ setFixedSlot(ENV_CHAIN_SLOT, NullValue());
+ setFixedSlot(ARGS_OBJ_SLOT, NullValue());
+ setFixedSlot(STACK_STORAGE_SLOT, NullValue());
+ setFixedSlot(RESUME_INDEX_SLOT, NullValue());
+
+ DebugAPI::onGeneratorClosed(cx, this);
+}
+
bool AbstractGeneratorObject::isAfterYield() {
return isAfterYieldOrAwait(JSOp::Yield);
}
diff --git a/js/src/vm/GeneratorObject.h b/js/src/vm/GeneratorObject.h
index ddc11ba781..b952ebd533 100644
--- a/js/src/vm/GeneratorObject.h
+++ b/js/src/vm/GeneratorObject.h
@@ -57,7 +57,7 @@ class AbstractGeneratorObject : public NativeObject {
static bool suspend(JSContext* cx, HandleObject obj, AbstractFramePtr frame,
const jsbytecode* pc, unsigned nvalues);
- static void finalSuspend(HandleObject obj);
+ static void finalSuspend(JSContext* cx, HandleObject obj);
JSFunction& callee() const {
return getFixedSlot(CALLEE_SLOT).toObject().as<JSFunction>();
@@ -149,13 +149,7 @@ class AbstractGeneratorObject : public NativeObject {
return getFixedSlot(RESUME_INDEX_SLOT).toInt32();
}
bool isClosed() const { return getFixedSlot(CALLEE_SLOT).isNull(); }
- void setClosed() {
- setFixedSlot(CALLEE_SLOT, NullValue());
- setFixedSlot(ENV_CHAIN_SLOT, NullValue());
- setFixedSlot(ARGS_OBJ_SLOT, NullValue());
- setFixedSlot(STACK_STORAGE_SLOT, NullValue());
- setFixedSlot(RESUME_INDEX_SLOT, NullValue());
- }
+ void setClosed(JSContext* cx);
bool isAfterYield();
bool isAfterAwait();
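
The JS-observable effect of the finalSuspend/setClosed path is simply that a finished generator stays closed; a small editorial sketch:

function* g() { yield 1; }
const it = g();
it.next();  // { value: 1, done: false }
it.next();  // { value: undefined, done: true }  -- final suspend closes the generator
it.next();  // { value: undefined, done: true }  -- remains closed; no further execution
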
diff --git a/js/src/vm/GlobalObject.h b/js/src/vm/GlobalObject.h
index a296336385..97286fde51 100644
--- a/js/src/vm/GlobalObject.h
+++ b/js/src/vm/GlobalObject.h
@@ -1056,10 +1056,7 @@ class GlobalObject : public NativeObject {
void setSourceURLsHolder(ArrayObject* holder) {
data().sourceURLsHolder = holder;
}
- void clearSourceURLSHolder() {
- // This is called at the start of shrinking GCs, so avoids barriers.
- data().sourceURLsHolder.unbarrieredSet(nullptr);
- }
+ void clearSourceURLSHolder() { setSourceURLsHolder(nullptr); }
SharedShape* maybeArrayShapeWithDefaultProto() const {
return data().arrayShapeWithDefaultProto;
diff --git a/js/src/vm/Interpreter.cpp b/js/src/vm/Interpreter.cpp
index 9eec4b81dd..f4cdc86f18 100644
--- a/js/src/vm/Interpreter.cpp
+++ b/js/src/vm/Interpreter.cpp
@@ -1273,7 +1273,7 @@ bool js::HandleClosingGeneratorReturn(JSContext* cx, AbstractFramePtr frame,
cx->clearPendingException();
ok = true;
auto* genObj = GetGeneratorObjectForFrame(cx, frame);
- genObj->setClosed();
+ genObj->setClosed(cx);
}
return ok;
}
@@ -4164,7 +4164,7 @@ bool MOZ_NEVER_INLINE JS_HAZ_JSNATIVE_CALLER js::Interpret(JSContext* cx,
CASE(FinalYieldRval) {
ReservedRooted<JSObject*> gen(&rootObject0, &REGS.sp[-1].toObject());
REGS.sp--;
- AbstractGeneratorObject::finalSuspend(gen);
+ AbstractGeneratorObject::finalSuspend(cx, gen);
goto successful_return_continuation;
}
diff --git a/js/src/vm/InvalidatingFuse.cpp b/js/src/vm/InvalidatingFuse.cpp
index 3e14d72fcc..cae8875a19 100644
--- a/js/src/vm/InvalidatingFuse.cpp
+++ b/js/src/vm/InvalidatingFuse.cpp
@@ -22,7 +22,8 @@ js::DependentScriptSet::DependentScriptSet(JSContext* cx,
bool js::InvalidatingRuntimeFuse::addFuseDependency(JSContext* cx,
Handle<JSScript*> script) {
auto* zone = script->zone();
- DependentScriptSet* dss = zone->getOrCreateDependentScriptSet(cx, this);
+ DependentScriptSet* dss =
+ zone->fuseDependencies.getOrCreateDependentScriptSet(cx, this);
if (!dss) {
return false;
}
@@ -80,3 +81,20 @@ bool js::DependentScriptSet::addScriptForFuse(InvalidatingFuse* fuse,
// Script is already in the set, no need to re-add.
return true;
}
+
+js::DependentScriptSet* js::DependentScriptGroup::getOrCreateDependentScriptSet(
+ JSContext* cx, js::InvalidatingFuse* fuse) {
+ for (auto& dss : dependencies) {
+ if (dss.associatedFuse == fuse) {
+ return &dss;
+ }
+ }
+
+ if (!dependencies.emplaceBack(cx, fuse)) {
+ return nullptr;
+ }
+
+ auto& dss = dependencies.back();
+ MOZ_ASSERT(dss.associatedFuse == fuse);
+ return &dss;
+}
diff --git a/js/src/vm/InvalidatingFuse.h b/js/src/vm/InvalidatingFuse.h
index 6fc458ee79..b8760e8e0f 100644
--- a/js/src/vm/InvalidatingFuse.h
+++ b/js/src/vm/InvalidatingFuse.h
@@ -10,7 +10,7 @@
#include "gc/Barrier.h"
#include "gc/SweepingAPI.h"
#include "vm/GuardFuse.h"
-#include "vm/JSScript.h"
+class JSScript;
namespace js {
@@ -40,10 +40,6 @@ class InvalidatingRuntimeFuse : public InvalidatingFuse {
// A (weak) set of scripts which are dependent on an associated fuse.
//
-// These are typically stored in a vector at the moment, due to the low number
-// of invalidating fuses, and so the associated fuse is stored along with the
-// set.
-//
// Because it uses JS::WeakCache, GC tracing is taken care of without any need
// for tracing in this class.
class DependentScriptSet {
@@ -61,6 +57,24 @@ class DependentScriptSet {
js::WeakCache<WeakScriptSet> weakScripts;
};
+class DependentScriptGroup {
+ // A dependent script set pairs a fuse with a set of scripts which depend
+ // on said fuse; this is a vector of script sets because the expectation for
+ // now is that the number of runtime-wide invalidating fuses will be small.
+ // This will need to be revisited (convert to HashMap?) should that no
+ // longer be the case.
+ //
+ // Note: This isn't traced through the zone, but rather through the use
+ // of JS::WeakCache.
+ Vector<DependentScriptSet, 1, SystemAllocPolicy> dependencies;
+
+ public:
+ DependentScriptSet* getOrCreateDependentScriptSet(JSContext* cx,
+ InvalidatingFuse* fuse);
+ DependentScriptSet* begin() { return dependencies.begin(); }
+ DependentScriptSet* end() { return dependencies.end(); }
+};
+
} // namespace js
#endif // vm_InvalidatingFuse_h
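
A schematic of the getOrCreateDependentScriptSet lookup in plain JS (fuse and scripts are placeholders): a linear scan over a small vector is deliberately preferred to a hash map while the number of invalidating fuses stays tiny.

function getOrCreateDependentScriptSet(groups, fuse) {
  for (const group of groups) {
    if (group.fuse === fuse) {
      return group;             // reuse the existing set for this fuse
    }
  }
  const group = { fuse: fuse, scripts: new Set() };
  groups.push(group);
  return group;
}
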
diff --git a/js/src/vm/Iteration.cpp b/js/src/vm/Iteration.cpp
index d02f9de8cf..e36ea8b555 100644
--- a/js/src/vm/Iteration.cpp
+++ b/js/src/vm/Iteration.cpp
@@ -1940,10 +1940,113 @@ static const JSFunctionSpec iterator_methods_with_helpers[] = {
JS_FS_END,
};
+// https://tc39.es/proposal-iterator-helpers/#sec-SetterThatIgnoresPrototypeProperties
+static bool SetterThatIgnoresPrototypeProperties(JSContext* cx,
+ Handle<Value> thisv,
+ Handle<PropertyKey> prop,
+ Handle<Value> value) {
+ // Step 1.
+ Rooted<JSObject*> thisObj(cx,
+ RequireObject(cx, JSMSG_OBJECT_REQUIRED, thisv));
+ if (!thisObj) {
+ return false;
+ }
+
+ // Step 2.
+ Rooted<JSObject*> home(
+ cx, GlobalObject::getOrCreateIteratorPrototype(cx, cx->global()));
+ if (!home) {
+ return false;
+ }
+ if (thisObj == home) {
+ UniqueChars propName =
+ IdToPrintableUTF8(cx, prop, IdToPrintableBehavior::IdIsPropertyKey);
+ if (!propName) {
+ return false;
+ }
+
+ // Step 2.b.
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_READ_ONLY,
+ propName.get());
+ return false;
+ }
+
+ // Step 3.
+ Rooted<Maybe<PropertyDescriptor>> desc(cx);
+ if (!GetOwnPropertyDescriptor(cx, thisObj, prop, &desc)) {
+ return false;
+ }
+
+ // Step 4.
+ if (desc.isNothing()) {
+ // Step 4.a.
+ return DefineDataProperty(cx, thisObj, prop, value, JSPROP_ENUMERATE);
+ }
+
+ // Step 5.
+ return SetProperty(cx, thisObj, prop, value);
+}
+
+// https://tc39.es/proposal-iterator-helpers/#sec-get-iteratorprototype-@@tostringtag
+static bool toStringTagGetter(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // Step 1.
+ args.rval().setString(cx->names().Iterator);
+ return true;
+}
+
+// https://tc39.es/proposal-iterator-helpers/#sec-set-iteratorprototype-@@tostringtag
+static bool toStringTagSetter(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // Step 1.
+ Rooted<PropertyKey> prop(
+ cx, PropertyKey::Symbol(cx->wellKnownSymbols().toStringTag));
+ if (!SetterThatIgnoresPrototypeProperties(cx, args.thisv(), prop,
+ args.get(0))) {
+ return false;
+ }
+
+ // Step 2.
+ args.rval().setUndefined();
+ return true;
+}
+
+// https://tc39.es/proposal-iterator-helpers/#sec-get-iteratorprototype-constructor
+static bool constructorGetter(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // Step 1.
+ Rooted<JSObject*> constructor(
+ cx, GlobalObject::getOrCreateConstructor(cx, JSProto_Iterator));
+ if (!constructor) {
+ return false;
+ }
+ args.rval().setObject(*constructor);
+ return true;
+}
+
+// https://tc39.es/proposal-iterator-helpers/#sec-set-iteratorprototype-constructor
+static bool constructorSetter(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // Step 1.
+ Rooted<PropertyKey> prop(cx, NameToId(cx->names().constructor));
+ if (!SetterThatIgnoresPrototypeProperties(cx, args.thisv(), prop,
+ args.get(0))) {
+ return false;
+ }
+
+ // Step 2.
+ args.rval().setUndefined();
+ return true;
+}
+
static const JSPropertySpec iterator_properties[] = {
- // NOTE: Contrary to most other @@toStringTag properties, this property is
- // writable.
- JS_STRING_SYM_PS(toStringTag, "Iterator", 0),
+ // NOTE: Contrary to most other @@toStringTag properties, this property
+ // has a special setter (and a getter).
+ JS_SYM_GETSET(toStringTag, toStringTagGetter, toStringTagSetter, 0),
JS_PS_END,
};
@@ -2081,7 +2184,7 @@ static const ClassSpec IteratorObjectClassSpec = {
nullptr,
iterator_methods_with_helpers,
iterator_properties,
- nullptr,
+ IteratorObject::finishInit,
};
const JSClass IteratorObject::class_ = {
@@ -2098,6 +2201,13 @@ const JSClass IteratorObject::protoClass_ = {
&IteratorObjectClassSpec,
};
+/* static */ bool IteratorObject::finishInit(JSContext* cx, HandleObject ctor,
+ HandleObject proto) {
+ Rooted<PropertyKey> id(cx, NameToId(cx->names().constructor));
+ return JS_DefinePropertyById(cx, proto, id, constructorGetter,
+ constructorSetter, 0);
+}
+
// Set up WrapForValidIteratorObject class and its prototype.
static const JSFunctionSpec wrap_for_valid_iterator_methods[] = {
JS_SELF_HOSTED_FN("next", "WrapForValidIteratorNext", 0, 0),
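
A hedged sketch of the behavior SetterThatIgnoresPrototypeProperties produces from script (requires Iterator helpers support and the harness assertions shown earlier): assigning on %Iterator.prototype% itself throws, while assigning through an instance defines an own data property instead of being swallowed by the accessor.

class MyIter extends Iterator {
  next() { return { done: true, value: undefined }; }
}

const it = new MyIter();
it[Symbol.toStringTag] = "MyIter";               // defines an own data property on `it`
assert.sameValue(it[Symbol.toStringTag], "MyIter");

assert.throws(TypeError, function () {
  Iterator.prototype[Symbol.toStringTag] = "nope";  // step 2.b: the home object rejects
});
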
diff --git a/js/src/vm/Iteration.h b/js/src/vm/Iteration.h
index 92edaccd65..7f1047eec0 100644
--- a/js/src/vm/Iteration.h
+++ b/js/src/vm/Iteration.h
@@ -751,6 +751,8 @@ class IteratorObject : public NativeObject {
public:
static const JSClass class_;
static const JSClass protoClass_;
+
+ static bool finishInit(JSContext* cx, HandleObject ctor, HandleObject proto);
};
/*
diff --git a/js/src/vm/JSAtomUtils.cpp b/js/src/vm/JSAtomUtils.cpp
index ab8e4b2e30..2f8b066f0c 100644
--- a/js/src/vm/JSAtomUtils.cpp
+++ b/js/src/vm/JSAtomUtils.cpp
@@ -62,113 +62,30 @@ extern bool GetUTF8AtomizationData(JSContext* cx, const JS::UTF8Chars& utf8,
JS::SmallestEncoding* encoding,
HashNumber* hashNum);
-struct js::AtomHasher::Lookup {
- union {
- const JS::Latin1Char* latin1Chars;
- const char16_t* twoByteChars;
- const char* utf8Bytes;
- };
- enum { TwoByteChar, Latin1, UTF8 } type;
- size_t length;
- size_t byteLength;
- const JSAtom* atom; /* Optional. */
- JS::AutoCheckCannotGC nogc;
-
- HashNumber hash;
-
- MOZ_ALWAYS_INLINE Lookup(const char* utf8Bytes, size_t byteLen, size_t length,
- HashNumber hash)
- : utf8Bytes(utf8Bytes),
- type(UTF8),
- length(length),
- byteLength(byteLen),
- atom(nullptr),
- hash(hash) {}
-
- MOZ_ALWAYS_INLINE Lookup(const char16_t* chars, size_t length)
- : twoByteChars(chars),
- type(TwoByteChar),
- length(length),
- atom(nullptr),
- hash(mozilla::HashString(chars, length)) {}
-
- MOZ_ALWAYS_INLINE Lookup(const JS::Latin1Char* chars, size_t length)
- : latin1Chars(chars),
- type(Latin1),
- length(length),
- atom(nullptr),
- hash(mozilla::HashString(chars, length)) {}
-
- MOZ_ALWAYS_INLINE Lookup(HashNumber hash, const char16_t* chars,
- size_t length)
- : twoByteChars(chars),
- type(TwoByteChar),
- length(length),
- atom(nullptr),
- hash(hash) {
- MOZ_ASSERT(hash == mozilla::HashString(chars, length));
- }
-
- MOZ_ALWAYS_INLINE Lookup(HashNumber hash, const JS::Latin1Char* chars,
- size_t length)
- : latin1Chars(chars),
- type(Latin1),
- length(length),
- atom(nullptr),
- hash(hash) {
- MOZ_ASSERT(hash == mozilla::HashString(chars, length));
- }
-
- inline explicit Lookup(const JSAtom* atom)
- : type(atom->hasLatin1Chars() ? Latin1 : TwoByteChar),
- length(atom->length()),
- atom(atom),
- hash(atom->hash()) {
- if (type == Latin1) {
- latin1Chars = atom->latin1Chars(nogc);
- MOZ_ASSERT(mozilla::HashString(latin1Chars, length) == hash);
- } else {
- MOZ_ASSERT(type == TwoByteChar);
- twoByteChars = atom->twoByteChars(nogc);
- MOZ_ASSERT(mozilla::HashString(twoByteChars, length) == hash);
- }
- }
-};
-
-inline HashNumber js::AtomHasher::hash(const Lookup& l) { return l.hash; }
-
-MOZ_ALWAYS_INLINE bool js::AtomHasher::match(const WeakHeapPtr<JSAtom*>& entry,
- const Lookup& lookup) {
- JSAtom* key = entry.unbarrieredGet();
- if (lookup.atom) {
- return lookup.atom == key;
- }
- if (key->length() != lookup.length || key->hash() != lookup.hash) {
- return false;
- }
-
- if (key->hasLatin1Chars()) {
- const Latin1Char* keyChars = key->latin1Chars(lookup.nogc);
- switch (lookup.type) {
+MOZ_ALWAYS_INLINE bool js::AtomHasher::Lookup::StringsMatch(
+ const JSAtom& atom) const {
+ if (atom.hasLatin1Chars()) {
+ const Latin1Char* keyChars = atom.latin1Chars(nogc);
+ switch (type) {
case Lookup::Latin1:
- return EqualChars(keyChars, lookup.latin1Chars, lookup.length);
+ return EqualChars(keyChars, latin1Chars, length);
case Lookup::TwoByteChar:
- return EqualChars(keyChars, lookup.twoByteChars, lookup.length);
+ return EqualChars(keyChars, twoByteChars, length);
case Lookup::UTF8: {
- JS::UTF8Chars utf8(lookup.utf8Bytes, lookup.byteLength);
+ JS::UTF8Chars utf8(utf8Bytes, byteLength);
return UTF8EqualsChars(utf8, keyChars);
}
}
}
- const char16_t* keyChars = key->twoByteChars(lookup.nogc);
- switch (lookup.type) {
+ const char16_t* keyChars = atom.twoByteChars(nogc);
+ switch (type) {
case Lookup::Latin1:
- return EqualChars(lookup.latin1Chars, keyChars, lookup.length);
+ return EqualChars(latin1Chars, keyChars, length);
case Lookup::TwoByteChar:
- return EqualChars(keyChars, lookup.twoByteChars, lookup.length);
+ return EqualChars(keyChars, twoByteChars, length);
case Lookup::UTF8: {
- JS::UTF8Chars utf8(lookup.utf8Bytes, lookup.byteLength);
+ JS::UTF8Chars utf8(utf8Bytes, byteLength);
return UTF8EqualsChars(utf8, keyChars);
}
}
@@ -177,6 +94,21 @@ MOZ_ALWAYS_INLINE bool js::AtomHasher::match(const WeakHeapPtr<JSAtom*>& entry,
return false;
}
+inline HashNumber js::AtomHasher::hash(const Lookup& l) { return l.hash; }
+
+MOZ_ALWAYS_INLINE bool js::AtomHasher::match(const WeakHeapPtr<JSAtom*>& entry,
+ const Lookup& lookup) {
+ JSAtom* key = entry.unbarrieredGet();
+ if (lookup.atom) {
+ return lookup.atom == key;
+ }
+ if (key->length() != lookup.length || key->hash() != lookup.hash) {
+ return false;
+ }
+
+ return lookup.StringsMatch(*key);
+}
+
UniqueChars js::AtomToPrintableString(JSContext* cx, JSAtom* atom) {
return QuoteString(cx, atom);
}
@@ -450,18 +382,21 @@ static MOZ_ALWAYS_INLINE JSAtom*
AtomizeAndCopyCharsNonStaticValidLengthFromLookup(
JSContext* cx, const CharT* chars, size_t length,
const AtomHasher::Lookup& lookup, const Maybe<uint32_t>& indexValue) {
- // Try the per-Zone cache first. If we find the atom there we can avoid the
- // markAtom call, and the multiple HashSet lookups below.
Zone* zone = cx->zone();
MOZ_ASSERT(zone);
- AtomSet::AddPtr zonePtr = zone->atomCache().lookupForAdd(lookup);
- if (zonePtr) {
- // The cache is purged on GC so if we're in the middle of an
- // incremental GC we should have barriered the atom when we put
- // it in the cache.
- JSAtom* atom = zonePtr->unbarrieredGet();
- MOZ_ASSERT(AtomIsMarked(zone, atom));
- return atom;
+
+ AtomCacheHashTable* atomCache = zone->atomCache();
+
+ // Try the per-Zone cache first. If we find the atom there we can avoid the
+ // markAtom call, and the multiple HashSet lookups below.
+ if (MOZ_LIKELY(atomCache)) {
+ JSAtom* const cachedAtom = atomCache->lookupForAdd(lookup);
+ if (cachedAtom) {
+ // The cache is purged on GC so if we're in the middle of an incremental
+ // GC we should have barriered the atom when we put it in the cache.
+ MOZ_ASSERT(AtomIsMarked(zone, cachedAtom));
+ return cachedAtom;
+ }
}
MOZ_ASSERT(cx->permanentAtomsPopulated());
@@ -469,11 +404,9 @@ AtomizeAndCopyCharsNonStaticValidLengthFromLookup(
AtomSet::Ptr pp = cx->permanentAtoms().readonlyThreadsafeLookup(lookup);
if (pp) {
JSAtom* atom = pp->get();
- if (MOZ_UNLIKELY(!zone->atomCache().add(zonePtr, atom))) {
- ReportOutOfMemory(cx);
- return nullptr;
+ if (MOZ_LIKELY(atomCache)) {
+ atomCache->add(lookup.hash, atom);
}
-
return atom;
}
@@ -488,11 +421,9 @@ AtomizeAndCopyCharsNonStaticValidLengthFromLookup(
return nullptr;
}
- if (MOZ_UNLIKELY(!zone->atomCache().add(zonePtr, atom))) {
- ReportOutOfMemory(cx);
- return nullptr;
+ if (MOZ_LIKELY(atomCache)) {
+ atomCache->add(lookup.hash, atom);
}
-
return atom;
}
diff --git a/js/src/vm/JSContext.cpp b/js/src/vm/JSContext.cpp
index 5a4bfa86cd..3cc2c4807c 100644
--- a/js/src/vm/JSContext.cpp
+++ b/js/src/vm/JSContext.cpp
@@ -799,8 +799,14 @@ JS_PUBLIC_API void js::StopDrainingJobQueue(JSContext* cx) {
cx->internalJobQueue->interrupt();
}
+JS_PUBLIC_API void js::RestartDrainingJobQueue(JSContext* cx) {
+ MOZ_ASSERT(cx->internalJobQueue.ref());
+ cx->internalJobQueue->uninterrupt();
+}
+
JS_PUBLIC_API void js::RunJobs(JSContext* cx) {
MOZ_ASSERT(cx->jobQueue);
+ MOZ_ASSERT(cx->isEvaluatingModule == 0);
cx->jobQueue->runJobs(cx);
JS::ClearKeptObjects(cx);
}
@@ -887,7 +893,6 @@ void InternalJobQueue::runJobs(JSContext* cx) {
draining_ = false;
if (interrupted_) {
- interrupted_ = false;
break;
}
@@ -969,6 +974,7 @@ JSContext::JSContext(JSRuntime* runtime, const JS::ContextOptions& options)
#ifdef DEBUG
inUnsafeCallWithABI(this, false),
hasAutoUnsafeCallWithABI(this, false),
+ liveArraySortDataInstances(this, 0),
#endif
#ifdef JS_SIMULATOR
simulator_(this, nullptr),
@@ -994,6 +1000,7 @@ JSContext::JSContext(JSRuntime* runtime, const JS::ContextOptions& options)
#else
regExpSearcherLastLimit(this, 0),
#endif
+ isEvaluatingModule(this, 0),
frontendCollectionPool_(this),
suppressProfilerSampling(false),
tempLifoAlloc_(this, (size_t)TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
@@ -1041,6 +1048,9 @@ JSContext::~JSContext() {
/* Free the stuff hanging off of cx. */
MOZ_ASSERT(!resolvingList);
+ // Ensure we didn't leak memory for the ArraySortData vector.
+ MOZ_ASSERT(liveArraySortDataInstances == 0);
+
if (dtoaState) {
DestroyDtoaState(dtoaState);
}
@@ -1189,6 +1199,13 @@ SavedFrame* JSContext::getPendingExceptionStack() {
return unwrappedExceptionStack();
}
+#ifdef DEBUG
+const JS::Value& JSContext::getPendingExceptionUnwrapped() {
+ MOZ_ASSERT(isExceptionPending());
+ return unwrappedException();
+}
+#endif
+
bool JSContext::isClosingGenerator() {
return isExceptionPending() &&
unwrappedException().isMagic(JS_GENERATOR_CLOSING);
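Because runJobs() no longer resets interrupted_ itself, a stopped queue now stays stopped until the embedder explicitly restarts it with the new RestartDrainingJobQueue(). A sketch of the intended pairing; the declarations are repeated here only to keep the sketch self-contained and normally come from the SpiderMonkey public headers:

struct JSContext;

namespace js {
void StopDrainingJobQueue(JSContext* cx);
void RestartDrainingJobQueue(JSContext* cx);
void RunJobs(JSContext* cx);
}  // namespace js

void PauseThenResumeJobQueue(JSContext* cx) {
  js::StopDrainingJobQueue(cx);     // runJobs() stops after the current job
  // ... embedder work during which queued jobs must not run; RunJobs() calls
  // in this window return immediately because interrupted_ stays set ...
  js::RestartDrainingJobQueue(cx);  // clears the interrupt flag explicitly
  js::RunJobs(cx);                  // draining proceeds again
}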
diff --git a/js/src/vm/JSContext.h b/js/src/vm/JSContext.h
index 57aa236801..ba665d6c1a 100644
--- a/js/src/vm/JSContext.h
+++ b/js/src/vm/JSContext.h
@@ -89,12 +89,15 @@ class InternalJobQueue : public JS::JobQueue {
JS::HandleObject incumbentGlobal) override;
void runJobs(JSContext* cx) override;
bool empty() const override;
+ bool isDrainingStopped() const override { return interrupted_; }
// If we are currently in a call to runJobs(), make that call stop processing
// jobs once the current one finishes, and return. If we are not currently in
// a call to runJobs, make all future calls return immediately.
void interrupt() { interrupted_ = true; }
+ void uninterrupt() { interrupted_ = false; }
+
// Return the front element of the queue, or nullptr if the queue is empty.
// This is only used by shell testing functions.
JSObject* maybeFront() const;
@@ -425,6 +428,7 @@ struct JS_PUBLIC_API JSContext : public JS::RootingContext,
#ifdef DEBUG
js::ContextData<uint32_t> inUnsafeCallWithABI;
js::ContextData<bool> hasAutoUnsafeCallWithABI;
+ js::ContextData<uint32_t> liveArraySortDataInstances;
#endif
#ifdef JS_SIMULATOR
@@ -506,6 +510,9 @@ struct JS_PUBLIC_API JSContext : public JS::RootingContext,
return offsetof(JSContext, regExpSearcherLastLimit);
}
+ // Whether we are currently executing the top level of a module.
+ js::ContextData<uint32_t> isEvaluatingModule;
+
private:
// Pools used for recycling name maps and vectors when parsing and
// emitting bytecode. Purged on GC when there are no active script
@@ -719,6 +726,13 @@ struct JS_PUBLIC_API JSContext : public JS::RootingContext,
*/
js::SavedFrame* getPendingExceptionStack();
+#ifdef DEBUG
+ /**
+ * Return the pending exception (without wrapping).
+ */
+ const JS::Value& getPendingExceptionUnwrapped();
+#endif
+
bool isThrowingDebuggeeWouldRun();
bool isClosingGenerator();
diff --git a/js/src/vm/JSFunction.h b/js/src/vm/JSFunction.h
index 451b63e11a..afd54c8f13 100644
--- a/js/src/vm/JSFunction.h
+++ b/js/src/vm/JSFunction.h
@@ -86,7 +86,7 @@ class JSFunction : public js::NativeObject {
* the low bit set to ensure it's never identical to a BaseScript*
* pointer
*
- * - a wasm JIT entry
+ * - a native JIT entry (used for Wasm and TrampolineNative functions)
*
* The JIT depends on none of the above being a valid BaseScript pointer.
*
@@ -199,6 +199,7 @@ class JSFunction : public js::NativeObject {
bool isWasm() const { return flags().isWasm(); }
bool isWasmWithJitEntry() const { return flags().isWasmWithJitEntry(); }
+ bool isNativeWithJitEntry() const { return flags().isNativeWithJitEntry(); }
bool isNativeWithoutJitEntry() const {
return flags().isNativeWithoutJitEntry();
}
@@ -637,7 +638,9 @@ class JSFunction : public js::NativeObject {
JS::PrivateValue(reinterpret_cast<void*>(native)));
setNativeJitInfoOrInterpretedScript(const_cast<JSJitInfo*>(jitInfo));
}
- bool hasJitInfo() const { return isBuiltinNative() && jitInfoUnchecked(); }
+ bool hasJitInfo() const {
+ return flags().canHaveJitInfo() && jitInfoUnchecked();
+ }
const JSJitInfo* jitInfo() const {
MOZ_ASSERT(hasJitInfo());
return jitInfoUnchecked();
@@ -677,12 +680,25 @@ class JSFunction : public js::NativeObject {
MOZ_ASSERT(*entry);
MOZ_ASSERT(isWasm());
MOZ_ASSERT(!isWasmWithJitEntry());
- setFlags(flags().setWasmJitEntry());
+ setFlags(flags().setNativeJitEntry());
setNativeJitInfoOrInterpretedScript(entry);
MOZ_ASSERT(isWasmWithJitEntry());
}
+ void setTrampolineNativeJitEntry(void** entry) {
+ MOZ_ASSERT(*entry);
+ MOZ_ASSERT(isBuiltinNative());
+ MOZ_ASSERT(!hasJitEntry());
+ MOZ_ASSERT(!hasJitInfo(), "shouldn't clobber JSJitInfo");
+ setFlags(flags().setNativeJitEntry());
+ setNativeJitInfoOrInterpretedScript(entry);
+ MOZ_ASSERT(isNativeWithJitEntry());
+ }
void** wasmJitEntry() const {
MOZ_ASSERT(isWasmWithJitEntry());
+ return nativeJitEntry();
+ }
+ void** nativeJitEntry() const {
+ MOZ_ASSERT(isNativeWithJitEntry());
return static_cast<void**>(nativeJitInfoOrInterpretedScript());
}
inline js::wasm::Instance& wasmInstance() const;
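The hasJitInfo() change above exists because the single nativeJitInfoOrInterpretedScript slot is overloaded: for a trampoline native it now holds a JIT entry rather than a JSJitInfo, so the old isBuiltinNative() test would happily reinterpret that entry pointer as jit info. A rough model of the overloading; the names below are illustrative, not the real accessors:

struct JSJitInfo;
namespace js { class BaseScript; }

// One pointer-sized slot, interpreted according to the function's flags.
union NativeJitInfoOrInterpretedScriptSlot {
  const JSJitInfo* jitInfo;   // builtin native that can have JIT info
  void** nativeJitEntry;      // wasm or trampoline native JIT entry
  js::BaseScript* script;     // interpreted function's script
};

enum class SlotKind { JitInfo, NativeJitEntry, Script };

// The guard the new hasJitInfo() relies on: a function whose slot holds a
// native JIT entry must never be treated as having a JSJitInfo.
bool canHaveJitInfo(SlotKind kind) { return kind == SlotKind::JitInfo; }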
diff --git a/js/src/vm/JSONParser.cpp b/js/src/vm/JSONParser.cpp
index f151f261b5..7a440e3090 100644
--- a/js/src/vm/JSONParser.cpp
+++ b/js/src/vm/JSONParser.cpp
@@ -644,7 +644,7 @@ inline bool JSONFullParseHandlerAnyChar::objectOpen(
return false;
}
}
- if (!stack.append(*properties)) {
+ if (!stack.append(StackEntry(cx, *properties))) {
js_delete(*properties);
return false;
}
@@ -676,11 +676,12 @@ inline bool JSONFullParseHandlerAnyChar::objectPropertyName(
return true;
}
-inline void JSONFullParseHandlerAnyChar::finishObjectMember(
+inline bool JSONFullParseHandlerAnyChar::finishObjectMember(
Vector<StackEntry, 10>& stack, JS::Handle<JS::Value> value,
PropertyVector** properties) {
*properties = &stack.back().properties();
(*properties)->back().value = value;
+ return true;
}
inline bool JSONFullParseHandlerAnyChar::finishObject(
@@ -718,7 +719,7 @@ inline bool JSONFullParseHandlerAnyChar::arrayOpen(
return false;
}
}
- if (!stack.append(*elements)) {
+ if (!stack.append(StackEntry(cx, *elements))) {
js_delete(*elements);
return false;
}
@@ -794,7 +795,7 @@ inline bool JSONFullParseHandler<CharT>::setStringValue(
return false;
}
v = JS::StringValue(str);
- return createJSONParseRecord(v, source);
+ return true;
}
template <typename CharT>
@@ -812,26 +813,26 @@ inline bool JSONFullParseHandler<CharT>::setStringValue(
return false;
}
v = JS::StringValue(str);
- return createJSONParseRecord(v, source);
+ return true;
}
template <typename CharT>
inline bool JSONFullParseHandler<CharT>::setNumberValue(
double d, mozilla::Span<const CharT>&& source) {
v = JS::NumberValue(d);
- return createJSONParseRecord(v, source);
+ return true;
}
template <typename CharT>
inline bool JSONFullParseHandler<CharT>::setBooleanValue(
bool value, mozilla::Span<const CharT>&& source) {
- return createJSONParseRecord(JS::BooleanValue(value), source);
+ return true;
}
template <typename CharT>
inline bool JSONFullParseHandler<CharT>::setNullValue(
mozilla::Span<const CharT>&& source) {
- return createJSONParseRecord(JS::NullValue(), source);
+ return true;
}
template <typename CharT>
@@ -847,29 +848,6 @@ void JSONFullParseHandler<CharT>::reportError(const char* msg, uint32_t line,
msg, lineString, columnString);
}
-template <typename CharT>
-void JSONFullParseHandler<CharT>::trace(JSTracer* trc) {
- Base::trace(trc);
- parseRecord.trace(trc);
-}
-
-template <typename CharT>
-inline bool JSONFullParseHandler<CharT>::createJSONParseRecord(
- const Value& value, mozilla::Span<const CharT>& source) {
-#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
- if (cx->realm()->creationOptions().getJSONParseWithSource()) {
- MOZ_ASSERT(!source.IsEmpty());
- Rooted<JSONParseNode*> parseNode(cx,
- NewStringCopy<CanGC, CharT>(cx, source));
- if (!parseNode) {
- return false;
- }
- parseRecord = ParseRecordObject(parseNode, value);
- }
-#endif
- return true;
-}
-
template <typename CharT, typename HandlerT>
JSONPerHandlerParser<CharT, HandlerT>::~JSONPerHandlerParser() {
for (size_t i = 0; i < stack.length(); i++) {
@@ -889,7 +867,9 @@ bool JSONPerHandlerParser<CharT, HandlerT>::parseImpl(TempValueT& value,
switch (state) {
case JSONParserState::FinishObjectMember: {
typename HandlerT::PropertyVector* properties;
- handler.finishObjectMember(stack, value, &properties);
+ if (!handler.finishObjectMember(stack, value, &properties)) {
+ return false;
+ }
token = tokenizer.advanceAfterProperty();
if (token == JSONToken::ObjectClose) {
@@ -1069,6 +1049,13 @@ template class js::JSONPerHandlerParser<Latin1Char,
template class js::JSONPerHandlerParser<char16_t,
js::JSONFullParseHandler<char16_t>>;
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+template class js::JSONPerHandlerParser<Latin1Char,
+ js::JSONReviveHandler<Latin1Char>>;
+template class js::JSONPerHandlerParser<char16_t,
+ js::JSONReviveHandler<char16_t>>;
+#endif
+
template class js::JSONPerHandlerParser<Latin1Char,
js::JSONSyntaxParseHandler<Latin1Char>>;
template class js::JSONPerHandlerParser<char16_t,
@@ -1085,20 +1072,148 @@ bool JSONParser<CharT>::parse(JS::MutableHandle<JS::Value> vp) {
}
template <typename CharT>
-bool JSONParser<CharT>::parse(JS::MutableHandle<JS::Value> vp,
- JS::MutableHandle<ParseRecordObject> pro) {
+void JSONParser<CharT>::trace(JSTracer* trc) {
+ this->handler.trace(trc);
+
+ for (auto& elem : this->stack) {
+ if (elem.state == JSONParserState::FinishArrayElement) {
+ elem.elements().trace(trc);
+ } else {
+ elem.properties().trace(trc);
+ }
+ }
+}
+
+template class js::JSONParser<Latin1Char>;
+template class js::JSONParser<char16_t>;
+
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+template <typename CharT>
+inline bool JSONReviveHandler<CharT>::objectOpen(Vector<StackEntry, 10>& stack,
+ PropertyVector** properties) {
+ if (!parseRecordStack.append(ParseRecordEntry{context()})) {
+ return false;
+ }
+
+ return Base::objectOpen(stack, properties);
+}
+
+template <typename CharT>
+inline bool JSONReviveHandler<CharT>::finishObjectMember(
+ Vector<StackEntry, 10>& stack, JS::Handle<JS::Value> value,
+ PropertyVector** properties) {
+ if (!Base::finishObjectMember(stack, value, properties)) {
+ return false;
+ }
+ parseRecord.value = value;
+ return finishMemberParseRecord((*properties)->back().id,
+ parseRecordStack.back());
+}
+
+template <typename CharT>
+inline bool JSONReviveHandler<CharT>::finishObject(
+ Vector<StackEntry, 10>& stack, JS::MutableHandle<JS::Value> vp,
+ PropertyVector* properties) {
+ if (!Base::finishObject(stack, vp, properties)) {
+ return false;
+ }
+ if (!finishCompoundParseRecord(vp, parseRecordStack.back())) {
+ return false;
+ }
+ parseRecordStack.popBack();
+
+ return true;
+}
+
+template <typename CharT>
+inline bool JSONReviveHandler<CharT>::arrayOpen(Vector<StackEntry, 10>& stack,
+ ElementVector** elements) {
+ if (!parseRecordStack.append(ParseRecordEntry{context()})) {
+ return false;
+ }
+
+ return Base::arrayOpen(stack, elements);
+}
+
+template <typename CharT>
+inline bool JSONReviveHandler<CharT>::arrayElement(
+ Vector<StackEntry, 10>& stack, JS::Handle<JS::Value> value,
+ ElementVector** elements) {
+ if (!Base::arrayElement(stack, value, elements)) {
+ return false;
+ }
+ size_t index = (*elements)->length() - 1;
+ JS::PropertyKey key = js::PropertyKey::Int(index);
+ return finishMemberParseRecord(key, parseRecordStack.back());
+}
+
+template <typename CharT>
+inline bool JSONReviveHandler<CharT>::finishArray(
+ Vector<StackEntry, 10>& stack, JS::MutableHandle<JS::Value> vp,
+ ElementVector* elements) {
+ if (!Base::finishArray(stack, vp, elements)) {
+ return false;
+ }
+ if (!finishCompoundParseRecord(vp, parseRecordStack.back())) {
+ return false;
+ }
+ parseRecordStack.popBack();
+
+ return true;
+}
+
+template <typename CharT>
+inline bool JSONReviveHandler<CharT>::finishMemberParseRecord(
+ JS::PropertyKey& key, ParseRecordEntry& objectEntry) {
+ parseRecord.key = key;
+ return objectEntry.put(key, std::move(parseRecord));
+}
+
+template <typename CharT>
+inline bool JSONReviveHandler<CharT>::finishCompoundParseRecord(
+ const Value& value, ParseRecordEntry& objectEntry) {
+ Rooted<JSONParseNode*> parseNode(context());
+ parseRecord = ParseRecordObject(parseNode, value);
+ return parseRecord.addEntries(context(), std::move(objectEntry));
+}
+
+template <typename CharT>
+inline bool JSONReviveHandler<CharT>::finishPrimitiveParseRecord(
+ const Value& value, SourceT source) {
+ MOZ_ASSERT(!source.IsEmpty()); // Empty source is for objects and arrays
+ Rooted<JSONParseNode*> parseNode(
+ context(), NewStringCopy<CanGC, CharT>(context(), source));
+ if (!parseNode) {
+ return false;
+ }
+ parseRecord = ParseRecordObject(parseNode, value);
+ return true;
+}
+
+template <typename CharT>
+void JSONReviveHandler<CharT>::trace(JSTracer* trc) {
+ Base::trace(trc);
+ parseRecord.trace(trc);
+ for (auto& entry : this->parseRecordStack) {
+ entry.trace(trc);
+ }
+}
+
+template <typename CharT>
+bool JSONReviveParser<CharT>::parse(JS::MutableHandle<JS::Value> vp,
+ JS::MutableHandle<ParseRecordObject> pro) {
JS::Rooted<JS::Value> tempValue(this->handler.cx);
vp.setUndefined();
bool result = this->parseImpl(
tempValue, [&](JS::Handle<JS::Value> value) { vp.set(value); });
- pro.get() = std::move(this->handler.parseRecord);
+ pro.set(std::move(this->handler.parseRecord));
return result;
}
template <typename CharT>
-void JSONParser<CharT>::trace(JSTracer* trc) {
+void JSONReviveParser<CharT>::trace(JSTracer* trc) {
this->handler.trace(trc);
for (auto& elem : this->stack) {
@@ -1110,8 +1225,9 @@ void JSONParser<CharT>::trace(JSTracer* trc) {
}
}
-template class js::JSONParser<Latin1Char>;
-template class js::JSONParser<char16_t>;
+template class js::JSONReviveParser<Latin1Char>;
+template class js::JSONReviveParser<char16_t>;
+#endif // ENABLE_JSON_PARSE_WITH_SOURCE
template <typename CharT>
inline bool JSONSyntaxParseHandler<CharT>::objectOpen(
@@ -1359,9 +1475,11 @@ class MOZ_STACK_CLASS DelegateHandler {
*isProtoInEval = false;
return true;
}
- inline void finishObjectMember(Vector<StackEntry, 10>& stack,
+ inline bool finishObjectMember(Vector<StackEntry, 10>& stack,
DummyValue& value,
- PropertyVector** properties) {}
+ PropertyVector** properties) {
+ return true;
+ }
inline bool finishObject(Vector<StackEntry, 10>& stack, DummyValue* vp,
PropertyVector* properties) {
if (hadHandlerError_) {
diff --git a/js/src/vm/JSONParser.h b/js/src/vm/JSONParser.h
index 91e33c02b3..51b90e003c 100644
--- a/js/src/vm/JSONParser.h
+++ b/js/src/vm/JSONParser.h
@@ -190,10 +190,10 @@ class MOZ_STACK_CLASS JSONFullParseHandlerAnyChar {
return *static_cast<PropertyVector*>(vector);
}
- explicit StackEntry(ElementVector* elements)
+ explicit StackEntry(JSContext* cx, ElementVector* elements)
: state(JSONParserState::FinishArrayElement), vector(elements) {}
- explicit StackEntry(PropertyVector* properties)
+ explicit StackEntry(JSContext* cx, PropertyVector* properties)
: state(JSONParserState::FinishObjectMember), vector(properties) {}
JSONParserState state;
@@ -255,7 +255,7 @@ class MOZ_STACK_CLASS JSONFullParseHandlerAnyChar {
PropertyVector** properties);
inline bool objectPropertyName(Vector<StackEntry, 10>& stack,
bool* isProtoInEval);
- inline void finishObjectMember(Vector<StackEntry, 10>& stack,
+ inline bool finishObjectMember(Vector<StackEntry, 10>& stack,
JS::Handle<JS::Value> value,
PropertyVector** properties);
inline bool finishObject(Vector<StackEntry, 10>& stack,
@@ -303,8 +303,6 @@ class MOZ_STACK_CLASS JSONFullParseHandler
bool append(const CharT* begin, const CharT* end);
};
- ParseRecordObject parseRecord;
-
explicit JSONFullParseHandler(JSContext* cx) : Base(cx) {}
JSONFullParseHandler(JSONFullParseHandler&& other) noexcept
@@ -324,13 +322,101 @@ class MOZ_STACK_CLASS JSONFullParseHandler
inline bool setNullValue(mozilla::Span<const CharT>&& source);
void reportError(const char* msg, uint32_t line, uint32_t column);
+};
+
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+template <typename CharT>
+class MOZ_STACK_CLASS JSONReviveHandler : public JSONFullParseHandler<CharT> {
+ using CharPtr = mozilla::RangedPtr<const CharT>;
+ using Base = JSONFullParseHandler<CharT>;
+
+ public:
+ using SourceT = mozilla::Span<const CharT>;
+ using ParseRecordEntry = ParseRecordObject::EntryMap;
+
+ using StringBuilder = typename Base::StringBuilder;
+ using StackEntry = typename Base::StackEntry;
+ using PropertyVector = typename Base::PropertyVector;
+ using ElementVector = typename Base::ElementVector;
+
+ public:
+ explicit JSONReviveHandler(JSContext* cx) : Base(cx), parseRecordStack(cx) {}
+
+ JSONReviveHandler(JSONReviveHandler&& other) noexcept
+ : Base(std::move(other)),
+ parseRecordStack(std::move(other.parseRecordStack)),
+ parseRecord(std::move(other.parseRecord)) {}
+
+ JSONReviveHandler(const JSONReviveHandler& other) = delete;
+ void operator=(const JSONReviveHandler& other) = delete;
+
+ JSContext* context() { return this->cx; }
+
+ template <JSONStringType ST>
+ inline bool setStringValue(CharPtr start, size_t length, SourceT&& source) {
+ if (!Base::template setStringValue<ST>(start, length,
+ std::forward<SourceT&&>(source))) {
+ return false;
+ }
+ return finishPrimitiveParseRecord(this->v, source);
+ }
+
+ template <JSONStringType ST>
+ inline bool setStringValue(StringBuilder& builder, SourceT&& source) {
+ if (!Base::template setStringValue<ST>(builder,
+ std::forward<SourceT&&>(source))) {
+ return false;
+ }
+ return finishPrimitiveParseRecord(this->v, source);
+ }
+
+ inline bool setNumberValue(double d, SourceT&& source) {
+ if (!Base::setNumberValue(d, std::forward<SourceT&&>(source))) {
+ return false;
+ }
+ return finishPrimitiveParseRecord(this->v, source);
+ }
+
+ inline bool setBooleanValue(bool value, SourceT&& source) {
+ return finishPrimitiveParseRecord(JS::BooleanValue(value), source);
+ }
+ inline bool setNullValue(SourceT&& source) {
+ return finishPrimitiveParseRecord(JS::NullValue(), source);
+ }
+
+ inline bool objectOpen(Vector<StackEntry, 10>& stack,
+ PropertyVector** properties);
+ inline bool finishObjectMember(Vector<StackEntry, 10>& stack,
+ JS::Handle<JS::Value> value,
+ PropertyVector** properties);
+ inline bool finishObject(Vector<StackEntry, 10>& stack,
+ JS::MutableHandle<JS::Value> vp,
+ PropertyVector* properties);
+
+ inline bool arrayOpen(Vector<StackEntry, 10>& stack,
+ ElementVector** elements);
+ inline bool arrayElement(Vector<StackEntry, 10>& stack,
+ JS::Handle<JS::Value> value,
+ ElementVector** elements);
+ inline bool finishArray(Vector<StackEntry, 10>& stack,
+ JS::MutableHandle<JS::Value> vp,
+ ElementVector* elements);
void trace(JSTracer* trc);
- protected:
- inline bool createJSONParseRecord(const Value& value,
- mozilla::Span<const CharT>& source);
+ private:
+ inline bool finishMemberParseRecord(JS::PropertyKey& key,
+ ParseRecordEntry& objectEntry);
+ inline bool finishCompoundParseRecord(const Value& value,
+ ParseRecordEntry& objectEntry);
+ inline bool finishPrimitiveParseRecord(const Value& value, SourceT source);
+
+ Vector<ParseRecordEntry, 10> parseRecordStack;
+
+ public:
+ ParseRecordObject parseRecord;
};
+#endif // ENABLE_JSON_PARSE_WITH_SOURCE
template <typename CharT>
class MOZ_STACK_CLASS JSONSyntaxParseHandler {
@@ -409,9 +495,11 @@ class MOZ_STACK_CLASS JSONSyntaxParseHandler {
*isProtoInEval = false;
return true;
}
- inline void finishObjectMember(Vector<StackEntry, 10>& stack,
+ inline bool finishObjectMember(Vector<StackEntry, 10>& stack,
DummyValue& value,
- PropertyVector** properties) {}
+ PropertyVector** properties) {
+ return true;
+ }
inline bool finishObject(Vector<StackEntry, 10>& stack, DummyValue* vp,
PropertyVector* properties);
@@ -510,11 +598,52 @@ class MOZ_STACK_CLASS JSONParser
* represent |undefined|, so the JSON data couldn't have specified it.)
*/
bool parse(JS::MutableHandle<JS::Value> vp);
+
+ void trace(JSTracer* trc);
+};
+
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+template <typename CharT>
+class MOZ_STACK_CLASS JSONReviveParser
+ : JSONPerHandlerParser<CharT, JSONReviveHandler<CharT>> {
+ using Base = JSONPerHandlerParser<CharT, JSONReviveHandler<CharT>>;
+
+ public:
+ using ParseType = JSONFullParseHandlerAnyChar::ParseType;
+
+ /* Public API */
+
+ /* Create a parser for the provided JSON data. */
+ JSONReviveParser(JSContext* cx, mozilla::Range<const CharT> data)
+ : Base(cx, data) {}
+
+ /* Allow move construction for use with Rooted. */
+ JSONReviveParser(JSONReviveParser&& other) noexcept
+ : Base(std::move(other)) {}
+
+ JSONReviveParser(const JSONReviveParser& other) = delete;
+ void operator=(const JSONReviveParser& other) = delete;
+
+ /*
+ * Parse the JSON data specified at construction time. If it parses
+ * successfully, store the prescribed value in *vp and return true. If an
+ * internal error (e.g. OOM) occurs during parsing, return false.
+ * Otherwise, if invalid input was specified but no internal error occurred,
+ * behavior depends upon the error handling specified at construction: if
+ * error handling is RaiseError then throw a SyntaxError and return false,
+ * otherwise return true and set *vp to |undefined|. (JSON syntax can't
+ * represent |undefined|, so the JSON data couldn't have specified it.)
+ *
+ * If it parses successfully, parse information for calling the reviver
+ * function is stored in *pro. If this function returns false, *pro will be
+ * set to |undefined|.
+ */
bool parse(JS::MutableHandle<JS::Value> vp,
JS::MutableHandle<ParseRecordObject> pro);
void trace(JSTracer* trc);
};
+#endif // ENABLE_JSON_PARSE_WITH_SOURCE
template <typename CharT, typename Wrapper>
class MutableWrappedPtrOperations<JSONParser<CharT>, Wrapper>
@@ -523,10 +652,6 @@ class MutableWrappedPtrOperations<JSONParser<CharT>, Wrapper>
bool parse(JS::MutableHandle<JS::Value> vp) {
return static_cast<Wrapper*>(this)->get().parse(vp);
}
- bool parse(JS::MutableHandle<JS::Value> vp,
- JS::MutableHandle<ParseRecordObject> pro) {
- return static_cast<Wrapper*>(this)->get().parse(vp, pro);
- }
};
template <typename CharT>
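Taken together, the JSONReviveHandler methods above maintain a second stack that mirrors the value stack: each objectOpen/arrayOpen pushes a fresh entry map, each finished member folds its record into the map on top, and finishObject/finishArray pops the map into a compound record with no source text of its own. A toy model of that bookkeeping, using plain standard-library types rather than the real ParseRecordObject and EntryMap:

#include <map>
#include <string>
#include <utility>
#include <vector>

struct ToyParseRecord {
  std::string sourceText;  // raw JSON text for primitives; empty for compounds
  std::map<std::string, ToyParseRecord> entries;  // children of objects/arrays
};

struct ToyReviveState {
  std::vector<std::map<std::string, ToyParseRecord>> parseRecordStack;
  ToyParseRecord current;  // record of the most recently finished value

  // objectOpen / arrayOpen: one entry map per open compound value.
  void open() { parseRecordStack.emplace_back(); }

  // finishObjectMember / arrayElement: fold the child's record into the map
  // for the enclosing compound value.
  void finishMember(const std::string& key) {
    parseRecordStack.back()[key] = std::move(current);
  }

  // finishObject / finishArray: compound records adopt their children but
  // carry no source text (the real code roots a null parse node).
  void close() {
    ToyParseRecord compound;
    compound.entries = std::move(parseRecordStack.back());
    parseRecordStack.pop_back();
    current = std::move(compound);
  }
};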
diff --git a/js/src/vm/JSONPrinter.cpp b/js/src/vm/JSONPrinter.cpp
index 5b5183d9fb..53ab4be67c 100644
--- a/js/src/vm/JSONPrinter.cpp
+++ b/js/src/vm/JSONPrinter.cpp
@@ -154,8 +154,8 @@ void JSONPrinter::formatProperty(const char* name, const char* format, ...) {
va_end(ap);
}
-void JSONPrinter::formatProperty(const char* name, const char* format,
- va_list ap) {
+void JSONPrinter::formatPropertyVA(const char* name, const char* format,
+ va_list ap) {
beginStringProperty(name);
out_.vprintf(format, ap);
endStringProperty();
diff --git a/js/src/vm/JSONPrinter.h b/js/src/vm/JSONPrinter.h
index b90696a2b4..61536a6c87 100644
--- a/js/src/vm/JSONPrinter.h
+++ b/js/src/vm/JSONPrinter.h
@@ -62,7 +62,7 @@ class JSONPrinter {
void formatProperty(const char* name, const char* format, ...)
MOZ_FORMAT_PRINTF(3, 4);
- void formatProperty(const char* name, const char* format, va_list ap);
+ void formatPropertyVA(const char* name, const char* format, va_list ap);
void propertyName(const char* name);
diff --git a/js/src/vm/JSObject-inl.h b/js/src/vm/JSObject-inl.h
index 4ca0946878..a493d7624a 100644
--- a/js/src/vm/JSObject-inl.h
+++ b/js/src/vm/JSObject-inl.h
@@ -189,8 +189,9 @@ template <typename T>
MOZ_ASSERT(!cx->realm()->hasObjectPendingMetadata());
// The metadata builder is invoked for each object created on the main thread,
- // except when it's suppressed.
- if (!cx->zone()->suppressAllocationMetadataBuilder) {
+ // except when it's suppressed or we're throwing an over-recursion error.
+ if (!cx->zone()->suppressAllocationMetadataBuilder &&
+ !cx->isThrowingOverRecursed()) {
// Don't collect metadata on objects that represent metadata, to avoid
// recursion.
AutoSuppressAllocationMetadataBuilder suppressMetadata(cx);
diff --git a/js/src/vm/JSObject.cpp b/js/src/vm/JSObject.cpp
index ea4dfeb6f7..8bc8bc0d52 100644
--- a/js/src/vm/JSObject.cpp
+++ b/js/src/vm/JSObject.cpp
@@ -2215,9 +2215,7 @@ JS_PUBLIC_API bool js::ShouldIgnorePropertyDefinition(JSContext* cx,
id == NameToId(cx->names().symmetricDifference))) {
return true;
}
-#endif
-#ifdef NIGHTLY_BUILD
if (key == JSProto_ArrayBuffer && !JS::Prefs::arraybuffer_transfer() &&
(id == NameToId(cx->names().transfer) ||
id == NameToId(cx->names().transferToFixedLength) ||
@@ -2240,6 +2238,33 @@ JS_PUBLIC_API bool js::ShouldIgnorePropertyDefinition(JSContext* cx,
id == NameToId(cx->names().grow))) {
return true;
}
+
+ if (key == JSProto_Uint8Array &&
+ !JS::Prefs::experimental_uint8array_base64() &&
+ (id == NameToId(cx->names().setFromBase64) ||
+ id == NameToId(cx->names().setFromHex) ||
+ id == NameToId(cx->names().toBase64) ||
+ id == NameToId(cx->names().toHex))) {
+ return true;
+ }
+
+ // It's gently surprising that this is JSProto_Function, but the trick
+ // to realize is that this is a -constructor function-, not a function
+ // on the prototype; and the proto of the constructor is JSProto_Function.
+ if (key == JSProto_Function && !JS::Prefs::experimental_uint8array_base64() &&
+ (id == NameToId(cx->names().fromBase64) ||
+ id == NameToId(cx->names().fromHex))) {
+ return true;
+ }
+#endif
+
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ if (key == JSProto_JSON &&
+ !JS::Prefs::experimental_json_parse_with_source() &&
+ (id == NameToId(cx->names().isRawJSON) ||
+ id == NameToId(cx->names().rawJSON))) {
+ return true;
+ }
#endif
return false;
@@ -3165,19 +3190,8 @@ js::gc::AllocKind JSObject::allocKindForTenure(
return as<JSFunction>().getAllocKind();
}
- // Fixed length typed arrays in the nursery may have a lazily allocated
- // buffer, make sure there is room for the array's fixed data when moving
- // the array.
- if (is<FixedLengthTypedArrayObject>() &&
- !as<FixedLengthTypedArrayObject>().hasBuffer()) {
- gc::AllocKind allocKind;
- if (as<FixedLengthTypedArrayObject>().hasInlineElements()) {
- size_t nbytes = as<FixedLengthTypedArrayObject>().byteLength();
- allocKind = FixedLengthTypedArrayObject::AllocKindForLazyBuffer(nbytes);
- } else {
- allocKind = GetGCObjectKind(getClass());
- }
- return ForegroundToBackgroundAllocKind(allocKind);
+ if (is<FixedLengthTypedArrayObject>()) {
+ return as<FixedLengthTypedArrayObject>().allocKindForTenure();
}
return as<NativeObject>().allocKindForTenure();
diff --git a/js/src/vm/Modules.cpp b/js/src/vm/Modules.cpp
index f461e7bec1..917083a238 100644
--- a/js/src/vm/Modules.cpp
+++ b/js/src/vm/Modules.cpp
@@ -9,7 +9,8 @@
#include "vm/Modules.h"
#include "mozilla/Assertions.h" // MOZ_ASSERT
-#include "mozilla/Utf8.h" // mozilla::Utf8Unit
+#include "mozilla/ScopeExit.h"
+#include "mozilla/Utf8.h" // mozilla::Utf8Unit
#include <stdint.h> // uint32_t
@@ -42,6 +43,12 @@ using namespace js;
using mozilla::Utf8Unit;
+static bool ModuleLink(JSContext* cx, Handle<ModuleObject*> module);
+static bool ModuleEvaluate(JSContext* cx, Handle<ModuleObject*> module,
+ MutableHandle<Value> result);
+static bool SyntheticModuleEvaluate(JSContext* cx, Handle<ModuleObject*> module,
+ MutableHandle<Value> result);
+
////////////////////////////////////////////////////////////////////////////////
// Public API
JS_PUBLIC_API JS::ModuleResolveHook JS::GetModuleResolveHook(JSRuntime* rt) {
@@ -184,7 +191,7 @@ JS_PUBLIC_API bool JS::ModuleLink(JSContext* cx, Handle<JSObject*> moduleArg) {
CHECK_THREAD(cx);
cx->releaseCheck(moduleArg);
- return js::ModuleLink(cx, moduleArg.as<ModuleObject>());
+ return ::ModuleLink(cx, moduleArg.as<ModuleObject>());
}
JS_PUBLIC_API bool JS::ModuleEvaluate(JSContext* cx,
@@ -194,11 +201,17 @@ JS_PUBLIC_API bool JS::ModuleEvaluate(JSContext* cx,
CHECK_THREAD(cx);
cx->releaseCheck(moduleRecord);
+ cx->isEvaluatingModule++;
+ auto guard = mozilla::MakeScopeExit([cx] {
+ MOZ_ASSERT(cx->isEvaluatingModule != 0);
+ cx->isEvaluatingModule--;
+ });
+
if (moduleRecord.as<ModuleObject>()->hasSyntheticModuleFields()) {
return SyntheticModuleEvaluate(cx, moduleRecord.as<ModuleObject>(), rval);
}
- return js::ModuleEvaluate(cx, moduleRecord.as<ModuleObject>(), rval);
+ return ::ModuleEvaluate(cx, moduleRecord.as<ModuleObject>(), rval);
}
JS_PUBLIC_API bool JS::ThrowOnModuleEvaluationFailure(
@@ -327,6 +340,11 @@ JS_PUBLIC_API void JS::ClearModuleEnvironment(JSObject* moduleObj) {
}
}
+JS_PUBLIC_API bool JS::ModuleIsLinked(JSObject* moduleObj) {
+ AssertHeapIsIdle();
+ return moduleObj->as<ModuleObject>().status() != ModuleStatus::Unlinked;
+}
+
////////////////////////////////////////////////////////////////////////////////
// Internal implementation
@@ -356,14 +374,17 @@ static ModuleObject* HostResolveImportedModule(
JSContext* cx, Handle<ModuleObject*> module,
Handle<ModuleRequestObject*> moduleRequest,
ModuleStatus expectedMinimumStatus);
-static bool ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
- Handle<JSAtom*> exportName,
- MutableHandle<ResolveSet> resolveSet,
- MutableHandle<Value> result);
+static bool CyclicModuleResolveExport(JSContext* cx,
+ Handle<ModuleObject*> module,
+ Handle<JSAtom*> exportName,
+ MutableHandle<ResolveSet> resolveSet,
+ MutableHandle<Value> result,
+ ModuleErrorInfo* errorInfoOut = nullptr);
static bool SyntheticModuleResolveExport(JSContext* cx,
Handle<ModuleObject*> module,
Handle<JSAtom*> exportName,
- MutableHandle<Value> result);
+ MutableHandle<Value> result,
+ ModuleErrorInfo* errorInfoOut);
static ModuleNamespaceObject* ModuleNamespaceCreate(
JSContext* cx, Handle<ModuleObject*> module,
MutableHandle<UniquePtr<ExportNameVector>> exports);
@@ -575,17 +596,20 @@ static ModuleObject* HostResolveImportedModule(
// - If the request is found to be ambiguous, the string `"ambiguous"` is
// returned.
//
-bool js::ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
- Handle<JSAtom*> exportName,
- MutableHandle<Value> result) {
+static bool ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
+ Handle<JSAtom*> exportName,
+ MutableHandle<Value> result,
+ ModuleErrorInfo* errorInfoOut = nullptr) {
if (module->hasSyntheticModuleFields()) {
- return ::SyntheticModuleResolveExport(cx, module, exportName, result);
+ return SyntheticModuleResolveExport(cx, module, exportName, result,
+ errorInfoOut);
}
// Step 1. If resolveSet is not present, set resolveSet to a new empty List.
Rooted<ResolveSet> resolveSet(cx);
- return ::ModuleResolveExport(cx, module, exportName, &resolveSet, result);
+ return CyclicModuleResolveExport(cx, module, exportName, &resolveSet, result,
+ errorInfoOut);
}
static bool CreateResolvedBindingObject(JSContext* cx,
@@ -602,10 +626,12 @@ static bool CreateResolvedBindingObject(JSContext* cx,
return true;
}
-static bool ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
- Handle<JSAtom*> exportName,
- MutableHandle<ResolveSet> resolveSet,
- MutableHandle<Value> result) {
+static bool CyclicModuleResolveExport(JSContext* cx,
+ Handle<ModuleObject*> module,
+ Handle<JSAtom*> exportName,
+ MutableHandle<ResolveSet> resolveSet,
+ MutableHandle<Value> result,
+ ModuleErrorInfo* errorInfoOut) {
// Step 2. For each Record { [[Module]], [[ExportName]] } r of resolveSet, do:
for (const auto& entry : resolveSet) {
// Step 2.a. If module and r.[[Module]] are the same Module Record and
@@ -614,6 +640,9 @@ static bool ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
// Step 2.a.i. Assert: This is a circular import request.
// Step 2.a.ii. Return null.
result.setNull();
+ if (errorInfoOut) {
+ errorInfoOut->setCircularImport(cx, module);
+ }
return true;
}
}
@@ -669,8 +698,8 @@ static bool ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
// importedModule.ResolveExport(e.[[ImportName]],
// resolveSet).
name = e.importName();
- return ModuleResolveExport(cx, importedModule, name, resolveSet,
- result);
+ return CyclicModuleResolveExport(cx, importedModule, name, resolveSet,
+ result, errorInfoOut);
}
}
}
@@ -683,6 +712,9 @@ static bool ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
// Step 6.c. NOTE: A default export cannot be provided by an export * from
// "mod" declaration.
result.setNull();
+ if (errorInfoOut) {
+ errorInfoOut->setImportedModule(cx, module);
+ }
return true;
}
@@ -704,8 +736,8 @@ static bool ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
// Step 8.b. Let resolution be ? importedModule.ResolveExport(exportName,
// resolveSet).
- if (!ModuleResolveExport(cx, importedModule, exportName, resolveSet,
- &resolution)) {
+ if (!CyclicModuleResolveExport(cx, importedModule, exportName, resolveSet,
+ &resolution, errorInfoOut)) {
return false;
}
@@ -744,6 +776,12 @@ static bool ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
if (binding->module() != starResolution->module() ||
binding->bindingName() != starResolution->bindingName()) {
result.set(StringValue(cx->names().ambiguous));
+
+ if (errorInfoOut) {
+ Rooted<ModuleObject*> module1(cx, starResolution->module());
+ Rooted<ModuleObject*> module2(cx, binding->module());
+ errorInfoOut->setForAmbiguousImport(cx, module, module1, module2);
+ }
return true;
}
}
@@ -752,6 +790,9 @@ static bool ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
// Step 9. Return starResolution.
result.setObjectOrNull(starResolution);
+ if (!starResolution && errorInfoOut) {
+ errorInfoOut->setImportedModule(cx, module);
+ }
return true;
}
@@ -759,10 +800,14 @@ static bool ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
static bool SyntheticModuleResolveExport(JSContext* cx,
Handle<ModuleObject*> module,
Handle<JSAtom*> exportName,
- MutableHandle<Value> result) {
+ MutableHandle<Value> result,
+ ModuleErrorInfo* errorInfoOut) {
// Step 2. If module.[[ExportNames]] does not contain exportName, return null.
if (!ContainsElement(module->syntheticExportNames(), exportName)) {
result.setNull();
+ if (errorInfoOut) {
+ errorInfoOut->setImportedModule(cx, module);
+ }
return true;
}
@@ -923,63 +968,93 @@ static ModuleNamespaceObject* ModuleNamespaceCreate(
return ns;
}
-static void ThrowResolutionError(JSContext* cx, Handle<ModuleObject*> module,
- Handle<Value> resolution, bool isDirectImport,
- Handle<JSAtom*> name, uint32_t line,
- JS::ColumnNumberOneOrigin column) {
- MOZ_ASSERT(line != 0);
+void ModuleErrorInfo::setImportedModule(JSContext* cx,
+ ModuleObject* importedModule) {
+ imported = importedModule->filename();
+}
- bool isAmbiguous = resolution == StringValue(cx->names().ambiguous);
+void ModuleErrorInfo::setCircularImport(JSContext* cx,
+ ModuleObject* importedModule) {
+ setImportedModule(cx, importedModule);
+ isCircular = true;
+}
- // ErrorNumbers:
- //          | MISSING | AMBIGUOUS |
- // ---------+---------+-----------+
- // INDIRECT |
- //   DIRECT |
- static constexpr unsigned ErrorNumbers[2][2] = {
- {JSMSG_MISSING_INDIRECT_EXPORT, JSMSG_AMBIGUOUS_INDIRECT_EXPORT},
- {JSMSG_MISSING_IMPORT, JSMSG_AMBIGUOUS_IMPORT}};
- unsigned errorNumber = ErrorNumbers[isDirectImport][isAmbiguous];
-
- const JSErrorFormatString* errorString =
- GetErrorMessage(nullptr, errorNumber);
- MOZ_ASSERT(errorString);
-
- MOZ_ASSERT(errorString->argCount == 0);
- Rooted<JSString*> message(cx, JS_NewStringCopyZ(cx, errorString->format));
- if (!message) {
+void ModuleErrorInfo::setForAmbiguousImport(JSContext* cx,
+ ModuleObject* importedModule,
+ ModuleObject* module1,
+ ModuleObject* module2) {
+ setImportedModule(cx, importedModule);
+ entry1 = module1->filename();
+ entry2 = module2->filename();
+}
+
+static void CreateErrorNumberMessageUTF8(JSContext* cx, unsigned errorNumber,
+ JSErrorReport* reportOut, ...) {
+ va_list ap;
+ va_start(ap, reportOut);
+ AutoReportFrontendContext fc(cx);
+ if (!ExpandErrorArgumentsVA(&fc, GetErrorMessage, nullptr, errorNumber,
+ ArgumentsAreUTF8, reportOut, ap)) {
+ ReportOutOfMemory(cx);
return;
}
- Rooted<JSString*> separator(cx, JS_NewStringCopyZ(cx, ": "));
- if (!separator) {
+ va_end(ap);
+}
+
+static void ThrowResolutionError(JSContext* cx, Handle<ModuleObject*> module,
+ Handle<Value> resolution, Handle<JSAtom*> name,
+ ModuleErrorInfo* errorInfo) {
+ MOZ_ASSERT(errorInfo);
+ auto chars = StringToNewUTF8CharsZ(cx, *name);
+ if (!chars) {
+ ReportOutOfMemory(cx);
return;
}
- message = ConcatStrings<CanGC>(cx, message, separator);
- if (!message) {
- return;
+ bool isAmbiguous = resolution == StringValue(cx->names().ambiguous);
+
+ unsigned errorNumber;
+ if (errorInfo->isCircular) {
+ errorNumber = JSMSG_MODULE_CIRCULAR_IMPORT;
+ } else if (isAmbiguous) {
+ errorNumber = JSMSG_MODULE_AMBIGUOUS;
+ } else {
+ errorNumber = JSMSG_MODULE_NO_EXPORT;
+ }
+
+ JSErrorReport report;
+ report.isWarning_ = false;
+ report.errorNumber = errorNumber;
+
+ if (errorNumber == JSMSG_MODULE_AMBIGUOUS) {
+ CreateErrorNumberMessageUTF8(cx, errorNumber, &report, errorInfo->imported,
+ chars.get(), errorInfo->entry1,
+ errorInfo->entry2);
+ } else {
+ CreateErrorNumberMessageUTF8(cx, errorNumber, &report, errorInfo->imported,
+ chars.get());
}
- message = ConcatStrings<CanGC>(cx, message, name);
+ Rooted<JSString*> message(cx, report.newMessageString(cx));
if (!message) {
+ ReportOutOfMemory(cx);
return;
}
- RootedString filename(cx);
- if (const char* chars = module->script()->filename()) {
- filename =
- JS_NewStringCopyUTF8Z(cx, JS::ConstUTF8CharsZ(chars, strlen(chars)));
- } else {
- filename = cx->names().empty_;
- }
+ const char* file = module->filename();
+ RootedString filename(
+ cx, JS_NewStringCopyUTF8Z(cx, JS::ConstUTF8CharsZ(file, strlen(file))));
if (!filename) {
+ ReportOutOfMemory(cx);
return;
}
RootedValue error(cx);
- if (!JS::CreateError(cx, JSEXN_SYNTAXERR, nullptr, filename, line, column,
- nullptr, message, JS::NothingHandleValue, &error)) {
+ if (!JS::CreateError(cx, JSEXN_SYNTAXERR, nullptr, filename,
+ errorInfo->lineNumber, errorInfo->columnNumber, nullptr,
+ message, JS::NothingHandleValue, &error)) {
+ ReportOutOfMemory(cx);
return;
}
@@ -988,8 +1063,8 @@ static void ThrowResolutionError(JSContext* cx, Handle<ModuleObject*> module,
// https://tc39.es/ecma262/#sec-source-text-module-record-initialize-environment
// ES2023 16.2.1.6.4 InitializeEnvironment
-bool js::ModuleInitializeEnvironment(JSContext* cx,
- Handle<ModuleObject*> module) {
+static bool ModuleInitializeEnvironment(JSContext* cx,
+ Handle<ModuleObject*> module) {
MOZ_ASSERT(module->status() == ModuleStatus::Linking);
// Step 1. For each ExportEntry Record e of module.[[IndirectExportEntries]],
@@ -1002,15 +1077,15 @@ bool js::ModuleInitializeEnvironment(JSContext* cx,
// Step 1.b. Let resolution be ? module.ResolveExport(e.[[ExportName]]).
exportName = e.exportName();
- if (!ModuleResolveExport(cx, module, exportName, &resolution)) {
+ ModuleErrorInfo errorInfo{e.lineNumber(), e.columnNumber()};
+ if (!ModuleResolveExport(cx, module, exportName, &resolution, &errorInfo)) {
return false;
}
// Step 1.c. If resolution is either null or AMBIGUOUS, throw a SyntaxError
// exception.
if (!IsResolvedBinding(cx, resolution)) {
- ThrowResolutionError(cx, module, resolution, false, exportName,
- e.lineNumber(), e.columnNumber());
+ ThrowResolutionError(cx, module, resolution, exportName, &errorInfo);
return false;
}
}
@@ -1059,15 +1134,16 @@ bool js::ModuleInitializeEnvironment(JSContext* cx,
// Step 7.d. Else:
// Step 7.d.i. Let resolution be ?
// importedModule.ResolveExport(in.[[ImportName]]).
- if (!ModuleResolveExport(cx, importedModule, importName, &resolution)) {
+ ModuleErrorInfo errorInfo{in.lineNumber(), in.columnNumber()};
+ if (!ModuleResolveExport(cx, importedModule, importName, &resolution,
+ &errorInfo)) {
return false;
}
// Step 7.d.ii. If resolution is null or ambiguous, throw a SyntaxError
// exception.
if (!IsResolvedBinding(cx, resolution)) {
- ThrowResolutionError(cx, module, resolution, true, importName,
- in.lineNumber(), in.columnNumber());
+ ThrowResolutionError(cx, module, resolution, importName, &errorInfo);
return false;
}
@@ -1134,7 +1210,7 @@ bool js::ModuleInitializeEnvironment(JSContext* cx,
// https://tc39.es/ecma262/#sec-moduledeclarationlinking
// ES2023 16.2.1.5.1 Link
-bool js::ModuleLink(JSContext* cx, Handle<ModuleObject*> module) {
+static bool ModuleLink(JSContext* cx, Handle<ModuleObject*> module) {
// Step 1. Assert: module.[[Status]] is not linking or evaluating.
ModuleStatus status = module->status();
if (status == ModuleStatus::Linking || status == ModuleStatus::Evaluating) {
@@ -1313,8 +1389,9 @@ static bool InnerModuleLinking(JSContext* cx, Handle<ModuleObject*> module,
return true;
}
-bool js::SyntheticModuleEvaluate(JSContext* cx, Handle<ModuleObject*> moduleArg,
- MutableHandle<Value> result) {
+static bool SyntheticModuleEvaluate(JSContext* cx,
+ Handle<ModuleObject*> moduleArg,
+ MutableHandle<Value> result) {
// Steps 1-12 happens elsewhere in the engine.
// Step 13. Let pc be ! NewPromiseCapability(%Promise%).
@@ -1337,8 +1414,8 @@ bool js::SyntheticModuleEvaluate(JSContext* cx, Handle<ModuleObject*> moduleArg,
// https://tc39.es/ecma262/#sec-moduleevaluation
// ES2023 16.2.1.5.2 Evaluate
-bool js::ModuleEvaluate(JSContext* cx, Handle<ModuleObject*> moduleArg,
- MutableHandle<Value> result) {
+static bool ModuleEvaluate(JSContext* cx, Handle<ModuleObject*> moduleArg,
+ MutableHandle<Value> result) {
Rooted<ModuleObject*> module(cx, moduleArg);
// Step 2. Assert: module.[[Status]] is linked, evaluating-async, or
diff --git a/js/src/vm/Modules.h b/js/src/vm/Modules.h
index 21aff46b90..d59c9db552 100644
--- a/js/src/vm/Modules.h
+++ b/js/src/vm/Modules.h
@@ -20,22 +20,32 @@ namespace js {
using ModuleVector = GCVector<ModuleObject*, 0, SystemAllocPolicy>;
-bool ModuleResolveExport(JSContext* cx, Handle<ModuleObject*> module,
- Handle<JSAtom*> exportName,
- MutableHandle<Value> result);
+// A struct with detailed error information when import/export failed.
+struct ModuleErrorInfo {
+ ModuleErrorInfo(uint32_t lineNumber_, JS::ColumnNumberOneOrigin columnNumber_)
+ : lineNumber(lineNumber_), columnNumber(columnNumber_) {}
-ModuleNamespaceObject* GetOrCreateModuleNamespace(JSContext* cx,
- Handle<ModuleObject*> module);
+ void setImportedModule(JSContext* cx, ModuleObject* importedModule);
+ void setCircularImport(JSContext* cx, ModuleObject* importedModule);
+ void setForAmbiguousImport(JSContext* cx, ModuleObject* importedModule,
+ ModuleObject* module1, ModuleObject* module2);
+
+ uint32_t lineNumber;
+ JS::ColumnNumberOneOrigin columnNumber;
-bool ModuleInitializeEnvironment(JSContext* cx, Handle<ModuleObject*> module);
+ // The filename of the imported module.
+ const char* imported;
-bool ModuleLink(JSContext* cx, Handle<ModuleObject*> module);
+ // The filenames of the ambiguous entries.
+ const char* entry1;
+ const char* entry2;
-// Start evaluating the module. If TLA is enabled, result will be a promise.
-bool ModuleEvaluate(JSContext* cx, Handle<ModuleObject*> module,
- MutableHandle<Value> result);
-bool SyntheticModuleEvaluate(JSContext* cx, Handle<ModuleObject*> module,
- MutableHandle<Value> result);
+ // True if the error is a circular import.
+ bool isCircular = false;
+};
+
+ModuleNamespaceObject* GetOrCreateModuleNamespace(JSContext* cx,
+ Handle<ModuleObject*> module);
void AsyncModuleExecutionFulfilled(JSContext* cx, Handle<ModuleObject*> module);
diff --git a/js/src/vm/NativeObject.cpp b/js/src/vm/NativeObject.cpp
index 640a185981..19f8b94bb6 100644
--- a/js/src/vm/NativeObject.cpp
+++ b/js/src/vm/NativeObject.cpp
@@ -307,20 +307,26 @@ mozilla::Maybe<PropertyInfo> js::NativeObject::lookupPure(jsid id) {
return mozilla::Nothing();
}
-bool NativeObject::setUniqueId(JSContext* cx, uint64_t uid) {
+bool NativeObject::setUniqueId(JSRuntime* runtime, uint64_t uid) {
MOZ_ASSERT(!hasUniqueId());
MOZ_ASSERT(!gc::HasUniqueId(this));
- return setOrUpdateUniqueId(cx, uid);
+ Nursery& nursery = runtime->gc.nursery();
+ if (!hasDynamicSlots() && !allocateSlots(nursery, 0)) {
+ return false;
+ }
+
+ getSlotsHeader()->setUniqueId(uid);
+ return true;
}
bool NativeObject::setOrUpdateUniqueId(JSContext* cx, uint64_t uid) {
- if (!hasDynamicSlots() && !allocateSlots(cx, 0)) {
+ if (!hasDynamicSlots() && !allocateSlots(cx->nursery(), 0)) {
+ ReportOutOfMemory(cx);
return false;
}
getSlotsHeader()->setUniqueId(uid);
-
return true;
}
@@ -337,7 +343,12 @@ bool NativeObject::growSlots(JSContext* cx, uint32_t oldCapacity,
MOZ_ASSERT(newCapacity <= MAX_SLOTS_COUNT);
if (!hasDynamicSlots()) {
- return allocateSlots(cx, newCapacity);
+ if (!allocateSlots(cx->nursery(), newCapacity)) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
+
+ return true;
}
uint64_t uid = maybeUniqueId();
@@ -415,7 +426,7 @@ bool NativeObject::allocateInitialSlots(JSContext* cx, uint32_t capacity) {
return true;
}
-bool NativeObject::allocateSlots(JSContext* cx, uint32_t newCapacity) {
+bool NativeObject::allocateSlots(Nursery& nursery, uint32_t newCapacity) {
MOZ_ASSERT(!hasUniqueId());
MOZ_ASSERT(!hasDynamicSlots());
@@ -423,7 +434,8 @@ bool NativeObject::allocateSlots(JSContext* cx, uint32_t newCapacity) {
uint32_t dictionarySpan = getSlotsHeader()->dictionarySlotSpan();
- HeapSlot* allocation = AllocateCellBuffer<HeapSlot>(cx, this, newAllocated);
+ HeapSlot* allocation =
+ AllocateCellBuffer<HeapSlot>(nursery, this, newAllocated);
if (!allocation) {
return false;
}
diff --git a/js/src/vm/NativeObject.h b/js/src/vm/NativeObject.h
index 8312310219..18ca0b656e 100644
--- a/js/src/vm/NativeObject.h
+++ b/js/src/vm/NativeObject.h
@@ -987,7 +987,7 @@ class NativeObject : public JSObject {
bool growSlotsForNewSlot(JSContext* cx, uint32_t numFixed, uint32_t slot);
void shrinkSlots(JSContext* cx, uint32_t oldCapacity, uint32_t newCapacity);
- bool allocateSlots(JSContext* cx, uint32_t newCapacity);
+ bool allocateSlots(Nursery& nursery, uint32_t newCapacity);
/*
* This method is static because it's called from JIT code. On OOM, returns
@@ -1257,7 +1257,7 @@ class NativeObject : public JSObject {
return UndefinedValue();
}
- [[nodiscard]] bool setUniqueId(JSContext* cx, uint64_t uid);
+ [[nodiscard]] bool setUniqueId(JSRuntime* runtime, uint64_t uid);
inline bool hasUniqueId() const { return getSlotsHeader()->hasUniqueId(); }
inline uint64_t uniqueId() const { return getSlotsHeader()->uniqueId(); }
inline uint64_t maybeUniqueId() const {
diff --git a/js/src/vm/PortableBaselineInterpret.cpp b/js/src/vm/PortableBaselineInterpret.cpp
index 2990942dc6..2588f12009 100644
--- a/js/src/vm/PortableBaselineInterpret.cpp
+++ b/js/src/vm/PortableBaselineInterpret.cpp
@@ -1373,9 +1373,13 @@ ICInterpretOps(BaselineFrame* frame, VMFrameManager& frameMgr, State& state,
CACHEOP_CASE(LoadWrapperTarget) {
ObjOperandId objId = icregs.cacheIRReader.objOperandId();
ObjOperandId resultId = icregs.cacheIRReader.objOperandId();
+ bool fallible = icregs.cacheIRReader.readBool();
BOUNDSCHECK(resultId);
JSObject* obj = reinterpret_cast<JSObject*>(icregs.icVals[objId.id()]);
- JSObject* target = &obj->as<ProxyObject>().private_().toObject();
+ JSObject* target = obj->as<ProxyObject>().private_().toObjectOrNull();
+ if (fallible && !target) {
+ return ICInterpretOpResult::NextIC;
+ }
icregs.icVals[resultId.id()] = reinterpret_cast<uintptr_t>(target);
DISPATCH_CACHEOP();
}
diff --git a/js/src/vm/RealmFuses.cpp b/js/src/vm/RealmFuses.cpp
index 3ceac2dd25..8f7a7801cc 100644
--- a/js/src/vm/RealmFuses.cpp
+++ b/js/src/vm/RealmFuses.cpp
@@ -11,6 +11,27 @@
#include "vm/Realm.h"
#include "vm/SelfHosting.h"
+void js::InvalidatingRealmFuse::popFuse(JSContext* cx, RealmFuses& realmFuses) {
+ InvalidatingFuse::popFuse(cx);
+
+ for (auto& fd : realmFuses.fuseDependencies) {
+ fd.invalidateForFuse(cx, this);
+ }
+}
+
+bool js::InvalidatingRealmFuse::addFuseDependency(JSContext* cx,
+ Handle<JSScript*> script) {
+ MOZ_ASSERT(script->realm() == cx->realm());
+ auto* dss =
+ cx->realm()->realmFuses.fuseDependencies.getOrCreateDependentScriptSet(
+ cx, this);
+ if (!dss) {
+ return false;
+ }
+
+ return dss->addScriptForFuse(this, script);
+}
+
void js::PopsOptimizedGetIteratorFuse::popFuse(JSContext* cx,
RealmFuses& realmFuses) {
// Pop Self.
diff --git a/js/src/vm/RealmFuses.h b/js/src/vm/RealmFuses.h
index 54fa7bc3bf..a14ff73c59 100644
--- a/js/src/vm/RealmFuses.h
+++ b/js/src/vm/RealmFuses.h
@@ -8,6 +8,7 @@
#define vm_RealmFuses_h
#include "vm/GuardFuse.h"
+#include "vm/InvalidatingFuse.h"
namespace js {
@@ -28,7 +29,19 @@ class RealmFuse : public GuardFuse {
virtual void popFuse(JSContext* cx) override { GuardFuse::popFuse(cx); }
};
-struct OptimizeGetIteratorFuse final : public RealmFuse {
+class InvalidatingRealmFuse : public InvalidatingFuse {
+ public:
+ virtual void popFuse(JSContext* cx, RealmFuses& realmFuses);
+ virtual bool addFuseDependency(JSContext* cx,
+ Handle<JSScript*> script) override;
+
+ protected:
+ virtual void popFuse(JSContext* cx) override {
+ InvalidatingFuse::popFuse(cx);
+ }
+};
+
+struct OptimizeGetIteratorFuse final : public InvalidatingRealmFuse {
virtual const char* name() override { return "OptimizeGetIteratorFuse"; }
virtual bool checkInvariant(JSContext* cx) override;
};
@@ -144,6 +157,8 @@ struct RealmFuses {
MOZ_CRASH("Fuse Not Found");
}
+ DependentScriptGroup fuseDependencies;
+
static int32_t fuseOffsets[];
static const char* fuseNames[];
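With OptimizeGetIteratorFuse now deriving from InvalidatingRealmFuse, compiled code can register a dependency on the fuse and get discarded when the fuse pops and walks fuseDependencies. A sketch of such a call site; the member name optimizeGetIteratorFuse is an assumption, addFuseDependency and realmFuses come from the changes above, and the SpiderMonkey-internal headers are omitted:

// Hypothetical JIT-side call: record that |script|'s compiled code is only
// valid while the realm's get-iterator fuse remains intact.
bool DependOnGetIteratorFuse(JSContext* cx, JS::Handle<JSScript*> script) {
  js::InvalidatingRealmFuse& fuse =
      cx->realm()->realmFuses.optimizeGetIteratorFuse;  // assumed member name
  // On success, popping the fuse later invalidates the script automatically
  // via the DependentScriptGroup added to RealmFuses.
  return fuse.addFuseDependency(cx, script);
}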
diff --git a/js/src/vm/Runtime.cpp b/js/src/vm/Runtime.cpp
index 1c3611a6e5..d894bf57bf 100644
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -332,7 +332,7 @@ void JSRuntime::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
rtSizes->uncompressedSourceCache +=
caches().uncompressedSourceCache.sizeOfExcludingThis(mallocSizeOf);
- rtSizes->gc.nurseryCommitted += gc.nursery().committed();
+ rtSizes->gc.nurseryCommitted += gc.nursery().totalCommitted();
rtSizes->gc.nurseryMallocedBuffers +=
gc.nursery().sizeOfMallocedBuffers(mallocSizeOf);
gc.storeBuffer().addSizeOfExcludingThis(mallocSizeOf, &rtSizes->gc);
diff --git a/js/src/vm/SelfHosting.cpp b/js/src/vm/SelfHosting.cpp
index 32cdd4a335..5170b072fb 100644
--- a/js/src/vm/SelfHosting.cpp
+++ b/js/src/vm/SelfHosting.cpp
@@ -50,6 +50,7 @@
#include "frontend/FrontendContext.h" // AutoReportFrontendContext
#include "jit/AtomicOperations.h"
#include "jit/InlinableNatives.h"
+#include "jit/TrampolineNatives.h"
#include "js/CompilationAndEvaluation.h"
#include "js/Conversions.h"
#include "js/ErrorReport.h" // JS::PrintError
@@ -900,7 +901,7 @@ static bool intrinsic_GeneratorSetClosed(JSContext* cx, unsigned argc,
MOZ_ASSERT(args[0].isObject());
GeneratorObject* genObj = &args[0].toObject().as<GeneratorObject>();
- genObj->setClosed();
+ genObj->setClosed(cx);
return true;
}
@@ -1962,7 +1963,6 @@ static const JSFunctionSpec intrinsic_functions[] = {
JS_INLINABLE_FN("ArrayIteratorPrototypeOptimizable",
intrinsic_ArrayIteratorPrototypeOptimizable, 0, 0,
IntrinsicArrayIteratorPrototypeOptimizable),
- JS_FN("ArrayNativeSort", intrinsic_ArrayNativeSort, 1, 0),
JS_FN("AssertionFailed", intrinsic_AssertionFailed, 1, 0),
JS_FN("CallArrayBufferMethodIfWrapped",
CallNonGenericSelfhostedMethod<Is<ArrayBufferObject>>, 2, 0),
@@ -2336,6 +2336,7 @@ static const JSFunctionSpec intrinsic_functions[] = {
JS_FN("std_Array_indexOf", array_indexOf, 1, 0),
JS_FN("std_Array_lastIndexOf", array_lastIndexOf, 1, 0),
JS_INLINABLE_FN("std_Array_pop", array_pop, 0, 0, ArrayPop),
+ JS_TRAMPOLINE_FN("std_Array_sort", array_sort, 1, 0, ArraySort),
JS_FN("std_BigInt_valueOf", BigIntObject::valueOf, 0, 0),
JS_FN("std_Date_now", date_now, 0, 0),
JS_FN("std_Function_apply", fun_apply, 2, 0),
diff --git a/js/src/vm/Stack.cpp b/js/src/vm/Stack.cpp
index d222ddc51c..c8a8898d49 100644
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -509,9 +509,8 @@ void JS::ProfilingFrameIterator::operator++() {
void JS::ProfilingFrameIterator::settleFrames() {
// Handle transition frames (see comment in JitFrameIter::operator++).
- if (isJSJit() && !jsJitIter().done() &&
- jsJitIter().frameType() == jit::FrameType::WasmToJSJit) {
- wasm::Frame* fp = (wasm::Frame*)jsJitIter().fp();
+ if (isJSJit() && jsJitIter().done() && jsJitIter().wasmCallerFP()) {
+ wasm::Frame* fp = (wasm::Frame*)jsJitIter().wasmCallerFP();
iteratorDestroy();
new (storage()) wasm::ProfilingFrameIterator(fp);
kind_ = Kind::Wasm;
@@ -529,7 +528,6 @@ void JS::ProfilingFrameIterator::settleFrames() {
new (storage())
jit::JSJitProfilingFrameIterator((jit::CommonFrameLayout*)fp);
kind_ = Kind::JSJit;
- MOZ_ASSERT(!jsJitIter().done());
maybeSetEndStackAddress(jsJitIter().endStackAddress());
return;
}
diff --git a/js/src/vm/StringType-inl.h b/js/src/vm/StringType-inl.h
index 8954a46aac..4548e5d7ea 100644
--- a/js/src/vm/StringType-inl.h
+++ b/js/src/vm/StringType-inl.h
@@ -19,6 +19,7 @@
#include "vm/StaticStrings.h"
#include "gc/GCContext-inl.h"
+#include "gc/Marking-inl.h"
#include "gc/StoreBuffer-inl.h"
#include "vm/JSContext-inl.h"
@@ -451,6 +452,20 @@ void JSLinearString::disownCharsBecauseError() {
d.s.u2.nonInlineCharsLatin1 = nullptr;
}
+inline JSLinearString* JSDependentString::rootBaseDuringMinorGC() {
+ JSLinearString* root = this;
+ while (MaybeForwarded(root)->hasBase()) {
+ if (root->isForwarded()) {
+ root = js::gc::StringRelocationOverlay::fromCell(root)
+ ->savedNurseryBaseOrRelocOverlay();
+ } else {
+ // Possibly nursery or tenured string (not an overlay).
+ root = root->nurseryBaseOrRelocOverlay();
+ }
+ }
+ return root;
+}
+
template <js::AllowGC allowGC, typename CharT>
MOZ_ALWAYS_INLINE JSLinearString* JSLinearString::new_(
JSContext* cx, JS::MutableHandle<JSString::OwnedChars<CharT>> chars,
@@ -530,6 +545,19 @@ inline js::PropertyName* JSLinearString::toPropertyName(JSContext* cx) {
return atom->asPropertyName();
}
+// String characters are movable in the following cases:
+//
+// 1. Inline nursery strings (moved during promotion)
+// 2. Nursery strings with nursery chars (moved during promotion)
+// 3. Nursery strings that are deduplicated (moved during promotion)
+// 4. Inline tenured strings (moved during compaction)
+//
+// This method does not consider #3, because if this method returns true and the
+// caller does not want the characters to move, it can fix them in place by
+// setting the nondeduplicatable bit. (If the bit were already taken into
+// consideration, then the caller wouldn't know whether the movability is
+// "fixable" or not. If it is *only* movable because the bit is not set, then
+// it is fixable by setting the bit.)
bool JSLinearString::hasMovableChars() const {
const JSLinearString* topBase = this;
while (topBase->hasBase()) {
diff --git a/js/src/vm/StringType.cpp b/js/src/vm/StringType.cpp
index 63afd8864b..b735b91b71 100644
--- a/js/src/vm/StringType.cpp
+++ b/js/src/vm/StringType.cpp
@@ -380,9 +380,13 @@ void ForEachStringFlag(const JSString* str, uint32_t flags, KnownF known,
static_assert(JSString::LINEAR_IS_EXTENSIBLE_BIT ==
JSString::INLINE_IS_FAT_BIT);
if (str->isLinear()) {
- known("EXTENSIBLE");
- } else if (str->isInline()) {
- known("FAT");
+ if (str->isInline()) {
+ known("FAT");
+ } else if (!str->isAtom()) {
+ known("EXTENSIBLE");
+ } else {
+ unknown(i);
+ }
} else {
unknown(i);
}
@@ -407,9 +411,6 @@ void ForEachStringFlag(const JSString* str, uint32_t flags, KnownF known,
case JSString::INDEX_VALUE_BIT:
known("INDEX_VALUE_BIT");
break;
- case JSString::NON_DEDUP_BIT:
- known("NON_DEDUP_BIT");
- break;
case JSString::IN_STRING_TO_ATOM_CACHE:
known("IN_STRING_TO_ATOM_CACHE");
break;
@@ -417,7 +418,7 @@ void ForEachStringFlag(const JSString* str, uint32_t flags, KnownF known,
if (str->isRope()) {
known("FLATTEN_VISIT_RIGHT");
} else {
- unknown(i);
+ known("NON_DEDUP_BIT");
}
break;
case JSString::FLATTEN_FINISH_NODE:
@@ -638,11 +639,20 @@ static MOZ_ALWAYS_INLINE JSString::OwnedChars<CharT> AllocChars(JSContext* cx,
MOZ_ASSERT(cx->nursery().isEnabled());
auto [buffer, isMalloced] = cx->nursery().allocateBuffer(
cx->zone(), length * sizeof(CharT), js::StringBufferArena);
+ if (!buffer) {
+ ReportOutOfMemory(cx);
+ return {nullptr, 0, false, false};
+ }
return {static_cast<CharT*>(buffer), length, isMalloced, isMalloced};
}
auto buffer = cx->make_pod_arena_array<CharT>(js::StringBufferArena, length);
+ if (!buffer) {
+ ReportOutOfMemory(cx);
+ return {nullptr, 0, false, false};
+ }
+
return {std::move(buffer), length, true};
}
@@ -914,6 +924,14 @@ JSLinearString* JSRope::flattenInternal(JSRope* root) {
* JSDependentStrings pointing to them already. Stealing the buffer doesn't
* change its address, only its owning JSExtensibleString, so all chars()
* pointers in the JSDependentStrings are still valid.
+ *
+ * This chain of dependent strings could be problematic if the base string
+ * moves, either because it was initially allocated in the nursery or it
+ * gets deduplicated, because you might have a dependent ->
+ * tenured dependent -> nursery base string, and the store buffer would
+ * only capture the latter edge. Prevent this case from happening by
+ * marking the root as nondeduplicatable if the extensible string
+ * optimization applied.
*/
const size_t wholeLength = root->length();
size_t wholeCapacity;
@@ -1068,8 +1086,12 @@ finish_root:
left.setLengthAndFlags(left.length(), StringFlagsForCharType<CharT>(flags));
left.d.s.u3.base = &root->asLinear();
if (left.isTenured() && !root->isTenured()) {
- // leftmost child -> root is a tenured -> nursery edge.
+ // leftmost child -> root is a tenured -> nursery edge. Put the leftmost
+ // child in the store buffer and prevent the root's chars from moving or
+ // being freed (because the leftmost child may have a tenured dependent
+ // string that cannot be updated.)
root->storeBuffer()->putWholeCell(&left);
+ root->setNonDeduplicatable();
}
}
@@ -1455,16 +1477,18 @@ uint32_t JSAtom::getIndexSlow() const {
: AtomCharsToIndex(twoByteChars(nogc), len);
}
-static void MarkStringAndBasesNonDeduplicatable(JSLinearString* s) {
- while (true) {
- if (!s->isTenured()) {
- s->setNonDeduplicatable();
- }
- if (!s->hasBase()) {
- break;
- }
+// Prevent the actual owner of the string's characters from being deduplicated
+// (and thus freeing its characters, which would invalidate the ASSC's chars
+// pointer). Intermediate dependent strings on the chain can be deduplicated,
+// since the base will be updated to the root base during tenuring anyway and
+// the intermediates won't matter.
+void PreventRootBaseDeduplication(JSLinearString* s) {
+ while (s->hasBase()) {
s = s->base();
}
+ if (!s->isTenured()) {
+ s->setNonDeduplicatable();
+ }
}
bool AutoStableStringChars::init(JSContext* cx, JSString* s) {
@@ -1492,7 +1516,7 @@ bool AutoStableStringChars::init(JSContext* cx, JSString* s) {
twoByteChars_ = linearString->rawTwoByteChars();
}
- MarkStringAndBasesNonDeduplicatable(linearString);
+ PreventRootBaseDeduplication(linearString);
s_ = linearString;
return true;
@@ -1518,7 +1542,7 @@ bool AutoStableStringChars::initTwoByte(JSContext* cx, JSString* s) {
state_ = TwoByte;
twoByteChars_ = linearString->rawTwoByteChars();
- MarkStringAndBasesNonDeduplicatable(linearString);
+ PreventRootBaseDeduplication(linearString);
s_ = linearString;
return true;
diff --git a/js/src/vm/StringType.h b/js/src/vm/StringType.h
index f2850c33a4..38dea85c60 100644
--- a/js/src/vm/StringType.h
+++ b/js/src/vm/StringType.h
@@ -407,8 +407,10 @@ class JSString : public js::gc::CellWithLengthAndFlags {
static const uint32_t INDEX_VALUE_BIT = js::Bit(11);
static const uint32_t INDEX_VALUE_SHIFT = 16;
- // NON_DEDUP_BIT is used in string deduplication during tenuring.
- static const uint32_t NON_DEDUP_BIT = js::Bit(12);
+ // NON_DEDUP_BIT is used in string deduplication during tenuring. This bit is
+ // shared with both FLATTEN_FINISH_NODE and ATOM_IS_PERMANENT_BIT, since it
+ // only applies to linear non-atoms.
+ static const uint32_t NON_DEDUP_BIT = js::Bit(15);
// If IN_STRING_TO_ATOM_CACHE is set, this string had an entry in the
// StringToAtomCache at some point. Note that GC can purge the cache without
@@ -627,13 +629,27 @@ class JSString : public js::gc::CellWithLengthAndFlags {
}
MOZ_ALWAYS_INLINE
- void setNonDeduplicatable() { setFlagBit(NON_DEDUP_BIT); }
+ void setNonDeduplicatable() {
+ MOZ_ASSERT(isLinear());
+ MOZ_ASSERT(!isAtom());
+ setFlagBit(NON_DEDUP_BIT);
+ }
+ // After copying a string from the nursery to the tenured heap, adjust bits
+ // that no longer apply.
MOZ_ALWAYS_INLINE
- void clearNonDeduplicatable() { clearFlagBit(NON_DEDUP_BIT); }
+ void clearBitsOnTenure() {
+ MOZ_ASSERT(!isAtom());
+ clearFlagBit(NON_DEDUP_BIT | IN_STRING_TO_ATOM_CACHE);
+ }
+ // NON_DEDUP_BIT is only valid for linear non-atoms.
MOZ_ALWAYS_INLINE
- bool isDeduplicatable() { return !(flags() & NON_DEDUP_BIT); }
+ bool isDeduplicatable() {
+ MOZ_ASSERT(isLinear());
+ MOZ_ASSERT(!isAtom());
+ return !(flags() & NON_DEDUP_BIT);
+ }
void setInStringToAtomCache() {
MOZ_ASSERT(!isAtom());
@@ -659,6 +675,7 @@ class JSString : public js::gc::CellWithLengthAndFlags {
inline bool canOwnDependentChars() const;
+ // Only called by the GC during nursery collection.
inline void setBase(JSLinearString* newBase);
void traceBase(JSTracer* trc);
@@ -781,6 +798,8 @@ class JSString : public js::gc::CellWithLengthAndFlags {
void traceChildren(JSTracer* trc);
+ inline void traceBaseFromStoreBuffer(JSTracer* trc);
+
// Override base class implementation to tell GC about permanent atoms.
bool isPermanentAndMayBeShared() const { return isPermanentAtom(); }
@@ -930,6 +949,7 @@ class JSLinearString : public JSString {
friend class JS::AutoStableStringChars;
friend class js::gc::TenuringTracer;
friend class js::gc::CellAllocator;
+ friend class JSDependentString; // To allow access when used as base.
/* Vacuous and therefore unimplemented. */
JSLinearString* ensureLinear(JSContext* cx) = delete;
@@ -1138,6 +1158,13 @@ class JSDependentString : public JSLinearString {
setNonInlineChars(chars + offset);
}
+ inline JSLinearString* rootBaseDuringMinorGC();
+
+ template <typename CharT>
+ inline void sweepTypedAfterMinorGC();
+
+ inline void sweepAfterMinorGC();
+
#if defined(DEBUG) || defined(JS_JITSPEW) || defined(JS_CACHEIR_SPEW)
void dumpOwnRepresentationFields(js::JSONPrinter& json) const;
#endif
@@ -2260,14 +2287,17 @@ class StringRelocationOverlay : public RelocationOverlay {
MOZ_ALWAYS_INLINE const CharT* savedNurseryChars() const;
const MOZ_ALWAYS_INLINE JS::Latin1Char* savedNurseryCharsLatin1() const {
+ MOZ_ASSERT(!forwardingAddress()->as<JSString>()->hasBase());
return nurseryCharsLatin1;
}
const MOZ_ALWAYS_INLINE char16_t* savedNurseryCharsTwoByte() const {
+ MOZ_ASSERT(!forwardingAddress()->as<JSString>()->hasBase());
return nurseryCharsTwoByte;
}
JSLinearString* savedNurseryBaseOrRelocOverlay() const {
+ MOZ_ASSERT(forwardingAddress()->as<JSString>()->hasBase());
return nurseryBaseOrRelocOverlay;
}
diff --git a/js/src/vm/StructuredClone.cpp b/js/src/vm/StructuredClone.cpp
index e2e67a2ee3..405c18670b 100644
--- a/js/src/vm/StructuredClone.cpp
+++ b/js/src/vm/StructuredClone.cpp
@@ -893,7 +893,7 @@ bool SCInput::readArray(T* p, size_t nelems) {
// To avoid any way in which uninitialized data could escape, zero the array
// if filling it failed.
std::uninitialized_fill_n(p, nelems, 0);
- return false;
+ return reportTruncated();
}
swapFromLittleEndianInPlace(p, nelems);
diff --git a/js/src/vm/TypedArrayObject-inl.h b/js/src/vm/TypedArrayObject-inl.h
index 95ba7b23d4..ffb9a3c9f6 100644
--- a/js/src/vm/TypedArrayObject-inl.h
+++ b/js/src/vm/TypedArrayObject-inl.h
@@ -760,6 +760,26 @@ class ElementSpecific {
}
};
+inline gc::AllocKind js::FixedLengthTypedArrayObject::allocKindForTenure()
+ const {
+ // Fixed length typed arrays in the nursery may have a lazily allocated
+ // buffer. Make sure there is room for the array's fixed data when moving the
+ // array.
+
+ if (hasBuffer()) {
+ return NativeObject::allocKindForTenure();
+ }
+
+ gc::AllocKind allocKind;
+ if (hasInlineElements()) {
+ allocKind = AllocKindForLazyBuffer(byteLength());
+ } else {
+ allocKind = gc::GetGCObjectKind(getClass());
+ }
+
+ return gc::ForegroundToBackgroundAllocKind(allocKind);
+}
+
/* static */ gc::AllocKind
js::FixedLengthTypedArrayObject::AllocKindForLazyBuffer(size_t nbytes) {
MOZ_ASSERT(nbytes <= INLINE_BUFFER_LIMIT);
diff --git a/js/src/vm/TypedArrayObject.cpp b/js/src/vm/TypedArrayObject.cpp
index 35a2237cd5..935a902abe 100644
--- a/js/src/vm/TypedArrayObject.cpp
+++ b/js/src/vm/TypedArrayObject.cpp
@@ -7,8 +7,10 @@
#include "vm/TypedArrayObject-inl.h"
#include "vm/TypedArrayObject.h"
+#include "mozilla/CheckedInt.h"
#include "mozilla/FloatingPoint.h"
#include "mozilla/IntegerTypeTraits.h"
+#include "mozilla/Likely.h"
#include "mozilla/PodOperations.h"
#include "mozilla/TextUtils.h"
@@ -39,6 +41,7 @@
#include "js/UniquePtr.h"
#include "js/Wrapper.h"
#include "util/DifferentialTesting.h"
+#include "util/StringBuffer.h"
#include "util/Text.h"
#include "util/WindowsWrapper.h"
#include "vm/ArrayBufferObject.h"
@@ -114,6 +117,13 @@ static bool IsTypedArrayObject(HandleValue v) {
return v.isObject() && v.toObject().is<TypedArrayObject>();
}
+#ifdef NIGHTLY_BUILD
+static bool IsUint8ArrayObject(HandleValue v) {
+ return IsTypedArrayObject(v) &&
+ v.toObject().as<TypedArrayObject>().type() == Scalar::Uint8;
+}
+#endif
+
/* static */
bool TypedArrayObject::ensureHasBuffer(JSContext* cx,
Handle<TypedArrayObject*> typedArray) {
@@ -199,7 +209,6 @@ size_t FixedLengthTypedArrayObject::objectMoved(JSObject* obj, JSObject* old) {
auto* newObj = &obj->as<FixedLengthTypedArrayObject>();
const auto* oldObj = &old->as<FixedLengthTypedArrayObject>();
MOZ_ASSERT(newObj->elementsRaw() == oldObj->elementsRaw());
- MOZ_ASSERT(obj->isTenured());
// Typed arrays with a buffer object do not need an update.
if (oldObj->hasBuffer()) {
@@ -233,13 +242,13 @@ size_t FixedLengthTypedArrayObject::objectMoved(JSObject* obj, JSObject* old) {
constexpr size_t headerSize = dataOffset() + sizeof(HeapSlot);
- // See AllocKindForLazyBuffer.
- gc::AllocKind newAllocKind = obj->asTenured().getAllocKind();
+ gc::AllocKind allocKind = oldObj->allocKindForTenure();
+ MOZ_ASSERT_IF(obj->isTenured(), obj->asTenured().getAllocKind() == allocKind);
MOZ_ASSERT_IF(nbytes == 0,
- headerSize + sizeof(uint8_t) <= GetGCKindBytes(newAllocKind));
+ headerSize + sizeof(uint8_t) <= GetGCKindBytes(allocKind));
if (nursery.isInside(buf) &&
- headerSize + nbytes <= GetGCKindBytes(newAllocKind)) {
+ headerSize + nbytes <= GetGCKindBytes(allocKind)) {
MOZ_ASSERT(oldObj->hasInlineElements());
#ifdef DEBUG
if (nbytes == 0) {
@@ -2046,6 +2055,1083 @@ bool TypedArrayObject::copyWithin(JSContext* cx, unsigned argc, Value* vp) {
TypedArrayObject::copyWithin_impl>(cx, args);
}
+#ifdef NIGHTLY_BUILD
+
+// Byte vector with large enough inline storage to allow constructing small
+// typed arrays without extra heap allocations.
+using ByteVector =
+ js::Vector<uint8_t, FixedLengthTypedArrayObject::INLINE_BUFFER_LIMIT>;
+
+static UniqueChars QuoteString(JSContext* cx, char16_t ch) {
+ Sprinter sprinter(cx);
+ if (!sprinter.init()) {
+ return nullptr;
+ }
+
+ StringEscape esc{};
+ js::EscapePrinter ep(sprinter, esc);
+ ep.putChar(ch);
+
+ return sprinter.release();
+}
+
+/**
+ * FromHex ( string [ , maxLength ] )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-fromhex
+ */
+static bool FromHex(JSContext* cx, Handle<JSString*> string, size_t maxLength,
+ ByteVector& bytes, size_t* readLength) {
+ // Step 1. (Not applicable in our implementation.)
+
+ // Step 2.
+ size_t length = string->length();
+
+ // Step 3.
+ if (length % 2 != 0) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_BAD_HEX_STRING_LENGTH);
+ return false;
+ }
+
+ JSLinearString* linear = string->ensureLinear(cx);
+ if (!linear) {
+ return false;
+ }
+
+ // Step 4. (Not applicable in our implementation.)
+ MOZ_ASSERT(bytes.empty());
+
+ // Step 5.
+ size_t index = 0;
+
+ // Step 6.
+ while (index < length && bytes.length() < maxLength) {
+ // Step 6.a.
+ char16_t c0 = linear->latin1OrTwoByteChar(index);
+ char16_t c1 = linear->latin1OrTwoByteChar(index + 1);
+
+ // Step 6.b.
+ if (MOZ_UNLIKELY(!mozilla::IsAsciiHexDigit(c0) ||
+ !mozilla::IsAsciiHexDigit(c1))) {
+ char16_t ch = !mozilla::IsAsciiHexDigit(c0) ? c0 : c1;
+ if (auto str = QuoteString(cx, ch)) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_BAD_HEX_DIGIT, str.get());
+ }
+ return false;
+ }
+
+ // Step 6.c.
+ index += 2;
+
+ // Step 6.d.
+ uint8_t byte = (mozilla::AsciiAlphanumericToNumber(c0) << 4) +
+ mozilla::AsciiAlphanumericToNumber(c1);
+
+ // Step 6.e.
+ if (!bytes.append(byte)) {
+ return false;
+ }
+ }
+
+ // Step 7.
+ *readLength = index;
+ return true;
+}
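
A minimal standalone sketch of the same pairwise parse, without the maxLength cap, the Sprinter-based character quoting, or JS error reporting; DecodeHex is a name made up for this example and is not SpiderMonkey API.

#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

// Returns false for an odd-length string or a non-hex digit.
bool DecodeHex(const std::string& input, std::vector<uint8_t>* bytes) {
  if (input.size() % 2 != 0) {
    return false;
  }
  auto digit = [](char c) -> int {
    if (c >= '0' && c <= '9') return c - '0';
    if (c >= 'a' && c <= 'f') return c - 'a' + 10;
    if (c >= 'A' && c <= 'F') return c - 'A' + 10;
    return -1;
  };
  bytes->clear();
  bytes->reserve(input.size() / 2);
  for (size_t i = 0; i < input.size(); i += 2) {
    int hi = digit(input[i]);
    int lo = digit(input[i + 1]);
    if (hi < 0 || lo < 0) {
      return false;
    }
    bytes->push_back(uint8_t((hi << 4) | lo));
  }
  return true;  // e.g. "cafe01" decodes to {0xca, 0xfe, 0x01}
}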
+
+namespace Base64 {
+static constexpr uint8_t InvalidChar = UINT8_MAX;
+
+static constexpr auto DecodeTable(const char (&alphabet)[65]) {
+ std::array<uint8_t, 128> result = {};
+
+ // Initialize all elements to InvalidChar.
+ for (auto& e : result) {
+ e = InvalidChar;
+ }
+
+ // Map the base64 characters to their values.
+ for (uint8_t i = 0; i < 64; ++i) {
+ result[alphabet[i]] = i;
+ }
+
+ return result;
+}
+} // namespace Base64
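
DecodeTable builds a 128-entry inverse of the 64-character alphabet at compile time so the decoder can map an ASCII code unit to its 6-bit value with a single table load. A hedged standalone sketch of the same idea with a compile-time round-trip check; kStd, kDecode and kInvalid are names invented for this example.

#include <array>
#include <cstdint>

constexpr uint8_t kInvalid = 0xff;

constexpr std::array<uint8_t, 128> MakeDecodeTable(const char (&alphabet)[65]) {
  std::array<uint8_t, 128> table{};
  for (auto& e : table) {
    e = kInvalid;
  }
  for (uint8_t i = 0; i < 64; ++i) {
    table[static_cast<uint8_t>(alphabet[i])] = i;
  }
  return table;
}

constexpr char kStd[65] =
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
constexpr auto kDecode = MakeDecodeTable(kStd);

// The table really is the inverse of the alphabet; checked at compile time.
constexpr bool RoundTrips() {
  for (uint8_t i = 0; i < 64; ++i) {
    if (kDecode[static_cast<uint8_t>(kStd[i])] != i) {
      return false;
    }
  }
  return true;
}
static_assert(RoundTrips(), "decode table inverts the encode alphabet");
static_assert(kDecode[static_cast<uint8_t>('=')] == kInvalid,
              "padding is not a payload character");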
+
+namespace Base64::Encode {
+static constexpr const char Base64[] =
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+static_assert(std::char_traits<char>::length(Base64) == 64);
+
+static constexpr const char Base64Url[] =
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
+static_assert(std::char_traits<char>::length(Base64Url) == 64);
+} // namespace Base64::Encode
+
+namespace Base64::Decode {
+static constexpr auto Base64 = DecodeTable(Base64::Encode::Base64);
+static_assert(Base64.size() == 128,
+ "128 elements to allow access through ASCII characters");
+
+static constexpr auto Base64Url = DecodeTable(Base64::Encode::Base64Url);
+static_assert(Base64Url.size() == 128,
+ "128 elements to allow access through ASCII characters");
+} // namespace Base64::Decode
+
+enum class Alphabet {
+ /**
+ * Standard base64 alphabet.
+ */
+ Base64,
+
+ /**
+ * URL and filename safe base64 alphabet.
+ */
+ Base64Url,
+};
+
+enum class LastChunkHandling {
+ /**
+ * Allow partial chunks at the end of the input.
+ */
+ Loose,
+
+ /**
+ * Disallow partial chunks at the end of the input.
+ */
+ Strict,
+
+ /**
+ * Stop before partial chunks at the end of the input.
+ */
+ StopBeforePartial,
+};
+
+/**
+ * FromBase64 ( string, alphabet, lastChunkHandling [ , maxLength ] )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-frombase64
+ */
+static bool FromBase64(JSContext* cx, Handle<JSString*> string,
+ Alphabet alphabet, LastChunkHandling lastChunkHandling,
+ size_t maxLength, ByteVector& bytes,
+ size_t* readLength) {
+ // Steps 1-2. (Not applicable in our implementation.)
+
+ // Step 3.
+ size_t remaining = maxLength;
+ if (remaining == 0) {
+ MOZ_ASSERT(bytes.empty());
+ *readLength = 0;
+ return true;
+ }
+
+ JSLinearString* linear = string->ensureLinear(cx);
+ if (!linear) {
+ return false;
+ }
+
+ // DecodeBase64Chunk ( chunk [ , throwOnExtraBits ] )
+ //
+  // Decode a complete base64 chunk.
+ auto decodeChunk = [&](uint32_t chunk) {
+ MOZ_ASSERT(chunk <= 0xffffff);
+ MOZ_ASSERT(remaining >= 3);
+
+ if (!bytes.reserve(bytes.length() + 3)) {
+ return false;
+ }
+ bytes.infallibleAppend(chunk >> 16);
+ bytes.infallibleAppend(chunk >> 8);
+ bytes.infallibleAppend(chunk);
+ return true;
+ };
+
+ // DecodeBase64Chunk ( chunk [ , throwOnExtraBits ] )
+ //
+  // Decode a three element partial base64 chunk.
+ auto decodeChunk3 = [&](uint32_t chunk, bool throwOnExtraBits) {
+ MOZ_ASSERT(chunk <= 0x3ffff);
+ MOZ_ASSERT(remaining >= 2);
+
+ if (throwOnExtraBits && (chunk & 0x3) != 0) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_EXTRA_BASE64_BITS);
+ return false;
+ }
+
+ if (!bytes.reserve(bytes.length() + 2)) {
+ return false;
+ }
+ bytes.infallibleAppend(chunk >> 10);
+ bytes.infallibleAppend(chunk >> 2);
+ return true;
+ };
+
+ // DecodeBase64Chunk ( chunk [ , throwOnExtraBits ] )
+ //
+  // Decode a two element partial base64 chunk.
+ auto decodeChunk2 = [&](uint32_t chunk, bool throwOnExtraBits) {
+ MOZ_ASSERT(chunk <= 0xfff);
+ MOZ_ASSERT(remaining >= 1);
+
+ if (throwOnExtraBits && (chunk & 0xf) != 0) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_EXTRA_BASE64_BITS);
+ return false;
+ }
+
+ if (!bytes.reserve(bytes.length() + 1)) {
+ return false;
+ }
+ bytes.infallibleAppend(chunk >> 4);
+ return true;
+ };
+
+ // DecodeBase64Chunk ( chunk [ , throwOnExtraBits ] )
+ //
+  // Decode a partial base64 chunk.
+ auto decodePartialChunk = [&](uint32_t chunk, uint32_t chunkLength,
+ bool throwOnExtraBits = false) {
+ MOZ_ASSERT(chunkLength == 2 || chunkLength == 3);
+ return chunkLength == 2 ? decodeChunk2(chunk, throwOnExtraBits)
+ : decodeChunk3(chunk, throwOnExtraBits);
+ };
+
+ // Step 4.
+ //
+ // String index after the last fully read base64 chunk.
+ size_t read = 0;
+
+ // Step 5.
+ MOZ_ASSERT(bytes.empty());
+
+ // Step 6.
+ //
+ // Current base64 chunk, a uint24 number.
+ uint32_t chunk = 0;
+
+ // Step 7.
+ //
+ // Current base64 chunk length, in the range [0..4].
+ size_t chunkLength = 0;
+
+ // Step 8.
+ //
+ // Current string index.
+ size_t index = 0;
+
+ // Step 9.
+ size_t length = linear->length();
+
+ const auto& decode = alphabet == Alphabet::Base64 ? Base64::Decode::Base64
+ : Base64::Decode::Base64Url;
+
+ // Step 10.
+ for (; index < length; index++) {
+ // Step 10.c. (Reordered)
+ char16_t ch = linear->latin1OrTwoByteChar(index);
+
+ // Step 10.a.
+ if (mozilla::IsAsciiWhitespace(ch)) {
+ continue;
+ }
+
+ // Step 10.b. (Moved out of loop.)
+
+ // Step 10.d. (Performed in for-loop step.)
+
+ // Step 10.e.
+ if (ch == '=') {
+ break;
+ }
+
+ // Steps 10.f-g.
+ uint8_t value = Base64::InvalidChar;
+ if (mozilla::IsAscii(ch)) {
+ value = decode[ch];
+ }
+ if (MOZ_UNLIKELY(value == Base64::InvalidChar)) {
+ if (auto str = QuoteString(cx, ch)) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_BAD_BASE64_CHAR, str.get());
+ }
+ return false;
+ }
+
+ // Step 10.h. (Not applicable in our implementation.)
+
+ // Step 10.i.
+ if ((remaining == 1 && chunkLength == 2) ||
+ (remaining == 2 && chunkLength == 3)) {
+ *readLength = read;
+ return true;
+ }
+
+ // Step 10.j.
+ chunk = (chunk << 6) | value;
+
+ // Step 10.k.
+ chunkLength += 1;
+
+ // Step 10.l.
+ if (chunkLength == 4) {
+ // Step 10.l.i.
+ if (!decodeChunk(chunk)) {
+ return false;
+ }
+
+ // Step 10.l.ii.
+ chunk = 0;
+
+ // Step 10.l.iii.
+ chunkLength = 0;
+
+ // Step 10.l.iv.
+ //
+ // NB: Add +1 to include the |index| update from step 10.d.
+ read = index + 1;
+
+ // Step 10.l.v.
+ MOZ_ASSERT(remaining >= 3);
+ remaining -= 3;
+ if (remaining == 0) {
+ *readLength = read;
+ return true;
+ }
+ }
+ }
+
+ // Step 10.b.
+ if (index == length) {
+ // Step 10.b.i.
+ if (chunkLength > 0) {
+ // Step 10.b.i.1.
+ if (lastChunkHandling == LastChunkHandling::StopBeforePartial) {
+ *readLength = read;
+ return true;
+ }
+
+ // Steps 10.b.i.2-3.
+ if (lastChunkHandling == LastChunkHandling::Loose) {
+ // Step 10.b.i.2.a.
+ if (chunkLength == 1) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_BAD_INCOMPLETE_CHUNK);
+ return false;
+ }
+ MOZ_ASSERT(chunkLength == 2 || chunkLength == 3);
+
+ // Step 10.b.i.2.b.
+ if (!decodePartialChunk(chunk, chunkLength)) {
+ return false;
+ }
+ } else {
+ // Step 10.b.i.3.a.
+ MOZ_ASSERT(lastChunkHandling == LastChunkHandling::Strict);
+
+ // Step 10.b.i.3.b.
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_BAD_INCOMPLETE_CHUNK);
+ return false;
+ }
+ }
+
+ // Step 10.b.ii.
+ *readLength = length;
+ return true;
+ }
+
+ // Step 10.e.
+ MOZ_ASSERT(index < length);
+ MOZ_ASSERT(linear->latin1OrTwoByteChar(index) == '=');
+
+ // Step 10.e.i.
+ if (chunkLength < 2) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_BAD_INCOMPLETE_CHUNK);
+ return false;
+ }
+ MOZ_ASSERT(chunkLength == 2 || chunkLength == 3);
+
+ // Step 10.e.ii. (Inlined SkipAsciiWhitespace)
+ while (++index < length) {
+ char16_t ch = linear->latin1OrTwoByteChar(index);
+ if (!mozilla::IsAsciiWhitespace(ch)) {
+ break;
+ }
+ }
+
+ // Step 10.e.iii.
+ if (chunkLength == 2) {
+ // Step 10.e.iii.1.
+ if (index == length) {
+ // Step 10.e.iii.1.a.
+ if (lastChunkHandling == LastChunkHandling::StopBeforePartial) {
+ *readLength = read;
+ return true;
+ }
+
+ // Step 10.e.iii.1.b.
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_MISSING_BASE64_PADDING);
+ return false;
+ }
+
+ // Step 10.e.iii.2.
+ char16_t ch = linear->latin1OrTwoByteChar(index);
+
+ // Step 10.e.iii.3.
+ if (ch == '=') {
+ // Step 10.e.iii.3.a. (Inlined SkipAsciiWhitespace)
+ while (++index < length) {
+ char16_t ch = linear->latin1OrTwoByteChar(index);
+ if (!mozilla::IsAsciiWhitespace(ch)) {
+ break;
+ }
+ }
+ }
+ }
+
+ // Step 10.e.iv.
+ if (index < length) {
+ char16_t ch = linear->latin1OrTwoByteChar(index);
+ if (auto str = QuoteString(cx, ch)) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_BAD_BASE64_AFTER_PADDING,
+ str.get());
+ }
+ return false;
+ }
+
+ // Steps 10.e.v-vi.
+ bool throwOnExtraBits = lastChunkHandling == LastChunkHandling::Strict;
+
+ // Step 10.e.vii.
+ if (!decodePartialChunk(chunk, chunkLength, throwOnExtraBits)) {
+ return false;
+ }
+
+ // Step 10.e.viii.
+ *readLength = length;
+ return true;
+}
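
A compact standalone model of the chunked decode above, restricted to the standard alphabet and loose last-chunk handling, with no whitespace skipping, maxLength cap, or JS error reporting; DecodeBase64Loose is an invented name, not the SpiderMonkey entry point.

#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

// Returns false on a character outside the alphabet or on a dangling single
// character in the final chunk.
bool DecodeBase64Loose(const std::string& input, std::vector<uint8_t>* bytes) {
  auto value = [](char c) -> int {
    if (c >= 'A' && c <= 'Z') return c - 'A';
    if (c >= 'a' && c <= 'z') return c - 'a' + 26;
    if (c >= '0' && c <= '9') return c - '0' + 52;
    if (c == '+') return 62;
    if (c == '/') return 63;
    return -1;
  };

  bytes->clear();
  uint32_t chunk = 0;      // Up to 24 bits of pending payload.
  size_t chunkLength = 0;  // Number of 6-bit values accumulated, 0..4.

  for (char c : input) {
    if (c == '=') {
      break;  // Padding ends the payload.
    }
    int v = value(c);
    if (v < 0) {
      return false;
    }
    chunk = (chunk << 6) | uint32_t(v);
    if (++chunkLength == 4) {
      // A full chunk of four characters decodes to three bytes.
      bytes->push_back(uint8_t(chunk >> 16));
      bytes->push_back(uint8_t(chunk >> 8));
      bytes->push_back(uint8_t(chunk));
      chunk = 0;
      chunkLength = 0;
    }
  }

  // Loose handling: a trailing chunk of three or two characters contributes
  // two or one bytes; leftover low bits are dropped rather than rejected.
  if (chunkLength == 3) {
    bytes->push_back(uint8_t(chunk >> 10));
    bytes->push_back(uint8_t(chunk >> 2));
  } else if (chunkLength == 2) {
    bytes->push_back(uint8_t(chunk >> 4));
  } else if (chunkLength == 1) {
    return false;  // An isolated character cannot form a byte.
  }
  return true;  // e.g. "U1o=" decodes to {0x53, 0x5a} ("SZ")
}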
+
+/**
+ * Uint8Array.fromBase64 ( string [ , options ] )
+ * Uint8Array.prototype.setFromBase64 ( string [ , options ] )
+ * Uint8Array.prototype.toBase64 ( [ options ] )
+ *
+ * Helper to retrieve the "alphabet" option.
+ */
+static bool GetAlphabetOption(JSContext* cx, Handle<JSObject*> options,
+ Alphabet* result) {
+ Rooted<Value> value(cx);
+ if (!GetProperty(cx, options, options, cx->names().alphabet, &value)) {
+ return false;
+ }
+
+ if (value.isUndefined()) {
+ *result = Alphabet::Base64;
+ return true;
+ }
+
+ if (!value.isString()) {
+ return ReportValueError(cx, JSMSG_UNEXPECTED_TYPE, JSDVG_IGNORE_STACK,
+ value, nullptr, "not a string");
+ }
+
+ auto* linear = value.toString()->ensureLinear(cx);
+ if (!linear) {
+ return false;
+ }
+
+ if (StringEqualsAscii(linear, "base64")) {
+ *result = Alphabet::Base64;
+ return true;
+ }
+
+ if (StringEqualsAscii(linear, "base64url")) {
+ *result = Alphabet::Base64Url;
+ return true;
+ }
+
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_BAD_BASE64_ALPHABET);
+ return false;
+}
+
+/**
+ * Uint8Array.fromBase64 ( string [ , options ] )
+ * Uint8Array.prototype.setFromBase64 ( string [ , options ] )
+ *
+ * Helper to retrieve the "lastChunkHandling" option.
+ */
+static bool GetLastChunkHandlingOption(JSContext* cx, Handle<JSObject*> options,
+ LastChunkHandling* result) {
+ Rooted<Value> value(cx);
+ if (!GetProperty(cx, options, options, cx->names().lastChunkHandling,
+ &value)) {
+ return false;
+ }
+
+ if (value.isUndefined()) {
+ *result = LastChunkHandling::Loose;
+ return true;
+ }
+
+ if (!value.isString()) {
+ return ReportValueError(cx, JSMSG_UNEXPECTED_TYPE, JSDVG_IGNORE_STACK,
+ value, nullptr, "not a string");
+ }
+
+ auto* linear = value.toString()->ensureLinear(cx);
+ if (!linear) {
+ return false;
+ }
+
+ if (StringEqualsAscii(linear, "loose")) {
+ *result = LastChunkHandling::Loose;
+ return true;
+ }
+
+ if (StringEqualsAscii(linear, "strict")) {
+ *result = LastChunkHandling::Strict;
+ return true;
+ }
+
+ if (StringEqualsAscii(linear, "stop-before-partial")) {
+ *result = LastChunkHandling::StopBeforePartial;
+ return true;
+ }
+
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_TYPED_ARRAY_BAD_BASE64_LAST_CHUNK_HANDLING);
+ return false;
+}
+
+/**
+ * Uint8Array.fromBase64 ( string [ , options ] )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-uint8array.frombase64
+ */
+static bool uint8array_fromBase64(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // Step 1.
+ if (!args.get(0).isString()) {
+ return ReportValueError(cx, JSMSG_UNEXPECTED_TYPE, JSDVG_SEARCH_STACK,
+ args.get(0), nullptr, "not a string");
+ }
+ Rooted<JSString*> string(cx, args[0].toString());
+
+ // Steps 2-9.
+ auto alphabet = Alphabet::Base64;
+ auto lastChunkHandling = LastChunkHandling::Loose;
+ if (args.hasDefined(1)) {
+ // Step 2. (Inlined GetOptionsObject)
+ Rooted<JSObject*> options(
+ cx, RequireObjectArg(cx, "options", "fromBase64", args[1]));
+ if (!options) {
+ return false;
+ }
+
+ // Steps 3-6.
+ if (!GetAlphabetOption(cx, options, &alphabet)) {
+ return false;
+ }
+
+ // Steps 7-9.
+ if (!GetLastChunkHandlingOption(cx, options, &lastChunkHandling)) {
+ return false;
+ }
+ }
+
+ // Step 10.
+ constexpr size_t maxLength = std::numeric_limits<size_t>::max();
+ ByteVector bytes(cx);
+ size_t unusedReadLength;
+ if (!FromBase64(cx, string, alphabet, lastChunkHandling, maxLength, bytes,
+ &unusedReadLength)) {
+ return false;
+ }
+
+ // Step 11.
+ size_t resultLength = bytes.length();
+
+ // Step 12.
+ auto* tarray =
+ TypedArrayObjectTemplate<uint8_t>::fromLength(cx, resultLength);
+ if (!tarray) {
+ return false;
+ }
+
+ // Step 13.
+ auto target = SharedMem<uint8_t*>::unshared(tarray->dataPointerUnshared());
+ auto source = SharedMem<uint8_t*>::unshared(bytes.begin());
+ UnsharedOps::podCopy(target, source, resultLength);
+
+ // Step 14.
+ args.rval().setObject(*tarray);
+ return true;
+}
+
+/**
+ * Uint8Array.fromHex ( string )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-uint8array.fromhex
+ */
+static bool uint8array_fromHex(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // Step 1.
+ if (!args.get(0).isString()) {
+ return ReportValueError(cx, JSMSG_UNEXPECTED_TYPE, JSDVG_SEARCH_STACK,
+ args.get(0), nullptr, "not a string");
+ }
+ Rooted<JSString*> string(cx, args[0].toString());
+
+ // Step 2.
+ constexpr size_t maxLength = std::numeric_limits<size_t>::max();
+ ByteVector bytes(cx);
+ size_t unusedReadLength;
+ if (!FromHex(cx, string, maxLength, bytes, &unusedReadLength)) {
+ return false;
+ }
+
+ // Step 3.
+ size_t resultLength = bytes.length();
+
+ // Step 4.
+ auto* tarray =
+ TypedArrayObjectTemplate<uint8_t>::fromLength(cx, resultLength);
+ if (!tarray) {
+ return false;
+ }
+
+ // Step 5.
+ auto target = SharedMem<uint8_t*>::unshared(tarray->dataPointerUnshared());
+ auto source = SharedMem<uint8_t*>::unshared(bytes.begin());
+ UnsharedOps::podCopy(target, source, resultLength);
+
+ // Step 6.
+ args.rval().setObject(*tarray);
+ return true;
+}
+
+/**
+ * Uint8Array.prototype.setFromBase64 ( string [ , options ] )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-uint8array.prototype.setfrombase64
+ */
+static bool uint8array_setFromBase64(JSContext* cx, const CallArgs& args) {
+ Rooted<TypedArrayObject*> tarray(
+ cx, &args.thisv().toObject().as<TypedArrayObject>());
+
+ // Step 3.
+ if (!args.get(0).isString()) {
+ return ReportValueError(cx, JSMSG_UNEXPECTED_TYPE, JSDVG_SEARCH_STACK,
+ args.get(0), nullptr, "not a string");
+ }
+ Rooted<JSString*> string(cx, args[0].toString());
+
+ // Steps 4-11.
+ auto alphabet = Alphabet::Base64;
+ auto lastChunkHandling = LastChunkHandling::Loose;
+ if (args.hasDefined(1)) {
+ // Step 2. (Inlined GetOptionsObject)
+ Rooted<JSObject*> options(
+ cx, RequireObjectArg(cx, "options", "setFromBase64", args[1]));
+ if (!options) {
+ return false;
+ }
+
+ // Steps 3-6.
+ if (!GetAlphabetOption(cx, options, &alphabet)) {
+ return false;
+ }
+
+ // Steps 7-9.
+ if (!GetLastChunkHandlingOption(cx, options, &lastChunkHandling)) {
+ return false;
+ }
+ }
+
+ // Steps 12-14.
+ auto length = tarray->length();
+ if (!length) {
+ ReportOutOfBounds(cx, tarray);
+ return false;
+ }
+
+ // Step 15.
+ size_t maxLength = *length;
+
+ // Steps 16-17.
+ ByteVector bytes(cx);
+ size_t readLength;
+ if (!FromBase64(cx, string, alphabet, lastChunkHandling, maxLength, bytes,
+ &readLength)) {
+ return false;
+ }
+
+ // Step 18.
+ size_t written = bytes.length();
+
+ // Step 19.
+ //
+ // The underlying buffer has neither been detached nor shrunk. (It may have
+ // been grown when it's a growable shared buffer and a concurrent thread
+ // resized the buffer.)
+ MOZ_ASSERT(!tarray->hasDetachedBuffer());
+ MOZ_ASSERT(tarray->length().valueOr(0) >= *length);
+
+ // Step 20.
+ MOZ_ASSERT(written <= *length);
+
+ // Step 21. (Inlined SetUint8ArrayBytes)
+ auto target = tarray->dataPointerEither().cast<uint8_t*>();
+ auto source = SharedMem<uint8_t*>::unshared(bytes.begin());
+ if (tarray->isSharedMemory()) {
+ SharedOps::podCopy(target, source, written);
+ } else {
+ UnsharedOps::podCopy(target, source, written);
+ }
+
+ // Step 22.
+ Rooted<PlainObject*> result(cx, NewPlainObject(cx));
+ if (!result) {
+ return false;
+ }
+
+ // Step 23.
+ Rooted<Value> readValue(cx, NumberValue(readLength));
+ if (!DefineDataProperty(cx, result, cx->names().read, readValue)) {
+ return false;
+ }
+
+ // Step 24.
+ Rooted<Value> writtenValue(cx, NumberValue(written));
+ if (!DefineDataProperty(cx, result, cx->names().written, writtenValue)) {
+ return false;
+ }
+
+ // Step 25.
+ args.rval().setObject(*result);
+ return true;
+}
+
+/**
+ * Uint8Array.prototype.setFromBase64 ( string [ , options ] )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-uint8array.prototype.setfrombase64
+ */
+static bool uint8array_setFromBase64(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // Steps 1-2.
+ return CallNonGenericMethod<IsUint8ArrayObject, uint8array_setFromBase64>(
+ cx, args);
+}
+
+/**
+ * Uint8Array.prototype.setFromHex ( string )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-uint8array.prototype.setfromhex
+ */
+static bool uint8array_setFromHex(JSContext* cx, const CallArgs& args) {
+ Rooted<TypedArrayObject*> tarray(
+ cx, &args.thisv().toObject().as<TypedArrayObject>());
+
+ // Step 3.
+ if (!args.get(0).isString()) {
+ return ReportValueError(cx, JSMSG_UNEXPECTED_TYPE, JSDVG_SEARCH_STACK,
+ args.get(0), nullptr, "not a string");
+ }
+ Rooted<JSString*> string(cx, args[0].toString());
+
+ // Steps 4-6.
+ auto length = tarray->length();
+ if (!length) {
+ ReportOutOfBounds(cx, tarray);
+ return false;
+ }
+
+ // Step 7.
+ size_t maxLength = *length;
+
+ // Steps 8-9.
+ ByteVector bytes(cx);
+ size_t readLength;
+ if (!FromHex(cx, string, maxLength, bytes, &readLength)) {
+ return false;
+ }
+
+ // Step 10.
+ size_t written = bytes.length();
+
+ // Step 11.
+ //
+ // The underlying buffer has neither been detached nor shrunk. (It may have
+ // been grown when it's a growable shared buffer and a concurrent thread
+ // resized the buffer.)
+ MOZ_ASSERT(!tarray->hasDetachedBuffer());
+ MOZ_ASSERT(tarray->length().valueOr(0) >= *length);
+
+ // Step 12.
+ MOZ_ASSERT(written <= *length);
+
+ // Step 13. (Inlined SetUint8ArrayBytes)
+ auto target = tarray->dataPointerEither().cast<uint8_t*>();
+ auto source = SharedMem<uint8_t*>::unshared(bytes.begin());
+ if (tarray->isSharedMemory()) {
+ SharedOps::podCopy(target, source, written);
+ } else {
+ UnsharedOps::podCopy(target, source, written);
+ }
+
+ // Step 14.
+ Rooted<PlainObject*> result(cx, NewPlainObject(cx));
+ if (!result) {
+ return false;
+ }
+
+ // Step 15.
+ Rooted<Value> readValue(cx, NumberValue(readLength));
+ if (!DefineDataProperty(cx, result, cx->names().read, readValue)) {
+ return false;
+ }
+
+ // Step 16.
+ Rooted<Value> writtenValue(cx, NumberValue(written));
+ if (!DefineDataProperty(cx, result, cx->names().written, writtenValue)) {
+ return false;
+ }
+
+ // Step 17.
+ args.rval().setObject(*result);
+ return true;
+}
+
+/**
+ * Uint8Array.prototype.setFromHex ( string )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-uint8array.prototype.setfromhex
+ */
+static bool uint8array_setFromHex(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // Steps 1-2.
+ return CallNonGenericMethod<IsUint8ArrayObject, uint8array_setFromHex>(cx,
+ args);
+}
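
Both setFromHex and setFromBase64 decode at most as many bytes as the target array can hold and then report a { read, written } pair. A hedged sketch of that contract for the hex case; SetFromHexInto and SetFromResult are invented stand-ins for the FromHex-plus-copy sequence above.

#include <cstddef>
#include <cstdint>
#include <string>

struct SetFromResult {
  size_t read;     // String index after the last fully decoded byte.
  size_t written;  // Bytes stored into the output buffer.
};

// Decode hex pairs into |out| until the buffer is full or the input ends.
SetFromResult SetFromHexInto(const std::string& input, uint8_t* out,
                             size_t outLength) {
  auto digit = [](char c) -> int {
    if (c >= '0' && c <= '9') return c - '0';
    if (c >= 'a' && c <= 'f') return c - 'a' + 10;
    if (c >= 'A' && c <= 'F') return c - 'A' + 10;
    return -1;
  };
  size_t index = 0;
  size_t written = 0;
  while (index + 1 < input.size() && written < outLength) {
    int hi = digit(input[index]);
    int lo = digit(input[index + 1]);
    if (hi < 0 || lo < 0) {
      break;  // A real implementation reports an error here.
    }
    out[written++] = uint8_t((hi << 4) | lo);
    index += 2;
  }
  return {index, written};
}

// Decoding "deadbeef" into a two-byte buffer yields read == 4 and
// written == 2: the rest of the string is simply not consumed.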
+
+/**
+ * Uint8Array.prototype.toBase64 ( [ options ] )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-uint8array.prototype.tobase64
+ */
+static bool uint8array_toBase64(JSContext* cx, const CallArgs& args) {
+ Rooted<TypedArrayObject*> tarray(
+ cx, &args.thisv().toObject().as<TypedArrayObject>());
+
+ // Steps 3-7.
+ auto alphabet = Alphabet::Base64;
+ if (args.hasDefined(0)) {
+ // Step 3. (Inlined GetOptionsObject)
+ Rooted<JSObject*> options(
+ cx, RequireObjectArg(cx, "options", "toBase64", args[0]));
+ if (!options) {
+ return false;
+ }
+
+ // Steps 4-7.
+ if (!GetAlphabetOption(cx, options, &alphabet)) {
+ return false;
+ }
+ }
+
+ // Step 8. (Partial)
+ auto length = tarray->length();
+ if (!length) {
+ ReportOutOfBounds(cx, tarray);
+ return false;
+ }
+
+  // Compute the output string length. Three input bytes are encoded as four
+  // characters and partial chunks are padded, so the output length is
+  // 4 × ⌈length / 3⌉.
+ auto outLength = mozilla::CheckedInt<size_t>{*length};
+ outLength += 2;
+ outLength /= 3;
+ outLength *= 4;
+ if (!outLength.isValid() || outLength.value() > JSString::MAX_LENGTH) {
+ ReportAllocationOverflow(cx);
+ return false;
+ }
+
+ JSStringBuilder sb(cx);
+ if (!sb.reserve(outLength.value())) {
+ return false;
+ }
+
+ // Steps 9-10.
+ const auto& base64Chars = alphabet == Alphabet::Base64
+ ? Base64::Encode::Base64
+ : Base64::Encode::Base64Url;
+
+ auto encode = [&base64Chars](uint32_t value) {
+ return base64Chars[value & 0x3f];
+ };
+
+ // Our implementation directly converts the bytes to their string
+ // representation instead of first collecting them into an intermediate list.
+ auto data = tarray->dataPointerEither().cast<uint8_t*>();
+ auto toRead = *length;
+ for (; toRead >= 3; toRead -= 3) {
+ // Combine three input bytes into a single uint24 value.
+ auto byte0 = jit::AtomicOperations::loadSafeWhenRacy(data++);
+ auto byte1 = jit::AtomicOperations::loadSafeWhenRacy(data++);
+ auto byte2 = jit::AtomicOperations::loadSafeWhenRacy(data++);
+ auto u24 = (uint32_t(byte0) << 16) | (uint32_t(byte1) << 8) | byte2;
+
+ // Encode the uint24 value as base64.
+ sb.infallibleAppend(encode(u24 >> 18));
+ sb.infallibleAppend(encode(u24 >> 12));
+ sb.infallibleAppend(encode(u24 >> 6));
+ sb.infallibleAppend(encode(u24 >> 0));
+ }
+
+  // Trailing chunks of two or one bytes are padded with '='.
+ if (toRead == 2) {
+ // Combine two input bytes into a single uint24 value.
+ auto byte0 = jit::AtomicOperations::loadSafeWhenRacy(data++);
+ auto byte1 = jit::AtomicOperations::loadSafeWhenRacy(data++);
+ auto u24 = (uint32_t(byte0) << 16) | (uint32_t(byte1) << 8);
+
+ // Encode the uint24 value as base64, including padding.
+ sb.infallibleAppend(encode(u24 >> 18));
+ sb.infallibleAppend(encode(u24 >> 12));
+ sb.infallibleAppend(encode(u24 >> 6));
+ sb.infallibleAppend('=');
+ } else if (toRead == 1) {
+ // Combine one input byte into a single uint24 value.
+ auto byte0 = jit::AtomicOperations::loadSafeWhenRacy(data++);
+ auto u24 = uint32_t(byte0) << 16;
+
+ // Encode the uint24 value as base64, including padding.
+ sb.infallibleAppend(encode(u24 >> 18));
+ sb.infallibleAppend(encode(u24 >> 12));
+ sb.infallibleAppend('=');
+ sb.infallibleAppend('=');
+ } else {
+ MOZ_ASSERT(toRead == 0);
+ }
+
+ MOZ_ASSERT(sb.length() == outLength.value(), "all characters were written");
+
+ // Step 11.
+ auto* str = sb.finishString();
+ if (!str) {
+ return false;
+ }
+
+ args.rval().setString(str);
+ return true;
+}
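
toBase64 walks the bytes three at a time, emitting four characters per group and padding a trailing group of one or two bytes with '='; that is where the reserved length of 4 × ⌈length / 3⌉ comes from. A standalone sketch of the same loop; EncodeBase64 is an illustrative name, not the SpiderMonkey code.

#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

std::string EncodeBase64(const std::vector<uint8_t>& bytes) {
  static const char kAlphabet[] =
      "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
  std::string out;
  out.reserve(((bytes.size() + 2) / 3) * 4);

  size_t i = 0;
  for (; i + 3 <= bytes.size(); i += 3) {
    // Combine three input bytes into a uint24 value, then emit four 6-bit
    // groups.
    uint32_t u24 = (uint32_t(bytes[i]) << 16) | (uint32_t(bytes[i + 1]) << 8) |
                   bytes[i + 2];
    out += kAlphabet[(u24 >> 18) & 0x3f];
    out += kAlphabet[(u24 >> 12) & 0x3f];
    out += kAlphabet[(u24 >> 6) & 0x3f];
    out += kAlphabet[u24 & 0x3f];
  }
  if (bytes.size() - i == 2) {
    uint32_t u24 = (uint32_t(bytes[i]) << 16) | (uint32_t(bytes[i + 1]) << 8);
    out += kAlphabet[(u24 >> 18) & 0x3f];
    out += kAlphabet[(u24 >> 12) & 0x3f];
    out += kAlphabet[(u24 >> 6) & 0x3f];
    out += '=';
  } else if (bytes.size() - i == 1) {
    uint32_t u24 = uint32_t(bytes[i]) << 16;
    out += kAlphabet[(u24 >> 18) & 0x3f];
    out += kAlphabet[(u24 >> 12) & 0x3f];
    out += "==";
  }
  return out;
}

// For example, {0x49} encodes to "SQ==" and {0x49, 0x96, 0x02} to "SZYC".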
+
+/**
+ * Uint8Array.prototype.toBase64 ( [ options ] )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-uint8array.prototype.tobase64
+ */
+static bool uint8array_toBase64(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // Steps 1-2.
+ return CallNonGenericMethod<IsUint8ArrayObject, uint8array_toBase64>(cx,
+ args);
+}
+
+/**
+ * Uint8Array.prototype.toHex ( )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-uint8array.prototype.tohex
+ */
+static bool uint8array_toHex(JSContext* cx, const CallArgs& args) {
+ Rooted<TypedArrayObject*> tarray(
+ cx, &args.thisv().toObject().as<TypedArrayObject>());
+
+ // Step 3. (Partial)
+ auto length = tarray->length();
+ if (!length) {
+ ReportOutOfBounds(cx, tarray);
+ return false;
+ }
+
+ // |length| is limited by |ByteLengthLimit|, which ensures that multiplying it
+ // by two won't overflow.
+ static_assert(TypedArrayObject::ByteLengthLimit <=
+ std::numeric_limits<size_t>::max() / 2);
+ MOZ_ASSERT(*length <= TypedArrayObject::ByteLengthLimit);
+
+ // Compute the output string length. Each byte is encoded as two characters,
+ // so the output length is exactly twice as large as |length|.
+ size_t outLength = *length * 2;
+ if (outLength > JSString::MAX_LENGTH) {
+ ReportAllocationOverflow(cx);
+ return false;
+ }
+
+ // Step 4.
+ JSStringBuilder sb(cx);
+ if (!sb.reserve(outLength)) {
+ return false;
+ }
+
+ // NB: Lower case hex digits.
+ static constexpr char HexDigits[] = "0123456789abcdef";
+ static_assert(std::char_traits<char>::length(HexDigits) == 16);
+
+ // Steps 3 and 5.
+ //
+ // Our implementation directly converts the bytes to their string
+ // representation instead of first collecting them into an intermediate list.
+ auto data = tarray->dataPointerEither().cast<uint8_t*>();
+ for (size_t index = 0; index < *length; index++) {
+ auto byte = jit::AtomicOperations::loadSafeWhenRacy(data + index);
+
+ sb.infallibleAppend(HexDigits[byte >> 4]);
+ sb.infallibleAppend(HexDigits[byte & 0xf]);
+ }
+
+ MOZ_ASSERT(sb.length() == outLength, "all characters were written");
+
+ // Step 6.
+ auto* str = sb.finishString();
+ if (!str) {
+ return false;
+ }
+
+ args.rval().setString(str);
+ return true;
+}
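
The hex case is simpler: two lowercase digits per byte, so the output is exactly twice the input length. A minimal standalone sketch; EncodeHex is an invented name.

#include <cstdint>
#include <string>
#include <vector>

std::string EncodeHex(const std::vector<uint8_t>& bytes) {
  static const char kHexDigits[] = "0123456789abcdef";
  std::string out;
  out.reserve(bytes.size() * 2);
  for (uint8_t byte : bytes) {
    out += kHexDigits[byte >> 4];
    out += kHexDigits[byte & 0xf];
  }
  return out;  // e.g. {0xde, 0xad} -> "dead"
}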
+
+/**
+ * Uint8Array.prototype.toHex ( )
+ *
+ * https://tc39.es/proposal-arraybuffer-base64/spec/#sec-uint8array.prototype.tohex
+ */
+static bool uint8array_toHex(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // Steps 1-2.
+ return CallNonGenericMethod<IsUint8ArrayObject, uint8array_toHex>(cx, args);
+}
+
+#endif
+
/* static */ const JSFunctionSpec TypedArrayObject::protoFunctions[] = {
JS_SELF_HOSTED_FN("subarray", "TypedArraySubarray", 2, 0),
JS_FN("set", TypedArrayObject::set, 1, 0),
@@ -2364,15 +3450,50 @@ static const JSPropertySpec
#undef IMPL_TYPED_ARRAY_PROPERTIES
};
+#ifdef NIGHTLY_BUILD
+static const JSFunctionSpec uint8array_static_methods[] = {
+ JS_FN("fromBase64", uint8array_fromBase64, 1, 0),
+ JS_FN("fromHex", uint8array_fromHex, 1, 0),
+ JS_FS_END,
+};
+
+static const JSFunctionSpec uint8array_methods[] = {
+ JS_FN("setFromBase64", uint8array_setFromBase64, 1, 0),
+ JS_FN("setFromHex", uint8array_setFromHex, 1, 0),
+ JS_FN("toBase64", uint8array_toBase64, 0, 0),
+ JS_FN("toHex", uint8array_toHex, 0, 0),
+ JS_FS_END,
+};
+#endif
+
+static constexpr const JSFunctionSpec* TypedArrayStaticMethods(
+ Scalar::Type type) {
+#ifdef NIGHTLY_BUILD
+ if (type == Scalar::Uint8) {
+ return uint8array_static_methods;
+ }
+#endif
+ return nullptr;
+}
+
+static constexpr const JSFunctionSpec* TypedArrayMethods(Scalar::Type type) {
+#ifdef NIGHTLY_BUILD
+ if (type == Scalar::Uint8) {
+ return uint8array_methods;
+ }
+#endif
+ return nullptr;
+}
+
static const ClassSpec
TypedArrayObjectClassSpecs[Scalar::MaxTypedArrayViewType] = {
#define IMPL_TYPED_ARRAY_CLASS_SPEC(ExternalType, NativeType, Name) \
{ \
TypedArrayObjectTemplate<NativeType>::createConstructor, \
TypedArrayObjectTemplate<NativeType>::createPrototype, \
- nullptr, \
+ TypedArrayStaticMethods(Scalar::Type::Name), \
static_prototype_properties[Scalar::Type::Name], \
- nullptr, \
+ TypedArrayMethods(Scalar::Type::Name), \
static_prototype_properties[Scalar::Type::Name], \
nullptr, \
JSProto_TypedArray, \
diff --git a/js/src/vm/TypedArrayObject.h b/js/src/vm/TypedArrayObject.h
index 46531ec4ee..6905e83600 100644
--- a/js/src/vm/TypedArrayObject.h
+++ b/js/src/vm/TypedArrayObject.h
@@ -149,6 +149,7 @@ class FixedLengthTypedArrayObject : public TypedArrayObject {
static constexpr uint32_t INLINE_BUFFER_LIMIT =
(NativeObject::MAX_FIXED_SLOTS - FIXED_DATA_START) * sizeof(Value);
+ inline gc::AllocKind allocKindForTenure() const;
static inline gc::AllocKind AllocKindForLazyBuffer(size_t nbytes);
size_t byteOffset() const {
diff --git a/js/src/wasm/AsmJS.cpp b/js/src/wasm/AsmJS.cpp
index 11a0c2b23d..1e79880360 100644
--- a/js/src/wasm/AsmJS.cpp
+++ b/js/src/wasm/AsmJS.cpp
@@ -6914,8 +6914,8 @@ static bool GetImports(JSContext* cx, const AsmJSMetadata& metadata,
return true;
}
-static bool TryInstantiate(JSContext* cx, CallArgs args, const Module& module,
- const AsmJSMetadata& metadata,
+static bool TryInstantiate(JSContext* cx, const CallArgs& args,
+ const Module& module, const AsmJSMetadata& metadata,
MutableHandle<WasmInstanceObject*> instanceObj,
MutableHandleObject exportObj) {
HandleValue globalVal = args.get(0);
@@ -6956,7 +6956,7 @@ static bool TryInstantiate(JSContext* cx, CallArgs args, const Module& module,
return true;
}
-static bool HandleInstantiationFailure(JSContext* cx, CallArgs args,
+static bool HandleInstantiationFailure(JSContext* cx, const CallArgs& args,
const AsmJSMetadata& metadata) {
using js::frontend::FunctionSyntaxKind;
diff --git a/js/src/wasm/WasmAnyRef.h b/js/src/wasm/WasmAnyRef.h
index 1675a9fa8d..cbd3fe75e0 100644
--- a/js/src/wasm/WasmAnyRef.h
+++ b/js/src/wasm/WasmAnyRef.h
@@ -208,16 +208,19 @@ class AnyRef {
// losslessly represent all i31 values.
static AnyRef fromUint32Truncate(uint32_t value) {
// See 64-bit GPRs carrying 32-bit values invariants in MacroAssember.h
-#if defined(JS_CODEGEN_X64) || defined(JS_CODEGEN_ARM64)
+#if defined(JS_CODEGEN_NONE) || defined(JS_CODEGEN_X64) || \
+ defined(JS_CODEGEN_ARM64)
// Truncate the value to the 31-bit value size.
uintptr_t wideValue = uintptr_t(value & 0x7FFFFFFF);
#elif defined(JS_CODEGEN_LOONG64) || defined(JS_CODEGEN_MIPS64) || \
defined(JS_CODEGEN_RISCV64)
// Sign extend the value to the native pointer size.
uintptr_t wideValue = uintptr_t(int64_t((uint64_t(value) << 33)) >> 33);
-#else
+#elif !defined(JS_64BIT)
// Transfer 32-bit value as is.
uintptr_t wideValue = (uintptr_t)value;
+#else
+# error "unknown architecture"
#endif
// Left shift the value by 1, truncating the high bit.
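
The two 64-bit strategies above differ only in how the upper register bits are filled before the final left shift, and the low 32 bits of the shifted result agree either way. A small self-contained check of that invariant; the function names are made up for this sketch.

#include <cassert>
#include <cstdint>
#include <initializer_list>

uint64_t WidenByTruncate(uint32_t value) {
  // x64/arm64 style: keep the low 31 bits, zero the rest.
  return uint64_t(value & 0x7fffffff);
}

uint64_t WidenBySignExtend(uint32_t value) {
  // loong64/mips64/riscv64 style: keep the low 31 bits, sign-extend bit 30.
  return uint64_t(int64_t(uint64_t(value) << 33) >> 33);
}

int main() {
  for (uint32_t v : {0u, 1u, 0x3fffffffu, 0x40000000u, 0x7fffffffu,
                     0x80000000u, 0xffffffffu}) {
    uint64_t a = WidenByTruncate(v) << 1;
    uint64_t b = WidenBySignExtend(v) << 1;
    // The i31 payload (bits 1..31 after the shift) is the same either way.
    assert(uint32_t(a) == uint32_t(b));
  }
  return 0;
}
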
diff --git a/js/src/wasm/WasmBCClass.h b/js/src/wasm/WasmBCClass.h
index 844ae3381a..52a73d195c 100644
--- a/js/src/wasm/WasmBCClass.h
+++ b/js/src/wasm/WasmBCClass.h
@@ -1402,9 +1402,6 @@ struct BaseCompiler final {
// Used for common setup for catch and catch_all.
void emitCatchSetup(LabelKind kind, Control& tryCatch,
const ResultType& resultType);
- // Helper function used to generate landing pad code for the special
- // case in which `delegate` jumps to a function's body block.
- [[nodiscard]] bool emitBodyDelegateThrowPad();
[[nodiscard]] bool emitTry();
[[nodiscard]] bool emitTryTable();
diff --git a/js/src/wasm/WasmBaselineCompile.cpp b/js/src/wasm/WasmBaselineCompile.cpp
index cb0fbde6ec..dbd5bf11d4 100644
--- a/js/src/wasm/WasmBaselineCompile.cpp
+++ b/js/src/wasm/WasmBaselineCompile.cpp
@@ -3780,12 +3780,6 @@ bool BaseCompiler::emitEnd() {
return false;
}
doReturn(ContinuationKind::Fallthrough);
- // This is emitted here after `doReturn` to avoid being executed in the
- // normal return path of a function, and instead only when a `delegate`
- // jumps to it.
- if (!emitBodyDelegateThrowPad()) {
- return false;
- }
iter_.popEnd();
MOZ_ASSERT(iter_.controlStackEmpty());
return iter_.endFunction(iter_.end());
@@ -4108,9 +4102,6 @@ bool BaseCompiler::emitTryTable() {
Label skipLandingPad;
masm.jump(&skipLandingPad);
- // Bind the otherLabel so that delegate can target this
- masm.bind(&controlItem().otherLabel);
-
StackHeight prePadHeight = fr.stackHeight();
uint32_t padOffset = masm.currentOffset();
uint32_t padStackHeight = masm.framePushed();
@@ -4496,30 +4487,6 @@ bool BaseCompiler::emitCatchAll() {
return pushBlockResults(exnResult);
}
-bool BaseCompiler::emitBodyDelegateThrowPad() {
- Control& block = controlItem();
-
- // Only emit a landing pad if a `delegate` has generated a jump to here.
- if (block.otherLabel.used()) {
- StackHeight savedHeight = fr.stackHeight();
- fr.setStackHeight(block.stackHeight);
- masm.bind(&block.otherLabel);
-
- // A try-delegate jumps immediately to its delegated try block, so we are
- // responsible to unpack the exception and rethrow it.
- RegRef exn;
- RegRef tag;
- consumePendingException(RegPtr(InstanceReg), &exn, &tag);
- freeRef(tag);
- if (!throwFrom(exn)) {
- return false;
- }
- fr.setStackHeight(savedHeight);
- }
-
- return true;
-}
-
bool BaseCompiler::emitDelegate() {
uint32_t relativeDepth;
ResultType resultType;
@@ -4529,47 +4496,17 @@ bool BaseCompiler::emitDelegate() {
return false;
}
- Control& tryDelegate = controlItem();
-
- // End the try branch like a plain catch block without exception ref handling.
- if (deadCode_) {
- fr.resetStackHeight(tryDelegate.stackHeight, resultType);
- popValueStackTo(tryDelegate.stackSize);
- } else {
- MOZ_ASSERT(stk_.length() == tryDelegate.stackSize + resultType.length());
- popBlockResults(resultType, tryDelegate.stackHeight,
- ContinuationKind::Jump);
- freeResultRegisters(resultType);
- masm.jump(&tryDelegate.label);
- MOZ_ASSERT(!tryDelegate.deadOnArrival);
+ if (!endBlock(resultType)) {
+ return false;
}
- deadCode_ = tryDelegate.deadOnArrival;
-
- if (deadCode_) {
+ if (controlItem().deadOnArrival) {
return true;
}
- // Create an exception landing pad that immediately branches to the landing
- // pad of the delegated try block.
- masm.bind(&tryDelegate.otherLabel);
-
- StackHeight savedHeight = fr.stackHeight();
- fr.setStackHeight(tryDelegate.stackHeight);
-
// Mark the end of the try body. This may insert a nop.
finishTryNote(controlItem().tryNoteIndex);
- // The landing pad begins at this point
- TryNoteVector& tryNotes = masm.tryNotes();
- TryNote& tryNote = tryNotes[controlItem().tryNoteIndex];
- tryNote.setLandingPad(masm.currentOffset(), masm.framePushed());
-
- // Store the Instance that was left in InstanceReg by the exception
- // handling mechanism, that is this frame's Instance but with the exception
- // filled in Instance::pendingException.
- fr.storeInstancePtr(InstanceReg);
-
// If the target block is a non-try block, skip over it and find the next
// try block or the very last block (to re-throw out of the function).
Control& lastBlock = controlOutermost();
@@ -4579,22 +4516,24 @@ bool BaseCompiler::emitDelegate() {
relativeDepth++;
}
Control& target = controlItem(relativeDepth);
-
- popBlockResults(ResultType::Empty(), target.stackHeight,
- ContinuationKind::Jump);
- masm.jump(&target.otherLabel);
-
- fr.setStackHeight(savedHeight);
-
- // Where the try branch jumps to, if it's not dead.
- if (tryDelegate.label.used()) {
- masm.bind(&tryDelegate.label);
+ TryNoteVector& tryNotes = masm.tryNotes();
+ TryNote& delegateTryNote = tryNotes[controlItem().tryNoteIndex];
+
+ if (&target == &lastBlock) {
+ // A delegate targeting the function body block means that any exception
+ // in this try needs to be propagated to the caller function. We use the
+ // delegate code offset of `0` as that will be in the prologue and cannot
+ // have a try note.
+ delegateTryNote.setDelegate(0);
+ } else {
+      // Delegate to one byte inside the beginning of the target try note, as
+      // that's where try-note matches hit. Try notes are guaranteed to be
+      // non-empty, so this will not miss.
+ const TryNote& targetTryNote = tryNotes[target.tryNoteIndex];
+ delegateTryNote.setDelegate(targetTryNote.tryBodyBegin() + 1);
}
- captureResultRegisters(resultType);
- bceSafe_ = tryDelegate.bceSafeOnExit;
-
- return pushBlockResults(resultType);
+ return true;
}
bool BaseCompiler::endTryCatch(ResultType type) {
@@ -4634,7 +4573,6 @@ bool BaseCompiler::endTryCatch(ResultType type) {
// Create landing pad for all catch handlers in this block.
// When used for a catchless try block, this will generate a landing pad
// with no handlers and only the fall-back rethrow.
- masm.bind(&tryCatch.otherLabel);
// The stack height also needs to be set not for a block result, but for the
// entry to the exception handlers. This is reset again below for the join.
diff --git a/js/src/wasm/WasmBuiltins.cpp b/js/src/wasm/WasmBuiltins.cpp
index 7b03494bcd..f0773583ac 100644
--- a/js/src/wasm/WasmBuiltins.cpp
+++ b/js/src/wasm/WasmBuiltins.cpp
@@ -628,6 +628,22 @@ static WasmExceptionObject* GetOrWrapWasmException(JitActivation* activation,
return nullptr;
}
+static const wasm::TryNote* FindNonDelegateTryNote(const wasm::Code& code,
+ const uint8_t* pc,
+ Tier* tier) {
+ const wasm::TryNote* tryNote = code.lookupTryNote((void*)pc, tier);
+ while (tryNote && tryNote->isDelegate()) {
+ const wasm::CodeTier& codeTier = code.codeTier(*tier);
+ pc = codeTier.segment().base() + tryNote->delegateOffset();
+ const wasm::TryNote* delegateTryNote = code.lookupTryNote((void*)pc, tier);
+ MOZ_RELEASE_ASSERT(delegateTryNote == nullptr ||
+ delegateTryNote->tryBodyBegin() <
+ tryNote->tryBodyBegin());
+ tryNote = delegateTryNote;
+ }
+ return tryNote;
+}
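
A toy model of how the unwinder resolves delegate try notes. A delegate note has no landing pad of its own; it redirects the search to whatever note encloses its delegate offset, and because every hop targets a strictly earlier try body (the release assert above), the walk terminates. The types and the linear lookup here are illustrative only.

#include <cstdint>
#include <vector>

struct ToyTryNote {
  uint32_t begin;           // Start of the try body.
  uint32_t end;             // End of the try body.
  bool isDelegate;          // True if this note redirects instead of handling.
  uint32_t delegateOffset;  // Only meaningful when isDelegate.
};

// Innermost note whose body contains |offset|, using the same begin-exclusive,
// end-inclusive test as TryNote::offsetWithinTryBody.
const ToyTryNote* Lookup(const std::vector<ToyTryNote>& notes,
                         uint32_t offset) {
  const ToyTryNote* best = nullptr;
  for (const ToyTryNote& note : notes) {
    if (offset > note.begin && offset <= note.end &&
        (!best || note.begin > best->begin)) {
      best = &note;
    }
  }
  return best;
}

// Follow delegate notes until a real handler (or nothing) is found. A delegate
// offset of 0 lands in the prologue and therefore finds no note at all, which
// propagates the exception to the caller.
const ToyTryNote* FindNonDelegate(const std::vector<ToyTryNote>& notes,
                                  uint32_t offset) {
  const ToyTryNote* note = Lookup(notes, offset);
  while (note && note->isDelegate) {
    note = Lookup(notes, note->delegateOffset);
  }
  return note;
}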
+
// Unwind the entire activation in response to a thrown exception. This function
// is responsible for notifying the debugger of each unwound frame. The return
// value is the new stack address which the calling stub will set to the sp
@@ -674,10 +690,10 @@ bool wasm::HandleThrow(JSContext* cx, WasmFrameIter& iter,
// Only look for an exception handler if there's a catchable exception.
if (wasmExn) {
+ Tier tier;
const wasm::Code& code = iter.instance()->code();
const uint8_t* pc = iter.resumePCinCurrentFrame();
- Tier tier;
- const wasm::TryNote* tryNote = code.lookupTryNote((void*)pc, &tier);
+ const wasm::TryNote* tryNote = FindNonDelegateTryNote(code, pc, &tier);
if (tryNote) {
#ifdef ENABLE_WASM_TAIL_CALLS
@@ -751,8 +767,8 @@ bool wasm::HandleThrow(JSContext* cx, WasmFrameIter& iter,
// Assert that any pending exception escaping to non-wasm code is not a
// wrapper exception object
#ifdef DEBUG
- Rooted<Value> pendingException(cx);
- if (cx->isExceptionPending() && cx->getPendingException(&pendingException)) {
+ if (cx->isExceptionPending()) {
+ Rooted<Value> pendingException(cx, cx->getPendingExceptionUnwrapped());
MOZ_ASSERT_IF(pendingException.isObject() &&
pendingException.toObject().is<WasmExceptionObject>(),
!pendingException.toObject()
diff --git a/js/src/wasm/WasmCodegenTypes.h b/js/src/wasm/WasmCodegenTypes.h
index 590572ae8a..7e54ad15f7 100644
--- a/js/src/wasm/WasmCodegenTypes.h
+++ b/js/src/wasm/WasmCodegenTypes.h
@@ -671,20 +671,30 @@ struct TryNote {
// Sentinel value to detect a try note that has not been given a try body.
static const uint32_t BEGIN_NONE = UINT32_MAX;
+ // Sentinel value used in `entryPointOrIsDelegate_`.
+ static const uint32_t IS_DELEGATE = UINT32_MAX;
+
// Begin code offset of the try body.
uint32_t begin_;
// Exclusive end code offset of the try body.
uint32_t end_;
- // The code offset of the landing pad.
- uint32_t entryPoint_;
- // Track offset from frame of stack pointer.
- uint32_t framePushed_;
+ // Either a marker that this is a 'delegate' or else the code offset of the
+ // landing pad to jump to.
+ uint32_t entryPointOrIsDelegate_;
+  // If this is a delegate, then this is the code offset to delegate to;
+  // otherwise it is the offset of the stack pointer from the frame pointer to
+  // use when jumping to the landing pad.
+ uint32_t framePushedOrDelegateOffset_;
- WASM_CHECK_CACHEABLE_POD(begin_, end_, entryPoint_, framePushed_);
+ WASM_CHECK_CACHEABLE_POD(begin_, end_, entryPointOrIsDelegate_,
+ framePushedOrDelegateOffset_);
public:
explicit TryNote()
- : begin_(BEGIN_NONE), end_(0), entryPoint_(0), framePushed_(0) {}
+ : begin_(BEGIN_NONE),
+ end_(0),
+ entryPointOrIsDelegate_(0),
+ framePushedOrDelegateOffset_(0) {}
// Returns whether a try note has been assigned a range for the try body.
bool hasTryBody() const { return begin_ != BEGIN_NONE; }
@@ -700,11 +710,27 @@ struct TryNote {
return offset > begin_ && offset <= end_;
}
+ // Check if the unwinder should delegate the handling of this try note to the
+ // try note given at the delegate offset.
+ bool isDelegate() const { return entryPointOrIsDelegate_ == IS_DELEGATE; }
+
+ // The code offset to delegate the handling of this try note to.
+ uint32_t delegateOffset() const {
+ MOZ_ASSERT(isDelegate());
+ return framePushedOrDelegateOffset_;
+ }
+
// The code offset of the entry to the landing pad.
- uint32_t landingPadEntryPoint() const { return entryPoint_; }
+ uint32_t landingPadEntryPoint() const {
+ MOZ_ASSERT(!isDelegate());
+ return entryPointOrIsDelegate_;
+ }
// The stack frame pushed amount at the entry to the landing pad.
- uint32_t landingPadFramePushed() const { return framePushed_; }
+ uint32_t landingPadFramePushed() const {
+ MOZ_ASSERT(!isDelegate());
+ return framePushedOrDelegateOffset_;
+ }
// Set the beginning of the try body.
void setTryBodyBegin(uint32_t begin) {
@@ -722,17 +748,29 @@ struct TryNote {
MOZ_ASSERT(end_ > begin_);
}
+ // Mark this try note as a delegate, requesting the unwinder to use the try
+ // note found at the delegate offset.
+ void setDelegate(uint32_t delegateOffset) {
+ entryPointOrIsDelegate_ = IS_DELEGATE;
+ framePushedOrDelegateOffset_ = delegateOffset;
+ }
+
// Set the entry point and frame pushed of the landing pad.
void setLandingPad(uint32_t entryPoint, uint32_t framePushed) {
- entryPoint_ = entryPoint;
- framePushed_ = framePushed;
+ MOZ_ASSERT(!isDelegate());
+ entryPointOrIsDelegate_ = entryPoint;
+ framePushedOrDelegateOffset_ = framePushed;
}
// Adjust all code offsets in this try note by a delta.
void offsetBy(uint32_t offset) {
begin_ += offset;
end_ += offset;
- entryPoint_ += offset;
+ if (isDelegate()) {
+ framePushedOrDelegateOffset_ += offset;
+ } else {
+ entryPointOrIsDelegate_ += offset;
+ }
}
bool operator<(const TryNote& other) const {
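
The reworked TryNote keeps its cacheable POD layout by overloading two existing words instead of adding a union plus tag: one word is either a landing-pad entry point or the IS_DELEGATE sentinel, and the meaning of the second word follows from that. A hedged standalone sketch of the pattern; TaggedTryNote is not the real type, and it assumes a real entry point can never equal the sentinel.

#include <cassert>
#include <cstdint>

struct TaggedTryNote {
  static constexpr uint32_t IS_DELEGATE = UINT32_MAX;

  uint32_t entryPointOrIsDelegate = 0;
  uint32_t framePushedOrDelegateOffset = 0;

  bool isDelegate() const { return entryPointOrIsDelegate == IS_DELEGATE; }

  void setLandingPad(uint32_t entryPoint, uint32_t framePushed) {
    assert(entryPoint != IS_DELEGATE);
    entryPointOrIsDelegate = entryPoint;
    framePushedOrDelegateOffset = framePushed;
  }

  void setDelegate(uint32_t delegateOffset) {
    entryPointOrIsDelegate = IS_DELEGATE;
    framePushedOrDelegateOffset = delegateOffset;
  }

  // Code offsets move together when the function is relocated, but only the
  // field that actually holds a code offset in the current mode is adjusted.
  void offsetBy(uint32_t delta) {
    if (isDelegate()) {
      framePushedOrDelegateOffset += delta;
    } else {
      entryPointOrIsDelegate += delta;
    }
  }
};
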
diff --git a/js/src/wasm/WasmFrame.h b/js/src/wasm/WasmFrame.h
index 23e1c4f49c..a2fdba2b75 100644
--- a/js/src/wasm/WasmFrame.h
+++ b/js/src/wasm/WasmFrame.h
@@ -346,14 +346,16 @@ static_assert(!std::is_polymorphic_v<Frame>, "Frame doesn't need a vtable.");
static_assert(sizeof(Frame) == 2 * sizeof(void*),
"Frame is a two pointer structure");
-// Note that sizeof(FrameWithInstances) does not account for ShadowStackSpace.
-// Use FrameWithInstances::sizeOf() if you are not incorporating
-// ShadowStackSpace through other means (eg the ABIArgIter).
-
-class FrameWithInstances : public Frame {
+class FrameWithShadowStackSpace : public Frame {
+ protected:
// `ShadowStackSpace` bytes will be allocated here on Win64, at higher
// addresses than Frame and at lower addresses than the instance fields.
+ uint8_t shadowStackSpace_[js::jit::ShadowStackSpace];
+};
+class FrameWithInstances
+ : public std::conditional_t<js::jit::ShadowStackSpace >= 1,
+ FrameWithShadowStackSpace, Frame> {
// The instance area MUST be two pointers exactly.
Instance* calleeInstance_;
Instance* callerInstance_;
@@ -362,21 +364,17 @@ class FrameWithInstances : public Frame {
Instance* calleeInstance() { return calleeInstance_; }
Instance* callerInstance() { return callerInstance_; }
- constexpr static uint32_t sizeOf() {
- return sizeof(wasm::FrameWithInstances) + js::jit::ShadowStackSpace;
- }
-
constexpr static uint32_t sizeOfInstanceFields() {
- return sizeof(wasm::FrameWithInstances) - sizeof(wasm::Frame);
+ return sizeof(wasm::FrameWithInstances) - sizeof(wasm::Frame) -
+ js::jit::ShadowStackSpace;
}
constexpr static uint32_t sizeOfInstanceFieldsAndShadowStack() {
- return sizeOfInstanceFields() + js::jit::ShadowStackSpace;
+ return sizeof(wasm::FrameWithInstances) - sizeof(wasm::Frame);
}
constexpr static uint32_t calleeInstanceOffset() {
- return offsetof(FrameWithInstances, calleeInstance_) +
- js::jit::ShadowStackSpace;
+ return offsetof(FrameWithInstances, calleeInstance_);
}
constexpr static uint32_t calleeInstanceOffsetWithoutFrame() {
@@ -384,8 +382,7 @@ class FrameWithInstances : public Frame {
}
constexpr static uint32_t callerInstanceOffset() {
- return offsetof(FrameWithInstances, callerInstance_) +
- js::jit::ShadowStackSpace;
+ return offsetof(FrameWithInstances, callerInstance_);
}
constexpr static uint32_t callerInstanceOffsetWithoutFrame() {
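
FrameWithInstances now mixes the shadow-stack padding in through its base class only when the target ABI actually reserves shadow stack space, instead of adding ShadowStackSpace to every offset computation by hand. A minimal sketch of that std::conditional_t pattern under an assumed 32-byte shadow stack; kShadowStackSpace and the field names are illustrative.

#include <cstddef>
#include <cstdint>
#include <type_traits>

// Assumed value for illustration: Win64 reserves 32 bytes, most other ABIs 0.
constexpr size_t kShadowStackSpace = 32;

struct Frame {
  void* callerFP;
  void* returnAddress;
};

struct FrameWithShadowStackSpace : Frame {
  uint8_t shadowStackSpace[kShadowStackSpace];
};

// Mix in the padded layout only when padding is required; a zero-length array
// member would not be portable C++ if kShadowStackSpace were 0.
struct FrameWithInstances
    : std::conditional_t<(kShadowStackSpace >= 1), FrameWithShadowStackSpace,
                         Frame> {
  void* calleeInstance;
  void* callerInstance;
};

// The instance fields now land directly after the (possibly padded) frame, so
// offset helpers no longer add the shadow stack size explicitly. This equality
// holds on the usual ABIs with no extra struct padding.
static_assert(sizeof(FrameWithInstances) ==
                  sizeof(Frame) + kShadowStackSpace + 2 * sizeof(void*),
              "instance fields sit directly after the padded frame");
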
diff --git a/js/src/wasm/WasmGC.h b/js/src/wasm/WasmGC.h
index b502f2f40e..57e823b3c5 100644
--- a/js/src/wasm/WasmGC.h
+++ b/js/src/wasm/WasmGC.h
@@ -91,7 +91,7 @@ struct StackMapHeader {
// Add 16 words to account for the size of FrameWithInstances including any
// shadow stack (at worst 8 words total), and then a little headroom in case
// the argument area had to be aligned.
- static_assert(FrameWithInstances::sizeOf() / sizeof(void*) <= 8);
+ static_assert(sizeof(FrameWithInstances) / sizeof(void*) <= 8);
static_assert(maxFrameOffsetFromTop >=
(MaxParams * MaxParamSize / sizeof(void*)) + 16,
"limited size of the offset field");
diff --git a/js/src/wasm/WasmGcObject.cpp b/js/src/wasm/WasmGcObject.cpp
index bcab5fe275..3ccb08d381 100644
--- a/js/src/wasm/WasmGcObject.cpp
+++ b/js/src/wasm/WasmGcObject.cpp
@@ -410,8 +410,6 @@ void WasmArrayObject::obj_finalize(JS::GCContext* gcx, JSObject* object) {
/* static */
size_t WasmArrayObject::obj_moved(JSObject* obj, JSObject* old) {
- MOZ_ASSERT(!IsInsideNursery(obj));
-
// Moving inline arrays requires us to update the data pointer.
WasmArrayObject& arrayObj = obj->as<WasmArrayObject>();
WasmArrayObject& oldArrayObj = old->as<WasmArrayObject>();
@@ -423,24 +421,18 @@ size_t WasmArrayObject::obj_moved(JSObject* obj, JSObject* old) {
MOZ_ASSERT(arrayObj.isDataInline() == oldArrayObj.isDataInline());
if (IsInsideNursery(old)) {
+ Nursery& nursery = obj->runtimeFromMainThread()->gc.nursery();
// It's been tenured.
- MOZ_ASSERT(obj->isTenured());
if (!arrayObj.isDataInline()) {
- // Tell the nursery that the trailer is no longer associated with an
- // object in the nursery, since the object has been moved to the tenured
- // heap.
- Nursery& nursery = obj->runtimeFromMainThread()->gc.nursery();
- nursery.unregisterTrailer(arrayObj.dataHeader());
- // Tell the tenured-heap accounting machinery that the trailer is now
- // associated with the tenured heap.
const TypeDef& typeDef = arrayObj.typeDef();
MOZ_ASSERT(typeDef.isArrayType());
size_t trailerSize = calcStorageBytes(
typeDef.arrayType().elementType_.size(), arrayObj.numElements_);
// Ensured by WasmArrayObject::createArrayOOL.
MOZ_RELEASE_ASSERT(trailerSize <= size_t(MaxArrayPayloadBytes));
- AddCellMemory(&arrayObj, trailerSize + TrailerBlockOverhead,
- MemoryUse::WasmTrailerBlock);
+ nursery.trackTrailerOnPromotion(arrayObj.dataHeader(), obj, trailerSize,
+ TrailerBlockOverhead,
+ MemoryUse::WasmTrailerBlock);
}
}
@@ -563,16 +555,9 @@ void WasmStructObject::obj_finalize(JS::GCContext* gcx, JSObject* object) {
/* static */
size_t WasmStructObject::obj_moved(JSObject* obj, JSObject* old) {
// See also, corresponding comments in WasmArrayObject::obj_moved.
- MOZ_ASSERT(!IsInsideNursery(obj));
if (IsInsideNursery(old)) {
- // It's been tenured.
- MOZ_ASSERT(obj->isTenured());
- WasmStructObject& structObj = obj->as<WasmStructObject>();
- // WasmStructObject::classForTypeDef ensures we only get called for
- // structs with OOL data. Hence:
- MOZ_ASSERT(structObj.outlineData_);
Nursery& nursery = obj->runtimeFromMainThread()->gc.nursery();
- nursery.unregisterTrailer(structObj.outlineData_);
+ WasmStructObject& structObj = obj->as<WasmStructObject>();
const TypeDef& typeDef = structObj.typeDef();
MOZ_ASSERT(typeDef.isStructType());
uint32_t totalBytes = typeDef.structType().size_;
@@ -580,9 +565,11 @@ size_t WasmStructObject::obj_moved(JSObject* obj, JSObject* old) {
WasmStructObject::getDataByteSizes(totalBytes, &inlineBytes, &outlineBytes);
MOZ_ASSERT(inlineBytes == WasmStructObject_MaxInlineBytes);
MOZ_ASSERT(outlineBytes > 0);
- AddCellMemory(&structObj, outlineBytes + TrailerBlockOverhead,
- MemoryUse::WasmTrailerBlock);
+ nursery.trackTrailerOnPromotion(structObj.outlineData_, obj, outlineBytes,
+ TrailerBlockOverhead,
+ MemoryUse::WasmTrailerBlock);
}
+
return 0;
}
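
Both obj_moved hooks above used to unregister the trailer from the nursery and then call AddCellMemory() by hand; they now hand both steps to a single Nursery::trackTrailerOnPromotion() call. A hedged, self-contained sketch of that consolidation with placeholder types (this is not the real Nursery code):

#include <cstddef>
#include <unordered_set>

struct NurserySketch {
  std::unordered_set<void*> trailers;  // stand-in for the trailer registry
  size_t tenuredTrailerBytes = 0;      // stand-in for AddCellMemory accounting

  // One call replaces the two bookkeeping steps each caller had to remember:
  // drop the trailer from the nursery's registry, then charge the block
  // (payload plus per-block overhead) to the tenured heap.
  void trackTrailerOnPromotion(void* trailer, size_t nBytes, size_t overhead) {
    trailers.erase(trailer);
    tenuredTrailerBytes += nBytes + overhead;
  }
};
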
diff --git a/js/src/wasm/WasmInstance.cpp b/js/src/wasm/WasmInstance.cpp
index d025c02c16..606601581d 100644
--- a/js/src/wasm/WasmInstance.cpp
+++ b/js/src/wasm/WasmInstance.cpp
@@ -2937,7 +2937,8 @@ static bool EnsureEntryStubs(const Instance& instance, uint32_t funcIndex,
}
static bool GetInterpEntryAndEnsureStubs(JSContext* cx, Instance& instance,
- uint32_t funcIndex, CallArgs args,
+ uint32_t funcIndex,
+ const CallArgs& args,
void** interpEntry,
const FuncType** funcType) {
const FuncExport* funcExport;
@@ -3099,8 +3100,8 @@ class MOZ_RAII ReturnToJSResultCollector {
}
};
-bool Instance::callExport(JSContext* cx, uint32_t funcIndex, CallArgs args,
- CoercionLevel level) {
+bool Instance::callExport(JSContext* cx, uint32_t funcIndex,
+ const CallArgs& args, CoercionLevel level) {
if (memory0Base_) {
// If there has been a moving grow, this Instance should have been notified.
MOZ_RELEASE_ASSERT(memoryBase(0).unwrap() == memory0Base_);
diff --git a/js/src/wasm/WasmInstance.h b/js/src/wasm/WasmInstance.h
index 074c6212df..0e4f9745b7 100644
--- a/js/src/wasm/WasmInstance.h
+++ b/js/src/wasm/WasmInstance.h
@@ -364,7 +364,7 @@ class alignas(16) Instance {
// value in args.rval.
[[nodiscard]] bool callExport(JSContext* cx, uint32_t funcIndex,
- CallArgs args,
+ const CallArgs& args,
CoercionLevel level = CoercionLevel::Spec);
// Exception handling support
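
Several signatures above switch from taking CallArgs by value to const CallArgs&. CallArgs is only a small view over the argument Values, so this avoids a copy without restricting the callee: length(), get() and even rval() are const members. A minimal sketch of the pattern (illustrative function names, standard JSAPI calls):

#include "js/CallArgs.h"
#include "js/Conversions.h"

static bool SumArgsSketch(JSContext* cx, const JS::CallArgs& args) {
  double total = 0;
  for (unsigned i = 0; i < args.length(); i++) {
    double d;
    if (!JS::ToNumber(cx, args.get(i), &d)) {
      return false;
    }
    total += d;
  }
  args.rval().setNumber(total);  // rval() is usable through a const reference
  return true;
}

static bool SumArgsNative(JSContext* cx, unsigned argc, JS::Value* vp) {
  JS::CallArgs args = JS::CallArgsFromVp(argc, vp);
  return SumArgsSketch(cx, args);
}
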
diff --git a/js/src/wasm/WasmJS.cpp b/js/src/wasm/WasmJS.cpp
index 2eb5e355d9..d987ecec29 100644
--- a/js/src/wasm/WasmJS.cpp
+++ b/js/src/wasm/WasmJS.cpp
@@ -123,6 +123,10 @@ static bool ThrowBadImportArg(JSContext* cx) {
static bool ThrowBadImportType(JSContext* cx, const CacheableName& field,
const char* str) {
UniqueChars fieldQuoted = field.toQuotedString(cx);
+ if (!fieldQuoted) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_IMPORT_TYPE, fieldQuoted.get(), str);
return false;
@@ -178,6 +182,10 @@ bool js::wasm::GetImports(JSContext* cx, const Module& module,
if (!importModuleValue.isObject()) {
UniqueChars moduleQuoted = import.module.toQuotedString(cx);
+ if (!moduleQuoted) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_IMPORT_FIELD,
moduleQuoted.get());
@@ -256,6 +264,10 @@ bool js::wasm::GetImports(JSContext* cx, const Module& module,
if (obj->resultType() != tags[index].type->resultType()) {
UniqueChars fieldQuoted = import.field.toQuotedString(cx);
UniqueChars moduleQuoted = import.module.toQuotedString(cx);
+ if (!fieldQuoted || !moduleQuoted) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_TAG_SIG, moduleQuoted.get(),
fieldQuoted.get());
@@ -1005,8 +1017,9 @@ static bool IsModuleObject(JSObject* obj, const Module** module) {
return true;
}
-static bool GetModuleArg(JSContext* cx, CallArgs args, uint32_t numRequired,
- const char* name, const Module** module) {
+static bool GetModuleArg(JSContext* cx, const CallArgs& args,
+ uint32_t numRequired, const char* name,
+ const Module** module) {
if (!args.requireAtLeast(cx, name, numRequired)) {
return false;
}
@@ -4514,8 +4527,8 @@ static bool EnsurePromiseSupport(JSContext* cx) {
return true;
}
-static bool GetBufferSource(JSContext* cx, CallArgs callArgs, const char* name,
- MutableBytes* bytecode) {
+static bool GetBufferSource(JSContext* cx, const CallArgs& callArgs,
+ const char* name, MutableBytes* bytecode) {
if (!callArgs.requireAtLeast(cx, name, 1)) {
return false;
}
@@ -4576,7 +4589,7 @@ static bool WebAssembly_compile(JSContext* cx, unsigned argc, Value* vp) {
return true;
}
-static bool GetInstantiateArgs(JSContext* cx, CallArgs callArgs,
+static bool GetInstantiateArgs(JSContext* cx, const CallArgs& callArgs,
MutableHandleObject firstArg,
MutableHandleObject importObj,
MutableHandleValue featureOptions) {
@@ -5089,7 +5102,7 @@ const JSClass ResolveResponseClosure::class_ = {
&ResolveResponseClosure::classOps_,
};
-static ResolveResponseClosure* ToResolveResponseClosure(CallArgs args) {
+static ResolveResponseClosure* ToResolveResponseClosure(const CallArgs& args) {
return &args.callee()
.as<JSFunction>()
.getExtendedSlot(0)
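
The new null checks above follow the usual pattern for fallible string helpers: toQuotedString(cx) can return null under OOM, and formatting a null pointer into the error message would be worse than reporting the OOM directly. A minimal hedged sketch of the pattern; DescribeThingSketch is a hypothetical stand-in for the fallible helper:

#include "jsapi.h"

// Hypothetical fallible helper (returns null on OOM), standing in for
// CacheableName::toQuotedString() and friends.
extern JS::UniqueChars DescribeThingSketch(JSContext* cx, JS::HandleValue v);

static bool ReportBadThingSketch(JSContext* cx, JS::HandleValue v) {
  JS::UniqueChars description = DescribeThingSketch(cx, v);
  if (!description) {
    JS_ReportOutOfMemory(cx);  // surface the OOM instead of formatting null
    return false;
  }
  JS_ReportErrorASCII(cx, "unexpected value: %s", description.get());
  return false;
}
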
diff --git a/js/src/wasm/WasmModule.cpp b/js/src/wasm/WasmModule.cpp
index a297e81ad3..406c16462b 100644
--- a/js/src/wasm/WasmModule.cpp
+++ b/js/src/wasm/WasmModule.cpp
@@ -486,6 +486,10 @@ bool Module::instantiateFunctions(JSContext* cx,
const Import& import = FindImportFunction(imports_, i);
UniqueChars importModuleName = import.module.toQuotedString(cx);
UniqueChars importFieldName = import.field.toQuotedString(cx);
+ if (!importFieldName || !importModuleName) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr,
JSMSG_WASM_BAD_IMPORT_SIG,
importModuleName.get(), importFieldName.get());
diff --git a/js/src/wasm/WasmStubs.cpp b/js/src/wasm/WasmStubs.cpp
index 83a18c9992..dfaa898744 100644
--- a/js/src/wasm/WasmStubs.cpp
+++ b/js/src/wasm/WasmStubs.cpp
@@ -1633,9 +1633,10 @@ static void FillArgumentArrayForInterpExit(MacroAssembler& masm,
const FuncType& funcType,
unsigned argOffset,
Register scratch) {
- // This is FrameWithInstances::sizeOf() - ShadowStackSpace because the latter
+ // This is `sizeof(FrameWithInstances) - ShadowStackSpace` because the latter
// is accounted for by the ABIArgIter.
- const unsigned offsetFromFPToCallerStackArgs = sizeof(FrameWithInstances);
+ const unsigned offsetFromFPToCallerStackArgs =
+ sizeof(FrameWithInstances) - jit::ShadowStackSpace;
GenPrintf(DebugChannel::Import, masm, "wasm-import[%u]; arguments ",
funcImportIndex);
@@ -1729,9 +1730,10 @@ static void FillArgumentArrayForJitExit(MacroAssembler& masm, Register instance,
Register scratch2, Label* throwLabel) {
MOZ_ASSERT(scratch != scratch2);
- // This is FrameWithInstances::sizeOf() - ShadowStackSpace because the latter
+ // This is `sizeof(FrameWithInstances) - ShadowStackSpace` because the latter
// is accounted for by the ABIArgIter.
- const unsigned offsetFromFPToCallerStackArgs = sizeof(FrameWithInstances);
+ const unsigned offsetFromFPToCallerStackArgs =
+ sizeof(FrameWithInstances) - jit::ShadowStackSpace;
// This loop does not root the values that are being constructed in
// for the arguments. Allocations that are generated by code either
@@ -2473,9 +2475,10 @@ bool wasm::GenerateBuiltinThunk(MacroAssembler& masm, ABIFunctionType abiType,
// Copy out and convert caller arguments, if needed.
- // This is FrameWithInstances::sizeOf() - ShadowStackSpace because the latter
+ // This is `sizeof(FrameWithInstances) - ShadowStackSpace` because the latter
// is accounted for by the ABIArgIter.
- unsigned offsetFromFPToCallerStackArgs = sizeof(FrameWithInstances);
+ unsigned offsetFromFPToCallerStackArgs =
+ sizeof(FrameWithInstances) - jit::ShadowStackSpace;
Register scratch = ABINonArgReturnReg0;
for (ABIArgIter i(args); !i.done(); i++) {
if (i->argInRegister()) {
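
The three hunks above all encode the same accounting change: sizeof(FrameWithInstances) now includes any shadow stack, and the ABIArgIter still adds ShadowStackSpace itself, so the fixed FP-to-caller-args distance must subtract it once. With illustrative 64-bit / Win64 numbers:

// sizeof(Frame)              = 16  (caller FP + return address)
// Win64 shadow area          = 32  (now folded into FrameWithInstances)
// instance fields            = 16  (callee + caller Instance*)
// sizeof(FrameWithInstances) = 64
// offsetFromFPToCallerStackArgs = 64 - 32 = 32, and the ABIArgIter supplies
// the remaining 32 bytes of shadow space when laying out stack arguments.
static_assert(64 - 32 == 16 + 16, "shadow space is counted exactly once");
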
diff --git a/js/src/wasm/WasmTypeDef.cpp b/js/src/wasm/WasmTypeDef.cpp
index 42367c0cb2..ee005681c5 100644
--- a/js/src/wasm/WasmTypeDef.cpp
+++ b/js/src/wasm/WasmTypeDef.cpp
@@ -364,6 +364,21 @@ bool StructType::init() {
return true;
}
+/* static */
+bool StructType::createImmutable(const ValTypeVector& types,
+ StructType* struct_) {
+ StructFieldVector fields;
+ if (!fields.resize(types.length())) {
+ return false;
+ }
+ for (size_t i = 0; i < types.length(); i++) {
+ fields[i].type = StorageType(types[i].packed());
+ fields[i].isMutable = false;
+ }
+ *struct_ = StructType(std::move(fields));
+ return struct_->init();
+}
+
size_t StructType::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const {
return fields_.sizeOfExcludingThis(mallocSizeOf);
}
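
A hedged usage sketch for the new StructType::createImmutable() helper: give it a list of value types and it produces an initialized struct type whose fields are all immutable. The ValType spellings and the calling context are illustrative only:

// Sketch: build an immutable (i32, f64) struct type. Returns false on OOM or
// if initialization fails, mirroring the helper's own fallibility.
bool MakePairStructSketch(StructType* out) {
  ValTypeVector fieldTypes;
  if (!fieldTypes.append(ValType::I32) || !fieldTypes.append(ValType::F64)) {
    return false;
  }
  // Every resulting field has isMutable == false.
  return StructType::createImmutable(fieldTypes, out);
}
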
diff --git a/js/src/wasm/WasmTypeDef.h b/js/src/wasm/WasmTypeDef.h
index 3426647095..a0d44e647b 100644
--- a/js/src/wasm/WasmTypeDef.h
+++ b/js/src/wasm/WasmTypeDef.h
@@ -371,6 +371,8 @@ class StructType {
return true;
}
+ static bool createImmutable(const ValTypeVector& types, StructType* struct_);
+
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
WASM_DECLARE_FRIEND_SERIALIZE(StructType);
};
diff --git a/js/src/wasm/WasmValidate.cpp b/js/src/wasm/WasmValidate.cpp
index 98a1423a41..d67967fa41 100644
--- a/js/src/wasm/WasmValidate.cpp
+++ b/js/src/wasm/WasmValidate.cpp
@@ -40,6 +40,49 @@ using mozilla::CheckedInt32;
using mozilla::IsUtf8;
using mozilla::Span;
+// Module environment helpers.
+
+bool ModuleEnvironment::addDefinedFunc(
+ ValTypeVector&& params, ValTypeVector&& results, bool declareForRef,
+ Maybe<CacheableName>&& optionalExportedName) {
+ uint32_t typeIndex = types->length();
+ FuncType funcType(std::move(params), std::move(results));
+ if (!types->addType(std::move(funcType))) {
+ return false;
+ }
+
+ FuncDesc funcDesc = FuncDesc(&(*types)[typeIndex].funcType(), typeIndex);
+ uint32_t funcIndex = funcs.length();
+ if (!funcs.append(funcDesc)) {
+ return false;
+ }
+ if (declareForRef) {
+ declareFuncExported(funcIndex, true, true);
+ }
+ if (optionalExportedName.isSome()) {
+ if (!exports.emplaceBack(std::move(optionalExportedName.ref()), funcIndex,
+ DefinitionKind::Function)) {
+ return false;
+ }
+ }
+ return true;
+}
+
+bool ModuleEnvironment::addImportedFunc(ValTypeVector&& params,
+ ValTypeVector&& results,
+ CacheableName&& importModName,
+ CacheableName&& importFieldName) {
+ MOZ_ASSERT(numFuncImports == funcs.length());
+ if (!addDefinedFunc(std::move(params), std::move(results), false,
+ mozilla::Nothing())) {
+ return false;
+ }
+ numFuncImports++;
+ return imports.emplaceBack(std::move(importModName),
+ std::move(importFieldName),
+ DefinitionKind::Function);
+}
+
// Misc helpers.
bool wasm::EncodeLocalEntries(Encoder& e, const ValTypeVector& locals) {
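
A hedged usage sketch for the new ModuleEnvironment helpers above, e.g. when a module environment is synthesized programmatically: addDefinedFunc() appends the func type, the FuncDesc, and (optionally) an export entry in one step, and addImportedFunc() layers an import record on top of that. ValType spellings and the calling context are illustrative; error handling is kept minimal:

// Sketch: add a defined (i32) -> i32 function that is callable through
// function references, relying on the declared default of no export name.
bool AddSquareFuncSketch(ModuleEnvironment& env) {
  ValTypeVector params, results;
  if (!params.append(ValType::I32) || !results.append(ValType::I32)) {
    return false;
  }
  return env.addDefinedFunc(std::move(params), std::move(results),
                            /* declareForRef = */ true);
}
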
diff --git a/js/src/wasm/WasmValidate.h b/js/src/wasm/WasmValidate.h
index 8ba08fd088..f8d712b3b3 100644
--- a/js/src/wasm/WasmValidate.h
+++ b/js/src/wasm/WasmValidate.h
@@ -191,6 +191,15 @@ struct ModuleEnvironment {
MOZ_ASSERT(tagIndex < tags.length());
return tagsOffsetStart + tagIndex * sizeof(TagInstanceData);
}
+
+ bool addDefinedFunc(
+ ValTypeVector&& params, ValTypeVector&& results,
+ bool declareForRef = false,
+ Maybe<CacheableName>&& optionalExportedName = mozilla::Nothing());
+
+ bool addImportedFunc(ValTypeVector&& params, ValTypeVector&& results,
+ CacheableName&& importModName,
+ CacheableName&& importFieldName);
};
// ElemSegmentFlags provides methods for decoding and encoding the flags field
diff --git a/js/src/wasm/WasmValue.h b/js/src/wasm/WasmValue.h
index 79e20285b9..9a5442fc75 100644
--- a/js/src/wasm/WasmValue.h
+++ b/js/src/wasm/WasmValue.h
@@ -224,9 +224,9 @@ class LitVal {
Cell& cell() { return cell_; }
const Cell& cell() const { return cell_; }
- // Updates the type of the LitVal. Does not check that the type is valid for the
- // actual value, so make sure the type is definitely correct via validation or
- // something.
+ // Updates the type of the LitVal. Does not check that the type is valid for
+ // the actual value, so make sure the type is definitely correct via
+ // validation or something.
void unsafeSetType(ValType type) { type_ = type; }
uint32_t i32() const {
diff --git a/js/xpconnect/idl/xpccomponents.idl b/js/xpconnect/idl/xpccomponents.idl
index 1d4155ed74..47ed22e3a5 100644
--- a/js/xpconnect/idl/xpccomponents.idl
+++ b/js/xpconnect/idl/xpccomponents.idl
@@ -261,7 +261,7 @@ interface nsIXPCComponents_Utils : nsISupports
[optional] in jsval version,
[optional] in AUTF8String filename,
[optional] in long lineNo,
- [optional] in bool enforceFilenameRestrictions);
+ [optional] in boolean enforceFilenameRestrictions);
/*
* Get the sandbox for running JS-implemented UA widgets (video controls etc.),
@@ -569,7 +569,7 @@ interface nsIXPCComponents_Utils : nsISupports
* reference edges) and will throw if you touch them (e.g. by
* reading/writing a property).
*/
- bool isDeadWrapper(in jsval obj);
+ boolean isDeadWrapper(in jsval obj);
/**
* Determines whether this value is a remote object proxy, such as
@@ -584,7 +584,7 @@ interface nsIXPCComponents_Utils : nsISupports
* frame.contentWindow.doCrossOriginThing();
* }
*/
- bool isRemoteProxy(in jsval val);
+ boolean isRemoteProxy(in jsval val);
/*
* To be called from JS only. This is for Gecko internal use only, and may
@@ -653,12 +653,12 @@ interface nsIXPCComponents_Utils : nsISupports
/**
* Check whether the given object is an opaque wrapper (PermissiveXrayOpaque).
*/
- bool isOpaqueWrapper(in jsval obj);
+ boolean isOpaqueWrapper(in jsval obj);
/**
* Check whether the given object is an XrayWrapper.
*/
- bool isXrayWrapper(in jsval obj);
+ boolean isXrayWrapper(in jsval obj);
/**
* Waive Xray on a given value. Identity op for primitives.
@@ -680,7 +680,7 @@ interface nsIXPCComponents_Utils : nsISupports
* probably what you want.
*/
[implicit_jscontext]
- string getClassName(in jsval aObj, in bool aUnwrap);
+ string getClassName(in jsval aObj, in boolean aUnwrap);
/**
* Get a DOM classinfo for the given classname. Only some class
diff --git a/js/xpconnect/loader/mozJSModuleLoader.cpp b/js/xpconnect/loader/mozJSModuleLoader.cpp
index 017ac32b3b..cdf4df1970 100644
--- a/js/xpconnect/loader/mozJSModuleLoader.cpp
+++ b/js/xpconnect/loader/mozJSModuleLoader.cpp
@@ -1279,7 +1279,10 @@ nsresult mozJSModuleLoader::GetScriptForLocation(
}
void mozJSModuleLoader::UnloadModules() {
+ MOZ_ASSERT(!mIsUnloaded);
+
mInitialized = false;
+ mIsUnloaded = true;
if (mLoaderGlobal) {
MOZ_ASSERT(JS_HasExtensibleLexicalEnvironment(mLoaderGlobal));
@@ -1387,6 +1390,11 @@ nsresult mozJSModuleLoader::IsModuleLoaded(const nsACString& aLocation,
bool* retval) {
MOZ_ASSERT(nsContentUtils::IsCallerChrome());
+ if (mIsUnloaded) {
+ *retval = false;
+ return NS_OK;
+ }
+
mInitialized = true;
ModuleLoaderInfo info(aLocation);
if (mImports.Get(info.Key())) {
@@ -1420,6 +1428,11 @@ nsresult mozJSModuleLoader::IsJSModuleLoaded(const nsACString& aLocation,
bool* retval) {
MOZ_ASSERT(nsContentUtils::IsCallerChrome());
+ if (mIsUnloaded) {
+ *retval = false;
+ return NS_OK;
+ }
+
mInitialized = true;
ModuleLoaderInfo info(aLocation);
if (mImports.Get(info.Key())) {
@@ -1435,6 +1448,11 @@ nsresult mozJSModuleLoader::IsESModuleLoaded(const nsACString& aLocation,
bool* retval) {
MOZ_ASSERT(nsContentUtils::IsCallerChrome());
+ if (mIsUnloaded) {
+ *retval = false;
+ return NS_OK;
+ }
+
mInitialized = true;
ModuleLoaderInfo info(aLocation);
@@ -1728,6 +1746,11 @@ nsresult mozJSModuleLoader::Import(JSContext* aCx, const nsACString& aLocation,
JS::MutableHandleObject aModuleGlobal,
JS::MutableHandleObject aModuleExports,
bool aIgnoreExports) {
+ if (mIsUnloaded) {
+ JS_ReportErrorASCII(aCx, "Module loaded is already unloaded");
+ return NS_ERROR_FAILURE;
+ }
+
mInitialized = true;
AUTO_PROFILER_MARKER_TEXT(
@@ -2013,6 +2036,11 @@ nsresult mozJSModuleLoader::ImportESModule(
aSkipCheck /* = SkipCheckForBrokenURLOrZeroSized::No */) {
using namespace JS::loader;
+ if (mIsUnloaded) {
+ JS_ReportErrorASCII(aCx, "Module loaded is already unloaded");
+ return NS_ERROR_FAILURE;
+ }
+
mInitialized = true;
// Called from ChromeUtils::ImportESModule.
diff --git a/js/xpconnect/loader/mozJSModuleLoader.h b/js/xpconnect/loader/mozJSModuleLoader.h
index ac118c507d..b5e81a4b25 100644
--- a/js/xpconnect/loader/mozJSModuleLoader.h
+++ b/js/xpconnect/loader/mozJSModuleLoader.h
@@ -290,6 +290,7 @@ class mozJSModuleLoader final : public nsIMemoryReporter {
nsClassHashtable<nsCStringHashKey, nsCString> mLocations;
bool mInitialized;
+ bool mIsUnloaded = false;
#ifdef DEBUG
bool mIsInitializingLoaderGlobal = false;
#endif
diff --git a/js/xpconnect/src/Sandbox.cpp b/js/xpconnect/src/Sandbox.cpp
index 3e931320a9..ed77605193 100644
--- a/js/xpconnect/src/Sandbox.cpp
+++ b/js/xpconnect/src/Sandbox.cpp
@@ -311,7 +311,7 @@ static bool SandboxFetch(JSContext* cx, JS::HandleObject scope,
}
BindingCallContext callCx(cx, "fetch");
- RequestOrUSVString request;
+ RequestOrUTF8String request;
if (!request.Init(callCx, args[0], "Argument 1")) {
return false;
}
@@ -1274,7 +1274,7 @@ nsresult ApplyAddonContentScriptCSP(nsISupports* prinOrSop) {
csp = new nsCSPContext();
MOZ_TRY(
- csp->SetRequestContextWithPrincipal(clonedPrincipal, selfURI, u""_ns, 0));
+ csp->SetRequestContextWithPrincipal(clonedPrincipal, selfURI, ""_ns, 0));
MOZ_TRY(csp->AppendPolicy(baseCSP, false, false));
diff --git a/js/xpconnect/src/XPCJSRuntime.cpp b/js/xpconnect/src/XPCJSRuntime.cpp
index fd495ec964..567cd860cf 100644
--- a/js/xpconnect/src/XPCJSRuntime.cpp
+++ b/js/xpconnect/src/XPCJSRuntime.cpp
@@ -2589,7 +2589,8 @@ static void AccumulateTelemetryCallback(JSMetric id, uint32_t sample) {
glean::performance_clone_deserialize::size.Accumulate(sample);
break;
case JSMetric::DESERIALIZE_ITEMS:
- glean::performance_clone_deserialize::items.AccumulateSamples({sample});
+ glean::performance_clone_deserialize::items.AccumulateSingleSample(
+ sample);
break;
case JSMetric::DESERIALIZE_US:
glean::performance_clone_deserialize::time.AccumulateRawDuration(
diff --git a/js/xpconnect/tests/mochitest/mochitest.toml b/js/xpconnect/tests/mochitest/mochitest.toml
index c57cb26890..bc0f1d97b7 100644
--- a/js/xpconnect/tests/mochitest/mochitest.toml
+++ b/js/xpconnect/tests/mochitest/mochitest.toml
@@ -200,8 +200,6 @@ skip-if = [
"http2",
]
-["test_bug871887.html"]
-
["test_bug912322.html"]
["test_bug916945.html"]
diff --git a/js/xpconnect/tests/mochitest/test_bug871887.html b/js/xpconnect/tests/mochitest/test_bug871887.html
deleted file mode 100644
index 082b2ae746..0000000000
--- a/js/xpconnect/tests/mochitest/test_bug871887.html
+++ /dev/null
@@ -1,43 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<!--
-https://bugzilla.mozilla.org/show_bug.cgi?id=871887
--->
-<head>
- <meta charset="utf-8">
- <title>Test for Bug 871887</title>
- <script src="/tests/SimpleTest/SimpleTest.js"></script>
- <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
- <script type="application/javascript">
-
- /** Test for Bug 871887 **/
- SimpleTest.waitForExplicitFinish();
-
- // NB: onstart ends up getting invoked twice, for mysterious and potentially-
- // IE6-related reasons.
- function checkpoint(invocant) {
- ok(true, "onstart called");
- is(invocant, $('llama'), "this-binding is correct");
- $('llama').loop = 1;
- $('llama').scrollDelay = 1;
- $('llama').scrollAmount = 500;
- }
-
- function done(invocant) {
- is(invocant, $('llama'), "this-binding is correct");
- ok(true, "onfinish called");
- SimpleTest.finish();
- }
-
- </script>
-</head>
-<body>
-<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=871887">Mozilla Bug 871887</a>
-<p id="display"></p>
-<div id="content">
-<marquee id="llama" onstart="checkpoint(this);" onfinish="done(this);">Watch the Llama</marquee>
-</div>
-<pre id="test">
-</pre>
-</body>
-</html>
diff --git a/js/xpconnect/tests/unit/test_import_global_current.js b/js/xpconnect/tests/unit/test_import_global_current.js
index cf466a7391..59037512f3 100644
--- a/js/xpconnect/tests/unit/test_import_global_current.js
+++ b/js/xpconnect/tests/unit/test_import_global_current.js
@@ -204,7 +204,7 @@ ChromeUtils.importESModule("resource://test/es6module_import_error.js", {
`, sb);
} catch (e) {
caught = true;
- Assert.stringMatches(e.message, /import not found/);
+ Assert.stringMatches(e.message, /doesn't provide an export named/);
}
Assert.ok(caught);
});
diff --git a/js/xpconnect/tests/unit/test_scriptable_nsIClassInfo.js b/js/xpconnect/tests/unit/test_scriptable_nsIClassInfo.js
new file mode 100644
index 0000000000..161afcbb9b
--- /dev/null
+++ b/js/xpconnect/tests/unit/test_scriptable_nsIClassInfo.js
@@ -0,0 +1,41 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+add_task(function () {
+ class TestClass {
+ QueryInterface = ChromeUtils.generateQI([
+ "nsIXPCTestInterfaceA",
+ "nsIClassInfo",
+ ]);
+
+ interfaces = [Ci.nsIXPCTestInterfaceA, Ci.nsIClassInfo, Ci.nsISupports];
+ contractID = "@mozilla.org/test/class;1";
+ classDescription = "description";
+ classID = Components.ID("{4da556d4-00fa-451a-a280-d2aec7c5f265}");
+ flags = 0;
+
+ name = "this is a test";
+ }
+
+ let instance = new TestClass();
+ Assert.ok(instance, "can create an instance");
+ Assert.ok(instance.QueryInterface(Ci.nsIClassInfo), "can QI to nsIClassInfo");
+
+ let registrar = Components.manager.QueryInterface(Ci.nsIComponentRegistrar);
+ registrar.registerFactory(
+ instance.classID,
+ instance.classDescription,
+ instance.contractID,
+ {
+ createInstance(iid) {
+ return instance.QueryInterface(iid);
+ },
+ }
+ );
+ Assert.ok(true, "successfully registered the factory");
+
+ let otherInstance = Cc["@mozilla.org/test/class;1"].createInstance(
+ Ci.nsIXPCTestInterfaceA
+ );
+ Assert.ok(otherInstance, "can create an instance via xpcom");
+});
diff --git a/js/xpconnect/tests/unit/xpcshell.toml b/js/xpconnect/tests/unit/xpcshell.toml
index 37274eba96..b7bc15afdc 100644
--- a/js/xpconnect/tests/unit/xpcshell.toml
+++ b/js/xpconnect/tests/unit/xpcshell.toml
@@ -358,6 +358,8 @@ head = "head_ongc.js"
["test_sandbox_name.js"]
+["test_scriptable_nsIClassInfo.js"]
+
["test_storage.js"]
["test_structuredClone.js"]
diff --git a/js/xpconnect/wrappers/XrayWrapper.cpp b/js/xpconnect/wrappers/XrayWrapper.cpp
index cdedb02a5f..48696cd12d 100644
--- a/js/xpconnect/wrappers/XrayWrapper.cpp
+++ b/js/xpconnect/wrappers/XrayWrapper.cpp
@@ -1799,6 +1799,11 @@ bool DOMXrayTraits::call(JSContext* cx, HandleObject wrapper,
// using "legacycaller". At this time for all the legacycaller users it makes
// more sense to invoke on the xray compartment, so we just go ahead and do
// that for everything.
+ if (IsDOMConstructor(obj)) {
+ const JSNativeHolder* holder = NativeHolderFromObject(obj);
+ return holder->mNative(cx, args.length(), args.base());
+ }
+
if (js::IsProxy(obj)) {
if (JS::IsCallable(obj)) {
// Passing obj here, but it doesn't really matter because legacycaller
@@ -1822,20 +1827,21 @@ bool DOMXrayTraits::construct(JSContext* cx, HandleObject wrapper,
const JS::CallArgs& args,
const js::Wrapper& baseInstance) {
RootedObject obj(cx, getTargetObject(wrapper));
- MOZ_ASSERT(mozilla::dom::HasConstructor(obj));
- const JSClass* clasp = JS::GetClass(obj);
// See comments in DOMXrayTraits::call() explaining what's going on here.
- if (clasp->flags & JSCLASS_IS_DOMIFACEANDPROTOJSCLASS) {
- if (JSNative construct = clasp->getConstruct()) {
- if (!construct(cx, args.length(), args.base())) {
- return false;
- }
- } else {
+ if (IsDOMConstructor(obj)) {
+ const JSNativeHolder* holder = NativeHolderFromObject(obj);
+ if (!holder->mNative(cx, args.length(), args.base())) {
+ return false;
+ }
+ } else {
+ const JSClass* clasp = JS::GetClass(obj);
+ if (clasp->flags & JSCLASS_IS_DOMIFACEANDPROTOJSCLASS) {
+ MOZ_ASSERT(!clasp->getConstruct());
+
RootedValue v(cx, ObjectValue(*wrapper));
js::ReportIsNotFunction(cx, v);
return false;
}
- } else {
if (!baseInstance.construct(cx, wrapper, args)) {
return false;
}