path: root/js/src/builtin
Diffstat (limited to 'js/src/builtin')
-rw-r--r--  js/src/builtin/Array.cpp             | 572
-rw-r--r--  js/src/builtin/Array.h               | 151
-rw-r--r--  js/src/builtin/Array.js              |  97
-rw-r--r--  js/src/builtin/JSON.cpp              | 268
-rw-r--r--  js/src/builtin/MapObject.cpp         | 160
-rw-r--r--  js/src/builtin/MapObject.h           |  20
-rw-r--r--  js/src/builtin/ModuleObject.cpp      |  11
-rw-r--r--  js/src/builtin/ModuleObject.h        |   1
-rw-r--r--  js/src/builtin/ParseRecordObject.cpp |  16
-rw-r--r--  js/src/builtin/ParseRecordObject.h   |  21
-rw-r--r--  js/src/builtin/Promise.cpp           |  52
-rw-r--r--  js/src/builtin/RawJSONObject.cpp     |  41
-rw-r--r--  js/src/builtin/RawJSONObject.h       |  27
-rw-r--r--  js/src/builtin/Reflect.js            |   2
-rw-r--r--  js/src/builtin/Sorting.js            |  97
-rw-r--r--  js/src/builtin/TestingFunctions.cpp  | 154
-rw-r--r--  js/src/builtin/TestingUtility.cpp    |  15
-rw-r--r--  js/src/builtin/TestingUtility.h      |   2
-rw-r--r--  js/src/builtin/Tuple.js              |   2
-rw-r--r--  js/src/builtin/WeakMapObject-inl.h   |  15
-rw-r--r--  js/src/builtin/WeakMapObject.cpp     |   8
-rw-r--r--  js/src/builtin/WeakSetObject.cpp     |   9
-rw-r--r--  js/src/builtin/embedjs.py            |   4
23 files changed, 1338 insertions, 407 deletions
diff --git a/js/src/builtin/Array.cpp b/js/src/builtin/Array.cpp
index 76493eee7f..2ced07b6b9 100644
--- a/js/src/builtin/Array.cpp
+++ b/js/src/builtin/Array.cpp
@@ -10,6 +10,7 @@
#include "mozilla/DebugOnly.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/Maybe.h"
+#include "mozilla/ScopeExit.h"
#include "mozilla/SIMD.h"
#include "mozilla/TextUtils.h"
@@ -23,6 +24,7 @@
#include "ds/Sort.h"
#include "jit/InlinableNatives.h"
+#include "jit/TrampolineNatives.h"
#include "js/Class.h"
#include "js/Conversions.h"
#include "js/experimental/JitInfo.h" // JSJitGetterOp, JSJitInfo
@@ -2026,57 +2028,10 @@ static bool FillWithUndefined(JSContext* cx, HandleObject obj, uint32_t start,
return true;
}
-static bool ArrayNativeSortImpl(JSContext* cx, Handle<JSObject*> obj,
- Handle<Value> fval, ComparatorMatchResult comp);
-
-bool js::intrinsic_ArrayNativeSort(JSContext* cx, unsigned argc, Value* vp) {
- // This function is called from the self-hosted Array.prototype.sort
- // implementation. It returns |true| if the array was sorted, otherwise it
- // returns |false| to notify the self-hosted code to perform the sorting.
- CallArgs args = CallArgsFromVp(argc, vp);
- MOZ_ASSERT(args.length() == 1);
-
- HandleValue fval = args[0];
- MOZ_ASSERT(fval.isUndefined() || IsCallable(fval));
-
- ComparatorMatchResult comp;
- if (fval.isObject()) {
- comp = MatchNumericComparator(cx, &fval.toObject());
- if (comp == Match_Failure) {
- return false;
- }
-
- if (comp == Match_None) {
- // Non-optimized user supplied comparators perform much better when
- // called from within a self-hosted sorting function.
- args.rval().setBoolean(false);
- return true;
- }
- } else {
- comp = Match_None;
- }
-
- Rooted<JSObject*> obj(cx, &args.thisv().toObject());
-
- if (!ArrayNativeSortImpl(cx, obj, fval, comp)) {
- return false;
- }
-
- args.rval().setBoolean(true);
- return true;
-}
-
-static bool ArrayNativeSortImpl(JSContext* cx, Handle<JSObject*> obj,
- Handle<Value> fval,
- ComparatorMatchResult comp) {
- uint64_t length;
- if (!GetLengthPropertyInlined(cx, obj, &length)) {
- return false;
- }
- if (length < 2) {
- /* [] and [a] remain unchanged when sorted. */
- return true;
- }
+static bool ArraySortWithoutComparator(JSContext* cx, Handle<JSObject*> obj,
+ uint64_t length,
+ ComparatorMatchResult comp) {
+ MOZ_ASSERT(length > 1);
if (length > UINT32_MAX) {
ReportAllocationOverflow(cx);
@@ -2225,6 +2180,519 @@ static bool ArrayNativeSortImpl(JSContext* cx, Handle<JSObject*> obj,
return true;
}
+void ArraySortData::init(JSObject* obj, JSObject* comparator, ValueVector&& vec,
+ uint32_t length, uint32_t denseLen) {
+ MOZ_ASSERT(!vec.empty(), "must have items to sort");
+ MOZ_ASSERT(denseLen <= length);
+
+ obj_ = obj;
+ comparator_ = comparator;
+
+ this->length = length;
+ this->denseLen = denseLen;
+ this->vec = std::move(vec);
+
+ auto getComparatorKind = [](JSContext* cx, JSObject* comparator) {
+ if (!comparator->is<JSFunction>()) {
+ return ComparatorKind::Unoptimized;
+ }
+ JSFunction* fun = &comparator->as<JSFunction>();
+ if (!fun->hasJitEntry() || fun->isClassConstructor()) {
+ return ComparatorKind::Unoptimized;
+ }
+ if (fun->realm() == cx->realm() && fun->nargs() <= ComparatorActualArgs) {
+ return ComparatorKind::JSSameRealmNoRectifier;
+ }
+ return ComparatorKind::JS;
+ };
+ comparatorKind_ = getComparatorKind(cx(), comparator);
+}
+
+// This function handles sorting without a comparator function (or with a
+// trivial comparator function that we can pattern match) by calling
+// ArraySortWithoutComparator.
+//
+// If there's a non-trivial comparator function, it initializes the
+// ArraySortData struct for ArraySortData::sortWithComparator. This function
+// must be called next to perform the sorting.
+//
+// This is separate from ArraySortData::sortWithComparator because it lets the
+// compiler generate better code for ArraySortData::sortWithComparator.
+//
+// https://tc39.es/ecma262/#sec-array.prototype.sort
+// 23.1.3.30 Array.prototype.sort ( comparefn )
+static MOZ_ALWAYS_INLINE bool ArraySortPrologue(JSContext* cx,
+ Handle<Value> thisv,
+ Handle<Value> comparefn,
+ ArraySortData* d, bool* done) {
+ // Step 1.
+ if (MOZ_UNLIKELY(!comparefn.isUndefined() && !IsCallable(comparefn))) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_BAD_SORT_ARG);
+ return false;
+ }
+
+ // Step 2.
+ Rooted<JSObject*> obj(cx, ToObject(cx, thisv));
+ if (!obj) {
+ return false;
+ }
+
+ // Step 3.
+ uint64_t length;
+ if (MOZ_UNLIKELY(!GetLengthPropertyInlined(cx, obj, &length))) {
+ return false;
+ }
+
+ // Arrays with less than two elements remain unchanged when sorted.
+ if (length <= 1) {
+ d->setReturnValue(obj);
+ *done = true;
+ return true;
+ }
+
+ // Use a fast path if there's no comparator or if the comparator is a function
+ // that we can pattern match.
+ do {
+ ComparatorMatchResult comp = Match_None;
+ if (comparefn.isObject()) {
+ comp = MatchNumericComparator(cx, &comparefn.toObject());
+ if (comp == Match_Failure) {
+ return false;
+ }
+ if (comp == Match_None) {
+ // Pattern matching failed.
+ break;
+ }
+ }
+ if (!ArraySortWithoutComparator(cx, obj, length, comp)) {
+ return false;
+ }
+ d->setReturnValue(obj);
+ *done = true;
+ return true;
+ } while (false);
+
+ // Ensure length * 2 (used below) doesn't overflow UINT32_MAX.
+ if (MOZ_UNLIKELY(length > UINT32_MAX / 2)) {
+ ReportAllocationOverflow(cx);
+ return false;
+ }
+ uint32_t len = uint32_t(length);
+
+ // Merge sort requires extra scratch space.
+ bool needsScratchSpace = len >= ArraySortData::InsertionSortLimit;
+
+ Rooted<ArraySortData::ValueVector> vec(cx);
+ if (MOZ_UNLIKELY(!vec.reserve(needsScratchSpace ? (2 * len) : len))) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
+
+ // Append elements to the vector. Skip holes.
+ if (IsPackedArray(obj)) {
+ Handle<ArrayObject*> array = obj.as<ArrayObject>();
+ const Value* elements = array->getDenseElements();
+ vec.infallibleAppend(elements, len);
+ } else {
+ RootedValue v(cx);
+ for (uint32_t i = 0; i < len; i++) {
+ if (!CheckForInterrupt(cx)) {
+ return false;
+ }
+
+ bool hole;
+ if (!HasAndGetElement(cx, obj, i, &hole, &v)) {
+ return false;
+ }
+ if (hole) {
+ continue;
+ }
+ vec.infallibleAppend(v);
+ }
+ // If there are only holes, the object is already sorted.
+ if (vec.empty()) {
+ d->setReturnValue(obj);
+ *done = true;
+ return true;
+ }
+ }
+
+ uint32_t denseLen = vec.length();
+ if (needsScratchSpace) {
+ MOZ_ALWAYS_TRUE(vec.resize(denseLen * 2));
+ }
+ d->init(obj, &comparefn.toObject(), std::move(vec.get()), len, denseLen);
+
+ // Continue in ArraySortData::sortWithComparator.
+ MOZ_ASSERT(!*done);
+ return true;
+}
+
+static ArraySortResult CallComparatorSlow(ArraySortData* d, const Value& x,
+ const Value& y) {
+ JSContext* cx = d->cx();
+ FixedInvokeArgs<2> callArgs(cx);
+ callArgs[0].set(x);
+ callArgs[1].set(y);
+ Rooted<Value> comparefn(cx, ObjectValue(*d->comparator()));
+ Rooted<Value> rval(cx);
+ if (!js::Call(cx, comparefn, UndefinedHandleValue, callArgs, &rval)) {
+ return ArraySortResult::Failure;
+ }
+ d->setComparatorReturnValue(rval);
+ return ArraySortResult::Done;
+}
+
+static MOZ_ALWAYS_INLINE ArraySortResult
+MaybeYieldToComparator(ArraySortData* d, const Value& x, const Value& y) {
+ // https://tc39.es/ecma262/#sec-comparearrayelements
+ // 23.1.3.30.2 CompareArrayElements ( x, y, comparefn )
+
+ // Steps 1-2.
+ if (x.isUndefined()) {
+ d->setComparatorReturnValue(Int32Value(y.isUndefined() ? 0 : 1));
+ return ArraySortResult::Done;
+ }
+
+ // Step 3.
+ if (y.isUndefined()) {
+ d->setComparatorReturnValue(Int32Value(-1));
+ return ArraySortResult::Done;
+ }
+
+ // Step 4. Yield to the JIT trampoline (or js::array_sort) if the comparator
+ // is a JS function we can call more efficiently from JIT code.
+ auto kind = d->comparatorKind();
+ if (MOZ_LIKELY(kind != ArraySortData::ComparatorKind::Unoptimized)) {
+ d->setComparatorArgs(x, y);
+ return (kind == ArraySortData::ComparatorKind::JSSameRealmNoRectifier)
+ ? ArraySortResult::CallJSSameRealmNoRectifier
+ : ArraySortResult::CallJS;
+ }
+ return CallComparatorSlow(d, x, y);
+}
+
+static MOZ_ALWAYS_INLINE bool RvalIsLessOrEqual(ArraySortData* data,
+ bool* lessOrEqual) {
+ // https://tc39.es/ecma262/#sec-comparearrayelements
+ // 23.1.3.30.2 CompareArrayElements ( x, y, comparefn )
+
+ // Fast path for int32 return values.
+ Value rval = data->comparatorReturnValue();
+ if (MOZ_LIKELY(rval.isInt32())) {
+ *lessOrEqual = rval.toInt32() <= 0;
+ return true;
+ }
+
+ // Step 4.a.
+ Rooted<Value> rvalRoot(data->cx(), rval);
+ double d;
+ if (MOZ_UNLIKELY(!ToNumber(data->cx(), rvalRoot, &d))) {
+ return false;
+ }
+
+ // Step 4.b-c.
+ *lessOrEqual = std::isnan(d) ? true : (d <= 0);
+ return true;
+}
+
+static void CopyValues(Value* out, const Value* list, uint32_t start,
+ uint32_t end) {
+ for (uint32_t i = start; i <= end; i++) {
+ out[i] = list[i];
+ }
+}
+
+// static
+ArraySortResult ArraySortData::sortWithComparator(ArraySortData* d) {
+ JSContext* cx = d->cx();
+ auto& vec = d->vec;
+
+ // This function is like a generator that is called repeatedly from the JIT
+ // trampoline or js::array_sort. Resume the sorting algorithm where we left
+ // off before calling the comparator.
+ switch (d->state) {
+ case State::Initial:
+ break;
+ case State::InsertionSortCall1:
+ goto insertion_sort_call1;
+ case State::InsertionSortCall2:
+ goto insertion_sort_call2;
+ case State::MergeSortCall1:
+ goto merge_sort_call1;
+ case State::MergeSortCall2:
+ goto merge_sort_call2;
+ }
+
+ d->list = vec.begin();
+
+ // Use insertion sort for small arrays.
+ if (d->denseLen < InsertionSortLimit) {
+ for (d->i = 1; d->i < d->denseLen; d->i++) {
+ d->item = vec[d->i];
+ d->j = d->i - 1;
+ do {
+ {
+ ArraySortResult res = MaybeYieldToComparator(d, vec[d->j], d->item);
+ if (res != ArraySortResult::Done) {
+ d->state = State::InsertionSortCall1;
+ return res;
+ }
+ }
+ insertion_sort_call1:
+ bool lessOrEqual;
+ if (!RvalIsLessOrEqual(d, &lessOrEqual)) {
+ return ArraySortResult::Failure;
+ }
+ if (lessOrEqual) {
+ break;
+ }
+ vec[d->j + 1] = vec[d->j];
+ } while (d->j-- > 0);
+ vec[d->j + 1] = d->item;
+ }
+ } else {
+ static constexpr size_t InitialWindowSize = 4;
+
+ // Use insertion sort for initial ranges.
+ for (d->start = 0; d->start < d->denseLen - 1;
+ d->start += InitialWindowSize) {
+ d->end =
+ std::min<uint32_t>(d->start + InitialWindowSize - 1, d->denseLen - 1);
+ for (d->i = d->start + 1; d->i <= d->end; d->i++) {
+ d->item = vec[d->i];
+ d->j = d->i - 1;
+ do {
+ {
+ ArraySortResult res = MaybeYieldToComparator(d, vec[d->j], d->item);
+ if (res != ArraySortResult::Done) {
+ d->state = State::InsertionSortCall2;
+ return res;
+ }
+ }
+ insertion_sort_call2:
+ bool lessOrEqual;
+ if (!RvalIsLessOrEqual(d, &lessOrEqual)) {
+ return ArraySortResult::Failure;
+ }
+ if (lessOrEqual) {
+ break;
+ }
+ vec[d->j + 1] = vec[d->j];
+ } while (d->j-- > d->start);
+ vec[d->j + 1] = d->item;
+ }
+ }
+
+ // Merge sort. Set d->out to scratch space initially.
+ d->out = vec.begin() + d->denseLen;
+ for (d->windowSize = InitialWindowSize; d->windowSize < d->denseLen;
+ d->windowSize *= 2) {
+ for (d->start = 0; d->start < d->denseLen;
+ d->start += 2 * d->windowSize) {
+ // The midpoint between the two subarrays.
+ d->mid = d->start + d->windowSize - 1;
+
+ // To keep from going over the edge.
+ d->end = std::min<uint32_t>(d->start + 2 * d->windowSize - 1,
+ d->denseLen - 1);
+
+ // Merge comparator-sorted slices list[start..<=mid] and
+ // list[mid+1..<=end], storing the merged sequence in out[start..<=end].
+
+ // Skip lopsided runs to avoid doing useless work.
+ if (d->mid >= d->end) {
+ CopyValues(d->out, d->list, d->start, d->end);
+ continue;
+ }
+
+ // Skip calling the comparator if the sub-list is already sorted.
+ {
+ ArraySortResult res =
+ MaybeYieldToComparator(d, d->list[d->mid], d->list[d->mid + 1]);
+ if (res != ArraySortResult::Done) {
+ d->state = State::MergeSortCall1;
+ return res;
+ }
+ }
+ merge_sort_call1:
+ bool lessOrEqual;
+ if (!RvalIsLessOrEqual(d, &lessOrEqual)) {
+ return ArraySortResult::Failure;
+ }
+ if (lessOrEqual) {
+ CopyValues(d->out, d->list, d->start, d->end);
+ continue;
+ }
+
+ d->i = d->start;
+ d->j = d->mid + 1;
+ d->k = d->start;
+
+ while (d->i <= d->mid && d->j <= d->end) {
+ {
+ ArraySortResult res =
+ MaybeYieldToComparator(d, d->list[d->i], d->list[d->j]);
+ if (res != ArraySortResult::Done) {
+ d->state = State::MergeSortCall2;
+ return res;
+ }
+ }
+ merge_sort_call2:
+ bool lessOrEqual;
+ if (!RvalIsLessOrEqual(d, &lessOrEqual)) {
+ return ArraySortResult::Failure;
+ }
+ d->out[d->k++] = lessOrEqual ? d->list[d->i++] : d->list[d->j++];
+ }
+
+ // Empty out any remaining elements. Use local variables to let the
+ // compiler generate more efficient code.
+ Value* out = d->out;
+ Value* list = d->list;
+ uint32_t k = d->k;
+ uint32_t mid = d->mid;
+ uint32_t end = d->end;
+ for (uint32_t i = d->i; i <= mid; i++) {
+ out[k++] = list[i];
+ }
+ for (uint32_t j = d->j; j <= end; j++) {
+ out[k++] = list[j];
+ }
+ }
+
+ // Swap both lists.
+ std::swap(d->list, d->out);
+ }
+ }
+
+ // Copy elements to the array.
+ Rooted<JSObject*> obj(cx, d->obj_);
+ if (!SetArrayElements(cx, obj, 0, d->denseLen, d->list)) {
+ return ArraySortResult::Failure;
+ }
+
+ // Re-create any holes that sorted to the end of the array.
+ for (uint32_t i = d->denseLen; i < d->length; i++) {
+ if (!CheckForInterrupt(cx) || !DeletePropertyOrThrow(cx, obj, i)) {
+ return ArraySortResult::Failure;
+ }
+ }
+
+ d->freeMallocData();
+ d->setReturnValue(obj);
+ return ArraySortResult::Done;
+}
+
+// https://tc39.es/ecma262/#sec-array.prototype.sort
+// 23.1.3.30 Array.prototype.sort ( comparefn )
+bool js::array_sort(JSContext* cx, unsigned argc, Value* vp) {
+ AutoJSMethodProfilerEntry pseudoFrame(cx, "Array.prototype", "sort");
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ // If we have a comparator argument, use the JIT trampoline implementation
+ // instead. This avoids a performance cliff (especially with large arrays)
+ // because C++ => JIT calls are much slower than Trampoline => JIT calls.
+ if (args.hasDefined(0) && jit::IsBaselineInterpreterEnabled()) {
+ return CallTrampolineNativeJitCode(cx, jit::TrampolineNative::ArraySort,
+ args);
+ }
+
+ Rooted<ArraySortData> data(cx, cx);
+
+  // On all return paths other than ArraySortData::sortWithComparator
+  // returning Done, we call freeMallocData so that debug assertions don't
+  // fail. This matches the JIT trampoline, where we can't rely on C++
+  // destructors.
+ auto freeData =
+ mozilla::MakeScopeExit([&]() { data.get().freeMallocData(); });
+
+ bool done = false;
+ if (!ArraySortPrologue(cx, args.thisv(), args.get(0), data.address(),
+ &done)) {
+ return false;
+ }
+ if (done) {
+ args.rval().set(data.get().returnValue());
+ return true;
+ }
+
+ FixedInvokeArgs<2> callArgs(cx);
+ Rooted<Value> rval(cx);
+
+ while (true) {
+ ArraySortResult res = ArraySortData::sortWithComparator(data.address());
+ switch (res) {
+ case ArraySortResult::Failure:
+ return false;
+
+ case ArraySortResult::Done:
+ freeData.release();
+ args.rval().set(data.get().returnValue());
+ return true;
+
+ case ArraySortResult::CallJS:
+ case ArraySortResult::CallJSSameRealmNoRectifier:
+ MOZ_ASSERT(data.get().comparatorThisValue().isUndefined());
+ MOZ_ASSERT(&args[0].toObject() == data.get().comparator());
+ callArgs[0].set(data.get().comparatorArg(0));
+ callArgs[1].set(data.get().comparatorArg(1));
+ if (!js::Call(cx, args[0], UndefinedHandleValue, callArgs, &rval)) {
+ return false;
+ }
+ data.get().setComparatorReturnValue(rval);
+ break;
+ }
+ }
+}
+
+ArraySortResult js::ArraySortFromJit(JSContext* cx,
+ jit::TrampolineNativeFrameLayout* frame) {
+ // Initialize the ArraySortData class stored in the trampoline frame.
+ void* dataUninit = frame->getFrameData<ArraySortData>();
+ auto* data = new (dataUninit) ArraySortData(cx);
+
+ Rooted<Value> thisv(cx, frame->thisv());
+ Rooted<Value> comparefn(cx);
+ if (frame->numActualArgs() > 0) {
+ comparefn = frame->actualArgs()[0];
+ }
+
+ bool done = false;
+ if (!ArraySortPrologue(cx, thisv, comparefn, data, &done)) {
+ return ArraySortResult::Failure;
+ }
+ if (done) {
+ data->freeMallocData();
+ return ArraySortResult::Done;
+ }
+
+ return ArraySortData::sortWithComparator(data);
+}
+
+ArraySortData::ArraySortData(JSContext* cx) : cx_(cx) {
+#ifdef DEBUG
+ cx_->liveArraySortDataInstances++;
+#endif
+}
+
+void ArraySortData::freeMallocData() {
+ vec.clearAndFree();
+#ifdef DEBUG
+ MOZ_ASSERT(cx_->liveArraySortDataInstances > 0);
+ cx_->liveArraySortDataInstances--;
+#endif
+}
+
+void ArraySortData::trace(JSTracer* trc) {
+ TraceNullableRoot(trc, &comparator_, "comparator_");
+ TraceRoot(trc, &thisv, "thisv");
+ TraceRoot(trc, &callArgs[0], "callArgs0");
+ TraceRoot(trc, &callArgs[1], "callArgs1");
+ vec.trace(trc);
+ TraceRoot(trc, &item, "item");
+ TraceNullableRoot(trc, &obj_, "obj");
+}
+
bool js::NewbornArrayPush(JSContext* cx, HandleObject obj, const Value& v) {
Handle<ArrayObject*> arr = obj.as<ArrayObject>();
@@ -4846,7 +5314,7 @@ static const JSFunctionSpec array_methods[] = {
/* Perl-ish methods. */
JS_INLINABLE_FN("join", array_join, 1, 0, ArrayJoin),
JS_FN("reverse", array_reverse, 0, 0),
- JS_SELF_HOSTED_FN("sort", "ArraySort", 1, 0),
+ JS_TRAMPOLINE_FN("sort", array_sort, 1, 0, ArraySort),
JS_INLINABLE_FN("push", array_push, 1, 0, ArrayPush),
JS_INLINABLE_FN("pop", array_pop, 0, 0, ArrayPop),
JS_INLINABLE_FN("shift", array_shift, 0, 0, ArrayShift),
diff --git a/js/src/builtin/Array.h b/js/src/builtin/Array.h
index f0de034998..f861505e7b 100644
--- a/js/src/builtin/Array.h
+++ b/js/src/builtin/Array.h
@@ -15,6 +15,10 @@
namespace js {
+namespace jit {
+class TrampolineNativeFrameLayout;
+}
+
class ArrayObject;
MOZ_ALWAYS_INLINE bool IdIsIndex(jsid id, uint32_t* indexp) {
@@ -107,16 +111,12 @@ extern bool GetElements(JSContext* cx, HandleObject aobj, uint32_t length,
/* Natives exposed for optimization by the interpreter and JITs. */
-extern bool intrinsic_ArrayNativeSort(JSContext* cx, unsigned argc,
- js::Value* vp);
-
extern bool array_includes(JSContext* cx, unsigned argc, js::Value* vp);
extern bool array_indexOf(JSContext* cx, unsigned argc, js::Value* vp);
extern bool array_lastIndexOf(JSContext* cx, unsigned argc, js::Value* vp);
-
extern bool array_pop(JSContext* cx, unsigned argc, js::Value* vp);
-
extern bool array_join(JSContext* cx, unsigned argc, js::Value* vp);
+extern bool array_sort(JSContext* cx, unsigned argc, js::Value* vp);
extern void ArrayShiftMoveElements(ArrayObject* arr);
@@ -172,6 +172,147 @@ extern bool ArrayLengthGetter(JSContext* cx, HandleObject obj, HandleId id,
extern bool ArrayLengthSetter(JSContext* cx, HandleObject obj, HandleId id,
HandleValue v, ObjectOpResult& result);
+// Note: we use uint32_t because the JIT code uses branch32.
+enum class ArraySortResult : uint32_t {
+ Failure,
+ Done,
+ CallJS,
+ CallJSSameRealmNoRectifier
+};
+
+// We use a JIT trampoline to optimize sorting with a comparator function. The
+// trampoline frame has an ArraySortData instance that contains all state used
+// by the sorting algorithm. The sorting algorithm is implemented as a C++
+// "generator" that can yield to the trampoline to perform a fast JIT => JIT
+// call to the comparator function. When the comparator function returns, the
+// trampoline calls back into C++ to resume the sorting algorithm.
+//
+// ArraySortData stores the JS Values in a js::Vector. To ensure we don't leak
+// its memory, we have debug assertions to check that for each C++ constructor
+// call we call |freeMallocData| exactly once. C++ code calls |freeMallocData|
+// when it's done sorting and the JIT exception handler calls it when unwinding
+// the trampoline frame.
+class ArraySortData {
+ public:
+ enum class ComparatorKind : uint8_t {
+ Unoptimized,
+ JS,
+ JSSameRealmNoRectifier,
+ };
+
+ static constexpr size_t ComparatorActualArgs = 2;
+
+ // Insertion sort is used if the length is < InsertionSortLimit.
+ static constexpr size_t InsertionSortLimit = 24;
+
+ using ValueVector = GCVector<Value, 8, SystemAllocPolicy>;
+
+ protected: // Silence Clang warning about unused private fields.
+ // Data for the comparator call. These fields must match the JitFrameLayout
+ // to let us perform efficient calls to the comparator from JIT code.
+ // This is asserted in the JIT trampoline code.
+ // callArgs[0] is also used to store the return value of the sort function and
+ // the comparator.
+ uintptr_t descriptor_;
+ JSObject* comparator_ = nullptr;
+ Value thisv;
+ Value callArgs[ComparatorActualArgs];
+
+ private:
+ ValueVector vec;
+ Value item;
+ JSContext* cx_;
+ JSObject* obj_ = nullptr;
+
+ Value* list;
+ Value* out;
+
+ // The value of the .length property.
+ uint32_t length;
+
+ // The number of items to sort. Can be less than |length| if the object has
+ // holes.
+ uint32_t denseLen;
+
+ uint32_t windowSize;
+ uint32_t start;
+ uint32_t mid;
+ uint32_t end;
+ uint32_t i, j, k;
+
+ // The state value determines where we resume in sortWithComparator.
+ enum class State : uint8_t {
+ Initial,
+ InsertionSortCall1,
+ InsertionSortCall2,
+ MergeSortCall1,
+ MergeSortCall2
+ };
+ State state = State::Initial;
+ ComparatorKind comparatorKind_;
+
+ // Optional padding to ensure proper alignment of the comparator JIT frame.
+#if !defined(JS_64BIT) && !defined(DEBUG)
+ protected: // Silence Clang warning about unused private field.
+ size_t padding;
+#endif
+
+ public:
+ explicit ArraySortData(JSContext* cx);
+
+ void MOZ_ALWAYS_INLINE init(JSObject* obj, JSObject* comparator,
+ ValueVector&& vec, uint32_t length,
+ uint32_t denseLen);
+
+ JSContext* cx() const { return cx_; }
+
+ JSObject* comparator() const {
+ MOZ_ASSERT(comparator_);
+ return comparator_;
+ }
+
+ Value returnValue() const { return callArgs[0]; }
+ void setReturnValue(JSObject* obj) { callArgs[0].setObject(*obj); }
+
+ Value comparatorArg(size_t index) {
+ MOZ_ASSERT(index < ComparatorActualArgs);
+ return callArgs[index];
+ }
+ Value comparatorThisValue() const { return thisv; }
+ Value comparatorReturnValue() const { return callArgs[0]; }
+ void setComparatorArgs(const Value& x, const Value& y) {
+ callArgs[0] = x;
+ callArgs[1] = y;
+ }
+ void setComparatorReturnValue(const Value& v) { callArgs[0] = v; }
+
+ ComparatorKind comparatorKind() const { return comparatorKind_; }
+
+ static ArraySortResult sortWithComparator(ArraySortData* d);
+
+ void freeMallocData();
+ void trace(JSTracer* trc);
+
+ static constexpr int32_t offsetOfDescriptor() {
+ return offsetof(ArraySortData, descriptor_);
+ }
+ static constexpr int32_t offsetOfComparator() {
+ return offsetof(ArraySortData, comparator_);
+ }
+ static constexpr int32_t offsetOfComparatorReturnValue() {
+ return offsetof(ArraySortData, callArgs[0]);
+ }
+ static constexpr int32_t offsetOfComparatorThis() {
+ return offsetof(ArraySortData, thisv);
+ }
+ static constexpr int32_t offsetOfComparatorArgs() {
+ return offsetof(ArraySortData, callArgs);
+ }
+};
+
+extern ArraySortResult ArraySortFromJit(
+ JSContext* cx, jit::TrampolineNativeFrameLayout* frame);
+
class MOZ_NON_TEMPORARY_CLASS ArraySpeciesLookup final {
/*
* An ArraySpeciesLookup holds the following:
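
The ComparatorKind distinction above boils down to the classification sketched below. The FunctionInfo fields are stand-ins for the JSFunction queries (hasJitEntry, isClassConstructor, realm and nargs checks) performed in ArraySortData::init; they are not real API.

// Illustrative-only model of the comparator classification in ArraySortData::init.
enum class ComparatorKind { Unoptimized, JS, JSSameRealmNoRectifier };

constexpr unsigned ComparatorActualArgs = 2;

struct FunctionInfo {
  bool isJSFunction;        // stands in for comparator->is<JSFunction>()
  bool hasJitEntry;         // stands in for fun->hasJitEntry()
  bool isClassConstructor;  // stands in for fun->isClassConstructor()
  bool sameRealmAsCaller;   // stands in for fun->realm() == cx->realm()
  unsigned nargs;           // stands in for fun->nargs()
};

ComparatorKind ClassifyComparator(const FunctionInfo& fun) {
  if (!fun.isJSFunction || !fun.hasJitEntry || fun.isClassConstructor) {
    return ComparatorKind::Unoptimized;  // handled via the generic js::Call path
  }
  if (fun.sameRealmAsCaller && fun.nargs <= ComparatorActualArgs) {
    return ComparatorKind::JSSameRealmNoRectifier;  // no arguments rectifier needed
  }
  return ComparatorKind::JS;
}

Unoptimized comparators are invoked through CallComparatorSlow, while the other two kinds make sortWithComparator yield CallJS or CallJSSameRealmNoRectifier so that the trampoline (or the loop in js::array_sort) can perform the call.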
diff --git a/js/src/builtin/Array.js b/js/src/builtin/Array.js
index f831615041..b62edef7e9 100644
--- a/js/src/builtin/Array.js
+++ b/js/src/builtin/Array.js
@@ -76,85 +76,6 @@ function ArraySome(callbackfn /*, thisArg*/) {
// Inlining this enables inlining of the callback function.
SetIsInlinableLargeFunction(ArraySome);
-// ES2023 draft rev cb4224156c54156f30c18c50784c1b0148ebfae5
-// 23.1.3.30 Array.prototype.sort ( comparefn )
-function ArraySortCompare(comparefn) {
- return function(x, y) {
- // Steps 4.a-c.
- if (x === undefined) {
- if (y === undefined) {
- return 0;
- }
- return 1;
- }
- if (y === undefined) {
- return -1;
- }
-
- // Step 4.d.i.
- var v = ToNumber(callContentFunction(comparefn, undefined, x, y));
-
- // Steps 4.d.ii-iii.
- return v !== v ? 0 : v;
- };
-}
-
-// ES2023 draft rev cb4224156c54156f30c18c50784c1b0148ebfae5
-// 23.1.3.30 Array.prototype.sort ( comparefn )
-function ArraySort(comparefn) {
- // Step 1.
- if (comparefn !== undefined) {
- if (!IsCallable(comparefn)) {
- ThrowTypeError(JSMSG_BAD_SORT_ARG);
- }
- }
-
- // Step 2.
- var O = ToObject(this);
-
- // First try to sort the array in native code, if that fails, indicated by
- // returning |false| from ArrayNativeSort, sort it in self-hosted code.
- if (callFunction(ArrayNativeSort, O, comparefn)) {
- return O;
- }
-
- // Step 3.
- var len = ToLength(O.length);
-
- // Arrays with less than two elements remain unchanged when sorted.
- if (len <= 1) {
- return O;
- }
-
- // Step 4.
- var wrappedCompareFn = ArraySortCompare(comparefn);
-
- // Step 5.
- // To save effort we will do all of our work on a dense list, then create
- // holes at the end.
- var denseList = [];
- var denseLen = 0;
-
- for (var i = 0; i < len; i++) {
- if (i in O) {
- DefineDataProperty(denseList, denseLen++, O[i]);
- }
- }
-
- if (denseLen < 1) {
- return O;
- }
-
- var sorted = MergeSort(denseList, denseLen, wrappedCompareFn);
-
- assert(IsPackedArray(sorted), "sorted is a packed array");
- assert(sorted.length === denseLen, "sorted array has the correct length");
-
- MoveHoles(O, len, sorted, denseLen);
-
- return O;
-}
-
/* ES5 15.4.4.18. */
function ArrayForEach(callbackfn /*, thisArg*/) {
/* Step 1. */
@@ -1335,22 +1256,8 @@ function ArrayToSorted(comparefn) {
return items;
}
- // First try to sort the array in native code, if that fails, indicated by
- // returning |false| from ArrayNativeSort, sort it in self-hosted code.
- if (callFunction(ArrayNativeSort, items, comparefn)) {
- return items;
- }
-
- // Step 5.
- var wrappedCompareFn = ArraySortCompare(comparefn);
-
- // Steps 6-9.
- var sorted = MergeSort(items, len, wrappedCompareFn);
-
- assert(IsPackedArray(sorted), "sorted is a packed array");
- assert(sorted.length === len, "sorted array has the correct length");
-
- return sorted;
+ // Steps 5-9.
+ return callFunction(std_Array_sort, items, comparefn);
}
// https://github.com/tc39/proposal-array-find-from-last
diff --git a/js/src/builtin/JSON.cpp b/js/src/builtin/JSON.cpp
index dbfab8b43a..1423595937 100644
--- a/js/src/builtin/JSON.cpp
+++ b/js/src/builtin/JSON.cpp
@@ -20,15 +20,18 @@
#include "builtin/Array.h"
#include "builtin/BigInt.h"
#include "builtin/ParseRecordObject.h"
+#include "builtin/RawJSONObject.h"
#include "js/friend/ErrorMessages.h" // js::GetErrorMessage, JSMSG_*
#include "js/friend/StackLimits.h" // js::AutoCheckRecursionLimit
#include "js/Object.h" // JS::GetBuiltinClass
+#include "js/Prefs.h" // JS::Prefs
#include "js/PropertySpec.h"
#include "js/StableStringChars.h"
#include "js/TypeDecls.h"
#include "js/Value.h"
#include "util/StringBuffer.h"
-#include "vm/BooleanObject.h" // js::BooleanObject
+#include "vm/BooleanObject.h" // js::BooleanObject
+#include "vm/EqualityOperations.h" // js::SameValue
#include "vm/Interpreter.h"
#include "vm/Iteration.h"
#include "vm/JSAtomUtils.h" // ToAtom
@@ -436,6 +439,16 @@ class CycleDetector {
bool appended_;
};
+static inline JSString* MaybeGetRawJSON(JSContext* cx, JSObject* obj) {
+ if (!obj->is<RawJSONObject>()) {
+ return nullptr;
+ }
+
+ JSString* rawJSON = obj->as<js::RawJSONObject>().rawJSON(cx);
+ MOZ_ASSERT(rawJSON);
+ return rawJSON;
+}
+
#ifdef ENABLE_RECORD_TUPLE
enum class JOType { Record, Object };
template <JOType type = JOType::Object>
@@ -783,6 +796,12 @@ static bool SerializeJSONProperty(JSContext* cx, const Value& v,
MOZ_ASSERT(v.hasObjectPayload());
RootedObject obj(cx, &v.getObjectPayload());
+ /* https://tc39.es/proposal-json-parse-with-source/#sec-serializejsonproperty
+ * Step 4a.*/
+ if (JSString* rawJSON = MaybeGetRawJSON(cx, obj)) {
+ return scx->sb.append(rawJSON);
+ }
+
MOZ_ASSERT(
!scx->maybeSafely || obj->is<PlainObject>() || obj->is<ArrayObject>(),
"input to JS::ToJSONMaybeSafely must not include reachable "
@@ -1233,6 +1252,10 @@ static bool FastSerializeJSONProperty(JSContext* cx, Handle<Value> v,
MOZ_ASSERT(*whySlow == BailReason::NO_REASON);
MOZ_ASSERT(v.isObject());
+ if (JSString* rawJSON = MaybeGetRawJSON(cx, &v.toObject())) {
+ return scx->sb.append(rawJSON);
+ }
+
/*
* FastSerializeJSONProperty is an optimistic fast path for the
* SerializeJSONProperty algorithm that applies in limited situations. It
@@ -1356,19 +1379,24 @@ static bool FastSerializeJSONProperty(JSContext* cx, Handle<Value> v,
}
if (val.isObject()) {
- if (stack.length() >= MAX_STACK_DEPTH - 1) {
- *whySlow = BailReason::DEEP_RECURSION;
- return true;
+ if (JSString* rawJSON = MaybeGetRawJSON(cx, &val.toObject())) {
+ if (!scx->sb.append(rawJSON)) {
+ return false;
+ }
+ } else {
+ if (stack.length() >= MAX_STACK_DEPTH - 1) {
+ *whySlow = BailReason::DEEP_RECURSION;
+ return true;
+ }
+ // Save the current iterator position on the stack and
+ // switch to processing the nested value.
+ stack.infallibleAppend(std::move(top));
+ top = FastStackEntry(&val.toObject().as<NativeObject>());
+ wroteMember = false;
+ nestedObject = true; // Break out to the outer loop.
+ break;
}
- // Save the current iterator position on the stack and
- // switch to processing the nested value.
- stack.infallibleAppend(std::move(top));
- top = FastStackEntry(&val.toObject().as<NativeObject>());
- wroteMember = false;
- nestedObject = true; // Break out to the outer loop.
- break;
- }
- if (!EmitSimpleValue(cx, scx->sb, val)) {
+ } else if (!EmitSimpleValue(cx, scx->sb, val)) {
return false;
}
}
@@ -1433,19 +1461,24 @@ static bool FastSerializeJSONProperty(JSContext* cx, Handle<Value> v,
return false;
}
if (val.isObject()) {
- if (stack.length() >= MAX_STACK_DEPTH - 1) {
- *whySlow = BailReason::DEEP_RECURSION;
- return true;
+ if (JSString* rawJSON = MaybeGetRawJSON(cx, &val.toObject())) {
+ if (!scx->sb.append(rawJSON)) {
+ return false;
+ }
+ } else {
+ if (stack.length() >= MAX_STACK_DEPTH - 1) {
+ *whySlow = BailReason::DEEP_RECURSION;
+ return true;
+ }
+ // Save the current iterator position on the stack and
+ // switch to processing the nested value.
+ stack.infallibleAppend(std::move(top));
+ top = FastStackEntry(&val.toObject().as<NativeObject>());
+ wroteMember = false;
+ nesting = true; // Break out to the outer loop.
+ break;
}
- // Save the current iterator position on the stack and
- // switch to processing the nested value.
- stack.infallibleAppend(std::move(top));
- top = FastStackEntry(&val.toObject().as<NativeObject>());
- wroteMember = false;
- nesting = true; // Break out to the outer loop.
- break;
- }
- if (!EmitSimpleValue(cx, scx->sb, val)) {
+ } else if (!EmitSimpleValue(cx, scx->sb, val)) {
return false;
}
}
@@ -1733,6 +1766,41 @@ static bool InternalizeJSONProperty(
return false;
}
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ RootedObject context(cx);
+ Rooted<UniquePtr<ParseRecordObject::EntryMap>> entries(cx);
+ if (JS::Prefs::experimental_json_parse_with_source()) {
+ // https://tc39.es/proposal-json-parse-with-source/#sec-internalizejsonproperty
+ bool sameVal = false;
+ Rooted<Value> parsedValue(cx, parseRecord.get().value);
+ if (!SameValue(cx, parsedValue, val, &sameVal)) {
+ return false;
+ }
+ if (!parseRecord.get().isEmpty() && sameVal) {
+ if (parseRecord.get().parseNode) {
+ MOZ_ASSERT(!val.isObject());
+ Rooted<IdValueVector> props(cx, cx);
+ if (!props.emplaceBack(
+ IdValuePair(NameToId(cx->names().source),
+ StringValue(parseRecord.get().parseNode)))) {
+ return false;
+ }
+ context = NewPlainObjectWithUniqueNames(cx, props);
+ if (!context) {
+ return false;
+ }
+ }
+ entries = std::move(parseRecord.get().entries);
+ }
+ if (!context) {
+ context = NewPlainObject(cx);
+ if (!context) {
+ return false;
+ }
+ }
+ }
+#endif
+
/* Step 2. */
if (val.isObject()) {
RootedObject obj(cx, &val.toObject());
@@ -1763,6 +1831,13 @@ static bool InternalizeJSONProperty(
/* Step 2a(iii)(1). */
Rooted<ParseRecordObject> elementRecord(cx);
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ if (entries) {
+ if (auto entry = entries->lookup(id)) {
+ elementRecord = std::move(entry->value());
+ }
+ }
+#endif
if (!InternalizeJSONProperty(cx, obj, id, reviver, &elementRecord,
&newElement)) {
return false;
@@ -1804,6 +1879,13 @@ static bool InternalizeJSONProperty(
/* Step 2c(ii)(1). */
id = keys[i];
Rooted<ParseRecordObject> entryRecord(cx);
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ if (entries) {
+ if (auto entry = entries->lookup(id)) {
+ entryRecord = std::move(entry->value());
+ }
+ }
+#endif
if (!InternalizeJSONProperty(cx, obj, id, reviver, &entryRecord,
&newElement)) {
return false;
@@ -1838,15 +1920,7 @@ static bool InternalizeJSONProperty(
RootedValue keyVal(cx, StringValue(key));
#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
- if (cx->realm()->creationOptions().getJSONParseWithSource()) {
- RootedObject context(cx, NewPlainObject(cx));
- if (!context) {
- return false;
- }
- Rooted<Value> parseNode(cx, StringValue(parseRecord.get().parseNode));
- if (!DefineDataProperty(cx, context, cx->names().source, parseNode)) {
- return false;
- }
+ if (JS::Prefs::experimental_json_parse_with_source()) {
RootedValue contextVal(cx, ObjectValue(*context));
return js::Call(cx, reviver, holder, keyVal, val, contextVal, vp);
}
@@ -1867,7 +1941,7 @@ static bool Revive(JSContext* cx, HandleValue reviver,
}
#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
- MOZ_ASSERT_IF(cx->realm()->creationOptions().getJSONParseWithSource(),
+ MOZ_ASSERT_IF(JS::Prefs::experimental_json_parse_with_source(),
pro.get().value == vp.get());
#endif
Rooted<jsid> id(cx, NameToId(cx->names().empty_));
@@ -1876,10 +1950,10 @@ static bool Revive(JSContext* cx, HandleValue reviver,
template <typename CharT>
bool ParseJSON(JSContext* cx, const mozilla::Range<const CharT> chars,
- MutableHandleValue vp, MutableHandle<ParseRecordObject> pro) {
+ MutableHandleValue vp) {
Rooted<JSONParser<CharT>> parser(cx, cx, chars,
JSONParser<CharT>::ParseType::JSONParse);
- return parser.parse(vp, pro);
+ return parser.parse(vp);
}
template <typename CharT>
@@ -1888,7 +1962,15 @@ bool js::ParseJSONWithReviver(JSContext* cx,
HandleValue reviver, MutableHandleValue vp) {
/* https://262.ecma-international.org/14.0/#sec-json.parse steps 2-10. */
Rooted<ParseRecordObject> pro(cx);
- if (!ParseJSON(cx, chars, vp, &pro)) {
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ if (JS::Prefs::experimental_json_parse_with_source() && IsCallable(reviver)) {
+ Rooted<JSONReviveParser<CharT>> parser(cx, cx, chars);
+ if (!parser.get().parse(vp, &pro)) {
+ return false;
+ }
+ } else
+#endif
+ if (!ParseJSON(cx, chars, vp)) {
return false;
}
@@ -2086,14 +2168,13 @@ static bool json_parseImmutable(JSContext* cx, unsigned argc, Value* vp) {
HandleValue reviver = args.get(1);
RootedValue unfiltered(cx);
- Rooted<ParseRecordObject> pro(cx);
if (linearChars.isLatin1()) {
- if (!ParseJSON(cx, linearChars.latin1Range(), &unfiltered, &pro)) {
+ if (!ParseJSON(cx, linearChars.latin1Range(), &unfiltered)) {
return false;
}
} else {
- if (!ParseJSON(cx, linearChars.twoByteRange(), &unfiltered, &pro)) {
+ if (!ParseJSON(cx, linearChars.twoByteRange(), &unfiltered)) {
return false;
}
}
@@ -2103,6 +2184,104 @@ static bool json_parseImmutable(JSContext* cx, unsigned argc, Value* vp) {
}
#endif
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+/* https://tc39.es/proposal-json-parse-with-source/#sec-json.israwjson */
+static bool json_isRawJSON(JSContext* cx, unsigned argc, Value* vp) {
+ AutoJSMethodProfilerEntry pseudoFrame(cx, "JSON", "isRawJSON");
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ /* Step 1. */
+ if (args.get(0).isObject()) {
+ Rooted<JSObject*> obj(cx, &args[0].toObject());
+# ifdef DEBUG
+ if (obj->is<RawJSONObject>()) {
+ bool objIsFrozen = false;
+ MOZ_ASSERT(js::TestIntegrityLevel(cx, obj, IntegrityLevel::Frozen,
+ &objIsFrozen));
+ MOZ_ASSERT(objIsFrozen);
+ }
+# endif // DEBUG
+ args.rval().setBoolean(obj->is<RawJSONObject>());
+ return true;
+ }
+
+ /* Step 2. */
+ args.rval().setBoolean(false);
+ return true;
+}
+
+static inline bool IsJSONWhitespace(char16_t ch) {
+ return ch == '\t' || ch == '\n' || ch == '\r' || ch == ' ';
+}
+
+/* https://tc39.es/proposal-json-parse-with-source/#sec-json.rawjson */
+static bool json_rawJSON(JSContext* cx, unsigned argc, Value* vp) {
+ AutoJSMethodProfilerEntry pseudoFrame(cx, "JSON", "rawJSON");
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ /* Step 1. */
+ JSString* jsonString = ToString<CanGC>(cx, args.get(0));
+ if (!jsonString) {
+ return false;
+ }
+
+ Rooted<JSLinearString*> linear(cx, jsonString->ensureLinear(cx));
+ if (!linear) {
+ return false;
+ }
+
+ AutoStableStringChars linearChars(cx);
+ if (!linearChars.init(cx, linear)) {
+ return false;
+ }
+
+ /* Step 2. */
+ if (linear->empty()) {
+ JS_ReportErrorNumberASCII(cx, js::GetErrorMessage, nullptr,
+ JSMSG_JSON_RAW_EMPTY);
+ return false;
+ }
+ if (IsJSONWhitespace(linear->latin1OrTwoByteChar(0)) ||
+ IsJSONWhitespace(linear->latin1OrTwoByteChar(linear->length() - 1))) {
+ JS_ReportErrorNumberASCII(cx, js::GetErrorMessage, nullptr,
+ JSMSG_JSON_RAW_WHITESPACE);
+ return false;
+ }
+
+ /* Step 3. */
+ RootedValue parsedValue(cx);
+ if (linearChars.isLatin1()) {
+ if (!ParseJSON(cx, linearChars.latin1Range(), &parsedValue)) {
+ return false;
+ }
+ } else {
+ if (!ParseJSON(cx, linearChars.twoByteRange(), &parsedValue)) {
+ return false;
+ }
+ }
+
+ if (parsedValue.isObject()) {
+ JS_ReportErrorNumberASCII(cx, js::GetErrorMessage, nullptr,
+ JSMSG_JSON_RAW_ARRAY_OR_OBJECT);
+ return false;
+ }
+
+ /* Steps 4-6. */
+ Rooted<RawJSONObject*> obj(cx, RawJSONObject::create(cx, linear));
+ if (!obj) {
+ return false;
+ }
+
+ /* Step 7. */
+ if (!js::FreezeObject(cx, obj)) {
+ return false;
+ }
+
+ args.rval().setObject(*obj);
+ return true;
+}
+#endif // ENABLE_JSON_PARSE_WITH_SOURCE
+
/* https://262.ecma-international.org/14.0/#sec-json.stringify */
bool json_stringify(JSContext* cx, unsigned argc, Value* vp) {
AutoJSMethodProfilerEntry pseudoFrame(cx, "JSON", "stringify");
@@ -2141,11 +2320,16 @@ bool json_stringify(JSContext* cx, unsigned argc, Value* vp) {
}
static const JSFunctionSpec json_static_methods[] = {
- JS_FN("toSource", json_toSource, 0, 0), JS_FN("parse", json_parse, 2, 0),
+ JS_FN("toSource", json_toSource, 0, 0),
+ JS_FN("parse", json_parse, 2, 0),
JS_FN("stringify", json_stringify, 3, 0),
#ifdef ENABLE_RECORD_TUPLE
JS_FN("parseImmutable", json_parseImmutable, 2, 0),
#endif
+#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
+ JS_FN("isRawJSON", json_isRawJSON, 1, 0),
+ JS_FN("rawJSON", json_rawJSON, 1, 0),
+#endif
JS_FS_END};
static const JSPropertySpec json_static_properties[] = {
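
The raw-JSON splicing that MaybeGetRawJSON enables in the serializer fast paths reduces to the pattern below; this is a hedged, standalone model with invented types, not SpiderMonkey code.

#include <string>
#include <variant>

// A value may carry pre-validated JSON text (the RawJSONObject case); the
// serializer then emits that text verbatim instead of re-serializing the value
// (SerializeJSONProperty step 4a in the json-parse-with-source proposal).
struct RawJson {
  std::string text;  // already-valid JSON source text, e.g. "1.0000000000000001"
};
using JsonValue = std::variant<double, std::string, RawJson>;

void Serialize(const JsonValue& v, std::string& out) {
  if (const auto* raw = std::get_if<RawJson>(&v)) {
    out += raw->text;  // splice the stored text verbatim
  } else if (const auto* num = std::get_if<double>(&v)) {
    out += std::to_string(*num);
  } else {
    out += '"' + std::get<std::string>(v) + '"';  // no escaping; sketch only
  }
}

This splicing is what lets JSON.stringify(JSON.rawJSON("1.0000000000000001")) preserve the full digit string instead of round-tripping the value through a double.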
diff --git a/js/src/builtin/MapObject.cpp b/js/src/builtin/MapObject.cpp
index fd35b8e776..cd4ae7f46a 100644
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -333,23 +333,42 @@ size_t MapIteratorObject::objectMoved(JSObject* obj, JSObject* old) {
Nursery& nursery = iter->runtimeFromMainThread()->gc.nursery();
if (!nursery.isInside(range)) {
nursery.removeMallocedBufferDuringMinorGC(range);
- return 0;
}
+ size_t size = RoundUp(sizeof(ValueMap::Range), gc::CellAlignBytes);
AutoEnterOOMUnsafeRegion oomUnsafe;
- auto newRange = iter->zone()->new_<ValueMap::Range>(*range);
- if (!newRange) {
- oomUnsafe.crash(
- "MapIteratorObject failed to allocate Range data while tenuring.");
+ void* buffer = nursery.allocateBufferSameLocation(obj, size, js::MallocArena);
+ if (!buffer) {
+ oomUnsafe.crash("MapIteratorObject::objectMoved");
}
+ bool iteratorIsInNursery = IsInsideNursery(obj);
+ MOZ_ASSERT(iteratorIsInNursery == nursery.isInside(buffer));
+ auto* newRange = new (buffer) ValueMap::Range(*range, iteratorIsInNursery);
range->~Range();
iter->setReservedSlot(MapIteratorObject::RangeSlot, PrivateValue(newRange));
- return sizeof(ValueMap::Range);
+
+ if (iteratorIsInNursery && iter->target()) {
+ SetHasNurseryMemory(iter->target(), true);
+ }
+
+ return size;
+}
+
+MapObject* MapIteratorObject::target() const {
+ Value value = getFixedSlot(TargetSlot);
+ if (value.isUndefined()) {
+ return nullptr;
+ }
+
+ return &MaybeForwarded(&value.toObject())->as<MapObject>();
}
template <typename Range>
static void DestroyRange(JSObject* iterator, Range* range) {
+ MOZ_ASSERT(IsInsideNursery(iterator) ==
+ iterator->runtimeFromMainThread()->gc.nursery().isInside(range));
+
range->~Range();
if (!IsInsideNursery(iterator)) {
js_free(range);
@@ -533,7 +552,7 @@ void MapObject::trace(JSTracer* trc, JSObject* obj) {
}
}
-using NurseryKeysVector = mozilla::Vector<Value, 0, SystemAllocPolicy>;
+using NurseryKeysVector = GCVector<Value, 0, SystemAllocPolicy>;
template <typename TableObject>
static NurseryKeysVector* GetNurseryKeys(TableObject* t) {
@@ -577,17 +596,34 @@ class js::OrderedHashTableRef : public gc::BufferableRef {
reinterpret_cast<typename ObjectT::UnbarrieredTable*>(realTable);
NurseryKeysVector* keys = GetNurseryKeys(object);
MOZ_ASSERT(keys);
- for (Value key : *keys) {
- MOZ_ASSERT(unbarrieredTable->hash(key) ==
- realTable->hash(*reinterpret_cast<HashableValue*>(&key)));
- // Note: we use a lambda to avoid tenuring keys that have been removed
- // from the Map or Set.
- unbarrieredTable->rekeyOneEntry(key, [trc](const Value& prior) {
- Value key = prior;
- TraceManuallyBarrieredEdge(trc, &key, "ordered hash table key");
- return key;
- });
+
+ keys->mutableEraseIf([&](Value& key) {
+ MOZ_ASSERT(
+ unbarrieredTable->hash(key) ==
+ realTable->hash(*reinterpret_cast<const HashableValue*>(&key)));
+ MOZ_ASSERT(IsInsideNursery(key.toGCThing()));
+
+ auto result =
+ unbarrieredTable->rekeyOneEntry(key, [trc](const Value& prior) {
+ Value key = prior;
+ TraceManuallyBarrieredEdge(trc, &key, "ordered hash table key");
+ return key;
+ });
+
+ if (result.isNothing()) {
+ return true; // Key removed.
+ }
+
+ key = result.value();
+ return !IsInsideNursery(key.toGCThing());
+ });
+
+ if (!keys->empty()) {
+ trc->runtime()->gc.storeBuffer().putGeneric(
+ OrderedHashTableRef<ObjectT>(object));
+ return;
}
+
DeleteNurseryKeys(object);
}
};
@@ -739,6 +775,8 @@ void MapObject::finalize(JS::GCContext* gcx, JSObject* obj) {
return;
}
+ MOZ_ASSERT_IF(obj->isTenured(), !table->hasNurseryRanges());
+
bool needsPostBarriers = obj->isTenured();
if (needsPostBarriers) {
// Use the ValueMap representation which has post barriers.
@@ -750,21 +788,36 @@ void MapObject::finalize(JS::GCContext* gcx, JSObject* obj) {
}
}
+void MapObject::clearNurseryRangesBeforeMinorGC() {
+ getTableUnchecked()->destroyNurseryRanges();
+ SetHasNurseryMemory(this, false);
+}
+
/* static */
-void MapObject::sweepAfterMinorGC(JS::GCContext* gcx, MapObject* mapobj) {
- bool wasInsideNursery = IsInsideNursery(mapobj);
- if (wasInsideNursery && !IsForwarded(mapobj)) {
+MapObject* MapObject::sweepAfterMinorGC(JS::GCContext* gcx, MapObject* mapobj) {
+ Nursery& nursery = gcx->runtime()->gc.nursery();
+ bool wasInCollectedRegion = nursery.inCollectedRegion(mapobj);
+ if (wasInCollectedRegion && !IsForwarded(mapobj)) {
finalize(gcx, mapobj);
- return;
+ return nullptr;
}
mapobj = MaybeForwarded(mapobj);
- mapobj->getTableUnchecked()->destroyNurseryRanges();
- SetHasNurseryMemory(mapobj, false);
- if (wasInsideNursery) {
+ bool insideNursery = IsInsideNursery(mapobj);
+ if (insideNursery) {
+ SetHasNurseryMemory(mapobj, true);
+ }
+
+ if (wasInCollectedRegion && mapobj->isTenured()) {
AddCellMemory(mapobj, sizeof(ValueMap), MemoryUse::MapObjectTable);
}
+
+ if (!HasNurseryMemory(mapobj)) {
+ return nullptr;
+ }
+
+ return mapobj;
}
bool MapObject::construct(JSContext* cx, unsigned argc, Value* vp) {
@@ -1199,19 +1252,36 @@ size_t SetIteratorObject::objectMoved(JSObject* obj, JSObject* old) {
Nursery& nursery = iter->runtimeFromMainThread()->gc.nursery();
if (!nursery.isInside(range)) {
nursery.removeMallocedBufferDuringMinorGC(range);
- return 0;
}
+  size_t size = RoundUp(sizeof(ValueSet::Range), gc::CellAlignBytes);
AutoEnterOOMUnsafeRegion oomUnsafe;
- auto newRange = iter->zone()->new_<ValueSet::Range>(*range);
- if (!newRange) {
- oomUnsafe.crash(
- "SetIteratorObject failed to allocate Range data while tenuring.");
+ void* buffer = nursery.allocateBufferSameLocation(obj, size, js::MallocArena);
+ if (!buffer) {
+ oomUnsafe.crash("SetIteratorObject::objectMoved");
}
+ bool iteratorIsInNursery = IsInsideNursery(obj);
+ MOZ_ASSERT(iteratorIsInNursery == nursery.isInside(buffer));
+ auto* newRange = new (buffer) ValueSet::Range(*range, iteratorIsInNursery);
range->~Range();
iter->setReservedSlot(SetIteratorObject::RangeSlot, PrivateValue(newRange));
- return sizeof(ValueSet::Range);
+
+ if (iteratorIsInNursery && iter->target()) {
+ SetHasNurseryMemory(iter->target(), true);
+ }
+
+ return size;
+}
+
+SetObject* SetIteratorObject::target() const {
+ Value value = getFixedSlot(TargetSlot);
+ if (value.isUndefined()) {
+ return nullptr;
+ }
+
+ return &MaybeForwarded(&value.toObject())->as<SetObject>();
}
bool SetIteratorObject::next(SetIteratorObject* setIterator,
@@ -1449,25 +1519,41 @@ void SetObject::finalize(JS::GCContext* gcx, JSObject* obj) {
MOZ_ASSERT(gcx->onMainThread());
SetObject* setobj = static_cast<SetObject*>(obj);
if (ValueSet* set = setobj->getData()) {
+ MOZ_ASSERT_IF(obj->isTenured(), !set->hasNurseryRanges());
gcx->delete_(obj, set, MemoryUse::MapObjectTable);
}
}
+void SetObject::clearNurseryRangesBeforeMinorGC() {
+ getTableUnchecked()->destroyNurseryRanges();
+ SetHasNurseryMemory(this, false);
+}
+
/* static */
-void SetObject::sweepAfterMinorGC(JS::GCContext* gcx, SetObject* setobj) {
- bool wasInsideNursery = IsInsideNursery(setobj);
- if (wasInsideNursery && !IsForwarded(setobj)) {
+SetObject* SetObject::sweepAfterMinorGC(JS::GCContext* gcx, SetObject* setobj) {
+ Nursery& nursery = gcx->runtime()->gc.nursery();
+ bool wasInCollectedRegion = nursery.inCollectedRegion(setobj);
+ if (wasInCollectedRegion && !IsForwarded(setobj)) {
finalize(gcx, setobj);
- return;
+ return nullptr;
}
setobj = MaybeForwarded(setobj);
- setobj->getData()->destroyNurseryRanges();
- SetHasNurseryMemory(setobj, false);
- if (wasInsideNursery) {
+ bool insideNursery = IsInsideNursery(setobj);
+ if (insideNursery) {
+ SetHasNurseryMemory(setobj, true);
+ }
+
+ if (wasInCollectedRegion && setobj->isTenured()) {
AddCellMemory(setobj, sizeof(ValueSet), MemoryUse::MapObjectTable);
}
+
+ if (!HasNurseryMemory(setobj)) {
+ return nullptr;
+ }
+
+ return setobj;
}
bool SetObject::isBuiltinAdd(HandleValue add) {
diff --git a/js/src/builtin/MapObject.h b/js/src/builtin/MapObject.h
index ef37b9912e..d47797eff7 100644
--- a/js/src/builtin/MapObject.h
+++ b/js/src/builtin/MapObject.h
@@ -168,7 +168,14 @@ class MapObject : public NativeObject {
OrderedHashMap<Value, Value, UnbarrieredHashPolicy, CellAllocPolicy>;
friend class OrderedHashTableRef<MapObject>;
- static void sweepAfterMinorGC(JS::GCContext* gcx, MapObject* mapobj);
+ void clearNurseryRangesBeforeMinorGC();
+
+ // Sweeps a map that had nursery memory associated with it after a minor
+ // GC. This may finalize the map if it was in the nursery and has died.
+ //
+ // Returns a pointer to the map if it still has nursery memory associated with
+ // it, or nullptr.
+ static MapObject* sweepAfterMinorGC(JS::GCContext* gcx, MapObject* mapobj);
size_t sizeOfData(mozilla::MallocSizeOf mallocSizeOf);
@@ -276,6 +283,7 @@ class MapIteratorObject : public NativeObject {
private:
inline MapObject::IteratorKind kind() const;
+ MapObject* target() const;
};
class SetObject : public NativeObject {
@@ -326,7 +334,14 @@ class SetObject : public NativeObject {
OrderedHashSet<Value, UnbarrieredHashPolicy, CellAllocPolicy>;
friend class OrderedHashTableRef<SetObject>;
- static void sweepAfterMinorGC(JS::GCContext* gcx, SetObject* setobj);
+ void clearNurseryRangesBeforeMinorGC();
+
+ // Sweeps a set that had nursery memory associated with it after a minor
+ // GC. This may finalize the set if it was in the nursery and has died.
+ //
+ // Returns a pointer to the set if it still has nursery memory associated with
+ // it, or nullptr.
+ static SetObject* sweepAfterMinorGC(JS::GCContext* gcx, SetObject* setobj);
size_t sizeOfData(mozilla::MallocSizeOf mallocSizeOf);
@@ -414,6 +429,7 @@ class SetIteratorObject : public NativeObject {
private:
inline SetObject::IteratorKind kind() const;
+ SetObject* target() const;
};
using SetInitGetPrototypeOp = NativeObject* (*)(JSContext*,
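
The changed sweepAfterMinorGC contract (return the object while it still holds nursery memory, nullptr once it does not or once it has been finalized) lends itself to the caller pattern sketched below; the types and container are hypothetical stand-ins, not the real GC bookkeeping.

#include <utility>
#include <vector>

struct FakeGCContext {};

struct FakeMapObject {
  bool diedInNursery = false;
  bool stillHasNurseryMemory = false;

  // Mirrors the contract documented above: returns the (possibly moved) object
  // if it still has nursery memory associated with it, otherwise nullptr.
  static FakeMapObject* sweepAfterMinorGC(FakeGCContext*, FakeMapObject* obj) {
    if (obj->diedInNursery) {
      return nullptr;  // finalized; must not be tracked any longer
    }
    return obj->stillHasNurseryMemory ? obj : nullptr;
  }
};

// Sweep every object that had nursery memory and keep only those that still do.
void SweepMapsWithNurseryMemory(FakeGCContext* gcx,
                                std::vector<FakeMapObject*>& tracked) {
  std::vector<FakeMapObject*> stillTracked;
  for (FakeMapObject* obj : tracked) {
    if (FakeMapObject* survivor = FakeMapObject::sweepAfterMinorGC(gcx, obj)) {
      stillTracked.push_back(survivor);
    }
  }
  tracked = std::move(stillTracked);
}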
diff --git a/js/src/builtin/ModuleObject.cpp b/js/src/builtin/ModuleObject.cpp
index b9db9bf02d..0365f744a6 100644
--- a/js/src/builtin/ModuleObject.cpp
+++ b/js/src/builtin/ModuleObject.cpp
@@ -1109,6 +1109,17 @@ JSScript* ModuleObject::script() const {
return ptr;
}
+const char* ModuleObject::filename() const {
+ // The ScriptSlot will be cleared once the module is evaluated, so we try to
+ // get the filename from cyclicModuleFields().
+
+ // TODO: Bug 1885483: Provide filename for JSON modules
+ if (!hasCyclicModuleFields()) {
+ return "(JSON module)";
+ }
+ return cyclicModuleFields()->scriptSourceObject->source()->filename();
+}
+
static inline void AssertValidModuleStatus(ModuleStatus status) {
MOZ_ASSERT(status >= ModuleStatus::Unlinked &&
status <= ModuleStatus::Evaluated_Error);
diff --git a/js/src/builtin/ModuleObject.h b/js/src/builtin/ModuleObject.h
index d39d65b18c..cb74d67c40 100644
--- a/js/src/builtin/ModuleObject.h
+++ b/js/src/builtin/ModuleObject.h
@@ -360,6 +360,7 @@ class ModuleObject : public NativeObject {
JSScript* maybeScript() const;
JSScript* script() const;
+ const char* filename() const;
ModuleEnvironmentObject& initialEnvironment() const;
ModuleEnvironmentObject* environment() const;
ModuleNamespaceObject* namespace_();
diff --git a/js/src/builtin/ParseRecordObject.cpp b/js/src/builtin/ParseRecordObject.cpp
index a453c30c0e..efb05d1845 100644
--- a/js/src/builtin/ParseRecordObject.cpp
+++ b/js/src/builtin/ParseRecordObject.cpp
@@ -19,8 +19,24 @@ ParseRecordObject::ParseRecordObject(Handle<js::JSONParseNode*> parseNode,
const Value& val)
: parseNode(parseNode), key(JS::PropertyKey::Void()), value(val) {}
+bool ParseRecordObject::addEntries(JSContext* cx, EntryMap&& appendEntries) {
+ if (!entries) {
+ entries = js::MakeUnique<EntryMap>(std::move(appendEntries));
+ return !!entries;
+ }
+ for (auto iter = appendEntries.iter(); !iter.done(); iter.next()) {
+ if (!entries->put(iter.get().key(), std::move(iter.get().value()))) {
+ return false;
+ }
+ }
+ return true;
+}
+
void ParseRecordObject::trace(JSTracer* trc) {
JS::TraceRoot(trc, &parseNode, "ParseRecordObject parse node");
JS::TraceRoot(trc, &key, "ParseRecordObject key");
JS::TraceRoot(trc, &value, "ParseRecordObject value");
+ if (entries) {
+ entries->trace(trc);
+ }
}
diff --git a/js/src/builtin/ParseRecordObject.h b/js/src/builtin/ParseRecordObject.h
index 60a902f19b..7e3176a23f 100644
--- a/js/src/builtin/ParseRecordObject.h
+++ b/js/src/builtin/ParseRecordObject.h
@@ -7,7 +7,8 @@
#ifndef builtin_ParseRecordObject_h
#define builtin_ParseRecordObject_h
-#include "js/TraceKind.h"
+#include "js/HashTable.h"
+#include "js/TracingAPI.h"
#include "vm/JSContext.h"
namespace js {
@@ -16,24 +17,40 @@ using JSONParseNode = JSString;
class ParseRecordObject {
public:
+ using EntryMap = js::GCHashMap<PropertyKey, ParseRecordObject>;
+
+ // The source text that was parsed for this record. According to the spec, we
+ // don't track this for objects and arrays, so it will be a null pointer.
JSONParseNode* parseNode;
+ // For object members, the member key. For arrays, the index. For JSON
+ // primitives, it will be undefined.
JS::PropertyKey key;
+ // The original value corresponding to this record, used to determine if the
+ // reviver function has modified it.
Value value;
+ // For objects and arrays, the records for the members and elements
+ // (respectively). If there are none, or for JSON primitives, we won't
+ // allocate an EntryMap.
+ UniquePtr<EntryMap> entries;
ParseRecordObject();
ParseRecordObject(Handle<js::JSONParseNode*> parseNode, const Value& val);
ParseRecordObject(ParseRecordObject&& other)
: parseNode(std::move(other.parseNode)),
key(std::move(other.key)),
- value(std::move(other.value)) {}
+ value(std::move(other.value)),
+ entries(std::move(other.entries)) {}
bool isEmpty() const { return value.isUndefined(); }
+ bool addEntries(JSContext* cx, EntryMap&& appendEntries);
+
// move assignment
ParseRecordObject& operator=(ParseRecordObject&& other) noexcept {
parseNode = other.parseNode;
key = other.key;
value = other.value;
+ entries = std::move(other.entries);
return *this;
}
diff --git a/js/src/builtin/Promise.cpp b/js/src/builtin/Promise.cpp
index bd40add77f..92e72b9cb9 100644
--- a/js/src/builtin/Promise.cpp
+++ b/js/src/builtin/Promise.cpp
@@ -940,13 +940,7 @@ static JSFunction* GetRejectFunctionFromResolve(JSFunction* resolve);
/**
* Returns Promise Resolve Function's [[AlreadyResolved]].[[Value]].
*/
-static bool IsAlreadyResolvedMaybeWrappedResolveFunction(
- JSObject* resolveFunObj) {
- if (IsWrapper(resolveFunObj)) {
- resolveFunObj = UncheckedUnwrap(resolveFunObj);
- }
-
- JSFunction* resolveFun = &resolveFunObj->as<JSFunction>();
+static bool IsAlreadyResolvedResolveFunction(JSFunction* resolveFun) {
MOZ_ASSERT(resolveFun->maybeNative() == ResolvePromiseFunction);
bool alreadyResolved =
@@ -970,13 +964,7 @@ static bool IsAlreadyResolvedMaybeWrappedResolveFunction(
/**
* Returns Promise Reject Function's [[AlreadyResolved]].[[Value]].
*/
-static bool IsAlreadyResolvedMaybeWrappedRejectFunction(
- JSObject* rejectFunObj) {
- if (IsWrapper(rejectFunObj)) {
- rejectFunObj = UncheckedUnwrap(rejectFunObj);
- }
-
- JSFunction* rejectFun = &rejectFunObj->as<JSFunction>();
+static bool IsAlreadyResolvedRejectFunction(JSFunction* rejectFun) {
MOZ_ASSERT(rejectFun->maybeNative() == RejectPromiseFunction);
bool alreadyResolved =
@@ -1023,8 +1011,8 @@ static void SetAlreadyResolvedResolutionFunction(JSFunction* resolutionFun) {
reject->setExtendedSlot(RejectFunctionSlot_Promise, UndefinedValue());
reject->setExtendedSlot(RejectFunctionSlot_ResolveFunction, UndefinedValue());
- MOZ_ASSERT(IsAlreadyResolvedMaybeWrappedResolveFunction(resolve));
- MOZ_ASSERT(IsAlreadyResolvedMaybeWrappedRejectFunction(reject));
+ MOZ_ASSERT(IsAlreadyResolvedResolveFunction(resolve));
+ MOZ_ASSERT(IsAlreadyResolvedRejectFunction(reject));
}
/**
@@ -1132,8 +1120,8 @@ void js::SetAlreadyResolvedPromiseWithDefaultResolvingFunction(
rejectFun->initExtendedSlot(RejectFunctionSlot_ResolveFunction,
ObjectValue(*resolveFun));
- MOZ_ASSERT(!IsAlreadyResolvedMaybeWrappedResolveFunction(resolveFun));
- MOZ_ASSERT(!IsAlreadyResolvedMaybeWrappedRejectFunction(rejectFun));
+ MOZ_ASSERT(!IsAlreadyResolvedResolveFunction(resolveFun));
+ MOZ_ASSERT(!IsAlreadyResolvedRejectFunction(rejectFun));
// Step 12. Return the Record { [[Resolve]]: resolve, [[Reject]]: reject }.
return true;
@@ -1181,8 +1169,7 @@ static bool RejectPromiseFunction(JSContext* cx, unsigned argc, Value* vp) {
// If the Promise isn't available anymore, it has been resolved and the
// reference to it removed to make it eligible for collection.
bool alreadyResolved = promiseVal.isUndefined();
- MOZ_ASSERT(IsAlreadyResolvedMaybeWrappedRejectFunction(reject) ==
- alreadyResolved);
+ MOZ_ASSERT(IsAlreadyResolvedRejectFunction(reject) == alreadyResolved);
if (alreadyResolved) {
args.rval().setUndefined();
return true;
@@ -1362,8 +1349,7 @@ static bool ResolvePromiseFunction(JSContext* cx, unsigned argc, Value* vp) {
//
// NOTE: We use the reference to the reject function as [[AlreadyResolved]].
bool alreadyResolved = promiseVal.isUndefined();
- MOZ_ASSERT(IsAlreadyResolvedMaybeWrappedResolveFunction(resolve) ==
- alreadyResolved);
+ MOZ_ASSERT(IsAlreadyResolvedResolveFunction(resolve) == alreadyResolved);
if (alreadyResolved) {
args.rval().setUndefined();
return true;
@@ -3164,7 +3150,8 @@ static bool PromiseAllResolveElementFunction(JSContext* cx, unsigned argc,
for (size_t i = 0, len = promises.length(); i < len; i++) {
JSObject* obj = promises[i];
cx->check(obj);
- MOZ_ASSERT(UncheckedUnwrap(obj)->is<PromiseObject>());
+ JSObject* unwrapped = UncheckedUnwrap(obj);
+ MOZ_ASSERT(unwrapped->is<PromiseObject>() || JS_IsDeadWrapper(unwrapped));
}
#endif
@@ -3265,7 +3252,13 @@ static bool PromiseAllResolveElementFunction(JSContext* cx, unsigned argc,
// compartments with principals inaccessible from the current
// compartment. To make that work, it unwraps promises with
// UncheckedUnwrap,
- nextPromise = &UncheckedUnwrap(nextPromiseObj)->as<PromiseObject>();
+ JSObject* unwrapped = UncheckedUnwrap(nextPromiseObj);
+ if (JS_IsDeadWrapper(unwrapped)) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
+ JSMSG_DEAD_OBJECT);
+ return nullptr;
+ }
+ nextPromise = &unwrapped->as<PromiseObject>();
if (!PerformPromiseThen(cx, nextPromise, resolveFunVal, rejectFunVal,
resultCapabilityWithoutResolving)) {
@@ -5036,7 +5029,8 @@ static PromiseReactionRecord* NewReactionRecord(
// This is the only case where we allow `resolve` and `reject` to
// be null when the `promise` field is not a PromiseObject.
JSObject* unwrappedPromise = UncheckedUnwrap(resultCapability.promise());
- MOZ_ASSERT(unwrappedPromise->is<PromiseObject>());
+ MOZ_ASSERT(unwrappedPromise->is<PromiseObject>() ||
+ JS_IsDeadWrapper(unwrappedPromise));
MOZ_ASSERT(!resultCapability.resolve());
MOZ_ASSERT(!resultCapability.reject());
}
@@ -6218,6 +6212,11 @@ bool js::Promise_then(JSContext* cx, unsigned argc, Value* vp) {
return true;
}
+ if (JS_IsDeadWrapper(UncheckedUnwrap(dependentPromise))) {
+ JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_DEAD_OBJECT);
+ return false;
+ }
+
// `dependentPromise` should be a maybe-wrapped Promise.
MOZ_ASSERT(UncheckedUnwrap(dependentPromise)->is<PromiseObject>());
@@ -6934,7 +6933,8 @@ JS::AutoDebuggerJobQueueInterruption::AutoDebuggerJobQueueInterruption()
: cx(nullptr) {}
JS::AutoDebuggerJobQueueInterruption::~AutoDebuggerJobQueueInterruption() {
- MOZ_ASSERT_IF(initialized(), cx->jobQueue->empty());
+ MOZ_ASSERT_IF(initialized() && !cx->jobQueue->isDrainingStopped(),
+ cx->jobQueue->empty());
}
bool JS::AutoDebuggerJobQueueInterruption::init(JSContext* cx) {
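The dead-wrapper guards added above all follow the same unwrap-then-check shape. A minimal sketch of that pattern, with a hypothetical helper name (UnwrapPromiseOrReportDead is not part of this patch) and SpiderMonkey-internal declarations assumed:

    static js::PromiseObject* UnwrapPromiseOrReportDead(JSContext* cx,
                                                        JSObject* maybeWrapped) {
      // A cross-compartment reference may have been nuked, so unwrapping can
      // yield a dead wrapper instead of the underlying PromiseObject.
      JSObject* unwrapped = js::UncheckedUnwrap(maybeWrapped);
      if (JS_IsDeadWrapper(unwrapped)) {
        JS_ReportErrorNumberASCII(cx, js::GetErrorMessage, nullptr,
                                  JSMSG_DEAD_OBJECT);
        return nullptr;
      }
      return &unwrapped->as<js::PromiseObject>();
    }

Callers bail out on nullptr, which matches the JSMSG_DEAD_OBJECT reports this patch introduces.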
diff --git a/js/src/builtin/RawJSONObject.cpp b/js/src/builtin/RawJSONObject.cpp
new file mode 100644
index 0000000000..08a98835c1
--- /dev/null
+++ b/js/src/builtin/RawJSONObject.cpp
@@ -0,0 +1,41 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * vim: set ts=8 sts=2 et sw=2 tw=80:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "builtin/RawJSONObject.h"
+#include "js/PropertyDescriptor.h"
+
+#include "vm/JSObject-inl.h"
+
+using namespace js;
+
+const JSClass RawJSONObject::class_ = {"RawJSON",
+ JSCLASS_HAS_RESERVED_SLOTS(SlotCount)};
+
+/* static */
+RawJSONObject* RawJSONObject::create(JSContext* cx,
+ Handle<JSString*> jsonString) {
+ Rooted<RawJSONObject*> obj(
+ cx, NewObjectWithGivenProto<RawJSONObject>(cx, nullptr));
+ if (!obj) {
+ return nullptr;
+ }
+ Rooted<PropertyKey> id(cx, NameToId(cx->names().rawJSON));
+ Rooted<Value> jsonStringVal(cx, StringValue(jsonString));
+ if (!NativeDefineDataProperty(cx, obj, id, jsonStringVal, 0)) {
+ return nullptr;
+ }
+ return obj;
+}
+
+JSString* RawJSONObject::rawJSON(JSContext* cx) {
+ // RawJSONObjects are frozen on creation, so they must always have a rawJSON
+ // string property.
+ PropertyKey id(NameToId(cx->names().rawJSON));
+ JS::Value vp;
+ MOZ_ALWAYS_TRUE(GetPropertyNoGC(cx, this, ObjectValue(*this), id, &vp));
+ MOZ_ASSERT(vp.isString());
+ return vp.toString();
+}
diff --git a/js/src/builtin/RawJSONObject.h b/js/src/builtin/RawJSONObject.h
new file mode 100644
index 0000000000..cf8821a844
--- /dev/null
+++ b/js/src/builtin/RawJSONObject.h
@@ -0,0 +1,27 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * vim: set ts=8 sts=2 et sw=2 tw=80:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef builtin_RawJSONObject_h
+#define builtin_RawJSONObject_h
+
+#include "vm/NativeObject.h"
+
+namespace js {
+
+class RawJSONObject : public NativeObject {
+ enum { SlotCount = 0 };
+
+ public:
+ static const JSClass class_;
+
+ static RawJSONObject* create(JSContext* cx, Handle<JSString*> jsonString);
+
+ JSString* rawJSON(JSContext* cx);
+};
+
+} // namespace js
+
+#endif /* builtin_RawJSONObject_h */
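A hypothetical usage sketch for the two entry points declared above (the helper name and the JSON literal are illustrative, not from the patch):

    static bool CreateAndReadRawJSON(JSContext* cx) {
      JS::Rooted<JSString*> json(cx, JS_NewStringCopyZ(cx, "{\"x\":1}"));
      if (!json) {
        return false;
      }
      JS::Rooted<js::RawJSONObject*> obj(cx,
                                         js::RawJSONObject::create(cx, json));
      if (!obj) {
        return false;
      }
      // create() eagerly defines the rawJSON property with the given string,
      // so the lookup in rawJSON() always succeeds.
      JSString* raw = obj->rawJSON(cx);
      return raw == json;
    }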
diff --git a/js/src/builtin/Reflect.js b/js/src/builtin/Reflect.js
index f56d603ca3..34ca34af60 100644
--- a/js/src/builtin/Reflect.js
+++ b/js/src/builtin/Reflect.js
@@ -52,6 +52,8 @@ function Reflect_apply(target, thisArgument, argumentsList) {
// Steps 2-4.
return callFunction(std_Function_apply, target, thisArgument, argumentsList);
}
+// This function is only barely too long for normal inlining.
+SetIsInlinableLargeFunction(Reflect_apply);
// ES2017 draft rev a785b0832b071f505a694e1946182adeab84c972
// 26.1.2 Reflect.construct ( target, argumentsList [ , newTarget ] )
diff --git a/js/src/builtin/Sorting.js b/js/src/builtin/Sorting.js
index 4530a82818..175b506192 100644
--- a/js/src/builtin/Sorting.js
+++ b/js/src/builtin/Sorting.js
@@ -6,7 +6,7 @@
// consolidated here to avoid confusion and re-implementation of existing
// algorithms.
-// For sorting small arrays.
+// For sorting small typed arrays.
function InsertionSort(array, from, to, comparefn) {
var item, swap, i, j;
for (i = from + 1; i <= to; i++) {
@@ -22,101 +22,6 @@ function InsertionSort(array, from, to, comparefn) {
}
}
-// A helper function for MergeSort.
-//
-// Merge comparefn-sorted slices list[start..<=mid] and list[mid+1..<=end],
-// storing the merged sequence in out[start..<=end].
-function Merge(list, out, start, mid, end, comparefn) {
- // Skip lopsided runs to avoid doing useless work.
- // Skip calling the comparator if the sub-list is already sorted.
- if (
- mid >= end ||
- callContentFunction(comparefn, undefined, list[mid], list[mid + 1]) <= 0
- ) {
- for (var i = start; i <= end; i++) {
- DefineDataProperty(out, i, list[i]);
- }
- return;
- }
-
- var i = start;
- var j = mid + 1;
- var k = start;
- while (i <= mid && j <= end) {
- var lvalue = list[i];
- var rvalue = list[j];
- if (callContentFunction(comparefn, undefined, lvalue, rvalue) <= 0) {
- DefineDataProperty(out, k++, lvalue);
- i++;
- } else {
- DefineDataProperty(out, k++, rvalue);
- j++;
- }
- }
-
- // Empty out any remaining elements.
- while (i <= mid) {
- DefineDataProperty(out, k++, list[i++]);
- }
- while (j <= end) {
- DefineDataProperty(out, k++, list[j++]);
- }
-}
-
-// Helper function for overwriting a sparse array with a
-// dense array, filling remaining slots with holes.
-function MoveHoles(sparse, sparseLen, dense, denseLen) {
- for (var i = 0; i < denseLen; i++) {
- sparse[i] = dense[i];
- }
- for (var j = denseLen; j < sparseLen; j++) {
- delete sparse[j];
- }
-}
-
-// Iterative, bottom up, mergesort.
-function MergeSort(array, len, comparefn) {
- assert(IsPackedArray(array), "array is packed");
- assert(array.length === len, "length mismatch");
- assert(len > 0, "array should be non-empty");
-
- // Insertion sort for small arrays, where "small" is defined by performance
- // testing.
- if (len < 24) {
- InsertionSort(array, 0, len - 1, comparefn);
- return array;
- }
-
- // We do all of our allocating up front
- var lBuffer = array;
- var rBuffer = [];
-
- // Use insertion sort for initial ranges.
- var windowSize = 4;
- for (var start = 0; start < len - 1; start += windowSize) {
- var end = std_Math_min(start + windowSize - 1, len - 1);
- InsertionSort(lBuffer, start, end, comparefn);
- }
-
- for (; windowSize < len; windowSize = 2 * windowSize) {
- for (var start = 0; start < len; start += 2 * windowSize) {
- // The midpoint between the two subarrays.
- var mid = start + windowSize - 1;
-
- // To keep from going over the edge.
- var end = std_Math_min(start + 2 * windowSize - 1, len - 1);
-
- Merge(lBuffer, rBuffer, start, mid, end, comparefn);
- }
-
- // Swap both lists.
- var swap = lBuffer;
- lBuffer = rBuffer;
- rBuffer = swap;
- }
- return lBuffer;
-}
-
// A helper function for MergeSortTypedArray.
//
// Merge comparefn-sorted slices list[start..<=mid] and list[mid+1..<=end],
diff --git a/js/src/builtin/TestingFunctions.cpp b/js/src/builtin/TestingFunctions.cpp
index 498fa1746d..da7efd2fcc 100644
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -604,15 +604,6 @@ static bool GetBuildConfiguration(JSContext* cx, unsigned argc, Value* vp) {
return false;
}
-#ifdef ENABLE_JSON_PARSE_WITH_SOURCE
- value = BooleanValue(true);
-#else
- value = BooleanValue(false);
-#endif
- if (!JS_SetProperty(cx, info, "json-parse-with-source", value)) {
- return false;
- }
-
#ifdef FUZZING
value = BooleanValue(true);
#else
@@ -1620,7 +1611,7 @@ static bool WasmLosslessInvoke(JSContext* cx, unsigned argc, Value* vp) {
if (!wasmCallFrame.resize(len)) {
return false;
}
- wasmCallFrame[0].set(args.calleev());
+ wasmCallFrame[0].set(ObjectValue(*func));
wasmCallFrame[1].set(args.thisv());
// Copy over the arguments needed to invoke the provided wasm function,
// skipping the wasm function we're calling that is at `args.get(0)`.
@@ -3714,6 +3705,85 @@ static bool NewString(JSContext* cx, unsigned argc, Value* vp) {
return true;
}
+static bool NewDependentString(JSContext* cx, unsigned argc, Value* vp) {
+ CallArgs args = CallArgsFromVp(argc, vp);
+
+ RootedString src(cx, ToString(cx, args.get(0)));
+ if (!src) {
+ return false;
+ }
+
+ uint64_t indexStart = 0;
+ mozilla::Maybe<uint64_t> indexEnd;
+ gc::Heap heap = gc::Heap::Default;
+ mozilla::Maybe<gc::Heap> requiredHeap;
+
+ if (!ToIndex(cx, args.get(1), &indexStart)) {
+ return false;
+ }
+
+ Rooted<Value> options(cx);
+ if (args.get(2).isObject()) {
+ options = args[2];
+ } else {
+ uint64_t idx;
+ if (args.hasDefined(2)) {
+ if (!ToIndex(cx, args.get(2), &idx)) {
+ return false;
+ }
+ indexEnd.emplace(idx);
+ }
+ options = args.get(3);
+ }
+
+ if (options.isObject()) {
+ Rooted<Value> v(cx);
+ Rooted<JSObject*> optObj(cx, &options.toObject());
+ if (!JS_GetProperty(cx, optObj, "tenured", &v)) {
+ return false;
+ }
+ if (v.isBoolean()) {
+ requiredHeap.emplace(v.toBoolean() ? gc::Heap::Tenured
+ : gc::Heap::Default);
+ heap = *requiredHeap;
+ }
+ }
+
+ if (indexEnd.isNothing()) {
+ // Read the length now that no more JS code can run.
+ indexEnd.emplace(src->length());
+ }
+ if (indexStart > src->length() || *indexEnd > src->length() ||
+ indexStart >= *indexEnd) {
+ JS_ReportErrorASCII(cx, "invalid dependent string bounds");
+ return false;
+ }
+ if (!src->ensureLinear(cx)) {
+ return false;
+ }
+ Rooted<JSString*> result(
+ cx, js::NewDependentString(cx, src, indexStart, *indexEnd - indexStart,
+ heap));
+ if (!result) {
+ return false;
+ }
+ if (!result->isDependent()) {
+ JS_ReportErrorASCII(cx, "resulting string is not dependent (too short?)");
+ return false;
+ }
+
+ if (requiredHeap.isSome()) {
+ MOZ_ASSERT_IF(*requiredHeap == gc::Heap::Tenured, result->isTenured());
+ if ((*requiredHeap == gc::Heap::Default) && result->isTenured()) {
+ JS_ReportErrorASCII(cx, "nursery string created in tenured heap");
+ return false;
+ }
+ }
+
+ args.rval().setString(result);
+ return true;
+}
+
// Warning! This will let you create ropes that I'm not sure would be possible
// otherwise, specifically:
//
@@ -7183,7 +7253,7 @@ static bool SetImmutablePrototype(JSContext* cx, unsigned argc, Value* vp) {
return true;
}
-#ifdef DEBUG
+#if defined(DEBUG) || defined(JS_JITSPEW)
static bool DumpStringRepresentation(JSContext* cx, unsigned argc, Value* vp) {
CallArgs args = CallArgsFromVp(argc, vp);
@@ -7221,7 +7291,6 @@ static bool GetStringRepresentation(JSContext* cx, unsigned argc, Value* vp) {
args.rval().setString(rep);
return true;
}
-
#endif
static bool ParseCompileOptionsForModule(JSContext* cx,
@@ -7237,9 +7306,7 @@ static bool ParseCompileOptionsForModule(JSContext* cx,
options.setModule();
isModule = true;
- // js::ParseCompileOptions should already be called.
- if (options.lineno == 0) {
- JS_ReportErrorASCII(cx, "Module cannot be compiled with lineNumber == 0");
+ if (!ValidateModuleCompileOptions(cx, options)) {
return false;
}
} else {
@@ -9435,6 +9502,15 @@ static const JSFunctionSpecWithHelp TestingFunctions[] = {
" - maybeExternal: create an external string, unless the data fits within an\n"
" inline string. Inline strings may be nursery-allocated."),
+ JS_FN_HELP("newDependentString", NewDependentString, 2, 0,
+"newDependentString(str, indexStart[, indexEnd] [, options])",
+" Essentially the same as str.substring() but insist on\n"
+" creating a dependent string and failing if not. Also has options to\n"
+" control the heap the string object is allocated into:\n"
+" \n"
+" - tenured: if true, allocate in the tenured heap or throw. If false,\n"
+" allocate in the nursery or throw."),
+
JS_FN_HELP("ensureLinearString", EnsureLinearString, 1, 0,
"ensureLinearString(str)",
" Ensures str is a linear (non-rope) string and returns it."),
@@ -10058,20 +10134,6 @@ JS_FOR_WASM_FEATURES(WASM_FEATURE)
"wasmMetadataAnalysis(wasmObject)",
" Prints an analysis of the size of metadata on this wasm object.\n"),
-#if defined(DEBUG) || defined(JS_JITSPEW)
- JS_FN_HELP("dumpObject", DumpObject, 1, 0,
-"dumpObject(obj)",
-" Dump an internal representation of an object."),
-
- JS_FN_HELP("dumpValue", DumpValue, 1, 0,
-"dumpValue(v)",
-" Dump an internal representation of a value."),
-
- JS_FN_HELP("dumpValueToString", DumpValueToString, 1, 0,
-"dumpValue(v)",
-" Return a dump of an internal representation of a value."),
-#endif
-
JS_FN_HELP("sharedMemoryEnabled", SharedMemoryEnabled, 0, 0,
"sharedMemoryEnabled()",
" Return true if SharedArrayBuffer and Atomics are enabled"),
@@ -10129,17 +10191,6 @@ JS_FOR_WASM_FEATURES(WASM_FEATURE)
" of internal error, or if the operation doesn't even make sense (for example,\n"
" because the object is a revoked proxy)."),
-#ifdef DEBUG
- JS_FN_HELP("dumpStringRepresentation", DumpStringRepresentation, 1, 0,
-"dumpStringRepresentation(str)",
-" Print a human-readable description of how the string |str| is represented.\n"),
-
- JS_FN_HELP("stringRepresentation", GetStringRepresentation, 1, 0,
-"stringRepresentation(str)",
-" Return a human-readable description of how the string |str| is represented.\n"),
-
-#endif
-
JS_FN_HELP("allocationMarker", AllocationMarker, 0, 0,
"allocationMarker([options])",
" Return a freshly allocated object whose [[Class]] name is\n"
@@ -10428,6 +10479,29 @@ JS_FN_HELP("getEnvironmentObjectType", GetEnvironmentObjectType, 1, 0,
" Return an object describing the calling realm's fuse state, "
"as well as the state of any runtime fuses."),
+#if defined(DEBUG) || defined(JS_JITSPEW)
+ JS_FN_HELP("dumpObject", DumpObject, 1, 0,
+"dumpObject(obj)",
+" Dump an internal representation of an object."),
+
+ JS_FN_HELP("dumpValue", DumpValue, 1, 0,
+"dumpValue(v)",
+" Dump an internal representation of a value."),
+
+ JS_FN_HELP("dumpValueToString", DumpValueToString, 1, 0,
+"dumpValue(v)",
+" Return a dump of an internal representation of a value."),
+
+ JS_FN_HELP("dumpStringRepresentation", DumpStringRepresentation, 1, 0,
+"dumpStringRepresentation(str)",
+" Print a human-readable description of how the string |str| is represented.\n"),
+
+ JS_FN_HELP("stringRepresentation", GetStringRepresentation, 1, 0,
+"stringRepresentation(str)",
+" Return a human-readable description of how the string |str| is represented.\n"),
+
+#endif
+
JS_FS_HELP_END
};
// clang-format on
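The newDependentString shell helper is a thin wrapper around the internal js::NewDependentString call shown in the hunk above. A minimal sketch of the constraints it enforces, assuming the same internal API and a hypothetical helper name:

    static JSString* MakeDependentSlice(JSContext* cx,
                                        JS::Handle<JSString*> base,
                                        size_t start, size_t end) {
      // Dependent strings borrow the base string's characters, so the base
      // must be linear and the requested range must lie inside it.
      if (start >= end || end > base->length()) {
        JS_ReportErrorASCII(cx, "invalid dependent string bounds");
        return nullptr;
      }
      if (!base->ensureLinear(cx)) {
        return nullptr;
      }
      return js::NewDependentString(cx, base, start, end - start,
                                    js::gc::Heap::Default);
    }

The shell helper additionally rejects results that are not actually dependent (very short slices are stored inline instead) and can demand a particular heap via the tenured option.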
diff --git a/js/src/builtin/TestingUtility.cpp b/js/src/builtin/TestingUtility.cpp
index 12a99c7f08..9d4af5897c 100644
--- a/js/src/builtin/TestingUtility.cpp
+++ b/js/src/builtin/TestingUtility.cpp
@@ -308,3 +308,18 @@ bool js::ValidateLazinessOfStencilAndGlobal(
return true;
}
+
+bool js::ValidateModuleCompileOptions(JSContext* cx,
+ JS::CompileOptions& options) {
+ if (options.lineno == 0) {
+ JS_ReportErrorASCII(cx, "Module cannot be compiled with lineNumber == 0");
+ return false;
+ }
+
+ if (!options.filename()) {
+ JS_ReportErrorASCII(cx, "Module should have filename");
+ return false;
+ }
+
+ return true;
+}
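A brief caller sketch for the new validation helper (the filename and line number are placeholders, not from the patch):

    static bool PrepareModuleOptions(JSContext* cx, JS::CompileOptions& options) {
      options.setFileAndLine("example-module.js", 1);  // placeholder values
      options.setModule();
      // Rejects a missing filename or lineno == 0 and reports on cx.
      return js::ValidateModuleCompileOptions(cx, options);
    }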
diff --git a/js/src/builtin/TestingUtility.h b/js/src/builtin/TestingUtility.h
index 4c666540f7..82225068a1 100644
--- a/js/src/builtin/TestingUtility.h
+++ b/js/src/builtin/TestingUtility.h
@@ -72,6 +72,8 @@ JSObject* CreateScriptPrivate(JSContext* cx,
bool ValidateLazinessOfStencilAndGlobal(
JSContext* cx, const frontend::CompilationStencil& stencil);
+bool ValidateModuleCompileOptions(JSContext* cx, JS::CompileOptions& options);
+
} /* namespace js */
#endif /* builtin_TestingUtility_h */
diff --git a/js/src/builtin/Tuple.js b/js/src/builtin/Tuple.js
index 98177ac35d..bade1739d0 100644
--- a/js/src/builtin/Tuple.js
+++ b/js/src/builtin/Tuple.js
@@ -25,7 +25,7 @@ function TupleToSorted(comparefn) {
/* Step 3. */
var items = TupleToArray(T);
- var sorted = callFunction(ArraySort, items, comparefn);
+ var sorted = callFunction(std_Array_sort, items, comparefn);
return std_Tuple_unchecked(sorted);
}
diff --git a/js/src/builtin/WeakMapObject-inl.h b/js/src/builtin/WeakMapObject-inl.h
index 28ab1abeb2..3c5f2ceb27 100644
--- a/js/src/builtin/WeakMapObject-inl.h
+++ b/js/src/builtin/WeakMapObject-inl.h
@@ -92,6 +92,21 @@ static MOZ_ALWAYS_INLINE bool CanBeHeldWeakly(JSContext* cx,
return false;
}
+static unsigned GetErrorNumber(bool isWeakMap) {
+#ifdef NIGHTLY_BUILD
+ bool symbolsAsWeakMapKeysEnabled =
+ JS::Prefs::experimental_symbols_as_weakmap_keys();
+
+ if (symbolsAsWeakMapKeysEnabled) {
+ return isWeakMap ? JSMSG_WEAKMAP_KEY_CANT_BE_HELD_WEAKLY
+ : JSMSG_WEAKSET_VAL_CANT_BE_HELD_WEAKLY;
+ }
+#endif
+
+ return isWeakMap ? JSMSG_WEAKMAP_KEY_MUST_BE_AN_OBJECT
+ : JSMSG_WEAKSET_VAL_MUST_BE_AN_OBJECT;
+}
+
} // namespace js
#endif /* builtin_WeakMapObject_inl_h */
diff --git a/js/src/builtin/WeakMapObject.cpp b/js/src/builtin/WeakMapObject.cpp
index 680f49bc3e..a9d7dcdedc 100644
--- a/js/src/builtin/WeakMapObject.cpp
+++ b/js/src/builtin/WeakMapObject.cpp
@@ -122,8 +122,8 @@ bool WeakMapObject::delete_(JSContext* cx, unsigned argc, Value* vp) {
MOZ_ASSERT(WeakMapObject::is(args.thisv()));
if (!CanBeHeldWeakly(cx, args.get(0))) {
- ReportValueError(cx, JSMSG_WEAKMAP_KEY_CANT_BE_HELD_WEAKLY,
- JSDVG_IGNORE_STACK, args.get(0), nullptr);
+ unsigned errorNum = GetErrorNumber(true);
+ ReportValueError(cx, errorNum, JSDVG_IGNORE_STACK, args.get(0), nullptr);
return false;
}
@@ -238,8 +238,8 @@ JS_PUBLIC_API bool JS::SetWeakMapEntry(JSContext* cx, HandleObject mapObj,
CHECK_THREAD(cx);
cx->check(key, val);
if (!CanBeHeldWeakly(cx, key)) {
- ReportValueError(cx, JSMSG_WEAKMAP_KEY_CANT_BE_HELD_WEAKLY,
- JSDVG_IGNORE_STACK, key, nullptr);
+ unsigned errorNum = GetErrorNumber(true);
+ ReportValueError(cx, errorNum, JSDVG_IGNORE_STACK, key, nullptr);
return false;
}
diff --git a/js/src/builtin/WeakSetObject.cpp b/js/src/builtin/WeakSetObject.cpp
index 06e190f51e..3705e94218 100644
--- a/js/src/builtin/WeakSetObject.cpp
+++ b/js/src/builtin/WeakSetObject.cpp
@@ -31,8 +31,8 @@ using namespace js;
// Step 4.
if (!CanBeHeldWeakly(cx, args.get(0))) {
- ReportValueError(cx, JSMSG_WEAKSET_VAL_CANT_BE_HELD_WEAKLY,
- JSDVG_IGNORE_STACK, args.get(0), nullptr);
+ unsigned errorNum = GetErrorNumber(false);
+ ReportValueError(cx, errorNum, JSDVG_IGNORE_STACK, args.get(0), nullptr);
return false;
}
@@ -198,8 +198,9 @@ bool WeakSetObject::construct(JSContext* cx, unsigned argc, Value* vp) {
MOZ_ASSERT(!keyVal.isMagic(JS_ELEMENTS_HOLE));
if (!CanBeHeldWeakly(cx, keyVal)) {
- ReportValueError(cx, JSMSG_WEAKSET_VAL_CANT_BE_HELD_WEAKLY,
- JSDVG_IGNORE_STACK, keyVal, nullptr);
+ unsigned errorNum = GetErrorNumber(false);
+ ReportValueError(cx, errorNum, JSDVG_IGNORE_STACK, keyVal, nullptr);
return false;
}
diff --git a/js/src/builtin/embedjs.py b/js/src/builtin/embedjs.py
index 6a2c25b999..685f511e49 100644
--- a/js/src/builtin/embedjs.py
+++ b/js/src/builtin/embedjs.py
@@ -142,7 +142,9 @@ def preprocess(cxx, preprocessorOption, source, args=[]):
with open(tmpIn, "wb") as input:
input.write(source.encode("utf-8"))
- print(" ".join(cxx + outputArg + args + [tmpIn]))
+
+ if os.environ.get("BUILD_VERBOSE_LOG"):
+ print("Executing:", " ".join(cxx + outputArg + args + [tmpIn]))
result = subprocess.Popen(cxx + outputArg + args + [tmpIn]).wait()
if result != 0:
sys.exit(result)