author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 00:47:55 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 00:47:55 +0000
commit    26a029d407be480d791972afb5975cf62c9360a6 (patch)
tree      f435a8308119effd964b339f76abb83a57c29483  /js/src/jit/JitcodeMap.h
parent    Initial commit. (diff)
download  firefox-26a029d407be480d791972afb5975cf62c9360a6.tar.xz
          firefox-26a029d407be480d791972afb5975cf62c9360a6.zip
Adding upstream version 124.0.1. (upstream/124.0.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'js/src/jit/JitcodeMap.h')
-rw-r--r--  js/src/jit/JitcodeMap.h  808
1 file changed, 808 insertions, 0 deletions
diff --git a/js/src/jit/JitcodeMap.h b/js/src/jit/JitcodeMap.h
new file mode 100644
index 0000000000..b4ed8ae7ff
--- /dev/null
+++ b/js/src/jit/JitcodeMap.h
@@ -0,0 +1,808 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * vim: set ts=8 sts=2 et sw=2 tw=80:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef jit_JitcodeMap_h
+#define jit_JitcodeMap_h
+
+#include "mozilla/Assertions.h" // MOZ_ASSERT, MOZ_ASSERT_IF, MOZ_CRASH
+
+#include <stddef.h> // size_t
+#include <stdint.h> // uint8_t, uint32_t, uint64_t
+
+#include "ds/AvlTree.h" // AvlTree
+#include "jit/CompactBuffer.h" // CompactBufferReader, CompactBufferWriter
+#include "jit/shared/Assembler-shared.h" // CodeOffset
+#include "js/AllocPolicy.h" // SystemAllocPolicy
+#include "js/TypeDecls.h" // jsbytecode
+#include "js/Vector.h" // Vector
+#include "vm/BytecodeLocation.h" // BytecodeLocation
+
+class JSScript;
+class JSTracer;
+struct JSRuntime;
+
+namespace JS {
+class Zone;
+} // namespace JS
+
+namespace js {
+
+class GCMarker;
+
+namespace jit {
+
+class InlineScriptTree;
+
+/*
+ * The jitcode map implements tables to allow mapping from addresses in jitcode
+ * to the list of (JSScript*, jsbytecode*) pairs that are implicitly active in
+ * the frame at that point in the native code.
+ *
+ * To represent this information efficiently, a multi-level table is used.
+ *
+ * At the top level, a global AVL tree of JitcodeGlobalEntry instances
+ * describes the mapping for each individual JitCode generated by compiles.
+ * The entries are ordered by their nativeStartAddr.
+ *
+ * Every entry in the table is of fixed size, but there are different entry
+ * types, distinguished by the kind field.
+ */
+
+class JitcodeGlobalTable;
+class JitcodeIonTable;
+class JitcodeRegionEntry;
+
+struct NativeToBytecode {
+ CodeOffset nativeOffset;
+ InlineScriptTree* tree;
+ jsbytecode* pc;
+};
+
+// Describes range [start, end) of JIT-generated code.
+class JitCodeRange {
+ protected:
+ void* const nativeStartAddr_;
+ void* const nativeEndAddr_;
+
+ public:
+ JitCodeRange(void* start, void* end)
+ : nativeStartAddr_(start), nativeEndAddr_(end) {
+ MOZ_ASSERT(start < end);
+ }
+
+ // Comparator used by the AvlTree.
+ static int compare(const JitCodeRange* r1, const JitCodeRange* r2) {
+ // JitCodeRange includes 'start' but excludes 'end'.
+ if (r1->nativeEndAddr_ <= r2->nativeStartAddr_) {
+ return -1;
+ }
+ if (r1->nativeStartAddr_ >= r2->nativeEndAddr_) {
+ return 1;
+ }
+ return 0;
+ }
+
+ void* nativeStartAddr() const { return nativeStartAddr_; }
+ void* nativeEndAddr() const { return nativeEndAddr_; }
+
+ bool containsPointer(void* ptr) const {
+ return nativeStartAddr() <= ptr && ptr < nativeEndAddr();
+ }
+};
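+
+// For illustration: because compare() returns 0 for any two overlapping
+// ranges, a lookup for a single native address can be phrased as a degenerate
+// one-byte range. A hypothetical sketch (addr is assumed to point into
+// jitcode):
+//
+//   void* addr = ...;
+//   JitCodeRange query(addr, static_cast<uint8_t*>(addr) + 1);
+//   // An AvlTree keyed with JitCodeRange::compare then reports the stored
+//   // range whose [start, end) interval contains addr as "equal" to query.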
+
+typedef Vector<BytecodeLocation, 0, SystemAllocPolicy> BytecodeLocationVector;
+
+class IonEntry;
+class IonICEntry;
+class BaselineEntry;
+class BaselineInterpreterEntry;
+class DummyEntry;
+
+// Base class for all entries.
+class JitcodeGlobalEntry : public JitCodeRange {
+ protected:
+ JitCode* jitcode_;
+ // If this entry is referenced from the profiler buffer, this is the
+ // position where the most recent sample that references it starts.
+ // Otherwise set to kNoSampleInBuffer.
+ static const uint64_t kNoSampleInBuffer = UINT64_MAX;
+ uint64_t samplePositionInBuffer_ = kNoSampleInBuffer;
+
+ public:
+ enum class Kind : uint8_t {
+ Ion,
+ IonIC,
+ Baseline,
+ BaselineInterpreter,
+ Dummy
+ };
+
+ protected:
+ Kind kind_;
+
+ JitcodeGlobalEntry(Kind kind, JitCode* code, void* nativeStartAddr,
+ void* nativeEndAddr)
+ : JitCodeRange(nativeStartAddr, nativeEndAddr),
+ jitcode_(code),
+ kind_(kind) {
+ MOZ_ASSERT(code);
+ MOZ_ASSERT(nativeStartAddr);
+ MOZ_ASSERT(nativeEndAddr);
+ }
+
+ // Protected destructor to ensure destruction goes through DestroyPolicy.
+ ~JitcodeGlobalEntry() = default;
+
+ JitcodeGlobalEntry(const JitcodeGlobalEntry& other) = delete;
+ void operator=(const JitcodeGlobalEntry& other) = delete;
+
+ public:
+ struct DestroyPolicy {
+ void operator()(JitcodeGlobalEntry* entry);
+ };
+
+ void setSamplePositionInBuffer(uint64_t bufferWritePos) {
+ samplePositionInBuffer_ = bufferWritePos;
+ }
+ void setAsExpired() { samplePositionInBuffer_ = kNoSampleInBuffer; }
+ bool isSampled(uint64_t bufferRangeStart) {
+ if (samplePositionInBuffer_ == kNoSampleInBuffer) {
+ return false;
+ }
+ return bufferRangeStart <= samplePositionInBuffer_;
+ }
+
+ Kind kind() const { return kind_; }
+ bool isIon() const { return kind() == Kind::Ion; }
+ bool isIonIC() const { return kind() == Kind::IonIC; }
+ bool isBaseline() const { return kind() == Kind::Baseline; }
+ bool isBaselineInterpreter() const {
+ return kind() == Kind::BaselineInterpreter;
+ }
+ bool isDummy() const { return kind() == Kind::Dummy; }
+
+ inline const IonEntry& asIon() const;
+ inline const IonICEntry& asIonIC() const;
+ inline const BaselineEntry& asBaseline() const;
+ inline const BaselineInterpreterEntry& asBaselineInterpreter() const;
+ inline const DummyEntry& asDummy() const;
+
+ inline IonEntry& asIon();
+ inline IonICEntry& asIonIC();
+ inline BaselineEntry& asBaseline();
+ inline BaselineInterpreterEntry& asBaselineInterpreter();
+ inline DummyEntry& asDummy();
+
+ JitCode* jitcode() const { return jitcode_; }
+ JitCode** jitcodePtr() { return &jitcode_; }
+ Zone* zone() const { return jitcode()->zone(); }
+
+ bool traceJitcode(JSTracer* trc);
+ bool isJitcodeMarkedFromAnyThread(JSRuntime* rt);
+
+ bool trace(JSTracer* trc);
+ void traceWeak(JSTracer* trc);
+ uint64_t lookupRealmID(JSRuntime* rt, void* ptr) const;
+ void* canonicalNativeAddrFor(JSRuntime* rt, void* ptr) const;
+
+ // Read the inline call stack at a given point in the native code and append
+ // it to the given vector. The innermost (script, pc) pair is appended first
+ // and the outermost last.
+ //
+ // Returns false on memory failure.
+ [[nodiscard]] bool callStackAtAddr(JSRuntime* rt, void* ptr,
+ BytecodeLocationVector& results,
+ uint32_t* depth) const;
+ uint32_t callStackAtAddr(JSRuntime* rt, void* ptr, const char** results,
+ uint32_t maxResults) const;
+};
+
+using UniqueJitcodeGlobalEntry =
+ UniquePtr<JitcodeGlobalEntry, JitcodeGlobalEntry::DestroyPolicy>;
+
+template <typename T, typename... Args>
+inline UniqueJitcodeGlobalEntry MakeJitcodeGlobalEntry(JSContext* cx,
+ Args&&... args) {
+ UniqueJitcodeGlobalEntry res(js_new<T>(std::forward<Args>(args)...));
+ if (!res) {
+ ReportOutOfMemory(cx);
+ }
+ return res;
+}
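+
+// For illustration only: a hypothetical call site using the helper above to
+// allocate a BaselineEntry. The code/script/name variables and the use of
+// code->raw()/code->rawEnd() as the native range are assumptions, not taken
+// from this file.
+//
+//   UniqueJitcodeGlobalEntry entry = MakeJitcodeGlobalEntry<BaselineEntry>(
+//       cx, code, code->raw(), code->rawEnd(), script, std::move(name));
+//   if (!entry) {
+//     return false;  // OOM has already been reported by the helper.
+//   }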
+
+class IonEntry : public JitcodeGlobalEntry {
+ public:
+ struct ScriptNamePair {
+ JSScript* script;
+ UniqueChars str;
+ ScriptNamePair(JSScript* script, UniqueChars str)
+ : script(script), str(std::move(str)) {}
+ };
+ using ScriptList = Vector<ScriptNamePair, 2, SystemAllocPolicy>;
+
+ private:
+ ScriptList scriptList_;
+
+ // regionTable_ points to the start of the region table within the
+ // packed map for the compile represented by this entry. Since the
+ // region table occurs at the tail of the memory region, this pointer
+ // points somewhere inside the region memory space, and not to the start
+ // of the memory space.
+ const JitcodeIonTable* regionTable_;
+
+ public:
+ IonEntry(JitCode* code, void* nativeStartAddr, void* nativeEndAddr,
+ ScriptList&& scriptList, JitcodeIonTable* regionTable)
+ : JitcodeGlobalEntry(Kind::Ion, code, nativeStartAddr, nativeEndAddr),
+ scriptList_(std::move(scriptList)),
+ regionTable_(regionTable) {
+ MOZ_ASSERT(regionTable);
+ }
+
+ ~IonEntry();
+
+ ScriptList& scriptList() { return scriptList_; }
+
+ size_t numScripts() const { return scriptList_.length(); }
+
+ JSScript* getScript(unsigned idx) const {
+ MOZ_ASSERT(idx < numScripts());
+ return scriptList_[idx].script;
+ }
+
+ const char* getStr(unsigned idx) const {
+ MOZ_ASSERT(idx < numScripts());
+ return scriptList_[idx].str.get();
+ }
+
+ const JitcodeIonTable* regionTable() const { return regionTable_; }
+
+ void* canonicalNativeAddrFor(void* ptr) const;
+
+ [[nodiscard]] bool callStackAtAddr(void* ptr, BytecodeLocationVector& results,
+ uint32_t* depth) const;
+
+ uint32_t callStackAtAddr(void* ptr, const char** results,
+ uint32_t maxResults) const;
+
+ uint64_t lookupRealmID(void* ptr) const;
+
+ bool trace(JSTracer* trc);
+ void traceWeak(JSTracer* trc);
+};
+
+class IonICEntry : public JitcodeGlobalEntry {
+ // Address for this IC in the IonScript code. Most operations on IonICEntry
+ // use this to forward to the IonEntry.
+ void* rejoinAddr_;
+
+ public:
+ IonICEntry(JitCode* code, void* nativeStartAddr, void* nativeEndAddr,
+ void* rejoinAddr)
+ : JitcodeGlobalEntry(Kind::IonIC, code, nativeStartAddr, nativeEndAddr),
+ rejoinAddr_(rejoinAddr) {
+ MOZ_ASSERT(rejoinAddr_);
+ }
+
+ void* rejoinAddr() const { return rejoinAddr_; }
+
+ void* canonicalNativeAddrFor(void* ptr) const;
+
+ [[nodiscard]] bool callStackAtAddr(JSRuntime* rt, void* ptr,
+ BytecodeLocationVector& results,
+ uint32_t* depth) const;
+
+ uint32_t callStackAtAddr(JSRuntime* rt, void* ptr, const char** results,
+ uint32_t maxResults) const;
+
+ uint64_t lookupRealmID(JSRuntime* rt, void* ptr) const;
+
+ bool trace(JSTracer* trc);
+ void traceWeak(JSTracer* trc);
+};
+
+class BaselineEntry : public JitcodeGlobalEntry {
+ JSScript* script_;
+ UniqueChars str_;
+
+ public:
+ BaselineEntry(JitCode* code, void* nativeStartAddr, void* nativeEndAddr,
+ JSScript* script, UniqueChars str)
+ : JitcodeGlobalEntry(Kind::Baseline, code, nativeStartAddr,
+ nativeEndAddr),
+ script_(script),
+ str_(std::move(str)) {
+ MOZ_ASSERT(script_);
+ MOZ_ASSERT(str_);
+ }
+
+ JSScript* script() const { return script_; }
+
+ const char* str() const { return str_.get(); }
+
+ void* canonicalNativeAddrFor(void* ptr) const;
+
+ [[nodiscard]] bool callStackAtAddr(void* ptr, BytecodeLocationVector& results,
+ uint32_t* depth) const;
+
+ uint32_t callStackAtAddr(void* ptr, const char** results,
+ uint32_t maxResults) const;
+
+ uint64_t lookupRealmID() const;
+
+ bool trace(JSTracer* trc);
+ void traceWeak(JSTracer* trc);
+};
+
+class BaselineInterpreterEntry : public JitcodeGlobalEntry {
+ public:
+ BaselineInterpreterEntry(JitCode* code, void* nativeStartAddr,
+ void* nativeEndAddr)
+ : JitcodeGlobalEntry(Kind::BaselineInterpreter, code, nativeStartAddr,
+ nativeEndAddr) {}
+
+ void* canonicalNativeAddrFor(void* ptr) const;
+
+ [[nodiscard]] bool callStackAtAddr(void* ptr, BytecodeLocationVector& results,
+ uint32_t* depth) const;
+
+ uint32_t callStackAtAddr(void* ptr, const char** results,
+ uint32_t maxResults) const;
+
+ uint64_t lookupRealmID() const;
+};
+
+// Dummy entries are created for jitcode generated when profiling is not
+// turned on, so that they have representation in the global table if they are
+// on the stack when profiling is enabled.
+class DummyEntry : public JitcodeGlobalEntry {
+ public:
+ DummyEntry(JitCode* code, void* nativeStartAddr, void* nativeEndAddr)
+ : JitcodeGlobalEntry(Kind::Dummy, code, nativeStartAddr, nativeEndAddr) {}
+
+ void* canonicalNativeAddrFor(JSRuntime* rt, void* ptr) const {
+ return nullptr;
+ }
+
+ [[nodiscard]] bool callStackAtAddr(JSRuntime* rt, void* ptr,
+ BytecodeLocationVector& results,
+ uint32_t* depth) const {
+ return true;
+ }
+
+ uint32_t callStackAtAddr(JSRuntime* rt, void* ptr, const char** results,
+ uint32_t maxResults) const {
+ return 0;
+ }
+
+ uint64_t lookupRealmID() const { return 0; }
+};
+
+inline const IonEntry& JitcodeGlobalEntry::asIon() const {
+ MOZ_ASSERT(isIon());
+ return *static_cast<const IonEntry*>(this);
+}
+
+inline const IonICEntry& JitcodeGlobalEntry::asIonIC() const {
+ MOZ_ASSERT(isIonIC());
+ return *static_cast<const IonICEntry*>(this);
+}
+
+inline const BaselineEntry& JitcodeGlobalEntry::asBaseline() const {
+ MOZ_ASSERT(isBaseline());
+ return *static_cast<const BaselineEntry*>(this);
+}
+
+inline const BaselineInterpreterEntry&
+JitcodeGlobalEntry::asBaselineInterpreter() const {
+ MOZ_ASSERT(isBaselineInterpreter());
+ return *static_cast<const BaselineInterpreterEntry*>(this);
+}
+
+inline const DummyEntry& JitcodeGlobalEntry::asDummy() const {
+ MOZ_ASSERT(isDummy());
+ return *static_cast<const DummyEntry*>(this);
+}
+
+inline IonEntry& JitcodeGlobalEntry::asIon() {
+ MOZ_ASSERT(isIon());
+ return *static_cast<IonEntry*>(this);
+}
+
+inline IonICEntry& JitcodeGlobalEntry::asIonIC() {
+ MOZ_ASSERT(isIonIC());
+ return *static_cast<IonICEntry*>(this);
+}
+
+inline BaselineEntry& JitcodeGlobalEntry::asBaseline() {
+ MOZ_ASSERT(isBaseline());
+ return *static_cast<BaselineEntry*>(this);
+}
+
+inline BaselineInterpreterEntry& JitcodeGlobalEntry::asBaselineInterpreter() {
+ MOZ_ASSERT(isBaselineInterpreter());
+ return *static_cast<BaselineInterpreterEntry*>(this);
+}
+
+inline DummyEntry& JitcodeGlobalEntry::asDummy() {
+ MOZ_ASSERT(isDummy());
+ return *static_cast<DummyEntry*>(this);
+}
+
+// Global table of JitcodeGlobalEntry entries.
+class JitcodeGlobalTable {
+ private:
+ // Vector containing (and owning) all entries. This is unsorted and used for
+ // iterating over all entries, because the AvlTree currently doesn't support
+ // modifications while iterating.
+ using EntryVector = Vector<UniqueJitcodeGlobalEntry, 0, SystemAllocPolicy>;
+ EntryVector entries_;
+
+ // AVL tree containing all entries in the Vector above. This is used to
+ // efficiently look up the entry corresponding to a native code address.
+ using EntryTree = AvlTree<JitCodeRange*, JitCodeRange>;
+ static const size_t LIFO_CHUNK_SIZE = 16 * 1024;
+ LifoAlloc alloc_;
+ EntryTree tree_;
+
+ public:
+ JitcodeGlobalTable() : alloc_(LIFO_CHUNK_SIZE), tree_(&alloc_) {}
+
+ bool empty() const {
+ MOZ_ASSERT(entries_.empty() == tree_.empty());
+ return entries_.empty();
+ }
+
+ JitcodeGlobalEntry* lookup(void* ptr) { return lookupInternal(ptr); }
+
+ const JitcodeGlobalEntry* lookupForSampler(void* ptr, JSRuntime* rt,
+ uint64_t samplePosInBuffer);
+
+ [[nodiscard]] bool addEntry(UniqueJitcodeGlobalEntry entry);
+
+ void setAllEntriesAsExpired();
+ [[nodiscard]] bool markIteratively(GCMarker* marker);
+ void traceWeak(JSRuntime* rt, JSTracer* trc);
+
+ private:
+ JitcodeGlobalEntry* lookupInternal(void* ptr);
+};
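+
+// For illustration: a hypothetical point lookup against the table, e.g. for a
+// return address sampled from a native frame. Not a real call site.
+//
+//   if (JitcodeGlobalEntry* entry = table->lookup(returnAddress)) {
+//     // entry->callStackAtAddr(rt, returnAddress, ...) can now recover the
+//     // (JSScript*, jsbytecode*) stack implied by that native address.
+//   }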
+
+// clang-format off
+/*
+ * Container class for the main jitcode table.
+ * The region table's memory is structured as follows:
+ *
+ * +------------------------------------------------+   |
+ * |         Region 1 Run                           |   |
+ * |------------------------------------------------|   |
+ * |         Region 2 Run                           |   |
+ * |                                                |   |
+ * |                                                |   |
+ * |------------------------------------------------|   |
+ * |         Region 3 Run                           |   |
+ * |                                                |   |
+ * |------------------------------------------------|   |-- Payload
+ * |                                                |   |
+ * |         ...                                    |   |
+ * |                                                |   |
+ * |------------------------------------------------|   |
+ * |         Region M Run                           |   |
+ * |                                                |   |
+ * +================================================+ <- RegionTable pointer points here
+ * |         uint32_t numRegions = M                |   |
+ * +------------------------------------------------+   |
+ * |         Region 1                               |   |
+ * |           uint32_t entryOffset = size(Payload) |   |
+ * +------------------------------------------------+   |
+ * |                                                |   |-- Table
+ * |         ...                                    |   |
+ * |                                                |   |
+ * +------------------------------------------------+   |
+ * |         Region M                               |   |
+ * |           uint32_t entryOffset                 |   |
+ * +------------------------------------------------+   |
+ *
+ * The region table is composed of two sections: a tail section that contains a table of
+ * fixed-size entries containing offsets into the head section, and a head section that
+ * holds a sequence of variable-sized runs. The table in the tail section serves to
+ * locate the variable-length encoded structures in the head section.
+ *
+ * The entryOffsets in the table indicate the byte offset to subtract from the regionTable
+ * pointer to arrive at the encoded region in the payload.
+ *
+ *
+ * Variable-length entries in payload
+ * ----------------------------------
+ * The entryOffsets in the region table's fixed-size entries refer to a location within the
+ * variable-length payload section. This location contains a compactly encoded "run" of
+ * mappings.
+ *
+ * Each run starts by describing the offset within the native code it starts at, and the
+ * sequence of (JSScript*, jsbytecode*) pairs active at that site. Following that, there
+ * are a number of variable-length entries encoding (nativeOffsetDelta, bytecodeOffsetDelta)
+ * pairs for the run.
+ *
+ *   VarUint32 nativeOffset;
+ *     - The offset from nativeStartAddr in the global table entry at which
+ *       the jitcode for this region starts.
+ *
+ *   Uint8_t scriptDepth;
+ *     - The depth of inlined scripts for this region.
+ *
+ *   List<VarUint32> inlineScriptPcStack;
+ *     - We encode (2 * scriptDepth) VarUint32s here. Each pair of uint32s is
+ *       taken as an index into the scriptList in the global table entry and a
+ *       pcOffset, respectively.
+ *
+ *   List<NativeAndBytecodeDelta> deltaRun;
+ *     - The rest of the entry is a deltaRun that stores a series of
+ *       variable-length encoded NativeAndBytecodeDelta datums.
+ */
+// clang-format on
+class JitcodeRegionEntry {
+ private:
+ static const unsigned MAX_RUN_LENGTH = 100;
+
+ public:
+ static void WriteHead(CompactBufferWriter& writer, uint32_t nativeOffset,
+ uint8_t scriptDepth);
+ static void ReadHead(CompactBufferReader& reader, uint32_t* nativeOffset,
+ uint8_t* scriptDepth);
+
+ static void WriteScriptPc(CompactBufferWriter& writer, uint32_t scriptIdx,
+ uint32_t pcOffset);
+ static void ReadScriptPc(CompactBufferReader& reader, uint32_t* scriptIdx,
+ uint32_t* pcOffset);
+
+ static void WriteDelta(CompactBufferWriter& writer, uint32_t nativeDelta,
+ int32_t pcDelta);
+ static void ReadDelta(CompactBufferReader& reader, uint32_t* nativeDelta,
+ int32_t* pcDelta);
+
+ // Given a pointer into an array of NativeToBytecode (and a pointer to the end
+ // of the array), compute the number of entries that would be consumed by
+ // outputting a run starting at this one.
+ static uint32_t ExpectedRunLength(const NativeToBytecode* entry,
+ const NativeToBytecode* end);
+
+ // Write a run, starting at the given NativeToBytecode entry, into the given
+ // buffer writer.
+ [[nodiscard]] static bool WriteRun(CompactBufferWriter& writer,
+ const IonEntry::ScriptList& scriptList,
+ uint32_t runLength,
+ const NativeToBytecode* entry);
+
+ // Delta Run entry formats are encoded little-endian:
+ //
+ //   byte 0
+ //   NNNN-BBB0
+ //     Single byte format. nativeDelta in [0, 15], pcDelta in [0, 7]
+ //
+ static const uint32_t ENC1_MASK = 0x1;
+ static const uint32_t ENC1_MASK_VAL = 0x0;
+
+ static const uint32_t ENC1_NATIVE_DELTA_MAX = 0xf;
+ static const unsigned ENC1_NATIVE_DELTA_SHIFT = 4;
+
+ static const uint32_t ENC1_PC_DELTA_MASK = 0x0e;
+ static const int32_t ENC1_PC_DELTA_MAX = 0x7;
+ static const unsigned ENC1_PC_DELTA_SHIFT = 1;
+
+ //   byte 1    byte 0
+ //   NNNN-NNNN BBBB-BB01
+ //     Two-byte format. nativeDelta in [0, 255], pcDelta in [0, 63]
+ //
+ static const uint32_t ENC2_MASK = 0x3;
+ static const uint32_t ENC2_MASK_VAL = 0x1;
+
+ static const uint32_t ENC2_NATIVE_DELTA_MAX = 0xff;
+ static const unsigned ENC2_NATIVE_DELTA_SHIFT = 8;
+
+ static const uint32_t ENC2_PC_DELTA_MASK = 0x00fc;
+ static const int32_t ENC2_PC_DELTA_MAX = 0x3f;
+ static const unsigned ENC2_PC_DELTA_SHIFT = 2;
+
+ //   byte 2    byte 1    byte 0
+ //   NNNN-NNNN NNNB-BBBB BBBB-B011
+ //     Three-byte format. nativeDelta in [0, 2047], pcDelta in [-512, 511]
+ //
+ static const uint32_t ENC3_MASK = 0x7;
+ static const uint32_t ENC3_MASK_VAL = 0x3;
+
+ static const uint32_t ENC3_NATIVE_DELTA_MAX = 0x7ff;
+ static const unsigned ENC3_NATIVE_DELTA_SHIFT = 13;
+
+ static const uint32_t ENC3_PC_DELTA_MASK = 0x001ff8;
+ static const int32_t ENC3_PC_DELTA_MAX = 0x1ff;
+ static const int32_t ENC3_PC_DELTA_MIN = -ENC3_PC_DELTA_MAX - 1;
+ static const unsigned ENC3_PC_DELTA_SHIFT = 3;
+
+ //   byte 3    byte 2    byte 1    byte 0
+ //   NNNN-NNNN NNNN-NNNN BBBB-BBBB BBBB-B111
+ //     Four-byte format. nativeDelta in [0, 65535],
+ //       pcDelta in [-4096, 4095]
+ static const uint32_t ENC4_MASK = 0x7;
+ static const uint32_t ENC4_MASK_VAL = 0x7;
+
+ static const uint32_t ENC4_NATIVE_DELTA_MAX = 0xffff;
+ static const unsigned ENC4_NATIVE_DELTA_SHIFT = 16;
+
+ static const uint32_t ENC4_PC_DELTA_MASK = 0x0000fff8;
+ static const int32_t ENC4_PC_DELTA_MAX = 0xfff;
+ static const int32_t ENC4_PC_DELTA_MIN = -ENC4_PC_DELTA_MAX - 1;
+ static const unsigned ENC4_PC_DELTA_SHIFT = 3;
+
+ static bool IsDeltaEncodeable(uint32_t nativeDelta, int32_t pcDelta) {
+ return (nativeDelta <= ENC4_NATIVE_DELTA_MAX) &&
+ (pcDelta >= ENC4_PC_DELTA_MIN) && (pcDelta <= ENC4_PC_DELTA_MAX);
+ }
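+
+ // Worked example, derived from the format diagrams above: a step with
+ // nativeDelta = 5 and pcDelta = 3 fits the single-byte format and would be
+ // written as
+ //   (5 << ENC1_NATIVE_DELTA_SHIFT) | (3 << ENC1_PC_DELTA_SHIFT) | ENC1_MASK_VAL
+ //     == 0b0101'0110 == 0x56.
+ // Larger deltas fall through to the two-, three-, or four-byte formats;
+ // anything outside the ENC4 bounds fails IsDeltaEncodeable().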
+
+ private:
+ const uint8_t* data_;
+ const uint8_t* end_;
+
+ // Unpacked state from jitcode entry.
+ uint32_t nativeOffset_;
+ uint8_t scriptDepth_;
+ const uint8_t* scriptPcStack_;
+ const uint8_t* deltaRun_;
+
+ void unpack();
+
+ public:
+ JitcodeRegionEntry(const uint8_t* data, const uint8_t* end)
+ : data_(data),
+ end_(end),
+ nativeOffset_(0),
+ scriptDepth_(0),
+ scriptPcStack_(nullptr),
+ deltaRun_(nullptr) {
+ MOZ_ASSERT(data_ < end_);
+ unpack();
+ MOZ_ASSERT(scriptPcStack_ < end_);
+ MOZ_ASSERT(deltaRun_ <= end_);
+ }
+
+ uint32_t nativeOffset() const { return nativeOffset_; }
+ uint32_t scriptDepth() const { return scriptDepth_; }
+
+ class ScriptPcIterator {
+ private:
+ const uint8_t* start_;
+ const uint8_t* end_;
+#ifdef DEBUG
+ uint32_t count_;
+#endif
+ uint32_t idx_;
+ const uint8_t* cur_;
+
+ public:
+ ScriptPcIterator(const uint8_t* start, const uint8_t* end, uint32_t count)
+ : start_(start),
+ end_(end),
+#ifdef DEBUG
+ count_(count),
+#endif
+ idx_(0),
+ cur_(start_) {
+ }
+
+ bool hasMore() const {
+ MOZ_ASSERT((idx_ == count_) == (cur_ == end_));
+ MOZ_ASSERT((idx_ < count_) == (cur_ < end_));
+ return cur_ < end_;
+ }
+
+ void readNext(uint32_t* scriptIdxOut, uint32_t* pcOffsetOut) {
+ MOZ_ASSERT(scriptIdxOut);
+ MOZ_ASSERT(pcOffsetOut);
+ MOZ_ASSERT(hasMore());
+
+ CompactBufferReader reader(cur_, end_);
+ ReadScriptPc(reader, scriptIdxOut, pcOffsetOut);
+
+ cur_ = reader.currentPosition();
+ MOZ_ASSERT(cur_ <= end_);
+
+ idx_++;
+ MOZ_ASSERT_IF(idx_ == count_, cur_ == end_);
+ }
+
+ void reset() {
+ idx_ = 0;
+ cur_ = start_;
+ }
+ };
+
+ ScriptPcIterator scriptPcIterator() const {
+ // End of script+pc sequence is the start of the delta run.
+ return ScriptPcIterator(scriptPcStack_, deltaRun_, scriptDepth_);
+ }
+
+ class DeltaIterator {
+ private:
+ const uint8_t* start_;
+ const uint8_t* end_;
+ const uint8_t* cur_;
+
+ public:
+ DeltaIterator(const uint8_t* start, const uint8_t* end)
+ : start_(start), end_(end), cur_(start) {}
+
+ bool hasMore() const {
+ MOZ_ASSERT(cur_ <= end_);
+ return cur_ < end_;
+ }
+
+ void readNext(uint32_t* nativeDeltaOut, int32_t* pcDeltaOut) {
+ MOZ_ASSERT(nativeDeltaOut != nullptr);
+ MOZ_ASSERT(pcDeltaOut != nullptr);
+
+ MOZ_ASSERT(hasMore());
+
+ CompactBufferReader reader(cur_, end_);
+ ReadDelta(reader, nativeDeltaOut, pcDeltaOut);
+
+ cur_ = reader.currentPosition();
+ MOZ_ASSERT(cur_ <= end_);
+ }
+
+ void reset() { cur_ = start_; }
+ };
+ DeltaIterator deltaIterator() const { return DeltaIterator(deltaRun_, end_); }
+
+ uint32_t findPcOffset(uint32_t queryNativeOffset,
+ uint32_t startPcOffset) const;
+};
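+
+// For illustration: decoding one region with the iterators above. This is a
+// hypothetical consumer sketch, not code from this file.
+//
+//   JitcodeRegionEntry region(data, end);
+//   JitcodeRegionEntry::ScriptPcIterator scriptPcIter =
+//       region.scriptPcIterator();
+//   while (scriptPcIter.hasMore()) {
+//     uint32_t scriptIdx, pcOffset;
+//     scriptPcIter.readNext(&scriptIdx, &pcOffset);
+//     // scriptIdx indexes the owning IonEntry's scriptList; pcOffset is an
+//     // offset into that script's bytecode.
+//   }
+//   JitcodeRegionEntry::DeltaIterator deltaIter = region.deltaIterator();
+//   while (deltaIter.hasMore()) {
+//     uint32_t nativeDelta;
+//     int32_t pcDelta;
+//     deltaIter.readNext(&nativeDelta, &pcDelta);
+//     // Each step advances the native offset by nativeDelta and the
+//     // innermost pc offset by pcDelta.
+//   }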
+
+class JitcodeIonTable {
+ private:
+ /* Variable length payload section "below" here. */
+ uint32_t numRegions_;
+ uint32_t regionOffsets_[1];
+
+ const uint8_t* payloadEnd() const {
+ return reinterpret_cast<const uint8_t*>(this);
+ }
+
+ public:
+ JitcodeIonTable() = delete;
+
+ uint32_t numRegions() const { return numRegions_; }
+
+ uint32_t regionOffset(uint32_t regionIndex) const {
+ MOZ_ASSERT(regionIndex < numRegions());
+ return regionOffsets_[regionIndex];
+ }
+
+ JitcodeRegionEntry regionEntry(uint32_t regionIndex) const {
+ const uint8_t* regionStart = payloadEnd() - regionOffset(regionIndex);
+ const uint8_t* regionEnd = payloadEnd();
+ if (regionIndex < numRegions_ - 1) {
+ regionEnd -= regionOffset(regionIndex + 1);
+ }
+ return JitcodeRegionEntry(regionStart, regionEnd);
+ }
+
+ uint32_t findRegionEntry(uint32_t offset) const;
+
+ const uint8_t* payloadStart() const {
+ // The beginning of the payload and the beginning of the first region are
+ // the same.
+ return payloadEnd() - regionOffset(0);
+ }
+
+ [[nodiscard]] static bool WriteIonTable(
+ CompactBufferWriter& writer, const IonEntry::ScriptList& scriptList,
+ const NativeToBytecode* start, const NativeToBytecode* end,
+ uint32_t* tableOffsetOut, uint32_t* numRegionsOut);
+};
+
+} // namespace jit
+} // namespace js
+
+#endif /* jit_JitcodeMap_h */