v8 12.4.254 (node 22.4.1)
V8 is Google's open source JavaScript engine
Loading...
Searching...
No Matches
v8-internal.h
Go to the documentation of this file.
1// Copyright 2018 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef INCLUDE_V8_INTERNAL_H_
6#define INCLUDE_V8_INTERNAL_H_
7
8#include <stddef.h>
9#include <stdint.h>
10#include <string.h>
11
12#include <atomic>
13#include <iterator>
14#include <memory>
15#include <type_traits>
16
17#include "v8config.h" // NOLINT(build/include_directory)
18
19namespace v8 {
20
21class Array;
22class Context;
23class Data;
24class Isolate;
25
26namespace internal {
27
28class Heap;
29class Isolate;
30
31typedef uintptr_t Address;
32static constexpr Address kNullAddress = 0;
33
34constexpr int KB = 1024;
35constexpr int MB = KB * 1024;
36constexpr int GB = MB * 1024;
37#ifdef V8_TARGET_ARCH_X64
38constexpr size_t TB = size_t{GB} * 1024;
39#endif
40
44const int kApiSystemPointerSize = sizeof(void*);
45const int kApiDoubleSize = sizeof(double);
46const int kApiInt32Size = sizeof(int32_t);
47const int kApiInt64Size = sizeof(int64_t);
48const int kApiSizetSize = sizeof(size_t);
49
50// Tag information for HeapObject.
51const int kHeapObjectTag = 1;
52const int kWeakHeapObjectTag = 3;
53const int kHeapObjectTagSize = 2;
54const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
56
57// Tag information for forwarding pointers stored in object headers.
58// 0b00 at the lowest 2 bits in the header indicates that the map word is a
59// forwarding pointer.
60const int kForwardingTag = 0;
61const int kForwardingTagSize = 2;
62const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;
63
64// Tag information for Smi.
65const int kSmiTag = 0;
66const int kSmiTagSize = 1;
67const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
68
69template <size_t tagged_ptr_size>
71
72constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
73constexpr uintptr_t kUintptrAllBitsSet =
74 static_cast<uintptr_t>(kIntptrAllBitsSet);
75
76// Smi constants for systems where tagged pointer is a 32-bit value.
77template <>
78struct SmiTagging<4> {
79 enum { kSmiShiftSize = 0, kSmiValueSize = 31 };
80
81 static constexpr intptr_t kSmiMinValue =
82 static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
83 static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
84
85 V8_INLINE static constexpr int SmiToInt(Address value) {
86 int shift_bits = kSmiTagSize + kSmiShiftSize;
87 // Truncate and shift down (requires >> to be sign extending).
88 return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
89 }
90 V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
91 // Is value in range [kSmiMinValue, kSmiMaxValue].
92 // Use unsigned operations in order to avoid undefined behaviour in case of
93 // signed integer overflow.
94 return (static_cast<uintptr_t>(value) -
95 static_cast<uintptr_t>(kSmiMinValue)) <=
96 (static_cast<uintptr_t>(kSmiMaxValue) -
97 static_cast<uintptr_t>(kSmiMinValue));
98 }
99};
100
101// Smi constants for systems where tagged pointer is a 64-bit value.
102template <>
103struct SmiTagging<8> {
104 enum { kSmiShiftSize = 31, kSmiValueSize = 32 };
105
106 static constexpr intptr_t kSmiMinValue =
107 static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
108 static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
109
110 V8_INLINE static constexpr int SmiToInt(Address value) {
111 int shift_bits = kSmiTagSize + kSmiShiftSize;
112 // Shift down and throw away top 32 bits.
113 return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
114 }
115 V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
116 // To be representable as a long smi, the value must be a 32-bit integer.
117 return (value == static_cast<int32_t>(value));
118 }
119};
120
121#ifdef V8_COMPRESS_POINTERS
122// See v8:7703 or src/common/ptr-compr-inl.h for details about pointer
123// compression.
124constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
125constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
126
127static_assert(
129 "Pointer compression can be enabled only for 64-bit architectures");
130const int kApiTaggedSize = kApiInt32Size;
131#else
133#endif
134
137}
138
139#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
140using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
141#else
143#endif
144
145// TODO(ishell): Consider adding kSmiShiftBits = kSmiShiftSize + kSmiTagSize
146// since it's used much more often than the individual constants.
147const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
148const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
149const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
150const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
151constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
152constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }
153constexpr bool Is64() { return kApiSystemPointerSize == sizeof(int64_t); }
154
155V8_INLINE static constexpr Address IntToSmi(int value) {
156 return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
157 kSmiTag;
158}
159
160/*
161 * Sandbox related types, constants, and functions.
162 */
// Compile-time query: true iff this build was configured with the V8 sandbox
// (V8_ENABLE_SANDBOX defined).
constexpr bool SandboxIsEnabled() {
#ifdef V8_ENABLE_SANDBOX
  constexpr bool kEnabled = true;
#else
  constexpr bool kEnabled = false;
#endif
  return kEnabled;
}
170
171// SandboxedPointers are guaranteed to point into the sandbox. This is achieved
172// for example by storing them as offset rather than as raw pointers.
174
175#ifdef V8_ENABLE_SANDBOX
176
177// Size of the sandbox, excluding the guard regions surrounding it.
178#if defined(V8_TARGET_OS_ANDROID)
179// On Android, most 64-bit devices seem to be configured with only 39 bits of
180// virtual address space for userspace. As such, limit the sandbox to 128GB (a
181// quarter of the total available address space).
182constexpr size_t kSandboxSizeLog2 = 37; // 128 GB
183#elif defined(V8_TARGET_ARCH_LOONG64)
184// Some Linux distros on LoongArch64 configured with only 40 bits of virtual
185// address space for userspace. Limit the sandbox to 256GB here.
186constexpr size_t kSandboxSizeLog2 = 38; // 256 GB
187#else
188// Everywhere else use a 1TB sandbox.
189constexpr size_t kSandboxSizeLog2 = 40; // 1 TB
190#endif // V8_TARGET_OS_ANDROID
191constexpr size_t kSandboxSize = 1ULL << kSandboxSizeLog2;
192
193// Required alignment of the sandbox. For simplicity, we require the
194// size of the guard regions to be a multiple of this, so that this specifies
195// the alignment of the sandbox including and excluding surrounding guard
196// regions. The alignment requirement is due to the pointer compression cage
197// being located at the start of the sandbox.
198constexpr size_t kSandboxAlignment = kPtrComprCageBaseAlignment;
199
200// Sandboxed pointers are stored inside the heap as offset from the sandbox
201// base shifted to the left. This way, it is guaranteed that the offset is
202// smaller than the sandbox size after shifting it to the right again. This
203// constant specifies the shift amount.
204constexpr uint64_t kSandboxedPointerShift = 64 - kSandboxSizeLog2;
205
206// Size of the guard regions surrounding the sandbox. This assumes a worst-case
207// scenario of a 32-bit unsigned index used to access an array of 64-bit
208// values.
209constexpr size_t kSandboxGuardRegionSize = 32ULL * GB;
210
211static_assert((kSandboxGuardRegionSize % kSandboxAlignment) == 0,
212 "The size of the guard regions around the sandbox must be a "
213 "multiple of its required alignment.");
214
215// On OSes where reserving virtual memory is too expensive to reserve the
216// entire address space backing the sandbox, notably Windows pre 8.1, we create
217// a partially reserved sandbox that doesn't actually reserve most of the
218// memory, and so doesn't have the desired security properties as unrelated
219// memory allocations could end up inside of it, but which still ensures that
220// objects that should be located inside the sandbox are allocated within
221// kSandboxSize bytes from the start of the sandbox. The minimum size of the
222// region that is actually reserved for such a sandbox is specified by this
223// constant and should be big enough to contain the pointer compression cage as
224// well as the ArrayBuffer partition.
225constexpr size_t kSandboxMinimumReservationSize = 8ULL * GB;
226
227static_assert(kSandboxMinimumReservationSize > kPtrComprCageReservationSize,
228 "The minimum reservation size for a sandbox must be larger than "
229 "the pointer compression cage contained within it.");
230
231// The maximum buffer size allowed inside the sandbox. This is mostly dependent
232// on the size of the guard regions around the sandbox: an attacker must not be
233// able to construct a buffer that appears larger than the guard regions and
234// thereby "reach out of" the sandbox.
235constexpr size_t kMaxSafeBufferSizeForSandbox = 32ULL * GB - 1;
236static_assert(kMaxSafeBufferSizeForSandbox <= kSandboxGuardRegionSize,
237 "The maximum allowed buffer size must not be larger than the "
238 "sandbox's guard regions");
239
240constexpr size_t kBoundedSizeShift = 29;
241static_assert(1ULL << (64 - kBoundedSizeShift) ==
242 kMaxSafeBufferSizeForSandbox + 1,
243 "The maximum size of a BoundedSize must be synchronized with the "
244 "kMaxSafeBufferSizeForSandbox");
245
246#endif // V8_ENABLE_SANDBOX
247
248#ifdef V8_COMPRESS_POINTERS
249
250#ifdef V8_TARGET_OS_ANDROID
251// The size of the virtual memory reservation for an external pointer table.
252// This determines the maximum number of entries in a table. Using a maximum
253// size allows omitting bounds checks on table accesses if the indices are
254// guaranteed (e.g. through shifting) to be below the maximum index. This
255// value must be a power of two.
256constexpr size_t kExternalPointerTableReservationSize = 512 * MB;
257
258// The external pointer table indices stored in HeapObjects as external
259// pointers are shifted to the left by this amount to guarantee that they are
260// smaller than the maximum table size.
261constexpr uint32_t kExternalPointerIndexShift = 6;
262#else
263constexpr size_t kExternalPointerTableReservationSize = 1024 * MB;
264constexpr uint32_t kExternalPointerIndexShift = 5;
265#endif // V8_TARGET_OS_ANDROID
266
267// The maximum number of entries in an external pointer table.
268constexpr int kExternalPointerTableEntrySize = 8;
269constexpr int kExternalPointerTableEntrySizeLog2 = 3;
270constexpr size_t kMaxExternalPointers =
271 kExternalPointerTableReservationSize / kExternalPointerTableEntrySize;
272static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
273 "kExternalPointerTableReservationSize and "
274 "kExternalPointerIndexShift don't match");
275
276#else // !V8_COMPRESS_POINTERS
277
278// Needed for the V8.SandboxedExternalPointersCount histogram.
279constexpr size_t kMaxExternalPointers = 0;
280
281#endif // V8_COMPRESS_POINTERS
282
283// An ExternalPointerHandle represents an (opaque) reference to an external
284// pointer that can be stored inside the sandbox. An ExternalPointerHandle has
285// meaning only in combination with an (active) Isolate as it references an
286// external pointer stored in the currently active Isolate's
287// ExternalPointerTable. Internally, an ExternalPointerHandle is simply an
288// index into an ExternalPointerTable that is shifted to the left to guarantee
289// that it is smaller than the size of the table.
290using ExternalPointerHandle = uint32_t;
291
292// ExternalPointers point to objects located outside the sandbox. When the V8
293// sandbox is enabled, these are stored on heap as ExternalPointerHandles,
294// otherwise they are simply raw pointers.
295#ifdef V8_ENABLE_SANDBOX
297#else
299#endif
300
303
304//
305// External Pointers.
306//
307// When the sandbox is enabled, external pointers are stored in an external
308// pointer table and are referenced from HeapObjects through an index (a
309// "handle"). When stored in the table, the pointers are tagged with per-type
310// tags to prevent type confusion attacks between different external objects.
311// Besides type information bits, these tags also contain the GC marking bit
312// which indicates whether the pointer table entry is currently alive. When a
313// pointer is written into the table, the tag is ORed into the top bits. When
314// that pointer is later loaded from the table, it is ANDed with the inverse of
315// the expected tag. If the expected and actual type differ, this will leave
316// some of the top bits of the pointer set, rendering the pointer inaccessible.
317// The AND operation also removes the GC marking bit from the pointer.
318//
319// The tags are constructed such that UNTAG(TAG(0, T1), T2) != 0 for any two
320// (distinct) tags T1 and T2. In practice, this is achieved by generating tags
321// that all have the same number of zeroes and ones but different bit patterns.
322// With N type tag bits, this allows for (N choose N/2) possible type tags.
323// Besides the type tag bits, the tags also have the GC marking bit set so that
324// the marking bit is automatically set when a pointer is written into the
325// external pointer table (in which case it is clearly alive) and is cleared
326// when the pointer is loaded. The exception to this is the free entry tag,
327// which doesn't have the mark bit set, as the entry is not alive. This
328// construction allows performing the type check and removing GC marking bits
329// from the pointer in one efficient operation (bitwise AND). The number of
330// available bits is limited in the following way: on x64, bits [47, 64) are
331// generally available for tagging (userspace has 47 address bits available).
332// On Arm64, userspace typically has a 40 or 48 bit address space. However, due
333// to top-byte ignore (TBI) and memory tagging (MTE), the top byte is unusable
334// for type checks as type-check failures would go unnoticed or collide with
335// MTE bits. Some bits of the top byte can, however, still be used for the GC
336// marking bit. The bits available for the type tags are therefore limited to
337// [48, 56), i.e. (8 choose 4) = 70 different types.
338// The following options exist to increase the number of possible types:
339// - Using multiple ExternalPointerTables since tags can safely be reused
340// across different tables
341// - Using "extended" type checks, where additional type information is stored
342// either in an adjacent pointer table entry or at the pointed-to location
343// - Using a different tagging scheme, for example based on XOR which would
344// allow for 2**8 different tags but require a separate operation to remove
345// the marking bit
346//
347// The external pointer sandboxing mechanism ensures that every access to an
348// external pointer field will result in a valid pointer of the expected type
349// even in the presence of an attacker able to corrupt memory inside the
350// sandbox. However, if any data related to the external object is stored
351// inside the sandbox it may still be corrupted and so must be validated before
352// use or moved into the external object. Further, an attacker will always be
353// able to substitute different external pointers of the same type for each
354// other. Therefore, code using external pointers must be written in a
355// "substitution-safe" way, i.e. it must always be possible to substitute
356// external pointers of the same type without causing memory corruption outside
357// of the sandbox. Generally this is achieved by referencing any group of
358// related external objects through a single external pointer.
359//
360// Currently we use bit 62 for the marking bit which should always be unused as
361// it's part of the non-canonical address range. When Arm's top-byte ignore
362// (TBI) is enabled, this bit will be part of the ignored byte, and we assume
363// that the Embedder is not using this byte (really only this one bit) for any
364// other purpose. This bit also does not collide with the memory tagging
365// extension (MTE) which would use bits [56, 60).
366//
367// External pointer tables are also available even when the sandbox is off but
368// pointer compression is on. In that case, the mechanism can be used to ease
369// alignment requirements as it turns unaligned 64-bit raw pointers into
370// aligned 32-bit indices. To "opt-in" to the external pointer table mechanism
371// for this purpose, instead of using the ExternalPointer accessors one needs to
372// use ExternalPointerHandles directly and use them to access the pointers in an
373// ExternalPointerTable.
374constexpr uint64_t kExternalPointerMarkBit = 1ULL << 62;
375constexpr uint64_t kExternalPointerTagMask = 0x40ff000000000000;
376constexpr uint64_t kExternalPointerTagMaskWithoutMarkBit = 0xff000000000000;
377constexpr uint64_t kExternalPointerTagShift = 48;
378
379// All possible 8-bit type tags.
380// These are sorted so that tags can be grouped together and it can efficiently
381// be checked if a tag belongs to a given group. See for example the
382// IsSharedExternalPointerType routine.
383constexpr uint64_t kAllExternalPointerTypeTags[] = {
384 0b00001111, 0b00010111, 0b00011011, 0b00011101, 0b00011110, 0b00100111,
385 0b00101011, 0b00101101, 0b00101110, 0b00110011, 0b00110101, 0b00110110,
386 0b00111001, 0b00111010, 0b00111100, 0b01000111, 0b01001011, 0b01001101,
387 0b01001110, 0b01010011, 0b01010101, 0b01010110, 0b01011001, 0b01011010,
388 0b01011100, 0b01100011, 0b01100101, 0b01100110, 0b01101001, 0b01101010,
389 0b01101100, 0b01110001, 0b01110010, 0b01110100, 0b01111000, 0b10000111,
390 0b10001011, 0b10001101, 0b10001110, 0b10010011, 0b10010101, 0b10010110,
391 0b10011001, 0b10011010, 0b10011100, 0b10100011, 0b10100101, 0b10100110,
392 0b10101001, 0b10101010, 0b10101100, 0b10110001, 0b10110010, 0b10110100,
393 0b10111000, 0b11000011, 0b11000101, 0b11000110, 0b11001001, 0b11001010,
394 0b11001100, 0b11010001, 0b11010010, 0b11010100, 0b11011000, 0b11100001,
395 0b11100010, 0b11100100, 0b11101000, 0b11110000};
396
397#define TAG(i) \
398 ((kAllExternalPointerTypeTags[i] << kExternalPointerTagShift) | \
399 kExternalPointerMarkBit)
400
401// clang-format off
402
403// When adding new tags, please ensure that the code using these tags is
404// "substitution-safe", i.e. still operate safely if external pointers of the
405// same type are swapped by an attacker. See comment above for more details.
406
407// Shared external pointers are owned by the shared Isolate and stored in the
408// shared external pointer table associated with that Isolate, where they can
409// be accessed from multiple threads at the same time. The objects referenced
410// in this way must therefore always be thread-safe.
411#define SHARED_EXTERNAL_POINTER_TAGS(V) \
412 V(kFirstSharedTag, TAG(0)) \
413 V(kWaiterQueueNodeTag, TAG(0)) \
414 V(kExternalStringResourceTag, TAG(1)) \
415 V(kExternalStringResourceDataTag, TAG(2)) \
416 V(kLastSharedTag, TAG(2))
417
418// External pointers using these tags are kept in a per-Isolate external
419// pointer table and can only be accessed when this Isolate is active.
420#define PER_ISOLATE_EXTERNAL_POINTER_TAGS(V) \
421 V(kForeignForeignAddressTag, TAG(10)) \
422 V(kNativeContextMicrotaskQueueTag, TAG(11)) \
423 V(kEmbedderDataSlotPayloadTag, TAG(12)) \
424/* This tag essentially stands for a `void*` pointer in the V8 API, and */ \
425/* it is the Embedder's responsibility to ensure type safety (against */ \
426/* substitution) and lifetime validity of these objects. */ \
427 V(kExternalObjectValueTag, TAG(13)) \
428 V(kFunctionTemplateInfoCallbackTag, TAG(14)) \
429 V(kAccessorInfoGetterTag, TAG(15)) \
430 V(kAccessorInfoSetterTag, TAG(16)) \
431 V(kWasmInternalFunctionCallTargetTag, TAG(17)) \
432 V(kWasmTypeInfoNativeTypeTag, TAG(18)) \
433 V(kWasmExportedFunctionDataSignatureTag, TAG(19)) \
434 V(kWasmContinuationJmpbufTag, TAG(20)) \
435 V(kWasmIndirectFunctionTargetTag, TAG(21)) \
436 V(kArrayBufferExtensionTag, TAG(22))
437
438// All external pointer tags.
439#define ALL_EXTERNAL_POINTER_TAGS(V) \
440 SHARED_EXTERNAL_POINTER_TAGS(V) \
441 PER_ISOLATE_EXTERNAL_POINTER_TAGS(V)
442
443#define EXTERNAL_POINTER_TAG_ENUM(Name, Tag) Name = Tag,
444#define MAKE_TAG(HasMarkBit, TypeTag) \
445 ((static_cast<uint64_t>(TypeTag) << kExternalPointerTagShift) | \
446 (HasMarkBit ? kExternalPointerMarkBit : 0))
447enum ExternalPointerTag : uint64_t {
448 // Empty tag value. Mostly used as placeholder.
450 // External pointer tag that will match any external pointer. Use with care!
452 // The free entry tag has all type bits set so every type check with a
453 // different type fails. It also doesn't have the mark bit set as free
454 // entries are (by definition) not alive.
456 // Evacuation entries are used during external pointer table compaction.
458
460};
461
462#undef MAKE_TAG
463#undef TAG
464#undef EXTERNAL_POINTER_TAG_ENUM
465
466// clang-format on
467
468// True if the external pointer must be accessed from the shared isolate's
469// external pointer table.
470V8_INLINE static constexpr bool IsSharedExternalPointerType(
471 ExternalPointerTag tag) {
472 return tag >= kFirstSharedTag && tag <= kLastSharedTag;
473}
474
475// True if the external pointer may live in a read-only object, in which case
476// the table entry will be in the shared read-only segment of the external
477// pointer table.
478V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType(
479 ExternalPointerTag tag) {
480 return tag == kAccessorInfoGetterTag || tag == kAccessorInfoSetterTag ||
481 tag == kFunctionTemplateInfoCallbackTag;
482}
483
484// Sanity checks.
485#define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
486 static_assert(IsSharedExternalPointerType(Tag));
487#define CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
488 static_assert(!IsSharedExternalPointerType(Tag));
489
492
493#undef CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS
494#undef CHECK_SHARED_EXTERNAL_POINTER_TAGS
495
496#undef SHARED_EXTERNAL_POINTER_TAGS
497#undef EXTERNAL_POINTER_TAGS
498
499//
500// Indirect Pointers.
501//
502// When the sandbox is enabled, indirect pointers are used to reference
503// HeapObjects that live outside of the sandbox (but are still managed by V8's
504// garbage collector). When object A references an object B through an indirect
505// pointer, object A will contain a IndirectPointerHandle, i.e. a shifted
506// 32-bit index, which identifies an entry in a pointer table (either the
507// trusted pointer table for TrustedObjects, or the code pointer table if it is
508// a Code object). This table entry then contains the actual pointer to object
509// B. Further, object B owns this pointer table entry, and it is responsible
510// for updating the "self-pointer" in the entry when it is relocated in memory.
511// This way, in contrast to "normal" pointers, indirect pointers never need to
512// be tracked by the GC (i.e. there is no remembered set for them).
513// These pointers do not exist when the sandbox is disabled.
514
515// An IndirectPointerHandle represents a 32-bit index into a pointer table.
516using IndirectPointerHandle = uint32_t;
517
518// A null handle always references an entry that contains nullptr.
520
521// When the sandbox is enabled, indirect pointers are used to implement:
522// - TrustedPointers: an indirect pointer using the trusted pointer table (TPT)
523// and referencing a TrustedObject in one of the trusted heap spaces.
524// - CodePointers, an indirect pointer using the code pointer table (CPT) and
525// referencing a Code object together with its instruction stream.
526
527//
528// Trusted Pointers.
529//
530// A pointer to a TrustedObject.
531// When the sandbox is enabled, these are indirect pointers using the trusted
532// pointer table (TPT). They are used to reference trusted objects (located in
533// one of V8's trusted heap spaces, outside of the sandbox) from inside the
534// sandbox in a memory-safe way. When the sandbox is disabled, these are
535// regular tagged pointers.
537
538// The size of the virtual memory reservation for the trusted pointer table.
539// As with the external pointer table, a maximum table size in combination with
540// shifted indices allows omitting bounds checks.
542
543// The trusted pointer handles are stored shifted to the left by this amount
544// to guarantee that they are smaller than the maximum table size.
545constexpr uint32_t kTrustedPointerHandleShift = 9;
546
547// A null handle always references an entry that contains nullptr.
550
551// The maximum number of entries in a trusted pointer table.
554constexpr size_t kMaxTrustedPointers =
556static_assert((1 << (32 - kTrustedPointerHandleShift)) == kMaxTrustedPointers,
557 "kTrustedPointerTableReservationSize and "
558 "kTrustedPointerHandleShift don't match");
559
560//
561// Code Pointers.
562//
563// A pointer to a Code object.
564// Essentially a specialized version of a trusted pointer that (when the
565// sandbox is enabled) uses the code pointer table (CPT) instead of the TPT.
566// Each entry in the CPT contains both a pointer to a Code object as well as a
567// pointer to the Code's entrypoint. This allows calling/jumping into Code with
568// one fewer memory access (compared to the case where the entrypoint pointer
569// first needs to be loaded from the Code object). As such, a CodePointerHandle
570// can be used both to obtain the referenced Code object and to directly load
571// its entrypoint.
572//
573// When the sandbox is disabled, these are regular tagged pointers.
575
576// The size of the virtual memory reservation for the code pointer table.
577// As with the other tables, a maximum table size in combination with shifted
578// indices allows omitting bounds checks.
579constexpr size_t kCodePointerTableReservationSize = 16 * MB;
580
581// Code pointer handles are shifted by a different amount than indirect pointer
582// handles as the tables have a different maximum size.
583constexpr uint32_t kCodePointerHandleShift = 12;
584
585// A null handle always references an entry that contains nullptr.
587
588// It can sometimes be necessary to distinguish a code pointer handle from a
589// trusted pointer handle. A typical example would be a union trusted pointer
590// field that can refer to both Code objects and other trusted objects. To
591// support these use-cases, we use a simple marking scheme where some of the
592// low bits of a code pointer handle are set, while they will be unset on a
593// trusted pointer handle. This way, the correct table to resolve the handle
594// can be determined even in the absence of a type tag.
595constexpr uint32_t kCodePointerHandleMarker = 0x1;
596static_assert(kCodePointerHandleShift > 0);
597static_assert(kTrustedPointerHandleShift > 0);
598
599// The maximum number of entries in a code pointer table.
600constexpr int kCodePointerTableEntrySize = 16;
602constexpr size_t kMaxCodePointers =
604static_assert(
606 "kCodePointerTableReservationSize and kCodePointerHandleShift don't match");
607
610
611// Constants that can be used to mark places that should be modified once
612// certain types of objects are moved out of the sandbox and into trusted space.
618
619// {obj} must be the raw tagged pointer representation of a HeapObject
620// that's guaranteed to never be in ReadOnlySpace.
622
623// Returns if we need to throw when an error occurs. This infers the language
624// mode based on the current context and the closure. This returns true if the
625// language mode is strict.
626V8_EXPORT bool ShouldThrowOnError(internal::Isolate* isolate);
633#ifdef V8_MAP_PACKING
634 V8_INLINE static constexpr Address UnpackMapWord(Address mapword) {
635 // TODO(wenyuzhao): Clear header metadata.
636 return mapword ^ kMapWordXorMask;
637 }
638#endif
639
640 public:
641 // These values match non-compiler-dependent values defined within
642 // the implementation of v8.
643 static const int kHeapObjectMapOffset = 0;
645 static const int kStringResourceOffset =
647
649 static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
650 static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
653#ifdef V8_ENABLE_SANDBOX
655#else
657#endif
660 static const int kStringEncodingMask = 0x8;
661 static const int kExternalTwoByteRepresentationTag = 0x02;
662 static const int kExternalOneByteRepresentationTag = 0x0a;
663
664 static const uint32_t kNumIsolateDataSlots = 4;
666 static const int kNumberOfBooleanFlags = 6;
667 static const int kErrorMessageParamSize = 1;
668 static const int kTablesAlignmentPaddingSize = 1;
673 static const int kHandleScopeDataSize =
675
676 // ExternalPointerTable and TrustedPointerTable layout guarantees.
681
682 // IsolateData layout guarantees.
683 static const int kIsolateCageBaseOffset = 0;
684 static const int kIsolateStackGuardOffset =
686 static const int kVariousBooleanFlagsOffset =
688 static const int kErrorMessageParamOffset =
693 static const int kBuiltinTier0TableOffset =
695 static const int kNewAllocationInfoOffset =
697 static const int kOldAllocationInfoOffset =
699
715 static const int kIsolateEmbedderDataOffset =
717#ifdef V8_COMPRESS_POINTERS
718 static const int kIsolateExternalPointerTableOffset =
720 static const int kIsolateSharedExternalPointerTableAddressOffset =
721 kIsolateExternalPointerTableOffset + kExternalPointerTableSize;
722#ifdef V8_ENABLE_SANDBOX
723 static const int kIsolateTrustedCageBaseOffset =
724 kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
725 static const int kIsolateTrustedPointerTableOffset =
726 kIsolateTrustedCageBaseOffset + kApiSystemPointerSize;
728 kIsolateTrustedPointerTableOffset + kTrustedPointerTableSize;
729#else
731 kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
732#endif // V8_ENABLE_SANDBOX
733#else
736#endif // V8_COMPRESS_POINTERS
739
741 static const int kWasm64OOBOffsetOffset =
744 static const int kIsolateRootsOffset =
745 kWasm64OOBOffsetOffset + sizeof(int64_t);
746
747#if V8_STATIC_ROOTS_BOOL
748
749// These constants are copied from static-roots.h and guarded by static asserts.
750#define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
751 V(UndefinedValue, 0x69) \
752 V(NullValue, 0x85) \
753 V(TrueValue, 0xc9) \
754 V(FalseValue, 0xad) \
755 V(EmptyString, 0xa1) \
756 V(TheHoleValue, 0x719)
757
758 using Tagged_t = uint32_t;
// Compressed (Tagged_t) values of selected read-only roots, mirrored from
// static-roots.h; per the note above this list, they are guarded elsewhere
// by static asserts.
759 struct StaticReadOnlyRoot {
760#define DEF_ROOT(name, value) static constexpr Tagged_t k##name = value;
761 EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT)
762#undef DEF_ROOT
763
// Inclusive range of compressed map values covering the string maps.
764 static constexpr Tagged_t kFirstStringMap = 0xe5;
765 static constexpr Tagged_t kLastStringMap = 0x47d;
766
767#define PLUSONE(...) +1
// PLUSONE counts the list entries; the leading 2 presumably accounts for
// kFirstStringMap/kLastStringMap — TODO confirm.
768 static constexpr size_t kNumberOfExportedStaticRoots =
769 2 + EXPORTED_STATIC_ROOTS_PTR_LIST(PLUSONE);
770#undef PLUSONE
771 };
772
773#endif // V8_STATIC_ROOTS_BOOL
774
775 static const int kUndefinedValueRootIndex = 4;
776 static const int kTheHoleValueRootIndex = 5;
777 static const int kNullValueRootIndex = 6;
778 static const int kTrueValueRootIndex = 7;
779 static const int kFalseValueRootIndex = 8;
780 static const int kEmptyStringRootIndex = 9;
781
783 static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
784 static const int kNodeStateMask = 0x3;
785 static const int kNodeStateIsWeakValue = 2;
786
787 static const int kFirstNonstringType = 0x80;
788 static const int kOddballType = 0x83;
789 static const int kForeignType = 0xcc;
790 static const int kJSSpecialApiObjectType = 0x410;
791 static const int kJSObjectType = 0x421;
792 static const int kFirstJSApiObjectType = 0x422;
793 static const int kLastJSApiObjectType = 0x80A;
794 // Defines a range [kFirstEmbedderJSApiObjectType, kJSApiObjectTypesCount]
795 // of JSApiObject instance type values that an embedder can use.
796 static const int kFirstEmbedderJSApiObjectType = 0;
799
800 static const int kUndefinedOddballKind = 4;
801 static const int kNullOddballKind = 3;
802
803 // Constants used by PropertyCallbackInfo to check if we should throw when an
804 // error occurs.
805 static const int kThrowOnError = 0;
806 static const int kDontThrow = 1;
807 static const int kInferShouldThrowMode = 2;
808
809 // Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an
810 // incremental GC once the external memory reaches this limit.
811 static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
812
813#ifdef V8_MAP_PACKING
814 static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
815 // The lowest two bits of mapwords are always `0b10`
816 static const uintptr_t kMapWordSignature = 0b10;
817 // XORing a (non-compressed) map with this mask ensures that the two
818 // low-order bits are 0b10. The 0 at the end makes this look like a Smi,
819 // although real Smis have all lower 32 bits unset. We only rely on these
820 // values passing as Smis in very few places.
821 static const int kMapWordXorMask = 0b11;
822#endif
823
825 V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
826#ifdef V8_ENABLE_CHECKS
827 CheckInitializedImpl(isolate);
828#endif
829 }
830
831 V8_INLINE static constexpr bool HasHeapObjectTag(Address value) {
832 return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
833 }
834
835 V8_INLINE static constexpr int SmiValue(Address value) {
836 return PlatformSmiTagging::SmiToInt(value);
837 }
838
839 V8_INLINE static constexpr Address IntToSmi(int value) {
840 return internal::IntToSmi(value);
841 }
842
843 V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
844 return PlatformSmiTagging::IsValidSmi(value);
845 }
846
847#if V8_STATIC_ROOTS_BOOL
848 V8_INLINE static bool is_identical(Address obj, Tagged_t constant) {
849 return static_cast<Tagged_t>(obj) == constant;
850 }
851
852 V8_INLINE static bool CheckInstanceMapRange(Address obj, Tagged_t first_map,
853 Tagged_t last_map) {
854 auto map = ReadRawField<Tagged_t>(obj, kHeapObjectMapOffset);
855#ifdef V8_MAP_PACKING
856 map = UnpackMapWord(map);
857#endif
858 return map >= first_map && map <= last_map;
859 }
860#endif
861
864#ifdef V8_MAP_PACKING
865 map = UnpackMapWord(map);
866#endif
867 return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
868 }
869
871 if (!HasHeapObjectTag(obj)) return kNullAddress;
873#ifdef V8_MAP_PACKING
874 map = UnpackMapWord(map);
875#endif
876 return map;
877 }
878
881 }
882
883 V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
884 int representation = (instance_type & kStringRepresentationAndEncodingMask);
885 return representation == kExternalTwoByteRepresentationTag;
886 }
887
888 V8_INLINE static constexpr bool CanHaveInternalField(int instance_type) {
889 static_assert(kJSObjectType + 1 == kFirstJSApiObjectType);
890 static_assert(kJSObjectType < kLastJSApiObjectType);
892 // Check for IsJSObject() || IsJSSpecialApiObject() || IsJSApiObject()
893 return instance_type == kJSSpecialApiObjectType ||
894 // inlined version of base::IsInRange
895 (static_cast<unsigned>(static_cast<unsigned>(instance_type) -
896 static_cast<unsigned>(kJSObjectType)) <=
897 static_cast<unsigned>(kLastJSApiObjectType - kJSObjectType));
898 }
899
900 V8_INLINE static uint8_t GetNodeFlag(Address* obj, int shift) {
901 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
902 return *addr & static_cast<uint8_t>(1U << shift);
903 }
904
905 V8_INLINE static void UpdateNodeFlag(Address* obj, bool value, int shift) {
906 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
907 uint8_t mask = static_cast<uint8_t>(1U << shift);
908 *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
909 }
910
911 V8_INLINE static uint8_t GetNodeState(Address* obj) {
912 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
913 return *addr & kNodeStateMask;
914 }
915
916 V8_INLINE static void UpdateNodeState(Address* obj, uint8_t value) {
917 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
918 *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
919 }
920
921 V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
922 void* data) {
923 Address addr = reinterpret_cast<Address>(isolate) +
925 *reinterpret_cast<void**>(addr) = data;
926 }
927
928 V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
929 uint32_t slot) {
930 Address addr = reinterpret_cast<Address>(isolate) +
932 return *reinterpret_cast<void* const*>(addr);
933 }
934
936 Address addr =
937 reinterpret_cast<Address>(isolate) + kIsolateLongTaskStatsCounterOffset;
938 ++(*reinterpret_cast<size_t*>(addr));
939 }
940
941 V8_INLINE static Address* GetRootSlot(v8::Isolate* isolate, int index) {
942 Address addr = reinterpret_cast<Address>(isolate) + kIsolateRootsOffset +
943 index * kApiSystemPointerSize;
944 return reinterpret_cast<Address*>(addr);
945 }
946
  // Returns the root object stored at |index| in the isolate's root table.
  // With static roots enabled, the exported read-only roots have
  // compile-time-constant compressed offsets, so they are materialized as
  // cage_base + offset without touching the root table at all.
  V8_INLINE static Address GetRoot(v8::Isolate* isolate, int index) {
#if V8_STATIC_ROOTS_BOOL
    Address base = *reinterpret_cast<Address*>(
        reinterpret_cast<uintptr_t>(isolate) + kIsolateCageBaseOffset);
    switch (index) {
      // Expand one `case` per exported static root.
#define DECOMPRESS_ROOT(name, ...) \
  case k##name##RootIndex:         \
    return base + StaticReadOnlyRoot::k##name;
      EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT)
#undef DECOMPRESS_ROOT
#undef EXPORTED_STATIC_ROOTS_PTR_LIST
      default:
        // Not an exported static root — fall through to the table load.
        break;
    }
#endif  // V8_STATIC_ROOTS_BOOL
    return *GetRootSlot(isolate, index);
  }
964
965#ifdef V8_ENABLE_SANDBOX
966 V8_INLINE static Address* GetExternalPointerTableBase(v8::Isolate* isolate) {
967 Address addr = reinterpret_cast<Address>(isolate) +
968 kIsolateExternalPointerTableOffset +
970 return *reinterpret_cast<Address**>(addr);
971 }
972
973 V8_INLINE static Address* GetSharedExternalPointerTableBase(
974 v8::Isolate* isolate) {
975 Address addr = reinterpret_cast<Address>(isolate) +
976 kIsolateSharedExternalPointerTableAddressOffset;
977 addr = *reinterpret_cast<Address*>(addr);
979 return *reinterpret_cast<Address**>(addr);
980 }
981#endif
982
983 template <typename T>
984 V8_INLINE static T ReadRawField(Address heap_object_ptr, int offset) {
985 Address addr = heap_object_ptr + offset - kHeapObjectTag;
986#ifdef V8_COMPRESS_POINTERS
987 if (sizeof(T) > kApiTaggedSize) {
988 // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
989 // fields (external pointers, doubles and BigInt data) are only
990 // kTaggedSize aligned so we have to use unaligned pointer friendly way of
991 // accessing them in order to avoid undefined behavior in C++ code.
992 T r;
993 memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
994 return r;
995 }
996#endif
997 return *reinterpret_cast<const T*>(addr);
998 }
999
1001 int offset) {
1002#ifdef V8_COMPRESS_POINTERS
1003 uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
1004 Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
1005 return base + static_cast<Address>(static_cast<uintptr_t>(value));
1006#else
1007 return ReadRawField<Address>(heap_object_ptr, offset);
1008#endif
1009 }
1010
1012 int offset) {
1013#ifdef V8_COMPRESS_POINTERS
1014 uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
1015 return static_cast<Address>(static_cast<uintptr_t>(value));
1016#else
1017 return ReadRawField<Address>(heap_object_ptr, offset);
1018#endif
1019 }
1020
1022#ifdef V8_ENABLE_SANDBOX
1023 return reinterpret_cast<v8::Isolate*>(
1025#else
1026 // Not used in non-sandbox mode.
1027 return nullptr;
1028#endif
1029 }
1030
1031 template <ExternalPointerTag tag>
1033 Address heap_object_ptr,
1034 int offset) {
1035#ifdef V8_ENABLE_SANDBOX
1036 static_assert(tag != kExternalPointerNullTag);
1037 // See src/sandbox/external-pointer-table-inl.h. Logic duplicated here so
1038 // it can be inlined and doesn't require an additional call.
1039 Address* table = IsSharedExternalPointerType(tag)
1040 ? GetSharedExternalPointerTableBase(isolate)
1041 : GetExternalPointerTableBase(isolate);
1043 ReadRawField<ExternalPointerHandle>(heap_object_ptr, offset);
1044 uint32_t index = handle >> kExternalPointerIndexShift;
1045 std::atomic<Address>* ptr =
1046 reinterpret_cast<std::atomic<Address>*>(&table[index]);
1047 Address entry = std::atomic_load_explicit(ptr, std::memory_order_relaxed);
1048 return entry & ~tag;
1049#else
1050 return ReadRawField<Address>(heap_object_ptr, offset);
1051#endif // V8_ENABLE_SANDBOX
1052 }
1053
1054#ifdef V8_COMPRESS_POINTERS
1055 V8_INLINE static Address GetPtrComprCageBaseFromOnHeapAddress(Address addr) {
1056 return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
1057 }
1058
1059 V8_INLINE static uint32_t CompressTagged(Address value) {
1060 return static_cast<uint32_t>(value);
1061 }
1062
1063 V8_INLINE static Address DecompressTaggedField(Address heap_object_ptr,
1064 uint32_t value) {
1065 Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
1066 return base + static_cast<Address>(static_cast<uintptr_t>(value));
1067 }
1068
1069#endif // V8_COMPRESS_POINTERS
1070};
1071
1072// Only perform cast check for types derived from v8::Data since
1073// other types do not implement the Cast method.
1074template <bool PerformCheck>
1076 template <class T>
1077 static void Perform(T* data);
1078};
1079
1080template <>
1081template <class T>
1083 T::Cast(data);
1084}
1085
1086template <>
1087template <class T>
1089
1090template <class T>
1093 !std::is_same<Data, std::remove_cv_t<T>>::value>::Perform(data);
1094}
1095
1096// A base class for backing stores, which is needed due to vagaries of
1097// how static casts work with std::shared_ptr.
1099
1100// The maximum value in enum GarbageCollectionReason, defined in heap.h.
1101// This is needed for histograms sampling garbage collection reasons.
1103
1104// Base class for the address block allocator compatible with standard
1105// containers, which registers its allocated range as strong roots.
1107 public:
1108 Heap* heap() const { return heap_; }
1109
1110 bool operator==(const StrongRootAllocatorBase& other) const {
1111 return heap_ == other.heap_;
1112 }
1113 bool operator!=(const StrongRootAllocatorBase& other) const {
1114 return heap_ != other.heap_;
1115 }
1116
1117 protected:
1118 explicit StrongRootAllocatorBase(Heap* heap) : heap_(heap) {}
1120
1121 // Allocate/deallocate a range of n elements of type internal::Address.
1123 void deallocate_impl(Address* p, size_t n) noexcept;
1124
1125 private:
1126 Heap* heap_;
1127};
1128
1129// The general version of this template behaves just as std::allocator, with
1130// the exception that the constructor takes the isolate as parameter. Only
1131// specialized versions, e.g., internal::StrongRootAllocator<internal::Address>
1132// and internal::StrongRootAllocator<v8::Local<T>> register the allocated range
1133// as strong roots.
1134template <typename T>
1136 private std::allocator<T> {
1137 public:
1138 using value_type = T;
1139
1142 : StrongRootAllocatorBase(isolate) {}
1143 template <typename U>
1145 : StrongRootAllocatorBase(other) {}
1146
1147 using std::allocator<T>::allocate;
1148 using std::allocator<T>::deallocate;
1149};
1150
1151// A class of iterators that wrap some different iterator type.
1152// If specified, ElementType is the type of element accessed by the wrapper
1153// iterator; in this case, the actual reference and pointer types of Iterator
1154// must be convertible to ElementType& and ElementType*, respectively.
1155template <typename Iterator, typename ElementType = void>
1157 public:
1158 static_assert(
1159 !std::is_void_v<ElementType> ||
1160 (std::is_convertible_v<typename std::iterator_traits<Iterator>::pointer,
1161 ElementType*> &&
1162 std::is_convertible_v<typename std::iterator_traits<Iterator>::reference,
1163 ElementType&>));
1164
1166 typename std::iterator_traits<Iterator>::iterator_category;
1168 typename std::iterator_traits<Iterator>::difference_type;
1170 std::conditional_t<std::is_void_v<ElementType>,
1171 typename std::iterator_traits<Iterator>::value_type,
1172 ElementType>;
1173 using pointer =
1174 std::conditional_t<std::is_void_v<ElementType>,
1175 typename std::iterator_traits<Iterator>::pointer,
1176 ElementType*>;
1178 std::conditional_t<std::is_void_v<ElementType>,
1179 typename std::iterator_traits<Iterator>::reference,
1180 ElementType&>;
1181
1182 constexpr WrappedIterator() noexcept : it_() {}
1183 constexpr explicit WrappedIterator(Iterator it) noexcept : it_(it) {}
1184
1185 template <typename OtherIterator, typename OtherElementType,
1186 std::enable_if_t<std::is_convertible_v<OtherIterator, Iterator>,
1187 bool> = true>
1190 : it_(it.base()) {}
1191
  // Dereference and member access forward to the wrapped iterator.
  constexpr reference operator*() const noexcept { return *it_; }
  constexpr pointer operator->() const noexcept { return it_.operator->(); }
1194
  // Pre-increment: advance the underlying iterator in place.
  constexpr WrappedIterator& operator++() noexcept {
    ++it_;
    return *this;
  }
1199 constexpr WrappedIterator operator++(int) noexcept {
1200 WrappedIterator result(*this);
1201 ++(*this);
1202 return result;
1203 }
1204
  // Pre-decrement: step the underlying iterator back in place.
  constexpr WrappedIterator& operator--() noexcept {
    --it_;
    return *this;
  }
1209 constexpr WrappedIterator operator--(int) noexcept {
1210 WrappedIterator result(*this);
1211 --(*this);
1212 return result;
1213 }
1214 constexpr WrappedIterator operator+(difference_type n) const noexcept {
1215 WrappedIterator result(*this);
1216 result += n;
1217 return result;
1218 }
1220 it_ += n;
1221 return *this;
1222 }
1223 constexpr WrappedIterator operator-(difference_type n) const noexcept {
1224 return *this + (-n);
1225 }
1227 *this += -n;
1228 return *this;
1229 }
  // Random-access subscript forwards to the wrapped iterator.
  constexpr reference operator[](difference_type n) const noexcept {
    return it_[n];
  }

  // Exposes the wrapped iterator (mirrors std::reverse_iterator::base).
  constexpr Iterator base() const noexcept { return it_; }
1235
1236 private:
1237 template <typename OtherIterator, typename OtherElementType>
1238 friend class WrappedIterator;
1239
1240 private:
1241 Iterator it_;
1242};
1243
1244template <typename Iterator, typename ElementType, typename OtherIterator,
1245 typename OtherElementType>
1246constexpr bool operator==(
1249 return x.base() == y.base();
1250}
1251
1252template <typename Iterator, typename ElementType, typename OtherIterator,
1253 typename OtherElementType>
1254constexpr bool operator<(
1257 return x.base() < y.base();
1258}
1259
1260template <typename Iterator, typename ElementType, typename OtherIterator,
1261 typename OtherElementType>
1262constexpr bool operator!=(
1265 return !(x == y);
1266}
1267
1268template <typename Iterator, typename ElementType, typename OtherIterator,
1269 typename OtherElementType>
1270constexpr bool operator>(
1273 return y < x;
1274}
1275
1276template <typename Iterator, typename ElementType, typename OtherIterator,
1277 typename OtherElementType>
1278constexpr bool operator>=(
1281 return !(x < y);
1282}
1283
1284template <typename Iterator, typename ElementType, typename OtherIterator,
1285 typename OtherElementType>
1286constexpr bool operator<=(
1289 return !(y < x);
1290}
1291
1292template <typename Iterator, typename ElementType, typename OtherIterator,
1293 typename OtherElementType>
1294constexpr auto operator-(
1297 -> decltype(x.base() - y.base()) {
1298 return x.base() - y.base();
1299}
1300
1301template <typename Iterator, typename ElementType>
1304 const WrappedIterator<Iterator, ElementType>& x) noexcept {
1305 x += n;
1306 return x;
1307}
1308
// Helper functions about values contained in handles.
// A value is either an indirect pointer or a direct pointer, depending on
// whether direct local support is enabled.
class ValueHelper final {
 public:
#ifdef V8_ENABLE_DIRECT_LOCAL
  // With direct locals the handle payload is the value itself, so a
  // distinct non-zero sentinel marks the empty handle — presumably to
  // keep it distinguishable from a genuine null payload (confirm against
  // the direct-local design docs).
  static constexpr Address kTaggedNullAddress = 1;
  static constexpr Address kEmpty = kTaggedNullAddress;
#else
  static constexpr Address kEmpty = kNullAddress;
#endif  // V8_ENABLE_DIRECT_LOCAL

  // True iff |value| is the sentinel used for an empty handle.
  template <typename T>
  V8_INLINE static bool IsEmpty(T* value) {
    return reinterpret_cast<Address>(value) == kEmpty;
  }

  // Returns a handle's "value" for all kinds of abstract handles. For Local,
  // it is equivalent to `*handle`. The variadic parameters support handle
  // types with extra type parameters, like `Persistent<T, M>`.
  template <template <typename T, typename... Ms> typename H, typename T,
            typename... Ms>
  V8_INLINE static T* HandleAsValue(const H<T, Ms...>& handle) {
    return handle.template value<T>();
  }

#ifdef V8_ENABLE_DIRECT_LOCAL

  // Direct locals: the handle's bit pattern is the value's address.
  template <typename T>
  V8_INLINE static Address ValueAsAddress(const T* value) {
    return reinterpret_cast<Address>(value);
  }

  // Direct locals: |slot| stores the value pointer itself. A null slot is
  // mapped to the tagged-null sentinel when check_null is set.
  template <typename T, bool check_null = true, typename S>
  V8_INLINE static T* SlotAsValue(S* slot) {
    if (check_null && slot == nullptr) {
      return reinterpret_cast<T*>(kTaggedNullAddress);
    }
    return *reinterpret_cast<T**>(slot);
  }

#else  // !V8_ENABLE_DIRECT_LOCAL

  // Indirect handles: the handle points at a slot holding the address.
  template <typename T>
  V8_INLINE static Address ValueAsAddress(const T* value) {
    return *reinterpret_cast<const Address*>(value);
  }

  // Indirect handles: the slot pointer doubles as the value pointer.
  template <typename T, bool check_null = true, typename S>
  V8_INLINE static T* SlotAsValue(S* slot) {
    return reinterpret_cast<T*>(slot);
  }

#endif  // V8_ENABLE_DIRECT_LOCAL
};
1364
1368class HandleHelper final {
1369 public:
1380 template <typename T1, typename T2>
1381 V8_INLINE static bool EqualHandles(const T1& lhs, const T2& rhs) {
1382 if (lhs.IsEmpty()) return rhs.IsEmpty();
1383 if (rhs.IsEmpty()) return false;
1384 return lhs.ptr() == rhs.ptr();
1385 }
1386
1387 static V8_EXPORT bool IsOnStack(const void* ptr);
1388 static V8_EXPORT void VerifyOnStack(const void* ptr);
1390};
1391
1393
1394} // namespace internal
1395} // namespace v8
1396
1397#endif // INCLUDE_V8_INTERNAL_H_
static V8_INLINE bool EqualHandles(const T1 &lhs, const T2 &rhs)
static V8_EXPORT void VerifyOnMainThread()
static V8_EXPORT void VerifyOnStack(const void *ptr)
static V8_EXPORT bool IsOnStack(const void *ptr)
static const int kTrustedPointerTableBasePointerOffset
static const int kIsolateCageBaseOffset
static V8_INLINE Address LoadMap(Address obj)
static const int kTrueValueRootIndex
static const int kInferShouldThrowMode
static V8_INLINE void UpdateNodeFlag(Address *obj, bool value, int shift)
static const int kJSSpecialApiObjectType
static V8_INLINE void IncrementLongTasksStatsCounter(v8::Isolate *isolate)
static const int kExternalPointerTableSize
static const int kOddballKindOffset
static const int kOldAllocationInfoOffset
static const int kFirstEmbedderJSApiObjectType
static const int kFastCCallAlignmentPaddingSize
static const int kErrorMessageParamSize
static const int kFalseValueRootIndex
static const int kWasm64OOBOffsetOffset
static constexpr int kExternalAllocationSoftLimit
static const int kIsolateStackGuardOffset
static const int kNullValueRootIndex
static const int kIsolateFastCCallCallerPcOffset
static const int kDontThrow
static const int kEmptyStringRootIndex
static V8_INLINE uint8_t GetNodeState(Address *obj)
static const int kNativeContextEmbedderDataOffset
static V8_INLINE uint8_t GetNodeFlag(Address *obj, int shift)
static const int kStringRepresentationAndEncodingMask
static const int kIsolateThreadLocalTopOffset
static const int kIsolateLongTaskStatsCounterOffset
static const int kEmbedderDataArrayHeaderSize
static const int kExternalTwoByteRepresentationTag
static const int kNodeStateMask
static const int kUndefinedValueRootIndex
static V8_INLINE constexpr bool IsValidSmi(intptr_t value)
static V8_INLINE constexpr bool HasHeapObjectTag(Address value)
static const int kThrowOnError
static const int kEmbedderDataSlotExternalPointerOffset
static const int kBuiltinTier0TableSize
static const int kHandleScopeDataSize
static V8_INLINE Address ReadTaggedSignedField(Address heap_object_ptr, int offset)
static const int kWasm64OOBOffsetAlignmentPaddingSize
static V8_INLINE T ReadRawField(Address heap_object_ptr, int offset)
static V8_INLINE void CheckInitialized(v8::Isolate *isolate)
static const uint32_t kNumIsolateDataSlots
static const int kNumberOfBooleanFlags
static V8_INLINE Address * GetRootSlot(v8::Isolate *isolate, int index)
static const int kBuiltinTier0EntryTableSize
static const int kStackGuardSize
static const int kIsolateFastApiCallTargetOffset
static V8_INLINE constexpr bool CanHaveInternalField(int instance_type)
static V8_INLINE v8::Isolate * GetIsolateForSandbox(Address obj)
static const int kNodeFlagsOffset
static const int kContinuationPreservedEmbedderDataOffset
static const int kLastJSApiObjectType
static const int kExternalOneByteRepresentationTag
static const int kNodeStateIsWeakValue
static V8_EXPORT void CheckInitializedImpl(v8::Isolate *isolate)
static const int kTrustedPointerTableSize
static const int kBuiltinTier0TableOffset
static const int kForeignType
static const int kNodeClassIdOffset
static V8_INLINE int GetInstanceType(Address obj)
static const int kIsolateRootsOffset
static const int kIsolateApiCallbackThunkArgumentOffset
static const int kExternalPointerTableBasePointerOffset
static const int kFirstJSApiObjectType
static const int kOddballType
static const int kMapInstanceTypeOffset
static const int kLinearAllocationAreaSize
static const int kIsolateEmbedderDataOffset
static V8_INLINE int GetOddballKind(Address obj)
static V8_INLINE void * GetEmbedderData(const v8::Isolate *isolate, uint32_t slot)
static const int kNewAllocationInfoOffset
static V8_INLINE constexpr int SmiValue(Address value)
static const int kUndefinedOddballKind
static const int kEmbedderDataSlotSize
static const int kHeapObjectMapOffset
static V8_INLINE Address ReadTaggedPointerField(Address heap_object_ptr, int offset)
static const int kNullOddballKind
static V8_INLINE bool IsExternalTwoByteString(int instance_type)
static V8_INLINE constexpr Address IntToSmi(int value)
static const int kFirstNonstringType
static V8_INLINE void UpdateNodeState(Address *obj, uint8_t value)
static const int kBuiltinTier0EntryTableOffset
static const int kStringEncodingMask
static V8_INLINE Address GetRoot(v8::Isolate *isolate, int index)
static const int kLastEmbedderJSApiObjectType
static const int kVariousBooleanFlagsOffset
static const int kThreadLocalTopSize
static const int kTablesAlignmentPaddingSize
static const int kJSObjectHeaderSize
static V8_INLINE void SetEmbedderData(v8::Isolate *isolate, uint32_t slot, void *data)
static const int kFixedArrayHeaderSize
static const int kJSObjectType
static const int kTheHoleValueRootIndex
static V8_INLINE Address ReadExternalPointerField(v8::Isolate *isolate, Address heap_object_ptr, int offset)
static const int kStringResourceOffset
static const int kIsolateHandleScopeDataOffset
static const int kIsolateFastCCallCallerFpOffset
static const int kErrorMessageParamOffset
void deallocate_impl(Address *p, size_t n) noexcept
bool operator!=(const StrongRootAllocatorBase &other) const
bool operator==(const StrongRootAllocatorBase &other) const
StrongRootAllocatorBase(v8::Isolate *isolate)
StrongRootAllocator(v8::Isolate *isolate)
StrongRootAllocator(const StrongRootAllocator< U > &other) noexcept
static V8_INLINE Address ValueAsAddress(const T *value)
static V8_INLINE T * HandleAsValue(const H< T, Ms... > &handle)
static V8_INLINE T * SlotAsValue(S *slot)
static constexpr Address kEmpty
static V8_INLINE bool IsEmpty(T *value)
constexpr WrappedIterator operator++(int) noexcept
constexpr WrappedIterator operator-(difference_type n) const noexcept
constexpr reference operator[](difference_type n) const noexcept
constexpr pointer operator->() const noexcept
std::conditional_t< std::is_void_v< ElementType >, typename std::iterator_traits< Iterator >::pointer, ElementType * > pointer
constexpr WrappedIterator operator+(difference_type n) const noexcept
constexpr Iterator base() const noexcept
constexpr WrappedIterator & operator--() noexcept
constexpr WrappedIterator(const WrappedIterator< OtherIterator, OtherElementType > &it) noexcept
constexpr WrappedIterator operator--(int) noexcept
constexpr WrappedIterator() noexcept
std::conditional_t< std::is_void_v< ElementType >, typename std::iterator_traits< Iterator >::reference, ElementType & > reference
constexpr reference operator*() const noexcept
constexpr WrappedIterator & operator+=(difference_type n) noexcept
constexpr WrappedIterator(Iterator it) noexcept
constexpr WrappedIterator & operator++() noexcept
typename std::iterator_traits< Iterator >::difference_type difference_type
std::conditional_t< std::is_void_v< ElementType >, typename std::iterator_traits< Iterator >::value_type, ElementType > value_type
typename std::iterator_traits< Iterator >::iterator_category iterator_category
constexpr WrappedIterator & operator-=(difference_type n) noexcept
constexpr bool PointerCompressionIsEnabled()
constexpr int kCodePointerTableEntryCodeObjectOffset
Address SandboxedPointer_t
const int kApiSystemPointerSize
Definition v8-internal.h:44
constexpr bool operator!=(const WrappedIterator< Iterator, ElementType > &x, const WrappedIterator< OtherIterator, OtherElementType > &y) noexcept
constexpr size_t kMaxTrustedPointers
V8_EXPORT void VerifyHandleIsNonEmpty(bool is_empty)
constexpr uint64_t kAllExternalPointerTypeTags[]
constexpr IndirectPointerHandle kNullIndirectPointerHandle
constexpr int kTrustedPointerTableEntrySizeLog2
const int kApiInt32Size
Definition v8-internal.h:46
constexpr bool operator>=(const WrappedIterator< Iterator, ElementType > &x, const WrappedIterator< OtherIterator, OtherElementType > &y) noexcept
const int kApiTaggedSize
Address ExternalPointer_t
const int kHeapObjectTag
Definition v8-internal.h:51
const int kSmiTagSize
Definition v8-internal.h:66
constexpr int MB
Definition v8-internal.h:35
constexpr uint64_t kExternalPointerTagShift
const intptr_t kHeapObjectTagMask
Definition v8-internal.h:54
const int kSmiTag
Definition v8-internal.h:65
constexpr int kCodePointerTableEntryEntrypointOffset
const int kApiSizetSize
Definition v8-internal.h:48
constexpr size_t kMaxExternalPointers
const int kHeapObjectTagSize
Definition v8-internal.h:53
constexpr size_t kTrustedPointerTableReservationSize
const int kForwardingTag
Definition v8-internal.h:60
constexpr auto operator-(const WrappedIterator< Iterator, ElementType > &x, const WrappedIterator< OtherIterator, OtherElementType > &y) noexcept -> decltype(x.base() - y.base())
const int kSmiValueSize
constexpr ExternalPointerHandle kNullExternalPointerHandle
constexpr uint32_t kCodePointerHandleMarker
const intptr_t kForwardingTagMask
Definition v8-internal.h:62
constexpr uint32_t kCodePointerHandleShift
constexpr ExternalPointer_t kNullExternalPointer
constexpr int kCodePointerTableEntrySize
const int kSmiMaxValue
IndirectPointerHandle CodePointerHandle
constexpr bool Is64()
constexpr bool kBuiltinCodeObjectsLiveInTrustedSpace
constexpr bool operator<(const WrappedIterator< Iterator, ElementType > &x, const WrappedIterator< OtherIterator, OtherElementType > &y) noexcept
SmiTagging< kApiTaggedSize > PlatformSmiTagging
V8_EXPORT internal::Isolate * IsolateFromNeverReadOnlySpaceObject(Address obj)
constexpr bool SmiValuesAre32Bits()
uintptr_t Address
Definition v8-internal.h:31
constexpr bool SmiValuesAre31Bits()
const int kApiDoubleSize
Definition v8-internal.h:45
constexpr uintptr_t kUintptrAllBitsSet
Definition v8-internal.h:73
constexpr bool kAllCodeObjectsLiveInTrustedSpace
@ kExternalPointerNullTag
@ kExternalPointerFreeEntryTag
@ kExternalPointerEvacuationEntryTag
constexpr bool operator>(const WrappedIterator< Iterator, ElementType > &x, const WrappedIterator< OtherIterator, OtherElementType > &y) noexcept
uint32_t ExternalPointerHandle
constexpr WrappedIterator< Iterator > operator+(typename WrappedIterator< Iterator, ElementType >::difference_type n, const WrappedIterator< Iterator, ElementType > &x) noexcept
constexpr TrustedPointerHandle kNullTrustedPointerHandle
uint32_t IndirectPointerHandle
constexpr uint64_t kExternalPointerTagMask
constexpr size_t kCodePointerTableReservationSize
constexpr int kTrustedPointerTableEntrySize
constexpr bool kRuntimeGeneratedCodeObjectsLiveInTrustedSpace
const int kApiInt64Size
Definition v8-internal.h:47
IndirectPointerHandle TrustedPointerHandle
const int kForwardingTagSize
Definition v8-internal.h:61
V8_INLINE void PerformCastCheck(T *data)
constexpr bool SandboxIsEnabled()
constexpr int kCodePointerTableEntrySizeLog2
constexpr uint64_t kExternalPointerTagMaskWithoutMarkBit
constexpr bool operator==(const WrappedIterator< Iterator, ElementType > &x, const WrappedIterator< OtherIterator, OtherElementType > &y) noexcept
const int kWeakHeapObjectTag
Definition v8-internal.h:52
V8_EXPORT bool ShouldThrowOnError(internal::Isolate *isolate)
constexpr bool operator<=(const WrappedIterator< Iterator, ElementType > &x, const WrappedIterator< OtherIterator, OtherElementType > &y) noexcept
constexpr int GB
Definition v8-internal.h:36
const int kSmiMinValue
constexpr intptr_t kIntptrAllBitsSet
Definition v8-internal.h:72
constexpr uint32_t kTrustedPointerHandleShift
const intptr_t kHeapObjectReferenceTagMask
Definition v8-internal.h:55
constexpr size_t kMaxCodePointers
constexpr int KB
Definition v8-internal.h:34
constexpr CodePointerHandle kNullCodePointerHandle
const intptr_t kSmiTagMask
Definition v8-internal.h:67
constexpr uint64_t kExternalPointerMarkBit
const int kSmiShiftSize
constexpr int kGarbageCollectionReasonMaxValue
static void Perform(T *data)
static V8_INLINE constexpr bool IsValidSmi(intptr_t value)
Definition v8-internal.h:90
static V8_INLINE constexpr int SmiToInt(Address value)
Definition v8-internal.h:85
static V8_INLINE constexpr bool IsValidSmi(intptr_t value)
static V8_INLINE constexpr int SmiToInt(Address value)
#define EXTERNAL_POINTER_TAG_ENUM(Name, Tag)
#define ALL_EXTERNAL_POINTER_TAGS(V)
#define CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS(Tag,...)
#define SHARED_EXTERNAL_POINTER_TAGS(V)
#define MAKE_TAG(HasMarkBit, TypeTag)
#define PER_ISOLATE_EXTERNAL_POINTER_TAGS(V)
#define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag,...)
#define V8_EXPORT
Definition v8config.h:753
#define V8_INLINE
Definition v8config.h:477