v8 13.6.233 (node 24.1.0)
V8 is Google's open source JavaScript engine
v8-internal.h
1// Copyright 2018 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef INCLUDE_V8_INTERNAL_H_
6#define INCLUDE_V8_INTERNAL_H_
7
8#include <stddef.h>
9#include <stdint.h>
10#include <string.h>
11
12#include <atomic>
13#include <iterator>
14#include <limits>
15#include <memory>
16#include <optional>
17#include <type_traits>
18
19#include "v8config.h" // NOLINT(build/include_directory)
20
21// TODO(pkasting): Use <compare>/spaceship unconditionally after dropping
22// support for old libstdc++ versions.
23#if __has_include(<version>)
24#include <version>
25#endif
26#if defined(__cpp_lib_three_way_comparison) && \
27 __cpp_lib_three_way_comparison >= 201711L && \
28 defined(__cpp_lib_concepts) && __cpp_lib_concepts >= 202002L
29#include <compare>
30#include <concepts>
31
32#define V8_HAVE_SPACESHIP_OPERATOR 1
33#else
34#define V8_HAVE_SPACESHIP_OPERATOR 0
35#endif
36
37namespace v8 {
38
39class Array;
40class Context;
41class Data;
42class Isolate;
43
44namespace internal {
45
46class Heap;
47class LocalHeap;
48class Isolate;
49class IsolateGroup;
50class LocalIsolate;
51
52typedef uintptr_t Address;
53static constexpr Address kNullAddress = 0;
54
55constexpr int KB = 1024;
56constexpr int MB = KB * 1024;
57constexpr int GB = MB * 1024;
58#ifdef V8_TARGET_ARCH_X64
59constexpr size_t TB = size_t{GB} * 1024;
60#endif
61
65const int kApiSystemPointerSize = sizeof(void*);
66const int kApiDoubleSize = sizeof(double);
67const int kApiInt32Size = sizeof(int32_t);
68const int kApiInt64Size = sizeof(int64_t);
69const int kApiSizetSize = sizeof(size_t);
70
71// Tag information for HeapObject.
72const int kHeapObjectTag = 1;
73const int kWeakHeapObjectTag = 3;
74const int kHeapObjectTagSize = 2;
75const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
77
78// Tag information for forwarding pointers stored in object headers.
79// 0b00 at the lowest 2 bits in the header indicates that the map word is a
80// forwarding pointer.
81const int kForwardingTag = 0;
82const int kForwardingTagSize = 2;
83const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;
84
85// Tag information for Smi.
86const int kSmiTag = 0;
87const int kSmiTagSize = 1;
88const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
89
90template <size_t tagged_ptr_size>
91struct SmiTagging;
92
93constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
94constexpr uintptr_t kUintptrAllBitsSet =
95 static_cast<uintptr_t>(kIntptrAllBitsSet);
96
97// Smi constants for systems where tagged pointer is a 32-bit value.
98template <>
99struct SmiTagging<4> {
100 enum { kSmiShiftSize = 0, kSmiValueSize = 31 };
101
102 static constexpr intptr_t kSmiMinValue =
103 static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
104 static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
105
106 V8_INLINE static constexpr int SmiToInt(Address value) {
107 int shift_bits = kSmiTagSize + kSmiShiftSize;
108 // Truncate and shift down (requires >> to be sign extending).
109 return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
110 }
111
112 template <class T, typename std::enable_if_t<std::is_integral_v<T> &&
113 std::is_signed_v<T>>* = nullptr>
114 V8_INLINE static constexpr bool IsValidSmi(T value) {
115 // Is value in range [kSmiMinValue, kSmiMaxValue].
116 // Use unsigned operations in order to avoid undefined behaviour in case of
117 // signed integer overflow.
118 return (static_cast<uintptr_t>(value) -
119 static_cast<uintptr_t>(kSmiMinValue)) <=
120 (static_cast<uintptr_t>(kSmiMaxValue) -
121 static_cast<uintptr_t>(kSmiMinValue));
122 }
123
124 template <class T,
125 typename std::enable_if_t<std::is_integral_v<T> &&
126 std::is_unsigned_v<T>>* = nullptr>
127 V8_INLINE static constexpr bool IsValidSmi(T value) {
128 static_assert(kSmiMaxValue <= std::numeric_limits<uintptr_t>::max());
129 return value <= static_cast<uintptr_t>(kSmiMaxValue);
130 }
131
132 // Same as the `intptr_t` version but works with int64_t on 32-bit builds
133 // without slowing down anything else.
134 V8_INLINE static constexpr bool IsValidSmi(int64_t value) {
135 return (static_cast<uint64_t>(value) -
136 static_cast<uint64_t>(kSmiMinValue)) <=
137 (static_cast<uint64_t>(kSmiMaxValue) -
138 static_cast<uint64_t>(kSmiMinValue));
139 }
140
141 V8_INLINE static constexpr bool IsValidSmi(uint64_t value) {
142 static_assert(kSmiMaxValue <= std::numeric_limits<uint64_t>::max());
143 return value <= static_cast<uint64_t>(kSmiMaxValue);
144 }
145};
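
// Illustrative checks (added for this listing, not part of the original
// header): with a 32-bit tagged pointer the Smi payload is 31 bits, so the
// representable range is [-2^30, 2^30 - 1], and IsValidSmi accepts exactly
// that range.
static_assert(SmiTagging<4>::kSmiMinValue == -(intptr_t{1} << 30));
static_assert(SmiTagging<4>::kSmiMaxValue == (intptr_t{1} << 30) - 1);
static_assert(SmiTagging<4>::IsValidSmi(SmiTagging<4>::kSmiMaxValue));
static_assert(!SmiTagging<4>::IsValidSmi(SmiTagging<4>::kSmiMaxValue + 1));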
146
147// Smi constants for systems where tagged pointer is a 64-bit value.
148template <>
149struct SmiTagging<8> {
150 enum { kSmiShiftSize = 31, kSmiValueSize = 32 };
151
152 static constexpr intptr_t kSmiMinValue =
153 static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
154 static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
155
156 V8_INLINE static constexpr int SmiToInt(Address value) {
157 int shift_bits = kSmiTagSize + kSmiShiftSize;
158 // Shift down and throw away top 32 bits.
159 return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
160 }
161
162 template <class T, typename std::enable_if_t<std::is_integral_v<T> &&
163 std::is_signed_v<T>>* = nullptr>
164 V8_INLINE static constexpr bool IsValidSmi(T value) {
165 // To be representable as a long smi, the value must be a 32-bit integer.
166 return std::numeric_limits<int32_t>::min() <= value &&
167 value <= std::numeric_limits<int32_t>::max();
168 }
169
170 template <class T,
171 typename std::enable_if_t<std::is_integral_v<T> &&
172 std::is_unsigned_v<T>>* = nullptr>
173 V8_INLINE static constexpr bool IsValidSmi(T value) {
174 return value <= std::numeric_limits<int32_t>::max();
175 }
176};
177
178#ifdef V8_COMPRESS_POINTERS
179// See v8:7703 or src/common/ptr-compr-inl.h for details about pointer
180// compression.
181constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
182constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
183
184static_assert(
185    kApiSystemPointerSize == kApiInt64Size,
186    "Pointer compression can be enabled only for 64-bit architectures");
187const int kApiTaggedSize = kApiInt32Size;
188#else
189const int kApiTaggedSize = kApiSystemPointerSize;
190#endif
191
195
196#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
197using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
198#else
199using PlatformSmiTagging = SmiTagging<kApiTaggedSize>;
200#endif
201
202// TODO(ishell): Consider adding kSmiShiftBits = kSmiShiftSize + kSmiTagSize
203// since it's used much more often than the individual constants.
204const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
205const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
206const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
207const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
208constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
209constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }
210constexpr bool Is64() { return kApiSystemPointerSize == sizeof(int64_t); }
211
212V8_INLINE static constexpr Address IntToSmi(int value) {
213 return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
214 kSmiTag;
215}
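
// Illustrative checks (added for this listing, not part of the original
// header): encoding an int with IntToSmi and decoding it again with
// PlatformSmiTagging::SmiToInt is the identity for any value in the Smi range,
// in both the 31-bit and the 32-bit Smi configuration.
static_assert(PlatformSmiTagging::SmiToInt(IntToSmi(42)) == 42);
static_assert(PlatformSmiTagging::SmiToInt(IntToSmi(-1)) == -1);
static_assert(PlatformSmiTagging::SmiToInt(IntToSmi(kSmiMaxValue)) == kSmiMaxValue);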
216
217/*
218 * Sandbox related types, constants, and functions.
219 */
220constexpr bool SandboxIsEnabled() {
221#ifdef V8_ENABLE_SANDBOX
222 return true;
223#else
224 return false;
225#endif
226}
227
228// SandboxedPointers are guaranteed to point into the sandbox. This is achieved
229// for example by storing them as offsets rather than as raw pointers.
230using SandboxedPointer_t = Address;
231
232#ifdef V8_ENABLE_SANDBOX
233
234// Size of the sandbox, excluding the guard regions surrounding it.
235#if defined(V8_TARGET_OS_ANDROID)
236// On Android, most 64-bit devices seem to be configured with only 39 bits of
237// virtual address space for userspace. As such, limit the sandbox to 128GB (a
238// quarter of the total available address space).
239constexpr size_t kSandboxSizeLog2 = 37; // 128 GB
240#else
241// Everywhere else use a 1TB sandbox.
242constexpr size_t kSandboxSizeLog2 = 40; // 1 TB
243#endif // V8_TARGET_OS_ANDROID
244constexpr size_t kSandboxSize = 1ULL << kSandboxSizeLog2;
245
246// Required alignment of the sandbox. For simplicity, we require the
247// size of the guard regions to be a multiple of this, so that this specifies
248// the alignment of the sandbox including and excluding surrounding guard
249// regions. The alignment requirement is due to the pointer compression cage
250// being located at the start of the sandbox.
251constexpr size_t kSandboxAlignment = kPtrComprCageBaseAlignment;
252
253// Sandboxed pointers are stored inside the heap as offset from the sandbox
254// base shifted to the left. This way, it is guaranteed that the offset is
255// smaller than the sandbox size after shifting it to the right again. This
256// constant specifies the shift amount.
257constexpr uint64_t kSandboxedPointerShift = 64 - kSandboxSizeLog2;
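
// Illustrative sketch (hypothetical helpers, added for this listing, not part
// of the original header): because offsets are stored pre-shifted into the
// upper bits, decoding with a plain logical right shift can never produce a
// value that is >= kSandboxSize.
constexpr uint64_t EncodeSandboxedOffsetForIllustration(uint64_t offset) {
  return offset << kSandboxedPointerShift;
}
constexpr uint64_t DecodeSandboxedOffsetForIllustration(uint64_t sandboxed_pointer) {
  return sandboxed_pointer >> kSandboxedPointerShift;
}
static_assert(DecodeSandboxedOffsetForIllustration(~uint64_t{0}) < kSandboxSize);
static_assert(DecodeSandboxedOffsetForIllustration(EncodeSandboxedOffsetForIllustration(
                  kSandboxSize - 1)) == kSandboxSize - 1);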
258
259// Size of the guard regions surrounding the sandbox. This assumes a worst-case
260// scenario of a 32-bit unsigned index used to access an array of 64-bit values
261// with an additional 4GB (compressed pointer) offset. In particular, accesses
262// to TypedArrays are effectively computed as
263// `entry_pointer = array->base + array->offset + index * array->element_size`.
264// See also https://crbug.com/40070746 for more details.
265constexpr size_t kSandboxGuardRegionSize = 32ULL * GB + 4ULL * GB;
266
267static_assert((kSandboxGuardRegionSize % kSandboxAlignment) == 0,
268 "The size of the guard regions around the sandbox must be a "
269 "multiple of its required alignment.");
270
271// On OSes where reserving virtual memory is too expensive to reserve the
272// entire address space backing the sandbox, notably Windows pre 8.1, we
273// create a partially reserved sandbox that doesn't actually reserve most of
274// the memory. Such a sandbox doesn't have the desired security properties,
275// as unrelated memory allocations could end up inside of it, but it still
276// ensures that objects that should be located inside the sandbox are
277// allocated within kSandboxSize bytes from its start. The minimum size of
278// the region that is actually reserved for such a sandbox is specified by
279// this constant and should be big enough to contain the pointer compression
280// cage as well as the ArrayBuffer partition.
281constexpr size_t kSandboxMinimumReservationSize = 8ULL * GB;
282
283static_assert(kSandboxMinimumReservationSize > kPtrComprCageReservationSize,
284 "The minimum reservation size for a sandbox must be larger than "
285 "the pointer compression cage contained within it.");
286
287// The maximum buffer size allowed inside the sandbox. This is mostly dependent
288// on the size of the guard regions around the sandbox: an attacker must not be
289// able to construct a buffer that appears larger than the guard regions and
290// thereby "reach out of" the sandbox.
291constexpr size_t kMaxSafeBufferSizeForSandbox = 32ULL * GB - 1;
292static_assert(kMaxSafeBufferSizeForSandbox <= kSandboxGuardRegionSize,
293 "The maximum allowed buffer size must not be larger than the "
294 "sandbox's guard regions");
295
296constexpr size_t kBoundedSizeShift = 29;
297static_assert(1ULL << (64 - kBoundedSizeShift) ==
298 kMaxSafeBufferSizeForSandbox + 1,
299 "The maximum size of a BoundedSize must be synchronized with the "
300 "kMaxSafeBufferSizeForSandbox");
301
302#endif // V8_ENABLE_SANDBOX
303
304#ifdef V8_COMPRESS_POINTERS
305
306#ifdef V8_TARGET_OS_ANDROID
307// The size of the virtual memory reservation for an external pointer table.
308// This determines the maximum number of entries in a table. Using a maximum
309// size allows omitting bounds checks on table accesses if the indices are
310// guaranteed (e.g. through shifting) to be below the maximum index. This
311// value must be a power of two.
312constexpr size_t kExternalPointerTableReservationSize = 256 * MB;
313
314// The external pointer table indices stored in HeapObjects as external
315// pointers are shifted to the left by this amount to guarantee that they are
316// smaller than the maximum table size even after the C++ compiler multiplies
317// them by 8 to be used as indexes into a table of 64 bit pointers.
318constexpr uint32_t kExternalPointerIndexShift = 7;
319#else
320constexpr size_t kExternalPointerTableReservationSize = 512 * MB;
321constexpr uint32_t kExternalPointerIndexShift = 6;
322#endif // V8_TARGET_OS_ANDROID
323
324// The maximum number of entries in an external pointer table.
325constexpr int kExternalPointerTableEntrySize = 8;
326constexpr int kExternalPointerTableEntrySizeLog2 = 3;
327constexpr size_t kMaxExternalPointers =
328 kExternalPointerTableReservationSize / kExternalPointerTableEntrySize;
329static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
330 "kExternalPointerTableReservationSize and "
331 "kExternalPointerIndexShift don't match");
332
333#else // !V8_COMPRESS_POINTERS
334
335// Needed for the V8.SandboxedExternalPointersCount histogram.
336constexpr size_t kMaxExternalPointers = 0;
337
338#endif // V8_COMPRESS_POINTERS
339
340constexpr uint64_t kExternalPointerMarkBit = 1ULL << 48;
341constexpr uint64_t kExternalPointerTagShift = 49;
342constexpr uint64_t kExternalPointerTagMask = 0x00fe000000000000ULL;
347constexpr uint64_t kExternalPointerTagAndMarkbitMask = 0x00ff000000000000ULL;
348constexpr uint64_t kExternalPointerPayloadMask = 0xff00ffffffffffffULL;
349
350// An ExternalPointerHandle represents an (opaque) reference to an external
351// pointer that can be stored inside the sandbox. An ExternalPointerHandle has
352// meaning only in combination with an (active) Isolate as it references an
353// external pointer stored in the currently active Isolate's
354// ExternalPointerTable. Internally, an ExternalPointerHandle is simply an
355// index into an ExternalPointerTable that is shifted to the left to guarantee
356// that it is smaller than the size of the table.
357using ExternalPointerHandle = uint32_t;
358
359// ExternalPointers point to objects located outside the sandbox. When the V8
360// sandbox is enabled, these are stored on heap as ExternalPointerHandles,
361// otherwise they are simply raw pointers.
362#ifdef V8_ENABLE_SANDBOX
363using ExternalPointer_t = ExternalPointerHandle;
364#else
365using ExternalPointer_t = Address;
366#endif
367
370
371// See `ExternalPointerHandle` for the main documentation. The difference to
372// `ExternalPointerHandle` is that the handle does not represent an arbitrary
373// external pointer but always refers to an object managed by `CppHeap`. The
374// handles are used in combination with a dedicated table for `CppHeap`
375// references.
376using CppHeapPointerHandle = uint32_t;
377
378// The actual pointer to objects located on the `CppHeap`. When pointer
379// compression is enabled these pointers are stored as `CppHeapPointerHandle`.
380// In non-compressed configurations the pointers are simply stored as raw
381// pointers.
382#ifdef V8_COMPRESS_POINTERS
383using CppHeapPointer_t = CppHeapPointerHandle;
384#else
385using CppHeapPointer_t = Address;
386#endif
387
390
391constexpr uint64_t kCppHeapPointerMarkBit = 1ULL;
392constexpr uint64_t kCppHeapPointerTagShift = 1;
393constexpr uint64_t kCppHeapPointerPayloadShift = 16;
394
395#ifdef V8_COMPRESS_POINTERS
396// CppHeapPointers use a dedicated pointer table. These constants control the
397// size and layout of the table. See the corresponding constants for the
398// external pointer table for further details.
399constexpr size_t kCppHeapPointerTableReservationSize =
400 kExternalPointerTableReservationSize;
401constexpr uint32_t kCppHeapPointerIndexShift = kExternalPointerIndexShift;
402
403constexpr int kCppHeapPointerTableEntrySize = 8;
404constexpr int kCppHeapPointerTableEntrySizeLog2 = 3;
405constexpr size_t kMaxCppHeapPointers =
406 kCppHeapPointerTableReservationSize / kCppHeapPointerTableEntrySize;
407static_assert((1 << (32 - kCppHeapPointerIndexShift)) == kMaxCppHeapPointers,
408 "kCppHeapPointerTableReservationSize and "
409 "kCppHeapPointerIndexShift don't match");
410
411#else // !V8_COMPRESS_POINTERS
412
413// Needed for the V8.SandboxedCppHeapPointersCount histogram.
414constexpr size_t kMaxCppHeapPointers = 0;
415
416#endif // V8_COMPRESS_POINTERS
417
418// Generic tag range struct to represent ranges of type tags.
419//
420// When referencing external objects via pointer tables, type tags are
421// frequently necessary to guarantee type safety for the external objects. When
422// support for subtyping is necessary, range-based type checks are used in
423// which all subtypes of a given supertype use contiguous tags. This struct can
424// then be used to represent such a type range.
425//
426// As an example, consider the following type hierarchy:
427//
428//          A     F
429//         / \
430//        B   E
431//       / \
432//      C   D
433//
434// A potential type id assignment for range-based type checks is
435// {A: 0, B: 1, C: 2, D: 3, E: 4, F: 5}. With that, the type check for type A
436// would check for the range [A, E], while the check for B would check range
437// [B, D], and for F it would simply check [F, F].
438//
439// In addition, there is an option for performance tweaks: if the size of the
440// type range corresponding to a supertype is a power of two and starts at a
441// power of two (e.g. [0x100, 0x13f]), then the compiler can often optimize
442// the type check to use even fewer instructions (essentially replace an AND +
443// SUB with a single AND).
444//
445template <typename Tag>
446struct TagRange {
447 static_assert(std::is_enum_v<Tag> &&
448 std::is_same_v<std::underlying_type_t<Tag>, uint16_t>,
449 "Tag parameter must be an enum with base type uint16_t");
450
451 // Construct the inclusive tag range [first, last].
452 constexpr TagRange(Tag first, Tag last) : first(first), last(last) {}
453
454 // Construct a tag range consisting of a single tag.
455 //
456 // A single tag is always implicitly convertible to a tag range. This greatly
457 // increases readability as most of the time, the exact tag of a field is
458 // known and so no tag range needs to explicitly be created for it.
459 constexpr TagRange(Tag tag) // NOLINT(runtime/explicit)
460 : first(tag), last(tag) {}
461
462 // Construct an empty tag range.
463 constexpr TagRange() : TagRange(static_cast<Tag>(0)) {}
464
465 // A tag range is considered empty if it only contains the null tag.
466 constexpr bool IsEmpty() const { return first == 0 && last == 0; }
467
468 constexpr size_t Size() const {
469 if (IsEmpty()) {
470 return 0;
471 } else {
472 return last - first + 1;
473 }
474 }
475
476 constexpr bool Contains(Tag tag) const {
477 // Need to perform the math with uint32_t. Otherwise, the uint16_ts would
478 // be promoted to (signed) int, allowing the compiler to (wrongly) assume
479 // that an underflow cannot happen as that would be undefined behavior.
480 return static_cast<uint32_t>(tag) - first <=
481 static_cast<uint32_t>(last) - first;
482 }
483
484 constexpr bool Contains(TagRange tag_range) const {
485 return tag_range.first >= first && tag_range.last <= last;
486 }
487
488 constexpr bool operator==(const TagRange other) const {
489 return first == other.first && last == other.last;
490 }
491
492 constexpr size_t hash_value() const {
493 static_assert(std::is_same_v<std::underlying_type_t<Tag>, uint16_t>);
494 return (static_cast<size_t>(first) << 16) | last;
495 }
496
497 // Internally we represent tag ranges as inclusive ranges [first, last].
498 const Tag first;
499 const Tag last;
500};
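
// Illustrative sketch (added for this listing; the ExampleTagForIllustration
// enum and its values are made up, not part of the original header): type ids
// assigned as in the hierarchy above, with the range-based checks expressed
// through TagRange.
enum ExampleTagForIllustration : uint16_t {
  kExampleA = 0,
  kExampleB = 1,
  kExampleC = 2,
  kExampleD = 3,
  kExampleE = 4,
  kExampleF = 5,
};
constexpr TagRange<ExampleTagForIllustration> kExampleAnyA{kExampleA, kExampleE};
constexpr TagRange<ExampleTagForIllustration> kExampleAnyB{kExampleB, kExampleD};
constexpr TagRange<ExampleTagForIllustration> kExampleJustF{kExampleF};
static_assert(kExampleAnyA.Contains(kExampleC));   // C is a transitive subtype of A.
static_assert(!kExampleAnyB.Contains(kExampleE));  // E is not a subtype of B.
static_assert(kExampleJustF.Size() == 1 && kExampleJustF.Contains(kExampleF));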
501
502//
503// External Pointers.
504//
505// When the sandbox is enabled, external pointers are stored in an external
506// pointer table and are referenced from HeapObjects through an index (a
507// "handle"). When stored in the table, the pointers are tagged with per-type
508// tags to prevent type confusion attacks between different external objects.
509//
510// When loading an external pointer, a range of allowed tags can be specified.
511// This way, type hierarchies can be supported. The main requirement for that
512// is that all (transitive) child classes of a given parent class have type ids
513// in the same range, and that there are no unrelated types in that range. For
514// more details about how to assign type tags to types, see the TagRange class.
515//
516// The external pointer sandboxing mechanism ensures that every access to an
517// external pointer field will result in a valid pointer of the expected type
518// even in the presence of an attacker able to corrupt memory inside the
519// sandbox. However, if any data related to the external object is stored
520// inside the sandbox it may still be corrupted and so must be validated before
521// use or moved into the external object. Further, an attacker will always be
522// able to substitute different external pointers of the same type for each
523// other. Therefore, code using external pointers must be written in a
524// "substitution-safe" way, i.e. it must always be possible to substitute
525// external pointers of the same type without causing memory corruption outside
526// of the sandbox. Generally this is achieved by referencing any group of
527// related external objects through a single external pointer.
528//
529// Currently we use bit 62 for the marking bit which should always be unused as
530// it's part of the non-canonical address range. When Arm's top-byte ignore
531// (TBI) is enabled, this bit will be part of the ignored byte, and we assume
532// that the Embedder is not using this byte (really only this one bit) for any
533// other purpose. This bit also does not collide with the memory tagging
534// extension (MTE) which would use bits [56, 60).
535//
536// External pointer tables are also available even when the sandbox is off but
537// pointer compression is on. In that case, the mechanism can be used to ease
538// alignment requirements as it turns unaligned 64-bit raw pointers into
539// aligned 32-bit indices. To "opt-in" to the external pointer table mechanism
540// for this purpose, instead of using the ExternalPointer accessors one needs to
541// use ExternalPointerHandles directly and use them to access the pointers in an
542// ExternalPointerTable.
543//
544// The tag is currently in practice limited to 15 bits since it needs to fit
545// together with a marking bit into the unused parts of a pointer.
546enum ExternalPointerTag : uint16_t {
549
550 // When adding new tags, please ensure that the code using these tags is
551 // "substitution-safe", i.e. still operate safely if external pointers of the
552 // same type are swapped by an attacker. See comment above for more details.
553
554 // Shared external pointers are owned by the shared Isolate and stored in the
555 // shared external pointer table associated with that Isolate, where they can
556 // be accessed from multiple threads at the same time. The objects referenced
557 // in this way must therefore always be thread-safe.
563
564 // External pointers using these tags are kept in a per-Isolate external
565 // pointer table and can only be accessed when this Isolate is active.
568 // This tag essentially stands for a `void*` pointer in the V8 API, and it is
569 // the Embedder's responsibility to ensure type safety (against substitution)
570 // and lifetime validity of these objects.
582
583 // Foreigns
608
609 // Managed
632 // External resources whose lifetime is tied to their entry in the external
633 // pointer table but which are not referenced via a Managed
636
640 // The tags are limited to 7 bits, so the last tag is 0x7f.
642};
643
645
659
660// True if the external pointer must be accessed from the shared isolate's
661// external pointer table.
662V8_INLINE static constexpr bool IsSharedExternalPointerType(
663 ExternalPointerTagRange tag_range) {
664 return kAnySharedExternalPointerTagRange.Contains(tag_range);
665}
666
667// True if the external pointer may live in a read-only object, in which case
668// the table entry will be in the shared read-only segment of the external
669// pointer table.
670V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType(
671 ExternalPointerTagRange tag_range) {
673}
674
675// True if the external pointer references an external object whose lifetime is
676// tied to the entry in the external pointer table.
677// In this case, the entry in the ExternalPointerTable always points to an
678// object derived from ExternalPointerTable::ManagedResource.
679V8_INLINE static constexpr bool IsManagedExternalPointerType(
680 ExternalPointerTagRange tag_range) {
682}
683
684// When an external pointer field can contain the null external pointer handle,
685// the type checking mechanism needs to also check for null.
686// TODO(saelo): this is mostly a temporary workaround to introduce range-based
687// type checks. In the future, we should either (a) change the type tagging
688// scheme so that null always passes or (b) (more likely) introduce dedicated
689// null entries for those tags that need them (similar to other well-known
690// empty value constants such as the empty fixed array).
691V8_INLINE static constexpr bool ExternalPointerCanBeEmpty(
692 ExternalPointerTagRange tag_range) {
693 return tag_range.Contains(kArrayBufferExtensionTag) ||
694 tag_range.Contains(kEmbedderDataSlotPayloadTag);
695}
696
697// Indirect Pointers.
698//
699// When the sandbox is enabled, indirect pointers are used to reference
700// HeapObjects that live outside of the sandbox (but are still managed by V8's
701// garbage collector). When object A references an object B through an indirect
702// pointer, object A will contain an IndirectPointerHandle, i.e. a shifted
703// 32-bit index, which identifies an entry in a pointer table (either the
704// trusted pointer table for TrustedObjects, or the code pointer table if it is
705// a Code object). This table entry then contains the actual pointer to object
706// B. Further, object B owns this pointer table entry, and it is responsible
707// for updating the "self-pointer" in the entry when it is relocated in memory.
708// This way, in contrast to "normal" pointers, indirect pointers never need to
709// be tracked by the GC (i.e. there is no remembered set for them).
710// These pointers do not exist when the sandbox is disabled.
711
712// An IndirectPointerHandle represents a 32-bit index into a pointer table.
713using IndirectPointerHandle = uint32_t;
714
715// A null handle always references an entry that contains nullptr.
717
718// When the sandbox is enabled, indirect pointers are used to implement:
719// - TrustedPointers: an indirect pointer using the trusted pointer table (TPT)
720// and referencing a TrustedObject in one of the trusted heap spaces.
721// - CodePointers, an indirect pointer using the code pointer table (CPT) and
722// referencing a Code object together with its instruction stream.
723
724//
725// Trusted Pointers.
726//
727// A pointer to a TrustedObject.
728// When the sandbox is enabled, these are indirect pointers using the trusted
729// pointer table (TPT). They are used to reference trusted objects (located in
730// one of V8's trusted heap spaces, outside of the sandbox) from inside the
731// sandbox in a memory-safe way. When the sandbox is disabled, these are
732// regular tagged pointers.
734
735// The size of the virtual memory reservation for the trusted pointer table.
736// As with the external pointer table, a maximum table size in combination with
737// shifted indices allows omitting bounds checks.
739
740// The trusted pointer handles are stored shifted to the left by this amount
741// to guarantee that they are smaller than the maximum table size.
742constexpr uint32_t kTrustedPointerHandleShift = 9;
743
744// A null handle always references an entry that contains nullptr.
747
748// The maximum number of entries in a trusted pointer table.
751constexpr size_t kMaxTrustedPointers =
753static_assert((1 << (32 - kTrustedPointerHandleShift)) == kMaxTrustedPointers,
754 "kTrustedPointerTableReservationSize and "
755 "kTrustedPointerHandleShift don't match");
756
757//
758// Code Pointers.
759//
760// A pointer to a Code object.
761// Essentially a specialized version of a trusted pointer that (when the
762// sandbox is enabled) uses the code pointer table (CPT) instead of the TPT.
763// Each entry in the CPT contains both a pointer to a Code object as well as a
764// pointer to the Code's entrypoint. This allows calling/jumping into Code with
765// one fewer memory access (compared to the case where the entrypoint pointer
766// first needs to be loaded from the Code object). As such, a CodePointerHandle
767// can be used both to obtain the referenced Code object and to directly load
768// its entrypoint.
769//
770// When the sandbox is disabled, these are regular tagged pointers.
772
773// The size of the virtual memory reservation for the code pointer table.
774// As with the other tables, a maximum table size in combination with shifted
775// indices allows omitting bounds checks.
776constexpr size_t kCodePointerTableReservationSize = 128 * MB;
777
778// Code pointer handles are shifted by a different amount than indirect pointer
779// handles as the tables have a different maximum size.
780constexpr uint32_t kCodePointerHandleShift = 9;
781
782// A null handle always references an entry that contains nullptr.
784
785// It can sometimes be necessary to distinguish a code pointer handle from a
786// trusted pointer handle. A typical example would be a union trusted pointer
787// field that can refer to both Code objects and other trusted objects. To
788// support these use-cases, we use a simple marking scheme where some of the
789// low bits of a code pointer handle are set, while they will be unset on a
790// trusted pointer handle. This way, the correct table to resolve the handle
791// can be determined even in the absence of a type tag.
792constexpr uint32_t kCodePointerHandleMarker = 0x1;
793static_assert(kCodePointerHandleShift > 0);
794static_assert(kTrustedPointerHandleShift > 0);
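
// Illustrative sketch (hypothetical helper, added for this listing, not part
// of the original header): since both shifts above are > 0, the low bits of
// every handle are free, and the marker bit alone identifies which table a
// handle belongs to.
constexpr bool IsCodePointerHandleForIllustration(IndirectPointerHandle handle) {
  return (handle & kCodePointerHandleMarker) != 0;
}
static_assert(IsCodePointerHandleForIllustration(
    (uint32_t{1} << kCodePointerHandleShift) | kCodePointerHandleMarker));
static_assert(!IsCodePointerHandleForIllustration(
    uint32_t{1} << kTrustedPointerHandleShift));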
795
796// The maximum number of entries in a code pointer table.
797constexpr int kCodePointerTableEntrySize = 16;
799constexpr size_t kMaxCodePointers =
800    kCodePointerTableReservationSize / kCodePointerTableEntrySize;
801static_assert(
802    (1 << (32 - kCodePointerHandleShift)) == kMaxCodePointers,
803    "kCodePointerTableReservationSize and kCodePointerHandleShift don't match");
804
807
808// Constants that can be used to mark places that should be modified once
809// certain types of objects are moved out of the sandbox and into trusted space.
815
816// {obj} must be the raw tagged pointer representation of a HeapObject
817// that's guaranteed to never be in ReadOnlySpace.
819
820// Returns whether we need to throw when an error occurs. This infers the language
821// mode based on the current context and the closure. This returns true if the
822// language mode is strict.
823V8_EXPORT bool ShouldThrowOnError(internal::Isolate* isolate);
830#ifdef V8_MAP_PACKING
831 V8_INLINE static constexpr Address UnpackMapWord(Address mapword) {
832 // TODO(wenyuzhao): Clear header metadata.
833 return mapword ^ kMapWordXorMask;
834 }
835#endif
836
837 public:
838 // These values match non-compiler-dependent values defined within
839 // the implementation of v8.
840 static const int kHeapObjectMapOffset = 0;
842 static const int kStringResourceOffset =
844
846 static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
847#ifdef V8_COMPRESS_POINTERS
850#else // !V8_COMPRESS_POINTERS
853#endif // !V8_COMPRESS_POINTERS
854 static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
857#ifdef V8_ENABLE_SANDBOX
859#else
861#endif
864 static const int kStringEncodingMask = 0x8;
865 static const int kExternalTwoByteRepresentationTag = 0x02;
866 static const int kExternalOneByteRepresentationTag = 0x0a;
867
868 static const uint32_t kNumIsolateDataSlots = 4;
870 static const int kNumberOfBooleanFlags = 6;
871 static const int kErrorMessageParamSize = 1;
872 static const int kTablesAlignmentPaddingSize = 1;
878 static const int kHandleScopeDataSize =
880
881 // ExternalPointerTable and TrustedPointerTable layout guarantees.
886
887 // IsolateData layout guarantees.
888 static const int kIsolateCageBaseOffset = 0;
889 static const int kIsolateStackGuardOffset =
891 static const int kVariousBooleanFlagsOffset =
893 static const int kErrorMessageParamOffset =
898 static const int kBuiltinTier0TableOffset =
900 static const int kNewAllocationInfoOffset =
902 static const int kOldAllocationInfoOffset =
904
921 static const int kIsolateEmbedderDataOffset =
923#ifdef V8_COMPRESS_POINTERS
924 static const int kIsolateExternalPointerTableOffset =
926 static const int kIsolateSharedExternalPointerTableAddressOffset =
927 kIsolateExternalPointerTableOffset + kExternalPointerTableSize;
928 static const int kIsolateCppHeapPointerTableOffset =
929 kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
930#ifdef V8_ENABLE_SANDBOX
931 static const int kIsolateTrustedCageBaseOffset =
932 kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize;
933 static const int kIsolateTrustedPointerTableOffset =
934 kIsolateTrustedCageBaseOffset + kApiSystemPointerSize;
935 static const int kIsolateSharedTrustedPointerTableAddressOffset =
936 kIsolateTrustedPointerTableOffset + kTrustedPointerTableSize;
937 static const int kIsolateTrustedPointerPublishingScopeOffset =
938 kIsolateSharedTrustedPointerTableAddressOffset + kApiSystemPointerSize;
939 static const int kIsolateCodePointerTableBaseAddressOffset =
940 kIsolateTrustedPointerPublishingScopeOffset + kApiSystemPointerSize;
942 kIsolateCodePointerTableBaseAddressOffset + kApiSystemPointerSize;
943#else
945 kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize;
946#endif // V8_ENABLE_SANDBOX
947#else
950#endif // V8_COMPRESS_POINTERS
955 static const int kIsolateRootsOffset =
957
958 // Assert scopes
959 static const int kDisallowGarbageCollectionAlign = alignof(uint32_t);
960 static const int kDisallowGarbageCollectionSize = sizeof(uint32_t);
961
962#if V8_STATIC_ROOTS_BOOL
963
964// These constants are copied from static-roots.h and guarded by static asserts.
965#define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
966 V(UndefinedValue, 0x11) \
967 V(NullValue, 0x2d) \
968 V(TrueValue, 0x71) \
969 V(FalseValue, 0x55) \
970 V(EmptyString, 0x49) \
971 V(TheHoleValue, 0x761)
972
973 using Tagged_t = uint32_t;
974 struct StaticReadOnlyRoot {
975#define DEF_ROOT(name, value) static constexpr Tagged_t k##name = value;
976 EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT)
977#undef DEF_ROOT
978
979 // Use 0 for kStringMapLowerBound since string maps are the first maps.
980 static constexpr Tagged_t kStringMapLowerBound = 0;
981 static constexpr Tagged_t kStringMapUpperBound = 0x425;
982
983#define PLUSONE(...) +1
984 static constexpr size_t kNumberOfExportedStaticRoots =
985 2 + EXPORTED_STATIC_ROOTS_PTR_LIST(PLUSONE);
986#undef PLUSONE
987 };
988
989#endif // V8_STATIC_ROOTS_BOOL
990
991 static const int kUndefinedValueRootIndex = 4;
992 static const int kTheHoleValueRootIndex = 5;
993 static const int kNullValueRootIndex = 6;
994 static const int kTrueValueRootIndex = 7;
995 static const int kFalseValueRootIndex = 8;
996 static const int kEmptyStringRootIndex = 9;
997
999 static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
1000 static const int kNodeStateMask = 0x3;
1001 static const int kNodeStateIsWeakValue = 2;
1002
1003 static const int kFirstNonstringType = 0x80;
1004 static const int kOddballType = 0x83;
1005 static const int kForeignType = 0xcc;
1006 static const int kJSSpecialApiObjectType = 0x410;
1007 static const int kJSObjectType = 0x421;
1008 static const int kFirstJSApiObjectType = 0x422;
1009 static const int kLastJSApiObjectType = 0x80A;
1010 // Defines a range [kFirstEmbedderJSApiObjectType, kJSApiObjectTypesCount]
1011 // of JSApiObject instance type values that an embedder can use.
1012 static const int kFirstEmbedderJSApiObjectType = 0;
1015
1016 static const int kUndefinedOddballKind = 4;
1017 static const int kNullOddballKind = 3;
1018
1019 // Constants used by PropertyCallbackInfo to check if we should throw when an
1020 // error occurs.
1021 static const int kDontThrow = 0;
1022 static const int kThrowOnError = 1;
1023 static const int kInferShouldThrowMode = 2;
1024
1025 // Soft limit for AdjustAmountOfExternalAllocatedMemory. Trigger an
1026 // incremental GC once the external memory reaches this limit.
1027 static constexpr size_t kExternalAllocationSoftLimit = 64 * 1024 * 1024;
1028
1029#ifdef V8_MAP_PACKING
1030 static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
1031 // The lowest two bits of mapwords are always `0b10`
1032 static const uintptr_t kMapWordSignature = 0b10;
1033 // XORing a (non-compressed) map with this mask ensures that the two
1034 // low-order bits are 0b10. The 0 at the end makes this look like a Smi,
1035 // although real Smis have all lower 32 bits unset. We only rely on these
1036 // values passing as Smis in very few places.
1037 static const int kMapWordXorMask = 0b11;
1038#endif
1039
1042#ifdef V8_ENABLE_CHECKS
1043 CheckInitializedImpl(isolate);
1044#endif
1045 }
1046
1047 V8_INLINE static constexpr bool HasHeapObjectTag(Address value) {
1048 return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
1049 }
1050
1051 V8_INLINE static constexpr int SmiValue(Address value) {
1052 return PlatformSmiTagging::SmiToInt(value);
1053 }
1054
1055 V8_INLINE static constexpr Address AddressToSmi(Address value) {
1056 return (value << (kSmiTagSize + PlatformSmiTagging::kSmiShiftSize)) |
1057 kSmiTag;
1058 }
1059
1060 V8_INLINE static constexpr Address IntToSmi(int value) {
1061 return AddressToSmi(static_cast<Address>(value));
1062 }
1063
1064 template <typename T,
1065 typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
1066 V8_INLINE static constexpr Address IntegralToSmi(T value) {
1067 return AddressToSmi(static_cast<Address>(value));
1068 }
1069
1070 template <typename T,
1071 typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
1072 V8_INLINE static constexpr bool IsValidSmi(T value) {
1073 return PlatformSmiTagging::IsValidSmi(value);
1074 }
1075
1076 template <typename T,
1077 typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
1078 static constexpr std::optional<Address> TryIntegralToSmi(T value) {
1079 if (V8_LIKELY(PlatformSmiTagging::IsValidSmi(value))) {
1080 return {AddressToSmi(static_cast<Address>(value))};
1081 }
1082 return {};
1083 }
1084
1085#if V8_STATIC_ROOTS_BOOL
1086 V8_INLINE static bool is_identical(Address obj, Tagged_t constant) {
1087 return static_cast<Tagged_t>(obj) == constant;
1088 }
1089
1090 V8_INLINE static bool CheckInstanceMapRange(Address obj, Tagged_t first_map,
1091 Tagged_t last_map) {
1093#ifdef V8_MAP_PACKING
1094 map = UnpackMapWord(map);
1095#endif
1096 return map >= first_map && map <= last_map;
1097 }
1098#endif
1099
1102#ifdef V8_MAP_PACKING
1103 map = UnpackMapWord(map);
1104#endif
1106 }
1107
1109 if (!HasHeapObjectTag(obj)) return kNullAddress;
1111#ifdef V8_MAP_PACKING
1112 map = UnpackMapWord(map);
1113#endif
1114 return map;
1115 }
1116
1120
1121 V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
1122 int representation = (instance_type & kStringRepresentationAndEncodingMask);
1123 return representation == kExternalTwoByteRepresentationTag;
1124 }
1125
1126 V8_INLINE static constexpr bool CanHaveInternalField(int instance_type) {
1127 static_assert(kJSObjectType + 1 == kFirstJSApiObjectType);
1128 static_assert(kJSObjectType < kLastJSApiObjectType);
1130 // Check for IsJSObject() || IsJSSpecialApiObject() || IsJSApiObject()
1131 return instance_type == kJSSpecialApiObjectType ||
1132 // inlined version of base::IsInRange
1133 (static_cast<unsigned>(static_cast<unsigned>(instance_type) -
1134 static_cast<unsigned>(kJSObjectType)) <=
1135 static_cast<unsigned>(kLastJSApiObjectType - kJSObjectType));
1136 }
1137
1138 V8_INLINE static uint8_t GetNodeFlag(Address* obj, int shift) {
1139 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1140 return *addr & static_cast<uint8_t>(1U << shift);
1141 }
1142
1143 V8_INLINE static void UpdateNodeFlag(Address* obj, bool value, int shift) {
1144 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1145 uint8_t mask = static_cast<uint8_t>(1U << shift);
1146 *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
1147 }
1148
1149 V8_INLINE static uint8_t GetNodeState(Address* obj) {
1150 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1151 return *addr & kNodeStateMask;
1152 }
1153
1154 V8_INLINE static void UpdateNodeState(Address* obj, uint8_t value) {
1155 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1156 *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
1157 }
1158
1159 V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
1160 void* data) {
1161 Address addr = reinterpret_cast<Address>(isolate) +
1163 *reinterpret_cast<void**>(addr) = data;
1164 }
1165
1166 V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
1167 uint32_t slot) {
1168 Address addr = reinterpret_cast<Address>(isolate) +
1170 return *reinterpret_cast<void* const*>(addr);
1171 }
1172
1174 Address addr =
1175 reinterpret_cast<Address>(isolate) + kIsolateLongTaskStatsCounterOffset;
1176 ++(*reinterpret_cast<size_t*>(addr));
1177 }
1178
1179 V8_INLINE static Address* GetRootSlot(v8::Isolate* isolate, int index) {
1180 Address addr = reinterpret_cast<Address>(isolate) + kIsolateRootsOffset +
1181 index * kApiSystemPointerSize;
1182 return reinterpret_cast<Address*>(addr);
1183 }
1184
1185 V8_INLINE static Address GetRoot(v8::Isolate* isolate, int index) {
1186#if V8_STATIC_ROOTS_BOOL
1187 Address base = *reinterpret_cast<Address*>(
1188 reinterpret_cast<uintptr_t>(isolate) + kIsolateCageBaseOffset);
1189 switch (index) {
1190#define DECOMPRESS_ROOT(name, ...) \
1191 case k##name##RootIndex: \
1192 return base + StaticReadOnlyRoot::k##name;
1193 EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT)
1194#undef DECOMPRESS_ROOT
1195#undef EXPORTED_STATIC_ROOTS_PTR_LIST
1196 default:
1197 break;
1198 }
1199#endif // V8_STATIC_ROOTS_BOOL
1200 return *GetRootSlot(isolate, index);
1201 }
1202
1203#ifdef V8_ENABLE_SANDBOX
1204 V8_INLINE static Address* GetExternalPointerTableBase(v8::Isolate* isolate) {
1205 Address addr = reinterpret_cast<Address>(isolate) +
1206 kIsolateExternalPointerTableOffset +
1208 return *reinterpret_cast<Address**>(addr);
1209 }
1210
1211 V8_INLINE static Address* GetSharedExternalPointerTableBase(
1212 v8::Isolate* isolate) {
1213 Address addr = reinterpret_cast<Address>(isolate) +
1214 kIsolateSharedExternalPointerTableAddressOffset;
1215 addr = *reinterpret_cast<Address*>(addr);
1217 return *reinterpret_cast<Address**>(addr);
1218 }
1219#endif
1220
1221 template <typename T>
1222 V8_INLINE static T ReadRawField(Address heap_object_ptr, int offset) {
1223 Address addr = heap_object_ptr + offset - kHeapObjectTag;
1224#ifdef V8_COMPRESS_POINTERS
1225 if (sizeof(T) > kApiTaggedSize) {
1226 // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
1227 // fields (external pointers, doubles and BigInt data) are only
1228 // kTaggedSize aligned so we have to use unaligned pointer friendly way of
1229 // accessing them in order to avoid undefined behavior in C++ code.
1230 T r;
1231 memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
1232 return r;
1233 }
1234#endif
1235 return *reinterpret_cast<const T*>(addr);
1236 }
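
  // Illustrative sketch (hypothetical helper, added for this listing, not part
  // of the original header): the memcpy idiom above spelled out for a single
  // 8-byte field. Under pointer compression such fields are only guaranteed to
  // be kApiTaggedSize (4 byte) aligned, so a direct reinterpret_cast load
  // could be undefined behavior.
  V8_INLINE static double ReadUnalignedDoubleForIllustration(Address field_address) {
    double result;
    memcpy(&result, reinterpret_cast<const void*>(field_address), sizeof(result));
    return result;
  }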
1237
1239 int offset) {
1240#ifdef V8_COMPRESS_POINTERS
1241 uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
1242 Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
1243 return base + static_cast<Address>(static_cast<uintptr_t>(value));
1244#else
1245 return ReadRawField<Address>(heap_object_ptr, offset);
1246#endif
1247 }
1248
1250 int offset) {
1251#ifdef V8_COMPRESS_POINTERS
1252 uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
1253 return static_cast<Address>(static_cast<uintptr_t>(value));
1254#else
1255 return ReadRawField<Address>(heap_object_ptr, offset);
1256#endif
1257 }
1258
1260#ifdef V8_ENABLE_SANDBOX
1261 return reinterpret_cast<v8::Isolate*>(
1263#else
1264 // Not used in non-sandbox mode.
1265 return nullptr;
1266#endif
1267 }
1268
1269 template <ExternalPointerTagRange tag_range>
1271 Address heap_object_ptr,
1272 int offset) {
1273#ifdef V8_ENABLE_SANDBOX
1274 static_assert(!tag_range.IsEmpty());
1275 // See src/sandbox/external-pointer-table.h. Logic duplicated here so
1276 // it can be inlined and doesn't require an additional call.
1277 Address* table = IsSharedExternalPointerType(tag_range)
1278 ? GetSharedExternalPointerTableBase(isolate)
1279 : GetExternalPointerTableBase(isolate);
1281 ReadRawField<ExternalPointerHandle>(heap_object_ptr, offset);
1282 uint32_t index = handle >> kExternalPointerIndexShift;
1283 std::atomic<Address>* ptr =
1284 reinterpret_cast<std::atomic<Address>*>(&table[index]);
1285 Address entry = std::atomic_load_explicit(ptr, std::memory_order_relaxed);
1286 ExternalPointerTag actual_tag = static_cast<ExternalPointerTag>(
1288 if (V8_LIKELY(tag_range.Contains(actual_tag))) {
1289 return entry & kExternalPointerPayloadMask;
1290 } else {
1291 return 0;
1292 }
1293 return entry;
1294#else
1295 return ReadRawField<Address>(heap_object_ptr, offset);
1296#endif // V8_ENABLE_SANDBOX
1297 }
1298
1299#ifdef V8_COMPRESS_POINTERS
1300 V8_INLINE static Address GetPtrComprCageBaseFromOnHeapAddress(Address addr) {
1301 return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
1302 }
1303
1304 V8_INLINE static uint32_t CompressTagged(Address value) {
1305 return static_cast<uint32_t>(value);
1306 }
1307
1308 V8_INLINE static Address DecompressTaggedField(Address heap_object_ptr,
1309 uint32_t value) {
1310 Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
1311 return base + static_cast<Address>(static_cast<uintptr_t>(value));
1312 }
1313
1314#endif // V8_COMPRESS_POINTERS
1315};
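
// Illustrative checks (added for this listing, not part of the original
// header): TryIntegralToSmi yields an encoded Smi only when the value fits the
// platform's Smi range, so callers can branch on the optional instead of
// calling IsValidSmi separately.
static_assert(Internals::TryIntegralToSmi(42).has_value());
static_assert(!Internals::TryIntegralToSmi(uint64_t{1} << 40).has_value());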
1316
1317// Only perform cast check for types derived from v8::Data since
1318// other types do not implement the Cast method.
1319template <bool PerformCheck>
1321 template <class T>
1322 static void Perform(T* data);
1323};
1324
1325template <>
1326template <class T>
1328 T::Cast(data);
1329}
1330
1331template <>
1332template <class T>
1334
1335template <class T>
1338 !std::is_same<Data, std::remove_cv_t<T>>::value>::Perform(data);
1339}
1340
1341// A base class for backing stores, which is needed due to vagaries of
1342// how static casts work with std::shared_ptr.
1344
1345// The maximum value in enum GarbageCollectionReason, defined in heap.h.
1346// This is needed for histograms sampling garbage collection reasons.
1348
1349// Base class for the address block allocator compatible with standard
1350// containers, which registers its allocated range as strong roots.
1352 public:
1353 Heap* heap() const { return heap_; }
1354
1356 const StrongRootAllocatorBase& b) {
1357 // TODO(pkasting): Replace this body with `= default` after dropping support
1358 // for old gcc versions.
1359 return a.heap_ == b.heap_;
1360 }
1361
1362 protected:
1363 explicit StrongRootAllocatorBase(Heap* heap) : heap_(heap) {}
1364 explicit StrongRootAllocatorBase(LocalHeap* heap);
1367 explicit StrongRootAllocatorBase(LocalIsolate* isolate);
1368
1369 // Allocate/deallocate a range of n elements of type internal::Address.
1371 void deallocate_impl(Address* p, size_t n) noexcept;
1372
1373 private:
1374 Heap* heap_;
1375};
1376
1377// The general version of this template behaves just as std::allocator, with
1378// the exception that the constructor takes the isolate as parameter. Only
1379// specialized versions, e.g., internal::StrongRootAllocator<internal::Address>
1380// and internal::StrongRootAllocator<v8::Local<T>> register the allocated range
1381// as strong roots.
1382template <typename T>
1383class StrongRootAllocator : private std::allocator<T> {
1384 public:
1385 using value_type = T;
1386
1387 template <typename HeapOrIsolateT>
1388 explicit StrongRootAllocator(HeapOrIsolateT*) {}
1389 template <typename U>
1391
1392 using std::allocator<T>::allocate;
1393 using std::allocator<T>::deallocate;
1394};
1395
1396// TODO(pkasting): Replace with `requires` clauses after dropping support for
1397// old gcc versions.
1398template <typename Iterator, typename = void>
1399inline constexpr bool kHaveIteratorConcept = false;
1400template <typename Iterator>
1401inline constexpr bool kHaveIteratorConcept<
1402 Iterator, std::void_t<typename Iterator::iterator_concept>> = true;
1403
1404template <typename Iterator, typename = void>
1405inline constexpr bool kHaveIteratorCategory = false;
1406template <typename Iterator>
1407inline constexpr bool kHaveIteratorCategory<
1408 Iterator, std::void_t<typename Iterator::iterator_category>> = true;
1409
1410// Helper struct that contains an `iterator_concept` type alias only when either
1411// `Iterator` or `std::iterator_traits<Iterator>` does.
1412// Default: no alias.
1413template <typename Iterator, typename = void>
1415// Use `Iterator::iterator_concept` if available.
1416template <typename Iterator>
1418 Iterator, std::enable_if_t<kHaveIteratorConcept<Iterator>>> {
1419 using iterator_concept = typename Iterator::iterator_concept;
1420};
1421// Otherwise fall back to `std::iterator_traits<Iterator>` if possible.
1422template <typename Iterator>
1424 Iterator, std::enable_if_t<kHaveIteratorCategory<Iterator> &&
1425 !kHaveIteratorConcept<Iterator>>> {
1426 // There seems to be no feature-test macro covering this, so use the
1427 // presence of `<ranges>` as a crude proxy, since it was added to the
1428 // standard as part of the Ranges papers.
1429 // TODO(pkasting): Add this unconditionally after dropping support for old
1430 // libstdc++ versions.
1431#if __has_include(<ranges>)
1432 using iterator_concept =
1433 typename std::iterator_traits<Iterator>::iterator_concept;
1434#endif
1435};
1436
1437// A class of iterators that wrap some different iterator type.
1438// If specified, ElementType is the type of element accessed by the wrapper
1439// iterator; in this case, the actual reference and pointer types of Iterator
1440// must be convertible to ElementType& and ElementType*, respectively.
1441template <typename Iterator, typename ElementType = void>
1443 public:
1444 static_assert(
1445 std::is_void_v<ElementType> ||
1446 (std::is_convertible_v<typename std::iterator_traits<Iterator>::pointer,
1447 std::add_pointer_t<ElementType>> &&
1448 std::is_convertible_v<typename std::iterator_traits<Iterator>::reference,
1449 std::add_lvalue_reference_t<ElementType>>));
1450
1452 typename std::iterator_traits<Iterator>::difference_type;
1454 std::conditional_t<std::is_void_v<ElementType>,
1455 typename std::iterator_traits<Iterator>::value_type,
1456 ElementType>;
1457 using pointer =
1458 std::conditional_t<std::is_void_v<ElementType>,
1459 typename std::iterator_traits<Iterator>::pointer,
1460 std::add_pointer_t<ElementType>>;
1462 std::conditional_t<std::is_void_v<ElementType>,
1463 typename std::iterator_traits<Iterator>::reference,
1464 std::add_lvalue_reference_t<ElementType>>;
1466 typename std::iterator_traits<Iterator>::iterator_category;
1467
1468 constexpr WrappedIterator() noexcept = default;
1469 constexpr explicit WrappedIterator(Iterator it) noexcept : it_(it) {}
1470
1471 // TODO(pkasting): Switch to `requires` and concepts after dropping support
1472 // for old gcc and libstdc++ versions.
1473 template <typename OtherIterator, typename OtherElementType,
1474 typename = std::enable_if_t<
1475 std::is_convertible_v<OtherIterator, Iterator>>>
1478 : it_(other.base()) {}
1479
1480 [[nodiscard]] constexpr reference operator*() const noexcept { return *it_; }
1481 [[nodiscard]] constexpr pointer operator->() const noexcept {
1482 if constexpr (std::is_pointer_v<Iterator>) {
1483 return it_;
1484 } else {
1485 return it_.operator->();
1486 }
1487 }
1488
1489 template <typename OtherIterator, typename OtherElementType>
1490 [[nodiscard]] constexpr bool operator==(
1492 const noexcept {
1493 return it_ == other.base();
1494 }
1495#if V8_HAVE_SPACESHIP_OPERATOR
1496 template <typename OtherIterator, typename OtherElementType>
1497 [[nodiscard]] constexpr auto operator<=>(
1499 const noexcept {
1500 if constexpr (std::three_way_comparable_with<Iterator, OtherIterator>) {
1501 return it_ <=> other.base();
1502 } else if constexpr (std::totally_ordered_with<Iterator, OtherIterator>) {
1503 if (it_ < other.base()) {
1504 return std::strong_ordering::less;
1505 }
1506 return (it_ > other.base()) ? std::strong_ordering::greater
1507 : std::strong_ordering::equal;
1508 } else {
1509 if (it_ < other.base()) {
1510 return std::partial_ordering::less;
1511 }
1512 if (other.base() < it_) {
1513 return std::partial_ordering::greater;
1514 }
1515 return (it_ == other.base()) ? std::partial_ordering::equivalent
1516 : std::partial_ordering::unordered;
1517 }
1518 }
1519#else
1520 // Assume that if spaceship isn't present, operator rewriting might not be
1521 // either.
1522 template <typename OtherIterator, typename OtherElementType>
1523 [[nodiscard]] constexpr bool operator!=(
1525 const noexcept {
1526 return it_ != other.base();
1527 }
1528
1529 template <typename OtherIterator, typename OtherElementType>
1530 [[nodiscard]] constexpr bool operator<(
1532 const noexcept {
1533 return it_ < other.base();
1534 }
1535 template <typename OtherIterator, typename OtherElementType>
1536 [[nodiscard]] constexpr bool operator<=(
1538 const noexcept {
1539 return it_ <= other.base();
1540 }
1541 template <typename OtherIterator, typename OtherElementType>
1542 [[nodiscard]] constexpr bool operator>(
1544 const noexcept {
1545 return it_ > other.base();
1546 }
1547 template <typename OtherIterator, typename OtherElementType>
1548 [[nodiscard]] constexpr bool operator>=(
1550 const noexcept {
1551 return it_ >= other.base();
1552 }
1553#endif
1554
1555 constexpr WrappedIterator& operator++() noexcept {
1556 ++it_;
1557 return *this;
1558 }
1559 constexpr WrappedIterator operator++(int) noexcept {
1560 WrappedIterator result(*this);
1561 ++(*this);
1562 return result;
1563 }
1564
1565 constexpr WrappedIterator& operator--() noexcept {
1566 --it_;
1567 return *this;
1568 }
1569 constexpr WrappedIterator operator--(int) noexcept {
1570 WrappedIterator result(*this);
1571 --(*this);
1572 return result;
1573 }
1574 [[nodiscard]] constexpr WrappedIterator operator+(
1575 difference_type n) const noexcept {
1576 WrappedIterator result(*this);
1577 result += n;
1578 return result;
1579 }
1580 [[nodiscard]] friend constexpr WrappedIterator operator+(
1581 difference_type n, const WrappedIterator& x) noexcept {
1582 return x + n;
1583 }
1585 it_ += n;
1586 return *this;
1587 }
1588 [[nodiscard]] constexpr WrappedIterator operator-(
1589 difference_type n) const noexcept {
1590 return *this + -n;
1591 }
1593 return *this += -n;
1594 }
1595 template <typename OtherIterator, typename OtherElementType>
1596 [[nodiscard]] constexpr auto operator-(
1598 const noexcept {
1599 return it_ - other.base();
1600 }
1601 [[nodiscard]] constexpr reference operator[](
1602 difference_type n) const noexcept {
1603 return it_[n];
1604 }
1605
1606 [[nodiscard]] constexpr const Iterator& base() const noexcept { return it_; }
1607
1608 private:
1609 Iterator it_;
1610};
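
// Illustrative sketch (added for this listing; the function name is made up,
// not part of the original header): WrappedIterator forwards every operation
// to the iterator it wraps, so a wrapper around a plain pointer can be used
// exactly like the pointer itself.
inline int SumWrappedRangeForIllustration(const int* begin, const int* end) {
  int sum = 0;
  for (WrappedIterator<const int*> it(begin), last(end); it != last; ++it) {
    sum += *it;  // operator*() dereferences the wrapped pointer.
  }
  return sum;
}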
1611
1612// Helper functions about values contained in handles.
1613// A value is either an indirect pointer or a direct pointer, depending on
1614// whether direct local support is enabled.
1615class ValueHelper final {
1616 public:
1617 // ValueHelper::InternalRepresentationType is an abstract type that
1618 // corresponds to the internal representation of v8::Local and essentially
1619 // to what T* really is (these two are always in sync). This type is used in
1620 // methods like GetDataFromSnapshotOnce that need access to a handle's
1621 // internal representation. In particular, if `x` is a `v8::Local<T>`, then
1622 // `v8::Local<T>::FromRepr(x.repr())` gives exactly the same handle as `x`.
1623#ifdef V8_ENABLE_DIRECT_HANDLE
1624 static constexpr Address kTaggedNullAddress = 1;
1625
1627 static constexpr InternalRepresentationType kEmpty = kTaggedNullAddress;
1628#else
1630 static constexpr InternalRepresentationType kEmpty = nullptr;
1631#endif // V8_ENABLE_DIRECT_HANDLE
1632
1633 template <typename T>
1634 V8_INLINE static bool IsEmpty(T* value) {
1635 return ValueAsRepr(value) == kEmpty;
1636 }
1637
1638 // Returns a handle's "value" for all kinds of abstract handles. For Local,
1639 // it is equivalent to `*handle`. The variadic parameters support handle
1640 // types with extra type parameters, like `Persistent<T, M>`.
1641 template <template <typename T, typename... Ms> typename H, typename T,
1642 typename... Ms>
1643 V8_INLINE static T* HandleAsValue(const H<T, Ms...>& handle) {
1644 return handle.template value<T>();
1645 }
1646
1647#ifdef V8_ENABLE_DIRECT_HANDLE
1648
1649 template <typename T>
1650 V8_INLINE static Address ValueAsAddress(const T* value) {
1651 return reinterpret_cast<Address>(value);
1652 }
1653
1654 template <typename T, bool check_null = true, typename S>
1655 V8_INLINE static T* SlotAsValue(S* slot) {
1656 if (check_null && slot == nullptr) {
1657 return reinterpret_cast<T*>(kTaggedNullAddress);
1658 }
1659 return *reinterpret_cast<T**>(slot);
1660 }
1661
1662 template <typename T>
1663 V8_INLINE static InternalRepresentationType ValueAsRepr(const T* value) {
1664 return reinterpret_cast<InternalRepresentationType>(value);
1665 }
1666
1667 template <typename T>
1668  V8_INLINE static T* ReprAsValue(InternalRepresentationType repr) {
1669    return reinterpret_cast<T*>(repr);
1670 }
1671
1672#else // !V8_ENABLE_DIRECT_HANDLE
1673
1674 template <typename T>
1675 V8_INLINE static Address ValueAsAddress(const T* value) {
1676 return *reinterpret_cast<const Address*>(value);
1677 }
1678
1679 template <typename T, bool check_null = true, typename S>
1680 V8_INLINE static T* SlotAsValue(S* slot) {
1681 return reinterpret_cast<T*>(slot);
1682 }
1683
1684 template <typename T>
1685  V8_INLINE static InternalRepresentationType ValueAsRepr(const T* value) {
1686    return const_cast<InternalRepresentationType>(
1687 reinterpret_cast<const Address*>(value));
1688 }
1689
1690 template <typename T>
1691  V8_INLINE static T* ReprAsValue(InternalRepresentationType repr) {
1692    return reinterpret_cast<T*>(repr);
1693 }
1694
1695#endif // V8_ENABLE_DIRECT_HANDLE
1696};
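As the comment at the top of ValueHelper explains, a handle's internal representation is either the tagged Address itself (direct handles) or a pointer to a slot holding that Address (indirect handles), and ValueAsAddress()/SlotAsValue() differ accordingly. The stand-alone sketch below is illustrative only; the names tagged_object, indirect_slot, and direct_value are hypothetical, and only the one-level-of-indirection difference mirrors the two branches above.

// Illustrative sketch, not part of v8-internal.h.
#include <cassert>
#include <cstdint>

using Address = uintptr_t;

int main() {
  Address tagged_object = 0x1234;  // stands in for a tagged heap pointer

  // Indirect (default) mode: the handle's T* points at a slot that holds the
  // Address, so reading the object's address takes one dereference.
  Address* indirect_slot = &tagged_object;
  Address via_indirect = *reinterpret_cast<const Address*>(indirect_slot);

  // Direct mode (V8_ENABLE_DIRECT_HANDLE): the handle's T* is the tagged
  // Address itself, reinterpreted, so no slot and no dereference is needed.
  void* direct_value = reinterpret_cast<void*>(tagged_object);
  Address via_direct = reinterpret_cast<Address>(direct_value);

  assert(via_indirect == via_direct);
  return 0;
}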
1697
1698/**
1699 * Helper functions about handles.
1700 */
1701class HandleHelper final {
1702 public:
1713 template <typename T1, typename T2>
1714 V8_INLINE static bool EqualHandles(const T1& lhs, const T2& rhs) {
1715 if (lhs.IsEmpty()) return rhs.IsEmpty();
1716 if (rhs.IsEmpty()) return false;
1717 return lhs.ptr() == rhs.ptr();
1718 }
1719};
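EqualHandles() treats two empty handles as equal, an empty and a non-empty handle as unequal, and otherwise compares the underlying object locations. A minimal stand-alone model of that rule follows; it is illustrative only, and FakeHandle is a hypothetical stand-in, not a V8 type.

// Illustrative sketch, not part of v8-internal.h.
#include <cassert>
#include <cstdint>
#include <optional>

struct FakeHandle {
  std::optional<uintptr_t> location;  // empty handle <=> no location
  bool IsEmpty() const { return !location.has_value(); }
  uintptr_t ptr() const { return *location; }
};

bool EqualHandles(const FakeHandle& lhs, const FakeHandle& rhs) {
  if (lhs.IsEmpty()) return rhs.IsEmpty();
  if (rhs.IsEmpty()) return false;
  return lhs.ptr() == rhs.ptr();
}

int main() {
  FakeHandle empty1{}, empty2{};
  FakeHandle a{0x1000}, b{0x1000}, c{0x2000};
  assert(EqualHandles(empty1, empty2));  // both empty: equal
  assert(!EqualHandles(empty1, a));      // one empty: not equal
  assert(EqualHandles(a, b));            // same location: equal
  assert(!EqualHandles(a, c));           // different locations: not equal
  return 0;
}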
1720
1721V8_EXPORT void VerifyHandleIsNonEmpty(bool is_empty);
1722
1723// These functions are here just to match friend declarations in
1724// XxxCallbackInfo classes allowing these functions to access the internals
1725// of the info objects. These functions are supposed to be called by debugger
1726// macros.
1727void PrintFunctionCallbackInfo(void* function_callback_info);
1728void PrintPropertyCallbackInfo(void* property_callback_info);
1729
1730} // namespace internal
1731} // namespace v8
1732
1733#endif // INCLUDE_V8_INTERNAL_H_