v8 14.1.146 (node 25.0.0)
V8 is Google's open source JavaScript engine
Loading...
Searching...
No Matches
v8-internal.h
Go to the documentation of this file.
1// Copyright 2018 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef INCLUDE_V8_INTERNAL_H_
6#define INCLUDE_V8_INTERNAL_H_
7
8#include <stddef.h>
9#include <stdint.h>
10#include <string.h>
11
12#include <atomic>
13#include <iterator>
14#include <limits>
15#include <memory>
16#include <optional>
17#include <type_traits>
18
19#include "v8config.h" // NOLINT(build/include_directory)
20
21// TODO(pkasting): Use <compare>/spaceship unconditionally after dropping
22// support for old libstdc++ versions.
#if __has_include(<version>)
#include <version>
#endif
// Only use <compare>/<concepts> when the standard library fully implements
// three-way comparison AND concepts; otherwise fall back to hand-written
// comparison operators. (The scrape had dropped the line-continuation
// backslashes, leaving the #if condition malformed; restored here.)
#if defined(__cpp_lib_three_way_comparison) &&   \
    __cpp_lib_three_way_comparison >= 201711L && \
    defined(__cpp_lib_concepts) && __cpp_lib_concepts >= 202002L
#include <compare>
#include <concepts>

#define V8_HAVE_SPACESHIP_OPERATOR 1
#else
#define V8_HAVE_SPACESHIP_OPERATOR 0
#endif
36
37namespace v8 {
38
39class Array;
40class Context;
41class Data;
42class Isolate;
43
44namespace internal {
45
46class Heap;
47class LocalHeap;
48class Isolate;
49class IsolateGroup;
50class LocalIsolate;
51
52typedef uintptr_t Address;
53static constexpr Address kNullAddress = 0;
54
55constexpr int KB = 1024;
56constexpr int MB = KB * 1024;
57constexpr int GB = MB * 1024;
58#ifdef V8_TARGET_ARCH_X64
59constexpr size_t TB = size_t{GB} * 1024;
60#endif
61
62/**
63 * Configuration of tagging scheme.
64 */
65const int kApiSystemPointerSize = sizeof(void*);
66const int kApiDoubleSize = sizeof(double);
67const int kApiInt32Size = sizeof(int32_t);
68const int kApiInt64Size = sizeof(int64_t);
69const int kApiSizetSize = sizeof(size_t);
70
71// Tag information for HeapObject.
72const int kHeapObjectTag = 1;
73const int kWeakHeapObjectTag = 3;
74const int kHeapObjectTagSize = 2;
75const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
77
// Tag information for forwarding pointers stored in object headers.
79// 0b00 at the lowest 2 bits in the header indicates that the map word is a
80// forwarding pointer.
81const int kForwardingTag = 0;
82const int kForwardingTagSize = 2;
83const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;
84
85// Tag information for Smi.
86const int kSmiTag = 0;
87const int kSmiTagSize = 1;
88const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
89
90template <size_t tagged_ptr_size>
91struct SmiTagging;
92
93constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
94constexpr uintptr_t kUintptrAllBitsSet =
95 static_cast<uintptr_t>(kIntptrAllBitsSet);
96
97// Smi constants for systems where tagged pointer is a 32-bit value.
template <>
struct SmiTagging<4> {
  // 31 payload bits; the remaining low bit carries the Smi tag.
  enum { kSmiShiftSize = 0, kSmiValueSize = 31 };

  // Smallest representable Smi, -(2^30): all-ones shifted left so only the
  // top (sign) bits remain set.
  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  // Largest representable Smi, 2^30 - 1.
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  // Extracts the signed integer payload from a tagged Smi value.
  V8_INLINE static constexpr int SmiToInt(Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Truncate and shift down (requires >> to be sign extending).
    return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
  }

  // Returns true if the given signed integral value fits into a 31-bit Smi.
  template <class T, typename std::enable_if_t<std::is_integral_v<T> &&
                                               std::is_signed_v<T>>* = nullptr>
  V8_INLINE static constexpr bool IsValidSmi(T value) {
    // Is value in range [kSmiMinValue, kSmiMaxValue].
    // Use unsigned operations in order to avoid undefined behaviour in case of
    // signed integer overflow.
    return (static_cast<uintptr_t>(value) -
            static_cast<uintptr_t>(kSmiMinValue)) <=
           (static_cast<uintptr_t>(kSmiMaxValue) -
            static_cast<uintptr_t>(kSmiMinValue));
  }

  // Returns true if the given unsigned integral value fits into a 31-bit Smi.
  template <class T,
            typename std::enable_if_t<std::is_integral_v<T> &&
                                      std::is_unsigned_v<T>>* = nullptr>
  V8_INLINE static constexpr bool IsValidSmi(T value) {
    static_assert(kSmiMaxValue <= std::numeric_limits<uintptr_t>::max());
    return value <= static_cast<uintptr_t>(kSmiMaxValue);
  }

  // Same as the `intptr_t` version but works with int64_t on 32-bit builds
  // without slowing down anything else.
  V8_INLINE static constexpr bool IsValidSmi(int64_t value) {
    return (static_cast<uint64_t>(value) -
            static_cast<uint64_t>(kSmiMinValue)) <=
           (static_cast<uint64_t>(kSmiMaxValue) -
            static_cast<uint64_t>(kSmiMinValue));
  }

  // uint64_t counterpart of the overload above.
  V8_INLINE static constexpr bool IsValidSmi(uint64_t value) {
    static_assert(kSmiMaxValue <= std::numeric_limits<uint64_t>::max());
    return value <= static_cast<uint64_t>(kSmiMaxValue);
  }
};
146
147// Smi constants for systems where tagged pointer is a 64-bit value.
template <>
struct SmiTagging<8> {
  // 32 payload bits stored in the upper half of the 64-bit word
  // (shifted up by 31 + the 1-bit Smi tag).
  enum { kSmiShiftSize = 31, kSmiValueSize = 32 };

  // Smallest representable Smi: INT32_MIN.
  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  // Largest representable Smi: INT32_MAX.
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  // Extracts the signed integer payload from a tagged Smi value.
  V8_INLINE static constexpr int SmiToInt(Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Shift down and throw away top 32 bits.
    return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
  }

  // Returns true if the given signed integral value fits into a 32-bit Smi.
  template <class T, typename std::enable_if_t<std::is_integral_v<T> &&
                                               std::is_signed_v<T>>* = nullptr>
  V8_INLINE static constexpr bool IsValidSmi(T value) {
    // To be representable as a long smi, the value must be a 32-bit integer.
    return std::numeric_limits<int32_t>::min() <= value &&
           value <= std::numeric_limits<int32_t>::max();
  }

  // Returns true if the given unsigned integral value fits into a 32-bit Smi.
  template <class T,
            typename std::enable_if_t<std::is_integral_v<T> &&
                                      std::is_unsigned_v<T>>* = nullptr>
  V8_INLINE static constexpr bool IsValidSmi(T value) {
    return value <= std::numeric_limits<int32_t>::max();
  }
};
177
178#ifdef V8_COMPRESS_POINTERS
179// See v8:7703 or src/common/ptr-compr-inl.h for details about pointer
180// compression.
181constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
182constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
183
184static_assert(
186 "Pointer compression can be enabled only for 64-bit architectures");
187const int kApiTaggedSize = kApiInt32Size;
188#else
190#endif
191
194}
195
196#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
198#else
199using PlatformSmiTagging = SmiTagging<kApiTaggedSize>;
200#endif
201
// TODO(ishell): Consider adding kSmiShiftBits = kSmiShiftSize + kSmiTagSize
// since it's used much more often than the individual constants.
204const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
205const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
206const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
207const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
208constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
209constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }
210constexpr bool Is64() { return kApiSystemPointerSize == sizeof(int64_t); }
211
212V8_INLINE static constexpr Address IntToSmi(int value) {
213 return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
214 kSmiTag;
215}
216
217/*
218 * Sandbox related types, constants, and functions.
219 */
// Returns whether the V8 sandbox is enabled in this build configuration.
// The answer is fixed at compile time via the V8_ENABLE_SANDBOX define.
constexpr bool SandboxIsEnabled() {
#ifdef V8_ENABLE_SANDBOX
  return true;
#else
  return false;
#endif
}
227
228// SandboxedPointers are guaranteed to point into the sandbox. This is achieved
229// for example by storing them as offset rather than as raw pointers.
230using SandboxedPointer_t = Address;
231
232#ifdef V8_ENABLE_SANDBOX
233
234// Size of the sandbox, excluding the guard regions surrounding it.
235#if defined(V8_TARGET_OS_ANDROID)
236// On Android, most 64-bit devices seem to be configured with only 39 bits of
237// virtual address space for userspace. As such, limit the sandbox to 128GB (a
238// quarter of the total available address space).
239constexpr size_t kSandboxSizeLog2 = 37; // 128 GB
240#elif defined(V8_TARGET_OS_IOS)
241// On iOS, we only get 64 GB of usable virtual address space even with the
242// "jumbo" extended virtual addressing entitlement. Limit the sandbox size to
243// 16 GB so that the base address + size for the emulated virtual address space
244// lies within the 64 GB total virtual address space.
245constexpr size_t kSandboxSizeLog2 = 34; // 16 GB
246#else
247// Everywhere else use a 1TB sandbox.
248constexpr size_t kSandboxSizeLog2 = 40; // 1 TB
249#endif // V8_TARGET_OS_ANDROID
250constexpr size_t kSandboxSize = 1ULL << kSandboxSizeLog2;
251
252// Required alignment of the sandbox. For simplicity, we require the
253// size of the guard regions to be a multiple of this, so that this specifies
254// the alignment of the sandbox including and excluding surrounding guard
255// regions. The alignment requirement is due to the pointer compression cage
256// being located at the start of the sandbox.
258
259// Sandboxed pointers are stored inside the heap as offset from the sandbox
260// base shifted to the left. This way, it is guaranteed that the offset is
261// smaller than the sandbox size after shifting it to the right again. This
262// constant specifies the shift amount.
264
265// Size of the guard regions surrounding the sandbox. This assumes a worst-case
266// scenario of a 32-bit unsigned index used to access an array of 64-bit values
267// with an additional 4GB (compressed pointer) offset. In particular, accesses
268// to TypedArrays are effectively computed as
269// `entry_pointer = array->base + array->offset + index * array->element_size`.
270// See also https://crbug.com/40070746 for more details.
271constexpr size_t kSandboxGuardRegionSize = 32ULL * GB + 4ULL * GB;
272
273static_assert((kSandboxGuardRegionSize % kSandboxAlignment) == 0,
274 "The size of the guard regions around the sandbox must be a "
275 "multiple of its required alignment.");
276
277// On OSes where reserving virtual memory is too expensive to reserve the
278// entire address space backing the sandbox, notably Windows pre 8.1, we create
279// a partially reserved sandbox that doesn't actually reserve most of the
280// memory, and so doesn't have the desired security properties as unrelated
281// memory allocations could end up inside of it, but which still ensures that
282// objects that should be located inside the sandbox are allocated within
283// kSandboxSize bytes from the start of the sandbox. The minimum size of the
284// region that is actually reserved for such a sandbox is specified by this
285// constant and should be big enough to contain the pointer compression cage as
286// well as the ArrayBuffer partition.
287constexpr size_t kSandboxMinimumReservationSize = 8ULL * GB;
288
290 "The minimum reservation size for a sandbox must be larger than "
291 "the pointer compression cage contained within it.");
292
293// The maximum buffer size allowed inside the sandbox. This is mostly dependent
294// on the size of the guard regions around the sandbox: an attacker must not be
295// able to construct a buffer that appears larger than the guard regions and
296// thereby "reach out of" the sandbox.
297constexpr size_t kMaxSafeBufferSizeForSandbox = 32ULL * GB - 1;
299 "The maximum allowed buffer size must not be larger than the "
300 "sandbox's guard regions");
301
302constexpr size_t kBoundedSizeShift = 29;
303static_assert(1ULL << (64 - kBoundedSizeShift) ==
305 "The maximum size of a BoundedSize must be synchronized with the "
306 "kMaxSafeBufferSizeForSandbox");
307
308#endif // V8_ENABLE_SANDBOX
309
310#ifdef V8_COMPRESS_POINTERS
311
312#ifdef V8_TARGET_OS_ANDROID
313// The size of the virtual memory reservation for an external pointer table.
314// This determines the maximum number of entries in a table. Using a maximum
315// size allows omitting bounds checks on table accesses if the indices are
316// guaranteed (e.g. through shifting) to be below the maximum index. This
317// value must be a power of two.
319
320// The external pointer table indices stored in HeapObjects as external
321// pointers are shifted to the left by this amount to guarantee that they are
322// smaller than the maximum table size even after the C++ compiler multiplies
323// them by 8 to be used as indexes into a table of 64 bit pointers.
325#else
328#endif // V8_TARGET_OS_ANDROID
329
330// The maximum number of entries in an external pointer table.
331constexpr int kExternalPointerTableEntrySize = 8;
332constexpr int kExternalPointerTableEntrySizeLog2 = 3;
335static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
336 "kExternalPointerTableReservationSize and "
337 "kExternalPointerIndexShift don't match");
338
339#else // !V8_COMPRESS_POINTERS
340
341// Needed for the V8.SandboxedExternalPointersCount histogram.
343
344#endif // V8_COMPRESS_POINTERS
345
346constexpr uint64_t kExternalPointerMarkBit = 1ULL << 48;
347constexpr uint64_t kExternalPointerTagShift = 49;
348constexpr uint64_t kExternalPointerTagMask = 0x00fe000000000000ULL;
353constexpr uint64_t kExternalPointerTagAndMarkbitMask = 0x00ff000000000000ULL;
354constexpr uint64_t kExternalPointerPayloadMask = 0xff00ffffffffffffULL;
355
356// A ExternalPointerHandle represents a (opaque) reference to an external
357// pointer that can be stored inside the sandbox. A ExternalPointerHandle has
358// meaning only in combination with an (active) Isolate as it references an
359// external pointer stored in the currently active Isolate's
360// ExternalPointerTable. Internally, an ExternalPointerHandles is simply an
361// index into an ExternalPointerTable that is shifted to the left to guarantee
362// that it is smaller than the size of the table.
363using ExternalPointerHandle = uint32_t;
364
365// ExternalPointers point to objects located outside the sandbox. When the V8
366// sandbox is enabled, these are stored on heap as ExternalPointerHandles,
367// otherwise they are simply raw pointers.
368#ifdef V8_ENABLE_SANDBOX
370#else
371using ExternalPointer_t = Address;
372#endif
373
374constexpr ExternalPointer_t kNullExternalPointer = 0;
375constexpr ExternalPointerHandle kNullExternalPointerHandle = 0;
376
377// See `ExternalPointerHandle` for the main documentation. The difference to
378// `ExternalPointerHandle` is that the handle does not represent an arbitrary
379// external pointer but always refers to an object managed by `CppHeap`. The
380// handles are using in combination with a dedicated table for `CppHeap`
381// references.
382using CppHeapPointerHandle = uint32_t;
383
384// The actual pointer to objects located on the `CppHeap`. When pointer
385// compression is enabled these pointers are stored as `CppHeapPointerHandle`.
386// In non-compressed configurations the pointers are simply stored as raw
387// pointers.
388#ifdef V8_COMPRESS_POINTERS
390#else
391using CppHeapPointer_t = Address;
392#endif
393
394constexpr CppHeapPointer_t kNullCppHeapPointer = 0;
395constexpr CppHeapPointerHandle kNullCppHeapPointerHandle = 0;
396
397constexpr uint64_t kCppHeapPointerMarkBit = 1ULL;
398constexpr uint64_t kCppHeapPointerTagShift = 1;
399constexpr uint64_t kCppHeapPointerPayloadShift = 16;
400
401#ifdef V8_COMPRESS_POINTERS
402// CppHeapPointers use a dedicated pointer table. These constants control the
403// size and layout of the table. See the corresponding constants for the
404// external pointer table for further details.
408
409constexpr int kCppHeapPointerTableEntrySize = 8;
410constexpr int kCppHeapPointerTableEntrySizeLog2 = 3;
411constexpr size_t kMaxCppHeapPointers =
413static_assert((1 << (32 - kCppHeapPointerIndexShift)) == kMaxCppHeapPointers,
414 "kCppHeapPointerTableReservationSize and "
415 "kCppHeapPointerIndexShift don't match");
416
417#else // !V8_COMPRESS_POINTERS
418
419// Needed for the V8.SandboxedCppHeapPointersCount histogram.
421
422#endif // V8_COMPRESS_POINTERS
423
424// The number of tags reserved for embedder data. The value is picked
425// arbitrarily. In Chrome there are 4 embedders, so at least 4 tags are needed.
426// A generic tag was used for embedder data before, so one tag is used for that.
427#define V8_EMBEDDER_DATA_TAG_COUNT 5
428
429// Generic tag range struct to represent ranges of type tags.
430//
431// When referencing external objects via pointer tables, type tags are
432// frequently necessary to guarantee type safety for the external objects. When
433// support for subtyping is necessary, range-based type checks are used in
434// which all subtypes of a given supertype use contiguous tags. This struct can
435// then be used to represent such a type range.
436//
437// As an example, consider the following type hierarchy:
438//
//          A     F
//         / \
//        B   E
//       / \
//      C   D
444//
445// A potential type id assignment for range-based type checks is
446// {A: 0, B: 1, C: 2, D: 3, E: 4, F: 5}. With that, the type check for type A
447// would check for the range [A, E], while the check for B would check range
448// [B, D], and for F it would simply check [F, F].
449//
450// In addition, there is an option for performance tweaks: if the size of the
451// type range corresponding to a supertype is a power of two and starts at a
452// power of two (e.g. [0x100, 0x13f]), then the compiler can often optimize
453// the type check to use even fewer instructions (essentially replace a AND +
454// SUB with a single AND).
455//
// Inclusive range of 16-bit type tags; see the block comment above for how
// ranges enable subtype checks.
template <typename Tag>
struct TagRange {
  static_assert(std::is_enum_v<Tag> &&
                    std::is_same_v<std::underlying_type_t<Tag>, uint16_t>,
                "Tag parameter must be an enum with base type uint16_t");

  // Construct the inclusive tag range [first, last].
  constexpr TagRange(Tag first, Tag last) : first(first), last(last) {}

  // Construct a tag range consisting of a single tag.
  //
  // A single tag is always implicitly convertible to a tag range. This greatly
  // increases readability as most of the time, the exact tag of a field is
  // known and so no tag range needs to explicitly be created for it.
  constexpr TagRange(Tag tag)  // NOLINT(runtime/explicit)
      : first(tag), last(tag) {}

  // Construct an empty tag range.
  constexpr TagRange() : TagRange(static_cast<Tag>(0)) {}

  // A tag range is considered empty if it only contains the null tag.
  constexpr bool IsEmpty() const { return first == 0 && last == 0; }

  // Number of tags contained in the range; 0 for the empty range.
  constexpr size_t Size() const {
    if (IsEmpty()) {
      return 0;
    } else {
      return last - first + 1;
    }
  }

  // Returns true if `tag` lies within [first, last].
  constexpr bool Contains(Tag tag) const {
    // Need to perform the math with uint32_t. Otherwise, the uint16_ts would
    // be promoted to (signed) int, allowing the compiler to (wrongly) assume
    // that an underflow cannot happen as that would be undefined behavior.
    return static_cast<uint32_t>(tag) - first <=
           static_cast<uint32_t>(last) - first;
  }

  // Returns true if `tag_range` is fully contained within this range.
  constexpr bool Contains(TagRange tag_range) const {
    return tag_range.first >= first && tag_range.last <= last;
  }

  constexpr bool operator==(const TagRange other) const {
    return first == other.first && last == other.last;
  }

  // Packs both 16-bit tags into a single value usable as a hash.
  constexpr size_t hash_value() const {
    static_assert(std::is_same_v<std::underlying_type_t<Tag>, uint16_t>);
    return (static_cast<size_t>(first) << 16) | last;
  }

  // The bounds are stored as the inclusive range [first, last]; all
  // operations above (Size, Contains, hash_value) treat `last` as included.
  const Tag first;
  const Tag last;
};
512
513//
514// External Pointers.
515//
516// When the sandbox is enabled, external pointers are stored in an external
517// pointer table and are referenced from HeapObjects through an index (a
518// "handle"). When stored in the table, the pointers are tagged with per-type
519// tags to prevent type confusion attacks between different external objects.
520//
521// When loading an external pointer, a range of allowed tags can be specified.
522// This way, type hierarchies can be supported. The main requirement for that
523// is that all (transitive) child classes of a given parent class have type ids
524// in the same range, and that there are no unrelated types in that range. For
525// more details about how to assign type tags to types, see the TagRange class.
526//
527// The external pointer sandboxing mechanism ensures that every access to an
528// external pointer field will result in a valid pointer of the expected type
529// even in the presence of an attacker able to corrupt memory inside the
530// sandbox. However, if any data related to the external object is stored
531// inside the sandbox it may still be corrupted and so must be validated before
532// use or moved into the external object. Further, an attacker will always be
533// able to substitute different external pointers of the same type for each
534// other. Therefore, code using external pointers must be written in a
535// "substitution-safe" way, i.e. it must always be possible to substitute
536// external pointers of the same type without causing memory corruption outside
537// of the sandbox. Generally this is achieved by referencing any group of
538// related external objects through a single external pointer.
539//
540// Currently we use bit 62 for the marking bit which should always be unused as
541// it's part of the non-canonical address range. When Arm's top-byte ignore
542// (TBI) is enabled, this bit will be part of the ignored byte, and we assume
543// that the Embedder is not using this byte (really only this one bit) for any
544// other purpose. This bit also does not collide with the memory tagging
545// extension (MTE) which would use bits [56, 60).
546//
547// External pointer tables are also available even when the sandbox is off but
548// pointer compression is on. In that case, the mechanism can be used to ease
549// alignment requirements as it turns unaligned 64-bit raw pointers into
550// aligned 32-bit indices. To "opt-in" to the external pointer table mechanism
551// for this purpose, instead of using the ExternalPointer accessors one needs to
552// use ExternalPointerHandles directly and use them to access the pointers in an
553// ExternalPointerTable.
554//
555// The tag is currently in practice limited to 15 bits since it needs to fit
556// together with a marking bit into the unused parts of a pointer.
560
561 // When adding new tags, please ensure that the code using these tags is
562 // "substitution-safe", i.e. still operate safely if external pointers of the
563 // same type are swapped by an attacker. See comment above for more details.
564
565 // Shared external pointers are owned by the shared Isolate and stored in the
566 // shared external pointer table associated with that Isolate, where they can
567 // be accessed from multiple threads at the same time. The objects referenced
568 // in this way must therefore always be thread-safe.
574
575 // External pointers using these tags are kept in a per-Isolate external
576 // pointer table and can only be accessed when this Isolate is active.
578
579 // Placeholders for embedder data.
583 // This tag essentially stands for a `void*` pointer in the V8 API, and it is
584 // the Embedder's responsibility to ensure type safety (against substitution)
585 // and lifetime validity of these objects.
591
592 // InterceptorInfo external pointers.
610
612
614
615 // Foreigns
618
628
629 // Managed
660 // External resources whose lifetime is tied to their entry in the external
661 // pointer table but which are not referenced via a Managed
664
668 // The tags are limited to 7 bits, so the last tag is 0x7f.
670};
671
672using ExternalPointerTagRange = TagRange<ExternalPointerTag>;
673
674constexpr ExternalPointerTagRange kAnyExternalPointerTagRange(
676constexpr ExternalPointerTagRange kAnySharedExternalPointerTagRange(
678constexpr ExternalPointerTagRange kAnyForeignExternalPointerTagRange(
680constexpr ExternalPointerTagRange kAnyInterceptorInfoExternalPointerTagRange(
683constexpr ExternalPointerTagRange kAnyManagedExternalPointerTagRange(
685constexpr ExternalPointerTagRange kAnyMaybeReadOnlyExternalPointerTagRange(
688constexpr ExternalPointerTagRange kAnyManagedResourceExternalPointerTag(
690
// True if the external pointer must be accessed from the shared isolate's
// external pointer table. Note that the entire queried tag range must fall
// within the shared range for this to return true.
V8_INLINE static constexpr bool IsSharedExternalPointerType(
    ExternalPointerTagRange tag_range) {
  return kAnySharedExternalPointerTagRange.Contains(tag_range);
}
697
// True if the external pointer may live in a read-only object, in which case
// the table entry will be in the shared read-only segment of the external
// pointer table. The entire queried tag range must fall within the
// maybe-read-only range for this to return true.
V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType(
    ExternalPointerTagRange tag_range) {
  return kAnyMaybeReadOnlyExternalPointerTagRange.Contains(tag_range);
}
705
// True if the external pointer references an external object whose lifetime is
// tied to the entry in the external pointer table.
// In this case, the entry in the ExternalPointerTable always points to an
// object derived from ExternalPointerTable::ManagedResource.
// The entire queried tag range must fall within the managed-resource range.
V8_INLINE static constexpr bool IsManagedExternalPointerType(
    ExternalPointerTagRange tag_range) {
  return kAnyManagedResourceExternalPointerTag.Contains(tag_range);
}
714
// When an external pointer field can contain the null external pointer handle,
// the type checking mechanism needs to also check for null.
// TODO(saelo): this is mostly a temporary workaround to introduce range-based
// type checks. In the future, we should either (a) change the type tagging
// scheme so that null always passes or (b) (more likely) introduce dedicated
// null entries for those tags that need them (similar to other well-known
// empty value constants such as the empty fixed array).
V8_INLINE static constexpr bool ExternalPointerCanBeEmpty(
    ExternalPointerTagRange tag_range) {
  // Nullable cases: ArrayBuffer extensions, any embedder-data tag (the middle
  // clause tests overlap of tag_range with the embedder-data tag interval),
  // and InterceptorInfo external pointers.
  return tag_range.Contains(kArrayBufferExtensionTag) ||
         (tag_range.first <= kLastEmbedderDataTag &&
          kFirstEmbedderDataTag <= tag_range.last) ||
         kAnyInterceptorInfoExternalPointerTagRange.Contains(tag_range);
}
729
730// Indirect Pointers.
731//
732// When the sandbox is enabled, indirect pointers are used to reference
733// HeapObjects that live outside of the sandbox (but are still managed by V8's
734// garbage collector). When object A references an object B through an indirect
735// pointer, object A will contain a IndirectPointerHandle, i.e. a shifted
736// 32-bit index, which identifies an entry in a pointer table (either the
737// trusted pointer table for TrustedObjects, or the code pointer table if it is
738// a Code object). This table entry then contains the actual pointer to object
739// B. Further, object B owns this pointer table entry, and it is responsible
740// for updating the "self-pointer" in the entry when it is relocated in memory.
741// This way, in contrast to "normal" pointers, indirect pointers never need to
742// be tracked by the GC (i.e. there is no remembered set for them).
743// These pointers do not exist when the sandbox is disabled.
744
745// An IndirectPointerHandle represents a 32-bit index into a pointer table.
746using IndirectPointerHandle = uint32_t;
747
748// A null handle always references an entry that contains nullptr.
749constexpr IndirectPointerHandle kNullIndirectPointerHandle = 0;
750
751// When the sandbox is enabled, indirect pointers are used to implement:
752// - TrustedPointers: an indirect pointer using the trusted pointer table (TPT)
753// and referencing a TrustedObject in one of the trusted heap spaces.
754// - CodePointers, an indirect pointer using the code pointer table (CPT) and
755// referencing a Code object together with its instruction stream.
756
757//
758// Trusted Pointers.
759//
760// A pointer to a TrustedObject.
761// When the sandbox is enabled, these are indirect pointers using the trusted
762// pointer table (TPT). They are used to reference trusted objects (located in
763// one of V8's trusted heap spaces, outside of the sandbox) from inside the
764// sandbox in a memory-safe way. When the sandbox is disabled, these are
765// regular tagged pointers.
766using TrustedPointerHandle = IndirectPointerHandle;
767
768// The size of the virtual memory reservation for the trusted pointer table.
769// As with the external pointer table, a maximum table size in combination with
770// shifted indices allows omitting bounds checks.
772
773// The trusted pointer handles are stored shifted to the left by this amount
774// to guarantee that they are smaller than the maximum table size.
775constexpr uint32_t kTrustedPointerHandleShift = 9;
776
777// A null handle always references an entry that contains nullptr.
778constexpr TrustedPointerHandle kNullTrustedPointerHandle =
780
781// The maximum number of entries in an trusted pointer table.
786static_assert((1 << (32 - kTrustedPointerHandleShift)) == kMaxTrustedPointers,
787 "kTrustedPointerTableReservationSize and "
788 "kTrustedPointerHandleShift don't match");
789
790//
791// Code Pointers.
792//
793// A pointer to a Code object.
794// Essentially a specialized version of a trusted pointer that (when the
795// sandbox is enabled) uses the code pointer table (CPT) instead of the TPT.
796// Each entry in the CPT contains both a pointer to a Code object as well as a
797// pointer to the Code's entrypoint. This allows calling/jumping into Code with
798// one fewer memory access (compared to the case where the entrypoint pointer
799// first needs to be loaded from the Code object). As such, a CodePointerHandle
800// can be used both to obtain the referenced Code object and to directly load
801// its entrypoint.
802//
803// When the sandbox is disabled, these are regular tagged pointers.
804using CodePointerHandle = IndirectPointerHandle;
805
806// The size of the virtual memory reservation for the code pointer table.
807// As with the other tables, a maximum table size in combination with shifted
808// indices allows omitting bounds checks.
810
811// Code pointer handles are shifted by a different amount than indirect pointer
812// handles as the tables have a different maximum size.
813constexpr uint32_t kCodePointerHandleShift = 9;
814
815// A null handle always references an entry that contains nullptr.
817
818// It can sometimes be necessary to distinguish a code pointer handle from a
819// trusted pointer handle. A typical example would be a union trusted pointer
820// field that can refer to both Code objects and other trusted objects. To
821// support these use-cases, we use a simple marking scheme where some of the
822// low bits of a code pointer handle are set, while they will be unset on a
823// trusted pointer handle. This way, the correct table to resolve the handle
824// can be determined even in the absence of a type tag.
825constexpr uint32_t kCodePointerHandleMarker = 0x1;
826static_assert(kCodePointerHandleShift > 0);
827static_assert(kTrustedPointerHandleShift > 0);
828
829// The maximum number of entries in a code pointer table.
830constexpr int kCodePointerTableEntrySize = 16;
834static_assert(
835 (1 << (32 - kCodePointerHandleShift)) == kMaxCodePointers,
836 "kCodePointerTableReservationSize and kCodePointerHandleShift don't match");
837
840
841// Constants that can be used to mark places that should be modified once
842// certain types of objects are moved out of the sandbox and into trusted space.
848
849// {obj} must be the raw tagged pointer representation of a HeapObject
850// that's guaranteed to never be in ReadOnlySpace.
852 "Use GetCurrentIsolate() instead, which is guaranteed to return the same "
853 "isolate since https://crrev.com/c/6458560.")
854V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
855
856// Returns if we need to throw when an error occurs. This infers the language
857// mode based on the current context and the closure. This returns true if the
858// language mode is strict.
859V8_EXPORT bool ShouldThrowOnError(internal::Isolate* isolate);
860
861struct HandleScopeData final {
862 static constexpr uint32_t kSizeInBytes =
864
867 int level;
869
870 void Initialize() {
871 next = limit = nullptr;
872 sealed_level = level = 0;
873 }
874};
875
876static_assert(HandleScopeData::kSizeInBytes == sizeof(HandleScopeData));
877
878/**
879 * This class exports constants and functionality from within v8 that
880 * is necessary to implement inline functions in the v8 api. Don't
881 * depend on functions and constants defined here.
882 */
884#ifdef V8_MAP_PACKING
885 V8_INLINE static constexpr Address UnpackMapWord(Address mapword) {
886 // TODO(wenyuzhao): Clear header metadata.
887 return mapword ^ kMapWordXorMask;
888 }
889#endif
890
891 public:
892 // These values match non-compiler-dependent values defined within
893 // the implementation of v8.
894 static const int kHeapObjectMapOffset = 0;
896 static const int kStringResourceOffset =
898
900 static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
901#ifdef V8_COMPRESS_POINTERS
904#else // !V8_COMPRESS_POINTERS
907#endif // !V8_COMPRESS_POINTERS
908 static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
911#ifdef V8_ENABLE_SANDBOX
913#else
915#endif
918 static const int kStringEncodingMask = 0x8;
919 static const int kExternalTwoByteRepresentationTag = 0x02;
920 static const int kExternalOneByteRepresentationTag = 0x0a;
921
922 static const uint32_t kNumIsolateDataSlots = 4;
924 static const int kNumberOfBooleanFlags = 6;
925 static const int kErrorMessageParamSize = 1;
926 static const int kTablesAlignmentPaddingSize = 1;
932 static const int kHandleScopeDataSize =
934
935 // ExternalPointerTable and TrustedPointerTable layout guarantees.
937 static const int kSegmentedTableSegmentPoolSize = 4;
938 static const int kExternalPointerTableSize =
940 kSegmentedTableSegmentPoolSize * sizeof(uint32_t);
941 static const int kTrustedPointerTableSize =
943 kSegmentedTableSegmentPoolSize * sizeof(uint32_t);
945
946 // IsolateData layout guarantees.
947 static const int kIsolateCageBaseOffset = 0;
948 static const int kIsolateStackGuardOffset =
950 static const int kVariousBooleanFlagsOffset =
952 static const int kErrorMessageParamOffset =
957 static const int kBuiltinTier0TableOffset =
959 static const int kNewAllocationInfoOffset =
961 static const int kOldAllocationInfoOffset =
963 static const int kLastYoungAllocationOffset =
965
981 kIsolateThreadLocalTopOffset + kThreadLocalTopSize;
982 static const int kIsolateEmbedderDataOffset =
984#ifdef V8_COMPRESS_POINTERS
985 static const int kIsolateExternalPointerTableOffset =
989 static const int kIsolateCppHeapPointerTableOffset =
991#ifdef V8_ENABLE_SANDBOX
992 static const int kIsolateTrustedCageBaseOffset =
994 static const int kIsolateTrustedPointerTableOffset =
1004#else
1007#endif // V8_ENABLE_SANDBOX
1008#else
1011#endif // V8_COMPRESS_POINTERS
1012 static const int kJSDispatchTableOffset =
1018 static const int kIsolateRootsOffset =
1020
1021 // Assert scopes
1022 static const int kDisallowGarbageCollectionAlign = alignof(uint32_t);
1023 static const int kDisallowGarbageCollectionSize = sizeof(uint32_t);
1024
1026
1027// These constants are copied from static-roots.h and guarded by static asserts.
1028#define EXPORTED_STATIC_ROOTS_PTR_LIST(V)
1029 V(UndefinedValue, 0x11)
1030 V(NullValue, 0x2d)
1031 V(TrueValue, 0x71)
1032 V(FalseValue, 0x55)
1033 V(EmptyString, 0x49)
1034 /* The Hole moves around depending on build flags, so define it */
1035 /* separately inside StaticReadOnlyRoot using build macros */
1036 V(TheHoleValue, kBuildDependentTheHoleValue)
1037
1038 using Tagged_t = uint32_t;
1039 struct StaticReadOnlyRoot {
1040#ifdef V8_ENABLE_WEBASSEMBLY
1041#ifdef V8_INTL_SUPPORT
1042 static constexpr Tagged_t kBuildDependentTheHoleValue = 0x67b9;
1043#else
1044 static constexpr Tagged_t kBuildDependentTheHoleValue = 0x5b1d;
1045#endif
1046#else
1047#ifdef V8_INTL_SUPPORT
1048 static constexpr Tagged_t kBuildDependentTheHoleValue = 0x6511;
1049#else
1050 static constexpr Tagged_t kBuildDependentTheHoleValue = 0x5875;
1051#endif
1052#endif
1053
1054#define DEF_ROOT(name, value) static constexpr Tagged_t k##name = value;
1056#undef DEF_ROOT
1057
1058 // Use 0 for kStringMapLowerBound since string maps are the first maps.
1059 static constexpr Tagged_t kStringMapLowerBound = 0;
1060 static constexpr Tagged_t kStringMapUpperBound = 0x425;
1061
1062#define PLUSONE(...) +1
1063 static constexpr size_t kNumberOfExportedStaticRoots =
1065#undef PLUSONE
1066 };
1067
1068#endif // V8_STATIC_ROOTS_BOOL
1069
1070 static const int kUndefinedValueRootIndex = 0;
1071 static const int kTheHoleValueRootIndex = 1;
1072 static const int kNullValueRootIndex = 2;
1073 static const int kTrueValueRootIndex = 3;
1074 static const int kFalseValueRootIndex = 4;
1075 static const int kEmptyStringRootIndex = 5;
1076
1078 static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
1079 static const int kNodeStateMask = 0x3;
1080 static const int kNodeStateIsWeakValue = 2;
1081
1082 static const int kFirstNonstringType = 0x80;
1083 static const int kOddballType = 0x83;
1084 static const int kForeignType = 0xcc;
1085 static const int kJSSpecialApiObjectType = 0x410;
1086 static const int kJSObjectType = 0x421;
1087 static const int kFirstJSApiObjectType = 0x422;
1088 static const int kLastJSApiObjectType = 0x80A;
1089 // Defines a range [kFirstEmbedderJSApiObjectType, kJSApiObjectTypesCount]
1090 // of JSApiObject instance type values that an embedder can use.
1091 static const int kFirstEmbedderJSApiObjectType = 0;
1094
1095 static const int kUndefinedOddballKind = 4;
1096 static const int kNullOddballKind = 3;
1097
1098 // Constants used by PropertyCallbackInfo to check if we should throw when an
1099 // error occurs.
1100 static const int kDontThrow = 0;
1101 static const int kThrowOnError = 1;
1102 static const int kInferShouldThrowMode = 2;
1103
1104 // Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an
1105 // incremental GC once the external memory reaches this limit.
1106 static constexpr size_t kExternalAllocationSoftLimit = 64 * 1024 * 1024;
1107
1108#ifdef V8_MAP_PACKING
1109 static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
1110 // The lowest two bits of mapwords are always `0b10`
1111 static const uintptr_t kMapWordSignature = 0b10;
1112 // XORing a (non-compressed) map with this mask ensures that the two
1113 // low-order bits are 0b10. The 0 at the end makes this look like a Smi,
1114 // although real Smis have all lower 32 bits unset. We only rely on these
1115 // values passing as Smis in very few places.
1116 static const int kMapWordXorMask = 0b11;
1117#endif
1118
1120 V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
1121#ifdef V8_ENABLE_CHECKS
1122 CheckInitializedImpl(isolate);
1123#endif
1124 }
1125
1126 V8_INLINE static constexpr bool HasHeapObjectTag(Address value) {
1127 return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
1128 }
1129
1130 V8_INLINE static constexpr int SmiValue(Address value) {
1131 return PlatformSmiTagging::SmiToInt(value);
1132 }
1133
1134 V8_INLINE static constexpr Address AddressToSmi(Address value) {
1135 return (value << (kSmiTagSize + PlatformSmiTagging::kSmiShiftSize)) |
1136 kSmiTag;
1137 }
1138
1139 V8_INLINE static constexpr Address IntToSmi(int value) {
1140 return AddressToSmi(static_cast<Address>(value));
1141 }
1142
1143 template <typename T,
1144 typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
1145 V8_INLINE static constexpr Address IntegralToSmi(T value) {
1146 return AddressToSmi(static_cast<Address>(value));
1147 }
1148
1149 template <typename T,
1150 typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
1151 V8_INLINE static constexpr bool IsValidSmi(T value) {
1152 return PlatformSmiTagging::IsValidSmi(value);
1153 }
1154
1155 template <typename T,
1156 typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
1157 static constexpr std::optional<Address> TryIntegralToSmi(T value) {
1158 if (V8_LIKELY(PlatformSmiTagging::IsValidSmi(value))) {
1159 return {AddressToSmi(static_cast<Address>(value))};
1160 }
1161 return {};
1162 }
1163
1166 return static_cast<Tagged_t>(obj) == constant;
1167 }
1168
1172#ifdef V8_MAP_PACKING
1174#endif
1175 return map >= first_map && map <= last_map;
1176 }
1177#endif
1178
1181#ifdef V8_MAP_PACKING
1182 map = UnpackMapWord(map);
1183#endif
1184 return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
1185 }
1186
1188 if (!HasHeapObjectTag(obj)) return kNullAddress;
1190#ifdef V8_MAP_PACKING
1191 map = UnpackMapWord(map);
1192#endif
1193 return map;
1194 }
1195
1198 }
1199
1200 V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
1201 int representation = (instance_type & kStringRepresentationAndEncodingMask);
1202 return representation == kExternalTwoByteRepresentationTag;
1203 }
1204
1205 V8_INLINE static constexpr bool CanHaveInternalField(int instance_type) {
1206 static_assert(kJSObjectType + 1 == kFirstJSApiObjectType);
1207 static_assert(kJSObjectType < kLastJSApiObjectType);
1209 // Check for IsJSObject() || IsJSSpecialApiObject() || IsJSApiObject()
1210 return instance_type == kJSSpecialApiObjectType ||
1211 // inlined version of base::IsInRange
1212 (static_cast<unsigned>(static_cast<unsigned>(instance_type) -
1213 static_cast<unsigned>(kJSObjectType)) <=
1214 static_cast<unsigned>(kLastJSApiObjectType - kJSObjectType));
1215 }
1216
1217 V8_INLINE static uint8_t GetNodeFlag(Address* obj, int shift) {
1218 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1219 return *addr & static_cast<uint8_t>(1U << shift);
1220 }
1221
1222 V8_INLINE static void UpdateNodeFlag(Address* obj, bool value, int shift) {
1223 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1224 uint8_t mask = static_cast<uint8_t>(1U << shift);
1225 *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
1226 }
1227
1228 V8_INLINE static uint8_t GetNodeState(Address* obj) {
1229 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1230 return *addr & kNodeStateMask;
1231 }
1232
1233 V8_INLINE static void UpdateNodeState(Address* obj, uint8_t value) {
1234 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1235 *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
1236 }
1237
1238 V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
1239 void* data) {
1240 Address addr = reinterpret_cast<Address>(isolate) +
1242 *reinterpret_cast<void**>(addr) = data;
1243 }
1244
1245 V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
1246 uint32_t slot) {
1247 Address addr = reinterpret_cast<Address>(isolate) +
1249 return *reinterpret_cast<void* const*>(addr);
1250 }
1251
1252 V8_INLINE static HandleScopeData* GetHandleScopeData(v8::Isolate* isolate) {
1253 Address addr =
1254 reinterpret_cast<Address>(isolate) + kIsolateHandleScopeDataOffset;
1255 return reinterpret_cast<HandleScopeData*>(addr);
1256 }
1257
1259 Address addr =
1260 reinterpret_cast<Address>(isolate) + kIsolateLongTaskStatsCounterOffset;
1261 ++(*reinterpret_cast<size_t*>(addr));
1262 }
1263
1264 V8_INLINE static Address* GetRootSlot(v8::Isolate* isolate, int index) {
1265 Address addr = reinterpret_cast<Address>(isolate) + kIsolateRootsOffset +
1266 index * kApiSystemPointerSize;
1267 return reinterpret_cast<Address*>(addr);
1268 }
1269
1270 V8_INLINE static Address GetRoot(v8::Isolate* isolate, int index) {
1272 Address base = *reinterpret_cast<Address*>(
1273 reinterpret_cast<uintptr_t>(isolate) + kIsolateCageBaseOffset);
1274 switch (index) {
1275#define DECOMPRESS_ROOT(name, ...)
1276 case k##name##RootIndex:
1277 return base + StaticReadOnlyRoot::k##name;
1278 EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT)
1279#undef DECOMPRESS_ROOT
1280#undef EXPORTED_STATIC_ROOTS_PTR_LIST
1281 default:
1282 break;
1283 }
1284#endif // V8_STATIC_ROOTS_BOOL
1285 return *GetRootSlot(isolate, index);
1286 }
1287
1288#ifdef V8_ENABLE_SANDBOX
1290 Address addr = reinterpret_cast<Address>(isolate) +
1293 return *reinterpret_cast<Address**>(addr);
1294 }
1295
1297 v8::Isolate* isolate) {
1298 Address addr = reinterpret_cast<Address>(isolate) +
1300 addr = *reinterpret_cast<Address*>(addr);
1302 return *reinterpret_cast<Address**>(addr);
1303 }
1304#endif
1305
1306 template <typename T>
1307 V8_INLINE static T ReadRawField(Address heap_object_ptr, int offset) {
1308 Address addr = heap_object_ptr + offset - kHeapObjectTag;
1309#ifdef V8_COMPRESS_POINTERS
1310 if constexpr (sizeof(T) > kApiTaggedSize) {
1311 // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
1312 // fields (external pointers, doubles and BigInt data) are only
1313 // kTaggedSize aligned so we have to use unaligned pointer friendly way of
1314 // accessing them in order to avoid undefined behavior in C++ code.
1315 T r;
1316 memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
1317 return r;
1318 }
1319#endif
1320 return *reinterpret_cast<const T*>(addr);
1321 }
1322
1324 int offset) {
1325#ifdef V8_COMPRESS_POINTERS
1326 uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
1327 Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
1328 return base + static_cast<Address>(static_cast<uintptr_t>(value));
1329#else
1330 return ReadRawField<Address>(heap_object_ptr, offset);
1331#endif
1332 }
1333
1335 int offset) {
1336#ifdef V8_COMPRESS_POINTERS
1337 uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
1338 return static_cast<Address>(static_cast<uintptr_t>(value));
1339#else
1340 return ReadRawField<Address>(heap_object_ptr, offset);
1341#endif
1342 }
1343
1345 "Use GetCurrentIsolateForSandbox() instead, which is guaranteed to "
1346 "return the same isolate since https://crrev.com/c/6458560.")
1347 V8_INLINE static v8::Isolate* GetIsolateForSandbox(Address obj) {
1348#ifdef V8_ENABLE_SANDBOX
1349 return GetCurrentIsolate();
1350#else
1351 // Not used in non-sandbox mode.
1352 return nullptr;
1353#endif
1354 }
1355
1356 // Returns v8::Isolate::Current(), but without needing to include the
1357 // v8-isolate.h header.
1359
1361#ifdef V8_ENABLE_SANDBOX
1362 return GetCurrentIsolate();
1363#else
1364 // Not used in non-sandbox mode.
1365 return nullptr;
1366#endif
1367 }
1368
1369 template <ExternalPointerTagRange tag_range>
1371 Address heap_object_ptr,
1372 int offset) {
1373#ifdef V8_ENABLE_SANDBOX
1374 static_assert(!tag_range.IsEmpty());
1375 // See src/sandbox/external-pointer-table.h. Logic duplicated here so
1376 // it can be inlined and doesn't require an additional call.
1377 Address* table = IsSharedExternalPointerType(tag_range)
1378 ? GetSharedExternalPointerTableBase(isolate)
1379 : GetExternalPointerTableBase(isolate);
1380 internal::ExternalPointerHandle handle =
1381 ReadRawField<ExternalPointerHandle>(heap_object_ptr, offset);
1382 uint32_t index = handle >> kExternalPointerIndexShift;
1383 std::atomic<Address>* ptr =
1384 reinterpret_cast<std::atomic<Address>*>(&table[index]);
1385 Address entry = std::atomic_load_explicit(ptr, std::memory_order_relaxed);
1386 ExternalPointerTag actual_tag = static_cast<ExternalPointerTag>(
1387 (entry & kExternalPointerTagMask) >> kExternalPointerTagShift);
1388 if (V8_LIKELY(tag_range.Contains(actual_tag))) {
1389 return entry & kExternalPointerPayloadMask;
1390 } else {
1391 return 0;
1392 }
1393 return entry;
1394#else
1395 return ReadRawField<Address>(heap_object_ptr, offset);
1396#endif // V8_ENABLE_SANDBOX
1397 }
1398
1399#ifdef V8_COMPRESS_POINTERS
1401 return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
1402 }
1403
1405 return static_cast<uint32_t>(value);
1406 }
1407
1409 uint32_t value) {
1411 return base + static_cast<Address>(static_cast<uintptr_t>(value));
1412 }
1413
1414#endif // V8_COMPRESS_POINTERS
1415};
1416
1417// Only perform cast check for types derived from v8::Data since
1418// other types do not implement the Cast method.
1419template <bool PerformCheck>
1421 template <class T>
1422 static void Perform(T* data);
1423};
1424
1425template <>
1426template <class T>
1427void CastCheck<true>::Perform(T* data) {
1428 T::Cast(data);
1429}
1430
1431template <>
1432template <class T>
1433void CastCheck<false>::Perform(T* data) {}
1434
1435template <class T>
1437 CastCheck<std::is_base_of_v<Data, T> &&
1438 !std::is_same_v<Data, std::remove_cv_t<T>>>::Perform(data);
1439}
1440
1441// A base class for backing stores, which is needed due to vagaries of
1442// how static casts work with std::shared_ptr.
1444
1445// The maximum value in enum GarbageCollectionReason, defined in heap.h.
1446// This is needed for histograms sampling garbage collection reasons.
1448
1449// Base class for the address block allocator compatible with standard
1450// containers, which registers its allocated range as strong roots.
1452 public:
1453 Heap* heap() const { return heap_; }
1454
1455 friend bool operator==(const StrongRootAllocatorBase& a,
1456 const StrongRootAllocatorBase& b) {
1457 // TODO(pkasting): Replace this body with `= default` after dropping support
1458 // for old gcc versions.
1459 return a.heap_ == b.heap_;
1460 }
1461
1462 protected:
1463 explicit StrongRootAllocatorBase(Heap* heap) : heap_(heap) {}
1464 explicit StrongRootAllocatorBase(LocalHeap* heap);
1465 explicit StrongRootAllocatorBase(Isolate* isolate);
1467 explicit StrongRootAllocatorBase(LocalIsolate* isolate);
1468
1469 // Allocate/deallocate a range of n elements of type internal::Address.
1471 void deallocate_impl(Address* p, size_t n) noexcept;
1472
1473 private:
1474 Heap* heap_;
1475};
1476
1477// The general version of this template behaves just as std::allocator, with
1478// the exception that the constructor takes the isolate as parameter. Only
1479// specialized versions, e.g., internal::StrongRootAllocator<internal::Address>
1480// and internal::StrongRootAllocator<v8::Local<T>> register the allocated range
1481// as strong roots.
template <typename T>
class StrongRootAllocator : private std::allocator<T> {
 public:
  using value_type = T;

  // Accepts (and ignores) any heap or isolate pointer; only the specialized
  // versions of this template make use of it.
  template <typename HeapOrIsolateT>
  explicit StrongRootAllocator(HeapOrIsolateT*) {}

  // Converting copy constructor, required by the Allocator requirements.
  template <typename U>
  StrongRootAllocator(const StrongRootAllocator<U>& other) noexcept {}

  // Allocation is plain std::allocator behavior; no root registration here.
  using std::allocator<T>::allocate;
  using std::allocator<T>::deallocate;
};
1495
1496// TODO(pkasting): Replace with `requires` clauses after dropping support for
1497// old gcc versions.
// Detection idiom: true iff `It` declares a nested `iterator_concept` type.
template <typename It, typename = void>
inline constexpr bool kHaveIteratorConcept = false;
template <typename It>
inline constexpr bool
    kHaveIteratorConcept<It, std::void_t<typename It::iterator_concept>> =
        true;
1503
// Detection idiom: true iff `It` declares a nested `iterator_category` type.
template <typename It, typename = void>
inline constexpr bool kHaveIteratorCategory = false;
template <typename It>
inline constexpr bool
    kHaveIteratorCategory<It, std::void_t<typename It::iterator_category>> =
        true;
1509
1510// Helper struct that contains an `iterator_concept` type alias only when either
1511// `Iterator` or `std::iterator_traits<Iterator>` do.
1512// Default: no alias.
1513template <typename Iterator, typename = void>
1515// Use `Iterator::iterator_concept` if available.
1516template <typename Iterator>
1520};
1521// Otherwise fall back to `std::iterator_traits<Iterator>` if possible.
1522template <typename Iterator>
1526 // There seems to be no feature-test macro covering this, so use the
1527 // presence of `<ranges>` as a crude proxy, since it was added to the
1528 // standard as part of the Ranges papers.
1529 // TODO(pkasting): Add this unconditionally after dropping support for old
1530 // libstdc++ versions.
1531#if __has_include(<ranges>)
1532 using iterator_concept =
1534#endif
1535};
1536
1537// A class of iterators that wrap some different iterator type.
1538// If specified, ElementType is the type of element accessed by the wrapper
1539// iterator; in this case, the actual reference and pointer types of Iterator
1540// must be convertible to ElementType& and ElementType*, respectively.
1541template <typename Iterator, typename ElementType = void>
1543 public:
1544 static_assert(
1550
1551 using difference_type =
1552 typename std::iterator_traits<Iterator>::difference_type;
1556 ElementType>;
1557 using pointer =
1559 typename std::iterator_traits<Iterator>::pointer,
1565 using iterator_category =
1566 typename std::iterator_traits<Iterator>::iterator_category;
1567
1568 constexpr WrappedIterator() noexcept = default;
1569 constexpr explicit WrappedIterator(Iterator it) noexcept : it_(it) {}
1570
1571 // TODO(pkasting): Switch to `requires` and concepts after dropping support
1572 // for old gcc and libstdc++ versions.
1573 template <typename OtherIterator, typename OtherElementType,
1574 typename = std::enable_if_t<
1577 const WrappedIterator<OtherIterator, OtherElementType>& other) noexcept
1578 : it_(other.base()) {}
1579
1580 [[nodiscard]] constexpr reference operator*() const noexcept { return *it_; }
1581 [[nodiscard]] constexpr pointer operator->() const noexcept {
1582 if constexpr (std::is_pointer_v<Iterator>) {
1583 return it_;
1584 } else {
1585 return it_.operator->();
1586 }
1587 }
1588
1589 template <typename OtherIterator, typename OtherElementType>
1590 [[nodiscard]] constexpr bool operator==(
1591 const WrappedIterator<OtherIterator, OtherElementType>& other)
1592 const noexcept {
1593 return it_ == other.base();
1594 }
1596 template <typename OtherIterator, typename OtherElementType>
1597 [[nodiscard]] constexpr auto operator<=>(
1599 const noexcept {
1601 return it_ <=> other.base();
1602 } else if constexpr (std::totally_ordered_with<Iterator, OtherIterator>) {
1603 if (it_ < other.base()) {
1604 return std::strong_ordering::less;
1605 }
1606 return (it_ > other.base()) ? std::strong_ordering::greater
1608 } else {
1609 if (it_ < other.base()) {
1610 return std::partial_ordering::less;
1611 }
1612 if (other.base() < it_) {
1613 return std::partial_ordering::greater;
1614 }
1615 return (it_ == other.base()) ? std::partial_ordering::equivalent
1617 }
1618 }
1619#else
1620 // Assume that if spaceship isn't present, operator rewriting might not be
1621 // either.
1622 template <typename OtherIterator, typename OtherElementType>
1623 [[nodiscard]] constexpr bool operator!=(
1624 const WrappedIterator<OtherIterator, OtherElementType>& other)
1625 const noexcept {
1626 return it_ != other.base();
1627 }
1628
1629 template <typename OtherIterator, typename OtherElementType>
1630 [[nodiscard]] constexpr bool operator<(
1631 const WrappedIterator<OtherIterator, OtherElementType>& other)
1632 const noexcept {
1633 return it_ < other.base();
1634 }
1635 template <typename OtherIterator, typename OtherElementType>
1636 [[nodiscard]] constexpr bool operator<=(
1637 const WrappedIterator<OtherIterator, OtherElementType>& other)
1638 const noexcept {
1639 return it_ <= other.base();
1640 }
1641 template <typename OtherIterator, typename OtherElementType>
1642 [[nodiscard]] constexpr bool operator>(
1643 const WrappedIterator<OtherIterator, OtherElementType>& other)
1644 const noexcept {
1645 return it_ > other.base();
1646 }
1647 template <typename OtherIterator, typename OtherElementType>
1648 [[nodiscard]] constexpr bool operator>=(
1649 const WrappedIterator<OtherIterator, OtherElementType>& other)
1650 const noexcept {
1651 return it_ >= other.base();
1652 }
1653#endif
1654
1655 constexpr WrappedIterator& operator++() noexcept {
1656 ++it_;
1657 return *this;
1658 }
1659 constexpr WrappedIterator operator++(int) noexcept {
1660 WrappedIterator result(*this);
1661 ++(*this);
1662 return result;
1663 }
1664
1665 constexpr WrappedIterator& operator--() noexcept {
1666 --it_;
1667 return *this;
1668 }
1669 constexpr WrappedIterator operator--(int) noexcept {
1670 WrappedIterator result(*this);
1671 --(*this);
1672 return result;
1673 }
1675 difference_type n) const noexcept {
1676 WrappedIterator result(*this);
1677 result += n;
1678 return result;
1679 }
1680 [[nodiscard]] friend constexpr WrappedIterator operator+(
1681 difference_type n, const WrappedIterator& x) noexcept {
1682 return x + n;
1683 }
1684 constexpr WrappedIterator& operator+=(difference_type n) noexcept {
1685 it_ += n;
1686 return *this;
1687 }
1689 difference_type n) const noexcept {
1690 return *this + -n;
1691 }
1692 constexpr WrappedIterator& operator-=(difference_type n) noexcept {
1693 return *this += -n;
1694 }
1695 template <typename OtherIterator, typename OtherElementType>
1696 [[nodiscard]] constexpr auto operator-(
1697 const WrappedIterator<OtherIterator, OtherElementType>& other)
1698 const noexcept {
1699 return it_ - other.base();
1700 }
1702 difference_type n) const noexcept {
1703 return it_[n];
1704 }
1705
1706 [[nodiscard]] constexpr const Iterator& base() const noexcept { return it_; }
1707
1708 private:
1709 Iterator it_;
1710};
1711
1712// Helper functions about values contained in handles.
1713// A value is either an indirect pointer or a direct pointer, depending on
1714// whether direct local support is enabled.
1715class ValueHelper final {
1716 public:
1717 // ValueHelper::InternalRepresentationType is an abstract type that
1718 // corresponds to the internal representation of v8::Local and essentially
1719 // to what T* really is (these two are always in sync). This type is used in
1720 // methods like GetDataFromSnapshotOnce that need access to a handle's
1721 // internal representation. In particular, if `x` is a `v8::Local<T>`, then
1722 // `v8::Local<T>::FromRepr(x.repr())` gives exactly the same handle as `x`.
1723#ifdef V8_ENABLE_DIRECT_HANDLE
1724 static constexpr Address kTaggedNullAddress = 1;
1725
1726 using InternalRepresentationType = internal::Address;
1727 static constexpr InternalRepresentationType kEmpty = kTaggedNullAddress;
1728#else
1729 using InternalRepresentationType = internal::Address*;
1730 static constexpr InternalRepresentationType kEmpty = nullptr;
1731#endif // V8_ENABLE_DIRECT_HANDLE
1732
1733 template <typename T>
1734 V8_INLINE static bool IsEmpty(T* value) {
1735 return ValueAsRepr(value) == kEmpty;
1736 }
1737
1738 // Returns a handle's "value" for all kinds of abstract handles. For Local,
1739 // it is equivalent to `*handle`. The variadic parameters support handle
1740 // types with extra type parameters, like `Persistent<T, M>`.
1741 template <template <typename T, typename... Ms> typename H, typename T,
1742 typename... Ms>
1743 V8_INLINE static T* HandleAsValue(const H<T, Ms...>& handle) {
1744 return handle.template value<T>();
1745 }
1746
1747#ifdef V8_ENABLE_DIRECT_HANDLE
1748
1749 template <typename T>
1750 V8_INLINE static Address ValueAsAddress(const T* value) {
1751 return reinterpret_cast<Address>(value);
1752 }
1753
1754 template <typename T, bool check_null = true, typename S>
1755 V8_INLINE static T* SlotAsValue(S* slot) {
1756 if (check_null && slot == nullptr) {
1757 return reinterpret_cast<T*>(kTaggedNullAddress);
1758 }
1759 return *reinterpret_cast<T**>(slot);
1760 }
1761
1762 template <typename T>
1763 V8_INLINE static InternalRepresentationType ValueAsRepr(const T* value) {
1764 return reinterpret_cast<InternalRepresentationType>(value);
1765 }
1766
1767 template <typename T>
1768 V8_INLINE static T* ReprAsValue(InternalRepresentationType repr) {
1769 return reinterpret_cast<T*>(repr);
1770 }
1771
1772#else // !V8_ENABLE_DIRECT_HANDLE
1773
1774 template <typename T>
1775 V8_INLINE static Address ValueAsAddress(const T* value) {
1776 return *reinterpret_cast<const Address*>(value);
1777 }
1778
1779 template <typename T, bool check_null = true, typename S>
1780 V8_INLINE static T* SlotAsValue(S* slot) {
1781 return reinterpret_cast<T*>(slot);
1782 }
1783
1784 template <typename T>
1785 V8_INLINE static InternalRepresentationType ValueAsRepr(const T* value) {
1786 return const_cast<InternalRepresentationType>(
1787 reinterpret_cast<const Address*>(value));
1788 }
1789
1790 template <typename T>
1791 V8_INLINE static T* ReprAsValue(InternalRepresentationType repr) {
1792 return reinterpret_cast<T*>(repr);
1793 }
1794
1795#endif // V8_ENABLE_DIRECT_HANDLE
1796};
1797
1798/**
1799 * Helper functions about handles.
1800 */
1801class HandleHelper final {
1802 public:
1803 /**
1804 * Checks whether two handles are equal.
1805 * They are equal iff they are both empty or they are both non-empty and the
1806 * objects to which they refer are physically equal.
1807 *
1808 * If both handles refer to JS objects, this is the same as strict equality.
1809 * For primitives, such as numbers or strings, a `false` return value does not
1810 * indicate that the values aren't equal in the JavaScript sense.
1811 * Use `Value::StrictEquals()` to check primitives for equality.
1812 */
1813 template <typename T1, typename T2>
1814 V8_INLINE static bool EqualHandles(const T1& lhs, const T2& rhs) {
1815 if (lhs.IsEmpty()) return rhs.IsEmpty();
1816 if (rhs.IsEmpty()) return false;
1817 return lhs.ptr() == rhs.ptr();
1818 }
1819};
1820
1822
1823// These functions are here just to match friend declarations in
1824// XxxCallbackInfo classes allowing these functions to access the internals
1825// of the info objects. These functions are supposed to be called by debugger
1826// macros.
1827void PrintFunctionCallbackInfo(void* function_callback_info);
1828void PrintPropertyCallbackInfo(void* property_callback_info);
1829
1830} // namespace internal
1831} // namespace v8
1832
1833#endif // INCLUDE_V8_INTERNAL_H_
friend void internal::PrintFunctionCallbackInfo(void *)
friend void internal::PrintPropertyCallbackInfo(void *)
static V8_INLINE bool EqualHandles(const T1 &lhs, const T2 &rhs)
static const int kTrustedPointerTableBasePointerOffset
static const int kIsolateCageBaseOffset
static V8_INLINE Address LoadMap(Address obj)
static const int kTrueValueRootIndex
static const int kInferShouldThrowMode
static V8_INLINE void UpdateNodeFlag(Address *obj, bool value, int shift)
static const int kLastYoungAllocationOffset
static const int kJSSpecialApiObjectType
static V8_INLINE void IncrementLongTasksStatsCounter(v8::Isolate *isolate)
static const int kExternalPointerTableSize
static const int kOddballKindOffset
static const int kOldAllocationInfoOffset
static const int kFirstEmbedderJSApiObjectType
static const int kFastCCallAlignmentPaddingSize
static const int kErrorMessageParamSize
static const int kFalseValueRootIndex
static const int kIsolateStackGuardOffset
static const int kNullValueRootIndex
static const int kIsolateFastCCallCallerPcOffset
static const int kDontThrow
static const int kEmptyStringRootIndex
static V8_INLINE uint8_t GetNodeState(Address *obj)
static const int kNativeContextEmbedderDataOffset
static V8_INLINE uint8_t GetNodeFlag(Address *obj, int shift)
static const int kStringRepresentationAndEncodingMask
static const int kIsolateThreadLocalTopOffset
static const int kDisallowGarbageCollectionSize
static const int kIsolateLongTaskStatsCounterOffset
static const int kEmbedderDataArrayHeaderSize
static const int kExternalTwoByteRepresentationTag
static const int kNodeStateMask
static const int kUndefinedValueRootIndex
static const int kDisallowGarbageCollectionAlign
static const int kRegExpStaticResultOffsetsVectorSize
static V8_INLINE constexpr bool HasHeapObjectTag(Address value)
static V8_INLINE HandleScopeData * GetHandleScopeData(v8::Isolate *isolate)
static const int kThrowOnError
static const int kEmbedderDataSlotExternalPointerOffset
static const int kBuiltinTier0TableSize
static const int kHandleScopeDataSize
static V8_INLINE Address ReadTaggedSignedField(Address heap_object_ptr, int offset)
static V8_INLINE constexpr Address AddressToSmi(Address value)
static V8_INLINE T ReadRawField(Address heap_object_ptr, int offset)
static V8_INLINE void CheckInitialized(v8::Isolate *isolate)
static const uint32_t kNumIsolateDataSlots
static const int kNumberOfBooleanFlags
static V8_INLINE Address * GetRootSlot(v8::Isolate *isolate, int index)
static const int kBuiltinTier0EntryTableSize
static const int kStackGuardSize
static const int kIsolateFastApiCallTargetOffset
static V8_INLINE constexpr bool CanHaveInternalField(int instance_type)
static constexpr size_t kExternalAllocationSoftLimit
static const int kNodeFlagsOffset
static const int kContinuationPreservedEmbedderDataOffset
static const int kLastJSApiObjectType
static const int kExternalOneByteRepresentationTag
static const int kNodeStateIsWeakValue
static V8_EXPORT void CheckInitializedImpl(v8::Isolate *isolate)
static const int kTrustedPointerTableSize
static const int kBuiltinTier0TableOffset
static const int kForeignType
static const int kNodeClassIdOffset
static V8_INLINE int GetInstanceType(Address obj)
static const int kIsolateRootsOffset
static const int kIsolateApiCallbackThunkArgumentOffset
static const int kExternalPointerTableBasePointerOffset
static const int kFirstJSApiObjectType
static const int kOddballType
static const int kMapInstanceTypeOffset
static const int kLinearAllocationAreaSize
static const int kIsolateEmbedderDataOffset
static V8_INLINE int GetOddballKind(Address obj)
static V8_INLINE v8::Isolate * GetCurrentIsolateForSandbox()
static V8_INLINE void * GetEmbedderData(const v8::Isolate *isolate, uint32_t slot)
static const int kNewAllocationInfoOffset
static V8_INLINE constexpr int SmiValue(Address value)
static const int kUndefinedOddballKind
static const int kEmbedderDataSlotSize
static const int kHeapObjectMapOffset
static V8_INLINE Address ReadTaggedPointerField(Address heap_object_ptr, int offset)
static const int kNullOddballKind
static V8_INLINE bool IsExternalTwoByteString(int instance_type)
static constexpr std::optional< Address > TryIntegralToSmi(T value)
static V8_INLINE constexpr Address IntToSmi(int value)
static V8_EXPORT v8::Isolate * GetCurrentIsolate()
static const int kFirstNonstringType
static V8_INLINE void UpdateNodeState(Address *obj, uint8_t value)
static const int kJSAPIObjectWithEmbedderSlotsHeaderSize
static const int kJSDispatchTableOffset
static const int kBuiltinTier0EntryTableOffset
static const int kStringEncodingMask
static const int kSegmentedTableSegmentPoolSize
static const int kIsolateRegexpExecVectorArgumentOffset
static V8_INLINE Address GetRoot(v8::Isolate *isolate, int index)
static const int kLastEmbedderJSApiObjectType
static const int kVariousBooleanFlagsOffset
static V8_INLINE constexpr Address IntegralToSmi(T value)
static const int kThreadLocalTopSize
static const int kTablesAlignmentPaddingSize
static V8_INLINE constexpr bool IsValidSmi(T value)
static const int kJSObjectHeaderSize
static V8_INLINE void SetEmbedderData(v8::Isolate *isolate, uint32_t slot, void *data)
static const int kFixedArrayHeaderSize
static const int kJSObjectType
static const int kTheHoleValueRootIndex
static V8_INLINE Address ReadExternalPointerField(v8::Isolate *isolate, Address heap_object_ptr, int offset)
static const int kStringResourceOffset
static const int kIsolateHandleScopeDataOffset
static const int kIsolateFastCCallCallerFpOffset
static const int kErrorMessageParamOffset
friend bool operator==(const StrongRootAllocatorBase &a, const StrongRootAllocatorBase &b)
void deallocate_impl(Address *p, size_t n) noexcept
StrongRootAllocatorBase(LocalIsolate *isolate)
StrongRootAllocatorBase(v8::Isolate *isolate)
StrongRootAllocator(HeapOrIsolateT *)
StrongRootAllocator(const StrongRootAllocator< U > &other) noexcept
static V8_INLINE Address ValueAsAddress(const T *value)
static constexpr InternalRepresentationType kEmpty
static V8_INLINE T * HandleAsValue(const H< T, Ms... > &handle)
static V8_INLINE T * SlotAsValue(S *slot)
static V8_INLINE bool IsEmpty(T *value)
static V8_INLINE T * ReprAsValue(InternalRepresentationType repr)
static V8_INLINE InternalRepresentationType ValueAsRepr(const T *value)
constexpr const Iterator & base() const noexcept
constexpr WrappedIterator(const WrappedIterator< OtherIterator, OtherElementType > &other) noexcept
constexpr WrappedIterator operator++(int) noexcept
constexpr bool operator>=(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr WrappedIterator operator-(difference_type n) const noexcept
constexpr reference operator[](difference_type n) const noexcept
constexpr pointer operator->() const noexcept
constexpr WrappedIterator operator+(difference_type n) const noexcept
constexpr auto operator-(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr WrappedIterator & operator--() noexcept
constexpr WrappedIterator operator--(int) noexcept
constexpr WrappedIterator() noexcept=default
constexpr reference operator*() const noexcept
friend constexpr WrappedIterator operator+(difference_type n, const WrappedIterator &x) noexcept
constexpr WrappedIterator & operator+=(difference_type n) noexcept
constexpr WrappedIterator(Iterator it) noexcept
constexpr bool operator<=(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr WrappedIterator & operator++() noexcept
constexpr bool operator!=(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr bool operator==(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr bool operator>(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr bool operator<(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr WrappedIterator & operator-=(difference_type n) noexcept
constexpr bool PointerCompressionIsEnabled()
constexpr int kCodePointerTableEntryCodeObjectOffset
const int kApiSystemPointerSize
Definition v8-internal.h:65
constexpr size_t kMaxTrustedPointers
V8_DEPRECATE_SOON("Use GetCurrentIsolate() instead, which is guaranteed to return the same " "isolate since https://crrev.com/c/6458560.") V8_EXPORT internal V8_EXPORT bool ShouldThrowOnError(internal::Isolate *isolate)
constexpr ExternalPointerTagRange kAnyManagedResourceExternalPointerTag(kFirstManagedResourceTag, kLastManagedResourceTag)
V8_EXPORT void VerifyHandleIsNonEmpty(bool is_empty)
constexpr ExternalPointerTagRange kAnyManagedExternalPointerTagRange(kFirstManagedExternalPointerTag, kLastManagedExternalPointerTag)
constexpr IndirectPointerHandle kNullIndirectPointerHandle
constexpr int kTrustedPointerTableEntrySizeLog2
const int kApiInt32Size
Definition v8-internal.h:67
const int kApiTaggedSize
const int kHeapObjectTag
Definition v8-internal.h:72
const int kSmiTagSize
Definition v8-internal.h:87
constexpr int MB
Definition v8-internal.h:56
constexpr uint64_t kExternalPointerTagShift
constexpr uint64_t kExternalPointerTagAndMarkbitMask
const intptr_t kHeapObjectTagMask
Definition v8-internal.h:75
constexpr bool kHaveIteratorCategory
constexpr CppHeapPointerHandle kNullCppHeapPointerHandle
const int kSmiTag
Definition v8-internal.h:86
constexpr int kCodePointerTableEntryEntrypointOffset
const int kApiSizetSize
Definition v8-internal.h:69
constexpr size_t kMaxExternalPointers
constexpr ExternalPointerTagRange kAnyForeignExternalPointerTagRange(kFirstForeignExternalPointerTag, kLastForeignExternalPointerTag)
@ kExternalPointerNullTag
@ kDisplayNamesInternalTag
@ kApiIndexedPropertyDefinerCallbackTag
@ kLastManagedResourceTag
@ kFirstExternalPointerTag
@ kApiAbortScriptExecutionCallbackTag
@ kExternalPointerFreeEntryTag
@ kFirstManagedResourceTag
@ kExternalStringResourceDataTag
@ kApiNamedPropertyDefinerCallbackTag
@ kTemporalZonedDateTimeTag
@ kTemporalPlainDateTimeTag
@ kApiNamedPropertyEnumeratorCallbackTag
@ kIcuLocalizedNumberFormatterTag
@ kLastExternalPointerTag
@ kFirstForeignExternalPointerTag
@ kExternalPointerZappedEntryTag
@ kIcuDateIntervalFormatTag
@ kEmbedderDataSlotPayloadTag
@ kLastMaybeReadOnlyExternalPointerTag
@ kExternalPointerEvacuationEntryTag
@ kLastSharedExternalPointerTag
@ kFunctionTemplateInfoCallbackTag
@ kFirstInterceptorInfoExternalPointerTag
@ kApiIndexedPropertyDescriptorCallbackTag
@ kApiNamedPropertyQueryCallbackTag
@ kLastInterceptorInfoExternalPointerTag
@ kLastManagedExternalPointerTag
@ kIcuRelativeDateTimeFormatterTag
@ kApiNamedPropertyDeleterCallbackTag
@ kExternalObjectValueTag
@ kApiIndexedPropertySetterCallbackTag
@ kD8ModuleEmbedderDataTag
@ kIcuSimpleDateFormatTag
@ kArrayBufferExtensionTag
@ kApiNamedPropertySetterCallbackTag
@ kApiNamedPropertyDescriptorCallbackTag
@ kTemporalPlainMonthDayTag
@ kFirstManagedExternalPointerTag
@ kExternalStringResourceTag
@ kApiNamedPropertyGetterCallbackTag
@ kFirstSharedExternalPointerTag
@ kFirstMaybeReadOnlyExternalPointerTag
@ kApiIndexedPropertyEnumeratorCallbackTag
@ kApiIndexedPropertyQueryCallbackTag
@ kLastForeignExternalPointerTag
@ kTemporalPlainYearMonthTag
@ kNativeContextMicrotaskQueueTag
@ kApiAccessCheckCallbackTag
@ kApiIndexedPropertyGetterCallbackTag
@ kMicrotaskCallbackDataTag
@ kApiIndexedPropertyDeleterCallbackTag
constexpr ExternalPointerTagRange kAnyInterceptorInfoExternalPointerTagRange(kFirstInterceptorInfoExternalPointerTag, kLastInterceptorInfoExternalPointerTag)
const int kHeapObjectTagSize
Definition v8-internal.h:74
constexpr size_t kTrustedPointerTableReservationSize
const int kForwardingTag
Definition v8-internal.h:81
constexpr ExternalPointerTagRange kAnyExternalPointerTagRange(kFirstExternalPointerTag, kLastExternalPointerTag)
constexpr uint64_t kCppHeapPointerMarkBit
const int kSmiValueSize
constexpr size_t kMaxCppHeapPointers
constexpr ExternalPointerHandle kNullExternalPointerHandle
constexpr uint32_t kCodePointerHandleMarker
const intptr_t kForwardingTagMask
Definition v8-internal.h:83
constexpr uint32_t kCodePointerHandleShift
constexpr ExternalPointer_t kNullExternalPointer
constexpr int kCodePointerTableEntrySize
const int kSmiMaxValue
constexpr bool kHaveIteratorConcept
constexpr bool Is64()
constexpr ExternalPointerTagRange kAnySharedExternalPointerTagRange(kFirstSharedExternalPointerTag, kLastSharedExternalPointerTag)
constexpr bool kBuiltinCodeObjectsLiveInTrustedSpace
constexpr bool SmiValuesAre32Bits()
uintptr_t Address
Definition v8-internal.h:52
constexpr bool SmiValuesAre31Bits()
const int kApiDoubleSize
Definition v8-internal.h:66
constexpr ExternalPointerTagRange kAnyMaybeReadOnlyExternalPointerTagRange(kFirstMaybeReadOnlyExternalPointerTag, kLastMaybeReadOnlyExternalPointerTag)
constexpr uintptr_t kUintptrAllBitsSet
Definition v8-internal.h:94
constexpr bool kAllCodeObjectsLiveInTrustedSpace
constexpr TrustedPointerHandle kNullTrustedPointerHandle
constexpr uint64_t kCppHeapPointerTagShift
constexpr uint64_t kExternalPointerTagMask
constexpr size_t kCodePointerTableReservationSize
constexpr int kTrustedPointerTableEntrySize
constexpr bool kRuntimeGeneratedCodeObjectsLiveInTrustedSpace
const int kApiInt64Size
Definition v8-internal.h:68
const int kForwardingTagSize
Definition v8-internal.h:82
V8_INLINE void PerformCastCheck(T *data)
constexpr bool SandboxIsEnabled()
constexpr int kCodePointerTableEntrySizeLog2
constexpr CppHeapPointer_t kNullCppHeapPointer
const int kWeakHeapObjectTag
Definition v8-internal.h:73
constexpr int GB
Definition v8-internal.h:57
const int kSmiMinValue
constexpr intptr_t kIntptrAllBitsSet
Definition v8-internal.h:93
constexpr uint64_t kExternalPointerPayloadMask
constexpr uint32_t kTrustedPointerHandleShift
const intptr_t kHeapObjectReferenceTagMask
Definition v8-internal.h:76
constexpr size_t kMaxCodePointers
constexpr int KB
Definition v8-internal.h:55
constexpr uint64_t kExternalPointerShiftedTagMask
constexpr CodePointerHandle kNullCodePointerHandle
constexpr uint64_t kCppHeapPointerPayloadShift
const intptr_t kSmiTagMask
Definition v8-internal.h:88
constexpr uint64_t kExternalPointerMarkBit
const int kSmiShiftSize
constexpr int kGarbageCollectionReasonMaxValue
static void Perform(T *data)
static constexpr uint32_t kSizeInBytes
static V8_INLINE constexpr bool IsValidSmi(int64_t value)
static V8_INLINE constexpr int SmiToInt(Address value)
static V8_INLINE constexpr bool IsValidSmi(uint64_t value)
static V8_INLINE constexpr bool IsValidSmi(T value)
static constexpr intptr_t kSmiMinValue
static constexpr intptr_t kSmiMaxValue
static V8_INLINE constexpr int SmiToInt(Address value)
static V8_INLINE constexpr bool IsValidSmi(T value)
static constexpr intptr_t kSmiMinValue
static constexpr intptr_t kSmiMaxValue
constexpr bool Contains(TagRange tag_range) const
constexpr bool IsEmpty() const
constexpr size_t hash_value() const
constexpr bool operator==(const TagRange other) const
constexpr TagRange(Tag first, Tag last)
constexpr size_t Size() const
constexpr TagRange(Tag tag)
constexpr bool Contains(Tag tag) const
#define V8_HAVE_SPACESHIP_OPERATOR
Definition v8-internal.h:34
#define V8_EMBEDDER_DATA_TAG_COUNT
#define V8_STATIC_ROOTS_BOOL
Definition v8config.h:1061
#define V8_EXPORT
Definition v8config.h:860
#define V8_INLINE
Definition v8config.h:513
#define V8_DEPRECATE_SOON(message)
Definition v8config.h:627
#define V8_DEPRECATED(message)
Definition v8config.h:619
#define V8_LIKELY(condition)
Definition v8config.h:674