v8 11.3.244 (node 20.3.0)
V8 is Google's open source JavaScript engine
v8-internal.h
1// Copyright 2018 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef INCLUDE_V8_INTERNAL_H_
6#define INCLUDE_V8_INTERNAL_H_
7
8#include <stddef.h>
9#include <stdint.h>
10#include <string.h>
11
12#include <atomic>
13#include <type_traits>
14
15#include "v8-version.h" // NOLINT(build/include_directory)
16#include "v8config.h" // NOLINT(build/include_directory)
17
18namespace v8 {
19
20class Array;
21class Context;
22class Data;
23class Isolate;
24
25namespace internal {
26
27class Isolate;
28
29typedef uintptr_t Address;
30static constexpr Address kNullAddress = 0;
31
32constexpr int KB = 1024;
33constexpr int MB = KB * 1024;
34constexpr int GB = MB * 1024;
35#ifdef V8_TARGET_ARCH_X64
36constexpr size_t TB = size_t{GB} * 1024;
37#endif
38
42const int kApiSystemPointerSize = sizeof(void*);
43const int kApiDoubleSize = sizeof(double);
44const int kApiInt32Size = sizeof(int32_t);
45const int kApiInt64Size = sizeof(int64_t);
46const int kApiSizetSize = sizeof(size_t);
47
48// Tag information for HeapObject.
49const int kHeapObjectTag = 1;
50const int kWeakHeapObjectTag = 3;
51const int kHeapObjectTagSize = 2;
52const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
54
55// Tag information for forwarding pointers stored in object headers.
56// 0b00 at the lowest 2 bits in the header indicates that the map word is a
57// forwarding pointer.
58const int kForwardingTag = 0;
59const int kForwardingTagSize = 2;
60const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;
61
62// Tag information for Smi.
63const int kSmiTag = 0;
64const int kSmiTagSize = 1;
65const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
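// Illustrative sketch (added, not part of the original header): the low bit
// alone separates Smis (....0) from heap object pointers, and the two low
// bits separate strong (..01) from weak (..11) heap references.
static_assert((kSmiTag & kSmiTagMask) == 0 &&
              (kHeapObjectTag & kHeapObjectTagMask) == 1 &&
              (kWeakHeapObjectTag & kHeapObjectTagMask) == 3);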
66
67template <size_t tagged_ptr_size>
68struct SmiTagging;
69
70constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
71constexpr uintptr_t kUintptrAllBitsSet =
72 static_cast<uintptr_t>(kIntptrAllBitsSet);
73
74// Smi constants for systems where tagged pointer is a 32-bit value.
75template <>
76struct SmiTagging<4> {
77 enum { kSmiShiftSize = 0, kSmiValueSize = 31 };
78
79 static constexpr intptr_t kSmiMinValue =
80 static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
81 static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
82
83 V8_INLINE static int SmiToInt(Address value) {
84 int shift_bits = kSmiTagSize + kSmiShiftSize;
85 // Truncate and shift down (requires >> to be sign extending).
86 return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
87 }
88 V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
89 // Is value in range [kSmiMinValue, kSmiMaxValue].
90 // Use unsigned operations in order to avoid undefined behaviour in case of
91 // signed integer overflow.
92 return (static_cast<uintptr_t>(value) -
93 static_cast<uintptr_t>(kSmiMinValue)) <=
94 (static_cast<uintptr_t>(kSmiMaxValue) -
95 static_cast<uintptr_t>(kSmiMinValue));
96 }
97};
98
99// Smi constants for systems where tagged pointer is a 64-bit value.
100template <>
101struct SmiTagging<8> {
102 enum { kSmiShiftSize = 31, kSmiValueSize = 32 };
103
104 static constexpr intptr_t kSmiMinValue =
105 static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
106 static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
107
108 V8_INLINE static int SmiToInt(Address value) {
109 int shift_bits = kSmiTagSize + kSmiShiftSize;
110 // Shift down and throw away top 32 bits.
111 return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
112 }
113 V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
114 // To be representable as a long smi, the value must be a 32-bit integer.
115 return (value == static_cast<int32_t>(value));
116 }
117};
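// Illustrative sketch (added; assumes the usual two's-complement targets):
// the 31-bit scheme covers [-2^30, 2^30 - 1], the 32-bit scheme covers the
// full int32_t range, and IsValidSmi rejects the first out-of-range value.
static_assert(SmiTagging<4>::kSmiMinValue == -1073741824 &&
              SmiTagging<4>::kSmiMaxValue == 1073741823);
static_assert(SmiTagging<8>::kSmiMinValue == -2147483647 - 1 &&
              SmiTagging<8>::kSmiMaxValue == 2147483647);
static_assert(SmiTagging<4>::IsValidSmi(1073741823) &&
              !SmiTagging<4>::IsValidSmi(1073741824));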
118
119#ifdef V8_COMPRESS_POINTERS
120// See v8:7703 or src/common/ptr-compr-inl.h for details about pointer
121// compression.
122constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
123constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
124
125static_assert(
126 kApiSystemPointerSize == kApiInt64Size,
127 "Pointer compression can be enabled only for 64-bit architectures");
128const int kApiTaggedSize = kApiInt32Size;
129#else
130constexpr int kApiTaggedSize = kApiSystemPointerSize;
131#endif
132
133constexpr bool PointerCompressionIsEnabled() {
134 return kApiTaggedSize != kApiSystemPointerSize;
135}
136
137#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
138using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
139#else
140using PlatformSmiTagging = SmiTagging<kApiTaggedSize>;
141#endif
142
143// TODO(ishell): Consider adding kSmiShiftBits = kSmiShiftSize + kSmiTagSize
144// since it's used much more often than the individual constants.
145const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
146const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
147const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
148const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
149constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
150constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }
151constexpr bool Is64() { return kApiSystemPointerSize == sizeof(int64_t); }
152
153V8_INLINE static constexpr Address IntToSmi(int value) {
154 return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
155 kSmiTag;
156}
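// Worked example (added sketch): with 31-bit Smis the payload is shifted left
// by one and tagged with 0, so IntToSmi(42) == 0x54; with 32-bit Smis the
// payload lives in the upper half word, so IntToSmi(42) == 42 << 32.
static_assert(SmiValuesAre31Bits()
                  ? IntToSmi(42) == 0x54
                  : static_cast<uint64_t>(IntToSmi(42)) == (uint64_t{42} << 32));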
157
158/*
159 * Sandbox related types, constants, and functions.
160 */
161constexpr bool SandboxIsEnabled() {
162#ifdef V8_ENABLE_SANDBOX
163 return true;
164#else
165 return false;
166#endif
167}
168
169// SandboxedPointers are guaranteed to point into the sandbox. This is achieved
170// for example by storing them as offsets rather than as raw pointers.
171using SandboxedPointer_t = Address;
172
173#ifdef V8_ENABLE_SANDBOX
174
175// Size of the sandbox, excluding the guard regions surrounding it.
176#ifdef V8_TARGET_OS_ANDROID
177// On Android, most 64-bit devices seem to be configured with only 39 bits of
178// virtual address space for userspace. As such, limit the sandbox to 128GB (a
179// quarter of the total available address space).
180constexpr size_t kSandboxSizeLog2 = 37; // 128 GB
181#else
182// Everywhere else use a 1TB sandbox.
183constexpr size_t kSandboxSizeLog2 = 40; // 1 TB
184#endif // V8_TARGET_OS_ANDROID
185constexpr size_t kSandboxSize = 1ULL << kSandboxSizeLog2;
186
187// Required alignment of the sandbox. For simplicity, we require the
188// size of the guard regions to be a multiple of this, so that this specifies
189// the alignment of the sandbox including and excluding surrounding guard
190// regions. The alignment requirement is due to the pointer compression cage
191// being located at the start of the sandbox.
192constexpr size_t kSandboxAlignment = kPtrComprCageBaseAlignment;
193
194// Sandboxed pointers are stored inside the heap as offset from the sandbox
195// base shifted to the left. This way, it is guaranteed that the offset is
196// smaller than the sandbox size after shifting it to the right again. This
197// constant specifies the shift amount.
198constexpr uint64_t kSandboxedPointerShift = 64 - kSandboxSizeLog2;
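// Illustrative sketch (added): after shifting right by kSandboxedPointerShift,
// even an all-ones 64-bit word decodes to an offset inside the sandbox.
static_assert((~uint64_t{0} >> kSandboxedPointerShift) < kSandboxSize);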
199
200// Size of the guard regions surrounding the sandbox. This assumes a worst-case
201// scenario of a 32-bit unsigned index used to access an array of 64-bit
202// values.
203constexpr size_t kSandboxGuardRegionSize = 32ULL * GB;
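// Worked arithmetic (added sketch): a 32-bit unsigned index into an array of
// 8-byte elements can reach at most 2^32 * 8 bytes == 32 GB past its base,
// which is exactly what the guard region absorbs.
static_assert(kSandboxGuardRegionSize == (uint64_t{1} << 32) * 8);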
204
205static_assert((kSandboxGuardRegionSize % kSandboxAlignment) == 0,
206 "The size of the guard regions around the sandbox must be a "
207 "multiple of its required alignment.");
208
209// On OSes where reserving virtual memory is too expensive to reserve the
210// entire address space backing the sandbox, notably Windows pre 8.1, we create
211// a partially reserved sandbox that doesn't actually reserve most of the
212// memory, and so doesn't have the desired security properties as unrelated
213// memory allocations could end up inside of it, but which still ensures that
214// objects that should be located inside the sandbox are allocated within
215// kSandboxSize bytes from the start of the sandbox. The minimum size of the
216// region that is actually reserved for such a sandbox is specified by this
217// constant and should be big enough to contain the pointer compression cage as
218// well as the ArrayBuffer partition.
219constexpr size_t kSandboxMinimumReservationSize = 8ULL * GB;
220
221static_assert(kSandboxMinimumReservationSize > kPtrComprCageReservationSize,
222 "The minimum reservation size for a sandbox must be larger than "
223 "the pointer compression cage contained within it.");
224
225// The maximum buffer size allowed inside the sandbox. This is mostly dependent
226// on the size of the guard regions around the sandbox: an attacker must not be
227// able to construct a buffer that appears larger than the guard regions and
228// thereby "reach out of" the sandbox.
229constexpr size_t kMaxSafeBufferSizeForSandbox = 32ULL * GB - 1;
230static_assert(kMaxSafeBufferSizeForSandbox <= kSandboxGuardRegionSize,
231 "The maximum allowed buffer size must not be larger than the "
232 "sandbox's guard regions");
233
234constexpr size_t kBoundedSizeShift = 29;
235static_assert(1ULL << (64 - kBoundedSizeShift) ==
236 kMaxSafeBufferSizeForSandbox + 1,
237 "The maximum size of a BoundedSize must be synchronized with the "
238 "kMaxSafeBufferSizeForSandbox");
239
240#endif // V8_ENABLE_SANDBOX
241
242#ifdef V8_COMPRESS_POINTERS
243
244#ifdef V8_TARGET_OS_ANDROID
245// The size of the virtual memory reservation for an external pointer table.
246// This determines the maximum number of entries in a table. Using a maximum
247// size allows omitting bounds checks on table accesses if the indices are
248// guaranteed (e.g. through shifting) to be below the maximum index. This
249// value must be a power of two.
250static const size_t kExternalPointerTableReservationSize = 512 * MB;
251
252// The external pointer table indices stored in HeapObjects as external
253// pointers are shifted to the left by this amount to guarantee that they are
254// smaller than the maximum table size.
255static const uint32_t kExternalPointerIndexShift = 6;
256#else
257static const size_t kExternalPointerTableReservationSize = 1024 * MB;
258static const uint32_t kExternalPointerIndexShift = 5;
259#endif // V8_TARGET_OS_ANDROID
260
261// The maximum number of entries in an external pointer table.
262static const size_t kMaxExternalPointers =
263 kExternalPointerTableReservationSize / kApiSystemPointerSize;
264static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
265 "kExternalPointerTableReservationSize and "
266 "kExternalPointerIndexShift don't match");
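// Worked arithmetic (added sketch; assumes the non-Android values above):
// 1024 MB of reservation at 8 bytes per entry gives 2^27 entries, the range
// addressable by a 32-bit handle shifted right by 5 bits.
#ifndef V8_TARGET_OS_ANDROID
static_assert(kMaxExternalPointers == (size_t{1} << 27));
#endif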
267
268#else // !V8_COMPRESS_POINTERS
269
270// Needed for the V8.SandboxedExternalPointersCount histogram.
271static const size_t kMaxExternalPointers = 0;
272
273#endif // V8_COMPRESS_POINTERS
274
275// An ExternalPointerHandle represents an (opaque) reference to an external
276// pointer that can be stored inside the sandbox. An ExternalPointerHandle has
277// meaning only in combination with an (active) Isolate as it references an
278// external pointer stored in the currently active Isolate's
279// ExternalPointerTable. Internally, an ExternalPointerHandle is simply an
280// index into an ExternalPointerTable that is shifted to the left to guarantee
281// that it is smaller than the size of the table.
282using ExternalPointerHandle = uint32_t;
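// Illustrative sketch (added): because handles are stored pre-shifted, even
// the largest possible 32-bit handle decodes to an in-bounds table index.
#ifdef V8_COMPRESS_POINTERS
static_assert((ExternalPointerHandle{0xFFFFFFFF} >> kExternalPointerIndexShift) <
              kMaxExternalPointers);
#endif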
283
284// ExternalPointers point to objects located outside the sandbox. When
285// sandboxed external pointers are enabled, these are stored on heap as
286// ExternalPointerHandles, otherwise they are simply raw pointers.
287#ifdef V8_ENABLE_SANDBOX
288using ExternalPointer_t = ExternalPointerHandle;
289#else
290using ExternalPointer_t = Address;
291#endif
292
293// When the sandbox is enabled, external pointers are stored in an external
294// pointer table and are referenced from HeapObjects through an index (a
295// "handle"). When stored in the table, the pointers are tagged with per-type
296// tags to prevent type confusion attacks between different external objects.
297// Besides type information bits, these tags also contain the GC marking bit
298// which indicates whether the pointer table entry is currently alive. When a
299// pointer is written into the table, the tag is ORed into the top bits. When
300// that pointer is later loaded from the table, it is ANDed with the inverse of
301// the expected tag. If the expected and actual type differ, this will leave
302// some of the top bits of the pointer set, rendering the pointer inaccessible.
303// The AND operation also removes the GC marking bit from the pointer.
304//
305// The tags are constructed such that UNTAG(TAG(0, T1), T2) != 0 for any two
306// (distinct) tags T1 and T2. In practice, this is achieved by generating tags
307// that all have the same number of zeroes and ones but different bit patterns.
308// With N type tag bits, this allows for (N choose N/2) possible type tags.
309// Besides the type tag bits, the tags also have the GC marking bit set so that
310// the marking bit is automatically set when a pointer is written into the
311// external pointer table (in which case it is clearly alive) and is cleared
312// when the pointer is loaded. The exception to this is the free entry tag,
313// which doesn't have the mark bit set, as the entry is not alive. This
314// construction allows performing the type check and removing GC marking bits
315// from the pointer in one efficient operation (bitwise AND). The number of
316// available bits is limited in the following way: on x64, bits [47, 64) are
317// generally available for tagging (userspace has 47 address bits available).
318// On Arm64, userspace typically has a 40 or 48 bit address space. However, due
319// to top-byte ignore (TBI) and memory tagging (MTE), the top byte is unusable
320// for type checks as type-check failures would go unnoticed or collide with
321// MTE bits. Some bits of the top byte can, however, still be used for the GC
322// marking bit. The bits available for the type tags are therefore limited to
323// [48, 56), i.e. (8 choose 4) = 70 different types.
324// The following options exist to increase the number of possible types:
325// - Using multiple ExternalPointerTables since tags can safely be reused
326// across different tables
327// - Using "extended" type checks, where additional type information is stored
328// either in an adjacent pointer table entry or at the pointed-to location
329// - Using a different tagging scheme, for example based on XOR which would
330// allow for 2**8 different tags but require a separate operation to remove
331// the marking bit
332//
333// The external pointer sandboxing mechanism ensures that every access to an
334// external pointer field will result in a valid pointer of the expected type
335// even in the presence of an attacker able to corrupt memory inside the
336// sandbox. However, if any data related to the external object is stored
337// inside the sandbox it may still be corrupted and so must be validated before
338// use or moved into the external object. Further, an attacker will always be
339// able to substitute different external pointers of the same type for each
340// other. Therefore, code using external pointers must be written in a
341// "substitution-safe" way, i.e. it must always be possible to substitute
342// external pointers of the same type without causing memory corruption outside
343// of the sandbox. Generally this is achieved by referencing any group of
344// related external objects through a single external pointer.
345//
346// Currently we use bit 62 for the marking bit which should always be unused as
347// it's part of the non-canonical address range. When Arm's top-byte ignore
348// (TBI) is enabled, this bit will be part of the ignored byte, and we assume
349// that the Embedder is not using this byte (really only this one bit) for any
350// other purpose. This bit also does not collide with the memory tagging
351// extension (MTE) which would use bits [56, 60).
352//
353// External pointer tables are also available even when the sandbox is off but
354// pointer compression is on. In that case, the mechanism can be used to ease
355// alignment requirements as it turns unaligned 64-bit raw pointers into
356// aligned 32-bit indices. To "opt-in" to the external pointer table mechanism
357// for this purpose, instead of using the ExternalPointer accessors one needs to
358// use ExternalPointerHandles directly and use them to access the pointers in an
359// ExternalPointerTable.
360constexpr uint64_t kExternalPointerMarkBit = 1ULL << 62;
361constexpr uint64_t kExternalPointerTagMask = 0x40ff000000000000;
362constexpr uint64_t kExternalPointerTagShift = 48;
363
364// All possible 8-bit type tags.
365// These are sorted so that tags can be grouped together and it can efficiently
366// be checked if a tag belongs to a given group. See for example the
367// IsSharedExternalPointerType routine.
368constexpr uint64_t kAllExternalPointerTypeTags[] = {
369 0b00001111, 0b00010111, 0b00011011, 0b00011101, 0b00011110, 0b00100111,
370 0b00101011, 0b00101101, 0b00101110, 0b00110011, 0b00110101, 0b00110110,
371 0b00111001, 0b00111010, 0b00111100, 0b01000111, 0b01001011, 0b01001101,
372 0b01001110, 0b01010011, 0b01010101, 0b01010110, 0b01011001, 0b01011010,
373 0b01011100, 0b01100011, 0b01100101, 0b01100110, 0b01101001, 0b01101010,
374 0b01101100, 0b01110001, 0b01110010, 0b01110100, 0b01111000, 0b10000111,
375 0b10001011, 0b10001101, 0b10001110, 0b10010011, 0b10010101, 0b10010110,
376 0b10011001, 0b10011010, 0b10011100, 0b10100011, 0b10100101, 0b10100110,
377 0b10101001, 0b10101010, 0b10101100, 0b10110001, 0b10110010, 0b10110100,
378 0b10111000, 0b11000011, 0b11000101, 0b11000110, 0b11001001, 0b11001010,
379 0b11001100, 0b11010001, 0b11010010, 0b11010100, 0b11011000, 0b11100001,
380 0b11100010, 0b11100100, 0b11101000, 0b11110000};
381
382#define TAG(i) \
383 ((kAllExternalPointerTypeTags[i] << kExternalPointerTagShift) | \
384 kExternalPointerMarkBit)
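// Illustrative sketch (added): ORing a tag in and ANDing out the complement
// of the *same* tag cancels exactly (also clearing the mark bit), while a
// different tag leaves stray high bits set, making the pointer unusable.
static_assert(((uint64_t{0} | TAG(0)) & ~TAG(0)) == 0);
static_assert(((uint64_t{0} | TAG(0)) & ~TAG(1)) != 0);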
385
386// clang-format off
387
388// When adding new tags, please ensure that the code using these tags is
389// "substitution-safe", i.e. still operates safely if external pointers of the
390// same type are swapped by an attacker. See comment above for more details.
391
392// Shared external pointers are owned by the shared Isolate and stored in the
393// shared external pointer table associated with that Isolate, where they can
394// be accessed from multiple threads at the same time. The objects referenced
395// in this way must therefore always be thread-safe.
396#define SHARED_EXTERNAL_POINTER_TAGS(V) \
397 V(kFirstSharedTag, TAG(0)) \
398 V(kWaiterQueueNodeTag, TAG(0)) \
399 V(kExternalStringResourceTag, TAG(1)) \
400 V(kExternalStringResourceDataTag, TAG(2)) \
401 V(kLastSharedTag, TAG(2))
402
403// External pointers using these tags are kept in a per-Isolate external
404// pointer table and can only be accessed when this Isolate is active.
405#define PER_ISOLATE_EXTERNAL_POINTER_TAGS(V) \
406 V(kForeignForeignAddressTag, TAG(10)) \
407 V(kNativeContextMicrotaskQueueTag, TAG(11)) \
408 V(kEmbedderDataSlotPayloadTag, TAG(12)) \
409/* This tag essentially stands for a `void*` pointer in the V8 API, and */ \
410/* it is the Embedder's responsibility to ensure type safety (against */ \
411/* substitution) and lifetime validity of these objects. */ \
412 V(kExternalObjectValueTag, TAG(13)) \
413 V(kCallHandlerInfoCallbackTag, TAG(14)) \
414 V(kAccessorInfoGetterTag, TAG(15)) \
415 V(kAccessorInfoSetterTag, TAG(16)) \
416 V(kWasmInternalFunctionCallTargetTag, TAG(17)) \
417 V(kWasmTypeInfoNativeTypeTag, TAG(18)) \
418 V(kWasmExportedFunctionDataSignatureTag, TAG(19)) \
419 V(kWasmContinuationJmpbufTag, TAG(20)) \
420 V(kArrayBufferExtensionTag, TAG(21))
421
422// All external pointer tags.
423#define ALL_EXTERNAL_POINTER_TAGS(V) \
424 SHARED_EXTERNAL_POINTER_TAGS(V) \
425 PER_ISOLATE_EXTERNAL_POINTER_TAGS(V)
426
427#define EXTERNAL_POINTER_TAG_ENUM(Name, Tag) Name = Tag,
428#define MAKE_TAG(HasMarkBit, TypeTag) \
429 ((static_cast<uint64_t>(TypeTag) << kExternalPointerTagShift) | \
430 (HasMarkBit ? kExternalPointerMarkBit : 0))
431enum ExternalPointerTag : uint64_t {
432 // Empty tag value. Mostly used as placeholder.
433 kExternalPointerNullTag = MAKE_TAG(1, 0b00000000),
434 // External pointer tag that will match any external pointer. Use with care!
435 kAnyExternalPointerTag = MAKE_TAG(1, 0b11111111),
436 // The free entry tag has all type bits set so every type check with a
437 // different type fails. It also doesn't have the mark bit set as free
438 // entries are (by definition) not alive.
439 kExternalPointerFreeEntryTag = MAKE_TAG(0, 0b11111111),
440 // Evacuation entries are used during external pointer table compaction.
441 kExternalPointerEvacuationEntryTag = MAKE_TAG(1, 0b11100111),
442
443 ALL_EXTERNAL_POINTER_TAGS(EXTERNAL_POINTER_TAG_ENUM)
444};
445
446#undef MAKE_TAG
447#undef TAG
448#undef EXTERNAL_POINTER_TAG_ENUM
449
450// clang-format on
451
452// True if the external pointer must be accessed from the shared isolate's
453// external pointer table.
454V8_INLINE static constexpr bool IsSharedExternalPointerType(
455 ExternalPointerTag tag) {
456 return tag >= kFirstSharedTag && tag <= kLastSharedTag;
457}
458
459// Sanity checks.
460#define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
461 static_assert(IsSharedExternalPointerType(Tag));
462#define CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
463 static_assert(!IsSharedExternalPointerType(Tag));
464
465SHARED_EXTERNAL_POINTER_TAGS(CHECK_SHARED_EXTERNAL_POINTER_TAGS)
466PER_ISOLATE_EXTERNAL_POINTER_TAGS(CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS)
467
468#undef CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS
469#undef CHECK_SHARED_EXTERNAL_POINTER_TAGS
470
471#undef SHARED_EXTERNAL_POINTER_TAGS
472#undef EXTERNAL_POINTER_TAGS
473
474// {obj} must be the raw tagged pointer representation of a HeapObject
475// that's guaranteed to never be in ReadOnlySpace.
476V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
477
478// Returns whether we need to throw when an error occurs. This infers the
479// mode based on the current context and the closure. This returns true if the
480// language mode is strict.
481V8_EXPORT bool ShouldThrowOnError(internal::Isolate* isolate);
482/**
483 * This class exports constants and functionality from within v8 that
484 * is necessary to implement inline functions in the v8 api. Such as
485 * those in v8-value-inl.h.
486 */
487class Internals {
488#ifdef V8_MAP_PACKING
489 V8_INLINE static constexpr Address UnpackMapWord(Address mapword) {
490 // TODO(wenyuzhao): Clear header metadata.
491 return mapword ^ kMapWordXorMask;
492 }
493#endif
494
495 public:
496 // These values match non-compiler-dependent values defined within
497 // the implementation of v8.
498 static const int kHeapObjectMapOffset = 0;
500 static const int kStringResourceOffset =
502
504 static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
505 static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
508#ifdef V8_ENABLE_SANDBOX
510#else
512#endif
515 static const int kStringEncodingMask = 0x8;
516 static const int kExternalTwoByteRepresentationTag = 0x02;
517 static const int kExternalOneByteRepresentationTag = 0x0a;
518
519 static const uint32_t kNumIsolateDataSlots = 4;
525
526 // ExternalPointerTable layout guarantees.
529
530 // IsolateData layout guarantees.
531 static const int kIsolateCageBaseOffset = 0;
532 static const int kIsolateStackGuardOffset =
534 static const int kVariousBooleanFlagsOffset =
538 static const int kBuiltinTier0TableOffset =
540 static const int kNewAllocationInfoOffset =
542 static const int kOldAllocationInfoOffset =
554 static const int kIsolateEmbedderDataOffset =
556#ifdef V8_COMPRESS_POINTERS
557 static const int kIsolateExternalPointerTableOffset =
559 static const int kIsolateSharedExternalPointerTableAddressOffset =
560 kIsolateExternalPointerTableOffset + kExternalPointerTableSize;
561 static const int kIsolateRootsOffset =
562 kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
563#else
564 static const int kIsolateRootsOffset =
566#endif
567
568#if V8_STATIC_ROOTS_BOOL
569
570// These constants need to be initialized in api.cc.
571#define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
572 V(UndefinedValue) \
573 V(NullValue) \
574 V(TrueValue) \
575 V(FalseValue) \
576 V(EmptyString) \
577 V(TheHoleValue)
578
579 using Tagged_t = uint32_t;
580 struct StaticReadOnlyRoot {
581#define DEF_ROOT(name) V8_EXPORT static const Tagged_t k##name;
582 EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT)
583#undef DEF_ROOT
584
585 V8_EXPORT static const Tagged_t kFirstStringMap;
586 V8_EXPORT static const Tagged_t kLastStringMap;
587 };
588
589#endif // V8_STATIC_ROOTS_BOOL
590
591 static const int kUndefinedValueRootIndex = 4;
592 static const int kTheHoleValueRootIndex = 5;
593 static const int kNullValueRootIndex = 6;
594 static const int kTrueValueRootIndex = 7;
595 static const int kFalseValueRootIndex = 8;
596 static const int kEmptyStringRootIndex = 9;
597
599 static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
600 static const int kNodeStateMask = 0x3;
601 static const int kNodeStateIsWeakValue = 2;
602
604
605 static const int kFirstNonstringType = 0x80;
606 static const int kOddballType = 0x83;
607 static const int kForeignType = 0xcc;
608 static const int kJSSpecialApiObjectType = 0x410;
609 static const int kJSObjectType = 0x421;
610 static const int kFirstJSApiObjectType = 0x422;
611 static const int kLastJSApiObjectType = 0x80A;
612
613 static const int kUndefinedOddballKind = 5;
614 static const int kNullOddballKind = 3;
615
616 // Constants used by PropertyCallbackInfo to check if we should throw when an
617 // error occurs.
618 static const int kThrowOnError = 0;
619 static const int kDontThrow = 1;
620 static const int kInferShouldThrowMode = 2;
621
622 // Soft limit for AdjustAmountOfExternalAllocatedMemory. Trigger an
623 // incremental GC once the external memory reaches this limit.
624 static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
625
626#ifdef V8_MAP_PACKING
627 static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
628 // The lowest two bits of mapwords are always `0b10`
629 static const uintptr_t kMapWordSignature = 0b10;
630 // XORing a (non-compressed) map with this mask ensures that the two
631 // low-order bits are 0b10. The 0 at the end makes this look like a Smi,
632 // although real Smis have all lower 32 bits unset. We only rely on these
633 // values passing as Smis in very few places.
634 static const int kMapWordXorMask = 0b11;
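  // Illustrative check (added sketch): a map pointer carries kHeapObjectTag
  // (0b01) in its low bits, so XORing with kMapWordXorMask yields the 0b10
  // signature described above.
  static_assert((kHeapObjectTag ^ kMapWordXorMask) == kMapWordSignature);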
635#endif
636
638 V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
639#ifdef V8_ENABLE_CHECKS
640 CheckInitializedImpl(isolate);
641#endif
642 }
643
644 V8_INLINE static bool HasHeapObjectTag(Address value) {
645 return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
646 }
647
648 V8_INLINE static int SmiValue(Address value) {
649 return PlatformSmiTagging::SmiToInt(value);
650 }
651
652 V8_INLINE static constexpr Address IntToSmi(int value) {
653 return internal::IntToSmi(value);
654 }
655
656 V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
657 return PlatformSmiTagging::IsValidSmi(value);
658 }
659
660#if V8_STATIC_ROOTS_BOOL
661 V8_INLINE static bool is_identical(Address obj, Tagged_t constant) {
662 return static_cast<Tagged_t>(obj) == constant;
663 }
664
665 V8_INLINE static bool CheckInstanceMapRange(Address obj, Tagged_t first_map,
666 Tagged_t last_map) {
667 auto map = ReadRawField<Tagged_t>(obj, kHeapObjectMapOffset);
668#ifdef V8_MAP_PACKING
669 map = UnpackMapWord(map);
670#endif
671 return map >= first_map && map <= last_map;
672 }
673#endif
674
675 V8_INLINE static int GetInstanceType(Address obj) {
676 Address map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
677#ifdef V8_MAP_PACKING
678 map = UnpackMapWord(map);
679#endif
680 return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
681 }
682
683 V8_INLINE static int GetOddballKind(Address obj) {
684 return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
685 }
686
687 V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
688 int representation = (instance_type & kStringRepresentationAndEncodingMask);
689 return representation == kExternalTwoByteRepresentationTag;
690 }
691
692 V8_INLINE static constexpr bool CanHaveInternalField(int instance_type) {
693 static_assert(kJSObjectType + 1 == kFirstJSApiObjectType);
694 static_assert(kJSObjectType < kLastJSApiObjectType);
695 static_assert(kFirstJSApiObjectType < kLastJSApiObjectType);
696 // Check for IsJSObject() || IsJSSpecialApiObject() || IsJSApiObject()
697 return instance_type == kJSSpecialApiObjectType ||
698 // inlined version of base::IsInRange
699 (static_cast<unsigned>(static_cast<unsigned>(instance_type) -
700 static_cast<unsigned>(kJSObjectType)) <=
701 static_cast<unsigned>(kLastJSApiObjectType - kJSObjectType));
702 }
703
704 V8_INLINE static uint8_t GetNodeFlag(Address* obj, int shift) {
705 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
706 return *addr & static_cast<uint8_t>(1U << shift);
707 }
708
709 V8_INLINE static void UpdateNodeFlag(Address* obj, bool value, int shift) {
710 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
711 uint8_t mask = static_cast<uint8_t>(1U << shift);
712 *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
713 }
714
715 V8_INLINE static uint8_t GetNodeState(Address* obj) {
716 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
717 return *addr & kNodeStateMask;
718 }
719
720 V8_INLINE static void UpdateNodeState(Address* obj, uint8_t value) {
721 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
722 *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
723 }
724
725 V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
726 void* data) {
727 Address addr = reinterpret_cast<Address>(isolate) +
728 kIsolateEmbedderDataOffset + slot * kApiSystemPointerSize;
729 *reinterpret_cast<void**>(addr) = data;
730 }
731
732 V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
733 uint32_t slot) {
734 Address addr = reinterpret_cast<Address>(isolate) +
735 kIsolateEmbedderDataOffset + slot * kApiSystemPointerSize;
736 return *reinterpret_cast<void* const*>(addr);
737 }
738
739 V8_INLINE static void IncrementLongTasksStatsCounter(v8::Isolate* isolate) {
740 Address addr =
741 reinterpret_cast<Address>(isolate) + kIsolateLongTaskStatsCounterOffset;
742 ++(*reinterpret_cast<size_t*>(addr));
743 }
744
745 V8_INLINE static Address* GetRootSlot(v8::Isolate* isolate, int index) {
746 Address addr = reinterpret_cast<Address>(isolate) + kIsolateRootsOffset +
747 index * kApiSystemPointerSize;
748 return reinterpret_cast<Address*>(addr);
749 }
750
751 V8_INLINE static Address GetRoot(v8::Isolate* isolate, int index) {
752#if V8_STATIC_ROOTS_BOOL
753 Address base = *reinterpret_cast<Address*>(
754 reinterpret_cast<uintptr_t>(isolate) + kIsolateCageBaseOffset);
755 switch (index) {
756#define DECOMPRESS_ROOT(name) \
757 case k##name##RootIndex: \
758 return base + StaticReadOnlyRoot::k##name;
759 EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT)
760#undef DECOMPRESS_ROOT
761 default:
762 break;
763 }
764#undef EXPORTED_STATIC_ROOTS_PTR_LIST
765#endif // V8_STATIC_ROOTS_BOOL
766 return *GetRootSlot(isolate, index);
767 }
768
769#ifdef V8_ENABLE_SANDBOX
770 V8_INLINE static Address* GetExternalPointerTableBase(v8::Isolate* isolate) {
771 Address addr = reinterpret_cast<Address>(isolate) +
772 kIsolateExternalPointerTableOffset +
773 kExternalPointerTableBufferOffset;
774 return *reinterpret_cast<Address**>(addr);
775 }
776
777 V8_INLINE static Address* GetSharedExternalPointerTableBase(
778 v8::Isolate* isolate) {
779 Address addr = reinterpret_cast<Address>(isolate) +
780 kIsolateSharedExternalPointerTableAddressOffset;
781 addr = *reinterpret_cast<Address*>(addr);
782 addr += kExternalPointerTableBufferOffset;
783 return *reinterpret_cast<Address**>(addr);
784 }
785#endif
786
787 template <typename T>
788 V8_INLINE static T ReadRawField(Address heap_object_ptr, int offset) {
789 Address addr = heap_object_ptr + offset - kHeapObjectTag;
790#ifdef V8_COMPRESS_POINTERS
791 if (sizeof(T) > kApiTaggedSize) {
792 // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
793 // fields (external pointers, doubles and BigInt data) are only
794 // kTaggedSize aligned so we have to use unaligned pointer friendly way of
795 // accessing them in order to avoid undefined behavior in C++ code.
796 T r;
797 memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
798 return r;
799 }
800#endif
801 return *reinterpret_cast<const T*>(addr);
802 }
803
804 V8_INLINE static Address ReadTaggedPointerField(Address heap_object_ptr,
805 int offset) {
806#ifdef V8_COMPRESS_POINTERS
807 uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
808 Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
809 return base + static_cast<Address>(static_cast<uintptr_t>(value));
810#else
811 return ReadRawField<Address>(heap_object_ptr, offset);
812#endif
813 }
814
815 V8_INLINE static Address ReadTaggedSignedField(Address heap_object_ptr,
816 int offset) {
817#ifdef V8_COMPRESS_POINTERS
818 uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
819 return static_cast<Address>(static_cast<uintptr_t>(value));
820#else
821 return ReadRawField<Address>(heap_object_ptr, offset);
822#endif
823 }
824
825 V8_INLINE static v8::Isolate* GetIsolateForSandbox(Address obj) {
826#ifdef V8_ENABLE_SANDBOX
827 return reinterpret_cast<v8::Isolate*>(
828 internal::IsolateFromNeverReadOnlySpaceObject(obj));
829#else
830 // Not used in non-sandbox mode.
831 return nullptr;
832#endif
833 }
834
835 template <ExternalPointerTag tag>
836 V8_INLINE static Address ReadExternalPointerField(v8::Isolate* isolate,
837 Address heap_object_ptr,
838 int offset) {
839#ifdef V8_ENABLE_SANDBOX
840 static_assert(tag != kExternalPointerNullTag);
841 // See src/sandbox/external-pointer-table-inl.h. Logic duplicated here so
842 // it can be inlined and doesn't require an additional call.
843 Address* table = IsSharedExternalPointerType(tag)
844 ? GetSharedExternalPointerTableBase(isolate)
845 : GetExternalPointerTableBase(isolate);
846 ExternalPointerHandle handle =
847 ReadRawField<ExternalPointerHandle>(heap_object_ptr, offset);
848 uint32_t index = handle >> kExternalPointerIndexShift;
849 std::atomic<Address>* ptr =
850 reinterpret_cast<std::atomic<Address>*>(&table[index]);
851 Address entry = std::atomic_load_explicit(ptr, std::memory_order_relaxed);
852 return entry & ~tag;
853#else
854 return ReadRawField<Address>(heap_object_ptr, offset);
855#endif // V8_ENABLE_SANDBOX
856 }
857
858#ifdef V8_COMPRESS_POINTERS
859 V8_INLINE static Address GetPtrComprCageBaseFromOnHeapAddress(Address addr) {
860 return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
861 }
862
863 V8_INLINE static Address DecompressTaggedField(Address heap_object_ptr,
864 uint32_t value) {
865 Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
866 return base + static_cast<Address>(static_cast<uintptr_t>(value));
867 }
868
869#endif // V8_COMPRESS_POINTERS
870};
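// Illustrative sketch (added): the range check above admits plain JSObjects,
// special API objects, and the whole JSApiObject range, but not Oddballs.
static_assert(Internals::CanHaveInternalField(Internals::kJSObjectType) &&
              Internals::CanHaveInternalField(Internals::kJSSpecialApiObjectType) &&
              Internals::CanHaveInternalField(Internals::kLastJSApiObjectType) &&
              !Internals::CanHaveInternalField(Internals::kOddballType));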
871
872// Only perform cast check for types derived from v8::Data since
873// other types do not implement the Cast method.
874template <bool PerformCheck>
875struct CastCheck {
876 template <class T>
877 static void Perform(T* data);
878};
879
880template <>
881template <class T>
882void CastCheck<true>::Perform(T* data) {
883 T::Cast(data);
884}
885
886template <>
887template <class T>
888void CastCheck<false>::Perform(T* data) {}
889
890template <class T>
891V8_INLINE void PerformCastCheck(T* data) {
892 CastCheck<std::is_base_of<Data, T>::value &&
893 !std::is_same<Data, std::remove_cv_t<T>>::value>::Perform(data);
894}
895
896// A base class for backing stores, which is needed due to vagaries of
897// how static casts work with std::shared_ptr.
898class BackingStoreBase {};
899
900// The maximum value in enum GarbageCollectionReason, defined in heap.h.
901// This is needed for histograms sampling garbage collection reasons.
903
904// Helper functions about values contained in handles.
905class ValueHelper final {
906 public:
907#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
908 static constexpr Address kLocalTaggedNullAddress = 1;
909
910 template <typename T>
911 static constexpr T* EmptyValue() {
912 return reinterpret_cast<T*>(kLocalTaggedNullAddress);
913 }
914
915 template <typename T>
916 V8_INLINE static Address ValueAsAddress(const T* value) {
917 return reinterpret_cast<Address>(value);
918 }
919
920 template <typename T, typename S>
921 V8_INLINE static T* SlotAsValue(S* slot) {
922 return *reinterpret_cast<T**>(slot);
923 }
924
925 template <typename T>
926 V8_INLINE static T* ValueAsSlot(T* const& value) {
927 return reinterpret_cast<T*>(const_cast<T**>(&value));
928 }
929
930#else // !V8_ENABLE_CONSERVATIVE_STACK_SCANNING
931
932 template <typename T>
933 static constexpr T* EmptyValue() {
934 return nullptr;
935 }
936
937 template <typename T>
938 V8_INLINE static Address ValueAsAddress(const T* value) {
939 return *reinterpret_cast<const Address*>(value);
940 }
941
942 template <typename T, typename S>
943 V8_INLINE static T* SlotAsValue(S* slot) {
944 return reinterpret_cast<T*>(slot);
945 }
946
947 template <typename T>
948 V8_INLINE static T* ValueAsSlot(T* const& value) {
949 return value;
950 }
951
952#endif // V8_ENABLE_CONSERVATIVE_STACK_SCANNING
953};
954
955} // namespace internal
956} // namespace v8
957
958#endif // INCLUDE_V8_INTERNAL_H_