v8 8.4.371 (node 14.15.5)
V8 is Google's open source JavaScript engine.
v8-internal.h
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_V8_INTERNAL_H_
#define INCLUDE_V8_INTERNAL_H_

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <type_traits>

#include "v8-version.h"  // NOLINT(build/include_directory)
#include "v8config.h"    // NOLINT(build/include_directory)

namespace v8 {

class Context;
class Data;
class Isolate;

namespace internal {

class Isolate;

typedef uintptr_t Address;
static const Address kNullAddress = 0;

/**
 * Configuration of tagging scheme.
 */
const int kApiSystemPointerSize = sizeof(void*);
const int kApiDoubleSize = sizeof(double);
const int kApiInt32Size = sizeof(int32_t);
const int kApiInt64Size = sizeof(int64_t);

// Tag information for HeapObject.
const int kHeapObjectTag = 1;
const int kWeakHeapObjectTag = 3;
const int kHeapObjectTagSize = 2;
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;

// Tag information for Smi.
const int kSmiTag = 0;
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
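The constants above describe V8's pointer-tagging protocol: the low bit of a tagged word is 0 for a Smi (small integer) and 1 for a HeapObject, and the two low bits distinguish strong (01) from weak (11) heap references. A minimal standalone sketch of how the masks can be applied (illustrative only, not part of v8-internal.h; the real Smi payload shift is platform dependent):

#include <cstdint>
#include <cstdio>

// Local copies of the tag constants, mirroring v8-internal.h.
constexpr uintptr_t kSmiTag = 0, kSmiTagMask = 1;
constexpr uintptr_t kHeapObjectTag = 1, kWeakHeapObjectTag = 3,
                    kHeapObjectTagMask = 3;

const char* Classify(uintptr_t tagged) {
  if ((tagged & kSmiTagMask) == kSmiTag) return "Smi";
  // Check the weak pattern (11) before the strong one (01).
  if ((tagged & kHeapObjectTagMask) == kWeakHeapObjectTag) return "weak HeapObject";
  if ((tagged & kHeapObjectTagMask) == kHeapObjectTag) return "strong HeapObject";
  return "unreachable";
}

int main() {
  std::printf("%s\n", Classify(42u << 1));    // Smi: payload shifted up, low bit 0.
  std::printf("%s\n", Classify(0x1000 | 1));  // Strong reference to a HeapObject.
  std::printf("%s\n", Classify(0x1000 | 3));  // Weak reference to a HeapObject.
}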

template <size_t tagged_ptr_size>
struct SmiTagging;

constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
constexpr uintptr_t kUintptrAllBitsSet =
    static_cast<uintptr_t>(kIntptrAllBitsSet);

// Smi constants for systems where tagged pointer is a 32-bit value.
template <>
struct SmiTagging<4> {
  enum { kSmiShiftSize = 0, kSmiValueSize = 31 };

  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  V8_INLINE static int SmiToInt(const internal::Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Truncate and shift down (requires >> to be sign extending).
    return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
  }
  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    // Is value in range [kSmiMinValue, kSmiMaxValue].
    // Use unsigned operations in order to avoid undefined behaviour in case of
    // signed integer overflow.
    return (static_cast<uintptr_t>(value) -
            static_cast<uintptr_t>(kSmiMinValue)) <=
           (static_cast<uintptr_t>(kSmiMaxValue) -
            static_cast<uintptr_t>(kSmiMinValue));
  }
};
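SmiTagging<4>::IsValidSmi above folds the two-sided range test kSmiMinValue <= value <= kSmiMaxValue into a single unsigned comparison: subtracting kSmiMinValue with wrap-around arithmetic slides the valid window down to start at zero, so no signed overflow can occur. A standalone sketch of the same trick with the 31-bit Smi bounds written out (illustrative, not part of the header):

#include <cassert>
#include <cstdint>

// Range check without signed overflow: (unsigned)(v - min) <= (unsigned)(max - min).
constexpr int32_t kMin = -(1 << 30);     // kSmiMinValue for 31-bit Smis.
constexpr int32_t kMax = (1 << 30) - 1;  // kSmiMaxValue for 31-bit Smis.

constexpr bool IsValidSmi31(intptr_t value) {
  return (static_cast<uintptr_t>(value) - static_cast<uintptr_t>(kMin)) <=
         (static_cast<uintptr_t>(kMax) - static_cast<uintptr_t>(kMin));
}

int main() {
  assert(IsValidSmi31(0));
  assert(IsValidSmi31(kMin) && IsValidSmi31(kMax));
  assert(!IsValidSmi31(static_cast<intptr_t>(kMax) + 1));
  assert(!IsValidSmi31(static_cast<intptr_t>(kMin) - 1));
}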

// Smi constants for systems where tagged pointer is a 64-bit value.
template <>
struct SmiTagging<8> {
  enum { kSmiShiftSize = 31, kSmiValueSize = 32 };

  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  V8_INLINE static int SmiToInt(const internal::Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Shift down and throw away top 32 bits.
    return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
  }
  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    // To be representable as a long smi, the value must be a 32-bit integer.
    return (value == static_cast<int32_t>(value));
  }
};

#ifdef V8_COMPRESS_POINTERS
static_assert(
    kApiSystemPointerSize == kApiInt64Size,
    "Pointer compression can be enabled only for 64-bit architectures");
const int kApiTaggedSize = kApiInt32Size;
#else
const int kApiTaggedSize = kApiSystemPointerSize;
#endif

constexpr bool PointerCompressionIsEnabled() {
  return kApiTaggedSize != kApiSystemPointerSize;
}

constexpr bool HeapSandboxIsEnabled() {
#ifdef V8_HEAP_SANDBOX
  return true;
#else
  return false;
#endif
}
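Because PointerCompressionIsEnabled() and HeapSandboxIsEnabled() are constexpr, build-configuration decisions can be made at compile time. A hypothetical embedder-side usage sketch (TaggedFieldWidth is an invented helper, not part of V8's API):

#include "v8-internal.h"

// Invented helper: the width of an on-heap tagged slot for this build.
constexpr int TaggedFieldWidth() {
  return v8::internal::PointerCompressionIsEnabled()
             ? v8::internal::kApiInt32Size           // 4-byte compressed slots.
             : v8::internal::kApiSystemPointerSize;  // Full pointer-sized slots.
}

static_assert(TaggedFieldWidth() == v8::internal::kApiTaggedSize,
              "kApiTaggedSize tracks the pointer-compression setting");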

using ExternalPointer_t = Address;

#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
#else
using PlatformSmiTagging = SmiTagging<kApiTaggedSize>;
#endif

// TODO(ishell): Consider adding kSmiShiftBits = kSmiShiftSize + kSmiTagSize
// since it's used much more often than the individual constants.
const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }

V8_INLINE static constexpr internal::Address IntToSmi(int value) {
  return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
         kSmiTag;
}
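IntToSmi shifts the payload up by kSmiTagSize + kSmiShiftSize and ors in kSmiTag (zero), and PlatformSmiTagging::SmiToInt undoes this with a sign-extending shift. A standalone round-trip sketch, assuming a 64-bit build without V8_31BIT_SMIS_ON_64BIT_ARCH (so kSmiTagSize = 1, kSmiShiftSize = 31); it mirrors the header's arithmetic rather than calling into it:

#include <cassert>
#include <cstdint>

// Mirrors SmiTagging<8> / IntToSmi from v8-internal.h (64-bit, 32-bit Smi values).
constexpr int kSmiTagSize = 1;
constexpr int kSmiShiftSize = 31;

constexpr uint64_t IntToSmi(int value) {
  // kSmiTag == 0, so there is nothing to or in after the shift.
  return static_cast<uint64_t>(static_cast<int64_t>(value))
         << (kSmiTagSize + kSmiShiftSize);
}

constexpr int SmiToInt(uint64_t tagged) {
  // Arithmetic shift drops the tag and shift bits while keeping the sign.
  return static_cast<int>(static_cast<int64_t>(tagged) >>
                          (kSmiTagSize + kSmiShiftSize));
}

int main() {
  assert(SmiToInt(IntToSmi(0)) == 0);
  assert(SmiToInt(IntToSmi(12345)) == 12345);
  assert(SmiToInt(IntToSmi(-1)) == -1);
  assert((IntToSmi(7) & 1) == 0);  // Low bit 0 marks the word as a Smi.
}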

// {obj} must be the raw tagged pointer representation of a HeapObject
// that's guaranteed to never be in ReadOnlySpace.
V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);

// Returns whether we need to throw when an error occurs. This infers the
// language mode based on the current context and the closure. This returns
// true if the language mode is strict.
V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);

/**
 * This class exports constants and functionality from within v8 that
 * is necessary to implement inline functions in the v8 api. Don't
 * depend on functions and constants defined here.
 */
class Internals {
 public:
  // These values match non-compiler-dependent values defined within
  // the implementation of v8.
  static const int kHeapObjectMapOffset = 0;
  static const int kMapInstanceTypeOffset = 1 * kApiTaggedSize + kApiInt32Size;
  static const int kStringResourceOffset =
      1 * kApiTaggedSize + 2 * kApiInt32Size;

  static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize;
  static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
  static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
  static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize;
  static const int kFullStringRepresentationMask = 0x0f;
  static const int kStringEncodingMask = 0x8;
  static const int kExternalTwoByteRepresentationTag = 0x02;
  static const int kExternalOneByteRepresentationTag = 0x0a;

  static const uint32_t kNumIsolateDataSlots = 4;

  // IsolateData layout guarantees.
  static const int kIsolateEmbedderDataOffset = 0;
  static const int kExternalMemoryOffset =
      kNumIsolateDataSlots * kApiSystemPointerSize;
  static const int kExternalMemoryLimitOffset =
      kExternalMemoryOffset + kApiInt64Size;
  static const int kExternalMemoryLowSinceMarkCompactOffset =
      kExternalMemoryLimitOffset + kApiInt64Size;
  static const int kIsolateFastCCallCallerFpOffset =
      kExternalMemoryLowSinceMarkCompactOffset + kApiInt64Size;
  static const int kIsolateFastCCallCallerPcOffset =
      kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
  static const int kIsolateStackGuardOffset =
      kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
  static const int kIsolateRootsOffset =
      kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;

  static const int kUndefinedValueRootIndex = 4;
  static const int kTheHoleValueRootIndex = 5;
  static const int kNullValueRootIndex = 6;
  static const int kTrueValueRootIndex = 7;
  static const int kFalseValueRootIndex = 8;
  static const int kEmptyStringRootIndex = 9;

  static const int kNodeClassIdOffset = 1 * kApiSystemPointerSize;
  static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
  static const int kNodeStateMask = 0x7;
  static const int kNodeStateIsWeakValue = 2;
  static const int kNodeStateIsPendingValue = 3;

  static const int kFirstNonstringType = 0x40;
  static const int kOddballType = 0x43;
  static const int kForeignType = 0x46;
  static const int kJSSpecialApiObjectType = 0x410;
  static const int kJSApiObjectType = 0x420;
  static const int kJSObjectType = 0x421;

  static const int kUndefinedOddballKind = 5;
  static const int kNullOddballKind = 3;

  // Constants used by PropertyCallbackInfo to check if we should throw when an
  // error occurs.
  static const int kThrowOnError = 0;
  static const int kDontThrow = 1;
  static const int kInferShouldThrowMode = 2;

  // Soft limit for AdjustAmountOfExternalAllocatedMemory. Trigger an
  // incremental GC once the external memory reaches this limit.
  static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;

  V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
  V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
#ifdef V8_ENABLE_CHECKS
    CheckInitializedImpl(isolate);
#endif
  }

  V8_INLINE static bool HasHeapObjectTag(const internal::Address value) {
    return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
  }

  V8_INLINE static int SmiValue(const internal::Address value) {
    return PlatformSmiTagging::SmiToInt(value);
  }

  V8_INLINE static constexpr internal::Address IntToSmi(int value) {
    return internal::IntToSmi(value);
  }

  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    return PlatformSmiTagging::IsValidSmi(value);
  }

  V8_INLINE static int GetInstanceType(const internal::Address obj) {
    typedef internal::Address A;
    A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
    return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
  }

  V8_INLINE static int GetOddballKind(const internal::Address obj) {
    return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
  }

  V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
    int representation = (instance_type & kFullStringRepresentationMask);
    return representation == kExternalTwoByteRepresentationTag;
  }

  V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & static_cast<uint8_t>(1U << shift);
  }

  V8_INLINE static void UpdateNodeFlag(internal::Address* obj, bool value,
                                       int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    uint8_t mask = static_cast<uint8_t>(1U << shift);
    *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
  }

  V8_INLINE static uint8_t GetNodeState(internal::Address* obj) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & kNodeStateMask;
  }

  V8_INLINE static void UpdateNodeState(internal::Address* obj, uint8_t value) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
  }

  V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
                                        void* data) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    *reinterpret_cast<void**>(addr) = data;
  }

  V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
                                         uint32_t slot) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    return *reinterpret_cast<void* const*>(addr);
  }

  V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateRootsOffset +
                             index * kApiSystemPointerSize;
    return reinterpret_cast<internal::Address*>(addr);
  }
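SetEmbedderData, GetEmbedderData and GetRoot lean on the IsolateData layout guarantees above: the v8::Isolate* is treated as a raw base address, and a slot is found at base + fixed offset + slot * kApiSystemPointerSize. A simplified standalone sketch of the same addressing scheme, with a plain struct standing in for the isolate (hypothetical layout, not V8's):

#include <cassert>
#include <cstdint>

// Stand-in for the isolate: embedder slots live at a known offset from the base.
struct FakeIsolateData {
  void* embedder_slots[4];  // kNumIsolateDataSlots == 4 in the header.
};

constexpr int kEmbedderDataOffset = 0;       // Plays the role of kIsolateEmbedderDataOffset.
constexpr int kPointerSize = sizeof(void*);  // Plays the role of kApiSystemPointerSize.

void* GetEmbedderData(const FakeIsolateData* isolate, uint32_t slot) {
  // Same arithmetic as Internals::GetEmbedderData: base + offset + slot * size.
  uintptr_t addr = reinterpret_cast<uintptr_t>(isolate) + kEmbedderDataOffset +
                   slot * kPointerSize;
  return *reinterpret_cast<void* const*>(addr);
}

int main() {
  FakeIsolateData iso{};
  int payload = 42;
  iso.embedder_slots[2] = &payload;
  assert(GetEmbedderData(&iso, 2) == &payload);
}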

  template <typename T>
  V8_INLINE static T ReadRawField(internal::Address heap_object_ptr,
                                  int offset) {
    internal::Address addr = heap_object_ptr + offset - kHeapObjectTag;
#ifdef V8_COMPRESS_POINTERS
    if (sizeof(T) > kApiTaggedSize) {
      // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
      // fields (external pointers, doubles and BigInt data) are only
      // kTaggedSize aligned so we have to use unaligned pointer friendly way of
      // accessing them in order to avoid undefined behavior in C++ code.
      T r;
      memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
      return r;
    }
#endif
    return *reinterpret_cast<const T*>(addr);
  }
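The memcpy in ReadRawField is the standard C++-conformant way to load a value from an address that may not satisfy the type's alignment: with pointer compression, 8-byte fields are only 4-byte (kTaggedSize) aligned, and dereferencing a misaligned double* or int64_t* would be undefined behavior. A standalone sketch of the same pattern (not part of the header):

#include <cassert>
#include <cstdint>
#include <cstring>

// Read a T from a possibly misaligned address without invoking UB.
template <typename T>
T ReadUnaligned(const void* addr) {
  T result;
  std::memcpy(&result, addr, sizeof(T));  // Compilers lower this to a plain load.
  return result;
}

int main() {
  // A byte buffer where a 64-bit value starts at offset 4 (only 4-byte aligned).
  alignas(8) unsigned char buffer[16] = {};
  uint64_t value = 0x1122334455667788ULL;
  std::memcpy(buffer + 4, &value, sizeof(value));
  assert(ReadUnaligned<uint64_t>(buffer + 4) == value);
}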

  V8_INLINE static internal::Address ReadTaggedPointerField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
    return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }
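Under V8_COMPRESS_POINTERS a tagged field stores only the low 32 bits of the target; the full address is rebuilt by adding the isolate root, which GetRootFromOnHeapAddress recovers from any on-heap address by masking with the 4 GB isolate alignment. A standalone sketch of that arithmetic with made-up addresses (not V8's real heap layout):

#include <cassert>
#include <cstdint>

// Mirrors GetRootFromOnHeapAddress / field decompression from v8-internal.h.
constexpr uint64_t kIsolateRootAlignment = uint64_t{1} << 32;  // 4 GB cage.

constexpr uint64_t GetRootFromOnHeapAddress(uint64_t addr) {
  // The isolate root is the cage base: clear the low 32 bits.
  return addr & ~(kIsolateRootAlignment - 1);
}

constexpr uint64_t Decompress(uint64_t any_on_heap_addr, uint32_t compressed) {
  return GetRootFromOnHeapAddress(any_on_heap_addr) +
         static_cast<uint64_t>(compressed);
}

int main() {
  const uint64_t root = uint64_t{0x2a} << 32;       // Hypothetical cage base.
  const uint64_t field_holder = root + 0x1000 + 1;  // Some tagged object inside it.
  const uint32_t compressed = 0x2468 | 1;           // Low 32 bits of the target.
  assert(Decompress(field_holder, compressed) == root + 0x2468 + 1);
}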

  V8_INLINE static internal::Address ReadTaggedSignedField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
    return static_cast<internal::Address>(static_cast<uintptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }

  V8_INLINE static internal::Isolate* GetIsolateForHeapSandbox(
      internal::Address obj) {
#ifdef V8_HEAP_SANDBOX
    return internal::IsolateFromNeverReadOnlySpaceObject(obj);
#else
    // Not used in non-sandbox mode.
    return nullptr;
#endif
  }

  V8_INLINE static internal::Address ReadExternalPointerField(
      internal::Isolate* isolate, internal::Address heap_object_ptr,
      int offset) {
    internal::Address value = ReadRawField<Address>(heap_object_ptr, offset);
#ifdef V8_HEAP_SANDBOX
    // We currently have to treat zero as nullptr in embedder slots.
    if (value) value = DecodeExternalPointer(isolate, value);
#endif
    return value;
  }

#ifdef V8_COMPRESS_POINTERS
  // See v8:7703 or src/ptr-compr.* for details about pointer compression.
  static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
  static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;

  // See v8:10391 for details about V8 heap sandbox.
  static constexpr uint32_t kExternalPointerSalt =
      0x7fffffff & ~static_cast<uint32_t>(kHeapObjectTagMask);

  V8_INLINE static internal::Address GetRootFromOnHeapAddress(
      internal::Address addr) {
    return addr & -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
  }

  V8_INLINE static internal::Address DecompressTaggedAnyField(
      internal::Address heap_object_ptr, uint32_t value) {
    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
    return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
  }

  V8_INLINE static internal::Address DecodeExternalPointer(
      const Isolate* isolate, ExternalPointer_t encoded_pointer) {
#ifndef V8_HEAP_SANDBOX
    return encoded_pointer;
#else
    return encoded_pointer ^ kExternalPointerSalt;
#endif
  }
#endif  // V8_COMPRESS_POINTERS
};

// Only perform cast check for types derived from v8::Data since
// other types do not implement the Cast method.
template <bool PerformCheck>
struct CastCheck {
  template <class T>
  static void Perform(T* data);
};

template <>
template <class T>
void CastCheck<true>::Perform(T* data) {
  T::Cast(data);
}

template <>
template <class T>
void CastCheck<false>::Perform(T* data) {}

template <class T>
V8_INLINE void PerformCastCheck(T* data) {
  CastCheck<std::is_base_of<Data, T>::value>::Perform(data);
}
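PerformCastCheck picks one of the two CastCheck specializations at compile time: types derived from v8::Data get T::Cast invoked, while everything else compiles to a no-op and never needs a Cast method at all. A standalone sketch of the same dispatch with stand-in types (Data, Value and NotData here are local stubs, not the real V8 classes):

#include <type_traits>

// Stand-ins for v8::Data and a derived handle type.
struct Data {};
struct Value : Data {
  static void Cast(Value*) { /* real code would check the object's type here */ }
};
struct NotData {};  // Has no Cast() method at all.

template <bool PerformCheck>
struct CastCheck {
  template <class T>
  static void Perform(T* data);
};

template <>
template <class T>
void CastCheck<true>::Perform(T* data) {
  T::Cast(data);  // Only instantiated for types derived from Data.
}

template <>
template <class T>
void CastCheck<false>::Perform(T*) {}  // No-op; never references T::Cast.

template <class T>
void PerformCastCheck(T* data) {
  CastCheck<std::is_base_of<Data, T>::value>::Perform(data);
}

int main() {
  Value v;
  NotData n;
  PerformCastCheck(&v);  // Calls Value::Cast.
  PerformCastCheck(&n);  // Still compiles even though NotData has no Cast().
}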

// A base class for backing stores, which is needed due to vagaries of
// how static casts work with std::shared_ptr.
class BackingStoreBase {};

}  // namespace internal
}  // namespace v8

#endif  // INCLUDE_V8_INTERNAL_H_