v8  8.6.395 (node 15.0.1)
V8 is Google's open source JavaScript engine
v8-internal.h
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_V8_INTERNAL_H_
#define INCLUDE_V8_INTERNAL_H_

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <type_traits>

#include "v8-version.h"  // NOLINT(build/include_directory)
#include "v8config.h"    // NOLINT(build/include_directory)

namespace v8 {

class Context;
class Data;
class Isolate;

namespace internal {

class Isolate;

typedef uintptr_t Address;
static const Address kNullAddress = 0;

/**
 * Configuration of tagging scheme.
 */
const int kApiSystemPointerSize = sizeof(void*);
const int kApiDoubleSize = sizeof(double);
const int kApiInt32Size = sizeof(int32_t);
const int kApiInt64Size = sizeof(int64_t);

// Tag information for HeapObject.
const int kHeapObjectTag = 1;
const int kWeakHeapObjectTag = 3;
const int kHeapObjectTagSize = 2;
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;

// Tag information for Smi.
const int kSmiTag = 0;
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
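// Editor's note (illustrative, not part of the original header): the low bits
// of a tagged value identify what it is. A Smi has the low bit clear and
// carries its payload in the upper bits; a strong HeapObject pointer has low
// bits 01; a weak HeapObject reference has low bits 11. For example, on a
// 64-bit build with full-sized Smis, the Smi 42 is encoded as 0x2A00000000,
// while a HeapObject at address 0x1000 is referenced as 0x1001.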

template <size_t tagged_ptr_size>
struct SmiTagging;

constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
constexpr uintptr_t kUintptrAllBitsSet =
    static_cast<uintptr_t>(kIntptrAllBitsSet);

// Smi constants for systems where tagged pointer is a 32-bit value.
template <>
struct SmiTagging<4> {
  enum { kSmiShiftSize = 0, kSmiValueSize = 31 };

  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  V8_INLINE static int SmiToInt(const internal::Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Truncate and shift down (requires >> to be sign extending).
    return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
  }
  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    // Is value in range [kSmiMinValue, kSmiMaxValue].
    // Use unsigned operations in order to avoid undefined behaviour in case of
    // signed integer overflow.
    return (static_cast<uintptr_t>(value) -
            static_cast<uintptr_t>(kSmiMinValue)) <=
           (static_cast<uintptr_t>(kSmiMaxValue) -
            static_cast<uintptr_t>(kSmiMinValue));
  }
};

// Smi constants for systems where tagged pointer is a 64-bit value.
template <>
struct SmiTagging<8> {
  enum { kSmiShiftSize = 31, kSmiValueSize = 32 };

  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  V8_INLINE static int SmiToInt(const internal::Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Shift down and throw away top 32 bits.
    return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
  }
  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    // To be representable as a long smi, the value must be a 32-bit integer.
    return (value == static_cast<int32_t>(value));
  }
};
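
// Editor's illustrative sketch (not part of the original header): the Smi
// payload is 31 bits wide when tagged values are 32 bits and 32 bits wide
// when they are 64 bits, so SmiTagging<4> encodes 42 as (42 << 1) | kSmiTag
// while SmiTagging<8> encodes it as Address{42} << 32.
static_assert(SmiTagging<4>::kSmiValueSize == 31 &&
                  SmiTagging<8>::kSmiValueSize == 32,
              "Smi payload width is determined by the tagged pointer size");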

#ifdef V8_COMPRESS_POINTERS
static_assert(
    kApiSystemPointerSize == kApiInt64Size,
    "Pointer compression can be enabled only for 64-bit architectures");
const int kApiTaggedSize = kApiInt32Size;
#else
const int kApiTaggedSize = kApiSystemPointerSize;
#endif

constexpr bool PointerCompressionIsEnabled() {
  return kApiTaggedSize != kApiSystemPointerSize;
}

constexpr bool HeapSandboxIsEnabled() {
#ifdef V8_HEAP_SANDBOX
  return true;
#else
  return false;
#endif
}

using ExternalPointer_t = Address;

#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
#else
using PlatformSmiTagging = SmiTagging<kApiTaggedSize>;
#endif

// TODO(ishell): Consider adding kSmiShiftBits = kSmiShiftSize + kSmiTagSize
// since it's used much more often than the individual constants.
const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }

V8_INLINE static constexpr internal::Address IntToSmi(int value) {
  return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
         kSmiTag;
}
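
// Editor's illustrative sketch (not part of the original header; the function
// name is hypothetical): IntToSmi and PlatformSmiTagging::SmiToInt are
// inverses for every value accepted by PlatformSmiTagging::IsValidSmi.
V8_INLINE static bool SmiRoundTripExample(int value) {
  return !PlatformSmiTagging::IsValidSmi(value) ||
         PlatformSmiTagging::SmiToInt(IntToSmi(value)) == value;
}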

// {obj} must be the raw tagged pointer representation of a HeapObject
// that's guaranteed to never be in ReadOnlySpace.
V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);

// Returns whether we need to throw when an error occurs. This infers the
// language mode based on the current context and the closure. This returns
// true if the language mode is strict.
V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);

/**
 * This class exports constants and functionality from within v8 that
 * is necessary to implement inline functions in the v8 api. Don't
 * depend on functions and constants defined here.
 */
class Internals {
 public:
  // These values match non-compiler-dependent values defined within
  // the implementation of v8.
  static const int kHeapObjectMapOffset = 0;
  static const int kMapInstanceTypeOffset = 1 * kApiTaggedSize + kApiInt32Size;
  static const int kStringResourceOffset =
      1 * kApiTaggedSize + 2 * kApiInt32Size;

  static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize;
  static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
  static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
  static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize;
  static const int kFullStringRepresentationMask = 0x0f;
  static const int kStringEncodingMask = 0x8;
  static const int kExternalTwoByteRepresentationTag = 0x02;
  static const int kExternalOneByteRepresentationTag = 0x0a;

  static const uint32_t kNumIsolateDataSlots = 4;

  // IsolateData layout guarantees.
  static const int kIsolateEmbedderDataOffset = 0;
  static const int kIsolateFastCCallCallerFpOffset =
      kNumIsolateDataSlots * kApiSystemPointerSize;
  static const int kIsolateFastCCallCallerPcOffset =
      kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
  static const int kIsolateStackGuardOffset =
      kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
  static const int kIsolateRootsOffset =
      kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;

  static const int kUndefinedValueRootIndex = 4;
  static const int kTheHoleValueRootIndex = 5;
  static const int kNullValueRootIndex = 6;
  static const int kTrueValueRootIndex = 7;
  static const int kFalseValueRootIndex = 8;
  static const int kEmptyStringRootIndex = 9;

  static const int kNodeClassIdOffset = 1 * kApiSystemPointerSize;
  static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
  static const int kNodeStateMask = 0x7;
  static const int kNodeStateIsWeakValue = 2;
  static const int kNodeStateIsPendingValue = 3;

  static const int kFirstNonstringType = 0x40;
  static const int kOddballType = 0x43;
  static const int kForeignType = 0x46;
  static const int kJSSpecialApiObjectType = 0x410;
  static const int kJSApiObjectType = 0x420;
  static const int kJSObjectType = 0x421;

  static const int kUndefinedOddballKind = 5;
  static const int kNullOddballKind = 3;

  // Constants used by PropertyCallbackInfo to check if we should throw when an
  // error occurs.
  static const int kThrowOnError = 0;
  static const int kDontThrow = 1;
  static const int kInferShouldThrowMode = 2;

  // Soft limit for AdjustAmountOfExternalAllocatedMemory. Trigger an
  // incremental GC once the external memory reaches this limit.
  static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
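  // Editor's note (illustrative, not part of the original header): embedders
  // report external allocations tied to JS objects through the public
  // v8::Isolate::AdjustAmountOfExternalAllocatedMemory(int64_t) API; once the
  // reported total crosses kExternalAllocationSoftLimit, V8 starts an
  // incremental GC rather than waiting for the on-heap memory to fill up.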

  V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
  V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
#ifdef V8_ENABLE_CHECKS
    CheckInitializedImpl(isolate);
#endif
  }

  V8_INLINE static bool HasHeapObjectTag(const internal::Address value) {
    return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
  }

  V8_INLINE static int SmiValue(const internal::Address value) {
    return PlatformSmiTagging::SmiToInt(value);
  }

  V8_INLINE static constexpr internal::Address IntToSmi(int value) {
    return internal::IntToSmi(value);
  }

  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    return PlatformSmiTagging::IsValidSmi(value);
  }

  V8_INLINE static int GetInstanceType(const internal::Address obj) {
    typedef internal::Address A;
    A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
    return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
  }

  V8_INLINE static int GetOddballKind(const internal::Address obj) {
    return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
  }

  V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
    int representation = (instance_type & kFullStringRepresentationMask);
    return representation == kExternalTwoByteRepresentationTag;
  }

  V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & static_cast<uint8_t>(1U << shift);
  }

  V8_INLINE static void UpdateNodeFlag(internal::Address* obj, bool value,
                                       int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    uint8_t mask = static_cast<uint8_t>(1U << shift);
    *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
  }

  V8_INLINE static uint8_t GetNodeState(internal::Address* obj) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & kNodeStateMask;
  }

  V8_INLINE static void UpdateNodeState(internal::Address* obj, uint8_t value) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
  }

  V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
                                        void* data) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    *reinterpret_cast<void**>(addr) = data;
  }

  V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
                                         uint32_t slot) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    return *reinterpret_cast<void* const*>(addr);
  }
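
  // Editor's note (illustrative, not part of the original header): these two
  // helpers are the inline fast path behind v8::Isolate::SetData()/GetData(),
  // e.g.
  //
  //   Internals::SetEmbedderData(isolate, /*slot=*/1, my_state);
  //   void* state = Internals::GetEmbedderData(isolate, /*slot=*/1);
  //
  // `slot` must be smaller than kNumIsolateDataSlots; the slots live at the
  // very start of the Isolate object (kIsolateEmbedderDataOffset == 0).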

  V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateRootsOffset +
                             index * kApiSystemPointerSize;
    return reinterpret_cast<internal::Address*>(addr);
  }

  template <typename T>
  V8_INLINE static T ReadRawField(internal::Address heap_object_ptr,
                                  int offset) {
    internal::Address addr = heap_object_ptr + offset - kHeapObjectTag;
#ifdef V8_COMPRESS_POINTERS
    if (sizeof(T) > kApiTaggedSize) {
      // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
      // fields (external pointers, doubles and BigInt data) are only
      // kTaggedSize aligned so we have to use unaligned pointer friendly way of
      // accessing them in order to avoid undefined behavior in C++ code.
      T r;
      memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
      return r;
    }
#endif
    return *reinterpret_cast<const T*>(addr);
  }

  V8_INLINE static internal::Address ReadTaggedPointerField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
    return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }

  V8_INLINE static internal::Address ReadTaggedSignedField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
    return static_cast<internal::Address>(static_cast<uintptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }
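
  // Editor's note (illustrative, not part of the original header): with
  // pointer compression, an on-heap tagged field stores only the low 32 bits
  // of the referenced address. ReadTaggedPointerField() re-adds the isolate
  // root (the 4GB-aligned base computed by GetRootFromOnHeapAddress()) to
  // rebuild the full pointer, whereas ReadTaggedSignedField() returns the raw
  // 32-bit value unchanged because a Smi's payload does not depend on the
  // heap base.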

  V8_INLINE static internal::Isolate* GetIsolateForHeapSandbox(
      internal::Address obj) {
#ifdef V8_HEAP_SANDBOX
    return internal::IsolateFromNeverReadOnlySpaceObject(obj);
#else
    // Not used in non-sandbox mode.
    return nullptr;
#endif
  }

  V8_INLINE static internal::Address ReadExternalPointerField(
      internal::Isolate* isolate, internal::Address heap_object_ptr,
      int offset) {
    internal::Address value = ReadRawField<Address>(heap_object_ptr, offset);
#ifdef V8_HEAP_SANDBOX
    // We currently have to treat zero as nullptr in embedder slots.
    if (value) value = DecodeExternalPointer(isolate, value);
#endif
    return value;
  }

#ifdef V8_COMPRESS_POINTERS
  // See v8:7703 or src/ptr-compr.* for details about pointer compression.
  static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
  static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;

  // See v8:10391 for details about V8 heap sandbox.
  static constexpr uint32_t kExternalPointerSalt =
      0x7fffffff & ~static_cast<uint32_t>(kHeapObjectTagMask);

  V8_INLINE static internal::Address GetRootFromOnHeapAddress(
      internal::Address addr) {
    return addr & -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
  }

  V8_INLINE static internal::Address DecompressTaggedAnyField(
      internal::Address heap_object_ptr, uint32_t value) {
    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
    return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
  }

  V8_INLINE static Address DecodeExternalPointer(
      const Isolate* isolate, ExternalPointer_t encoded_pointer) {
#ifndef V8_HEAP_SANDBOX
    return encoded_pointer;
#else
    return encoded_pointer ^ kExternalPointerSalt;
#endif
  }
#endif  // V8_COMPRESS_POINTERS
};
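
// Editor's illustrative sketch (not part of the original header; the function
// name is hypothetical): the kind of inline fast path the public v8.h API
// builds on top of Internals. `obj` is assumed to be the raw tagged
// representation behind a v8::Local handle.
V8_INLINE static bool QuickIsUndefinedExample(internal::Address obj) {
  // A Smi has no map and can never be the undefined oddball.
  if (!Internals::HasHeapObjectTag(obj)) return false;
  if (Internals::GetInstanceType(obj) != Internals::kOddballType) return false;
  return Internals::GetOddballKind(obj) == Internals::kUndefinedOddballKind;
}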

// Only perform cast check for types derived from v8::Data since
// other types do not implement the Cast method.
template <bool PerformCheck>
struct CastCheck {
  template <class T>
  static void Perform(T* data);
};

template <>
template <class T>
void CastCheck<true>::Perform(T* data) {
  T::Cast(data);
}

template <>
template <class T>
void CastCheck<false>::Perform(T* data) {}

template <class T>
V8_INLINE void PerformCastCheck(T* data) {
  CastCheck<std::is_base_of<Data, T>::value>::Perform(data);
}
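
// Editor's note (illustrative, not part of the original header): for a type
// derived from v8::Data (e.g. v8::Context), PerformCastCheck(data) expands to
// T::Cast(data), which performs the type check when checks are enabled; for
// any other T the false specialization is selected and the call compiles to
// an empty function that the optimizer removes.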

// A base class for backing stores, which is needed due to vagaries of
// how static casts work with std::shared_ptr.
class BackingStoreBase {};

}  // namespace internal
}  // namespace v8

#endif  // INCLUDE_V8_INTERNAL_H_