v8  7.8.279 (node 12.19.1)
V8 is Google's open source JavaScript engine
v8-internal.h
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_V8_INTERNAL_H_
#define INCLUDE_V8_INTERNAL_H_

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <type_traits>

#include "v8-version.h"  // NOLINT(build/include)
#include "v8config.h"    // NOLINT(build/include)

namespace v8 {

class Context;
class Data;
class Isolate;

namespace internal {

class Isolate;

typedef uintptr_t Address;
static const Address kNullAddress = 0;

/**
 * Configuration of tagging scheme.
 */
const int kApiSystemPointerSize = sizeof(void*);
const int kApiDoubleSize = sizeof(double);
const int kApiInt32Size = sizeof(int32_t);
const int kApiInt64Size = sizeof(int64_t);

// Tag information for HeapObject.
const int kHeapObjectTag = 1;
const int kWeakHeapObjectTag = 3;
const int kHeapObjectTagSize = 2;
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;

// Tag information for Smi.
const int kSmiTag = 0;
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
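// A tagged value encodes its kind in the low bits: a Smi keeps the low bit
// clear (its payload is shifted left past the tag), a strong HeapObject
// pointer has the low two bits set to 01, and a weak HeapObject reference
// has them set to 11 (kWeakHeapObjectTag).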

template <size_t tagged_ptr_size>
struct SmiTagging;

constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
constexpr uintptr_t kUintptrAllBitsSet =
    static_cast<uintptr_t>(kIntptrAllBitsSet);

// Smi constants for systems where tagged pointer is a 32-bit value.
template <>
struct SmiTagging<4> {
  enum { kSmiShiftSize = 0, kSmiValueSize = 31 };

  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  V8_INLINE static int SmiToInt(const internal::Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Truncate and shift down (requires >> to be sign extending).
    return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
  }
  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    // Is value in range [kSmiMinValue, kSmiMaxValue].
    // Use unsigned operations in order to avoid undefined behaviour in case of
    // signed integer overflow.
    return (static_cast<uintptr_t>(value) -
            static_cast<uintptr_t>(kSmiMinValue)) <=
           (static_cast<uintptr_t>(kSmiMaxValue) -
            static_cast<uintptr_t>(kSmiMinValue));
  }
};
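// Example: with 32-bit tagged values the 31-bit Smi payload sits above the
// tag bit, so the integer 42 is stored as 42 << 1 == 84 and recovered by an
// arithmetic right shift of kSmiTagSize + kSmiShiftSize == 1.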

// Smi constants for systems where tagged pointer is a 64-bit value.
template <>
struct SmiTagging<8> {
  enum { kSmiShiftSize = 31, kSmiValueSize = 32 };

  static constexpr intptr_t kSmiMinValue =
      static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
  static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);

  V8_INLINE static int SmiToInt(const internal::Address value) {
    int shift_bits = kSmiTagSize + kSmiShiftSize;
    // Shift down and throw away top 32 bits.
    return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
  }
  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    // To be representable as a long smi, the value must be a 32-bit integer.
    return (value == static_cast<int32_t>(value));
  }
};
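// Example: with 64-bit tagged values the full 32-bit payload lives in the
// upper word, so 42 is stored as Address{42} << 32 and SmiToInt() shifts
// right by kSmiTagSize + kSmiShiftSize == 32 to recover it.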

#ifdef V8_COMPRESS_POINTERS
static_assert(
    kApiSystemPointerSize == kApiInt64Size,
    "Pointer compression can be enabled only for 64-bit architectures");
const int kApiTaggedSize = kApiInt32Size;
#else
const int kApiTaggedSize = kApiSystemPointerSize;
#endif

#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
#else
using PlatformSmiTagging = SmiTagging<kApiTaggedSize>;
#endif

const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }

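// IntToSmi() applies the encoding described above: with SmiTagging<8>
// (kSmiShiftSize == 31) the integer 2 becomes Address{2} << 32, while with
// SmiTagging<4> it becomes 2 << 1 == 4.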
V8_INLINE static constexpr internal::Address IntToSmi(int value) {
  return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
         kSmiTag;
}

/**
 * This class exports constants and functionality from within v8 that
 * is necessary to implement inline functions in the v8 api. Don't
 * depend on functions and constants defined here.
 */
class Internals {
 public:
  // These values match non-compiler-dependent values defined within
  // the implementation of v8.
  static const int kHeapObjectMapOffset = 0;
  static const int kMapInstanceTypeOffset = 1 * kApiTaggedSize + kApiInt32Size;
  static const int kStringResourceOffset =
      1 * kApiTaggedSize + 2 * kApiInt32Size;

  static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize;
  static const int kForeignAddressOffset = kApiTaggedSize;
  static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
  static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
  static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
  static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize;
  static const int kFullStringRepresentationMask = 0x0f;
  static const int kStringEncodingMask = 0x8;
  static const int kExternalTwoByteRepresentationTag = 0x02;
  static const int kExternalOneByteRepresentationTag = 0x0a;

  static const uint32_t kNumIsolateDataSlots = 4;

  // IsolateData layout guarantees.
  static const int kIsolateEmbedderDataOffset = 0;
  static const int kExternalMemoryOffset =
      kNumIsolateDataSlots * kApiSystemPointerSize;
  static const int kExternalMemoryLimitOffset =
      kExternalMemoryOffset + kApiInt64Size;
  static const int kExternalMemoryAtLastMarkCompactOffset =
      kExternalMemoryLimitOffset + kApiInt64Size;
  static const int kIsolateFastCCallCallerFpOffset =
      kExternalMemoryAtLastMarkCompactOffset + kApiInt64Size;
  static const int kIsolateFastCCallCallerPcOffset =
      kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
  static const int kIsolateStackGuardOffset =
      kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
  static const int kIsolateRootsOffset =
      kIsolateStackGuardOffset + 7 * kApiSystemPointerSize;

  static const int kUndefinedValueRootIndex = 4;
  static const int kTheHoleValueRootIndex = 5;
  static const int kNullValueRootIndex = 6;
  static const int kTrueValueRootIndex = 7;
  static const int kFalseValueRootIndex = 8;
  static const int kEmptyStringRootIndex = 9;

  static const int kNodeClassIdOffset = 1 * kApiSystemPointerSize;
  static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
  static const int kNodeStateMask = 0x7;
  static const int kNodeStateIsWeakValue = 2;
  static const int kNodeStateIsPendingValue = 3;

  static const int kFirstNonstringType = 0x40;
  static const int kOddballType = 0x43;
  static const int kForeignType = 0x46;
  static const int kJSSpecialApiObjectType = 0x410;
  static const int kJSApiObjectType = 0x420;
  static const int kJSObjectType = 0x421;

  static const int kUndefinedOddballKind = 5;
  static const int kNullOddballKind = 3;

  // Constants used by PropertyCallbackInfo to check if we should throw when an
  // error occurs.
  static const int kThrowOnError = 0;
  static const int kDontThrow = 1;
  static const int kInferShouldThrowMode = 2;

  // Soft limit for AdjustAmountOfExternalAllocatedMemory. Trigger an
  // incremental GC once the external memory reaches this limit.
  static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;

  V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
  V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
#ifdef V8_ENABLE_CHECKS
    CheckInitializedImpl(isolate);
#endif
  }

  V8_INLINE static bool HasHeapObjectTag(const internal::Address value) {
    return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
  }

  V8_INLINE static int SmiValue(const internal::Address value) {
    return PlatformSmiTagging::SmiToInt(value);
  }

  V8_INLINE static constexpr internal::Address IntToSmi(int value) {
    return internal::IntToSmi(value);
  }

  V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
    return PlatformSmiTagging::IsValidSmi(value);
  }

  V8_INLINE static int GetInstanceType(const internal::Address obj) {
    typedef internal::Address A;
    A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
    return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
  }

  V8_INLINE static int GetOddballKind(const internal::Address obj) {
    return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
  }

  V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
    int representation = (instance_type & kFullStringRepresentationMask);
    return representation == kExternalTwoByteRepresentationTag;
  }

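  // The helpers below read and update the per-node flag byte at
  // kNodeFlagsOffset: the low three bits (kNodeStateMask) encode the node's
  // state, while the higher bits act as individual boolean flags selected by
  // the `shift` argument.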
  V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & static_cast<uint8_t>(1U << shift);
  }

  V8_INLINE static void UpdateNodeFlag(internal::Address* obj, bool value,
                                       int shift) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    uint8_t mask = static_cast<uint8_t>(1U << shift);
    *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
  }

  V8_INLINE static uint8_t GetNodeState(internal::Address* obj) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    return *addr & kNodeStateMask;
  }

  V8_INLINE static void UpdateNodeState(internal::Address* obj, uint8_t value) {
    uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
    *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
  }

  V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
                                        void* data) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    *reinterpret_cast<void**>(addr) = data;
  }

  V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
                                         uint32_t slot) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateEmbedderDataOffset +
                             slot * kApiSystemPointerSize;
    return *reinterpret_cast<void* const*>(addr);
  }

  V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) {
    internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
                             kIsolateRootsOffset +
                             index * kApiSystemPointerSize;
    return reinterpret_cast<internal::Address*>(addr);
  }

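  // The field accessors below take a tagged HeapObject pointer and subtract
  // kHeapObjectTag to obtain the object's real start address before adding
  // the field offset.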
  template <typename T>
  V8_INLINE static T ReadRawField(internal::Address heap_object_ptr,
                                  int offset) {
    internal::Address addr = heap_object_ptr + offset - kHeapObjectTag;
#ifdef V8_COMPRESS_POINTERS
    if (sizeof(T) > kApiTaggedSize) {
      // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
      // fields (external pointers, doubles and BigInt data) are only
      // kTaggedSize aligned so we have to use unaligned pointer friendly way of
      // accessing them in order to avoid undefined behavior in C++ code.
      T r;
      memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
      return r;
    }
#endif
    return *reinterpret_cast<const T*>(addr);
  }

  V8_INLINE static internal::Address ReadTaggedPointerField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    int32_t value = ReadRawField<int32_t>(heap_object_ptr, offset);
    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
    return root + static_cast<internal::Address>(static_cast<intptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }

  V8_INLINE static internal::Address ReadTaggedSignedField(
      internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
    int32_t value = ReadRawField<int32_t>(heap_object_ptr, offset);
    return static_cast<internal::Address>(static_cast<intptr_t>(value));
#else
    return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
  }
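  // Note: when V8_COMPRESS_POINTERS is defined, on-heap tagged slots are only
  // 32 bits wide. ReadTaggedPointerField() rebuilds the full pointer by adding
  // the isolate root derived from the object's own address, whereas
  // ReadTaggedSignedField() (Smis) only needs the sign-extended 32-bit value.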

#ifdef V8_COMPRESS_POINTERS
  // See v8:7703 or src/ptr-compr.* for details about pointer compression.
  static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
  static constexpr size_t kPtrComprIsolateRootBias =
      kPtrComprHeapReservationSize / 2;
  static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;

  V8_INLINE static internal::Address GetRootFromOnHeapAddress(
      internal::Address addr) {
    return (addr + kPtrComprIsolateRootBias) &
           -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
  }

  V8_INLINE static internal::Address DecompressTaggedAnyField(
      internal::Address heap_object_ptr, int32_t value) {
    internal::Address root_mask = static_cast<internal::Address>(
        -static_cast<intptr_t>(value & kSmiTagMask));
    internal::Address root_or_zero =
        root_mask & GetRootFromOnHeapAddress(heap_object_ptr);
    return root_or_zero +
           static_cast<internal::Address>(static_cast<intptr_t>(value));
  }
#endif  // V8_COMPRESS_POINTERS
};

// Only perform cast check for types derived from v8::Data since
// other types do not implement the Cast method.
template <bool PerformCheck>
struct CastCheck {
  template <class T>
  static void Perform(T* data);
};

template <>
template <class T>
void CastCheck<true>::Perform(T* data) {
  T::Cast(data);
}

template <>
template <class T>
void CastCheck<false>::Perform(T* data) {}

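// PerformCastCheck() dispatches at compile time: for types derived from
// v8::Data the CastCheck<true> specialization above invokes T::Cast(data),
// while for every other type CastCheck<false> compiles away to a no-op.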
template <class T>
V8_INLINE void PerformCastCheck(T* data) {
  CastCheck<std::is_base_of<Data, T>::value>::Perform(data);
}

// {obj} must be the raw tagged pointer representation of a HeapObject
// that's guaranteed to never be in ReadOnlySpace.
V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);

// Returns if we need to throw when an error occurs. This infers the language
// mode based on the current context and the closure. This returns true if the
// language mode is strict.
V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);

}  // namespace internal
}  // namespace v8

#endif  // INCLUDE_V8_INTERNAL_H_