v8 14.1.146 (node 25.0.0)
V8 is Google's open source JavaScript engine
Loading...
Searching...
No Matches
member-storage.h
Go to the documentation of this file.
1// Copyright 2022 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
6#define INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
7
8#include <atomic>
9#include <cstddef>
10#include <type_traits>
11
12#include "cppgc/internal/api-constants.h"
13#include "cppgc/internal/caged-heap.h"
14#include "cppgc/internal/logging.h"
15#include "cppgc/sentinel-pointer.h"
16#include "v8config.h" // NOLINT(build/include_directory)
17
18namespace cppgc {
19namespace internal {
20
// Describes the kind of pointer stored in a `Member` slot so that the write
// barrier knows whether it must decompress the value before recording it.
enum class WriteBarrierSlotType {
  kCompressed,
  kUncompressed,
};
25
26#if defined(CPPGC_POINTER_COMPRESSION)
27
#if defined(__clang__)
// Attribute const allows the compiler to assume that CageBaseGlobal::g_base_
// doesn't change (e.g. across calls) and thereby avoid redundant loads.
#define CPPGC_CONST __attribute__((const))
// Ensures the global base is initialized at compile time (no dynamic
// initializer), which the `const` attribute above relies on.
#define CPPGC_REQUIRE_CONSTANT_INIT \
  __attribute__((require_constant_initialization))
#else  // defined(__clang__)
#define CPPGC_CONST
#define CPPGC_REQUIRE_CONSTANT_INIT
#endif  // defined(__clang__)
38
40 public:
43 return g_base_.base;
44 }
45
46 V8_INLINE CPPGC_CONST static bool IsSet() {
48 return (g_base_.base & ~kLowerHalfWordMask) != 0;
49 }
50
51 private:
52 // We keep the lower halfword as ones to speed up decompression.
53 static constexpr uintptr_t kLowerHalfWordMask =
55
56 static union alignas(api_constants::kCachelineSize) Base {
60
61 CageBaseGlobal() = delete;
62
63 V8_INLINE static bool IsBaseConsistent() {
65 }
66
67 friend class CageBaseGlobalUpdater;
68};
69
70#undef CPPGC_REQUIRE_CONSTANT_INIT
71#undef CPPGC_CONST
72
74 public:
75 struct AtomicInitializerTag {};
76
77 using IntegralType = uint32_t;
78 static constexpr auto kWriteBarrierSlotType =
80
82 V8_INLINE explicit CompressedPointer(const void* value,
85 }
86 V8_INLINE explicit CompressedPointer(const void* ptr)
87 : value_(Compress(ptr)) {}
91
92 V8_INLINE const void* Load() const { return Decompress(value_); }
93 V8_INLINE const void* LoadAtomic() const {
94 return Decompress(
95 reinterpret_cast<const std::atomic<IntegralType>&>(value_).load(
97 }
98
99 V8_INLINE void Store(const void* ptr) { value_ = Compress(ptr); }
100 V8_INLINE void StoreAtomic(const void* value) {
101 reinterpret_cast<std::atomic<IntegralType>&>(value_).store(
103 }
104
105 V8_INLINE void Clear() { value_ = 0u; }
106 V8_INLINE bool IsCleared() const { return !value_; }
107
108 V8_INLINE bool IsSentinel() const { return value_ == kCompressedSentinel; }
109
110 V8_INLINE uint32_t GetAsInteger() const { return value_; }
111
113 return a.value_ == b.value_;
114 }
116 return a.value_ != b.value_;
117 }
119 return a.value_ < b.value_;
120 }
122 return a.value_ <= b.value_;
123 }
125 return a.value_ > b.value_;
126 }
128 return a.value_ >= b.value_;
129 }
130
131 static V8_INLINE IntegralType Compress(const void* ptr) {
132 static_assert(SentinelPointer::kSentinelValue ==
134 "The compression scheme relies on the sentinel encoded as 1 "
135 "<< kPointerCompressionShift");
136 static constexpr size_t kGigaCageMask =
138 static constexpr size_t kPointerCompressionShiftMask =
140
142 const uintptr_t base = CageBaseGlobal::Get();
144 (base & kGigaCageMask) ==
145 (reinterpret_cast<uintptr_t>(ptr) & kGigaCageMask));
147 (reinterpret_cast<uintptr_t>(ptr) & kPointerCompressionShiftMask) == 0);
148
149 const auto uptr = reinterpret_cast<uintptr_t>(ptr);
150 // Shift the pointer and truncate.
151 auto compressed = static_cast<IntegralType>(
153 // Normal compressed pointers must have the MSB set. This is guaranteed by
154 // the cage alignment.
156 (compressed & (1 << 31)));
157 return compressed;
158 }
159
160 static V8_INLINE void* Decompress(IntegralType ptr) {
162 const uintptr_t base = CageBaseGlobal::Get();
163 return Decompress(ptr, base);
164 }
165
169 // Sign-extend compressed pointer to full width. This ensure that normal
170 // pointers have only 1s in the base part of the address. It's also
171 // important to shift the unsigned value, as otherwise it would result in
172 // undefined behavior.
173 const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr))
175 // Set the base part of the address for normal compressed pointers. Note
176 // that nullptr and the sentinel value do not have 1s in the base part and
177 // remain as-is in this operation.
178 return reinterpret_cast<void*>(mask & base);
179 }
180
181 // For a given memory `address`, this method iterates all possible pointers
182 // that can be reasonably recovered with the current compression scheme and
183 // passes them to `callback`.
184 template <typename Callback>
185 static V8_INLINE void VisitPossiblePointers(const void* address,
187
188 private:
189 static constexpr IntegralType kCompressedSentinel =
192 // All constructors initialize `value_`. Do not add a default value here as it
193 // results in a non-atomic write on some builds, even when the atomic version
194 // of the constructor is used.
196};
197
198template <typename Callback>
199// static
202 const uintptr_t base = CageBaseGlobal::Get();
204 // We may have random compressed pointers on stack (e.g. due to inlined
205 // collections). These could be present in both halfwords.
207 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(address));
209 const uint32_t compressed_high = static_cast<uint32_t>(
210 reinterpret_cast<uintptr_t>(address) >> (sizeof(uint32_t) * CHAR_BIT));
212 // Iterate possible intermediate values, see `Decompress()`. The intermediate
213 // value of decompressing is a 64-bit value where 35 bits are the offset. We
214 // don't assume sign extension is stored and recover that part.
215 //
216 // Note that this case conveniently also recovers the full pointer.
217 static constexpr uintptr_t kBitForIntermediateValue =
219 static constexpr uintptr_t kSignExtensionMask =
220 ~((uintptr_t{1} << kBitForIntermediateValue) - 1);
222 reinterpret_cast<uintptr_t>(address) | kSignExtensionMask;
223 callback(reinterpret_cast<void*>(intermediate_sign_extended & base));
224}
225
226#endif // defined(CPPGC_POINTER_COMPRESSION)
227
228class V8_TRIVIAL_ABI RawPointer final {
229 public:
231
232 using IntegralType = uintptr_t;
233 static constexpr auto kWriteBarrierSlotType =
235
236 V8_INLINE RawPointer() : ptr_(nullptr) {}
237 V8_INLINE explicit RawPointer(const void* ptr, AtomicInitializerTag) {
238 StoreAtomic(ptr);
239 }
240 V8_INLINE explicit RawPointer(const void* ptr) : ptr_(ptr) {}
241
242 V8_INLINE const void* Load() const { return ptr_; }
243 V8_INLINE const void* LoadAtomic() const {
244 return reinterpret_cast<const std::atomic<const void*>&>(ptr_).load(
245 std::memory_order_relaxed);
246 }
247
248 V8_INLINE void Store(const void* ptr) { ptr_ = ptr; }
249 V8_INLINE void StoreAtomic(const void* ptr) {
250 reinterpret_cast<std::atomic<const void*>&>(ptr_).store(
251 ptr, std::memory_order_relaxed);
252 }
253
254 V8_INLINE void Clear() { ptr_ = nullptr; }
255 V8_INLINE bool IsCleared() const { return !ptr_; }
256
257 V8_INLINE bool IsSentinel() const { return ptr_ == kSentinelPointer; }
258
259 V8_INLINE uintptr_t GetAsInteger() const {
260 return reinterpret_cast<uintptr_t>(ptr_);
261 }
262
263 V8_INLINE friend bool operator==(RawPointer a, RawPointer b) {
264 return a.ptr_ == b.ptr_;
265 }
266 V8_INLINE friend bool operator!=(RawPointer a, RawPointer b) {
267 return a.ptr_ != b.ptr_;
268 }
269 V8_INLINE friend bool operator<(RawPointer a, RawPointer b) {
270 return a.ptr_ < b.ptr_;
271 }
272 V8_INLINE friend bool operator<=(RawPointer a, RawPointer b) {
273 return a.ptr_ <= b.ptr_;
274 }
275 V8_INLINE friend bool operator>(RawPointer a, RawPointer b) {
276 return a.ptr_ > b.ptr_;
277 }
278 V8_INLINE friend bool operator>=(RawPointer a, RawPointer b) {
279 return a.ptr_ >= b.ptr_;
280 }
281
282 template <typename Callback>
283 static V8_INLINE void VisitPossiblePointers(const void* address,
284 Callback callback) {
285 // Pass along the full pointer.
286 return callback(const_cast<void*>(address));
287 }
288
289 private:
290 // All constructors initialize `ptr_`. Do not add a default value here as it
291 // results in a non-atomic write on some builds, even when the atomic version
292 // of the constructor is used.
293 const void* ptr_;
294};
295
296#if defined(CPPGC_POINTER_COMPRESSION)
298#else // !defined(CPPGC_POINTER_COMPRESSION)
299using DefaultMemberStorage = RawPointer;
300#endif // !defined(CPPGC_POINTER_COMPRESSION)
301
302} // namespace internal
303} // namespace cppgc
304
305#endif // INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
V8_INLINE friend bool operator!=(RawPointer a, RawPointer b)
V8_INLINE RawPointer(const void *ptr, AtomicInitializerTag)
V8_INLINE friend bool operator<(RawPointer a, RawPointer b)
V8_INLINE const void * Load() const
static V8_INLINE void VisitPossiblePointers(const void *address, Callback callback)
V8_INLINE bool IsSentinel() const
V8_INLINE friend bool operator==(RawPointer a, RawPointer b)
static constexpr auto kWriteBarrierSlotType
V8_INLINE const void * LoadAtomic() const
V8_INLINE void StoreAtomic(const void *ptr)
V8_INLINE void Store(const void *ptr)
V8_INLINE RawPointer(const void *ptr)
V8_INLINE bool IsCleared() const
V8_INLINE friend bool operator>(RawPointer a, RawPointer b)
V8_INLINE friend bool operator>=(RawPointer a, RawPointer b)
V8_INLINE uintptr_t GetAsInteger() const
V8_INLINE friend bool operator<=(RawPointer a, RawPointer b)
constexpr internal::SentinelPointer kSentinelPointer
#define V8_INLINE
Definition v8config.h:513
#define V8_TRIVIAL_ABI
Definition v8config.h:813