v8  9.4.146 (node 16.15.0)
V8 is Google's open source JavaScript engine
write-barrier.h
Go to the documentation of this file.
1 // Copyright 2020 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
6 #define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
7 
8 #include <cstddef>
9 #include <cstdint>
10 
11 #include "cppgc/heap-state.h"
12 #include "cppgc/internal/api-constants.h"
13 #include "cppgc/internal/atomic-entry-flag.h"
14 #include "cppgc/platform.h"
15 #include "cppgc/sentinel-pointer.h"
16 #include "cppgc/trace-trait.h"
17 #include "v8config.h" // NOLINT(build/include_directory)
18 
19 #if defined(CPPGC_CAGED_HEAP)
20 #include "cppgc/internal/caged-heap-local-data.h"
21 #endif
22 
23 namespace cppgc {
24 
25 class HeapHandle;
26 
27 namespace internal {
28 
#if defined(CPPGC_CAGED_HEAP)
// Forward declaration; the policy is defined below under the same guard and
// is referenced by WriteBarrier::WriteBarrierTypePolicy.
class WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
class WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP
34 
35 class V8_EXPORT WriteBarrier final {
36  public:
37  enum class Type : uint8_t {
38  kNone,
39  kMarking,
41  };
42 
43  struct Params {
44  HeapHandle* heap = nullptr;
45 #if V8_ENABLE_CHECKS
46  Type type = Type::kNone;
47 #endif // !V8_ENABLE_CHECKS
48 #if defined(CPPGC_CAGED_HEAP)
49  uintptr_t start = 0;
51  return *reinterpret_cast<CagedHeapLocalData*>(start);
52  }
55 #endif // CPPGC_CAGED_HEAP
56  };
57 
58  enum class ValueMode {
61  };
62 
63  // Returns the required write barrier for a given `slot` and `value`.
64  static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
65  Params& params);
66  // Returns the required write barrier for a given `slot`.
67  template <typename HeapHandleCallback>
68  static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
69  HeapHandleCallback callback);
70 
71  template <typename HeapHandleCallback>
73  const void* value, Params& params, HeapHandleCallback callback);
74 
75  static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
76  const void* object);
78  const Params& params, const void* first_element, size_t element_size,
79  size_t number_of_elements, TraceCallback trace_callback);
80  static V8_INLINE void SteeleMarkingBarrier(const Params& params,
81  const void* object);
82 #if defined(CPPGC_YOUNG_GENERATION)
83  static V8_INLINE void GenerationalBarrier(const Params& params,
84  const void* slot);
85 #else // !CPPGC_YOUNG_GENERATION
86  static V8_INLINE void GenerationalBarrier(const Params& params,
87  const void* slot) {}
88 #endif // CPPGC_YOUNG_GENERATION
89 
90 #if V8_ENABLE_CHECKS
91  static void CheckParams(Type expected_type, const Params& params);
92 #else // !V8_ENABLE_CHECKS
93  static void CheckParams(Type expected_type, const Params& params) {}
94 #endif // !V8_ENABLE_CHECKS
95 
96  // The IncrementalOrConcurrentUpdater class allows cppgc internal to update
97  // |incremental_or_concurrent_marking_flag_|.
98  class IncrementalOrConcurrentMarkingFlagUpdater;
100  return incremental_or_concurrent_marking_flag_.MightBeEntered();
101  }
102 
103  private:
104  WriteBarrier() = delete;
105 
106 #if defined(CPPGC_CAGED_HEAP)
107  using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
108 #else // !CPPGC_CAGED_HEAP
109  using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
110 #endif // !CPPGC_CAGED_HEAP
111 
112  static void DijkstraMarkingBarrierSlow(const void* value);
113  static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
114  static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
115  const void* first_element,
116  size_t element_size,
117  size_t number_of_elements,
118  TraceCallback trace_callback);
119  static void SteeleMarkingBarrierSlow(const void* value);
120  static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);
121 
122 #if defined(CPPGC_YOUNG_GENERATION)
123  static CagedHeapLocalData& GetLocalData(HeapHandle&);
124  static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
125  const AgeTable& ageTable,
126  const void* slot, uintptr_t value_offset);
127 #endif // CPPGC_YOUNG_GENERATION
128 
129  static AtomicEntryFlag incremental_or_concurrent_marking_flag_;
130 };
131 
132 template <WriteBarrier::Type type>
133 V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
134  if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone;
135 #if V8_ENABLE_CHECKS
136  params.type = type;
137 #endif // !V8_ENABLE_CHECKS
138  return type;
139 }
140 
141 #if defined(CPPGC_CAGED_HEAP)
143  public:
145  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
149  }
150 
151  template <typename HeapHandleCallback>
154  if (!TryGetCagedHeap(value, value, params)) {
155  return WriteBarrier::Type::kNone;
156  }
159  }
161  }
162 
163  private:
165 
166  template <WriteBarrier::ValueMode value_mode>
167  struct ValueModeDispatch;
168 
169  static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
172  // This method assumes that the stack is allocated in high
173  // addresses. That is not guaranteed on Windows and Fuchsia. Having a
174  // low-address (below api_constants::kCagedHeapReservationSize) on-stack
175  // slot with a nullptr value would cause this method to erroneously return
176  // that the slot resides in a caged heap that starts at a null address.
177  // This check is applied only on Windows because it is not an issue on
178  // other OSes where the stack resides in higher adderesses, and to keep
179  // the write barrier as cheap as possible.
180  if (!value) return false;
181  }
182  params.start = reinterpret_cast<uintptr_t>(value) &
184  const uintptr_t slot_offset =
185  reinterpret_cast<uintptr_t>(slot) - params.start;
187  // Check if slot is on stack or value is sentinel or nullptr. This relies
188  // on the fact that kSentinelPointer is encoded as 0x1.
189  return false;
190  }
191  return true;
192  }
193 
194  // Returns whether marking is in progress. If marking is not in progress
195  // sets the start of the cage accordingly.
196  //
197  // TODO(chromium:1056170): Create fast path on API.
198  static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
199 };
200 
201 template <>
204  template <typename HeapHandleCallback>
205  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
209  if (!within_cage) {
210  return WriteBarrier::Type::kNone;
211  }
213 #if defined(CPPGC_YOUNG_GENERATION)
214  params.heap = reinterpret_cast<HeapHandle*>(params.start);
215  params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
216  params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
218 #else // !CPPGC_YOUNG_GENERATION
220 #endif // !CPPGC_YOUNG_GENERATION
221  }
222  params.heap = reinterpret_cast<HeapHandle*>(params.start);
224  }
225 };
226 
227 template <>
230  template <typename HeapHandleCallback>
231  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
234 #if defined(CPPGC_YOUNG_GENERATION)
236  if (V8_LIKELY(!IsMarking(handle, params))) {
237  // params.start is populated by IsMarking().
238  params.heap = &handle;
239  params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
240  // params.value_offset stays 0.
242  // Check if slot is on stack.
244  }
246  }
247 #else // !CPPGC_YOUNG_GENERATION
250  }
254  }
255 #endif // !CPPGC_YOUNG_GENERATION
256  params.heap = &handle;
258  }
259 };
260 
261 #endif // CPPGC_CAGED_HEAP
262 
263 class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
264  public:
265  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
266  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
267  WriteBarrier::Params& params,
268  HeapHandleCallback callback) {
269  return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
270  }
271 
272  template <typename HeapHandleCallback>
274  const void* value, WriteBarrier::Params& params,
275  HeapHandleCallback callback) {
276  // The slot will never be used in `Get()` below.
277  return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
278  callback);
279  }
280 
281  private:
282  template <WriteBarrier::ValueMode value_mode>
283  struct ValueModeDispatch;
284 
285  // TODO(chromium:1056170): Create fast path on API.
286  static bool IsMarking(const void*, HeapHandle**);
287  // TODO(chromium:1056170): Create fast path on API.
288  static bool IsMarking(HeapHandle&);
289 
290  WriteBarrierTypeForNonCagedHeapPolicy() = delete;
291 };
292 
293 template <>
294 struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
295  WriteBarrier::ValueMode::kValuePresent> {
296  template <typename HeapHandleCallback>
297  static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
298  WriteBarrier::Params& params,
299  HeapHandleCallback callback) {
300  // The following check covers nullptr as well as sentinel pointer.
301  if (object <= static_cast<void*>(kSentinelPointer)) {
302  return WriteBarrier::Type::kNone;
303  }
304  if (IsMarking(object, &params.heap)) {
305  return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
306  }
307  return SetAndReturnType<WriteBarrier::Type::kNone>(params);
308  }
309 };
310 
311 template <>
312 struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
313  WriteBarrier::ValueMode::kNoValuePresent> {
314  template <typename HeapHandleCallback>
315  static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
316  WriteBarrier::Params& params,
317  HeapHandleCallback callback) {
319  HeapHandle& handle = callback();
320  if (IsMarking(handle)) {
321  params.heap = &handle;
322  return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
323  }
324  }
325  return WriteBarrier::Type::kNone;
326  }
327 };
328 
329 // static
330 WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
331  const void* slot, const void* value, WriteBarrier::Params& params) {
332  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
333  params, []() {});
334 }
335 
336 // static
337 template <typename HeapHandleCallback>
338 WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
339  const void* slot, WriteBarrier::Params& params,
340  HeapHandleCallback callback) {
341  return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
342  slot, nullptr, params, callback);
343 }
344 
345 // static
346 template <typename HeapHandleCallback>
347 WriteBarrier::Type
349  const void* value, Params& params, HeapHandleCallback callback) {
350  return WriteBarrierTypePolicy::GetForExternallyReferenced(value, params,
351  callback);
352 }
353 
354 // static
355 void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
356  const void* object) {
358 #if defined(CPPGC_CAGED_HEAP)
359  // Caged heap already filters out sentinels.
360  DijkstraMarkingBarrierSlow(object);
361 #else // !CPPGC_CAGED_HEAP
362  DijkstraMarkingBarrierSlowWithSentinelCheck(object);
363 #endif // !CPPGC_CAGED_HEAP
364 }
365 
366 // static
367 void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
368  const void* first_element,
369  size_t element_size,
370  size_t number_of_elements,
371  TraceCallback trace_callback) {
373  DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
374  number_of_elements, trace_callback);
375 }
376 
377 // static
378 void WriteBarrier::SteeleMarkingBarrier(const Params& params,
379  const void* object) {
381 #if defined(CPPGC_CAGED_HEAP)
382  // Caged heap already filters out sentinels.
383  SteeleMarkingBarrierSlow(object);
384 #else // !CPPGC_CAGED_HEAP
385  SteeleMarkingBarrierSlowWithSentinelCheck(object);
386 #endif // !CPPGC_CAGED_HEAP
387 }
388 
389 #if defined(CPPGC_YOUNG_GENERATION)
390 // static
391 void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
393 
396 
397  // Bail out if the slot is in young generation.
399 
401 }
402 
403 #endif // !CPPGC_YOUNG_GENERATION
404 
405 } // namespace internal
406 } // namespace cppgc
407 
408 #endif // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_