V8 10.1.124 (Node 18.2.0) — V8 is Google's open-source JavaScript engine.
Listing of include/cppgc/internal/write-barrier.h (doxygen extraction; note that
several original source lines are absent from the numbered listing below).
1 // Copyright 2020 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
6 #define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
7 
8 #include <cstddef>
9 #include <cstdint>
10 
11 #include "cppgc/heap-state.h"
12 #include "cppgc/internal/api-constants.h"
13 #include "cppgc/internal/atomic-entry-flag.h"
14 #include "cppgc/platform.h"
15 #include "cppgc/sentinel-pointer.h"
16 #include "cppgc/trace-trait.h"
17 #include "v8config.h" // NOLINT(build/include_directory)
18 
19 #if defined(CPPGC_CAGED_HEAP)
20 #include "cppgc/internal/caged-heap-local-data.h"
21 #endif
22 
23 namespace cppgc {
24 
25 class HeapHandle;
26 
27 namespace internal {
28 
// Forward declarations of the two write-barrier type policies; exactly one is
// selected as WriteBarrier::WriteBarrierTypePolicy below depending on whether
// the build uses a caged heap.
#if defined(CPPGC_CAGED_HEAP)
// Restored: this forward declaration (original line 30) was dropped by the
// extraction; the name is required by the `using WriteBarrierTypePolicy =
// WriteBarrierTypeForCagedHeapPolicy;` alias inside WriteBarrier.
class WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
class WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP
34 
// NOTE(review): Incomplete doxygen extraction -- original source lines 40, 50,
// 53-54, 59-60, 75, 88 and 101 are absent, so the Type/ValueMode enum members,
// part of the Params struct, the range-barrier declaration and the
// IsAnyIncrementalOrConcurrentMarking() getter are visibly truncated below.
// Consult the full original header before editing this region.
//
// Central dispatch point for cppgc write barriers: callers classify a write
// via GetWriteBarrierType() and then invoke the matching barrier
// (Dijkstra/Steele marking; generational ones when CPPGC_YOUNG_GENERATION).
35 class V8_EXPORT WriteBarrier final {
36  public:
37  enum class Type : uint8_t {
38  kNone,
39  kMarking,
// (original line 40, a further enum member, is missing from this extraction)
41  };
42 
// Out-parameters describing a write; populated by GetWriteBarrierType() and
// consumed by the barrier functions below.
43  struct Params {
44  HeapHandle* heap = nullptr;
45 #if V8_ENABLE_CHECKS
46  Type type = Type::kNone;
47 #endif // !V8_ENABLE_CHECKS
48 #if defined(CPPGC_CAGED_HEAP)
49  uintptr_t start = 0;
// (original line 50, the accessor head for the expression below, is missing)
51  return *reinterpret_cast<CagedHeapLocalData*>(start);
52  }
55 #endif // CPPGC_CAGED_HEAP
56  };
57 
58  enum class ValueMode {
// (original lines 59-60, the enum members, are missing from this extraction)
61  };
62 
63  // Returns the required write barrier for a given `slot` and `value`.
64  static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
65  Params& params);
66  // Returns the required write barrier for a given `slot`.
67  template <typename HeapHandleCallback>
68  static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
69  HeapHandleCallback callback);
70  // Returns the required write barrier for a given `value`.
71  static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);
72 
// Barrier entry points; inline fast paths defined at the bottom of the file
// forward to the *Slow functions declared in the private section.
73  static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
74  const void* object);
// (original line 75, the head of the range-barrier declaration, is missing)
76  const Params& params, const void* first_element, size_t element_size,
77  size_t number_of_elements, TraceCallback trace_callback);
78  static V8_INLINE void SteeleMarkingBarrier(const Params& params,
79  const void* object);
80 #if defined(CPPGC_YOUNG_GENERATION)
81  static V8_INLINE void GenerationalBarrier(const Params& params,
82  const void* slot);
83  static V8_INLINE void GenerationalBarrierForSourceObject(
84  const Params& params, const void* inner_pointer);
85 #else // !CPPGC_YOUNG_GENERATION
// Without young generation support the generational barriers are no-ops.
86  static V8_INLINE void GenerationalBarrier(const Params& params,
87  const void* slot) {}
// (original line 88, the GenerationalBarrierForSourceObject head, is missing)
89  const Params& params, const void* inner_pointer) {}
90 #endif // CPPGC_YOUNG_GENERATION
91 
92 #if V8_ENABLE_CHECKS
93  static void CheckParams(Type expected_type, const Params& params);
94 #else // !V8_ENABLE_CHECKS
95  static void CheckParams(Type expected_type, const Params& params) {}
96 #endif // !V8_ENABLE_CHECKS
97 
98  // The IncrementalOrConcurrentUpdater class allows cppgc internal to update
99  // |incremental_or_concurrent_marking_flag_|.
100  class IncrementalOrConcurrentMarkingFlagUpdater;
// (original line 101, the getter head for the body below, is missing)
102  return incremental_or_concurrent_marking_flag_.MightBeEntered();
103  }
104 
105  private:
106  WriteBarrier() = delete;
107 
108 #if defined(CPPGC_CAGED_HEAP)
109  using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
110 #else // !CPPGC_CAGED_HEAP
111  using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
112 #endif // !CPPGC_CAGED_HEAP
113 
// Out-of-line slow paths; defined in the cppgc library, not in this header.
114  static void DijkstraMarkingBarrierSlow(const void* value);
115  static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
116  static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
117  const void* first_element,
118  size_t element_size,
119  size_t number_of_elements,
120  TraceCallback trace_callback);
121  static void SteeleMarkingBarrierSlow(const void* value);
122  static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);
123 
124 #if defined(CPPGC_YOUNG_GENERATION)
125  static CagedHeapLocalData& GetLocalData(HeapHandle&);
126  static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
127  const AgeTable& age_table,
128  const void* slot, uintptr_t value_offset);
129  static void GenerationalBarrierForSourceObjectSlow(
130  const CagedHeapLocalData& local_data, const void* object);
131 #endif // CPPGC_YOUNG_GENERATION
132 
133  static AtomicEntryFlag incremental_or_concurrent_marking_flag_;
134 };
135 
136 template <WriteBarrier::Type type>
137 V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
138  if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone;
139 #if V8_ENABLE_CHECKS
140  params.type = type;
141 #endif // !V8_ENABLE_CHECKS
142  return type;
143 }
144 
// NOTE(review): Severely incomplete extraction -- the class head of
// WriteBarrierTypeForCagedHeapPolicy (original line 146) and many interior
// lines (148, 150-152, 155, 157-158, 163, 167-168, 172-173, 175, 182, 187,
// 190, 206-207, 210-214, 217, 221, 226, 228, 232, 237-238, 241-244, 250,
// 252, 254, 257-258, 260-262, 266) are missing; the bodies below are
// fragments. Do not edit this region without the full original source.
145 #if defined(CPPGC_CAGED_HEAP)
147  public:
149  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
153  }
154 
156  static V8_INLINE WriteBarrier::Type Get(const void* value,
159  return GetNoSlot(value, params, callback);
160  }
161 
162  private:
164 
165  template <typename HeapHandleCallback>
166  static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
169  if (!TryGetCagedHeap(value, value, params)) {
170  return WriteBarrier::Type::kNone;
171  }
174  }
176  }
177 
178  template <WriteBarrier::ValueMode value_mode>
179  struct ValueModeDispatch;
180 
181  static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
183  // TODO(chromium:1056170): Check if the null check can be folded in with
184  // the rest of the write barrier.
185  if (!value) return false;
186  params.start = reinterpret_cast<uintptr_t>(value) &
188  const uintptr_t slot_offset =
189  reinterpret_cast<uintptr_t>(slot) - params.start;
191  // Check if slot is on stack or value is sentinel or nullptr. This relies
192  // on the fact that kSentinelPointer is encoded as 0x1.
193  return false;
194  }
195  return true;
196  }
197 
198  // Returns whether marking is in progress. If marking is not in progress
199  // sets the start of the cage accordingly.
200  //
201  // TODO(chromium:1056170): Create fast path on API.
202  static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
203 };
204 
// Caged-heap dispatch when both slot and value are known (kValuePresent);
// the specialization head (original lines 206-207) is missing.
205 template <>
208  template <typename HeapHandleCallback>
209  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
212 #if !defined(CPPGC_YOUNG_GENERATION)
215  }
216 #endif // !CPPGC_YOUNG_GENERATION
218  if (!within_cage) {
219  return WriteBarrier::Type::kNone;
220  }
222 #if defined(CPPGC_YOUNG_GENERATION)
223  params.heap = reinterpret_cast<HeapHandle*>(params.start);
224  params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
225  params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start;
227 #else // !CPPGC_YOUNG_GENERATION
229 #endif // !CPPGC_YOUNG_GENERATION
230  }
231  params.heap = reinterpret_cast<HeapHandle*>(params.start);
233  }
234 };
235 
// Caged-heap dispatch when only the slot is known (kNoValuePresent); the
// specialization head (original lines 237-238) is missing.
236 template <>
239  template <typename HeapHandleCallback>
240  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
243 #if defined(CPPGC_YOUNG_GENERATION)
245  if (V8_LIKELY(!IsMarking(handle, params))) {
246  // params.start is populated by IsMarking().
247  params.heap = &handle;
248  params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start;
249  // params.value_offset stays 0.
251  // Check if slot is on stack.
253  }
255  }
256 #else // !CPPGC_YOUNG_GENERATION
259  }
263  }
264 #endif // !CPPGC_YOUNG_GENERATION
265  params.heap = &handle;
267  }
268 };
271 
272 class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
273  public:
274  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
275  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
276  WriteBarrier::Params& params,
277  HeapHandleCallback callback) {
278  return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
279  }
280 
281  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
282  static V8_INLINE WriteBarrier::Type Get(const void* value,
283  WriteBarrier::Params& params,
284  HeapHandleCallback callback) {
285  // The slot will never be used in `Get()` below.
286  return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
287  callback);
288  }
289 
290  private:
291  template <WriteBarrier::ValueMode value_mode>
292  struct ValueModeDispatch;
293 
294  // TODO(chromium:1056170): Create fast path on API.
295  static bool IsMarking(const void*, HeapHandle**);
296  // TODO(chromium:1056170): Create fast path on API.
297  static bool IsMarking(HeapHandle&);
298 
299  WriteBarrierTypeForNonCagedHeapPolicy() = delete;
300 };
301 
// Non-caged dispatch when the written value is known: nullptr/sentinel writes
// never need a barrier; otherwise a marking barrier is emitted iff the
// value's heap is currently marking.
//
// NOTE(review): Original line 313 is missing from this extraction. The first
// dangling `return ... kNone` below belonged to a guard whose condition
// (presumably a marking-disabled fast-path check) was dropped.
302 template <>
303 struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
304  WriteBarrier::ValueMode::kValuePresent> {
305  template <typename HeapHandleCallback>
306  static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
307  WriteBarrier::Params& params,
308  HeapHandleCallback callback) {
309  // The following check covers nullptr as well as sentinel pointer.
310  if (object <= static_cast<void*>(kSentinelPointer)) {
311  return SetAndReturnType<WriteBarrier::Type::kNone>(params);
312  }
314  return SetAndReturnType<WriteBarrier::Type::kNone>(params);
315  }
316  if (IsMarking(object, &params.heap)) {
317  return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
318  }
319  return SetAndReturnType<WriteBarrier::Type::kNone>(params);
320  }
321 };
322 
// Non-caged dispatch when no value is available: the heap handle is obtained
// from `callback()` and a marking barrier is emitted iff that heap is marking.
//
// NOTE(review): Original line 330 is missing from this extraction; it was the
// guard that opened the block closed by the dangling `}` before the final
// return (presumably a marking-enabled fast-path check).
323 template <>
324 struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
325  WriteBarrier::ValueMode::kNoValuePresent> {
326  template <typename HeapHandleCallback>
327  static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
328  WriteBarrier::Params& params,
329  HeapHandleCallback callback) {
331  HeapHandle& handle = callback();
332  if (IsMarking(handle)) {
333  params.heap = &handle;
334  return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
335  }
336  }
337  return WriteBarrier::Type::kNone;
338  }
339 };
340 
341 // static
342 WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
343  const void* slot, const void* value, WriteBarrier::Params& params) {
344  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
345  params, []() {});
346 }
347 
348 // static
349 template <typename HeapHandleCallback>
350 WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
351  const void* slot, WriteBarrier::Params& params,
352  HeapHandleCallback callback) {
353  return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
354  slot, nullptr, params, callback);
355 }
356 
357 // static
358 WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
359  const void* value, WriteBarrier::Params& params) {
360  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(value, params,
361  []() {});
362 }
363 
364 // static
365 void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
366  const void* object) {
368 #if defined(CPPGC_CAGED_HEAP)
369  // Caged heap already filters out sentinels.
370  DijkstraMarkingBarrierSlow(object);
371 #else // !CPPGC_CAGED_HEAP
372  DijkstraMarkingBarrierSlowWithSentinelCheck(object);
373 #endif // !CPPGC_CAGED_HEAP
374 }
375 
376 // static
377 void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
378  const void* first_element,
379  size_t element_size,
380  size_t number_of_elements,
381  TraceCallback trace_callback) {
383  DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
384  number_of_elements, trace_callback);
385 }
386 
387 // static
388 void WriteBarrier::SteeleMarkingBarrier(const Params& params,
389  const void* object) {
391 #if defined(CPPGC_CAGED_HEAP)
392  // Caged heap already filters out sentinels.
393  SteeleMarkingBarrierSlow(object);
394 #else // !CPPGC_CAGED_HEAP
395  SteeleMarkingBarrierSlowWithSentinelCheck(object);
396 #endif // !CPPGC_CAGED_HEAP
397 }
398 
// NOTE(review): Severely incomplete extraction -- most statements of both
// generational-barrier bodies are missing (original lines 402, 404-405, 408,
// 410, 414, 416, 418-419, 423, 425); only comments and braces survive. Also
// note the `#endif` comment says `!CPPGC_YOUNG_GENERATION` while the guard is
// `#if defined(CPPGC_YOUNG_GENERATION)` -- inconsistent with the file's other
// endif comments, preserved here byte-for-byte.
399 #if defined(CPPGC_YOUNG_GENERATION)
400 // static
401 void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
403 
406 
407  // Bail out if the slot is in young generation.
409 
411 }
412 
413 // static
// (original line 414, the GenerationalBarrierForSourceObject head, is missing)
415  const Params& params, const void* inner_pointer) {
417 
420 
421  // Assume that if the first element is in young generation, the whole range is
422  // in young generation.
424 
426 }
427 
428 #endif // !CPPGC_YOUNG_GENERATION
429 
430 } // namespace internal
431 } // namespace cppgc
432 
433 #endif // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_