5#ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
6#define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
19#if defined(CPPGC_CAGED_HEAP)
29#if defined(CPPGC_CAGED_HEAP)
30class WriteBarrierTypeForCagedHeapPolicy;
32class WriteBarrierTypeForNonCagedHeapPolicy;
// NOTE(review): This chunk is a garbled extraction of a cppgc write-barrier
// header. Original file line numbers are fused onto the code text and many
// lines (braces, enumerators, #else/#endif) are missing from this view.
// Code tokens below are preserved byte-identical; only comments are added.
//
// -- Fragment of the WriteBarrier class body ------------------------------
// Kind of barrier a call site must execute; enumerators are not visible
// here (presumably kNone/kMarking/kGenerational — confirm against the
// full header).
37 enum class Type : uint8_t {
// Params is filled in by GetWriteBarrierType() and then consumed by the
// barrier entry points below.
// Heap the slot belongs to; stays nullptr until a barrier is resolved.
44 HeapHandle* heap =
nullptr;
// Resolved barrier kind; defaults to "no barrier needed".
46 Type type = Type::kNone;
48#if defined(CPPGC_CAGED_HEAP)
// Offsets of slot/value relative to the caged-heap base (params.start);
// only meaningful for pointers inside the cage.
53 uintptr_t slot_offset = 0;
54 uintptr_t value_offset = 0;
// Computes which barrier (if any) is required for writing |value| into
// |slot|. Overload set continues below; signatures are truncated in
// this view.
64 static V8_INLINE Type GetWriteBarrierType(
const void* slot,
const void* value,
67 template <
typename HeapHandleCallback>
69 HeapHandleCallback callback);
// Dijkstra-style marking barrier over a contiguous range of elements;
// trailing parameters are missing from this view.
75 static V8_INLINE void DijkstraMarkingBarrierRange(
76 const Params& params,
const void* first_element,
size_t element_size,
80#if defined(CPPGC_YOUNG_GENERATION)
// Young-generation builds: generational barrier keyed on an inner
// pointer of the source object.
83 static V8_INLINE void GenerationalBarrierForSourceObject(
84 const Params& params,
const void* inner_pointer);
// Non-young-generation builds: the same entry point is a no-op body.
89 const Params& params,
const void* inner_pointer) {}
// Validates that |params| matches |expected_type| (definition not
// visible here — presumably debug-only; confirm in the .cc file).
93 static void CheckParams(Type expected_type,
const Params& params);
100 class IncrementalOrConcurrentMarkingFlagUpdater;
// Fast-path probe: cheap check whether any heap might currently be
// doing incremental/concurrent marking.
102 return incremental_or_concurrent_marking_flag_.MightBeEntered();
// Policy selection and out-of-line slow paths of WriteBarrier (fragment;
// several declaration lines are missing from this view).
108#if defined(CPPGC_CAGED_HEAP)
// Caged-heap builds resolve barrier types via the caged policy; the
// non-caged alias (#else branch) is not visible here.
109 using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
// Dijkstra marking slow paths; the *WithSentinelCheck variant is used
// where |value| may be kSentinelPointer.
114 static void DijkstraMarkingBarrierSlow(
const void* value);
115 static void DijkstraMarkingBarrierSlowWithSentinelCheck(
const void* value);
116 static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
117 const void* first_element,
119 size_t number_of_elements,
// Steele-style re-marking barrier slow paths.
121 static void SteeleMarkingBarrierSlow(
const void* value);
122 static void SteeleMarkingBarrierSlowWithSentinelCheck(
const void* value);
124#if defined(CPPGC_YOUNG_GENERATION)
// Generational barrier slow paths (young-generation builds only); the
// leading parameters/name of the first declaration are missing here.
127 const AgeTable& age_table,
128 const void* slot, uintptr_t value_offset);
129 static void GenerationalBarrierForSourceObjectSlow(
// Template head of SetAndReturnType (body not visible): records |type|
// into params and returns it.
136template <WriteBarrier::Type type>
145#if defined(CPPGC_CAGED_HEAP)
// Policy that resolves the barrier type when the heap lives in a cage
// (a single aligned reservation), enabling pointer-arithmetic checks.
146class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
// Get() with both slot and value: defer to the per-value-mode dispatcher.
148 template <WriteBarrier::ValueMode value_mode,
typename HeapHandleCallback>
149 static V8_INLINE WriteBarrier::Type Get(
const void* slot,
const void* value,
150 WriteBarrier::Params& params,
151 HeapHandleCallback callback) {
152 return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
// Get() with only a value (no slot known): use the slot-less fast path.
155 template <WriteBarrier::ValueMode value_mode,
typename HeapHandleCallback>
156 static V8_INLINE WriteBarrier::Type Get(
const void* value,
157 WriteBarrier::Params& params,
158 HeapHandleCallback callback) {
159 return GetNoSlot(value, params, callback);
// Static-only utility class — not constructible.
163 WriteBarrierTypeForCagedHeapPolicy() =
delete;
// Slot-less resolution: only a marking barrier can apply (generational
// barriers need a slot offset).
165 template <
typename HeapHandleCallback>
166 static V8_INLINE WriteBarrier::Type GetNoSlot(
const void* value,
167 WriteBarrier::Params& params,
168 HeapHandleCallback) {
// |value| is used as its own slot here just to locate the cage.
169 if (!TryGetCagedHeap(value, value, params)) {
170 return WriteBarrier::Type::kNone;
// Marking barrier only while incremental marking is in progress.
172 if (
V8_UNLIKELY(params.caged_heap().is_incremental_marking_in_progress)) {
173 return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
175 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
178 template <WriteBarrier::ValueMode value_mode>
179 struct ValueModeDispatch;
// Computes the cage base from |value| and checks that |slot| lies within
// the cage reservation; fills params.start. (The success/failure returns
// after the range check are missing from this view.)
181 static V8_INLINE bool TryGetCagedHeap(
const void* slot,
const void* value,
182 WriteBarrier::Params& params) {
185 if (!value)
return false;
// Cage base = value rounded down to the reservation alignment.
186 params.start =
reinterpret_cast<uintptr_t
>(value) &
187 ~(api_constants::kCagedHeapReservationAlignment - 1);
188 const uintptr_t slot_offset =
189 reinterpret_cast<uintptr_t
>(slot) - params.start;
// Slot outside the cage (e.g. on-stack slot) — unsigned wrap-around of
// the subtraction above also lands here.
190 if (slot_offset > api_constants::kCagedHeapReservationSize) {
202 static bool IsMarking(
const HeapHandle&, WriteBarrier::Params&);
// Caged-heap dispatch when the written value is known. Several guard
// lines (the `within_cage` check, #else/#endif, closing braces) are
// missing from this view.
206struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
207 WriteBarrier::ValueMode::kValuePresent> {
208 template <
typename HeapHandleCallback>
209 static V8_INLINE WriteBarrier::Type Get(
const void* slot,
const void* value,
210 WriteBarrier::Params& params,
211 HeapHandleCallback) {
// Without young generation, no marking anywhere means no barrier at all.
212#if !defined(CPPGC_YOUNG_GENERATION)
213 if (
V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
214 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
217 bool within_cage = TryGetCagedHeap(slot, value, params);
// Presumably guarded by `if (!within_cage)` — guard line missing here.
219 return WriteBarrier::Type::kNone;
// Not marking: young-generation builds still need a generational
// barrier; others need none.
221 if (
V8_LIKELY(!params.caged_heap().is_incremental_marking_in_progress)) {
222#if defined(CPPGC_YOUNG_GENERATION)
// HeapHandle lives at the cage base; offsets are relative to it.
223 params.heap =
reinterpret_cast<HeapHandle*
>(params.start);
224 params.slot_offset =
reinterpret_cast<uintptr_t
>(slot) - params.start;
225 params.value_offset =
reinterpret_cast<uintptr_t
>(value) - params.start;
226 return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
227 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
228 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
231 params.heap =
reinterpret_cast<HeapHandle*
>(params.start);
// Marking in progress: full marking barrier.
232 return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
// Caged-heap dispatch when no value is available (e.g. the slot is being
// cleared); |callback| lazily produces the HeapHandle. Closing braces and
// some returns are missing from this view.
237struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
238 WriteBarrier::ValueMode::kNoValuePresent> {
239 template <
typename HeapHandleCallback>
240 static V8_INLINE WriteBarrier::Type Get(
const void* slot,
const void*,
241 WriteBarrier::Params& params,
242 HeapHandleCallback callback) {
243#if defined(CPPGC_YOUNG_GENERATION)
244 HeapHandle& handle = callback();
// Not marking: check whether the slot needs a generational barrier.
245 if (
V8_LIKELY(!IsMarking(handle, params))) {
247 params.heap = &handle;
248 params.slot_offset =
reinterpret_cast<uintptr_t
>(slot) - params.start;
// Slot outside the cage (e.g. on stack): no barrier needed.
250 if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
252 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
254 return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
// Non-young-generation path (#else presumably precedes — line missing):
// bail out early when nothing is marking anywhere.
257 if (
V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
258 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
260 HeapHandle& handle = callback();
// Double-check this specific heap is marking before paying for the
// barrier.
261 if (
V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
262 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
265 params.heap = &handle;
266 return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
// Interior of WriteBarrierTypeForNonCagedHeapPolicy (class head not
// visible in this view): resolves barrier types without a caged heap,
// using page lookups instead of cage arithmetic.
// Get() with slot and value: defer to the per-value-mode dispatcher
// (the Get signature lines themselves are missing here).
274 template <WriteBarrier::ValueMode value_mode,
typename HeapHandleCallback>
277 HeapHandleCallback callback) {
278 return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
// Slot-less overload: forwarded as a value-present query with a null
// slot, since only the value matters for the non-caged marking check.
281 template <WriteBarrier::ValueMode value_mode,
typename HeapHandleCallback>
284 HeapHandleCallback callback) {
286 return Get<WriteBarrier::ValueMode::kValuePresent>(
nullptr, value, params,
291 template <WriteBarrier::ValueMode value_mode>
292 struct ValueModeDispatch;
// Looks up whether the object's heap is marking; on success also yields
// the HeapHandle through the out-parameter.
295 static bool IsMarking(
const void*, HeapHandle**);
297 static bool IsMarking(HeapHandle&);
// Non-caged dispatch when the written value is known. The specialization
// argument line and several guard lines are missing from this view.
303struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
305 template <
typename HeapHandleCallback>
308 HeapHandleCallback callback) {
// Two early "no barrier" returns; their guard conditions (presumably the
// global marking check and a nullptr/sentinel check) are missing here.
311 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
314 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
// NOTE(review): "¶ms." below is HTML-entity mojibake of "&params."
// (&para; swallowed the ampersand) — restore to `&params.heap` when
// reconstructing the file; cannot be changed in a comments-only edit.
316 if (IsMarking(
object, ¶ms.
heap)) {
317 return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
319 return SetAndReturnType<WriteBarrier::Type::kNone>(params);
// Non-caged dispatch when no value is available: obtain the HeapHandle
// via |callback| and emit a marking barrier only if that heap is marking.
// The specialization argument and trailing kNone return are missing from
// this view.
324struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
326 template <
typename HeapHandleCallback>
329 HeapHandleCallback callback) {
331 HeapHandle& handle = callback();
// "params." / "heap" below is one statement split across lines by the
// extraction: params.heap = &handle;
332 if (IsMarking(handle)) {
333 params.
heap = &handle;
334 return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
// Inline definitions of the WriteBarrier entry points (fragment; most
// signatures, braces, and #else/#endif lines are missing from this view).
// GetWriteBarrierType(slot, value, params): value-present resolution.
344 return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
// Slot-only overload: no value available, pass nullptr through.
349template <
typename HeapHandleCallback>
352 HeapHandleCallback callback) {
353 return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
354 slot,
nullptr, params, callback);
// Value-only overload.
360 return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(value, params,
// DijkstraMarkingBarrier: caged builds skip the sentinel check (the cage
// lookup already filtered sentinel values); others must tolerate it.
366 const void*
object) {
368#if defined(CPPGC_CAGED_HEAP)
370 DijkstraMarkingBarrierSlow(
object);
372 DijkstraMarkingBarrierSlowWithSentinelCheck(
object);
// DijkstraMarkingBarrierRange: forwards to the range slow path with the
// heap resolved in |params|.
378 const void* first_element,
380 size_t number_of_elements,
383 DijkstraMarkingBarrierRangeSlow(*params.
heap, first_element, element_size,
384 number_of_elements, trace_callback);
// SteeleMarkingBarrier: same caged/non-caged split as Dijkstra above.
389 const void*
object) {
391#if defined(CPPGC_CAGED_HEAP)
393 SteeleMarkingBarrierSlow(
object);
395 SteeleMarkingBarrierSlowWithSentinelCheck(
object);
399#if defined(CPPGC_YOUNG_GENERATION)
// GenerationalBarrier: young slots need no barrier (an early return is
// presumably between the check and the slow call — line missing here).
405 const AgeTable& age_table = local_data.age_table;
408 if (
V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
411 GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset);
// GenerationalBarrierForSourceObject: same young-slot fast path, keyed
// on an inner pointer of the source object.
416 const Params& params,
const void* inner_pointer) {
419 const CagedHeapLocalData& local_data = params.caged_heap();
420 const AgeTable& age_table = local_data.age_table;
424 if (
V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
427 GenerationalBarrierForSourceObjectSlow(local_data, inner_pointer);
static V8_INLINE void GenerationalBarrierForSourceObject(const Params ¶ms, const void *inner_pointer)
static V8_INLINE void DijkstraMarkingBarrier(const Params ¶ms, const void *object)
static V8_INLINE Type GetWriteBarrierType(const void *slot, const void *value, Params ¶ms)
static V8_INLINE void GenerationalBarrier(const Params ¶ms, const void *slot)
static V8_INLINE void SteeleMarkingBarrier(const Params ¶ms, const void *object)
static void CheckParams(Type expected_type, const Params ¶ms)
static bool IsAnyIncrementalOrConcurrentMarking()
static V8_INLINE void DijkstraMarkingBarrierRange(const Params ¶ms, const void *first_element, size_t element_size, size_t number_of_elements, TraceCallback trace_callback)
static V8_INLINE Type GetWriteBarrierType(const void *slot, Params ¶ms, HeapHandleCallback callback)
static V8_INLINE WriteBarrier::Type Get(const void *slot, const void *value, WriteBarrier::Params ¶ms, HeapHandleCallback callback)
static V8_INLINE WriteBarrier::Type Get(const void *value, WriteBarrier::Params ¶ms, HeapHandleCallback callback)
V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params ¶ms)
void(*)(Visitor *visitor, const void *object) TraceCallback
constexpr internal::SentinelPointer kSentinelPointer
static V8_INLINE WriteBarrier::Type Get(const void *, const void *object, WriteBarrier::Params ¶ms, HeapHandleCallback callback)
static V8_INLINE WriteBarrier::Type Get(const void *, const void *, WriteBarrier::Params ¶ms, HeapHandleCallback callback)
#define V8_LIKELY(condition)
#define V8_UNLIKELY(condition)