}
}
#endif
- context->set(Context::NEXT_CONTEXT_LINK, heap->native_contexts_list());
+ context->set(Context::NEXT_CONTEXT_LINK, heap->native_contexts_list(),
+ UPDATE_WEAK_WRITE_BARRIER);
heap->set_native_contexts_list(context);
}
DCHECK(function->next_function_link()->IsUndefined());
function->set_next_function_link(get(OPTIMIZED_FUNCTIONS_LIST));
- set(OPTIMIZED_FUNCTIONS_LIST, function);
+ set(OPTIMIZED_FUNCTIONS_LIST, function, UPDATE_WEAK_WRITE_BARRIER);
}
element_function->next_function_link()->IsJSFunction());
if (element_function == function) {
if (prev == NULL) {
- set(OPTIMIZED_FUNCTIONS_LIST, element_function->next_function_link());
+ set(OPTIMIZED_FUNCTIONS_LIST, element_function->next_function_link(),
+ UPDATE_WEAK_WRITE_BARRIER);
} else {
prev->set_next_function_link(element_function->next_function_link());
}
void Context::SetOptimizedFunctionsListHead(Object* head) {
DCHECK(IsNativeContext());
- set(OPTIMIZED_FUNCTIONS_LIST, head);
+ set(OPTIMIZED_FUNCTIONS_LIST, head, UPDATE_WEAK_WRITE_BARRIER);
}
DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
DCHECK(code->next_code_link()->IsUndefined());
code->set_next_code_link(get(OPTIMIZED_CODE_LIST));
- set(OPTIMIZED_CODE_LIST, code);
+ set(OPTIMIZED_CODE_LIST, code, UPDATE_WEAK_WRITE_BARRIER);
}
void Context::SetOptimizedCodeListHead(Object* head) {
DCHECK(IsNativeContext());
- set(OPTIMIZED_CODE_LIST, head);
+ set(OPTIMIZED_CODE_LIST, head, UPDATE_WEAK_WRITE_BARRIER);
}
void Context::SetDeoptimizedCodeListHead(Object* head) {
DCHECK(IsNativeContext());
- set(DEOPTIMIZED_CODE_LIST, head);
+ set(DEOPTIMIZED_CODE_LIST, head, UPDATE_WEAK_WRITE_BARRIER);
}
template <>
struct WeakListVisitor<Context> {
static void SetWeakNext(Context* context, Object* next) {
- context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WRITE_BARRIER);
+ context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
}
static Object* WeakNext(Context* context) {
heap->RecordWrite(object->address(), offset); \
}
-#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
-  if (mode == UPDATE_WRITE_BARRIER) { \
-    heap->incremental_marking()->RecordWrite( \
-        object, HeapObject::RawField(object, offset), value); \
-    if (heap->InNewSpace(value)) { \
-      heap->RecordWrite(object->address(), offset); \
-    } \
+#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
+  if (mode != SKIP_WRITE_BARRIER) { \
+    if (mode == UPDATE_WRITE_BARRIER) { \
+      heap->incremental_marking()->RecordWrite( \
+          object, HeapObject::RawField(object, offset), value); \
+    } \
+    if (heap->InNewSpace(value)) { \
+      heap->RecordWrite(object->address(), offset); \
+    } \
  }
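
For clarity, here is a minimal sketch of the dispatch the new CONDITIONAL_WRITE_BARRIER performs, pulled out of macro form. The names below (BarrierActions, RecordSlotWrite) are illustrative stand-ins, not V8 API: the marking part of the barrier runs only for UPDATE_WRITE_BARRIER, while the generational (new-space) part runs for every mode except SKIP_WRITE_BARRIER.

// Illustrative sketch only (not V8 code): the branch structure of the new
// CONDITIONAL_WRITE_BARRIER, with hypothetical names.
enum WriteBarrierMode {
  SKIP_WRITE_BARRIER,
  UPDATE_WEAK_WRITE_BARRIER,
  UPDATE_WRITE_BARRIER
};

struct BarrierActions {
  bool marking = false;       // incremental-marking RecordWrite
  bool generational = false;  // new-space RecordWrite (remembered set)
};

// Marking part only for UPDATE_WRITE_BARRIER; generational part for any
// mode other than SKIP_WRITE_BARRIER, guarded by the "value is in new
// space" check in the real macro.
inline BarrierActions RecordSlotWrite(WriteBarrierMode mode,
                                      bool value_in_new_space) {
  BarrierActions actions;
  if (mode != SKIP_WRITE_BARRIER) {
    if (mode == UPDATE_WRITE_BARRIER) actions.marking = true;
    if (value_in_new_space) actions.generational = true;
  }
  return actions;
}
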
#ifndef V8_TARGET_ARCH_MIPS
enum IcCheckType { ELEMENT, PROPERTY };
-// Setter that skips the write barrier if mode is SKIP_WRITE_BARRIER.
-enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };
+// SKIP_WRITE_BARRIER skips the write barrier.
+// UPDATE_WEAK_WRITE_BARRIER skips the marking part of the write barrier and
+// only performs the generational part.
+// UPDATE_WRITE_BARRIER performs the full barrier, both marking and
+// generational.
+enum WriteBarrierMode {
+ SKIP_WRITE_BARRIER,
+ UPDATE_WEAK_WRITE_BARRIER,
+ UPDATE_WRITE_BARRIER
+};
// Indicates whether a value can be loaded as a constant.
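
As a short usage sketch under these definitions, reusing the hypothetical RecordSlotWrite helper from the sketch above (again, not V8 API): weak list links such as NEXT_CONTEXT_LINK or the OPTIMIZED_FUNCTIONS_LIST / OPTIMIZED_CODE_LIST / DEOPTIMIZED_CODE_LIST heads are stored with UPDATE_WEAK_WRITE_BARRIER, so only the generational part runs and incremental marking does not treat the link as a strong reference, while ordinary strong fields keep the full UPDATE_WRITE_BARRIER.

// Usage sketch, reusing the hypothetical RecordSlotWrite defined above.
#include <cassert>

int main() {
  // Weak list link (e.g. the OPTIMIZED_FUNCTIONS_LIST head): no marking
  // barrier, but an old-to-new pointer is still recorded.
  BarrierActions weak = RecordSlotWrite(UPDATE_WEAK_WRITE_BARRIER,
                                        /*value_in_new_space=*/true);
  assert(!weak.marking && weak.generational);

  // Ordinary strong field: full barrier, marking plus generational.
  BarrierActions strong = RecordSlotWrite(UPDATE_WRITE_BARRIER,
                                          /*value_in_new_space=*/true);
  assert(strong.marking && strong.generational);

  // SKIP_WRITE_BARRIER: nothing is recorded.
  BarrierActions none = RecordSlotWrite(SKIP_WRITE_BARRIER,
                                        /*value_in_new_space=*/true);
  assert(!none.marking && !none.generational);
  return 0;
}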