// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_STORE_BUFFER_H_
#define V8_STORE_BUFFER_H_

#include "src/allocation.h"
#include "src/base/logging.h"
#include "src/base/platform/platform.h"
#include "src/globals.h"

namespace v8 {
namespace internal {

class Page;
class PagedSpace;
class StoreBuffer;

typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);

typedef void (StoreBuffer::*RegionCallback)(Address start, Address end,
                                            ObjectSlotCallback slot_callback);

// Used to implement the write barrier by collecting addresses of pointers
// between spaces.
class StoreBuffer {
 public:
  explicit StoreBuffer(Heap* heap);

  static void StoreBufferOverflow(Isolate* isolate);

  inline Address TopAddress();

  // This is used by the mutator to enter addresses into the store buffer.
  inline void Mark(Address addr);
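
  // A minimal sketch of the mutator fast path, assuming a hypothetical top_
  // cursor into the new buffer (the real implementation lives in the
  // corresponding .cc/-inl.h files and differs in detail).  The buffer is
  // aligned so that testing kStoreBufferOverflowBit on the new top detects
  // when the new buffer is full:
  //
  //   void StoreBuffer::Mark(Address addr) {
  //     *top_++ = addr;  // Append the address of the slot that was written.
  //     if (reinterpret_cast<uintptr_t>(top_) & kStoreBufferOverflowBit) {
  //       // New buffer is full; overflow handling compresses it into the
  //       // old buffer (see StoreBufferOverflow above).
  //     }
  //   }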

  // This is used by the heap traversal to enter the addresses into the store
  // buffer that should still be in the store buffer after GC.  It enters
  // addresses directly into the old buffer because the GC starts by wiping
  // the old buffer and thereafter only visits each cell once, so there is no
  // need to attempt to remove any duplicates.  During the first part of a GC
  // we are using the store buffer to access the old spaces and at the same
  // time we are rebuilding the store buffer using this function.  There is,
  // however, no issue of overwriting the buffer we are iterating over,
  // because this stage of the scavenge can only reduce the number of
  // addresses in the store buffer (some objects are promoted, so pointers to
  // them do not need to be in the store buffer).  The later parts of the GC
  // scan the pages that are exempt from the store buffer and process the
  // promotion queue.  These steps can overflow this buffer.  We check for
  // this and on overflow we call the callback set up with the
  // StoreBufferRebuildScope object.
  inline void EnterDirectlyIntoStoreBuffer(Address addr);
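
  // A hedged sketch of entering directly into the old buffer; the heap and
  // page_of(addr) names below are placeholders, not this header's API.  Note
  // that no deduplication is attempted, and that running out of room reports
  // overflow through the callback installed by StoreBufferRebuildScope:
  //
  //   void StoreBuffer::EnterDirectlyIntoStoreBuffer(Address addr) {
  //     if (!store_buffer_rebuilding_enabled_) return;
  //     *old_top_++ = addr;              // Each cell is visited only once.
  //     old_buffer_is_sorted_ = false;   // Appending invalidates sortedness
  //     old_buffer_is_filtered_ = false; // and filtering.
  //     if (old_top_ >= old_limit_) {
  //       (*callback_)(heap, page_of(addr), kStoreBufferFullEvent);
  //     }
  //   }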

  // Iterates over all pointers that go from old space to new space.  It will
  // delete the store buffer as it starts, so the callback should reenter
  // surviving old-to-new pointers into the store buffer to rebuild it.
  void IteratePointersToNewSpace(ObjectSlotCallback callback);
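
  // For illustration, a hedged sketch of such a callback (the function name
  // and the heap/store_buffer variables are assumptions, not this header's
  // API): pointers whose targets stay in new space are re-entered, pointers
  // to promoted objects are not.
  //
  //   static void RescanSlot(HeapObject** slot, HeapObject* target) {
  //     if (heap->InNewSpace(target)) {
  //       // Still an old-to-new pointer: keep it in the store buffer.
  //       store_buffer->EnterDirectlyIntoStoreBuffer(
  //           reinterpret_cast<Address>(slot));
  //     }
  //     // Otherwise the target was promoted and the slot no longer needs to
  //     // be recorded.
  //   }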

  static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2);
  static const int kStoreBufferSize = kStoreBufferOverflowBit;
  static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address);
  static const int kOldStoreBufferLength = kStoreBufferLength * 16;
  static const int kHashSetLengthLog2 = 12;
  static const int kHashSetLength = 1 << kHashSetLengthLog2;
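
  // For example, on a 64-bit target (kPointerSizeLog2 == 3) these work out
  // to:
  //   kStoreBufferOverflowBit = 1 << 17, so kStoreBufferSize = 128 KB,
  //   kStoreBufferLength      = 128 KB / 8 bytes  = 16384 entries,
  //   kOldStoreBufferLength   = 16384 * 16        = 262144 entries,
  //   kHashSetLength          = 1 << 12           = 4096 slots per hash set.
  // The overflow bit is chosen so that, given the buffer's alignment, testing
  // it on the top pointer detects when the new buffer is full.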

  Object*** Limit() { return reinterpret_cast<Object***>(old_limit_); }
  Object*** Start() { return reinterpret_cast<Object***>(old_start_); }
  Object*** Top() { return reinterpret_cast<Object***>(old_top_); }
  void SetTop(Object*** top) {
    DCHECK(top >= Start());
    DCHECK(top <= Limit());
    old_top_ = reinterpret_cast<Address*>(top);
  }

  bool old_buffer_is_sorted() { return old_buffer_is_sorted_; }
  bool old_buffer_is_filtered() { return old_buffer_is_filtered_; }

  void EnsureSpace(intptr_t space_needed);

  bool PrepareForIteration();

  void Filter(int flag);

  // Eliminates all stale store buffer entries, i.e., slots that are no
  // longer part of live objects.  This method must be called after marking,
  // when the whole transitive closure is known, and before sweeping, while
  // the mark bits are still intact.
  void ClearInvalidStoreBufferEntries();
  void VerifyValidStoreBufferEntries();
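
  // Conceptually (an illustrative sketch; IsSlotStillValid is a hypothetical
  // helper, not this header's API), the clearing pass walks the old buffer
  // and keeps only the slots whose holding objects survived marking:
  //
  //   Address* new_top = old_start_;
  //   for (Address* p = old_start_; p < old_top_; p++) {
  //     if (IsSlotStillValid(*p)) *new_top++ = *p;  // Keep live slots only.
  //   }
  //   old_top_ = new_top;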

 private:
  // The store buffer is divided up into a new buffer that is constantly being
  // filled by mutator activity and an old buffer that is filled with the data
  // from the new buffer after compression.
  Address* old_start_;
  Address* old_limit_;
  Address* old_top_;
  Address* old_reserved_limit_;
  base::VirtualMemory* old_virtual_memory_;

  bool old_buffer_is_sorted_;
  bool old_buffer_is_filtered_;

  // The garbage collector iterates over many pointers to new space that are
  // not handled by the store buffer.  This flag indicates whether the
  // pointers found by the callbacks should be added to the store buffer or
  // not.
  bool store_buffer_rebuilding_enabled_;
  StoreBufferCallback callback_;
  bool may_move_store_buffer_entries_;

  base::VirtualMemory* virtual_memory_;

  // Two hash sets used for filtering.
  // If an address is in a hash set then it is guaranteed to be in the
  // old part of the store buffer.
  uintptr_t* hash_set_1_;
  uintptr_t* hash_set_2_;
  bool hash_sets_are_empty_;
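
  // A hedged sketch of how the two sets are used when compressing the new
  // buffer into the old buffer; the hash functions below are illustrative,
  // not the exact implementation.  Each address is hashed into both tables;
  // an address already present in either table is a duplicate of an entry
  // known to be in the old buffer and can be dropped:
  //
  //   uintptr_t a = reinterpret_cast<uintptr_t>(addr);
  //   size_t h1 = (a >> kPointerSizeLog2) & (kHashSetLength - 1);
  //   size_t h2 = (a >> (kPointerSizeLog2 + kHashSetLengthLog2)) &
  //               (kHashSetLength - 1);
  //   if (hash_set_1_[h1] == a || hash_set_2_[h2] == a) return;  // Duplicate.
  //   if (hash_set_1_[h1] == 0) hash_set_1_[h1] = a;     // Record it, or
  //   else hash_set_2_[h2] = a;                          // evict an old one.
  //   *old_top_++ = addr;                         // Append to old buffer.
  //   hash_sets_are_empty_ = false;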

  void ClearFilteringHashSets();

  bool SpaceAvailable(intptr_t space_needed);
  void ExemptPopularPages(int prime_sample_step, int threshold);

  void ProcessOldToNewSlot(Address slot_address,
                           ObjectSlotCallback slot_callback);

  void FindPointersToNewSpaceInRegion(Address start, Address end,
                                      ObjectSlotCallback slot_callback);

  // For each region of pointers on a page in use from an old space, calls
  // the given region_callback.  If either the region_callback or the
  // slot_callback can cause an allocation in old space and thereby change
  // the allocation watermark, this must be taken into account during
  // iteration.
  void IteratePointersOnPage(PagedSpace* space, Page* page,
                             RegionCallback region_callback,
                             ObjectSlotCallback slot_callback);

  void IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback);

  void VerifyPointers(LargeObjectSpace* space);

  friend class StoreBufferRebuildScope;
  friend class DontMoveStoreBufferEntriesScope;
};


class StoreBufferRebuildScope {
 public:
  explicit StoreBufferRebuildScope(Heap* heap, StoreBuffer* store_buffer,
                                   StoreBufferCallback callback)
      : store_buffer_(store_buffer),
        stored_state_(store_buffer->store_buffer_rebuilding_enabled_),
        stored_callback_(store_buffer->callback_) {
    store_buffer_->store_buffer_rebuilding_enabled_ = true;
    store_buffer_->callback_ = callback;
    (*callback)(heap, NULL, kStoreBufferStartScanningPagesEvent);
  }

  ~StoreBufferRebuildScope() {
    store_buffer_->callback_ = stored_callback_;
    store_buffer_->store_buffer_rebuilding_enabled_ = stored_state_;
  }

 private:
  StoreBuffer* store_buffer_;
  bool stored_state_;
  StoreBufferCallback stored_callback_;
};
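
// A hedged usage sketch (the caller-side names ScavengeStoreBufferCallback
// and ScavengeObject are illustrative, not declared here): GC code that
// wipes and rebuilds the store buffer brackets the iteration with this scope
// so that overflow during rebuilding is routed to the supplied callback.
//
//   {
//     StoreBufferRebuildScope scope(heap, heap->store_buffer(),
//                                   &ScavengeStoreBufferCallback);
//     heap->store_buffer()->IteratePointersToNewSpace(&ScavengeObject);
//   }  // Scope exit restores the previous callback and rebuilding flag.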


class DontMoveStoreBufferEntriesScope {
 public:
  explicit DontMoveStoreBufferEntriesScope(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer),
        stored_state_(store_buffer->may_move_store_buffer_entries_) {
    store_buffer_->may_move_store_buffer_entries_ = false;
  }

  ~DontMoveStoreBufferEntriesScope() {
    store_buffer_->may_move_store_buffer_entries_ = stored_state_;
  }

 private:
  StoreBuffer* store_buffer_;
  bool stored_state_;
};
} }  // namespace v8::internal

#endif  // V8_STORE_BUFFER_H_