// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_H_
#define V8_HEAP_HEAP_H_

// Clients of this interface shouldn't depend on lots of heap internals.
// Do not include anything from src/heap here!
#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/atomic-utils.h"
#include "src/globals.h"
// TODO(mstarzinger): Two more includes to kill!
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"

namespace v8 {
namespace internal {
// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V) \
  V(Map, byte_array_map, ByteArrayMap) \
  V(Map, free_space_map, FreeSpaceMap) \
  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
  /* Cluster the most popular ones in a few cache lines here at the top. */ \
  V(Smi, store_buffer_top, StoreBufferTop) \
  V(Oddball, undefined_value, UndefinedValue) \
  V(Oddball, the_hole_value, TheHoleValue) \
  V(Oddball, null_value, NullValue) \
  V(Oddball, true_value, TrueValue) \
  V(Oddball, false_value, FalseValue) \
  V(String, empty_string, empty_string) \
  V(String, hidden_string, hidden_string) \
  V(Oddball, uninitialized_value, UninitializedValue) \
  V(Map, cell_map, CellMap) \
  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
  V(Map, meta_map, MetaMap) \
  V(Map, heap_number_map, HeapNumberMap) \
  V(Map, mutable_heap_number_map, MutableHeapNumberMap) \
  V(Map, float32x4_map, Float32x4Map) \
  V(Map, int32x4_map, Int32x4Map) \
  V(Map, uint32x4_map, Uint32x4Map) \
  V(Map, bool32x4_map, Bool32x4Map) \
  V(Map, int16x8_map, Int16x8Map) \
  V(Map, uint16x8_map, Uint16x8Map) \
  V(Map, bool16x8_map, Bool16x8Map) \
  V(Map, int8x16_map, Int8x16Map) \
  V(Map, uint8x16_map, Uint8x16Map) \
  V(Map, bool8x16_map, Bool8x16Map) \
  V(Map, native_context_map, NativeContextMap) \
  V(Map, fixed_array_map, FixedArrayMap) \
  V(Map, code_map, CodeMap) \
  V(Map, scope_info_map, ScopeInfoMap) \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
  V(Map, weak_cell_map, WeakCellMap) \
  V(Map, one_byte_string_map, OneByteStringMap) \
  V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap) \
  V(Map, function_context_map, FunctionContextMap) \
  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
  V(ByteArray, empty_byte_array, EmptyByteArray) \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
  /* The roots above this line should be boring from a GC point of view. */ \
  /* This means they are never in new space and never on a page that is */ \
  /* being compacted. */ \
  V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
  V(Oddball, arguments_marker, ArgumentsMarker) \
  V(Oddball, exception, Exception) \
  V(Oddball, termination_exception, TerminationException) \
  V(FixedArray, number_string_cache, NumberStringCache) \
  V(Object, instanceof_cache_function, InstanceofCacheFunction) \
  V(Object, instanceof_cache_map, InstanceofCacheMap) \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
  V(FixedArray, string_split_cache, StringSplitCache) \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
  V(Smi, hash_seed, HashSeed) \
  V(Map, hash_table_map, HashTableMap) \
  V(Map, ordered_hash_table_map, OrderedHashTableMap) \
  V(Map, symbol_map, SymbolMap) \
  V(Map, string_map, StringMap) \
  V(Map, cons_one_byte_string_map, ConsOneByteStringMap) \
  V(Map, cons_string_map, ConsStringMap) \
  V(Map, sliced_string_map, SlicedStringMap) \
  V(Map, sliced_one_byte_string_map, SlicedOneByteStringMap) \
  V(Map, external_string_map, ExternalStringMap) \
  V(Map, external_string_with_one_byte_data_map, \
    ExternalStringWithOneByteDataMap) \
  V(Map, external_one_byte_string_map, ExternalOneByteStringMap) \
  V(Map, native_source_string_map, NativeSourceStringMap) \
  V(Map, short_external_string_map, ShortExternalStringMap) \
  V(Map, short_external_string_with_one_byte_data_map, \
    ShortExternalStringWithOneByteDataMap) \
  V(Map, internalized_string_map, InternalizedStringMap) \
  V(Map, external_internalized_string_map, ExternalInternalizedStringMap) \
  V(Map, external_internalized_string_with_one_byte_data_map, \
    ExternalInternalizedStringWithOneByteDataMap) \
  V(Map, external_one_byte_internalized_string_map, \
    ExternalOneByteInternalizedStringMap) \
  V(Map, short_external_internalized_string_map, \
    ShortExternalInternalizedStringMap) \
  V(Map, short_external_internalized_string_with_one_byte_data_map, \
    ShortExternalInternalizedStringWithOneByteDataMap) \
  V(Map, short_external_one_byte_internalized_string_map, \
    ShortExternalOneByteInternalizedStringMap) \
  V(Map, short_external_one_byte_string_map, ShortExternalOneByteStringMap) \
  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap) \
  V(Map, fixed_int8_array_map, FixedInt8ArrayMap) \
  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap) \
  V(Map, fixed_int16_array_map, FixedInt16ArrayMap) \
  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap) \
  V(Map, fixed_int32_array_map, FixedInt32ArrayMap) \
  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap) \
  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap) \
  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap) \
  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array) \
  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array) \
  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array) \
  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array) \
  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array) \
  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array) \
  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array) \
  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array) \
  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array, \
    EmptyFixedUint8ClampedArray) \
  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap) \
  V(Map, catch_context_map, CatchContextMap) \
  V(Map, with_context_map, WithContextMap) \
  V(Map, block_context_map, BlockContextMap) \
  V(Map, module_context_map, ModuleContextMap) \
  V(Map, script_context_map, ScriptContextMap) \
  V(Map, script_context_table_map, ScriptContextTableMap) \
  V(Map, undefined_map, UndefinedMap) \
  V(Map, the_hole_map, TheHoleMap) \
  V(Map, null_map, NullMap) \
  V(Map, boolean_map, BooleanMap) \
  V(Map, uninitialized_map, UninitializedMap) \
  V(Map, arguments_marker_map, ArgumentsMarkerMap) \
  V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap) \
  V(Map, exception_map, ExceptionMap) \
  V(Map, termination_exception_map, TerminationExceptionMap) \
  V(Map, message_object_map, JSMessageObjectMap) \
  V(Map, foreign_map, ForeignMap) \
  V(Map, neander_map, NeanderMap) \
  V(Map, external_map, ExternalMap) \
  V(HeapNumber, nan_value, NanValue) \
  V(HeapNumber, infinity_value, InfinityValue) \
  V(HeapNumber, minus_zero_value, MinusZeroValue) \
  V(HeapNumber, minus_infinity_value, MinusInfinityValue) \
  V(JSObject, message_listeners, MessageListeners) \
  V(UnseededNumberDictionary, code_stubs, CodeStubs) \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
  V(Code, js_entry_code, JsEntryCode) \
  V(Code, js_construct_entry_code, JsConstructEntryCode) \
  V(FixedArray, natives_source_cache, NativesSourceCache) \
  V(FixedArray, experimental_natives_source_cache, \
    ExperimentalNativesSourceCache) \
  V(FixedArray, extra_natives_source_cache, ExtraNativesSourceCache) \
  V(FixedArray, experimental_extra_natives_source_cache, \
    ExperimentalExtraNativesSourceCache) \
  V(FixedArray, code_stub_natives_source_cache, CodeStubNativesSourceCache) \
  V(Script, empty_script, EmptyScript) \
  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
  V(Cell, undefined_cell, UndefinedCell) \
  V(JSObject, observation_state, ObservationState) \
  V(Object, symbol_registry, SymbolRegistry) \
  V(Object, script_list, ScriptList) \
  V(SeededNumberDictionary, empty_slow_element_dictionary, \
    EmptySlowElementDictionary) \
  V(FixedArray, materialized_objects, MaterializedObjects) \
  V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
  V(FixedArray, microtask_queue, MicrotaskQueue) \
  V(FixedArray, dummy_vector, DummyVector) \
  V(FixedArray, detached_contexts, DetachedContexts) \
  V(ArrayList, retained_maps, RetainedMaps) \
  V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable) \
  V(PropertyCell, array_protector, ArrayProtector) \
  V(PropertyCell, empty_property_cell, EmptyPropertyCell) \
  V(Object, weak_stack_trace_list, WeakStackTraceList) \
  V(Object, code_stub_context, CodeStubContext) \
  V(JSObject, code_stub_exports_object, CodeStubExportsObject) \
  V(FixedArray, interpreter_table, InterpreterTable) \
  V(Map, bytecode_array_map, BytecodeArrayMap) \
  V(BytecodeArray, empty_bytecode_array, EmptyBytecodeArray)
// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V) \
  V(Smi, stack_limit, StackLimit) \
  V(Smi, real_stack_limit, RealStackLimit) \
  V(Smi, last_script_id, LastScriptId) \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)

#define ROOT_LIST(V) \
  STRONG_ROOT_LIST(V) \
  SMI_ROOT_LIST(V) \
  V(StringTable, string_table, StringTable)
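
// The lists above are X-macros: a client stamps out one fragment of code per
// root by supplying its own V. A minimal sketch (PRINT_ROOT_NAME is a
// hypothetical client macro, not part of this header):
//
//   #define PRINT_ROOT_NAME(type, name, camel_name) printf("%s\n", #name);
//   ROOT_LIST(PRINT_ROOT_NAME)
//   #undef PRINT_ROOT_NAME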
#define INTERNALIZED_STRING_LIST(V) \
  V(Object_string, "Object") \
  V(proto_string, "__proto__") \
  V(arguments_string, "arguments") \
  V(Arguments_string, "Arguments") \
  V(caller_string, "caller") \
  V(boolean_string, "boolean") \
  V(Boolean_string, "Boolean") \
  V(callee_string, "callee") \
  V(constructor_string, "constructor") \
  V(default_string, "default") \
  V(dot_result_string, ".result") \
  V(eval_string, "eval") \
  V(float32x4_string, "float32x4") \
  V(Float32x4_string, "Float32x4") \
  V(int32x4_string, "int32x4") \
  V(Int32x4_string, "Int32x4") \
  V(uint32x4_string, "uint32x4") \
  V(Uint32x4_string, "Uint32x4") \
  V(bool32x4_string, "bool32x4") \
  V(Bool32x4_string, "Bool32x4") \
  V(int16x8_string, "int16x8") \
  V(Int16x8_string, "Int16x8") \
  V(uint16x8_string, "uint16x8") \
  V(Uint16x8_string, "Uint16x8") \
  V(bool16x8_string, "bool16x8") \
  V(Bool16x8_string, "Bool16x8") \
  V(int8x16_string, "int8x16") \
  V(Int8x16_string, "Int8x16") \
  V(uint8x16_string, "uint8x16") \
  V(Uint8x16_string, "Uint8x16") \
  V(bool8x16_string, "bool8x16") \
  V(Bool8x16_string, "Bool8x16") \
  V(function_string, "function") \
  V(Function_string, "Function") \
  V(length_string, "length") \
  V(name_string, "name") \
  V(null_string, "null") \
  V(number_string, "number") \
  V(Number_string, "Number") \
  V(nan_string, "NaN") \
  V(source_string, "source") \
  V(source_url_string, "source_url") \
  V(source_mapping_url_string, "source_mapping_url") \
  V(this_string, "this") \
  V(global_string, "global") \
  V(ignore_case_string, "ignoreCase") \
  V(multiline_string, "multiline") \
  V(sticky_string, "sticky") \
  V(unicode_string, "unicode") \
  V(harmony_tolength_string, "harmony_tolength") \
  V(input_string, "input") \
  V(index_string, "index") \
  V(last_index_string, "lastIndex") \
  V(object_string, "object") \
  V(prototype_string, "prototype") \
  V(string_string, "string") \
  V(String_string, "String") \
  V(symbol_string, "symbol") \
  V(Symbol_string, "Symbol") \
  V(Map_string, "Map") \
  V(Set_string, "Set") \
  V(WeakMap_string, "WeakMap") \
  V(WeakSet_string, "WeakSet") \
  V(for_string, "for") \
  V(for_api_string, "for_api") \
  V(Date_string, "Date") \
  V(char_at_string, "CharAt") \
  V(undefined_string, "undefined") \
  V(valueOf_string, "valueOf") \
  V(stack_string, "stack") \
  V(toString_string, "toString") \
  V(toJSON_string, "toJSON") \
  V(KeyedLoadMonomorphic_string, "KeyedLoadMonomorphic") \
  V(KeyedStoreMonomorphic_string, "KeyedStoreMonomorphic") \
  V(illegal_access_string, "illegal access") \
  V(cell_value_string, "%cell_value") \
  V(illegal_argument_string, "illegal argument") \
  V(closure_string, "(closure)") \
  V(compare_ic_string, "==") \
  V(strict_compare_ic_string, "===") \
  V(infinity_string, "Infinity") \
  V(minus_infinity_string, "-Infinity") \
  V(query_colon_string, "(?:)") \
  V(Generator_string, "Generator") \
  V(throw_string, "throw") \
  V(done_string, "done") \
  V(value_string, "value") \
  V(next_string, "next") \
  V(byte_length_string, "byteLength") \
  V(byte_offset_string, "byteOffset") \
  V(minus_zero_string, "-0") \
  V(Array_string, "Array") \
  V(Error_string, "Error") \
  V(RegExp_string, "RegExp")
#define PRIVATE_SYMBOL_LIST(V) \
  V(array_iteration_kind_symbol) \
  V(array_iterator_next_symbol) \
  V(array_iterator_object_symbol) \
  V(call_site_function_symbol) \
  V(call_site_position_symbol) \
  V(call_site_receiver_symbol) \
  V(call_site_strict_symbol) \
  V(class_end_position_symbol) \
  V(class_start_position_symbol) \
  V(detailed_stack_trace_symbol) \
  V(elements_transition_symbol) \
  V(error_end_pos_symbol) \
  V(error_script_symbol) \
  V(error_start_pos_symbol) \
  V(formatted_stack_trace_symbol) \
  V(hash_code_symbol) \
  V(home_object_symbol) \
  V(internal_error_symbol) \
  V(intl_impl_object_symbol) \
  V(intl_initialized_marker_symbol) \
  V(megamorphic_symbol) \
  V(nonexistent_symbol) \
  V(nonextensible_symbol) \
  V(normal_ic_symbol) \
  V(premonomorphic_symbol) \
  V(promise_debug_marker_symbol) \
  V(promise_has_handler_symbol) \
  V(promise_on_resolve_symbol) \
  V(promise_on_reject_symbol) \
  V(promise_raw_symbol) \
  V(promise_status_symbol) \
  V(promise_value_symbol) \
  V(stack_trace_symbol) \
  V(string_iterator_iterated_string_symbol) \
  V(string_iterator_next_index_symbol) \
  V(uninitialized_symbol)
#define PUBLIC_SYMBOL_LIST(V) \
  V(has_instance_symbol, Symbol.hasInstance) \
  V(is_concat_spreadable_symbol, Symbol.isConcatSpreadable) \
  V(is_regexp_symbol, Symbol.isRegExp) \
  V(iterator_symbol, Symbol.iterator) \
  V(to_primitive_symbol, Symbol.toPrimitive) \
  V(to_string_tag_symbol, Symbol.toStringTag) \
  V(unscopables_symbol, Symbol.unscopables)
// Heap roots that are known to be immortal immovable, for which we can safely
// skip write barriers. This list is not exhaustive; some qualifying roots are
// omitted.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
  V(BytecodeArrayMap) \
  V(OnePointerFillerMap) \
  V(TwoPointerFillerMap) \
  V(UninitializedValue) \
  V(GlobalPropertyCellMap) \
  V(SharedFunctionInfoMap) \
  V(MutableHeapNumberMap) \
  V(NativeContextMap) \
  V(FixedCOWArrayMap) \
  V(FixedDoubleArrayMap) \
  V(NoInterceptorResultSentinel) \
  V(OrderedHashTableMap) \
  V(EmptyBytecodeArray) \
  V(EmptyDescriptorArray) \
  V(SloppyArgumentsElementsMap) \
  V(FunctionContextMap) \
  V(ModuleContextMap) \
  V(ScriptContextMap) \
  V(UninitializedMap) \
  V(ArgumentsMarkerMap) \
  V(JSMessageObjectMap) \
  PRIVATE_SYMBOL_LIST(V)
// Forward declarations.
class ArrayBufferTracker;
class GCIdleTimeAction;
class GCIdleTimeHandler;
class GCIdleTimeHeapState;
class HeapObjectsFilter;
class WeakObjectRetainer;
// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
// The last page in to-space is used for the promotion queue. On conflict
// during scavenge, the promotion queue is allocated externally and all
// entries are copied to the external queue.
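//
// A scavenge pass drains the queue roughly as follows (illustrative sketch
// only; `queue` is assumed to point at the heap's PromotionQueue):
//
//   HeapObject* target;
//   int size;
//   while (!queue->is_empty()) {
//     queue->remove(&target, &size);
//     // ... rescan the `size` bytes of `target` for pointers into new space.
//   }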
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)

  void Destroy() {
    delete emergency_stack_;
    emergency_stack_ = NULL;

  Page* GetHeadPage() {
    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));

  void SetNewLimit(Address limit) {
    // If we are already using an emergency stack, we can ignore it.
    if (emergency_stack_) return;

    // If the limit is not on the same page, we can ignore it.
    if (Page::FromAllocationTop(limit) != GetHeadPage()) return;

    limit_ = reinterpret_cast<intptr_t*>(limit);

    if (limit_ <= rear_) {

  bool IsBelowPromotionQueue(Address to_space_top) {
    // If an emergency stack is used, the to-space address cannot interfere
    // with the promotion queue.
    if (emergency_stack_) return true;

    // If the given to-space top pointer and the head of the promotion queue
    // are not on the same page, then the to-space objects are below the
    // promotion queue.
    if (GetHeadPage() != Page::FromAddress(to_space_top)) {

    // If the to-space top pointer is smaller than or equal to the promotion
    // queue head, then the to-space objects are below the promotion queue.
    return reinterpret_cast<intptr_t*>(to_space_top) <= rear_;

  bool is_empty() {
    return (front_ == rear_) &&
           (emergency_stack_ == NULL || emergency_stack_->length() == 0);

  inline void insert(HeapObject* target, int size);

  void remove(HeapObject** target, int* size) {
    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();

    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));

  // The front of the queue is higher in the memory page chain than the rear.

  static const int kEntrySizeInWords = 2;

  Entry(HeapObject* obj, int size) : obj_(obj), size_(size) {}

  List<Entry>* emergency_stack_;

  void RelocateQueueHead();

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};
class Heap {
 public:
  // Declare all the root indices. This defines the root list order.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
    INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name) k##name##RootIndex,
    PRIVATE_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name, description) k##name##RootIndex,
    PUBLIC_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
    STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP

    kStringTableRootIndex,

#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

    kRootListLength,
    kStrongRootListLength = kStringTableRootIndex,
    kSmiRootsStart = kStringTableRootIndex + 1
  };
  // Indicates whether a live bytes adjustment is triggered
  // - from within GC code before concurrent sweeping has started
  //   (SEQUENTIAL_TO_SWEEPER), or
  // - from GC or mutator code while the concurrent sweeper may be running
  //   (CONCURRENT_TO_SWEEPER).
  enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER };
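  // For example, trimming an array while the concurrent sweeper may still be
  // running would be reported as (sketch; `object` and `bytes` are assumed):
  //
  //   heap->AdjustLiveBytes(object, -bytes, Heap::CONCURRENT_TO_SWEEPER);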
  enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT };

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };

  // Taking this lock prevents the GC from entering a phase that relocates
  // object references.
  class RelocationLock {
    explicit RelocationLock(Heap* heap) : heap_(heap) {
      heap_->relocation_mutex_.Lock();

    ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }
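
    // Typical use is as a scoped guard around code that caches object
    // addresses (sketch; `heap` is assumed to be a valid Heap*):
    //
    //   {
    //     Heap::RelocationLock guard(heap);
    //     // ... object references are stable here ...
    //   }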
  // An optional version of the above lock that can be used for some critical
  // sections on the mutator thread; only safe since the GC currently does not
  // do concurrent compaction.
  class OptionalRelocationLock {
    OptionalRelocationLock(Heap* heap, bool concurrent)
        : heap_(heap), concurrent_(concurrent) {
      if (concurrent_) heap_->relocation_mutex_.Lock();

    ~OptionalRelocationLock() {
      if (concurrent_) heap_->relocation_mutex_.Unlock();
  // Support for partial snapshots. After calling this we have a linear
  // space to write objects in each space.
  typedef List<Chunk> Reservation;

  static const intptr_t kMinimumOldGenerationAllocationLimit =
      8 * (Page::kPageSize > MB ? Page::kPageSize : MB);

  static const int kInitalOldGenerationLimitFactor = 2;

#if V8_OS_ANDROID
  // Don't apply pointer multiplier on Android since it has no swap space and
  // should instead adapt its heap size based on available physical memory.
  static const int kPointerMultiplier = 1;
#else
  static const int kPointerMultiplier = i::kPointerSize / 4;
#endif
  // The new space size has to be a power of 2. Sizes are in MB.
  static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier;
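  // Worked example: on a 64-bit, non-Android build kPointerMultiplier is
  // 8 / 4 = 2, so kMaxSemiSpaceSizeHighMemoryDevice is 8 * 2 = 16 MB per
  // semispace (and new space consists of two such semispaces).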
  // The old space size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeMediumMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier;

  // The executable size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
  static const int kMaxExecutableSizeMediumMemoryDevice =
      192 * kPointerMultiplier;
  static const int kMaxExecutableSizeHighMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxExecutableSizeHugeMemoryDevice =
      256 * kPointerMultiplier;
  static const int kTraceRingBufferSize = 512;
  static const int kStacktraceBufferSize = 512;

  static const double kMinHeapGrowingFactor;
  static const double kMaxHeapGrowingFactor;
  static const double kMaxHeapGrowingFactorMemoryConstrained;
  static const double kMaxHeapGrowingFactorIdle;
  static const double kTargetMutatorUtilization;
  // Sloppy mode arguments object size.
  static const int kSloppyArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;

  // Strict mode arguments have no callee, so the object is smaller.
  static const int kStrictArgumentsObjectSize =
      JSObject::kHeaderSize + 1 * kPointerSize;

  // Indices for direct access into argument objects.
  static const int kArgumentsLengthIndex = 0;

  // The callee is only valid in sloppy mode.
  static const int kArgumentsCalleeIndex = 1;
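  // These are in-object property indices; a sketch of reading them back
  // (assumes `args` is a sloppy-mode arguments object and that
  // JSObject::InObjectPropertyAt is available, as elsewhere in the sources):
  //
  //   Object* length = args->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
  //   Object* callee = args->InObjectPropertyAt(Heap::kArgumentsCalleeIndex);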
  static const int kNoGCFlags = 0;
  static const int kReduceMemoryFootprintMask = 1;
  static const int kAbortIncrementalMarkingMask = 2;
  static const int kFinalizeIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to abort incremental marking.
  static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;
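  // The flags combine bitwise; e.g. a memory-reducing full GC that also
  // finalizes incremental marking could be requested as (sketch):
  //
  //   heap->CollectAllGarbage(
  //       Heap::kReduceMemoryFootprintMask |
  //           Heap::kFinalizeIncrementalMarkingMask,
  //       "example reason");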
  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  // The minimum size of a HeapObject on the heap.
  static const int kMinObjectSizeInWords = 2;

  STATIC_ASSERT(kUndefinedValueRootIndex ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);
  // Calculates the maximum amount of filler that could be required by the
  // given alignment.
  static int GetMaximumFillToAlign(AllocationAlignment alignment);
  // Calculates the actual amount of filler required for a given address at the
  // given alignment.
  static int GetFillToAlign(Address address, AllocationAlignment alignment);
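  // Example: to double-align an allocation, ask how much filler a candidate
  // address would need (sketch; `addr` is assumed):
  //
  //   int fill = Heap::GetFillToAlign(addr, kDoubleAligned);
  //   // `fill` never exceeds Heap::GetMaximumFillToAlign(kDoubleAligned).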
  template <typename T>
  static inline bool IsOneByte(T t, int chars);

  static void FatalProcessOutOfMemory(const char* location,
                                      bool take_snapshot = false);

  static bool RootIsImmortalImmovable(int root_index);

  // Checks whether the space is valid.
  static bool IsValidAllocationSpace(AllocationSpace space);

  // An object may have an AllocationSite associated with it through a trailing
  // AllocationMemento. Its feedback should be updated when objects are found
  // in the heap.
  static inline void UpdateAllocationSiteFeedback(HeapObject* object,
                                                  ScratchpadSlotMode mode);

  // Generated code can embed direct references to non-writable roots if
  // they are in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);

  // Zapping is needed for verify heap, and always done in debug builds.
  static inline bool ShouldZapGarbage() {
    return FLAG_verify_heap;
  static double HeapGrowingFactor(double gc_speed, double mutator_speed);

  // Copy block of memory from src to dst. Size of block should be aligned
  // by pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes
  // and pointer size aligned addresses.
  static inline void MoveBlock(Address dst, Address src, int byte_size);

  // Determines a static visitor id based on the given {map} that can then be
  // stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
  static int GetStaticVisitorIdForMap(Map* map);

  // Notifies the heap that it is ok to start marking or other activities that
  // should not happen during deserialization.
  void NotifyDeserializationComplete();
  intptr_t old_generation_allocation_limit() const {
    return old_generation_allocation_limit_;

  bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }

  Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();

  Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();

  Address* OldSpaceAllocationTopAddress() {
    return old_space_->allocation_top_address();

  Address* OldSpaceAllocationLimitAddress() {
    return old_space_->allocation_limit_address();

  // TODO(hpayer): There is still a mismatch between capacity and actual
  // committed memory size.
  bool CanExpandOldGeneration(int size) {
    return (CommittedOldGenerationMemory() + size) < MaxOldGenerationSize();
  // Clears the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // Iterates the whole code space to clear all keyed store ICs.
  void ClearAllKeyedStoreICs();

  // FreeSpace objects have a null map after deserialization. Update the map.
  void RepairFreeListsAfterDeserialization();

  // Moves len elements within a given array from src_index to dst_index.
  void MoveElements(FixedArray* array, int dst_index, int src_index, int len);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when introducing gaps within pages.
  void CreateFillerObjectAt(Address addr, int size);

  bool CanMoveObjectStart(HeapObject* object);

  // Maintain consistency of live bytes during incremental marking.
  void AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode);

  // Trim the given array from the left. Note that this relocates the object
  // start and hence is only valid if there is only a single reference to it.
  FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Trim the given array from the right.
  template <Heap::InvocationMode mode>
  void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Converts the given boolean condition to a JavaScript boolean value.
  inline Object* ToBoolean(bool condition);

  // Check whether the heap is currently iterable.
  bool IsHeapIterable();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed(bool dependant_context);
  inline void increment_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_++;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);

  inline void decrement_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_--;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;

  Object* native_contexts_list() const { return native_contexts_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;

  Object* allocation_sites_list() { return allocation_sites_list_; }

  // Used in CreateAllocationSiteStub and the (de)serializer.
  Object** allocation_sites_list_address() { return &allocation_sites_list_; }

  void set_encountered_weak_collections(Object* weak_collection) {
    encountered_weak_collections_ = weak_collection;

  Object* encountered_weak_collections() const {
    return encountered_weak_collections_;

  void set_encountered_weak_cells(Object* weak_cell) {
    encountered_weak_cells_ = weak_cell;

  Object* encountered_weak_cells() const { return encountered_weak_cells_; }
  // Number of mark-sweeps.
  int ms_count() const { return ms_count_; }

  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);

  void CheckHandleCount();

  // Number of "runtime allocations" done so far.
  uint32_t allocations_count() { return allocations_count_; }

  // Returns a deterministic "time" value in ms. Works only with
  // FLAG_verify_predictable.
  double synthetic_time() { return allocations_count() / 2.0; }
  // Print short heap statistics.
  void PrintShortHeapStatistics();

  inline HeapState gc_state() { return gc_state_; }

  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }

  // If an object has an AllocationMemento trailing it, return it, otherwise
  // return NULL.
  inline AllocationMemento* FindAllocationMemento(HeapObject* object);

  // Returns false if not able to reserve.
  bool ReserveSpace(Reservation* reservations);

  // Support for the API.

  void CreateApiObjects();

  // Implements the corresponding V8 API function.
  bool IdleNotification(double deadline_in_seconds);
  bool IdleNotification(int idle_time_in_ms);

  double MonotonicallyIncreasingTimeInMs();

  void RecordStats(HeapStats* stats, bool take_snapshot = false);
  // Check the new space expansion criteria and expand semispaces if they
  // were hit.
  void CheckNewSpaceExpansionCriteria();

  inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) {
    if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;

    intptr_t adjusted_allocation_limit = limit - new_space_.Capacity();

    if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);

  // An object should be promoted if the object has survived a
  // scavenge operation.
  inline bool ShouldBePromoted(Address old_address, int object_size);

  void ClearNormalizedMapCaches();

  void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);

  bool concurrent_sweeping_enabled() { return concurrent_sweeping_enabled_; }

  inline bool OldGenerationAllocationLimitReached();

  void QueueMemoryChunkForFree(MemoryChunk* chunk);
  void FilterStoreBufferEntriesOnAboutToBeFreedPages();
  void FreeQueuedChunks(MemoryChunk* list_head);
  void FreeQueuedChunks();
  void WaitUntilUnmappingOfFreeChunksCompleted();

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  inline void CompletelyClearInstanceofCache();
  inline uint32_t HashSeed();

  inline int NextScriptId();

  inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
  inline void SetConstructStubDeoptPCOffset(int pc_offset);
  inline void SetGetterStubDeoptPCOffset(int pc_offset);
  inline void SetSetterStubDeoptPCOffset(int pc_offset);

  // For post-mortem debugging.
  void RememberUnmappedPage(Address page, bool compacted);

  // Global inline caching age: it is incremented on some GCs after context
  // disposal. We use it to flush inline caches.
  int global_ic_age() { return global_ic_age_; }

  void AgeInlineCaches() {
    global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;

  int64_t amount_of_external_allocated_memory() {
    return amount_of_external_allocated_memory_;

  void update_amount_of_external_allocated_memory(int64_t delta) {
    amount_of_external_allocated_memory_ += delta;

  void DeoptMarkedAllocationSites();

  bool DeoptMaybeTenuredAllocationSites() {
    return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;

  void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
                                     Handle<DependentCode> dep);

  DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);

  void AddRetainedMap(Handle<Map> map);

  // This event is triggered after successful allocation of a new object made
  // by the runtime. Allocations of target space for object evacuation do not
  // trigger the event. In order to track ALL allocations one must turn off
  // FLAG_inline_new and FLAG_use_allocation_folding.
  inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);

  // This event is triggered after an object is moved to a new place.
  inline void OnMoveEvent(HeapObject* target, HeapObject* source,
                          int size_in_bytes);

  bool deserialization_complete() const { return deserialization_complete_; }

  bool HasLowAllocationRate();
  bool HasHighFragmentation();
  bool HasHighFragmentation(intptr_t used, intptr_t committed);

  bool ShouldOptimizeForMemoryUsage() { return optimize_for_memory_usage_; }
  // ===========================================================================
  // Initialization. ===========================================================
  // ===========================================================================

  // Configure heap size in MB before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(int max_semi_space_size, int max_old_space_size,
                     int max_executable_size, size_t code_range_size);
  bool ConfigureHeapDefault();

  // Prepares the heap, setting up memory areas that are needed in the isolate
  // without actually creating any objects.
  bool SetUp();

  // Bootstraps the object heap with the core set of objects required to run.
  // Returns whether it succeeded.
  bool CreateHeapObjects();

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Returns whether SetUp has been called.
  bool HasBeenSetUp();
  // ===========================================================================
  // Getters for spaces. =======================================================
  // ===========================================================================

  // Return the starting address and a mask for the new space. And-masking an
  // address with the mask will result in the start address of the new space
  // for all addresses in either semispace.
  Address NewSpaceStart() { return new_space_.start(); }
  uintptr_t NewSpaceMask() { return new_space_.mask(); }
  Address NewSpaceTop() { return new_space_.top(); }

  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_space() { return old_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  LargeObjectSpace* lo_space() { return lo_space_; }

  PagedSpace* paged_space(int idx) {
        return code_space();

  Space* space(int idx) {
        return paged_space(idx);

  // Returns the name of the space.
  const char* GetSpaceName(int idx);
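  // Together with the accessors above this allows generic iteration over all
  // spaces (sketch; FIRST_SPACE and LAST_SPACE are the AllocationSpace
  // bounds):
  //
  //   for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
  //     PrintF("%s\n", heap->GetSpaceName(i));
  //   }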
  // ===========================================================================
  // Getters to other components. ==============================================
  // ===========================================================================

  GCTracer* tracer() { return tracer_; }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

  inline Isolate* isolate();

  MarkCompactCollector* mark_compact_collector() {
    return mark_compact_collector_;

  // ===========================================================================
  // Root set access. ==========================================================
  // ===========================================================================

  // Heap root getters.
#define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

  // Utility type maps.
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) inline Map* name##_map();
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) inline String* name();
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) inline Symbol* name();
  PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, description) inline Symbol* name();
  PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR
  Object* root(RootListIndex index) { return roots_[index]; }
  Handle<Object> root_handle(RootListIndex index) {
    return Handle<Object>(&roots_[index]);

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  // Sets the stub_cache_ (only used when expanding the dictionary).
  void SetRootCodeStubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  void SetRootNonMonomorphicCache(UnseededNumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;

  void SetRootMaterializedObjects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;

  void SetRootCodeStubContext(Object* value) {
    roots_[kCodeStubContextRootIndex] = value;

  void SetRootCodeStubExportsObject(JSObject* value) {
    roots_[kCodeStubExportsObjectRootIndex] = value;

  void SetRootScriptList(Object* value) {
    roots_[kScriptListRootIndex] = value;

  void SetRootStringTable(StringTable* value) {
    roots_[kStringTableRootIndex] = value;

  // Set the stack limit in the roots_ array. Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  Map* MapForFixedTypedArray(ExternalArrayType array_type);
  RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
  FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);

  void RegisterStrongRoots(Object** start, Object** end);
  void UnregisterStrongRoots(Object** start);
  // ===========================================================================
  // Inline allocation. ========================================================
  // ===========================================================================

  // Indicates whether inline bump-pointer allocation has been disabled.
  bool inline_allocation_disabled() { return inline_allocation_disabled_; }

  // Switch whether inline bump-pointer allocation should be used.
  void EnableInlineAllocation();
  void DisableInlineAllocation();

  // ===========================================================================
  // Methods triggering GCs. ===================================================
  // ===========================================================================

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(
      AllocationSpace space, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(
      int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Last hope GC, should try to squeeze as much as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);
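  // Sketch of a forced, precise full GC as a test might request it (the
  // reason strings are arbitrary):
  //
  //   heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "make iterable");
  //   heap->CollectAllAvailableGarbage("last resort");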
  // Reports an external memory pressure event; either performs a major GC or
  // completes incremental marking in order to free external resources.
  void ReportExternalMemoryPressure(const char* gc_reason = NULL);

  // Invoked when GC was requested via the stack guard.
  void HandleGCRequest();
  // ===========================================================================
  // Iterators. ================================================================
  // ===========================================================================

  // Iterates over all roots in the heap.
  void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over entries in the smi roots list. Only interesting to the
  // serializer/deserializer, since GC does not care about smis.
  void IterateSmiRoots(ObjectVisitor* v);
  // Iterates over all the other roots in the heap.
  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  // Iterates over pointers into the from-semispace of new space found in the
  // memory interval [start, end) within |object|.
  void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
                                         Address end, bool record_slots,
                                         ObjectSlotCallback callback);
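
  // A minimal visitor sketch (assumes the ObjectVisitor interface with a
  // VisitPointers(Object**, Object**) hook, as used elsewhere in the
  // sources):
  //
  //   class CountingVisitor : public ObjectVisitor {
  //    public:
  //     void VisitPointers(Object** start, Object** end) override {
  //       count_ += static_cast<int>(end - start);
  //     }
  //     int count_ = 0;
  //   };
  //
  //   CountingVisitor v;
  //   heap->IterateRoots(&v, VISIT_ALL);  // v.count_ now holds the root count.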
  // ===========================================================================
  // Store buffer API. =========================================================
  // ===========================================================================

  // Write barrier support for address[offset] = o.
  INLINE(void RecordWrite(Address address, int offset));

  // Write barrier support for address[start : start + len) = o.
  INLINE(void RecordWrites(Address address, int start, int len));

  Address* store_buffer_top_address() {
    return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
  // ===========================================================================
  // Incremental marking API. ==================================================
  // ===========================================================================

  // Start incremental marking and ensure that the idle time handler can
  // perform incremental steps.
  void StartIdleIncrementalMarking();

  // Starts incremental marking assuming incremental marking is currently
  // stopped.
  void StartIncrementalMarking(int gc_flags = kNoGCFlags,
                               const GCCallbackFlags gc_callback_flags =
                                   GCCallbackFlags::kNoGCCallbackFlags,
                               const char* reason = nullptr);

  void FinalizeIncrementalMarkingIfComplete(const char* comment);

  bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms);

  IncrementalMarking* incremental_marking() { return incremental_marking_; }

  // ===========================================================================
  // External string table API. ================================================
  // ===========================================================================

  // Registers an external string.
  inline void RegisterExternalString(String* string);

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);
  // ===========================================================================
  // Methods checking/returning the space of a given object/address. ===========
  // ===========================================================================

  // Returns whether the object resides in new space.
  inline bool InNewSpace(Object* object);
  inline bool InNewSpace(Address address);
  inline bool InNewSpacePage(Address address);
  inline bool InFromSpace(Object* object);
  inline bool InToSpace(Object* object);

  // Returns whether the object resides in old space.
  inline bool InOldSpace(Address address);
  inline bool InOldSpace(Object* object);

  // Checks whether an address/object is in the heap (including the auxiliary
  // area and unused area).
  bool Contains(Address addr);
  bool Contains(HeapObject* value);

  // Checks whether an address/object is in a space.
  // Currently used by tests, serialization and heap verification only.
  bool InSpace(Address addr, AllocationSpace space);
  bool InSpace(HeapObject* value, AllocationSpace space);
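
  // Typical predicate use in a write-barrier fast path (sketch; `object` and
  // `value` are assumed heap objects):
  //
  //   if (heap->InNewSpace(value) && !heap->InNewSpace(object)) {
  //     // Old-to-new pointer: the slot must be recorded in the store buffer.
  //   }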
  // ===========================================================================
  // Object statistics tracking. ===============================================
  // ===========================================================================

  // Returns the number of buckets used by object statistics tracking during a
  // major GC. Note that the following methods fail gracefully when the bounds
  // are exceeded.
  size_t NumberOfTrackedHeapObjectTypes();

  // Returns object statistics about count and size at the last major GC.
  // Objects are being grouped into buckets that roughly resemble existing
  // instance types.
  size_t ObjectCountAtLastGC(size_t index);
  size_t ObjectSizeAtLastGC(size_t index);

  // Retrieves names of buckets used by object statistics tracking.
  bool GetObjectTypeName(size_t index, const char** object_type,
                         const char** object_sub_type);
  // ===========================================================================
  // GC statistics. ============================================================
  // ===========================================================================

  // Returns the maximum amount of memory reserved for the heap. For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space. The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;

  int MaxSemiSpaceSize() { return max_semi_space_size_; }
  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  int InitialSemiSpaceSize() { return initial_semispace_size_; }
  int TargetSemiSpaceSize() { return target_semispace_size_; }
  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  intptr_t MaxExecutableSize() { return max_executable_size_; }

  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.
  intptr_t Capacity();

  // Returns the amount of memory currently committed for the heap.
  intptr_t CommittedMemory();

  // Returns the amount of memory currently committed for the old space.
  intptr_t CommittedOldGenerationMemory();

  // Returns the amount of executable memory currently committed for the heap.
  intptr_t CommittedMemoryExecutable();

  // Returns the amount of physical memory currently committed for the heap.
  size_t CommittedPhysicalMemory();

  // Returns the maximum amount of memory ever committed for the heap.
  intptr_t MaximumCommittedMemory() { return maximum_committed_; }

  // Updates the maximum committed memory for the heap. Should be called
  // whenever a space grows.
  void UpdateMaximumCommitted();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  intptr_t Available();

  // Returns the size of all objects residing in the heap.
  intptr_t SizeOfObjects();
  void UpdateSurvivalStatistics(int start_new_space_size);

  inline void IncrementPromotedObjectsSize(int object_size) {
    DCHECK(object_size > 0);
    promoted_objects_size_ += object_size;

  inline intptr_t promoted_objects_size() { return promoted_objects_size_; }

  inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
    DCHECK(object_size > 0);
    semi_space_copied_object_size_ += object_size;

  inline intptr_t semi_space_copied_object_size() {
    return semi_space_copied_object_size_;

  inline intptr_t SurvivedNewSpaceObjectSize() {
    return promoted_objects_size_ + semi_space_copied_object_size_;

  inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }

  inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }

  inline void IncrementNodesPromoted() { nodes_promoted_++; }

  inline void IncrementYoungSurvivorsCounter(int survived) {
    DCHECK(survived >= 0);
    survived_last_scavenge_ = survived;
    survived_since_last_expansion_ += survived;

  inline intptr_t PromotedTotalSize() {
    int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
    if (total > std::numeric_limits<intptr_t>::max()) {
      // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations.
      return std::numeric_limits<intptr_t>::max();
    }
    if (total < 0) return 0;
    return static_cast<intptr_t>(total);

  void UpdateNewSpaceAllocationCounter() {
    new_space_allocation_counter_ = NewSpaceAllocationCounter();

  size_t NewSpaceAllocationCounter() {
    return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();

  // This should be used only for testing.
  void set_new_space_allocation_counter(size_t new_value) {
    new_space_allocation_counter_ = new_value;
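
  // The counter is monotone across scavenges, so allocation throughput can
  // be sampled as a delta (sketch):
  //
  //   size_t before = heap->NewSpaceAllocationCounter();
  //   // ... run some workload ...
  //   size_t allocated = heap->NewSpaceAllocationCounter() - before;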
  void UpdateOldGenerationAllocationCounter() {
    old_generation_allocation_counter_ = OldGenerationAllocationCounter();

  size_t OldGenerationAllocationCounter() {
    return old_generation_allocation_counter_ + PromotedSinceLastGC();

  // This should be used only for testing.
  void set_old_generation_allocation_counter(size_t new_value) {
    old_generation_allocation_counter_ = new_value;

  size_t PromotedSinceLastGC() {
    return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_;

  int gc_count() const { return gc_count_; }

  // Returns the size of objects residing outside the new space.
  intptr_t PromotedSpaceSizeOfObjects();

  double total_regexp_code_generated() { return total_regexp_code_generated_; }
  void IncreaseTotalRegexpCodeGenerated(int size) {
    total_regexp_code_generated_ += size;

  void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
    if (is_crankshafted) {
      crankshaft_codegen_bytes_generated_ += size;
    } else {
      full_codegen_bytes_generated_ += size;
    }
  // ===========================================================================
  // Prologue/epilogue callback methods. =======================================
  // ===========================================================================

  void AddGCPrologueCallback(v8::Isolate::GCCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCPrologueCallback(v8::Isolate::GCCallback callback);

  void AddGCEpilogueCallback(v8::Isolate::GCCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback);

  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
  void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
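
  // Registration sketch (the callback body is hypothetical; the signature
  // matches v8::Isolate::GCCallback):
  //
  //   void OnMajorGC(v8::Isolate* isolate, GCType type,
  //                  GCCallbackFlags flags) {
  //     // ... record statistics ...
  //   }
  //   ...
  //   heap->AddGCPrologueCallback(OnMajorGC, kGCTypeMarkSweepCompact);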
  // ===========================================================================
  // Allocation methods. =======================================================
  // ===========================================================================

  // Creates a filler object and returns a heap object immediately after it.
  MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
                                                int filler_size);

  // Creates a filler object if needed for alignment and returns a heap object
  // immediately after it. If any space is left after the returned object,
  // another filler object is created so the over allocated memory is iterable.
  MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object,
                                              int object_size,
                                              int allocation_size,
                                              AllocationAlignment alignment);

  // ===========================================================================
  // ArrayBuffer tracking. =====================================================
  // ===========================================================================

  void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
  void UnregisterArrayBuffer(JSArrayBuffer* buffer);

  inline ArrayBufferTracker* array_buffer_tracker() {
    return array_buffer_tracker_;
  // ===========================================================================

  // Verify the heap is in its normal state before or after a GC.

  void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }

  void TracePathToObjectFrom(Object* target, Object* root);
  void TracePathToObject(Object* target);
  void TracePathToGlobal();

  void PrintHandles();

  // Report heap statistics.
  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);
  class UnmapFreeMemoryTask;

  // The external strings table is a place where all external strings are
  // registered. We need to keep track of such strings to properly
  // finalize them.
  class ExternalStringTable {
    // Registers an external string.
    inline void AddString(String* string);

    inline void Iterate(ObjectVisitor* v);

    // Restores internal invariant and gets rid of collected strings.
    // Must be called after each Iterate() that modified the strings.

    // Destroys all allocated memory.

    explicit ExternalStringTable(Heap* heap) : heap_(heap) {}

    inline void Verify();

    inline void AddOldString(String* string);

    // Notifies the table that only a prefix of the new list is valid.
    inline void ShrinkNewStrings(int position);

    // To speed up scavenge collections, new space strings are kept
    // separate from old space strings.
    List<Object*> new_space_strings_;
    List<Object*> old_space_strings_;

    DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
  struct StrongRootsList;

  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantStringTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct GCCallbackPair {
    GCCallbackPair(v8::Isolate::GCCallback callback, GCType gc_type,
                   bool pass_isolate)
        : callback(callback), gc_type(gc_type), pass_isolate(pass_isolate) {}

    bool operator==(const GCCallbackPair& other) const {
      return other.callback == callback;
    }

    v8::Isolate::GCCallback callback;
    GCType gc_type;
    bool pass_isolate;
  };
  typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                        Object** pointer);
  static const int kInitialStringTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;
  static const int kInitialNumberStringCacheSize = 256;

  static const int kRememberedUnmappedPages = 128;

  static const StringTypeTable string_type_table[];
  static const ConstantStringTable constant_string_table[];
  static const StructTable struct_table[];

  static const int kYoungSurvivalRateHighThreshold = 90;
  static const int kYoungSurvivalRateAllowedDeviation = 15;
  static const int kOldSurvivalRateLowThreshold = 10;

  static const int kMaxMarkCompactsInIdleRound = 7;
  static const int kIdleScavengeThreshold = 5;

  static const int kAllocationSiteScratchpadSize = 256;
  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Heap* heap, Object** pointer);

  static void ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page,
                                          StoreBufferEvent event);

  // Selects the proper allocation space based on the pretenuring decision.
  static AllocationSpace SelectSpace(PretenureFlag pretenure) {
    return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
  }
#define ROOT_ACCESSOR(type, name, camel_name) \
  inline void set_##name(type* value);
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

  StoreBuffer* store_buffer() { return &store_buffer_; }
  void set_current_gc_flags(int flags) {
    current_gc_flags_ = flags;
    DCHECK(!ShouldFinalizeIncrementalMarking() ||
           !ShouldAbortIncrementalMarking());
  }

  inline bool ShouldReduceMemory() const {
    return current_gc_flags_ & kReduceMemoryFootprintMask;
  }

  inline bool ShouldAbortIncrementalMarking() const {
    return current_gc_flags_ & kAbortIncrementalMarkingMask;
  }

  inline bool ShouldFinalizeIncrementalMarking() const {
    return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
  }
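  // A minimal sketch of how these predicates relate to the flag bits
  // (illustrative only; kReduceMemoryFootprintMask and kNoGCFlags are assumed
  // to be the flag constants declared earlier in this class):
  //
  //   set_current_gc_flags(kReduceMemoryFootprintMask);
  //   DCHECK(ShouldReduceMemory());
  //   set_current_gc_flags(kNoGCFlags);
  //   DCHECK(!ShouldReduceMemory());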
  void PreprocessStackTraces();

  // Pretenuring decisions are made based on feedback collected during new
  // space evacuation. Note that between feedback collection and calling this
  // method, objects in old space must not move.
  // Right now we only process pretenuring feedback in high promotion mode.
  bool ProcessPretenuringFeedback();

  // Checks whether a global GC is necessary.
  GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                          const char** reason);

  // Make sure there is a filler value behind the top of the new space
  // so that the GC does not confuse some uninitialized/stale memory
  // with the allocation memento of the object at the top.
  void EnsureFillerObjectAtTop();

  // Ensure that we have swept all spaces in such a way that we can iterate
  // over all objects. May cause a GC.
  void MakeHeapIterable();
  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(
      GarbageCollector collector, const char* gc_reason,
      const char* collector_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs a garbage collection.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool PerformGarbageCollection(
      GarbageCollector collector,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
  inline void UpdateOldSpaceLimits();

  // Initializes a JSObject based on its map.
  void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
                                 Map* map);
  void InitializeAllocationMemento(AllocationMemento* memento,
                                   AllocationSite* allocation_site);
  bool CreateInitialMaps();
  void CreateInitialObjects();

  // These Create*EntryStub functions are here and forced to not be inlined
  // because of a gcc-4.4 bug that assigns wrong vtable entries.
  NO_INLINE(void CreateJSEntryStub());
  NO_INLINE(void CreateJSConstructEntryStub());

  void CreateFixedStubs();
  HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);

  // Commits from space if it is uncommitted.
  void EnsureFromSpaceIsCommitted();

  // Uncommit unused semi space.
  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }

  // Fill in bogus values in from space.
  void ZapFromSpace();
  // Deoptimizes all code that contains allocation instructions which are
  // tenured or not tenured. Moreover, it clears the pretenuring allocation
  // site statistics.
  void ResetAllAllocationSitesDependentCode(PretenureFlag flag);

  // Evaluates local pretenuring for the old space and calls
  // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
  // the old space.
  void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
  // Record statistics before and after garbage collection.
  void ReportStatisticsBeforeGC();
  void ReportStatisticsAfterGC();

  // Returns the length of the full-sized number string cache.
  int FullSizeNumberStringCacheLength();
  // Flush the number to string cache.
  void FlushNumberStringCache();

  // Sets used allocation sites entries to undefined.
  void FlushAllocationSitesScratchpad();

  // Initializes the allocation sites scratchpad with undefined values.
  void InitializeAllocationSitesScratchpad();

  // Adds an allocation site to the scratchpad if there is space left.
  void AddAllocationSiteToScratchpad(AllocationSite* site,
                                     ScratchpadSlotMode mode);

  // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
  // Re-visit incremental marking heuristics.
  bool IsHighSurvivalRate() { return high_survival_rate_period_length_ > 0; }
  void ConfigureInitialOldGenerationSize();

  bool HasLowYoungGenerationAllocationRate();
  bool HasLowOldGenerationAllocationRate();
  double YoungGenerationMutatorUtilization();
  double OldGenerationMutatorUtilization();

  void ReduceNewSpaceSize();

  bool TryFinalizeIdleIncrementalMarking(
      double idle_time_in_ms, size_t size_of_objects,
      size_t mark_compact_speed_in_bytes_per_ms);

  GCIdleTimeHeapState ComputeHeapState();

  bool PerformIdleTimeAction(GCIdleTimeAction action,
                             GCIdleTimeHeapState heap_state,
                             double deadline_in_ms);

  void IdleNotificationEpilogue(GCIdleTimeAction action,
                                GCIdleTimeHeapState heap_state, double start_ms,
                                double deadline_in_ms);
  void CheckAndNotifyBackgroundIdleNotification(double idle_time_in_ms,
                                                double now_ms);
  inline void UpdateAllocationsHash(HeapObject* object);
  inline void UpdateAllocationsHash(uint32_t value);
  void PrintAllocationsHash();

  void AddToRingBuffer(const char* string);
  void GetFromRingBuffer(char* buffer);
  // Attempt to over-approximate the weak closure by marking object groups and
  // implicit references from global handles, but don't atomically complete
  // marking. If we continue to mark incrementally, we might have marked
  // objects that die later.
  void OverApproximateWeakClosure(const char* gc_reason);
  // ===========================================================================
  // Actual GC. ================================================================
  // ===========================================================================

  // Code that should be run before and after each GC. Includes some
  // reporting/verification activities when compiled with DEBUG set.
  void GarbageCollectionPrologue();
  void GarbageCollectionEpilogue();

  // Performs a major collection in the whole heap.
  void MarkCompact();

  // Code to be run before and after mark-compact.
  void MarkCompactPrologue();
  void MarkCompactEpilogue();

  // Performs a minor collection in the new generation.
  void Scavenge();

  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);

  void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void UpdateReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
  void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
  void ProcessNativeContexts(WeakObjectRetainer* retainer);
  void ProcessAllocationSites(WeakObjectRetainer* retainer);
  // ===========================================================================
  // GC statistics. ============================================================
  // ===========================================================================

  inline intptr_t OldGenerationSpaceAvailable() {
    return old_generation_allocation_limit_ - PromotedTotalSize();
  }

  // Returns the maximum GC pause.
  double get_max_gc_pause() { return max_gc_pause_; }

  // Returns the maximum size of objects alive after GC.
  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns the minimal interval between two subsequent collections.
  double get_min_in_mutator() { return min_in_mutator_; }

  // Update GC statistics that are tracked on the Heap.
  void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
                                    double marking_time);

  bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
  // ===========================================================================
  // Growing strategy. =========================================================
  // ===========================================================================

  // Decrease the allocation limit if the new limit based on the given
  // parameters is lower than the current limit.
  void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
                                          double gc_speed,
                                          double mutator_speed);

  // Calculates the allocation limit based on a given growing factor and a
  // given old generation size.
  intptr_t CalculateOldGenerationAllocationLimit(double factor,
                                                 intptr_t old_gen_size);

  // Sets the allocation limit to trigger the next full garbage collection.
  void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
                                       double mutator_speed);
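  // Worked example (illustrative numbers, not tuned values): with a growing
  // factor of 1.5 and an old generation size of 100 MB after a full GC,
  // CalculateOldGenerationAllocationLimit would yield a limit of roughly
  // 150 MB, so the next full collection triggers once about 50 MB more have
  // been promoted. The actual factor is derived from gc_speed and
  // mutator_speed, and the result is clamped by the configured min/max sizes.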
  // ===========================================================================
  // Inline allocation. ========================================================
  // ===========================================================================

  void LowerInlineAllocationLimit(intptr_t step);
  void ResetInlineAllocationLimit();

  // ===========================================================================
  // Idle notification. ========================================================
  // ===========================================================================

  bool RecentIdleNotificationHappened();
  void ScheduleIdleScavengeIfNeeded(int bytes_allocated);
  // ===========================================================================
  // Allocation methods. =======================================================
  // ===========================================================================

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Optionally takes an AllocationSite to be appended in an AllocationMemento.
  MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
                                                AllocationSite* site = NULL);

  // Allocates a JS Map in the heap.
  MUST_USE_RESULT AllocationResult
  AllocateMap(InstanceType instance_type, int instance_size,
              ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // If allocation_site is non-null, then a memento is emitted after the object
  // that points to the site.
  MUST_USE_RESULT AllocationResult AllocateJSObject(
      JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED,
      AllocationSite* allocation_site = NULL);

  // Allocates and initializes a new JavaScript object based on a map.
  // Passing an allocation site means that a memento will be created that
  // points to the site.
  MUST_USE_RESULT AllocationResult
  AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure = NOT_TENURED,
                          AllocationSite* allocation_site = NULL);

  // Allocates a HeapNumber from value.
  MUST_USE_RESULT AllocationResult
  AllocateHeapNumber(double value, MutableMode mode = IMMUTABLE,
                     PretenureFlag pretenure = NOT_TENURED);

// Allocates SIMD values from the given lane values.
#define SIMD_ALLOCATE_DECLARATION(TYPE, Type, type, lane_count, lane_type) \
  AllocationResult Allocate##Type(lane_type lanes[lane_count],             \
                                  PretenureFlag pretenure = NOT_TENURED);
  SIMD128_TYPES(SIMD_ALLOCATE_DECLARATION)
#undef SIMD_ALLOCATE_DECLARATION
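  // For instance, for the Float32x4 entry of SIMD128_TYPES the macro above
  // expands to (illustrative expansion):
  //
  //   AllocationResult AllocateFloat32x4(
  //       float lanes[4], PretenureFlag pretenure = NOT_TENURED);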
  // Allocates a byte array of the specified length.
  MUST_USE_RESULT AllocationResult
  AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a bytecode array with the given contents.
  MUST_USE_RESULT AllocationResult
  AllocateBytecodeArray(int length, const byte* raw_bytecodes, int frame_size,
                        int parameter_count, FixedArray* constant_pool);

  // Copy the code and scope info part of the code object, but insert
  // the provided data as the relocation information.
  MUST_USE_RESULT AllocationResult CopyCode(Code* code,
                                            Vector<byte> reloc_info);

  MUST_USE_RESULT AllocationResult CopyCode(Code* code);

  // Allocates a fixed array initialized with undefined values.
  MUST_USE_RESULT AllocationResult
  AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate an uninitialized object. The memory is non-executable if the
  // hardware and OS allow. This is the single choke-point for allocations
  // performed by the runtime and should not be bypassed (to extend this to
  // inlined allocations, use the Heap::DisableInlineAllocation() support).
  MUST_USE_RESULT inline AllocationResult AllocateRaw(
      int size_in_bytes, AllocationSpace space,
      AllocationAlignment alignment = kWordAligned);
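  // Typical call pattern inside the runtime allocators (a sketch under the
  // usual AllocationResult conventions, not a definitive implementation;
  // `result` is a local name of ours):
  //
  //   AllocationResult allocation = AllocateRaw(size_in_bytes, OLD_SPACE);
  //   HeapObject* result = NULL;
  //   if (!allocation.To(&result)) return allocation;  // Propagate failure.
  //   // ... set the object's map before the next allocation can happen.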
  // Allocates a heap object based on the map.
  MUST_USE_RESULT AllocationResult
  Allocate(Map* map, AllocationSpace space,
           AllocationSite* allocation_site = NULL);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT AllocationResult
  AllocatePartialMap(InstanceType instance_type, int instance_size);

  // Allocate a block of memory in the given space (filled with a filler).
  // Used as a fall-back for generated code when the space is full.
  MUST_USE_RESULT AllocationResult
  AllocateFillerObject(int size, bool double_align, AllocationSpace space);

  // Allocate an uninitialized fixed array.
  MUST_USE_RESULT AllocationResult
  AllocateRawFixedArray(int length, PretenureFlag pretenure);

  // Allocate an uninitialized fixed double array.
  MUST_USE_RESULT AllocationResult
  AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);

  // Allocate an initialized fixed array with the given filler value.
  MUST_USE_RESULT AllocationResult
  AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
                               Object* filler);

  // Allocates and partially initializes a String. There are two String
  // encodings: one-byte and two-byte. These functions allocate a string of
  // the given length and set its map and length fields. The characters of
  // the string are uninitialized.
  MUST_USE_RESULT AllocationResult
  AllocateRawOneByteString(int length, PretenureFlag pretenure);
  MUST_USE_RESULT AllocationResult
  AllocateRawTwoByteString(int length, PretenureFlag pretenure);
  // Allocates an internalized string in old space based on the character
  // stream.
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
      Vector<const char> str, int chars, uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
      Vector<const uint8_t> str, uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
      Vector<const uc16> str, uint32_t hash_field);

  template <bool is_one_byte, typename T>
  MUST_USE_RESULT AllocationResult
  AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);

  template <typename T>
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
      T t, int chars, uint32_t hash_field);
  // Allocates an uninitialized fixed array. It must be filled by the caller.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);

  // Make a copy of src and return it.
  MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);

  // Make a copy of src, also grow the copy, and return the copy.
  MUST_USE_RESULT AllocationResult
  CopyFixedArrayAndGrow(FixedArray* src, int grow_by, PretenureFlag pretenure);

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult
  CopyFixedArrayWithMap(FixedArray* src, Map* map);

  // Make a copy of src and return it.
  MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
      FixedDoubleArray* src);

  // Computes a single character string where the character has the given
  // code. A cache is used for one-byte (Latin1) codes.
  MUST_USE_RESULT AllocationResult
  LookupSingleCharacterStringFromCode(uint16_t code);

  // Allocate a symbol in old space.
  MUST_USE_RESULT AllocationResult AllocateSymbol();

  // Allocates an external array of the specified length and type.
  MUST_USE_RESULT AllocationResult AllocateFixedTypedArrayWithExternalPointer(
      int length, ExternalArrayType array_type, void* external_pointer,
      PretenureFlag pretenure);

  // Allocates a fixed typed array of the specified length and type.
  MUST_USE_RESULT AllocationResult
  AllocateFixedTypedArray(int length, ExternalArrayType array_type,
                          bool initialize, PretenureFlag pretenure);

  // Make a tenured copy of src and return it.
  MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult
  CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, Map* map);

  // Allocates a fixed double array with uninitialized values. Returns
  // an empty fixed array if length == 0.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
      int length, PretenureFlag pretenure = NOT_TENURED);
  // Allocate empty fixed array.
  MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();

  // Allocate empty fixed typed array of given type.
  MUST_USE_RESULT AllocationResult
  AllocateEmptyFixedTypedArray(ExternalArrayType array_type);

  // Allocate a tenured simple cell.
  MUST_USE_RESULT AllocationResult AllocateCell(Object* value);

  // Allocate a tenured JS global property cell initialized with the hole.
  MUST_USE_RESULT AllocationResult AllocatePropertyCell();

  MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value);

  // Allocates a new utility object in the old generation.
  MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);

  // Allocates a new foreign object.
  MUST_USE_RESULT AllocationResult
  AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT AllocationResult
  AllocateCode(int object_size, bool immovable);

  MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key);

  MUST_USE_RESULT AllocationResult InternalizeString(String* str);
  // The amount of external memory registered through the API kept alive
  // by global handles.
  int64_t amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  int64_t amount_of_external_allocated_memory_at_last_global_gc_;

  // This can be calculated directly from a pointer to the heap; however, it is
  // more expedient to get at the isolate directly from within Heap methods.
  Isolate* isolate_;

  Object* roots_[kRootListLength];
  size_t code_range_size_;
  int reserved_semispace_size_;
  int max_semi_space_size_;
  int initial_semispace_size_;
  int target_semispace_size_;
  intptr_t max_old_generation_size_;
  intptr_t initial_old_generation_size_;
  bool old_generation_size_configured_;
  intptr_t max_executable_size_;
  intptr_t maximum_committed_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  int survived_since_last_expansion_;

  // ... and since the last scavenge.
  int survived_last_scavenge_;

  // This is not the depth of nested AlwaysAllocateScope's but rather a single
  // count, as scopes can be acquired from multiple tasks (read: threads).
  AtomicNumber<size_t> always_allocate_scope_count_;

  // For keeping track of context disposals.
  int contexts_disposed_;

  int scan_on_scavenge_pages_;
  NewSpace new_space_;
  OldSpace* old_space_;
  OldSpace* code_space_;
  MapSpace* map_space_;
  LargeObjectSpace* lo_space_;
  HeapState gc_state_;
  int gc_post_processing_depth_;
  Address new_space_top_after_last_gc_;

  // Returns the amount of external memory registered since the last global gc.
  int64_t PromotedExternalMemorySize();
  // How many "runtime allocations" happened.
  uint32_t allocations_count_;

  // Running hash over allocations performed.
  uint32_t raw_allocations_hash_;

  // Countdown counter, dumps allocation hash when 0.
  uint32_t dump_allocations_hash_countdown_;

  // How many mark-sweep collections happened.
  unsigned int ms_count_;

  // How many GCs happened.
  unsigned int gc_count_;

  // For post mortem debugging.
  int remembered_unmapped_pages_index_;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];
#ifdef DEBUG
  // If the --gc-interval flag is set to a positive value, this
  // variable holds the number of allocations remaining until the
  // next failure and garbage collection.
  int allocation_timeout_;
#endif  // DEBUG
  // Limit that triggers a global GC on the next (normally caused) GC. This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke, before expanding a paged space in the old
  // generation and on every allocation in large object space.
  intptr_t old_generation_allocation_limit_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  bool old_gen_exhausted_;

  // Indicates that memory usage is more important than latency.
  // TODO(ulan): Merge it with memory reducer once chromium:490559 is fixed.
  bool optimize_for_memory_usage_;

  // Indicates that inline bump-pointer allocation has been globally disabled
  // for all spaces. This is used to disable allocations in generated code.
  bool inline_allocation_disabled_;
  // Weak list heads, threaded through the objects.
  // List heads are initialized lazily and contain the undefined_value at
  // start.
  Object* native_contexts_list_;
  Object* allocation_sites_list_;

  // List of encountered weak collections (JSWeakMap and JSWeakSet) during
  // marking. It is initialized during marking, destroyed after marking and
  // contains Smi(0) while marking is not active.
  Object* encountered_weak_collections_;

  Object* encountered_weak_cells_;

  StoreBufferRebuilder store_buffer_rebuilder_;

  List<GCCallbackPair> gc_epilogue_callbacks_;
  List<GCCallbackPair> gc_prologue_callbacks_;

  // Total RegExp code ever generated.
  double total_regexp_code_generated_;

  int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];
  int high_survival_rate_period_length_;
  intptr_t promoted_objects_size_;
  double promotion_ratio_;
  double promotion_rate_;
  intptr_t semi_space_copied_object_size_;
  intptr_t previous_semi_space_copied_object_size_;
  double semi_space_copied_rate_;
  int nodes_died_in_new_space_;
  int nodes_copied_in_new_space_;
  int nodes_promoted_;

  // This is the pretenuring trigger for allocation sites that are in maybe
  // tenure state. When we switch to the maximum new space size, we deoptimize
  // the code that belongs to the allocation site and derive the lifetime
  // of the allocation site.
  unsigned int maximum_size_scavenges_;
  // Maximum GC pause.
  double max_gc_pause_;

  // Total time spent in GC.
  double total_gc_time_ms_;

  // Maximum size of objects alive after GC.
  intptr_t max_alive_after_gc_;

  // Minimal interval between two subsequent collections.
  double min_in_mutator_;

  // Cumulative GC time spent in marking.
  double marking_time_;

  // Cumulative GC time spent in sweeping.
  double sweeping_time_;

  // Last time an idle notification happened.
  double last_idle_notification_time_;

  // Last time a garbage collection happened.
  double last_gc_time_;
  Scavenger* scavenge_collector_;

  MarkCompactCollector* mark_compact_collector_;

  StoreBuffer store_buffer_;

  IncrementalMarking* incremental_marking_;

  GCIdleTimeHandler* gc_idle_time_handler_;

  MemoryReducer* memory_reducer_;

  ObjectStats* object_stats_;

  ScavengeJob* scavenge_job_;
  // These two counters are monotonically increasing and never reset.
  size_t full_codegen_bytes_generated_;
  size_t crankshaft_codegen_bytes_generated_;
  // This counter is increased before each GC and never reset.
  // To account for the bytes allocated since the last GC, use the
  // NewSpaceAllocationCounter() function.
  size_t new_space_allocation_counter_;

  // This counter is increased before each GC and never reset. To
  // account for the bytes allocated since the last GC, use the
  // OldGenerationAllocationCounter() function.
  size_t old_generation_allocation_counter_;

  // The size of objects in old generation after the last MarkCompact GC.
  size_t old_generation_size_at_last_gc_;
  // If the --deopt_every_n_garbage_collections flag is set to a positive
  // value, this variable holds the number of garbage collections since the
  // last deoptimization triggered by garbage collection.
  int gcs_since_last_deopt_;

  int allocation_sites_scratchpad_length_;
  char trace_ring_buffer_[kTraceRingBufferSize];
  // If it's not full then the data is from 0 to ring_buffer_end_. If it's
  // full then the data is from ring_buffer_end_ to the end of the buffer and
  // from 0 to ring_buffer_end_.
  bool ring_buffer_full_;
  size_t ring_buffer_end_;
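  // Reconstruction sketch (illustrative): if kTraceRingBufferSize were 8 and
  // "abcdefghij" had been written, the buffer would hold "ijcdefgh" with
  // ring_buffer_end_ == 2 and ring_buffer_full_ == true, so a reader emits
  // indices 2..7 ("cdefgh") followed by indices 0..1 ("ij").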
  // Shared state read by the scavenge collector and set by ScavengeObject.
  PromotionQueue promotion_queue_;

  // Flag is set when the heap has been configured. The heap can be repeatedly
  // configured through the API until it is set up.
  bool configured_;

  // Currently set GC flags that are respected by all GC components.
  int current_gc_flags_;
  // Currently set GC callback flags that are used to pass information between
  // the embedder and V8's GC.
  GCCallbackFlags current_gc_callback_flags_;

  ExternalStringTable external_string_table_;

  MemoryChunk* chunks_queued_for_free_;

  size_t concurrent_unmapping_tasks_active_;

  base::Semaphore pending_unmapping_tasks_semaphore_;

  base::Mutex relocation_mutex_;

  int gc_callbacks_depth_;

  bool deserialization_complete_;

  bool concurrent_sweeping_enabled_;

  StrongRootsList* strong_roots_list_;

  ArrayBufferTracker* array_buffer_tracker_;
  // Classes in "heap" can be friends.
  friend class AlwaysAllocateScope;
  friend class GCCallbacksScope;
  friend class GCTracer;
  friend class HeapIterator;
  friend class IncrementalMarking;
  friend class MarkCompactCollector;
  friend class MarkCompactMarkingVisitor;
  friend class NewSpace;
  friend class ObjectStatsVisitor;
  friend class Scavenger;
  friend class StoreBuffer;

  // The allocator interface.
  friend class Factory;

  // The Isolate constructs us.
  friend class Isolate;

  // Used in cctest.
  friend class HeapTester;

  DISALLOW_COPY_AND_ASSIGN(Heap);
};
class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  int* start_marker;                    //  0
  int* new_space_size;                  //  1
  int* new_space_capacity;              //  2
  intptr_t* old_space_size;             //  3
  intptr_t* old_space_capacity;         //  4
  intptr_t* code_space_size;            //  5
  intptr_t* code_space_capacity;        //  6
  intptr_t* map_space_size;             //  7
  intptr_t* map_space_capacity;         //  8
  intptr_t* lo_space_size;              //  9
  int* global_handle_count;             // 10
  int* weak_global_handle_count;        // 11
  int* pending_global_handle_count;     // 12
  int* near_death_global_handle_count;  // 13
  int* free_global_handle_count;        // 14
  intptr_t* memory_allocator_size;      // 15
  intptr_t* memory_allocator_capacity;  // 16
  int* objects_per_type;                // 17
  int* size_per_type;                   // 18
  int* os_error;                        // 19
  char* last_few_messages;              // 20
  char* js_stacktrace;                  // 21
  int* end_marker;                      // 22
};
class AlwaysAllocateScope {
 public:
  explicit inline AlwaysAllocateScope(Isolate* isolate);
  inline ~AlwaysAllocateScope();
};
// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
// Caveat: Heap::Contains is an approximation because it can return true for
// objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor : public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end);
};
// Verify that all objects are Smis.
class VerifySmisVisitor : public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end);
};
// Space iterator for iterating over all spaces of the heap. Returns each space
// in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
  Space* next();

 private:
  Heap* heap_;
  int counter_;
};
// Space iterator for iterating over all old spaces of the heap: Old space
// and code space. Returns each space in turn, and null when it is done.
class OldSpaces BASE_EMBEDDED {
 public:
  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  OldSpace* next();

 private:
  Heap* heap_;
  int counter_;
};
// Space iterator for iterating over all the paged spaces of the heap: Map
// space, old space and code space. Returns each space in turn, and null when
// it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  PagedSpace* next();

 private:
  Heap* heap_;
  int counter_;
};
// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  explicit SpaceIterator(Heap* heap);
  virtual ~SpaceIterator();

  bool has_next();
  ObjectIterator* next();

 private:
  ObjectIterator* CreateIterator();

  Heap* heap_;
  int current_space_;         // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
};
// A HeapIterator provides iteration over the whole heap. It
// aggregates the specific iterators for the different spaces as
// each of those can only iterate over a single space.
//
// HeapIterator ensures there is no allocation during its lifetime
// (using an embedded DisallowHeapAllocation instance).
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). As implementation of free
// nodes filtering uses GC marks, it can't be used during MS/MC GC
// phases. Also, it is forbidden to interrupt iteration in this mode,
// as this will leave heap objects marked (and thus, unusable).
class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };

  explicit HeapIterator(Heap* heap,
                        HeapObjectsFiltering filtering = kNoFiltering);
  ~HeapIterator();

  HeapObject* next();

 private:
  struct MakeHeapIterableHelper {
    explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
  };

  HeapObject* NextObject();

  // The following two fields need to be declared in this order. Initialization
  // order guarantees that we first make the heap iterable (which may involve
  // allocations) and only then lock it down by not allowing further
  // allocations.
  MakeHeapIterableHelper make_heap_iterable_helper_;
  DisallowHeapAllocation no_heap_allocation_;

  Heap* heap_;
  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
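// Usage sketch (illustrative; relies on the public next() accessor declared
// above):
//
//   HeapIterator iterator(heap);
//   for (HeapObject* obj = iterator.next(); obj != NULL;
//        obj = iterator.next()) {
//     // Inspect obj; no heap allocation is possible in this scope.
//   }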
// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  int Lookup(Handle<Map> map, Handle<Name> name);

  // Update an element in the cache.
  void Update(Handle<Map> map, Handle<Name> name, int field_offset);

  // Clear the cache.
  void Clear();

  static const int kLength = 256;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 5;
  static const int kHashMask = -4;  // Zero the last two bits.
  static const int kEntriesPerBucket = 4;
  static const int kEntryLength = 2;
  static const int kMapIndex = 0;
  static const int kKeyIndex = 1;
  static const int kNotFound = -1;

  // kEntriesPerBucket should be a power of 2.
  STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
  STATIC_ASSERT(kEntriesPerBucket == -kHashMask);
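  // Illustrative bucket math: kHashMask == -4 clears the two low bits of a
  // hash, so a masked index is always a multiple of kEntriesPerBucket == 4
  // (e.g. 37 & kHashMask == 36), and a lookup probes the four consecutive
  // entries of that bucket (36..39) before reporting kNotFound.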
 private:
  KeyedLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].map = NULL;
      keys_[i].name = NULL;
      field_offsets_[i] = kNotFound;
    }
  }

  static inline int Hash(Handle<Map> map, Handle<Name> name);

  // Get the address of the keys and field_offsets arrays. Used in
  // generated code to perform cache lookups.
  Address keys_address() { return reinterpret_cast<Address>(&keys_); }

  Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    Name* name;
  };

  Key keys_[kLength];
  int field_offsets_[kLength];

  friend class ExternalReference;
  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
};
// Cache for mapping (map, property name) into descriptor index.
// The cache contains both positive and negative results.
// A descriptor index of kAbsent means the property is absent.
// Cleared at startup and prior to any gc.
class DescriptorLookupCache {
 public:
  // Lookup descriptor index for (map, name).
  // If absent, kAbsent is returned.
  inline int Lookup(Map* source, Name* name);

  // Update an element in the cache.
  inline void Update(Map* source, Name* name, int result);

  // Clear the cache.
  void Clear();

  static const int kAbsent = -2;

 private:
  DescriptorLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].source = NULL;
      keys_[i].name = NULL;
      results_[i] = kAbsent;
    }
  }

  static int Hash(Object* source, Name* name) {
    // Uses only lower 32 bits if pointers are larger.
    uint32_t source_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
        kPointerSizeLog2;
    uint32_t name_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >>
        kPointerSizeLog2;
    return (source_hash ^ name_hash) % kLength;
  }

  static const int kLength = 64;
  struct Key {
    Map* source;
    Name* name;
  };

  Key keys_[kLength];
  int results_[kLength];

  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
};
// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Return whether this object should be retained. If NULL is returned the
  // object has no references. Otherwise the address of the retained object
  // should be returned as in some GC situations the object has been moved.
  virtual Object* RetainAs(Object* object) = 0;
};
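// A minimal sketch of a concrete retainer (illustrative only; the
// ExampleUnconditionalRetainer name is ours, not a class in this codebase):
//
//   class ExampleUnconditionalRetainer : public WeakObjectRetainer {
//    public:
//     virtual Object* RetainAs(Object* object) { return object; }
//   };
//
// Returning `object` keeps every element alive; a real retainer returns NULL
// for dead objects, or the forwarding address of objects moved by the GC.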
// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
 public:
  enum WhatToFind {
    FIND_ALL,   // Will find all matches.
    FIND_FIRST  // Will stop the search after first match.
  };

  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
  static const int kMarkTag = 2;

  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
  // after the first match. If FIND_ALL is specified, then tracing will be
  // done for all matches.
  PathTracer(Object* search_target, WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
        object_stack_(20) {}

  virtual void VisitPointers(Object** start, Object** end);

  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

 protected:
  class MarkVisitor;
  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  Object* search_target_;
  bool found_target_;
  bool found_target_in_trace_;
  WhatToFind what_to_find_;
  VisitMode visit_mode_;
  List<Object*> object_stack_;
  DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
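// Usage sketch (illustrative; VISIT_ALL is assumed to be a VisitMode defined
// elsewhere in this codebase):
//
//   PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);
//   tracer.TracePathFrom(&root);  // `root` is an Object* slot to start from.
//   if (tracer.found()) {
//     // A retaining path from `root` to `target` was discovered.
//   }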
}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_H_