// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/counters.h"
#include "src/globals.h"
#include "src/incremental-marking.h"
#include "src/mark-compact.h"
#include "src/objects-visiting.h"
#include "src/spaces.h"
#include "src/splay-tree-inl.h"
#include "src/store-buffer.h"

// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V) \
  V(Map, byte_array_map, ByteArrayMap) \
  V(Map, free_space_map, FreeSpaceMap) \
  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
  /* Cluster the most popular ones in a few cache lines here at the top. */ \
  V(Smi, store_buffer_top, StoreBufferTop) \
  V(Oddball, undefined_value, UndefinedValue) \
  V(Oddball, the_hole_value, TheHoleValue) \
  V(Oddball, null_value, NullValue) \
  V(Oddball, true_value, TrueValue) \
  V(Oddball, false_value, FalseValue) \
  V(Oddball, uninitialized_value, UninitializedValue) \
  V(Oddball, exception, Exception) \
  V(Map, cell_map, CellMap) \
  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
  V(Map, meta_map, MetaMap) \
  V(Map, heap_number_map, HeapNumberMap) \
  V(Map, native_context_map, NativeContextMap) \
  V(Map, fixed_array_map, FixedArrayMap) \
  V(Map, code_map, CodeMap) \
  V(Map, scope_info_map, ScopeInfoMap) \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
  V(Map, constant_pool_array_map, ConstantPoolArrayMap) \
  V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
  V(Map, hash_table_map, HashTableMap) \
  V(Map, ordered_hash_table_map, OrderedHashTableMap) \
  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
  V(ByteArray, empty_byte_array, EmptyByteArray) \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
  V(ConstantPoolArray, empty_constant_pool_array, EmptyConstantPoolArray) \
  V(Oddball, arguments_marker, ArgumentsMarker) \
  /* The roots above this line should be boring from a GC point of view. */ \
  /* This means they are never in new space and never on a page that is */ \
  /* being compacted. */ \
  V(FixedArray, number_string_cache, NumberStringCache) \
  V(Object, instanceof_cache_function, InstanceofCacheFunction) \
  V(Object, instanceof_cache_map, InstanceofCacheMap) \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
  V(FixedArray, string_split_cache, StringSplitCache) \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
  V(Oddball, termination_exception, TerminationException) \
  V(Smi, hash_seed, HashSeed) \
  V(Map, symbol_map, SymbolMap) \
  V(Map, string_map, StringMap) \
  V(Map, ascii_string_map, AsciiStringMap) \
  V(Map, cons_string_map, ConsStringMap) \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
  V(Map, sliced_string_map, SlicedStringMap) \
  V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \
  V(Map, external_string_map, ExternalStringMap) \
  V(Map, external_string_with_one_byte_data_map, \
    ExternalStringWithOneByteDataMap) \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
  V(Map, short_external_string_map, ShortExternalStringMap) \
  V(Map, short_external_string_with_one_byte_data_map, \
    ShortExternalStringWithOneByteDataMap) \
  V(Map, internalized_string_map, InternalizedStringMap) \
  V(Map, ascii_internalized_string_map, AsciiInternalizedStringMap) \
  V(Map, external_internalized_string_map, \
    ExternalInternalizedStringMap) \
  V(Map, external_internalized_string_with_one_byte_data_map, \
    ExternalInternalizedStringWithOneByteDataMap) \
  V(Map, external_ascii_internalized_string_map, \
    ExternalAsciiInternalizedStringMap) \
  V(Map, short_external_internalized_string_map, \
    ShortExternalInternalizedStringMap) \
  V(Map, short_external_internalized_string_with_one_byte_data_map, \
    ShortExternalInternalizedStringWithOneByteDataMap) \
  V(Map, short_external_ascii_internalized_string_map, \
    ShortExternalAsciiInternalizedStringMap) \
  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \
  V(Map, undetectable_string_map, UndetectableStringMap) \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \
  V(Map, external_int8_array_map, ExternalInt8ArrayMap) \
  V(Map, external_uint8_array_map, ExternalUint8ArrayMap) \
  V(Map, external_int16_array_map, ExternalInt16ArrayMap) \
  V(Map, external_uint16_array_map, ExternalUint16ArrayMap) \
  V(Map, external_int32_array_map, ExternalInt32ArrayMap) \
  V(Map, external_int32x4_array_map, ExternalInt32x4ArrayMap) \
  V(Map, external_uint32_array_map, ExternalUint32ArrayMap) \
  V(Map, external_float32_array_map, ExternalFloat32ArrayMap) \
  V(Map, external_float32x4_array_map, ExternalFloat32x4ArrayMap) \
  V(Map, external_float64x2_array_map, ExternalFloat64x2ArrayMap) \
  V(Map, external_float64_array_map, ExternalFloat64ArrayMap) \
  V(Map, external_uint8_clamped_array_map, ExternalUint8ClampedArrayMap) \
  V(ExternalArray, empty_external_int8_array, EmptyExternalInt8Array) \
  V(ExternalArray, empty_external_uint8_array, EmptyExternalUint8Array) \
  V(ExternalArray, empty_external_int16_array, EmptyExternalInt16Array) \
  V(ExternalArray, empty_external_uint16_array, EmptyExternalUint16Array) \
  V(ExternalArray, empty_external_int32_array, EmptyExternalInt32Array) \
  V(ExternalArray, empty_external_int32x4_array, EmptyExternalInt32x4Array) \
  V(ExternalArray, empty_external_uint32_array, EmptyExternalUint32Array) \
  V(ExternalArray, empty_external_float32_array, EmptyExternalFloat32Array) \
  V(ExternalArray, empty_external_float32x4_array, \
    EmptyExternalFloat32x4Array) \
  V(ExternalArray, empty_external_float64x2_array, \
    EmptyExternalFloat64x2Array) \
  V(ExternalArray, empty_external_float64_array, EmptyExternalFloat64Array) \
  V(ExternalArray, empty_external_uint8_clamped_array, \
    EmptyExternalUint8ClampedArray) \
  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap) \
  V(Map, fixed_int8_array_map, FixedInt8ArrayMap) \
  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap) \
  V(Map, fixed_int16_array_map, FixedInt16ArrayMap) \
  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap) \
  V(Map, fixed_int32_array_map, FixedInt32ArrayMap) \
  V(Map, fixed_int32x4_array_map, FixedInt32x4ArrayMap) \
  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap) \
  V(Map, fixed_float32x4_array_map, FixedFloat32x4ArrayMap) \
  V(Map, fixed_float64x2_array_map, FixedFloat64x2ArrayMap) \
  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap) \
  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap) \
  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array) \
  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array) \
  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array) \
  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array) \
  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array) \
  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array) \
  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array) \
  V(FixedTypedArrayBase, empty_fixed_float32x4_array, \
    EmptyFixedFloat32x4Array) \
  V(FixedTypedArrayBase, empty_fixed_float64x2_array, \
    EmptyFixedFloat64x2Array) \
  V(FixedTypedArrayBase, empty_fixed_int32x4_array, EmptyFixedInt32x4Array) \
  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array) \
  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array, \
    EmptyFixedUint8ClampedArray) \
  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap) \
  V(Map, function_context_map, FunctionContextMap) \
  V(Map, catch_context_map, CatchContextMap) \
  V(Map, with_context_map, WithContextMap) \
  V(Map, block_context_map, BlockContextMap) \
  V(Map, module_context_map, ModuleContextMap) \
  V(Map, global_context_map, GlobalContextMap) \
  V(Map, undefined_map, UndefinedMap) \
  V(Map, the_hole_map, TheHoleMap) \
  V(Map, null_map, NullMap) \
  V(Map, boolean_map, BooleanMap) \
  V(Map, uninitialized_map, UninitializedMap) \
  V(Map, arguments_marker_map, ArgumentsMarkerMap) \
  V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap) \
  V(Map, exception_map, ExceptionMap) \
  V(Map, termination_exception_map, TerminationExceptionMap) \
  V(Map, message_object_map, JSMessageObjectMap) \
  V(Map, foreign_map, ForeignMap) \
  V(HeapNumber, nan_value, NanValue) \
  V(HeapNumber, infinity_value, InfinityValue) \
  V(HeapNumber, minus_zero_value, MinusZeroValue) \
  V(Map, neander_map, NeanderMap) \
  V(JSObject, message_listeners, MessageListeners) \
  V(UnseededNumberDictionary, code_stubs, CodeStubs) \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
  V(Code, js_entry_code, JsEntryCode) \
  V(Code, js_construct_entry_code, JsConstructEntryCode) \
  V(FixedArray, natives_source_cache, NativesSourceCache) \
  V(Script, empty_script, EmptyScript) \
  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
  V(Cell, undefined_cell, UndefinedCell) \
  V(JSObject, observation_state, ObservationState) \
  V(Map, external_map, ExternalMap) \
  V(Object, symbol_registry, SymbolRegistry) \
  V(Symbol, frozen_symbol, FrozenSymbol) \
  V(Symbol, nonexistent_symbol, NonExistentSymbol) \
  V(Symbol, elements_transition_symbol, ElementsTransitionSymbol) \
  V(SeededNumberDictionary, empty_slow_element_dictionary, \
    EmptySlowElementDictionary) \
  V(Symbol, observed_symbol, ObservedSymbol) \
  V(Symbol, uninitialized_symbol, UninitializedSymbol) \
  V(Symbol, megamorphic_symbol, MegamorphicSymbol) \
  V(FixedArray, materialized_objects, MaterializedObjects) \
  V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
  V(FixedArray, microtask_queue, MicrotaskQueue)

// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V) \
  V(Smi, stack_limit, StackLimit) \
  V(Smi, real_stack_limit, RealStackLimit) \
  V(Smi, last_script_id, LastScriptId) \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)

#define ROOT_LIST(V) \
  STRONG_ROOT_LIST(V) \
  SMI_ROOT_LIST(V) \
  V(StringTable, string_table, StringTable)
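
// Illustrative note (not part of the original header): ROOT_LIST is an
// X-macro. A client defines V to stamp out one declaration per root and then
// applies the list. A minimal sketch of the pattern, using a hypothetical
// client macro:
//
//   #define DECLARE_ROOT_INDEX(type, name, camel_name) \
//       k##camel_name##RootIndex,
//   enum Index { ROOT_LIST(DECLARE_ROOT_INDEX) kLength };
//   #undef DECLARE_ROOT_INDEX
//
// Each entry such as V(Oddball, undefined_value, UndefinedValue) then
// contributes kUndefinedValueRootIndex; the real RootListIndex enum further
// down in this header is generated exactly this way.
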
// Heap roots that are known to be immortal immovable, for which we can safely
// skip write barriers.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
  V(one_pointer_filler_map) \
  V(two_pointer_filler_map) \
  V(uninitialized_value) \
  V(global_property_cell_map) \
  V(shared_function_info_map) \
  V(native_context_map) \
  V(fixed_cow_array_map) \
  V(fixed_double_array_map) \
  V(constant_pool_array_map) \
  V(no_interceptor_result_sentinel) \
  V(ordered_hash_table_map) \
  V(empty_fixed_array) \
  V(empty_byte_array) \
  V(empty_descriptor_array) \
  V(empty_constant_pool_array) \
  V(arguments_marker) \
  V(sloppy_arguments_elements_map) \
  V(function_context_map) \
  V(catch_context_map) \
  V(with_context_map) \
  V(block_context_map) \
  V(module_context_map) \
  V(global_context_map) \
  V(uninitialized_map) \
  V(message_object_map)

#define INTERNALIZED_STRING_LIST(V) \
  V(Array_string, "Array") \
  V(Object_string, "Object") \
  V(proto_string, "__proto__") \
  V(arguments_string, "arguments") \
  V(Arguments_string, "Arguments") \
  V(call_string, "call") \
  V(apply_string, "apply") \
  V(caller_string, "caller") \
  V(boolean_string, "boolean") \
  V(Boolean_string, "Boolean") \
  V(callee_string, "callee") \
  V(constructor_string, "constructor") \
  V(dot_result_string, ".result") \
  V(dot_for_string, ".for.") \
  V(dot_iterable_string, ".iterable") \
  V(dot_iterator_string, ".iterator") \
  V(dot_generator_object_string, ".generator_object") \
  V(eval_string, "eval") \
  V(empty_string, "") \
  V(function_string, "function") \
  V(length_string, "length") \
  V(module_string, "module") \
  V(name_string, "name") \
  V(native_string, "native") \
  V(null_string, "null") \
  V(number_string, "number") \
  V(Number_string, "Number") \
  V(float32x4_string, "float32x4") \
  V(float64x2_string, "float64x2") \
  V(int32x4_string, "int32x4") \
  V(nan_string, "NaN") \
  V(RegExp_string, "RegExp") \
  V(source_string, "source") \
  V(global_string, "global") \
  V(ignore_case_string, "ignoreCase") \
  V(multiline_string, "multiline") \
  V(input_string, "input") \
  V(index_string, "index") \
  V(last_index_string, "lastIndex") \
  V(object_string, "object") \
  V(literals_string, "literals") \
  V(prototype_string, "prototype") \
  V(string_string, "string") \
  V(String_string, "String") \
  V(symbol_string, "symbol") \
  V(Symbol_string, "Symbol") \
  V(for_string, "for") \
  V(for_api_string, "for_api") \
  V(for_intern_string, "for_intern") \
  V(private_api_string, "private_api") \
  V(private_intern_string, "private_intern") \
  V(Date_string, "Date") \
  V(this_string, "this") \
  V(to_string_string, "toString") \
  V(char_at_string, "CharAt") \
  V(undefined_string, "undefined") \
  V(value_of_string, "valueOf") \
  V(stack_string, "stack") \
  V(toJSON_string, "toJSON") \
  V(InitializeVarGlobal_string, "InitializeVarGlobal") \
  V(InitializeConstGlobal_string, "InitializeConstGlobal") \
  V(KeyedLoadElementMonomorphic_string, "KeyedLoadElementMonomorphic") \
  V(KeyedStoreElementMonomorphic_string, "KeyedStoreElementMonomorphic") \
  V(stack_overflow_string, "kStackOverflowBoilerplate") \
  V(illegal_access_string, "illegal access") \
  V(get_string, "get") \
  V(set_string, "set") \
  V(map_field_string, "%map") \
  V(elements_field_string, "%elements") \
  V(length_field_string, "%length") \
  V(cell_value_string, "%cell_value") \
  V(function_class_string, "Function") \
  V(illegal_argument_string, "illegal argument") \
  V(MakeReferenceError_string, "MakeReferenceError") \
  V(MakeSyntaxError_string, "MakeSyntaxError") \
  V(MakeTypeError_string, "MakeTypeError") \
  V(unknown_label_string, "unknown_label") \
  V(space_string, " ") \
  V(exec_string, "exec") \
  V(zero_string, "0") \
  V(global_eval_string, "GlobalEval") \
  V(identity_hash_string, "v8::IdentityHash") \
  V(closure_string, "(closure)") \
  V(use_strict_string, "use strict") \
  V(anonymous_function_string, "(anonymous function)") \
  V(compare_ic_string, "==") \
  V(strict_compare_ic_string, "===") \
  V(infinity_string, "Infinity") \
  V(minus_infinity_string, "-Infinity") \
  V(hidden_stack_trace_string, "v8::hidden_stack_trace") \
  V(query_colon_string, "(?:)") \
  V(Generator_string, "Generator") \
  V(throw_string, "throw") \
  V(done_string, "done") \
  V(value_string, "value") \
  V(signMask, "signMask") \
  V(next_string, "next") \
  V(byte_length_string, "byteLength") \
  V(byte_offset_string, "byteOffset") \
  V(buffer_string, "buffer") \
  V(intl_initialized_marker_string, "v8::intl_initialized_marker") \
  V(intl_impl_object_string, "v8::intl_object")

// Forward declarations.
class WeakObjectRetainer;

typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                      Object** pointer);

class StoreBufferRebuilder {
  explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer) {
  }

  void Callback(MemoryChunk* page, StoreBufferEvent event);

  StoreBuffer* store_buffer_;

  // We record in this variable how full the store buffer was when we started
  // iterating over the current page, finding pointers to new space. If the
  // store buffer overflows again we can exempt the page from the store buffer
  // by rewinding to this point instead of having to search the store buffer.
  Object*** start_of_current_page_;
  // The current page we are scanning in the store buffer iterator.
  MemoryChunk* current_page_;

// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
class PromotionQueue {
  explicit PromotionQueue(Heap* heap)

    delete emergency_stack_;
    emergency_stack_ = NULL;

  inline void ActivateGuardIfOnTheSamePage();

  Page* GetHeadPage() {
    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  }

  void SetNewLimit(Address limit) {

    ASSERT(GetHeadPage() == Page::FromAllocationTop(limit));
    limit_ = reinterpret_cast<intptr_t*>(limit);

    if (limit_ <= rear_) {

    return (front_ == rear_) &&
           (emergency_stack_ == NULL || emergency_stack_->length() == 0);

  inline void insert(HeapObject* target, int size);

  void remove(HeapObject** target, int* size) {

    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();

    if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
      NewSpacePage* front_page =
          NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
      ASSERT(!front_page->prev_page()->is_anchor());
      front_ =
          reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
    }

    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));

  // The front of the queue is higher in the memory page chain than the rear.

  static const int kEntrySizeInWords = 2;

  Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }

  List<Entry>* emergency_stack_;

  void RelocateQueueHead();

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
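
// Illustrative sketch (not from the original source): remove() above pops two
// machine words per entry -- first the object pointer, then its size --
// matching kEntrySizeInWords == 2. A hypothetical insert() mirroring that
// layout could look like:
//
//   void PromotionQueue::insert(HeapObject* target, int size) {
//     *(--rear_) = reinterpret_cast<intptr_t>(target);
//     *(--rear_) = size;
//   }
//
// Both front_ and rear_ walk downward in memory, so the word written first
// at the rear is also the word read first at the front.
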
typedef void (*ScavengingCallback)(Map* map,
                                   HeapObject** slot,
                                   HeapObject* object);

// External strings table is a place where all external strings are
// registered. We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable {
  // Registers an external string.
  inline void AddString(String* string);

  inline void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.

  // Destroys all allocated memory.

  explicit ExternalStringTable(Heap* heap) : heap_(heap) { }

  inline void Verify();

  inline void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline void ShrinkNewStrings(int position);

  // To speed up scavenge collections, new space strings are kept
  // separate from old space strings.
  List<Object*> new_space_strings_;
  List<Object*> old_space_strings_;

  DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);

enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};

  // Configure heap size in MB before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(int max_semi_space_size,
                     int max_old_space_size,
                     int max_executable_size,
                     size_t code_range_size);
  bool ConfigureHeapDefault();

  // Prepares the heap, setting up memory areas that are needed in the isolate
  // without actually creating any objects.

  // Bootstraps the object heap with the core set of objects required to run.
  // Returns whether it succeeded.
  bool CreateHeapObjects();

  // Destroys all memory allocated by the heap.

  // Set the stack limit in the roots_ array. Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // Returns whether SetUp has been called.

  // Returns the maximum amount of memory reserved for the heap. For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space. The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
  int MaxSemiSpaceSize() { return max_semi_space_size_; }
  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  int InitialSemiSpaceSize() { return initial_semispace_size_; }
  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  intptr_t MaxExecutableSize() { return max_executable_size_; }
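
  // Worked example (illustrative, values assumed): with a reserved semispace
  // of 8 MB and a 700 MB old generation cap, MaxReserved() yields
  // 4 * 8 MB + 700 MB = 732 MB -- two semispaces, plus alignment slack for
  // new space, plus the old generation budget.
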
  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.

  // Returns the amount of memory currently committed for the heap.
  intptr_t CommittedMemory();

  // Returns the amount of executable memory currently committed for the heap.
  intptr_t CommittedMemoryExecutable();

  // Returns the amount of physical memory currently committed for the heap.
  size_t CommittedPhysicalMemory();

  // Returns the maximum amount of memory ever committed for the heap.
  intptr_t MaximumCommittedMemory() { return maximum_committed_; }

  // Updates the maximum committed memory for the heap. Should be called
  // whenever a space grows.
  void UpdateMaximumCommitted();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  intptr_t Available();

  // Returns the size of all objects residing in the heap.
  intptr_t SizeOfObjects();

  // Return the starting address and a mask for the new space. And-masking an
  // address with the mask will result in the start address of the new space
  // for all addresses in either semispace.
  Address NewSpaceStart() { return new_space_.start(); }
  uintptr_t NewSpaceMask() { return new_space_.mask(); }
  Address NewSpaceTop() { return new_space_.top(); }

  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_pointer_space() { return old_pointer_space_; }
  OldSpace* old_data_space() { return old_data_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  CellSpace* cell_space() { return cell_space_; }
  PropertyCellSpace* property_cell_space() {
    return property_cell_space_;
  }
  LargeObjectSpace* lo_space() { return lo_space_; }

  PagedSpace* paged_space(int idx) {
    switch (idx) {
      case OLD_POINTER_SPACE:
        return old_pointer_space();
      case OLD_DATA_SPACE:
        return old_data_space();
      case PROPERTY_CELL_SPACE:
        return property_cell_space();

  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
  Address always_allocate_scope_depth_address() {
    return reinterpret_cast<Address>(&always_allocate_scope_depth_);
  }
  bool linear_allocation() {
    return linear_allocation_scope_depth_ != 0;
  }

  Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  Address* OldPointerSpaceAllocationTopAddress() {
    return old_pointer_space_->allocation_top_address();
  }
  Address* OldPointerSpaceAllocationLimitAddress() {
    return old_pointer_space_->allocation_limit_address();
  }

  Address* OldDataSpaceAllocationTopAddress() {
    return old_data_space_->allocation_top_address();
  }
  Address* OldDataSpaceAllocationLimitAddress() {
    return old_data_space_->allocation_limit_address();
  }

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Optionally takes an AllocationSite to be appended in an AllocationMemento.
  MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
                                                AllocationSite* site = NULL);

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // Iterates the whole code space to clear all ICs of the given kind.
  void ClearAllICsByKind(Code::Kind kind);

  // For use during bootup.
  void RepairFreeListsAfterBoot();

  template<typename T>
  static inline bool IsOneByte(T t, int chars);

  // Moves len elements within a given array from src_index to dst_index.
  void MoveElements(FixedArray* array, int dst_index, int src_index, int len);

  // Sloppy mode arguments object size.
  static const int kSloppyArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;
  // Strict mode arguments have no callee, so the object is smaller.
  static const int kStrictArgumentsObjectSize =
      JSObject::kHeaderSize + 1 * kPointerSize;
  // Indices for direct access into argument objects.
  static const int kArgumentsLengthIndex = 0;
  // callee is only valid in sloppy mode.
  static const int kArgumentsCalleeIndex = 1;
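
  // Layout illustration (not from the original source): a sloppy-mode
  // arguments object stores its length at kArgumentsLengthIndex (0) and its
  // callee at kArgumentsCalleeIndex (1), which is why its size is
  // JSObject::kHeaderSize + 2 * kPointerSize, while the strict-mode variant,
  // lacking the callee slot, needs only one extra pointer.
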
  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when shortening objects.
  void CreateFillerObjectAt(Address addr, int size);

  bool CanMoveObjectStart(HeapObject* object);

  enum InvocationMode { FROM_GC, FROM_MUTATOR };

  // Maintain marking consistency for IncrementalMarking.
  void AdjustLiveBytes(Address address, int by, InvocationMode mode);

  // Converts the given boolean condition to a JavaScript boolean value.
  inline Object* ToBoolean(bool condition);

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(
      AllocationSpace space,
      const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  static const int kNoGCFlags = 0;
  static const int kSweepPreciselyMask = 1;
  static const int kReduceMemoryFootprintMask = 2;
  static const int kAbortIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to sweep precisely and abort any
  // incremental marking as well.
  static const int kMakeHeapIterableMask =
      kSweepPreciselyMask | kAbortIncrementalMarkingMask;
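
  // Bit-arithmetic note (illustrative): the masks are single bits, so
  // kMakeHeapIterableMask == 1 | 4 == 5, and callers can combine flags
  // freely, e.g. (kMakeHeapIterableMask | kReduceMemoryFootprintMask) == 7.
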
  // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(
      int flags,
      const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Last-resort GC; it should try to squeeze out as much garbage as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);

  // Check whether the heap is currently iterable.
  bool IsHeapIterable();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed();

  inline void increment_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_++;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  inline void decrement_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_--;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

  void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback,
                             GCType gc_type_filter,
                             bool pass_isolate = true);
  void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback);

  void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback,
                             GCType gc_type_filter,
                             bool pass_isolate = true);
  void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback);

  // Heap root getters. We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
  // TODO(1490): Try removing the unchecked accessors, now that GC marking does
  // not corrupt the map.
#define ROOT_ACCESSOR(type, name, camel_name) \
  type* name() { \
    return type::cast(roots_[k##camel_name##RootIndex]); \
  } \
  type* raw_unchecked_##name() { \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
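
  // Expansion example (illustrative): for the root list entry
  // V(Oddball, undefined_value, UndefinedValue), ROOT_ACCESSOR stamps out
  //
  //   Oddball* undefined_value() {
  //     return Oddball::cast(roots_[kUndefinedValueRootIndex]);
  //   }
  //   Oddball* raw_unchecked_undefined_value() {
  //     return reinterpret_cast<Oddball*>(roots_[kUndefinedValueRootIndex]);
  //   }
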
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  Map* name##_map() { \
    return Map::cast(roots_[k##Name##MapRootIndex]); \
  }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) \
  String* name() { \
    return String::cast(roots_[k##name##RootIndex]); \
  }
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

  // The hidden_string is special because it is the empty string, but does
  // not match the empty string.
  String* hidden_string() { return hidden_string_; }

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() const { return native_contexts_list_; }

  void set_array_buffers_list(Object* object) {
    array_buffers_list_ = object;
  }
  Object* array_buffers_list() const { return array_buffers_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
  }
  Object* allocation_sites_list() { return allocation_sites_list_; }

  // Used in CreateAllocationSiteStub and the (de)serializer.
  Object** allocation_sites_list_address() { return &allocation_sites_list_; }

  Object* weak_object_to_code_table() { return weak_object_to_code_table_; }

  void set_encountered_weak_collections(Object* weak_collection) {
    encountered_weak_collections_ = weak_collection;
  }
  Object* encountered_weak_collections() const {
    return encountered_weak_collections_;
  }

  // Number of mark-sweeps.
  unsigned int ms_count() { return ms_count_; }

  // Iterates over all roots in the heap.
  void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over entries in the smi roots list. Only interesting to the
  // serializer/deserializer, since GC does not care about smis.
  void IterateSmiRoots(ObjectVisitor* v);
  // Iterates over all the other roots in the heap.
  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  // Iterates over pointers into the from-semispace of new space found in the
  // memory interval from start to end.
  void IterateAndMarkPointersToFromSpace(Address start,
                                         Address end,
                                         ObjectSlotCallback callback);

  // Returns whether the object resides in new space.
  inline bool InNewSpace(Object* object);
  inline bool InNewSpace(Address address);
  inline bool InNewSpacePage(Address address);
  inline bool InFromSpace(Object* object);
  inline bool InToSpace(Object* object);

  // Returns whether the object resides in old pointer space.
  inline bool InOldPointerSpace(Address address);
  inline bool InOldPointerSpace(Object* object);

  // Returns whether the object resides in old data space.
  inline bool InOldDataSpace(Address address);
  inline bool InOldDataSpace(Object* object);

  // Checks whether an address/object is in the heap (including the auxiliary
  // area and unused area).
  bool Contains(Address addr);
  bool Contains(HeapObject* value);

  // Checks whether an address/object is in a space.
  // Currently used by tests, serialization and heap verification only.
  bool InSpace(Address addr, AllocationSpace space);
  bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its type.
  inline OldSpace* TargetSpace(HeapObject* object);
  static inline AllocationSpace TargetSpaceId(InstanceType type);

  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);

  // Sets the stub_cache_ (only used when expanding the dictionary).
  void public_set_code_stubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Support for computing object sizes for old objects during GCs. Returns
  // a function that is guaranteed to be safe for computing object sizes in
  // the current GC phase.
  HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
    return gc_safe_size_of_old_object_;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  void public_set_store_buffer_top(Address* top) {
    roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
  }

  void public_set_materialized_objects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;
  }

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  Address* store_buffer_top_address() {
    return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
  }

  // Verify the heap is in its normal state before or after a GC.

  bool weak_embedded_objects_verification_enabled() {
    return no_weak_object_verification_scope_depth_ == 0;
  }

  void OldPointerSpaceCheckStoreBuffer();
  void MapSpaceCheckStoreBuffer();
  void LargeObjectSpaceCheckStoreBuffer();

  // Report heap statistics.
  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);

  // Zapping is needed for heap verification, and is always done in debug
  // builds.
  static inline bool ShouldZapGarbage() {
    return FLAG_verify_heap;

  // Print short heap statistics.
  void PrintShortHeapStatistics();

  // Write barrier support for address[offset] = o.
  INLINE(void RecordWrite(Address address, int offset));

  // Write barrier support for address[start, start + len) = o.
  INLINE(void RecordWrites(Address address, int start, int len));

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
  inline HeapState gc_state() { return gc_state_; }

  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }

  void set_allocation_timeout(int timeout) {
    allocation_timeout_ = timeout;
  }

  void TracePathToObjectFrom(Object* target, Object* root);
  void TracePathToObject(Object* target);
  void TracePathToGlobal();

  // Callback function passed to Heap::Iterate etc. Copies an object if
  // necessary; the object might be promoted to an old space. The caller must
  // ensure the precondition that the object is (a) a heap object and (b) in
  // the heap's from space.
  static inline void ScavengePointer(HeapObject** p);
  static inline void ScavengeObject(HeapObject** p, HeapObject* object);

  enum ScratchpadSlotMode {
    IGNORE_SCRATCHPAD_SLOT,
    RECORD_SCRATCHPAD_SLOT
  };

  // If an object has an AllocationMemento trailing it, return it, otherwise
  // return NULL.
  inline AllocationMemento* FindAllocationMemento(HeapObject* object);

  // An object may have an AllocationSite associated with it through a trailing
  // AllocationMemento. Its feedback should be updated when objects are found
  // in the heap.
  static inline void UpdateAllocationSiteFeedback(
      HeapObject* object, ScratchpadSlotMode mode);

  // Support for partial snapshots. After calling this we have a linear
  // space to write objects in each space.
  void ReserveSpace(int* sizes, Address* addresses);

  // Support for the API.

  void CreateApiObjects();

  inline intptr_t PromotedTotalSize() {
    int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
    if (total > kMaxInt) return static_cast<intptr_t>(kMaxInt);
    if (total < 0) return 0;
    return static_cast<intptr_t>(total);
  }

  inline intptr_t OldGenerationSpaceAvailable() {
    return old_generation_allocation_limit_ - PromotedTotalSize();
  }

  inline intptr_t OldGenerationCapacityAvailable() {
    return max_old_generation_size_ - PromotedTotalSize();
  }

  static const intptr_t kMinimumOldGenerationAllocationLimit =
      8 * (Page::kPageSize > MB ? Page::kPageSize : MB);

  static const int kPointerMultiplier = i::kPointerSize / 4;

  // The new space size has to be a power of 2. Sizes are in MB.
  static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeMediumMemoryDevice =
      4 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier;

  // The old space size has to be a multiple of Page::kPageSize.
  static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeMediumMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier;

  // The executable size has to be a multiple of Page::kPageSize.
  static const int kMaxExecutableSizeLowMemoryDevice =
      128 * kPointerMultiplier;
  static const int kMaxExecutableSizeMediumMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxExecutableSizeHighMemoryDevice =
      512 * kPointerMultiplier;
  static const int kMaxExecutableSizeHugeMemoryDevice =
      700 * kPointerMultiplier;
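
  // Scaling note (illustrative): kPointerMultiplier is i::kPointerSize / 4,
  // so it is 1 on a 32-bit build and 2 on a 64-bit build. For example,
  // kMaxOldSpaceSizeHugeMemoryDevice works out to 700 MB on 32-bit targets
  // and 1400 MB on 64-bit targets.
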
  intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size,
                                        int freed_global_handles);

  // Indicates whether inline bump-pointer allocation has been disabled.
  bool inline_allocation_disabled() { return inline_allocation_disabled_; }

  // Switch whether inline bump-pointer allocation should be used.
  void EnableInlineAllocation();
  void DisableInlineAllocation();

  // Implements the corresponding V8 API function.
  bool IdleNotification(int hint);

  // Declare all the root indices. This defines the root list order.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
    INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

    // Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
    STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP

    kStringTableRootIndex,

#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

    kRootListLength,
    kStrongRootListLength = kStringTableRootIndex,
    kSmiRootsStart = kStringTableRootIndex + 1
  };

  STATIC_ASSERT(kUndefinedValueRootIndex ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);

  // Generated code can embed direct references to non-writable roots if
  // they are in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  Map* MapForFixedTypedArray(ExternalArrayType array_type);
  RootListIndex RootIndexForFixedTypedArray(
      ExternalArrayType array_type);

  Map* MapForExternalArrayType(ExternalArrayType array_type);
  RootListIndex RootIndexForExternalArrayType(
      ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind);
  RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
  ExternalArray* EmptyExternalArrayForMap(Map* map);
  FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Copy block of memory from src to dst. Size of block should be aligned
  // by pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes
  // and pointer size aligned addresses.
  static inline void MoveBlock(Address dst, Address src, int byte_size);

  // Check new space expansion criteria and expand semispaces if they were hit.
  void CheckNewSpaceExpansionCriteria();

  inline void IncrementPromotedObjectsSize(int object_size) {
    ASSERT(object_size > 0);
    promoted_objects_size_ += object_size;
  }

  inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
    ASSERT(object_size > 0);
    semi_space_copied_object_size_ += object_size;
  }

  inline void IncrementYoungSurvivorsCounter(int survived) {
    ASSERT(survived >= 0);
    survived_since_last_expansion_ += survived;
  }

  inline bool NextGCIsLikelyToBeFull() {
    if (FLAG_gc_global) return true;

    if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;

    intptr_t adjusted_allocation_limit =
        old_generation_allocation_limit_ - new_space_.Capacity();

    if (PromotedTotalSize() >= adjusted_allocation_limit) return true;

    return false;
  }

  void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void UpdateReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void ProcessWeakReferences(WeakObjectRetainer* retainer);

  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);

  // Helper function that governs the promotion policy from new space to
  // old. If the object's old address lies below the new space's age
  // mark or if we've already filled the bottom 1/16th of the to space,
  // we try to promote this object.
  inline bool ShouldBePromoted(Address old_address, int object_size);

  void ClearJSFunctionResultCaches();

  void ClearNormalizedMapCaches();

  GCTracer* tracer() { return tracer_; }

  // Returns the size of objects residing in non-new spaces.
  intptr_t PromotedSpaceSizeOfObjects();

  double total_regexp_code_generated() { return total_regexp_code_generated_; }
  void IncreaseTotalRegexpCodeGenerated(int size) {
    total_regexp_code_generated_ += size;
  }

  void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
    if (is_crankshafted) {
      crankshaft_codegen_bytes_generated_ += size;
    } else {
      full_codegen_bytes_generated_ += size;
    }
  }

  // Returns maximum GC pause.
  double get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  double get_min_in_mutator() { return min_in_mutator_; }

  // TODO(hpayer): remove, should be handled by GCTracer
  void AddMarkingTime(double marking_time) {
    marking_time_ += marking_time;
  }

  double marking_time() const {
    return marking_time_;
  }

  // TODO(hpayer): remove, should be handled by GCTracer
  void AddSweepingTime(double sweeping_time) {
    sweeping_time_ += sweeping_time;
  }

  double sweeping_time() const {
    return sweeping_time_;
  }

  MarkCompactCollector* mark_compact_collector() {
    return &mark_compact_collector_;
  }

  StoreBuffer* store_buffer() {
    return &store_buffer_;
  }

  Marking* marking() {

  IncrementalMarking* incremental_marking() {
    return &incremental_marking_;
  }

  ExternalStringTable* external_string_table() {
    return &external_string_table_;
  }

  // Returns the current sweep generation.
  int sweep_generation() {
    return sweep_generation_;
  }

  inline Isolate* isolate();

  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
  void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);

  inline bool OldGenerationAllocationLimitReached();

  inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
    scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
  }

  void QueueMemoryChunkForFree(MemoryChunk* chunk);
  void FreeQueuedChunks();

  int gc_count() const { return gc_count_; }

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  inline void CompletelyClearInstanceofCache();

  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  uint32_t HashSeed() {
    uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
    ASSERT(FLAG_randomize_hashes || seed == 0);
    return seed;
  }

  void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
    ASSERT(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
    set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetConstructStubDeoptPCOffset(int pc_offset) {
    ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetGetterStubDeoptPCOffset(int pc_offset) {
    ASSERT(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetSetterStubDeoptPCOffset(int pc_offset) {
    ASSERT(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  // For post-mortem debugging.
  void RememberUnmappedPage(Address page, bool compacted);

  // Global inline caching age: it is incremented on some GCs after context
  // disposal. We use it to flush inline caches.
  int global_ic_age() {
    return global_ic_age_;
  }

  void AgeInlineCaches() {
    global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
  }

  bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }

  int64_t amount_of_external_allocated_memory() {
    return amount_of_external_allocated_memory_;
  }

  void DeoptMarkedAllocationSites();

  bool MaximumSizeScavenge() {
    return maximum_size_scavenges_ > 0;
  }

  bool DeoptMaybeTenuredAllocationSites() {
    return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
  }

  // ObjectStats are kept in two arrays, counts and sizes. Related stats are
  // stored in a contiguous linear buffer. Stats groups are stored one after
  // another.
  enum {
    FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,
    FIRST_FIXED_ARRAY_SUB_TYPE =
        FIRST_CODE_KIND_SUB_TYPE + Code::NUMBER_OF_KINDS,
    FIRST_CODE_AGE_SUB_TYPE =
        FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1,
    OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1
  };
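
  // Buffer layout (illustrative): object_counts_ / object_sizes_ hold plain
  // instance types in slots [0 .. LAST_TYPE], code kinds starting at
  // FIRST_CODE_KIND_SUB_TYPE, fixed-array sub types starting at
  // FIRST_FIXED_ARRAY_SUB_TYPE, and code ages starting at
  // FIRST_CODE_AGE_SUB_TYPE, for OBJECT_STATS_COUNT slots in total.
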
  void RecordObjectStats(InstanceType type, size_t size) {
    ASSERT(type <= LAST_TYPE);
    object_counts_[type]++;
    object_sizes_[type] += size;
  }

  void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size) {
    int code_sub_type_index = FIRST_CODE_KIND_SUB_TYPE + code_sub_type;
    int code_age_index =
        FIRST_CODE_AGE_SUB_TYPE + code_age - Code::kFirstCodeAge;
    ASSERT(code_sub_type_index >= FIRST_CODE_KIND_SUB_TYPE &&
           code_sub_type_index < FIRST_CODE_AGE_SUB_TYPE);
    ASSERT(code_age_index >= FIRST_CODE_AGE_SUB_TYPE &&
           code_age_index < OBJECT_STATS_COUNT);
    object_counts_[code_sub_type_index]++;
    object_sizes_[code_sub_type_index] += size;
    object_counts_[code_age_index]++;
    object_sizes_[code_age_index] += size;
  }

  void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
    ASSERT(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
    object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++;
    object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size;
  }

  void CheckpointObjectStats();

  // We don't use a LockGuard here since we want to lock the heap
  // only when FLAG_concurrent_recompilation is true.
  class RelocationLock {
    explicit RelocationLock(Heap* heap) : heap_(heap) {
      heap_->relocation_mutex_.Lock();
    }

    ~RelocationLock() {
      heap_->relocation_mutex_.Unlock();
    }
  };
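
  // Usage sketch (illustrative): RelocationLock is a scoped (RAII) lock, so
  // a caller holds the heap's relocation mutex exactly for one block:
  //
  //   { RelocationLock lock(heap);   // mutex locked here
  //     ... relocation-sensitive work ...
  //   }                              // mutex unlocked on scope exit
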
  void AddWeakObjectToCodeDependency(Handle<Object> obj,
                                     Handle<DependentCode> dep);

  DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj);

  void InitializeWeakObjectToCodeTable() {
    set_weak_object_to_code_table(undefined_value());
  }

  void EnsureWeakObjectToCodeTable();

  static void FatalProcessOutOfMemory(const char* location,
                                      bool take_snapshot = false);

  // Methods made available to tests.

  // Allocates a JS Map in the heap.
  MUST_USE_RESULT AllocationResult AllocateMap(
      InstanceType instance_type,
      int instance_size,
      ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // If allocation_site is non-null, then a memento is emitted after the object
  // that points to the site.
  MUST_USE_RESULT AllocationResult AllocateJSObject(
      JSFunction* constructor,
      PretenureFlag pretenure = NOT_TENURED,
      AllocationSite* allocation_site = NULL);

  // Allocates and initializes a new JavaScript object based on a map.
  // Passing an allocation site means that a memento will be created that
  // points to the site.
  MUST_USE_RESULT AllocationResult AllocateJSObjectFromMap(
      Map* map,
      PretenureFlag pretenure = NOT_TENURED,
      bool alloc_props = true,
      AllocationSite* allocation_site = NULL);

  // Allocates a HeapNumber from value.
  MUST_USE_RESULT AllocationResult AllocateHeapNumber(
      double value, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a Float32x4 from value.
  MUST_USE_RESULT AllocationResult AllocateFloat32x4(
      float32x4_value_t value,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates a Float64x2 from value.
  MUST_USE_RESULT AllocationResult AllocateFloat64x2(
      float64x2_value_t value,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates an Int32x4 from value.
  MUST_USE_RESULT AllocationResult AllocateInt32x4(
      int32x4_value_t value,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates a byte array of the specified length.
  MUST_USE_RESULT AllocationResult AllocateByteArray(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // Allocates an arguments object - optionally with an elements array.
  MUST_USE_RESULT AllocationResult AllocateArgumentsObject(
      Object* callee, int length);

  // Copy the code and scope info part of the code object, but insert
  // the provided data as the relocation information.
  MUST_USE_RESULT AllocationResult CopyCode(Code* code,
                                            Vector<byte> reloc_info);

  MUST_USE_RESULT AllocationResult CopyCode(Code* code);

  // Allocates a fixed array initialized with undefined values.
  MUST_USE_RESULT AllocationResult AllocateFixedArray(
      int length,
      PretenureFlag pretenure = NOT_TENURED);

  // The amount of external memory registered through the API kept alive
  // by global handles.
  int64_t amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  int64_t amount_of_external_allocated_memory_at_last_global_gc_;

  // This can be calculated directly from a pointer to the heap; however, it is
  // more expedient to get at the isolate directly from within Heap methods.
  Isolate* isolate_;

  Object* roots_[kRootListLength];

  size_t code_range_size_;
  int reserved_semispace_size_;
  int max_semi_space_size_;
  int initial_semispace_size_;
  intptr_t max_old_generation_size_;
  intptr_t max_executable_size_;
  intptr_t maximum_committed_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  int survived_since_last_expansion_;

  // For keeping track of when to flush RegExp code.
  int sweep_generation_;

  int always_allocate_scope_depth_;
  int linear_allocation_scope_depth_;

  // For keeping track of context disposals.
  int contexts_disposed_;

  bool flush_monomorphic_ics_;

  int scan_on_scavenge_pages_;

  NewSpace new_space_;
  OldSpace* old_pointer_space_;
  OldSpace* old_data_space_;
  OldSpace* code_space_;
  MapSpace* map_space_;
  CellSpace* cell_space_;
  PropertyCellSpace* property_cell_space_;
  LargeObjectSpace* lo_space_;
  HeapState gc_state_;
  int gc_post_processing_depth_;
  Address new_space_top_after_last_gc_;

  // Returns the amount of external memory registered since the last global GC.
  int64_t PromotedExternalMemorySize();

  unsigned int ms_count_;  // how many mark-sweep collections happened
  unsigned int gc_count_;  // how many GCs happened

  // For post-mortem debugging.
  static const int kRememberedUnmappedPages = 128;
  int remembered_unmapped_pages_index_;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];

  // Total length of the strings we failed to flatten since the last GC.
  int unflattened_strings_length_;

#define ROOT_ACCESSOR(type, name, camel_name) \
  inline void set_##name(type* value) { \
    /* The deserializer makes use of the fact that these common roots are */ \
    /* never in new space and never on a page that is being compacted. */ \
    ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
    roots_[k##camel_name##RootIndex] = value; \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
  // that remain until the next failure and garbage collection.
  int allocation_timeout_;

  // Limit that triggers a global GC on the next (normally caused) GC. This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke, before expanding a paged space in the old
  // generation and on every allocation in large object space.
  intptr_t old_generation_allocation_limit_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  bool old_gen_exhausted_;

  // Indicates that inline bump-pointer allocation has been globally disabled
  // for all spaces. This is used to disable allocations in generated code.
  bool inline_allocation_disabled_;

  // Weak list heads, threaded through the objects.
  // List heads are initialized lazily and contain the undefined_value at
  // start.
  Object* native_contexts_list_;
  Object* array_buffers_list_;
  Object* allocation_sites_list_;

  // WeakHashTable that maps objects embedded in optimized code to dependent
  // code list. It is initialized lazily and contains the undefined_value at
  // start.
  Object* weak_object_to_code_table_;

  // List of encountered weak collections (JSWeakMap and JSWeakSet) during
  // marking. It is initialized during marking, destroyed after marking and
  // contains Smi(0) while marking is not active.
  Object* encountered_weak_collections_;

  StoreBufferRebuilder store_buffer_rebuilder_;

  struct StringTypeTable {
    RootListIndex index;
  };

  struct ConstantStringTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    RootListIndex index;
  };

  static const StringTypeTable string_type_table[];
  static const ConstantStringTable constant_string_table[];
  static const StructTable struct_table[];

1666 // The special hidden string which is an empty string, but does not match
1667 // any string when looked up in properties.
1668 String* hidden_string_;
  // GC callback function, called before and after mark-compact GC.
  // Allocations in the callback function are disallowed.
  struct GCPrologueCallbackPair {
    GCPrologueCallbackPair(v8::Isolate::GCPrologueCallback callback,
                           GCType gc_type,
                           bool pass_isolate)
        : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
    }
    bool operator==(const GCPrologueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    v8::Isolate::GCPrologueCallback callback;
    GCType gc_type;
    // TODO(dcarney): remove variable
    bool pass_isolate_;
  };
  List<GCPrologueCallbackPair> gc_prologue_callbacks_;
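
  // Illustration (a hedged sketch, not part of the original header): these
  // pairs back the public v8::Isolate::AddGCPrologueCallback() API, so an
  // embedder-side registration looks roughly like:
  //
  //   void OnGCPrologue(v8::Isolate* isolate, v8::GCType type,
  //                     v8::GCCallbackFlags flags) {
  //     // allocations are disallowed in here
  //   }
  //   isolate->AddGCPrologueCallback(OnGCPrologue);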
  struct GCEpilogueCallbackPair {
    GCEpilogueCallbackPair(v8::Isolate::GCPrologueCallback callback,
                           GCType gc_type,
                           bool pass_isolate)
        : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
    }
    bool operator==(const GCEpilogueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    v8::Isolate::GCPrologueCallback callback;
    GCType gc_type;
    // TODO(dcarney): remove variable
    bool pass_isolate_;
  };
  List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
  // Support for computing object sizes during GC.
  HeapObjectCallback gc_safe_size_of_old_object_;
  static int GcSafeSizeOfOldObject(HeapObject* object);

  // Update the GC state. Called from the mark-compact collector.
  void MarkMapPointersAsEncoded(bool encoded) {
    ASSERT(!encoded);
    gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
  }
  // Code that should be run before and after each GC. Includes some
  // reporting/verification activities when compiled with DEBUG set.
  void GarbageCollectionPrologue();
  void GarbageCollectionEpilogue();

  // Pretenuring decisions are made based on feedback collected during new
  // space evacuation. Note that between feedback collection and calling this
  // method, objects in old space must not move.
  // Right now we only process pretenuring feedback in high promotion mode.
  void ProcessPretenuringFeedback();
  // Checks whether a global GC is necessary.
  GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                          const char** reason);

  // Make sure there is a filler value behind the top of the new space
  // so that the GC does not confuse some uninitialized/stale memory
  // with the allocation memento of the object at the top.
  void EnsureFillerObjectAtTop();

  // Ensure that we have swept all spaces in such a way that we can iterate
  // over all objects. May cause a GC.
  void MakeHeapIterable();
  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(
      GarbageCollector collector,
      const char* gc_reason,
      const char* collector_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs a garbage collection.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool PerformGarbageCollection(
      GarbageCollector collector,
      GCTracer* tracer,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
  inline void UpdateOldSpaceLimits();

  // Selects the proper allocation space depending on the given object
  // size, pretenuring decision, and preferred old-space.
  static AllocationSpace SelectSpace(int object_size,
                                     AllocationSpace preferred_old_space,
                                     PretenureFlag pretenure) {
    ASSERT(preferred_old_space == OLD_POINTER_SPACE ||
           preferred_old_space == OLD_DATA_SPACE);
    if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
    return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
  }
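
  // Worked examples (illustrative values only): a TENURED request of 32
  // bytes with preferred_old_space == OLD_DATA_SPACE returns OLD_DATA_SPACE,
  // the same request with NOT_TENURED returns NEW_SPACE, and any request
  // larger than Page::kMaxRegularHeapObjectSize returns LO_SPACE regardless
  // of the pretenure flag.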
  // Allocate an uninitialized object. The memory is non-executable if the
  // hardware and OS allow. This is the single choke-point for allocations
  // performed by the runtime and should not be bypassed (to extend this to
  // inlined allocations, use the Heap::DisableInlineAllocation() support).
  MUST_USE_RESULT inline AllocationResult AllocateRaw(
      int size_in_bytes,
      AllocationSpace space,
      AllocationSpace retry_space);

  // Allocates a heap object based on the map.
  MUST_USE_RESULT AllocationResult Allocate(
      Map* map,
      AllocationSpace space,
      AllocationSite* allocation_site = NULL);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT AllocationResult AllocatePartialMap(
      InstanceType instance_type,
      int instance_size);

  // Initializes a JSObject based on its map.
  void InitializeJSObjectFromMap(JSObject* obj,
                                 FixedArray* properties,
                                 Map* map);
  void InitializeAllocationMemento(AllocationMemento* memento,
                                   AllocationSite* allocation_site);
  // Allocate a block of memory in the given space (filled with a filler).
  // Used as a fall-back for generated code when the space is full.
  MUST_USE_RESULT AllocationResult AllocateFillerObject(int size,
                                                        bool double_align,
                                                        AllocationSpace space);

  // Allocate an uninitialized fixed array.
  MUST_USE_RESULT AllocationResult AllocateRawFixedArray(
      int length, PretenureFlag pretenure);

  // Allocate an uninitialized fixed double array.
  MUST_USE_RESULT AllocationResult AllocateRawFixedDoubleArray(
      int length, PretenureFlag pretenure);

  // Allocate an initialized fixed array with the given filler value.
  MUST_USE_RESULT AllocationResult AllocateFixedArrayWithFiller(
      int length, PretenureFlag pretenure, Object* filler);

  // Allocates and partially initializes a String. There are two String
  // encodings: ASCII and two byte. These functions allocate a string of the
  // given length and set its map and length fields. The characters of the
  // string are uninitialized.
  MUST_USE_RESULT AllocationResult AllocateRawOneByteString(
      int length, PretenureFlag pretenure);
  MUST_USE_RESULT AllocationResult AllocateRawTwoByteString(
      int length, PretenureFlag pretenure);
  bool CreateInitialMaps();
  void CreateInitialObjects();

  // Allocates an internalized string in old space based on the character
  // stream.
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
      Vector<const char> str,
      int chars,
      uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
      Vector<const uint8_t> str,
      uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
      Vector<const uc16> str,
      uint32_t hash_field);

  template<bool is_one_byte, typename T>
  MUST_USE_RESULT AllocationResult AllocateInternalizedStringImpl(
      T t, int chars, uint32_t hash_field);

  template<typename T>
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
      T t, int chars, uint32_t hash_field);
  // Allocates an uninitialized fixed array. It must be filled by the caller.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT AllocationResult CopyFixedArrayWithMap(FixedArray* src,
                                                         Map* map);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
      FixedDoubleArray* src);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline AllocationResult CopyConstantPoolArray(
      ConstantPoolArray* src);
  // Computes a single character string where the character has the given
  // code. A cache is used for ASCII codes.
  MUST_USE_RESULT AllocationResult LookupSingleCharacterStringFromCode(
      uint16_t code);

  // Allocate a symbol in old space.
  MUST_USE_RESULT AllocationResult AllocateSymbol();

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult CopyConstantPoolArrayWithMap(
      ConstantPoolArray* src, Map* map);

  MUST_USE_RESULT AllocationResult AllocateConstantPoolArray(
      const ConstantPoolArray::NumberOfEntries& small);

  MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray(
      const ConstantPoolArray::NumberOfEntries& small,
      const ConstantPoolArray::NumberOfEntries& extended);
  // Allocates an external array of the specified length and type.
  MUST_USE_RESULT AllocationResult AllocateExternalArray(
      int length,
      ExternalArrayType array_type,
      void* external_pointer,
      PretenureFlag pretenure);

  // Allocates a fixed typed array of the specified length and type.
  MUST_USE_RESULT AllocationResult AllocateFixedTypedArray(
      int length,
      ExternalArrayType array_type,
      PretenureFlag pretenure);

  // Make a copy of src and return it.
  MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult CopyFixedDoubleArrayWithMap(
      FixedDoubleArray* src, Map* map);

  // Allocates a fixed double array with uninitialized values.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
      int length,
      PretenureFlag pretenure = NOT_TENURED);
  // These two Create*EntryStub functions are here and forced to not be
  // inlined because of a gcc-4.4 bug that assigns wrong vtable entries.
  NO_INLINE(void CreateJSEntryStub());
  NO_INLINE(void CreateJSConstructEntryStub());

  void CreateFixedStubs();
  // Allocate empty fixed array.
  MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();

  // Allocate empty external array of given type.
  MUST_USE_RESULT AllocationResult AllocateEmptyExternalArray(
      ExternalArrayType array_type);

  // Allocate empty fixed typed array of given type.
  MUST_USE_RESULT AllocationResult AllocateEmptyFixedTypedArray(
      ExternalArrayType array_type);

  // Allocate empty constant pool array.
  MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray();

  // Allocate a tenured simple cell.
  MUST_USE_RESULT AllocationResult AllocateCell(Object* value);

  // Allocate a tenured JS global property cell initialized with the hole.
  MUST_USE_RESULT AllocationResult AllocatePropertyCell();

  // Allocates a new utility object in the old generation.
  MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);

  // Allocates a new foreign object.
  MUST_USE_RESULT AllocationResult AllocateForeign(
      Address address, PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT AllocationResult AllocateCode(int object_size,
                                                bool immovable);

  MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key);

  MUST_USE_RESULT AllocationResult InternalizeString(String* str);
  // Performs a minor collection in new generation.
  void Scavenge();

  // Commits from space if it is uncommitted.
  void EnsureFromSpaceIsCommitted();

  // Uncommit unused semi space.
  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }

  // Fill in bogus values in from space.
  void ZapFromSpace();

  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Heap* heap,
      Object** pointer);

  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
  static void ScavengeStoreBufferCallback(Heap* heap,
                                          MemoryChunk* page,
                                          StoreBufferEvent event);
  // Performs a major collection in the whole heap.
  void MarkCompact(GCTracer* tracer);

  // Code to be run before and after mark-compact.
  void MarkCompactPrologue();

  void ProcessNativeContexts(WeakObjectRetainer* retainer);
  void ProcessArrayBuffers(WeakObjectRetainer* retainer);
  void ProcessAllocationSites(WeakObjectRetainer* retainer);

  // Deopts all code that contains allocation instructions which are tenured
  // or not tenured, depending on the given flag. Moreover, it clears the
  // pretenuring allocation site statistics.
  void ResetAllAllocationSitesDependentCode(PretenureFlag flag);

  // Evaluates local pretenuring for the old space and calls
  // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
  // old space.
  void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
  // Called on heap tear-down.
  void TearDownArrayBuffers();

  // Record statistics before and after garbage collection.
  void ReportStatisticsBeforeGC();
  void ReportStatisticsAfterGC();

  // Slow part of scavenge object.
  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  // Total RegExp code ever generated.
  double total_regexp_code_generated_;

  GCTracer* tracer_;
  // Creates and installs the full-sized number string cache.
  int FullSizeNumberStringCacheLength();
  // Flush the number to string cache.
  void FlushNumberStringCache();

  // Sets used allocation sites entries to undefined.
  void FlushAllocationSitesScratchpad();

  // Initializes the allocation sites scratchpad with undefined values.
  void InitializeAllocationSitesScratchpad();

  // Adds an allocation site to the scratchpad if there is space left.
  void AddAllocationSiteToScratchpad(AllocationSite* site,
                                     ScratchpadSlotMode mode);
  void UpdateSurvivalStatistics(int start_new_space_size);

  static const int kYoungSurvivalRateHighThreshold = 90;
  static const int kYoungSurvivalRateAllowedDeviation = 15;

  static const int kOldSurvivalRateLowThreshold = 10;

  int high_survival_rate_period_length_;
  intptr_t promoted_objects_size_;
  double promotion_rate_;
  intptr_t semi_space_copied_object_size_;
  double semi_space_copied_rate_;
  // This is the pretenuring trigger for allocation sites that are in maybe
  // tenure state. When we switch to the maximum new space size we deoptimize
  // the code that belongs to the allocation site and derive the lifetime
  // of the allocation site.
  unsigned int maximum_size_scavenges_;

  // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
  // Re-visit incremental marking heuristics.
  bool IsHighSurvivalRate() {
    return high_survival_rate_period_length_ > 0;
  }
  void SelectScavengingVisitorsTable();

  void StartIdleRound() {
    mark_sweeps_since_idle_round_started_ = 0;
  }

  void FinishIdleRound() {
    mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
    scavenges_since_last_idle_round_ = 0;
  }

  bool EnoughGarbageSinceLastIdleRound() {
    return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
  }
  // Estimates how many milliseconds a Mark-Sweep would take to complete.
  // In the idle notification handler we assume that this function will
  // return:
  // - a number less than 10 for small heaps, which are less than 8 MB.
  // - a number greater than 10 for large heaps, which are greater than 32 MB.
  int TimeMarkSweepWouldTakeInMs() {
    // Rough estimate of how many megabytes of heap can be processed in 1 ms.
    static const int kMbPerMs = 2;

    int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
    return heap_size_mb / kMbPerMs;
  }
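
  // Worked example: with kMbPerMs == 2, an 8 MB heap yields 8 / 2 = 4 ms
  // (under the small-heap bound above), while a 32 MB heap yields
  // 32 / 2 = 16 ms (over the large-heap bound).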
  // Returns true if no more GC work is left.
  bool IdleGlobalGC();

  void AdvanceIdleIncrementalMarking(intptr_t step_size);

  void ClearObjectStats(bool clear_last_time_stats = false);

  void set_weak_object_to_code_table(Object* value) {
    ASSERT(!InNewSpace(value));
    weak_object_to_code_table_ = value;
  }

  Object** weak_object_to_code_table_address() {
    return &weak_object_to_code_table_;
  }
  static const int kInitialStringTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;
  static const int kInitialNumberStringCacheSize = 256;

  // Object counts and used memory by InstanceType.
  size_t object_counts_[OBJECT_STATS_COUNT];
  size_t object_counts_last_time_[OBJECT_STATS_COUNT];
  size_t object_sizes_[OBJECT_STATS_COUNT];
  size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
  // Maximum GC pause.
  double max_gc_pause_;

  // Total time spent in GC.
  double total_gc_time_ms_;

  // Maximum size of objects alive after GC.
  intptr_t max_alive_after_gc_;

  // Minimal interval between two subsequent collections.
  double min_in_mutator_;

  // Size of objects alive after last GC.
  intptr_t alive_after_last_gc_;

  double last_gc_end_timestamp_;

  // Cumulative GC time spent in marking.
  double marking_time_;

  // Cumulative GC time spent in sweeping.
  double sweeping_time_;
  MarkCompactCollector mark_compact_collector_;

  StoreBuffer store_buffer_;

  Marking marking_;

  IncrementalMarking incremental_marking_;

  int number_idle_notifications_;
  unsigned int last_idle_notification_gc_count_;
  bool last_idle_notification_gc_count_init_;

  int mark_sweeps_since_idle_round_started_;
  unsigned int gc_count_at_last_idle_gc_;
  int scavenges_since_last_idle_round_;
  // These two counters are monotonically increasing and never reset.
  size_t full_codegen_bytes_generated_;
  size_t crankshaft_codegen_bytes_generated_;

  // If the --deopt_every_n_garbage_collections flag is set to a positive
  // value, this variable holds the number of garbage collections since the
  // last deoptimization triggered by garbage collection.
  int gcs_since_last_deopt_;

#ifdef VERIFY_HEAP
  int no_weak_object_verification_scope_depth_;
#endif

  static const int kAllocationSiteScratchpadSize = 256;
  int allocation_sites_scratchpad_length_;

  static const int kMaxMarkSweepsInIdleRound = 7;
  static const int kIdleScavengeThreshold = 5;
  // Shared state read by the scavenge collector and set by ScavengeObject.
  PromotionQueue promotion_queue_;

  // Flag is set when the heap has been configured. The heap can be repeatedly
  // configured through the API until it is set up.
  bool configured_;

  ExternalStringTable external_string_table_;

  VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;

  MemoryChunk* chunks_queued_for_free_;

  Mutex relocation_mutex_;

  int gc_callbacks_depth_;

  friend class AlwaysAllocateScope;
  friend class Factory;
  friend class GCCallbacksScope;
  friend class GCTracer;
  friend class HeapIterator;
  friend class Isolate;
  friend class MarkCompactCollector;
  friend class MarkCompactMarkingVisitor;
  friend class MapCompact;
#ifdef VERIFY_HEAP
  friend class NoWeakObjectVerificationScope;
#endif

  DISALLOW_COPY_AND_ASSIGN(Heap);
};

class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  int* start_marker;                       // 0
  int* new_space_size;                     // 1
  int* new_space_capacity;                 // 2
  intptr_t* old_pointer_space_size;        // 3
  intptr_t* old_pointer_space_capacity;    // 4
  intptr_t* old_data_space_size;           // 5
  intptr_t* old_data_space_capacity;       // 6
  intptr_t* code_space_size;               // 7
  intptr_t* code_space_capacity;           // 8
  intptr_t* map_space_size;                // 9
  intptr_t* map_space_capacity;            // 10
  intptr_t* cell_space_size;               // 11
  intptr_t* cell_space_capacity;           // 12
  intptr_t* lo_space_size;                 // 13
  int* global_handle_count;                // 14
  int* weak_global_handle_count;           // 15
  int* pending_global_handle_count;        // 16
  int* near_death_global_handle_count;     // 17
  int* free_global_handle_count;           // 18
  intptr_t* memory_allocator_size;         // 19
  intptr_t* memory_allocator_capacity;     // 20
  int* objects_per_type;                   // 21
  int* size_per_type;                      // 22
  int* os_error;                           // 23
  int* end_marker;                         // 24
  intptr_t* property_cell_space_size;      // 25
  intptr_t* property_cell_space_capacity;  // 26
};

class AlwaysAllocateScope {
 public:
  explicit inline AlwaysAllocateScope(Isolate* isolate);
  inline ~AlwaysAllocateScope();

 private:
  // Implicitly disable artificial allocation failures.
  Heap* heap_;
  DisallowAllocationFailure daf_;
};

class NoWeakObjectVerificationScope {
 public:
  inline NoWeakObjectVerificationScope();
  inline ~NoWeakObjectVerificationScope();
};

class GCCallbacksScope {
 public:
  explicit inline GCCallbacksScope(Heap* heap);
  inline ~GCCallbacksScope();

  inline bool CheckReenter();

 private:
  Heap* heap_;
};

// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have
// to point into the heap to a location that has a map pointer at its first
// word. Caveat: Heap::Contains is an approximation because it can return
// true for objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor: public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end);
};

// Verify that all objects are Smis.
class VerifySmisVisitor: public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end);
};

// Space iterator for iterating over all spaces of the heap. Returns each
// space in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
  Space* next();
 private:
  Heap* heap_;
  int counter_;
};

// Space iterator for iterating over all old spaces of the heap: Old pointer
// space, old data space and code space. Returns each space in turn, and null
// when it is done.
class OldSpaces BASE_EMBEDDED {
 public:
  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
  OldSpace* next();
 private:
  Heap* heap_;
  int counter_;
};

// Space iterator for iterating over all the paged spaces of the heap: Map
// space, old pointer space, old data space, code space and cell space.
// Returns each space in turn, and null when it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
  PagedSpace* next();
 private:
  Heap* heap_;
  int counter_;
};

// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  explicit SpaceIterator(Heap* heap);
  SpaceIterator(Heap* heap, HeapObjectCallback size_func);
  virtual ~SpaceIterator();

  bool has_next();
  ObjectIterator* next();

 private:
  ObjectIterator* CreateIterator();

  Heap* heap_;
  int current_space_;         // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
  HeapObjectCallback size_func_;
};

// A HeapIterator provides iteration over the whole heap. It
// aggregates the specific iterators for the different spaces as
// each of these can iterate over only one space.
//
// HeapIterator ensures there is no allocation during its lifetime
// (using an embedded DisallowHeapAllocation instance).
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). As the implementation of free
// nodes filtering uses GC marks, it can't be used during MS/MC GC
// phases. Also, it is forbidden to interrupt iteration in this mode,
// as this will leave heap objects marked (and thus, unusable).
class HeapObjectsFilter;

class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering {
    kNoFiltering,
    kFilterUnreachable
  };

  explicit HeapIterator(Heap* heap);
  HeapIterator(Heap* heap, HeapObjectsFiltering filtering);
  ~HeapIterator();

  HeapObject* next();
  void reset();

 private:
  struct MakeHeapIterableHelper {
    explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
  };

  // Perform the initialization.
  void Init();
  // Perform all necessary shutdown (destruction) work.
  void Shutdown();
  HeapObject* NextObject();

  MakeHeapIterableHelper make_heap_iterable_helper_;
  DisallowHeapAllocation no_heap_allocation_;
  Heap* heap_;
  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
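
// Usage sketch (grounded in the next()/NULL protocol declared above):
//   HeapIterator iterator(heap);
//   for (HeapObject* obj = iterator.next(); obj != NULL;
//        obj = iterator.next()) {
//     // ... inspect obj; allocation is forbidden while iterating ...
//   }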

// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  int Lookup(Handle<Map> map, Handle<Name> name);

  // Update an element in the cache.
  void Update(Handle<Map> map, Handle<Name> name, int field_offset);

  // Clear the cache.
  void Clear();

  static const int kLength = 256;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 5;
  static const int kHashMask = -4;  // Zero the last two bits.
  static const int kEntriesPerBucket = 4;
  static const int kEntryLength = 2;
  static const int kMapIndex = 0;
  static const int kKeyIndex = 1;
  static const int kNotFound = -1;

  // kEntriesPerBucket should be a power of 2.
  STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
  STATIC_ASSERT(kEntriesPerBucket == -kHashMask);

 private:
  KeyedLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].map = NULL;
      keys_[i].name = NULL;
      field_offsets_[i] = kNotFound;
    }
  }

  static inline int Hash(Handle<Map> map, Handle<Name> name);

  // Get the address of the keys and field_offsets arrays. Used in
  // generated code to perform cache lookups.
  Address keys_address() {
    return reinterpret_cast<Address>(&keys_);
  }

  Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    Name* name;
  };

  Key keys_[kLength];
  int field_offsets_[kLength];

  friend class ExternalReference;
  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
};
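
// A reading of the constants above (a sketch, not normative): the cache has
// kLength == 256 entries grouped into buckets of kEntriesPerBucket == 4,
// i.e. 64 buckets. kHashMask == -4 zeroes the two low bits of a hash, so a
// lookup always probes from the first entry of a bucket; generated code
// reaches keys_address()/field_offsets_address() through ExternalReference
// to do the same probing inline.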

// Cache for mapping (map, property name) into descriptor index.
// The cache contains both positive and negative results.
// A descriptor index equal to kAbsent means the property is absent.
// Cleared at startup and prior to any gc.
class DescriptorLookupCache {
 public:
  // Lookup descriptor index for (map, name).
  // If absent, kAbsent is returned.
  int Lookup(Map* source, Name* name) {
    if (!name->IsUniqueName()) return kAbsent;
    int index = Hash(source, name);
    Key& key = keys_[index];
    if ((key.source == source) && (key.name == name)) return results_[index];
    return kAbsent;
  }

  // Update an element in the cache.
  void Update(Map* source, Name* name, int result) {
    ASSERT(result != kAbsent);
    if (name->IsUniqueName()) {
      int index = Hash(source, name);
      Key& key = keys_[index];
      key.source = source;
      key.name = name;
      results_[index] = result;
    }
  }

  // Clear the cache.
  void Clear();

  static const int kAbsent = -2;

 private:
  DescriptorLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].source = NULL;
      keys_[i].name = NULL;
      results_[i] = kAbsent;
    }
  }

  static int Hash(Object* source, Name* name) {
    // Uses only lower 32 bits if pointers are larger.
    uint32_t source_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source))
            >> kPointerSizeLog2;
    uint32_t name_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name))
            >> kPointerSizeLog2;
    return (source_hash ^ name_hash) % kLength;
  }

  static const int kLength = 64;

  struct Key {
    Map* source;
    Name* name;
  };

  Key keys_[kLength];
  int results_[kLength];

  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
};
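
// Usage sketch (hypothetical call site; the cache is reached through the
// isolate, and SearchDescriptors() below is a made-up stand-in for the real
// descriptor-array lookup):
//   int number = cache->Lookup(map, name);
//   if (number == DescriptorLookupCache::kAbsent) {
//     number = SearchDescriptors(map, name);  // hypothetical helper
//     cache->Update(map, name, number);
//   }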

// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.

class GCTracer BASE_EMBEDDED {
 public:
  class Scope BASE_EMBEDDED {
   public:
    enum ScopeId {
      EXTERNAL,
      MC_MARK,
      MC_SWEEP,
      MC_SWEEP_NEWSPACE,
      MC_SWEEP_OLDSPACE,
      MC_EVACUATE_PAGES,
      MC_UPDATE_NEW_TO_NEW_POINTERS,
      MC_UPDATE_ROOT_TO_NEW_POINTERS,
      MC_UPDATE_OLD_TO_NEW_POINTERS,
      MC_UPDATE_POINTERS_TO_EVACUATED,
      MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
      MC_UPDATE_MISC_POINTERS,
      MC_WEAKCOLLECTION_PROCESS,
      MC_WEAKCOLLECTION_CLEAR,
      MC_FLUSH_CODE,
      kNumberOfScopes
    };

    Scope(GCTracer* tracer, ScopeId scope)
        : tracer_(tracer), scope_(scope) {
      start_time_ = OS::TimeCurrentMillis();
    }

    ~Scope() {
      ASSERT(scope_ < kNumberOfScopes);  // scope_ is unsigned.
      tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
    }

   private:
    GCTracer* tracer_;
    ScopeId scope_;
    double start_time_;
  };
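
  // Usage sketch: a timed GC phase brackets its work in a Scope so the
  // elapsed wall-clock time accumulates into scopes_[id], e.g.:
  //
  //   { GCTracer::Scope gc_scope(tracer,
  //                              GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
  //     // ... process weak collections ...
  //   }  // the destructor records the elapsed time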

  explicit GCTracer(Heap* heap,
                    const char* gc_reason,
                    const char* collector_reason);
  ~GCTracer();

  // Sets the collector.
  void set_collector(GarbageCollector collector) { collector_ = collector; }

  // Sets the GC count.
  void set_gc_count(unsigned int count) { gc_count_ = count; }

  // Sets the full GC count.
  void set_full_gc_count(int count) { full_gc_count_ = count; }

  void increment_nodes_died_in_new_space() {
    nodes_died_in_new_space_++;
  }

  void increment_nodes_copied_in_new_space() {
    nodes_copied_in_new_space_++;
  }

  void increment_nodes_promoted() {
    nodes_promoted_++;
  }

 private:
  // Returns a string matching the collector.
  const char* CollectorString();
  // Returns size of objects in the heap (in MB).
  inline double SizeOfHeapObjects();

  // Timestamp set in the constructor.
  double start_time_;

  // Size of objects in heap set in constructor.
  intptr_t start_object_size_;

  // Size of memory allocated from OS set in constructor.
  intptr_t start_memory_size_;

  // Type of collector.
  GarbageCollector collector_;

  // A count (including this one, e.g. the first collection is 1) of the
  // number of garbage collections.
  unsigned int gc_count_;

  // A count (including this one) of the number of full garbage collections.
  int full_gc_count_;

  // Amounts of time spent in different scopes during GC.
  double scopes_[Scope::kNumberOfScopes];

  // Total amount of space either wasted or contained in one of the free
  // lists before the current GC.
  intptr_t in_free_list_or_wasted_before_gc_;

  // Difference between space used in the heap at the beginning of the current
  // collection and the end of the previous collection.
  intptr_t allocated_since_last_gc_;

  // Amount of time spent in the mutator, i.e. the time elapsed between the
  // end of the previous collection and the beginning of the current one.
  double spent_in_mutator_;

  // Number of nodes that died in new space.
  int nodes_died_in_new_space_;

  // Number of nodes copied to new space.
  int nodes_copied_in_new_space_;

  // Number of nodes promoted to old space.
  int nodes_promoted_;

  // Incremental marking steps counters.
  int steps_count_;
  double steps_took_;
  double longest_step_;
  int steps_count_since_last_gc_;
  double steps_took_since_last_gc_;

  Heap* heap_;

  const char* gc_reason_;
  const char* collector_reason_;
};

class RegExpResultsCache {
 public:
  enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS };

  // Attempt to retrieve a cached result. On failure, 0 is returned as a Smi.
  // On success, the returned result is guaranteed to be a COW-array.
  static Object* Lookup(Heap* heap,
                        String* key_string,
                        Object* key_pattern,
                        ResultsCacheType type);
  // Attempt to add value_array to the cache specified by type. On success,
  // value_array is turned into a COW-array.
  static void Enter(Isolate* isolate,
                    Handle<String> key_string,
                    Handle<Object> key_pattern,
                    Handle<FixedArray> value_array,
                    ResultsCacheType type);
  static void Clear(FixedArray* cache);
  static const int kRegExpResultsCacheSize = 0x100;

 private:
  static const int kArrayEntriesPerCacheEntry = 4;
  static const int kStringOffset = 0;
  static const int kPatternOffset = 1;
  static const int kArrayOffset = 2;
};
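
// A reading of the private constants above (a sketch, not normative): each
// cache entry spans kArrayEntriesPerCacheEntry == 4 consecutive slots of the
// backing FixedArray, with the key string at kStringOffset, the key pattern
// at kPatternOffset and the cached COW-array at kArrayOffset; the remaining
// slot per entry is left to the implementation.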

// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Return whether this object should be retained. If NULL is returned the
  // object has no references. Otherwise the address of the retained object
  // should be returned, as in some GC situations the object may have been
  // moved.
  virtual Object* RetainAs(Object* object) = 0;
};
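
// Sketch of a concrete retainer (hypothetical, for illustration only):
//   class DropAllRetainer : public WeakObjectRetainer {
//    public:
//     // Returning NULL tells the GC the weak object is not retained.
//     virtual Object* RetainAs(Object* object) { return NULL; }
//   };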

// Intrusive object marking uses the least significant bit of the heap
// object's map word to mark objects. Normally all map words have the least
// significant bit set because they contain a tagged map pointer. If the bit
// is not set, the object is marked. All objects should be unmarked before
// resuming JavaScript execution.
class IntrusiveMarking {
 public:
  static bool IsMarked(HeapObject* object) {
    return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
  }

  static void ClearMark(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
    ASSERT(!IsMarked(object));
  }

  static void SetMark(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
    ASSERT(IsMarked(object));
  }

  static Map* MapOfMarkedObject(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
  }

  static int SizeOfMarkedObject(HeapObject* object) {
    return object->SizeFromMap(MapOfMarkedObject(object));
  }

 private:
  static const uintptr_t kNotMarkedBit = 0x1;
  STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);  // NOLINT
};
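
// Worked example (a sketch): an unmarked object's map word is a tagged map
// pointer with the low bit (kNotMarkedBit) set. SetMark() clears that bit,
// so IsMarked() reports true; MapOfMarkedObject() then ORs the bit back in
// before decoding, which restores the original tagged pointer either way.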

// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
 public:
  enum WhatToFind {
    FIND_ALL,   // Will find all matches.
    FIND_FIRST  // Will stop the search after first match.
  };

  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
  static const int kMarkTag = 2;

  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
  // after the first match. If FIND_ALL is specified, then tracing will be
  // done for all matches.
  PathTracer(Object* search_target,
             WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
        object_stack_(20),
        no_allocation() {}

  virtual void VisitPointers(Object** start, Object** end);

  void Reset();
  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

 protected:
  class MarkVisitor;
  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  Object* search_target_;
  bool found_target_;
  bool found_target_in_trace_;
  WhatToFind what_to_find_;
  VisitMode visit_mode_;
  List<Object*> object_stack_;

  DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
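
// Usage sketch (debugging aid, grounded in the members declared above):
//   PathTracer tracer(search_target, PathTracer::FIND_FIRST, VISIT_ALL);
//   tracer.TracePathFrom(&root);  // root is an Object** to start from
//   if (tracer.found()) {
//     // a path from root to search_target was reported
//   }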

} }  // namespace v8::internal

#endif  // V8_HEAP_H_