// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_H_
#define V8_HEAP_HEAP_H_

#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/counters.h"
#include "src/globals.h"
#include "src/heap/gc-idle-time-handler.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"

#include "src/splay-tree-inl.h"

// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V) \
  V(Map, byte_array_map, ByteArrayMap) \
  V(Map, free_space_map, FreeSpaceMap) \
  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
  /* Cluster the most popular ones in a few cache lines here at the top. */ \
  V(Smi, store_buffer_top, StoreBufferTop) \
  V(Oddball, undefined_value, UndefinedValue) \
  V(Oddball, the_hole_value, TheHoleValue) \
  V(Oddball, null_value, NullValue) \
  V(Oddball, true_value, TrueValue) \
  V(Oddball, false_value, FalseValue) \
  V(Oddball, uninitialized_value, UninitializedValue) \
  V(Oddball, exception, Exception) \
  V(Map, cell_map, CellMap) \
  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
  V(Map, meta_map, MetaMap) \
  V(Map, heap_number_map, HeapNumberMap) \
  V(Map, mutable_heap_number_map, MutableHeapNumberMap) \
  V(Map, native_context_map, NativeContextMap) \
  V(Map, fixed_array_map, FixedArrayMap) \
  V(Map, code_map, CodeMap) \
  V(Map, scope_info_map, ScopeInfoMap) \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
  V(Map, constant_pool_array_map, ConstantPoolArrayMap) \
  V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
  V(Map, hash_table_map, HashTableMap) \
  V(Map, ordered_hash_table_map, OrderedHashTableMap) \
  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
  V(ByteArray, empty_byte_array, EmptyByteArray) \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
  V(ConstantPoolArray, empty_constant_pool_array, EmptyConstantPoolArray) \
  V(Oddball, arguments_marker, ArgumentsMarker) \
  /* The roots above this line should be boring from a GC point of view. */ \
  /* This means they are never in new space and never on a page that is */ \
  /* being compacted. */ \
  V(FixedArray, number_string_cache, NumberStringCache) \
  V(Object, instanceof_cache_function, InstanceofCacheFunction) \
  V(Object, instanceof_cache_map, InstanceofCacheMap) \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
  V(FixedArray, string_split_cache, StringSplitCache) \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
  V(Oddball, termination_exception, TerminationException) \
  V(Smi, hash_seed, HashSeed) \
  V(Map, symbol_map, SymbolMap) \
  V(Map, string_map, StringMap) \
  V(Map, one_byte_string_map, OneByteStringMap) \
  V(Map, cons_string_map, ConsStringMap) \
  V(Map, cons_one_byte_string_map, ConsOneByteStringMap) \
  V(Map, sliced_string_map, SlicedStringMap) \
  V(Map, sliced_one_byte_string_map, SlicedOneByteStringMap) \
  V(Map, external_string_map, ExternalStringMap) \
  V(Map, external_string_with_one_byte_data_map, \
    ExternalStringWithOneByteDataMap) \
  V(Map, external_one_byte_string_map, ExternalOneByteStringMap) \
  V(Map, short_external_string_map, ShortExternalStringMap) \
  V(Map, short_external_string_with_one_byte_data_map, \
    ShortExternalStringWithOneByteDataMap) \
  V(Map, internalized_string_map, InternalizedStringMap) \
  V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap) \
  V(Map, external_internalized_string_map, ExternalInternalizedStringMap) \
  V(Map, external_internalized_string_with_one_byte_data_map, \
    ExternalInternalizedStringWithOneByteDataMap) \
  V(Map, external_one_byte_internalized_string_map, \
    ExternalOneByteInternalizedStringMap) \
  V(Map, short_external_internalized_string_map, \
    ShortExternalInternalizedStringMap) \
  V(Map, short_external_internalized_string_with_one_byte_data_map, \
    ShortExternalInternalizedStringWithOneByteDataMap) \
  V(Map, short_external_one_byte_internalized_string_map, \
    ShortExternalOneByteInternalizedStringMap) \
  V(Map, short_external_one_byte_string_map, ShortExternalOneByteStringMap) \
  V(Map, undetectable_string_map, UndetectableStringMap) \
  V(Map, undetectable_one_byte_string_map, UndetectableOneByteStringMap) \
  V(Map, external_int8_array_map, ExternalInt8ArrayMap) \
  V(Map, external_uint8_array_map, ExternalUint8ArrayMap) \
  V(Map, external_int16_array_map, ExternalInt16ArrayMap) \
  V(Map, external_uint16_array_map, ExternalUint16ArrayMap) \
  V(Map, external_int32_array_map, ExternalInt32ArrayMap) \
  V(Map, external_int32x4_array_map, ExternalInt32x4ArrayMap) \
  V(Map, external_uint32_array_map, ExternalUint32ArrayMap) \
  V(Map, external_float32_array_map, ExternalFloat32ArrayMap) \
  V(Map, external_float32x4_array_map, ExternalFloat32x4ArrayMap) \
  V(Map, external_float64x2_array_map, ExternalFloat64x2ArrayMap) \
  V(Map, external_float64_array_map, ExternalFloat64ArrayMap) \
  V(Map, external_uint8_clamped_array_map, ExternalUint8ClampedArrayMap) \
  V(ExternalArray, empty_external_int8_array, EmptyExternalInt8Array) \
  V(ExternalArray, empty_external_uint8_array, EmptyExternalUint8Array) \
  V(ExternalArray, empty_external_int16_array, EmptyExternalInt16Array) \
  V(ExternalArray, empty_external_uint16_array, EmptyExternalUint16Array) \
  V(ExternalArray, empty_external_int32_array, EmptyExternalInt32Array) \
  V(ExternalArray, empty_external_int32x4_array, EmptyExternalInt32x4Array) \
  V(ExternalArray, empty_external_uint32_array, EmptyExternalUint32Array) \
  V(ExternalArray, empty_external_float32_array, EmptyExternalFloat32Array) \
  V(ExternalArray, empty_external_float32x4_array, \
    EmptyExternalFloat32x4Array) \
  V(ExternalArray, empty_external_float64x2_array, \
    EmptyExternalFloat64x2Array) \
  V(ExternalArray, empty_external_float64_array, EmptyExternalFloat64Array) \
  V(ExternalArray, empty_external_uint8_clamped_array, \
    EmptyExternalUint8ClampedArray) \
  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap) \
  V(Map, fixed_int8_array_map, FixedInt8ArrayMap) \
  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap) \
  V(Map, fixed_int16_array_map, FixedInt16ArrayMap) \
  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap) \
  V(Map, fixed_int32_array_map, FixedInt32ArrayMap) \
  V(Map, fixed_int32x4_array_map, FixedInt32x4ArrayMap) \
  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap) \
  V(Map, fixed_float32x4_array_map, FixedFloat32x4ArrayMap) \
  V(Map, fixed_float64x2_array_map, FixedFloat64x2ArrayMap) \
  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap) \
  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap) \
  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array) \
  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array) \
  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array) \
  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array) \
  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array) \
  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array) \
  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array) \
  V(FixedTypedArrayBase, empty_fixed_float32x4_array, \
    EmptyFixedFloat32x4Array) \
  V(FixedTypedArrayBase, empty_fixed_float64x2_array, \
    EmptyFixedFloat64x2Array) \
  V(FixedTypedArrayBase, empty_fixed_int32x4_array, EmptyFixedInt32x4Array) \
  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array) \
  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array, \
    EmptyFixedUint8ClampedArray) \
  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap) \
  V(Map, function_context_map, FunctionContextMap) \
  V(Map, catch_context_map, CatchContextMap) \
  V(Map, with_context_map, WithContextMap) \
  V(Map, block_context_map, BlockContextMap) \
  V(Map, module_context_map, ModuleContextMap) \
  V(Map, global_context_map, GlobalContextMap) \
  V(Map, undefined_map, UndefinedMap) \
  V(Map, the_hole_map, TheHoleMap) \
  V(Map, null_map, NullMap) \
  V(Map, boolean_map, BooleanMap) \
  V(Map, uninitialized_map, UninitializedMap) \
  V(Map, arguments_marker_map, ArgumentsMarkerMap) \
  V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap) \
  V(Map, exception_map, ExceptionMap) \
  V(Map, termination_exception_map, TerminationExceptionMap) \
  V(Map, message_object_map, JSMessageObjectMap) \
  V(Map, foreign_map, ForeignMap) \
  V(HeapNumber, nan_value, NanValue) \
  V(HeapNumber, infinity_value, InfinityValue) \
  V(HeapNumber, minus_zero_value, MinusZeroValue) \
  V(Map, neander_map, NeanderMap) \
  V(JSObject, message_listeners, MessageListeners) \
  V(UnseededNumberDictionary, code_stubs, CodeStubs) \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
  V(Code, js_entry_code, JsEntryCode) \
  V(Code, js_construct_entry_code, JsConstructEntryCode) \
  V(FixedArray, natives_source_cache, NativesSourceCache) \
  V(Script, empty_script, EmptyScript) \
  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
  V(Cell, undefined_cell, UndefineCell) \
  V(JSObject, observation_state, ObservationState) \
  V(Map, external_map, ExternalMap) \
  V(Object, symbol_registry, SymbolRegistry) \
  V(Symbol, frozen_symbol, FrozenSymbol) \
  V(Symbol, nonexistent_symbol, NonExistentSymbol) \
  V(Symbol, elements_transition_symbol, ElementsTransitionSymbol) \
  V(SeededNumberDictionary, empty_slow_element_dictionary, \
    EmptySlowElementDictionary) \
  V(Symbol, observed_symbol, ObservedSymbol) \
  V(Symbol, uninitialized_symbol, UninitializedSymbol) \
  V(Symbol, megamorphic_symbol, MegamorphicSymbol) \
  V(Symbol, premonomorphic_symbol, PremonomorphicSymbol) \
  V(Symbol, generic_symbol, GenericSymbol) \
  V(Symbol, stack_trace_symbol, StackTraceSymbol) \
  V(Symbol, detailed_stack_trace_symbol, DetailedStackTraceSymbol) \
  V(Symbol, normal_ic_symbol, NormalICSymbol) \
  V(Symbol, home_object_symbol, HomeObjectSymbol) \
  V(FixedArray, materialized_objects, MaterializedObjects) \
  V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
  V(FixedArray, microtask_queue, MicrotaskQueue)

// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V) \
  V(Smi, stack_limit, StackLimit) \
  V(Smi, real_stack_limit, RealStackLimit) \
  V(Smi, last_script_id, LastScriptId) \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)

#define ROOT_LIST(V) \
  STRONG_ROOT_LIST(V) \
  SMI_ROOT_LIST(V) \
  V(StringTable, string_table, StringTable)
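
// Illustrative sketch (not part of the original header): ROOT_LIST is an
// X-macro. A client supplies its own definition of V(type, name, camel_name)
// and the list stamps it out once per root, e.g. to count the roots:
//
//   #define COUNT_ROOT(type, name, camel_name) +1
//   static const int kNumberOfRoots = 0 ROOT_LIST(COUNT_ROOT);
//   #undef COUNT_ROOT
//
// The ROOT_ACCESSOR and ROOT_INDEX_DECLARATION macros further down in this
// file use exactly this pattern.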

// Heap roots that are known to be immortal immovable, for which we can safely
// skip write barriers.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
  V(one_pointer_filler_map) \
  V(two_pointer_filler_map) \
  V(uninitialized_value) \
  V(global_property_cell_map) \
  V(shared_function_info_map) \
  V(mutable_heap_number_map) \
  V(native_context_map) \
  V(fixed_cow_array_map) \
  V(fixed_double_array_map) \
  V(constant_pool_array_map) \
  V(no_interceptor_result_sentinel) \
  V(ordered_hash_table_map) \
  V(empty_fixed_array) \
  V(empty_byte_array) \
  V(empty_descriptor_array) \
  V(empty_constant_pool_array) \
  V(arguments_marker) \
  V(sloppy_arguments_elements_map) \
  V(function_context_map) \
  V(catch_context_map) \
  V(with_context_map) \
  V(block_context_map) \
  V(module_context_map) \
  V(global_context_map) \
  V(uninitialized_map) \
  V(message_object_map)

#define INTERNALIZED_STRING_LIST(V) \
  V(Object_string, "Object") \
  V(proto_string, "__proto__") \
  V(arguments_string, "arguments") \
  V(Arguments_string, "Arguments") \
  V(caller_string, "caller") \
  V(boolean_string, "boolean") \
  V(Boolean_string, "Boolean") \
  V(callee_string, "callee") \
  V(constructor_string, "constructor") \
  V(dot_result_string, ".result") \
  V(dot_for_string, ".for.") \
  V(eval_string, "eval") \
  V(empty_string, "") \
  V(function_string, "function") \
  V(Function_string, "Function") \
  V(length_string, "length") \
  V(name_string, "name") \
  V(null_string, "null") \
  V(number_string, "number") \
  V(Number_string, "Number") \
  V(float32x4_string, "float32x4") \
  V(float64x2_string, "float64x2") \
  V(int32x4_string, "int32x4") \
  V(nan_string, "NaN") \
  V(source_string, "source") \
  V(source_url_string, "source_url") \
  V(source_mapping_url_string, "source_mapping_url") \
  V(global_string, "global") \
  V(ignore_case_string, "ignoreCase") \
  V(multiline_string, "multiline") \
  V(sticky_string, "sticky") \
  V(harmony_regexps_string, "harmony_regexps") \
  V(input_string, "input") \
  V(index_string, "index") \
  V(last_index_string, "lastIndex") \
  V(object_string, "object") \
  V(prototype_string, "prototype") \
  V(string_string, "string") \
  V(String_string, "String") \
  V(symbol_string, "symbol") \
  V(Symbol_string, "Symbol") \
  V(Map_string, "Map") \
  V(Set_string, "Set") \
  V(WeakMap_string, "WeakMap") \
  V(WeakSet_string, "WeakSet") \
  V(for_string, "for") \
  V(for_api_string, "for_api") \
  V(for_intern_string, "for_intern") \
  V(private_api_string, "private_api") \
  V(private_intern_string, "private_intern") \
  V(Date_string, "Date") \
  V(char_at_string, "CharAt") \
  V(undefined_string, "undefined") \
  V(value_of_string, "valueOf") \
  V(stack_string, "stack") \
  V(toJSON_string, "toJSON") \
  V(KeyedLoadMonomorphic_string, "KeyedLoadMonomorphic") \
  V(KeyedStoreMonomorphic_string, "KeyedStoreMonomorphic") \
  V(stack_overflow_string, "kStackOverflowBoilerplate") \
  V(illegal_access_string, "illegal access") \
  V(cell_value_string, "%cell_value") \
  V(illegal_argument_string, "illegal argument") \
  V(identity_hash_string, "v8::IdentityHash") \
  V(closure_string, "(closure)") \
  V(compare_ic_string, "==") \
  V(strict_compare_ic_string, "===") \
  V(infinity_string, "Infinity") \
  V(minus_infinity_string, "-Infinity") \
  V(query_colon_string, "(?:)") \
  V(Generator_string, "Generator") \
  V(throw_string, "throw") \
  V(done_string, "done") \
  V(value_string, "value") \
  V(signMask, "signMask") \
  V(next_string, "next") \
  V(byte_length_string, "byteLength") \
  V(byte_offset_string, "byteOffset") \
  V(intl_initialized_marker_string, "v8::intl_initialized_marker") \
  V(intl_impl_object_string, "v8::intl_object")

// Forward declarations.
class WeakObjectRetainer;


typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                      Object** pointer);


class StoreBufferRebuilder {
 public:
  explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer) {}

  void Callback(MemoryChunk* page, StoreBufferEvent event);

 private:
  StoreBuffer* store_buffer_;

  // We record in this variable how full the store buffer was when we started
  // iterating over the current page, finding pointers to new space. If the
  // store buffer overflows again we can exempt the page from the store buffer
  // by rewinding to this point instead of having to search the store buffer.
  Object*** start_of_current_page_;
  // The current page we are scanning in the store buffer iterator.
  MemoryChunk* current_page_;
};

// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)
      : front_(NULL),
        rear_(NULL),
        limit_(NULL),
        emergency_stack_(NULL),
        heap_(heap) {}

  void Destroy() {
    delete emergency_stack_;
    emergency_stack_ = NULL;
  }

  Page* GetHeadPage() {
    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  }

  void SetNewLimit(Address limit) {
    limit_ = reinterpret_cast<intptr_t*>(limit);

    if (limit_ <= rear_) {
      return;
    }

    RelocateQueueHead();
  }

  bool IsBelowPromotionQueue(Address to_space_top) {
    // If the given to-space top pointer and the head of the promotion queue
    // are not on the same page, then the to-space objects are below the
    // promotion queue.
    if (GetHeadPage() != Page::FromAddress(to_space_top)) {
      return true;
    }
    // If the to-space top pointer is smaller or equal than the promotion
    // queue head, then the to-space objects are below the promotion queue.
    return reinterpret_cast<intptr_t*>(to_space_top) <= rear_;
  }

  bool is_empty() {
    return (front_ == rear_) &&
           (emergency_stack_ == NULL || emergency_stack_->length() == 0);
  }

  inline void insert(HeapObject* target, int size);

  void remove(HeapObject** target, int* size) {
    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();
      *target = e.obj_;
      *size = e.size_;
      return;
    }

    if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
      NewSpacePage* front_page =
          NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
      DCHECK(!front_page->prev_page()->is_anchor());
      front_ = reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
    }
    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));
  }

 private:
  // The front of the queue is higher in the memory page chain than the rear.
  intptr_t* front_;
  intptr_t* rear_;
  intptr_t* limit_;

  static const int kEntrySizeInWords = 2;

  struct Entry {
    Entry(HeapObject* obj, int size) : obj_(obj), size_(size) {}

    HeapObject* obj_;
    int size_;
  };
  List<Entry>* emergency_stack_;

  Heap* heap_;

  void RelocateQueueHead();

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
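
// Illustrative sketch (not part of the original header): each queue entry is
// kEntrySizeInWords == 2 words, written by insert() and read back by remove()
// as (object pointer, size). A scavenger drains the queue roughly like this:
//
//   HeapObject* obj;
//   int size;
//   while (!heap->promotion_queue()->is_empty()) {
//     heap->promotion_queue()->remove(&obj, &size);
//     // Scan the body [obj, obj + size) for pointers into new space,
//     // without having to dereference obj's map to determine the size.
//   }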

typedef void (*ScavengingCallback)(Map* map, HeapObject** slot,
                                   HeapObject* object);


// External strings table is a place where all external strings are
// registered. We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable {
 public:
  // Registers an external string.
  inline void AddString(String* string);

  inline void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  void CleanUp();

  // Destroys all allocated memory.
  void TearDown();

 private:
  explicit ExternalStringTable(Heap* heap) : heap_(heap) {}

  inline void Verify();

  inline void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline void ShrinkNewStrings(int position);

  // To speed up scavenge collections new space strings are kept
  // separate from old space strings.
  List<Object*> new_space_strings_;
  List<Object*> old_space_strings_;

  Heap* heap_;

  DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
};
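
// Illustrative note (not part of the original header): keeping new-space and
// old-space strings in separate lists means a scavenge only has to walk the
// (typically short) new-space list. Registration and traversal go through the
// owning Heap:
//
//   heap->external_string_table()->AddString(str);     // on externalization
//   heap->external_string_table()->Iterate(&visitor);  // during GC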

enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};


class Heap {
 public:
  // Configure heap size in MB before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(int max_semi_space_size, int max_old_space_size,
                     int max_executable_size, size_t code_range_size);
  bool ConfigureHeapDefault();

  // Prepares the heap, setting up memory areas that are needed in the isolate
  // without actually creating any objects.
  bool SetUp();

  // Bootstraps the object heap with the core set of objects required to run.
  // Returns whether it succeeded.
  bool CreateHeapObjects();

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Set the stack limit in the roots_ array. Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // Returns whether SetUp has been called.
  bool HasBeenSetUp();

  // Returns the maximum amount of memory reserved for the heap. For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space. The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
  int MaxSemiSpaceSize() { return max_semi_space_size_; }
  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  int InitialSemiSpaceSize() { return initial_semispace_size_; }
  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  intptr_t MaxExecutableSize() { return max_executable_size_; }
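
  // Worked example (illustrative only): with an 8 MB reserved semispace and a
  // 700 MB old generation, MaxReserved() is 4 * 8 MB + 700 MB = 732 MB. The
  // factor 4 covers the two semispaces of new space plus the doubled
  // reservation needed to align new space to its size.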

  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.
  intptr_t Capacity();

  // Returns the amount of memory currently committed for the heap.
  intptr_t CommittedMemory();

  // Returns the amount of executable memory currently committed for the heap.
  intptr_t CommittedMemoryExecutable();

  // Returns the amount of physical memory currently committed for the heap.
  size_t CommittedPhysicalMemory();

  // Returns the maximum amount of memory ever committed for the heap.
  intptr_t MaximumCommittedMemory() { return maximum_committed_; }

  // Updates the maximum committed memory for the heap. Should be called
  // whenever a space grows.
  void UpdateMaximumCommitted();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  intptr_t Available();

  // Returns the size of all objects residing in the heap.
  intptr_t SizeOfObjects();

  // Return the starting address and a mask for the new space. And-masking an
  // address with the mask will result in the start address of the new space
  // for all addresses in either semispace.
  Address NewSpaceStart() { return new_space_.start(); }
  uintptr_t NewSpaceMask() { return new_space_.mask(); }
  Address NewSpaceTop() { return new_space_.top(); }
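
  // Worked example (illustrative only): new space is aligned to its own size,
  // so (addr & NewSpaceMask()) == NewSpaceStart() holds for every addr in
  // either semispace. Generated code can use this identity for a fast
  // in-new-space check, e.g. on write-barrier fast paths.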

  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_pointer_space() { return old_pointer_space_; }
  OldSpace* old_data_space() { return old_data_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  CellSpace* cell_space() { return cell_space_; }
  PropertyCellSpace* property_cell_space() { return property_cell_space_; }
  LargeObjectSpace* lo_space() { return lo_space_; }
  PagedSpace* paged_space(int idx) {
    switch (idx) {
      case OLD_POINTER_SPACE:
        return old_pointer_space();
      case OLD_DATA_SPACE:
        return old_data_space();
      case MAP_SPACE:
        return map_space();
      case CELL_SPACE:
        return cell_space();
      case PROPERTY_CELL_SPACE:
        return property_cell_space();
      case CODE_SPACE:
        return code_space();
      default:
        return NULL;
    }
  }

  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
  Address always_allocate_scope_depth_address() {
    return reinterpret_cast<Address>(&always_allocate_scope_depth_);
  }

  Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  Address* OldPointerSpaceAllocationTopAddress() {
    return old_pointer_space_->allocation_top_address();
  }
  Address* OldPointerSpaceAllocationLimitAddress() {
    return old_pointer_space_->allocation_limit_address();
  }

  Address* OldDataSpaceAllocationTopAddress() {
    return old_data_space_->allocation_top_address();
  }
  Address* OldDataSpaceAllocationLimitAddress() {
    return old_data_space_->allocation_limit_address();
  }

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Optionally takes an AllocationSite to be appended in an AllocationMemento.
  MUST_USE_RESULT AllocationResult
      CopyJSObject(JSObject* source, AllocationSite* site = NULL);

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // Iterates the whole code space to clear all ICs of the given kind.
  void ClearAllICsByKind(Code::Kind kind);

  // For use during bootup.
  void RepairFreeListsAfterBoot();

  template <typename T>
  static inline bool IsOneByte(T t, int chars);

  // Move len elements within a given array from src_index to dst_index.
  void MoveElements(FixedArray* array, int dst_index, int src_index, int len);

  // Sloppy mode arguments object size.
  static const int kSloppyArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;
  // Strict mode arguments have no callee, so the object is smaller.
  static const int kStrictArgumentsObjectSize =
      JSObject::kHeaderSize + 1 * kPointerSize;
  // Indices for direct access into argument objects.
  static const int kArgumentsLengthIndex = 0;
  // callee is only valid in sloppy mode.
  static const int kArgumentsCalleeIndex = 1;
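
  // Layout sketch (illustrative only): an arguments object stores its
  // in-object properties at the indices above, so for a sloppy-mode object
  //
  //   slot 0 (kArgumentsLengthIndex): length
  //   slot 1 (kArgumentsCalleeIndex): callee
  //
  // which accounts for kSloppyArgumentsObjectSize being
  // JSObject::kHeaderSize + 2 * kPointerSize; strict mode drops the callee
  // slot and saves one pointer.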

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when introducing gaps within pages.
  void CreateFillerObjectAt(Address addr, int size);

  bool CanMoveObjectStart(HeapObject* object);

  // Indicates whether live bytes adjustment is triggered from within the GC
  // code or from mutator code.
  enum InvocationMode { FROM_GC, FROM_MUTATOR };

  // Maintain consistency of live bytes during incremental marking.
  void AdjustLiveBytes(Address address, int by, InvocationMode mode);

  // Trim the given array from the left. Note that this relocates the object
  // start and hence is only valid if there is only a single reference to it.
  FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Trim the given array from the right.
  template <Heap::InvocationMode mode>
  void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Converts the given boolean condition to a JavaScript boolean value.
  inline Object* ToBoolean(bool condition);

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(
      AllocationSpace space, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  static const int kNoGCFlags = 0;
  static const int kReduceMemoryFootprintMask = 1;
  static const int kAbortIncrementalMarkingMask = 2;

  // Making the heap iterable requires us to abort incremental marking.
  static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;

  // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(
      int flags, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Last hope GC, should try to squeeze as much garbage as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);
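
  // Usage sketch (illustrative only): test code that needs to walk the heap
  // typically forces a precise full GC first, e.g.
  //
  //   heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "make iterable");
  //
  // whereas passing kNoGCFlags requests an ordinary full collection.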

  // Check whether the heap is currently iterable.
  bool IsHeapIterable();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed();

  inline void increment_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_++;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  inline void decrement_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_--;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

  void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback);

  void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback);

  // Heap root getters. We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
  // TODO(1490): Try removing the unchecked accessors, now that GC marking does
  // not corrupt the map.
#define ROOT_ACCESSOR(type, name, camel_name) \
  type* name() { return type::cast(roots_[k##camel_name##RootIndex]); } \
  type* raw_unchecked_##name() { \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  Map* name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) \
  String* name() { return String::cast(roots_[k##name##RootIndex]); }
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR
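
  // Expansion sketch (illustrative only): for the undefined_value root,
  // ROOT_ACCESSOR above expands to
  //
  //   Oddball* undefined_value() {
  //     return Oddball::cast(roots_[kUndefinedValueRootIndex]);
  //   }
  //   Oddball* raw_unchecked_undefined_value() {
  //     return reinterpret_cast<Oddball*>(roots_[kUndefinedValueRootIndex]);
  //   }
  //
  // The raw_unchecked_ form skips the type::cast() check and is therefore the
  // one that is safe to call while a GC is in progress.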

  // The hidden_string is special because it is the empty string, but does
  // not match the empty string.
  String* hidden_string() { return hidden_string_; }

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() const { return native_contexts_list_; }

  void set_array_buffers_list(Object* object) { array_buffers_list_ = object; }
  Object* array_buffers_list() const { return array_buffers_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
  }
  Object* allocation_sites_list() { return allocation_sites_list_; }

  // Used in CreateAllocationSiteStub and the (de)serializer.
  Object** allocation_sites_list_address() { return &allocation_sites_list_; }

  Object* weak_object_to_code_table() { return weak_object_to_code_table_; }

  void set_encountered_weak_collections(Object* weak_collection) {
    encountered_weak_collections_ = weak_collection;
  }
  Object* encountered_weak_collections() const {
    return encountered_weak_collections_;
  }

  // Number of mark-sweeps.
  unsigned int ms_count() { return ms_count_; }

  // Iterates over all roots in the heap.
  void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over entries in the smi roots list. Only interesting to the
  // serializer/deserializer, since GC does not care about smis.
  void IterateSmiRoots(ObjectVisitor* v);
  // Iterates over all the other roots in the heap.
  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  // Iterate pointers to from semispace of new space found in memory interval
  // from start to end.
  void IterateAndMarkPointersToFromSpace(Address start, Address end,
                                         ObjectSlotCallback callback);

  // Returns whether the object resides in new space.
  inline bool InNewSpace(Object* object);
  inline bool InNewSpace(Address address);
  inline bool InNewSpacePage(Address address);
  inline bool InFromSpace(Object* object);
  inline bool InToSpace(Object* object);

  // Returns whether the object resides in old pointer space.
  inline bool InOldPointerSpace(Address address);
  inline bool InOldPointerSpace(Object* object);

  // Returns whether the object resides in old data space.
  inline bool InOldDataSpace(Address address);
  inline bool InOldDataSpace(Object* object);

  // Checks whether an address/object is in the heap (including auxiliary
  // area and unused area).
  bool Contains(Address addr);
  bool Contains(HeapObject* value);

  // Checks whether an address/object is in a space.
  // Currently used by tests, serialization and heap verification only.
  bool InSpace(Address addr, AllocationSpace space);
  bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its type.
  inline OldSpace* TargetSpace(HeapObject* object);
  static inline AllocationSpace TargetSpaceId(InstanceType type);

  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);

  // Sets the stub_cache_ (only used when expanding the dictionary).
  void public_set_code_stubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Support for computing object sizes for old objects during GCs. Returns
  // a function that is guaranteed to be safe for computing object sizes in
  // the current GC phase.
  HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
    return gc_safe_size_of_old_object_;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  void public_set_store_buffer_top(Address* top) {
    roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
  }

  void public_set_materialized_objects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;
  }

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  Address* store_buffer_top_address() {
    return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
  }

  // Verify the heap is in its normal state before or after a GC.
  void Verify();

  bool weak_embedded_objects_verification_enabled() {
    return no_weak_object_verification_scope_depth_ == 0;
  }

  void OldPointerSpaceCheckStoreBuffer();
  void MapSpaceCheckStoreBuffer();
  void LargeObjectSpaceCheckStoreBuffer();

  // Report heap statistics.
  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);

  // Zapping is needed for verify heap, and always done in debug builds.
  static inline bool ShouldZapGarbage() {
#ifdef DEBUG
    return true;
#else
#ifdef VERIFY_HEAP
    return FLAG_verify_heap;
#else
    return false;
#endif
#endif
  }

  // Number of "runtime allocations" done so far.
  uint32_t allocations_count() { return allocations_count_; }

  // Returns deterministic "time" value in ms. Works only with
  // FLAG_verify_predictable.
  double synthetic_time() { return allocations_count_ / 2.0; }

  // Print short heap statistics.
  void PrintShortHeapStatistics();

  // Write barrier support for address[offset] = o.
  INLINE(void RecordWrite(Address address, int offset));

  // Write barrier support for address[start : start + len[ = o.
  INLINE(void RecordWrites(Address address, int start, int len));

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
  inline HeapState gc_state() { return gc_state_; }

  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }

  void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }

  void TracePathToObjectFrom(Object* target, Object* root);
  void TracePathToObject(Object* target);
  void TracePathToGlobal();

  // Callback function passed to Heap::Iterate etc. Copies an object if
  // necessary, the object might be promoted to an old space. The caller must
  // ensure the precondition that the object is (a) a heap object and (b) in
  // the heap's from space.
  static inline void ScavengePointer(HeapObject** p);
  static inline void ScavengeObject(HeapObject** p, HeapObject* object);

  enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT };

  // If an object has an AllocationMemento trailing it, return it, otherwise
  // return NULL.
  inline AllocationMemento* FindAllocationMemento(HeapObject* object);

  // An object may have an AllocationSite associated with it through a trailing
  // AllocationMemento. Its feedback should be updated when objects are found
  // in the heap.
  static inline void UpdateAllocationSiteFeedback(HeapObject* object,
                                                  ScratchpadSlotMode mode);

  // Support for partial snapshots. After calling this we have a linear
  // space to write objects in each space.
  void ReserveSpace(int* sizes, Address* addresses);

  // Support for the API.

  void CreateApiObjects();

  inline intptr_t PromotedTotalSize() {
    int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
    if (total > kMaxInt) return static_cast<intptr_t>(kMaxInt);
    if (total < 0) return 0;
    return static_cast<intptr_t>(total);
  }

  inline intptr_t OldGenerationSpaceAvailable() {
    return old_generation_allocation_limit_ - PromotedTotalSize();
  }

  inline intptr_t OldGenerationCapacityAvailable() {
    return max_old_generation_size_ - PromotedTotalSize();
  }

  static const intptr_t kMinimumOldGenerationAllocationLimit =
      8 * (Page::kPageSize > MB ? Page::kPageSize : MB);

  static const int kPointerMultiplier = i::kPointerSize / 4;

  // The new space size has to be a power of 2. Sizes are in MB.
  static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier;

  // The old space size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeMediumMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier;

  // The executable size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
  static const int kMaxExecutableSizeMediumMemoryDevice =
      192 * kPointerMultiplier;
  static const int kMaxExecutableSizeHighMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxExecutableSizeHugeMemoryDevice =
      256 * kPointerMultiplier;
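
  // Worked example (illustrative only): kPointerMultiplier is
  // i::kPointerSize / 4, i.e. 1 on 32-bit and 2 on 64-bit targets, so on a
  // 64-bit build kMaxSemiSpaceSizeHighMemoryDevice is 16 (MB) and
  // kMaxOldSpaceSizeHugeMemoryDevice is 1400 (MB).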

  intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size,
                                        int freed_global_handles);

  // Indicates whether inline bump-pointer allocation has been disabled.
  bool inline_allocation_disabled() { return inline_allocation_disabled_; }

  // Switch whether inline bump-pointer allocation should be used.
  void EnableInlineAllocation();
  void DisableInlineAllocation();

  // Implements the corresponding V8 API function.
  bool IdleNotification(int idle_time_in_ms);

  // Declare all the root indices. This defines the root list order.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
    INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

// Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
    STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP

    kStringTableRootIndex,

#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

    kRootListLength,
    kStrongRootListLength = kStringTableRootIndex,
    kSmiRootsStart = kStringTableRootIndex + 1
  };

  STATIC_ASSERT(kUndefinedValueRootIndex ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);

  // Generated code can embed direct references to non-writable roots if
  // they are in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  Map* MapForFixedTypedArray(ExternalArrayType array_type);
  RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);

  Map* MapForExternalArrayType(ExternalArrayType array_type);
  RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind);
  RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
  ExternalArray* EmptyExternalArrayForMap(Map* map);
  FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Copy block of memory from src to dst. Size of block should be aligned
  // by pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes
  // and pointer size aligned addresses.
  static inline void MoveBlock(Address dst, Address src, int byte_size);

  // Check new space expansion criteria and expand semispaces if it was hit.
  void CheckNewSpaceExpansionCriteria();

  inline void IncrementPromotedObjectsSize(int object_size) {
    DCHECK(object_size > 0);
    promoted_objects_size_ += object_size;
  }

  inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
    DCHECK(object_size > 0);
    semi_space_copied_object_size_ += object_size;
  }

  inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }

  inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }

  inline void IncrementNodesPromoted() { nodes_promoted_++; }

  inline void IncrementYoungSurvivorsCounter(int survived) {
    DCHECK(survived >= 0);
    survived_since_last_expansion_ += survived;
  }

  inline bool NextGCIsLikelyToBeFull() {
    if (FLAG_gc_global) return true;

    if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;

    intptr_t adjusted_allocation_limit =
        old_generation_allocation_limit_ - new_space_.Capacity();

    if (PromotedTotalSize() >= adjusted_allocation_limit) return true;

    return false;
  }

  void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void UpdateReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void ProcessWeakReferences(WeakObjectRetainer* retainer);

  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);

  // An object should be promoted if the object has survived a
  // scavenge operation.
  inline bool ShouldBePromoted(Address old_address, int object_size);

  void ClearJSFunctionResultCaches();

  void ClearNormalizedMapCaches();

  GCTracer* tracer() { return &tracer_; }

  // Returns the size of objects residing in non new spaces.
  intptr_t PromotedSpaceSizeOfObjects();

  double total_regexp_code_generated() { return total_regexp_code_generated_; }
  void IncreaseTotalRegexpCodeGenerated(int size) {
    total_regexp_code_generated_ += size;
  }

  void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
    if (is_crankshafted) {
      crankshaft_codegen_bytes_generated_ += size;
    } else {
      full_codegen_bytes_generated_ += size;
    }
  }

  // Update GC statistics that are tracked on the Heap.
  void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
                                    double marking_time);

  // Returns maximum GC pause.
  double get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  double get_min_in_mutator() { return min_in_mutator_; }

  MarkCompactCollector* mark_compact_collector() {
    return &mark_compact_collector_;
  }

  StoreBuffer* store_buffer() { return &store_buffer_; }

  Marking* marking() { return &marking_; }

  IncrementalMarking* incremental_marking() { return &incremental_marking_; }

  ExternalStringTable* external_string_table() {
    return &external_string_table_;
  }

  // Returns the current sweep generation.
  int sweep_generation() { return sweep_generation_; }

  inline Isolate* isolate();

  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
  void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);

  inline bool OldGenerationAllocationLimitReached();

  inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
    scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
  }

  void QueueMemoryChunkForFree(MemoryChunk* chunk);
  void FreeQueuedChunks();

  int gc_count() const { return gc_count_; }

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  inline void CompletelyClearInstanceofCache();

  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  uint32_t HashSeed() {
    uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
    DCHECK(FLAG_randomize_hashes || seed == 0);
    return seed;
  }

  void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
    DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
    set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetConstructStubDeoptPCOffset(int pc_offset) {
    DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetGetterStubDeoptPCOffset(int pc_offset) {
    DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetSetterStubDeoptPCOffset(int pc_offset) {
    DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  // For post mortem debugging.
  void RememberUnmappedPage(Address page, bool compacted);

  // Global inline caching age: it is incremented on some GCs after context
  // disposal. We use it to flush inline caches.
  int global_ic_age() { return global_ic_age_; }

  void AgeInlineCaches() {
    global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
  }

  bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }

  int64_t amount_of_external_allocated_memory() {
    return amount_of_external_allocated_memory_;
  }

  void DeoptMarkedAllocationSites();

  bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }

  bool DeoptMaybeTenuredAllocationSites() {
    return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
  }

  // ObjectStats are kept in two arrays, counts and sizes. Related stats are
  // stored in a contiguous linear buffer. Stats groups are stored one after
  // another.
  enum {
    FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,
    FIRST_FIXED_ARRAY_SUB_TYPE =
        FIRST_CODE_KIND_SUB_TYPE + Code::NUMBER_OF_KINDS,
    FIRST_CODE_AGE_SUB_TYPE =
        FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1,
    OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1
  };

  void RecordObjectStats(InstanceType type, size_t size) {
    DCHECK(type <= LAST_TYPE);
    object_counts_[type]++;
    object_sizes_[type] += size;
  }

  void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size) {
    int code_sub_type_index = FIRST_CODE_KIND_SUB_TYPE + code_sub_type;
    int code_age_index =
        FIRST_CODE_AGE_SUB_TYPE + code_age - Code::kFirstCodeAge;
    DCHECK(code_sub_type_index >= FIRST_CODE_KIND_SUB_TYPE &&
           code_sub_type_index < FIRST_CODE_AGE_SUB_TYPE);
    DCHECK(code_age_index >= FIRST_CODE_AGE_SUB_TYPE &&
           code_age_index < OBJECT_STATS_COUNT);
    object_counts_[code_sub_type_index]++;
    object_sizes_[code_sub_type_index] += size;
    object_counts_[code_age_index]++;
    object_sizes_[code_age_index] += size;
  }

  void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
    DCHECK(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
    object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++;
    object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size;
  }
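
  // Index sketch (illustrative only): the object_counts_/object_sizes_
  // buffers are laid out as
  //
  //   [ instance types | code kinds | fixed array sub types | code ages ]
  //
  // so stats for code of kind k and age a land at
  // object_counts_[FIRST_CODE_KIND_SUB_TYPE + k] and
  // object_counts_[FIRST_CODE_AGE_SUB_TYPE + a - Code::kFirstCodeAge],
  // exactly as computed in RecordCodeSubTypeStats above.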

  void CheckpointObjectStats();

  // We don't use a LockGuard here since we want to lock the heap
  // only when FLAG_concurrent_recompilation is true.
  class RelocationLock {
   public:
    explicit RelocationLock(Heap* heap) : heap_(heap) {
      heap_->relocation_mutex_.Lock();
    }

    ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }

   private:
    Heap* heap_;
  };
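
  // Usage sketch (illustrative only): the lock is scoped, so a thread that
  // must not observe objects mid-relocation brackets the critical section:
  //
  //   {
  //     Heap::RelocationLock relocation_lock(heap);
  //     // ... read object fields that relocation could invalidate ...
  //   }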
1370 void AddWeakObjectToCodeDependency(Handle<Object> obj,
1371 Handle<DependentCode> dep);
1373 DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj);
1375 void InitializeWeakObjectToCodeTable() {
1376 set_weak_object_to_code_table(undefined_value());
1379 void EnsureWeakObjectToCodeTable();
1381 static void FatalProcessOutOfMemory(const char* location,
1382 bool take_snapshot = false);
1384 // This event is triggered after successful allocation of a new object made
1385 // by runtime. Allocations of target space for object evacuation do not
1386 // trigger the event. In order to track ALL allocations one must turn off
1387 // FLAG_inline_new and FLAG_use_allocation_folding.
1388 inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);
1390 // This event is triggered after object is moved to a new place.
1391 inline void OnMoveEvent(HeapObject* target, HeapObject* source,
1395 // Methods made available to tests.
1397 // Allocates a JS Map in the heap.
1398 MUST_USE_RESULT AllocationResult
1399 AllocateMap(InstanceType instance_type, int instance_size,
1400 ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
1402 // Allocates and initializes a new JavaScript object based on a
1404 // If allocation_site is non-null, then a memento is emitted after the object
1405 // that points to the site.
1406 MUST_USE_RESULT AllocationResult
1407 AllocateJSObject(JSFunction* constructor,
1408 PretenureFlag pretenure = NOT_TENURED,
1409 AllocationSite* allocation_site = NULL);
1411 // Allocates and initializes a new JavaScript object based on a map.
1412 // Passing an allocation site means that a memento will be created that
1413 // points to the site.
1414 MUST_USE_RESULT AllocationResult
1415 AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure = NOT_TENURED,
1416 bool alloc_props = true,
1417 AllocationSite* allocation_site = NULL);
1419 // Allocated a HeapNumber from value.
1420 MUST_USE_RESULT AllocationResult
1421 AllocateHeapNumber(double value, MutableMode mode = IMMUTABLE,
1422 PretenureFlag pretenure = NOT_TENURED);
1424 // Allocated a Float32x4 from value.
1425 MUST_USE_RESULT AllocationResult AllocateFloat32x4(
1426 float32x4_value_t value,
1427 PretenureFlag pretenure = NOT_TENURED);
1429 // Allocated a Float64x2 from value.
1430 MUST_USE_RESULT AllocationResult AllocateFloat64x2(
1431 float64x2_value_t value,
1432 PretenureFlag pretenure = NOT_TENURED);
1434 // Allocated a Int32x4 from value.
1435 MUST_USE_RESULT AllocationResult AllocateInt32x4(
1436 int32x4_value_t value,
1437 PretenureFlag pretenure = NOT_TENURED);
1439 // Allocate a byte array of the specified length
1440 MUST_USE_RESULT AllocationResult
1441 AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);
1443 // Copy the code and scope info part of the code object, but insert
1444 // the provided data as the relocation information.
1445 MUST_USE_RESULT AllocationResult
1446 CopyCode(Code* code, Vector<byte> reloc_info);
1448 MUST_USE_RESULT AllocationResult CopyCode(Code* code);
1450 // Allocates a fixed array initialized with undefined values
1451 MUST_USE_RESULT AllocationResult
1452 AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);
1457 // The amount of external memory registered through the API kept alive
1458 // by global handles
1459 int64_t amount_of_external_allocated_memory_;
1461 // Caches the amount of external memory registered at the last global gc.
1462 int64_t amount_of_external_allocated_memory_at_last_global_gc_;
1464 // This can be calculated directly from a pointer to the heap; however, it is
1465 // more expedient to get at the isolate directly from within Heap methods.
1468 Object* roots_[kRootListLength];
1470 size_t code_range_size_;
1471 int reserved_semispace_size_;
1472 int max_semi_space_size_;
1473 int initial_semispace_size_;
1474 intptr_t max_old_generation_size_;
1475 intptr_t max_executable_size_;
1476 intptr_t maximum_committed_;
1478 // For keeping track of how much data has survived
1479 // scavenge since last new space expansion.
1480 int survived_since_last_expansion_;
1482 // For keeping track on when to flush RegExp code.
1483 int sweep_generation_;
1485 int always_allocate_scope_depth_;
1487 // For keeping track of context disposals.
1488 int contexts_disposed_;
1492 bool flush_monomorphic_ics_;
1494 int scan_on_scavenge_pages_;
1496 NewSpace new_space_;
1497 OldSpace* old_pointer_space_;
1498 OldSpace* old_data_space_;
1499 OldSpace* code_space_;
1500 MapSpace* map_space_;
1501 CellSpace* cell_space_;
1502 PropertyCellSpace* property_cell_space_;
1503 LargeObjectSpace* lo_space_;
1504 HeapState gc_state_;
1505 int gc_post_processing_depth_;
1506 Address new_space_top_after_last_gc_;
1508 // Returns the amount of external memory registered since last global gc.
1509 int64_t PromotedExternalMemorySize();
1511 // How many "runtime allocations" happened.
1512 uint32_t allocations_count_;
1514 // Running hash over allocations performed.
1515 uint32_t raw_allocations_hash_;
1517 // Countdown counter, dumps allocation hash when 0.
1518 uint32_t dump_allocations_hash_countdown_;
1520 // How many mark-sweep collections happened.
1521 unsigned int ms_count_;
1523 // How many gc happened.
1524 unsigned int gc_count_;
1526 // For post mortem debugging.
1527 static const int kRememberedUnmappedPages = 128;
1528 int remembered_unmapped_pages_index_;
1529 Address remembered_unmapped_pages_[kRememberedUnmappedPages];
1531 // Total length of the strings we failed to flatten since the last GC.
1532 int unflattened_strings_length_;
1534 #define ROOT_ACCESSOR(type, name, camel_name) \
1535 inline void set_##name(type* value) { \
1536 /* The deserializer makes use of the fact that these common roots are */ \
1537 /* never in new space and never on a page that is being compacted. */ \
1538 DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
1539 roots_[k##camel_name##RootIndex] = value; \
1541 ROOT_LIST(ROOT_ACCESSOR)
1542 #undef ROOT_ACCESSOR
1545 // If the --gc-interval flag is set to a positive value, this
1546 // variable holds the value indicating the number of allocations
1547 // remain until the next failure and garbage collection.
1548 int allocation_timeout_;
1551 // Limit that triggers a global GC on the next (normally caused) GC. This
1552 // is checked when we have already decided to do a GC to help determine
1553 // which collector to invoke, before expanding a paged space in the old
1554 // generation and on every allocation in large object space.
1555 intptr_t old_generation_allocation_limit_;
1557 // Indicates that an allocation has failed in the old generation since the
1559 bool old_gen_exhausted_;
1561 // Indicates that inline bump-pointer allocation has been globally disabled
1562 // for all spaces. This is used to disable allocations in generated code.
1563 bool inline_allocation_disabled_;
  // Weak list heads, threaded through the objects.
  // List heads are initialized lazily and contain the undefined_value at
  // start.
  Object* native_contexts_list_;
  Object* array_buffers_list_;
  Object* allocation_sites_list_;

  // WeakHashTable that maps objects embedded in optimized code to dependent
  // code list. It is initialized lazily and contains the undefined_value at
  // start.
  Object* weak_object_to_code_table_;

  // List of encountered weak collections (JSWeakMap and JSWeakSet) during
  // marking. It is initialized during marking, destroyed after marking and
  // contains Smi(0) while marking is not active.
  Object* encountered_weak_collections_;

  StoreBufferRebuilder store_buffer_rebuilder_;

  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantStringTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  static const StringTypeTable string_type_table[];
  static const ConstantStringTable constant_string_table[];
  static const StructTable struct_table[];

  // The special hidden string which is an empty string, but does not match
  // any string when looked up in properties.
  String* hidden_string_;

  // GC callback function, called before and after mark-compact GC.
  // Allocations in the callback function are disallowed.
  struct GCPrologueCallbackPair {
    GCPrologueCallbackPair(v8::Isolate::GCPrologueCallback callback,
                           GCType gc_type, bool pass_isolate)
        : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {}
    bool operator==(const GCPrologueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    v8::Isolate::GCPrologueCallback callback;
    GCType gc_type;
    // TODO(dcarney): remove variable
    bool pass_isolate_;
  };
  List<GCPrologueCallbackPair> gc_prologue_callbacks_;

  struct GCEpilogueCallbackPair {
    GCEpilogueCallbackPair(v8::Isolate::GCPrologueCallback callback,
                           GCType gc_type, bool pass_isolate)
        : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {}
    bool operator==(const GCEpilogueCallbackPair& pair) const {
      return pair.callback == callback;
    }
    v8::Isolate::GCPrologueCallback callback;
    GCType gc_type;
    // TODO(dcarney): remove variable
    bool pass_isolate_;
  };
  List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;

  // Support for computing object sizes during GC.
  HeapObjectCallback gc_safe_size_of_old_object_;
  static int GcSafeSizeOfOldObject(HeapObject* object);

  // Update the GC state. Called from the mark-compact collector.
  void MarkMapPointersAsEncoded(bool encoded) {
    DCHECK(!encoded);
    gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
  }

  // Code that should be run before and after each GC. Includes some
  // reporting/verification activities when compiled with DEBUG set.
  void GarbageCollectionPrologue();
  void GarbageCollectionEpilogue();

  // Pretenuring decisions are made based on feedback collected during new
  // space evacuation. Note that between feedback collection and calling this
  // method, objects in old space must not move.
  // Right now we only process pretenuring feedback in high promotion mode.
  void ProcessPretenuringFeedback();

  // Checks whether a global GC is necessary.
  GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                          const char** reason);

  // Make sure there is a filler value behind the top of the new space
  // so that the GC does not confuse some uninitialized/stale memory
  // with the allocation memento of the object at the top.
  void EnsureFillerObjectAtTop();

  // Ensure that we have swept all spaces in such a way that we can iterate
  // over all objects. May cause a GC.
  void MakeHeapIterable();

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(
      GarbageCollector collector, const char* gc_reason,
      const char* collector_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs garbage collection.
  // Returns whether there is a chance another major GC could
  // collect more garbage.
  bool PerformGarbageCollection(
      GarbageCollector collector,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  inline void UpdateOldSpaceLimits();

  // Selects the proper allocation space depending on the given object
  // size, pretenuring decision, and preferred old-space.
  static AllocationSpace SelectSpace(int object_size,
                                     AllocationSpace preferred_old_space,
                                     PretenureFlag pretenure) {
    DCHECK(preferred_old_space == OLD_POINTER_SPACE ||
           preferred_old_space == OLD_DATA_SPACE);
    if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
    return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
  }
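
  // For illustration, a hypothetical sketch of how SelectSpace routes an
  // allocation (the sizes below are made up for the example):
  //
  //   SelectSpace(32, OLD_DATA_SPACE, TENURED);      // == OLD_DATA_SPACE
  //   SelectSpace(32, OLD_DATA_SPACE, NOT_TENURED);  // == NEW_SPACE
  //   // Oversized objects go to LO_SPACE regardless of pretenuring:
  //   SelectSpace(Page::kMaxRegularHeapObjectSize + 1, OLD_POINTER_SPACE,
  //               TENURED);                          // == LO_SPACE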

  // Allocate an uninitialized object. The memory is non-executable if the
  // hardware and OS allow. This is the single choke-point for allocations
  // performed by the runtime and should not be bypassed (to extend this to
  // inlined allocations, use the Heap::DisableInlineAllocation() support).
  MUST_USE_RESULT inline AllocationResult AllocateRaw(
      int size_in_bytes, AllocationSpace space, AllocationSpace retry_space);
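
  // For illustration, a hedged sketch of the calling convention (hypothetical
  // caller code; the retry policy shown is illustrative only):
  //
  //   AllocationResult allocation =
  //       AllocateRaw(size_in_bytes, NEW_SPACE, OLD_POINTER_SPACE);
  //   HeapObject* result = NULL;
  //   if (!allocation.To(&result)) {
  //     // Failed; callers typically collect garbage and retry, which is
  //     // what the retry_space argument supports.
  //     return allocation;
  //   }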

  // Allocates a heap object based on the map.
  MUST_USE_RESULT AllocationResult
      Allocate(Map* map, AllocationSpace space,
               AllocationSite* allocation_site = NULL);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT AllocationResult
      AllocatePartialMap(InstanceType instance_type, int instance_size);

  // Initializes a JSObject based on its map.
  void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
                                 Map* map);
  void InitializeAllocationMemento(AllocationMemento* memento,
                                   AllocationSite* allocation_site);

  // Allocate a block of memory in the given space (filled with a filler).
  // Used as a fall-back for generated code when the space is full.
  MUST_USE_RESULT AllocationResult
      AllocateFillerObject(int size, bool double_align, AllocationSpace space);

  // Allocate an uninitialized fixed array.
  MUST_USE_RESULT AllocationResult
      AllocateRawFixedArray(int length, PretenureFlag pretenure);

  // Allocate an uninitialized fixed double array.
  MUST_USE_RESULT AllocationResult
      AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);

  // Allocate an initialized fixed array with the given filler value.
  MUST_USE_RESULT AllocationResult
      AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
                                   Object* filler);

  // Allocates and partially initializes a String. There are two String
  // encodings: one-byte and two-byte. These functions allocate a string of
  // the given length and set its map and length fields. The characters of
  // the string are uninitialized.
  MUST_USE_RESULT AllocationResult
      AllocateRawOneByteString(int length, PretenureFlag pretenure);
  MUST_USE_RESULT AllocationResult
      AllocateRawTwoByteString(int length, PretenureFlag pretenure);

  bool CreateInitialMaps();
  void CreateInitialObjects();

  // Allocates an internalized string in old space based on the character
  // stream.
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
      Vector<const char> str, int chars, uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
      Vector<const uint8_t> str, uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
      Vector<const uc16> str, uint32_t hash_field);

  template <bool is_one_byte, typename T>
  MUST_USE_RESULT AllocationResult
      AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);

  template <typename T>
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
      T t, int chars, uint32_t hash_field);

  // Allocates an uninitialized fixed array. It must be filled by the caller.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT AllocationResult
      CopyFixedArrayWithMap(FixedArray* src, Map* map);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
      FixedDoubleArray* src);

  // Make a copy of src and return it. Returns
  // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
  MUST_USE_RESULT inline AllocationResult CopyConstantPoolArray(
      ConstantPoolArray* src);

  // Computes a single character string where the character has the given code.
  // A cache is used for one-byte (Latin1) codes.
  MUST_USE_RESULT AllocationResult
      LookupSingleCharacterStringFromCode(uint16_t code);

  // Allocate a symbol in old space.
  MUST_USE_RESULT AllocationResult AllocateSymbol();

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult
      CopyConstantPoolArrayWithMap(ConstantPoolArray* src, Map* map);

  MUST_USE_RESULT AllocationResult AllocateConstantPoolArray(
      const ConstantPoolArray::NumberOfEntries& small);

  MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray(
      const ConstantPoolArray::NumberOfEntries& small,
      const ConstantPoolArray::NumberOfEntries& extended);

  // Allocates an external array of the specified length and type.
  MUST_USE_RESULT AllocationResult
      AllocateExternalArray(int length, ExternalArrayType array_type,
                            void* external_pointer, PretenureFlag pretenure);

  // Allocates a fixed typed array of the specified length and type.
  MUST_USE_RESULT AllocationResult
      AllocateFixedTypedArray(int length, ExternalArrayType array_type,
                              PretenureFlag pretenure);

  // Make a copy of src and return it.
  MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult
      CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, Map* map);

  // Allocates a fixed double array with uninitialized values.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
      int length, PretenureFlag pretenure = NOT_TENURED);

  // These two Create*EntryStub functions are here and forced to not be inlined
  // because of a gcc-4.4 bug that assigns wrong vtable entries.
  NO_INLINE(void CreateJSEntryStub());
  NO_INLINE(void CreateJSConstructEntryStub());

  void CreateFixedStubs();

  // Allocate empty fixed array.
  MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();

  // Allocate empty external array of given type.
  MUST_USE_RESULT AllocationResult
      AllocateEmptyExternalArray(ExternalArrayType array_type);

  // Allocate empty fixed typed array of given type.
  MUST_USE_RESULT AllocationResult
      AllocateEmptyFixedTypedArray(ExternalArrayType array_type);

  // Allocate empty constant pool array.
  MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray();

  // Allocate a tenured simple cell.
  MUST_USE_RESULT AllocationResult AllocateCell(Object* value);

  // Allocate a tenured JS global property cell initialized with the hole.
  MUST_USE_RESULT AllocationResult AllocatePropertyCell();

  // Allocates a new utility object in the old generation.
  MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);

  // Allocates a new foreign object.
  MUST_USE_RESULT AllocationResult
      AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT AllocationResult
      AllocateCode(int object_size, bool immovable);

  MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key);

  MUST_USE_RESULT AllocationResult InternalizeString(String* str);

  // Performs a minor collection in new generation.
  void Scavenge();

  // Commits from space if it is uncommitted.
  void EnsureFromSpaceIsCommitted();

  // Uncommit unused semi space.
  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }

  // Fill in bogus values in from space.
  void ZapFromSpace();

  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Heap* heap, Object** pointer);

  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
  static void ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page,
                                          StoreBufferEvent event);

  // Performs a major collection in the whole heap.
  void MarkCompact();

  // Code to be run before and after mark-compact.
  void MarkCompactPrologue();

  void ProcessNativeContexts(WeakObjectRetainer* retainer);
  void ProcessArrayBuffers(WeakObjectRetainer* retainer);
  void ProcessAllocationSites(WeakObjectRetainer* retainer);

  // Deopts all code that contains allocation instructions which are tenured or
  // not tenured. Moreover it clears the pretenuring allocation site statistics.
  void ResetAllAllocationSitesDependentCode(PretenureFlag flag);

  // Evaluates local pretenuring for the old space and calls
  // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
  // the old space.
  void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);

  // Called on heap tear-down.
  void TearDownArrayBuffers();

  // Record statistics before and after garbage collection.
  void ReportStatisticsBeforeGC();
  void ReportStatisticsAfterGC();

  // Slow part of scavenge object.
  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  // Total RegExp code ever generated.
  double total_regexp_code_generated_;

  // Computes the length of the full-sized number string cache.
  int FullSizeNumberStringCacheLength();
  // Flush the number to string cache.
  void FlushNumberStringCache();

  // Sets used allocation sites entries to undefined.
  void FlushAllocationSitesScratchpad();

  // Initializes the allocation sites scratchpad with undefined values.
  void InitializeAllocationSitesScratchpad();

  // Adds an allocation site to the scratchpad if there is space left.
  void AddAllocationSiteToScratchpad(AllocationSite* site,
                                     ScratchpadSlotMode mode);

  void UpdateSurvivalStatistics(int start_new_space_size);

  static const int kYoungSurvivalRateHighThreshold = 90;
  static const int kYoungSurvivalRateAllowedDeviation = 15;

  static const int kOldSurvivalRateLowThreshold = 10;

  int high_survival_rate_period_length_;
  intptr_t promoted_objects_size_;
  double promotion_rate_;
  intptr_t semi_space_copied_object_size_;
  double semi_space_copied_rate_;
  int nodes_died_in_new_space_;
  int nodes_copied_in_new_space_;
  int nodes_promoted_;

  // This is the pretenuring trigger for allocation sites that are in maybe
  // tenure state. When we switch to the maximum new space size we deoptimize
  // the code that belongs to the allocation site and derive the lifetime
  // of the allocation site.
  unsigned int maximum_size_scavenges_;

  // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
  // Re-visit incremental marking heuristics.
  bool IsHighSurvivalRate() { return high_survival_rate_period_length_ > 0; }

  void SelectScavengingVisitorsTable();

  void AdvanceIdleIncrementalMarking(intptr_t step_size);

  bool WorthActivatingIncrementalMarking();

  void ClearObjectStats(bool clear_last_time_stats = false);

  void set_weak_object_to_code_table(Object* value) {
    DCHECK(!InNewSpace(value));
    weak_object_to_code_table_ = value;
  }

  Object** weak_object_to_code_table_address() {
    return &weak_object_to_code_table_;
  }

  inline void UpdateAllocationsHash(HeapObject* object);
  inline void UpdateAllocationsHash(uint32_t value);
  inline void PrintAlloctionsHash();

  static const int kInitialStringTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;
  static const int kInitialNumberStringCacheSize = 256;

  // Object counts and used memory by InstanceType.
  size_t object_counts_[OBJECT_STATS_COUNT];
  size_t object_counts_last_time_[OBJECT_STATS_COUNT];
  size_t object_sizes_[OBJECT_STATS_COUNT];
  size_t object_sizes_last_time_[OBJECT_STATS_COUNT];

  // Maximum GC pause.
  double max_gc_pause_;

  // Total time spent in GC.
  double total_gc_time_ms_;

  // Maximum size of objects alive after GC.
  intptr_t max_alive_after_gc_;

  // Minimal interval between two subsequent collections.
  double min_in_mutator_;

  // Cumulative GC time spent in marking.
  double marking_time_;

  // Cumulative GC time spent in sweeping.
  double sweeping_time_;

  MarkCompactCollector mark_compact_collector_;

  StoreBuffer store_buffer_;

  IncrementalMarking incremental_marking_;

  GCIdleTimeHandler gc_idle_time_handler_;
  unsigned int gc_count_at_last_idle_gc_;

  // These two counters are monotonically increasing and never reset.
  size_t full_codegen_bytes_generated_;
  size_t crankshaft_codegen_bytes_generated_;

  // If the --deopt_every_n_garbage_collections flag is set to a positive
  // value, this variable holds the number of garbage collections since the
  // last deoptimization triggered by garbage collection.
  int gcs_since_last_deopt_;

#ifdef VERIFY_HEAP
  int no_weak_object_verification_scope_depth_;
#endif

  static const int kAllocationSiteScratchpadSize = 256;
  int allocation_sites_scratchpad_length_;

  static const int kMaxMarkCompactsInIdleRound = 7;
  static const int kIdleScavengeThreshold = 5;

  // Shared state read by the scavenge collector and set by ScavengeObject.
  PromotionQueue promotion_queue_;

  // Flag is set when the heap has been configured. The heap can be repeatedly
  // configured through the API until it is set up.
  bool configured_;

  ExternalStringTable external_string_table_;

  VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;

  MemoryChunk* chunks_queued_for_free_;

  base::Mutex relocation_mutex_;

  int gc_callbacks_depth_;

  friend class AlwaysAllocateScope;
  friend class Factory;
  friend class GCCallbacksScope;
  friend class GCTracer;
  friend class HeapIterator;
  friend class Isolate;
  friend class MarkCompactCollector;
  friend class MarkCompactMarkingVisitor;
  friend class MapCompact;
#ifdef VERIFY_HEAP
  friend class NoWeakObjectVerificationScope;
#endif

  DISALLOW_COPY_AND_ASSIGN(Heap);
};

class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  int* start_marker;                       // 0
  int* new_space_size;                     // 1
  int* new_space_capacity;                 // 2
  intptr_t* old_pointer_space_size;        // 3
  intptr_t* old_pointer_space_capacity;    // 4
  intptr_t* old_data_space_size;           // 5
  intptr_t* old_data_space_capacity;       // 6
  intptr_t* code_space_size;               // 7
  intptr_t* code_space_capacity;           // 8
  intptr_t* map_space_size;                // 9
  intptr_t* map_space_capacity;            // 10
  intptr_t* cell_space_size;               // 11
  intptr_t* cell_space_capacity;           // 12
  intptr_t* lo_space_size;                 // 13
  int* global_handle_count;                // 14
  int* weak_global_handle_count;           // 15
  int* pending_global_handle_count;        // 16
  int* near_death_global_handle_count;     // 17
  int* free_global_handle_count;           // 18
  intptr_t* memory_allocator_size;         // 19
  intptr_t* memory_allocator_capacity;     // 20
  int* objects_per_type;                   // 21
  int* size_per_type;                      // 22
  int* os_error;                           // 23
  int* end_marker;                         // 24
  intptr_t* property_cell_space_size;      // 25
  intptr_t* property_cell_space_capacity;  // 26
};

class AlwaysAllocateScope {
 public:
  explicit inline AlwaysAllocateScope(Isolate* isolate);
  inline ~AlwaysAllocateScope();

 private:
  // Implicitly disable artificial allocation failures.
  Heap* heap_;
  DisallowAllocationFailure daf_;
};

class NoWeakObjectVerificationScope {
 public:
  inline NoWeakObjectVerificationScope();
  inline ~NoWeakObjectVerificationScope();
};

class GCCallbacksScope {
 public:
  explicit inline GCCallbacksScope(Heap* heap);
  inline ~GCCallbacksScope();

  inline bool CheckReenter();

 private:
  Heap* heap_;
};

// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
// Caveat: Heap::Contains is an approximation because it can return true for
// objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor : public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end);
};

// Verify that all objects are Smis.
class VerifySmisVisitor : public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end);
};

// Space iterator for iterating over all spaces of the heap. Returns each space
// in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
  Space* next();

 private:
  Heap* heap_;
  int counter_;
};
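
// For illustration, a hypothetical sketch of the iteration idiom shared by
// AllSpaces and the other space iterators that follow (next() returns null
// when iteration is done):
//
//   AllSpaces spaces(heap);
//   for (Space* space = spaces.next(); space != NULL;
//        space = spaces.next()) {
//     // Inspect the space.
//   }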

// Space iterator for iterating over all old spaces of the heap: Old pointer
// space, old data space and code space. Returns each space in turn, and null
// when it is done.
class OldSpaces BASE_EMBEDDED {
 public:
  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
  OldSpace* next();

 private:
  Heap* heap_;
  int counter_;
};

// Space iterator for iterating over all the paged spaces of the heap: Map
// space, old pointer space, old data space, code space and cell space. Returns
// each space in turn, and null when it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
  PagedSpace* next();

 private:
  Heap* heap_;
  int counter_;
};

// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
 public:
  explicit SpaceIterator(Heap* heap);
  SpaceIterator(Heap* heap, HeapObjectCallback size_func);
  virtual ~SpaceIterator();

  bool has_next();
  ObjectIterator* next();

 private:
  ObjectIterator* CreateIterator();

  Heap* heap_;
  int current_space_;         // from enum AllocationSpace.
  ObjectIterator* iterator_;  // object iterator for the current space.
  HeapObjectCallback size_func_;
};

// A HeapIterator provides iteration over the whole heap. It
// aggregates the specific iterators for the different spaces as
// these can each iterate over one space only.
//
// HeapIterator ensures there is no allocation during its lifetime
// (using an embedded DisallowHeapAllocation instance).
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). As implementation of free
// nodes filtering uses GC marks, it can't be used during MS/MC GC
// phases. Also, it is forbidden to interrupt iteration in this mode,
// as this will leave heap objects marked (and thus, unusable).
class HeapObjectsFilter;

class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };

  explicit HeapIterator(Heap* heap);
  HeapIterator(Heap* heap, HeapObjectsFiltering filtering);
  ~HeapIterator();

  HeapObject* next();

 private:
  struct MakeHeapIterableHelper {
    explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
  };

  // Perform the initialization.
  void Init();
  // Perform all necessary shutdown (destruction) work.
  void Shutdown();
  HeapObject* NextObject();

  MakeHeapIterableHelper make_heap_iterable_helper_;
  DisallowHeapAllocation no_heap_allocation_;
  Heap* heap_;
  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  ObjectIterator* object_iterator_;
};
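
// For illustration, a hypothetical sketch of a whole-heap walk using the
// iterator above:
//
//   HeapIterator iterator(heap);
//   for (HeapObject* obj = iterator.next(); obj != NULL;
//        obj = iterator.next()) {
//     // Visit obj. No heap allocation is allowed while iterating.
//   }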

// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
 public:
  // Lookup field offset for (map, name). If absent, -1 is returned.
  int Lookup(Handle<Map> map, Handle<Name> name);

  // Update an element in the cache.
  void Update(Handle<Map> map, Handle<Name> name, int field_offset);

  // Clear the cache.
  void Clear();

  static const int kLength = 256;
  static const int kCapacityMask = kLength - 1;
  static const int kMapHashShift = 5;
  static const int kHashMask = -4;  // Zero the last two bits.
  static const int kEntriesPerBucket = 4;
  static const int kEntryLength = 2;
  static const int kMapIndex = 0;
  static const int kKeyIndex = 1;
  static const int kNotFound = -1;

  // kEntriesPerBucket should be a power of 2.
  STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
  STATIC_ASSERT(kEntriesPerBucket == -kHashMask);

 private:
  KeyedLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].map = NULL;
      keys_[i].name = NULL;
      field_offsets_[i] = kNotFound;
    }
  }

  static inline int Hash(Handle<Map> map, Handle<Name> name);

  // Get the address of the keys and field_offsets arrays. Used in
  // generated code to perform cache lookups.
  Address keys_address() { return reinterpret_cast<Address>(&keys_); }

  Address field_offsets_address() {
    return reinterpret_cast<Address>(&field_offsets_);
  }

  struct Key {
    Map* map;
    Name* name;
  };

  Key keys_[kLength];
  int field_offsets_[kLength];

  friend class ExternalReference;
  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
};
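
// For illustration, a hypothetical sketch of the lookup-then-update idiom
// this cache supports (the slow-path computation is elided):
//
//   int offset = keyed_lookup_cache->Lookup(map, name);
//   if (offset == KeyedLookupCache::kNotFound) {
//     offset = ...;  // slow path: search the map for the field offset
//     keyed_lookup_cache->Update(map, name, offset);
//   }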

// Cache for mapping (map, property name) into descriptor index.
// The cache contains both positive and negative results.
// Descriptor index equals kNotFound means the property is absent.
// Cleared at startup and prior to any GC.
class DescriptorLookupCache {
 public:
  // Lookup descriptor index for (map, name).
  // If absent, kAbsent is returned.
  int Lookup(Map* source, Name* name) {
    if (!name->IsUniqueName()) return kAbsent;
    int index = Hash(source, name);
    Key& key = keys_[index];
    if ((key.source == source) && (key.name == name)) return results_[index];
    return kAbsent;
  }

  // Update an element in the cache.
  void Update(Map* source, Name* name, int result) {
    DCHECK(result != kAbsent);
    if (name->IsUniqueName()) {
      int index = Hash(source, name);
      Key& key = keys_[index];
      key.source = source;
      key.name = name;
      results_[index] = result;
    }
  }

  // Clear the cache.
  void Clear();

  static const int kAbsent = -2;

 private:
  DescriptorLookupCache() {
    for (int i = 0; i < kLength; ++i) {
      keys_[i].source = NULL;
      keys_[i].name = NULL;
      results_[i] = kAbsent;
    }
  }

  static int Hash(Object* source, Name* name) {
    // Uses only lower 32 bits if pointers are larger.
    uint32_t source_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
        kPointerSizeLog2;
    uint32_t name_hash =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >>
        kPointerSizeLog2;
    return (source_hash ^ name_hash) % kLength;
  }

  static const int kLength = 64;
  struct Key {
    Map* source;
    Name* name;
  };

  Key keys_[kLength];
  int results_[kLength];

  friend class Isolate;
  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
};
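
// For illustration, a hypothetical sketch of consulting the descriptor cache
// before a full descriptor-array search (the slow path is elided):
//
//   int number = descriptor_lookup_cache->Lookup(map, name);
//   if (number == DescriptorLookupCache::kAbsent) {
//     number = ...;  // slow path: search the map's instance descriptors
//     descriptor_lookup_cache->Update(map, name, number);
//   }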

class RegExpResultsCache {
 public:
  enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS };

  // Attempt to retrieve a cached result. On failure, 0 is returned as a Smi.
  // On success, the returned result is guaranteed to be a COW-array.
  static Object* Lookup(Heap* heap, String* key_string, Object* key_pattern,
                        ResultsCacheType type);
  // Attempt to add value_array to the cache specified by type. On success,
  // value_array is turned into a COW-array.
  static void Enter(Isolate* isolate, Handle<String> key_string,
                    Handle<Object> key_pattern, Handle<FixedArray> value_array,
                    ResultsCacheType type);
  static void Clear(FixedArray* cache);
  static const int kRegExpResultsCacheSize = 0x100;

 private:
  static const int kArrayEntriesPerCacheEntry = 4;
  static const int kStringOffset = 0;
  static const int kPatternOffset = 1;
  static const int kArrayOffset = 2;
};
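
// For illustration, a hypothetical sketch of consulting the results cache
// before recomputing a RegExp result (the names are illustrative only):
//
//   Object* cached = RegExpResultsCache::Lookup(
//       heap, subject, pattern, RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
//   if (cached->IsFixedArray()) {
//     // Hit: the cached COW-array can be used directly.
//   } else {
//     // Miss (Smi 0): compute value_array, then publish it for next time.
//     RegExpResultsCache::Enter(isolate, key_string, key_pattern, value_array,
//                               RegExpResultsCache::STRING_SPLIT_SUBSTRINGS);
//   }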

// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Return whether this object should be retained. If NULL is returned the
  // object has no references. Otherwise the address of the retained object
  // should be returned as in some GC situations the object has been moved.
  virtual Object* RetainAs(Object* object) = 0;
};
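
// For illustration, a hypothetical retainer; a real subclass would return
// NULL to drop the object, or the object's new address if the GC moved it:
//
//   class KeepAllRetainer : public WeakObjectRetainer {
//    public:
//     virtual Object* RetainAs(Object* object) { return object; }
//   };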

// Intrusive object marking uses the least significant bit of a
// heap object's map word to mark objects.
// Normally all map words have the least significant bit set
// because they contain a tagged map pointer.
// If the bit is not set the object is marked.
// All objects should be unmarked before resuming
// JavaScript execution.
class IntrusiveMarking {
 public:
  static bool IsMarked(HeapObject* object) {
    return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
  }

  static void ClearMark(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
    DCHECK(!IsMarked(object));
  }

  static void SetMark(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
    DCHECK(IsMarked(object));
  }

  static Map* MapOfMarkedObject(HeapObject* object) {
    uintptr_t map_word = object->map_word().ToRawValue();
    return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
  }

  static int SizeOfMarkedObject(HeapObject* object) {
    return object->SizeFromMap(MapOfMarkedObject(object));
  }

 private:
  static const uintptr_t kNotMarkedBit = 0x1;
  STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);  // NOLINT
};
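
// For illustration, a hypothetical sketch of the mark bit round trip:
//
//   IntrusiveMarking::SetMark(object);   // clears the tag bit => marked
//   DCHECK(IntrusiveMarking::IsMarked(object));
//   Map* map = IntrusiveMarking::MapOfMarkedObject(object);
//   int size = IntrusiveMarking::SizeOfMarkedObject(object);
//   IntrusiveMarking::ClearMark(object);  // restores the tagged map word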

// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
 public:
  enum WhatToFind {
    FIND_ALL,   // Will find all matches.
    FIND_FIRST  // Will stop the search after the first match.
  };

  // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
  static const int kMarkTag = 2;

  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
  // after the first match. If FIND_ALL is specified, then tracing will be
  // done for all matches.
  PathTracer(Object* search_target, WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
        object_stack_(20),
        no_allocation() {}

  virtual void VisitPointers(Object** start, Object** end);

  void Reset();
  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

 protected:
  class MarkVisitor;
  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  Object* search_target_;
  bool found_target_;
  bool found_target_in_trace_;
  WhatToFind what_to_find_;
  VisitMode visit_mode_;
  List<Object*> object_stack_;

  DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
}
}  // namespace v8::internal

#endif  // V8_HEAP_HEAP_H_