// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_H_
#define V8_HEAP_HEAP_H_

#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/counters.h"
#include "src/globals.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/splay-tree-inl.h"

namespace v8 {
namespace internal {
// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V) \
  V(Map, byte_array_map, ByteArrayMap) \
  V(Map, free_space_map, FreeSpaceMap) \
  V(Map, one_pointer_filler_map, OnePointerFillerMap) \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
  /* Cluster the most popular ones in a few cache lines here at the top. */ \
  V(Smi, store_buffer_top, StoreBufferTop) \
  V(Oddball, undefined_value, UndefinedValue) \
  V(Oddball, the_hole_value, TheHoleValue) \
  V(Oddball, null_value, NullValue) \
  V(Oddball, true_value, TrueValue) \
  V(Oddball, false_value, FalseValue) \
  V(Oddball, uninitialized_value, UninitializedValue) \
  V(Oddball, exception, Exception) \
  V(Map, cell_map, CellMap) \
  V(Map, global_property_cell_map, GlobalPropertyCellMap) \
  V(Map, shared_function_info_map, SharedFunctionInfoMap) \
  V(Map, meta_map, MetaMap) \
  V(Map, heap_number_map, HeapNumberMap) \
  V(Map, mutable_heap_number_map, MutableHeapNumberMap) \
  V(Map, native_context_map, NativeContextMap) \
  V(Map, fixed_array_map, FixedArrayMap) \
  V(Map, code_map, CodeMap) \
  V(Map, scope_info_map, ScopeInfoMap) \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
  V(Map, constant_pool_array_map, ConstantPoolArrayMap) \
  V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
  V(Map, hash_table_map, HashTableMap) \
  V(Map, ordered_hash_table_map, OrderedHashTableMap) \
  V(FixedArray, empty_fixed_array, EmptyFixedArray) \
  V(ByteArray, empty_byte_array, EmptyByteArray) \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
  V(ConstantPoolArray, empty_constant_pool_array, EmptyConstantPoolArray) \
  V(Oddball, arguments_marker, ArgumentsMarker) \
  /* The roots above this line should be boring from a GC point of view. */ \
  /* This means they are never in new space and never on a page that is */ \
  /* being compacted. */ \
  V(FixedArray, number_string_cache, NumberStringCache) \
  V(Object, instanceof_cache_function, InstanceofCacheFunction) \
  V(Object, instanceof_cache_map, InstanceofCacheMap) \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
  V(FixedArray, string_split_cache, StringSplitCache) \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
  V(Oddball, termination_exception, TerminationException) \
  V(Smi, hash_seed, HashSeed) \
  V(Map, symbol_map, SymbolMap) \
  V(Map, string_map, StringMap) \
  V(Map, ascii_string_map, AsciiStringMap) \
  V(Map, cons_string_map, ConsStringMap) \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
  V(Map, sliced_string_map, SlicedStringMap) \
  V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \
  V(Map, external_string_map, ExternalStringMap) \
  V(Map, external_string_with_one_byte_data_map, \
    ExternalStringWithOneByteDataMap) \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
  V(Map, short_external_string_map, ShortExternalStringMap) \
  V(Map, short_external_string_with_one_byte_data_map, \
    ShortExternalStringWithOneByteDataMap) \
  V(Map, internalized_string_map, InternalizedStringMap) \
  V(Map, ascii_internalized_string_map, AsciiInternalizedStringMap) \
  V(Map, external_internalized_string_map, ExternalInternalizedStringMap) \
  V(Map, external_internalized_string_with_one_byte_data_map, \
    ExternalInternalizedStringWithOneByteDataMap) \
  V(Map, external_ascii_internalized_string_map, \
    ExternalAsciiInternalizedStringMap) \
  V(Map, short_external_internalized_string_map, \
    ShortExternalInternalizedStringMap) \
  V(Map, short_external_internalized_string_with_one_byte_data_map, \
    ShortExternalInternalizedStringWithOneByteDataMap) \
  V(Map, short_external_ascii_internalized_string_map, \
    ShortExternalAsciiInternalizedStringMap) \
  V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \
  V(Map, undetectable_string_map, UndetectableStringMap) \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \
  V(Map, external_int8_array_map, ExternalInt8ArrayMap) \
  V(Map, external_uint8_array_map, ExternalUint8ArrayMap) \
  V(Map, external_int16_array_map, ExternalInt16ArrayMap) \
  V(Map, external_uint16_array_map, ExternalUint16ArrayMap) \
  V(Map, external_int32_array_map, ExternalInt32ArrayMap) \
  V(Map, external_uint32_array_map, ExternalUint32ArrayMap) \
  V(Map, external_float32_array_map, ExternalFloat32ArrayMap) \
  V(Map, external_float64_array_map, ExternalFloat64ArrayMap) \
  V(Map, external_uint8_clamped_array_map, ExternalUint8ClampedArrayMap) \
  V(ExternalArray, empty_external_int8_array, EmptyExternalInt8Array) \
  V(ExternalArray, empty_external_uint8_array, EmptyExternalUint8Array) \
  V(ExternalArray, empty_external_int16_array, EmptyExternalInt16Array) \
  V(ExternalArray, empty_external_uint16_array, EmptyExternalUint16Array) \
  V(ExternalArray, empty_external_int32_array, EmptyExternalInt32Array) \
  V(ExternalArray, empty_external_uint32_array, EmptyExternalUint32Array) \
  V(ExternalArray, empty_external_float32_array, EmptyExternalFloat32Array) \
  V(ExternalArray, empty_external_float64_array, EmptyExternalFloat64Array) \
  V(ExternalArray, empty_external_uint8_clamped_array, \
    EmptyExternalUint8ClampedArray) \
  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap) \
  V(Map, fixed_int8_array_map, FixedInt8ArrayMap) \
  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap) \
  V(Map, fixed_int16_array_map, FixedInt16ArrayMap) \
  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap) \
  V(Map, fixed_int32_array_map, FixedInt32ArrayMap) \
  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap) \
  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap) \
  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap) \
  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array) \
  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array) \
  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array) \
  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array) \
  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array) \
  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array) \
  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array) \
  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array) \
  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array, \
    EmptyFixedUint8ClampedArray) \
  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap) \
  V(Map, function_context_map, FunctionContextMap) \
  V(Map, catch_context_map, CatchContextMap) \
  V(Map, with_context_map, WithContextMap) \
  V(Map, block_context_map, BlockContextMap) \
  V(Map, module_context_map, ModuleContextMap) \
  V(Map, global_context_map, GlobalContextMap) \
  V(Map, undefined_map, UndefinedMap) \
  V(Map, the_hole_map, TheHoleMap) \
  V(Map, null_map, NullMap) \
  V(Map, boolean_map, BooleanMap) \
  V(Map, uninitialized_map, UninitializedMap) \
  V(Map, arguments_marker_map, ArgumentsMarkerMap) \
  V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap) \
  V(Map, exception_map, ExceptionMap) \
  V(Map, termination_exception_map, TerminationExceptionMap) \
  V(Map, message_object_map, JSMessageObjectMap) \
  V(Map, foreign_map, ForeignMap) \
  V(HeapNumber, nan_value, NanValue) \
  V(HeapNumber, infinity_value, InfinityValue) \
  V(HeapNumber, minus_zero_value, MinusZeroValue) \
  V(Map, neander_map, NeanderMap) \
  V(JSObject, message_listeners, MessageListeners) \
  V(UnseededNumberDictionary, code_stubs, CodeStubs) \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
  V(Code, js_entry_code, JsEntryCode) \
  V(Code, js_construct_entry_code, JsConstructEntryCode) \
  V(FixedArray, natives_source_cache, NativesSourceCache) \
  V(Script, empty_script, EmptyScript) \
  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
  V(Cell, undefined_cell, UndefinedCell) \
  V(JSObject, observation_state, ObservationState) \
  V(Map, external_map, ExternalMap) \
  V(Object, symbol_registry, SymbolRegistry) \
  V(Symbol, frozen_symbol, FrozenSymbol) \
  V(Symbol, nonexistent_symbol, NonExistentSymbol) \
  V(Symbol, elements_transition_symbol, ElementsTransitionSymbol) \
  V(SeededNumberDictionary, empty_slow_element_dictionary, \
    EmptySlowElementDictionary) \
  V(Symbol, observed_symbol, ObservedSymbol) \
  V(Symbol, uninitialized_symbol, UninitializedSymbol) \
  V(Symbol, megamorphic_symbol, MegamorphicSymbol) \
  V(Symbol, stack_trace_symbol, StackTraceSymbol) \
  V(Symbol, detailed_stack_trace_symbol, DetailedStackTraceSymbol) \
  V(Symbol, normal_ic_symbol, NormalICSymbol) \
  V(FixedArray, materialized_objects, MaterializedObjects) \
  V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
  V(FixedArray, microtask_queue, MicrotaskQueue)
// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V) \
  V(Smi, stack_limit, StackLimit) \
  V(Smi, real_stack_limit, RealStackLimit) \
  V(Smi, last_script_id, LastScriptId) \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)

#define ROOT_LIST(V) \
  STRONG_ROOT_LIST(V) \
  SMI_ROOT_LIST(V) \
  V(StringTable, string_table, StringTable)
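
// A minimal sketch of how these X-macro lists are consumed: the caller
// supplies its own V(...) definition before expanding the list. For example,
// counting the strong roots (kStrongRootCount is illustrative and not part
// of this header):
//
//   #define COUNT_ROOT(type, name, camel_name) +1
//   static const int kStrongRootCount = 0 STRONG_ROOT_LIST(COUNT_ROOT);
//   #undef COUNT_ROOT
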
// Heap roots that are known to be immortal immovable, for which we can safely
// skip write barriers.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
  V(one_pointer_filler_map) \
  V(two_pointer_filler_map) \
  V(uninitialized_value) \
  V(global_property_cell_map) \
  V(shared_function_info_map) \
  V(mutable_heap_number_map) \
  V(native_context_map) \
  V(fixed_cow_array_map) \
  V(fixed_double_array_map) \
  V(constant_pool_array_map) \
  V(no_interceptor_result_sentinel) \
  V(ordered_hash_table_map) \
  V(empty_fixed_array) \
  V(empty_byte_array) \
  V(empty_descriptor_array) \
  V(empty_constant_pool_array) \
  V(arguments_marker) \
  V(sloppy_arguments_elements_map) \
  V(function_context_map) \
  V(catch_context_map) \
  V(with_context_map) \
  V(block_context_map) \
  V(module_context_map) \
  V(global_context_map) \
  V(uninitialized_map) \
  V(message_object_map)
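
// Illustrative sketch (assumed helper, not declared in this header): the
// list can be expanded against the generated root accessors to test whether
// an object is one of the immortal immovable roots:
//
//   static bool IsImmortalImmovableRoot(Heap* heap, Object* object) {
//   #define CHECK_ROOT(name) if (object == heap->name()) return true;
//     IMMORTAL_IMMOVABLE_ROOT_LIST(CHECK_ROOT)
//   #undef CHECK_ROOT
//     return false;
//   }
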
#define INTERNALIZED_STRING_LIST(V) \
  V(Array_string, "Array") \
  V(Object_string, "Object") \
  V(proto_string, "__proto__") \
  V(arguments_string, "arguments") \
  V(Arguments_string, "Arguments") \
  V(call_string, "call") \
  V(apply_string, "apply") \
  V(caller_string, "caller") \
  V(boolean_string, "boolean") \
  V(Boolean_string, "Boolean") \
  V(callee_string, "callee") \
  V(constructor_string, "constructor") \
  V(dot_result_string, ".result") \
  V(dot_for_string, ".for.") \
  V(eval_string, "eval") \
  V(empty_string, "") \
  V(function_string, "function") \
  V(length_string, "length") \
  V(name_string, "name") \
  V(null_string, "null") \
  V(number_string, "number") \
  V(Number_string, "Number") \
  V(nan_string, "NaN") \
  V(RegExp_string, "RegExp") \
  V(source_string, "source") \
  V(source_url_string, "source_url") \
  V(source_mapping_url_string, "source_mapping_url") \
  V(global_string, "global") \
  V(ignore_case_string, "ignoreCase") \
  V(multiline_string, "multiline") \
  V(input_string, "input") \
  V(index_string, "index") \
  V(last_index_string, "lastIndex") \
  V(object_string, "object") \
  V(literals_string, "literals") \
  V(prototype_string, "prototype") \
  V(string_string, "string") \
  V(String_string, "String") \
  V(symbol_string, "symbol") \
  V(Symbol_string, "Symbol") \
  V(for_string, "for") \
  V(for_api_string, "for_api") \
  V(for_intern_string, "for_intern") \
  V(private_api_string, "private_api") \
  V(private_intern_string, "private_intern") \
  V(Date_string, "Date") \
  V(to_string_string, "toString") \
  V(char_at_string, "CharAt") \
  V(undefined_string, "undefined") \
  V(value_of_string, "valueOf") \
  V(stack_string, "stack") \
  V(toJSON_string, "toJSON") \
  V(InitializeVarGlobal_string, "InitializeVarGlobal") \
  V(InitializeConstGlobal_string, "InitializeConstGlobal") \
  V(KeyedLoadMonomorphic_string, "KeyedLoadMonomorphic") \
  V(KeyedStoreMonomorphic_string, "KeyedStoreMonomorphic") \
  V(stack_overflow_string, "kStackOverflowBoilerplate") \
  V(illegal_access_string, "illegal access") \
  V(get_string, "get") \
  V(set_string, "set") \
  V(map_field_string, "%map") \
  V(elements_field_string, "%elements") \
  V(length_field_string, "%length") \
  V(cell_value_string, "%cell_value") \
  V(function_class_string, "Function") \
  V(illegal_argument_string, "illegal argument") \
  V(space_string, " ") \
  V(exec_string, "exec") \
  V(zero_string, "0") \
  V(global_eval_string, "GlobalEval") \
  V(identity_hash_string, "v8::IdentityHash") \
  V(closure_string, "(closure)") \
  V(compare_ic_string, "==") \
  V(strict_compare_ic_string, "===") \
  V(infinity_string, "Infinity") \
  V(minus_infinity_string, "-Infinity") \
  V(query_colon_string, "(?:)") \
  V(Generator_string, "Generator") \
  V(throw_string, "throw") \
  V(done_string, "done") \
  V(value_string, "value") \
  V(next_string, "next") \
  V(byte_length_string, "byteLength") \
  V(byte_offset_string, "byteOffset") \
  V(buffer_string, "buffer") \
  V(intl_initialized_marker_string, "v8::intl_initialized_marker") \
  V(intl_impl_object_string, "v8::intl_object")
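
// Each entry above becomes a root slot plus an accessor generated by the
// STRING_ACCESSOR macro inside class Heap below. Illustrative use:
//
//   String* s = heap->Array_string();            // the internalized "Array"
//   bool is_proto = name == heap->proto_string();
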
// Forward declarations.
class Heap;
class WeakObjectRetainer;


typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                      Object** pointer);


class StoreBufferRebuilder {
 public:
  explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
      : store_buffer_(store_buffer) {}

  void Callback(MemoryChunk* page, StoreBufferEvent event);

 private:
  StoreBuffer* store_buffer_;

  // We record in this variable how full the store buffer was when we started
  // iterating over the current page, finding pointers to new space. If the
  // store buffer overflows again we can exempt the page from the store buffer
  // by rewinding to this point instead of having to search the store buffer.
  Object*** start_of_current_page_;
  // The current page we are scanning in the store buffer iterator.
  MemoryChunk* current_page_;
};
// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)
      : front_(NULL), rear_(NULL), limit_(NULL),
        emergency_stack_(NULL), heap_(heap) {}

  void Initialize();

  void Destroy() {
    delete emergency_stack_;
    emergency_stack_ = NULL;
  }

  inline void ActivateGuardIfOnTheSamePage();

  Page* GetHeadPage() {
    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  }

  void SetNewLimit(Address limit) {
    if (!guard_) {
      return;
    }

    DCHECK(GetHeadPage() == Page::FromAllocationTop(limit));
    limit_ = reinterpret_cast<intptr_t*>(limit);

    if (limit_ <= rear_) {
      return;
    }

    RelocateQueueHead();
  }

  bool IsBelowPromotionQueue(Address to_space_top) {
    // If the given to-space top pointer and the head of the promotion queue
    // are not on the same page, then the to-space objects are below the
    // promotion queue.
    if (GetHeadPage() != Page::FromAddress(to_space_top)) {
      return true;
    }
    // If the to-space top pointer is less than or equal to the promotion
    // queue head, then the to-space objects are below the promotion queue.
    return reinterpret_cast<intptr_t*>(to_space_top) <= rear_;
  }

  bool is_empty() {
    return (front_ == rear_) &&
           (emergency_stack_ == NULL || emergency_stack_->length() == 0);
  }

  inline void insert(HeapObject* target, int size);

  void remove(HeapObject** target, int* size) {
    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();
      *target = e.obj_;
      *size = e.size_;
      return;
    }

    if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
      NewSpacePage* front_page =
          NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
      DCHECK(!front_page->prev_page()->is_anchor());
      front_ = reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
    }
    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));
  }

 private:
  // The front of the queue is higher in the memory page chain than the rear.
  intptr_t* front_;
  intptr_t* rear_;
  intptr_t* limit_;

  bool guard_;

  static const int kEntrySizeInWords = 2;

  struct Entry {
    Entry(HeapObject* obj, int size) : obj_(obj), size_(size) {}

    HeapObject* obj_;
    int size_;
  };
  List<Entry>* emergency_stack_;

  Heap* heap_;

  void RelocateQueueHead();

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
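
// Illustrative sketch (not part of the interface above): during a scavenge,
// promoted objects are pushed with insert() and the queue is drained with
// remove() until is_empty() reports true.
//
//   while (!promotion_queue->is_empty()) {
//     HeapObject* obj;
//     int size;
//     promotion_queue->remove(&obj, &size);
//     // ... scan [obj, obj + size) for pointers into new space ...
//   }
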
typedef void (*ScavengingCallback)(Map* map, HeapObject** slot,
                                   HeapObject* object);


// External strings table is a place where all external strings are
// registered. We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable {
 public:
  // Registers an external string.
  inline void AddString(String* string);

  inline void Iterate(ObjectVisitor* v);

  // Restores internal invariant and gets rid of collected strings.
  // Must be called after each Iterate() that modified the strings.
  void CleanUp();

  // Destroys all allocated memory.
  void TearDown();

 private:
  explicit ExternalStringTable(Heap* heap) : heap_(heap) {}

  inline void Verify();

  inline void AddOldString(String* string);

  // Notifies the table that only a prefix of the new list is valid.
  inline void ShrinkNewStrings(int position);

  // To speed up scavenge collections new space strings are kept
  // separate from old space strings.
  List<Object*> new_space_strings_;
  List<Object*> old_space_strings_;

  Heap* heap_;

  friend class Heap;

  DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
};
enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};


class Heap {
 public:
  // Configure heap size in MB before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(int max_semi_space_size, int max_old_space_size,
                     int max_executable_size, size_t code_range_size);
  bool ConfigureHeapDefault();

  // Prepares the heap, setting up memory areas that are needed in the isolate
  // without actually creating any objects.
  bool SetUp();

  // Bootstraps the object heap with the core set of objects required to run.
  // Returns whether it succeeded.
  bool CreateHeapObjects();

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Set the stack limit in the roots_ array. Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // Returns whether SetUp has been called.
  bool HasBeenSetUp();

  // Returns the maximum amount of memory reserved for the heap. For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space. The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
  int MaxSemiSpaceSize() { return max_semi_space_size_; }
  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  int InitialSemiSpaceSize() { return initial_semispace_size_; }
  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  intptr_t MaxExecutableSize() { return max_executable_size_; }

  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.
  intptr_t Capacity();

  // Returns the amount of memory currently committed for the heap.
  intptr_t CommittedMemory();

  // Returns the amount of executable memory currently committed for the heap.
  intptr_t CommittedMemoryExecutable();

  // Returns the amount of physical memory currently committed for the heap.
  size_t CommittedPhysicalMemory();

  // Returns the maximum amount of memory ever committed for the heap.
  intptr_t MaximumCommittedMemory() { return maximum_committed_; }

  // Updates the maximum committed memory for the heap. Should be called
  // whenever a space grows.
  void UpdateMaximumCommitted();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  intptr_t Available();

  // Returns the size of all objects residing in the heap.
  intptr_t SizeOfObjects();
  // Return the starting address and a mask for the new space. And-masking an
  // address with the mask will result in the start address of the new space
  // for all addresses in either semispace.
  Address NewSpaceStart() { return new_space_.start(); }
  uintptr_t NewSpaceMask() { return new_space_.mask(); }
  Address NewSpaceTop() { return new_space_.top(); }

  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_pointer_space() { return old_pointer_space_; }
  OldSpace* old_data_space() { return old_data_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  CellSpace* cell_space() { return cell_space_; }
  PropertyCellSpace* property_cell_space() { return property_cell_space_; }
  LargeObjectSpace* lo_space() { return lo_space_; }
  PagedSpace* paged_space(int idx) {
    switch (idx) {
      case OLD_POINTER_SPACE:
        return old_pointer_space();
      case OLD_DATA_SPACE:
        return old_data_space();
      case MAP_SPACE:
        return map_space();
      case CELL_SPACE:
        return cell_space();
      case PROPERTY_CELL_SPACE:
        return property_cell_space();
      case CODE_SPACE:
        return code_space();
      case NEW_SPACE:
      case LO_SPACE:
        UNREACHABLE();
    }
    return NULL;
  }
  bool always_allocate() { return always_allocate_scope_depth_ != 0; }
  Address always_allocate_scope_depth_address() {
    return reinterpret_cast<Address>(&always_allocate_scope_depth_);
  }

  Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  Address* OldPointerSpaceAllocationTopAddress() {
    return old_pointer_space_->allocation_top_address();
  }
  Address* OldPointerSpaceAllocationLimitAddress() {
    return old_pointer_space_->allocation_limit_address();
  }

  Address* OldDataSpaceAllocationTopAddress() {
    return old_data_space_->allocation_top_address();
  }
  Address* OldDataSpaceAllocationLimitAddress() {
    return old_data_space_->allocation_limit_address();
  }
  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Optionally takes an AllocationSite to be appended in an AllocationMemento.
  MUST_USE_RESULT AllocationResult
      CopyJSObject(JSObject* source, AllocationSite* site = NULL);

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // Iterates the whole code space to clear all ICs of the given kind.
  void ClearAllICsByKind(Code::Kind kind);

  // For use during bootup.
  void RepairFreeListsAfterBoot();

  template <typename T>
  static inline bool IsOneByte(T t, int chars);

  // Move len elements within a given array from src_index to dst_index.
  void MoveElements(FixedArray* array, int dst_index, int src_index, int len);

  // Sloppy mode arguments object size.
  static const int kSloppyArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;
  // Strict mode arguments have no callee, so they are smaller.
  static const int kStrictArgumentsObjectSize =
      JSObject::kHeaderSize + 1 * kPointerSize;
  // Indices for direct access into argument objects.
  static const int kArgumentsLengthIndex = 0;
  // callee is only valid in sloppy mode.
  static const int kArgumentsCalleeIndex = 1;

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when introducing gaps within pages.
  void CreateFillerObjectAt(Address addr, int size);

  bool CanMoveObjectStart(HeapObject* object);

  // Indicates whether live bytes adjustment is triggered from within the GC
  // code or from mutator code.
  enum InvocationMode { FROM_GC, FROM_MUTATOR };

  // Maintain consistency of live bytes during incremental marking.
  void AdjustLiveBytes(Address address, int by, InvocationMode mode);

  // Trim the given array from the left. Note that this relocates the object
  // start and hence is only valid if there is only a single reference to it.
  FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Trim the given array from the right.
  template<Heap::InvocationMode mode>
  void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Converts the given boolean condition to JavaScript boolean value.
  inline Object* ToBoolean(bool condition);
  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(
      AllocationSpace space, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  static const int kNoGCFlags = 0;
  static const int kSweepPreciselyMask = 1;
  static const int kReduceMemoryFootprintMask = 2;
  static const int kAbortIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to sweep precisely and abort any
  // incremental marking as well.
  static const int kMakeHeapIterableMask =
      kSweepPreciselyMask | kAbortIncrementalMarkingMask;

  // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(
      int flags, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
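
  // A hedged usage sketch (illustrative, not a declaration): a full GC that
  // leaves the heap iterable combines the mask bits defined above, e.g.
  //
  //   heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "make iterable");
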
  // Last hope GC, should try to squeeze as much as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);

  // Check whether the heap is currently iterable.
  bool IsHeapIterable();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed();

  inline void increment_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_++;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  inline void decrement_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_--;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

  void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback);

  void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback);
  // Heap root getters. We have versions with and without type::cast() here.
  // You can't use type::cast during GC because the assert fails.
  // TODO(1490): Try removing the unchecked accessors, now that GC marking does
  // not corrupt the map.
#define ROOT_ACCESSOR(type, name, camel_name) \
  type* name() { return type::cast(roots_[k##camel_name##RootIndex]); } \
  type* raw_unchecked_##name() { \
    return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  Map* name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) \
  String* name() { return String::cast(roots_[k##name##RootIndex]); }
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR
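
  // Illustrative use of the generated accessors (sketch only): the checked
  // versions perform a type::cast on every access; the raw_unchecked_*
  // versions skip the cast and are therefore usable while GC is in progress.
  //
  //   Oddball* u = heap->undefined_value();                  // checked
  //   Oddball* raw = heap->raw_unchecked_undefined_value();  // unchecked
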
  // The hidden_string is special because it is the empty string, but does
  // not match the empty string.
  String* hidden_string() { return hidden_string_; }

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() const { return native_contexts_list_; }

  void set_array_buffers_list(Object* object) { array_buffers_list_ = object; }
  Object* array_buffers_list() const { return array_buffers_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
  }
  Object* allocation_sites_list() { return allocation_sites_list_; }

  // Used in CreateAllocationSiteStub and the (de)serializer.
  Object** allocation_sites_list_address() { return &allocation_sites_list_; }

  Object* weak_object_to_code_table() { return weak_object_to_code_table_; }

  void set_encountered_weak_collections(Object* weak_collection) {
    encountered_weak_collections_ = weak_collection;
  }
  Object* encountered_weak_collections() const {
    return encountered_weak_collections_;
  }
  // Number of mark-sweeps.
  unsigned int ms_count() { return ms_count_; }

  // Iterates over all roots in the heap.
  void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over entries in the smi roots list. Only interesting to the
  // serializer/deserializer, since GC does not care about smis.
  void IterateSmiRoots(ObjectVisitor* v);
  // Iterates over all the other roots in the heap.
  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  // Iterates over pointers to the from-semispace of new space found in the
  // memory interval from start to end.
  void IterateAndMarkPointersToFromSpace(Address start, Address end,
                                         ObjectSlotCallback callback);

  // Returns whether the object resides in new space.
  inline bool InNewSpace(Object* object);
  inline bool InNewSpace(Address address);
  inline bool InNewSpacePage(Address address);
  inline bool InFromSpace(Object* object);
  inline bool InToSpace(Object* object);

  // Returns whether the object resides in old pointer space.
  inline bool InOldPointerSpace(Address address);
  inline bool InOldPointerSpace(Object* object);

  // Returns whether the object resides in old data space.
  inline bool InOldDataSpace(Address address);
  inline bool InOldDataSpace(Object* object);

  // Checks whether an address/object is in the heap (including the auxiliary
  // area and unused area).
  bool Contains(Address addr);
  bool Contains(HeapObject* value);

  // Checks whether an address/object is in a given space.
  // Currently used by tests, serialization and heap verification only.
  bool InSpace(Address addr, AllocationSpace space);
  bool InSpace(HeapObject* value, AllocationSpace space);

  // Finds out which space an object should get promoted to based on its type.
  inline OldSpace* TargetSpace(HeapObject* object);
  static inline AllocationSpace TargetSpaceId(InstanceType type);

  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
  // Sets the stub_cache_ (only used when expanding the dictionary).
  void public_set_code_stubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Support for computing object sizes for old objects during GCs. Returns
  // a function that is guaranteed to be safe for computing object sizes in
  // the current GC phase.
  HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
    return gc_safe_size_of_old_object_;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  void public_set_empty_script(Script* script) {
    roots_[kEmptyScriptRootIndex] = script;
  }

  void public_set_store_buffer_top(Address* top) {
    roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
  }

  void public_set_materialized_objects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;
  }

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  Address* store_buffer_top_address() {
    return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
  }
#ifdef VERIFY_HEAP
  // Verify the heap is in its normal state before or after a GC.
  void Verify();

  bool weak_embedded_objects_verification_enabled() {
    return no_weak_object_verification_scope_depth_ == 0;
  }
#endif

#ifdef DEBUG
  void OldPointerSpaceCheckStoreBuffer();
  void MapSpaceCheckStoreBuffer();
  void LargeObjectSpaceCheckStoreBuffer();

  // Report heap statistics.
  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);
#endif

  // Zapping is needed for verify heap, and always done in debug builds.
  static inline bool ShouldZapGarbage() {
#ifdef DEBUG
    return true;
#else
#ifdef VERIFY_HEAP
    return FLAG_verify_heap;
#else
    return false;
#endif
#endif
  }
958 // Number of "runtime allocations" done so far.
959 uint32_t allocations_count() { return allocations_count_; }
961 // Returns deterministic "time" value in ms. Works only with
962 // FLAG_verify_predictable.
963 double synthetic_time() { return allocations_count_ / 100.0; }
965 // Print short heap statistics.
966 void PrintShortHeapStatistics();
968 // Write barrier support for address[offset] = o.
969 INLINE(void RecordWrite(Address address, int offset));
971 // Write barrier support for address[start : start + len[ = o.
972 INLINE(void RecordWrites(Address address, int start, int len));
974 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
975 inline HeapState gc_state() { return gc_state_; }
977 inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
980 void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }
982 void TracePathToObjectFrom(Object* target, Object* root);
983 void TracePathToObject(Object* target);
984 void TracePathToGlobal();
987 // Callback function passed to Heap::Iterate etc. Copies an object if
988 // necessary, the object might be promoted to an old space. The caller must
989 // ensure the precondition that the object is (a) a heap object and (b) in
990 // the heap's from space.
991 static inline void ScavengePointer(HeapObject** p);
992 static inline void ScavengeObject(HeapObject** p, HeapObject* object);
994 enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT };
996 // If an object has an AllocationMemento trailing it, return it, otherwise
998 inline AllocationMemento* FindAllocationMemento(HeapObject* object);
1000 // An object may have an AllocationSite associated with it through a trailing
1001 // AllocationMemento. Its feedback should be updated when objects are found
1003 static inline void UpdateAllocationSiteFeedback(HeapObject* object,
1004 ScratchpadSlotMode mode);
1006 // Support for partial snapshots. After calling this we have a linear
1007 // space to write objects in each space.
1008 void ReserveSpace(int* sizes, Address* addresses);
  // Support for the API.
  void CreateApiObjects();

  inline intptr_t PromotedTotalSize() {
    int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
    if (total > kMaxInt) return static_cast<intptr_t>(kMaxInt);
    if (total < 0) return 0;
    return static_cast<intptr_t>(total);
  }

  inline intptr_t OldGenerationSpaceAvailable() {
    return old_generation_allocation_limit_ - PromotedTotalSize();
  }

  inline intptr_t OldGenerationCapacityAvailable() {
    return max_old_generation_size_ - PromotedTotalSize();
  }

  static const intptr_t kMinimumOldGenerationAllocationLimit =
      8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
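
  // Worked example: with a page size of 1 MB or less, the expression above
  // evaluates to 8 * MB, i.e. an 8 MB floor for the old generation
  // allocation limit; with larger pages it becomes eight pages.
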
  static const int kPointerMultiplier = i::kPointerSize / 4;

  // The new space size has to be a power of 2. Sizes are in MB.
  static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier;
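
  // Worked example: on a 64-bit target kPointerSize is 8, so
  // kPointerMultiplier is 8 / 4 = 2 and kMaxSemiSpaceSizeHighMemoryDevice is
  // 8 * 2 = 16 MB; on a 32-bit target the multiplier is 1 and the same
  // constant is 8 MB.
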
  // The old space size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeMediumMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier;

  // The executable size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
  static const int kMaxExecutableSizeMediumMemoryDevice =
      192 * kPointerMultiplier;
  static const int kMaxExecutableSizeHighMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxExecutableSizeHugeMemoryDevice =
      256 * kPointerMultiplier;
  intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size,
                                        int freed_global_handles);

  // Indicates whether inline bump-pointer allocation has been disabled.
  bool inline_allocation_disabled() { return inline_allocation_disabled_; }

  // Switch whether inline bump-pointer allocation should be used.
  void EnableInlineAllocation();
  void DisableInlineAllocation();

  // Implements the corresponding V8 API function.
  bool IdleNotification(int hint);
  // Declare all the root indices. This defines the root list order.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
    INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

// Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
    STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP

    kStringTableRootIndex,

#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

    kRootListLength,
    kStrongRootListLength = kStringTableRootIndex,
    kSmiRootsStart = kStringTableRootIndex + 1
  };
  STATIC_ASSERT(kUndefinedValueRootIndex ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);
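
  // Sketch of how these indices are used (illustrative only): a root can be
  // loaded by indexing the flat roots array exposed via roots_array_start().
  //
  //   Object** roots = heap->roots_array_start();
  //   Object* undefined = roots[Heap::kUndefinedValueRootIndex];
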
  // Generated code can embed direct references to non-writable roots if
  // they are in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  Map* MapForFixedTypedArray(ExternalArrayType array_type);
  RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);

  Map* MapForExternalArrayType(ExternalArrayType array_type);
  RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind);
  RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
  ExternalArray* EmptyExternalArrayForMap(Map* map);
  FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Copy block of memory from src to dst. Size of block should be aligned
  // by pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes
  // and pointer size aligned addresses.
  static inline void MoveBlock(Address dst, Address src, int byte_size);

  // Check new space expansion criteria and expand semispaces if it was hit.
  void CheckNewSpaceExpansionCriteria();
  inline void IncrementPromotedObjectsSize(int object_size) {
    DCHECK(object_size > 0);
    promoted_objects_size_ += object_size;
  }

  inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
    DCHECK(object_size > 0);
    semi_space_copied_object_size_ += object_size;
  }

  inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }

  inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }

  inline void IncrementNodesPromoted() { nodes_promoted_++; }

  inline void IncrementYoungSurvivorsCounter(int survived) {
    DCHECK(survived >= 0);
    survived_since_last_expansion_ += survived;
  }

  inline bool NextGCIsLikelyToBeFull() {
    if (FLAG_gc_global) return true;

    if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;

    intptr_t adjusted_allocation_limit =
        old_generation_allocation_limit_ - new_space_.Capacity();

    if (PromotedTotalSize() >= adjusted_allocation_limit) return true;

    return false;
  }
  void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void UpdateReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void ProcessWeakReferences(WeakObjectRetainer* retainer);

  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);

  // An object should be promoted if the object has survived a
  // scavenge operation.
  inline bool ShouldBePromoted(Address old_address, int object_size);

  void ClearJSFunctionResultCaches();

  void ClearNormalizedMapCaches();
  GCTracer* tracer() { return &tracer_; }

  // Returns the size of objects residing in non-new spaces.
  intptr_t PromotedSpaceSizeOfObjects();

  double total_regexp_code_generated() { return total_regexp_code_generated_; }
  void IncreaseTotalRegexpCodeGenerated(int size) {
    total_regexp_code_generated_ += size;
  }

  void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
    if (is_crankshafted) {
      crankshaft_codegen_bytes_generated_ += size;
    } else {
      full_codegen_bytes_generated_ += size;
    }
  }
  // Update GC statistics that are tracked on the Heap.
  void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
                                    double marking_time);

  // Returns maximum GC pause.
  double get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  double get_min_in_mutator() { return min_in_mutator_; }

  MarkCompactCollector* mark_compact_collector() {
    return &mark_compact_collector_;
  }

  StoreBuffer* store_buffer() { return &store_buffer_; }

  Marking* marking() { return &marking_; }

  IncrementalMarking* incremental_marking() { return &incremental_marking_; }

  ExternalStringTable* external_string_table() {
    return &external_string_table_;
  }

  // Returns the current sweep generation.
  int sweep_generation() { return sweep_generation_; }

  inline Isolate* isolate();

  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
  void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);

  inline bool OldGenerationAllocationLimitReached();

  inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
    scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
  }

  void QueueMemoryChunkForFree(MemoryChunk* chunk);
  void FreeQueuedChunks();

  int gc_count() const { return gc_count_; }

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  inline void CompletelyClearInstanceofCache();
  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  uint32_t HashSeed() {
    uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
    DCHECK(FLAG_randomize_hashes || seed == 0);
    return seed;
  }

  void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
    DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
    set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetConstructStubDeoptPCOffset(int pc_offset) {
    DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetGetterStubDeoptPCOffset(int pc_offset) {
    DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  void SetSetterStubDeoptPCOffset(int pc_offset) {
    DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
    set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }
  // For post mortem debugging.
  void RememberUnmappedPage(Address page, bool compacted);

  // Global inline caching age: it is incremented on some GCs after context
  // disposal. We use it to flush inline caches.
  int global_ic_age() { return global_ic_age_; }

  void AgeInlineCaches() {
    global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
  }

  bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }

  int64_t amount_of_external_allocated_memory() {
    return amount_of_external_allocated_memory_;
  }

  void DeoptMarkedAllocationSites();

  bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }

  bool DeoptMaybeTenuredAllocationSites() {
    return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
  }
  // ObjectStats are kept in two arrays, counts and sizes. Related stats are
  // stored in a contiguous linear buffer. Stats groups are stored one after
  // another.
  enum {
    FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,
    FIRST_FIXED_ARRAY_SUB_TYPE =
        FIRST_CODE_KIND_SUB_TYPE + Code::NUMBER_OF_KINDS,
    FIRST_CODE_AGE_SUB_TYPE =
        FIRST_FIXED_ARRAY_SUB_TYPE + LAST_FIXED_ARRAY_SUB_TYPE + 1,
    OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1
  };
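
  // Worked layout example: plain instance types occupy slots [0, LAST_TYPE];
  // a code kind k is recorded at FIRST_CODE_KIND_SUB_TYPE + k; a fixed array
  // sub type s at FIRST_FIXED_ARRAY_SUB_TYPE + s; and a code age a at
  // FIRST_CODE_AGE_SUB_TYPE + (a - Code::kFirstCodeAge).
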
  void RecordObjectStats(InstanceType type, size_t size) {
    DCHECK(type <= LAST_TYPE);
    object_counts_[type]++;
    object_sizes_[type] += size;
  }

  void RecordCodeSubTypeStats(int code_sub_type, int code_age, size_t size) {
    int code_sub_type_index = FIRST_CODE_KIND_SUB_TYPE + code_sub_type;
    int code_age_index =
        FIRST_CODE_AGE_SUB_TYPE + code_age - Code::kFirstCodeAge;
    DCHECK(code_sub_type_index >= FIRST_CODE_KIND_SUB_TYPE &&
           code_sub_type_index < FIRST_CODE_AGE_SUB_TYPE);
    DCHECK(code_age_index >= FIRST_CODE_AGE_SUB_TYPE &&
           code_age_index < OBJECT_STATS_COUNT);
    object_counts_[code_sub_type_index]++;
    object_sizes_[code_sub_type_index] += size;
    object_counts_[code_age_index]++;
    object_sizes_[code_age_index] += size;
  }

  void RecordFixedArraySubTypeStats(int array_sub_type, size_t size) {
    DCHECK(array_sub_type <= LAST_FIXED_ARRAY_SUB_TYPE);
    object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type]++;
    object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + array_sub_type] += size;
  }
  void CheckpointObjectStats();

  // We don't use a LockGuard here since we want to lock the heap
  // only when FLAG_concurrent_recompilation is true.
  class RelocationLock {
   public:
    explicit RelocationLock(Heap* heap) : heap_(heap) {
      heap_->relocation_mutex_.Lock();
    }

    ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }

   private:
    Heap* heap_;
  };
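
  // Illustrative RAII use (sketch): hold the lock for the duration of a
  // scope that must not observe object relocation.
  //
  //   {
  //     Heap::RelocationLock relocation_lock(heap);
  //     // ... read object addresses safely here ...
  //   }
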
  void AddWeakObjectToCodeDependency(Handle<Object> obj,
                                     Handle<DependentCode> dep);

  DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj);

  void InitializeWeakObjectToCodeTable() {
    set_weak_object_to_code_table(undefined_value());
  }

  void EnsureWeakObjectToCodeTable();

  static void FatalProcessOutOfMemory(const char* location,
                                      bool take_snapshot = false);

  // This event is triggered after successful allocation of a new object made
  // by runtime. Allocations of target space for object evacuation do not
  // trigger the event. In order to track ALL allocations one must turn off
  // FLAG_inline_new and FLAG_use_allocation_folding.
  inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);

  // This event is triggered after an object is moved to a new place.
  inline void OnMoveEvent(HeapObject* target, HeapObject* source,
                          int size_in_bytes);
 protected:
  // Methods made available to tests.

  // Allocates a JS Map in the heap.
  MUST_USE_RESULT AllocationResult
      AllocateMap(InstanceType instance_type, int instance_size,
                  ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // If allocation_site is non-null, then a memento is emitted after the object
  // that points to the site.
  MUST_USE_RESULT AllocationResult
      AllocateJSObject(JSFunction* constructor,
                       PretenureFlag pretenure = NOT_TENURED,
                       AllocationSite* allocation_site = NULL);

  // Allocates and initializes a new JavaScript object based on a map.
  // Passing an allocation site means that a memento will be created that
  // points to the site.
  MUST_USE_RESULT AllocationResult
      AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure = NOT_TENURED,
                              bool alloc_props = true,
                              AllocationSite* allocation_site = NULL);

  // Allocates a HeapNumber from value.
  MUST_USE_RESULT AllocationResult
      AllocateHeapNumber(double value, MutableMode mode = IMMUTABLE,
                         PretenureFlag pretenure = NOT_TENURED);

  // Allocates a byte array of the specified length.
  MUST_USE_RESULT AllocationResult
      AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);

  // Copy the code and scope info part of the code object, but insert
  // the provided data as the relocation information.
  MUST_USE_RESULT AllocationResult
      CopyCode(Code* code, Vector<byte> reloc_info);

  MUST_USE_RESULT AllocationResult CopyCode(Code* code);

  // Allocates a fixed array initialized with undefined values.
  MUST_USE_RESULT AllocationResult
      AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);
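
  // Illustrative use of the AllocationResult-returning allocators above
  // (a sketch, assuming the caller retries after a failed allocation):
  //
  //   HeapObject* obj = NULL;
  //   AllocationResult allocation = AllocateFixedArray(16);
  //   if (!allocation.To(&obj)) {
  //     // Allocation failed; collect garbage and retry.
  //   }
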
 private:
  Heap();

  // The amount of external memory registered through the API kept alive
  // by global handles.
  int64_t amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  int64_t amount_of_external_allocated_memory_at_last_global_gc_;

  // This can be calculated directly from a pointer to the heap; however, it is
  // more expedient to get at the isolate directly from within Heap methods.
  Isolate* isolate_;

  Object* roots_[kRootListLength];

  size_t code_range_size_;
  int reserved_semispace_size_;
  int max_semi_space_size_;
  int initial_semispace_size_;
  intptr_t max_old_generation_size_;
  intptr_t max_executable_size_;
  intptr_t maximum_committed_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  int survived_since_last_expansion_;

  // For keeping track of when to flush RegExp code.
  int sweep_generation_;

  int always_allocate_scope_depth_;

  // For keeping track of context disposals.
  int contexts_disposed_;

  bool flush_monomorphic_ics_;

  int scan_on_scavenge_pages_;

  NewSpace new_space_;
  OldSpace* old_pointer_space_;
  OldSpace* old_data_space_;
  OldSpace* code_space_;
  MapSpace* map_space_;
  CellSpace* cell_space_;
  PropertyCellSpace* property_cell_space_;
  LargeObjectSpace* lo_space_;
  HeapState gc_state_;
  int gc_post_processing_depth_;
  Address new_space_top_after_last_gc_;
  // Returns the amount of external memory registered since last global gc.
  int64_t PromotedExternalMemorySize();

  // How many "runtime allocations" happened.
  uint32_t allocations_count_;

  // Running hash over allocations performed.
  uint32_t raw_allocations_hash_;

  // Countdown counter, dumps allocation hash when 0.
  uint32_t dump_allocations_hash_countdown_;

  // How many mark-sweep collections happened.
  unsigned int ms_count_;

  // How many GCs happened.
  unsigned int gc_count_;

  // For post mortem debugging.
  static const int kRememberedUnmappedPages = 128;
  int remembered_unmapped_pages_index_;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];

  // Total length of the strings we failed to flatten since the last GC.
  int unflattened_strings_length_;
#define ROOT_ACCESSOR(type, name, camel_name) \
  inline void set_##name(type* value) { \
    /* The deserializer makes use of the fact that these common roots are */ \
    /* never in new space and never on a page that is being compacted. */ \
    DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
    roots_[k##camel_name##RootIndex] = value; \
  }
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#ifdef DEBUG
  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
  // remaining until the next failure and garbage collection.
  int allocation_timeout_;
#endif
  // Limit that triggers a global GC on the next (normally caused) GC. This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke, before expanding a paged space in the old
  // generation and on every allocation in large object space.
  intptr_t old_generation_allocation_limit_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  bool old_gen_exhausted_;

  // Indicates that inline bump-pointer allocation has been globally disabled
  // for all spaces. This is used to disable allocations in generated code.
  bool inline_allocation_disabled_;

  // Weak list heads, threaded through the objects.
  // List heads are initialized lazily and contain the undefined_value at
  // start.
  Object* native_contexts_list_;
  Object* array_buffers_list_;
  Object* allocation_sites_list_;

  // WeakHashTable that maps objects embedded in optimized code to dependent
  // code list. It is initialized lazily and contains the undefined_value at
  // the start.
  Object* weak_object_to_code_table_;

  // List of encountered weak collections (JSWeakMap and JSWeakSet) during
  // marking. It is initialized during marking, destroyed after marking and
  // contains Smi(0) while marking is not active.
  Object* encountered_weak_collections_;
  StoreBufferRebuilder store_buffer_rebuilder_;

  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantStringTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  static const StringTypeTable string_type_table[];
  static const ConstantStringTable constant_string_table[];
  static const StructTable struct_table[];
1582 // The special hidden string which is an empty string, but does not match
1583 // any string when looked up in properties.
1584 String* hidden_string_;
1586 // GC callback function, called before and after mark-compact GC.
1587 // Allocations in the callback function are disallowed.
1588 struct GCPrologueCallbackPair {
1589 GCPrologueCallbackPair(v8::Isolate::GCPrologueCallback callback,
1590 GCType gc_type, bool pass_isolate)
1591 : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {}
1592 bool operator==(const GCPrologueCallbackPair& pair) const {
1593 return pair.callback == callback;
1594 }
1595 v8::Isolate::GCPrologueCallback callback;
1596 GCType gc_type;
1597 // TODO(dcarney): remove variable.
1598 bool pass_isolate_;
1599 };
1600 List<GCPrologueCallbackPair> gc_prologue_callbacks_;
1602 struct GCEpilogueCallbackPair {
1603 GCEpilogueCallbackPair(v8::Isolate::GCPrologueCallback callback,
1604 GCType gc_type, bool pass_isolate)
1605 : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {}
1606 bool operator==(const GCEpilogueCallbackPair& pair) const {
1607 return pair.callback == callback;
1608 }
1609 v8::Isolate::GCPrologueCallback callback;
1610 GCType gc_type;
1611 // TODO(dcarney): remove variable.
1612 bool pass_isolate_;
1613 };
1614 List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
1616 // Support for computing object sizes during GC.
1617 HeapObjectCallback gc_safe_size_of_old_object_;
1618 static int GcSafeSizeOfOldObject(HeapObject* object);
1620 // Update the GC state. Called from the mark-compact collector.
1621 void MarkMapPointersAsEncoded(bool encoded) {
1622 DCHECK(!encoded);
1623 gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
1624 }
1626 // Code that should be run before and after each GC. Includes some
1627 // reporting/verification activities when compiled with DEBUG set.
1628 void GarbageCollectionPrologue();
1629 void GarbageCollectionEpilogue();
1631 // Pretenuring decisions are made based on feedback collected during new
1632 // space evacuation. Note that between feedback collection and calling this
1633 // method, objects in old space must not move.
1634 // Right now we only process pretenuring feedback in high promotion mode.
1635 void ProcessPretenuringFeedback();
1637 // Checks whether a global GC is necessary.
1638 GarbageCollector SelectGarbageCollector(AllocationSpace space,
1639 const char** reason);
1641 // Make sure there is a filler value behind the top of the new space
1642 // so that the GC does not confuse some uninitialized/stale memory
1643 // with the allocation memento of the object at the top.
1644 void EnsureFillerObjectAtTop();
1646 // Ensure that we have swept all spaces in such a way that we can iterate
1647 // over all objects. May cause a GC.
1648 void MakeHeapIterable();
1650 // Performs garbage collection operation.
1651 // Returns whether there is a chance that another major GC could
1652 // collect more garbage.
1653 bool CollectGarbage(
1654 GarbageCollector collector, const char* gc_reason,
1655 const char* collector_reason,
1656 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
1658 // Performs garbage collection
1659 // Returns whether there is a chance another major GC could
1660 // collect more garbage.
1661 bool PerformGarbageCollection(
1662 GarbageCollector collector,
1663 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
1665 inline void UpdateOldSpaceLimits();
1667 // Selects the proper allocation space depending on the given object
1668 // size, pretenuring decision, and preferred old-space.
1669 static AllocationSpace SelectSpace(int object_size,
1670 AllocationSpace preferred_old_space,
1671 PretenureFlag pretenure) {
1672 DCHECK(preferred_old_space == OLD_POINTER_SPACE ||
1673 preferred_old_space == OLD_DATA_SPACE);
1674 if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
1675 return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
1676 }
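// Worked examples, assuming a 1 MB request exceeds
// Page::kMaxRegularHeapObjectSize:
//
//   SelectSpace(64, OLD_DATA_SPACE, TENURED)       // => OLD_DATA_SPACE
//   SelectSpace(64, OLD_DATA_SPACE, NOT_TENURED)   // => NEW_SPACE
//   SelectSpace(1 << 20, OLD_DATA_SPACE, TENURED)  // => LO_SPACE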
1678 // Allocate an uninitialized object. The memory is non-executable if the
1679 // hardware and OS allow. This is the single choke-point for allocations
1680 // performed by the runtime and should not be bypassed (to extend this to
1681 // inlined allocations, use the Heap::DisableInlineAllocation() support).
1682 MUST_USE_RESULT inline AllocationResult AllocateRaw(
1683 int size_in_bytes, AllocationSpace space, AllocationSpace retry_space);
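// Typical call pattern (a sketch of the retry protocol; AllocationResult::To
// fails when the result is a retry):
//
//   HeapObject* obj = NULL;
//   AllocationResult allocation =
//       AllocateRaw(size, NEW_SPACE, OLD_POINTER_SPACE);
//   if (!allocation.To(&obj)) return allocation;  // propagate the retry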
1685 // Allocates a heap object based on the map.
1686 MUST_USE_RESULT AllocationResult
1687 Allocate(Map* map, AllocationSpace space,
1688 AllocationSite* allocation_site = NULL);
1690 // Allocates a partial map for bootstrapping.
1691 MUST_USE_RESULT AllocationResult
1692 AllocatePartialMap(InstanceType instance_type, int instance_size);
1694 // Initializes a JSObject based on its map.
1695 void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
1696 Map* map);
1697 void InitializeAllocationMemento(AllocationMemento* memento,
1698 AllocationSite* allocation_site);
1700 // Allocate a block of memory in the given space (filled with a filler).
1701 // Used as a fall-back for generated code when the space is full.
1702 MUST_USE_RESULT AllocationResult
1703 AllocateFillerObject(int size, bool double_align, AllocationSpace space);
1705 // Allocate an uninitialized fixed array.
1706 MUST_USE_RESULT AllocationResult
1707 AllocateRawFixedArray(int length, PretenureFlag pretenure);
1709 // Allocate an uninitialized fixed double array.
1710 MUST_USE_RESULT AllocationResult
1711 AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);
1713 // Allocate an initialized fixed array with the given filler value.
1714 MUST_USE_RESULT AllocationResult
1715 AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
1716 Object* filler);
1718 // Allocates and partially initializes a String. There are two String
1719 // encodings: ASCII and two byte. These functions allocate a string of the
1720 // given length and set its map and length fields. The characters of the
1721 // string are uninitialized.
1722 MUST_USE_RESULT AllocationResult
1723 AllocateRawOneByteString(int length, PretenureFlag pretenure);
1724 MUST_USE_RESULT AllocationResult
1725 AllocateRawTwoByteString(int length, PretenureFlag pretenure);
1727 bool CreateInitialMaps();
1728 void CreateInitialObjects();
1730 // Allocates an internalized string in old space based on the character
1731 // stream.
1732 MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
1733 Vector<const char> str, int chars, uint32_t hash_field);
1735 MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
1736 Vector<const uint8_t> str, uint32_t hash_field);
1738 MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
1739 Vector<const uc16> str, uint32_t hash_field);
1741 template <bool is_one_byte, typename T>
1742 MUST_USE_RESULT AllocationResult
1743 AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);
1745 template <typename T>
1746 MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
1747 T t, int chars, uint32_t hash_field);
1749 // Allocates an uninitialized fixed array. It must be filled by the caller.
1750 MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);
1752 // Make a copy of src and return it. Returns
1753 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1754 MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);
1756 // Make a copy of src, set the map, and return the copy. Returns
1757 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1758 MUST_USE_RESULT AllocationResult
1759 CopyFixedArrayWithMap(FixedArray* src, Map* map);
1761 // Make a copy of src and return it. Returns
1762 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1763 MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
1764 FixedDoubleArray* src);
1766 // Make a copy of src and return it. Returns
1767 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1768 MUST_USE_RESULT inline AllocationResult CopyConstantPoolArray(
1769 ConstantPoolArray* src);
1772 // Computes a single character string where the character has the given code.
1773 // A cache is used for ASCII codes.
1774 MUST_USE_RESULT AllocationResult
1775 LookupSingleCharacterStringFromCode(uint16_t code);
1777 // Allocate a symbol in old space.
1778 MUST_USE_RESULT AllocationResult AllocateSymbol();
1780 // Make a copy of src, set the map, and return the copy.
1781 MUST_USE_RESULT AllocationResult
1782 CopyConstantPoolArrayWithMap(ConstantPoolArray* src, Map* map);
1784 MUST_USE_RESULT AllocationResult AllocateConstantPoolArray(
1785 const ConstantPoolArray::NumberOfEntries& small);
1787 MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray(
1788 const ConstantPoolArray::NumberOfEntries& small,
1789 const ConstantPoolArray::NumberOfEntries& extended);
1791 // Allocates an external array of the specified length and type.
1792 MUST_USE_RESULT AllocationResult
1793 AllocateExternalArray(int length, ExternalArrayType array_type,
1794 void* external_pointer, PretenureFlag pretenure);
1796 // Allocates a fixed typed array of the specified length and type.
1797 MUST_USE_RESULT AllocationResult
1798 AllocateFixedTypedArray(int length, ExternalArrayType array_type,
1799 PretenureFlag pretenure);
1801 // Make a copy of src and return it.
1802 MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);
1804 // Make a copy of src, set the map, and return the copy.
1805 MUST_USE_RESULT AllocationResult
1806 CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, Map* map);
1808 // Allocates a fixed double array with uninitialized values.
1809 MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
1810 int length, PretenureFlag pretenure = NOT_TENURED);
1812 // These two Create*EntryStub functions are here and forced to not be inlined
1813 // because of a gcc-4.4 bug that assigns wrong vtable entries.
1814 NO_INLINE(void CreateJSEntryStub());
1815 NO_INLINE(void CreateJSConstructEntryStub());
1817 void CreateFixedStubs();
1819 // Allocate empty fixed array.
1820 MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();
1822 // Allocate empty external array of given type.
1823 MUST_USE_RESULT AllocationResult
1824 AllocateEmptyExternalArray(ExternalArrayType array_type);
1826 // Allocate empty fixed typed array of given type.
1827 MUST_USE_RESULT AllocationResult
1828 AllocateEmptyFixedTypedArray(ExternalArrayType array_type);
1830 // Allocate empty constant pool array.
1831 MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray();
1833 // Allocate a tenured simple cell.
1834 MUST_USE_RESULT AllocationResult AllocateCell(Object* value);
1836 // Allocate a tenured JS global property cell initialized with the hole.
1837 MUST_USE_RESULT AllocationResult AllocatePropertyCell();
1839 // Allocates a new utility object in the old generation.
1840 MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);
1842 // Allocates a new foreign object.
1843 MUST_USE_RESULT AllocationResult
1844 AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);
1846 MUST_USE_RESULT AllocationResult
1847 AllocateCode(int object_size, bool immovable);
1849 MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key);
1851 MUST_USE_RESULT AllocationResult InternalizeString(String* str);
1853 // Performs a minor collection in new generation.
1854 void Scavenge();
1856 // Commits from space if it is uncommitted.
1857 void EnsureFromSpaceIsCommitted();
1859 // Uncommit unused semi space.
1860 bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
1862 // Fill in bogus values in from space.
1863 void ZapFromSpace();
1865 static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
1866 Heap* heap, Object** pointer);
1868 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1869 static void ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page,
1870 StoreBufferEvent event);
1872 // Performs a major collection in the whole heap.
1873 void MarkCompact();
1875 // Code to be run before and after mark-compact.
1876 void MarkCompactPrologue();
1878 void ProcessNativeContexts(WeakObjectRetainer* retainer);
1879 void ProcessArrayBuffers(WeakObjectRetainer* retainer);
1880 void ProcessAllocationSites(WeakObjectRetainer* retainer);
1882 // Deopts all code that contains allocation instructions which are tenured or
1883 // not tenured. Moreover, it clears the pretenuring allocation site statistics.
1884 void ResetAllAllocationSitesDependentCode(PretenureFlag flag);
1886 // Evaluates local pretenuring for the old space and calls
1887 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
1888 // the old space.
1889 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
1891 // Called on heap tear-down.
1892 void TearDownArrayBuffers();
1894 // Record statistics before and after garbage collection.
1895 void ReportStatisticsBeforeGC();
1896 void ReportStatisticsAfterGC();
1898 // Slow part of scavenge object.
1899 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1901 // Total RegExp code ever generated.
1902 double total_regexp_code_generated_;
1906 // Computes the length of the full-sized number string cache.
1907 int FullSizeNumberStringCacheLength();
1908 // Flush the number to string cache.
1909 void FlushNumberStringCache();
1911 // Sets used allocation sites entries to undefined.
1912 void FlushAllocationSitesScratchpad();
1914 // Initializes the allocation sites scratchpad with undefined values.
1915 void InitializeAllocationSitesScratchpad();
1917 // Adds an allocation site to the scratchpad if there is space left.
1918 void AddAllocationSiteToScratchpad(AllocationSite* site,
1919 ScratchpadSlotMode mode);
1921 void UpdateSurvivalStatistics(int start_new_space_size);
1923 static const int kYoungSurvivalRateHighThreshold = 90;
1924 static const int kYoungSurvivalRateAllowedDeviation = 15;
1926 static const int kOldSurvivalRateLowThreshold = 10;
1928 int high_survival_rate_period_length_;
1929 intptr_t promoted_objects_size_;
1930 double promotion_rate_;
1931 intptr_t semi_space_copied_object_size_;
1932 double semi_space_copied_rate_;
1933 int nodes_died_in_new_space_;
1934 int nodes_copied_in_new_space_;
1935 int nodes_promoted_;
1937 // This is the pretenuring trigger for allocation sites that are in maybe
1938 // tenure state. When we switch to the maximum new space size, we deoptimize
1939 // the code that belongs to the allocation site and derive the lifetime
1940 // of the allocation site.
1941 unsigned int maximum_size_scavenges_;
1943 // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
1944 // Re-visit incremental marking heuristics.
1945 bool IsHighSurvivalRate() { return high_survival_rate_period_length_ > 0; }
1947 void SelectScavengingVisitorsTable();
1949 void StartIdleRound() { mark_sweeps_since_idle_round_started_ = 0; }
1951 void FinishIdleRound() {
1952 mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
1953 scavenges_since_last_idle_round_ = 0;
1954 }
1956 bool EnoughGarbageSinceLastIdleRound() {
1957 return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
1958 }
1960 // Estimates how many milliseconds a Mark-Sweep would take to complete.
1961 // In the idle notification handler we assume that this function will return:
1962 // - a number less than 10 for small heaps, which are less than 8 MB, and
1963 // - a number greater than 10 for large heaps, which are greater than 32 MB.
1964 int TimeMarkSweepWouldTakeInMs() {
1965 // Rough estimate of how many megabytes of heap can be processed in 1 ms.
1966 static const int kMbPerMs = 2;
1968 int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
1969 return heap_size_mb / kMbPerMs;
1970 }
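// E.g. with kMbPerMs == 2, a 16 MB heap yields an estimate of 8 ms and a
// 64 MB heap yields 32 ms.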
1972 void AdvanceIdleIncrementalMarking(intptr_t step_size);
1974 void ClearObjectStats(bool clear_last_time_stats = false);
1976 void set_weak_object_to_code_table(Object* value) {
1977 DCHECK(!InNewSpace(value));
1978 weak_object_to_code_table_ = value;
1979 }
1981 Object** weak_object_to_code_table_address() {
1982 return &weak_object_to_code_table_;
1983 }
1985 inline void UpdateAllocationsHash(HeapObject* object);
1986 inline void UpdateAllocationsHash(uint32_t value);
1987 inline void PrintAlloctionsHash();
1989 static const int kInitialStringTableSize = 2048;
1990 static const int kInitialEvalCacheSize = 64;
1991 static const int kInitialNumberStringCacheSize = 256;
1993 // Object counts and used memory by InstanceType.
1994 size_t object_counts_[OBJECT_STATS_COUNT];
1995 size_t object_counts_last_time_[OBJECT_STATS_COUNT];
1996 size_t object_sizes_[OBJECT_STATS_COUNT];
1997 size_t object_sizes_last_time_[OBJECT_STATS_COUNT];
1999 // Maximum GC pause.
2000 double max_gc_pause_;
2002 // Total time spent in GC.
2003 double total_gc_time_ms_;
2005 // Maximum size of objects alive after GC.
2006 intptr_t max_alive_after_gc_;
2008 // Minimal interval between two subsequent collections.
2009 double min_in_mutator_;
2011 // Cumulative GC time spent in marking.
2012 double marking_time_;
2014 // Cumulative GC time spent in sweeping.
2015 double sweeping_time_;
2017 MarkCompactCollector mark_compact_collector_;
2019 StoreBuffer store_buffer_;
2023 IncrementalMarking incremental_marking_;
2025 int number_idle_notifications_;
2026 unsigned int last_idle_notification_gc_count_;
2027 bool last_idle_notification_gc_count_init_;
2029 int mark_sweeps_since_idle_round_started_;
2030 unsigned int gc_count_at_last_idle_gc_;
2031 int scavenges_since_last_idle_round_;
2033 // These two counters are monotonically increasing and never reset.
2034 size_t full_codegen_bytes_generated_;
2035 size_t crankshaft_codegen_bytes_generated_;
2037 // If the --deopt_every_n_garbage_collections flag is set to a positive value,
2038 // this variable holds the number of garbage collections since the last
2039 // deoptimization triggered by garbage collection.
2040 int gcs_since_last_deopt_;
2043 int no_weak_object_verification_scope_depth_;
2046 static const int kAllocationSiteScratchpadSize = 256;
2047 int allocation_sites_scratchpad_length_;
2049 static const int kMaxMarkSweepsInIdleRound = 7;
2050 static const int kIdleScavengeThreshold = 5;
2052 // Shared state read by the scavenge collector and set by ScavengeObject.
2053 PromotionQueue promotion_queue_;
2055 // Flag is set when the heap has been configured. The heap can be repeatedly
2056 // configured through the API until it is set up.
2057 bool configured_;
2059 ExternalStringTable external_string_table_;
2061 VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
2063 MemoryChunk* chunks_queued_for_free_;
2065 base::Mutex relocation_mutex_;
2067 int gc_callbacks_depth_;
2069 friend class AlwaysAllocateScope;
2070 friend class Factory;
2071 friend class GCCallbacksScope;
2072 friend class GCTracer;
2073 friend class HeapIterator;
2074 friend class Isolate;
2075 friend class MarkCompactCollector;
2076 friend class MarkCompactMarkingVisitor;
2077 friend class MapCompact;
2079 friend class NoWeakObjectVerificationScope;
2083 DISALLOW_COPY_AND_ASSIGN(Heap);
2084 };
2087 class HeapStats {
2088 public:
2089 static const int kStartMarker = 0xDECADE00;
2090 static const int kEndMarker = 0xDECADE01;
2092 int* start_marker; // 0
2093 int* new_space_size; // 1
2094 int* new_space_capacity; // 2
2095 intptr_t* old_pointer_space_size; // 3
2096 intptr_t* old_pointer_space_capacity; // 4
2097 intptr_t* old_data_space_size; // 5
2098 intptr_t* old_data_space_capacity; // 6
2099 intptr_t* code_space_size; // 7
2100 intptr_t* code_space_capacity; // 8
2101 intptr_t* map_space_size; // 9
2102 intptr_t* map_space_capacity; // 10
2103 intptr_t* cell_space_size; // 11
2104 intptr_t* cell_space_capacity; // 12
2105 intptr_t* lo_space_size; // 13
2106 int* global_handle_count; // 14
2107 int* weak_global_handle_count; // 15
2108 int* pending_global_handle_count; // 16
2109 int* near_death_global_handle_count; // 17
2110 int* free_global_handle_count; // 18
2111 intptr_t* memory_allocator_size; // 19
2112 intptr_t* memory_allocator_capacity; // 20
2113 int* objects_per_type; // 21
2114 int* size_per_type; // 22
2115 int* os_error; // 23
2116 int* end_marker; // 24
2117 intptr_t* property_cell_space_size; // 25
2118 intptr_t* property_cell_space_capacity; // 26
2119 };
2122 class AlwaysAllocateScope {
2123 public:
2124 explicit inline AlwaysAllocateScope(Isolate* isolate);
2125 inline ~AlwaysAllocateScope();
2127 private:
2128 // Implicitly disable artificial allocation failures.
2129 Heap* heap_;
2130 DisallowAllocationFailure daf_;
2131 };
2135 class NoWeakObjectVerificationScope {
2136 public:
2137 inline NoWeakObjectVerificationScope();
2138 inline ~NoWeakObjectVerificationScope();
2139 };
2143 class GCCallbacksScope {
2144 public:
2145 explicit inline GCCallbacksScope(Heap* heap);
2146 inline ~GCCallbacksScope();
2148 inline bool CheckReenter();
2150 private:
2151 Heap* heap_;
2152 };
2155 // Visitor class to verify interior pointers in spaces that do not contain
2156 // or care about intergenerational references. All heap object pointers have to
2157 // point into the heap to a location that has a map pointer at its first word.
2158 // Caveat: Heap::Contains is an approximation because it can return true for
2159 // objects in a heap space but above the allocation pointer.
2160 class VerifyPointersVisitor : public ObjectVisitor {
2161 public:
2162 inline void VisitPointers(Object** start, Object** end);
2163 };
2166 // Verify that all objects are Smis.
2167 class VerifySmisVisitor : public ObjectVisitor {
2168 public:
2169 inline void VisitPointers(Object** start, Object** end);
2170 };
2173 // Space iterator for iterating over all spaces of the heap. Returns each space
2174 // in turn, and null when it is done.
2175 class AllSpaces BASE_EMBEDDED {
2176 public:
2177 explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
2178 Space* next();
2180 private:
2181 Heap* heap_;
2182 int counter_;
2183 };
2186 // Space iterator for iterating over all old spaces of the heap: Old pointer
2187 // space, old data space and code space. Returns each space in turn, and null
2188 // when it is done.
2189 class OldSpaces BASE_EMBEDDED {
2190 public:
2191 explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
2192 OldSpace* next();
2194 private:
2195 Heap* heap_;
2196 int counter_;
2197 };
2200 // Space iterator for iterating over all the paged spaces of the heap: Map
2201 // space, old pointer space, old data space, code space and cell space. Returns
2202 // each space in turn, and null when it is done.
2203 class PagedSpaces BASE_EMBEDDED {
2204 public:
2205 explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
2206 PagedSpace* next();
2208 private:
2209 Heap* heap_;
2210 int counter_;
2211 };
2214 // Space iterator for iterating over all spaces of the heap.
2215 // For each space an object iterator is provided. The deallocation of the
2216 // returned object iterators is handled by the space iterator.
2217 class SpaceIterator : public Malloced {
2218 public:
2219 explicit SpaceIterator(Heap* heap);
2220 SpaceIterator(Heap* heap, HeapObjectCallback size_func);
2221 virtual ~SpaceIterator();
2223 bool has_next();
2224 ObjectIterator* next();
2226 private:
2227 ObjectIterator* CreateIterator();
2229 Heap* heap_;
2230 int current_space_; // from enum AllocationSpace.
2231 ObjectIterator* iterator_; // object iterator for the current space.
2232 HeapObjectCallback size_func_;
2233 };
2236 // A HeapIterator provides iteration over the whole heap. It
2237 // aggregates the specific iterators for the different spaces as
2238 // these can iterate over one space only.
2240 // HeapIterator ensures there is no allocation during its lifetime
2241 // (using an embedded DisallowHeapAllocation instance).
2243 // HeapIterator can skip free list nodes (that is, de-allocated heap
2244 // objects that still remain in the heap). As the implementation of
2245 // free-node filtering uses GC marks, it can't be used during MS/MC GC
2246 // phases. Also, it is forbidden to interrupt iteration in this mode,
2247 // as this will leave heap objects marked (and thus, unusable).
2248 class HeapObjectsFilter;
2250 class HeapIterator BASE_EMBEDDED {
2251 public:
2252 enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
2254 explicit HeapIterator(Heap* heap);
2255 HeapIterator(Heap* heap, HeapObjectsFiltering filtering);
2256 ~HeapIterator();
2258 HeapObject* next();
2259 void reset();
2261 private:
2262 struct MakeHeapIterableHelper {
2263 explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
2264 };
2266 // Perform the initialization.
2267 void Init();
2268 // Perform all necessary shutdown (destruction) work.
2269 void Shutdown();
2270 HeapObject* NextObject();
2272 MakeHeapIterableHelper make_heap_iterable_helper_;
2273 DisallowHeapAllocation no_heap_allocation_;
2274 Heap* heap_;
2275 HeapObjectsFiltering filtering_;
2276 HeapObjectsFilter* filter_;
2277 // Space iterator for iterating all the spaces.
2278 SpaceIterator* space_iterator_;
2279 // Object iterator for the space currently being iterated.
2280 ObjectIterator* object_iterator_;
2281 };
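// Typical usage (next() is assumed to return NULL once iteration is done):
//
//   HeapIterator iterator(heap);
//   for (HeapObject* obj = iterator.next(); obj != NULL;
//        obj = iterator.next()) {
//     // Inspect obj; allocation is disallowed while the iterator is alive.
//   }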
2284 // Cache for mapping (map, property name) into field offset.
2285 // Cleared at startup and prior to mark sweep collection.
2286 class KeyedLookupCache {
2287 public:
2288 // Lookup field offset for (map, name). If absent, -1 is returned.
2289 int Lookup(Handle<Map> map, Handle<Name> name);
2291 // Update an element in the cache.
2292 void Update(Handle<Map> map, Handle<Name> name, int field_offset);
2294 // Clear the cache.
2295 void Clear();
2297 static const int kLength = 256;
2298 static const int kCapacityMask = kLength - 1;
2299 static const int kMapHashShift = 5;
2300 static const int kHashMask = -4; // Zero the last two bits.
2301 static const int kEntriesPerBucket = 4;
2302 static const int kEntryLength = 2;
2303 static const int kMapIndex = 0;
2304 static const int kKeyIndex = 1;
2305 static const int kNotFound = -1;
2307 // kEntriesPerBucket should be a power of 2.
2308 STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
2309 STATIC_ASSERT(kEntriesPerBucket == -kHashMask);
2311 private:
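// Because kHashMask == -4, Hash() yields bucket-aligned indices (last two
// bits zero) and a lookup probes the kEntriesPerBucket == 4 consecutive
// entries of that bucket. A sketch of the intended use from runtime code:
//
//   int offset = isolate->keyed_lookup_cache()->Lookup(map, name);
//   if (offset == KeyedLookupCache::kNotFound) {
//     // ... perform the slow property lookup, then cache the result:
//     isolate->keyed_lookup_cache()->Update(map, name, field_offset);
//   }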
2312 KeyedLookupCache() {
2313 for (int i = 0; i < kLength; ++i) {
2314 keys_[i].map = NULL;
2315 keys_[i].name = NULL;
2316 field_offsets_[i] = kNotFound;
2317 }
2318 }
2320 static inline int Hash(Handle<Map> map, Handle<Name> name);
2322 // Get the address of the keys and field_offsets arrays. Used in
2323 // generated code to perform cache lookups.
2324 Address keys_address() { return reinterpret_cast<Address>(&keys_); }
2326 Address field_offsets_address() {
2327 return reinterpret_cast<Address>(&field_offsets_);
2328 }
2330 struct Key {
2331 Map* map;
2332 Name* name;
2333 };
2335 Key keys_[kLength];
2336 int field_offsets_[kLength];
2338 friend class ExternalReference;
2339 friend class Isolate;
2340 DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
2344 // Cache for mapping (map, property name) into descriptor index.
2345 // The cache contains both positive and negative results.
2346 // A descriptor index equal to kAbsent means the property is absent.
2347 // Cleared at startup and prior to any gc.
2348 class DescriptorLookupCache {
2349 public:
2350 // Lookup descriptor index for (map, name).
2351 // If absent, kAbsent is returned.
2352 int Lookup(Map* source, Name* name) {
2353 if (!name->IsUniqueName()) return kAbsent;
2354 int index = Hash(source, name);
2355 Key& key = keys_[index];
2356 if ((key.source == source) && (key.name == name)) return results_[index];
2357 return kAbsent;
2358 }
2360 // Update an element in the cache.
2361 void Update(Map* source, Name* name, int result) {
2362 DCHECK(result != kAbsent);
2363 if (name->IsUniqueName()) {
2364 int index = Hash(source, name);
2365 Key& key = keys_[index];
2366 key.source = source;
2367 key.name = name;
2368 results_[index] = result;
2369 }
2370 }
2372 // Clear the cache.
2373 void Clear();
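// A sketch of the intended cache discipline (callers go through the
// isolate's cache instance):
//
//   int number = isolate->descriptor_lookup_cache()->Lookup(map, name);
//   if (number == DescriptorLookupCache::kAbsent) {
//     number = /* full search of the map's descriptor array */;
//     isolate->descriptor_lookup_cache()->Update(map, name, number);
//   }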
2375 static const int kAbsent = -2;
2378 DescriptorLookupCache() {
2379 for (int i = 0; i < kLength; ++i) {
2380 keys_[i].source = NULL;
2381 keys_[i].name = NULL;
2382 results_[i] = kAbsent;
2383 }
2384 }
2386 static int Hash(Object* source, Name* name) {
2387 // Uses only lower 32 bits if pointers are larger.
2388 uint32_t source_hash =
2389 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
2390 kPointerSizeLog2;
2391 uint32_t name_hash =
2392 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >>
2393 kPointerSizeLog2;
2394 return (source_hash ^ name_hash) % kLength;
2395 }
2397 static const int kLength = 64;
2398 struct Key {
2399 Map* source;
2400 Name* name;
2401 };
2403 Key keys_[kLength];
2404 int results_[kLength];
2406 friend class Isolate;
2407 DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
2411 class RegExpResultsCache {
2412 public:
2413 enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS };
2415 // Attempt to retrieve a cached result. On failure, 0 is returned as a Smi.
2416 // On success, the returned result is guaranteed to be a COW-array.
2417 static Object* Lookup(Heap* heap, String* key_string, Object* key_pattern,
2418 ResultsCacheType type);
2419 // Attempt to add value_array to the cache specified by type. On success,
2420 // value_array is turned into a COW-array.
2421 static void Enter(Isolate* isolate, Handle<String> key_string,
2422 Handle<Object> key_pattern, Handle<FixedArray> value_array,
2423 ResultsCacheType type);
2424 static void Clear(FixedArray* cache);
2425 static const int kRegExpResultsCacheSize = 0x100;
2427 private:
2428 static const int kArrayEntriesPerCacheEntry = 4;
2429 static const int kStringOffset = 0;
2430 static const int kPatternOffset = 1;
2431 static const int kArrayOffset = 2;
2432 };
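// Layout sketch, assuming the offsets above index into a flat FixedArray of
// kRegExpResultsCacheSize entries: entry i starts at slot
// i * kArrayEntriesPerCacheEntry, with
//
//   [.. + kStringOffset]   the subject/key string,
//   [.. + kPatternOffset]  the pattern, and
//   [.. + kArrayOffset]    the cached COW result array,
//
// and the fourth slot of each entry reserved.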
2435 // Abstract base class for checking whether a weak object should be retained.
2436 class WeakObjectRetainer {
2437 public:
2438 virtual ~WeakObjectRetainer() {}
2440 // Return whether this object should be retained. If NULL is returned the
2441 // object has no references. Otherwise the address of the retained object
2442 // should be returned as in some GC situations the object has been moved.
2443 virtual Object* RetainAs(Object* object) = 0;
2444 };
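// A minimal illustrative retainer (hypothetical, not part of this header):
// retain every object unchanged, leaving the weak list intact:
//
//   class KeepAllRetainer : public WeakObjectRetainer {
//    public:
//     virtual Object* RetainAs(Object* object) { return object; }
//   };
//
// Returning NULL instead would tell the GC to unlink the object.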
2447 // Intrusive object marking uses least significant bit of
2448 // heap object's map word to mark objects.
2449 // Normally all map words have the least significant bit set
2450 // because they contain a tagged map pointer.
2451 // If the bit is not set, the object is marked.
2452 // All objects should be unmarked before resuming
2453 // JavaScript execution.
2454 class IntrusiveMarking {
2455 public:
2456 static bool IsMarked(HeapObject* object) {
2457 return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;
2458 }
2460 static void ClearMark(HeapObject* object) {
2461 uintptr_t map_word = object->map_word().ToRawValue();
2462 object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));
2463 DCHECK(!IsMarked(object));
2464 }
2466 static void SetMark(HeapObject* object) {
2467 uintptr_t map_word = object->map_word().ToRawValue();
2468 object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));
2469 DCHECK(IsMarked(object));
2470 }
2472 static Map* MapOfMarkedObject(HeapObject* object) {
2473 uintptr_t map_word = object->map_word().ToRawValue();
2474 return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();
2475 }
2477 static int SizeOfMarkedObject(HeapObject* object) {
2478 return object->SizeFromMap(MapOfMarkedObject(object));
2479 }
2481 private:
2482 static const uintptr_t kNotMarkedBit = 0x1;
2483 STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0); // NOLINT
2484 };
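// Worked example of the bit trick: a tagged map word such as 0x12341 has its
// least significant bit set (kHeapObjectTag), so IsMarked() is false.
// SetMark() clears the bit (0x12340 => marked); MapOfMarkedObject() ORs the
// bit back in, recovering the original map pointer 0x12341.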
2488 // Helper class for tracing paths to a search target Object from all roots.
2489 // The TracePathFrom() method can be used to trace paths from a specific
2490 // object to the search target object.
2491 class PathTracer : public ObjectVisitor {
2492 public:
2493 enum WhatToFind {
2494 FIND_ALL, // Will find all matches.
2495 FIND_FIRST // Will stop the search after first match.
2496 };
2498 // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
2499 static const int kMarkTag = 2;
2501 // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
2502 // after the first match. If FIND_ALL is specified, then tracing will be
2503 // done for all matches.
2504 PathTracer(Object* search_target, WhatToFind what_to_find,
2505 VisitMode visit_mode)
2506 : search_target_(search_target),
2507 found_target_(false),
2508 found_target_in_trace_(false),
2509 what_to_find_(what_to_find),
2510 visit_mode_(visit_mode),
2511 object_stack_(20),
2512 no_allocation() {}
2514 virtual void VisitPointers(Object** start, Object** end);
2517 void TracePathFrom(Object** root);
2519 bool found() const { return found_target_; }
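// A sketch of typical use (root_slot is a hypothetical Object** to start
// from):
//
//   PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);
//   tracer.TracePathFrom(root_slot);
//   if (tracer.found()) {
//     // A path to the target was recorded via ProcessResults().
//   }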
2521 static Object* const kAnyGlobalObject;
2523 protected:
2524 class MarkVisitor;
2525 class UnmarkVisitor;
2527 void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
2528 void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
2529 virtual void ProcessResults();
2531 Object* search_target_;
2532 bool found_target_;
2533 bool found_target_in_trace_;
2534 WhatToFind what_to_find_;
2535 VisitMode visit_mode_;
2536 List<Object*> object_stack_;
2538 DisallowHeapAllocation no_allocation; // i.e. no gc allowed.
2541 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2542 };
2544 }
2545 } // namespace v8::internal
2547 #endif // V8_HEAP_HEAP_H_