1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include "allocation.h"
35 #include "incremental-marking.h"
37 #include "mark-compact.h"
38 #include "objects-visiting.h"
40 #include "splay-tree-inl.h"
41 #include "store-buffer.h"
42 #include "v8-counters.h"
43 #include "v8globals.h"
// TODO(isolates): remove HEAP here
// Legacy convenience macro: expands to a function call so the heap is looked
// up at each use site rather than binding one global at compile time
// (presumably the current isolate's heap -- confirm against isolate code).
#define HEAP (_inline_get_heap_())
// Forward declaration only; the definition is not visible in this chunk.
inline Heap* _inline_get_heap_();
// Defines all the roots in Heap.
// Each entry is V(type, lower_case_name, CamelCaseName); expanding the list
// with a suitable V declares or initializes one strong GC root per entry.
// NOTE: the final entry must NOT carry a line-continuation backslash --
// otherwise the preprocessor would splice the directive that follows this
// macro (e.g. the ROOT_LIST #define) into this macro's replacement list,
// which is ill-formed.
#define STRONG_ROOT_LIST(V)                                                    \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, free_space_map, FreeSpaceMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  /* Cluster the most popular ones in a few cache lines here at the top. */    \
  V(Smi, store_buffer_top, StoreBufferTop)                                     \
  V(Oddball, undefined_value, UndefinedValue)                                  \
  V(Oddball, the_hole_value, TheHoleValue)                                     \
  V(Oddball, null_value, NullValue)                                            \
  V(Oddball, true_value, TrueValue)                                            \
  V(Oddball, false_value, FalseValue)                                          \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, meta_map, MetaMap)                                                    \
  V(Map, ascii_symbol_map, AsciiSymbolMap)                                     \
  V(Map, ascii_string_map, AsciiStringMap)                                     \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, global_context_map, GlobalContextMap)                                 \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, serialized_scope_info_map, SerializedScopeInfoMap)                    \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
  V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel)       \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
  V(FixedDoubleArray, empty_fixed_double_array, EmptyFixedDoubleArray)         \
  V(String, empty_string, EmptyString)                                         \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  V(Smi, stack_limit, StackLimit)                                              \
  V(Oddball, frame_alignment_marker, FrameAlignmentMarker)                     \
  V(Oddball, arguments_marker, ArgumentsMarker)                                \
  /* The first 32 roots above this line should be boring from a GC point of */ \
  /* view. This means they are never in new space and never on a page that */  \
  /* is being compacted. */                                                    \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, string_split_cache, StringSplitCache)                          \
  V(Object, termination_exception, TerminationException)                       \
  V(Map, string_map, StringMap)                                                \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, cons_ascii_string_map, ConsAsciiStringMap)                            \
  V(Map, sliced_string_map, SlicedStringMap)                                   \
  V(Map, sliced_ascii_string_map, SlicedAsciiStringMap)                        \
  V(Map, cons_symbol_map, ConsSymbolMap)                                       \
  V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap)                            \
  V(Map, external_symbol_map, ExternalSymbolMap)                               \
  V(Map, external_symbol_with_ascii_data_map, ExternalSymbolWithAsciiDataMap)  \
  V(Map, external_ascii_symbol_map, ExternalAsciiSymbolMap)                    \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap)  \
  V(Map, external_ascii_string_map, ExternalAsciiStringMap)                    \
  V(Map, undetectable_string_map, UndetectableStringMap)                       \
  V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap)            \
  V(Map, external_pixel_array_map, ExternalPixelArrayMap)                      \
  V(Map, external_byte_array_map, ExternalByteArrayMap)                        \
  V(Map, external_unsigned_byte_array_map, ExternalUnsignedByteArrayMap)       \
  V(Map, external_short_array_map, ExternalShortArrayMap)                      \
  V(Map, external_unsigned_short_array_map, ExternalUnsignedShortArrayMap)     \
  V(Map, external_int_array_map, ExternalIntArrayMap)                          \
  V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap)         \
  V(Map, external_float_array_map, ExternalFloatArrayMap)                      \
  V(Map, external_double_array_map, ExternalDoubleArrayMap)                    \
  V(Map, non_strict_arguments_elements_map, NonStrictArgumentsElementsMap)     \
  V(Map, function_context_map, FunctionContextMap)                             \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, with_context_map, WithContextMap)                                     \
  V(Map, block_context_map, BlockContextMap)                                   \
  V(Map, oddball_map, OddballMap)                                              \
  V(Map, message_object_map, JSMessageObjectMap)                               \
  V(Map, foreign_map, ForeignMap)                                              \
  V(HeapNumber, nan_value, NanValue)                                           \
  V(HeapNumber, infinity_value, InfinityValue)                                 \
  V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
  V(Map, neander_map, NeanderMap)                                              \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(Foreign, prototype_accessors, PrototypeAccessors)                          \
  V(NumberDictionary, code_stubs, CodeStubs)                                   \
  V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache)              \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache)        \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(Object, last_script_id, LastScriptId)                                      \
  V(Script, empty_script, EmptyScript)                                         \
  V(Smi, real_stack_limit, RealStackLimit)                                     \
  V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames)
// The complete root list: every strong root plus the symbol table. The
// symbol table is kept out of STRONG_ROOT_LIST, presumably because the GC
// treats it specially rather than as an ordinary strong root -- confirm
// against the collector code before relying on this.
#define ROOT_LIST(V)  \
  STRONG_ROOT_LIST(V) \
  V(SymbolTable, symbol_table, SymbolTable)
// Predefined symbols (interned strings). Each entry is
// V(c++_identifier, "string contents"); expanding the list with a suitable V
// declares or initializes one symbol per entry. The string literals are part
// of runtime behavior (several name internal builtins or error strings), so
// they must be preserved byte-for-byte.
#define SYMBOL_LIST(V)                                                   \
  V(Array_symbol, "Array")                                               \
  V(Object_symbol, "Object")                                             \
  V(Proto_symbol, "__proto__")                                           \
  V(StringImpl_symbol, "StringImpl")                                     \
  V(arguments_symbol, "arguments")                                       \
  V(Arguments_symbol, "Arguments")                                       \
  V(call_symbol, "call")                                                 \
  V(apply_symbol, "apply")                                               \
  V(caller_symbol, "caller")                                             \
  V(boolean_symbol, "boolean")                                           \
  V(Boolean_symbol, "Boolean")                                           \
  V(callee_symbol, "callee")                                             \
  V(constructor_symbol, "constructor")                                   \
  V(code_symbol, ".code")                                                \
  V(result_symbol, ".result")                                            \
  V(catch_var_symbol, ".catch-var")                                      \
  V(empty_symbol, "")                                                    \
  V(eval_symbol, "eval")                                                 \
  V(function_symbol, "function")                                         \
  V(length_symbol, "length")                                             \
  V(name_symbol, "name")                                                 \
  V(native_symbol, "native")                                             \
  V(null_symbol, "null")                                                 \
  V(number_symbol, "number")                                             \
  V(Number_symbol, "Number")                                             \
  V(nan_symbol, "NaN")                                                   \
  V(RegExp_symbol, "RegExp")                                             \
  V(source_symbol, "source")                                             \
  V(global_symbol, "global")                                             \
  V(ignore_case_symbol, "ignoreCase")                                    \
  V(multiline_symbol, "multiline")                                       \
  V(input_symbol, "input")                                               \
  V(index_symbol, "index")                                               \
  V(last_index_symbol, "lastIndex")                                      \
  V(object_symbol, "object")                                             \
  V(prototype_symbol, "prototype")                                       \
  V(string_symbol, "string")                                             \
  V(String_symbol, "String")                                             \
  V(Date_symbol, "Date")                                                 \
  V(this_symbol, "this")                                                 \
  V(to_string_symbol, "toString")                                        \
  V(char_at_symbol, "CharAt")                                            \
  V(undefined_symbol, "undefined")                                       \
  V(value_of_symbol, "valueOf")                                          \
  V(InitializeVarGlobal_symbol, "InitializeVarGlobal")                   \
  V(InitializeConstGlobal_symbol, "InitializeConstGlobal")               \
  V(KeyedLoadElementMonomorphic_symbol,                                  \
    "KeyedLoadElementMonomorphic")                                       \
  V(KeyedLoadElementPolymorphic_symbol,                                  \
    "KeyedLoadElementPolymorphic")                                       \
  V(KeyedStoreElementMonomorphic_symbol,                                 \
    "KeyedStoreElementMonomorphic")                                      \
  V(KeyedStoreElementPolymorphic_symbol,                                 \
    "KeyedStoreElementPolymorphic")                                      \
  V(stack_overflow_symbol, "kStackOverflowBoilerplate")                  \
  V(illegal_access_symbol, "illegal access")                             \
  V(out_of_memory_symbol, "out-of-memory")                               \
  V(illegal_execution_state_symbol, "illegal execution state")           \
  V(get_symbol, "get")                                                   \
  V(set_symbol, "set")                                                   \
  V(function_class_symbol, "Function")                                   \
  V(illegal_argument_symbol, "illegal argument")                         \
  V(MakeReferenceError_symbol, "MakeReferenceError")                     \
  V(MakeSyntaxError_symbol, "MakeSyntaxError")                           \
  V(MakeTypeError_symbol, "MakeTypeError")                               \
  V(invalid_lhs_in_assignment_symbol, "invalid_lhs_in_assignment")       \
  V(invalid_lhs_in_for_in_symbol, "invalid_lhs_in_for_in")               \
  V(invalid_lhs_in_postfix_op_symbol, "invalid_lhs_in_postfix_op")       \
  V(invalid_lhs_in_prefix_op_symbol, "invalid_lhs_in_prefix_op")         \
  V(illegal_return_symbol, "illegal_return")                             \
  V(illegal_break_symbol, "illegal_break")                               \
  V(illegal_continue_symbol, "illegal_continue")                         \
  V(unknown_label_symbol, "unknown_label")                               \
  V(redeclaration_symbol, "redeclaration")                               \
  V(failure_symbol, "<failure>")                                         \
  V(space_symbol, " ")                                                   \
  V(exec_symbol, "exec")                                                 \
  V(zero_symbol, "0")                                                    \
  V(global_eval_symbol, "GlobalEval")                                    \
  V(identity_hash_symbol, "v8::IdentityHash")                            \
  V(closure_symbol, "(closure)")                                         \
  V(use_strict, "use strict")                                            \
  V(anonymous_function_symbol, "(anonymous function)")                   \
  V(infinity_symbol, "Infinity")                                         \
  V(minus_infinity_symbol, "-Infinity")
241 // Forward declarations.
245 class WeakObjectRetainer;
248 typedef HeapObject* (*ExternalStringTableUpdaterCallback)(Heap* heap,
251 class StoreBufferRebuilder {
253 explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
254 : store_buffer_(store_buffer) {
257 void Callback(MemoryChunk* page, StoreBufferEvent event);
260 StoreBuffer* store_buffer_;
262 // We record in this variable how full the store buffer was when we started
263 // iterating over the current page, finding pointers to new space. If the
264 // store buffer overflows again we can exempt the page from the store buffer
265 // by rewinding to this point instead of having to search the store buffer.
266 Object*** start_of_current_page_;
267 // The current page we are scanning in the store buffer iterator.
268 MemoryChunk* current_page_;
273 // The all static Heap captures the interface to the global object heap.
274 // All JavaScript contexts by this process share the same object heap.
277 class HeapDebugUtils;
281 // A queue of objects promoted during scavenge. Each object is accompanied
282 // by it's size to avoid dereferencing a map pointer for scanning.
283 class PromotionQueue {
285 PromotionQueue() : front_(NULL), rear_(NULL) { }
287 void Initialize(Address start_address) {
288 // Assumes that a NewSpacePage exactly fits a number of promotion queue
289 // entries (where each is a pair of intptr_t). This allows us to simplify
290 // the test fpr when to switch pages.
291 ASSERT((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize)
293 ASSERT(NewSpacePage::IsAtEnd(start_address));
294 front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
297 bool is_empty() { return front_ == rear_; }
299 inline void insert(HeapObject* target, int size);
301 void remove(HeapObject** target, int* size) {
303 if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
304 NewSpacePage* front_page =
305 NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
306 ASSERT(!front_page->prev_page()->is_anchor());
308 reinterpret_cast<intptr_t*>(front_page->prev_page()->body_limit());
310 *target = reinterpret_cast<HeapObject*>(*(--front_));
311 *size = static_cast<int>(*(--front_));
312 // Assert no underflow.
313 SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
314 reinterpret_cast<Address>(front_));
318 // The front of the queue is higher in the memory page chain than the rear.
322 DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
326 typedef void (*ScavengingCallback)(Map* map,
331 // External strings table is a place where all external strings are
332 // registered. We need to keep track of such strings to properly
334 // The ExternalStringTable can contain both strings and objects with
335 // external resources. It was not renamed to make the patch simpler.
336 class ExternalStringTable {
338 // Registers an external string.
339 inline void AddString(String* string);
340 // Registers an external object.
341 inline void AddObject(HeapObject* string);
343 inline void Iterate(ObjectVisitor* v);
345 // Restores internal invariant and gets rid of collected strings.
346 // Must be called after each Iterate() that modified the strings.
349 // Destroys all allocated memory.
353 ExternalStringTable() { }
357 inline void Verify();
359 inline void AddOldObject(HeapObject* string);
361 // Notifies the table that only a prefix of the new list is valid.
362 inline void ShrinkNewObjects(int position);
364 // To speed up scavenge collections new space string are kept
365 // separate from old space strings.
366 List<Object*> new_space_strings_;
367 List<Object*> old_space_strings_;
371 DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
377 // Configure heap size before setup. Return false if the heap has been
379 bool ConfigureHeap(int max_semispace_size,
380 intptr_t max_old_gen_size,
381 intptr_t max_executable_size);
382 bool ConfigureHeapDefault();
384 // Initializes the global object heap. If create_heap_objects is true,
385 // also creates the basic non-mutable objects.
386 // Returns whether it succeeded.
387 bool Setup(bool create_heap_objects);
389 // Destroys all memory allocated by the heap.
392 // Set the stack limit in the roots_ array. Some architectures generate
393 // code that looks here, because it is faster than loading from the static
394 // jslimit_/real_jslimit_ variable in the StackGuard.
395 void SetStackLimits();
397 // Returns whether Setup has been called.
400 // Returns the maximum amount of memory reserved for the heap. For
401 // the young generation, we reserve 4 times the amount needed for a
402 // semi space. The young generation consists of two semi spaces and
403 // we reserve twice the amount needed for those in order to ensure
404 // that new space can be aligned to its size.
405 intptr_t MaxReserved() {
406 return 4 * reserved_semispace_size_ + max_old_generation_size_;
408 int MaxSemiSpaceSize() { return max_semispace_size_; }
409 int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
410 int InitialSemiSpaceSize() { return initial_semispace_size_; }
411 intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
412 intptr_t MaxExecutableSize() { return max_executable_size_; }
414 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
415 // more spaces are needed until it reaches the limit.
418 // Returns the amount of memory currently committed for the heap.
419 intptr_t CommittedMemory();
421 // Returns the amount of executable memory currently committed for the heap.
422 intptr_t CommittedMemoryExecutable();
424 // Returns the available bytes in space w/o growing.
425 // Heap doesn't guarantee that it can allocate an object that requires
426 // all available bytes. Check MaxHeapObjectSize() instead.
427 intptr_t Available();
429 // Returns the maximum object size in paged space.
430 inline int MaxObjectSizeInPagedSpace();
432 // Returns of size of all objects residing in the heap.
433 intptr_t SizeOfObjects();
435 // Return the starting address and a mask for the new space. And-masking an
436 // address with the mask will result in the start address of the new space
437 // for all addresses in either semispace.
438 Address NewSpaceStart() { return new_space_.start(); }
439 uintptr_t NewSpaceMask() { return new_space_.mask(); }
440 Address NewSpaceTop() { return new_space_.top(); }
442 NewSpace* new_space() { return &new_space_; }
443 OldSpace* old_pointer_space() { return old_pointer_space_; }
444 OldSpace* old_data_space() { return old_data_space_; }
445 OldSpace* code_space() { return code_space_; }
446 MapSpace* map_space() { return map_space_; }
447 CellSpace* cell_space() { return cell_space_; }
448 LargeObjectSpace* lo_space() { return lo_space_; }
450 bool always_allocate() { return always_allocate_scope_depth_ != 0; }
451 Address always_allocate_scope_depth_address() {
452 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
454 bool linear_allocation() {
455 return linear_allocation_scope_depth_ != 0;
458 Address* NewSpaceAllocationTopAddress() {
459 return new_space_.allocation_top_address();
461 Address* NewSpaceAllocationLimitAddress() {
462 return new_space_.allocation_limit_address();
465 // Uncommit unused semi space.
466 bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
468 // Allocates and initializes a new JavaScript object based on a
470 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
472 // Please note this does not perform a garbage collection.
473 MUST_USE_RESULT MaybeObject* AllocateJSObject(
474 JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
476 // Allocates and initializes a new global object based on a constructor.
477 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
479 // Please note this does not perform a garbage collection.
480 MUST_USE_RESULT MaybeObject* AllocateGlobalObject(JSFunction* constructor);
482 // Returns a deep copy of the JavaScript object.
483 // Properties and elements are copied too.
484 // Returns failure if allocation failed.
485 MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);
487 // Allocates the function prototype.
488 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
490 // Please note this does not perform a garbage collection.
491 MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);
493 // Allocates a Harmony proxy or function proxy.
494 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
496 // Please note this does not perform a garbage collection.
497 MUST_USE_RESULT MaybeObject* AllocateJSProxy(Object* handler,
500 MUST_USE_RESULT MaybeObject* AllocateJSFunctionProxy(Object* handler,
502 Object* construct_trap,
505 // Reinitialize a JSReceiver into an (empty) JS object of respective type and
506 // size, but keeping the original prototype. The receiver must have at least
507 // the size of the new object. The object is reinitialized and behaves as an
508 // object that has been freshly allocated.
509 // Returns failure if an error occured, otherwise object.
510 MUST_USE_RESULT MaybeObject* ReinitializeJSReceiver(JSReceiver* object,
514 // Reinitialize an JSGlobalProxy based on a constructor. The object
515 // must have the same size as objects allocated using the
516 // constructor. The object is reinitialized and behaves as an
517 // object that has been freshly allocated using the constructor.
518 MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy(
519 JSFunction* constructor, JSGlobalProxy* global);
521 // Allocates and initializes a new JavaScript object based on a map.
522 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
524 // Please note this does not perform a garbage collection.
525 MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
526 Map* map, PretenureFlag pretenure = NOT_TENURED);
528 // Allocates a heap object based on the map.
529 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
531 // Please note this function does not perform a garbage collection.
532 MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);
534 // Allocates a JS Map in the heap.
535 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
537 // Please note this function does not perform a garbage collection.
538 MUST_USE_RESULT MaybeObject* AllocateMap(
539 InstanceType instance_type,
541 ElementsKind elements_kind = FAST_ELEMENTS);
543 // Allocates a partial map for bootstrapping.
544 MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
547 // Allocate a map for the specified function
548 MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);
550 // Allocates an empty code cache.
551 MUST_USE_RESULT MaybeObject* AllocateCodeCache();
553 // Allocates a serialized scope info.
554 MUST_USE_RESULT MaybeObject* AllocateSerializedScopeInfo(int length);
556 // Allocates an empty PolymorphicCodeCache.
557 MUST_USE_RESULT MaybeObject* AllocatePolymorphicCodeCache();
559 // Clear the Instanceof cache (used when a prototype changes).
560 inline void ClearInstanceofCache();
562 // Allocates and fully initializes a String. There are two String
563 // encodings: ASCII and two byte. One should choose between the three string
564 // allocation functions based on the encoding of the string buffer used to
565 // initialized the string.
566 // - ...FromAscii initializes the string from a buffer that is ASCII
567 // encoded (it does not check that the buffer is ASCII encoded) and the
568 // result will be ASCII encoded.
569 // - ...FromUTF8 initializes the string from a buffer that is UTF-8
570 // encoded. If the characters are all single-byte characters, the
571 // result will be ASCII encoded, otherwise it will converted to two
573 // - ...FromTwoByte initializes the string from a buffer that is two-byte
574 // encoded. If the characters are all single-byte characters, the
575 // result will be converted to ASCII, otherwise it will be left as
577 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
579 // Please note this does not perform a garbage collection.
580 MUST_USE_RESULT MaybeObject* AllocateStringFromAscii(
581 Vector<const char> str,
582 PretenureFlag pretenure = NOT_TENURED);
583 MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
584 Vector<const char> str,
585 PretenureFlag pretenure = NOT_TENURED);
586 MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
587 Vector<const char> str,
588 PretenureFlag pretenure = NOT_TENURED);
589 MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
590 Vector<const uc16> str,
591 PretenureFlag pretenure = NOT_TENURED);
593 // Allocates a symbol in old space based on the character stream.
594 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
596 // Please note this function does not perform a garbage collection.
597 MUST_USE_RESULT inline MaybeObject* AllocateSymbol(Vector<const char> str,
599 uint32_t hash_field);
601 MUST_USE_RESULT inline MaybeObject* AllocateAsciiSymbol(
602 Vector<const char> str,
603 uint32_t hash_field);
605 MUST_USE_RESULT inline MaybeObject* AllocateTwoByteSymbol(
606 Vector<const uc16> str,
607 uint32_t hash_field);
609 MUST_USE_RESULT MaybeObject* AllocateInternalSymbol(
610 unibrow::CharacterStream* buffer, int chars, uint32_t hash_field);
612 MUST_USE_RESULT MaybeObject* AllocateExternalSymbol(
613 Vector<const char> str,
616 // Allocates and partially initializes a String. There are two String
617 // encodings: ASCII and two byte. These functions allocate a string of the
618 // given length and set its map and length fields. The characters of the
619 // string are uninitialized.
620 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
622 // Please note this does not perform a garbage collection.
623 MUST_USE_RESULT MaybeObject* AllocateRawAsciiString(
625 PretenureFlag pretenure = NOT_TENURED);
626 MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString(
628 PretenureFlag pretenure = NOT_TENURED);
630 // Computes a single character string where the character has code.
631 // A cache is used for ascii codes.
632 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
633 // failed. Please note this does not perform a garbage collection.
634 MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode(
637 // Allocate a byte array of the specified length
638 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
640 // Please note this does not perform a garbage collection.
641 MUST_USE_RESULT MaybeObject* AllocateByteArray(int length,
642 PretenureFlag pretenure);
644 // Allocate a non-tenured byte array of the specified length
645 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
647 // Please note this does not perform a garbage collection.
648 MUST_USE_RESULT MaybeObject* AllocateByteArray(int length);
650 // Allocates an external array of the specified length and type.
651 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
653 // Please note this does not perform a garbage collection.
654 MUST_USE_RESULT MaybeObject* AllocateExternalArray(
656 ExternalArrayType array_type,
657 void* external_pointer,
658 PretenureFlag pretenure);
660 // Allocate a tenured JS global property cell.
661 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
663 // Please note this does not perform a garbage collection.
664 MUST_USE_RESULT MaybeObject* AllocateJSGlobalPropertyCell(Object* value);
666 // Allocates a fixed array initialized with undefined values
667 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
669 // Please note this does not perform a garbage collection.
670 MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length,
671 PretenureFlag pretenure);
672 // Allocates a fixed array initialized with undefined values
673 MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length);
675 // Allocates an uninitialized fixed array. It must be filled by the caller.
677 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
679 // Please note this does not perform a garbage collection.
680 MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);
682 // Make a copy of src and return it. Returns
683 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
684 MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);
686 // Make a copy of src, set the map, and return the copy. Returns
687 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
688 MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);
690 // Make a copy of src and return it. Returns
691 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
692 MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray(
693 FixedDoubleArray* src);
695 // Make a copy of src, set the map, and return the copy. Returns
696 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
697 MUST_USE_RESULT MaybeObject* CopyFixedDoubleArrayWithMap(
698 FixedDoubleArray* src, Map* map);
700 // Allocates a fixed array initialized with the hole values.
701 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
703 // Please note this does not perform a garbage collection.
704 MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithHoles(
706 PretenureFlag pretenure = NOT_TENURED);
708 MUST_USE_RESULT MaybeObject* AllocateRawFixedDoubleArray(
710 PretenureFlag pretenure);
712 // Allocates a fixed double array with uninitialized values. Returns
713 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
714 // Please note this does not perform a garbage collection.
715 MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedDoubleArray(
717 PretenureFlag pretenure = NOT_TENURED);
719 // AllocateHashTable is identical to AllocateFixedArray except
720 // that the resulting object has hash_table_map as map.
721 MUST_USE_RESULT MaybeObject* AllocateHashTable(
722 int length, PretenureFlag pretenure = NOT_TENURED);
724 // Allocate a global (but otherwise uninitialized) context.
725 MUST_USE_RESULT MaybeObject* AllocateGlobalContext();
727 // Allocate a function context.
728 MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
729 JSFunction* function);
731 // Allocate a catch context.
732 MUST_USE_RESULT MaybeObject* AllocateCatchContext(JSFunction* function,
735 Object* thrown_object);
736 // Allocate a 'with' context.
737 MUST_USE_RESULT MaybeObject* AllocateWithContext(JSFunction* function,
739 JSObject* extension);
741 // Allocate a block context.
742 MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function,
744 SerializedScopeInfo* info);
746 // Allocates a new utility object in the old generation.
747 MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);
749 // Allocates a function initialized with a shared part.
750 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
752 // Please note this does not perform a garbage collection.
753 MUST_USE_RESULT MaybeObject* AllocateFunction(
755 SharedFunctionInfo* shared,
757 PretenureFlag pretenure = TENURED);
759 // Arguments object size.
760 static const int kArgumentsObjectSize =
761 JSObject::kHeaderSize + 2 * kPointerSize;
762 // Strict mode arguments has no callee so it is smaller.
763 static const int kArgumentsObjectSizeStrict =
764 JSObject::kHeaderSize + 1 * kPointerSize;
765 // Indicies for direct access into argument objects.
766 static const int kArgumentsLengthIndex = 0;
767 // callee is only valid in non-strict mode.
768 static const int kArgumentsCalleeIndex = 1;
770 // Allocates an arguments object - optionally with an elements array.
771 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
773 // Please note this does not perform a garbage collection.
774 MUST_USE_RESULT MaybeObject* AllocateArgumentsObject(
775 Object* callee, int length);
777 // Same as NewNumberFromDouble, but may return a preallocated/immutable
778 // number object (e.g., minus_zero_value_, nan_value_)
779 MUST_USE_RESULT MaybeObject* NumberFromDouble(
780 double value, PretenureFlag pretenure = NOT_TENURED);
782 // Allocated a HeapNumber from value.
783 MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
785 PretenureFlag pretenure);
786 // pretenure = NOT_TENURED
787 MUST_USE_RESULT MaybeObject* AllocateHeapNumber(double value);
789 // Converts an int into either a Smi or a HeapNumber object.
790 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
792 // Please note this does not perform a garbage collection.
793 MUST_USE_RESULT inline MaybeObject* NumberFromInt32(int32_t value);
795 // Converts an int into either a Smi or a HeapNumber object.
796 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
798 // Please note this does not perform a garbage collection.
799 MUST_USE_RESULT inline MaybeObject* NumberFromUint32(uint32_t value);
801 // Allocates a new foreign object.
802 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
804 // Please note this does not perform a garbage collection.
805 MUST_USE_RESULT MaybeObject* AllocateForeign(
806 Address address, PretenureFlag pretenure = NOT_TENURED);
808 // Allocates a new SharedFunctionInfo object.
809 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
811 // Please note this does not perform a garbage collection.
812 MUST_USE_RESULT MaybeObject* AllocateSharedFunctionInfo(Object* name);
814 // Allocates a new JSMessageObject object.
815 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
817 // Please note that this does not perform a garbage collection.
818 MUST_USE_RESULT MaybeObject* AllocateJSMessageObject(
825 Object* stack_frames);
827 // Allocates a new cons string object.
828 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
830 // Please note this does not perform a garbage collection.
831 MUST_USE_RESULT MaybeObject* AllocateConsString(String* first,
834 // Allocates a new sub string object which is a substring of an underlying
835 // string buffer stretching from the index start (inclusive) to the index
837 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
839 // Please note this does not perform a garbage collection.
840 MUST_USE_RESULT MaybeObject* AllocateSubString(
844 PretenureFlag pretenure = NOT_TENURED);
846 // Allocate a new external string object, which is backed by a string
847 // resource that resides outside the V8 heap.
848 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
850 // Please note this does not perform a garbage collection.
851 MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii(
852 const ExternalAsciiString::Resource* resource);
853 MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte(
854 const ExternalTwoByteString::Resource* resource);
856 // Finalizes an external string by deleting the associated external
857 // data and clearing the resource pointer.
858 inline void FinalizeExternalString(HeapObject* string);
860 // Allocates an uninitialized object. The memory is non-executable if the
861 // hardware and OS allow.
862 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
864 // Please note this function does not perform a garbage collection.
865 MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
866 AllocationSpace space,
867 AllocationSpace retry_space);
869 // Initialize a filler object to keep the ability to iterate over the heap
870 // when shortening objects.
871 void CreateFillerObjectAt(Address addr, int size);
873 // Makes a new native code object
874 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
875 // failed. On success, the pointer to the Code object is stored in the
876 // self_reference. This allows generated code to reference its own Code
877 // object by containing this pointer.
878 // Please note this function does not perform a garbage collection.
879 MUST_USE_RESULT MaybeObject* CreateCode(const CodeDesc& desc,
881 Handle<Object> self_reference,
882 bool immovable = false);
884 MUST_USE_RESULT MaybeObject* CopyCode(Code* code);
886 // Copy the code and scope info part of the code object, but insert
887 // the provided data as the relocation information.
888 MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);
890 // Finds the symbol for string in the symbol table.
891 // If not found, a new symbol is added to the table and returned.
892 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
894 // Please note this function does not perform a garbage collection.
895 MUST_USE_RESULT MaybeObject* LookupSymbol(Vector<const char> str);
896 MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(Vector<const char> str);
897 MUST_USE_RESULT MaybeObject* LookupTwoByteSymbol(
898 Vector<const uc16> str);
899 MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(const char* str) {
900 return LookupSymbol(CStrVector(str));
902 MUST_USE_RESULT MaybeObject* LookupSymbol(String* str);
903 MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(Handle<SeqAsciiString> string,
907 bool LookupSymbolIfExists(String* str, String** symbol);
908 bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
910 // Compute the matching symbol map for a string if possible.
911 // NULL is returned if string is in new space or not flattened.
912 Map* SymbolMapForString(String* str);
914 // Tries to flatten a string before compare operation.
916 // Returns a failure in case it was decided that flattening was
917 // necessary and failed. Note, if flattening is not necessary the
918 // string might stay non-flat even when no failure is returned.
920 // Please note this function does not perform a garbage collection.
921 MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);
923 // Converts the given boolean condition to JavaScript boolean value.
924 inline Object* ToBoolean(bool condition);
926 // Code that should be run before and after each GC. Includes some
927 // reporting/verification activities when compiled with DEBUG set.
928 void GarbageCollectionPrologue();
929 void GarbageCollectionEpilogue();
931 // Performs garbage collection operation.
932 // Returns whether there is a chance that another major GC could
933 // collect more garbage.
934 bool CollectGarbage(AllocationSpace space, GarbageCollector collector);
936 // Performs garbage collection operation.
937 // Returns whether there is a chance that another major GC could
938 // collect more garbage.
939 inline bool CollectGarbage(AllocationSpace space);
941 static const int kNoGCFlags = 0;
942 static const int kMakeHeapIterableMask = 1;
944 // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
945 // non-zero, then the slower precise sweeper is used, which leaves the heap
946 // in a state where we can iterate over the heap visiting all objects.
947 void CollectAllGarbage(int flags);
949 // Last hope GC, should try to squeeze as much as possible.
950 void CollectAllAvailableGarbage();
952 // Check whether the heap is currently iterable.
953 bool IsHeapIterable();
955 // Ensure that we have swept all spaces in such a way that we can iterate
956 // over all objects. May cause a GC.
957 void EnsureHeapIsIterable();
959 // Notify the heap that a context has been disposed.
// Increments and returns the running count of disposed contexts; the
// counter (contexts_disposed_) feeds GC heuristics elsewhere in the heap.
960 int NotifyContextDisposed() { return ++contexts_disposed_; }
962 // Utility to invoke the scavenger. This is needed in test code to
963 // ensure correct callback for weak global handles.
964 void PerformScavenge();
966 inline void increment_scan_on_scavenge_pages() {
967 scan_on_scavenge_pages_++;
968 if (FLAG_gc_verbose) {
969 PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
973 inline void decrement_scan_on_scavenge_pages() {
974 scan_on_scavenge_pages_--;
975 if (FLAG_gc_verbose) {
976 PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
// Accessor for the promotion queue — shared state read by the scavenge
// collector and set by ScavengeObject (see promotion_queue_ below).
980 PromotionQueue* promotion_queue() { return &promotion_queue_; }
983 // Utility used with flag gc-greedy.
984 void GarbageCollectionGreedyCheck();
// NOTE(review): these *prologue* registration functions are declared with
// GCEpilogueCallback. The two callback typedefs are presumably
// signature-identical so this compiles, but GCPrologueCallback would read
// correctly here — confirm against the typedef declarations before changing.
987 void AddGCPrologueCallback(
988 GCEpilogueCallback callback, GCType gc_type_filter);
989 void RemoveGCPrologueCallback(GCEpilogueCallback callback);
991 void AddGCEpilogueCallback(
992 GCEpilogueCallback callback, GCType gc_type_filter);
993 void RemoveGCEpilogueCallback(GCEpilogueCallback callback);
995 void SetGlobalGCPrologueCallback(GCCallback callback) {
996 ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
997 global_gc_prologue_callback_ = callback;
999 void SetGlobalGCEpilogueCallback(GCCallback callback) {
1000 ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
1001 global_gc_epilogue_callback_ = callback;
1004 // Heap root getters. We have versions with and without type::cast() here.
1005 // You can't use type::cast during GC because the assert fails.
1006 // TODO(1490): Try removing the unchecked accessors, now that GC marking does
1007 // not corrupt the stack.
1008 #define ROOT_ACCESSOR(type, name, camel_name) \
1010 return type::cast(roots_[k##camel_name##RootIndex]); \
1012 type* raw_unchecked_##name() { \
1013 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
1015 ROOT_LIST(ROOT_ACCESSOR)
1016 #undef ROOT_ACCESSOR
1018 // Utility type maps
1019 #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
1020 Map* name##_map() { \
1021 return Map::cast(roots_[k##Name##MapRootIndex]); \
1023 STRUCT_LIST(STRUCT_MAP_ACCESSOR)
1024 #undef STRUCT_MAP_ACCESSOR
1026 #define SYMBOL_ACCESSOR(name, str) String* name() { \
1027 return String::cast(roots_[k##name##RootIndex]); \
1029 SYMBOL_LIST(SYMBOL_ACCESSOR)
1030 #undef SYMBOL_ACCESSOR
1032 // The hidden_symbol is special because it is the empty string, but does
1033 // not match the empty string.
// Returns the cached hidden symbol (backed by hidden_symbol_, declared below).
1034 String* hidden_symbol() { return hidden_symbol_; }
1036 void set_global_contexts_list(Object* object) {
1037 global_contexts_list_ = object;
1039 Object* global_contexts_list() { return global_contexts_list_; }
1041 // Number of mark-sweeps.
1042 int ms_count() { return ms_count_; }
1044 // Iterates over all roots in the heap.
1045 void IterateRoots(ObjectVisitor* v, VisitMode mode);
1046 // Iterates over all strong roots in the heap.
1047 void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
1048 // Iterates over all the other roots in the heap.
1049 void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
1051 // Iterate pointers to from semispace of new space found in memory interval
1052 // from start to end.
1053 void IterateAndMarkPointersToFromSpace(Address start,
1055 ObjectSlotCallback callback);
1057 // Returns whether the object resides in new space.
1058 inline bool InNewSpace(Object* object);
1059 inline bool InNewSpace(Address addr);
1060 inline bool InNewSpacePage(Address addr);
1061 inline bool InFromSpace(Object* object);
1062 inline bool InToSpace(Object* object);
1064 // Checks whether an address/object is in the heap (including the
1065 // auxiliary area and unused area).
1066 bool Contains(Address addr);
1067 bool Contains(HeapObject* value);
1069 // Checks whether an address/object is in a space.
1070 // Currently used by tests, serialization and heap verification only.
1071 bool InSpace(Address addr, AllocationSpace space);
1072 bool InSpace(HeapObject* value, AllocationSpace space);
1074 // Finds out which space an object should get promoted to based on its type.
1075 inline OldSpace* TargetSpace(HeapObject* object);
1076 inline AllocationSpace TargetSpaceId(InstanceType type);
1078 // Sets the stub_cache_ (only used when expanding the dictionary).
1079 void public_set_code_stubs(NumberDictionary* value) {
1080 roots_[kCodeStubsRootIndex] = value;
1083 // Support for computing object sizes for old objects during GCs. Returns
1084 // a function that is guaranteed to be safe for computing object sizes in
1085 // the current GC phase.
1086 HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
1087 return gc_safe_size_of_old_object_;
1090 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
1091 void public_set_non_monomorphic_cache(NumberDictionary* value) {
1092 roots_[kNonMonomorphicCacheRootIndex] = value;
1095 void public_set_empty_script(Script* script) {
1096 roots_[kEmptyScriptRootIndex] = script;
1099 void public_set_store_buffer_top(Address* top) {
1100 roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
1103 // Update the next script id.
1104 inline void SetLastScriptId(Object* last_script_id);
1106 // Generated code can embed this address to get access to the roots.
// Returns the base address of the roots_ member array; the address is fixed
// for the lifetime of this Heap instance, so it is safe to embed in code.
1107 Object** roots_array_start() { return roots_; }
1109 Address* store_buffer_top_address() {
1110 return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
1113 // Get address of global contexts list for serialization support.
1114 Object** global_contexts_list_address() {
1115 return &global_contexts_list_;
1120 void PrintHandles();
1122 // Verify the heap is in its normal state before or after a GC.
1125 void OldPointerSpaceCheckStoreBuffer();
1126 void MapSpaceCheckStoreBuffer();
1127 void LargeObjectSpaceCheckStoreBuffer();
1129 // Report heap statistics.
1130 void ReportHeapStatistics(const char* title);
1131 void ReportCodeStatistics(const char* title);
1133 // Fill in bogus values in from space
1134 void ZapFromSpace();
1137 // Print short heap statistics.
1138 void PrintShortHeapStatistics();
1140 // Makes a new symbol object
1141 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
1143 // Please note this function does not perform a garbage collection.
1144 MUST_USE_RESULT MaybeObject* CreateSymbol(
1145 const char* str, int length, int hash);
1146 MUST_USE_RESULT MaybeObject* CreateSymbol(String* str);
1148 // Write barrier support for address[offset] = o.
1149 inline void RecordWrite(Address address, int offset);
1151 // Write barrier support for address[start : start + len[ = o.
1152 inline void RecordWrites(Address address, int start, int len);
1154 // Given an address occupied by a live code object, return that object.
1155 Object* FindCodeObject(Address a);
1157 // Invoke Shrink on shrinkable spaces.
1160 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
// Current GC phase of the heap (backed by gc_state_).
1161 inline HeapState gc_state() { return gc_state_; }
// True while post-GC weak-handle processing is running (depth > 0).
1163 inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
// Whether allocation is currently permitted; toggled via allow_allocation().
1166 bool IsAllocationAllowed() { return allocation_allowed_; }
1167 inline bool allow_allocation(bool enable);
1169 bool disallow_allocation_failure() {
1170 return disallow_allocation_failure_;
1173 void TracePathToObject(Object* target);
1174 void TracePathToGlobal();
1177 // Callback function passed to Heap::Iterate etc. Copies an object if
1178 // necessary, the object might be promoted to an old space. The caller must
1179 // ensure the precondition that the object is (a) a heap object and (b) in
1180 // the heap's from space.
1181 static inline void ScavengePointer(HeapObject** p);
1182 static inline void ScavengeObject(HeapObject** p, HeapObject* object);
1184 // Commits from space if it is uncommitted.
1185 void EnsureFromSpaceIsCommitted();
1187 // Support for partial snapshots. After calling this we can allocate a
1188 // certain number of bytes using only linear allocation (with a
1189 // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
1190 // or causing a GC. It returns true if space was reserved or false if a GC is
1191 // needed. For paged spaces the space requested must include the space wasted
1192 // at the end of each page when allocating linearly.
1195 int pointer_space_size,
1196 int data_space_size,
1197 int code_space_size,
1199 int cell_space_size,
1200 int large_object_size);
1203 // Support for the API.
1206 bool CreateApiObjects();
1208 // Attempt to find the number in a small cache. If we find it, return
1209 // the string representation of the number. Otherwise return undefined.
1210 Object* GetNumberStringCache(Object* number);
1212 // Update the cache with a new number-string pair.
1213 void SetNumberStringCache(Object* number, String* str);
1215 // Adjusts the amount of registered external memory.
1216 // Returns the adjusted value.
1217 inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
1219 // Allocate uninitialized fixed array.
1220 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
1221 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
1222 PretenureFlag pretenure);
1224 inline intptr_t PromotedTotalSize() {
1225 return PromotedSpaceSize() + PromotedExternalMemorySize();
1228 // True if we have reached the allocation limit in the old generation that
1229 // should force the next GC (caused normally) to be a full one.
1230 inline bool OldGenerationPromotionLimitReached() {
1231 return PromotedTotalSize() > old_gen_promotion_limit_;
1234 inline intptr_t OldGenerationSpaceAvailable() {
1235 return old_gen_allocation_limit_ - PromotedTotalSize();
1238 static const intptr_t kMinimumPromotionLimit = 5 * Page::kPageSize;
1239 static const intptr_t kMinimumAllocationLimit =
1240 8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
1242 // When we sweep lazily we initially guess that there is no garbage on the
1243 // heap and set the limits for the next GC accordingly. As we sweep we find
1244 // out that some of the pages contained garbage and we have to adjust
1245 // downwards the size of the heap. This means the limits that control the
1246 // timing of the next GC also need to be adjusted downwards.
1247 void LowerOldGenLimits(intptr_t adjustment) {
1248 size_of_old_gen_at_last_old_space_gc_ -= adjustment;
1249 old_gen_promotion_limit_ =
1250 OldGenPromotionLimit(size_of_old_gen_at_last_old_space_gc_);
1251 old_gen_allocation_limit_ =
1252 OldGenAllocationLimit(size_of_old_gen_at_last_old_space_gc_);
1255 intptr_t OldGenPromotionLimit(intptr_t old_gen_size) {
1256 const int divisor = FLAG_stress_compaction ? 10 : 3;
1258 Max(old_gen_size + old_gen_size / divisor, kMinimumPromotionLimit);
1259 limit += new_space_.Capacity();
1260 limit *= old_gen_limit_factor_;
1264 intptr_t OldGenAllocationLimit(intptr_t old_gen_size) {
1265 const int divisor = FLAG_stress_compaction ? 8 : 2;
1267 Max(old_gen_size + old_gen_size / divisor, kMinimumAllocationLimit);
1268 limit += new_space_.Capacity();
1269 limit *= old_gen_limit_factor_;
1273 // Can be called when the embedding application is idle.
1274 bool IdleNotification();
1276 // Declare all the root indices.
1277 enum RootListIndex {
1278 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1279 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
1280 #undef ROOT_INDEX_DECLARATION
1282 // Utility type maps
1283 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1284 STRUCT_LIST(DECLARE_STRUCT_MAP)
1285 #undef DECLARE_STRUCT_MAP
1287 #define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
1288 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
1289 #undef SYMBOL_DECLARATION
1291 kSymbolTableRootIndex,
1292 kStrongRootListLength = kSymbolTableRootIndex,
1296 MUST_USE_RESULT MaybeObject* NumberToString(
1297 Object* number, bool check_number_string_cache = true);
1298 MUST_USE_RESULT MaybeObject* Uint32ToString(
1299 uint32_t value, bool check_number_string_cache = true);
1301 Map* MapForExternalArrayType(ExternalArrayType array_type);
1302 RootListIndex RootIndexForExternalArrayType(
1303 ExternalArrayType array_type);
1305 void RecordStats(HeapStats* stats, bool take_snapshot = false);
1307 // Copy block of memory from src to dst. Size of block should be aligned
1309 static inline void CopyBlock(Address dst, Address src, int byte_size);
1311 // Optimized version of memmove for blocks with pointer size aligned sizes and
1312 // pointer size aligned addresses.
1313 static inline void MoveBlock(Address dst, Address src, int byte_size);
1315 // Check new space expansion criteria and expand semispaces if it was hit.
1316 void CheckNewSpaceExpansionCriteria();
1318 inline void IncrementYoungSurvivorsCounter(int survived) {
1319 young_survivors_after_last_gc_ = survived;
1320 survived_since_last_expansion_ += survived;
1323 inline bool NextGCIsLikelyToBeFull() {
1324 if (FLAG_gc_global) return true;
1326 intptr_t total_promoted = PromotedTotalSize();
1328 intptr_t adjusted_promotion_limit =
1329 old_gen_promotion_limit_ - new_space_.Capacity();
1331 if (total_promoted >= adjusted_promotion_limit) return true;
1333 intptr_t adjusted_allocation_limit =
1334 old_gen_allocation_limit_ - new_space_.Capacity() / 5;
1336 if (PromotedSpaceSize() >= adjusted_allocation_limit) return true;
1342 void UpdateNewSpaceReferencesInExternalStringTable(
1343 ExternalStringTableUpdaterCallback updater_func);
1345 void UpdateReferencesInExternalStringTable(
1346 ExternalStringTableUpdaterCallback updater_func);
1348 void ProcessWeakReferences(WeakObjectRetainer* retainer);
1350 // Helper function that governs the promotion policy from new space to
1351 // old. If the object's old address lies below the new space's age
1352 // mark or if we've already filled the bottom 1/16th of the to space,
1353 // we try to promote this object.
1354 inline bool ShouldBePromoted(Address old_address, int object_size);
1356 int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
1358 void ClearJSFunctionResultCaches();
1360 void ClearNormalizedMapCaches();
1362 GCTracer* tracer() { return tracer_; }
1364 // Returns the size of objects residing in non new spaces.
1365 intptr_t PromotedSpaceSize();
1367 double total_regexp_code_generated() { return total_regexp_code_generated_; }
1368 void IncreaseTotalRegexpCodeGenerated(int size) {
1369 total_regexp_code_generated_ += size;
1372 // Returns maximum GC pause.
1373 int get_max_gc_pause() { return max_gc_pause_; }
1375 // Returns maximum size of objects alive after GC.
1376 intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1378 // Returns minimal interval between two subsequent collections.
1379 int get_min_in_mutator() { return min_in_mutator_; }
1381 MarkCompactCollector* mark_compact_collector() {
1382 return &mark_compact_collector_;
1385 StoreBuffer* store_buffer() {
1386 return &store_buffer_;
1389 Marking* marking() {
1393 IncrementalMarking* incremental_marking() {
1394 return &incremental_marking_;
1397 ExternalStringTable* external_string_table() {
1398 return &external_string_table_;
1401 // Returns the current sweep generation.
1402 int sweep_generation() {
1403 return sweep_generation_;
1406 inline Isolate* isolate();
1408 inline void CallGlobalGCPrologueCallback() {
1409 if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_();
1412 inline void CallGlobalGCEpilogueCallback() {
1413 if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_();
1416 inline bool OldGenerationAllocationLimitReached();
1418 inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
1419 scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
1422 void QueueMemoryChunkForFree(MemoryChunk* chunk);
1423 void FreeQueuedChunks();
1425 // Completely clear the Instanceof cache (to stop it keeping objects alive
1427 inline void CompletelyClearInstanceofCache();
1429 // The roots that have an index less than this are always in old space.
1430 static const int kOldSpaceRoots = 0x20;
1435 // This can be calculated directly from a pointer to the heap; however, it is
1436 // more expedient to get at the isolate directly from within Heap methods.
1439 intptr_t code_range_size_;
1440 int reserved_semispace_size_;
1441 int max_semispace_size_;
1442 int initial_semispace_size_;
1443 intptr_t max_old_generation_size_;
1444 intptr_t max_executable_size_;
1446 // For keeping track of how much data has survived
1447 // scavenge since last new space expansion.
1448 int survived_since_last_expansion_;
1450 // For keeping track on when to flush RegExp code.
1451 int sweep_generation_;
1453 int always_allocate_scope_depth_;
1454 int linear_allocation_scope_depth_;
1456 // For keeping track of context disposals.
1457 int contexts_disposed_;
1459 int scan_on_scavenge_pages_;
1461 #if defined(V8_TARGET_ARCH_X64)
1462 static const int kMaxObjectSizeInNewSpace = 1024*KB;
1464 static const int kMaxObjectSizeInNewSpace = 512*KB;
1467 NewSpace new_space_;
1468 OldSpace* old_pointer_space_;
1469 OldSpace* old_data_space_;
1470 OldSpace* code_space_;
1471 MapSpace* map_space_;
1472 CellSpace* cell_space_;
1473 LargeObjectSpace* lo_space_;
1474 HeapState gc_state_;
1475 int gc_post_processing_depth_;
1477 // Returns the amount of external memory registered since last global gc.
1478 int PromotedExternalMemorySize();
1480 int ms_count_; // how many mark-sweep collections happened
1481 unsigned int gc_count_; // how many gc happened
1483 // Total length of the strings we failed to flatten since the last GC.
1484 int unflattened_strings_length_;
1486 #define ROOT_ACCESSOR(type, name, camel_name) \
1487 inline void set_##name(type* value) { \
1488 /* The deserializer makes use of the fact that these common roots are */ \
1489 /* never in new space and never on a page that is being compacted. */ \
1490 ASSERT(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
1491 roots_[k##camel_name##RootIndex] = value; \
1493 ROOT_LIST(ROOT_ACCESSOR)
1494 #undef ROOT_ACCESSOR
1497 bool allocation_allowed_;
1499 // If the --gc-interval flag is set to a positive value, this
1500 // variable holds the value indicating the number of allocations
1501 // remain until the next failure and garbage collection.
1502 int allocation_timeout_;
1504 // Do we expect to be able to handle allocation failure at this
1506 bool disallow_allocation_failure_;
1508 HeapDebugUtils* debug_utils_;
1511 // Limit that triggers a global GC on the next (normally caused) GC. This
1512 // is checked when we have already decided to do a GC to help determine
1513 // which collector to invoke.
1514 intptr_t old_gen_promotion_limit_;
1516 // Limit that triggers a global GC as soon as is reasonable. This is
1517 // checked before expanding a paged space in the old generation and on
1518 // every allocation in large object space.
1519 intptr_t old_gen_allocation_limit_;
1521 // Sometimes the heuristics dictate that those limits are increased. This
1522 // variable records that fact.
1523 int old_gen_limit_factor_;
1525 // Used to adjust the limits that control the timing of the next GC.
1526 intptr_t size_of_old_gen_at_last_old_space_gc_;
1528 // Limit on the amount of externally allocated memory allowed
1529 // between global GCs. If reached a global GC is forced.
1530 intptr_t external_allocation_limit_;
1532 // The amount of external memory registered through the API kept alive
1533 // by global handles
1534 int amount_of_external_allocated_memory_;
1536 // Caches the amount of external memory registered at the last global gc.
1537 int amount_of_external_allocated_memory_at_last_global_gc_;
1539 // Indicates that an allocation has failed in the old generation since the
1541 int old_gen_exhausted_;
1543 Object* roots_[kRootListLength];
1545 Object* global_contexts_list_;
1547 StoreBufferRebuilder store_buffer_rebuilder_;
1549 struct StringTypeTable {
1552 RootListIndex index;
1555 struct ConstantSymbolTable {
1556 const char* contents;
1557 RootListIndex index;
1560 struct StructTable {
1563 RootListIndex index;
1566 static const StringTypeTable string_type_table[];
1567 static const ConstantSymbolTable constant_symbol_table[];
1568 static const StructTable struct_table[];
1570 // The special hidden symbol which is an empty string, but does not match
1571 // any string when looked up in properties.
1572 String* hidden_symbol_;
1574 // GC callback function, called before and after mark-compact GC.
1575 // Allocations in the callback function are disallowed.
1576 struct GCPrologueCallbackPair {
1577 GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
1578 : callback(callback), gc_type(gc_type) {
1580 bool operator==(const GCPrologueCallbackPair& pair) const {
1581 return pair.callback == callback;
1583 GCPrologueCallback callback;
1586 List<GCPrologueCallbackPair> gc_prologue_callbacks_;
1588 struct GCEpilogueCallbackPair {
1589 GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
1590 : callback(callback), gc_type(gc_type) {
1592 bool operator==(const GCEpilogueCallbackPair& pair) const {
1593 return pair.callback == callback;
1595 GCEpilogueCallback callback;
1598 List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
1600 GCCallback global_gc_prologue_callback_;
1601 GCCallback global_gc_epilogue_callback_;
1603 // Support for computing object sizes during GC.
1604 HeapObjectCallback gc_safe_size_of_old_object_;
1605 static int GcSafeSizeOfOldObject(HeapObject* object);
1607 // Update the GC state. Called from the mark-compact collector.
1608 void MarkMapPointersAsEncoded(bool encoded) {
1610 gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
1613 // Checks whether a global GC is necessary
1614 GarbageCollector SelectGarbageCollector(AllocationSpace space);
1616 // Performs garbage collection
1617 // Returns whether there is a chance another major GC could
1618 // collect more garbage.
1619 bool PerformGarbageCollection(GarbageCollector collector,
1623 inline void UpdateOldSpaceLimits();
1626 // Allocate an uninitialized object in map space. The behavior is identical
1627 // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
1628 // have to test the allocation space argument and (b) can reduce code size
1629 // (since both AllocateRaw and AllocateRawMap are inlined).
1630 MUST_USE_RESULT inline MaybeObject* AllocateRawMap();
1632 // Allocate an uninitialized object in the global property cell space.
1633 MUST_USE_RESULT inline MaybeObject* AllocateRawCell();
1635 // Initializes a JSObject based on its map.
1636 void InitializeJSObjectFromMap(JSObject* obj,
1637 FixedArray* properties,
1640 bool CreateInitialMaps();
1641 bool CreateInitialObjects();
1643 // These five Create*EntryStub functions are here and forced to not be inlined
1644 // because of a gcc-4.4 bug that assigns wrong vtable entries.
1645 NO_INLINE(void CreateJSEntryStub());
1646 NO_INLINE(void CreateJSConstructEntryStub());
1648 void CreateFixedStubs();
1650 MaybeObject* CreateOddball(const char* to_string,
1654 // Allocate empty fixed array.
1655 MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
1657 // Allocate empty fixed double array.
1658 MUST_USE_RESULT MaybeObject* AllocateEmptyFixedDoubleArray();
1660 // Performs a minor collection in new generation.
1663 static HeapObject* UpdateNewSpaceReferenceInExternalStringTableEntry(
1667 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1668 static void ScavengeStoreBufferCallback(Heap* heap,
1670 StoreBufferEvent event);
1672 // Performs a major collection in the whole heap.
1673 void MarkCompact(GCTracer* tracer);
1675 // Code to be run before and after mark-compact.
1676 void MarkCompactPrologue();
1678 // Record statistics before and after garbage collection.
1679 void ReportStatisticsBeforeGC();
1680 void ReportStatisticsAfterGC();
1682 // Slow part of scavenge object.
1683 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1685 // Initializes a function with a shared part and prototype.
1686 // Note: this code was factored out of AllocateFunction such that
1687 // other parts of the VM could use it. Specifically, a function that creates
1688 // instances of type JS_FUNCTION_TYPE benefit from the use of this function.
1689 // Please note this does not perform a garbage collection.
1690 inline void InitializeFunction(
1691 JSFunction* function,
1692 SharedFunctionInfo* shared,
1695 // Total RegExp code ever generated
1696 double total_regexp_code_generated_;
1701 // Initializes the number to string cache based on the max semispace size.
1702 MUST_USE_RESULT MaybeObject* InitializeNumberStringCache();
1703 // Flush the number to string cache.
1704 void FlushNumberStringCache();
1706 void UpdateSurvivalRateTrend(int start_new_space_size);
1708 enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
1710 static const int kYoungSurvivalRateThreshold = 90;
1711 static const int kYoungSurvivalRateAllowedDeviation = 15;
1713 int young_survivors_after_last_gc_;
1714 int high_survival_rate_period_length_;
1715 double survival_rate_;
1716 SurvivalRateTrend previous_survival_rate_trend_;
1717 SurvivalRateTrend survival_rate_trend_;
1719 void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {  // records a new trend, shifting the current one into previous_
1720 ASSERT(survival_rate_trend != FLUCTUATING);  // FLUCTUATING is a derived value, never stored
1721 previous_survival_rate_trend_ = survival_rate_trend_;
1722 survival_rate_trend_ = survival_rate_trend;
1725 SurvivalRateTrend survival_rate_trend() {  // smoothed view over the last two recorded trends
1726 if (survival_rate_trend_ == STABLE) {
1728 } else if (previous_survival_rate_trend_ == STABLE) {  // previous GC was stable: report newest trend as-is
1729 return survival_rate_trend_;
1730 } else if (survival_rate_trend_ != previous_survival_rate_trend_) {  // two different non-stable trends in a row
1733 return survival_rate_trend_;
1737 bool IsStableOrIncreasingSurvivalTrend() {
1738 switch (survival_rate_trend()) {
1747 bool IsIncreasingSurvivalTrend() {
1748 return survival_rate_trend() == INCREASING;
1751 bool IsHighSurvivalRate() {  // true while the high-survival streak counter is non-zero
1752 return high_survival_rate_period_length_ > 0;
1755 void SelectScavengingVisitorsTable();  // presumably installs scavenging_visitors_table_ below — confirm in heap.cc
1757 static const int kInitialSymbolTableSize = 2048;
1758 static const int kInitialEvalCacheSize = 64;
1760 // Maximum GC pause.
1763 // Maximum size of objects alive after GC.
1764 intptr_t max_alive_after_gc_;
1766 // Minimal interval between two subsequent collections.
1767 int min_in_mutator_;
1769 // Size of objects alive after last GC.
1770 intptr_t alive_after_last_gc_;
1772 double last_gc_end_timestamp_;  // NOTE(review): presumably milliseconds (OS::TimeCurrentMillis) — confirm
1774 MarkCompactCollector mark_compact_collector_;
1776 StoreBuffer store_buffer_;
1780 IncrementalMarking incremental_marking_;
1782 int number_idle_notifications_;
1783 unsigned int last_idle_notification_gc_count_;
1784 bool last_idle_notification_gc_count_init_;
1786 // Shared state read by the scavenge collector and set by ScavengeObject.
1787 PromotionQueue promotion_queue_;
1789 // Flag is set when the heap has been configured. The heap can be repeatedly
1790 // configured through the API until it is set up.
1793 ExternalStringTable external_string_table_;
1795 VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
1797 MemoryChunk* chunks_queued_for_free_;
1799 friend class Factory;
1800 friend class GCTracer;
1801 friend class DisallowAllocationFailure;
1802 friend class AlwaysAllocateScope;
1803 friend class LinearAllocationScope;
1805 friend class Isolate;
1806 friend class MarkCompactCollector;
1807 friend class StaticMarkingVisitor;
1808 friend class MapCompact;
1810 DISALLOW_COPY_AND_ASSIGN(Heap);
1816 static const int kStartMarker = 0xDECADE00;  // sentinel stored through start_marker to validate a snapshot
1817 static const int kEndMarker = 0xDECADE01;  // sentinel stored through end_marker
1819 int* start_marker; // 0
1820 int* new_space_size; // 1
1821 int* new_space_capacity; // 2
1822 intptr_t* old_pointer_space_size; // 3
1823 intptr_t* old_pointer_space_capacity; // 4
1824 intptr_t* old_data_space_size; // 5
1825 intptr_t* old_data_space_capacity; // 6
1826 intptr_t* code_space_size; // 7
1827 intptr_t* code_space_capacity; // 8
1828 intptr_t* map_space_size; // 9
1829 intptr_t* map_space_capacity; // 10
1830 intptr_t* cell_space_size; // 11
1831 intptr_t* cell_space_capacity; // 12
1832 intptr_t* lo_space_size; // 13
1833 int* global_handle_count; // 14
1834 int* weak_global_handle_count; // 15
1835 int* pending_global_handle_count; // 16
1836 int* near_death_global_handle_count; // 17
1837 int* free_global_handle_count; // 18
1838 intptr_t* memory_allocator_size; // 19
1839 intptr_t* memory_allocator_capacity; // 20
1840 int* objects_per_type; // 21
1841 int* size_per_type; // 22
1842 int* os_error; // 23
1843 int* end_marker; // 24
1847 class AlwaysAllocateScope {  // RAII guard that increments HEAP->always_allocate_scope_depth_ for its lifetime
1849 AlwaysAllocateScope() {
1850 // We shouldn't hit any nested scopes, because that requires
1851 // non-handle code to call handle code. The code still works but
1852 // performance will degrade, so we want to catch this situation
1854 ASSERT(HEAP->always_allocate_scope_depth_ == 0);
1855 HEAP->always_allocate_scope_depth_++;
1858 ~AlwaysAllocateScope() {
1859 HEAP->always_allocate_scope_depth_--;
1860 ASSERT(HEAP->always_allocate_scope_depth_ == 0);  // must be back at zero: scopes never nest (see ctor)
1865 class LinearAllocationScope {  // RAII guard around HEAP->linear_allocation_scope_depth_ (nesting allowed, unlike AlwaysAllocateScope)
1867 LinearAllocationScope() {
1868 HEAP->linear_allocation_scope_depth_++;
1871 ~LinearAllocationScope() {
1872 HEAP->linear_allocation_scope_depth_--;
1873 ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);  // depth may be >0 here, so nested scopes are tolerated
1879 // Visitor class to verify interior pointers in spaces that do not contain
1880 // or care about intergenerational references. All heap object pointers have to
1881 // point into the heap to a location that has a map pointer at its first word.
1882 // Caveat: Heap::Contains is an approximation because it can return true for
1883 // objects in a heap space but above the allocation pointer.
1884 class VerifyPointersVisitor: public ObjectVisitor {
1886 void VisitPointers(Object** start, Object** end) {
1887 for (Object** current = start; current < end; current++) {
1888 if ((*current)->IsHeapObject()) {  // Smis and other immediates carry no pointer to verify
1889 HeapObject* object = HeapObject::cast(*current);
1890 ASSERT(HEAP->Contains(object));  // pointer must land inside the heap (approximate, see caveat above)
1891 ASSERT(object->map()->IsMap());  // first word must be a valid map pointer
1899 // Space iterator for iterating over all spaces of the heap.
1900 // Returns each space in turn, and null when it is done.
1901 class AllSpaces BASE_EMBEDDED {
1904 AllSpaces() { counter_ = FIRST_SPACE; }  // begin iteration at the first space in the AllocationSpace enum
1910 // Space iterator for iterating over all old spaces of the heap: Old pointer
1911 // space, old data space and code space.
1912 // Returns each space in turn, and null when it is done.
1913 class OldSpaces BASE_EMBEDDED {
1916 OldSpaces() { counter_ = OLD_POINTER_SPACE; }  // start with old pointer space; enum order supplies the rest
1922 // Space iterator for iterating over all the paged spaces of the heap:
1923 // Map space, old pointer space, old data space, code space and cell space.
1924 // Returns each space in turn, and null when it is done.
1925 class PagedSpaces BASE_EMBEDDED {
1928 PagedSpaces() { counter_ = OLD_POINTER_SPACE; }  // start with old pointer space; enum order supplies the rest
1934 // Space iterator for iterating over all spaces of the heap.
1935 // For each space an object iterator is provided. The deallocation of the
1936 // returned object iterators is handled by the space iterator.
1937 class SpaceIterator : public Malloced {
1940 explicit SpaceIterator(HeapObjectCallback size_func);  // size_func is stored below — presumably forwarded to each space's object iterator; confirm
1941 virtual ~SpaceIterator();
1944 ObjectIterator* next();  // object iterator for the next space, per the class comment above
1947 ObjectIterator* CreateIterator();
1949 int current_space_; // from enum AllocationSpace.
1950 ObjectIterator* iterator_; // object iterator for the current space.
1951 HeapObjectCallback size_func_;
1955 // A HeapIterator provides iteration over the whole heap. It
1956 // aggregates the specific iterators for the different spaces as
1957 // each of these can iterate over only one space.
1959 // HeapIterator can skip free list nodes (that is, de-allocated heap
1960 // objects that still remain in the heap). Because the filtering of free
1961 // nodes uses GC marks, it can't be used during MS/MC GC
1962 // phases. Also, it is forbidden to interrupt iteration in this mode,
1963 // as this will leave heap objects marked (and thus, unusable).
1964 class HeapObjectsFilter;
1966 class HeapIterator BASE_EMBEDDED {
1968 enum HeapObjectsFiltering {
1974 explicit HeapIterator(HeapObjectsFiltering filtering);
1981 // Perform the initialization.
1983 // Perform all necessary shutdown (destruction) work.
1985 HeapObject* NextObject();  // underlying step used by iteration; filtering applied per filter_ — confirm in heap.cc
1987 HeapObjectsFiltering filtering_;
1988 HeapObjectsFilter* filter_;
1989 // Space iterator for iterating all the spaces.
1990 SpaceIterator* space_iterator_;
1991 // Object iterator for the space currently being iterated.
1992 ObjectIterator* object_iterator_;
1996 // Cache for mapping (map, property name) into field offset.
1997 // Cleared at startup and prior to mark sweep collection.
1998 class KeyedLookupCache {
2000 // Lookup field offset for (map, name). If absent, -1 is returned.
2001 int Lookup(Map* map, String* name);
2003 // Update an element in the cache.
2004 void Update(Map* map, String* name, int field_offset);
2009 static const int kLength = 64;
2010 static const int kCapacityMask = kLength - 1;  // kLength is a power of two, so this masks a hash into an index
2011 static const int kMapHashShift = 2;
2012 static const int kNotFound = -1;
2015 KeyedLookupCache() {
2016 for (int i = 0; i < kLength; ++i) {  // start with every slot empty
2017 keys_[i].map = NULL;
2018 keys_[i].name = NULL;
2019 field_offsets_[i] = kNotFound;
2023 static inline int Hash(Map* map, String* name);
2025 // Get the address of the keys and field_offsets arrays. Used in
2026 // generated code to perform cache lookups.
2027 Address keys_address() {
2028 return reinterpret_cast<Address>(&keys_);
2031 Address field_offsets_address() {
2032 return reinterpret_cast<Address>(&field_offsets_);
2041 int field_offsets_[kLength];
2043 friend class ExternalReference;
2044 friend class Isolate;
2045 DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
2049 // Cache for mapping (array, property name) into descriptor index.
2050 // The cache contains both positive and negative results.
2051 // Descriptor index equal to kAbsent means the property is absent.
2052 // Cleared at startup and prior to any gc.
2053 class DescriptorLookupCache {
2055 // Lookup descriptor index for (array, name).
2056 // If absent, kAbsent is returned.
2057 int Lookup(DescriptorArray* array, String* name) {
2058 if (!StringShape(name).IsSymbol()) return kAbsent;  // only symbols are cached; key comparison below is by pointer identity
2059 int index = Hash(array, name);
2060 Key& key = keys_[index];
2061 if ((key.array == array) && (key.name == name)) return results_[index];
2065 // Update an element in the cache.
2066 void Update(DescriptorArray* array, String* name, int result) {
2067 ASSERT(result != kAbsent);  // kAbsent is reserved to mean "slot empty / property absent"
2068 if (StringShape(name).IsSymbol()) {  // mirror of the Lookup guard: non-symbols are never cached
2069 int index = Hash(array, name);
2070 Key& key = keys_[index];
2073 results_[index] = result;
2080 static const int kAbsent = -2;
2083 DescriptorLookupCache() {
2084 for (int i = 0; i < kLength; ++i) {  // start with every slot empty
2085 keys_[i].array = NULL;
2086 keys_[i].name = NULL;
2087 results_[i] = kAbsent;
2091 static int Hash(DescriptorArray* array, String* name) {
2092 // Uses only lower 32 bits if pointers are larger.
2093 uint32_t array_hash =
2094 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;  // shift out low bits (presumably pointer alignment/tag)
2095 uint32_t name_hash =
2096 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
2097 return (array_hash ^ name_hash) % kLength;
2100 static const int kLength = 64;
2102 DescriptorArray* array;
2107 int results_[kLength];
2109 friend class Isolate;
2110 DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
2114 // A helper class to document/test C++ scopes where we do not
2115 // expect a GC. Usage:
2117 // /* Allocation not allowed: we cannot handle a GC in this scope. */
2118 // { AssertNoAllocation nogc;
2124 class DisallowAllocationFailure {  // RAII: saves and forces HEAP->disallow_allocation_failure_ for the scope
2126 DisallowAllocationFailure() {
2127 old_state_ = HEAP->disallow_allocation_failure_;  // remember previous state so scopes can nest
2128 HEAP->disallow_allocation_failure_ = true;
2130 ~DisallowAllocationFailure() {
2131 HEAP->disallow_allocation_failure_ = old_state_;
2137 class AssertNoAllocation {  // RAII: turns HEAP allocation off for the scope, restoring the prior state on exit
2139 AssertNoAllocation() {
2140 old_state_ = HEAP->allow_allocation(false);
2143 ~AssertNoAllocation() {
2144 HEAP->allow_allocation(old_state_);
2151 class DisableAssertNoAllocation {  // RAII: re-enables allocation inside an enclosing AssertNoAllocation scope
2153 DisableAssertNoAllocation() {
2154 old_state_ = HEAP->allow_allocation(true);
2157 ~DisableAssertNoAllocation() {
2158 HEAP->allow_allocation(old_state_);
2167 class AssertNoAllocation {  // no-op stand-in; presumably the non-DEBUG branch of a #if — confirm (preprocessor context elided)
2169 AssertNoAllocation() { }
2170 ~AssertNoAllocation() { }
2173 class DisableAssertNoAllocation {  // no-op stand-in matching the AssertNoAllocation stub above
2175 DisableAssertNoAllocation() { }
2176 ~DisableAssertNoAllocation() { }
2181 // GCTracer collects and prints ONE line after each garbage collector
2182 // invocation IFF --trace_gc is used.
2184 class GCTracer BASE_EMBEDDED {
2186 class Scope BASE_EMBEDDED {  // RAII timer: accumulates wall-clock time into the tracer's per-scope bucket
2198 Scope(GCTracer* tracer, ScopeId scope)
2201 start_time_ = OS::TimeCurrentMillis();  // start the clock; elapsed time is added in the destructor
2205 ASSERT(scope_ < kNumberOfScopes); // scope_ is unsigned.
2206 tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
2215 explicit GCTracer(Heap* heap);
2218 // Sets the collector.
2219 void set_collector(GarbageCollector collector) { collector_ = collector; }
2221 // Sets the GC count.
2222 void set_gc_count(unsigned int count) { gc_count_ = count; }
2224 // Sets the full GC count.
2225 void set_full_gc_count(int count) { full_gc_count_ = count; }
2227 void increment_promoted_objects_size(int object_size) {
2228 promoted_objects_size_ += object_size;
2232 // Returns a string matching the collector.
2233 const char* CollectorString();
2235 // Returns size of objects in heap (in MB).
2236 double SizeOfHeapObjects() {
2237 return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
2240 double start_time_; // Timestamp set in the constructor.
2241 intptr_t start_size_; // Size of objects in heap set in constructor.
2242 GarbageCollector collector_; // Type of collector.
2244 // A count (including this one, e.g. the first collection is 1) of the
2245 // number of garbage collections.
2246 unsigned int gc_count_;
2248 // A count (including this one) of the number of full garbage collections.
2251 // Amounts of time spent in different scopes during GC.
2252 double scopes_[Scope::kNumberOfScopes];
2254 // Total amount of space either wasted or contained in one of free lists
2255 // before the current GC.
2256 intptr_t in_free_list_or_wasted_before_gc_;
2258 // Difference between space used in the heap at the beginning of the current
2259 // collection and the end of the previous collection.
2260 intptr_t allocated_since_last_gc_;
2262 // Amount of time spent in mutator that is time elapsed between end of the
2263 // previous collection and the beginning of the current one.
2264 double spent_in_mutator_;
2266 // Size of objects promoted during the current collection.
2267 intptr_t promoted_objects_size_;
2269 // Incremental marking steps counters.
2272 double longest_step_;
2273 int steps_count_since_last_gc_;
2274 double steps_took_since_last_gc_;
2280 class StringSplitCache {  // caches String.prototype.split results keyed by (subject string, pattern)
2282 static Object* Lookup(FixedArray* cache, String* string, String* pattern);
2283 static void Enter(Heap* heap,
2288 static void Clear(FixedArray* cache);
2289 static const int kStringSplitCacheSize = 0x100;
2292 static const int kArrayEntriesPerCacheEntry = 4;  // each entry spans 4 FixedArray slots; offsets below index within an entry
2293 static const int kStringOffset = 0;
2294 static const int kPatternOffset = 1;
2295 static const int kArrayOffset = 2;
2297 static MaybeObject* WrapFixedArrayInJSArray(Object* fixed_array);
2301 class TranscendentalCache {  // per-isolate memoization of transcendental math results, one SubCache per function
2303 enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
2304 static const int kTranscendentalTypeBits = 3;
2305 STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches);  // Type must be encodable in kTranscendentalTypeBits bits
2307 // Returns a heap number with f(input), where f is a math function specified
2308 // by the 'type' argument.
2309 MUST_USE_RESULT inline MaybeObject* Get(Type type, double input);
2311 // The cache contains raw Object pointers. This method disposes of
2312 // them before a garbage collection.
2317 static const int kCacheSize = 512;
2319 explicit SubCache(Type t);
2321 MUST_USE_RESULT inline MaybeObject* Get(double input);
2323 inline double Calculate(double input);
2332 uint32_t integers[2];  // NOTE(review): appears to alias the two 32-bit halves of a double — confirm (union declaration elided)
2335 inline static int Hash(const Converter& c) {
2336 uint32_t hash = (c.integers[0] ^ c.integers[1]);  // combine both halves so all input bits affect the hash
2337 hash ^= static_cast<int32_t>(hash) >> 16;  // fold upper bits into lower
2338 hash ^= static_cast<int32_t>(hash) >> 8;
2339 return (hash & (kCacheSize - 1));  // kCacheSize is a power of two, so this masks to a valid index
2342 Element elements_[kCacheSize];
2346 // Allow access to the caches_ array as an ExternalReference.
2347 friend class ExternalReference;
2348 // Inline implementation of the cache.
2349 friend class TranscendentalCacheStub;
2350 // For evaluating value.
2351 friend class TranscendentalCache;
2353 DISALLOW_COPY_AND_ASSIGN(SubCache);
2356 TranscendentalCache() {
2357 for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;  // sub-caches created lazily elsewhere — confirm
2360 // Used to create an external reference.
2361 inline Address cache_array_address();
2364 friend class Isolate;
2365 // Inline implementation of the caching.
2366 friend class TranscendentalCacheStub;
2367 // Allow access to the caches_ array as an ExternalReference.
2368 friend class ExternalReference;
2370 SubCache* caches_[kNumberOfCaches];
2371 DISALLOW_COPY_AND_ASSIGN(TranscendentalCache);
2375 // Abstract base class for checking whether a weak object should be retained.
2376 class WeakObjectRetainer {  // interface only; implementations are supplied by GC clients
2378 virtual ~WeakObjectRetainer() {}
2380 // Return whether this object should be retained. If NULL is returned the
2381 // object has no references. Otherwise the address of the retained object
2382 // should be returned as in some GC situations the object has been moved.
2383 virtual Object* RetainAs(Object* object) = 0;
2387 // Intrusive object marking uses the least significant bit of a
2388 // heap object's map word to mark objects.
2389 // Normally all map words have the least significant bit set
2390 // because they contain a tagged map pointer.
2391 // If the bit is not set, the object is marked.
2392 // All objects should be unmarked before resuming
2393 // JavaScript execution.
2394 class IntrusiveMarking {
2396 static bool IsMarked(HeapObject* object) {
2397 return (object->map_word().ToRawValue() & kNotMarkedBit) == 0;  // marked == tag bit cleared
2400 static void ClearMark(HeapObject* object) {
2401 uintptr_t map_word = object->map_word().ToRawValue();
2402 object->set_map_word(MapWord::FromRawValue(map_word | kNotMarkedBit));  // restore the tag bit => unmarked
2403 ASSERT(!IsMarked(object));
2406 static void SetMark(HeapObject* object) {
2407 uintptr_t map_word = object->map_word().ToRawValue();
2408 object->set_map_word(MapWord::FromRawValue(map_word & ~kNotMarkedBit));  // clear the tag bit => marked
2409 ASSERT(IsMarked(object));
2412 static Map* MapOfMarkedObject(HeapObject* object) {
2413 uintptr_t map_word = object->map_word().ToRawValue();
2414 return MapWord::FromRawValue(map_word | kNotMarkedBit).ToMap();  // re-set the tag bit so ToMap() sees a valid tagged pointer
2417 static int SizeOfMarkedObject(HeapObject* object) {
2418 return object->SizeFromMap(MapOfMarkedObject(object));
2422 static const uintptr_t kNotMarkedBit = 0x1;
2423 STATIC_ASSERT((kHeapObjectTag & kNotMarkedBit) != 0);  // guarantees the heap-object tag overlaps the mark bit
2427 #if defined(DEBUG) || defined(LIVE_OBJECT_LIST)
2428 // Helper class for tracing paths to a search target Object from all roots.
2429 // The TracePathFrom() method can be used to trace paths from a specific
2430 // object to the search target object.
2431 class PathTracer : public ObjectVisitor {
2434 FIND_ALL, // Will find all matches.
2435 FIND_FIRST // Will stop the search after first match.
2438 // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
2439 // after the first match. If FIND_ALL is specified, then tracing will be
2440 // done for all matches.
2441 PathTracer(Object* search_target,
2442 WhatToFind what_to_find,
2443 VisitMode visit_mode)
2444 : search_target_(search_target),
2445 found_target_(false),
2446 found_target_in_trace_(false),
2447 what_to_find_(what_to_find),
2448 visit_mode_(visit_mode),
2452 virtual void VisitPointers(Object** start, Object** end);
2455 void TracePathFrom(Object** root);
2457 bool found() const { return found_target_; }
2459 static Object* const kAnyGlobalObject;  // sentinel search target — presumably matches any global object; confirm in heap.cc
2463 class UnmarkVisitor;
2465 void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
2466 void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
2467 virtual void ProcessResults();
2469 // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
2470 static const int kMarkTag = 2;
2472 Object* search_target_;
2474 bool found_target_in_trace_;
2475 WhatToFind what_to_find_;
2476 VisitMode visit_mode_;
2477 List<Object*> object_stack_;
2479 AssertNoAllocation no_alloc; // i.e. no gc allowed.
2481 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2483 #endif // DEBUG || LIVE_OBJECT_LIST
2485 } } // namespace v8::internal
2489 #endif // V8_HEAP_H_