1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include "src/compilation-cache.h"
34 #include "src/execution.h"
35 #include "src/factory.h"
36 #include "src/global-handles.h"
37 #include "src/ic/ic.h"
38 #include "src/macro-assembler.h"
39 #include "src/snapshot.h"
40 #include "test/cctest/cctest.h"
42 using namespace v8::internal;
// Asserts the basic invariants of a root Map object: it is a heap object
// contained in the test heap, its own map is the meta map, and its recorded
// instance type and size match the expected values.
// NOTE(review): the closing brace of this helper is elided in this chunk.
static void CheckMap(Map* map, int type, int instance_size) {
  CHECK(map->IsHeapObject());
  CHECK(CcTest::heap()->Contains(map));
  // Every Map's map pointer must be the meta map.
  CHECK_EQ(CcTest::heap()->meta_map(), map->map());
  CHECK_EQ(type, map->instance_type());
  CHECK_EQ(instance_size, map->instance_size());
  // NOTE(review): the TEST(...) header for this body is elided in this chunk.
  // Exercises CheckMap on the well-known root maps; variable-sized types use
  // kVariableSizeSentinel instead of a fixed instance size.
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
// Asserts that |obj| is an Oddball (true/false/null/undefined) and that its
// ToString conversion equals the expected C string.
// NOTE(review): closing brace elided in this chunk.
static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
  CHECK(obj->IsOddball());
  Handle<Object> handle(obj, isolate);
  Object* print_string =
      *Execution::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Asserts that ToString of the Smi |value| equals the expected C string.
// NOTE(review): closing brace elided in this chunk.
static void CheckSmi(Isolate* isolate, int value, const char* string) {
  Handle<Object> handle(Smi::FromInt(value), isolate);
  Object* print_string =
      *Execution::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Asserts that a freshly allocated Number with the given double |value|
// stringifies to the expected C string.
// NOTE(review): closing brace elided in this chunk.
static void CheckNumber(Isolate* isolate, double value, const char* string) {
  Handle<Object> number = isolate->factory()->NewNumber(value);
  CHECK(number->IsNumber());
  Handle<Object> print_string =
      Execution::ToString(isolate, number).ToHandleChecked();
  CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
// Builds a minimal Code object (a single nop) and checks that
// Isolate::FindCodeObject maps every interior address of the object back to
// that Code object, and that an address inside a *different* code object does
// not resolve to it.
// NOTE(review): several lines are elided in this chunk (e.g. the CodeDesc
// declaration/GetCode call and the inner loop's closing brace).
static void CheckFindCodeObject(Isolate* isolate) {
  // Test FindCodeObject
  Assembler assm(isolate, NULL, 0);
  __ nop();  // supported on all architectures
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  CHECK(code->IsCode());
  HeapObject* obj = HeapObject::cast(*code);
  Address obj_addr = obj->address();
  // Every interior pointer of the code object must resolve to it.
  for (int i = 0; i < obj->Size(); i += kPointerSize) {
    Object* found = isolate->FindCodeObject(obj_addr + i);
    CHECK_EQ(*code, found);
  // A second, distinct code object must not be confused with the first.
  Handle<Code> copy = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  HeapObject* obj_copy = HeapObject::cast(*copy);
  Object* not_right = isolate->FindCodeObject(obj_copy->address() +
                                              obj_copy->Size() / 2);
  CHECK(not_right != *code);
  // NOTE(review): TEST(...) header elided — this body appears to create a
  // handle wrapping a NULL object pointer (presumably checking that such a
  // handle is usable/null; the assertions are elided).
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope outer_scope(isolate);
  LocalContext context;
  Handle<Object> n(reinterpret_cast<Object*>(NULL), isolate);
  // NOTE(review): TEST(...) header elided. This body exercises basic heap
  // object allocation: number representation (Smi vs HeapNumber), string
  // allocation, global property lookup, and ToString via the helpers above.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope sc(isolate);
  // A non-integral double must be boxed as a HeapNumber.
  Handle<Object> value = factory->NewNumber(1.000123);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());
  // Small integral doubles are represented as Smis.
  value = factory->NewNumber(1.0);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());
  value = factory->NewNumberFromInt(1024);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());
  // The extreme Smi values must stay Smis.
  value = factory->NewNumberFromInt(Smi::kMinValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
  value = factory->NewNumberFromInt(Smi::kMaxValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
#if !defined(V8_TARGET_ARCH_64_BIT)
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  // One beyond the Smi range must fall back to HeapNumber (32-bit targets).
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  // NOTE(review): the second argument of this CHECK_EQ (and the #endif for
  // the block above) is elided in this chunk.
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
  // nan oddball checks
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));
  Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());
  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  v8::Maybe<bool> maybe = JSReceiver::HasOwnProperty(global, object_string);
  CHECK(maybe.has_value);
  // Check ToString for oddballs
  CheckOddball(isolate, heap->true_value(), "true");
  CheckOddball(isolate, heap->false_value(), "false");
  CheckOddball(isolate, heap->null_value(), "null");
  CheckOddball(isolate, heap->undefined_value(), "undefined");
  // Check ToString for Smis
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");
  // Check ToString for Numbers
  CheckNumber(isolate, 1.1, "1.1");
  CheckFindCodeObject(isolate);
  // NOTE(review): TEST(...) header and the declaration of |request| are
  // elided. Checks pointer-tagging invariants: an allocation request is
  // already object-pointer aligned, and boundary Smi values round-trip.
  CcTest::InitializeVM();
  CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
  CHECK(Smi::FromInt(42)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
// Checks that objects reachable from the global object (a function and an
// object stored as properties) survive new-space GCs, while handles created
// inside an inner HandleScope do not keep unrooted objects alive by
// themselves.
// NOTE(review): several closing braces of the inner scopes and of the test
// itself are elided in this chunk.
TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  HandleScope sc(isolate);
  // Start from a clean new space.
  heap->CollectGarbage(NEW_SPACE);
  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
  HandleScope inner_scope(isolate);
  // Allocate a function and keep it in global object's property.
  Handle<JSFunction> function = factory->NewFunction(name);
  JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
  // Allocate an object. Unrooted after leaving the scope.
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
  CHECK_EQ(Smi::FromInt(24),
           *Object::GetProperty(obj, prop_namex).ToHandleChecked());
  heap->CollectGarbage(NEW_SPACE);
  // Function should be alive.
  v8::Maybe<bool> maybe = JSReceiver::HasOwnProperty(global, name);
  CHECK(maybe.has_value);
  // Check function is retained.
  Handle<Object> func_value =
      Object::GetProperty(global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  HandleScope inner_scope(isolate);
  // Allocate another object, make it reachable from global.
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  // After gc, it should survive.
  heap->CollectGarbage(NEW_SPACE);
  maybe = JSReceiver::HasOwnProperty(global, obj_name);
  CHECK(maybe.has_value);
  Object::GetProperty(global, obj_name).ToHandleChecked();
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
// Allocates |string| as a heap String and checks its length and each
// character against the source C string.
// NOTE(review): the loop's and function's closing braces are elided.
static void VerifyStringAllocation(Isolate* isolate, const char* string) {
  HandleScope scope(isolate);
  Handle<String> s = isolate->factory()->NewStringFromUtf8(
      CStrVector(string)).ToHandleChecked();
  CHECK_EQ(StrLength(string), s->length());
  for (int index = 0; index < s->length(); index++) {
    CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
  // NOTE(review): TEST(...) header elided. Verifies string allocation for a
  // range of short lengths.
  CcTest::InitializeVM();
  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  VerifyStringAllocation(isolate, "a");
  VerifyStringAllocation(isolate, "ab");
  VerifyStringAllocation(isolate, "abc");
  VerifyStringAllocation(isolate, "abcd");
  VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
  // NOTE(review): TEST(...) header elided. Checks that a string allocated
  // through the factory reports the length of its source C string.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* name = "Kasper the spunky";
  Handle<String> string = factory->NewStringFromAsciiChecked(name);
  CHECK_EQ(StrLength(name), string->length());
// Creates strong global handles to a string and a number (two handles each),
// checks they survive a new-space GC, then destroys them.
// NOTE(review): the declarations of h1..h4 and several closing braces are
// elided in this chunk.
TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();
  HandleScope scope(isolate);
  Handle<Object> i = factory->NewStringFromStaticChars("fisk");
  Handle<Object> u = factory->NewNumber(1.12344);
  h1 = global_handles->Create(*i);
  h2 = global_handles->Create(*u);
  h3 = global_handles->Create(*i);
  h4 = global_handles->Create(*u);
  // after gc, it should survive
  heap->CollectGarbage(NEW_SPACE);
  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());
  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
// Set by TestWeakGlobalHandleCallback when a weak handle with parameter id
// 1234 is cleared; reset by each test before triggering GC.
static bool WeakPointerCleared = false;

// Weak-handle callback: decodes the (handle, id) pair passed as the callback
// parameter and records that the pointer was cleared when the id matches.
// NOTE(review): closing brace elided in this chunk.
static void TestWeakGlobalHandleCallback(
    const v8::WeakCallbackData<v8::Value, void>& data) {
  std::pair<v8::Persistent<v8::Value>*, int>* p =
      reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
          data.GetParameter());
  if (p->second == 1234) WeakPointerCleared = true;
// Checks that a scavenge (new-space GC) treats weak global handles like
// strong roots: the weakly-held object survives and the weak callback does
// not fire.
// NOTE(review): declarations of h1/h2 and some closing braces are elided.
TEST(WeakGlobalHandlesScavenge) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();
  WeakPointerCleared = false;
  HandleScope scope(isolate);
  Handle<Object> i = factory->NewStringFromStaticChars("fisk");
  Handle<Object> u = factory->NewNumber(1.12344);
  h1 = global_handles->Create(*i);
  h2 = global_handles->Create(*u);
  // Make h2 weak with id 1234 so the shared callback can identify it.
  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);
  // Scavenge treats weak pointers as normal roots.
  heap->CollectGarbage(NEW_SPACE);
  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
// Checks that a full mark-compact GC honors weak global handles: after the
// objects are promoted to old space and h2 is made weak, a full GC clears h2
// (callback fires) while the strong h1 survives.
// NOTE(review): declarations of h1/h2 and some closing braces are elided.
TEST(WeakGlobalHandlesMark) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();
  WeakPointerCleared = false;
  HandleScope scope(isolate);
  Handle<Object> i = factory->NewStringFromStaticChars("fisk");
  Handle<Object> u = factory->NewNumber(1.12344);
  h1 = global_handles->Create(*i);
  h2 = global_handles->Create(*u);
  // Make sure the objects are promoted.
  heap->CollectGarbage(OLD_POINTER_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));
  // Incremental marking potentially marked handles before they turned weak.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK((*h1)->IsString());
  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  GlobalHandles::Destroy(h1.location());
// Checks the asymmetry between GC kinds for weak handles: a scavenge does not
// clear an otherwise-unreachable weak handle, but a mark-compact does.
// NOTE(review): the declaration of h and some closing braces are elided.
TEST(DeleteWeakGlobalHandle) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();
  WeakPointerCleared = false;
  HandleScope scope(isolate);
  Handle<Object> i = factory->NewStringFromStaticChars("fisk");
  h = global_handles->Create(*i);
  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);
  // Scanvenge does not recognize weak reference.
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!WeakPointerCleared);
  // Mark-compact treats weak reference properly.
  heap->CollectGarbage(OLD_POINTER_SPACE);
  CHECK(WeakPointerCleared);
508 static const char* not_so_random_string_table[] = {
// For each entry of a NULL-terminated string table, internalizes the string
// (twice, via both overloads) and checks that the results are internalized
// and UTF-8 equal to the source — i.e. internalization is idempotent.
// NOTE(review): the declaration of |a| and some CHECKs/braces are elided.
static void CheckInternalizedStrings(const char** strings) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  for (const char* string = *strings; *strings != 0; string = *strings++) {
    HandleScope scope(isolate);
        isolate->factory()->InternalizeUtf8String(CStrVector(string));
    // InternalizeUtf8String may return a failure if a GC is needed.
    CHECK(a->IsInternalizedString());
    Handle<String> b = factory->InternalizeUtf8String(string);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
    b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
  // NOTE(review): TEST(...) header elided. Runs the table twice to verify
  // that re-internalizing already-internalized strings is stable.
  CcTest::InitializeVM();
  v8::HandleScope sc(CcTest::isolate());
  CheckInternalizedStrings(not_so_random_string_table);
  CheckInternalizedStrings(not_so_random_string_table);
// Checks that properties can be added both to an object created from a
// function and to the function object itself.
// NOTE(review): the test's closing brace is elided.
TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
  // Check that we can add properties to function objects.
  JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(24),
           *Object::GetProperty(function, prop_name).ToHandleChecked());
// Exercises add/delete property cycles on a plain JS object in various
// orders, and checks that plain strings and internalized strings are
// interchangeable as property keys.
// NOTE(review): the boolean checks following each HasOwnProperty call (e.g.
// CHECK(maybe.value) / CHECK(!maybe.value)) and several braces are elided in
// this chunk — only the has_value checks are visible.
TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");
  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);
  // Fresh object starts with neither property.
  v8::Maybe<bool> maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  // add then delete a single property
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  maybe = JSReceiver::HasOwnProperty(obj, second);
  CHECK(maybe.has_value);
  // delete first and then second
  JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
  maybe = JSReceiver::HasOwnProperty(obj, second);
  CHECK(maybe.has_value);
  JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  maybe = JSReceiver::HasOwnProperty(obj, second);
  CHECK(maybe.has_value);
  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  maybe = JSReceiver::HasOwnProperty(obj, second);
  CHECK(maybe.has_value);
  // delete second and then first
  JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
  maybe = JSReceiver::HasOwnProperty(obj, first);
  CHECK(maybe.has_value);
  maybe = JSReceiver::HasOwnProperty(obj, second);
  CHECK(maybe.has_value);
  // check string and internalized string match
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  maybe = JSReceiver::HasOwnProperty(obj, s1_string);
  CHECK(maybe.has_value);
  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  maybe = JSReceiver::HasOwnProperty(obj, s2);
  CHECK(maybe.has_value);
  // NOTE(review): TEST(...) header elided. Checks that adding a property to
  // a fresh object transitions it away from the function's initial map.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  // Snapshot the map before any property is added.
  Handle<Map> initial_map(function->initial_map());
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
  // Check the map has changed
  CHECK(*initial_map != obj->map());
  // NOTE(review): TEST(...) header elided. Exercises JSArray length/element
  // handling, including the fast-to-dictionary elements transition when the
  // length exceeds the Smi range.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  Handle<Object> fun_obj = Object::GetProperty(
      CcTest::i_isolate()->global_object(), name).ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);
  // Set array length to 0.
  JSArray::SetElementsLength(array, handle(Smi::FromInt(0), isolate)).Check();
  CHECK_EQ(Smi::FromInt(0), array->length());
  // Must be in fast mode.
  CHECK(array->HasFastSmiOrObjectElements());
  // array[length] = name.
  JSReceiver::SetElement(array, 0, name, NONE, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
  // Set array length with larger than smi value.
  Handle<Object> length =
      factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  JSArray::SetElementsLength(array, length).Check();
  uint32_t int_length = 0;
  CHECK(length->ToArrayIndex(&int_length));
  CHECK_EQ(*length, array->length());
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.
  // array[length] = name.
  JSReceiver::SetElement(array, int_length, name, NONE, SLOPPY).Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  // Writing at index |int_length| grows the length by exactly one.
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
  // NOTE(review): TEST(...) header elided. Checks CopyJSObject: the clone is
  // a distinct object with equal properties/elements, and mutating the clone
  // (with swapped values) does not affect the original.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");
  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  JSReceiver::SetElement(obj, 0, first, NONE, SLOPPY).Check();
  JSReceiver::SetElement(obj, 1, second, NONE, SLOPPY).Check();
  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  // Flip the values on the clone only.
  JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
  JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();
  JSReceiver::SetElement(clone, 0, second, NONE, SLOPPY).Check();
  JSReceiver::SetElement(clone, 1, first, NONE, SLOPPY).Check();
  // Original keeps its values; clone holds the swapped ones.
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
// Allocates, for lengths 0..99, a non-one-byte (UTF-8, 3 bytes per char) and
// a one-byte string via both InternalizeUtf8String/InternalizeOneByteString
// and NewStringFromUtf8, checking the resulting character lengths.
// NOTE(review): some lines are elided (e.g. one_byte[i] initialization in
// the loop, .ToHandleChecked() continuations, closing braces).
TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  // UTF-8 encoding of U+5927 (3 bytes) — forces a two-byte string.
  const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    char* non_one_byte = NewArray<char>(3 * length + 1);
    char* one_byte = NewArray<char>(length + 1);
    non_one_byte[3 * length] = 0;
    one_byte[length] = 0;
    for (int i = 0; i < length; i++) {
      non_one_byte[3 * i] = chars[0];
      non_one_byte[3 * i + 1] = chars[1];
      non_one_byte[3 * i + 2] = chars[2];
    Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
        Vector<const char>(non_one_byte, 3 * length));
    // Character length, not byte length.
    CHECK_EQ(length, non_one_byte_sym->length());
    Handle<String> one_byte_sym =
        factory->InternalizeOneByteString(OneByteVector(one_byte, length));
    CHECK_EQ(length, one_byte_sym->length());
    Handle<String> non_one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
    non_one_byte_str->Hash();
    CHECK_EQ(length, non_one_byte_str->length());
    Handle<String> one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
    one_byte_str->Hash();
    CHECK_EQ(length, one_byte_str->length());
    DeleteArray(non_one_byte);
    DeleteArray(one_byte);
// Walks the whole heap with HeapIterator and counts how many of the given
// handles point at objects actually found during iteration.
// NOTE(review): the counter increment and closing braces are elided.
static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
  // Count the number of objects found in the heap.
  HeapIterator iterator(heap);
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    for (int i = 0; i < size; i++) {
      if (*objs[i] == obj) {
  // NOTE(review): TEST(...) header elided. Allocates objects in new space,
  // old pointer/data space, and large-object space, then verifies the heap
  // iterator visits all of them.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  // Array of objects to scan haep for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;
  // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  // NOTE(review): the trailing arguments of this call (elements kind /
  // TENURED pretenure flag) are elided in this chunk.
  objs[next_objs_index++] = factory->NewJSArray(10,
  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", TENURED);
  // Allocate a large string (for large object space).
  int large_size = Page::kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
// Checks that escaping an empty (null) handle from a nested
// EscapableHandleScope yields a null handle rather than a bogus object.
// NOTE(review): the nested scope's closing brace is elided.
TEST(EmptyHandleEscapeFrom) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Handle<JSObject> runaway;
  v8::EscapableHandleScope nested(CcTest::isolate());
  Handle<JSObject> empty;
  runaway = empty.EscapeFrom(&nested);
  CHECK(runaway.is_null());
// Converts a FixedArray allocation size in bytes to its element count
// (inverse of FixedArray::SizeFor). NOTE(review): closing brace elided.
static int LenFromSize(int size) {
  return (size - FixedArray::kHeaderSize) / kPointerSize;
// Test case for crbug.com/39128: clone a JSObject that references a
// new-space object, forcing the clone into old pointer space, and check that
// the old-to-new region dirty marks are updated so the reference stays valid.
// NOTE(review): several closing braces and a few statements are elided in
// this chunk.
TEST(Regression39128) {
  // Test case for crbug.com/39128.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  TestHeap* heap = CcTest::test_heap();
  // Increase the chance of 'bump-the-pointer' allocation in old space.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  v8::HandleScope scope(CcTest::isolate());
  // The plan: create JSObject which references objects in new space.
  // Then clone this object (forcing it to go into old space) and check
  // that region dirty marks are updated correctly.
  // Step 1: prepare a map for the object. We add 1 inobject property to it.
  // Create a map with single inobject property.
  Handle<Map> my_map = Map::Create(CcTest::i_isolate(), 1);
  int n_properties = my_map->inobject_properties();
  CHECK_GT(n_properties, 0);
  int object_size = my_map->instance_size();
  // Step 2: allocate a lot of objects so to almost fill new space: we need
  // just enough room to allocate JSObject and thus fill the newspace.
  int allocation_amount = Min(FixedArray::kMaxSize,
                              Page::kMaxRegularHeapObjectSize + kPointerSize);
  int allocation_len = LenFromSize(allocation_amount);
  NewSpace* new_space = heap->new_space();
  Address* top_addr = new_space->allocation_top_address();
  Address* limit_addr = new_space->allocation_limit_address();
  while ((*limit_addr - *top_addr) > allocation_amount) {
    CHECK(!heap->always_allocate());
    Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
    CHECK(new_space->Contains(array));
  // Step 3: now allocate fixed array and JSObject to fill the whole new space.
  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
  int fixed_array_len = LenFromSize(to_fill);
  CHECK(fixed_array_len < FixedArray::kMaxLength);
  CHECK(!heap->always_allocate());
  Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
  CHECK(new_space->Contains(array));
  Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
  CHECK(new_space->Contains(object));
  JSObject* jsobject = JSObject::cast(object);
  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
  CHECK_EQ(0, jsobject->properties()->length());
  // Create a reference to object in new space in jsobject.
  FieldIndex index = FieldIndex::ForInObjectOffset(
      JSObject::kHeaderSize - kPointerSize);
  jsobject->FastPropertyAtPut(index, array);
  // New space must now be completely full.
  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
  // Step 4: clone jsobject, but force always allocate first to create a clone
  // in old pointer space.
  Address old_pointer_space_top = heap->old_pointer_space()->top();
  AlwaysAllocateScope aa_scope(isolate);
  Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
  JSObject* clone = JSObject::cast(clone_obj);
  if (clone->address() != old_pointer_space_top) {
    // Alas, got allocated from free list, we cannot do checks.
  CHECK(heap->old_pointer_space()->Contains(clone->address()));
// Verifies basic code flushing on a fresh isolate: compiled code of an
// unused function survives the first couple of full GCs, is flushed after
// repeated full-marking GCs, and is lazily recompiled when called again.
// NOTE(review): interior lines (the JS source string and several closing
// braces) are elided in this extract; code is left byte-identical.
1052 UNINITIALIZED_TEST(TestCodeFlushing) {
1053 // If we do not flush code this test is invalid.
1054 if (!FLAG_flush_code) return;
1055 i::FLAG_allow_natives_syntax = true;
1056 i::FLAG_optimize_for_size = false;
1057 v8::Isolate* isolate = v8::Isolate::New();
1058 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
1060 Factory* factory = i_isolate->factory();
1062 v8::HandleScope scope(isolate);
1063 v8::Context::New(isolate)->Enter();
1064 const char* source =
1071 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1073 // This compile will add the code to the compilation cache.
1075 v8::HandleScope scope(isolate);
1079 // Check function is compiled.
1080 Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
1081 foo_name).ToHandleChecked();
1082 CHECK(func_value->IsJSFunction());
1083 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1084 CHECK(function->shared()->is_compiled());
1086 // The code will survive at least two GCs.
1087 i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1088 i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1089 CHECK(function->shared()->is_compiled());
1091 // Simulate several GCs that use full marking.
// Presumably kAgingThreshold iterations age the code past the flush
// threshold -- confirm against the Code aging constants.
1092 const int kAgingThreshold = 6;
1093 for (int i = 0; i < kAgingThreshold; i++) {
1094 i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1097 // foo should no longer be in the compilation cache
1098 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1099 CHECK(!function->is_compiled() || function->IsOptimized());
1100 // Call foo to get it recompiled.
1101 CompileRun("foo()");
1102 CHECK(function->shared()->is_compiled());
1103 CHECK(function->is_compiled());
// Verifies pre-aged code flushing under --optimize-for-size: code that was
// run only once is pre-aged and collected on the next full GC, while
// running it again resets it to young so it survives further GCs.
1110 TEST(TestCodeFlushingPreAged) {
1111 // If we do not flush code this test is invalid.
1112 if (!FLAG_flush_code) return;
1113 i::FLAG_allow_natives_syntax = true;
// optimize_for_size enables the pre-aging behavior exercised below.
1114 i::FLAG_optimize_for_size = true;
1115 CcTest::InitializeVM();
1116 Isolate* isolate = CcTest::i_isolate();
1117 Factory* factory = isolate->factory();
1118 v8::HandleScope scope(CcTest::isolate());
1119 const char* source = "function foo() {"
1125 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1127 // Compile foo, but don't run it.
1128 { v8::HandleScope scope(CcTest::isolate());
1132 // Check function is compiled.
1133 Handle<Object> func_value =
1134 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1135 CHECK(func_value->IsJSFunction());
1136 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1137 CHECK(function->shared()->is_compiled());
1139 // The code has been run so will survive at least one GC.
1140 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1141 CHECK(function->shared()->is_compiled());
1143 // The code was only run once, so it should be pre-aged and collected on the
1145 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1146 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1148 // Execute the function again twice, and ensure it is reset to the young age.
1149 { v8::HandleScope scope(CcTest::isolate());
1154 // The code will survive at least two GC now that it is young again.
1155 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1156 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1157 CHECK(function->shared()->is_compiled());
1159 // Simulate several GCs that use full marking.
1160 const int kAgingThreshold = 6;
1161 for (int i = 0; i < kAgingThreshold; i++) {
1162 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1165 // foo should no longer be in the compilation cache
1166 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1167 CHECK(!function->is_compiled() || function->IsOptimized());
1168 // Call foo to get it recompiled.
1169 CompileRun("foo()");
1170 CHECK(function->shared()->is_compiled());
1171 CHECK(function->is_compiled());
// Verifies incremental code flushing: code is flushed after repeated
// simulated incremental-marking cycles, and optimizing the function while
// it is enqueued as a flushing candidate leaves the candidate queue sane.
1175 TEST(TestCodeFlushingIncremental) {
1176 // If we do not flush code this test is invalid.
1177 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1178 i::FLAG_allow_natives_syntax = true;
1179 i::FLAG_optimize_for_size = false;
1180 CcTest::InitializeVM();
1181 Isolate* isolate = CcTest::i_isolate();
1182 Factory* factory = isolate->factory();
1183 v8::HandleScope scope(CcTest::isolate());
1184 const char* source = "function foo() {"
1190 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1192 // This compile will add the code to the compilation cache.
1193 { v8::HandleScope scope(CcTest::isolate());
1197 // Check function is compiled.
1198 Handle<Object> func_value =
1199 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1200 CHECK(func_value->IsJSFunction());
1201 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1202 CHECK(function->shared()->is_compiled());
1204 // The code will survive at least two GCs.
1205 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1206 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1207 CHECK(function->shared()->is_compiled());
1209 // Simulate several GCs that use incremental marking.
1210 const int kAgingThreshold = 6;
1211 for (int i = 0; i < kAgingThreshold; i++) {
1212 SimulateIncrementalMarking(CcTest::heap());
1213 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1215 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1216 CHECK(!function->is_compiled() || function->IsOptimized());
1218 // This compile will compile the function again.
1219 { v8::HandleScope scope(CcTest::isolate());
1220 CompileRun("foo();");
1223 // Simulate several GCs that use incremental marking but make sure
1224 // the loop breaks once the function is enqueued as a candidate.
1225 for (int i = 0; i < kAgingThreshold; i++) {
1226 SimulateIncrementalMarking(CcTest::heap());
// A non-undefined next_function_link indicates foo is on the
// code-flushing candidate list.
1227 if (!function->next_function_link()->IsUndefined()) break;
1228 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1231 // Force optimization while incremental marking is active and while
1232 // the function is enqueued as a candidate.
1233 { v8::HandleScope scope(CcTest::isolate());
1234 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1237 // Simulate one final GC to make sure the candidate queue is sane.
1238 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1239 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1240 CHECK(function->is_compiled() || !function->IsOptimized());
// Verifies that a scavenge performed while incremental marking is running
// (and while two functions are enqueued as flushing candidates, one of
// which is killed) leaves the candidate queue in a sane state.
1244 TEST(TestCodeFlushingIncrementalScavenge) {
1245 // If we do not flush code this test is invalid.
1246 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1247 i::FLAG_allow_natives_syntax = true;
1248 i::FLAG_optimize_for_size = false;
1249 CcTest::InitializeVM();
1250 Isolate* isolate = CcTest::i_isolate();
1251 Factory* factory = isolate->factory();
1252 v8::HandleScope scope(CcTest::isolate());
1253 const char* source = "var foo = function() {"
1259 "var bar = function() {"
1263 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1264 Handle<String> bar_name = factory->InternalizeUtf8String("bar");
1266 // Perform one initial GC to enable code flushing.
1267 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1269 // This compile will add the code to the compilation cache.
1270 { v8::HandleScope scope(CcTest::isolate());
1274 // Check functions are compiled.
1275 Handle<Object> func_value =
1276 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1277 CHECK(func_value->IsJSFunction());
1278 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1279 CHECK(function->shared()->is_compiled());
1280 Handle<Object> func_value2 =
1281 Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
1282 CHECK(func_value2->IsJSFunction());
1283 Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
1284 CHECK(function2->shared()->is_compiled());
1286 // Clear references to functions so that one of them can die.
1287 { v8::HandleScope scope(CcTest::isolate());
1288 CompileRun("foo = 0; bar = 0;");
1291 // Bump the code age so that flushing is triggered while the function
1292 // object is still located in new-space.
1293 const int kAgingThreshold = 6;
1294 for (int i = 0; i < kAgingThreshold; i++) {
1295 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1296 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1299 // Simulate incremental marking so that the functions are enqueued as
1300 // code flushing candidates. Then kill one of the functions. Finally
1301 // perform a scavenge while incremental marking is still running.
1302 SimulateIncrementalMarking(CcTest::heap());
// Directly clobber the handle slot so function2 becomes unreachable.
1303 *function2.location() = NULL;
1304 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
1306 // Simulate one final GC to make sure the candidate queue is sane.
1307 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1308 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1309 CHECK(!function->is_compiled() || function->IsOptimized());
// Verifies that aborting incremental marking (here via debugger breakpoint
// activity) while a function is enqueued as a code-flushing candidate
// leaves the candidate queue sane and the function usable.
1313 TEST(TestCodeFlushingIncrementalAbort) {
1314 // If we do not flush code this test is invalid.
1315 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1316 i::FLAG_allow_natives_syntax = true;
1317 i::FLAG_optimize_for_size = false;
1318 CcTest::InitializeVM();
1319 Isolate* isolate = CcTest::i_isolate();
1320 Factory* factory = isolate->factory();
1321 Heap* heap = isolate->heap();
1322 v8::HandleScope scope(CcTest::isolate());
1323 const char* source = "function foo() {"
1329 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1331 // This compile will add the code to the compilation cache.
1332 { v8::HandleScope scope(CcTest::isolate());
1336 // Check function is compiled.
1337 Handle<Object> func_value =
1338 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1339 CHECK(func_value->IsJSFunction());
1340 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1341 CHECK(function->shared()->is_compiled());
1343 // The code will survive at least two GCs.
1344 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1345 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1346 CHECK(function->shared()->is_compiled());
1348 // Bump the code age so that flushing is triggered.
1349 const int kAgingThreshold = 6;
1350 for (int i = 0; i < kAgingThreshold; i++) {
1351 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1354 // Simulate incremental marking so that the function is enqueued as
1355 // code flushing candidate.
1356 SimulateIncrementalMarking(heap);
1358 // Enable the debugger and add a breakpoint while incremental marking
1359 // is running so that incremental marking aborts and code flushing is
1362 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1363 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
1364 isolate->debug()->ClearAllBreakPoints();
1366 // Force optimization now that code flushing is disabled.
1367 { v8::HandleScope scope(CcTest::isolate());
1368 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1371 // Simulate one final GC to make sure the candidate queue is sane.
1372 heap->CollectAllGarbage(Heap::kNoGCFlags);
1373 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1374 CHECK(function->is_compiled() || !function->IsOptimized());
// Verifies the compilation cache's two-phase caching behavior for scripts:
// the first compile only records a hash (lookup misses), the second compile
// installs a real entry, code aging evicts the entry, and aging the hash
// generations before caching prevents the entry from being installed.
1378 TEST(CompilationCacheCachingBehavior) {
1379 // If we do not flush code, or have the compilation cache turned off, this
1381 if (!FLAG_flush_code || !FLAG_flush_code_incrementally ||
1382 !FLAG_compilation_cache) {
1385 CcTest::InitializeVM();
1386 Isolate* isolate = CcTest::i_isolate();
1387 Factory* factory = isolate->factory();
1388 Heap* heap = isolate->heap();
1389 CompilationCache* compilation_cache = isolate->compilation_cache();
1390 LanguageMode language_mode =
1391 construct_language_mode(FLAG_use_strict, FLAG_use_strong);
1393 v8::HandleScope scope(CcTest::isolate());
1394 const char* raw_source =
1401 Handle<String> source = factory->InternalizeUtf8String(raw_source);
1402 Handle<Context> native_context = isolate->native_context();
1405 v8::HandleScope scope(CcTest::isolate());
1406 CompileRun(raw_source);
1409 // On first compilation, only a hash is inserted in the code cache. We can't
1411 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
1412 source, Handle<Object>(), 0, 0, false, true, native_context,
1414 CHECK(info.is_null());
1417 v8::HandleScope scope(CcTest::isolate());
1418 CompileRun(raw_source);
1421 // On second compilation, the hash is replaced by a real cache entry mapping
1422 // the source to the shared function info containing the code.
1423 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, false,
1424 true, native_context, language_mode);
1425 CHECK(!info.is_null());
1427 heap->CollectAllGarbage(Heap::kNoGCFlags);
1429 // The cache entry survives a GC while the code is still young.
1431 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, false,
1432 true, native_context, language_mode);
1433 CHECK(!info.is_null());
// Age the cached code all the way to "old" so the next GC evicts it.
1435 while (!info.ToHandleChecked()->code()->IsOld()) {
1436 info.ToHandleChecked()->code()->MakeOlder(NO_MARKING_PARITY);
1439 heap->CollectAllGarbage(Heap::kNoGCFlags);
1440 // Ensure code aging cleared the entry from the cache.
1441 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, false,
1442 true, native_context, language_mode);
1443 CHECK(info.is_null());
1446 v8::HandleScope scope(CcTest::isolate());
1447 CompileRun(raw_source);
1450 // On first compilation, only a hash is inserted in the code cache. We can't
1452 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, false,
1453 true, native_context, language_mode);
1454 CHECK(info.is_null());
// Age away all hash generations before the second compile happens.
1456 for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
1457 compilation_cache->MarkCompactPrologue();
1461 v8::HandleScope scope(CcTest::isolate());
1462 CompileRun(raw_source);
1465 // If we aged the cache before caching the script, ensure that we didn't cache
1466 // on next compilation.
1467 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, false,
1468 true, native_context, language_mode);
1469 CHECK(info.is_null());
1473 // Count the number of native contexts in the weak list of native contexts.
// Walks the heap's native_contexts_list via NEXT_CONTEXT_LINK until the
// undefined sentinel is reached.
1474 int CountNativeContexts() {
1476 Object* object = CcTest::heap()->native_contexts_list();
1477 while (!object->IsUndefined()) {
1479 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1485 // Count the number of user functions in the weak list of optimized
1486 // functions attached to a native context.
// Walks OPTIMIZED_FUNCTIONS_LIST via next_function_link; stops at the
// first non-JSFunction or builtin entry.
1487 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1489 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1490 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1491 while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1493 object = JSFunction::cast(object)->next_function_link();
// Verifies the internal weak lists of native contexts and of optimized
// functions: scavenges treat the links as strong (counts unchanged), while
// mark-compact clears dead entries one by one.
1499 TEST(TestInternalWeakLists) {
1500 v8::V8::Initialize();
1502 // Some flags turn Scavenge collections into Mark-sweep collections
1503 // and hence are incompatible with this test case.
1504 if (FLAG_gc_global || FLAG_stress_compaction) return;
1506 static const int kNumTestContexts = 10;
1508 Isolate* isolate = CcTest::i_isolate();
1509 Heap* heap = isolate->heap();
1510 HandleScope scope(isolate);
1511 v8::Handle<v8::Context> ctx[kNumTestContexts];
1513 CHECK_EQ(0, CountNativeContexts());
1515 // Create a number of global contexts which get linked together.
1516 for (int i = 0; i < kNumTestContexts; i++) {
1517 ctx[i] = v8::Context::New(CcTest::isolate());
1519 // Collect garbage that might have been created by one of the
1520 // installed extensions.
1521 isolate->compilation_cache()->Clear();
1522 heap->CollectAllGarbage(Heap::kNoGCFlags);
1524 bool opt = (FLAG_always_opt && isolate->use_crankshaft());
1526 CHECK_EQ(i + 1, CountNativeContexts());
1530 // Create a handle scope so no function objects get stuck in the outer
1532 HandleScope scope(isolate);
1533 const char* source = "function f1() { };"
1534 "function f2() { };"
1535 "function f3() { };"
1536 "function f4() { };"
1537 "function f5() { };";
1539 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1541 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
1543 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1545 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1547 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1549 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1551 // Remove function f1, and
1552 CompileRun("f1=null");
1554 // Scavenge treats these references as strong.
1555 for (int j = 0; j < 10; j++) {
1556 CcTest::heap()->CollectGarbage(NEW_SPACE);
1557 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1560 // Mark compact handles the weak references.
1561 isolate->compilation_cache()->Clear();
1562 heap->CollectAllGarbage(Heap::kNoGCFlags);
1563 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1565 // Get rid of f3 and f5 in the same way.
1566 CompileRun("f3=null");
1567 for (int j = 0; j < 10; j++) {
1568 CcTest::heap()->CollectGarbage(NEW_SPACE);
1569 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1571 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1572 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1573 CompileRun("f5=null");
1574 for (int j = 0; j < 10; j++) {
1575 CcTest::heap()->CollectGarbage(NEW_SPACE);
1576 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1578 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1579 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1584 // Force compilation cache cleanup.
1585 CcTest::heap()->NotifyContextDisposed(true);
1586 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1588 // Dispose the native contexts one by one.
1589 for (int i = 0; i < kNumTestContexts; i++) {
1590 // TODO(dcarney): is there a better way to do this?
// Overwrite the persistent handle's slot with undefined so the context
// becomes unreachable without going through the public API.
1591 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1592 *unsafe = CcTest::heap()->undefined_value();
1595 // Scavenge treats these references as strong.
1596 for (int j = 0; j < 10; j++) {
1597 CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1598 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1601 // Mark compact handles the weak references.
1602 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1603 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1606 CHECK_EQ(0, CountNativeContexts());
1610 // Count the number of native contexts in the weak list of native contexts
1611 // causing a GC after the specified number of elements.
// Uses handles (unlike CountNativeContexts) so the walk survives the GC
// triggered mid-iteration when count reaches n.
1612 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1613 Heap* heap = isolate->heap();
1615 Handle<Object> object(heap->native_contexts_list(), isolate);
1616 while (!object->IsUndefined()) {
1618 if (count == n) heap->CollectAllGarbage(Heap::kNoGCFlags);
1620 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1627 // Count the number of user functions in the weak list of optimized
1628 // functions attached to a native context causing a GC after the
1629 // specified number of elements.
// Handle-based variant of CountOptimizedUserFunctions so the list walk is
// safe across the GC triggered when count reaches n.
1630 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1633 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1634 Isolate* isolate = icontext->GetIsolate();
1635 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1637 while (object->IsJSFunction() &&
1638 !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1640 if (count == n) isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
1641 object = Handle<Object>(
1642 Object::cast(JSFunction::cast(*object)->next_function_link()),
// Verifies that traversing the internal weak lists is safe even when a GC
// is triggered in the middle of the traversal.
1649 TEST(TestInternalWeakListsTraverseWithGC) {
1650 v8::V8::Initialize();
1651 Isolate* isolate = CcTest::i_isolate();
1653 static const int kNumTestContexts = 10;
1655 HandleScope scope(isolate);
1656 v8::Handle<v8::Context> ctx[kNumTestContexts];
1658 CHECK_EQ(0, CountNativeContexts());
1660 // Create a number of contexts and check the length of the weak list both
1661 // with and without GCs while iterating the list.
1662 for (int i = 0; i < kNumTestContexts; i++) {
1663 ctx[i] = v8::Context::New(CcTest::isolate());
1664 CHECK_EQ(i + 1, CountNativeContexts());
1665 CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
1668 bool opt = (FLAG_always_opt && isolate->use_crankshaft());
1670 // Compile a number of functions the length of the weak list of optimized
1671 // functions both with and without GCs while iterating the list.
1673 const char* source = "function f1() { };"
1674 "function f2() { };"
1675 "function f3() { };"
1676 "function f4() { };"
1677 "function f5() { };";
1679 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1681 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
1682 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1684 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
1685 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1687 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
1688 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1690 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
1691 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1693 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
1694 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
// Verifies heap-size accounting for generated regexp code: a regexp over
// the "too large to optimize" threshold stays small, while a half-size
// (optimizable) regexp produces substantially more code.
1700 TEST(TestSizeOfRegExpCode) {
1701 if (!FLAG_regexp_optimization) return;
1703 v8::V8::Initialize();
1705 Isolate* isolate = CcTest::i_isolate();
1706 HandleScope scope(isolate);
1708 LocalContext context;
1710 // Adjust source below and this check to match
1711 // RegExpImpl::kRegExpTooLargeToOptimize.
1712 DCHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 10 * KB);
1714 // Compile a regexp that is much larger if we are using regexp optimizations.
1716 "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
1717 "var half_size_reg_exp;"
1718 "while (reg_exp_source.length < 10 * 1024) {"
1719 " half_size_reg_exp = reg_exp_source;"
1720 " reg_exp_source = reg_exp_source + reg_exp_source;"
1723 "reg_exp_source.match(/f/);");
1725 // Get initial heap size after several full GCs, which will stabilize
1726 // the heap size and return with sweeping finished completely.
1727 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1728 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1729 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1730 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1731 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1732 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1733 if (collector->sweeping_in_progress()) {
1734 collector->EnsureSweepingCompleted();
1736 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1738 CompileRun("'foo'.match(reg_exp_source);");
1739 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1740 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1742 CompileRun("'foo'.match(half_size_reg_exp);");
1743 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1744 int size_with_optimized_regexp =
1745 static_cast<int>(CcTest::heap()->SizeOfObjects());
1747 int size_of_regexp_code = size_with_regexp - initial_size;
1749 CHECK_LE(size_of_regexp_code, 1 * MB);
1751 // Small regexp is half the size, but compiles to more than twice the code
1752 // due to the optimization steps.
1753 CHECK_GE(size_with_optimized_regexp,
1754 size_with_regexp + size_of_regexp_code * 2);
// Verifies Heap::SizeOfObjects accounting: allocating tenured fixed arrays
// grows the reported size linearly, and a full GC returns it to the
// initial value even before concurrent sweeping has finished.
1758 TEST(TestSizeOfObjects) {
1759 v8::V8::Initialize();
1761 // Get initial heap size after several full GCs, which will stabilize
1762 // the heap size and return with sweeping finished completely.
1763 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1764 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1765 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1766 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1767 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1768 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1769 if (collector->sweeping_in_progress()) {
1770 collector->EnsureSweepingCompleted();
1772 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1775 // Allocate objects on several different old-space pages so that
1776 // concurrent sweeper threads will be busy sweeping the old space on
1777 // subsequent GC runs.
1778 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1779 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1780 for (int i = 1; i <= 100; i++) {
1781 CcTest::test_heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
1782 CHECK_EQ(initial_size + i * filler_size,
1783 static_cast<int>(CcTest::heap()->SizeOfObjects()));
1787 // The heap size should go back to initial size after a full GC, even
1788 // though sweeping didn't finish yet.
1789 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1791 // Normally sweeping would not be complete here, but no guarantees.
1793 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1795 // Waiting for sweeper threads should not change heap size.
1796 if (collector->sweeping_in_progress()) {
1797 collector->EnsureSweepingCompleted();
1799 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
// Cross-checks Heap::SizeOfObjects against a manual sum of object sizes
// from a HeapIterator walk; the two results must agree within 5%.
1803 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1804 CcTest::InitializeVM();
1805 HeapIterator iterator(CcTest::heap());
1806 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1807 intptr_t size_of_objects_2 = 0;
1808 for (HeapObject* obj = iterator.next();
1810 obj = iterator.next()) {
// Free-space fillers are not "objects" for accounting purposes.
1811 if (!obj->IsFreeSpace()) {
1812 size_of_objects_2 += obj->Size();
1815 // Delta must be within 5% of the larger result.
1816 // TODO(gc): Tighten this up by distinguishing between byte
1817 // arrays that are real and those that merely mark free space
1819 if (size_of_objects_1 > size_of_objects_2) {
1820 intptr_t delta = size_of_objects_1 - size_of_objects_2;
1821 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1822 "Iterator: %" V8_PTR_PREFIX "d, "
1823 "delta: %" V8_PTR_PREFIX "d\n",
1824 size_of_objects_1, size_of_objects_2, delta);
1825 CHECK_GT(size_of_objects_1 / 20, delta);
1827 intptr_t delta = size_of_objects_2 - size_of_objects_1;
1828 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1829 "Iterator: %" V8_PTR_PREFIX "d, "
1830 "delta: %" V8_PTR_PREFIX "d\n",
1831 size_of_objects_1, size_of_objects_2, delta);
1832 CHECK_GT(size_of_objects_2 / 20, delta);
// Test helper: fills new space almost completely with 32-element fixed
// arrays, under AlwaysAllocateScope so the scavenger does not undo it.
1837 static void FillUpNewSpace(NewSpace* new_space) {
1838 // Fill up new space to the point that it is completely full. Make sure
1839 // that the scavenger does not undo the filling.
1840 Heap* heap = new_space->heap();
1841 Isolate* isolate = heap->isolate();
1842 Factory* factory = isolate->factory();
1843 HandleScope scope(isolate);
1844 AlwaysAllocateScope always_allocate(isolate);
1845 intptr_t available = new_space->Capacity() - new_space->Size();
// Leave one filler's worth of slack so the last allocation still fits.
1846 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
1847 for (intptr_t i = 0; i < number_of_fillers; i++) {
1848 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
// Verifies new-space capacity management: explicit growth doubles
// capacity, Shrink() is a no-op while the space is (partly) full, and
// after a scavenge empties it, Shrink() halves capacity exactly once.
1853 TEST(GrowAndShrinkNewSpace) {
1854 CcTest::InitializeVM();
1855 Heap* heap = CcTest::heap();
1856 NewSpace* new_space = heap->new_space();
1858 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
1859 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1860 // The max size cannot exceed the reserved size, since semispaces must be
1861 // always within the reserved space. We can't test new space growing and
1862 // shrinking if the reserved size is the same as the minimum (initial) size.
1866 // Explicitly growing should double the space capacity.
1867 intptr_t old_capacity, new_capacity;
1868 old_capacity = new_space->TotalCapacity();
1870 new_capacity = new_space->TotalCapacity();
1871 CHECK(2 * old_capacity == new_capacity);
1873 old_capacity = new_space->TotalCapacity();
1874 FillUpNewSpace(new_space);
1875 new_capacity = new_space->TotalCapacity();
1876 CHECK(old_capacity == new_capacity);
1878 // Explicitly shrinking should not affect space capacity.
1879 old_capacity = new_space->TotalCapacity();
1880 new_space->Shrink();
1881 new_capacity = new_space->TotalCapacity();
1882 CHECK(old_capacity == new_capacity);
1884 // Let the scavenger empty the new space.
1885 heap->CollectGarbage(NEW_SPACE);
1886 CHECK_LE(new_space->Size(), old_capacity);
1888 // Explicitly shrinking should halve the space capacity.
1889 old_capacity = new_space->TotalCapacity();
1890 new_space->Shrink();
1891 new_capacity = new_space->TotalCapacity();
1892 CHECK(old_capacity == 2 * new_capacity);
1894 // Consecutive shrinking should not affect space capacity.
1895 old_capacity = new_space->TotalCapacity();
1896 new_space->Shrink();
1897 new_space->Shrink();
1898 new_space->Shrink();
1899 new_capacity = new_space->TotalCapacity();
1900 CHECK(old_capacity == new_capacity);
// Verifies that CollectAllAvailableGarbage shrinks a previously grown and
// filled new space back to its original capacity.
1904 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
1905 CcTest::InitializeVM();
1906 Heap* heap = CcTest::heap();
1907 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
1908 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1909 // The max size cannot exceed the reserved size, since semispaces must be
1910 // always within the reserved space. We can't test new space growing and
1911 // shrinking if the reserved size is the same as the minimum (initial) size.
1915 v8::HandleScope scope(CcTest::isolate());
1916 NewSpace* new_space = heap->new_space();
1917 intptr_t old_capacity, new_capacity;
1918 old_capacity = new_space->TotalCapacity();
1920 new_capacity = new_space->TotalCapacity();
1921 CHECK(2 * old_capacity == new_capacity);
1922 FillUpNewSpace(new_space);
1923 heap->CollectAllAvailableGarbage();
1924 new_capacity = new_space->TotalCapacity();
1925 CHECK(old_capacity == new_capacity);
// Test helper: counts global objects currently alive on the heap by
// iterating every heap object.
1929 static int NumberOfGlobalObjects() {
1931 HeapIterator iterator(CcTest::heap());
1932 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1933 if (obj->IsGlobalObject()) count++;
1939 // Test that we don't embed maps from foreign contexts into
// Regression test: optimized code in ctx2 reading o.x (an object created
// in ctx1) must not embed ctx1's map; after disposing each context its
// global object must be collectible.
1941 TEST(LeakNativeContextViaMap) {
1942 i::FLAG_allow_natives_syntax = true;
1943 v8::Isolate* isolate = CcTest::isolate();
1944 v8::HandleScope outer_scope(isolate);
1945 v8::Persistent<v8::Context> ctx1p;
1946 v8::Persistent<v8::Context> ctx2p;
1948 v8::HandleScope scope(isolate);
1949 ctx1p.Reset(isolate, v8::Context::New(isolate));
1950 ctx2p.Reset(isolate, v8::Context::New(isolate));
1951 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1954 CcTest::heap()->CollectAllAvailableGarbage();
// Two contexts contribute four global objects (global object + global
// proxy per context, per the expected count here).
1955 CHECK_EQ(4, NumberOfGlobalObjects());
1958 v8::HandleScope inner_scope(isolate);
1959 CompileRun("var v = {x: 42}");
1960 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1961 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1962 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1964 ctx2->Global()->Set(v8_str("o"), v);
1965 v8::Local<v8::Value> res = CompileRun(
1966 "function f() { return o.x; }"
1967 "for (var i = 0; i < 10; ++i) f();"
1968 "%OptimizeFunctionOnNextCall(f);"
1970 CHECK_EQ(42, res->Int32Value());
// Drop the cross-context reference before disposing ctx1.
1971 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1973 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
1975 isolate->ContextDisposedNotification();
1977 CcTest::heap()->CollectAllAvailableGarbage();
1978 CHECK_EQ(2, NumberOfGlobalObjects());
1980 CcTest::heap()->CollectAllAvailableGarbage();
1981 CHECK_EQ(0, NumberOfGlobalObjects());
// Test that we don't embed functions from foreign contexts into optimized
// code: calling a ctx1 closure from optimized ctx2 code must not keep ctx1's
// native context alive after disposal.
1987 TEST(LeakNativeContextViaFunction) {
1988 i::FLAG_allow_natives_syntax = true;
1989 v8::Isolate* isolate = CcTest::isolate();
1990 v8::HandleScope outer_scope(isolate);
1991 v8::Persistent<v8::Context> ctx1p;
1992 v8::Persistent<v8::Context> ctx2p;
1994 v8::HandleScope scope(isolate);
1995 ctx1p.Reset(isolate, v8::Context::New(isolate));
1996 ctx2p.Reset(isolate, v8::Context::New(isolate));
1997 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// Two live contexts expected: 4 global objects.
2000 CcTest::heap()->CollectAllAvailableGarbage();
2001 CHECK_EQ(4, NumberOfGlobalObjects());
2004 v8::HandleScope inner_scope(isolate);
// v is a closure created in ctx1; f in ctx2 invokes it while optimized.
2005 CompileRun("var v = function() { return 42; }");
2006 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2007 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2008 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2010 ctx2->Global()->Set(v8_str("o"), v);
2011 v8::Local<v8::Value> res = CompileRun(
2012 "function f(x) { return x(); }"
2013 "for (var i = 0; i < 10; ++i) f(o);"
2014 "%OptimizeFunctionOnNextCall(f);"
2016 CHECK_EQ(42, res->Int32Value());
// Drop the cross-context reference, then dispose and collect.
2017 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2021 isolate->ContextDisposedNotification();
2023 CcTest::heap()->CollectAllAvailableGarbage();
2024 CHECK_EQ(2, NumberOfGlobalObjects());
2026 CcTest::heap()->CollectAllAvailableGarbage();
2027 CHECK_EQ(0, NumberOfGlobalObjects());
// Same leak check as LeakNativeContextViaMap, but through a keyed (indexed)
// load o[0] on an array created in the foreign context.
2031 TEST(LeakNativeContextViaMapKeyed) {
2032 i::FLAG_allow_natives_syntax = true;
2033 v8::Isolate* isolate = CcTest::isolate();
2034 v8::HandleScope outer_scope(isolate);
2035 v8::Persistent<v8::Context> ctx1p;
2036 v8::Persistent<v8::Context> ctx2p;
2038 v8::HandleScope scope(isolate);
2039 ctx1p.Reset(isolate, v8::Context::New(isolate));
2040 ctx2p.Reset(isolate, v8::Context::New(isolate));
2041 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2044 CcTest::heap()->CollectAllAvailableGarbage();
2045 CHECK_EQ(4, NumberOfGlobalObjects());
2048 v8::HandleScope inner_scope(isolate);
// v is an array from ctx1; optimized f in ctx2 performs a keyed load on it.
2049 CompileRun("var v = [42, 43]");
2050 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2051 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2052 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2054 ctx2->Global()->Set(v8_str("o"), v);
2055 v8::Local<v8::Value> res = CompileRun(
2056 "function f() { return o[0]; }"
2057 "for (var i = 0; i < 10; ++i) f();"
2058 "%OptimizeFunctionOnNextCall(f);"
2060 CHECK_EQ(42, res->Int32Value());
2061 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2065 isolate->ContextDisposedNotification();
// Disposing each context must allow its global objects to be collected.
2067 CcTest::heap()->CollectAllAvailableGarbage();
2068 CHECK_EQ(2, NumberOfGlobalObjects());
2070 CcTest::heap()->CollectAllAvailableGarbage();
2071 CHECK_EQ(0, NumberOfGlobalObjects());
// Same leak check as LeakNativeContextViaMap, but the foreign-context object
// is reached through a prototype chain (the optimized load body is elided).
2075 TEST(LeakNativeContextViaMapProto) {
2076 i::FLAG_allow_natives_syntax = true;
2077 v8::Isolate* isolate = CcTest::isolate();
2078 v8::HandleScope outer_scope(isolate);
2079 v8::Persistent<v8::Context> ctx1p;
2080 v8::Persistent<v8::Context> ctx2p;
2082 v8::HandleScope scope(isolate);
2083 ctx1p.Reset(isolate, v8::Context::New(isolate));
2084 ctx2p.Reset(isolate, v8::Context::New(isolate));
2085 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2088 CcTest::heap()->CollectAllAvailableGarbage();
2089 CHECK_EQ(4, NumberOfGlobalObjects());
2092 v8::HandleScope inner_scope(isolate);
// v (from ctx1) carries the value 42 in property y.
2093 CompileRun("var v = { y: 42}");
2094 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2095 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2096 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2098 ctx2->Global()->Set(v8_str("o"), v);
2099 v8::Local<v8::Value> res = CompileRun(
2105 "for (var i = 0; i < 10; ++i) f();"
2106 "%OptimizeFunctionOnNextCall(f);"
2108 CHECK_EQ(42, res->Int32Value());
2109 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2113 isolate->ContextDisposedNotification();
// Global-object counts must drop to 2 and then 0 as contexts are disposed.
2115 CcTest::heap()->CollectAllAvailableGarbage();
2116 CHECK_EQ(2, NumberOfGlobalObjects());
2118 CcTest::heap()->CollectAllAvailableGarbage();
2119 CHECK_EQ(0, NumberOfGlobalObjects());
// Regression test: the instanceof stub must emit a proper write barrier.
// Runs optimized f (which uses instanceof) while incremental marking has
// blackened f's code, then forces a GC; heap verification (FLAG_verify_heap)
// would catch a missing barrier.
2123 TEST(InstanceOfStubWriteBarrier) {
2124 i::FLAG_allow_natives_syntax = true;
2126 i::FLAG_verify_heap = true;
2129 CcTest::InitializeVM();
2130 if (!CcTest::i_isolate()->use_crankshaft()) return;
2131 if (i::FLAG_force_marking_deque_overflows) return;
2132 v8::HandleScope outer_scope(CcTest::isolate());
2135 v8::HandleScope scope(CcTest::isolate());
2137 "function foo () { }"
2138 "function mkbar () { return new (new Function(\"\")) (); }"
2139 "function f (x) { return (x instanceof foo); }"
2140 "function g () { f(mkbar()); }"
2141 "f(new foo()); f(new foo());"
2142 "%OptimizeFunctionOnNextCall(f);"
2143 "f(new foo()); g();");
2146 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2150 Handle<JSFunction> f =
2151 v8::Utils::OpenHandle(
2152 *v8::Handle<v8::Function>::Cast(
2153 CcTest::global()->Get(v8_str("f"))));
2155 CHECK(f->IsOptimized());
// Step incremental marking until f's code object is marked black (or
// marking stops), so the subsequent call to g() writes into black code.
2157 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
2158 !marking->IsStopped()) {
2159 // Discard any pending GC requests otherwise we will get GC when we enter
2161 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2164 CHECK(marking->IsMarking());
2167 v8::HandleScope scope(CcTest::isolate());
2168 v8::Handle<v8::Object> global = CcTest::global();
2169 v8::Handle<v8::Function> g =
2170 v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
2171 g->Call(global, 0, NULL);
// Hurry marking to completion and collect; verification runs during GC.
2174 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2175 CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE);
// Verifies that dead prototype transitions are cleared from a map's
// prototype-transition array during GC, that the array is compacted, and
// that slots in it are recorded correctly when a prototype lives on an
// evacuation candidate.
2179 TEST(PrototypeTransitionClearing) {
2180 if (FLAG_never_compact) return;
2181 CcTest::InitializeVM();
2182 Isolate* isolate = CcTest::i_isolate();
2183 Factory* factory = isolate->factory();
2184 v8::HandleScope scope(CcTest::isolate());
2186 CompileRun("var base = {};");
2187 Handle<JSObject> baseObject =
2188 v8::Utils::OpenHandle(
2189 *v8::Handle<v8::Object>::Cast(
2190 CcTest::global()->Get(v8_str("base"))));
2191 int initialTransitions = baseObject->map()->NumberOfProtoTransitions();
// Create 10 prototype transitions; only those for i >= 3 stay reachable
// via the 'live' array (script prologue elided).
2195 "for (var i = 0; i < 10; i++) {"
2197 " var prototype = {};"
2198 " object.__proto__ = prototype;"
2199 " if (i >= 3) live.push(object, prototype);"
2202 // Verify that only dead prototype transitions are cleared.
2203 CHECK_EQ(initialTransitions + 10,
2204 baseObject->map()->NumberOfProtoTransitions());
2205 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2206 const int transitions = 10 - 3;
2207 CHECK_EQ(initialTransitions + transitions,
2208 baseObject->map()->NumberOfProtoTransitions());
2210 // Verify that prototype transitions array was compacted.
2211 FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
2212 for (int i = initialTransitions; i < initialTransitions + transitions; i++) {
2213 int j = Map::kProtoTransitionHeaderSize + i;
2214 CHECK(trans->get(j)->IsMap());
2217 // Make sure next prototype is placed on an old-space evacuation candidate.
2218 Handle<JSObject> prototype;
2219 PagedSpace* space = CcTest::heap()->old_pointer_space();
2221 AlwaysAllocateScope always_allocate(isolate);
2222 SimulateFullSpace(space);
2223 prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
2226 // Add a prototype on an evacuation candidate and verify that transition
2227 // clearing correctly records slots in prototype transition array.
2228 i::FLAG_always_compact = true;
2229 Handle<Map> map(baseObject->map());
2230 CHECK(!space->LastPage()->Contains(
2231 map->GetPrototypeTransitions()->address()));
2232 CHECK(space->LastPage()->Contains(prototype->address()));
// Verifies that bumping global_ic_age (via ContextDisposedNotification +
// IdleNotification) during incremental marking resets the SharedFunctionInfo
// counters: ic_age is synced and opt_count/profiler_ticks go back to 0.
2236 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2237 i::FLAG_stress_compaction = false;
2238 i::FLAG_allow_natives_syntax = true;
2240 i::FLAG_verify_heap = true;
2243 CcTest::InitializeVM();
2244 if (!CcTest::i_isolate()->use_crankshaft()) return;
2245 v8::HandleScope outer_scope(CcTest::isolate());
2248 v8::HandleScope scope(CcTest::isolate());
2252 " for (var i = 0; i < 100; i++) s += i;"
2256 "%OptimizeFunctionOnNextCall(f);"
2259 Handle<JSFunction> f =
2260 v8::Utils::OpenHandle(
2261 *v8::Handle<v8::Function>::Cast(
2262 CcTest::global()->Get(v8_str("f"))));
2263 CHECK(f->IsOptimized());
2265 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2269 // The following two calls will increment CcTest::heap()->global_ic_age().
2270 const int kLongIdlePauseInMs = 1000;
2271 CcTest::isolate()->ContextDisposedNotification();
2272 CcTest::isolate()->IdleNotification(kLongIdlePauseInMs);
// Drive marking to completion by hand.
2274 while (!marking->IsStopped() && !marking->IsComplete()) {
2275 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2277 if (!marking->IsStopped() || marking->should_hurry()) {
2278 // We don't normally finish a GC via Step(), we normally finish by
2279 // setting the stack guard and then do the final steps in the stack
2280 // guard interrupt. But here we didn't ask for that, and there is no
2281 // JS code running to trigger the interrupt, so we explicitly finalize
2283 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags,
2284 "Test finalizing incremental mark-sweep");
// After finalization the counters must have been reset.
2287 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2288 CHECK_EQ(0, f->shared()->opt_count());
2289 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Same counter-reset check as the test above, but with incremental marking
// aborted so that the IdleNotification triggers a full (non-incremental)
// mark-sweep instead.
2293 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2294 i::FLAG_stress_compaction = false;
2295 i::FLAG_allow_natives_syntax = true;
2297 i::FLAG_verify_heap = true;
2300 CcTest::InitializeVM();
2301 if (!CcTest::i_isolate()->use_crankshaft()) return;
2302 v8::HandleScope outer_scope(CcTest::isolate());
2305 v8::HandleScope scope(CcTest::isolate());
2309 " for (var i = 0; i < 100; i++) s += i;"
2313 "%OptimizeFunctionOnNextCall(f);"
2316 Handle<JSFunction> f =
2317 v8::Utils::OpenHandle(
2318 *v8::Handle<v8::Function>::Cast(
2319 CcTest::global()->Get(v8_str("f"))));
2320 CHECK(f->IsOptimized());
// Turn incremental marking off so the idle notification does a full GC.
2322 CcTest::heap()->incremental_marking()->Abort();
2324 // The following two calls will increment CcTest::heap()->global_ic_age().
2325 // Since incremental marking is off, IdleNotification will do full GC.
2326 const int kLongIdlePauseInMs = 1000;
2327 CcTest::isolate()->ContextDisposedNotification();
2328 CcTest::isolate()->IdleNotification(kLongIdlePauseInMs);
// Counters must be reset by the full mark-sweep.
2330 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2331 CHECK_EQ(0, f->shared()->opt_count());
2332 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Verifies that an idle notification finishes an in-progress incremental
// marking cycle: after stepping marking just short of completion, a long
// IdleNotification must trigger exactly one GC.
2336 TEST(IdleNotificationFinishMarking) {
2337 i::FLAG_allow_natives_syntax = true;
2338 CcTest::InitializeVM();
2339 SimulateFullSpace(CcTest::heap()->old_pointer_space());
2340 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2344 CHECK_EQ(CcTest::heap()->gc_count(), 0);
2346 // TODO(hpayer): We cannot write proper unit test right now for heap.
2347 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2348 // marking delay counter.
2350 // Perform a huge incremental marking step but don't complete marking.
2351 intptr_t bytes_processed = 0;
2354 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2355 IncrementalMarking::FORCE_MARKING,
2356 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2357 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2358 } while (bytes_processed);
2360 // The next invocations of incremental marking are not going to complete
2362 // since the completion threshold is not reached
2363 for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
2365 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2366 IncrementalMarking::FORCE_MARKING,
2367 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2368 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2371 // The next idle notification has to finish incremental marking.
2372 const int kLongIdleTime = 1000000;
2373 CcTest::isolate()->IdleNotification(kLongIdleTime);
2374 CHECK_EQ(CcTest::heap()->gc_count(), 1);
// Test that HAllocateObject will always return an object in new-space,
// even when new space is simulated full and AlwaysAllocateScope is active.
2379 TEST(OptimizedAllocationAlwaysInNewSpace) {
2380 i::FLAG_allow_natives_syntax = true;
2381 CcTest::InitializeVM();
2382 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2383 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2384 v8::HandleScope scope(CcTest::isolate());
2386 SimulateFullSpace(CcTest::heap()->new_space());
2387 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2388 v8::Local<v8::Value> res = CompileRun(
2391 " for (var i = 0; i < 32; i++) {"
2392 " this['x' + i] = x;"
2395 "function f(x) { return new c(x); };"
2397 "%OptimizeFunctionOnNextCall(f);"
2400 4, res.As<v8::Object>()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2402 Handle<JSObject> o =
2403 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// The optimized allocation must land in new space.
2405 CHECK(CcTest::heap()->InNewSpace(*o));
// Verifies pretenuring with allocation folding: repeatedly allocating
// nested literals [[{}], [1.1]] must tenure the outer array, both inner
// arrays, pointer elements into old-pointer space, and the double elements
// backing store into old-data space.
2409 TEST(OptimizedPretenuringAllocationFolding) {
2410 i::FLAG_allow_natives_syntax = true;
2411 i::FLAG_expose_gc = true;
2412 CcTest::InitializeVM();
2413 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2414 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2415 v8::HandleScope scope(CcTest::isolate());
2417 // Grow new space unitl maximum capacity reached.
2418 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2419 CcTest::heap()->new_space()->Grow();
2422 i::ScopedVector<char> source(1024);
2425 "var number_elements = %d;"
2426 "var elements = new Array();"
2428 " for (var i = 0; i < number_elements; i++) {"
2429 " elements[i] = [[{}], [1.1]];"
2431 " return elements[number_elements-1]"
2435 "%%OptimizeFunctionOnNextCall(f);"
2437 AllocationSite::kPretenureMinimumCreated);
2439 v8::Local<v8::Value> res = CompileRun(source.start());
2441 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2442 Handle<JSObject> int_array_handle =
2443 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2444 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2445 Handle<JSObject> double_array_handle =
2446 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2448 Handle<JSObject> o =
2449 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2450 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2451 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
2452 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
2453 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
2454 CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
// Verifies pretenuring of object-array literals: after enough allocations
// of [{}, {}, {}], the array and its elements backing store must live in
// old-pointer space.
2458 TEST(OptimizedPretenuringObjectArrayLiterals) {
2459 i::FLAG_allow_natives_syntax = true;
2460 i::FLAG_expose_gc = true;
2461 CcTest::InitializeVM();
2462 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2463 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2464 v8::HandleScope scope(CcTest::isolate());
2466 // Grow new space unitl maximum capacity reached.
2467 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2468 CcTest::heap()->new_space()->Grow();
2471 i::ScopedVector<char> source(1024);
2474 "var number_elements = %d;"
2475 "var elements = new Array(number_elements);"
2477 " for (var i = 0; i < number_elements; i++) {"
2478 " elements[i] = [{}, {}, {}];"
2480 " return elements[number_elements - 1];"
2484 "%%OptimizeFunctionOnNextCall(f);"
2486 AllocationSite::kPretenureMinimumCreated);
2488 v8::Local<v8::Value> res = CompileRun(source.start());
2490 Handle<JSObject> o =
2491 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2493 CHECK(CcTest::heap()->InOldPointerSpace(o->elements()));
2494 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// Verifies pretenuring of objects with mixed in-object properties
// ({a: {c: 2.2, d: {}}, b: 1.1}): pointer fields must end up in old-pointer
// space and boxed doubles in old-data space (unless the field is an unboxed
// double, in which case the raw value is checked instead).
2498 TEST(OptimizedPretenuringMixedInObjectProperties) {
2499 i::FLAG_allow_natives_syntax = true;
2500 i::FLAG_expose_gc = true;
2501 CcTest::InitializeVM();
2502 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2503 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2504 v8::HandleScope scope(CcTest::isolate());
2506 // Grow new space unitl maximum capacity reached.
2507 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2508 CcTest::heap()->new_space()->Grow();
2512 i::ScopedVector<char> source(1024);
2515 "var number_elements = %d;"
2516 "var elements = new Array(number_elements);"
2518 " for (var i = 0; i < number_elements; i++) {"
2519 " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2521 " return elements[number_elements - 1];"
2525 "%%OptimizeFunctionOnNextCall(f);"
2527 AllocationSite::kPretenureMinimumCreated);
2529 v8::Local<v8::Value> res = CompileRun(source.start());
2531 Handle<JSObject> o =
2532 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2534 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// idx1 = property 'a' (pointer), idx2 = property 'b' (double 1.1).
2535 FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2536 FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2537 CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(idx1)));
2538 if (!o->IsUnboxedDoubleField(idx2)) {
2539 CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(idx2)));
2541 CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
// The nested object {c: 2.2, d: {}} gets the same treatment.
2544 JSObject* inner_object =
2545 reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
2546 CHECK(CcTest::heap()->InOldPointerSpace(inner_object));
2547 if (!inner_object->IsUnboxedDoubleField(idx1)) {
2549 CcTest::heap()->InOldDataSpace(inner_object->RawFastPropertyAt(idx1)));
2551 CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
2553 CHECK(CcTest::heap()->InOldPointerSpace(
2554 inner_object->RawFastPropertyAt(idx2)));
// Verifies pretenuring of objects with only double properties
// ({a: 1.1, b: 2.2}): the object goes to old-pointer space and its
// properties backing store to old-data space.
2558 TEST(OptimizedPretenuringDoubleArrayProperties) {
2559 i::FLAG_allow_natives_syntax = true;
2560 i::FLAG_expose_gc = true;
2561 CcTest::InitializeVM();
2562 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2563 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2564 v8::HandleScope scope(CcTest::isolate());
2566 // Grow new space unitl maximum capacity reached.
2567 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2568 CcTest::heap()->new_space()->Grow();
2571 i::ScopedVector<char> source(1024);
2574 "var number_elements = %d;"
2575 "var elements = new Array(number_elements);"
2577 " for (var i = 0; i < number_elements; i++) {"
2578 " elements[i] = {a: 1.1, b: 2.2};"
2580 " return elements[i - 1];"
2584 "%%OptimizeFunctionOnNextCall(f);"
2586 AllocationSite::kPretenureMinimumCreated);
2588 v8::Local<v8::Value> res = CompileRun(source.start());
2590 Handle<JSObject> o =
2591 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2593 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2594 CHECK(CcTest::heap()->InOldDataSpace(o->properties()));
// Verifies pretenuring of double-array literals [1.1, 2.2, 3.3]: the array
// itself is tenured to old-pointer space and its double elements backing
// store to old-data space.
2598 TEST(OptimizedPretenuringdoubleArrayLiterals) {
2599 i::FLAG_allow_natives_syntax = true;
2600 i::FLAG_expose_gc = true;
2601 CcTest::InitializeVM();
2602 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2603 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2604 v8::HandleScope scope(CcTest::isolate());
2606 // Grow new space unitl maximum capacity reached.
2607 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2608 CcTest::heap()->new_space()->Grow();
2611 i::ScopedVector<char> source(1024);
2614 "var number_elements = %d;"
2615 "var elements = new Array(number_elements);"
2617 " for (var i = 0; i < number_elements; i++) {"
2618 " elements[i] = [1.1, 2.2, 3.3];"
2620 " return elements[number_elements - 1];"
2624 "%%OptimizeFunctionOnNextCall(f);"
2626 AllocationSite::kPretenureMinimumCreated);
2628 v8::Local<v8::Value> res = CompileRun(source.start());
2630 Handle<JSObject> o =
2631 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2633 CHECK(CcTest::heap()->InOldDataSpace(o->elements()));
2634 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// Verifies pretenuring of nested mixed array literals
// [[{}, {}, {}], [1.1, 2.2, 3.3]]: outer and object-array go to old-pointer
// space; the double array's elements go to old-data space.
2638 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2639 i::FLAG_allow_natives_syntax = true;
2640 i::FLAG_expose_gc = true;
2641 CcTest::InitializeVM();
2642 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2643 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2644 v8::HandleScope scope(CcTest::isolate());
2646 // Grow new space unitl maximum capacity reached.
2647 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2648 CcTest::heap()->new_space()->Grow();
2651 i::ScopedVector<char> source(1024);
2654 "var number_elements = 100;"
2655 "var elements = new Array(number_elements);"
2657 " for (var i = 0; i < number_elements; i++) {"
2658 " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
2660 " return elements[number_elements - 1];"
2664 "%%OptimizeFunctionOnNextCall(f);"
2667 v8::Local<v8::Value> res = CompileRun(source.start());
2669 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2670 Handle<JSObject> int_array_handle =
2671 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2672 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2673 Handle<JSObject> double_array_handle =
2674 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2676 Handle<JSObject> o =
2677 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2678 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2679 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
2680 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
2681 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
2682 CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
// Verifies pretenuring of nested object literals [[{},{},{}],[{},{},{}]]:
// the outer array, both inner arrays, and their elements must all be
// tenured into old-pointer space.
2686 TEST(OptimizedPretenuringNestedObjectLiterals) {
2687 i::FLAG_allow_natives_syntax = true;
2688 i::FLAG_expose_gc = true;
2689 CcTest::InitializeVM();
2690 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2691 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2692 v8::HandleScope scope(CcTest::isolate());
2694 // Grow new space unitl maximum capacity reached.
2695 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2696 CcTest::heap()->new_space()->Grow();
2699 i::ScopedVector<char> source(1024);
2702 "var number_elements = %d;"
2703 "var elements = new Array(number_elements);"
2705 " for (var i = 0; i < number_elements; i++) {"
2706 " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
2708 " return elements[number_elements - 1];"
2712 "%%OptimizeFunctionOnNextCall(f);"
2714 AllocationSite::kPretenureMinimumCreated);
2716 v8::Local<v8::Value> res = CompileRun(source.start());
2718 v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
2719 Handle<JSObject> int_array_handle_1 =
2720 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
2721 v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
2722 Handle<JSObject> int_array_handle_2 =
2723 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
2725 Handle<JSObject> o =
2726 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2727 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2728 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_1));
2729 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_1->elements()));
2730 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_2));
2731 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_2->elements()));
// Verifies pretenuring of nested double-array literals
// [[1.1,1.2,1.3],[2.1,2.2,2.3]]: the arrays are tenured to old-pointer
// space and their double backing stores to old-data space.
2735 TEST(OptimizedPretenuringNestedDoubleLiterals) {
2736 i::FLAG_allow_natives_syntax = true;
2737 i::FLAG_expose_gc = true;
2738 CcTest::InitializeVM();
2739 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2740 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2741 v8::HandleScope scope(CcTest::isolate());
2743 // Grow new space unitl maximum capacity reached.
2744 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2745 CcTest::heap()->new_space()->Grow();
2748 i::ScopedVector<char> source(1024);
2751 "var number_elements = %d;"
2752 "var elements = new Array(number_elements);"
2754 " for (var i = 0; i < number_elements; i++) {"
2755 " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
2757 " return elements[number_elements - 1];"
2761 "%%OptimizeFunctionOnNextCall(f);"
2763 AllocationSite::kPretenureMinimumCreated);
2765 v8::Local<v8::Value> res = CompileRun(source.start());
2767 v8::Local<v8::Value> double_array_1 =
2768 v8::Object::Cast(*res)->Get(v8_str("0"));
2769 Handle<JSObject> double_array_handle_1 =
2770 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
2771 v8::Local<v8::Value> double_array_2 =
2772 v8::Object::Cast(*res)->Get(v8_str("1"));
2773 Handle<JSObject> double_array_handle_2 =
2774 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
2776 Handle<JSObject> o =
2777 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2778 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2779 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_1));
2780 CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_1->elements()));
2781 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_2));
2782 CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_2->elements()));
// Make sure pretenuring feedback is gathered for constructed objects as
// well, not only for literals: objects created via `new foo()` in optimized
// code must end up tenured in old-pointer space.
2788 TEST(OptimizedPretenuringConstructorCalls) {
2789 if (!i::FLAG_pretenuring_call_new) {
2790 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
2793 i::FLAG_allow_natives_syntax = true;
2794 i::FLAG_expose_gc = true;
2795 CcTest::InitializeVM();
2796 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2797 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2798 v8::HandleScope scope(CcTest::isolate());
2800 // Grow new space unitl maximum capacity reached.
2801 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2802 CcTest::heap()->new_space()->Grow();
2805 i::ScopedVector<char> source(1024);
2806 // Call new is doing slack tracking for the first
2807 // JSFunction::kGenerousAllocationCount allocations, and we can't find
2808 // mementos during that time.
2811 "var number_elements = %d;"
2812 "var elements = new Array(number_elements);"
2818 " for (var i = 0; i < number_elements; i++) {"
2819 " elements[i] = new foo();"
2821 " return elements[number_elements - 1];"
2825 "%%OptimizeFunctionOnNextCall(f);"
2827 AllocationSite::kPretenureMinimumCreated +
2828 JSFunction::kGenerousAllocationCount);
2830 v8::Local<v8::Value> res = CompileRun(source.start());
2832 Handle<JSObject> o =
2833 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2835 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// Verifies pretenuring feedback through a CallNew site: objects created via
// `new g()` (with g defined inline) in optimized code must be tenured into
// old-pointer space.
2839 TEST(OptimizedPretenuringCallNew) {
2840 if (!i::FLAG_pretenuring_call_new) {
2841 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
2844 i::FLAG_allow_natives_syntax = true;
2845 i::FLAG_expose_gc = true;
2846 CcTest::InitializeVM();
2847 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2848 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2849 v8::HandleScope scope(CcTest::isolate());
2851 // Grow new space unitl maximum capacity reached.
2852 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2853 CcTest::heap()->new_space()->Grow();
2856 i::ScopedVector<char> source(1024);
2857 // Call new is doing slack tracking for the first
2858 // JSFunction::kGenerousAllocationCount allocations, and we can't find
2859 // mementos during that time.
2862 "var number_elements = %d;"
2863 "var elements = new Array(number_elements);"
2864 "function g() { this.a = 0; }"
2866 " for (var i = 0; i < number_elements; i++) {"
2867 " elements[i] = new g();"
2869 " return elements[number_elements - 1];"
2873 "%%OptimizeFunctionOnNextCall(f);"
2875 AllocationSite::kPretenureMinimumCreated +
2876 JSFunction::kGenerousAllocationCount);
2878 v8::Local<v8::Value> res = CompileRun(source.start());
2880 Handle<JSObject> o =
2881 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2882 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// Test regular array literals allocation: without pretenuring pressure the
// literal's elements backing store must be allocated in new space.
2887 TEST(OptimizedAllocationArrayLiterals) {
2888 i::FLAG_allow_natives_syntax = true;
2889 CcTest::InitializeVM();
2890 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2891 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2892 v8::HandleScope scope(CcTest::isolate());
2894 v8::Local<v8::Value> res = CompileRun(
2896 " var numbers = new Array(1, 2, 3);"
2897 " numbers[0] = 3.14;"
2901 "%OptimizeFunctionOnNextCall(f);"
// numbers[0] was overwritten with 3.14; truncated to int it reads 3.
2903 CHECK_EQ(static_cast<int>(3.14),
2904 v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
2906 Handle<JSObject> o =
2907 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2909 CHECK(CcTest::heap()->InNewSpace(o->elements()));
// Returns the number of transitions recorded in the given map's transition
// array. Helper for the transition-clearing tests below.
2913 static int CountMapTransitions(Map* map) {
2914 return map->transitions()->number_of_transitions();
// Test that map transitions are cleared and maps are collected with
// incremental marking as well: after creating 256 transitions off F and
// simulating incremental marking + full GC, only the one transition still
// reachable through 'root' may survive.
// NOTE(review): the TEST(...) header line is elided from this chunk.
2921 i::FLAG_stress_compaction = false;
2922 i::FLAG_allow_natives_syntax = true;
2923 i::FLAG_trace_incremental_marking = true;
2924 CcTest::InitializeVM();
2925 v8::HandleScope scope(CcTest::isolate());
2926 static const int transitions_count = 256;
2928 CompileRun("function F() {}");
2930 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
// Each distinct property name adds one transition to F's initial map.
2931 for (int i = 0; i < transitions_count; i++) {
2932 EmbeddedVector<char, 64> buffer;
2933 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2934 CompileRun(buffer.start());
2936 CompileRun("var root = new F;");
2939 Handle<JSObject> root =
2940 v8::Utils::OpenHandle(
2941 *v8::Handle<v8::Object>::Cast(
2942 CcTest::global()->Get(v8_str("root"))));
2944 // Count number of live transitions before marking.
2945 int transitions_before = CountMapTransitions(root->map());
2946 CompileRun("%DebugPrint(root);");
2947 CHECK_EQ(transitions_count, transitions_before);
2949 SimulateIncrementalMarking(CcTest::heap());
2950 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2952 // Count number of live transitions after marking. Note that one transition
2953 // is left, because 'o' still holds an instance of one transition target.
2954 int transitions_after = CountMapTransitions(root->map());
2955 CompileRun("%DebugPrint(root);");
2956 CHECK_EQ(1, transitions_after);
// Creates `transitions_count` map transitions on the (previously defined)
// constructor F by assigning a distinct property name to a fresh instance
// per iteration. AlwaysAllocateScope keeps allocation from triggering GC.
2961 static void AddTransitions(int transitions_count) {
2962 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2963 for (int i = 0; i < transitions_count; i++) {
2964 EmbeddedVector<char, 64> buffer;
2965 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2966 CompileRun(buffer.start());
// Looks up a global property by name and returns it as an internal
// Handle<JSObject>.
2971 static Handle<JSObject> GetByName(const char* name) {
2972 return v8::Utils::OpenHandle(
2973 *v8::Handle<v8::Object>::Cast(
2974 CcTest::global()->Get(v8_str(name))));
// Adds the Smi 23 under `property_name` to `object`, after arming the heap
// to run a global GC after `gc_count` allocations (via FLAG_gc_interval and
// set_allocation_timeout) — so the property store itself can trigger GC.
2978 static void AddPropertyTo(
2979 int gc_count, Handle<JSObject> object, const char* property_name) {
2980 Isolate* isolate = CcTest::i_isolate();
2981 Factory* factory = isolate->factory();
2982 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
2983 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
2984 i::FLAG_gc_interval = gc_count;
2985 i::FLAG_gc_global = true;
2986 CcTest::heap()->set_allocation_timeout(gc_count);
2987 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
2991 TEST(TransitionArrayShrinksDuringAllocToZero) {
2992 i::FLAG_stress_compaction = false;
2993 i::FLAG_allow_natives_syntax = true;
2994 CcTest::InitializeVM();
2995 v8::HandleScope scope(CcTest::isolate());
2996 static const int transitions_count = 10;
2997 CompileRun("function F() { }");
2998 AddTransitions(transitions_count);
2999 CompileRun("var root = new F;");
3000 Handle<JSObject> root = GetByName("root");
3002 // Count number of live transitions before marking.
3003 int transitions_before = CountMapTransitions(root->map());
3004 CHECK_EQ(transitions_count, transitions_before);
3007 CompileRun("o = new F;"
3009 root = GetByName("root");
3010 AddPropertyTo(2, root, "funny");
3011 CcTest::heap()->CollectGarbage(NEW_SPACE);
3013 // Count number of live transitions after marking. Note that one transition
3014 // is left, because 'o' still holds an instance of one transition target.
3015 int transitions_after = CountMapTransitions(
3016 Map::cast(root->map()->GetBackPointer()));
3017 CHECK_EQ(1, transitions_after);
3021 TEST(TransitionArrayShrinksDuringAllocToOne) {
3022 i::FLAG_stress_compaction = false;
3023 i::FLAG_allow_natives_syntax = true;
3024 CcTest::InitializeVM();
3025 v8::HandleScope scope(CcTest::isolate());
3026 static const int transitions_count = 10;
3027 CompileRun("function F() {}");
3028 AddTransitions(transitions_count);
3029 CompileRun("var root = new F;");
3030 Handle<JSObject> root = GetByName("root");
3032 // Count number of live transitions before marking.
3033 int transitions_before = CountMapTransitions(root->map());
3034 CHECK_EQ(transitions_count, transitions_before);
3036 root = GetByName("root");
3037 AddPropertyTo(2, root, "funny");
3038 CcTest::heap()->CollectGarbage(NEW_SPACE);
3040 // Count number of live transitions after marking. Note that one transition
3041 // is left, because 'o' still holds an instance of one transition target.
3042 int transitions_after = CountMapTransitions(
3043 Map::cast(root->map()->GetBackPointer()));
3044 CHECK_EQ(2, transitions_after);
3048 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3049 i::FLAG_stress_compaction = false;
3050 i::FLAG_allow_natives_syntax = true;
3051 CcTest::InitializeVM();
3052 v8::HandleScope scope(CcTest::isolate());
3053 static const int transitions_count = 10;
3054 CompileRun("function F() {}");
3055 AddTransitions(transitions_count);
3056 CompileRun("var root = new F;");
3057 Handle<JSObject> root = GetByName("root");
3059 // Count number of live transitions before marking.
3060 int transitions_before = CountMapTransitions(root->map());
3061 CHECK_EQ(transitions_count, transitions_before);
3063 root = GetByName("root");
3064 AddPropertyTo(0, root, "prop9");
3065 CcTest::i_isolate()->heap()->CollectGarbage(OLD_POINTER_SPACE);
3067 // Count number of live transitions after marking. Note that one transition
3068 // is left, because 'o' still holds an instance of one transition target.
3069 int transitions_after = CountMapTransitions(
3070 Map::cast(root->map()->GetBackPointer()));
3071 CHECK_EQ(1, transitions_after);
3075 TEST(TransitionArraySimpleToFull) {
3076 i::FLAG_stress_compaction = false;
3077 i::FLAG_allow_natives_syntax = true;
3078 CcTest::InitializeVM();
3079 v8::HandleScope scope(CcTest::isolate());
3080 static const int transitions_count = 1;
3081 CompileRun("function F() {}");
3082 AddTransitions(transitions_count);
3083 CompileRun("var root = new F;");
3084 Handle<JSObject> root = GetByName("root");
3086 // Count number of live transitions before marking.
3087 int transitions_before = CountMapTransitions(root->map());
3088 CHECK_EQ(transitions_count, transitions_before);
3090 CompileRun("o = new F;"
3092 root = GetByName("root");
3093 DCHECK(root->map()->transitions()->IsSimpleTransition());
3094 AddPropertyTo(2, root, "happy");
3096 // Count number of live transitions after marking. Note that one transition
3097 // is left, because 'o' still holds an instance of one transition target.
3098 int transitions_after = CountMapTransitions(
3099 Map::cast(root->map()->GetBackPointer()));
3100 CHECK_EQ(1, transitions_after);
3105 TEST(Regress2143a) {
3106 i::FLAG_collect_maps = true;
3107 i::FLAG_incremental_marking = true;
3108 CcTest::InitializeVM();
3109 v8::HandleScope scope(CcTest::isolate());
3111 // Prepare a map transition from the root object together with a yet
3112 // untransitioned root object.
3113 CompileRun("var root = new Object;"
3115 "root = new Object;");
3117 SimulateIncrementalMarking(CcTest::heap());
3119 // Compile a StoreIC that performs the prepared map transition. This
3120 // will restart incremental marking and should make sure the root is
3121 // marked grey again.
3122 CompileRun("function f(o) {"
3128 // This bug only triggers with aggressive IC clearing.
3129 CcTest::heap()->AgeInlineCaches();
3131 // Explicitly request GC to perform final marking step and sweeping.
3132 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3134 Handle<JSObject> root =
3135 v8::Utils::OpenHandle(
3136 *v8::Handle<v8::Object>::Cast(
3137 CcTest::global()->Get(v8_str("root"))));
3139 // The root object should be in a sane state.
3140 CHECK(root->IsJSObject());
3141 CHECK(root->map()->IsMap());
3145 TEST(Regress2143b) {
3146 i::FLAG_collect_maps = true;
3147 i::FLAG_incremental_marking = true;
3148 i::FLAG_allow_natives_syntax = true;
3149 CcTest::InitializeVM();
3150 v8::HandleScope scope(CcTest::isolate());
3152 // Prepare a map transition from the root object together with a yet
3153 // untransitioned root object.
3154 CompileRun("var root = new Object;"
3156 "root = new Object;");
3158 SimulateIncrementalMarking(CcTest::heap());
3160 // Compile an optimized LStoreNamedField that performs the prepared
3161 // map transition. This will restart incremental marking and should
3162 // make sure the root is marked grey again.
3163 CompileRun("function f(o) {"
3168 "%OptimizeFunctionOnNextCall(f);"
3170 "%DeoptimizeFunction(f);");
3172 // This bug only triggers with aggressive IC clearing.
3173 CcTest::heap()->AgeInlineCaches();
3175 // Explicitly request GC to perform final marking step and sweeping.
3176 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3178 Handle<JSObject> root =
3179 v8::Utils::OpenHandle(
3180 *v8::Handle<v8::Object>::Cast(
3181 CcTest::global()->Get(v8_str("root"))));
3183 // The root object should be in a sane state.
3184 CHECK(root->IsJSObject());
3185 CHECK(root->map()->IsMap());
3189 TEST(ReleaseOverReservedPages) {
3190 if (FLAG_never_compact) return;
3191 i::FLAG_trace_gc = true;
3192 // The optimizer can allocate stuff, messing up the test.
3193 i::FLAG_crankshaft = false;
3194 i::FLAG_always_opt = false;
3195 CcTest::InitializeVM();
3196 Isolate* isolate = CcTest::i_isolate();
3197 Factory* factory = isolate->factory();
3198 Heap* heap = isolate->heap();
3199 v8::HandleScope scope(CcTest::isolate());
3200 static const int number_of_test_pages = 20;
3202 // Prepare many pages with low live-bytes count.
3203 PagedSpace* old_pointer_space = heap->old_pointer_space();
3204 CHECK_EQ(1, old_pointer_space->CountTotalPages());
3205 for (int i = 0; i < number_of_test_pages; i++) {
3206 AlwaysAllocateScope always_allocate(isolate);
3207 SimulateFullSpace(old_pointer_space);
3208 factory->NewFixedArray(1, TENURED);
3210 CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
3212 // Triggering one GC will cause a lot of garbage to be discovered but
3213 // even spread across all allocated pages.
3214 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
3215 "triggered for preparation");
3216 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
3218 // Triggering subsequent GCs should cause at least half of the pages
3219 // to be released to the OS after at most two cycles.
3220 heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
3221 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
3222 heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
3223 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
3225 // Triggering a last-resort GC should cause all pages to be released to the
3226 // OS so that other processes can seize the memory. If we get a failure here
3227 // where there are 2 pages left instead of 1, then we should increase the
3228 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3229 // first page should be small in order to reduce memory used when the VM
3230 // boots, but if the 20 small arrays don't fit on the first page then that's
3231 // an indication that it is too small.
3232 heap->CollectAllAvailableGarbage("triggered really hard");
3233 CHECK_EQ(1, old_pointer_space->CountTotalPages());
3238 i::FLAG_stress_compaction = false;
3239 CcTest::InitializeVM();
3240 Isolate* isolate = CcTest::i_isolate();
3241 Factory* factory = isolate->factory();
3242 v8::HandleScope scope(CcTest::isolate());
3243 Handle<String> slice(CcTest::heap()->empty_string());
3246 // Generate a parent that lives in new-space.
3247 v8::HandleScope inner_scope(CcTest::isolate());
3248 const char* c = "This text is long enough to trigger sliced strings.";
3249 Handle<String> s = factory->NewStringFromAsciiChecked(c);
3250 CHECK(s->IsSeqOneByteString());
3251 CHECK(CcTest::heap()->InNewSpace(*s));
3253 // Generate a sliced string that is based on the above parent and
3254 // lives in old-space.
3255 SimulateFullSpace(CcTest::heap()->new_space());
3256 AlwaysAllocateScope always_allocate(isolate);
3257 Handle<String> t = factory->NewProperSubString(s, 5, 35);
3258 CHECK(t->IsSlicedString());
3259 CHECK(!CcTest::heap()->InNewSpace(*t));
3260 *slice.location() = *t.location();
3263 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
3264 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3265 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
// Smoke test for SharedFunctionInfo::Print (only available in OBJECT_PRINT
// builds); the matching #endif was visible in the corrupted source, so the
// guard is restored here.
#ifdef OBJECT_PRINT
TEST(PrintSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "f = function() { return 987654321; }\n"
                       "g = function() { return 123456789; }\n";
  CompileRun(source);
  Handle<JSFunction> g =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("g"))));

  OFStream os(stdout);
  g->shared()->Print(os);
  os << std::endl;
}
#endif  // OBJECT_PRINT
3289 CcTest::InitializeVM();
3290 v8::HandleScope scope(CcTest::isolate());
3292 v8::Handle<v8::String> value = v8_str("val string");
3293 Smi* hash = Smi::FromInt(321);
3294 Factory* factory = CcTest::i_isolate()->factory();
3296 for (int i = 0; i < 2; i++) {
3297 // Store identity hash first and common hidden property second.
3298 v8::Handle<v8::Object> obj = v8::Object::New(CcTest::isolate());
3299 Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
3300 CHECK(internal_obj->HasFastProperties());
3302 // In the first iteration, set hidden value first and identity hash second.
3303 // In the second iteration, reverse the order.
3304 if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
3305 JSObject::SetIdentityHash(internal_obj, handle(hash, CcTest::i_isolate()));
3306 if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
3310 internal_obj->GetHiddenProperty(factory->identity_hash_string()));
3311 CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
3314 FieldIndex index = FieldIndex::ForDescriptor(internal_obj->map(), 0);
3315 ObjectHashTable* hashtable = ObjectHashTable::cast(
3316 internal_obj->RawFastPropertyAt(index));
3317 // HashTable header (5) and 4 initial entries (8).
3318 CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
3323 TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3324 if (i::FLAG_always_opt) return;
3325 CcTest::InitializeVM();
3326 v8::HandleScope scope(CcTest::isolate());
3327 v8::Local<v8::Value> fun1, fun2;
3331 CompileRun("function fun() {};");
3332 fun1 = env->Global()->Get(v8_str("fun"));
3337 CompileRun("function fun() {};");
3338 fun2 = env->Global()->Get(v8_str("fun"));
3341 // Prepare function f that contains type feedback for closures
3342 // originating from two different native contexts.
3343 CcTest::global()->Set(v8_str("fun1"), fun1);
3344 CcTest::global()->Set(v8_str("fun2"), fun2);
3345 CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3347 Handle<JSFunction> f =
3348 v8::Utils::OpenHandle(
3349 *v8::Handle<v8::Function>::Cast(
3350 CcTest::global()->Get(v8_str("f"))));
3352 Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
3354 int expected_slots = 2;
3355 CHECK_EQ(expected_slots, feedback_vector->ICSlots());
3358 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot1))->IsWeakCell());
3359 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot2))->IsWeakCell());
3361 SimulateIncrementalMarking(CcTest::heap());
3362 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3364 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot1)))
3366 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot2)))
3371 static Code* FindFirstIC(Code* code, Code::Kind kind) {
3372 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3373 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
3374 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3375 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3376 RelocInfo* info = it.rinfo();
3377 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3378 if (target->is_inline_cache_stub() && target->kind() == kind) {
3386 static void CheckVectorIC(Handle<JSFunction> f, int ic_slot_index,
3387 InlineCacheState desired_state) {
3388 Handle<TypeFeedbackVector> vector =
3389 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3390 FeedbackVectorICSlot slot(ic_slot_index);
3391 LoadICNexus nexus(vector, slot);
3392 CHECK(nexus.StateFromFeedback() == desired_state);
3396 static void CheckVectorICCleared(Handle<JSFunction> f, int ic_slot_index) {
3397 Handle<TypeFeedbackVector> vector =
3398 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3399 FeedbackVectorICSlot slot(ic_slot_index);
3400 LoadICNexus nexus(vector, slot);
3401 CHECK(IC::IsCleared(&nexus));
3405 TEST(IncrementalMarkingPreservesMonomorphicIC) {
3406 if (i::FLAG_always_opt) return;
3407 CcTest::InitializeVM();
3408 v8::HandleScope scope(CcTest::isolate());
3410 // Prepare function f that contains a monomorphic IC for object
3411 // originating from the same native context.
3412 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3413 "function f(o) { return o.x; } f(obj); f(obj);");
3414 Handle<JSFunction> f =
3415 v8::Utils::OpenHandle(
3416 *v8::Handle<v8::Function>::Cast(
3417 CcTest::global()->Get(v8_str("f"))));
3419 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3420 if (FLAG_vector_ics) {
3421 CheckVectorIC(f, 0, MONOMORPHIC);
3422 CHECK(ic_before->ic_state() == DEFAULT);
3424 CHECK(ic_before->ic_state() == MONOMORPHIC);
3427 SimulateIncrementalMarking(CcTest::heap());
3428 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3430 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3431 if (FLAG_vector_ics) {
3432 CheckVectorIC(f, 0, MONOMORPHIC);
3433 CHECK(ic_after->ic_state() == DEFAULT);
3435 CHECK(ic_after->ic_state() == MONOMORPHIC);
3440 TEST(IncrementalMarkingClearsMonomorphicIC) {
3441 if (i::FLAG_always_opt) return;
3442 CcTest::InitializeVM();
3443 v8::HandleScope scope(CcTest::isolate());
3444 v8::Local<v8::Value> obj1;
3448 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3449 obj1 = env->Global()->Get(v8_str("obj"));
3452 // Prepare function f that contains a monomorphic IC for object
3453 // originating from a different native context.
3454 CcTest::global()->Set(v8_str("obj1"), obj1);
3455 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
3456 Handle<JSFunction> f = v8::Utils::OpenHandle(
3457 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3459 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3460 if (FLAG_vector_ics) {
3461 CheckVectorIC(f, 0, MONOMORPHIC);
3462 CHECK(ic_before->ic_state() == DEFAULT);
3464 CHECK(ic_before->ic_state() == MONOMORPHIC);
3467 // Fire context dispose notification.
3468 CcTest::isolate()->ContextDisposedNotification();
3469 SimulateIncrementalMarking(CcTest::heap());
3470 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3472 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3473 if (FLAG_vector_ics) {
3474 CheckVectorICCleared(f, 0);
3475 CHECK(ic_after->ic_state() == DEFAULT);
3477 CHECK(IC::IsCleared(ic_after));
3482 TEST(IncrementalMarkingPreservesPolymorphicIC) {
3483 if (i::FLAG_always_opt) return;
3484 CcTest::InitializeVM();
3485 v8::HandleScope scope(CcTest::isolate());
3486 v8::Local<v8::Value> obj1, obj2;
3490 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3491 obj1 = env->Global()->Get(v8_str("obj"));
3496 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3497 obj2 = env->Global()->Get(v8_str("obj"));
3500 // Prepare function f that contains a polymorphic IC for objects
3501 // originating from two different native contexts.
3502 CcTest::global()->Set(v8_str("obj1"), obj1);
3503 CcTest::global()->Set(v8_str("obj2"), obj2);
3504 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3505 Handle<JSFunction> f = v8::Utils::OpenHandle(
3506 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3508 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3509 if (FLAG_vector_ics) {
3510 CheckVectorIC(f, 0, POLYMORPHIC);
3511 CHECK(ic_before->ic_state() == DEFAULT);
3513 CHECK(ic_before->ic_state() == POLYMORPHIC);
3516 // Fire context dispose notification.
3517 SimulateIncrementalMarking(CcTest::heap());
3518 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3520 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3521 if (FLAG_vector_ics) {
3522 CheckVectorIC(f, 0, POLYMORPHIC);
3523 CHECK(ic_after->ic_state() == DEFAULT);
3525 CHECK(ic_after->ic_state() == POLYMORPHIC);
3530 TEST(IncrementalMarkingClearsPolymorphicIC) {
3531 if (i::FLAG_always_opt) return;
3532 CcTest::InitializeVM();
3533 v8::HandleScope scope(CcTest::isolate());
3534 v8::Local<v8::Value> obj1, obj2;
3538 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3539 obj1 = env->Global()->Get(v8_str("obj"));
3544 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3545 obj2 = env->Global()->Get(v8_str("obj"));
3548 // Prepare function f that contains a polymorphic IC for objects
3549 // originating from two different native contexts.
3550 CcTest::global()->Set(v8_str("obj1"), obj1);
3551 CcTest::global()->Set(v8_str("obj2"), obj2);
3552 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3553 Handle<JSFunction> f = v8::Utils::OpenHandle(
3554 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3556 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3557 if (FLAG_vector_ics) {
3558 CheckVectorIC(f, 0, POLYMORPHIC);
3559 CHECK(ic_before->ic_state() == DEFAULT);
3561 CHECK(ic_before->ic_state() == POLYMORPHIC);
3564 // Fire context dispose notification.
3565 CcTest::isolate()->ContextDisposedNotification();
3566 SimulateIncrementalMarking(CcTest::heap());
3567 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3569 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3570 if (FLAG_vector_ics) {
3571 CheckVectorICCleared(f, 0);
3572 CHECK(ic_before->ic_state() == DEFAULT);
3574 CHECK(IC::IsCleared(ic_after));
3579 class SourceResource : public v8::String::ExternalOneByteStringResource {
3581 explicit SourceResource(const char* data)
3582 : data_(data), length_(strlen(data)) { }
3584 virtual void Dispose() {
3585 i::DeleteArray(data_);
3589 const char* data() const { return data_; }
3591 size_t length() const { return length_; }
3593 bool IsDisposed() { return data_ == NULL; }
3601 void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
3602 const char* accessor) {
3603 // Test that the data retained by the Error.stack accessor is released
3604 // after the first time the accessor is fired. We use external string
3605 // to check whether the data is being released since the external string
3606 // resource's callback is fired when the external string is GC'ed.
3607 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3608 v8::HandleScope scope(isolate);
3609 SourceResource* resource = new SourceResource(i::StrDup(source));
3611 v8::HandleScope scope(isolate);
3612 v8::Handle<v8::String> source_string =
3613 v8::String::NewExternal(isolate, resource);
3614 i_isolate->heap()->CollectAllAvailableGarbage();
3615 v8::Script::Compile(source_string)->Run();
3616 CHECK(!resource->IsDisposed());
3618 // i_isolate->heap()->CollectAllAvailableGarbage();
3619 CHECK(!resource->IsDisposed());
3621 CompileRun(accessor);
3622 i_isolate->heap()->CollectAllAvailableGarbage();
3624 // External source has been released.
3625 CHECK(resource->IsDisposed());
3630 UNINITIALIZED_TEST(ReleaseStackTraceData) {
3631 if (i::FLAG_always_opt) {
3632 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
3633 // See: https://codereview.chromium.org/181833004/
3636 FLAG_use_ic = false; // ICs retain objects.
3637 FLAG_concurrent_recompilation = false;
3638 v8::Isolate* isolate = v8::Isolate::New();
3640 v8::Isolate::Scope isolate_scope(isolate);
3641 v8::HandleScope handle_scope(isolate);
3642 v8::Context::New(isolate)->Enter();
3643 static const char* source1 = "var error = null; "
3644 /* Normal Error */ "try { "
3645 " throw new Error(); "
3649 static const char* source2 = "var error = null; "
3650 /* Stack overflow */ "try { "
3651 " (function f() { f(); })(); "
3655 static const char* source3 = "var error = null; "
3656 /* Normal Error */ "try { "
3657 /* as prototype */ " throw new Error(); "
3660 " error.__proto__ = e; "
3662 static const char* source4 = "var error = null; "
3663 /* Stack overflow */ "try { "
3664 /* as prototype */ " (function f() { f(); })(); "
3667 " error.__proto__ = e; "
3669 static const char* getter = "error.stack";
3670 static const char* setter = "error.stack = 0";
3672 ReleaseStackTraceDataTest(isolate, source1, setter);
3673 ReleaseStackTraceDataTest(isolate, source2, setter);
3674 // We do not test source3 and source4 with setter, since the setter is
3675 // supposed to (untypically) write to the receiver, not the holder. This is
3676 // to emulate the behavior of a data property.
3678 ReleaseStackTraceDataTest(isolate, source1, getter);
3679 ReleaseStackTraceDataTest(isolate, source2, getter);
3680 ReleaseStackTraceDataTest(isolate, source3, getter);
3681 ReleaseStackTraceDataTest(isolate, source4, getter);
3687 TEST(Regress159140) {
3688 i::FLAG_allow_natives_syntax = true;
3689 i::FLAG_flush_code_incrementally = true;
3690 CcTest::InitializeVM();
3691 Isolate* isolate = CcTest::i_isolate();
3692 Heap* heap = isolate->heap();
3693 HandleScope scope(isolate);
3695 // Perform one initial GC to enable code flushing.
3696 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3698 // Prepare several closures that are all eligible for code flushing
3699 // because all reachable ones are not optimized. Make sure that the
3700 // optimized code object is directly reachable through a handle so
3701 // that it is marked black during incremental marking.
3704 HandleScope inner_scope(isolate);
3705 CompileRun("function h(x) {}"
3706 "function mkClosure() {"
3707 " return function(x) { return x + 1; };"
3709 "var f = mkClosure();"
3710 "var g = mkClosure();"
3714 "%OptimizeFunctionOnNextCall(f); f(3);"
3715 "%OptimizeFunctionOnNextCall(h); h(3);");
3717 Handle<JSFunction> f =
3718 v8::Utils::OpenHandle(
3719 *v8::Handle<v8::Function>::Cast(
3720 CcTest::global()->Get(v8_str("f"))));
3721 CHECK(f->is_compiled());
3722 CompileRun("f = null;");
3724 Handle<JSFunction> g =
3725 v8::Utils::OpenHandle(
3726 *v8::Handle<v8::Function>::Cast(
3727 CcTest::global()->Get(v8_str("g"))));
3728 CHECK(g->is_compiled());
3729 const int kAgingThreshold = 6;
3730 for (int i = 0; i < kAgingThreshold; i++) {
3731 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3734 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
3737 // Simulate incremental marking so that the functions are enqueued as
3738 // code flushing candidates. Then optimize one function. Finally
3739 // finish the GC to complete code flushing.
3740 SimulateIncrementalMarking(heap);
3741 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
3742 heap->CollectAllGarbage(Heap::kNoGCFlags);
3744 // Unoptimized code is missing and the deoptimizer will go ballistic.
3745 CompileRun("g('bozo');");
3749 TEST(Regress165495) {
3750 i::FLAG_allow_natives_syntax = true;
3751 i::FLAG_flush_code_incrementally = true;
3752 CcTest::InitializeVM();
3753 Isolate* isolate = CcTest::i_isolate();
3754 Heap* heap = isolate->heap();
3755 HandleScope scope(isolate);
3757 // Perform one initial GC to enable code flushing.
3758 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3760 // Prepare an optimized closure that the optimized code map will get
3761 // populated. Then age the unoptimized code to trigger code flushing
3762 // but make sure the optimized code is unreachable.
3764 HandleScope inner_scope(isolate);
3765 CompileRun("function mkClosure() {"
3766 " return function(x) { return x + 1; };"
3768 "var f = mkClosure();"
3770 "%OptimizeFunctionOnNextCall(f); f(3);");
3772 Handle<JSFunction> f =
3773 v8::Utils::OpenHandle(
3774 *v8::Handle<v8::Function>::Cast(
3775 CcTest::global()->Get(v8_str("f"))));
3776 CHECK(f->is_compiled());
3777 const int kAgingThreshold = 6;
3778 for (int i = 0; i < kAgingThreshold; i++) {
3779 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3782 CompileRun("f = null;");
3785 // Simulate incremental marking so that unoptimized code is flushed
3786 // even though it still is cached in the optimized code map.
3787 SimulateIncrementalMarking(heap);
3788 heap->CollectAllGarbage(Heap::kNoGCFlags);
3790 // Make a new closure that will get code installed from the code map.
3791 // Unoptimized code is missing and the deoptimizer will go ballistic.
3792 CompileRun("var g = mkClosure(); g('bozo');");
3796 TEST(Regress169209) {
3797 i::FLAG_stress_compaction = false;
3798 i::FLAG_allow_natives_syntax = true;
3799 i::FLAG_flush_code_incrementally = true;
3801 CcTest::InitializeVM();
3802 Isolate* isolate = CcTest::i_isolate();
3803 Heap* heap = isolate->heap();
3804 HandleScope scope(isolate);
3806 // Perform one initial GC to enable code flushing.
3807 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3809 // Prepare a shared function info eligible for code flushing for which
3810 // the unoptimized code will be replaced during optimization.
3811 Handle<SharedFunctionInfo> shared1;
3813 HandleScope inner_scope(isolate);
3814 CompileRun("function f() { return 'foobar'; }"
3815 "function g(x) { if (x) f(); }"
3820 Handle<JSFunction> f =
3821 v8::Utils::OpenHandle(
3822 *v8::Handle<v8::Function>::Cast(
3823 CcTest::global()->Get(v8_str("f"))));
3824 CHECK(f->is_compiled());
3825 const int kAgingThreshold = 6;
3826 for (int i = 0; i < kAgingThreshold; i++) {
3827 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3830 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
3833 // Prepare a shared function info eligible for code flushing that will
3834 // represent the dangling tail of the candidate list.
3835 Handle<SharedFunctionInfo> shared2;
3837 HandleScope inner_scope(isolate);
3838 CompileRun("function flushMe() { return 0; }"
3841 Handle<JSFunction> f =
3842 v8::Utils::OpenHandle(
3843 *v8::Handle<v8::Function>::Cast(
3844 CcTest::global()->Get(v8_str("flushMe"))));
3845 CHECK(f->is_compiled());
3846 const int kAgingThreshold = 6;
3847 for (int i = 0; i < kAgingThreshold; i++) {
3848 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3851 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
3854 // Simulate incremental marking and collect code flushing candidates.
3855 SimulateIncrementalMarking(heap);
3856 CHECK(shared1->code()->gc_metadata() != NULL);
3858 // Optimize function and make sure the unoptimized code is replaced.
3862 CompileRun("%OptimizeFunctionOnNextCall(g);"
3865 // Finish garbage collection cycle.
3866 heap->CollectAllGarbage(Heap::kNoGCFlags);
3867 CHECK(shared1->code()->gc_metadata() == NULL);
3871 TEST(Regress169928) {
3872 i::FLAG_allow_natives_syntax = true;
3873 i::FLAG_crankshaft = false;
3874 CcTest::InitializeVM();
3875 Isolate* isolate = CcTest::i_isolate();
3876 Factory* factory = isolate->factory();
3877 v8::HandleScope scope(CcTest::isolate());
3879 // Some flags turn Scavenge collections into Mark-sweep collections
3880 // and hence are incompatible with this test case.
3881 if (FLAG_gc_global || FLAG_stress_compaction) return;
3883 // Prepare the environment
3884 CompileRun("function fastliteralcase(literal, value) {"
3885 " literal[0] = value;"
3888 "function get_standard_literal() {"
3889 " var literal = [1, 2, 3];"
3892 "obj = fastliteralcase(get_standard_literal(), 1);"
3893 "obj = fastliteralcase(get_standard_literal(), 1.5);"
3894 "obj = fastliteralcase(get_standard_literal(), 2);");
3897 v8::Local<v8::String> mote_code_string =
3898 v8_str("fastliteralcase(mote, 2.5);");
3900 v8::Local<v8::String> array_name = v8_str("mote");
3901 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
3903 // First make sure we flip spaces
3904 CcTest::heap()->CollectGarbage(NEW_SPACE);
3906 // Allocate the object.
3907 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
3908 array_data->set(0, Smi::FromInt(1));
3909 array_data->set(1, Smi::FromInt(2));
3911 AllocateAllButNBytes(CcTest::heap()->new_space(),
3912 JSArray::kSize + AllocationMemento::kSize +
3915 Handle<JSArray> array = factory->NewJSArrayWithElements(array_data,
3919 CHECK_EQ(Smi::FromInt(2), array->length());
3920 CHECK(array->HasFastSmiOrObjectElements());
3922 // We need filler the size of AllocationMemento object, plus an extra
3923 // fill pointer value.
3924 HeapObject* obj = NULL;
3925 AllocationResult allocation = CcTest::heap()->new_space()->AllocateRaw(
3926 AllocationMemento::kSize + kPointerSize);
3927 CHECK(allocation.To(&obj));
3928 Address addr_obj = obj->address();
3929 CcTest::heap()->CreateFillerObjectAt(
3930 addr_obj, AllocationMemento::kSize + kPointerSize);
3932 // Give the array a name, making sure not to allocate strings.
3933 v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
3934 CcTest::global()->Set(array_name, array_obj);
3936 // This should crash with a protection violation if we are running a build
3938 AlwaysAllocateScope aa_scope(isolate);
3939 v8::Script::Compile(mote_code_string)->Run();
// Regression test: an unoptimized function that is enqueued as a code-flushing
// candidate and then optimized must be removed from the candidate list, or a
// later full GC corrupts the heap (crbug.com/168801).
// NOTE(review): this listing elides some original lines (embedded line numbers
// jump), so braces/string fragments below can look unbalanced.
3943 TEST(Regress168801) {
3944 if (i::FLAG_never_compact) return;
3945 i::FLAG_always_compact = true;
3946 i::FLAG_cache_optimized_code = false;
3947 i::FLAG_allow_natives_syntax = true;
3948 i::FLAG_flush_code_incrementally = true;
3949 CcTest::InitializeVM();
3950 Isolate* isolate = CcTest::i_isolate();
3951 Heap* heap = isolate->heap();
3952 HandleScope scope(isolate);
3954 // Perform one initial GC to enable code flushing.
3955 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3957 // Ensure the code ends up on an evacuation candidate.
3958 SimulateFullSpace(heap->code_space());
3960 // Prepare an unoptimized function that is eligible for code flushing.
3961 Handle<JSFunction> function;
3963 HandleScope inner_scope(isolate);
3964 CompileRun("function mkClosure() {"
3965 " return function(x) { return x + 1; };"
3967 "var f = mkClosure();"
3970 Handle<JSFunction> f =
3971 v8::Utils::OpenHandle(
3972 *v8::Handle<v8::Function>::Cast(
3973 CcTest::global()->Get(v8_str("f"))));
3974 CHECK(f->is_compiled());
// Artificially age the code so the flusher considers it a flushing candidate.
3975 const int kAgingThreshold = 6;
3976 for (int i = 0; i < kAgingThreshold; i++) {
3977 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3980 function = inner_scope.CloseAndEscape(handle(*f, isolate));
3983 // Simulate incremental marking so that unoptimized function is enqueued as a
3984 // candidate for code flushing. The shared function info however will not be
3985 // explicitly enqueued.
3986 SimulateIncrementalMarking(heap);
3988 // Now optimize the function so that it is taken off the candidate list.
3990 HandleScope inner_scope(isolate);
3991 CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
3994 // This cycle will bust the heap and subsequent cycles will go ballistic.
3995 heap->CollectAllGarbage(Heap::kNoGCFlags);
3996 heap->CollectAllGarbage(Heap::kNoGCFlags);
// Regression test: loading the debugger disables code flushing; a function
// already enqueued as a flushing candidate must survive subsequent full GCs
// (crbug.com/173458). Mirrors Regress168801 but triggers the debugger instead
// of optimization. Some original lines are elided in this listing.
4000 TEST(Regress173458) {
4001 if (i::FLAG_never_compact) return;
4002 i::FLAG_always_compact = true;
4003 i::FLAG_cache_optimized_code = false;
4004 i::FLAG_allow_natives_syntax = true;
4005 i::FLAG_flush_code_incrementally = true;
4006 CcTest::InitializeVM();
4007 Isolate* isolate = CcTest::i_isolate();
4008 Heap* heap = isolate->heap();
4009 HandleScope scope(isolate);
4011 // Perform one initial GC to enable code flushing.
4012 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4014 // Ensure the code ends up on an evacuation candidate.
4015 SimulateFullSpace(heap->code_space());
4017 // Prepare an unoptimized function that is eligible for code flushing.
4018 Handle<JSFunction> function;
4020 HandleScope inner_scope(isolate);
4021 CompileRun("function mkClosure() {"
4022 " return function(x) { return x + 1; };"
4024 "var f = mkClosure();"
4027 Handle<JSFunction> f =
4028 v8::Utils::OpenHandle(
4029 *v8::Handle<v8::Function>::Cast(
4030 CcTest::global()->Get(v8_str("f"))));
4031 CHECK(f->is_compiled());
// Age the code past the flushing threshold so it becomes a candidate.
4032 const int kAgingThreshold = 6;
4033 for (int i = 0; i < kAgingThreshold; i++) {
4034 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4037 function = inner_scope.CloseAndEscape(handle(*f, isolate));
4040 // Simulate incremental marking so that unoptimized function is enqueued as a
4041 // candidate for code flushing. The shared function info however will not be
4042 // explicitly enqueued.
4043 SimulateIncrementalMarking(heap);
4045 // Now enable the debugger which in turn will disable code flushing.
4046 CHECK(isolate->debug()->Load());
4048 // This cycle will bust the heap and subsequent cycles will go ballistic.
4049 heap->CollectAllGarbage(Heap::kNoGCFlags);
4050 heap->CollectAllGarbage(Heap::kNoGCFlags);
// No-op ObjectVisitor used by TEST(DeferredHandles) purely to exercise the
// handle-scope iteration machinery without inspecting any pointers.
4054 class DummyVisitor : public ObjectVisitor {
4056 void VisitPointers(Object** start, Object** end) { }
// Exercises DeferredHandleScope across a handle-block boundary: fills the
// current handle block exactly to its limit, then iterates all handles with a
// dummy visitor and detaches/deletes the deferred handles.
4060 TEST(DeferredHandles) {
4061 CcTest::InitializeVM();
4062 Isolate* isolate = CcTest::i_isolate();
4063 Heap* heap = isolate->heap();
4064 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
4065 HandleScopeData* data = isolate->handle_scope_data();
4066 Handle<Object> init(heap->empty_string(), isolate);
// Allocate handles until the current block is exactly full.
4067 while (data->next < data->limit) {
4068 Handle<Object> obj(heap->empty_string(), isolate);
4070 // An entire block of handles has been filled.
4071 // Next handle would require a new block.
4072 DCHECK(data->next == data->limit);
4074 DeferredHandleScope deferred(isolate);
4075 DummyVisitor visitor;
4076 isolate->handle_scope_implementer()->Iterate(&visitor);
4077 delete deferred.Detach();
// Checks that a single large incremental-marking step (100 MB budget) is
// enough to completely mark a sparse 10M-element array.
4081 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
4082 CcTest::InitializeVM();
4083 v8::HandleScope scope(CcTest::isolate());
4084 CompileRun("function f(n) {"
4085 " var a = new Array(n);"
4086 " for (var i = 0; i < n; i += 100) a[i] = i;"
4088 "f(10 * 1024 * 1024);");
4089 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4090 if (marking->IsStopped()) marking->Start();
4091 // This big step should be sufficient to mark the whole array.
4092 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4093 DCHECK(marking->IsComplete());
// Smoke test: optimized code that allocates must keep working when inline
// allocation is dynamically disabled and then re-enabled on the heap.
4097 TEST(DisableInlineAllocation) {
4098 i::FLAG_allow_natives_syntax = true;
4099 CcTest::InitializeVM();
4100 v8::HandleScope scope(CcTest::isolate());
4101 CompileRun("function test() {"
4103 " for (var i = 0; i < 10; i++) {"
4104 " x[i] = [ {}, [1,2,3], [1,x,3] ];"
4108 " %OptimizeFunctionOnNextCall(test);"
4110 " %DeoptimizeFunction(test);"
4113 // Warm-up with inline allocation enabled.
4114 CompileRun("test(); test(); run();");
4116 // Run test with inline allocation disabled.
4117 CcTest::heap()->DisableInlineAllocation();
4118 CompileRun("run()");
4120 // Run test with inline allocation re-enabled.
4121 CcTest::heap()->EnableInlineAllocation();
4122 CompileRun("run()");
// Counts the entries in the heap's allocation-sites list by walking the
// weak_next chain until the undefined sentinel. (Accumulator/return lines are
// elided in this listing.)
4126 static int AllocationSitesCount(Heap* heap) {
4128 for (Object* site = heap->allocation_sites_list();
4129 !(site->IsUndefined());
4130 site = AllocationSite::cast(site)->weak_next()) {
// Verifies that optimized code registered in an AllocationSite's
// dependent_code() is held only weakly (via WeakCell): after GCs drop the
// function, the site survives (global handle) but its dependent-code entry
// must be a cleared WeakCell.
4137 TEST(EnsureAllocationSiteDependentCodesProcessed) {
4138 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4139 i::FLAG_allow_natives_syntax = true;
4140 CcTest::InitializeVM();
4141 Isolate* isolate = CcTest::i_isolate();
4142 v8::internal::Heap* heap = CcTest::heap();
4143 GlobalHandles* global_handles = isolate->global_handles();
4145 if (!isolate->use_crankshaft()) return;
4147 // The allocation site at the head of the list is ours.
4148 Handle<AllocationSite> site;
4150 LocalContext context;
4151 v8::HandleScope scope(context->GetIsolate());
4153 int count = AllocationSitesCount(heap);
4154 CompileRun("var bar = function() { return (new Array()); };"
4159 // One allocation site should have been created.
4160 int new_count = AllocationSitesCount(heap);
4161 CHECK_EQ(new_count, (count + 1));
// Pin the freshly created site (head of the list) with a global handle so it
// survives the GCs below.
4162 site = Handle<AllocationSite>::cast(
4163 global_handles->Create(
4164 AllocationSite::cast(heap->allocation_sites_list())));
4166 CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
4168 DependentCode::GroupStartIndexes starts(site->dependent_code());
4169 CHECK_GE(starts.number_of_entries(), 1);
4170 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4171 CHECK(site->dependent_code()->object_at(index)->IsWeakCell());
4172 Code* function_bar = Code::cast(
4173 WeakCell::cast(site->dependent_code()->object_at(index))->value());
4174 Handle<JSFunction> bar_handle =
4175 v8::Utils::OpenHandle(
4176 *v8::Handle<v8::Function>::Cast(
4177 CcTest::global()->Get(v8_str("bar"))));
4178 CHECK_EQ(bar_handle->code(), function_bar);
4181 // Now make sure that a gc should get rid of the function, even though we
4182 // still have the allocation site alive.
4183 for (int i = 0; i < 4; i++) {
4184 heap->CollectAllGarbage(Heap::kNoGCFlags);
4187 // The site still exists because of our global handle, but the code is no
4188 // longer referred to by dependent_code().
4189 DependentCode::GroupStartIndexes starts(site->dependent_code());
4190 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4191 CHECK(site->dependent_code()->object_at(index)->IsWeakCell() &&
4192 WeakCell::cast(site->dependent_code()->object_at(index))->cleared());
// Verifies that property cells embedded in optimized code are weak: once the
// function becomes unreachable, GC must mark the escaped code object for
// deoptimization rather than keeping the cells alive.
4196 TEST(CellsInOptimizedCodeAreWeak) {
4197 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4198 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4199 i::FLAG_allow_natives_syntax = true;
4200 CcTest::InitializeVM();
4201 Isolate* isolate = CcTest::i_isolate();
4202 v8::internal::Heap* heap = CcTest::heap();
4204 if (!isolate->use_crankshaft()) return;
4205 HandleScope outer_scope(heap->isolate());
4208 LocalContext context;
4209 HandleScope scope(heap->isolate());
4211 CompileRun("bar = (function() {"
4215 " var foo = function(x) { with (x) { return 1 + x; } };"
4219 " %OptimizeFunctionOnNextCall(bar);"
4221 " return bar;})();");
4223 Handle<JSFunction> bar =
4224 v8::Utils::OpenHandle(
4225 *v8::Handle<v8::Function>::Cast(
4226 CcTest::global()->Get(v8_str("bar"))));
// Escape only the code object; the function itself becomes collectible.
4227 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4230 // Now make sure that a gc should get rid of the function
4231 for (int i = 0; i < 4; i++) {
4232 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4235 DCHECK(code->marked_for_deoptimization());
// Same as CellsInOptimizedCodeAreWeak, but for heap objects embedded in
// optimized code: they must be weak so GC can deoptimize unreachable code.
4239 TEST(ObjectsInOptimizedCodeAreWeak) {
4240 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4241 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4242 i::FLAG_allow_natives_syntax = true;
4243 CcTest::InitializeVM();
4244 Isolate* isolate = CcTest::i_isolate();
4245 v8::internal::Heap* heap = CcTest::heap();
4247 if (!isolate->use_crankshaft()) return;
4248 HandleScope outer_scope(heap->isolate());
4251 LocalContext context;
4252 HandleScope scope(heap->isolate());
4254 CompileRun("function bar() {"
4257 "function foo(x) { with (x) { return 1 + x; } };"
4261 "%OptimizeFunctionOnNextCall(bar);"
4264 Handle<JSFunction> bar =
4265 v8::Utils::OpenHandle(
4266 *v8::Handle<v8::Function>::Cast(
4267 CcTest::global()->Get(v8_str("bar"))));
// Keep only the code alive; the function may be collected.
4268 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4271 // Now make sure that a gc should get rid of the function
4272 for (int i = 0; i < 4; i++) {
4273 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4276 DCHECK(code->marked_for_deoptimization());
// Verifies that the weak object-to-code hash table does not leak entries when
// optimized functions die across incremental-marking cycles: after the loop,
// the table must be empty.
// Fix: the natives-syntax intrinsic was misspelled "%%OptimizeFwunction..." —
// the runtime function is %OptimizeFunctionOnNextCall (spelled that way in
// every other test in this file); the typo would make the generated script
// fail instead of optimizing bar%d.
4280 TEST(NoWeakHashTableLeakWithIncrementalMarking) {
4281 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4282 if (!i::FLAG_incremental_marking) return;
4283 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4284 i::FLAG_allow_natives_syntax = true;
4285 i::FLAG_compilation_cache = false;
4286 CcTest::InitializeVM();
4287 Isolate* isolate = CcTest::i_isolate();
4288 v8::internal::Heap* heap = CcTest::heap();
4290 if (!isolate->use_crankshaft()) return;
4291 HandleScope outer_scope(heap->isolate());
4292 for (int i = 0; i < 3; i++) {
4293 SimulateIncrementalMarking(heap);
4295 LocalContext context;
4296 HandleScope scope(heap->isolate());
4297 EmbeddedVector<char, 256> source;
4299 "function bar%d() {"
4302 "function foo%d(x) { with (x) { return 1 + x; } };"
4306 "%%OptimizeFunctionOnNextCall(bar%d);"
4308 i, i, i, i, i, i, i, i);
4309 CompileRun(source.start());
4311 heap->CollectAllGarbage(i::Heap::kNoGCFlags);
4314 if (heap->weak_object_to_code_table()->IsHashTable()) {
4315 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
4316 elements = t->NumberOfElements();
4318 CHECK_EQ(0, elements);
// Compiles, warms up and Crankshaft-optimizes a trivial global function named
// `name`, returning a handle to it. (Return statement elided in this listing.)
4322 static Handle<JSFunction> OptimizeDummyFunction(const char* name) {
4323 EmbeddedVector<char, 256> source;
4325 "function %s() { return 0; }"
4327 "%%OptimizeFunctionOnNextCall(%s);"
4328 "%s();", name, name, name, name, name);
4329 CompileRun(source.start());
4330 Handle<JSFunction> fun =
4331 v8::Utils::OpenHandle(
4332 *v8::Handle<v8::Function>::Cast(
4333 CcTest::global()->Get(v8_str(name))));
// Walks the next_code_link chain starting at `code` and returns how many Code
// objects follow it. (Counter/return lines elided in this listing.)
4338 static int GetCodeChainLength(Code* code) {
4340 while (code->next_code_link()->IsCode()) {
4342 code = Code::cast(code->next_code_link());
// Verifies that Code::next_code_link is a weak link: when the "mortal"
// optimized function dies, its code must drop out of the chain, shortening
// the chain by exactly one.
4348 TEST(NextCodeLinkIsWeak) {
4349 i::FLAG_allow_natives_syntax = true;
4350 i::FLAG_turbo_deoptimization = true;
4351 CcTest::InitializeVM();
4352 Isolate* isolate = CcTest::i_isolate();
4353 v8::internal::Heap* heap = CcTest::heap();
4355 if (!isolate->use_crankshaft()) return;
4356 HandleScope outer_scope(heap->isolate());
4358 heap->CollectAllAvailableGarbage();
4359 int code_chain_length_before, code_chain_length_after;
4361 HandleScope scope(heap->isolate());
4362 Handle<JSFunction> mortal = OptimizeDummyFunction("mortal");
4363 Handle<JSFunction> immortal = OptimizeDummyFunction("immortal");
4364 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
4365 code_chain_length_before = GetCodeChainLength(immortal->code());
4366 // Keep the immortal code and let the mortal code die.
4367 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
4368 CompileRun("mortal = null; immortal = null;");
4370 heap->CollectAllAvailableGarbage();
4371 // Now mortal code should be dead.
4372 code_chain_length_after = GetCodeChainLength(*code);
4373 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
// Assembles a minimal OPTIMIZED_FUNCTION Code object (a single push of
// undefined) for use as a synthetic entry in the optimized-code list.
// (Return statement elided in this listing.)
4377 static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
4378 i::byte buffer[i::Assembler::kMinimalBufferSize];
4379 MacroAssembler masm(isolate, buffer, sizeof(buffer));
4381 masm.Push(isolate->factory()->undefined_value());
4383 masm.GetCode(&desc);
4384 Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
4385 Handle<Code> code = isolate->factory()->NewCode(
4386 desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
4387 CHECK(code->IsCode());
// Variant of NextCodeLinkIsWeak using hand-built Code objects: installs
// immortal -> mortal -> old_head on the context's optimized-code list and
// checks that after GC the dead "mortal" link is skipped.
4392 TEST(NextCodeLinkIsWeak2) {
4393 i::FLAG_allow_natives_syntax = true;
4394 CcTest::InitializeVM();
4395 Isolate* isolate = CcTest::i_isolate();
4396 v8::internal::Heap* heap = CcTest::heap();
4398 if (!isolate->use_crankshaft()) return;
4399 HandleScope outer_scope(heap->isolate());
4400 heap->CollectAllAvailableGarbage();
4401 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
4402 Handle<Code> new_head;
4403 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
4405 HandleScope scope(heap->isolate());
4406 Handle<Code> immortal = DummyOptimizedCode(isolate);
4407 Handle<Code> mortal = DummyOptimizedCode(isolate);
4408 mortal->set_next_code_link(*old_head);
4409 immortal->set_next_code_link(*mortal);
4410 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
// Only the list head escapes; "mortal" becomes garbage.
4411 new_head = scope.CloseAndEscape(immortal);
4413 heap->CollectAllAvailableGarbage();
4414 // Now mortal code should be dead.
4415 CHECK_EQ(*old_head, new_head->next_code_link());
// Flag set by ClearWeakIC so CheckWeakness can observe that the weak callback
// actually fired.
4419 static bool weak_ic_cleared = false;
// Weak callback for CheckWeakness: records that the wrapped object died and
// checks the persistent handle is near death. (Handle reset elided here.)
4421 static void ClearWeakIC(const v8::WeakCallbackData<v8::Object, void>& data) {
4422 printf("clear weak is called\n");
4423 weak_ic_cleared = true;
4424 v8::Persistent<v8::Value>* p =
4425 reinterpret_cast<v8::Persistent<v8::Value>*>(data.GetParameter());
4426 CHECK(p->IsNearDeath());
4431 // Checks that the value returned by execution of the source is weak.
// Runs `source`, holds its result only via a weak persistent handle, forces a
// full GC, and asserts the weak callback fired — i.e. nothing (such as an IC)
// kept the object strongly alive.
4432 void CheckWeakness(const char* source) {
4433 i::FLAG_stress_compaction = false;
4434 CcTest::InitializeVM();
4435 v8::Isolate* isolate = CcTest::isolate();
4436 v8::HandleScope scope(isolate);
4437 v8::Persistent<v8::Object> garbage;
4439 v8::HandleScope scope(isolate);
4440 garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
4442 weak_ic_cleared = false;
4443 garbage.SetWeak(static_cast<void*>(&garbage), &ClearWeakIC);
4444 Heap* heap = CcTest::i_isolate()->heap();
4445 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4446 CHECK(weak_ic_cleared);
4450 // Each of the following "weak IC" tests creates an IC that embeds a map with
4451 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
// A monomorphic LoadIC's embedded map must not keep the prototype alive.
4452 TEST(WeakMapInMonomorphicLoadIC) {
4453 CheckWeakness("function loadIC(obj) {"
4457 " var proto = {'name' : 'weak'};"
4458 " var obj = Object.create(proto);"
// A polymorphic LoadIC's embedded maps must not keep the prototype alive.
4467 TEST(WeakMapInPolymorphicLoadIC) {
4469 "function loadIC(obj) {"
4473 " var proto = {'name' : 'weak'};"
4474 " var obj = Object.create(proto);"
4478 " var poly = Object.create(proto);"
// A monomorphic KeyedLoadIC's embedded map must not keep the prototype alive.
4486 TEST(WeakMapInMonomorphicKeyedLoadIC) {
4487 CheckWeakness("function keyedLoadIC(obj, field) {"
4488 " return obj[field];"
4491 " var proto = {'name' : 'weak'};"
4492 " var obj = Object.create(proto);"
4493 " keyedLoadIC(obj, 'name');"
4494 " keyedLoadIC(obj, 'name');"
4495 " keyedLoadIC(obj, 'name');"
// A polymorphic KeyedLoadIC's embedded maps must not keep the prototype alive.
4501 TEST(WeakMapInPolymorphicKeyedLoadIC) {
4503 "function keyedLoadIC(obj, field) {"
4504 " return obj[field];"
4507 " var proto = {'name' : 'weak'};"
4508 " var obj = Object.create(proto);"
4509 " keyedLoadIC(obj, 'name');"
4510 " keyedLoadIC(obj, 'name');"
4511 " keyedLoadIC(obj, 'name');"
4512 " var poly = Object.create(proto);"
4514 " keyedLoadIC(poly, 'name');"
// A monomorphic StoreIC's embedded map must not keep the prototype alive.
4520 TEST(WeakMapInMonomorphicStoreIC) {
4521 CheckWeakness("function storeIC(obj, value) {"
4522 " obj.name = value;"
4525 " var proto = {'name' : 'weak'};"
4526 " var obj = Object.create(proto);"
4527 " storeIC(obj, 'x');"
4528 " storeIC(obj, 'x');"
4529 " storeIC(obj, 'x');"
// A polymorphic StoreIC's embedded maps must not keep the prototype alive.
4535 TEST(WeakMapInPolymorphicStoreIC) {
4537 "function storeIC(obj, value) {"
4538 " obj.name = value;"
4541 " var proto = {'name' : 'weak'};"
4542 " var obj = Object.create(proto);"
4543 " storeIC(obj, 'x');"
4544 " storeIC(obj, 'x');"
4545 " storeIC(obj, 'x');"
4546 " var poly = Object.create(proto);"
4548 " storeIC(poly, 'x');"
// A monomorphic KeyedStoreIC's embedded map must not keep the prototype alive.
4554 TEST(WeakMapInMonomorphicKeyedStoreIC) {
4555 CheckWeakness("function keyedStoreIC(obj, field, value) {"
4556 " obj[field] = value;"
4559 " var proto = {'name' : 'weak'};"
4560 " var obj = Object.create(proto);"
4561 " keyedStoreIC(obj, 'x');"
4562 " keyedStoreIC(obj, 'x');"
4563 " keyedStoreIC(obj, 'x');"
// A polymorphic KeyedStoreIC's embedded maps must not keep the prototype
// alive.
4569 TEST(WeakMapInPolymorphicKeyedStoreIC) {
4571 "function keyedStoreIC(obj, field, value) {"
4572 " obj[field] = value;"
4575 " var proto = {'name' : 'weak'};"
4576 " var obj = Object.create(proto);"
4577 " keyedStoreIC(obj, 'x');"
4578 " keyedStoreIC(obj, 'x');"
4579 " keyedStoreIC(obj, 'x');"
4580 " var poly = Object.create(proto);"
4582 " keyedStoreIC(poly, 'x');"
// A monomorphic CompareNilIC's embedded map must not keep the prototype alive.
4588 TEST(WeakMapInMonomorphicCompareNilIC) {
4589 CheckWeakness("function compareNilIC(obj) {"
4590 " return obj == null;"
4593 " var proto = {'name' : 'weak'};"
4594 " var obj = Object.create(proto);"
4595 " compareNilIC(obj);"
4596 " compareNilIC(obj);"
4597 " compareNilIC(obj);"
// Looks up a JSFunction by name on the isolate's global object; assumes the
// property exists and is a function (GetProperty is ToHandleChecked).
4603 Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
4604 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
4605 Handle<Object> obj =
4606 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
4607 return Handle<JSFunction>::cast(obj);
// Asserts that the first IC of the given kind found in `code` is an inline
// cache stub in the expected state (e.g. MONOMORPHIC/POLYMORPHIC).
4611 void CheckIC(Code* code, Code::Kind kind, InlineCacheState state) {
4612 Code* ic = FindFirstIC(code, kind);
4613 CHECK(ic->is_inline_cache_stub());
4614 CHECK(ic->ic_state() == state);
// A monomorphic LoadIC must remain monomorphic across a full GC (weak map in
// the IC must not degrade the IC state when its objects survive).
4618 TEST(MonomorphicStaysMonomorphicAfterGC) {
4619 if (FLAG_always_opt) return;
4620 // TODO(mvstanton): vector ics need weak support!
4621 if (FLAG_vector_ics) return;
4622 CcTest::InitializeVM();
4623 Isolate* isolate = CcTest::i_isolate();
4624 Heap* heap = isolate->heap();
4625 v8::HandleScope scope(CcTest::isolate());
4627 "function loadIC(obj) {"
4630 "function testIC() {"
4631 " var proto = {'name' : 'weak'};"
4632 " var obj = Object.create(proto);"
4638 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
4640 v8::HandleScope scope(CcTest::isolate());
4641 CompileRun("(testIC())");
4643 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4644 CheckIC(loadIC->code(), Code::LOAD_IC, MONOMORPHIC);
4646 v8::HandleScope scope(CcTest::isolate());
4647 CompileRun("(testIC())");
4649 CheckIC(loadIC->code(), Code::LOAD_IC, MONOMORPHIC);
// A polymorphic LoadIC must remain polymorphic across a full GC.
4653 TEST(PolymorphicStaysPolymorphicAfterGC) {
4654 if (FLAG_always_opt) return;
4655 // TODO(mvstanton): vector ics need weak support!
4656 if (FLAG_vector_ics) return;
4657 CcTest::InitializeVM();
4658 Isolate* isolate = CcTest::i_isolate();
4659 Heap* heap = isolate->heap();
4660 v8::HandleScope scope(CcTest::isolate());
4662 "function loadIC(obj) {"
4665 "function testIC() {"
4666 " var proto = {'name' : 'weak'};"
4667 " var obj = Object.create(proto);"
4671 " var poly = Object.create(proto);"
4676 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
4678 v8::HandleScope scope(CcTest::isolate());
4679 CompileRun("(testIC())");
4681 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4682 CheckIC(loadIC->code(), Code::LOAD_IC, POLYMORPHIC);
4684 v8::HandleScope scope(CcTest::isolate());
4685 CompileRun("(testIC())");
4687 CheckIC(loadIC->code(), Code::LOAD_IC, POLYMORPHIC);
// Body of a WeakCell test (the TEST(...) header line is elided in this
// listing): a cell whose value dies must be cleared by a full GC, while a
// cell whose value is still referenced keeps its value; scavenges alone must
// not clear either cell.
4692 CcTest::InitializeVM();
4693 Isolate* isolate = CcTest::i_isolate();
4694 v8::internal::Heap* heap = CcTest::heap();
4695 v8::internal::Factory* factory = isolate->factory();
4697 HandleScope outer_scope(isolate);
4698 Handle<WeakCell> weak_cell1;
4700 HandleScope inner_scope(isolate);
// The value of weak_cell1 is only reachable through the cell itself.
4701 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
4702 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
4705 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
4706 Handle<WeakCell> weak_cell2;
4708 HandleScope inner_scope(isolate);
4709 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
4711 CHECK(weak_cell1->value()->IsFixedArray());
4712 CHECK_EQ(*survivor, weak_cell2->value());
// Two scavenges: weak cells survive and still point at their values.
4713 heap->CollectGarbage(NEW_SPACE);
4714 CHECK(weak_cell1->value()->IsFixedArray());
4715 CHECK_EQ(*survivor, weak_cell2->value());
4716 heap->CollectGarbage(NEW_SPACE);
4717 CHECK(weak_cell1->value()->IsFixedArray());
4718 CHECK_EQ(*survivor, weak_cell2->value());
// Full GC: the unreferenced value dies and weak_cell1 is cleared.
4719 heap->CollectAllAvailableGarbage();
4720 CHECK(weak_cell1->cleared());
4721 CHECK_EQ(*survivor, weak_cell2->value());
// WeakCells must behave correctly under incremental marking interleaved with
// scavenges: after a full GC, only the cell whose value (`survivor`) is still
// referenced keeps its value; all other cells are cleared.
4725 TEST(WeakCellsWithIncrementalMarking) {
4726 CcTest::InitializeVM();
4727 Isolate* isolate = CcTest::i_isolate();
4728 v8::internal::Heap* heap = CcTest::heap();
4729 v8::internal::Factory* factory = isolate->factory();
4732 HandleScope outer_scope(isolate);
4733 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
4734 Handle<WeakCell> weak_cells[N];
4736 for (int i = 0; i < N; i++) {
4737 HandleScope inner_scope(isolate);
// Only cell 0 wraps an object that stays strongly referenced.
4738 Handle<HeapObject> value =
4739 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
4740 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
4741 CHECK(weak_cell->value()->IsFixedArray());
4742 IncrementalMarking* marking = heap->incremental_marking();
4743 if (marking->IsStopped()) marking->Start();
4744 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4745 heap->CollectGarbage(NEW_SPACE);
4746 CHECK(weak_cell->value()->IsFixedArray());
4747 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
4749 heap->CollectAllGarbage(Heap::kNoGCFlags);
4750 CHECK_EQ(*survivor, weak_cells[0]->value());
4751 for (int i = 1; i < N; i++) {
4752 CHECK(weak_cells[i]->cleared());
// Regression test: with inline allocation disabled and a 1-allocation GC
// timeout, an optimized add that changes representation must not crash when
// its result is stored into an object that gets promoted.
4758 TEST(AddInstructionChangesNewSpacePromotion) {
4759 i::FLAG_allow_natives_syntax = true;
4760 i::FLAG_expose_gc = true;
4761 i::FLAG_stress_compaction = true;
4762 i::FLAG_gc_interval = 1000;
4763 CcTest::InitializeVM();
4764 if (!i::FLAG_allocation_site_pretenuring) return;
4765 v8::HandleScope scope(CcTest::isolate());
4766 Isolate* isolate = CcTest::i_isolate();
4767 Heap* heap = isolate->heap();
4770 "function add(a, b) {"
4774 "add(\"a\", \"b\");"
4775 "var oldSpaceObject;"
4777 "function crash(x) {"
4778 " var object = {a: null, b: null};"
4779 " var result = add(1.5, x | 0);"
4780 " object.a = result;"
4781 " oldSpaceObject = object;"
4786 "%OptimizeFunctionOnNextCall(crash);"
4789 v8::Handle<v8::Object> global = CcTest::global();
4790 v8::Handle<v8::Function> g =
4791 v8::Handle<v8::Function>::Cast(global->Get(v8_str("crash")));
4792 v8::Handle<v8::Value> args1[] = { v8_num(1) };
// Force a GC on the very next allocation inside the optimized call.
4793 heap->DisableInlineAllocation();
4794 heap->set_allocation_timeout(1);
4795 g->Call(global, 1, args1);
4796 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
// Fatal-error handler for TEST(CEntryStubOOM): exits 0 only if the fatal
// error came from CALL_AND_RETRY_LAST (i.e. the expected OOM site).
4800 void OnFatalErrorExpectOOM(const char* location, const char* message) {
4801 // Exit with 0 if the location matches our expectation.
4802 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
// With --gc-interval=1, a runtime call through the CEntry stub must either
// succeed or report OOM at CALL_AND_RETRY_LAST (handled above), not crash.
4806 TEST(CEntryStubOOM) {
4807 i::FLAG_allow_natives_syntax = true;
4808 CcTest::InitializeVM();
4809 v8::HandleScope scope(CcTest::isolate());
4810 v8::V8::SetFatalErrorHandler(OnFatalErrorExpectOOM);
4812 v8::Handle<v8::Value> result = CompileRun(
4813 "%SetFlags('--gc-interval=1');"
4818 CHECK(result->IsNumber());
// No-op interrupt callback; requesting it is enough to trigger a stack check.
4824 static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
// JS-callable hook: schedules InterruptCallback357137 on the test isolate.
4827 static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
4828 CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
// Regression test for chromium:357137: an interrupt (fake stack overflow)
// while compiling a function with many context-allocated locals must not
// corrupt the result; the closure must still return 42.
4832 TEST(Regress357137) {
4833 CcTest::InitializeVM();
4834 v8::Isolate* isolate = CcTest::isolate();
4835 v8::HandleScope hscope(isolate);
4836 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
4837 global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
4838 v8::FunctionTemplate::New(isolate, RequestInterrupt));
4839 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
4840 DCHECK(!context.IsEmpty());
4841 v8::Context::Scope cscope(context);
4843 v8::Local<v8::Value> result = CompileRun(
4845 "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
4846 "eval('function f() {' + locals + 'return function() { return v0; }; }');"
4847 "interrupt();" // This triggers a fake stack overflow in f.
4849 CHECK_EQ(42.0, result->ToNumber(isolate)->Value());
// Checks that the left-trimming done by Array.prototype.shift interacts
// correctly with concurrent sweeping: the elements page must either be past
// the sweeping-finalize phase or the elements must be marked black.
4853 TEST(ArrayShiftSweeping) {
4854 i::FLAG_expose_gc = true;
4855 CcTest::InitializeVM();
4856 v8::HandleScope scope(CcTest::isolate());
4857 Isolate* isolate = CcTest::i_isolate();
4858 Heap* heap = isolate->heap();
4860 v8::Local<v8::Value> result = CompileRun(
4861 "var array = new Array(40000);"
4862 "var tmp = new Array(100000);"
4869 Handle<JSObject> o =
4870 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
4871 CHECK(heap->InOldPointerSpace(o->elements()));
4872 CHECK(heap->InOldPointerSpace(*o));
4873 Page* page = Page::FromAddress(o->elements()->address());
4874 CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
4875 Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
// Verifies the promotion queue (stored at the end of to-space) is correctly
// evacuated when a semi-space copy would otherwise overwrite it. Uses its own
// isolate because it needs a small (2-page) semi-space configuration.
4879 UNINITIALIZED_TEST(PromotionQueue) {
4880 i::FLAG_expose_gc = true;
4881 i::FLAG_max_semi_space_size = 2;
4882 v8::Isolate* isolate = v8::Isolate::New();
4883 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
4885 v8::Isolate::Scope isolate_scope(isolate);
4886 v8::HandleScope handle_scope(isolate);
4887 v8::Context::New(isolate)->Enter();
4888 Heap* heap = i_isolate->heap();
4889 NewSpace* new_space = heap->new_space();
4891 // In this test we will try to overwrite the promotion queue which is at the
4892 // end of to-space. To actually make that possible, we need at least two
4893 // semi-space pages and take advantage of fragmentation.
4894 // (1) Grow semi-space to two pages.
4895 // (2) Create a few small long living objects and call the scavenger to
4896 // move them to the other semi-space.
4897 // (3) Create a huge object, i.e., remainder of first semi-space page and
4898 // create another huge object which should be of maximum allocatable memory
4899 // size of the second semi-space page.
4900 // (4) Call the scavenger again.
4901 // What will happen is: the scavenger will promote the objects created in
4902 // (2) and will create promotion queue entries at the end of the second
4903 // semi-space page during the next scavenge when it promotes the objects to
4904 // the old generation. The first allocation of (3) will fill up the first
4905 // semi-space page. The second allocation in (3) will not fit into the
4906 // first semi-space page, but it will overwrite the promotion queue which
4907 // are in the second semi-space page. If the right guards are in place, the
4908 // promotion queue will be evacuated in that case.
4910 // Grow the semi-space to two pages to make semi-space copy overwrite the
4911 // promotion queue, which will be at the end of the second page.
4912 intptr_t old_capacity = new_space->TotalCapacity();
4914 // If we are in a low memory config, we can't grow to two pages and we can't
4915 // run this test. This also means the issue we are testing cannot arise, as
4916 // there is no fragmentation.
4917 if (new_space->IsAtMaximumCapacity()) return;
4920 CHECK(new_space->IsAtMaximumCapacity());
4921 CHECK(2 * old_capacity == new_space->TotalCapacity());
4923 // Call the scavenger two times to get an empty new space
4924 heap->CollectGarbage(NEW_SPACE);
4925 heap->CollectGarbage(NEW_SPACE);
4927 // First create a few objects which will survive a scavenge, and will get
4928 // promoted to the old generation later on. These objects will create
4929 // promotion queue entries at the end of the second semi-space page.
4930 const int number_handles = 12;
4931 Handle<FixedArray> handles[number_handles];
4932 for (int i = 0; i < number_handles; i++) {
4933 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
4935 heap->CollectGarbage(NEW_SPACE);
4937 // Create the first huge object which will exactly fit the first semi-space
4939 int new_linear_size =
4940 static_cast<int>(*heap->new_space()->allocation_limit_address() -
4941 *heap->new_space()->allocation_top_address());
4942 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize;
4943 Handle<FixedArray> first =
4944 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
4945 CHECK(heap->InNewSpace(*first));
4947 // Create the second huge object of maximum allocatable second semi-space
4950 static_cast<int>(*heap->new_space()->allocation_limit_address() -
4951 *heap->new_space()->allocation_top_address());
4952 length = Page::kMaxRegularHeapObjectSize / kPointerSize -
4953 FixedArray::kHeaderSize;
4954 Handle<FixedArray> second =
4955 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
4956 CHECK(heap->InNewSpace(*second));
4958 // This scavenge will corrupt memory if the promotion queue is not
4960 heap->CollectGarbage(NEW_SPACE);
// Regression test for chromium:388880: migrating an object that sits exactly
// at the end of a page must not crash in Heap::AdjustLiveBytes() during
// incremental marking.
4966 TEST(Regress388880) {
4967 i::FLAG_expose_gc = true;
4968 CcTest::InitializeVM();
4969 v8::HandleScope scope(CcTest::isolate());
4970 Isolate* isolate = CcTest::i_isolate();
4971 Factory* factory = isolate->factory();
4972 Heap* heap = isolate->heap();
4974 Handle<Map> map1 = Map::Create(isolate, 1);
4976 Map::CopyWithField(map1, factory->NewStringFromStaticChars("foo"),
4977 HeapType::Any(isolate), NONE, Representation::Tagged(),
4978 OMIT_TRANSITION).ToHandleChecked();
// Position the object so its instance ends exactly at the page boundary.
4980 int desired_offset = Page::kPageSize - map1->instance_size();
4982 // Allocate fixed array in old pointer space so, that object allocated
4983 // afterwards would end at the end of the page.
4985 SimulateFullSpace(heap->old_pointer_space());
4986 int padding_size = desired_offset - Page::kObjectStartOffset;
4987 int padding_array_length =
4988 (padding_size - FixedArray::kHeaderSize) / kPointerSize;
4990 Handle<FixedArray> temp2 =
4991 factory->NewFixedArray(padding_array_length, TENURED);
4992 Page* page = Page::FromAddress(temp2->address());
4993 CHECK_EQ(Page::kObjectStartOffset, page->Offset(temp2->address()));
4996 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED, false);
4997 o->set_properties(*factory->empty_fixed_array());
4999 // Ensure that the object allocated where we need it.
5000 Page* page = Page::FromAddress(o->address());
5001 CHECK_EQ(desired_offset, page->Offset(o->address()));
5003 // Now we have an object right at the end of the page.
5005 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5006 // that would cause crash.
5007 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5010 CHECK(marking->IsMarking());
5012 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5013 // when it calls heap->AdjustLiveBytes(...).
5014 JSObject::MigrateToMap(o, map2);
// Body of a weak-map regression test (the TEST(...) header line is elided in
// this listing): incrementally marks a WeakMap's backing store black, then
// grows the map so the backing store is replaced, and runs a full GC — the
// old black backing store must be handled correctly.
5019 i::FLAG_expose_gc = true;
5020 CcTest::InitializeVM();
5021 v8::HandleScope scope(CcTest::isolate());
5022 Isolate* isolate = CcTest::i_isolate();
5023 Heap* heap = isolate->heap();
5024 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5025 v8::Local<v8::Value> result = CompileRun(
5026 "var weak_map = new WeakMap();"
5027 "var future_keys = [];"
5028 "for (var i = 0; i < 50; i++) {"
5029 " var key = {'k' : i + 0.1};"
5030 " weak_map.set(key, 1);"
5031 " future_keys.push({'x' : i + 0.2});"
5034 if (marking->IsStopped()) {
5037 // Incrementally mark the backing store.
5038 Handle<JSObject> obj =
5039 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5040 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
5041 while (!Marking::IsBlack(
5042 Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
5043 !marking->IsStopped()) {
5044 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5046 // Stash the backing store in a handle.
5047 Handle<Object> save(weak_map->table(), isolate);
5048 // The following line will update the backing store.
5050 "for (var i = 0; i < 50; i++) {"
5051 " weak_map.set(future_keys[i], i);"
5053 heap->incremental_marking()->set_should_hurry(true);
5054 heap->CollectGarbage(OLD_POINTER_SPACE);
// Regression test: shifting a JS array whose backing store lives in old
// space must not confuse the subsequent old-space GC.
// NOTE(review): the test's closing brace (and original lines 5063/5067)
// are missing from this chunk.
5058 TEST(Regress442710) {
5059 CcTest::InitializeVM();
5060 Isolate* isolate = CcTest::i_isolate();
5061 Heap* heap = isolate->heap();
5062 Factory* factory = isolate->factory();
5064 HandleScope sc(isolate);
5065 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
5066 Handle<JSArray> array = factory->NewJSArray(2);
// Expose the array to script as a global property so CompileRun can
// mutate it; .Check() asserts the property write succeeded.
5068 Handle<String> name = factory->InternalizeUtf8String("testArray");
5069 JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
// shift() moves the elements backing store in place (left-trimming).
5070 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5071 heap->CollectGarbage(OLD_POINTER_SPACE);
// Checks that deserializing from a snapshot leaves the number-string cache
// at its initial (un-grown) size; the cache stores key/value pairs, hence
// the factor of 2 on the expected length.
// NOTE(review): the test's closing brace is missing from this chunk.
5075 TEST(NumberStringCacheSize) {
5076 if (!Snapshot::HaveASnapshotToStartFrom()) return;
5077 // Test that the number-string cache has not been resized in the snapshot.
5078 CcTest::InitializeVM();
5079 Isolate* isolate = CcTest::i_isolate();
5080 Heap* heap = isolate->heap();
5081 CHECK_EQ(TestHeap::kInitialNumberStringCacheSize * 2,
5082 heap->number_string_cache()->length());
// NOTE(review): fragment of a test whose TEST(...) header is outside this
// chunk. It compiles a trivial script and asks the heap to trace a retention
// path to the resulting string — presumably a smoke test that the path
// tracer does not crash; confirm against the full source.
5088 CcTest::InitializeVM();
5089 v8::HandleScope scope(CcTest::isolate());
5091 v8::Local<v8::Value> result = CompileRun("'abc'");
5092 Handle<Object> o = v8::Utils::OpenHandle(*result);
5093 CcTest::i_isolate()->heap()->TracePathToObject(*o);
// NOTE(review): original lines 5111-5112 (which, per the uses below, must
// declare `counter` and `page`) and the test's closing braces are missing
// from this chunk.
5098 TEST(FirstPageFitsStartup) {
5099 // Test that the first page sizes provided by the default snapshot are large
5100 // enough to fit everything right after startup and creating one context.
5101 // If this test fails, we are allocating too much aside from deserialization.
5102 if (!Snapshot::HaveASnapshotToStartFrom()) return;
5103 if (Snapshot::EmbedsScript()) return;
5104 CcTest::InitializeVM();
5106 PagedSpaces spaces(CcTest::heap());
5107 for (PagedSpace* s = spaces.next(); s != NULL; s = spaces.next()) {
5108 uint32_t default_size = s->AreaSize();
5109 uint32_t reduced_size = Snapshot::SizeOfFirstPage(s->identity());
// Spaces whose first page was not shrunk by the snapshot are exempt.
5110 if (reduced_size == default_size) continue;
// A shrunk space must still fit in a single (reduced-size) page.
5113 for (PageIterator it(s); it.has_next(); page = it.next()) counter++;
5114 CHECK_LE(counter, 1);
5115 CHECK(static_cast<uint32_t>(page->area_size()) == reduced_size);