1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include "src/compilation-cache.h"
34 #include "src/execution.h"
35 #include "src/factory.h"
36 #include "src/global-handles.h"
37 #include "src/ic/ic.h"
38 #include "src/macro-assembler.h"
39 #include "test/cctest/cctest.h"
41 using namespace v8::internal;
44 static void CheckMap(Map* map, int type, int instance_size) {
45 CHECK(map->IsHeapObject());
47 CHECK(CcTest::heap()->Contains(map));
49 CHECK_EQ(CcTest::heap()->meta_map(), map->map());
50 CHECK_EQ(type, map->instance_type());
51 CHECK_EQ(instance_size, map->instance_size());
// NOTE(review): lossy extraction -- the enclosing TEST(...) header (original
// line 55) and its closing brace were dropped. The body verifies the
// canonical root maps (meta map, heap number, fixed array, string) via
// CheckMap above; variable-sized maps use kVariableSizeSentinel.
56 CcTest::InitializeVM();
57 Heap* heap = CcTest::heap();
58 CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
59 CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
60 CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
61 CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
65 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
66 CHECK(obj->IsOddball());
67 Handle<Object> handle(obj, isolate);
68 Object* print_string =
69 *Execution::ToString(isolate, handle).ToHandleChecked();
70 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
74 static void CheckSmi(Isolate* isolate, int value, const char* string) {
75 Handle<Object> handle(Smi::FromInt(value), isolate);
76 Object* print_string =
77 *Execution::ToString(isolate, handle).ToHandleChecked();
78 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
82 static void CheckNumber(Isolate* isolate, double value, const char* string) {
83 Handle<Object> number = isolate->factory()->NewNumber(value);
84 CHECK(number->IsNumber());
85 Handle<Object> print_string =
86 Execution::ToString(isolate, number).ToHandleChecked();
87 CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
// Exercises Isolate::FindCodeObject: every interior pointer of a Code object
// must map back to that Code object, and an address inside a *different*
// Code object must not.
// NOTE(review): lossy extraction -- the lines declaring/filling `desc`
// (CodeDesc + assm.GetCode, original lines 93-100, including the `#define __`
// helper) were dropped; `desc` is used below but not declared here. Confirm
// against the original file before editing.
91 static void CheckFindCodeObject(Isolate* isolate) {
92 // Test FindCodeObject
95 Assembler assm(isolate, NULL, 0);
97 __ nop(); // supported on all architectures
101 Handle<Code> code = isolate->factory()->NewCode(
102 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
103 CHECK(code->IsCode());
105 HeapObject* obj = HeapObject::cast(*code);
106 Address obj_addr = obj->address();
// Every pointer-aligned offset inside the object resolves to the object.
108 for (int i = 0; i < obj->Size(); i += kPointerSize) {
109 Object* found = isolate->FindCodeObject(obj_addr + i);
110 CHECK_EQ(*code, found);
// A second code object: an address in its middle must not find `code`.
113 Handle<Code> copy = isolate->factory()->NewCode(
114 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
115 HeapObject* obj_copy = HeapObject::cast(*copy);
116 Object* not_right = isolate->FindCodeObject(obj_copy->address() +
117 obj_copy->Size() / 2);
118 CHECK(not_right != *code);
// NOTE(review): lossy extraction -- the enclosing TEST(...) header (original
// line 122) and the trailing assertion/closing brace were dropped. Body
// wraps a NULL Object* in a Handle; presumably the lost assertion checked
// the handle is usable -- confirm against the original file.
123 CcTest::InitializeVM();
124 Isolate* isolate = CcTest::i_isolate();
125 HandleScope outer_scope(isolate);
126 LocalContext context;
127 Handle<Object> n(reinterpret_cast<Object*>(NULL), isolate);
// NOTE(review): lossy extraction -- the enclosing TEST(...) header (original
// line 132), the `#endif` matching the `#if` below, and the continuation
// lines of two CHECK_EQ calls (original lines 176, 182) were dropped.
// Body exercises number allocation (Smi vs. HeapNumber boundaries), string
// allocation, and the ToString helpers above.
133 CcTest::InitializeVM();
134 Isolate* isolate = CcTest::i_isolate();
135 Factory* factory = isolate->factory();
136 Heap* heap = isolate->heap();
138 HandleScope sc(isolate);
// Non-integral double must be boxed as a HeapNumber.
139 Handle<Object> value = factory->NewNumber(1.000123);
140 CHECK(value->IsHeapNumber());
141 CHECK(value->IsNumber());
142 CHECK_EQ(1.000123, value->Number());
// Integral double in Smi range comes back as a Smi.
144 value = factory->NewNumber(1.0);
145 CHECK(value->IsSmi());
146 CHECK(value->IsNumber());
147 CHECK_EQ(1.0, value->Number());
148 value = factory->NewNumberFromInt(1024);
149 value = factory->NewNumberFromInt(1024);
150 CHECK(value->IsSmi());
151 CHECK(value->IsNumber());
152 CHECK_EQ(1024.0, value->Number());
// Smi range endpoints stay Smis.
154 value = factory->NewNumberFromInt(Smi::kMinValue);
155 CHECK(value->IsSmi());
156 CHECK(value->IsNumber());
157 CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
159 value = factory->NewNumberFromInt(Smi::kMaxValue);
160 CHECK(value->IsSmi());
161 CHECK(value->IsNumber());
162 CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
// On 32-bit targets, one past either Smi endpoint spills to HeapNumber.
164 #if !defined(V8_TARGET_ARCH_64_BIT)
165 // TODO(lrn): We need a NumberFromIntptr function in order to test this.
166 value = factory->NewNumberFromInt(Smi::kMinValue - 1);
167 CHECK(value->IsHeapNumber());
168 CHECK(value->IsNumber());
169 CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
172 value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
173 CHECK(value->IsHeapNumber());
174 CHECK(value->IsNumber());
175 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
178 value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
179 CHECK(value->IsHeapNumber());
180 CHECK(value->IsNumber());
181 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
184 // nan oddball checks
185 CHECK(factory->nan_value()->IsNumber());
186 CHECK(std::isnan(factory->nan_value()->Number()));
188 Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
189 CHECK(s->IsString());
190 CHECK_EQ(10, s->length());
192 Handle<String> object_string = Handle<String>::cast(factory->Object_string());
193 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
194 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));
196 // Check ToString for oddballs
197 CheckOddball(isolate, heap->true_value(), "true");
198 CheckOddball(isolate, heap->false_value(), "false");
199 CheckOddball(isolate, heap->null_value(), "null");
200 CheckOddball(isolate, heap->undefined_value(), "undefined");
202 // Check ToString for Smis
203 CheckSmi(isolate, 0, "0");
204 CheckSmi(isolate, 42, "42");
205 CheckSmi(isolate, -42, "-42");
207 // Check ToString for Numbers
208 CheckNumber(isolate, 1.1, "1.1");
210 CheckFindCodeObject(isolate);
// NOTE(review): lossy extraction -- the enclosing TEST(...) header and the
// declaration of `request` (original line 216) were dropped. Body checks
// pointer-alignment of an allocation request size and that the extreme Smi
// values round-trip through Smi::FromInt.
215 CcTest::InitializeVM();
217 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
218 CHECK(Smi::FromInt(42)->IsSmi());
219 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
220 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
// Checks that a function rooted from the global object survives a scavenge,
// while its properties set inside an inner HandleScope remain readable.
// NOTE(review): lossy extraction -- the braces opening/closing the two inner
// scopes (around original lines 242/256 and 268/275) and the final closing
// brace were dropped; the nesting below is implied by the comments.
224 TEST(GarbageCollection) {
225 CcTest::InitializeVM();
226 Isolate* isolate = CcTest::i_isolate();
227 Heap* heap = isolate->heap();
228 Factory* factory = isolate->factory();
230 HandleScope sc(isolate);
// Initial scavenge to start from a clean new space.
232 heap->CollectGarbage(NEW_SPACE);
234 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
235 Handle<String> name = factory->InternalizeUtf8String("theFunction");
236 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
237 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
238 Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
239 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
240 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
243 HandleScope inner_scope(isolate);
244 // Allocate a function and keep it in global object's property.
245 Handle<JSFunction> function = factory->NewFunction(name);
246 JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
247 // Allocate an object. Unrooted after leaving the scope.
248 Handle<JSObject> obj = factory->NewJSObject(function);
249 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
250 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
252 CHECK_EQ(Smi::FromInt(23),
253 *Object::GetProperty(obj, prop_name).ToHandleChecked());
254 CHECK_EQ(Smi::FromInt(24),
255 *Object::GetProperty(obj, prop_namex).ToHandleChecked());
258 heap->CollectGarbage(NEW_SPACE);
260 // Function should be alive.
261 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
262 // Check function is retained.
263 Handle<Object> func_value =
264 Object::GetProperty(global, name).ToHandleChecked();
265 CHECK(func_value->IsJSFunction());
266 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
269 HandleScope inner_scope(isolate);
270 // Allocate another object, make it reachable from global.
271 Handle<JSObject> obj = factory->NewJSObject(function);
272 JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
273 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
276 // After gc, it should survive.
277 heap->CollectGarbage(NEW_SPACE);
279 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
// NOTE(review): the line re-declaring `obj` from this GetProperty result
// (original line 280) was dropped in extraction.
281 Object::GetProperty(global, obj_name).ToHandleChecked();
282 CHECK(obj->IsJSObject());
283 CHECK_EQ(Smi::FromInt(23),
284 *Object::GetProperty(obj, prop_name).ToHandleChecked());
288 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
289 HandleScope scope(isolate);
290 Handle<String> s = isolate->factory()->NewStringFromUtf8(
291 CStrVector(string)).ToHandleChecked();
292 CHECK_EQ(StrLength(string), s->length());
293 for (int index = 0; index < s->length(); index++) {
294 CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
// NOTE(review): lossy extraction -- the enclosing TEST(...) header (original
// line 299) and closing brace were dropped. Body smoke-tests string
// allocation at a few lengths via VerifyStringAllocation above.
300 CcTest::InitializeVM();
301 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
303 VerifyStringAllocation(isolate, "a");
304 VerifyStringAllocation(isolate, "ab");
305 VerifyStringAllocation(isolate, "abc");
306 VerifyStringAllocation(isolate, "abcd");
307 VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
// NOTE(review): lossy extraction -- the enclosing TEST(...) header and
// closing brace were dropped. Body checks that an ASCII-allocated string
// reports the same length as its source C string.
312 CcTest::InitializeVM();
313 Isolate* isolate = CcTest::i_isolate();
314 Factory* factory = isolate->factory();
316 v8::HandleScope scope(CcTest::isolate());
317 const char* name = "Kasper the spunky";
318 Handle<String> string = factory->NewStringFromAsciiChecked(name);
319 CHECK_EQ(StrLength(name), string->length());
// Checks that strong global handles keep their targets alive across a
// scavenge and can be destroyed afterwards.
// NOTE(review): lossy extraction -- the declarations of h1..h4 (original
// lines ~329-335), the inner scope braces, and the closing brace were
// dropped; h1/h3 alias the string, h2/h4 the number, per the Create calls.
323 TEST(GlobalHandles) {
324 CcTest::InitializeVM();
325 Isolate* isolate = CcTest::i_isolate();
326 Heap* heap = isolate->heap();
327 Factory* factory = isolate->factory();
328 GlobalHandles* global_handles = isolate->global_handles();
336 HandleScope scope(isolate);
338 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
339 Handle<Object> u = factory->NewNumber(1.12344);
341 h1 = global_handles->Create(*i);
342 h2 = global_handles->Create(*u);
343 h3 = global_handles->Create(*i);
344 h4 = global_handles->Create(*u);
347 // after gc, it should survive
348 heap->CollectGarbage(NEW_SPACE);
350 CHECK((*h1)->IsString());
351 CHECK((*h2)->IsHeapNumber());
352 CHECK((*h3)->IsString());
353 CHECK((*h4)->IsHeapNumber());
356 GlobalHandles::Destroy(h1.location());
357 GlobalHandles::Destroy(h3.location());
360 GlobalHandles::Destroy(h2.location());
361 GlobalHandles::Destroy(h4.location());
// Set by TestWeakGlobalHandleCallback when the weak callback fires with the
// sentinel parameter id 1234; tests reset it before arming a weak handle.
365 static bool WeakPointerCleared = false;
// Weak callback shared by the weak-handle tests below: flips the flag when
// the (handle, id) pair passed as the parameter carries id 1234.
// NOTE(review): lossy extraction -- the trailing statement(s) and closing
// brace (original lines 373+, presumably resetting the handle) were dropped.
367 static void TestWeakGlobalHandleCallback(
368 const v8::WeakCallbackData<v8::Value, void>& data) {
369 std::pair<v8::Persistent<v8::Value>*, int>* p =
370 reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
371 data.GetParameter());
372 if (p->second == 1234) WeakPointerCleared = true;
// Checks that a scavenge treats weak global handles as strong roots: the
// weakly-held object survives and the callback does not fire.
// NOTE(review): lossy extraction -- the declarations of h1/h2, the inner
// scope braces, and the closing brace were dropped.
377 TEST(WeakGlobalHandlesScavenge) {
378 i::FLAG_stress_compaction = false;
379 CcTest::InitializeVM();
380 Isolate* isolate = CcTest::i_isolate();
381 Heap* heap = isolate->heap();
382 Factory* factory = isolate->factory();
383 GlobalHandles* global_handles = isolate->global_handles();
385 WeakPointerCleared = false;
391 HandleScope scope(isolate);
393 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
394 Handle<Object> u = factory->NewNumber(1.12344);
396 h1 = global_handles->Create(*i);
397 h2 = global_handles->Create(*u);
// Arm h2 as weak with the sentinel id the callback looks for.
400 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
401 GlobalHandles::MakeWeak(h2.location(),
402 reinterpret_cast<void*>(&handle_and_id),
403 &TestWeakGlobalHandleCallback);
405 // Scavenge treats weak pointers as normal roots.
406 heap->CollectGarbage(NEW_SPACE);
408 CHECK((*h1)->IsString());
409 CHECK((*h2)->IsHeapNumber());
411 CHECK(!WeakPointerCleared);
412 CHECK(!global_handles->IsNearDeath(h2.location()));
413 CHECK(!global_handles->IsNearDeath(h1.location()));
415 GlobalHandles::Destroy(h1.location());
416 GlobalHandles::Destroy(h2.location());
// Checks that a full mark-compact GC honors weak global handles: after the
// objects are promoted out of new space and h2 is made weak, a full GC
// fires the weak callback for h2 while the strong h1 survives.
// NOTE(review): lossy extraction -- the declarations of h1/h2, the inner
// scope braces, and the closing brace were dropped.
420 TEST(WeakGlobalHandlesMark) {
421 CcTest::InitializeVM();
422 Isolate* isolate = CcTest::i_isolate();
423 Heap* heap = isolate->heap();
424 Factory* factory = isolate->factory();
425 GlobalHandles* global_handles = isolate->global_handles();
427 WeakPointerCleared = false;
433 HandleScope scope(isolate);
435 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
436 Handle<Object> u = factory->NewNumber(1.12344);
438 h1 = global_handles->Create(*i);
439 h2 = global_handles->Create(*u);
442 // Make sure the objects are promoted.
443 heap->CollectGarbage(OLD_POINTER_SPACE);
444 heap->CollectGarbage(NEW_SPACE);
445 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
447 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
448 GlobalHandles::MakeWeak(h2.location(),
449 reinterpret_cast<void*>(&handle_and_id),
450 &TestWeakGlobalHandleCallback);
451 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
452 CHECK(!GlobalHandles::IsNearDeath(h2.location()));
454 // Incremental marking potentially marked handles before they turned weak.
455 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
457 CHECK((*h1)->IsString());
459 CHECK(WeakPointerCleared);
460 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
462 GlobalHandles::Destroy(h1.location());
// Checks that a weak global handle is NOT cleared by a scavenge but IS
// cleared by a mark-compact collection.
// NOTE(review): lossy extraction -- the declaration of h, the inner scope
// braces, and the closing brace were dropped.
466 TEST(DeleteWeakGlobalHandle) {
467 i::FLAG_stress_compaction = false;
468 CcTest::InitializeVM();
469 Isolate* isolate = CcTest::i_isolate();
470 Heap* heap = isolate->heap();
471 Factory* factory = isolate->factory();
472 GlobalHandles* global_handles = isolate->global_handles();
474 WeakPointerCleared = false;
479 HandleScope scope(isolate);
481 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
482 h = global_handles->Create(*i);
485 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
486 GlobalHandles::MakeWeak(h.location(),
487 reinterpret_cast<void*>(&handle_and_id),
488 &TestWeakGlobalHandleCallback);
490 // Scavenge does not recognize weak references.
491 heap->CollectGarbage(NEW_SPACE);
493 CHECK(!WeakPointerCleared);
495 // Mark-compact treats weak reference properly.
496 heap->CollectGarbage(OLD_POINTER_SPACE);
498 CHECK(WeakPointerCleared);
// Table of sample strings fed to CheckInternalizedStrings below.
// NOTE(review): lossy extraction -- the table entries and closing brace
// (original lines 503-565) were dropped; only the declaration survives.
502 static const char* not_so_random_string_table[] = {
// For each entry in the NULL-terminated |strings| table: internalizing the
// same characters repeatedly must produce an InternalizedString whose bytes
// match the input.
// NOTE(review): lossy extraction -- the declaration of `a` (original line
// 571), two CHECK_EQ(*b, *a) identity checks (576, 579), and the closing
// braces were dropped; `a` is used below but not declared here.
566 static void CheckInternalizedStrings(const char** strings) {
567 Isolate* isolate = CcTest::i_isolate();
568 Factory* factory = isolate->factory();
569 for (const char* string = *strings; *strings != 0; string = *strings++) {
570 HandleScope scope(isolate);
572 isolate->factory()->InternalizeUtf8String(CStrVector(string));
573 // InternalizeUtf8String may return a failure if a GC is needed.
574 CHECK(a->IsInternalizedString());
575 Handle<String> b = factory->InternalizeUtf8String(string);
577 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
578 b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
580 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
// NOTE(review): lossy extraction -- the enclosing TEST(...) header (original
// line 585) and closing brace were dropped. Runs the internalization check
// twice to exercise the already-internalized path on the second pass.
586 CcTest::InitializeVM();
588 v8::HandleScope sc(CcTest::isolate());
589 CheckInternalizedStrings(not_so_random_string_table);
590 CheckInternalizedStrings(not_so_random_string_table);
594 TEST(FunctionAllocation) {
595 CcTest::InitializeVM();
596 Isolate* isolate = CcTest::i_isolate();
597 Factory* factory = isolate->factory();
599 v8::HandleScope sc(CcTest::isolate());
600 Handle<String> name = factory->InternalizeUtf8String("theFunction");
601 Handle<JSFunction> function = factory->NewFunction(name);
603 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
604 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
606 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
607 Handle<JSObject> obj = factory->NewJSObject(function);
608 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
609 CHECK_EQ(Smi::FromInt(23),
610 *Object::GetProperty(obj, prop_name).ToHandleChecked());
611 // Check that we can add properties to function objects.
612 JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
613 CHECK_EQ(Smi::FromInt(24),
614 *Object::GetProperty(function, prop_name).ToHandleChecked());
618 TEST(ObjectProperties) {
619 CcTest::InitializeVM();
620 Isolate* isolate = CcTest::i_isolate();
621 Factory* factory = isolate->factory();
623 v8::HandleScope sc(CcTest::isolate());
624 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
625 Handle<Object> object = Object::GetProperty(
626 CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
627 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
628 Handle<JSObject> obj = factory->NewJSObject(constructor);
629 Handle<String> first = factory->InternalizeUtf8String("first");
630 Handle<String> second = factory->InternalizeUtf8String("second");
632 Handle<Smi> one(Smi::FromInt(1), isolate);
633 Handle<Smi> two(Smi::FromInt(2), isolate);
636 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
639 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
640 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
643 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
644 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
646 // add first and then second
647 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
648 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
649 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
650 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
652 // delete first and then second
653 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
654 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
655 JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
656 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
657 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
659 // add first and then second
660 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
661 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
662 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
663 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
665 // delete second and then first
666 JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
667 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
668 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
669 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
670 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
672 // check string and internalized string match
673 const char* string1 = "fisk";
674 Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
675 JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
676 Handle<String> s1_string = factory->InternalizeUtf8String(string1);
677 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));
679 // check internalized string and string match
680 const char* string2 = "fugl";
681 Handle<String> s2_string = factory->InternalizeUtf8String(string2);
682 JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
683 Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
684 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
// NOTE(review): lossy extraction -- the enclosing TEST(...) header (original
// line 688) and closing brace were dropped. Body checks that adding a
// property to a fresh object transitions it away from the function's
// initial map.
689 CcTest::InitializeVM();
690 Isolate* isolate = CcTest::i_isolate();
691 Factory* factory = isolate->factory();
693 v8::HandleScope sc(CcTest::isolate());
694 Handle<String> name = factory->InternalizeUtf8String("theFunction");
695 Handle<JSFunction> function = factory->NewFunction(name);
697 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
698 Handle<JSObject> obj = factory->NewJSObject(function);
699 Handle<Map> initial_map(function->initial_map());
702 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
703 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
704 CHECK_EQ(Smi::FromInt(23),
705 *Object::GetProperty(obj, prop_name).ToHandleChecked());
707 // Check the map has changed
708 CHECK(*initial_map != obj->map());
// NOTE(review): lossy extraction -- the enclosing TEST(...) header (original
// line 712) and closing brace were dropped. Body exercises JSArray length
// handling: fast-mode elements for small lengths, transition to dictionary
// (slow) elements once the length exceeds the Smi range.
713 CcTest::InitializeVM();
714 Isolate* isolate = CcTest::i_isolate();
715 Factory* factory = isolate->factory();
717 v8::HandleScope sc(CcTest::isolate());
718 Handle<String> name = factory->InternalizeUtf8String("Array");
719 Handle<Object> fun_obj = Object::GetProperty(
720 CcTest::i_isolate()->global_object(), name).ToHandleChecked();
721 Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
723 // Allocate the object.
724 Handle<Object> element;
725 Handle<JSObject> object = factory->NewJSObject(function);
726 Handle<JSArray> array = Handle<JSArray>::cast(object);
727 // We just initialized the VM, no heap allocation failure yet.
728 JSArray::Initialize(array, 0);
730 // Set array length to 0.
731 JSArray::SetElementsLength(array, handle(Smi::FromInt(0), isolate)).Check();
732 CHECK_EQ(Smi::FromInt(0), array->length());
733 // Must be in fast mode.
734 CHECK(array->HasFastSmiOrObjectElements());
736 // array[length] = name.
737 JSReceiver::SetElement(array, 0, name, NONE, SLOPPY).Check();
738 CHECK_EQ(Smi::FromInt(1), array->length());
739 element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
740 CHECK_EQ(*element, *name);
742 // Set array length with larger than smi value.
743 Handle<Object> length =
744 factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
745 JSArray::SetElementsLength(array, length).Check();
747 uint32_t int_length = 0;
748 CHECK(length->ToArrayIndex(&int_length));
749 CHECK_EQ(*length, array->length());
750 CHECK(array->HasDictionaryElements()); // Must be in slow mode.
752 // array[length] = name.
753 JSReceiver::SetElement(array, int_length, name, NONE, SLOPPY).Check();
754 uint32_t new_int_length = 0;
755 CHECK(array->length()->ToArrayIndex(&new_int_length));
756 CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
757 element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
758 CHECK_EQ(*element, *name);
759 element = Object::GetElement(isolate, array, 0).ToHandleChecked();
760 CHECK_EQ(*element, *name);
// NOTE(review): lossy extraction -- the enclosing TEST(...) header (original
// line 764) and closing brace were dropped. Body checks CopyJSObject: the
// clone starts with identical properties/elements, and mutating the clone
// with swapped values keeps the expected cross-relationships to the
// original, which stays untouched.
765 CcTest::InitializeVM();
766 Isolate* isolate = CcTest::i_isolate();
767 Factory* factory = isolate->factory();
769 v8::HandleScope sc(CcTest::isolate());
770 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
771 Handle<Object> object = Object::GetProperty(
772 CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
773 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
774 Handle<JSObject> obj = factory->NewJSObject(constructor);
775 Handle<String> first = factory->InternalizeUtf8String("first");
776 Handle<String> second = factory->InternalizeUtf8String("second");
778 Handle<Smi> one(Smi::FromInt(1), isolate);
779 Handle<Smi> two(Smi::FromInt(2), isolate);
781 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
782 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
784 JSReceiver::SetElement(obj, 0, first, NONE, SLOPPY).Check();
785 JSReceiver::SetElement(obj, 1, second, NONE, SLOPPY).Check();
// Clone and verify it is a distinct object with equal contents.
788 Handle<Object> value1, value2;
789 Handle<JSObject> clone = factory->CopyJSObject(obj);
790 CHECK(!clone.is_identical_to(obj));
792 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
793 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
794 CHECK_EQ(*value1, *value2);
795 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
796 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
797 CHECK_EQ(*value1, *value2);
799 value1 = Object::GetProperty(obj, first).ToHandleChecked();
800 value2 = Object::GetProperty(clone, first).ToHandleChecked();
801 CHECK_EQ(*value1, *value2);
802 value1 = Object::GetProperty(obj, second).ToHandleChecked();
803 value2 = Object::GetProperty(clone, second).ToHandleChecked();
804 CHECK_EQ(*value1, *value2);
// Mutate only the clone: swap property values and element order.
807 JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
808 JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();
810 JSReceiver::SetElement(clone, 0, second, NONE, SLOPPY).Check();
811 JSReceiver::SetElement(clone, 1, first, NONE, SLOPPY).Check();
// The original must be unaffected: its values match the clone's crosswise.
813 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
814 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
815 CHECK_EQ(*value1, *value2);
816 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
817 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
818 CHECK_EQ(*value1, *value2);
820 value1 = Object::GetProperty(obj, second).ToHandleChecked();
821 value2 = Object::GetProperty(clone, first).ToHandleChecked();
822 CHECK_EQ(*value1, *value2);
823 value1 = Object::GetProperty(obj, first).ToHandleChecked();
824 value2 = Object::GetProperty(clone, second).ToHandleChecked();
825 CHECK_EQ(*value1, *value2);
// Allocates matched one-byte and multi-byte (UTF-8) strings of growing
// lengths and checks the reported character counts.
// NOTE(review): lossy extraction -- the line filling `one_byte` (original
// line 842, inside the inner loop), the `.ToHandleChecked();` continuation
// lines (855, 860), inner-loop closing brace, and the test's closing braces
// were dropped. Confirm against the original file before editing.
829 TEST(StringAllocation) {
830 CcTest::InitializeVM();
831 Isolate* isolate = CcTest::i_isolate();
832 Factory* factory = isolate->factory();
// 0xe5 0xa4 0xa7 is a single 3-byte UTF-8 sequence, so 3*length input
// bytes decode to `length` characters.
834 const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
835 for (int length = 0; length < 100; length++) {
836 v8::HandleScope scope(CcTest::isolate());
837 char* non_one_byte = NewArray<char>(3 * length + 1);
838 char* one_byte = NewArray<char>(length + 1);
839 non_one_byte[3 * length] = 0;
840 one_byte[length] = 0;
841 for (int i = 0; i < length; i++) {
843 non_one_byte[3 * i] = chars[0];
844 non_one_byte[3 * i + 1] = chars[1];
845 non_one_byte[3 * i + 2] = chars[2];
847 Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
848 Vector<const char>(non_one_byte, 3 * length));
849 CHECK_EQ(length, non_one_byte_sym->length());
850 Handle<String> one_byte_sym =
851 factory->InternalizeOneByteString(OneByteVector(one_byte, length));
852 CHECK_EQ(length, one_byte_sym->length());
853 Handle<String> non_one_byte_str =
854 factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
856 non_one_byte_str->Hash();
857 CHECK_EQ(length, non_one_byte_str->length());
858 Handle<String> one_byte_str =
859 factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
861 one_byte_str->Hash();
862 CHECK_EQ(length, one_byte_str->length());
863 DeleteArray(non_one_byte);
864 DeleteArray(one_byte);
// Walks the whole heap and counts how many of the |size| objects in |objs|
// are encountered.
// NOTE(review): lossy extraction -- the counter increment, loop-closing
// braces, and `return` (original lines 876-884) were dropped.
869 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
870 // Count the number of objects found in the heap.
872 HeapIterator iterator(heap);
873 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
874 for (int i = 0; i < size; i++) {
875 if (*objs[i] == obj) {
// NOTE(review): lossy extraction -- the enclosing TEST(...) header (original
// line 884), the continuation of the tenured NewJSArray call (898-900), the
// cleanup of `str` (912-913), and the closing brace were dropped. Body
// allocates objects across the different heap spaces and verifies the heap
// iterator finds all of them via ObjectsFoundInHeap.
885 CcTest::InitializeVM();
886 Isolate* isolate = CcTest::i_isolate();
887 Factory* factory = isolate->factory();
888 v8::HandleScope scope(CcTest::isolate());
890 // Array of objects to scan heap for.
891 const int objs_count = 6;
892 Handle<Object> objs[objs_count];
893 int next_objs_index = 0;
895 // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
896 objs[next_objs_index++] = factory->NewJSArray(10);
897 objs[next_objs_index++] = factory->NewJSArray(10,
901 // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
902 objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
903 objs[next_objs_index++] =
904 factory->NewStringFromStaticChars("abcdefghij", TENURED);
906 // Allocate a large string (for large object space).
907 int large_size = Page::kMaxRegularHeapObjectSize + 1;
908 char* str = new char[large_size];
909 for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
910 str[large_size - 1] = '\0';
911 objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
914 // Add a Map object to look for.
915 objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
917 CHECK_EQ(objs_count, next_objs_index);
918 CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
922 TEST(EmptyHandleEscapeFrom) {
923 CcTest::InitializeVM();
925 v8::HandleScope scope(CcTest::isolate());
926 Handle<JSObject> runaway;
929 v8::EscapableHandleScope nested(CcTest::isolate());
930 Handle<JSObject> empty;
931 runaway = empty.EscapeFrom(&nested);
934 CHECK(runaway.is_null());
938 static int LenFromSize(int size) {
939 return (size - FixedArray::kHeaderSize) / kPointerSize;
// NOTE(review): lossy extraction -- the while-loop closing brace (original
// line 979), the early `return;` + brace after the free-list comment
// (1010-1011), and the test's closing brace were dropped.
943 TEST(Regression39128) {
944 // Test case for crbug.com/39128.
945 CcTest::InitializeVM();
946 Isolate* isolate = CcTest::i_isolate();
947 TestHeap* heap = CcTest::test_heap();
949 // Increase the chance of 'bump-the-pointer' allocation in old space.
950 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
952 v8::HandleScope scope(CcTest::isolate());
954 // The plan: create JSObject which references objects in new space.
955 // Then clone this object (forcing it to go into old space) and check
956 // that region dirty marks are updated correctly.
958 // Step 1: prepare a map for the object. We add 1 inobject property to it.
959 // Create a map with single inobject property.
960 Handle<Map> my_map = Map::Create(CcTest::i_isolate(), 1);
961 int n_properties = my_map->inobject_properties();
962 CHECK_GT(n_properties, 0);
964 int object_size = my_map->instance_size();
966 // Step 2: allocate a lot of objects so to almost fill new space: we need
967 // just enough room to allocate JSObject and thus fill the newspace.
969 int allocation_amount = Min(FixedArray::kMaxSize,
970 Page::kMaxRegularHeapObjectSize + kPointerSize);
971 int allocation_len = LenFromSize(allocation_amount);
972 NewSpace* new_space = heap->new_space();
973 Address* top_addr = new_space->allocation_top_address();
974 Address* limit_addr = new_space->allocation_limit_address();
975 while ((*limit_addr - *top_addr) > allocation_amount) {
976 CHECK(!heap->always_allocate());
977 Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
978 CHECK(new_space->Contains(array));
981 // Step 3: now allocate fixed array and JSObject to fill the whole new space.
982 int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
983 int fixed_array_len = LenFromSize(to_fill);
984 CHECK(fixed_array_len < FixedArray::kMaxLength);
986 CHECK(!heap->always_allocate());
987 Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
988 CHECK(new_space->Contains(array));
990 Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
991 CHECK(new_space->Contains(object));
992 JSObject* jsobject = JSObject::cast(object);
993 CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
994 CHECK_EQ(0, jsobject->properties()->length());
995 // Create a reference to object in new space in jsobject.
996 FieldIndex index = FieldIndex::ForInObjectOffset(
997 JSObject::kHeaderSize - kPointerSize);
998 jsobject->FastPropertyAtPut(index, array);
// New space is now exactly full.
1000 CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
1002 // Step 4: clone jsobject, but force always allocate first to create a clone
1003 // in old pointer space.
1004 Address old_pointer_space_top = heap->old_pointer_space()->top();
1005 AlwaysAllocateScope aa_scope(isolate);
1006 Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
1007 JSObject* clone = JSObject::cast(clone_obj);
1008 if (clone->address() != old_pointer_space_top) {
1009 // Alas, got allocated from free list, we cannot do checks.
1012 CHECK(heap->old_pointer_space()->Contains(clone->address()));
// Verifies lazy code flushing: compiled code for a function that is no longer
// called survives a couple of full GCs, is flushed after enough aging GC
// cycles, and is recompiled on demand by the next call.
// NOTE(review): uses a fresh isolate (UNINITIALIZED_TEST) so flag changes and
// flushing state do not leak into other tests.
1016 UNINITIALIZED_TEST(TestCodeFlushing) {
1017 // If we do not flush code this test is invalid.
1018 if (!FLAG_flush_code) return;
1019 i::FLAG_allow_natives_syntax = true;
1020 i::FLAG_optimize_for_size = false;
1021 v8::Isolate* isolate = v8::Isolate::New();
1022 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
1024 Factory* factory = i_isolate->factory();
1026 v8::HandleScope scope(isolate);
1027 v8::Context::New(isolate)->Enter();
1028 const char* source =
1035 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1037 // This compile will add the code to the compilation cache.
1039 v8::HandleScope scope(isolate);
1043 // Check function is compiled.
1044 Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
1045 foo_name).ToHandleChecked();
1046 CHECK(func_value->IsJSFunction());
1047 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1048 CHECK(function->shared()->is_compiled());
1050 // The code will survive at least two GCs.
1051 i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1052 i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1053 CHECK(function->shared()->is_compiled());
1055 // Simulate several GCs that use full marking.
1056 const int kAgingThreshold = 6;
1057 for (int i = 0; i < kAgingThreshold; i++) {
1058 i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1061 // foo should no longer be in the compilation cache
1062 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1063 CHECK(!function->is_compiled() || function->IsOptimized());
1064 // Call foo to get it recompiled.
1065 CompileRun("foo()");
1066 CHECK(function->shared()->is_compiled());
1067 CHECK(function->is_compiled());
// Verifies pre-aging under --optimize-for-size: code that was run only once is
// flushed after a single additional GC, while code that is executed again is
// reset to a young age and survives several GCs before being flushed.
1075 TEST(TestCodeFlushingPreAged) {
1076 // If we do not flush code this test is invalid.
1077 if (!FLAG_flush_code) return;
1078 i::FLAG_allow_natives_syntax = true;
1079 i::FLAG_optimize_for_size = true;
1080 CcTest::InitializeVM();
1081 Isolate* isolate = CcTest::i_isolate();
1082 Factory* factory = isolate->factory();
1083 v8::HandleScope scope(CcTest::isolate());
1084 const char* source = "function foo() {"
1089 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1091 // Compile foo, but don't run it.
1092 { v8::HandleScope scope(CcTest::isolate());
1096 // Check function is compiled.
1097 Handle<Object> func_value =
1098 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1099 CHECK(func_value->IsJSFunction());
1100 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1101 CHECK(function->shared()->is_compiled());
1103 // The code has been run so will survive at least one GC.
1104 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1105 CHECK(function->shared()->is_compiled());
1107 // The code was only run once, so it should be pre-aged and collected on the
1109 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1110 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1112 // Execute the function again twice, and ensure it is reset to the young age.
1113 { v8::HandleScope scope(CcTest::isolate());
1118 // The code will survive at least two GC now that it is young again.
1119 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1120 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1121 CHECK(function->shared()->is_compiled());
1123 // Simulate several GCs that use full marking.
1124 const int kAgingThreshold = 6;
1125 for (int i = 0; i < kAgingThreshold; i++) {
1126 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1129 // foo should no longer be in the compilation cache
1130 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1131 CHECK(!function->is_compiled() || function->IsOptimized());
1132 // Call foo to get it recompiled.
1133 CompileRun("foo()");
1134 CHECK(function->shared()->is_compiled());
1135 CHECK(function->is_compiled());
// Verifies code flushing driven by *incremental* marking: aged code is flushed
// across simulated incremental GC cycles, and optimizing a function while it is
// enqueued as a flushing candidate leaves the candidate queue consistent.
1139 TEST(TestCodeFlushingIncremental) {
1140 // If we do not flush code this test is invalid.
1141 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1142 i::FLAG_allow_natives_syntax = true;
1143 i::FLAG_optimize_for_size = false;
1144 CcTest::InitializeVM();
1145 Isolate* isolate = CcTest::i_isolate();
1146 Factory* factory = isolate->factory();
1147 v8::HandleScope scope(CcTest::isolate());
1148 const char* source = "function foo() {"
1154 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1156 // This compile will add the code to the compilation cache.
1157 { v8::HandleScope scope(CcTest::isolate());
1161 // Check function is compiled.
1162 Handle<Object> func_value =
1163 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1164 CHECK(func_value->IsJSFunction());
1165 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1166 CHECK(function->shared()->is_compiled());
1168 // The code will survive at least two GCs.
1169 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1170 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1171 CHECK(function->shared()->is_compiled());
1173 // Simulate several GCs that use incremental marking.
1174 const int kAgingThreshold = 6;
1175 for (int i = 0; i < kAgingThreshold; i++) {
1176 SimulateIncrementalMarking(CcTest::heap());
1177 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1179 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1180 CHECK(!function->is_compiled() || function->IsOptimized());
1182 // This compile will compile the function again.
1183 { v8::HandleScope scope(CcTest::isolate());
1184 CompileRun("foo();");
1187 // Simulate several GCs that use incremental marking but make sure
1188 // the loop breaks once the function is enqueued as a candidate.
1189 for (int i = 0; i < kAgingThreshold; i++) {
1190 SimulateIncrementalMarking(CcTest::heap());
// A non-undefined next_function_link() indicates the function has been put
// on the code-flushing candidate list by the incremental marker.
1191 if (!function->next_function_link()->IsUndefined()) break;
1192 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1195 // Force optimization while incremental marking is active and while
1196 // the function is enqueued as a candidate.
1197 { v8::HandleScope scope(CcTest::isolate());
1198 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1201 // Simulate one final GC to make sure the candidate queue is sane.
1202 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1203 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1204 CHECK(function->is_compiled() || !function->IsOptimized());
// Verifies that a scavenge performed while incremental marking is running does
// not corrupt the code-flushing candidate queue, even when one of the enqueued
// functions dies during the scavenge.
1208 TEST(TestCodeFlushingIncrementalScavenge) {
1209 // If we do not flush code this test is invalid.
1210 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1211 i::FLAG_allow_natives_syntax = true;
1212 i::FLAG_optimize_for_size = false;
1213 CcTest::InitializeVM();
1214 Isolate* isolate = CcTest::i_isolate();
1215 Factory* factory = isolate->factory();
1216 v8::HandleScope scope(CcTest::isolate());
1217 const char* source = "var foo = function() {"
1223 "var bar = function() {"
1227 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1228 Handle<String> bar_name = factory->InternalizeUtf8String("bar");
1230 // Perform one initial GC to enable code flushing.
1231 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1233 // This compile will add the code to the compilation cache.
1234 { v8::HandleScope scope(CcTest::isolate());
1238 // Check functions are compiled.
1239 Handle<Object> func_value =
1240 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1241 CHECK(func_value->IsJSFunction());
1242 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1243 CHECK(function->shared()->is_compiled());
1244 Handle<Object> func_value2 =
1245 Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
1246 CHECK(func_value2->IsJSFunction());
1247 Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
1248 CHECK(function2->shared()->is_compiled());
1250 // Clear references to functions so that one of them can die.
1251 { v8::HandleScope scope(CcTest::isolate());
1252 CompileRun("foo = 0; bar = 0;");
1255 // Bump the code age so that flushing is triggered while the function
1256 // object is still located in new-space.
1257 const int kAgingThreshold = 6;
1258 for (int i = 0; i < kAgingThreshold; i++) {
1259 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1260 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1263 // Simulate incremental marking so that the functions are enqueued as
1264 // code flushing candidates. Then kill one of the functions. Finally
1265 // perform a scavenge while incremental marking is still running.
1266 SimulateIncrementalMarking(CcTest::heap());
// Deliberately clobber the handle slot so function2 becomes unreachable.
1267 *function2.location() = NULL;
1268 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
1270 // Simulate one final GC to make sure the candidate queue is sane.
1271 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1272 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1273 CHECK(!function->is_compiled() || function->IsOptimized());
// Verifies that aborting incremental marking (here triggered via debugger
// breakpoint activity) while a function is enqueued as a code-flushing
// candidate leaves the candidate queue in a consistent state.
1277 TEST(TestCodeFlushingIncrementalAbort) {
1278 // If we do not flush code this test is invalid.
1279 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1280 i::FLAG_allow_natives_syntax = true;
1281 i::FLAG_optimize_for_size = false;
1282 CcTest::InitializeVM();
1283 Isolate* isolate = CcTest::i_isolate();
1284 Factory* factory = isolate->factory();
1285 Heap* heap = isolate->heap();
1286 v8::HandleScope scope(CcTest::isolate());
1287 const char* source = "function foo() {"
1293 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1295 // This compile will add the code to the compilation cache.
1296 { v8::HandleScope scope(CcTest::isolate());
1300 // Check function is compiled.
1301 Handle<Object> func_value =
1302 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1303 CHECK(func_value->IsJSFunction());
1304 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1305 CHECK(function->shared()->is_compiled());
1307 // The code will survive at least two GCs.
1308 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1309 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1310 CHECK(function->shared()->is_compiled());
1312 // Bump the code age so that flushing is triggered.
1313 const int kAgingThreshold = 6;
1314 for (int i = 0; i < kAgingThreshold; i++) {
1315 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1318 // Simulate incremental marking so that the function is enqueued as
1319 // code flushing candidate.
1320 SimulateIncrementalMarking(heap);
1322 // Enable the debugger and add a breakpoint while incremental marking
1323 // is running so that incremental marking aborts and code flushing is
1326 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1327 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
1328 isolate->debug()->ClearAllBreakPoints();
1330 // Force optimization now that code flushing is disabled.
1331 { v8::HandleScope scope(CcTest::isolate());
1332 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1335 // Simulate one final GC to make sure the candidate queue is sane.
1336 heap->CollectAllGarbage(Heap::kNoGCFlags);
1337 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1338 CHECK(function->is_compiled() || !function->IsOptimized());
// Verifies compilation-cache lifecycle for scripts: the first compile only
// records a hash, the second installs a real entry, code aging evicts the
// entry, and generation aging (MarkCompactPrologue) prevents re-caching.
1342 TEST(CompilationCacheCachingBehavior) {
1343 // If we do not flush code, or have the compilation cache turned off, this
1345 if (!FLAG_flush_code || !FLAG_flush_code_incrementally ||
1346 !FLAG_compilation_cache) {
1349 CcTest::InitializeVM();
1350 Isolate* isolate = CcTest::i_isolate();
1351 Factory* factory = isolate->factory();
1352 Heap* heap = isolate->heap();
1353 CompilationCache* compilation_cache = isolate->compilation_cache();
1354 LanguageMode language_mode =
1355 construct_language_mode(FLAG_use_strict, FLAG_use_strong);
1357 v8::HandleScope scope(CcTest::isolate());
1358 const char* raw_source =
1365 Handle<String> source = factory->InternalizeUtf8String(raw_source);
1366 Handle<Context> native_context = isolate->native_context();
1369 v8::HandleScope scope(CcTest::isolate());
1370 CompileRun(raw_source);
1373 // On first compilation, only a hash is inserted in the code cache. We can't
1375 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
1376 source, Handle<Object>(), 0, 0, false, true, native_context,
1378 CHECK(info.is_null());
1381 v8::HandleScope scope(CcTest::isolate());
1382 CompileRun(raw_source);
1385 // On second compilation, the hash is replaced by a real cache entry mapping
1386 // the source to the shared function info containing the code.
1387 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, false,
1388 true, native_context, language_mode);
1389 CHECK(!info.is_null());
1391 heap->CollectAllGarbage(Heap::kNoGCFlags);
1393 // The entry is still young after one GC, so it must survive the collection.
1394 // Look it up again to confirm the mapping is intact.
1395 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, false,
1396 true, native_context, language_mode);
1397 CHECK(!info.is_null());
// Age the cached code all the way so the next GC treats it as flushable.
1399 while (!info.ToHandleChecked()->code()->IsOld()) {
1400 info.ToHandleChecked()->code()->MakeOlder(NO_MARKING_PARITY);
1403 heap->CollectAllGarbage(Heap::kNoGCFlags);
1404 // Ensure code aging cleared the entry from the cache.
1405 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, false,
1406 true, native_context, language_mode);
1407 CHECK(info.is_null());
1410 v8::HandleScope scope(CcTest::isolate());
1411 CompileRun(raw_source);
1414 // On first compilation, only a hash is inserted in the code cache. We can't
1416 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, false,
1417 true, native_context, language_mode);
1418 CHECK(info.is_null());
// Advance the cache through all hash generations before recompiling.
1420 for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
1421 compilation_cache->MarkCompactPrologue();
1425 v8::HandleScope scope(CcTest::isolate());
1426 CompileRun(raw_source);
1429 // If we aged the cache before caching the script, ensure that we didn't cache
1430 // on next compilation.
1431 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, false,
1432 true, native_context, language_mode);
1433 CHECK(info.is_null());
1437 // Count the number of native contexts in the weak list of native contexts.
// Walks the heap's native_contexts_list via the NEXT_CONTEXT_LINK slot of each
// context until the undefined sentinel terminates the list.
1438 int CountNativeContexts() {
1440 Object* object = CcTest::heap()->native_contexts_list();
1441 while (!object->IsUndefined()) {
1443 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1449 // Count the number of user functions in the weak list of optimized
1450 // functions attached to a native context.
// Traversal stops at the first non-JSFunction entry or at a builtin, so only
// user-defined optimized functions are counted.
1451 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1453 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1454 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1455 while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1457 object = JSFunction::cast(object)->next_function_link();
// Exercises the internal weak lists of native contexts and of optimized
// functions per context: scavenges must treat the links as strong, while full
// mark-compact GCs drop dead entries from both lists.
1463 TEST(TestInternalWeakLists) {
1464 v8::V8::Initialize();
1466 // Some flags turn Scavenge collections into Mark-sweep collections
1467 // and hence are incompatible with this test case.
1468 if (FLAG_gc_global || FLAG_stress_compaction) return;
1469 FLAG_retain_maps_for_n_gc = 0;
1471 static const int kNumTestContexts = 10;
1473 Isolate* isolate = CcTest::i_isolate();
1474 Heap* heap = isolate->heap();
1475 HandleScope scope(isolate);
1476 v8::Handle<v8::Context> ctx[kNumTestContexts];
1478 CHECK_EQ(0, CountNativeContexts());
1480 // Create a number of native contexts which get linked together.
1481 for (int i = 0; i < kNumTestContexts; i++) {
1482 ctx[i] = v8::Context::New(CcTest::isolate());
1484 // Collect garbage that might have been created by one of the
1485 // installed extensions.
1486 isolate->compilation_cache()->Clear();
1487 heap->CollectAllGarbage(Heap::kNoGCFlags);
// Optimized-function counts below only apply when crankshaft actually
// optimizes eagerly (--always-opt).
1489 bool opt = (FLAG_always_opt && isolate->use_crankshaft());
1491 CHECK_EQ(i + 1, CountNativeContexts());
1495 // Create a handle scope so no function objects get stuck in the outer
1497 HandleScope scope(isolate);
1498 const char* source = "function f1() { };"
1499 "function f2() { };"
1500 "function f3() { };"
1501 "function f4() { };"
1502 "function f5() { };";
1504 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1506 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
1508 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1510 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1512 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1514 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1516 // Remove function f1, and
1517 CompileRun("f1=null");
1519 // Scavenge treats these references as strong.
1520 for (int j = 0; j < 10; j++) {
1521 CcTest::heap()->CollectGarbage(NEW_SPACE);
1522 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1525 // Mark compact handles the weak references.
1526 isolate->compilation_cache()->Clear();
1527 heap->CollectAllGarbage(Heap::kNoGCFlags);
1528 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1530 // Get rid of f3 and f5 in the same way.
1531 CompileRun("f3=null");
1532 for (int j = 0; j < 10; j++) {
1533 CcTest::heap()->CollectGarbage(NEW_SPACE);
1534 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1536 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1537 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1538 CompileRun("f5=null");
1539 for (int j = 0; j < 10; j++) {
1540 CcTest::heap()->CollectGarbage(NEW_SPACE);
1541 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1543 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1544 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1549 // Force compilation cache cleanup.
1550 CcTest::heap()->NotifyContextDisposed(true);
1551 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1553 // Dispose the native contexts one by one.
1554 for (int i = 0; i < kNumTestContexts; i++) {
1555 // TODO(dcarney): is there a better way to do this?
// Overwrite the persistent handle's slot with undefined so the context
// becomes unreachable without going through the public Dispose API.
1556 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1557 *unsafe = CcTest::heap()->undefined_value();
1560 // Scavenge treats these references as strong.
1561 for (int j = 0; j < 10; j++) {
1562 CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1563 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1566 // Mark compact handles the weak references.
1567 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1568 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1571 CHECK_EQ(0, CountNativeContexts());
1575 // Count the number of native contexts in the weak list of native contexts
1576 // causing a GC after the specified number of elements.
// Uses Handle<Object> (not raw Object*) so the current list element survives
// the GC that is triggered mid-traversal.
1577 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1578 Heap* heap = isolate->heap();
1580 Handle<Object> object(heap->native_contexts_list(), isolate);
1581 while (!object->IsUndefined()) {
1583 if (count == n) heap->CollectAllGarbage(Heap::kNoGCFlags);
1585 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1592 // Count the number of user functions in the weak list of optimized
1593 // functions attached to a native context causing a GC after the
1594 // specified number of elements.
// As above, handles keep the current list element alive across the GC that is
// deliberately triggered during traversal.
1595 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1598 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1599 Isolate* isolate = icontext->GetIsolate();
1600 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1602 while (object->IsJSFunction() &&
1603 !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1605 if (count == n) isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
1606 object = Handle<Object>(
1607 Object::cast(JSFunction::cast(*object)->next_function_link()),
// Verifies that traversing the weak lists of native contexts and optimized
// functions stays correct even when full GCs are triggered mid-traversal.
1614 TEST(TestInternalWeakListsTraverseWithGC) {
1615 v8::V8::Initialize();
1616 Isolate* isolate = CcTest::i_isolate();
1618 static const int kNumTestContexts = 10;
1620 HandleScope scope(isolate);
1621 v8::Handle<v8::Context> ctx[kNumTestContexts];
1623 CHECK_EQ(0, CountNativeContexts());
1625 // Create a number of contexts and check the length of the weak list both
1626 // with and without GCs while iterating the list.
1627 for (int i = 0; i < kNumTestContexts; i++) {
1628 ctx[i] = v8::Context::New(CcTest::isolate());
1629 CHECK_EQ(i + 1, CountNativeContexts());
1630 CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
// Optimized-function counts below only apply when --always-opt is in effect.
1633 bool opt = (FLAG_always_opt && isolate->use_crankshaft());
1635 // Compile a number of functions the length of the weak list of optimized
1636 // functions both with and without GCs while iterating the list.
1638 const char* source = "function f1() { };"
1639 "function f2() { };"
1640 "function f3() { };"
1641 "function f4() { };"
1642 "function f5() { };";
1644 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1646 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
1647 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1649 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
1650 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1652 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
1653 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1655 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
1656 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1658 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
1659 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
// Measures the heap-size cost of compiled regexp code: a regexp above the
// optimization size threshold must stay under 1 MB of code, while a half-size
// (optimizable) regexp is expected to generate more code due to optimization.
1665 TEST(TestSizeOfRegExpCode) {
1666 if (!FLAG_regexp_optimization) return;
1668 v8::V8::Initialize();
1670 Isolate* isolate = CcTest::i_isolate();
1671 HandleScope scope(isolate);
1673 LocalContext context;
1675 // Adjust source below and this check to match
1676 // RegExpImpl::kRegExpTooLargeToOptimize.
1677 DCHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 10 * KB);
1679 // Compile a regexp that is much larger if we are using regexp optimizations.
1681 "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
1682 "var half_size_reg_exp;"
1683 "while (reg_exp_source.length < 10 * 1024) {"
1684 " half_size_reg_exp = reg_exp_source;"
1685 " reg_exp_source = reg_exp_source + reg_exp_source;"
1688 "reg_exp_source.match(/f/);");
1690 // Get initial heap size after several full GCs, which will stabilize
1691 // the heap size and return with sweeping finished completely.
1692 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1693 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1694 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1695 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1696 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1697 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1698 if (collector->sweeping_in_progress()) {
1699 collector->EnsureSweepingCompleted();
1701 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1703 CompileRun("'foo'.match(reg_exp_source);");
1704 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1705 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1707 CompileRun("'foo'.match(half_size_reg_exp);");
1708 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1709 int size_with_optimized_regexp =
1710 static_cast<int>(CcTest::heap()->SizeOfObjects());
1712 int size_of_regexp_code = size_with_regexp - initial_size;
1714 CHECK_LE(size_of_regexp_code, 1 * MB);
1716 // Small regexp is half the size, but compiles to more than twice the code
1717 // due to the optimization steps.
1718 CHECK_GE(size_with_optimized_regexp,
1719 size_with_regexp + size_of_regexp_code * 2);
// Verifies Heap::SizeOfObjects accounting: it grows exactly with each tenured
// allocation and returns to the initial value after a full GC, independently of
// whether concurrent sweeping has finished.
1723 TEST(TestSizeOfObjects) {
1724 v8::V8::Initialize();
1726 // Get initial heap size after several full GCs, which will stabilize
1727 // the heap size and return with sweeping finished completely.
1728 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1729 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1730 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1731 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1732 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1733 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1734 if (collector->sweeping_in_progress()) {
1735 collector->EnsureSweepingCompleted();
1737 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1740 // Allocate objects on several different old-space pages so that
1741 // concurrent sweeper threads will be busy sweeping the old space on
1742 // subsequent GC runs.
1743 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1744 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1745 for (int i = 1; i <= 100; i++) {
1746 CcTest::test_heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
1747 CHECK_EQ(initial_size + i * filler_size,
1748 static_cast<int>(CcTest::heap()->SizeOfObjects()));
1752 // The heap size should go back to initial size after a full GC, even
1753 // though sweeping didn't finish yet.
1754 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1756 // Normally sweeping would not be complete here, but no guarantees.
1758 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1760 // Waiting for sweeper threads should not change heap size.
1761 if (collector->sweeping_in_progress()) {
1762 collector->EnsureSweepingCompleted();
1764 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
// Cross-checks Heap::SizeOfObjects against a manual HeapIterator walk that
// sums object sizes (skipping free-space fillers); the two totals must agree
// to within 5% of the larger value.
1768 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1769 CcTest::InitializeVM();
1770 HeapIterator iterator(CcTest::heap());
1771 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1772 intptr_t size_of_objects_2 = 0;
1773 for (HeapObject* obj = iterator.next();
1775 obj = iterator.next()) {
1776 if (!obj->IsFreeSpace()) {
1777 size_of_objects_2 += obj->Size();
1780 // Delta must be within 5% of the larger result.
1781 // TODO(gc): Tighten this up by distinguishing between byte
1782 // arrays that are real and those that merely mark free space
1784 if (size_of_objects_1 > size_of_objects_2) {
1785 intptr_t delta = size_of_objects_1 - size_of_objects_2;
1786 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1787 "Iterator: %" V8_PTR_PREFIX "d, "
1788 "delta: %" V8_PTR_PREFIX "d\n",
1789 size_of_objects_1, size_of_objects_2, delta);
1790 CHECK_GT(size_of_objects_1 / 20, delta);
1792 intptr_t delta = size_of_objects_2 - size_of_objects_1;
1793 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1794 "Iterator: %" V8_PTR_PREFIX "d, "
1795 "delta: %" V8_PTR_PREFIX "d\n",
1796 size_of_objects_1, size_of_objects_2, delta);
1797 CHECK_GT(size_of_objects_2 / 20, delta);
1802 static void FillUpNewSpace(NewSpace* new_space) {
1803 // Fill up new space to the point that it is completely full. Make sure
1804 // that the scavenger does not undo the filling.
1805 Heap* heap = new_space->heap();
1806 Isolate* isolate = heap->isolate();
1807 Factory* factory = isolate->factory();
// Handles keep the filler arrays alive; AlwaysAllocateScope prevents the
// allocations from triggering a GC that would empty the space again.
1808 HandleScope scope(isolate);
1809 AlwaysAllocateScope always_allocate(isolate);
1810 intptr_t available = new_space->Capacity() - new_space->Size();
// Leave one filler's worth of slack so allocation never fails mid-loop.
1811 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
1812 for (intptr_t i = 0; i < number_of_fillers; i++) {
1813 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
// Verifies new-space capacity management: explicit growth doubles capacity,
// Shrink() is a no-op while the space is in use, and once the scavenger has
// emptied the space a single Shrink() halves capacity (further calls no-op).
1818 TEST(GrowAndShrinkNewSpace) {
1819 CcTest::InitializeVM();
1820 Heap* heap = CcTest::heap();
1821 NewSpace* new_space = heap->new_space();
1823 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
1824 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1825 // The max size cannot exceed the reserved size, since semispaces must be
1826 // always within the reserved space. We can't test new space growing and
1827 // shrinking if the reserved size is the same as the minimum (initial) size.
1831 // Explicitly growing should double the space capacity.
1832 intptr_t old_capacity, new_capacity;
1833 old_capacity = new_space->TotalCapacity();
1835 new_capacity = new_space->TotalCapacity();
1836 CHECK(2 * old_capacity == new_capacity);
1838 old_capacity = new_space->TotalCapacity();
1839 FillUpNewSpace(new_space);
1840 new_capacity = new_space->TotalCapacity();
1841 CHECK(old_capacity == new_capacity);
1843 // Explicitly shrinking should not affect space capacity.
1844 old_capacity = new_space->TotalCapacity();
1845 new_space->Shrink();
1846 new_capacity = new_space->TotalCapacity();
1847 CHECK(old_capacity == new_capacity);
1849 // Let the scavenger empty the new space.
1850 heap->CollectGarbage(NEW_SPACE);
1851 CHECK_LE(new_space->Size(), old_capacity);
1853 // Explicitly shrinking should halve the space capacity.
1854 old_capacity = new_space->TotalCapacity();
1855 new_space->Shrink();
1856 new_capacity = new_space->TotalCapacity();
1857 CHECK(old_capacity == 2 * new_capacity);
1859 // Consecutive shrinking should not affect space capacity.
1860 old_capacity = new_space->TotalCapacity();
1861 new_space->Shrink();
1862 new_space->Shrink();
1863 new_space->Shrink();
1864 new_capacity = new_space->TotalCapacity();
1865 CHECK(old_capacity == new_capacity);
// Verifies that CollectAllAvailableGarbage shrinks a previously grown and
// filled new space back to its original capacity.
1869 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
1870 CcTest::InitializeVM();
1871 Heap* heap = CcTest::heap();
1872 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
1873 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1874 // The max size cannot exceed the reserved size, since semispaces must be
1875 // always within the reserved space. We can't test new space growing and
1876 // shrinking if the reserved size is the same as the minimum (initial) size.
1880 v8::HandleScope scope(CcTest::isolate());
1881 NewSpace* new_space = heap->new_space();
1882 intptr_t old_capacity, new_capacity;
1883 old_capacity = new_space->TotalCapacity();
1885 new_capacity = new_space->TotalCapacity();
1886 CHECK(2 * old_capacity == new_capacity);
1887 FillUpNewSpace(new_space);
1888 heap->CollectAllAvailableGarbage();
1889 new_capacity = new_space->TotalCapacity();
1890 CHECK(old_capacity == new_capacity);
// Counts the global objects currently on the heap by walking every live
// object with a HeapIterator.
1894 static int NumberOfGlobalObjects() {
1896 HeapIterator iterator(CcTest::heap());
1897 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1898 if (obj->IsGlobalObject()) count++;
1904 // Test that we don't embed maps from foreign contexts into
// Optimized code compiled in ctx2 reads o.x where o comes from ctx1; after
// ctx1 is exited and disposed, its global object must become collectable —
// i.e. the optimized code must not retain ctx1's map.
1906 TEST(LeakNativeContextViaMap) {
1907 i::FLAG_allow_natives_syntax = true;
1908 v8::Isolate* isolate = CcTest::isolate();
1909 v8::HandleScope outer_scope(isolate);
1910 v8::Persistent<v8::Context> ctx1p;
1911 v8::Persistent<v8::Context> ctx2p;
1913 v8::HandleScope scope(isolate);
1914 ctx1p.Reset(isolate, v8::Context::New(isolate));
1915 ctx2p.Reset(isolate, v8::Context::New(isolate));
1916 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1919 CcTest::heap()->CollectAllAvailableGarbage();
// Two contexts, each with a global object and a global proxy => four globals.
1920 CHECK_EQ(4, NumberOfGlobalObjects());
1923 v8::HandleScope inner_scope(isolate);
1924 CompileRun("var v = {x: 42}");
1925 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1926 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1927 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1929 ctx2->Global()->Set(v8_str("o"), v);
1930 v8::Local<v8::Value> res = CompileRun(
1931 "function f() { return o.x; }"
1932 "for (var i = 0; i < 10; ++i) f();"
1933 "%OptimizeFunctionOnNextCall(f);"
1935 CHECK_EQ(42, res->Int32Value());
// Drop the cross-context reference before tearing ctx1 down.
1936 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1938 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
1940 isolate->ContextDisposedNotification();
1942 CcTest::heap()->CollectAllAvailableGarbage();
1943 CHECK_EQ(2, NumberOfGlobalObjects());
1945 CcTest::heap()->CollectAllAvailableGarbage();
1946 CHECK_EQ(0, NumberOfGlobalObjects());
1950 // Test that we don't embed functions from foreign contexts into optimized
1950 // code.
1952 TEST(LeakNativeContextViaFunction) {
1953 i::FLAG_allow_natives_syntax = true;
1954 v8::Isolate* isolate = CcTest::isolate();
1955 v8::HandleScope outer_scope(isolate);
1956 v8::Persistent<v8::Context> ctx1p;
1957 v8::Persistent<v8::Context> ctx2p;
1959 v8::HandleScope scope(isolate);
1960 ctx1p.Reset(isolate, v8::Context::New(isolate));
1961 ctx2p.Reset(isolate, v8::Context::New(isolate));
1962 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// Two live contexts: a global object and a global proxy each.
1965 CcTest::heap()->CollectAllAvailableGarbage();
1966 CHECK_EQ(4, NumberOfGlobalObjects());
1969 v8::HandleScope inner_scope(isolate);
// This time the leaked value is a closure from ctx1, called from optimized
// ctx2 code.
1970 CompileRun("var v = function() { return 42; }");
1971 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1972 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1973 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1975 ctx2->Global()->Set(v8_str("o"), v);
1976 v8::Local<v8::Value> res = CompileRun(
1977 "function f(x) { return x(); }"
1978 "for (var i = 0; i < 10; ++i) f(o);"
1979 "%OptimizeFunctionOnNextCall(f);"
1981 CHECK_EQ(42, res->Int32Value());
1982 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
// After disposal (elided lines) + GC, each dead context's globals must be
// collected: 4 -> 2 -> 0.
1986 isolate->ContextDisposedNotification();
1988 CcTest::heap()->CollectAllAvailableGarbage();
1989 CHECK_EQ(2, NumberOfGlobalObjects());
1991 CcTest::heap()->CollectAllAvailableGarbage();
1992 CHECK_EQ(0, NumberOfGlobalObjects());
// Same leak check as LeakNativeContextViaMap, but the foreign map is embedded
// via an optimized keyed load (o[0]) instead of a named property load.
1996 TEST(LeakNativeContextViaMapKeyed) {
1997 i::FLAG_allow_natives_syntax = true;
1998 v8::Isolate* isolate = CcTest::isolate();
1999 v8::HandleScope outer_scope(isolate);
2000 v8::Persistent<v8::Context> ctx1p;
2001 v8::Persistent<v8::Context> ctx2p;
2003 v8::HandleScope scope(isolate);
2004 ctx1p.Reset(isolate, v8::Context::New(isolate));
2005 ctx2p.Reset(isolate, v8::Context::New(isolate));
2006 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2009 CcTest::heap()->CollectAllAvailableGarbage();
2010 CHECK_EQ(4, NumberOfGlobalObjects());
2013 v8::HandleScope inner_scope(isolate);
2014 CompileRun("var v = [42, 43]");
2015 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2016 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2017 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2019 ctx2->Global()->Set(v8_str("o"), v);
2020 v8::Local<v8::Value> res = CompileRun(
2021 "function f() { return o[0]; }"
2022 "for (var i = 0; i < 10; ++i) f();"
2023 "%OptimizeFunctionOnNextCall(f);"
2025 CHECK_EQ(42, res->Int32Value());
2026 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
// After disposal (elided lines) + GC: 4 -> 2 -> 0 global objects.
2030 isolate->ContextDisposedNotification();
2032 CcTest::heap()->CollectAllAvailableGarbage();
2033 CHECK_EQ(2, NumberOfGlobalObjects());
2035 CcTest::heap()->CollectAllAvailableGarbage();
2036 CHECK_EQ(0, NumberOfGlobalObjects());
// Same leak check again, with the foreign object reachable through a
// prototype chain (the optimized function body is in lines elided from this
// excerpt; it reads the inherited .y property and must yield 42).
2040 TEST(LeakNativeContextViaMapProto) {
2041 i::FLAG_allow_natives_syntax = true;
2042 v8::Isolate* isolate = CcTest::isolate();
2043 v8::HandleScope outer_scope(isolate);
2044 v8::Persistent<v8::Context> ctx1p;
2045 v8::Persistent<v8::Context> ctx2p;
2047 v8::HandleScope scope(isolate);
2048 ctx1p.Reset(isolate, v8::Context::New(isolate));
2049 ctx2p.Reset(isolate, v8::Context::New(isolate));
2050 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2053 CcTest::heap()->CollectAllAvailableGarbage();
2054 CHECK_EQ(4, NumberOfGlobalObjects());
2057 v8::HandleScope inner_scope(isolate);
2058 CompileRun("var v = { y: 42}");
2059 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2060 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2061 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2063 ctx2->Global()->Set(v8_str("o"), v);
2064 v8::Local<v8::Value> res = CompileRun(
2070 "for (var i = 0; i < 10; ++i) f();"
2071 "%OptimizeFunctionOnNextCall(f);"
2073 CHECK_EQ(42, res->Int32Value());
2074 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
// After disposal (elided lines) + GC: 4 -> 2 -> 0 global objects.
2078 isolate->ContextDisposedNotification();
2080 CcTest::heap()->CollectAllAvailableGarbage();
2081 CHECK_EQ(2, NumberOfGlobalObjects());
2083 CcTest::heap()->CollectAllAvailableGarbage();
2084 CHECK_EQ(0, NumberOfGlobalObjects());
// Regression-style test: run an optimized `instanceof` while incremental
// marking is in progress and the optimized code object is already black, then
// force a GC. With a correct write barrier in the InstanceOf stub this must
// not crash or fail heap verification (FLAG_verify_heap is set below).
2088 TEST(InstanceOfStubWriteBarrier) {
2089 i::FLAG_allow_natives_syntax = true;
2091 i::FLAG_verify_heap = true;
2094 CcTest::InitializeVM();
2095 if (!CcTest::i_isolate()->use_crankshaft()) return;
2096 if (i::FLAG_force_marking_deque_overflows) return;
2097 v8::HandleScope outer_scope(CcTest::isolate());
2100 v8::HandleScope scope(CcTest::isolate());
2102 "function foo () { }"
2103 "function mkbar () { return new (new Function(\"\")) (); }"
2104 "function f (x) { return (x instanceof foo); }"
2105 "function g () { f(mkbar()); }"
2106 "f(new foo()); f(new foo());"
2107 "%OptimizeFunctionOnNextCall(f);"
2108 "f(new foo()); g();");
2111 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2115 Handle<JSFunction> f =
2116 v8::Utils::OpenHandle(
2117 *v8::Handle<v8::Function>::Cast(
2118 CcTest::global()->Get(v8_str("f"))));
2120 CHECK(f->IsOptimized());
// Step the marker until f's code object has been marked black (or marking
// finishes early, which the CHECK below rules out).
2122 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
2123 !marking->IsStopped()) {
2124 // Discard any pending GC requests otherwise we will get GC when we enter
2124 // the function.
2126 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2129 CHECK(marking->IsMarking());
2132 v8::HandleScope scope(CcTest::isolate());
2133 v8::Handle<v8::Object> global = CcTest::global();
2134 v8::Handle<v8::Function> g =
2135 v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
// g() calls the optimized f with a fresh object -> exercises the stub's
// write barrier while f's code is black.
2136 g->Call(global, 0, NULL);
2139 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2140 CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE);
// Returns the number of prototype transitions recorded on |map|'s transition
// array. Helper for PrototypeTransitionClearing below.
2144 static int NumberOfProtoTransitions(Map* map) {
2145 return TransitionArray::NumberOfPrototypeTransitions(
2146 TransitionArray::GetPrototypeTransitions(map));
// Verifies that GC clears only the *dead* prototype transitions of a map,
// compacts the surviving ones, and records slots correctly when the
// transition array points into an evacuation candidate.
2150 TEST(PrototypeTransitionClearing) {
2151 if (FLAG_never_compact) return;
2152 CcTest::InitializeVM();
2153 Isolate* isolate = CcTest::i_isolate();
2154 Factory* factory = isolate->factory();
2155 v8::HandleScope scope(CcTest::isolate());
2157 CompileRun("var base = {};");
2158 Handle<JSObject> baseObject =
2159 v8::Utils::OpenHandle(
2160 *v8::Handle<v8::Object>::Cast(
2161 CcTest::global()->Get(v8_str("base"))));
2162 int initialTransitions = NumberOfProtoTransitions(baseObject->map());
// 10 objects get a fresh prototype; only iterations i >= 3 are kept alive
// (7 survivors), the first 3 become garbage.
2166 "for (var i = 0; i < 10; i++) {"
2168 " var prototype = {};"
2169 " object.__proto__ = prototype;"
2170 " if (i >= 3) live.push(object, prototype);"
2173 // Verify that only dead prototype transitions are cleared.
2174 CHECK_EQ(initialTransitions + 10,
2175 NumberOfProtoTransitions(baseObject->map()));
2176 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2177 const int transitions = 10 - 3;
2178 CHECK_EQ(initialTransitions + transitions,
2179 NumberOfProtoTransitions(baseObject->map()));
2181 // Verify that prototype transitions array was compacted.
2183 TransitionArray::GetPrototypeTransitions(baseObject->map());
2184 for (int i = initialTransitions; i < initialTransitions + transitions; i++) {
2185 int j = TransitionArray::kProtoTransitionHeaderSize + i;
2186 CHECK(trans->get(j)->IsMap());
2189 // Make sure next prototype is placed on an old-space evacuation candidate.
2190 Handle<JSObject> prototype;
2191 PagedSpace* space = CcTest::heap()->old_pointer_space();
2193 AlwaysAllocateScope always_allocate(isolate);
2194 SimulateFullSpace(space);
2195 prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
2198 // Add a prototype on an evacuation candidate and verify that transition
2199 // clearing correctly records slots in prototype transition array.
2200 i::FLAG_always_compact = true;
2201 Handle<Map> map(baseObject->map());
2202 CHECK(!space->LastPage()->Contains(
2203 TransitionArray::GetPrototypeTransitions(*map)->address()));
2204 CHECK(space->LastPage()->Contains(prototype->address()));
// Checks that when the global IC age is bumped (via context-disposed + idle
// notifications) while incremental marking is running, the SharedFunctionInfo
// counters (ic_age, opt_count, profiler ticks) are reset by the GC.
2208 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2209 i::FLAG_stress_compaction = false;
2210 i::FLAG_allow_natives_syntax = true;
2212 i::FLAG_verify_heap = true;
2215 CcTest::InitializeVM();
2216 if (!CcTest::i_isolate()->use_crankshaft()) return;
2217 v8::HandleScope outer_scope(CcTest::isolate());
2220 v8::HandleScope scope(CcTest::isolate());
2224 " for (var i = 0; i < 100; i++) s += i;"
2228 "%OptimizeFunctionOnNextCall(f);"
2231 Handle<JSFunction> f =
2232 v8::Utils::OpenHandle(
2233 *v8::Handle<v8::Function>::Cast(
2234 CcTest::global()->Get(v8_str("f"))));
2235 CHECK(f->IsOptimized());
2237 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2241 // The following two calls will increment CcTest::heap()->global_ic_age().
2242 const double kLongIdlePauseInSeconds = 1.0;
2243 CcTest::isolate()->ContextDisposedNotification();
2244 CcTest::isolate()->IdleNotificationDeadline(
2245 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2246 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2247 kLongIdlePauseInSeconds);
// Drive marking to completion by hand, then finalize explicitly if Step()
// did not finish it.
2249 while (!marking->IsStopped() && !marking->IsComplete()) {
2250 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2252 if (!marking->IsStopped() || marking->should_hurry()) {
2253 // We don't normally finish a GC via Step(), we normally finish by
2254 // setting the stack guard and then do the final steps in the stack
2255 // guard interrupt. But here we didn't ask for that, and there is no
2256 // JS code running to trigger the interrupt, so we explicitly finalize
2256 // here.
2258 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags,
2259 "Test finalizing incremental mark-sweep");
// After the GC the SFI counters must have been reset to the new IC age.
2262 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2263 CHECK_EQ(0, f->shared()->opt_count());
2264 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Same counter-reset check as the test above, but with incremental marking
// aborted so that the idle notification performs a full mark-sweep instead.
2268 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2269 i::FLAG_stress_compaction = false;
2270 i::FLAG_allow_natives_syntax = true;
2272 i::FLAG_verify_heap = true;
2275 CcTest::InitializeVM();
2276 if (!CcTest::i_isolate()->use_crankshaft()) return;
2277 v8::HandleScope outer_scope(CcTest::isolate());
2280 v8::HandleScope scope(CcTest::isolate());
2284 " for (var i = 0; i < 100; i++) s += i;"
2288 "%OptimizeFunctionOnNextCall(f);"
2291 Handle<JSFunction> f =
2292 v8::Utils::OpenHandle(
2293 *v8::Handle<v8::Function>::Cast(
2294 CcTest::global()->Get(v8_str("f"))));
2295 CHECK(f->IsOptimized());
2297 CcTest::heap()->incremental_marking()->Abort();
2299 // The following two calls will increment CcTest::heap()->global_ic_age().
2300 // Since incremental marking is off, IdleNotification will do full GC.
2301 const double kLongIdlePauseInSeconds = 1.0;
2302 CcTest::isolate()->ContextDisposedNotification();
2303 CcTest::isolate()->IdleNotificationDeadline(
2304 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2305 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2306 kLongIdlePauseInSeconds);
// Counters must be reset to the bumped IC age by the full GC.
2308 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2309 CHECK_EQ(0, f->shared()->opt_count());
2310 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Verifies that once the idle-marking delay counter is nearly exhausted and
// the weak closure is over-approximated, a long idle notification finishes
// incremental marking and performs exactly one GC.
2314 TEST(IdleNotificationFinishMarking) {
2315 i::FLAG_allow_natives_syntax = true;
2316 CcTest::InitializeVM();
2317 SimulateFullSpace(CcTest::heap()->old_pointer_space());
2318 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2322 CHECK_EQ(CcTest::heap()->gc_count(), 0);
2324 // TODO(hpayer): We cannot write proper unit test right now for heap.
2325 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2326 // marking delay counter.
2328 // Perform a huge incremental marking step but don't complete marking.
2329 intptr_t bytes_processed = 0;
2332 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2333 IncrementalMarking::FORCE_MARKING,
2334 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2335 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2336 } while (bytes_processed);
2338 // The next invocations of incremental marking are not going to complete
2340 // marking, since the completion threshold is not reached.
2341 for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
2343 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2344 IncrementalMarking::FORCE_MARKING,
2345 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2346 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2349 marking->SetWeakClosureWasOverApproximatedForTesting(true);
2351 // The next idle notification has to finish incremental marking.
2352 const double kLongIdleTime = 1000.0;
2353 CcTest::isolate()->IdleNotificationDeadline(
2354 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2355 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2357 CHECK_EQ(CcTest::heap()->gc_count(), 1);
2361 // Test that HAllocateObject will always return an object in new-space.
2362 TEST(OptimizedAllocationAlwaysInNewSpace) {
2363 i::FLAG_allow_natives_syntax = true;
2364 CcTest::InitializeVM();
2365 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2366 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2367 v8::HandleScope scope(CcTest::isolate());
// Fill new space first so the optimized allocation cannot simply bump-
// allocate; even then the result must still come from new space.
2369 SimulateFullSpace(CcTest::heap()->new_space());
2370 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2371 v8::Local<v8::Value> res = CompileRun(
2374 " for (var i = 0; i < 32; i++) {"
2375 " this['x' + i] = x;"
2378 "function f(x) { return new c(x); };"
2380 "%OptimizeFunctionOnNextCall(f);"
2383 4, res.As<v8::Object>()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2385 Handle<JSObject> o =
2386 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2388 CHECK(CcTest::heap()->InNewSpace(*o));
// Checks that with allocation folding, a nested literal allocated repeatedly
// in optimized code is pretenured: the outer array, the inner object array
// and its elements land in old pointer space, the double array's elements in
// old data space.
2392 TEST(OptimizedPretenuringAllocationFolding) {
2393 i::FLAG_allow_natives_syntax = true;
2394 i::FLAG_expose_gc = true;
2395 CcTest::InitializeVM();
2396 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2397 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2398 v8::HandleScope scope(CcTest::isolate());
2400 // Grow new space until maximum capacity reached.
2401 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2402 CcTest::heap()->new_space()->Grow();
2405 i::ScopedVector<char> source(1024);
2408 "var number_elements = %d;"
2409 "var elements = new Array();"
2411 " for (var i = 0; i < number_elements; i++) {"
2412 " elements[i] = [[{}], [1.1]];"
2414 " return elements[number_elements-1]"
2418 "%%OptimizeFunctionOnNextCall(f);"
// Enough allocations for the allocation-site feedback to request tenuring.
2420 AllocationSite::kPretenureMinimumCreated);
2422 v8::Local<v8::Value> res = CompileRun(source.start());
2424 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2425 Handle<JSObject> int_array_handle =
2426 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2427 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2428 Handle<JSObject> double_array_handle =
2429 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2431 Handle<JSObject> o =
2432 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2433 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2434 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
2435 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
2436 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
2437 CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
// Checks that an array-of-objects literal allocated repeatedly in optimized
// code is pretenured: the result and its elements are in old pointer space.
2441 TEST(OptimizedPretenuringObjectArrayLiterals) {
2442 i::FLAG_allow_natives_syntax = true;
2443 i::FLAG_expose_gc = true;
2444 CcTest::InitializeVM();
2445 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2446 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2447 v8::HandleScope scope(CcTest::isolate());
2449 // Grow new space until maximum capacity reached.
2450 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2451 CcTest::heap()->new_space()->Grow();
2454 i::ScopedVector<char> source(1024);
2457 "var number_elements = %d;"
2458 "var elements = new Array(number_elements);"
2460 " for (var i = 0; i < number_elements; i++) {"
2461 " elements[i] = [{}, {}, {}];"
2463 " return elements[number_elements - 1];"
2467 "%%OptimizeFunctionOnNextCall(f);"
2469 AllocationSite::kPretenureMinimumCreated);
2471 v8::Local<v8::Value> res = CompileRun(source.start());
2473 Handle<JSObject> o =
2474 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2476 CHECK(CcTest::heap()->InOldPointerSpace(o->elements()));
2477 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// Checks pretenuring of an object literal with mixed in-object properties
// (a heap-object field and a double field), including the nested object's
// own fields. Double fields may be stored unboxed, hence the
// IsUnboxedDoubleField branches.
2481 TEST(OptimizedPretenuringMixedInObjectProperties) {
2482 i::FLAG_allow_natives_syntax = true;
2483 i::FLAG_expose_gc = true;
2484 CcTest::InitializeVM();
2485 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2486 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2487 v8::HandleScope scope(CcTest::isolate());
2489 // Grow new space until maximum capacity reached.
2490 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2491 CcTest::heap()->new_space()->Grow();
2495 i::ScopedVector<char> source(1024);
2498 "var number_elements = %d;"
2499 "var elements = new Array(number_elements);"
2501 " for (var i = 0; i < number_elements; i++) {"
2502 " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2504 " return elements[number_elements - 1];"
2508 "%%OptimizeFunctionOnNextCall(f);"
2510 AllocationSite::kPretenureMinimumCreated);
2512 v8::Local<v8::Value> res = CompileRun(source.start());
2514 Handle<JSObject> o =
2515 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2517 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2518 FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2519 FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2520 CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(idx1)));
// If the double is boxed (HeapNumber), it must live in old data space;
// if unboxed, check the raw double value instead.
2521 if (!o->IsUnboxedDoubleField(idx2)) {
2522 CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(idx2)));
2524 CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
2527 JSObject* inner_object =
2528 reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
2529 CHECK(CcTest::heap()->InOldPointerSpace(inner_object));
2530 if (!inner_object->IsUnboxedDoubleField(idx1)) {
2532 CcTest::heap()->InOldDataSpace(inner_object->RawFastPropertyAt(idx1)));
2534 CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
2537 CcTest::heap()->InOldPointerSpace(inner_object->RawFastPropertyAt(idx2)));
// Checks pretenuring of an object whose properties are all doubles: the
// object goes to old pointer space, its properties backing store to old
// data space.
2541 TEST(OptimizedPretenuringDoubleArrayProperties) {
2542 i::FLAG_allow_natives_syntax = true;
2543 i::FLAG_expose_gc = true;
2544 CcTest::InitializeVM();
2545 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2546 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2547 v8::HandleScope scope(CcTest::isolate());
2549 // Grow new space until maximum capacity reached.
2550 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2551 CcTest::heap()->new_space()->Grow();
2554 i::ScopedVector<char> source(1024);
2557 "var number_elements = %d;"
2558 "var elements = new Array(number_elements);"
2560 " for (var i = 0; i < number_elements; i++) {"
2561 " elements[i] = {a: 1.1, b: 2.2};"
// After the loop, i == number_elements (var is function-scoped), so this
// returns the last element.
2563 " return elements[i - 1];"
2567 "%%OptimizeFunctionOnNextCall(f);"
2569 AllocationSite::kPretenureMinimumCreated);
2571 v8::Local<v8::Value> res = CompileRun(source.start());
2573 Handle<JSObject> o =
2574 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2576 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2577 CHECK(CcTest::heap()->InOldDataSpace(o->properties()));
// Checks pretenuring of a plain double-array literal: elements (a
// FixedDoubleArray) go to old data space, the JSArray itself to old pointer
// space.
2581 TEST(OptimizedPretenuringdoubleArrayLiterals) {
2582 i::FLAG_allow_natives_syntax = true;
2583 i::FLAG_expose_gc = true;
2584 CcTest::InitializeVM();
2585 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2586 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2587 v8::HandleScope scope(CcTest::isolate());
2589 // Grow new space until maximum capacity reached.
2590 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2591 CcTest::heap()->new_space()->Grow();
2594 i::ScopedVector<char> source(1024);
2597 "var number_elements = %d;"
2598 "var elements = new Array(number_elements);"
2600 " for (var i = 0; i < number_elements; i++) {"
2601 " elements[i] = [1.1, 2.2, 3.3];"
2603 " return elements[number_elements - 1];"
2607 "%%OptimizeFunctionOnNextCall(f);"
2609 AllocationSite::kPretenureMinimumCreated);
2611 v8::Local<v8::Value> res = CompileRun(source.start());
2613 Handle<JSObject> o =
2614 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2616 CHECK(CcTest::heap()->InOldDataSpace(o->elements()));
2617 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// Checks pretenuring of a nested literal mixing an object array and a double
// array; both inner arrays and the outer array must be tenured into the
// appropriate old spaces.
2621 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2622 i::FLAG_allow_natives_syntax = true;
2623 i::FLAG_expose_gc = true;
2624 CcTest::InitializeVM();
2625 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2626 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2627 v8::HandleScope scope(CcTest::isolate());
2629 // Grow new space until maximum capacity reached.
2630 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2631 CcTest::heap()->new_space()->Grow();
2634 i::ScopedVector<char> source(1024);
2637 "var number_elements = 100;"
2638 "var elements = new Array(number_elements);"
2640 " for (var i = 0; i < number_elements; i++) {"
2641 " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
2643 " return elements[number_elements - 1];"
2647 "%%OptimizeFunctionOnNextCall(f);"
2650 v8::Local<v8::Value> res = CompileRun(source.start());
2652 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2653 Handle<JSObject> int_array_handle =
2654 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2655 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2656 Handle<JSObject> double_array_handle =
2657 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2659 Handle<JSObject> o =
2660 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2661 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2662 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
2663 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
2664 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
2665 CHECK(CcheapInOldDataSpaceFixupMarker)
// Checks pretenuring of a literal nesting two object arrays; both inner
// arrays, their elements, and the outer array end up in old pointer space.
2669 TEST(OptimizedPretenuringNestedObjectLiterals) {
2670 i::FLAG_allow_natives_syntax = true;
2671 i::FLAG_expose_gc = true;
2672 CcTest::InitializeVM();
2673 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2674 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2675 v8::HandleScope scope(CcTest::isolate());
2677 // Grow new space until maximum capacity reached.
2678 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2679 CcTest::heap()->new_space()->Grow();
2682 i::ScopedVector<char> source(1024);
2685 "var number_elements = %d;"
2686 "var elements = new Array(number_elements);"
2688 " for (var i = 0; i < number_elements; i++) {"
2689 " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
2691 " return elements[number_elements - 1];"
2695 "%%OptimizeFunctionOnNextCall(f);"
2697 AllocationSite::kPretenureMinimumCreated);
2699 v8::Local<v8::Value> res = CompileRun(source.start());
2701 v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
2702 Handle<JSObject> int_array_handle_1 =
2703 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
2704 v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
2705 Handle<JSObject> int_array_handle_2 =
2706 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
2708 Handle<JSObject> o =
2709 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2710 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2711 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_1));
2712 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_1->elements()));
2713 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_2));
2714 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_2->elements()));
// Checks pretenuring of a literal nesting two double arrays; the JSArrays go
// to old pointer space, their FixedDoubleArray element stores to old data
// space.
2718 TEST(OptimizedPretenuringNestedDoubleLiterals) {
2719 i::FLAG_allow_natives_syntax = true;
2720 i::FLAG_expose_gc = true;
2721 CcTest::InitializeVM();
2722 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2723 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2724 v8::HandleScope scope(CcTest::isolate());
2726 // Grow new space until maximum capacity reached.
2727 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2728 CcTest::heap()->new_space()->Grow();
2731 i::ScopedVector<char> source(1024);
2734 "var number_elements = %d;"
2735 "var elements = new Array(number_elements);"
2737 " for (var i = 0; i < number_elements; i++) {"
2738 " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
2740 " return elements[number_elements - 1];"
2744 "%%OptimizeFunctionOnNextCall(f);"
2746 AllocationSite::kPretenureMinimumCreated);
2748 v8::Local<v8::Value> res = CompileRun(source.start());
2750 v8::Local<v8::Value> double_array_1 =
2751 v8::Object::Cast(*res)->Get(v8_str("0"));
2752 Handle<JSObject> double_array_handle_1 =
2753 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
2754 v8::Local<v8::Value> double_array_2 =
2755 v8::Object::Cast(*res)->Get(v8_str("1"));
2756 Handle<JSObject> double_array_handle_2 =
2757 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
2759 Handle<JSObject> o =
2760 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2761 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2762 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_1));
2763 CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_1->elements()));
2764 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_2));
2765 CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_2->elements()));
2769 // Make sure pretenuring feedback is gathered for constructed objects as well
2769 // as for literals.
2771 TEST(OptimizedPretenuringConstructorCalls) {
2772 if (!i::FLAG_pretenuring_call_new) {
2773 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
2776 i::FLAG_allow_natives_syntax = true;
2777 i::FLAG_expose_gc = true;
2778 CcTest::InitializeVM();
2779 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2780 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2781 v8::HandleScope scope(CcTest::isolate());
2783 // Grow new space until maximum capacity reached.
2784 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2785 CcTest::heap()->new_space()->Grow();
2788 i::ScopedVector<char> source(1024);
2789 // Call new is doing slack tracking for the first
2790 // JSFunction::kGenerousAllocationCount allocations, and we can't find
2791 // mementos during that time.
2794 "var number_elements = %d;"
2795 "var elements = new Array(number_elements);"
2801 " for (var i = 0; i < number_elements; i++) {"
2802 " elements[i] = new foo();"
2804 " return elements[number_elements - 1];"
2808 "%%OptimizeFunctionOnNextCall(f);"
// Extra allocations compensate for the slack-tracking window noted above.
2810 AllocationSite::kPretenureMinimumCreated +
2811 JSFunction::kGenerousAllocationCount);
2813 v8::Local<v8::Value> res = CompileRun(source.start());
2815 Handle<JSObject> o =
2816 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2818 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// Same as OptimizedPretenuringConstructorCalls, but the constructor is
// defined inline in the generated source ("function g() { this.a = 0; }").
2822 TEST(OptimizedPretenuringCallNew) {
2823 if (!i::FLAG_pretenuring_call_new) {
2824 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
2827 i::FLAG_allow_natives_syntax = true;
2828 i::FLAG_expose_gc = true;
2829 CcTest::InitializeVM();
2830 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2831 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2832 v8::HandleScope scope(CcTest::isolate());
2834 // Grow new space until maximum capacity reached.
2835 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2836 CcTest::heap()->new_space()->Grow();
2839 i::ScopedVector<char> source(1024);
2840 // Call new is doing slack tracking for the first
2841 // JSFunction::kGenerousAllocationCount allocations, and we can't find
2842 // mementos during that time.
2845 "var number_elements = %d;"
2846 "var elements = new Array(number_elements);"
2847 "function g() { this.a = 0; }"
2849 " for (var i = 0; i < number_elements; i++) {"
2850 " elements[i] = new g();"
2852 " return elements[number_elements - 1];"
2856 "%%OptimizeFunctionOnNextCall(f);"
2858 AllocationSite::kPretenureMinimumCreated +
2859 JSFunction::kGenerousAllocationCount);
2861 v8::Local<v8::Value> res = CompileRun(source.start());
2863 Handle<JSObject> o =
2864 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2865 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2869 // Test regular array literals allocation.
// Without pretenuring feedback kicking in, an optimized array literal's
// elements must still be allocated in new space.
2870 TEST(OptimizedAllocationArrayLiterals) {
2871 i::FLAG_allow_natives_syntax = true;
2872 CcTest::InitializeVM();
2873 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2874 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2875 v8::HandleScope scope(CcTest::isolate());
2877 v8::Local<v8::Value> res = CompileRun(
2879 " var numbers = new Array(1, 2, 3);"
2880 " numbers[0] = 3.14;"
2884 "%OptimizeFunctionOnNextCall(f);"
2886 CHECK_EQ(static_cast<int>(3.14),
2887 v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
2889 Handle<JSObject> o =
2890 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2892 CHECK(CcTest::heap()->InNewSpace(o->elements()));
// Returns the number of transitions recorded on |map|. Helper for the
// transition-array shrinking tests below.
2896 static int CountMapTransitions(Map* map) {
2897 return TransitionArray::NumberOfTransitions(map->raw_transitions());
2901 // Test that map transitions are cleared and maps are collected with
2902 // incremental marking as well.
// NOTE(review): the TEST(...) macro line itself falls in lines elided from
// this excerpt; the body creates 256 one-property transitions off F, keeps
// one instance ("root") alive, and expects a simulated incremental-marking
// GC to clear all but the single live transition.
2904 i::FLAG_stress_compaction = false;
2905 i::FLAG_allow_natives_syntax = true;
2906 i::FLAG_trace_incremental_marking = true;
2907 i::FLAG_retain_maps_for_n_gc = 0;
2908 CcTest::InitializeVM();
2909 v8::HandleScope scope(CcTest::isolate());
2910 static const int transitions_count = 256;
2912 CompileRun("function F() {}");
2914 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2915 for (int i = 0; i < transitions_count; i++) {
2916 EmbeddedVector<char, 64> buffer;
2917 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2918 CompileRun(buffer.start());
2920 CompileRun("var root = new F;");
2923 Handle<JSObject> root =
2924 v8::Utils::OpenHandle(
2925 *v8::Handle<v8::Object>::Cast(
2926 CcTest::global()->Get(v8_str("root"))));
2928 // Count number of live transitions before marking.
2929 int transitions_before = CountMapTransitions(root->map());
2930 CompileRun("%DebugPrint(root);");
2931 CHECK_EQ(transitions_count, transitions_before);
2933 SimulateIncrementalMarking(CcTest::heap());
2934 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2936 // Count number of live transitions after marking. Note that one transition
2937 // is left, because 'o' still holds an instance of one transition target.
2938 int transitions_after = CountMapTransitions(root->map());
2939 CompileRun("%DebugPrint(root);");
2940 CHECK_EQ(1, transitions_after);
// Creates |transitions_count| map transitions off the script-defined
// constructor F by adding a differently-named property to a fresh instance
// each iteration. AlwaysAllocateScope prevents GC during the setup.
2945 static void AddTransitions(int transitions_count) {
2946 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2947 for (int i = 0; i < transitions_count; i++) {
2948 EmbeddedVector<char, 64> buffer;
2949 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2950 CompileRun(buffer.start());
// Looks up |name| on the current context's global object and returns it as
// an internal JSObject handle. Assumes the property exists and is an object.
2955 static Handle<JSObject> GetByName(const char* name) {
2956 return v8::Utils::OpenHandle(
2957 *v8::Handle<v8::Object>::Cast(
2958 CcTest::global()->Get(v8_str(name))));
// Adds property |property_name| = 23 to |object| while arranging for a GC
// to be triggered after |gc_count| allocations (via gc_interval and the
// allocation timeout). Used to exercise transition-array shrinking during
// allocation inside SetProperty.
2962 static void AddPropertyTo(
2963 int gc_count, Handle<JSObject> object, const char* property_name) {
2964 Isolate* isolate = CcTest::i_isolate();
2965 Factory* factory = isolate->factory();
2966 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
2967 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
// Force a global GC after gc_count allocations, with no map retention.
2968 i::FLAG_gc_interval = gc_count;
2969 i::FLAG_gc_global = true;
2970 i::FLAG_retain_maps_for_n_gc = 0;
2971 CcTest::heap()->set_allocation_timeout(gc_count);
2972 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
// Verifies that a transition array shrinks down during a GC triggered while
// allocating: after all other transition targets die, only the transition
// reachable through live objects remains on the back-pointer map.
2976 TEST(TransitionArrayShrinksDuringAllocToZero) {
2977 i::FLAG_stress_compaction = false;
2978 i::FLAG_allow_natives_syntax = true;
2979 CcTest::InitializeVM();
2980 v8::HandleScope scope(CcTest::isolate());
2981 static const int transitions_count = 10;
2982 CompileRun("function F() { }");
2983 AddTransitions(transitions_count);
2984 CompileRun("var root = new F;");
2985 Handle<JSObject> root = GetByName("root");
2987 // Count number of live transitions before marking.
2988 int transitions_before = CountMapTransitions(root->map());
2989 CHECK_EQ(transitions_count, transitions_before);
// Keep one transition target alive through 'o' (continuation lines of this
// script are not visible in this chunk).
2992 CompileRun("o = new F;"
2994 root = GetByName("root");
// GC fires mid-SetProperty (after 2 allocations) and shrinks the array.
2995 AddPropertyTo(2, root, "funny");
2996 CcTest::heap()->CollectGarbage(NEW_SPACE);
2998 // Count number of live transitions after marking. Note that one transition
2999 // is left, because 'o' still holds an instance of one transition target.
3000 int transitions_after = CountMapTransitions(
3001 Map::cast(root->map()->GetBackPointer()));
3002 CHECK_EQ(1, transitions_after);
// Like TransitionArrayShrinksDuringAllocToZero, but 'root' itself keeps a
// transition target alive, so two transitions survive on the back-pointer
// map (the retained one plus the newly added "funny" transition).
3006 TEST(TransitionArrayShrinksDuringAllocToOne) {
3007 i::FLAG_stress_compaction = false;
3008 i::FLAG_allow_natives_syntax = true;
3009 CcTest::InitializeVM();
3010 v8::HandleScope scope(CcTest::isolate());
3011 static const int transitions_count = 10;
3012 CompileRun("function F() {}");
3013 AddTransitions(transitions_count);
3014 CompileRun("var root = new F;");
3015 Handle<JSObject> root = GetByName("root");
3017 // Count number of live transitions before marking.
3018 int transitions_before = CountMapTransitions(root->map());
3019 CHECK_EQ(transitions_count, transitions_before);
3021 root = GetByName("root");
// GC fires mid-SetProperty (after 2 allocations).
3022 AddPropertyTo(2, root, "funny");
3023 CcTest::heap()->CollectGarbage(NEW_SPACE);
3025 // Count number of live transitions after marking. Note that one transition
3026 // is left, because 'o' still holds an instance of one transition target.
3027 int transitions_after = CountMapTransitions(
3028 Map::cast(root->map()->GetBackPointer()));
3029 CHECK_EQ(2, transitions_after);
// Variant where the property being added ("prop9") already has a transition
// in the array; after an old-space GC only that found transition survives.
3033 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3034 i::FLAG_stress_compaction = false;
3035 i::FLAG_allow_natives_syntax = true;
3036 CcTest::InitializeVM();
3037 v8::HandleScope scope(CcTest::isolate());
3038 static const int transitions_count = 10;
3039 CompileRun("function F() {}");
3040 AddTransitions(transitions_count);
3041 CompileRun("var root = new F;");
3042 Handle<JSObject> root = GetByName("root");
3044 // Count number of live transitions before marking.
3045 int transitions_before = CountMapTransitions(root->map());
3046 CHECK_EQ(transitions_count, transitions_before);
3048 root = GetByName("root");
// gc_count == 0: GC may be triggered immediately on the next allocation.
3049 AddPropertyTo(0, root, "prop9");
3050 CcTest::i_isolate()->heap()->CollectGarbage(OLD_POINTER_SPACE);
3052 // Count number of live transitions after marking. Note that one transition
3053 // is left, because 'o' still holds an instance of one transition target.
3054 int transitions_after = CountMapTransitions(
3055 Map::cast(root->map()->GetBackPointer()));
3056 CHECK_EQ(1, transitions_after);
// Starts with a single (simple, inline) transition and adds another property
// so the store must be upgraded from the simple form to a full
// TransitionArray; verifies the back-pointer map ends with one transition.
3060 TEST(TransitionArraySimpleToFull) {
3061 i::FLAG_stress_compaction = false;
3062 i::FLAG_allow_natives_syntax = true;
3063 CcTest::InitializeVM();
3064 v8::HandleScope scope(CcTest::isolate());
3065 static const int transitions_count = 1;
3066 CompileRun("function F() {}");
3067 AddTransitions(transitions_count);
3068 CompileRun("var root = new F;");
3069 Handle<JSObject> root = GetByName("root");
3071 // Count number of live transitions before marking.
3072 int transitions_before = CountMapTransitions(root->map());
3073 CHECK_EQ(transitions_count, transitions_before);
// Keep a transition target alive (script continuation not visible here).
3075 CompileRun("o = new F;"
3077 root = GetByName("root");
// Precondition: with exactly one transition the simple encoding is in use.
3078 DCHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
3079 AddPropertyTo(2, root, "happy");
3081 // Count number of live transitions after marking. Note that one transition
3082 // is left, because 'o' still holds an instance of one transition target.
3083 int transitions_after = CountMapTransitions(
3084 Map::cast(root->map()->GetBackPointer()));
3085 CHECK_EQ(1, transitions_after);
// Regression test for v8 issue 2143: a StoreIC performing a map transition
// while incremental marking is in progress must re-grey the root object so
// map collection does not leave it with a dead map.
3090 TEST(Regress2143a) {
3091 i::FLAG_collect_maps = true;
3092 i::FLAG_incremental_marking = true;
3093 CcTest::InitializeVM();
3094 v8::HandleScope scope(CcTest::isolate());
3096 // Prepare a map transition from the root object together with a yet
3097 // untransitioned root object.
3098 CompileRun("var root = new Object;"
3100 "root = new Object;");
3102 SimulateIncrementalMarking(CcTest::heap());
3104 // Compile a StoreIC that performs the prepared map transition. This
3105 // will restart incremental marking and should make sure the root is
3106 // marked grey again.
3107 CompileRun("function f(o) {"
3113 // This bug only triggers with aggressive IC clearing.
3114 CcTest::heap()->AgeInlineCaches();
3116 // Explicitly request GC to perform final marking step and sweeping.
3117 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3119 Handle<JSObject> root =
3120 v8::Utils::OpenHandle(
3121 *v8::Handle<v8::Object>::Cast(
3122 CcTest::global()->Get(v8_str("root"))));
3124 // The root object should be in a sane state.
3125 CHECK(root->IsJSObject());
3126 CHECK(root->map()->IsMap());
// Same scenario as Regress2143a, but the transition is performed by an
// optimized LStoreNamedField (then deoptimized) instead of a StoreIC.
3130 TEST(Regress2143b) {
3131 i::FLAG_collect_maps = true;
3132 i::FLAG_incremental_marking = true;
3133 i::FLAG_allow_natives_syntax = true;
3134 CcTest::InitializeVM();
3135 v8::HandleScope scope(CcTest::isolate());
3137 // Prepare a map transition from the root object together with a yet
3138 // untransitioned root object.
3139 CompileRun("var root = new Object;"
3141 "root = new Object;");
3143 SimulateIncrementalMarking(CcTest::heap());
3145 // Compile an optimized LStoreNamedField that performs the prepared
3146 // map transition. This will restart incremental marking and should
3147 // make sure the root is marked grey again.
3148 CompileRun("function f(o) {"
3153 "%OptimizeFunctionOnNextCall(f);"
3155 "%DeoptimizeFunction(f);");
3157 // This bug only triggers with aggressive IC clearing.
3158 CcTest::heap()->AgeInlineCaches();
3160 // Explicitly request GC to perform final marking step and sweeping.
3161 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3163 Handle<JSObject> root =
3164 v8::Utils::OpenHandle(
3165 *v8::Handle<v8::Object>::Cast(
3166 CcTest::global()->Get(v8_str("root"))));
3168 // The root object should be in a sane state.
3169 CHECK(root->IsJSObject());
3170 CHECK(root->map()->IsMap());
// Verifies that mostly-empty old-space pages are progressively released back
// to the OS: allocates 20 nearly-full pages of garbage, then checks page
// counts shrink over successive GCs down to a single page after a
// last-resort collection.
3174 TEST(ReleaseOverReservedPages) {
3175 if (FLAG_never_compact) return;
3176 i::FLAG_trace_gc = true;
3177 // The optimizer can allocate stuff, messing up the test.
3178 i::FLAG_crankshaft = false;
3179 i::FLAG_always_opt = false;
3180 CcTest::InitializeVM();
3181 Isolate* isolate = CcTest::i_isolate();
3182 Factory* factory = isolate->factory();
3183 Heap* heap = isolate->heap();
3184 v8::HandleScope scope(CcTest::isolate());
3185 static const int number_of_test_pages = 20;
3187 // Prepare many pages with low live-bytes count.
3188 PagedSpace* old_pointer_space = heap->old_pointer_space();
3189 CHECK_EQ(1, old_pointer_space->CountTotalPages());
3190 for (int i = 0; i < number_of_test_pages; i++) {
3191 AlwaysAllocateScope always_allocate(isolate);
// Fill the current page so the tiny array below lands on a fresh page.
3192 SimulateFullSpace(old_pointer_space);
3193 factory->NewFixedArray(1, TENURED);
3195 CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
3197 // Triggering one GC will cause a lot of garbage to be discovered but
3198 // even spread across all allocated pages.
3199 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
3200 "triggered for preparation");
3201 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
3203 // Triggering subsequent GCs should cause at least half of the pages
3204 // to be released to the OS after at most two cycles.
3205 heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
3206 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
3207 heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
3208 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
3210 // Triggering a last-resort GC should cause all pages to be released to the
3211 // OS so that other processes can seize the memory. If we get a failure here
3212 // where there are 2 pages left instead of 1, then we should increase the
3213 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3214 // first page should be small in order to reduce memory used when the VM
3215 // boots, but if the 20 small arrays don't fit on the first page then that's
3216 // an indication that it is too small.
3217 heap->CollectAllAvailableGarbage("triggered really hard");
3218 CHECK_EQ(1, old_pointer_space->CountTotalPages());
// NOTE(review): the TEST(...) header for this body is not visible in this
// chunk. The body creates an old-space SlicedString over a new-space
// sequential parent and checks the parent stays a SeqOneByteString across a
// full GC (i.e. the slice's parent pointer is updated, not left dangling).
3223 i::FLAG_stress_compaction = false;
3224 CcTest::InitializeVM();
3225 Isolate* isolate = CcTest::i_isolate();
3226 Factory* factory = isolate->factory();
3227 v8::HandleScope scope(CcTest::isolate());
// 'slice' starts as the empty string; its location is overwritten below.
3228 Handle<String> slice(CcTest::heap()->empty_string());
3231 // Generate a parent that lives in new-space.
3232 v8::HandleScope inner_scope(CcTest::isolate());
3233 const char* c = "This text is long enough to trigger sliced strings.";
3234 Handle<String> s = factory->NewStringFromAsciiChecked(c);
3235 CHECK(s->IsSeqOneByteString());
3236 CHECK(CcTest::heap()->InNewSpace(*s));
3238 // Generate a sliced string that is based on the above parent and
3239 // lives in old-space.
3240 SimulateFullSpace(CcTest::heap()->new_space());
3241 AlwaysAllocateScope always_allocate(isolate);
3242 Handle<String> t = factory->NewProperSubString(s, 5, 35);
3243 CHECK(t->IsSlicedString());
3244 CHECK(!CcTest::heap()->InNewSpace(*t));
// Smuggle the slice out of the inner scope by overwriting the outer
// handle's slot directly.
3245 *slice.location() = *t.location();
3248 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
3249 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3250 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
// Smoke test: printing a SharedFunctionInfo to stdout must not crash.
// (Guarded by OBJECT_PRINT — the matching #ifdef is outside this chunk.)
3255 TEST(PrintSharedFunctionInfo) {
3256 CcTest::InitializeVM();
3257 v8::HandleScope scope(CcTest::isolate());
3258 const char* source = "f = function() { return 987654321; }\n"
3259 "g = function() { return 123456789; }\n";
3261 Handle<JSFunction> g =
3262 v8::Utils::OpenHandle(
3263 *v8::Handle<v8::Function>::Cast(
3264 CcTest::global()->Get(v8_str("g"))));
3266 OFStream os(stdout);
3267 g->shared()->Print(os);
3270 #endif // OBJECT_PRINT
// NOTE(review): the TEST(...) header for this body is not visible in this
// chunk. The body checks that setting a hidden value and an identity hash in
// either order yields a consistent backing hashtable of bounded size.
3274 CcTest::InitializeVM();
3275 v8::HandleScope scope(CcTest::isolate());
3277 v8::Handle<v8::String> value = v8_str("val string");
3278 Smi* hash = Smi::FromInt(321);
3279 Factory* factory = CcTest::i_isolate()->factory();
3281 for (int i = 0; i < 2; i++) {
3282 // Store identity hash first and common hidden property second.
3283 v8::Handle<v8::Object> obj = v8::Object::New(CcTest::isolate());
3284 Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
3285 CHECK(internal_obj->HasFastProperties());
3287 // In the first iteration, set hidden value first and identity hash second.
3288 // In the second iteration, reverse the order.
3289 if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
3290 JSObject::SetIdentityHash(internal_obj, handle(hash, CcTest::i_isolate()));
3291 if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
3295 internal_obj->GetHiddenProperty(factory->identity_hash_string()));
3296 CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
// The hidden-properties hashtable lives in the object's first fast field.
3299 FieldIndex index = FieldIndex::ForDescriptor(internal_obj->map(), 0);
3300 ObjectHashTable* hashtable = ObjectHashTable::cast(
3301 internal_obj->RawFastPropertyAt(index));
3302 // HashTable header (5) and 4 initial entries (8).
3303 CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
// Verifies that incremental marking followed by a full GC keeps the weak
// cells of a call IC's feedback vector alive (not cleared) when the target
// closures are still reachable, even though they come from two contexts.
3308 TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3309 if (i::FLAG_always_opt) return;
3310 CcTest::InitializeVM();
3311 v8::HandleScope scope(CcTest::isolate());
3312 v8::Local<v8::Value> fun1, fun2;
// Two closures named 'fun', each created in its own fresh context (the
// LocalContext setup lines are not visible in this chunk).
3316 CompileRun("function fun() {};");
3317 fun1 = env->Global()->Get(v8_str("fun"));
3322 CompileRun("function fun() {};");
3323 fun2 = env->Global()->Get(v8_str("fun"));
3326 // Prepare function f that contains type feedback for closures
3327 // originating from two different native contexts.
3328 CcTest::global()->Set(v8_str("fun1"), fun1);
3329 CcTest::global()->Set(v8_str("fun2"), fun2);
3330 CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3332 Handle<JSFunction> f =
3333 v8::Utils::OpenHandle(
3334 *v8::Handle<v8::Function>::Cast(
3335 CcTest::global()->Get(v8_str("f"))));
3337 Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
3339 int expected_slots = 2;
3340 CHECK_EQ(expected_slots, feedback_vector->ICSlots());
// slot1/slot2 are declared in lines not visible in this chunk.
3343 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot1))->IsWeakCell());
3344 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot2))->IsWeakCell());
3346 SimulateIncrementalMarking(CcTest::heap());
3347 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
// The weak cells must still hold the (reachable) closures after GC.
3349 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot1)))
3351 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot2)))
// Scans |code|'s relocation info for call/construct targets and returns the
// first inline cache stub of the requested |kind| (return statements are in
// lines not visible in this chunk).
3356 static Code* FindFirstIC(Code* code, Code::Kind kind) {
3357 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3358 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
3359 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3360 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3361 RelocInfo* info = it.rinfo();
3362 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3363 if (target->is_inline_cache_stub() && target->kind() == kind) {
// Asserts that the load IC at |ic_slot_index| in |f|'s feedback vector is in
// |desired_state| (used when --vector-ics is enabled).
3371 static void CheckVectorIC(Handle<JSFunction> f, int ic_slot_index,
3372 InlineCacheState desired_state) {
3373 Handle<TypeFeedbackVector> vector =
3374 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3375 FeedbackVectorICSlot slot(ic_slot_index);
3376 LoadICNexus nexus(vector, slot);
3377 CHECK(nexus.StateFromFeedback() == desired_state);
// Asserts that the load IC at |ic_slot_index| in |f|'s feedback vector has
// been cleared (companion to CheckVectorIC).
3381 static void CheckVectorICCleared(Handle<JSFunction> f, int ic_slot_index) {
3382 Handle<TypeFeedbackVector> vector =
3383 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3384 FeedbackVectorICSlot slot(ic_slot_index);
3385 LoadICNexus nexus(vector, slot);
3386 CHECK(IC::IsCleared(&nexus));
// A monomorphic load IC whose receiver map comes from the same native
// context must survive incremental marking + full GC unchanged.
3390 TEST(IncrementalMarkingPreservesMonomorphicIC) {
3391 if (i::FLAG_always_opt) return;
3392 CcTest::InitializeVM();
3393 v8::HandleScope scope(CcTest::isolate());
3395 // Prepare function f that contains a monomorphic IC for object
3396 // originating from the same native context.
3397 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3398 "function f(o) { return o.x; } f(obj); f(obj);");
3399 Handle<JSFunction> f =
3400 v8::Utils::OpenHandle(
3401 *v8::Handle<v8::Function>::Cast(
3402 CcTest::global()->Get(v8_str("f"))));
3404 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
// With --vector-ics the state lives in the feedback vector, so the code
// object's own state stays DEFAULT.
3405 if (FLAG_vector_ics) {
3406 CheckVectorIC(f, 0, MONOMORPHIC);
3407 CHECK(ic_before->ic_state() == DEFAULT);
3409 CHECK(ic_before->ic_state() == MONOMORPHIC);
3412 SimulateIncrementalMarking(CcTest::heap());
3413 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3415 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3416 if (FLAG_vector_ics) {
3417 CheckVectorIC(f, 0, MONOMORPHIC);
3418 CHECK(ic_after->ic_state() == DEFAULT);
3420 CHECK(ic_after->ic_state() == MONOMORPHIC);
// A monomorphic load IC whose receiver map comes from a *disposed* native
// context must be cleared by incremental marking + full GC.
3425 TEST(IncrementalMarkingClearsMonomorphicIC) {
3426 if (i::FLAG_always_opt) return;
3427 CcTest::InitializeVM();
3428 v8::HandleScope scope(CcTest::isolate());
3429 v8::Local<v8::Value> obj1;
// obj1 is created in a separate context (setup lines not visible here).
3433 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3434 obj1 = env->Global()->Get(v8_str("obj"));
3437 // Prepare function f that contains a monomorphic IC for object
3438 // originating from a different native context.
3439 CcTest::global()->Set(v8_str("obj1"), obj1);
3440 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
3441 Handle<JSFunction> f = v8::Utils::OpenHandle(
3442 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3444 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3445 if (FLAG_vector_ics) {
3446 CheckVectorIC(f, 0, MONOMORPHIC);
3447 CHECK(ic_before->ic_state() == DEFAULT);
3449 CHECK(ic_before->ic_state() == MONOMORPHIC);
3452 // Fire context dispose notification.
3453 CcTest::isolate()->ContextDisposedNotification();
3454 SimulateIncrementalMarking(CcTest::heap());
3455 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3457 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3458 if (FLAG_vector_ics) {
3459 CheckVectorICCleared(f, 0);
3460 CHECK(ic_after->ic_state() == DEFAULT);
3462 CHECK(IC::IsCleared(ic_after));
// A polymorphic load IC with receiver maps from two live native contexts
// must survive incremental marking + full GC (no dispose notification).
3467 TEST(IncrementalMarkingPreservesPolymorphicIC) {
3468 if (i::FLAG_always_opt) return;
3469 CcTest::InitializeVM();
3470 v8::HandleScope scope(CcTest::isolate());
3471 v8::Local<v8::Value> obj1, obj2;
// obj1/obj2 are created in two separate contexts (setup lines not visible).
3475 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3476 obj1 = env->Global()->Get(v8_str("obj"));
3481 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3482 obj2 = env->Global()->Get(v8_str("obj"));
3485 // Prepare function f that contains a polymorphic IC for objects
3486 // originating from two different native contexts.
3487 CcTest::global()->Set(v8_str("obj1"), obj1);
3488 CcTest::global()->Set(v8_str("obj2"), obj2);
3489 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3490 Handle<JSFunction> f = v8::Utils::OpenHandle(
3491 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3493 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3494 if (FLAG_vector_ics) {
3495 CheckVectorIC(f, 0, POLYMORPHIC);
3496 CHECK(ic_before->ic_state() == DEFAULT);
3498 CHECK(ic_before->ic_state() == POLYMORPHIC);
3501 // Fire context dispose notification.
3502 SimulateIncrementalMarking(CcTest::heap());
3503 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3505 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3506 if (FLAG_vector_ics) {
3507 CheckVectorIC(f, 0, POLYMORPHIC);
3508 CHECK(ic_after->ic_state() == DEFAULT);
3510 CHECK(ic_after->ic_state() == POLYMORPHIC);
// A polymorphic load IC whose receiver maps come from disposed native
// contexts must be cleared by incremental marking + full GC.
3515 TEST(IncrementalMarkingClearsPolymorphicIC) {
3516 if (i::FLAG_always_opt) return;
3517 CcTest::InitializeVM();
3518 v8::HandleScope scope(CcTest::isolate());
3519 v8::Local<v8::Value> obj1, obj2;
// obj1/obj2 are created in two separate contexts (setup lines not visible).
3523 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3524 obj1 = env->Global()->Get(v8_str("obj"));
3529 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3530 obj2 = env->Global()->Get(v8_str("obj"));
3533 // Prepare function f that contains a polymorphic IC for objects
3534 // originating from two different native contexts.
3535 CcTest::global()->Set(v8_str("obj1"), obj1);
3536 CcTest::global()->Set(v8_str("obj2"), obj2);
3537 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3538 Handle<JSFunction> f = v8::Utils::OpenHandle(
3539 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3541 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3542 if (FLAG_vector_ics) {
3543 CheckVectorIC(f, 0, POLYMORPHIC);
3544 CHECK(ic_before->ic_state() == POLYMORPHIC);
3546 CHECK(ic_before->ic_state() == POLYMORPHIC);
3549 // Fire context dispose notification.
3550 CcTest::isolate()->ContextDisposedNotification();
3551 SimulateIncrementalMarking(CcTest::heap());
3552 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3554 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3555 if (FLAG_vector_ics) {
3556 CheckVectorICCleared(f, 0);
// NOTE(review): this checks ic_before, but the analogous monomorphic test
// (IncrementalMarkingClearsMonomorphicIC) checks ic_after at this point —
// possibly a typo; confirm against upstream before changing.
3557 CHECK(ic_before->ic_state() == DEFAULT);
3559 CHECK(IC::IsCleared(ic_after));
// External one-byte string resource backed by a heap-allocated char buffer.
// Dispose() frees the buffer; IsDisposed() lets tests observe whether the GC
// has released the external string (member declarations are in lines not
// visible in this chunk).
3564 class SourceResource : public v8::String::ExternalOneByteStringResource {
3566 explicit SourceResource(const char* data)
3567 : data_(data), length_(strlen(data)) { }
3569 virtual void Dispose() {
3570 i::DeleteArray(data_);
3574 const char* data() const { return data_; }
3576 size_t length() const { return length_; }
3578 bool IsDisposed() { return data_ == NULL; }
// Runs |source| as an external string, fires |accessor| (a stack getter or
// setter expression), and asserts the external source data is released by GC
// only after the accessor has consumed the stack trace.
3586 void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
3587 const char* accessor) {
3588 // Test that the data retained by the Error.stack accessor is released
3589 // after the first time the accessor is fired. We use external string
3590 // to check whether the data is being released since the external string
3591 // resource's callback is fired when the external string is GC'ed.
3592 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3593 v8::HandleScope scope(isolate);
3594 SourceResource* resource = new SourceResource(i::StrDup(source));
// Inner scope (its opening brace is in a line not visible in this chunk).
3596 v8::HandleScope scope(isolate);
3597 v8::Handle<v8::String> source_string =
3598 v8::String::NewExternal(isolate, resource);
3599 i_isolate->heap()->CollectAllAvailableGarbage();
3600 v8::Script::Compile(source_string)->Run();
3601 CHECK(!resource->IsDisposed());
3603 // i_isolate->heap()->CollectAllAvailableGarbage();
3604 CHECK(!resource->IsDisposed());
3606 CompileRun(accessor);
3607 i_isolate->heap()->CollectAllAvailableGarbage();
3609 // External source has been released.
3610 CHECK(resource->IsDisposed());
// Exercises ReleaseStackTraceDataTest over four error scenarios (normal
// error, stack overflow, and both used as a prototype) with both the stack
// getter and setter. Runs in a fresh isolate so flags take effect.
3615 UNINITIALIZED_TEST(ReleaseStackTraceData) {
3616 if (i::FLAG_always_opt) {
3617 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
3618 // See: https://codereview.chromium.org/181833004/
3621 FLAG_use_ic = false; // ICs retain objects.
3622 FLAG_concurrent_recompilation = false;
3623 v8::Isolate* isolate = v8::Isolate::New();
3625 v8::Isolate::Scope isolate_scope(isolate);
3626 v8::HandleScope handle_scope(isolate);
3627 v8::Context::New(isolate)->Enter();
3628 static const char* source1 = "var error = null; "
3629 /* Normal Error */ "try { "
3630 " throw new Error(); "
3634 static const char* source2 = "var error = null; "
3635 /* Stack overflow */ "try { "
3636 " (function f() { f(); })(); "
3640 static const char* source3 = "var error = null; "
3641 /* Normal Error */ "try { "
3642 /* as prototype */ " throw new Error(); "
3645 " error.__proto__ = e; "
3647 static const char* source4 = "var error = null; "
3648 /* Stack overflow */ "try { "
3649 /* as prototype */ " (function f() { f(); })(); "
3652 " error.__proto__ = e; "
3654 static const char* getter = "error.stack";
3655 static const char* setter = "error.stack = 0";
3657 ReleaseStackTraceDataTest(isolate, source1, setter);
3658 ReleaseStackTraceDataTest(isolate, source2, setter);
3659 // We do not test source3 and source4 with setter, since the setter is
3660 // supposed to (untypically) write to the receiver, not the holder. This is
3661 // to emulate the behavior of a data property.
3663 ReleaseStackTraceDataTest(isolate, source1, getter);
3664 ReleaseStackTraceDataTest(isolate, source2, getter);
3665 ReleaseStackTraceDataTest(isolate, source3, getter);
3666 ReleaseStackTraceDataTest(isolate, source4, getter);
// Regression test for issue 159140: code flushing must not flush the
// unoptimized code of a function whose optimized code is still reachable,
// otherwise deoptimization ("g('bozo')" forcing a deopt) crashes.
3672 TEST(Regress159140) {
3673 i::FLAG_allow_natives_syntax = true;
3674 i::FLAG_flush_code_incrementally = true;
3675 CcTest::InitializeVM();
3676 Isolate* isolate = CcTest::i_isolate();
3677 Heap* heap = isolate->heap();
3678 HandleScope scope(isolate);
3680 // Perform one initial GC to enable code flushing.
3681 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3683 // Prepare several closures that are all eligible for code flushing
3684 // because all reachable ones are not optimized. Make sure that the
3685 // optimized code object is directly reachable through a handle so
3686 // that it is marked black during incremental marking.
3689 HandleScope inner_scope(isolate);
3690 CompileRun("function h(x) {}"
3691 "function mkClosure() {"
3692 " return function(x) { return x + 1; };"
3694 "var f = mkClosure();"
3695 "var g = mkClosure();"
3699 "%OptimizeFunctionOnNextCall(f); f(3);"
3700 "%OptimizeFunctionOnNextCall(h); h(3);");
3702 Handle<JSFunction> f =
3703 v8::Utils::OpenHandle(
3704 *v8::Handle<v8::Function>::Cast(
3705 CcTest::global()->Get(v8_str("f"))));
3706 CHECK(f->is_compiled());
3707 CompileRun("f = null;");
3709 Handle<JSFunction> g =
3710 v8::Utils::OpenHandle(
3711 *v8::Handle<v8::Function>::Cast(
3712 CcTest::global()->Get(v8_str("g"))));
3713 CHECK(g->is_compiled());
// Age g's code past the flushing threshold so it becomes a candidate.
3714 const int kAgingThreshold = 6;
3715 for (int i = 0; i < kAgingThreshold; i++) {
3716 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
// Keep f's (optimized) code alive via an escaping handle; 'code' is
// declared in a line not visible in this chunk.
3719 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
3722 // Simulate incremental marking so that the functions are enqueued as
3723 // code flushing candidates. Then optimize one function. Finally
3724 // finish the GC to complete code flushing.
3725 SimulateIncrementalMarking(heap);
3726 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
3727 heap->CollectAllGarbage(Heap::kNoGCFlags);
3729 // Unoptimized code is missing and the deoptimizer will go ballistic.
3730 CompileRun("g('bozo');");
// Regression test for issue 165495: flushing unoptimized code that is still
// cached in the optimized code map must not leave a new closure (installed
// from that map) without runnable unoptimized code on deopt.
3734 TEST(Regress165495) {
3735 i::FLAG_allow_natives_syntax = true;
3736 i::FLAG_flush_code_incrementally = true;
3737 CcTest::InitializeVM();
3738 Isolate* isolate = CcTest::i_isolate();
3739 Heap* heap = isolate->heap();
3740 HandleScope scope(isolate);
3742 // Perform one initial GC to enable code flushing.
3743 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3745 // Prepare an optimized closure that the optimized code map will get
3746 // populated. Then age the unoptimized code to trigger code flushing
3747 // but make sure the optimized code is unreachable.
3749 HandleScope inner_scope(isolate);
3750 CompileRun("function mkClosure() {"
3751 " return function(x) { return x + 1; };"
3753 "var f = mkClosure();"
3755 "%OptimizeFunctionOnNextCall(f); f(3);");
3757 Handle<JSFunction> f =
3758 v8::Utils::OpenHandle(
3759 *v8::Handle<v8::Function>::Cast(
3760 CcTest::global()->Get(v8_str("f"))));
3761 CHECK(f->is_compiled());
// Age the shared (unoptimized) code past the flushing threshold.
3762 const int kAgingThreshold = 6;
3763 for (int i = 0; i < kAgingThreshold; i++) {
3764 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3767 CompileRun("f = null;");
3770 // Simulate incremental marking so that unoptimized code is flushed
3771 // even though it still is cached in the optimized code map.
3772 SimulateIncrementalMarking(heap);
3773 heap->CollectAllGarbage(Heap::kNoGCFlags);
3775 // Make a new closure that will get code installed from the code map.
3776 // Unoptimized code is missing and the deoptimizer will go ballistic.
3777 CompileRun("var g = mkClosure(); g('bozo');");
// Regression test for issue 169209: replacing a flushing candidate's
// unoptimized code during optimization must correctly unlink it from the
// candidate list (gc_metadata), even with a dangling tail candidate.
3781 TEST(Regress169209) {
3782 i::FLAG_stress_compaction = false;
3783 i::FLAG_allow_natives_syntax = true;
3784 i::FLAG_flush_code_incrementally = true;
3786 CcTest::InitializeVM();
3787 Isolate* isolate = CcTest::i_isolate();
3788 Heap* heap = isolate->heap();
3789 HandleScope scope(isolate);
3791 // Perform one initial GC to enable code flushing.
3792 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3794 // Prepare a shared function info eligible for code flushing for which
3795 // the unoptimized code will be replaced during optimization.
3796 Handle<SharedFunctionInfo> shared1;
3798 HandleScope inner_scope(isolate);
3799 CompileRun("function f() { return 'foobar'; }"
3800 "function g(x) { if (x) f(); }"
3805 Handle<JSFunction> f =
3806 v8::Utils::OpenHandle(
3807 *v8::Handle<v8::Function>::Cast(
3808 CcTest::global()->Get(v8_str("f"))));
3809 CHECK(f->is_compiled());
// Age f's code so it becomes a code-flushing candidate.
3810 const int kAgingThreshold = 6;
3811 for (int i = 0; i < kAgingThreshold; i++) {
3812 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3815 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
3818 // Prepare a shared function info eligible for code flushing that will
3819 // represent the dangling tail of the candidate list.
3820 Handle<SharedFunctionInfo> shared2;
3822 HandleScope inner_scope(isolate);
3823 CompileRun("function flushMe() { return 0; }"
3826 Handle<JSFunction> f =
3827 v8::Utils::OpenHandle(
3828 *v8::Handle<v8::Function>::Cast(
3829 CcTest::global()->Get(v8_str("flushMe"))));
3830 CHECK(f->is_compiled());
3831 const int kAgingThreshold = 6;
3832 for (int i = 0; i < kAgingThreshold; i++) {
3833 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3836 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
3839 // Simulate incremental marking and collect code flushing candidates.
3840 SimulateIncrementalMarking(heap);
// gc_metadata non-NULL means shared1 is enqueued as a candidate.
3841 CHECK(shared1->code()->gc_metadata() != NULL);
3843 // Optimize function and make sure the unoptimized code is replaced.
3847 CompileRun("%OptimizeFunctionOnNextCall(g);"
3850 // Finish garbage collection cycle.
3851 heap->CollectAllGarbage(Heap::kNoGCFlags);
3852 CHECK(shared1->code()->gc_metadata() == NULL);
// Regression test for issue 169928: an array literal allocated right at the
// new-space boundary must not read a bogus AllocationMemento from the filler
// placed after it (crashes a debug/verify build if mishandled).
3856 TEST(Regress169928) {
3857 i::FLAG_allow_natives_syntax = true;
3858 i::FLAG_crankshaft = false;
3859 CcTest::InitializeVM();
3860 Isolate* isolate = CcTest::i_isolate();
3861 Factory* factory = isolate->factory();
3862 v8::HandleScope scope(CcTest::isolate());
3864 // Some flags turn Scavenge collections into Mark-sweep collections
3865 // and hence are incompatible with this test case.
3866 if (FLAG_gc_global || FLAG_stress_compaction) return;
3868 // Prepare the environment
3869 CompileRun("function fastliteralcase(literal, value) {"
3870 " literal[0] = value;"
3873 "function get_standard_literal() {"
3874 " var literal = [1, 2, 3];"
3877 "obj = fastliteralcase(get_standard_literal(), 1);"
3878 "obj = fastliteralcase(get_standard_literal(), 1.5);"
3879 "obj = fastliteralcase(get_standard_literal(), 2);");
// Pre-compile the trigger script before tightly packing new-space so the
// final run allocates nothing unexpected.
3882 v8::Local<v8::String> mote_code_string =
3883 v8_str("fastliteralcase(mote, 2.5);");
3885 v8::Local<v8::String> array_name = v8_str("mote");
3886 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
3888 // First make sure we flip spaces
3889 CcTest::heap()->CollectGarbage(NEW_SPACE);
3891 // Allocate the object.
3892 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
3893 array_data->set(0, Smi::FromInt(1));
3894 array_data->set(1, Smi::FromInt(2));
// Leave exactly enough new-space room for the array + memento (+ filler).
3896 AllocateAllButNBytes(CcTest::heap()->new_space(),
3897 JSArray::kSize + AllocationMemento::kSize +
3900 Handle<JSArray> array = factory->NewJSArrayWithElements(array_data,
3904 CHECK_EQ(Smi::FromInt(2), array->length());
3905 CHECK(array->HasFastSmiOrObjectElements());
3907 // We need filler the size of AllocationMemento object, plus an extra
3908 // fill pointer value.
3909 HeapObject* obj = NULL;
3910 AllocationResult allocation = CcTest::heap()->new_space()->AllocateRaw(
3911 AllocationMemento::kSize + kPointerSize);
3912 CHECK(allocation.To(&obj));
3913 Address addr_obj = obj->address();
3914 CcTest::heap()->CreateFillerObjectAt(
3915 addr_obj, AllocationMemento::kSize + kPointerSize);
3917 // Give the array a name, making sure not to allocate strings.
3918 v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
3919 CcTest::global()->Set(array_name, array_obj);
3921 // This should crash with a protection violation if we are running a build
3923 AlwaysAllocateScope aa_scope(isolate);
3924 v8::Script::Compile(mote_code_string)->Run();
// Regression test for issue 168801: a function enqueued as a code-flushing
// candidate on an evacuation candidate page, then optimized (taken off the
// list), must not corrupt the heap during the subsequent GC cycles.
3928 TEST(Regress168801) {
3929 if (i::FLAG_never_compact) return;
3930 i::FLAG_always_compact = true;
3931 i::FLAG_cache_optimized_code = false;
3932 i::FLAG_allow_natives_syntax = true;
3933 i::FLAG_flush_code_incrementally = true;
3934 CcTest::InitializeVM();
3935 Isolate* isolate = CcTest::i_isolate();
3936 Heap* heap = isolate->heap();
3937 HandleScope scope(isolate);
3939 // Perform one initial GC to enable code flushing.
3940 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3942 // Ensure the code ends up on an evacuation candidate.
3943 SimulateFullSpace(heap->code_space());
3945 // Prepare an unoptimized function that is eligible for code flushing.
3946 Handle<JSFunction> function;
3948 HandleScope inner_scope(isolate);
3949 CompileRun("function mkClosure() {"
3950 " return function(x) { return x + 1; };"
3952 "var f = mkClosure();"
3955 Handle<JSFunction> f =
3956 v8::Utils::OpenHandle(
3957 *v8::Handle<v8::Function>::Cast(
3958 CcTest::global()->Get(v8_str("f"))));
3959 CHECK(f->is_compiled());
// Age f's code past the flushing threshold.
3960 const int kAgingThreshold = 6;
3961 for (int i = 0; i < kAgingThreshold; i++) {
3962 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3965 function = inner_scope.CloseAndEscape(handle(*f, isolate));
3968 // Simulate incremental marking so that unoptimized function is enqueued as a
3969 // candidate for code flushing. The shared function info however will not be
3970 // explicitly enqueued.
3971 SimulateIncrementalMarking(heap);
3973 // Now optimize the function so that it is taken off the candidate list.
3975 HandleScope inner_scope(isolate);
3976 CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
3979 // This cycle will bust the heap and subsequent cycles will go ballistic.
3980 heap->CollectAllGarbage(Heap::kNoGCFlags);
3981 heap->CollectAllGarbage(Heap::kNoGCFlags);
// Regression test: same setup as Regress168801, but instead of optimizing the
// flushing candidate, the debugger is loaded (which disables code flushing);
// subsequent full GCs must not crash.
3985 TEST(Regress173458) {
3986 if (i::FLAG_never_compact) return;
3987 i::FLAG_always_compact = true;
3988 i::FLAG_cache_optimized_code = false;
3989 i::FLAG_allow_natives_syntax = true;
3990 i::FLAG_flush_code_incrementally = true;
3991 CcTest::InitializeVM();
3992 Isolate* isolate = CcTest::i_isolate();
3993 Heap* heap = isolate->heap();
3994 HandleScope scope(isolate);
3996 // Perform one initial GC to enable code flushing.
3997 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3999 // Ensure the code ends up on an evacuation candidate.
4000 SimulateFullSpace(heap->code_space());
4002 // Prepare an unoptimized function that is eligible for code flushing.
4003 Handle<JSFunction> function;
4005 HandleScope inner_scope(isolate);
4006 CompileRun("function mkClosure() {"
4007 " return function(x) { return x + 1; };"
4009 "var f = mkClosure();"
4012 Handle<JSFunction> f =
4013 v8::Utils::OpenHandle(
4014 *v8::Handle<v8::Function>::Cast(
4015 CcTest::global()->Get(v8_str("f"))));
4016 CHECK(f->is_compiled());
4017 const int kAgingThreshold = 6;
// Age the code so it qualifies as a flushing candidate.
4018 for (int i = 0; i < kAgingThreshold; i++) {
4019 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4022 function = inner_scope.CloseAndEscape(handle(*f, isolate));
4025 // Simulate incremental marking so that unoptimized function is enqueued as a
4026 // candidate for code flushing. The shared function info however will not be
4027 // explicitly enqueued.
4028 SimulateIncrementalMarking(heap);
4030 // Now enable the debugger which in turn will disable code flushing.
4031 CHECK(isolate->debug()->Load());
4033 // This cycle will bust the heap and subsequent cycles will go ballistic.
4034 heap->CollectAllGarbage(Heap::kNoGCFlags);
4035 heap->CollectAllGarbage(Heap::kNoGCFlags);
// No-op object visitor; used below purely to walk all handle blocks without
// touching the pointers they contain.
4039 class DummyVisitor : public ObjectVisitor {
4041 void VisitPointers(Object** start, Object** end) { }
// Fills the current handle block to its limit, then opens a DeferredHandleScope
// and iterates the handle-scope implementer to verify that iteration copes with
// a completely full block plus deferred handles.
4045 TEST(DeferredHandles) {
4046 CcTest::InitializeVM();
4047 Isolate* isolate = CcTest::i_isolate();
4048 Heap* heap = isolate->heap();
4049 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
4050 HandleScopeData* data = isolate->handle_scope_data();
4051 Handle<Object> init(heap->empty_string(), isolate);
4052 while (data->next < data->limit) {
4053 Handle<Object> obj(heap->empty_string(), isolate);
4055 // An entire block of handles has been filled.
4056 // Next handle would require a new block.
4057 DCHECK(data->next == data->limit);
4059 DeferredHandleScope deferred(isolate);
4060 DummyVisitor visitor;
4061 isolate->handle_scope_implementer()->Iterate(&visitor);
// Detach() transfers ownership of the deferred handles; delete frees them.
4062 delete deferred.Detach();
// Allocates a very large (10M-element) array and checks that a single large
// incremental-marking step (100 MB budget) is enough to finish, or nearly
// finish, marking it.
4066 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
4067 CcTest::InitializeVM();
4068 v8::HandleScope scope(CcTest::isolate());
4069 CompileRun("function f(n) {"
4070 " var a = new Array(n);"
4071 " for (var i = 0; i < n; i += 100) a[i] = i;"
4073 "f(10 * 1024 * 1024);");
4074 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4075 if (marking->IsStopped()) marking->Start();
4076 // This big step should be sufficient to mark the whole array.
4077 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4078 DCHECK(marking->IsComplete() ||
4079 marking->IsReadyToOverApproximateWeakClosure());
// Smoke test for Heap::DisableInlineAllocation()/EnableInlineAllocation():
// the same allocating workload must run correctly with inline allocation
// enabled, disabled, and re-enabled.
4083 TEST(DisableInlineAllocation) {
4084 i::FLAG_allow_natives_syntax = true;
4085 CcTest::InitializeVM();
4086 v8::HandleScope scope(CcTest::isolate());
4087 CompileRun("function test() {"
4089 " for (var i = 0; i < 10; i++) {"
4090 " x[i] = [ {}, [1,2,3], [1,x,3] ];"
4094 " %OptimizeFunctionOnNextCall(test);"
4096 " %DeoptimizeFunction(test);"
4099 // Warm-up with inline allocation enabled.
4100 CompileRun("test(); test(); run();");
4102 // Run test with inline allocation disabled.
4103 CcTest::heap()->DisableInlineAllocation();
4104 CompileRun("run()");
4106 // Run test with inline allocation re-enabled.
4107 CcTest::heap()->EnableInlineAllocation();
4108 CompileRun("run()");
// Counts the entries of the heap's linked list of allocation sites by
// following weak_next() until the undefined sentinel.
4112 static int AllocationSitesCount(Heap* heap) {
4114 for (Object* site = heap->allocation_sites_list();
4115 !(site->IsUndefined());
4116 site = AllocationSite::cast(site)->weak_next()) {
// Verifies that code registered in an AllocationSite's dependent_code() is
// held weakly: after the function dies, the weak cell in dependent_code()
// must be cleared even though the site itself is kept alive via a global
// handle.
4123 TEST(EnsureAllocationSiteDependentCodesProcessed) {
4124 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4125 i::FLAG_allow_natives_syntax = true;
4126 CcTest::InitializeVM();
4127 Isolate* isolate = CcTest::i_isolate();
4128 v8::internal::Heap* heap = CcTest::heap();
4129 GlobalHandles* global_handles = isolate->global_handles();
4131 if (!isolate->use_crankshaft()) return;
4133 // The allocation site at the head of the list is ours.
4134 Handle<AllocationSite> site;
4136 LocalContext context;
4137 v8::HandleScope scope(context->GetIsolate());
4139 int count = AllocationSitesCount(heap);
4140 CompileRun("var bar = function() { return (new Array()); };"
4145 // One allocation site should have been created.
4146 int new_count = AllocationSitesCount(heap);
4147 CHECK_EQ(new_count, (count + 1));
4148 site = Handle<AllocationSite>::cast(
4149 global_handles->Create(
4150 AllocationSite::cast(heap->allocation_sites_list())));
4152 CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
// The optimized code for 'bar' should now be recorded (via a WeakCell) in
// the site's dependent code list.
4154 DependentCode::GroupStartIndexes starts(site->dependent_code());
4155 CHECK_GE(starts.number_of_entries(), 1);
4156 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4157 CHECK(site->dependent_code()->object_at(index)->IsWeakCell());
4158 Code* function_bar = Code::cast(
4159 WeakCell::cast(site->dependent_code()->object_at(index))->value());
4160 Handle<JSFunction> bar_handle =
4161 v8::Utils::OpenHandle(
4162 *v8::Handle<v8::Function>::Cast(
4163 CcTest::global()->Get(v8_str("bar"))));
4164 CHECK_EQ(bar_handle->code(), function_bar);
4167 // Now make sure that a gc should get rid of the function, even though we
4168 // still have the allocation site alive.
4169 for (int i = 0; i < 4; i++) {
4170 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4173 // The site still exists because of our global handle, but the code is no
4174 // longer referred to by dependent_code().
4175 DependentCode::GroupStartIndexes starts(site->dependent_code());
4176 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4177 CHECK(site->dependent_code()->object_at(index)->IsWeakCell() &&
4178 WeakCell::cast(site->dependent_code()->object_at(index))->cleared());
// Checks that property cells embedded in optimized code are weak references:
// once the function is unreachable, a few full GCs mark its code for
// deoptimization instead of keeping the cells alive.
4182 TEST(CellsInOptimizedCodeAreWeak) {
4183 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4184 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4185 i::FLAG_allow_natives_syntax = true;
4186 CcTest::InitializeVM();
4187 Isolate* isolate = CcTest::i_isolate();
4188 v8::internal::Heap* heap = CcTest::heap();
4190 if (!isolate->use_crankshaft()) return;
4191 HandleScope outer_scope(heap->isolate());
4194 LocalContext context;
4195 HandleScope scope(heap->isolate());
4197 CompileRun("bar = (function() {"
4201 " var foo = function(x) { with (x) { return 1 + x; } };"
4205 " %OptimizeFunctionOnNextCall(bar);"
4207 " return bar;})();");
4209 Handle<JSFunction> bar =
4210 v8::Utils::OpenHandle(
4211 *v8::Handle<v8::Function>::Cast(
4212 CcTest::global()->Get(v8_str("bar"))));
// Keep only the Code object alive across the inner scope; 'bar' itself dies.
4213 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4216 // Now make sure that a gc should get rid of the function
4217 for (int i = 0; i < 4; i++) {
4218 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4221 DCHECK(code->marked_for_deoptimization());
// Same as the cell test above, but for heap objects embedded directly in
// optimized code.
4225 TEST(ObjectsInOptimizedCodeAreWeak) {
4226 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4227 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4228 i::FLAG_allow_natives_syntax = true;
4229 CcTest::InitializeVM();
4230 Isolate* isolate = CcTest::i_isolate();
4231 v8::internal::Heap* heap = CcTest::heap();
4233 if (!isolate->use_crankshaft()) return;
4234 HandleScope outer_scope(heap->isolate());
4237 LocalContext context;
4238 HandleScope scope(heap->isolate());
4240 CompileRun("function bar() {"
4243 "function foo(x) { with (x) { return 1 + x; } };"
4247 "%OptimizeFunctionOnNextCall(bar);"
4250 Handle<JSFunction> bar =
4251 v8::Utils::OpenHandle(
4252 *v8::Handle<v8::Function>::Cast(
4253 CcTest::global()->Get(v8_str("bar"))));
4254 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4257 // Now make sure that a gc should get rid of the function
4258 for (int i = 0; i < 4; i++) {
4259 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4262 DCHECK(code->marked_for_deoptimization());
// Checks that the weak object-to-code hash table does not leak entries when
// optimized functions are created and discarded across incremental-marking
// cycles: after the loop, the table must contain zero elements.
4266 TEST(NoWeakHashTableLeakWithIncrementalMarking) {
4267 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4268 if (!i::FLAG_incremental_marking) return;
4269 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4270 i::FLAG_allow_natives_syntax = true;
4271 i::FLAG_compilation_cache = false;
4272 i::FLAG_retain_maps_for_n_gc = 0;
4273 CcTest::InitializeVM();
4274 Isolate* isolate = CcTest::i_isolate();
4275 v8::internal::Heap* heap = CcTest::heap();
4277 if (!isolate->use_crankshaft()) return;
4278 HandleScope outer_scope(heap->isolate());
// Each iteration builds and optimizes a fresh pair of functions (bar%d/foo%d)
// in its own context, then triggers a full GC so their table entries die.
4279 for (int i = 0; i < 3; i++) {
4280 SimulateIncrementalMarking(heap);
4282 LocalContext context;
4283 HandleScope scope(heap->isolate());
4284 EmbeddedVector<char, 256> source;
4286 "function bar%d() {"
4289 "function foo%d(x) { with (x) { return 1 + x; } };"
// '%%' in the format string emits a single '%', producing the natives-syntax
// call %OptimizeFunctionOnNextCall(bar%d). (Fixed: the intrinsic name was
// previously misspelled as "OptimizeFwunctionOnNextCall", which does not
// exist, so the generated script failed and bar%d was never optimized.)
4293 "%%OptimizeFunctionOnNextCall(bar%d);"
4295 i, i, i, i, i, i, i, i);
4296 CompileRun(source.start());
4298 heap->CollectAllGarbage(i::Heap::kNoGCFlags);
4301 if (heap->weak_object_to_code_table()->IsHashTable()) {
4302 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
4303 elements = t->NumberOfElements();
4305 CHECK_EQ(0, elements);
// Compiles and crankshaft-optimizes a trivial function with the given global
// name, returning a handle to it.
4309 static Handle<JSFunction> OptimizeDummyFunction(const char* name) {
4310 EmbeddedVector<char, 256> source;
4312 "function %s() { return 0; }"
4314 "%%OptimizeFunctionOnNextCall(%s);"
4315 "%s();", name, name, name, name, name);
4316 CompileRun(source.start());
4317 Handle<JSFunction> fun =
4318 v8::Utils::OpenHandle(
4319 *v8::Handle<v8::Function>::Cast(
4320 CcTest::global()->Get(v8_str(name))));
// Walks the next_code_link() chain starting at 'code' and returns how many
// Code objects follow it.
4325 static int GetCodeChainLength(Code* code) {
4327 while (code->next_code_link()->IsCode()) {
4329 code = Code::cast(code->next_code_link());
// Verifies that next_code_link is a weak link: when the 'mortal' function
// dies, its code drops out of the chain and the chain shortens by one.
4335 TEST(NextCodeLinkIsWeak) {
4336 i::FLAG_allow_natives_syntax = true;
4337 i::FLAG_turbo_deoptimization = true;
4338 CcTest::InitializeVM();
4339 Isolate* isolate = CcTest::i_isolate();
4340 v8::internal::Heap* heap = CcTest::heap();
4342 if (!isolate->use_crankshaft()) return;
4343 HandleScope outer_scope(heap->isolate());
4345 heap->CollectAllAvailableGarbage();
4346 int code_chain_length_before, code_chain_length_after;
4348 HandleScope scope(heap->isolate());
4349 Handle<JSFunction> mortal = OptimizeDummyFunction("mortal");
4350 Handle<JSFunction> immortal = OptimizeDummyFunction("immortal");
4351 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
4352 code_chain_length_before = GetCodeChainLength(immortal->code());
4353 // Keep the immortal code and let the mortal code die.
4354 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
4355 CompileRun("mortal = null; immortal = null;");
4357 heap->CollectAllAvailableGarbage();
4358 // Now mortal code should be dead.
4359 code_chain_length_after = GetCodeChainLength(*code);
4360 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
// Assembles a minimal Code object flagged as OPTIMIZED_FUNCTION (it just
// pushes undefined); used to build synthetic optimized-code lists.
4364 static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
4365 i::byte buffer[i::Assembler::kMinimalBufferSize];
4366 MacroAssembler masm(isolate, buffer, sizeof(buffer));
4368 masm.Push(isolate->factory()->undefined_value());
4370 masm.GetCode(&desc);
4371 Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
4372 Handle<Code> code = isolate->factory()->NewCode(
4373 desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
4374 CHECK(code->IsCode());
// Variant of NextCodeLinkIsWeak using hand-built dummy Code objects spliced
// into the context's OPTIMIZED_CODE_LIST: after GC the dead 'mortal' entry
// must be unlinked so 'immortal' points straight at the old list head.
4379 TEST(NextCodeLinkIsWeak2) {
4380 i::FLAG_allow_natives_syntax = true;
4381 CcTest::InitializeVM();
4382 Isolate* isolate = CcTest::i_isolate();
4383 v8::internal::Heap* heap = CcTest::heap();
4385 if (!isolate->use_crankshaft()) return;
4386 HandleScope outer_scope(heap->isolate());
4387 heap->CollectAllAvailableGarbage();
4388 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
4389 Handle<Code> new_head;
4390 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
4392 HandleScope scope(heap->isolate());
4393 Handle<Code> immortal = DummyOptimizedCode(isolate);
4394 Handle<Code> mortal = DummyOptimizedCode(isolate);
// Build the chain: immortal -> mortal -> old_head, with immortal as head.
4395 mortal->set_next_code_link(*old_head);
4396 immortal->set_next_code_link(*mortal);
4397 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
4398 new_head = scope.CloseAndEscape(immortal);
4400 heap->CollectAllAvailableGarbage();
4401 // Now mortal code should be dead.
4402 CHECK_EQ(*old_head, new_head->next_code_link());
// Set by ClearWeakIC when the weak callback fires; inspected by CheckWeakness.
4406 static bool weak_ic_cleared = false;
// Weak callback: records that the watched object was collected and resets the
// persistent handle that owned it.
4408 static void ClearWeakIC(const v8::WeakCallbackData<v8::Object, void>& data) {
4409 printf("clear weak is called\n");
4410 weak_ic_cleared = true;
4411 v8::Persistent<v8::Value>* p =
4412 reinterpret_cast<v8::Persistent<v8::Value>*>(data.GetParameter());
4413 CHECK(p->IsNearDeath());
4418 // Checks that the value returned by execution of the source is weak.
4419 void CheckWeakness(const char* source) {
4420 i::FLAG_stress_compaction = false;
4421 CcTest::InitializeVM();
4422 v8::Isolate* isolate = CcTest::isolate();
4423 v8::HandleScope scope(isolate);
4424 v8::Persistent<v8::Object> garbage;
4426 v8::HandleScope scope(isolate);
// Run the script and keep its result only through the weak persistent below.
4427 garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
4429 weak_ic_cleared = false;
4430 garbage.SetWeak(static_cast<void*>(&garbage), &ClearWeakIC);
4431 Heap* heap = CcTest::i_isolate()->heap();
4432 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
// If the IC created by 'source' held the object strongly, the callback would
// never fire and this check would fail.
4433 CHECK(weak_ic_cleared);
4437 // Each of the following "weak IC" tests creates an IC that embeds a map with
4438 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
4439 TEST(WeakMapInMonomorphicLoadIC) {
4440 CheckWeakness("function loadIC(obj) {"
4444 " var proto = {'name' : 'weak'};"
4445 " var obj = Object.create(proto);"
// Polymorphic variant: a second receiver ('poly') makes the load IC go
// polymorphic before weakness is checked.
4454 TEST(WeakMapInPolymorphicLoadIC) {
4456 "function loadIC(obj) {"
4460 " var proto = {'name' : 'weak'};"
4461 " var obj = Object.create(proto);"
4465 " var poly = Object.create(proto);"
// Keyed-load ICs, monomorphic then polymorphic.
4473 TEST(WeakMapInMonomorphicKeyedLoadIC) {
4474 CheckWeakness("function keyedLoadIC(obj, field) {"
4475 " return obj[field];"
4478 " var proto = {'name' : 'weak'};"
4479 " var obj = Object.create(proto);"
4480 " keyedLoadIC(obj, 'name');"
4481 " keyedLoadIC(obj, 'name');"
4482 " keyedLoadIC(obj, 'name');"
4488 TEST(WeakMapInPolymorphicKeyedLoadIC) {
4490 "function keyedLoadIC(obj, field) {"
4491 " return obj[field];"
4494 " var proto = {'name' : 'weak'};"
4495 " var obj = Object.create(proto);"
4496 " keyedLoadIC(obj, 'name');"
4497 " keyedLoadIC(obj, 'name');"
4498 " keyedLoadIC(obj, 'name');"
4499 " var poly = Object.create(proto);"
4501 " keyedLoadIC(poly, 'name');"
// Store ICs, monomorphic then polymorphic.
4507 TEST(WeakMapInMonomorphicStoreIC) {
4508 CheckWeakness("function storeIC(obj, value) {"
4509 " obj.name = value;"
4512 " var proto = {'name' : 'weak'};"
4513 " var obj = Object.create(proto);"
4514 " storeIC(obj, 'x');"
4515 " storeIC(obj, 'x');"
4516 " storeIC(obj, 'x');"
4522 TEST(WeakMapInPolymorphicStoreIC) {
4524 "function storeIC(obj, value) {"
4525 " obj.name = value;"
4528 " var proto = {'name' : 'weak'};"
4529 " var obj = Object.create(proto);"
4530 " storeIC(obj, 'x');"
4531 " storeIC(obj, 'x');"
4532 " storeIC(obj, 'x');"
4533 " var poly = Object.create(proto);"
4535 " storeIC(poly, 'x');"
// Keyed-store ICs, monomorphic then polymorphic.
4541 TEST(WeakMapInMonomorphicKeyedStoreIC) {
4542 CheckWeakness("function keyedStoreIC(obj, field, value) {"
4543 " obj[field] = value;"
4546 " var proto = {'name' : 'weak'};"
4547 " var obj = Object.create(proto);"
4548 " keyedStoreIC(obj, 'x');"
4549 " keyedStoreIC(obj, 'x');"
4550 " keyedStoreIC(obj, 'x');"
4556 TEST(WeakMapInPolymorphicKeyedStoreIC) {
4558 "function keyedStoreIC(obj, field, value) {"
4559 " obj[field] = value;"
4562 " var proto = {'name' : 'weak'};"
4563 " var obj = Object.create(proto);"
4564 " keyedStoreIC(obj, 'x');"
4565 " keyedStoreIC(obj, 'x');"
4566 " keyedStoreIC(obj, 'x');"
4567 " var poly = Object.create(proto);"
4569 " keyedStoreIC(poly, 'x');"
// CompareNil IC variant.
4575 TEST(WeakMapInMonomorphicCompareNilIC) {
4576 CheckWeakness("function compareNilIC(obj) {"
4577 " return obj == null;"
4580 " var proto = {'name' : 'weak'};"
4581 " var obj = Object.create(proto);"
4582 " compareNilIC(obj);"
4583 " compareNilIC(obj);"
4584 " compareNilIC(obj);"
// Looks up a JSFunction by its name on the global object; CHECK-fails (via
// ToHandleChecked) if the property is absent.
4590 Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
4591 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
4592 Handle<Object> obj =
4593 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
4594 return Handle<JSFunction>::cast(obj);
// Asserts that the IC of the given kind is in the expected state. With
// --vector-ics, load/keyed-load/call ICs keep their state in the shared
// function info's feedback vector (checked via the matching *ICNexus);
// otherwise the first IC stub of that kind is located in the code object.
4598 void CheckIC(Code* code, Code::Kind kind, SharedFunctionInfo* shared,
4599 int ic_slot, InlineCacheState state) {
4600 if (FLAG_vector_ics &&
4601 (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
4602 kind == Code::CALL_IC)) {
4603 TypeFeedbackVector* vector = shared->feedback_vector();
4604 FeedbackVectorICSlot slot(ic_slot);
4605 if (kind == Code::LOAD_IC) {
4606 LoadICNexus nexus(vector, slot);
4607 CHECK_EQ(nexus.StateFromFeedback(), state);
4608 } else if (kind == Code::KEYED_LOAD_IC) {
4609 KeyedLoadICNexus nexus(vector, slot);
4610 CHECK_EQ(nexus.StateFromFeedback(), state);
4611 } else if (kind == Code::CALL_IC) {
4612 CallICNexus nexus(vector, slot);
4613 CHECK_EQ(nexus.StateFromFeedback(), state);
4616 Code* ic = FindFirstIC(code, kind);
4617 CHECK(ic->is_inline_cache_stub());
4618 CHECK(ic->ic_state() == state);
// A monomorphic load IC must still report MONOMORPHIC after a full GC and
// after being exercised again.
4623 TEST(MonomorphicStaysMonomorphicAfterGC) {
4624 if (FLAG_always_opt) return;
4625 CcTest::InitializeVM();
4626 Isolate* isolate = CcTest::i_isolate();
4627 Heap* heap = isolate->heap();
4628 v8::HandleScope scope(CcTest::isolate());
4630 "function loadIC(obj) {"
4633 "function testIC() {"
4634 " var proto = {'name' : 'weak'};"
4635 " var obj = Object.create(proto);"
4641 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
4643 v8::HandleScope scope(CcTest::isolate());
4644 CompileRun("(testIC())");
4646 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4647 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
4649 v8::HandleScope scope(CcTest::isolate());
4650 CompileRun("(testIC())");
4652 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
// Same check for an IC driven polymorphic by a second receiver shape.
4656 TEST(PolymorphicStaysPolymorphicAfterGC) {
4657 if (FLAG_always_opt) return;
4658 CcTest::InitializeVM();
4659 Isolate* isolate = CcTest::i_isolate();
4660 Heap* heap = isolate->heap();
4661 v8::HandleScope scope(CcTest::isolate());
4663 "function loadIC(obj) {"
4666 "function testIC() {"
4667 " var proto = {'name' : 'weak'};"
4668 " var obj = Object.create(proto);"
4672 " var poly = Object.create(proto);"
4677 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
4679 v8::HandleScope scope(CcTest::isolate());
4680 CompileRun("(testIC())");
4682 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4683 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
4685 v8::HandleScope scope(CcTest::isolate());
4686 CompileRun("(testIC())");
4688 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
// (Body of a weak-cell test; its TEST( header is above this excerpt.)
// weak_cell1 references an otherwise-unreachable FixedArray; weak_cell2
// references 'survivor', which stays reachable through a handle. Scavenges
// must clear neither; a full mark-sweep must clear only weak_cell1.
4693 CcTest::InitializeVM();
4694 Isolate* isolate = CcTest::i_isolate();
4695 v8::internal::Heap* heap = CcTest::heap();
4696 v8::internal::Factory* factory = isolate->factory();
4698 HandleScope outer_scope(isolate);
4699 Handle<WeakCell> weak_cell1;
4701 HandleScope inner_scope(isolate);
4702 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
4703 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
4706 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
4707 Handle<WeakCell> weak_cell2;
4709 HandleScope inner_scope(isolate);
4710 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
4712 CHECK(weak_cell1->value()->IsFixedArray());
4713 CHECK_EQ(*survivor, weak_cell2->value());
// Two scavenges: weak cells survive minor GCs untouched.
4714 heap->CollectGarbage(NEW_SPACE);
4715 CHECK(weak_cell1->value()->IsFixedArray());
4716 CHECK_EQ(*survivor, weak_cell2->value());
4717 heap->CollectGarbage(NEW_SPACE);
4718 CHECK(weak_cell1->value()->IsFixedArray());
4719 CHECK_EQ(*survivor, weak_cell2->value());
// Full GC: the unreachable value is collected, clearing weak_cell1 only.
4720 heap->CollectAllAvailableGarbage();
4721 CHECK(weak_cell1->cleared());
4722 CHECK_EQ(*survivor, weak_cell2->value());
// Creates N weak cells while interleaving incremental-marking steps and
// scavenges; after a full GC only the cell pointing at the still-reachable
// 'survivor' array (index 0) must remain uncleared.
4726 TEST(WeakCellsWithIncrementalMarking) {
4727 CcTest::InitializeVM();
4728 Isolate* isolate = CcTest::i_isolate();
4729 v8::internal::Heap* heap = CcTest::heap();
4730 v8::internal::Factory* factory = isolate->factory();
4733 HandleScope outer_scope(isolate);
4734 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
4735 Handle<WeakCell> weak_cells[N];
4737 for (int i = 0; i < N; i++) {
4738 HandleScope inner_scope(isolate);
4739 Handle<HeapObject> value =
4740 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
4741 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
4742 CHECK(weak_cell->value()->IsFixedArray());
4743 IncrementalMarking* marking = heap->incremental_marking();
4744 if (marking->IsStopped()) marking->Start();
4745 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
// A scavenge in the middle of marking must not corrupt the weak cell.
4746 heap->CollectGarbage(NEW_SPACE);
4747 CHECK(weak_cell->value()->IsFixedArray());
4748 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
4750 heap->CollectAllGarbage(Heap::kNoGCFlags);
4751 CHECK_EQ(*survivor, weak_cells[0]->value());
4752 for (int i = 1; i < N; i++) {
4753 CHECK(weak_cells[i]->cleared());
// Regression test: with stress compaction and a 1-allocation GC timeout, an
// optimized add whose result allocation triggers GC must not crash when the
// result is stored into an object that gets promoted to old space.
4759 TEST(AddInstructionChangesNewSpacePromotion) {
4760 i::FLAG_allow_natives_syntax = true;
4761 i::FLAG_expose_gc = true;
4762 i::FLAG_stress_compaction = true;
4763 i::FLAG_gc_interval = 1000;
4764 CcTest::InitializeVM();
4765 if (!i::FLAG_allocation_site_pretenuring) return;
4766 v8::HandleScope scope(CcTest::isolate());
4767 Isolate* isolate = CcTest::i_isolate();
4768 Heap* heap = isolate->heap();
4771 "function add(a, b) {"
4775 "add(\"a\", \"b\");"
4776 "var oldSpaceObject;"
4778 "function crash(x) {"
4779 " var object = {a: null, b: null};"
4780 " var result = add(1.5, x | 0);"
4781 " object.a = result;"
4782 " oldSpaceObject = object;"
4787 "%OptimizeFunctionOnNextCall(crash);"
4790 v8::Handle<v8::Object> global = CcTest::global();
4791 v8::Handle<v8::Function> g =
4792 v8::Handle<v8::Function>::Cast(global->Get(v8_str("crash")));
4793 v8::Handle<v8::Value> args1[] = { v8_num(1) };
// Force the very next allocation to trigger a GC inside the optimized code.
4794 heap->DisableInlineAllocation();
4795 heap->set_allocation_timeout(1);
4796 g->Call(global, 1, args1);
4797 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
// Fatal-error handler that converts the expected OOM into a clean exit:
// exit code is 0 iff the failure location is CALL_AND_RETRY_LAST.
4801 void OnFatalErrorExpectOOM(const char* location, const char* message) {
4802 // Exit with 0 if the location matches our expectation.
4803 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
// Runs script with --gc-interval=1 so every allocation GCs; either the run
// survives and yields a number, or it hits the expected OOM path above.
4807 TEST(CEntryStubOOM) {
4808 i::FLAG_allow_natives_syntax = true;
4809 CcTest::InitializeVM();
4810 v8::HandleScope scope(CcTest::isolate());
4811 v8::V8::SetFatalErrorHandler(OnFatalErrorExpectOOM);
4813 v8::Handle<v8::Value> result = CompileRun(
4814 "%SetFlags('--gc-interval=1');"
4819 CHECK(result->IsNumber());
// No-op interrupt callback; requesting it is enough to exercise the
// interrupt/stack-guard path under test.
4825 static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
// JS-visible 'interrupt()' function that schedules the callback above.
4828 static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
4829 CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
// Regression test: an interrupt arriving while eval-compiling a function with
// many locals (fake stack overflow in 'f') must not break the closure result.
4833 TEST(Regress357137) {
4834 CcTest::InitializeVM();
4835 v8::Isolate* isolate = CcTest::isolate();
4836 v8::HandleScope hscope(isolate);
4837 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
4838 global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
4839 v8::FunctionTemplate::New(isolate, RequestInterrupt));
4840 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
4841 DCHECK(!context.IsEmpty());
4842 v8::Context::Scope cscope(context);
4844 v8::Local<v8::Value> result = CompileRun(
4846 "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
4847 "eval('function f() {' + locals + 'return function() { return v0; }; }');"
4848 "interrupt();" // This triggers a fake stack overflow in f.
4850 CHECK_EQ(42.0, result->ToNumber(isolate)->Value());
// Checks that the left-trimmed elements backing store produced by
// Array.prototype.shift is handled correctly by concurrent/parallel sweeping:
// its page must either be past sweeping or the elements must be marked black.
4854 TEST(ArrayShiftSweeping) {
4855 i::FLAG_expose_gc = true;
4856 CcTest::InitializeVM();
4857 v8::HandleScope scope(CcTest::isolate());
4858 Isolate* isolate = CcTest::i_isolate();
4859 Heap* heap = isolate->heap();
4861 v8::Local<v8::Value> result = CompileRun(
4862 "var array = new Array(40000);"
4863 "var tmp = new Array(100000);"
4870 Handle<JSObject> o =
4871 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
4872 CHECK(heap->InOldPointerSpace(o->elements()));
4873 CHECK(heap->InOldPointerSpace(*o));
4874 Page* page = Page::FromAddress(o->elements()->address());
4875 CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
4876 Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
// Stress test for the promotion queue stored at the end of to-space: a
// carefully sized pair of huge allocations would overwrite the queue unless
// the guards relocate/evacuate it. See the detailed scenario comments below.
4880 UNINITIALIZED_TEST(PromotionQueue) {
4881 i::FLAG_expose_gc = true;
4882 i::FLAG_max_semi_space_size = 2 * (Page::kPageSize / MB);
4883 v8::Isolate* isolate = v8::Isolate::New();
4884 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
4886 v8::Isolate::Scope isolate_scope(isolate);
4887 v8::HandleScope handle_scope(isolate);
4888 v8::Context::New(isolate)->Enter();
4889 Heap* heap = i_isolate->heap();
4890 NewSpace* new_space = heap->new_space();
4892 // In this test we will try to overwrite the promotion queue which is at the
4893 // end of to-space. To actually make that possible, we need at least two
4894 // semi-space pages and take advantage of fragmentation.
4895 // (1) Grow semi-space to two pages.
4896 // (2) Create a few small long living objects and call the scavenger to
4897 // move them to the other semi-space.
4898 // (3) Create a huge object, i.e., remainder of first semi-space page and
4899 // create another huge object which should be of maximum allocatable memory
4900 // size of the second semi-space page.
4901 // (4) Call the scavenger again.
4902 // What will happen is: the scavenger will promote the objects created in
4903 // (2) and will create promotion queue entries at the end of the second
4904 // semi-space page during the next scavenge when it promotes the objects to
4905 // the old generation. The first allocation of (3) will fill up the first
4906 // semi-space page. The second allocation in (3) will not fit into the
4907 // first semi-space page, but it will overwrite the promotion queue which
4908 // are in the second semi-space page. If the right guards are in place, the
4909 // promotion queue will be evacuated in that case.
4911 // Grow the semi-space to two pages to make semi-space copy overwrite the
4912 // promotion queue, which will be at the end of the second page.
4913 intptr_t old_capacity = new_space->TotalCapacity();
4915 // If we are in a low memory config, we can't grow to two pages and we can't
4916 // run this test. This also means the issue we are testing cannot arise, as
4917 // there is no fragmentation.
4918 if (new_space->IsAtMaximumCapacity()) return;
4921 CHECK(new_space->IsAtMaximumCapacity());
4922 CHECK(2 * old_capacity == new_space->TotalCapacity());
4924 // Call the scavenger two times to get an empty new space
4925 heap->CollectGarbage(NEW_SPACE);
4926 heap->CollectGarbage(NEW_SPACE);
4928 // First create a few objects which will survive a scavenge, and will get
4929 // promoted to the old generation later on. These objects will create
4930 // promotion queue entries at the end of the second semi-space page.
4931 const int number_handles = 12;
4932 Handle<FixedArray> handles[number_handles];
4933 for (int i = 0; i < number_handles; i++) {
4934 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
4936 heap->CollectGarbage(NEW_SPACE);
4938 // Create the first huge object which will exactly fit the first semi-space
4940 int new_linear_size =
4941 static_cast<int>(*heap->new_space()->allocation_limit_address() -
4942 *heap->new_space()->allocation_top_address());
4943 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize;
4944 Handle<FixedArray> first =
4945 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
4946 CHECK(heap->InNewSpace(*first));
4948 // Create the second huge object of maximum allocatable second semi-space
4951 static_cast<int>(*heap->new_space()->allocation_limit_address() -
4952 *heap->new_space()->allocation_top_address());
4953 length = Page::kMaxRegularHeapObjectSize / kPointerSize -
4954 FixedArray::kHeaderSize;
4955 Handle<FixedArray> second =
4956 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
4957 CHECK(heap->InNewSpace(*second));
4959 // This scavenge will corrupt memory if the promotion queue is not
4961 heap->CollectGarbage(NEW_SPACE);
// Regression test: migrating an object that sits flush against the end of a
// page must not crash in Heap::AdjustLiveBytes() during incremental marking
// (the old crash happened in JSObject::MigrateFastToFast).
4967 TEST(Regress388880) {
4968 i::FLAG_expose_gc = true;
4969 CcTest::InitializeVM();
4970 v8::HandleScope scope(CcTest::isolate());
4971 Isolate* isolate = CcTest::i_isolate();
4972 Factory* factory = isolate->factory();
4973 Heap* heap = isolate->heap();
4975 Handle<Map> map1 = Map::Create(isolate, 1);
4977 Map::CopyWithField(map1, factory->NewStringFromStaticChars("foo"),
4978 HeapType::Any(isolate), NONE, Representation::Tagged(),
4979 OMIT_TRANSITION).ToHandleChecked();
// Target offset chosen so the object ends exactly at the page boundary.
4981 int desired_offset = Page::kPageSize - map1->instance_size();
4983 // Allocate fixed array in old pointer space so, that object allocated
4984 // afterwards would end at the end of the page.
4986 SimulateFullSpace(heap->old_pointer_space());
4987 int padding_size = desired_offset - Page::kObjectStartOffset;
4988 int padding_array_length =
4989 (padding_size - FixedArray::kHeaderSize) / kPointerSize;
4991 Handle<FixedArray> temp2 =
4992 factory->NewFixedArray(padding_array_length, TENURED);
4993 Page* page = Page::FromAddress(temp2->address());
4994 CHECK_EQ(Page::kObjectStartOffset, page->Offset(temp2->address()));
4997 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED, false);
4998 o->set_properties(*factory->empty_fixed_array());
5000 // Ensure that the object allocated where we need it.
5001 Page* page = Page::FromAddress(o->address());
5002 CHECK_EQ(desired_offset, page->Offset(o->address()));
5004 // Now we have an object right at the end of the page.
5006 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5007 // that would cause crash.
5008 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5011 CHECK(marking->IsMarking());
5013 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5014 // when it calls heap->AdjustLiveBytes(...).
5015 JSObject::MigrateToMap(o, map2);
// Body of a regression test (the TEST(...) header line is not visible in this
// extract): verifies that a WeakMap backing store that was already marked
// black by incremental marking is revisited after it is updated, so the new
// entries are not missed by the finishing garbage collection.
5020 i::FLAG_expose_gc = true;
5021 CcTest::InitializeVM();
5022 v8::HandleScope scope(CcTest::isolate());
5023 Isolate* isolate = CcTest::i_isolate();
5024 Heap* heap = isolate->heap();
5025 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
// Populate a weak map with 50 entries and prepare 50 future keys in script.
// NOTE(review): the closing of this script string and the code between it and
// the IsStopped() check are not visible here (extract appears truncated).
5026 v8::Local<v8::Value> result = CompileRun(
5027 "var weak_map = new WeakMap();"
5028 "var future_keys = [];"
5029 "for (var i = 0; i < 50; i++) {"
5030 " var key = {'k' : i + 0.1};"
5031 " weak_map.set(key, 1);"
5032 " future_keys.push({'x' : i + 0.2});"
5035 if (marking->IsStopped()) {
5038 // Incrementally mark the backing store.
5039 Handle<JSObject> obj =
5040 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5041 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
// Step incremental marking until the hash table backing the weak collection
// is black, or marking finishes on its own.
5042 while (!Marking::IsBlack(
5043 Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
5044 !marking->IsStopped()) {
5045 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5047 // Stash the backing store in a handle.
5048 Handle<Object> save(weak_map->table(), isolate);
5049 // The following line will update the backing store.
// NOTE(review): the CompileRun( call that should precede this string literal
// is not visible in this extract.
5051 "for (var i = 0; i < 50; i++) {"
5052 " weak_map.set(future_keys[i], i);"
// Hurry marking to completion and force a full old-space GC; the bug would
// manifest here if the updated backing store were not re-marked.
5054 heap->incremental_marking()->set_should_hurry(true);
5055 heap->CollectGarbage(OLD_POINTER_SPACE);
// Regression test for chromium:442710: shifting a JS array and then forcing
// an old-pointer-space GC must not crash. shift() removes the first element;
// presumably this exercised in-place left-trimming of the elements backing
// store -- confirm against the referenced bug.
// NOTE(review): the closing brace of this test is not visible in this
// extract.
5059 TEST(Regress442710) {
5060 CcTest::InitializeVM();
5061 Isolate* isolate = CcTest::i_isolate();
5062 Heap* heap = isolate->heap();
5063 Factory* factory = isolate->factory();
5065 HandleScope sc(isolate);
5066 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
5067 Handle<JSArray> array = factory->NewJSArray(2);
// Expose the array to script as a global named "testArray" so the
// CompileRun below can mutate it.
5069 Handle<String> name = factory->InternalizeUtf8String("testArray");
5070 JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
5071 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
// GC after the shift; the regression would crash/assert during collection.
5072 heap->CollectGarbage(OLD_POINTER_SPACE);
// NOTE(review): the closing brace of this test is not visible in this
// extract.
5076 TEST(NumberStringCacheSize) {
5077 // Test that the number-string cache has not been resized in the snapshot.
5078 CcTest::InitializeVM();
// The check is only meaningful when the heap was deserialized from a
// snapshot; skip otherwise.
5079 Isolate* isolate = CcTest::i_isolate();
5080 if (!isolate->snapshot_available()) return;
5081 Heap* heap = isolate->heap();
// Factor of 2: presumably each cache entry occupies two slots (number and
// string) -- confirm against Heap's number_string_cache layout.
5082 CHECK_EQ(TestHeap::kInitialNumberStringCacheSize * 2,
5083 heap->number_string_cache()->length());
// Body of a test (the TEST(...) header line is not visible in this extract)
// verifying weak retention of a function's initial prototype: the WeakCell
// for cls.prototype must stay uncleared while some live map still uses it as
// a prototype, and be cleared once that map becomes collectable.
5088 CcTest::InitializeVM();
5089 Isolate* isolate = CcTest::i_isolate();
5090 Heap* heap = isolate->heap();
5091 Factory* factory = isolate->factory();
5092 HandleScope scope(isolate);
5093 CompileRun("function cls() { this.x = 10; }");
5094 Handle<WeakCell> weak_prototype;
// Materialize a WeakCell pointing at cls.prototype and escape it from the
// inner scope so only the weak reference survives.
// NOTE(review): the opening brace of this inner scope is not visible in this
// extract.
5096 HandleScope inner_scope(isolate);
5097 v8::Local<v8::Value> result = CompileRun("cls.prototype");
5098 Handle<JSObject> proto =
5099 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5100 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
5102 CHECK(!weak_prototype->cleared());
// NOTE(review): the CompileRun( opener and preceding script line(s) -- which
// presumably create |a| holding an instance of cls -- are not visible here.
5106 "cls.prototype = null;");
// Several full GCs; the prototype must survive because a.x's map still
// references it.
5107 for (int i = 0; i < 4; i++) {
5108 heap->CollectAllGarbage(Heap::kNoGCFlags);
5110 // The map of a.x keeps the prototype alive.
5111 CHECK(!weak_prototype->cleared());
5112 // Change the map of a.x and make the previous map garbage collectable.
5113 CompileRun("a.x.__proto__ = {};");
5114 for (int i = 0; i < 4; i++) {
5115 heap->CollectAllGarbage(Heap::kNoGCFlags);
// With the last referencing map gone, the weak cell must now be cleared.
5117 CHECK(weak_prototype->cleared());
// Helper: creates a fresh map whose prototype is a newly allocated script
// object, registers the map via Heap::AddRetainedMap(), and returns a
// WeakCell for it so callers can observe when the retained map is collected.
// NOTE(review): the closing brace of this function is not visible in this
// extract.
5121 Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
5122 HandleScope inner_scope(isolate);
5123 Handle<Map> map = Map::Create(isolate, 1);
// Give the map a prototype created by script so it references an ordinary,
// collectable heap object.
5124 v8::Local<v8::Value> result =
5125 CompileRun("(function () { return {x : 10}; })();");
5126 Handle<JSObject> proto =
5127 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5128 map->set_prototype(*proto);
5129 heap->AddRetainedMap(map);
// Escape only the weak cell; the strong handle to the map dies with
// inner_scope, leaving retention up to the retained-maps mechanism.
5130 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
// Helper: verifies that a map registered via AddRetainedMap survives exactly
// |n| old-space GCs (n == FLAG_retain_maps_for_n_gc) and is collected by the
// following one.
// NOTE(review): the closing braces of the for-loop and of this function are
// not visible in this extract.
5134 void CheckMapRetainingFor(int n) {
5135 FLAG_retain_maps_for_n_gc = n;
5136 Isolate* isolate = CcTest::i_isolate();
5137 Heap* heap = isolate->heap();
5138 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
5139 CHECK(!weak_cell->cleared());
// The map must survive the first n garbage collections...
5140 for (int i = 0; i < n; i++) {
5141 heap->CollectGarbage(OLD_POINTER_SPACE);
5143 CHECK(!weak_cell->cleared());
// ...and be dropped by collection n+1.
5144 heap->CollectGarbage(OLD_POINTER_SPACE);
5145 CHECK(weak_cell->cleared());
// Exercises map retention with the default flag value and with several
// explicit retention counts, including the 0-GC edge case.
// NOTE(review): the closing brace of this test is not visible in this
// extract.
5149 TEST(MapRetaining) {
5150 CcTest::InitializeVM();
5151 v8::HandleScope scope(CcTest::isolate());
5152 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5153 CheckMapRetainingFor(0);
5154 CheckMapRetainingFor(1);
5155 CheckMapRetainingFor(7);
// Regression test: repeatedly adding a retained map, where each addition may
// itself trigger a GC, must not corrupt the retained-maps list.
// NOTE(review): the closing braces of the for-loop and of this test are not
// visible in this extract.
5159 TEST(RegressArrayListGC) {
// Retain maps for only one GC and force global, non-incremental collections
// so the retained-maps list is processed on every cycle.
5160 FLAG_retain_maps_for_n_gc = 1;
5161 FLAG_incremental_marking = 0;
5162 FLAG_gc_global = true;
5163 CcTest::InitializeVM();
5164 v8::HandleScope scope(CcTest::isolate());
5165 Isolate* isolate = CcTest::i_isolate();
5166 Heap* heap = isolate->heap();
5167 AddRetainedMap(isolate, heap);
5168 Handle<Map> map = Map::Create(isolate, 1);
5169 heap->CollectGarbage(OLD_POINTER_SPACE);
5170 // Force GC in old space on next addition of retained map.
5171 Map::WeakCellForMap(map);
5172 SimulateFullSpace(CcTest::heap()->new_space());
// Each AddRetainedMap call can now trigger a collection mid-update; the bug
// would corrupt the list here.
5173 for (int i = 0; i < 10; i++) {
5174 heap->AddRetainedMap(map);
5176 heap->CollectGarbage(OLD_POINTER_SPACE);
// Fragment of a (presumably debug-only) test whose header line is not
// visible in this extract: exercises Heap::TracePathToObject on a simple
// script-created string. Presumably this traces/prints the retainer path to
// the object -- confirm against the Heap path-tracer implementation.
5182 CcTest::InitializeVM();
5183 v8::HandleScope scope(CcTest::isolate());
5185 v8::Local<v8::Value> result = CompileRun("'abc'");
5186 Handle<Object> o = v8::Utils::OpenHandle(*result);
5187 CcTest::i_isolate()->heap()->TracePathToObject(*o);
// Checks the static classification of every strong heap root: no root may be
// both writable-after-initialization and immortal-immovable at once.
// NOTE(review): the closing braces of the for-loop and of this test are not
// visible in this extract.
5192 TEST(WritableVsImmortalRoots) {
5193 for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
5194 Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
5195 bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
5196 bool immortal = Heap::RootIsImmortalImmovable(root_index);
5197 // A root value can be writable, immortal, or neither, but not both.
5198 CHECK(!immortal || !writable);
// Helper for Regress472513: allocates a fixed typed array, right-trims it by
// |elements_to_trim| elements, and verifies that the array header survived,
// the length was updated, and a filler object was installed in the freed
// tail.
// NOTE(review): a parameter line appears to be missing from this extract --
// |initial_length| is used below but is not declared in the visible
// signature; as written this would not compile.
5203 static void TestRightTrimFixedTypedArray(v8::ExternalArrayType type,
5205 int elements_to_trim) {
5206 v8::HandleScope scope(CcTest::isolate());
5207 Isolate* isolate = CcTest::i_isolate();
5208 Factory* factory = isolate->factory();
5209 Heap* heap = isolate->heap();
5211 Handle<FixedTypedArrayBase> array =
5212 factory->NewFixedTypedArray(initial_length, type);
// Remember the byte size before trimming so we can tell whether the trim
// actually released tail space.
5213 int old_size = array->size();
5214 heap->RightTrimFixedArray<Heap::FROM_MUTATOR>(*array, elements_to_trim);
5216 // Check that free space filler is at the right place and did not smash the
// array header.
5218 CHECK(array->IsFixedArrayBase());
5219 CHECK_EQ(initial_length - elements_to_trim, array->length());
5220 int new_size = array->size();
5221 if (new_size != old_size) {
5222 // Free space filler should be created in this case.
5223 Address next_obj_address = array->address() + array->size();
5224 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
// A full GC sanity-checks that the trimmed object and its filler left the
// heap in a consistent state (would crash/assert otherwise).
5226 heap->CollectAllAvailableGarbage();
5230 TEST(Regress472513) {
5231 CcTest::InitializeVM();
5232 v8::HandleScope scope(CcTest::isolate());
5234 // The combination of type/initial_length/elements_to_trim triggered
5235 // typed array header smashing with free space filler (crbug/472513).
5238 TestRightTrimFixedTypedArray(v8::kExternalUint8Array, 32, 6);
5239 TestRightTrimFixedTypedArray(v8::kExternalUint8Array, 32 - 7, 6);
5240 TestRightTrimFixedTypedArray(v8::kExternalUint16Array, 16, 6);
5241 TestRightTrimFixedTypedArray(v8::kExternalUint16Array, 16 - 3, 6);
5242 TestRightTrimFixedTypedArray(v8::kExternalUint32Array, 8, 6);
5243 TestRightTrimFixedTypedArray(v8::kExternalUint32Array, 8 - 1, 6);
5246 TestRightTrimFixedTypedArray(v8::kExternalUint8Array, 16, 3);
5247 TestRightTrimFixedTypedArray(v8::kExternalUint8Array, 16 - 3, 3);
5248 TestRightTrimFixedTypedArray(v8::kExternalUint16Array, 8, 3);
5249 TestRightTrimFixedTypedArray(v8::kExternalUint16Array, 8 - 1, 3);
5250 TestRightTrimFixedTypedArray(v8::kExternalUint32Array, 4, 3);