1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include "src/compilation-cache.h"
34 #include "src/execution.h"
35 #include "src/factory.h"
36 #include "src/global-handles.h"
37 #include "src/ic/ic.h"
38 #include "src/macro-assembler.h"
39 #include "test/cctest/cctest.h"
41 using namespace v8::internal;
// Asserts the structural invariants of a Map object: it is a heap object
// contained in the heap, its own map is the meta map, and its instance
// type and instance size match the expected values.
43 static void CheckMap(Map* map, int type, int instance_size) {
44   CHECK(map->IsHeapObject());
46   CHECK(CcTest::heap()->Contains(map));
48   CHECK_EQ(CcTest::heap()->meta_map(), map->map());
49   CHECK_EQ(type, map->instance_type());
50   CHECK_EQ(instance_size, map->instance_size());
// Test body: after VM initialization, validate the layout of several
// well-known root maps. kVariableSizeSentinel marks maps whose instances
// are variable-sized (fixed arrays, strings).
55 CcTest::InitializeVM();
56 Heap* heap = CcTest::heap();
57 CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
58 CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
59 CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
60 CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
// Asserts that |obj| is an Oddball and that Execution::ToString on it
// yields exactly |string| (UTF-8 comparison).
64 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
65   CHECK(obj->IsOddball());
66   Handle<Object> handle(obj, isolate);
67   Object* print_string =
68       *Execution::ToString(isolate, handle).ToHandleChecked();
69   CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Asserts that ToString of the Smi |value| produces exactly |string|.
73 static void CheckSmi(Isolate* isolate, int value, const char* string) {
74   Handle<Object> handle(Smi::FromInt(value), isolate);
75   Object* print_string =
76       *Execution::ToString(isolate, handle).ToHandleChecked();
77   CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Allocates |value| through the factory (Smi or HeapNumber, whichever
// fits) and asserts its ToString representation equals |string|.
81 static void CheckNumber(Isolate* isolate, double value, const char* string) {
82   Handle<Object> number = isolate->factory()->NewNumber(value);
83   CHECK(number->IsNumber());
84   Handle<Object> print_string =
85       Execution::ToString(isolate, number).ToHandleChecked();
86   CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
// Exercises Isolate::FindCodeObject: assembles a trivial code object and
// verifies that every pointer-aligned interior address maps back to that
// object, while an interior address of a *different* code object does not.
90 static void CheckFindCodeObject(Isolate* isolate) {
91   // Test FindCodeObject
94   Assembler assm(isolate, NULL, 0);
96   __ nop();  // supported on all architectures
// presumably a CodeDesc named `desc` is produced from the assembler here
// (line elided in this excerpt) — NewCode below consumes it.
100   Handle<Code> code = isolate->factory()->NewCode(
101       desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
102   CHECK(code->IsCode());
104   HeapObject* obj = HeapObject::cast(*code);
105   Address obj_addr = obj->address();
// Every interior address of the code object must resolve to it.
107   for (int i = 0; i < obj->Size(); i += kPointerSize) {
108     Object* found = isolate->FindCodeObject(obj_addr + i);
109     CHECK_EQ(*code, found);
// A second, distinct code object: an address inside the copy must not
// resolve to the original.
112   Handle<Code> copy = isolate->factory()->NewCode(
113       desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
114   HeapObject* obj_copy = HeapObject::cast(*copy);
115   Object* not_right = isolate->FindCodeObject(obj_copy->address() +
116                                               obj_copy->Size() / 2);
117   CHECK(not_right != *code);
// Test body: constructing a Handle that wraps a NULL Object* must be
// tolerated (must not crash during handle creation).
122 CcTest::InitializeVM();
123 Isolate* isolate = CcTest::i_isolate();
124 HandleScope outer_scope(isolate);
125 LocalContext context;
126 Handle<Object> n(reinterpret_cast<Object*>(NULL), isolate);
// Test body: exercises basic heap object allocation — numbers (Smi vs.
// HeapNumber boundaries), strings, oddballs — and their ToString behavior.
132 CcTest::InitializeVM();
133 Isolate* isolate = CcTest::i_isolate();
134 Factory* factory = isolate->factory();
135 Heap* heap = isolate->heap();
137 HandleScope sc(isolate);
// A non-integral double must become a HeapNumber.
138 Handle<Object> value = factory->NewNumber(1.000123);
139 CHECK(value->IsHeapNumber());
140 CHECK(value->IsNumber());
141 CHECK_EQ(1.000123, value->Number());
// An integral double in Smi range must become a Smi.
143 value = factory->NewNumber(1.0);
144 CHECK(value->IsSmi());
145 CHECK(value->IsNumber());
146 CHECK_EQ(1.0, value->Number());
148 value = factory->NewNumberFromInt(1024);
149 CHECK(value->IsSmi());
150 CHECK(value->IsNumber());
151 CHECK_EQ(1024.0, value->Number());
// Smi range boundaries stay Smis.
153 value = factory->NewNumberFromInt(Smi::kMinValue);
154 CHECK(value->IsSmi());
155 CHECK(value->IsNumber());
156 CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
158 value = factory->NewNumberFromInt(Smi::kMaxValue);
159 CHECK(value->IsSmi());
160 CHECK(value->IsNumber());
161 CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
// On 32-bit targets, values just outside Smi range become HeapNumbers.
// (On 64-bit targets all int32 values fit in a Smi, hence the guard.)
163 #if !defined(V8_TARGET_ARCH_X64) && !defined(V8_TARGET_ARCH_ARM64) && \
164     !defined(V8_TARGET_ARCH_MIPS64)
165 // TODO(lrn): We need a NumberFromIntptr function in order to test this.
166 value = factory->NewNumberFromInt(Smi::kMinValue - 1);
167 CHECK(value->IsHeapNumber());
168 CHECK(value->IsNumber());
169 CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
172 value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
173 CHECK(value->IsHeapNumber());
174 CHECK(value->IsNumber());
175 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
178 value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
179 CHECK(value->IsHeapNumber());
180 CHECK(value->IsNumber());
181 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
184 // nan oddball checks
185 CHECK(factory->nan_value()->IsNumber());
186 CHECK(std::isnan(factory->nan_value()->Number()));
188 Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
189 CHECK(s->IsString());
190 CHECK_EQ(10, s->length());
192 Handle<String> object_string = Handle<String>::cast(factory->Object_string());
193 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
194 v8::Maybe<bool> maybe = JSReceiver::HasOwnProperty(global, object_string);
195 CHECK(maybe.has_value);
198 // Check ToString for oddballs
199 CheckOddball(isolate, heap->true_value(), "true");
200 CheckOddball(isolate, heap->false_value(), "false");
201 CheckOddball(isolate, heap->null_value(), "null");
202 CheckOddball(isolate, heap->undefined_value(), "undefined");
204 // Check ToString for Smis
205 CheckSmi(isolate, 0, "0");
206 CheckSmi(isolate, 42, "42");
207 CheckSmi(isolate, -42, "-42");
209 // Check ToString for Numbers
210 CheckNumber(isolate, 1.1, "1.1");
212 CheckFindCodeObject(isolate);
// Test body: sanity-checks Smi tagging (min/max values round-trip through
// FromInt) and that an allocation request size is object-pointer aligned.
217 CcTest::InitializeVM();
// `request` is declared on a line elided from this excerpt.
219 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
220 CHECK(Smi::FromInt(42)->IsSmi());
221 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
222 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
// Verifies that objects reachable from the global object survive a
// new-space GC, while handle-scope-local garbage may be collected:
// a function and properties set on objects must be retained across GCs.
226 TEST(GarbageCollection) {
227   CcTest::InitializeVM();
228   Isolate* isolate = CcTest::i_isolate();
229   Heap* heap = isolate->heap();
230   Factory* factory = isolate->factory();
232   HandleScope sc(isolate);
// Initial GC to start from a clean new space.
234   heap->CollectGarbage(NEW_SPACE);
236   Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
237   Handle<String> name = factory->InternalizeUtf8String("theFunction");
238   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
239   Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
240   Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
241   Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
242   Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
245     HandleScope inner_scope(isolate);
246     // Allocate a function and keep it in global object's property.
247     Handle<JSFunction> function = factory->NewFunction(name);
248     JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
249     // Allocate an object.  Unrooted after leaving the scope.
250     Handle<JSObject> obj = factory->NewJSObject(function);
251     JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
252     JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
254     CHECK_EQ(Smi::FromInt(23),
255              *Object::GetProperty(obj, prop_name).ToHandleChecked());
256     CHECK_EQ(Smi::FromInt(24),
257              *Object::GetProperty(obj, prop_namex).ToHandleChecked());
260   heap->CollectGarbage(NEW_SPACE);
262   // Function should be alive.
263   v8::Maybe<bool> maybe = JSReceiver::HasOwnProperty(global, name);
264   CHECK(maybe.has_value);
266   // Check function is retained.
267   Handle<Object> func_value =
268       Object::GetProperty(global, name).ToHandleChecked();
269   CHECK(func_value->IsJSFunction());
270   Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
273     HandleScope inner_scope(isolate);
274     // Allocate another object, make it reachable from global.
275     Handle<JSObject> obj = factory->NewJSObject(function);
276     JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
277     JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
280   // After gc, it should survive.
281   heap->CollectGarbage(NEW_SPACE);
283   maybe = JSReceiver::HasOwnProperty(global, obj_name);
284   CHECK(maybe.has_value);
// `obj` is re-fetched from the global here (declaration elided in this
// excerpt) and its property checked after GC.
287       Object::GetProperty(global, obj_name).ToHandleChecked();
288   CHECK(obj->IsJSObject());
289   CHECK_EQ(Smi::FromInt(23),
290            *Object::GetProperty(obj, prop_name).ToHandleChecked());
// Allocates |string| as a heap String via UTF-8 conversion and verifies
// length and each code unit match the C string.
294 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
295   HandleScope scope(isolate);
296   Handle<String> s = isolate->factory()->NewStringFromUtf8(
297       CStrVector(string)).ToHandleChecked();
298   CHECK_EQ(StrLength(string), s->length());
299   for (int index = 0; index < s->length(); index++) {
300     CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
// Test body: allocates strings of several lengths and verifies contents.
306 CcTest::InitializeVM();
307 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
309 VerifyStringAllocation(isolate, "a");
310 VerifyStringAllocation(isolate, "ab");
311 VerifyStringAllocation(isolate, "abc");
312 VerifyStringAllocation(isolate, "abcd");
313 VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
// Test body: allocates an ASCII string via the factory and verifies its
// length matches the source C string.
318 CcTest::InitializeVM();
319 Isolate* isolate = CcTest::i_isolate();
320 Factory* factory = isolate->factory();
322 v8::HandleScope scope(CcTest::isolate());
323 const char* name = "Kasper the spunky";
324 Handle<String> string = factory->NewStringFromAsciiChecked(name);
325 CHECK_EQ(StrLength(name), string->length());
// Verifies that strong global handles keep their targets alive across a
// new-space GC, and that they can be destroyed afterwards.
329 TEST(GlobalHandles) {
330   CcTest::InitializeVM();
331   Isolate* isolate = CcTest::i_isolate();
332   Heap* heap = isolate->heap();
333   Factory* factory = isolate->factory();
334   GlobalHandles* global_handles = isolate->global_handles();
// h1..h4 are declared outside this inner scope (declarations elided in
// this excerpt) so they outlive the HandleScope below.
342     HandleScope scope(isolate);
344     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
345     Handle<Object> u = factory->NewNumber(1.12344);
347     h1 = global_handles->Create(*i);
348     h2 = global_handles->Create(*u);
349     h3 = global_handles->Create(*i);
350     h4 = global_handles->Create(*u);
353   // after gc, it should survive
354   heap->CollectGarbage(NEW_SPACE);
356   CHECK((*h1)->IsString());
357   CHECK((*h2)->IsHeapNumber());
358   CHECK((*h3)->IsString());
359   CHECK((*h4)->IsHeapNumber());
362   GlobalHandles::Destroy(h1.location());
363   GlobalHandles::Destroy(h3.location());
366   GlobalHandles::Destroy(h2.location());
367   GlobalHandles::Destroy(h4.location());
// Set by the weak callback below; tests reset it before making handles
// weak and then assert on it after GC.
371 static bool WeakPointerCleared = false;
// Weak-handle callback: the parameter is a (persistent, id) pair; only a
// pair tagged with id 1234 flips WeakPointerCleared.
373 static void TestWeakGlobalHandleCallback(
374     const v8::WeakCallbackData<v8::Value, void>& data) {
375   std::pair<v8::Persistent<v8::Value>*, int>* p =
376       reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
377           data.GetParameter());
378   if (p->second == 1234) WeakPointerCleared = true;
// Verifies that a scavenge (new-space GC) treats weak global handles as
// strong roots: the weak handle's target survives and the weak callback
// does not fire.
383 TEST(WeakGlobalHandlesScavenge) {
384   i::FLAG_stress_compaction = false;
385   CcTest::InitializeVM();
386   Isolate* isolate = CcTest::i_isolate();
387   Heap* heap = isolate->heap();
388   Factory* factory = isolate->factory();
389   GlobalHandles* global_handles = isolate->global_handles();
391   WeakPointerCleared = false;
// h1/h2 are declared outside this scope (elided in this excerpt).
397     HandleScope scope(isolate);
399     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
400     Handle<Object> u = factory->NewNumber(1.12344);
402     h1 = global_handles->Create(*i);
403     h2 = global_handles->Create(*u);
406   std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
407   GlobalHandles::MakeWeak(h2.location(),
408                           reinterpret_cast<void*>(&handle_and_id),
409                           &TestWeakGlobalHandleCallback);
411   // Scavenge treats weak pointers as normal roots.
412   heap->CollectGarbage(NEW_SPACE);
414   CHECK((*h1)->IsString());
415   CHECK((*h2)->IsHeapNumber());
417   CHECK(!WeakPointerCleared);
418   CHECK(!global_handles->IsNearDeath(h2.location()));
419   CHECK(!global_handles->IsNearDeath(h1.location()));
421   GlobalHandles::Destroy(h1.location());
422   GlobalHandles::Destroy(h2.location());
// Verifies that a full mark-compact GC clears an otherwise-unreachable
// weak global handle (callback fires) while a strong handle's target
// survives. Objects are first promoted to old space.
426 TEST(WeakGlobalHandlesMark) {
427   CcTest::InitializeVM();
428   Isolate* isolate = CcTest::i_isolate();
429   Heap* heap = isolate->heap();
430   Factory* factory = isolate->factory();
431   GlobalHandles* global_handles = isolate->global_handles();
433   WeakPointerCleared = false;
// h1/h2 are declared outside this scope (elided in this excerpt).
439     HandleScope scope(isolate);
441     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
442     Handle<Object> u = factory->NewNumber(1.12344);
444     h1 = global_handles->Create(*i);
445     h2 = global_handles->Create(*u);
448   // Make sure the objects are promoted.
449   heap->CollectGarbage(OLD_POINTER_SPACE);
450   heap->CollectGarbage(NEW_SPACE);
451   CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
453   std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
454   GlobalHandles::MakeWeak(h2.location(),
455                           reinterpret_cast<void*>(&handle_and_id),
456                           &TestWeakGlobalHandleCallback);
457   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
458   CHECK(!GlobalHandles::IsNearDeath(h2.location()));
460   // Incremental marking potentially marked handles before they turned weak.
461   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
463   CHECK((*h1)->IsString());
465   CHECK(WeakPointerCleared);
466   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
468   GlobalHandles::Destroy(h1.location());
// Verifies weak-handle clearing semantics per GC type: a scavenge does
// not clear the weak reference, but a mark-compact does.
472 TEST(DeleteWeakGlobalHandle) {
473   i::FLAG_stress_compaction = false;
474   CcTest::InitializeVM();
475   Isolate* isolate = CcTest::i_isolate();
476   Heap* heap = isolate->heap();
477   Factory* factory = isolate->factory();
478   GlobalHandles* global_handles = isolate->global_handles();
480   WeakPointerCleared = false;
// `h` is declared outside this scope (elided in this excerpt).
485     HandleScope scope(isolate);
487     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
488     h = global_handles->Create(*i);
491   std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
492   GlobalHandles::MakeWeak(h.location(),
493                           reinterpret_cast<void*>(&handle_and_id),
494                           &TestWeakGlobalHandleCallback);
496   // Scanvenge does not recognize weak reference.
497   heap->CollectGarbage(NEW_SPACE);
499   CHECK(!WeakPointerCleared);
501   // Mark-compact treats weak reference properly.
502   heap->CollectGarbage(OLD_POINTER_SPACE);
504   CHECK(WeakPointerCleared);
// String table used by the internalization tests below; the entries
// (elided in this excerpt) end with a 0 sentinel, which
// CheckInternalizedStrings relies on to terminate iteration.
508 static const char* not_so_random_string_table[] = {
// For each entry in the 0-terminated |strings| table: internalizes it and
// asserts the result is an internalized string, that repeated
// internalization is idempotent, and that contents match the source.
572 static void CheckInternalizedStrings(const char** strings) {
573   Isolate* isolate = CcTest::i_isolate();
574   Factory* factory = isolate->factory();
575   for (const char* string = *strings; *strings != 0; string = *strings++) {
576     HandleScope scope(isolate);
// `a` is declared on a line elided from this excerpt; it receives the
// result of the InternalizeUtf8String call below.
578         isolate->factory()->InternalizeUtf8String(CStrVector(string));
579     // InternalizeUtf8String may return a failure if a GC is needed.
580     CHECK(a->IsInternalizedString());
581     Handle<String> b = factory->InternalizeUtf8String(string);
583     CHECK(b->IsUtf8EqualTo(CStrVector(string)));
584     b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
586     CHECK(b->IsUtf8EqualTo(CStrVector(string)));
// Test body: internalizing the same table twice must be stable.
592 CcTest::InitializeVM();
594 v8::HandleScope sc(CcTest::isolate());
595 CheckInternalizedStrings(not_so_random_string_table);
596 CheckInternalizedStrings(not_so_random_string_table);
// Verifies that properties can be added both to an object created from a
// function and to the function object itself.
600 TEST(FunctionAllocation) {
601   CcTest::InitializeVM();
602   Isolate* isolate = CcTest::i_isolate();
603   Factory* factory = isolate->factory();
605   v8::HandleScope sc(CcTest::isolate());
606   Handle<String> name = factory->InternalizeUtf8String("theFunction");
607   Handle<JSFunction> function = factory->NewFunction(name);
609   Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
610   Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
612   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
613   Handle<JSObject> obj = factory->NewJSObject(function);
614   JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
615   CHECK_EQ(Smi::FromInt(23),
616            *Object::GetProperty(obj, prop_name).ToHandleChecked());
617   // Check that we can add properties to function objects.
618   JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
619   CHECK_EQ(Smi::FromInt(24),
620            *Object::GetProperty(function, prop_name).ToHandleChecked());
// Exercises add/delete property sequences on a plain JS object, plus
// lookup equivalence between plain strings and internalized strings.
// (The CHECK(maybe.FromJust())/!FromJust() lines following each
// HasOwnProperty probe are elided in this excerpt.)
624 TEST(ObjectProperties) {
625   CcTest::InitializeVM();
626   Isolate* isolate = CcTest::i_isolate();
627   Factory* factory = isolate->factory();
629   v8::HandleScope sc(CcTest::isolate());
630   Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
631   Handle<Object> object = Object::GetProperty(
632       CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
633   Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
634   Handle<JSObject> obj = factory->NewJSObject(constructor);
635   Handle<String> first = factory->InternalizeUtf8String("first");
636   Handle<String> second = factory->InternalizeUtf8String("second");
638   Handle<Smi> one(Smi::FromInt(1), isolate);
639   Handle<Smi> two(Smi::FromInt(2), isolate);
// Fresh object: no own properties yet.
642   v8::Maybe<bool> maybe = JSReceiver::HasOwnProperty(obj, first);
643   CHECK(maybe.has_value);
// Add a property, then verify presence.
647   JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
648   maybe = JSReceiver::HasOwnProperty(obj, first);
649   CHECK(maybe.has_value);
// Delete it again.
653   JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check();
654   maybe = JSReceiver::HasOwnProperty(obj, first);
655   CHECK(maybe.has_value);
658   // add first and then second
659   JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
660   JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
661   maybe = JSReceiver::HasOwnProperty(obj, first);
662   CHECK(maybe.has_value);
664   maybe = JSReceiver::HasOwnProperty(obj, second);
665   CHECK(maybe.has_value);
668   // delete first and then second
669   JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check();
670   maybe = JSReceiver::HasOwnProperty(obj, second);
671   CHECK(maybe.has_value);
673   JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION).Check();
674   maybe = JSReceiver::HasOwnProperty(obj, first);
675   CHECK(maybe.has_value);
677   maybe = JSReceiver::HasOwnProperty(obj, second);
678   CHECK(maybe.has_value);
681   // add first and then second
682   JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
683   JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
684   maybe = JSReceiver::HasOwnProperty(obj, first);
685   CHECK(maybe.has_value);
687   maybe = JSReceiver::HasOwnProperty(obj, second);
688   CHECK(maybe.has_value);
691   // delete second and then first
692   JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION).Check();
693   maybe = JSReceiver::HasOwnProperty(obj, first);
694   CHECK(maybe.has_value);
696   JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION).Check();
697   maybe = JSReceiver::HasOwnProperty(obj, first);
698   CHECK(maybe.has_value);
700   maybe = JSReceiver::HasOwnProperty(obj, second);
701   CHECK(maybe.has_value);
704   // check string and internalized string match
705   const char* string1 = "fisk";
706   Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
707   JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
708   Handle<String> s1_string = factory->InternalizeUtf8String(string1);
709   maybe = JSReceiver::HasOwnProperty(obj, s1_string);
710   CHECK(maybe.has_value);
713   // check internalized string and string match
714   const char* string2 = "fugl";
715   Handle<String> s2_string = factory->InternalizeUtf8String(string2);
716   JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
717   Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
718   maybe = JSReceiver::HasOwnProperty(obj, s2);
719   CHECK(maybe.has_value);
// Test body: adding a property to a fresh JSObject must transition it to
// a new map (the initial map is left unchanged).
725 CcTest::InitializeVM();
726 Isolate* isolate = CcTest::i_isolate();
727 Factory* factory = isolate->factory();
729 v8::HandleScope sc(CcTest::isolate());
730 Handle<String> name = factory->InternalizeUtf8String("theFunction");
731 Handle<JSFunction> function = factory->NewFunction(name);
733 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
734 Handle<JSObject> obj = factory->NewJSObject(function);
735 Handle<Map> initial_map(function->initial_map());
738 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
739 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
740 CHECK_EQ(Smi::FromInt(23),
741          *Object::GetProperty(obj, prop_name).ToHandleChecked());
743 // Check the map has changed
744 CHECK(*initial_map != obj->map());
// Test body: exercises JSArray length/element semantics, including the
// fast-elements to dictionary-elements transition when the length exceeds
// the Smi range.
749 CcTest::InitializeVM();
750 Isolate* isolate = CcTest::i_isolate();
751 Factory* factory = isolate->factory();
753 v8::HandleScope sc(CcTest::isolate());
754 Handle<String> name = factory->InternalizeUtf8String("Array");
755 Handle<Object> fun_obj = Object::GetProperty(
756     CcTest::i_isolate()->global_object(), name).ToHandleChecked();
757 Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
759 // Allocate the object.
760 Handle<Object> element;
761 Handle<JSObject> object = factory->NewJSObject(function);
762 Handle<JSArray> array = Handle<JSArray>::cast(object);
763 // We just initialized the VM, no heap allocation failure yet.
764 JSArray::Initialize(array, 0);
766 // Set array length to 0.
767 JSArray::SetElementsLength(array, handle(Smi::FromInt(0), isolate)).Check();
768 CHECK_EQ(Smi::FromInt(0), array->length());
769 // Must be in fast mode.
770 CHECK(array->HasFastSmiOrObjectElements());
772 // array[length] = name.
773 JSReceiver::SetElement(array, 0, name, NONE, SLOPPY).Check();
774 CHECK_EQ(Smi::FromInt(1), array->length());
775 element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
776 CHECK_EQ(*element, *name);
778 // Set array length with larger than smi value.
779 Handle<Object> length =
780     factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
781 JSArray::SetElementsLength(array, length).Check();
783 uint32_t int_length = 0;
784 CHECK(length->ToArrayIndex(&int_length));
785 CHECK_EQ(*length, array->length());
786 CHECK(array->HasDictionaryElements());  // Must be in slow mode.
788 // array[length] = name.
789 JSReceiver::SetElement(array, int_length, name, NONE, SLOPPY).Check();
790 uint32_t new_int_length = 0;
791 CHECK(array->length()->ToArrayIndex(&new_int_length));
// Writing at index `int_length` must grow the length by exactly one.
792 CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
793 element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
794 CHECK_EQ(*element, *name);
795 element = Object::GetElement(isolate, array, 0).ToHandleChecked();
796 CHECK_EQ(*element, *name);
// Test body: copies a JSObject and verifies that named properties and
// elements are duplicated, and that subsequently mutating the clone does
// not affect the original (checked via crossed value comparisons).
801 CcTest::InitializeVM();
802 Isolate* isolate = CcTest::i_isolate();
803 Factory* factory = isolate->factory();
805 v8::HandleScope sc(CcTest::isolate());
806 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
807 Handle<Object> object = Object::GetProperty(
808     CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
809 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
810 Handle<JSObject> obj = factory->NewJSObject(constructor);
811 Handle<String> first = factory->InternalizeUtf8String("first");
812 Handle<String> second = factory->InternalizeUtf8String("second");
814 Handle<Smi> one(Smi::FromInt(1), isolate);
815 Handle<Smi> two(Smi::FromInt(2), isolate);
817 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
818 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
820 JSReceiver::SetElement(obj, 0, first, NONE, SLOPPY).Check();
821 JSReceiver::SetElement(obj, 1, second, NONE, SLOPPY).Check();
// Clone and verify it is a distinct object with equal contents.
824 Handle<Object> value1, value2;
825 Handle<JSObject> clone = factory->CopyJSObject(obj);
826 CHECK(!clone.is_identical_to(obj));
828 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
829 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
830 CHECK_EQ(*value1, *value2);
831 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
832 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
833 CHECK_EQ(*value1, *value2);
835 value1 = Object::GetProperty(obj, first).ToHandleChecked();
836 value2 = Object::GetProperty(clone, first).ToHandleChecked();
837 CHECK_EQ(*value1, *value2);
838 value1 = Object::GetProperty(obj, second).ToHandleChecked();
839 value2 = Object::GetProperty(clone, second).ToHandleChecked();
840 CHECK_EQ(*value1, *value2);
// Flip values/elements on the clone only; the original must be unchanged,
// so original[x] now equals clone[swapped x].
843 JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
844 JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();
846 JSReceiver::SetElement(clone, 0, second, NONE, SLOPPY).Check();
847 JSReceiver::SetElement(clone, 1, first, NONE, SLOPPY).Check();
849 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
850 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
851 CHECK_EQ(*value1, *value2);
852 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
853 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
854 CHECK_EQ(*value1, *value2);
856 value1 = Object::GetProperty(obj, second).ToHandleChecked();
857 value2 = Object::GetProperty(clone, first).ToHandleChecked();
858 CHECK_EQ(*value1, *value2);
859 value1 = Object::GetProperty(obj, first).ToHandleChecked();
860 value2 = Object::GetProperty(clone, second).ToHandleChecked();
861 CHECK_EQ(*value1, *value2);
// Allocates one-byte and multi-byte (UTF-8, 3 bytes per char) strings of
// lengths 0..99, both internalized and plain, and verifies the reported
// character length matches.
865 TEST(StringAllocation) {
866   CcTest::InitializeVM();
867   Isolate* isolate = CcTest::i_isolate();
868   Factory* factory = isolate->factory();
// A single 3-byte UTF-8 sequence (CJK character), repeated per char.
870   const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
871   for (int length = 0; length < 100; length++) {
872     v8::HandleScope scope(CcTest::isolate());
873     char* non_one_byte = NewArray<char>(3 * length + 1);
874     char* one_byte = NewArray<char>(length + 1);
875     non_one_byte[3 * length] = 0;
876     one_byte[length] = 0;
877     for (int i = 0; i < length; i++) {
879       non_one_byte[3 * i] = chars[0];
880       non_one_byte[3 * i + 1] = chars[1];
881       non_one_byte[3 * i + 2] = chars[2];
// Each 3-byte sequence decodes to one character, so length (not 3*length)
// is the expected string length.
883     Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
884         Vector<const char>(non_one_byte, 3 * length));
885     CHECK_EQ(length, non_one_byte_sym->length());
886     Handle<String> one_byte_sym =
887         factory->InternalizeOneByteString(OneByteVector(one_byte, length));
888     CHECK_EQ(length, one_byte_sym->length());
889     Handle<String> non_one_byte_str =
890         factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
892     non_one_byte_str->Hash();
893     CHECK_EQ(length, non_one_byte_str->length());
894     Handle<String> one_byte_str =
895         factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
897     one_byte_str->Hash();
898     CHECK_EQ(length, one_byte_str->length());
899     DeleteArray(non_one_byte);
900     DeleteArray(one_byte);
// Walks the whole heap and counts how many of the |size| objects in
// |objs| are found (used by the Iteration test below).
905 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
906   // Count the number of objects found in the heap.
908   HeapIterator iterator(heap);
909   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
910     for (int i = 0; i < size; i++) {
911       if (*objs[i] == obj) {
// Test body: allocates objects across the different heap spaces (new,
// old pointer, old data, large object) and verifies the heap iterator
// visits every one of them exactly.
921 CcTest::InitializeVM();
922 Isolate* isolate = CcTest::i_isolate();
923 Factory* factory = isolate->factory();
924 v8::HandleScope scope(CcTest::isolate());
926 // Array of objects to scan haep for.
927 const int objs_count = 6;
928 Handle<Object> objs[objs_count];
929 int next_objs_index = 0;
931 // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
932 objs[next_objs_index++] = factory->NewJSArray(10);
933 objs[next_objs_index++] = factory->NewJSArray(10,
937 // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
938 objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
939 objs[next_objs_index++] =
940     factory->NewStringFromStaticChars("abcdefghij", TENURED);
942 // Allocate a large string (for large object space).
943 int large_size = Page::kMaxRegularHeapObjectSize + 1;
944 char* str = new char[large_size];
945 for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
946 str[large_size - 1] = '\0';
947 objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
950 // Add a Map object to look for.
951 objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
953 CHECK_EQ(objs_count, next_objs_index);
954 CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
// Verifies that escaping an empty handle from a nested handle scope
// yields a null handle rather than crashing.
958 TEST(EmptyHandleEscapeFrom) {
959   CcTest::InitializeVM();
961   v8::HandleScope scope(CcTest::isolate());
962   Handle<JSObject> runaway;
965     v8::EscapableHandleScope nested(CcTest::isolate());
966     Handle<JSObject> empty;
967     runaway = empty.EscapeFrom(&nested);
970   CHECK(runaway.is_null());
// Converts a FixedArray allocation size in bytes to its element count
// (inverse of FixedArray::SizeFor).
974 static int LenFromSize(int size) {
975   return (size - FixedArray::kHeaderSize) / kPointerSize;
// Regression test for crbug.com/39128: fills new space exactly, creates a
// JSObject whose in-object field points into new space, then clones it
// into old pointer space and relies on write-barrier/region-mark updates
// being correct for the clone.
979 TEST(Regression39128) {
980   // Test case for crbug.com/39128.
981   CcTest::InitializeVM();
982   Isolate* isolate = CcTest::i_isolate();
983   TestHeap* heap = CcTest::test_heap();
985   // Increase the chance of 'bump-the-pointer' allocation in old space.
986   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
988   v8::HandleScope scope(CcTest::isolate());
990   // The plan: create JSObject which references objects in new space.
991   // Then clone this object (forcing it to go into old space) and check
992   // that region dirty marks are updated correctly.
994   // Step 1: prepare a map for the object.  We add 1 inobject property to it.
995   // Create a map with single inobject property.
996   Handle<Map> my_map = Map::Create(CcTest::i_isolate(), 1);
997   int n_properties = my_map->inobject_properties();
998   CHECK_GT(n_properties, 0);
1000   int object_size = my_map->instance_size();
1002   // Step 2: allocate a lot of objects so to almost fill new space: we need
1003   // just enough room to allocate JSObject and thus fill the newspace.
1005   int allocation_amount = Min(FixedArray::kMaxSize,
1006                               Page::kMaxRegularHeapObjectSize + kPointerSize);
1007   int allocation_len = LenFromSize(allocation_amount);
1008   NewSpace* new_space = heap->new_space();
1009   Address* top_addr = new_space->allocation_top_address();
1010   Address* limit_addr = new_space->allocation_limit_address();
1011   while ((*limit_addr - *top_addr) > allocation_amount) {
1012     CHECK(!heap->always_allocate());
1013     Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
1014     CHECK(new_space->Contains(array));
1017   // Step 3: now allocate fixed array and JSObject to fill the whole new space.
1018   int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
1019   int fixed_array_len = LenFromSize(to_fill);
1020   CHECK(fixed_array_len < FixedArray::kMaxLength);
1022   CHECK(!heap->always_allocate());
1023   Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
1024   CHECK(new_space->Contains(array));
1026   Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
1027   CHECK(new_space->Contains(object));
1028   JSObject* jsobject = JSObject::cast(object);
1029   CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
1030   CHECK_EQ(0, jsobject->properties()->length());
1031   // Create a reference to object in new space in jsobject.
1032   FieldIndex index = FieldIndex::ForInObjectOffset(
1033       JSObject::kHeaderSize - kPointerSize);
1034   jsobject->FastPropertyAtPut(index, array);
// New space must now be exactly full.
1036   CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
1038   // Step 4: clone jsobject, but force always allocate first to create a clone
1039   // in old pointer space.
1040   Address old_pointer_space_top = heap->old_pointer_space()->top();
1041   AlwaysAllocateScope aa_scope(isolate);
1042   Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
1043   JSObject* clone = JSObject::cast(clone_obj);
1044   if (clone->address() != old_pointer_space_top) {
1045     // Alas, got allocated from free list, we cannot do checks.
1048   CHECK(heap->old_pointer_space()->Contains(clone->address()));
// Verifies lazy code flushing: after enough full GCs an unreferenced
// function's code is discarded (shared function info no longer compiled),
// and calling the function again triggers recompilation.
// NOTE(review): this listing elides some lines (e.g. the body of `source`
// and several closing braces) — confirm against the full file.
1052 UNINITIALIZED_TEST(TestCodeFlushing) {
1053 // If we do not flush code this test is invalid.
1054 if (!FLAG_flush_code) return;
1055 i::FLAG_allow_natives_syntax = true;
1056 i::FLAG_optimize_for_size = false;
1057 v8::Isolate* isolate = v8::Isolate::New();
1058 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
1060 Factory* factory = i_isolate->factory();
1062 v8::HandleScope scope(isolate);
1063 v8::Context::New(isolate)->Enter();
1064 const char* source =
1071 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1073 // This compile will add the code to the compilation cache.
1075 v8::HandleScope scope(isolate);
1079 // Check function is compiled.
1080 Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
1081 foo_name).ToHandleChecked();
1082 CHECK(func_value->IsJSFunction());
1083 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1084 CHECK(function->shared()->is_compiled());
1086 // The code will survive at least two GCs.
1087 i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1088 i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1089 CHECK(function->shared()->is_compiled());
1091 // Simulate several GCs that use full marking.
1092 const int kAgingThreshold = 6;
1093 for (int i = 0; i < kAgingThreshold; i++) {
1094 i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1097 // foo should no longer be in the compilation cache
1098 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1099 CHECK(!function->is_compiled() || function->IsOptimized());
1100 // Call foo to get it recompiled.
1101 CompileRun("foo()");
1102 CHECK(function->shared()->is_compiled());
1103 CHECK(function->is_compiled());
// With --optimize-for-size, code that has been run at most once is pre-aged:
// a single additional full GC is enough to flush it. Running the function
// again resets its age so it survives two further GCs.
1110 TEST(TestCodeFlushingPreAged) {
1111 // If we do not flush code this test is invalid.
1112 if (!FLAG_flush_code) return;
1113 i::FLAG_allow_natives_syntax = true;
1114 i::FLAG_optimize_for_size = true;
1115 CcTest::InitializeVM();
1116 Isolate* isolate = CcTest::i_isolate();
1117 Factory* factory = isolate->factory();
1118 v8::HandleScope scope(CcTest::isolate());
1119 const char* source = "function foo() {"
1125 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1127 // Compile foo, but don't run it.
1128 { v8::HandleScope scope(CcTest::isolate());
1132 // Check function is compiled.
1133 Handle<Object> func_value =
1134 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1135 CHECK(func_value->IsJSFunction());
1136 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1137 CHECK(function->shared()->is_compiled());
1139 // The code has been run so will survive at least one GC.
1140 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1141 CHECK(function->shared()->is_compiled());
1143 // The code was only run once, so it should be pre-aged and collected on the
1145 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1146 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1148 // Execute the function again twice, and ensure it is reset to the young age.
1149 { v8::HandleScope scope(CcTest::isolate());
1154 // The code will survive at least two GC now that it is young again.
1155 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1156 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1157 CHECK(function->shared()->is_compiled());
1159 // Simulate several GCs that use full marking.
1160 const int kAgingThreshold = 6;
1161 for (int i = 0; i < kAgingThreshold; i++) {
1162 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1165 // foo should no longer be in the compilation cache
1166 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1167 CHECK(!function->is_compiled() || function->IsOptimized());
1168 // Call foo to get it recompiled.
1169 CompileRun("foo()");
1170 CHECK(function->shared()->is_compiled());
1171 CHECK(function->is_compiled());
// Same flushing scenario as TestCodeFlushing, but driven by incremental
// marking cycles. Also checks that a function optimized while enqueued as
// a flushing candidate keeps its code (queue stays sane after a final GC).
1175 TEST(TestCodeFlushingIncremental) {
1176 // If we do not flush code this test is invalid.
1177 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1178 i::FLAG_allow_natives_syntax = true;
1179 i::FLAG_optimize_for_size = false;
1180 CcTest::InitializeVM();
1181 Isolate* isolate = CcTest::i_isolate();
1182 Factory* factory = isolate->factory();
1183 v8::HandleScope scope(CcTest::isolate());
1184 const char* source = "function foo() {"
1190 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1192 // This compile will add the code to the compilation cache.
1193 { v8::HandleScope scope(CcTest::isolate());
1197 // Check function is compiled.
1198 Handle<Object> func_value =
1199 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1200 CHECK(func_value->IsJSFunction());
1201 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1202 CHECK(function->shared()->is_compiled());
1204 // The code will survive at least two GCs.
1205 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1206 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1207 CHECK(function->shared()->is_compiled());
1209 // Simulate several GCs that use incremental marking.
1210 const int kAgingThreshold = 6;
1211 for (int i = 0; i < kAgingThreshold; i++) {
1212 SimulateIncrementalMarking(CcTest::heap());
1213 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1215 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1216 CHECK(!function->is_compiled() || function->IsOptimized());
1218 // This compile will compile the function again.
1219 { v8::HandleScope scope(CcTest::isolate());
1220 CompileRun("foo();");
1223 // Simulate several GCs that use incremental marking but make sure
1224 // the loop breaks once the function is enqueued as a candidate.
1225 for (int i = 0; i < kAgingThreshold; i++) {
1226 SimulateIncrementalMarking(CcTest::heap());
// A non-undefined next_function_link indicates foo is on the candidates list.
1227 if (!function->next_function_link()->IsUndefined()) break;
1228 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1231 // Force optimization while incremental marking is active and while
1232 // the function is enqueued as a candidate.
1233 { v8::HandleScope scope(CcTest::isolate());
1234 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1237 // Simulate one final GC to make sure the candidate queue is sane.
1238 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1239 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1240 CHECK(function->is_compiled() || !function->IsOptimized());
// Checks that a scavenge performed while incremental marking has enqueued
// code-flushing candidates (one of which is then killed) does not corrupt
// the candidate queue.
1244 TEST(TestCodeFlushingIncrementalScavenge) {
1245 // If we do not flush code this test is invalid.
1246 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1247 i::FLAG_allow_natives_syntax = true;
1248 i::FLAG_optimize_for_size = false;
1249 CcTest::InitializeVM();
1250 Isolate* isolate = CcTest::i_isolate();
1251 Factory* factory = isolate->factory();
1252 v8::HandleScope scope(CcTest::isolate());
1253 const char* source = "var foo = function() {"
1259 "var bar = function() {"
1263 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1264 Handle<String> bar_name = factory->InternalizeUtf8String("bar");
1266 // Perform one initial GC to enable code flushing.
1267 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1269 // This compile will add the code to the compilation cache.
1270 { v8::HandleScope scope(CcTest::isolate());
1274 // Check functions are compiled.
1275 Handle<Object> func_value =
1276 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1277 CHECK(func_value->IsJSFunction());
1278 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1279 CHECK(function->shared()->is_compiled());
1280 Handle<Object> func_value2 =
1281 Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
1282 CHECK(func_value2->IsJSFunction());
1283 Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
1284 CHECK(function2->shared()->is_compiled());
1286 // Clear references to functions so that one of them can die.
1287 { v8::HandleScope scope(CcTest::isolate());
1288 CompileRun("foo = 0; bar = 0;");
1291 // Bump the code age so that flushing is triggered while the function
1292 // object is still located in new-space.
1293 const int kAgingThreshold = 6;
1294 for (int i = 0; i < kAgingThreshold; i++) {
1295 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1296 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1299 // Simulate incremental marking so that the functions are enqueued as
1300 // code flushing candidates. Then kill one of the functions. Finally
1301 // perform a scavenge while incremental marking is still running.
1302 SimulateIncrementalMarking(CcTest::heap());
// Overwriting the handle location drops the only strong root to function2.
1303 *function2.location() = NULL;
1304 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
1306 // Simulate one final GC to make sure the candidate queue is sane.
1307 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1308 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1309 CHECK(!function->is_compiled() || function->IsOptimized());
// Checks that aborting incremental marking (here via debugger breakpoint
// activity) while a function is enqueued as a flushing candidate leaves the
// candidate queue sane and the optimized function's code intact.
1313 TEST(TestCodeFlushingIncrementalAbort) {
1314 // If we do not flush code this test is invalid.
1315 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1316 i::FLAG_allow_natives_syntax = true;
1317 i::FLAG_optimize_for_size = false;
1318 CcTest::InitializeVM();
1319 Isolate* isolate = CcTest::i_isolate();
1320 Factory* factory = isolate->factory();
1321 Heap* heap = isolate->heap();
1322 v8::HandleScope scope(CcTest::isolate());
1323 const char* source = "function foo() {"
1329 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1331 // This compile will add the code to the compilation cache.
1332 { v8::HandleScope scope(CcTest::isolate());
1336 // Check function is compiled.
1337 Handle<Object> func_value =
1338 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1339 CHECK(func_value->IsJSFunction());
1340 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1341 CHECK(function->shared()->is_compiled());
1343 // The code will survive at least two GCs.
1344 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1345 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1346 CHECK(function->shared()->is_compiled());
1348 // Bump the code age so that flushing is triggered.
1349 const int kAgingThreshold = 6;
1350 for (int i = 0; i < kAgingThreshold; i++) {
1351 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1354 // Simulate incremental marking so that the function is enqueued as
1355 // code flushing candidate.
1356 SimulateIncrementalMarking(heap);
1358 // Enable the debugger and add a breakpoint while incremental marking
1359 // is running so that incremental marking aborts and code flushing is
1362 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1363 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
1364 isolate->debug()->ClearAllBreakPoints();
1366 // Force optimization now that code flushing is disabled.
1367 { v8::HandleScope scope(CcTest::isolate());
1368 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1371 // Simulate one final GC to make sure the candidate queue is sane.
1372 heap->CollectAllGarbage(Heap::kNoGCFlags);
1373 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1374 CHECK(function->is_compiled() || !function->IsOptimized());
// Exercises the script compilation cache's aging behavior: first compile
// stores only a hash, second compile stores the real entry, code aging
// evicts the entry, and pre-aging the hash generations prevents caching.
1378 TEST(CompilationCacheCachingBehavior) {
1379 // If we do not flush code, or have the compilation cache turned off, this
1381 if (!FLAG_flush_code || !FLAG_flush_code_incrementally ||
1382 !FLAG_compilation_cache) {
1385 CcTest::InitializeVM();
1386 Isolate* isolate = CcTest::i_isolate();
1387 Factory* factory = isolate->factory();
1388 Heap* heap = isolate->heap();
1389 CompilationCache* compilation_cache = isolate->compilation_cache();
1391 v8::HandleScope scope(CcTest::isolate());
1392 const char* raw_source =
1399 Handle<String> source = factory->InternalizeUtf8String(raw_source);
1400 Handle<Context> native_context = isolate->native_context();
1403 v8::HandleScope scope(CcTest::isolate());
1404 CompileRun(raw_source);
1407 // On first compilation, only a hash is inserted in the code cache. We can't
1409 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
1410 source, Handle<Object>(), 0, 0, true, native_context);
1411 CHECK(info.is_null());
1414 v8::HandleScope scope(CcTest::isolate());
1415 CompileRun(raw_source);
1418 // On second compilation, the hash is replaced by a real cache entry mapping
1419 // the source to the shared function info containing the code.
1420 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, true,
1422 CHECK(!info.is_null());
1424 heap->CollectAllGarbage(Heap::kNoGCFlags);
1426 // On second compilation, the hash is replaced by a real cache entry mapping
1427 // the source to the shared function info containing the code.
1428 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, true,
1430 CHECK(!info.is_null());
// Age the cached code all the way to "old" so the next GC can clear it.
1432 while (!info.ToHandleChecked()->code()->IsOld()) {
1433 info.ToHandleChecked()->code()->MakeOlder(NO_MARKING_PARITY);
1436 heap->CollectAllGarbage(Heap::kNoGCFlags);
1437 // Ensure code aging cleared the entry from the cache.
1438 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, true,
1440 CHECK(info.is_null());
1443 v8::HandleScope scope(CcTest::isolate());
1444 CompileRun(raw_source);
1447 // On first compilation, only a hash is inserted in the code cache. We can't
1449 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, true,
1451 CHECK(info.is_null());
// Age the cache past all hash generations so the stored hash expires.
1453 for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
1454 compilation_cache->MarkCompactPrologue();
1458 v8::HandleScope scope(CcTest::isolate());
1459 CompileRun(raw_source);
1462 // If we aged the cache before caching the script, ensure that we didn't cache
1463 // on next compilation.
1464 info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, true,
1466 CHECK(info.is_null());
1470 // Count the number of native contexts in the weak list of native contexts.
// Walks the heap's native_contexts_list via NEXT_CONTEXT_LINK until the
// undefined sentinel terminates the list.
1471 int CountNativeContexts() {
1473 Object* object = CcTest::heap()->native_contexts_list();
1474 while (!object->IsUndefined()) {
1476 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1482 // Count the number of user functions in the weak list of optimized
1483 // functions attached to a native context.
// Builtins terminate the traversal: only non-builtin JSFunctions count.
1484 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1486 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1487 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1488 while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1490 object = JSFunction::cast(object)->next_function_link();
// Verifies the internal weak lists of native contexts and of optimized
// functions: scavenges treat the links as strong (counts unchanged), while
// mark-compact collections drop dead entries one at a time.
1496 TEST(TestInternalWeakLists) {
1497 v8::V8::Initialize();
1499 // Some flags turn Scavenge collections into Mark-sweep collections
1500 // and hence are incompatible with this test case.
1501 if (FLAG_gc_global || FLAG_stress_compaction) return;
1503 static const int kNumTestContexts = 10;
1505 Isolate* isolate = CcTest::i_isolate();
1506 Heap* heap = isolate->heap();
1507 HandleScope scope(isolate);
1508 v8::Handle<v8::Context> ctx[kNumTestContexts];
1510 CHECK_EQ(0, CountNativeContexts());
1512 // Create a number of global contexts which get linked together.
1513 for (int i = 0; i < kNumTestContexts; i++) {
1514 ctx[i] = v8::Context::New(CcTest::isolate());
1516 // Collect garbage that might have been created by one of the
1517 // installed extensions.
1518 isolate->compilation_cache()->Clear();
1519 heap->CollectAllGarbage(Heap::kNoGCFlags);
1521 bool opt = (FLAG_always_opt && isolate->use_crankshaft());
1523 CHECK_EQ(i + 1, CountNativeContexts());
1527 // Create a handle scope so no function objects get stuck in the outer
1529 HandleScope scope(isolate);
1530 const char* source = "function f1() { };"
1531 "function f2() { };"
1532 "function f3() { };"
1533 "function f4() { };"
1534 "function f5() { };";
1536 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1538 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
1540 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1542 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1544 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1546 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1548 // Remove function f1, and
1549 CompileRun("f1=null");
1551 // Scavenge treats these references as strong.
1552 for (int j = 0; j < 10; j++) {
1553 CcTest::heap()->CollectGarbage(NEW_SPACE);
1554 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1557 // Mark compact handles the weak references.
1558 isolate->compilation_cache()->Clear();
1559 heap->CollectAllGarbage(Heap::kNoGCFlags);
1560 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1562 // Get rid of f3 and f5 in the same way.
1563 CompileRun("f3=null");
1564 for (int j = 0; j < 10; j++) {
1565 CcTest::heap()->CollectGarbage(NEW_SPACE);
1566 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1568 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1569 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1570 CompileRun("f5=null");
1571 for (int j = 0; j < 10; j++) {
1572 CcTest::heap()->CollectGarbage(NEW_SPACE);
1573 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1575 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1576 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1581 // Force compilation cache cleanup.
1582 CcTest::heap()->NotifyContextDisposed();
1583 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1585 // Dispose the native contexts one by one.
1586 for (int i = 0; i < kNumTestContexts; i++) {
1587 // TODO(dcarney): is there a better way to do this?
// Directly overwrite the persistent handle's slot to drop the context root.
1588 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1589 *unsafe = CcTest::heap()->undefined_value();
1592 // Scavenge treats these references as strong.
1593 for (int j = 0; j < 10; j++) {
1594 CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1595 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1598 // Mark compact handles the weak references.
1599 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1600 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1603 CHECK_EQ(0, CountNativeContexts());
1607 // Count the number of native contexts in the weak list of native contexts
1608 // causing a GC after the specified number of elements.
// Uses Handle<Object> (not raw pointers) so the traversal survives the GC
// it triggers mid-walk.
1609 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1610 Heap* heap = isolate->heap();
1612 Handle<Object> object(heap->native_contexts_list(), isolate);
1613 while (!object->IsUndefined()) {
1615 if (count == n) heap->CollectAllGarbage(Heap::kNoGCFlags);
1617 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1624 // Count the number of user functions in the weak list of optimized
1625 // functions attached to a native context causing a GC after the
1626 // specified number of elements.
// As above, handles keep list nodes reachable across the mid-walk GC.
1627 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1630 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1631 Isolate* isolate = icontext->GetIsolate();
1632 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1634 while (object->IsJSFunction() &&
1635 !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1637 if (count == n) isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
1638 object = Handle<Object>(
1639 Object::cast(JSFunction::cast(*object)->next_function_link()),
// Verifies that traversing the weak lists remains correct even when a full
// GC is triggered in the middle of the traversal.
1646 TEST(TestInternalWeakListsTraverseWithGC) {
1647 v8::V8::Initialize();
1648 Isolate* isolate = CcTest::i_isolate();
1650 static const int kNumTestContexts = 10;
1652 HandleScope scope(isolate);
1653 v8::Handle<v8::Context> ctx[kNumTestContexts];
1655 CHECK_EQ(0, CountNativeContexts());
1657 // Create a number of contexts and check the length of the weak list both
1658 // with and without GCs while iterating the list.
1659 for (int i = 0; i < kNumTestContexts; i++) {
1660 ctx[i] = v8::Context::New(CcTest::isolate());
1661 CHECK_EQ(i + 1, CountNativeContexts());
1662 CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
1665 bool opt = (FLAG_always_opt && isolate->use_crankshaft());
1667 // Compile a number of functions the length of the weak list of optimized
1668 // functions both with and without GCs while iterating the list.
1670 const char* source = "function f1() { };"
1671 "function f2() { };"
1672 "function f3() { };"
1673 "function f4() { };"
1674 "function f5() { };";
1676 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1678 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
1679 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1681 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
1682 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1684 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
1685 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1687 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
1688 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1690 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
1691 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
// Verifies Heap::SizeOfObjects() accounting: it grows by exactly the filler
// size per tenured allocation and returns to the initial size after a full
// GC, independent of whether concurrent sweeping has finished.
1697 TEST(TestSizeOfObjects) {
1698 v8::V8::Initialize();
1700 // Get initial heap size after several full GCs, which will stabilize
1701 // the heap size and return with sweeping finished completely.
1702 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1703 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1704 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1705 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1706 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1707 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1708 if (collector->sweeping_in_progress()) {
1709 collector->EnsureSweepingCompleted();
1711 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1714 // Allocate objects on several different old-space pages so that
1715 // concurrent sweeper threads will be busy sweeping the old space on
1716 // subsequent GC runs.
1717 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1718 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1719 for (int i = 1; i <= 100; i++) {
1720 CcTest::test_heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
1721 CHECK_EQ(initial_size + i * filler_size,
1722 static_cast<int>(CcTest::heap()->SizeOfObjects()));
1726 // The heap size should go back to initial size after a full GC, even
1727 // though sweeping didn't finish yet.
1728 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
1730 // Normally sweeping would not be complete here, but no guarantees.
1732 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1734 // Waiting for sweeper threads should not change heap size.
1735 if (collector->sweeping_in_progress()) {
1736 collector->EnsureSweepingCompleted();
1738 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
// Cross-checks Heap::SizeOfObjects() against a sum of object sizes from
// HeapIterator; the two are allowed to disagree by at most 5% of the
// larger value.
1742 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1743 CcTest::InitializeVM();
1744 HeapIterator iterator(CcTest::heap());
1745 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1746 intptr_t size_of_objects_2 = 0;
1747 for (HeapObject* obj = iterator.next();
1749 obj = iterator.next()) {
// Free-space fillers are not "objects" and are excluded from the sum.
1750 if (!obj->IsFreeSpace()) {
1751 size_of_objects_2 += obj->Size();
1754 // Delta must be within 5% of the larger result.
1755 // TODO(gc): Tighten this up by distinguishing between byte
1756 // arrays that are real and those that merely mark free space
1758 if (size_of_objects_1 > size_of_objects_2) {
1759 intptr_t delta = size_of_objects_1 - size_of_objects_2;
1760 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1761 "Iterator: %" V8_PTR_PREFIX "d, "
1762 "delta: %" V8_PTR_PREFIX "d\n",
1763 size_of_objects_1, size_of_objects_2, delta);
1764 CHECK_GT(size_of_objects_1 / 20, delta);
1766 intptr_t delta = size_of_objects_2 - size_of_objects_1;
1767 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1768 "Iterator: %" V8_PTR_PREFIX "d, "
1769 "delta: %" V8_PTR_PREFIX "d\n",
1770 size_of_objects_1, size_of_objects_2, delta);
1771 CHECK_GT(size_of_objects_2 / 20, delta);
// Helper: saturates new space with small fixed arrays so a subsequent
// scavenge cannot shrink it.
1776 static void FillUpNewSpace(NewSpace* new_space) {
1777 // Fill up new space to the point that it is completely full. Make sure
1778 // that the scavenger does not undo the filling.
1779 Heap* heap = new_space->heap();
1780 Isolate* isolate = heap->isolate();
1781 Factory* factory = isolate->factory();
1782 HandleScope scope(isolate);
// AlwaysAllocateScope prevents allocation failures from triggering a GC.
1783 AlwaysAllocateScope always_allocate(isolate);
1784 intptr_t available = new_space->Capacity() - new_space->Size();
1785 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
1786 for (intptr_t i = 0; i < number_of_fillers; i++) {
1787 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
// Verifies new-space resizing invariants: Grow doubles capacity, Shrink
// halves it only when the space is empty enough, and shrinking below the
// current size or repeatedly is a no-op.
1792 TEST(GrowAndShrinkNewSpace) {
1793 CcTest::InitializeVM();
1794 Heap* heap = CcTest::heap();
1795 NewSpace* new_space = heap->new_space();
1797 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
1798 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1799 // The max size cannot exceed the reserved size, since semispaces must be
1800 // always within the reserved space. We can't test new space growing and
1801 // shrinking if the reserved size is the same as the minimum (initial) size.
1805 // Explicitly growing should double the space capacity.
1806 intptr_t old_capacity, new_capacity;
1807 old_capacity = new_space->TotalCapacity();
1809 new_capacity = new_space->TotalCapacity();
1810 CHECK(2 * old_capacity == new_capacity);
1812 old_capacity = new_space->TotalCapacity();
1813 FillUpNewSpace(new_space);
1814 new_capacity = new_space->TotalCapacity();
// Filling the space must not change its capacity.
1815 CHECK(old_capacity == new_capacity);
1817 // Explicitly shrinking should not affect space capacity.
1818 old_capacity = new_space->TotalCapacity();
1819 new_space->Shrink();
1820 new_capacity = new_space->TotalCapacity();
1821 CHECK(old_capacity == new_capacity);
1823 // Let the scavenger empty the new space.
1824 heap->CollectGarbage(NEW_SPACE);
1825 CHECK_LE(new_space->Size(), old_capacity);
1827 // Explicitly shrinking should halve the space capacity.
1828 old_capacity = new_space->TotalCapacity();
1829 new_space->Shrink();
1830 new_capacity = new_space->TotalCapacity();
1831 CHECK(old_capacity == 2 * new_capacity);
1833 // Consecutive shrinking should not affect space capacity.
1834 old_capacity = new_space->TotalCapacity();
1835 new_space->Shrink();
1836 new_space->Shrink();
1837 new_space->Shrink();
1838 new_capacity = new_space->TotalCapacity();
1839 CHECK(old_capacity == new_capacity);
// Verifies that CollectAllAvailableGarbage() shrinks a grown (and filled)
// new space back to its previous capacity.
1843 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
1844 CcTest::InitializeVM();
1845 Heap* heap = CcTest::heap();
1846 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
1847 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1848 // The max size cannot exceed the reserved size, since semispaces must be
1849 // always within the reserved space. We can't test new space growing and
1850 // shrinking if the reserved size is the same as the minimum (initial) size.
1854 v8::HandleScope scope(CcTest::isolate());
1855 NewSpace* new_space = heap->new_space();
1856 intptr_t old_capacity, new_capacity;
1857 old_capacity = new_space->TotalCapacity();
1859 new_capacity = new_space->TotalCapacity();
1860 CHECK(2 * old_capacity == new_capacity);
1861 FillUpNewSpace(new_space);
1862 heap->CollectAllAvailableGarbage();
1863 new_capacity = new_space->TotalCapacity();
1864 CHECK(old_capacity == new_capacity);
// Helper: counts global objects currently on the heap by iterating every
// heap object.
1868 static int NumberOfGlobalObjects() {
1870 HeapIterator iterator(CcTest::heap());
1871 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1872 if (obj->IsGlobalObject()) count++;
1878 // Test that we don't embed maps from foreign contexts into
// optimized code: after ctx2 is disposed, a GC must be able to reclaim it
// (global-object count drops from 4 to 2 and then to 0), proving the
// optimized load of o.x did not retain ctx1's map.
1880 TEST(LeakNativeContextViaMap) {
1881 i::FLAG_allow_natives_syntax = true;
1882 v8::Isolate* isolate = CcTest::isolate();
1883 v8::HandleScope outer_scope(isolate);
1884 v8::Persistent<v8::Context> ctx1p;
1885 v8::Persistent<v8::Context> ctx2p;
1887 v8::HandleScope scope(isolate);
1888 ctx1p.Reset(isolate, v8::Context::New(isolate));
1889 ctx2p.Reset(isolate, v8::Context::New(isolate));
1890 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// Each context contributes two global objects (global proxy + global).
1893 CcTest::heap()->CollectAllAvailableGarbage();
1894 CHECK_EQ(4, NumberOfGlobalObjects());
1897 v8::HandleScope inner_scope(isolate);
1898 CompileRun("var v = {x: 42}");
1899 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1900 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1901 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1903 ctx2->Global()->Set(v8_str("o"), v);
1904 v8::Local<v8::Value> res = CompileRun(
1905 "function f() { return o.x; }"
1906 "for (var i = 0; i < 10; ++i) f();"
1907 "%OptimizeFunctionOnNextCall(f);"
1909 CHECK_EQ(42, res->Int32Value());
1910 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1912 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
1914 isolate->ContextDisposedNotification();
1916 CcTest::heap()->CollectAllAvailableGarbage();
1917 CHECK_EQ(2, NumberOfGlobalObjects());
1919 CcTest::heap()->CollectAllAvailableGarbage();
1920 CHECK_EQ(0, NumberOfGlobalObjects());
1924 // Test that we don't embed functions from foreign contexts into
// optimized code: same structure as LeakNativeContextViaMap, but the
// cross-context value passed to f is a function that gets called.
1926 TEST(LeakNativeContextViaFunction) {
1927 i::FLAG_allow_natives_syntax = true;
1928 v8::Isolate* isolate = CcTest::isolate();
1929 v8::HandleScope outer_scope(isolate);
1930 v8::Persistent<v8::Context> ctx1p;
1931 v8::Persistent<v8::Context> ctx2p;
1933 v8::HandleScope scope(isolate);
1934 ctx1p.Reset(isolate, v8::Context::New(isolate));
1935 ctx2p.Reset(isolate, v8::Context::New(isolate));
1936 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1939 CcTest::heap()->CollectAllAvailableGarbage();
1940 CHECK_EQ(4, NumberOfGlobalObjects());
1943 v8::HandleScope inner_scope(isolate);
1944 CompileRun("var v = function() { return 42; }");
1945 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1946 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1947 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1949 ctx2->Global()->Set(v8_str("o"), v);
1950 v8::Local<v8::Value> res = CompileRun(
1951 "function f(x) { return x(); }"
1952 "for (var i = 0; i < 10; ++i) f(o);"
1953 "%OptimizeFunctionOnNextCall(f);"
1955 CHECK_EQ(42, res->Int32Value());
1956 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
1960 isolate->ContextDisposedNotification();
1962 CcTest::heap()->CollectAllAvailableGarbage();
1963 CHECK_EQ(2, NumberOfGlobalObjects());
1965 CcTest::heap()->CollectAllAvailableGarbage();
1966 CHECK_EQ(0, NumberOfGlobalObjects());
// Same leak check as above, but the cross-context reference is an array read
// via a keyed load (o[0]) in optimized code. After both contexts are disposed,
// no global object may remain reachable.
1970 TEST(LeakNativeContextViaMapKeyed) {
1971 i::FLAG_allow_natives_syntax = true;
1972 v8::Isolate* isolate = CcTest::isolate();
1973 v8::HandleScope outer_scope(isolate);
1974 v8::Persistent<v8::Context> ctx1p;
1975 v8::Persistent<v8::Context> ctx2p;
1977 v8::HandleScope scope(isolate);
1978 ctx1p.Reset(isolate, v8::Context::New(isolate));
1979 ctx2p.Reset(isolate, v8::Context::New(isolate));
1980 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// With both contexts alive, four global objects are expected to survive GC.
1983 CcTest::heap()->CollectAllAvailableGarbage();
1984 CHECK_EQ(4, NumberOfGlobalObjects());
1987 v8::HandleScope inner_scope(isolate);
1988 CompileRun("var v = [42, 43]");
1989 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1990 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1991 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
// Expose ctx1's array inside ctx2 and optimize a keyed load on it there.
1993 ctx2->Global()->Set(v8_str("o"), v);
1994 v8::Local<v8::Value> res = CompileRun(
1995 "function f() { return o[0]; }"
1996 "for (var i = 0; i < 10; ++i) f();"
1997 "%OptimizeFunctionOnNextCall(f);"
1999 CHECK_EQ(42, res->Int32Value());
// Clear the cross-context reference before disposing the contexts.
2000 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2004 isolate->ContextDisposedNotification();
2006 CcTest::heap()->CollectAllAvailableGarbage();
2007 CHECK_EQ(2, NumberOfGlobalObjects());
2009 CcTest::heap()->CollectAllAvailableGarbage();
2010 CHECK_EQ(0, NumberOfGlobalObjects());
// Same leak check again, this time exercising a load through the prototype
// chain of an object created in ctx1 (the f() body lines are elided in this
// chunk — presumably it reads a property inherited from the foreign object).
2014 TEST(LeakNativeContextViaMapProto) {
2015 i::FLAG_allow_natives_syntax = true;
2016 v8::Isolate* isolate = CcTest::isolate();
2017 v8::HandleScope outer_scope(isolate);
2018 v8::Persistent<v8::Context> ctx1p;
2019 v8::Persistent<v8::Context> ctx2p;
2021 v8::HandleScope scope(isolate);
2022 ctx1p.Reset(isolate, v8::Context::New(isolate));
2023 ctx2p.Reset(isolate, v8::Context::New(isolate));
2024 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// With both contexts alive, four global objects are expected to survive GC.
2027 CcTest::heap()->CollectAllAvailableGarbage();
2028 CHECK_EQ(4, NumberOfGlobalObjects());
2031 v8::HandleScope inner_scope(isolate);
2032 CompileRun("var v = { y: 42}");
2033 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2034 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2035 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2037 ctx2->Global()->Set(v8_str("o"), v);
2038 v8::Local<v8::Value> res = CompileRun(
2044 "for (var i = 0; i < 10; ++i) f();"
2045 "%OptimizeFunctionOnNextCall(f);"
2047 CHECK_EQ(42, res->Int32Value());
// Clear the cross-context reference before disposing the contexts.
2048 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2052 isolate->ContextDisposedNotification();
2054 CcTest::heap()->CollectAllAvailableGarbage();
2055 CHECK_EQ(2, NumberOfGlobalObjects());
2057 CcTest::heap()->CollectAllAvailableGarbage();
2058 CHECK_EQ(0, NumberOfGlobalObjects());
// Regression-style test: run an optimized function containing `instanceof`
// while incremental marking is in progress, and verify the heap stays
// consistent (heap verification is enabled via FLAG_verify_heap) — i.e. the
// instanceof stub's writes are covered by the incremental write barrier.
2062 TEST(InstanceOfStubWriteBarrier) {
2063 i::FLAG_allow_natives_syntax = true;
2065 i::FLAG_verify_heap = true;
2068 CcTest::InitializeVM();
// Only meaningful with the optimizing compiler; marking-deque overflow mode
// would change the marking behavior this test depends on.
2069 if (!CcTest::i_isolate()->use_crankshaft()) return;
2070 if (i::FLAG_force_marking_deque_overflows) return;
2071 v8::HandleScope outer_scope(CcTest::isolate());
2074 v8::HandleScope scope(CcTest::isolate());
2076 "function foo () { }"
2077 "function mkbar () { return new (new Function(\"\")) (); }"
2078 "function f (x) { return (x instanceof foo); }"
2079 "function g () { f(mkbar()); }"
2080 "f(new foo()); f(new foo());"
2081 "%OptimizeFunctionOnNextCall(f);"
2082 "f(new foo()); g();");
2085 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2089 Handle<JSFunction> f =
2090 v8::Utils::OpenHandle(
2091 *v8::Handle<v8::Function>::Cast(
2092 CcTest::global()->Get(v8_str("f"))));
2094 CHECK(f->IsOptimized());
// Step incremental marking until f's code object is marked black (or marking
// stops), so the subsequent call to g() writes into an already-marked object.
2096 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
2097 !marking->IsStopped()) {
2098 // Discard any pending GC requests otherwise we will get GC when we enter
// the call below (continuation of this comment is elided in this chunk).
2100 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2103 CHECK(marking->IsMarking());
2106 v8::HandleScope scope(CcTest::isolate());
2107 v8::Handle<v8::Object> global = CcTest::global();
2108 v8::Handle<v8::Function> g =
2109 v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
2110 g->Call(global, 0, NULL);
// Finish marking quickly and collect; heap verification catches a missing
// write barrier here.
2113 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2114 CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE);
// Verifies that GC clears only the dead entries of a map's prototype
// transition array, compacts the surviving entries, and records slots
// correctly when a prototype lives on an evacuation candidate page.
2118 TEST(PrototypeTransitionClearing) {
2119 if (FLAG_never_compact) return;
2120 CcTest::InitializeVM();
2121 Isolate* isolate = CcTest::i_isolate();
2122 Factory* factory = isolate->factory();
2123 v8::HandleScope scope(CcTest::isolate());
2125 CompileRun("var base = {};");
2126 Handle<JSObject> baseObject =
2127 v8::Utils::OpenHandle(
2128 *v8::Handle<v8::Object>::Cast(
2129 CcTest::global()->Get(v8_str("base"))));
2130 int initialTransitions = baseObject->map()->NumberOfProtoTransitions();
// Create 10 proto transitions; only those with i >= 3 are kept alive via the
// (elided) `live` array, so 3 entries become garbage.
2134 "for (var i = 0; i < 10; i++) {"
2136 " var prototype = {};"
2137 " object.__proto__ = prototype;"
2138 " if (i >= 3) live.push(object, prototype);"
2141 // Verify that only dead prototype transitions are cleared.
2142 CHECK_EQ(initialTransitions + 10,
2143 baseObject->map()->NumberOfProtoTransitions());
2144 CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2145 const int transitions = 10 - 3;
2146 CHECK_EQ(initialTransitions + transitions,
2147 baseObject->map()->NumberOfProtoTransitions());
2149 // Verify that prototype transitions array was compacted: every surviving
// entry must hold a valid (map, JSObject prototype) pair.
2150 FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
2151 for (int i = initialTransitions; i < initialTransitions + transitions; i++) {
2152 int j = Map::kProtoTransitionHeaderSize +
2153 i * Map::kProtoTransitionElementsPerEntry;
2154 CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
2155 Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
2156 CHECK(proto->IsJSObject());
2159 // Make sure next prototype is placed on an old-space evacuation candidate.
2160 Handle<JSObject> prototype;
2161 PagedSpace* space = CcTest::heap()->old_pointer_space();
2163 AlwaysAllocateScope always_allocate(isolate);
2164 SimulateFullSpace(space);
2165 prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
2168 // Add a prototype on an evacuation candidate and verify that transition
2169 // clearing correctly records slots in prototype transition array.
2170 i::FLAG_always_compact = true;
2171 Handle<Map> map(baseObject->map());
// The transition array itself must not be on the evacuated page, while the
// prototype must be — otherwise the slot-recording path isn't exercised.
2172 CHECK(!space->LastPage()->Contains(
2173 map->GetPrototypeTransitions()->address()));
2174 CHECK(space->LastPage()->Contains(prototype->address()));
// Checks that bumping the global IC age (via context-disposed + idle
// notifications) while incremental marking is active resets a function's
// SharedFunctionInfo counters (opt_count, profiler ticks) once marking
// finishes.
2178 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2179 i::FLAG_stress_compaction = false;
2180 i::FLAG_allow_natives_syntax = true;
2182 i::FLAG_verify_heap = true;
2185 CcTest::InitializeVM();
2186 if (!CcTest::i_isolate()->use_crankshaft()) return;
2187 v8::HandleScope outer_scope(CcTest::isolate());
2190 v8::HandleScope scope(CcTest::isolate());
2194 " for (var i = 0; i < 100; i++) s += i;"
2198 "%OptimizeFunctionOnNextCall(f);"
2201 Handle<JSFunction> f =
2202 v8::Utils::OpenHandle(
2203 *v8::Handle<v8::Function>::Cast(
2204 CcTest::global()->Get(v8_str("f"))));
2205 CHECK(f->IsOptimized());
2207 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2211 // The following two calls will increment CcTest::heap()->global_ic_age().
2212 const int kLongIdlePauseInMs = 1000;
2213 CcTest::isolate()->ContextDisposedNotification();
2214 CcTest::isolate()->IdleNotification(kLongIdlePauseInMs);
// Drive incremental marking to completion by hand.
2216 while (!marking->IsStopped() && !marking->IsComplete()) {
2217 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2219 if (!marking->IsStopped() || marking->should_hurry()) {
2220 // We don't normally finish a GC via Step(), we normally finish by
2221 // setting the stack guard and then do the final steps in the stack
2222 // guard interrupt. But here we didn't ask for that, and there is no
2223 // JS code running to trigger the interrupt, so we explicitly finalize
// with a full collection.
2225 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags,
2226 "Test finalizing incremental mark-sweep");
// After the age bump + GC, the counters must have been reset.
2229 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2230 CHECK_EQ(0, f->shared()->opt_count());
2231 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Same counter-reset check as the incremental-marking variant above, but with
// incremental marking aborted so the IdleNotification performs a full
// mark-sweep directly.
2235 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2236 i::FLAG_stress_compaction = false;
2237 i::FLAG_allow_natives_syntax = true;
2239 i::FLAG_verify_heap = true;
2242 CcTest::InitializeVM();
2243 if (!CcTest::i_isolate()->use_crankshaft()) return;
2244 v8::HandleScope outer_scope(CcTest::isolate());
2247 v8::HandleScope scope(CcTest::isolate());
2251 " for (var i = 0; i < 100; i++) s += i;"
2255 "%OptimizeFunctionOnNextCall(f);"
2258 Handle<JSFunction> f =
2259 v8::Utils::OpenHandle(
2260 *v8::Handle<v8::Function>::Cast(
2261 CcTest::global()->Get(v8_str("f"))));
2262 CHECK(f->IsOptimized());
// Ensure no incremental marking is active so idle GC is a full collection.
2264 CcTest::heap()->incremental_marking()->Abort();
2266 // The following two calls will increment CcTest::heap()->global_ic_age().
2267 // Since incremental marking is off, IdleNotification will do full GC.
2268 const int kLongIdlePauseInMs = 1000;
2269 CcTest::isolate()->ContextDisposedNotification();
2270 CcTest::isolate()->IdleNotification(kLongIdlePauseInMs);
// After the age bump + full GC, the counters must have been reset.
2272 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2273 CHECK_EQ(0, f->shared()->opt_count());
2274 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Verifies that once incremental marking has been stepped almost to the idle
// delay limit, a long IdleNotification finishes marking and triggers exactly
// one GC.
2278 TEST(IdleNotificationFinishMarking) {
2279 i::FLAG_allow_natives_syntax = true;
2280 CcTest::InitializeVM();
2281 SimulateFullSpace(CcTest::heap()->old_pointer_space());
2282 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2286 CHECK_EQ(CcTest::heap()->gc_count(), 0);
2288 // TODO(hpayer): We cannot write proper unit test right now for heap.
2289 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2290 // marking delay counter.
2292 // Perform a huge incremental marking step but don't complete marking.
2293 intptr_t bytes_processed = 0;
2296 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2297 IncrementalMarking::FORCE_MARKING,
2298 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2299 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2300 } while (bytes_processed);
2302 // The next invocations of incremental marking are not going to complete
2304 // since the completion threshold is not reached
2305 for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
2307 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2308 IncrementalMarking::FORCE_MARKING,
2309 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2310 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2313 // The next idle notification has to finish incremental marking.
2314 const int kLongIdleTime = 1000000;
2315 CcTest::isolate()->IdleNotification(kLongIdleTime);
2316 CHECK_EQ(CcTest::heap()->gc_count(), 1);
2320 // Test that HAllocateObject will always return an object in new-space,
// even when new space is (simulated) full and AlwaysAllocateScope is active.
2321 TEST(OptimizedAllocationAlwaysInNewSpace) {
2322 i::FLAG_allow_natives_syntax = true;
2323 CcTest::InitializeVM();
// Skip configurations where the optimization/GC behavior under test differs.
2324 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2325 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2326 v8::HandleScope scope(CcTest::isolate());
2328 SimulateFullSpace(CcTest::heap()->new_space());
2329 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2330 v8::Local<v8::Value> res = CompileRun(
2333 " for (var i = 0; i < 32; i++) {"
2334 " this['x' + i] = x;"
2337 "function f(x) { return new c(x); };"
2339 "%OptimizeFunctionOnNextCall(f);"
2341 CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2343 Handle<JSObject> o =
2344 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// The optimized constructor call must still have allocated in new space.
2346 CHECK(CcTest::heap()->InNewSpace(*o));
// Pretenuring test: repeatedly allocate [[{}], [1.1]] in an optimized
// function so allocation-site feedback kicks in, then check the folded
// allocations (outer array, inner arrays, and their backing stores) all
// ended up in old space.
2350 TEST(OptimizedPretenuringAllocationFolding) {
2351 i::FLAG_allow_natives_syntax = true;
2352 i::FLAG_expose_gc = true;
2353 CcTest::InitializeVM();
2354 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2355 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2356 v8::HandleScope scope(CcTest::isolate());
2358 // Grow new space unitl maximum capacity reached.
2359 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2360 CcTest::heap()->new_space()->Grow();
2363 i::ScopedVector<char> source(1024);
// %d below is filled (via an elided SNPrintF) with the element count that
// makes the allocation site cross the pretenuring threshold.
2366 "var number_elements = %d;"
2367 "var elements = new Array();"
2369 " for (var i = 0; i < number_elements; i++) {"
2370 " elements[i] = [[{}], [1.1]];"
2372 " return elements[number_elements-1]"
2376 "%%OptimizeFunctionOnNextCall(f);"
2378 AllocationSite::kPretenureMinimumCreated);
2380 v8::Local<v8::Value> res = CompileRun(source.start());
2382 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2383 Handle<JSObject> int_array_handle =
2384 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2385 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2386 Handle<JSObject> double_array_handle =
2387 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2389 Handle<JSObject> o =
2390 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// Pointer-holding objects go to old pointer space; the double array's
// unboxed backing store goes to old data space.
2391 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2392 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
2393 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
2394 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
2395 CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
// Pretenuring test for object-array literals ([{}, {}, {}]): the optimized
// allocation and its elements backing store must land in old pointer space.
2399 TEST(OptimizedPretenuringObjectArrayLiterals) {
2400 i::FLAG_allow_natives_syntax = true;
2401 i::FLAG_expose_gc = true;
2402 CcTest::InitializeVM();
2403 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2404 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2405 v8::HandleScope scope(CcTest::isolate());
2407 // Grow new space unitl maximum capacity reached.
2408 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2409 CcTest::heap()->new_space()->Grow();
2412 i::ScopedVector<char> source(1024);
2415 "var number_elements = %d;"
2416 "var elements = new Array(number_elements);"
2418 " for (var i = 0; i < number_elements; i++) {"
2419 " elements[i] = [{}, {}, {}];"
2421 " return elements[number_elements - 1];"
2425 "%%OptimizeFunctionOnNextCall(f);"
2427 AllocationSite::kPretenureMinimumCreated);
2429 v8::Local<v8::Value> res = CompileRun(source.start());
2431 Handle<JSObject> o =
2432 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2434 CHECK(CcTest::heap()->InOldPointerSpace(o->elements()));
2435 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// Pretenuring test for objects with mixed in-object properties
// ({a: {c: 2.2, d: {}}, b: 1.1}): pointer fields must be in old pointer
// space, heap-number (double) fields in old data space.
2439 TEST(OptimizedPretenuringMixedInObjectProperties) {
2440 i::FLAG_allow_natives_syntax = true;
2441 i::FLAG_expose_gc = true;
2442 CcTest::InitializeVM();
2443 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2444 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2445 v8::HandleScope scope(CcTest::isolate());
2447 // Grow new space unitl maximum capacity reached.
2448 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2449 CcTest::heap()->new_space()->Grow();
2453 i::ScopedVector<char> source(1024);
2456 "var number_elements = %d;"
2457 "var elements = new Array(number_elements);"
2459 " for (var i = 0; i < number_elements; i++) {"
2460 " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2462 " return elements[number_elements - 1];"
2466 "%%OptimizeFunctionOnNextCall(f);"
2468 AllocationSite::kPretenureMinimumCreated);
2470 v8::Local<v8::Value> res = CompileRun(source.start());
2472 Handle<JSObject> o =
2473 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2475 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// idx1/idx2 are the first two in-object property slots (a and b).
2476 FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2477 FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2478 CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(idx1)));
2479 CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(idx2)));
// Drill into the nested object `a` and check its fields (c: double, d: {}).
2481 JSObject* inner_object =
2482 reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
2483 CHECK(CcTest::heap()->InOldPointerSpace(inner_object));
2484 CHECK(CcTest::heap()->InOldDataSpace(inner_object->RawFastPropertyAt(idx1)));
2485 CHECK(CcTest::heap()->InOldPointerSpace(
2486 inner_object->RawFastPropertyAt(idx2)));
// Pretenuring test for objects with only double properties
// ({a: 1.1, b: 2.2}): the object goes to old pointer space and its
// properties backing store to old data space.
2490 TEST(OptimizedPretenuringDoubleArrayProperties) {
2491 i::FLAG_allow_natives_syntax = true;
2492 i::FLAG_expose_gc = true;
2493 CcTest::InitializeVM();
2494 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2495 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2496 v8::HandleScope scope(CcTest::isolate());
2498 // Grow new space unitl maximum capacity reached.
2499 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2500 CcTest::heap()->new_space()->Grow();
2503 i::ScopedVector<char> source(1024);
2506 "var number_elements = %d;"
2507 "var elements = new Array(number_elements);"
2509 " for (var i = 0; i < number_elements; i++) {"
2510 " elements[i] = {a: 1.1, b: 2.2};"
// After the loop the hoisted `i` equals number_elements, so `elements[i - 1]`
// is the last element (equivalent to number_elements - 1 used by sibling
// tests — presumably intentional; harmless inconsistency).
2512 " return elements[i - 1];"
2516 "%%OptimizeFunctionOnNextCall(f);"
2518 AllocationSite::kPretenureMinimumCreated);
2520 v8::Local<v8::Value> res = CompileRun(source.start());
2522 Handle<JSObject> o =
2523 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2525 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2526 CHECK(CcTest::heap()->InOldDataSpace(o->properties()));
// Pretenuring test for double-array literals ([1.1, 2.2, 3.3]): the array
// goes to old pointer space, its unboxed double backing store to old data
// space.
2530 TEST(OptimizedPretenuringdoubleArrayLiterals) {
2531 i::FLAG_allow_natives_syntax = true;
2532 i::FLAG_expose_gc = true;
2533 CcTest::InitializeVM();
2534 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2535 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2536 v8::HandleScope scope(CcTest::isolate());
2538 // Grow new space unitl maximum capacity reached.
2539 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2540 CcTest::heap()->new_space()->Grow();
2543 i::ScopedVector<char> source(1024);
2546 "var number_elements = %d;"
2547 "var elements = new Array(number_elements);"
2549 " for (var i = 0; i < number_elements; i++) {"
2550 " elements[i] = [1.1, 2.2, 3.3];"
2552 " return elements[number_elements - 1];"
2556 "%%OptimizeFunctionOnNextCall(f);"
2558 AllocationSite::kPretenureMinimumCreated);
2560 v8::Local<v8::Value> res = CompileRun(source.start());
2562 Handle<JSObject> o =
2563 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2565 CHECK(CcTest::heap()->InOldDataSpace(o->elements()));
2566 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// Pretenuring test for nested mixed array literals
// ([[{}, {}, {}], [1.1, 2.2, 3.3]]): outer and object-array parts in old
// pointer space, the double array's backing store in old data space.
2570 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2571 i::FLAG_allow_natives_syntax = true;
2572 i::FLAG_expose_gc = true;
2573 CcTest::InitializeVM();
2574 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2575 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2576 v8::HandleScope scope(CcTest::isolate());
2578 // Grow new space unitl maximum capacity reached.
2579 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2580 CcTest::heap()->new_space()->Grow();
2583 i::ScopedVector<char> source(1024);
// Unlike the sibling tests, the element count is hard-coded to 100 here.
2586 "var number_elements = 100;"
2587 "var elements = new Array(number_elements);"
2589 " for (var i = 0; i < number_elements; i++) {"
2590 " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
2592 " return elements[number_elements - 1];"
2596 "%%OptimizeFunctionOnNextCall(f);"
2599 v8::Local<v8::Value> res = CompileRun(source.start());
2601 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2602 Handle<JSObject> int_array_handle =
2603 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2604 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2605 Handle<JSObject> double_array_handle =
2606 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2608 Handle<JSObject> o =
2609 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2610 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2611 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
2612 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
2613 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
2614 CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
// Pretenuring test for nested object-array literals
// ([[{}, {}, {}],[{}, {}, {}]]): everything is pointer-holding, so all
// objects and backing stores must be in old pointer space.
2618 TEST(OptimizedPretenuringNestedObjectLiterals) {
2619 i::FLAG_allow_natives_syntax = true;
2620 i::FLAG_expose_gc = true;
2621 CcTest::InitializeVM();
2622 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2623 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2624 v8::HandleScope scope(CcTest::isolate());
2626 // Grow new space unitl maximum capacity reached.
2627 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2628 CcTest::heap()->new_space()->Grow();
2631 i::ScopedVector<char> source(1024);
2634 "var number_elements = %d;"
2635 "var elements = new Array(number_elements);"
2637 " for (var i = 0; i < number_elements; i++) {"
2638 " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
2640 " return elements[number_elements - 1];"
2644 "%%OptimizeFunctionOnNextCall(f);"
2646 AllocationSite::kPretenureMinimumCreated);
2648 v8::Local<v8::Value> res = CompileRun(source.start());
2650 v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
2651 Handle<JSObject> int_array_handle_1 =
2652 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
2653 v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
2654 Handle<JSObject> int_array_handle_2 =
2655 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
2657 Handle<JSObject> o =
2658 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2659 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2660 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_1));
2661 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_1->elements()));
2662 CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_2));
2663 CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_2->elements()));
// Pretenuring test for nested double-array literals
// ([[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]]): the arrays live in old pointer space,
// their unboxed double backing stores in old data space.
2667 TEST(OptimizedPretenuringNestedDoubleLiterals) {
2668 i::FLAG_allow_natives_syntax = true;
2669 i::FLAG_expose_gc = true;
2670 CcTest::InitializeVM();
2671 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2672 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2673 v8::HandleScope scope(CcTest::isolate());
2675 // Grow new space unitl maximum capacity reached.
2676 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2677 CcTest::heap()->new_space()->Grow();
2680 i::ScopedVector<char> source(1024);
2683 "var number_elements = %d;"
2684 "var elements = new Array(number_elements);"
2686 " for (var i = 0; i < number_elements; i++) {"
2687 " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
2689 " return elements[number_elements - 1];"
2693 "%%OptimizeFunctionOnNextCall(f);"
2695 AllocationSite::kPretenureMinimumCreated);
2697 v8::Local<v8::Value> res = CompileRun(source.start());
2699 v8::Local<v8::Value> double_array_1 =
2700 v8::Object::Cast(*res)->Get(v8_str("0"));
2701 Handle<JSObject> double_array_handle_1 =
2702 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
2703 v8::Local<v8::Value> double_array_2 =
2704 v8::Object::Cast(*res)->Get(v8_str("1"));
2705 Handle<JSObject> double_array_handle_2 =
2706 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
2708 Handle<JSObject> o =
2709 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2710 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2711 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_1));
2712 CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_1->elements()));
2713 CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_2));
2714 CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_2->elements()));
2718 // Make sure pretenuring feedback is gathered for constructed objects as well
// (i.e. `new foo()` in optimized code, not just literals).
2720 TEST(OptimizedPretenuringConstructorCalls) {
2721 if (!i::FLAG_pretenuring_call_new) {
2722 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
2725 i::FLAG_allow_natives_syntax = true;
2726 i::FLAG_expose_gc = true;
2727 CcTest::InitializeVM();
2728 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2729 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2730 v8::HandleScope scope(CcTest::isolate());
2732 // Grow new space unitl maximum capacity reached.
2733 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2734 CcTest::heap()->new_space()->Grow();
2737 i::ScopedVector<char> source(1024);
2738 // Call new is doing slack tracking for the first
2739 // JSFunction::kGenerousAllocationCount allocations, and we can't find
2740 // mementos during that time.
2743 "var number_elements = %d;"
2744 "var elements = new Array(number_elements);"
2750 " for (var i = 0; i < number_elements; i++) {"
2751 " elements[i] = new foo();"
2753 " return elements[number_elements - 1];"
2757 "%%OptimizeFunctionOnNextCall(f);"
// Iteration count must cover both the pretenuring threshold and the slack
// tracking window, hence the sum below.
2759 AllocationSite::kPretenureMinimumCreated +
2760 JSFunction::kGenerousAllocationCount);
2762 v8::Local<v8::Value> res = CompileRun(source.start());
2764 Handle<JSObject> o =
2765 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2767 CHECK(CcTest::heap()->InOldPointerSpace(*o));
// Like OptimizedPretenuringConstructorCalls, but the constructor `g` is
// defined inline in the same script; verifies `new g()` in optimized code is
// pretenured once the allocation site crosses the threshold.
2771 TEST(OptimizedPretenuringCallNew) {
2772 if (!i::FLAG_pretenuring_call_new) {
2773 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
2776 i::FLAG_allow_natives_syntax = true;
2777 i::FLAG_expose_gc = true;
2778 CcTest::InitializeVM();
2779 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2780 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2781 v8::HandleScope scope(CcTest::isolate());
2783 // Grow new space unitl maximum capacity reached.
2784 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2785 CcTest::heap()->new_space()->Grow();
2788 i::ScopedVector<char> source(1024);
2789 // Call new is doing slack tracking for the first
2790 // JSFunction::kGenerousAllocationCount allocations, and we can't find
2791 // mementos during that time.
2794 "var number_elements = %d;"
2795 "var elements = new Array(number_elements);"
2796 "function g() { this.a = 0; }"
2798 " for (var i = 0; i < number_elements; i++) {"
2799 " elements[i] = new g();"
2801 " return elements[number_elements - 1];"
2805 "%%OptimizeFunctionOnNextCall(f);"
// Iteration count covers both the pretenuring threshold and slack tracking.
2807 AllocationSite::kPretenureMinimumCreated +
2808 JSFunction::kGenerousAllocationCount);
2810 v8::Local<v8::Value> res = CompileRun(source.start());
2812 Handle<JSObject> o =
2813 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2814 CHECK(CcTest::heap()->InOldPointerSpace(*o));
2818 // Test regular array literals allocation: without pretenuring feedback an
// optimized array literal allocation stays in new space.
2819 TEST(OptimizedAllocationArrayLiterals) {
2820 i::FLAG_allow_natives_syntax = true;
2821 CcTest::InitializeVM();
2822 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2823 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2824 v8::HandleScope scope(CcTest::isolate());
2826 v8::Local<v8::Value> res = CompileRun(
2828 " var numbers = new Array(1, 2, 3);"
2829 " numbers[0] = 3.14;"
2833 "%OptimizeFunctionOnNextCall(f);"
2835 CHECK_EQ(static_cast<int>(3.14),
2836 v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
2838 Handle<JSObject> o =
2839 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// The array's backing store must still be in new space (no pretenuring).
2841 CHECK(CcTest::heap()->InNewSpace(o->elements()));
// Returns the number of entries in the map's transition array.
2845 static int CountMapTransitions(Map* map) {
2846 return map->transitions()->number_of_transitions();
2850 // Test that map transitions are cleared and maps are collected with
2851 // incremental marking as well.
// NOTE(review): the TEST(...) header line for this body is elided in this
// chunk — confirm the test name against the full file.
2853 i::FLAG_stress_compaction = false;
2854 i::FLAG_allow_natives_syntax = true;
2855 i::FLAG_trace_incremental_marking = true;
2856 CcTest::InitializeVM();
2857 v8::HandleScope scope(CcTest::isolate());
2858 static const int transitions_count = 256;
2860 CompileRun("function F() {}");
// Create 256 distinct map transitions by adding a differently-named property
// to fresh instances of F.
2862 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2863 for (int i = 0; i < transitions_count; i++) {
2864 EmbeddedVector<char, 64> buffer;
2865 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2866 CompileRun(buffer.start());
2868 CompileRun("var root = new F;");
2871 Handle<JSObject> root =
2872 v8::Utils::OpenHandle(
2873 *v8::Handle<v8::Object>::Cast(
2874 CcTest::global()->Get(v8_str("root"))));
2876 // Count number of live transitions before marking.
2877 int transitions_before = CountMapTransitions(root->map());
2878 CompileRun("%DebugPrint(root);");
2879 CHECK_EQ(transitions_count, transitions_before);
2881 SimulateIncrementalMarking(CcTest::heap());
2882 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
2884 // Count number of live transitions after marking. Note that one transition
2885 // is left, because 'o' still holds an instance of one transition target.
2886 int transitions_after = CountMapTransitions(root->map());
2887 CompileRun("%DebugPrint(root);");
2888 CHECK_EQ(1, transitions_after);
// Creates `transitions_count` map transitions from F's initial map by giving
// fresh F instances differently-named properties (prop0, prop1, ...).
// Assumes a global function F has already been defined via CompileRun.
2893 static void AddTransitions(int transitions_count) {
2894 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2895 for (int i = 0; i < transitions_count; i++) {
2896 EmbeddedVector<char, 64> buffer;
2897 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2898 CompileRun(buffer.start());
// Looks up a global variable by name and returns it as an internal
// Handle<JSObject>.
2903 static Handle<JSObject> GetByName(const char* name) {
2904 return v8::Utils::OpenHandle(
2905 *v8::Handle<v8::Object>::Cast(
2906 CcTest::global()->Get(v8_str(name))));
// Adds a Smi property to `object` while arming the allocation timeout so
// that a (global) GC is triggered after `gc_count` allocations — i.e. the
// property add itself runs concurrently with GC pressure.
2910 static void AddPropertyTo(
2911 int gc_count, Handle<JSObject> object, const char* property_name) {
2912 Isolate* isolate = CcTest::i_isolate();
2913 Factory* factory = isolate->factory();
2914 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
2915 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
// Force a full GC after gc_count allocations.
2916 i::FLAG_gc_interval = gc_count;
2917 i::FLAG_gc_global = true;
2918 CcTest::heap()->set_allocation_timeout(gc_count);
2919 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
// Verifies that when GC hits while adding a property, a transition array
// whose targets are all dead shrinks down (only the single live transition
// created by the property add survives on the back-pointer map).
2923 TEST(TransitionArrayShrinksDuringAllocToZero) {
2924 i::FLAG_stress_compaction = false;
2925 i::FLAG_allow_natives_syntax = true;
2926 CcTest::InitializeVM();
2927 v8::HandleScope scope(CcTest::isolate());
2928 static const int transitions_count = 10;
2929 CompileRun("function F() { }");
2930 AddTransitions(transitions_count);
2931 CompileRun("var root = new F;");
2932 Handle<JSObject> root = GetByName("root");
2934 // Count number of live transitions before marking.
2935 int transitions_before = CountMapTransitions(root->map());
2936 CHECK_EQ(transitions_count, transitions_before);
2939 CompileRun("o = new F;"
2941 root = GetByName("root");
// Arm GC-after-2-allocations, then add a property and scavenge.
2942 AddPropertyTo(2, root, "funny");
2943 CcTest::heap()->CollectGarbage(NEW_SPACE);
2945 // Count number of live transitions after marking. Note that one transition
2946 // is left, because 'o' still holds an instance of one transition target.
2947 int transitions_after = CountMapTransitions(
2948 Map::cast(root->map()->GetBackPointer()));
2949 CHECK_EQ(1, transitions_after);
// Variant of the previous test: here `root` itself keeps one transition
// target alive, so after GC two transitions remain on the back-pointer map
// (the surviving one plus the newly added "funny" transition).
2953 TEST(TransitionArrayShrinksDuringAllocToOne) {
2954 i::FLAG_stress_compaction = false;
2955 i::FLAG_allow_natives_syntax = true;
2956 CcTest::InitializeVM();
2957 v8::HandleScope scope(CcTest::isolate());
2958 static const int transitions_count = 10;
2959 CompileRun("function F() {}");
2960 AddTransitions(transitions_count);
2961 CompileRun("var root = new F;");
2962 Handle<JSObject> root = GetByName("root");
2964 // Count number of live transitions before marking.
2965 int transitions_before = CountMapTransitions(root->map());
2966 CHECK_EQ(transitions_count, transitions_before);
2968 root = GetByName("root");
2969 AddPropertyTo(2, root, "funny");
2970 CcTest::heap()->CollectGarbage(NEW_SPACE);
2972 // Count number of live transitions after marking. Note that one transition
2973 // is left, because 'o' still holds an instance of one transition target.
2974 int transitions_after = CountMapTransitions(
2975 Map::cast(root->map()->GetBackPointer()));
2976 CHECK_EQ(2, transitions_after);
// Variant where the property being added ("prop9") is one of the existing
// transitions; after a full GC during the add, exactly that one transition
// survives on the back-pointer map.
2980 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
2981 i::FLAG_stress_compaction = false;
2982 i::FLAG_allow_natives_syntax = true;
2983 CcTest::InitializeVM();
2984 v8::HandleScope scope(CcTest::isolate());
2985 static const int transitions_count = 10;
2986 CompileRun("function F() {}");
2987 AddTransitions(transitions_count);
2988 CompileRun("var root = new F;");
2989 Handle<JSObject> root = GetByName("root");
2991 // Count number of live transitions before marking.
2992 int transitions_before = CountMapTransitions(root->map());
2993 CHECK_EQ(transitions_count, transitions_before);
2995 root = GetByName("root");
// gc_count == 0: trigger GC immediately on the next allocation.
2996 AddPropertyTo(0, root, "prop9");
2997 CcTest::i_isolate()->heap()->CollectGarbage(OLD_POINTER_SPACE);
2999 // Count number of live transitions after marking. Note that one transition
3000 // is left, because 'o' still holds an instance of one transition target.
3001 int transitions_after = CountMapTransitions(
3002 Map::cast(root->map()->GetBackPointer()));
3003 CHECK_EQ(1, transitions_after);
// With a single transition a map stores it as a "simple" (inline)
// transition; adding a second property must upgrade it to a full
// transition array. Verifies the count after the upgrade.
3007 TEST(TransitionArraySimpleToFull) {
3008 i::FLAG_stress_compaction = false;
3009 i::FLAG_allow_natives_syntax = true;
3010 CcTest::InitializeVM();
3011 v8::HandleScope scope(CcTest::isolate());
3012 static const int transitions_count = 1;
3013 CompileRun("function F() {}");
3014 AddTransitions(transitions_count);
3015 CompileRun("var root = new F;");
3016 Handle<JSObject> root = GetByName("root");
3018 // Count number of live transitions before marking.
3019 int transitions_before = CountMapTransitions(root->map());
3020 CHECK_EQ(transitions_count, transitions_before);
3022 CompileRun("o = new F;"
3024 root = GetByName("root");
// Precondition: the single transition is stored in the compact
// "simple transition" encoding before the new property is added.
3025 DCHECK(root->map()->transitions()->IsSimpleTransition());
3026 AddPropertyTo(2, root, "happy");
3028 // Count number of live transitions after marking. Note that one transition
3029 // is left, because 'o' still holds an instance of one transition target.
3030 int transitions_after = CountMapTransitions(
3031 Map::cast(root->map()->GetBackPointer()));
3032 CHECK_EQ(1, transitions_after);
// Regression test for v8 issue 2143: map collection during incremental
// marking must not leave the root object with a reclaimed map after a
// StoreIC performs a prepared map transition mid-marking.
3037 TEST(Regress2143a) {
3038 i::FLAG_collect_maps = true;
3039 i::FLAG_incremental_marking = true;
3040 CcTest::InitializeVM();
3041 v8::HandleScope scope(CcTest::isolate());
3043 // Prepare a map transition from the root object together with a yet
3044 // untransitioned root object.
3045 CompileRun("var root = new Object;"
3047 "root = new Object;");
3049 SimulateIncrementalMarking(CcTest::heap());
3051 // Compile a StoreIC that performs the prepared map transition. This
3052 // will restart incremental marking and should make sure the root is
3053 // marked grey again.
3054 CompileRun("function f(o) {"
3060 // This bug only triggers with aggressive IC clearing.
3061 CcTest::heap()->AgeInlineCaches();
3063 // Explicitly request GC to perform final marking step and sweeping.
3064 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3066 Handle<JSObject> root =
3067 v8::Utils::OpenHandle(
3068 *v8::Handle<v8::Object>::Cast(
3069 CcTest::global()->Get(v8_str("root"))));
3071 // The root object should be in a sane state.
3072 CHECK(root->IsJSObject());
3073 CHECK(root->map()->IsMap());
// Companion to Regress2143a: same scenario, but the map transition is
// performed by optimized code (LStoreNamedField) instead of a StoreIC,
// exercising the Crankshaft path with %Optimize/%Deoptimize natives.
3077 TEST(Regress2143b) {
3078 i::FLAG_collect_maps = true;
3079 i::FLAG_incremental_marking = true;
3080 i::FLAG_allow_natives_syntax = true;
3081 CcTest::InitializeVM();
3082 v8::HandleScope scope(CcTest::isolate());
3084 // Prepare a map transition from the root object together with a yet
3085 // untransitioned root object.
3086 CompileRun("var root = new Object;"
3088 "root = new Object;");
3090 SimulateIncrementalMarking(CcTest::heap());
3092 // Compile an optimized LStoreNamedField that performs the prepared
3093 // map transition. This will restart incremental marking and should
3094 // make sure the root is marked grey again.
3095 CompileRun("function f(o) {"
3100 "%OptimizeFunctionOnNextCall(f);"
3102 "%DeoptimizeFunction(f);");
3104 // This bug only triggers with aggressive IC clearing.
3105 CcTest::heap()->AgeInlineCaches();
3107 // Explicitly request GC to perform final marking step and sweeping.
3108 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3110 Handle<JSObject> root =
3111 v8::Utils::OpenHandle(
3112 *v8::Handle<v8::Object>::Cast(
3113 CcTest::global()->Get(v8_str("root"))));
3115 // The root object should be in a sane state.
3116 CHECK(root->IsJSObject());
3117 CHECK(root->map()->IsMap());
// Checks that the old-pointer space returns over-reserved pages to the
// OS: it fills 20 pages with mostly-garbage, then verifies successive
// GCs shrink the page count back down to the initial number of pages.
3121 TEST(ReleaseOverReservedPages) {
3122 if (FLAG_never_compact) return;
3123 i::FLAG_trace_gc = true;
3124 // The optimizer can allocate stuff, messing up the test.
3125 i::FLAG_crankshaft = false;
3126 i::FLAG_always_opt = false;
3127 CcTest::InitializeVM();
3128 Isolate* isolate = CcTest::i_isolate();
3129 Factory* factory = isolate->factory();
3130 Heap* heap = isolate->heap();
3131 v8::HandleScope scope(CcTest::isolate());
3132 static const int number_of_test_pages = 20;
3134 // Prepare many pages with low live-bytes count.
3135 PagedSpace* old_pointer_space = heap->old_pointer_space();
3136 int old_pointer_initial_space_pages = old_pointer_space->CountTotalPages();
3137 for (int i = 0; i < number_of_test_pages; i++) {
3138 AlwaysAllocateScope always_allocate(isolate);
// Fill the current page so the tiny array below forces a fresh page,
// leaving each test page nearly empty of live objects.
3139 SimulateFullSpace(old_pointer_space);
3140 factory->NewFixedArray(1, TENURED);
3142 CHECK_EQ(number_of_test_pages + old_pointer_initial_space_pages,
3143 old_pointer_space->CountTotalPages());
3145 // Triggering one GC will cause a lot of garbage to be discovered but
3146 // even spread across all allocated pages.
3147 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
3148 "triggered for preparation");
3149 CHECK_GE(number_of_test_pages + old_pointer_initial_space_pages,
3150 old_pointer_space->CountTotalPages());
3152 // Triggering subsequent GCs should cause at least half of the pages
3153 // to be released to the OS after at most two cycles.
3154 heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
3155 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
3156 heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
3157 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
3159 // Triggering a last-resort GC should cause all pages to be released to the
3160 // OS so that other processes can seize the memory. If we get a failure here
3161 // where there are 2 pages left instead of 1, then we should increase the
3162 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3163 // first page should be small in order to reduce memory used when the VM
3164 // boots, but if the 20 small arrays don't fit on the first page then that's
3165 // an indication that it is too small.
3166 heap->CollectAllAvailableGarbage("triggered really hard");
3167 CHECK_EQ(old_pointer_initial_space_pages,
3168 old_pointer_space->CountTotalPages());
// Body of a sliced-string regression test (the TEST(...) header is on a
// line not visible here). It keeps a SlicedString alive in the outer
// scope while its new-space SeqOneByteString parent is scavenged, and
// checks the parent is still a sequential string afterwards.
3173 i::FLAG_stress_compaction = false;
3174 CcTest::InitializeVM();
3175 Isolate* isolate = CcTest::i_isolate();
3176 Factory* factory = isolate->factory();
3177 v8::HandleScope scope(CcTest::isolate());
// Placeholder handle in the outer scope; its slot is overwritten below
// so the slice survives the inner HandleScope.
3178 Handle<String> slice(CcTest::heap()->empty_string());
3181 // Generate a parent that lives in new-space.
3182 v8::HandleScope inner_scope(CcTest::isolate());
3183 const char* c = "This text is long enough to trigger sliced strings.";
3184 Handle<String> s = factory->NewStringFromAsciiChecked(c);
3185 CHECK(s->IsSeqOneByteString());
3186 CHECK(CcTest::heap()->InNewSpace(*s));
3188 // Generate a sliced string that is based on the above parent and
3189 // lives in old-space.
3190 SimulateFullSpace(CcTest::heap()->new_space());
3191 AlwaysAllocateScope always_allocate(isolate);
3192 Handle<String> t = factory->NewProperSubString(s, 5, 35);
3193 CHECK(t->IsSlicedString());
3194 CHECK(!CcTest::heap()->InNewSpace(*t));
// Raw handle-location write: copies the slice into the outer-scope
// handle so it escapes the inner HandleScope without an escape slot.
3195 *slice.location() = *t.location();
3198 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
3199 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3200 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
// Smoke test (compiled only under OBJECT_PRINT, per the #endif below):
// prints a SharedFunctionInfo to stdout and merely checks nothing
// crashes — there are no assertions on the output.
3205 TEST(PrintSharedFunctionInfo) {
3206 CcTest::InitializeVM();
3207 v8::HandleScope scope(CcTest::isolate());
3208 const char* source = "f = function() { return 987654321; }\n"
3209 "g = function() { return 123456789; }\n";
3211 Handle<JSFunction> g =
3212 v8::Utils::OpenHandle(
3213 *v8::Handle<v8::Function>::Cast(
3214 CcTest::global()->Get(v8_str("g"))));
3216 OFStream os(stdout);
3217 g->shared()->Print(os);
3220 #endif // OBJECT_PRINT
// Body of a hidden-property/identity-hash test (the TEST(...) header is
// on a line not visible here). It sets a hidden value and an identity
// hash in both orders and checks both survive and the backing
// ObjectHashTable stays small.
3224 CcTest::InitializeVM();
3225 v8::HandleScope scope(CcTest::isolate());
3227 v8::Handle<v8::String> value = v8_str("val string");
3228 Smi* hash = Smi::FromInt(321);
3229 Factory* factory = CcTest::i_isolate()->factory();
3231 for (int i = 0; i < 2; i++) {
3232 // Store identity hash first and common hidden property second.
3233 v8::Handle<v8::Object> obj = v8::Object::New(CcTest::isolate());
3234 Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
3235 CHECK(internal_obj->HasFastProperties());
3237 // In the first iteration, set hidden value first and identity hash second.
3238 // In the second iteration, reverse the order.
3239 if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
3240 JSObject::SetIdentityHash(internal_obj, handle(hash, CcTest::i_isolate()));
3241 if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
3245 internal_obj->GetHiddenProperty(factory->identity_hash_string()));
3246 CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
// The hidden-properties hash table is stored in the object's first
// fast-property field; fetch it directly to inspect its size.
3249 FieldIndex index = FieldIndex::ForDescriptor(internal_obj->map(), 0);
3250 ObjectHashTable* hashtable = ObjectHashTable::cast(
3251 internal_obj->RawFastPropertyAt(index));
3252 // HashTable header (5) and 4 initial entries (8).
3253 CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
// Verifies that incremental marking clears type-feedback slots that
// reference closures from other (disposed) native contexts: after GC,
// both IC slots must hold the uninitialized sentinel, not JSFunctions.
3258 TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
3259 if (i::FLAG_always_opt) return;
3260 CcTest::InitializeVM();
3261 v8::HandleScope scope(CcTest::isolate());
3262 v8::Local<v8::Value> fun1, fun2;
// fun1 and fun2 are created in two separate temporary contexts (the
// LocalContext setup lines are not visible in this listing).
3266 CompileRun("function fun() {};");
3267 fun1 = env->Global()->Get(v8_str("fun"));
3272 CompileRun("function fun() {};");
3273 fun2 = env->Global()->Get(v8_str("fun"));
3276 // Prepare function f that contains type feedback for closures
3277 // originating from two different native contexts.
3278 CcTest::global()->Set(v8_str("fun1"), fun1);
3279 CcTest::global()->Set(v8_str("fun2"), fun2);
3280 CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3282 Handle<JSFunction> f =
3283 v8::Utils::OpenHandle(
3284 *v8::Handle<v8::Function>::Cast(
3285 CcTest::global()->Get(v8_str("f"))));
3287 Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
3289 int expected_slots = 2;
3290 CHECK_EQ(expected_slots, feedback_vector->ICSlots());
3291 for (int i = 0; i < expected_slots; i++) {
3292 CHECK(feedback_vector->Get(FeedbackVectorICSlot(i))->IsJSFunction());
3295 SimulateIncrementalMarking(CcTest::heap());
3296 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3298 CHECK_EQ(expected_slots, feedback_vector->ICSlots());
3299 for (int i = 0; i < expected_slots; i++) {
3300 CHECK_EQ(feedback_vector->Get(FeedbackVectorICSlot(i)),
3301 *TypeFeedbackVector::UninitializedSentinel(CcTest::i_isolate()));
// Scans `code`'s relocation info for call/construct targets and returns
// the first inline-cache stub of the requested kind (the return
// statements fall on lines not visible in this listing; presumably
// `target` on a match and NULL otherwise — confirm in the full file).
3306 static Code* FindFirstIC(Code* code, Code::Kind kind) {
3307 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3308 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
3309 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3310 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3311 RelocInfo* info = it.rinfo();
3312 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3313 if (target->is_inline_cache_stub() && target->kind() == kind) {
// A monomorphic LoadIC whose receiver map comes from the *same* native
// context must survive incremental marking + full GC unchanged.
3321 TEST(IncrementalMarkingPreservesMonomorphicIC) {
3322 if (i::FLAG_always_opt) return;
3323 CcTest::InitializeVM();
3324 v8::HandleScope scope(CcTest::isolate());
3326 // Prepare function f that contains a monomorphic IC for object
3327 // originating from the same native context.
3328 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3329 "function f(o) { return o.x; } f(obj); f(obj);");
3330 Handle<JSFunction> f =
3331 v8::Utils::OpenHandle(
3332 *v8::Handle<v8::Function>::Cast(
3333 CcTest::global()->Get(v8_str("f"))));
3335 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3336 CHECK(ic_before->ic_state() == MONOMORPHIC);
3338 SimulateIncrementalMarking(CcTest::heap());
3339 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3341 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3342 CHECK(ic_after->ic_state() == MONOMORPHIC);
// Counterpart to the preserve test: when the monomorphic IC's receiver
// map originates from a *disposed* native context, incremental marking
// plus a full GC must clear the IC.
3346 TEST(IncrementalMarkingClearsMonomorphicIC) {
3347 if (i::FLAG_always_opt) return;
3348 CcTest::InitializeVM();
3349 v8::HandleScope scope(CcTest::isolate());
3350 v8::Local<v8::Value> obj1;
// obj1 is created in a separate temporary context (context setup lines
// are not visible in this listing).
3354 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3355 obj1 = env->Global()->Get(v8_str("obj"));
3358 // Prepare function f that contains a monomorphic IC for object
3359 // originating from a different native context.
3360 CcTest::global()->Set(v8_str("obj1"), obj1);
3361 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
3362 Handle<JSFunction> f =
3363 v8::Utils::OpenHandle(
3364 *v8::Handle<v8::Function>::Cast(
3365 CcTest::global()->Get(v8_str("f"))));
3367 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3368 CHECK(ic_before->ic_state() == MONOMORPHIC);
3370 // Fire context dispose notification.
3371 CcTest::isolate()->ContextDisposedNotification();
3372 SimulateIncrementalMarking(CcTest::heap());
3373 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3375 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3376 CHECK(IC::IsCleared(ic_after));
// Same as the monomorphic-clearing test, but with a polymorphic LoadIC
// fed receiver maps from two different (disposed) native contexts.
3380 TEST(IncrementalMarkingClearsPolymorphicIC) {
3381 if (i::FLAG_always_opt) return;
3382 CcTest::InitializeVM();
3383 v8::HandleScope scope(CcTest::isolate());
3384 v8::Local<v8::Value> obj1, obj2;
// obj1 and obj2 come from two separate temporary contexts (context
// setup lines are not visible in this listing).
3388 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3389 obj1 = env->Global()->Get(v8_str("obj"));
3394 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3395 obj2 = env->Global()->Get(v8_str("obj"));
3398 // Prepare function f that contains a polymorphic IC for objects
3399 // originating from two different native contexts.
3400 CcTest::global()->Set(v8_str("obj1"), obj1);
3401 CcTest::global()->Set(v8_str("obj2"), obj2);
3402 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3403 Handle<JSFunction> f =
3404 v8::Utils::OpenHandle(
3405 *v8::Handle<v8::Function>::Cast(
3406 CcTest::global()->Get(v8_str("f"))));
3408 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3409 CHECK(ic_before->ic_state() == POLYMORPHIC);
3411 // Fire context dispose notification.
3412 CcTest::isolate()->ContextDisposedNotification();
3413 SimulateIncrementalMarking(CcTest::heap());
3414 CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
3416 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3417 CHECK(IC::IsCleared(ic_after));
// External one-byte string resource that records whether the engine has
// disposed it — used below to observe when an external source string is
// actually released by GC. IsDisposed() tests data_ == NULL, so Dispose
// presumably also nulls data_ on a line not visible in this listing.
3421 class SourceResource : public v8::String::ExternalOneByteStringResource {
3423 explicit SourceResource(const char* data)
3424 : data_(data), length_(strlen(data)) { }
3426 virtual void Dispose() {
3427 i::DeleteArray(data_);
3431 const char* data() const { return data_; }
3433 size_t length() const { return length_; }
3435 bool IsDisposed() { return data_ == NULL; }
// Drives one ReleaseStackTraceData scenario: compiles `source` from an
// external string, touches the error's stack via `accessor`, and checks
// the external resource is only disposed after the accessor has fired.
3443 void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
3444 const char* accessor) {
3445 // Test that the data retained by the Error.stack accessor is released
3446 // after the first time the accessor is fired. We use external string
3447 // to check whether the data is being released since the external string
3448 // resource's callback is fired when the external string is GC'ed.
3449 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3450 v8::HandleScope scope(isolate);
// Ownership of the duplicated source buffer passes to the resource;
// the engine disposes it when the external string dies.
3451 SourceResource* resource = new SourceResource(i::StrDup(source));
// NOTE(review): this second `scope` shadows the outer one — presumably
// it sits inside a nested block whose opening brace is on a line not
// visible in this listing.
3453 v8::HandleScope scope(isolate);
3454 v8::Handle<v8::String> source_string =
3455 v8::String::NewExternal(isolate, resource);
3456 i_isolate->heap()->CollectAllAvailableGarbage();
3457 v8::Script::Compile(source_string)->Run();
3458 CHECK(!resource->IsDisposed());
3460 // i_isolate->heap()->CollectAllAvailableGarbage();
3461 CHECK(!resource->IsDisposed());
3463 CompileRun(accessor);
3464 i_isolate->heap()->CollectAllAvailableGarbage();
3466 // External source has been released.
3467 CHECK(resource->IsDisposed());
// Runs ReleaseStackTraceDataTest over four error-creating sources
// (plain errors and stack overflows, plus variants where the error sits
// on the prototype chain) with both the stack getter and setter.
3472 UNINITIALIZED_TEST(ReleaseStackTraceData) {
3473 if (i::FLAG_always_opt) {
3474 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
3475 // See: https://codereview.chromium.org/181833004/
3478 FLAG_use_ic = false; // ICs retain objects.
3479 FLAG_concurrent_recompilation = false;
3480 v8::Isolate* isolate = v8::Isolate::New();
3482 v8::Isolate::Scope isolate_scope(isolate);
3483 v8::HandleScope handle_scope(isolate);
3484 v8::Context::New(isolate)->Enter();
3485 static const char* source1 = "var error = null; "
3486 /* Normal Error */ "try { "
3487 " throw new Error(); "
3491 static const char* source2 = "var error = null; "
3492 /* Stack overflow */ "try { "
3493 " (function f() { f(); })(); "
3497 static const char* source3 = "var error = null; "
3498 /* Normal Error */ "try { "
3499 /* as prototype */ " throw new Error(); "
3502 " error.__proto__ = e; "
3504 static const char* source4 = "var error = null; "
3505 /* Stack overflow */ "try { "
3506 /* as prototype */ " (function f() { f(); })(); "
3509 " error.__proto__ = e; "
3511 static const char* getter = "error.stack";
3512 static const char* setter = "error.stack = 0";
3514 ReleaseStackTraceDataTest(isolate, source1, setter);
3515 ReleaseStackTraceDataTest(isolate, source2, setter);
3516 // We do not test source3 and source4 with setter, since the setter is
3517 // supposed to (untypically) write to the receiver, not the holder. This is
3518 // to emulate the behavior of a data property.
3520 ReleaseStackTraceDataTest(isolate, source1, getter);
3521 ReleaseStackTraceDataTest(isolate, source2, getter);
3522 ReleaseStackTraceDataTest(isolate, source3, getter);
3523 ReleaseStackTraceDataTest(isolate, source4, getter);
// Regression test for crbug 159140: incremental code flushing must not
// flush the unoptimized code of a function whose optimized code is
// still reachable (here, kept alive via a handle), or the deoptimizer
// crashes when 'g' later deoptimizes on a type change.
3529 TEST(Regress159140) {
3530 i::FLAG_allow_natives_syntax = true;
3531 i::FLAG_flush_code_incrementally = true;
3532 CcTest::InitializeVM();
3533 Isolate* isolate = CcTest::i_isolate();
3534 Heap* heap = isolate->heap();
3535 HandleScope scope(isolate);
3537 // Perform one initial GC to enable code flushing.
3538 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3540 // Prepare several closures that are all eligible for code flushing
3541 // because all reachable ones are not optimized. Make sure that the
3542 // optimized code object is directly reachable through a handle so
3543 // that it is marked black during incremental marking.
3546 HandleScope inner_scope(isolate);
3547 CompileRun("function h(x) {}"
3548 "function mkClosure() {"
3549 " return function(x) { return x + 1; };"
3551 "var f = mkClosure();"
3552 "var g = mkClosure();"
3556 "%OptimizeFunctionOnNextCall(f); f(3);"
3557 "%OptimizeFunctionOnNextCall(h); h(3);");
3559 Handle<JSFunction> f =
3560 v8::Utils::OpenHandle(
3561 *v8::Handle<v8::Function>::Cast(
3562 CcTest::global()->Get(v8_str("f"))));
3563 CHECK(f->is_compiled());
3564 CompileRun("f = null;");
3566 Handle<JSFunction> g =
3567 v8::Utils::OpenHandle(
3568 *v8::Handle<v8::Function>::Cast(
3569 CcTest::global()->Get(v8_str("g"))));
3570 CHECK(g->is_compiled());
3571 const int kAgingThreshold = 6;
// Artificially age g's code past the flushing threshold so it becomes
// a code-flushing candidate.
3572 for (int i = 0; i < kAgingThreshold; i++) {
3573 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
// Escape f's optimized code object to the outer scope so it stays
// strongly reachable across the GC below.
3576 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
3579 // Simulate incremental marking so that the functions are enqueued as
3580 // code flushing candidates. Then optimize one function. Finally
3581 // finish the GC to complete code flushing.
3582 SimulateIncrementalMarking(heap);
3583 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
3584 heap->CollectAllGarbage(Heap::kNoGCFlags);
3586 // Unoptimized code is missing and the deoptimizer will go ballistic.
3587 CompileRun("g('bozo');");
// Regression test for crbug 165495: flushing unoptimized code that is
// still cached in the optimized code map must not break a later closure
// that gets its code installed from that map.
3591 TEST(Regress165495) {
3592 i::FLAG_allow_natives_syntax = true;
3593 i::FLAG_flush_code_incrementally = true;
3594 CcTest::InitializeVM();
3595 Isolate* isolate = CcTest::i_isolate();
3596 Heap* heap = isolate->heap();
3597 HandleScope scope(isolate);
3599 // Perform one initial GC to enable code flushing.
3600 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3602 // Prepare an optimized closure that the optimized code map will get
3603 // populated. Then age the unoptimized code to trigger code flushing
3604 // but make sure the optimized code is unreachable.
3606 HandleScope inner_scope(isolate);
3607 CompileRun("function mkClosure() {"
3608 " return function(x) { return x + 1; };"
3610 "var f = mkClosure();"
3612 "%OptimizeFunctionOnNextCall(f); f(3);");
3614 Handle<JSFunction> f =
3615 v8::Utils::OpenHandle(
3616 *v8::Handle<v8::Function>::Cast(
3617 CcTest::global()->Get(v8_str("f"))));
3618 CHECK(f->is_compiled());
3619 const int kAgingThreshold = 6;
// Age the shared unoptimized code past the flushing threshold.
3620 for (int i = 0; i < kAgingThreshold; i++) {
3621 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3624 CompileRun("f = null;");
3627 // Simulate incremental marking so that unoptimized code is flushed
3628 // even though it still is cached in the optimized code map.
3629 SimulateIncrementalMarking(heap);
3630 heap->CollectAllGarbage(Heap::kNoGCFlags);
3632 // Make a new closure that will get code installed from the code map.
3633 // Unoptimized code is missing and the deoptimizer will go ballistic.
3634 CompileRun("var g = mkClosure(); g('bozo');");
// Regression test for crbug 169209: when a flushing candidate's
// unoptimized code is replaced during optimization mid-GC, the code
// flusher's candidate list (tracked via gc_metadata) must be correctly
// repaired rather than left dangling.
3638 TEST(Regress169209) {
3639 i::FLAG_stress_compaction = false;
3640 i::FLAG_allow_natives_syntax = true;
3641 i::FLAG_flush_code_incrementally = true;
3643 CcTest::InitializeVM();
3644 Isolate* isolate = CcTest::i_isolate();
3645 Heap* heap = isolate->heap();
3646 HandleScope scope(isolate);
3648 // Perform one initial GC to enable code flushing.
3649 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3651 // Prepare a shared function info eligible for code flushing for which
3652 // the unoptimized code will be replaced during optimization.
3653 Handle<SharedFunctionInfo> shared1;
3655 HandleScope inner_scope(isolate);
3656 CompileRun("function f() { return 'foobar'; }"
3657 "function g(x) { if (x) f(); }"
3662 Handle<JSFunction> f =
3663 v8::Utils::OpenHandle(
3664 *v8::Handle<v8::Function>::Cast(
3665 CcTest::global()->Get(v8_str("f"))));
3666 CHECK(f->is_compiled());
3667 const int kAgingThreshold = 6;
// Age f's code so it qualifies as a flushing candidate.
3668 for (int i = 0; i < kAgingThreshold; i++) {
3669 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3672 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
3675 // Prepare a shared function info eligible for code flushing that will
3676 // represent the dangling tail of the candidate list.
3677 Handle<SharedFunctionInfo> shared2;
3679 HandleScope inner_scope(isolate);
3680 CompileRun("function flushMe() { return 0; }"
3683 Handle<JSFunction> f =
3684 v8::Utils::OpenHandle(
3685 *v8::Handle<v8::Function>::Cast(
3686 CcTest::global()->Get(v8_str("flushMe"))));
3687 CHECK(f->is_compiled());
3688 const int kAgingThreshold = 6;
3689 for (int i = 0; i < kAgingThreshold; i++) {
3690 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3693 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
3696 // Simulate incremental marking and collect code flushing candidates.
3697 SimulateIncrementalMarking(heap);
// gc_metadata links a candidate into the flusher's list; non-NULL
// means shared1 was enqueued.
3698 CHECK(shared1->code()->gc_metadata() != NULL);
3700 // Optimize function and make sure the unoptimized code is replaced.
3704 CompileRun("%OptimizeFunctionOnNextCall(g);"
3707 // Finish garbage collection cycle.
3708 heap->CollectAllGarbage(Heap::kNoGCFlags);
3709 CHECK(shared1->code()->gc_metadata() == NULL);
3713 // Helper function that simulates a fill new-space in the heap.
// Consumes the new-space linear allocation area except for the last
// `extra_bytes` (second parameter; its declaration line is not visible
// in this listing), turning the consumed span into a free-list node.
3714 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
3716 int space_remaining = static_cast<int>(
3717 *space->allocation_limit_address() - *space->allocation_top_address());
3718 CHECK(space_remaining >= extra_bytes);
3719 int new_linear_size = space_remaining - extra_bytes;
3720 v8::internal::AllocationResult allocation =
3721 space->AllocateRaw(new_linear_size);
// Format the raw allocation as a free-list node so the heap treats the
// consumed region as a valid (dead) object.
3722 v8::internal::FreeListNode* node =
3723 v8::internal::FreeListNode::cast(allocation.ToObjectChecked());
3724 node->set_size(space->heap(), new_linear_size);
// Regression test for crbug 169928: carefully positions a JSArray at
// the end of new space with a filler where its AllocationMemento would
// be, then runs literal code that reads the memento — a stale memento
// read would fault in a hardened build.
3728 TEST(Regress169928) {
3729 i::FLAG_allow_natives_syntax = true;
3730 i::FLAG_crankshaft = false;
3731 CcTest::InitializeVM();
3732 Isolate* isolate = CcTest::i_isolate();
3733 Factory* factory = isolate->factory();
3734 v8::HandleScope scope(CcTest::isolate());
3736 // Some flags turn Scavenge collections into Mark-sweep collections
3737 // and hence are incompatible with this test case.
3738 if (FLAG_gc_global || FLAG_stress_compaction) return;
3740 // Prepare the environment
3741 CompileRun("function fastliteralcase(literal, value) {"
3742 " literal[0] = value;"
3745 "function get_standard_literal() {"
3746 " var literal = [1, 2, 3];"
3749 "obj = fastliteralcase(get_standard_literal(), 1);"
3750 "obj = fastliteralcase(get_standard_literal(), 1.5);"
3751 "obj = fastliteralcase(get_standard_literal(), 2);");
// Pre-build the script string now so running it later does not
// allocate and disturb the carefully arranged new-space layout.
3754 v8::Local<v8::String> mote_code_string =
3755 v8_str("fastliteralcase(mote, 2.5);");
3757 v8::Local<v8::String> array_name = v8_str("mote");
3758 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
3760 // First make sure we flip spaces
3761 CcTest::heap()->CollectGarbage(NEW_SPACE);
3763 // Allocate the object.
3764 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
3765 array_data->set(0, Smi::FromInt(1));
3766 array_data->set(1, Smi::FromInt(2));
3768 AllocateAllButNBytes(CcTest::heap()->new_space(),
3769 JSArray::kSize + AllocationMemento::kSize +
3772 Handle<JSArray> array = factory->NewJSArrayWithElements(array_data,
3776 CHECK_EQ(Smi::FromInt(2), array->length());
3777 CHECK(array->HasFastSmiOrObjectElements());
3779 // We need filler the size of AllocationMemento object, plus an extra
3780 // fill pointer value.
3781 HeapObject* obj = NULL;
3782 AllocationResult allocation = CcTest::heap()->new_space()->AllocateRaw(
3783 AllocationMemento::kSize + kPointerSize);
3784 CHECK(allocation.To(&obj));
3785 Address addr_obj = obj->address();
3786 CcTest::heap()->CreateFillerObjectAt(
3787 addr_obj, AllocationMemento::kSize + kPointerSize);
3789 // Give the array a name, making sure not to allocate strings.
3790 v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
3791 CcTest::global()->Set(array_name, array_obj);
3793 // This should crash with a protection violation if we are running a build
3795 AlwaysAllocateScope aa_scope(isolate);
3796 v8::Script::Compile(mote_code_string)->Run();
// Regression test for crbug 168801: a function enqueued for incremental
// code flushing whose code lives on an evacuation candidate, and which
// then gets optimized (removed from the candidate list), must not
// corrupt the heap during the completing and subsequent GC cycles.
3800 TEST(Regress168801) {
3801 if (i::FLAG_never_compact) return;
3802 i::FLAG_always_compact = true;
3803 i::FLAG_cache_optimized_code = false;
3804 i::FLAG_allow_natives_syntax = true;
3805 i::FLAG_flush_code_incrementally = true;
3806 CcTest::InitializeVM();
3807 Isolate* isolate = CcTest::i_isolate();
3808 Heap* heap = isolate->heap();
3809 HandleScope scope(isolate);
3811 // Perform one initial GC to enable code flushing.
3812 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3814 // Ensure the code ends up on an evacuation candidate.
3815 SimulateFullSpace(heap->code_space());
3817 // Prepare an unoptimized function that is eligible for code flushing.
3818 Handle<JSFunction> function;
3820 HandleScope inner_scope(isolate);
3821 CompileRun("function mkClosure() {"
3822 " return function(x) { return x + 1; };"
3824 "var f = mkClosure();"
3827 Handle<JSFunction> f =
3828 v8::Utils::OpenHandle(
3829 *v8::Handle<v8::Function>::Cast(
3830 CcTest::global()->Get(v8_str("f"))));
3831 CHECK(f->is_compiled());
3832 const int kAgingThreshold = 6;
// Age the code past the flushing threshold.
3833 for (int i = 0; i < kAgingThreshold; i++) {
3834 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3837 function = inner_scope.CloseAndEscape(handle(*f, isolate));
3840 // Simulate incremental marking so that unoptimized function is enqueued as a
3841 // candidate for code flushing. The shared function info however will not be
3842 // explicitly enqueued.
3843 SimulateIncrementalMarking(heap);
3845 // Now optimize the function so that it is taken off the candidate list.
3847 HandleScope inner_scope(isolate);
3848 CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
3851 // This cycle will bust the heap and subsequent cycles will go ballistic.
3852 heap->CollectAllGarbage(Heap::kNoGCFlags);
3853 heap->CollectAllGarbage(Heap::kNoGCFlags);
// Regression test for crbug 173458: same setup as Regress168801, but
// instead of optimizing the candidate, the debugger is loaded mid-GC
// (which disables code flushing); the completing GCs must stay sound.
3857 TEST(Regress173458) {
3858 if (i::FLAG_never_compact) return;
3859 i::FLAG_always_compact = true;
3860 i::FLAG_cache_optimized_code = false;
3861 i::FLAG_allow_natives_syntax = true;
3862 i::FLAG_flush_code_incrementally = true;
3863 CcTest::InitializeVM();
3864 Isolate* isolate = CcTest::i_isolate();
3865 Heap* heap = isolate->heap();
3866 HandleScope scope(isolate);
3868 // Perform one initial GC to enable code flushing.
3869 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
3871 // Ensure the code ends up on an evacuation candidate.
3872 SimulateFullSpace(heap->code_space());
3874 // Prepare an unoptimized function that is eligible for code flushing.
3875 Handle<JSFunction> function;
3877 HandleScope inner_scope(isolate);
3878 CompileRun("function mkClosure() {"
3879 " return function(x) { return x + 1; };"
3881 "var f = mkClosure();"
3884 Handle<JSFunction> f =
3885 v8::Utils::OpenHandle(
3886 *v8::Handle<v8::Function>::Cast(
3887 CcTest::global()->Get(v8_str("f"))));
3888 CHECK(f->is_compiled());
3889 const int kAgingThreshold = 6;
// Age the code past the flushing threshold.
3890 for (int i = 0; i < kAgingThreshold; i++) {
3891 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3894 function = inner_scope.CloseAndEscape(handle(*f, isolate));
3897 // Simulate incremental marking so that unoptimized function is enqueued as a
3898 // candidate for code flushing. The shared function info however will not be
3899 // explicitly enqueued.
3900 SimulateIncrementalMarking(heap);
3902 // Now enable the debugger which in turn will disable code flushing.
3903 CHECK(isolate->debug()->Load());
3905 // This cycle will bust the heap and subsequent cycles will go ballistic.
3906 heap->CollectAllGarbage(Heap::kNoGCFlags);
3907 heap->CollectAllGarbage(Heap::kNoGCFlags);
// No-op ObjectVisitor used by TEST(DeferredHandles) below: iterating
// handles with it only exercises the traversal, touching nothing.
3911 class DummyVisitor : public ObjectVisitor {
3913 void VisitPointers(Object** start, Object** end) { }
// Fills the current handle block exactly to its limit, then creates a
// DeferredHandleScope and iterates all handles — exercising the
// boundary case where the next handle would need a new block.
3917 TEST(DeferredHandles) {
3918 CcTest::InitializeVM();
3919 Isolate* isolate = CcTest::i_isolate();
3920 Heap* heap = isolate->heap();
3921 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
3922 HandleScopeData* data = isolate->handle_scope_data();
3923 Handle<Object> init(heap->empty_string(), isolate);
// Allocate handles until the current block is exactly full.
3924 while (data->next < data->limit) {
3925 Handle<Object> obj(heap->empty_string(), isolate);
3927 // An entire block of handles has been filled.
3928 // Next handle would require a new block.
3929 DCHECK(data->next == data->limit);
3931 DeferredHandleScope deferred(isolate);
3932 DummyVisitor visitor;
3933 isolate->handle_scope_implementer()->Iterate(&visitor);
3934 delete deferred.Detach();
// Builds a ~10M-element array and checks a single huge incremental
// marking step (100 MB budget) is enough to complete marking.
3938 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
3939 CcTest::InitializeVM();
3940 v8::HandleScope scope(CcTest::isolate());
3941 CompileRun("function f(n) {"
3942 " var a = new Array(n);"
3943 " for (var i = 0; i < n; i += 100) a[i] = i;"
3945 "f(10 * 1024 * 1024);");
3946 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
3947 if (marking->IsStopped()) marking->Start();
3948 // This big step should be sufficient to mark the whole array.
3949 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
3950 DCHECK(marking->IsComplete());
// Exercises an allocating, optimize/deoptimize-cycled function with inline
// allocation enabled, then disabled, then re-enabled, to cover both the
// inline and runtime allocation paths.
3954 TEST(DisableInlineAllocation) {
3955 i::FLAG_allow_natives_syntax = true;
3956 CcTest::InitializeVM();
3957 v8::HandleScope scope(CcTest::isolate());
3958 CompileRun("function test() {"
3960 " for (var i = 0; i < 10; i++) {"
3961 " x[i] = [ {}, [1,2,3], [1,x,3] ];"
3965 " %OptimizeFunctionOnNextCall(test);"
3967 " %DeoptimizeFunction(test);"
3970 // Warm-up with inline allocation enabled.
3971 CompileRun("test(); test(); run();");
3973 // Run test with inline allocation disabled.
3974 CcTest::heap()->DisableInlineAllocation();
3975 CompileRun("run()");
3977 // Run test with inline allocation re-enabled.
3978 CcTest::heap()->EnableInlineAllocation();
3979 CompileRun("run()");
// Counts the AllocationSite objects on the heap's allocation-sites list by
// walking weak_next() links until the undefined sentinel is reached.
3983 static int AllocationSitesCount(Heap* heap) {
3985 for (Object* site = heap->allocation_sites_list();
3986 !(site->IsUndefined());
3987 site = AllocationSite::cast(site)->weak_next()) {
// Optimized code recorded in an AllocationSite's dependent_code() must be
// dropped by GC once the function dies, even while the site itself stays
// alive through a global handle.
3994 TEST(EnsureAllocationSiteDependentCodesProcessed) {
3995 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
3996 i::FLAG_allow_natives_syntax = true;
3997 CcTest::InitializeVM();
3998 Isolate* isolate = CcTest::i_isolate();
3999 v8::internal::Heap* heap = CcTest::heap();
4000 GlobalHandles* global_handles = isolate->global_handles();
4002 if (!isolate->use_crankshaft()) return;
4004 // The allocation site at the head of the list is ours.
4005 Handle<AllocationSite> site;
4007 LocalContext context;
4008 v8::HandleScope scope(context->GetIsolate());
4010 int count = AllocationSitesCount(heap);
4011 CompileRun("var bar = function() { return (new Array()); };"
4016 // One allocation site should have been created.
4017 int new_count = AllocationSitesCount(heap);
4018 CHECK_EQ(new_count, (count + 1));
// Pin the newly-created site (list head) with a global handle so only the
// dependent code, not the site, can be collected below.
4019 site = Handle<AllocationSite>::cast(
4020 global_handles->Create(
4021 AllocationSite::cast(heap->allocation_sites_list())));
4023 CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
4025 DependentCode::GroupStartIndexes starts(site->dependent_code());
4026 CHECK_GE(starts.number_of_entries(), 1);
4027 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4028 CHECK(site->dependent_code()->is_code_at(index));
4029 Code* function_bar = site->dependent_code()->code_at(index);
4030 Handle<JSFunction> bar_handle =
4031 v8::Utils::OpenHandle(
4032 *v8::Handle<v8::Function>::Cast(
4033 CcTest::global()->Get(v8_str("bar"))));
4034 CHECK_EQ(bar_handle->code(), function_bar);
4037 // Now make sure that a gc should get rid of the function, even though we
4038 // still have the allocation site alive.
4039 for (int i = 0; i < 4; i++) {
4040 heap->CollectAllGarbage(Heap::kNoGCFlags);
4043 // The site still exists because of our global handle, but the code is no
4044 // longer referred to by dependent_code().
4045 DependentCode::GroupStartIndexes starts(site->dependent_code());
4046 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4047 CHECK(!(site->dependent_code()->is_code_at(index)));
// Cells embedded in optimized code must be treated as weak: after the
// function becomes unreachable, full GCs should mark its code for
// deoptimization rather than keep the cells alive.
4051 TEST(CellsInOptimizedCodeAreWeak) {
4052 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4053 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4054 i::FLAG_allow_natives_syntax = true;
4055 CcTest::InitializeVM();
4056 Isolate* isolate = CcTest::i_isolate();
4057 v8::internal::Heap* heap = CcTest::heap();
4059 if (!isolate->use_crankshaft()) return;
4060 HandleScope outer_scope(heap->isolate());
4063 LocalContext context;
4064 HandleScope scope(heap->isolate());
4066 CompileRun("bar = (function() {"
4070 " var foo = function(x) { with (x) { return 1 + x; } };"
4074 " %OptimizeFunctionOnNextCall(bar);"
4076 " return bar;})();");
4078 Handle<JSFunction> bar =
4079 v8::Utils::OpenHandle(
4080 *v8::Handle<v8::Function>::Cast(
4081 CcTest::global()->Get(v8_str("bar"))));
// Keep only the code alive in the outer scope; the function itself dies.
4082 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4085 // Now make sure that a gc should get rid of the function
4086 for (int i = 0; i < 4; i++) {
4087 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4090 DCHECK(code->marked_for_deoptimization());
// Same as CellsInOptimizedCodeAreWeak, but for heap objects embedded in
// optimized code: once the function dies, GC must mark the surviving code
// object for deoptimization.
4094 TEST(ObjectsInOptimizedCodeAreWeak) {
4095 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4096 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4097 i::FLAG_allow_natives_syntax = true;
4098 CcTest::InitializeVM();
4099 Isolate* isolate = CcTest::i_isolate();
4100 v8::internal::Heap* heap = CcTest::heap();
4102 if (!isolate->use_crankshaft()) return;
4103 HandleScope outer_scope(heap->isolate());
4106 LocalContext context;
4107 HandleScope scope(heap->isolate());
4109 CompileRun("function bar() {"
4112 "function foo(x) { with (x) { return 1 + x; } };"
4116 "%OptimizeFunctionOnNextCall(bar);"
4119 Handle<JSFunction> bar =
4120 v8::Utils::OpenHandle(
4121 *v8::Handle<v8::Function>::Cast(
4122 CcTest::global()->Get(v8_str("bar"))));
// Escape only the code handle; the function becomes garbage.
4123 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4126 // Now make sure that a gc should get rid of the function
4127 for (int i = 0; i < 4; i++) {
4128 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4131 DCHECK(code->marked_for_deoptimization());
// Repeatedly optimizes short-lived functions under simulated incremental
// marking and checks that the weak object-to-code hash table does not
// accumulate (leak) entries across GC cycles.
4135 TEST(NoWeakHashTableLeakWithIncrementalMarking) {
4136 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4137 if (!i::FLAG_incremental_marking) return;
4138 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4139 i::FLAG_allow_natives_syntax = true;
// Disable the compilation cache so each iteration compiles fresh code.
4140 i::FLAG_compilation_cache = false;
4141 CcTest::InitializeVM();
4142 Isolate* isolate = CcTest::i_isolate();
4143 v8::internal::Heap* heap = CcTest::heap();
4145 if (!isolate->use_crankshaft()) return;
4146 HandleScope outer_scope(heap->isolate());
4147 for (int i = 0; i < 3; i++) {
4148 SimulateIncrementalMarking(heap);
4150 LocalContext context;
4151 HandleScope scope(heap->isolate());
4152 EmbeddedVector<char, 256> source;
4154 "function bar%d() {"
4157 "function foo%d(x) { with (x) { return 1 + x; } };"
4161 "%%OptimizeFunctionOnNextCall(bar%d);"
4162 "bar%d();", i, i, i, i, i, i, i, i);
4163 CompileRun(source.start());
4165 heap->CollectAllGarbage(i::Heap::kNoGCFlags);
4168 if (heap->weak_object_to_code_table()->IsHashTable()) {
4169 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
4170 elements = t->NumberOfElements();
4172 CHECK_EQ(0, elements);
// Compiles and crankshaft-optimizes a trivial zero-returning function with
// the given name, then returns a handle to the resulting JSFunction.
4176 static Handle<JSFunction> OptimizeDummyFunction(const char* name) {
4177 EmbeddedVector<char, 256> source;
4179 "function %s() { return 0; }"
4181 "%%OptimizeFunctionOnNextCall(%s);"
4182 "%s();", name, name, name, name, name);
4183 CompileRun(source.start());
4184 Handle<JSFunction> fun =
4185 v8::Utils::OpenHandle(
4186 *v8::Handle<v8::Function>::Cast(
4187 CcTest::global()->Get(v8_str(name))));
// Counts how many Code objects are reachable from |code| by following
// next_code_link() until a non-Code value terminates the chain.
4192 static int GetCodeChainLength(Code* code) {
4194 while (code->next_code_link()->IsCode()) {
4196 code = Code::cast(code->next_code_link());
// The next_code_link field must be weak: after the "mortal" function dies,
// a full GC should shorten the optimized-code chain by exactly one entry
// while the "immortal" code (kept via an escaped handle) survives.
4202 TEST(NextCodeLinkIsWeak) {
4203 i::FLAG_allow_natives_syntax = true;
4204 i::FLAG_turbo_deoptimization = true;
4205 CcTest::InitializeVM();
4206 Isolate* isolate = CcTest::i_isolate();
4207 v8::internal::Heap* heap = CcTest::heap();
4209 if (!isolate->use_crankshaft()) return;
4210 HandleScope outer_scope(heap->isolate());
4212 heap->CollectAllAvailableGarbage();
4213 int code_chain_length_before, code_chain_length_after;
4215 HandleScope scope(heap->isolate());
4216 Handle<JSFunction> mortal = OptimizeDummyFunction("mortal");
4217 Handle<JSFunction> immortal = OptimizeDummyFunction("immortal");
4218 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
4219 code_chain_length_before = GetCodeChainLength(immortal->code());
4220 // Keep the immortal code and let the mortal code die.
4221 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
4222 CompileRun("mortal = null; immortal = null;");
4224 heap->CollectAllAvailableGarbage();
4225 // Now mortal code should be dead.
4226 code_chain_length_after = GetCodeChainLength(*code);
4227 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
// Assembles a minimal code object (a single push of undefined) flagged as
// OPTIMIZED_FUNCTION, for use in optimized-code-list linking tests.
4231 static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
4232 i::byte buffer[i::Assembler::kMinimalBufferSize];
4233 MacroAssembler masm(isolate, buffer, sizeof(buffer));
4235 masm.Push(isolate->factory()->undefined_value());
4237 masm.GetCode(&desc);
4238 Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
4239 Handle<Code> code = isolate->factory()->NewCode(
4240 desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
4241 CHECK(code->IsCode());
// Links two dummy optimized code objects into the context's
// OPTIMIZED_CODE_LIST; after GC, the unreferenced "mortal" entry must be
// unlinked so the surviving head points straight at the old list head.
4246 TEST(NextCodeLinkIsWeak2) {
4247 i::FLAG_allow_natives_syntax = true;
4248 CcTest::InitializeVM();
4249 Isolate* isolate = CcTest::i_isolate();
4250 v8::internal::Heap* heap = CcTest::heap();
4252 if (!isolate->use_crankshaft()) return;
4253 HandleScope outer_scope(heap->isolate());
4254 heap->CollectAllAvailableGarbage();
4255 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
4256 Handle<Code> new_head;
4257 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
4259 HandleScope scope(heap->isolate());
4260 Handle<Code> immortal = DummyOptimizedCode(isolate);
4261 Handle<Code> mortal = DummyOptimizedCode(isolate);
// Build the chain: immortal -> mortal -> old list head.
4262 mortal->set_next_code_link(*old_head);
4263 immortal->set_next_code_link(*mortal);
4264 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
4265 new_head = scope.CloseAndEscape(immortal);
4267 heap->CollectAllAvailableGarbage();
4268 // Now mortal code should be dead.
4269 CHECK_EQ(*old_head, new_head->next_code_link());
// Set by ClearWeakIC so tests can verify the weak callback actually fired.
4273 static bool weak_ic_cleared = false;
// Weak callback used by CheckWeakness: records that it ran and checks that
// the owning persistent handle (passed as the callback parameter) is on
// its way to being collected.
4275 static void ClearWeakIC(const v8::WeakCallbackData<v8::Object, void>& data) {
4276 printf("clear weak is called\n");
4277 weak_ic_cleared = true;
4278 v8::Persistent<v8::Value>* p =
4279 reinterpret_cast<v8::Persistent<v8::Value>*>(data.GetParameter());
4280 CHECK(p->IsNearDeath());
4285 // Checks that the value returned by execution of the source is weak.
// Runs |source|, stashes the resulting object in a weak persistent handle,
// forces a full GC, and asserts the weak callback was invoked (i.e. the IC
// created by the source did not keep the object alive).
4286 void CheckWeakness(const char* source) {
4287 i::FLAG_stress_compaction = false;
4288 CcTest::InitializeVM();
4289 v8::Isolate* isolate = CcTest::isolate();
4290 v8::HandleScope scope(isolate);
4291 v8::Persistent<v8::Object> garbage;
4293 v8::HandleScope scope(isolate);
4294 garbage.Reset(isolate, CompileRun(source)->ToObject());
4296 weak_ic_cleared = false;
4297 garbage.SetWeak(static_cast<void*>(&garbage), &ClearWeakIC);
4298 Heap* heap = CcTest::i_isolate()->heap();
4299 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4300 CHECK(weak_ic_cleared);
4304 // Each of the following "weak IC" tests creates an IC that embeds a map with
4305 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
// Monomorphic LoadIC case.
4306 TEST(WeakMapInMonomorphicLoadIC) {
4307 CheckWeakness("function loadIC(obj) {"
4311 " var proto = {'name' : 'weak'};"
4312 " var obj = Object.create(proto);"
// Monomorphic KeyedLoadIC case: the map embedded by the keyed load must
// not keep the prototype alive.
4321 TEST(WeakMapInMonomorphicKeyedLoadIC) {
4322 CheckWeakness("function keyedLoadIC(obj, field) {"
4323 " return obj[field];"
4326 " var proto = {'name' : 'weak'};"
4327 " var obj = Object.create(proto);"
4328 " keyedLoadIC(obj, 'name');"
4329 " keyedLoadIC(obj, 'name');"
4330 " keyedLoadIC(obj, 'name');"
// Monomorphic StoreIC case: the map embedded by the store must not keep
// the prototype alive.
4336 TEST(WeakMapInMonomorphicStoreIC) {
4337 CheckWeakness("function storeIC(obj, value) {"
4338 " obj.name = value;"
4341 " var proto = {'name' : 'weak'};"
4342 " var obj = Object.create(proto);"
4343 " storeIC(obj, 'x');"
4344 " storeIC(obj, 'x');"
4345 " storeIC(obj, 'x');"
// Monomorphic KeyedStoreIC case: the map embedded by the keyed store must
// not keep the prototype alive.
4351 TEST(WeakMapInMonomorphicKeyedStoreIC) {
4352 CheckWeakness("function keyedStoreIC(obj, field, value) {"
4353 " obj[field] = value;"
4356 " var proto = {'name' : 'weak'};"
4357 " var obj = Object.create(proto);"
4358 " keyedStoreIC(obj, 'x');"
4359 " keyedStoreIC(obj, 'x');"
4360 " keyedStoreIC(obj, 'x');"
// Monomorphic CompareNilIC case: the map embedded by the nil comparison
// must not keep the prototype alive.
4366 TEST(WeakMapInMonomorphicCompareNilIC) {
4367 CheckWeakness("function compareNilIC(obj) {"
4368 " return obj == null;"
4371 " var proto = {'name' : 'weak'};"
4372 " var obj = Object.create(proto);"
4373 " compareNilIC(obj);"
4374 " compareNilIC(obj);"
4375 " compareNilIC(obj);"
// NOTE(review): the TEST(...) header line for this body is not visible in
// this excerpt; it appears to be a WeakCell test. weak_cell1 holds the
// only reference to its value, weak_cell2's value ("survivor") is also
// held by a strong handle: scavenges must clear neither, and a full GC
// must clear weak_cell1 but not weak_cell2.
4382 CcTest::InitializeVM();
4383 Isolate* isolate = CcTest::i_isolate();
4384 v8::internal::Heap* heap = CcTest::heap();
4385 v8::internal::Factory* factory = isolate->factory();
4387 HandleScope outer_scope(isolate);
4388 Handle<WeakCell> weak_cell1;
4390 HandleScope inner_scope(isolate);
4391 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
4392 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
4395 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
4396 Handle<WeakCell> weak_cell2;
4398 HandleScope inner_scope(isolate);
4399 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
4401 CHECK(weak_cell1->value()->IsFixedArray());
4402 CHECK_EQ(*survivor, weak_cell2->value());
// Two scavenges: neither cell may be cleared by new-space GC alone.
4403 heap->CollectGarbage(NEW_SPACE);
4404 CHECK(weak_cell1->value()->IsFixedArray());
4405 CHECK_EQ(*survivor, weak_cell2->value());
4406 heap->CollectGarbage(NEW_SPACE);
4407 CHECK(weak_cell1->value()->IsFixedArray());
4408 CHECK_EQ(*survivor, weak_cell2->value());
// Full GC: the unreferenced value dies, the survivor's cell stays intact.
4409 heap->CollectAllAvailableGarbage();
4410 CHECK(weak_cell1->cleared());
4411 CHECK_EQ(*survivor, weak_cell2->value());
// Creates N weak cells while stepping incremental marking and scavenging;
// after a full GC, only the cell whose value is still strongly held
// (index 0, "survivor") keeps its value — all others must be cleared.
4415 TEST(WeakCellsWithIncrementalMarking) {
4416 CcTest::InitializeVM();
4417 Isolate* isolate = CcTest::i_isolate();
4418 v8::internal::Heap* heap = CcTest::heap();
4419 v8::internal::Factory* factory = isolate->factory();
4422 HandleScope outer_scope(isolate);
4423 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
4424 Handle<WeakCell> weak_cells[N];
4426 for (int i = 0; i < N; i++) {
4427 HandleScope inner_scope(isolate);
4428 Handle<HeapObject> value =
4429 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
4430 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
4431 CHECK(weak_cell->value()->IsFixedArray());
// Interleave an incremental-marking step and a scavenge per iteration.
4432 IncrementalMarking* marking = heap->incremental_marking();
4433 if (marking->IsStopped()) marking->Start();
4434 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4435 heap->CollectGarbage(NEW_SPACE);
4436 CHECK(weak_cell->value()->IsFixedArray());
4437 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
4439 heap->CollectAllGarbage(Heap::kNoGCFlags);
4440 CHECK_EQ(*survivor, weak_cells[0]->value());
4441 for (int i = 1; i < N; i++) {
4442 CHECK(weak_cells[i]->cleared());
// Regression test: with stress compaction and a 1-allocation GC timeout,
// calling an optimized function whose add instruction can trigger
// allocation must not corrupt the heap during new-space promotion.
4448 TEST(AddInstructionChangesNewSpacePromotion) {
4449 i::FLAG_allow_natives_syntax = true;
4450 i::FLAG_expose_gc = true;
4451 i::FLAG_stress_compaction = true;
4452 i::FLAG_gc_interval = 1000;
4453 CcTest::InitializeVM();
4454 if (!i::FLAG_allocation_site_pretenuring) return;
4455 v8::HandleScope scope(CcTest::isolate());
4456 Isolate* isolate = CcTest::i_isolate();
4457 Heap* heap = isolate->heap();
4460 "function add(a, b) {"
4464 "add(\"a\", \"b\");"
4465 "var oldSpaceObject;"
4467 "function crash(x) {"
4468 " var object = {a: null, b: null};"
4469 " var result = add(1.5, x | 0);"
4470 " object.a = result;"
4471 " oldSpaceObject = object;"
4476 "%OptimizeFunctionOnNextCall(crash);"
4479 v8::Handle<v8::Object> global = CcTest::global();
4480 v8::Handle<v8::Function> g =
4481 v8::Handle<v8::Function>::Cast(global->Get(v8_str("crash")));
4482 v8::Handle<v8::Value> args1[] = { v8_num(1) };
// Force the runtime allocation path and make the very next allocation GC.
4483 heap->DisableInlineAllocation();
4484 heap->set_allocation_timeout(1);
4485 g->Call(global, 1, args1);
4486 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
// Fatal-error handler for the OOM test: exits 0 (test success) only if the
// fatal error originated from CALL_AND_RETRY_LAST, i.e. a real OOM.
4490 void OnFatalErrorExpectOOM(const char* location, const char* message) {
4491 // Exit with 0 if the location matches our expectation.
4492 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
// Runs script with --gc-interval=1 so a CEntry stub call hits OOM; the
// installed fatal-error handler turns the expected OOM into a clean exit.
4496 TEST(CEntryStubOOM) {
4497 i::FLAG_allow_natives_syntax = true;
4498 CcTest::InitializeVM();
4499 v8::HandleScope scope(CcTest::isolate());
4500 v8::V8::SetFatalErrorHandler(OnFatalErrorExpectOOM);
4502 v8::Handle<v8::Value> result = CompileRun(
4503 "%SetFlags('--gc-interval=1');"
4508 CHECK(result->IsNumber());
// No-op interrupt callback for Regress357137; requesting the interrupt is
// what matters, not what the callback does.
4514 static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
// Native "interrupt" function exposed to script: requests an isolate
// interrupt that will be serviced during script execution.
4517 static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
4518 CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
// Regression test for issue 357137: an interrupt delivered while eval'ing
// a function with ~512 locals (fake stack overflow) must not break the
// resulting closure; it should still return 42.
4522 TEST(Regress357137) {
4523 CcTest::InitializeVM();
4524 v8::Isolate* isolate = CcTest::isolate();
4525 v8::HandleScope hscope(isolate);
4526 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
4527 global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
4528 v8::FunctionTemplate::New(isolate, RequestInterrupt));
4529 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
4530 DCHECK(!context.IsEmpty());
4531 v8::Context::Scope cscope(context);
4533 v8::Local<v8::Value> result = CompileRun(
4535 "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
4536 "eval('function f() {' + locals + 'return function() { return v0; }; }');"
4537 "interrupt();" // This triggers a fake stack overflow in f.
4539 CHECK_EQ(42.0, result->ToNumber()->Value());
// After Array.prototype.shift trims an old-space backing store, the
// elements must either be on a page whose parallel sweeping has finalized
// or be marked black — otherwise the sweeper could free live memory.
4543 TEST(ArrayShiftSweeping) {
4544 i::FLAG_expose_gc = true;
4545 CcTest::InitializeVM();
4546 v8::HandleScope scope(CcTest::isolate());
4547 Isolate* isolate = CcTest::i_isolate();
4548 Heap* heap = isolate->heap();
4550 v8::Local<v8::Value> result = CompileRun(
4551 "var array = new Array(40000);"
4552 "var tmp = new Array(100000);"
4559 Handle<JSObject> o =
4560 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
4561 CHECK(heap->InOldPointerSpace(o->elements()));
4562 CHECK(heap->InOldPointerSpace(*o));
4563 Page* page = Page::FromAddress(o->elements()->address());
4564 CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
4565 Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
// Checks that scavenge allocations which would overwrite the promotion
// queue (stored at the end of to-space) trigger its evacuation instead of
// corrupting it. Steps (1)-(4) are described in the comment block below.
4569 UNINITIALIZED_TEST(PromotionQueue) {
4570 i::FLAG_expose_gc = true;
// Small semi-spaces so two pages is the maximum and fragmentation is easy.
4571 i::FLAG_max_semi_space_size = 2;
4572 v8::Isolate* isolate = v8::Isolate::New();
4573 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
4575 v8::Isolate::Scope isolate_scope(isolate);
4576 v8::HandleScope handle_scope(isolate);
4577 v8::Context::New(isolate)->Enter();
4578 Heap* heap = i_isolate->heap();
4579 NewSpace* new_space = heap->new_space();
4581 // In this test we will try to overwrite the promotion queue which is at the
4582 // end of to-space. To actually make that possible, we need at least two
4583 // semi-space pages and take advantage of fragmentation.
4584 // (1) Grow semi-space to two pages.
4585 // (2) Create a few small long living objects and call the scavenger to
4586 // move them to the other semi-space.
4587 // (3) Create a huge object, i.e., remainder of first semi-space page and
4588 // create another huge object which should be of maximum allocatable memory
4589 // size of the second semi-space page.
4590 // (4) Call the scavenger again.
4591 // What will happen is: the scavenger will promote the objects created in
4592 // (2) and will create promotion queue entries at the end of the second
4593 // semi-space page during the next scavenge when it promotes the objects to
4594 // the old generation. The first allocation of (3) will fill up the first
4595 // semi-space page. The second allocation in (3) will not fit into the
4596 // first semi-space page, but it will overwrite the promotion queue which
4597 // are in the second semi-space page. If the right guards are in place, the
4598 // promotion queue will be evacuated in that case.
4600 // Grow the semi-space to two pages to make semi-space copy overwrite the
4601 // promotion queue, which will be at the end of the second page.
4602 intptr_t old_capacity = new_space->TotalCapacity();
4604 // If we are in a low memory config, we can't grow to two pages and we can't
4605 // run this test. This also means the issue we are testing cannot arise, as
4606 // there is no fragmentation.
4607 if (new_space->IsAtMaximumCapacity()) return;
4610 CHECK(new_space->IsAtMaximumCapacity());
4611 CHECK(2 * old_capacity == new_space->TotalCapacity());
4613 // Call the scavenger two times to get an empty new space
4614 heap->CollectGarbage(NEW_SPACE);
4615 heap->CollectGarbage(NEW_SPACE);
4617 // First create a few objects which will survive a scavenge, and will get
4618 // promoted to the old generation later on. These objects will create
4619 // promotion queue entries at the end of the second semi-space page.
4620 const int number_handles = 12;
4621 Handle<FixedArray> handles[number_handles];
4622 for (int i = 0; i < number_handles; i++) {
4623 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
4625 heap->CollectGarbage(NEW_SPACE);
4627 // Create the first huge object which will exactly fit the first semi-space
4629 int new_linear_size =
4630 static_cast<int>(*heap->new_space()->allocation_limit_address() -
4631 *heap->new_space()->allocation_top_address());
4632 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize;
4633 Handle<FixedArray> first =
4634 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
4635 CHECK(heap->InNewSpace(*first));
4637 // Create the second huge object of maximum allocatable second semi-space
4640 static_cast<int>(*heap->new_space()->allocation_limit_address() -
4641 *heap->new_space()->allocation_top_address());
4642 length = Page::kMaxRegularHeapObjectSize / kPointerSize -
4643 FixedArray::kHeaderSize;
4644 Handle<FixedArray> second =
4645 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
4646 CHECK(heap->InNewSpace(*second));
4648 // This scavenge will corrupt memory if the promotion queue is not
4650 heap->CollectGarbage(NEW_SPACE);
// Regression test for issue 388880: migrating the map of an object placed
// exactly at the end of a page, while incremental marking is active, must
// not crash in Heap::AdjustLiveBytes() / JSObject::MigrateFastToFast().
4656 TEST(Regress388880) {
4657 i::FLAG_expose_gc = true;
4658 CcTest::InitializeVM();
4659 v8::HandleScope scope(CcTest::isolate());
4660 Isolate* isolate = CcTest::i_isolate();
4661 Factory* factory = isolate->factory();
4662 Heap* heap = isolate->heap();
4664 Handle<Map> map1 = Map::Create(isolate, 1);
4666 Map::CopyWithField(map1, factory->NewStringFromStaticChars("foo"),
4667 HeapType::Any(isolate), NONE, Representation::Tagged(),
4668 OMIT_TRANSITION).ToHandleChecked();
4670 int desired_offset = Page::kPageSize - map1->instance_size();
4672 // Allocate fixed array in old pointer space so, that object allocated
4673 // afterwards would end at the end of the page.
4675 SimulateFullSpace(heap->old_pointer_space());
// Size the padding array so the following allocation lands at
// desired_offset, i.e. flush against the page end.
4676 int padding_size = desired_offset - Page::kObjectStartOffset;
4677 int padding_array_length =
4678 (padding_size - FixedArray::kHeaderSize) / kPointerSize;
4680 Handle<FixedArray> temp2 =
4681 factory->NewFixedArray(padding_array_length, TENURED);
4682 Page* page = Page::FromAddress(temp2->address());
4683 CHECK_EQ(Page::kObjectStartOffset, page->Offset(temp2->address()));
4686 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED, false);
4687 o->set_properties(*factory->empty_fixed_array());
4689 // Ensure that the object allocated where we need it.
4690 Page* page = Page::FromAddress(o->address());
4691 CHECK_EQ(desired_offset, page->Offset(o->address()));
4693 // Now we have an object right at the end of the page.
4695 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
4696 // that would cause crash.
4697 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4700 CHECK(marking->IsMarking());
4702 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
4703 // when it calls heap->AdjustLiveBytes(...).
4704 JSObject::MigrateToMap(o, map2);
// NOTE(review): the TEST(...) header for this body is not visible in this
// excerpt. Body: populates a WeakMap, incrementally marks its backing
// store black, then inserts more entries (which replaces the backing
// store) and forces an old-space GC — exercising write-barrier/marking
// interaction with the stashed old backing store.
4709 i::FLAG_expose_gc = true;
4710 CcTest::InitializeVM();
4711 v8::HandleScope scope(CcTest::isolate());
4712 Isolate* isolate = CcTest::i_isolate();
4713 Heap* heap = isolate->heap();
4714 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4715 v8::Local<v8::Value> result = CompileRun(
4716 "var weak_map = new WeakMap();"
4717 "var future_keys = [];"
4718 "for (var i = 0; i < 50; i++) {"
4719 " var key = {'k' : i + 0.1};"
4720 " weak_map.set(key, 1);"
4721 " future_keys.push({'x' : i + 0.2});"
4724 if (marking->IsStopped()) {
4727 // Incrementally mark the backing store.
4728 Handle<JSObject> obj =
4729 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
4730 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
// Step marking until the weak map's table is black (or marking stops).
4731 while (!Marking::IsBlack(
4732 Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
4733 !marking->IsStopped()) {
4734 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4736 // Stash the backing store in a handle.
4737 Handle<Object> save(weak_map->table(), isolate);
4738 // The following line will update the backing store.
4740 "for (var i = 0; i < 50; i++) {"
4741 " weak_map.set(future_keys[i], i);"
4743 heap->incremental_marking()->set_should_hurry(true);
4744 heap->CollectGarbage(OLD_POINTER_SPACE);
// NOTE(review): header and closing of this test are not visible in this
// excerpt. Body: compiles a string literal and runs the heap's
// TracePathToObject diagnostic on the resulting object.
4750 CcTest::InitializeVM();
4751 v8::HandleScope scope(CcTest::isolate());
4753 v8::Local<v8::Value> result = CompileRun("'abc'");
4754 Handle<Object> o = v8::Utils::OpenHandle(*result);
4755 CcTest::i_isolate()->heap()->TracePathToObject(*o);