1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 #include "compilation-cache.h"
33 #include "execution.h"
35 #include "macro-assembler.h"
36 #include "global-handles.h"
37 #include "stub-cache.h"
40 using namespace v8::internal;
43 // Go through all incremental marking steps in one swoop.
44 static void SimulateIncrementalMarking() {
45 MarkCompactCollector* collector = HEAP->mark_compact_collector();
46 IncrementalMarking* marking = HEAP->incremental_marking();
// If a concurrent sweep is still running, block until it is done so that
// (re)starting marking below is well defined.
47 if (collector->IsConcurrentSweepingInProgress()) {
48 collector->WaitUntilSweepingCompleted();
// NOTE(review): this listing is gappy — original lines 49, 52-53 and 57/59-61
// (closing braces and, presumably, the marking->Start() call inside the
// IsStopped() branch) are missing; code kept verbatim. TODO: restore from
// the original file.
50 CHECK(marking->IsMarking() || marking->IsStopped());
51 if (marking->IsStopped()) {
54 CHECK(marking->IsMarking());
// Drive marking to completion one MB-sized step at a time; the stack guard
// must not be allowed to trigger a GC mid-simulation.
55 while (!marking->IsComplete()) {
56 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
58 CHECK(marking->IsComplete());
// Asserts the basic invariants of a Map object: it lives in the heap, its
// own map is the meta map, and its instance type/size match expectations.
// instance_size may be kVariableSizeSentinel for variable-sized types.
62 static void CheckMap(Map* map, int type, int instance_size) {
63 CHECK(map->IsHeapObject());
65 CHECK(HEAP->Contains(map));
// Every Map's map is the meta map (the map that describes maps).
67 CHECK_EQ(HEAP->meta_map(), map->map());
68 CHECK_EQ(type, map->instance_type());
69 CHECK_EQ(instance_size, map->instance_size());
// Body of a heap-maps test (the TEST(...) header, original line ~72-73, is
// missing from this listing): sanity-checks a handful of well-known root maps.
74 CcTest::InitializeVM();
75 CheckMap(HEAP->meta_map(), MAP_TYPE, Map::kSize);
76 CheckMap(HEAP->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
// Fixed arrays and strings are variable-sized, hence the sentinel.
77 CheckMap(HEAP->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
78 CheckMap(HEAP->string_map(), STRING_TYPE, kVariableSizeSentinel);
// Asserts that obj is an Oddball whose ToString equals the expected literal.
// NOTE(review): the declaration of `exc` (original line ~84, presumably
// `bool exc;`) is missing from this listing; code kept verbatim.
82 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
83 CHECK(obj->IsOddball());
85 Object* print_string =
86 *Execution::ToString(Handle<Object>(obj, isolate), &exc);
87 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Asserts that Smi::FromInt(value) stringifies to the expected literal.
// NOTE(review): the `exc` declaration (original line ~92) is missing here.
91 static void CheckSmi(Isolate* isolate, int value, const char* string) {
93 Object* print_string =
94 *Execution::ToString(Handle<Object>(Smi::FromInt(value), isolate), &exc);
95 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Allocates a heap number (or Smi) for `value` and asserts its ToString
// equals the expected literal.
// NOTE(review): the `exc` declaration (original line ~102) is missing here.
99 static void CheckNumber(Isolate* isolate, double value, const char* string) {
100 Object* obj = HEAP->NumberFromDouble(value)->ToObjectChecked();
101 CHECK(obj->IsNumber());
103 Object* print_string =
104 *Execution::ToString(Handle<Object>(obj, isolate), &exc);
105 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Exercises Isolate::FindCodeObject: every interior address of a Code object
// must map back to that object, and an address inside a *different* Code
// object must not map to the first one.
// NOTE(review): listing is gappy (e.g. original lines 111-112, 114, 116-118,
// 121, 125, 128, 132-133, 135 — assembler setup, CodeDesc, loop braces);
// code kept verbatim.
109 static void CheckFindCodeObject(Isolate* isolate) {
110 // Test FindCodeObject
113 Assembler assm(isolate, NULL, 0);
115 __ nop(); // supported on all architectures
119 Heap* heap = isolate->heap();
120 Object* code = heap->CreateCode(
122 Code::ComputeFlags(Code::STUB),
123 Handle<Code>())->ToObjectChecked();
124 CHECK(code->IsCode());
126 HeapObject* obj = HeapObject::cast(code);
127 Address obj_addr = obj->address();
// Every pointer-aligned interior address resolves to the containing Code.
129 for (int i = 0; i < obj->Size(); i += kPointerSize) {
130 Object* found = isolate->FindCodeObject(obj_addr + i);
131 CHECK_EQ(code, found);
134 Object* copy = heap->CreateCode(
136 Code::ComputeFlags(Code::STUB),
137 Handle<Code>())->ToObjectChecked();
138 CHECK(copy->IsCode());
139 HeapObject* obj_copy = HeapObject::cast(copy);
// An address in the middle of the copy must NOT resolve to the original.
140 Object* not_right = isolate->FindCodeObject(obj_copy->address() +
141 obj_copy->Size() / 2);
142 CHECK(not_right != code);
// Body of TEST(HeapObjects) (the TEST(...) header, original line ~146, is
// missing from this listing). Checks number allocation (Smi vs. HeapNumber
// boundaries), NaN, string allocation, and ToString of oddballs/Smis/doubles.
147 CcTest::InitializeVM();
148 Isolate* isolate = Isolate::Current();
149 Factory* factory = isolate->factory();
150 Heap* heap = isolate->heap();
152 HandleScope sc(isolate);
// A non-integral double must be boxed as a HeapNumber.
153 Object* value = heap->NumberFromDouble(1.000123)->ToObjectChecked();
154 CHECK(value->IsHeapNumber());
155 CHECK(value->IsNumber());
156 CHECK_EQ(1.000123, value->Number());
// An integral double in Smi range is canonicalized to a Smi.
158 value = heap->NumberFromDouble(1.0)->ToObjectChecked();
159 CHECK(value->IsSmi());
160 CHECK(value->IsNumber());
161 CHECK_EQ(1.0, value->Number());
163 value = heap->NumberFromInt32(1024)->ToObjectChecked();
164 CHECK(value->IsSmi());
165 CHECK(value->IsNumber());
166 CHECK_EQ(1024.0, value->Number());
// Smi range boundaries stay Smis.
168 value = heap->NumberFromInt32(Smi::kMinValue)->ToObjectChecked();
169 CHECK(value->IsSmi());
170 CHECK(value->IsNumber());
171 CHECK_EQ(Smi::kMinValue, Smi::cast(value)->value());
173 value = heap->NumberFromInt32(Smi::kMaxValue)->ToObjectChecked();
174 CHECK(value->IsSmi());
175 CHECK(value->IsNumber());
176 CHECK_EQ(Smi::kMaxValue, Smi::cast(value)->value());
// On x64 all int32 values fit in a Smi, so the out-of-range cases below
// only apply to the other architectures.
178 #ifndef V8_TARGET_ARCH_X64
179 // TODO(lrn): We need a NumberFromIntptr function in order to test this.
180 value = heap->NumberFromInt32(Smi::kMinValue - 1)->ToObjectChecked();
181 CHECK(value->IsHeapNumber());
182 CHECK(value->IsNumber());
183 CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
// NOTE(review): listing is gappy around here (orig 184-185, 192-193,
// 199-200 — #endif/continuation lines missing); code kept verbatim.
186 MaybeObject* maybe_value =
187 heap->NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
188 value = maybe_value->ToObjectChecked();
189 CHECK(value->IsHeapNumber());
190 CHECK(value->IsNumber());
191 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
194 maybe_value = heap->NumberFromUint32(static_cast<uint32_t>(1) << 31);
195 value = maybe_value->ToObjectChecked();
196 CHECK(value->IsHeapNumber());
197 CHECK(value->IsNumber());
198 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
201 // nan oddball checks
202 CHECK(heap->nan_value()->IsNumber());
203 CHECK(std::isnan(heap->nan_value()->Number()));
205 Handle<String> s = factory->NewStringFromAscii(CStrVector("fisk hest "));
206 CHECK(s->IsString());
207 CHECK_EQ(10, s->length());
209 String* object_string = String::cast(heap->Object_string());
211 Isolate::Current()->context()->global_object()->HasLocalProperty(
214 // Check ToString for oddballs
215 CheckOddball(isolate, heap->true_value(), "true");
216 CheckOddball(isolate, heap->false_value(), "false");
217 CheckOddball(isolate, heap->null_value(), "null");
218 CheckOddball(isolate, heap->undefined_value(), "undefined");
220 // Check ToString for Smis
221 CheckSmi(isolate, 0, "0");
222 CheckSmi(isolate, 42, "42");
223 CheckSmi(isolate, -42, "-42");
225 // Check ToString for Numbers
226 CheckNumber(isolate, 1.1, "1.1");
228 CheckFindCodeObject(isolate);
// Body of TEST(Tagging) (header, original line ~232, missing from this
// listing). Checks pointer tagging: Smi round-trips, Failure encodings, and
// object-pointer alignment. NOTE(review): the declaration of `request`
// (original line ~234) is missing; code kept verbatim.
233 CcTest::InitializeVM();
235 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
236 CHECK(Smi::FromInt(42)->IsSmi());
237 CHECK(Failure::RetryAfterGC(NEW_SPACE)->IsFailure());
// Retry failures carry the allocation space they failed in.
239 Failure::RetryAfterGC(NEW_SPACE)->allocation_space());
240 CHECK_EQ(OLD_POINTER_SPACE,
241 Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space());
242 CHECK(Failure::Exception()->IsFailure());
243 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
244 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
// Verifies that objects reachable from the global object survive a scavenge
// while unrooted objects may be collected, and that properties survive GC.
// NOTE(review): listing is gappy (e.g. orig 255, 275-279, 282-285, 294-295,
// 301-304, 307, 316-318 — SetProperty receivers, scope-closing braces);
// code kept verbatim.
248 TEST(GarbageCollection) {
249 CcTest::InitializeVM();
250 Isolate* isolate = Isolate::Current();
251 Heap* heap = isolate->heap();
252 Factory* factory = isolate->factory();
254 HandleScope sc(isolate);
// Initial scavenge to start from a clean new space.
256 heap->CollectGarbage(NEW_SPACE);
258 Handle<String> name = factory->InternalizeUtf8String("theFunction");
259 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
260 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
261 Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
264 HandleScope inner_scope(isolate);
265 // Allocate a function and keep it in global object's property.
266 Handle<JSFunction> function =
267 factory->NewFunction(name, factory->undefined_value());
268 Handle<Map> initial_map =
269 factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
270 function->set_initial_map(*initial_map);
271 Isolate::Current()->context()->global_object()->SetProperty(
272 *name, *function, NONE, kNonStrictMode)->ToObjectChecked();
273 // Allocate an object. Unrooted after leaving the scope.
274 Handle<JSObject> obj = factory->NewJSObject(function);
276 *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
278 *prop_namex, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
280 CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
281 CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex));
284 heap->CollectGarbage(NEW_SPACE);
286 // Function should be alive.
287 CHECK(Isolate::Current()->context()->global_object()->
288 HasLocalProperty(*name));
289 // Check function is retained.
290 Object* func_value = Isolate::Current()->context()->global_object()->
291 GetProperty(*name)->ToObjectChecked();
292 CHECK(func_value->IsJSFunction());
293 Handle<JSFunction> function(JSFunction::cast(func_value));
296 HandleScope inner_scope(isolate);
297 // Allocate another object, make it reachable from global.
298 Handle<JSObject> obj = factory->NewJSObject(function);
299 Isolate::Current()->context()->global_object()->SetProperty(
300 *obj_name, *obj, NONE, kNonStrictMode)->ToObjectChecked();
302 *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
305 // After gc, it should survive.
306 heap->CollectGarbage(NEW_SPACE);
308 CHECK(Isolate::Current()->context()->global_object()->
309 HasLocalProperty(*obj_name));
310 CHECK(Isolate::Current()->context()->global_object()->
311 GetProperty(*obj_name)->ToObjectChecked()->IsJSObject());
312 Object* obj = Isolate::Current()->context()->global_object()->
313 GetProperty(*obj_name)->ToObjectChecked();
314 JSObject* js_obj = JSObject::cast(obj);
315 CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name));
// Allocates a string from UTF-8 and verifies its length and that each code
// unit round-trips. NOTE(review): input here is assumed ASCII-only (each
// char compared one-to-one against a uint16_t code unit) — callers below
// only pass ASCII. Closing braces (orig ~325-326) missing from listing.
319 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
320 HandleScope scope(isolate);
321 Handle<String> s = isolate->factory()->NewStringFromUtf8(CStrVector(string));
322 CHECK_EQ(StrLength(string), s->length());
323 for (int index = 0; index < s->length(); index++) {
324 CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
// Body of a string-allocation test (TEST(...) header, original line ~329,
// missing from this listing): verifies allocation of strings of several
// lengths.
330 CcTest::InitializeVM();
331 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
333 VerifyStringAllocation(isolate, "a");
334 VerifyStringAllocation(isolate, "ab");
335 VerifyStringAllocation(isolate, "abc");
336 VerifyStringAllocation(isolate, "abcd");
337 VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
// Body of a local-handles test (TEST(...) header, original line ~341,
// missing from this listing): a handle created in a HandleScope points at a
// string of the expected length.
342 CcTest::InitializeVM();
343 Isolate* isolate = Isolate::Current();
344 Factory* factory = isolate->factory();
346 v8::HandleScope scope(CcTest::isolate());
347 const char* name = "Kasper the spunky";
348 Handle<String> string = factory->NewStringFromAscii(CStrVector(name));
349 CHECK_EQ(StrLength(name), string->length());
// Verifies that strong global handles keep their targets alive across a
// scavenge. NOTE(review): listing is gappy — the declarations of h1..h4
// (original lines ~360-363) and several scope braces are missing; code kept
// verbatim.
353 TEST(GlobalHandles) {
354 CcTest::InitializeVM();
355 Isolate* isolate = Isolate::Current();
356 Heap* heap = isolate->heap();
357 Factory* factory = isolate->factory();
358 GlobalHandles* global_handles = isolate->global_handles();
366 HandleScope scope(isolate);
368 Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
369 Handle<Object> u = factory->NewNumber(1.12344);
// Create the global handles inside the scope; they outlive it.
371 h1 = global_handles->Create(*i);
372 h2 = global_handles->Create(*u);
373 h3 = global_handles->Create(*i);
374 h4 = global_handles->Create(*u);
377 // after gc, it should survive
378 heap->CollectGarbage(NEW_SPACE);
380 CHECK((*h1)->IsString());
381 CHECK((*h2)->IsHeapNumber());
382 CHECK((*h3)->IsString());
383 CHECK((*h4)->IsHeapNumber());
386 global_handles->Destroy(h1.location());
387 global_handles->Destroy(h3.location());
390 global_handles->Destroy(h2.location());
391 global_handles->Destroy(h4.location());
// Flag set by the weak callback below; tests reset it before each GC.
395 static bool WeakPointerCleared = false;
// Weak-handle callback: records that the callback fired (for the expected
// id 1234) and disposes the handle. The `void* id` parameter declaration
// (original line ~399) is missing from this listing.
397 static void TestWeakGlobalHandleCallback(v8::Isolate* isolate,
398 v8::Persistent<v8::Value>* handle,
400 if (1234 == reinterpret_cast<intptr_t>(id)) WeakPointerCleared = true;
401 handle->Dispose(isolate);
// Verifies that a scavenge does NOT clear weak global handles pointing into
// new space — scavenges treat weak pointers as strong roots.
// NOTE(review): declarations of h1/h2 (original lines ~415-417) and some
// braces are missing from this listing; code kept verbatim.
405 TEST(WeakGlobalHandlesScavenge) {
// Stress compaction would perturb the scenario; disable it.
406 i::FLAG_stress_compaction = false;
407 CcTest::InitializeVM();
408 Isolate* isolate = Isolate::Current();
409 Heap* heap = isolate->heap();
410 Factory* factory = isolate->factory();
411 GlobalHandles* global_handles = isolate->global_handles();
413 WeakPointerCleared = false;
419 HandleScope scope(isolate);
421 Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
422 Handle<Object> u = factory->NewNumber(1.12344);
424 h1 = global_handles->Create(*i);
425 h2 = global_handles->Create(*u);
// Make h2 weak with id 1234 so the callback above can recognize it.
428 global_handles->MakeWeak(h2.location(),
429 reinterpret_cast<void*>(1234),
430 &TestWeakGlobalHandleCallback);
432 // Scavenge treats weak pointers as normal roots.
433 heap->PerformScavenge();
435 CHECK((*h1)->IsString());
436 CHECK((*h2)->IsHeapNumber());
438 CHECK(!WeakPointerCleared);
439 CHECK(!global_handles->IsNearDeath(h2.location()));
440 CHECK(!global_handles->IsNearDeath(h1.location()));
442 global_handles->Destroy(h1.location());
443 global_handles->Destroy(h2.location());
// Verifies that a full mark-compact GC DOES clear weak global handles whose
// targets are otherwise unreachable (after promotion to old space), while
// strong handles survive. NOTE(review): h1/h2 declarations (original lines
// ~456-458) and some braces are missing from this listing; kept verbatim.
447 TEST(WeakGlobalHandlesMark) {
448 CcTest::InitializeVM();
449 Isolate* isolate = Isolate::Current();
450 Heap* heap = isolate->heap();
451 Factory* factory = isolate->factory();
452 GlobalHandles* global_handles = isolate->global_handles();
454 WeakPointerCleared = false;
460 HandleScope scope(isolate);
462 Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
463 Handle<Object> u = factory->NewNumber(1.12344);
465 h1 = global_handles->Create(*i);
466 h2 = global_handles->Create(*u);
469 // Make sure the objects are promoted.
470 heap->CollectGarbage(OLD_POINTER_SPACE);
471 heap->CollectGarbage(NEW_SPACE);
472 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
474 global_handles->MakeWeak(h2.location(),
475 reinterpret_cast<void*>(1234),
476 &TestWeakGlobalHandleCallback);
477 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
478 CHECK(!GlobalHandles::IsNearDeath(h2.location()));
480 // Incremental marking potentially marked handles before they turned weak.
481 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
483 CHECK((*h1)->IsString());
// The weak handle's callback must have fired; the strong one stays alive.
485 CHECK(WeakPointerCleared);
486 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
488 global_handles->Destroy(h1.location());
// Verifies that a scavenge leaves a weak global handle intact while a full
// mark-compact collection clears it. NOTE(review): the declaration of `h`
// (original line ~503) and some braces are missing from this listing.
492 TEST(DeleteWeakGlobalHandle) {
493 i::FLAG_stress_compaction = false;
494 CcTest::InitializeVM();
495 Isolate* isolate = Isolate::Current();
496 Heap* heap = isolate->heap();
497 Factory* factory = isolate->factory();
498 GlobalHandles* global_handles = isolate->global_handles();
500 WeakPointerCleared = false;
505 HandleScope scope(isolate);
507 Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
508 h = global_handles->Create(*i);
511 global_handles->MakeWeak(h.location(),
512 reinterpret_cast<void*>(1234),
513 &TestWeakGlobalHandleCallback);
515 // Scanvenge does not recognize weak reference.
516 heap->PerformScavenge();
518 CHECK(!WeakPointerCleared);
520 // Mark-compact treats weak reference properly.
521 heap->CollectGarbage(OLD_POINTER_SPACE);
523 CHECK(WeakPointerCleared);
527 static const char* not_so_random_string_table[] = {
// For each entry of a NULL-terminated string table, internalizes it twice
// and checks the result is an internalized string with the right contents.
// NOTE(review): declarations of `a`/`b` (orig ~593, ~598) and the check that
// a == b (orig ~601?) are missing from this listing; code kept verbatim.
591 static void CheckInternalizedStrings(const char** strings) {
592 for (const char* string = *strings; *strings != 0; string = *strings++) {
594 MaybeObject* maybe_a = HEAP->InternalizeUtf8String(string);
595 // InternalizeUtf8String may return a failure if a GC is needed.
596 if (!maybe_a->ToObject(&a)) continue;
597 CHECK(a->IsInternalizedString());
599 MaybeObject* maybe_b = HEAP->InternalizeUtf8String(string);
600 if (!maybe_b->ToObject(&b)) continue;
602 CHECK(String::cast(b)->IsUtf8EqualTo(CStrVector(string)));
// Body of an internalization test (TEST(...) header, original line ~607,
// missing from this listing). Runs the table twice: the second pass must hit
// the already-internalized strings.
608 CcTest::InitializeVM();
610 CheckInternalizedStrings(not_so_random_string_table);
611 CheckInternalizedStrings(not_so_random_string_table);
// Verifies that properties can be added both to instances created from a
// function and to the function object itself. NOTE(review): the SetProperty
// receiver lines (orig ~630) and closing brace are missing from this
// listing; code kept verbatim.
615 TEST(FunctionAllocation) {
616 CcTest::InitializeVM();
617 Isolate* isolate = Isolate::Current();
618 Factory* factory = isolate->factory();
620 v8::HandleScope sc(CcTest::isolate());
621 Handle<String> name = factory->InternalizeUtf8String("theFunction");
622 Handle<JSFunction> function =
623 factory->NewFunction(name, factory->undefined_value());
624 Handle<Map> initial_map =
625 factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
626 function->set_initial_map(*initial_map);
628 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
629 Handle<JSObject> obj = factory->NewJSObject(function);
631 *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
632 CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
633 // Check that we can add properties to function objects.
634 function->SetProperty(
635 *prop_name, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
636 CHECK_EQ(Smi::FromInt(24), function->GetProperty(*prop_name));
// Exercises add/delete of named properties in various orders, plus the
// equivalence of plain strings and internalized strings as property keys.
// NOTE(review): several SetProperty receiver lines (orig ~659, 668, 670,
// 683, 685, 700, 708) are missing from this listing; code kept verbatim.
640 TEST(ObjectProperties) {
641 CcTest::InitializeVM();
642 Isolate* isolate = Isolate::Current();
643 Factory* factory = isolate->factory();
645 v8::HandleScope sc(CcTest::isolate());
// Fetch the Object constructor from the global object to build instances.
646 String* object_string = String::cast(HEAP->Object_string());
647 Object* raw_object = Isolate::Current()->context()->global_object()->
648 GetProperty(object_string)->ToObjectChecked();
649 JSFunction* object_function = JSFunction::cast(raw_object);
650 Handle<JSFunction> constructor(object_function);
651 Handle<JSObject> obj = factory->NewJSObject(constructor);
652 Handle<String> first = factory->InternalizeUtf8String("first");
653 Handle<String> second = factory->InternalizeUtf8String("second");
656 CHECK(!obj->HasLocalProperty(*first));
660 *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
661 CHECK(obj->HasLocalProperty(*first));
664 JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
665 CHECK(!obj->HasLocalProperty(*first));
667 // add first and then second
669 *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
671 *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
672 CHECK(obj->HasLocalProperty(*first));
673 CHECK(obj->HasLocalProperty(*second));
675 // delete first and then second
676 JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
677 CHECK(obj->HasLocalProperty(*second));
678 JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION);
679 CHECK(!obj->HasLocalProperty(*first));
680 CHECK(!obj->HasLocalProperty(*second));
682 // add first and then second
684 *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
686 *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
687 CHECK(obj->HasLocalProperty(*first));
688 CHECK(obj->HasLocalProperty(*second));
690 // delete second and then first
691 JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION);
692 CHECK(obj->HasLocalProperty(*first));
693 JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
694 CHECK(!obj->HasLocalProperty(*first));
695 CHECK(!obj->HasLocalProperty(*second));
697 // check string and internalized string match
698 const char* string1 = "fisk";
699 Handle<String> s1 = factory->NewStringFromAscii(CStrVector(string1));
701 *s1, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
702 Handle<String> s1_string = factory->InternalizeUtf8String(string1);
703 CHECK(obj->HasLocalProperty(*s1_string));
705 // check internalized string and string match
706 const char* string2 = "fugl";
707 Handle<String> s2_string = factory->InternalizeUtf8String(string2);
709 *s2_string, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
710 Handle<String> s2 = factory->NewStringFromAscii(CStrVector(string2));
711 CHECK(obj->HasLocalProperty(*s2));
// Body of a map-transition test (TEST(...) header, original line ~715,
// missing from this listing): adding a property to an object must transition
// it away from the function's initial map.
716 CcTest::InitializeVM();
717 Isolate* isolate = Isolate::Current();
718 Factory* factory = isolate->factory();
720 v8::HandleScope sc(CcTest::isolate());
721 Handle<String> name = factory->InternalizeUtf8String("theFunction");
722 Handle<JSFunction> function =
723 factory->NewFunction(name, factory->undefined_value());
724 Handle<Map> initial_map =
725 factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
726 function->set_initial_map(*initial_map);
728 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
729 Handle<JSObject> obj = factory->NewJSObject(function);
// NOTE(review): the SetProperty receiver line (orig ~730-732) is missing
// from this listing; code kept verbatim.
733 *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
734 CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
736 // Check the map has changed
737 CHECK(*initial_map != obj->map());
// Body of a JSArray test (TEST(...) header, original line ~741, missing from
// this listing): checks fast-elements behavior for small indices and the
// transition to dictionary (slow) elements once the length exceeds Smi range.
742 CcTest::InitializeVM();
743 Isolate* isolate = Isolate::Current();
744 Factory* factory = isolate->factory();
746 v8::HandleScope sc(CcTest::isolate());
747 Handle<String> name = factory->InternalizeUtf8String("Array");
748 Object* raw_object = Isolate::Current()->context()->global_object()->
749 GetProperty(*name)->ToObjectChecked();
750 Handle<JSFunction> function = Handle<JSFunction>(
751 JSFunction::cast(raw_object));
753 // Allocate the object.
754 Handle<JSObject> object = factory->NewJSObject(function);
755 Handle<JSArray> array = Handle<JSArray>::cast(object);
756 // We just initialized the VM, no heap allocation failure yet.
757 array->Initialize(0)->ToObjectChecked();
759 // Set array length to 0.
760 array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked();
761 CHECK_EQ(Smi::FromInt(0), array->length());
762 // Must be in fast mode.
763 CHECK(array->HasFastSmiOrObjectElements());
765 // array[length] = name.
766 array->SetElement(0, *name, NONE, kNonStrictMode)->ToObjectChecked();
767 CHECK_EQ(Smi::FromInt(1), array->length());
768 CHECK_EQ(array->GetElement(0), *name);
770 // Set array length with larger than smi value.
771 Handle<Object> length =
772 factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
773 array->SetElementsLength(*length)->ToObjectChecked();
775 uint32_t int_length = 0;
776 CHECK(length->ToArrayIndex(&int_length));
777 CHECK_EQ(*length, array->length());
// A length beyond Smi range forces the array into dictionary elements.
778 CHECK(array->HasDictionaryElements()); // Must be in slow mode.
780 // array[length] = name.
781 array->SetElement(int_length, *name, NONE, kNonStrictMode)->ToObjectChecked();
782 uint32_t new_int_length = 0;
783 CHECK(array->length()->ToArrayIndex(&new_int_length));
784 CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
785 CHECK_EQ(array->GetElement(int_length), *name);
786 CHECK_EQ(array->GetElement(0), *name);
// Body of a JSObject copy test (TEST(...) header, original line ~790,
// missing from this listing): cloning an object copies its named properties
// and elements, and subsequent mutations of the clone do not affect the
// original. NOTE(review): SetProperty receiver lines (orig ~805, 807, 824,
// 826) are missing from this listing; code kept verbatim.
791 CcTest::InitializeVM();
792 Isolate* isolate = Isolate::Current();
793 Factory* factory = isolate->factory();
795 v8::HandleScope sc(CcTest::isolate());
796 String* object_string = String::cast(HEAP->Object_string());
797 Object* raw_object = Isolate::Current()->context()->global_object()->
798 GetProperty(object_string)->ToObjectChecked();
799 JSFunction* object_function = JSFunction::cast(raw_object);
800 Handle<JSFunction> constructor(object_function);
801 Handle<JSObject> obj = factory->NewJSObject(constructor);
802 Handle<String> first = factory->InternalizeUtf8String("first");
803 Handle<String> second = factory->InternalizeUtf8String("second");
806 *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
808 *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
810 obj->SetElement(0, *first, NONE, kNonStrictMode)->ToObjectChecked();
811 obj->SetElement(1, *second, NONE, kNonStrictMode)->ToObjectChecked();
814 Handle<JSObject> clone = Copy(obj);
815 CHECK(!clone.is_identical_to(obj));
// Clone starts out equal to the original, element- and property-wise.
817 CHECK_EQ(obj->GetElement(0), clone->GetElement(0));
818 CHECK_EQ(obj->GetElement(1), clone->GetElement(1));
820 CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*first));
821 CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*second));
// Flip values/elements on the clone only, then check the original kept its
// own values (i.e. the copy was deep enough not to share stores).
825 *first, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
827 *second, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
829 clone->SetElement(0, *second, NONE, kNonStrictMode)->ToObjectChecked();
830 clone->SetElement(1, *first, NONE, kNonStrictMode)->ToObjectChecked();
832 CHECK_EQ(obj->GetElement(1), clone->GetElement(0));
833 CHECK_EQ(obj->GetElement(0), clone->GetElement(1));
835 CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*first));
836 CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*second));
// Allocates ASCII and non-ASCII (3-byte UTF-8 sequence per char) strings of
// increasing length, both internalized and plain, and checks their lengths.
// NOTE(review): listing is gappy — the ascii[] fill/terminator lines
// (orig ~851, 853, 857), the DeleteArray(ascii) and closing braces
// (orig ~874-876) are missing; code kept verbatim.
840 TEST(StringAllocation) {
841 CcTest::InitializeVM();
842 Isolate* isolate = Isolate::Current();
843 Factory* factory = isolate->factory();
// UTF-8 encoding of a single 3-byte character, repeated `length` times.
845 const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
846 for (int length = 0; length < 100; length++) {
847 v8::HandleScope scope(CcTest::isolate());
848 char* non_ascii = NewArray<char>(3 * length + 1);
849 char* ascii = NewArray<char>(length + 1);
850 non_ascii[3 * length] = 0;
852 for (int i = 0; i < length; i++) {
854 non_ascii[3 * i] = chars[0];
855 non_ascii[3 * i + 1] = chars[1];
856 non_ascii[3 * i + 2] = chars[2];
858 Handle<String> non_ascii_sym =
859 factory->InternalizeUtf8String(
860 Vector<const char>(non_ascii, 3 * length));
// 3 UTF-8 bytes decode to one character, so length (not 3*length) is right.
861 CHECK_EQ(length, non_ascii_sym->length());
862 Handle<String> ascii_sym =
863 factory->InternalizeOneByteString(OneByteVector(ascii, length));
864 CHECK_EQ(length, ascii_sym->length());
865 Handle<String> non_ascii_str =
866 factory->NewStringFromUtf8(Vector<const char>(non_ascii, 3 * length));
867 non_ascii_str->Hash();
868 CHECK_EQ(length, non_ascii_str->length());
869 Handle<String> ascii_str =
870 factory->NewStringFromUtf8(Vector<const char>(ascii, length));
872 CHECK_EQ(length, ascii_str->length());
873 DeleteArray(non_ascii);
// Walks the whole heap and counts how many of the given handles' targets
// are found. NOTE(review): the counter declaration/increment and closing
// braces (orig ~881, 887-892) are missing from this listing; code kept
// verbatim.
879 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
880 // Count the number of objects found in the heap.
// Heap must be iterable (no lazily-swept pages) before using HeapIterator.
882 heap->EnsureHeapIsIterable();
883 HeapIterator iterator(heap);
884 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
885 for (int i = 0; i < size; i++) {
886 if (*objs[i] == obj) {
// Body of a heap-iteration test (TEST(...) header, original line ~895,
// missing from this listing): allocates objects across new space, old
// pointer/data space and large-object space, then verifies the heap
// iterator visits each of them exactly once.
896 CcTest::InitializeVM();
897 Isolate* isolate = Isolate::Current();
898 Factory* factory = isolate->factory();
899 v8::HandleScope scope(CcTest::isolate());
901 // Array of objects to scan haep for.
902 const int objs_count = 6;
903 Handle<Object> objs[objs_count];
904 int next_objs_index = 0;
906 // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
907 objs[next_objs_index++] = factory->NewJSArray(10);
// NOTE(review): the continuation of this call (orig ~909-910, presumably
// elements kind + TENURED) is missing from this listing.
908 objs[next_objs_index++] = factory->NewJSArray(10,
912 // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
913 objs[next_objs_index++] =
914 factory->NewStringFromAscii(CStrVector("abcdefghij"));
915 objs[next_objs_index++] =
916 factory->NewStringFromAscii(CStrVector("abcdefghij"), TENURED);
918 // Allocate a large string (for large object space).
919 int large_size = Page::kMaxNonCodeHeapObjectSize + 1;
920 char* str = new char[large_size];
921 for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
922 str[large_size - 1] = '\0';
923 objs[next_objs_index++] =
924 factory->NewStringFromAscii(CStrVector(str), TENURED);
// NOTE(review): `delete[] str;` (orig ~925) is missing from this listing —
// presumably present in the original; flagged, not re-added, to keep the
// code verbatim.
927 // Add a Map object to look for.
928 objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
930 CHECK_EQ(objs_count, next_objs_index);
931 CHECK_EQ(objs_count, ObjectsFoundInHeap(HEAP, objs, objs_count));
// Verifies that escaping an empty (null) handle from a nested HandleScope
// yields a null handle rather than crashing.
935 TEST(EmptyHandleEscapeFrom) {
936 CcTest::InitializeVM();
938 v8::HandleScope scope(CcTest::isolate());
939 Handle<JSObject> runaway;
942 v8::HandleScope nested(CcTest::isolate());
943 Handle<JSObject> empty;
944 runaway = empty.EscapeFrom(&nested);
947 CHECK(runaway.is_null());
// Converts a FixedArray byte size into the element count that fits in it
// (inverse of FixedArray::SizeFor). Closing brace (orig ~953) missing from
// this listing.
951 static int LenFromSize(int size) {
952 return (size - FixedArray::kHeaderSize) / kPointerSize;
// Regression test for crbug.com/39128: a JSObject cloned into old space must
// get correct write-barrier/region dirty marks for its pointers into new
// space. The test fills new space almost exactly, allocates the source
// object as the last new-space object, then clones it under
// AlwaysAllocateScope so the clone lands in old pointer space.
// NOTE(review): listing is gappy (orig ~997-998, 1027-1028, 1030-1031 —
// loop/early-return braces); code kept verbatim.
956 TEST(Regression39128) {
957 // Test case for crbug.com/39128.
958 CcTest::InitializeVM();
959 Isolate* isolate = Isolate::Current();
960 Factory* factory = isolate->factory();
962 // Increase the chance of 'bump-the-pointer' allocation in old space.
963 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
965 v8::HandleScope scope(CcTest::isolate());
967 // The plan: create JSObject which references objects in new space.
968 // Then clone this object (forcing it to go into old space) and check
969 // that region dirty marks are updated correctly.
971 // Step 1: prepare a map for the object. We add 1 inobject property to it.
972 Handle<JSFunction> object_ctor(
973 Isolate::Current()->native_context()->object_function());
974 CHECK(object_ctor->has_initial_map());
975 Handle<Map> object_map(object_ctor->initial_map());
976 // Create a map with single inobject property.
977 Handle<Map> my_map = factory->CopyMap(object_map, 1);
978 int n_properties = my_map->inobject_properties();
979 CHECK_GT(n_properties, 0);
981 int object_size = my_map->instance_size();
983 // Step 2: allocate a lot of objects so to almost fill new space: we need
984 // just enough room to allocate JSObject and thus fill the newspace.
986 int allocation_amount = Min(FixedArray::kMaxSize,
987 Page::kMaxNonCodeHeapObjectSize + kPointerSize);
988 int allocation_len = LenFromSize(allocation_amount);
989 NewSpace* new_space = HEAP->new_space();
990 Address* top_addr = new_space->allocation_top_address();
991 Address* limit_addr = new_space->allocation_limit_address();
// Fill new space with fixed arrays until less than one chunk of room is left.
992 while ((*limit_addr - *top_addr) > allocation_amount) {
993 CHECK(!HEAP->always_allocate());
994 Object* array = HEAP->AllocateFixedArray(allocation_len)->ToObjectChecked();
995 CHECK(!array->IsFailure());
996 CHECK(new_space->Contains(array));
999 // Step 3: now allocate fixed array and JSObject to fill the whole new space.
1000 int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
1001 int fixed_array_len = LenFromSize(to_fill);
1002 CHECK(fixed_array_len < FixedArray::kMaxLength);
1004 CHECK(!HEAP->always_allocate());
1005 Object* array = HEAP->AllocateFixedArray(fixed_array_len)->ToObjectChecked();
1006 CHECK(!array->IsFailure());
1007 CHECK(new_space->Contains(array));
1009 Object* object = HEAP->AllocateJSObjectFromMap(*my_map)->ToObjectChecked();
1010 CHECK(new_space->Contains(object));
1011 JSObject* jsobject = JSObject::cast(object);
1012 CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
1013 CHECK_EQ(0, jsobject->properties()->length());
1014 // Create a reference to object in new space in jsobject.
1015 jsobject->FastPropertyAtPut(-1, array);
// New space must now be completely full.
1017 CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
1019 // Step 4: clone jsobject, but force always allocate first to create a clone
1020 // in old pointer space.
1021 Address old_pointer_space_top = HEAP->old_pointer_space()->top();
1022 AlwaysAllocateScope aa_scope;
1023 Object* clone_obj = HEAP->CopyJSObject(jsobject)->ToObjectChecked();
1024 JSObject* clone = JSObject::cast(clone_obj);
1025 if (clone->address() != old_pointer_space_top) {
1026 // Alas, got allocated from free list, we cannot do checks.
1029 CHECK(HEAP->old_pointer_space()->Contains(clone->address()));
// Verifies that unexecuted function code is flushed after enough full GCs
// (aging), and that calling the function recompiles it lazily.
// NOTE(review): listing is gappy — the `source` string continuation
// (orig ~1042-1046), CompileRun(source) (orig ~1051-1052), loop/scope braces
// (orig ~1053, 1060, 1070-1071, 1079) are missing; code kept verbatim.
1033 TEST(TestCodeFlushing) {
1034 // If we do not flush code this test is invalid.
1035 if (!FLAG_flush_code) return;
1036 i::FLAG_allow_natives_syntax = true;
1037 CcTest::InitializeVM();
1038 Isolate* isolate = Isolate::Current();
1039 Factory* factory = isolate->factory();
1040 v8::HandleScope scope(CcTest::isolate());
1041 const char* source = "function foo() {"
1047 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1049 // This compile will add the code to the compilation cache.
1050 { v8::HandleScope scope(CcTest::isolate());
1054 // Check function is compiled.
1055 Object* func_value = Isolate::Current()->context()->global_object()->
1056 GetProperty(*foo_name)->ToObjectChecked();
1057 CHECK(func_value->IsJSFunction());
1058 Handle<JSFunction> function(JSFunction::cast(func_value));
1059 CHECK(function->shared()->is_compiled());
1061 // The code will survive at least two GCs.
1062 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1063 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1064 CHECK(function->shared()->is_compiled());
1066 // Simulate several GCs that use full marking.
// Enough full GCs to age the code past the flushing threshold.
1067 const int kAgingThreshold = 6;
1068 for (int i = 0; i < kAgingThreshold; i++) {
1069 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1072 // foo should no longer be in the compilation cache
1073 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1074 CHECK(!function->is_compiled() || function->IsOptimized());
1075 // Call foo to get it recompiled.
1076 CompileRun("foo()");
1077 CHECK(function->shared()->is_compiled());
1078 CHECK(function->is_compiled());
// Same idea as TestCodeFlushing, but drives incremental marking steps before
// each GC, and additionally checks that optimizing a function that is already
// enqueued as a flushing candidate leaves the candidate queue sane.
1082 TEST(TestCodeFlushingIncremental) {
1083 // If we do not flush code this test is invalid.
1084 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1085 i::FLAG_allow_natives_syntax = true;
1086 CcTest::InitializeVM();
1087 Isolate* isolate = Isolate::Current();
1088 Factory* factory = isolate->factory();
1089 v8::HandleScope scope(CcTest::isolate());
1090 const char* source = "function foo() {"
1096 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1098 // This compile will add the code to the compilation cache.
1099 { v8::HandleScope scope(CcTest::isolate());
1103 // Check function is compiled.
1104 Object* func_value = Isolate::Current()->context()->global_object()->
1105 GetProperty(*foo_name)->ToObjectChecked();
1106 CHECK(func_value->IsJSFunction());
1107 Handle<JSFunction> function(JSFunction::cast(func_value));
1108 CHECK(function->shared()->is_compiled());
1110 // The code will survive at least two GCs.
1111 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1112 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1113 CHECK(function->shared()->is_compiled());
1115 // Simulate several GCs that use incremental marking.
1116 const int kAgingThreshold = 6;
1117 for (int i = 0; i < kAgingThreshold; i++) {
1118 SimulateIncrementalMarking();
1119 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1121 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1122 CHECK(!function->is_compiled() || function->IsOptimized());
1124 // This compile will compile the function again.
1125 { v8::HandleScope scope(CcTest::isolate());
1126 CompileRun("foo();");
1129 // Simulate several GCs that use incremental marking but make sure
1130 // the loop breaks once the function is enqueued as a candidate.
1131 for (int i = 0; i < kAgingThreshold; i++) {
1132 SimulateIncrementalMarking();
// A non-undefined next_function_link indicates the function is on the
// candidates list.
1133 if (!function->next_function_link()->IsUndefined()) break;
1134 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1137 // Force optimization while incremental marking is active and while
1138 // the function is enqueued as a candidate.
1139 { v8::HandleScope scope(CcTest::isolate());
1140 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1143 // Simulate one final GC to make sure the candidate queue is sane.
1144 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1145 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1146 CHECK(function->is_compiled() || !function->IsOptimized());
// Checks that a scavenge performed while incremental marking is running does
// not corrupt the code-flushing candidates list, even when one candidate
// function dies (its handle is explicitly cleared) before the scavenge.
1150 TEST(TestCodeFlushingIncrementalScavenge) {
1151 // If we do not flush code this test is invalid.
1152 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1153 i::FLAG_allow_natives_syntax = true;
1154 CcTest::InitializeVM();
1155 Isolate* isolate = Isolate::Current();
1156 Factory* factory = isolate->factory();
1157 v8::HandleScope scope(CcTest::isolate());
1158 const char* source = "var foo = function() {"
1164 "var bar = function() {"
1168 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1169 Handle<String> bar_name = factory->InternalizeUtf8String("bar");
1171 // Perform one initial GC to enable code flushing.
1172 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1174 // This compile will add the code to the compilation cache.
1175 { v8::HandleScope scope(CcTest::isolate());
1179 // Check functions are compiled.
1180 Object* func_value = Isolate::Current()->context()->global_object()->
1181 GetProperty(*foo_name)->ToObjectChecked();
1182 CHECK(func_value->IsJSFunction());
1183 Handle<JSFunction> function(JSFunction::cast(func_value));
1184 CHECK(function->shared()->is_compiled());
1185 Object* func_value2 = Isolate::Current()->context()->global_object()->
1186 GetProperty(*bar_name)->ToObjectChecked();
1187 CHECK(func_value2->IsJSFunction());
1188 Handle<JSFunction> function2(JSFunction::cast(func_value2));
1189 CHECK(function2->shared()->is_compiled());
1191 // Clear references to functions so that one of them can die.
1192 { v8::HandleScope scope(CcTest::isolate());
1193 CompileRun("foo = 0; bar = 0;");
1196 // Bump the code age so that flushing is triggered while the function
1197 // object is still located in new-space.
1198 const int kAgingThreshold = 6;
1199 for (int i = 0; i < kAgingThreshold; i++) {
1200 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1201 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1204 // Simulate incremental marking so that the functions are enqueued as
1205 // code flushing candidates. Then kill one of the functions. Finally
1206 // perform a scavenge while incremental marking is still running.
1207 SimulateIncrementalMarking();
// Directly overwrite the handle's slot to make function2 unreachable.
1208 *function2.location() = NULL;
1209 HEAP->CollectGarbage(NEW_SPACE, "test scavenge while marking");
1211 // Simulate one final GC to make sure the candidate queue is sane.
1212 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1213 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1214 CHECK(!function->is_compiled() || function->IsOptimized());
// Checks that aborting incremental marking (here triggered via the debugger
// setting a breakpoint) while a function is enqueued as a flushing candidate
// leaves the candidate queue sane and does not flush the optimized code.
1218 TEST(TestCodeFlushingIncrementalAbort) {
1219 // If we do not flush code this test is invalid.
1220 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1221 i::FLAG_allow_natives_syntax = true;
1222 CcTest::InitializeVM();
1223 Isolate* isolate = Isolate::Current();
1224 Factory* factory = isolate->factory();
1225 Heap* heap = isolate->heap();
1226 v8::HandleScope scope(CcTest::isolate());
1227 const char* source = "function foo() {"
1233 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1235 // This compile will add the code to the compilation cache.
1236 { v8::HandleScope scope(CcTest::isolate());
1240 // Check function is compiled.
1241 Object* func_value = Isolate::Current()->context()->global_object()->
1242 GetProperty(*foo_name)->ToObjectChecked();
1243 CHECK(func_value->IsJSFunction());
1244 Handle<JSFunction> function(JSFunction::cast(func_value));
1245 CHECK(function->shared()->is_compiled());
1247 // The code will survive at least two GCs.
1248 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1249 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
1250 CHECK(function->shared()->is_compiled());
1252 // Bump the code age so that flushing is triggered.
1253 const int kAgingThreshold = 6;
1254 for (int i = 0; i < kAgingThreshold; i++) {
1255 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1258 // Simulate incremental marking so that the function is enqueued as
1259 // code flushing candidate.
1260 SimulateIncrementalMarking();
1262 #ifdef ENABLE_DEBUGGER_SUPPORT
1263 // Enable the debugger and add a breakpoint while incremental marking
1264 // is running so that incremental marking aborts and code flushing is
1267 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1268 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
1269 isolate->debug()->ClearAllBreakPoints();
1270 #endif // ENABLE_DEBUGGER_SUPPORT
1272 // Force optimization now that code flushing is disabled.
1273 { v8::HandleScope scope(CcTest::isolate());
1274 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1277 // Simulate one final GC to make sure the candidate queue is sane.
1278 heap->CollectAllGarbage(Heap::kNoGCFlags);
1279 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1280 CHECK(function->is_compiled() || !function->IsOptimized());
1284 // Count the number of native contexts in the weak list of native contexts.
// Walks the heap's native_contexts_list via NEXT_CONTEXT_LINK until the
// undefined sentinel is reached. (Counter increment elided in this excerpt.)
1285 int CountNativeContexts() {
1287 Object* object = HEAP->native_contexts_list();
1288 while (!object->IsUndefined()) {
1290 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1296 // Count the number of user functions in the weak list of optimized
1297 // functions attached to a native context.
// Stops at the first builtin or non-JSFunction entry, so only user-defined
// optimized functions are counted.
1298 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1300 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1301 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1302 while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1304 object = JSFunction::cast(object)->next_function_link();
// Exercises the two internal weak lists: the native-contexts list and the
// per-context list of optimized functions. Scavenges must treat the links as
// strong (counts unchanged); mark-compact clears dead entries.
1310 TEST(TestInternalWeakLists) {
1311 v8::V8::Initialize();
1313 // Some flags turn Scavenge collections into Mark-sweep collections
1314 // and hence are incompatible with this test case.
1315 if (FLAG_gc_global || FLAG_stress_compaction) return;
1317 static const int kNumTestContexts = 10;
1319 Isolate* isolate = Isolate::Current();
1320 Heap* heap = isolate->heap();
1321 HandleScope scope(isolate);
1322 v8::Handle<v8::Context> ctx[kNumTestContexts];
1324 CHECK_EQ(0, CountNativeContexts());
1326 // Create a number of global contexts which gets linked together.
1327 for (int i = 0; i < kNumTestContexts; i++) {
1328 ctx[i] = v8::Context::New(v8::Isolate::GetCurrent());
1330 // Collect garbage that might have been created by one of the
1331 // installed extensions.
1332 isolate->compilation_cache()->Clear();
1333 heap->CollectAllGarbage(Heap::kNoGCFlags);
// Optimized-function counts below only apply when Crankshaft optimizes
// eagerly (--always-opt).
1335 bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
1337 CHECK_EQ(i + 1, CountNativeContexts());
1341 // Create a handle scope so no function objects get stuck in the outer
1343 HandleScope scope(isolate);
1344 const char* source = "function f1() { };"
1345 "function f2() { };"
1346 "function f3() { };"
1347 "function f4() { };"
1348 "function f5() { };";
1350 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1352 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
1354 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1356 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1358 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1360 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1362 // Remove function f1, and
1363 CompileRun("f1=null");
1365 // Scavenge treats these references as strong.
1366 for (int j = 0; j < 10; j++) {
1367 HEAP->PerformScavenge();
1368 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1371 // Mark compact handles the weak references.
1372 isolate->compilation_cache()->Clear();
1373 heap->CollectAllGarbage(Heap::kNoGCFlags);
1374 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1376 // Get rid of f3 and f5 in the same way.
1377 CompileRun("f3=null");
1378 for (int j = 0; j < 10; j++) {
1379 HEAP->PerformScavenge();
1380 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1382 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1383 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1384 CompileRun("f5=null");
1385 for (int j = 0; j < 10; j++) {
1386 HEAP->PerformScavenge();
1387 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1389 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1390 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1395 // Force compilation cache cleanup.
1396 HEAP->NotifyContextDisposed();
1397 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1399 // Dispose the native contexts one by one.
1400 for (int i = 0; i < kNumTestContexts; i++) {
1401 // TODO(dcarney): is there a better way to do this?
// Overwrite the persistent handle's slot with undefined so the context
// becomes unreachable without going through the public Dispose API.
1402 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1403 *unsafe = HEAP->undefined_value();
1406 // Scavenge treats these references as strong.
1407 for (int j = 0; j < 10; j++) {
1408 HEAP->PerformScavenge();
1409 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1412 // Mark compact handles the weak references.
1413 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1414 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1417 CHECK_EQ(0, CountNativeContexts());
1421 // Count the number of native contexts in the weak list of native contexts
1422 // causing a GC after the specified number of elements.
// Handle-based variant of CountNativeContexts: uses Handle<Object> so the
// traversal survives the GC triggered mid-walk when count reaches n.
1423 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1424 Heap* heap = isolate->heap();
1426 Handle<Object> object(heap->native_contexts_list(), isolate);
1427 while (!object->IsUndefined()) {
1429 if (count == n) heap->CollectAllGarbage(Heap::kNoGCFlags);
1431 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1438 // Count the number of user functions in the weak list of optimized
1439 // functions attached to a native context causing a GC after the
1440 // specified number of elements.
// Handle-based variant of CountOptimizedUserFunctions so the walk is safe
// across the GC triggered when count reaches n.
1441 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1444 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1445 Isolate* isolate = icontext->GetIsolate();
1446 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1448 while (object->IsJSFunction() &&
1449 !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1451 if (count == n) isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
1452 object = Handle<Object>(
1453 Object::cast(JSFunction::cast(*object)->next_function_link()),
// Verifies that traversing the internal weak lists is safe even when a GC is
// triggered in the middle of the traversal, for both the native-contexts list
// and the per-context optimized-functions list.
1460 TEST(TestInternalWeakListsTraverseWithGC) {
1461 v8::V8::Initialize();
1462 Isolate* isolate = Isolate::Current();
1464 static const int kNumTestContexts = 10;
1466 HandleScope scope(isolate);
1467 v8::Handle<v8::Context> ctx[kNumTestContexts];
1469 CHECK_EQ(0, CountNativeContexts());
1471 // Create a number of contexts and check the length of the weak list both
1472 // with and without GCs while iterating the list.
1473 for (int i = 0; i < kNumTestContexts; i++) {
1474 ctx[i] = v8::Context::New(v8::Isolate::GetCurrent());
1475 CHECK_EQ(i + 1, CountNativeContexts());
1476 CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
// Optimized-function counts below only apply when Crankshaft optimizes
// eagerly (--always-opt).
1479 bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
1481 // Compile a number of functions the length of the weak list of optimized
1482 // functions both with and without GCs while iterating the list.
1484 const char* source = "function f1() { };"
1485 "function f2() { };"
1486 "function f3() { };"
1487 "function f4() { };"
1488 "function f5() { };";
1490 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1492 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
1493 CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1495 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
1496 CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1498 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
1499 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1501 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
1502 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1504 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
1505 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
// Verifies that Heap::SizeOfObjects stays accurate while lazy sweeping is in
// progress: it grows by exactly the allocated amount, returns to the initial
// size after a full GC, and is unaffected by step-wise sweeper advances.
1511 TEST(TestSizeOfObjects) {
1512 v8::V8::Initialize();
1514 // Get initial heap size after several full GCs, which will stabilize
1515 // the heap size and return with sweeping finished completely.
1516 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1517 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1518 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1519 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1520 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1521 CHECK(HEAP->old_pointer_space()->IsLazySweepingComplete());
1522 int initial_size = static_cast<int>(HEAP->SizeOfObjects());
1525 // Allocate objects on several different old-space pages so that
1526 // lazy sweeping kicks in for subsequent GC runs.
1527 AlwaysAllocateScope always_allocate;
1528 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1529 for (int i = 1; i <= 100; i++) {
1530 HEAP->AllocateFixedArray(8192, TENURED)->ToObjectChecked();
1531 CHECK_EQ(initial_size + i * filler_size,
1532 static_cast<int>(HEAP->SizeOfObjects()));
1536 // The heap size should go back to initial size after a full GC, even
1537 // though sweeping didn't finish yet.
1538 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1540 // Normally sweeping would not be complete here, but no guarantees.
1542 CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
1544 // Advancing the sweeper step-wise should not change the heap size.
1545 while (!HEAP->old_pointer_space()->IsLazySweepingComplete()) {
1546 HEAP->old_pointer_space()->AdvanceSweeper(KB);
1547 CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
// Cross-checks Heap::SizeOfObjects against the sum of object sizes reported
// by a HeapIterator walk (skipping free-space fillers); the two figures must
// agree to within 5% of the larger one.
1552 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1553 CcTest::InitializeVM();
1554 HEAP->EnsureHeapIsIterable();
1555 intptr_t size_of_objects_1 = HEAP->SizeOfObjects();
1556 HeapIterator iterator(HEAP);
1557 intptr_t size_of_objects_2 = 0;
1558 for (HeapObject* obj = iterator.next();
1560 obj = iterator.next()) {
1561 if (!obj->IsFreeSpace()) {
1562 size_of_objects_2 += obj->Size();
1565 // Delta must be within 5% of the larger result.
1566 // TODO(gc): Tighten this up by distinguishing between byte
1567 // arrays that are real and those that merely mark free space
1569 if (size_of_objects_1 > size_of_objects_2) {
1570 intptr_t delta = size_of_objects_1 - size_of_objects_2;
1571 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1572 "Iterator: %" V8_PTR_PREFIX "d, "
1573 "delta: %" V8_PTR_PREFIX "d\n",
1574 size_of_objects_1, size_of_objects_2, delta);
1575 CHECK_GT(size_of_objects_1 / 20, delta);
1577 intptr_t delta = size_of_objects_2 - size_of_objects_1;
1578 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1579 "Iterator: %" V8_PTR_PREFIX "d, "
1580 "delta: %" V8_PTR_PREFIX "d\n",
1581 size_of_objects_1, size_of_objects_2, delta);
1582 CHECK_GT(size_of_objects_2 / 20, delta);
// Test helper: saturates the new space with 32-element fixed arrays under
// AlwaysAllocateScope so a subsequent scavenge cannot undo the filling.
1587 static void FillUpNewSpace(NewSpace* new_space) {
1588 // Fill up new space to the point that it is completely full. Make sure
1589 // that the scavenger does not undo the filling.
1590 Heap* heap = new_space->heap();
1591 Isolate* isolate = heap->isolate();
1592 Factory* factory = isolate->factory();
1593 HandleScope scope(isolate);
1594 AlwaysAllocateScope always_allocate;
1595 intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
// Leave one array's worth of slack so the last allocation does not fail.
1596 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
1597 for (intptr_t i = 0; i < number_of_fillers; i++) {
1598 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
// Verifies new-space capacity management: growing doubles capacity, Shrink
// is a no-op while the space is in use, and after a scavenge empties the
// space one Shrink halves capacity while further Shrinks are no-ops.
1603 TEST(GrowAndShrinkNewSpace) {
1604 CcTest::InitializeVM();
1605 NewSpace* new_space = HEAP->new_space();
1607 if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
1608 HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
1609 // The max size cannot exceed the reserved size, since semispaces must be
1610 // always within the reserved space. We can't test new space growing and
1611 // shrinking if the reserved size is the same as the minimum (initial) size.
1615 // Explicitly growing should double the space capacity.
1616 intptr_t old_capacity, new_capacity;
1617 old_capacity = new_space->Capacity();
1619 new_capacity = new_space->Capacity();
1620 CHECK(2 * old_capacity == new_capacity);
1622 old_capacity = new_space->Capacity();
1623 FillUpNewSpace(new_space);
1624 new_capacity = new_space->Capacity();
// Filling the space must not change its capacity.
1625 CHECK(old_capacity == new_capacity);
1627 // Explicitly shrinking should not affect space capacity.
1628 old_capacity = new_space->Capacity();
1629 new_space->Shrink();
1630 new_capacity = new_space->Capacity();
1631 CHECK(old_capacity == new_capacity);
1633 // Let the scavenger empty the new space.
1634 HEAP->CollectGarbage(NEW_SPACE);
1635 CHECK_LE(new_space->Size(), old_capacity);
1637 // Explicitly shrinking should halve the space capacity.
1638 old_capacity = new_space->Capacity();
1639 new_space->Shrink();
1640 new_capacity = new_space->Capacity();
1641 CHECK(old_capacity == 2 * new_capacity);
1643 // Consecutive shrinking should not affect space capacity.
1644 old_capacity = new_space->Capacity();
1645 new_space->Shrink();
1646 new_space->Shrink();
1647 new_space->Shrink();
1648 new_capacity = new_space->Capacity();
1649 CHECK(old_capacity == new_capacity);
// Verifies that CollectAllAvailableGarbage shrinks a previously-grown new
// space back to its original capacity.
1653 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
1654 CcTest::InitializeVM();
1656 if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
1657 HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
1658 // The max size cannot exceed the reserved size, since semispaces must be
1659 // always within the reserved space. We can't test new space growing and
1660 // shrinking if the reserved size is the same as the minimum (initial) size.
1664 v8::HandleScope scope(CcTest::isolate());
1665 NewSpace* new_space = HEAP->new_space();
1666 intptr_t old_capacity, new_capacity;
1667 old_capacity = new_space->Capacity();
1669 new_capacity = new_space->Capacity();
1670 CHECK(2 * old_capacity == new_capacity);
1671 FillUpNewSpace(new_space);
1672 HEAP->CollectAllAvailableGarbage();
1673 new_capacity = new_space->Capacity();
1674 CHECK(old_capacity == new_capacity);
// Test helper: counts all global objects currently on the heap by iterating
// every heap object. (Counter declaration/return elided in this excerpt.)
1678 static int NumberOfGlobalObjects() {
1680 HeapIterator iterator(HEAP);
1681 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1682 if (obj->IsGlobalObject()) count++;
1688 // Test that we don't embed maps from foreign contexts into
// Optimized monomorphic property access in ctx2 must not keep ctx1's maps
// (and thereby ctx1's whole native context) alive after ctx1 is disposed.
// Each context contributes 2 global objects to the count.
1690 TEST(LeakNativeContextViaMap) {
1691 i::FLAG_allow_natives_syntax = true;
1692 v8::Isolate* isolate = v8::Isolate::GetCurrent();
1693 v8::HandleScope outer_scope(isolate);
1694 v8::Persistent<v8::Context> ctx1p;
1695 v8::Persistent<v8::Context> ctx2p;
1697 v8::HandleScope scope(isolate);
1698 ctx1p.Reset(isolate, v8::Context::New(isolate));
1699 ctx2p.Reset(isolate, v8::Context::New(isolate));
1700 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1703 HEAP->CollectAllAvailableGarbage();
1704 CHECK_EQ(4, NumberOfGlobalObjects());
1707 v8::HandleScope inner_scope(isolate);
1708 CompileRun("var v = {x: 42}");
1709 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1710 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1711 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1713 ctx2->Global()->Set(v8_str("o"), v);
1714 v8::Local<v8::Value> res = CompileRun(
1715 "function f() { return o.x; }"
1716 "for (var i = 0; i < 10; ++i) f();"
1717 "%OptimizeFunctionOnNextCall(f);"
1719 CHECK_EQ(42, res->Int32Value());
1720 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1722 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
1723 ctx1p.Dispose(isolate);
1724 v8::V8::ContextDisposedNotification();
// If ctx1's map leaked into ctx2's optimized code, its globals would
// survive this collection and the count would stay at 4.
1726 HEAP->CollectAllAvailableGarbage();
1727 CHECK_EQ(2, NumberOfGlobalObjects());
1728 ctx2p.Dispose(isolate);
1729 HEAP->CollectAllAvailableGarbage();
1730 CHECK_EQ(0, NumberOfGlobalObjects());
1734 // Test that we don't embed functions from foreign contexts into
// Same shape as LeakNativeContextViaMap, but the cross-context value is a
// function called from optimized code rather than an object whose map could
// be embedded.
1736 TEST(LeakNativeContextViaFunction) {
1737 i::FLAG_allow_natives_syntax = true;
1738 v8::Isolate* isolate = v8::Isolate::GetCurrent();
1739 v8::HandleScope outer_scope(isolate);
1740 v8::Persistent<v8::Context> ctx1p;
1741 v8::Persistent<v8::Context> ctx2p;
1743 v8::HandleScope scope(isolate);
1744 ctx1p.Reset(isolate, v8::Context::New(isolate));
1745 ctx2p.Reset(isolate, v8::Context::New(isolate));
1746 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1749 HEAP->CollectAllAvailableGarbage();
1750 CHECK_EQ(4, NumberOfGlobalObjects());
1753 v8::HandleScope inner_scope(isolate);
1754 CompileRun("var v = function() { return 42; }");
1755 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1756 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1757 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1759 ctx2->Global()->Set(v8_str("o"), v);
1760 v8::Local<v8::Value> res = CompileRun(
1761 "function f(x) { return x(); }"
1762 "for (var i = 0; i < 10; ++i) f(o);"
1763 "%OptimizeFunctionOnNextCall(f);"
1765 CHECK_EQ(42, res->Int32Value());
1766 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1769 ctx1p.Dispose(ctx1->GetIsolate());
1770 v8::V8::ContextDisposedNotification();
1772 HEAP->CollectAllAvailableGarbage();
1773 CHECK_EQ(2, NumberOfGlobalObjects());
1774 ctx2p.Dispose(isolate);
1775 HEAP->CollectAllAvailableGarbage();
1776 CHECK_EQ(0, NumberOfGlobalObjects());
// Variant of LeakNativeContextViaMap using keyed (element) access on an
// array from the foreign context.
1780 TEST(LeakNativeContextViaMapKeyed) {
1781 i::FLAG_allow_natives_syntax = true;
1782 v8::Isolate* isolate = v8::Isolate::GetCurrent();
1783 v8::HandleScope outer_scope(isolate);
1784 v8::Persistent<v8::Context> ctx1p;
1785 v8::Persistent<v8::Context> ctx2p;
1787 v8::HandleScope scope(isolate);
1788 ctx1p.Reset(isolate, v8::Context::New(isolate));
1789 ctx2p.Reset(isolate, v8::Context::New(isolate));
1790 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1793 HEAP->CollectAllAvailableGarbage();
1794 CHECK_EQ(4, NumberOfGlobalObjects());
1797 v8::HandleScope inner_scope(isolate);
1798 CompileRun("var v = [42, 43]");
1799 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1800 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1801 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1803 ctx2->Global()->Set(v8_str("o"), v);
1804 v8::Local<v8::Value> res = CompileRun(
1805 "function f() { return o[0]; }"
1806 "for (var i = 0; i < 10; ++i) f();"
1807 "%OptimizeFunctionOnNextCall(f);"
1809 CHECK_EQ(42, res->Int32Value());
1810 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1813 ctx1p.Dispose(ctx1->GetIsolate());
1814 v8::V8::ContextDisposedNotification();
1816 HEAP->CollectAllAvailableGarbage();
1817 CHECK_EQ(2, NumberOfGlobalObjects());
1818 ctx2p.Dispose(isolate);
1819 HEAP->CollectAllAvailableGarbage();
1820 CHECK_EQ(0, NumberOfGlobalObjects());
// Variant of LeakNativeContextViaMap where the foreign-context object is
// used via the prototype chain (the elided script presumably reads through
// __proto__ — the literal's middle lines are not visible in this excerpt).
1824 TEST(LeakNativeContextViaMapProto) {
1825 i::FLAG_allow_natives_syntax = true;
1826 v8::Isolate* isolate = v8::Isolate::GetCurrent();
1827 v8::HandleScope outer_scope(isolate);
1828 v8::Persistent<v8::Context> ctx1p;
1829 v8::Persistent<v8::Context> ctx2p;
1831 v8::HandleScope scope(isolate);
1832 ctx1p.Reset(isolate, v8::Context::New(isolate));
1833 ctx2p.Reset(isolate, v8::Context::New(isolate));
1834 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1837 HEAP->CollectAllAvailableGarbage();
1838 CHECK_EQ(4, NumberOfGlobalObjects());
1841 v8::HandleScope inner_scope(isolate);
1842 CompileRun("var v = { y: 42}");
1843 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1844 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1845 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1847 ctx2->Global()->Set(v8_str("o"), v);
1848 v8::Local<v8::Value> res = CompileRun(
1854 "for (var i = 0; i < 10; ++i) f();"
1855 "%OptimizeFunctionOnNextCall(f);"
1857 CHECK_EQ(42, res->Int32Value());
1858 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1861 ctx1p.Dispose(isolate);
1862 v8::V8::ContextDisposedNotification();
1864 HEAP->CollectAllAvailableGarbage();
1865 CHECK_EQ(2, NumberOfGlobalObjects());
1866 ctx2p.Dispose(isolate);
1867 HEAP->CollectAllAvailableGarbage();
1868 CHECK_EQ(0, NumberOfGlobalObjects());
// Regression test: runs optimized code containing an instanceof stub while
// incremental marking is mid-way (f's code already marked black), then
// finishes the GC; heap verification (--verify-heap) catches a missing
// write barrier in the stub.
1872 TEST(InstanceOfStubWriteBarrier) {
1873 i::FLAG_allow_natives_syntax = true;
1875 i::FLAG_verify_heap = true;
1878 CcTest::InitializeVM();
1879 if (!i::V8::UseCrankshaft()) return;
1880 if (i::FLAG_force_marking_deque_overflows) return;
1881 v8::HandleScope outer_scope(v8::Isolate::GetCurrent());
1884 v8::HandleScope scope(v8::Isolate::GetCurrent());
1886 "function foo () { }"
1887 "function mkbar () { return new (new Function(\"\")) (); }"
1888 "function f (x) { return (x instanceof foo); }"
1889 "function g () { f(mkbar()); }"
1890 "f(new foo()); f(new foo());"
1891 "%OptimizeFunctionOnNextCall(f);"
1892 "f(new foo()); g();");
1895 IncrementalMarking* marking = HEAP->incremental_marking();
1899 Handle<JSFunction> f =
1900 v8::Utils::OpenHandle(
1901 *v8::Handle<v8::Function>::Cast(
1902 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
1904 CHECK(f->IsOptimized());
// Advance marking until f's optimized code object is marked black.
1906 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
1907 !marking->IsStopped()) {
1908 // Discard any pending GC requests otherwise we will get GC when we enter
1910 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
1913 CHECK(marking->IsMarking());
1916 v8::HandleScope scope(v8::Isolate::GetCurrent());
1917 v8::Handle<v8::Object> global = v8::Context::GetCurrent()->Global();
1918 v8::Handle<v8::Function> g =
1919 v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
1920 g->Call(global, 0, NULL);
1923 HEAP->incremental_marking()->set_should_hurry(true);
1924 HEAP->CollectGarbage(OLD_POINTER_SPACE);
// Verifies that GC clears only the prototype transitions whose prototypes
// are dead, compacts the transitions array, and records slots correctly when
// the transition array points into an evacuation candidate page.
1928 TEST(PrototypeTransitionClearing) {
1929 CcTest::InitializeVM();
1930 Isolate* isolate = Isolate::Current();
1931 Factory* factory = isolate->factory();
1932 v8::HandleScope scope(CcTest::isolate());
1937 "for (var i = 0; i < 10; i++) {"
1939 " var prototype = {};"
1940 " object.__proto__ = prototype;"
1941 " if (i >= 3) live.push(object, prototype);"
1944 Handle<JSObject> baseObject =
1945 v8::Utils::OpenHandle(
1946 *v8::Handle<v8::Object>::Cast(
1947 v8::Context::GetCurrent()->Global()->Get(v8_str("base"))));
1949 // Verify that only dead prototype transitions are cleared.
1950 CHECK_EQ(10, baseObject->map()->NumberOfProtoTransitions());
1951 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
// Transitions 0..2 were not kept alive by the `live` array above.
1952 const int transitions = 10 - 3;
1953 CHECK_EQ(transitions, baseObject->map()->NumberOfProtoTransitions());
1955 // Verify that prototype transitions array was compacted.
1956 FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
1957 for (int i = 0; i < transitions; i++) {
1958 int j = Map::kProtoTransitionHeaderSize +
1959 i * Map::kProtoTransitionElementsPerEntry;
1960 CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
1961 Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
1962 CHECK(proto->IsTheHole() || proto->IsJSObject());
1965 // Make sure next prototype is placed on an old-space evacuation candidate.
1966 Handle<JSObject> prototype;
1967 PagedSpace* space = HEAP->old_pointer_space();
1969 AlwaysAllocateScope always_allocate;
1970 SimulateFullSpace(space);
1971 prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
1974 // Add a prototype on an evacuation candidate and verify that transition
1975 // clearing correctly records slots in prototype transition array.
1976 i::FLAG_always_compact = true;
1977 Handle<Map> map(baseObject->map());
1978 CHECK(!space->LastPage()->Contains(
1979 map->GetPrototypeTransitions()->address()));
1980 CHECK(space->LastPage()->Contains(prototype->address()));
1981 JSObject::SetPrototype(baseObject, prototype, false);
1982 CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap());
1983 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1984 CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap());
// Verifies that after the global IC age is bumped (context-disposed + idle
// notifications) and an incremental mark-sweep completes, the function's
// SharedFunctionInfo counters (ic_age, opt_count, profiler ticks) are reset.
1988 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
1989 i::FLAG_stress_compaction = false;
1990 i::FLAG_allow_natives_syntax = true;
1992 i::FLAG_verify_heap = true;
1995 CcTest::InitializeVM();
1996 if (!i::V8::UseCrankshaft()) return;
1997 v8::HandleScope outer_scope(v8::Isolate::GetCurrent());
2000 v8::HandleScope scope(v8::Isolate::GetCurrent());
2004 " for (var i = 0; i < 100; i++) s += i;"
2008 "%OptimizeFunctionOnNextCall(f);"
2011 Handle<JSFunction> f =
2012 v8::Utils::OpenHandle(
2013 *v8::Handle<v8::Function>::Cast(
2014 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2015 CHECK(f->IsOptimized());
2017 IncrementalMarking* marking = HEAP->incremental_marking();
2021 // The following two calls will increment HEAP->global_ic_age().
2022 const int kLongIdlePauseInMs = 1000;
2023 v8::V8::ContextDisposedNotification();
2024 v8::V8::IdleNotification(kLongIdlePauseInMs);
2026 while (!marking->IsStopped() && !marking->IsComplete()) {
2027 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2029 if (!marking->IsStopped() || marking->should_hurry()) {
2030 // We don't normally finish a GC via Step(), we normally finish by
2031 // setting the stack guard and then do the final steps in the stack
2032 // guard interrupt. But here we didn't ask for that, and there is no
2033 // JS code running to trigger the interrupt, so we explicitly finalize
2035 HEAP->CollectAllGarbage(Heap::kNoGCFlags,
2036 "Test finalizing incremental mark-sweep");
2039 CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
2040 CHECK_EQ(0, f->shared()->opt_count());
2041 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Same counter-reset check as the incremental-marking variant above, but
// with incremental marking aborted so IdleNotification performs a full
// non-incremental mark-sweep instead.
// NOTE(review): listing is line-sampled; parts of the CompileRun string
// are missing here.
2045 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2046 i::FLAG_stress_compaction = false;
2047 i::FLAG_allow_natives_syntax = true;
2049 i::FLAG_verify_heap = true;
2052 CcTest::InitializeVM();
2053 if (!i::V8::UseCrankshaft()) return;
2054 v8::HandleScope outer_scope(CcTest::isolate());
2057 v8::HandleScope scope(CcTest::isolate());
2061 "  for (var i = 0; i < 100; i++)  s += i;"
2065 "%OptimizeFunctionOnNextCall(f);"
2068 Handle<JSFunction> f =
2069 v8::Utils::OpenHandle(
2070 *v8::Handle<v8::Function>::Cast(
2071 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2072 CHECK(f->IsOptimized());
// Turn incremental marking off so the idle notification below triggers
// a full GC directly.
2074 HEAP->incremental_marking()->Abort();
2076 // The following two calls will increment HEAP->global_ic_age().
2077 // Since incremental marking is off, IdleNotification will do full GC.
2078 const int kLongIdlePauseInMs = 1000;
2079 v8::V8::ContextDisposedNotification();
2080 v8::V8::IdleNotification(kLongIdlePauseInMs);
2082 CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
2083 CHECK_EQ(0, f->shared()->opt_count());
2084 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2088 // Test that HAllocateObject will always return an object in new-space.
// Fills new-space first so that, were optimized allocation to fall back
// to old-space, the final InNewSpace check would fail.
// NOTE(review): listing is line-sampled; parts of the CompileRun string
// are missing here.
2089 TEST(OptimizedAllocationAlwaysInNewSpace) {
2090 i::FLAG_allow_natives_syntax = true;
2091 CcTest::InitializeVM();
2092 if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
2093 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2094 v8::HandleScope scope(CcTest::isolate());
2096 SimulateFullSpace(HEAP->new_space());
2097 AlwaysAllocateScope always_allocate;
2098 v8::Local<v8::Value> res = CompileRun(
2101 "  for (var i = 0; i < 32; i++) {"
2102 "    this['x' + i] = x;"
2105 "function f(x) { return new c(x); };"
2107 "%OptimizeFunctionOnNextCall(f);"
2109 CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2111 Handle<JSObject> o =
2112 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2114 CHECK(HEAP->InNewSpace(*o));
// With high-promotion mode active, an optimized function's object-array
// literal (and its elements backing store) should be pretenured into
// old pointer space.
// NOTE(review): listing is line-sampled; parts of the CompileRun string
// are missing here.
2118 TEST(OptimizedPretenuringObjectArrayLiterals) {
2119 i::FLAG_allow_natives_syntax = true;
2120 CcTest::InitializeVM();
2121 if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
2122 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2123 v8::HandleScope scope(CcTest::isolate());
2124 HEAP->SetNewSpaceHighPromotionModeActive(true);
2126 v8::Local<v8::Value> res = CompileRun(
2128 "  var numbers = [{}, {}, {}];"
2132 "%OptimizeFunctionOnNextCall(f);"
2135 Handle<JSObject> o =
2136 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2138 CHECK(HEAP->InOldPointerSpace(o->elements()));
2139 CHECK(HEAP->InOldPointerSpace(*o));
// Checks pretenuring of an object literal with mixed in-object
// properties: pointer-valued properties land in old pointer space, the
// unboxed-double property (heap number) in old data space.
// NOTE(review): listing is line-sampled; parts of the CompileRun string
// are missing here.
2143 TEST(OptimizedPretenuringMixedInObjectProperties) {
2144 i::FLAG_allow_natives_syntax = true;
2145 CcTest::InitializeVM();
2146 if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
2147 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2148 v8::HandleScope scope(CcTest::isolate());
2149 HEAP->SetNewSpaceHighPromotionModeActive(true);
2151 v8::Local<v8::Value> res = CompileRun(
2153 "  var numbers = {a: {c: 2.2, d: {}}, b: 1.1};"
2157 "%OptimizeFunctionOnNextCall(f);"
2160 Handle<JSObject> o =
2161 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2163 CHECK(HEAP->InOldPointerSpace(*o));
2164 CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(0)));
2165 CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(1)));
// Recurse one level: the nested object's own properties must also be
// tenured into the appropriate spaces.
2167 JSObject* inner_object = reinterpret_cast<JSObject*>(o->RawFastPropertyAt(0));
2168 CHECK(HEAP->InOldPointerSpace(inner_object));
2169 CHECK(HEAP->InOldDataSpace(inner_object->RawFastPropertyAt(0)));
2170 CHECK(HEAP->InOldPointerSpace(inner_object->RawFastPropertyAt(1)));
// Object with only double-valued properties: the object itself is
// pretenured to old pointer space and its properties backing store
// (fixed double array) to old data space.
// NOTE(review): listing is line-sampled; parts of the CompileRun string
// are missing here.
2174 TEST(OptimizedPretenuringDoubleArrayProperties) {
2175 i::FLAG_allow_natives_syntax = true;
2176 CcTest::InitializeVM();
2177 if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
2178 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2179 v8::HandleScope scope(CcTest::isolate());
2180 HEAP->SetNewSpaceHighPromotionModeActive(true);
2182 v8::Local<v8::Value> res = CompileRun(
2184 "  var numbers = {a: 1.1, b: 2.2};"
2188 "%OptimizeFunctionOnNextCall(f);"
2191 Handle<JSObject> o =
2192 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2194 CHECK(HEAP->InOldPointerSpace(*o));
2195 CHECK(HEAP->InOldDataSpace(o->properties()));
// Double-array literal: elements (a FixedDoubleArray) must be
// pretenured into old data space, the JSArray itself into old pointer
// space.
// NOTE(review): listing is line-sampled; parts of the CompileRun string
// are missing here.
2199 TEST(OptimizedPretenuringdoubleArrayLiterals) {
2200 i::FLAG_allow_natives_syntax = true;
2201 CcTest::InitializeVM();
2202 if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
2203 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2204 v8::HandleScope scope(CcTest::isolate());
2205 HEAP->SetNewSpaceHighPromotionModeActive(true);
2207 v8::Local<v8::Value> res = CompileRun(
2209 "  var numbers = [1.1, 2.2, 3.3];"
2213 "%OptimizeFunctionOnNextCall(f);"
2216 Handle<JSObject> o =
2217 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2219 CHECK(HEAP->InOldDataSpace(o->elements()));
2220 CHECK(HEAP->InOldPointerSpace(*o));
// Nested literal mixing an object array and a double array: both inner
// arrays and the outer array are pretenured; only the double array's
// elements go to old data space.
// NOTE(review): listing is line-sampled; parts of the CompileRun string
// are missing here.
2224 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2225 i::FLAG_allow_natives_syntax = true;
2226 CcTest::InitializeVM();
2227 if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
2228 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2229 v8::HandleScope scope(CcTest::isolate());
2230 HEAP->SetNewSpaceHighPromotionModeActive(true);
2232 v8::Local<v8::Value> res = CompileRun(
2234 "  var numbers = [[{}, {}, {}],[1.1, 2.2, 3.3]];"
2238 "%OptimizeFunctionOnNextCall(f);"
// Pull the two inner arrays out of the result by index.
2241 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2242 Handle<JSObject> int_array_handle =
2243 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2244 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2245 Handle<JSObject> double_array_handle =
2246 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2248 Handle<JSObject> o =
2249 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2250 CHECK(HEAP->InOldPointerSpace(*o));
2251 CHECK(HEAP->InOldPointerSpace(*int_array_handle));
2252 CHECK(HEAP->InOldPointerSpace(int_array_handle->elements()));
2253 CHECK(HEAP->InOldPointerSpace(*double_array_handle));
2254 CHECK(HEAP->InOldDataSpace(double_array_handle->elements()));
// Nested object-array literals: outer array and both inner arrays (and
// their elements stores) are all pretenured into old pointer space.
// NOTE(review): listing is line-sampled; parts of the CompileRun string
// are missing here.
2258 TEST(OptimizedPretenuringNestedObjectLiterals) {
2259 i::FLAG_allow_natives_syntax = true;
2260 CcTest::InitializeVM();
2261 if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
2262 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2263 v8::HandleScope scope(CcTest::isolate());
2264 HEAP->SetNewSpaceHighPromotionModeActive(true);
2266 v8::Local<v8::Value> res = CompileRun(
2268 "  var numbers = [[{}, {}, {}],[{}, {}, {}]];"
2272 "%OptimizeFunctionOnNextCall(f);"
2275 v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
2276 Handle<JSObject> int_array_handle_1 =
2277 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
2278 v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
2279 Handle<JSObject> int_array_handle_2 =
2280 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
2282 Handle<JSObject> o =
2283 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2284 CHECK(HEAP->InOldPointerSpace(*o));
2285 CHECK(HEAP->InOldPointerSpace(*int_array_handle_1));
2286 CHECK(HEAP->InOldPointerSpace(int_array_handle_1->elements()));
2287 CHECK(HEAP->InOldPointerSpace(*int_array_handle_2));
2288 CHECK(HEAP->InOldPointerSpace(int_array_handle_2->elements()));
// Nested double-array literals: the inner arrays' elements stores are
// pretenured into old data space, all JSArray wrappers into old pointer
// space.
// NOTE(review): listing is line-sampled; parts of the CompileRun string
// are missing here.
2292 TEST(OptimizedPretenuringNestedDoubleLiterals) {
2293 i::FLAG_allow_natives_syntax = true;
2294 CcTest::InitializeVM();
2295 if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
2296 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2297 v8::HandleScope scope(CcTest::isolate());
2298 HEAP->SetNewSpaceHighPromotionModeActive(true);
2300 v8::Local<v8::Value> res = CompileRun(
2302 "  var numbers = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
2306 "%OptimizeFunctionOnNextCall(f);"
2309 v8::Local<v8::Value> double_array_1 =
2310 v8::Object::Cast(*res)->Get(v8_str("0"));
2311 Handle<JSObject> double_array_handle_1 =
2312 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
2313 v8::Local<v8::Value> double_array_2 =
2314 v8::Object::Cast(*res)->Get(v8_str("1"));
2315 Handle<JSObject> double_array_handle_2 =
2316 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
2318 Handle<JSObject> o =
2319 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2320 CHECK(HEAP->InOldPointerSpace(*o));
2321 CHECK(HEAP->InOldPointerSpace(*double_array_handle_1));
2322 CHECK(HEAP->InOldDataSpace(double_array_handle_1->elements()));
2323 CHECK(HEAP->InOldPointerSpace(*double_array_handle_2));
2324 CHECK(HEAP->InOldDataSpace(double_array_handle_2->elements()));
2328 // Test regular array literals allocation.
// Without high-promotion mode, the optimized literal's elements should
// stay in new-space.
// NOTE(review): listing is line-sampled; parts of the CompileRun string
// are missing here.
2329 TEST(OptimizedAllocationArrayLiterals) {
2330 i::FLAG_allow_natives_syntax = true;
2331 CcTest::InitializeVM();
2332 if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
2333 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2334 v8::HandleScope scope(CcTest::isolate());
2336 v8::Local<v8::Value> res = CompileRun(
2338 "  var numbers = new Array(1, 2, 3);"
2339 "  numbers[0] = 3.14;"
2343 "%OptimizeFunctionOnNextCall(f);"
2345 CHECK_EQ(static_cast<int>(3.14),
2346 v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
2348 Handle<JSObject> o =
2349 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2351 CHECK(HEAP->InNewSpace(o->elements()));
// With --pretenuring-call-new and high-promotion mode, an object
// allocated via `new` in optimized code should be tenured into old
// pointer space.
// NOTE(review): listing is line-sampled; parts of the CompileRun string
// are missing here.
2355 TEST(OptimizedPretenuringCallNew) {
2356 i::FLAG_allow_natives_syntax = true;
2357 i::FLAG_pretenuring_call_new = true;
2358 CcTest::InitializeVM();
2359 if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
2360 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2361 v8::HandleScope scope(CcTest::isolate());
2362 HEAP->SetNewSpaceHighPromotionModeActive(true);
2364 AlwaysAllocateScope always_allocate;
2365 v8::Local<v8::Value> res = CompileRun(
2366 "function g() { this.a = 0; }"
2371 "%OptimizeFunctionOnNextCall(f);"
2374 Handle<JSObject> o =
2375 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2376 CHECK(HEAP->InOldPointerSpace(*o));
// Helper: number of entries in the map's transition array.
// NOTE(review): the closing brace of this function is missing from this
// sampled listing.
2380 static int CountMapTransitions(Map* map) {
2381 return map->transitions()->number_of_transitions();
2385 // Test that map transitions are cleared and maps are collected with
2386 // incremental marking as well.
// NOTE(review): the TEST(...) { header line of this test is absent from
// this sampled listing (original line 2387); body lines below.
2388 i::FLAG_stress_compaction = false;
2389 i::FLAG_allow_natives_syntax = true;
2390 i::FLAG_trace_incremental_marking = true;
2391 CcTest::InitializeVM();
2392 v8::HandleScope scope(CcTest::isolate());
2393 static const int transitions_count = 256;
// Create 256 distinct property transitions hanging off Object's map.
2396 AlwaysAllocateScope always_allocate;
2397 for (int i = 0; i < transitions_count; i++) {
2398 EmbeddedVector<char, 64> buffer;
2399 OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
2400 CompileRun(buffer.start());
2402 CompileRun("var root = new Object;");
2405 Handle<JSObject> root =
2406 v8::Utils::OpenHandle(
2407 *v8::Handle<v8::Object>::Cast(
2408 v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
2410 // Count number of live transitions before marking.
2411 int transitions_before = CountMapTransitions(root->map());
2412 CompileRun("%DebugPrint(root);");
2413 CHECK_EQ(transitions_count, transitions_before);
2415 SimulateIncrementalMarking();
2416 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2418 // Count number of live transitions after marking.  Note that one transition
2419 // is left, because 'o' still holds an instance of one transition target.
2420 int transitions_after = CountMapTransitions(root->map());
2421 CompileRun("%DebugPrint(root);");
2422 CHECK_EQ(1, transitions_after);
// Regression test: a StoreIC that performs a map transition while
// incremental marking is in progress must re-grey the transitioned map
// so map collection does not leave the object in a corrupt state.
// NOTE(review): listing is line-sampled; parts of the CompileRun strings
// are missing here.
2426 TEST(Regress2143a) {
2427 i::FLAG_collect_maps = true;
2428 i::FLAG_incremental_marking = true;
2429 CcTest::InitializeVM();
2430 v8::HandleScope scope(CcTest::isolate());
2432 // Prepare a map transition from the root object together with a yet
2433 // untransitioned root object.
2434 CompileRun("var root = new Object;"
2436 "root = new Object;");
2438 SimulateIncrementalMarking();
2440 // Compile a StoreIC that performs the prepared map transition. This
2441 // will restart incremental marking and should make sure the root is
2442 // marked grey again.
2443 CompileRun("function f(o) {"
2449 // This bug only triggers with aggressive IC clearing.
2450 HEAP->AgeInlineCaches();
2452 // Explicitly request GC to perform final marking step and sweeping.
2453 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2455 Handle<JSObject> root =
2456 v8::Utils::OpenHandle(
2457 *v8::Handle<v8::Object>::Cast(
2458 v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
2460 // The root object should be in a sane state.
2461 CHECK(root->IsJSObject());
2462 CHECK(root->map()->IsMap());
// Variant of Regress2143a using an optimized LStoreNamedField (compiled
// then deoptimized) instead of a StoreIC to perform the map transition
// during incremental marking.
// NOTE(review): listing is line-sampled; parts of the CompileRun strings
// are missing here.
2466 TEST(Regress2143b) {
2467 i::FLAG_collect_maps = true;
2468 i::FLAG_incremental_marking = true;
2469 i::FLAG_allow_natives_syntax = true;
2470 CcTest::InitializeVM();
2471 v8::HandleScope scope(CcTest::isolate());
2473 // Prepare a map transition from the root object together with a yet
2474 // untransitioned root object.
2475 CompileRun("var root = new Object;"
2477 "root = new Object;");
2479 SimulateIncrementalMarking();
2481 // Compile an optimized LStoreNamedField that performs the prepared
2482 // map transition. This will restart incremental marking and should
2483 // make sure the root is marked grey again.
2484 CompileRun("function f(o) {"
2489 "%OptimizeFunctionOnNextCall(f);"
2491 "%DeoptimizeFunction(f);");
2493 // This bug only triggers with aggressive IC clearing.
2494 HEAP->AgeInlineCaches();
2496 // Explicitly request GC to perform final marking step and sweeping.
2497 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2499 Handle<JSObject> root =
2500 v8::Utils::OpenHandle(
2501 *v8::Handle<v8::Object>::Cast(
2502 v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
2504 // The root object should be in a sane state.
2505 CHECK(root->IsJSObject());
2506 CHECK(root->map()->IsMap());
// Allocates one tiny live object per otherwise-full old-space page,
// then checks that successive GCs shrink the page count and a
// last-resort GC returns the space to a single page.
2510 TEST(ReleaseOverReservedPages) {
2511 i::FLAG_trace_gc = true;
2512 // The optimizer can allocate stuff, messing up the test.
2513 i::FLAG_crankshaft = false;
2514 i::FLAG_always_opt = false;
2515 CcTest::InitializeVM();
2516 Isolate* isolate = Isolate::Current();
2517 Factory* factory = isolate->factory();
2518 v8::HandleScope scope(CcTest::isolate());
2519 static const int number_of_test_pages = 20;
2521 // Prepare many pages with low live-bytes count.
2522 PagedSpace* old_pointer_space = HEAP->old_pointer_space();
2523 CHECK_EQ(1, old_pointer_space->CountTotalPages());
2524 for (int i = 0; i < number_of_test_pages; i++) {
2525 AlwaysAllocateScope always_allocate;
2526 SimulateFullSpace(old_pointer_space);
2527 factory->NewFixedArray(1, TENURED);
2529 CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
2531 // Triggering one GC will cause a lot of garbage to be discovered but
2532 // even spread across all allocated pages.
2533 HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
2534 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
2536 // Triggering subsequent GCs should cause at least half of the pages
2537 // to be released to the OS after at most two cycles.
2538 HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
2539 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
2540 HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
2541 CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
2543 // Triggering a last-resort GC should cause all pages to be released to the
2544 // OS so that other processes can seize the memory.  If we get a failure here
2545 // where there are 2 pages left instead of 1, then we should increase the
2546 // size of the first page a little in SizeOfFirstPage in spaces.cc.  The
2547 // first page should be small in order to reduce memory used when the VM
2548 // boots, but if the 20 small arrays don't fit on the first page then that's
2549 // an indication that it is too small.
2550 HEAP->CollectAllAvailableGarbage("triggered really hard");
2551 CHECK_EQ(1, old_pointer_space->CountTotalPages());
// NOTE(review): the TEST(...) { header line of this test is absent from
// this sampled listing (original line ~2555).  The body below builds an
// old-space sliced string over a new-space parent and checks the parent
// stays a sequential string across GCs.
2556 i::FLAG_stress_compaction = false;
2557 CcTest::InitializeVM();
2558 Isolate* isolate = Isolate::Current();
2559 Factory* factory = isolate->factory();
2560 v8::HandleScope scope(CcTest::isolate());
2561 Handle<String> slice(HEAP->empty_string());
2564 // Generate a parent that lives in new-space.
2565 v8::HandleScope inner_scope(CcTest::isolate());
2566 const char* c = "This text is long enough to trigger sliced strings.";
2567 Handle<String> s = factory->NewStringFromAscii(CStrVector(c));
2568 CHECK(s->IsSeqOneByteString());
2569 CHECK(HEAP->InNewSpace(*s));
2571 // Generate a sliced string that is based on the above parent and
2572 // lives in old-space.
2573 SimulateFullSpace(HEAP->new_space());
2574 AlwaysAllocateScope always_allocate;
2575 Handle<String> t = factory->NewProperSubString(s, 5, 35);
2576 CHECK(t->IsSlicedString());
2577 CHECK(!HEAP->InNewSpace(*t));
// Copy the slice into the outer-scope handle before the inner scope dies.
2578 *slice.location() = *t.location();
2581 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
2582 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2583 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
// Smoke test for SharedFunctionInfo::PrintLn() under OBJECT_PRINT.
// NOTE(review): listing is line-sampled; the CompileRun(source) call is
// missing here, and this test appears to sit inside an
// #ifdef OBJECT_PRINT region (see #endif below).
2588 TEST(PrintSharedFunctionInfo) {
2589 CcTest::InitializeVM();
2590 v8::HandleScope scope(CcTest::isolate());
2591 const char* source = "f = function() { return 987654321; }\n"
2592 "g = function() { return 123456789; }\n";
2594 Handle<JSFunction> g =
2595 v8::Utils::OpenHandle(
2596 *v8::Handle<v8::Function>::Cast(
2597 v8::Context::GetCurrent()->Global()->Get(v8_str("g"))));
2599 DisallowHeapAllocation no_allocation;
2600 g->shared()->PrintLn();
2602 #endif  // OBJECT_PRINT
// NOTE(review): the TEST(...) { header line of this test is absent from
// this sampled listing (original line ~2605).  The body checks that
// setting the identity hash and a hidden property in either order keeps
// both retrievable, and that the backing hash table stays small.
2606 CcTest::InitializeVM();
2607 v8::HandleScope scope(CcTest::isolate());
2609 v8::Handle<v8::String> value = v8_str("val string");
2610 Smi* hash = Smi::FromInt(321);
2611 Heap* heap = Isolate::Current()->heap();
2613 for (int i = 0; i < 2; i++) {
2614 // Store identity hash first and common hidden property second.
2615 v8::Handle<v8::Object> obj = v8::Object::New();
2616 Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
2617 CHECK(internal_obj->HasFastProperties());
2619 // In the first iteration, set hidden value first and identity hash second.
2620 // In the second iteration, reverse the order.
2621 if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
2622 MaybeObject* maybe_obj = internal_obj->SetIdentityHash(hash,
2624 CHECK(!maybe_obj->IsFailure());
2625 if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
2629 internal_obj->GetHiddenProperty(heap->identity_hash_string()));
2630 CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
// Inspect the hidden-properties hash table stored in the first fast
// property slot and bound its size.
2633 DescriptorArray* descriptors = internal_obj->map()->instance_descriptors();
2634 ObjectHashTable* hashtable = ObjectHashTable::cast(
2635 internal_obj->RawFastPropertyAt(descriptors->GetFieldIndex(0)));
2636 // HashTable header (5) and 4 initial entries (8).
2637 CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
// Type feedback cells referencing closures from disposed native
// contexts must be cleared (set to the hole) by a mark-sweep preceded by
// incremental marking.
// NOTE(review): listing is line-sampled; the two nested context-creation
// scopes around the fun1/fun2 CompileRun calls are partly missing here.
2642 TEST(IncrementalMarkingClearsTypeFeedbackCells) {
2643 if (i::FLAG_always_opt) return;
2644 CcTest::InitializeVM();
2645 v8::HandleScope scope(CcTest::isolate());
2646 v8::Local<v8::Value> fun1, fun2;
2650 CompileRun("function fun() {};");
2651 fun1 = env->Global()->Get(v8_str("fun"));
2656 CompileRun("function fun() {};");
2657 fun2 = env->Global()->Get(v8_str("fun"));
2660 // Prepare function f that contains type feedback for closures
2661 // originating from two different native contexts.
2662 v8::Context::GetCurrent()->Global()->Set(v8_str("fun1"), fun1);
2663 v8::Context::GetCurrent()->Global()->Set(v8_str("fun2"), fun2);
2664 CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
2665 Handle<JSFunction> f =
2666 v8::Utils::OpenHandle(
2667 *v8::Handle<v8::Function>::Cast(
2668 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2669 Handle<TypeFeedbackCells> cells(TypeFeedbackInfo::cast(
2670 f->shared()->code()->type_feedback_info())->type_feedback_cells());
2672 CHECK_EQ(2, cells->CellCount());
2673 CHECK(cells->GetCell(0)->value()->IsJSFunction());
2674 CHECK(cells->GetCell(1)->value()->IsJSFunction());
2676 SimulateIncrementalMarking();
2677 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
// After GC the foreign-context closures must have been dropped.
2679 CHECK_EQ(2, cells->CellCount());
2680 CHECK(cells->GetCell(0)->value()->IsTheHole());
2681 CHECK(cells->GetCell(1)->value()->IsTheHole());
// Helper: scans the relocation info of |code| for the first inline
// cache stub of the given |kind|.
// NOTE(review): the return statements and closing braces of this
// function are missing from this sampled listing.
2685 static Code* FindFirstIC(Code* code, Code::Kind kind) {
2686 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
2687 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
2688 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
2689 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
2690 for (RelocIterator it(code, mask); !it.done(); it.next()) {
2691 RelocInfo* info = it.rinfo();
2692 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
2693 if (target->is_inline_cache_stub() && target->kind() == kind) {
// A monomorphic LOAD_IC whose target map comes from the same native
// context must survive incremental marking + mark-sweep unchanged.
2701 TEST(IncrementalMarkingPreservesMonomorhpicIC) {
2702 if (i::FLAG_always_opt) return;
2703 CcTest::InitializeVM();
2704 v8::HandleScope scope(CcTest::isolate());
2706 // Prepare function f that contains a monomorphic IC for object
2707 // originating from the same native context.
2708 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
2709 "function f(o) { return o.x; } f(obj); f(obj);");
2710 Handle<JSFunction> f =
2711 v8::Utils::OpenHandle(
2712 *v8::Handle<v8::Function>::Cast(
2713 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2715 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2716 CHECK(ic_before->ic_state() == MONOMORPHIC);
2718 SimulateIncrementalMarking();
2719 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
// The IC must still be monomorphic — same-context maps are not cleared.
2721 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2722 CHECK(ic_after->ic_state() == MONOMORPHIC);
// A monomorphic LOAD_IC whose target map comes from a *different*
// (disposed) native context must be cleared back to UNINITIALIZED by
// incremental marking + mark-sweep.
// NOTE(review): listing is line-sampled; the nested context-creation
// scope around the first CompileRun is partly missing here.
2726 TEST(IncrementalMarkingClearsMonomorhpicIC) {
2727 if (i::FLAG_always_opt) return;
2728 CcTest::InitializeVM();
2729 v8::HandleScope scope(CcTest::isolate());
2730 v8::Local<v8::Value> obj1;
2734 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
2735 obj1 = env->Global()->Get(v8_str("obj"));
2738 // Prepare function f that contains a monomorphic IC for object
2739 // originating from a different native context.
2740 v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
2741 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
2742 Handle<JSFunction> f =
2743 v8::Utils::OpenHandle(
2744 *v8::Handle<v8::Function>::Cast(
2745 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2747 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2748 CHECK(ic_before->ic_state() == MONOMORPHIC);
2750 // Fire context dispose notification.
2751 v8::V8::ContextDisposedNotification();
2752 SimulateIncrementalMarking();
2753 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2755 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2756 CHECK(ic_after->ic_state() == UNINITIALIZED);
// A polymorphic LOAD_IC fed by objects from two different native
// contexts must be cleared back to UNINITIALIZED after a context
// dispose notification followed by incremental marking + mark-sweep.
// NOTE(review): listing is line-sampled; the nested context-creation
// scopes around the obj1/obj2 CompileRun calls are partly missing here.
2760 TEST(IncrementalMarkingClearsPolymorhpicIC) {
2761 if (i::FLAG_always_opt) return;
2762 CcTest::InitializeVM();
2763 v8::HandleScope scope(CcTest::isolate());
2764 v8::Local<v8::Value> obj1, obj2;
2768 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
2769 obj1 = env->Global()->Get(v8_str("obj"));
2774 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
2775 obj2 = env->Global()->Get(v8_str("obj"));
2778 // Prepare function f that contains a polymorphic IC for objects
2779 // originating from two different native contexts.
2780 v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
2781 v8::Context::GetCurrent()->Global()->Set(v8_str("obj2"), obj2);
2782 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
2783 Handle<JSFunction> f =
2784 v8::Utils::OpenHandle(
2785 *v8::Handle<v8::Function>::Cast(
2786 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2788 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2789 CHECK(ic_before->ic_state() == POLYMORPHIC);
2791 // Fire context dispose notification.
2792 v8::V8::ContextDisposedNotification();
2793 SimulateIncrementalMarking();
2794 HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2796 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2797 CHECK(ic_after->ic_state() == UNINITIALIZED);
// External string resource whose Dispose() frees the backing buffer;
// IsDisposed() lets tests observe when the GC has released the string.
// NOTE(review): this sampled listing omits several lines of the class
// (access specifiers, the data_ = NULL reset in Dispose(), and the
// private member declarations).
2801 class SourceResource: public v8::String::ExternalAsciiStringResource {
2803 explicit SourceResource(const char* data)
2804 : data_(data), length_(strlen(data)) { }
2806 virtual void Dispose() {
2807 i::DeleteArray(data_);
2811 const char* data() const { return data_; }
2813 size_t length() const { return length_; }
2815 bool IsDisposed() { return data_ == NULL; }
// Shared driver for the ReleaseStackTraceData cases below: compiles
// |source| from an external string, triggers the stack-trace |accessor|,
// then checks the external resource was released by GC.
// NOTE(review): listing is line-sampled; some statements between the
// visible lines are missing here.
2823 void ReleaseStackTraceDataTest(const char* source, const char* accessor) {
2824 // Test that the data retained by the Error.stack accessor is released
2825 // after the first time the accessor is fired.  We use external string
2826 // to check whether the data is being released since the external string
2827 // resource's callback is fired when the external string is GC'ed.
2828 FLAG_use_ic = false;  // ICs retain objects.
2829 FLAG_parallel_recompilation = false;
2830 CcTest::InitializeVM();
2831 v8::HandleScope scope(CcTest::isolate());
2832 SourceResource* resource = new SourceResource(i::StrDup(source));
2834 v8::HandleScope scope(CcTest::isolate());
2835 v8::Handle<v8::String> source_string = v8::String::NewExternal(resource);
2836 HEAP->CollectAllAvailableGarbage();
2837 v8::Script::Compile(source_string)->Run();
2838 CHECK(!resource->IsDisposed());
2840 // HEAP->CollectAllAvailableGarbage();
2841 CHECK(!resource->IsDisposed());
// Firing the accessor materializes the stack trace and drops the
// reference to the source; the next GC can then release it.
2843 CompileRun(accessor);
2844 HEAP->CollectAllAvailableGarbage();
2846 // External source has been released.
2847 CHECK(resource->IsDisposed());
// Runs ReleaseStackTraceDataTest over four error scenarios (normal
// error, stack overflow, each also used as a prototype) with both the
// stack getter and setter.
// NOTE(review): listing is line-sampled; parts of the JS source strings
// are missing here.
2852 TEST(ReleaseStackTraceData) {
2853 static const char* source1 = "var error = null;            "
2854 /* Normal Error */           "try {                        "
2855 "  throw new Error();         "
2859 static const char* source2 = "var error = null;            "
2860 /* Stack overflow */         "try {                        "
2861 "  (function f() { f(); })(); "
2865 static const char* source3 = "var error = null;            "
2866 /* Normal Error */           "try {                        "
2867 /* as prototype */           "  throw new Error();         "
2870 "  error.__proto__ = e;       "
2872 static const char* source4 = "var error = null;            "
2873 /* Stack overflow */         "try {                        "
2874 /* as prototype   */         "  (function f() { f(); })(); "
2877 "  error.__proto__ = e;       "
2879 static const char* getter = "error.stack";
2880 static const char* setter = "error.stack = 0";
2882 ReleaseStackTraceDataTest(source1, setter);
2883 ReleaseStackTraceDataTest(source2, setter);
2884 // We do not test source3 and source4 with setter, since the setter is
2885 // supposed to (untypically) write to the receiver, not the holder.  This is
2886 // to emulate the behavior of a data property.
2888 ReleaseStackTraceDataTest(source1, getter);
2889 ReleaseStackTraceDataTest(source2, getter);
2890 ReleaseStackTraceDataTest(source3, getter);
2891 ReleaseStackTraceDataTest(source4, getter);
// Regression test for a crash where a CallIC on an evacuation candidate
// was cleared after its containing code had already been visited,
// leaving a stale target.  The test stages code objects across pages,
// then provokes a reduce-memory GC and re-executes the call.
// NOTE(review): listing is line-sampled; some statements between the
// visible lines are missing here.
2895 TEST(Regression144230) {
2896 i::FLAG_stress_compaction = false;
2897 CcTest::InitializeVM();
2898 Isolate* isolate = Isolate::Current();
2899 Heap* heap = isolate->heap();
2900 HandleScope scope(isolate);
2902 // First make sure that the uninitialized CallIC stub is on a single page
2903 // that will later be selected as an evacuation candidate.
2905 HandleScope inner_scope(isolate);
2906 AlwaysAllocateScope always_allocate;
2907 SimulateFullSpace(heap->code_space());
2908 isolate->stub_cache()->ComputeCallInitialize(9, RelocInfo::CODE_TARGET);
2911 // Second compile a CallIC and execute it once so that it gets patched to
2912 // the pre-monomorphic stub. These code objects are on yet another page.
2914 HandleScope inner_scope(isolate);
2915 AlwaysAllocateScope always_allocate;
2916 SimulateFullSpace(heap->code_space());
2917 CompileRun("var o = { f:function(a,b,c,d,e,f,g,h,i) {}};"
2918 "function call() { o.f(1,2,3,4,5,6,7,8,9); };"
2922 // Third we fill up the last page of the code space so that it does not get
2923 // chosen as an evacuation candidate.
2925 HandleScope inner_scope(isolate);
2926 AlwaysAllocateScope always_allocate;
2927 CompileRun("for (var i = 0; i < 2000; i++) {"
2928 "  eval('function f' + i + '() { return ' + i +'; };' +"
2929 "       'f' + i + '();');"
2932 heap->CollectAllGarbage(Heap::kNoGCFlags);
2934 // Fourth is the tricky part. Make sure the code containing the CallIC is
2935 // visited first without clearing the IC. The shared function info is then
2936 // visited later, causing the CallIC to be cleared.
2937 Handle<String> name = isolate->factory()->InternalizeUtf8String("call");
2938 Handle<GlobalObject> global(isolate->context()->global_object());
2939 MaybeObject* maybe_call = global->GetProperty(*name);
2940 JSFunction* call = JSFunction::cast(maybe_call->ToObjectChecked());
2941 USE(global->SetProperty(*name, Smi::FromInt(0), NONE, kNonStrictMode));
2942 isolate->compilation_cache()->Clear();
2943 call->shared()->set_ic_age(heap->global_ic_age() + 1);
2944 Handle<Object> call_code(call->code(), isolate);
2945 Handle<Object> call_function(call, isolate);
2947 // Now we are ready to mess up the heap.
2948 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
2950 // Either heap verification caught the problem already or we go kaboom once
2951 // the CallIC is executed the next time.
2952 USE(global->SetProperty(*name, *call_function, NONE, kNonStrictMode));
2953 CompileRun("call();");
// Regression test for incremental code flushing: keeps an optimized
// code object alive via a handle while its functions are enqueued as
// flushing candidates, then re-optimizes and runs to ensure the
// deoptimizer does not trip over flushed unoptimized code.
// NOTE(review): listing is line-sampled; some lines (e.g. the outer
// Handle<Code> code; declaration and parts of the CompileRun string)
// are missing here.
2957 TEST(Regress159140) {
2958 i::FLAG_allow_natives_syntax = true;
2959 i::FLAG_flush_code_incrementally = true;
2960 CcTest::InitializeVM();
2961 Isolate* isolate = Isolate::Current();
2962 Heap* heap = isolate->heap();
2963 HandleScope scope(isolate);
2965 // Perform one initial GC to enable code flushing.
2966 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2968 // Prepare several closures that are all eligible for code flushing
2969 // because all reachable ones are not optimized. Make sure that the
2970 // optimized code object is directly reachable through a handle so
2971 // that it is marked black during incremental marking.
2974 HandleScope inner_scope(isolate);
2975 CompileRun("function h(x) {}"
2976 "function mkClosure() {"
2977 "  return function(x) { return x + 1; };"
2979 "var f = mkClosure();"
2980 "var g = mkClosure();"
2984 "%OptimizeFunctionOnNextCall(f); f(3);"
2985 "%OptimizeFunctionOnNextCall(h); h(3);");
2987 Handle<JSFunction> f =
2988 v8::Utils::OpenHandle(
2989 *v8::Handle<v8::Function>::Cast(
2990 v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2991 CHECK(f->is_compiled());
2992 CompileRun("f = null;");
2994 Handle<JSFunction> g =
2995 v8::Utils::OpenHandle(
2996 *v8::Handle<v8::Function>::Cast(
2997 v8::Context::GetCurrent()->Global()->Get(v8_str("g"))));
2998 CHECK(g->is_compiled());
// Age g's code past the flushing threshold so it becomes a candidate.
2999 const int kAgingThreshold = 6;
3000 for (int i = 0; i < kAgingThreshold; i++) {
3001 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
3004 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
3007 // Simulate incremental marking so that the functions are enqueued as
3008 // code flushing candidates. Then optimize one function. Finally
3009 // finish the GC to complete code flushing.
3010 SimulateIncrementalMarking();
3011 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
3012 heap->CollectAllGarbage(Heap::kNoGCFlags);
3014 // Unoptimized code is missing and the deoptimizer will go ballistic.
3015 CompileRun("g('bozo');");
// Regression test: unoptimized code may be flushed even while still cached
// in the optimized code map; a fresh closure that later gets code installed
// from that map must not trip the deoptimizer.
TEST(Regress165495) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Prepare an optimized closure that the optimized code map will get
  // populated. Then age the unoptimized code to trigger code flushing
  // but make sure the optimized code is unreachable.
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               " return function(x) { return x + 1; };"
               "var f = mkClosure();"
               "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    // Age the shared (unoptimized) code past the flushing threshold.
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));

    // Drop the JS-visible reference so the optimized code is unreachable.
    CompileRun("f = null;");

  // Simulate incremental marking so that unoptimized code is flushed
  // even though it still is cached in the optimized code map.
  SimulateIncrementalMarking();
  heap->CollectAllGarbage(Heap::kNoGCFlags);

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var g = mkClosure(); g('bozo');");
// Regression test: exercises the code-flushing candidate list when one
// candidate's unoptimized code is replaced (by optimization) mid-cycle while
// another candidate forms the dangling tail of the list.
TEST(Regress169209) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;

  // Experimental natives are compiled during snapshot deserialization.
  // This test breaks because heap layout changes in a way that closure
  // is visited before shared function info.
  i::FLAG_harmony_typed_arrays = false;
  i::FLAG_harmony_array_buffer = false;

  // Disable loading the i18n extension which breaks the assumptions of this
  // test about the heap layout.
  i::FLAG_enable_i18n = false;

  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Prepare a shared function info eligible for code flushing for which
  // the unoptimized code will be replaced during optimization.
  Handle<SharedFunctionInfo> shared1;
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 'foobar'; }"
               "function g(x) { if (x) f(); }"
    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    // Age the code past the flushing threshold.
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));

    shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));

  // Prepare a shared function info eligible for code flushing that will
  // represent the dangling tail of the candidate list.
  Handle<SharedFunctionInfo> shared2;
    HandleScope inner_scope(isolate);
    CompileRun("function flushMe() { return 0; }"
    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("flushMe"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    // Age this candidate too so it also lands on the flushing list.
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));

    shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));

  // Simulate incremental marking and collect code flushing candidates.
  SimulateIncrementalMarking();
  // Non-NULL gc_metadata indicates shared1's code is on the candidate list.
  CHECK(shared1->code()->gc_metadata() != NULL);

  // Optimize function and make sure the unoptimized code is replaced.
  CompileRun("%OptimizeFunctionOnNextCall(g);"

  // Finish garbage collection cycle.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  // The candidate list has been processed; the metadata must be cleared.
  CHECK(shared1->code()->gc_metadata() == NULL);
// Helper that fills new-space so that only |extra_bytes| of linear
// allocation room remain. The consumed span is cast to a FreeListNode and
// given the allocated size, so it acts as a filler object.
static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
  // Remaining linear allocation area: limit minus current top.
  int space_remaining = static_cast<int>(
      *space->allocation_limit_address() - *space->allocation_top_address());
  CHECK(space_remaining >= extra_bytes);
  int new_linear_size = space_remaining - extra_bytes;
  v8::internal::MaybeObject* maybe = space->AllocateRaw(new_linear_size);
  v8::internal::FreeListNode* node = v8::internal::FreeListNode::cast(maybe);
  node->set_size(space->heap(), new_linear_size);
// Regression test for issue 169928: sets up new-space so that a fast-literal
// array allocation lands exactly at a prepared boundary next to a filler the
// size of an AllocationMemento; see the inline comments for the exact layout.
TEST(Regress169928) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_crankshaft = false;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  // Prepare the environment
  CompileRun("function fastliteralcase(literal, value) {"
             " literal[0] = value;"
             "function get_standard_literal() {"
             " var literal = [1, 2, 3];"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // Prepare the trigger script up front so that compiling it later does not
  // perturb the carefully arranged heap.
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  v8::Context::GetCurrent()->Global()->Set(array_name, v8::Int32::New(0));

  // First make sure we flip spaces
  HEAP->CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  // Leave exactly enough new-space room for the array, a memento, and the
  // trailing filler allocated below.
  AllocateAllButNBytes(HEAP->new_space(),
                       JSArray::kSize + AllocationMemento::kSize +

  Handle<JSArray> array = factory->NewJSArrayWithElements(array_data,

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasFastSmiOrObjectElements());

  // We need a filler the size of an AllocationMemento object, plus an
  // extra pointer-sized fill value.
  MaybeObject* maybe_object = HEAP->AllocateRaw(
      AllocationMemento::kSize + kPointerSize, NEW_SPACE, OLD_POINTER_SPACE);
  CHECK(maybe_object->ToObject(&obj));
  // Strip the heap-object tag to get the raw start address for the filler.
  Address addr_obj = reinterpret_cast<Address>(
      reinterpret_cast<byte*>(obj - kHeapObjectTag));
  HEAP->CreateFillerObjectAt(addr_obj,
                             AllocationMemento::kSize + kPointerSize);

  // Give the array a name, making sure not to allocate strings.
  v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
  v8::Context::GetCurrent()->Global()->Set(array_name, array_obj);

  // This should crash with a protection violation if we are running a build
  AlwaysAllocateScope aa_scope;
  v8::Script::Compile(mote_code_string)->Run();
// Regression test: with always-compact GC and the code space full of
// evacuation candidates, optimizing a function that was already enqueued
// for code flushing must not bust the heap in subsequent GC cycles.
TEST(Regress168801) {
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               " return function(x) { return x + 1; };"
               "var f = mkClosure();"
    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    // Age the code past the flushing threshold.
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));

    function = inner_scope.CloseAndEscape(handle(*f, isolate));

  // Simulate incremental marking so that unoptimized function is enqueued as a
  // candidate for code flushing. The shared function info however will not be
  // explicitly enqueued.
  SimulateIncrementalMarking();

  // Now optimize the function so that it is taken off the candidate list.
    HandleScope inner_scope(isolate);
    CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
// Regression test: enabling the debugger (which disables code flushing)
// after flushing candidates were already collected during incremental
// marking must not bust the heap in subsequent GC cycles.
TEST(Regress173458) {
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               " return function(x) { return x + 1; };"
               "var f = mkClosure();"
    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    // Age the code past the flushing threshold.
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));

    function = inner_scope.CloseAndEscape(handle(*f, isolate));

  // Simulate incremental marking so that unoptimized function is enqueued as a
  // candidate for code flushing. The shared function info however will not be
  // explicitly enqueued.
  SimulateIncrementalMarking();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Now enable the debugger which in turn will disable code flushing.
  CHECK(isolate->debug()->Load());
#endif  // ENABLE_DEBUGGER_SUPPORT

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
// Object visitor that deliberately ignores every pointer it is shown;
// used where only the act of iterating handles matters, not their values.
class DummyVisitor : public ObjectVisitor {
  // No-op: visit the range [start, end) without touching it.
  void VisitPointers(Object** start, Object** end) { }
// Fills the current handle block exactly up to its limit, then opens a
// DeferredHandleScope and iterates all handles, checking the handle-scope
// implementer copes with a block boundary sitting exactly at the limit.
TEST(DeferredHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  v8::HandleScope scope;
  v8::ImplementationUtilities::HandleScopeData* data =
      isolate->handle_scope_data();
  Handle<Object> init(heap->empty_string(), isolate);
  // Allocate handles until the current handle block is completely full.
  while (data->next < data->limit) {
    Handle<Object> obj(heap->empty_string(), isolate);
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  ASSERT(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  isolate->handle_scope_implementer()->Iterate(&visitor);
// Checks that a single large incremental-marking step (100 MB budget) is
// enough to finish marking a heap containing a very large sparse array.
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  // Allocate a 10M-element array with sparse writes (every 100th slot).
  CompileRun("function f(n) {"
             " var a = new Array(n);"
             " for (var i = 0; i < n; i += 100) a[i] = i;"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = HEAP->incremental_marking();
  if (marking->IsStopped()) marking->Start();
  // This big step should be sufficient to mark the whole array.
  marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  ASSERT(marking->IsComplete());