// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "src/compilation-cache.h"
#include "src/deoptimizer.h"
#include "src/execution.h"
#include "src/factory.h"
#include "src/global-handles.h"
#include "src/ic/ic.h"
#include "src/macro-assembler.h"
#include "src/snapshot/snapshot.h"
#include "test/cctest/cctest.h"

using namespace v8::internal;


static void CheckMap(Map* map, int type, int instance_size) {
  CHECK(map->IsHeapObject());
#ifdef DEBUG
  CHECK(CcTest::heap()->Contains(map));
#endif
  CHECK_EQ(CcTest::heap()->meta_map(), map->map());
  CHECK_EQ(type, map->instance_type());
  CHECK_EQ(instance_size, map->instance_size());
}


TEST(HeapMaps) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
  CheckMap(heap->float32x4_map(), FLOAT32X4_TYPE, Float32x4::kSize);
  CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
}


static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
  CHECK(obj->IsOddball());
  Handle<Object> handle(obj, isolate);
  Object* print_string =
      *Execution::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckSmi(Isolate* isolate, int value, const char* string) {
  Handle<Object> handle(Smi::FromInt(value), isolate);
  Object* print_string =
      *Execution::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckNumber(Isolate* isolate, double value, const char* string) {
  Handle<Object> number = isolate->factory()->NewNumber(value);
  CHECK(number->IsNumber());
  Handle<Object> print_string =
      Execution::ToString(isolate, number).ToHandleChecked();
  CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
}


static void CheckFindCodeObject(Isolate* isolate) {
  // Test FindCodeObject
#define __ assm.

  Assembler assm(isolate, NULL, 0);

  __ nop();  // supported on all architectures

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  CHECK(code->IsCode());

  HeapObject* obj = HeapObject::cast(*code);
  Address obj_addr = obj->address();

  for (int i = 0; i < obj->Size(); i += kPointerSize) {
    Object* found = isolate->FindCodeObject(obj_addr + i);
    CHECK_EQ(*code, found);
  }

  Handle<Code> copy = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  HeapObject* obj_copy = HeapObject::cast(*copy);
  Object* not_right =
      isolate->FindCodeObject(obj_copy->address() + obj_copy->Size() / 2);
  CHECK(not_right != *code);
#undef __
}


TEST(HandleNull) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope outer_scope(isolate);
  LocalContext context;
  Handle<Object> n(reinterpret_cast<Object*>(NULL), isolate);
  CHECK(!n.is_null());
}


TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  Handle<Object> value = factory->NewNumber(1.000123);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  value = factory->NewNumber(1.0);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = factory->NewNumberFromInt(1024);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  value = factory->NewNumberFromInt(Smi::kMinValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());

  value = factory->NewNumberFromInt(Smi::kMaxValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
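
  // Note: the Smi payload width is architecture-dependent. On 32-bit targets
  // a Smi carries 31 bits (kMaxValue == 2^30 - 1), while on 64-bit targets it
  // carries 32 bits (kMaxValue == 2^31 - 1), which is why the overflow check
  // below is compiled only for 32-bit builds.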
#if !defined(V8_TARGET_ARCH_64_BIT)
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // nan oddball checks
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));

  // Check ToString for oddballs
  CheckOddball(isolate, heap->true_value(), "true");
  CheckOddball(isolate, heap->false_value(), "false");
  CheckOddball(isolate, heap->null_value(), "null");
  CheckOddball(isolate, heap->undefined_value(), "undefined");

  // Check ToString for Smis
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}


template <typename T, typename LANE_TYPE, int LANES>
static void CheckSimdLanes(T* value) {
  // Get the original values, and check that all lanes can be set to new values
  // without disturbing the other lanes.
  LANE_TYPE lane_values[LANES];
  for (int i = 0; i < LANES; i++) {
    lane_values[i] = value->get_lane(i);
  }
  for (int i = 0; i < LANES; i++) {
    lane_values[i] += 1;
    value->set_lane(i, lane_values[i]);
    for (int j = 0; j < LANES; j++) {
      CHECK_EQ(lane_values[j], value->get_lane(j));
    }
  }
}


TEST(SimdObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);

  Handle<Float32x4> value = factory->NewFloat32x4(1, 2, 3, 4);
  CHECK(value->IsFloat32x4());
  CHECK(value->BooleanValue());  // SIMD values map to true.
  CHECK_EQ(value->get_lane(0), 1);
  CHECK_EQ(value->get_lane(1), 2);
  CHECK_EQ(value->get_lane(2), 3);
  CHECK_EQ(value->get_lane(3), 4);

  CheckSimdLanes<Float32x4, float, 4>(*value);

  // Check all lanes, and special lane values.
  value->set_lane(0, 0);
  CHECK_EQ(0, value->get_lane(0));
  value->set_lane(1, -0.0);
  CHECK_EQ(-0.0, value->get_lane(1));
  CHECK(std::signbit(value->get_lane(1)));  // Sign bit is preserved.
  float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
  float signaling_NaN = std::numeric_limits<float>::signaling_NaN();
  value->set_lane(2, quiet_NaN);
  CHECK(std::isnan(value->get_lane(2)));
  value->set_lane(3, signaling_NaN);
  CHECK(std::isnan(value->get_lane(3)));

  // Check SIMD value printing.
  {
    value = factory->NewFloat32x4(1, 2, 3, 4);
    std::ostringstream os;
    value->Float32x4Print(os);
    CHECK_EQ("1, 2, 3, 4", os.str());
  }
  {
    value = factory->NewFloat32x4(0, -0.0, quiet_NaN, signaling_NaN);
    std::ostringstream os;
    value->Float32x4Print(os);
    // Value printing doesn't preserve signed zeroes.
    CHECK_EQ("0, 0, NaN, NaN", os.str());
  }
}


TEST(Tagging) {
  CcTest::InitializeVM();
  int request = 24;
  CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
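  // OBJECT_POINTER_ALIGN rounds an allocation size up to the object alignment
  // (a multiple of the pointer size), so an already-aligned request like the
  // one above must come back unchanged.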
  CHECK(Smi::FromInt(42)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
}


TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);

  heap->CollectGarbage(NEW_SPACE);

  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in the global object's property.
    Handle<JSFunction> function = factory->NewFunction(name);
    JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
    // Allocate an object. Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();

    CHECK_EQ(Smi::FromInt(23),
             *Object::GetProperty(obj, prop_name).ToHandleChecked());
    CHECK_EQ(Smi::FromInt(24),
             *Object::GetProperty(obj, prop_namex).ToHandleChecked());
  }

  heap->CollectGarbage(NEW_SPACE);

  // The function should be alive.
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
  // Check that the function is retained.
  Handle<Object> func_value =
      Object::GetProperty(global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from the global object.
    Handle<JSObject> obj = factory->NewJSObject(function);
    JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
    JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  }

  // After GC, it should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
  Handle<JSObject> obj = Handle<JSObject>::cast(
      Object::GetProperty(global, obj_name).ToHandleChecked());
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
}


static void VerifyStringAllocation(Isolate* isolate, const char* string) {
  HandleScope scope(isolate);
  Handle<String> s = isolate->factory()->NewStringFromUtf8(
      CStrVector(string)).ToHandleChecked();
  CHECK_EQ(StrLength(string), s->length());
  for (int index = 0; index < s->length(); index++) {
    CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
  }
}


TEST(String) {
  CcTest::InitializeVM();
  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());

  VerifyStringAllocation(isolate, "a");
  VerifyStringAllocation(isolate, "ab");
  VerifyStringAllocation(isolate, "abc");
  VerifyStringAllocation(isolate, "abcd");
  VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
}


TEST(LocalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope scope(CcTest::isolate());
  const char* name = "Kasper the spunky";
  Handle<String> string = factory->NewStringFromAsciiChecked(name);
  CHECK_EQ(StrLength(name), string->length());
}


TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // After GC, the global handles should survive.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());

  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
}


static bool WeakPointerCleared = false;

static void TestWeakGlobalHandleCallback(
    const v8::WeakCallbackData<v8::Value, void>& data) {
  std::pair<v8::Persistent<v8::Value>*, int>* p =
      reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
          data.GetParameter());
  if (p->second == 1234) WeakPointerCleared = true;
  p->first->Reset();
}
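
// Note: with the WeakCallbackData API the callback itself is responsible for
// disposing of the handle (here via Reset()); V8 only clears the weak
// reference and invokes the callback.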


TEST(WeakGlobalHandlesScavenge) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);

  // Scavenge treats weak pointers as normal roots.
  heap->CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());

  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
}


TEST(WeakGlobalHandlesMark) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  heap->CollectGarbage(OLD_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(h2.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));

  // Incremental marking potentially marked handles before they turned weak.
  heap->CollectAllGarbage();

  CHECK((*h1)->IsString());

  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}


TEST(DeleteWeakGlobalHandle) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    h = global_handles->Create(*i);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(),
                          reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback);

  // Scavenge does not recognize weak references.
  heap->CollectGarbage(NEW_SPACE);

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak references properly.
  heap->CollectGarbage(OLD_SPACE);

  CHECK(WeakPointerCleared);
}


static const char* not_so_random_string_table[] = {


static void CheckInternalizedStrings(const char** strings) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  for (const char* string = *strings; *strings != 0; string = *strings++) {
    HandleScope scope(isolate);
    Handle<String> a =
        isolate->factory()->InternalizeUtf8String(CStrVector(string));
    // InternalizeUtf8String may return a failure if a GC is needed.
    CHECK(a->IsInternalizedString());
    Handle<String> b = factory->InternalizeUtf8String(string);
    CHECK_EQ(*b, *a);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
    b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
    CHECK_EQ(*b, *a);
    CHECK(b->IsUtf8EqualTo(CStrVector(string)));
  }
}


TEST(StringTable) {
  CcTest::InitializeVM();

  v8::HandleScope sc(CcTest::isolate());
  CheckInternalizedStrings(not_so_random_string_table);
  CheckInternalizedStrings(not_so_random_string_table);
}


TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());
  // Check that we can add properties to function objects.
  JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(24),
           *Object::GetProperty(function, prop_name).ToHandleChecked());
}


TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // check for empty
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));

  // delete first
  JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete first and then second
  JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
  JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // add first and then second
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete second and then first
  JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // check string and internalized string match
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));

  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
}


TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  Handle<Map> initial_map(function->initial_map());

  // Set a property.
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());

  // Check that the map has changed.
  CHECK(*initial_map != obj->map());
}


TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  Handle<Object> fun_obj = Object::GetProperty(
      CcTest::i_isolate()->global_object(), name).ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);

  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);

  // Set array length to 0.
  JSArray::SetLength(array, 0);
  CHECK_EQ(Smi::FromInt(0), array->length());
  // Must be in fast mode.
  CHECK(array->HasFastSmiOrObjectElements());

  // array[length] = name.
  JSReceiver::SetElement(isolate, array, 0, name, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);

  // Set the array length to a value larger than a Smi.
  JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);

  uint32_t int_length = 0;
  CHECK(array->length()->ToArrayIndex(&int_length));
  CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name.
  JSReceiver::SetElement(isolate, array, int_length, name, SLOPPY).Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
}


TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();

  JSReceiver::SetElement(isolate, obj, 0, first, SLOPPY).Check();
  JSReceiver::SetElement(isolate, obj, 1, second, SLOPPY).Check();

  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));

  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Flip the values.
  JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
  JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();

  JSReceiver::SetElement(isolate, clone, 0, second, SLOPPY).Check();
  JSReceiver::SetElement(isolate, clone, 1, first, SLOPPY).Check();

  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
}


TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  const unsigned char chars[] = {0xe5, 0xa4, 0xa7};
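  // These three bytes are the UTF-8 encoding of U+5927 ("大"), so each
  // repetition below decodes to a single non-one-byte (two-byte) character.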
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    char* non_one_byte = NewArray<char>(3 * length + 1);
    char* one_byte = NewArray<char>(length + 1);
    non_one_byte[3 * length] = 0;
    one_byte[length] = 0;
    for (int i = 0; i < length; i++) {
      one_byte[i] = 'a';
      non_one_byte[3 * i] = chars[0];
      non_one_byte[3 * i + 1] = chars[1];
      non_one_byte[3 * i + 2] = chars[2];
    }
    Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
        Vector<const char>(non_one_byte, 3 * length));
    CHECK_EQ(length, non_one_byte_sym->length());
    Handle<String> one_byte_sym =
        factory->InternalizeOneByteString(OneByteVector(one_byte, length));
    CHECK_EQ(length, one_byte_sym->length());
    Handle<String> non_one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
            .ToHandleChecked();
    non_one_byte_str->Hash();
    CHECK_EQ(length, non_one_byte_str->length());
    Handle<String> one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
            .ToHandleChecked();
    one_byte_str->Hash();
    CHECK_EQ(length, one_byte_str->length());
    DeleteArray(non_one_byte);
    DeleteArray(one_byte);
  }
}


static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
  // Count the number of objects found in the heap.
  int found_count = 0;
  HeapIterator iterator(heap);
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    for (int i = 0; i < size; i++) {
      if (*objs[i] == obj) {
        found_count++;
      }
    }
  }
  return found_count;
}


TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan the heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] =
      factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, Strength::WEAK, TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", TENURED);

  // Allocate a large string (for large object space).
  int large_size = Page::kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}


static int LenFromSize(int size) {
  return (size - FixedArray::kHeaderSize) / kPointerSize;
}
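
// LenFromSize inverts FixedArray::SizeFor: a FixedArray occupies
// FixedArray::kHeaderSize bytes of header plus kPointerSize bytes per
// element, so a target byte size maps back to the element count above.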


TEST(Regression39128) {
  // Test case for crbug.com/39128.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  TestHeap* heap = CcTest::test_heap();

  // Increase the chance of 'bump-the-pointer' allocation in old space.
  heap->CollectAllGarbage();

  v8::HandleScope scope(CcTest::isolate());

  // The plan: create a JSObject which references objects in new space.
  // Then clone this object (forcing it to go into old space) and check
  // that region dirty marks are updated correctly.

  // Step 1: prepare a map for the object. We add 1 inobject property to it.
  // Create a map with a single inobject property.
  Handle<Map> my_map = Map::Create(CcTest::i_isolate(), 1);
  int n_properties = my_map->inobject_properties();
  CHECK_GT(n_properties, 0);

  int object_size = my_map->instance_size();

  // Step 2: allocate a lot of objects so as to almost fill new space: we need
  // just enough room to allocate a JSObject and thus fill the new space.

  int allocation_amount = Min(FixedArray::kMaxSize,
                              Page::kMaxRegularHeapObjectSize + kPointerSize);
  int allocation_len = LenFromSize(allocation_amount);
  NewSpace* new_space = heap->new_space();
  Address* top_addr = new_space->allocation_top_address();
  Address* limit_addr = new_space->allocation_limit_address();
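  // Under bump-pointer allocation the space left in new space is exactly
  // *limit_addr - *top_addr, which is what the loop below drains.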
  while ((*limit_addr - *top_addr) > allocation_amount) {
    CHECK(!heap->always_allocate());
    Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
    CHECK(new_space->Contains(array));
  }

  // Step 3: now allocate a fixed array and a JSObject to fill the whole new
  // space.
  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
  int fixed_array_len = LenFromSize(to_fill);
  CHECK(fixed_array_len < FixedArray::kMaxLength);

  CHECK(!heap->always_allocate());
  Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
  CHECK(new_space->Contains(array));

  Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
  CHECK(new_space->Contains(object));
  JSObject* jsobject = JSObject::cast(object);
  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
  CHECK_EQ(0, jsobject->properties()->length());
  // Create a reference to the object in new space in jsobject.
  FieldIndex index = FieldIndex::ForInObjectOffset(
      JSObject::kHeaderSize - kPointerSize);
  jsobject->FastPropertyAtPut(index, array);

  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));

  // Step 4: clone jsobject, but force always allocate first to create a clone
  // in old pointer space.
  Address old_space_top = heap->old_space()->top();
  AlwaysAllocateScope aa_scope(isolate);
  Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
  JSObject* clone = JSObject::cast(clone_obj);
  if (clone->address() != old_space_top) {
    // Alas, got allocated from the free list, we cannot do checks.
    return;
  }
  CHECK(heap->old_space()->Contains(clone->address()));
}


UNINITIALIZED_TEST(TestCodeFlushing) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  isolate->Enter();
  Factory* factory = i_isolate->factory();
  {
    v8::HandleScope scope(isolate);
    v8::Context::New(isolate)->Enter();
    const char* source =
        "function foo() {"
        "  var x = 42;"
        "  var y = 42;"
        "  var z = x + y;"
        "};"
        "foo()";
    Handle<String> foo_name = factory->InternalizeUtf8String("foo");

    // This compile will add the code to the compilation cache.
    {
      v8::HandleScope scope(isolate);
      CompileRun(source);
    }

    // Check function is compiled.
    Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
                                                    foo_name).ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    i_isolate->heap()->CollectAllGarbage();
    i_isolate->heap()->CollectAllGarbage();
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use full marking.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      i_isolate->heap()->CollectAllGarbage();
    }

    // foo should no longer be in the compilation cache.
    CHECK(!function->shared()->is_compiled() || function->IsOptimized());
    CHECK(!function->is_compiled() || function->IsOptimized());
    // Call foo to get it recompiled.
    CompileRun("foo()");
    CHECK(function->shared()->is_compiled());
    CHECK(function->is_compiled());
  }
  isolate->Exit();
  isolate->Dispose();
}


TEST(TestCodeFlushingPreAged) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // Compile foo, but don't run it.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code has been run so will survive at least one GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // The code was only run once, so it should be pre-aged and collected on the
  // next GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());

  // Execute the function again twice, and ensure it is reset to the young age.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();"
               "foo();");
  }

  // The code will survive at least two GCs now that it is young again.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage();
  }

  // foo should no longer be in the compilation cache.
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}


TEST(TestCodeFlushingIncremental) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    CcTest::heap()->CollectAllGarbage();
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    if (!function->next_function_link()->IsUndefined()) break;
    CcTest::heap()->CollectAllGarbage();
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalScavenge) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "var foo = function() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       "  var x = 23;"
                       "};"
                       "bar();";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check functions are compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());
  Handle<Object> func_value2 =
      Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
  CHECK(func_value2->IsJSFunction());
  Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
  CHECK(function2->shared()->is_compiled());

  // Clear references to the functions so that one of them can die.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }
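
  // Each MakeOlder() call advances the code object's age by one; once the
  // age crosses the threshold the code is considered old and becomes a
  // flushing candidate. Alternating the MarkingParity argument mimics the
  // parity flip of successive full GC cycles.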

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions. Finally
  // perform a scavenge while incremental marking is still running.
  SimulateIncrementalMarking(CcTest::heap());
  *function2.location() = NULL;
  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}


TEST(TestCodeFlushingIncrementalAbort) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  SimulateIncrementalMarking(heap);

  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
  int position = 0;
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();

  // Force optimization now that code flushing is disabled.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}


TEST(CompilationCacheCachingBehavior) {
  // If we do not flush code, or have the compilation cache turned off, this
  // test is invalid.
  if (!FLAG_flush_code || !FLAG_compilation_cache) {
    return;
  }
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  CompilationCache* compilation_cache = isolate->compilation_cache();
  LanguageMode language_mode =
      construct_language_mode(FLAG_use_strict, FLAG_use_strong);

  v8::HandleScope scope(CcTest::isolate());
  const char* raw_source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo()";
  Handle<String> source = factory->InternalizeUtf8String(raw_source);
  Handle<Context> native_context = isolate->native_context();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On first compilation, only a hash is inserted in the code cache, so the
  // lookup fails.
  MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On second compilation, the hash is replaced by a real cache entry mapping
  // the source to the shared function info containing the code.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(!info.is_null());

  heap->CollectAllGarbage();

  // The cache entry should survive a GC while the code is not yet old.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(!info.is_null());

  while (!info.ToHandleChecked()->code()->IsOld()) {
    info.ToHandleChecked()->code()->MakeOlder(NO_MARKING_PARITY);
  }

  heap->CollectAllGarbage();
  // Ensure code aging cleared the entry from the cache.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On first compilation, only a hash is inserted in the code cache, so the
  // lookup fails.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
    compilation_cache->MarkCompactPrologue();
  }

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // Since we aged the cache before caching the script, ensure that the
  // script was not cached on the next compilation.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());
}


static void OptimizeEmptyFunction(const char* name) {
  HandleScope scope(CcTest::i_isolate());
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();",
           name, name, name, name, name);
  CompileRun(source.start());
}


// Count the number of native contexts in the weak list of native contexts.
int CountNativeContexts() {
  int count = 0;
  Object* object = CcTest::heap()->native_contexts_list();
  while (!object->IsUndefined()) {
    count++;
    object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
  }
  // Subtract one to compensate for the code stub context that is always
  // present.
  return count - 1;
}


// Count the number of user functions in the weak list of optimized
// functions attached to a native context.
static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
  int count = 0;
  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
  Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
  while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
    count++;
    object = JSFunction::cast(object)->next_function_link();
  }
  return count;
}


TEST(TestInternalWeakLists) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;
  FLAG_retain_maps_for_n_gc = 0;

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  v8::Handle<v8::Context> ctx[kNumTestContexts];
  if (!isolate->use_crankshaft()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of native contexts which get linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage();

    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f1");
    CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f2");
    CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f3");
    CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f4");
    CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f5");
    CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));

    // Remove function f1.
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
    }

    // Mark compact handles the weak references.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage();
    CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));

    ctx[i]->Exit();
  }

  // Force compilation cache cleanup.
  CcTest::heap()->NotifyContextDisposed(true);
  CcTest::heap()->CollectAllGarbage();

  // Dispose of the native contexts one by one.
  for (int i = 0; i < kNumTestContexts; i++) {
    // TODO(dcarney): is there a better way to do this?
    i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
    *unsafe = CcTest::heap()->undefined_value();

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(i::NEW_SPACE);
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
    }

    // Mark compact handles the weak references.
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
  }

  CHECK_EQ(0, CountNativeContexts());
}


// Count the number of native contexts in the weak list of native contexts
// causing a GC after the specified number of elements.
static int CountNativeContextsWithGC(Isolate* isolate, int n) {
  Heap* heap = isolate->heap();
  int count = 0;
  Handle<Object> object(heap->native_contexts_list(), isolate);
  while (!object->IsUndefined()) {
    count++;
    if (count == n) heap->CollectAllGarbage();
    object =
        Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
                       isolate);
  }
  // Subtract one to compensate for the code stub context that is always
  // present.
  return count - 1;
}


// Count the number of user functions in the weak list of optimized
// functions attached to a native context causing a GC after the
// specified number of elements.
static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
                                             int n) {
  int count = 0;
  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
  Isolate* isolate = icontext->GetIsolate();
  Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
                        isolate);
  while (object->IsJSFunction() &&
         !Handle<JSFunction>::cast(object)->IsBuiltin()) {
    count++;
    if (count == n) isolate->heap()->CollectAllGarbage();
    object = Handle<Object>(
        Object::cast(JSFunction::cast(*object)->next_function_link()),
        isolate);
  }
  return count;
}


TEST(TestInternalWeakListsTraverseWithGC) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::Handle<v8::Context> ctx[kNumTestContexts];
  if (!isolate->use_crankshaft()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of contexts and check the length of the weak list both
  // with and without GCs while iterating the list.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());
    CHECK_EQ(i + 1, CountNativeContexts());
    CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
  }

  ctx[0]->Enter();

  // Compile a number of functions and check the length of the weak list of
  // optimized functions both with and without GCs while iterating the list.
  CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
  OptimizeEmptyFunction("f1");
  CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f2");
  CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f3");
  CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f4");
  CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
  OptimizeEmptyFunction("f5");
  CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));

  ctx[0]->Exit();
}
1734 TEST(TestSizeOfRegExpCode) {
1735 if (!FLAG_regexp_optimization) return;
1737 v8::V8::Initialize();
1739 Isolate* isolate = CcTest::i_isolate();
1740 HandleScope scope(isolate);
1742 LocalContext context;
1744 // Adjust source below and this check to match
1745 // RegExpImple::kRegExpTooLargeToOptimize.
1746 DCHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);
1748 // Compile a regexp that is much larger if we are using regexp optimizations.
1750 "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
1751 "var half_size_reg_exp;"
1752 "while (reg_exp_source.length < 20 * 1024) {"
1753 " half_size_reg_exp = reg_exp_source;"
1754 " reg_exp_source = reg_exp_source + reg_exp_source;"
1757 "reg_exp_source.match(/f/);");
1759 // Get initial heap size after several full GCs, which will stabilize
1760 // the heap size and return with sweeping finished completely.
1761 CcTest::heap()->CollectAllGarbage();
1762 CcTest::heap()->CollectAllGarbage();
1763 CcTest::heap()->CollectAllGarbage();
1764 CcTest::heap()->CollectAllGarbage();
1765 CcTest::heap()->CollectAllGarbage();
1766 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1767 if (collector->sweeping_in_progress()) {
1768 collector->EnsureSweepingCompleted();
1769 }
1770 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1772 CompileRun("'foo'.match(reg_exp_source);");
1773 CcTest::heap()->CollectAllGarbage();
1774 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1776 CompileRun("'foo'.match(half_size_reg_exp);");
1777 CcTest::heap()->CollectAllGarbage();
1778 int size_with_optimized_regexp =
1779 static_cast<int>(CcTest::heap()->SizeOfObjects());
1781 int size_of_regexp_code = size_with_regexp - initial_size;
1783 // On some platforms the debug-code flag causes huge amounts of regexp code
1784 // to be emitted, breaking this test.
1785 if (!FLAG_debug_code) {
1786 CHECK_LE(size_of_regexp_code, 1 * MB);
1787 }
1789 // Small regexp is half the size, but compiles to more than twice the code
1790 // due to the optimization steps.
1791 CHECK_GE(size_with_optimized_regexp,
1792 size_with_regexp + size_of_regexp_code * 2);
1796 TEST(TestSizeOfObjects) {
1797 v8::V8::Initialize();
1799 // Get initial heap size after several full GCs, which will stabilize
1800 // the heap size and return with sweeping finished completely.
1801 CcTest::heap()->CollectAllGarbage();
1802 CcTest::heap()->CollectAllGarbage();
1803 CcTest::heap()->CollectAllGarbage();
1804 CcTest::heap()->CollectAllGarbage();
1805 CcTest::heap()->CollectAllGarbage();
1806 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1807 if (collector->sweeping_in_progress()) {
1808 collector->EnsureSweepingCompleted();
1809 }
1810 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1812 {
1813 // Allocate objects on several different old-space pages so that
1814 // concurrent sweeper threads will be busy sweeping the old space on
1815 // subsequent GC runs.
1816 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1817 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1818 for (int i = 1; i <= 100; i++) {
1819 CcTest::test_heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
1820 CHECK_EQ(initial_size + i * filler_size,
1821 static_cast<int>(CcTest::heap()->SizeOfObjects()));
1822 }
1823 }
1825 // The heap size should go back to initial size after a full GC, even
1826 // though sweeping didn't finish yet.
1827 CcTest::heap()->CollectAllGarbage();
1829 // Normally sweeping would not be complete here, but no guarantees.
1831 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1833 // Waiting for sweeper threads should not change heap size.
1834 if (collector->sweeping_in_progress()) {
1835 collector->EnsureSweepingCompleted();
1836 }
1837 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1841 TEST(TestAlignmentCalculations) {
1842 // Maximum fill amounts are consistent.
1843 int maximum_double_misalignment = kDoubleSize - kPointerSize;
1844 int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
1845 int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
1846 CHECK_EQ(0, max_word_fill);
1847 int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
1848 CHECK_EQ(maximum_double_misalignment, max_double_fill);
1849 int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
1850 CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
1851 int max_simd128_unaligned_fill =
1852 Heap::GetMaximumFillToAlign(kSimd128Unaligned);
1853 CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);
1855 Address base = reinterpret_cast<Address>(NULL);
1856 int fill = 0;
1858 // Word alignment never requires fill.
1859 fill = Heap::GetFillToAlign(base, kWordAligned);
1860 CHECK_EQ(0, fill);
1861 fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
1862 CHECK_EQ(0, fill);
1864 // No fill is required when address is double aligned.
1865 fill = Heap::GetFillToAlign(base, kDoubleAligned);
1866 CHECK_EQ(0, fill);
1867 // Fill is required if address is not double aligned.
1868 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
1869 CHECK_EQ(maximum_double_misalignment, fill);
1870 // kDoubleUnaligned has the opposite fill amounts.
1871 fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
1872 CHECK_EQ(maximum_double_misalignment, fill);
1873 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
1874 CHECK_EQ(0, fill);
1876 // 128 bit SIMD types have 2 or 4 possible alignments, depending on platform.
1877 fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
1878 CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
1879 fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
1880 CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
1881 fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
1882 CHECK_EQ(kPointerSize, fill);
1883 fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
1884 CHECK_EQ(0, fill);
1885 }
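// Worked example behind the checks above (figures assume a 32-bit layout,
// i.e. kPointerSize == 4): kDoubleAligned needs a fill of 0 or 4 bytes
// depending on whether the address is already 8-byte aligned, and
// kDoubleUnaligned needs exactly the opposite. kSimd128Unaligned wants
// address % 16 == 12, so for base + 0/4/8/12 the required fills are
// 12/8/4/0 bytes, which is what the mask expressions evaluate to.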
1888 static HeapObject* NewSpaceAllocateAligned(int size,
1889 AllocationAlignment alignment) {
1890 Heap* heap = CcTest::heap();
1891 AllocationResult allocation =
1892 heap->new_space()->AllocateRawAligned(size, alignment);
1893 HeapObject* obj = NULL;
1894 allocation.To(&obj);
1895 heap->CreateFillerObjectAt(obj->address(), size);
1896 return obj;
1897 }
1900 // Get new space allocation into the desired alignment.
1901 static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
1902 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
1903 int fill = Heap::GetFillToAlign(*top_addr, alignment);
1904 if (fill) {
1905 NewSpaceAllocateAligned(fill + offset, kWordAligned);
1906 }
1907 return *top_addr;
1908 }
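// Usage sketch: AlignNewSpace(kDoubleAligned, kPointerSize) pads the current
// top to double alignment and then allocates one extra word, so the next
// allocation starts exactly one word past an aligned address -- the
// deliberately misaligned starting point the test below relies on.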
1911 TEST(TestAlignedAllocation) {
1912 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
1913 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
1914 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
1915 Address start;
1916 HeapObject* obj;
1917 HeapObject* filler;
1918 if (double_misalignment) {
1919 // Allocate a pointer sized object that must be double aligned at an
1920 // aligned address.
1921 start = AlignNewSpace(kDoubleAligned, 0);
1922 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
1923 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
1924 // There is no filler.
1925 CHECK_EQ(kPointerSize, *top_addr - start);
1927 // Allocate a second pointer sized object that must be double aligned at an
1928 // unaligned address.
1929 start = AlignNewSpace(kDoubleAligned, kPointerSize);
1930 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
1931 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
1932 // There is a filler object before the object.
1933 filler = HeapObject::FromAddress(start);
1934 CHECK(obj != filler && filler->IsFiller() &&
1935 filler->Size() == kPointerSize);
1936 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
1938 // Similarly for kDoubleUnaligned.
1939 start = AlignNewSpace(kDoubleUnaligned, 0);
1940 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
1941 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
1942 CHECK_EQ(kPointerSize, *top_addr - start);
1943 start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
1944 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
1945 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
1946 // There is a filler object before the object.
1947 filler = HeapObject::FromAddress(start);
1948 CHECK(obj != filler && filler->IsFiller() &&
1949 filler->Size() == kPointerSize);
1950 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
1953 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
1954 // on platform.
1955 start = AlignNewSpace(kSimd128Unaligned, 0);
1956 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
1957 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
1958 // There is no filler.
1959 CHECK_EQ(kPointerSize, *top_addr - start);
1960 start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
1961 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
1962 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
1963 // There is a filler object before the object.
1964 filler = HeapObject::FromAddress(start);
1965 CHECK(obj != filler && filler->IsFiller() &&
1966 filler->Size() == kSimd128Size - kPointerSize);
1967 CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);
1969 if (double_misalignment) {
1970 // Test the 2 other alignments possible on 32 bit platforms.
1971 start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
1972 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
1973 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
1974 // There is a filler object before the object.
1975 filler = HeapObject::FromAddress(start);
1976 CHECK(obj != filler && filler->IsFiller() &&
1977 filler->Size() == 2 * kPointerSize);
1978 CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
1979 start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
1980 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
1981 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
1982 // There is a filler object before the object.
1983 filler = HeapObject::FromAddress(start);
1984 CHECK(obj != filler && filler->IsFiller() &&
1985 filler->Size() == kPointerSize);
1986 CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
1991 static HeapObject* OldSpaceAllocateAligned(int size,
1992 AllocationAlignment alignment) {
1993 Heap* heap = CcTest::heap();
1994 AllocationResult allocation =
1995 heap->old_space()->AllocateRawAligned(size, alignment);
1996 HeapObject* obj = NULL;
1997 allocation.To(&obj);
1998 heap->CreateFillerObjectAt(obj->address(), size);
1999 return obj;
2000 }
2003 // Get old space allocation into the desired alignment.
2004 static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
2005 Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
2006 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2007 int allocation = fill + offset;
2008 if (allocation) {
2009 OldSpaceAllocateAligned(allocation, kWordAligned);
2010 }
2011 Address top = *top_addr;
2012 // Now force the remaining allocation onto the free list.
2013 CcTest::heap()->old_space()->EmptyAllocationInfo();
2014 return top;
2015 }
2018 // Test the case where allocation must be done from the free list, so filler
2019 // may precede or follow the object.
2020 TEST(TestAlignedOverAllocation) {
2021 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2022 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2023 Address start;
2024 HeapObject* obj;
2025 HeapObject* filler1;
2026 HeapObject* filler2;
2027 if (double_misalignment) {
2028 start = AlignOldSpace(kDoubleAligned, 0);
2029 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2030 // The object is aligned, and a filler object is created after.
2031 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2032 filler1 = HeapObject::FromAddress(start + kPointerSize);
2033 CHECK(obj != filler1 && filler1->IsFiller() &&
2034 filler1->Size() == kPointerSize);
2035 // Try the opposite alignment case.
2036 start = AlignOldSpace(kDoubleAligned, kPointerSize);
2037 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2038 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2039 filler1 = HeapObject::FromAddress(start);
2040 CHECK(obj != filler1);
2041 CHECK(filler1->IsFiller());
2042 CHECK(filler1->Size() == kPointerSize);
2046 // Similarly for kDoubleUnaligned.
2047 start = AlignOldSpace(kDoubleUnaligned, 0);
2048 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2049 // The object is aligned, and a filler object is created after.
2050 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2051 filler1 = HeapObject::FromAddress(start + kPointerSize);
2052 CHECK(obj != filler1 && filler1->IsFiller() &&
2053 filler1->Size() == kPointerSize);
2054 // Try the opposite alignment case.
2055 start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
2056 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2057 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2058 filler1 = HeapObject::FromAddress(start);
2059 CHECK(obj != filler1 && filler1->IsFiller() &&
2060 filler1->Size() == kPointerSize);
2063 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2064 // on platform.
2065 start = AlignOldSpace(kSimd128Unaligned, 0);
2066 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2067 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2068 // There is a filler object after the object.
2069 filler1 = HeapObject::FromAddress(start + kPointerSize);
2070 CHECK(obj != filler1 && filler1->IsFiller() &&
2071 filler1->Size() == kSimd128Size - kPointerSize);
2072 start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
2073 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2074 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2075 // There is a filler object before the object.
2076 filler1 = HeapObject::FromAddress(start);
2077 CHECK(obj != filler1 && filler1->IsFiller() &&
2078 filler1->Size() == kSimd128Size - kPointerSize);
2080 if (double_misalignment) {
2081 // Test the 2 other alignments possible on 32 bit platforms.
2082 start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
2083 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2084 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2085 // There are filler objects before and after the object.
2086 filler1 = HeapObject::FromAddress(start);
2087 CHECK(obj != filler1 && filler1->IsFiller() &&
2088 filler1->Size() == 2 * kPointerSize);
2089 filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
2090 CHECK(obj != filler2 && filler2->IsFiller() &&
2091 filler2->Size() == kPointerSize);
2092 start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
2093 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2094 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2095 // There are filler objects before and after the object.
2096 filler1 = HeapObject::FromAddress(start);
2097 CHECK(obj != filler1 && filler1->IsFiller() &&
2098 filler1->Size() == kPointerSize);
2099 filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
2100 CHECK(obj != filler2 && filler2->IsFiller() &&
2101 filler2->Size() == 2 * kPointerSize);
2106 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
2107 CcTest::InitializeVM();
2108 HeapIterator iterator(CcTest::heap());
2109 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
2110 intptr_t size_of_objects_2 = 0;
2111 for (HeapObject* obj = iterator.next();
2112 obj != NULL;
2113 obj = iterator.next()) {
2114 if (!obj->IsFreeSpace()) {
2115 size_of_objects_2 += obj->Size();
2116 }
2117 }
2118 // Delta must be within 5% of the larger result (hence the "size / 20"
2119 // checks below).
2120 // TODO(gc): Tighten this up by distinguishing between byte arrays that
2121 // are real and those that merely mark free space on the heap.
2122 if (size_of_objects_1 > size_of_objects_2) {
2123 intptr_t delta = size_of_objects_1 - size_of_objects_2;
2124 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2125 "Iterator: %" V8_PTR_PREFIX "d, "
2126 "delta: %" V8_PTR_PREFIX "d\n",
2127 size_of_objects_1, size_of_objects_2, delta);
2128 CHECK_GT(size_of_objects_1 / 20, delta);
2129 } else {
2130 intptr_t delta = size_of_objects_2 - size_of_objects_1;
2131 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2132 "Iterator: %" V8_PTR_PREFIX "d, "
2133 "delta: %" V8_PTR_PREFIX "d\n",
2134 size_of_objects_1, size_of_objects_2, delta);
2135 CHECK_GT(size_of_objects_2 / 20, delta);
2136 }
2137 }
2140 static void FillUpNewSpace(NewSpace* new_space) {
2141 // Fill up new space to the point that it is completely full. Make sure
2142 // that the scavenger does not undo the filling.
2143 Heap* heap = new_space->heap();
2144 Isolate* isolate = heap->isolate();
2145 Factory* factory = isolate->factory();
2146 HandleScope scope(isolate);
2147 AlwaysAllocateScope always_allocate(isolate);
2148 intptr_t available = new_space->Capacity() - new_space->Size();
2149 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
2150 for (intptr_t i = 0; i < number_of_fillers; i++) {
2151 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
2152 }
2153 }
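// Rough accounting for the loop above (a sketch): each filler is a
// FixedArray with 32 elements, so about available / FixedArray::SizeFor(32)
// of them fit in the remaining new space; allocating one fewer than that
// fills the space almost exactly, and the AlwaysAllocateScope keeps the
// allocations from triggering a scavenge that would undo the filling.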
2156 TEST(GrowAndShrinkNewSpace) {
2157 CcTest::InitializeVM();
2158 Heap* heap = CcTest::heap();
2159 NewSpace* new_space = heap->new_space();
2161 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
2162 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2163 // The max size cannot exceed the reserved size, since semispaces must be
2164 // always within the reserved space. We can't test new space growing and
2165 // shrinking if the reserved size is the same as the minimum (initial) size.
2166 return;
2167 }
2169 // Explicitly growing should double the space capacity.
2170 intptr_t old_capacity, new_capacity;
2171 old_capacity = new_space->TotalCapacity();
2172 new_space->Grow();
2173 new_capacity = new_space->TotalCapacity();
2174 CHECK(2 * old_capacity == new_capacity);
2176 old_capacity = new_space->TotalCapacity();
2177 FillUpNewSpace(new_space);
2178 new_capacity = new_space->TotalCapacity();
2179 CHECK(old_capacity == new_capacity);
2181 // Explicitly shrinking should not affect space capacity.
2182 old_capacity = new_space->TotalCapacity();
2183 new_space->Shrink();
2184 new_capacity = new_space->TotalCapacity();
2185 CHECK(old_capacity == new_capacity);
2187 // Let the scavenger empty the new space.
2188 heap->CollectGarbage(NEW_SPACE);
2189 CHECK_LE(new_space->Size(), old_capacity);
2191 // Explicitly shrinking should halve the space capacity.
2192 old_capacity = new_space->TotalCapacity();
2193 new_space->Shrink();
2194 new_capacity = new_space->TotalCapacity();
2195 CHECK(old_capacity == 2 * new_capacity);
2197 // Consecutive shrinking should not affect space capacity.
2198 old_capacity = new_space->TotalCapacity();
2199 new_space->Shrink();
2200 new_space->Shrink();
2201 new_space->Shrink();
2202 new_capacity = new_space->TotalCapacity();
2203 CHECK(old_capacity == new_capacity);
2204 }
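// Capacity trace of the sequence above (a sketch; concrete values depend on
// the configured semispace sizes): Grow doubles capacity C -> 2C; filling
// the space and shrinking while it is full are both no-ops capacity-wise;
// after the scavenge empties it, one Shrink halves 2C -> C, and further
// Shrink calls stop changing anything once the initial capacity is reached.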
2207 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
2208 CcTest::InitializeVM();
2209 Heap* heap = CcTest::heap();
2210 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
2211 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2212 // The max size cannot exceed the reserved size, since semispaces must be
2213 // always within the reserved space. We can't test new space growing and
2214 // shrinking if the reserved size is the same as the minimum (initial) size.
2215 return;
2216 }
2218 v8::HandleScope scope(CcTest::isolate());
2219 NewSpace* new_space = heap->new_space();
2220 intptr_t old_capacity, new_capacity;
2221 old_capacity = new_space->TotalCapacity();
2222 new_space->Grow();
2223 new_capacity = new_space->TotalCapacity();
2224 CHECK(2 * old_capacity == new_capacity);
2225 FillUpNewSpace(new_space);
2226 heap->CollectAllAvailableGarbage();
2227 new_capacity = new_space->TotalCapacity();
2228 CHECK(old_capacity == new_capacity);
2232 static int NumberOfGlobalObjects() {
2233 int count = 0;
2234 HeapIterator iterator(CcTest::heap());
2235 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2236 if (obj->IsGlobalObject()) count++;
2237 }
2238 // Subtract two to compensate for the two global objects (not global
2239 // JSObjects, of which there would only be one) that are part of the code stub
2240 // context, which is always present.
2241 return count - 2;
2242 }
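// Accounting sketch for the checks in the tests below: every live native
// context contributes two objects satisfying IsGlobalObject(), so with the
// two test contexts alive the iterator sees 4 + 2 (code stub context) such
// objects and this helper reports 4; after one context dies it reports 2,
// and 0 once both are collected.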
2245 // Test that we don't embed maps from foreign contexts into
2246 // optimized code.
2247 TEST(LeakNativeContextViaMap) {
2248 i::FLAG_allow_natives_syntax = true;
2249 v8::Isolate* isolate = CcTest::isolate();
2250 v8::HandleScope outer_scope(isolate);
2251 v8::Persistent<v8::Context> ctx1p;
2252 v8::Persistent<v8::Context> ctx2p;
2254 v8::HandleScope scope(isolate);
2255 ctx1p.Reset(isolate, v8::Context::New(isolate));
2256 ctx2p.Reset(isolate, v8::Context::New(isolate));
2257 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2260 CcTest::heap()->CollectAllAvailableGarbage();
2261 CHECK_EQ(4, NumberOfGlobalObjects());
2264 v8::HandleScope inner_scope(isolate);
2265 CompileRun("var v = {x: 42}");
2266 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2267 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2268 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2270 ctx2->Global()->Set(v8_str("o"), v);
2271 v8::Local<v8::Value> res = CompileRun(
2272 "function f() { return o.x; }"
2273 "for (var i = 0; i < 10; ++i) f();"
2274 "%OptimizeFunctionOnNextCall(f);"
2276 CHECK_EQ(42, res->Int32Value());
2277 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2279 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2281 isolate->ContextDisposedNotification();
2283 CcTest::heap()->CollectAllAvailableGarbage();
2284 CHECK_EQ(2, NumberOfGlobalObjects());
2286 CcTest::heap()->CollectAllAvailableGarbage();
2287 CHECK_EQ(0, NumberOfGlobalObjects());
2291 // Test that we don't embed functions from foreign contexts into
2292 // optimized code.
2293 TEST(LeakNativeContextViaFunction) {
2294 i::FLAG_allow_natives_syntax = true;
2295 v8::Isolate* isolate = CcTest::isolate();
2296 v8::HandleScope outer_scope(isolate);
2297 v8::Persistent<v8::Context> ctx1p;
2298 v8::Persistent<v8::Context> ctx2p;
2300 v8::HandleScope scope(isolate);
2301 ctx1p.Reset(isolate, v8::Context::New(isolate));
2302 ctx2p.Reset(isolate, v8::Context::New(isolate));
2303 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2306 CcTest::heap()->CollectAllAvailableGarbage();
2307 CHECK_EQ(4, NumberOfGlobalObjects());
2310 v8::HandleScope inner_scope(isolate);
2311 CompileRun("var v = function() { return 42; }");
2312 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2313 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2314 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2316 ctx2->Global()->Set(v8_str("o"), v);
2317 v8::Local<v8::Value> res = CompileRun(
2318 "function f(x) { return x(); }"
2319 "for (var i = 0; i < 10; ++i) f(o);"
2320 "%OptimizeFunctionOnNextCall(f);"
2322 CHECK_EQ(42, res->Int32Value());
2323 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2327 isolate->ContextDisposedNotification();
2329 CcTest::heap()->CollectAllAvailableGarbage();
2330 CHECK_EQ(2, NumberOfGlobalObjects());
2332 CcTest::heap()->CollectAllAvailableGarbage();
2333 CHECK_EQ(0, NumberOfGlobalObjects());
2337 TEST(LeakNativeContextViaMapKeyed) {
2338 i::FLAG_allow_natives_syntax = true;
2339 v8::Isolate* isolate = CcTest::isolate();
2340 v8::HandleScope outer_scope(isolate);
2341 v8::Persistent<v8::Context> ctx1p;
2342 v8::Persistent<v8::Context> ctx2p;
2344 v8::HandleScope scope(isolate);
2345 ctx1p.Reset(isolate, v8::Context::New(isolate));
2346 ctx2p.Reset(isolate, v8::Context::New(isolate));
2347 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2350 CcTest::heap()->CollectAllAvailableGarbage();
2351 CHECK_EQ(4, NumberOfGlobalObjects());
2354 v8::HandleScope inner_scope(isolate);
2355 CompileRun("var v = [42, 43]");
2356 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2357 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2358 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2360 ctx2->Global()->Set(v8_str("o"), v);
2361 v8::Local<v8::Value> res = CompileRun(
2362 "function f() { return o[0]; }"
2363 "for (var i = 0; i < 10; ++i) f();"
2364 "%OptimizeFunctionOnNextCall(f);"
2366 CHECK_EQ(42, res->Int32Value());
2367 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2371 isolate->ContextDisposedNotification();
2373 CcTest::heap()->CollectAllAvailableGarbage();
2374 CHECK_EQ(2, NumberOfGlobalObjects());
2376 CcTest::heap()->CollectAllAvailableGarbage();
2377 CHECK_EQ(0, NumberOfGlobalObjects());
2381 TEST(LeakNativeContextViaMapProto) {
2382 i::FLAG_allow_natives_syntax = true;
2383 v8::Isolate* isolate = CcTest::isolate();
2384 v8::HandleScope outer_scope(isolate);
2385 v8::Persistent<v8::Context> ctx1p;
2386 v8::Persistent<v8::Context> ctx2p;
2388 v8::HandleScope scope(isolate);
2389 ctx1p.Reset(isolate, v8::Context::New(isolate));
2390 ctx2p.Reset(isolate, v8::Context::New(isolate));
2391 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2394 CcTest::heap()->CollectAllAvailableGarbage();
2395 CHECK_EQ(4, NumberOfGlobalObjects());
2398 v8::HandleScope inner_scope(isolate);
2399 CompileRun("var v = { y: 42}");
2400 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2401 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2402 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2404 ctx2->Global()->Set(v8_str("o"), v);
2405 v8::Local<v8::Value> res = CompileRun(
2411 "for (var i = 0; i < 10; ++i) f();"
2412 "%OptimizeFunctionOnNextCall(f);"
2414 CHECK_EQ(42, res->Int32Value());
2415 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2419 isolate->ContextDisposedNotification();
2421 CcTest::heap()->CollectAllAvailableGarbage();
2422 CHECK_EQ(2, NumberOfGlobalObjects());
2424 CcTest::heap()->CollectAllAvailableGarbage();
2425 CHECK_EQ(0, NumberOfGlobalObjects());
2429 TEST(InstanceOfStubWriteBarrier) {
2430 i::FLAG_allow_natives_syntax = true;
2432 i::FLAG_verify_heap = true;
2435 CcTest::InitializeVM();
2436 if (!CcTest::i_isolate()->use_crankshaft()) return;
2437 if (i::FLAG_force_marking_deque_overflows) return;
2438 v8::HandleScope outer_scope(CcTest::isolate());
2441 v8::HandleScope scope(CcTest::isolate());
2442 CompileRun(
2443 "function foo () { }"
2444 "function mkbar () { return new (new Function(\"\")) (); }"
2445 "function f (x) { return (x instanceof foo); }"
2446 "function g () { f(mkbar()); }"
2447 "f(new foo()); f(new foo());"
2448 "%OptimizeFunctionOnNextCall(f);"
2449 "f(new foo()); g();");
2452 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2454 marking->Start(Heap::kNoGCFlags);
2456 Handle<JSFunction> f =
2457 v8::Utils::OpenHandle(
2458 *v8::Handle<v8::Function>::Cast(
2459 CcTest::global()->Get(v8_str("f"))));
2461 CHECK(f->IsOptimized());
2463 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
2464 !marking->IsStopped()) {
2465 // Discard any pending GC requests otherwise we will get GC when we enter
2466 // the stack guard.
2467 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2470 CHECK(marking->IsMarking());
2473 v8::HandleScope scope(CcTest::isolate());
2474 v8::Handle<v8::Object> global = CcTest::global();
2475 v8::Handle<v8::Function> g =
2476 v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
2477 g->Call(global, 0, NULL);
2480 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2481 CcTest::heap()->CollectGarbage(OLD_SPACE);
2485 static int NumberOfProtoTransitions(Map* map) {
2486 return TransitionArray::NumberOfPrototypeTransitions(
2487 TransitionArray::GetPrototypeTransitions(map));
2491 TEST(PrototypeTransitionClearing) {
2492 if (FLAG_never_compact) return;
2493 CcTest::InitializeVM();
2494 Isolate* isolate = CcTest::i_isolate();
2495 Factory* factory = isolate->factory();
2496 v8::HandleScope scope(CcTest::isolate());
2498 CompileRun("var base = {};");
2499 Handle<JSObject> baseObject =
2500 v8::Utils::OpenHandle(
2501 *v8::Handle<v8::Object>::Cast(
2502 CcTest::global()->Get(v8_str("base"))));
2503 int initialTransitions = NumberOfProtoTransitions(baseObject->map());
2507 "for (var i = 0; i < 10; i++) {"
2509 " var prototype = {};"
2510 " object.__proto__ = prototype;"
2511 " if (i >= 3) live.push(object, prototype);"
2514 // Verify that only dead prototype transitions are cleared.
2515 CHECK_EQ(initialTransitions + 10,
2516 NumberOfProtoTransitions(baseObject->map()));
2517 CcTest::heap()->CollectAllGarbage();
2518 const int transitions = 10 - 3;
2519 CHECK_EQ(initialTransitions + transitions,
2520 NumberOfProtoTransitions(baseObject->map()));
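// Counting sketch: the script above created 10 prototype transitions; only
// the pairs pushed into 'live' for i >= 3 stay reachable, so 3 transitions
// die and 10 - 3 == 7 survive the full GC, on top of however many
// transitions the map carried initially.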
2522 // Verify that prototype transitions array was compacted.
2523 FixedArray* trans =
2524 TransitionArray::GetPrototypeTransitions(baseObject->map());
2525 for (int i = initialTransitions; i < initialTransitions + transitions; i++) {
2526 int j = TransitionArray::kProtoTransitionHeaderSize + i;
2527 CHECK(trans->get(j)->IsWeakCell());
2528 CHECK(WeakCell::cast(trans->get(j))->value()->IsMap());
2531 // Make sure next prototype is placed on an old-space evacuation candidate.
2532 Handle<JSObject> prototype;
2533 PagedSpace* space = CcTest::heap()->old_space();
2535 AlwaysAllocateScope always_allocate(isolate);
2536 SimulateFullSpace(space);
2537 prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS,
2538 Strength::WEAK, TENURED);
2541 // Add a prototype on an evacuation candidate and verify that transition
2542 // clearing correctly records slots in prototype transition array.
2543 i::FLAG_always_compact = true;
2544 Handle<Map> map(baseObject->map());
2545 CHECK(!space->LastPage()->Contains(
2546 TransitionArray::GetPrototypeTransitions(*map)->address()));
2547 CHECK(space->LastPage()->Contains(prototype->address()));
2551 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2552 i::FLAG_stress_compaction = false;
2553 i::FLAG_allow_natives_syntax = true;
2555 i::FLAG_verify_heap = true;
2558 CcTest::InitializeVM();
2559 if (!CcTest::i_isolate()->use_crankshaft()) return;
2560 v8::HandleScope outer_scope(CcTest::isolate());
2563 v8::HandleScope scope(CcTest::isolate());
2564 CompileRun("function f () {"
2565 "  var s = 0;"
2567 "  for (var i = 0; i < 100; i++) s += i;"
2568 "  return s;"
2569 "}"
2570 "f(); f();"
2571 "%OptimizeFunctionOnNextCall(f);"
2572 "f();");
2574 Handle<JSFunction> f =
2575 v8::Utils::OpenHandle(
2576 *v8::Handle<v8::Function>::Cast(
2577 CcTest::global()->Get(v8_str("f"))));
2578 CHECK(f->IsOptimized());
2580 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2582 marking->Start(Heap::kNoGCFlags);
2583 // The following calls will increment CcTest::heap()->global_ic_age().
2584 CcTest::isolate()->ContextDisposedNotification();
2585 SimulateIncrementalMarking(CcTest::heap());
2586 CcTest::heap()->CollectAllGarbage();
2587 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2588 CHECK_EQ(0, f->shared()->opt_count());
2589 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2593 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2594 i::FLAG_stress_compaction = false;
2595 i::FLAG_allow_natives_syntax = true;
2597 i::FLAG_verify_heap = true;
2600 CcTest::InitializeVM();
2601 if (!CcTest::i_isolate()->use_crankshaft()) return;
2602 v8::HandleScope outer_scope(CcTest::isolate());
2605 v8::HandleScope scope(CcTest::isolate());
2606 CompileRun("function f () {"
2607 "  var s = 0;"
2609 "  for (var i = 0; i < 100; i++) s += i;"
2610 "  return s;"
2611 "}"
2612 "f(); f();"
2613 "%OptimizeFunctionOnNextCall(f);"
2614 "f();");
2616 Handle<JSFunction> f =
2617 v8::Utils::OpenHandle(
2618 *v8::Handle<v8::Function>::Cast(
2619 CcTest::global()->Get(v8_str("f"))));
2620 CHECK(f->IsOptimized());
2622 CcTest::heap()->incremental_marking()->Abort();
2624 // The following two calls will increment CcTest::heap()->global_ic_age().
2625 CcTest::isolate()->ContextDisposedNotification();
2626 CcTest::heap()->CollectAllGarbage();
2628 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2629 CHECK_EQ(0, f->shared()->opt_count());
2630 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2634 TEST(IdleNotificationFinishMarking) {
2635 i::FLAG_allow_natives_syntax = true;
2636 CcTest::InitializeVM();
2637 SimulateFullSpace(CcTest::heap()->old_space());
2638 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2640 marking->Start(Heap::kNoGCFlags);
2642 CHECK_EQ(CcTest::heap()->gc_count(), 0);
2644 // TODO(hpayer): We cannot write proper unit test right now for heap.
2645 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2646 // marking delay counter.
2648 // Perform a huge incremental marking step but don't complete marking.
2649 intptr_t bytes_processed = 0;
2650 do {
2651 bytes_processed =
2652 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2653 IncrementalMarking::FORCE_MARKING,
2654 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2655 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2656 } while (bytes_processed);
2658 // The next invocations of incremental marking are not going to complete
2660 // marking, since the completion threshold is not reached.
2661 for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
2662 i++) {
2663 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2664 IncrementalMarking::FORCE_MARKING,
2665 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2666 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2667 }
2669 marking->SetWeakClosureWasOverApproximatedForTesting(true);
2671 // The next idle notification has to finish incremental marking.
2672 const double kLongIdleTime = 1000.0;
2673 CcTest::isolate()->IdleNotificationDeadline(
2674 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2675 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2676 kLongIdleTime);
2677 CHECK_EQ(CcTest::heap()->gc_count(), 1);
2681 // Test that HAllocateObject will always return an object in new-space.
2682 TEST(OptimizedAllocationAlwaysInNewSpace) {
2683 i::FLAG_allow_natives_syntax = true;
2684 CcTest::InitializeVM();
2685 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2686 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2687 v8::HandleScope scope(CcTest::isolate());
2689 SimulateFullSpace(CcTest::heap()->new_space());
2690 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2691 v8::Local<v8::Value> res = CompileRun(
2692 "function c(x) {"
2693 "  this.x = x;"
2694 "  for (var i = 0; i < 32; i++) {"
2695 "    this['x' + i] = x;"
2696 "  }"
2697 "}"
2698 "function f(x) { return new c(x); };"
2699 "f(1); f(2); f(3);"
2700 "%OptimizeFunctionOnNextCall(f);"
2701 "f(4);");
2702 CHECK_EQ(
2703 4, res.As<v8::Object>()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2705 Handle<JSObject> o =
2706 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2708 CHECK(CcTest::heap()->InNewSpace(*o));
2712 TEST(OptimizedPretenuringAllocationFolding) {
2713 i::FLAG_allow_natives_syntax = true;
2714 i::FLAG_expose_gc = true;
2715 CcTest::InitializeVM();
2716 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2717 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2718 v8::HandleScope scope(CcTest::isolate());
2720 // Grow new space until maximum capacity is reached.
2721 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2722 CcTest::heap()->new_space()->Grow();
2725 i::ScopedVector<char> source(1024);
2728 "var number_elements = %d;"
2729 "var elements = new Array();"
2731 " for (var i = 0; i < number_elements; i++) {"
2732 " elements[i] = [[{}], [1.1]];"
2734 " return elements[number_elements-1]"
2738 "%%OptimizeFunctionOnNextCall(f);"
2740 AllocationSite::kPretenureMinimumCreated);
2742 v8::Local<v8::Value> res = CompileRun(source.start());
2744 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2745 Handle<JSObject> int_array_handle =
2746 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2747 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2748 Handle<JSObject> double_array_handle =
2749 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2751 Handle<JSObject> o =
2752 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2753 CHECK(CcTest::heap()->InOldSpace(*o));
2754 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2755 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2756 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2757 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2761 TEST(OptimizedPretenuringObjectArrayLiterals) {
2762 i::FLAG_allow_natives_syntax = true;
2763 i::FLAG_expose_gc = true;
2764 CcTest::InitializeVM();
2765 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2766 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2767 v8::HandleScope scope(CcTest::isolate());
2769 // Grow new space until maximum capacity is reached.
2770 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2771 CcTest::heap()->new_space()->Grow();
2774 i::ScopedVector<char> source(1024);
2777 "var number_elements = %d;"
2778 "var elements = new Array(number_elements);"
2780 " for (var i = 0; i < number_elements; i++) {"
2781 " elements[i] = [{}, {}, {}];"
2783 " return elements[number_elements - 1];"
2787 "%%OptimizeFunctionOnNextCall(f);"
2789 AllocationSite::kPretenureMinimumCreated);
2791 v8::Local<v8::Value> res = CompileRun(source.start());
2793 Handle<JSObject> o =
2794 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2796 CHECK(CcTest::heap()->InOldSpace(o->elements()));
2797 CHECK(CcTest::heap()->InOldSpace(*o));
2801 TEST(OptimizedPretenuringMixedInObjectProperties) {
2802 i::FLAG_allow_natives_syntax = true;
2803 i::FLAG_expose_gc = true;
2804 CcTest::InitializeVM();
2805 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2806 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2807 v8::HandleScope scope(CcTest::isolate());
2809 // Grow new space until maximum capacity is reached.
2810 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2811 CcTest::heap()->new_space()->Grow();
2815 i::ScopedVector<char> source(1024);
2818 "var number_elements = %d;"
2819 "var elements = new Array(number_elements);"
2821 " for (var i = 0; i < number_elements; i++) {"
2822 " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2824 " return elements[number_elements - 1];"
2828 "%%OptimizeFunctionOnNextCall(f);"
2830 AllocationSite::kPretenureMinimumCreated);
2832 v8::Local<v8::Value> res = CompileRun(source.start());
2834 Handle<JSObject> o =
2835 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2837 CHECK(CcTest::heap()->InOldSpace(*o));
2838 FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2839 FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2840 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
2841 if (!o->IsUnboxedDoubleField(idx2)) {
2842 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
2844 CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
2847 JSObject* inner_object =
2848 reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
2849 CHECK(CcTest::heap()->InOldSpace(inner_object));
2850 if (!inner_object->IsUnboxedDoubleField(idx1)) {
2851 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
2853 CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
2855 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
2859 TEST(OptimizedPretenuringDoubleArrayProperties) {
2860 i::FLAG_allow_natives_syntax = true;
2861 i::FLAG_expose_gc = true;
2862 CcTest::InitializeVM();
2863 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2864 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2865 v8::HandleScope scope(CcTest::isolate());
2867 // Grow new space until maximum capacity is reached.
2868 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2869 CcTest::heap()->new_space()->Grow();
2872 i::ScopedVector<char> source(1024);
2875 "var number_elements = %d;"
2876 "var elements = new Array(number_elements);"
2878 " for (var i = 0; i < number_elements; i++) {"
2879 " elements[i] = {a: 1.1, b: 2.2};"
2881 " return elements[i - 1];"
2885 "%%OptimizeFunctionOnNextCall(f);"
2887 AllocationSite::kPretenureMinimumCreated);
2889 v8::Local<v8::Value> res = CompileRun(source.start());
2891 Handle<JSObject> o =
2892 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2894 CHECK(CcTest::heap()->InOldSpace(*o));
2895 CHECK(CcTest::heap()->InOldSpace(o->properties()));
2899 TEST(OptimizedPretenuringDoubleArrayLiterals) {
2900 i::FLAG_allow_natives_syntax = true;
2901 i::FLAG_expose_gc = true;
2902 CcTest::InitializeVM();
2903 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2904 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2905 v8::HandleScope scope(CcTest::isolate());
2907 // Grow new space until maximum capacity is reached.
2908 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2909 CcTest::heap()->new_space()->Grow();
2912 i::ScopedVector<char> source(1024);
2915 "var number_elements = %d;"
2916 "var elements = new Array(number_elements);"
2918 " for (var i = 0; i < number_elements; i++) {"
2919 " elements[i] = [1.1, 2.2, 3.3];"
2921 " return elements[number_elements - 1];"
2925 "%%OptimizeFunctionOnNextCall(f);"
2927 AllocationSite::kPretenureMinimumCreated);
2929 v8::Local<v8::Value> res = CompileRun(source.start());
2931 Handle<JSObject> o =
2932 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2934 CHECK(CcTest::heap()->InOldSpace(o->elements()));
2935 CHECK(CcTest::heap()->InOldSpace(*o));
2939 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2940 i::FLAG_allow_natives_syntax = true;
2941 i::FLAG_expose_gc = true;
2942 CcTest::InitializeVM();
2943 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2944 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2945 v8::HandleScope scope(CcTest::isolate());
2947 // Grow new space until maximum capacity is reached.
2948 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2949 CcTest::heap()->new_space()->Grow();
2952 i::ScopedVector<char> source(1024);
2955 "var number_elements = 100;"
2956 "var elements = new Array(number_elements);"
2958 " for (var i = 0; i < number_elements; i++) {"
2959 " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
2961 " return elements[number_elements - 1];"
2965 "%%OptimizeFunctionOnNextCall(f);"
2968 v8::Local<v8::Value> res = CompileRun(source.start());
2970 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2971 Handle<JSObject> int_array_handle =
2972 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2973 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2974 Handle<JSObject> double_array_handle =
2975 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2977 Handle<JSObject> o =
2978 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2979 CHECK(CcTest::heap()->InOldSpace(*o));
2980 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2981 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2982 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2983 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2987 TEST(OptimizedPretenuringNestedObjectLiterals) {
2988 i::FLAG_allow_natives_syntax = true;
2989 i::FLAG_expose_gc = true;
2990 CcTest::InitializeVM();
2991 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2992 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2993 v8::HandleScope scope(CcTest::isolate());
2995 // Grow new space until maximum capacity is reached.
2996 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2997 CcTest::heap()->new_space()->Grow();
3000 i::ScopedVector<char> source(1024);
3003 "var number_elements = %d;"
3004 "var elements = new Array(number_elements);"
3006 " for (var i = 0; i < number_elements; i++) {"
3007 " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
3009 " return elements[number_elements - 1];"
3013 "%%OptimizeFunctionOnNextCall(f);"
3015 AllocationSite::kPretenureMinimumCreated);
3017 v8::Local<v8::Value> res = CompileRun(source.start());
3019 v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
3020 Handle<JSObject> int_array_handle_1 =
3021 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
3022 v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
3023 Handle<JSObject> int_array_handle_2 =
3024 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
3026 Handle<JSObject> o =
3027 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3028 CHECK(CcTest::heap()->InOldSpace(*o));
3029 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
3030 CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
3031 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
3032 CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
3036 TEST(OptimizedPretenuringNestedDoubleLiterals) {
3037 i::FLAG_allow_natives_syntax = true;
3038 i::FLAG_expose_gc = true;
3039 CcTest::InitializeVM();
3040 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3041 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3042 v8::HandleScope scope(CcTest::isolate());
3044 // Grow new space until maximum capacity is reached.
3045 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3046 CcTest::heap()->new_space()->Grow();
3049 i::ScopedVector<char> source(1024);
3052 "var number_elements = %d;"
3053 "var elements = new Array(number_elements);"
3055 " for (var i = 0; i < number_elements; i++) {"
3056 " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
3058 " return elements[number_elements - 1];"
3062 "%%OptimizeFunctionOnNextCall(f);"
3064 AllocationSite::kPretenureMinimumCreated);
3066 v8::Local<v8::Value> res = CompileRun(source.start());
3068 v8::Local<v8::Value> double_array_1 =
3069 v8::Object::Cast(*res)->Get(v8_str("0"));
3070 Handle<JSObject> double_array_handle_1 =
3071 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
3072 v8::Local<v8::Value> double_array_2 =
3073 v8::Object::Cast(*res)->Get(v8_str("1"));
3074 Handle<JSObject> double_array_handle_2 =
3075 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
3077 Handle<JSObject> o =
3078 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3079 CHECK(CcTest::heap()->InOldSpace(*o));
3080 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
3081 CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
3082 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
3083 CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
3087 // Make sure pretenuring feedback is gathered for constructed objects as well
3089 TEST(OptimizedPretenuringConstructorCalls) {
3090 if (!i::FLAG_pretenuring_call_new) {
3091 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
3092 return;
3093 }
3094 i::FLAG_allow_natives_syntax = true;
3095 i::FLAG_expose_gc = true;
3096 CcTest::InitializeVM();
3097 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3098 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3099 v8::HandleScope scope(CcTest::isolate());
3101 // Grow new space until maximum capacity is reached.
3102 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3103 CcTest::heap()->new_space()->Grow();
3106 i::ScopedVector<char> source(1024);
3107 // Calling 'new' performs slack tracking for the first
3108 // JSFunction::kGenerousAllocationCount allocations, and we can't find
3109 // mementos during that time.
3112 "var number_elements = %d;"
3113 "var elements = new Array(number_elements);"
3119 " for (var i = 0; i < number_elements; i++) {"
3120 " elements[i] = new foo();"
3122 " return elements[number_elements - 1];"
3126 "%%OptimizeFunctionOnNextCall(f);"
3128 AllocationSite::kPretenureMinimumCreated +
3129 JSFunction::kGenerousAllocationCount);
3131 v8::Local<v8::Value> res = CompileRun(source.start());
3133 Handle<JSObject> o =
3134 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3136 CHECK(CcTest::heap()->InOldSpace(*o));
3140 TEST(OptimizedPretenuringCallNew) {
3141 if (!i::FLAG_pretenuring_call_new) {
3142 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
3143 return;
3144 }
3145 i::FLAG_allow_natives_syntax = true;
3146 i::FLAG_expose_gc = true;
3147 CcTest::InitializeVM();
3148 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3149 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3150 v8::HandleScope scope(CcTest::isolate());
3152 // Grow new space until maximum capacity is reached.
3153 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3154 CcTest::heap()->new_space()->Grow();
3157 i::ScopedVector<char> source(1024);
3158 // Calling 'new' performs slack tracking for the first
3159 // JSFunction::kGenerousAllocationCount allocations, and we can't find
3160 // mementos during that time.
3163 "var number_elements = %d;"
3164 "var elements = new Array(number_elements);"
3165 "function g() { this.a = 0; }"
3167 " for (var i = 0; i < number_elements; i++) {"
3168 " elements[i] = new g();"
3170 " return elements[number_elements - 1];"
3174 "%%OptimizeFunctionOnNextCall(f);"
3176 AllocationSite::kPretenureMinimumCreated +
3177 JSFunction::kGenerousAllocationCount);
3179 v8::Local<v8::Value> res = CompileRun(source.start());
3181 Handle<JSObject> o =
3182 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3183 CHECK(CcTest::heap()->InOldSpace(*o));
3187 // Test regular array literals allocation.
3188 TEST(OptimizedAllocationArrayLiterals) {
3189 i::FLAG_allow_natives_syntax = true;
3190 CcTest::InitializeVM();
3191 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3192 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3193 v8::HandleScope scope(CcTest::isolate());
3195 v8::Local<v8::Value> res = CompileRun(
3196 "function f() {"
3197 "  var numbers = new Array(1, 2, 3);"
3198 "  numbers[0] = 3.14;"
3199 "  return numbers;"
3200 "};"
3201 "f(); f(); f();"
3202 "%OptimizeFunctionOnNextCall(f);"
3203 "f();");
3204 CHECK_EQ(static_cast<int>(3.14),
3205 v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
3207 Handle<JSObject> o =
3208 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3210 CHECK(CcTest::heap()->InNewSpace(o->elements()));
3214 static int CountMapTransitions(Map* map) {
3215 return TransitionArray::NumberOfTransitions(map->raw_transitions());
3219 // Test that map transitions are cleared and maps are collected with
3220 // incremental marking as well.
3221 TEST(Regress1465) {
3222 i::FLAG_stress_compaction = false;
3223 i::FLAG_allow_natives_syntax = true;
3224 i::FLAG_trace_incremental_marking = true;
3225 i::FLAG_retain_maps_for_n_gc = 0;
3226 CcTest::InitializeVM();
3227 v8::HandleScope scope(CcTest::isolate());
3228 static const int transitions_count = 256;
3230 CompileRun("function F() {}");
3232 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3233 for (int i = 0; i < transitions_count; i++) {
3234 EmbeddedVector<char, 64> buffer;
3235 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3236 CompileRun(buffer.start());
3238 CompileRun("var root = new F;");
3241 Handle<JSObject> root =
3242 v8::Utils::OpenHandle(
3243 *v8::Handle<v8::Object>::Cast(
3244 CcTest::global()->Get(v8_str("root"))));
3246 // Count number of live transitions before marking.
3247 int transitions_before = CountMapTransitions(root->map());
3248 CompileRun("%DebugPrint(root);");
3249 CHECK_EQ(transitions_count, transitions_before);
3251 SimulateIncrementalMarking(CcTest::heap());
3252 CcTest::heap()->CollectAllGarbage();
3254 // Count number of live transitions after marking. Note that one transition
3255 // is left, because 'o' still holds an instance of one transition target.
3256 int transitions_after = CountMapTransitions(root->map());
3257 CompileRun("%DebugPrint(root);");
3258 CHECK_EQ(1, transitions_after);
3259 }
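// Illustrative sketch of what was counted: each "o.prop<i> = <i>" run above
// installed one transition on F's initial map, giving 256 before marking.
// With map retention disabled (retain_maps_for_n_gc == 0) and every
// intermediate 'o' dead, marking clears all of them except the one whose
// target map is still reachable from the last 'o' instance, hence 1.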
3263 static void AddTransitions(int transitions_count) {
3264 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3265 for (int i = 0; i < transitions_count; i++) {
3266 EmbeddedVector<char, 64> buffer;
3267 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3268 CompileRun(buffer.start());
3273 static Handle<JSObject> GetByName(const char* name) {
3274 return v8::Utils::OpenHandle(
3275 *v8::Handle<v8::Object>::Cast(
3276 CcTest::global()->Get(v8_str(name))));
3280 static void AddPropertyTo(
3281 int gc_count, Handle<JSObject> object, const char* property_name) {
3282 Isolate* isolate = CcTest::i_isolate();
3283 Factory* factory = isolate->factory();
3284 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3285 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3286 i::FLAG_gc_interval = gc_count;
3287 i::FLAG_gc_global = true;
3288 i::FLAG_retain_maps_for_n_gc = 0;
3289 CcTest::heap()->set_allocation_timeout(gc_count);
3290 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
3291 }
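// How this helper forces a GC mid-operation (a sketch): FLAG_gc_interval and
// set_allocation_timeout(gc_count) make the gc_count-th allocation from now
// trigger a collection (a global one, since FLAG_gc_global is set), so the
// GC lands inside SetProperty while the transition array is being grown --
// exactly the window the shrink-during-allocation tests below aim at.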
3294 TEST(TransitionArrayShrinksDuringAllocToZero) {
3295 i::FLAG_stress_compaction = false;
3296 i::FLAG_allow_natives_syntax = true;
3297 CcTest::InitializeVM();
3298 v8::HandleScope scope(CcTest::isolate());
3299 static const int transitions_count = 10;
3300 CompileRun("function F() { }");
3301 AddTransitions(transitions_count);
3302 CompileRun("var root = new F;");
3303 Handle<JSObject> root = GetByName("root");
3305 // Count number of live transitions before marking.
3306 int transitions_before = CountMapTransitions(root->map());
3307 CHECK_EQ(transitions_count, transitions_before);
3310 CompileRun("o = new F;"
3312 root = GetByName("root");
3313 AddPropertyTo(2, root, "funny");
3314 CcTest::heap()->CollectGarbage(NEW_SPACE);
3316 // Count number of live transitions after marking. Note that one transition
3317 // is left, because 'o' still holds an instance of one transition target.
3318 int transitions_after = CountMapTransitions(
3319 Map::cast(root->map()->GetBackPointer()));
3320 CHECK_EQ(1, transitions_after);
3324 TEST(TransitionArrayShrinksDuringAllocToOne) {
3325 i::FLAG_stress_compaction = false;
3326 i::FLAG_allow_natives_syntax = true;
3327 CcTest::InitializeVM();
3328 v8::HandleScope scope(CcTest::isolate());
3329 static const int transitions_count = 10;
3330 CompileRun("function F() {}");
3331 AddTransitions(transitions_count);
3332 CompileRun("var root = new F;");
3333 Handle<JSObject> root = GetByName("root");
3335 // Count number of live transitions before marking.
3336 int transitions_before = CountMapTransitions(root->map());
3337 CHECK_EQ(transitions_count, transitions_before);
3339 root = GetByName("root");
3340 AddPropertyTo(2, root, "funny");
3341 CcTest::heap()->CollectGarbage(NEW_SPACE);
3343 // Count number of live transitions after marking. Note that one transition
3344 // is left, because 'o' still holds an instance of one transition target.
3345 int transitions_after = CountMapTransitions(
3346 Map::cast(root->map()->GetBackPointer()));
3347 CHECK_EQ(2, transitions_after);
3351 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3352 i::FLAG_stress_compaction = false;
3353 i::FLAG_allow_natives_syntax = true;
3354 CcTest::InitializeVM();
3355 v8::HandleScope scope(CcTest::isolate());
3356 static const int transitions_count = 10;
3357 CompileRun("function F() {}");
3358 AddTransitions(transitions_count);
3359 CompileRun("var root = new F;");
3360 Handle<JSObject> root = GetByName("root");
3362 // Count number of live transitions before marking.
3363 int transitions_before = CountMapTransitions(root->map());
3364 CHECK_EQ(transitions_count, transitions_before);
3366 root = GetByName("root");
3367 AddPropertyTo(0, root, "prop9");
3368 CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
3370 // Count number of live transitions after marking. Note that one transition
3371 // is left, because 'o' still holds an instance of one transition target.
3372 int transitions_after = CountMapTransitions(
3373 Map::cast(root->map()->GetBackPointer()));
3374 CHECK_EQ(1, transitions_after);
3378 TEST(TransitionArraySimpleToFull) {
3379 i::FLAG_stress_compaction = false;
3380 i::FLAG_allow_natives_syntax = true;
3381 CcTest::InitializeVM();
3382 v8::HandleScope scope(CcTest::isolate());
3383 static const int transitions_count = 1;
3384 CompileRun("function F() {}");
3385 AddTransitions(transitions_count);
3386 CompileRun("var root = new F;");
3387 Handle<JSObject> root = GetByName("root");
3389 // Count number of live transitions before marking.
3390 int transitions_before = CountMapTransitions(root->map());
3391 CHECK_EQ(transitions_count, transitions_before);
3393 CompileRun("o = new F;"
3395 root = GetByName("root");
3396 DCHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
3397 AddPropertyTo(2, root, "happy");
3399 // Count number of live transitions after marking. Note that one transition
3400 // is left, because 'o' still holds an instance of one transition target.
3401 int transitions_after = CountMapTransitions(
3402 Map::cast(root->map()->GetBackPointer()));
3403 CHECK_EQ(1, transitions_after);
3408 TEST(Regress2143a) {
3409 i::FLAG_incremental_marking = true;
3410 CcTest::InitializeVM();
3411 v8::HandleScope scope(CcTest::isolate());
3413 // Prepare a map transition from the root object together with a yet
3414 // untransitioned root object.
3415 CompileRun("var root = new Object;"
3417 "root = new Object;");
3419 SimulateIncrementalMarking(CcTest::heap());
3421 // Compile a StoreIC that performs the prepared map transition. This
3422 // will restart incremental marking and should make sure the root is
3423 // marked grey again.
3424 CompileRun("function f(o) {"
3430 // This bug only triggers with aggressive IC clearing.
3431 CcTest::heap()->AgeInlineCaches();
3433 // Explicitly request GC to perform final marking step and sweeping.
3434 CcTest::heap()->CollectAllGarbage();
3436 Handle<JSObject> root =
3437 v8::Utils::OpenHandle(
3438 *v8::Handle<v8::Object>::Cast(
3439 CcTest::global()->Get(v8_str("root"))));
3441 // The root object should be in a sane state.
3442 CHECK(root->IsJSObject());
3443 CHECK(root->map()->IsMap());


TEST(Regress2143b) {
  i::FLAG_incremental_marking = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  SimulateIncrementalMarking(CcTest::heap());

  // Compile an optimized LStoreNamedField that performs the prepared
  // map transition. This will restart incremental marking and should
  // make sure the root is marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(root);"
             "%OptimizeFunctionOnNextCall(f);"
             "f(root);"
             "%DeoptimizeFunction(f);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage();

  Handle<JSObject> root =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              CcTest::global()->Get(v8_str("root"))));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}
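

// Fills old space with twenty sparsely populated pages and checks that
// successive full GCs release the over-reserved pages back to the OS.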
TEST(ReleaseOverReservedPages) {
  if (FLAG_never_compact) return;
  i::FLAG_trace_gc = true;
  // The optimizer can allocate stuff, messing up the test.
  i::FLAG_crankshaft = false;
  i::FLAG_always_opt = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  static const int number_of_test_pages = 20;

  // Prepare many pages with low live-bytes count.
  PagedSpace* old_space = heap->old_space();
  CHECK_EQ(1, old_space->CountTotalPages());
  for (int i = 0; i < number_of_test_pages; i++) {
    AlwaysAllocateScope always_allocate(isolate);
    SimulateFullSpace(old_space);
    factory->NewFixedArray(1, TENURED);
  }
  CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());

  // Triggering one GC will cause a lot of garbage to be discovered, but
  // evenly spread across all allocated pages.
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered for preparation");
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());

  // Triggering subsequent GCs should cause at least half of the pages
  // to be released to the OS after at most two cycles.
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered by test 1");
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered by test 2");
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);

  // Triggering a last-resort GC should cause all pages to be released to the
  // OS so that other processes can seize the memory. If we get a failure here
  // where there are 2 pages left instead of 1, then we should increase the
  // size of the first page a little in SizeOfFirstPage in spaces.cc. The
  // first page should be small in order to reduce memory used when the VM
  // boots, but if the 20 small arrays don't fit on the first page then that's
  // an indication that it is too small.
  heap->CollectAllAvailableGarbage("triggered really hard");
  CHECK_EQ(1, old_space->CountTotalPages());
}


static int forced_gc_counter = 0;


void MockUseCounterCallback(v8::Isolate* isolate,
                            v8::Isolate::UseCounterFeature feature) {
  isolate->GetCallingContext();
  if (feature == v8::Isolate::kForcedGC) {
    forced_gc_counter++;
  }
}
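

// Checks that a GC forced through the 'gc()' extension is reported via the
// kForcedGC use counter.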
TEST(CountForcedGC) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  isolate->SetUseCounterCallback(MockUseCounterCallback);

  forced_gc_counter = 0;
  const char* source = "gc();";
  CompileRun(source);
  CHECK_GT(forced_gc_counter, 0);
}


TEST(Regress2237) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  Handle<String> slice(CcTest::heap()->empty_string());

  {
    // Generate a parent that lives in new-space.
    v8::HandleScope inner_scope(CcTest::isolate());
    const char* c = "This text is long enough to trigger sliced strings.";
    Handle<String> s = factory->NewStringFromAsciiChecked(c);
    CHECK(s->IsSeqOneByteString());
    CHECK(CcTest::heap()->InNewSpace(*s));

    // Generate a sliced string that is based on the above parent and
    // lives in old-space.
    SimulateFullSpace(CcTest::heap()->new_space());
    AlwaysAllocateScope always_allocate(isolate);
    Handle<String> t = factory->NewProperSubString(s, 5, 35);
    CHECK(t->IsSlicedString());
    CHECK(!CcTest::heap()->InNewSpace(*t));
    *slice.location() = *t.location();
  }

  CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
  CcTest::heap()->CollectAllGarbage();
  CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
}


#ifdef OBJECT_PRINT
TEST(PrintSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "f = function() { return 987654321; }\n"
                       "g = function() { return 123456789; }\n";
  CompileRun(source);
  Handle<JSFunction> g =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("g"))));

  OFStream os(stdout);
  g->shared()->Print(os);
  os << std::endl;
}
#endif  // OBJECT_PRINT
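

// Checks that the weak cells in a call IC's feedback vector survive
// incremental marking plus a full GC while the closures stay reachable.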
TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1, fun2;

  {
    LocalContext env;
    CompileRun("function fun() {};");
    fun1 = env->Global()->Get(v8_str("fun"));
  }

  {
    LocalContext env;
    CompileRun("function fun() {};");
    fun2 = env->Global()->Get(v8_str("fun"));
  }

  // Prepare function f that contains type feedback for closures
  // originating from two different native contexts.
  CcTest::global()->Set(v8_str("fun1"), fun1);
  CcTest::global()->Set(v8_str("fun2"), fun2);
  CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");

  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));

  Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());

  int expected_slots = 2;
  CHECK_EQ(expected_slots, feedback_vector->ICSlots());
  int slot1 = 0;
  int slot2 = 1;
  CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot1))->IsWeakCell());
  CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot2))->IsWeakCell());

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot1)))
             ->cleared());
  CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot2)))
             ->cleared());
}


static Code* FindFirstIC(Code* code, Code::Kind kind) {
  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
             RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
  for (RelocIterator it(code, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Code* target = Code::GetCodeFromTargetAddress(info->target_address());
    if (target->is_inline_cache_stub() && target->kind() == kind) {
      return target;
    }
  }
  return NULL;
}


static void CheckVectorIC(Handle<JSFunction> f, int ic_slot_index,
                          InlineCacheState desired_state) {
  Handle<TypeFeedbackVector> vector =
      Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
  FeedbackVectorICSlot slot(ic_slot_index);
  LoadICNexus nexus(vector, slot);
  CHECK(nexus.StateFromFeedback() == desired_state);
}


static void CheckVectorICCleared(Handle<JSFunction> f, int ic_slot_index) {
  Handle<TypeFeedbackVector> vector =
      Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
  FeedbackVectorICSlot slot(ic_slot_index);
  LoadICNexus nexus(vector, slot);
  CHECK(IC::IsCleared(&nexus));
}


TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); } f(fun); f(fun);");
  Handle<JSFunction> f = v8::Utils::OpenHandle(
      *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));

  Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
}


TEST(IncrementalMarkingClearsMonomorphicConstructor) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1;

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; };");
    fun1 = env->Global()->Get(v8_str("fun"));
  }

  // Prepare function f that contains a monomorphic constructor for object
  // originating from a different native context.
  CcTest::global()->Set(v8_str("fun1"), fun1);
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); } f(fun1); f(fun1);");
  Handle<JSFunction> f = v8::Utils::OpenHandle(
      *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));

  Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
           vector->Get(FeedbackVectorSlot(0)));
}
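

// Checks that a monomorphic load IC whose receiver map originates from the
// same native context survives incremental marking plus a full GC.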
TEST(IncrementalMarkingPreservesMonomorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
             "function f(o) { return o.x; } f(obj); f(obj);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CheckVectorIC(f, 0, MONOMORPHIC);
  CHECK(ic_before->ic_state() == DEFAULT);

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CheckVectorIC(f, 0, MONOMORPHIC);
  CHECK(ic_after->ic_state() == DEFAULT);
}


TEST(IncrementalMarkingClearsMonomorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1;

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(v8_str("obj"));
  }

  // Prepare function f that contains a monomorphic IC for object
  // originating from a different native context.
  CcTest::global()->Set(v8_str("obj1"), obj1);
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
  Handle<JSFunction> f = v8::Utils::OpenHandle(
      *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CheckVectorIC(f, 0, MONOMORPHIC);
  CHECK(ic_before->ic_state() == DEFAULT);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CheckVectorICCleared(f, 0);
  CHECK(ic_after->ic_state() == DEFAULT);
}


TEST(IncrementalMarkingPreservesPolymorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(v8_str("obj"));
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(v8_str("obj"));
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CcTest::global()->Set(v8_str("obj1"), obj1);
  CcTest::global()->Set(v8_str("obj2"), obj2);
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = v8::Utils::OpenHandle(
      *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CheckVectorIC(f, 0, POLYMORPHIC);
  CHECK(ic_before->ic_state() == DEFAULT);

  // Run incremental marking and a full GC without any context dispose
  // notification; the polymorphic IC must survive.
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CheckVectorIC(f, 0, POLYMORPHIC);
  CHECK(ic_after->ic_state() == DEFAULT);
}


TEST(IncrementalMarkingClearsPolymorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(v8_str("obj"));
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(v8_str("obj"));
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CcTest::global()->Set(v8_str("obj1"), obj1);
  CcTest::global()->Set(v8_str("obj2"), obj2);
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = v8::Utils::OpenHandle(
      *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CheckVectorIC(f, 0, POLYMORPHIC);
  CHECK(ic_before->ic_state() == DEFAULT);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorICCleared(f, 0);
  CHECK(ic_before->ic_state() == DEFAULT);
}


class SourceResource : public v8::String::ExternalOneByteStringResource {
 public:
  explicit SourceResource(const char* data)
    : data_(data), length_(strlen(data)) { }

  virtual void Dispose() {
    i::DeleteArray(data_);
    data_ = NULL;
  }

  const char* data() const { return data_; }

  size_t length() const { return length_; }

  bool IsDisposed() { return data_ == NULL; }

 private:
  const char* data_;
  size_t length_;
};


void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
                               const char* accessor) {
  // Test that the data retained by the Error.stack accessor is released
  // after the first time the accessor is fired. We use an external string
  // to check whether the data is being released, since the external string
  // resource's callback is fired when the external string is GC'ed.
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  v8::HandleScope scope(isolate);
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
    v8::HandleScope scope(isolate);
    v8::Handle<v8::String> source_string =
        v8::String::NewExternal(isolate, resource);
    i_isolate->heap()->CollectAllAvailableGarbage();
    v8::Script::Compile(source_string)->Run();
    CHECK(!resource->IsDisposed());
  }
  // i_isolate->heap()->CollectAllAvailableGarbage();
  CHECK(!resource->IsDisposed());

  CompileRun(accessor);
  i_isolate->heap()->CollectAllAvailableGarbage();

  // External source has been released.
  CHECK(resource->IsDisposed());
  delete resource;
}


UNINITIALIZED_TEST(ReleaseStackTraceData) {
  if (i::FLAG_always_opt) {
    // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
    // See: https://codereview.chromium.org/181833004/
    return;
  }
  FLAG_use_ic = false;  // ICs retain objects.
  FLAG_concurrent_recompilation = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    static const char* source1 = "var error = null; "
    /* Normal Error */           "try { "
                                 "  throw new Error(); "
                                 "} catch (e) { "
                                 "  error = e; "
                                 "} ";
    static const char* source2 = "var error = null; "
    /* Stack overflow */         "try { "
                                 "  (function f() { f(); })(); "
                                 "} catch (e) { "
                                 "  error = e; "
                                 "} ";
    static const char* source3 = "var error = null; "
    /* Normal Error */           "try { "
    /* as prototype */           "  throw new Error(); "
                                 "} catch (e) { "
                                 "  error = new Object(); "
                                 "  error.__proto__ = e; "
                                 "} ";
    static const char* source4 = "var error = null; "
    /* Stack overflow */         "try { "
    /* as prototype */           "  (function f() { f(); })(); "
                                 "} catch (e) { "
                                 "  error = new Object(); "
                                 "  error.__proto__ = e; "
                                 "} ";
    static const char* getter = "error.stack";
    static const char* setter = "error.stack = 0";

    ReleaseStackTraceDataTest(isolate, source1, setter);
    ReleaseStackTraceDataTest(isolate, source2, setter);
    // We do not test source3 and source4 with setter, since the setter is
    // supposed to (untypically) write to the receiver, not the holder. This is
    // to emulate the behavior of a data property.

    ReleaseStackTraceDataTest(isolate, source1, getter);
    ReleaseStackTraceDataTest(isolate, source2, getter);
    ReleaseStackTraceDataTest(isolate, source3, getter);
    ReleaseStackTraceDataTest(isolate, source4, getter);
  }
  isolate->Dispose();
}


TEST(Regress159140) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare several closures that are all eligible for code flushing
  // because all reachable ones are not optimized. Make sure that the
  // optimized code object is directly reachable through a handle so
  // that it is marked black during incremental marking.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function h(x) {}"
               "function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "var g = mkClosure();"
               "f(1); f(2);"
               "g(1); g(2);"
               "h(1); h(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);"
               "%OptimizeFunctionOnNextCall(h); h(3);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    CompileRun("f = null;");

    Handle<JSFunction> g =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("g"))));
    CHECK(g->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then optimize one function. Finally
  // finish the GC to complete code flushing.
  SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
  heap->CollectAllGarbage();

  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("g('bozo');");
}


TEST(Regress165495) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare an optimized closure so that the optimized code map will get
  // populated. Then age the unoptimized code to trigger code flushing
  // but make sure the optimized code is unreachable.
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    CompileRun("f = null;");
  }

  // Simulate incremental marking so that unoptimized code is flushed
  // even though it still is cached in the optimized code map.
  SimulateIncrementalMarking(heap);
  heap->CollectAllGarbage();

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var g = mkClosure(); g('bozo');");
}


TEST(Regress169209) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare a shared function info eligible for code flushing for which
  // the unoptimized code will be replaced during optimization.
  Handle<SharedFunctionInfo> shared1;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 'foobar'; }"
               "function g(x) { if (x) f(); }"
               "f();"
               "g(false);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Prepare a shared function info eligible for code flushing that will
  // represent the dangling tail of the candidate list.
  Handle<SharedFunctionInfo> shared2;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function flushMe() { return 0; }"
               "flushMe(1);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("flushMe"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Simulate incremental marking and collect code flushing candidates.
  SimulateIncrementalMarking(heap);
  CHECK(shared1->code()->gc_metadata() != NULL);

  // Optimize function and make sure the unoptimized code is replaced.
#ifdef DEBUG
  FLAG_stop_at = "f";
#endif
  CompileRun("%OptimizeFunctionOnNextCall(g);"
             "g(false);");

  // Finish garbage collection cycle.
  heap->CollectAllGarbage();
  CHECK(shared1->code()->gc_metadata() == NULL);
}


TEST(Regress169928) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_crankshaft = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  // Prepare the environment.
  CompileRun("function fastliteralcase(literal, value) {"
             "  literal[0] = value;"
             "  return literal;"
             "}"
             "function get_standard_literal() {"
             "  var literal = [1, 2, 3];"
             "  return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));

  // First make sure we flip spaces.
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  AllocateAllButNBytes(CcTest::heap()->new_space(),
                       JSArray::kSize + AllocationMemento::kSize +
                       kPointerSize);

  Handle<JSArray> array =
      factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasFastSmiOrObjectElements());

  // We need a filler the size of an AllocationMemento object, plus an extra
  // pointer-sized filler value.
  HeapObject* obj = NULL;
  AllocationResult allocation =
      CcTest::heap()->new_space()->AllocateRawUnaligned(
          AllocationMemento::kSize + kPointerSize);
  CHECK(allocation.To(&obj));
  Address addr_obj = obj->address();
  CcTest::heap()->CreateFillerObjectAt(
      addr_obj, AllocationMemento::kSize + kPointerSize);

  // Give the array a name, making sure not to allocate strings.
  v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
  CcTest::global()->Set(array_name, array_obj);

  // This should crash with a protection violation if we are running a build
  // with the bug.
  AlwaysAllocateScope aa_scope(isolate);
  v8::Script::Compile(mote_code_string)->Run();
}


TEST(Regress168801) {
  if (i::FLAG_never_compact) return;
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    function = inner_scope.CloseAndEscape(handle(*f, isolate));
  }

  // Simulate incremental marking so that the unoptimized function is enqueued
  // as a candidate for code flushing. The shared function info however will
  // not be explicitly enqueued.
  SimulateIncrementalMarking(heap);

  // Now optimize the function so that it is taken off the candidate list.
  {
    HandleScope inner_scope(isolate);
    CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
  }

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
}


TEST(Regress173458) {
  if (i::FLAG_never_compact) return;
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    function = inner_scope.CloseAndEscape(handle(*f, isolate));
  }

  // Simulate incremental marking so that the unoptimized function is enqueued
  // as a candidate for code flushing. The shared function info however will
  // not be explicitly enqueued.
  SimulateIncrementalMarking(heap);

  // Now enable the debugger which in turn will disable code flushing.
  CHECK(isolate->debug()->Load());

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
}


class DummyVisitor : public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) { }
};
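

// Fills the current handle block up to its limit, then checks that iterating
// the handle scope implementer (including deferred handles) is safe when the
// next handle would require a new block.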
TEST(DeferredHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
  HandleScopeData* data = isolate->handle_scope_data();
  Handle<Object> init(heap->empty_string(), isolate);
  while (data->next < data->limit) {
    Handle<Object> obj(heap->empty_string(), isolate);
  }
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  DCHECK(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  isolate->handle_scope_implementer()->Iterate(&visitor);
  delete deferred.Detach();
}


TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function f(n) {"
             "  var a = new Array(n);"
             "  for (var i = 0; i < n; i += 100) a[i] = i;"
             "}"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) marking->Start(Heap::kNoGCFlags);
  // This big step should be sufficient to mark the whole array.
  marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  DCHECK(marking->IsComplete() ||
         marking->IsReadyToOverApproximateWeakClosure());
}


TEST(DisableInlineAllocation) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function test() {"
             "  var x = [];"
             "  for (var i = 0; i < 10; i++) {"
             "    x[i] = [ {}, [1,2,3], [1,x,3] ];"
             "  }"
             "}"
             "function run() {"
             "  %OptimizeFunctionOnNextCall(test);"
             "  test();"
             "  %DeoptimizeFunction(test);"
             "}");

  // Warm-up with inline allocation enabled.
  CompileRun("test(); test(); run();");

  // Run test with inline allocation disabled.
  CcTest::heap()->DisableInlineAllocation();
  CompileRun("run()");

  // Run test with inline allocation re-enabled.
  CcTest::heap()->EnableInlineAllocation();
  CompileRun("run()");
}
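

// Walks the heap's allocation_sites_list and returns the number of sites.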
static int AllocationSitesCount(Heap* heap) {
  int count = 0;
  for (Object* site = heap->allocation_sites_list();
       !(site->IsUndefined());
       site = AllocationSite::cast(site)->weak_next()) {
    count++;
  }
  return count;
}


TEST(EnsureAllocationSiteDependentCodesProcessed) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  GlobalHandles* global_handles = isolate->global_handles();

  if (!isolate->use_crankshaft()) return;

  // The allocation site at the head of the list is ours.
  Handle<AllocationSite> site;
  {
    LocalContext context;
    v8::HandleScope scope(context->GetIsolate());

    int count = AllocationSitesCount(heap);
    CompileRun("var bar = function() { return (new Array()); };"
               "var a = bar();"
               "bar();"
               "bar();");

    // One allocation site should have been created.
    int new_count = AllocationSitesCount(heap);
    CHECK_EQ(new_count, (count + 1));
    site = Handle<AllocationSite>::cast(
        global_handles->Create(
            AllocationSite::cast(heap->allocation_sites_list())));

    CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");

    DependentCode::GroupStartIndexes starts(site->dependent_code());
    CHECK_GE(starts.number_of_entries(), 1);
    int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
    CHECK(site->dependent_code()->object_at(index)->IsWeakCell());
    Code* function_bar = Code::cast(
        WeakCell::cast(site->dependent_code()->object_at(index))->value());
    Handle<JSFunction> bar_handle =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("bar"))));
    CHECK_EQ(bar_handle->code(), function_bar);
  }

  // Now make sure that a gc should get rid of the function, even though we
  // still have the allocation site alive.
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  // The site still exists because of our global handle, but the code is no
  // longer referred to by dependent_code().
  DependentCode::GroupStartIndexes starts(site->dependent_code());
  int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
  CHECK(site->dependent_code()->object_at(index)->IsWeakCell() &&
        WeakCell::cast(site->dependent_code()->object_at(index))->cleared());
}
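

// Checks that cells embedded in optimized code do not keep the code alive:
// once the function dies, its code must be marked for deoptimization.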
TEST(CellsInOptimizedCodeAreWeak) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun("bar = (function() {"
               "  function bar() {"
               "    return foo(1);"
               "  };"
               "  var foo = function(x) { with (x) { return 1 + x; } };"
               "  bar(new Object());"
               "  bar(new Object());"
               "  bar(new Object());"
               "  %OptimizeFunctionOnNextCall(bar);"
               "  bar(new Object());"
               "  return bar;})();");

    Handle<JSFunction> bar =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("bar"))));
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function.
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  DCHECK(code->marked_for_deoptimization());
}


TEST(ObjectsInOptimizedCodeAreWeak) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun("function bar() {"
               "  return foo(1);"
               "};"
               "function foo(x) { with (x) { return 1 + x; } };"
               "bar();"
               "bar();"
               "bar();"
               "%OptimizeFunctionOnNextCall(bar);"
               "bar();");

    Handle<JSFunction> bar =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                CcTest::global()->Get(v8_str("bar"))));
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function.
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  DCHECK(code->marked_for_deoptimization());
}


TEST(NoWeakHashTableLeakWithIncrementalMarking) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  if (!i::FLAG_incremental_marking) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_compilation_cache = false;
  i::FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();

  // Do not run for no-snap builds.
  if (!i::Snapshot::HaveASnapshotToStartFrom(isolate)) return;

  v8::internal::Heap* heap = CcTest::heap();

  // Get a clean slate regarding optimized functions on the heap.
  i::Deoptimizer::DeoptimizeAll(isolate);
  heap->CollectAllGarbage();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  for (int i = 0; i < 3; i++) {
    SimulateIncrementalMarking(heap);
    {
      LocalContext context;
      HandleScope scope(heap->isolate());
      EmbeddedVector<char, 256> source;
      SNPrintF(source,
               "function bar%d() {"
               "  return foo%d(1);"
               "};"
               "function foo%d(x) { with (x) { return 1 + x; } };"
               "bar%d();"
               "bar%d();"
               "bar%d();"
               "%%OptimizeFunctionOnNextCall(bar%d);"
               "bar%d();",
               i, i, i, i, i, i, i, i);
      CompileRun(source.start());
    }
    heap->CollectAllGarbage();
  }
  int elements = 0;
  if (heap->weak_object_to_code_table()->IsHashTable()) {
    WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
    elements = t->NumberOfElements();
  }
  CHECK_EQ(0, elements);
}


static Handle<JSFunction> OptimizeDummyFunction(const char* name) {
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();", name, name, name, name, name);
  CompileRun(source.start());
  Handle<JSFunction> fun =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              CcTest::global()->Get(v8_str(name))));
  return fun;
}
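

// Returns the length of the next_code_link chain hanging off |code|.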
static int GetCodeChainLength(Code* code) {
  int result = 0;
  while (code->next_code_link()->IsCode()) {
    result++;
    code = Code::cast(code->next_code_link());
  }
  return result;
}


TEST(NextCodeLinkIsWeak) {
  i::FLAG_always_opt = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  heap->CollectAllAvailableGarbage();
  int code_chain_length_before, code_chain_length_after;
  {
    HandleScope scope(heap->isolate());
    Handle<JSFunction> mortal = OptimizeDummyFunction("mortal");
    Handle<JSFunction> immortal = OptimizeDummyFunction("immortal");
    CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
    code_chain_length_before = GetCodeChainLength(immortal->code());
    // Keep the immortal code and let the mortal code die.
    code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
    CompileRun("mortal = null; immortal = null;");
  }
  heap->CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  code_chain_length_after = GetCodeChainLength(*code);
  CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
}


static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, buffer, sizeof(buffer));
  CodeDesc desc;
  masm.Push(isolate->factory()->undefined_value());
  masm.Drop(1);
  masm.GetCode(&desc);
  Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
  CHECK(code->IsCode());
  return code;
}


TEST(NextCodeLinkIsWeak2) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  heap->CollectAllAvailableGarbage();
  Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
  Handle<Code> new_head;
  Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
  {
    HandleScope scope(heap->isolate());
    Handle<Code> immortal = DummyOptimizedCode(isolate);
    Handle<Code> mortal = DummyOptimizedCode(isolate);
    mortal->set_next_code_link(*old_head);
    immortal->set_next_code_link(*mortal);
    context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
    new_head = scope.CloseAndEscape(immortal);
  }
  heap->CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  CHECK_EQ(*old_head, new_head->next_code_link());
}
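

// Helpers for the weak-IC tests below: a persistent handle is made weak and
// ClearWeakIC records when the GC clears it, i.e. when the IC no longer
// keeps the embedded object alive.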
static bool weak_ic_cleared = false;


static void ClearWeakIC(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  printf("clear weak is called\n");
  weak_ic_cleared = true;
  data.GetParameter()->Reset();
}


TEST(WeakFunctionInConstructor) {
  if (i::FLAG_always_opt) return;
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  CompileRun(
      "function createObj(obj) {"
      "  return new obj();"
      "}");
  Handle<JSFunction> createObj =
      v8::Utils::OpenHandle(*v8::Handle<v8::Function>::Cast(
          CcTest::global()->Get(v8_str("createObj"))));

  v8::Persistent<v8::Object> garbage;
  {
    v8::HandleScope scope(isolate);
    const char* source =
        " (function() {"
        "   function hat() { this.x = 5; }"
        "   createObj(hat);"
        "   createObj(hat);"
        "   return hat;"
        " })();";
    garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  Heap* heap = CcTest::i_isolate()->heap();
  heap->CollectAllGarbage();
  CHECK(weak_ic_cleared);

  // We've determined that the constructor in createObj has had its weak cell
  // cleared. Now, verify that one additional call with a new function
  // allows monomorphicity.
  Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
      createObj->shared()->feedback_vector(), CcTest::i_isolate());
  for (int i = 0; i < 20; i++) {
    Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
    CHECK(slot_value->IsWeakCell());
    if (WeakCell::cast(slot_value)->cleared()) break;
    heap->CollectAllGarbage();
  }

  Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
  CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
  CompileRun(
      "function coat() { this.x = 6; }"
      "createObj(coat);");
  slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
  CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
}


// Checks that the value returned by execution of the source is weak.
void CheckWeakness(const char* source) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Persistent<v8::Object> garbage;
  {
    v8::HandleScope scope(isolate);
    garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  Heap* heap = CcTest::i_isolate()->heap();
  heap->CollectAllGarbage();
  CHECK(weak_ic_cleared);
}


// Each of the following "weak IC" tests creates an IC that embeds a map with
// the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
TEST(WeakMapInMonomorphicLoadIC) {
  CheckWeakness("function loadIC(obj) {"
                "  return obj.name;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   loadIC(obj);"
                "   loadIC(obj);"
                "   loadIC(obj);"
                "   return proto;"
                " })();");
}


TEST(WeakMapInPolymorphicLoadIC) {
  CheckWeakness(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   loadIC(obj);"
      "   loadIC(obj);"
      "   loadIC(obj);"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   loadIC(poly);"
      "   return proto;"
      " })();");
}


TEST(WeakMapInMonomorphicKeyedLoadIC) {
  CheckWeakness("function keyedLoadIC(obj, field) {"
                "  return obj[field];"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   return proto;"
                " })();");
}


TEST(WeakMapInPolymorphicKeyedLoadIC) {
  CheckWeakness(
      "function keyedLoadIC(obj, field) {"
      "  return obj[field];"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   keyedLoadIC(obj, 'name');"
      "   keyedLoadIC(obj, 'name');"
      "   keyedLoadIC(obj, 'name');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   keyedLoadIC(poly, 'name');"
      "   return proto;"
      " })();");
}


TEST(WeakMapInMonomorphicStoreIC) {
  CheckWeakness("function storeIC(obj, value) {"
                "  obj.name = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   return proto;"
                " })();");
}


TEST(WeakMapInPolymorphicStoreIC) {
  CheckWeakness(
      "function storeIC(obj, value) {"
      "  obj.name = value;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   storeIC(obj, 'x');"
      "   storeIC(obj, 'x');"
      "   storeIC(obj, 'x');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   storeIC(poly, 'x');"
      "   return proto;"
      " })();");
}


TEST(WeakMapInMonomorphicKeyedStoreIC) {
  CheckWeakness("function keyedStoreIC(obj, field, value) {"
                "  obj[field] = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   return proto;"
                " })();");
}


TEST(WeakMapInPolymorphicKeyedStoreIC) {
  CheckWeakness(
      "function keyedStoreIC(obj, field, value) {"
      "  obj[field] = value;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   keyedStoreIC(obj, 'x');"
      "   keyedStoreIC(obj, 'x');"
      "   keyedStoreIC(obj, 'x');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   keyedStoreIC(poly, 'x');"
      "   return proto;"
      " })();");
}


TEST(WeakMapInMonomorphicCompareNilIC) {
  CheckWeakness("function compareNilIC(obj) {"
                "  return obj == null;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   return proto;"
                " })();");
}


Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  Handle<Object> obj =
      Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
  return Handle<JSFunction>::cast(obj);
}


void CheckIC(Code* code, Code::Kind kind, SharedFunctionInfo* shared,
             int ic_slot, InlineCacheState state) {
  if (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
      kind == Code::CALL_IC) {
    TypeFeedbackVector* vector = shared->feedback_vector();
    FeedbackVectorICSlot slot(ic_slot);
    if (kind == Code::LOAD_IC) {
      LoadICNexus nexus(vector, slot);
      CHECK_EQ(nexus.StateFromFeedback(), state);
    } else if (kind == Code::KEYED_LOAD_IC) {
      KeyedLoadICNexus nexus(vector, slot);
      CHECK_EQ(nexus.StateFromFeedback(), state);
    } else if (kind == Code::CALL_IC) {
      CallICNexus nexus(vector, slot);
      CHECK_EQ(nexus.StateFromFeedback(), state);
    }
  } else {
    Code* ic = FindFirstIC(code, kind);
    CHECK(ic->is_inline_cache_stub());
    CHECK(ic->ic_state() == state);
  }
}


TEST(MonomorphicStaysMonomorphicAfterGC) {
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      "function testIC() {"
      "  var proto = {'name' : 'weak'};"
      "  var obj = Object.create(proto);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  heap->CollectAllGarbage();
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
}


TEST(PolymorphicStaysPolymorphicAfterGC) {
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      "function testIC() {"
      "  var proto = {'name' : 'weak'};"
      "  var obj = Object.create(proto);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  var poly = Object.create(proto);"
      "  poly.x = true;"
      "  loadIC(poly);"
      "  return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  heap->CollectAllGarbage();
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
}
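

// Checks that a WeakCell keeps its value alive across scavenges and is only
// cleared by a full GC once the value is otherwise unreachable.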
TEST(WeakCell) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  v8::internal::Factory* factory = isolate->factory();

  HandleScope outer_scope(isolate);
  Handle<WeakCell> weak_cell1;
  {
    HandleScope inner_scope(isolate);
    Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
    weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
  }

  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
  Handle<WeakCell> weak_cell2;
  {
    HandleScope inner_scope(isolate);
    weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
  }
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  heap->CollectGarbage(NEW_SPACE);
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  heap->CollectGarbage(NEW_SPACE);
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  heap->CollectAllAvailableGarbage();
  CHECK(weak_cell1->cleared());
  CHECK_EQ(*survivor, weak_cell2->value());
}


TEST(WeakCellsWithIncrementalMarking) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  v8::internal::Factory* factory = isolate->factory();

  const int N = 16;
  HandleScope outer_scope(isolate);
  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
  Handle<WeakCell> weak_cells[N];

  for (int i = 0; i < N; i++) {
    HandleScope inner_scope(isolate);
    Handle<HeapObject> value =
        i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
    Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
    CHECK(weak_cell->value()->IsFixedArray());
    IncrementalMarking* marking = heap->incremental_marking();
    if (marking->IsStopped()) marking->Start(Heap::kNoGCFlags);
    marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    heap->CollectGarbage(NEW_SPACE);
    CHECK(weak_cell->value()->IsFixedArray());
    weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
  }
  heap->CollectAllGarbage();
  CHECK_EQ(*survivor, weak_cells[0]->value());
  for (int i = 1; i < N; i++) {
    CHECK(weak_cells[i]->cleared());
  }
}


TEST(AddInstructionChangesNewSpacePromotion) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  i::FLAG_stress_compaction = true;
  i::FLAG_gc_interval = 1000;
  CcTest::InitializeVM();
  if (!i::FLAG_allocation_site_pretenuring) return;
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  CompileRun(
      "function add(a, b) {"
      "  return a + b;"
      "}"
      "add(1, 2);"
      "add(\"a\", \"b\");"
      "var oldSpaceObject;"
      "gc();"
      "function crash(x) {"
      "  var object = {a: null, b: null};"
      "  var result = add(1.5, x | 0);"
      "  object.a = result;"
      "  oldSpaceObject = object;"
      "  return object;"
      "}"
      "crash(1);"
      "crash(1);"
      "%OptimizeFunctionOnNextCall(crash);"
      "crash(1);");

  v8::Handle<v8::Object> global = CcTest::global();
  v8::Handle<v8::Function> g =
      v8::Handle<v8::Function>::Cast(global->Get(v8_str("crash")));
  v8::Handle<v8::Value> args1[] = { v8_num(1) };
  heap->DisableInlineAllocation();
  heap->set_allocation_timeout(1);
  g->Call(global, 1, args1);
  heap->CollectAllGarbage();
}


void OnFatalErrorExpectOOM(const char* location, const char* message) {
  // Exit with 0 if the location matches our expectation.
  exit(strcmp(location, "CALL_AND_RETRY_LAST"));
}


TEST(CEntryStubOOM) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::V8::SetFatalErrorHandler(OnFatalErrorExpectOOM);

  v8::Handle<v8::Value> result = CompileRun(
      "%SetFlags('--gc-interval=1');"
      "var a = [];"
      "a.__proto__ = [];"
      "a.unshift(1)");

  CHECK(result->IsNumber());
}


static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }


static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
  CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
}


TEST(Regress357137) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope hscope(isolate);
  v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
              v8::FunctionTemplate::New(isolate, RequestInterrupt));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  DCHECK(!context.IsEmpty());
  v8::Context::Scope cscope(context);

  v8::Local<v8::Value> result = CompileRun(
      "var locals = '';"
      "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
      "eval('function f() {' + locals + 'return function() { return v0; }; }');"
      "interrupt();"  // This triggers a fake stack overflow in f.
      "f()()");
  CHECK_EQ(42.0, result->ToNumber(isolate)->Value());
}


TEST(ArrayShiftSweeping) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  v8::Local<v8::Value> result = CompileRun(
      "var array = new Array(40000);"
      "var tmp = new Array(100000);"
      "array[0] = 10;"
      "gc();"
      "gc();"
      "array.shift();"
      "array;");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
  CHECK(heap->InOldSpace(o->elements()));
  CHECK(heap->InOldSpace(*o));
  Page* page = Page::FromAddress(o->elements()->address());
  CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
        Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
}
UNINITIALIZED_TEST(PromotionQueue) {
  i::FLAG_expose_gc = true;
  i::FLAG_max_semi_space_size = 2 * (Page::kPageSize / MB);
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();
    NewSpace* new_space = heap->new_space();

    // In this test we will try to overwrite the promotion queue which is at
    // the end of to-space. To actually make that possible, we need at least
    // two semi-space pages and take advantage of fragmentation.
    // (1) Grow the semi-space to two pages.
    // (2) Create a few small long-lived objects and call the scavenger to
    // move them to the other semi-space.
    // (3) Create a huge object, i.e., the remainder of the first semi-space
    // page, and create another huge object of the maximum allocatable size of
    // the second semi-space page.
    // (4) Call the scavenger again.
    // What will happen is: the scavenger will promote the objects created in
    // (2) and will create promotion queue entries at the end of the second
    // semi-space page during the next scavenge when it promotes the objects to
    // the old generation. The first allocation of (3) will fill up the first
    // semi-space page. The second allocation in (3) will not fit into the
    // first semi-space page, but it will overwrite the promotion queue, which
    // is in the second semi-space page. If the right guards are in place, the
    // promotion queue will be evacuated in that case.

    // Grow the semi-space to two pages to make semi-space copy overwrite the
    // promotion queue, which will be at the end of the second page.
    intptr_t old_capacity = new_space->TotalCapacity();

    // If we are in a low memory config, we can't grow to two pages and we
    // can't run this test. This also means the issue we are testing cannot
    // arise, as there is no fragmentation.
    if (new_space->IsAtMaximumCapacity()) return;

    new_space->Grow();
    CHECK(new_space->IsAtMaximumCapacity());
    CHECK(2 * old_capacity == new_space->TotalCapacity());

    // Call the scavenger twice to get an empty new space.
    heap->CollectGarbage(NEW_SPACE);
    heap->CollectGarbage(NEW_SPACE);

    // First create a few objects which will survive a scavenge, and will get
    // promoted to the old generation later on. These objects will create
    // promotion queue entries at the end of the second semi-space page.
    const int number_handles = 12;
    Handle<FixedArray> handles[number_handles];
    for (int i = 0; i < number_handles; i++) {
      handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
    }
    heap->CollectGarbage(NEW_SPACE);

    // Create the first huge object which will exactly fit the first semi-space
    // page.
    int new_linear_size =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    int length = (new_linear_size - FixedArray::kHeaderSize) / kPointerSize;
    Handle<FixedArray> first =
        i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
    CHECK(heap->InNewSpace(*first));

    // Create the second huge object of the maximum allocatable size of the
    // second semi-space page.
    new_linear_size =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    length = Page::kMaxRegularHeapObjectSize / kPointerSize -
             FixedArray::kHeaderSize;
    Handle<FixedArray> second =
        i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
    CHECK(heap->InNewSpace(*second));

    // This scavenge will corrupt memory if the promotion queue is not
    // evacuated.
    heap->CollectGarbage(NEW_SPACE);
  }
  isolate->Dispose();
}

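// Regression test for crbug.com/388880: migrating an object that ends exactly
// at a page boundary while incremental marking is running used to crash in
// Heap::AdjustLiveBytes().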
TEST(Regress388880) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  Handle<Map> map1 = Map::Create(isolate, 1);
  Handle<Map> map2 =
      Map::CopyWithField(map1, factory->NewStringFromStaticChars("foo"),
                         HeapType::Any(isolate), NONE, Representation::Tagged(),
                         OMIT_TRANSITION).ToHandleChecked();

  int desired_offset = Page::kPageSize - map1->instance_size();

  // Allocate a fixed array in old space so that the object allocated
  // afterwards ends at the end of the page.
  {
    SimulateFullSpace(heap->old_space());
    int padding_size = desired_offset - Page::kObjectStartOffset;
    int padding_array_length =
        (padding_size - FixedArray::kHeaderSize) / kPointerSize;

    Handle<FixedArray> temp2 =
        factory->NewFixedArray(padding_array_length, TENURED);
    Page* page = Page::FromAddress(temp2->address());
    CHECK_EQ(Page::kObjectStartOffset, page->Offset(temp2->address()));
  }

  Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
  o->set_properties(*factory->empty_fixed_array());

  // Ensure that the object was allocated where we need it.
  Page* page = Page::FromAddress(o->address());
  CHECK_EQ(desired_offset, page->Offset(o->address()));

  // Now we have an object right at the end of the page.

  // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
  // that would cause a crash.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Abort();
  marking->Start(Heap::kNoGCFlags);
  CHECK(marking->IsMarking());

  // Now everything is set up for crashing in JSObject::MigrateFastToFast()
  // when it calls heap->AdjustLiveBytes(...).
  JSObject::MigrateToMap(o, map2);
}

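// Regression test for v8:3631: replacing a WeakMap's backing store while the
// old store is being incrementally marked must keep the new store alive.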
TEST(Regress3631) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  v8::Local<v8::Value> result = CompileRun(
      "var weak_map = new WeakMap();"
      "var future_keys = [];"
      "for (var i = 0; i < 50; i++) {"
      "  var key = {'k' : i + 0.1};"
      "  weak_map.set(key, 1);"
      "  future_keys.push({'x' : i + 0.2});"
      "}"
      "weak_map");
  if (marking->IsStopped()) {
    marking->Start(Heap::kNoGCFlags);
  }
  // Incrementally mark the backing store.
  Handle<JSObject> obj =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
  Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
  while (!Marking::IsBlack(
             Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
         !marking->IsStopped()) {
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }
  // Stash the backing store in a handle.
  Handle<Object> save(weak_map->table(), isolate);
  // The following line will update the backing store.
  CompileRun(
      "for (var i = 0; i < 50; i++) {"
      "  weak_map.set(future_keys[i], i);"
      "}");
  heap->incremental_marking()->set_should_hurry(true);
  heap->CollectGarbage(OLD_SPACE);
}

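// Regression test for crbug.com/442710: left-trimming an array reachable from
// the global object via Array.prototype.shift, followed by an old-space GC.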
TEST(Regress442710) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
  Handle<JSArray> array = factory->NewJSArray(2);

  Handle<String> name = factory->InternalizeUtf8String("testArray");
  JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
  CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
  heap->CollectGarbage(OLD_SPACE);
}

TEST(NumberStringCacheSize) {
  // Test that the number-string cache has not been resized in the snapshot.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  if (!isolate->snapshot_available()) return;
  Heap* heap = isolate->heap();
  CHECK_EQ(TestHeap::kInitialNumberStringCacheSize * 2,
           heap->number_string_cache()->length());
}

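// Regression test for v8:3877: a prototype that is only kept alive by the map
// of an instance must be collected once that map becomes unreachable.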
TEST(Regress3877) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  CompileRun("function cls() { this.x = 10; }");
  Handle<WeakCell> weak_prototype;
  {
    HandleScope inner_scope(isolate);
    v8::Local<v8::Value> result = CompileRun("cls.prototype");
    Handle<JSObject> proto =
        v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
    weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
  }
  CHECK(!weak_prototype->cleared());
  CompileRun(
      "var a = { };"
      "a.x = new cls();"
      "cls.prototype = null;");
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }
  // The map of a.x keeps the prototype alive.
  CHECK(!weak_prototype->cleared());
  // Change the map of a.x and make the previous map garbage collectable.
  CompileRun("a.x.__proto__ = {};");
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }
  CHECK(weak_prototype->cleared());
}

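// Creates a map with a freshly allocated prototype, registers it with
// Heap::AddRetainedMap, and returns a WeakCell tracking the map.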
Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
  HandleScope inner_scope(isolate);
  Handle<Map> map = Map::Create(isolate, 1);
  v8::Local<v8::Value> result =
      CompileRun("(function () { return {x : 10}; })();");
  Handle<JSObject> proto =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
  Map::SetPrototype(map, proto);
  heap->AddRetainedMap(map);
  return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
}

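// With --retain-maps-for-n-gc=n, an otherwise unreferenced retained map should
// survive exactly n old-space GCs and be cleared by the following one.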
void CheckMapRetainingFor(int n) {
  FLAG_retain_maps_for_n_gc = n;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
  CHECK(!weak_cell->cleared());
  for (int i = 0; i < n; i++) {
    heap->CollectGarbage(OLD_SPACE);
  }
  CHECK(!weak_cell->cleared());
  heap->CollectGarbage(OLD_SPACE);
  CHECK(weak_cell->cleared());
}

TEST(MapRetaining) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
  CheckMapRetainingFor(0);
  CheckMapRetainingFor(1);
  CheckMapRetainingFor(7);
}

TEST(RegressArrayListGC) {
  FLAG_retain_maps_for_n_gc = 1;
  FLAG_incremental_marking = 0;
  FLAG_gc_global = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  AddRetainedMap(isolate, heap);
  Handle<Map> map = Map::Create(isolate, 1);
  heap->CollectGarbage(OLD_SPACE);
  // Force GC in old space on the next addition of a retained map.
  Map::WeakCellForMap(map);
  SimulateFullSpace(CcTest::heap()->new_space());
  for (int i = 0; i < 10; i++) {
    heap->AddRetainedMap(map);
  }
  heap->CollectGarbage(OLD_SPACE);
}

#ifdef DEBUG
TEST(PathTracer) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  v8::Local<v8::Value> result = CompileRun("'abc'");
  Handle<Object> o = v8::Utils::OpenHandle(*result);
  CcTest::i_isolate()->heap()->TracePathToObject(*o);
}
#endif  // DEBUG

TEST(WritableVsImmortalRoots) {
  for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
    Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
    bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
    bool immortal = Heap::RootIsImmortalImmovable(root_index);
    // A root value can be writable, immortal, or neither, but not both.
    CHECK(!immortal || !writable);
  }
}

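// Right-trims a fixed typed array and verifies that the freed tail is
// replaced by a filler object without smashing the array's header.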
static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
                                         int initial_length,
                                         int elements_to_trim) {
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  Handle<FixedTypedArrayBase> array =
      factory->NewFixedTypedArray(initial_length, type, true);
  int old_size = array->size();
  heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
                                                         elements_to_trim);

  // Check that the free space filler is at the right place and did not smash
  // the next object.
  CHECK(array->IsFixedArrayBase());
  CHECK_EQ(initial_length - elements_to_trim, array->length());
  int new_size = array->size();
  if (new_size != old_size) {
    // A free space filler should have been created in this case.
    Address next_obj_address = array->address() + array->size();
    CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
  }
  heap->CollectAllAvailableGarbage();
}

TEST(Regress472513) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // The combination of type/initial_length/elements_to_trim triggered
  // typed array header smashing with the free space filler (crbug.com/472513).

  // 64-bit cases.
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32 - 7, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16 - 3, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8, 6);
  TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8 - 1, 6);

  // 32-bit cases.
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16 - 3, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8 - 1, 3);
  TestRightTrimFixedTypedArray(i::kExternalUint32Array, 4, 3);
}

TEST(WeakFixedArray) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
  Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
  array->Remove(number);
  array->Compact();
  WeakFixedArray::Add(array, number);
}

TEST(PreprocessStackTrace) {
  // Do not automatically trigger early GC.
  FLAG_gc_interval = -1;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::TryCatch try_catch(CcTest::isolate());
  CompileRun("throw new Error();");
  CHECK(try_catch.HasCaught());
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
  Handle<Name> key = isolate->factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      JSObject::GetProperty(exception, key).ToHandleChecked();
  Handle<Object> code =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(code->IsCode());

  isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");

  Handle<Object> pos =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(pos->IsSmi());

  Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
  int array_length = Smi::cast(stack_trace_array->length())->value();
  for (int i = 0; i < array_length; i++) {
    Handle<Object> element =
        Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
    CHECK(!element->IsCode());
  }
}

static bool utils_has_been_collected = false;

static void UtilsHasBeenCollected(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  utils_has_been_collected = true;
  data.GetParameter()->Reset();
}

TEST(BootstrappingExports) {
  FLAG_expose_natives_as = "natives";
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();

  if (Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return;

  utils_has_been_collected = false;

  v8::Persistent<v8::Object> utils;

  {
    v8::HandleScope scope(isolate);
    v8::Handle<v8::Object> natives =
        CcTest::global()->Get(v8_str("natives"))->ToObject(isolate);
    utils.Reset(isolate, natives->Get(v8_str("utils"))->ToObject(isolate));
    natives->Delete(v8_str("utils"));
  }

  utils.SetWeak(&utils, UtilsHasBeenCollected,
                v8::WeakCallbackType::kParameter);

  CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");

  CHECK(utils_has_been_collected);
}

TEST(Regress1878) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::Function> constructor =
      v8::Utils::ToLocal(CcTest::i_isolate()->internal_array_function());
  CcTest::global()->Set(v8_str("InternalArray"), constructor);

  v8::TryCatch try_catch(isolate);

  CompileRun(
      "var a = Array();"
      "for (var i = 0; i < 1000; i++) {"
      "  var ai = new InternalArray(10000);"
      "  if (%HaveSameMap(ai, a)) throw Error();"
      "  if (!%HasFastObjectElements(ai)) throw Error();"
      "}"
      "for (var i = 0; i < 1000; i++) {"
      "  var ai = new InternalArray(10000);"
      "  if (%HaveSameMap(ai, a)) throw Error();"
      "  if (!%HasFastObjectElements(ai)) throw Error();"
      "}");

  CHECK(!try_catch.HasCaught());
}

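// Allocates exactly |bytes| in the given space, backed by a FixedArray, and
// verifies that the allocation landed where requested.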
void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
  CHECK(bytes >= FixedArray::kHeaderSize);
  CHECK(bytes % kPointerSize == 0);
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  AlwaysAllocateScope always_allocate(isolate);
  int elements =
      static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
  Handle<FixedArray> array = factory->NewFixedArray(
      elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
  CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
  CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
}

TEST(NewSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->NewSpaceAllocationCounter();
  heap->CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, NEW_SPACE);
  size_t counter2 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(kSize, counter2 - counter1);
  heap->CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(0U, counter3 - counter2);
  // Test counter overflow.
  size_t max_counter = -1;
  heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->NewSpaceAllocationCounter();
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, NEW_SPACE);
    size_t counter = heap->NewSpaceAllocationCounter();
    CHECK_EQ(kSize, counter - start);
    start = counter;
  }
}

TEST(OldSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->OldGenerationAllocationCounter();
  heap->CollectGarbage(NEW_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  size_t counter2 = heap->OldGenerationAllocationCounter();
  // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
  CHECK_LE(kSize, counter2 - counter1);
  heap->CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->OldGenerationAllocationCounter();
  CHECK_EQ(0u, counter3 - counter2);
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  heap->CollectGarbage(OLD_SPACE);
  size_t counter4 = heap->OldGenerationAllocationCounter();
  CHECK_LE(kSize, counter4 - counter3);
  // Test counter overflow.
  size_t max_counter = -1;
  heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->OldGenerationAllocationCounter();
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, OLD_SPACE);
    size_t counter = heap->OldGenerationAllocationCounter();
    CHECK_LE(kSize, counter - start);
    start = counter;
  }
}

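// The throughput tests below feed synthetic (time, counter) samples to the GC
// tracer and verify the computed bytes-per-millisecond rates.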
TEST(NewSpaceAllocationThroughput) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, counter1, 0);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, counter2, 0);
  size_t throughput =
      tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
  CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, counter3, 0);
  throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
  CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
}

TEST(NewSpaceAllocationThroughput2) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, counter1, 0);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, counter2, 0);
  size_t throughput =
      tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
  CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, counter3, 0);
  throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
  CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
}

static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Object* message =
      *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
  CHECK(message->IsTheHole());
}

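// After leaving a try/catch or try/finally block normally, the isolate's
// pending message object must be cleared, in both unoptimized and optimized
// code; otherwise it would leak.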
TEST(MessageObjectLeak) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(v8::String::NewFromUtf8(isolate, "check"),
              v8::FunctionTemplate::New(isolate, CheckLeak));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);

  const char* test =
      "try {"
      "  throw 'message 1';"
      "} catch (e) {"
      "}"
      "check();"
      "L: try {"
      "  throw 'message 2';"
      "} finally {"
      "  break L;"
      "}"
      "check();";
  CompileRun(test);

  const char* flag = "--turbo-filter=*";
  FlagList::SetFlagsFromString(flag, StrLength(flag));
  FLAG_always_opt = true;
  FLAG_turbo_try_catch = true;
  FLAG_turbo_try_finally = true;

  CompileRun(test);
}

static void CheckEqualSharedFunctionInfos(
    const v8::FunctionCallbackInfo<v8::Value>& args) {
  Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
  Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
  Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
  Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
  CHECK(fun1->shared() == fun2->shared());
}

static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
  Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
  fun->ReplaceCode(*isolate->builtins()->CompileLazy());
  fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
  isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
}

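// Recompiling a function whose code has been flushed must find the canonical
// SharedFunctionInfo again rather than creating a duplicate.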
TEST(CanonicalSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(isolate, "check", v8::FunctionTemplate::New(
                                    isolate, CheckEqualSharedFunctionInfos));
  global->Set(isolate, "remove",
              v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  v8::Context::Scope cscope(context);
  CompileRun(
      "function f() { return function g() {}; }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");

  CompileRun(
      "function f() { return (function() { return function g() {}; })(); }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");
}

TEST(OldGenerationAllocationThroughput) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, 0, counter1);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, 0, counter2);
  size_t throughput =
      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
  CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, 0, counter3);
  throughput =
      tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
  CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
}

TEST(AllocationThroughput) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  GCTracer* tracer = heap->tracer();
  int time1 = 100;
  size_t counter1 = 1000;
  tracer->SampleAllocation(time1, counter1, counter1);
  int time2 = 200;
  size_t counter2 = 2000;
  tracer->SampleAllocation(time2, counter2, counter2);
  size_t throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
  CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
  int time3 = 1000;
  size_t counter3 = 30000;
  tracer->SampleAllocation(time3, counter3, counter3);
  throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
  CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);
}

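// SlotsBuffer::RemoveObjectSlots must clear both untyped and typed slots that
// point into the given address range, replacing them with a removed-entry
// sentinel.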
TEST(SlotsBufferObjectSlotsRemoval) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  SlotsBuffer* buffer = new SlotsBuffer(NULL);
  void* fake_object[1];

  Handle<FixedArray> array = factory->NewFixedArray(2, TENURED);
  CHECK(heap->old_space()->Contains(*array));
  array->set(0, reinterpret_cast<Object*>(fake_object), SKIP_WRITE_BARRIER);

  // First, test the regular slots buffer entry.
  buffer->Add(HeapObject::RawField(*array, FixedArray::kHeaderSize));
  DCHECK(reinterpret_cast<void*>(buffer->Get(0)) ==
         HeapObject::RawField(*array, FixedArray::kHeaderSize));
  SlotsBuffer::RemoveObjectSlots(CcTest::i_isolate()->heap(), buffer,
                                 array->address(),
                                 array->address() + array->Size());
  DCHECK(reinterpret_cast<void*>(buffer->Get(0)) ==
         HeapObject::RawField(heap->empty_fixed_array(),
                              FixedArrayBase::kLengthOffset));

  // Second, test the typed slots buffer entry.
  SlotsBuffer::AddTo(NULL, &buffer, SlotsBuffer::EMBEDDED_OBJECT_SLOT,
                     array->address() + FixedArray::kHeaderSize,
                     SlotsBuffer::FAIL_ON_OVERFLOW);
  DCHECK(reinterpret_cast<void*>(buffer->Get(1)) ==
         reinterpret_cast<Object**>(SlotsBuffer::EMBEDDED_OBJECT_SLOT));
  DCHECK(reinterpret_cast<void*>(buffer->Get(2)) ==
         HeapObject::RawField(*array, FixedArray::kHeaderSize));
  SlotsBuffer::RemoveObjectSlots(CcTest::i_isolate()->heap(), buffer,
                                 array->address(),
                                 array->address() + array->Size());
  DCHECK(reinterpret_cast<void*>(buffer->Get(1)) ==
         HeapObject::RawField(heap->empty_fixed_array(),
                              FixedArrayBase::kLengthOffset));
  DCHECK(reinterpret_cast<void*>(buffer->Get(2)) ==
         HeapObject::RawField(heap->empty_fixed_array(),
                              FixedArrayBase::kLengthOffset));
  delete buffer;
}