1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "src/compilation-cache.h"
32 #include "src/context-measure.h"
33 #include "src/deoptimizer.h"
34 #include "src/execution.h"
35 #include "src/factory.h"
36 #include "src/global-handles.h"
37 #include "src/heap/gc-tracer.h"
38 #include "src/ic/ic.h"
39 #include "src/macro-assembler.h"
40 #include "src/snapshot/snapshot.h"
41 #include "test/cctest/cctest.h"
42 #include "test/cctest/heap-tester.h"
49 static void CheckMap(Map* map, int type, int instance_size) {
50 CHECK(map->IsHeapObject());
52 CHECK(CcTest::heap()->Contains(map));
54 CHECK_EQ(CcTest::heap()->meta_map(), map->map());
55 CHECK_EQ(type, map->instance_type());
56 CHECK_EQ(instance_size, map->instance_size());
61 CcTest::InitializeVM();
62 Heap* heap = CcTest::heap();
63 CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
64 CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
65 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
66 CheckMap(heap->type##_map(), SIMD128_VALUE_TYPE, Type::kSize);
67 SIMD128_TYPES(SIMD128_TYPE)
69 CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
70 CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
74 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
75 CHECK(obj->IsOddball());
76 Handle<Object> handle(obj, isolate);
77 Object* print_string =
78 *Execution::ToString(isolate, handle).ToHandleChecked();
79 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
83 static void CheckSmi(Isolate* isolate, int value, const char* string) {
84 Handle<Object> handle(Smi::FromInt(value), isolate);
85 Object* print_string =
86 *Execution::ToString(isolate, handle).ToHandleChecked();
87 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
91 static void CheckNumber(Isolate* isolate, double value, const char* string) {
92 Handle<Object> number = isolate->factory()->NewNumber(value);
93 CHECK(number->IsNumber());
94 Handle<Object> print_string =
95 Execution::ToString(isolate, number).ToHandleChecked();
96 CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
100 static void CheckFindCodeObject(Isolate* isolate) {
101 // Test FindCodeObject
104 Assembler assm(isolate, NULL, 0);
106 __ nop(); // supported on all architectures
110 Handle<Code> code = isolate->factory()->NewCode(
111 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
112 CHECK(code->IsCode());
114 HeapObject* obj = HeapObject::cast(*code);
115 Address obj_addr = obj->address();
117 for (int i = 0; i < obj->Size(); i += kPointerSize) {
118 Object* found = isolate->FindCodeObject(obj_addr + i);
119 CHECK_EQ(*code, found);
122 Handle<Code> copy = isolate->factory()->NewCode(
123 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
124 HeapObject* obj_copy = HeapObject::cast(*copy);
125 Object* not_right = isolate->FindCodeObject(obj_copy->address() +
126 obj_copy->Size() / 2);
127 CHECK(not_right != *code);
132 CcTest::InitializeVM();
133 Isolate* isolate = CcTest::i_isolate();
134 HandleScope outer_scope(isolate);
135 LocalContext context;
136 Handle<Object> n(static_cast<Object*>(nullptr), isolate);
142 CcTest::InitializeVM();
143 Isolate* isolate = CcTest::i_isolate();
144 Factory* factory = isolate->factory();
145 Heap* heap = isolate->heap();
147 HandleScope sc(isolate);
148 Handle<Object> value = factory->NewNumber(1.000123);
149 CHECK(value->IsHeapNumber());
150 CHECK(value->IsNumber());
151 CHECK_EQ(1.000123, value->Number());
153 value = factory->NewNumber(1.0);
154 CHECK(value->IsSmi());
155 CHECK(value->IsNumber());
156 CHECK_EQ(1.0, value->Number());
158 value = factory->NewNumberFromInt(1024);
159 CHECK(value->IsSmi());
160 CHECK(value->IsNumber());
161 CHECK_EQ(1024.0, value->Number());
163 value = factory->NewNumberFromInt(Smi::kMinValue);
164 CHECK(value->IsSmi());
165 CHECK(value->IsNumber());
166 CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
168 value = factory->NewNumberFromInt(Smi::kMaxValue);
169 CHECK(value->IsSmi());
170 CHECK(value->IsNumber());
171 CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
173 #if !defined(V8_TARGET_ARCH_64_BIT)
174 // TODO(lrn): We need a NumberFromIntptr function in order to test this.
175 value = factory->NewNumberFromInt(Smi::kMinValue - 1);
176 CHECK(value->IsHeapNumber());
177 CHECK(value->IsNumber());
178 CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
181 value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
182 CHECK(value->IsHeapNumber());
183 CHECK(value->IsNumber());
184 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
187 value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
188 CHECK(value->IsHeapNumber());
189 CHECK(value->IsNumber());
190 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
193 // nan oddball checks
194 CHECK(factory->nan_value()->IsNumber());
195 CHECK(std::isnan(factory->nan_value()->Number()));
197 Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
198 CHECK(s->IsString());
199 CHECK_EQ(10, s->length());
201 Handle<String> object_string = Handle<String>::cast(factory->Object_string());
202 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
203 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));
205 // Check ToString for oddballs
206 CheckOddball(isolate, heap->true_value(), "true");
207 CheckOddball(isolate, heap->false_value(), "false");
208 CheckOddball(isolate, heap->null_value(), "null");
209 CheckOddball(isolate, heap->undefined_value(), "undefined");
211 // Check ToString for Smis
212 CheckSmi(isolate, 0, "0");
213 CheckSmi(isolate, 42, "42");
214 CheckSmi(isolate, -42, "-42");
216 // Check ToString for Numbers
217 CheckNumber(isolate, 1.1, "1.1");
219 CheckFindCodeObject(isolate);
223 template <typename T, typename LANE_TYPE, int LANES>
224 static void CheckSimdValue(T* value, LANE_TYPE lane_values[LANES],
225 LANE_TYPE other_value) {
226 // Check against lane_values, and check that all lanes can be set to
227 // other_value without disturbing the other lanes.
228 for (int i = 0; i < LANES; i++) {
229 CHECK_EQ(lane_values[i], value->get_lane(i));
231 for (int i = 0; i < LANES; i++) {
232 value->set_lane(i, other_value); // change the value
233 for (int j = 0; j < LANES; j++) {
235 CHECK_EQ(lane_values[j], value->get_lane(j));
237 CHECK_EQ(other_value, value->get_lane(j));
239 value->set_lane(i, lane_values[i]); // restore the lane
241 CHECK(value->BooleanValue()); // SIMD values are 'true'.
246 CcTest::InitializeVM();
247 Isolate* isolate = CcTest::i_isolate();
248 Factory* factory = isolate->factory();
250 HandleScope sc(isolate);
254 float lanes[4] = {1, 2, 3, 4};
255 float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
256 float signaling_NaN = std::numeric_limits<float>::signaling_NaN();
258 Handle<Float32x4> value = factory->NewFloat32x4(lanes);
259 CHECK(value->IsFloat32x4());
260 CheckSimdValue<Float32x4, float, 4>(*value, lanes, 3.14f);
262 // Check special lane values.
263 value->set_lane(1, -0.0);
264 CHECK_EQ(-0.0, value->get_lane(1));
265 CHECK(std::signbit(value->get_lane(1))); // Sign bit should be preserved.
266 value->set_lane(2, quiet_NaN);
267 CHECK(std::isnan(value->get_lane(2)));
268 value->set_lane(3, signaling_NaN);
269 CHECK(std::isnan(value->get_lane(3)));
272 // Check value printing.
274 value = factory->NewFloat32x4(lanes);
275 std::ostringstream os;
276 value->Float32x4Print(os);
277 CHECK_EQ("1, 2, 3, 4", os.str());
280 float special_lanes[4] = {0, -0.0, quiet_NaN, signaling_NaN};
281 value = factory->NewFloat32x4(special_lanes);
282 std::ostringstream os;
283 value->Float32x4Print(os);
284 // Value printing doesn't preserve signed zeroes.
285 CHECK_EQ("0, 0, NaN, NaN", os.str());
287 #endif // OBJECT_PRINT
291 int32_t lanes[4] = {1, 2, 3, 4};
293 Handle<Int32x4> value = factory->NewInt32x4(lanes);
294 CHECK(value->IsInt32x4());
295 CheckSimdValue<Int32x4, int32_t, 4>(*value, lanes, 3);
298 std::ostringstream os;
299 value->Int32x4Print(os);
300 CHECK_EQ("1, 2, 3, 4", os.str());
301 #endif // OBJECT_PRINT
305 uint32_t lanes[4] = {1, 2, 3, 4};
307 Handle<Uint32x4> value = factory->NewUint32x4(lanes);
308 CHECK(value->IsUint32x4());
309 CheckSimdValue<Uint32x4, uint32_t, 4>(*value, lanes, 3);
312 std::ostringstream os;
313 value->Uint32x4Print(os);
314 CHECK_EQ("1, 2, 3, 4", os.str());
315 #endif // OBJECT_PRINT
319 bool lanes[4] = {true, false, true, false};
321 Handle<Bool32x4> value = factory->NewBool32x4(lanes);
322 CHECK(value->IsBool32x4());
323 CheckSimdValue<Bool32x4, bool, 4>(*value, lanes, false);
326 std::ostringstream os;
327 value->Bool32x4Print(os);
328 CHECK_EQ("true, false, true, false", os.str());
329 #endif // OBJECT_PRINT
333 int16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};
335 Handle<Int16x8> value = factory->NewInt16x8(lanes);
336 CHECK(value->IsInt16x8());
337 CheckSimdValue<Int16x8, int16_t, 8>(*value, lanes, 32767);
340 std::ostringstream os;
341 value->Int16x8Print(os);
342 CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
343 #endif // OBJECT_PRINT
347 uint16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};
349 Handle<Uint16x8> value = factory->NewUint16x8(lanes);
350 CHECK(value->IsUint16x8());
351 CheckSimdValue<Uint16x8, uint16_t, 8>(*value, lanes, 32767);
354 std::ostringstream os;
355 value->Uint16x8Print(os);
356 CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
357 #endif // OBJECT_PRINT
361 bool lanes[8] = {true, false, true, false, true, false, true, false};
363 Handle<Bool16x8> value = factory->NewBool16x8(lanes);
364 CHECK(value->IsBool16x8());
365 CheckSimdValue<Bool16x8, bool, 8>(*value, lanes, false);
368 std::ostringstream os;
369 value->Bool16x8Print(os);
370 CHECK_EQ("true, false, true, false, true, false, true, false", os.str());
371 #endif // OBJECT_PRINT
375 int8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
377 Handle<Int8x16> value = factory->NewInt8x16(lanes);
378 CHECK(value->IsInt8x16());
379 CheckSimdValue<Int8x16, int8_t, 16>(*value, lanes, 127);
382 std::ostringstream os;
383 value->Int8x16Print(os);
384 CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
385 #endif // OBJECT_PRINT
389 uint8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
391 Handle<Uint8x16> value = factory->NewUint8x16(lanes);
392 CHECK(value->IsUint8x16());
393 CheckSimdValue<Uint8x16, uint8_t, 16>(*value, lanes, 127);
396 std::ostringstream os;
397 value->Uint8x16Print(os);
398 CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
399 #endif // OBJECT_PRINT
403 bool lanes[16] = {true, false, true, false, true, false, true, false,
404 true, false, true, false, true, false, true, false};
406 Handle<Bool8x16> value = factory->NewBool8x16(lanes);
407 CHECK(value->IsBool8x16());
408 CheckSimdValue<Bool8x16, bool, 16>(*value, lanes, false);
411 std::ostringstream os;
412 value->Bool8x16Print(os);
414 "true, false, true, false, true, false, true, false, true, false, "
415 "true, false, true, false, true, false",
417 #endif // OBJECT_PRINT
423 CcTest::InitializeVM();
425 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
426 CHECK(Smi::FromInt(42)->IsSmi());
427 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
428 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
432 TEST(GarbageCollection) {
433 CcTest::InitializeVM();
434 Isolate* isolate = CcTest::i_isolate();
435 Heap* heap = isolate->heap();
436 Factory* factory = isolate->factory();
438 HandleScope sc(isolate);
440 heap->CollectGarbage(NEW_SPACE);
442 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
443 Handle<String> name = factory->InternalizeUtf8String("theFunction");
444 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
445 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
446 Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
447 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
448 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
451 HandleScope inner_scope(isolate);
452 // Allocate a function and keep it in global object's property.
453 Handle<JSFunction> function = factory->NewFunction(name);
454 JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
455 // Allocate an object. Unrooted after leaving the scope.
456 Handle<JSObject> obj = factory->NewJSObject(function);
457 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
458 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
460 CHECK_EQ(Smi::FromInt(23),
461 *Object::GetProperty(obj, prop_name).ToHandleChecked());
462 CHECK_EQ(Smi::FromInt(24),
463 *Object::GetProperty(obj, prop_namex).ToHandleChecked());
466 heap->CollectGarbage(NEW_SPACE);
468 // Function should be alive.
469 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
470 // Check function is retained.
471 Handle<Object> func_value =
472 Object::GetProperty(global, name).ToHandleChecked();
473 CHECK(func_value->IsJSFunction());
474 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
477 HandleScope inner_scope(isolate);
478 // Allocate another object, make it reachable from global.
479 Handle<JSObject> obj = factory->NewJSObject(function);
480 JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
481 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
484 // After gc, it should survive.
485 heap->CollectGarbage(NEW_SPACE);
487 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
489 Object::GetProperty(global, obj_name).ToHandleChecked();
490 CHECK(obj->IsJSObject());
491 CHECK_EQ(Smi::FromInt(23),
492 *Object::GetProperty(obj, prop_name).ToHandleChecked());
496 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
497 HandleScope scope(isolate);
498 Handle<String> s = isolate->factory()->NewStringFromUtf8(
499 CStrVector(string)).ToHandleChecked();
500 CHECK_EQ(StrLength(string), s->length());
501 for (int index = 0; index < s->length(); index++) {
502 CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
508 CcTest::InitializeVM();
509 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
511 VerifyStringAllocation(isolate, "a");
512 VerifyStringAllocation(isolate, "ab");
513 VerifyStringAllocation(isolate, "abc");
514 VerifyStringAllocation(isolate, "abcd");
515 VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
520 CcTest::InitializeVM();
521 Isolate* isolate = CcTest::i_isolate();
522 Factory* factory = isolate->factory();
524 v8::HandleScope scope(CcTest::isolate());
525 const char* name = "Kasper the spunky";
526 Handle<String> string = factory->NewStringFromAsciiChecked(name);
527 CHECK_EQ(StrLength(name), string->length());
531 TEST(GlobalHandles) {
532 CcTest::InitializeVM();
533 Isolate* isolate = CcTest::i_isolate();
534 Heap* heap = isolate->heap();
535 Factory* factory = isolate->factory();
536 GlobalHandles* global_handles = isolate->global_handles();
544 HandleScope scope(isolate);
546 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
547 Handle<Object> u = factory->NewNumber(1.12344);
549 h1 = global_handles->Create(*i);
550 h2 = global_handles->Create(*u);
551 h3 = global_handles->Create(*i);
552 h4 = global_handles->Create(*u);
555 // after gc, it should survive
556 heap->CollectGarbage(NEW_SPACE);
558 CHECK((*h1)->IsString());
559 CHECK((*h2)->IsHeapNumber());
560 CHECK((*h3)->IsString());
561 CHECK((*h4)->IsHeapNumber());
564 GlobalHandles::Destroy(h1.location());
565 GlobalHandles::Destroy(h3.location());
568 GlobalHandles::Destroy(h2.location());
569 GlobalHandles::Destroy(h4.location());
573 static bool WeakPointerCleared = false;
575 static void TestWeakGlobalHandleCallback(
576 const v8::WeakCallbackData<v8::Value, void>& data) {
577 std::pair<v8::Persistent<v8::Value>*, int>* p =
578 reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
579 data.GetParameter());
580 if (p->second == 1234) WeakPointerCleared = true;
585 TEST(WeakGlobalHandlesScavenge) {
586 i::FLAG_stress_compaction = false;
587 CcTest::InitializeVM();
588 Isolate* isolate = CcTest::i_isolate();
589 Heap* heap = isolate->heap();
590 Factory* factory = isolate->factory();
591 GlobalHandles* global_handles = isolate->global_handles();
593 WeakPointerCleared = false;
599 HandleScope scope(isolate);
601 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
602 Handle<Object> u = factory->NewNumber(1.12344);
604 h1 = global_handles->Create(*i);
605 h2 = global_handles->Create(*u);
608 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
609 GlobalHandles::MakeWeak(h2.location(),
610 reinterpret_cast<void*>(&handle_and_id),
611 &TestWeakGlobalHandleCallback);
613 // Scavenge treats weak pointers as normal roots.
614 heap->CollectGarbage(NEW_SPACE);
616 CHECK((*h1)->IsString());
617 CHECK((*h2)->IsHeapNumber());
619 CHECK(!WeakPointerCleared);
620 CHECK(!global_handles->IsNearDeath(h2.location()));
621 CHECK(!global_handles->IsNearDeath(h1.location()));
623 GlobalHandles::Destroy(h1.location());
624 GlobalHandles::Destroy(h2.location());
628 TEST(WeakGlobalHandlesMark) {
629 CcTest::InitializeVM();
630 Isolate* isolate = CcTest::i_isolate();
631 Heap* heap = isolate->heap();
632 Factory* factory = isolate->factory();
633 GlobalHandles* global_handles = isolate->global_handles();
635 WeakPointerCleared = false;
641 HandleScope scope(isolate);
643 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
644 Handle<Object> u = factory->NewNumber(1.12344);
646 h1 = global_handles->Create(*i);
647 h2 = global_handles->Create(*u);
650 // Make sure the objects are promoted.
651 heap->CollectGarbage(OLD_SPACE);
652 heap->CollectGarbage(NEW_SPACE);
653 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
655 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
656 GlobalHandles::MakeWeak(h2.location(),
657 reinterpret_cast<void*>(&handle_and_id),
658 &TestWeakGlobalHandleCallback);
659 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
660 CHECK(!GlobalHandles::IsNearDeath(h2.location()));
662 // Incremental marking potentially marked handles before they turned weak.
663 heap->CollectAllGarbage();
665 CHECK((*h1)->IsString());
667 CHECK(WeakPointerCleared);
668 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
670 GlobalHandles::Destroy(h1.location());
674 TEST(DeleteWeakGlobalHandle) {
675 i::FLAG_stress_compaction = false;
676 CcTest::InitializeVM();
677 Isolate* isolate = CcTest::i_isolate();
678 Heap* heap = isolate->heap();
679 Factory* factory = isolate->factory();
680 GlobalHandles* global_handles = isolate->global_handles();
682 WeakPointerCleared = false;
687 HandleScope scope(isolate);
689 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
690 h = global_handles->Create(*i);
693 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
694 GlobalHandles::MakeWeak(h.location(),
695 reinterpret_cast<void*>(&handle_and_id),
696 &TestWeakGlobalHandleCallback);
698 // Scanvenge does not recognize weak reference.
699 heap->CollectGarbage(NEW_SPACE);
701 CHECK(!WeakPointerCleared);
703 // Mark-compact treats weak reference properly.
704 heap->CollectGarbage(OLD_SPACE);
706 CHECK(WeakPointerCleared);
710 TEST(BytecodeArray) {
711 static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
712 static const int kRawBytesSize = sizeof(kRawBytes);
713 static const int kFrameSize = 32;
714 static const int kParameterCount = 2;
716 i::FLAG_manual_evacuation_candidates_selection = true;
717 CcTest::InitializeVM();
718 Isolate* isolate = CcTest::i_isolate();
719 Heap* heap = isolate->heap();
720 Factory* factory = isolate->factory();
721 HandleScope scope(isolate);
723 SimulateFullSpace(heap->old_space());
724 Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
725 for (int i = 0; i < 5; i++) {
726 Handle<Object> number = factory->NewHeapNumber(i);
727 constant_pool->set(i, *number);
730 // Allocate and initialize BytecodeArray
731 Handle<BytecodeArray> array = factory->NewBytecodeArray(
732 kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
734 CHECK(array->IsBytecodeArray());
735 CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
736 CHECK_EQ(array->frame_size(), kFrameSize);
737 CHECK_EQ(array->parameter_count(), kParameterCount);
738 CHECK_EQ(array->constant_pool(), *constant_pool);
739 CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
740 CHECK_GE(array->address() + array->BytecodeArraySize(),
741 array->GetFirstBytecodeAddress() + array->length());
742 for (int i = 0; i < kRawBytesSize; i++) {
743 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
744 CHECK_EQ(array->get(i), kRawBytes[i]);
747 FixedArray* old_constant_pool_address = *constant_pool;
749 // Perform a full garbage collection and force the constant pool to be on an
750 // evacuation candidate.
751 Page* evac_page = Page::FromAddress(constant_pool->address());
752 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
753 heap->CollectAllGarbage();
755 // BytecodeArray should survive.
756 CHECK_EQ(array->length(), kRawBytesSize);
757 CHECK_EQ(array->frame_size(), kFrameSize);
758 for (int i = 0; i < kRawBytesSize; i++) {
759 CHECK_EQ(array->get(i), kRawBytes[i]);
760 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
763 // Constant pool should have been migrated.
764 CHECK_EQ(array->constant_pool(), *constant_pool);
765 CHECK_NE(array->constant_pool(), old_constant_pool_address);
769 static const char* not_so_random_string_table[] = {
833 static void CheckInternalizedStrings(const char** strings) {
834 Isolate* isolate = CcTest::i_isolate();
835 Factory* factory = isolate->factory();
836 for (const char* string = *strings; *strings != 0; string = *strings++) {
837 HandleScope scope(isolate);
839 isolate->factory()->InternalizeUtf8String(CStrVector(string));
840 // InternalizeUtf8String may return a failure if a GC is needed.
841 CHECK(a->IsInternalizedString());
842 Handle<String> b = factory->InternalizeUtf8String(string);
844 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
845 b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
847 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
853 CcTest::InitializeVM();
855 v8::HandleScope sc(CcTest::isolate());
856 CheckInternalizedStrings(not_so_random_string_table);
857 CheckInternalizedStrings(not_so_random_string_table);
861 TEST(FunctionAllocation) {
862 CcTest::InitializeVM();
863 Isolate* isolate = CcTest::i_isolate();
864 Factory* factory = isolate->factory();
866 v8::HandleScope sc(CcTest::isolate());
867 Handle<String> name = factory->InternalizeUtf8String("theFunction");
868 Handle<JSFunction> function = factory->NewFunction(name);
870 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
871 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
873 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
874 Handle<JSObject> obj = factory->NewJSObject(function);
875 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
876 CHECK_EQ(Smi::FromInt(23),
877 *Object::GetProperty(obj, prop_name).ToHandleChecked());
878 // Check that we can add properties to function objects.
879 JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
880 CHECK_EQ(Smi::FromInt(24),
881 *Object::GetProperty(function, prop_name).ToHandleChecked());
885 TEST(ObjectProperties) {
886 CcTest::InitializeVM();
887 Isolate* isolate = CcTest::i_isolate();
888 Factory* factory = isolate->factory();
890 v8::HandleScope sc(CcTest::isolate());
891 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
892 Handle<Object> object = Object::GetProperty(
893 CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
894 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
895 Handle<JSObject> obj = factory->NewJSObject(constructor);
896 Handle<String> first = factory->InternalizeUtf8String("first");
897 Handle<String> second = factory->InternalizeUtf8String("second");
899 Handle<Smi> one(Smi::FromInt(1), isolate);
900 Handle<Smi> two(Smi::FromInt(2), isolate);
903 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
906 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
907 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
910 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
911 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
913 // add first and then second
914 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
915 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
916 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
917 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
919 // delete first and then second
920 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
921 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
922 JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
923 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
924 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
926 // add first and then second
927 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
928 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
929 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
930 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
932 // delete second and then first
933 JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
934 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
935 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
936 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
937 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
939 // check string and internalized string match
940 const char* string1 = "fisk";
941 Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
942 JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
943 Handle<String> s1_string = factory->InternalizeUtf8String(string1);
944 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));
946 // check internalized string and string match
947 const char* string2 = "fugl";
948 Handle<String> s2_string = factory->InternalizeUtf8String(string2);
949 JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
950 Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
951 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
956 CcTest::InitializeVM();
957 Isolate* isolate = CcTest::i_isolate();
958 Factory* factory = isolate->factory();
960 v8::HandleScope sc(CcTest::isolate());
961 Handle<String> name = factory->InternalizeUtf8String("theFunction");
962 Handle<JSFunction> function = factory->NewFunction(name);
964 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
965 Handle<JSObject> obj = factory->NewJSObject(function);
966 Handle<Map> initial_map(function->initial_map());
969 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
970 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
971 CHECK_EQ(Smi::FromInt(23),
972 *Object::GetProperty(obj, prop_name).ToHandleChecked());
974 // Check the map has changed
975 CHECK(*initial_map != obj->map());
980 CcTest::InitializeVM();
981 Isolate* isolate = CcTest::i_isolate();
982 Factory* factory = isolate->factory();
984 v8::HandleScope sc(CcTest::isolate());
985 Handle<String> name = factory->InternalizeUtf8String("Array");
986 Handle<Object> fun_obj = Object::GetProperty(
987 CcTest::i_isolate()->global_object(), name).ToHandleChecked();
988 Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
990 // Allocate the object.
991 Handle<Object> element;
992 Handle<JSObject> object = factory->NewJSObject(function);
993 Handle<JSArray> array = Handle<JSArray>::cast(object);
994 // We just initialized the VM, no heap allocation failure yet.
995 JSArray::Initialize(array, 0);
997 // Set array length to 0.
998 JSArray::SetLength(array, 0);
999 CHECK_EQ(Smi::FromInt(0), array->length());
1000 // Must be in fast mode.
1001 CHECK(array->HasFastSmiOrObjectElements());
1003 // array[length] = name.
1004 JSReceiver::SetElement(isolate, array, 0, name, SLOPPY).Check();
1005 CHECK_EQ(Smi::FromInt(1), array->length());
1006 element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
1007 CHECK_EQ(*element, *name);
1009 // Set array length with larger than smi value.
1010 JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);
1012 uint32_t int_length = 0;
1013 CHECK(array->length()->ToArrayIndex(&int_length));
1014 CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
1015 CHECK(array->HasDictionaryElements()); // Must be in slow mode.
1017 // array[length] = name.
1018 JSReceiver::SetElement(isolate, array, int_length, name, SLOPPY).Check();
1019 uint32_t new_int_length = 0;
1020 CHECK(array->length()->ToArrayIndex(&new_int_length));
1021 CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
1022 element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
1023 CHECK_EQ(*element, *name);
1024 element = Object::GetElement(isolate, array, 0).ToHandleChecked();
1025 CHECK_EQ(*element, *name);
1029 TEST(JSObjectCopy) {
1030 CcTest::InitializeVM();
1031 Isolate* isolate = CcTest::i_isolate();
1032 Factory* factory = isolate->factory();
1034 v8::HandleScope sc(CcTest::isolate());
1035 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
1036 Handle<Object> object = Object::GetProperty(
1037 CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
1038 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
1039 Handle<JSObject> obj = factory->NewJSObject(constructor);
1040 Handle<String> first = factory->InternalizeUtf8String("first");
1041 Handle<String> second = factory->InternalizeUtf8String("second");
1043 Handle<Smi> one(Smi::FromInt(1), isolate);
1044 Handle<Smi> two(Smi::FromInt(2), isolate);
1046 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
1047 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
1049 JSReceiver::SetElement(isolate, obj, 0, first, SLOPPY).Check();
1050 JSReceiver::SetElement(isolate, obj, 1, second, SLOPPY).Check();
1053 Handle<Object> value1, value2;
1054 Handle<JSObject> clone = factory->CopyJSObject(obj);
1055 CHECK(!clone.is_identical_to(obj));
1057 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
1058 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
1059 CHECK_EQ(*value1, *value2);
1060 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1061 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1062 CHECK_EQ(*value1, *value2);
1064 value1 = Object::GetProperty(obj, first).ToHandleChecked();
1065 value2 = Object::GetProperty(clone, first).ToHandleChecked();
1066 CHECK_EQ(*value1, *value2);
1067 value1 = Object::GetProperty(obj, second).ToHandleChecked();
1068 value2 = Object::GetProperty(clone, second).ToHandleChecked();
1069 CHECK_EQ(*value1, *value2);
1072 JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
1073 JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();
1075 JSReceiver::SetElement(isolate, clone, 0, second, SLOPPY).Check();
1076 JSReceiver::SetElement(isolate, clone, 1, first, SLOPPY).Check();
1078 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1079 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
1080 CHECK_EQ(*value1, *value2);
1081 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
1082 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1083 CHECK_EQ(*value1, *value2);
1085 value1 = Object::GetProperty(obj, second).ToHandleChecked();
1086 value2 = Object::GetProperty(clone, first).ToHandleChecked();
1087 CHECK_EQ(*value1, *value2);
1088 value1 = Object::GetProperty(obj, first).ToHandleChecked();
1089 value2 = Object::GetProperty(clone, second).ToHandleChecked();
1090 CHECK_EQ(*value1, *value2);
1094 TEST(StringAllocation) {
1095 CcTest::InitializeVM();
1096 Isolate* isolate = CcTest::i_isolate();
1097 Factory* factory = isolate->factory();
1099 const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
1100 for (int length = 0; length < 100; length++) {
1101 v8::HandleScope scope(CcTest::isolate());
1102 char* non_one_byte = NewArray<char>(3 * length + 1);
1103 char* one_byte = NewArray<char>(length + 1);
1104 non_one_byte[3 * length] = 0;
1105 one_byte[length] = 0;
1106 for (int i = 0; i < length; i++) {
1108 non_one_byte[3 * i] = chars[0];
1109 non_one_byte[3 * i + 1] = chars[1];
1110 non_one_byte[3 * i + 2] = chars[2];
1112 Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
1113 Vector<const char>(non_one_byte, 3 * length));
1114 CHECK_EQ(length, non_one_byte_sym->length());
1115 Handle<String> one_byte_sym =
1116 factory->InternalizeOneByteString(OneByteVector(one_byte, length));
1117 CHECK_EQ(length, one_byte_sym->length());
1118 Handle<String> non_one_byte_str =
1119 factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
1121 non_one_byte_str->Hash();
1122 CHECK_EQ(length, non_one_byte_str->length());
1123 Handle<String> one_byte_str =
1124 factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
1126 one_byte_str->Hash();
1127 CHECK_EQ(length, one_byte_str->length());
1128 DeleteArray(non_one_byte);
1129 DeleteArray(one_byte);
1134 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1135 // Count the number of objects found in the heap.
1136 int found_count = 0;
1137 HeapIterator iterator(heap);
1138 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1139 for (int i = 0; i < size; i++) {
1140 if (*objs[i] == obj) {
1150 CcTest::InitializeVM();
1151 Isolate* isolate = CcTest::i_isolate();
1152 Factory* factory = isolate->factory();
1153 v8::HandleScope scope(CcTest::isolate());
1155 // Array of objects to scan haep for.
1156 const int objs_count = 6;
1157 Handle<Object> objs[objs_count];
1158 int next_objs_index = 0;
1160 // Allocate a JS array to OLD_SPACE and NEW_SPACE
1161 objs[next_objs_index++] = factory->NewJSArray(10);
1162 objs[next_objs_index++] =
1163 factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, Strength::WEAK, TENURED);
1165 // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
1166 objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
1167 objs[next_objs_index++] =
1168 factory->NewStringFromStaticChars("abcdefghij", TENURED);
1170 // Allocate a large string (for large object space).
1171 int large_size = Page::kMaxRegularHeapObjectSize + 1;
1172 char* str = new char[large_size];
1173 for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
1174 str[large_size - 1] = '\0';
1175 objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
1178 // Add a Map object to look for.
1179 objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
1181 CHECK_EQ(objs_count, next_objs_index);
1182 CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
1186 static int LenFromSize(int size) {
1187 return (size - FixedArray::kHeaderSize) / kPointerSize;
1191 HEAP_TEST(Regression39128) {
1192 // Test case for crbug.com/39128.
1193 CcTest::InitializeVM();
1194 Isolate* isolate = CcTest::i_isolate();
1195 Heap* heap = CcTest::heap();
1197 // Increase the chance of 'bump-the-pointer' allocation in old space.
1198 heap->CollectAllGarbage();
1200 v8::HandleScope scope(CcTest::isolate());
1202 // The plan: create JSObject which references objects in new space.
1203 // Then clone this object (forcing it to go into old space) and check
1204 // that region dirty marks are updated correctly.
1206 // Step 1: prepare a map for the object. We add 1 inobject property to it.
1207 // Create a map with single inobject property.
1208 Handle<Map> my_map = Map::Create(CcTest::i_isolate(), 1);
1209 int n_properties = my_map->GetInObjectProperties();
1210 CHECK_GT(n_properties, 0);
1212 int object_size = my_map->instance_size();
1214 // Step 2: allocate a lot of objects so to almost fill new space: we need
1215 // just enough room to allocate JSObject and thus fill the newspace.
1217 int allocation_amount = Min(FixedArray::kMaxSize,
1218 Page::kMaxRegularHeapObjectSize + kPointerSize);
1219 int allocation_len = LenFromSize(allocation_amount);
1220 NewSpace* new_space = heap->new_space();
1221 Address* top_addr = new_space->allocation_top_address();
1222 Address* limit_addr = new_space->allocation_limit_address();
1223 while ((*limit_addr - *top_addr) > allocation_amount) {
1224 CHECK(!heap->always_allocate());
1225 Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
1226 CHECK(new_space->Contains(array));
1229 // Step 3: now allocate fixed array and JSObject to fill the whole new space.
1230 int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
1231 int fixed_array_len = LenFromSize(to_fill);
1232 CHECK(fixed_array_len < FixedArray::kMaxLength);
1234 CHECK(!heap->always_allocate());
1235 Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
1236 CHECK(new_space->Contains(array));
1238 Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
1239 CHECK(new_space->Contains(object));
1240 JSObject* jsobject = JSObject::cast(object);
1241 CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
1242 CHECK_EQ(0, jsobject->properties()->length());
1243 // Create a reference to object in new space in jsobject.
1244 FieldIndex index = FieldIndex::ForInObjectOffset(
1245 JSObject::kHeaderSize - kPointerSize);
1246 jsobject->FastPropertyAtPut(index, array);
1248 CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
1250 // Step 4: clone jsobject, but force always allocate first to create a clone
1251 // in old pointer space.
1252 Address old_space_top = heap->old_space()->top();
1253 AlwaysAllocateScope aa_scope(isolate);
1254 Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
1255 JSObject* clone = JSObject::cast(clone_obj);
1256 if (clone->address() != old_space_top) {
1257 // Alas, got allocated from free list, we cannot do checks.
1260 CHECK(heap->old_space()->Contains(clone->address()));
1264 UNINITIALIZED_TEST(TestCodeFlushing) {
1265 // If we do not flush code this test is invalid.
1266 if (!FLAG_flush_code) return;
1267 i::FLAG_allow_natives_syntax = true;
1268 i::FLAG_optimize_for_size = false;
1269 v8::Isolate::CreateParams create_params;
1270 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
1271 v8::Isolate* isolate = v8::Isolate::New(create_params);
1272 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
1274 Factory* factory = i_isolate->factory();
1276 v8::HandleScope scope(isolate);
1277 v8::Context::New(isolate)->Enter();
1278 const char* source =
1285 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1287 // This compile will add the code to the compilation cache.
1289 v8::HandleScope scope(isolate);
1293 // Check function is compiled.
1294 Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
1295 foo_name).ToHandleChecked();
1296 CHECK(func_value->IsJSFunction());
1297 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1298 CHECK(function->shared()->is_compiled());
1300 // The code will survive at least two GCs.
1301 i_isolate->heap()->CollectAllGarbage();
1302 i_isolate->heap()->CollectAllGarbage();
1303 CHECK(function->shared()->is_compiled());
1305 // Simulate several GCs that use full marking.
1306 const int kAgingThreshold = 6;
1307 for (int i = 0; i < kAgingThreshold; i++) {
1308 i_isolate->heap()->CollectAllGarbage();
1311 // foo should no longer be in the compilation cache
1312 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1313 CHECK(!function->is_compiled() || function->IsOptimized());
1314 // Call foo to get it recompiled.
1315 CompileRun("foo()");
1316 CHECK(function->shared()->is_compiled());
1317 CHECK(function->is_compiled());
1324 TEST(TestCodeFlushingPreAged) {
1325 // If we do not flush code this test is invalid.
1326 if (!FLAG_flush_code) return;
1327 i::FLAG_allow_natives_syntax = true;
1328 i::FLAG_optimize_for_size = true;
1329 CcTest::InitializeVM();
1330 Isolate* isolate = CcTest::i_isolate();
1331 Factory* factory = isolate->factory();
1332 v8::HandleScope scope(CcTest::isolate());
1333 const char* source = "function foo() {"
1339 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1341 // Compile foo, but don't run it.
1342 { v8::HandleScope scope(CcTest::isolate());
1346 // Check function is compiled.
1347 Handle<Object> func_value =
1348 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1349 CHECK(func_value->IsJSFunction());
1350 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1351 CHECK(function->shared()->is_compiled());
1353 // The code has been run so will survive at least one GC.
1354 CcTest::heap()->CollectAllGarbage();
1355 CHECK(function->shared()->is_compiled());
1357 // The code was only run once, so it should be pre-aged and collected on the
1359 CcTest::heap()->CollectAllGarbage();
1360 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1362 // Execute the function again twice, and ensure it is reset to the young age.
1363 { v8::HandleScope scope(CcTest::isolate());
1368 // The code will survive at least two GC now that it is young again.
1369 CcTest::heap()->CollectAllGarbage();
1370 CcTest::heap()->CollectAllGarbage();
1371 CHECK(function->shared()->is_compiled());
1373 // Simulate several GCs that use full marking.
1374 const int kAgingThreshold = 6;
1375 for (int i = 0; i < kAgingThreshold; i++) {
1376 CcTest::heap()->CollectAllGarbage();
1379 // foo should no longer be in the compilation cache
1380 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1381 CHECK(!function->is_compiled() || function->IsOptimized());
1382 // Call foo to get it recompiled.
1383 CompileRun("foo()");
1384 CHECK(function->shared()->is_compiled());
1385 CHECK(function->is_compiled());
1389 TEST(TestCodeFlushingIncremental) {
1390 // If we do not flush code this test is invalid.
1391 if (!FLAG_flush_code) return;
1392 i::FLAG_allow_natives_syntax = true;
1393 i::FLAG_optimize_for_size = false;
1394 CcTest::InitializeVM();
1395 Isolate* isolate = CcTest::i_isolate();
1396 Factory* factory = isolate->factory();
1397 v8::HandleScope scope(CcTest::isolate());
1398 const char* source = "function foo() {"
1404 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1406 // This compile will add the code to the compilation cache.
1407 { v8::HandleScope scope(CcTest::isolate());
1411 // Check function is compiled.
1412 Handle<Object> func_value =
1413 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1414 CHECK(func_value->IsJSFunction());
1415 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1416 CHECK(function->shared()->is_compiled());
1418 // The code will survive at least two GCs.
1419 CcTest::heap()->CollectAllGarbage();
1420 CcTest::heap()->CollectAllGarbage();
1421 CHECK(function->shared()->is_compiled());
1423 // Simulate several GCs that use incremental marking.
1424 const int kAgingThreshold = 6;
1425 for (int i = 0; i < kAgingThreshold; i++) {
1426 SimulateIncrementalMarking(CcTest::heap());
1427 CcTest::heap()->CollectAllGarbage();
1429 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1430 CHECK(!function->is_compiled() || function->IsOptimized());
1432 // This compile will compile the function again.
1433 { v8::HandleScope scope(CcTest::isolate());
1434 CompileRun("foo();");
1437 // Simulate several GCs that use incremental marking but make sure
1438 // the loop breaks once the function is enqueued as a candidate.
1439 for (int i = 0; i < kAgingThreshold; i++) {
1440 SimulateIncrementalMarking(CcTest::heap());
1441 if (!function->next_function_link()->IsUndefined()) break;
1442 CcTest::heap()->CollectAllGarbage();
1445 // Force optimization while incremental marking is active and while
1446 // the function is enqueued as a candidate.
1447 { v8::HandleScope scope(CcTest::isolate());
1448 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1451 // Simulate one final GC to make sure the candidate queue is sane.
1452 CcTest::heap()->CollectAllGarbage();
1453 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1454 CHECK(function->is_compiled() || !function->IsOptimized());
1458 TEST(TestCodeFlushingIncrementalScavenge) {
1459 // If we do not flush code this test is invalid.
1460 if (!FLAG_flush_code) return;
1461 i::FLAG_allow_natives_syntax = true;
1462 i::FLAG_optimize_for_size = false;
1463 CcTest::InitializeVM();
1464 Isolate* isolate = CcTest::i_isolate();
1465 Factory* factory = isolate->factory();
1466 v8::HandleScope scope(CcTest::isolate());
1467 const char* source = "var foo = function() {"
1473 "var bar = function() {"
1477 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1478 Handle<String> bar_name = factory->InternalizeUtf8String("bar");
1480 // Perfrom one initial GC to enable code flushing.
1481 CcTest::heap()->CollectAllGarbage();
1483 // This compile will add the code to the compilation cache.
1484 { v8::HandleScope scope(CcTest::isolate());
1488 // Check functions are compiled.
1489 Handle<Object> func_value =
1490 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1491 CHECK(func_value->IsJSFunction());
1492 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1493 CHECK(function->shared()->is_compiled());
1494 Handle<Object> func_value2 =
1495 Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
1496 CHECK(func_value2->IsJSFunction());
1497 Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
1498 CHECK(function2->shared()->is_compiled());
1500 // Clear references to functions so that one of them can die.
1501 { v8::HandleScope scope(CcTest::isolate());
1502 CompileRun("foo = 0; bar = 0;");
1505 // Bump the code age so that flushing is triggered while the function
1506 // object is still located in new-space.
1507 const int kAgingThreshold = 6;
1508 for (int i = 0; i < kAgingThreshold; i++) {
1509 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1510 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1513 // Simulate incremental marking so that the functions are enqueued as
1514 // code flushing candidates. Then kill one of the functions. Finally
1515 // perform a scavenge while incremental marking is still running.
1516 SimulateIncrementalMarking(CcTest::heap());
1517 *function2.location() = NULL;
1518 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
1520 // Simulate one final GC to make sure the candidate queue is sane.
1521 CcTest::heap()->CollectAllGarbage();
1522 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1523 CHECK(!function->is_compiled() || function->IsOptimized());
1527 TEST(TestCodeFlushingIncrementalAbort) {
1528 // If we do not flush code this test is invalid.
1529 if (!FLAG_flush_code) return;
1530 i::FLAG_allow_natives_syntax = true;
1531 i::FLAG_optimize_for_size = false;
1532 CcTest::InitializeVM();
1533 Isolate* isolate = CcTest::i_isolate();
1534 Factory* factory = isolate->factory();
1535 Heap* heap = isolate->heap();
1536 v8::HandleScope scope(CcTest::isolate());
1537 const char* source = "function foo() {"
1543 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1545 // This compile will add the code to the compilation cache.
1546 { v8::HandleScope scope(CcTest::isolate());
1550 // Check function is compiled.
1551 Handle<Object> func_value =
1552 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1553 CHECK(func_value->IsJSFunction());
1554 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1555 CHECK(function->shared()->is_compiled());
1557 // The code will survive at least two GCs.
1558 heap->CollectAllGarbage();
1559 heap->CollectAllGarbage();
1560 CHECK(function->shared()->is_compiled());
1562 // Bump the code age so that flushing is triggered.
1563 const int kAgingThreshold = 6;
1564 for (int i = 0; i < kAgingThreshold; i++) {
1565 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1568 // Simulate incremental marking so that the function is enqueued as
1569 // code flushing candidate.
1570 SimulateIncrementalMarking(heap);
1572 // Enable the debugger and add a breakpoint while incremental marking
1573 // is running so that incremental marking aborts and code flushing is
1576 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1578 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
1579 isolate->debug()->ClearAllBreakPoints();
1582 // Force optimization now that code flushing is disabled.
1583 { v8::HandleScope scope(CcTest::isolate());
1584 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1587 // Simulate one final GC to make sure the candidate queue is sane.
1588 heap->CollectAllGarbage();
1589 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1590 CHECK(function->is_compiled() || !function->IsOptimized());
1594 TEST(CompilationCacheCachingBehavior) {
1595 // If we do not flush code, or have the compilation cache turned off, this
1597 if (!FLAG_flush_code || !FLAG_compilation_cache) {
1600 CcTest::InitializeVM();
1601 Isolate* isolate = CcTest::i_isolate();
1602 Factory* factory = isolate->factory();
1603 Heap* heap = isolate->heap();
1604 CompilationCache* compilation_cache = isolate->compilation_cache();
1605 LanguageMode language_mode =
1606 construct_language_mode(FLAG_use_strict, FLAG_use_strong);
1608 v8::HandleScope scope(CcTest::isolate());
1609 const char* raw_source =
1616 Handle<String> source = factory->InternalizeUtf8String(raw_source);
1617 Handle<Context> native_context = isolate->native_context();
1620 v8::HandleScope scope(CcTest::isolate());
1621 CompileRun(raw_source);
1624 // On first compilation, only a hash is inserted in the code cache. We can't
1626 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
1627 source, Handle<Object>(), 0, 0,
1628 v8::ScriptOriginOptions(false, true, false), native_context,
1630 CHECK(info.is_null());
1633 v8::HandleScope scope(CcTest::isolate());
1634 CompileRun(raw_source);
1637 // On second compilation, the hash is replaced by a real cache entry mapping
1638 // the source to the shared function info containing the code.
1639 info = compilation_cache->LookupScript(
1640 source, Handle<Object>(), 0, 0,
1641 v8::ScriptOriginOptions(false, true, false), native_context,
1643 CHECK(!info.is_null());
1645 heap->CollectAllGarbage();
1647 // On second compilation, the hash is replaced by a real cache entry mapping
1648 // the source to the shared function info containing the code.
1649 info = compilation_cache->LookupScript(
1650 source, Handle<Object>(), 0, 0,
1651 v8::ScriptOriginOptions(false, true, false), native_context,
1653 CHECK(!info.is_null());
1655 while (!info.ToHandleChecked()->code()->IsOld()) {
1656 info.ToHandleChecked()->code()->MakeOlder(NO_MARKING_PARITY);
1659 heap->CollectAllGarbage();
1660 // Ensure code aging cleared the entry from the cache.
1661 info = compilation_cache->LookupScript(
1662 source, Handle<Object>(), 0, 0,
1663 v8::ScriptOriginOptions(false, true, false), native_context,
1665 CHECK(info.is_null());
1668 v8::HandleScope scope(CcTest::isolate());
1669 CompileRun(raw_source);
1672 // On first compilation, only a hash is inserted in the code cache. We can't
1674 info = compilation_cache->LookupScript(
1675 source, Handle<Object>(), 0, 0,
1676 v8::ScriptOriginOptions(false, true, false), native_context,
1678 CHECK(info.is_null());
1680 for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
1681 compilation_cache->MarkCompactPrologue();
1685 v8::HandleScope scope(CcTest::isolate());
1686 CompileRun(raw_source);
1689 // If we aged the cache before caching the script, ensure that we didn't cache
1690 // on next compilation.
1691 info = compilation_cache->LookupScript(
1692 source, Handle<Object>(), 0, 0,
1693 v8::ScriptOriginOptions(false, true, false), native_context,
1695 CHECK(info.is_null());
1699 static void OptimizeEmptyFunction(const char* name) {
1700 HandleScope scope(CcTest::i_isolate());
1701 EmbeddedVector<char, 256> source;
1703 "function %s() { return 0; }"
1705 "%%OptimizeFunctionOnNextCall(%s);"
1707 name, name, name, name, name);
1708 CompileRun(source.start());
1712 // Count the number of native contexts in the weak list of native contexts.
1713 int CountNativeContexts() {
1715 Object* object = CcTest::heap()->native_contexts_list();
1716 while (!object->IsUndefined()) {
1718 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1720 // Subtract one to compensate for the code stub context that is always present
1725 // Count the number of user functions in the weak list of optimized
1726 // functions attached to a native context.
1727 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1729 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1730 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1731 while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1733 object = JSFunction::cast(object)->next_function_link();
1739 TEST(TestInternalWeakLists) {
1740 FLAG_always_opt = false;
1741 FLAG_allow_natives_syntax = true;
1742 v8::V8::Initialize();
1744 // Some flags turn Scavenge collections into Mark-sweep collections
1745 // and hence are incompatible with this test case.
1746 if (FLAG_gc_global || FLAG_stress_compaction) return;
1747 FLAG_retain_maps_for_n_gc = 0;
1749 static const int kNumTestContexts = 10;
1751 Isolate* isolate = CcTest::i_isolate();
1752 Heap* heap = isolate->heap();
1753 HandleScope scope(isolate);
1754 v8::Handle<v8::Context> ctx[kNumTestContexts];
1755 if (!isolate->use_crankshaft()) return;
1757 CHECK_EQ(0, CountNativeContexts());
1759 // Create a number of global contests which gets linked together.
1760 for (int i = 0; i < kNumTestContexts; i++) {
1761 ctx[i] = v8::Context::New(CcTest::isolate());
1763 // Collect garbage that might have been created by one of the
1764 // installed extensions.
1765 isolate->compilation_cache()->Clear();
1766 heap->CollectAllGarbage();
1768 CHECK_EQ(i + 1, CountNativeContexts());
1772 // Create a handle scope so no function objects get stuck in the outer
1774 HandleScope scope(isolate);
1775 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1776 OptimizeEmptyFunction("f1");
1777 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
1778 OptimizeEmptyFunction("f2");
1779 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1780 OptimizeEmptyFunction("f3");
1781 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1782 OptimizeEmptyFunction("f4");
1783 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1784 OptimizeEmptyFunction("f5");
1785 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1787 // Remove function f1, and
1788 CompileRun("f1=null");
1790 // Scavenge treats these references as strong.
1791 for (int j = 0; j < 10; j++) {
1792 CcTest::heap()->CollectGarbage(NEW_SPACE);
1793 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1796 // Mark compact handles the weak references.
1797 isolate->compilation_cache()->Clear();
1798 heap->CollectAllGarbage();
1799 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1801 // Get rid of f3 and f5 in the same way.
1802 CompileRun("f3=null");
1803 for (int j = 0; j < 10; j++) {
1804 CcTest::heap()->CollectGarbage(NEW_SPACE);
1805 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1807 CcTest::heap()->CollectAllGarbage();
1808 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1809 CompileRun("f5=null");
1810 for (int j = 0; j < 10; j++) {
1811 CcTest::heap()->CollectGarbage(NEW_SPACE);
1812 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1814 CcTest::heap()->CollectAllGarbage();
1815 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1820 // Force compilation cache cleanup.
1821 CcTest::heap()->NotifyContextDisposed(true);
1822 CcTest::heap()->CollectAllGarbage();
1824 // Dispose the native contexts one by one.
1825 for (int i = 0; i < kNumTestContexts; i++) {
1826 // TODO(dcarney): is there a better way to do this?
1827 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1828 *unsafe = CcTest::heap()->undefined_value();
1831 // Scavenge treats these references as strong.
1832 for (int j = 0; j < 10; j++) {
1833 CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1834 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1837 // Mark compact handles the weak references.
1838 CcTest::heap()->CollectAllGarbage();
1839 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1842 CHECK_EQ(0, CountNativeContexts());
1846 // Count the number of native contexts in the weak list of native contexts
1847 // causing a GC after the specified number of elements.
1848 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1849 Heap* heap = isolate->heap();
1851 Handle<Object> object(heap->native_contexts_list(), isolate);
1852 while (!object->IsUndefined()) {
1854 if (count == n) heap->CollectAllGarbage();
1856 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1859 // Subtract one to compensate for the code stub context that is always present
1864 // Count the number of user functions in the weak list of optimized
1865 // functions attached to a native context causing a GC after the
1866 // specified number of elements.
1867 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1870 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1871 Isolate* isolate = icontext->GetIsolate();
1872 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1874 while (object->IsJSFunction() &&
1875 !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1877 if (count == n) isolate->heap()->CollectAllGarbage();
1878 object = Handle<Object>(
1879 Object::cast(JSFunction::cast(*object)->next_function_link()),
1886 TEST(TestInternalWeakListsTraverseWithGC) {
1887 FLAG_always_opt = false;
1888 FLAG_allow_natives_syntax = true;
1889 v8::V8::Initialize();
1891 static const int kNumTestContexts = 10;
1893 Isolate* isolate = CcTest::i_isolate();
1894 HandleScope scope(isolate);
1895 v8::Handle<v8::Context> ctx[kNumTestContexts];
1896 if (!isolate->use_crankshaft()) return;
1898 CHECK_EQ(0, CountNativeContexts());
1900 // Create an number of contexts and check the length of the weak list both
1901 // with and without GCs while iterating the list.
1902 for (int i = 0; i < kNumTestContexts; i++) {
1903 ctx[i] = v8::Context::New(CcTest::isolate());
1904 CHECK_EQ(i + 1, CountNativeContexts());
1905 CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
1910 // Compile a number of functions the length of the weak list of optimized
1911 // functions both with and without GCs while iterating the list.
1912 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1913 OptimizeEmptyFunction("f1");
1914 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
1915 CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1916 OptimizeEmptyFunction("f2");
1917 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
1918 CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1919 OptimizeEmptyFunction("f3");
1920 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
1921 CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1922 OptimizeEmptyFunction("f4");
1923 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
1924 CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1925 OptimizeEmptyFunction("f5");
1926 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
1927 CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
1933 TEST(TestSizeOfRegExpCode) {
1934 if (!FLAG_regexp_optimization) return;
1936 v8::V8::Initialize();
1938 Isolate* isolate = CcTest::i_isolate();
1939 HandleScope scope(isolate);
1941 LocalContext context;
1943 // Adjust source below and this check to match
1944 // RegExpImple::kRegExpTooLargeToOptimize.
1945 DCHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);
1947 // Compile a regexp that is much larger if we are using regexp optimizations.
1949 "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
1950 "var half_size_reg_exp;"
1951 "while (reg_exp_source.length < 20 * 1024) {"
1952 " half_size_reg_exp = reg_exp_source;"
1953 " reg_exp_source = reg_exp_source + reg_exp_source;"
1956 "reg_exp_source.match(/f/);");
1958 // Get initial heap size after several full GCs, which will stabilize
1959 // the heap size and return with sweeping finished completely.
1960 CcTest::heap()->CollectAllGarbage();
1961 CcTest::heap()->CollectAllGarbage();
1962 CcTest::heap()->CollectAllGarbage();
1963 CcTest::heap()->CollectAllGarbage();
1964 CcTest::heap()->CollectAllGarbage();
1965 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1966 if (collector->sweeping_in_progress()) {
1967 collector->EnsureSweepingCompleted();
1969 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1971 CompileRun("'foo'.match(reg_exp_source);");
1972 CcTest::heap()->CollectAllGarbage();
1973 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1975 CompileRun("'foo'.match(half_size_reg_exp);");
1976 CcTest::heap()->CollectAllGarbage();
1977 int size_with_optimized_regexp =
1978 static_cast<int>(CcTest::heap()->SizeOfObjects());
1980 int size_of_regexp_code = size_with_regexp - initial_size;
1982 // On some platforms the debug-code flag causes huge amounts of regexp code
1983 // to be emitted, breaking this test.
1984 if (!FLAG_debug_code) {
1985 CHECK_LE(size_of_regexp_code, 1 * MB);
1988 // Small regexp is half the size, but compiles to more than twice the code
1989 // due to the optimization steps.
1990 CHECK_GE(size_with_optimized_regexp,
1991 size_with_regexp + size_of_regexp_code * 2);
1995 HEAP_TEST(TestSizeOfObjects) {
1996 v8::V8::Initialize();
1998 // Get initial heap size after several full GCs, which will stabilize
1999 // the heap size and return with sweeping finished completely.
2000 CcTest::heap()->CollectAllGarbage();
2001 CcTest::heap()->CollectAllGarbage();
2002 CcTest::heap()->CollectAllGarbage();
2003 CcTest::heap()->CollectAllGarbage();
2004 CcTest::heap()->CollectAllGarbage();
2005 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
2006 if (collector->sweeping_in_progress()) {
2007 collector->EnsureSweepingCompleted();
2009 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
2012 // Allocate objects on several different old-space pages so that
2013 // concurrent sweeper threads will be busy sweeping the old space on
2014 // subsequent GC runs.
2015 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2016 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
2017 for (int i = 1; i <= 100; i++) {
2018 CcTest::heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
2019 CHECK_EQ(initial_size + i * filler_size,
2020 static_cast<int>(CcTest::heap()->SizeOfObjects()));
2024 // The heap size should go back to initial size after a full GC, even
2025 // though sweeping didn't finish yet.
2026 CcTest::heap()->CollectAllGarbage();
2028 // Normally sweeping would not be complete here, but no guarantees.
2030 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
2032 // Waiting for sweeper threads should not change heap size.
2033 if (collector->sweeping_in_progress()) {
2034 collector->EnsureSweepingCompleted();
2036 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
2040 TEST(TestAlignmentCalculations) {
2041 // Maximum fill amounts are consistent.
2042 int maximum_double_misalignment = kDoubleSize - kPointerSize;
2043 int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
2044 int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
2045 CHECK_EQ(0, max_word_fill);
2046 int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
2047 CHECK_EQ(maximum_double_misalignment, max_double_fill);
2048 int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
2049 CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
2050 int max_simd128_unaligned_fill =
2051 Heap::GetMaximumFillToAlign(kSimd128Unaligned);
2052 CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);
2054 Address base = static_cast<Address>(NULL);
2057 // Word alignment never requires fill.
2058 fill = Heap::GetFillToAlign(base, kWordAligned);
2060 fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
2063 // No fill is required when address is double aligned.
2064 fill = Heap::GetFillToAlign(base, kDoubleAligned);
2066 // Fill is required if address is not double aligned.
2067 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
2068 CHECK_EQ(maximum_double_misalignment, fill);
2069 // kDoubleUnaligned has the opposite fill amounts.
2070 fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
2071 CHECK_EQ(maximum_double_misalignment, fill);
2072 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
2075 // 128 bit SIMD types have 2 or 4 possible alignments, depending on platform.
2076 fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
2077 CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
2078 fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
2079 CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
2080 fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
2081 CHECK_EQ(kPointerSize, fill);
2082 fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
2087 static HeapObject* NewSpaceAllocateAligned(int size,
2088 AllocationAlignment alignment) {
2089 Heap* heap = CcTest::heap();
2090 AllocationResult allocation =
2091 heap->new_space()->AllocateRawAligned(size, alignment);
2092 HeapObject* obj = NULL;
2093 allocation.To(&obj);
2094 heap->CreateFillerObjectAt(obj->address(), size);
2099 // Get new space allocation into the desired alignment.
2100 static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
2101 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
2102 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2104 NewSpaceAllocateAligned(fill + offset, kWordAligned);
2110 TEST(TestAlignedAllocation) {
2111 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2112 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2113 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
2117 if (double_misalignment) {
2118 // Allocate a pointer sized object that must be double aligned at an
2120 start = AlignNewSpace(kDoubleAligned, 0);
2121 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2122 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2123 // There is no filler.
2124 CHECK_EQ(kPointerSize, *top_addr - start);
2126 // Allocate a second pointer sized object that must be double aligned at an
2127 // unaligned address.
2128 start = AlignNewSpace(kDoubleAligned, kPointerSize);
2129 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2130 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2131 // There is a filler object before the object.
2132 filler = HeapObject::FromAddress(start);
2133 CHECK(obj != filler && filler->IsFiller() &&
2134 filler->Size() == kPointerSize);
2135 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
2137 // Similarly for kDoubleUnaligned.
2138 start = AlignNewSpace(kDoubleUnaligned, 0);
2139 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2140 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2141 CHECK_EQ(kPointerSize, *top_addr - start);
2142 start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
2143 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2144 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2145 // There is a filler object before the object.
2146 filler = HeapObject::FromAddress(start);
2147 CHECK(obj != filler && filler->IsFiller() &&
2148 filler->Size() == kPointerSize);
2149 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
2152 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2154 start = AlignNewSpace(kSimd128Unaligned, 0);
2155 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2156 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2157 // There is no filler.
2158 CHECK_EQ(kPointerSize, *top_addr - start);
2159 start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
2160 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2161 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2162 // There is a filler object before the object.
2163 filler = HeapObject::FromAddress(start);
2164 CHECK(obj != filler && filler->IsFiller() &&
2165 filler->Size() == kSimd128Size - kPointerSize);
2166 CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);
2168 if (double_misalignment) {
2169 // Test the 2 other alignments possible on 32 bit platforms.
2170 start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
2171 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2172 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2173 // There is a filler object before the object.
2174 filler = HeapObject::FromAddress(start);
2175 CHECK(obj != filler && filler->IsFiller() &&
2176 filler->Size() == 2 * kPointerSize);
2177 CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
2178 start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
2179 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2180 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2181 // There is a filler object before the object.
2182 filler = HeapObject::FromAddress(start);
2183 CHECK(obj != filler && filler->IsFiller() &&
2184 filler->Size() == kPointerSize);
2185 CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
2190 static HeapObject* OldSpaceAllocateAligned(int size,
2191 AllocationAlignment alignment) {
2192 Heap* heap = CcTest::heap();
2193 AllocationResult allocation =
2194 heap->old_space()->AllocateRawAligned(size, alignment);
2195 HeapObject* obj = NULL;
2196 allocation.To(&obj);
2197 heap->CreateFillerObjectAt(obj->address(), size);
2202 // Get old space allocation into the desired alignment.
2203 static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
2204 Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
2205 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2206 int allocation = fill + offset;
2208 OldSpaceAllocateAligned(allocation, kWordAligned);
2210 Address top = *top_addr;
2211 // Now force the remaining allocation onto the free list.
2212 CcTest::heap()->old_space()->EmptyAllocationInfo();
2217 // Test the case where allocation must be done from the free list, so filler
2218 // may precede or follow the object.
2219 TEST(TestAlignedOverAllocation) {
2220 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2221 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2224 HeapObject* filler1;
2225 HeapObject* filler2;
2226 if (double_misalignment) {
2227 start = AlignOldSpace(kDoubleAligned, 0);
2228 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2229 // The object is aligned, and a filler object is created after.
2230 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2231 filler1 = HeapObject::FromAddress(start + kPointerSize);
2232 CHECK(obj != filler1 && filler1->IsFiller() &&
2233 filler1->Size() == kPointerSize);
2234 // Try the opposite alignment case.
2235 start = AlignOldSpace(kDoubleAligned, kPointerSize);
2236 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2237 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2238 filler1 = HeapObject::FromAddress(start);
2239 CHECK(obj != filler1);
2240 CHECK(filler1->IsFiller());
2241 CHECK(filler1->Size() == kPointerSize);
2242 CHECK(obj != filler1 && filler1->IsFiller() &&
2243 filler1->Size() == kPointerSize);
2245 // Similarly for kDoubleUnaligned.
2246 start = AlignOldSpace(kDoubleUnaligned, 0);
2247 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2248 // The object is aligned, and a filler object is created after.
2249 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2250 filler1 = HeapObject::FromAddress(start + kPointerSize);
2251 CHECK(obj != filler1 && filler1->IsFiller() &&
2252 filler1->Size() == kPointerSize);
2253 // Try the opposite alignment case.
2254 start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
2255 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2256 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2257 filler1 = HeapObject::FromAddress(start);
2258 CHECK(obj != filler1 && filler1->IsFiller() &&
2259 filler1->Size() == kPointerSize);
2262 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2264 start = AlignOldSpace(kSimd128Unaligned, 0);
2265 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2266 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2267 // There is a filler object after the object.
2268 filler1 = HeapObject::FromAddress(start + kPointerSize);
2269 CHECK(obj != filler1 && filler1->IsFiller() &&
2270 filler1->Size() == kSimd128Size - kPointerSize);
2271 start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
2272 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2273 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2274 // There is a filler object before the object.
2275 filler1 = HeapObject::FromAddress(start);
2276 CHECK(obj != filler1 && filler1->IsFiller() &&
2277 filler1->Size() == kSimd128Size - kPointerSize);
2279 if (double_misalignment) {
2280 // Test the 2 other alignments possible on 32 bit platforms.
2281 start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
2282 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2283 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2284 // There are filler objects before and after the object.
2285 filler1 = HeapObject::FromAddress(start);
2286 CHECK(obj != filler1 && filler1->IsFiller() &&
2287 filler1->Size() == 2 * kPointerSize);
2288 filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
2289 CHECK(obj != filler2 && filler2->IsFiller() &&
2290 filler2->Size() == kPointerSize);
2291 start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
2292 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2293 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2294 // There are filler objects before and after the object.
2295 filler1 = HeapObject::FromAddress(start);
2296 CHECK(obj != filler1 && filler1->IsFiller() &&
2297 filler1->Size() == kPointerSize);
2298 filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
2299 CHECK(obj != filler2 && filler2->IsFiller() &&
2300 filler2->Size() == 2 * kPointerSize);
2305 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
2306 CcTest::InitializeVM();
2307 HeapIterator iterator(CcTest::heap());
2308 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
2309 intptr_t size_of_objects_2 = 0;
2310 for (HeapObject* obj = iterator.next();
2312 obj = iterator.next()) {
2313 if (!obj->IsFreeSpace()) {
2314 size_of_objects_2 += obj->Size();
2317 // Delta must be within 5% of the larger result.
2318 // TODO(gc): Tighten this up by distinguishing between byte
2319 // arrays that are real and those that merely mark free space
2321 if (size_of_objects_1 > size_of_objects_2) {
2322 intptr_t delta = size_of_objects_1 - size_of_objects_2;
2323 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2324 "Iterator: %" V8_PTR_PREFIX "d, "
2325 "delta: %" V8_PTR_PREFIX "d\n",
2326 size_of_objects_1, size_of_objects_2, delta);
2327 CHECK_GT(size_of_objects_1 / 20, delta);
2329 intptr_t delta = size_of_objects_2 - size_of_objects_1;
2330 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2331 "Iterator: %" V8_PTR_PREFIX "d, "
2332 "delta: %" V8_PTR_PREFIX "d\n",
2333 size_of_objects_1, size_of_objects_2, delta);
2334 CHECK_GT(size_of_objects_2 / 20, delta);
2339 static void FillUpNewSpace(NewSpace* new_space) {
2340 // Fill up new space to the point that it is completely full. Make sure
2341 // that the scavenger does not undo the filling.
2342 Heap* heap = new_space->heap();
2343 Isolate* isolate = heap->isolate();
2344 Factory* factory = isolate->factory();
2345 HandleScope scope(isolate);
2346 AlwaysAllocateScope always_allocate(isolate);
2347 intptr_t available = new_space->Capacity() - new_space->Size();
2348 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
2349 for (intptr_t i = 0; i < number_of_fillers; i++) {
2350 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
2355 TEST(GrowAndShrinkNewSpace) {
2356 CcTest::InitializeVM();
2357 Heap* heap = CcTest::heap();
2358 NewSpace* new_space = heap->new_space();
2360 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
2361 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2362 // The max size cannot exceed the reserved size, since semispaces must be
2363 // always within the reserved space. We can't test new space growing and
2364 // shrinking if the reserved size is the same as the minimum (initial) size.
2368 // Explicitly growing should double the space capacity.
2369 intptr_t old_capacity, new_capacity;
2370 old_capacity = new_space->TotalCapacity();
2372 new_capacity = new_space->TotalCapacity();
2373 CHECK(2 * old_capacity == new_capacity);
2375 old_capacity = new_space->TotalCapacity();
2376 FillUpNewSpace(new_space);
2377 new_capacity = new_space->TotalCapacity();
2378 CHECK(old_capacity == new_capacity);
2380 // Explicitly shrinking should not affect space capacity.
2381 old_capacity = new_space->TotalCapacity();
2382 new_space->Shrink();
2383 new_capacity = new_space->TotalCapacity();
2384 CHECK(old_capacity == new_capacity);
2386 // Let the scavenger empty the new space.
2387 heap->CollectGarbage(NEW_SPACE);
2388 CHECK_LE(new_space->Size(), old_capacity);
2390 // Explicitly shrinking should halve the space capacity.
2391 old_capacity = new_space->TotalCapacity();
2392 new_space->Shrink();
2393 new_capacity = new_space->TotalCapacity();
2394 CHECK(old_capacity == 2 * new_capacity);
2396 // Consecutive shrinking should not affect space capacity.
2397 old_capacity = new_space->TotalCapacity();
2398 new_space->Shrink();
2399 new_space->Shrink();
2400 new_space->Shrink();
2401 new_capacity = new_space->TotalCapacity();
2402 CHECK(old_capacity == new_capacity);
2406 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
2407 CcTest::InitializeVM();
2408 Heap* heap = CcTest::heap();
2409 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
2410 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2411 // The max size cannot exceed the reserved size, since semispaces must be
2412 // always within the reserved space. We can't test new space growing and
2413 // shrinking if the reserved size is the same as the minimum (initial) size.
2417 v8::HandleScope scope(CcTest::isolate());
2418 NewSpace* new_space = heap->new_space();
2419 intptr_t old_capacity, new_capacity;
2420 old_capacity = new_space->TotalCapacity();
2422 new_capacity = new_space->TotalCapacity();
2423 CHECK(2 * old_capacity == new_capacity);
2424 FillUpNewSpace(new_space);
2425 heap->CollectAllAvailableGarbage();
2426 new_capacity = new_space->TotalCapacity();
2427 CHECK(old_capacity == new_capacity);
2431 static int NumberOfGlobalObjects() {
2433 HeapIterator iterator(CcTest::heap());
2434 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2435 if (obj->IsGlobalObject()) count++;
2437 // Subtract two to compensate for the two global objects (not global
2438 // JSObjects, of which there would only be one) that are part of the code stub
2439 // context, which is always present.
2444 // Test that we don't embed maps from foreign contexts into
2446 TEST(LeakNativeContextViaMap) {
2447 i::FLAG_allow_natives_syntax = true;
2448 v8::Isolate* isolate = CcTest::isolate();
2449 v8::HandleScope outer_scope(isolate);
2450 v8::Persistent<v8::Context> ctx1p;
2451 v8::Persistent<v8::Context> ctx2p;
2453 v8::HandleScope scope(isolate);
2454 ctx1p.Reset(isolate, v8::Context::New(isolate));
2455 ctx2p.Reset(isolate, v8::Context::New(isolate));
2456 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2459 CcTest::heap()->CollectAllAvailableGarbage();
2460 CHECK_EQ(4, NumberOfGlobalObjects());
2463 v8::HandleScope inner_scope(isolate);
2464 CompileRun("var v = {x: 42}");
2465 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2466 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2467 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2469 ctx2->Global()->Set(v8_str("o"), v);
2470 v8::Local<v8::Value> res = CompileRun(
2471 "function f() { return o.x; }"
2472 "for (var i = 0; i < 10; ++i) f();"
2473 "%OptimizeFunctionOnNextCall(f);"
2475 CHECK_EQ(42, res->Int32Value());
2476 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2478 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2480 isolate->ContextDisposedNotification();
2482 CcTest::heap()->CollectAllAvailableGarbage();
2483 CHECK_EQ(2, NumberOfGlobalObjects());
2485 CcTest::heap()->CollectAllAvailableGarbage();
2486 CHECK_EQ(0, NumberOfGlobalObjects());
2490 // Test that we don't embed functions from foreign contexts into
2492 TEST(LeakNativeContextViaFunction) {
2493 i::FLAG_allow_natives_syntax = true;
2494 v8::Isolate* isolate = CcTest::isolate();
2495 v8::HandleScope outer_scope(isolate);
2496 v8::Persistent<v8::Context> ctx1p;
2497 v8::Persistent<v8::Context> ctx2p;
2499 v8::HandleScope scope(isolate);
2500 ctx1p.Reset(isolate, v8::Context::New(isolate));
2501 ctx2p.Reset(isolate, v8::Context::New(isolate));
2502 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2505 CcTest::heap()->CollectAllAvailableGarbage();
2506 CHECK_EQ(4, NumberOfGlobalObjects());
2509 v8::HandleScope inner_scope(isolate);
2510 CompileRun("var v = function() { return 42; }");
2511 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2512 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2513 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2515 ctx2->Global()->Set(v8_str("o"), v);
2516 v8::Local<v8::Value> res = CompileRun(
2517 "function f(x) { return x(); }"
2518 "for (var i = 0; i < 10; ++i) f(o);"
2519 "%OptimizeFunctionOnNextCall(f);"
2521 CHECK_EQ(42, res->Int32Value());
2522 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2526 isolate->ContextDisposedNotification();
2528 CcTest::heap()->CollectAllAvailableGarbage();
2529 CHECK_EQ(2, NumberOfGlobalObjects());
2531 CcTest::heap()->CollectAllAvailableGarbage();
2532 CHECK_EQ(0, NumberOfGlobalObjects());
2536 TEST(LeakNativeContextViaMapKeyed) {
2537 i::FLAG_allow_natives_syntax = true;
2538 v8::Isolate* isolate = CcTest::isolate();
2539 v8::HandleScope outer_scope(isolate);
2540 v8::Persistent<v8::Context> ctx1p;
2541 v8::Persistent<v8::Context> ctx2p;
2543 v8::HandleScope scope(isolate);
2544 ctx1p.Reset(isolate, v8::Context::New(isolate));
2545 ctx2p.Reset(isolate, v8::Context::New(isolate));
2546 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2549 CcTest::heap()->CollectAllAvailableGarbage();
2550 CHECK_EQ(4, NumberOfGlobalObjects());
2553 v8::HandleScope inner_scope(isolate);
2554 CompileRun("var v = [42, 43]");
2555 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2556 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2557 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2559 ctx2->Global()->Set(v8_str("o"), v);
2560 v8::Local<v8::Value> res = CompileRun(
2561 "function f() { return o[0]; }"
2562 "for (var i = 0; i < 10; ++i) f();"
2563 "%OptimizeFunctionOnNextCall(f);"
2565 CHECK_EQ(42, res->Int32Value());
2566 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2570 isolate->ContextDisposedNotification();
2572 CcTest::heap()->CollectAllAvailableGarbage();
2573 CHECK_EQ(2, NumberOfGlobalObjects());
2575 CcTest::heap()->CollectAllAvailableGarbage();
2576 CHECK_EQ(0, NumberOfGlobalObjects());
2580 TEST(LeakNativeContextViaMapProto) {
2581 i::FLAG_allow_natives_syntax = true;
2582 v8::Isolate* isolate = CcTest::isolate();
2583 v8::HandleScope outer_scope(isolate);
2584 v8::Persistent<v8::Context> ctx1p;
2585 v8::Persistent<v8::Context> ctx2p;
2587 v8::HandleScope scope(isolate);
2588 ctx1p.Reset(isolate, v8::Context::New(isolate));
2589 ctx2p.Reset(isolate, v8::Context::New(isolate));
2590 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2593 CcTest::heap()->CollectAllAvailableGarbage();
2594 CHECK_EQ(4, NumberOfGlobalObjects());
2597 v8::HandleScope inner_scope(isolate);
2598 CompileRun("var v = { y: 42}");
2599 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2600 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2601 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2603 ctx2->Global()->Set(v8_str("o"), v);
2604 v8::Local<v8::Value> res = CompileRun(
2610 "for (var i = 0; i < 10; ++i) f();"
2611 "%OptimizeFunctionOnNextCall(f);"
2613 CHECK_EQ(42, res->Int32Value());
2614 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2618 isolate->ContextDisposedNotification();
2620 CcTest::heap()->CollectAllAvailableGarbage();
2621 CHECK_EQ(2, NumberOfGlobalObjects());
2623 CcTest::heap()->CollectAllAvailableGarbage();
2624 CHECK_EQ(0, NumberOfGlobalObjects());
2628 TEST(InstanceOfStubWriteBarrier) {
2629 i::FLAG_allow_natives_syntax = true;
2631 i::FLAG_verify_heap = true;
2634 CcTest::InitializeVM();
2635 if (!CcTest::i_isolate()->use_crankshaft()) return;
2636 if (i::FLAG_force_marking_deque_overflows) return;
2637 v8::HandleScope outer_scope(CcTest::isolate());
2640 v8::HandleScope scope(CcTest::isolate());
2642 "function foo () { }"
2643 "function mkbar () { return new (new Function(\"\")) (); }"
2644 "function f (x) { return (x instanceof foo); }"
2645 "function g () { f(mkbar()); }"
2646 "f(new foo()); f(new foo());"
2647 "%OptimizeFunctionOnNextCall(f);"
2648 "f(new foo()); g();");
2651 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2653 CcTest::heap()->StartIncrementalMarking();
2655 Handle<JSFunction> f =
2656 v8::Utils::OpenHandle(
2657 *v8::Handle<v8::Function>::Cast(
2658 CcTest::global()->Get(v8_str("f"))));
2660 CHECK(f->IsOptimized());
2662 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
2663 !marking->IsStopped()) {
2664 // Discard any pending GC requests otherwise we will get GC when we enter
2666 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2669 CHECK(marking->IsMarking());
2672 v8::HandleScope scope(CcTest::isolate());
2673 v8::Handle<v8::Object> global = CcTest::global();
2674 v8::Handle<v8::Function> g =
2675 v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
2676 g->Call(global, 0, NULL);
2679 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2680 CcTest::heap()->CollectGarbage(OLD_SPACE);
2684 static int NumberOfProtoTransitions(Map* map) {
2685 return TransitionArray::NumberOfPrototypeTransitions(
2686 TransitionArray::GetPrototypeTransitions(map));
2690 TEST(PrototypeTransitionClearing) {
2691 if (FLAG_never_compact) return;
2692 CcTest::InitializeVM();
2693 Isolate* isolate = CcTest::i_isolate();
2694 Factory* factory = isolate->factory();
2695 v8::HandleScope scope(CcTest::isolate());
2697 CompileRun("var base = {};");
2698 Handle<JSObject> baseObject =
2699 v8::Utils::OpenHandle(
2700 *v8::Handle<v8::Object>::Cast(
2701 CcTest::global()->Get(v8_str("base"))));
2702 int initialTransitions = NumberOfProtoTransitions(baseObject->map());
2706 "for (var i = 0; i < 10; i++) {"
2708 " var prototype = {};"
2709 " object.__proto__ = prototype;"
2710 " if (i >= 3) live.push(object, prototype);"
2713 // Verify that only dead prototype transitions are cleared.
2714 CHECK_EQ(initialTransitions + 10,
2715 NumberOfProtoTransitions(baseObject->map()));
2716 CcTest::heap()->CollectAllGarbage();
2717 const int transitions = 10 - 3;
2718 CHECK_EQ(initialTransitions + transitions,
2719 NumberOfProtoTransitions(baseObject->map()));
2721 // Verify that prototype transitions array was compacted.
2723 TransitionArray::GetPrototypeTransitions(baseObject->map());
2724 for (int i = initialTransitions; i < initialTransitions + transitions; i++) {
2725 int j = TransitionArray::kProtoTransitionHeaderSize + i;
2726 CHECK(trans->get(j)->IsWeakCell());
2727 CHECK(WeakCell::cast(trans->get(j))->value()->IsMap());
2730 // Make sure next prototype is placed on an old-space evacuation candidate.
2731 Handle<JSObject> prototype;
2732 PagedSpace* space = CcTest::heap()->old_space();
2734 AlwaysAllocateScope always_allocate(isolate);
2735 SimulateFullSpace(space);
2736 prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS,
2737 Strength::WEAK, TENURED);
2740 // Add a prototype on an evacuation candidate and verify that transition
2741 // clearing correctly records slots in prototype transition array.
2742 i::FLAG_always_compact = true;
2743 Handle<Map> map(baseObject->map());
2744 CHECK(!space->LastPage()->Contains(
2745 TransitionArray::GetPrototypeTransitions(*map)->address()));
2746 CHECK(space->LastPage()->Contains(prototype->address()));
2750 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2751 i::FLAG_stress_compaction = false;
2752 i::FLAG_allow_natives_syntax = true;
2754 i::FLAG_verify_heap = true;
2757 CcTest::InitializeVM();
2758 if (!CcTest::i_isolate()->use_crankshaft()) return;
2759 v8::HandleScope outer_scope(CcTest::isolate());
2762 v8::HandleScope scope(CcTest::isolate());
2766 " for (var i = 0; i < 100; i++) s += i;"
2770 "%OptimizeFunctionOnNextCall(f);"
2773 Handle<JSFunction> f =
2774 v8::Utils::OpenHandle(
2775 *v8::Handle<v8::Function>::Cast(
2776 CcTest::global()->Get(v8_str("f"))));
2777 CHECK(f->IsOptimized());
2779 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2781 CcTest::heap()->StartIncrementalMarking();
2782 // The following calls will increment CcTest::heap()->global_ic_age().
2783 CcTest::isolate()->ContextDisposedNotification();
2784 SimulateIncrementalMarking(CcTest::heap());
2785 CcTest::heap()->CollectAllGarbage();
2786 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2787 CHECK_EQ(0, f->shared()->opt_count());
2788 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2792 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2793 i::FLAG_stress_compaction = false;
2794 i::FLAG_allow_natives_syntax = true;
2796 i::FLAG_verify_heap = true;
2799 CcTest::InitializeVM();
2800 if (!CcTest::i_isolate()->use_crankshaft()) return;
2801 v8::HandleScope outer_scope(CcTest::isolate());
2804 v8::HandleScope scope(CcTest::isolate());
2808 " for (var i = 0; i < 100; i++) s += i;"
2812 "%OptimizeFunctionOnNextCall(f);"
2815 Handle<JSFunction> f =
2816 v8::Utils::OpenHandle(
2817 *v8::Handle<v8::Function>::Cast(
2818 CcTest::global()->Get(v8_str("f"))));
2819 CHECK(f->IsOptimized());
2821 CcTest::heap()->incremental_marking()->Stop();
2823 // The following two calls will increment CcTest::heap()->global_ic_age().
2824 CcTest::isolate()->ContextDisposedNotification();
2825 CcTest::heap()->CollectAllGarbage();
2827 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2828 CHECK_EQ(0, f->shared()->opt_count());
2829 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
2833 HEAP_TEST(GCFlags) {
2834 CcTest::InitializeVM();
2835 Heap* heap = CcTest::heap();
2837 heap->set_current_gc_flags(Heap::kNoGCFlags);
2838 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2840 // Set the flags to check whether we appropriately resets them after the GC.
2841 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
2842 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
2843 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2845 MarkCompactCollector* collector = heap->mark_compact_collector();
2846 if (collector->sweeping_in_progress()) {
2847 collector->EnsureSweepingCompleted();
2850 IncrementalMarking* marking = heap->incremental_marking();
2852 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
2853 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2855 heap->CollectGarbage(NEW_SPACE);
2856 // NewSpace scavenges should not overwrite the flags.
2857 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2859 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2860 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2864 TEST(IdleNotificationFinishMarking) {
2865 i::FLAG_allow_natives_syntax = true;
2866 CcTest::InitializeVM();
2867 SimulateFullSpace(CcTest::heap()->old_space());
2868 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2870 CcTest::heap()->StartIncrementalMarking();
2872 CHECK_EQ(CcTest::heap()->gc_count(), 0);
2874 // TODO(hpayer): We cannot write proper unit test right now for heap.
2875 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2876 // marking delay counter.
2878 // Perform a huge incremental marking step but don't complete marking.
2879 intptr_t bytes_processed = 0;
2882 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2883 IncrementalMarking::FORCE_MARKING,
2884 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2885 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2886 } while (bytes_processed);
2888 // The next invocations of incremental marking are not going to complete
2890 // since the completion threshold is not reached
2891 for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
2893 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2894 IncrementalMarking::FORCE_MARKING,
2895 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2896 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2899 marking->SetWeakClosureWasOverApproximatedForTesting(true);
2901 // The next idle notification has to finish incremental marking.
2902 const double kLongIdleTime = 1000.0;
2903 CcTest::isolate()->IdleNotificationDeadline(
2904 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2905 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2907 CHECK_EQ(CcTest::heap()->gc_count(), 1);
2911 // Test that HAllocateObject will always return an object in new-space.
2912 TEST(OptimizedAllocationAlwaysInNewSpace) {
2913 i::FLAG_allow_natives_syntax = true;
2914 CcTest::InitializeVM();
2915 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2916 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2917 v8::HandleScope scope(CcTest::isolate());
2919 SimulateFullSpace(CcTest::heap()->new_space());
2920 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2921 v8::Local<v8::Value> res = CompileRun(
2924 " for (var i = 0; i < 32; i++) {"
2925 " this['x' + i] = x;"
2928 "function f(x) { return new c(x); };"
2930 "%OptimizeFunctionOnNextCall(f);"
2933 4, res.As<v8::Object>()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2935 Handle<JSObject> o =
2936 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2938 CHECK(CcTest::heap()->InNewSpace(*o));
2942 TEST(OptimizedPretenuringAllocationFolding) {
2943 i::FLAG_allow_natives_syntax = true;
2944 i::FLAG_expose_gc = true;
2945 CcTest::InitializeVM();
2946 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2947 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2948 v8::HandleScope scope(CcTest::isolate());
2950 // Grow new space unitl maximum capacity reached.
2951 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2952 CcTest::heap()->new_space()->Grow();
2955 i::ScopedVector<char> source(1024);
2958 "var number_elements = %d;"
2959 "var elements = new Array();"
2961 " for (var i = 0; i < number_elements; i++) {"
2962 " elements[i] = [[{}], [1.1]];"
2964 " return elements[number_elements-1]"
2968 "%%OptimizeFunctionOnNextCall(f);"
2970 AllocationSite::kPretenureMinimumCreated);
2972 v8::Local<v8::Value> res = CompileRun(source.start());
2974 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2975 Handle<JSObject> int_array_handle =
2976 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2977 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2978 Handle<JSObject> double_array_handle =
2979 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2981 Handle<JSObject> o =
2982 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2983 CHECK(CcTest::heap()->InOldSpace(*o));
2984 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2985 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2986 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2987 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2991 TEST(OptimizedPretenuringObjectArrayLiterals) {
2992 i::FLAG_allow_natives_syntax = true;
2993 i::FLAG_expose_gc = true;
2994 CcTest::InitializeVM();
2995 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2996 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2997 v8::HandleScope scope(CcTest::isolate());
2999 // Grow new space unitl maximum capacity reached.
3000 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3001 CcTest::heap()->new_space()->Grow();
3004 i::ScopedVector<char> source(1024);
3007 "var number_elements = %d;"
3008 "var elements = new Array(number_elements);"
3010 " for (var i = 0; i < number_elements; i++) {"
3011 " elements[i] = [{}, {}, {}];"
3013 " return elements[number_elements - 1];"
3017 "%%OptimizeFunctionOnNextCall(f);"
3019 AllocationSite::kPretenureMinimumCreated);
3021 v8::Local<v8::Value> res = CompileRun(source.start());
3023 Handle<JSObject> o =
3024 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3026 CHECK(CcTest::heap()->InOldSpace(o->elements()));
3027 CHECK(CcTest::heap()->InOldSpace(*o));
3031 TEST(OptimizedPretenuringMixedInObjectProperties) {
3032 i::FLAG_allow_natives_syntax = true;
3033 i::FLAG_expose_gc = true;
3034 CcTest::InitializeVM();
3035 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3036 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3037 v8::HandleScope scope(CcTest::isolate());
3039 // Grow new space unitl maximum capacity reached.
3040 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3041 CcTest::heap()->new_space()->Grow();
3045 i::ScopedVector<char> source(1024);
3048 "var number_elements = %d;"
3049 "var elements = new Array(number_elements);"
3051 " for (var i = 0; i < number_elements; i++) {"
3052 " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
3054 " return elements[number_elements - 1];"
3058 "%%OptimizeFunctionOnNextCall(f);"
3060 AllocationSite::kPretenureMinimumCreated);
3062 v8::Local<v8::Value> res = CompileRun(source.start());
3064 Handle<JSObject> o =
3065 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3067 CHECK(CcTest::heap()->InOldSpace(*o));
3068 FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
3069 FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
3070 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
3071 if (!o->IsUnboxedDoubleField(idx2)) {
3072 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
3074 CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
3077 JSObject* inner_object =
3078 reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
3079 CHECK(CcTest::heap()->InOldSpace(inner_object));
3080 if (!inner_object->IsUnboxedDoubleField(idx1)) {
3081 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
3083 CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
3085 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
3089 TEST(OptimizedPretenuringDoubleArrayProperties) {
3090 i::FLAG_allow_natives_syntax = true;
3091 i::FLAG_expose_gc = true;
3092 CcTest::InitializeVM();
3093 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3094 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3095 v8::HandleScope scope(CcTest::isolate());
3097 // Grow new space unitl maximum capacity reached.
3098 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3099 CcTest::heap()->new_space()->Grow();
3102 i::ScopedVector<char> source(1024);
3105 "var number_elements = %d;"
3106 "var elements = new Array(number_elements);"
3108 " for (var i = 0; i < number_elements; i++) {"
3109 " elements[i] = {a: 1.1, b: 2.2};"
3111 " return elements[i - 1];"
3115 "%%OptimizeFunctionOnNextCall(f);"
3117 AllocationSite::kPretenureMinimumCreated);
3119 v8::Local<v8::Value> res = CompileRun(source.start());
3121 Handle<JSObject> o =
3122 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3124 CHECK(CcTest::heap()->InOldSpace(*o));
3125 CHECK(CcTest::heap()->InOldSpace(o->properties()));
3129 TEST(OptimizedPretenuringdoubleArrayLiterals) {
3130 i::FLAG_allow_natives_syntax = true;
3131 i::FLAG_expose_gc = true;
3132 CcTest::InitializeVM();
3133 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3134 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3135 v8::HandleScope scope(CcTest::isolate());
3137 // Grow new space unitl maximum capacity reached.
3138 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3139 CcTest::heap()->new_space()->Grow();
3142 i::ScopedVector<char> source(1024);
3145 "var number_elements = %d;"
3146 "var elements = new Array(number_elements);"
3148 " for (var i = 0; i < number_elements; i++) {"
3149 " elements[i] = [1.1, 2.2, 3.3];"
3151 " return elements[number_elements - 1];"
3155 "%%OptimizeFunctionOnNextCall(f);"
3157 AllocationSite::kPretenureMinimumCreated);
3159 v8::Local<v8::Value> res = CompileRun(source.start());
3161 Handle<JSObject> o =
3162 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3164 CHECK(CcTest::heap()->InOldSpace(o->elements()));
3165 CHECK(CcTest::heap()->InOldSpace(*o));
3169 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
3170 i::FLAG_allow_natives_syntax = true;
3171 i::FLAG_expose_gc = true;
3172 CcTest::InitializeVM();
3173 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3174 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3175 v8::HandleScope scope(CcTest::isolate());
3177 // Grow new space unitl maximum capacity reached.
3178 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3179 CcTest::heap()->new_space()->Grow();
3182 i::ScopedVector<char> source(1024);
3185 "var number_elements = 100;"
3186 "var elements = new Array(number_elements);"
3188 " for (var i = 0; i < number_elements; i++) {"
3189 " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
3191 " return elements[number_elements - 1];"
3195 "%%OptimizeFunctionOnNextCall(f);"
3198 v8::Local<v8::Value> res = CompileRun(source.start());
3200 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
3201 Handle<JSObject> int_array_handle =
3202 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
3203 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
3204 Handle<JSObject> double_array_handle =
3205 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
3207 Handle<JSObject> o =
3208 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3209 CHECK(CcTest::heap()->InOldSpace(*o));
3210 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
3211 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
3212 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
3213 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
3217 TEST(OptimizedPretenuringNestedObjectLiterals) {
3218 i::FLAG_allow_natives_syntax = true;
3219 i::FLAG_expose_gc = true;
3220 CcTest::InitializeVM();
3221 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3222 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3223 v8::HandleScope scope(CcTest::isolate());
3225 // Grow new space unitl maximum capacity reached.
3226 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3227 CcTest::heap()->new_space()->Grow();
3230 i::ScopedVector<char> source(1024);
3233 "var number_elements = %d;"
3234 "var elements = new Array(number_elements);"
3236 " for (var i = 0; i < number_elements; i++) {"
3237 " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
3239 " return elements[number_elements - 1];"
3243 "%%OptimizeFunctionOnNextCall(f);"
3245 AllocationSite::kPretenureMinimumCreated);
3247 v8::Local<v8::Value> res = CompileRun(source.start());
3249 v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
3250 Handle<JSObject> int_array_handle_1 =
3251 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
3252 v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
3253 Handle<JSObject> int_array_handle_2 =
3254 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
3256 Handle<JSObject> o =
3257 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3258 CHECK(CcTest::heap()->InOldSpace(*o));
3259 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
3260 CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
3261 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
3262 CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
3266 TEST(OptimizedPretenuringNestedDoubleLiterals) {
3267 i::FLAG_allow_natives_syntax = true;
3268 i::FLAG_expose_gc = true;
3269 CcTest::InitializeVM();
3270 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3271 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3272 v8::HandleScope scope(CcTest::isolate());
3274 // Grow new space unitl maximum capacity reached.
3275 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3276 CcTest::heap()->new_space()->Grow();
3279 i::ScopedVector<char> source(1024);
3282 "var number_elements = %d;"
3283 "var elements = new Array(number_elements);"
3285 " for (var i = 0; i < number_elements; i++) {"
3286 " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
3288 " return elements[number_elements - 1];"
3292 "%%OptimizeFunctionOnNextCall(f);"
3294 AllocationSite::kPretenureMinimumCreated);
3296 v8::Local<v8::Value> res = CompileRun(source.start());
3298 v8::Local<v8::Value> double_array_1 =
3299 v8::Object::Cast(*res)->Get(v8_str("0"));
3300 Handle<JSObject> double_array_handle_1 =
3301 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
3302 v8::Local<v8::Value> double_array_2 =
3303 v8::Object::Cast(*res)->Get(v8_str("1"));
3304 Handle<JSObject> double_array_handle_2 =
3305 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
3307 Handle<JSObject> o =
3308 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3309 CHECK(CcTest::heap()->InOldSpace(*o));
3310 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
3311 CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
3312 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
3313 CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
3317 // Make sure pretenuring feedback is gathered for constructed objects as well
3319 TEST(OptimizedPretenuringConstructorCalls) {
3320 if (!i::FLAG_pretenuring_call_new) {
3321 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
3324 i::FLAG_allow_natives_syntax = true;
3325 i::FLAG_expose_gc = true;
3326 CcTest::InitializeVM();
3327 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3328 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3329 v8::HandleScope scope(CcTest::isolate());
3331 // Grow new space unitl maximum capacity reached.
3332 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3333 CcTest::heap()->new_space()->Grow();
3336 i::ScopedVector<char> source(1024);
3337 // Call new is doing slack tracking for the first
3338 // JSFunction::kGenerousAllocationCount allocations, and we can't find
3339 // mementos during that time.
3342 "var number_elements = %d;"
3343 "var elements = new Array(number_elements);"
3349 " for (var i = 0; i < number_elements; i++) {"
3350 " elements[i] = new foo();"
3352 " return elements[number_elements - 1];"
3356 "%%OptimizeFunctionOnNextCall(f);"
3358 AllocationSite::kPretenureMinimumCreated +
3359 JSFunction::kGenerousAllocationCount);
3361 v8::Local<v8::Value> res = CompileRun(source.start());
3363 Handle<JSObject> o =
3364 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3366 CHECK(CcTest::heap()->InOldSpace(*o));
3370 TEST(OptimizedPretenuringCallNew) {
3371 if (!i::FLAG_pretenuring_call_new) {
3372 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
3375 i::FLAG_allow_natives_syntax = true;
3376 i::FLAG_expose_gc = true;
3377 CcTest::InitializeVM();
3378 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3379 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3380 v8::HandleScope scope(CcTest::isolate());
3382 // Grow new space unitl maximum capacity reached.
3383 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3384 CcTest::heap()->new_space()->Grow();
3387 i::ScopedVector<char> source(1024);
3388 // Call new is doing slack tracking for the first
3389 // JSFunction::kGenerousAllocationCount allocations, and we can't find
3390 // mementos during that time.
3393 "var number_elements = %d;"
3394 "var elements = new Array(number_elements);"
3395 "function g() { this.a = 0; }"
3397 " for (var i = 0; i < number_elements; i++) {"
3398 " elements[i] = new g();"
3400 " return elements[number_elements - 1];"
3404 "%%OptimizeFunctionOnNextCall(f);"
3406 AllocationSite::kPretenureMinimumCreated +
3407 JSFunction::kGenerousAllocationCount);
3409 v8::Local<v8::Value> res = CompileRun(source.start());
3411 Handle<JSObject> o =
3412 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3413 CHECK(CcTest::heap()->InOldSpace(*o));
3417 // Test regular array literals allocation.
3418 TEST(OptimizedAllocationArrayLiterals) {
3419 i::FLAG_allow_natives_syntax = true;
3420 CcTest::InitializeVM();
3421 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3422 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3423 v8::HandleScope scope(CcTest::isolate());
3425 v8::Local<v8::Value> res = CompileRun(
3427 " var numbers = new Array(1, 2, 3);"
3428 " numbers[0] = 3.14;"
3432 "%OptimizeFunctionOnNextCall(f);"
3434 CHECK_EQ(static_cast<int>(3.14),
3435 v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
3437 Handle<JSObject> o =
3438 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3440 CHECK(CcTest::heap()->InNewSpace(o->elements()));
3444 static int CountMapTransitions(Map* map) {
3445 return TransitionArray::NumberOfTransitions(map->raw_transitions());
3449 // Test that map transitions are cleared and maps are collected with
3450 // incremental marking as well.
3452 i::FLAG_stress_compaction = false;
3453 i::FLAG_allow_natives_syntax = true;
3454 i::FLAG_trace_incremental_marking = true;
3455 i::FLAG_retain_maps_for_n_gc = 0;
3456 CcTest::InitializeVM();
3457 v8::HandleScope scope(CcTest::isolate());
3458 static const int transitions_count = 256;
3460 CompileRun("function F() {}");
3462 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3463 for (int i = 0; i < transitions_count; i++) {
3464 EmbeddedVector<char, 64> buffer;
3465 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3466 CompileRun(buffer.start());
3468 CompileRun("var root = new F;");
3471 Handle<JSObject> root =
3472 v8::Utils::OpenHandle(
3473 *v8::Handle<v8::Object>::Cast(
3474 CcTest::global()->Get(v8_str("root"))));
3476 // Count number of live transitions before marking.
3477 int transitions_before = CountMapTransitions(root->map());
3478 CompileRun("%DebugPrint(root);");
3479 CHECK_EQ(transitions_count, transitions_before);
3481 SimulateIncrementalMarking(CcTest::heap());
3482 CcTest::heap()->CollectAllGarbage();
3484 // Count number of live transitions after marking. Note that one transition
3485 // is left, because 'o' still holds an instance of one transition target.
3486 int transitions_after = CountMapTransitions(root->map());
3487 CompileRun("%DebugPrint(root);");
3488 CHECK_EQ(1, transitions_after);
3493 static void AddTransitions(int transitions_count) {
3494 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3495 for (int i = 0; i < transitions_count; i++) {
3496 EmbeddedVector<char, 64> buffer;
3497 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3498 CompileRun(buffer.start());
3503 static Handle<JSObject> GetByName(const char* name) {
3504 return v8::Utils::OpenHandle(
3505 *v8::Handle<v8::Object>::Cast(
3506 CcTest::global()->Get(v8_str(name))));
3510 static void AddPropertyTo(
3511 int gc_count, Handle<JSObject> object, const char* property_name) {
3512 Isolate* isolate = CcTest::i_isolate();
3513 Factory* factory = isolate->factory();
3514 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3515 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3516 i::FLAG_gc_interval = gc_count;
3517 i::FLAG_gc_global = true;
3518 i::FLAG_retain_maps_for_n_gc = 0;
3519 CcTest::heap()->set_allocation_timeout(gc_count);
3520 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
3524 TEST(TransitionArrayShrinksDuringAllocToZero) {
3525 i::FLAG_stress_compaction = false;
3526 i::FLAG_allow_natives_syntax = true;
3527 CcTest::InitializeVM();
3528 v8::HandleScope scope(CcTest::isolate());
3529 static const int transitions_count = 10;
3530 CompileRun("function F() { }");
3531 AddTransitions(transitions_count);
3532 CompileRun("var root = new F;");
3533 Handle<JSObject> root = GetByName("root");
3535 // Count number of live transitions before marking.
3536 int transitions_before = CountMapTransitions(root->map());
3537 CHECK_EQ(transitions_count, transitions_before);
3540 CompileRun("o = new F;"
3542 root = GetByName("root");
3543 AddPropertyTo(2, root, "funny");
3544 CcTest::heap()->CollectGarbage(NEW_SPACE);
3546 // Count number of live transitions after marking. Note that one transition
3547 // is left, because 'o' still holds an instance of one transition target.
3548 int transitions_after = CountMapTransitions(
3549 Map::cast(root->map()->GetBackPointer()));
3550 CHECK_EQ(1, transitions_after);
3554 TEST(TransitionArrayShrinksDuringAllocToOne) {
3555 i::FLAG_stress_compaction = false;
3556 i::FLAG_allow_natives_syntax = true;
3557 CcTest::InitializeVM();
3558 v8::HandleScope scope(CcTest::isolate());
3559 static const int transitions_count = 10;
3560 CompileRun("function F() {}");
3561 AddTransitions(transitions_count);
3562 CompileRun("var root = new F;");
3563 Handle<JSObject> root = GetByName("root");
3565 // Count number of live transitions before marking.
3566 int transitions_before = CountMapTransitions(root->map());
3567 CHECK_EQ(transitions_count, transitions_before);
3569 root = GetByName("root");
3570 AddPropertyTo(2, root, "funny");
3571 CcTest::heap()->CollectGarbage(NEW_SPACE);
3573 // Count number of live transitions after marking. Note that one transition
3574 // is left, because 'o' still holds an instance of one transition target.
3575 int transitions_after = CountMapTransitions(
3576 Map::cast(root->map()->GetBackPointer()));
3577 CHECK_EQ(2, transitions_after);
3581 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3582 i::FLAG_stress_compaction = false;
3583 i::FLAG_allow_natives_syntax = true;
3584 CcTest::InitializeVM();
3585 v8::HandleScope scope(CcTest::isolate());
3586 static const int transitions_count = 10;
3587 CompileRun("function F() {}");
3588 AddTransitions(transitions_count);
3589 CompileRun("var root = new F;");
3590 Handle<JSObject> root = GetByName("root");
3592 // Count number of live transitions before marking.
3593 int transitions_before = CountMapTransitions(root->map());
3594 CHECK_EQ(transitions_count, transitions_before);
3596 root = GetByName("root");
3597 AddPropertyTo(0, root, "prop9");
3598 CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
3600 // Count number of live transitions after marking. Note that one transition
3601 // is left, because 'o' still holds an instance of one transition target.
3602 int transitions_after = CountMapTransitions(
3603 Map::cast(root->map()->GetBackPointer()));
3604 CHECK_EQ(1, transitions_after);
3608 TEST(TransitionArraySimpleToFull) {
3609 i::FLAG_stress_compaction = false;
3610 i::FLAG_allow_natives_syntax = true;
3611 CcTest::InitializeVM();
3612 v8::HandleScope scope(CcTest::isolate());
3613 static const int transitions_count = 1;
3614 CompileRun("function F() {}");
3615 AddTransitions(transitions_count);
3616 CompileRun("var root = new F;");
3617 Handle<JSObject> root = GetByName("root");
3619 // Count number of live transitions before marking.
3620 int transitions_before = CountMapTransitions(root->map());
3621 CHECK_EQ(transitions_count, transitions_before);
3623 CompileRun("o = new F;"
3625 root = GetByName("root");
3626 DCHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
3627 AddPropertyTo(2, root, "happy");
3629 // Count number of live transitions after marking. Note that one transition
3630 // is left, because 'o' still holds an instance of one transition target.
3631 int transitions_after = CountMapTransitions(
3632 Map::cast(root->map()->GetBackPointer()));
3633 CHECK_EQ(1, transitions_after);
3638 TEST(Regress2143a) {
3639 i::FLAG_incremental_marking = true;
3640 CcTest::InitializeVM();
3641 v8::HandleScope scope(CcTest::isolate());
3643 // Prepare a map transition from the root object together with a yet
3644 // untransitioned root object.
3645 CompileRun("var root = new Object;"
3647 "root = new Object;");
3649 SimulateIncrementalMarking(CcTest::heap());
3651 // Compile a StoreIC that performs the prepared map transition. This
3652 // will restart incremental marking and should make sure the root is
3653 // marked grey again.
3654 CompileRun("function f(o) {"
3660 // This bug only triggers with aggressive IC clearing.
3661 CcTest::heap()->AgeInlineCaches();
3663 // Explicitly request GC to perform final marking step and sweeping.
3664 CcTest::heap()->CollectAllGarbage();
3666 Handle<JSObject> root =
3667 v8::Utils::OpenHandle(
3668 *v8::Handle<v8::Object>::Cast(
3669 CcTest::global()->Get(v8_str("root"))));
3671 // The root object should be in a sane state.
3672 CHECK(root->IsJSObject());
3673 CHECK(root->map()->IsMap());
3677 TEST(Regress2143b) {
3678 i::FLAG_incremental_marking = true;
3679 i::FLAG_allow_natives_syntax = true;
3680 CcTest::InitializeVM();
3681 v8::HandleScope scope(CcTest::isolate());
3683 // Prepare a map transition from the root object together with a yet
3684 // untransitioned root object.
3685 CompileRun("var root = new Object;"
3687 "root = new Object;");
3689 SimulateIncrementalMarking(CcTest::heap());
3691 // Compile an optimized LStoreNamedField that performs the prepared
3692 // map transition. This will restart incremental marking and should
3693 // make sure the root is marked grey again.
3694 CompileRun("function f(o) {"
3699 "%OptimizeFunctionOnNextCall(f);"
3701 "%DeoptimizeFunction(f);");
3703 // This bug only triggers with aggressive IC clearing.
3704 CcTest::heap()->AgeInlineCaches();
3706 // Explicitly request GC to perform final marking step and sweeping.
3707 CcTest::heap()->CollectAllGarbage();
3709 Handle<JSObject> root =
3710 v8::Utils::OpenHandle(
3711 *v8::Handle<v8::Object>::Cast(
3712 CcTest::global()->Get(v8_str("root"))));
3714 // The root object should be in a sane state.
3715 CHECK(root->IsJSObject());
3716 CHECK(root->map()->IsMap());
3720 TEST(ReleaseOverReservedPages) {
3721 if (FLAG_never_compact) return;
3722 i::FLAG_trace_gc = true;
3723 // The optimizer can allocate stuff, messing up the test.
3724 i::FLAG_crankshaft = false;
3725 i::FLAG_always_opt = false;
3726 CcTest::InitializeVM();
3727 Isolate* isolate = CcTest::i_isolate();
3728 Factory* factory = isolate->factory();
3729 Heap* heap = isolate->heap();
3730 v8::HandleScope scope(CcTest::isolate());
3731 static const int number_of_test_pages = 20;
3733 // Prepare many pages with low live-bytes count.
3734 PagedSpace* old_space = heap->old_space();
3735 CHECK_EQ(1, old_space->CountTotalPages());
3736 for (int i = 0; i < number_of_test_pages; i++) {
3737 AlwaysAllocateScope always_allocate(isolate);
3738 SimulateFullSpace(old_space);
3739 factory->NewFixedArray(1, TENURED);
3741 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
3743 // Triggering one GC will cause a lot of garbage to be discovered but
3744 // even spread across all allocated pages.
3745 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3746 "triggered for preparation");
3747 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3749 // Triggering subsequent GCs should cause at least half of the pages
3750 // to be released to the OS after at most two cycles.
3751 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3752 "triggered by test 1");
3753 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3754 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3755 "triggered by test 2");
3756 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
3758 // Triggering a last-resort GC should cause all pages to be released to the
3759 // OS so that other processes can seize the memory. If we get a failure here
3760 // where there are 2 pages left instead of 1, then we should increase the
3761 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3762 // first page should be small in order to reduce memory used when the VM
3763 // boots, but if the 20 small arrays don't fit on the first page then that's
3764 // an indication that it is too small.
3765 heap->CollectAllAvailableGarbage("triggered really hard");
3766 CHECK_EQ(1, old_space->CountTotalPages());
3769 static int forced_gc_counter = 0;
3771 void MockUseCounterCallback(v8::Isolate* isolate,
3772 v8::Isolate::UseCounterFeature feature) {
3773 isolate->GetCallingContext();
3774 if (feature == v8::Isolate::kForcedGC) {
3775 forced_gc_counter++;
3780 TEST(CountForcedGC) {
3781 i::FLAG_expose_gc = true;
3782 CcTest::InitializeVM();
3783 Isolate* isolate = CcTest::i_isolate();
3784 v8::HandleScope scope(CcTest::isolate());
3786 isolate->SetUseCounterCallback(MockUseCounterCallback);
3788 forced_gc_counter = 0;
3789 const char* source = "gc();";
3791 CHECK_GT(forced_gc_counter, 0);
3796 i::FLAG_stress_compaction = false;
3797 CcTest::InitializeVM();
3798 Isolate* isolate = CcTest::i_isolate();
3799 Factory* factory = isolate->factory();
3800 v8::HandleScope scope(CcTest::isolate());
3801 Handle<String> slice(CcTest::heap()->empty_string());
3804 // Generate a parent that lives in new-space.
3805 v8::HandleScope inner_scope(CcTest::isolate());
3806 const char* c = "This text is long enough to trigger sliced strings.";
3807 Handle<String> s = factory->NewStringFromAsciiChecked(c);
3808 CHECK(s->IsSeqOneByteString());
3809 CHECK(CcTest::heap()->InNewSpace(*s));
3811 // Generate a sliced string that is based on the above parent and
3812 // lives in old-space.
3813 SimulateFullSpace(CcTest::heap()->new_space());
3814 AlwaysAllocateScope always_allocate(isolate);
3815 Handle<String> t = factory->NewProperSubString(s, 5, 35);
3816 CHECK(t->IsSlicedString());
3817 CHECK(!CcTest::heap()->InNewSpace(*t));
3818 *slice.location() = *t.location();
3821 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
3822 CcTest::heap()->CollectAllGarbage();
3823 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
3828 TEST(PrintSharedFunctionInfo) {
3829 CcTest::InitializeVM();
3830 v8::HandleScope scope(CcTest::isolate());
3831 const char* source = "f = function() { return 987654321; }\n"
3832 "g = function() { return 123456789; }\n";
3834 Handle<JSFunction> g =
3835 v8::Utils::OpenHandle(
3836 *v8::Handle<v8::Function>::Cast(
3837 CcTest::global()->Get(v8_str("g"))));
3839 OFStream os(stdout);
3840 g->shared()->Print(os);
3843 #endif // OBJECT_PRINT
3846 TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3847 if (i::FLAG_always_opt) return;
3848 CcTest::InitializeVM();
3849 v8::HandleScope scope(CcTest::isolate());
3850 v8::Local<v8::Value> fun1, fun2;
3854 CompileRun("function fun() {};");
3855 fun1 = env->Global()->Get(v8_str("fun"));
3860 CompileRun("function fun() {};");
3861 fun2 = env->Global()->Get(v8_str("fun"));
3864 // Prepare function f that contains type feedback for closures
3865 // originating from two different native contexts.
3866 CcTest::global()->Set(v8_str("fun1"), fun1);
3867 CcTest::global()->Set(v8_str("fun2"), fun2);
3868 CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3870 Handle<JSFunction> f =
3871 v8::Utils::OpenHandle(
3872 *v8::Handle<v8::Function>::Cast(
3873 CcTest::global()->Get(v8_str("f"))));
3875 Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
3877 int expected_slots = 2;
3878 CHECK_EQ(expected_slots, feedback_vector->ICSlots());
3881 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot1))->IsWeakCell());
3882 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot2))->IsWeakCell());
3884 SimulateIncrementalMarking(CcTest::heap());
3885 CcTest::heap()->CollectAllGarbage();
3887 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot1)))
3889 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot2)))
3894 static Code* FindFirstIC(Code* code, Code::Kind kind) {
3895 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3896 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
3897 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3898 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3899 RelocInfo* info = it.rinfo();
3900 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3901 if (target->is_inline_cache_stub() && target->kind() == kind) {
3909 static void CheckVectorIC(Handle<JSFunction> f, int ic_slot_index,
3910 InlineCacheState desired_state) {
3911 Handle<TypeFeedbackVector> vector =
3912 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3913 FeedbackVectorICSlot slot(ic_slot_index);
3914 LoadICNexus nexus(vector, slot);
3915 CHECK(nexus.StateFromFeedback() == desired_state);
3919 static void CheckVectorICCleared(Handle<JSFunction> f, int ic_slot_index) {
3920 Handle<TypeFeedbackVector> vector =
3921 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3922 FeedbackVectorICSlot slot(ic_slot_index);
3923 LoadICNexus nexus(vector, slot);
3924 CHECK(IC::IsCleared(&nexus));
3928 TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
3929 if (i::FLAG_always_opt) return;
3930 CcTest::InitializeVM();
3931 v8::HandleScope scope(CcTest::isolate());
3933 // Prepare function f that contains a monomorphic IC for object
3934 // originating from the same native context.
3936 "function fun() { this.x = 1; };"
3937 "function f(o) { return new o(); } f(fun); f(fun);");
3938 Handle<JSFunction> f = v8::Utils::OpenHandle(
3939 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3942 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3943 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3945 SimulateIncrementalMarking(CcTest::heap());
3946 CcTest::heap()->CollectAllGarbage();
3948 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3952 TEST(IncrementalMarkingClearsMonomorphicConstructor) {
3953 if (i::FLAG_always_opt) return;
3954 CcTest::InitializeVM();
3955 Isolate* isolate = CcTest::i_isolate();
3956 v8::HandleScope scope(CcTest::isolate());
3957 v8::Local<v8::Value> fun1;
3961 CompileRun("function fun() { this.x = 1; };");
3962 fun1 = env->Global()->Get(v8_str("fun"));
3965 // Prepare function f that contains a monomorphic constructor for object
3966 // originating from a different native context.
3967 CcTest::global()->Set(v8_str("fun1"), fun1);
3969 "function fun() { this.x = 1; };"
3970 "function f(o) { return new o(); } f(fun1); f(fun1);");
3971 Handle<JSFunction> f = v8::Utils::OpenHandle(
3972 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3975 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3976 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3978 // Fire context dispose notification.
3979 CcTest::isolate()->ContextDisposedNotification();
3980 SimulateIncrementalMarking(CcTest::heap());
3981 CcTest::heap()->CollectAllGarbage();
3983 CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
3984 vector->Get(FeedbackVectorSlot(0)));
3988 TEST(IncrementalMarkingPreservesMonomorphicIC) {
3989 if (i::FLAG_always_opt) return;
3990 CcTest::InitializeVM();
3991 v8::HandleScope scope(CcTest::isolate());
3993 // Prepare function f that contains a monomorphic IC for object
3994 // originating from the same native context.
3995 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3996 "function f(o) { return o.x; } f(obj); f(obj);");
3997 Handle<JSFunction> f =
3998 v8::Utils::OpenHandle(
3999 *v8::Handle<v8::Function>::Cast(
4000 CcTest::global()->Get(v8_str("f"))));
4002 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4003 CheckVectorIC(f, 0, MONOMORPHIC);
4004 CHECK(ic_before->ic_state() == DEFAULT);
4006 SimulateIncrementalMarking(CcTest::heap());
4007 CcTest::heap()->CollectAllGarbage();
4009 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4010 CheckVectorIC(f, 0, MONOMORPHIC);
4011 CHECK(ic_after->ic_state() == DEFAULT);
4015 TEST(IncrementalMarkingClearsMonomorphicIC) {
4016 if (i::FLAG_always_opt) return;
4017 CcTest::InitializeVM();
4018 v8::HandleScope scope(CcTest::isolate());
4019 v8::Local<v8::Value> obj1;
4023 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
4024 obj1 = env->Global()->Get(v8_str("obj"));
4027 // Prepare function f that contains a monomorphic IC for object
4028 // originating from a different native context.
4029 CcTest::global()->Set(v8_str("obj1"), obj1);
4030 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
4031 Handle<JSFunction> f = v8::Utils::OpenHandle(
4032 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
4034 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4035 CheckVectorIC(f, 0, MONOMORPHIC);
4036 CHECK(ic_before->ic_state() == DEFAULT);
4038 // Fire context dispose notification.
4039 CcTest::isolate()->ContextDisposedNotification();
4040 SimulateIncrementalMarking(CcTest::heap());
4041 CcTest::heap()->CollectAllGarbage();
4043 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4044 CheckVectorICCleared(f, 0);
4045 CHECK(ic_after->ic_state() == DEFAULT);
4049 TEST(IncrementalMarkingPreservesPolymorphicIC) {
4050 if (i::FLAG_always_opt) return;
4051 CcTest::InitializeVM();
4052 v8::HandleScope scope(CcTest::isolate());
4053 v8::Local<v8::Value> obj1, obj2;
4057 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
4058 obj1 = env->Global()->Get(v8_str("obj"));
4063 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
4064 obj2 = env->Global()->Get(v8_str("obj"));
4067 // Prepare function f that contains a polymorphic IC for objects
4068 // originating from two different native contexts.
4069 CcTest::global()->Set(v8_str("obj1"), obj1);
4070 CcTest::global()->Set(v8_str("obj2"), obj2);
4071 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
4072 Handle<JSFunction> f = v8::Utils::OpenHandle(
4073 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
4075 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4076 CheckVectorIC(f, 0, POLYMORPHIC);
4077 CHECK(ic_before->ic_state() == DEFAULT);
4079 // Fire context dispose notification.
4080 SimulateIncrementalMarking(CcTest::heap());
4081 CcTest::heap()->CollectAllGarbage();
4083 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4084 CheckVectorIC(f, 0, POLYMORPHIC);
4085 CHECK(ic_after->ic_state() == DEFAULT);
4089 TEST(IncrementalMarkingClearsPolymorphicIC) {
4090 if (i::FLAG_always_opt) return;
4091 CcTest::InitializeVM();
4092 v8::HandleScope scope(CcTest::isolate());
4093 v8::Local<v8::Value> obj1, obj2;
4097 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
4098 obj1 = env->Global()->Get(v8_str("obj"));
4103 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
4104 obj2 = env->Global()->Get(v8_str("obj"));
4107 // Prepare function f that contains a polymorphic IC for objects
4108 // originating from two different native contexts.
4109 CcTest::global()->Set(v8_str("obj1"), obj1);
4110 CcTest::global()->Set(v8_str("obj2"), obj2);
4111 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
4112 Handle<JSFunction> f = v8::Utils::OpenHandle(
4113 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
4115 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4116 CheckVectorIC(f, 0, POLYMORPHIC);
4117 CHECK(ic_before->ic_state() == DEFAULT);
4119 // Fire context dispose notification.
4120 CcTest::isolate()->ContextDisposedNotification();
4121 SimulateIncrementalMarking(CcTest::heap());
4122 CcTest::heap()->CollectAllGarbage();
4124 CheckVectorICCleared(f, 0);
4125 CHECK(ic_before->ic_state() == DEFAULT);
4129 class SourceResource : public v8::String::ExternalOneByteStringResource {
4131 explicit SourceResource(const char* data)
4132 : data_(data), length_(strlen(data)) { }
4134 virtual void Dispose() {
4135 i::DeleteArray(data_);
4139 const char* data() const { return data_; }
4141 size_t length() const { return length_; }
4143 bool IsDisposed() { return data_ == NULL; }
4151 void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
4152 const char* accessor) {
4153 // Test that the data retained by the Error.stack accessor is released
4154 // after the first time the accessor is fired. We use external string
4155 // to check whether the data is being released since the external string
4156 // resource's callback is fired when the external string is GC'ed.
4157 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
4158 v8::HandleScope scope(isolate);
4159 SourceResource* resource = new SourceResource(i::StrDup(source));
4161 v8::HandleScope scope(isolate);
4162 v8::Handle<v8::String> source_string =
4163 v8::String::NewExternal(isolate, resource);
4164 i_isolate->heap()->CollectAllAvailableGarbage();
4165 v8::Script::Compile(source_string)->Run();
4166 CHECK(!resource->IsDisposed());
4168 // i_isolate->heap()->CollectAllAvailableGarbage();
4169 CHECK(!resource->IsDisposed());
4171 CompileRun(accessor);
4172 i_isolate->heap()->CollectAllAvailableGarbage();
4174 // External source has been released.
4175 CHECK(resource->IsDisposed());
4180 UNINITIALIZED_TEST(ReleaseStackTraceData) {
4181 if (i::FLAG_always_opt) {
4182 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
4183 // See: https://codereview.chromium.org/181833004/
4186 FLAG_use_ic = false; // ICs retain objects.
4187 FLAG_concurrent_recompilation = false;
4188 v8::Isolate::CreateParams create_params;
4189 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
4190 v8::Isolate* isolate = v8::Isolate::New(create_params);
4192 v8::Isolate::Scope isolate_scope(isolate);
4193 v8::HandleScope handle_scope(isolate);
4194 v8::Context::New(isolate)->Enter();
4195 static const char* source1 = "var error = null; "
4196 /* Normal Error */ "try { "
4197 " throw new Error(); "
4201 static const char* source2 = "var error = null; "
4202 /* Stack overflow */ "try { "
4203 " (function f() { f(); })(); "
4207 static const char* source3 = "var error = null; "
4208 /* Normal Error */ "try { "
4209 /* as prototype */ " throw new Error(); "
4212 " error.__proto__ = e; "
4214 static const char* source4 = "var error = null; "
4215 /* Stack overflow */ "try { "
4216 /* as prototype */ " (function f() { f(); })(); "
4219 " error.__proto__ = e; "
4221 static const char* getter = "error.stack";
4222 static const char* setter = "error.stack = 0";
4224 ReleaseStackTraceDataTest(isolate, source1, setter);
4225 ReleaseStackTraceDataTest(isolate, source2, setter);
4226 // We do not test source3 and source4 with setter, since the setter is
4227 // supposed to (untypically) write to the receiver, not the holder. This is
4228 // to emulate the behavior of a data property.
4230 ReleaseStackTraceDataTest(isolate, source1, getter);
4231 ReleaseStackTraceDataTest(isolate, source2, getter);
4232 ReleaseStackTraceDataTest(isolate, source3, getter);
4233 ReleaseStackTraceDataTest(isolate, source4, getter);
4239 TEST(Regress159140) {
4240 i::FLAG_allow_natives_syntax = true;
4241 CcTest::InitializeVM();
4242 Isolate* isolate = CcTest::i_isolate();
4243 Heap* heap = isolate->heap();
4244 HandleScope scope(isolate);
4246 // Perform one initial GC to enable code flushing.
4247 heap->CollectAllGarbage();
4249 // Prepare several closures that are all eligible for code flushing
4250 // because all reachable ones are not optimized. Make sure that the
4251 // optimized code object is directly reachable through a handle so
4252 // that it is marked black during incremental marking.
4255 HandleScope inner_scope(isolate);
4256 CompileRun("function h(x) {}"
4257 "function mkClosure() {"
4258 " return function(x) { return x + 1; };"
4260 "var f = mkClosure();"
4261 "var g = mkClosure();"
4265 "%OptimizeFunctionOnNextCall(f); f(3);"
4266 "%OptimizeFunctionOnNextCall(h); h(3);");
4268 Handle<JSFunction> f =
4269 v8::Utils::OpenHandle(
4270 *v8::Handle<v8::Function>::Cast(
4271 CcTest::global()->Get(v8_str("f"))));
4272 CHECK(f->is_compiled());
4273 CompileRun("f = null;");
4275 Handle<JSFunction> g =
4276 v8::Utils::OpenHandle(
4277 *v8::Handle<v8::Function>::Cast(
4278 CcTest::global()->Get(v8_str("g"))));
4279 CHECK(g->is_compiled());
4280 const int kAgingThreshold = 6;
4281 for (int i = 0; i < kAgingThreshold; i++) {
4282 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4285 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
4288 // Simulate incremental marking so that the functions are enqueued as
4289 // code flushing candidates. Then optimize one function. Finally
4290 // finish the GC to complete code flushing.
4291 SimulateIncrementalMarking(heap);
4292 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
4293 heap->CollectAllGarbage();
4295 // Unoptimized code is missing and the deoptimizer will go ballistic.
4296 CompileRun("g('bozo');");
4300 TEST(Regress165495) {
4301 i::FLAG_allow_natives_syntax = true;
4302 CcTest::InitializeVM();
4303 Isolate* isolate = CcTest::i_isolate();
4304 Heap* heap = isolate->heap();
4305 HandleScope scope(isolate);
4307 // Perform one initial GC to enable code flushing.
4308 heap->CollectAllGarbage();
4310 // Prepare an optimized closure that the optimized code map will get
4311 // populated. Then age the unoptimized code to trigger code flushing
4312 // but make sure the optimized code is unreachable.
4314 HandleScope inner_scope(isolate);
4315 CompileRun("function mkClosure() {"
4316 " return function(x) { return x + 1; };"
4318 "var f = mkClosure();"
4320 "%OptimizeFunctionOnNextCall(f); f(3);");
4322 Handle<JSFunction> f =
4323 v8::Utils::OpenHandle(
4324 *v8::Handle<v8::Function>::Cast(
4325 CcTest::global()->Get(v8_str("f"))));
4326 CHECK(f->is_compiled());
4327 const int kAgingThreshold = 6;
4328 for (int i = 0; i < kAgingThreshold; i++) {
4329 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4332 CompileRun("f = null;");
4335 // Simulate incremental marking so that unoptimized code is flushed
4336 // even though it still is cached in the optimized code map.
4337 SimulateIncrementalMarking(heap);
4338 heap->CollectAllGarbage();
4340 // Make a new closure that will get code installed from the code map.
4341 // Unoptimized code is missing and the deoptimizer will go ballistic.
4342 CompileRun("var g = mkClosure(); g('bozo');");
4346 TEST(Regress169209) {
4347 i::FLAG_stress_compaction = false;
4348 i::FLAG_allow_natives_syntax = true;
4350 CcTest::InitializeVM();
4351 Isolate* isolate = CcTest::i_isolate();
4352 Heap* heap = isolate->heap();
4353 HandleScope scope(isolate);
4355 // Perform one initial GC to enable code flushing.
4356 heap->CollectAllGarbage();
4358 // Prepare a shared function info eligible for code flushing for which
4359 // the unoptimized code will be replaced during optimization.
4360 Handle<SharedFunctionInfo> shared1;
4362 HandleScope inner_scope(isolate);
4363 CompileRun("function f() { return 'foobar'; }"
4364 "function g(x) { if (x) f(); }"
4369 Handle<JSFunction> f =
4370 v8::Utils::OpenHandle(
4371 *v8::Handle<v8::Function>::Cast(
4372 CcTest::global()->Get(v8_str("f"))));
4373 CHECK(f->is_compiled());
4374 const int kAgingThreshold = 6;
4375 for (int i = 0; i < kAgingThreshold; i++) {
4376 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4379 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4382 // Prepare a shared function info eligible for code flushing that will
4383 // represent the dangling tail of the candidate list.
4384 Handle<SharedFunctionInfo> shared2;
4386 HandleScope inner_scope(isolate);
4387 CompileRun("function flushMe() { return 0; }"
4390 Handle<JSFunction> f =
4391 v8::Utils::OpenHandle(
4392 *v8::Handle<v8::Function>::Cast(
4393 CcTest::global()->Get(v8_str("flushMe"))));
4394 CHECK(f->is_compiled());
4395 const int kAgingThreshold = 6;
4396 for (int i = 0; i < kAgingThreshold; i++) {
4397 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4400 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4403 // Simulate incremental marking and collect code flushing candidates.
4404 SimulateIncrementalMarking(heap);
4405 CHECK(shared1->code()->gc_metadata() != NULL);
4407 // Optimize function and make sure the unoptimized code is replaced.
4411 CompileRun("%OptimizeFunctionOnNextCall(g);"
4414 // Finish garbage collection cycle.
4415 heap->CollectAllGarbage();
4416 CHECK(shared1->code()->gc_metadata() == NULL);
4420 TEST(Regress169928) {
4421 i::FLAG_allow_natives_syntax = true;
4422 i::FLAG_crankshaft = false;
4423 CcTest::InitializeVM();
4424 Isolate* isolate = CcTest::i_isolate();
4425 Factory* factory = isolate->factory();
4426 v8::HandleScope scope(CcTest::isolate());
4428 // Some flags turn Scavenge collections into Mark-sweep collections
4429 // and hence are incompatible with this test case.
4430 if (FLAG_gc_global || FLAG_stress_compaction) return;
4432 // Prepare the environment
4433 CompileRun("function fastliteralcase(literal, value) {"
4434 " literal[0] = value;"
4437 "function get_standard_literal() {"
4438 " var literal = [1, 2, 3];"
4441 "obj = fastliteralcase(get_standard_literal(), 1);"
4442 "obj = fastliteralcase(get_standard_literal(), 1.5);"
4443 "obj = fastliteralcase(get_standard_literal(), 2);");
4446 v8::Local<v8::String> mote_code_string =
4447 v8_str("fastliteralcase(mote, 2.5);");
4449 v8::Local<v8::String> array_name = v8_str("mote");
4450 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
4452 // First make sure we flip spaces
4453 CcTest::heap()->CollectGarbage(NEW_SPACE);
4455 // Allocate the object.
4456 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
4457 array_data->set(0, Smi::FromInt(1));
4458 array_data->set(1, Smi::FromInt(2));
4460 AllocateAllButNBytes(CcTest::heap()->new_space(),
4461 JSArray::kSize + AllocationMemento::kSize +
4464 Handle<JSArray> array =
4465 factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);
4467 CHECK_EQ(Smi::FromInt(2), array->length());
4468 CHECK(array->HasFastSmiOrObjectElements());
4470 // We need filler the size of AllocationMemento object, plus an extra
4471 // fill pointer value.
4472 HeapObject* obj = NULL;
4473 AllocationResult allocation =
4474 CcTest::heap()->new_space()->AllocateRawUnaligned(
4475 AllocationMemento::kSize + kPointerSize);
4476 CHECK(allocation.To(&obj));
4477 Address addr_obj = obj->address();
4478 CcTest::heap()->CreateFillerObjectAt(
4479 addr_obj, AllocationMemento::kSize + kPointerSize);
4481 // Give the array a name, making sure not to allocate strings.
4482 v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
4483 CcTest::global()->Set(array_name, array_obj);
4485 // This should crash with a protection violation if we are running a build
4487 AlwaysAllocateScope aa_scope(isolate);
4488 v8::Script::Compile(mote_code_string)->Run();
4492 TEST(Regress168801) {
4493 if (i::FLAG_never_compact) return;
4494 i::FLAG_always_compact = true;
4495 i::FLAG_cache_optimized_code = false;
4496 i::FLAG_allow_natives_syntax = true;
4497 CcTest::InitializeVM();
4498 Isolate* isolate = CcTest::i_isolate();
4499 Heap* heap = isolate->heap();
4500 HandleScope scope(isolate);
4502 // Perform one initial GC to enable code flushing.
4503 heap->CollectAllGarbage();
4505 // Ensure the code ends up on an evacuation candidate.
4506 SimulateFullSpace(heap->code_space());
4508 // Prepare an unoptimized function that is eligible for code flushing.
4509 Handle<JSFunction> function;
4511 HandleScope inner_scope(isolate);
4512 CompileRun("function mkClosure() {"
4513 " return function(x) { return x + 1; };"
4515 "var f = mkClosure();"
4518 Handle<JSFunction> f =
4519 v8::Utils::OpenHandle(
4520 *v8::Handle<v8::Function>::Cast(
4521 CcTest::global()->Get(v8_str("f"))));
4522 CHECK(f->is_compiled());
4523 const int kAgingThreshold = 6;
4524 for (int i = 0; i < kAgingThreshold; i++) {
4525 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4528 function = inner_scope.CloseAndEscape(handle(*f, isolate));
4531 // Simulate incremental marking so that unoptimized function is enqueued as a
4532 // candidate for code flushing. The shared function info however will not be
4533 // explicitly enqueued.
4534 SimulateIncrementalMarking(heap);
4536 // Now optimize the function so that it is taken off the candidate list.
4538 HandleScope inner_scope(isolate);
4539 CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
4542 // This cycle will bust the heap and subsequent cycles will go ballistic.
4543 heap->CollectAllGarbage();
4544 heap->CollectAllGarbage();
4548 TEST(Regress173458) {
4549 if (i::FLAG_never_compact) return;
4550 i::FLAG_always_compact = true;
4551 i::FLAG_cache_optimized_code = false;
4552 i::FLAG_allow_natives_syntax = true;
4553 CcTest::InitializeVM();
4554 Isolate* isolate = CcTest::i_isolate();
4555 Heap* heap = isolate->heap();
4556 HandleScope scope(isolate);
4558 // Perform one initial GC to enable code flushing.
4559 heap->CollectAllGarbage();
4561 // Ensure the code ends up on an evacuation candidate.
4562 SimulateFullSpace(heap->code_space());
4564 // Prepare an unoptimized function that is eligible for code flushing.
4565 Handle<JSFunction> function;
4567 HandleScope inner_scope(isolate);
4568 CompileRun("function mkClosure() {"
4569 " return function(x) { return x + 1; };"
4571 "var f = mkClosure();"
4574 Handle<JSFunction> f =
4575 v8::Utils::OpenHandle(
4576 *v8::Handle<v8::Function>::Cast(
4577 CcTest::global()->Get(v8_str("f"))));
4578 CHECK(f->is_compiled());
4579 const int kAgingThreshold = 6;
4580 for (int i = 0; i < kAgingThreshold; i++) {
4581 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4584 function = inner_scope.CloseAndEscape(handle(*f, isolate));
4587 // Simulate incremental marking so that unoptimized function is enqueued as a
4588 // candidate for code flushing. The shared function info however will not be
4589 // explicitly enqueued.
4590 SimulateIncrementalMarking(heap);
4592 // Now enable the debugger which in turn will disable code flushing.
4593 CHECK(isolate->debug()->Load());
4595 // This cycle will bust the heap and subsequent cycles will go ballistic.
4596 heap->CollectAllGarbage();
4597 heap->CollectAllGarbage();
4602 TEST(Regress513507) {
4603 i::FLAG_flush_optimized_code_cache = false;
4604 i::FLAG_allow_natives_syntax = true;
4605 i::FLAG_gc_global = true;
4606 CcTest::InitializeVM();
4607 Isolate* isolate = CcTest::i_isolate();
4608 Heap* heap = isolate->heap();
4609 HandleScope scope(isolate);
4611 // Prepare function whose optimized code map we can use.
4612 Handle<SharedFunctionInfo> shared;
4614 HandleScope inner_scope(isolate);
4615 CompileRun("function f() { return 1 }"
4616 "f(); %OptimizeFunctionOnNextCall(f); f();");
4618 Handle<JSFunction> f =
4619 v8::Utils::OpenHandle(
4620 *v8::Handle<v8::Function>::Cast(
4621 CcTest::global()->Get(v8_str("f"))));
4622 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4623 CompileRun("f = null");
4626 // Prepare optimized code that we can use.
4629 HandleScope inner_scope(isolate);
4630 CompileRun("function g() { return 2 }"
4631 "g(); %OptimizeFunctionOnNextCall(g); g();");
4633 Handle<JSFunction> g =
4634 v8::Utils::OpenHandle(
4635 *v8::Handle<v8::Function>::Cast(
4636 CcTest::global()->Get(v8_str("g"))));
4637 code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
4638 if (!code->is_optimized_code()) return;
4641 Handle<FixedArray> lit = isolate->factory()->empty_fixed_array();
4642 Handle<Context> context(isolate->context());
4644 // Add the new code several times to the optimized code map and also set an
4645 // allocation timeout so that expanding the code map will trigger a GC.
4646 heap->set_allocation_timeout(5);
4647 FLAG_gc_interval = 1000;
4648 for (int i = 0; i < 10; ++i) {
4649 BailoutId id = BailoutId(i);
4650 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4656 TEST(Regress514122) {
4657 i::FLAG_flush_optimized_code_cache = false;
4658 i::FLAG_allow_natives_syntax = true;
4659 CcTest::InitializeVM();
4660 Isolate* isolate = CcTest::i_isolate();
4661 Heap* heap = isolate->heap();
4662 HandleScope scope(isolate);
4664 // Perfrom one initial GC to enable code flushing.
4665 CcTest::heap()->CollectAllGarbage();
4667 // Prepare function whose optimized code map we can use.
4668 Handle<SharedFunctionInfo> shared;
4670 HandleScope inner_scope(isolate);
4671 CompileRun("function f() { return 1 }"
4672 "f(); %OptimizeFunctionOnNextCall(f); f();");
4674 Handle<JSFunction> f =
4675 v8::Utils::OpenHandle(
4676 *v8::Handle<v8::Function>::Cast(
4677 CcTest::global()->Get(v8_str("f"))));
4678 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4679 CompileRun("f = null");
4682 // Prepare optimized code that we can use.
4685 HandleScope inner_scope(isolate);
4686 CompileRun("function g() { return 2 }"
4687 "g(); %OptimizeFunctionOnNextCall(g); g();");
4689 Handle<JSFunction> g =
4690 v8::Utils::OpenHandle(
4691 *v8::Handle<v8::Function>::Cast(
4692 CcTest::global()->Get(v8_str("g"))));
4693 code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
4694 if (!code->is_optimized_code()) return;
4697 Handle<FixedArray> lit = isolate->factory()->empty_fixed_array();
4698 Handle<Context> context(isolate->context());
4700 // Add the code several times to the optimized code map.
4701 for (int i = 0; i < 3; ++i) {
4702 HandleScope inner_scope(isolate);
4703 BailoutId id = BailoutId(i);
4704 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4706 shared->optimized_code_map()->Print();
4708 // Add the code with a literals array to be evacuated.
4711 HandleScope inner_scope(isolate);
4712 AlwaysAllocateScope always_allocate(isolate);
4713 // Make sure literal is placed on an old-space evacuation candidate.
4714 SimulateFullSpace(heap->old_space());
4715 Handle<FixedArray> lit = isolate->factory()->NewFixedArray(23, TENURED);
4716 evac_page = Page::FromAddress(lit->address());
4717 BailoutId id = BailoutId(100);
4718 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4721 // Heap is ready, force {lit_page} to become an evacuation candidate and
4722 // simulate incremental marking to enqueue optimized code map.
4723 FLAG_manual_evacuation_candidates_selection = true;
4724 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4725 SimulateIncrementalMarking(heap);
4727 // No matter whether reachable or not, {boomer} is doomed.
4728 Handle<Object> boomer(shared->optimized_code_map(), isolate);
4730 // Add the code several times to the optimized code map. This will leave old
4731 // copies of the optimized code map unreachable but still marked.
4732 for (int i = 3; i < 6; ++i) {
4733 HandleScope inner_scope(isolate);
4734 BailoutId id = BailoutId(i);
4735 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4738 // Trigger a GC to flush out the bug.
4739 heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
4744 TEST(LargeObjectSlotRecording) {
4745 FLAG_manual_evacuation_candidates_selection = true;
4746 CcTest::InitializeVM();
4747 Isolate* isolate = CcTest::i_isolate();
4748 Heap* heap = isolate->heap();
4749 HandleScope scope(isolate);
4751 // Create an object on an evacuation candidate.
4752 SimulateFullSpace(heap->old_space());
4753 Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
4754 Page* evac_page = Page::FromAddress(lit->address());
4755 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4756 FixedArray* old_location = *lit;
4758 // Allocate a large object.
4759 const int kSize = 1000000;
4760 Handle<FixedArray> lo = isolate->factory()->NewFixedArray(kSize, TENURED);
4761 CHECK(heap->lo_space()->Contains(*lo));
4763 // Start incremental marking to active write barrier.
4764 SimulateIncrementalMarking(heap, false);
4765 heap->AdvanceIncrementalMarking(10000000, 10000000,
4766 IncrementalMarking::IdleStepActions());
4768 // Create references from the large object to the object on the evacuation
4770 const int kStep = kSize / 10;
4771 for (int i = 0; i < kSize; i += kStep) {
4773 CHECK(lo->get(i) == old_location);
4776 // Move the evaucation candidate object.
4777 CcTest::heap()->CollectAllGarbage();
4779 // Verify that the pointers in the large object got updated.
4780 for (int i = 0; i < kSize; i += kStep) {
4781 CHECK_EQ(lo->get(i), *lit);
4782 CHECK(lo->get(i) != old_location);
4787 class DummyVisitor : public ObjectVisitor {
4789 void VisitPointers(Object** start, Object** end) { }
4793 TEST(DeferredHandles) {
4794 CcTest::InitializeVM();
4795 Isolate* isolate = CcTest::i_isolate();
4796 Heap* heap = isolate->heap();
4797 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
4798 HandleScopeData* data = isolate->handle_scope_data();
4799 Handle<Object> init(heap->empty_string(), isolate);
4800 while (data->next < data->limit) {
4801 Handle<Object> obj(heap->empty_string(), isolate);
4803 // An entire block of handles has been filled.
4804 // Next handle would require a new block.
4805 DCHECK(data->next == data->limit);
4807 DeferredHandleScope deferred(isolate);
4808 DummyVisitor visitor;
4809 isolate->handle_scope_implementer()->Iterate(&visitor);
4810 delete deferred.Detach();
4814 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
4815 CcTest::InitializeVM();
4816 v8::HandleScope scope(CcTest::isolate());
4817 CompileRun("function f(n) {"
4818 " var a = new Array(n);"
4819 " for (var i = 0; i < n; i += 100) a[i] = i;"
4821 "f(10 * 1024 * 1024);");
4822 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4823 if (marking->IsStopped()) {
4824 CcTest::heap()->StartIncrementalMarking();
4826 // This big step should be sufficient to mark the whole array.
4827 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4828 DCHECK(marking->IsComplete() ||
4829 marking->IsReadyToOverApproximateWeakClosure());
4833 TEST(DisableInlineAllocation) {
4834 i::FLAG_allow_natives_syntax = true;
4835 CcTest::InitializeVM();
4836 v8::HandleScope scope(CcTest::isolate());
4837 CompileRun("function test() {"
4839 " for (var i = 0; i < 10; i++) {"
4840 " x[i] = [ {}, [1,2,3], [1,x,3] ];"
4844 " %OptimizeFunctionOnNextCall(test);"
4846 " %DeoptimizeFunction(test);"
4849 // Warm-up with inline allocation enabled.
4850 CompileRun("test(); test(); run();");
4852 // Run test with inline allocation disabled.
4853 CcTest::heap()->DisableInlineAllocation();
4854 CompileRun("run()");
4856 // Run test with inline allocation re-enabled.
4857 CcTest::heap()->EnableInlineAllocation();
4858 CompileRun("run()");
4862 static int AllocationSitesCount(Heap* heap) {
4864 for (Object* site = heap->allocation_sites_list();
4865 !(site->IsUndefined());
4866 site = AllocationSite::cast(site)->weak_next()) {
4873 TEST(EnsureAllocationSiteDependentCodesProcessed) {
4874 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4875 i::FLAG_allow_natives_syntax = true;
4876 CcTest::InitializeVM();
4877 Isolate* isolate = CcTest::i_isolate();
4878 v8::internal::Heap* heap = CcTest::heap();
4879 GlobalHandles* global_handles = isolate->global_handles();
4881 if (!isolate->use_crankshaft()) return;
4883 // The allocation site at the head of the list is ours.
4884 Handle<AllocationSite> site;
4886 LocalContext context;
4887 v8::HandleScope scope(context->GetIsolate());
4889 int count = AllocationSitesCount(heap);
4890 CompileRun("var bar = function() { return (new Array()); };"
4895 // One allocation site should have been created.
4896 int new_count = AllocationSitesCount(heap);
4897 CHECK_EQ(new_count, (count + 1));
4898 site = Handle<AllocationSite>::cast(
4899 global_handles->Create(
4900 AllocationSite::cast(heap->allocation_sites_list())));
4902 CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
4904 DependentCode::GroupStartIndexes starts(site->dependent_code());
4905 CHECK_GE(starts.number_of_entries(), 1);
4906 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4907 CHECK(site->dependent_code()->object_at(index)->IsWeakCell());
4908 Code* function_bar = Code::cast(
4909 WeakCell::cast(site->dependent_code()->object_at(index))->value());
4910 Handle<JSFunction> bar_handle =
4911 v8::Utils::OpenHandle(
4912 *v8::Handle<v8::Function>::Cast(
4913 CcTest::global()->Get(v8_str("bar"))));
4914 CHECK_EQ(bar_handle->code(), function_bar);
4917 // Now make sure that a gc should get rid of the function, even though we
4918 // still have the allocation site alive.
4919 for (int i = 0; i < 4; i++) {
4920 heap->CollectAllGarbage();
4923 // The site still exists because of our global handle, but the code is no
4924 // longer referred to by dependent_code().
4925 DependentCode::GroupStartIndexes starts(site->dependent_code());
4926 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4927 CHECK(site->dependent_code()->object_at(index)->IsWeakCell() &&
4928 WeakCell::cast(site->dependent_code()->object_at(index))->cleared());
4932 TEST(CellsInOptimizedCodeAreWeak) {
4933 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4934 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4935 i::FLAG_allow_natives_syntax = true;
4936 CcTest::InitializeVM();
4937 Isolate* isolate = CcTest::i_isolate();
4938 v8::internal::Heap* heap = CcTest::heap();
4940 if (!isolate->use_crankshaft()) return;
4941 HandleScope outer_scope(heap->isolate());
4944 LocalContext context;
4945 HandleScope scope(heap->isolate());
4947 CompileRun("bar = (function() {"
4951 " var foo = function(x) { with (x) { return 1 + x; } };"
4955 " %OptimizeFunctionOnNextCall(bar);"
4957 " return bar;})();");
4959 Handle<JSFunction> bar =
4960 v8::Utils::OpenHandle(
4961 *v8::Handle<v8::Function>::Cast(
4962 CcTest::global()->Get(v8_str("bar"))));
4963 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4966 // Now make sure that a gc should get rid of the function
4967 for (int i = 0; i < 4; i++) {
4968 heap->CollectAllGarbage();
4971 DCHECK(code->marked_for_deoptimization());
4975 TEST(ObjectsInOptimizedCodeAreWeak) {
4976 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4977 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4978 i::FLAG_allow_natives_syntax = true;
4979 CcTest::InitializeVM();
4980 Isolate* isolate = CcTest::i_isolate();
4981 v8::internal::Heap* heap = CcTest::heap();
4983 if (!isolate->use_crankshaft()) return;
4984 HandleScope outer_scope(heap->isolate());
4987 LocalContext context;
4988 HandleScope scope(heap->isolate());
4990 CompileRun("function bar() {"
4993 "function foo(x) { with (x) { return 1 + x; } };"
4997 "%OptimizeFunctionOnNextCall(bar);"
5000 Handle<JSFunction> bar =
5001 v8::Utils::OpenHandle(
5002 *v8::Handle<v8::Function>::Cast(
5003 CcTest::global()->Get(v8_str("bar"))));
5004 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
5007 // Now make sure that a gc should get rid of the function
5008 for (int i = 0; i < 4; i++) {
5009 heap->CollectAllGarbage();
5012 DCHECK(code->marked_for_deoptimization());
5016 TEST(NoWeakHashTableLeakWithIncrementalMarking) {
5017 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
5018 if (!i::FLAG_incremental_marking) return;
5019 i::FLAG_weak_embedded_objects_in_optimized_code = true;
5020 i::FLAG_allow_natives_syntax = true;
5021 i::FLAG_compilation_cache = false;
5022 i::FLAG_retain_maps_for_n_gc = 0;
5023 CcTest::InitializeVM();
5024 Isolate* isolate = CcTest::i_isolate();
5026 // Do not run for no-snap builds.
5027 if (!i::Snapshot::HaveASnapshotToStartFrom(isolate)) return;
5029 v8::internal::Heap* heap = CcTest::heap();
5031 // Get a clean slate regarding optimized functions on the heap.
5032 i::Deoptimizer::DeoptimizeAll(isolate);
5033 heap->CollectAllGarbage();
5035 if (!isolate->use_crankshaft()) return;
5036 HandleScope outer_scope(heap->isolate());
5037 for (int i = 0; i < 3; i++) {
5038 SimulateIncrementalMarking(heap);
5040 LocalContext context;
5041 HandleScope scope(heap->isolate());
5042 EmbeddedVector<char, 256> source;
5044 "function bar%d() {"
5047 "function foo%d(x) { with (x) { return 1 + x; } };"
5051 "%%OptimizeFunctionOnNextCall(bar%d);"
5053 i, i, i, i, i, i, i, i);
5054 CompileRun(source.start());
5056 heap->CollectAllGarbage();
5059 if (heap->weak_object_to_code_table()->IsHashTable()) {
5060 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
5061 elements = t->NumberOfElements();
5063 CHECK_EQ(0, elements);
5067 static Handle<JSFunction> OptimizeDummyFunction(const char* name) {
5068 EmbeddedVector<char, 256> source;
5070 "function %s() { return 0; }"
5072 "%%OptimizeFunctionOnNextCall(%s);"
5073 "%s();", name, name, name, name, name);
5074 CompileRun(source.start());
5075 Handle<JSFunction> fun =
5076 v8::Utils::OpenHandle(
5077 *v8::Handle<v8::Function>::Cast(
5078 CcTest::global()->Get(v8_str(name))));
5083 static int GetCodeChainLength(Code* code) {
5085 while (code->next_code_link()->IsCode()) {
5087 code = Code::cast(code->next_code_link());
5093 TEST(NextCodeLinkIsWeak) {
5094 i::FLAG_always_opt = false;
5095 i::FLAG_allow_natives_syntax = true;
5096 CcTest::InitializeVM();
5097 Isolate* isolate = CcTest::i_isolate();
5098 v8::internal::Heap* heap = CcTest::heap();
5100 if (!isolate->use_crankshaft()) return;
5101 HandleScope outer_scope(heap->isolate());
5103 heap->CollectAllAvailableGarbage();
5104 int code_chain_length_before, code_chain_length_after;
5106 HandleScope scope(heap->isolate());
5107 Handle<JSFunction> mortal = OptimizeDummyFunction("mortal");
5108 Handle<JSFunction> immortal = OptimizeDummyFunction("immortal");
5109 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
5110 code_chain_length_before = GetCodeChainLength(immortal->code());
5111 // Keep the immortal code and let the mortal code die.
5112 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
5113 CompileRun("mortal = null; immortal = null;");
5115 heap->CollectAllAvailableGarbage();
5116 // Now mortal code should be dead.
5117 code_chain_length_after = GetCodeChainLength(*code);
5118 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
5122 static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
5123 i::byte buffer[i::Assembler::kMinimalBufferSize];
5124 MacroAssembler masm(isolate, buffer, sizeof(buffer));
5126 masm.Push(isolate->factory()->undefined_value());
5128 masm.GetCode(&desc);
5129 Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
5130 Handle<Code> code = isolate->factory()->NewCode(
5131 desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
5132 CHECK(code->IsCode());
5137 TEST(NextCodeLinkIsWeak2) {
5138 i::FLAG_allow_natives_syntax = true;
5139 CcTest::InitializeVM();
5140 Isolate* isolate = CcTest::i_isolate();
5141 v8::internal::Heap* heap = CcTest::heap();
5143 if (!isolate->use_crankshaft()) return;
5144 HandleScope outer_scope(heap->isolate());
5145 heap->CollectAllAvailableGarbage();
5146 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
5147 Handle<Code> new_head;
5148 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
5150 HandleScope scope(heap->isolate());
5151 Handle<Code> immortal = DummyOptimizedCode(isolate);
5152 Handle<Code> mortal = DummyOptimizedCode(isolate);
5153 mortal->set_next_code_link(*old_head);
5154 immortal->set_next_code_link(*mortal);
5155 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
5156 new_head = scope.CloseAndEscape(immortal);
5158 heap->CollectAllAvailableGarbage();
5159 // Now mortal code should be dead.
5160 CHECK_EQ(*old_head, new_head->next_code_link());
5164 static bool weak_ic_cleared = false;
5166 static void ClearWeakIC(
5167 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
5168 printf("clear weak is called\n");
5169 weak_ic_cleared = true;
5170 data.GetParameter()->Reset();
5174 TEST(WeakFunctionInConstructor) {
5175 if (i::FLAG_always_opt) return;
5176 i::FLAG_stress_compaction = false;
5177 CcTest::InitializeVM();
5178 v8::Isolate* isolate = CcTest::isolate();
5179 v8::HandleScope scope(isolate);
5181 "function createObj(obj) {"
5182 " return new obj();"
5184 Handle<JSFunction> createObj =
5185 v8::Utils::OpenHandle(*v8::Handle<v8::Function>::Cast(
5186 CcTest::global()->Get(v8_str("createObj"))));
5188 v8::Persistent<v8::Object> garbage;
5190 v8::HandleScope scope(isolate);
5191 const char* source =
5193 " function hat() { this.x = 5; }"
5198 garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
5200 weak_ic_cleared = false;
5201 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5202 Heap* heap = CcTest::i_isolate()->heap();
5203 heap->CollectAllGarbage();
5204 CHECK(weak_ic_cleared);
5206 // We've determined the constructor in createObj has had it's weak cell
5207 // cleared. Now, verify that one additional call with a new function
5208 // allows monomorphicity.
5209 Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
5210 createObj->shared()->feedback_vector(), CcTest::i_isolate());
5211 for (int i = 0; i < 20; i++) {
5212 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5213 CHECK(slot_value->IsWeakCell());
5214 if (WeakCell::cast(slot_value)->cleared()) break;
5215 heap->CollectAllGarbage();
5218 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5219 CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
5221 "function coat() { this.x = 6; }"
5222 "createObj(coat);");
5223 slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5224 CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
5228 // Checks that the value returned by execution of the source is weak.
5229 void CheckWeakness(const char* source) {
5230 i::FLAG_stress_compaction = false;
5231 CcTest::InitializeVM();
5232 v8::Isolate* isolate = CcTest::isolate();
5233 v8::HandleScope scope(isolate);
5234 v8::Persistent<v8::Object> garbage;
5236 v8::HandleScope scope(isolate);
5237 garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
5239 weak_ic_cleared = false;
5240 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5241 Heap* heap = CcTest::i_isolate()->heap();
5242 heap->CollectAllGarbage();
5243 CHECK(weak_ic_cleared);
5247 // Each of the following "weak IC" tests creates an IC that embeds a map with
5248 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
5249 TEST(WeakMapInMonomorphicLoadIC) {
5250 CheckWeakness("function loadIC(obj) {"
5254 " var proto = {'name' : 'weak'};"
5255 " var obj = Object.create(proto);"
5264 TEST(WeakMapInPolymorphicLoadIC) {
5266 "function loadIC(obj) {"
5270 " var proto = {'name' : 'weak'};"
5271 " var obj = Object.create(proto);"
5275 " var poly = Object.create(proto);"
5283 TEST(WeakMapInMonomorphicKeyedLoadIC) {
5284 CheckWeakness("function keyedLoadIC(obj, field) {"
5285 " return obj[field];"
5288 " var proto = {'name' : 'weak'};"
5289 " var obj = Object.create(proto);"
5290 " keyedLoadIC(obj, 'name');"
5291 " keyedLoadIC(obj, 'name');"
5292 " keyedLoadIC(obj, 'name');"
5298 TEST(WeakMapInPolymorphicKeyedLoadIC) {
5300 "function keyedLoadIC(obj, field) {"
5301 " return obj[field];"
5304 " var proto = {'name' : 'weak'};"
5305 " var obj = Object.create(proto);"
5306 " keyedLoadIC(obj, 'name');"
5307 " keyedLoadIC(obj, 'name');"
5308 " keyedLoadIC(obj, 'name');"
5309 " var poly = Object.create(proto);"
5311 " keyedLoadIC(poly, 'name');"
5317 TEST(WeakMapInMonomorphicStoreIC) {
5318 CheckWeakness("function storeIC(obj, value) {"
5319 " obj.name = value;"
5322 " var proto = {'name' : 'weak'};"
5323 " var obj = Object.create(proto);"
5324 " storeIC(obj, 'x');"
5325 " storeIC(obj, 'x');"
5326 " storeIC(obj, 'x');"
5332 TEST(WeakMapInPolymorphicStoreIC) {
5334 "function storeIC(obj, value) {"
5335 " obj.name = value;"
5338 " var proto = {'name' : 'weak'};"
5339 " var obj = Object.create(proto);"
5340 " storeIC(obj, 'x');"
5341 " storeIC(obj, 'x');"
5342 " storeIC(obj, 'x');"
5343 " var poly = Object.create(proto);"
5345 " storeIC(poly, 'x');"
5351 TEST(WeakMapInMonomorphicKeyedStoreIC) {
5352 CheckWeakness("function keyedStoreIC(obj, field, value) {"
5353 " obj[field] = value;"
5356 " var proto = {'name' : 'weak'};"
5357 " var obj = Object.create(proto);"
5358 " keyedStoreIC(obj, 'x');"
5359 " keyedStoreIC(obj, 'x');"
5360 " keyedStoreIC(obj, 'x');"
5366 TEST(WeakMapInPolymorphicKeyedStoreIC) {
5368 "function keyedStoreIC(obj, field, value) {"
5369 " obj[field] = value;"
5372 " var proto = {'name' : 'weak'};"
5373 " var obj = Object.create(proto);"
5374 " keyedStoreIC(obj, 'x');"
5375 " keyedStoreIC(obj, 'x');"
5376 " keyedStoreIC(obj, 'x');"
5377 " var poly = Object.create(proto);"
5379 " keyedStoreIC(poly, 'x');"
5385 TEST(WeakMapInMonomorphicCompareNilIC) {
5386 CheckWeakness("function compareNilIC(obj) {"
5387 " return obj == null;"
5390 " var proto = {'name' : 'weak'};"
5391 " var obj = Object.create(proto);"
5392 " compareNilIC(obj);"
5393 " compareNilIC(obj);"
5394 " compareNilIC(obj);"
5400 Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
5401 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
5402 Handle<Object> obj =
5403 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
5404 return Handle<JSFunction>::cast(obj);
5408 void CheckIC(Code* code, Code::Kind kind, SharedFunctionInfo* shared,
5409 int ic_slot, InlineCacheState state) {
5410 if (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
5411 kind == Code::CALL_IC) {
5412 TypeFeedbackVector* vector = shared->feedback_vector();
5413 FeedbackVectorICSlot slot(ic_slot);
5414 if (kind == Code::LOAD_IC) {
5415 LoadICNexus nexus(vector, slot);
5416 CHECK_EQ(nexus.StateFromFeedback(), state);
5417 } else if (kind == Code::KEYED_LOAD_IC) {
5418 KeyedLoadICNexus nexus(vector, slot);
5419 CHECK_EQ(nexus.StateFromFeedback(), state);
5420 } else if (kind == Code::CALL_IC) {
5421 CallICNexus nexus(vector, slot);
5422 CHECK_EQ(nexus.StateFromFeedback(), state);
5425 Code* ic = FindFirstIC(code, kind);
5426 CHECK(ic->is_inline_cache_stub());
5427 CHECK(ic->ic_state() == state);
5432 TEST(MonomorphicStaysMonomorphicAfterGC) {
5433 if (FLAG_always_opt) return;
5434 CcTest::InitializeVM();
5435 Isolate* isolate = CcTest::i_isolate();
5436 Heap* heap = isolate->heap();
5437 v8::HandleScope scope(CcTest::isolate());
5439 "function loadIC(obj) {"
5442 "function testIC() {"
5443 " var proto = {'name' : 'weak'};"
5444 " var obj = Object.create(proto);"
5450 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5452 v8::HandleScope scope(CcTest::isolate());
5453 CompileRun("(testIC())");
5455 heap->CollectAllGarbage();
5456 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
5458 v8::HandleScope scope(CcTest::isolate());
5459 CompileRun("(testIC())");
5461 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
5465 TEST(PolymorphicStaysPolymorphicAfterGC) {
5466 if (FLAG_always_opt) return;
5467 CcTest::InitializeVM();
5468 Isolate* isolate = CcTest::i_isolate();
5469 Heap* heap = isolate->heap();
5470 v8::HandleScope scope(CcTest::isolate());
5472 "function loadIC(obj) {"
5475 "function testIC() {"
5476 " var proto = {'name' : 'weak'};"
5477 " var obj = Object.create(proto);"
5481 " var poly = Object.create(proto);"
5486 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5488 v8::HandleScope scope(CcTest::isolate());
5489 CompileRun("(testIC())");
5491 heap->CollectAllGarbage();
5492 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
5494 v8::HandleScope scope(CcTest::isolate());
5495 CompileRun("(testIC())");
5497 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
5502 CcTest::InitializeVM();
5503 Isolate* isolate = CcTest::i_isolate();
5504 v8::internal::Heap* heap = CcTest::heap();
5505 v8::internal::Factory* factory = isolate->factory();
5507 HandleScope outer_scope(isolate);
5508 Handle<WeakCell> weak_cell1;
5510 HandleScope inner_scope(isolate);
5511 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
5512 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
5515 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5516 Handle<WeakCell> weak_cell2;
5518 HandleScope inner_scope(isolate);
5519 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
5521 CHECK(weak_cell1->value()->IsFixedArray());
5522 CHECK_EQ(*survivor, weak_cell2->value());
5523 heap->CollectGarbage(NEW_SPACE);
5524 CHECK(weak_cell1->value()->IsFixedArray());
5525 CHECK_EQ(*survivor, weak_cell2->value());
5526 heap->CollectGarbage(NEW_SPACE);
5527 CHECK(weak_cell1->value()->IsFixedArray());
5528 CHECK_EQ(*survivor, weak_cell2->value());
5529 heap->CollectAllAvailableGarbage();
5530 CHECK(weak_cell1->cleared());
5531 CHECK_EQ(*survivor, weak_cell2->value());
5535 TEST(WeakCellsWithIncrementalMarking) {
5536 CcTest::InitializeVM();
5537 Isolate* isolate = CcTest::i_isolate();
5538 v8::internal::Heap* heap = CcTest::heap();
5539 v8::internal::Factory* factory = isolate->factory();
5542 HandleScope outer_scope(isolate);
5543 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5544 Handle<WeakCell> weak_cells[N];
5546 for (int i = 0; i < N; i++) {
5547 HandleScope inner_scope(isolate);
5548 Handle<HeapObject> value =
5549 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
5550 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
5551 CHECK(weak_cell->value()->IsFixedArray());
5552 IncrementalMarking* marking = heap->incremental_marking();
5553 if (marking->IsStopped()) {
5554 heap->StartIncrementalMarking();
5556 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5557 heap->CollectGarbage(NEW_SPACE);
5558 CHECK(weak_cell->value()->IsFixedArray());
5559 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
5561 heap->CollectAllGarbage();
5562 CHECK_EQ(*survivor, weak_cells[0]->value());
5563 for (int i = 1; i < N; i++) {
5564 CHECK(weak_cells[i]->cleared());
5570 TEST(AddInstructionChangesNewSpacePromotion) {
5571 i::FLAG_allow_natives_syntax = true;
5572 i::FLAG_expose_gc = true;
5573 i::FLAG_stress_compaction = true;
5574 i::FLAG_gc_interval = 1000;
5575 CcTest::InitializeVM();
5576 if (!i::FLAG_allocation_site_pretenuring) return;
5577 v8::HandleScope scope(CcTest::isolate());
5578 Isolate* isolate = CcTest::i_isolate();
5579 Heap* heap = isolate->heap();
5582 "function add(a, b) {"
5586 "add(\"a\", \"b\");"
5587 "var oldSpaceObject;"
5589 "function crash(x) {"
5590 " var object = {a: null, b: null};"
5591 " var result = add(1.5, x | 0);"
5592 " object.a = result;"
5593 " oldSpaceObject = object;"
5598 "%OptimizeFunctionOnNextCall(crash);"
5601 v8::Handle<v8::Object> global = CcTest::global();
5602 v8::Handle<v8::Function> g =
5603 v8::Handle<v8::Function>::Cast(global->Get(v8_str("crash")));
5604 v8::Handle<v8::Value> args1[] = { v8_num(1) };
5605 heap->DisableInlineAllocation();
5606 heap->set_allocation_timeout(1);
5607 g->Call(global, 1, args1);
5608 heap->CollectAllGarbage();
5612 void OnFatalErrorExpectOOM(const char* location, const char* message) {
5613 // Exit with 0 if the location matches our expectation.
5614 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
5618 TEST(CEntryStubOOM) {
5619 i::FLAG_allow_natives_syntax = true;
5620 CcTest::InitializeVM();
5621 v8::HandleScope scope(CcTest::isolate());
5622 v8::V8::SetFatalErrorHandler(OnFatalErrorExpectOOM);
5624 v8::Handle<v8::Value> result = CompileRun(
5625 "%SetFlags('--gc-interval=1');"
5630 CHECK(result->IsNumber());
5636 static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
5639 static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
5640 CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
5644 TEST(Regress357137) {
5645 CcTest::InitializeVM();
5646 v8::Isolate* isolate = CcTest::isolate();
5647 v8::HandleScope hscope(isolate);
5648 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5649 global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
5650 v8::FunctionTemplate::New(isolate, RequestInterrupt));
5651 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
5652 DCHECK(!context.IsEmpty());
5653 v8::Context::Scope cscope(context);
5655 v8::Local<v8::Value> result = CompileRun(
5657 "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
5658 "eval('function f() {' + locals + 'return function() { return v0; }; }');"
5659 "interrupt();" // This triggers a fake stack overflow in f.
5661 CHECK_EQ(42.0, result->ToNumber(isolate)->Value());
5665 TEST(Regress507979) {
5666 const int kFixedArrayLen = 10;
5667 CcTest::InitializeVM();
5668 Isolate* isolate = CcTest::i_isolate();
5669 Heap* heap = isolate->heap();
5670 HandleScope handle_scope(isolate);
5672 Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
5673 Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
5674 CHECK(heap->InNewSpace(o1->address()));
5675 CHECK(heap->InNewSpace(o2->address()));
5677 HeapIterator it(heap, i::HeapIterator::kFilterUnreachable);
5679 // Replace parts of an object placed before a live object with a filler. This
5680 // way the filler object shares the mark bits with the following live object.
5681 o1->Shrink(kFixedArrayLen - 1);
5683 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
5684 // Let's not optimize the loop away.
5685 CHECK(obj->address() != nullptr);
5690 TEST(ArrayShiftSweeping) {
5691 i::FLAG_expose_gc = true;
5692 CcTest::InitializeVM();
5693 v8::HandleScope scope(CcTest::isolate());
5694 Isolate* isolate = CcTest::i_isolate();
5695 Heap* heap = isolate->heap();
5697 v8::Local<v8::Value> result = CompileRun(
5698 "var array = new Array(40000);"
5699 "var tmp = new Array(100000);"
5706 Handle<JSObject> o =
5707 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5708 CHECK(heap->InOldSpace(o->elements()));
5709 CHECK(heap->InOldSpace(*o));
5710 Page* page = Page::FromAddress(o->elements()->address());
5711 CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
5712 Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
5716 UNINITIALIZED_TEST(PromotionQueue) {
5717 i::FLAG_expose_gc = true;
5718 i::FLAG_max_semi_space_size = 2 * (Page::kPageSize / MB);
5719 v8::Isolate::CreateParams create_params;
5720 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
5721 v8::Isolate* isolate = v8::Isolate::New(create_params);
5722 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
5724 v8::Isolate::Scope isolate_scope(isolate);
5725 v8::HandleScope handle_scope(isolate);
5726 v8::Context::New(isolate)->Enter();
5727 Heap* heap = i_isolate->heap();
5728 NewSpace* new_space = heap->new_space();
5730 // In this test we will try to overwrite the promotion queue which is at the
5731 // end of to-space. To actually make that possible, we need at least two
5732 // semi-space pages and take advantage of fragmentation.
5733 // (1) Grow semi-space to two pages.
5734 // (2) Create a few small long living objects and call the scavenger to
5735 // move them to the other semi-space.
5736 // (3) Create a huge object, i.e., remainder of first semi-space page and
5737 // create another huge object which should be of maximum allocatable memory
5738 // size of the second semi-space page.
5739 // (4) Call the scavenger again.
5740 // What will happen is: the scavenger will promote the objects created in
5741 // (2) and will create promotion queue entries at the end of the second
5742 // semi-space page during the next scavenge when it promotes the objects to
5743 // the old generation. The first allocation of (3) will fill up the first
5744 // semi-space page. The second allocation in (3) will not fit into the
5745 // first semi-space page, but it will overwrite the promotion queue which
5746 // are in the second semi-space page. If the right guards are in place, the
5747 // promotion queue will be evacuated in that case.
5749 // Grow the semi-space to two pages to make semi-space copy overwrite the
5750 // promotion queue, which will be at the end of the second page.
5751 intptr_t old_capacity = new_space->TotalCapacity();
5753 // If we are in a low memory config, we can't grow to two pages and we can't
5754 // run this test. This also means the issue we are testing cannot arise, as
5755 // there is no fragmentation.
5756 if (new_space->IsAtMaximumCapacity()) return;
5759 CHECK(new_space->IsAtMaximumCapacity());
5760 CHECK(2 * old_capacity == new_space->TotalCapacity());
5762 // Call the scavenger two times to get an empty new space
5763 heap->CollectGarbage(NEW_SPACE);
5764 heap->CollectGarbage(NEW_SPACE);
5766 // First create a few objects which will survive a scavenge, and will get
5767 // promoted to the old generation later on. These objects will create
5768 // promotion queue entries at the end of the second semi-space page.
5769 const int number_handles = 12;
5770 Handle<FixedArray> handles[number_handles];
5771 for (int i = 0; i < number_handles; i++) {
5772 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5774 heap->CollectGarbage(NEW_SPACE);
5776 // Create the first huge object which will exactly fit the first semi-space
5778 int new_linear_size =
5779 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5780 *heap->new_space()->allocation_top_address());
5781 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize;
5782 Handle<FixedArray> first =
5783 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5784 CHECK(heap->InNewSpace(*first));
5786 // Create the second huge object of maximum allocatable second semi-space
5789 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5790 *heap->new_space()->allocation_top_address());
5791 length = Page::kMaxRegularHeapObjectSize / kPointerSize -
5792 FixedArray::kHeaderSize;
5793 Handle<FixedArray> second =
5794 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5795 CHECK(heap->InNewSpace(*second));
5797 // This scavenge will corrupt memory if the promotion queue is not
5799 heap->CollectGarbage(NEW_SPACE);
5805 TEST(Regress388880) {
5806 i::FLAG_expose_gc = true;
5807 CcTest::InitializeVM();
5808 v8::HandleScope scope(CcTest::isolate());
5809 Isolate* isolate = CcTest::i_isolate();
5810 Factory* factory = isolate->factory();
5811 Heap* heap = isolate->heap();
5813 Handle<Map> map1 = Map::Create(isolate, 1);
5815 Map::CopyWithField(map1, factory->NewStringFromStaticChars("foo"),
5816 HeapType::Any(isolate), NONE, Representation::Tagged(),
5817 OMIT_TRANSITION).ToHandleChecked();
5819 int desired_offset = Page::kPageSize - map1->instance_size();
5821 // Allocate fixed array in old pointer space so, that object allocated
5822 // afterwards would end at the end of the page.
5824 SimulateFullSpace(heap->old_space());
5825 int padding_size = desired_offset - Page::kObjectStartOffset;
5826 int padding_array_length =
5827 (padding_size - FixedArray::kHeaderSize) / kPointerSize;
5829 Handle<FixedArray> temp2 =
5830 factory->NewFixedArray(padding_array_length, TENURED);
5831 Page* page = Page::FromAddress(temp2->address());
5832 CHECK_EQ(Page::kObjectStartOffset, page->Offset(temp2->address()));
5835 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
5836 o->set_properties(*factory->empty_fixed_array());
5838 // Ensure that the object allocated where we need it.
5839 Page* page = Page::FromAddress(o->address());
5840 CHECK_EQ(desired_offset, page->Offset(o->address()));
5842 // Now we have an object right at the end of the page.
5844 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5845 // that would cause crash.
5846 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5848 CcTest::heap()->StartIncrementalMarking();
5849 CHECK(marking->IsMarking());
5851 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5852 // when it calls heap->AdjustLiveBytes(...).
5853 JSObject::MigrateToMap(o, map2);
5858 i::FLAG_expose_gc = true;
5859 CcTest::InitializeVM();
5860 v8::HandleScope scope(CcTest::isolate());
5861 Isolate* isolate = CcTest::i_isolate();
5862 Heap* heap = isolate->heap();
5863 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5864 v8::Local<v8::Value> result = CompileRun(
5865 "var weak_map = new WeakMap();"
5866 "var future_keys = [];"
5867 "for (var i = 0; i < 50; i++) {"
5868 " var key = {'k' : i + 0.1};"
5869 " weak_map.set(key, 1);"
5870 " future_keys.push({'x' : i + 0.2});"
5873 if (marking->IsStopped()) {
5874 CcTest::heap()->StartIncrementalMarking();
5876 // Incrementally mark the backing store.
5877 Handle<JSObject> obj =
5878 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5879 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
5880 while (!Marking::IsBlack(
5881 Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
5882 !marking->IsStopped()) {
5883 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5885 // Stash the backing store in a handle.
5886 Handle<Object> save(weak_map->table(), isolate);
5887 // The following line will update the backing store.
5889 "for (var i = 0; i < 50; i++) {"
5890 " weak_map.set(future_keys[i], i);"
5892 heap->incremental_marking()->set_should_hurry(true);
5893 heap->CollectGarbage(OLD_SPACE);
5897 TEST(Regress442710) {
5898 CcTest::InitializeVM();
5899 Isolate* isolate = CcTest::i_isolate();
5900 Heap* heap = isolate->heap();
5901 Factory* factory = isolate->factory();
5903 HandleScope sc(isolate);
5904 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
5905 Handle<JSArray> array = factory->NewJSArray(2);
5907 Handle<String> name = factory->InternalizeUtf8String("testArray");
5908 JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
5909 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5910 heap->CollectGarbage(OLD_SPACE);
5914 HEAP_TEST(NumberStringCacheSize) {
5915 // Test that the number-string cache has not been resized in the snapshot.
5916 CcTest::InitializeVM();
5917 Isolate* isolate = CcTest::i_isolate();
5918 if (!isolate->snapshot_available()) return;
5919 Heap* heap = isolate->heap();
5920 CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
5921 heap->number_string_cache()->length());
5926 CcTest::InitializeVM();
5927 Isolate* isolate = CcTest::i_isolate();
5928 Heap* heap = isolate->heap();
5929 Factory* factory = isolate->factory();
5930 HandleScope scope(isolate);
5931 CompileRun("function cls() { this.x = 10; }");
5932 Handle<WeakCell> weak_prototype;
5934 HandleScope inner_scope(isolate);
5935 v8::Local<v8::Value> result = CompileRun("cls.prototype");
5936 Handle<JSObject> proto =
5937 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5938 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
5940 CHECK(!weak_prototype->cleared());
5944 "cls.prototype = null;");
5945 for (int i = 0; i < 4; i++) {
5946 heap->CollectAllGarbage();
5948 // The map of a.x keeps prototype alive
5949 CHECK(!weak_prototype->cleared());
5950 // Change the map of a.x and make the previous map garbage collectable.
5951 CompileRun("a.x.__proto__ = {};");
5952 for (int i = 0; i < 4; i++) {
5953 heap->CollectAllGarbage();
5955 CHECK(weak_prototype->cleared());
5959 Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
5960 HandleScope inner_scope(isolate);
5961 Handle<Map> map = Map::Create(isolate, 1);
5962 v8::Local<v8::Value> result =
5963 CompileRun("(function () { return {x : 10}; })();");
5964 Handle<JSObject> proto =
5965 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5966 Map::SetPrototype(map, proto);
5967 heap->AddRetainedMap(map);
5968 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
5972 void CheckMapRetainingFor(int n) {
5973 FLAG_retain_maps_for_n_gc = n;
5974 Isolate* isolate = CcTest::i_isolate();
5975 Heap* heap = isolate->heap();
5976 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
5977 CHECK(!weak_cell->cleared());
5978 for (int i = 0; i < n; i++) {
5979 heap->CollectGarbage(OLD_SPACE);
5981 CHECK(!weak_cell->cleared());
5982 heap->CollectGarbage(OLD_SPACE);
5983 CHECK(weak_cell->cleared());
5987 TEST(MapRetaining) {
5988 CcTest::InitializeVM();
5989 v8::HandleScope scope(CcTest::isolate());
5990 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5991 CheckMapRetainingFor(0);
5992 CheckMapRetainingFor(1);
5993 CheckMapRetainingFor(7);
5997 TEST(RegressArrayListGC) {
5998 FLAG_retain_maps_for_n_gc = 1;
5999 FLAG_incremental_marking = 0;
6000 FLAG_gc_global = true;
6001 CcTest::InitializeVM();
6002 v8::HandleScope scope(CcTest::isolate());
6003 Isolate* isolate = CcTest::i_isolate();
6004 Heap* heap = isolate->heap();
6005 AddRetainedMap(isolate, heap);
6006 Handle<Map> map = Map::Create(isolate, 1);
6007 heap->CollectGarbage(OLD_SPACE);
6008 // Force GC in old space on next addition of retained map.
6009 Map::WeakCellForMap(map);
6010 SimulateFullSpace(CcTest::heap()->new_space());
6011 for (int i = 0; i < 10; i++) {
6012 heap->AddRetainedMap(map);
6014 heap->CollectGarbage(OLD_SPACE);
6020 CcTest::InitializeVM();
6021 v8::HandleScope scope(CcTest::isolate());
6023 v8::Local<v8::Value> result = CompileRun("'abc'");
6024 Handle<Object> o = v8::Utils::OpenHandle(*result);
6025 CcTest::i_isolate()->heap()->TracePathToObject(*o);
6030 TEST(WritableVsImmortalRoots) {
6031 for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
6032 Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
6033 bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
6034 bool immortal = Heap::RootIsImmortalImmovable(root_index);
6035 // A root value can be writable, immortal, or neither, but not both.
6036 CHECK(!immortal || !writable);
6041 static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
6043 int elements_to_trim) {
6044 v8::HandleScope scope(CcTest::isolate());
6045 Isolate* isolate = CcTest::i_isolate();
6046 Factory* factory = isolate->factory();
6047 Heap* heap = isolate->heap();
6049 Handle<FixedTypedArrayBase> array =
6050 factory->NewFixedTypedArray(initial_length, type, true);
6051 int old_size = array->size();
6052 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
6055 // Check that free space filler is at the right place and did not smash the
6057 CHECK(array->IsFixedArrayBase());
6058 CHECK_EQ(initial_length - elements_to_trim, array->length());
6059 int new_size = array->size();
6060 if (new_size != old_size) {
6061 // Free space filler should be created in this case.
6062 Address next_obj_address = array->address() + array->size();
6063 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
6065 heap->CollectAllAvailableGarbage();
6069 TEST(Regress472513) {
6070 CcTest::InitializeVM();
6071 v8::HandleScope scope(CcTest::isolate());
6073 // The combination of type/initial_length/elements_to_trim triggered
6074 // typed array header smashing with free space filler (crbug/472513).
6077 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32, 6);
6078 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32 - 7, 6);
6079 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16, 6);
6080 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16 - 3, 6);
6081 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8, 6);
6082 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8 - 1, 6);
6085 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16, 3);
6086 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16 - 3, 3);
6087 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8, 3);
6088 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8 - 1, 3);
6089 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 4, 3);
6093 TEST(WeakFixedArray) {
6094 CcTest::InitializeVM();
6095 v8::HandleScope scope(CcTest::isolate());
6097 Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
6098 Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
6099 array->Remove(number);
6100 array->Compact<WeakFixedArray::NullCallback>();
6101 WeakFixedArray::Add(array, number);
6105 TEST(PreprocessStackTrace) {
6106 // Do not automatically trigger early GC.
6107 FLAG_gc_interval = -1;
6108 CcTest::InitializeVM();
6109 v8::HandleScope scope(CcTest::isolate());
6110 v8::TryCatch try_catch(CcTest::isolate());
6111 CompileRun("throw new Error();");
6112 CHECK(try_catch.HasCaught());
6113 Isolate* isolate = CcTest::i_isolate();
6114 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
6115 Handle<Name> key = isolate->factory()->stack_trace_symbol();
6116 Handle<Object> stack_trace =
6117 JSObject::GetProperty(exception, key).ToHandleChecked();
6118 Handle<Object> code =
6119 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6120 CHECK(code->IsCode());
6122 isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");
6124 Handle<Object> pos =
6125 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6126 CHECK(pos->IsSmi());
6128 Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
6129 int array_length = Smi::cast(stack_trace_array->length())->value();
6130 for (int i = 0; i < array_length; i++) {
6131 Handle<Object> element =
6132 Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
6133 CHECK(!element->IsCode());
6138 static bool utils_has_been_collected = false;
6140 static void UtilsHasBeenCollected(
6141 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
6142 utils_has_been_collected = true;
6143 data.GetParameter()->Reset();
6147 TEST(BootstrappingExports) {
6148 FLAG_expose_natives_as = "natives";
6149 CcTest::InitializeVM();
6150 v8::Isolate* isolate = CcTest::isolate();
6152 if (Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return;
6154 utils_has_been_collected = false;
6156 v8::Persistent<v8::Object> utils;
6159 v8::HandleScope scope(isolate);
6160 v8::Handle<v8::Object> natives =
6161 CcTest::global()->Get(v8_str("natives"))->ToObject(isolate);
6162 utils.Reset(isolate, natives->Get(v8_str("utils"))->ToObject(isolate));
6163 natives->Delete(v8_str("utils"));
6166 utils.SetWeak(&utils, UtilsHasBeenCollected,
6167 v8::WeakCallbackType::kParameter);
6169 CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");
6171 CHECK(utils_has_been_collected);
6176 FLAG_allow_natives_syntax = true;
6177 CcTest::InitializeVM();
6178 v8::Isolate* isolate = CcTest::isolate();
6179 v8::HandleScope scope(isolate);
6180 v8::Local<v8::Function> constructor =
6181 v8::Utils::ToLocal(CcTest::i_isolate()->internal_array_function());
6182 CcTest::global()->Set(v8_str("InternalArray"), constructor);
6184 v8::TryCatch try_catch(isolate);
6188 "for (var i = 0; i < 1000; i++) {"
6189 " var ai = new InternalArray(10000);"
6190 " if (%HaveSameMap(ai, a)) throw Error();"
6191 " if (!%HasFastObjectElements(ai)) throw Error();"
6193 "for (var i = 0; i < 1000; i++) {"
6194 " var ai = new InternalArray(10000);"
6195 " if (%HaveSameMap(ai, a)) throw Error();"
6196 " if (!%HasFastObjectElements(ai)) throw Error();"
6199 CHECK(!try_catch.HasCaught());
6203 void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
6204 CHECK(bytes >= FixedArray::kHeaderSize);
6205 CHECK(bytes % kPointerSize == 0);
6206 Factory* factory = isolate->factory();
6207 HandleScope scope(isolate);
6208 AlwaysAllocateScope always_allocate(isolate);
6210 static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
6211 Handle<FixedArray> array = factory->NewFixedArray(
6212 elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
6213 CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
6214 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
6218 TEST(NewSpaceAllocationCounter) {
6219 CcTest::InitializeVM();
6220 v8::HandleScope scope(CcTest::isolate());
6221 Isolate* isolate = CcTest::i_isolate();
6222 Heap* heap = isolate->heap();
6223 size_t counter1 = heap->NewSpaceAllocationCounter();
6224 heap->CollectGarbage(NEW_SPACE);
6225 const size_t kSize = 1024;
6226 AllocateInSpace(isolate, kSize, NEW_SPACE);
6227 size_t counter2 = heap->NewSpaceAllocationCounter();
6228 CHECK_EQ(kSize, counter2 - counter1);
6229 heap->CollectGarbage(NEW_SPACE);
6230 size_t counter3 = heap->NewSpaceAllocationCounter();
6231 CHECK_EQ(0U, counter3 - counter2);
6232 // Test counter overflow.
6233 size_t max_counter = -1;
6234 heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
6235 size_t start = heap->NewSpaceAllocationCounter();
6236 for (int i = 0; i < 20; i++) {
6237 AllocateInSpace(isolate, kSize, NEW_SPACE);
6238 size_t counter = heap->NewSpaceAllocationCounter();
6239 CHECK_EQ(kSize, counter - start);
6245 TEST(OldSpaceAllocationCounter) {
6246 CcTest::InitializeVM();
6247 v8::HandleScope scope(CcTest::isolate());
6248 Isolate* isolate = CcTest::i_isolate();
6249 Heap* heap = isolate->heap();
6250 size_t counter1 = heap->OldGenerationAllocationCounter();
6251 heap->CollectGarbage(NEW_SPACE);
6252 heap->CollectGarbage(NEW_SPACE);
6253 const size_t kSize = 1024;
6254 AllocateInSpace(isolate, kSize, OLD_SPACE);
6255 size_t counter2 = heap->OldGenerationAllocationCounter();
6256 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
6257 CHECK_LE(kSize, counter2 - counter1);
6258 heap->CollectGarbage(NEW_SPACE);
6259 size_t counter3 = heap->OldGenerationAllocationCounter();
6260 CHECK_EQ(0u, counter3 - counter2);
6261 AllocateInSpace(isolate, kSize, OLD_SPACE);
6262 heap->CollectGarbage(OLD_SPACE);
6263 size_t counter4 = heap->OldGenerationAllocationCounter();
6264 CHECK_LE(kSize, counter4 - counter3);
6265 // Test counter overflow.
6266 size_t max_counter = -1;
6267 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
6268 size_t start = heap->OldGenerationAllocationCounter();
6269 for (int i = 0; i < 20; i++) {
6270 AllocateInSpace(isolate, kSize, OLD_SPACE);
6271 size_t counter = heap->OldGenerationAllocationCounter();
6272 CHECK_LE(kSize, counter - start);
6278 TEST(NewSpaceAllocationThroughput) {
6279 CcTest::InitializeVM();
6280 v8::HandleScope scope(CcTest::isolate());
6281 Isolate* isolate = CcTest::i_isolate();
6282 Heap* heap = isolate->heap();
6283 GCTracer* tracer = heap->tracer();
6285 size_t counter1 = 1000;
6286 tracer->SampleAllocation(time1, counter1, 0);
6288 size_t counter2 = 2000;
6289 tracer->SampleAllocation(time2, counter2, 0);
6291 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6292 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6294 size_t counter3 = 30000;
6295 tracer->SampleAllocation(time3, counter3, 0);
6296 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6297 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6301 TEST(NewSpaceAllocationThroughput2) {
6302 CcTest::InitializeVM();
6303 v8::HandleScope scope(CcTest::isolate());
6304 Isolate* isolate = CcTest::i_isolate();
6305 Heap* heap = isolate->heap();
6306 GCTracer* tracer = heap->tracer();
6308 size_t counter1 = 1000;
6309 tracer->SampleAllocation(time1, counter1, 0);
6311 size_t counter2 = 2000;
6312 tracer->SampleAllocation(time2, counter2, 0);
6314 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6315 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6317 size_t counter3 = 30000;
6318 tracer->SampleAllocation(time3, counter3, 0);
6319 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6320 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6324 static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
6325 Isolate* isolate = CcTest::i_isolate();
6327 *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
6328 CHECK(message->IsTheHole());
6332 TEST(MessageObjectLeak) {
6333 CcTest::InitializeVM();
6334 v8::Isolate* isolate = CcTest::isolate();
6335 v8::HandleScope scope(isolate);
6336 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
6337 global->Set(v8::String::NewFromUtf8(isolate, "check"),
6338 v8::FunctionTemplate::New(isolate, CheckLeak));
6339 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
6340 v8::Context::Scope cscope(context);
6344 " throw 'message 1';"
6349 " throw 'message 2';"
6356 const char* flag = "--turbo-filter=*";
6357 FlagList::SetFlagsFromString(flag, StrLength(flag));
6358 FLAG_always_opt = true;
6359 FLAG_turbo_try_catch = true;
6360 FLAG_turbo_try_finally = true;
6366 static void CheckEqualSharedFunctionInfos(
6367 const v8::FunctionCallbackInfo<v8::Value>& args) {
6368 Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
6369 Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
6370 Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
6371 Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
6372 CHECK(fun1->shared() == fun2->shared());
6376 static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
6377 Isolate* isolate = CcTest::i_isolate();
6378 Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
6379 Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
6380 fun->ReplaceCode(*isolate->builtins()->CompileLazy());
6381 fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
6382 isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
6386 TEST(CanonicalSharedFunctionInfo) {
6387 CcTest::InitializeVM();
6388 v8::Isolate* isolate = CcTest::isolate();
6389 v8::HandleScope scope(isolate);
6390 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
6391 global->Set(isolate, "check", v8::FunctionTemplate::New(
6392 isolate, CheckEqualSharedFunctionInfos));
6393 global->Set(isolate, "remove",
6394 v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
6395 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
6396 v8::Context::Scope cscope(context);
6398 "function f() { return function g() {}; }"
6405 "function f() { return (function() { return function g() {}; })(); }"
6413 TEST(OldGenerationAllocationThroughput) {
6414 CcTest::InitializeVM();
6415 v8::HandleScope scope(CcTest::isolate());
6416 Isolate* isolate = CcTest::i_isolate();
6417 Heap* heap = isolate->heap();
6418 GCTracer* tracer = heap->tracer();
6420 size_t counter1 = 1000;
6421 tracer->SampleAllocation(time1, 0, counter1);
6423 size_t counter2 = 2000;
6424 tracer->SampleAllocation(time2, 0, counter2);
6426 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6427 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6429 size_t counter3 = 30000;
6430 tracer->SampleAllocation(time3, 0, counter3);
6432 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6433 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6437 TEST(AllocationThroughput) {
6438 CcTest::InitializeVM();
6439 v8::HandleScope scope(CcTest::isolate());
6440 Isolate* isolate = CcTest::i_isolate();
6441 Heap* heap = isolate->heap();
6442 GCTracer* tracer = heap->tracer();
6444 size_t counter1 = 1000;
6445 tracer->SampleAllocation(time1, counter1, counter1);
6447 size_t counter2 = 2000;
6448 tracer->SampleAllocation(time2, counter2, counter2);
6449 size_t throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6450 CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
6452 size_t counter3 = 30000;
6453 tracer->SampleAllocation(time3, counter3, counter3);
6454 throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6455 CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);
6459 TEST(SlotsBufferObjectSlotsRemoval) {
6460 CcTest::InitializeVM();
6461 v8::HandleScope scope(CcTest::isolate());
6462 Isolate* isolate = CcTest::i_isolate();
6463 Heap* heap = isolate->heap();
6464 Factory* factory = isolate->factory();
6466 SlotsBuffer* buffer = new SlotsBuffer(NULL);
6467 void* fake_object[1];
6469 Handle<FixedArray> array = factory->NewFixedArray(2, TENURED);
6470 CHECK(heap->old_space()->Contains(*array));
6471 array->set(0, reinterpret_cast<Object*>(fake_object), SKIP_WRITE_BARRIER);
6473 // Firstly, let's test the regular slots buffer entry.
6474 buffer->Add(HeapObject::RawField(*array, FixedArray::kHeaderSize));
6475 CHECK(reinterpret_cast<void*>(buffer->Get(0)) ==
6476 HeapObject::RawField(*array, FixedArray::kHeaderSize));
6477 SlotsBuffer::RemoveObjectSlots(CcTest::i_isolate()->heap(), buffer,
6479 array->address() + array->Size());
6480 CHECK(reinterpret_cast<void*>(buffer->Get(0)) ==
6481 HeapObject::RawField(heap->empty_fixed_array(),
6482 FixedArrayBase::kLengthOffset));
6484 // Secondly, let's test the typed slots buffer entry.
6485 SlotsBuffer::AddTo(NULL, &buffer, SlotsBuffer::EMBEDDED_OBJECT_SLOT,
6486 array->address() + FixedArray::kHeaderSize,
6487 SlotsBuffer::FAIL_ON_OVERFLOW);
6488 CHECK(reinterpret_cast<void*>(buffer->Get(1)) ==
6489 reinterpret_cast<Object**>(SlotsBuffer::EMBEDDED_OBJECT_SLOT));
6490 CHECK(reinterpret_cast<void*>(buffer->Get(2)) ==
6491 HeapObject::RawField(*array, FixedArray::kHeaderSize));
6492 SlotsBuffer::RemoveObjectSlots(CcTest::i_isolate()->heap(), buffer,
6494 array->address() + array->Size());
6495 CHECK(reinterpret_cast<void*>(buffer->Get(1)) ==
6496 HeapObject::RawField(heap->empty_fixed_array(),
6497 FixedArrayBase::kLengthOffset));
6498 CHECK(reinterpret_cast<void*>(buffer->Get(2)) ==
6499 HeapObject::RawField(heap->empty_fixed_array(),
6500 FixedArrayBase::kLengthOffset));
6505 TEST(FilterInvalidSlotsBufferEntries) {
6506 FLAG_manual_evacuation_candidates_selection = true;
6507 CcTest::InitializeVM();
6508 v8::HandleScope scope(CcTest::isolate());
6509 Isolate* isolate = CcTest::i_isolate();
6510 Heap* heap = isolate->heap();
6511 Factory* factory = isolate->factory();
6512 SlotsBuffer* buffer = new SlotsBuffer(NULL);
6514 // Set up a fake black object that will contain a recorded SMI, a recorded
6515 // pointer to a new space object, and a recorded pointer to a non-evacuation
6516 // candidate object. These object should be filtered out. Additionally,
6517 // we point to an evacuation candidate object which should not be filtered
6520 // Create fake object and mark it black.
6521 Handle<FixedArray> fake_object = factory->NewFixedArray(23, TENURED);
6522 MarkBit mark_bit = Marking::MarkBitFrom(*fake_object);
6523 Marking::MarkBlack(mark_bit);
6525 // Write a SMI into field one and record its address;
6526 Object** field_smi = fake_object->RawFieldOfElementAt(0);
6527 *field_smi = Smi::FromInt(100);
6528 buffer->Add(field_smi);
6530 // Write a new space reference into field 2 and record its address;
6531 Handle<FixedArray> new_space_object = factory->NewFixedArray(23);
6532 mark_bit = Marking::MarkBitFrom(*new_space_object);
6533 Marking::MarkBlack(mark_bit);
6534 Object** field_new_space = fake_object->RawFieldOfElementAt(1);
6535 *field_new_space = *new_space_object;
6536 buffer->Add(field_new_space);
6538 // Write an old space reference into field 3 which points to an object not on
6539 // an evacuation candidate.
6540 Handle<FixedArray> old_space_object_non_evacuation =
6541 factory->NewFixedArray(23, TENURED);
6542 mark_bit = Marking::MarkBitFrom(*old_space_object_non_evacuation);
6543 Marking::MarkBlack(mark_bit);
6544 Object** field_old_space_object_non_evacuation =
6545 fake_object->RawFieldOfElementAt(2);
6546 *field_old_space_object_non_evacuation = *old_space_object_non_evacuation;
6547 buffer->Add(field_old_space_object_non_evacuation);
6549 // Write an old space reference into field 4 which points to an object on an
6550 // evacuation candidate.
6551 SimulateFullSpace(heap->old_space());
6552 Handle<FixedArray> valid_object =
6553 isolate->factory()->NewFixedArray(23, TENURED);
6554 Page* page = Page::FromAddress(valid_object->address());
6555 page->SetFlag(MemoryChunk::EVACUATION_CANDIDATE);
6556 Object** valid_field = fake_object->RawFieldOfElementAt(3);
6557 *valid_field = *valid_object;
6558 buffer->Add(valid_field);
6560 SlotsBuffer::RemoveInvalidSlots(heap, buffer);
6561 Object** kRemovedEntry = HeapObject::RawField(heap->empty_fixed_array(),
6562 FixedArrayBase::kLengthOffset);
6563 CHECK_EQ(buffer->Get(0), kRemovedEntry);
6564 CHECK_EQ(buffer->Get(1), kRemovedEntry);
6565 CHECK_EQ(buffer->Get(2), kRemovedEntry);
6566 CHECK_EQ(buffer->Get(3), valid_field);
6568 // Clean-up to make verify heap happy.
6569 mark_bit = Marking::MarkBitFrom(*fake_object);
6570 Marking::MarkWhite(mark_bit);
6571 mark_bit = Marking::MarkBitFrom(*new_space_object);
6572 Marking::MarkWhite(mark_bit);
6573 mark_bit = Marking::MarkBitFrom(*old_space_object_non_evacuation);
6574 Marking::MarkWhite(mark_bit);
6580 TEST(ContextMeasure) {
6581 CcTest::InitializeVM();
6582 v8::HandleScope scope(CcTest::isolate());
6583 Isolate* isolate = CcTest::i_isolate();
6584 LocalContext context;
6586 int size_upper_limit = 0;
6587 int count_upper_limit = 0;
6588 HeapIterator it(CcTest::heap());
6589 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6590 size_upper_limit += obj->Size();
6591 count_upper_limit++;
6594 ContextMeasure measure(*isolate->native_context());
6596 PrintF("Context size : %d bytes\n", measure.Size());
6597 PrintF("Context object count: %d\n", measure.Count());
6599 CHECK_LE(1000, measure.Count());
6600 CHECK_LE(50000, measure.Size());
6602 CHECK_LE(measure.Count(), count_upper_limit);
6603 CHECK_LE(measure.Size(), size_upper_limit);
6607 TEST(ScriptIterator) {
6608 CcTest::InitializeVM();
6609 v8::HandleScope scope(CcTest::isolate());
6610 Isolate* isolate = CcTest::i_isolate();
6611 Heap* heap = CcTest::heap();
6612 LocalContext context;
6614 heap->CollectAllGarbage();
6616 int script_count = 0;
6618 HeapIterator it(heap);
6619 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6620 if (obj->IsScript()) script_count++;
6625 Script::Iterator iterator(isolate);
6626 while (iterator.Next()) script_count--;
6629 CHECK_EQ(0, script_count);
6633 TEST(SharedFunctionInfoIterator) {
6634 CcTest::InitializeVM();
6635 v8::HandleScope scope(CcTest::isolate());
6636 Isolate* isolate = CcTest::i_isolate();
6637 Heap* heap = CcTest::heap();
6638 LocalContext context;
6640 heap->CollectAllGarbage();
6641 heap->CollectAllGarbage();
6645 HeapIterator it(heap);
6646 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6647 if (!obj->IsSharedFunctionInfo()) continue;
6648 // Shared function infos without a script (API functions or C++ builtins)
6649 // are not returned by the iterator because they are not created from a
6650 // script. They are not interesting for type feedback vector anyways.
6651 SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
6652 if (shared->script()->IsUndefined()) {
6653 CHECK_EQ(0, shared->feedback_vector()->ICSlots());
6661 SharedFunctionInfo::Iterator iterator(isolate);
6662 while (iterator.Next()) sfi_count--;
6665 CHECK_EQ(0, sfi_count);
6668 } // namespace internal