1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "src/compilation-cache.h"
32 #include "src/context-measure.h"
33 #include "src/deoptimizer.h"
34 #include "src/execution.h"
35 #include "src/factory.h"
36 #include "src/global-handles.h"
37 #include "src/heap/gc-tracer.h"
38 #include "src/ic/ic.h"
39 #include "src/macro-assembler.h"
40 #include "src/snapshot/snapshot.h"
41 #include "test/cctest/cctest.h"
42 #include "test/cctest/heap-tester.h"
// Verifies basic invariants of a heap Map: it is a heap object contained in
// the heap, its own map is the meta map, and its instance type and size match
// the expected values. NOTE(review): some lines (and the closing brace) are
// elided in this listing.
49 static void CheckMap(Map* map, int type, int instance_size) {
50 CHECK(map->IsHeapObject());
52 CHECK(CcTest::heap()->Contains(map));
54 CHECK_EQ(CcTest::heap()->meta_map(), map->map());
55 CHECK_EQ(type, map->instance_type());
56 CHECK_EQ(instance_size, map->instance_size());
// Body of the HeapMaps test (TEST header elided in this listing): checks the
// canonical root maps against their expected instance types and sizes,
// including every SIMD128 value map via the SIMD128_TYPES macro expansion.
61 CcTest::InitializeVM();
62 Heap* heap = CcTest::heap();
63 CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
64 CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
65 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
66 CheckMap(heap->type##_map(), SIMD128_VALUE_TYPE, Type::kSize);
67 SIMD128_TYPES(SIMD128_TYPE)
// Variable-sized objects use kVariableSizeSentinel instead of a fixed size.
69 CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
70 CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
// Asserts that |obj| is an Oddball and that Execution::ToString produces
// exactly |string| for it (e.g. "true", "null", "undefined").
74 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
75 CHECK(obj->IsOddball());
76 Handle<Object> handle(obj, isolate);
77 Object* print_string =
78 *Execution::ToString(isolate, handle).ToHandleChecked();
79 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Asserts that ToString of the Smi for |value| equals |string|.
83 static void CheckSmi(Isolate* isolate, int value, const char* string) {
84 Handle<Object> handle(Smi::FromInt(value), isolate);
85 Object* print_string =
86 *Execution::ToString(isolate, handle).ToHandleChecked();
87 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Asserts that a factory-allocated number for |value| stringifies to
// exactly |string|.
91 static void CheckNumber(Isolate* isolate, double value, const char* string) {
92 Handle<Object> number = isolate->factory()->NewNumber(value);
93 CHECK(number->IsNumber());
94 Handle<Object> print_string =
95 Execution::ToString(isolate, number).ToHandleChecked();
96 CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
// Exercises Isolate::FindCodeObject: any interior address of a Code object
// must resolve back to that Code object. NOTE(review): the CodeDesc setup
// lines between the assembler and NewCode are elided in this listing.
100 static void CheckFindCodeObject(Isolate* isolate) {
101 // Test FindCodeObject
104 Assembler assm(isolate, NULL, 0);
106 __ nop(); // supported on all architectures
110 Handle<Code> code = isolate->factory()->NewCode(
111 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
112 CHECK(code->IsCode());
114 HeapObject* obj = HeapObject::cast(*code);
115 Address obj_addr = obj->address();
// Every pointer-aligned interior address must map back to the same object.
117 for (int i = 0; i < obj->Size(); i += kPointerSize) {
118 Object* found = isolate->FindCodeObject(obj_addr + i);
119 CHECK_EQ(*code, found);
// A second Code object must not be confused with the first one.
122 Handle<Code> copy = isolate->factory()->NewCode(
123 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
124 HeapObject* obj_copy = HeapObject::cast(*copy);
125 Object* not_right = isolate->FindCodeObject(obj_copy->address() +
126 obj_copy->Size() / 2);
127 CHECK(not_right != *code);
// Body of the HandleNull test (header elided): a handle wrapping a null
// Object* must be constructible without crashing.
132 CcTest::InitializeVM();
133 Isolate* isolate = CcTest::i_isolate();
134 HandleScope outer_scope(isolate);
135 LocalContext context;
136 Handle<Object> n(static_cast<Object*>(nullptr), isolate);
// Body of the HeapObjects test (header elided): checks Smi vs HeapNumber
// allocation boundaries, unsigned/overflow cases, the NaN oddball, string
// allocation, and ToString behavior for oddballs, Smis, and doubles.
142 CcTest::InitializeVM();
143 Isolate* isolate = CcTest::i_isolate();
144 Factory* factory = isolate->factory();
145 Heap* heap = isolate->heap();
147 HandleScope sc(isolate);
148 Handle<Object> value = factory->NewNumber(1.000123);
149 CHECK(value->IsHeapNumber());
150 CHECK(value->IsNumber());
151 CHECK_EQ(1.000123, value->Number());
// 1.0 is representable as a Smi, so the factory must not box it.
153 value = factory->NewNumber(1.0);
154 CHECK(value->IsSmi());
155 CHECK(value->IsNumber());
156 CHECK_EQ(1.0, value->Number());
158 value = factory->NewNumberFromInt(1024);
159 CHECK(value->IsSmi());
160 CHECK(value->IsNumber());
161 CHECK_EQ(1024.0, value->Number());
// The Smi range endpoints themselves still fit in a Smi.
163 value = factory->NewNumberFromInt(Smi::kMinValue);
164 CHECK(value->IsSmi());
165 CHECK(value->IsNumber());
166 CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
168 value = factory->NewNumberFromInt(Smi::kMaxValue);
169 CHECK(value->IsSmi());
170 CHECK(value->IsNumber());
171 CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
// On 32-bit targets, values just outside the Smi range must become
// HeapNumbers.
173 #if !defined(V8_TARGET_ARCH_64_BIT)
174 // TODO(lrn): We need a NumberFromIntptr function in order to test this.
175 value = factory->NewNumberFromInt(Smi::kMinValue - 1);
176 CHECK(value->IsHeapNumber());
177 CHECK(value->IsNumber());
178 CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
181 value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
182 CHECK(value->IsHeapNumber());
183 CHECK(value->IsNumber());
184 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
187 value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
188 CHECK(value->IsHeapNumber());
189 CHECK(value->IsNumber());
190 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
193 // nan oddball checks
194 CHECK(factory->nan_value()->IsNumber());
195 CHECK(std::isnan(factory->nan_value()->Number()));
197 Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
198 CHECK(s->IsString());
199 CHECK_EQ(10, s->length());
201 Handle<String> object_string = Handle<String>::cast(factory->Object_string());
202 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
203 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));
205 // Check ToString for oddballs
206 CheckOddball(isolate, heap->true_value(), "true");
207 CheckOddball(isolate, heap->false_value(), "false");
208 CheckOddball(isolate, heap->null_value(), "null");
209 CheckOddball(isolate, heap->undefined_value(), "undefined");
211 // Check ToString for Smis
212 CheckSmi(isolate, 0, "0");
213 CheckSmi(isolate, 42, "42");
214 CheckSmi(isolate, -42, "-42");
216 // Check ToString for Numbers
217 CheckNumber(isolate, 1.1, "1.1");
219 CheckFindCodeObject(isolate);
// Generic lane check for a SIMD128 heap value: each lane must equal the
// expected lane_values, and setting any single lane to other_value must
// leave all other lanes untouched. NOTE(review): some loop-closing braces
// are elided in this listing.
223 template <typename T, typename LANE_TYPE, int LANES>
224 static void CheckSimdValue(T* value, LANE_TYPE lane_values[LANES],
225 LANE_TYPE other_value) {
226 // Check against lane_values, and check that all lanes can be set to
227 // other_value without disturbing the other lanes.
228 for (int i = 0; i < LANES; i++) {
229 CHECK_EQ(lane_values[i], value->get_lane(i));
231 for (int i = 0; i < LANES; i++) {
232 value->set_lane(i, other_value); // change the value
233 for (int j = 0; j < LANES; j++) {
235 CHECK_EQ(lane_values[j], value->get_lane(j));
237 CHECK_EQ(other_value, value->get_lane(j));
239 value->set_lane(i, lane_values[i]); // restore the lane
241 CHECK(value->BooleanValue()); // SIMD values are 'true'.
// Body of the SimdObjects test (header and per-type scope braces elided):
// for each SIMD128 type, allocates a value via the factory, verifies lane
// get/set semantics with CheckSimdValue, and (under OBJECT_PRINT) checks
// the printed representation.
246 CcTest::InitializeVM();
247 Isolate* isolate = CcTest::i_isolate();
248 Factory* factory = isolate->factory();
250 HandleScope sc(isolate);
// Float32x4: also exercises -0.0 sign preservation and NaN lanes.
254 float lanes[4] = {1, 2, 3, 4};
255 float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
256 float signaling_NaN = std::numeric_limits<float>::signaling_NaN();
258 Handle<Float32x4> value = factory->NewFloat32x4(lanes);
259 CHECK(value->IsFloat32x4());
260 CheckSimdValue<Float32x4, float, 4>(*value, lanes, 3.14f);
262 // Check special lane values.
263 value->set_lane(1, -0.0);
264 CHECK_EQ(-0.0, value->get_lane(1));
265 CHECK(std::signbit(value->get_lane(1)));  // Sign bit should be preserved.
266 value->set_lane(2, quiet_NaN);
267 CHECK(std::isnan(value->get_lane(2)));
268 value->set_lane(3, signaling_NaN);
269 CHECK(std::isnan(value->get_lane(3)));
272 // Check value printing.
274 value = factory->NewFloat32x4(lanes);
275 std::ostringstream os;
276 value->Float32x4Print(os);
277 CHECK_EQ("1, 2, 3, 4", os.str());
280 float special_lanes[4] = {0, -0.0, quiet_NaN, signaling_NaN};
281 value = factory->NewFloat32x4(special_lanes);
282 std::ostringstream os;
283 value->Float32x4Print(os);
284 // Value printing doesn't preserve signed zeroes.
285 CHECK_EQ("0, 0, NaN, NaN", os.str());
287 #endif  // OBJECT_PRINT
// Int32x4.
291 int32_t lanes[4] = {1, 2, 3, 4};
293 Handle<Int32x4> value = factory->NewInt32x4(lanes);
294 CHECK(value->IsInt32x4());
295 CheckSimdValue<Int32x4, int32_t, 4>(*value, lanes, 3);
298 std::ostringstream os;
299 value->Int32x4Print(os);
300 CHECK_EQ("1, 2, 3, 4", os.str());
301 #endif  // OBJECT_PRINT
// Uint32x4.
305 uint32_t lanes[4] = {1, 2, 3, 4};
307 Handle<Uint32x4> value = factory->NewUint32x4(lanes);
308 CHECK(value->IsUint32x4());
309 CheckSimdValue<Uint32x4, uint32_t, 4>(*value, lanes, 3);
312 std::ostringstream os;
313 value->Uint32x4Print(os);
314 CHECK_EQ("1, 2, 3, 4", os.str());
315 #endif  // OBJECT_PRINT
// Bool32x4.
319 bool lanes[4] = {true, false, true, false};
321 Handle<Bool32x4> value = factory->NewBool32x4(lanes);
322 CHECK(value->IsBool32x4());
323 CheckSimdValue<Bool32x4, bool, 4>(*value, lanes, false);
326 std::ostringstream os;
327 value->Bool32x4Print(os);
328 CHECK_EQ("true, false, true, false", os.str());
329 #endif  // OBJECT_PRINT
// Int16x8.
333 int16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};
335 Handle<Int16x8> value = factory->NewInt16x8(lanes);
336 CHECK(value->IsInt16x8());
337 CheckSimdValue<Int16x8, int16_t, 8>(*value, lanes, 32767);
340 std::ostringstream os;
341 value->Int16x8Print(os);
342 CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
343 #endif  // OBJECT_PRINT
// Uint16x8.
347 uint16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};
349 Handle<Uint16x8> value = factory->NewUint16x8(lanes);
350 CHECK(value->IsUint16x8());
351 CheckSimdValue<Uint16x8, uint16_t, 8>(*value, lanes, 32767);
354 std::ostringstream os;
355 value->Uint16x8Print(os);
356 CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
357 #endif  // OBJECT_PRINT
// Bool16x8.
361 bool lanes[8] = {true, false, true, false, true, false, true, false};
363 Handle<Bool16x8> value = factory->NewBool16x8(lanes);
364 CHECK(value->IsBool16x8());
365 CheckSimdValue<Bool16x8, bool, 8>(*value, lanes, false);
368 std::ostringstream os;
369 value->Bool16x8Print(os);
370 CHECK_EQ("true, false, true, false, true, false, true, false", os.str());
371 #endif  // OBJECT_PRINT
// Int8x16.
375 int8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
377 Handle<Int8x16> value = factory->NewInt8x16(lanes);
378 CHECK(value->IsInt8x16());
379 CheckSimdValue<Int8x16, int8_t, 16>(*value, lanes, 127);
382 std::ostringstream os;
383 value->Int8x16Print(os);
384 CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
385 #endif  // OBJECT_PRINT
// Uint8x16.
389 uint8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
391 Handle<Uint8x16> value = factory->NewUint8x16(lanes);
392 CHECK(value->IsUint8x16());
393 CheckSimdValue<Uint8x16, uint8_t, 16>(*value, lanes, 127);
396 std::ostringstream os;
397 value->Uint8x16Print(os);
398 CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
399 #endif  // OBJECT_PRINT
// Bool8x16.
403 bool lanes[16] = {true, false, true, false, true, false, true, false,
404 true, false, true, false, true, false, true, false};
406 Handle<Bool8x16> value = factory->NewBool8x16(lanes);
407 CHECK(value->IsBool8x16());
408 CheckSimdValue<Bool8x16, bool, 16>(*value, lanes, false);
411 std::ostringstream os;
412 value->Bool8x16Print(os);
414 "true, false, true, false, true, false, true, false, true, false, "
415 "true, false, true, false, true, false",
417 #endif  // OBJECT_PRINT
// Body of the Tagging test (header elided): object-pointer alignment and
// Smi tagging round-trips for representative and boundary values.
423 CcTest::InitializeVM();
425 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
426 CHECK(Smi::FromInt(42)->IsSmi());
427 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
428 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
// Objects reachable from the global object (directly or via properties)
// must survive a scavenge; objects only reachable through an inner
// HandleScope are unrooted once the scope closes. NOTE(review): several
// lines and scope braces are elided in this listing.
432 TEST(GarbageCollection) {
433 CcTest::InitializeVM();
434 Isolate* isolate = CcTest::i_isolate();
435 Heap* heap = isolate->heap();
436 Factory* factory = isolate->factory();
438 HandleScope sc(isolate);
440 heap->CollectGarbage(NEW_SPACE);
442 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
443 Handle<String> name = factory->InternalizeUtf8String("theFunction");
444 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
445 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
446 Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
447 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
448 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
451 HandleScope inner_scope(isolate);
452 // Allocate a function and keep it in global object's property.
453 Handle<JSFunction> function = factory->NewFunction(name);
454 JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
455 // Allocate an object. Unrooted after leaving the scope.
456 Handle<JSObject> obj = factory->NewJSObject(function);
457 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
458 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
460 CHECK_EQ(Smi::FromInt(23),
461 *Object::GetProperty(obj, prop_name).ToHandleChecked());
462 CHECK_EQ(Smi::FromInt(24),
463 *Object::GetProperty(obj, prop_namex).ToHandleChecked());
466 heap->CollectGarbage(NEW_SPACE);
468 // Function should be alive.
469 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
470 // Check function is retained.
471 Handle<Object> func_value =
472 Object::GetProperty(global, name).ToHandleChecked();
473 CHECK(func_value->IsJSFunction());
474 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
477 HandleScope inner_scope(isolate);
478 // Allocate another object, make it reachable from global.
479 Handle<JSObject> obj = factory->NewJSObject(function);
480 JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
481 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
484 // After gc, it should survive.
485 heap->CollectGarbage(NEW_SPACE);
487 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
489 Object::GetProperty(global, obj_name).ToHandleChecked();
490 CHECK(obj->IsJSObject());
491 CHECK_EQ(Smi::FromInt(23),
492 *Object::GetProperty(obj, prop_name).ToHandleChecked());
// Allocates |string| as a heap String from UTF-8 and verifies length and
// per-character content against the C string.
496 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
497 HandleScope scope(isolate);
498 Handle<String> s = isolate->factory()->NewStringFromUtf8(
499 CStrVector(string)).ToHandleChecked();
500 CHECK_EQ(StrLength(string), s->length());
501 for (int index = 0; index < s->length(); index++) {
502 CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
// Body of a string-allocation test (header elided): round-trips several
// one-byte strings of increasing length through the factory.
508 CcTest::InitializeVM();
509 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
511 VerifyStringAllocation(isolate, "a");
512 VerifyStringAllocation(isolate, "ab");
513 VerifyStringAllocation(isolate, "abc");
514 VerifyStringAllocation(isolate, "abcd");
515 VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
// Body of a local-handle test (header elided): an ASCII factory string must
// report the same length as the source C string.
520 CcTest::InitializeVM();
521 Isolate* isolate = CcTest::i_isolate();
522 Factory* factory = isolate->factory();
524 v8::HandleScope scope(CcTest::isolate());
525 const char* name = "Kasper the spunky";
526 Handle<String> string = factory->NewStringFromAsciiChecked(name);
527 CHECK_EQ(StrLength(name), string->length());
// Strong global handles must keep their targets alive across a scavenge,
// and must be explicitly destroyed afterwards. NOTE(review): the h1..h4
// declarations and some braces are elided in this listing.
531 TEST(GlobalHandles) {
532 CcTest::InitializeVM();
533 Isolate* isolate = CcTest::i_isolate();
534 Heap* heap = isolate->heap();
535 Factory* factory = isolate->factory();
536 GlobalHandles* global_handles = isolate->global_handles();
544 HandleScope scope(isolate);
546 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
547 Handle<Object> u = factory->NewNumber(1.12344);
549 h1 = global_handles->Create(*i);
550 h2 = global_handles->Create(*u);
551 h3 = global_handles->Create(*i);
552 h4 = global_handles->Create(*u);
555 // after gc, it should survive
556 heap->CollectGarbage(NEW_SPACE);
558 CHECK((*h1)->IsString());
559 CHECK((*h2)->IsHeapNumber());
560 CHECK((*h3)->IsString());
561 CHECK((*h4)->IsHeapNumber());
564 GlobalHandles::Destroy(h1.location());
565 GlobalHandles::Destroy(h3.location());
568 GlobalHandles::Destroy(h2.location());
569 GlobalHandles::Destroy(h4.location());
// Shared flag set by the weak callback so tests can observe whether the
// weak handle was actually cleared by a GC.
573 static bool WeakPointerCleared = false;
// Weak callback: the parameter is a (persistent, id) pair; the sentinel id
// 1234 identifies the handle set up by the tests below.
575 static void TestWeakGlobalHandleCallback(
576 const v8::WeakCallbackData<v8::Value, void>& data) {
577 std::pair<v8::Persistent<v8::Value>*, int>* p =
578 reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
579 data.GetParameter());
580 if (p->second == 1234) WeakPointerCleared = true;
// A scavenge treats weak global handles as strong roots: the weak handle
// must survive and its callback must NOT fire.
585 TEST(WeakGlobalHandlesScavenge) {
586 i::FLAG_stress_compaction = false;
587 CcTest::InitializeVM();
588 Isolate* isolate = CcTest::i_isolate();
589 Heap* heap = isolate->heap();
590 Factory* factory = isolate->factory();
591 GlobalHandles* global_handles = isolate->global_handles();
593 WeakPointerCleared = false;
599 HandleScope scope(isolate);
601 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
602 Handle<Object> u = factory->NewNumber(1.12344);
604 h1 = global_handles->Create(*i);
605 h2 = global_handles->Create(*u);
// Sentinel id 1234 is what TestWeakGlobalHandleCallback checks for.
608 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
609 GlobalHandles::MakeWeak(h2.location(),
610 reinterpret_cast<void*>(&handle_and_id),
611 &TestWeakGlobalHandleCallback);
613 // Scavenge treats weak pointers as normal roots.
614 heap->CollectGarbage(NEW_SPACE);
616 CHECK((*h1)->IsString());
617 CHECK((*h2)->IsHeapNumber());
619 CHECK(!WeakPointerCleared);
620 CHECK(!global_handles->IsNearDeath(h2.location()));
621 CHECK(!global_handles->IsNearDeath(h1.location()));
623 GlobalHandles::Destroy(h1.location());
624 GlobalHandles::Destroy(h2.location());
// After promotion to old space, a full mark-compact GC must clear an
// otherwise-unreachable weak global handle and run its callback, while the
// strong handle survives.
628 TEST(WeakGlobalHandlesMark) {
629 CcTest::InitializeVM();
630 Isolate* isolate = CcTest::i_isolate();
631 Heap* heap = isolate->heap();
632 Factory* factory = isolate->factory();
633 GlobalHandles* global_handles = isolate->global_handles();
635 WeakPointerCleared = false;
641 HandleScope scope(isolate);
643 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
644 Handle<Object> u = factory->NewNumber(1.12344);
646 h1 = global_handles->Create(*i);
647 h2 = global_handles->Create(*u);
650 // Make sure the objects are promoted.
651 heap->CollectGarbage(OLD_SPACE);
652 heap->CollectGarbage(NEW_SPACE);
653 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
655 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
656 GlobalHandles::MakeWeak(h2.location(),
657 reinterpret_cast<void*>(&handle_and_id),
658 &TestWeakGlobalHandleCallback);
659 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
660 CHECK(!GlobalHandles::IsNearDeath(h2.location()));
662 // Incremental marking potentially marked handles before they turned weak.
663 heap->CollectAllGarbage();
665 CHECK((*h1)->IsString());
667 CHECK(WeakPointerCleared);
668 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
670 GlobalHandles::Destroy(h1.location());
// A weak global handle survives a scavenge (weakness not honored there)
// but is cleared by a full old-space GC, which runs the callback.
674 TEST(DeleteWeakGlobalHandle) {
675 i::FLAG_stress_compaction = false;
676 CcTest::InitializeVM();
677 Isolate* isolate = CcTest::i_isolate();
678 Heap* heap = isolate->heap();
679 Factory* factory = isolate->factory();
680 GlobalHandles* global_handles = isolate->global_handles();
682 WeakPointerCleared = false;
687 HandleScope scope(isolate);
689 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
690 h = global_handles->Create(*i);
693 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
694 GlobalHandles::MakeWeak(h.location(),
695 reinterpret_cast<void*>(&handle_and_id),
696 &TestWeakGlobalHandleCallback);
698 // Scanvenge does not recognize weak reference.
699 heap->CollectGarbage(NEW_SPACE);
701 CHECK(!WeakPointerCleared);
703 // Mark-compact treats weak reference properly.
704 heap->CollectGarbage(OLD_SPACE);
706 CHECK(WeakPointerCleared);
// A freshly allocated BytecodeArray must report the right length, frame
// size, and bytecode contents, with the bytecodes laid out inside the
// object's own allocation; contents must survive a full GC.
710 TEST(BytecodeArray) {
711 static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
712 static const int kRawBytesSize = sizeof(kRawBytes);
713 static const int kFrameSize = 32;
715 CcTest::InitializeVM();
716 Isolate* isolate = CcTest::i_isolate();
717 Heap* heap = isolate->heap();
718 Factory* factory = isolate->factory();
719 HandleScope scope(isolate);
721 // Allocate and initialize BytecodeArray
722 Handle<BytecodeArray> array =
723 factory->NewBytecodeArray(kRawBytesSize, kRawBytes, kFrameSize);
725 CHECK(array->IsBytecodeArray());
726 CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
727 CHECK_EQ(array->frame_size(), kFrameSize);
// Bytecodes must live within the BytecodeArray object's own bounds.
728 CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
729 CHECK_GE(array->address() + array->BytecodeArraySize(),
730 array->GetFirstBytecodeAddress() + array->length());
731 for (int i = 0; i < kRawBytesSize; i++) {
732 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
733 CHECK_EQ(array->get(i), kRawBytes[i]);
736 // Full garbage collection
737 heap->CollectAllGarbage();
739 // BytecodeArray should survive
740 CHECK_EQ(array->length(), kRawBytesSize);
741 CHECK_EQ(array->frame_size(), kFrameSize);
743 for (int i = 0; i < kRawBytesSize; i++) {
744 CHECK_EQ(array->get(i), kRawBytes[i]);
745 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
750 static const char* not_so_random_string_table[] = {
// For each entry of a NULL-terminated string table, internalizing the same
// UTF-8 content (by C string or by vector) must yield an internalized
// string with the expected content; repeated internalization must agree.
814 static void CheckInternalizedStrings(const char** strings) {
815 Isolate* isolate = CcTest::i_isolate();
816 Factory* factory = isolate->factory();
817 for (const char* string = *strings; *strings != 0; string = *strings++) {
818 HandleScope scope(isolate);
820 isolate->factory()->InternalizeUtf8String(CStrVector(string));
821 // InternalizeUtf8String may return a failure if a GC is needed.
822 CHECK(a->IsInternalizedString());
823 Handle<String> b = factory->InternalizeUtf8String(string);
825 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
826 b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
828 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
// Body of the string-table test (header elided): internalizing the table
// twice must be idempotent.
834 CcTest::InitializeVM();
836 v8::HandleScope sc(CcTest::isolate());
837 CheckInternalizedStrings(not_so_random_string_table);
838 CheckInternalizedStrings(not_so_random_string_table);
// Objects created from a fresh function can hold properties, and JSFunction
// objects themselves can also carry own properties.
842 TEST(FunctionAllocation) {
843 CcTest::InitializeVM();
844 Isolate* isolate = CcTest::i_isolate();
845 Factory* factory = isolate->factory();
847 v8::HandleScope sc(CcTest::isolate());
848 Handle<String> name = factory->InternalizeUtf8String("theFunction");
849 Handle<JSFunction> function = factory->NewFunction(name);
851 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
852 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
854 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
855 Handle<JSObject> obj = factory->NewJSObject(function);
856 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
857 CHECK_EQ(Smi::FromInt(23),
858 *Object::GetProperty(obj, prop_name).ToHandleChecked());
859 // Check that we can add properties to function objects.
860 JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
861 CHECK_EQ(Smi::FromInt(24),
862 *Object::GetProperty(function, prop_name).ToHandleChecked());
// Exercises add/delete of own properties in various orders, and verifies
// that plain strings and internalized strings with the same content are
// interchangeable as property keys.
866 TEST(ObjectProperties) {
867 CcTest::InitializeVM();
868 Isolate* isolate = CcTest::i_isolate();
869 Factory* factory = isolate->factory();
871 v8::HandleScope sc(CcTest::isolate());
872 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
873 Handle<Object> object = Object::GetProperty(
874 CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
875 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
876 Handle<JSObject> obj = factory->NewJSObject(constructor);
877 Handle<String> first = factory->InternalizeUtf8String("first");
878 Handle<String> second = factory->InternalizeUtf8String("second");
880 Handle<Smi> one(Smi::FromInt(1), isolate);
881 Handle<Smi> two(Smi::FromInt(2), isolate);
884 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
887 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
888 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
891 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
892 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
894 // add first and then second
895 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
896 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
897 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
898 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
900 // delete first and then second
901 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
902 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
903 JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
904 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
905 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
907 // add first and then second
908 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
909 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
910 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
911 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
913 // delete second and then first
914 JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
915 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
916 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
917 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
918 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
920 // check string and internalized string match
921 const char* string1 = "fisk";
922 Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
923 JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
924 Handle<String> s1_string = factory->InternalizeUtf8String(string1);
925 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));
927 // check internalized string and string match
928 const char* string2 = "fugl";
929 Handle<String> s2_string = factory->InternalizeUtf8String(string2);
930 JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
931 Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
932 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
// Body of the JSObjectMaps test (header elided): adding a property to an
// object must transition it away from the function's initial map.
937 CcTest::InitializeVM();
938 Isolate* isolate = CcTest::i_isolate();
939 Factory* factory = isolate->factory();
941 v8::HandleScope sc(CcTest::isolate());
942 Handle<String> name = factory->InternalizeUtf8String("theFunction");
943 Handle<JSFunction> function = factory->NewFunction(name);
945 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
946 Handle<JSObject> obj = factory->NewJSObject(function);
947 Handle<Map> initial_map(function->initial_map());
950 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
951 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
952 CHECK_EQ(Smi::FromInt(23),
953 *Object::GetProperty(obj, prop_name).ToHandleChecked());
955 // Check the map has changed
956 CHECK(*initial_map != obj->map());
// Body of the JSArray test (header elided): a JSArray stays in fast
// elements mode for small indices and switches to dictionary elements when
// its length exceeds the Smi range; element reads must still work.
961 CcTest::InitializeVM();
962 Isolate* isolate = CcTest::i_isolate();
963 Factory* factory = isolate->factory();
965 v8::HandleScope sc(CcTest::isolate());
966 Handle<String> name = factory->InternalizeUtf8String("Array");
967 Handle<Object> fun_obj = Object::GetProperty(
968 CcTest::i_isolate()->global_object(), name).ToHandleChecked();
969 Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
971 // Allocate the object.
972 Handle<Object> element;
973 Handle<JSObject> object = factory->NewJSObject(function);
974 Handle<JSArray> array = Handle<JSArray>::cast(object);
975 // We just initialized the VM, no heap allocation failure yet.
976 JSArray::Initialize(array, 0);
978 // Set array length to 0.
979 JSArray::SetLength(array, 0);
980 CHECK_EQ(Smi::FromInt(0), array->length());
981 // Must be in fast mode.
982 CHECK(array->HasFastSmiOrObjectElements());
984 // array[length] = name.
985 JSReceiver::SetElement(isolate, array, 0, name, SLOPPY).Check();
986 CHECK_EQ(Smi::FromInt(1), array->length());
987 element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
988 CHECK_EQ(*element, *name);
990 // Set array length with larger than smi value.
991 JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);
993 uint32_t int_length = 0;
994 CHECK(array->length()->ToArrayIndex(&int_length));
995 CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
996 CHECK(array->HasDictionaryElements());  // Must be in slow mode.
998 // array[length] = name.
999 JSReceiver::SetElement(isolate, array, int_length, name, SLOPPY).Check();
1000 uint32_t new_int_length = 0;
1001 CHECK(array->length()->ToArrayIndex(&new_int_length));
1002 CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
1003 element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
1004 CHECK_EQ(*element, *name);
1005 element = Object::GetElement(isolate, array, 0).ToHandleChecked();
1006 CHECK_EQ(*element, *name);
// CopyJSObject must produce a distinct object with identical named
// properties and elements; mutating the clone (with flipped values) must
// not affect the original.
1010 TEST(JSObjectCopy) {
1011 CcTest::InitializeVM();
1012 Isolate* isolate = CcTest::i_isolate();
1013 Factory* factory = isolate->factory();
1015 v8::HandleScope sc(CcTest::isolate());
1016 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
1017 Handle<Object> object = Object::GetProperty(
1018 CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
1019 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
1020 Handle<JSObject> obj = factory->NewJSObject(constructor);
1021 Handle<String> first = factory->InternalizeUtf8String("first");
1022 Handle<String> second = factory->InternalizeUtf8String("second");
1024 Handle<Smi> one(Smi::FromInt(1), isolate);
1025 Handle<Smi> two(Smi::FromInt(2), isolate);
1027 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
1028 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
1030 JSReceiver::SetElement(isolate, obj, 0, first, SLOPPY).Check();
1031 JSReceiver::SetElement(isolate, obj, 1, second, SLOPPY).Check();
1034 Handle<Object> value1, value2;
1035 Handle<JSObject> clone = factory->CopyJSObject(obj);
1036 CHECK(!clone.is_identical_to(obj));
1038 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
1039 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
1040 CHECK_EQ(*value1, *value2);
1041 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1042 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1043 CHECK_EQ(*value1, *value2);
1045 value1 = Object::GetProperty(obj, first).ToHandleChecked();
1046 value2 = Object::GetProperty(clone, first).ToHandleChecked();
1047 CHECK_EQ(*value1, *value2);
1048 value1 = Object::GetProperty(obj, second).ToHandleChecked();
1049 value2 = Object::GetProperty(clone, second).ToHandleChecked();
1050 CHECK_EQ(*value1, *value2);
// Flip the clone's values; the original must keep its own and the
// cross-checks below must match original vs clone accordingly.
1053 JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
1054 JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();
1056 JSReceiver::SetElement(isolate, clone, 0, second, SLOPPY).Check();
1057 JSReceiver::SetElement(isolate, clone, 1, first, SLOPPY).Check();
1059 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1060 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
1061 CHECK_EQ(*value1, *value2);
1062 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
1063 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1064 CHECK_EQ(*value1, *value2);
1066 value1 = Object::GetProperty(obj, second).ToHandleChecked();
1067 value2 = Object::GetProperty(clone, first).ToHandleChecked();
1068 CHECK_EQ(*value1, *value2);
1069 value1 = Object::GetProperty(obj, first).ToHandleChecked();
1070 value2 = Object::GetProperty(clone, second).ToHandleChecked();
1071 CHECK_EQ(*value1, *value2);
// Allocates internalized and non-internalized strings of lengths 0..99 in
// both one-byte and multi-byte (UTF-8) representations, checking that the
// reported character length matches the requested length in every case.
1075 TEST(StringAllocation) {
1076 CcTest::InitializeVM();
1077 Isolate* isolate = CcTest::i_isolate();
1078 Factory* factory = isolate->factory();
// A single three-byte UTF-8 sequence; each repetition decodes to exactly one
// character, which is what the length checks below rely on.
1080 const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
1081 for (int length = 0; length < 100; length++) {
1082 v8::HandleScope scope(CcTest::isolate());
// Build NUL-terminated buffers: `length` repetitions of the 3-byte sequence
// and `length` one-byte characters respectively.
1083 char* non_one_byte = NewArray<char>(3 * length + 1);
1084 char* one_byte = NewArray<char>(length + 1);
1085 non_one_byte[3 * length] = 0;
1086 one_byte[length] = 0;
1087 for (int i = 0; i < length; i++) {
1089 non_one_byte[3 * i] = chars[0];
1090 non_one_byte[3 * i + 1] = chars[1];
1091 non_one_byte[3 * i + 2] = chars[2];
// Internalized (symbol-table) strings must report the decoded character
// count, not the byte count.
1093 Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
1094 Vector<const char>(non_one_byte, 3 * length));
1095 CHECK_EQ(length, non_one_byte_sym->length());
1096 Handle<String> one_byte_sym =
1097 factory->InternalizeOneByteString(OneByteVector(one_byte, length));
1098 CHECK_EQ(length, one_byte_sym->length());
// Same checks for ordinary (non-internalized) strings; Hash() is called to
// exercise hash computation on the freshly allocated strings.
1099 Handle<String> non_one_byte_str =
1100 factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
1102 non_one_byte_str->Hash();
1103 CHECK_EQ(length, non_one_byte_str->length());
1104 Handle<String> one_byte_str =
1105 factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
1107 one_byte_str->Hash();
1108 CHECK_EQ(length, one_byte_str->length());
1109 DeleteArray(non_one_byte);
1110 DeleteArray(one_byte);
// Walks every live object in the heap and counts how many of the handles in
// `objs` (an array of `size` handles) point at an object the iterator visits.
1115 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1116 // Count the number of objects found in the heap.
1117 int found_count = 0;
1118 HeapIterator iterator(heap);
1119 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1120 for (int i = 0; i < size; i++) {
1121 if (*objs[i] == obj) {
// Allocates objects in several different spaces (new, old, large-object) and
// verifies that a full heap iteration finds each of them exactly.
1131 CcTest::InitializeVM();
1132 Isolate* isolate = CcTest::i_isolate();
1133 Factory* factory = isolate->factory();
1134 v8::HandleScope scope(CcTest::isolate());
1136 // Array of objects to scan heap for.
1137 const int objs_count = 6;
1138 Handle<Object> objs[objs_count];
1139 int next_objs_index = 0;
1141 // Allocate a JS array to OLD_SPACE and NEW_SPACE
1142 objs[next_objs_index++] = factory->NewJSArray(10);
1143 objs[next_objs_index++] =
1144 factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, Strength::WEAK, TENURED);
1146 // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
1147 objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
1148 objs[next_objs_index++] =
1149 factory->NewStringFromStaticChars("abcdefghij", TENURED);
1151 // Allocate a large string (for large object space).
// One byte over the regular-page object limit forces large-object-space
// allocation.
1152 int large_size = Page::kMaxRegularHeapObjectSize + 1;
1153 char* str = new char[large_size];
1154 for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
1155 str[large_size - 1] = '\0';
1156 objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
1159 // Add a Map object to look for.
1160 objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
1162 CHECK_EQ(objs_count, next_objs_index);
1163 CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
// Number of FixedArray elements that fit in an allocation of `size` bytes
// (inverse of FixedArray::SizeFor for pointer-sized elements).
1167 static int LenFromSize(int size) {
1168 return (size - FixedArray::kHeaderSize) / kPointerSize;
// Regression test: a JSObject holding a reference to a new-space object is
// cloned into old space; the write-barrier/dirty-region bookkeeping for the
// clone must be correct (crbug.com/39128).
1172 HEAP_TEST(Regression39128) {
1173 // Test case for crbug.com/39128.
1174 CcTest::InitializeVM();
1175 Isolate* isolate = CcTest::i_isolate();
1176 Heap* heap = CcTest::heap();
1178 // Increase the chance of 'bump-the-pointer' allocation in old space.
1179 heap->CollectAllGarbage();
1181 v8::HandleScope scope(CcTest::isolate());
1183 // The plan: create JSObject which references objects in new space.
1184 // Then clone this object (forcing it to go into old space) and check
1185 // that region dirty marks are updated correctly.
1187 // Step 1: prepare a map for the object. We add 1 inobject property to it.
1188 // Create a map with single inobject property.
1189 Handle<Map> my_map = Map::Create(CcTest::i_isolate(), 1);
1190 int n_properties = my_map->GetInObjectProperties();
1191 CHECK_GT(n_properties, 0);
1193 int object_size = my_map->instance_size();
1195 // Step 2: allocate a lot of objects so as to almost fill new space: we need
1196 // just enough room left to allocate the JSObject and thus fill new space.
1198 int allocation_amount = Min(FixedArray::kMaxSize,
1199 Page::kMaxRegularHeapObjectSize + kPointerSize);
1200 int allocation_len = LenFromSize(allocation_amount);
1201 NewSpace* new_space = heap->new_space();
// Read the allocation top/limit directly so the loop can see exactly how
// much linear allocation room remains in new space.
1202 Address* top_addr = new_space->allocation_top_address();
1203 Address* limit_addr = new_space->allocation_limit_address();
1204 while ((*limit_addr - *top_addr) > allocation_amount) {
1205 CHECK(!heap->always_allocate());
1206 Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
1207 CHECK(new_space->Contains(array));
1210 // Step 3: now allocate fixed array and JSObject to fill the whole new space.
1211 int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
1212 int fixed_array_len = LenFromSize(to_fill);
1213 CHECK(fixed_array_len < FixedArray::kMaxLength);
1215 CHECK(!heap->always_allocate());
1216 Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
1217 CHECK(new_space->Contains(array));
1219 Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
1220 CHECK(new_space->Contains(object));
1221 JSObject* jsobject = JSObject::cast(object);
1222 CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
1223 CHECK_EQ(0, jsobject->properties()->length());
1224 // Create a reference to object in new space in jsobject.
1225 FieldIndex index = FieldIndex::ForInObjectOffset(
1226 JSObject::kHeaderSize - kPointerSize);
1227 jsobject->FastPropertyAtPut(index, array);
// New space must now be completely full.
1229 CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
1231 // Step 4: clone jsobject, but force always allocate first to create a clone
1232 // in old pointer space.
1233 Address old_space_top = heap->old_space()->top();
1234 AlwaysAllocateScope aa_scope(isolate);
1235 Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
1236 JSObject* clone = JSObject::cast(clone_obj);
1237 if (clone->address() != old_space_top) {
1238 // Alas, got allocated from free list, we cannot do checks.
1241 CHECK(heap->old_space()->Contains(clone->address()));
// Verifies that unexecuted code is eventually flushed by full GCs, and that a
// flushed function is transparently recompiled when called again. Uses a
// fresh isolate so GC state is not perturbed by other tests.
1245 UNINITIALIZED_TEST(TestCodeFlushing) {
1246 // If we do not flush code this test is invalid.
1247 if (!FLAG_flush_code) return;
1248 i::FLAG_allow_natives_syntax = true;
1249 i::FLAG_optimize_for_size = false;
1250 v8::Isolate::CreateParams create_params;
1251 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
1252 v8::Isolate* isolate = v8::Isolate::New(create_params);
1253 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
1255 Factory* factory = i_isolate->factory();
1257 v8::HandleScope scope(isolate);
1258 v8::Context::New(isolate)->Enter();
1259 const char* source =
1266 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1268 // This compile will add the code to the compilation cache.
1270 v8::HandleScope scope(isolate);
1274 // Check function is compiled.
1275 Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
1276 foo_name).ToHandleChecked();
1277 CHECK(func_value->IsJSFunction());
1278 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1279 CHECK(function->shared()->is_compiled());
1281 // The code will survive at least two GCs.
1282 i_isolate->heap()->CollectAllGarbage();
1283 i_isolate->heap()->CollectAllGarbage();
1284 CHECK(function->shared()->is_compiled());
1286 // Simulate several GCs that use full marking.
// kAgingThreshold GCs are enough to age the code past the flushing
// threshold.
1287 const int kAgingThreshold = 6;
1288 for (int i = 0; i < kAgingThreshold; i++) {
1289 i_isolate->heap()->CollectAllGarbage();
1292 // foo should no longer be in the compilation cache
1293 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1294 CHECK(!function->is_compiled() || function->IsOptimized());
1295 // Call foo to get it recompiled.
1296 CompileRun("foo()");
1297 CHECK(function->shared()->is_compiled());
1298 CHECK(function->is_compiled());
// With --optimize-for-size, code that is compiled but run at most once starts
// out pre-aged and is flushed after a single full GC; running it again resets
// its age so it survives further GCs.
1305 TEST(TestCodeFlushingPreAged) {
1306 // If we do not flush code this test is invalid.
1307 if (!FLAG_flush_code) return;
1308 i::FLAG_allow_natives_syntax = true;
// optimize_for_size enables the pre-aging behavior this test exercises.
1309 i::FLAG_optimize_for_size = true;
1310 CcTest::InitializeVM();
1311 Isolate* isolate = CcTest::i_isolate();
1312 Factory* factory = isolate->factory();
1313 v8::HandleScope scope(CcTest::isolate());
1314 const char* source = "function foo() {"
1320 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1322 // Compile foo, but don't run it.
1323 { v8::HandleScope scope(CcTest::isolate());
1327 // Check function is compiled.
1328 Handle<Object> func_value =
1329 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1330 CHECK(func_value->IsJSFunction());
1331 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1332 CHECK(function->shared()->is_compiled());
1334 // The code has been run so will survive at least one GC.
1335 CcTest::heap()->CollectAllGarbage();
1336 CHECK(function->shared()->is_compiled());
1338 // The code was only run once, so it should be pre-aged and collected on the
1340 CcTest::heap()->CollectAllGarbage();
1341 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1343 // Execute the function again twice, and ensure it is reset to the young age.
1344 { v8::HandleScope scope(CcTest::isolate());
1349 // The code will survive at least two GC now that it is young again.
1350 CcTest::heap()->CollectAllGarbage();
1351 CcTest::heap()->CollectAllGarbage();
1352 CHECK(function->shared()->is_compiled());
1354 // Simulate several GCs that use full marking.
1355 const int kAgingThreshold = 6;
1356 for (int i = 0; i < kAgingThreshold; i++) {
1357 CcTest::heap()->CollectAllGarbage();
1360 // foo should no longer be in the compilation cache
1361 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1362 CHECK(!function->is_compiled() || function->IsOptimized());
1363 // Call foo to get it recompiled.
1364 CompileRun("foo()");
1365 CHECK(function->shared()->is_compiled());
1366 CHECK(function->is_compiled());
// Same flushing scenario as TestCodeFlushing, but driven by incremental
// marking; also checks that optimizing a function while it sits on the
// flushing-candidate list does not corrupt the candidate queue.
1370 TEST(TestCodeFlushingIncremental) {
1371 // If we do not flush code this test is invalid.
1372 if (!FLAG_flush_code) return;
1373 i::FLAG_allow_natives_syntax = true;
1374 i::FLAG_optimize_for_size = false;
1375 CcTest::InitializeVM();
1376 Isolate* isolate = CcTest::i_isolate();
1377 Factory* factory = isolate->factory();
1378 v8::HandleScope scope(CcTest::isolate());
1379 const char* source = "function foo() {"
1385 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1387 // This compile will add the code to the compilation cache.
1388 { v8::HandleScope scope(CcTest::isolate());
1392 // Check function is compiled.
1393 Handle<Object> func_value =
1394 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1395 CHECK(func_value->IsJSFunction());
1396 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1397 CHECK(function->shared()->is_compiled());
1399 // The code will survive at least two GCs.
1400 CcTest::heap()->CollectAllGarbage();
1401 CcTest::heap()->CollectAllGarbage();
1402 CHECK(function->shared()->is_compiled());
1404 // Simulate several GCs that use incremental marking.
1405 const int kAgingThreshold = 6;
1406 for (int i = 0; i < kAgingThreshold; i++) {
1407 SimulateIncrementalMarking(CcTest::heap());
1408 CcTest::heap()->CollectAllGarbage();
1410 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1411 CHECK(!function->is_compiled() || function->IsOptimized());
1413 // This compile will compile the function again.
1414 { v8::HandleScope scope(CcTest::isolate());
1415 CompileRun("foo();");
1418 // Simulate several GCs that use incremental marking but make sure
1419 // the loop breaks once the function is enqueued as a candidate.
// A non-undefined next_function_link() indicates the function has been put
// on the code-flushing candidate list.
1420 for (int i = 0; i < kAgingThreshold; i++) {
1421 SimulateIncrementalMarking(CcTest::heap());
1422 if (!function->next_function_link()->IsUndefined()) break;
1423 CcTest::heap()->CollectAllGarbage();
1426 // Force optimization while incremental marking is active and while
1427 // the function is enqueued as a candidate.
1428 { v8::HandleScope scope(CcTest::isolate());
1429 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1432 // Simulate one final GC to make sure the candidate queue is sane.
1433 CcTest::heap()->CollectAllGarbage();
1434 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1435 CHECK(function->is_compiled() || !function->IsOptimized());
// Enqueues two functions as flushing candidates during incremental marking,
// kills one of them, then runs a scavenge mid-marking to verify the candidate
// list tolerates a candidate dying before the full GC completes.
1439 TEST(TestCodeFlushingIncrementalScavenge) {
1440 // If we do not flush code this test is invalid.
1441 if (!FLAG_flush_code) return;
1442 i::FLAG_allow_natives_syntax = true;
1443 i::FLAG_optimize_for_size = false;
1444 CcTest::InitializeVM();
1445 Isolate* isolate = CcTest::i_isolate();
1446 Factory* factory = isolate->factory();
1447 v8::HandleScope scope(CcTest::isolate());
1448 const char* source = "var foo = function() {"
1454 "var bar = function() {"
1458 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1459 Handle<String> bar_name = factory->InternalizeUtf8String("bar");
1461 // Perform one initial GC to enable code flushing.
1462 CcTest::heap()->CollectAllGarbage();
1464 // This compile will add the code to the compilation cache.
1465 { v8::HandleScope scope(CcTest::isolate());
1469 // Check functions are compiled.
1470 Handle<Object> func_value =
1471 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1472 CHECK(func_value->IsJSFunction());
1473 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1474 CHECK(function->shared()->is_compiled());
1475 Handle<Object> func_value2 =
1476 Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
1477 CHECK(func_value2->IsJSFunction());
1478 Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
1479 CHECK(function2->shared()->is_compiled());
1481 // Clear references to functions so that one of them can die.
1482 { v8::HandleScope scope(CcTest::isolate());
1483 CompileRun("foo = 0; bar = 0;");
1486 // Bump the code age so that flushing is triggered while the function
1487 // object is still located in new-space.
1488 const int kAgingThreshold = 6;
1489 for (int i = 0; i < kAgingThreshold; i++) {
1490 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1491 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1494 // Simulate incremental marking so that the functions are enqueued as
1495 // code flushing candidates. Then kill one of the functions. Finally
1496 // perform a scavenge while incremental marking is still running.
// Overwriting the handle's slot drops the last strong reference to bar.
1497 SimulateIncrementalMarking(CcTest::heap());
1498 *function2.location() = NULL;
1499 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
1501 // Simulate one final GC to make sure the candidate queue is sane.
1502 CcTest::heap()->CollectAllGarbage();
1503 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1504 CHECK(!function->is_compiled() || function->IsOptimized());
// Aborts incremental marking (by enabling the debugger and setting/clearing a
// breakpoint) while a function is enqueued as a flushing candidate, then
// optimizes it and verifies the candidate queue is left in a sane state.
1508 TEST(TestCodeFlushingIncrementalAbort) {
1509 // If we do not flush code this test is invalid.
1510 if (!FLAG_flush_code) return;
1511 i::FLAG_allow_natives_syntax = true;
1512 i::FLAG_optimize_for_size = false;
1513 CcTest::InitializeVM();
1514 Isolate* isolate = CcTest::i_isolate();
1515 Factory* factory = isolate->factory();
1516 Heap* heap = isolate->heap();
1517 v8::HandleScope scope(CcTest::isolate());
1518 const char* source = "function foo() {"
1524 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1526 // This compile will add the code to the compilation cache.
1527 { v8::HandleScope scope(CcTest::isolate());
1531 // Check function is compiled.
1532 Handle<Object> func_value =
1533 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1534 CHECK(func_value->IsJSFunction());
1535 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1536 CHECK(function->shared()->is_compiled());
1538 // The code will survive at least two GCs.
1539 heap->CollectAllGarbage();
1540 heap->CollectAllGarbage();
1541 CHECK(function->shared()->is_compiled());
1543 // Bump the code age so that flushing is triggered.
1544 const int kAgingThreshold = 6;
1545 for (int i = 0; i < kAgingThreshold; i++) {
1546 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1549 // Simulate incremental marking so that the function is enqueued as
1550 // code flushing candidate.
1551 SimulateIncrementalMarking(heap);
1553 // Enable the debugger and add a breakpoint while incremental marking
1554 // is running so that incremental marking aborts and code flushing is
1557 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1559 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
1560 isolate->debug()->ClearAllBreakPoints();
1563 // Force optimization now that code flushing is disabled.
1564 { v8::HandleScope scope(CcTest::isolate());
1565 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1568 // Simulate one final GC to make sure the candidate queue is sane.
1569 heap->CollectAllGarbage();
1570 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1571 CHECK(function->is_compiled() || !function->IsOptimized());
// Exercises the two-phase compilation cache: first compile inserts only a
// hash, second compile inserts a real entry; the entry survives GC while the
// code is young, is cleared once the code ages, and is not re-cached if the
// cache generations are rotated before the script is cached.
1575 TEST(CompilationCacheCachingBehavior) {
1576 // If we do not flush code, or have the compilation cache turned off, this
1578 if (!FLAG_flush_code || !FLAG_compilation_cache) {
1581 CcTest::InitializeVM();
1582 Isolate* isolate = CcTest::i_isolate();
1583 Factory* factory = isolate->factory();
1584 Heap* heap = isolate->heap();
1585 CompilationCache* compilation_cache = isolate->compilation_cache();
1586 LanguageMode language_mode =
1587 construct_language_mode(FLAG_use_strict, FLAG_use_strong);
1589 v8::HandleScope scope(CcTest::isolate());
1590 const char* raw_source =
1597 Handle<String> source = factory->InternalizeUtf8String(raw_source);
1598 Handle<Context> native_context = isolate->native_context();
1601 v8::HandleScope scope(CcTest::isolate());
1602 CompileRun(raw_source);
1605 // On first compilation, only a hash is inserted in the code cache. We can't
1607 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
1608 source, Handle<Object>(), 0, 0,
1609 v8::ScriptOriginOptions(false, true, false), native_context,
1611 CHECK(info.is_null());
1614 v8::HandleScope scope(CcTest::isolate());
1615 CompileRun(raw_source);
1618 // On second compilation, the hash is replaced by a real cache entry mapping
1619 // the source to the shared function info containing the code.
1620 info = compilation_cache->LookupScript(
1621 source, Handle<Object>(), 0, 0,
1622 v8::ScriptOriginOptions(false, true, false), native_context,
1624 CHECK(!info.is_null());
1626 heap->CollectAllGarbage();
1628 // The cache entry should survive a full GC while the cached code is
1629 // still young.
1630 info = compilation_cache->LookupScript(
1631 source, Handle<Object>(), 0, 0,
1632 v8::ScriptOriginOptions(false, true, false), native_context,
1634 CHECK(!info.is_null());
// Age the cached code all the way, then GC: aging must evict the entry.
1636 while (!info.ToHandleChecked()->code()->IsOld()) {
1637 info.ToHandleChecked()->code()->MakeOlder(NO_MARKING_PARITY);
1640 heap->CollectAllGarbage();
1641 // Ensure code aging cleared the entry from the cache.
1642 info = compilation_cache->LookupScript(
1643 source, Handle<Object>(), 0, 0,
1644 v8::ScriptOriginOptions(false, true, false), native_context,
1646 CHECK(info.is_null());
1649 v8::HandleScope scope(CcTest::isolate());
1650 CompileRun(raw_source);
1653 // On first compilation, only a hash is inserted in the code cache. We can't
1655 info = compilation_cache->LookupScript(
1656 source, Handle<Object>(), 0, 0,
1657 v8::ScriptOriginOptions(false, true, false), native_context,
1659 CHECK(info.is_null());
// Rotate through every hash generation so the first-compile hash is lost
// before the script is compiled again.
1661 for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
1662 compilation_cache->MarkCompactPrologue();
1666 v8::HandleScope scope(CcTest::isolate());
1667 CompileRun(raw_source);
1670 // If we aged the cache before caching the script, ensure that we didn't cache
1671 // on next compilation.
1672 info = compilation_cache->LookupScript(
1673 source, Handle<Object>(), 0, 0,
1674 v8::ScriptOriginOptions(false, true, false), native_context,
1676 CHECK(info.is_null());
// Defines and force-optimizes a trivial function called `name` via the
// %OptimizeFunctionOnNextCall native (requires --allow-natives-syntax); used
// by the weak-list tests below to populate the optimized-functions list.
1680 static void OptimizeEmptyFunction(const char* name) {
1681 HandleScope scope(CcTest::i_isolate());
1682 EmbeddedVector<char, 256> source;
1684 "function %s() { return 0; }"
1686 "%%OptimizeFunctionOnNextCall(%s);"
1688 name, name, name, name, name);
1689 CompileRun(source.start());
1693 // Count the number of native contexts in the weak list of native contexts.
// Walks heap->native_contexts_list() via the NEXT_CONTEXT_LINK slot until the
// undefined sentinel terminates the list.
1694 int CountNativeContexts() {
1696 Object* object = CcTest::heap()->native_contexts_list();
1697 while (!object->IsUndefined()) {
1699 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1701 // Subtract one to compensate for the code stub context that is always present
1706 // Count the number of user functions in the weak list of optimized
1707 // functions attached to a native context.
// The list is threaded through JSFunction::next_function_link(); iteration
// stops at the first builtin (or non-function list terminator).
1708 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1710 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1711 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1712 while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1714 object = JSFunction::cast(object)->next_function_link();
// Checks the heap's internal weak lists: the per-heap list of native contexts
// and the per-context list of optimized functions. Scavenges must treat these
// links as strong (list length unchanged); mark-compact must honor their
// weakness (dead entries are dropped).
1720 TEST(TestInternalWeakLists) {
1721 FLAG_always_opt = false;
1722 FLAG_allow_natives_syntax = true;
1723 v8::V8::Initialize();
1725 // Some flags turn Scavenge collections into Mark-sweep collections
1726 // and hence are incompatible with this test case.
1727 if (FLAG_gc_global || FLAG_stress_compaction) return;
1728 FLAG_retain_maps_for_n_gc = 0;
1730 static const int kNumTestContexts = 10;
1732 Isolate* isolate = CcTest::i_isolate();
1733 Heap* heap = isolate->heap();
1734 HandleScope scope(isolate);
1735 v8::Handle<v8::Context> ctx[kNumTestContexts];
1736 if (!isolate->use_crankshaft()) return;
1738 CHECK_EQ(0, CountNativeContexts());
1740 // Create a number of global contexts which get linked together.
1741 for (int i = 0; i < kNumTestContexts; i++) {
1742 ctx[i] = v8::Context::New(CcTest::isolate());
1744 // Collect garbage that might have been created by one of the
1745 // installed extensions.
1746 isolate->compilation_cache()->Clear();
1747 heap->CollectAllGarbage();
1749 CHECK_EQ(i + 1, CountNativeContexts());
1753 // Create a handle scope so no function objects get stuck in the outer
1755 HandleScope scope(isolate);
1756 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1757 OptimizeEmptyFunction("f1");
1758 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
1759 OptimizeEmptyFunction("f2");
1760 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1761 OptimizeEmptyFunction("f3");
1762 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1763 OptimizeEmptyFunction("f4");
1764 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1765 OptimizeEmptyFunction("f5");
1766 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1768 // Remove function f1, and
1769 CompileRun("f1=null");
1771 // Scavenge treats these references as strong.
1772 for (int j = 0; j < 10; j++) {
1773 CcTest::heap()->CollectGarbage(NEW_SPACE);
1774 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1777 // Mark compact handles the weak references.
1778 isolate->compilation_cache()->Clear();
1779 heap->CollectAllGarbage();
1780 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1782 // Get rid of f3 and f5 in the same way.
1783 CompileRun("f3=null");
1784 for (int j = 0; j < 10; j++) {
1785 CcTest::heap()->CollectGarbage(NEW_SPACE);
1786 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1788 CcTest::heap()->CollectAllGarbage();
1789 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1790 CompileRun("f5=null");
1791 for (int j = 0; j < 10; j++) {
1792 CcTest::heap()->CollectGarbage(NEW_SPACE);
1793 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1795 CcTest::heap()->CollectAllGarbage();
1796 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1801 // Force compilation cache cleanup.
1802 CcTest::heap()->NotifyContextDisposed(true);
1803 CcTest::heap()->CollectAllGarbage();
1805 // Dispose the native contexts one by one.
1806 for (int i = 0; i < kNumTestContexts; i++) {
1807 // TODO(dcarney): is there a better way to do this?
// Overwrite the handle's slot with undefined to drop the last strong
// reference to the context.
1808 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1809 *unsafe = CcTest::heap()->undefined_value();
1812 // Scavenge treats these references as strong.
1813 for (int j = 0; j < 10; j++) {
1814 CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1815 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1818 // Mark compact handles the weak references.
1819 CcTest::heap()->CollectAllGarbage();
1820 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1823 CHECK_EQ(0, CountNativeContexts());
1827 // Count the number of native contexts in the weak list of native contexts
1828 // causing a GC after the specified number of elements.
// Handle-based variant of CountNativeContexts: handles keep the current list
// node alive across the mid-iteration CollectAllGarbage call.
1829 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1830 Heap* heap = isolate->heap();
1832 Handle<Object> object(heap->native_contexts_list(), isolate);
1833 while (!object->IsUndefined()) {
1835 if (count == n) heap->CollectAllGarbage();
1837 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1840 // Subtract one to compensate for the code stub context that is always present
1845 // Count the number of user functions in the weak list of optimized
1846 // functions attached to a native context causing a GC after the
1847 // specified number of elements.
// Handle-based variant of CountOptimizedUserFunctions: handles keep the
// current list node alive across the mid-iteration CollectAllGarbage call.
1848 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1851 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1852 Isolate* isolate = icontext->GetIsolate();
1853 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1855 while (object->IsJSFunction() &&
1856 !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1858 if (count == n) isolate->heap()->CollectAllGarbage();
1859 object = Handle<Object>(
1860 Object::cast(JSFunction::cast(*object)->next_function_link()),
// Verifies that the weak native-context and optimized-function lists report
// the same length whether or not a full GC is triggered in the middle of
// traversing them.
1867 TEST(TestInternalWeakListsTraverseWithGC) {
1868 FLAG_always_opt = false;
1869 FLAG_allow_natives_syntax = true;
1870 v8::V8::Initialize();
1872 static const int kNumTestContexts = 10;
1874 Isolate* isolate = CcTest::i_isolate();
1875 HandleScope scope(isolate);
1876 v8::Handle<v8::Context> ctx[kNumTestContexts];
1877 if (!isolate->use_crankshaft()) return;
1879 CHECK_EQ(0, CountNativeContexts());
1881 // Create a number of contexts and check the length of the weak list both
1882 // with and without GCs while iterating the list.
1883 for (int i = 0; i < kNumTestContexts; i++) {
1884 ctx[i] = v8::Context::New(CcTest::isolate());
1885 CHECK_EQ(i + 1, CountNativeContexts());
1886 CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
1891 // Compile a number of functions the length of the weak list of optimized
1892 // functions both with and without GCs while iterating the list.
1893 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1894 OptimizeEmptyFunction("f1");
1895 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
1896 CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1897 OptimizeEmptyFunction("f2");
1898 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
1899 CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1900 OptimizeEmptyFunction("f3");
1901 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
1902 CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1903 OptimizeEmptyFunction("f4");
1904 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
1905 CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1906 OptimizeEmptyFunction("f5");
1907 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
1908 CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
// Measures heap growth from regexp compilation: a regexp source just over the
// optimization size limit compiles unoptimized, while one at half the size
// compiles optimized and is expected to produce more than twice the code.
1914 TEST(TestSizeOfRegExpCode) {
1915 if (!FLAG_regexp_optimization) return;
1917 v8::V8::Initialize();
1919 Isolate* isolate = CcTest::i_isolate();
1920 HandleScope scope(isolate);
1922 LocalContext context;
1924 // Adjust source below and this check to match
1925 // RegExpImpl::kRegExpTooLargeToOptimize.
1926 DCHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);
1928 // Compile a regexp that is much larger if we are using regexp optimizations.
// Double the source until it crosses 20KB, keeping the last sub-limit value
// in half_size_reg_exp for the optimized-compilation comparison below.
1930 "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
1931 "var half_size_reg_exp;"
1932 "while (reg_exp_source.length < 20 * 1024) {"
1933 " half_size_reg_exp = reg_exp_source;"
1934 " reg_exp_source = reg_exp_source + reg_exp_source;"
1937 "reg_exp_source.match(/f/);");
1939 // Get initial heap size after several full GCs, which will stabilize
1940 // the heap size and return with sweeping finished completely.
1941 CcTest::heap()->CollectAllGarbage();
1942 CcTest::heap()->CollectAllGarbage();
1943 CcTest::heap()->CollectAllGarbage();
1944 CcTest::heap()->CollectAllGarbage();
1945 CcTest::heap()->CollectAllGarbage();
1946 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1947 if (collector->sweeping_in_progress()) {
1948 collector->EnsureSweepingCompleted();
1950 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1952 CompileRun("'foo'.match(reg_exp_source);");
1953 CcTest::heap()->CollectAllGarbage();
1954 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1956 CompileRun("'foo'.match(half_size_reg_exp);");
1957 CcTest::heap()->CollectAllGarbage();
1958 int size_with_optimized_regexp =
1959 static_cast<int>(CcTest::heap()->SizeOfObjects());
1961 int size_of_regexp_code = size_with_regexp - initial_size;
1963 // On some platforms the debug-code flag causes huge amounts of regexp code
1964 // to be emitted, breaking this test.
1965 if (!FLAG_debug_code) {
1966 CHECK_LE(size_of_regexp_code, 1 * MB);
1969 // Small regexp is half the size, but compiles to more than twice the code
1970 // due to the optimization steps.
1971 CHECK_GE(size_with_optimized_regexp,
1972 size_with_regexp + size_of_regexp_code * 2);
// Checks that Heap::SizeOfObjects() tracks allocations exactly while objects
// are allocated, and returns to the initial value after a full GC even while
// concurrent sweeping may still be in progress.
1976 HEAP_TEST(TestSizeOfObjects) {
1977 v8::V8::Initialize();
1979 // Get initial heap size after several full GCs, which will stabilize
1980 // the heap size and return with sweeping finished completely.
1981 CcTest::heap()->CollectAllGarbage();
1982 CcTest::heap()->CollectAllGarbage();
1983 CcTest::heap()->CollectAllGarbage();
1984 CcTest::heap()->CollectAllGarbage();
1985 CcTest::heap()->CollectAllGarbage();
1986 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1987 if (collector->sweeping_in_progress()) {
1988 collector->EnsureSweepingCompleted();
1990 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1993 // Allocate objects on several different old-space pages so that
1994 // concurrent sweeper threads will be busy sweeping the old space on
1995 // subsequent GC runs.
1996 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1997 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1998 for (int i = 1; i <= 100; i++) {
1999 CcTest::heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
// SizeOfObjects must grow by exactly one fixed-array's size per allocation.
2000 CHECK_EQ(initial_size + i * filler_size,
2001 static_cast<int>(CcTest::heap()->SizeOfObjects()));
2005 // The heap size should go back to initial size after a full GC, even
2006 // though sweeping didn't finish yet.
2007 CcTest::heap()->CollectAllGarbage();
2009 // Normally sweeping would not be complete here, but no guarantees.
2011 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
2013 // Waiting for sweeper threads should not change heap size.
2014 if (collector->sweeping_in_progress()) {
2015 collector->EnsureSweepingCompleted();
2017 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
// Exercises the static alignment helpers Heap::GetMaximumFillToAlign and
// Heap::GetFillToAlign for word, double, double-unaligned and SIMD128
// alignments, starting from a NULL base address.
// NOTE(review): lossy extract — some CHECK lines between visible lines are
// missing (embedded numbering jumps, e.g. 2039→2041).
2021 TEST(TestAlignmentCalculations) {
2022 // Maximum fill amounts are consistent.
// double/simd misalignment is the worst-case padding relative to a
// pointer-aligned address (0 on platforms where kPointerSize == kDoubleSize).
2023 int maximum_double_misalignment = kDoubleSize - kPointerSize;
2024 int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
2025 int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
2026 CHECK_EQ(0, max_word_fill);
2027 int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
2028 CHECK_EQ(maximum_double_misalignment, max_double_fill);
2029 int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
2030 CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
2031 int max_simd128_unaligned_fill =
2032 Heap::GetMaximumFillToAlign(kSimd128Unaligned);
2033 CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);
// All GetFillToAlign probes below are offsets from a NULL base address.
2035 Address base = static_cast<Address>(NULL);
2038 // Word alignment never requires fill.
2039 fill = Heap::GetFillToAlign(base, kWordAligned);
2041 fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
2044 // No fill is required when address is double aligned.
2045 fill = Heap::GetFillToAlign(base, kDoubleAligned);
2047 // Fill is required if address is not double aligned.
2048 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
2049 CHECK_EQ(maximum_double_misalignment, fill);
2050 // kDoubleUnaligned has the opposite fill amounts.
2051 fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
2052 CHECK_EQ(maximum_double_misalignment, fill);
2053 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
2056 // 128 bit SIMD types have 2 or 4 possible alignments, depending on platform.
// Masking with kSimd128AlignmentMask makes the expectation correct on both
// 32-bit (4 alignments) and 64-bit (2 alignments) platforms.
2057 fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
2058 CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
2059 fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
2060 CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
2061 fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
2062 CHECK_EQ(kPointerSize, fill);
2063 fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
// Allocates |size| bytes in new space with the requested alignment and
// immediately overwrites the allocation with a filler object so the heap
// stays iterable. Returns the allocated object.
// NOTE(review): the trailing `return obj;` / closing brace are missing from
// this lossy extract (numbering jumps 2075→2080).
2068 static HeapObject* NewSpaceAllocateAligned(int size,
2069 AllocationAlignment alignment) {
2070 Heap* heap = CcTest::heap();
2071 AllocationResult allocation =
2072 heap->new_space()->AllocateRawAligned(size, alignment);
2073 HeapObject* obj = NULL;
2074 allocation.To(&obj);
// Fill the raw allocation so the heap remains in a verifiable state.
2075 heap->CreateFillerObjectAt(obj->address(), size);
// Advances the new-space allocation top to the requested alignment plus
// |offset| by performing a word-aligned throw-away allocation.
// NOTE(review): the `return` of the resulting top and the closing brace are
// missing in this lossy extract.
2080 // Get new space allocation into the desired alignment.
2081 static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
2082 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
2083 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2085 NewSpaceAllocateAligned(fill + offset, kWordAligned);
// Verifies bump-pointer aligned allocation in new space: when the top is
// already correctly aligned no filler is inserted, otherwise a filler object
// of the expected size precedes the allocation. Covers kDoubleAligned,
// kDoubleUnaligned and kSimd128Unaligned (the latter with the two extra
// 32-bit-only alignment cases).
// NOTE(review): lossy extract — declarations of `start`, `obj`, `filler`
// and several closing braces are among the missing lines.
2091 TEST(TestAlignedAllocation) {
2092 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2093 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2094 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
// The double-alignment cases only exist on 32-bit platforms, where
// double_misalignment is non-zero.
2098 if (double_misalignment) {
2099 // Allocate a pointer sized object that must be double aligned at an
2101 start = AlignNewSpace(kDoubleAligned, 0);
2102 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2103 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2104 // There is no filler.
2105 CHECK_EQ(kPointerSize, *top_addr - start);
2107 // Allocate a second pointer sized object that must be double aligned at an
2108 // unaligned address.
2109 start = AlignNewSpace(kDoubleAligned, kPointerSize);
2110 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2111 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2112 // There is a filler object before the object.
2113 filler = HeapObject::FromAddress(start);
2114 CHECK(obj != filler && filler->IsFiller() &&
2115 filler->Size() == kPointerSize);
2116 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
2118 // Similarly for kDoubleUnaligned.
2119 start = AlignNewSpace(kDoubleUnaligned, 0);
2120 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
// kDoubleUnaligned means aligned to kDoubleAlignment with a kPointerSize
// offset, hence the third argument to IsAddressAligned.
2121 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2122 CHECK_EQ(kPointerSize, *top_addr - start);
2123 start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
2124 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2125 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2126 // There is a filler object before the object.
2127 filler = HeapObject::FromAddress(start);
2128 CHECK(obj != filler && filler->IsFiller() &&
2129 filler->Size() == kPointerSize);
2130 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
2133 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2135 start = AlignNewSpace(kSimd128Unaligned, 0);
2136 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2137 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2138 // There is no filler.
2139 CHECK_EQ(kPointerSize, *top_addr - start);
2140 start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
2141 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2142 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2143 // There is a filler object before the object.
2144 filler = HeapObject::FromAddress(start);
2145 CHECK(obj != filler && filler->IsFiller() &&
2146 filler->Size() == kSimd128Size - kPointerSize);
2147 CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);
2149 if (double_misalignment) {
2150 // Test the 2 other alignments possible on 32 bit platforms.
2151 start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
2152 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2153 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2154 // There is a filler object before the object.
2155 filler = HeapObject::FromAddress(start);
2156 CHECK(obj != filler && filler->IsFiller() &&
2157 filler->Size() == 2 * kPointerSize);
2158 CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
2159 start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
2160 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2161 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2162 // There is a filler object before the object.
2163 filler = HeapObject::FromAddress(start);
2164 CHECK(obj != filler && filler->IsFiller() &&
2165 filler->Size() == kPointerSize);
2166 CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
// Old-space counterpart of NewSpaceAllocateAligned: allocates |size| bytes
// with the requested alignment and overwrites the allocation with a filler
// object so the heap stays iterable. Returns the allocated object.
// NOTE(review): the trailing `return obj;` / closing brace are missing from
// this lossy extract.
2171 static HeapObject* OldSpaceAllocateAligned(int size,
2172 AllocationAlignment alignment) {
2173 Heap* heap = CcTest::heap();
2174 AllocationResult allocation =
2175 heap->old_space()->AllocateRawAligned(size, alignment);
2176 HeapObject* obj = NULL;
2177 allocation.To(&obj);
2178 heap->CreateFillerObjectAt(obj->address(), size);
// Brings the old-space allocation top to the requested alignment plus
// |offset|, then forces the remaining linear allocation area onto the free
// list so subsequent allocations must come from the free list.
// NOTE(review): the function's return statement/closing brace are missing
// from this lossy extract (numbering jumps 2193→2198).
2183 // Get old space allocation into the desired alignment.
2184 static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
2185 Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
2186 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2187 int allocation = fill + offset;
// Throw-away word-aligned allocation that consumes exactly `allocation`
// bytes, leaving the top at the desired position.
2189 OldSpaceAllocateAligned(allocation, kWordAligned);
2191 Address top = *top_addr;
2192 // Now force the remaining allocation onto the free list.
2193 CcTest::heap()->old_space()->EmptyAllocationInfo();
// Verifies aligned allocation when memory must come from the old-space free
// list: depending on where the free chunk starts, the filler may precede,
// follow, or bracket the allocated object.
// NOTE(review): lossy extract — declarations of `start`/`obj` and several
// closing braces are missing. Also note lines 2219-2224 below check the
// same filler condition twice (once as three CHECKs, once combined); the
// duplication is preserved here since a doc_update cannot remove code.
2198 // Test the case where allocation must be done from the free list, so filler
2199 // may precede or follow the object.
2200 TEST(TestAlignedOverAllocation) {
2201 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2202 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2205 HeapObject* filler1;
2206 HeapObject* filler2;
2207 if (double_misalignment) {
2208 start = AlignOldSpace(kDoubleAligned, 0);
2209 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2210 // The object is aligned, and a filler object is created after.
2211 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2212 filler1 = HeapObject::FromAddress(start + kPointerSize);
2213 CHECK(obj != filler1 && filler1->IsFiller() &&
2214 filler1->Size() == kPointerSize);
2215 // Try the opposite alignment case.
2216 start = AlignOldSpace(kDoubleAligned, kPointerSize);
2217 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2218 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2219 filler1 = HeapObject::FromAddress(start);
2220 CHECK(obj != filler1);
2221 CHECK(filler1->IsFiller());
2222 CHECK(filler1->Size() == kPointerSize);
// Redundant re-check of the three conditions above (kept as in original).
2223 CHECK(obj != filler1 && filler1->IsFiller() &&
2224 filler1->Size() == kPointerSize);
2226 // Similarly for kDoubleUnaligned.
2227 start = AlignOldSpace(kDoubleUnaligned, 0);
2228 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2229 // The object is aligned, and a filler object is created after.
2230 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2231 filler1 = HeapObject::FromAddress(start + kPointerSize);
2232 CHECK(obj != filler1 && filler1->IsFiller() &&
2233 filler1->Size() == kPointerSize);
2234 // Try the opposite alignment case.
2235 start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
2236 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2237 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2238 filler1 = HeapObject::FromAddress(start);
2239 CHECK(obj != filler1 && filler1->IsFiller() &&
2240 filler1->Size() == kPointerSize);
2243 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2245 start = AlignOldSpace(kSimd128Unaligned, 0);
2246 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2247 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2248 // There is a filler object after the object.
2249 filler1 = HeapObject::FromAddress(start + kPointerSize);
2250 CHECK(obj != filler1 && filler1->IsFiller() &&
2251 filler1->Size() == kSimd128Size - kPointerSize);
2252 start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
2253 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2254 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2255 // There is a filler object before the object.
2256 filler1 = HeapObject::FromAddress(start);
2257 CHECK(obj != filler1 && filler1->IsFiller() &&
2258 filler1->Size() == kSimd128Size - kPointerSize);
2260 if (double_misalignment) {
2261 // Test the 2 other alignments possible on 32 bit platforms.
2262 start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
2263 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2264 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2265 // There are filler objects before and after the object.
2266 filler1 = HeapObject::FromAddress(start);
2267 CHECK(obj != filler1 && filler1->IsFiller() &&
2268 filler1->Size() == 2 * kPointerSize);
2269 filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
2270 CHECK(obj != filler2 && filler2->IsFiller() &&
2271 filler2->Size() == kPointerSize);
2272 start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
2273 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2274 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2275 // There are filler objects before and after the object.
2276 filler1 = HeapObject::FromAddress(start);
2277 CHECK(obj != filler1 && filler1->IsFiller() &&
2278 filler1->Size() == kPointerSize);
2279 filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
2280 CHECK(obj != filler2 && filler2->IsFiller() &&
2281 filler2->Size() == 2 * kPointerSize);
// Compares Heap::SizeOfObjects against a manual sum of object sizes from a
// HeapIterator walk (skipping free-space objects) and requires the two to
// agree within 5% of the larger value.
// NOTE(review): lossy extract — the iterator loop's termination condition
// line and some closing braces are missing.
2286 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
2287 CcTest::InitializeVM();
2288 HeapIterator iterator(CcTest::heap());
2289 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
2290 intptr_t size_of_objects_2 = 0;
2291 for (HeapObject* obj = iterator.next();
2293 obj = iterator.next()) {
// Free-space objects are bookkeeping, not live data, so they are excluded
// from the manual tally.
2294 if (!obj->IsFreeSpace()) {
2295 size_of_objects_2 += obj->Size();
2298 // Delta must be within 5% of the larger result.
2299 // TODO(gc): Tighten this up by distinguishing between byte
2300 // arrays that are real and those that merely mark free space
// Two symmetric branches: whichever measurement is larger, the delta must
// be under 1/20th (5%) of it.
2302 if (size_of_objects_1 > size_of_objects_2) {
2303 intptr_t delta = size_of_objects_1 - size_of_objects_2;
2304 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2305 "Iterator: %" V8_PTR_PREFIX "d, "
2306 "delta: %" V8_PTR_PREFIX "d\n",
2307 size_of_objects_1, size_of_objects_2, delta);
2308 CHECK_GT(size_of_objects_1 / 20, delta);
2310 intptr_t delta = size_of_objects_2 - size_of_objects_1;
2311 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2312 "Iterator: %" V8_PTR_PREFIX "d, "
2313 "delta: %" V8_PTR_PREFIX "d\n",
2314 size_of_objects_1, size_of_objects_2, delta);
2315 CHECK_GT(size_of_objects_2 / 20, delta);
// Fills the given new space nearly to capacity with 32-element FixedArrays.
// AlwaysAllocateScope prevents allocation failure from triggering a
// scavenge that would undo the filling.
2320 static void FillUpNewSpace(NewSpace* new_space) {
2321 // Fill up new space to the point that it is completely full. Make sure
2322 // that the scavenger does not undo the filling.
2323 Heap* heap = new_space->heap();
2324 Isolate* isolate = heap->isolate();
2325 Factory* factory = isolate->factory();
2326 HandleScope scope(isolate);
2327 AlwaysAllocateScope always_allocate(isolate);
2328 intptr_t available = new_space->Capacity() - new_space->Size();
// Leave one filler's worth of slack (the "- 1") so the loop itself cannot
// run the space completely out of room.
2329 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
2330 for (intptr_t i = 0; i < number_of_fillers; i++) {
2331 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
// Exercises explicit NewSpace growing and shrinking: growing doubles total
// capacity; shrinking is a no-op while the space is full, halves capacity
// once the space is empty, and further consecutive shrinks have no effect.
// NOTE(review): lossy extract — the Grow() call between capacity reads
// (around original line 2352) and some braces are missing.
2336 TEST(GrowAndShrinkNewSpace) {
2337 CcTest::InitializeVM();
2338 Heap* heap = CcTest::heap();
2339 NewSpace* new_space = heap->new_space();
// Skip the test when new space cannot actually grow on this configuration.
2341 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
2342 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2343 // The max size cannot exceed the reserved size, since semispaces must be
2344 // always within the reserved space. We can't test new space growing and
2345 // shrinking if the reserved size is the same as the minimum (initial) size.
2349 // Explicitly growing should double the space capacity.
2350 intptr_t old_capacity, new_capacity;
2351 old_capacity = new_space->TotalCapacity();
2353 new_capacity = new_space->TotalCapacity();
2354 CHECK(2 * old_capacity == new_capacity);
2356 old_capacity = new_space->TotalCapacity();
2357 FillUpNewSpace(new_space);
2358 new_capacity = new_space->TotalCapacity();
2359 CHECK(old_capacity == new_capacity);
2361 // Explicitly shrinking should not affect space capacity.
2362 old_capacity = new_space->TotalCapacity();
2363 new_space->Shrink();
2364 new_capacity = new_space->TotalCapacity();
2365 CHECK(old_capacity == new_capacity);
2367 // Let the scavenger empty the new space.
2368 heap->CollectGarbage(NEW_SPACE);
2369 CHECK_LE(new_space->Size(), old_capacity);
2371 // Explicitly shrinking should halve the space capacity.
2372 old_capacity = new_space->TotalCapacity();
2373 new_space->Shrink();
2374 new_capacity = new_space->TotalCapacity();
2375 CHECK(old_capacity == 2 * new_capacity);
2377 // Consecutive shrinking should not affect space capacity.
2378 old_capacity = new_space->TotalCapacity();
2379 new_space->Shrink();
2380 new_space->Shrink();
2381 new_space->Shrink();
2382 new_capacity = new_space->TotalCapacity();
2383 CHECK(old_capacity == new_capacity);
// Verifies that CollectAllAvailableGarbage shrinks a previously-grown and
// filled new space back to its original capacity.
// NOTE(review): lossy extract — the Grow() call before the first capacity
// re-read (around original line 2402) is missing.
2387 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
2388 CcTest::InitializeVM();
2389 Heap* heap = CcTest::heap();
// Same configuration guard as GrowAndShrinkNewSpace: skip when new space
// cannot grow.
2390 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
2391 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2392 // The max size cannot exceed the reserved size, since semispaces must be
2393 // always within the reserved space. We can't test new space growing and
2394 // shrinking if the reserved size is the same as the minimum (initial) size.
2398 v8::HandleScope scope(CcTest::isolate());
2399 NewSpace* new_space = heap->new_space();
2400 intptr_t old_capacity, new_capacity;
2401 old_capacity = new_space->TotalCapacity();
2403 new_capacity = new_space->TotalCapacity();
2404 CHECK(2 * old_capacity == new_capacity);
2405 FillUpNewSpace(new_space);
2406 heap->CollectAllAvailableGarbage();
2407 new_capacity = new_space->TotalCapacity();
2408 CHECK(old_capacity == new_capacity);
// Counts global objects in the heap via a HeapIterator walk, then (per the
// trailing comment) compensates for the two globals belonging to the
// always-present code stub context.
// NOTE(review): lossy extract — the counter declaration, the `return`
// statement and closing brace are missing.
2412 static int NumberOfGlobalObjects() {
2414 HeapIterator iterator(CcTest::heap());
2415 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2416 if (obj->IsGlobalObject()) count++;
2418 // Subtract two to compensate for the two global objects (not global
2419 // JSObjects, of which there would only be one) that are part of the code stub
2420 // context, which is always present.
// Regression-style test: an optimized function in ctx2 reading a property
// of an object created in ctx1 must not embed ctx1's map in a way that
// keeps ctx1's native context alive after ctx1 is disposed.
// NOTE(review): lossy extract — scope braces, ctx2->Enter()/Exit() and the
// Persistent Reset calls visible in the sibling tests are among the
// missing lines here.
2425 // Test that we don't embed maps from foreign contexts into
2427 TEST(LeakNativeContextViaMap) {
2428 i::FLAG_allow_natives_syntax = true;
2429 v8::Isolate* isolate = CcTest::isolate();
2430 v8::HandleScope outer_scope(isolate);
2431 v8::Persistent<v8::Context> ctx1p;
2432 v8::Persistent<v8::Context> ctx2p;
2434 v8::HandleScope scope(isolate);
2435 ctx1p.Reset(isolate, v8::Context::New(isolate));
2436 ctx2p.Reset(isolate, v8::Context::New(isolate));
2437 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// Baseline: both contexts alive => four global objects (2 per context).
2440 CcTest::heap()->CollectAllAvailableGarbage();
2441 CHECK_EQ(4, NumberOfGlobalObjects());
2444 v8::HandleScope inner_scope(isolate);
2445 CompileRun("var v = {x: 42}");
2446 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2447 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2448 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
// Hand ctx1's object to ctx2 and optimize a ctx2 function that reads its
// property, so the compiler sees ctx1's map.
2450 ctx2->Global()->Set(v8_str("o"), v);
2451 v8::Local<v8::Value> res = CompileRun(
2452 "function f() { return o.x; }"
2453 "for (var i = 0; i < 10; ++i) f();"
2454 "%OptimizeFunctionOnNextCall(f);"
2456 CHECK_EQ(42, res->Int32Value());
// Sever the only strong reference to the ctx1 object before disposing ctx1.
2457 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2459 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2461 isolate->ContextDisposedNotification();
// After disposing ctx1, only ctx2's two globals must survive; after
// disposing ctx2 as well, none.
2463 CcTest::heap()->CollectAllAvailableGarbage();
2464 CHECK_EQ(2, NumberOfGlobalObjects());
2466 CcTest::heap()->CollectAllAvailableGarbage();
2467 CHECK_EQ(0, NumberOfGlobalObjects());
// Same structure as LeakNativeContextViaMap, but the cross-context value is
// a function: an optimized caller in ctx2 invoking a ctx1 closure must not
// keep ctx1's native context alive after disposal.
// NOTE(review): lossy extract — scope braces and context Exit/Reset lines
// are missing.
2471 // Test that we don't embed functions from foreign contexts into
2473 TEST(LeakNativeContextViaFunction) {
2474 i::FLAG_allow_natives_syntax = true;
2475 v8::Isolate* isolate = CcTest::isolate();
2476 v8::HandleScope outer_scope(isolate);
2477 v8::Persistent<v8::Context> ctx1p;
2478 v8::Persistent<v8::Context> ctx2p;
2480 v8::HandleScope scope(isolate);
2481 ctx1p.Reset(isolate, v8::Context::New(isolate));
2482 ctx2p.Reset(isolate, v8::Context::New(isolate));
2483 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2486 CcTest::heap()->CollectAllAvailableGarbage();
2487 CHECK_EQ(4, NumberOfGlobalObjects());
2490 v8::HandleScope inner_scope(isolate);
2491 CompileRun("var v = function() { return 42; }");
2492 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2493 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2494 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2496 ctx2->Global()->Set(v8_str("o"), v);
// Optimize a ctx2 function that calls the foreign closure directly.
2497 v8::Local<v8::Value> res = CompileRun(
2498 "function f(x) { return x(); }"
2499 "for (var i = 0; i < 10; ++i) f(o);"
2500 "%OptimizeFunctionOnNextCall(f);"
2502 CHECK_EQ(42, res->Int32Value());
2503 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2507 isolate->ContextDisposedNotification();
2509 CcTest::heap()->CollectAllAvailableGarbage();
2510 CHECK_EQ(2, NumberOfGlobalObjects());
2512 CcTest::heap()->CollectAllAvailableGarbage();
2513 CHECK_EQ(0, NumberOfGlobalObjects());
// Variant of LeakNativeContextViaMap using keyed (element) access: an
// optimized ctx2 function indexing into a ctx1 array must not keep ctx1's
// native context alive after disposal.
// NOTE(review): lossy extract — scope braces and context Exit/Reset lines
// are missing.
2517 TEST(LeakNativeContextViaMapKeyed) {
2518 i::FLAG_allow_natives_syntax = true;
2519 v8::Isolate* isolate = CcTest::isolate();
2520 v8::HandleScope outer_scope(isolate);
2521 v8::Persistent<v8::Context> ctx1p;
2522 v8::Persistent<v8::Context> ctx2p;
2524 v8::HandleScope scope(isolate);
2525 ctx1p.Reset(isolate, v8::Context::New(isolate));
2526 ctx2p.Reset(isolate, v8::Context::New(isolate));
2527 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2530 CcTest::heap()->CollectAllAvailableGarbage();
2531 CHECK_EQ(4, NumberOfGlobalObjects());
2534 v8::HandleScope inner_scope(isolate);
2535 CompileRun("var v = [42, 43]");
2536 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2537 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2538 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2540 ctx2->Global()->Set(v8_str("o"), v);
// Keyed load o[0] exercises the keyed-access IC/optimization path.
2541 v8::Local<v8::Value> res = CompileRun(
2542 "function f() { return o[0]; }"
2543 "for (var i = 0; i < 10; ++i) f();"
2544 "%OptimizeFunctionOnNextCall(f);"
2546 CHECK_EQ(42, res->Int32Value());
2547 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2551 isolate->ContextDisposedNotification();
2553 CcTest::heap()->CollectAllAvailableGarbage();
2554 CHECK_EQ(2, NumberOfGlobalObjects());
2556 CcTest::heap()->CollectAllAvailableGarbage();
2557 CHECK_EQ(0, NumberOfGlobalObjects());
// Variant of LeakNativeContextViaMap where the ctx1 object is used as a
// prototype in ctx2; the optimized access through the prototype chain must
// not keep ctx1's native context alive after disposal.
// NOTE(review): lossy extract — the body of the compiled script between
// original lines 2585 and 2591 (the part that installs v on a prototype)
// and the scope/Exit/Reset lines are missing.
2561 TEST(LeakNativeContextViaMapProto) {
2562 i::FLAG_allow_natives_syntax = true;
2563 v8::Isolate* isolate = CcTest::isolate();
2564 v8::HandleScope outer_scope(isolate);
2565 v8::Persistent<v8::Context> ctx1p;
2566 v8::Persistent<v8::Context> ctx2p;
2568 v8::HandleScope scope(isolate);
2569 ctx1p.Reset(isolate, v8::Context::New(isolate));
2570 ctx2p.Reset(isolate, v8::Context::New(isolate));
2571 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2574 CcTest::heap()->CollectAllAvailableGarbage();
2575 CHECK_EQ(4, NumberOfGlobalObjects());
2578 v8::HandleScope inner_scope(isolate);
2579 CompileRun("var v = { y: 42}");
2580 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2581 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2582 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2584 ctx2->Global()->Set(v8_str("o"), v);
2585 v8::Local<v8::Value> res = CompileRun(
2591 "for (var i = 0; i < 10; ++i) f();"
2592 "%OptimizeFunctionOnNextCall(f);"
2594 CHECK_EQ(42, res->Int32Value());
2595 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2599 isolate->ContextDisposedNotification();
2601 CcTest::heap()->CollectAllAvailableGarbage();
2602 CHECK_EQ(2, NumberOfGlobalObjects());
2604 CcTest::heap()->CollectAllAvailableGarbage();
2605 CHECK_EQ(0, NumberOfGlobalObjects());
// Checks that running an optimized instanceof (through g -> f) while
// incremental marking is in progress does not miss a write barrier: the
// final heap-verifying full GC must succeed.
// NOTE(review): lossy extract — the CompileRun(...) opening call, several
// scope braces, and (per the comment at 2645) a GC-request-clearing call
// inside the marking loop are missing.
2609 TEST(InstanceOfStubWriteBarrier) {
2610 i::FLAG_allow_natives_syntax = true;
// Heap verification is what actually detects a missed write barrier here.
2612 i::FLAG_verify_heap = true;
2615 CcTest::InitializeVM();
2616 if (!CcTest::i_isolate()->use_crankshaft()) return;
2617 if (i::FLAG_force_marking_deque_overflows) return;
2618 v8::HandleScope outer_scope(CcTest::isolate());
2621 v8::HandleScope scope(CcTest::isolate());
2623 "function foo () { }"
2624 "function mkbar () { return new (new Function(\"\")) (); }"
2625 "function f (x) { return (x instanceof foo); }"
2626 "function g () { f(mkbar()); }"
2627 "f(new foo()); f(new foo());"
2628 "%OptimizeFunctionOnNextCall(f);"
2629 "f(new foo()); g();");
2632 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2634 marking->Start(Heap::kNoGCFlags);
2636 Handle<JSFunction> f =
2637 v8::Utils::OpenHandle(
2638 *v8::Handle<v8::Function>::Cast(
2639 CcTest::global()->Get(v8_str("f"))));
2641 CHECK(f->IsOptimized());
// Step the marker until f's code object is black (fully marked), so the
// subsequent call to g exercises the write-barrier-during-marking path.
2643 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
2644 !marking->IsStopped()) {
2645 // Discard any pending GC requests otherwise we will get GC when we enter
2647 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2650 CHECK(marking->IsMarking());
2653 v8::HandleScope scope(CcTest::isolate());
2654 v8::Handle<v8::Object> global = CcTest::global();
2655 v8::Handle<v8::Function> g =
2656 v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
2657 g->Call(global, 0, NULL);
// Hurry marking to completion and run a full GC; with verify_heap on, a
// missed barrier would fail verification here.
2660 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2661 CcTest::heap()->CollectGarbage(OLD_SPACE);
// Returns the number of prototype transitions recorded in |map|'s
// transition array.
// NOTE(review): closing brace missing in this lossy extract.
2665 static int NumberOfProtoTransitions(Map* map) {
2666 return TransitionArray::NumberOfPrototypeTransitions(
2667 TransitionArray::GetPrototypeTransitions(map));
// Verifies that a full GC clears only the *dead* prototype transitions of a
// map (10 created, the 3 kept-alive ones plus the rest compacted away), and
// that transition-array slots are correctly recorded when a prototype lives
// on an old-space evacuation candidate.
// NOTE(review): lossy extract — parts of the CompileRun script, the `trans`
// declaration (line 2703/2704), and a final CollectAllGarbage before the
// last two CHECKs are among the missing lines.
2671 TEST(PrototypeTransitionClearing) {
2672 if (FLAG_never_compact) return;
2673 CcTest::InitializeVM();
2674 Isolate* isolate = CcTest::i_isolate();
2675 Factory* factory = isolate->factory();
2676 v8::HandleScope scope(CcTest::isolate());
2678 CompileRun("var base = {};");
2679 Handle<JSObject> baseObject =
2680 v8::Utils::OpenHandle(
2681 *v8::Handle<v8::Object>::Cast(
2682 CcTest::global()->Get(v8_str("base"))));
2683 int initialTransitions = NumberOfProtoTransitions(baseObject->map());
// Create 10 prototype transitions; objects for i >= 3 are kept alive via
// the `live` array, so 7 transitions survive (i = 3..9).
2687 "for (var i = 0; i < 10; i++) {"
2689 " var prototype = {};"
2690 " object.__proto__ = prototype;"
2691 " if (i >= 3) live.push(object, prototype);"
2694 // Verify that only dead prototype transitions are cleared.
2695 CHECK_EQ(initialTransitions + 10,
2696 NumberOfProtoTransitions(baseObject->map()));
2697 CcTest::heap()->CollectAllGarbage();
2698 const int transitions = 10 - 3;
2699 CHECK_EQ(initialTransitions + transitions,
2700 NumberOfProtoTransitions(baseObject->map()));
2702 // Verify that prototype transitions array was compacted.
2704 TransitionArray::GetPrototypeTransitions(baseObject->map());
2705 for (int i = initialTransitions; i < initialTransitions + transitions; i++) {
2706 int j = TransitionArray::kProtoTransitionHeaderSize + i;
// Surviving transitions are stored as WeakCells wrapping live Maps.
2707 CHECK(trans->get(j)->IsWeakCell());
2708 CHECK(WeakCell::cast(trans->get(j))->value()->IsMap());
2711 // Make sure next prototype is placed on an old-space evacuation candidate.
2712 Handle<JSObject> prototype;
2713 PagedSpace* space = CcTest::heap()->old_space();
2715 AlwaysAllocateScope always_allocate(isolate);
// Filling old space forces the large array onto a fresh (last) page.
2716 SimulateFullSpace(space);
2717 prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS,
2718 Strength::WEAK, TENURED);
2721 // Add a prototype on an evacuation candidate and verify that transition
2722 // clearing correctly records slots in prototype transition array.
2723 i::FLAG_always_compact = true;
2724 Handle<Map> map(baseObject->map());
2725 CHECK(!space->LastPage()->Contains(
2726 TransitionArray::GetPrototypeTransitions(*map)->address()));
2727 CHECK(space->LastPage()->Contains(prototype->address()));
// Checks that when the global IC age changes (via ContextDisposedNotification)
// during incremental marking, the SharedFunctionInfo's ic_age, opt_count and
// profiler ticks are reset by the following full GC.
// NOTE(review): lossy extract — the CompileRun(...) opening and parts of the
// compiled script (original lines 2744-2753), plus scope braces, are missing.
2731 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2732 i::FLAG_stress_compaction = false;
2733 i::FLAG_allow_natives_syntax = true;
2735 i::FLAG_verify_heap = true;
2738 CcTest::InitializeVM();
2739 if (!CcTest::i_isolate()->use_crankshaft()) return;
2740 v8::HandleScope outer_scope(CcTest::isolate());
2743 v8::HandleScope scope(CcTest::isolate());
2747 " for (var i = 0; i < 100; i++) s += i;"
2751 "%OptimizeFunctionOnNextCall(f);"
2754 Handle<JSFunction> f =
2755 v8::Utils::OpenHandle(
2756 *v8::Handle<v8::Function>::Cast(
2757 CcTest::global()->Get(v8_str("f"))));
2758 CHECK(f->IsOptimized());
2760 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2762 marking->Start(Heap::kNoGCFlags);
2763 // The following calls will increment CcTest::heap()->global_ic_age().
2764 CcTest::isolate()->ContextDisposedNotification();
2765 SimulateIncrementalMarking(CcTest::heap());
2766 CcTest::heap()->CollectAllGarbage();
// After the GC the function's SFI must have caught up with the new IC age
// and had its optimization/profiling counters reset.
2767 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2768 CHECK_EQ(0, f->shared()->opt_count());
2769 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Non-incremental counterpart of the previous test: with incremental
// marking stopped, a plain mark-sweep after an IC-age bump must likewise
// reset the SharedFunctionInfo's ic_age, opt_count and profiler ticks.
// NOTE(review): lossy extract — the CompileRun(...) opening and parts of
// the compiled script (original lines 2786-2795) are missing.
2773 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2774 i::FLAG_stress_compaction = false;
2775 i::FLAG_allow_natives_syntax = true;
2777 i::FLAG_verify_heap = true;
2780 CcTest::InitializeVM();
2781 if (!CcTest::i_isolate()->use_crankshaft()) return;
2782 v8::HandleScope outer_scope(CcTest::isolate());
2785 v8::HandleScope scope(CcTest::isolate());
2789 " for (var i = 0; i < 100; i++) s += i;"
2793 "%OptimizeFunctionOnNextCall(f);"
2796 Handle<JSFunction> f =
2797 v8::Utils::OpenHandle(
2798 *v8::Handle<v8::Function>::Cast(
2799 CcTest::global()->Get(v8_str("f"))));
2800 CHECK(f->IsOptimized());
// Ensure the counter reset happens via mark-sweep alone, not incremental
// marking.
2802 CcTest::heap()->incremental_marking()->Stop();
2804 // The following two calls will increment CcTest::heap()->global_ic_age().
2805 CcTest::isolate()->ContextDisposedNotification();
2806 CcTest::heap()->CollectAllGarbage();
2808 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2809 CHECK_EQ(0, f->shared()->opt_count());
2810 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Verifies lifetime of Heap::current_gc_flags(): full GCs reset the flags
// to kNoGCFlags afterwards, while flags set by an incremental-marking start
// survive intervening new-space scavenges until the finishing full GC.
2814 HEAP_TEST(GCFlags) {
2815 CcTest::InitializeVM();
2816 Heap* heap = CcTest::heap();
2818 heap->set_current_gc_flags(Heap::kNoGCFlags);
2819 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
2821 // Set the flags to check whether we appropriately resets them after the GC.
2822 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
2823 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
2824 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
2826 MarkCompactCollector* collector = heap->mark_compact_collector();
2827 if (collector->sweeping_in_progress()) {
2828 collector->EnsureSweepingCompleted();
2831 IncrementalMarking* marking = heap->incremental_marking();
// Starting incremental marking with a flag must install it as the current
// GC flags for the whole marking cycle.
2833 marking->Start(Heap::kReduceMemoryFootprintMask);
2834 CHECK_NE(0, heap->current_gc_flags() & Heap::kReduceMemoryFootprintMask);
2836 heap->CollectGarbage(NEW_SPACE);
2837 // NewSpace scavenges should not overwrite the flags.
2838 CHECK_NE(0, heap->current_gc_flags() & Heap::kReduceMemoryFootprintMask);
2840 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2841 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
// Verifies that an idle notification with a long deadline finishes an
// in-progress incremental marking cycle (triggering exactly one GC), after
// stepping the marker to just below the idle-marking-delay limit.
// NOTE(review): lossy extract — the do{ opener, loop increments and the
// kLongIdleTime term at the end of the IdleNotificationDeadline expression
// are among the missing lines.
2845 TEST(IdleNotificationFinishMarking) {
2846 i::FLAG_allow_natives_syntax = true;
2847 CcTest::InitializeVM();
2848 SimulateFullSpace(CcTest::heap()->old_space());
2849 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2851 marking->Start(Heap::kNoGCFlags);
2853 CHECK_EQ(CcTest::heap()->gc_count(), 0);
2855 // TODO(hpayer): We cannot write proper unit test right now for heap.
2856 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2857 // marking delay counter.
2859 // Perform a huge incremental marking step but don't complete marking.
2860 intptr_t bytes_processed = 0;
2863 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2864 IncrementalMarking::FORCE_MARKING,
2865 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2866 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2867 } while (bytes_processed);
2869 // The next invocations of incremental marking are not going to complete
2871 // since the completion threshold is not reached
// Step kMaxIdleMarkingDelayCounter - 2 more times, staying below the
// idle-delay limit on every step.
2872 for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
2874 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2875 IncrementalMarking::FORCE_MARKING,
2876 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2877 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
// Pretend the weak-closure over-approximation phase already happened so the
// idle notification can finalize marking directly.
2880 marking->SetWeakClosureWasOverApproximatedForTesting(true);
2882 // The next idle notification has to finish incremental marking.
2883 const double kLongIdleTime = 1000.0;
2884 CcTest::isolate()->IdleNotificationDeadline(
2885 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2886 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2888 CHECK_EQ(CcTest::heap()->gc_count(), 1);
// Checks that an object constructed by an optimized function lands in new
// space even when new space has been filled and AlwaysAllocateScope is
// active (i.e. the optimized allocation path does not silently pretenure).
// NOTE(review): lossy extract — part of the compiled constructor body and
// some closing braces/parens of the script are missing.
2892 // Test that HAllocateObject will always return an object in new-space.
2893 TEST(OptimizedAllocationAlwaysInNewSpace) {
2894 i::FLAG_allow_natives_syntax = true;
2895 CcTest::InitializeVM();
// Skip configurations where the premise doesn't hold (no crankshaft,
// always-opt, global GC stress, or stress compaction).
2896 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2897 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2898 v8::HandleScope scope(CcTest::isolate());
2900 SimulateFullSpace(CcTest::heap()->new_space());
2901 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2902 v8::Local<v8::Value> res = CompileRun(
2905 " for (var i = 0; i < 32; i++) {"
2906 " this['x' + i] = x;"
2909 "function f(x) { return new c(x); };"
2911 "%OptimizeFunctionOnNextCall(f);"
2914 4, res.As<v8::Object>()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2916 Handle<JSObject> o =
2917 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// The crux of the test: the freshly constructed object is in new space.
2919 CHECK(CcTest::heap()->InNewSpace(*o));
// [review] Pretenuring test: allocates many nested [[{}], [1.1]] literals so
// allocation-site feedback promotes them, then checks the result object, its
// two inner arrays, and their elements all live in old space.
2923 TEST(OptimizedPretenuringAllocationFolding) {
2924 i::FLAG_allow_natives_syntax = true;
2925 i::FLAG_expose_gc = true;
2926 CcTest::InitializeVM();
2927 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2928 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2929 v8::HandleScope scope(CcTest::isolate());
2931 // Grow new space unitl maximum capacity reached.
2932 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2933 CcTest::heap()->new_space()->Grow();
// NOTE(review): the SNPrintF call that formats this script is missing from
// this listing; the %d placeholder is filled from
// AllocationSite::kPretenureMinimumCreated below.
2936 i::ScopedVector<char> source(1024);
2939 "var number_elements = %d;"
2940 "var elements = new Array();"
2942 " for (var i = 0; i < number_elements; i++) {"
2943 " elements[i] = [[{}], [1.1]];"
2945 " return elements[number_elements-1]"
2949 "%%OptimizeFunctionOnNextCall(f);"
2951 AllocationSite::kPretenureMinimumCreated);
2953 v8::Local<v8::Value> res = CompileRun(source.start());
2955 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2956 Handle<JSObject> int_array_handle =
2957 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2958 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2959 Handle<JSObject> double_array_handle =
2960 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2962 Handle<JSObject> o =
2963 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// Everything — outer object, both inner arrays, and their backing stores —
// must have been pretenured into old space.
2964 CHECK(CcTest::heap()->InOldSpace(*o));
2965 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2966 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2967 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2968 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
// [review] Pretenuring test for object-array literals [{}, {}, {}]: after
// enough allocations the literal and its elements must be in old space.
2972 TEST(OptimizedPretenuringObjectArrayLiterals) {
2973 i::FLAG_allow_natives_syntax = true;
2974 i::FLAG_expose_gc = true;
2975 CcTest::InitializeVM();
2976 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2977 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2978 v8::HandleScope scope(CcTest::isolate());
2980 // Grow new space unitl maximum capacity reached.
2981 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2982 CcTest::heap()->new_space()->Grow();
// NOTE(review): SNPrintF formatting line missing from this listing.
2985 i::ScopedVector<char> source(1024);
2988 "var number_elements = %d;"
2989 "var elements = new Array(number_elements);"
2991 " for (var i = 0; i < number_elements; i++) {"
2992 " elements[i] = [{}, {}, {}];"
2994 " return elements[number_elements - 1];"
2998 "%%OptimizeFunctionOnNextCall(f);"
3000 AllocationSite::kPretenureMinimumCreated);
3002 v8::Local<v8::Value> res = CompileRun(source.start());
3004 Handle<JSObject> o =
3005 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3007 CHECK(CcTest::heap()->InOldSpace(o->elements()));
3008 CHECK(CcTest::heap()->InOldSpace(*o));
// [review] Pretenuring test for objects with mixed in-object properties
// ({a: {c: 2.2, d: {}}, b: 1.1}); verifies both tagged and (possibly
// unboxed) double fields of the pretenured object and its inner object.
3012 TEST(OptimizedPretenuringMixedInObjectProperties) {
3013 i::FLAG_allow_natives_syntax = true;
3014 i::FLAG_expose_gc = true;
3015 CcTest::InitializeVM();
3016 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3017 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3018 v8::HandleScope scope(CcTest::isolate());
3020 // Grow new space unitl maximum capacity reached.
3021 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3022 CcTest::heap()->new_space()->Grow();
// NOTE(review): SNPrintF formatting line missing from this listing.
3026 i::ScopedVector<char> source(1024);
3029 "var number_elements = %d;"
3030 "var elements = new Array(number_elements);"
3032 " for (var i = 0; i < number_elements; i++) {"
3033 " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
3035 " return elements[number_elements - 1];"
3039 "%%OptimizeFunctionOnNextCall(f);"
3041 AllocationSite::kPretenureMinimumCreated);
3043 v8::Local<v8::Value> res = CompileRun(source.start());
3045 Handle<JSObject> o =
3046 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3048 CHECK(CcTest::heap()->InOldSpace(*o));
// idx1 = property 'a' (tagged object), idx2 = property 'b' (double 1.1).
3049 FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
3050 FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
3051 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
// When doubles are boxed (HeapNumber) the box must be in old space;
// when unboxed, check the raw double value instead.
3052 if (!o->IsUnboxedDoubleField(idx2)) {
3053 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
3055 CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
3058 JSObject* inner_object =
3059 reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
3060 CHECK(CcTest::heap()->InOldSpace(inner_object));
3061 if (!inner_object->IsUnboxedDoubleField(idx1)) {
3062 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
3064 CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
3066 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
// [review] Pretenuring test for objects with only double properties
// ({a: 1.1, b: 2.2}); the object and its properties backing store must be
// in old space.
3070 TEST(OptimizedPretenuringDoubleArrayProperties) {
3071 i::FLAG_allow_natives_syntax = true;
3072 i::FLAG_expose_gc = true;
3073 CcTest::InitializeVM();
3074 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3075 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3076 v8::HandleScope scope(CcTest::isolate());
3078 // Grow new space unitl maximum capacity reached.
3079 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3080 CcTest::heap()->new_space()->Grow();
// NOTE(review): SNPrintF formatting line missing from this listing.
3083 i::ScopedVector<char> source(1024);
3086 "var number_elements = %d;"
3087 "var elements = new Array(number_elements);"
3089 " for (var i = 0; i < number_elements; i++) {"
3090 " elements[i] = {a: 1.1, b: 2.2};"
3092 " return elements[i - 1];"
3096 "%%OptimizeFunctionOnNextCall(f);"
3098 AllocationSite::kPretenureMinimumCreated);
3100 v8::Local<v8::Value> res = CompileRun(source.start());
3102 Handle<JSObject> o =
3103 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3105 CHECK(CcTest::heap()->InOldSpace(*o));
3106 CHECK(CcTest::heap()->InOldSpace(o->properties()));
// [review] Pretenuring test for double-array literals [1.1, 2.2, 3.3]; the
// array and its elements backing store must be in old space.
3110 TEST(OptimizedPretenuringdoubleArrayLiterals) {
3111 i::FLAG_allow_natives_syntax = true;
3112 i::FLAG_expose_gc = true;
3113 CcTest::InitializeVM();
3114 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3115 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3116 v8::HandleScope scope(CcTest::isolate());
3118 // Grow new space unitl maximum capacity reached.
3119 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3120 CcTest::heap()->new_space()->Grow();
// NOTE(review): SNPrintF formatting line missing from this listing.
3123 i::ScopedVector<char> source(1024);
3126 "var number_elements = %d;"
3127 "var elements = new Array(number_elements);"
3129 " for (var i = 0; i < number_elements; i++) {"
3130 " elements[i] = [1.1, 2.2, 3.3];"
3132 " return elements[number_elements - 1];"
3136 "%%OptimizeFunctionOnNextCall(f);"
3138 AllocationSite::kPretenureMinimumCreated);
3140 v8::Local<v8::Value> res = CompileRun(source.start());
3142 Handle<JSObject> o =
3143 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3145 CHECK(CcTest::heap()->InOldSpace(o->elements()));
3146 CHECK(CcTest::heap()->InOldSpace(*o));
// [review] Pretenuring test for nested mixed literals
// [[{}, {}, {}], [1.1, 2.2, 3.3]]; outer array, both inner arrays, and
// their backing stores must all be in old space. Uses a fixed
// number_elements = 100 rather than the pretenure constant.
3150 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
3151 i::FLAG_allow_natives_syntax = true;
3152 i::FLAG_expose_gc = true;
3153 CcTest::InitializeVM();
3154 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3155 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3156 v8::HandleScope scope(CcTest::isolate());
3158 // Grow new space unitl maximum capacity reached.
3159 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3160 CcTest::heap()->new_space()->Grow();
// NOTE(review): SNPrintF formatting line missing from this listing.
3163 i::ScopedVector<char> source(1024);
3166 "var number_elements = 100;"
3167 "var elements = new Array(number_elements);"
3169 " for (var i = 0; i < number_elements; i++) {"
3170 " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
3172 " return elements[number_elements - 1];"
3176 "%%OptimizeFunctionOnNextCall(f);"
3179 v8::Local<v8::Value> res = CompileRun(source.start());
3181 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
3182 Handle<JSObject> int_array_handle =
3183 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
3184 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
3185 Handle<JSObject> double_array_handle =
3186 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
3188 Handle<JSObject> o =
3189 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3190 CHECK(CcTest::heap()->InOldSpace(*o));
3191 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
3192 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
3193 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
3194 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
// [review] Pretenuring test for nested object literals
// [[{}, {}, {}],[{}, {}, {}]]; both inner arrays and their backing stores
// must be pretenured into old space.
3198 TEST(OptimizedPretenuringNestedObjectLiterals) {
3199 i::FLAG_allow_natives_syntax = true;
3200 i::FLAG_expose_gc = true;
3201 CcTest::InitializeVM();
3202 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3203 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3204 v8::HandleScope scope(CcTest::isolate());
3206 // Grow new space unitl maximum capacity reached.
3207 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3208 CcTest::heap()->new_space()->Grow();
// NOTE(review): SNPrintF formatting line missing from this listing.
3211 i::ScopedVector<char> source(1024);
3214 "var number_elements = %d;"
3215 "var elements = new Array(number_elements);"
3217 " for (var i = 0; i < number_elements; i++) {"
3218 " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
3220 " return elements[number_elements - 1];"
3224 "%%OptimizeFunctionOnNextCall(f);"
3226 AllocationSite::kPretenureMinimumCreated);
3228 v8::Local<v8::Value> res = CompileRun(source.start());
3230 v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
3231 Handle<JSObject> int_array_handle_1 =
3232 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
3233 v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
3234 Handle<JSObject> int_array_handle_2 =
3235 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
3237 Handle<JSObject> o =
3238 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3239 CHECK(CcTest::heap()->InOldSpace(*o));
3240 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
3241 CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
3242 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
3243 CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
// [review] Pretenuring test for nested double-array literals
// [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]]; both inner double arrays and their
// backing stores must be pretenured into old space.
3247 TEST(OptimizedPretenuringNestedDoubleLiterals) {
3248 i::FLAG_allow_natives_syntax = true;
3249 i::FLAG_expose_gc = true;
3250 CcTest::InitializeVM();
3251 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3252 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3253 v8::HandleScope scope(CcTest::isolate());
3255 // Grow new space unitl maximum capacity reached.
3256 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3257 CcTest::heap()->new_space()->Grow();
// NOTE(review): SNPrintF formatting line missing from this listing.
3260 i::ScopedVector<char> source(1024);
3263 "var number_elements = %d;"
3264 "var elements = new Array(number_elements);"
3266 " for (var i = 0; i < number_elements; i++) {"
3267 " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
3269 " return elements[number_elements - 1];"
3273 "%%OptimizeFunctionOnNextCall(f);"
3275 AllocationSite::kPretenureMinimumCreated);
3277 v8::Local<v8::Value> res = CompileRun(source.start());
3279 v8::Local<v8::Value> double_array_1 =
3280 v8::Object::Cast(*res)->Get(v8_str("0"));
3281 Handle<JSObject> double_array_handle_1 =
3282 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
3283 v8::Local<v8::Value> double_array_2 =
3284 v8::Object::Cast(*res)->Get(v8_str("1"));
3285 Handle<JSObject> double_array_handle_2 =
3286 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
3288 Handle<JSObject> o =
3289 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3290 CHECK(CcTest::heap()->InOldSpace(*o));
3291 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
3292 CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
3293 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
3294 CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
3298 // Make sure pretenuring feedback is gathered for constructed objects as well
// [review] Pretenuring via `new foo()` in optimized code; the allocation
// count includes kGenerousAllocationCount extra iterations because slack
// tracking suppresses mementos early on (see comment below). Guarded on
// FLAG_pretenuring_call_new matching the snapshot.
3300 TEST(OptimizedPretenuringConstructorCalls) {
3301 if (!i::FLAG_pretenuring_call_new) {
3302 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
3305 i::FLAG_allow_natives_syntax = true;
3306 i::FLAG_expose_gc = true;
3307 CcTest::InitializeVM();
3308 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3309 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3310 v8::HandleScope scope(CcTest::isolate());
3312 // Grow new space unitl maximum capacity reached.
3313 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3314 CcTest::heap()->new_space()->Grow();
3317 i::ScopedVector<char> source(1024);
3318 // Call new is doing slack tracking for the first
3319 // JSFunction::kGenerousAllocationCount allocations, and we can't find
3320 // mementos during that time.
// NOTE(review): SNPrintF line and the `foo` definition lines are missing
// from this listing.
3323 "var number_elements = %d;"
3324 "var elements = new Array(number_elements);"
3330 " for (var i = 0; i < number_elements; i++) {"
3331 " elements[i] = new foo();"
3333 " return elements[number_elements - 1];"
3337 "%%OptimizeFunctionOnNextCall(f);"
3339 AllocationSite::kPretenureMinimumCreated +
3340 JSFunction::kGenerousAllocationCount);
3342 v8::Local<v8::Value> res = CompileRun(source.start());
3344 Handle<JSObject> o =
3345 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3347 CHECK(CcTest::heap()->InOldSpace(*o));
// [review] Same as OptimizedPretenuringConstructorCalls but the constructor
// `g` is defined inline in the script; result object must be in old space.
3351 TEST(OptimizedPretenuringCallNew) {
3352 if (!i::FLAG_pretenuring_call_new) {
3353 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
3356 i::FLAG_allow_natives_syntax = true;
3357 i::FLAG_expose_gc = true;
3358 CcTest::InitializeVM();
3359 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3360 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3361 v8::HandleScope scope(CcTest::isolate());
3363 // Grow new space unitl maximum capacity reached.
3364 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3365 CcTest::heap()->new_space()->Grow();
3368 i::ScopedVector<char> source(1024);
3369 // Call new is doing slack tracking for the first
3370 // JSFunction::kGenerousAllocationCount allocations, and we can't find
3371 // mementos during that time.
// NOTE(review): SNPrintF formatting line missing from this listing.
3374 "var number_elements = %d;"
3375 "var elements = new Array(number_elements);"
3376 "function g() { this.a = 0; }"
3378 " for (var i = 0; i < number_elements; i++) {"
3379 " elements[i] = new g();"
3381 " return elements[number_elements - 1];"
3385 "%%OptimizeFunctionOnNextCall(f);"
3387 AllocationSite::kPretenureMinimumCreated +
3388 JSFunction::kGenerousAllocationCount);
3390 v8::Local<v8::Value> res = CompileRun(source.start());
3392 Handle<JSObject> o =
3393 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3394 CHECK(CcTest::heap()->InOldSpace(*o));
3398 // Test regular array literals allocation.
// [review] Without pretenuring pressure, an optimized array literal's
// elements should still be allocated in new space.
3399 TEST(OptimizedAllocationArrayLiterals) {
3400 i::FLAG_allow_natives_syntax = true;
3401 CcTest::InitializeVM();
3402 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3403 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3404 v8::HandleScope scope(CcTest::isolate());
// NOTE(review): script literal is truncated in this listing.
3406 v8::Local<v8::Value> res = CompileRun(
3408 " var numbers = new Array(1, 2, 3);"
3409 " numbers[0] = 3.14;"
3413 "%OptimizeFunctionOnNextCall(f);"
3415 CHECK_EQ(static_cast<int>(3.14),
3416 v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
3418 Handle<JSObject> o =
3419 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3421 CHECK(CcTest::heap()->InNewSpace(o->elements()));
// Returns the number of transitions stored in |map|'s raw transitions array.
// NOTE(review): closing brace line is missing from this truncated listing.
3425 static int CountMapTransitions(Map* map) {
3426 return TransitionArray::NumberOfTransitions(map->raw_transitions());
3430 // Test that map transitions are cleared and maps are collected with
3431 // incremental marking as well.
// NOTE(review): the TEST(...) header line itself is missing from this
// truncated listing (embedded numbering jumps 3431 -> 3433).
// The test creates 256 transitions off F's initial map, simulates
// incremental marking plus a full GC with retain_maps_for_n_gc = 0, and
// expects all but one transition (the one 'root' keeps alive) to be cleared.
3433 i::FLAG_stress_compaction = false;
3434 i::FLAG_allow_natives_syntax = true;
3435 i::FLAG_trace_incremental_marking = true;
3436 i::FLAG_retain_maps_for_n_gc = 0;
3437 CcTest::InitializeVM();
3438 v8::HandleScope scope(CcTest::isolate());
3439 static const int transitions_count = 256;
3441 CompileRun("function F() {}");
3443 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3444 for (int i = 0; i < transitions_count; i++) {
3445 EmbeddedVector<char, 64> buffer;
3446 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3447 CompileRun(buffer.start());
3449 CompileRun("var root = new F;");
3452 Handle<JSObject> root =
3453 v8::Utils::OpenHandle(
3454 *v8::Handle<v8::Object>::Cast(
3455 CcTest::global()->Get(v8_str("root"))));
3457 // Count number of live transitions before marking.
3458 int transitions_before = CountMapTransitions(root->map());
3459 CompileRun("%DebugPrint(root);");
3460 CHECK_EQ(transitions_count, transitions_before);
3462 SimulateIncrementalMarking(CcTest::heap());
3463 CcTest::heap()->CollectAllGarbage();
3465 // Count number of live transitions after marking. Note that one transition
3466 // is left, because 'o' still holds an instance of one transition target.
3467 int transitions_after = CountMapTransitions(root->map());
3468 CompileRun("%DebugPrint(root);");
3469 CHECK_EQ(1, transitions_after);
// Creates |transitions_count| distinct map transitions off F's initial map
// by assigning a differently named property to a fresh instance each time.
// Runs under AlwaysAllocateScope so allocation cannot fail mid-loop.
3474 static void AddTransitions(int transitions_count) {
3475 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3476 for (int i = 0; i < transitions_count; i++) {
3477 EmbeddedVector<char, 64> buffer;
3478 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3479 CompileRun(buffer.start());
// Looks up the global property |name| and returns it as an internal
// JSObject handle. Assumes the property exists and is an object.
3484 static Handle<JSObject> GetByName(const char* name) {
3485 return v8::Utils::OpenHandle(
3486 *v8::Handle<v8::Object>::Cast(
3487 CcTest::global()->Get(v8_str(name))));
// Adds property |property_name| = 23 to |object| while configuring the heap
// to trigger a (global) GC after |gc_count| allocations, so the SetProperty
// call races against a collection. Also disables map retention
// (retain_maps_for_n_gc = 0) so unreferenced transition targets can die.
3491 static void AddPropertyTo(
3492 int gc_count, Handle<JSObject> object, const char* property_name) {
3493 Isolate* isolate = CcTest::i_isolate();
3494 Factory* factory = isolate->factory();
3495 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3496 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3497 i::FLAG_gc_interval = gc_count;
3498 i::FLAG_gc_global = true;
3499 i::FLAG_retain_maps_for_n_gc = 0;
3500 CcTest::heap()->set_allocation_timeout(gc_count);
3501 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
// [review] After GC during property addition, the transition array of
// root's back-pointer map should shrink to a single live transition.
3505 TEST(TransitionArrayShrinksDuringAllocToZero) {
3506 i::FLAG_stress_compaction = false;
3507 i::FLAG_allow_natives_syntax = true;
3508 CcTest::InitializeVM();
3509 v8::HandleScope scope(CcTest::isolate());
3510 static const int transitions_count = 10;
3511 CompileRun("function F() { }");
3512 AddTransitions(transitions_count);
3513 CompileRun("var root = new F;");
3514 Handle<JSObject> root = GetByName("root");
3516 // Count number of live transitions before marking.
3517 int transitions_before = CountMapTransitions(root->map());
3518 CHECK_EQ(transitions_count, transitions_before);
// NOTE(review): the continuation of this CompileRun string is missing from
// this truncated listing.
3521 CompileRun("o = new F;"
3523 root = GetByName("root");
3524 AddPropertyTo(2, root, "funny");
3525 CcTest::heap()->CollectGarbage(NEW_SPACE);
3527 // Count number of live transitions after marking. Note that one transition
3528 // is left, because 'o' still holds an instance of one transition target.
3529 int transitions_after = CountMapTransitions(
3530 Map::cast(root->map()->GetBackPointer()));
3531 CHECK_EQ(1, transitions_after);
// [review] Variant without the extra 'o = new F' allocation: two transitions
// survive (root's own plus the newly added "funny" property transition).
3535 TEST(TransitionArrayShrinksDuringAllocToOne) {
3536 i::FLAG_stress_compaction = false;
3537 i::FLAG_allow_natives_syntax = true;
3538 CcTest::InitializeVM();
3539 v8::HandleScope scope(CcTest::isolate());
3540 static const int transitions_count = 10;
3541 CompileRun("function F() {}");
3542 AddTransitions(transitions_count);
3543 CompileRun("var root = new F;");
3544 Handle<JSObject> root = GetByName("root");
3546 // Count number of live transitions before marking.
3547 int transitions_before = CountMapTransitions(root->map());
3548 CHECK_EQ(transitions_count, transitions_before);
3550 root = GetByName("root");
3551 AddPropertyTo(2, root, "funny");
3552 CcTest::heap()->CollectGarbage(NEW_SPACE);
3554 // Count number of live transitions after marking. Note that one transition
3555 // is left, because 'o' still holds an instance of one transition target.
3556 int transitions_after = CountMapTransitions(
3557 Map::cast(root->map()->GetBackPointer()));
3558 CHECK_EQ(2, transitions_after);
// [review] Adds a property name ("prop9") that already exists as a
// transition, with an immediate old-space GC (gc_count 0); afterwards only
// one transition remains on the back-pointer map.
3562 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3563 i::FLAG_stress_compaction = false;
3564 i::FLAG_allow_natives_syntax = true;
3565 CcTest::InitializeVM();
3566 v8::HandleScope scope(CcTest::isolate());
3567 static const int transitions_count = 10;
3568 CompileRun("function F() {}");
3569 AddTransitions(transitions_count);
3570 CompileRun("var root = new F;");
3571 Handle<JSObject> root = GetByName("root");
3573 // Count number of live transitions before marking.
3574 int transitions_before = CountMapTransitions(root->map());
3575 CHECK_EQ(transitions_count, transitions_before);
3577 root = GetByName("root");
3578 AddPropertyTo(0, root, "prop9");
3579 CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
3581 // Count number of live transitions after marking. Note that one transition
3582 // is left, because 'o' still holds an instance of one transition target.
3583 int transitions_after = CountMapTransitions(
3584 Map::cast(root->map()->GetBackPointer()));
3585 CHECK_EQ(1, transitions_after);
// [review] Starts from a simple (single) transition and forces an upgrade to
// a full transition array by adding another property during allocation.
3589 TEST(TransitionArraySimpleToFull) {
3590 i::FLAG_stress_compaction = false;
3591 i::FLAG_allow_natives_syntax = true;
3592 CcTest::InitializeVM();
3593 v8::HandleScope scope(CcTest::isolate());
3594 static const int transitions_count = 1;
3595 CompileRun("function F() {}");
3596 AddTransitions(transitions_count);
3597 CompileRun("var root = new F;");
3598 Handle<JSObject> root = GetByName("root");
3600 // Count number of live transitions before marking.
3601 int transitions_before = CountMapTransitions(root->map());
3602 CHECK_EQ(transitions_count, transitions_before);
// NOTE(review): the continuation of this CompileRun string is missing from
// this truncated listing.
3604 CompileRun("o = new F;"
3606 root = GetByName("root");
// Precondition: with a single transition the map stores it in simple form.
3607 DCHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
3608 AddPropertyTo(2, root, "happy");
3610 // Count number of live transitions after marking. Note that one transition
3611 // is left, because 'o' still holds an instance of one transition target.
3612 int transitions_after = CountMapTransitions(
3613 Map::cast(root->map()->GetBackPointer()));
3614 CHECK_EQ(1, transitions_after);
// [review] Regression test: a StoreIC-performed map transition during
// incremental marking must re-grey the root object so its map survives the
// final GC in a sane state (bug 2143).
3619 TEST(Regress2143a) {
3620 i::FLAG_incremental_marking = true;
3621 CcTest::InitializeVM();
3622 v8::HandleScope scope(CcTest::isolate());
3624 // Prepare a map transition from the root object together with a yet
3625 // untransitioned root object.
// NOTE(review): middle line(s) of this script literal are missing from this
// truncated listing.
3626 CompileRun("var root = new Object;"
3628 "root = new Object;");
3630 SimulateIncrementalMarking(CcTest::heap());
3632 // Compile a StoreIC that performs the prepared map transition. This
3633 // will restart incremental marking and should make sure the root is
3634 // marked grey again.
// NOTE(review): body of f and its invocation lines are missing here.
3635 CompileRun("function f(o) {"
3641 // This bug only triggers with aggressive IC clearing.
3642 CcTest::heap()->AgeInlineCaches();
3644 // Explicitly request GC to perform final marking step and sweeping.
3645 CcTest::heap()->CollectAllGarbage();
3647 Handle<JSObject> root =
3648 v8::Utils::OpenHandle(
3649 *v8::Handle<v8::Object>::Cast(
3650 CcTest::global()->Get(v8_str("root"))));
3652 // The root object should be in a sane state.
3653 CHECK(root->IsJSObject());
3654 CHECK(root->map()->IsMap());
// [review] Same as Regress2143a but the transition is performed by an
// optimized LStoreNamedField (crankshaft), exercised via
// %OptimizeFunctionOnNextCall and %DeoptimizeFunction.
3658 TEST(Regress2143b) {
3659 i::FLAG_incremental_marking = true;
3660 i::FLAG_allow_natives_syntax = true;
3661 CcTest::InitializeVM();
3662 v8::HandleScope scope(CcTest::isolate());
3664 // Prepare a map transition from the root object together with a yet
3665 // untransitioned root object.
// NOTE(review): middle line(s) of this script literal are missing from this
// truncated listing.
3666 CompileRun("var root = new Object;"
3668 "root = new Object;");
3670 SimulateIncrementalMarking(CcTest::heap());
3672 // Compile an optimized LStoreNamedField that performs the prepared
3673 // map transition. This will restart incremental marking and should
3674 // make sure the root is marked grey again.
// NOTE(review): body of f and warm-up call lines are missing here.
3675 CompileRun("function f(o) {"
3680 "%OptimizeFunctionOnNextCall(f);"
3682 "%DeoptimizeFunction(f);");
3684 // This bug only triggers with aggressive IC clearing.
3685 CcTest::heap()->AgeInlineCaches();
3687 // Explicitly request GC to perform final marking step and sweeping.
3688 CcTest::heap()->CollectAllGarbage();
3690 Handle<JSObject> root =
3691 v8::Utils::OpenHandle(
3692 *v8::Handle<v8::Object>::Cast(
3693 CcTest::global()->Get(v8_str("root"))));
3695 // The root object should be in a sane state.
3696 CHECK(root->IsJSObject());
3697 CHECK(root->map()->IsMap());
// [review] Creates 20 mostly-empty old-space pages, then checks that
// successive full GCs compact live objects and release over-reserved pages,
// ending with a single page after a last-resort GC.
3701 TEST(ReleaseOverReservedPages) {
3702 if (FLAG_never_compact) return;
3703 i::FLAG_trace_gc = true;
3704 // The optimizer can allocate stuff, messing up the test.
3705 i::FLAG_crankshaft = false;
3706 i::FLAG_always_opt = false;
3707 CcTest::InitializeVM();
3708 Isolate* isolate = CcTest::i_isolate();
3709 Factory* factory = isolate->factory();
3710 Heap* heap = isolate->heap();
3711 v8::HandleScope scope(CcTest::isolate());
3712 static const int number_of_test_pages = 20;
3714 // Prepare many pages with low live-bytes count.
3715 PagedSpace* old_space = heap->old_space();
3716 CHECK_EQ(1, old_space->CountTotalPages());
3717 for (int i = 0; i < number_of_test_pages; i++) {
3718 AlwaysAllocateScope always_allocate(isolate);
// Fill the space, then allocate one tiny tenured array so each new page
// carries almost no live data.
3719 SimulateFullSpace(old_space);
3720 factory->NewFixedArray(1, TENURED);
3722 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
3724 // Triggering one GC will cause a lot of garbage to be discovered but
3725 // even spread across all allocated pages.
3726 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3727 "triggered for preparation");
3728 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3730 // Triggering subsequent GCs should cause at least half of the pages
3731 // to be released to the OS after at most two cycles.
3732 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3733 "triggered by test 1");
3734 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3735 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3736 "triggered by test 2");
3737 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
3739 // Triggering a last-resort GC should cause all pages to be released to the
3740 // OS so that other processes can seize the memory. If we get a failure here
3741 // where there are 2 pages left instead of 1, then we should increase the
3742 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3743 // first page should be small in order to reduce memory used when the VM
3744 // boots, but if the 20 small arrays don't fit on the first page then that's
3745 // an indication that it is too small.
3746 heap->CollectAllAvailableGarbage("triggered really hard");
3747 CHECK_EQ(1, old_space->CountTotalPages());
// Counter incremented by MockUseCounterCallback for each kForcedGC event;
// reset by the test that installs the callback.
3750 static int forced_gc_counter = 0;
// Use-counter callback that tallies forced GCs. GetCallingContext() is
// called only to exercise that API from within a callback.
3752 void MockUseCounterCallback(v8::Isolate* isolate,
3753 v8::Isolate::UseCounterFeature feature) {
3754 isolate->GetCallingContext();
3755 if (feature == v8::Isolate::kForcedGC) {
3756 forced_gc_counter++;
// [review] Verifies that running "gc();" (with --expose-gc) reports at least
// one kForcedGC use-counter event through the installed callback.
// NOTE(review): the CompileRun(source) line is missing from this truncated
// listing (numbering jumps 3770 -> 3772).
3761 TEST(CountForcedGC) {
3762 i::FLAG_expose_gc = true;
3763 CcTest::InitializeVM();
3764 Isolate* isolate = CcTest::i_isolate();
3765 v8::HandleScope scope(CcTest::isolate());
3767 isolate->SetUseCounterCallback(MockUseCounterCallback);
3769 forced_gc_counter = 0;
3770 const char* source = "gc();";
3772 CHECK_GT(forced_gc_counter, 0);
// NOTE(review): the TEST(...) header line is missing from this truncated
// listing (numbering jumps to 3777). The test builds a sliced string in old
// space whose parent is a new-space sequential string, then checks that GC
// keeps the parent a SeqOneByteString (i.e. the slice is not broken by
// scavenge/promotion of the parent).
3777 i::FLAG_stress_compaction = false;
3778 CcTest::InitializeVM();
3779 Isolate* isolate = CcTest::i_isolate();
3780 Factory* factory = isolate->factory();
3781 v8::HandleScope scope(CcTest::isolate());
// Raw-location slot that survives the inner scope below.
3782 Handle<String> slice(CcTest::heap()->empty_string());
3785 // Generate a parent that lives in new-space.
3786 v8::HandleScope inner_scope(CcTest::isolate());
3787 const char* c = "This text is long enough to trigger sliced strings.";
3788 Handle<String> s = factory->NewStringFromAsciiChecked(c);
3789 CHECK(s->IsSeqOneByteString());
3790 CHECK(CcTest::heap()->InNewSpace(*s));
3792 // Generate a sliced string that is based on the above parent and
3793 // lives in old-space.
3794 SimulateFullSpace(CcTest::heap()->new_space());
3795 AlwaysAllocateScope always_allocate(isolate);
3796 Handle<String> t = factory->NewProperSubString(s, 5, 35);
3797 CHECK(t->IsSlicedString());
3798 CHECK(!CcTest::heap()->InNewSpace(*t));
// Smuggle the slice out of the inner handle scope by writing its location.
3799 *slice.location() = *t.location();
3802 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
3803 CcTest::heap()->CollectAllGarbage();
3804 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
// [review] Smoke test: prints g's SharedFunctionInfo to stdout. Guarded by
// OBJECT_PRINT (the matching #ifdef precedes this chunk; only the #endif is
// visible below). The CompileRun(source) line is missing from this listing.
3809 TEST(PrintSharedFunctionInfo) {
3810 CcTest::InitializeVM();
3811 v8::HandleScope scope(CcTest::isolate());
3812 const char* source = "f = function() { return 987654321; }\n"
3813 "g = function() { return 123456789; }\n";
3815 Handle<JSFunction> g =
3816 v8::Utils::OpenHandle(
3817 *v8::Handle<v8::Function>::Cast(
3818 CcTest::global()->Get(v8_str("g"))));
3820 OFStream os(stdout);
3821 g->shared()->Print(os);
3824 #endif // OBJECT_PRINT
// [review] Feedback-vector weakness: f holds call feedback (WeakCells) for
// closures from two different native contexts; incremental marking + full GC
// must keep both WeakCells alive (not cleared) while both contexts live.
// NOTE(review): the LocalContext setup lines around 3832-3840 are missing
// from this truncated listing.
3827 TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3828 if (i::FLAG_always_opt) return;
3829 CcTest::InitializeVM();
3830 v8::HandleScope scope(CcTest::isolate());
3831 v8::Local<v8::Value> fun1, fun2;
3835 CompileRun("function fun() {};");
3836 fun1 = env->Global()->Get(v8_str("fun"));
3841 CompileRun("function fun() {};");
3842 fun2 = env->Global()->Get(v8_str("fun"));
3845 // Prepare function f that contains type feedback for closures
3846 // originating from two different native contexts.
3847 CcTest::global()->Set(v8_str("fun1"), fun1);
3848 CcTest::global()->Set(v8_str("fun2"), fun2);
3849 CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3851 Handle<JSFunction> f =
3852 v8::Utils::OpenHandle(
3853 *v8::Handle<v8::Function>::Cast(
3854 CcTest::global()->Get(v8_str("f"))));
3856 Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
3858 int expected_slots = 2;
3859 CHECK_EQ(expected_slots, feedback_vector->ICSlots());
// NOTE(review): slot1/slot2 declaration lines are missing from this listing.
3862 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot1))->IsWeakCell());
3863 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot2))->IsWeakCell());
3865 SimulateIncrementalMarking(CcTest::heap());
3866 CcTest::heap()->CollectAllGarbage();
3868 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot1)))
3870 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot2)))
// Scans |code|'s relocation info for call/construct targets and returns the
// first inline-cache stub of the requested |kind|.
// NOTE(review): the return statements and closing braces of this function
// are missing from this truncated listing.
3875 static Code* FindFirstIC(Code* code, Code::Kind kind) {
3876 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3877 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
3878 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3879 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3880 RelocInfo* info = it.rinfo();
3881 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3882 if (target->is_inline_cache_stub() && target->kind() == kind) {
// Asserts that the load IC at |ic_slot_index| in f's feedback vector is in
// |desired_state| (e.g. MONOMORPHIC).
3890 static void CheckVectorIC(Handle<JSFunction> f, int ic_slot_index,
3891 InlineCacheState desired_state) {
3892 Handle<TypeFeedbackVector> vector =
3893 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3894 FeedbackVectorICSlot slot(ic_slot_index);
3895 LoadICNexus nexus(vector, slot);
3896 CHECK(nexus.StateFromFeedback() == desired_state);
// Asserts that the IC slot |ic_slot_index| in |f|'s type feedback vector has
// been cleared (per IC::IsCleared on a LoadICNexus).
3900 static void CheckVectorICCleared(Handle<JSFunction> f, int ic_slot_index) {
3901 Handle<TypeFeedbackVector> vector =
3902 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3903 FeedbackVectorICSlot slot(ic_slot_index);
3904 LoadICNexus nexus(vector, slot);
3905 CHECK(IC::IsCleared(&nexus));
// Checks that the weak cell recorded in the feedback vector for a
// monomorphic `new o()` call site survives an incremental-marking cycle
// followed by a full GC when the constructor comes from the same context.
3909 TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
3910 if (i::FLAG_always_opt) return;
3911 CcTest::InitializeVM();
3912 v8::HandleScope scope(CcTest::isolate());
3914 // Prepare function f that contains a monomorphic IC for object
3915 // originating from the same native context.
3917 "function fun() { this.x = 1; };"
3918 "function f(o) { return new o(); } f(fun); f(fun);");
3919 Handle<JSFunction> f = v8::Utils::OpenHandle(
3920 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3923 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3924 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
// Run a simulated incremental marking cycle and finish with a full GC.
3926 SimulateIncrementalMarking(CcTest::heap());
3927 CcTest::heap()->CollectAllGarbage();
// Slot 0 must still hold a weak cell after the GC.
3929 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
// Checks that a monomorphic constructor feedback slot referencing a function
// from a *disposed* native context is reset to the uninitialized sentinel by
// an incremental-marking cycle plus full GC.
3933 TEST(IncrementalMarkingClearsMonomorphicConstructor) {
3934 if (i::FLAG_always_opt) return;
3935 CcTest::InitializeVM();
3936 Isolate* isolate = CcTest::i_isolate();
3937 v8::HandleScope scope(CcTest::isolate());
3938 v8::Local<v8::Value> fun1;
// fun1 is created in a separate context (context setup elided here).
3942 CompileRun("function fun() { this.x = 1; };");
3943 fun1 = env->Global()->Get(v8_str("fun"));
3946 // Prepare function f that contains a monomorphic constructor for object
3947 // originating from a different native context.
3948 CcTest::global()->Set(v8_str("fun1"), fun1);
3950 "function fun() { this.x = 1; };"
3951 "function f(o) { return new o(); } f(fun1); f(fun1);");
3952 Handle<JSFunction> f = v8::Utils::OpenHandle(
3953 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3956 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3957 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3959 // Fire context dispose notification.
3960 CcTest::isolate()->ContextDisposedNotification();
3961 SimulateIncrementalMarking(CcTest::heap());
3962 CcTest::heap()->CollectAllGarbage();
// After GC the slot must be back to the uninitialized sentinel, i.e. the
// feedback referencing the dead context has been cleared.
3964 CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
3965 vector->Get(FeedbackVectorSlot(0)));
// Checks that a monomorphic load IC (same native context) keeps its vector
// state and IC code state across incremental marking plus a full GC.
3969 TEST(IncrementalMarkingPreservesMonomorphicIC) {
3970 if (i::FLAG_always_opt) return;
3971 CcTest::InitializeVM();
3972 v8::HandleScope scope(CcTest::isolate());
3974 // Prepare function f that contains a monomorphic IC for object
3975 // originating from the same native context.
3976 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3977 "function f(o) { return o.x; } f(obj); f(obj);");
3978 Handle<JSFunction> f =
3979 v8::Utils::OpenHandle(
3980 *v8::Handle<v8::Function>::Cast(
3981 CcTest::global()->Get(v8_str("f"))));
3983 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3984 CheckVectorIC(f, 0, MONOMORPHIC);
3985 CHECK(ic_before->ic_state() == DEFAULT);
3987 SimulateIncrementalMarking(CcTest::heap());
3988 CcTest::heap()->CollectAllGarbage();
// Feedback must survive: still MONOMORPHIC in the vector, IC stub still in
// its DEFAULT state.
3990 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3991 CheckVectorIC(f, 0, MONOMORPHIC);
3992 CHECK(ic_after->ic_state() == DEFAULT);
// Checks that a monomorphic load IC whose receiver map comes from a disposed
// native context is cleared by incremental marking plus a full GC.
3996 TEST(IncrementalMarkingClearsMonomorphicIC) {
3997 if (i::FLAG_always_opt) return;
3998 CcTest::InitializeVM();
3999 v8::HandleScope scope(CcTest::isolate());
4000 v8::Local<v8::Value> obj1;
// obj1 is created in a separate context (context setup elided here).
4004 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
4005 obj1 = env->Global()->Get(v8_str("obj"));
4008 // Prepare function f that contains a monomorphic IC for object
4009 // originating from a different native context.
4010 CcTest::global()->Set(v8_str("obj1"), obj1);
4011 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
4012 Handle<JSFunction> f = v8::Utils::OpenHandle(
4013 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
4015 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4016 CheckVectorIC(f, 0, MONOMORPHIC);
4017 CHECK(ic_before->ic_state() == DEFAULT);
4019 // Fire context dispose notification.
4020 CcTest::isolate()->ContextDisposedNotification();
4021 SimulateIncrementalMarking(CcTest::heap());
4022 CcTest::heap()->CollectAllGarbage();
// The vector slot must be cleared; the IC stub itself stays DEFAULT.
4024 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4025 CheckVectorICCleared(f, 0);
4026 CHECK(ic_after->ic_state() == DEFAULT);
// Checks that a polymorphic load IC fed from two live native contexts keeps
// its feedback across incremental marking plus a full GC.
4030 TEST(IncrementalMarkingPreservesPolymorphicIC) {
4031 if (i::FLAG_always_opt) return;
4032 CcTest::InitializeVM();
4033 v8::HandleScope scope(CcTest::isolate());
4034 v8::Local<v8::Value> obj1, obj2;
// obj1 and obj2 are created in two separate contexts (setup elided here).
4038 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
4039 obj1 = env->Global()->Get(v8_str("obj"));
4044 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
4045 obj2 = env->Global()->Get(v8_str("obj"));
4048 // Prepare function f that contains a polymorphic IC for objects
4049 // originating from two different native contexts.
4050 CcTest::global()->Set(v8_str("obj1"), obj1);
4051 CcTest::global()->Set(v8_str("obj2"), obj2);
4052 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
4053 Handle<JSFunction> f = v8::Utils::OpenHandle(
4054 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
4056 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4057 CheckVectorIC(f, 0, POLYMORPHIC);
4058 CHECK(ic_before->ic_state() == DEFAULT);
4060 // Note: unlike the "clears" tests, no context dispose notification is
4060 // fired here — both contexts stay alive, so the feedback must survive.
4061 SimulateIncrementalMarking(CcTest::heap());
4062 CcTest::heap()->CollectAllGarbage();
4064 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4065 CheckVectorIC(f, 0, POLYMORPHIC);
4066 CHECK(ic_after->ic_state() == DEFAULT);
// Checks that a polymorphic load IC is cleared when one of the contributing
// native contexts is disposed before incremental marking plus a full GC.
4070 TEST(IncrementalMarkingClearsPolymorphicIC) {
4071 if (i::FLAG_always_opt) return;
4072 CcTest::InitializeVM();
4073 v8::HandleScope scope(CcTest::isolate());
4074 v8::Local<v8::Value> obj1, obj2;
// obj1 and obj2 are created in two separate contexts (setup elided here).
4078 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
4079 obj1 = env->Global()->Get(v8_str("obj"));
4084 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
4085 obj2 = env->Global()->Get(v8_str("obj"));
4088 // Prepare function f that contains a polymorphic IC for objects
4089 // originating from two different native contexts.
4090 CcTest::global()->Set(v8_str("obj1"), obj1);
4091 CcTest::global()->Set(v8_str("obj2"), obj2);
4092 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
4093 Handle<JSFunction> f = v8::Utils::OpenHandle(
4094 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
4096 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4097 CheckVectorIC(f, 0, POLYMORPHIC);
4098 CHECK(ic_before->ic_state() == DEFAULT);
4100 // Fire context dispose notification.
4101 CcTest::isolate()->ContextDisposedNotification();
4102 SimulateIncrementalMarking(CcTest::heap());
4103 CcTest::heap()->CollectAllGarbage();
// The vector feedback is cleared; the same IC code object (ic_before) is
// re-checked here — its state must remain DEFAULT.
4105 CheckVectorICCleared(f, 0);
4106 CHECK(ic_before->ic_state() == DEFAULT);
// External one-byte string resource over a heap-allocated char buffer.
// Used below to observe when the GC releases an external script source:
// Dispose() is invoked by V8 when the external string dies.
// NOTE(review): member declarations and the closing brace are elided from
// this chunk; Dispose() presumably also nulls data_ (IsDisposed() tests
// data_ == NULL) — confirm against the full file.
4110 class SourceResource : public v8::String::ExternalOneByteStringResource {
// Takes ownership of |data| (freed with i::DeleteArray in Dispose()).
4112 explicit SourceResource(const char* data)
4113 : data_(data), length_(strlen(data)) { }
4115 virtual void Dispose() {
4116 i::DeleteArray(data_);
4120 const char* data() const { return data_; }
4122 size_t length() const { return length_; }
// True once Dispose() has run, i.e. the external string was collected.
4124 bool IsDisposed() { return data_ == NULL; }
// Compiles |source| from an external string, touches the error's stack via
// |accessor|, and asserts the external source data is only released (its
// resource disposed) after the stack accessor has fired.
4132 void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
4133 const char* accessor) {
4134 // Test that the data retained by the Error.stack accessor is released
4135 // after the first time the accessor is fired. We use external string
4136 // to check whether the data is being released since the external string
4137 // resource's callback is fired when the external string is GC'ed.
4138 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
4139 v8::HandleScope scope(isolate);
// |resource| is owned by the external string; V8 disposes it on GC.
4140 SourceResource* resource = new SourceResource(i::StrDup(source));
// NOTE(review): this second HandleScope lives in an inner `{ ... }` block
// whose braces are elided from this chunk.
4142 v8::HandleScope scope(isolate);
4143 v8::Handle<v8::String> source_string =
4144 v8::String::NewExternal(isolate, resource);
4145 i_isolate->heap()->CollectAllAvailableGarbage();
4146 v8::Script::Compile(source_string)->Run();
// While the script (and its stack-trace data) is alive, the external
// source must not have been released.
4147 CHECK(!resource->IsDisposed());
4149 // i_isolate->heap()->CollectAllAvailableGarbage();
4150 CHECK(!resource->IsDisposed());
// Fire the Error.stack accessor (getter or setter), then GC.
4152 CompileRun(accessor);
4153 i_isolate->heap()->CollectAllAvailableGarbage();
4155 // External source has been released.
4156 CHECK(resource->IsDisposed());
// Drives ReleaseStackTraceDataTest over four error-producing scripts
// (normal error, stack overflow, and both used as a prototype) with both
// the Error.stack getter and setter. Runs on a fresh isolate so ICs and
// concurrent recompilation can be disabled before any code runs.
4161 UNINITIALIZED_TEST(ReleaseStackTraceData) {
4162 if (i::FLAG_always_opt) {
4163 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
4164 // See: https://codereview.chromium.org/181833004/
4167 FLAG_use_ic = false; // ICs retain objects.
4168 FLAG_concurrent_recompilation = false;
4169 v8::Isolate::CreateParams create_params;
4170 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
4171 v8::Isolate* isolate = v8::Isolate::New(create_params);
4173 v8::Isolate::Scope isolate_scope(isolate);
4174 v8::HandleScope handle_scope(isolate);
4175 v8::Context::New(isolate)->Enter();
4176 static const char* source1 = "var error = null; "
4177 /* Normal Error */ "try { "
4178 " throw new Error(); "
4182 static const char* source2 = "var error = null; "
4183 /* Stack overflow */ "try { "
4184 " (function f() { f(); })(); "
4188 static const char* source3 = "var error = null; "
4189 /* Normal Error */ "try { "
4190 /* as prototype */ " throw new Error(); "
4193 " error.__proto__ = e; "
4195 static const char* source4 = "var error = null; "
4196 /* Stack overflow */ "try { "
4197 /* as prototype */ " (function f() { f(); })(); "
4200 " error.__proto__ = e; "
4202 static const char* getter = "error.stack";
4203 static const char* setter = "error.stack = 0";
4205 ReleaseStackTraceDataTest(isolate, source1, setter);
4206 ReleaseStackTraceDataTest(isolate, source2, setter);
4207 // We do not test source3 and source4 with setter, since the setter is
4208 // supposed to (untypically) write to the receiver, not the holder. This is
4209 // to emulate the behavior of a data property.
4211 ReleaseStackTraceDataTest(isolate, source1, getter);
4212 ReleaseStackTraceDataTest(isolate, source2, getter);
4213 ReleaseStackTraceDataTest(isolate, source3, getter);
4214 ReleaseStackTraceDataTest(isolate, source4, getter);
// Regression test: code flushing must not flush the unoptimized code of a
// function whose optimized code is held alive (here via the |code| handle),
// otherwise deoptimizing later (g('bozo')) would crash.
4220 TEST(Regress159140) {
4221 i::FLAG_allow_natives_syntax = true;
4222 CcTest::InitializeVM();
4223 Isolate* isolate = CcTest::i_isolate();
4224 Heap* heap = isolate->heap();
4225 HandleScope scope(isolate);
4227 // Perform one initial GC to enable code flushing.
4228 heap->CollectAllGarbage();
4230 // Prepare several closures that are all eligible for code flushing
4231 // because all reachable ones are not optimized. Make sure that the
4232 // optimized code object is directly reachable through a handle so
4233 // that it is marked black during incremental marking.
4236 HandleScope inner_scope(isolate);
4237 CompileRun("function h(x) {}"
4238 "function mkClosure() {"
4239 " return function(x) { return x + 1; };"
4241 "var f = mkClosure();"
4242 "var g = mkClosure();"
4246 "%OptimizeFunctionOnNextCall(f); f(3);"
4247 "%OptimizeFunctionOnNextCall(h); h(3);");
4249 Handle<JSFunction> f =
4250 v8::Utils::OpenHandle(
4251 *v8::Handle<v8::Function>::Cast(
4252 CcTest::global()->Get(v8_str("f"))));
4253 CHECK(f->is_compiled());
4254 CompileRun("f = null;");
4256 Handle<JSFunction> g =
4257 v8::Utils::OpenHandle(
4258 *v8::Handle<v8::Function>::Cast(
4259 CcTest::global()->Get(v8_str("g"))));
4260 CHECK(g->is_compiled());
// Artificially age g's code so it becomes a flushing candidate.
4261 const int kAgingThreshold = 6;
4262 for (int i = 0; i < kAgingThreshold; i++) {
4263 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
// Escape f's (optimized) code so it stays directly reachable.
4266 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
4269 // Simulate incremental marking so that the functions are enqueued as
4270 // code flushing candidates. Then optimize one function. Finally
4271 // finish the GC to complete code flushing.
4272 SimulateIncrementalMarking(heap);
4273 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
4274 heap->CollectAllGarbage();
4276 // Unoptimized code is missing and the deoptimizer will go ballistic.
4277 CompileRun("g('bozo');");
// Regression test: flushing unoptimized code that is still referenced from
// the optimized code map must not break a later closure that installs code
// from that map and then deoptimizes.
4281 TEST(Regress165495) {
4282 i::FLAG_allow_natives_syntax = true;
4283 CcTest::InitializeVM();
4284 Isolate* isolate = CcTest::i_isolate();
4285 Heap* heap = isolate->heap();
4286 HandleScope scope(isolate);
4288 // Perform one initial GC to enable code flushing.
4289 heap->CollectAllGarbage();
4291 // Prepare an optimized closure that the optimized code map will get
4292 // populated. Then age the unoptimized code to trigger code flushing
4293 // but make sure the optimized code is unreachable.
4295 HandleScope inner_scope(isolate);
4296 CompileRun("function mkClosure() {"
4297 " return function(x) { return x + 1; };"
4299 "var f = mkClosure();"
4301 "%OptimizeFunctionOnNextCall(f); f(3);");
4303 Handle<JSFunction> f =
4304 v8::Utils::OpenHandle(
4305 *v8::Handle<v8::Function>::Cast(
4306 CcTest::global()->Get(v8_str("f"))));
4307 CHECK(f->is_compiled());
// Age the shared (unoptimized) code so it becomes a flushing candidate.
4308 const int kAgingThreshold = 6;
4309 for (int i = 0; i < kAgingThreshold; i++) {
4310 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
// Drop the only strong reference to the optimized closure.
4313 CompileRun("f = null;");
4316 // Simulate incremental marking so that unoptimized code is flushed
4317 // even though it still is cached in the optimized code map.
4318 SimulateIncrementalMarking(heap);
4319 heap->CollectAllGarbage();
4321 // Make a new closure that will get code installed from the code map.
4322 // Unoptimized code is missing and the deoptimizer will go ballistic.
4323 CompileRun("var g = mkClosure(); g('bozo');");
// Regression test: replacing the unoptimized code of a code-flushing
// candidate during optimization must correctly unlink it from the flushing
// candidate list (gc_metadata is used as the list link).
4327 TEST(Regress169209) {
4328 i::FLAG_stress_compaction = false;
4329 i::FLAG_allow_natives_syntax = true;
4331 CcTest::InitializeVM();
4332 Isolate* isolate = CcTest::i_isolate();
4333 Heap* heap = isolate->heap();
4334 HandleScope scope(isolate);
4336 // Perform one initial GC to enable code flushing.
4337 heap->CollectAllGarbage();
4339 // Prepare a shared function info eligible for code flushing for which
4340 // the unoptimized code will be replaced during optimization.
4341 Handle<SharedFunctionInfo> shared1;
4343 HandleScope inner_scope(isolate);
4344 CompileRun("function f() { return 'foobar'; }"
4345 "function g(x) { if (x) f(); }"
4350 Handle<JSFunction> f =
4351 v8::Utils::OpenHandle(
4352 *v8::Handle<v8::Function>::Cast(
4353 CcTest::global()->Get(v8_str("f"))));
4354 CHECK(f->is_compiled());
// Age the code so the shared function becomes a flushing candidate.
4355 const int kAgingThreshold = 6;
4356 for (int i = 0; i < kAgingThreshold; i++) {
4357 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4360 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4363 // Prepare a shared function info eligible for code flushing that will
4364 // represent the dangling tail of the candidate list.
4365 Handle<SharedFunctionInfo> shared2;
4367 HandleScope inner_scope(isolate);
4368 CompileRun("function flushMe() { return 0; }"
4371 Handle<JSFunction> f =
4372 v8::Utils::OpenHandle(
4373 *v8::Handle<v8::Function>::Cast(
4374 CcTest::global()->Get(v8_str("flushMe"))));
4375 CHECK(f->is_compiled());
4376 const int kAgingThreshold = 6;
4377 for (int i = 0; i < kAgingThreshold; i++) {
4378 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4381 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4384 // Simulate incremental marking and collect code flushing candidates.
4385 SimulateIncrementalMarking(heap);
// Non-NULL gc_metadata shows shared1 is enqueued as a candidate.
4386 CHECK(shared1->code()->gc_metadata() != NULL);
4388 // Optimize function and make sure the unoptimized code is replaced.
4392 CompileRun("%OptimizeFunctionOnNextCall(g);"
4395 // Finish garbage collection cycle.
4396 heap->CollectAllGarbage();
// The candidate link must have been cleared when the code was replaced.
4397 CHECK(shared1->code()->gc_metadata() == NULL);
// Regression test: carefully fills new space so that the array literal
// allocated by |fastliteralcase| lands exactly where a filler object was
// placed, checking that no stale AllocationMemento is picked up. Would
// crash with a protection violation if the bug were present.
4401 TEST(Regress169928) {
4402 i::FLAG_allow_natives_syntax = true;
4403 i::FLAG_crankshaft = false;
4404 CcTest::InitializeVM();
4405 Isolate* isolate = CcTest::i_isolate();
4406 Factory* factory = isolate->factory();
4407 v8::HandleScope scope(CcTest::isolate());
4409 // Some flags turn Scavenge collections into Mark-sweep collections
4410 // and hence are incompatible with this test case.
4411 if (FLAG_gc_global || FLAG_stress_compaction) return;
4413 // Prepare the environment
4414 CompileRun("function fastliteralcase(literal, value) {"
4415 " literal[0] = value;"
4418 "function get_standard_literal() {"
4419 " var literal = [1, 2, 3];"
4422 "obj = fastliteralcase(get_standard_literal(), 1);"
4423 "obj = fastliteralcase(get_standard_literal(), 1.5);"
4424 "obj = fastliteralcase(get_standard_literal(), 2);");
// Script to run later, once the heap has been set up precisely.
4427 v8::Local<v8::String> mote_code_string =
4428 v8_str("fastliteralcase(mote, 2.5);");
4430 v8::Local<v8::String> array_name = v8_str("mote");
4431 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
4433 // First make sure we flip spaces
4434 CcTest::heap()->CollectGarbage(NEW_SPACE);
4436 // Allocate the object.
4437 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
4438 array_data->set(0, Smi::FromInt(1));
4439 array_data->set(1, Smi::FromInt(2));
// Fill new space up to a precisely-chosen remainder so the next
// allocations land at a known position.
4441 AllocateAllButNBytes(CcTest::heap()->new_space(),
4442 JSArray::kSize + AllocationMemento::kSize +
4445 Handle<JSArray> array =
4446 factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);
4448 CHECK_EQ(Smi::FromInt(2), array->length());
4449 CHECK(array->HasFastSmiOrObjectElements());
4451 // We need filler the size of AllocationMemento object, plus an extra
4452 // fill pointer value.
4453 HeapObject* obj = NULL;
4454 AllocationResult allocation =
4455 CcTest::heap()->new_space()->AllocateRawUnaligned(
4456 AllocationMemento::kSize + kPointerSize);
4457 CHECK(allocation.To(&obj));
4458 Address addr_obj = obj->address();
4459 CcTest::heap()->CreateFillerObjectAt(
4460 addr_obj, AllocationMemento::kSize + kPointerSize);
4462 // Give the array a name, making sure not to allocate strings.
4463 v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
4464 CcTest::global()->Set(array_name, array_obj);
4466 // This should crash with a protection violation if we are running a build
4468 AlwaysAllocateScope aa_scope(isolate);
4469 v8::Script::Compile(mote_code_string)->Run();
// Regression test: a function enqueued as a code-flushing candidate and
// then optimized (taken off the list) while its code sits on an evacuation
// candidate must survive subsequent compacting GCs.
4473 TEST(Regress168801) {
4474 if (i::FLAG_never_compact) return;
4475 i::FLAG_always_compact = true;
4476 i::FLAG_cache_optimized_code = false;
4477 i::FLAG_allow_natives_syntax = true;
4478 CcTest::InitializeVM();
4479 Isolate* isolate = CcTest::i_isolate();
4480 Heap* heap = isolate->heap();
4481 HandleScope scope(isolate);
4483 // Perform one initial GC to enable code flushing.
4484 heap->CollectAllGarbage();
4486 // Ensure the code ends up on an evacuation candidate.
4487 SimulateFullSpace(heap->code_space());
4489 // Prepare an unoptimized function that is eligible for code flushing.
4490 Handle<JSFunction> function;
4492 HandleScope inner_scope(isolate);
4493 CompileRun("function mkClosure() {"
4494 " return function(x) { return x + 1; };"
4496 "var f = mkClosure();"
4499 Handle<JSFunction> f =
4500 v8::Utils::OpenHandle(
4501 *v8::Handle<v8::Function>::Cast(
4502 CcTest::global()->Get(v8_str("f"))));
4503 CHECK(f->is_compiled());
// Age the code to make the function a code-flushing candidate.
4504 const int kAgingThreshold = 6;
4505 for (int i = 0; i < kAgingThreshold; i++) {
4506 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4509 function = inner_scope.CloseAndEscape(handle(*f, isolate));
4512 // Simulate incremental marking so that unoptimized function is enqueued as a
4513 // candidate for code flushing. The shared function info however will not be
4514 // explicitly enqueued.
4515 SimulateIncrementalMarking(heap);
4517 // Now optimize the function so that it is taken off the candidate list.
4519 HandleScope inner_scope(isolate);
4520 CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
4523 // This cycle will bust the heap and subsequent cycles will go ballistic.
4524 heap->CollectAllGarbage();
4525 heap->CollectAllGarbage();
// Regression test: loading the debugger (which disables code flushing)
// while flushing candidates are already enqueued must leave the heap in a
// consistent state across subsequent compacting GCs.
4529 TEST(Regress173458) {
4530 if (i::FLAG_never_compact) return;
4531 i::FLAG_always_compact = true;
4532 i::FLAG_cache_optimized_code = false;
4533 i::FLAG_allow_natives_syntax = true;
4534 CcTest::InitializeVM();
4535 Isolate* isolate = CcTest::i_isolate();
4536 Heap* heap = isolate->heap();
4537 HandleScope scope(isolate);
4539 // Perform one initial GC to enable code flushing.
4540 heap->CollectAllGarbage();
4542 // Ensure the code ends up on an evacuation candidate.
4543 SimulateFullSpace(heap->code_space());
4545 // Prepare an unoptimized function that is eligible for code flushing.
4546 Handle<JSFunction> function;
4548 HandleScope inner_scope(isolate);
4549 CompileRun("function mkClosure() {"
4550 " return function(x) { return x + 1; };"
4552 "var f = mkClosure();"
4555 Handle<JSFunction> f =
4556 v8::Utils::OpenHandle(
4557 *v8::Handle<v8::Function>::Cast(
4558 CcTest::global()->Get(v8_str("f"))));
4559 CHECK(f->is_compiled());
// Age the code to make the function a code-flushing candidate.
4560 const int kAgingThreshold = 6;
4561 for (int i = 0; i < kAgingThreshold; i++) {
4562 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4565 function = inner_scope.CloseAndEscape(handle(*f, isolate));
4568 // Simulate incremental marking so that unoptimized function is enqueued as a
4569 // candidate for code flushing. The shared function info however will not be
4570 // explicitly enqueued.
4571 SimulateIncrementalMarking(heap);
4573 // Now enable the debugger which in turn will disable code flushing.
4574 CHECK(isolate->debug()->Load());
4576 // This cycle will bust the heap and subsequent cycles will go ballistic.
4577 heap->CollectAllGarbage();
4578 heap->CollectAllGarbage();
// Regression test: expanding the optimized code map can trigger a GC (forced
// here via allocation timeout + gc_interval); repeatedly adding entries must
// not corrupt the map.
4583 TEST(Regress513507) {
4584 i::FLAG_flush_optimized_code_cache = false;
4585 i::FLAG_allow_natives_syntax = true;
4586 i::FLAG_gc_global = true;
4587 CcTest::InitializeVM();
4588 Isolate* isolate = CcTest::i_isolate();
4589 Heap* heap = isolate->heap();
4590 HandleScope scope(isolate);
4592 // Prepare function whose optimized code map we can use.
4593 Handle<SharedFunctionInfo> shared;
4595 HandleScope inner_scope(isolate);
4596 CompileRun("function f() { return 1 }"
4597 "f(); %OptimizeFunctionOnNextCall(f); f();");
4599 Handle<JSFunction> f =
4600 v8::Utils::OpenHandle(
4601 *v8::Handle<v8::Function>::Cast(
4602 CcTest::global()->Get(v8_str("f"))));
4603 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4604 CompileRun("f = null");
4607 // Prepare optimized code that we can use.
4610 HandleScope inner_scope(isolate);
4611 CompileRun("function g() { return 2 }"
4612 "g(); %OptimizeFunctionOnNextCall(g); g();");
4614 Handle<JSFunction> g =
4615 v8::Utils::OpenHandle(
4616 *v8::Handle<v8::Function>::Cast(
4617 CcTest::global()->Get(v8_str("g"))));
4618 code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
// If optimization was disabled/bailed out there is nothing to test.
4619 if (!code->is_optimized_code()) return;
4622 Handle<FixedArray> lit = isolate->factory()->empty_fixed_array();
4623 Handle<Context> context(isolate->context());
4625 // Add the new code several times to the optimized code map and also set an
4626 // allocation timeout so that expanding the code map will trigger a GC.
4627 heap->set_allocation_timeout(5);
4628 FLAG_gc_interval = 1000;
4629 for (int i = 0; i < 10; ++i) {
// Distinct BailoutIds force new entries rather than overwrites.
4630 BailoutId id = BailoutId(i);
4631 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
// Regression test: an optimized code map whose literals array sits on an
// evacuation candidate, combined with stale (unreachable but marked) old
// copies of the map, must not crash the collector.
4637 TEST(Regress514122) {
4638 i::FLAG_flush_optimized_code_cache = false;
4639 i::FLAG_allow_natives_syntax = true;
4640 CcTest::InitializeVM();
4641 Isolate* isolate = CcTest::i_isolate();
4642 Heap* heap = isolate->heap();
4643 HandleScope scope(isolate);
4645 // Perfrom one initial GC to enable code flushing.
4646 CcTest::heap()->CollectAllGarbage();
4648 // Prepare function whose optimized code map we can use.
4649 Handle<SharedFunctionInfo> shared;
4651 HandleScope inner_scope(isolate);
4652 CompileRun("function f() { return 1 }"
4653 "f(); %OptimizeFunctionOnNextCall(f); f();");
4655 Handle<JSFunction> f =
4656 v8::Utils::OpenHandle(
4657 *v8::Handle<v8::Function>::Cast(
4658 CcTest::global()->Get(v8_str("f"))));
4659 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4660 CompileRun("f = null");
4663 // Prepare optimized code that we can use.
4666 HandleScope inner_scope(isolate);
4667 CompileRun("function g() { return 2 }"
4668 "g(); %OptimizeFunctionOnNextCall(g); g();");
4670 Handle<JSFunction> g =
4671 v8::Utils::OpenHandle(
4672 *v8::Handle<v8::Function>::Cast(
4673 CcTest::global()->Get(v8_str("g"))));
4674 code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
// If optimization was disabled/bailed out there is nothing to test.
4675 if (!code->is_optimized_code()) return;
4678 Handle<FixedArray> lit = isolate->factory()->empty_fixed_array();
4679 Handle<Context> context(isolate->context());
4681 // Add the code several times to the optimized code map.
4682 for (int i = 0; i < 3; ++i) {
4683 HandleScope inner_scope(isolate);
4684 BailoutId id = BailoutId(i);
4685 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
// NOTE(review): Print() looks like leftover debug output in a test —
// consider removing it.
4687 shared->optimized_code_map()->Print();
4689 // Add the code with a literals array to be evacuated.
4692 HandleScope inner_scope(isolate);
4693 AlwaysAllocateScope always_allocate(isolate);
4694 // Make sure literal is placed on an old-space evacuation candidate.
4695 SimulateFullSpace(heap->old_space());
4696 Handle<FixedArray> lit = isolate->factory()->NewFixedArray(23, TENURED);
4697 evac_page = Page::FromAddress(lit->address());
4698 BailoutId id = BailoutId(100);
4699 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4702 // Heap is ready, force {lit_page} to become an evacuation candidate and
4703 // simulate incremental marking to enqueue optimized code map.
4704 FLAG_manual_evacuation_candidates_selection = true;
4705 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4706 SimulateIncrementalMarking(heap);
4708 // No matter whether reachable or not, {boomer} is doomed.
4709 Handle<Object> boomer(shared->optimized_code_map(), isolate);
4711 // Add the code several times to the optimized code map. This will leave old
4712 // copies of the optimized code map unreachable but still marked.
4713 for (int i = 3; i < 6; ++i) {
4714 HandleScope inner_scope(isolate);
4715 BailoutId id = BailoutId(i);
4716 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4719 // Trigger a GC to flush out the bug.
4720 heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
// Checks that slots recorded in a large object (pointers into an evacuation
// candidate, written while incremental marking is active) are updated when
// the pointee is evacuated by a full GC.
4725 TEST(LargeObjectSlotRecording) {
4726 FLAG_manual_evacuation_candidates_selection = true;
4727 CcTest::InitializeVM();
4728 Isolate* isolate = CcTest::i_isolate();
4729 Heap* heap = isolate->heap();
4730 HandleScope scope(isolate);
4732 // Create an object on an evacuation candidate.
4733 SimulateFullSpace(heap->old_space());
4734 Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
4735 Page* evac_page = Page::FromAddress(lit->address());
4736 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
// Keep the raw pre-evacuation address so we can verify it moved.
4737 FixedArray* old_location = *lit;
4739 // Allocate a large object.
4740 const int kSize = 1000000;
4741 Handle<FixedArray> lo = isolate->factory()->NewFixedArray(kSize, TENURED);
4742 CHECK(heap->lo_space()->Contains(*lo));
4744 // Start incremental marking to active write barrier.
4745 SimulateIncrementalMarking(heap, false);
4746 heap->AdvanceIncrementalMarking(10000000, 10000000,
4747 IncrementalMarking::IdleStepActions());
4749 // Create references from the large object to the object on the evacuation
4751 const int kStep = kSize / 10;
4752 for (int i = 0; i < kSize; i += kStep) {
4754 CHECK(lo->get(i) == old_location);
4757 // Move the evaucation candidate object.
4758 CcTest::heap()->CollectAllGarbage();
4760 // Verify that the pointers in the large object got updated.
4761 for (int i = 0; i < kSize; i += kStep) {
4762 CHECK_EQ(lo->get(i), *lit);
4763 CHECK(lo->get(i) != old_location);
// Minimal ObjectVisitor that ignores all pointers; used below to drive
// HandleScopeImplementer::Iterate without observing anything.
4768 class DummyVisitor : public ObjectVisitor {
4770 void VisitPointers(Object** start, Object** end) { }
// Exercises DeferredHandleScope at the exact point where the current handle
// block is full, then iterates all handles to check nothing is corrupted.
4774 TEST(DeferredHandles) {
4775 CcTest::InitializeVM();
4776 Isolate* isolate = CcTest::i_isolate();
4777 Heap* heap = isolate->heap();
4778 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
4779 HandleScopeData* data = isolate->handle_scope_data();
4780 Handle<Object> init(heap->empty_string(), isolate);
// Fill the current handle block completely.
4781 while (data->next < data->limit) {
4782 Handle<Object> obj(heap->empty_string(), isolate);
4784 // An entire block of handles has been filled.
4785 // Next handle would require a new block.
4786 DCHECK(data->next == data->limit);
4788 DeferredHandleScope deferred(isolate);
4789 DummyVisitor visitor;
4790 isolate->handle_scope_implementer()->Iterate(&visitor);
// Detach() returns an owned DeferredHandles object; delete it immediately.
4791 delete deferred.Detach();
// Checks that a single large incremental-marking step (100 MB budget) is
// enough to mark a 10M-element array, i.e. marking of large objects makes
// proportional progress rather than stalling.
4795 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
4796 CcTest::InitializeVM();
4797 v8::HandleScope scope(CcTest::isolate());
4798 CompileRun("function f(n) {"
4799 " var a = new Array(n);"
4800 " for (var i = 0; i < n; i += 100) a[i] = i;"
4802 "f(10 * 1024 * 1024);");
4803 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4804 if (marking->IsStopped()) marking->Start(Heap::kNoGCFlags);
4805 // This big step should be sufficient to mark the whole array.
4806 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4807 DCHECK(marking->IsComplete() ||
4808 marking->IsReadyToOverApproximateWeakClosure());
// Runs the same allocating/optimizing/deoptimizing workload with inline
// allocation enabled, disabled, and re-enabled — the toggle must not break
// execution in any mode.
4812 TEST(DisableInlineAllocation) {
4813 i::FLAG_allow_natives_syntax = true;
4814 CcTest::InitializeVM();
4815 v8::HandleScope scope(CcTest::isolate());
4816 CompileRun("function test() {"
4818 " for (var i = 0; i < 10; i++) {"
4819 " x[i] = [ {}, [1,2,3], [1,x,3] ];"
4823 " %OptimizeFunctionOnNextCall(test);"
4825 " %DeoptimizeFunction(test);"
4828 // Warm-up with inline allocation enabled.
4829 CompileRun("test(); test(); run();");
4831 // Run test with inline allocation disabled.
4832 CcTest::heap()->DisableInlineAllocation();
4833 CompileRun("run()");
4835 // Run test with inline allocation re-enabled.
4836 CcTest::heap()->EnableInlineAllocation();
4837 CompileRun("run()");
// Counts the entries of the heap's allocation-sites weak list by walking
// weak_next links until the undefined sentinel.
// NOTE(review): the counter increment and return statement are elided from
// this chunk.
4841 static int AllocationSitesCount(Heap* heap) {
4843 for (Object* site = heap->allocation_sites_list();
4844 !(site->IsUndefined());
4845 site = AllocationSite::cast(site)->weak_next()) {
// Checks that code registered in an allocation site's dependent_code() is
// held only weakly: after the optimized function dies, the weak cell in the
// dependency group is cleared even though the site itself stays alive
// (via a global handle).
4852 TEST(EnsureAllocationSiteDependentCodesProcessed) {
4853 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4854 i::FLAG_allow_natives_syntax = true;
4855 CcTest::InitializeVM();
4856 Isolate* isolate = CcTest::i_isolate();
4857 v8::internal::Heap* heap = CcTest::heap();
4858 GlobalHandles* global_handles = isolate->global_handles();
4860 if (!isolate->use_crankshaft()) return;
4862 // The allocation site at the head of the list is ours.
4863 Handle<AllocationSite> site;
4865 LocalContext context;
4866 v8::HandleScope scope(context->GetIsolate());
4868 int count = AllocationSitesCount(heap);
4869 CompileRun("var bar = function() { return (new Array()); };"
4874 // One allocation site should have been created.
4875 int new_count = AllocationSitesCount(heap);
4876 CHECK_EQ(new_count, (count + 1));
// Pin the site with a global handle so it outlives the local scope.
4877 site = Handle<AllocationSite>::cast(
4878 global_handles->Create(
4879 AllocationSite::cast(heap->allocation_sites_list())));
4881 CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
// The optimized code for bar must now be registered (as a weak cell) in
// the site's transition-changed dependency group.
4883 DependentCode::GroupStartIndexes starts(site->dependent_code());
4884 CHECK_GE(starts.number_of_entries(), 1);
4885 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4886 CHECK(site->dependent_code()->object_at(index)->IsWeakCell());
4887 Code* function_bar = Code::cast(
4888 WeakCell::cast(site->dependent_code()->object_at(index))->value());
4889 Handle<JSFunction> bar_handle =
4890 v8::Utils::OpenHandle(
4891 *v8::Handle<v8::Function>::Cast(
4892 CcTest::global()->Get(v8_str("bar"))));
4893 CHECK_EQ(bar_handle->code(), function_bar);
4896 // Now make sure that a gc should get rid of the function, even though we
4897 // still have the allocation site alive.
4898 for (int i = 0; i < 4; i++) {
4899 heap->CollectAllGarbage();
4902 // The site still exists because of our global handle, but the code is no
4903 // longer referred to by dependent_code().
4904 DependentCode::GroupStartIndexes starts(site->dependent_code());
4905 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4906 CHECK(site->dependent_code()->object_at(index)->IsWeakCell() &&
4907 WeakCell::cast(site->dependent_code()->object_at(index))->cleared());
// Checks that property cells embedded in optimized code do not keep the code
// alive: once the function becomes unreachable, repeated full GCs must mark
// the escaped Code handle for deoptimization. Interior lines are elided.
4911 TEST(CellsInOptimizedCodeAreWeak) {
4912 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4913 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4914 i::FLAG_allow_natives_syntax = true;
4915 CcTest::InitializeVM();
4916 Isolate* isolate = CcTest::i_isolate();
4917 v8::internal::Heap* heap = CcTest::heap();
4919 if (!isolate->use_crankshaft()) return;
4920 HandleScope outer_scope(heap->isolate());
4923 LocalContext context;
4924 HandleScope scope(heap->isolate());
4926 CompileRun("bar = (function() {"
4930 " var foo = function(x) { with (x) { return 1 + x; } };"
4934 " %OptimizeFunctionOnNextCall(bar);"
4936 " return bar;})();");
4938 Handle<JSFunction> bar =
4939 v8::Utils::OpenHandle(
4940 *v8::Handle<v8::Function>::Cast(
4941 CcTest::global()->Get(v8_str("bar"))));
4942 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4945 // Now make sure that a gc should get rid of the function
4946 for (int i = 0; i < 4; i++) {
4947 heap->CollectAllGarbage();
4950 DCHECK(code->marked_for_deoptimization());
// Same idea as CellsInOptimizedCodeAreWeak, but for heap objects embedded in
// optimized code: after the function dies, GC must mark the code for
// deoptimization rather than letting the embedded objects retain it.
4954 TEST(ObjectsInOptimizedCodeAreWeak) {
4955 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4956 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4957 i::FLAG_allow_natives_syntax = true;
4958 CcTest::InitializeVM();
4959 Isolate* isolate = CcTest::i_isolate();
4960 v8::internal::Heap* heap = CcTest::heap();
4962 if (!isolate->use_crankshaft()) return;
4963 HandleScope outer_scope(heap->isolate());
4966 LocalContext context;
4967 HandleScope scope(heap->isolate());
4969 CompileRun("function bar() {"
4972 "function foo(x) { with (x) { return 1 + x; } };"
4976 "%OptimizeFunctionOnNextCall(bar);"
4979 Handle<JSFunction> bar =
4980 v8::Utils::OpenHandle(
4981 *v8::Handle<v8::Function>::Cast(
4982 CcTest::global()->Get(v8_str("bar"))));
4983 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4986 // Now make sure that a gc should get rid of the function
4987 for (int i = 0; i < 4; i++) {
4988 heap->CollectAllGarbage();
4991 DCHECK(code->marked_for_deoptimization());
// Regression-style test: repeatedly optimizes short-lived functions while
// incremental marking is simulated, then checks the weak object-to-code
// table holds zero entries — i.e. nothing leaked through incremental GC.
// Requires a snapshot build so the heap starts in a known clean state.
4995 TEST(NoWeakHashTableLeakWithIncrementalMarking) {
4996 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4997 if (!i::FLAG_incremental_marking) return;
4998 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4999 i::FLAG_allow_natives_syntax = true;
5000 i::FLAG_compilation_cache = false;
5001 i::FLAG_retain_maps_for_n_gc = 0;
5002 CcTest::InitializeVM();
5003 Isolate* isolate = CcTest::i_isolate();
5005 // Do not run for no-snap builds.
5006 if (!i::Snapshot::HaveASnapshotToStartFrom(isolate)) return;
5008 v8::internal::Heap* heap = CcTest::heap();
5010 // Get a clean slate regarding optimized functions on the heap.
5011 i::Deoptimizer::DeoptimizeAll(isolate);
5012 heap->CollectAllGarbage();
5014 if (!isolate->use_crankshaft()) return;
5015 HandleScope outer_scope(heap->isolate());
5016 for (int i = 0; i < 3; i++) {
5017 SimulateIncrementalMarking(heap);
5019 LocalContext context;
5020 HandleScope scope(heap->isolate());
5021 EmbeddedVector<char, 256> source;
5023 "function bar%d() {"
5026 "function foo%d(x) { with (x) { return 1 + x; } };"
5030 "%%OptimizeFunctionOnNextCall(bar%d);"
5032 i, i, i, i, i, i, i, i);
5033 CompileRun(source.start());
5035 heap->CollectAllGarbage();
5038 if (heap->weak_object_to_code_table()->IsHashTable()) {
5039 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
5040 elements = t->NumberOfElements();
5042 CHECK_EQ(0, elements);
// Compiles and force-optimizes a trivial function with the given name, then
// returns a handle to it (final return is in an elided line of this listing).
5046 static Handle<JSFunction> OptimizeDummyFunction(const char* name) {
5047 EmbeddedVector<char, 256> source;
5049 "function %s() { return 0; }"
5051 "%%OptimizeFunctionOnNextCall(%s);"
5052 "%s();", name, name, name, name, name);
5053 CompileRun(source.start());
5054 Handle<JSFunction> fun =
5055 v8::Utils::OpenHandle(
5056 *v8::Handle<v8::Function>::Cast(
5057 CcTest::global()->Get(v8_str(name))));
// Walks the next_code_link chain starting at |code| and returns its length
// (counter increment and return are in elided lines of this listing).
5062 static int GetCodeChainLength(Code* code) {
5064 while (code->next_code_link()->IsCode()) {
5066 code = Code::cast(code->next_code_link());
// Checks that the next_code_link field of optimized Code objects is a weak
// link: when "mortal" dies and only "immortal" survives via an escaped
// handle, the code chain shrinks by exactly one after a full GC.
5072 TEST(NextCodeLinkIsWeak) {
5073 i::FLAG_always_opt = false;
5074 i::FLAG_allow_natives_syntax = true;
5075 CcTest::InitializeVM();
5076 Isolate* isolate = CcTest::i_isolate();
5077 v8::internal::Heap* heap = CcTest::heap();
5079 if (!isolate->use_crankshaft()) return;
5080 HandleScope outer_scope(heap->isolate());
5082 heap->CollectAllAvailableGarbage();
5083 int code_chain_length_before, code_chain_length_after;
5085 HandleScope scope(heap->isolate());
5086 Handle<JSFunction> mortal = OptimizeDummyFunction("mortal");
5087 Handle<JSFunction> immortal = OptimizeDummyFunction("immortal");
5088 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
5089 code_chain_length_before = GetCodeChainLength(immortal->code());
5090 // Keep the immortal code and let the mortal code die.
5091 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
5092 CompileRun("mortal = null; immortal = null;");
5094 heap->CollectAllAvailableGarbage();
5095 // Now mortal code should be dead.
5096 code_chain_length_after = GetCodeChainLength(*code);
5097 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
// Assembles a minimal OPTIMIZED_FUNCTION Code object (push undefined) for
// tests that need raw optimized code without going through the compiler.
// The return statement lies in an elided line of this listing.
5101 static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
5102 i::byte buffer[i::Assembler::kMinimalBufferSize];
5103 MacroAssembler masm(isolate, buffer, sizeof(buffer));
5105 masm.Push(isolate->factory()->undefined_value());
5107 masm.GetCode(&desc);
5108 Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
5109 Handle<Code> code = isolate->factory()->NewCode(
5110 desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
5111 CHECK(code->IsCode());
// Variant of NextCodeLinkIsWeak using hand-built dummy code: splices two
// code objects onto the context's OPTIMIZED_CODE_LIST, lets the inner one
// die, and verifies GC relinks the surviving head directly to the old list.
5116 TEST(NextCodeLinkIsWeak2) {
5117 i::FLAG_allow_natives_syntax = true;
5118 CcTest::InitializeVM();
5119 Isolate* isolate = CcTest::i_isolate();
5120 v8::internal::Heap* heap = CcTest::heap();
5122 if (!isolate->use_crankshaft()) return;
5123 HandleScope outer_scope(heap->isolate());
5124 heap->CollectAllAvailableGarbage();
5125 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
5126 Handle<Code> new_head;
5127 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
5129 HandleScope scope(heap->isolate());
5130 Handle<Code> immortal = DummyOptimizedCode(isolate);
5131 Handle<Code> mortal = DummyOptimizedCode(isolate);
5132 mortal->set_next_code_link(*old_head);
5133 immortal->set_next_code_link(*mortal);
5134 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
5135 new_head = scope.CloseAndEscape(immortal);
5137 heap->CollectAllAvailableGarbage();
5138 // Now mortal code should be dead.
5139 CHECK_EQ(*old_head, new_head->next_code_link());
// Flag set by ClearWeakIC so tests can observe that the weak callback ran.
5143 static bool weak_ic_cleared = false;
// Weak-handle callback: records that it fired and resets the persistent
// handle passed as the callback parameter.
5145 static void ClearWeakIC(
5146 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
5147 printf("clear weak is called\n");
5148 weak_ic_cleared = true;
5149 data.GetParameter()->Reset();
// Verifies that the constructor recorded in createObj's CallIC feedback is
// held via a WeakCell: GC clears it once the function dies, and a later call
// with a fresh constructor re-establishes a (non-cleared) monomorphic slot.
5153 TEST(WeakFunctionInConstructor) {
5154 if (i::FLAG_always_opt) return;
5155 i::FLAG_stress_compaction = false;
5156 CcTest::InitializeVM();
5157 v8::Isolate* isolate = CcTest::isolate();
5158 v8::HandleScope scope(isolate);
5160 "function createObj(obj) {"
5161 " return new obj();"
5163 Handle<JSFunction> createObj =
5164 v8::Utils::OpenHandle(*v8::Handle<v8::Function>::Cast(
5165 CcTest::global()->Get(v8_str("createObj"))));
5167 v8::Persistent<v8::Object> garbage;
5169 v8::HandleScope scope(isolate);
5170 const char* source =
5172 " function hat() { this.x = 5; }"
5177 garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
5179 weak_ic_cleared = false;
5180 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5181 Heap* heap = CcTest::i_isolate()->heap();
5182 heap->CollectAllGarbage();
5183 CHECK(weak_ic_cleared);
5185 // We've determined the constructor in createObj has had it's weak cell
5186 // cleared. Now, verify that one additional call with a new function
5187 // allows monomorphicity.
5188 Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
5189 createObj->shared()->feedback_vector(), CcTest::i_isolate());
5190 for (int i = 0; i < 20; i++) {
5191 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5192 CHECK(slot_value->IsWeakCell());
5193 if (WeakCell::cast(slot_value)->cleared()) break;
5194 heap->CollectAllGarbage();
5197 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5198 CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
5200 "function coat() { this.x = 6; }"
5201 "createObj(coat);");
5202 slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5203 CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
5207 // Checks that the value returned by execution of the source is weak.
// Shared driver for the WeakMapIn*IC tests below: runs |source|, holds the
// result only through a weak persistent handle, forces a full GC, and
// asserts the weak callback fired (i.e. the IC did not strongly retain it).
5208 void CheckWeakness(const char* source) {
5209 i::FLAG_stress_compaction = false;
5210 CcTest::InitializeVM();
5211 v8::Isolate* isolate = CcTest::isolate();
5212 v8::HandleScope scope(isolate);
5213 v8::Persistent<v8::Object> garbage;
5215 v8::HandleScope scope(isolate);
5216 garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
5218 weak_ic_cleared = false;
5219 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5220 Heap* heap = CcTest::i_isolate()->heap();
5221 heap->CollectAllGarbage();
5222 CHECK(weak_ic_cleared);
5226 // Each of the following "weak IC" tests creates an IC that embeds a map with
5227 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
// Monomorphic LoadIC case; parts of the JS source are elided in this listing.
5228 TEST(WeakMapInMonomorphicLoadIC) {
5229 CheckWeakness("function loadIC(obj) {"
5233 " var proto = {'name' : 'weak'};"
5234 " var obj = Object.create(proto);"
// Polymorphic LoadIC variant: two receiver shapes share _proto_; the map in
// the IC must still hold the prototype weakly. JS source partially elided.
5243 TEST(WeakMapInPolymorphicLoadIC) {
5245 "function loadIC(obj) {"
5249 " var proto = {'name' : 'weak'};"
5250 " var obj = Object.create(proto);"
5254 " var poly = Object.create(proto);"
// Monomorphic KeyedLoadIC: repeated keyed loads warm the IC, then the
// prototype embedded in the IC's map must be collectible.
5262 TEST(WeakMapInMonomorphicKeyedLoadIC) {
5263 CheckWeakness("function keyedLoadIC(obj, field) {"
5264 " return obj[field];"
5267 " var proto = {'name' : 'weak'};"
5268 " var obj = Object.create(proto);"
5269 " keyedLoadIC(obj, 'name');"
5270 " keyedLoadIC(obj, 'name');"
5271 " keyedLoadIC(obj, 'name');"
// Polymorphic KeyedLoadIC variant of the test above.
5277 TEST(WeakMapInPolymorphicKeyedLoadIC) {
5279 "function keyedLoadIC(obj, field) {"
5280 " return obj[field];"
5283 " var proto = {'name' : 'weak'};"
5284 " var obj = Object.create(proto);"
5285 " keyedLoadIC(obj, 'name');"
5286 " keyedLoadIC(obj, 'name');"
5287 " keyedLoadIC(obj, 'name');"
5288 " var poly = Object.create(proto);"
5290 " keyedLoadIC(poly, 'name');"
// Monomorphic StoreIC: stores warm the IC; prototype must remain weak.
5296 TEST(WeakMapInMonomorphicStoreIC) {
5297 CheckWeakness("function storeIC(obj, value) {"
5298 " obj.name = value;"
5301 " var proto = {'name' : 'weak'};"
5302 " var obj = Object.create(proto);"
5303 " storeIC(obj, 'x');"
5304 " storeIC(obj, 'x');"
5305 " storeIC(obj, 'x');"
// Polymorphic StoreIC variant of the test above.
5311 TEST(WeakMapInPolymorphicStoreIC) {
5313 "function storeIC(obj, value) {"
5314 " obj.name = value;"
5317 " var proto = {'name' : 'weak'};"
5318 " var obj = Object.create(proto);"
5319 " storeIC(obj, 'x');"
5320 " storeIC(obj, 'x');"
5321 " storeIC(obj, 'x');"
5322 " var poly = Object.create(proto);"
5324 " storeIC(poly, 'x');"
// Monomorphic KeyedStoreIC variant.
5330 TEST(WeakMapInMonomorphicKeyedStoreIC) {
5331 CheckWeakness("function keyedStoreIC(obj, field, value) {"
5332 " obj[field] = value;"
5335 " var proto = {'name' : 'weak'};"
5336 " var obj = Object.create(proto);"
5337 " keyedStoreIC(obj, 'x');"
5338 " keyedStoreIC(obj, 'x');"
5339 " keyedStoreIC(obj, 'x');"
// Polymorphic KeyedStoreIC variant.
5345 TEST(WeakMapInPolymorphicKeyedStoreIC) {
5347 "function keyedStoreIC(obj, field, value) {"
5348 " obj[field] = value;"
5351 " var proto = {'name' : 'weak'};"
5352 " var obj = Object.create(proto);"
5353 " keyedStoreIC(obj, 'x');"
5354 " keyedStoreIC(obj, 'x');"
5355 " keyedStoreIC(obj, 'x');"
5356 " var poly = Object.create(proto);"
5358 " keyedStoreIC(poly, 'x');"
// CompareNilIC variant: comparing against null embeds a map; the prototype
// it references must still be collectible.
5364 TEST(WeakMapInMonomorphicCompareNilIC) {
5365 CheckWeakness("function compareNilIC(obj) {"
5366 " return obj == null;"
5369 " var proto = {'name' : 'weak'};"
5370 " var obj = Object.create(proto);"
5371 " compareNilIC(obj);"
5372 " compareNilIC(obj);"
5373 " compareNilIC(obj);"
// Looks up a global property by name and returns it cast to a JSFunction.
// Asserts (via ToHandleChecked) that the property exists.
5379 Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
5380 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
5381 Handle<Object> obj =
5382 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
5383 return Handle<JSFunction>::cast(obj);
// Asserts that the IC identified by |kind|/|ic_slot| is in |state|.
// Vector-based ICs (LOAD/KEYED_LOAD/CALL) are checked through the shared
// function's feedback vector; other kinds fall through to scanning the code
// object for the first IC stub of that kind.
5387 void CheckIC(Code* code, Code::Kind kind, SharedFunctionInfo* shared,
5388 int ic_slot, InlineCacheState state) {
5389 if (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
5390 kind == Code::CALL_IC) {
5391 TypeFeedbackVector* vector = shared->feedback_vector();
5392 FeedbackVectorICSlot slot(ic_slot);
5393 if (kind == Code::LOAD_IC) {
5394 LoadICNexus nexus(vector, slot);
5395 CHECK_EQ(nexus.StateFromFeedback(), state);
5396 } else if (kind == Code::KEYED_LOAD_IC) {
5397 KeyedLoadICNexus nexus(vector, slot);
5398 CHECK_EQ(nexus.StateFromFeedback(), state);
5399 } else if (kind == Code::CALL_IC) {
5400 CallICNexus nexus(vector, slot);
5401 CHECK_EQ(nexus.StateFromFeedback(), state);
5404 Code* ic = FindFirstIC(code, kind);
5405 CHECK(ic->is_inline_cache_stub());
5406 CHECK(ic->ic_state() == state);
// A GC between invocations must not degrade a monomorphic LoadIC: the state
// is checked before and after re-running the test function.
5411 TEST(MonomorphicStaysMonomorphicAfterGC) {
5412 if (FLAG_always_opt) return;
5413 CcTest::InitializeVM();
5414 Isolate* isolate = CcTest::i_isolate();
5415 Heap* heap = isolate->heap();
5416 v8::HandleScope scope(CcTest::isolate());
5418 "function loadIC(obj) {"
5421 "function testIC() {"
5422 " var proto = {'name' : 'weak'};"
5423 " var obj = Object.create(proto);"
5429 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5431 v8::HandleScope scope(CcTest::isolate());
5432 CompileRun("(testIC())");
5434 heap->CollectAllGarbage();
5435 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
5437 v8::HandleScope scope(CcTest::isolate());
5438 CompileRun("(testIC())");
5440 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
// Companion to the test above for the POLYMORPHIC state: GC must not reset
// or degrade a polymorphic LoadIC.
5444 TEST(PolymorphicStaysPolymorphicAfterGC) {
5445 if (FLAG_always_opt) return;
5446 CcTest::InitializeVM();
5447 Isolate* isolate = CcTest::i_isolate();
5448 Heap* heap = isolate->heap();
5449 v8::HandleScope scope(CcTest::isolate());
5451 "function loadIC(obj) {"
5454 "function testIC() {"
5455 " var proto = {'name' : 'weak'};"
5456 " var obj = Object.create(proto);"
5460 " var poly = Object.create(proto);"
5465 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5467 v8::HandleScope scope(CcTest::isolate());
5468 CompileRun("(testIC())");
5470 heap->CollectAllGarbage();
5471 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
5473 v8::HandleScope scope(CcTest::isolate());
5474 CompileRun("(testIC())");
5476 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
// NOTE(review): the TEST(...) header for this block (original line ~5480,
// presumably TEST(WeakCells)) is elided in this listing.
// Exercise WeakCell semantics: a cell pointing at a dead new-space array
// survives scavenges but is cleared by a full GC, while a cell whose target
// is kept alive (|survivor|) is never cleared.
5481 CcTest::InitializeVM();
5482 Isolate* isolate = CcTest::i_isolate();
5483 v8::internal::Heap* heap = CcTest::heap();
5484 v8::internal::Factory* factory = isolate->factory();
5486 HandleScope outer_scope(isolate);
5487 Handle<WeakCell> weak_cell1;
5489 HandleScope inner_scope(isolate);
5490 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
5491 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
5494 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5495 Handle<WeakCell> weak_cell2;
5497 HandleScope inner_scope(isolate);
5498 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
5500 CHECK(weak_cell1->value()->IsFixedArray());
5501 CHECK_EQ(*survivor, weak_cell2->value());
5502 heap->CollectGarbage(NEW_SPACE);
5503 CHECK(weak_cell1->value()->IsFixedArray());
5504 CHECK_EQ(*survivor, weak_cell2->value());
5505 heap->CollectGarbage(NEW_SPACE);
5506 CHECK(weak_cell1->value()->IsFixedArray());
5507 CHECK_EQ(*survivor, weak_cell2->value());
5508 heap->CollectAllAvailableGarbage();
5509 CHECK(weak_cell1->cleared());
5510 CHECK_EQ(*survivor, weak_cell2->value());
// Same WeakCell invariants under incremental marking: cells allocated while
// marking is in progress must survive scavenges, and after a full GC only
// the cell whose target (|survivor|) is live keeps its value.
5514 TEST(WeakCellsWithIncrementalMarking) {
5515 CcTest::InitializeVM();
5516 Isolate* isolate = CcTest::i_isolate();
5517 v8::internal::Heap* heap = CcTest::heap();
5518 v8::internal::Factory* factory = isolate->factory();
5521 HandleScope outer_scope(isolate);
5522 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5523 Handle<WeakCell> weak_cells[N];
5525 for (int i = 0; i < N; i++) {
5526 HandleScope inner_scope(isolate);
5527 Handle<HeapObject> value =
5528 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
5529 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
5530 CHECK(weak_cell->value()->IsFixedArray());
5531 IncrementalMarking* marking = heap->incremental_marking();
5532 if (marking->IsStopped()) marking->Start(Heap::kNoGCFlags);
5533 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5534 heap->CollectGarbage(NEW_SPACE);
5535 CHECK(weak_cell->value()->IsFixedArray());
5536 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
5538 heap->CollectAllGarbage();
5539 CHECK_EQ(*survivor, weak_cells[0]->value());
5540 for (int i = 1; i < N; i++) {
5541 CHECK(weak_cells[i]->cleared());
// Regression test: forces a GC (allocation_timeout = 1) in the middle of an
// optimized function whose add instruction changes representation, checking
// that new-space promotion does not crash under stress compaction.
5547 TEST(AddInstructionChangesNewSpacePromotion) {
5548 i::FLAG_allow_natives_syntax = true;
5549 i::FLAG_expose_gc = true;
5550 i::FLAG_stress_compaction = true;
5551 i::FLAG_gc_interval = 1000;
5552 CcTest::InitializeVM();
5553 if (!i::FLAG_allocation_site_pretenuring) return;
5554 v8::HandleScope scope(CcTest::isolate());
5555 Isolate* isolate = CcTest::i_isolate();
5556 Heap* heap = isolate->heap();
5559 "function add(a, b) {"
5563 "add(\"a\", \"b\");"
5564 "var oldSpaceObject;"
5566 "function crash(x) {"
5567 " var object = {a: null, b: null};"
5568 " var result = add(1.5, x | 0);"
5569 " object.a = result;"
5570 " oldSpaceObject = object;"
5575 "%OptimizeFunctionOnNextCall(crash);"
5578 v8::Handle<v8::Object> global = CcTest::global();
5579 v8::Handle<v8::Function> g =
5580 v8::Handle<v8::Function>::Cast(global->Get(v8_str("crash")));
5581 v8::Handle<v8::Value> args1[] = { v8_num(1) };
5582 heap->DisableInlineAllocation();
5583 heap->set_allocation_timeout(1);
5584 g->Call(global, 1, args1);
5585 heap->CollectAllGarbage();
// Fatal-error handler for CEntryStubOOM: exits 0 (test success) only when
// the fatal error originated from CALL_AND_RETRY_LAST, i.e. a genuine OOM.
5589 void OnFatalErrorExpectOOM(const char* location, const char* message) {
5590 // Exit with 0 if the location matches our expectation.
5591 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
// Drives the VM into an out-of-memory condition (gc-interval=1) and expects
// the fatal-error handler above to observe a CALL_AND_RETRY_LAST failure.
// Part of the JS source is elided in this listing.
5595 TEST(CEntryStubOOM) {
5596 i::FLAG_allow_natives_syntax = true;
5597 CcTest::InitializeVM();
5598 v8::HandleScope scope(CcTest::isolate());
5599 v8::V8::SetFatalErrorHandler(OnFatalErrorExpectOOM);
5601 v8::Handle<v8::Value> result = CompileRun(
5602 "%SetFlags('--gc-interval=1');"
5607 CHECK(result->IsNumber());
// No-op interrupt callback for Regress357137; only its scheduling matters.
5613 static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
// JS-callable hook ("interrupt") that requests an isolate interrupt, used to
// trigger a fake stack overflow inside the Regress357137 script.
5616 static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
5617 CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
// Regression test for crbug 357137: an interrupt (fake stack overflow)
// during eval of a function with many context locals must not corrupt the
// context; the closure must still read v0 == 42 afterwards.
5621 TEST(Regress357137) {
5622 CcTest::InitializeVM();
5623 v8::Isolate* isolate = CcTest::isolate();
5624 v8::HandleScope hscope(isolate);
5625 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5626 global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
5627 v8::FunctionTemplate::New(isolate, RequestInterrupt));
5628 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
5629 DCHECK(!context.IsEmpty());
5630 v8::Context::Scope cscope(context);
5632 v8::Local<v8::Value> result = CompileRun(
5634 "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
5635 "eval('function f() {' + locals + 'return function() { return v0; }; }');"
5636 "interrupt();" // This triggers a fake stack overflow in f.
5638 CHECK_EQ(42.0, result->ToNumber(isolate)->Value());
// Regression test for issue 507979: shrinking an object while a filtering
// HeapIterator is live creates a filler that shares mark bits with the next
// live object; iterating afterwards must not misinterpret those bits.
5642 TEST(Regress507979) {
5643 const int kFixedArrayLen = 10;
5644 CcTest::InitializeVM();
5645 Isolate* isolate = CcTest::i_isolate();
5646 Heap* heap = isolate->heap();
5647 HandleScope handle_scope(isolate);
5649 Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
5650 Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
5651 CHECK(heap->InNewSpace(o1->address()));
5652 CHECK(heap->InNewSpace(o2->address()));
5654 HeapIterator it(heap, i::HeapIterator::kFilterUnreachable);
5656 // Replace parts of an object placed before a live object with a filler. This
5657 // way the filler object shares the mark bits with the following live object.
5658 o1->Shrink(kFixedArrayLen - 1);
5660 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
5661 // Let's not optimize the loop away.
5662 CHECK(obj->address() != nullptr);
// Checks the interaction of in-place Array.prototype.shift trimming with
// concurrent sweeping: the trimmed backing store's page must either be done
// (or queued) sweeping, or the elements must already be marked black.
// Part of the JS source is elided in this listing.
5667 TEST(ArrayShiftSweeping) {
5668 i::FLAG_expose_gc = true;
5669 CcTest::InitializeVM();
5670 v8::HandleScope scope(CcTest::isolate());
5671 Isolate* isolate = CcTest::i_isolate();
5672 Heap* heap = isolate->heap();
5674 v8::Local<v8::Value> result = CompileRun(
5675 "var array = new Array(40000);"
5676 "var tmp = new Array(100000);"
5683 Handle<JSObject> o =
5684 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5685 CHECK(heap->InOldSpace(o->elements()));
5686 CHECK(heap->InOldSpace(*o));
5687 Page* page = Page::FromAddress(o->elements()->address());
5688 CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
5689 Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
// Uses a dedicated isolate (UNINITIALIZED_TEST) to verify that the scavenge
// promotion queue, which lives at the end of to-space, is correctly
// evacuated when semi-space copy would otherwise overwrite it. The inline
// comments below describe the fragmentation setup in detail.
5693 UNINITIALIZED_TEST(PromotionQueue) {
5694 i::FLAG_expose_gc = true;
5695 i::FLAG_max_semi_space_size = 2 * (Page::kPageSize / MB);
5696 v8::Isolate::CreateParams create_params;
5697 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
5698 v8::Isolate* isolate = v8::Isolate::New(create_params);
5699 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
5701 v8::Isolate::Scope isolate_scope(isolate);
5702 v8::HandleScope handle_scope(isolate);
5703 v8::Context::New(isolate)->Enter();
5704 Heap* heap = i_isolate->heap();
5705 NewSpace* new_space = heap->new_space();
5707 // In this test we will try to overwrite the promotion queue which is at the
5708 // end of to-space. To actually make that possible, we need at least two
5709 // semi-space pages and take advantage of fragmentation.
5710 // (1) Grow semi-space to two pages.
5711 // (2) Create a few small long living objects and call the scavenger to
5712 // move them to the other semi-space.
5713 // (3) Create a huge object, i.e., remainder of first semi-space page and
5714 // create another huge object which should be of maximum allocatable memory
5715 // size of the second semi-space page.
5716 // (4) Call the scavenger again.
5717 // What will happen is: the scavenger will promote the objects created in
5718 // (2) and will create promotion queue entries at the end of the second
5719 // semi-space page during the next scavenge when it promotes the objects to
5720 // the old generation. The first allocation of (3) will fill up the first
5721 // semi-space page. The second allocation in (3) will not fit into the
5722 // first semi-space page, but it will overwrite the promotion queue which
5723 // are in the second semi-space page. If the right guards are in place, the
5724 // promotion queue will be evacuated in that case.
5726 // Grow the semi-space to two pages to make semi-space copy overwrite the
5727 // promotion queue, which will be at the end of the second page.
5728 intptr_t old_capacity = new_space->TotalCapacity();
5730 // If we are in a low memory config, we can't grow to two pages and we can't
5731 // run this test. This also means the issue we are testing cannot arise, as
5732 // there is no fragmentation.
5733 if (new_space->IsAtMaximumCapacity()) return;
5736 CHECK(new_space->IsAtMaximumCapacity());
5737 CHECK(2 * old_capacity == new_space->TotalCapacity());
5739 // Call the scavenger two times to get an empty new space
5740 heap->CollectGarbage(NEW_SPACE);
5741 heap->CollectGarbage(NEW_SPACE);
5743 // First create a few objects which will survive a scavenge, and will get
5744 // promoted to the old generation later on. These objects will create
5745 // promotion queue entries at the end of the second semi-space page.
5746 const int number_handles = 12;
5747 Handle<FixedArray> handles[number_handles];
5748 for (int i = 0; i < number_handles; i++) {
5749 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5751 heap->CollectGarbage(NEW_SPACE);
5753 // Create the first huge object which will exactly fit the first semi-space
5755 int new_linear_size =
5756 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5757 *heap->new_space()->allocation_top_address());
5758 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize;
5759 Handle<FixedArray> first =
5760 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5761 CHECK(heap->InNewSpace(*first));
5763 // Create the second huge object of maximum allocatable second semi-space
5766 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5767 *heap->new_space()->allocation_top_address());
5768 length = Page::kMaxRegularHeapObjectSize / kPointerSize -
5769 FixedArray::kHeaderSize;
5770 Handle<FixedArray> second =
5771 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5772 CHECK(heap->InNewSpace(*second));
5774 // This scavenge will corrupt memory if the promotion queue is not
5776 heap->CollectGarbage(NEW_SPACE);
// Regression test for issue 388880: places an object flush against the end
// of a page, starts incremental marking, then migrates the object's map —
// the live-bytes adjustment in MigrateFastToFast must not crash at the page
// boundary.
5782 TEST(Regress388880) {
5783 i::FLAG_expose_gc = true;
5784 CcTest::InitializeVM();
5785 v8::HandleScope scope(CcTest::isolate());
5786 Isolate* isolate = CcTest::i_isolate();
5787 Factory* factory = isolate->factory();
5788 Heap* heap = isolate->heap();
5790 Handle<Map> map1 = Map::Create(isolate, 1);
5792 Map::CopyWithField(map1, factory->NewStringFromStaticChars("foo"),
5793 HeapType::Any(isolate), NONE, Representation::Tagged(),
5794 OMIT_TRANSITION).ToHandleChecked();
5796 int desired_offset = Page::kPageSize - map1->instance_size();
5798 // Allocate fixed array in old pointer space so, that object allocated
5799 // afterwards would end at the end of the page.
5801 SimulateFullSpace(heap->old_space());
5802 int padding_size = desired_offset - Page::kObjectStartOffset;
5803 int padding_array_length =
5804 (padding_size - FixedArray::kHeaderSize) / kPointerSize;
5806 Handle<FixedArray> temp2 =
5807 factory->NewFixedArray(padding_array_length, TENURED);
5808 Page* page = Page::FromAddress(temp2->address());
5809 CHECK_EQ(Page::kObjectStartOffset, page->Offset(temp2->address()));
5812 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
5813 o->set_properties(*factory->empty_fixed_array());
5815 // Ensure that the object allocated where we need it.
5816 Page* page = Page::FromAddress(o->address());
5817 CHECK_EQ(desired_offset, page->Offset(o->address()));
5819 // Now we have an object right at the end of the page.
5821 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5822 // that would cause crash.
5823 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5825 marking->Start(Heap::kNoGCFlags);
5826 CHECK(marking->IsMarking());
5828 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5829 // when it calls heap->AdjustLiveBytes(...).
5830 JSObject::MigrateToMap(o, map2);
// NOTE(review): the TEST(...) header for this block (original line ~5834,
// presumably a Regress test) is elided in this listing.
// Incrementally marks a WeakMap's backing store black, then mutates the map
// from JS so the backing store is replaced mid-marking; the subsequent
// hurried old-space GC must handle the stale-but-marked table safely.
5835 i::FLAG_expose_gc = true;
5836 CcTest::InitializeVM();
5837 v8::HandleScope scope(CcTest::isolate());
5838 Isolate* isolate = CcTest::i_isolate();
5839 Heap* heap = isolate->heap();
5840 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5841 v8::Local<v8::Value> result = CompileRun(
5842 "var weak_map = new WeakMap();"
5843 "var future_keys = [];"
5844 "for (var i = 0; i < 50; i++) {"
5845 " var key = {'k' : i + 0.1};"
5846 " weak_map.set(key, 1);"
5847 " future_keys.push({'x' : i + 0.2});"
5850 if (marking->IsStopped()) {
5851 marking->Start(Heap::kNoGCFlags);
5853 // Incrementally mark the backing store.
5854 Handle<JSObject> obj =
5855 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5856 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
5857 while (!Marking::IsBlack(
5858 Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
5859 !marking->IsStopped()) {
5860 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5862 // Stash the backing store in a handle.
5863 Handle<Object> save(weak_map->table(), isolate);
5864 // The following line will update the backing store.
5866 "for (var i = 0; i < 50; i++) {"
5867 " weak_map.set(future_keys[i], i);"
5869 heap->incremental_marking()->set_should_hurry(true);
5870 heap->CollectGarbage(OLD_SPACE);
// Regression test for issue 442710: left-trimming a global array via shift()
// followed by an old-space GC must not crash.
5874 TEST(Regress442710) {
5875 CcTest::InitializeVM();
5876 Isolate* isolate = CcTest::i_isolate();
5877 Heap* heap = isolate->heap();
5878 Factory* factory = isolate->factory();
5880 HandleScope sc(isolate);
5881 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
5882 Handle<JSArray> array = factory->NewJSArray(2);
5884 Handle<String> name = factory->InternalizeUtf8String("testArray");
5885 JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
5886 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5887 heap->CollectGarbage(OLD_SPACE);
5891 HEAP_TEST(NumberStringCacheSize) {
5892 // Test that the number-string cache has not been resized in the snapshot.
5893 CcTest::InitializeVM();
5894 Isolate* isolate = CcTest::i_isolate();
5895 if (!isolate->snapshot_available()) return;
5896 Heap* heap = isolate->heap();
// The cache stores key/value pairs, hence length == 2 * entry count.
5897 CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
5898 heap->number_string_cache()->length());
// NOTE(review): the TEST(...) header for this block (original line ~5902)
// is elided in this listing.
// Verifies prototypes are held weakly from maps: cls.prototype stays alive
// while a live object's map references it, and is collected only after that
// map is made unreachable (a.x.__proto__ reassigned).
5903 CcTest::InitializeVM();
5904 Isolate* isolate = CcTest::i_isolate();
5905 Heap* heap = isolate->heap();
5906 Factory* factory = isolate->factory();
5907 HandleScope scope(isolate);
5908 CompileRun("function cls() { this.x = 10; }");
5909 Handle<WeakCell> weak_prototype;
5911 HandleScope inner_scope(isolate);
5912 v8::Local<v8::Value> result = CompileRun("cls.prototype");
5913 Handle<JSObject> proto =
5914 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5915 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
5917 CHECK(!weak_prototype->cleared());
5921 "cls.prototype = null;");
5922 for (int i = 0; i < 4; i++) {
5923 heap->CollectAllGarbage();
5925 // The map of a.x keeps prototype alive
5926 CHECK(!weak_prototype->cleared());
5927 // Change the map of a.x and make the previous map garbage collectable.
5928 CompileRun("a.x.__proto__ = {};");
5929 for (int i = 0; i < 4; i++) {
5930 heap->CollectAllGarbage();
5932 CHECK(weak_prototype->cleared());
// Creates a fresh map with a JS-allocated prototype, registers it with the
// heap's retained-maps list, and returns the map's WeakCell so callers can
// observe when the map is collected.
5936 Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
5937 HandleScope inner_scope(isolate);
5938 Handle<Map> map = Map::Create(isolate, 1);
5939 v8::Local<v8::Value> result =
5940 CompileRun("(function () { return {x : 10}; })();");
5941 Handle<JSObject> proto =
5942 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5943 Map::SetPrototype(map, proto);
5944 heap->AddRetainedMap(map);
5945 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
// Asserts the retain_maps_for_n_gc contract: an otherwise-dead retained map
// survives exactly |n| old-space GCs and is collected by GC number n + 1.
5949 void CheckMapRetainingFor(int n) {
5950 FLAG_retain_maps_for_n_gc = n;
5951 Isolate* isolate = CcTest::i_isolate();
5952 Heap* heap = isolate->heap();
5953 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
5954 CHECK(!weak_cell->cleared());
5955 for (int i = 0; i < n; i++) {
5956 heap->CollectGarbage(OLD_SPACE);
5958 CHECK(!weak_cell->cleared());
5959 heap->CollectGarbage(OLD_SPACE);
5960 CHECK(weak_cell->cleared());
// Exercises map retention for the default flag value and several explicit
// GC counts (0, 1, 7).
5964 TEST(MapRetaining) {
5965 CcTest::InitializeVM();
5966 v8::HandleScope scope(CcTest::isolate());
5967 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5968 CheckMapRetainingFor(0);
5969 CheckMapRetainingFor(1);
5970 CheckMapRetainingFor(7);
// Regression test: repeatedly adding a retained map while new space is full
// must not corrupt the retained-maps ArrayList when a GC is triggered by the
// addition itself.
5974 TEST(RegressArrayListGC) {
5975 FLAG_retain_maps_for_n_gc = 1;
5976 FLAG_incremental_marking = 0;
5977 FLAG_gc_global = true;
5978 CcTest::InitializeVM();
5979 v8::HandleScope scope(CcTest::isolate());
5980 Isolate* isolate = CcTest::i_isolate();
5981 Heap* heap = isolate->heap();
5982 AddRetainedMap(isolate, heap);
5983 Handle<Map> map = Map::Create(isolate, 1);
5984 heap->CollectGarbage(OLD_SPACE);
5985 // Force GC in old space on next addition of retained map.
5986 Map::WeakCellForMap(map);
// Fill new space so allocations below promote/trigger collection.
5987 SimulateFullSpace(CcTest::heap()->new_space());
5988 for (int i = 0; i < 10; i++) {
5989 heap->AddRetainedMap(map);
5991 heap->CollectGarbage(OLD_SPACE);
// Test body: smoke-tests Heap::TracePathToObject on a simple string object.
// NOTE(review): the TEST(...) header is missing from this extraction;
// presumably this is a DEBUG-only path-tracer test — confirm against the
// original file.
5997 CcTest::InitializeVM();
5998 v8::HandleScope scope(CcTest::isolate());
6000 v8::Local<v8::Value> result = CompileRun("'abc'");
6001 Handle<Object> o = v8::Utils::OpenHandle(*result);
6002 CcTest::i_isolate()->heap()->TracePathToObject(*o);
// Checks the invariant that no strong root is simultaneously writable after
// initialization and immortal/immovable.
6007 TEST(WritableVsImmortalRoots) {
6008 for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
6009 Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
6010 bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
6011 bool immortal = Heap::RootIsImmortalImmovable(root_index);
6012 // A root value can be writable, immortal, or neither, but not both.
6013 CHECK(!immortal || !writable);
// Allocates a fixed typed array of the given element type/length, right-trims
// elements_to_trim elements, and checks that the header survives and that a
// free-space filler was placed after the shrunken object when its byte size
// actually changed.
// NOTE(review): one parameter line (initial_length) and the trim-argument
// line are missing from this extraction.
6018 static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
6020 int elements_to_trim) {
6021 v8::HandleScope scope(CcTest::isolate());
6022 Isolate* isolate = CcTest::i_isolate();
6023 Factory* factory = isolate->factory();
6024 Heap* heap = isolate->heap();
6026 Handle<FixedTypedArrayBase> array =
6027 factory->NewFixedTypedArray(initial_length, type, true);
6028 int old_size = array->size();
6029 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
6032 // Check that free space filler is at the right place and did not smash the
6034 CHECK(array->IsFixedArrayBase());
6035 CHECK_EQ(initial_length - elements_to_trim, array->length());
6036 int new_size = array->size();
6037 if (new_size != old_size) {
6038 // Free space filler should be created in this case.
6039 Address next_obj_address = array->address() + array->size();
6040 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
// Final GC ensures the filler/trimmed layout is digestible by a full collection.
6042 heap->CollectAllAvailableGarbage();
// Regression test for crbug.com/472513: specific type/length/trim
// combinations used to smash the typed array header with the free-space
// filler during right-trimming.
6046 TEST(Regress472513) {
6047 CcTest::InitializeVM();
6048 v8::HandleScope scope(CcTest::isolate());
6050 // The combination of type/initial_length/elements_to_trim triggered
6051 // typed array header smashing with free space filler (crbug/472513).
// First group: trim by 6 elements at various element widths.
6054 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32, 6);
6055 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32 - 7, 6);
6056 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16, 6);
6057 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16 - 3, 6);
6058 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8, 6);
6059 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8 - 1, 6);
// Second group: trim by 3 elements.
6062 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16, 3);
6063 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16 - 3, 3);
6064 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8, 3);
6065 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8 - 1, 3);
6066 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 4, 3);
// Smoke test for WeakFixedArray: Add starting from an empty handle, Remove,
// Compact, then Add again must all work without crashing.
6070 TEST(WeakFixedArray) {
6071 CcTest::InitializeVM();
6072 v8::HandleScope scope(CcTest::isolate());
6074 Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
// Passing an empty Handle<Object>() makes Add allocate a fresh array.
6075 Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
6076 array->Remove(number);
6077 array->Compact<WeakFixedArray::NullCallback>();
6078 WeakFixedArray::Add(array, number);
// Checks that GC preprocesses raw stack trace data: before GC the stack trace
// array holds Code objects; after a full GC those entries are replaced by
// Smi positions and no Code objects remain.
6082 TEST(PreprocessStackTrace) {
6083 // Do not automatically trigger early GC.
6084 FLAG_gc_interval = -1;
6085 CcTest::InitializeVM();
6086 v8::HandleScope scope(CcTest::isolate());
6087 v8::TryCatch try_catch(CcTest::isolate());
6088 CompileRun("throw new Error();");
6089 CHECK(try_catch.HasCaught());
6090 Isolate* isolate = CcTest::i_isolate();
6091 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
6092 Handle<Name> key = isolate->factory()->stack_trace_symbol();
6093 Handle<Object> stack_trace =
6094 JSObject::GetProperty(exception, key).ToHandleChecked();
// Element 3 of the raw trace is a Code object before preprocessing.
6095 Handle<Object> code =
6096 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6097 CHECK(code->IsCode());
6099 isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");
// After GC the same slot holds a Smi (source position), not Code.
6101 Handle<Object> pos =
6102 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6103 CHECK(pos->IsSmi());
6105 Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
6106 int array_length = Smi::cast(stack_trace_array->length())->value();
6107 for (int i = 0; i < array_length; i++) {
6108 Handle<Object> element =
6109 Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
6110 CHECK(!element->IsCode());
// Flag set by the weak callback below so the test can observe collection.
6115 static bool utils_has_been_collected = false;
// Weak callback: records that the "utils" object died and resets the
// persistent handle passed as the callback parameter.
6117 static void UtilsHasBeenCollected(
6118 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
6119 utils_has_been_collected = true;
6120 data.GetParameter()->Reset();
// Checks that the natives "utils" object exposed via --expose-natives-as does
// not leak: after deleting the only strong reference, a full GC must fire the
// weak callback.
6124 TEST(BootstrappingExports) {
6125 FLAG_expose_natives_as = "natives";
6126 CcTest::InitializeVM();
6127 v8::Isolate* isolate = CcTest::isolate();
// Snapshot builds do not expose natives the same way; skip in that case.
6129 if (Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return;
6131 utils_has_been_collected = false;
6133 v8::Persistent<v8::Object> utils;
// Inner scope: grab natives.utils, then delete the property so the
// persistent handle below is the last reference.
6136 v8::HandleScope scope(isolate);
6137 v8::Handle<v8::Object> natives =
6138 CcTest::global()->Get(v8_str("natives"))->ToObject(isolate);
6139 utils.Reset(isolate, natives->Get(v8_str("utils"))->ToObject(isolate));
6140 natives->Delete(v8_str("utils"));
6143 utils.SetWeak(&utils, UtilsHasBeenCollected,
6144 v8::WeakCallbackType::kParameter);
6146 CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");
6148 CHECK(utils_has_been_collected);
// Test body: exposes the InternalArray constructor to JS and checks that
// instances built in loops never share a map with `a` and keep fast object
// elements.
// NOTE(review): the TEST(...) header and parts of the CompileRun script
// (including the definition of `a`) are missing from this extraction.
6153 FLAG_allow_natives_syntax = true;
6154 CcTest::InitializeVM();
6155 v8::Isolate* isolate = CcTest::isolate();
6156 v8::HandleScope scope(isolate);
6157 v8::Local<v8::Function> constructor =
6158 v8::Utils::ToLocal(CcTest::i_isolate()->internal_array_function());
6159 CcTest::global()->Set(v8_str("InternalArray"), constructor);
6161 v8::TryCatch try_catch(isolate);
6165 "for (var i = 0; i < 1000; i++) {"
6166 " var ai = new InternalArray(10000);"
6167 " if (%HaveSameMap(ai, a)) throw Error();"
6168 " if (!%HasFastObjectElements(ai)) throw Error();"
6170 "for (var i = 0; i < 1000; i++) {"
6171 " var ai = new InternalArray(10000);"
6172 " if (%HaveSameMap(ai, a)) throw Error();"
6173 " if (!%HasFastObjectElements(ai)) throw Error();"
6176 CHECK(!try_catch.HasCaught());
// Allocates a FixedArray of exactly `bytes` bytes in the requested space
// (NEW_SPACE or old space via TENURED) and asserts both the placement and the
// exact object size. Used by the allocation-counter tests below.
// NOTE(review): the `int elements =` declaration line is missing from this
// extraction (only its initializer at 6187 is visible).
6180 void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
6181 CHECK(bytes >= FixedArray::kHeaderSize);
6182 CHECK(bytes % kPointerSize == 0);
6183 Factory* factory = isolate->factory();
6184 HandleScope scope(isolate);
// Guarantee the allocation succeeds even under memory pressure.
6185 AlwaysAllocateScope always_allocate(isolate);
6187 static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
6188 Handle<FixedArray> array = factory->NewFixedArray(
6189 elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
6190 CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
6191 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
// Verifies that the new-space allocation counter advances by exactly the
// allocated byte count, is unaffected by scavenges, and behaves correctly
// across size_t overflow.
6195 TEST(NewSpaceAllocationCounter) {
6196 CcTest::InitializeVM();
6197 v8::HandleScope scope(CcTest::isolate());
6198 Isolate* isolate = CcTest::i_isolate();
6199 Heap* heap = isolate->heap();
6200 size_t counter1 = heap->NewSpaceAllocationCounter();
6201 heap->CollectGarbage(NEW_SPACE);
6202 const size_t kSize = 1024;
6203 AllocateInSpace(isolate, kSize, NEW_SPACE);
6204 size_t counter2 = heap->NewSpaceAllocationCounter();
6205 CHECK_EQ(kSize, counter2 - counter1);
// A scavenge must not change the counter.
6206 heap->CollectGarbage(NEW_SPACE);
6207 size_t counter3 = heap->NewSpaceAllocationCounter();
6208 CHECK_EQ(0U, counter3 - counter2);
6209 // Test counter overflow.
// size_t(-1) is the maximum value; start just below it so the loop wraps.
6210 size_t max_counter = -1;
6211 heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
6212 size_t start = heap->NewSpaceAllocationCounter();
6213 for (int i = 0; i < 20; i++) {
6214 AllocateInSpace(isolate, kSize, NEW_SPACE);
6215 size_t counter = heap->NewSpaceAllocationCounter();
// Unsigned wraparound keeps the per-iteration delta exact.
6216 CHECK_EQ(kSize, counter - start);
// Verifies the old-generation allocation counter: it grows by at least the
// allocated bytes (CHECK_LE pending v8:4148), stays put across scavenges,
// and survives size_t overflow.
6222 TEST(OldSpaceAllocationCounter) {
6223 CcTest::InitializeVM();
6224 v8::HandleScope scope(CcTest::isolate());
6225 Isolate* isolate = CcTest::i_isolate();
6226 Heap* heap = isolate->heap();
6227 size_t counter1 = heap->OldGenerationAllocationCounter();
// Two scavenges to flush any pending promotions before measuring.
6228 heap->CollectGarbage(NEW_SPACE);
6229 heap->CollectGarbage(NEW_SPACE);
6230 const size_t kSize = 1024;
6231 AllocateInSpace(isolate, kSize, OLD_SPACE);
6232 size_t counter2 = heap->OldGenerationAllocationCounter();
6233 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
6234 CHECK_LE(kSize, counter2 - counter1);
6235 heap->CollectGarbage(NEW_SPACE);
6236 size_t counter3 = heap->OldGenerationAllocationCounter();
6237 CHECK_EQ(0u, counter3 - counter2);
6238 AllocateInSpace(isolate, kSize, OLD_SPACE);
6239 heap->CollectGarbage(OLD_SPACE);
6240 size_t counter4 = heap->OldGenerationAllocationCounter();
6241 CHECK_LE(kSize, counter4 - counter3);
6242 // Test counter overflow.
6243 size_t max_counter = -1;
6244 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
6245 size_t start = heap->OldGenerationAllocationCounter();
6246 for (int i = 0; i < 20; i++) {
6247 AllocateInSpace(isolate, kSize, OLD_SPACE);
6248 size_t counter = heap->OldGenerationAllocationCounter();
6249 CHECK_LE(kSize, counter - start);
// Feeds synthetic allocation samples to the GC tracer and checks that the
// reported new-space throughput equals bytes-delta / time-delta.
// NOTE(review): the time1/time2/time3 and `size_t throughput =` declaration
// lines are missing from this extraction.
6255 TEST(NewSpaceAllocationThroughput) {
6256 CcTest::InitializeVM();
6257 v8::HandleScope scope(CcTest::isolate());
6258 Isolate* isolate = CcTest::i_isolate();
6259 Heap* heap = isolate->heap();
6260 GCTracer* tracer = heap->tracer();
6262 size_t counter1 = 1000;
6263 tracer->SampleAllocation(time1, counter1, 0);
6265 size_t counter2 = 2000;
6266 tracer->SampleAllocation(time2, counter2, 0);
6268 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6269 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6271 size_t counter3 = 30000;
6272 tracer->SampleAllocation(time3, counter3, 0);
6273 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6274 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
// Same as NewSpaceAllocationThroughput, but queries the throughput over an
// explicit 100ms time window argument.
// NOTE(review): the time1/time2/time3 and `size_t throughput =` declaration
// lines are missing from this extraction.
6278 TEST(NewSpaceAllocationThroughput2) {
6279 CcTest::InitializeVM();
6280 v8::HandleScope scope(CcTest::isolate());
6281 Isolate* isolate = CcTest::i_isolate();
6282 Heap* heap = isolate->heap();
6283 GCTracer* tracer = heap->tracer();
6285 size_t counter1 = 1000;
6286 tracer->SampleAllocation(time1, counter1, 0);
6288 size_t counter2 = 2000;
6289 tracer->SampleAllocation(time2, counter2, 0);
6291 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6292 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6294 size_t counter3 = 30000;
6295 tracer->SampleAllocation(time3, counter3, 0);
6296 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6297 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
// JS-callable check: asserts that no pending message object is left behind
// (the slot must hold the hole value).
// NOTE(review): the `Object* message =` declaration line is missing from this
// extraction (only its initializer at 6304 is visible).
6301 static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
6302 Isolate* isolate = CcTest::i_isolate();
6304 *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
6305 CHECK(message->IsTheHole());
// Checks that thrown-and-handled messages do not leak into the isolate's
// pending-message slot, both in the default configuration and with Turbofan
// try/catch/finally forced on.
// NOTE(review): the CompileRun scripts and the second run's setup are only
// partially visible in this extraction.
6309 TEST(MessageObjectLeak) {
6310 CcTest::InitializeVM();
6311 v8::Isolate* isolate = CcTest::isolate();
6312 v8::HandleScope scope(isolate);
6313 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
// Expose CheckLeak to the script as the global function `check`.
6314 global->Set(v8::String::NewFromUtf8(isolate, "check"),
6315 v8::FunctionTemplate::New(isolate, CheckLeak));
6316 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
6317 v8::Context::Scope cscope(context);
6321 " throw 'message 1';"
6326 " throw 'message 2';"
// Re-run with optimization forced to cover the Turbofan paths.
6333 const char* flag = "--turbo-filter=*";
6334 FlagList::SetFlagsFromString(flag, StrLength(flag));
6335 FLAG_always_opt = true;
6336 FLAG_turbo_try_catch = true;
6337 FLAG_turbo_try_finally = true;
// JS-callable check: asserts that two JSFunction arguments share the same
// SharedFunctionInfo (i.e. SFIs were canonicalized).
6343 static void CheckEqualSharedFunctionInfos(
6344 const v8::FunctionCallbackInfo<v8::Value>& args) {
6345 Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
6346 Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
6347 Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
6348 Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
6349 CHECK(fun1->shared() == fun2->shared());
// JS-callable helper: resets the argument function (and its SFI) back to the
// CompileLazy builtin, then forces a full GC so the discarded code can be
// collected.
6353 static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
6354 Isolate* isolate = CcTest::i_isolate();
6355 Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
6356 Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
6357 fun->ReplaceCode(*isolate->builtins()->CompileLazy());
6358 fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
6359 isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
// Checks that recompiling a function after its code was removed yields the
// same (canonical) SharedFunctionInfo, for both directly-returned and
// IIFE-wrapped closures.
// NOTE(review): the CompileRun script bodies are only partially visible in
// this extraction.
6363 TEST(CanonicalSharedFunctionInfo) {
6364 CcTest::InitializeVM();
6365 v8::Isolate* isolate = CcTest::isolate();
6366 v8::HandleScope scope(isolate);
6367 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
// Expose the two native helpers above as `check` and `remove`.
6368 global->Set(isolate, "check", v8::FunctionTemplate::New(
6369 isolate, CheckEqualSharedFunctionInfos));
6370 global->Set(isolate, "remove",
6371 v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
6372 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
6373 v8::Context::Scope cscope(context);
6375 "function f() { return function g() {}; }"
6382 "function f() { return (function() { return function g() {}; })(); }"
// Feeds synthetic old-generation allocation samples to the GC tracer and
// checks the reported throughput over a 100ms window.
// NOTE(review): the time1/time2/time3 and `size_t throughput =` declaration
// lines are missing from this extraction.
6390 TEST(OldGenerationAllocationThroughput) {
6391 CcTest::InitializeVM();
6392 v8::HandleScope scope(CcTest::isolate());
6393 Isolate* isolate = CcTest::i_isolate();
6394 Heap* heap = isolate->heap();
6395 GCTracer* tracer = heap->tracer();
6397 size_t counter1 = 1000;
// Old-generation counter is the third SampleAllocation argument.
6398 tracer->SampleAllocation(time1, 0, counter1);
6400 size_t counter2 = 2000;
6401 tracer->SampleAllocation(time2, 0, counter2);
6403 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6404 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6406 size_t counter3 = 30000;
6407 tracer->SampleAllocation(time3, 0, counter3);
6409 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6410 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
// Samples equal new-space and old-generation allocation counters and checks
// the combined throughput is double the per-space rate.
// NOTE(review): the time1/time2/time3 declaration lines are missing from this
// extraction.
6414 TEST(AllocationThroughput) {
6415 CcTest::InitializeVM();
6416 v8::HandleScope scope(CcTest::isolate());
6417 Isolate* isolate = CcTest::i_isolate();
6418 Heap* heap = isolate->heap();
6419 GCTracer* tracer = heap->tracer();
6421 size_t counter1 = 1000;
// Same counter for both spaces, hence the factor of 2 in the checks below.
6422 tracer->SampleAllocation(time1, counter1, counter1);
6424 size_t counter2 = 2000;
6425 tracer->SampleAllocation(time2, counter2, counter2);
6426 size_t throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6427 CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
6429 size_t counter3 = 30000;
6430 tracer->SampleAllocation(time3, counter3, counter3);
6431 throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6432 CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);
// Checks SlotsBuffer::RemoveObjectSlots for both plain slot entries and typed
// (EMBEDDED_OBJECT_SLOT) entries: slots inside the removed object's address
// range must be rewritten to a harmless canonical slot (the length field of
// the empty fixed array).
// NOTE(review): some argument lines of RemoveObjectSlots and the buffer
// cleanup at the end are missing from this extraction.
6436 TEST(SlotsBufferObjectSlotsRemoval) {
6437 CcTest::InitializeVM();
6438 v8::HandleScope scope(CcTest::isolate());
6439 Isolate* isolate = CcTest::i_isolate();
6440 Heap* heap = isolate->heap();
6441 Factory* factory = isolate->factory();
6443 SlotsBuffer* buffer = new SlotsBuffer(NULL);
6444 void* fake_object[1];
6446 Handle<FixedArray> array = factory->NewFixedArray(2, TENURED);
6447 CHECK(heap->old_space()->Contains(*array));
// Plant a bogus pointer so the slot looks like it needs updating.
6448 array->set(0, reinterpret_cast<Object*>(fake_object), SKIP_WRITE_BARRIER);
6450 // Firstly, let's test the regular slots buffer entry.
6451 buffer->Add(HeapObject::RawField(*array, FixedArray::kHeaderSize));
6452 CHECK(reinterpret_cast<void*>(buffer->Get(0)) ==
6453 HeapObject::RawField(*array, FixedArray::kHeaderSize));
6454 SlotsBuffer::RemoveObjectSlots(CcTest::i_isolate()->heap(), buffer,
6456 array->address() + array->Size());
// The removed slot is redirected to the empty fixed array's length field.
6457 CHECK(reinterpret_cast<void*>(buffer->Get(0)) ==
6458 HeapObject::RawField(heap->empty_fixed_array(),
6459 FixedArrayBase::kLengthOffset));
6461 // Secondly, let's test the typed slots buffer entry.
6462 SlotsBuffer::AddTo(NULL, &buffer, SlotsBuffer::EMBEDDED_OBJECT_SLOT,
6463 array->address() + FixedArray::kHeaderSize,
6464 SlotsBuffer::FAIL_ON_OVERFLOW);
// Typed entries occupy two buffer cells: the slot type, then the address.
6465 CHECK(reinterpret_cast<void*>(buffer->Get(1)) ==
6466 reinterpret_cast<Object**>(SlotsBuffer::EMBEDDED_OBJECT_SLOT));
6467 CHECK(reinterpret_cast<void*>(buffer->Get(2)) ==
6468 HeapObject::RawField(*array, FixedArray::kHeaderSize));
6469 SlotsBuffer::RemoveObjectSlots(CcTest::i_isolate()->heap(), buffer,
6471 array->address() + array->Size());
6472 CHECK(reinterpret_cast<void*>(buffer->Get(1)) ==
6473 HeapObject::RawField(heap->empty_fixed_array(),
6474 FixedArrayBase::kLengthOffset));
6475 CHECK(reinterpret_cast<void*>(buffer->Get(2)) ==
6476 HeapObject::RawField(heap->empty_fixed_array(),
6477 FixedArrayBase::kLengthOffset));
// Sanity-checks ContextMeasure: the native context's measured size/count must
// be non-trivial (>= 1000 objects, >= 50000 bytes) yet bounded above by a
// walk over every object in the heap.
6482 TEST(ContextMeasure) {
6483 CcTest::InitializeVM();
6484 v8::HandleScope scope(CcTest::isolate());
6485 Isolate* isolate = CcTest::i_isolate();
6486 LocalContext context;
// Upper bounds: total size and count of all live heap objects.
6488 int size_upper_limit = 0;
6489 int count_upper_limit = 0;
6490 HeapIterator it(CcTest::heap());
6491 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6492 size_upper_limit += obj->Size();
6493 count_upper_limit++;
6496 ContextMeasure measure(*isolate->native_context());
6498 PrintF("Context size : %d bytes\n", measure.Size());
6499 PrintF("Context object count: %d\n", measure.Count());
6501 CHECK_LE(1000, measure.Count());
6502 CHECK_LE(50000, measure.Size());
6504 CHECK_LE(measure.Count(), count_upper_limit);
6505 CHECK_LE(measure.Size(), size_upper_limit);
// Checks that Script::Iterator visits exactly the Script objects that a full
// heap walk finds (count difference must be zero).
6509 TEST(ScriptIterator) {
6510 CcTest::InitializeVM();
6511 v8::HandleScope scope(CcTest::isolate());
6512 Isolate* isolate = CcTest::i_isolate();
6513 Heap* heap = CcTest::heap();
6514 LocalContext context;
// GC first so the heap walk sees a stable set of live objects.
6516 heap->CollectAllGarbage();
6518 int script_count = 0;
6520 HeapIterator it(heap);
6521 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6522 if (obj->IsScript()) script_count++;
// The iterator must decrement the count back to exactly zero.
6527 Script::Iterator iterator(isolate);
6528 while (iterator.Next()) script_count--;
6531 CHECK_EQ(0, script_count);
// Checks that SharedFunctionInfo::Iterator visits exactly the script-backed
// SFIs that a full heap walk finds; SFIs without a script (API functions,
// C++ builtins) are excluded and must have no IC slots.
// NOTE(review): the `sfi_count` declaration/increment lines inside the heap
// walk are missing from this extraction.
6535 TEST(SharedFunctionInfoIterator) {
6536 CcTest::InitializeVM();
6537 v8::HandleScope scope(CcTest::isolate());
6538 Isolate* isolate = CcTest::i_isolate();
6539 Heap* heap = CcTest::heap();
6540 LocalContext context;
// Two full GCs to settle the object graph before counting.
6542 heap->CollectAllGarbage();
6543 heap->CollectAllGarbage();
6547 HeapIterator it(heap);
6548 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6549 if (!obj->IsSharedFunctionInfo()) continue;
6550 // Shared function infos without a script (API functions or C++ builtins)
6551 // are not returned by the iterator because they are not created from a
6552 // script. They are not interesting for type feedback vector anyways.
6553 SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
6554 if (shared->script()->IsUndefined()) {
6555 CHECK_EQ(0, shared->feedback_vector()->ICSlots());
// The iterator must decrement the count back to exactly zero.
6563 SharedFunctionInfo::Iterator iterator(isolate);
6564 while (iterator.Next()) sfi_count--;
6567 CHECK_EQ(0, sfi_count);
6570 } // namespace internal