1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "src/compilation-cache.h"
32 #include "src/context-measure.h"
33 #include "src/deoptimizer.h"
34 #include "src/execution.h"
35 #include "src/factory.h"
36 #include "src/global-handles.h"
37 #include "src/heap/gc-tracer.h"
38 #include "src/ic/ic.h"
39 #include "src/macro-assembler.h"
40 #include "src/snapshot/snapshot.h"
41 #include "test/cctest/cctest.h"
42 #include "test/cctest/heap-tester.h"
// Verifies basic Map invariants: the map is itself a heap object contained
// in the heap, its own map is the meta-map, and its instance type and
// instance size match the expected values.
// NOTE(review): the closing brace of this function is elided in this excerpt
// (original-line numbering jumps from 56 to 61).
49 static void CheckMap(Map* map, int type, int instance_size) {
50 CHECK(map->IsHeapObject());
52 CHECK(CcTest::heap()->Contains(map));
54 CHECK_EQ(CcTest::heap()->meta_map(), map->map());
55 CHECK_EQ(type, map->instance_type());
56 CHECK_EQ(instance_size, map->instance_size());
// Test body (its TEST(...) header is elided from this excerpt): spot-checks
// the canonical maps installed during heap setup — meta map, heap number,
// the SIMD128 value maps, fixed array, and string.
61 CcTest::InitializeVM();
62 Heap* heap = CcTest::heap();
63 CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
64 CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
// Expand CheckMap once per SIMD128 value type via the SIMD128_TYPES list.
65 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
66 CheckMap(heap->type##_map(), SIMD128_VALUE_TYPE, Type::kSize);
67 SIMD128_TYPES(SIMD128_TYPE)
// Variable-sized objects use kVariableSizeSentinel as their instance size.
69 CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
70 CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
// Checks that |obj| is an oddball (true/false/null/undefined) and that
// converting it to a string via Execution::ToString yields |string|.
74 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
75 CHECK(obj->IsOddball());
76 Handle<Object> handle(obj, isolate);
77 Object* print_string =
78 *Execution::ToString(isolate, handle).ToHandleChecked();
79 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Checks that the Smi |value| converts (via Execution::ToString) to the
// decimal string |string|.
83 static void CheckSmi(Isolate* isolate, int value, const char* string) {
84 Handle<Object> handle(Smi::FromInt(value), isolate);
85 Object* print_string =
86 *Execution::ToString(isolate, handle).ToHandleChecked();
87 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Checks that a freshly allocated number with value |value| converts to the
// string |string|.
91 static void CheckNumber(Isolate* isolate, double value, const char* string) {
92 Handle<Object> number = isolate->factory()->NewNumber(value);
93 CHECK(number->IsNumber());
94 Handle<Object> print_string =
95 Execution::ToString(isolate, number).ToHandleChecked();
96 CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
// Exercises Isolate::FindCodeObject: every pointer-aligned interior address
// of a Code object must resolve back to that object, while an address inside
// a *different* code object must not resolve to the first one.
// NOTE(review): several lines are elided in this excerpt (e.g. the CodeDesc
// declaration and GetCode call around original lines 107-109, and the
// closing brace of the for loop), per the gaps in the original numbering.
100 static void CheckFindCodeObject(Isolate* isolate) {
101 // Test FindCodeObject
104 Assembler assm(isolate, NULL, 0);
106 __ nop(); // supported on all architectures
110 Handle<Code> code = isolate->factory()->NewCode(
111 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
112 CHECK(code->IsCode());
114 HeapObject* obj = HeapObject::cast(*code);
115 Address obj_addr = obj->address();
// Every aligned interior address should map back to the code object.
117 for (int i = 0; i < obj->Size(); i += kPointerSize) {
118 Object* found = isolate->FindCodeObject(obj_addr + i);
119 CHECK_EQ(*code, found);
// A second, distinct code object: an address in its middle must not
// resolve to the first one.
122 Handle<Code> copy = isolate->factory()->NewCode(
123 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
124 HeapObject* obj_copy = HeapObject::cast(*copy);
125 Object* not_right = isolate->FindCodeObject(obj_copy->address() +
126 obj_copy->Size() / 2);
127 CHECK(not_right != *code);
// Test body (header elided): constructing a Handle around a null Object*
// must be legal and not crash.
132 CcTest::InitializeVM();
133 Isolate* isolate = CcTest::i_isolate();
134 HandleScope outer_scope(isolate);
135 LocalContext context;
136 Handle<Object> n(static_cast<Object*>(nullptr), isolate);
// Test body (header elided): exercises basic heap-object allocation —
// numbers (Smi vs. HeapNumber boundaries), strings, oddballs — and their
// ToString conversions, finishing with the FindCodeObject check.
142 CcTest::InitializeVM();
143 Isolate* isolate = CcTest::i_isolate();
144 Factory* factory = isolate->factory();
145 Heap* heap = isolate->heap();
147 HandleScope sc(isolate);
148 Handle<Object> value = factory->NewNumber(1.000123);
149 CHECK(value->IsHeapNumber());
150 CHECK(value->IsNumber());
151 CHECK_EQ(1.000123, value->Number());
// Integral doubles inside Smi range are allocated as Smis, not HeapNumbers.
153 value = factory->NewNumber(1.0);
154 CHECK(value->IsSmi());
155 CHECK(value->IsNumber());
156 CHECK_EQ(1.0, value->Number());
158 value = factory->NewNumberFromInt(1024);
159 CHECK(value->IsSmi());
160 CHECK(value->IsNumber());
161 CHECK_EQ(1024.0, value->Number());
// Both Smi range endpoints must still be Smis.
163 value = factory->NewNumberFromInt(Smi::kMinValue);
164 CHECK(value->IsSmi());
165 CHECK(value->IsNumber());
166 CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
168 value = factory->NewNumberFromInt(Smi::kMaxValue);
169 CHECK(value->IsSmi());
170 CHECK(value->IsNumber());
171 CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
// On 32-bit targets, values just outside Smi range become HeapNumbers.
// NOTE(review): the continuation lines of some CHECK_EQs below (original
// lines 185, 191) are elided in this excerpt.
173 #if !defined(V8_TARGET_ARCH_64_BIT)
174 // TODO(lrn): We need a NumberFromIntptr function in order to test this.
175 value = factory->NewNumberFromInt(Smi::kMinValue - 1);
176 CHECK(value->IsHeapNumber());
177 CHECK(value->IsNumber());
178 CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
181 value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
182 CHECK(value->IsHeapNumber());
183 CHECK(value->IsNumber());
184 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
187 value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
188 CHECK(value->IsHeapNumber());
189 CHECK(value->IsNumber());
190 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
193 // nan oddball checks
194 CHECK(factory->nan_value()->IsNumber());
195 CHECK(std::isnan(factory->nan_value()->Number()));
197 Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
198 CHECK(s->IsString());
199 CHECK_EQ(10, s->length());
201 Handle<String> object_string = Handle<String>::cast(factory->Object_string());
202 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
203 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));
205 // Check ToString for oddballs
206 CheckOddball(isolate, heap->true_value(), "true");
207 CheckOddball(isolate, heap->false_value(), "false");
208 CheckOddball(isolate, heap->null_value(), "null");
209 CheckOddball(isolate, heap->undefined_value(), "undefined");
211 // Check ToString for Smis
212 CheckSmi(isolate, 0, "0");
213 CheckSmi(isolate, 42, "42");
214 CheckSmi(isolate, -42, "-42");
216 // Check ToString for Numbers
217 CheckNumber(isolate, 1.1, "1.1");
219 CheckFindCodeObject(isolate);
// Checks a SIMD value's lanes against |lane_values|, then verifies each lane
// can be set to |other_value| and restored without disturbing the others,
// and that the value is truthy. NOTE(review): several loop braces and an
// if/else around the inner CHECKs (original lines 230, 234, 236, 238, 240)
// are elided in this excerpt.
223 template <typename T, typename LANE_TYPE, int LANES>
224 static void CheckSimdValue(T* value, LANE_TYPE lane_values[LANES],
225 LANE_TYPE other_value) {
226 // Check against lane_values, and check that all lanes can be set to
227 // other_value without disturbing the other lanes.
228 for (int i = 0; i < LANES; i++) {
229 CHECK_EQ(lane_values[i], value->get_lane(i));
231 for (int i = 0; i < LANES; i++) {
232 value->set_lane(i, other_value); // change the value
233 for (int j = 0; j < LANES; j++) {
235 CHECK_EQ(lane_values[j], value->get_lane(j));
237 CHECK_EQ(other_value, value->get_lane(j));
239 value->set_lane(i, lane_values[i]); // restore the lane
241 CHECK(value->BooleanValue()); // SIMD values are 'true'.
// Test body (header elided): allocates each SIMD128 value type through the
// factory, verifies its type predicate and lane accessors via
// CheckSimdValue, and — under OBJECT_PRINT — its printed form. The braces
// opening/closing the per-type scopes and the matching #ifdef OBJECT_PRINT
// lines are elided in this excerpt.
246 CcTest::InitializeVM();
247 Isolate* isolate = CcTest::i_isolate();
248 Factory* factory = isolate->factory();
250 HandleScope sc(isolate);
// Float32x4: also checks -0.0 sign preservation and NaN lane handling.
254 float lanes[4] = {1, 2, 3, 4};
255 float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
256 float signaling_NaN = std::numeric_limits<float>::signaling_NaN();
258 Handle<Float32x4> value = factory->NewFloat32x4(lanes);
259 CHECK(value->IsFloat32x4());
260 CheckSimdValue<Float32x4, float, 4>(*value, lanes, 3.14f);
262 // Check special lane values.
263 value->set_lane(1, -0.0);
264 CHECK_EQ(-0.0, value->get_lane(1));
265 CHECK(std::signbit(value->get_lane(1))); // Sign bit should be preserved.
266 value->set_lane(2, quiet_NaN);
267 CHECK(std::isnan(value->get_lane(2)));
268 value->set_lane(3, signaling_NaN);
269 CHECK(std::isnan(value->get_lane(3)));
272 // Check value printing.
274 value = factory->NewFloat32x4(lanes);
275 std::ostringstream os;
276 value->Float32x4Print(os);
277 CHECK_EQ("1, 2, 3, 4", os.str());
280 float special_lanes[4] = {0, -0.0, quiet_NaN, signaling_NaN};
281 value = factory->NewFloat32x4(special_lanes);
282 std::ostringstream os;
283 value->Float32x4Print(os);
284 // Value printing doesn't preserve signed zeroes.
285 CHECK_EQ("0, 0, NaN, NaN", os.str());
287 #endif // OBJECT_PRINT
// Int32x4.
291 int32_t lanes[4] = {-1, 0, 1, 2};
293 Handle<Int32x4> value = factory->NewInt32x4(lanes);
294 CHECK(value->IsInt32x4());
295 CheckSimdValue<Int32x4, int32_t, 4>(*value, lanes, 3);
298 std::ostringstream os;
299 value->Int32x4Print(os);
300 CHECK_EQ("-1, 0, 1, 2", os.str());
301 #endif // OBJECT_PRINT
// Bool32x4.
305 bool lanes[4] = {true, true, true, false};
307 Handle<Bool32x4> value = factory->NewBool32x4(lanes);
308 CHECK(value->IsBool32x4());
309 CheckSimdValue<Bool32x4, bool, 4>(*value, lanes, false);
312 std::ostringstream os;
313 value->Bool32x4Print(os);
314 CHECK_EQ("true, true, true, false", os.str());
315 #endif // OBJECT_PRINT
// Int16x8.
319 int16_t lanes[8] = {-1, 0, 1, 2, 3, 4, 5, -32768};
321 Handle<Int16x8> value = factory->NewInt16x8(lanes);
322 CHECK(value->IsInt16x8());
323 CheckSimdValue<Int16x8, int16_t, 8>(*value, lanes, 32767);
326 std::ostringstream os;
327 value->Int16x8Print(os);
328 CHECK_EQ("-1, 0, 1, 2, 3, 4, 5, -32768", os.str());
329 #endif // OBJECT_PRINT
// Bool16x8.
333 bool lanes[8] = {true, true, true, true, true, true, true, false};
335 Handle<Bool16x8> value = factory->NewBool16x8(lanes);
336 CHECK(value->IsBool16x8());
337 CheckSimdValue<Bool16x8, bool, 8>(*value, lanes, false);
340 std::ostringstream os;
341 value->Bool16x8Print(os);
342 CHECK_EQ("true, true, true, true, true, true, true, false", os.str());
343 #endif // OBJECT_PRINT
// Int8x16.
347 int8_t lanes[16] = {-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, -128};
349 Handle<Int8x16> value = factory->NewInt8x16(lanes);
350 CHECK(value->IsInt8x16());
351 CheckSimdValue<Int8x16, int8_t, 16>(*value, lanes, 127);
354 std::ostringstream os;
355 value->Int8x16Print(os);
356 CHECK_EQ("-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, -128",
358 #endif // OBJECT_PRINT
// Bool8x16.
362 bool lanes[16] = {true, true, true, true, true, true, true, false,
363 true, true, true, true, true, true, true, false};
365 Handle<Bool8x16> value = factory->NewBool8x16(lanes);
366 CHECK(value->IsBool8x16());
367 CheckSimdValue<Bool8x16, bool, 16>(*value, lanes, false);
370 std::ostringstream os;
371 value->Bool8x16Print(os);
373 "true, true, true, true, true, true, true, false, true, true, true, "
374 "true, true, true, true, false",
376 #endif // OBJECT_PRINT
// Test body (header elided): checks object-pointer alignment of an
// allocation request size and that Smi::FromInt round-trips at the range
// endpoints. NOTE(review): the declaration of |request| (original line 383)
// is elided in this excerpt.
382 CcTest::InitializeVM();
384 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
385 CHECK(Smi::FromInt(42)->IsSmi());
386 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
387 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
// Verifies that objects reachable from the global object survive a new-space
// GC, while objects rooted only in an inner HandleScope may be collected.
// NOTE(review): the braces opening/closing the two inner scopes (and a GC
// call between them) are elided in this excerpt per the numbering gaps.
391 TEST(GarbageCollection) {
392 CcTest::InitializeVM();
393 Isolate* isolate = CcTest::i_isolate();
394 Heap* heap = isolate->heap();
395 Factory* factory = isolate->factory();
397 HandleScope sc(isolate);
399 heap->CollectGarbage(NEW_SPACE);
401 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
402 Handle<String> name = factory->InternalizeUtf8String("theFunction");
403 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
404 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
405 Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
406 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
407 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
410 HandleScope inner_scope(isolate);
411 // Allocate a function and keep it in global object's property.
412 Handle<JSFunction> function = factory->NewFunction(name);
413 JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
414 // Allocate an object. Unrooted after leaving the scope.
415 Handle<JSObject> obj = factory->NewJSObject(function);
416 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
417 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
419 CHECK_EQ(Smi::FromInt(23),
420 *Object::GetProperty(obj, prop_name).ToHandleChecked());
421 CHECK_EQ(Smi::FromInt(24),
422 *Object::GetProperty(obj, prop_namex).ToHandleChecked());
425 heap->CollectGarbage(NEW_SPACE);
427 // Function should be alive.
428 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
429 // Check function is retained.
430 Handle<Object> func_value =
431 Object::GetProperty(global, name).ToHandleChecked();
432 CHECK(func_value->IsJSFunction());
433 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
436 HandleScope inner_scope(isolate);
437 // Allocate another object, make it reachable from global.
438 Handle<JSObject> obj = factory->NewJSObject(function);
439 JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
440 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
443 // After gc, it should survive.
444 heap->CollectGarbage(NEW_SPACE);
446 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
448 Object::GetProperty(global, obj_name).ToHandleChecked();
449 CHECK(obj->IsJSObject());
450 CHECK_EQ(Smi::FromInt(23),
451 *Object::GetProperty(obj, prop_name).ToHandleChecked());
// Allocates a string from the UTF-8 data |string| and verifies its length
// and per-character contents match the source.
455 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
456 HandleScope scope(isolate);
457 Handle<String> s = isolate->factory()->NewStringFromUtf8(
458 CStrVector(string)).ToHandleChecked();
459 CHECK_EQ(StrLength(string), s->length());
460 for (int index = 0; index < s->length(); index++) {
461 CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
// Test body (header elided): verifies string allocation for a range of
// lengths, from one character up to a longer sentence.
467 CcTest::InitializeVM();
468 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
470 VerifyStringAllocation(isolate, "a");
471 VerifyStringAllocation(isolate, "ab");
472 VerifyStringAllocation(isolate, "abc");
473 VerifyStringAllocation(isolate, "abcd");
474 VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
// Test body (header elided): allocates an ASCII string inside a HandleScope
// and checks its length matches the C string's.
479 CcTest::InitializeVM();
480 Isolate* isolate = CcTest::i_isolate();
481 Factory* factory = isolate->factory();
483 v8::HandleScope scope(CcTest::isolate());
484 const char* name = "Kasper the spunky";
485 Handle<String> string = factory->NewStringFromAsciiChecked(name);
486 CHECK_EQ(StrLength(name), string->length());
// Verifies that objects referenced by (strong) global handles survive a
// new-space GC even after the creating HandleScope is gone, and that the
// handles can be destroyed afterwards. NOTE(review): the declarations of
// h1..h4 (around original lines 496-501) and the inner scope's braces are
// elided in this excerpt.
490 TEST(GlobalHandles) {
491 CcTest::InitializeVM();
492 Isolate* isolate = CcTest::i_isolate();
493 Heap* heap = isolate->heap();
494 Factory* factory = isolate->factory();
495 GlobalHandles* global_handles = isolate->global_handles();
503 HandleScope scope(isolate);
505 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
506 Handle<Object> u = factory->NewNumber(1.12344);
508 h1 = global_handles->Create(*i);
509 h2 = global_handles->Create(*u);
510 h3 = global_handles->Create(*i);
511 h4 = global_handles->Create(*u);
514 // after gc, it should survive
515 heap->CollectGarbage(NEW_SPACE);
517 CHECK((*h1)->IsString());
518 CHECK((*h2)->IsHeapNumber());
519 CHECK((*h3)->IsString());
520 CHECK((*h4)->IsHeapNumber());
523 GlobalHandles::Destroy(h1.location());
524 GlobalHandles::Destroy(h3.location());
527 GlobalHandles::Destroy(h2.location());
528 GlobalHandles::Destroy(h4.location());
// Flag set by the weak callback below; tests reset it before making a
// handle weak and check it after GC.
532 static bool WeakPointerCleared = false;
// Weak-handle callback: the parameter is a (handle, id) pair; the magic id
// 1234 identifies the handle the test made weak, and seeing it proves the
// callback actually fired.
534 static void TestWeakGlobalHandleCallback(
535 const v8::WeakCallbackData<v8::Value, void>& data) {
536 std::pair<v8::Persistent<v8::Value>*, int>* p =
537 reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
538 data.GetParameter());
539 if (p->second == 1234) WeakPointerCleared = true;
// Verifies that a scavenge (new-space GC) treats weak global handles like
// strong roots: the weak handle's object survives and the callback does not
// fire. NOTE(review): the declarations of h1/h2 and the inner scope braces
// are elided in this excerpt.
544 TEST(WeakGlobalHandlesScavenge) {
545 i::FLAG_stress_compaction = false;
546 CcTest::InitializeVM();
547 Isolate* isolate = CcTest::i_isolate();
548 Heap* heap = isolate->heap();
549 Factory* factory = isolate->factory();
550 GlobalHandles* global_handles = isolate->global_handles();
552 WeakPointerCleared = false;
558 HandleScope scope(isolate);
560 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
561 Handle<Object> u = factory->NewNumber(1.12344);
563 h1 = global_handles->Create(*i);
564 h2 = global_handles->Create(*u);
// Make h2 weak, tagging it with the magic id the callback checks for.
567 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
568 GlobalHandles::MakeWeak(h2.location(),
569 reinterpret_cast<void*>(&handle_and_id),
570 &TestWeakGlobalHandleCallback);
572 // Scavenge treats weak pointers as normal roots.
573 heap->CollectGarbage(NEW_SPACE);
575 CHECK((*h1)->IsString());
576 CHECK((*h2)->IsHeapNumber());
578 CHECK(!WeakPointerCleared);
579 CHECK(!global_handles->IsNearDeath(h2.location()));
580 CHECK(!global_handles->IsNearDeath(h1.location()));
582 GlobalHandles::Destroy(h1.location());
583 GlobalHandles::Destroy(h2.location());
// Verifies that a full mark-compact GC clears a weak global handle whose
// object is otherwise unreachable (after promotion to old space), firing the
// weak callback, while a strong handle keeps its object alive.
// NOTE(review): the h1/h2 declarations, inner scope braces, and the
// post-GC IsNearDeath check on h2 are elided in this excerpt.
587 TEST(WeakGlobalHandlesMark) {
588 CcTest::InitializeVM();
589 Isolate* isolate = CcTest::i_isolate();
590 Heap* heap = isolate->heap();
591 Factory* factory = isolate->factory();
592 GlobalHandles* global_handles = isolate->global_handles();
594 WeakPointerCleared = false;
600 HandleScope scope(isolate);
602 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
603 Handle<Object> u = factory->NewNumber(1.12344);
605 h1 = global_handles->Create(*i);
606 h2 = global_handles->Create(*u);
609 // Make sure the objects are promoted.
610 heap->CollectGarbage(OLD_SPACE);
611 heap->CollectGarbage(NEW_SPACE);
612 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
614 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
615 GlobalHandles::MakeWeak(h2.location(),
616 reinterpret_cast<void*>(&handle_and_id),
617 &TestWeakGlobalHandleCallback);
618 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
619 CHECK(!GlobalHandles::IsNearDeath(h2.location()));
621 // Incremental marking potentially marked handles before they turned weak.
622 heap->CollectAllGarbage();
624 CHECK((*h1)->IsString());
626 CHECK(WeakPointerCleared);
627 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
629 GlobalHandles::Destroy(h1.location());
// Verifies the weak-handle lifecycle across GC kinds: a scavenge does not
// clear the weak handle, but a mark-compact (old-space) GC does, firing the
// callback. NOTE(review): the declaration of |h| and the inner scope braces
// are elided in this excerpt.
633 TEST(DeleteWeakGlobalHandle) {
634 i::FLAG_stress_compaction = false;
635 CcTest::InitializeVM();
636 Isolate* isolate = CcTest::i_isolate();
637 Heap* heap = isolate->heap();
638 Factory* factory = isolate->factory();
639 GlobalHandles* global_handles = isolate->global_handles();
641 WeakPointerCleared = false;
646 HandleScope scope(isolate);
648 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
649 h = global_handles->Create(*i);
652 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
653 GlobalHandles::MakeWeak(h.location(),
654 reinterpret_cast<void*>(&handle_and_id),
655 &TestWeakGlobalHandleCallback);
657 // Scanvenge does not recognize weak reference.
658 heap->CollectGarbage(NEW_SPACE);
660 CHECK(!WeakPointerCleared);
662 // Mark-compact treats weak reference properly.
663 heap->CollectGarbage(OLD_SPACE);
665 CHECK(WeakPointerCleared);
// Allocates a BytecodeArray from raw bytes, verifies its length, frame size,
// layout (bytecodes stored contiguously after the header) and contents, then
// checks everything survives a full GC.
669 TEST(BytecodeArray) {
670 static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
671 static const int kRawBytesSize = sizeof(kRawBytes);
672 static const int kFrameSize = 32;
674 CcTest::InitializeVM();
675 Isolate* isolate = CcTest::i_isolate();
676 Heap* heap = isolate->heap();
677 Factory* factory = isolate->factory();
678 HandleScope scope(isolate);
680 // Allocate and initialize BytecodeArray
681 Handle<BytecodeArray> array =
682 factory->NewBytecodeArray(kRawBytesSize, kRawBytes, kFrameSize);
684 CHECK(array->IsBytecodeArray());
685 CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
686 CHECK_EQ(array->frame_size(), kFrameSize);
// The bytecodes must live inside the object's own extent.
687 CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
688 CHECK_GE(array->address() + array->BytecodeArraySize(),
689 array->GetFirstBytecodeAddress() + array->length());
690 for (int i = 0; i < kRawBytesSize; i++) {
691 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
692 CHECK_EQ(array->get(i), kRawBytes[i]);
695 // Full garbage collection
696 heap->CollectAllGarbage();
698 // BytecodeArray should survive
699 CHECK_EQ(array->length(), kRawBytesSize);
700 CHECK_EQ(array->frame_size(), kFrameSize);
702 for (int i = 0; i < kRawBytesSize; i++) {
703 CHECK_EQ(array->get(i), kRawBytes[i]);
704 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
709 static const char* not_so_random_string_table[] = {
// For each C string in the NULL-terminated array |strings|, internalizes it
// (repeatedly, via both the char* and Vector overloads) and checks the
// results are internalized and equal to the source text. NOTE(review): the
// declaration of |a| (original line 778) and two CHECK(b.is_identical_to/…)
// style lines (783, 786) appear to be elided in this excerpt.
773 static void CheckInternalizedStrings(const char** strings) {
774 Isolate* isolate = CcTest::i_isolate();
775 Factory* factory = isolate->factory();
776 for (const char* string = *strings; *strings != 0; string = *strings++) {
777 HandleScope scope(isolate);
779 isolate->factory()->InternalizeUtf8String(CStrVector(string));
780 // InternalizeUtf8String may return a failure if a GC is needed.
781 CHECK(a->IsInternalizedString());
782 Handle<String> b = factory->InternalizeUtf8String(string);
784 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
785 b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
787 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
// Test body (header elided): internalizing the same table twice must be
// idempotent — the second pass hits the existing string-table entries.
793 CcTest::InitializeVM();
795 v8::HandleScope sc(CcTest::isolate());
796 CheckInternalizedStrings(not_so_random_string_table);
797 CheckInternalizedStrings(not_so_random_string_table);
// Verifies that properties can be set on both ordinary objects created from
// a function and on the function object itself.
801 TEST(FunctionAllocation) {
802 CcTest::InitializeVM();
803 Isolate* isolate = CcTest::i_isolate();
804 Factory* factory = isolate->factory();
806 v8::HandleScope sc(CcTest::isolate());
807 Handle<String> name = factory->InternalizeUtf8String("theFunction");
808 Handle<JSFunction> function = factory->NewFunction(name);
810 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
811 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
813 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
814 Handle<JSObject> obj = factory->NewJSObject(function);
815 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
816 CHECK_EQ(Smi::FromInt(23),
817 *Object::GetProperty(obj, prop_name).ToHandleChecked());
818 // Check that we can add properties to function objects.
819 JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
820 CHECK_EQ(Smi::FromInt(24),
821 *Object::GetProperty(function, prop_name).ToHandleChecked());
// Exercises add/delete of named properties in different orders, and checks
// that a plain string and its internalized form address the same property.
825 TEST(ObjectProperties) {
826 CcTest::InitializeVM();
827 Isolate* isolate = CcTest::i_isolate();
828 Factory* factory = isolate->factory();
830 v8::HandleScope sc(CcTest::isolate());
831 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
832 Handle<Object> object = Object::GetProperty(
833 CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
834 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
835 Handle<JSObject> obj = factory->NewJSObject(constructor);
836 Handle<String> first = factory->InternalizeUtf8String("first");
837 Handle<String> second = factory->InternalizeUtf8String("second");
839 Handle<Smi> one(Smi::FromInt(1), isolate);
840 Handle<Smi> two(Smi::FromInt(2), isolate);
// A fresh object has no own properties.
843 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
846 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
847 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
850 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
851 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
853 // add first and then second
854 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
855 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
856 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
857 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
859 // delete first and then second
860 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
861 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
862 JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
863 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
864 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
866 // add first and then second
867 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
868 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
869 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
870 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
872 // delete second and then first
873 JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
874 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
875 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
876 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
877 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
879 // check string and internalized string match
880 const char* string1 = "fisk";
881 Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
882 JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
883 Handle<String> s1_string = factory->InternalizeUtf8String(string1);
884 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));
886 // check internalized string and string match
887 const char* string2 = "fugl";
888 Handle<String> s2_string = factory->InternalizeUtf8String(string2);
889 JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
890 Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
891 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
// Test body (header elided): adding a property to a fresh object must
// transition it away from the function's initial map.
896 CcTest::InitializeVM();
897 Isolate* isolate = CcTest::i_isolate();
898 Factory* factory = isolate->factory();
900 v8::HandleScope sc(CcTest::isolate());
901 Handle<String> name = factory->InternalizeUtf8String("theFunction");
902 Handle<JSFunction> function = factory->NewFunction(name);
904 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
905 Handle<JSObject> obj = factory->NewJSObject(function);
906 Handle<Map> initial_map(function->initial_map());
909 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
910 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
911 CHECK_EQ(Smi::FromInt(23),
912 *Object::GetProperty(obj, prop_name).ToHandleChecked());
914 // Check the map has changed
915 CHECK(*initial_map != obj->map());
// Test body (header elided): exercises JSArray length handling — fast
// (smi-sized) lengths keep fast elements, while a length beyond Smi range
// forces dictionary (slow) elements; element stores update the length.
920 CcTest::InitializeVM();
921 Isolate* isolate = CcTest::i_isolate();
922 Factory* factory = isolate->factory();
924 v8::HandleScope sc(CcTest::isolate());
925 Handle<String> name = factory->InternalizeUtf8String("Array");
926 Handle<Object> fun_obj = Object::GetProperty(
927 CcTest::i_isolate()->global_object(), name).ToHandleChecked();
928 Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
930 // Allocate the object.
931 Handle<Object> element;
932 Handle<JSObject> object = factory->NewJSObject(function);
933 Handle<JSArray> array = Handle<JSArray>::cast(object);
934 // We just initialized the VM, no heap allocation failure yet.
935 JSArray::Initialize(array, 0);
937 // Set array length to 0.
938 JSArray::SetLength(array, 0);
939 CHECK_EQ(Smi::FromInt(0), array->length());
940 // Must be in fast mode.
941 CHECK(array->HasFastSmiOrObjectElements());
943 // array[length] = name.
944 JSReceiver::SetElement(isolate, array, 0, name, SLOPPY).Check();
945 CHECK_EQ(Smi::FromInt(1), array->length());
946 element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
947 CHECK_EQ(*element, *name);
949 // Set array length with larger than smi value.
950 JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);
952 uint32_t int_length = 0;
953 CHECK(array->length()->ToArrayIndex(&int_length));
954 CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
955 CHECK(array->HasDictionaryElements()); // Must be in slow mode.
957 // array[length] = name.
958 JSReceiver::SetElement(isolate, array, int_length, name, SLOPPY).Check();
959 uint32_t new_int_length = 0;
960 CHECK(array->length()->ToArrayIndex(&new_int_length));
961 CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
962 element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
963 CHECK_EQ(*element, *name);
964 element = Object::GetElement(isolate, array, 0).ToHandleChecked();
965 CHECK_EQ(*element, *name);
// Test body (header elided): copies a JSObject via Factory::CopyJSObject and
// verifies the clone shares property/element values with the original, then
// flips the clone's values and checks the two objects are now mirrored —
// i.e. the copy is independent of the original.
970 CcTest::InitializeVM();
971 Isolate* isolate = CcTest::i_isolate();
972 Factory* factory = isolate->factory();
974 v8::HandleScope sc(CcTest::isolate());
975 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
976 Handle<Object> object = Object::GetProperty(
977 CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
978 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
979 Handle<JSObject> obj = factory->NewJSObject(constructor);
980 Handle<String> first = factory->InternalizeUtf8String("first");
981 Handle<String> second = factory->InternalizeUtf8String("second");
983 Handle<Smi> one(Smi::FromInt(1), isolate);
984 Handle<Smi> two(Smi::FromInt(2), isolate);
986 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
987 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
989 JSReceiver::SetElement(isolate, obj, 0, first, SLOPPY).Check();
990 JSReceiver::SetElement(isolate, obj, 1, second, SLOPPY).Check();
// Clone it and verify value-equality of all properties and elements.
993 Handle<Object> value1, value2;
994 Handle<JSObject> clone = factory->CopyJSObject(obj);
995 CHECK(!clone.is_identical_to(obj));
997 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
998 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
999 CHECK_EQ(*value1, *value2);
1000 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1001 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1002 CHECK_EQ(*value1, *value2);
1004 value1 = Object::GetProperty(obj, first).ToHandleChecked();
1005 value2 = Object::GetProperty(clone, first).ToHandleChecked();
1006 CHECK_EQ(*value1, *value2);
1007 value1 = Object::GetProperty(obj, second).ToHandleChecked();
1008 value2 = Object::GetProperty(clone, second).ToHandleChecked();
1009 CHECK_EQ(*value1, *value2);
// Flip the clone's values; the original must be unaffected (cross-checks).
1012 JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
1013 JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();
1015 JSReceiver::SetElement(isolate, clone, 0, second, SLOPPY).Check();
1016 JSReceiver::SetElement(isolate, clone, 1, first, SLOPPY).Check();
1018 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1019 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
1020 CHECK_EQ(*value1, *value2);
1021 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
1022 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1023 CHECK_EQ(*value1, *value2);
1025 value1 = Object::GetProperty(obj, second).ToHandleChecked();
1026 value2 = Object::GetProperty(clone, first).ToHandleChecked();
1027 CHECK_EQ(*value1, *value2);
1028 value1 = Object::GetProperty(obj, first).ToHandleChecked();
1029 value2 = Object::GetProperty(clone, second).ToHandleChecked();
1030 CHECK_EQ(*value1, *value2);
// Allocates one-byte and multi-byte (3-byte UTF-8 sequence per character)
// strings of lengths 0..99, both internalized and plain, and checks the
// reported character length matches.
1034 TEST(StringAllocation) {
1035 CcTest::InitializeVM();
1036 Isolate* isolate = CcTest::i_isolate();
1037 Factory* factory = isolate->factory();
// One 3-byte UTF-8 sequence (a single code point) repeated |length| times.
1039 const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
1040 for (int length = 0; length < 100; length++) {
1041 v8::HandleScope scope(CcTest::isolate());
1042 char* non_one_byte = NewArray<char>(3 * length + 1);
1043 char* one_byte = NewArray<char>(length + 1);
1044 non_one_byte[3 * length] = 0;
1045 one_byte[length] = 0;
1046 for (int i = 0; i < length; i++) {
1048 non_one_byte[3 * i] = chars[0];
1049 non_one_byte[3 * i + 1] = chars[1];
1050 non_one_byte[3 * i + 2] = chars[2];
// Each 3-byte sequence decodes to one character, so length stays |length|.
1052 Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
1053 Vector<const char>(non_one_byte, 3 * length));
1054 CHECK_EQ(length, non_one_byte_sym->length());
1055 Handle<String> one_byte_sym =
1056 factory->InternalizeOneByteString(OneByteVector(one_byte, length));
1057 CHECK_EQ(length, one_byte_sym->length());
1058 Handle<String> non_one_byte_str =
1059 factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
1061 non_one_byte_str->Hash();
1062 CHECK_EQ(length, non_one_byte_str->length());
1063 Handle<String> one_byte_str =
1064 factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
1066 one_byte_str->Hash();
1067 CHECK_EQ(length, one_byte_str->length());
1068 DeleteArray(non_one_byte);
1069 DeleteArray(one_byte);
// Walks every object in |heap| and counts how many of the |size| handles
// in |objs| point at an object that the iterator actually visits.
1074 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1075 // Count the number of objects found in the heap.
1076 int found_count = 0;
1077 HeapIterator iterator(heap);
1078 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1079 for (int i = 0; i < size; i++) {
// Pointer-identity comparison between the dereferenced handle and the
// object returned by the heap iterator.
1080 if (*objs[i] == obj) {
// Allocate objects in several spaces (new space, old space, large-object
// space) plus a map, then verify a full heap iteration finds each of them
// exactly once via ObjectsFoundInHeap().
1090 CcTest::InitializeVM();
1091 Isolate* isolate = CcTest::i_isolate();
1092 Factory* factory = isolate->factory();
1093 v8::HandleScope scope(CcTest::isolate());
1095 // Array of objects to scan heap for.
1096 const int objs_count = 6;
1097 Handle<Object> objs[objs_count];
1098 int next_objs_index = 0;
1100 // Allocate a JS array to OLD_SPACE and NEW_SPACE
1101 objs[next_objs_index++] = factory->NewJSArray(10);
1102 objs[next_objs_index++] =
1103 factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, Strength::WEAK, TENURED);
1105 // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
1106 objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
1107 objs[next_objs_index++] =
1108 factory->NewStringFromStaticChars("abcdefghij", TENURED);
1110 // Allocate a large string (for large object space).
// One byte past the regular-object limit forces large-object allocation.
1111 int large_size = Page::kMaxRegularHeapObjectSize + 1;
1112 char* str = new char[large_size];
1113 for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
1114 str[large_size - 1] = '\0';
1115 objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
1118 // Add a Map object to look for.
1119 objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
// Every slot of objs must have been filled, and the heap iterator must
// report each tracked object exactly once.
1121 CHECK_EQ(objs_count, next_objs_index);
1122 CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
// Inverse of a FixedArray size computation: returns the element count of a
// FixedArray whose total size (header + elements) is |size| bytes.
1126 static int LenFromSize(int size) {
1127 return (size - FixedArray::kHeaderSize) / kPointerSize;
1131 HEAP_TEST(Regression39128) {
1132 // Test case for crbug.com/39128.
1133 CcTest::InitializeVM();
1134 Isolate* isolate = CcTest::i_isolate();
1135 Heap* heap = CcTest::heap();
1137 // Increase the chance of 'bump-the-pointer' allocation in old space.
1138 heap->CollectAllGarbage();
1140 v8::HandleScope scope(CcTest::isolate());
1142 // The plan: create JSObject which references objects in new space.
1143 // Then clone this object (forcing it to go into old space) and check
1144 // that region dirty marks are updated correctly.
1146 // Step 1: prepare a map for the object. We add 1 inobject property to it.
1147 // Create a map with single inobject property.
1148 Handle<Map> my_map = Map::Create(CcTest::i_isolate(), 1);
1149 int n_properties = my_map->GetInObjectProperties();
1150 CHECK_GT(n_properties, 0);
1152 int object_size = my_map->instance_size();
1154 // Step 2: allocate a lot of objects so to almost fill new space: we need
1155 // just enough room to allocate JSObject and thus fill the newspace.
// Each filler is the largest fixed array that still fits a regular page.
1157 int allocation_amount = Min(FixedArray::kMaxSize,
1158 Page::kMaxRegularHeapObjectSize + kPointerSize);
1159 int allocation_len = LenFromSize(allocation_amount);
1160 NewSpace* new_space = heap->new_space();
// Raw top/limit pointers are read directly so the remaining free space in
// new space can be computed exactly.
1161 Address* top_addr = new_space->allocation_top_address();
1162 Address* limit_addr = new_space->allocation_limit_address();
1163 while ((*limit_addr - *top_addr) > allocation_amount) {
1164 CHECK(!heap->always_allocate());
1165 Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
1166 CHECK(new_space->Contains(array));
1169 // Step 3: now allocate fixed array and JSObject to fill the whole new space.
1170 int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
1171 int fixed_array_len = LenFromSize(to_fill);
1172 CHECK(fixed_array_len < FixedArray::kMaxLength);
1174 CHECK(!heap->always_allocate());
1175 Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
1176 CHECK(new_space->Contains(array));
1178 Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
1179 CHECK(new_space->Contains(object));
1180 JSObject* jsobject = JSObject::cast(object);
1181 CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
1182 CHECK_EQ(0, jsobject->properties()->length());
1183 // Create a reference to object in new space in jsobject.
1184 FieldIndex index = FieldIndex::ForInObjectOffset(
1185 JSObject::kHeaderSize - kPointerSize);
1186 jsobject->FastPropertyAtPut(index, array);
// New space must now be completely full (top == limit).
1188 CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
1190 // Step 4: clone jsobject, but force always allocate first to create a clone
1191 // in old pointer space.
1192 Address old_space_top = heap->old_space()->top();
1193 AlwaysAllocateScope aa_scope(isolate);
1194 Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
1195 JSObject* clone = JSObject::cast(clone_obj);
1196 if (clone->address() != old_space_top) {
1197 // Alas, got allocated from free list, we cannot do checks.
1200 CHECK(heap->old_space()->Contains(clone->address()));
// Verifies that unoptimized code for an unused function is flushed after
// enough full GCs, and that calling the function recompiles it. Runs in a
// fresh isolate (UNINITIALIZED_TEST) so flag changes do not leak.
1204 UNINITIALIZED_TEST(TestCodeFlushing) {
1205 // If we do not flush code this test is invalid.
1206 if (!FLAG_flush_code) return;
1207 i::FLAG_allow_natives_syntax = true;
1208 i::FLAG_optimize_for_size = false;
1209 v8::Isolate::CreateParams create_params;
1210 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
1211 v8::Isolate* isolate = v8::Isolate::New(create_params);
// The public v8::Isolate and the internal i::Isolate are the same object;
// this cast is the standard cctest way to reach internal APIs.
1212 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
1214 Factory* factory = i_isolate->factory();
1216 v8::HandleScope scope(isolate);
1217 v8::Context::New(isolate)->Enter();
1218 const char* source =
1225 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1227 // This compile will add the code to the compilation cache.
1229 v8::HandleScope scope(isolate);
1233 // Check function is compiled.
1234 Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
1235 foo_name).ToHandleChecked();
1236 CHECK(func_value->IsJSFunction());
1237 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1238 CHECK(function->shared()->is_compiled());
1240 // The code will survive at least two GCs.
1241 i_isolate->heap()->CollectAllGarbage();
1242 i_isolate->heap()->CollectAllGarbage();
1243 CHECK(function->shared()->is_compiled());
1245 // Simulate several GCs that use full marking.
1246 const int kAgingThreshold = 6;
1247 for (int i = 0; i < kAgingThreshold; i++) {
1248 i_isolate->heap()->CollectAllGarbage();
1251 // foo should no longer be in the compilation cache
1252 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1253 CHECK(!function->is_compiled() || function->IsOptimized());
1254 // Call foo to get it recompiled.
1255 CompileRun("foo()");
1256 CHECK(function->shared()->is_compiled());
1257 CHECK(function->is_compiled());
// With --optimize-for-size, code that ran only once is pre-aged and flushed
// after a single additional GC; running the function again resets its age.
1264 TEST(TestCodeFlushingPreAged) {
1265 // If we do not flush code this test is invalid.
1266 if (!FLAG_flush_code) return;
1267 i::FLAG_allow_natives_syntax = true;
// optimize_for_size enables the pre-aging behavior under test.
1268 i::FLAG_optimize_for_size = true;
1269 CcTest::InitializeVM();
1270 Isolate* isolate = CcTest::i_isolate();
1271 Factory* factory = isolate->factory();
1272 v8::HandleScope scope(CcTest::isolate());
1273 const char* source = "function foo() {"
1279 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1281 // Compile foo, but don't run it.
1282 { v8::HandleScope scope(CcTest::isolate());
1286 // Check function is compiled.
1287 Handle<Object> func_value =
1288 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1289 CHECK(func_value->IsJSFunction());
1290 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1291 CHECK(function->shared()->is_compiled());
1293 // The code has been run so will survive at least one GC.
1294 CcTest::heap()->CollectAllGarbage();
1295 CHECK(function->shared()->is_compiled());
1297 // The code was only run once, so it should be pre-aged and collected on the
1299 CcTest::heap()->CollectAllGarbage();
1300 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1302 // Execute the function again twice, and ensure it is reset to the young age.
1303 { v8::HandleScope scope(CcTest::isolate());
1308 // The code will survive at least two GC now that it is young again.
1309 CcTest::heap()->CollectAllGarbage();
1310 CcTest::heap()->CollectAllGarbage();
1311 CHECK(function->shared()->is_compiled());
1313 // Simulate several GCs that use full marking.
1314 const int kAgingThreshold = 6;
1315 for (int i = 0; i < kAgingThreshold; i++) {
1316 CcTest::heap()->CollectAllGarbage();
1319 // foo should no longer be in the compilation cache
1320 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1321 CHECK(!function->is_compiled() || function->IsOptimized());
1322 // Call foo to get it recompiled.
1323 CompileRun("foo()");
1324 CHECK(function->shared()->is_compiled());
1325 CHECK(function->is_compiled());
// Code flushing interleaved with incremental marking: the function's code
// must be flushed after repeated incremental-marking GCs, and optimizing
// the function while it is enqueued as a flushing candidate must be safe.
1329 TEST(TestCodeFlushingIncremental) {
1330 // If we do not flush code this test is invalid.
1331 if (!FLAG_flush_code) return;
1332 i::FLAG_allow_natives_syntax = true;
1333 i::FLAG_optimize_for_size = false;
1334 CcTest::InitializeVM();
1335 Isolate* isolate = CcTest::i_isolate();
1336 Factory* factory = isolate->factory();
1337 v8::HandleScope scope(CcTest::isolate());
1338 const char* source = "function foo() {"
1344 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1346 // This compile will add the code to the compilation cache.
1347 { v8::HandleScope scope(CcTest::isolate());
1351 // Check function is compiled.
1352 Handle<Object> func_value =
1353 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1354 CHECK(func_value->IsJSFunction());
1355 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1356 CHECK(function->shared()->is_compiled());
1358 // The code will survive at least two GCs.
1359 CcTest::heap()->CollectAllGarbage();
1360 CcTest::heap()->CollectAllGarbage();
1361 CHECK(function->shared()->is_compiled());
1363 // Simulate several GCs that use incremental marking.
1364 const int kAgingThreshold = 6;
1365 for (int i = 0; i < kAgingThreshold; i++) {
1366 SimulateIncrementalMarking(CcTest::heap());
1367 CcTest::heap()->CollectAllGarbage();
1369 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1370 CHECK(!function->is_compiled() || function->IsOptimized());
1372 // This compile will compile the function again.
1373 { v8::HandleScope scope(CcTest::isolate());
1374 CompileRun("foo();");
1377 // Simulate several GCs that use incremental marking but make sure
1378 // the loop breaks once the function is enqueued as a candidate.
1379 for (int i = 0; i < kAgingThreshold; i++) {
1380 SimulateIncrementalMarking(CcTest::heap());
// A non-undefined next_function_link means the function is enqueued in the
// code-flushing candidate list.
1381 if (!function->next_function_link()->IsUndefined()) break;
1382 CcTest::heap()->CollectAllGarbage();
1385 // Force optimization while incremental marking is active and while
1386 // the function is enqueued as a candidate.
1387 { v8::HandleScope scope(CcTest::isolate());
1388 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1391 // Simulate one final GC to make sure the candidate queue is sane.
1392 CcTest::heap()->CollectAllGarbage();
1393 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1394 CHECK(function->is_compiled() || !function->IsOptimized());
// Code flushing candidates must survive a scavenge that runs while
// incremental marking is in progress, even when one candidate dies.
1398 TEST(TestCodeFlushingIncrementalScavenge) {
1399 // If we do not flush code this test is invalid.
1400 if (!FLAG_flush_code) return;
1401 i::FLAG_allow_natives_syntax = true;
1402 i::FLAG_optimize_for_size = false;
1403 CcTest::InitializeVM();
1404 Isolate* isolate = CcTest::i_isolate();
1405 Factory* factory = isolate->factory();
1406 v8::HandleScope scope(CcTest::isolate());
1407 const char* source = "var foo = function() {"
1413 "var bar = function() {"
1417 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1418 Handle<String> bar_name = factory->InternalizeUtf8String("bar");
1420 // Perform one initial GC to enable code flushing.
1421 CcTest::heap()->CollectAllGarbage();
1423 // This compile will add the code to the compilation cache.
1424 { v8::HandleScope scope(CcTest::isolate());
1428 // Check functions are compiled.
1429 Handle<Object> func_value =
1430 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1431 CHECK(func_value->IsJSFunction());
1432 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1433 CHECK(function->shared()->is_compiled());
1434 Handle<Object> func_value2 =
1435 Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
1436 CHECK(func_value2->IsJSFunction());
1437 Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
1438 CHECK(function2->shared()->is_compiled());
1440 // Clear references to functions so that one of them can die.
1441 { v8::HandleScope scope(CcTest::isolate());
1442 CompileRun("foo = 0; bar = 0;");
1445 // Bump the code age so that flushing is triggered while the function
1446 // object is still located in new-space.
1447 const int kAgingThreshold = 6;
1448 for (int i = 0; i < kAgingThreshold; i++) {
// Alternate marking parity each iteration so aging actually advances.
1449 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1450 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1453 // Simulate incremental marking so that the functions are enqueued as
1454 // code flushing candidates. Then kill one of the functions. Finally
1455 // perform a scavenge while incremental marking is still running.
1456 SimulateIncrementalMarking(CcTest::heap());
// Overwrite the handle's slot directly so function2 becomes unreachable.
1457 *function2.location() = NULL;
1458 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
1460 // Simulate one final GC to make sure the candidate queue is sane.
1461 CcTest::heap()->CollectAllGarbage();
1462 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1463 CHECK(!function->is_compiled() || function->IsOptimized());
// Aborting incremental marking (here via the debugger setting and clearing
// a breakpoint) while a function is enqueued as a code flushing candidate
// must leave the candidate queue in a sane state.
1467 TEST(TestCodeFlushingIncrementalAbort) {
1468 // If we do not flush code this test is invalid.
1469 if (!FLAG_flush_code) return;
1470 i::FLAG_allow_natives_syntax = true;
1471 i::FLAG_optimize_for_size = false;
1472 CcTest::InitializeVM();
1473 Isolate* isolate = CcTest::i_isolate();
1474 Factory* factory = isolate->factory();
1475 Heap* heap = isolate->heap();
1476 v8::HandleScope scope(CcTest::isolate());
1477 const char* source = "function foo() {"
1483 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1485 // This compile will add the code to the compilation cache.
1486 { v8::HandleScope scope(CcTest::isolate());
1490 // Check function is compiled.
1491 Handle<Object> func_value =
1492 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1493 CHECK(func_value->IsJSFunction());
1494 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1495 CHECK(function->shared()->is_compiled());
1497 // The code will survive at least two GCs.
1498 heap->CollectAllGarbage();
1499 heap->CollectAllGarbage();
1500 CHECK(function->shared()->is_compiled());
1502 // Bump the code age so that flushing is triggered.
1503 const int kAgingThreshold = 6;
1504 for (int i = 0; i < kAgingThreshold; i++) {
// Alternate marking parity each iteration so aging actually advances.
1505 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1508 // Simulate incremental marking so that the function is enqueued as
1509 // code flushing candidate.
1510 SimulateIncrementalMarking(heap);
1512 // Enable the debugger and add a breakpoint while incremental marking
1513 // is running so that incremental marking aborts and code flushing is
// A dummy Smi is sufficient as the breakpoint data object.
1516 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1518 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
1519 isolate->debug()->ClearAllBreakPoints();
1522 // Force optimization now that code flushing is disabled.
1523 { v8::HandleScope scope(CcTest::isolate());
1524 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1527 // Simulate one final GC to make sure the candidate queue is sane.
1528 heap->CollectAllGarbage();
1529 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1530 CHECK(function->is_compiled() || !function->IsOptimized());
// Exercises the two-phase caching behavior of the script compilation cache:
// the first compile only records a hash, the second inserts a real entry,
// code aging evicts the entry, and aging the cache generations before a
// script is cached prevents caching on the next compile.
1534 TEST(CompilationCacheCachingBehavior) {
1535 // If we do not flush code, or have the compilation cache turned off, this
1537 if (!FLAG_flush_code || !FLAG_compilation_cache) {
1540 CcTest::InitializeVM();
1541 Isolate* isolate = CcTest::i_isolate();
1542 Factory* factory = isolate->factory();
1543 Heap* heap = isolate->heap();
1544 CompilationCache* compilation_cache = isolate->compilation_cache();
1545 LanguageMode language_mode =
1546 construct_language_mode(FLAG_use_strict, FLAG_use_strong);
1548 v8::HandleScope scope(CcTest::isolate());
1549 const char* raw_source =
1556 Handle<String> source = factory->InternalizeUtf8String(raw_source);
1557 Handle<Context> native_context = isolate->native_context();
1560 v8::HandleScope scope(CcTest::isolate());
1561 CompileRun(raw_source);
1564 // On first compilation, only a hash is inserted in the code cache. We can't
1566 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
1567 source, Handle<Object>(), 0, 0,
1568 v8::ScriptOriginOptions(false, true, false), native_context,
1570 CHECK(info.is_null());
1573 v8::HandleScope scope(CcTest::isolate());
1574 CompileRun(raw_source);
1577 // On second compilation, the hash is replaced by a real cache entry mapping
1578 // the source to the shared function info containing the code.
1579 info = compilation_cache->LookupScript(
1580 source, Handle<Object>(), 0, 0,
1581 v8::ScriptOriginOptions(false, true, false), native_context,
1583 CHECK(!info.is_null());
1585 heap->CollectAllGarbage();
1587 // The cache entry must survive a GC while the cached code is still
1588 // young: the lookup still yields the shared function info.
1589 info = compilation_cache->LookupScript(
1590 source, Handle<Object>(), 0, 0,
1591 v8::ScriptOriginOptions(false, true, false), native_context,
1593 CHECK(!info.is_null());
// Age the cached code all the way, then GC: the entry must be evicted.
1595 while (!info.ToHandleChecked()->code()->IsOld()) {
1596 info.ToHandleChecked()->code()->MakeOlder(NO_MARKING_PARITY);
1599 heap->CollectAllGarbage();
1600 // Ensure code aging cleared the entry from the cache.
1601 info = compilation_cache->LookupScript(
1602 source, Handle<Object>(), 0, 0,
1603 v8::ScriptOriginOptions(false, true, false), native_context,
1605 CHECK(info.is_null());
1608 v8::HandleScope scope(CcTest::isolate());
1609 CompileRun(raw_source);
1612 // On first compilation, only a hash is inserted in the code cache. We can't
1614 info = compilation_cache->LookupScript(
1615 source, Handle<Object>(), 0, 0,
1616 v8::ScriptOriginOptions(false, true, false), native_context,
1618 CHECK(info.is_null());
// Cycle through all hash generations so the recorded hash is dropped.
1620 for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
1621 compilation_cache->MarkCompactPrologue();
1625 v8::HandleScope scope(CcTest::isolate());
1626 CompileRun(raw_source);
1629 // If we aged the cache before caching the script, ensure that we didn't cache
1630 // on next compilation.
1631 info = compilation_cache->LookupScript(
1632 source, Handle<Object>(), 0, 0,
1633 v8::ScriptOriginOptions(false, true, false), native_context,
1635 CHECK(info.is_null());
// Compiles and force-optimizes an empty function called |name| using the
// %OptimizeFunctionOnNextCall intrinsic (requires --allow-natives-syntax).
1639 static void OptimizeEmptyFunction(const char* name) {
1640 HandleScope scope(CcTest::i_isolate());
1641 EmbeddedVector<char, 256> source;
1643 "function %s() { return 0; }"
1645 "%%OptimizeFunctionOnNextCall(%s);"
1647 name, name, name, name, name);
1648 CompileRun(source.start());
1652 // Count the number of native contexts in the weak list of native contexts.
1653 int CountNativeContexts() {
1655 Object* object = CcTest::heap()->native_contexts_list();
// Follow the NEXT_CONTEXT_LINK chain until the undefined sentinel.
1656 while (!object->IsUndefined()) {
1658 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1660 // Subtract one to compensate for the code stub context that is always present
1665 // Count the number of user functions in the weak list of optimized
1666 // functions attached to a native context.
1667 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1669 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1670 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
// Walk the next_function_link chain; stop at the first non-JSFunction or
// builtin, which terminates the user-function prefix of the list.
1671 while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1673 object = JSFunction::cast(object)->next_function_link();
// Verifies the internal weak lists of native contexts and of optimized
// functions: scavenges treat the links as strong, mark-compact collects
// dead entries, and disposing contexts unlinks them one by one.
1679 TEST(TestInternalWeakLists) {
1680 FLAG_always_opt = false;
1681 FLAG_allow_natives_syntax = true;
1682 v8::V8::Initialize();
1684 // Some flags turn Scavenge collections into Mark-sweep collections
1685 // and hence are incompatible with this test case.
1686 if (FLAG_gc_global || FLAG_stress_compaction) return;
1687 FLAG_retain_maps_for_n_gc = 0;
1689 static const int kNumTestContexts = 10;
1691 Isolate* isolate = CcTest::i_isolate();
1692 Heap* heap = isolate->heap();
1693 HandleScope scope(isolate);
1694 v8::Handle<v8::Context> ctx[kNumTestContexts];
// The optimized-function list is only maintained with crankshaft enabled.
1695 if (!isolate->use_crankshaft()) return;
1697 CHECK_EQ(0, CountNativeContexts());
1699 // Create a number of global contexts which get linked together.
1700 for (int i = 0; i < kNumTestContexts; i++) {
1701 ctx[i] = v8::Context::New(CcTest::isolate());
1703 // Collect garbage that might have been created by one of the
1704 // installed extensions.
1705 isolate->compilation_cache()->Clear();
1706 heap->CollectAllGarbage();
1708 CHECK_EQ(i + 1, CountNativeContexts());
1712 // Create a handle scope so no function objects get stuck in the outer
1714 HandleScope scope(isolate);
1715 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1716 OptimizeEmptyFunction("f1");
1717 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
1718 OptimizeEmptyFunction("f2");
1719 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1720 OptimizeEmptyFunction("f3");
1721 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1722 OptimizeEmptyFunction("f4");
1723 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1724 OptimizeEmptyFunction("f5");
1725 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1727 // Remove the only reference to function f1.
1728 CompileRun("f1=null");
1730 // Scavenge treats these references as strong.
1731 for (int j = 0; j < 10; j++) {
1732 CcTest::heap()->CollectGarbage(NEW_SPACE);
1733 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1736 // Mark compact handles the weak references.
1737 isolate->compilation_cache()->Clear();
1738 heap->CollectAllGarbage();
1739 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1741 // Get rid of f3 and f5 in the same way.
1742 CompileRun("f3=null");
1743 for (int j = 0; j < 10; j++) {
1744 CcTest::heap()->CollectGarbage(NEW_SPACE);
1745 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1747 CcTest::heap()->CollectAllGarbage();
1748 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1749 CompileRun("f5=null");
1750 for (int j = 0; j < 10; j++) {
1751 CcTest::heap()->CollectGarbage(NEW_SPACE);
1752 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1754 CcTest::heap()->CollectAllGarbage();
1755 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1760 // Force compilation cache cleanup.
1761 CcTest::heap()->NotifyContextDisposed(true);
1762 CcTest::heap()->CollectAllGarbage();
1764 // Dispose the native contexts one by one.
1765 for (int i = 0; i < kNumTestContexts; i++) {
1766 // TODO(dcarney): is there a better way to do this?
// Overwrite the handle's slot with undefined so the context becomes
// unreachable without going through the public Dispose API.
1767 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1768 *unsafe = CcTest::heap()->undefined_value();
1771 // Scavenge treats these references as strong.
1772 for (int j = 0; j < 10; j++) {
1773 CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1774 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1777 // Mark compact handles the weak references.
1778 CcTest::heap()->CollectAllGarbage();
1779 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1782 CHECK_EQ(0, CountNativeContexts());
1786 // Count the number of native contexts in the weak list of native contexts
1787 // causing a GC after the specified number of elements.
1788 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1789 Heap* heap = isolate->heap();
// Handles (rather than raw Object*) are required here because the GC
// triggered mid-iteration may move the objects being traversed.
1791 Handle<Object> object(heap->native_contexts_list(), isolate);
1792 while (!object->IsUndefined()) {
// Trigger a full GC once the n-th element of the list is reached.
1794 if (count == n) heap->CollectAllGarbage();
1796 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1799 // Subtract one to compensate for the code stub context that is always present
1804 // Count the number of user functions in the weak list of optimized
1805 // functions attached to a native context causing a GC after the
1806 // specified number of elements.
1807 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1810 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1811 Isolate* isolate = icontext->GetIsolate();
// Handles are used throughout so the traversal survives the GC triggered
// mid-iteration (objects may move).
1812 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1814 while (object->IsJSFunction() &&
1815 !Handle<JSFunction>::cast(object)->IsBuiltin()) {
// Trigger a full GC once the n-th element of the list is reached.
1817 if (count == n) isolate->heap()->CollectAllGarbage();
1818 object = Handle<Object>(
1819 Object::cast(JSFunction::cast(*object)->next_function_link()),
// Same weak lists as TestInternalWeakLists, but verifies that traversing
// them remains correct when GCs are triggered in the middle of iteration.
1826 TEST(TestInternalWeakListsTraverseWithGC) {
1827 FLAG_always_opt = false;
1828 FLAG_allow_natives_syntax = true;
1829 v8::V8::Initialize();
1831 static const int kNumTestContexts = 10;
1833 Isolate* isolate = CcTest::i_isolate();
1834 HandleScope scope(isolate);
1835 v8::Handle<v8::Context> ctx[kNumTestContexts];
// The optimized-function list is only maintained with crankshaft enabled.
1836 if (!isolate->use_crankshaft()) return;
1838 CHECK_EQ(0, CountNativeContexts());
1840 // Create a number of contexts and check the length of the weak list both
1841 // with and without GCs while iterating the list.
1842 for (int i = 0; i < kNumTestContexts; i++) {
1843 ctx[i] = v8::Context::New(CcTest::isolate());
1844 CHECK_EQ(i + 1, CountNativeContexts());
1845 CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
1850 // Compile a number of functions the length of the weak list of optimized
1851 // functions both with and without GCs while iterating the list.
1852 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1853 OptimizeEmptyFunction("f1");
1854 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
1855 CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1856 OptimizeEmptyFunction("f2");
1857 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
1858 CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1859 OptimizeEmptyFunction("f3");
1860 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
1861 CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1862 OptimizeEmptyFunction("f4");
1863 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
1864 CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1865 OptimizeEmptyFunction("f5");
1866 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
1867 CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
// Compares the heap footprint of a regexp that is too large to optimize
// against a half-size regexp that does get optimized: the optimized one is
// expected to compile to more than twice the code.
1873 TEST(TestSizeOfRegExpCode) {
1874 if (!FLAG_regexp_optimization) return;
1876 v8::V8::Initialize();
1878 Isolate* isolate = CcTest::i_isolate();
1879 HandleScope scope(isolate);
1881 LocalContext context;
1883 // Adjust source below and this check to match
1884 // RegExpImpl::kRegExpTooLargeToOptimize.
1885 DCHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);
1887 // Compile a regexp that is much larger if we are using regexp optimizations.
1889 "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
1890 "var half_size_reg_exp;"
1891 "while (reg_exp_source.length < 20 * 1024) {"
1892 " half_size_reg_exp = reg_exp_source;"
1893 " reg_exp_source = reg_exp_source + reg_exp_source;"
1896 "reg_exp_source.match(/f/);");
1898 // Get initial heap size after several full GCs, which will stabilize
1899 // the heap size and return with sweeping finished completely.
1900 CcTest::heap()->CollectAllGarbage();
1901 CcTest::heap()->CollectAllGarbage();
1902 CcTest::heap()->CollectAllGarbage();
1903 CcTest::heap()->CollectAllGarbage();
1904 CcTest::heap()->CollectAllGarbage();
1905 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1906 if (collector->sweeping_in_progress()) {
1907 collector->EnsureSweepingCompleted();
1909 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
// Compile the too-large regexp and measure the heap growth it causes.
1911 CompileRun("'foo'.match(reg_exp_source);");
1912 CcTest::heap()->CollectAllGarbage();
1913 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
// Now compile the half-size regexp, which is small enough to be optimized.
1915 CompileRun("'foo'.match(half_size_reg_exp);");
1916 CcTest::heap()->CollectAllGarbage();
1917 int size_with_optimized_regexp =
1918 static_cast<int>(CcTest::heap()->SizeOfObjects());
1920 int size_of_regexp_code = size_with_regexp - initial_size;
1922 // On some platforms the debug-code flag causes huge amounts of regexp code
1923 // to be emitted, breaking this test.
1924 if (!FLAG_debug_code) {
1925 CHECK_LE(size_of_regexp_code, 1 * MB);
1928 // Small regexp is half the size, but compiles to more than twice the code
1929 // due to the optimization steps.
1930 CHECK_GE(size_with_optimized_regexp,
1931 size_with_regexp + size_of_regexp_code * 2);
// Heap::SizeOfObjects() must track allocations exactly while objects are
// allocated and return to the initial value after a full GC, regardless of
// whether concurrent sweeping has finished.
1935 HEAP_TEST(TestSizeOfObjects) {
1936 v8::V8::Initialize();
1938 // Get initial heap size after several full GCs, which will stabilize
1939 // the heap size and return with sweeping finished completely.
1940 CcTest::heap()->CollectAllGarbage();
1941 CcTest::heap()->CollectAllGarbage();
1942 CcTest::heap()->CollectAllGarbage();
1943 CcTest::heap()->CollectAllGarbage();
1944 CcTest::heap()->CollectAllGarbage();
1945 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1946 if (collector->sweeping_in_progress()) {
1947 collector->EnsureSweepingCompleted();
1949 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1952 // Allocate objects on several different old-space pages so that
1953 // concurrent sweeper threads will be busy sweeping the old space on
1954 // subsequent GC runs.
1955 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1956 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1957 for (int i = 1; i <= 100; i++) {
1958 CcTest::heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
// SizeOfObjects() must reflect every allocation immediately.
1959 CHECK_EQ(initial_size + i * filler_size,
1960 static_cast<int>(CcTest::heap()->SizeOfObjects()));
1964 // The heap size should go back to initial size after a full GC, even
1965 // though sweeping didn't finish yet.
1966 CcTest::heap()->CollectAllGarbage();
1968 // Normally sweeping would not be complete here, but no guarantees.
1970 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1972 // Waiting for sweeper threads should not change heap size.
1973 if (collector->sweeping_in_progress()) {
1974 collector->EnsureSweepingCompleted();
1976 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
// Pure-arithmetic checks of Heap::GetMaximumFillToAlign() and
// Heap::GetFillToAlign() for word, double, double-unaligned and
// SIMD-128-unaligned allocation alignments.
1980 TEST(TestAlignmentCalculations) {
1981 // Maximum fill amounts are consistent.
1982 int maximum_double_misalignment = kDoubleSize - kPointerSize;
1983 int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
1984 int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
1985 CHECK_EQ(0, max_word_fill);
1986 int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
1987 CHECK_EQ(maximum_double_misalignment, max_double_fill);
1988 int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
1989 CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
1990 int max_simd128_unaligned_fill =
1991 Heap::GetMaximumFillToAlign(kSimd128Unaligned);
1992 CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);
// Address 0 serves as a trivially aligned base for the fill computations.
1994 Address base = static_cast<Address>(NULL);
1997 // Word alignment never requires fill.
1998 fill = Heap::GetFillToAlign(base, kWordAligned);
2000 fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
2003 // No fill is required when address is double aligned.
2004 fill = Heap::GetFillToAlign(base, kDoubleAligned);
2006 // Fill is required if address is not double aligned.
2007 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
2008 CHECK_EQ(maximum_double_misalignment, fill);
2009 // kDoubleUnaligned has the opposite fill amounts.
2010 fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
2011 CHECK_EQ(maximum_double_misalignment, fill);
2012 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
2015 // 128 bit SIMD types have 2 or 4 possible alignments, depending on platform.
2016 fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
2017 CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
2018 fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
2019 CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
2020 fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
2021 CHECK_EQ(kPointerSize, fill);
2022 fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
// Test helper: performs a raw aligned allocation of `size` bytes in new
// space and overwrites it with a filler object so the heap stays iterable.
// NOTE(review): the `return obj;` / closing brace are outside this listing.
2027 static HeapObject* NewSpaceAllocateAligned(int size,
2028 AllocationAlignment alignment) {
2029 Heap* heap = CcTest::heap();
2030 AllocationResult allocation =
2031 heap->new_space()->AllocateRawAligned(size, alignment);
2032 HeapObject* obj = NULL;
2033 allocation.To(&obj);
// Turn the fresh allocation into a filler so heap verification/iteration
// does not trip over uninitialized memory.
2034 heap->CreateFillerObjectAt(obj->address(), size);
2039 // Get new space allocation into the desired alignment.
// Test helper: pads the new-space allocation top so the next allocation
// lands `offset` bytes past an `alignment` boundary; the padding itself is
// allocated word-aligned. (The return of the adjusted top is outside this
// listing.)
2040 static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
2041 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
2042 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2044 NewSpaceAllocateAligned(fill + offset, kWordAligned);
// Exercises linear (bump-pointer) aligned allocation in new space: for each
// alignment kind, allocates at an already-aligned top (expect no filler) and
// at a misaligned top (expect a leading filler of the misalignment size).
// NOTE(review): declarations of `start`, `obj`, `filler` and some closing
// braces fall in gaps of this listing; code kept byte-identical.
2050 TEST(TestAlignedAllocation) {
2051 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2052 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2053 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
// Double alignment cases only matter on 32-bit targets.
2057 if (double_misalignment) {
2058 // Allocate a pointer sized object that must be double aligned at an
2060 start = AlignNewSpace(kDoubleAligned, 0);
2061 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2062 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2063 // There is no filler.
2064 CHECK_EQ(kPointerSize, *top_addr - start);
2066 // Allocate a second pointer sized object that must be double aligned at an
2067 // unaligned address.
2068 start = AlignNewSpace(kDoubleAligned, kPointerSize);
2069 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2070 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2071 // There is a filler object before the object.
2072 filler = HeapObject::FromAddress(start);
2073 CHECK(obj != filler && filler->IsFiller() &&
2074 filler->Size() == kPointerSize);
2075 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
2077 // Similarly for kDoubleUnaligned.
2078 start = AlignNewSpace(kDoubleUnaligned, 0);
2079 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2080 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2081 CHECK_EQ(kPointerSize, *top_addr - start);
2082 start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
2083 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2084 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2085 // There is a filler object before the object.
2086 filler = HeapObject::FromAddress(start);
2087 CHECK(obj != filler && filler->IsFiller() &&
2088 filler->Size() == kPointerSize);
2089 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
2092 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2094 start = AlignNewSpace(kSimd128Unaligned, 0);
2095 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2096 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2097 // There is no filler.
2098 CHECK_EQ(kPointerSize, *top_addr - start);
2099 start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
2100 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2101 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2102 // There is a filler object before the object.
2103 filler = HeapObject::FromAddress(start);
2104 CHECK(obj != filler && filler->IsFiller() &&
2105 filler->Size() == kSimd128Size - kPointerSize);
2106 CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);
// The remaining two SIMD misalignments can only occur on 32-bit platforms.
2108 if (double_misalignment) {
2109 // Test the 2 other alignments possible on 32 bit platforms.
2110 start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
2111 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2112 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2113 // There is a filler object before the object.
2114 filler = HeapObject::FromAddress(start);
2115 CHECK(obj != filler && filler->IsFiller() &&
2116 filler->Size() == 2 * kPointerSize);
2117 CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
2118 start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
2119 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2120 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2121 // There is a filler object before the object.
2122 filler = HeapObject::FromAddress(start);
2123 CHECK(obj != filler && filler->IsFiller() &&
2124 filler->Size() == kPointerSize);
2125 CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
// Test helper: same as NewSpaceAllocateAligned, but allocates from old
// space (which may serve the request from the free list).
// NOTE(review): the `return obj;` / closing brace are outside this listing.
2130 static HeapObject* OldSpaceAllocateAligned(int size,
2131 AllocationAlignment alignment) {
2132 Heap* heap = CcTest::heap();
2133 AllocationResult allocation =
2134 heap->old_space()->AllocateRawAligned(size, alignment);
2135 HeapObject* obj = NULL;
2136 allocation.To(&obj);
// Keep the heap iterable by replacing the raw allocation with a filler.
2137 heap->CreateFillerObjectAt(obj->address(), size);
2142 // Get old space allocation into the desired alignment.
// Test helper: pads old-space top to `offset` past an `alignment` boundary,
// then flushes the linear allocation area so subsequent allocations must go
// through the free list. (Return of `top` is outside this listing.)
2143 static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
2144 Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
2145 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2146 int allocation = fill + offset;
// Consume `allocation` bytes so the next aligned request starts at the
// desired misalignment.
2148 OldSpaceAllocateAligned(allocation, kWordAligned);
2150 Address top = *top_addr;
2151 // Now force the remaining allocation onto the free list.
2152 CcTest::heap()->old_space()->EmptyAllocationInfo();
2157 // Test the case where allocation must be done from the free list, so filler
2158 // may precede or follow the object.
// Free-list allocations over-allocate by the maximum misalignment, so the
// leftover space shows up as filler before and/or after the object.
// NOTE(review): lines 2179-2183 check the same conditions twice (split
// CHECKs followed by the combined CHECK) — redundant but harmless.
2159 TEST(TestAlignedOverAllocation) {
2160 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2161 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2164 HeapObject* filler1;
2165 HeapObject* filler2;
2166 if (double_misalignment) {
2167 start = AlignOldSpace(kDoubleAligned, 0);
2168 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2169 // The object is aligned, and a filler object is created after.
2170 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2171 filler1 = HeapObject::FromAddress(start + kPointerSize);
2172 CHECK(obj != filler1 && filler1->IsFiller() &&
2173 filler1->Size() == kPointerSize);
2174 // Try the opposite alignment case.
2175 start = AlignOldSpace(kDoubleAligned, kPointerSize);
2176 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2177 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2178 filler1 = HeapObject::FromAddress(start);
2179 CHECK(obj != filler1);
2180 CHECK(filler1->IsFiller());
2181 CHECK(filler1->Size() == kPointerSize);
2182 CHECK(obj != filler1 && filler1->IsFiller() &&
2183 filler1->Size() == kPointerSize);
2185 // Similarly for kDoubleUnaligned.
2186 start = AlignOldSpace(kDoubleUnaligned, 0);
2187 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2188 // The object is aligned, and a filler object is created after.
2189 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2190 filler1 = HeapObject::FromAddress(start + kPointerSize);
2191 CHECK(obj != filler1 && filler1->IsFiller() &&
2192 filler1->Size() == kPointerSize);
2193 // Try the opposite alignment case.
2194 start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
2195 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2196 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2197 filler1 = HeapObject::FromAddress(start);
2198 CHECK(obj != filler1 && filler1->IsFiller() &&
2199 filler1->Size() == kPointerSize);
2202 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2204 start = AlignOldSpace(kSimd128Unaligned, 0);
2205 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2206 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2207 // There is a filler object after the object.
2208 filler1 = HeapObject::FromAddress(start + kPointerSize);
2209 CHECK(obj != filler1 && filler1->IsFiller() &&
2210 filler1->Size() == kSimd128Size - kPointerSize);
2211 start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
2212 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2213 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2214 // There is a filler object before the object.
2215 filler1 = HeapObject::FromAddress(start);
2216 CHECK(obj != filler1 && filler1->IsFiller() &&
2217 filler1->Size() == kSimd128Size - kPointerSize);
// The remaining two SIMD misalignments only occur on 32-bit platforms.
2219 if (double_misalignment) {
2220 // Test the 2 other alignments possible on 32 bit platforms.
2221 start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
2222 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2223 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2224 // There are filler objects before and after the object.
2225 filler1 = HeapObject::FromAddress(start);
2226 CHECK(obj != filler1 && filler1->IsFiller() &&
2227 filler1->Size() == 2 * kPointerSize);
2228 filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
2229 CHECK(obj != filler2 && filler2->IsFiller() &&
2230 filler2->Size() == kPointerSize);
2231 start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
2232 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2233 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2234 // There are filler objects before and after the object.
2235 filler1 = HeapObject::FromAddress(start);
2236 CHECK(obj != filler1 && filler1->IsFiller() &&
2237 filler1->Size() == kPointerSize);
2238 filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
2239 CHECK(obj != filler2 && filler2->IsFiller() &&
2240 filler2->Size() == 2 * kPointerSize);
// Compares Heap::SizeOfObjects() (bookkeeping counter) against the sum of
// object sizes found by a HeapIterator, and requires the two to agree
// within 5% in either direction.
2245 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
2246 CcTest::InitializeVM();
2247 HeapIterator iterator(CcTest::heap());
2248 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
2249 intptr_t size_of_objects_2 = 0;
2250 for (HeapObject* obj = iterator.next();
2252 obj = iterator.next()) {
// Free-space objects are heap fillers, not live objects; skip them.
2253 if (!obj->IsFreeSpace()) {
2254 size_of_objects_2 += obj->Size();
2257 // Delta must be within 5% of the larger result.
2258 // TODO(gc): Tighten this up by distinguishing between byte
2259 // arrays that are real and those that merely mark free space
2261 if (size_of_objects_1 > size_of_objects_2) {
2262 intptr_t delta = size_of_objects_1 - size_of_objects_2;
2263 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2264 "Iterator: %" V8_PTR_PREFIX "d, "
2265 "delta: %" V8_PTR_PREFIX "d\n",
2266 size_of_objects_1, size_of_objects_2, delta);
2267 CHECK_GT(size_of_objects_1 / 20, delta);
2269 intptr_t delta = size_of_objects_2 - size_of_objects_1;
2270 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2271 "Iterator: %" V8_PTR_PREFIX "d, "
2272 "delta: %" V8_PTR_PREFIX "d\n",
2273 size_of_objects_1, size_of_objects_2, delta);
2274 CHECK_GT(size_of_objects_2 / 20, delta);
// Test helper: fills the remaining new-space capacity with small tenure-
// disabled FixedArrays, under AlwaysAllocateScope so allocation cannot
// trigger a scavenge that would undo the filling.
2279 static void FillUpNewSpace(NewSpace* new_space) {
2280 // Fill up new space to the point that it is completely full. Make sure
2281 // that the scavenger does not undo the filling.
2282 Heap* heap = new_space->heap();
2283 Isolate* isolate = heap->isolate();
2284 Factory* factory = isolate->factory();
2285 HandleScope scope(isolate);
2286 AlwaysAllocateScope always_allocate(isolate);
2287 intptr_t available = new_space->Capacity() - new_space->Size();
// Leave one array's worth of slack so the last allocation still fits.
2288 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
2289 for (intptr_t i = 0; i < number_of_fillers; i++) {
2290 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
// Verifies NewSpace capacity management: Grow() doubles capacity, Shrink()
// is a no-op while the space is in use, and halves capacity (once) after a
// scavenge empties the space.
2295 TEST(GrowAndShrinkNewSpace) {
2296 CcTest::InitializeVM();
2297 Heap* heap = CcTest::heap();
2298 NewSpace* new_space = heap->new_space();
// Skip when the semispace cannot actually grow (reserved == initial size).
2300 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
2301 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2302 // The max size cannot exceed the reserved size, since semispaces must be
2303 // always within the reserved space. We can't test new space growing and
2304 // shrinking if the reserved size is the same as the minimum (initial) size.
2308 // Explicitly growing should double the space capacity.
2309 intptr_t old_capacity, new_capacity;
2310 old_capacity = new_space->TotalCapacity();
2312 new_capacity = new_space->TotalCapacity();
2313 CHECK(2 * old_capacity == new_capacity);
2315 old_capacity = new_space->TotalCapacity();
2316 FillUpNewSpace(new_space);
2317 new_capacity = new_space->TotalCapacity();
// Filling the space must not change its total capacity.
2318 CHECK(old_capacity == new_capacity);
2320 // Explicitly shrinking should not affect space capacity.
2321 old_capacity = new_space->TotalCapacity();
2322 new_space->Shrink();
2323 new_capacity = new_space->TotalCapacity();
2324 CHECK(old_capacity == new_capacity);
2326 // Let the scavenger empty the new space.
2327 heap->CollectGarbage(NEW_SPACE);
2328 CHECK_LE(new_space->Size(), old_capacity);
2330 // Explicitly shrinking should halve the space capacity.
2331 old_capacity = new_space->TotalCapacity();
2332 new_space->Shrink();
2333 new_capacity = new_space->TotalCapacity();
2334 CHECK(old_capacity == 2 * new_capacity);
2336 // Consecutive shrinking should not affect space capacity.
2337 old_capacity = new_space->TotalCapacity();
2338 new_space->Shrink();
2339 new_space->Shrink();
2340 new_space->Shrink();
2341 new_capacity = new_space->TotalCapacity();
2342 CHECK(old_capacity == new_capacity);
// Verifies that CollectAllAvailableGarbage() shrinks a grown-and-filled new
// space back to its original capacity.
2346 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
2347 CcTest::InitializeVM();
2348 Heap* heap = CcTest::heap();
// Skip when the semispace cannot actually grow (reserved == initial size).
2349 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
2350 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2351 // The max size cannot exceed the reserved size, since semispaces must be
2352 // always within the reserved space. We can't test new space growing and
2353 // shrinking if the reserved size is the same as the minimum (initial) size.
2357 v8::HandleScope scope(CcTest::isolate());
2358 NewSpace* new_space = heap->new_space();
2359 intptr_t old_capacity, new_capacity;
2360 old_capacity = new_space->TotalCapacity();
2362 new_capacity = new_space->TotalCapacity();
2363 CHECK(2 * old_capacity == new_capacity);
2364 FillUpNewSpace(new_space);
2365 heap->CollectAllAvailableGarbage();
2366 new_capacity = new_space->TotalCapacity();
// Capacity is back to the pre-grow value after the full GC.
2367 CHECK(old_capacity == new_capacity);
// Test helper: counts global objects on the heap via a HeapIterator.
// NOTE(review): the counter declaration and the adjusted `return` fall in
// gaps of this listing; code kept byte-identical.
2371 static int NumberOfGlobalObjects() {
2373 HeapIterator iterator(CcTest::heap());
2374 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2375 if (obj->IsGlobalObject()) count++;
2377 // Subtract two to compensate for the two global objects (not global
2378 // JSObjects, of which there would only be one) that are part of the code stub
2379 // context, which is always present.
2384 // Test that we don't embed maps from foreign contexts into
// Creates two contexts, passes an object (and hence its map) from ctx1 to
// optimized code in ctx2, then disposes the contexts and checks — via
// global-object counts after full GCs — that neither context is retained.
2386 TEST(LeakNativeContextViaMap) {
2387 i::FLAG_allow_natives_syntax = true;
2388 v8::Isolate* isolate = CcTest::isolate();
2389 v8::HandleScope outer_scope(isolate);
2390 v8::Persistent<v8::Context> ctx1p;
2391 v8::Persistent<v8::Context> ctx2p;
2393 v8::HandleScope scope(isolate);
2394 ctx1p.Reset(isolate, v8::Context::New(isolate));
2395 ctx2p.Reset(isolate, v8::Context::New(isolate));
2396 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// Two live contexts => four global objects (see NumberOfGlobalObjects).
2399 CcTest::heap()->CollectAllAvailableGarbage();
2400 CHECK_EQ(4, NumberOfGlobalObjects());
2403 v8::HandleScope inner_scope(isolate);
2404 CompileRun("var v = {x: 42}");
2405 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2406 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2407 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
// Leak ctx1's object into ctx2 and optimize code that reads its property.
2409 ctx2->Global()->Set(v8_str("o"), v);
2410 v8::Local<v8::Value> res = CompileRun(
2411 "function f() { return o.x; }"
2412 "for (var i = 0; i < 10; ++i) f();"
2413 "%OptimizeFunctionOnNextCall(f);"
2415 CHECK_EQ(42, res->Int32Value());
// Drop the cross-context reference before disposing ctx1.
2416 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2418 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2420 isolate->ContextDisposedNotification();
2422 CcTest::heap()->CollectAllAvailableGarbage();
2423 CHECK_EQ(2, NumberOfGlobalObjects());
2425 CcTest::heap()->CollectAllAvailableGarbage();
2426 CHECK_EQ(0, NumberOfGlobalObjects());
2430 // Test that we don't embed functions from foreign contexts into
// Same structure as LeakNativeContextViaMap, but the leaked value is a
// closure from ctx1 that optimized code in ctx2 calls.
2432 TEST(LeakNativeContextViaFunction) {
2433 i::FLAG_allow_natives_syntax = true;
2434 v8::Isolate* isolate = CcTest::isolate();
2435 v8::HandleScope outer_scope(isolate);
2436 v8::Persistent<v8::Context> ctx1p;
2437 v8::Persistent<v8::Context> ctx2p;
2439 v8::HandleScope scope(isolate);
2440 ctx1p.Reset(isolate, v8::Context::New(isolate));
2441 ctx2p.Reset(isolate, v8::Context::New(isolate));
2442 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// Two live contexts => four global objects.
2445 CcTest::heap()->CollectAllAvailableGarbage();
2446 CHECK_EQ(4, NumberOfGlobalObjects());
2449 v8::HandleScope inner_scope(isolate);
2450 CompileRun("var v = function() { return 42; }");
2451 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2452 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2453 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
// Leak ctx1's function into ctx2 and optimize a caller of it.
2455 ctx2->Global()->Set(v8_str("o"), v);
2456 v8::Local<v8::Value> res = CompileRun(
2457 "function f(x) { return x(); }"
2458 "for (var i = 0; i < 10; ++i) f(o);"
2459 "%OptimizeFunctionOnNextCall(f);"
2461 CHECK_EQ(42, res->Int32Value());
// Drop the cross-context reference before disposal.
2462 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2466 isolate->ContextDisposedNotification();
2468 CcTest::heap()->CollectAllAvailableGarbage();
2469 CHECK_EQ(2, NumberOfGlobalObjects());
2471 CcTest::heap()->CollectAllAvailableGarbage();
2472 CHECK_EQ(0, NumberOfGlobalObjects());
// Variant of LeakNativeContextViaMap using a keyed (element) load on an
// array leaked from ctx1 into optimized code in ctx2.
2476 TEST(LeakNativeContextViaMapKeyed) {
2477 i::FLAG_allow_natives_syntax = true;
2478 v8::Isolate* isolate = CcTest::isolate();
2479 v8::HandleScope outer_scope(isolate);
2480 v8::Persistent<v8::Context> ctx1p;
2481 v8::Persistent<v8::Context> ctx2p;
2483 v8::HandleScope scope(isolate);
2484 ctx1p.Reset(isolate, v8::Context::New(isolate));
2485 ctx2p.Reset(isolate, v8::Context::New(isolate));
2486 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// Two live contexts => four global objects.
2489 CcTest::heap()->CollectAllAvailableGarbage();
2490 CHECK_EQ(4, NumberOfGlobalObjects());
2493 v8::HandleScope inner_scope(isolate);
2494 CompileRun("var v = [42, 43]");
2495 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2496 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2497 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
// Leak ctx1's array into ctx2 and optimize a keyed load on it.
2499 ctx2->Global()->Set(v8_str("o"), v);
2500 v8::Local<v8::Value> res = CompileRun(
2501 "function f() { return o[0]; }"
2502 "for (var i = 0; i < 10; ++i) f();"
2503 "%OptimizeFunctionOnNextCall(f);"
2505 CHECK_EQ(42, res->Int32Value());
// Drop the cross-context reference before disposal.
2506 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2510 isolate->ContextDisposedNotification();
2512 CcTest::heap()->CollectAllAvailableGarbage();
2513 CHECK_EQ(2, NumberOfGlobalObjects());
2515 CcTest::heap()->CollectAllAvailableGarbage();
2516 CHECK_EQ(0, NumberOfGlobalObjects());
// Variant of LeakNativeContextViaMap where the leaked ctx1 object is used
// as a prototype by optimized code in ctx2.
2520 TEST(LeakNativeContextViaMapProto) {
2521 i::FLAG_allow_natives_syntax = true;
2522 v8::Isolate* isolate = CcTest::isolate();
2523 v8::HandleScope outer_scope(isolate);
2524 v8::Persistent<v8::Context> ctx1p;
2525 v8::Persistent<v8::Context> ctx2p;
2527 v8::HandleScope scope(isolate);
2528 ctx1p.Reset(isolate, v8::Context::New(isolate));
2529 ctx2p.Reset(isolate, v8::Context::New(isolate));
2530 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// Two live contexts => four global objects.
2533 CcTest::heap()->CollectAllAvailableGarbage();
2534 CHECK_EQ(4, NumberOfGlobalObjects());
2537 v8::HandleScope inner_scope(isolate);
2538 CompileRun("var v = { y: 42}");
2539 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2540 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2541 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
// Leak ctx1's object into ctx2; the (elided) script installs it on a
// prototype chain before optimizing.
2543 ctx2->Global()->Set(v8_str("o"), v);
2544 v8::Local<v8::Value> res = CompileRun(
2550 "for (var i = 0; i < 10; ++i) f();"
2551 "%OptimizeFunctionOnNextCall(f);"
2553 CHECK_EQ(42, res->Int32Value());
// Drop the cross-context reference before disposal.
2554 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2558 isolate->ContextDisposedNotification();
2560 CcTest::heap()->CollectAllAvailableGarbage();
2561 CHECK_EQ(2, NumberOfGlobalObjects());
2563 CcTest::heap()->CollectAllAvailableGarbage();
2564 CHECK_EQ(0, NumberOfGlobalObjects());
// Regression test: runs optimized `instanceof` code while incremental
// marking is active, then finishes the GC with heap verification enabled to
// catch a missing write barrier in the InstanceOf stub.
2568 TEST(InstanceOfStubWriteBarrier) {
2569 i::FLAG_allow_natives_syntax = true;
2571 i::FLAG_verify_heap = true;
2574 CcTest::InitializeVM();
// The test only makes sense with the optimizing compiler available.
2575 if (!CcTest::i_isolate()->use_crankshaft()) return;
2576 if (i::FLAG_force_marking_deque_overflows) return;
2577 v8::HandleScope outer_scope(CcTest::isolate());
2580 v8::HandleScope scope(CcTest::isolate());
2582 "function foo () { }"
2583 "function mkbar () { return new (new Function(\"\")) (); }"
2584 "function f (x) { return (x instanceof foo); }"
2585 "function g () { f(mkbar()); }"
2586 "f(new foo()); f(new foo());"
2587 "%OptimizeFunctionOnNextCall(f);"
2588 "f(new foo()); g();");
2591 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2593 marking->Start(Heap::kNoGCFlags);
2595 Handle<JSFunction> f =
2596 v8::Utils::OpenHandle(
2597 *v8::Handle<v8::Function>::Cast(
2598 CcTest::global()->Get(v8_str("f"))));
2600 CHECK(f->IsOptimized());
// Step the marker until f's code object is black (fully marked), so the
// subsequent call must rely on the write barrier.
2602 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
2603 !marking->IsStopped()) {
2604 // Discard any pending GC requests otherwise we will get GC when we enter
2606 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2609 CHECK(marking->IsMarking());
2612 v8::HandleScope scope(CcTest::isolate());
2613 v8::Handle<v8::Object> global = CcTest::global();
2614 v8::Handle<v8::Function> g =
2615 v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
2616 g->Call(global, 0, NULL);
// Finish marking quickly and run a full old-space GC; with
// FLAG_verify_heap set this verifies the barrier recorded everything.
2619 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2620 CcTest::heap()->CollectGarbage(OLD_SPACE);
// Test helper: number of prototype transitions recorded in `map`'s
// transition array.
2624 static int NumberOfProtoTransitions(Map* map) {
2625 return TransitionArray::NumberOfPrototypeTransitions(
2626 TransitionArray::GetPrototypeTransitions(map));
// Verifies that a full GC clears prototype transitions whose targets died
// (only entries kept alive via `live` survive), compacts the transition
// array, and records slots correctly when a prototype sits on an old-space
// evacuation candidate.
2630 TEST(PrototypeTransitionClearing) {
2631 if (FLAG_never_compact) return;
2632 CcTest::InitializeVM();
2633 Isolate* isolate = CcTest::i_isolate();
2634 Factory* factory = isolate->factory();
2635 v8::HandleScope scope(CcTest::isolate());
2637 CompileRun("var base = {};");
2638 Handle<JSObject> baseObject =
2639 v8::Utils::OpenHandle(
2640 *v8::Handle<v8::Object>::Cast(
2641 CcTest::global()->Get(v8_str("base"))));
2642 int initialTransitions = NumberOfProtoTransitions(baseObject->map());
// (Elided script) creates 10 objects with fresh prototypes; only those
// with i >= 3 are kept alive through the `live` array.
2646 "for (var i = 0; i < 10; i++) {"
2648 " var prototype = {};"
2649 " object.__proto__ = prototype;"
2650 " if (i >= 3) live.push(object, prototype);"
2653 // Verify that only dead prototype transitions are cleared.
2654 CHECK_EQ(initialTransitions + 10,
2655 NumberOfProtoTransitions(baseObject->map()));
2656 CcTest::heap()->CollectAllGarbage();
2657 const int transitions = 10 - 3;
2658 CHECK_EQ(initialTransitions + transitions,
2659 NumberOfProtoTransitions(baseObject->map()));
2661 // Verify that prototype transitions array was compacted.
2663 TransitionArray::GetPrototypeTransitions(baseObject->map());
2664 for (int i = initialTransitions; i < initialTransitions + transitions; i++) {
2665 int j = TransitionArray::kProtoTransitionHeaderSize + i;
// Surviving entries are weak cells that still point at live maps.
2666 CHECK(trans->get(j)->IsWeakCell());
2667 CHECK(WeakCell::cast(trans->get(j))->value()->IsMap());
2670 // Make sure next prototype is placed on an old-space evacuation candidate.
2671 Handle<JSObject> prototype;
2672 PagedSpace* space = CcTest::heap()->old_space();
2674 AlwaysAllocateScope always_allocate(isolate);
2675 SimulateFullSpace(space);
2676 prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS,
2677 Strength::WEAK, TENURED);
2680 // Add a prototype on an evacuation candidate and verify that transition
2681 // clearing correctly records slots in prototype transition array.
2682 i::FLAG_always_compact = true;
2683 Handle<Map> map(baseObject->map());
2684 CHECK(!space->LastPage()->Contains(
2685 TransitionArray::GetPrototypeTransitions(*map)->address()));
2686 CHECK(space->LastPage()->Contains(prototype->address()));
// Verifies that bumping global_ic_age via ContextDisposedNotification while
// incremental marking is active resets a SharedFunctionInfo's ic_age,
// opt_count and profiler ticks during the subsequent full GC.
2690 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2691 i::FLAG_stress_compaction = false;
2692 i::FLAG_allow_natives_syntax = true;
2694 i::FLAG_verify_heap = true;
2697 CcTest::InitializeVM();
// The test only makes sense with the optimizing compiler available.
2698 if (!CcTest::i_isolate()->use_crankshaft()) return;
2699 v8::HandleScope outer_scope(CcTest::isolate());
2702 v8::HandleScope scope(CcTest::isolate());
2706 " for (var i = 0; i < 100; i++) s += i;"
2710 "%OptimizeFunctionOnNextCall(f);"
2713 Handle<JSFunction> f =
2714 v8::Utils::OpenHandle(
2715 *v8::Handle<v8::Function>::Cast(
2716 CcTest::global()->Get(v8_str("f"))));
2717 CHECK(f->IsOptimized());
2719 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2721 marking->Start(Heap::kNoGCFlags);
2722 // The following calls will increment CcTest::heap()->global_ic_age().
2723 CcTest::isolate()->ContextDisposedNotification();
2724 SimulateIncrementalMarking(CcTest::heap());
2725 CcTest::heap()->CollectAllGarbage();
// After the GC the function's counters track the new global IC age.
2726 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2727 CHECK_EQ(0, f->shared()->opt_count());
2728 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Same counter-reset check as the incremental-marking variant above, but
// with incremental marking stopped so a plain mark-sweep does the reset.
2732 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2733 i::FLAG_stress_compaction = false;
2734 i::FLAG_allow_natives_syntax = true;
2736 i::FLAG_verify_heap = true;
2739 CcTest::InitializeVM();
// The test only makes sense with the optimizing compiler available.
2740 if (!CcTest::i_isolate()->use_crankshaft()) return;
2741 v8::HandleScope outer_scope(CcTest::isolate());
2744 v8::HandleScope scope(CcTest::isolate());
2748 " for (var i = 0; i < 100; i++) s += i;"
2752 "%OptimizeFunctionOnNextCall(f);"
2755 Handle<JSFunction> f =
2756 v8::Utils::OpenHandle(
2757 *v8::Handle<v8::Function>::Cast(
2758 CcTest::global()->Get(v8_str("f"))));
2759 CHECK(f->IsOptimized());
// Ensure the GC below is a non-incremental mark-sweep.
2761 CcTest::heap()->incremental_marking()->Stop();
2763 // The following two calls will increment CcTest::heap()->global_ic_age().
2764 CcTest::isolate()->ContextDisposedNotification();
2765 CcTest::heap()->CollectAllGarbage();
2767 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2768 CHECK_EQ(0, f->shared()->opt_count());
2769 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Verifies current_gc_flags() lifecycle: flags passed to a full GC are
// reset afterwards, flags set by starting incremental marking survive
// scavenges, and a subsequent full GC clears them again.
2773 HEAP_TEST(GCFlags) {
2774 CcTest::InitializeVM();
2775 Heap* heap = CcTest::heap();
2777 heap->set_current_gc_flags(Heap::kNoGCFlags);
2778 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
2780 // Set the flags to check whether we appropriately resets them after the GC.
2781 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
2782 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
2783 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
// Wait for any concurrent sweeping so incremental marking can start.
2785 MarkCompactCollector* collector = heap->mark_compact_collector();
2786 if (collector->sweeping_in_progress()) {
2787 collector->EnsureSweepingCompleted();
2790 IncrementalMarking* marking = heap->incremental_marking();
2792 marking->Start(Heap::kReduceMemoryFootprintMask);
2793 CHECK_NE(0, heap->current_gc_flags() & Heap::kReduceMemoryFootprintMask);
2795 heap->CollectGarbage(NEW_SPACE);
2796 // NewSpace scavenges should not overwrite the flags.
2797 CHECK_NE(0, heap->current_gc_flags() & Heap::kReduceMemoryFootprintMask);
2799 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2800 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags());
// Verifies that once the idle-marking delay counter limit is approached and
// the weak closure is over-approximated, a long idle notification finishes
// incremental marking and triggers exactly one GC.
2804 TEST(IdleNotificationFinishMarking) {
2805 i::FLAG_allow_natives_syntax = true;
2806 CcTest::InitializeVM();
2807 SimulateFullSpace(CcTest::heap()->old_space());
2808 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2810 marking->Start(Heap::kNoGCFlags);
2812 CHECK_EQ(CcTest::heap()->gc_count(), 0);
2814 // TODO(hpayer): We cannot write proper unit test right now for heap.
2815 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2816 // marking delay counter.
2818 // Perform a huge incremental marking step but don't complete marking.
2819 intptr_t bytes_processed = 0;
// (Loop head elided in this listing) step until a step processes nothing.
2822 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2823 IncrementalMarking::FORCE_MARKING,
2824 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2825 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2826 } while (bytes_processed);
2828 // The next invocations of incremental marking are not going to complete
2830 // since the completion threshold is not reached
2831 for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
2833 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2834 IncrementalMarking::FORCE_MARKING,
2835 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2836 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
// Pretend the weak closure was over-approximated so marking may finish.
2839 marking->SetWeakClosureWasOverApproximatedForTesting(true);
2841 // The next idle notification has to finish incremental marking.
2842 const double kLongIdleTime = 1000.0;
2843 CcTest::isolate()->IdleNotificationDeadline(
2844 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2845 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2847 CHECK_EQ(CcTest::heap()->gc_count(), 1);
2851 // Test that HAllocateObject will always return an object in new-space.
// Even with new space full and AlwaysAllocateScope active, an object
// constructed by optimized code must still land in new space.
2852 TEST(OptimizedAllocationAlwaysInNewSpace) {
2853 i::FLAG_allow_natives_syntax = true;
2854 CcTest::InitializeVM();
// Skip configurations where optimized allocation behaves differently.
2855 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2856 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2857 v8::HandleScope scope(CcTest::isolate());
2859 SimulateFullSpace(CcTest::heap()->new_space());
2860 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2861 v8::Local<v8::Value> res = CompileRun(
2864 " for (var i = 0; i < 32; i++) {"
2865 " this['x' + i] = x;"
2868 "function f(x) { return new c(x); };"
2870 "%OptimizeFunctionOnNextCall(f);"
2873 4, res.As<v8::Object>()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2875 Handle<JSObject> o =
2876 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// The key assertion: the optimized allocation stayed in new space.
2878 CHECK(CcTest::heap()->InNewSpace(*o));
// Verifies that allocation folding in optimized code respects pretenuring
// decisions: after enough allocations to trigger pretenuring, the folded
// literal and both nested arrays (plus their elements) end up in old space.
2882 TEST(OptimizedPretenuringAllocationFolding) {
2883 i::FLAG_allow_natives_syntax = true;
2884 i::FLAG_expose_gc = true;
2885 CcTest::InitializeVM();
// Skip configurations where pretenuring decisions are perturbed.
2886 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2887 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2888 v8::HandleScope scope(CcTest::isolate());
2890 // Grow new space until maximum capacity reached.
2891 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2892 CcTest::heap()->new_space()->Grow();
2895 i::ScopedVector<char> source(1024);
// The script allocates enough [[{}], [1.1]] literals (driven by
// AllocationSite::kPretenureMinimumCreated) to flip the site to tenured.
2898 "var number_elements = %d;"
2899 "var elements = new Array();"
2901 " for (var i = 0; i < number_elements; i++) {"
2902 " elements[i] = [[{}], [1.1]];"
2904 " return elements[number_elements-1]"
2908 "%%OptimizeFunctionOnNextCall(f);"
2910 AllocationSite::kPretenureMinimumCreated);
2912 v8::Local<v8::Value> res = CompileRun(source.start());
2914 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2915 Handle<JSObject> int_array_handle =
2916 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2917 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2918 Handle<JSObject> double_array_handle =
2919 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2921 Handle<JSObject> o =
2922 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// Outer literal, both nested arrays, and their backing stores must all be
// pretenured into old space.
2923 CHECK(CcTest::heap()->InOldSpace(*o));
2924 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2925 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2926 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2927 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
// Verifies that object-array literals ([{}, {}, {}]) allocated in an
// optimized function are pretenured: both the array and its elements
// backing store must be in old space.
2931 TEST(OptimizedPretenuringObjectArrayLiterals) {
2932 i::FLAG_allow_natives_syntax = true;
2933 i::FLAG_expose_gc = true;
2934 CcTest::InitializeVM();
2935 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2936 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2937 v8::HandleScope scope(CcTest::isolate());
2939 // Grow new space until maximum capacity reached.
2940 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2941 CcTest::heap()->new_space()->Grow();
2944 i::ScopedVector<char> source(1024);
2947 "var number_elements = %d;"
2948 "var elements = new Array(number_elements);"
2950 " for (var i = 0; i < number_elements; i++) {"
2951 " elements[i] = [{}, {}, {}];"
2953 " return elements[number_elements - 1];"
2957 "%%OptimizeFunctionOnNextCall(f);"
2959 AllocationSite::kPretenureMinimumCreated);
2961 v8::Local<v8::Value> res = CompileRun(source.start());
2963 Handle<JSObject> o =
2964 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2966 CHECK(CcTest::heap()->InOldSpace(o->elements()));
2967 CHECK(CcTest::heap()->InOldSpace(*o));
// Verifies pretenuring of an object literal mixing a nested object property
// and double properties ({a: {c: 2.2, d: {}}, b: 1.1}); checks both the
// outer object's fields and the inner object's fields. Double fields may be
// stored unboxed, so those are checked via RawFastDoublePropertyAt instead.
2971 TEST(OptimizedPretenuringMixedInObjectProperties) {
2972 i::FLAG_allow_natives_syntax = true;
2973 i::FLAG_expose_gc = true;
2974 CcTest::InitializeVM();
2975 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2976 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2977 v8::HandleScope scope(CcTest::isolate());
2979 // Grow new space until maximum capacity reached.
2980 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2981 CcTest::heap()->new_space()->Grow();
2985 i::ScopedVector<char> source(1024);
2988 "var number_elements = %d;"
2989 "var elements = new Array(number_elements);"
2991 " for (var i = 0; i < number_elements; i++) {"
2992 " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2994 " return elements[number_elements - 1];"
2998 "%%OptimizeFunctionOnNextCall(f);"
3000 AllocationSite::kPretenureMinimumCreated);
3002 v8::Local<v8::Value> res = CompileRun(source.start());
3004 Handle<JSObject> o =
3005 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3007 CHECK(CcTest::heap()->InOldSpace(*o));
// idx1 -> property 'a' (heap object), idx2 -> property 'b' (double).
3008 FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
3009 FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
3010 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
3011 if (!o->IsUnboxedDoubleField(idx2)) {
3012 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
3014 CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
3017 JSObject* inner_object =
3018 reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
3019 CHECK(CcTest::heap()->InOldSpace(inner_object));
3020 if (!inner_object->IsUnboxedDoubleField(idx1)) {
3021 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
3023 CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
3025 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
// Verifies pretenuring for objects whose properties are doubles
// ({a: 1.1, b: 2.2}): the object and its properties store go to old space.
3029 TEST(OptimizedPretenuringDoubleArrayProperties) {
3030 i::FLAG_allow_natives_syntax = true;
3031 i::FLAG_expose_gc = true;
3032 CcTest::InitializeVM();
3033 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3034 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3035 v8::HandleScope scope(CcTest::isolate());
3037 // Grow new space until maximum capacity reached.
3038 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3039 CcTest::heap()->new_space()->Grow();
3042 i::ScopedVector<char> source(1024);
3045 "var number_elements = %d;"
3046 "var elements = new Array(number_elements);"
3048 " for (var i = 0; i < number_elements; i++) {"
3049 " elements[i] = {a: 1.1, b: 2.2};"
3051 " return elements[i - 1];"
3055 "%%OptimizeFunctionOnNextCall(f);"
3057 AllocationSite::kPretenureMinimumCreated);
3059 v8::Local<v8::Value> res = CompileRun(source.start());
3061 Handle<JSObject> o =
3062 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3064 CHECK(CcTest::heap()->InOldSpace(*o));
3065 CHECK(CcTest::heap()->InOldSpace(o->properties()));
// Verifies pretenuring of plain double-array literals ([1.1, 2.2, 3.3]):
// both the array object and its (double) elements store go to old space.
// NOTE(review): lowercase 'd' in the test name is a long-standing typo in
// the identifier; renaming would change the registered test name.
3069 TEST(OptimizedPretenuringdoubleArrayLiterals) {
3070 i::FLAG_allow_natives_syntax = true;
3071 i::FLAG_expose_gc = true;
3072 CcTest::InitializeVM();
3073 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3074 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3075 v8::HandleScope scope(CcTest::isolate());
3077 // Grow new space until maximum capacity reached.
3078 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3079 CcTest::heap()->new_space()->Grow();
3082 i::ScopedVector<char> source(1024);
3085 "var number_elements = %d;"
3086 "var elements = new Array(number_elements);"
3088 " for (var i = 0; i < number_elements; i++) {"
3089 " elements[i] = [1.1, 2.2, 3.3];"
3091 " return elements[number_elements - 1];"
3095 "%%OptimizeFunctionOnNextCall(f);"
3097 AllocationSite::kPretenureMinimumCreated);
3099 v8::Local<v8::Value> res = CompileRun(source.start());
3101 Handle<JSObject> o =
3102 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3104 CHECK(CcTest::heap()->InOldSpace(o->elements()));
3105 CHECK(CcTest::heap()->InOldSpace(*o));
// Verifies pretenuring of a nested mixed literal [[{}, {}, {}], [1.1, 2.2,
// 3.3]]: the outer array, the object array, the double array, and both
// element backing stores must all be in old space.
3109 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
3110 i::FLAG_allow_natives_syntax = true;
3111 i::FLAG_expose_gc = true;
3112 CcTest::InitializeVM();
3113 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3114 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3115 v8::HandleScope scope(CcTest::isolate());
3117 // Grow new space until maximum capacity reached.
3118 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3119 CcTest::heap()->new_space()->Grow();
3122 i::ScopedVector<char> source(1024);
// Unlike the sibling tests, number_elements is hard-coded (100) here
// instead of being substituted via the format string.
3125 "var number_elements = 100;"
3126 "var elements = new Array(number_elements);"
3128 " for (var i = 0; i < number_elements; i++) {"
3129 " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
3131 " return elements[number_elements - 1];"
3135 "%%OptimizeFunctionOnNextCall(f);"
3138 v8::Local<v8::Value> res = CompileRun(source.start());
3140 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
3141 Handle<JSObject> int_array_handle =
3142 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
3143 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
3144 Handle<JSObject> double_array_handle =
3145 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
3147 Handle<JSObject> o =
3148 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3149 CHECK(CcTest::heap()->InOldSpace(*o));
3150 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
3151 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
3152 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
3153 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
// Verifies pretenuring of nested object-array literals
// ([[{}, {}, {}],[{}, {}, {}]]): outer array, both inner arrays, and their
// element backing stores must all be in old space.
3157 TEST(OptimizedPretenuringNestedObjectLiterals) {
3158 i::FLAG_allow_natives_syntax = true;
3159 i::FLAG_expose_gc = true;
3160 CcTest::InitializeVM();
3161 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3162 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3163 v8::HandleScope scope(CcTest::isolate());
3165 // Grow new space until maximum capacity reached.
3166 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3167 CcTest::heap()->new_space()->Grow();
3170 i::ScopedVector<char> source(1024);
3173 "var number_elements = %d;"
3174 "var elements = new Array(number_elements);"
3176 " for (var i = 0; i < number_elements; i++) {"
3177 " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
3179 " return elements[number_elements - 1];"
3183 "%%OptimizeFunctionOnNextCall(f);"
3185 AllocationSite::kPretenureMinimumCreated);
3187 v8::Local<v8::Value> res = CompileRun(source.start());
3189 v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
3190 Handle<JSObject> int_array_handle_1 =
3191 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
3192 v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
3193 Handle<JSObject> int_array_handle_2 =
3194 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
3196 Handle<JSObject> o =
3197 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3198 CHECK(CcTest::heap()->InOldSpace(*o));
3199 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
3200 CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
3201 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
3202 CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
// Verifies pretenuring of nested double-array literals
// ([[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]]): outer array, both inner double
// arrays, and their element backing stores must all be in old space.
3206 TEST(OptimizedPretenuringNestedDoubleLiterals) {
3207 i::FLAG_allow_natives_syntax = true;
3208 i::FLAG_expose_gc = true;
3209 CcTest::InitializeVM();
3210 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3211 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3212 v8::HandleScope scope(CcTest::isolate());
3214 // Grow new space until maximum capacity reached.
3215 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3216 CcTest::heap()->new_space()->Grow();
3219 i::ScopedVector<char> source(1024);
3222 "var number_elements = %d;"
3223 "var elements = new Array(number_elements);"
3225 " for (var i = 0; i < number_elements; i++) {"
3226 " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
3228 " return elements[number_elements - 1];"
3232 "%%OptimizeFunctionOnNextCall(f);"
3234 AllocationSite::kPretenureMinimumCreated);
3236 v8::Local<v8::Value> res = CompileRun(source.start());
3238 v8::Local<v8::Value> double_array_1 =
3239 v8::Object::Cast(*res)->Get(v8_str("0"));
3240 Handle<JSObject> double_array_handle_1 =
3241 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
3242 v8::Local<v8::Value> double_array_2 =
3243 v8::Object::Cast(*res)->Get(v8_str("1"));
3244 Handle<JSObject> double_array_handle_2 =
3245 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
3247 Handle<JSObject> o =
3248 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3249 CHECK(CcTest::heap()->InOldSpace(*o));
3250 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
3251 CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
3252 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
3253 CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
3257 // Make sure pretenuring feedback is gathered for constructed objects as well
3259 TEST(OptimizedPretenuringConstructorCalls) {
3260 if (!i::FLAG_pretenuring_call_new) {
3261 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
3264 i::FLAG_allow_natives_syntax = true;
3265 i::FLAG_expose_gc = true;
3266 CcTest::InitializeVM();
3267 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3268 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3269 v8::HandleScope scope(CcTest::isolate());
3271 // Grow new space until maximum capacity reached.
3272 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3273 CcTest::heap()->new_space()->Grow();
3276 i::ScopedVector<char> source(1024);
3277 // Call new is doing slack tracking for the first
3278 // JSFunction::kGenerousAllocationCount allocations, and we can't find
3279 // mementos during that time.
3282 "var number_elements = %d;"
3283 "var elements = new Array(number_elements);"
3289 " for (var i = 0; i < number_elements; i++) {"
3290 " elements[i] = new foo();"
3292 " return elements[number_elements - 1];"
3296 "%%OptimizeFunctionOnNextCall(f);"
// Extra kGenerousAllocationCount iterations compensate for the
// slack-tracking phase during which mementos are not found (see above).
3298 AllocationSite::kPretenureMinimumCreated +
3299 JSFunction::kGenerousAllocationCount);
3301 v8::Local<v8::Value> res = CompileRun(source.start());
3303 Handle<JSObject> o =
3304 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// The constructed object must have been pretenured into old space.
3306 CHECK(CcTest::heap()->InOldSpace(*o));
// Verifies pretenuring feedback for direct `new g()` calls (as opposed to
// constructor calls made through a helper); mirrors
// OptimizedPretenuringConstructorCalls.
3310 TEST(OptimizedPretenuringCallNew) {
3311 if (!i::FLAG_pretenuring_call_new) {
3312 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
3315 i::FLAG_allow_natives_syntax = true;
3316 i::FLAG_expose_gc = true;
3317 CcTest::InitializeVM();
3318 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3319 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3320 v8::HandleScope scope(CcTest::isolate());
3322 // Grow new space until maximum capacity reached.
3323 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3324 CcTest::heap()->new_space()->Grow();
3327 i::ScopedVector<char> source(1024);
3328 // Call new is doing slack tracking for the first
3329 // JSFunction::kGenerousAllocationCount allocations, and we can't find
3330 // mementos during that time.
3333 "var number_elements = %d;"
3334 "var elements = new Array(number_elements);"
3335 "function g() { this.a = 0; }"
3337 " for (var i = 0; i < number_elements; i++) {"
3338 " elements[i] = new g();"
3340 " return elements[number_elements - 1];"
3344 "%%OptimizeFunctionOnNextCall(f);"
3346 AllocationSite::kPretenureMinimumCreated +
3347 JSFunction::kGenerousAllocationCount);
3349 v8::Local<v8::Value> res = CompileRun(source.start());
3351 Handle<JSObject> o =
3352 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3353 CHECK(CcTest::heap()->InOldSpace(*o));
3357 // Test regular array literals allocation.
// Without pretenuring pressure, an optimized array literal's elements
// backing store should still be allocated in new space.
3358 TEST(OptimizedAllocationArrayLiterals) {
3359 i::FLAG_allow_natives_syntax = true;
3360 CcTest::InitializeVM();
3361 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3362 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3363 v8::HandleScope scope(CcTest::isolate());
3365 v8::Local<v8::Value> res = CompileRun(
3367 " var numbers = new Array(1, 2, 3);"
3368 " numbers[0] = 3.14;"
3372 "%OptimizeFunctionOnNextCall(f);"
// Storing 3.14 transitions the array to double elements; index 0 truncates
// back to 3 when read as an Int32.
3374 CHECK_EQ(static_cast<int>(3.14),
3375 v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
3377 Handle<JSObject> o =
3378 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3380 CHECK(CcTest::heap()->InNewSpace(o->elements()));
// Returns the number of transitions recorded in |map|'s transition array.
3384 static int CountMapTransitions(Map* map) {
3385 return TransitionArray::NumberOfTransitions(map->raw_transitions());
3389 // Test that map transitions are cleared and maps are collected with
3390 // incremental marking as well.
// Creates 256 transitions from F's initial map, simulates incremental
// marking, then checks that a full GC leaves exactly one live transition
// (the one kept alive by the last instance 'o').
3392 i::FLAG_stress_compaction = false;
3393 i::FLAG_allow_natives_syntax = true;
3394 i::FLAG_trace_incremental_marking = true;
// retain_maps_for_n_gc = 0 so unreferenced maps die on the first GC.
3395 i::FLAG_retain_maps_for_n_gc = 0;
3396 CcTest::InitializeVM();
3397 v8::HandleScope scope(CcTest::isolate());
3398 static const int transitions_count = 256;
3400 CompileRun("function F() {}");
3402 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3403 for (int i = 0; i < transitions_count; i++) {
3404 EmbeddedVector<char, 64> buffer;
// Each distinct property name adds one transition off F's initial map.
3405 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3406 CompileRun(buffer.start());
3408 CompileRun("var root = new F;");
3411 Handle<JSObject> root =
3412 v8::Utils::OpenHandle(
3413 *v8::Handle<v8::Object>::Cast(
3414 CcTest::global()->Get(v8_str("root"))));
3416 // Count number of live transitions before marking.
3417 int transitions_before = CountMapTransitions(root->map());
3418 CompileRun("%DebugPrint(root);");
3419 CHECK_EQ(transitions_count, transitions_before);
3421 SimulateIncrementalMarking(CcTest::heap());
3422 CcTest::heap()->CollectAllGarbage();
3424 // Count number of live transitions after marking. Note that one transition
3425 // is left, because 'o' still holds an instance of one transition target.
3426 int transitions_after = CountMapTransitions(root->map());
3427 CompileRun("%DebugPrint(root);");
3428 CHECK_EQ(1, transitions_after);
// Installs |transitions_count| distinct property transitions on F's initial
// map by assigning a uniquely named property to a fresh instance each time.
3433 static void AddTransitions(int transitions_count) {
3434 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3435 for (int i = 0; i < transitions_count; i++) {
3436 EmbeddedVector<char, 64> buffer;
3437 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3438 CompileRun(buffer.start());
// Looks up a global property by name and returns it as an internal
// JSObject handle.
3443 static Handle<JSObject> GetByName(const char* name) {
3444 return v8::Utils::OpenHandle(
3445 *v8::Handle<v8::Object>::Cast(
3446 CcTest::global()->Get(v8_str(name))));
// Adds property |property_name| = 23 to |object| while arming the heap to
// trigger a (global) GC after |gc_count| allocations, so the transition
// array can be observed shrinking mid-allocation.
3450 static void AddPropertyTo(
3451 int gc_count, Handle<JSObject> object, const char* property_name) {
3452 Isolate* isolate = CcTest::i_isolate();
3453 Factory* factory = isolate->factory();
3454 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3455 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3456 i::FLAG_gc_interval = gc_count;
3457 i::FLAG_gc_global = true;
3458 i::FLAG_retain_maps_for_n_gc = 0;
3459 CcTest::heap()->set_allocation_timeout(gc_count);
3460 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
// Checks that a transition array shrinks correctly when a GC strikes during
// the allocation triggered by adding a property; afterwards only the one
// transition reachable from live objects remains.
3464 TEST(TransitionArrayShrinksDuringAllocToZero) {
3465 i::FLAG_stress_compaction = false;
3466 i::FLAG_allow_natives_syntax = true;
3467 CcTest::InitializeVM();
3468 v8::HandleScope scope(CcTest::isolate());
3469 static const int transitions_count = 10;
3470 CompileRun("function F() { }");
3471 AddTransitions(transitions_count);
3472 CompileRun("var root = new F;");
3473 Handle<JSObject> root = GetByName("root");
3475 // Count number of live transitions before marking.
3476 int transitions_before = CountMapTransitions(root->map());
3477 CHECK_EQ(transitions_count, transitions_before);
3480 CompileRun("o = new F;"
// Re-fetch root: handles may have been invalidated by the CompileRun above.
3482 root = GetByName("root");
3483 AddPropertyTo(2, root, "funny");
3484 CcTest::heap()->CollectGarbage(NEW_SPACE);
3486 // Count number of live transitions after marking. Note that one transition
3487 // is left, because 'o' still holds an instance of one transition target.
3488 int transitions_after = CountMapTransitions(
3489 Map::cast(root->map()->GetBackPointer()));
3490 CHECK_EQ(1, transitions_after);
// Same as TransitionArrayShrinksDuringAllocToZero, but without reassigning
// 'o', so two transitions survive the scavenge (expected count is 2).
3494 TEST(TransitionArrayShrinksDuringAllocToOne) {
3495 i::FLAG_stress_compaction = false;
3496 i::FLAG_allow_natives_syntax = true;
3497 CcTest::InitializeVM();
3498 v8::HandleScope scope(CcTest::isolate());
3499 static const int transitions_count = 10;
3500 CompileRun("function F() {}");
3501 AddTransitions(transitions_count);
3502 CompileRun("var root = new F;");
3503 Handle<JSObject> root = GetByName("root");
3505 // Count number of live transitions before marking.
3506 int transitions_before = CountMapTransitions(root->map());
3507 CHECK_EQ(transitions_count, transitions_before);
3509 root = GetByName("root");
3510 AddPropertyTo(2, root, "funny");
3511 CcTest::heap()->CollectGarbage(NEW_SPACE);
3513 // Count number of live transitions after marking. Note that one transition
3514 // is left, because 'o' still holds an instance of one transition target.
3515 int transitions_after = CountMapTransitions(
3516 Map::cast(root->map()->GetBackPointer()));
3517 CHECK_EQ(2, transitions_after);
// Variant where the added property ("prop9") already has a transition; the
// old-space GC (triggered via gc_count = 0) shrinks the array to one entry.
3521 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3522 i::FLAG_stress_compaction = false;
3523 i::FLAG_allow_natives_syntax = true;
3524 CcTest::InitializeVM();
3525 v8::HandleScope scope(CcTest::isolate());
3526 static const int transitions_count = 10;
3527 CompileRun("function F() {}");
3528 AddTransitions(transitions_count);
3529 CompileRun("var root = new F;");
3530 Handle<JSObject> root = GetByName("root");
3532 // Count number of live transitions before marking.
3533 int transitions_before = CountMapTransitions(root->map());
3534 CHECK_EQ(transitions_count, transitions_before);
3536 root = GetByName("root");
3537 AddPropertyTo(0, root, "prop9");
3538 CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
3540 // Count number of live transitions after marking. Note that one transition
3541 // is left, because 'o' still holds an instance of one transition target.
3542 int transitions_after = CountMapTransitions(
3543 Map::cast(root->map()->GetBackPointer()));
3544 CHECK_EQ(1, transitions_after);
// Starts from a single ("simple") transition and adds a second property so
// the representation must grow to a full transition array.
3548 TEST(TransitionArraySimpleToFull) {
3549 i::FLAG_stress_compaction = false;
3550 i::FLAG_allow_natives_syntax = true;
3551 CcTest::InitializeVM();
3552 v8::HandleScope scope(CcTest::isolate());
3553 static const int transitions_count = 1;
3554 CompileRun("function F() {}");
3555 AddTransitions(transitions_count);
3556 CompileRun("var root = new F;");
3557 Handle<JSObject> root = GetByName("root");
3559 // Count number of live transitions before marking.
3560 int transitions_before = CountMapTransitions(root->map());
3561 CHECK_EQ(transitions_count, transitions_before);
3563 CompileRun("o = new F;"
3565 root = GetByName("root");
// Precondition: with a single transition, the compact "simple" encoding is
// used rather than a full TransitionArray.
3566 DCHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
3567 AddPropertyTo(2, root, "happy");
3569 // Count number of live transitions after marking. Note that one transition
3570 // is left, because 'o' still holds an instance of one transition target.
3571 int transitions_after = CountMapTransitions(
3572 Map::cast(root->map()->GetBackPointer()));
3573 CHECK_EQ(1, transitions_after);
// Regression test: a StoreIC-performed map transition during incremental
// marking must re-grey the root object so its new map is not lost.
3578 TEST(Regress2143a) {
3579 i::FLAG_incremental_marking = true;
3580 CcTest::InitializeVM();
3581 v8::HandleScope scope(CcTest::isolate());
3583 // Prepare a map transition from the root object together with a yet
3584 // untransitioned root object.
3585 CompileRun("var root = new Object;"
3587 "root = new Object;");
3589 SimulateIncrementalMarking(CcTest::heap());
3591 // Compile a StoreIC that performs the prepared map transition. This
3592 // will restart incremental marking and should make sure the root is
3593 // marked grey again.
3594 CompileRun("function f(o) {"
3600 // This bug only triggers with aggressive IC clearing.
3601 CcTest::heap()->AgeInlineCaches();
3603 // Explicitly request GC to perform final marking step and sweeping.
3604 CcTest::heap()->CollectAllGarbage();
3606 Handle<JSObject> root =
3607 v8::Utils::OpenHandle(
3608 *v8::Handle<v8::Object>::Cast(
3609 CcTest::global()->Get(v8_str("root"))));
3611 // The root object should be in a sane state.
3612 CHECK(root->IsJSObject());
3613 CHECK(root->map()->IsMap());
// Same scenario as Regress2143a, but the transition is performed by an
// optimized LStoreNamedField (requires natives syntax for %Optimize/%Deopt).
3617 TEST(Regress2143b) {
3618 i::FLAG_incremental_marking = true;
3619 i::FLAG_allow_natives_syntax = true;
3620 CcTest::InitializeVM();
3621 v8::HandleScope scope(CcTest::isolate());
3623 // Prepare a map transition from the root object together with a yet
3624 // untransitioned root object.
3625 CompileRun("var root = new Object;"
3627 "root = new Object;");
3629 SimulateIncrementalMarking(CcTest::heap());
3631 // Compile an optimized LStoreNamedField that performs the prepared
3632 // map transition. This will restart incremental marking and should
3633 // make sure the root is marked grey again.
3634 CompileRun("function f(o) {"
3639 "%OptimizeFunctionOnNextCall(f);"
3641 "%DeoptimizeFunction(f);");
3643 // This bug only triggers with aggressive IC clearing.
3644 CcTest::heap()->AgeInlineCaches();
3646 // Explicitly request GC to perform final marking step and sweeping.
3647 CcTest::heap()->CollectAllGarbage();
3649 Handle<JSObject> root =
3650 v8::Utils::OpenHandle(
3651 *v8::Handle<v8::Object>::Cast(
3652 CcTest::global()->Get(v8_str("root"))));
3654 // The root object should be in a sane state.
3655 CHECK(root->IsJSObject());
3656 CHECK(root->map()->IsMap());
// Checks that mostly-empty old-space pages are progressively released back
// to the OS across successive full GCs, ending with a single page after a
// last-resort GC.
3660 TEST(ReleaseOverReservedPages) {
3661 if (FLAG_never_compact) return;
3662 i::FLAG_trace_gc = true;
3663 // The optimizer can allocate stuff, messing up the test.
3664 i::FLAG_crankshaft = false;
3665 i::FLAG_always_opt = false;
3666 CcTest::InitializeVM();
3667 Isolate* isolate = CcTest::i_isolate();
3668 Factory* factory = isolate->factory();
3669 Heap* heap = isolate->heap();
3670 v8::HandleScope scope(CcTest::isolate());
3671 static const int number_of_test_pages = 20;
3673 // Prepare many pages with low live-bytes count.
3674 PagedSpace* old_space = heap->old_space();
3675 CHECK_EQ(1, old_space->CountTotalPages());
3676 for (int i = 0; i < number_of_test_pages; i++) {
3677 AlwaysAllocateScope always_allocate(isolate);
// Filling the space forces a new page; the tiny tenured array is the only
// live object on it.
3678 SimulateFullSpace(old_space);
3679 factory->NewFixedArray(1, TENURED);
3681 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
3683 // Triggering one GC will cause a lot of garbage to be discovered but
3684 // evenly spread across all allocated pages.
3685 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3686 "triggered for preparation");
3687 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3689 // Triggering subsequent GCs should cause at least half of the pages
3690 // to be released to the OS after at most two cycles.
3691 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3692 "triggered by test 1");
3693 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3694 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3695 "triggered by test 2");
3696 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
3698 // Triggering a last-resort GC should cause all pages to be released to the
3699 // OS so that other processes can seize the memory. If we get a failure here
3700 // where there are 2 pages left instead of 1, then we should increase the
3701 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3702 // first page should be small in order to reduce memory used when the VM
3703 // boots, but if the 20 small arrays don't fit on the first page then that's
3704 // an indication that it is too small.
3705 heap->CollectAllAvailableGarbage("triggered really hard");
3706 CHECK_EQ(1, old_space->CountTotalPages());
// Counts kForcedGC use-counter notifications delivered to the isolate.
3709 static int forced_gc_counter = 0;
// Use-counter callback installed by TEST(CountForcedGC); increments the
// counter above whenever a forced GC is reported.
3711 void MockUseCounterCallback(v8::Isolate* isolate,
3712 v8::Isolate::UseCounterFeature feature) {
3713 isolate->GetCallingContext();
3714 if (feature == v8::Isolate::kForcedGC) {
3715 forced_gc_counter++;
// Verifies that an explicit gc() call (via --expose-gc) is reported through
// the kForcedGC use counter.
3720 TEST(CountForcedGC) {
3721 i::FLAG_expose_gc = true;
3722 CcTest::InitializeVM();
3723 Isolate* isolate = CcTest::i_isolate();
3724 v8::HandleScope scope(CcTest::isolate());
3726 isolate->SetUseCounterCallback(MockUseCounterCallback);
3728 forced_gc_counter = 0;
3729 const char* source = "gc();";
3731 CHECK_GT(forced_gc_counter, 0);
// Builds a sliced string in old space whose parent seq string starts in new
// space, then checks the parent stays a sequential one-byte string across
// scavenges/full GC (i.e. the slice does not end up pointing at a moved or
// flattened parent incorrectly).
3736 i::FLAG_stress_compaction = false;
3737 CcTest::InitializeVM();
3738 Isolate* isolate = CcTest::i_isolate();
3739 Factory* factory = isolate->factory();
3740 v8::HandleScope scope(CcTest::isolate());
// Raw-location slot initialized to empty_string; overwritten below so the
// slice survives the inner handle scope.
3741 Handle<String> slice(CcTest::heap()->empty_string());
3744 // Generate a parent that lives in new-space.
3745 v8::HandleScope inner_scope(CcTest::isolate());
3746 const char* c = "This text is long enough to trigger sliced strings.";
3747 Handle<String> s = factory->NewStringFromAsciiChecked(c);
3748 CHECK(s->IsSeqOneByteString());
3749 CHECK(CcTest::heap()->InNewSpace(*s));
3751 // Generate a sliced string that is based on the above parent and
3752 // lives in old-space.
3753 SimulateFullSpace(CcTest::heap()->new_space());
3754 AlwaysAllocateScope always_allocate(isolate);
3755 Handle<String> t = factory->NewProperSubString(s, 5, 35);
3756 CHECK(t->IsSlicedString());
3757 CHECK(!CcTest::heap()->InNewSpace(*t));
// Escape the inner scope by writing directly into the outer handle's slot.
3758 *slice.location() = *t.location();
3761 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
3762 CcTest::heap()->CollectAllGarbage();
3763 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
// Smoke test (OBJECT_PRINT builds only): printing a SharedFunctionInfo to
// stdout must not crash.
3768 TEST(PrintSharedFunctionInfo) {
3769 CcTest::InitializeVM();
3770 v8::HandleScope scope(CcTest::isolate());
3771 const char* source = "f = function() { return 987654321; }\n"
3772 "g = function() { return 123456789; }\n";
3774 Handle<JSFunction> g =
3775 v8::Utils::OpenHandle(
3776 *v8::Handle<v8::Function>::Cast(
3777 CcTest::global()->Get(v8_str("g"))));
3779 OFStream os(stdout);
3780 g->shared()->Print(os);
3783 #endif  // OBJECT_PRINT
// Incremental marking must not clear monomorphic call IC feedback: the weak
// cells in f's feedback vector (for closures from two different native
// contexts) must survive marking + full GC uncleared.
3786 TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3787 if (i::FLAG_always_opt) return;
3788 CcTest::InitializeVM();
3789 v8::HandleScope scope(CcTest::isolate());
3790 v8::Local<v8::Value> fun1, fun2;
3794 CompileRun("function fun() {};");
3795 fun1 = env->Global()->Get(v8_str("fun"));
3800 CompileRun("function fun() {};");
3801 fun2 = env->Global()->Get(v8_str("fun"));
3804 // Prepare function f that contains type feedback for closures
3805 // originating from two different native contexts.
3806 CcTest::global()->Set(v8_str("fun1"), fun1);
3807 CcTest::global()->Set(v8_str("fun2"), fun2);
3808 CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3810 Handle<JSFunction> f =
3811 v8::Utils::OpenHandle(
3812 *v8::Handle<v8::Function>::Cast(
3813 CcTest::global()->Get(v8_str("f"))));
3815 Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
// One IC slot per call site in f: a() and b().
3817 int expected_slots = 2;
3818 CHECK_EQ(expected_slots, feedback_vector->ICSlots());
3821 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot1))->IsWeakCell());
3822 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot2))->IsWeakCell());
3824 SimulateIncrementalMarking(CcTest::heap());
3825 CcTest::heap()->CollectAllGarbage();
// The weak cells must still hold their targets (not be cleared) after GC.
3827 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot1)))
3829 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot2)))
// Scans |code|'s relocation info for the first inline-cache stub of the
// given |kind| among its call/construct targets.
3834 static Code* FindFirstIC(Code* code, Code::Kind kind) {
3835 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3836 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
3837 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3838 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3839 RelocInfo* info = it.rinfo();
3840 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3841 if (target->is_inline_cache_stub() && target->kind() == kind) {
// Asserts that the load IC at |ic_slot_index| in |f|'s feedback vector is
// in |desired_state| (e.g. MONOMORPHIC).
3849 static void CheckVectorIC(Handle<JSFunction> f, int ic_slot_index,
3850 InlineCacheState desired_state) {
3851 Handle<TypeFeedbackVector> vector =
3852 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3853 FeedbackVectorICSlot slot(ic_slot_index);
3854 LoadICNexus nexus(vector, slot);
3855 CHECK(nexus.StateFromFeedback() == desired_state);
// Asserts that the load IC at |ic_slot_index| in |f|'s feedback vector has
// been cleared.
3859 static void CheckVectorICCleared(Handle<JSFunction> f, int ic_slot_index) {
3860 Handle<TypeFeedbackVector> vector =
3861 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3862 FeedbackVectorICSlot slot(ic_slot_index);
3863 LoadICNexus nexus(vector, slot);
3864 CHECK(IC::IsCleared(&nexus));
// A monomorphic constructor feedback slot (weak cell) for a same-context
// function must survive incremental marking + full GC.
3868 TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
3869 if (i::FLAG_always_opt) return;
3870 CcTest::InitializeVM();
3871 v8::HandleScope scope(CcTest::isolate());
3873 // Prepare function f that contains a monomorphic IC for object
3874 // originating from the same native context.
3876 "function fun() { this.x = 1; };"
3877 "function f(o) { return new o(); } f(fun); f(fun);");
3878 Handle<JSFunction> f = v8::Utils::OpenHandle(
3879 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3882 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3883 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3885 SimulateIncrementalMarking(CcTest::heap());
3886 CcTest::heap()->CollectAllGarbage();
// Feedback must still be a weak cell — not cleared to the sentinel.
3888 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
// Checks that a monomorphic construct-site feedback cell referring to a
// constructor from a *different* native context is cleared back to the
// uninitialized sentinel once that context is disposed and a GC runs.
3892 TEST(IncrementalMarkingClearsMonomorphicConstructor) {
3893 if (i::FLAG_always_opt) return;
3894 CcTest::InitializeVM();
3895 Isolate* isolate = CcTest::i_isolate();
3896 v8::HandleScope scope(CcTest::isolate());
3897 v8::Local<v8::Value> fun1;
// NOTE(review): the block that creates the second LocalContext |env| is not
// fully visible in this excerpt (gap at original lines 3898-3900).
3901 CompileRun("function fun() { this.x = 1; };");
3902 fun1 = env->Global()->Get(v8_str("fun"));
3905 // Prepare function f that contains a monomorphic constructor for object
3906 // originating from a different native context.
3907 CcTest::global()->Set(v8_str("fun1"), fun1);
3909 "function fun() { this.x = 1; };"
3910 "function f(o) { return new o(); } f(fun1); f(fun1);");
3911 Handle<JSFunction> f = v8::Utils::OpenHandle(
3912 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3915 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3916 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3918 // Fire context dispose notification.
3919 CcTest::isolate()->ContextDisposedNotification();
3920 SimulateIncrementalMarking(CcTest::heap());
3921 CcTest::heap()->CollectAllGarbage();
// Cross-context feedback must have been dropped to the sentinel.
3923 CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
3924 vector->Get(FeedbackVectorSlot(0)));
// Checks that a monomorphic load IC (same native context) keeps its
// MONOMORPHIC vector state across incremental marking + full GC.
3928 TEST(IncrementalMarkingPreservesMonomorphicIC) {
3929 if (i::FLAG_always_opt) return;
3930 CcTest::InitializeVM();
3931 v8::HandleScope scope(CcTest::isolate());
3933 // Prepare function f that contains a monomorphic IC for object
3934 // originating from the same native context.
3935 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3936 "function f(o) { return o.x; } f(obj); f(obj);");
3937 Handle<JSFunction> f =
3938 v8::Utils::OpenHandle(
3939 *v8::Handle<v8::Function>::Cast(
3940 CcTest::global()->Get(v8_str("f"))));
3942 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3943 CheckVectorIC(f, 0, MONOMORPHIC);
3944 CHECK(ic_before->ic_state() == DEFAULT);
3946 SimulateIncrementalMarking(CcTest::heap());
3947 CcTest::heap()->CollectAllGarbage();
// State must survive the GC: still MONOMORPHIC in the vector, DEFAULT stub.
3949 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3950 CheckVectorIC(f, 0, MONOMORPHIC);
3951 CHECK(ic_after->ic_state() == DEFAULT);
// Checks that a monomorphic load IC whose receiver map comes from another
// native context is cleared once that context is disposed and a GC runs.
3955 TEST(IncrementalMarkingClearsMonomorphicIC) {
3956 if (i::FLAG_always_opt) return;
3957 CcTest::InitializeVM();
3958 v8::HandleScope scope(CcTest::isolate());
3959 v8::Local<v8::Value> obj1;
// NOTE(review): the creation of the secondary LocalContext |env| is not
// visible in this excerpt (gap at original lines 3960-3962).
3963 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3964 obj1 = env->Global()->Get(v8_str("obj"));
3967 // Prepare function f that contains a monomorphic IC for object
3968 // originating from a different native context.
3969 CcTest::global()->Set(v8_str("obj1"), obj1);
3970 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
3971 Handle<JSFunction> f = v8::Utils::OpenHandle(
3972 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3974 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3975 CheckVectorIC(f, 0, MONOMORPHIC);
3976 CHECK(ic_before->ic_state() == DEFAULT);
3978 // Fire context dispose notification.
3979 CcTest::isolate()->ContextDisposedNotification();
3980 SimulateIncrementalMarking(CcTest::heap());
3981 CcTest::heap()->CollectAllGarbage();
// Cross-context feedback must be gone; the stub itself stays DEFAULT.
3983 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3984 CheckVectorICCleared(f, 0);
3985 CHECK(ic_after->ic_state() == DEFAULT);
// Checks that a polymorphic load IC (receiver maps from two native contexts,
// with no context-dispose notification fired) stays POLYMORPHIC across
// incremental marking + full GC.
3989 TEST(IncrementalMarkingPreservesPolymorphicIC) {
3990 if (i::FLAG_always_opt) return;
3991 CcTest::InitializeVM();
3992 v8::HandleScope scope(CcTest::isolate());
3993 v8::Local<v8::Value> obj1, obj2;
// NOTE(review): the two LocalContext blocks that create obj1/obj2 are only
// partially visible in this excerpt (gaps around original lines 3994-4005).
3997 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3998 obj1 = env->Global()->Get(v8_str("obj"));
4003 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
4004 obj2 = env->Global()->Get(v8_str("obj"));
4007 // Prepare function f that contains a polymorphic IC for objects
4008 // originating from two different native contexts.
4009 CcTest::global()->Set(v8_str("obj1"), obj1);
4010 CcTest::global()->Set(v8_str("obj2"), obj2);
4011 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
4012 Handle<JSFunction> f = v8::Utils::OpenHandle(
4013 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
4015 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4016 CheckVectorIC(f, 0, POLYMORPHIC);
4017 CHECK(ic_before->ic_state() == DEFAULT);
4019 // Fire context dispose notification.
4020 SimulateIncrementalMarking(CcTest::heap());
4021 CcTest::heap()->CollectAllGarbage();
// Without a dispose notification the polymorphic feedback is preserved.
4023 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4024 CheckVectorIC(f, 0, POLYMORPHIC);
4025 CHECK(ic_after->ic_state() == DEFAULT);
// Checks that a polymorphic load IC with receiver maps from two other native
// contexts is cleared after a context-dispose notification plus GC.
4029 TEST(IncrementalMarkingClearsPolymorphicIC) {
4030 if (i::FLAG_always_opt) return;
4031 CcTest::InitializeVM();
4032 v8::HandleScope scope(CcTest::isolate());
4033 v8::Local<v8::Value> obj1, obj2;
// NOTE(review): the two LocalContext blocks that create obj1/obj2 are only
// partially visible in this excerpt (gaps around original lines 4034-4045).
4037 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
4038 obj1 = env->Global()->Get(v8_str("obj"));
4043 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
4044 obj2 = env->Global()->Get(v8_str("obj"));
4047 // Prepare function f that contains a polymorphic IC for objects
4048 // originating from two different native contexts.
4049 CcTest::global()->Set(v8_str("obj1"), obj1);
4050 CcTest::global()->Set(v8_str("obj2"), obj2);
4051 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
4052 Handle<JSFunction> f = v8::Utils::OpenHandle(
4053 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
4055 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
4056 CheckVectorIC(f, 0, POLYMORPHIC);
4057 CHECK(ic_before->ic_state() == DEFAULT);
4059 // Fire context dispose notification.
4060 CcTest::isolate()->ContextDisposedNotification();
4061 SimulateIncrementalMarking(CcTest::heap());
4062 CcTest::heap()->CollectAllGarbage();
// Vector feedback cleared; ic_before (captured above) is re-checked here —
// note this test does not re-fetch an ic_after like its siblings do.
4064 CheckVectorICCleared(f, 0);
4065 CHECK(ic_before->ic_state() == DEFAULT);
// External one-byte string resource that records when V8 disposes it.
// Dispose() frees the underlying buffer; IsDisposed() then reports true
// (data_ is expected to be nulled — the assignment is not visible in this
// excerpt, gap after original line 4075).
4069 class SourceResource : public v8::String::ExternalOneByteStringResource {
4071 explicit SourceResource(const char* data)
4072 : data_(data), length_(strlen(data)) { }
4074 virtual void Dispose() {
4075 i::DeleteArray(data_);
4079 const char* data() const { return data_; }
4081 size_t length() const { return length_; }
4083 bool IsDisposed() { return data_ == NULL; }
// Runs |source| (as an external string), triggers the Error.stack |accessor|,
// and asserts that the external source data is released by GC only after the
// accessor has fired — i.e. the stack-trace machinery does not retain it.
4091 void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
4092 const char* accessor) {
4093 // Test that the data retained by the Error.stack accessor is released
4094 // after the first time the accessor is fired. We use external string
4095 // to check whether the data is being released since the external string
4096 // resource's callback is fired when the external string is GC'ed.
4097 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
4098 v8::HandleScope scope(isolate);
4099 SourceResource* resource = new SourceResource(i::StrDup(source));
// NOTE(review): the second |scope| below shadows the outer one inside a
// nested block whose opening brace is not visible here (gap at line 4100).
4101 v8::HandleScope scope(isolate);
4102 v8::Handle<v8::String> source_string =
4103 v8::String::NewExternal(isolate, resource);
4104 i_isolate->heap()->CollectAllAvailableGarbage();
4105 v8::Script::Compile(source_string)->Run();
4106 CHECK(!resource->IsDisposed());
4108 // i_isolate->heap()->CollectAllAvailableGarbage();
// Before the accessor fires, the source must still be retained.
4109 CHECK(!resource->IsDisposed());
4111 CompileRun(accessor);
4112 i_isolate->heap()->CollectAllAvailableGarbage();
4114 // External source has been released.
4115 CHECK(resource->IsDisposed());
// Exercises ReleaseStackTraceDataTest with four error-producing sources
// (normal errors and stack overflows, plain and via __proto__) combined with
// both the stack getter and the stack setter, on a fresh isolate with ICs
// and concurrent recompilation disabled so nothing else retains the source.
4120 UNINITIALIZED_TEST(ReleaseStackTraceData) {
4121 if (i::FLAG_always_opt) {
4122 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
4123 // See: https://codereview.chromium.org/181833004/
4126 FLAG_use_ic = false; // ICs retain objects.
4127 FLAG_concurrent_recompilation = false;
4128 v8::Isolate::CreateParams create_params;
4129 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
4130 v8::Isolate* isolate = v8::Isolate::New(create_params);
4132 v8::Isolate::Scope isolate_scope(isolate);
4133 v8::HandleScope handle_scope(isolate);
4134 v8::Context::New(isolate)->Enter();
// NOTE(review): the tails of the source strings (catch clauses etc.) are not
// visible in this excerpt — gaps in the listing between the fragments below.
4135 static const char* source1 = "var error = null; "
4136 /* Normal Error */ "try { "
4137 " throw new Error(); "
4141 static const char* source2 = "var error = null; "
4142 /* Stack overflow */ "try { "
4143 " (function f() { f(); })(); "
4147 static const char* source3 = "var error = null; "
4148 /* Normal Error */ "try { "
4149 /* as prototype */ " throw new Error(); "
4152 " error.__proto__ = e; "
4154 static const char* source4 = "var error = null; "
4155 /* Stack overflow */ "try { "
4156 /* as prototype */ " (function f() { f(); })(); "
4159 " error.__proto__ = e; "
4161 static const char* getter = "error.stack";
4162 static const char* setter = "error.stack = 0";
4164 ReleaseStackTraceDataTest(isolate, source1, setter);
4165 ReleaseStackTraceDataTest(isolate, source2, setter);
4166 // We do not test source3 and source4 with setter, since the setter is
4167 // supposed to (untypically) write to the receiver, not the holder. This is
4168 // to emulate the behavior of a data property.
4170 ReleaseStackTraceDataTest(isolate, source1, getter);
4171 ReleaseStackTraceDataTest(isolate, source2, getter);
4172 ReleaseStackTraceDataTest(isolate, source3, getter);
4173 ReleaseStackTraceDataTest(isolate, source4, getter);
// Regression test: code flushing must not flush the unoptimized code of a
// function whose optimized code is held alive (here via a black-marked
// handle), otherwise deoptimizing later crashes ("go ballistic").
4179 TEST(Regress159140) {
4180 i::FLAG_allow_natives_syntax = true;
4181 CcTest::InitializeVM();
4182 Isolate* isolate = CcTest::i_isolate();
4183 Heap* heap = isolate->heap();
4184 HandleScope scope(isolate);
4186 // Perform one initial GC to enable code flushing.
4187 heap->CollectAllGarbage();
4189 // Prepare several closures that are all eligible for code flushing
4190 // because all reachable ones are not optimized. Make sure that the
4191 // optimized code object is directly reachable through a handle so
4192 // that it is marked black during incremental marking.
// NOTE(review): the declaration of |code| (a Handle<Code>) and the scope's
// opening brace are not visible in this excerpt (gaps at lines 4193-4194).
4195 HandleScope inner_scope(isolate);
4196 CompileRun("function h(x) {}"
4197 "function mkClosure() {"
4198 " return function(x) { return x + 1; };"
4200 "var f = mkClosure();"
4201 "var g = mkClosure();"
4205 "%OptimizeFunctionOnNextCall(f); f(3);"
4206 "%OptimizeFunctionOnNextCall(h); h(3);");
4208 Handle<JSFunction> f =
4209 v8::Utils::OpenHandle(
4210 *v8::Handle<v8::Function>::Cast(
4211 CcTest::global()->Get(v8_str("f"))));
4212 CHECK(f->is_compiled());
4213 CompileRun("f = null;");
4215 Handle<JSFunction> g =
4216 v8::Utils::OpenHandle(
4217 *v8::Handle<v8::Function>::Cast(
4218 CcTest::global()->Get(v8_str("g"))));
4219 CHECK(g->is_compiled());
// Age g's code past the flushing threshold so it becomes a flush candidate.
4220 const int kAgingThreshold = 6;
4221 for (int i = 0; i < kAgingThreshold; i++) {
4222 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4225 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
4228 // Simulate incremental marking so that the functions are enqueued as
4229 // code flushing candidates. Then optimize one function. Finally
4230 // finish the GC to complete code flushing.
4231 SimulateIncrementalMarking(heap);
4232 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
4233 heap->CollectAllGarbage();
4235 // Unoptimized code is missing and the deoptimizer will go ballistic.
4236 CompileRun("g('bozo');");
// Regression test: flushing unoptimized code that is still cached in the
// optimized code map must not break a later closure instantiated from that
// map (the deoptimizer would otherwise find no unoptimized code).
4240 TEST(Regress165495) {
4241 i::FLAG_allow_natives_syntax = true;
4242 CcTest::InitializeVM();
4243 Isolate* isolate = CcTest::i_isolate();
4244 Heap* heap = isolate->heap();
4245 HandleScope scope(isolate);
4247 // Perform one initial GC to enable code flushing.
4248 heap->CollectAllGarbage();
4250 // Prepare an optimized closure that the optimized code map will get
4251 // populated. Then age the unoptimized code to trigger code flushing
4252 // but make sure the optimized code is unreachable.
4254 HandleScope inner_scope(isolate);
4255 CompileRun("function mkClosure() {"
4256 " return function(x) { return x + 1; };"
4258 "var f = mkClosure();"
4260 "%OptimizeFunctionOnNextCall(f); f(3);");
4262 Handle<JSFunction> f =
4263 v8::Utils::OpenHandle(
4264 *v8::Handle<v8::Function>::Cast(
4265 CcTest::global()->Get(v8_str("f"))));
4266 CHECK(f->is_compiled());
// Age the shared (unoptimized) code past the flushing threshold.
4267 const int kAgingThreshold = 6;
4268 for (int i = 0; i < kAgingThreshold; i++) {
4269 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4272 CompileRun("f = null;");
4275 // Simulate incremental marking so that unoptimized code is flushed
4276 // even though it still is cached in the optimized code map.
4277 SimulateIncrementalMarking(heap);
4278 heap->CollectAllGarbage();
4280 // Make a new closure that will get code installed from the code map.
4281 // Unoptimized code is missing and the deoptimizer will go ballistic.
4282 CompileRun("var g = mkClosure(); g('bozo');");
// Regression test: when a code-flushing candidate's unoptimized code is
// replaced mid-GC (by optimizing it), the candidate list must stay intact;
// gc_metadata on the flushed shared code tracks candidate-list membership.
4286 TEST(Regress169209) {
4287 i::FLAG_stress_compaction = false;
4288 i::FLAG_allow_natives_syntax = true;
4290 CcTest::InitializeVM();
4291 Isolate* isolate = CcTest::i_isolate();
4292 Heap* heap = isolate->heap();
4293 HandleScope scope(isolate);
4295 // Perform one initial GC to enable code flushing.
4296 heap->CollectAllGarbage();
4298 // Prepare a shared function info eligible for code flushing for which
4299 // the unoptimized code will be replaced during optimization.
4300 Handle<SharedFunctionInfo> shared1;
4302 HandleScope inner_scope(isolate);
4303 CompileRun("function f() { return 'foobar'; }"
4304 "function g(x) { if (x) f(); }"
4309 Handle<JSFunction> f =
4310 v8::Utils::OpenHandle(
4311 *v8::Handle<v8::Function>::Cast(
4312 CcTest::global()->Get(v8_str("f"))));
4313 CHECK(f->is_compiled());
4314 const int kAgingThreshold = 6;
4315 for (int i = 0; i < kAgingThreshold; i++) {
4316 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4319 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4322 // Prepare a shared function info eligible for code flushing that will
4323 // represent the dangling tail of the candidate list.
4324 Handle<SharedFunctionInfo> shared2;
4326 HandleScope inner_scope(isolate);
4327 CompileRun("function flushMe() { return 0; }"
4330 Handle<JSFunction> f =
4331 v8::Utils::OpenHandle(
4332 *v8::Handle<v8::Function>::Cast(
4333 CcTest::global()->Get(v8_str("flushMe"))));
4334 CHECK(f->is_compiled());
4335 const int kAgingThreshold = 6;
4336 for (int i = 0; i < kAgingThreshold; i++) {
4337 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4340 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4343 // Simulate incremental marking and collect code flushing candidates.
4344 SimulateIncrementalMarking(heap);
// Non-null gc_metadata indicates shared1 is enqueued as a flush candidate.
4345 CHECK(shared1->code()->gc_metadata() != NULL);
4347 // Optimize function and make sure the unoptimized code is replaced.
4351 CompileRun("%OptimizeFunctionOnNextCall(g);"
4354 // Finish garbage collection cycle.
4355 heap->CollectAllGarbage();
// After the cycle the candidate bookkeeping must be cleared again.
4356 CHECK(shared1->code()->gc_metadata() == NULL);
// Regression test: arranges new-space so that a JSArray literal is allocated
// right before the space runs out, leaving no room for its AllocationMemento;
// running the literal code must then not read past the filler (would crash a
// verify-heap / protection-checking build).
4360 TEST(Regress169928) {
4361 i::FLAG_allow_natives_syntax = true;
4362 i::FLAG_crankshaft = false;
4363 CcTest::InitializeVM();
4364 Isolate* isolate = CcTest::i_isolate();
4365 Factory* factory = isolate->factory();
4366 v8::HandleScope scope(CcTest::isolate());
4368 // Some flags turn Scavenge collections into Mark-sweep collections
4369 // and hence are incompatible with this test case.
4370 if (FLAG_gc_global || FLAG_stress_compaction) return;
4372 // Prepare the environment
4373 CompileRun("function fastliteralcase(literal, value) {"
4374 " literal[0] = value;"
4377 "function get_standard_literal() {"
4378 " var literal = [1, 2, 3];"
4381 "obj = fastliteralcase(get_standard_literal(), 1);"
4382 "obj = fastliteralcase(get_standard_literal(), 1.5);"
4383 "obj = fastliteralcase(get_standard_literal(), 2);");
// Precompile the trigger script before filling new space (avoids allocation
// at the critical moment).
4386 v8::Local<v8::String> mote_code_string =
4387 v8_str("fastliteralcase(mote, 2.5);");
4389 v8::Local<v8::String> array_name = v8_str("mote");
4390 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
4392 // First make sure we flip spaces
4393 CcTest::heap()->CollectGarbage(NEW_SPACE);
4395 // Allocate the object.
4396 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
4397 array_data->set(0, Smi::FromInt(1));
4398 array_data->set(1, Smi::FromInt(2));
4400 AllocateAllButNBytes(CcTest::heap()->new_space(),
4401 JSArray::kSize + AllocationMemento::kSize +
4404 Handle<JSArray> array =
4405 factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);
4407 CHECK_EQ(Smi::FromInt(2), array->length());
4408 CHECK(array->HasFastSmiOrObjectElements());
4410 // We need filler the size of AllocationMemento object, plus an extra
4411 // fill pointer value.
4412 HeapObject* obj = NULL;
4413 AllocationResult allocation =
4414 CcTest::heap()->new_space()->AllocateRawUnaligned(
4415 AllocationMemento::kSize + kPointerSize);
4416 CHECK(allocation.To(&obj));
4417 Address addr_obj = obj->address();
4418 CcTest::heap()->CreateFillerObjectAt(
4419 addr_obj, AllocationMemento::kSize + kPointerSize);
4421 // Give the array a name, making sure not to allocate strings.
4422 v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
4423 CcTest::global()->Set(array_name, array_obj);
4425 // This should crash with a protection violation if we are running a build
4427 AlwaysAllocateScope aa_scope(isolate);
4428 v8::Script::Compile(mote_code_string)->Run();
// Regression test: a function enqueued as a code-flushing candidate, whose
// code lives on an evacuation candidate, is then optimized (taken off the
// list). The subsequent compacting GCs must not corrupt the heap.
4432 TEST(Regress168801) {
4433 if (i::FLAG_never_compact) return;
4434 i::FLAG_always_compact = true;
4435 i::FLAG_cache_optimized_code = false;
4436 i::FLAG_allow_natives_syntax = true;
4437 CcTest::InitializeVM();
4438 Isolate* isolate = CcTest::i_isolate();
4439 Heap* heap = isolate->heap();
4440 HandleScope scope(isolate);
4442 // Perform one initial GC to enable code flushing.
4443 heap->CollectAllGarbage();
4445 // Ensure the code ends up on an evacuation candidate.
4446 SimulateFullSpace(heap->code_space());
4448 // Prepare an unoptimized function that is eligible for code flushing.
4449 Handle<JSFunction> function;
4451 HandleScope inner_scope(isolate);
4452 CompileRun("function mkClosure() {"
4453 " return function(x) { return x + 1; };"
4455 "var f = mkClosure();"
4458 Handle<JSFunction> f =
4459 v8::Utils::OpenHandle(
4460 *v8::Handle<v8::Function>::Cast(
4461 CcTest::global()->Get(v8_str("f"))));
4462 CHECK(f->is_compiled());
4463 const int kAgingThreshold = 6;
4464 for (int i = 0; i < kAgingThreshold; i++) {
4465 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4468 function = inner_scope.CloseAndEscape(handle(*f, isolate));
4471 // Simulate incremental marking so that unoptimized function is enqueued as a
4472 // candidate for code flushing. The shared function info however will not be
4473 // explicitly enqueued.
4474 SimulateIncrementalMarking(heap);
4476 // Now optimize the function so that it is taken off the candidate list.
4478 HandleScope inner_scope(isolate);
4479 CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
4482 // This cycle will bust the heap and subsequent cycles will go ballistic.
4483 heap->CollectAllGarbage();
4484 heap->CollectAllGarbage();
// Regression test: same setup as Regress168801, but instead of optimizing,
// the debugger is loaded mid-marking (which disables code flushing); the
// compacting GCs that follow must still leave the heap consistent.
4488 TEST(Regress173458) {
4489 if (i::FLAG_never_compact) return;
4490 i::FLAG_always_compact = true;
4491 i::FLAG_cache_optimized_code = false;
4492 i::FLAG_allow_natives_syntax = true;
4493 CcTest::InitializeVM();
4494 Isolate* isolate = CcTest::i_isolate();
4495 Heap* heap = isolate->heap();
4496 HandleScope scope(isolate);
4498 // Perform one initial GC to enable code flushing.
4499 heap->CollectAllGarbage();
4501 // Ensure the code ends up on an evacuation candidate.
4502 SimulateFullSpace(heap->code_space());
4504 // Prepare an unoptimized function that is eligible for code flushing.
4505 Handle<JSFunction> function;
4507 HandleScope inner_scope(isolate);
4508 CompileRun("function mkClosure() {"
4509 " return function(x) { return x + 1; };"
4511 "var f = mkClosure();"
4514 Handle<JSFunction> f =
4515 v8::Utils::OpenHandle(
4516 *v8::Handle<v8::Function>::Cast(
4517 CcTest::global()->Get(v8_str("f"))));
4518 CHECK(f->is_compiled());
4519 const int kAgingThreshold = 6;
4520 for (int i = 0; i < kAgingThreshold; i++) {
4521 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4524 function = inner_scope.CloseAndEscape(handle(*f, isolate));
4527 // Simulate incremental marking so that unoptimized function is enqueued as a
4528 // candidate for code flushing. The shared function info however will not be
4529 // explicitly enqueued.
4530 SimulateIncrementalMarking(heap);
4532 // Now enable the debugger which in turn will disable code flushing.
4533 CHECK(isolate->debug()->Load());
4535 // This cycle will bust the heap and subsequent cycles will go ballistic.
4536 heap->CollectAllGarbage();
4537 heap->CollectAllGarbage();
// Regression test: growing a SharedFunctionInfo's optimized code map while
// an allocation-timeout-induced GC can strike at any moment must not corrupt
// the map (each AddToOptimizedCodeMap may trigger a full GC mid-expansion).
4542 TEST(Regress513507) {
4543 i::FLAG_flush_optimized_code_cache = false;
4544 i::FLAG_allow_natives_syntax = true;
4545 i::FLAG_gc_global = true;
4546 CcTest::InitializeVM();
4547 Isolate* isolate = CcTest::i_isolate();
4548 Heap* heap = isolate->heap();
4549 HandleScope scope(isolate);
4551 // Prepare function whose optimized code map we can use.
4552 Handle<SharedFunctionInfo> shared;
4554 HandleScope inner_scope(isolate);
4555 CompileRun("function f() { return 1 }"
4556 "f(); %OptimizeFunctionOnNextCall(f); f();");
4558 Handle<JSFunction> f =
4559 v8::Utils::OpenHandle(
4560 *v8::Handle<v8::Function>::Cast(
4561 CcTest::global()->Get(v8_str("f"))));
4562 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4563 CompileRun("f = null");
4566 // Prepare optimized code that we can use.
// NOTE(review): the declaration of |code| (a Handle<Code>) is not visible in
// this excerpt (gap at original lines 4567-4568).
4569 HandleScope inner_scope(isolate);
4570 CompileRun("function g() { return 2 }"
4571 "g(); %OptimizeFunctionOnNextCall(g); g();");
4573 Handle<JSFunction> g =
4574 v8::Utils::OpenHandle(
4575 *v8::Handle<v8::Function>::Cast(
4576 CcTest::global()->Get(v8_str("g"))));
4577 code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
// Bail out if optimization was disabled/failed; the test needs real
// optimized code in the map.
4578 if (!code->is_optimized_code()) return;
4581 Handle<FixedArray> lit = isolate->factory()->empty_fixed_array();
4582 Handle<Context> context(isolate->context());
4584 // Add the new code several times to the optimized code map and also set an
4585 // allocation timeout so that expanding the code map will trigger a GC.
4586 heap->set_allocation_timeout(5);
4587 FLAG_gc_interval = 1000;
4588 for (int i = 0; i < 10; ++i) {
4589 BailoutId id = BailoutId(i);
4590 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
// Regression test: an optimized code map whose literals array sits on a
// forced evacuation candidate must survive incremental marking plus the
// compacting GC — stale, already-marked old copies of the map must not be
// visited with dangling slots.
4596 TEST(Regress514122) {
4597 i::FLAG_flush_optimized_code_cache = false;
4598 i::FLAG_allow_natives_syntax = true;
4599 CcTest::InitializeVM();
4600 Isolate* isolate = CcTest::i_isolate();
4601 Heap* heap = isolate->heap();
4602 HandleScope scope(isolate);
4604 // Perfrom one initial GC to enable code flushing.
4605 CcTest::heap()->CollectAllGarbage();
4607 // Prepare function whose optimized code map we can use.
4608 Handle<SharedFunctionInfo> shared;
4610 HandleScope inner_scope(isolate);
4611 CompileRun("function f() { return 1 }"
4612 "f(); %OptimizeFunctionOnNextCall(f); f();");
4614 Handle<JSFunction> f =
4615 v8::Utils::OpenHandle(
4616 *v8::Handle<v8::Function>::Cast(
4617 CcTest::global()->Get(v8_str("f"))));
4618 shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4619 CompileRun("f = null");
4622 // Prepare optimized code that we can use.
// NOTE(review): the declarations of |code| and |evac_page| are not visible
// in this excerpt (gaps at original lines 4623-4624 and 4649-4650).
4625 HandleScope inner_scope(isolate);
4626 CompileRun("function g() { return 2 }"
4627 "g(); %OptimizeFunctionOnNextCall(g); g();");
4629 Handle<JSFunction> g =
4630 v8::Utils::OpenHandle(
4631 *v8::Handle<v8::Function>::Cast(
4632 CcTest::global()->Get(v8_str("g"))));
4633 code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
4634 if (!code->is_optimized_code()) return;
4637 Handle<FixedArray> lit = isolate->factory()->empty_fixed_array();
4638 Handle<Context> context(isolate->context());
4640 // Add the code several times to the optimized code map.
4641 for (int i = 0; i < 3; ++i) {
4642 HandleScope inner_scope(isolate);
4643 BailoutId id = BailoutId(i);
4644 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4646 shared->optimized_code_map()->Print();
4648 // Add the code with a literals array to be evacuated.
4651 HandleScope inner_scope(isolate);
4652 AlwaysAllocateScope always_allocate(isolate);
4653 // Make sure literal is placed on an old-space evacuation candidate.
4654 SimulateFullSpace(heap->old_space());
4655 Handle<FixedArray> lit = isolate->factory()->NewFixedArray(23, TENURED);
4656 evac_page = Page::FromAddress(lit->address());
4657 BailoutId id = BailoutId(100);
4658 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4661 // Heap is ready, force {lit_page} to become an evacuation candidate and
4662 // simulate incremental marking to enqueue optimized code map.
4663 FLAG_manual_evacuation_candidates_selection = true;
4664 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4665 SimulateIncrementalMarking(heap);
4667 // No matter whether reachable or not, {boomer} is doomed.
4668 Handle<Object> boomer(shared->optimized_code_map(), isolate);
4670 // Add the code several times to the optimized code map. This will leave old
4671 // copies of the optimized code map unreachable but still marked.
4672 for (int i = 3; i < 6; ++i) {
4673 HandleScope inner_scope(isolate);
4674 BailoutId id = BailoutId(i);
4675 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4678 // Trigger a GC to flush out the bug.
4679 heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
// Checks that write-barrier slot recording works for large objects: pointers
// stored from a large-object-space FixedArray into an object on an
// evacuation candidate must be updated when that object moves.
4684 TEST(LargeObjectSlotRecording) {
4685 FLAG_manual_evacuation_candidates_selection = true;
4686 CcTest::InitializeVM();
4687 Isolate* isolate = CcTest::i_isolate();
4688 Heap* heap = isolate->heap();
4689 HandleScope scope(isolate);
4691 // Create an object on an evacuation candidate.
4692 SimulateFullSpace(heap->old_space());
4693 Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
4694 Page* evac_page = Page::FromAddress(lit->address());
4695 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
4696 FixedArray* old_location = *lit;
4698 // Allocate a large object.
4699 const int kSize = 1000000;
4700 Handle<FixedArray> lo = isolate->factory()->NewFixedArray(kSize, TENURED);
4701 CHECK(heap->lo_space()->Contains(*lo));
4703 // Start incremental marking to active write barrier.
4704 SimulateIncrementalMarking(heap, false);
4705 heap->AdvanceIncrementalMarking(10000000, 10000000,
4706 IncrementalMarking::IdleStepActions());
4708 // Create references from the large object to the object on the evacuation
// NOTE(review): the lo->set(i, *lit) store is missing from this excerpt
// (gap at original line 4712); only the verification CHECK is visible.
4710 const int kStep = kSize / 10;
4711 for (int i = 0; i < kSize; i += kStep) {
4713 CHECK(lo->get(i) == old_location);
4716 // Move the evaucation candidate object.
4717 CcTest::heap()->CollectAllGarbage();
4719 // Verify that the pointers in the large object got updated.
4720 for (int i = 0; i < kSize; i += kStep) {
4721 CHECK_EQ(lo->get(i), *lit);
4722 CHECK(lo->get(i) != old_location);
// ObjectVisitor that ignores every pointer slot; used below to exercise
// handle-scope iteration without touching the visited objects.
4727 class DummyVisitor : public ObjectVisitor {
4729 void VisitPointers(Object** start, Object** end) { }
// Fills the current handle block exactly to its limit, then opens a
// DeferredHandleScope and iterates all handles — checks the iteration copes
// with a full block boundary.
4733 TEST(DeferredHandles) {
4734 CcTest::InitializeVM();
4735 Isolate* isolate = CcTest::i_isolate();
4736 Heap* heap = isolate->heap();
4737 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
4738 HandleScopeData* data = isolate->handle_scope_data();
4739 Handle<Object> init(heap->empty_string(), isolate);
4740 while (data->next < data->limit) {
4741 Handle<Object> obj(heap->empty_string(), isolate);
4743 // An entire block of handles has been filled.
4744 // Next handle would require a new block.
4745 DCHECK(data->next == data->limit);
4747 DeferredHandleScope deferred(isolate);
4748 DummyVisitor visitor;
4749 isolate->handle_scope_implementer()->Iterate(&visitor);
4750 delete deferred.Detach();
// Allocates a ~10M-element array, then performs one huge (100 MB budget)
// incremental marking step and asserts marking is (nearly) complete —
// i.e. a single big step makes proportional progress on large objects.
4754 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
4755 CcTest::InitializeVM();
4756 v8::HandleScope scope(CcTest::isolate());
4757 CompileRun("function f(n) {"
4758 " var a = new Array(n);"
4759 " for (var i = 0; i < n; i += 100) a[i] = i;"
4761 "f(10 * 1024 * 1024);");
4762 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4763 if (marking->IsStopped()) marking->Start(Heap::kNoGCFlags);
4764 // This big step should be sufficient to mark the whole array.
4765 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4766 DCHECK(marking->IsComplete() ||
4767 marking->IsReadyToOverApproximateWeakClosure());
// Runs an allocation-heavy optimized/deoptimized function three times:
// with inline allocation enabled (warm-up), disabled, and re-enabled —
// the code must behave identically in all three modes.
4771 TEST(DisableInlineAllocation) {
4772 i::FLAG_allow_natives_syntax = true;
4773 CcTest::InitializeVM();
4774 v8::HandleScope scope(CcTest::isolate());
4775 CompileRun("function test() {"
4777 " for (var i = 0; i < 10; i++) {"
4778 " x[i] = [ {}, [1,2,3], [1,x,3] ];"
4782 " %OptimizeFunctionOnNextCall(test);"
4784 " %DeoptimizeFunction(test);"
4787 // Warm-up with inline allocation enabled.
4788 CompileRun("test(); test(); run();");
4790 // Run test with inline allocation disabled.
4791 CcTest::heap()->DisableInlineAllocation();
4792 CompileRun("run()");
4794 // Run test with inline allocation re-enabled.
4795 CcTest::heap()->EnableInlineAllocation();
4796 CompileRun("run()");
// Walks the heap's allocation_sites_list via each site's weak_next link and
// returns the number of sites.
// NOTE(review): excerpt is incomplete — the counter increment and return
// statement are not visible in this listing.
4800 static int AllocationSitesCount(Heap* heap) {
4802 for (Object* site = heap->allocation_sites_list();
4803 !(site->IsUndefined());
4804 site = AllocationSite::cast(site)->weak_next()) {
// Checks that code registered in an AllocationSite's dependent_code (via a
// WeakCell) is dropped by GC once the function dies, even while the site
// itself is kept alive through a global handle.
4811 TEST(EnsureAllocationSiteDependentCodesProcessed) {
4812 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4813 i::FLAG_allow_natives_syntax = true;
4814 CcTest::InitializeVM();
4815 Isolate* isolate = CcTest::i_isolate();
4816 v8::internal::Heap* heap = CcTest::heap();
4817 GlobalHandles* global_handles = isolate->global_handles();
4819 if (!isolate->use_crankshaft()) return;
4821 // The allocation site at the head of the list is ours.
4822 Handle<AllocationSite> site;
4824 LocalContext context;
4825 v8::HandleScope scope(context->GetIsolate());
4827 int count = AllocationSitesCount(heap);
4828 CompileRun("var bar = function() { return (new Array()); };"
4833 // One allocation site should have been created.
4834 int new_count = AllocationSitesCount(heap);
4835 CHECK_EQ(new_count, (count + 1));
// Pin the newest site (head of the list) with a global handle so it outlives
// the scopes below.
4836 site = Handle<AllocationSite>::cast(
4837 global_handles->Create(
4838 AllocationSite::cast(heap->allocation_sites_list())));
4840 CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
4842 DependentCode::GroupStartIndexes starts(site->dependent_code());
4843 CHECK_GE(starts.number_of_entries(), 1);
4844 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4845 CHECK(site->dependent_code()->object_at(index)->IsWeakCell());
4846 Code* function_bar = Code::cast(
4847 WeakCell::cast(site->dependent_code()->object_at(index))->value());
4848 Handle<JSFunction> bar_handle =
4849 v8::Utils::OpenHandle(
4850 *v8::Handle<v8::Function>::Cast(
4851 CcTest::global()->Get(v8_str("bar"))));
4852 CHECK_EQ(bar_handle->code(), function_bar);
4855 // Now make sure that a gc should get rid of the function, even though we
4856 // still have the allocation site alive.
4857 for (int i = 0; i < 4; i++) {
4858 heap->CollectAllGarbage();
4861 // The site still exists because of our global handle, but the code is no
4862 // longer referred to by dependent_code().
4863 DependentCode::GroupStartIndexes starts(site->dependent_code());
4864 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4865 CHECK(site->dependent_code()->object_at(index)->IsWeakCell() &&
4866 WeakCell::cast(site->dependent_code()->object_at(index))->cleared());
// Checks that property cells embedded in optimized code are treated as weak:
// once the function's context dies, repeated full GCs must mark the escaped
// code object for deoptimization.
4870 TEST(CellsInOptimizedCodeAreWeak) {
4871 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4872 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4873 i::FLAG_allow_natives_syntax = true;
4874 CcTest::InitializeVM();
4875 Isolate* isolate = CcTest::i_isolate();
4876 v8::internal::Heap* heap = CcTest::heap();
4878 if (!isolate->use_crankshaft()) return;
4879 HandleScope outer_scope(heap->isolate());
// NOTE(review): the declaration of |code| (a Handle<Code>) is not visible in
// this excerpt (gap at original lines 4880-4881).
4882 LocalContext context;
4883 HandleScope scope(heap->isolate());
4885 CompileRun("bar = (function() {"
4889 " var foo = function(x) { with (x) { return 1 + x; } };"
4893 " %OptimizeFunctionOnNextCall(bar);"
4895 " return bar;})();");
4897 Handle<JSFunction> bar =
4898 v8::Utils::OpenHandle(
4899 *v8::Handle<v8::Function>::Cast(
4900 CcTest::global()->Get(v8_str("bar"))));
// Escape only the code handle; bar itself stays scoped to the inner block.
4901 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4904 // Now make sure that a gc should get rid of the function
4905 for (int i = 0; i < 4; i++) {
4906 heap->CollectAllGarbage();
4909 DCHECK(code->marked_for_deoptimization());
// Same shape as CellsInOptimizedCodeAreWeak, but exercises heap objects
// (rather than cells) embedded in optimized code: they must be weak, so the
// escaped code handle ends up marked for deoptimization after GC.
// NOTE(review): several original source lines are elided in this excerpt.
4913 TEST(ObjectsInOptimizedCodeAreWeak) {
4914 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4915 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4916 i::FLAG_allow_natives_syntax = true;
4917 CcTest::InitializeVM();
4918 Isolate* isolate = CcTest::i_isolate();
4919 v8::internal::Heap* heap = CcTest::heap();
4921 if (!isolate->use_crankshaft()) return;
4922 HandleScope outer_scope(heap->isolate());
4925 LocalContext context;
4926 HandleScope scope(heap->isolate());
4928 CompileRun("function bar() {"
4931 "function foo(x) { with (x) { return 1 + x; } };"
4935 "%OptimizeFunctionOnNextCall(bar);"
4938 Handle<JSFunction> bar =
4939 v8::Utils::OpenHandle(
4940 *v8::Handle<v8::Function>::Cast(
4941 CcTest::global()->Get(v8_str("bar"))));
// Keep only the Code alive across the inner scope.
4942 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4945 // Now make sure that a gc should get rid of the function
4946 for (int i = 0; i < 4; i++) {
4947 heap->CollectAllGarbage();
4950 DCHECK(code->marked_for_deoptimization());
// Regression test: repeatedly compiling/optimizing fresh functions under
// simulated incremental marking must not leak entries in the heap's
// weak_object_to_code_table; after GC the table must be empty.
// NOTE(review): several original source lines are elided in this excerpt.
4954 TEST(NoWeakHashTableLeakWithIncrementalMarking) {
4955 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4956 if (!i::FLAG_incremental_marking) return;
4957 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4958 i::FLAG_allow_natives_syntax = true;
// Disable the compilation cache so every iteration produces new code.
4959 i::FLAG_compilation_cache = false;
// Do not artificially retain maps; we want objects to die promptly.
4960 i::FLAG_retain_maps_for_n_gc = 0;
4961 CcTest::InitializeVM();
4962 Isolate* isolate = CcTest::i_isolate();
4964 // Do not run for no-snap builds.
4965 if (!i::Snapshot::HaveASnapshotToStartFrom(isolate)) return;
4967 v8::internal::Heap* heap = CcTest::heap();
4969 // Get a clean slate regarding optimized functions on the heap.
4970 i::Deoptimizer::DeoptimizeAll(isolate);
4971 heap->CollectAllGarbage();
4973 if (!isolate->use_crankshaft()) return;
4974 HandleScope outer_scope(heap->isolate());
4975 for (int i = 0; i < 3; i++) {
4976 SimulateIncrementalMarking(heap);
4978 LocalContext context;
4979 HandleScope scope(heap->isolate());
4980 EmbeddedVector<char, 256> source;
// Generate uniquely-named functions per iteration (bar%d / foo%d).
4982 "function bar%d() {"
4985 "function foo%d(x) { with (x) { return 1 + x; } };"
4989 "%%OptimizeFunctionOnNextCall(bar%d);"
4991 i, i, i, i, i, i, i, i);
4992 CompileRun(source.start());
4994 heap->CollectAllGarbage();
// After collection the weak table should hold no surviving entries.
4997 if (heap->weak_object_to_code_table()->IsHashTable()) {
4998 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
4999 elements = t->NumberOfElements();
5001 CHECK_EQ(0, elements);
// Helper: defines, runs, and crankshaft-optimizes a trivial global function
// with the given name, returning a handle to the resulting JSFunction.
// NOTE(review): trailing lines (return statement/closing brace) are elided
// in this excerpt.
5005 static Handle<JSFunction> OptimizeDummyFunction(const char* name) {
5006 EmbeddedVector<char, 256> source;
5008 "function %s() { return 0; }"
5010 "%%OptimizeFunctionOnNextCall(%s);"
5011 "%s();", name, name, name, name, name);
5012 CompileRun(source.start());
5013 Handle<JSFunction> fun =
5014 v8::Utils::OpenHandle(
5015 *v8::Handle<v8::Function>::Cast(
5016 CcTest::global()->Get(v8_str(name))));
// Helper: walks the next_code_link() chain starting at |code| and counts how
// many Code objects follow it (the weak optimized-code list of a context).
// NOTE(review): counter initialization/return lines are elided here.
5021 static int GetCodeChainLength(Code* code) {
5023 while (code->next_code_link()->IsCode()) {
5025 code = Code::cast(code->next_code_link());
// Checks that the next_code_link field of optimized code is a weak link:
// dropping "mortal" while keeping "immortal" alive must shorten the code
// chain by exactly one after a full GC.
// NOTE(review): several original source lines are elided in this excerpt.
5031 TEST(NextCodeLinkIsWeak) {
5032 i::FLAG_always_opt = false;
5033 i::FLAG_allow_natives_syntax = true;
5034 CcTest::InitializeVM();
5035 Isolate* isolate = CcTest::i_isolate();
5036 v8::internal::Heap* heap = CcTest::heap();
5038 if (!isolate->use_crankshaft()) return;
5039 HandleScope outer_scope(heap->isolate());
5041 heap->CollectAllAvailableGarbage();
5042 int code_chain_length_before, code_chain_length_after;
5044 HandleScope scope(heap->isolate());
5045 Handle<JSFunction> mortal = OptimizeDummyFunction("mortal");
5046 Handle<JSFunction> immortal = OptimizeDummyFunction("immortal");
// Most recently optimized code is prepended, so immortal links to mortal.
5047 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
5048 code_chain_length_before = GetCodeChainLength(immortal->code());
5049 // Keep the immortal code and let the mortal code die.
5050 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
5051 CompileRun("mortal = null; immortal = null;");
5053 heap->CollectAllAvailableGarbage();
5054 // Now mortal code should be dead.
5055 code_chain_length_after = GetCodeChainLength(*code);
5056 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
// Helper: assembles a minimal stub and wraps it in a Code object flagged as
// OPTIMIZED_FUNCTION, so tests can build code-list links without running the
// optimizer. NOTE(review): CodeDesc declaration and return are elided here.
5060 static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
5061 i::byte buffer[i::Assembler::kMinimalBufferSize];
5062 MacroAssembler masm(isolate, buffer, sizeof(buffer));
5064 masm.Push(isolate->factory()->undefined_value());
5066 masm.GetCode(&desc);
5067 Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
5068 Handle<Code> code = isolate->factory()->NewCode(
5069 desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
5070 CHECK(code->IsCode());
// Variant of NextCodeLinkIsWeak that wires dummy code objects directly into
// the context's OPTIMIZED_CODE_LIST: after GC the unreferenced "mortal" link
// must be spliced out, leaving "immortal" pointing at the old list head.
// NOTE(review): several original source lines are elided in this excerpt.
5075 TEST(NextCodeLinkIsWeak2) {
5076 i::FLAG_allow_natives_syntax = true;
5077 CcTest::InitializeVM();
5078 Isolate* isolate = CcTest::i_isolate();
5079 v8::internal::Heap* heap = CcTest::heap();
5081 if (!isolate->use_crankshaft()) return;
5082 HandleScope outer_scope(heap->isolate());
5083 heap->CollectAllAvailableGarbage();
5084 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
5085 Handle<Code> new_head;
5086 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
5088 HandleScope scope(heap->isolate());
5089 Handle<Code> immortal = DummyOptimizedCode(isolate);
5090 Handle<Code> mortal = DummyOptimizedCode(isolate);
// Chain: immortal -> mortal -> old_head; only immortal escapes the scope.
5091 mortal->set_next_code_link(*old_head);
5092 immortal->set_next_code_link(*mortal);
5093 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
5094 new_head = scope.CloseAndEscape(immortal);
5096 heap->CollectAllAvailableGarbage();
5097 // Now mortal code should be dead.
5098 CHECK_EQ(*old_head, new_head->next_code_link());
// Flag set by the weak callback below; tests reset it before forcing GC and
// then assert the callback actually fired.
5102 static bool weak_ic_cleared = false;
// Weak-handle callback used by the weak-IC tests: records that the weakly
// held object died and resets the persistent handle passed as parameter.
5104 static void ClearWeakIC(
5105 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
5106 printf("clear weak is called\n");
5107 weak_ic_cleared = true;
5108 data.GetParameter()->Reset();
// Checks that the constructor function recorded in a CallIC's feedback is
// held weakly (its WeakCell gets cleared by GC), and that the IC can go
// monomorphic again with a fresh constructor afterwards.
// NOTE(review): several original source lines are elided in this excerpt.
5112 TEST(WeakFunctionInConstructor) {
5113 if (i::FLAG_always_opt) return;
5114 i::FLAG_stress_compaction = false;
5115 CcTest::InitializeVM();
5116 v8::Isolate* isolate = CcTest::isolate();
5117 v8::HandleScope scope(isolate);
5119 "function createObj(obj) {"
5120 " return new obj();"
5122 Handle<JSFunction> createObj =
5123 v8::Utils::OpenHandle(*v8::Handle<v8::Function>::Cast(
5124 CcTest::global()->Get(v8_str("createObj"))));
5126 v8::Persistent<v8::Object> garbage;
5128 v8::HandleScope scope(isolate);
5129 const char* source =
5131 " function hat() { this.x = 5; }"
5136 garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
// Force the weakly-held result to die and verify the callback ran.
5138 weak_ic_cleared = false;
5139 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5140 Heap* heap = CcTest::i_isolate()->heap();
5141 heap->CollectAllGarbage();
5142 CHECK(weak_ic_cleared);
5144 // We've determined the constructor in createObj has had it's weak cell
5145 // cleared. Now, verify that one additional call with a new function
5146 // allows monomorphicity.
5147 Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
5148 createObj->shared()->feedback_vector(), CcTest::i_isolate());
// GC until the feedback slot's WeakCell is observed cleared (bounded retry).
5149 for (int i = 0; i < 20; i++) {
5150 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5151 CHECK(slot_value->IsWeakCell());
5152 if (WeakCell::cast(slot_value)->cleared()) break;
5153 heap->CollectAllGarbage();
5156 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5157 CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
5159 "function coat() { this.x = 6; }"
5160 "createObj(coat);");
// A live constructor repopulates the slot with a non-cleared WeakCell.
5161 slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5162 CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
5166 // Checks that the value returned by execution of the source is weak.
// Shared driver for the WeakMapIn*IC tests below: runs |source|, holds its
// result only through a weak persistent handle, forces a full GC, and
// asserts the ClearWeakIC callback fired (i.e. nothing kept the map alive).
5167 void CheckWeakness(const char* source) {
5168 i::FLAG_stress_compaction = false;
5169 CcTest::InitializeVM();
5170 v8::Isolate* isolate = CcTest::isolate();
5171 v8::HandleScope scope(isolate);
5172 v8::Persistent<v8::Object> garbage;
// Inner scope so no strong local handle survives to the GC below.
5174 v8::HandleScope scope(isolate);
5175 garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
5177 weak_ic_cleared = false;
5178 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5179 Heap* heap = CcTest::i_isolate()->heap();
5180 heap->CollectAllGarbage();
5181 CHECK(weak_ic_cleared);
5185 // Each of the following "weak IC" tests creates an IC that embeds a map with
5186 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
// Monomorphic LoadIC: repeated loads through one receiver shape.
// NOTE(review): parts of the script and closing lines are elided here.
5187 TEST(WeakMapInMonomorphicLoadIC) {
5188 CheckWeakness("function loadIC(obj) {"
5192 " var proto = {'name' : 'weak'};"
5193 " var obj = Object.create(proto);"
// Polymorphic LoadIC variant: a second receiver shape ("poly") is added so
// the IC holds multiple maps; all must still be weak.
// NOTE(review): parts of the script and closing lines are elided here.
5202 TEST(WeakMapInPolymorphicLoadIC) {
5204 "function loadIC(obj) {"
5208 " var proto = {'name' : 'weak'};"
5209 " var obj = Object.create(proto);"
5213 " var poly = Object.create(proto);"
// Monomorphic KeyedLoadIC: three identical keyed loads make the IC
// monomorphic; the embedded map (prototype 'proto') must still die on GC.
// NOTE(review): closing lines of the script are elided here.
5221 TEST(WeakMapInMonomorphicKeyedLoadIC) {
5222 CheckWeakness("function keyedLoadIC(obj, field) {"
5223 " return obj[field];"
5226 " var proto = {'name' : 'weak'};"
5227 " var obj = Object.create(proto);"
5228 " keyedLoadIC(obj, 'name');"
5229 " keyedLoadIC(obj, 'name');"
5230 " keyedLoadIC(obj, 'name');"
// Polymorphic KeyedLoadIC: adds a second shape ("poly") after the IC is warm
// so it records multiple weak maps.
// NOTE(review): closing lines of the script are elided here.
5236 TEST(WeakMapInPolymorphicKeyedLoadIC) {
5238 "function keyedLoadIC(obj, field) {"
5239 " return obj[field];"
5242 " var proto = {'name' : 'weak'};"
5243 " var obj = Object.create(proto);"
5244 " keyedLoadIC(obj, 'name');"
5245 " keyedLoadIC(obj, 'name');"
5246 " keyedLoadIC(obj, 'name');"
5247 " var poly = Object.create(proto);"
5249 " keyedLoadIC(poly, 'name');"
// Monomorphic StoreIC: three identical stores warm the IC; the receiver map
// it embeds must remain weakly held.
// NOTE(review): closing lines of the script are elided here.
5255 TEST(WeakMapInMonomorphicStoreIC) {
5256 CheckWeakness("function storeIC(obj, value) {"
5257 " obj.name = value;"
5260 " var proto = {'name' : 'weak'};"
5261 " var obj = Object.create(proto);"
5262 " storeIC(obj, 'x');"
5263 " storeIC(obj, 'x');"
5264 " storeIC(obj, 'x');"
// Polymorphic StoreIC: as above, plus a second receiver shape ("poly").
// NOTE(review): closing lines of the script are elided here.
5270 TEST(WeakMapInPolymorphicStoreIC) {
5272 "function storeIC(obj, value) {"
5273 " obj.name = value;"
5276 " var proto = {'name' : 'weak'};"
5277 " var obj = Object.create(proto);"
5278 " storeIC(obj, 'x');"
5279 " storeIC(obj, 'x');"
5280 " storeIC(obj, 'x');"
5281 " var poly = Object.create(proto);"
5283 " storeIC(poly, 'x');"
// Monomorphic KeyedStoreIC: warmed by three identical keyed stores; the
// embedded map must be weak.
// NOTE(review): closing lines of the script are elided here.
5289 TEST(WeakMapInMonomorphicKeyedStoreIC) {
5290 CheckWeakness("function keyedStoreIC(obj, field, value) {"
5291 " obj[field] = value;"
5294 " var proto = {'name' : 'weak'};"
5295 " var obj = Object.create(proto);"
5296 " keyedStoreIC(obj, 'x');"
5297 " keyedStoreIC(obj, 'x');"
5298 " keyedStoreIC(obj, 'x');"
// Polymorphic KeyedStoreIC: as above, plus a second receiver shape ("poly").
// NOTE(review): closing lines of the script are elided here.
5304 TEST(WeakMapInPolymorphicKeyedStoreIC) {
5306 "function keyedStoreIC(obj, field, value) {"
5307 " obj[field] = value;"
5310 " var proto = {'name' : 'weak'};"
5311 " var obj = Object.create(proto);"
5312 " keyedStoreIC(obj, 'x');"
5313 " keyedStoreIC(obj, 'x');"
5314 " keyedStoreIC(obj, 'x');"
5315 " var poly = Object.create(proto);"
5317 " keyedStoreIC(poly, 'x');"
// Monomorphic CompareNilIC: 'obj == null' comparisons embed the receiver's
// map, which must stay weakly held.
// NOTE(review): closing lines of the script are elided here.
5323 TEST(WeakMapInMonomorphicCompareNilIC) {
5324 CheckWeakness("function compareNilIC(obj) {"
5325 " return obj == null;"
5328 " var proto = {'name' : 'weak'};"
5329 " var obj = Object.create(proto);"
5330 " compareNilIC(obj);"
5331 " compareNilIC(obj);"
5332 " compareNilIC(obj);"
// Helper: looks up a global property by name and casts it to a JSFunction.
// Assumes the property exists and is a function (ToHandleChecked/cast).
5338 Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
5339 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
5340 Handle<Object> obj =
5341 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
5342 return Handle<JSFunction>::cast(obj);
// Asserts that the IC identified by (kind, ic_slot) is in |state|.
// Vector-based ICs (LOAD/KEYED_LOAD/CALL) are checked via the feedback
// vector's nexus; other kinds fall back to scanning |code| for the first IC
// stub of that kind and checking its ic_state.
5346 void CheckIC(Code* code, Code::Kind kind, SharedFunctionInfo* shared,
5347 int ic_slot, InlineCacheState state) {
5348 if (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
5349 kind == Code::CALL_IC) {
5350 TypeFeedbackVector* vector = shared->feedback_vector();
5351 FeedbackVectorICSlot slot(ic_slot);
5352 if (kind == Code::LOAD_IC) {
5353 LoadICNexus nexus(vector, slot);
5354 CHECK_EQ(nexus.StateFromFeedback(), state);
5355 } else if (kind == Code::KEYED_LOAD_IC) {
5356 KeyedLoadICNexus nexus(vector, slot);
5357 CHECK_EQ(nexus.StateFromFeedback(), state);
5358 } else if (kind == Code::CALL_IC) {
5359 CallICNexus nexus(vector, slot);
5360 CHECK_EQ(nexus.StateFromFeedback(), state);
// Non-vector ICs: inspect the first matching IC stub in the code object.
5363 Code* ic = FindFirstIC(code, kind);
5364 CHECK(ic->is_inline_cache_stub());
5365 CHECK(ic->ic_state() == state);
// A monomorphic LoadIC must not regress (e.g. to premonomorphic) after a
// full GC: state is checked before and after re-running the test closure.
// NOTE(review): the test script body is partially elided in this excerpt.
5370 TEST(MonomorphicStaysMonomorphicAfterGC) {
5371 if (FLAG_always_opt) return;
5372 CcTest::InitializeVM();
5373 Isolate* isolate = CcTest::i_isolate();
5374 Heap* heap = isolate->heap();
5375 v8::HandleScope scope(CcTest::isolate());
5377 "function loadIC(obj) {"
5380 "function testIC() {"
5381 " var proto = {'name' : 'weak'};"
5382 " var obj = Object.create(proto);"
5388 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5390 v8::HandleScope scope(CcTest::isolate());
5391 CompileRun("(testIC())");
5393 heap->CollectAllGarbage();
5394 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
5396 v8::HandleScope scope(CcTest::isolate());
5397 CompileRun("(testIC())");
// Still monomorphic after GC plus another round of execution.
5399 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
// Same as MonomorphicStaysMonomorphicAfterGC, but the script feeds the IC a
// second receiver shape ("poly") so it must stay POLYMORPHIC across GC.
// NOTE(review): the test script body is partially elided in this excerpt.
5403 TEST(PolymorphicStaysPolymorphicAfterGC) {
5404 if (FLAG_always_opt) return;
5405 CcTest::InitializeVM();
5406 Isolate* isolate = CcTest::i_isolate();
5407 Heap* heap = isolate->heap();
5408 v8::HandleScope scope(CcTest::isolate());
5410 "function loadIC(obj) {"
5413 "function testIC() {"
5414 " var proto = {'name' : 'weak'};"
5415 " var obj = Object.create(proto);"
5419 " var poly = Object.create(proto);"
5424 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5426 v8::HandleScope scope(CcTest::isolate());
5427 CompileRun("(testIC())");
5429 heap->CollectAllGarbage();
5430 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
5432 v8::HandleScope scope(CcTest::isolate());
5433 CompileRun("(testIC())");
5435 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
// Body of a weak-cell test (the opening TEST(...) line is elided in this
// excerpt — presumably TEST(WeakCells); confirm against the full file).
// Verifies weak-cell semantics across scavenges vs. full GC: a cell whose
// value is only reachable through the cell survives scavenges but is
// cleared by a full mark-compact, while a cell whose value has a strong
// handle ("survivor") is never cleared.
5440 CcTest::InitializeVM();
5441 Isolate* isolate = CcTest::i_isolate();
5442 v8::internal::Heap* heap = CcTest::heap();
5443 v8::internal::Factory* factory = isolate->factory();
5445 HandleScope outer_scope(isolate);
5446 Handle<WeakCell> weak_cell1;
5448 HandleScope inner_scope(isolate);
// This array's only reference is via weak_cell1 once the scope closes.
5449 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
5450 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
5453 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5454 Handle<WeakCell> weak_cell2;
5456 HandleScope inner_scope(isolate);
5457 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
5459 CHECK(weak_cell1->value()->IsFixedArray());
5460 CHECK_EQ(*survivor, weak_cell2->value());
// Scavenges (new-space GCs) must not clear either cell.
5461 heap->CollectGarbage(NEW_SPACE);
5462 CHECK(weak_cell1->value()->IsFixedArray());
5463 CHECK_EQ(*survivor, weak_cell2->value());
5464 heap->CollectGarbage(NEW_SPACE);
5465 CHECK(weak_cell1->value()->IsFixedArray());
5466 CHECK_EQ(*survivor, weak_cell2->value());
// A full GC clears the cell whose value had no other strong reference.
5467 heap->CollectAllAvailableGarbage();
5468 CHECK(weak_cell1->cleared());
5469 CHECK_EQ(*survivor, weak_cell2->value());
// Like the previous weak-cell test, but interleaves incremental-marking
// steps and scavenges while creating N cells; after a final full GC only
// the cell pointing at the strongly-held "survivor" remains uncleared.
// NOTE(review): the definition of N is elided in this excerpt.
5473 TEST(WeakCellsWithIncrementalMarking) {
5474 CcTest::InitializeVM();
5475 Isolate* isolate = CcTest::i_isolate();
5476 v8::internal::Heap* heap = CcTest::heap();
5477 v8::internal::Factory* factory = isolate->factory();
5480 HandleScope outer_scope(isolate);
5481 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5482 Handle<WeakCell> weak_cells[N];
5484 for (int i = 0; i < N; i++) {
5485 HandleScope inner_scope(isolate);
// Cell 0 wraps the survivor; all others wrap otherwise-unreachable arrays.
5486 Handle<HeapObject> value =
5487 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
5488 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
5489 CHECK(weak_cell->value()->IsFixedArray());
5490 IncrementalMarking* marking = heap->incremental_marking();
5491 if (marking->IsStopped()) marking->Start(Heap::kNoGCFlags);
5492 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
// A scavenge in the middle of marking must not clear the young cell value.
5493 heap->CollectGarbage(NEW_SPACE);
5494 CHECK(weak_cell->value()->IsFixedArray());
5495 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
5497 heap->CollectAllGarbage();
5498 CHECK_EQ(*survivor, weak_cells[0]->value());
5499 for (int i = 1; i < N; i++) {
5500 CHECK(weak_cells[i]->cleared());
// Regression test: with stress compaction and a 1-allocation GC timeout,
// storing the result of an optimized add into an object that gets promoted
// must not crash (exercises allocation during deopt/promotion paths).
// NOTE(review): parts of the script are elided in this excerpt.
5506 TEST(AddInstructionChangesNewSpacePromotion) {
5507 i::FLAG_allow_natives_syntax = true;
5508 i::FLAG_expose_gc = true;
5509 i::FLAG_stress_compaction = true;
// Trigger a GC every 1000 allocations to shake out allocation-site issues.
5510 i::FLAG_gc_interval = 1000;
5511 CcTest::InitializeVM();
5512 if (!i::FLAG_allocation_site_pretenuring) return;
5513 v8::HandleScope scope(CcTest::isolate());
5514 Isolate* isolate = CcTest::i_isolate();
5515 Heap* heap = isolate->heap();
5518 "function add(a, b) {"
5522 "add(\"a\", \"b\");"
5523 "var oldSpaceObject;"
5525 "function crash(x) {"
5526 " var object = {a: null, b: null};"
5527 " var result = add(1.5, x | 0);"
5528 " object.a = result;"
5529 " oldSpaceObject = object;"
5534 "%OptimizeFunctionOnNextCall(crash);"
5537 v8::Handle<v8::Object> global = CcTest::global();
5538 v8::Handle<v8::Function> g =
5539 v8::Handle<v8::Function>::Cast(global->Get(v8_str("crash")));
5540 v8::Handle<v8::Value> args1[] = { v8_num(1) };
// Force the very next allocation to trigger a GC, then call into "crash".
5541 heap->DisableInlineAllocation();
5542 heap->set_allocation_timeout(1);
5543 g->Call(global, 1, args1);
5544 heap->CollectAllGarbage();
// Fatal-error handler installed by CEntryStubOOM: the process exits with 0
// only if the fatal error came from the expected OOM location.
5548 void OnFatalErrorExpectOOM(const char* location, const char* message) {
5549 // Exit with 0 if the location matches our expectation.
5550 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
// Forces a GC on every allocation (--gc-interval=1) while running script;
// if this OOMs, OnFatalErrorExpectOOM turns the expected failure into a
// clean exit. Otherwise the script result must still be a number.
// NOTE(review): part of the script is elided in this excerpt.
5554 TEST(CEntryStubOOM) {
5555 i::FLAG_allow_natives_syntax = true;
5556 CcTest::InitializeVM();
5557 v8::HandleScope scope(CcTest::isolate());
5558 v8::V8::SetFatalErrorHandler(OnFatalErrorExpectOOM);
5560 v8::Handle<v8::Value> result = CompileRun(
5561 "%SetFlags('--gc-interval=1');"
5566 CHECK(result->IsNumber());
// No-op interrupt callback: Regress357137 only needs an interrupt to be
// *requested* (to force a stack-guard check), not to do any work.
5572 static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
// JS-callable hook ("interrupt()") that queues the no-op interrupt above on
// the current isolate.
5575 static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
5576 CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
// Regression test for crbug 357137: a pending interrupt during eval of a
// function with many locals must not corrupt the context; the returned
// closure must still read v0 == 42.
// NOTE(review): parts of the script are elided in this excerpt.
5580 TEST(Regress357137) {
5581 CcTest::InitializeVM();
5582 v8::Isolate* isolate = CcTest::isolate();
5583 v8::HandleScope hscope(isolate);
5584 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5585 global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
5586 v8::FunctionTemplate::New(isolate, RequestInterrupt));
5587 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
5588 DCHECK(!context.IsEmpty());
5589 v8::Context::Scope cscope(context);
5591 v8::Local<v8::Value> result = CompileRun(
// 512 locals inflate f's frame so the fake overflow path is exercised.
5593 "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
5594 "eval('function f() {' + locals + 'return function() { return v0; }; }');"
5595 "interrupt();" // This triggers a fake stack overflow in f.
5597 CHECK_EQ(42.0, result->ToNumber(isolate)->Value());
// Regression test for crbug 507979: shrinking an object while a filtering
// heap iterator is live creates a filler that shares mark bits with the
// next object; iterating afterwards must not crash or return bogus objects.
5601 TEST(Regress507979) {
5602 const int kFixedArrayLen = 10;
5603 CcTest::InitializeVM();
5604 Isolate* isolate = CcTest::i_isolate();
5605 Heap* heap = isolate->heap();
5606 HandleScope handle_scope(isolate);
5608 Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
5609 Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
5610 CHECK(heap->InNewSpace(o1->address()));
5611 CHECK(heap->InNewSpace(o2->address()));
// The iterator is created *before* the shrink, on purpose.
5613 HeapIterator it(heap, i::HeapIterator::kFilterUnreachable);
5615 // Replace parts of an object placed before a live object with a filler. This
5616 // way the filler object shares the mark bits with the following live object.
5617 o1->Shrink(kFixedArrayLen - 1);
5619 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
5620 // Let's not optimize the loop away.
5621 CHECK(obj->address() != nullptr);
// Checks the interaction of in-place Array.prototype.shift (left-trimming)
// with concurrent sweeping: the trimmed elements' page must either be past
// the sweeping phase or the elements must be marked black.
// NOTE(review): part of the script is elided in this excerpt.
5626 TEST(ArrayShiftSweeping) {
5627 i::FLAG_expose_gc = true;
5628 CcTest::InitializeVM();
5629 v8::HandleScope scope(CcTest::isolate());
5630 Isolate* isolate = CcTest::i_isolate();
5631 Heap* heap = isolate->heap();
5633 v8::Local<v8::Value> result = CompileRun(
// Large arrays so the backing store is promoted to old space.
5634 "var array = new Array(40000);"
5635 "var tmp = new Array(100000);"
5642 Handle<JSObject> o =
5643 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5644 CHECK(heap->InOldSpace(o->elements()));
5645 CHECK(heap->InOldSpace(*o));
5646 Page* page = Page::FromAddress(o->elements()->address());
5647 CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
5648 Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
// Checks that the promotion queue (which lives at the end of to-space) is
// correctly evacuated when a semi-space copy would otherwise overwrite it.
// The long comment inside describes the exact setup.
// NOTE(review): a few lines (e.g. the capacity-grow call) are elided here.
5652 UNINITIALIZED_TEST(PromotionQueue) {
5653 i::FLAG_expose_gc = true;
// Exactly two semi-space pages, as the scenario below requires.
5654 i::FLAG_max_semi_space_size = 2 * (Page::kPageSize / MB);
5655 v8::Isolate::CreateParams create_params;
5656 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
5657 v8::Isolate* isolate = v8::Isolate::New(create_params);
5658 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
5660 v8::Isolate::Scope isolate_scope(isolate);
5661 v8::HandleScope handle_scope(isolate);
5662 v8::Context::New(isolate)->Enter();
5663 Heap* heap = i_isolate->heap();
5664 NewSpace* new_space = heap->new_space();
5666 // In this test we will try to overwrite the promotion queue which is at the
5667 // end of to-space. To actually make that possible, we need at least two
5668 // semi-space pages and take advantage of fragmentation.
5669 // (1) Grow semi-space to two pages.
5670 // (2) Create a few small long living objects and call the scavenger to
5671 // move them to the other semi-space.
5672 // (3) Create a huge object, i.e., remainder of first semi-space page and
5673 // create another huge object which should be of maximum allocatable memory
5674 // size of the second semi-space page.
5675 // (4) Call the scavenger again.
5676 // What will happen is: the scavenger will promote the objects created in
5677 // (2) and will create promotion queue entries at the end of the second
5678 // semi-space page during the next scavenge when it promotes the objects to
5679 // the old generation. The first allocation of (3) will fill up the first
5680 // semi-space page. The second allocation in (3) will not fit into the
5681 // first semi-space page, but it will overwrite the promotion queue which
5682 // are in the second semi-space page. If the right guards are in place, the
5683 // promotion queue will be evacuated in that case.
5685 // Grow the semi-space to two pages to make semi-space copy overwrite the
5686 // promotion queue, which will be at the end of the second page.
5687 intptr_t old_capacity = new_space->TotalCapacity();
5689 // If we are in a low memory config, we can't grow to two pages and we can't
5690 // run this test. This also means the issue we are testing cannot arise, as
5691 // there is no fragmentation.
5692 if (new_space->IsAtMaximumCapacity()) return;
5695 CHECK(new_space->IsAtMaximumCapacity());
5696 CHECK(2 * old_capacity == new_space->TotalCapacity());
5698 // Call the scavenger two times to get an empty new space
5699 heap->CollectGarbage(NEW_SPACE);
5700 heap->CollectGarbage(NEW_SPACE);
5702 // First create a few objects which will survive a scavenge, and will get
5703 // promoted to the old generation later on. These objects will create
5704 // promotion queue entries at the end of the second semi-space page.
5705 const int number_handles = 12;
5706 Handle<FixedArray> handles[number_handles];
5707 for (int i = 0; i < number_handles; i++) {
5708 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5710 heap->CollectGarbage(NEW_SPACE);
5712 // Create the first huge object which will exactly fit the first semi-space
5714 int new_linear_size =
5715 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5716 *heap->new_space()->allocation_top_address());
5717 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize;
5718 Handle<FixedArray> first =
5719 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5720 CHECK(heap->InNewSpace(*first));
5722 // Create the second huge object of maximum allocatable second semi-space
5725 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5726 *heap->new_space()->allocation_top_address());
5727 length = Page::kMaxRegularHeapObjectSize / kPointerSize -
5728 FixedArray::kHeaderSize;
5729 Handle<FixedArray> second =
5730 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5731 CHECK(heap->InNewSpace(*second));
5733 // This scavenge will corrupt memory if the promotion queue is not
5735 heap->CollectGarbage(NEW_SPACE);
// Regression test for crbug 388880: migrating the map of an object that
// ends exactly at a page boundary, while incremental marking is active,
// must not crash in Heap::AdjustLiveBytes().
// NOTE(review): a few lines (e.g. map2 creation) are elided in this excerpt.
5741 TEST(Regress388880) {
5742 i::FLAG_expose_gc = true;
5743 CcTest::InitializeVM();
5744 v8::HandleScope scope(CcTest::isolate());
5745 Isolate* isolate = CcTest::i_isolate();
5746 Factory* factory = isolate->factory();
5747 Heap* heap = isolate->heap();
5749 Handle<Map> map1 = Map::Create(isolate, 1);
5751 Map::CopyWithField(map1, factory->NewStringFromStaticChars("foo"),
5752 HeapType::Any(isolate), NONE, Representation::Tagged(),
5753 OMIT_TRANSITION).ToHandleChecked();
// Offset within the page such that the object ends at the page boundary.
5755 int desired_offset = Page::kPageSize - map1->instance_size();
5757 // Allocate fixed array in old pointer space so, that object allocated
5758 // afterwards would end at the end of the page.
5760 SimulateFullSpace(heap->old_space());
5761 int padding_size = desired_offset - Page::kObjectStartOffset;
5762 int padding_array_length =
5763 (padding_size - FixedArray::kHeaderSize) / kPointerSize;
5765 Handle<FixedArray> temp2 =
5766 factory->NewFixedArray(padding_array_length, TENURED);
5767 Page* page = Page::FromAddress(temp2->address());
5768 CHECK_EQ(Page::kObjectStartOffset, page->Offset(temp2->address()));
5771 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
5772 o->set_properties(*factory->empty_fixed_array());
5774 // Ensure that the object allocated where we need it.
5775 Page* page = Page::FromAddress(o->address());
5776 CHECK_EQ(desired_offset, page->Offset(o->address()));
5778 // Now we have an object right at the end of the page.
5780 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5781 // that would cause crash.
5782 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5784 marking->Start(Heap::kNoGCFlags);
5785 CHECK(marking->IsMarking());
5787 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5788 // when it calls heap->AdjustLiveBytes(...).
5789 JSObject::MigrateToMap(o, map2);
// Body of a weak-map/incremental-marking regression test (the opening
// TEST(...) line is elided in this excerpt — confirm the test name against
// the full file). It incrementally marks a WeakMap's backing store black,
// then grows the map (replacing the backing store) and forces a full GC,
// ensuring the old, stashed backing store does not cause problems.
5794 i::FLAG_expose_gc = true;
5795 CcTest::InitializeVM();
5796 v8::HandleScope scope(CcTest::isolate());
5797 Isolate* isolate = CcTest::i_isolate();
5798 Heap* heap = isolate->heap();
5799 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5800 v8::Local<v8::Value> result = CompileRun(
5801 "var weak_map = new WeakMap();"
5802 "var future_keys = [];"
5803 "for (var i = 0; i < 50; i++) {"
5804 " var key = {'k' : i + 0.1};"
5805 " weak_map.set(key, 1);"
5806 " future_keys.push({'x' : i + 0.2});"
5809 if (marking->IsStopped()) {
5810 marking->Start(Heap::kNoGCFlags);
5812 // Incrementally mark the backing store.
5813 Handle<JSObject> obj =
5814 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5815 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
// Step marking until the table is black (or marking finished).
5816 while (!Marking::IsBlack(
5817 Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
5818 !marking->IsStopped()) {
5819 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5821 // Stash the backing store in a handle.
5822 Handle<Object> save(weak_map->table(), isolate);
5823 // The following line will update the backing store.
5825 "for (var i = 0; i < 50; i++) {"
5826 " weak_map.set(future_keys[i], i);"
5828 heap->incremental_marking()->set_should_hurry(true);
5829 heap->CollectGarbage(OLD_SPACE);
// Regression test for crbug 442710: left-trimming an array via shift()
// followed by an old-space GC must not crash.
5833 TEST(Regress442710) {
5834 CcTest::InitializeVM();
5835 Isolate* isolate = CcTest::i_isolate();
5836 Heap* heap = isolate->heap();
5837 Factory* factory = isolate->factory();
5839 HandleScope sc(isolate);
5840 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
5841 Handle<JSArray> array = factory->NewJSArray(2);
5843 Handle<String> name = factory->InternalizeUtf8String("testArray");
5844 JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
5845 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5846 heap->CollectGarbage(OLD_SPACE);
5850 HEAP_TEST(NumberStringCacheSize) {
5851 // Test that the number-string cache has not been resized in the snapshot.
5852 CcTest::InitializeVM();
5853 Isolate* isolate = CcTest::i_isolate();
// Only meaningful when booting from a snapshot.
5854 if (!isolate->snapshot_available()) return;
5855 Heap* heap = isolate->heap();
// Cache stores key/value pairs, hence length == 2 * entry count.
5856 CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
5857 heap->number_string_cache()->length());
// Body of a prototype weak-cell test (the opening TEST(...) line is elided
// in this excerpt — confirm the test name against the full file). Verifies
// that a class prototype is kept alive only while some live map still uses
// it as prototype; once that map dies, the prototype's WeakCell clears.
5862 CcTest::InitializeVM();
5863 Isolate* isolate = CcTest::i_isolate();
5864 Heap* heap = isolate->heap();
5865 Factory* factory = isolate->factory();
5866 HandleScope scope(isolate);
5867 CompileRun("function cls() { this.x = 10; }");
5868 Handle<WeakCell> weak_prototype;
5870 HandleScope inner_scope(isolate);
5871 v8::Local<v8::Value> result = CompileRun("cls.prototype");
5872 Handle<JSObject> proto =
5873 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
// Only the WeakCell escapes; 'proto' itself has no strong handle after this.
5874 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
5876 CHECK(!weak_prototype->cleared());
5880 "cls.prototype = null;");
5881 for (int i = 0; i < 4; i++) {
5882 heap->CollectAllGarbage();
5884 // The map of a.x keeps prototype alive
5885 CHECK(!weak_prototype->cleared());
5886 // Change the map of a.x and make the previous map garbage collectable.
5887 CompileRun("a.x.__proto__ = {};");
5888 for (int i = 0; i < 4; i++) {
5889 heap->CollectAllGarbage();
5891 CHECK(weak_prototype->cleared());
// Helper: creates a fresh map with a script-allocated prototype, registers
// it in the heap's retained-maps list, and returns its weak cell so callers
// can observe when the map is collected.
5895 Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
5896 HandleScope inner_scope(isolate);
5897 Handle<Map> map = Map::Create(isolate, 1);
5898 v8::Local<v8::Value> result =
5899 CompileRun("(function () { return {x : 10}; })();");
5900 Handle<JSObject> proto =
5901 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5902 Map::SetPrototype(map, proto);
5903 heap->AddRetainedMap(map);
5904 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
// Checks the --retain-maps-for-n-gc policy for a given n: an otherwise
// unreachable retained map must survive exactly n old-space GCs and be
// collected by the (n+1)-th.
5908 void CheckMapRetainingFor(int n) {
5909 FLAG_retain_maps_for_n_gc = n;
5910 Isolate* isolate = CcTest::i_isolate();
5911 Heap* heap = isolate->heap();
5912 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
5913 CHECK(!weak_cell->cleared());
5914 for (int i = 0; i < n; i++) {
5915 heap->CollectGarbage(OLD_SPACE);
5917 CHECK(!weak_cell->cleared());
// One GC beyond the retention window must clear the map's weak cell.
5918 heap->CollectGarbage(OLD_SPACE);
5919 CHECK(weak_cell->cleared());
// Exercises map retention for the default flag value plus the boundary and
// small sample values 0, 1 and 7.
5923 TEST(MapRetaining) {
5924 CcTest::InitializeVM();
5925 v8::HandleScope scope(CcTest::isolate());
5926 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5927 CheckMapRetainingFor(0);
5928 CheckMapRetainingFor(1);
5929 CheckMapRetainingFor(7);
// Regression test: adding retained maps (growing the internal ArrayList)
// while a global GC can be triggered mid-addition must not corrupt the
// retained-maps list.
5933 TEST(RegressArrayListGC) {
5934 FLAG_retain_maps_for_n_gc = 1;
5935 FLAG_incremental_marking = 0;
5936 FLAG_gc_global = true;
5937 CcTest::InitializeVM();
5938 v8::HandleScope scope(CcTest::isolate());
5939 Isolate* isolate = CcTest::i_isolate();
5940 Heap* heap = isolate->heap();
5941 AddRetainedMap(isolate, heap);
5942 Handle<Map> map = Map::Create(isolate, 1);
5943 heap->CollectGarbage(OLD_SPACE);
5944 // Force GC in old space on next addition of retained map.
5945 Map::WeakCellForMap(map);
5946 SimulateFullSpace(CcTest::heap()->new_space());
5947 for (int i = 0; i < 10; i++) {
5948 heap->AddRetainedMap(map);
5950 heap->CollectGarbage(OLD_SPACE);
// Body of a path-tracer smoke test (the opening TEST(...) line is elided in
// this excerpt — confirm the test name against the full file). It simply
// runs TracePathToObject on a string literal's handle; success is "does
// not crash".
5956 CcTest::InitializeVM();
5957 v8::HandleScope scope(CcTest::isolate());
5959 v8::Local<v8::Value> result = CompileRun("'abc'");
5960 Handle<Object> o = v8::Utils::OpenHandle(*result);
5961 CcTest::i_isolate()->heap()->TracePathToObject(*o);
// Checks the static classification of strong roots: no root index may be
// reported both as writable-after-initialization and as immortal-immovable.
5966 TEST(WritableVsImmortalRoots) {
5967 for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
5968 Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
5969 bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
5970 bool immortal = Heap::RootIsImmortalImmovable(root_index);
5971 // A root value can be writable, immortal, or neither, but not both.
5972 CHECK(!immortal || !writable);
// Allocates a FixedTypedArray of |initial_length| elements of |type|,
// right-trims |elements_to_trim| elements, and checks that the array header
// survives and a free-space filler follows the shrunken object when the
// in-bytes size actually changed.
5977 static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
5979 int elements_to_trim) {
5980 v8::HandleScope scope(CcTest::isolate());
5981 Isolate* isolate = CcTest::i_isolate();
5982 Factory* factory = isolate->factory();
5983 Heap* heap = isolate->heap();
5985 Handle<FixedTypedArrayBase> array =
5986 factory->NewFixedTypedArray(initial_length, type, true);
5987 int old_size = array->size();
5988 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
5991 // Check that free space filler is at the right place and did not smash the
5993 CHECK(array->IsFixedArrayBase());
5994 CHECK_EQ(initial_length - elements_to_trim, array->length());
5995 int new_size = array->size();
// Small trims can be absorbed by alignment padding, in which case the byte
// size is unchanged and no filler is required.
5996 if (new_size != old_size) {
5997 // Free space filler should be created in this case.
5998 Address next_obj_address = array->address() + array->size();
5999 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
// A full GC afterwards will iterate the heap and should fail if trimming
// left it in an inconsistent state.
6001 heap->CollectAllAvailableGarbage();
// Regression test for crbug.com/472513: right-trimming fixed typed arrays
// with particular element types/lengths used to smash the typed-array
// header with the free-space filler.
6005 TEST(Regress472513) {
6006 CcTest::InitializeVM();
6007 v8::HandleScope scope(CcTest::isolate());
6009 // The combination of type/initial_length/elements_to_trim triggered
6010 // typed array header smashing with free space filler (crbug/472513).
6013 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32, 6);
6014 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32 - 7, 6);
6015 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16, 6);
6016 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16 - 3, 6);
6017 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8, 6);
6018 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8 - 1, 6);
6021 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16, 3);
6022 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16 - 3, 3);
6023 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8, 3);
6024 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8 - 1, 3);
6025 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 4, 3);
// Smoke test for WeakFixedArray: Add starting from an empty handle (creates
// the array), Remove the element, Compact with the null callback, then Add
// again — none of which may crash.
6029 TEST(WeakFixedArray) {
6030 CcTest::InitializeVM();
6031 v8::HandleScope scope(CcTest::isolate());
6033 Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
// Passing an empty Handle<Object>() as the "maybe array" argument.
6034 Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
6035 array->Remove(number);
6036 array->Compact<WeakFixedArray::NullCallback>();
6037 WeakFixedArray::Add(array, number);
// Checks that GC preprocesses captured stack traces: before GC the raw
// stack-trace array holds Code objects; after a full GC the sampled entry
// becomes a Smi (position) and no Code objects remain anywhere in the array.
6041 TEST(PreprocessStackTrace) {
6042 // Do not automatically trigger early GC.
6043 FLAG_gc_interval = -1;
6044 CcTest::InitializeVM();
6045 v8::HandleScope scope(CcTest::isolate());
6046 v8::TryCatch try_catch(CcTest::isolate());
6047 CompileRun("throw new Error();");
6048 CHECK(try_catch.HasCaught());
6049 Isolate* isolate = CcTest::i_isolate();
6050 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
// The raw trace is stored on the exception under the stack_trace_symbol.
6051 Handle<Name> key = isolate->factory()->stack_trace_symbol();
6052 Handle<Object> stack_trace =
6053 JSObject::GetProperty(exception, key).ToHandleChecked();
// Element 3 is a Code object in the unprocessed trace.
6054 Handle<Object> code =
6055 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6056 CHECK(code->IsCode());
6058 isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");
// After GC the same element has been replaced by a Smi.
6060 Handle<Object> pos =
6061 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6062 CHECK(pos->IsSmi());
6064 Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
6065 int array_length = Smi::cast(stack_trace_array->length())->value();
6066 for (int i = 0; i < array_length; i++) {
6067 Handle<Object> element =
6068 Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
6069 CHECK(!element->IsCode());
// Flag plus weak callback used by TEST(BootstrappingExports): the callback
// records that the weakly-held utils object was collected and resets the
// Persistent handle passed as the callback parameter.
6074 static bool utils_has_been_collected = false;
6076 static void UtilsHasBeenCollected(
6077 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
6078 utils_has_been_collected = true;
6079 data.GetParameter()->Reset();
// Verifies that the bootstrapper's natives.utils export does not leak: once
// "utils" is deleted from the natives object and only held weakly, a full GC
// must collect it and fire the weak callback. Skipped when the isolate
// starts from a snapshot.
6083 TEST(BootstrappingExports) {
6084 FLAG_expose_natives_as = "natives";
6085 CcTest::InitializeVM();
6086 v8::Isolate* isolate = CcTest::isolate();
6088 if (Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return;
6090 utils_has_been_collected = false;
6092 v8::Persistent<v8::Object> utils;
6095 v8::HandleScope scope(isolate);
6096 v8::Handle<v8::Object> natives =
6097 CcTest::global()->Get(v8_str("natives"))->ToObject(isolate);
// Grab utils, then remove the only strong reference to it from natives.
6098 utils.Reset(isolate, natives->Get(v8_str("utils"))->ToObject(isolate));
6099 natives->Delete(v8_str("utils"));
6102 utils.SetWeak(&utils, UtilsHasBeenCollected,
6103 v8::WeakCallbackType::kParameter);
6105 CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");
6107 CHECK(utils_has_been_collected);
// NOTE(review): the TEST(...) header is elided in the excerpt (numbering
// jumps 6107 -> 6112). The visible body exposes the InternalArray
// constructor to script and allocates many large InternalArrays, checking
// via runtime intrinsics that each has fast object elements and does not
// share the map of some object |a| set up in elided script lines — confirm
// against the full file.
6112 FLAG_allow_natives_syntax = true;
6113 CcTest::InitializeVM();
6114 v8::Isolate* isolate = CcTest::isolate();
6115 v8::HandleScope scope(isolate);
6116 v8::Local<v8::Function> constructor =
6117 v8::Utils::ToLocal(CcTest::i_isolate()->internal_array_function());
6118 CcTest::global()->Set(v8_str("InternalArray"), constructor);
6120 v8::TryCatch try_catch(isolate);
6124 "for (var i = 0; i < 1000; i++) {"
6125 " var ai = new InternalArray(10000);"
6126 " if (%HaveSameMap(ai, a)) throw Error();"
6127 " if (!%HasFastObjectElements(ai)) throw Error();"
6129 "for (var i = 0; i < 1000; i++) {"
6130 " var ai = new InternalArray(10000);"
6131 " if (%HaveSameMap(ai, a)) throw Error();"
6132 " if (!%HasFastObjectElements(ai)) throw Error();"
// No script exception may have escaped.
6135 CHECK(!try_catch.HasCaught());
// Allocates a FixedArray occupying exactly |bytes| in |space| (untenured for
// NEW_SPACE, tenured otherwise) and checks its placement and size.
// |bytes| must cover at least the FixedArray header and be pointer-aligned.
// NOTE(review): the declaration line for the element count (original line
// 6145) is elided in this excerpt.
6139 void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
6140 CHECK(bytes >= FixedArray::kHeaderSize);
6141 CHECK(bytes % kPointerSize == 0);
6142 Factory* factory = isolate->factory();
6143 HandleScope scope(isolate);
// AlwaysAllocateScope prevents the allocation from triggering a GC retry
// path that could disturb the counters under test.
6144 AlwaysAllocateScope always_allocate(isolate);
6146 static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
6147 Handle<FixedArray> array = factory->NewFixedArray(
6148 elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
6149 CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
6150 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
// The new-space allocation counter must advance by exactly the bytes
// allocated, must not advance across a scavenge with no allocation, and
// must keep producing correct deltas across size_t overflow.
6154 TEST(NewSpaceAllocationCounter) {
6155 CcTest::InitializeVM();
6156 v8::HandleScope scope(CcTest::isolate());
6157 Isolate* isolate = CcTest::i_isolate();
6158 Heap* heap = isolate->heap();
6159 size_t counter1 = heap->NewSpaceAllocationCounter();
6160 heap->CollectGarbage(NEW_SPACE);
6161 const size_t kSize = 1024;
6162 AllocateInSpace(isolate, kSize, NEW_SPACE);
6163 size_t counter2 = heap->NewSpaceAllocationCounter();
6164 CHECK_EQ(kSize, counter2 - counter1);
// A scavenge with no intervening allocation leaves the counter unchanged.
6165 heap->CollectGarbage(NEW_SPACE);
6166 size_t counter3 = heap->NewSpaceAllocationCounter();
6167 CHECK_EQ(0U, counter3 - counter2);
6168 // Test counter overflow.
6169 size_t max_counter = -1;
// Start just below SIZE_MAX so the loop wraps the counter; unsigned
// subtraction still yields the correct per-iteration delta.
6170 heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
6171 size_t start = heap->NewSpaceAllocationCounter();
6172 for (int i = 0; i < 20; i++) {
6173 AllocateInSpace(isolate, kSize, NEW_SPACE);
6174 size_t counter = heap->NewSpaceAllocationCounter();
6175 CHECK_EQ(kSize, counter - start);
// Mirror of NewSpaceAllocationCounter for the old-generation counter. Uses
// CHECK_LE instead of CHECK_EQ because other old-space allocations can
// happen concurrently (see the in-code TODO referencing v8:4148).
6181 TEST(OldSpaceAllocationCounter) {
6182 CcTest::InitializeVM();
6183 v8::HandleScope scope(CcTest::isolate());
6184 Isolate* isolate = CcTest::i_isolate();
6185 Heap* heap = isolate->heap();
6186 size_t counter1 = heap->OldGenerationAllocationCounter();
6187 heap->CollectGarbage(NEW_SPACE);
6188 heap->CollectGarbage(NEW_SPACE);
6189 const size_t kSize = 1024;
6190 AllocateInSpace(isolate, kSize, OLD_SPACE);
6191 size_t counter2 = heap->OldGenerationAllocationCounter();
6192 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
6193 CHECK_LE(kSize, counter2 - counter1);
6194 heap->CollectGarbage(NEW_SPACE);
6195 size_t counter3 = heap->OldGenerationAllocationCounter();
6196 CHECK_EQ(0u, counter3 - counter2);
6197 AllocateInSpace(isolate, kSize, OLD_SPACE);
6198 heap->CollectGarbage(OLD_SPACE);
6199 size_t counter4 = heap->OldGenerationAllocationCounter();
6200 CHECK_LE(kSize, counter4 - counter3);
6201 // Test counter overflow.
6202 size_t max_counter = -1;
// Start just below SIZE_MAX so the loop wraps; unsigned subtraction keeps
// the deltas meaningful.
6203 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
6204 size_t start = heap->OldGenerationAllocationCounter();
6205 for (int i = 0; i < 20; i++) {
6206 AllocateInSpace(isolate, kSize, OLD_SPACE);
6207 size_t counter = heap->OldGenerationAllocationCounter();
6208 CHECK_LE(kSize, counter - start);
// Feeds synthetic allocation samples to the GC tracer and checks that the
// reported new-space throughput equals bytes-delta / time-delta, both for
// two samples and after a third sample further out.
// NOTE(review): the declarations of time1/time2/time3 (original lines 6220,
// 6223, 6229) are elided in this excerpt.
6214 TEST(NewSpaceAllocationThroughput) {
6215 CcTest::InitializeVM();
6216 v8::HandleScope scope(CcTest::isolate());
6217 Isolate* isolate = CcTest::i_isolate();
6218 Heap* heap = isolate->heap();
6219 GCTracer* tracer = heap->tracer();
6221 size_t counter1 = 1000;
6222 tracer->SampleAllocation(time1, counter1, 0);
6224 size_t counter2 = 2000;
6225 tracer->SampleAllocation(time2, counter2, 0);
6227 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6228 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6230 size_t counter3 = 30000;
6231 tracer->SampleAllocation(time3, counter3, 0);
6232 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6233 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
// Same as NewSpaceAllocationThroughput but queries the throughput over an
// explicit 100ms time window argument.
// NOTE(review): the declarations of time1/time2/time3 are elided in this
// excerpt.
6237 TEST(NewSpaceAllocationThroughput2) {
6238 CcTest::InitializeVM();
6239 v8::HandleScope scope(CcTest::isolate());
6240 Isolate* isolate = CcTest::i_isolate();
6241 Heap* heap = isolate->heap();
6242 GCTracer* tracer = heap->tracer();
6244 size_t counter1 = 1000;
6245 tracer->SampleAllocation(time1, counter1, 0);
6247 size_t counter2 = 2000;
6248 tracer->SampleAllocation(time2, counter2, 0);
6250 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6251 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6253 size_t counter3 = 30000;
6254 tracer->SampleAllocation(time3, counter3, 0);
6255 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6256 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
// Native callback installed as check(): asserts the isolate's pending
// message slot holds the hole, i.e. no message object leaked past the
// try/catch that consumed it.
// NOTE(review): the declaration line for |message| (original line 6262) is
// elided in this excerpt.
6260 static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
6261 Isolate* isolate = CcTest::i_isolate();
6263 *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
6264 CHECK(message->IsTheHole());
// Installs CheckLeak as a global check() function and runs scripts that
// throw (script bodies partially elided in this excerpt), verifying no
// pending message object survives. A second configuration re-runs with
// TurboFan always-opt and try/catch/finally support enabled.
6268 TEST(MessageObjectLeak) {
6269 CcTest::InitializeVM();
6270 v8::Isolate* isolate = CcTest::isolate();
6271 v8::HandleScope scope(isolate);
6272 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
6273 global->Set(v8::String::NewFromUtf8(isolate, "check"),
6274 v8::FunctionTemplate::New(isolate, CheckLeak));
6275 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
6276 v8::Context::Scope cscope(context);
6280 " throw 'message 1';"
6285 " throw 'message 2';"
// Repeat with TurboFan forced on for all functions.
6292 const char* flag = "--turbo-filter=*";
6293 FlagList::SetFlagsFromString(flag, StrLength(flag));
6294 FLAG_always_opt = true;
6295 FLAG_turbo_try_catch = true;
6296 FLAG_turbo_try_finally = true;
// Native callback: asserts that the two JSFunction arguments share the same
// SharedFunctionInfo object.
6302 static void CheckEqualSharedFunctionInfos(
6303 const v8::FunctionCallbackInfo<v8::Value>& args) {
6304 Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
6305 Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
6306 Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
6307 Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
6308 CHECK(fun1->shared() == fun2->shared());
// Native callback: strips the compiled code off the JSFunction argument and
// its SharedFunctionInfo (resetting both to the CompileLazy builtin), then
// forces a full GC so the old code can be reclaimed.
6312 static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
6313 Isolate* isolate = CcTest::i_isolate();
6314 Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
6315 Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
6316 fun->ReplaceCode(*isolate->builtins()->CompileLazy());
6317 fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
6318 isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
// Verifies SharedFunctionInfo canonicalization: after remove() strips a
// function's code and GC runs, recompiling must yield the same canonical
// SharedFunctionInfo (checked via check()). Script bodies are partially
// elided in this excerpt.
6322 TEST(CanonicalSharedFunctionInfo) {
6323 CcTest::InitializeVM();
6324 v8::Isolate* isolate = CcTest::isolate();
6325 v8::HandleScope scope(isolate);
6326 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
6327 global->Set(isolate, "check", v8::FunctionTemplate::New(
6328 isolate, CheckEqualSharedFunctionInfos));
6329 global->Set(isolate, "remove",
6330 v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
6331 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
6332 v8::Context::Scope cscope(context);
6334 "function f() { return function g() {}; }"
6341 "function f() { return (function() { return function g() {}; })(); }"
// Old-generation analogue of the new-space throughput tests: feeds synthetic
// samples (old-gen counter in the third argument) and checks the 100ms
// windowed throughput equals bytes-delta / time-delta.
// NOTE(review): time1/time2/time3 declarations are elided in this excerpt.
6349 TEST(OldGenerationAllocationThroughput) {
6350 CcTest::InitializeVM();
6351 v8::HandleScope scope(CcTest::isolate());
6352 Isolate* isolate = CcTest::i_isolate();
6353 Heap* heap = isolate->heap();
6354 GCTracer* tracer = heap->tracer();
6356 size_t counter1 = 1000;
6357 tracer->SampleAllocation(time1, 0, counter1);
6359 size_t counter2 = 2000;
6360 tracer->SampleAllocation(time2, 0, counter2);
6362 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6363 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6365 size_t counter3 = 30000;
6366 tracer->SampleAllocation(time3, 0, counter3);
6368 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6369 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
// Combined throughput: samples feed equal amounts into both the new-space
// and old-generation counters, so the combined rate is exactly twice the
// per-space rate.
// NOTE(review): time1/time2/time3 declarations are elided in this excerpt.
6373 TEST(AllocationThroughput) {
6374 CcTest::InitializeVM();
6375 v8::HandleScope scope(CcTest::isolate());
6376 Isolate* isolate = CcTest::i_isolate();
6377 Heap* heap = isolate->heap();
6378 GCTracer* tracer = heap->tracer();
6380 size_t counter1 = 1000;
6381 tracer->SampleAllocation(time1, counter1, counter1);
6383 size_t counter2 = 2000;
6384 tracer->SampleAllocation(time2, counter2, counter2);
6385 size_t throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6386 CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
6388 size_t counter3 = 30000;
6389 tracer->SampleAllocation(time3, counter3, counter3);
6390 throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6391 CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);
// Checks SlotsBuffer::RemoveObjectSlots: both untyped entries and typed
// (EMBEDDED_OBJECT_SLOT) entries that point into the removed object's
// address range must be rewritten to the sentinel slot (a field of the
// empty fixed array), as the post-removal CHECKs demonstrate.
// NOTE(review): cleanup of |buffer| is not visible in this excerpt.
6395 TEST(SlotsBufferObjectSlotsRemoval) {
6396 CcTest::InitializeVM();
6397 v8::HandleScope scope(CcTest::isolate());
6398 Isolate* isolate = CcTest::i_isolate();
6399 Heap* heap = isolate->heap();
6400 Factory* factory = isolate->factory();
6402 SlotsBuffer* buffer = new SlotsBuffer(NULL);
// Stack storage masquerading as a heap object pointer; stored with
// SKIP_WRITE_BARRIER so nothing else records this fake reference.
6403 void* fake_object[1];
6405 Handle<FixedArray> array = factory->NewFixedArray(2, TENURED);
6406 CHECK(heap->old_space()->Contains(*array));
6407 array->set(0, reinterpret_cast<Object*>(fake_object), SKIP_WRITE_BARRIER);
6409 // Firstly, let's test the regular slots buffer entry.
6410 buffer->Add(HeapObject::RawField(*array, FixedArray::kHeaderSize));
6411 CHECK(reinterpret_cast<void*>(buffer->Get(0)) ==
6412 HeapObject::RawField(*array, FixedArray::kHeaderSize));
6413 SlotsBuffer::RemoveObjectSlots(CcTest::i_isolate()->heap(), buffer,
6415 array->address() + array->Size());
6416 CHECK(reinterpret_cast<void*>(buffer->Get(0)) ==
6417 HeapObject::RawField(heap->empty_fixed_array(),
6418 FixedArrayBase::kLengthOffset));
6420 // Secondly, let's test the typed slots buffer entry.
// Typed entries occupy two buffer cells: the slot type tag, then the
// address.
6421 SlotsBuffer::AddTo(NULL, &buffer, SlotsBuffer::EMBEDDED_OBJECT_SLOT,
6422 array->address() + FixedArray::kHeaderSize,
6423 SlotsBuffer::FAIL_ON_OVERFLOW);
6424 CHECK(reinterpret_cast<void*>(buffer->Get(1)) ==
6425 reinterpret_cast<Object**>(SlotsBuffer::EMBEDDED_OBJECT_SLOT));
6426 CHECK(reinterpret_cast<void*>(buffer->Get(2)) ==
6427 HeapObject::RawField(*array, FixedArray::kHeaderSize));
6428 SlotsBuffer::RemoveObjectSlots(CcTest::i_isolate()->heap(), buffer,
6430 array->address() + array->Size());
// Both cells of the typed entry must be overwritten with the sentinel.
6431 CHECK(reinterpret_cast<void*>(buffer->Get(1)) ==
6432 HeapObject::RawField(heap->empty_fixed_array(),
6433 FixedArrayBase::kLengthOffset));
6434 CHECK(reinterpret_cast<void*>(buffer->Get(2)) ==
6435 HeapObject::RawField(heap->empty_fixed_array(),
6436 FixedArrayBase::kLengthOffset));
// Sanity-checks ContextMeasure on the native context: its reported object
// count and byte size must be non-trivial (>= 1000 objects, >= 50000 bytes)
// yet bounded above by a full-heap walk's totals.
6441 TEST(ContextMeasure) {
6442 CcTest::InitializeVM();
6443 v8::HandleScope scope(CcTest::isolate());
6444 Isolate* isolate = CcTest::i_isolate();
6445 LocalContext context;
// Compute upper bounds by summing every object on the heap.
6447 int size_upper_limit = 0;
6448 int count_upper_limit = 0;
6449 HeapIterator it(CcTest::heap());
6450 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6451 size_upper_limit += obj->Size();
6452 count_upper_limit++;
6455 ContextMeasure measure(*isolate->native_context());
6457 PrintF("Context size : %d bytes\n", measure.Size());
6458 PrintF("Context object count: %d\n", measure.Count());
6460 CHECK_LE(1000, measure.Count());
6461 CHECK_LE(50000, measure.Size());
6463 CHECK_LE(measure.Count(), count_upper_limit);
6464 CHECK_LE(measure.Size(), size_upper_limit);
// Verifies Script::Iterator visits every Script object: a heap walk counts
// the scripts, the iterator decrements the count, and the result must be
// exactly zero.
6468 TEST(ScriptIterator) {
6469 CcTest::InitializeVM();
6470 v8::HandleScope scope(CcTest::isolate());
6471 Isolate* isolate = CcTest::i_isolate();
6472 Heap* heap = CcTest::heap();
6473 LocalContext context;
// GC first so the heap walk below sees a settled heap.
6475 heap->CollectAllGarbage();
6477 int script_count = 0;
6479 HeapIterator it(heap);
6480 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6481 if (obj->IsScript()) script_count++;
6486 Script::Iterator iterator(isolate);
6487 while (iterator.Next()) script_count--;
6490 CHECK_EQ(0, script_count);
// Verifies SharedFunctionInfo::Iterator visits every script-backed SFI: a
// heap walk counts them (skipping script-less SFIs such as API functions /
// C++ builtins, which must have no IC slots), the iterator decrements the
// count, and the result must be zero.
// NOTE(review): the |sfi_count| declaration/increment lines are elided in
// this excerpt (numbering gaps around 6505 and 6515-6521).
6494 TEST(SharedFunctionInfoIterator) {
6495 CcTest::InitializeVM();
6496 v8::HandleScope scope(CcTest::isolate());
6497 Isolate* isolate = CcTest::i_isolate();
6498 Heap* heap = CcTest::heap();
6499 LocalContext context;
6501 heap->CollectAllGarbage();
6502 heap->CollectAllGarbage();
6506 HeapIterator it(heap);
6507 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6508 if (!obj->IsSharedFunctionInfo()) continue;
6509 // Shared function infos without a script (API functions or C++ builtins)
6510 // are not returned by the iterator because they are not created from a
6511 // script. They are not interesting for type feedback vector anyways.
6512 SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
6513 if (shared->script()->IsUndefined()) {
6514 CHECK_EQ(0, shared->feedback_vector()->ICSlots());
6522 SharedFunctionInfo::Iterator iterator(isolate);
6523 while (iterator.Next()) sfi_count--;
6526 CHECK_EQ(0, sfi_count);
6529 } // namespace internal