1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 #include "src/compilation-cache.h"
34 #include "src/deoptimizer.h"
35 #include "src/execution.h"
36 #include "src/factory.h"
37 #include "src/global-handles.h"
38 #include "src/ic/ic.h"
39 #include "src/macro-assembler.h"
40 #include "src/snapshot/snapshot.h"
41 #include "test/cctest/cctest.h"
43 using namespace v8::internal;
// Asserts the basic invariants of a Map: it is a heap object that lives in
// the test heap, its own map is the meta map, and its instance type and size
// match the expected values.
46 static void CheckMap(Map* map, int type, int instance_size) {
47 CHECK(map->IsHeapObject());
// The map must be allocated inside the isolate's heap.
49 CHECK(CcTest::heap()->Contains(map));
// Every Map's map is the singleton meta map.
51 CHECK_EQ(CcTest::heap()->meta_map(), map->map());
52 CHECK_EQ(type, map->instance_type());
53 CHECK_EQ(instance_size, map->instance_size());
// Checks a selection of well-known root maps against their expected instance
// types and sizes (variable-sized types use kVariableSizeSentinel).
58 CcTest::InitializeVM();
59 Heap* heap = CcTest::heap();
60 CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
61 CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
62 CheckMap(heap->float32x4_map(), FLOAT32X4_TYPE, Float32x4::kSize);
63 CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
64 CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
// Verifies that |obj| is an oddball and that its ToString conversion equals
// the expected UTF-8 |string|.
68 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
69 CHECK(obj->IsOddball());
70 Handle<Object> handle(obj, isolate);
71 Object* print_string =
72 *Execution::ToString(isolate, handle).ToHandleChecked();
73 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Verifies that the Smi built from |value| stringifies to |string|.
77 static void CheckSmi(Isolate* isolate, int value, const char* string) {
78 Handle<Object> handle(Smi::FromInt(value), isolate);
79 Object* print_string =
80 *Execution::ToString(isolate, handle).ToHandleChecked();
81 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
// Verifies that a freshly allocated number with the given double |value|
// is a Number and stringifies to |string|.
85 static void CheckNumber(Isolate* isolate, double value, const char* string) {
86 Handle<Object> number = isolate->factory()->NewNumber(value);
87 CHECK(number->IsNumber());
88 Handle<Object> print_string =
89 Execution::ToString(isolate, number).ToHandleChecked();
90 CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
// Exercises Isolate::FindCodeObject: every pointer-aligned interior address
// of a Code object must resolve back to that object, while an address inside
// a different Code object must not resolve to the first one.
94 static void CheckFindCodeObject(Isolate* isolate) {
95 // Test FindCodeObject
98 Assembler assm(isolate, NULL, 0);
100 __ nop(); // supported on all architectures
104 Handle<Code> code = isolate->factory()->NewCode(
105 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
106 CHECK(code->IsCode());
108 HeapObject* obj = HeapObject::cast(*code);
109 Address obj_addr = obj->address();
// Probe every pointer-aligned offset inside the object.
111 for (int i = 0; i < obj->Size(); i += kPointerSize) {
112 Object* found = isolate->FindCodeObject(obj_addr + i);
113 CHECK_EQ(*code, found);
// A second code object: a lookup inside it must not yield |code|.
116 Handle<Code> copy = isolate->factory()->NewCode(
117 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
118 HeapObject* obj_copy = HeapObject::cast(*copy);
119 Object* not_right = isolate->FindCodeObject(obj_copy->address() +
120 obj_copy->Size() / 2);
121 CHECK(not_right != *code);
// Wraps a NULL Object pointer in a handle inside a live context; presumably
// the (not visible here) remainder of the test checks that such a handle is
// usable/printable — TODO confirm against the full file.
126 CcTest::InitializeVM();
127 Isolate* isolate = CcTest::i_isolate();
128 HandleScope outer_scope(isolate);
129 LocalContext context;
130 Handle<Object> n(reinterpret_cast<Object*>(NULL), isolate);
// Allocates a variety of heap values (HeapNumbers, Smis, boundary integers,
// strings, oddballs) and checks their type predicates, numeric values and
// ToString conversions, ending with the FindCodeObject check.
136 CcTest::InitializeVM();
137 Isolate* isolate = CcTest::i_isolate();
138 Factory* factory = isolate->factory();
139 Heap* heap = isolate->heap();
141 HandleScope sc(isolate);
// A non-integral double must be boxed as a HeapNumber.
142 Handle<Object> value = factory->NewNumber(1.000123);
143 CHECK(value->IsHeapNumber());
144 CHECK(value->IsNumber());
145 CHECK_EQ(1.000123, value->Number());
// A whole double in Smi range is canonicalized to a Smi.
147 value = factory->NewNumber(1.0);
148 CHECK(value->IsSmi());
149 CHECK(value->IsNumber());
150 CHECK_EQ(1.0, value->Number());
152 value = factory->NewNumberFromInt(1024);
153 CHECK(value->IsSmi());
154 CHECK(value->IsNumber());
155 CHECK_EQ(1024.0, value->Number());
// The Smi boundary values must still be representable as Smis.
157 value = factory->NewNumberFromInt(Smi::kMinValue);
158 CHECK(value->IsSmi());
159 CHECK(value->IsNumber());
160 CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
162 value = factory->NewNumberFromInt(Smi::kMaxValue);
163 CHECK(value->IsSmi());
164 CHECK(value->IsNumber());
165 CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
// On 32-bit targets, values just outside the Smi range need a HeapNumber.
167 #if !defined(V8_TARGET_ARCH_64_BIT)
168 // TODO(lrn): We need a NumberFromIntptr function in order to test this.
169 value = factory->NewNumberFromInt(Smi::kMinValue - 1);
170 CHECK(value->IsHeapNumber());
171 CHECK(value->IsNumber());
172 CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
175 value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
176 CHECK(value->IsHeapNumber());
177 CHECK(value->IsNumber());
178 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
181 value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
182 CHECK(value->IsHeapNumber());
183 CHECK(value->IsNumber());
184 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
187 // nan oddball checks
188 CHECK(factory->nan_value()->IsNumber());
189 CHECK(std::isnan(factory->nan_value()->Number()));
191 Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
192 CHECK(s->IsString());
193 CHECK_EQ(10, s->length());
195 Handle<String> object_string = Handle<String>::cast(factory->Object_string());
196 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
197 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));
199 // Check ToString for oddballs
200 CheckOddball(isolate, heap->true_value(), "true");
201 CheckOddball(isolate, heap->false_value(), "false");
202 CheckOddball(isolate, heap->null_value(), "null");
203 CheckOddball(isolate, heap->undefined_value(), "undefined");
205 // Check ToString for Smis
206 CheckSmi(isolate, 0, "0");
207 CheckSmi(isolate, 42, "42");
208 CheckSmi(isolate, -42, "-42");
210 // Check ToString for Numbers
211 CheckNumber(isolate, 1.1, "1.1");
213 CheckFindCodeObject(isolate);
// Checks that each of the LANES lanes of SIMD value |value| can be written
// back with its own value without disturbing any other lane.
217 template <typename T, typename LANE_TYPE, int LANES>
218 static void CheckSimdLanes(T* value) {
219 // Get the original values, and check that all lanes can be set to new values
220 // without disturbing the other lanes.
221 LANE_TYPE lane_values[LANES];
222 for (int i = 0; i < LANES; i++) {
223 lane_values[i] = value->get_lane(i);
225 for (int i = 0; i < LANES; i++) {
227 value->set_lane(i, lane_values[i]);
// After each write, every lane must still hold its recorded value.
228 for (int j = 0; j < LANES; j++) {
229 CHECK_EQ(lane_values[j], value->get_lane(j));
// Allocates a Float32x4, verifies its type, truthiness and lane accessors,
// its printed representation, and lane round-trips for 0, -0 and NaNs.
236 CcTest::InitializeVM();
237 Isolate* isolate = CcTest::i_isolate();
238 Factory* factory = isolate->factory();
240 HandleScope sc(isolate);
242 Handle<Object> value = factory->NewFloat32x4(1, 2, 3, 4);
243 CHECK(value->IsFloat32x4());
244 CHECK(value->BooleanValue());  // SIMD values map to true.
246 Float32x4* float32x4 = *Handle<Float32x4>::cast(value);
247 CheckSimdLanes<Float32x4, float, 4>(float32x4);
249 // Check ToString for SIMD values.
250 // TODO(bbudge): Switch to Check* style function to test ToString().
251 value = factory->NewFloat32x4(1, 2, 3, 4);
252 float32x4 = *Handle<Float32x4>::cast(value);
253 std::ostringstream os;
254 float32x4->Float32x4Print(os);
255 CHECK_EQ("1, 2, 3, 4", os.str());
257 // Check unusual lane values.
258 float32x4->set_lane(0, 0);
259 CHECK_EQ(0, float32x4->get_lane(0));
260 float32x4->set_lane(1, -0.0);
261 CHECK_EQ(-0.0, float32x4->get_lane(1));
262 float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
263 float signaling_NaN = std::numeric_limits<float>::signaling_NaN();
// Both NaN flavors must survive a set/get round trip as some NaN.
264 float32x4->set_lane(2, quiet_NaN);
265 CHECK(std::isnan(float32x4->get_lane(2)));
266 float32x4->set_lane(3, signaling_NaN);
267 CHECK(std::isnan(float32x4->get_lane(3)));
// Checks pointer alignment of an allocation request size and that Smi
// construction works for a sample value and both boundary values.
272 CcTest::InitializeVM();
274 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
275 CHECK(Smi::FromInt(42)->IsSmi());
276 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
277 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
// Verifies that objects reachable from the global object survive scavenges,
// both directly (a function property) and transitively (an object holding
// Smi-valued properties), across inner handle scopes.
281 TEST(GarbageCollection) {
282 CcTest::InitializeVM();
283 Isolate* isolate = CcTest::i_isolate();
284 Heap* heap = isolate->heap();
285 Factory* factory = isolate->factory();
287 HandleScope sc(isolate);
// Start from a clean new space.
289 heap->CollectGarbage(NEW_SPACE);
291 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
292 Handle<String> name = factory->InternalizeUtf8String("theFunction");
293 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
294 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
295 Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
296 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
297 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
300 HandleScope inner_scope(isolate);
301 // Allocate a function and keep it in global object's property.
302 Handle<JSFunction> function = factory->NewFunction(name);
303 JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
304 // Allocate an object. Unrooted after leaving the scope.
305 Handle<JSObject> obj = factory->NewJSObject(function);
306 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
307 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
309 CHECK_EQ(Smi::FromInt(23),
310 *Object::GetProperty(obj, prop_name).ToHandleChecked());
311 CHECK_EQ(Smi::FromInt(24),
312 *Object::GetProperty(obj, prop_namex).ToHandleChecked());
315 heap->CollectGarbage(NEW_SPACE);
317 // Function should be alive.
318 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
319 // Check function is retained.
320 Handle<Object> func_value =
321 Object::GetProperty(global, name).ToHandleChecked();
322 CHECK(func_value->IsJSFunction());
323 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
326 HandleScope inner_scope(isolate);
327 // Allocate another object, make it reachable from global.
328 Handle<JSObject> obj = factory->NewJSObject(function);
329 JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
330 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
333 // After gc, it should survive.
334 heap->CollectGarbage(NEW_SPACE);
336 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
338 Object::GetProperty(global, obj_name).ToHandleChecked();
339 CHECK(obj->IsJSObject());
340 CHECK_EQ(Smi::FromInt(23),
341 *Object::GetProperty(obj, prop_name).ToHandleChecked());
// Allocates a string from UTF-8 input and verifies its length and each
// character against the source C string.
345 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
346 HandleScope scope(isolate);
347 Handle<String> s = isolate->factory()->NewStringFromUtf8(
348 CStrVector(string)).ToHandleChecked();
349 CHECK_EQ(StrLength(string), s->length());
350 for (int index = 0; index < s->length(); index++) {
351 CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
// Runs VerifyStringAllocation on strings of increasing length.
357 CcTest::InitializeVM();
358 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
360 VerifyStringAllocation(isolate, "a");
361 VerifyStringAllocation(isolate, "ab");
362 VerifyStringAllocation(isolate, "abc");
363 VerifyStringAllocation(isolate, "abcd");
364 VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
// Allocates an ASCII string inside a local handle scope and checks its
// length matches the source.
369 CcTest::InitializeVM();
370 Isolate* isolate = CcTest::i_isolate();
371 Factory* factory = isolate->factory();
373 v8::HandleScope scope(CcTest::isolate());
374 const char* name = "Kasper the spunky";
375 Handle<String> string = factory->NewStringFromAsciiChecked(name);
376 CHECK_EQ(StrLength(name), string->length());
// Creates four strong global handles (two per object), checks that the
// referenced objects survive a scavenge, then destroys the handles.
// The h1..h4 handle declarations are on lines not visible in this excerpt.
380 TEST(GlobalHandles) {
381 CcTest::InitializeVM();
382 Isolate* isolate = CcTest::i_isolate();
383 Heap* heap = isolate->heap();
384 Factory* factory = isolate->factory();
385 GlobalHandles* global_handles = isolate->global_handles();
393 HandleScope scope(isolate);
395 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
396 Handle<Object> u = factory->NewNumber(1.12344);
398 h1 = global_handles->Create(*i);
399 h2 = global_handles->Create(*u);
400 h3 = global_handles->Create(*i);
401 h4 = global_handles->Create(*u);
404 // after gc, it should survive
405 heap->CollectGarbage(NEW_SPACE);
407 CHECK((*h1)->IsString());
408 CHECK((*h2)->IsHeapNumber());
409 CHECK((*h3)->IsString());
410 CHECK((*h4)->IsHeapNumber());
413 GlobalHandles::Destroy(h1.location());
414 GlobalHandles::Destroy(h3.location());
417 GlobalHandles::Destroy(h2.location());
418 GlobalHandles::Destroy(h4.location());
// Set by TestWeakGlobalHandleCallback when a weak handle carrying the magic
// id 1234 has been cleared; reset by each test before use.
422 static bool WeakPointerCleared = false;
// Weak callback: the parameter is a (handle*, id) pair; flag the clearing of
// the handle only when the id matches 1234.
424 static void TestWeakGlobalHandleCallback(
425 const v8::WeakCallbackData<v8::Value, void>& data) {
426 std::pair<v8::Persistent<v8::Value>*, int>* p =
427 reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
428 data.GetParameter());
429 if (p->second == 1234) WeakPointerCleared = true;
// A scavenge must treat weak global handles as strong roots: after a
// new-space GC the weakly-held object is still alive, the callback has not
// fired, and neither handle is near death.
434 TEST(WeakGlobalHandlesScavenge) {
435 i::FLAG_stress_compaction = false;
436 CcTest::InitializeVM();
437 Isolate* isolate = CcTest::i_isolate();
438 Heap* heap = isolate->heap();
439 Factory* factory = isolate->factory();
440 GlobalHandles* global_handles = isolate->global_handles();
442 WeakPointerCleared = false;
448 HandleScope scope(isolate);
450 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
451 Handle<Object> u = factory->NewNumber(1.12344);
453 h1 = global_handles->Create(*i);
454 h2 = global_handles->Create(*u);
// Make h2 weak with the magic id that TestWeakGlobalHandleCallback checks.
457 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
458 GlobalHandles::MakeWeak(h2.location(),
459 reinterpret_cast<void*>(&handle_and_id),
460 &TestWeakGlobalHandleCallback);
462 // Scavenge treats weak pointers as normal roots.
463 heap->CollectGarbage(NEW_SPACE);
465 CHECK((*h1)->IsString());
466 CHECK((*h2)->IsHeapNumber());
468 CHECK(!WeakPointerCleared);
469 CHECK(!global_handles->IsNearDeath(h2.location()));
470 CHECK(!global_handles->IsNearDeath(h1.location()));
472 GlobalHandles::Destroy(h1.location());
473 GlobalHandles::Destroy(h2.location());
// A full mark-compact GC must clear a weak global handle to an otherwise
// unreachable (promoted) object and invoke its weak callback, while leaving
// the strong handle untouched.
477 TEST(WeakGlobalHandlesMark) {
478 CcTest::InitializeVM();
479 Isolate* isolate = CcTest::i_isolate();
480 Heap* heap = isolate->heap();
481 Factory* factory = isolate->factory();
482 GlobalHandles* global_handles = isolate->global_handles();
484 WeakPointerCleared = false;
490 HandleScope scope(isolate);
492 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
493 Handle<Object> u = factory->NewNumber(1.12344);
495 h1 = global_handles->Create(*i);
496 h2 = global_handles->Create(*u);
499 // Make sure the objects are promoted.
500 heap->CollectGarbage(OLD_SPACE);
501 heap->CollectGarbage(NEW_SPACE);
502 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
// Make h2 weak with the magic id that TestWeakGlobalHandleCallback checks.
504 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
505 GlobalHandles::MakeWeak(h2.location(),
506 reinterpret_cast<void*>(&handle_and_id),
507 &TestWeakGlobalHandleCallback);
508 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
509 CHECK(!GlobalHandles::IsNearDeath(h2.location()));
511 // Incremental marking potentially marked handles before they turned weak.
512 heap->CollectAllGarbage();
514 CHECK((*h1)->IsString());
516 CHECK(WeakPointerCleared);
517 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
519 GlobalHandles::Destroy(h1.location());
// A weak global handle to an unreachable object is not cleared by a
// scavenge, but is cleared (with callback) by a mark-compact GC.
523 TEST(DeleteWeakGlobalHandle) {
524 i::FLAG_stress_compaction = false;
525 CcTest::InitializeVM();
526 Isolate* isolate = CcTest::i_isolate();
527 Heap* heap = isolate->heap();
528 Factory* factory = isolate->factory();
529 GlobalHandles* global_handles = isolate->global_handles();
531 WeakPointerCleared = false;
536 HandleScope scope(isolate);
538 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
539 h = global_handles->Create(*i);
// Make h weak with the magic id that TestWeakGlobalHandleCallback checks.
542 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
543 GlobalHandles::MakeWeak(h.location(),
544 reinterpret_cast<void*>(&handle_and_id),
545 &TestWeakGlobalHandleCallback);
547 // Scavenge does not recognize weak reference.
548 heap->CollectGarbage(NEW_SPACE);
550 CHECK(!WeakPointerCleared);
552 // Mark-compact treats weak reference properly.
553 heap->CollectGarbage(OLD_SPACE);
555 CHECK(WeakPointerCleared);
559 static const char* not_so_random_string_table[] = {
// For each entry of the NULL-terminated |strings| table, internalizes it via
// both the Vector and char* overloads and checks the results are
// internalized strings equal to the original UTF-8 characters.
623 static void CheckInternalizedStrings(const char** strings) {
624 Isolate* isolate = CcTest::i_isolate();
625 Factory* factory = isolate->factory();
626 for (const char* string = *strings; *strings != 0; string = *strings++) {
627 HandleScope scope(isolate);
629 isolate->factory()->InternalizeUtf8String(CStrVector(string));
630 // InternalizeUtf8String may return a failure if a GC is needed.
631 CHECK(a->IsInternalizedString());
632 Handle<String> b = factory->InternalizeUtf8String(string);
634 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
635 b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
637 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
// Internalizing the same table twice must be idempotent.
643 CcTest::InitializeVM();
645 v8::HandleScope sc(CcTest::isolate());
646 CheckInternalizedStrings(not_so_random_string_table);
647 CheckInternalizedStrings(not_so_random_string_table);
// Allocates a function and an instance of it, and checks that properties can
// be set and read back on both the instance and the function object itself.
651 TEST(FunctionAllocation) {
652 CcTest::InitializeVM();
653 Isolate* isolate = CcTest::i_isolate();
654 Factory* factory = isolate->factory();
656 v8::HandleScope sc(CcTest::isolate());
657 Handle<String> name = factory->InternalizeUtf8String("theFunction");
658 Handle<JSFunction> function = factory->NewFunction(name);
660 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
661 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
663 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
664 Handle<JSObject> obj = factory->NewJSObject(function);
665 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
666 CHECK_EQ(Smi::FromInt(23),
667 *Object::GetProperty(obj, prop_name).ToHandleChecked());
668 // Check that we can add properties to function objects.
669 JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
670 CHECK_EQ(Smi::FromInt(24),
671 *Object::GetProperty(function, prop_name).ToHandleChecked());
// Exercises named-property add/delete combinations on a plain Object
// instance, and checks that plain strings and internalized strings with the
// same characters address the same property.
675 TEST(ObjectProperties) {
676 CcTest::InitializeVM();
677 Isolate* isolate = CcTest::i_isolate();
678 Factory* factory = isolate->factory();
680 v8::HandleScope sc(CcTest::isolate());
681 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
682 Handle<Object> object = Object::GetProperty(
683 CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
684 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
685 Handle<JSObject> obj = factory->NewJSObject(constructor);
686 Handle<String> first = factory->InternalizeUtf8String("first");
687 Handle<String> second = factory->InternalizeUtf8String("second");
689 Handle<Smi> one(Smi::FromInt(1), isolate);
690 Handle<Smi> two(Smi::FromInt(2), isolate);
// A fresh object has no own properties.
693 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
696 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
697 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
700 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
701 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
703 // add first and then second
704 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
705 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
706 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
707 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
709 // delete first and then second
710 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
711 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
712 JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
713 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
714 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
716 // add first and then second
717 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
718 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
719 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
720 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
722 // delete second and then first
723 JSReceiver::DeleteProperty(obj, second, SLOPPY).Check();
724 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
725 JSReceiver::DeleteProperty(obj, first, SLOPPY).Check();
726 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
727 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
729 // check string and internalized string match
730 const char* string1 = "fisk";
731 Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
732 JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
733 Handle<String> s1_string = factory->InternalizeUtf8String(string1);
734 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));
736 // check internalized string and string match
737 const char* string2 = "fugl";
738 Handle<String> s2_string = factory->InternalizeUtf8String(string2);
739 JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
740 Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
741 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
// Adding a property to a fresh instance must transition the object away from
// the function's initial map.
746 CcTest::InitializeVM();
747 Isolate* isolate = CcTest::i_isolate();
748 Factory* factory = isolate->factory();
750 v8::HandleScope sc(CcTest::isolate());
751 Handle<String> name = factory->InternalizeUtf8String("theFunction");
752 Handle<JSFunction> function = factory->NewFunction(name);
754 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
755 Handle<JSObject> obj = factory->NewJSObject(function);
756 Handle<Map> initial_map(function->initial_map());
759 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
760 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
761 CHECK_EQ(Smi::FromInt(23),
762 *Object::GetProperty(obj, prop_name).ToHandleChecked());
764 // Check the map has changed
765 CHECK(*initial_map != obj->map());
// Constructs a JSArray via the global Array constructor, checks element
// set/get and length tracking in fast mode, then grows the length past the
// Smi range and checks the array switches to dictionary (slow) elements.
770 CcTest::InitializeVM();
771 Isolate* isolate = CcTest::i_isolate();
772 Factory* factory = isolate->factory();
774 v8::HandleScope sc(CcTest::isolate());
775 Handle<String> name = factory->InternalizeUtf8String("Array");
776 Handle<Object> fun_obj = Object::GetProperty(
777 CcTest::i_isolate()->global_object(), name).ToHandleChecked();
778 Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
780 // Allocate the object.
781 Handle<Object> element;
782 Handle<JSObject> object = factory->NewJSObject(function);
783 Handle<JSArray> array = Handle<JSArray>::cast(object);
784 // We just initialized the VM, no heap allocation failure yet.
785 JSArray::Initialize(array, 0);
787 // Set array length to 0.
788 JSArray::SetElementsLength(array, handle(Smi::FromInt(0), isolate)).Check();
789 CHECK_EQ(Smi::FromInt(0), array->length());
790 // Must be in fast mode.
791 CHECK(array->HasFastSmiOrObjectElements());
793 // array[length] = name.
794 JSReceiver::SetElement(array, 0, name, NONE, SLOPPY).Check();
795 CHECK_EQ(Smi::FromInt(1), array->length());
796 element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
797 CHECK_EQ(*element, *name);
799 // Set array length with larger than smi value.
800 Handle<Object> length =
801 factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
802 JSArray::SetElementsLength(array, length).Check();
804 uint32_t int_length = 0;
805 CHECK(length->ToArrayIndex(&int_length));
806 CHECK_EQ(*length, array->length());
807 CHECK(array->HasDictionaryElements());  // Must be in slow mode.
809 // array[length] = name.
810 JSReceiver::SetElement(array, int_length, name, NONE, SLOPPY).Check();
// Writing at index == length bumps the length by exactly one.
811 uint32_t new_int_length = 0;
812 CHECK(array->length()->ToArrayIndex(&new_int_length));
813 CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
814 element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
815 CHECK_EQ(*element, *name);
816 element = Object::GetElement(isolate, array, 0).ToHandleChecked();
817 CHECK_EQ(*element, *name);
// Copies a JSObject that carries both named properties and elements, checks
// the clone is a distinct object with equal contents, then mutates the clone
// (values swapped) and checks the swap is visible through the clone.
822 CcTest::InitializeVM();
823 Isolate* isolate = CcTest::i_isolate();
824 Factory* factory = isolate->factory();
826 v8::HandleScope sc(CcTest::isolate());
827 Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
828 Handle<Object> object = Object::GetProperty(
829 CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
830 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
831 Handle<JSObject> obj = factory->NewJSObject(constructor);
832 Handle<String> first = factory->InternalizeUtf8String("first");
833 Handle<String> second = factory->InternalizeUtf8String("second");
835 Handle<Smi> one(Smi::FromInt(1), isolate);
836 Handle<Smi> two(Smi::FromInt(2), isolate);
// Populate named properties and indexed elements on the source object.
838 JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
839 JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
841 JSReceiver::SetElement(obj, 0, first, NONE, SLOPPY).Check();
842 JSReceiver::SetElement(obj, 1, second, NONE, SLOPPY).Check();
845 Handle<Object> value1, value2;
846 Handle<JSObject> clone = factory->CopyJSObject(obj);
847 CHECK(!clone.is_identical_to(obj));
// Elements and properties of the clone match the original pairwise.
849 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
850 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
851 CHECK_EQ(*value1, *value2);
852 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
853 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
854 CHECK_EQ(*value1, *value2);
856 value1 = Object::GetProperty(obj, first).ToHandleChecked();
857 value2 = Object::GetProperty(clone, first).ToHandleChecked();
858 CHECK_EQ(*value1, *value2);
859 value1 = Object::GetProperty(obj, second).ToHandleChecked();
860 value2 = Object::GetProperty(clone, second).ToHandleChecked();
861 CHECK_EQ(*value1, *value2);
// Swap values on the clone only; the original must stay unchanged.
864 JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
865 JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();
867 JSReceiver::SetElement(clone, 0, second, NONE, SLOPPY).Check();
868 JSReceiver::SetElement(clone, 1, first, NONE, SLOPPY).Check();
870 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
871 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
872 CHECK_EQ(*value1, *value2);
873 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
874 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
875 CHECK_EQ(*value1, *value2);
877 value1 = Object::GetProperty(obj, second).ToHandleChecked();
878 value2 = Object::GetProperty(clone, first).ToHandleChecked();
879 CHECK_EQ(*value1, *value2);
880 value1 = Object::GetProperty(obj, first).ToHandleChecked();
881 value2 = Object::GetProperty(clone, second).ToHandleChecked();
882 CHECK_EQ(*value1, *value2);
// For lengths 0..99, builds a one-byte string and a string of 3-byte UTF-8
// sequences (the bytes 0xe5 0xa4 0xa7 repeated), internalizes and allocates
// both, and checks the reported character lengths.
886 TEST(StringAllocation) {
887 CcTest::InitializeVM();
888 Isolate* isolate = CcTest::i_isolate();
889 Factory* factory = isolate->factory();
// One 3-byte UTF-8 sequence: each triple decodes to a single character.
891 const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
892 for (int length = 0; length < 100; length++) {
893 v8::HandleScope scope(CcTest::isolate());
894 char* non_one_byte = NewArray<char>(3 * length + 1);
895 char* one_byte = NewArray<char>(length + 1);
896 non_one_byte[3 * length] = 0;
897 one_byte[length] = 0;
898 for (int i = 0; i < length; i++) {
900 non_one_byte[3 * i] = chars[0];
901 non_one_byte[3 * i + 1] = chars[1];
902 non_one_byte[3 * i + 2] = chars[2];
// Character length equals |length|, not the 3*length byte count.
904 Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
905 Vector<const char>(non_one_byte, 3 * length));
906 CHECK_EQ(length, non_one_byte_sym->length());
907 Handle<String> one_byte_sym =
908 factory->InternalizeOneByteString(OneByteVector(one_byte, length));
909 CHECK_EQ(length, one_byte_sym->length());
910 Handle<String> non_one_byte_str =
911 factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
913 non_one_byte_str->Hash();
914 CHECK_EQ(length, non_one_byte_str->length());
915 Handle<String> one_byte_str =
916 factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
918 one_byte_str->Hash();
919 CHECK_EQ(length, one_byte_str->length());
920 DeleteArray(non_one_byte);
921 DeleteArray(one_byte);
// Walks the entire heap and counts how many of the |size| handles in |objs|
// are encountered.
926 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
927 // Count the number of objects found in the heap.
929 HeapIterator iterator(heap);
930 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
931 for (int i = 0; i < size; i++) {
932 if (*objs[i] == obj) {
// Allocates objects across multiple spaces (new, old, large-object) plus a
// Map, then verifies heap iteration visits every one of them exactly.
942 CcTest::InitializeVM();
943 Isolate* isolate = CcTest::i_isolate();
944 Factory* factory = isolate->factory();
945 v8::HandleScope scope(CcTest::isolate());
947 // Array of objects to scan heap for.
948 const int objs_count = 6;
949 Handle<Object> objs[objs_count];
950 int next_objs_index = 0;
952 // Allocate a JS array to OLD_SPACE and NEW_SPACE
953 objs[next_objs_index++] = factory->NewJSArray(10);
954 objs[next_objs_index++] =
955 factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, WEAK, TENURED);
957 // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
958 objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
959 objs[next_objs_index++] =
960 factory->NewStringFromStaticChars("abcdefghij", TENURED);
962 // Allocate a large string (for large object space).
963 int large_size = Page::kMaxRegularHeapObjectSize + 1;
964 char* str = new char[large_size];
965 for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
966 str[large_size - 1] = '\0';
967 objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
970 // Add a Map object to look for.
971 objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
973 CHECK_EQ(objs_count, next_objs_index);
974 CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
// Escaping an empty handle from a nested escapable scope must yield a null
// handle in the outer scope.
978 TEST(EmptyHandleEscapeFrom) {
979 CcTest::InitializeVM();
981 v8::HandleScope scope(CcTest::isolate());
982 Handle<JSObject> runaway;
985 v8::EscapableHandleScope nested(CcTest::isolate());
986 Handle<JSObject> empty;
987 runaway = empty.EscapeFrom(&nested);
990 CHECK(runaway.is_null());
// Number of FixedArray elements that fit in an allocation of |size| bytes.
994 static int LenFromSize(int size) {
995 return (size - FixedArray::kHeaderSize) / kPointerSize;
// Regression test for crbug.com/39128: a JSObject holding a new-space
// reference is cloned into old space; the write-barrier/dirty-region
// bookkeeping for the clone must be correct.
// NOTE(review): view is elided (e.g. loop/branch closing braces and
// early-return lines are missing); code kept byte-identical.
999 TEST(Regression39128) {
1000 // Test case for crbug.com/39128.
1001 CcTest::InitializeVM();
1002 Isolate* isolate = CcTest::i_isolate();
1003 TestHeap* heap = CcTest::test_heap();
1005 // Increase the chance of 'bump-the-pointer' allocation in old space.
1006 heap->CollectAllGarbage();
1008 v8::HandleScope scope(CcTest::isolate());
1010 // The plan: create JSObject which references objects in new space.
1011 // Then clone this object (forcing it to go into old space) and check
1012 // that region dirty marks are updated correctly.
1014 // Step 1: prepare a map for the object. We add 1 inobject property to it.
1015 // Create a map with single inobject property.
1016 Handle<Map> my_map = Map::Create(CcTest::i_isolate(), 1);
1017 int n_properties = my_map->inobject_properties();
1018 CHECK_GT(n_properties, 0);
1020 int object_size = my_map->instance_size();
1022 // Step 2: allocate a lot of objects so to almost fill new space: we need
1023 // just enough room to allocate JSObject and thus fill the newspace.
1025 int allocation_amount = Min(FixedArray::kMaxSize,
1026 Page::kMaxRegularHeapObjectSize + kPointerSize);
1027 int allocation_len = LenFromSize(allocation_amount);
1028 NewSpace* new_space = heap->new_space();
1029 Address* top_addr = new_space->allocation_top_address();
1030 Address* limit_addr = new_space->allocation_limit_address();
// Fill new space with filler arrays until less than one allocation
// unit of room remains.
1031 while ((*limit_addr - *top_addr) > allocation_amount) {
1032 CHECK(!heap->always_allocate());
1033 Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
1034 CHECK(new_space->Contains(array));
1037 // Step 3: now allocate fixed array and JSObject to fill the whole new space.
1038 int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
1039 int fixed_array_len = LenFromSize(to_fill);
1040 CHECK(fixed_array_len < FixedArray::kMaxLength);
1042 CHECK(!heap->always_allocate());
1043 Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
1044 CHECK(new_space->Contains(array));
1046 Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
1047 CHECK(new_space->Contains(object));
1048 JSObject* jsobject = JSObject::cast(object);
1049 CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
1050 CHECK_EQ(0, jsobject->properties()->length());
1051 // Create a reference to object in new space in jsobject.
1052 FieldIndex index = FieldIndex::ForInObjectOffset(
1053 JSObject::kHeaderSize - kPointerSize);
1054 jsobject->FastPropertyAtPut(index, array);
// New space should now be completely full.
1056 CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
1058 // Step 4: clone jsobject, but force always allocate first to create a clone
1059 // in old pointer space.
1060 Address old_space_top = heap->old_space()->top();
1061 AlwaysAllocateScope aa_scope(isolate);
1062 Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
1063 JSObject* clone = JSObject::cast(clone_obj);
1064 if (clone->address() != old_space_top) {
1065 // Alas, got allocated from free list, we cannot do checks.
1068 CHECK(heap->old_space()->Contains(clone->address()));
// Checks that unexecuted code for a function ("foo") is flushed after
// enough full-marking GCs, and that calling the function afterwards
// recompiles it. Uses a fresh isolate (UNINITIALIZED_TEST) so flag
// settings take effect before heap setup.
// NOTE(review): view is elided (the source string body, CompileRun of
// it, and scope braces are missing); code kept byte-identical.
1072 UNINITIALIZED_TEST(TestCodeFlushing) {
1073 // If we do not flush code this test is invalid.
1074 if (!FLAG_flush_code) return;
1075 i::FLAG_allow_natives_syntax = true;
1076 i::FLAG_optimize_for_size = false;
1077 v8::Isolate::CreateParams create_params;
1078 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
1079 v8::Isolate* isolate = v8::Isolate::New(create_params);
1080 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
1082 Factory* factory = i_isolate->factory();
1084 v8::HandleScope scope(isolate);
1085 v8::Context::New(isolate)->Enter();
1086 const char* source =
1093 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1095 // This compile will add the code to the compilation cache.
1097 v8::HandleScope scope(isolate);
1101 // Check function is compiled.
1102 Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
1103 foo_name).ToHandleChecked();
1104 CHECK(func_value->IsJSFunction());
1105 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1106 CHECK(function->shared()->is_compiled());
1108 // The code will survive at least two GCs.
1109 i_isolate->heap()->CollectAllGarbage();
1110 i_isolate->heap()->CollectAllGarbage();
1111 CHECK(function->shared()->is_compiled());
1113 // Simulate several GCs that use full marking.
1114 const int kAgingThreshold = 6;
1115 for (int i = 0; i < kAgingThreshold; i++) {
1116 i_isolate->heap()->CollectAllGarbage();
1119 // foo should no longer be in the compilation cache
1120 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1121 CHECK(!function->is_compiled() || function->IsOptimized());
1122 // Call foo to get it recompiled.
1123 CompileRun("foo()");
1124 CHECK(function->shared()->is_compiled());
1125 CHECK(function->is_compiled());
// With --optimize-for-size, code that has only been run once is
// pre-aged and flushed after a single extra GC; re-executing the
// function resets its age so it survives subsequent GCs again.
// NOTE(review): view is elided (source string body and scope braces
// missing); code kept byte-identical.
1132 TEST(TestCodeFlushingPreAged) {
1133 // If we do not flush code this test is invalid.
1134 if (!FLAG_flush_code) return;
1135 i::FLAG_allow_natives_syntax = true;
1136 i::FLAG_optimize_for_size = true;
1137 CcTest::InitializeVM();
1138 Isolate* isolate = CcTest::i_isolate();
1139 Factory* factory = isolate->factory();
1140 v8::HandleScope scope(CcTest::isolate());
1141 const char* source = "function foo() {"
1147 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1149 // Compile foo, but don't run it.
1150 { v8::HandleScope scope(CcTest::isolate());
1154 // Check function is compiled.
1155 Handle<Object> func_value =
1156 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1157 CHECK(func_value->IsJSFunction());
1158 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1159 CHECK(function->shared()->is_compiled());
1161 // The code has been run so will survive at least one GC.
1162 CcTest::heap()->CollectAllGarbage();
1163 CHECK(function->shared()->is_compiled());
1165 // The code was only run once, so it should be pre-aged and collected on the
1167 CcTest::heap()->CollectAllGarbage();
1168 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1170 // Execute the function again twice, and ensure it is reset to the young age.
1171 { v8::HandleScope scope(CcTest::isolate());
1176 // The code will survive at least two GC now that it is young again.
1177 CcTest::heap()->CollectAllGarbage();
1178 CcTest::heap()->CollectAllGarbage();
1179 CHECK(function->shared()->is_compiled());
1181 // Simulate several GCs that use full marking.
1182 const int kAgingThreshold = 6;
1183 for (int i = 0; i < kAgingThreshold; i++) {
1184 CcTest::heap()->CollectAllGarbage();
1187 // foo should no longer be in the compilation cache
1188 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1189 CHECK(!function->is_compiled() || function->IsOptimized());
1190 // Call foo to get it recompiled.
1191 CompileRun("foo()");
1192 CHECK(function->shared()->is_compiled());
1193 CHECK(function->is_compiled());
// Same flushing scenario as TestCodeFlushing, but driven by simulated
// incremental marking; also verifies that optimizing a function while
// it sits on the code-flushing candidate list does not corrupt the
// candidate queue.
// NOTE(review): view is elided (source string body and scope braces
// missing); code kept byte-identical.
1197 TEST(TestCodeFlushingIncremental) {
1198 // If we do not flush code this test is invalid.
1199 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1200 i::FLAG_allow_natives_syntax = true;
1201 i::FLAG_optimize_for_size = false;
1202 CcTest::InitializeVM();
1203 Isolate* isolate = CcTest::i_isolate();
1204 Factory* factory = isolate->factory();
1205 v8::HandleScope scope(CcTest::isolate());
1206 const char* source = "function foo() {"
1212 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1214 // This compile will add the code to the compilation cache.
1215 { v8::HandleScope scope(CcTest::isolate());
1219 // Check function is compiled.
1220 Handle<Object> func_value =
1221 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1222 CHECK(func_value->IsJSFunction());
1223 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1224 CHECK(function->shared()->is_compiled());
1226 // The code will survive at least two GCs.
1227 CcTest::heap()->CollectAllGarbage();
1228 CcTest::heap()->CollectAllGarbage();
1229 CHECK(function->shared()->is_compiled());
1231 // Simulate several GCs that use incremental marking.
1232 const int kAgingThreshold = 6;
1233 for (int i = 0; i < kAgingThreshold; i++) {
1234 SimulateIncrementalMarking(CcTest::heap());
1235 CcTest::heap()->CollectAllGarbage();
1237 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1238 CHECK(!function->is_compiled() || function->IsOptimized());
1240 // This compile will compile the function again.
1241 { v8::HandleScope scope(CcTest::isolate());
1242 CompileRun("foo();");
1245 // Simulate several GCs that use incremental marking but make sure
1246 // the loop breaks once the function is enqueued as a candidate.
1247 for (int i = 0; i < kAgingThreshold; i++) {
1248 SimulateIncrementalMarking(CcTest::heap());
// A non-undefined next_function_link means foo is now on the
// flushing-candidate list.
1249 if (!function->next_function_link()->IsUndefined()) break;
1250 CcTest::heap()->CollectAllGarbage();
1253 // Force optimization while incremental marking is active and while
1254 // the function is enqueued as a candidate.
1255 { v8::HandleScope scope(CcTest::isolate());
1256 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1259 // Simulate one final GC to make sure the candidate queue is sane.
1260 CcTest::heap()->CollectAllGarbage();
1261 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1262 CHECK(function->is_compiled() || !function->IsOptimized());
// Exercises a scavenge happening while incremental marking has already
// enqueued two functions as flushing candidates, and one of them dies:
// the candidate queue must survive the scavenge intact.
// NOTE(review): view is elided (source string bodies and scope braces
// missing); code kept byte-identical.
1266 TEST(TestCodeFlushingIncrementalScavenge) {
1267 // If we do not flush code this test is invalid.
1268 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1269 i::FLAG_allow_natives_syntax = true;
1270 i::FLAG_optimize_for_size = false;
1271 CcTest::InitializeVM();
1272 Isolate* isolate = CcTest::i_isolate();
1273 Factory* factory = isolate->factory();
1274 v8::HandleScope scope(CcTest::isolate());
1275 const char* source = "var foo = function() {"
1281 "var bar = function() {"
1285 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1286 Handle<String> bar_name = factory->InternalizeUtf8String("bar");
1288 // Perform one initial GC to enable code flushing.
1289 CcTest::heap()->CollectAllGarbage();
1291 // This compile will add the code to the compilation cache.
1292 { v8::HandleScope scope(CcTest::isolate());
1296 // Check functions are compiled.
1297 Handle<Object> func_value =
1298 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1299 CHECK(func_value->IsJSFunction());
1300 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1301 CHECK(function->shared()->is_compiled());
1302 Handle<Object> func_value2 =
1303 Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
1304 CHECK(func_value2->IsJSFunction());
1305 Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
1306 CHECK(function2->shared()->is_compiled());
1308 // Clear references to functions so that one of them can die.
1309 { v8::HandleScope scope(CcTest::isolate());
1310 CompileRun("foo = 0; bar = 0;");
1313 // Bump the code age so that flushing is triggered while the function
1314 // object is still located in new-space.
1315 const int kAgingThreshold = 6;
1316 for (int i = 0; i < kAgingThreshold; i++) {
1317 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1318 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1321 // Simulate incremental marking so that the functions are enqueued as
1322 // code flushing candidates. Then kill one of the functions. Finally
1323 // perform a scavenge while incremental marking is still running.
1324 SimulateIncrementalMarking(CcTest::heap());
// Deliberately clobber the handle slot so function2 becomes
// unreachable before the scavenge.
1325 *function2.location() = NULL;
1326 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
1328 // Simulate one final GC to make sure the candidate queue is sane.
1329 CcTest::heap()->CollectAllGarbage();
1330 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1331 CHECK(!function->is_compiled() || function->IsOptimized());
// Aborts incremental marking (via debugger breakpoint activity) while
// a function is enqueued as a flushing candidate, then optimizes it;
// the final GC must leave the candidate queue consistent.
// NOTE(review): view is elided (source string body, scope braces and
// the breakpoint position declaration are missing); code kept
// byte-identical.
1335 TEST(TestCodeFlushingIncrementalAbort) {
1336 // If we do not flush code this test is invalid.
1337 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
1338 i::FLAG_allow_natives_syntax = true;
1339 i::FLAG_optimize_for_size = false;
1340 CcTest::InitializeVM();
1341 Isolate* isolate = CcTest::i_isolate();
1342 Factory* factory = isolate->factory();
1343 Heap* heap = isolate->heap();
1344 v8::HandleScope scope(CcTest::isolate());
1345 const char* source = "function foo() {"
1351 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1353 // This compile will add the code to the compilation cache.
1354 { v8::HandleScope scope(CcTest::isolate());
1358 // Check function is compiled.
1359 Handle<Object> func_value =
1360 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1361 CHECK(func_value->IsJSFunction());
1362 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1363 CHECK(function->shared()->is_compiled());
1365 // The code will survive at least two GCs.
1366 heap->CollectAllGarbage();
1367 heap->CollectAllGarbage();
1368 CHECK(function->shared()->is_compiled());
1370 // Bump the code age so that flushing is triggered.
1371 const int kAgingThreshold = 6;
1372 for (int i = 0; i < kAgingThreshold; i++) {
1373 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1376 // Simulate incremental marking so that the function is enqueued as
1377 // code flushing candidate.
1378 SimulateIncrementalMarking(heap);
1380 // Enable the debugger and add a breakpoint while incremental marking
1381 // is running so that incremental marking aborts and code flushing is
1384 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1385 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
1386 isolate->debug()->ClearAllBreakPoints();
1388 // Force optimization now that code flushing is disabled.
1389 { v8::HandleScope scope(CcTest::isolate());
1390 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1393 // Simulate one final GC to make sure the candidate queue is sane.
1394 heap->CollectAllGarbage();
1395 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1396 CHECK(function->is_compiled() || !function->IsOptimized());
// Documents the two-phase script compilation cache: the first compile
// only records a hash (lookup misses), the second inserts the real
// SharedFunctionInfo entry; code aging and MarkCompactPrologue-driven
// hash-generation aging both evict entries again.
// NOTE(review): view is elided (the raw_source string body, block
// braces and the language_mode lookup argument lines are missing);
// code kept byte-identical.
1400 TEST(CompilationCacheCachingBehavior) {
1401 // If we do not flush code, or have the compilation cache turned off, this
1403 if (!FLAG_flush_code || !FLAG_flush_code_incrementally ||
1404 !FLAG_compilation_cache) {
1407 CcTest::InitializeVM();
1408 Isolate* isolate = CcTest::i_isolate();
1409 Factory* factory = isolate->factory();
1410 Heap* heap = isolate->heap();
1411 CompilationCache* compilation_cache = isolate->compilation_cache();
1412 LanguageMode language_mode =
1413 construct_language_mode(FLAG_use_strict, FLAG_use_strong);
1415 v8::HandleScope scope(CcTest::isolate());
1416 const char* raw_source =
1423 Handle<String> source = factory->InternalizeUtf8String(raw_source);
1424 Handle<Context> native_context = isolate->native_context();
1427 v8::HandleScope scope(CcTest::isolate());
1428 CompileRun(raw_source);
1431 // On first compilation, only a hash is inserted in the code cache. We can't
1433 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
1434 source, Handle<Object>(), 0, 0,
1435 v8::ScriptOriginOptions(false, true, false), native_context,
1437 CHECK(info.is_null());
1440 v8::HandleScope scope(CcTest::isolate());
1441 CompileRun(raw_source);
1444 // On second compilation, the hash is replaced by a real cache entry mapping
1445 // the source to the shared function info containing the code.
1446 info = compilation_cache->LookupScript(
1447 source, Handle<Object>(), 0, 0,
1448 v8::ScriptOriginOptions(false, true, false), native_context,
1450 CHECK(!info.is_null());
// A single full GC must not evict a live cache entry.
1452 heap->CollectAllGarbage();
1454 // On second compilation, the hash is replaced by a real cache entry mapping
1455 // the source to the shared function info containing the code.
1456 info = compilation_cache->LookupScript(
1457 source, Handle<Object>(), 0, 0,
1458 v8::ScriptOriginOptions(false, true, false), native_context,
1460 CHECK(!info.is_null());
// Age the cached code fully, then GC: aging must clear the entry.
1462 while (!info.ToHandleChecked()->code()->IsOld()) {
1463 info.ToHandleChecked()->code()->MakeOlder(NO_MARKING_PARITY);
1466 heap->CollectAllGarbage();
1467 // Ensure code aging cleared the entry from the cache.
1468 info = compilation_cache->LookupScript(
1469 source, Handle<Object>(), 0, 0,
1470 v8::ScriptOriginOptions(false, true, false), native_context,
1472 CHECK(info.is_null());
1475 v8::HandleScope scope(CcTest::isolate());
1476 CompileRun(raw_source);
1479 // On first compilation, only a hash is inserted in the code cache. We can't
1481 info = compilation_cache->LookupScript(
1482 source, Handle<Object>(), 0, 0,
1483 v8::ScriptOriginOptions(false, true, false), native_context,
1485 CHECK(info.is_null());
// Age away all hash generations so the recorded first-compile hash
// is forgotten before the next compile.
1487 for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
1488 compilation_cache->MarkCompactPrologue();
1492 v8::HandleScope scope(CcTest::isolate());
1493 CompileRun(raw_source);
1496 // If we aged the cache before caching the script, ensure that we didn't cache
1497 // on next compilation.
1498 info = compilation_cache->LookupScript(
1499 source, Handle<Object>(), 0, 0,
1500 v8::ScriptOriginOptions(false, true, false), native_context,
1502 CHECK(info.is_null());
// Helper: defines an empty function with the given name, runs it, and
// forces Crankshaft optimization via %OptimizeFunctionOnNextCall.
// NOTE(review): view is elided (the SNPrintF call line and parts of
// the format string are missing); code kept byte-identical.
1506 static void OptimizeEmptyFunction(const char* name) {
1507 HandleScope scope(CcTest::i_isolate());
1508 EmbeddedVector<char, 256> source;
1510 "function %s() { return 0; }"
1512 "%%OptimizeFunctionOnNextCall(%s);"
1514 name, name, name, name, name);
1515 CompileRun(source.start());
1519 // Count the number of native contexts in the weak list of native contexts.
// Walks heap->native_contexts_list() via NEXT_CONTEXT_LINK until
// undefined terminates the list.
// NOTE(review): counter declaration/increment and return are elided
// in this view; code kept byte-identical.
1520 int CountNativeContexts() {
1522 Object* object = CcTest::heap()->native_contexts_list();
1523 while (!object->IsUndefined()) {
1525 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1531 // Count the number of user functions in the weak list of optimized
1532 // functions attached to a native context.
// Walks OPTIMIZED_FUNCTIONS_LIST via next_function_link(), stopping
// at the first non-JSFunction or builtin entry.
// NOTE(review): counter handling and return are elided in this view;
// code kept byte-identical.
1533 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1535 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1536 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1537 while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1539 object = JSFunction::cast(object)->next_function_link();
// End-to-end check of the two internal weak lists: the native-context
// list and each context's optimized-function list. Scavenges must
// treat the links as strong; only mark-compact prunes dead entries.
// NOTE(review): view is elided (several closing braces and loop ends
// are missing); code kept byte-identical.
1545 TEST(TestInternalWeakLists) {
1546 FLAG_always_opt = false;
1547 FLAG_allow_natives_syntax = true;
1548 v8::V8::Initialize();
1550 // Some flags turn Scavenge collections into Mark-sweep collections
1551 // and hence are incompatible with this test case.
1552 if (FLAG_gc_global || FLAG_stress_compaction) return;
// Retained maps would keep contexts alive across GCs; disable.
1553 FLAG_retain_maps_for_n_gc = 0;
1555 static const int kNumTestContexts = 10;
1557 Isolate* isolate = CcTest::i_isolate();
1558 Heap* heap = isolate->heap();
1559 HandleScope scope(isolate);
1560 v8::Handle<v8::Context> ctx[kNumTestContexts];
// Test only exercises Crankshaft-optimized function lists.
1561 if (!isolate->use_crankshaft()) return;
1563 CHECK_EQ(0, CountNativeContexts());
1565 // Create a number of global contexts which get linked together.
1566 for (int i = 0; i < kNumTestContexts; i++) {
1567 ctx[i] = v8::Context::New(CcTest::isolate());
1569 // Collect garbage that might have been created by one of the
1570 // installed extensions.
1571 isolate->compilation_cache()->Clear();
1572 heap->CollectAllGarbage();
1574 CHECK_EQ(i + 1, CountNativeContexts());
1578 // Create a handle scope so no function objects get stuck in the outer
1580 HandleScope scope(isolate);
1581 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1582 OptimizeEmptyFunction("f1");
1583 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
1584 OptimizeEmptyFunction("f2");
1585 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1586 OptimizeEmptyFunction("f3");
1587 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1588 OptimizeEmptyFunction("f4");
1589 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1590 OptimizeEmptyFunction("f5");
1591 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1593 // Remove function f1, and
1594 CompileRun("f1=null");
1596 // Scavenge treats these references as strong.
1597 for (int j = 0; j < 10; j++) {
1598 CcTest::heap()->CollectGarbage(NEW_SPACE);
1599 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1602 // Mark compact handles the weak references.
1603 isolate->compilation_cache()->Clear();
1604 heap->CollectAllGarbage();
1605 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1607 // Get rid of f3 and f5 in the same way.
1608 CompileRun("f3=null");
1609 for (int j = 0; j < 10; j++) {
1610 CcTest::heap()->CollectGarbage(NEW_SPACE);
1611 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1613 CcTest::heap()->CollectAllGarbage();
1614 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1615 CompileRun("f5=null");
1616 for (int j = 0; j < 10; j++) {
1617 CcTest::heap()->CollectGarbage(NEW_SPACE);
1618 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1620 CcTest::heap()->CollectAllGarbage();
1621 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1626 // Force compilation cache cleanup.
1627 CcTest::heap()->NotifyContextDisposed(true);
1628 CcTest::heap()->CollectAllGarbage();
1630 // Dispose the native contexts one by one.
1631 for (int i = 0; i < kNumTestContexts; i++) {
1632 // TODO(dcarney): is there a better way to do this?
// Overwrite the handle slot directly to drop the context reference.
1633 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1634 *unsafe = CcTest::heap()->undefined_value();
1637 // Scavenge treats these references as strong.
1638 for (int j = 0; j < 10; j++) {
1639 CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1640 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1643 // Mark compact handles the weak references.
1644 CcTest::heap()->CollectAllGarbage();
1645 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1648 CHECK_EQ(0, CountNativeContexts());
1652 // Count the number of native contexts in the weak list of native contexts
1653 // causing a GC after the specified number of elements.
// Same walk as CountNativeContexts(), but holds the cursor in a
// Handle so it stays valid across the mid-iteration GC.
// NOTE(review): counter handling, return and the assignment target of
// the final Handle expression are elided in this view; code kept
// byte-identical.
1654 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1655 Heap* heap = isolate->heap();
1657 Handle<Object> object(heap->native_contexts_list(), isolate);
1658 while (!object->IsUndefined()) {
1660 if (count == n) heap->CollectAllGarbage();
1662 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1669 // Count the number of user functions in the weak list of optimized
1670 // functions attached to a native context causing a GC after the
1671 // specified number of elements.
// Handle-based variant of CountOptimizedUserFunctions() so the list
// cursor survives the GC triggered mid-walk.
// NOTE(review): the second parameter declaration, counter handling
// and return are elided in this view; code kept byte-identical.
1672 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1675 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1676 Isolate* isolate = icontext->GetIsolate();
1677 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1679 while (object->IsJSFunction() &&
1680 !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1682 if (count == n) isolate->heap()->CollectAllGarbage();
1683 object = Handle<Object>(
1684 Object::cast(JSFunction::cast(*object)->next_function_link()),
// Verifies that traversing the native-context and optimized-function
// weak lists is safe even when a full GC fires in the middle of the
// traversal (lists are consistent before and after the GC).
// NOTE(review): view is elided (loop/scope closing braces missing);
// code kept byte-identical.
1691 TEST(TestInternalWeakListsTraverseWithGC) {
1692 FLAG_always_opt = false;
1693 FLAG_allow_natives_syntax = true;
1694 v8::V8::Initialize();
1696 static const int kNumTestContexts = 10;
1698 Isolate* isolate = CcTest::i_isolate();
1699 HandleScope scope(isolate);
1700 v8::Handle<v8::Context> ctx[kNumTestContexts];
1701 if (!isolate->use_crankshaft()) return;
1703 CHECK_EQ(0, CountNativeContexts());
1705 // Create a number of contexts and check the length of the weak list both
1706 // with and without GCs while iterating the list.
1707 for (int i = 0; i < kNumTestContexts; i++) {
1708 ctx[i] = v8::Context::New(CcTest::isolate());
1709 CHECK_EQ(i + 1, CountNativeContexts());
1710 CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
1715 // Compile a number of functions the length of the weak list of optimized
1716 // functions both with and without GCs while iterating the list.
1717 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1718 OptimizeEmptyFunction("f1");
1719 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
1720 CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1721 OptimizeEmptyFunction("f2");
1722 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
1723 CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1724 OptimizeEmptyFunction("f3");
1725 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
1726 CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1727 OptimizeEmptyFunction("f4");
1728 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
1729 CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1730 OptimizeEmptyFunction("f5");
1731 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
1732 CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
// Bounds the heap footprint of compiled regexp code: a huge regexp
// (above the optimize threshold) stays under 1 MB, while the
// half-size regexp, which still gets the optimizing compiler, costs
// more than twice the large one's code size.
// NOTE(review): view is elided (CompileRun call line and part of the
// script string are missing); code kept byte-identical.
1738 TEST(TestSizeOfRegExpCode) {
1739 if (!FLAG_regexp_optimization) return;
1741 v8::V8::Initialize();
1743 Isolate* isolate = CcTest::i_isolate();
1744 HandleScope scope(isolate);
1746 LocalContext context;
1748 // Adjust source below and this check to match
1749 // RegExpImpl::kRegExpTooLargeToOptimize.
1750 DCHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 10 * KB);
1752 // Compile a regexp that is much larger if we are using regexp optimizations.
1754 "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
1755 "var half_size_reg_exp;"
1756 "while (reg_exp_source.length < 10 * 1024) {"
1757 " half_size_reg_exp = reg_exp_source;"
1758 " reg_exp_source = reg_exp_source + reg_exp_source;"
1761 "reg_exp_source.match(/f/);");
1763 // Get initial heap size after several full GCs, which will stabilize
1764 // the heap size and return with sweeping finished completely.
1765 CcTest::heap()->CollectAllGarbage();
1766 CcTest::heap()->CollectAllGarbage();
1767 CcTest::heap()->CollectAllGarbage();
1768 CcTest::heap()->CollectAllGarbage();
1769 CcTest::heap()->CollectAllGarbage();
1770 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1771 if (collector->sweeping_in_progress()) {
1772 collector->EnsureSweepingCompleted();
1774 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1776 CompileRun("'foo'.match(reg_exp_source);");
1777 CcTest::heap()->CollectAllGarbage();
1778 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1780 CompileRun("'foo'.match(half_size_reg_exp);");
1781 CcTest::heap()->CollectAllGarbage();
1782 int size_with_optimized_regexp =
1783 static_cast<int>(CcTest::heap()->SizeOfObjects());
1785 int size_of_regexp_code = size_with_regexp - initial_size;
1787 CHECK_LE(size_of_regexp_code, 1 * MB);
1789 // Small regexp is half the size, but compiles to more than twice the code
1790 // due to the optimization steps.
1791 CHECK_GE(size_with_optimized_regexp,
1792 size_with_regexp + size_of_regexp_code * 2);
// Checks Heap::SizeOfObjects() accounting: it grows exactly by the
// size of each tenured allocation, and returns to the initial value
// after a full GC even while concurrent sweeping is still running.
// NOTE(review): view is elided (scope braces around the allocation
// section are missing); code kept byte-identical.
1796 TEST(TestSizeOfObjects) {
1797 v8::V8::Initialize();
1799 // Get initial heap size after several full GCs, which will stabilize
1800 // the heap size and return with sweeping finished completely.
1801 CcTest::heap()->CollectAllGarbage();
1802 CcTest::heap()->CollectAllGarbage();
1803 CcTest::heap()->CollectAllGarbage();
1804 CcTest::heap()->CollectAllGarbage();
1805 CcTest::heap()->CollectAllGarbage();
1806 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1807 if (collector->sweeping_in_progress()) {
1808 collector->EnsureSweepingCompleted();
1810 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1813 // Allocate objects on several different old-space pages so that
1814 // concurrent sweeper threads will be busy sweeping the old space on
1815 // subsequent GC runs.
1816 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1817 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1818 for (int i = 1; i <= 100; i++) {
1819 CcTest::test_heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
// Accounting must track every allocation exactly.
1820 CHECK_EQ(initial_size + i * filler_size,
1821 static_cast<int>(CcTest::heap()->SizeOfObjects()));
1825 // The heap size should go back to initial size after a full GC, even
1826 // though sweeping didn't finish yet.
1827 CcTest::heap()->CollectAllGarbage();
1829 // Normally sweeping would not be complete here, but no guarantees.
1831 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
1833 // Waiting for sweeper threads should not change heap size.
1834 if (collector->sweeping_in_progress()) {
1835 collector->EnsureSweepingCompleted();
1837 CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
// Unit-checks the pure address-alignment math in the Heap class:
// GetMaximumFillToAlign for each AllocationAlignment, and
// GetFillToAlign at representative offsets from an aligned base.
// NOTE(review): view is elided (several CHECK_EQ(0, fill) lines and
// the fill declaration are missing); code kept byte-identical.
1841 TEST(TestAlignmentCalculations) {
1842 // Maximum fill amounts are consistent.
1843 int maximum_double_misalignment = kDoubleSize - kPointerSize;
1844 int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
1845 int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
1846 CHECK_EQ(0, max_word_fill);
1847 int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
1848 CHECK_EQ(maximum_double_misalignment, max_double_fill);
1849 int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
1850 CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
1851 int max_simd128_unaligned_fill =
1852 Heap::GetMaximumFillToAlign(kSimd128Unaligned);
1853 CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);
// Base address 0 is aligned for every alignment class.
1855 Address base = reinterpret_cast<Address>(NULL);
1858 // Word alignment never requires fill.
1859 fill = Heap::GetFillToAlign(base, kWordAligned);
1861 fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
1864 // No fill is required when address is double aligned.
1865 fill = Heap::GetFillToAlign(base, kDoubleAligned);
1867 // Fill is required if address is not double aligned.
1868 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
1869 CHECK_EQ(maximum_double_misalignment, fill);
1870 // kDoubleUnaligned has the opposite fill amounts.
1871 fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
1872 CHECK_EQ(maximum_double_misalignment, fill);
1873 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
1876 // 128 bit SIMD types have 2 or 4 possible alignments, depending on platform.
1877 fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
1878 CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
1879 fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
1880 CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
1881 fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
1882 CHECK_EQ(kPointerSize, fill);
1883 fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
// Helper: performs a raw aligned new-space allocation and overwrites
// the allocated region with a filler object so the heap stays
// iterable for subsequent checks.
// NOTE(review): the return statement/closing brace are elided in this
// view; code kept byte-identical.
1888 static HeapObject* NewSpaceAllocateAligned(int size,
1889 AllocationAlignment alignment) {
1890 Heap* heap = CcTest::heap();
1891 AllocationResult allocation =
1892 heap->new_space()->AllocateRawAligned(size, alignment);
1893 HeapObject* obj = NULL;
1894 allocation.To(&obj);
// Fill the region so the heap remains verifiable/iterable.
1895 heap->CreateFillerObjectAt(obj->address(), size);
1900 // Get new space allocation into the desired alignment.
// Pads the new-space top pointer with a word-aligned allocation so
// the next allocation starts `offset` bytes past an `alignment`
// boundary.
// NOTE(review): the return statement is elided in this view; code
// kept byte-identical.
1901 static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
1902 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
1903 int fill = Heap::GetFillToAlign(*top_addr, alignment);
1905 NewSpaceAllocateAligned(fill + offset, kWordAligned);
// Verifies that NewSpaceAllocateAligned honors kDoubleAligned,
// kDoubleUnaligned and kSimd128Unaligned requests from a linearly bumped
// new-space top, inserting filler objects of exactly the expected size.
1911 TEST(TestAlignedAllocation) {
1912 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
1913 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
1914 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
// Double-alignment is only observable where kDoubleSize != kPointerSize,
// i.e. on 32-bit platforms; on 64-bit this whole section is skipped.
1918 if (double_misalignment) {
1919 // Allocate a pointer sized object that must be double aligned at an
1921 start = AlignNewSpace(kDoubleAligned, 0);
1922 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
1923 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
1924 // There is no filler.
// Top advanced by exactly the object size => no filler was inserted.
1925 CHECK_EQ(kPointerSize, *top_addr - start);
1927 // Allocate a second pointer sized object that must be double aligned at an
1928 // unaligned address.
1929 start = AlignNewSpace(kDoubleAligned, kPointerSize);
1930 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
1931 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
1932 // There is a filler object before the object.
1933 filler = HeapObject::FromAddress(start);
1934 CHECK(obj != filler && filler->IsFiller() &&
1935 filler->Size() == kPointerSize);
1936 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
1938 // Similarly for kDoubleUnaligned.
1939 start = AlignNewSpace(kDoubleUnaligned, 0);
1940 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
// Third argument is the expected offset from double alignment.
1941 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
1942 CHECK_EQ(kPointerSize, *top_addr - start);
1943 start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
1944 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
1945 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
1946 // There is a filler object before the object.
1947 filler = HeapObject::FromAddress(start);
1948 CHECK(obj != filler && filler->IsFiller() &&
1949 filler->Size() == kPointerSize);
1950 CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
1953 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
1955 start = AlignNewSpace(kSimd128Unaligned, 0);
1956 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
1957 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
1958 // There is no filler.
1959 CHECK_EQ(kPointerSize, *top_addr - start);
1960 start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
1961 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
1962 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
1963 // There is a filler object before the object.
1964 filler = HeapObject::FromAddress(start);
// Starting one word past alignment requires a filler of 128-bit size
// minus one word to reach the next qualifying address.
1965 CHECK(obj != filler && filler->IsFiller() &&
1966 filler->Size() == kSimd128Size - kPointerSize);
1967 CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);
1969 if (double_misalignment) {
1970 // Test the 2 other alignments possible on 32 bit platforms.
1971 start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
1972 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
1973 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
1974 // There is a filler object before the object.
1975 filler = HeapObject::FromAddress(start);
1976 CHECK(obj != filler && filler->IsFiller() &&
1977 filler->Size() == 2 * kPointerSize);
1978 CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
1979 start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
1980 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
1981 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
1982 // There is a filler object before the object.
1983 filler = HeapObject::FromAddress(start);
1984 CHECK(obj != filler && filler->IsFiller() &&
1985 filler->Size() == kPointerSize);
1986 CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
// Allocates `size` bytes in old space with the requested alignment and
// immediately overwrites the result with a filler so the heap stays
// iterable. Returns the raw HeapObject (in the elided tail of this body).
1991 static HeapObject* OldSpaceAllocateAligned(int size,
1992 AllocationAlignment alignment) {
1993 Heap* heap = CcTest::heap();
1994 AllocationResult allocation =
1995 heap->old_space()->AllocateRawAligned(size, alignment);
1996 HeapObject* obj = NULL;
// NOTE(review): the boolean result of To() is ignored; on allocation
// failure obj stays NULL and the next line would fault — acceptable in a
// cctest where failure should crash loudly, but worth confirming.
1997 allocation.To(&obj);
1998 heap->CreateFillerObjectAt(obj->address(), size);
2003 // Get old space allocation into the desired alignment.
// Old-space analog of AlignNewSpace: burns filler bytes so the next
// allocation starts `offset` bytes past the aligned position, then pushes
// the remaining linear allocation area onto the free list so subsequent
// allocations must come from free-list entries.
2004 static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
2005 Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
2006 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2007 int allocation = fill + offset;
2009 OldSpaceAllocateAligned(allocation, kWordAligned);
2011 Address top = *top_addr;
2012 // Now force the remaining allocation onto the free list.
2013 CcTest::heap()->old_space()->EmptyAllocationInfo();
2018 // Test the case where allocation must be done from the free list, so filler
2019 // may precede or follow the object.
// Over-allocation path: AllocateRawAligned reserves the worst-case size
// from the free list, so fillers can appear both before (to reach
// alignment) and after (for the unused remainder) the object.
2020 TEST(TestAlignedOverAllocation) {
2021 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2022 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2025 HeapObject* filler1;
2026 HeapObject* filler2;
// Double-alignment section is a no-op on 64-bit platforms.
2027 if (double_misalignment) {
2028 start = AlignOldSpace(kDoubleAligned, 0);
2029 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2030 // The object is aligned, and a filler object is created after.
2031 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2032 filler1 = HeapObject::FromAddress(start + kPointerSize);
2033 CHECK(obj != filler1 && filler1->IsFiller() &&
2034 filler1->Size() == kPointerSize);
2035 // Try the opposite alignment case.
2036 start = AlignOldSpace(kDoubleAligned, kPointerSize);
2037 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2038 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2039 filler1 = HeapObject::FromAddress(start);
2040 CHECK(obj != filler1);
2041 CHECK(filler1->IsFiller());
2042 CHECK(filler1->Size() == kPointerSize);
// NOTE(review): the combined CHECK below repeats the three individual
// CHECKs above verbatim — redundant, candidate for removal.
2043 CHECK(obj != filler1 && filler1->IsFiller() &&
2044 filler1->Size() == kPointerSize);
2046 // Similarly for kDoubleUnaligned.
2047 start = AlignOldSpace(kDoubleUnaligned, 0);
2048 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2049 // The object is aligned, and a filler object is created after.
2050 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2051 filler1 = HeapObject::FromAddress(start + kPointerSize);
2052 CHECK(obj != filler1 && filler1->IsFiller() &&
2053 filler1->Size() == kPointerSize);
2054 // Try the opposite alignment case.
2055 start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
2056 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2057 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2058 filler1 = HeapObject::FromAddress(start);
2059 CHECK(obj != filler1 && filler1->IsFiller() &&
2060 filler1->Size() == kPointerSize);
2063 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2065 start = AlignOldSpace(kSimd128Unaligned, 0);
2066 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2067 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2068 // There is a filler object after the object.
2069 filler1 = HeapObject::FromAddress(start + kPointerSize);
2070 CHECK(obj != filler1 && filler1->IsFiller() &&
2071 filler1->Size() == kSimd128Size - kPointerSize);
2072 start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
2073 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2074 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2075 // There is a filler object before the object.
2076 filler1 = HeapObject::FromAddress(start);
2077 CHECK(obj != filler1 && filler1->IsFiller() &&
2078 filler1->Size() == kSimd128Size - kPointerSize);
2080 if (double_misalignment) {
2081 // Test the 2 other alignments possible on 32 bit platforms.
2082 start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
2083 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2084 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2085 // There are filler objects before and after the object.
2086 filler1 = HeapObject::FromAddress(start);
2087 CHECK(obj != filler1 && filler1->IsFiller() &&
2088 filler1->Size() == 2 * kPointerSize);
2089 filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
2090 CHECK(obj != filler2 && filler2->IsFiller() &&
2091 filler2->Size() == kPointerSize);
2092 start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
2093 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2094 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2095 // There are filler objects before and after the object.
2096 filler1 = HeapObject::FromAddress(start);
2097 CHECK(obj != filler1 && filler1->IsFiller() &&
2098 filler1->Size() == kPointerSize);
2099 filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
2100 CHECK(obj != filler2 && filler2->IsFiller() &&
2101 filler2->Size() == 2 * kPointerSize);
// Cross-checks Heap::SizeOfObjects() (bookkeeping counters) against the
// sum of object sizes reported by a full HeapIterator walk; the two may
// legitimately disagree, but only within a 5% tolerance.
2106 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
2107 CcTest::InitializeVM();
2108 HeapIterator iterator(CcTest::heap());
2109 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
2110 intptr_t size_of_objects_2 = 0;
2111 for (HeapObject* obj = iterator.next();
2113 obj = iterator.next()) {
// Free-space objects pad the heap but are not live data, so skip them.
2114 if (!obj->IsFreeSpace()) {
2115 size_of_objects_2 += obj->Size();
2118 // Delta must be within 5% of the larger result.
2119 // TODO(gc): Tighten this up by distinguishing between byte
2120 // arrays that are real and those that merely mark free space
// Both branches print the sizes before asserting so a tolerance failure
// leaves diagnosable output in the test log.
2122 if (size_of_objects_1 > size_of_objects_2) {
2123 intptr_t delta = size_of_objects_1 - size_of_objects_2;
2124 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2125 "Iterator: %" V8_PTR_PREFIX "d, "
2126 "delta: %" V8_PTR_PREFIX "d\n",
2127 size_of_objects_1, size_of_objects_2, delta);
// size / 20 == 5% of the larger measurement.
2128 CHECK_GT(size_of_objects_1 / 20, delta);
2130 intptr_t delta = size_of_objects_2 - size_of_objects_1;
2131 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2132 "Iterator: %" V8_PTR_PREFIX "d, "
2133 "delta: %" V8_PTR_PREFIX "d\n",
2134 size_of_objects_1, size_of_objects_2, delta);
2135 CHECK_GT(size_of_objects_2 / 20, delta);
// Fills new space nearly to capacity with untenured FixedArrays so that
// tests can observe behavior of a (virtually) full semispace.
2140 static void FillUpNewSpace(NewSpace* new_space) {
2141 // Fill up new space to the point that it is completely full. Make sure
2142 // that the scavenger does not undo the filling.
2143 Heap* heap = new_space->heap();
2144 Isolate* isolate = heap->isolate();
2145 Factory* factory = isolate->factory();
2146 HandleScope scope(isolate);
// Prevent the allocations below from triggering a GC that would
// evacuate/compact the arrays we are deliberately packing in.
2147 AlwaysAllocateScope always_allocate(isolate);
2148 intptr_t available = new_space->Capacity() - new_space->Size();
// Leave one array's worth of slack so the space is full but allocations
// in this loop itself never fail.
2149 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
2150 for (intptr_t i = 0; i < number_of_fillers; i++) {
2151 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
// Exercises explicit NewSpace growth and shrinking: growing doubles
// capacity, shrinking only halves it when the space is sufficiently
// empty, and shrinking an already-minimal space is a no-op.
2156 TEST(GrowAndShrinkNewSpace) {
2157 CcTest::InitializeVM();
2158 Heap* heap = CcTest::heap();
2159 NewSpace* new_space = heap->new_space();
2161 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
2162 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2163 // The max size cannot exceed the reserved size, since semispaces must be
2164 // always within the reserved space. We can't test new space growing and
2165 // shrinking if the reserved size is the same as the minimum (initial) size.
2169 // Explicitly growing should double the space capacity.
2170 intptr_t old_capacity, new_capacity;
2171 old_capacity = new_space->TotalCapacity();
2173 new_capacity = new_space->TotalCapacity();
2174 CHECK(2 * old_capacity == new_capacity);
2176 old_capacity = new_space->TotalCapacity();
// Filling the space must not change its total capacity.
2177 FillUpNewSpace(new_space);
2178 new_capacity = new_space->TotalCapacity();
2179 CHECK(old_capacity == new_capacity);
2181 // Explicitly shrinking should not affect space capacity.
// (Shrink is a no-op here because the space is still full.)
2182 old_capacity = new_space->TotalCapacity();
2183 new_space->Shrink();
2184 new_capacity = new_space->TotalCapacity();
2185 CHECK(old_capacity == new_capacity);
2187 // Let the scavenger empty the new space.
2188 heap->CollectGarbage(NEW_SPACE);
2189 CHECK_LE(new_space->Size(), old_capacity);
2191 // Explicitly shrinking should halve the space capacity.
2192 old_capacity = new_space->TotalCapacity();
2193 new_space->Shrink();
2194 new_capacity = new_space->TotalCapacity();
2195 CHECK(old_capacity == 2 * new_capacity);
2197 // Consecutive shrinking should not affect space capacity.
// Already back at the initial capacity, so further Shrink calls no-op.
2198 old_capacity = new_space->TotalCapacity();
2199 new_space->Shrink();
2200 new_space->Shrink();
2201 new_space->Shrink();
2202 new_capacity = new_space->TotalCapacity();
2203 CHECK(old_capacity == new_capacity);
// Verifies that CollectAllAvailableGarbage shrinks a previously grown
// new space back to its original capacity.
2207 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
2208 CcTest::InitializeVM();
2209 Heap* heap = CcTest::heap();
2210 if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
2211 heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2212 // The max size cannot exceed the reserved size, since semispaces must be
2213 // always within the reserved space. We can't test new space growing and
2214 // shrinking if the reserved size is the same as the minimum (initial) size.
2218 v8::HandleScope scope(CcTest::isolate());
2219 NewSpace* new_space = heap->new_space();
2220 intptr_t old_capacity, new_capacity;
2221 old_capacity = new_space->TotalCapacity();
// (Grow call elided in this view.) Growing doubles the capacity.
2223 new_capacity = new_space->TotalCapacity();
2224 CHECK(2 * old_capacity == new_capacity);
2225 FillUpNewSpace(new_space);
// The full GC should both empty and shrink the new space.
2226 heap->CollectAllAvailableGarbage();
2227 new_capacity = new_space->TotalCapacity();
2228 CHECK(old_capacity == new_capacity);
// Counts JSGlobalObjects currently reachable on the heap by walking every
// live object with a HeapIterator (count declaration/return are elided
// from this view).
2232 static int NumberOfGlobalObjects() {
2234 HeapIterator iterator(CcTest::heap());
2235 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2236 if (obj->IsGlobalObject()) count++;
2242 // Test that we don't embed maps from foreign contexts into
// Two contexts are created; ctx2 optimizes a monomorphic property load on
// an object whose map belongs to ctx1. After disposing the contexts, GC
// must be able to reclaim both native contexts (no map leak across them).
2244 TEST(LeakNativeContextViaMap) {
2245 i::FLAG_allow_natives_syntax = true;
2246 v8::Isolate* isolate = CcTest::isolate();
2247 v8::HandleScope outer_scope(isolate);
2248 v8::Persistent<v8::Context> ctx1p;
2249 v8::Persistent<v8::Context> ctx2p;
2251 v8::HandleScope scope(isolate);
2252 ctx1p.Reset(isolate, v8::Context::New(isolate));
2253 ctx2p.Reset(isolate, v8::Context::New(isolate));
2254 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// Each context contributes a global object + global proxy => 4 total.
2257 CcTest::heap()->CollectAllAvailableGarbage();
2258 CHECK_EQ(4, NumberOfGlobalObjects());
2261 v8::HandleScope inner_scope(isolate);
2262 CompileRun("var v = {x: 42}");
2263 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2264 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2265 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
// Hand the ctx1 object to ctx2 and optimize a load of its property there.
2267 ctx2->Global()->Set(v8_str("o"), v);
2268 v8::Local<v8::Value> res = CompileRun(
2269 "function f() { return o.x; }"
2270 "for (var i = 0; i < 10; ++i) f();"
2271 "%OptimizeFunctionOnNextCall(f);"
2273 CHECK_EQ(42, res->Int32Value());
// Sever the only explicit reference from ctx2 back into ctx1.
2274 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2276 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2278 isolate->ContextDisposedNotification();
// After disposing ctx1 (elided), only ctx2's pair should remain...
2280 CcTest::heap()->CollectAllAvailableGarbage();
2281 CHECK_EQ(2, NumberOfGlobalObjects());
// ...and after disposing ctx2 (elided), nothing should remain.
2283 CcTest::heap()->CollectAllAvailableGarbage();
2284 CHECK_EQ(0, NumberOfGlobalObjects());
2288 // Test that we don't embed functions from foreign contexts into
// Same shape as LeakNativeContextViaMap, but the cross-context reference
// is a JSFunction from ctx1 called from optimized code in ctx2.
2290 TEST(LeakNativeContextViaFunction) {
2291 i::FLAG_allow_natives_syntax = true;
2292 v8::Isolate* isolate = CcTest::isolate();
2293 v8::HandleScope outer_scope(isolate);
2294 v8::Persistent<v8::Context> ctx1p;
2295 v8::Persistent<v8::Context> ctx2p;
2297 v8::HandleScope scope(isolate);
2298 ctx1p.Reset(isolate, v8::Context::New(isolate));
2299 ctx2p.Reset(isolate, v8::Context::New(isolate));
2300 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
// Two live contexts => 4 global objects (global + proxy each).
2303 CcTest::heap()->CollectAllAvailableGarbage();
2304 CHECK_EQ(4, NumberOfGlobalObjects());
2307 v8::HandleScope inner_scope(isolate);
2308 CompileRun("var v = function() { return 42; }");
2309 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2310 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2311 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2313 ctx2->Global()->Set(v8_str("o"), v);
2314 v8::Local<v8::Value> res = CompileRun(
2315 "function f(x) { return x(); }"
2316 "for (var i = 0; i < 10; ++i) f(o);"
2317 "%OptimizeFunctionOnNextCall(f);"
2319 CHECK_EQ(42, res->Int32Value());
// Drop the cross-context function reference before collecting.
2320 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2324 isolate->ContextDisposedNotification();
2326 CcTest::heap()->CollectAllAvailableGarbage();
2327 CHECK_EQ(2, NumberOfGlobalObjects());
2329 CcTest::heap()->CollectAllAvailableGarbage();
2330 CHECK_EQ(0, NumberOfGlobalObjects());
// Variant of LeakNativeContextViaMap using a keyed (element) load on an
// array from the foreign context.
2334 TEST(LeakNativeContextViaMapKeyed) {
2335 i::FLAG_allow_natives_syntax = true;
2336 v8::Isolate* isolate = CcTest::isolate();
2337 v8::HandleScope outer_scope(isolate);
2338 v8::Persistent<v8::Context> ctx1p;
2339 v8::Persistent<v8::Context> ctx2p;
2341 v8::HandleScope scope(isolate);
2342 ctx1p.Reset(isolate, v8::Context::New(isolate));
2343 ctx2p.Reset(isolate, v8::Context::New(isolate));
2344 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2347 CcTest::heap()->CollectAllAvailableGarbage();
2348 CHECK_EQ(4, NumberOfGlobalObjects());
2351 v8::HandleScope inner_scope(isolate);
2352 CompileRun("var v = [42, 43]");
2353 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2354 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2355 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2357 ctx2->Global()->Set(v8_str("o"), v);
// Optimized keyed load o[0] must not pin ctx1's map in ctx2's code.
2358 v8::Local<v8::Value> res = CompileRun(
2359 "function f() { return o[0]; }"
2360 "for (var i = 0; i < 10; ++i) f();"
2361 "%OptimizeFunctionOnNextCall(f);"
2363 CHECK_EQ(42, res->Int32Value());
2364 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2368 isolate->ContextDisposedNotification();
2370 CcTest::heap()->CollectAllAvailableGarbage();
2371 CHECK_EQ(2, NumberOfGlobalObjects());
2373 CcTest::heap()->CollectAllAvailableGarbage();
2374 CHECK_EQ(0, NumberOfGlobalObjects());
// Variant of LeakNativeContextViaMap where the foreign object is reached
// through a prototype chain (script body elided in this view).
2378 TEST(LeakNativeContextViaMapProto) {
2379 i::FLAG_allow_natives_syntax = true;
2380 v8::Isolate* isolate = CcTest::isolate();
2381 v8::HandleScope outer_scope(isolate);
2382 v8::Persistent<v8::Context> ctx1p;
2383 v8::Persistent<v8::Context> ctx2p;
2385 v8::HandleScope scope(isolate);
2386 ctx1p.Reset(isolate, v8::Context::New(isolate));
2387 ctx2p.Reset(isolate, v8::Context::New(isolate));
2388 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2391 CcTest::heap()->CollectAllAvailableGarbage();
2392 CHECK_EQ(4, NumberOfGlobalObjects());
2395 v8::HandleScope inner_scope(isolate);
2396 CompileRun("var v = { y: 42}");
2397 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2398 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2399 v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
2401 ctx2->Global()->Set(v8_str("o"), v);
2402 v8::Local<v8::Value> res = CompileRun(
2408 "for (var i = 0; i < 10; ++i) f();"
2409 "%OptimizeFunctionOnNextCall(f);"
2411 CHECK_EQ(42, res->Int32Value());
2412 ctx2->Global()->Set(v8_str("o"), v8::Int32::New(isolate, 0));
2416 isolate->ContextDisposedNotification();
2418 CcTest::heap()->CollectAllAvailableGarbage();
2419 CHECK_EQ(2, NumberOfGlobalObjects());
2421 CcTest::heap()->CollectAllAvailableGarbage();
2422 CHECK_EQ(0, NumberOfGlobalObjects());
// Regression test: running optimized instanceof code while incremental
// marking is in progress must not miss write barriers (verified by the
// heap verifier during the final GC).
2426 TEST(InstanceOfStubWriteBarrier) {
2427 i::FLAG_allow_natives_syntax = true;
// Heap verification catches any missed write barrier at GC time.
2429 i::FLAG_verify_heap = true;
2432 CcTest::InitializeVM();
2433 if (!CcTest::i_isolate()->use_crankshaft()) return;
2434 if (i::FLAG_force_marking_deque_overflows) return;
2435 v8::HandleScope outer_scope(CcTest::isolate());
2438 v8::HandleScope scope(CcTest::isolate());
2440 "function foo () { }"
2441 "function mkbar () { return new (new Function(\"\")) (); }"
2442 "function f (x) { return (x instanceof foo); }"
2443 "function g () { f(mkbar()); }"
2444 "f(new foo()); f(new foo());"
2445 "%OptimizeFunctionOnNextCall(f);"
2446 "f(new foo()); g();");
2449 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2451 marking->Start(Heap::kNoGCFlags);
2453 Handle<JSFunction> f =
2454 v8::Utils::OpenHandle(
2455 *v8::Handle<v8::Function>::Cast(
2456 CcTest::global()->Get(v8_str("f"))));
2458 CHECK(f->IsOptimized());
// Step the marker until f's code object is black (fully marked) so the
// subsequent call exercises the post-marking write-barrier path.
2460 while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
2461 !marking->IsStopped()) {
2462 // Discard any pending GC requests otherwise we will get GC when we enter
2464 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
2467 CHECK(marking->IsMarking());
2470 v8::HandleScope scope(CcTest::isolate());
2471 v8::Handle<v8::Object> global = CcTest::global();
2472 v8::Handle<v8::Function> g =
2473 v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
// g() feeds f an object with a fresh map while marking is active.
2474 g->Call(global, 0, NULL);
// Force marking to finish and verify the heap in the collection.
2477 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2478 CcTest::heap()->CollectGarbage(OLD_SPACE);
// Returns how many prototype transitions are recorded in `map`'s
// transition array.
2482 static int NumberOfProtoTransitions(Map* map) {
2483 return TransitionArray::NumberOfPrototypeTransitions(
2484 TransitionArray::GetPrototypeTransitions(map));
// Checks that GC clears only the prototype transitions whose targets are
// dead, compacts the transition array, and correctly records slots when
// the array's referents live on an evacuation candidate.
2488 TEST(PrototypeTransitionClearing) {
2489 if (FLAG_never_compact) return;
2490 CcTest::InitializeVM();
2491 Isolate* isolate = CcTest::i_isolate();
2492 Factory* factory = isolate->factory();
2493 v8::HandleScope scope(CcTest::isolate());
2495 CompileRun("var base = {};");
2496 Handle<JSObject> baseObject =
2497 v8::Utils::OpenHandle(
2498 *v8::Handle<v8::Object>::Cast(
2499 CcTest::global()->Get(v8_str("base"))));
2500 int initialTransitions = NumberOfProtoTransitions(baseObject->map());
// Create 10 proto transitions; only those from iteration 3 onward stay
// reachable via the `live` array.
2504 "for (var i = 0; i < 10; i++) {"
2506 " var prototype = {};"
2507 " object.__proto__ = prototype;"
2508 " if (i >= 3) live.push(object, prototype);"
2511 // Verify that only dead prototype transitions are cleared.
2512 CHECK_EQ(initialTransitions + 10,
2513 NumberOfProtoTransitions(baseObject->map()));
2514 CcTest::heap()->CollectAllGarbage();
// Iterations 0-2 (3 transitions) were garbage; 7 survive.
2515 const int transitions = 10 - 3;
2516 CHECK_EQ(initialTransitions + transitions,
2517 NumberOfProtoTransitions(baseObject->map()));
2519 // Verify that prototype transitions array was compacted.
2521 TransitionArray::GetPrototypeTransitions(baseObject->map());
2522 for (int i = initialTransitions; i < initialTransitions + transitions; i++) {
2523 int j = TransitionArray::kProtoTransitionHeaderSize + i;
// After compaction every surviving slot holds a live Map.
2524 CHECK(trans->get(j)->IsMap());
2527 // Make sure next prototype is placed on an old-space evacuation candidate.
2528 Handle<JSObject> prototype;
2529 PagedSpace* space = CcTest::heap()->old_space();
2531 AlwaysAllocateScope always_allocate(isolate);
// Fill old space so the next page becomes an evacuation candidate.
2532 SimulateFullSpace(space);
2534 factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, WEAK, TENURED);
2537 // Add a prototype on an evacuation candidate and verify that transition
2538 // clearing correctly records slots in prototype transition array.
2539 i::FLAG_always_compact = true;
2540 Handle<Map> map(baseObject->map());
2541 CHECK(!space->LastPage()->Contains(
2542 TransitionArray::GetPrototypeTransitions(*map)->address()));
2543 CHECK(space->LastPage()->Contains(prototype->address()));
// Verifies that bumping the global IC age (via ContextDisposedNotification)
// during incremental marking resets a SharedFunctionInfo's ic_age,
// opt_count and profiler ticks.
2547 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
2548 i::FLAG_stress_compaction = false;
2549 i::FLAG_allow_natives_syntax = true;
2551 i::FLAG_verify_heap = true;
2554 CcTest::InitializeVM();
2555 if (!CcTest::i_isolate()->use_crankshaft()) return;
2556 v8::HandleScope outer_scope(CcTest::isolate());
2559 v8::HandleScope scope(CcTest::isolate());
2563 " for (var i = 0; i < 100; i++) s += i;"
2567 "%OptimizeFunctionOnNextCall(f);"
2570 Handle<JSFunction> f =
2571 v8::Utils::OpenHandle(
2572 *v8::Handle<v8::Function>::Cast(
2573 CcTest::global()->Get(v8_str("f"))));
2574 CHECK(f->IsOptimized());
2576 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2578 marking->Start(Heap::kNoGCFlags);
2579 // The following calls will increment CcTest::heap()->global_ic_age().
2580 CcTest::isolate()->ContextDisposedNotification();
// Drive marking to completion, then do a full GC so the reset happens.
2581 SimulateIncrementalMarking(CcTest::heap());
2582 CcTest::heap()->CollectAllGarbage();
2583 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2584 CHECK_EQ(0, f->shared()->opt_count());
2585 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Same counter-reset check as the incremental-marking variant, but with
// incremental marking aborted so the reset happens in a plain mark-sweep.
2589 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2590 i::FLAG_stress_compaction = false;
2591 i::FLAG_allow_natives_syntax = true;
2593 i::FLAG_verify_heap = true;
2596 CcTest::InitializeVM();
2597 if (!CcTest::i_isolate()->use_crankshaft()) return;
2598 v8::HandleScope outer_scope(CcTest::isolate());
2601 v8::HandleScope scope(CcTest::isolate());
2605 " for (var i = 0; i < 100; i++) s += i;"
2609 "%OptimizeFunctionOnNextCall(f);"
2612 Handle<JSFunction> f =
2613 v8::Utils::OpenHandle(
2614 *v8::Handle<v8::Function>::Cast(
2615 CcTest::global()->Get(v8_str("f"))));
2616 CHECK(f->IsOptimized());
// Ensure the counters are reset by a non-incremental collection.
2618 CcTest::heap()->incremental_marking()->Abort();
2620 // The following two calls will increment CcTest::heap()->global_ic_age().
2621 CcTest::isolate()->ContextDisposedNotification();
2622 CcTest::heap()->CollectAllGarbage();
2624 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2625 CHECK_EQ(0, f->shared()->opt_count());
2626 CHECK_EQ(0, f->shared()->code()->profiler_ticks());
// Verifies that a sufficiently long idle notification finishes an
// in-progress incremental marking cycle with a final GC.
2630 TEST(IdleNotificationFinishMarking) {
2631 i::FLAG_allow_natives_syntax = true;
2632 CcTest::InitializeVM();
// Fill old space so incremental marking has real work to do.
2633 SimulateFullSpace(CcTest::heap()->old_space());
2634 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2636 marking->Start(Heap::kNoGCFlags);
2638 CHECK_EQ(CcTest::heap()->gc_count(), 0);
2640 // TODO(hpayer): We cannot write proper unit test right now for heap.
2641 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2642 // marking delay counter.
2644 // Perform a huge incremental marking step but don't complete marking.
2645 intptr_t bytes_processed = 0;
2648 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2649 IncrementalMarking::FORCE_MARKING,
2650 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2651 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
// Loop until a step makes no progress, i.e. marking work is drained.
2652 } while (bytes_processed);
2654 // The next invocations of incremental marking are not going to complete
2656 // since the completion threshold is not reached
2657 for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
2659 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2660 IncrementalMarking::FORCE_MARKING,
2661 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2662 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
// Pretend the weak closure was over-approximated so the idle handler is
// allowed to finalize marking.
2665 marking->SetWeakClosureWasOverApproximatedForTesting(true);
2667 // The next idle notification has to finish incremental marking.
2668 const double kLongIdleTime = 1000.0;
2669 CcTest::isolate()->IdleNotificationDeadline(
2670 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2671 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
// Exactly one GC must have been triggered by the idle notification.
2673 CHECK_EQ(CcTest::heap()->gc_count(), 1);
2677 // Test that HAllocateObject will always return an object in new-space.
// Even with new space full, optimized object construction must allocate
// in new space (AlwaysAllocateScope keeps the allocation from failing).
2678 TEST(OptimizedAllocationAlwaysInNewSpace) {
2679 i::FLAG_allow_natives_syntax = true;
2680 CcTest::InitializeVM();
2681 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2682 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2683 v8::HandleScope scope(CcTest::isolate());
2685 SimulateFullSpace(CcTest::heap()->new_space());
2686 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2687 v8::Local<v8::Value> res = CompileRun(
2690 " for (var i = 0; i < 32; i++) {"
2691 " this['x' + i] = x;"
2694 "function f(x) { return new c(x); };"
2696 "%OptimizeFunctionOnNextCall(f);"
2699 4, res.As<v8::Object>()->GetRealNamedProperty(v8_str("x"))->Int32Value());
2701 Handle<JSObject> o =
2702 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// The optimized constructor's result must live in new space.
2704 CHECK(CcTest::heap()->InNewSpace(*o));
// Checks that allocation folding in optimized code keeps pretenuring
// decisions intact: the folded literal and both nested arrays (plus their
// elements) must end up in old space.
2708 TEST(OptimizedPretenuringAllocationFolding) {
2709 i::FLAG_allow_natives_syntax = true;
2710 i::FLAG_expose_gc = true;
2711 CcTest::InitializeVM();
2712 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2713 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2714 v8::HandleScope scope(CcTest::isolate());
2716 // Grow new space until maximum capacity reached.
2717 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2718 CcTest::heap()->new_space()->Grow();
// Script is built with SNPrintF so the iteration count can be driven by
// AllocationSite::kPretenureMinimumCreated (note %% escapes for natives).
2721 i::ScopedVector<char> source(1024);
2724 "var number_elements = %d;"
2725 "var elements = new Array();"
2727 " for (var i = 0; i < number_elements; i++) {"
2728 " elements[i] = [[{}], [1.1]];"
2730 " return elements[number_elements-1]"
2734 "%%OptimizeFunctionOnNextCall(f);"
2736 AllocationSite::kPretenureMinimumCreated);
2738 v8::Local<v8::Value> res = CompileRun(source.start());
2740 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2741 Handle<JSObject> int_array_handle =
2742 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2743 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2744 Handle<JSObject> double_array_handle =
2745 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2747 Handle<JSObject> o =
2748 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// Outer literal and both inner arrays, including backing stores, must be
// pretenured into old space.
2749 CHECK(CcTest::heap()->InOldSpace(*o));
2750 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2751 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2752 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2753 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
// Checks that repeatedly allocated object-array literals are pretenured:
// both the literal and its elements backing store land in old space.
2757 TEST(OptimizedPretenuringObjectArrayLiterals) {
2758 i::FLAG_allow_natives_syntax = true;
2759 i::FLAG_expose_gc = true;
2760 CcTest::InitializeVM();
2761 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2762 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2763 v8::HandleScope scope(CcTest::isolate());
2765 // Grow new space until maximum capacity reached.
2766 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2767 CcTest::heap()->new_space()->Grow();
2770 i::ScopedVector<char> source(1024);
2773 "var number_elements = %d;"
2774 "var elements = new Array(number_elements);"
2776 " for (var i = 0; i < number_elements; i++) {"
2777 " elements[i] = [{}, {}, {}];"
2779 " return elements[number_elements - 1];"
2783 "%%OptimizeFunctionOnNextCall(f);"
2785 AllocationSite::kPretenureMinimumCreated);
2787 v8::Local<v8::Value> res = CompileRun(source.start());
2789 Handle<JSObject> o =
2790 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2792 CHECK(CcTest::heap()->InOldSpace(o->elements()));
2793 CHECK(CcTest::heap()->InOldSpace(*o));
// Checks pretenuring of literals with mixed in-object properties (nested
// object + doubles), accounting for optionally unboxed double fields.
2797 TEST(OptimizedPretenuringMixedInObjectProperties) {
2798 i::FLAG_allow_natives_syntax = true;
2799 i::FLAG_expose_gc = true;
2800 CcTest::InitializeVM();
2801 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2802 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2803 v8::HandleScope scope(CcTest::isolate());
2805 // Grow new space until maximum capacity reached.
2806 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2807 CcTest::heap()->new_space()->Grow();
2811 i::ScopedVector<char> source(1024);
2814 "var number_elements = %d;"
2815 "var elements = new Array(number_elements);"
2817 " for (var i = 0; i < number_elements; i++) {"
2818 " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2820 " return elements[number_elements - 1];"
2824 "%%OptimizeFunctionOnNextCall(f);"
2826 AllocationSite::kPretenureMinimumCreated);
2828 v8::Local<v8::Value> res = CompileRun(source.start());
2830 Handle<JSObject> o =
2831 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2833 CHECK(CcTest::heap()->InOldSpace(*o));
// idx1 = property "a" (object), idx2 = property "b" (double).
2834 FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2835 FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2836 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
// A boxed double lives as a HeapNumber in old space; an unboxed one is
// stored inline and compared by value instead.
2837 if (!o->IsUnboxedDoubleField(idx2)) {
2838 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
2840 CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
2843 JSObject* inner_object =
2844 reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
2845 CHECK(CcTest::heap()->InOldSpace(inner_object));
// Same boxed/unboxed distinction for the nested object's "c" field.
2846 if (!inner_object->IsUnboxedDoubleField(idx1)) {
2847 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
2849 CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
2851 CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
// Checks pretenuring of objects whose properties are all doubles: both
// the object and its out-of-object properties store go to old space.
2855 TEST(OptimizedPretenuringDoubleArrayProperties) {
2856 i::FLAG_allow_natives_syntax = true;
2857 i::FLAG_expose_gc = true;
2858 CcTest::InitializeVM();
2859 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2860 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2861 v8::HandleScope scope(CcTest::isolate());
2863 // Grow new space until maximum capacity reached.
2864 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2865 CcTest::heap()->new_space()->Grow();
2868 i::ScopedVector<char> source(1024);
2871 "var number_elements = %d;"
2872 "var elements = new Array(number_elements);"
2874 " for (var i = 0; i < number_elements; i++) {"
2875 " elements[i] = {a: 1.1, b: 2.2};"
2877 " return elements[i - 1];"
2881 "%%OptimizeFunctionOnNextCall(f);"
2883 AllocationSite::kPretenureMinimumCreated);
2885 v8::Local<v8::Value> res = CompileRun(source.start());
2887 Handle<JSObject> o =
2888 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2890 CHECK(CcTest::heap()->InOldSpace(*o));
2891 CHECK(CcTest::heap()->InOldSpace(o->properties()));
// Verifies pretenuring of double array literals [1.1, 2.2, 3.3]: the JSArray
// and its double-elements backing store must end up in old space.
// NOTE(review): lowercase 'd' in the test name ("Pretenuringdouble") looks
// like a typo, but renaming would change the test's identity — left as-is.
2895 TEST(OptimizedPretenuringdoubleArrayLiterals) {
2896 i::FLAG_allow_natives_syntax = true;
2897 i::FLAG_expose_gc = true;
2898 CcTest::InitializeVM();
2899 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2900 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2901 v8::HandleScope scope(CcTest::isolate());
2903 // Grow new space until maximum capacity reached.
2904 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2905 CcTest::heap()->new_space()->Grow();
2908 i::ScopedVector<char> source(1024);
2911 "var number_elements = %d;"
2912 "var elements = new Array(number_elements);"
2914 " for (var i = 0; i < number_elements; i++) {"
2915 " elements[i] = [1.1, 2.2, 3.3];"
2917 " return elements[number_elements - 1];"
2921 "%%OptimizeFunctionOnNextCall(f);"
2923 AllocationSite::kPretenureMinimumCreated);
2925 v8::Local<v8::Value> res = CompileRun(source.start());
2927 Handle<JSObject> o =
2928 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2930 CHECK(CcTest::heap()->InOldSpace(o->elements()));
2931 CHECK(CcTest::heap()->InOldSpace(*o));
// Verifies pretenuring propagates through nested mixed literals
// [[{}, {}, {}], [1.1, 2.2, 3.3]]: the outer array, both inner arrays, and
// their elements stores must all be allocated in old space.
2935 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2936 i::FLAG_allow_natives_syntax = true;
2937 i::FLAG_expose_gc = true;
2938 CcTest::InitializeVM();
2939 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2940 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2941 v8::HandleScope scope(CcTest::isolate());
2943 // Grow new space until maximum capacity reached.
2944 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2945 CcTest::heap()->new_space()->Grow();
2948 i::ScopedVector<char> source(1024);
2951 "var number_elements = 100;"
2952 "var elements = new Array(number_elements);"
2954 " for (var i = 0; i < number_elements; i++) {"
2955 " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
2957 " return elements[number_elements - 1];"
2961 "%%OptimizeFunctionOnNextCall(f);"
2964 v8::Local<v8::Value> res = CompileRun(source.start());
// Pull out the two nested arrays (indices "0" and "1") for inspection.
2966 v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
2967 Handle<JSObject> int_array_handle =
2968 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
2969 v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
2970 Handle<JSObject> double_array_handle =
2971 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));
2973 Handle<JSObject> o =
2974 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
2975 CHECK(CcTest::heap()->InOldSpace(*o));
2976 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2977 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2978 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2979 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
// Verifies pretenuring of nested object-array literals
// [[{}, {}, {}],[{}, {}, {}]]: outer array and both inner arrays (plus their
// elements stores) must be in old space.
2983 TEST(OptimizedPretenuringNestedObjectLiterals) {
2984 i::FLAG_allow_natives_syntax = true;
2985 i::FLAG_expose_gc = true;
2986 CcTest::InitializeVM();
2987 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
2988 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
2989 v8::HandleScope scope(CcTest::isolate());
2991 // Grow new space until maximum capacity reached.
2992 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2993 CcTest::heap()->new_space()->Grow();
2996 i::ScopedVector<char> source(1024);
2999 "var number_elements = %d;"
3000 "var elements = new Array(number_elements);"
3002 " for (var i = 0; i < number_elements; i++) {"
3003 " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
3005 " return elements[number_elements - 1];"
3009 "%%OptimizeFunctionOnNextCall(f);"
3011 AllocationSite::kPretenureMinimumCreated);
3013 v8::Local<v8::Value> res = CompileRun(source.start());
3015 v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
3016 Handle<JSObject> int_array_handle_1 =
3017 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
3018 v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
3019 Handle<JSObject> int_array_handle_2 =
3020 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));
3022 Handle<JSObject> o =
3023 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3024 CHECK(CcTest::heap()->InOldSpace(*o));
3025 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
3026 CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
3027 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
3028 CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
// Verifies pretenuring of nested double-array literals
// [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]]: both inner double arrays and their
// elements stores must be in old space.
3032 TEST(OptimizedPretenuringNestedDoubleLiterals) {
3033 i::FLAG_allow_natives_syntax = true;
3034 i::FLAG_expose_gc = true;
3035 CcTest::InitializeVM();
3036 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3037 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3038 v8::HandleScope scope(CcTest::isolate());
3040 // Grow new space until maximum capacity reached.
3041 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3042 CcTest::heap()->new_space()->Grow();
3045 i::ScopedVector<char> source(1024);
3048 "var number_elements = %d;"
3049 "var elements = new Array(number_elements);"
3051 " for (var i = 0; i < number_elements; i++) {"
3052 " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
3054 " return elements[number_elements - 1];"
3058 "%%OptimizeFunctionOnNextCall(f);"
3060 AllocationSite::kPretenureMinimumCreated);
3062 v8::Local<v8::Value> res = CompileRun(source.start());
3064 v8::Local<v8::Value> double_array_1 =
3065 v8::Object::Cast(*res)->Get(v8_str("0"));
3066 Handle<JSObject> double_array_handle_1 =
3067 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
3068 v8::Local<v8::Value> double_array_2 =
3069 v8::Object::Cast(*res)->Get(v8_str("1"));
3070 Handle<JSObject> double_array_handle_2 =
3071 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));
3073 Handle<JSObject> o =
3074 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3075 CHECK(CcTest::heap()->InOldSpace(*o));
3076 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
3077 CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
3078 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
3079 CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
// NOTE(review): the continuation of the next comment line (original line
// 3084) is not present in this listing.
3083 // Make sure pretenuring feedback is gathered for constructed objects as well
3085 TEST(OptimizedPretenuringConstructorCalls) {
3086 if (!i::FLAG_pretenuring_call_new) {
3087 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
3090 i::FLAG_allow_natives_syntax = true;
3091 i::FLAG_expose_gc = true;
3092 CcTest::InitializeVM();
3093 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3094 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3095 v8::HandleScope scope(CcTest::isolate());
3097 // Grow new space until maximum capacity reached.
3098 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3099 CcTest::heap()->new_space()->Grow();
3102 i::ScopedVector<char> source(1024);
3103 // Call new is doing slack tracking for the first
3104 // JSFunction::kGenerousAllocationCount allocations, and we can't find
3105 // mementos during that time.
3108 "var number_elements = %d;"
3109 "var elements = new Array(number_elements);"
3115 " for (var i = 0; i < number_elements; i++) {"
3116 " elements[i] = new foo();"
3118 " return elements[number_elements - 1];"
3122 "%%OptimizeFunctionOnNextCall(f);"
// Extra allocations cover the slack-tracking window noted above.
3124 AllocationSite::kPretenureMinimumCreated +
3125 JSFunction::kGenerousAllocationCount);
3127 v8::Local<v8::Value> res = CompileRun(source.start());
3129 Handle<JSObject> o =
3130 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3132 CHECK(CcTest::heap()->InOldSpace(*o));
// Same as OptimizedPretenuringConstructorCalls, but the constructed function
// g() is defined inline in the test source; the surviving instance must be
// pretenured into old space.
3136 TEST(OptimizedPretenuringCallNew) {
3137 if (!i::FLAG_pretenuring_call_new) {
3138 // FLAG_pretenuring_call_new needs to be synced with the snapshot.
3141 i::FLAG_allow_natives_syntax = true;
3142 i::FLAG_expose_gc = true;
3143 CcTest::InitializeVM();
3144 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3145 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3146 v8::HandleScope scope(CcTest::isolate());
3148 // Grow new space until maximum capacity reached.
3149 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3150 CcTest::heap()->new_space()->Grow();
3153 i::ScopedVector<char> source(1024);
3154 // Call new is doing slack tracking for the first
3155 // JSFunction::kGenerousAllocationCount allocations, and we can't find
3156 // mementos during that time.
3159 "var number_elements = %d;"
3160 "var elements = new Array(number_elements);"
3161 "function g() { this.a = 0; }"
3163 " for (var i = 0; i < number_elements; i++) {"
3164 " elements[i] = new g();"
3166 " return elements[number_elements - 1];"
3170 "%%OptimizeFunctionOnNextCall(f);"
3172 AllocationSite::kPretenureMinimumCreated +
3173 JSFunction::kGenerousAllocationCount);
3175 v8::Local<v8::Value> res = CompileRun(source.start());
3177 Handle<JSObject> o =
3178 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3179 CHECK(CcTest::heap()->InOldSpace(*o));
3183 // Test regular array literals allocation.
// Without pretenuring feedback, a fresh array literal from optimized code
// should be allocated in NEW space (contrast with the pretenuring tests
// above).
3184 TEST(OptimizedAllocationArrayLiterals) {
3185 i::FLAG_allow_natives_syntax = true;
3186 CcTest::InitializeVM();
3187 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3188 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3189 v8::HandleScope scope(CcTest::isolate());
3191 v8::Local<v8::Value> res = CompileRun(
3193 " var numbers = new Array(1, 2, 3);"
3194 " numbers[0] = 3.14;"
3198 "%OptimizeFunctionOnNextCall(f);"
// 3.14 truncates to 3 when read back as an Int32.
3200 CHECK_EQ(static_cast<int>(3.14),
3201 v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
3203 Handle<JSObject> o =
3204 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
3206 CHECK(CcTest::heap()->InNewSpace(o->elements()));
// Returns the number of map transitions recorded on |map|'s raw transition
// storage; used below to observe transition-array shrinking across GCs.
3210 static int CountMapTransitions(Map* map) {
3211 return TransitionArray::NumberOfTransitions(map->raw_transitions());
3215 // Test that map transitions are cleared and maps are collected with
3216 // incremental marking as well.
// NOTE(review): the TEST(...) header line (original line 3217) is not
// present in this listing, so the test's name cannot be confirmed here.
3218 i::FLAG_stress_compaction = false;
3219 i::FLAG_allow_natives_syntax = true;
3220 i::FLAG_trace_incremental_marking = true;
// Do not retain dead maps across GCs, so transition clearing is observable.
3221 i::FLAG_retain_maps_for_n_gc = 0;
3222 CcTest::InitializeVM();
3223 v8::HandleScope scope(CcTest::isolate());
3224 static const int transitions_count = 256;
3226 CompileRun("function F() {}");
3228 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
// Create one map transition per distinct property name.
3229 for (int i = 0; i < transitions_count; i++) {
3230 EmbeddedVector<char, 64> buffer;
3231 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3232 CompileRun(buffer.start());
3234 CompileRun("var root = new F;");
3237 Handle<JSObject> root =
3238 v8::Utils::OpenHandle(
3239 *v8::Handle<v8::Object>::Cast(
3240 CcTest::global()->Get(v8_str("root"))));
3242 // Count number of live transitions before marking.
3243 int transitions_before = CountMapTransitions(root->map());
3244 CompileRun("%DebugPrint(root);");
3245 CHECK_EQ(transitions_count, transitions_before);
3247 SimulateIncrementalMarking(CcTest::heap());
3248 CcTest::heap()->CollectAllGarbage();
3250 // Count number of live transitions after marking. Note that one transition
3251 // is left, because 'o' still holds an instance of one transition target.
3252 int transitions_after = CountMapTransitions(root->map());
3253 CompileRun("%DebugPrint(root);");
3254 CHECK_EQ(1, transitions_after);
// Adds |transitions_count| map transitions to function F by assigning a
// distinct property name ("prop<i>") to a fresh instance each iteration.
// AlwaysAllocateScope prevents GC from interfering while transitions build.
3259 static void AddTransitions(int transitions_count) {
3260 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3261 for (int i = 0; i < transitions_count; i++) {
3262 EmbeddedVector<char, 64> buffer;
3263 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3264 CompileRun(buffer.start());
// Looks up a global JS variable by name and returns it as an internal
// JSObject handle.
3269 static Handle<JSObject> GetByName(const char* name) {
3270 return v8::Utils::OpenHandle(
3271 *v8::Handle<v8::Object>::Cast(
3272 CcTest::global()->Get(v8_str(name))));
// Sets property |property_name| = 23 on |object| while arranging for a
// global GC to trigger after |gc_count| allocations (via gc_interval and
// the allocation timeout), so the property add races a collection.
3276 static void AddPropertyTo(
3277 int gc_count, Handle<JSObject> object, const char* property_name) {
3278 Isolate* isolate = CcTest::i_isolate();
3279 Factory* factory = isolate->factory();
3280 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3281 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3282 i::FLAG_gc_interval = gc_count;
3283 i::FLAG_gc_global = true;
// Don't keep dead maps alive across the forced GCs.
3284 i::FLAG_retain_maps_for_n_gc = 0;
3285 CcTest::heap()->set_allocation_timeout(gc_count);
3286 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
// Checks that a map's transition array shrinks when a GC strikes during
// allocation: after adding a property under a forced-GC window, only one
// transition should survive on the back-pointer map.
3290 TEST(TransitionArrayShrinksDuringAllocToZero) {
3291 i::FLAG_stress_compaction = false;
3292 i::FLAG_allow_natives_syntax = true;
3293 CcTest::InitializeVM();
3294 v8::HandleScope scope(CcTest::isolate());
3295 static const int transitions_count = 10;
3296 CompileRun("function F() { }");
3297 AddTransitions(transitions_count);
3298 CompileRun("var root = new F;");
3299 Handle<JSObject> root = GetByName("root");
3301 // Count number of live transitions before marking.
3302 int transitions_before = CountMapTransitions(root->map());
3303 CHECK_EQ(transitions_count, transitions_before);
// NOTE(review): the continuation of this CompileRun string literal
// (original line 3307) is not present in this listing.
3306 CompileRun("o = new F;"
3308 root = GetByName("root");
3309 AddPropertyTo(2, root, "funny");
3310 CcTest::heap()->CollectGarbage(NEW_SPACE);
3312 // Count number of live transitions after marking. Note that one transition
3313 // is left, because 'o' still holds an instance of one transition target.
3314 int transitions_after = CountMapTransitions(
3315 Map::cast(root->map()->GetBackPointer()));
3316 CHECK_EQ(1, transitions_after);
// Variant of the shrink test without the extra "o = new F" setup: after the
// forced-GC property add, two transitions remain on the back-pointer map.
3320 TEST(TransitionArrayShrinksDuringAllocToOne) {
3321 i::FLAG_stress_compaction = false;
3322 i::FLAG_allow_natives_syntax = true;
3323 CcTest::InitializeVM();
3324 v8::HandleScope scope(CcTest::isolate());
3325 static const int transitions_count = 10;
3326 CompileRun("function F() {}");
3327 AddTransitions(transitions_count);
3328 CompileRun("var root = new F;");
3329 Handle<JSObject> root = GetByName("root");
3331 // Count number of live transitions before marking.
3332 int transitions_before = CountMapTransitions(root->map());
3333 CHECK_EQ(transitions_count, transitions_before);
3335 root = GetByName("root");
3336 AddPropertyTo(2, root, "funny");
3337 CcTest::heap()->CollectGarbage(NEW_SPACE);
3339 // Count number of live transitions after marking. Note that one transition
3340 // is left, because 'o' still holds an instance of one transition target.
3341 int transitions_after = CountMapTransitions(
3342 Map::cast(root->map()->GetBackPointer()));
3343 CHECK_EQ(2, transitions_after);
// Variant where the property added ("prop9") already has an existing
// transition; after an old-space GC only that one transition survives.
3347 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3348 i::FLAG_stress_compaction = false;
3349 i::FLAG_allow_natives_syntax = true;
3350 CcTest::InitializeVM();
3351 v8::HandleScope scope(CcTest::isolate());
3352 static const int transitions_count = 10;
3353 CompileRun("function F() {}");
3354 AddTransitions(transitions_count);
3355 CompileRun("var root = new F;");
3356 Handle<JSObject> root = GetByName("root");
3358 // Count number of live transitions before marking.
3359 int transitions_before = CountMapTransitions(root->map());
3360 CHECK_EQ(transitions_count, transitions_before);
3362 root = GetByName("root");
3363 AddPropertyTo(0, root, "prop9");
3364 CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
3366 // Count number of live transitions after marking. Note that one transition
3367 // is left, because 'o' still holds an instance of one transition target.
3368 int transitions_after = CountMapTransitions(
3369 Map::cast(root->map()->GetBackPointer()));
3370 CHECK_EQ(1, transitions_after);
// Checks the promotion of a "simple" (single) transition to a full
// transition array when a second property is added under a forced GC.
3374 TEST(TransitionArraySimpleToFull) {
3375 i::FLAG_stress_compaction = false;
3376 i::FLAG_allow_natives_syntax = true;
3377 CcTest::InitializeVM();
3378 v8::HandleScope scope(CcTest::isolate());
3379 static const int transitions_count = 1;
3380 CompileRun("function F() {}");
3381 AddTransitions(transitions_count);
3382 CompileRun("var root = new F;");
3383 Handle<JSObject> root = GetByName("root");
3385 // Count number of live transitions before marking.
3386 int transitions_before = CountMapTransitions(root->map());
3387 CHECK_EQ(transitions_count, transitions_before);
// NOTE(review): the continuation of this CompileRun string literal
// (original line 3390) is not present in this listing.
3389 CompileRun("o = new F;"
3391 root = GetByName("root");
// Precondition: exactly one transition, stored in the compact simple form.
3392 DCHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
3393 AddPropertyTo(2, root, "happy");
3395 // Count number of live transitions after marking. Note that one transition
3396 // is left, because 'o' still holds an instance of one transition target.
3397 int transitions_after = CountMapTransitions(
3398 Map::cast(root->map()->GetBackPointer()));
3399 CHECK_EQ(1, transitions_after);
// Regression test: a StoreIC that performs a map transition while
// incremental marking is in progress must re-grey the receiver so map
// collection does not leave the object in a corrupt state.
3404 TEST(Regress2143a) {
3405 i::FLAG_collect_maps = true;
3406 i::FLAG_incremental_marking = true;
3407 CcTest::InitializeVM();
3408 v8::HandleScope scope(CcTest::isolate());
3410 // Prepare a map transition from the root object together with a yet
3411 // untransitioned root object.
// NOTE(review): the middle line of this CompileRun string literal
// (original line 3413) is not present in this listing.
3412 CompileRun("var root = new Object;"
3414 "root = new Object;");
3416 SimulateIncrementalMarking(CcTest::heap());
3418 // Compile a StoreIC that performs the prepared map transition. This
3419 // will restart incremental marking and should make sure the root is
3420 // marked grey again.
3421 CompileRun("function f(o) {"
3427 // This bug only triggers with aggressive IC clearing.
3428 CcTest::heap()->AgeInlineCaches();
3430 // Explicitly request GC to perform final marking step and sweeping.
3431 CcTest::heap()->CollectAllGarbage();
3433 Handle<JSObject> root =
3434 v8::Utils::OpenHandle(
3435 *v8::Handle<v8::Object>::Cast(
3436 CcTest::global()->Get(v8_str("root"))));
3438 // The root object should be in a sane state.
3439 CHECK(root->IsJSObject());
3440 CHECK(root->map()->IsMap());
// Same scenario as Regress2143a, but the transitioning store is performed by
// optimized code (LStoreNamedField) instead of a StoreIC, followed by an
// explicit deopt.
3444 TEST(Regress2143b) {
3445 i::FLAG_collect_maps = true;
3446 i::FLAG_incremental_marking = true;
3447 i::FLAG_allow_natives_syntax = true;
3448 CcTest::InitializeVM();
3449 v8::HandleScope scope(CcTest::isolate());
3451 // Prepare a map transition from the root object together with a yet
3452 // untransitioned root object.
// NOTE(review): the middle line of this CompileRun string literal
// (original line 3454) is not present in this listing.
3453 CompileRun("var root = new Object;"
3455 "root = new Object;");
3457 SimulateIncrementalMarking(CcTest::heap());
3459 // Compile an optimized LStoreNamedField that performs the prepared
3460 // map transition. This will restart incremental marking and should
3461 // make sure the root is marked grey again.
3462 CompileRun("function f(o) {"
3467 "%OptimizeFunctionOnNextCall(f);"
3469 "%DeoptimizeFunction(f);");
3471 // This bug only triggers with aggressive IC clearing.
3472 CcTest::heap()->AgeInlineCaches();
3474 // Explicitly request GC to perform final marking step and sweeping.
3475 CcTest::heap()->CollectAllGarbage();
3477 Handle<JSObject> root =
3478 v8::Utils::OpenHandle(
3479 *v8::Handle<v8::Object>::Cast(
3480 CcTest::global()->Get(v8_str("root"))));
3482 // The root object should be in a sane state.
3483 CHECK(root->IsJSObject());
3484 CHECK(root->map()->IsMap());
// Verifies that over-reserved old-space pages are progressively released
// back to the OS across successive GCs, ending with a single page after a
// last-resort collection.
3488 TEST(ReleaseOverReservedPages) {
3489 if (FLAG_never_compact) return;
3490 i::FLAG_trace_gc = true;
3491 // The optimizer can allocate stuff, messing up the test.
3492 i::FLAG_crankshaft = false;
3493 i::FLAG_always_opt = false;
3494 CcTest::InitializeVM();
3495 Isolate* isolate = CcTest::i_isolate();
3496 Factory* factory = isolate->factory();
3497 Heap* heap = isolate->heap();
3498 v8::HandleScope scope(CcTest::isolate());
3499 static const int number_of_test_pages = 20;
3501 // Prepare many pages with low live-bytes count.
3502 PagedSpace* old_space = heap->old_space();
3503 CHECK_EQ(1, old_space->CountTotalPages());
3504 for (int i = 0; i < number_of_test_pages; i++) {
3505 AlwaysAllocateScope always_allocate(isolate);
// Fill the space so the next tiny allocation forces a fresh page.
3506 SimulateFullSpace(old_space);
3507 factory->NewFixedArray(1, TENURED);
3509 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
3511 // Triggering one GC will cause a lot of garbage to be discovered but
3512 // evenly spread across all allocated pages.
3513 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3514 "triggered for preparation");
3515 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3517 // Triggering subsequent GCs should cause at least half of the pages
3518 // to be released to the OS after at most two cycles.
3519 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3520 "triggered by test 1");
3521 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3522 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3523 "triggered by test 2");
3524 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
3526 // Triggering a last-resort GC should cause all pages to be released to the
3527 // OS so that other processes can seize the memory. If we get a failure here
3528 // where there are 2 pages left instead of 1, then we should increase the
3529 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3530 // first page should be small in order to reduce memory used when the VM
3531 // boots, but if the 20 small arrays don't fit on the first page then that's
3532 // an indication that it is too small.
3533 heap->CollectAllAvailableGarbage("triggered really hard");
3534 CHECK_EQ(1, old_space->CountTotalPages());
// Incremented by MockUseCounterCallback each time a forced GC is reported.
3537 static int forced_gc_counter = 0;
// Use-counter callback installed by TEST(CountForcedGC); counts kForcedGC
// notifications. The GetCallingContext() call exercises the API from within
// the callback.
3539 void MockUseCounterCallback(v8::Isolate* isolate,
3540 v8::Isolate::UseCounterFeature feature) {
3541 isolate->GetCallingContext();
3542 if (feature == v8::Isolate::kForcedGC) {
3543 forced_gc_counter++;
// Verifies that invoking the exposed gc() builtin reports the kForcedGC
// use counter.
// NOTE(review): the line that actually runs |source| (original line 3558)
// is not present in this listing.
3548 TEST(CountForcedGC) {
3549 i::FLAG_expose_gc = true;
3550 CcTest::InitializeVM();
3551 Isolate* isolate = CcTest::i_isolate();
3552 v8::HandleScope scope(CcTest::isolate());
3554 isolate->SetUseCounterCallback(MockUseCounterCallback);
3556 forced_gc_counter = 0;
3557 const char* source = "gc();";
3559 CHECK_GT(forced_gc_counter, 0);
// Creates an old-space SlicedString whose parent is a new-space sequential
// string, then checks that GC keeps the parent a SeqOneByteString.
// NOTE(review): the TEST(...) header line (original line 3563) is not
// present in this listing, so the test's name cannot be confirmed here.
3564 i::FLAG_stress_compaction = false;
3565 CcTest::InitializeVM();
3566 Isolate* isolate = CcTest::i_isolate();
3567 Factory* factory = isolate->factory();
3568 v8::HandleScope scope(CcTest::isolate());
3569 Handle<String> slice(CcTest::heap()->empty_string());
3572 // Generate a parent that lives in new-space.
3573 v8::HandleScope inner_scope(CcTest::isolate());
3574 const char* c = "This text is long enough to trigger sliced strings.";
3575 Handle<String> s = factory->NewStringFromAsciiChecked(c);
3576 CHECK(s->IsSeqOneByteString());
3577 CHECK(CcTest::heap()->InNewSpace(*s));
3579 // Generate a sliced string that is based on the above parent and
3580 // lives in old-space.
3581 SimulateFullSpace(CcTest::heap()->new_space());
3582 AlwaysAllocateScope always_allocate(isolate);
3583 Handle<String> t = factory->NewProperSubString(s, 5, 35);
3584 CHECK(t->IsSlicedString());
3585 CHECK(!CcTest::heap()->InNewSpace(*t));
// Smuggle the slice out of the inner scope by writing its location directly.
3586 *slice.location() = *t.location();
3589 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
3590 CcTest::heap()->CollectAllGarbage();
3591 CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
// Smoke test: printing a SharedFunctionInfo must not crash. Only compiled
// when OBJECT_PRINT is defined (see #endif below; the matching #ifdef is
// above this listing's visible range).
3596 TEST(PrintSharedFunctionInfo) {
3597 CcTest::InitializeVM();
3598 v8::HandleScope scope(CcTest::isolate());
3599 const char* source = "f = function() { return 987654321; }\n"
3600 "g = function() { return 123456789; }\n";
3602 Handle<JSFunction> g =
3603 v8::Utils::OpenHandle(
3604 *v8::Handle<v8::Function>::Cast(
3605 CcTest::global()->Get(v8_str("g"))));
3607 OFStream os(stdout);
3608 g->shared()->Print(os);
3611 #endif // OBJECT_PRINT
// Verifies that incremental marking keeps the WeakCells in a call IC's
// feedback vector alive (and uncleared) when the referenced closures — from
// two different native contexts — are still reachable.
3614 TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3615 if (i::FLAG_always_opt) return;
3616 CcTest::InitializeVM();
3617 v8::HandleScope scope(CcTest::isolate());
3618 v8::Local<v8::Value> fun1, fun2;
// Create two closures, each in its own (temporary) context.
3622 CompileRun("function fun() {};");
3623 fun1 = env->Global()->Get(v8_str("fun"));
3628 CompileRun("function fun() {};");
3629 fun2 = env->Global()->Get(v8_str("fun"));
3632 // Prepare function f that contains type feedback for closures
3633 // originating from two different native contexts.
3634 CcTest::global()->Set(v8_str("fun1"), fun1);
3635 CcTest::global()->Set(v8_str("fun2"), fun2);
3636 CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3638 Handle<JSFunction> f =
3639 v8::Utils::OpenHandle(
3640 *v8::Handle<v8::Function>::Cast(
3641 CcTest::global()->Get(v8_str("f"))));
3643 Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
3645 int expected_slots = 2;
3646 CHECK_EQ(expected_slots, feedback_vector->ICSlots());
// NOTE(review): the definitions of slot1/slot2 (original lines 3647-3648)
// are not present in this listing.
3649 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot1))->IsWeakCell());
3650 CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot2))->IsWeakCell());
3652 SimulateIncrementalMarking(CcTest::heap());
3653 CcTest::heap()->CollectAllGarbage();
// The weak cells must survive the GC without being cleared.
3655 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot1)))
3657 CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot2)))
// Scans |code|'s relocation info for call targets and returns the first
// inline cache stub of the requested |kind| (visible portion; the return
// statements are outside this listing's sampled lines).
3662 static Code* FindFirstIC(Code* code, Code::Kind kind) {
3663 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3664 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
3665 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3666 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3667 RelocInfo* info = it.rinfo();
3668 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3669 if (target->is_inline_cache_stub() && target->kind() == kind) {
// Asserts that the IC slot |ic_slot_index| in |f|'s feedback vector is in
// the given |desired_state| (e.g. MONOMORPHIC, POLYMORPHIC).
3677 static void CheckVectorIC(Handle<JSFunction> f, int ic_slot_index,
3678 InlineCacheState desired_state) {
3679 Handle<TypeFeedbackVector> vector =
3680 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3681 FeedbackVectorICSlot slot(ic_slot_index);
3682 LoadICNexus nexus(vector, slot);
3683 CHECK(nexus.StateFromFeedback() == desired_state);
// Asserts that the IC slot |ic_slot_index| in |f|'s feedback vector has
// been cleared (no remaining type feedback).
3687 static void CheckVectorICCleared(Handle<JSFunction> f, int ic_slot_index) {
3688 Handle<TypeFeedbackVector> vector =
3689 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3690 FeedbackVectorICSlot slot(ic_slot_index);
3691 LoadICNexus nexus(vector, slot);
3692 CHECK(IC::IsCleared(&nexus));
// Verifies that a constructor call IC's WeakCell (same native context)
// survives incremental marking + full GC intact.
3696 TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
3697 if (i::FLAG_always_opt) return;
3698 CcTest::InitializeVM();
3699 v8::HandleScope scope(CcTest::isolate());
3701 // Prepare function f that contains a monomorphic IC for object
3702 // originating from the same native context.
3704 "function fun() { this.x = 1; };"
3705 "function f(o) { return new o(); } f(fun); f(fun);");
3706 Handle<JSFunction> f = v8::Utils::OpenHandle(
3707 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3710 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3711 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3713 SimulateIncrementalMarking(CcTest::heap());
3714 CcTest::heap()->CollectAllGarbage();
// The WeakCell must still be present after the GC.
3716 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
// Verifies that a constructor call IC referencing a function from a
// DISPOSED native context is cleared back to the uninitialized sentinel by
// incremental marking + full GC.
3720 TEST(IncrementalMarkingClearsMonomorphicConstructor) {
3721 if (i::FLAG_always_opt) return;
3722 CcTest::InitializeVM();
3723 Isolate* isolate = CcTest::i_isolate();
3724 v8::HandleScope scope(CcTest::isolate());
3725 v8::Local<v8::Value> fun1;
// Create the constructor in a separate context that will be disposed.
3729 CompileRun("function fun() { this.x = 1; };");
3730 fun1 = env->Global()->Get(v8_str("fun"));
3733 // Prepare function f that contains a monomorphic constructor for object
3734 // originating from a different native context.
3735 CcTest::global()->Set(v8_str("fun1"), fun1);
3737 "function fun() { this.x = 1; };"
3738 "function f(o) { return new o(); } f(fun1); f(fun1);");
3739 Handle<JSFunction> f = v8::Utils::OpenHandle(
3740 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3743 Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
3744 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3746 // Fire context dispose notification.
3747 CcTest::isolate()->ContextDisposedNotification();
3748 SimulateIncrementalMarking(CcTest::heap());
3749 CcTest::heap()->CollectAllGarbage();
// Feedback must be reset to the uninitialized sentinel.
3751 CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
3752 vector->Get(FeedbackVectorSlot(0)));
// Verifies that a monomorphic LoadIC (receiver from the same native
// context) keeps its state across incremental marking + full GC.
3756 TEST(IncrementalMarkingPreservesMonomorphicIC) {
3757 if (i::FLAG_always_opt) return;
3758 CcTest::InitializeVM();
3759 v8::HandleScope scope(CcTest::isolate());
3761 // Prepare function f that contains a monomorphic IC for object
3762 // originating from the same native context.
3763 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3764 "function f(o) { return o.x; } f(obj); f(obj);");
3765 Handle<JSFunction> f =
3766 v8::Utils::OpenHandle(
3767 *v8::Handle<v8::Function>::Cast(
3768 CcTest::global()->Get(v8_str("f"))));
3770 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3771 CheckVectorIC(f, 0, MONOMORPHIC);
3772 CHECK(ic_before->ic_state() == DEFAULT);
3774 SimulateIncrementalMarking(CcTest::heap());
3775 CcTest::heap()->CollectAllGarbage();
// The vector IC must remain monomorphic after GC.
3777 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3778 CheckVectorIC(f, 0, MONOMORPHIC);
3779 CHECK(ic_after->ic_state() == DEFAULT);
// Verifies that a monomorphic LoadIC whose receiver map comes from a
// DISPOSED native context is cleared by incremental marking + full GC.
3783 TEST(IncrementalMarkingClearsMonomorphicIC) {
3784 if (i::FLAG_always_opt) return;
3785 CcTest::InitializeVM();
3786 v8::HandleScope scope(CcTest::isolate());
3787 v8::Local<v8::Value> obj1;
// Create the receiver in a separate context that will be disposed.
3791 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3792 obj1 = env->Global()->Get(v8_str("obj"));
3795 // Prepare function f that contains a monomorphic IC for object
3796 // originating from a different native context.
3797 CcTest::global()->Set(v8_str("obj1"), obj1);
3798 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
3799 Handle<JSFunction> f = v8::Utils::OpenHandle(
3800 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3802 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3803 CheckVectorIC(f, 0, MONOMORPHIC);
3804 CHECK(ic_before->ic_state() == DEFAULT);
3806 // Fire context dispose notification.
3807 CcTest::isolate()->ContextDisposedNotification();
3808 SimulateIncrementalMarking(CcTest::heap());
3809 CcTest::heap()->CollectAllGarbage();
// Feedback for the disposed context must be gone.
3811 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3812 CheckVectorICCleared(f, 0);
3813 CHECK(ic_after->ic_state() == DEFAULT);
// Verifies that a polymorphic LoadIC (receivers from two live native
// contexts) keeps its state across incremental marking + full GC.
3817 TEST(IncrementalMarkingPreservesPolymorphicIC) {
3818 if (i::FLAG_always_opt) return;
3819 CcTest::InitializeVM();
3820 v8::HandleScope scope(CcTest::isolate());
3821 v8::Local<v8::Value> obj1, obj2;
// Create two receivers, each in its own context.
3825 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3826 obj1 = env->Global()->Get(v8_str("obj"));
3831 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3832 obj2 = env->Global()->Get(v8_str("obj"));
3835 // Prepare function f that contains a polymorphic IC for objects
3836 // originating from two different native contexts.
3837 CcTest::global()->Set(v8_str("obj1"), obj1);
3838 CcTest::global()->Set(v8_str("obj2"), obj2);
3839 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3840 Handle<JSFunction> f = v8::Utils::OpenHandle(
3841 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3843 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3844 CheckVectorIC(f, 0, POLYMORPHIC);
3845 CHECK(ic_before->ic_state() == DEFAULT);
// No dispose notification here — both contexts stay live, so the IC
// should be preserved.
3847 // Fire context dispose notification.
3848 SimulateIncrementalMarking(CcTest::heap());
3849 CcTest::heap()->CollectAllGarbage();
3851 Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3852 CheckVectorIC(f, 0, POLYMORPHIC);
3853 CHECK(ic_after->ic_state() == DEFAULT);
// Verifies that a polymorphic LoadIC with receivers from DISPOSED native
// contexts is cleared by incremental marking + full GC.
3857 TEST(IncrementalMarkingClearsPolymorphicIC) {
3858 if (i::FLAG_always_opt) return;
3859 CcTest::InitializeVM();
3860 v8::HandleScope scope(CcTest::isolate());
3861 v8::Local<v8::Value> obj1, obj2;
// Create two receivers, each in its own context.
3865 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3866 obj1 = env->Global()->Get(v8_str("obj"));
3871 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3872 obj2 = env->Global()->Get(v8_str("obj"));
3875 // Prepare function f that contains a polymorphic IC for objects
3876 // originating from two different native contexts.
3877 CcTest::global()->Set(v8_str("obj1"), obj1);
3878 CcTest::global()->Set(v8_str("obj2"), obj2);
3879 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3880 Handle<JSFunction> f = v8::Utils::OpenHandle(
3881 *v8::Handle<v8::Function>::Cast(CcTest::global()->Get(v8_str("f"))));
3883 Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
3884 CheckVectorIC(f, 0, POLYMORPHIC);
3885 CHECK(ic_before->ic_state() == DEFAULT);
3887 // Fire context dispose notification.
3888 CcTest::isolate()->ContextDisposedNotification();
3889 SimulateIncrementalMarking(CcTest::heap());
3890 CcTest::heap()->CollectAllGarbage();
3892 CheckVectorICCleared(f, 0);
3893 CHECK(ic_before->ic_state() == DEFAULT);
// External one-byte string resource that records its own disposal:
// Dispose() frees the character data (and, per the original file, nulls
// data_ — that statement falls in a listing gap), so IsDisposed() lets a
// test observe when the GC has collected the external string.
// NOTE(review): access specifiers, data members (data_, length_) and
// closing braces are missing from this gapped listing.
3897 class SourceResource : public v8::String::ExternalOneByteStringResource {
3899 explicit SourceResource(const char* data)
3900 : data_(data), length_(strlen(data)) { }
3902 virtual void Dispose() {
3903 i::DeleteArray(data_);
3907 const char* data() const { return data_; }
3909 size_t length() const { return length_; }
3911 bool IsDisposed() { return data_ == NULL; }
// Runs `source` (which records an Error) from an external string and checks
// that the stack-trace data retained by the Error is released only after the
// `accessor` expression (error.stack getter or setter) has been evaluated.
// Release is observed via SourceResource::IsDisposed().
// NOTE(review): the two identically named HandleScope declarations below are
// an artifact of a missing inner `{ ... }` block in this gapped listing.
3919 void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
3920 const char* accessor) {
3921 // Test that the data retained by the Error.stack accessor is released
3922 // after the first time the accessor is fired. We use external string
3923 // to check whether the data is being released since the external string
3924 // resource's callback is fired when the external string is GC'ed.
3925 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3926 v8::HandleScope scope(isolate);
3927 SourceResource* resource = new SourceResource(i::StrDup(source));
3929 v8::HandleScope scope(isolate);
3930 v8::Handle<v8::String> source_string =
3931 v8::String::NewExternal(isolate, resource);
3932 i_isolate->heap()->CollectAllAvailableGarbage();
3933 v8::Script::Compile(source_string)->Run();
3934 CHECK(!resource->IsDisposed());
// The source is still referenced by the retained stack-trace data, so even
// after a full GC the external resource must not have been disposed yet.
3936 // i_isolate->heap()->CollectAllAvailableGarbage();
3937 CHECK(!resource->IsDisposed());
3939 CompileRun(accessor);
3940 i_isolate->heap()->CollectAllAvailableGarbage();
3942 // External source has been released.
3943 CHECK(resource->IsDisposed());
// Exercises ReleaseStackTraceDataTest with four error-producing sources
// (normal error, stack overflow, and both used as a prototype) combined
// with the error.stack getter and setter.
// NOTE(review): the early `return;` and closing brace of the FLAG_always_opt
// guard, plus several string-continuation lines, fall in listing gaps.
3948 UNINITIALIZED_TEST(ReleaseStackTraceData) {
3949 if (i::FLAG_always_opt) {
3950 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
3951 // See: https://codereview.chromium.org/181833004/
3954 FLAG_use_ic = false; // ICs retain objects.
3955 FLAG_concurrent_recompilation = false;
3956 v8::Isolate::CreateParams create_params;
3957 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
3958 v8::Isolate* isolate = v8::Isolate::New(create_params);
3960 v8::Isolate::Scope isolate_scope(isolate);
3961 v8::HandleScope handle_scope(isolate);
3962 v8::Context::New(isolate)->Enter();
3963 static const char* source1 = "var error = null; "
3964 /* Normal Error */ "try { "
3965 " throw new Error(); "
3969 static const char* source2 = "var error = null; "
3970 /* Stack overflow */ "try { "
3971 " (function f() { f(); })(); "
3975 static const char* source3 = "var error = null; "
3976 /* Normal Error */ "try { "
3977 /* as prototype */ " throw new Error(); "
3980 " error.__proto__ = e; "
3982 static const char* source4 = "var error = null; "
3983 /* Stack overflow */ "try { "
3984 /* as prototype */ " (function f() { f(); })(); "
3987 " error.__proto__ = e; "
3989 static const char* getter = "error.stack";
3990 static const char* setter = "error.stack = 0";
3992 ReleaseStackTraceDataTest(isolate, source1, setter);
3993 ReleaseStackTraceDataTest(isolate, source2, setter);
3994 // We do not test source3 and source4 with setter, since the setter is
3995 // supposed to (untypically) write to the receiver, not the holder. This is
3996 // to emulate the behavior of a data property.
3998 ReleaseStackTraceDataTest(isolate, source1, getter);
3999 ReleaseStackTraceDataTest(isolate, source2, getter);
4000 ReleaseStackTraceDataTest(isolate, source3, getter);
4001 ReleaseStackTraceDataTest(isolate, source4, getter);
// Regression test: optimized code held alive by a handle must not leave its
// flushed unoptimized counterpart in a state that crashes the deoptimizer.
// Ages g's code past the flushing threshold, keeps f's optimized code live
// via an escaped handle, then deoptimizes g by calling it with a string.
4007 TEST(Regress159140) {
4008 i::FLAG_allow_natives_syntax = true;
4009 i::FLAG_flush_code_incrementally = true;
4010 CcTest::InitializeVM();
4011 Isolate* isolate = CcTest::i_isolate();
4012 Heap* heap = isolate->heap();
4013 HandleScope scope(isolate);
4015 // Perform one initial GC to enable code flushing.
4016 heap->CollectAllGarbage();
4018 // Prepare several closures that are all eligible for code flushing
4019 // because all reachable ones are not optimized. Make sure that the
4020 // optimized code object is directly reachable through a handle so
4021 // that it is marked black during incremental marking.
// NOTE(review): the declaration of `code` (Handle<Code>) and the opening
// brace of this inner block fall in a listing gap.
4024 HandleScope inner_scope(isolate);
4025 CompileRun("function h(x) {}"
4026 "function mkClosure() {"
4027 " return function(x) { return x + 1; };"
4029 "var f = mkClosure();"
4030 "var g = mkClosure();"
4034 "%OptimizeFunctionOnNextCall(f); f(3);"
4035 "%OptimizeFunctionOnNextCall(h); h(3);");
4037 Handle<JSFunction> f =
4038 v8::Utils::OpenHandle(
4039 *v8::Handle<v8::Function>::Cast(
4040 CcTest::global()->Get(v8_str("f"))));
4041 CHECK(f->is_compiled());
4042 CompileRun("f = null;");
4044 Handle<JSFunction> g =
4045 v8::Utils::OpenHandle(
4046 *v8::Handle<v8::Function>::Cast(
4047 CcTest::global()->Get(v8_str("g"))));
4048 CHECK(g->is_compiled());
4049 const int kAgingThreshold = 6;
4050 for (int i = 0; i < kAgingThreshold; i++) {
4051 g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4054 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
4057 // Simulate incremental marking so that the functions are enqueued as
4058 // code flushing candidates. Then optimize one function. Finally
4059 // finish the GC to complete code flushing.
4060 SimulateIncrementalMarking(heap);
4061 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
4062 heap->CollectAllGarbage();
4064 // Unoptimized code is missing and the deoptimizer will go ballistic.
4065 CompileRun("g('bozo');");
// Regression test: flushing unoptimized code that is still cached in the
// optimized code map must not crash the deoptimizer when a new closure is
// later created from that map.
4069 TEST(Regress165495) {
4070 i::FLAG_allow_natives_syntax = true;
4071 i::FLAG_flush_code_incrementally = true;
4072 CcTest::InitializeVM();
4073 Isolate* isolate = CcTest::i_isolate();
4074 Heap* heap = isolate->heap();
4075 HandleScope scope(isolate);
4077 // Perform one initial GC to enable code flushing.
4078 heap->CollectAllGarbage();
4080 // Prepare an optimized closure that the optimized code map will get
4081 // populated. Then age the unoptimized code to trigger code flushing
4082 // but make sure the optimized code is unreachable.
4084 HandleScope inner_scope(isolate);
4085 CompileRun("function mkClosure() {"
4086 " return function(x) { return x + 1; };"
4088 "var f = mkClosure();"
4090 "%OptimizeFunctionOnNextCall(f); f(3);");
4092 Handle<JSFunction> f =
4093 v8::Utils::OpenHandle(
4094 *v8::Handle<v8::Function>::Cast(
4095 CcTest::global()->Get(v8_str("f"))));
4096 CHECK(f->is_compiled());
4097 const int kAgingThreshold = 6;
4098 for (int i = 0; i < kAgingThreshold; i++) {
// Alternate marking parity while aging so the code passes the threshold.
4099 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4102 CompileRun("f = null;");
4105 // Simulate incremental marking so that unoptimized code is flushed
4106 // even though it still is cached in the optimized code map.
4107 SimulateIncrementalMarking(heap);
4108 heap->CollectAllGarbage();
4110 // Make a new closure that will get code installed from the code map.
4111 // Unoptimized code is missing and the deoptimizer will go ballistic.
4112 CompileRun("var g = mkClosure(); g('bozo');");
// Regression test: the code-flushing candidate list must stay consistent
// when one candidate's unoptimized code is replaced by optimization while
// another candidate forms a dangling tail of the list.
4116 TEST(Regress169209) {
4117 i::FLAG_stress_compaction = false;
4118 i::FLAG_allow_natives_syntax = true;
4119 i::FLAG_flush_code_incrementally = true;
4121 CcTest::InitializeVM();
4122 Isolate* isolate = CcTest::i_isolate();
4123 Heap* heap = isolate->heap();
4124 HandleScope scope(isolate);
4126 // Perform one initial GC to enable code flushing.
4127 heap->CollectAllGarbage();
4129 // Prepare a shared function info eligible for code flushing for which
4130 // the unoptimized code will be replaced during optimization.
4131 Handle<SharedFunctionInfo> shared1;
4133 HandleScope inner_scope(isolate);
4134 CompileRun("function f() { return 'foobar'; }"
4135 "function g(x) { if (x) f(); }"
4140 Handle<JSFunction> f =
4141 v8::Utils::OpenHandle(
4142 *v8::Handle<v8::Function>::Cast(
4143 CcTest::global()->Get(v8_str("f"))));
4144 CHECK(f->is_compiled());
4145 const int kAgingThreshold = 6;
4146 for (int i = 0; i < kAgingThreshold; i++) {
4147 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4150 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4153 // Prepare a shared function info eligible for code flushing that will
4154 // represent the dangling tail of the candidate list.
4155 Handle<SharedFunctionInfo> shared2;
4157 HandleScope inner_scope(isolate);
4158 CompileRun("function flushMe() { return 0; }"
4161 Handle<JSFunction> f =
4162 v8::Utils::OpenHandle(
4163 *v8::Handle<v8::Function>::Cast(
4164 CcTest::global()->Get(v8_str("flushMe"))));
4165 CHECK(f->is_compiled());
4166 const int kAgingThreshold = 6;
4167 for (int i = 0; i < kAgingThreshold; i++) {
4168 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4171 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
4174 // Simulate incremental marking and collect code flushing candidates.
4175 SimulateIncrementalMarking(heap);
// Non-null gc_metadata indicates shared1 is enqueued as a flush candidate.
4176 CHECK(shared1->code()->gc_metadata() != NULL);
4178 // Optimize function and make sure the unoptimized code is replaced.
4182 CompileRun("%OptimizeFunctionOnNextCall(g);"
4185 // Finish garbage collection cycle.
4186 heap->CollectAllGarbage();
4187 CHECK(shared1->code()->gc_metadata() == NULL);
// Regression test: fills new-space so that the AllocationMemento that would
// follow a JSArray literal cannot be allocated, then verifies that running
// code which expects the memento does not read past the filler object.
4191 TEST(Regress169928) {
4192 i::FLAG_allow_natives_syntax = true;
4193 i::FLAG_crankshaft = false;
4194 CcTest::InitializeVM();
4195 Isolate* isolate = CcTest::i_isolate();
4196 Factory* factory = isolate->factory();
4197 v8::HandleScope scope(CcTest::isolate());
4199 // Some flags turn Scavenge collections into Mark-sweep collections
4200 // and hence are incompatible with this test case.
4201 if (FLAG_gc_global || FLAG_stress_compaction) return;
4203 // Prepare the environment
4204 CompileRun("function fastliteralcase(literal, value) {"
4205 " literal[0] = value;"
4208 "function get_standard_literal() {"
4209 " var literal = [1, 2, 3];"
4212 "obj = fastliteralcase(get_standard_literal(), 1);"
4213 "obj = fastliteralcase(get_standard_literal(), 1.5);"
4214 "obj = fastliteralcase(get_standard_literal(), 2);");
// Prepare the script to run later against the hand-allocated array.
4217 v8::Local<v8::String> mote_code_string =
4218 v8_str("fastliteralcase(mote, 2.5);");
4220 v8::Local<v8::String> array_name = v8_str("mote");
4221 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
4223 // First make sure we flip spaces
4224 CcTest::heap()->CollectGarbage(NEW_SPACE);
4226 // Allocate the object.
4227 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
4228 array_data->set(0, Smi::FromInt(1));
4229 array_data->set(1, Smi::FromInt(2));
// Leave just enough room for the array + memento (rest of expression is in
// a listing gap).
4231 AllocateAllButNBytes(CcTest::heap()->new_space(),
4232 JSArray::kSize + AllocationMemento::kSize +
4235 Handle<JSArray> array =
4236 factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);
4238 CHECK_EQ(Smi::FromInt(2), array->length());
4239 CHECK(array->HasFastSmiOrObjectElements());
4241 // We need filler the size of AllocationMemento object, plus an extra
4242 // fill pointer value.
4243 HeapObject* obj = NULL;
4244 AllocationResult allocation =
4245 CcTest::heap()->new_space()->AllocateRawUnaligned(
4246 AllocationMemento::kSize + kPointerSize);
4247 CHECK(allocation.To(&obj));
4248 Address addr_obj = obj->address();
4249 CcTest::heap()->CreateFillerObjectAt(
4250 addr_obj, AllocationMemento::kSize + kPointerSize);
4252 // Give the array a name, making sure not to allocate strings.
4253 v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
4254 CcTest::global()->Set(array_name, array_obj);
4256 // This should crash with a protection violation if we are running a build
// (comment continues in a listing gap — presumably "...with the bug").
4258 AlwaysAllocateScope aa_scope(isolate);
4259 v8::Script::Compile(mote_code_string)->Run();
// Regression test: a function enqueued as a code-flushing candidate and then
// optimized (taken off the candidate list) must not corrupt the heap when
// its code lives on an evacuation candidate and full GCs run afterwards.
4263 TEST(Regress168801) {
4264 if (i::FLAG_never_compact) return;
4265 i::FLAG_always_compact = true;
4266 i::FLAG_cache_optimized_code = false;
4267 i::FLAG_allow_natives_syntax = true;
4268 i::FLAG_flush_code_incrementally = true;
4269 CcTest::InitializeVM();
4270 Isolate* isolate = CcTest::i_isolate();
4271 Heap* heap = isolate->heap();
4272 HandleScope scope(isolate);
4274 // Perform one initial GC to enable code flushing.
4275 heap->CollectAllGarbage();
4277 // Ensure the code ends up on an evacuation candidate.
4278 SimulateFullSpace(heap->code_space());
4280 // Prepare an unoptimized function that is eligible for code flushing.
4281 Handle<JSFunction> function;
4283 HandleScope inner_scope(isolate);
4284 CompileRun("function mkClosure() {"
4285 " return function(x) { return x + 1; };"
4287 "var f = mkClosure();"
4290 Handle<JSFunction> f =
4291 v8::Utils::OpenHandle(
4292 *v8::Handle<v8::Function>::Cast(
4293 CcTest::global()->Get(v8_str("f"))));
4294 CHECK(f->is_compiled());
4295 const int kAgingThreshold = 6;
4296 for (int i = 0; i < kAgingThreshold; i++) {
4297 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4300 function = inner_scope.CloseAndEscape(handle(*f, isolate));
4303 // Simulate incremental marking so that unoptimized function is enqueued as a
4304 // candidate for code flushing. The shared function info however will not be
4305 // explicitly enqueued.
4306 SimulateIncrementalMarking(heap);
4308 // Now optimize the function so that it is taken off the candidate list.
4310 HandleScope inner_scope(isolate);
4311 CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
4314 // This cycle will bust the heap and subsequent cycles will go ballistic.
4315 heap->CollectAllGarbage();
4316 heap->CollectAllGarbage();
// Regression test: same setup as Regress168801, but instead of optimizing
// the candidate, the debugger is loaded (which disables code flushing) —
// the subsequent GC cycles must still leave the heap consistent.
4320 TEST(Regress173458) {
4321 if (i::FLAG_never_compact) return;
4322 i::FLAG_always_compact = true;
4323 i::FLAG_cache_optimized_code = false;
4324 i::FLAG_allow_natives_syntax = true;
4325 i::FLAG_flush_code_incrementally = true;
4326 CcTest::InitializeVM();
4327 Isolate* isolate = CcTest::i_isolate();
4328 Heap* heap = isolate->heap();
4329 HandleScope scope(isolate);
4331 // Perform one initial GC to enable code flushing.
4332 heap->CollectAllGarbage();
4334 // Ensure the code ends up on an evacuation candidate.
4335 SimulateFullSpace(heap->code_space());
4337 // Prepare an unoptimized function that is eligible for code flushing.
4338 Handle<JSFunction> function;
4340 HandleScope inner_scope(isolate);
4341 CompileRun("function mkClosure() {"
4342 " return function(x) { return x + 1; };"
4344 "var f = mkClosure();"
4347 Handle<JSFunction> f =
4348 v8::Utils::OpenHandle(
4349 *v8::Handle<v8::Function>::Cast(
4350 CcTest::global()->Get(v8_str("f"))));
4351 CHECK(f->is_compiled());
4352 const int kAgingThreshold = 6;
4353 for (int i = 0; i < kAgingThreshold; i++) {
4354 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4357 function = inner_scope.CloseAndEscape(handle(*f, isolate));
4360 // Simulate incremental marking so that unoptimized function is enqueued as a
4361 // candidate for code flushing. The shared function info however will not be
4362 // explicitly enqueued.
4363 SimulateIncrementalMarking(heap);
4365 // Now enable the debugger which in turn will disable code flushing.
4366 CHECK(isolate->debug()->Load());
4368 // This cycle will bust the heap and subsequent cycles will go ballistic.
4369 heap->CollectAllGarbage();
4370 heap->CollectAllGarbage();
// No-op object visitor; used by the DeferredHandles test below merely to
// drive HandleScopeImplementer::Iterate without touching any pointers.
4374 class DummyVisitor : public ObjectVisitor {
4376 void VisitPointers(Object** start, Object** end) { }
// Fills the current handle block to its limit, opens a DeferredHandleScope,
// and iterates the handle-scope implementer — exercising handle iteration
// exactly at the block boundary.
4380 TEST(DeferredHandles) {
4381 CcTest::InitializeVM();
4382 Isolate* isolate = CcTest::i_isolate();
4383 Heap* heap = isolate->heap();
4384 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
4385 HandleScopeData* data = isolate->handle_scope_data();
4386 Handle<Object> init(heap->empty_string(), isolate);
4387 while (data->next < data->limit) {
// Keep allocating handles until the current block is exactly full.
4388 Handle<Object> obj(heap->empty_string(), isolate);
4390 // An entire block of handles has been filled.
4391 // Next handle would require a new block.
4392 DCHECK(data->next == data->limit);
4394 DeferredHandleScope deferred(isolate);
4395 DummyVisitor visitor;
4396 isolate->handle_scope_implementer()->Iterate(&visitor);
4397 delete deferred.Detach();
// Allocates a ~10M-element array and checks that a single 100 MB incremental
// marking step is enough to (nearly) complete marking of the large object.
4401 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
4402 CcTest::InitializeVM();
4403 v8::HandleScope scope(CcTest::isolate());
4404 CompileRun("function f(n) {"
4405 " var a = new Array(n);"
4406 " for (var i = 0; i < n; i += 100) a[i] = i;"
4408 "f(10 * 1024 * 1024);");
4409 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4410 if (marking->IsStopped()) marking->Start(Heap::kNoGCFlags);
4411 // This big step should be sufficient to mark the whole array.
4412 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
4413 DCHECK(marking->IsComplete() ||
4414 marking->IsReadyToOverApproximateWeakClosure());
// Runs the same allocating test function with inline allocation enabled,
// disabled, and re-enabled, verifying the toggle does not break execution.
4418 TEST(DisableInlineAllocation) {
4419 i::FLAG_allow_natives_syntax = true;
4420 CcTest::InitializeVM();
4421 v8::HandleScope scope(CcTest::isolate());
4422 CompileRun("function test() {"
4424 " for (var i = 0; i < 10; i++) {"
4425 " x[i] = [ {}, [1,2,3], [1,x,3] ];"
4429 " %OptimizeFunctionOnNextCall(test);"
4431 " %DeoptimizeFunction(test);"
4434 // Warm-up with inline allocation enabled.
4435 CompileRun("test(); test(); run();")
4437 // Run test with inline allocation disabled.
4438 CcTest::heap()->DisableInlineAllocation();
4439 CompileRun("run()");
4441 // Run test with inline allocation re-enabled.
4442 CcTest::heap()->EnableInlineAllocation();
4443 CompileRun("run()");
// Counts the entries in the heap's weak allocation-sites list by walking
// weak_next links until the undefined sentinel.
// NOTE(review): the counter declaration/increment and return statement fall
// in a listing gap.
4447 static int AllocationSitesCount(Heap* heap) {
4449 for (Object* site = heap->allocation_sites_list();
4450 !(site->IsUndefined());
4451 site = AllocationSite::cast(site)->weak_next()) {
// Verifies that optimized code registered in an AllocationSite's
// dependent_code() is held via a WeakCell and that the cell is cleared by
// GC once the function is unreachable, while the site itself (kept alive by
// a global handle) survives.
4458 TEST(EnsureAllocationSiteDependentCodesProcessed) {
4459 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4460 i::FLAG_allow_natives_syntax = true;
4461 CcTest::InitializeVM();
4462 Isolate* isolate = CcTest::i_isolate();
4463 v8::internal::Heap* heap = CcTest::heap();
4464 GlobalHandles* global_handles = isolate->global_handles();
4466 if (!isolate->use_crankshaft()) return;
4468 // The allocation site at the head of the list is ours.
4469 Handle<AllocationSite> site;
4471 LocalContext context;
4472 v8::HandleScope scope(context->GetIsolate());
4474 int count = AllocationSitesCount(heap);
4475 CompileRun("var bar = function() { return (new Array()); };"
4480 // One allocation site should have been created.
4481 int new_count = AllocationSitesCount(heap);
4482 CHECK_EQ(new_count, (count + 1));
// Pin the newly created site with a global handle so it outlives the GCs.
4483 site = Handle<AllocationSite>::cast(
4484 global_handles->Create(
4485 AllocationSite::cast(heap->allocation_sites_list())));
4487 CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
4489 DependentCode::GroupStartIndexes starts(site->dependent_code());
4490 CHECK_GE(starts.number_of_entries(), 1);
4491 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4492 CHECK(site->dependent_code()->object_at(index)->IsWeakCell());
4493 Code* function_bar = Code::cast(
4494 WeakCell::cast(site->dependent_code()->object_at(index))->value());
4495 Handle<JSFunction> bar_handle =
4496 v8::Utils::OpenHandle(
4497 *v8::Handle<v8::Function>::Cast(
4498 CcTest::global()->Get(v8_str("bar"))));
4499 CHECK_EQ(bar_handle->code(), function_bar);
4502 // Now make sure that a gc should get rid of the function, even though we
4503 // still have the allocation site alive.
4504 for (int i = 0; i < 4; i++) {
4505 heap->CollectAllGarbage();
4508 // The site still exists because of our global handle, but the code is no
4509 // longer referred to by dependent_code().
4510 DependentCode::GroupStartIndexes starts(site->dependent_code());
4511 int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
4512 CHECK(site->dependent_code()->object_at(index)->IsWeakCell() &&
4513 WeakCell::cast(site->dependent_code()->object_at(index))->cleared());
// Verifies that property cells embedded in optimized code are weak: once the
// function is otherwise unreachable, GC marks the escaped code object for
// deoptimization instead of keeping the cells alive.
4517 TEST(CellsInOptimizedCodeAreWeak) {
4518 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4519 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4520 i::FLAG_allow_natives_syntax = true;
4521 CcTest::InitializeVM();
4522 Isolate* isolate = CcTest::i_isolate();
4523 v8::internal::Heap* heap = CcTest::heap();
4525 if (!isolate->use_crankshaft()) return;
4526 HandleScope outer_scope(heap->isolate());
// NOTE(review): the declaration of `code` (Handle<Code>) falls in a listing
// gap just before this inner scope.
4529 LocalContext context;
4530 HandleScope scope(heap->isolate());
4532 CompileRun("bar = (function() {"
4536 " var foo = function(x) { with (x) { return 1 + x; } };"
4540 " %OptimizeFunctionOnNextCall(bar);"
4542 " return bar;})();");
4544 Handle<JSFunction> bar =
4545 v8::Utils::OpenHandle(
4546 *v8::Handle<v8::Function>::Cast(
4547 CcTest::global()->Get(v8_str("bar"))));
4548 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4551 // Now make sure that a gc should get rid of the function
4552 for (int i = 0; i < 4; i++) {
4553 heap->CollectAllGarbage();
4556 DCHECK(code->marked_for_deoptimization());
// Same as CellsInOptimizedCodeAreWeak, but for heap objects embedded in
// optimized code rather than property cells.
4560 TEST(ObjectsInOptimizedCodeAreWeak) {
4561 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4562 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4563 i::FLAG_allow_natives_syntax = true;
4564 CcTest::InitializeVM();
4565 Isolate* isolate = CcTest::i_isolate();
4566 v8::internal::Heap* heap = CcTest::heap();
4568 if (!isolate->use_crankshaft()) return;
4569 HandleScope outer_scope(heap->isolate());
4572 LocalContext context;
4573 HandleScope scope(heap->isolate());
4575 CompileRun("function bar() {"
4578 "function foo(x) { with (x) { return 1 + x; } };"
4582 "%OptimizeFunctionOnNextCall(bar);"
4585 Handle<JSFunction> bar =
4586 v8::Utils::OpenHandle(
4587 *v8::Handle<v8::Function>::Cast(
4588 CcTest::global()->Get(v8_str("bar"))));
4589 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4592 // Now make sure that a gc should get rid of the function
4593 for (int i = 0; i < 4; i++) {
4594 heap->CollectAllGarbage();
4597 DCHECK(code->marked_for_deoptimization());
// Repeatedly compiles and optimizes throwaway functions under simulated
// incremental marking, then checks that the weak object-to-code hash table
// retains no entries (i.e. the weak table does not leak).
4601 TEST(NoWeakHashTableLeakWithIncrementalMarking) {
4602 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4603 if (!i::FLAG_incremental_marking) return;
4604 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4605 i::FLAG_allow_natives_syntax = true;
4606 i::FLAG_compilation_cache = false;
4607 i::FLAG_retain_maps_for_n_gc = 0;
4608 CcTest::InitializeVM();
4609 Isolate* isolate = CcTest::i_isolate();
4611 // Do not run for no-snap builds.
4612 if (!i::Snapshot::HaveASnapshotToStartFrom(isolate)) return;
4614 v8::internal::Heap* heap = CcTest::heap();
4616 // Get a clean slate regarding optimized functions on the heap.
4617 i::Deoptimizer::DeoptimizeAll(isolate);
4618 heap->CollectAllGarbage();
4620 if (!isolate->use_crankshaft()) return;
4621 HandleScope outer_scope(heap->isolate());
4622 for (int i = 0; i < 3; i++) {
4623 SimulateIncrementalMarking(heap);
4625 LocalContext context;
4626 HandleScope scope(heap->isolate());
4627 EmbeddedVector<char, 256> source;
// The %d placeholders give each iteration distinct bar<i>/foo<i> names so
// the compilation cache (disabled anyway) cannot reuse them.
4629 "function bar%d() {"
4632 "function foo%d(x) { with (x) { return 1 + x; } };"
4636 "%%OptimizeFunctionOnNextCall(bar%d);"
4638 i, i, i, i, i, i, i, i);
4639 CompileRun(source.start());
4641 heap->CollectAllGarbage();
// NOTE(review): the declaration of `elements` falls in a listing gap.
4644 if (heap->weak_object_to_code_table()->IsHashTable()) {
4645 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
4646 elements = t->NumberOfElements();
4648 CHECK_EQ(0, elements);
// Compiles, runs, and force-optimizes a trivial function with the given
// global name, returning a handle to the resulting JSFunction.
// NOTE(review): the final `return fun;` falls in a listing gap.
4652 static Handle<JSFunction> OptimizeDummyFunction(const char* name) {
4653 EmbeddedVector<char, 256> source;
4655 "function %s() { return 0; }"
4657 "%%OptimizeFunctionOnNextCall(%s);"
4658 "%s();", name, name, name, name, name);
4659 CompileRun(source.start());
4660 Handle<JSFunction> fun =
4661 v8::Utils::OpenHandle(
4662 *v8::Handle<v8::Function>::Cast(
4663 CcTest::global()->Get(v8_str(name))));
// Walks next_code_link() from `code` and counts how many Code objects follow
// it in the chain.
// NOTE(review): the counter declaration/increment and return statement fall
// in a listing gap.
4668 static int GetCodeChainLength(Code* code) {
4670 while (code->next_code_link()->IsCode()) {
4672 code = Code::cast(code->next_code_link());
// Verifies that next_code_link is a weak link: dropping all strong
// references to "mortal"'s code shortens the code chain reachable from the
// retained "immortal" code by exactly one after GC.
4678 TEST(NextCodeLinkIsWeak) {
4679 i::FLAG_always_opt = false;
4680 i::FLAG_allow_natives_syntax = true;
4681 CcTest::InitializeVM();
4682 Isolate* isolate = CcTest::i_isolate();
4683 v8::internal::Heap* heap = CcTest::heap();
4685 if (!isolate->use_crankshaft()) return;
4686 HandleScope outer_scope(heap->isolate());
// NOTE(review): the declaration of `code` (Handle<Code>) falls in a listing
// gap near here.
4688 heap->CollectAllAvailableGarbage();
4689 int code_chain_length_before, code_chain_length_after;
4691 HandleScope scope(heap->isolate());
4692 Handle<JSFunction> mortal = OptimizeDummyFunction("mortal");
4693 Handle<JSFunction> immortal = OptimizeDummyFunction("immortal");
4694 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
4695 code_chain_length_before = GetCodeChainLength(immortal->code());
4696 // Keep the immortal code and let the mortal code die.
4697 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
4698 CompileRun("mortal = null; immortal = null;");
4700 heap->CollectAllAvailableGarbage();
4701 // Now mortal code should be dead.
4702 code_chain_length_after = GetCodeChainLength(*code);
4703 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
// Assembles a minimal code object flagged as OPTIMIZED_FUNCTION, suitable
// for splicing into a context's optimized-code list in tests.
// NOTE(review): the CodeDesc declaration and final `return code;` fall in
// listing gaps.
4707 static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
4708 i::byte buffer[i::Assembler::kMinimalBufferSize];
4709 MacroAssembler masm(isolate, buffer, sizeof(buffer));
4711 masm.Push(isolate->factory()->undefined_value());
4713 masm.GetCode(&desc);
4714 Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
4715 Handle<Code> code = isolate->factory()->NewCode(
4716 desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
4717 CHECK(code->IsCode());
// Like NextCodeLinkIsWeak, but splices two hand-built dummy code objects
// into the native context's OPTIMIZED_CODE_LIST and checks that the
// unreferenced ("mortal") one is unlinked by GC, leaving the retained
// ("immortal") head pointing back at the old list head.
4722 TEST(NextCodeLinkIsWeak2) {
4723 i::FLAG_allow_natives_syntax = true;
4724 CcTest::InitializeVM();
4725 Isolate* isolate = CcTest::i_isolate();
4726 v8::internal::Heap* heap = CcTest::heap();
4728 if (!isolate->use_crankshaft()) return;
4729 HandleScope outer_scope(heap->isolate());
4730 heap->CollectAllAvailableGarbage();
4731 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
4732 Handle<Code> new_head;
4733 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
4735 HandleScope scope(heap->isolate());
4736 Handle<Code> immortal = DummyOptimizedCode(isolate);
4737 Handle<Code> mortal = DummyOptimizedCode(isolate);
// Build the chain: immortal -> mortal -> old_head, then install immortal
// as the new list head; only `immortal` escapes the inner scope.
4738 mortal->set_next_code_link(*old_head);
4739 immortal->set_next_code_link(*mortal);
4740 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
4741 new_head = scope.CloseAndEscape(immortal);
4743 heap->CollectAllAvailableGarbage();
4744 // Now mortal code should be dead.
4745 CHECK_EQ(*old_head, new_head->next_code_link());
// Flag set by ClearWeakIC so tests can assert the weak callback fired.
4749 static bool weak_ic_cleared = false;
// Weak callback for the `garbage` persistent used by the weak-IC tests:
// records the invocation and resets the persistent handle.
4751 static void ClearWeakIC(
4752 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
4753 printf("clear weak is called\n");
4754 weak_ic_cleared = true;
4755 data.GetParameter()->Reset();
// Verifies that the constructor recorded in createObj's CallIC feedback is
// held via a WeakCell, is cleared by GC once the constructor's context dies,
// and that a fresh constructor call re-establishes monomorphic feedback.
4759 TEST(WeakFunctionInConstructor) {
4760 if (i::FLAG_always_opt) return;
4761 i::FLAG_stress_compaction = false;
4762 CcTest::InitializeVM();
4763 v8::Isolate* isolate = CcTest::isolate();
4764 v8::HandleScope scope(isolate);
4766 "function createObj(obj) {"
4767 " return new obj();"
4769 Handle<JSFunction> createObj =
4770 v8::Utils::OpenHandle(*v8::Handle<v8::Function>::Cast(
4771 CcTest::global()->Get(v8_str("createObj"))));
4773 v8::Persistent<v8::Object> garbage;
4775 v8::HandleScope scope(isolate);
4776 const char* source =
4778 " function hat() { this.x = 5; }"
4783 garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
4785 weak_ic_cleared = false;
4786 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
4787 Heap* heap = CcTest::i_isolate()->heap();
4788 heap->CollectAllGarbage();
4789 CHECK(weak_ic_cleared);
4791 // We've determined the constructor in createObj has had its weak cell
4792 // cleared. Now, verify that one additional call with a new function
4793 // allows monomorphicity.
4794 Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
4795 createObj->shared()->feedback_vector(), CcTest::i_isolate());
// GC until the WeakCell in slot 0 is observed cleared (bounded retries).
4796 for (int i = 0; i < 20; i++) {
4797 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
4798 CHECK(slot_value->IsWeakCell());
4799 if (WeakCell::cast(slot_value)->cleared()) break;
4800 heap->CollectAllGarbage();
4803 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
4804 CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
4806 "function coat() { this.x = 6; }"
4807 "createObj(coat);");
4808 slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
4809 CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
4813 // Checks that the value returned by execution of the source is weak.
// Shared driver for the WeakMapIn* tests below: runs `source`, holds its
// result only through a weak persistent, GCs, and asserts the weak callback
// fired (i.e. the IC did not keep the object alive).
4814 void CheckWeakness(const char* source) {
4815 i::FLAG_stress_compaction = false;
4816 CcTest::InitializeVM();
4817 v8::Isolate* isolate = CcTest::isolate();
4818 v8::HandleScope scope(isolate);
4819 v8::Persistent<v8::Object> garbage;
4821 v8::HandleScope scope(isolate);
4822 garbage.Reset(isolate, CompileRun(source)->ToObject(isolate));
4824 weak_ic_cleared = false;
4825 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
4826 Heap* heap = CcTest::i_isolate()->heap();
4827 heap->CollectAllGarbage();
4828 CHECK(weak_ic_cleared);
4832 // Each of the following "weak IC" tests creates an IC that embeds a map with
4833 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
// Monomorphic LOAD_IC must not keep the embedded map's prototype alive.
4834 TEST(WeakMapInMonomorphicLoadIC) {
4835 CheckWeakness("function loadIC(obj) {"
4839 " var proto = {'name' : 'weak'};"
4840 " var obj = Object.create(proto);"
// Polymorphic LOAD_IC must not keep the embedded maps' prototype alive.
4849 TEST(WeakMapInPolymorphicLoadIC) {
4851 "function loadIC(obj) {"
4855 " var proto = {'name' : 'weak'};"
4856 " var obj = Object.create(proto);"
4860 " var poly = Object.create(proto);"
// Monomorphic KEYED_LOAD_IC must not keep the prototype alive.
4868 TEST(WeakMapInMonomorphicKeyedLoadIC) {
4869 CheckWeakness("function keyedLoadIC(obj, field) {"
4870 " return obj[field];"
4873 " var proto = {'name' : 'weak'};"
4874 " var obj = Object.create(proto);"
4875 " keyedLoadIC(obj, 'name');"
4876 " keyedLoadIC(obj, 'name');"
4877 " keyedLoadIC(obj, 'name');"
// Polymorphic KEYED_LOAD_IC must not keep the prototype alive.
4883 TEST(WeakMapInPolymorphicKeyedLoadIC) {
4885 "function keyedLoadIC(obj, field) {"
4886 " return obj[field];"
4889 " var proto = {'name' : 'weak'};"
4890 " var obj = Object.create(proto);"
4891 " keyedLoadIC(obj, 'name');"
4892 " keyedLoadIC(obj, 'name');"
4893 " keyedLoadIC(obj, 'name');"
4894 " var poly = Object.create(proto);"
4896 " keyedLoadIC(poly, 'name');"
// Monomorphic STORE_IC must not keep the prototype alive.
4902 TEST(WeakMapInMonomorphicStoreIC) {
4903 CheckWeakness("function storeIC(obj, value) {"
4904 " obj.name = value;"
4907 " var proto = {'name' : 'weak'};"
4908 " var obj = Object.create(proto);"
4909 " storeIC(obj, 'x');"
4910 " storeIC(obj, 'x');"
4911 " storeIC(obj, 'x');"
// Polymorphic STORE_IC must not keep the prototype alive.
4917 TEST(WeakMapInPolymorphicStoreIC) {
4919 "function storeIC(obj, value) {"
4920 " obj.name = value;"
4923 " var proto = {'name' : 'weak'};"
4924 " var obj = Object.create(proto);"
4925 " storeIC(obj, 'x');"
4926 " storeIC(obj, 'x');"
4927 " storeIC(obj, 'x');"
4928 " var poly = Object.create(proto);"
4930 " storeIC(poly, 'x');"
// Monomorphic KEYED_STORE_IC must not keep the prototype alive.
4936 TEST(WeakMapInMonomorphicKeyedStoreIC) {
4937 CheckWeakness("function keyedStoreIC(obj, field, value) {"
4938 " obj[field] = value;"
4941 " var proto = {'name' : 'weak'};"
4942 " var obj = Object.create(proto);"
4943 " keyedStoreIC(obj, 'x');"
4944 " keyedStoreIC(obj, 'x');"
4945 " keyedStoreIC(obj, 'x');"
// Polymorphic KEYED_STORE_IC must not keep the prototype alive.
4951 TEST(WeakMapInPolymorphicKeyedStoreIC) {
4953 "function keyedStoreIC(obj, field, value) {"
4954 " obj[field] = value;"
4957 " var proto = {'name' : 'weak'};"
4958 " var obj = Object.create(proto);"
4959 " keyedStoreIC(obj, 'x');"
4960 " keyedStoreIC(obj, 'x');"
4961 " keyedStoreIC(obj, 'x');"
4962 " var poly = Object.create(proto);"
4964 " keyedStoreIC(poly, 'x');"
// Monomorphic CompareNil IC must not keep the prototype alive.
4970 TEST(WeakMapInMonomorphicCompareNilIC) {
4971 CheckWeakness("function compareNilIC(obj) {"
4972 " return obj == null;"
4975 " var proto = {'name' : 'weak'};"
4976 " var obj = Object.create(proto);"
4977 " compareNilIC(obj);"
4978 " compareNilIC(obj);"
4979 " compareNilIC(obj);"
// NOTE(review): Closing braces of both helpers are elided from this excerpt
// (embedded line numbers jump 4989 -> 4993 and past 5012).
//
// Looks up a JS function by name on the isolate's global object and returns
// it as a JSFunction handle. ToHandleChecked()/cast will fail hard if the
// property is absent or not a function.
4985 Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
4986 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
4987 Handle<Object> obj =
4988 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
4989 return Handle<JSFunction>::cast(obj);
// Asserts that an IC is in the expected state. For LOAD/KEYED_LOAD/CALL ICs
// the state lives in the shared function info's type-feedback vector, so the
// check reads the vector slot through the matching *ICNexus; for other kinds
// (fallthrough below) it scans the code object for the first IC stub and
// checks its ic_state() directly.
4993 void CheckIC(Code* code, Code::Kind kind, SharedFunctionInfo* shared,
4994 int ic_slot, InlineCacheState state) {
4995 if (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
4996 kind == Code::CALL_IC) {
4997 TypeFeedbackVector* vector = shared->feedback_vector();
4998 FeedbackVectorICSlot slot(ic_slot);
4999 if (kind == Code::LOAD_IC) {
5000 LoadICNexus nexus(vector, slot);
5001 CHECK_EQ(nexus.StateFromFeedback(), state);
5002 } else if (kind == Code::KEYED_LOAD_IC) {
5003 KeyedLoadICNexus nexus(vector, slot);
5004 CHECK_EQ(nexus.StateFromFeedback(), state);
5005 } else if (kind == Code::CALL_IC) {
5006 CallICNexus nexus(vector, slot);
5007 CHECK_EQ(nexus.StateFromFeedback(), state);
// Non-vector kinds: find the IC stub embedded in the code object.
5010 Code* ic = FindFirstIC(code, kind);
5011 CHECK(ic->is_inline_cache_stub());
5012 CHECK(ic->ic_state() == state);
// NOTE(review): Interior lines of both tests (parts of the compiled JS and
// closing braces) are elided from this excerpt.
//
// Verifies that a LOAD_IC trained to MONOMORPHIC keeps that state across a
// full GC (weak IC references must be cleared only when the map actually
// dies, not on every collection). Skipped under --always-opt.
5017 TEST(MonomorphicStaysMonomorphicAfterGC) {
5018 if (FLAG_always_opt) return;
5019 CcTest::InitializeVM();
5020 Isolate* isolate = CcTest::i_isolate();
5021 Heap* heap = isolate->heap();
5022 v8::HandleScope scope(CcTest::isolate());
5024 "function loadIC(obj) {"
5027 "function testIC() {"
5028 " var proto = {'name' : 'weak'};"
5029 " var obj = Object.create(proto);"
5035 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
// Inner scopes keep the JS-created handles from leaking into the outer scope
// between the two runs.
5037 v8::HandleScope scope(CcTest::isolate());
5038 CompileRun("(testIC())");
5040 heap->CollectAllGarbage();
5041 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
5043 v8::HandleScope scope(CcTest::isolate());
5044 CompileRun("(testIC())");
5046 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
// Same shape as above, but the (elided) script also exercises a second
// receiver so the IC becomes POLYMORPHIC, which must likewise survive GC.
5050 TEST(PolymorphicStaysPolymorphicAfterGC) {
5051 if (FLAG_always_opt) return;
5052 CcTest::InitializeVM();
5053 Isolate* isolate = CcTest::i_isolate();
5054 Heap* heap = isolate->heap();
5055 v8::HandleScope scope(CcTest::isolate());
5057 "function loadIC(obj) {"
5060 "function testIC() {"
5061 " var proto = {'name' : 'weak'};"
5062 " var obj = Object.create(proto);"
5066 " var poly = Object.create(proto);"
5071 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5073 v8::HandleScope scope(CcTest::isolate());
5074 CompileRun("(testIC())");
5076 heap->CollectAllGarbage();
5077 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
5079 v8::HandleScope scope(CcTest::isolate());
5080 CompileRun("(testIC())");
5082 CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
// NOTE(review): The TEST(...) header of the first test here is elided (this
// excerpt resumes at its first body line), as are several interior lines.
//
// First test: weak_cell1 references a FixedArray reachable only through the
// cell; weak_cell2 references 'survivor', which is also strongly held by a
// handle. Scavenges (NEW_SPACE GCs) must not clear either cell; a full
// mark-compact (CollectAllAvailableGarbage) must clear weak_cell1 but keep
// weak_cell2 pointing at the still-live survivor.
5087 CcTest::InitializeVM();
5088 Isolate* isolate = CcTest::i_isolate();
5089 v8::internal::Heap* heap = CcTest::heap();
5090 v8::internal::Factory* factory = isolate->factory();
5092 HandleScope outer_scope(isolate);
5093 Handle<WeakCell> weak_cell1;
5095 HandleScope inner_scope(isolate);
5096 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
5097 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
5100 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5101 Handle<WeakCell> weak_cell2;
5103 HandleScope inner_scope(isolate);
5104 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
5106 CHECK(weak_cell1->value()->IsFixedArray());
5107 CHECK_EQ(*survivor, weak_cell2->value());
5108 heap->CollectGarbage(NEW_SPACE);
5109 CHECK(weak_cell1->value()->IsFixedArray());
5110 CHECK_EQ(*survivor, weak_cell2->value());
5111 heap->CollectGarbage(NEW_SPACE);
5112 CHECK(weak_cell1->value()->IsFixedArray());
5113 CHECK_EQ(*survivor, weak_cell2->value());
5114 heap->CollectAllAvailableGarbage();
5115 CHECK(weak_cell1->cleared());
5116 CHECK_EQ(*survivor, weak_cell2->value());
// Same invariant under incremental marking: N weak cells are created while
// incremental-marking steps and scavenges are interleaved; after the final
// full GC only the cell whose value is 'survivor' (index 0) stays uncleared.
// (The definition of N is on an elided line.)
5120 TEST(WeakCellsWithIncrementalMarking) {
5121 CcTest::InitializeVM();
5122 Isolate* isolate = CcTest::i_isolate();
5123 v8::internal::Heap* heap = CcTest::heap();
5124 v8::internal::Factory* factory = isolate->factory();
5127 HandleScope outer_scope(isolate);
5128 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5129 Handle<WeakCell> weak_cells[N];
5131 for (int i = 0; i < N; i++) {
5132 HandleScope inner_scope(isolate);
5133 Handle<HeapObject> value =
5134 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
5135 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
5136 CHECK(weak_cell->value()->IsFixedArray());
5137 IncrementalMarking* marking = heap->incremental_marking();
5138 if (marking->IsStopped()) marking->Start(Heap::kNoGCFlags);
5139 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5140 heap->CollectGarbage(NEW_SPACE);
5141 CHECK(weak_cell->value()->IsFixedArray());
5142 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
5144 heap->CollectAllGarbage();
5145 CHECK_EQ(*survivor, weak_cells[0]->value());
5146 for (int i = 1; i < N; i++) {
5147 CHECK(weak_cells[i]->cleared());
// NOTE(review): Interior lines of the compiled JS and the closing brace are
// elided from this excerpt.
//
// Regression-style test: with stress compaction and a very short GC interval,
// calling the optimized 'crash' function with inline allocation disabled and
// allocation_timeout set to 1 forces a GC at a specific allocation point; the
// test passes if this does not crash. Only meaningful when allocation-site
// pretenuring is enabled.
5153 TEST(AddInstructionChangesNewSpacePromotion) {
5154 i::FLAG_allow_natives_syntax = true;
5155 i::FLAG_expose_gc = true;
5156 i::FLAG_stress_compaction = true;
5157 i::FLAG_gc_interval = 1000;
5158 CcTest::InitializeVM();
5159 if (!i::FLAG_allocation_site_pretenuring) return;
5160 v8::HandleScope scope(CcTest::isolate());
5161 Isolate* isolate = CcTest::i_isolate();
5162 Heap* heap = isolate->heap();
5165 "function add(a, b) {"
5169 "add(\"a\", \"b\");"
5170 "var oldSpaceObject;"
5172 "function crash(x) {"
5173 " var object = {a: null, b: null};"
5174 " var result = add(1.5, x | 0);"
5175 " object.a = result;"
5176 " oldSpaceObject = object;"
5181 "%OptimizeFunctionOnNextCall(crash);"
5184 v8::Handle<v8::Object> global = CcTest::global();
5185 v8::Handle<v8::Function> g =
5186 v8::Handle<v8::Function>::Cast(global->Get(v8_str("crash")));
5187 v8::Handle<v8::Value> args1[] = { v8_num(1) };
// Force the next heap allocation to trigger a GC (timeout of 1).
5188 heap->DisableInlineAllocation();
5189 heap->set_allocation_timeout(1);
5190 g->Call(global, 1, args1);
5191 heap->CollectAllGarbage();
// NOTE(review): Closing braces and part of the compiled script are elided
// from this excerpt.
//
// Fatal-error handler that converts an expected OOM into a clean exit:
// exit code is 0 exactly when the failure location is CALL_AND_RETRY_LAST.
5195 void OnFatalErrorExpectOOM(const char* location, const char* message) {
5196 // Exit with 0 if the location matches our expectation.
5197 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
// Drives an allocation-heavy script with --gc-interval=1 under the handler
// above; the visible tail asserts the script still produced a number.
5201 TEST(CEntryStubOOM) {
5202 i::FLAG_allow_natives_syntax = true;
5203 CcTest::InitializeVM();
5204 v8::HandleScope scope(CcTest::isolate());
5205 v8::V8::SetFatalErrorHandler(OnFatalErrorExpectOOM);
5207 v8::Handle<v8::Value> result = CompileRun(
5208 "%SetFlags('--gc-interval=1');"
5213 CHECK(result->IsNumber());
// No-op interrupt callback used by Regress357137 below.
5219 static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
// JS-callable hook that schedules the no-op interrupt on the test isolate.
5222 static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
5223 CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
// NOTE(review): Some interior script lines and the closing brace are elided.
//
// Regression test for issue 357137: installs the 'interrupt' native (see
// RequestInterrupt above), then evals a function with 512 locals so that the
// interrupt check triggers a fake stack overflow inside it; the final result
// must still be 42.
5227 TEST(Regress357137) {
5228 CcTest::InitializeVM();
5229 v8::Isolate* isolate = CcTest::isolate();
5230 v8::HandleScope hscope(isolate);
5231 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5232 global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
5233 v8::FunctionTemplate::New(isolate, RequestInterrupt));
5234 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
5235 DCHECK(!context.IsEmpty());
5236 v8::Context::Scope cscope(context);
5238 v8::Local<v8::Value> result = CompileRun(
5240 "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
5241 "eval('function f() {' + locals + 'return function() { return v0; }; }');"
5242 "interrupt();" // This triggers a fake stack overflow in f.
5244 CHECK_EQ(42.0, result->ToNumber(isolate)->Value());
// NOTE(review): Part of the compiled script and the closing brace are elided.
//
// After Array.prototype.shift() trims the elements backing store in old
// space, the elements page must either be past concurrent sweeping or the
// elements must be marked black -- i.e. the in-place left-trim must not
// confuse the sweeper.
5248 TEST(ArrayShiftSweeping) {
5249 i::FLAG_expose_gc = true;
5250 CcTest::InitializeVM();
5251 v8::HandleScope scope(CcTest::isolate());
5252 Isolate* isolate = CcTest::i_isolate();
5253 Heap* heap = isolate->heap();
5255 v8::Local<v8::Value> result = CompileRun(
5256 "var array = new Array(40000);"
5257 "var tmp = new Array(100000);"
5264 Handle<JSObject> o =
5265 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5266 CHECK(heap->InOldSpace(o->elements()));
5267 CHECK(heap->InOldSpace(*o));
5268 Page* page = Page::FromAddress(o->elements()->address());
5269 CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
5270 Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
// NOTE(review): A few interior lines (e.g. the semi-space grow call around
// embedded line 5316 and the closing braces) are elided from this excerpt.
// The long comment inside the test explains the scenario; annotations here
// are kept minimal.
//
// Checks that the scavenger's promotion queue, which lives at the end of
// to-space, is correctly evacuated when a semi-space-copy allocation would
// overwrite it. Uses a fresh isolate (UNINITIALIZED_TEST) so semi-space
// sizing flags take effect.
5274 UNINITIALIZED_TEST(PromotionQueue) {
5275 i::FLAG_expose_gc = true;
5276 i::FLAG_max_semi_space_size = 2 * (Page::kPageSize / MB);
5277 v8::Isolate::CreateParams create_params;
5278 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
5279 v8::Isolate* isolate = v8::Isolate::New(create_params);
5280 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
5282 v8::Isolate::Scope isolate_scope(isolate);
5283 v8::HandleScope handle_scope(isolate);
5284 v8::Context::New(isolate)->Enter();
5285 Heap* heap = i_isolate->heap();
5286 NewSpace* new_space = heap->new_space();
5288 // In this test we will try to overwrite the promotion queue which is at the
5289 // end of to-space. To actually make that possible, we need at least two
5290 // semi-space pages and take advantage of fragmentation.
5291 // (1) Grow semi-space to two pages.
5292 // (2) Create a few small long living objects and call the scavenger to
5293 // move them to the other semi-space.
5294 // (3) Create a huge object, i.e., remainder of first semi-space page and
5295 // create another huge object which should be of maximum allocatable memory
5296 // size of the second semi-space page.
5297 // (4) Call the scavenger again.
5298 // What will happen is: the scavenger will promote the objects created in
5299 // (2) and will create promotion queue entries at the end of the second
5300 // semi-space page during the next scavenge when it promotes the objects to
5301 // the old generation. The first allocation of (3) will fill up the first
5302 // semi-space page. The second allocation in (3) will not fit into the
5303 // first semi-space page, but it will overwrite the promotion queue which
5304 // are in the second semi-space page. If the right guards are in place, the
5305 // promotion queue will be evacuated in that case.
5307 // Grow the semi-space to two pages to make semi-space copy overwrite the
5308 // promotion queue, which will be at the end of the second page.
5309 intptr_t old_capacity = new_space->TotalCapacity();
5311 // If we are in a low memory config, we can't grow to two pages and we can't
5312 // run this test. This also means the issue we are testing cannot arise, as
5313 // there is no fragmentation.
5314 if (new_space->IsAtMaximumCapacity()) return;
5317 CHECK(new_space->IsAtMaximumCapacity());
5318 CHECK(2 * old_capacity == new_space->TotalCapacity());
5320 // Call the scavenger two times to get an empty new space
5321 heap->CollectGarbage(NEW_SPACE);
5322 heap->CollectGarbage(NEW_SPACE);
5324 // First create a few objects which will survive a scavenge, and will get
5325 // promoted to the old generation later on. These objects will create
5326 // promotion queue entries at the end of the second semi-space page.
5327 const int number_handles = 12;
5328 Handle<FixedArray> handles[number_handles];
5329 for (int i = 0; i < number_handles; i++) {
5330 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5332 heap->CollectGarbage(NEW_SPACE);
5334 // Create the first huge object which will exactly fit the first semi-space
5336 int new_linear_size =
5337 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5338 *heap->new_space()->allocation_top_address());
5339 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize;
5340 Handle<FixedArray> first =
5341 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5342 CHECK(heap->InNewSpace(*first));
5344 // Create the second huge object of maximum allocatable second semi-space
5347 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5348 *heap->new_space()->allocation_top_address());
5349 length = Page::kMaxRegularHeapObjectSize / kPointerSize -
5350 FixedArray::kHeaderSize;
5351 Handle<FixedArray> second =
5352 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5353 CHECK(heap->InNewSpace(*second));
5355 // This scavenge will corrupt memory if the promotion queue is not
5357 heap->CollectGarbage(NEW_SPACE);
// NOTE(review): Several interior lines (e.g. the map2 definition around
// embedded line 5372 and the closing braces) are elided from this excerpt.
//
// Regression test for crbug 388880: positions a JSObject so that it ends
// exactly at a page boundary, then migrates its map while incremental
// marking is active; Heap::AdjustLiveBytes() must not crash on the
// page-straddling live-bytes update.
5363 TEST(Regress388880) {
5364 i::FLAG_expose_gc = true;
5365 CcTest::InitializeVM();
5366 v8::HandleScope scope(CcTest::isolate());
5367 Isolate* isolate = CcTest::i_isolate();
5368 Factory* factory = isolate->factory();
5369 Heap* heap = isolate->heap();
5371 Handle<Map> map1 = Map::Create(isolate, 1);
5373 Map::CopyWithField(map1, factory->NewStringFromStaticChars("foo"),
5374 HeapType::Any(isolate), NONE, Representation::Tagged(),
5375 OMIT_TRANSITION).ToHandleChecked();
5377 int desired_offset = Page::kPageSize - map1->instance_size();
5379 // Allocate fixed array in old pointer space so, that object allocated
5380 // afterwards would end at the end of the page.
5382 SimulateFullSpace(heap->old_space());
5383 int padding_size = desired_offset - Page::kObjectStartOffset;
5384 int padding_array_length =
5385 (padding_size - FixedArray::kHeaderSize) / kPointerSize;
5387 Handle<FixedArray> temp2 =
5388 factory->NewFixedArray(padding_array_length, TENURED);
5389 Page* page = Page::FromAddress(temp2->address());
5390 CHECK_EQ(Page::kObjectStartOffset, page->Offset(temp2->address()));
5393 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED, false);
5394 o->set_properties(*factory->empty_fixed_array());
5396 // Ensure that the object allocated where we need it.
5397 Page* page = Page::FromAddress(o->address());
5398 CHECK_EQ(desired_offset, page->Offset(o->address()));
5400 // Now we have an object right at the end of the page.
5402 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5403 // that would cause crash.
5404 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5406 marking->Start(Heap::kNoGCFlags);
5407 CHECK(marking->IsMarking());
5409 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5410 // when it calls heap->AdjustLiveBytes(...).
5411 JSObject::MigrateToMap(o, map2);
// NOTE(review): The TEST(...) header of this test is elided (the excerpt
// resumes at its first body line), along with a few interior lines and the
// closing brace.
//
// Regression test: incrementally marks a WeakMap's backing hash table black,
// then mutates the map (which replaces the backing store) and forces the
// marker to hurry before an old-space GC -- the write-barrier/marking
// interaction on the replaced backing store must not be missed.
5416 i::FLAG_expose_gc = true;
5417 CcTest::InitializeVM();
5418 v8::HandleScope scope(CcTest::isolate());
5419 Isolate* isolate = CcTest::i_isolate();
5420 Heap* heap = isolate->heap();
5421 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5422 v8::Local<v8::Value> result = CompileRun(
5423 "var weak_map = new WeakMap();"
5424 "var future_keys = [];"
5425 "for (var i = 0; i < 50; i++) {"
5426 " var key = {'k' : i + 0.1};"
5427 " weak_map.set(key, 1);"
5428 " future_keys.push({'x' : i + 0.2});"
5431 if (marking->IsStopped()) {
5432 marking->Start(Heap::kNoGCFlags);
5434 // Incrementally mark the backing store.
5435 Handle<JSObject> obj =
5436 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5437 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
5438 while (!Marking::IsBlack(
5439 Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
5440 !marking->IsStopped()) {
5441 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5443 // Stash the backing store in a handle.
5444 Handle<Object> save(weak_map->table(), isolate);
5445 // The following line will update the backing store.
5447 "for (var i = 0; i < 50; i++) {"
5448 " weak_map.set(future_keys[i], i);"
5450 heap->incremental_marking()->set_should_hurry(true);
5451 heap->CollectGarbage(OLD_SPACE);
// NOTE(review): The closing brace of this test is elided from the excerpt.
//
// Regression test for issue 442710: left-trims a JSArray's backing store via
// Array.prototype.shift() and then runs an old-space GC; passes if the GC
// handles the trimmed store without crashing.
5455 TEST(Regress442710) {
5456 CcTest::InitializeVM();
5457 Isolate* isolate = CcTest::i_isolate();
5458 Heap* heap = isolate->heap();
5459 Factory* factory = isolate->factory();
5461 HandleScope sc(isolate);
5462 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
5463 Handle<JSArray> array = factory->NewJSArray(2);
5465 Handle<String> name = factory->InternalizeUtf8String("testArray");
5466 JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
5467 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5468 heap->CollectGarbage(OLD_SPACE);
// NOTE(review): The closing brace of this test is elided from the excerpt.
5472 TEST(NumberStringCacheSize) {
5473 // Test that the number-string cache has not been resized in the snapshot.
5474 CcTest::InitializeVM();
5475 Isolate* isolate = CcTest::i_isolate();
// Only meaningful when running from a snapshot.
5476 if (!isolate->snapshot_available()) return;
5477 Heap* heap = isolate->heap();
// Cache stores key/value pairs, hence length == 2 * entry count.
5478 CHECK_EQ(TestHeap::kInitialNumberStringCacheSize * 2,
5479 heap->number_string_cache()->length());
// NOTE(review): The TEST(...) header of this test is elided (the excerpt
// resumes at its first body line), along with the script around embedded
// line 5499-5502 and the closing brace.
//
// Creates a WeakCell pointing at cls.prototype, then (in elided code)
// instantiates 'a = new cls()' and drops the direct prototype reference.
// While a live object's map still references the prototype the cell must
// survive repeated full GCs; once a.x's map is changed the cell must clear.
5484 CcTest::InitializeVM();
5485 Isolate* isolate = CcTest::i_isolate();
5486 Heap* heap = isolate->heap();
5487 Factory* factory = isolate->factory();
5488 HandleScope scope(isolate);
5489 CompileRun("function cls() { this.x = 10; }");
5490 Handle<WeakCell> weak_prototype;
5492 HandleScope inner_scope(isolate);
5493 v8::Local<v8::Value> result = CompileRun("cls.prototype");
5494 Handle<JSObject> proto =
5495 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5496 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
5498 CHECK(!weak_prototype->cleared());
5502 "cls.prototype = null;");
5503 for (int i = 0; i < 4; i++) {
5504 heap->CollectAllGarbage();
5506 // The map of a.x keeps prototype alive
5507 CHECK(!weak_prototype->cleared());
5508 // Change the map of a.x and make the previous map garbage collectable.
5509 CompileRun("a.x.__proto__ = {};");
5510 for (int i = 0; i < 4; i++) {
5511 heap->CollectAllGarbage();
5513 CHECK(weak_prototype->cleared());
// NOTE(review): Closing braces of the helpers and of TEST(MapRetaining) are
// elided from this excerpt.
//
// Creates a fresh Map with a JS object as prototype, registers it in the
// heap's retained-maps list, and returns a WeakCell observing it.
5517 Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
5518 HandleScope inner_scope(isolate);
5519 Handle<Map> map = Map::Create(isolate, 1);
5520 v8::Local<v8::Value> result =
5521 CompileRun("(function () { return {x : 10}; })();");
5522 Handle<JSObject> proto =
5523 v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
5524 Map::SetPrototype(map, proto);
5525 heap->AddRetainedMap(map);
5526 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
// Checks the --retain-maps-for-n-gc contract: a retained map survives
// exactly n old-space GCs and is collected on GC n+1.
5530 void CheckMapRetainingFor(int n) {
5531 FLAG_retain_maps_for_n_gc = n;
5532 Isolate* isolate = CcTest::i_isolate();
5533 Heap* heap = isolate->heap();
5534 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
5535 CHECK(!weak_cell->cleared());
5536 for (int i = 0; i < n; i++) {
5537 heap->CollectGarbage(OLD_SPACE);
5539 CHECK(!weak_cell->cleared());
5540 heap->CollectGarbage(OLD_SPACE);
5541 CHECK(weak_cell->cleared());
// Exercises retention for the default flag value plus 0, 1, and 7 GCs.
5545 TEST(MapRetaining) {
5546 CcTest::InitializeVM();
5547 v8::HandleScope scope(CcTest::isolate());
5548 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5549 CheckMapRetainingFor(0);
5550 CheckMapRetainingFor(1);
5551 CheckMapRetainingFor(7);
// NOTE(review): Closing braces of this test are elided from the excerpt.
//
// Regression test: growing the retained-maps ArrayList (10 additions of the
// same map) while a GC can strike mid-addition must not corrupt the list.
// gc_global + disabled incremental marking make the GCs full and
// deterministic.
5555 TEST(RegressArrayListGC) {
5556 FLAG_retain_maps_for_n_gc = 1;
5557 FLAG_incremental_marking = 0;
5558 FLAG_gc_global = true;
5559 CcTest::InitializeVM();
5560 v8::HandleScope scope(CcTest::isolate());
5561 Isolate* isolate = CcTest::i_isolate();
5562 Heap* heap = isolate->heap();
5563 AddRetainedMap(isolate, heap);
5564 Handle<Map> map = Map::Create(isolate, 1);
5565 heap->CollectGarbage(OLD_SPACE);
5566 // Force GC in old space on next addition of retained map.
5567 Map::WeakCellForMap(map);
5568 SimulateFullSpace(CcTest::heap()->new_space());
5569 for (int i = 0; i < 10; i++) {
5570 heap->AddRetainedMap(map);
5572 heap->CollectGarbage(OLD_SPACE);
// NOTE(review): The TEST(...) header of the first (trace-path) test is
// elided, as are the closing braces of both tests.
//
// First (headerless) test: smoke-tests Heap::TracePathToObject on a simple
// string result -- passes if tracing does not crash.
5578 CcTest::InitializeVM();
5579 v8::HandleScope scope(CcTest::isolate());
5581 v8::Local<v8::Value> result = CompileRun("'abc'");
5582 Handle<Object> o = v8::Utils::OpenHandle(*result);
5583 CcTest::i_isolate()->heap()->TracePathToObject(*o);
// For every strong root, "writable after initialization" and "immortal
// immovable" must be mutually exclusive.
5588 TEST(WritableVsImmortalRoots) {
5589 for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
5590 Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
5591 bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
5592 bool immortal = Heap::RootIsImmortalImmovable(root_index);
5593 // A root value can be writable, immortal, or neither, but not both.
5594 CHECK(!immortal || !writable);
// NOTE(review): A parameter line of the helper (between embedded lines 5599
// and 5601), some interior lines, and the closing braces are elided.
//
// Allocates a fixed typed array of 'initial_length' elements, right-trims
// 'elements_to_trim' of them, and checks that the trim shrank the length,
// did not smash the object header, and left a filler object where the freed
// tail was.
5599 static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
5601 int elements_to_trim) {
5602 v8::HandleScope scope(CcTest::isolate());
5603 Isolate* isolate = CcTest::i_isolate();
5604 Factory* factory = isolate->factory();
5605 Heap* heap = isolate->heap();
5607 Handle<FixedTypedArrayBase> array =
5608 factory->NewFixedTypedArray(initial_length, type, true);
5609 int old_size = array->size();
5610 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
5613 // Check that free space filler is at the right place and did not smash the
5615 CHECK(array->IsFixedArrayBase());
5616 CHECK_EQ(initial_length - elements_to_trim, array->length());
5617 int new_size = array->size();
5618 if (new_size != old_size) {
5619 // Free space filler should be created in this case.
5620 Address next_obj_address = array->address() + array->size();
5621 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
5623 heap->CollectAllAvailableGarbage();
// Regression test for crbug 472513: specific type/length/trim combinations
// that used to smash the typed-array header with the free-space filler.
5627 TEST(Regress472513) {
5628 CcTest::InitializeVM();
5629 v8::HandleScope scope(CcTest::isolate());
5631 // The combination of type/initial_length/elements_to_trim triggered
5632 // typed array header smashing with free space filler (crbug/472513).
5635 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32, 6);
5636 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32 - 7, 6);
5637 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16, 6);
5638 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16 - 3, 6);
5639 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8, 6);
5640 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8 - 1, 6);
5643 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16, 3);
5644 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16 - 3, 3);
5645 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8, 3);
5646 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8 - 1, 3);
5647 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 4, 3);
// NOTE(review): Interior lines (between Remove and the second Add) and the
// closing brace are elided from this excerpt.
//
// Exercises WeakFixedArray Add/Remove round-tripping with a HeapNumber;
// the first Add with an empty handle creates the array.
5651 TEST(WeakFixedArray) {
5652 CcTest::InitializeVM();
5653 v8::HandleScope scope(CcTest::isolate());
5655 Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
5656 Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
5657 array->Remove(number);
5659 WeakFixedArray::Add(array, number);
// NOTE(review): The closing braces of the loop and test are elided from this
// excerpt.
//
// Before GC, element 3 of the captured raw stack trace holds a Code object;
// stack-trace preprocessing during GC must replace every Code entry with a
// Smi source position so no stack trace retains code objects afterwards.
5663 TEST(PreprocessStackTrace) {
5664 // Do not automatically trigger early GC.
5665 FLAG_gc_interval = -1;
5666 CcTest::InitializeVM();
5667 v8::HandleScope scope(CcTest::isolate());
5668 v8::TryCatch try_catch(CcTest::isolate());
5669 CompileRun("throw new Error();");
5670 CHECK(try_catch.HasCaught());
5671 Isolate* isolate = CcTest::i_isolate();
5672 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
5673 Handle<Name> key = isolate->factory()->stack_trace_symbol();
5674 Handle<Object> stack_trace =
5675 JSObject::GetProperty(exception, key).ToHandleChecked();
5676 Handle<Object> code =
5677 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5678 CHECK(code->IsCode());
5680 isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");
5682 Handle<Object> pos =
5683 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5684 CHECK(pos->IsSmi());
5686 Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
5687 int array_length = Smi::cast(stack_trace_array->length())->value();
5688 for (int i = 0; i < array_length; i++) {
5689 Handle<Object> element =
5690 Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
5691 CHECK(!element->IsCode());
// NOTE(review): Several interior lines and the closing braces are elided
// from this excerpt.
//
// Weak-callback plumbing: the flag records that the 'utils' object died; the
// callback also resets the persistent handle as required by the API.
5696 static bool utils_has_been_collected = false;
5698 static void UtilsHasBeenCollected(
5699 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
5700 utils_has_been_collected = true;
5701 data.GetParameter()->Reset();
// With natives exposed, grabs natives.utils into a weak persistent handle,
// deletes the strong 'utils' property, and checks that a full GC actually
// collects the object (i.e. bootstrapping does not leak it). Not applicable
// when starting from a snapshot.
5705 TEST(BootstrappingExports) {
5706 FLAG_expose_natives_as = "natives";
5707 CcTest::InitializeVM();
5708 v8::Isolate* isolate = CcTest::isolate();
5710 if (Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return;
5712 utils_has_been_collected = false;
5714 v8::Persistent<v8::Object> utils;
5717 v8::HandleScope scope(isolate);
5718 v8::Handle<v8::Object> natives =
5719 CcTest::global()->Get(v8_str("natives"))->ToObject(isolate);
5720 utils.Reset(isolate, natives->Get(v8_str("utils"))->ToObject(isolate));
5721 natives->Delete(v8_str("utils"));
5724 utils.SetWeak(&utils, UtilsHasBeenCollected,
5725 v8::WeakCallbackType::kParameter);
5727 CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");
5729 CHECK(utils_has_been_collected);
// NOTE(review): The TEST(...) header of this test is elided (the excerpt
// resumes at its first body line), as are a few script lines and the
// closing brace.
//
// Exposes the InternalArray constructor to JS and, across many allocations
// (which trigger GCs), checks that new InternalArray instances never share a
// map with the stashed 'a' and always keep fast object elements -- i.e.
// internal-array maps stay stable under GC pressure.
5734 FLAG_allow_natives_syntax = true;
5735 CcTest::InitializeVM();
5736 v8::Isolate* isolate = CcTest::isolate();
5737 v8::HandleScope scope(isolate);
5738 v8::Local<v8::Function> constructor =
5739 v8::Utils::ToLocal(CcTest::i_isolate()->internal_array_function());
5740 CcTest::global()->Set(v8_str("InternalArray"), constructor);
5742 v8::TryCatch try_catch(isolate);
5746 "for (var i = 0; i < 1000; i++) {"
5747 " var ai = new InternalArray(10000);"
5748 " if (%HaveSameMap(ai, a)) throw Error();"
5749 " if (!%HasFastObjectElements(ai)) throw Error();"
5751 "for (var i = 0; i < 1000; i++) {"
5752 " var ai = new InternalArray(10000);"
5753 " if (%HaveSameMap(ai, a)) throw Error();"
5754 " if (!%HasFastObjectElements(ai)) throw Error();"
5757 CHECK(!try_catch.HasCaught());
// NOTE(review): The 'int elements =' line of the helper (embedded line 5767)
// and the closing braces are elided from this excerpt.
//
// Allocates exactly 'bytes' in the requested space as a single FixedArray
// (bytes must be pointer-aligned and at least a FixedArray header) and
// verifies the allocation landed in the intended space with the exact size.
5761 void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
5762 CHECK(bytes >= FixedArray::kHeaderSize);
5763 CHECK(bytes % kPointerSize == 0);
5764 Factory* factory = isolate->factory();
5765 HandleScope scope(isolate);
// Prevent the allocation itself from triggering a GC.
5766 AlwaysAllocateScope always_allocate(isolate);
5768 static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
5769 Handle<FixedArray> array = factory->NewFixedArray(
5770 elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
5771 CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
5772 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
// The new-space allocation counter must advance exactly by bytes allocated,
// stay flat across a scavenge, and keep producing correct deltas across an
// intentional size_t wrap-around (max_counter is SIZE_MAX via -1).
5776 TEST(NewSpaceAllocationCounter) {
5777 CcTest::InitializeVM();
5778 v8::HandleScope scope(CcTest::isolate());
5779 Isolate* isolate = CcTest::i_isolate();
5780 Heap* heap = isolate->heap();
5781 size_t counter1 = heap->NewSpaceAllocationCounter();
5782 heap->CollectGarbage(NEW_SPACE);
5783 const size_t kSize = 1024;
5784 AllocateInSpace(isolate, kSize, NEW_SPACE);
5785 size_t counter2 = heap->NewSpaceAllocationCounter();
5786 CHECK_EQ(kSize, counter2 - counter1);
5787 heap->CollectGarbage(NEW_SPACE);
5788 size_t counter3 = heap->NewSpaceAllocationCounter();
5789 CHECK_EQ(0U, counter3 - counter2);
5790 // Test counter overflow.
5791 size_t max_counter = -1;
5792 heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
5793 size_t start = heap->NewSpaceAllocationCounter();
5794 for (int i = 0; i < 20; i++) {
5795 AllocateInSpace(isolate, kSize, NEW_SPACE);
5796 size_t counter = heap->NewSpaceAllocationCounter();
5797 CHECK_EQ(kSize, counter - start);
// NOTE(review): The closing lines of this test (loop body tail and braces)
// are elided from this excerpt.
//
// Old-generation analogue of NewSpaceAllocationCounter: deltas use CHECK_LE
// instead of CHECK_EQ because (per the TODO referencing v8:4148) other
// old-space allocations can sneak in; includes the same overflow scenario.
5803 TEST(OldSpaceAllocationCounter) {
5804 CcTest::InitializeVM();
5805 v8::HandleScope scope(CcTest::isolate());
5806 Isolate* isolate = CcTest::i_isolate();
5807 Heap* heap = isolate->heap();
5808 size_t counter1 = heap->OldGenerationAllocationCounter();
5809 heap->CollectGarbage(NEW_SPACE);
5810 const size_t kSize = 1024;
5811 AllocateInSpace(isolate, kSize, OLD_SPACE);
5812 size_t counter2 = heap->OldGenerationAllocationCounter();
5813 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
5814 CHECK_LE(kSize, counter2 - counter1);
5815 heap->CollectGarbage(NEW_SPACE);
5816 size_t counter3 = heap->OldGenerationAllocationCounter();
5817 CHECK_EQ(0u, counter3 - counter2);
5818 AllocateInSpace(isolate, kSize, OLD_SPACE);
5819 heap->CollectGarbage(OLD_SPACE);
5820 size_t counter4 = heap->OldGenerationAllocationCounter();
5821 CHECK_LE(kSize, counter4 - counter3);
5822 // Test counter overflow.
5823 size_t max_counter = -1;
5824 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
5825 size_t start = heap->OldGenerationAllocationCounter();
5826 for (int i = 0; i < 20; i++) {
5827 AllocateInSpace(isolate, kSize, OLD_SPACE);
5828 size_t counter = heap->OldGenerationAllocationCounter();
5829 CHECK_LE(kSize, counter - start);
// NOTE(review): The time1/time2/time3 definitions and closing braces are on
// elided lines in this excerpt.
//
// Feeds synthetic (time, new-space counter) samples into the GC tracer and
// checks that reported throughput equals bytes-delta / time-delta across the
// sampled window.
5835 TEST(NewSpaceAllocationThroughput) {
5836 CcTest::InitializeVM();
5837 v8::HandleScope scope(CcTest::isolate());
5838 Isolate* isolate = CcTest::i_isolate();
5839 Heap* heap = isolate->heap();
5840 GCTracer* tracer = heap->tracer();
5842 size_t counter1 = 1000;
5843 tracer->SampleAllocation(time1, counter1, 0);
5845 size_t counter2 = 2000;
5846 tracer->SampleAllocation(time2, counter2, 0);
5848 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
5849 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
5851 size_t counter3 = 30000;
5852 tracer->SampleAllocation(time3, counter3, 0);
5853 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
5854 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
// Same sampling scenario, but checked through the combined
// AllocationThroughputInBytesPerMillisecond(100) API.
5858 TEST(NewSpaceAllocationThroughput2) {
5859 CcTest::InitializeVM();
5860 v8::HandleScope scope(CcTest::isolate());
5861 Isolate* isolate = CcTest::i_isolate();
5862 Heap* heap = isolate->heap();
5863 GCTracer* tracer = heap->tracer();
5865 size_t counter1 = 1000;
5866 tracer->SampleAllocation(time1, counter1, 0);
5868 size_t counter2 = 2000;
5869 tracer->SampleAllocation(time2, counter2, 0);
5870 size_t throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
5871 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
5873 size_t counter3 = 30000;
5874 tracer->SampleAllocation(time3, counter3, 0);
5875 throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
5876 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
// NOTE(review): Interior lines of the test script, the flag-flipping tail
// (around embedded lines 5912-5915), and the closing braces are elided.
//
// JS-callable 'check' hook: asserts the isolate's pending message object has
// been cleared (is the hole) at the point of the call -- i.e. handled
// exceptions do not leak their message objects.
5880 static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
5881 Isolate* isolate = CcTest::i_isolate();
5883 *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
5884 CHECK(message->IsTheHole());
// Runs throw/catch scripts that call check(); the elided tail re-runs the
// same scenario with --turbo-filter=* / --always-opt / --turbo-exceptions.
5888 TEST(MessageObjectLeak) {
5889 CcTest::InitializeVM();
5890 v8::Isolate* isolate = CcTest::isolate();
5891 v8::HandleScope scope(isolate);
5892 v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5893 global->Set(v8::String::NewFromUtf8(isolate, "check"),
5894 v8::FunctionTemplate::New(isolate, CheckLeak));
5895 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
5896 v8::Context::Scope cscope(context);
5900 " throw 'message 1';"
5905 " throw 'message 2';"
5912 const char* flag = "--turbo-filter=*";
5913 FlagList::SetFlagsFromString(flag, StrLength(flag));
5914 FLAG_always_opt = true;
5915 FLAG_turbo_exceptions = true;
// NOTE(review): The time1/time2/time3 definitions and the closing brace are
// on elided lines in this excerpt.
//
// Like NewSpaceAllocationThroughput2 but the synthetic samples feed the
// old-generation counter argument (third parameter) instead of new space.
5921 TEST(OldGenerationAllocationThroughput) {
5922 CcTest::InitializeVM();
5923 v8::HandleScope scope(CcTest::isolate());
5924 Isolate* isolate = CcTest::i_isolate();
5925 Heap* heap = isolate->heap();
5926 GCTracer* tracer = heap->tracer();
5928 size_t counter1 = 1000;
5929 tracer->SampleAllocation(time1, 0, counter1);
5931 size_t counter2 = 2000;
5932 tracer->SampleAllocation(time2, 0, counter2);
5933 size_t throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
5934 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
5936 size_t counter3 = 30000;
5937 tracer->SampleAllocation(time3, 0, counter3);
5938 throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
5939 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
5943 TEST(AllocationThroughput) {
5944 CcTest::InitializeVM();
5945 v8::HandleScope scope(CcTest::isolate());
5946 Isolate* isolate = CcTest::i_isolate();
5947 Heap* heap = isolate->heap();
5948 GCTracer* tracer = heap->tracer();
5950 size_t counter1 = 1000;
5951 tracer->SampleAllocation(time1, counter1, counter1);
5953 size_t counter2 = 2000;
5954 tracer->SampleAllocation(time2, counter2, counter2);
5955 size_t throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
5956 CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
5958 size_t counter3 = 30000;
5959 tracer->SampleAllocation(time3, counter3, counter3);
5960 throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
5961 CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);