v8: upgrade to 3.20.17
[platform/upstream/nodejs.git] / deps / v8 / test / cctest / test-heap.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include <stdlib.h>
29
30 #include "v8.h"
31
32 #include "compilation-cache.h"
33 #include "execution.h"
34 #include "factory.h"
35 #include "macro-assembler.h"
36 #include "global-handles.h"
37 #include "stub-cache.h"
38 #include "cctest.h"
39
40 using namespace v8::internal;
41
42
43 // Go through all incremental marking steps in one swoop.
44 static void SimulateIncrementalMarking() {
45   MarkCompactCollector* collector = HEAP->mark_compact_collector();
46   IncrementalMarking* marking = HEAP->incremental_marking();
47   if (collector->IsConcurrentSweepingInProgress()) {
48     collector->WaitUntilSweepingCompleted();
49   }
50   CHECK(marking->IsMarking() || marking->IsStopped());
51   if (marking->IsStopped()) {
52     marking->Start();
53   }
54   CHECK(marking->IsMarking());
55   while (!marking->IsComplete()) {
56     marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
57   }
58   CHECK(marking->IsComplete());
59 }
60
61
62 static void CheckMap(Map* map, int type, int instance_size) {
63   CHECK(map->IsHeapObject());
64 #ifdef DEBUG
65   CHECK(HEAP->Contains(map));
66 #endif
67   CHECK_EQ(HEAP->meta_map(), map->map());
68   CHECK_EQ(type, map->instance_type());
69   CHECK_EQ(instance_size, map->instance_size());
70 }
71
72
73 TEST(HeapMaps) {
74   CcTest::InitializeVM();
75   CheckMap(HEAP->meta_map(), MAP_TYPE, Map::kSize);
76   CheckMap(HEAP->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
77   CheckMap(HEAP->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
78   CheckMap(HEAP->string_map(), STRING_TYPE, kVariableSizeSentinel);
79 }
80
81
82 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
83   CHECK(obj->IsOddball());
84   bool exc;
85   Object* print_string =
86       *Execution::ToString(Handle<Object>(obj, isolate), &exc);
87   CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
88 }
89
90
91 static void CheckSmi(Isolate* isolate, int value, const char* string) {
92   bool exc;
93   Object* print_string =
94       *Execution::ToString(Handle<Object>(Smi::FromInt(value), isolate), &exc);
95   CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
96 }
97
98
99 static void CheckNumber(Isolate* isolate, double value, const char* string) {
100   Object* obj = HEAP->NumberFromDouble(value)->ToObjectChecked();
101   CHECK(obj->IsNumber());
102   bool exc;
103   Object* print_string =
104       *Execution::ToString(Handle<Object>(obj, isolate), &exc);
105   CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
106 }
107
108
109 static void CheckFindCodeObject(Isolate* isolate) {
110   // Test FindCodeObject
111 #define __ assm.
112
113   Assembler assm(isolate, NULL, 0);
114
115   __ nop();  // supported on all architectures
116
117   CodeDesc desc;
118   assm.GetCode(&desc);
119   Heap* heap = isolate->heap();
120   Object* code = heap->CreateCode(
121       desc,
122       Code::ComputeFlags(Code::STUB),
123       Handle<Code>())->ToObjectChecked();
124   CHECK(code->IsCode());
125
126   HeapObject* obj = HeapObject::cast(code);
127   Address obj_addr = obj->address();
128
129   for (int i = 0; i < obj->Size(); i += kPointerSize) {
130     Object* found = isolate->FindCodeObject(obj_addr + i);
131     CHECK_EQ(code, found);
132   }
133
134   Object* copy = heap->CreateCode(
135       desc,
136       Code::ComputeFlags(Code::STUB),
137       Handle<Code>())->ToObjectChecked();
138   CHECK(copy->IsCode());
139   HeapObject* obj_copy = HeapObject::cast(copy);
140   Object* not_right = isolate->FindCodeObject(obj_copy->address() +
141                                               obj_copy->Size() / 2);
142   CHECK(not_right != code);
143 }
144
145
146 TEST(HeapObjects) {
147   CcTest::InitializeVM();
148   Isolate* isolate = Isolate::Current();
149   Factory* factory = isolate->factory();
150   Heap* heap = isolate->heap();
151
152   HandleScope sc(isolate);
153   Object* value = heap->NumberFromDouble(1.000123)->ToObjectChecked();
154   CHECK(value->IsHeapNumber());
155   CHECK(value->IsNumber());
156   CHECK_EQ(1.000123, value->Number());
157
158   value = heap->NumberFromDouble(1.0)->ToObjectChecked();
159   CHECK(value->IsSmi());
160   CHECK(value->IsNumber());
161   CHECK_EQ(1.0, value->Number());
162
163   value = heap->NumberFromInt32(1024)->ToObjectChecked();
164   CHECK(value->IsSmi());
165   CHECK(value->IsNumber());
166   CHECK_EQ(1024.0, value->Number());
167
168   value = heap->NumberFromInt32(Smi::kMinValue)->ToObjectChecked();
169   CHECK(value->IsSmi());
170   CHECK(value->IsNumber());
171   CHECK_EQ(Smi::kMinValue, Smi::cast(value)->value());
172
173   value = heap->NumberFromInt32(Smi::kMaxValue)->ToObjectChecked();
174   CHECK(value->IsSmi());
175   CHECK(value->IsNumber());
176   CHECK_EQ(Smi::kMaxValue, Smi::cast(value)->value());
177
178 #ifndef V8_TARGET_ARCH_X64
179   // TODO(lrn): We need a NumberFromIntptr function in order to test this.
180   value = heap->NumberFromInt32(Smi::kMinValue - 1)->ToObjectChecked();
181   CHECK(value->IsHeapNumber());
182   CHECK(value->IsNumber());
183   CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
184 #endif
185
186   MaybeObject* maybe_value =
187       heap->NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
188   value = maybe_value->ToObjectChecked();
189   CHECK(value->IsHeapNumber());
190   CHECK(value->IsNumber());
191   CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
192            value->Number());
193
194   maybe_value = heap->NumberFromUint32(static_cast<uint32_t>(1) << 31);
195   value = maybe_value->ToObjectChecked();
196   CHECK(value->IsHeapNumber());
197   CHECK(value->IsNumber());
198   CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
199            value->Number());
200
201   // nan oddball checks
202   CHECK(heap->nan_value()->IsNumber());
203   CHECK(std::isnan(heap->nan_value()->Number()));
204
205   Handle<String> s = factory->NewStringFromAscii(CStrVector("fisk hest "));
206   CHECK(s->IsString());
207   CHECK_EQ(10, s->length());
208
209   String* object_string = String::cast(heap->Object_string());
210   CHECK(
211       Isolate::Current()->context()->global_object()->HasLocalProperty(
212           object_string));
213
214   // Check ToString for oddballs
215   CheckOddball(isolate, heap->true_value(), "true");
216   CheckOddball(isolate, heap->false_value(), "false");
217   CheckOddball(isolate, heap->null_value(), "null");
218   CheckOddball(isolate, heap->undefined_value(), "undefined");
219
220   // Check ToString for Smis
221   CheckSmi(isolate, 0, "0");
222   CheckSmi(isolate, 42, "42");
223   CheckSmi(isolate, -42, "-42");
224
225   // Check ToString for Numbers
226   CheckNumber(isolate, 1.1, "1.1");
227
228   CheckFindCodeObject(isolate);
229 }
230
231
232 TEST(Tagging) {
233   CcTest::InitializeVM();
234   int request = 24;
235   CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
236   CHECK(Smi::FromInt(42)->IsSmi());
237   CHECK(Failure::RetryAfterGC(NEW_SPACE)->IsFailure());
238   CHECK_EQ(NEW_SPACE,
239            Failure::RetryAfterGC(NEW_SPACE)->allocation_space());
240   CHECK_EQ(OLD_POINTER_SPACE,
241            Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space());
242   CHECK(Failure::Exception()->IsFailure());
243   CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
244   CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
245 }
246
247
248 TEST(GarbageCollection) {
249   CcTest::InitializeVM();
250   Isolate* isolate = Isolate::Current();
251   Heap* heap = isolate->heap();
252   Factory* factory = isolate->factory();
253
254   HandleScope sc(isolate);
255   // Check GC.
256   heap->CollectGarbage(NEW_SPACE);
257
258   Handle<String> name = factory->InternalizeUtf8String("theFunction");
259   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
260   Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
261   Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
262
263   {
264     HandleScope inner_scope(isolate);
265     // Allocate a function and keep it in global object's property.
266     Handle<JSFunction> function =
267         factory->NewFunction(name, factory->undefined_value());
268     Handle<Map> initial_map =
269         factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
270     function->set_initial_map(*initial_map);
271     Isolate::Current()->context()->global_object()->SetProperty(
272         *name, *function, NONE, kNonStrictMode)->ToObjectChecked();
273     // Allocate an object.  Unrooted after leaving the scope.
274     Handle<JSObject> obj = factory->NewJSObject(function);
275     obj->SetProperty(
276         *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
277     obj->SetProperty(
278         *prop_namex, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
279
280     CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
281     CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex));
282   }
283
284   heap->CollectGarbage(NEW_SPACE);
285
286   // Function should be alive.
287   CHECK(Isolate::Current()->context()->global_object()->
288         HasLocalProperty(*name));
289   // Check function is retained.
290   Object* func_value = Isolate::Current()->context()->global_object()->
291       GetProperty(*name)->ToObjectChecked();
292   CHECK(func_value->IsJSFunction());
293   Handle<JSFunction> function(JSFunction::cast(func_value));
294
295   {
296     HandleScope inner_scope(isolate);
297     // Allocate another object, make it reachable from global.
298     Handle<JSObject> obj = factory->NewJSObject(function);
299     Isolate::Current()->context()->global_object()->SetProperty(
300         *obj_name, *obj, NONE, kNonStrictMode)->ToObjectChecked();
301     obj->SetProperty(
302         *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
303   }
304
305   // After gc, it should survive.
306   heap->CollectGarbage(NEW_SPACE);
307
308   CHECK(Isolate::Current()->context()->global_object()->
309         HasLocalProperty(*obj_name));
310   CHECK(Isolate::Current()->context()->global_object()->
311         GetProperty(*obj_name)->ToObjectChecked()->IsJSObject());
312   Object* obj = Isolate::Current()->context()->global_object()->
313       GetProperty(*obj_name)->ToObjectChecked();
314   JSObject* js_obj = JSObject::cast(obj);
315   CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name));
316 }
317
318
319 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
320   HandleScope scope(isolate);
321   Handle<String> s = isolate->factory()->NewStringFromUtf8(CStrVector(string));
322   CHECK_EQ(StrLength(string), s->length());
323   for (int index = 0; index < s->length(); index++) {
324     CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
325   }
326 }
327
328
329 TEST(String) {
330   CcTest::InitializeVM();
331   Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
332
333   VerifyStringAllocation(isolate, "a");
334   VerifyStringAllocation(isolate, "ab");
335   VerifyStringAllocation(isolate, "abc");
336   VerifyStringAllocation(isolate, "abcd");
337   VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
338 }
339
340
341 TEST(LocalHandles) {
342   CcTest::InitializeVM();
343   Isolate* isolate = Isolate::Current();
344   Factory* factory = isolate->factory();
345
346   v8::HandleScope scope(CcTest::isolate());
347   const char* name = "Kasper the spunky";
348   Handle<String> string = factory->NewStringFromAscii(CStrVector(name));
349   CHECK_EQ(StrLength(name), string->length());
350 }
351
352
353 TEST(GlobalHandles) {
354   CcTest::InitializeVM();
355   Isolate* isolate = Isolate::Current();
356   Heap* heap = isolate->heap();
357   Factory* factory = isolate->factory();
358   GlobalHandles* global_handles = isolate->global_handles();
359
360   Handle<Object> h1;
361   Handle<Object> h2;
362   Handle<Object> h3;
363   Handle<Object> h4;
364
365   {
366     HandleScope scope(isolate);
367
368     Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
369     Handle<Object> u = factory->NewNumber(1.12344);
370
371     h1 = global_handles->Create(*i);
372     h2 = global_handles->Create(*u);
373     h3 = global_handles->Create(*i);
374     h4 = global_handles->Create(*u);
375   }
376
377   // after gc, it should survive
378   heap->CollectGarbage(NEW_SPACE);
379
380   CHECK((*h1)->IsString());
381   CHECK((*h2)->IsHeapNumber());
382   CHECK((*h3)->IsString());
383   CHECK((*h4)->IsHeapNumber());
384
385   CHECK_EQ(*h3, *h1);
386   global_handles->Destroy(h1.location());
387   global_handles->Destroy(h3.location());
388
389   CHECK_EQ(*h4, *h2);
390   global_handles->Destroy(h2.location());
391   global_handles->Destroy(h4.location());
392 }
393
394
395 static bool WeakPointerCleared = false;
396
397 static void TestWeakGlobalHandleCallback(v8::Isolate* isolate,
398                                          v8::Persistent<v8::Value>* handle,
399                                          void* id) {
400   if (1234 == reinterpret_cast<intptr_t>(id)) WeakPointerCleared = true;
401   handle->Dispose(isolate);
402 }
403
404
405 TEST(WeakGlobalHandlesScavenge) {
406   i::FLAG_stress_compaction = false;
407   CcTest::InitializeVM();
408   Isolate* isolate = Isolate::Current();
409   Heap* heap = isolate->heap();
410   Factory* factory = isolate->factory();
411   GlobalHandles* global_handles = isolate->global_handles();
412
413   WeakPointerCleared = false;
414
415   Handle<Object> h1;
416   Handle<Object> h2;
417
418   {
419     HandleScope scope(isolate);
420
421     Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
422     Handle<Object> u = factory->NewNumber(1.12344);
423
424     h1 = global_handles->Create(*i);
425     h2 = global_handles->Create(*u);
426   }
427
428   global_handles->MakeWeak(h2.location(),
429                            reinterpret_cast<void*>(1234),
430                            &TestWeakGlobalHandleCallback);
431
432   // Scavenge treats weak pointers as normal roots.
433   heap->PerformScavenge();
434
435   CHECK((*h1)->IsString());
436   CHECK((*h2)->IsHeapNumber());
437
438   CHECK(!WeakPointerCleared);
439   CHECK(!global_handles->IsNearDeath(h2.location()));
440   CHECK(!global_handles->IsNearDeath(h1.location()));
441
442   global_handles->Destroy(h1.location());
443   global_handles->Destroy(h2.location());
444 }
445
446
447 TEST(WeakGlobalHandlesMark) {
448   CcTest::InitializeVM();
449   Isolate* isolate = Isolate::Current();
450   Heap* heap = isolate->heap();
451   Factory* factory = isolate->factory();
452   GlobalHandles* global_handles = isolate->global_handles();
453
454   WeakPointerCleared = false;
455
456   Handle<Object> h1;
457   Handle<Object> h2;
458
459   {
460     HandleScope scope(isolate);
461
462     Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
463     Handle<Object> u = factory->NewNumber(1.12344);
464
465     h1 = global_handles->Create(*i);
466     h2 = global_handles->Create(*u);
467   }
468
469   // Make sure the objects are promoted.
470   heap->CollectGarbage(OLD_POINTER_SPACE);
471   heap->CollectGarbage(NEW_SPACE);
472   CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
473
474   global_handles->MakeWeak(h2.location(),
475                            reinterpret_cast<void*>(1234),
476                            &TestWeakGlobalHandleCallback);
477   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
478   CHECK(!GlobalHandles::IsNearDeath(h2.location()));
479
480   // Incremental marking potentially marked handles before they turned weak.
481   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
482
483   CHECK((*h1)->IsString());
484
485   CHECK(WeakPointerCleared);
486   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
487
488   global_handles->Destroy(h1.location());
489 }
490
491
492 TEST(DeleteWeakGlobalHandle) {
493   i::FLAG_stress_compaction = false;
494   CcTest::InitializeVM();
495   Isolate* isolate = Isolate::Current();
496   Heap* heap = isolate->heap();
497   Factory* factory = isolate->factory();
498   GlobalHandles* global_handles = isolate->global_handles();
499
500   WeakPointerCleared = false;
501
502   Handle<Object> h;
503
504   {
505     HandleScope scope(isolate);
506
507     Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk"));
508     h = global_handles->Create(*i);
509   }
510
511   global_handles->MakeWeak(h.location(),
512                            reinterpret_cast<void*>(1234),
513                            &TestWeakGlobalHandleCallback);
514
515   // Scanvenge does not recognize weak reference.
516   heap->PerformScavenge();
517
518   CHECK(!WeakPointerCleared);
519
520   // Mark-compact treats weak reference properly.
521   heap->CollectGarbage(OLD_POINTER_SPACE);
522
523   CHECK(WeakPointerCleared);
524 }
525
526
// Fixed, NULL-terminated table of strings (mostly Java/JavaScript reserved
// words) used by TEST(StringTable) to exercise string internalization.
static const char* not_so_random_string_table[] = {
  "abstract",
  "boolean",
  "break",
  "byte",
  "case",
  "catch",
  "char",
  "class",
  "const",
  "continue",
  "debugger",
  "default",
  "delete",
  "do",
  "double",
  "else",
  "enum",
  "export",
  "extends",
  "false",
  "final",
  "finally",
  "float",
  "for",
  "function",
  "goto",
  "if",
  "implements",
  "import",
  "in",
  "instanceof",
  "int",
  "interface",
  "long",
  "native",
  "new",
  "null",
  "package",
  "private",
  "protected",
  "public",
  "return",
  "short",
  "static",
  "super",
  "switch",
  "synchronized",
  "this",
  "throw",
  "throws",
  "transient",
  "true",
  "try",
  "typeof",
  "var",
  "void",
  "volatile",
  "while",
  "with",
  0
};
589
590
591 static void CheckInternalizedStrings(const char** strings) {
592   for (const char* string = *strings; *strings != 0; string = *strings++) {
593     Object* a;
594     MaybeObject* maybe_a = HEAP->InternalizeUtf8String(string);
595     // InternalizeUtf8String may return a failure if a GC is needed.
596     if (!maybe_a->ToObject(&a)) continue;
597     CHECK(a->IsInternalizedString());
598     Object* b;
599     MaybeObject* maybe_b = HEAP->InternalizeUtf8String(string);
600     if (!maybe_b->ToObject(&b)) continue;
601     CHECK_EQ(b, a);
602     CHECK(String::cast(b)->IsUtf8EqualTo(CStrVector(string)));
603   }
604 }
605
606
607 TEST(StringTable) {
608   CcTest::InitializeVM();
609
610   CheckInternalizedStrings(not_so_random_string_table);
611   CheckInternalizedStrings(not_so_random_string_table);
612 }
613
614
615 TEST(FunctionAllocation) {
616   CcTest::InitializeVM();
617   Isolate* isolate = Isolate::Current();
618   Factory* factory = isolate->factory();
619
620   v8::HandleScope sc(CcTest::isolate());
621   Handle<String> name = factory->InternalizeUtf8String("theFunction");
622   Handle<JSFunction> function =
623       factory->NewFunction(name, factory->undefined_value());
624   Handle<Map> initial_map =
625       factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
626   function->set_initial_map(*initial_map);
627
628   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
629   Handle<JSObject> obj = factory->NewJSObject(function);
630   obj->SetProperty(
631       *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
632   CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
633   // Check that we can add properties to function objects.
634   function->SetProperty(
635       *prop_name, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
636   CHECK_EQ(Smi::FromInt(24), function->GetProperty(*prop_name));
637 }
638
639
640 TEST(ObjectProperties) {
641   CcTest::InitializeVM();
642   Isolate* isolate = Isolate::Current();
643   Factory* factory = isolate->factory();
644
645   v8::HandleScope sc(CcTest::isolate());
646   String* object_string = String::cast(HEAP->Object_string());
647   Object* raw_object = Isolate::Current()->context()->global_object()->
648       GetProperty(object_string)->ToObjectChecked();
649   JSFunction* object_function = JSFunction::cast(raw_object);
650   Handle<JSFunction> constructor(object_function);
651   Handle<JSObject> obj = factory->NewJSObject(constructor);
652   Handle<String> first = factory->InternalizeUtf8String("first");
653   Handle<String> second = factory->InternalizeUtf8String("second");
654
655   // check for empty
656   CHECK(!obj->HasLocalProperty(*first));
657
658   // add first
659   obj->SetProperty(
660       *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
661   CHECK(obj->HasLocalProperty(*first));
662
663   // delete first
664   JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
665   CHECK(!obj->HasLocalProperty(*first));
666
667   // add first and then second
668   obj->SetProperty(
669       *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
670   obj->SetProperty(
671       *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
672   CHECK(obj->HasLocalProperty(*first));
673   CHECK(obj->HasLocalProperty(*second));
674
675   // delete first and then second
676   JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
677   CHECK(obj->HasLocalProperty(*second));
678   JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION);
679   CHECK(!obj->HasLocalProperty(*first));
680   CHECK(!obj->HasLocalProperty(*second));
681
682   // add first and then second
683   obj->SetProperty(
684       *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
685   obj->SetProperty(
686       *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
687   CHECK(obj->HasLocalProperty(*first));
688   CHECK(obj->HasLocalProperty(*second));
689
690   // delete second and then first
691   JSReceiver::DeleteProperty(obj, second, JSReceiver::NORMAL_DELETION);
692   CHECK(obj->HasLocalProperty(*first));
693   JSReceiver::DeleteProperty(obj, first, JSReceiver::NORMAL_DELETION);
694   CHECK(!obj->HasLocalProperty(*first));
695   CHECK(!obj->HasLocalProperty(*second));
696
697   // check string and internalized string match
698   const char* string1 = "fisk";
699   Handle<String> s1 = factory->NewStringFromAscii(CStrVector(string1));
700   obj->SetProperty(
701       *s1, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
702   Handle<String> s1_string = factory->InternalizeUtf8String(string1);
703   CHECK(obj->HasLocalProperty(*s1_string));
704
705   // check internalized string and string match
706   const char* string2 = "fugl";
707   Handle<String> s2_string = factory->InternalizeUtf8String(string2);
708   obj->SetProperty(
709       *s2_string, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
710   Handle<String> s2 = factory->NewStringFromAscii(CStrVector(string2));
711   CHECK(obj->HasLocalProperty(*s2));
712 }
713
714
715 TEST(JSObjectMaps) {
716   CcTest::InitializeVM();
717   Isolate* isolate = Isolate::Current();
718   Factory* factory = isolate->factory();
719
720   v8::HandleScope sc(CcTest::isolate());
721   Handle<String> name = factory->InternalizeUtf8String("theFunction");
722   Handle<JSFunction> function =
723       factory->NewFunction(name, factory->undefined_value());
724   Handle<Map> initial_map =
725       factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
726   function->set_initial_map(*initial_map);
727
728   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
729   Handle<JSObject> obj = factory->NewJSObject(function);
730
731   // Set a propery
732   obj->SetProperty(
733       *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
734   CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
735
736   // Check the map has changed
737   CHECK(*initial_map != obj->map());
738 }
739
740
741 TEST(JSArray) {
742   CcTest::InitializeVM();
743   Isolate* isolate = Isolate::Current();
744   Factory* factory = isolate->factory();
745
746   v8::HandleScope sc(CcTest::isolate());
747   Handle<String> name = factory->InternalizeUtf8String("Array");
748   Object* raw_object = Isolate::Current()->context()->global_object()->
749       GetProperty(*name)->ToObjectChecked();
750   Handle<JSFunction> function = Handle<JSFunction>(
751       JSFunction::cast(raw_object));
752
753   // Allocate the object.
754   Handle<JSObject> object = factory->NewJSObject(function);
755   Handle<JSArray> array = Handle<JSArray>::cast(object);
756   // We just initialized the VM, no heap allocation failure yet.
757   array->Initialize(0)->ToObjectChecked();
758
759   // Set array length to 0.
760   array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked();
761   CHECK_EQ(Smi::FromInt(0), array->length());
762   // Must be in fast mode.
763   CHECK(array->HasFastSmiOrObjectElements());
764
765   // array[length] = name.
766   array->SetElement(0, *name, NONE, kNonStrictMode)->ToObjectChecked();
767   CHECK_EQ(Smi::FromInt(1), array->length());
768   CHECK_EQ(array->GetElement(0), *name);
769
770   // Set array length with larger than smi value.
771   Handle<Object> length =
772       factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
773   array->SetElementsLength(*length)->ToObjectChecked();
774
775   uint32_t int_length = 0;
776   CHECK(length->ToArrayIndex(&int_length));
777   CHECK_EQ(*length, array->length());
778   CHECK(array->HasDictionaryElements());  // Must be in slow mode.
779
780   // array[length] = name.
781   array->SetElement(int_length, *name, NONE, kNonStrictMode)->ToObjectChecked();
782   uint32_t new_int_length = 0;
783   CHECK(array->length()->ToArrayIndex(&new_int_length));
784   CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
785   CHECK_EQ(array->GetElement(int_length), *name);
786   CHECK_EQ(array->GetElement(0), *name);
787 }
788
789
790 TEST(JSObjectCopy) {
791   CcTest::InitializeVM();
792   Isolate* isolate = Isolate::Current();
793   Factory* factory = isolate->factory();
794
795   v8::HandleScope sc(CcTest::isolate());
796   String* object_string = String::cast(HEAP->Object_string());
797   Object* raw_object = Isolate::Current()->context()->global_object()->
798       GetProperty(object_string)->ToObjectChecked();
799   JSFunction* object_function = JSFunction::cast(raw_object);
800   Handle<JSFunction> constructor(object_function);
801   Handle<JSObject> obj = factory->NewJSObject(constructor);
802   Handle<String> first = factory->InternalizeUtf8String("first");
803   Handle<String> second = factory->InternalizeUtf8String("second");
804
805   obj->SetProperty(
806       *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
807   obj->SetProperty(
808       *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
809
810   obj->SetElement(0, *first, NONE, kNonStrictMode)->ToObjectChecked();
811   obj->SetElement(1, *second, NONE, kNonStrictMode)->ToObjectChecked();
812
813   // Make the clone.
814   Handle<JSObject> clone = Copy(obj);
815   CHECK(!clone.is_identical_to(obj));
816
817   CHECK_EQ(obj->GetElement(0), clone->GetElement(0));
818   CHECK_EQ(obj->GetElement(1), clone->GetElement(1));
819
820   CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*first));
821   CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*second));
822
823   // Flip the values.
824   clone->SetProperty(
825       *first, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
826   clone->SetProperty(
827       *second, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
828
829   clone->SetElement(0, *second, NONE, kNonStrictMode)->ToObjectChecked();
830   clone->SetElement(1, *first, NONE, kNonStrictMode)->ToObjectChecked();
831
832   CHECK_EQ(obj->GetElement(1), clone->GetElement(0));
833   CHECK_EQ(obj->GetElement(0), clone->GetElement(1));
834
835   CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*first));
836   CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*second));
837 }
838
839
840 TEST(StringAllocation) {
841   CcTest::InitializeVM();
842   Isolate* isolate = Isolate::Current();
843   Factory* factory = isolate->factory();
844
845   const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
846   for (int length = 0; length < 100; length++) {
847     v8::HandleScope scope(CcTest::isolate());
848     char* non_ascii = NewArray<char>(3 * length + 1);
849     char* ascii = NewArray<char>(length + 1);
850     non_ascii[3 * length] = 0;
851     ascii[length] = 0;
852     for (int i = 0; i < length; i++) {
853       ascii[i] = 'a';
854       non_ascii[3 * i] = chars[0];
855       non_ascii[3 * i + 1] = chars[1];
856       non_ascii[3 * i + 2] = chars[2];
857     }
858     Handle<String> non_ascii_sym =
859         factory->InternalizeUtf8String(
860             Vector<const char>(non_ascii, 3 * length));
861     CHECK_EQ(length, non_ascii_sym->length());
862     Handle<String> ascii_sym =
863         factory->InternalizeOneByteString(OneByteVector(ascii, length));
864     CHECK_EQ(length, ascii_sym->length());
865     Handle<String> non_ascii_str =
866         factory->NewStringFromUtf8(Vector<const char>(non_ascii, 3 * length));
867     non_ascii_str->Hash();
868     CHECK_EQ(length, non_ascii_str->length());
869     Handle<String> ascii_str =
870         factory->NewStringFromUtf8(Vector<const char>(ascii, length));
871     ascii_str->Hash();
872     CHECK_EQ(length, ascii_str->length());
873     DeleteArray(non_ascii);
874     DeleteArray(ascii);
875   }
876 }
877
878
879 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
880   // Count the number of objects found in the heap.
881   int found_count = 0;
882   heap->EnsureHeapIsIterable();
883   HeapIterator iterator(heap);
884   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
885     for (int i = 0; i < size; i++) {
886       if (*objs[i] == obj) {
887         found_count++;
888       }
889     }
890   }
891   return found_count;
892 }
893
894
TEST(Iteration) {
  // Allocate one object into each interesting space (new, old pointer,
  // old data, large object) plus a map, then verify that a heap walk
  // finds every one of them exactly once.
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] = factory->NewJSArray(10,
                                                FAST_HOLEY_ELEMENTS,
                                                TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] =
      factory->NewStringFromAscii(CStrVector("abcdefghij"));
  objs[next_objs_index++] =
      factory->NewStringFromAscii(CStrVector("abcdefghij"), TENURED);

  // Allocate a large string (for large object space).
  int large_size = Page::kMaxNonCodeHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] =
      factory->NewStringFromAscii(CStrVector(str), TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(HEAP, objs, objs_count));
}
933
934
935 TEST(EmptyHandleEscapeFrom) {
936   CcTest::InitializeVM();
937
938   v8::HandleScope scope(CcTest::isolate());
939   Handle<JSObject> runaway;
940
941   {
942       v8::HandleScope nested(CcTest::isolate());
943       Handle<JSObject> empty;
944       runaway = empty.EscapeFrom(&nested);
945   }
946
947   CHECK(runaway.is_null());
948 }
949
950
951 static int LenFromSize(int size) {
952   return (size - FixedArray::kHeaderSize) / kPointerSize;
953 }
954
955
TEST(Regression39128) {
  // Test case for crbug.com/39128.
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();

  // Increase the chance of 'bump-the-pointer' allocation in old space.
  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  v8::HandleScope scope(CcTest::isolate());

  // The plan: create JSObject which references objects in new space.
  // Then clone this object (forcing it to go into old space) and check
  // that region dirty marks are updated correctly.

  // Step 1: prepare a map for the object.  We add 1 inobject property to it.
  Handle<JSFunction> object_ctor(
      Isolate::Current()->native_context()->object_function());
  CHECK(object_ctor->has_initial_map());
  Handle<Map> object_map(object_ctor->initial_map());
  // Create a map with single inobject property.
  Handle<Map> my_map = factory->CopyMap(object_map, 1);
  int n_properties = my_map->inobject_properties();
  CHECK_GT(n_properties, 0);

  int object_size = my_map->instance_size();

  // Step 2: allocate a lot of objects so to almost fill new space: we need
  // just enough room to allocate JSObject and thus fill the newspace.

  int allocation_amount = Min(FixedArray::kMaxSize,
                              Page::kMaxNonCodeHeapObjectSize + kPointerSize);
  int allocation_len = LenFromSize(allocation_amount);
  NewSpace* new_space = HEAP->new_space();
  Address* top_addr = new_space->allocation_top_address();
  Address* limit_addr = new_space->allocation_limit_address();
  while ((*limit_addr - *top_addr) > allocation_amount) {
    CHECK(!HEAP->always_allocate());
    Object* array = HEAP->AllocateFixedArray(allocation_len)->ToObjectChecked();
    CHECK(!array->IsFailure());
    CHECK(new_space->Contains(array));
  }

  // Step 3: now allocate fixed array and JSObject to fill the whole new space.
  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
  int fixed_array_len = LenFromSize(to_fill);
  CHECK(fixed_array_len < FixedArray::kMaxLength);

  CHECK(!HEAP->always_allocate());
  Object* array = HEAP->AllocateFixedArray(fixed_array_len)->ToObjectChecked();
  CHECK(!array->IsFailure());
  CHECK(new_space->Contains(array));

  Object* object = HEAP->AllocateJSObjectFromMap(*my_map)->ToObjectChecked();
  CHECK(new_space->Contains(object));
  JSObject* jsobject = JSObject::cast(object);
  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
  CHECK_EQ(0, jsobject->properties()->length());
  // Create a reference to object in new space in jsobject.
  // NOTE(review): index -1 appears to address the single in-object
  // property added in step 1 -- confirm against FastPropertyAtPut's
  // indexing convention.
  jsobject->FastPropertyAtPut(-1, array);

  // New space must now be completely full.
  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));

  // Step 4: clone jsobject, but force always allocate first to create a clone
  // in old pointer space.
  Address old_pointer_space_top = HEAP->old_pointer_space()->top();
  AlwaysAllocateScope aa_scope;
  Object* clone_obj = HEAP->CopyJSObject(jsobject)->ToObjectChecked();
  JSObject* clone = JSObject::cast(clone_obj);
  if (clone->address() != old_pointer_space_top) {
    // Alas, got allocated from free list, we cannot do checks.
    return;
  }
  CHECK(HEAP->old_pointer_space()->Contains(clone->address()));
}
1031
1032
TEST(TestCodeFlushing) {
  // Verify that the compiled code of an unused function survives a couple
  // of full GCs, is flushed after enough full-marking GCs, and is lazily
  // recompiled on the next call.
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Object* func_value = Isolate::Current()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  }

  // foo should no longer be in the compilation cache.
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}
1080
1081
TEST(TestCodeFlushingIncremental) {
  // Like TestCodeFlushing, but drives flushing through incremental
  // marking, and additionally checks that a function can be optimized
  // while it is enqueued as a flushing candidate without corrupting the
  // candidate queue.
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Object* func_value = Isolate::Current()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking();
    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking();
    if (!function->next_function_link()->IsUndefined()) break;
    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}
1148
1149
TEST(TestCodeFlushingIncrementalScavenge) {
  // Check that a scavenge performed while incremental marking has
  // functions enqueued as code-flushing candidates leaves the candidate
  // queue sane, even when one of the candidates dies.
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "var foo = function() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       "  var x = 23;"
                       "};"
                       "bar();";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");

  // Perform one initial GC to enable code flushing.
  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check functions are compiled.
  Object* func_value = Isolate::Current()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());
  Object* func_value2 = Isolate::Current()->context()->global_object()->
      GetProperty(*bar_name)->ToObjectChecked();
  CHECK(func_value2->IsJSFunction());
  Handle<JSFunction> function2(JSFunction::cast(func_value2));
  CHECK(function2->shared()->is_compiled());

  // Clear references to functions so that one of them can die.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions. Finally
  // perform a scavenge while incremental marking is still running.
  SimulateIncrementalMarking();
  // Overwriting the handle slot with NULL "kills" function2 while it is
  // still enqueued as a candidate.
  *function2.location() = NULL;
  HEAP->CollectGarbage(NEW_SPACE, "test scavenge while marking");

  // Simulate one final GC to make sure the candidate queue is sane.
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}
1216
1217
TEST(TestCodeFlushingIncrementalAbort) {
  // Check that aborting incremental marking (triggered here via the
  // debugger setting a breakpoint) disables code flushing and leaves the
  // candidate queue in a sane state.
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Object* func_value = Isolate::Current()->context()->global_object()->
      GetProperty(*foo_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function(JSFunction::cast(func_value));
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  SimulateIncrementalMarking();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  int position = 0;
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();
#endif  // ENABLE_DEBUGGER_SUPPORT

  // Force optimization now that code flushing is disabled.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}
1282
1283
1284 // Count the number of native contexts in the weak list of native contexts.
1285 int CountNativeContexts() {
1286   int count = 0;
1287   Object* object = HEAP->native_contexts_list();
1288   while (!object->IsUndefined()) {
1289     count++;
1290     object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1291   }
1292   return count;
1293 }
1294
1295
1296 // Count the number of user functions in the weak list of optimized
1297 // functions attached to a native context.
1298 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1299   int count = 0;
1300   Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1301   Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1302   while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1303     count++;
1304     object = JSFunction::cast(object)->next_function_link();
1305   }
1306   return count;
1307 }
1308
1309
TEST(TestInternalWeakLists) {
  // Verify that the weak list of native contexts, and each context's weak
  // list of optimized functions, are only pruned by mark-compact while
  // scavenges treat the links as strong references.
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  static const int kNumTestContexts = 10;

  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  v8::Handle<v8::Context> ctx[kNumTestContexts];

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of global contexts which get linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(v8::Isolate::GetCurrent());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage(Heap::kNoGCFlags);

    // Functions only appear on the optimized-functions list when
    // --always-opt is set and Crankshaft is usable.
    bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());

    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    const char* source = "function f1() { };"
                         "function f2() { };"
                         "function f3() { };"
                         "function f4() { };"
                         "function f5() { };";
    CompileRun(source);
    CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f1()");
    CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f2()");
    CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f3()");
    CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f4()");
    CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5()");
    CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));

    // Remove function f1.
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      HEAP->PerformScavenge();
      CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
    }

    // Mark compact handles the weak references.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage(Heap::kNoGCFlags);
    CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
      HEAP->PerformScavenge();
      CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
    }
    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
    CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
      HEAP->PerformScavenge();
      CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
    }
    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
    CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));

    ctx[i]->Exit();
  }

  // Force compilation cache cleanup.
  HEAP->NotifyContextDisposed();
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);

  // Dispose the native contexts one by one.
  for (int i = 0; i < kNumTestContexts; i++) {
    // TODO(dcarney): is there a better way to do this?
    i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
    *unsafe = HEAP->undefined_value();
    ctx[i].Clear();

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      HEAP->PerformScavenge();
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
    }

    // Mark compact handles the weak references.
    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
  }

  CHECK_EQ(0, CountNativeContexts());
}
1419
1420
1421 // Count the number of native contexts in the weak list of native contexts
1422 // causing a GC after the specified number of elements.
1423 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1424   Heap* heap = isolate->heap();
1425   int count = 0;
1426   Handle<Object> object(heap->native_contexts_list(), isolate);
1427   while (!object->IsUndefined()) {
1428     count++;
1429     if (count == n) heap->CollectAllGarbage(Heap::kNoGCFlags);
1430     object =
1431         Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1432                        isolate);
1433   }
1434   return count;
1435 }
1436
1437
1438 // Count the number of user functions in the weak list of optimized
1439 // functions attached to a native context causing a GC after the
1440 // specified number of elements.
1441 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1442                                              int n) {
1443   int count = 0;
1444   Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1445   Isolate* isolate = icontext->GetIsolate();
1446   Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1447                         isolate);
1448   while (object->IsJSFunction() &&
1449          !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1450     count++;
1451     if (count == n) isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
1452     object = Handle<Object>(
1453         Object::cast(JSFunction::cast(*object)->next_function_link()),
1454         isolate);
1455   }
1456   return count;
1457 }
1458
1459
TEST(TestInternalWeakListsTraverseWithGC) {
  // Verify that the internal weak lists can be traversed correctly even
  // when a full GC is triggered in the middle of the traversal.
  v8::V8::Initialize();
  Isolate* isolate = Isolate::Current();

  static const int kNumTestContexts = 10;

  HandleScope scope(isolate);
  v8::Handle<v8::Context> ctx[kNumTestContexts];

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of contexts and check the length of the weak list both
  // with and without GCs while iterating the list.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(v8::Isolate::GetCurrent());
    CHECK_EQ(i + 1, CountNativeContexts());
    CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
  }

  bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());

  // Compile a number of functions and check the length of the weak list
  // of optimized functions both with and without GCs while iterating the
  // list.
  ctx[0]->Enter();
  const char* source = "function f1() { };"
                       "function f2() { };"
                       "function f3() { };"
                       "function f4() { };"
                       "function f5() { };";
  CompileRun(source);
  CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
  CompileRun("f1()");
  CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  CompileRun("f2()");
  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  CompileRun("f3()");
  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  CompileRun("f4()");
  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
  CompileRun("f5()");
  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));

  ctx[0]->Exit();
}
1509
1510
TEST(TestSizeOfObjects) {
  // Check that Heap::SizeOfObjects() stays accurate while lazy sweeping
  // is in progress and while the sweeper is advanced step by step.
  v8::V8::Initialize();

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(HEAP->old_pointer_space()->IsLazySweepingComplete());
  int initial_size = static_cast<int>(HEAP->SizeOfObjects());

  {
    // Allocate objects on several different old-space pages so that
    // lazy sweeping kicks in for subsequent GC runs.
    AlwaysAllocateScope always_allocate;
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
    for (int i = 1; i <= 100; i++) {
      HEAP->AllocateFixedArray(8192, TENURED)->ToObjectChecked();
      CHECK_EQ(initial_size + i * filler_size,
               static_cast<int>(HEAP->SizeOfObjects()));
    }
  }

  // The heap size should go back to initial size after a full GC, even
  // though sweeping didn't finish yet.
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);

  // Normally sweeping would not be complete here, but no guarantees.

  CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));

  // Advancing the sweeper step-wise should not change the heap size.
  while (!HEAP->old_pointer_space()->IsLazySweepingComplete()) {
    HEAP->old_pointer_space()->AdvanceSweeper(KB);
    CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
  }
}
1550
1551
1552 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1553   CcTest::InitializeVM();
1554   HEAP->EnsureHeapIsIterable();
1555   intptr_t size_of_objects_1 = HEAP->SizeOfObjects();
1556   HeapIterator iterator(HEAP);
1557   intptr_t size_of_objects_2 = 0;
1558   for (HeapObject* obj = iterator.next();
1559        obj != NULL;
1560        obj = iterator.next()) {
1561     if (!obj->IsFreeSpace()) {
1562       size_of_objects_2 += obj->Size();
1563     }
1564   }
1565   // Delta must be within 5% of the larger result.
1566   // TODO(gc): Tighten this up by distinguishing between byte
1567   // arrays that are real and those that merely mark free space
1568   // on the heap.
1569   if (size_of_objects_1 > size_of_objects_2) {
1570     intptr_t delta = size_of_objects_1 - size_of_objects_2;
1571     PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1572            "Iterator: %" V8_PTR_PREFIX "d, "
1573            "delta: %" V8_PTR_PREFIX "d\n",
1574            size_of_objects_1, size_of_objects_2, delta);
1575     CHECK_GT(size_of_objects_1 / 20, delta);
1576   } else {
1577     intptr_t delta = size_of_objects_2 - size_of_objects_1;
1578     PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1579            "Iterator: %" V8_PTR_PREFIX "d, "
1580            "delta: %" V8_PTR_PREFIX "d\n",
1581            size_of_objects_1, size_of_objects_2, delta);
1582     CHECK_GT(size_of_objects_2 / 20, delta);
1583   }
1584 }
1585
1586
1587 static void FillUpNewSpace(NewSpace* new_space) {
1588   // Fill up new space to the point that it is completely full. Make sure
1589   // that the scavenger does not undo the filling.
1590   Heap* heap = new_space->heap();
1591   Isolate* isolate = heap->isolate();
1592   Factory* factory = isolate->factory();
1593   HandleScope scope(isolate);
1594   AlwaysAllocateScope always_allocate;
1595   intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
1596   intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
1597   for (intptr_t i = 0; i < number_of_fillers; i++) {
1598     CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
1599   }
1600 }
1601
1602
TEST(GrowAndShrinkNewSpace) {
  // Exercise NewSpace::Grow() and NewSpace::Shrink(): the checks below
  // assert that growing doubles the capacity, that shrinking a full space
  // is a no-op, and that shrinking an empty space halves the capacity
  // exactly once.
  CcTest::InitializeVM();
  NewSpace* new_space = HEAP->new_space();

  if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
      HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space.  We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

  // Explicitly growing should double the space capacity.
  intptr_t old_capacity, new_capacity;
  old_capacity = new_space->Capacity();
  new_space->Grow();
  new_capacity = new_space->Capacity();
  CHECK(2 * old_capacity == new_capacity);

  // Filling the space with live objects must not change its capacity.
  old_capacity = new_space->Capacity();
  FillUpNewSpace(new_space);
  new_capacity = new_space->Capacity();
  CHECK(old_capacity == new_capacity);

  // Explicitly shrinking should not affect space capacity.
  old_capacity = new_space->Capacity();
  new_space->Shrink();
  new_capacity = new_space->Capacity();
  CHECK(old_capacity == new_capacity);

  // Let the scavenger empty the new space.
  HEAP->CollectGarbage(NEW_SPACE);
  CHECK_LE(new_space->Size(), old_capacity);

  // Explicitly shrinking should halve the space capacity.
  old_capacity = new_space->Capacity();
  new_space->Shrink();
  new_capacity = new_space->Capacity();
  CHECK(old_capacity == 2 * new_capacity);

  // Consecutive shrinking should not affect space capacity.
  old_capacity = new_space->Capacity();
  new_space->Shrink();
  new_space->Shrink();
  new_space->Shrink();
  new_capacity = new_space->Capacity();
  CHECK(old_capacity == new_capacity);
}
1651
1652
// Test that a last-resort GC (CollectAllAvailableGarbage) shrinks a
// previously grown new space back to its original capacity.
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
  CcTest::InitializeVM();

  if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
      HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space.  We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

  v8::HandleScope scope(CcTest::isolate());
  NewSpace* new_space = HEAP->new_space();
  intptr_t old_capacity, new_capacity;
  old_capacity = new_space->Capacity();
  new_space->Grow();
  new_capacity = new_space->Capacity();
  // Growing is expected to exactly double the new-space capacity.
  CHECK(2 * old_capacity == new_capacity);
  // Fill the grown space, then collect everything; the collector should
  // shrink new space back down to its pre-Grow capacity.
  FillUpNewSpace(new_space);
  HEAP->CollectAllAvailableGarbage();
  new_capacity = new_space->Capacity();
  CHECK(old_capacity == new_capacity);
}
1676
1677
1678 static int NumberOfGlobalObjects() {
1679   int count = 0;
1680   HeapIterator iterator(HEAP);
1681   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1682     if (obj->IsGlobalObject()) count++;
1683   }
1684   return count;
1685 }
1686
1687
// Test that we don't embed maps from foreign contexts into
// optimized code.
TEST(LeakNativeContextViaMap) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = v8::Isolate::GetCurrent();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  HEAP->CollectAllAvailableGarbage();
  // Two live contexts; presumably each contributes two global objects
  // (global object + global proxy) -- TODO confirm against heap layout.
  CHECK_EQ(4, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = {x: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    // Leak an object created in ctx1 into ctx2 ...
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
    ctx2->Enter();
    ctx2->Global()->Set(v8_str("o"), v);
    // ... then optimize ctx2 code that loads a named property from it.
    // If the optimized code embedded the foreign object's map, it would
    // keep ctx1's native context alive after disposal.
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o.x; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value());
    // Drop the last reference to the foreign object before disposing ctx1.
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
    ctx2->Exit();
    v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
    ctx1p.Dispose(isolate);
    v8::V8::ContextDisposedNotification();
  }
  // ctx1 must now be collectible; only ctx2's global objects remain.
  HEAP->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());
  ctx2p.Dispose(isolate);
  HEAP->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
1732
1733
1734 // Test that we don't embed functions from foreign contexts into
1735 // optimized code.
1736 TEST(LeakNativeContextViaFunction) {
1737   i::FLAG_allow_natives_syntax = true;
1738   v8::Isolate* isolate = v8::Isolate::GetCurrent();
1739   v8::HandleScope outer_scope(isolate);
1740   v8::Persistent<v8::Context> ctx1p;
1741   v8::Persistent<v8::Context> ctx2p;
1742   {
1743     v8::HandleScope scope(isolate);
1744     ctx1p.Reset(isolate, v8::Context::New(isolate));
1745     ctx2p.Reset(isolate, v8::Context::New(isolate));
1746     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1747   }
1748
1749   HEAP->CollectAllAvailableGarbage();
1750   CHECK_EQ(4, NumberOfGlobalObjects());
1751
1752   {
1753     v8::HandleScope inner_scope(isolate);
1754     CompileRun("var v = function() { return 42; }");
1755     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1756     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1757     v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1758     ctx2->Enter();
1759     ctx2->Global()->Set(v8_str("o"), v);
1760     v8::Local<v8::Value> res = CompileRun(
1761         "function f(x) { return x(); }"
1762         "for (var i = 0; i < 10; ++i) f(o);"
1763         "%OptimizeFunctionOnNextCall(f);"
1764         "f(o);");
1765     CHECK_EQ(42, res->Int32Value());
1766     ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1767     ctx2->Exit();
1768     ctx1->Exit();
1769     ctx1p.Dispose(ctx1->GetIsolate());
1770     v8::V8::ContextDisposedNotification();
1771   }
1772   HEAP->CollectAllAvailableGarbage();
1773   CHECK_EQ(2, NumberOfGlobalObjects());
1774   ctx2p.Dispose(isolate);
1775   HEAP->CollectAllAvailableGarbage();
1776   CHECK_EQ(0, NumberOfGlobalObjects());
1777 }
1778
1779
1780 TEST(LeakNativeContextViaMapKeyed) {
1781   i::FLAG_allow_natives_syntax = true;
1782   v8::Isolate* isolate = v8::Isolate::GetCurrent();
1783   v8::HandleScope outer_scope(isolate);
1784   v8::Persistent<v8::Context> ctx1p;
1785   v8::Persistent<v8::Context> ctx2p;
1786   {
1787     v8::HandleScope scope(isolate);
1788     ctx1p.Reset(isolate, v8::Context::New(isolate));
1789     ctx2p.Reset(isolate, v8::Context::New(isolate));
1790     v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1791   }
1792
1793   HEAP->CollectAllAvailableGarbage();
1794   CHECK_EQ(4, NumberOfGlobalObjects());
1795
1796   {
1797     v8::HandleScope inner_scope(isolate);
1798     CompileRun("var v = [42, 43]");
1799     v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1800     v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1801     v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1802     ctx2->Enter();
1803     ctx2->Global()->Set(v8_str("o"), v);
1804     v8::Local<v8::Value> res = CompileRun(
1805         "function f() { return o[0]; }"
1806         "for (var i = 0; i < 10; ++i) f();"
1807         "%OptimizeFunctionOnNextCall(f);"
1808         "f();");
1809     CHECK_EQ(42, res->Int32Value());
1810     ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1811     ctx2->Exit();
1812     ctx1->Exit();
1813     ctx1p.Dispose(ctx1->GetIsolate());
1814     v8::V8::ContextDisposedNotification();
1815   }
1816   HEAP->CollectAllAvailableGarbage();
1817   CHECK_EQ(2, NumberOfGlobalObjects());
1818   ctx2p.Dispose(isolate);
1819   HEAP->CollectAllAvailableGarbage();
1820   CHECK_EQ(0, NumberOfGlobalObjects());
1821 }
1822
1823
// Test that we don't embed maps from foreign contexts into optimized
// code via a __proto__ assignment to a foreign object.
TEST(LeakNativeContextViaMapProto) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = v8::Isolate::GetCurrent();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  HEAP->CollectAllAvailableGarbage();
  // Two live contexts; presumably each contributes two global objects
  // (global object + global proxy) -- TODO confirm against heap layout.
  CHECK_EQ(4, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = { y: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    // Leak an object created in ctx1 into ctx2 ...
    v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
    ctx2->Enter();
    ctx2->Global()->Set(v8_str("o"), v);
    // ... then optimize ctx2 code that installs the foreign object as a
    // prototype.  Embedding the resulting map in optimized code would keep
    // ctx1's native context alive after disposal.
    v8::Local<v8::Value> res = CompileRun(
        "function f() {"
        "  var p = {x: 42};"
        "  p.__proto__ = o;"
        "  return p.x;"
        "}"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value());
    // Drop the last reference to the foreign object before disposing ctx1.
    ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Dispose(isolate);
    v8::V8::ContextDisposedNotification();
  }
  // ctx1 must now be collectible; only ctx2's global objects remain.
  HEAP->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());
  ctx2p.Dispose(isolate);
  HEAP->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
1870
1871
// Test that the instanceof stub behaves correctly with respect to the
// incremental-marking write barrier: run optimized instanceof code while
// incremental marking is in progress, then finish the GC.
TEST(InstanceOfStubWriteBarrier) {
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  // Requires optimized code; incompatible with forced marking-deque
  // overflows, which would change the marking behavior this test relies on.
  if (!i::V8::UseCrankshaft()) return;
  if (i::FLAG_force_marking_deque_overflows) return;
  v8::HandleScope outer_scope(v8::Isolate::GetCurrent());

  {
    v8::HandleScope scope(v8::Isolate::GetCurrent());
    // f is optimized with an instanceof check against foo; g later calls f
    // with an instance of a freshly created constructor.
    CompileRun(
        "function foo () { }"
        "function mkbar () { return new (new Function(\"\")) (); }"
        "function f (x) { return (x instanceof foo); }"
        "function g () { f(mkbar()); }"
        "f(new foo()); f(new foo());"
        "%OptimizeFunctionOnNextCall(f);"
        "f(new foo()); g();");
  }

  // Restart incremental marking from a clean state.
  IncrementalMarking* marking = HEAP->incremental_marking();
  marking->Abort();
  marking->Start();

  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));

  CHECK(f->IsOptimized());

  // Step the marker until f's code object has been marked black.
  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
         !marking->IsStopped()) {
    // Discard any pending GC requests otherwise we will get GC when we enter
    // code below.
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }

  CHECK(marking->IsMarking());

  {
    // Run g() (and thus the optimized instanceof in f) while marking is
    // still active, exercising the stub's write barrier.
    v8::HandleScope scope(v8::Isolate::GetCurrent());
    v8::Handle<v8::Object> global = v8::Context::GetCurrent()->Global();
    v8::Handle<v8::Function> g =
        v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
    g->Call(global, 0, NULL);
  }

  // Finish the incremental cycle; heap verification (if enabled) would
  // catch a missed write barrier here.
  HEAP->incremental_marking()->set_should_hurry(true);
  HEAP->CollectGarbage(OLD_POINTER_SPACE);
}
1926
1927
// Test that dead prototype transitions are cleared from a map's prototype
// transition array during GC, that the array is compacted afterwards, and
// that transition clearing records slots correctly when a prototype lives
// on an evacuation candidate page.
TEST(PrototypeTransitionClearing) {
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Create 10 prototype transitions on base's map; only the last 7
  // (i >= 3) object/prototype pairs are kept alive via 'live'.
  CompileRun(
      "var base = {};"
      "var live = [];"
      "for (var i = 0; i < 10; i++) {"
      "  var object = {};"
      "  var prototype = {};"
      "  object.__proto__ = prototype;"
      "  if (i >= 3) live.push(object, prototype);"
      "}");

  Handle<JSObject> baseObject =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("base"))));

  // Verify that only dead prototype transitions are cleared.
  CHECK_EQ(10, baseObject->map()->NumberOfProtoTransitions());
  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  const int transitions = 10 - 3;
  CHECK_EQ(transitions, baseObject->map()->NumberOfProtoTransitions());

  // Verify that prototype transitions array was compacted.
  FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
  for (int i = 0; i < transitions; i++) {
    int j = Map::kProtoTransitionHeaderSize +
        i * Map::kProtoTransitionElementsPerEntry;
    CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
    Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
    CHECK(proto->IsTheHole() || proto->IsJSObject());
  }

  // Make sure next prototype is placed on an old-space evacuation candidate.
  Handle<JSObject> prototype;
  PagedSpace* space = HEAP->old_pointer_space();
  {
    // Fill the space first so the new array lands on a fresh last page.
    AlwaysAllocateScope always_allocate;
    SimulateFullSpace(space);
    prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
  }

  // Add a prototype on an evacuation candidate and verify that transition
  // clearing correctly records slots in prototype transition array.
  i::FLAG_always_compact = true;
  Handle<Map> map(baseObject->map());
  CHECK(!space->LastPage()->Contains(
      map->GetPrototypeTransitions()->address()));
  CHECK(space->LastPage()->Contains(prototype->address()));
  JSObject::SetPrototype(baseObject, prototype, false);
  CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap());
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  // The transition must survive the compacting GC intact (its recorded
  // slot pointed into the evacuated page).
  CHECK(Map::GetPrototypeTransition(map, prototype)->IsMap());
}
1986
1987
// Test that a SharedFunctionInfo's IC age, opt count, and profiler ticks
// are reset when the global IC age is bumped while an incremental marking
// cycle is in progress.
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!i::V8::UseCrankshaft()) return;
  v8::HandleScope outer_scope(v8::Isolate::GetCurrent());

  {
    v8::HandleScope scope(v8::Isolate::GetCurrent());
    CompileRun(
        "function f () {"
        "  var s = 0;"
        "  for (var i = 0; i < 100; i++)  s += i;"
        "  return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
  CHECK(f->IsOptimized());

  // Start an incremental marking cycle from a clean state.
  IncrementalMarking* marking = HEAP->incremental_marking();
  marking->Abort();
  marking->Start();

  // The following two calls will increment HEAP->global_ic_age().
  const int kLongIdlePauseInMs = 1000;
  v8::V8::ContextDisposedNotification();
  v8::V8::IdleNotification(kLongIdlePauseInMs);

  // Drive marking to completion in large steps.
  while (!marking->IsStopped() && !marking->IsComplete()) {
    marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }
  if (!marking->IsStopped() || marking->should_hurry()) {
    // We don't normally finish a GC via Step(), we normally finish by
    // setting the stack guard and then do the final steps in the stack
    // guard interrupt.  But here we didn't ask for that, and there is no
    // JS code running to trigger the interrupt, so we explicitly finalize
    // here.
    HEAP->CollectAllGarbage(Heap::kNoGCFlags,
                            "Test finalizing incremental mark-sweep");
  }

  // The counters must have been reset as part of the marking cycle.
  CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
2043
2044
// Same as the incremental-marking variant above, but with incremental
// marking aborted so the idle notification triggers a full mark-sweep.
TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!i::V8::UseCrankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function f () {"
        "  var s = 0;"
        "  for (var i = 0; i < 100; i++)  s += i;"
        "  return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
  CHECK(f->IsOptimized());

  // Ensure no incremental cycle is active so the GC below is a full one.
  HEAP->incremental_marking()->Abort();

  // The following two calls will increment HEAP->global_ic_age().
  // Since incremental marking is off, IdleNotification will do full GC.
  const int kLongIdlePauseInMs = 1000;
  v8::V8::ContextDisposedNotification();
  v8::V8::IdleNotification(kLongIdlePauseInMs);

  // The counters must have been reset by the full mark-sweep.
  CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
2086
2087
// Test that HAllocateObject will always return an object in new-space.
TEST(OptimizedAllocationAlwaysInNewSpace) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  // Skip under modes that change allocation/optimization behavior.
  if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Even with new space (simulated) full and AlwaysAllocate in force,
  // the optimized constructor call must allocate in new space.
  SimulateFullSpace(HEAP->new_space());
  AlwaysAllocateScope always_allocate;
  v8::Local<v8::Value> res = CompileRun(
      "function c(x) {"
      "  this.x = x;"
      "  for (var i = 0; i < 32; i++) {"
      "    this['x' + i] = x;"
      "  }"
      "}"
      "function f(x) { return new c(x); };"
      "f(1); f(2); f(3);"
      "%OptimizeFunctionOnNextCall(f);"
      "f(4);");
  CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(HEAP->InNewSpace(*o));
}
2116
2117
// Test that with new-space high promotion mode active, optimized code
// pretenures an array literal of objects (and its elements backing store)
// directly into old pointer space.
TEST(OptimizedPretenuringObjectArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  // Skip under modes that change allocation/optimization behavior.
  if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force the pretenuring decision that optimized code should honor.
  HEAP->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = [{}, {}, {}];"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(HEAP->InOldPointerSpace(o->elements()));
  CHECK(HEAP->InOldPointerSpace(*o));
}
2141
2142
// Test pretenuring of an object literal with mixed in-object properties:
// pointer-valued properties go to old pointer space, unboxed-double
// property values to old data space.
TEST(OptimizedPretenuringMixedInObjectProperties) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  // Skip under modes that change allocation/optimization behavior.
  if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force the pretenuring decision that optimized code should honor.
  HEAP->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = {a: {c: 2.2, d: {}}, b: 1.1};"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  // Property 0 is the nested object 'a', property 1 the heap number 'b'.
  CHECK(HEAP->InOldPointerSpace(*o));
  CHECK(HEAP->InOldPointerSpace(o->RawFastPropertyAt(0)));
  CHECK(HEAP->InOldDataSpace(o->RawFastPropertyAt(1)));

  // Recurse into 'a': its heap-number property 'c' is in old data space,
  // its object property 'd' in old pointer space.
  JSObject* inner_object = reinterpret_cast<JSObject*>(o->RawFastPropertyAt(0));
  CHECK(HEAP->InOldPointerSpace(inner_object));
  CHECK(HEAP->InOldDataSpace(inner_object->RawFastPropertyAt(0)));
  CHECK(HEAP->InOldPointerSpace(inner_object->RawFastPropertyAt(1)));
}
2172
2173
// Test pretenuring of an object literal with double-valued properties:
// the object goes to old pointer space, its properties backing store to
// old data space.
TEST(OptimizedPretenuringDoubleArrayProperties) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  // Skip under modes that change allocation/optimization behavior.
  if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force the pretenuring decision that optimized code should honor.
  HEAP->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = {a: 1.1, b: 2.2};"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(HEAP->InOldPointerSpace(*o));
  CHECK(HEAP->InOldDataSpace(o->properties()));
}
2197
2198
// Test pretenuring of a double array literal: the array object goes to
// old pointer space and its (double) elements store to old data space.
TEST(OptimizedPretenuringdoubleArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  // Skip under modes that change allocation/optimization behavior.
  if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force the pretenuring decision that optimized code should honor.
  HEAP->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = [1.1, 2.2, 3.3];"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(HEAP->InOldDataSpace(o->elements()));
  CHECK(HEAP->InOldPointerSpace(*o));
}
2222
2223
// Test pretenuring of a nested array literal mixing an object array and a
// double array: each sub-array and its backing store must end up in the
// appropriate old space.
TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  // Skip under modes that change allocation/optimization behavior.
  if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force the pretenuring decision that optimized code should honor.
  HEAP->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = [[{}, {}, {}],[1.1, 2.2, 3.3]];"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  // Pull out the two sub-arrays: index 0 is the object array, index 1 the
  // double array.
  v8::Local<v8::Value> int_array = v8::Object::Cast(*res)->Get(v8_str("0"));
  Handle<JSObject> int_array_handle =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array));
  v8::Local<v8::Value> double_array = v8::Object::Cast(*res)->Get(v8_str("1"));
  Handle<JSObject> double_array_handle =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array));

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  CHECK(HEAP->InOldPointerSpace(*o));
  CHECK(HEAP->InOldPointerSpace(*int_array_handle));
  CHECK(HEAP->InOldPointerSpace(int_array_handle->elements()));
  CHECK(HEAP->InOldPointerSpace(*double_array_handle));
  // Double elements are raw data, hence old data space.
  CHECK(HEAP->InOldDataSpace(double_array_handle->elements()));
}
2256
2257
// Test pretenuring of a nested array literal of object arrays: both
// sub-arrays and their element stores must land in old pointer space.
TEST(OptimizedPretenuringNestedObjectLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  // Skip under modes that change allocation/optimization behavior.
  if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force the pretenuring decision that optimized code should honor.
  HEAP->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = [[{}, {}, {}],[{}, {}, {}]];"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  // Pull out the two nested object arrays.
  v8::Local<v8::Value> int_array_1 = v8::Object::Cast(*res)->Get(v8_str("0"));
  Handle<JSObject> int_array_handle_1 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_1));
  v8::Local<v8::Value> int_array_2 = v8::Object::Cast(*res)->Get(v8_str("1"));
  Handle<JSObject> int_array_handle_2 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(int_array_2));

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  CHECK(HEAP->InOldPointerSpace(*o));
  CHECK(HEAP->InOldPointerSpace(*int_array_handle_1));
  CHECK(HEAP->InOldPointerSpace(int_array_handle_1->elements()));
  CHECK(HEAP->InOldPointerSpace(*int_array_handle_2));
  CHECK(HEAP->InOldPointerSpace(int_array_handle_2->elements()));
}
2290
2291
// Test pretenuring of a nested array literal of double arrays: the array
// objects go to old pointer space, their double element stores to old
// data space.
TEST(OptimizedPretenuringNestedDoubleLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  // Skip under modes that change allocation/optimization behavior.
  if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force the pretenuring decision that optimized code should honor.
  HEAP->SetNewSpaceHighPromotionModeActive(true);

  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  // Pull out the two nested double arrays.
  v8::Local<v8::Value> double_array_1 =
      v8::Object::Cast(*res)->Get(v8_str("0"));
  Handle<JSObject> double_array_handle_1 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_1));
  v8::Local<v8::Value> double_array_2 =
      v8::Object::Cast(*res)->Get(v8_str("1"));
  Handle<JSObject> double_array_handle_2 =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(double_array_2));

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  CHECK(HEAP->InOldPointerSpace(*o));
  CHECK(HEAP->InOldPointerSpace(*double_array_handle_1));
  CHECK(HEAP->InOldDataSpace(double_array_handle_1->elements()));
  CHECK(HEAP->InOldPointerSpace(*double_array_handle_2));
  CHECK(HEAP->InOldDataSpace(double_array_handle_2->elements()));
}
2326
2327
// Test regular array literals allocation.
TEST(OptimizedAllocationArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  // Skip under modes that change allocation/optimization behavior.
  if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Without high promotion mode (contrast with the pretenuring tests
  // above), the array's elements should be allocated in new space.
  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = new Array(1, 2, 3);"
      "  numbers[0] = 3.14;"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");
  CHECK_EQ(static_cast<int>(3.14),
           v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));

  CHECK(HEAP->InNewSpace(o->elements()));
}
2353
2354
// Test that with --pretenuring-call-new and high promotion mode, an
// optimized 'new' call allocates the instance in old pointer space.
TEST(OptimizedPretenuringCallNew) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_pretenuring_call_new = true;
  CcTest::InitializeVM();
  // Skip under modes that change allocation/optimization behavior.
  if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  // Force the pretenuring decision that optimized code should honor.
  HEAP->SetNewSpaceHighPromotionModeActive(true);

  AlwaysAllocateScope always_allocate;
  v8::Local<v8::Value> res = CompileRun(
      "function g() { this.a = 0; }"
      "function f() {"
      "  return new g();"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");

  Handle<JSObject> o =
      v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
  CHECK(HEAP->InOldPointerSpace(*o));
}
2378
2379
2380 static int CountMapTransitions(Map* map) {
2381   return map->transitions()->number_of_transitions();
2382 }
2383
2384
// Test that map transitions are cleared and maps are collected with
// incremental marking as well.
TEST(Regress1465) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_trace_incremental_marking = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 256;

  {
    // Create 256 map transitions off Object's map by assigning distinct
    // property names; 'root' then points to an untransitioned object.
    AlwaysAllocateScope always_allocate;
    for (int i = 0; i < transitions_count; i++) {
      EmbeddedVector<char, 64> buffer;
      OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
      CompileRun(buffer.start());
    }
    CompileRun("var root = new Object;");
  }

  Handle<JSObject> root =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(transitions_count, transitions_before);

  // Mark incrementally, then finish with a full GC that should clear the
  // now-dead transitions.
  SimulateIncrementalMarking();
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);

  // Count number of live transitions after marking.  Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(1, transitions_after);
}
2424
2425
// Regression test for crbug case 2143: a StoreIC-performed map transition
// during incremental marking must leave the transitioned object correctly
// marked, even with aggressive IC clearing.
TEST(Regress2143a) {
  i::FLAG_collect_maps = true;
  i::FLAG_incremental_marking = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  SimulateIncrementalMarking();

  // Compile a StoreIC that performs the prepared map transition. This
  // will restart incremental marking and should make sure the root is
  // marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(root);");

  // This bug only triggers with aggressive IC clearing.
  HEAP->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);

  Handle<JSObject> root =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}
2464
2465
// Companion to Regress2143a: same scenario, but the map transition is
// performed by optimized code (LStoreNamedField) instead of a StoreIC.
TEST(Regress2143b) {
  i::FLAG_collect_maps = true;
  i::FLAG_incremental_marking = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  SimulateIncrementalMarking();

  // Compile an optimized LStoreNamedField that performs the prepared
  // map transition. This will restart incremental marking and should
  // make sure the root is marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(new Object);"
             "%OptimizeFunctionOnNextCall(f);"
             "f(root);"
             "%DeoptimizeFunction(f);");

  // This bug only triggers with aggressive IC clearing.
  HEAP->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);

  Handle<JSObject> root =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}
2508
2509
// Test that mostly-empty old-space pages are progressively released back
// to the OS across GC cycles, and that a last-resort GC releases all of
// them.
TEST(ReleaseOverReservedPages) {
  i::FLAG_trace_gc = true;
  // The optimizer can allocate stuff, messing up the test.
  i::FLAG_crankshaft = false;
  i::FLAG_always_opt = false;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  static const int number_of_test_pages = 20;

  // Prepare many pages with low live-bytes count.
  PagedSpace* old_pointer_space = HEAP->old_pointer_space();
  CHECK_EQ(1, old_pointer_space->CountTotalPages());
  for (int i = 0; i < number_of_test_pages; i++) {
    // Filling the space forces each tiny tenured array onto a new page.
    AlwaysAllocateScope always_allocate;
    SimulateFullSpace(old_pointer_space);
    factory->NewFixedArray(1, TENURED);
  }
  CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());

  // Triggering one GC will cause a lot of garbage to be discovered but
  // even spread across all allocated pages.
  HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());

  // Triggering subsequent GCs should cause at least half of the pages
  // to be released to the OS after at most two cycles.
  HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
  HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);

  // Triggering a last-resort GC should cause all pages to be released to the
  // OS so that other processes can seize the memory.  If we get a failure here
  // where there are 2 pages left instead of 1, then we should increase the
  // size of the first page a little in SizeOfFirstPage in spaces.cc.  The
  // first page should be small in order to reduce memory used when the VM
  // boots, but if the 20 small arrays don't fit on the first page then that's
  // an indication that it is too small.
  HEAP->CollectAllAvailableGarbage("triggered really hard");
  CHECK_EQ(1, old_pointer_space->CountTotalPages());
}
2553
2554
// Regression test: a sliced string that lives in old space must keep its
// new-space parent string alive and correctly referenced across a full GC.
TEST(Regress2237) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  Handle<String> slice(HEAP->empty_string());

  {
    // Generate a parent that lives in new-space.
    v8::HandleScope inner_scope(CcTest::isolate());
    const char* c = "This text is long enough to trigger sliced strings.";
    Handle<String> s = factory->NewStringFromAscii(CStrVector(c));
    CHECK(s->IsSeqOneByteString());
    CHECK(HEAP->InNewSpace(*s));

    // Generate a sliced string that is based on the above parent and
    // lives in old-space.
    SimulateFullSpace(HEAP->new_space());
    AlwaysAllocateScope always_allocate;
    Handle<String> t = factory->NewProperSubString(s, 5, 35);
    CHECK(t->IsSlicedString());
    CHECK(!HEAP->InNewSpace(*t));
    // Smuggle the old-space slice out of the inner handle scope by writing
    // directly into the outer handle's slot.
    *slice.location() = *t.location();
  }

  // The parent must remain a sequential string both before and after GC.
  CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
}
2585
2586
#ifdef OBJECT_PRINT
// Smoke test: printing a SharedFunctionInfo must work without allocating
// on the heap (enforced by the DisallowHeapAllocation guard) and without
// crashing. Only compiled when object printing support is built in.
TEST(PrintSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "f = function() { return 987654321; }\n"
                       "g = function() { return 123456789; }\n";
  CompileRun(source);
  Handle<JSFunction> g =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("g"))));

  DisallowHeapAllocation no_allocation;
  g->shared()->PrintLn();
}
#endif  // OBJECT_PRINT
2603
2604
// Regression test: storing an explicit identity hash and a hidden value on
// the same object must work in either order, and the backing hidden-property
// hash table must stay small (no unnecessary growth).
TEST(Regress2211) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  v8::Handle<v8::String> value = v8_str("val string");
  Smi* hash = Smi::FromInt(321);
  Heap* heap = Isolate::Current()->heap();

  for (int i = 0; i < 2; i++) {
    // Store identity hash first and common hidden property second.
    v8::Handle<v8::Object> obj = v8::Object::New();
    Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
    CHECK(internal_obj->HasFastProperties());

    // In the first iteration, set hidden value first and identity hash second.
    // In the second iteration, reverse the order.
    if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
    MaybeObject* maybe_obj = internal_obj->SetIdentityHash(hash,
                                                           ALLOW_CREATION);
    CHECK(!maybe_obj->IsFailure());
    if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);

    // Check values.
    CHECK_EQ(hash,
             internal_obj->GetHiddenProperty(heap->identity_hash_string()));
    CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));

    // Check size.
    DescriptorArray* descriptors = internal_obj->map()->instance_descriptors();
    ObjectHashTable* hashtable = ObjectHashTable::cast(
        internal_obj->RawFastPropertyAt(descriptors->GetFieldIndex(0)));
    // HashTable header (5) and 4 initial entries (8).
    CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
  }
}
2640
2641
// Checks that incremental marking followed by a full GC clears type
// feedback cells that reference closures from foreign native contexts
// (the cell values become the hole), while keeping the cell count intact.
TEST(IncrementalMarkingClearsTypeFeedbackCells) {
  if (i::FLAG_always_opt) return;  // Optimization would discard the feedback.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1, fun2;

  {
    LocalContext env;
    CompileRun("function fun() {};");
    fun1 = env->Global()->Get(v8_str("fun"));
  }

  {
    LocalContext env;
    CompileRun("function fun() {};");
    fun2 = env->Global()->Get(v8_str("fun"));
  }

  // Prepare function f that contains type feedback for closures
  // originating from two different native contexts.
  v8::Context::GetCurrent()->Global()->Set(v8_str("fun1"), fun1);
  v8::Context::GetCurrent()->Global()->Set(v8_str("fun2"), fun2);
  CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
  Handle<TypeFeedbackCells> cells(TypeFeedbackInfo::cast(
      f->shared()->code()->type_feedback_info())->type_feedback_cells());

  // Before GC both cells hold the recorded closures.
  CHECK_EQ(2, cells->CellCount());
  CHECK(cells->GetCell(0)->value()->IsJSFunction());
  CHECK(cells->GetCell(1)->value()->IsJSFunction());

  SimulateIncrementalMarking();
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);

  // After GC the cells survive but their values have been cleared.
  CHECK_EQ(2, cells->CellCount());
  CHECK(cells->GetCell(0)->value()->IsTheHole());
  CHECK(cells->GetCell(1)->value()->IsTheHole());
}
2683
2684
2685 static Code* FindFirstIC(Code* code, Code::Kind kind) {
2686   int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
2687              RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
2688              RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
2689              RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
2690   for (RelocIterator it(code, mask); !it.done(); it.next()) {
2691     RelocInfo* info = it.rinfo();
2692     Code* target = Code::GetCodeFromTargetAddress(info->target_address());
2693     if (target->is_inline_cache_stub() && target->kind() == kind) {
2694       return target;
2695     }
2696   }
2697   return NULL;
2698 }
2699
2700
// Checks that a monomorphic load IC whose receiver map originates from the
// *same* native context survives incremental marking plus a full GC.
// (Contrast with IncrementalMarkingClearsMonomorhpicIC below.)
TEST(IncrementalMarkingPreservesMonomorhpicIC) {
  if (i::FLAG_always_opt) return;  // Optimization would replace the IC code.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
             "function f(o) { return o.x; } f(obj); f(obj);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_before->ic_state() == MONOMORPHIC);

  SimulateIncrementalMarking();
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);

  // The IC must still be monomorphic after the GC cycle.
  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_after->ic_state() == MONOMORPHIC);
}
2724
2725
// Checks that a monomorphic load IC whose receiver map originates from a
// *different* (disposed) native context is cleared back to UNINITIALIZED
// by incremental marking plus a full GC.
TEST(IncrementalMarkingClearsMonomorhpicIC) {
  if (i::FLAG_always_opt) return;  // Optimization would replace the IC code.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1;

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(v8_str("obj"));
  }

  // Prepare function f that contains a monomorphic IC for object
  // originating from a different native context.
  v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_before->ic_state() == MONOMORPHIC);

  // Fire context dispose notification.
  v8::V8::ContextDisposedNotification();
  SimulateIncrementalMarking();
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);

  // The foreign-context IC must have been cleared.
  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_after->ic_state() == UNINITIALIZED);
}
2758
2759
// Checks that a polymorphic load IC whose receiver maps originate from two
// different (disposed) native contexts is cleared back to UNINITIALIZED by
// incremental marking plus a full GC.
TEST(IncrementalMarkingClearsPolymorhpicIC) {
  if (i::FLAG_always_opt) return;  // Optimization would replace the IC code.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(v8_str("obj"));
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(v8_str("obj"));
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
  v8::Context::GetCurrent()->Global()->Set(v8_str("obj2"), obj2);
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));

  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_before->ic_state() == POLYMORPHIC);

  // Fire context dispose notification.
  v8::V8::ContextDisposedNotification();
  SimulateIncrementalMarking();
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);

  // The foreign-context IC must have been cleared.
  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic_after->ic_state() == UNINITIALIZED);
}
2799
2800
2801 class SourceResource: public v8::String::ExternalAsciiStringResource {
2802  public:
2803   explicit SourceResource(const char* data)
2804     : data_(data), length_(strlen(data)) { }
2805
2806   virtual void Dispose() {
2807     i::DeleteArray(data_);
2808     data_ = NULL;
2809   }
2810
2811   const char* data() const { return data_; }
2812
2813   size_t length() const { return length_; }
2814
2815   bool IsDisposed() { return data_ == NULL; }
2816
2817  private:
2818   const char* data_;
2819   size_t length_;
2820 };
2821
2822
// Runs |source| from an external string, then runs |accessor| (which touches
// error.stack) and verifies the external source data gets released by GC.
//
// Test that the data retained by the Error.stack accessor is released
// after the first time the accessor is fired.  We use external string
// to check whether the data is being released since the external string
// resource's callback is fired when the external string is GC'ed.
void ReleaseStackTraceDataTest(const char* source, const char* accessor) {
  FLAG_use_ic = false;  // ICs retain objects.
  FLAG_parallel_recompilation = false;  // Keep compilation deterministic.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  // StrDup'ed copy is owned (and later freed) by the resource.
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
    v8::HandleScope scope(CcTest::isolate());
    v8::Handle<v8::String> source_string = v8::String::NewExternal(resource);
    HEAP->CollectAllAvailableGarbage();
    v8::Script::Compile(source_string)->Run();
    // The error object created by |source| still retains the source string.
    CHECK(!resource->IsDisposed());
  }
  // HEAP->CollectAllAvailableGarbage();
  CHECK(!resource->IsDisposed());

  // Firing the stack accessor should drop the last reference to the source.
  CompileRun(accessor);
  HEAP->CollectAllAvailableGarbage();

  // External source has been released.
  CHECK(resource->IsDisposed());
  delete resource;
}
2850
2851
// Drives ReleaseStackTraceDataTest with four error shapes (plain error,
// stack overflow, and both used as a prototype) and both the stack getter
// and setter, checking stack-trace data release in each combination.
TEST(ReleaseStackTraceData) {
  static const char* source1 = "var error = null;            "
  /* Normal Error */           "try {                        "
                               "  throw new Error();         "
                               "} catch (e) {                "
                               "  error = e;                 "
                               "}                            ";
  static const char* source2 = "var error = null;            "
  /* Stack overflow */         "try {                        "
                               "  (function f() { f(); })(); "
                               "} catch (e) {                "
                               "  error = e;                 "
                               "}                            ";
  static const char* source3 = "var error = null;            "
  /* Normal Error */           "try {                        "
  /* as prototype */           "  throw new Error();         "
                               "} catch (e) {                "
                               "  error = {};                "
                               "  error.__proto__ = e;       "
                               "}                            ";
  static const char* source4 = "var error = null;            "
  /* Stack overflow */         "try {                        "
  /* as prototype   */         "  (function f() { f(); })(); "
                               "} catch (e) {                "
                               "  error = {};                "
                               "  error.__proto__ = e;       "
                               "}                            ";
  static const char* getter = "error.stack";
  static const char* setter = "error.stack = 0";

  ReleaseStackTraceDataTest(source1, setter);
  ReleaseStackTraceDataTest(source2, setter);
  // We do not test source3 and source4 with setter, since the setter is
  // supposed to (untypically) write to the receiver, not the holder.  This is
  // to emulate the behavior of a data property.

  ReleaseStackTraceDataTest(source1, getter);
  ReleaseStackTraceDataTest(source2, getter);
  ReleaseStackTraceDataTest(source3, getter);
  ReleaseStackTraceDataTest(source4, getter);
}
2893
2894
// Regression test for issue 144230: a CallIC living on an evacuation
// candidate must not be left pointing at stale code when its shared
// function info is visited after the IC's code during marking. The test
// carefully stages code-space pages so the relevant objects end up on
// specific pages, then forces a compacting GC.
TEST(Regression144230) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // First make sure that the uninitialized CallIC stub is on a single page
  // that will later be selected as an evacuation candidate.
  {
    HandleScope inner_scope(isolate);
    AlwaysAllocateScope always_allocate;
    SimulateFullSpace(heap->code_space());
    isolate->stub_cache()->ComputeCallInitialize(9, RelocInfo::CODE_TARGET);
  }

  // Second compile a CallIC and execute it once so that it gets patched to
  // the pre-monomorphic stub. These code objects are on yet another page.
  {
    HandleScope inner_scope(isolate);
    AlwaysAllocateScope always_allocate;
    SimulateFullSpace(heap->code_space());
    CompileRun("var o = { f:function(a,b,c,d,e,f,g,h,i) {}};"
               "function call() { o.f(1,2,3,4,5,6,7,8,9); };"
               "call();");
  }

  // Third we fill up the last page of the code space so that it does not get
  // chosen as an evacuation candidate.
  {
    HandleScope inner_scope(isolate);
    AlwaysAllocateScope always_allocate;
    CompileRun("for (var i = 0; i < 2000; i++) {"
               "  eval('function f' + i + '() { return ' + i +'; };' +"
               "       'f' + i + '();');"
               "}");
  }
  heap->CollectAllGarbage(Heap::kNoGCFlags);

  // Fourth is the tricky part. Make sure the code containing the CallIC is
  // visited first without clearing the IC. The shared function info is then
  // visited later, causing the CallIC to be cleared.
  Handle<String> name = isolate->factory()->InternalizeUtf8String("call");
  Handle<GlobalObject> global(isolate->context()->global_object());
  MaybeObject* maybe_call = global->GetProperty(*name);
  JSFunction* call = JSFunction::cast(maybe_call->ToObjectChecked());
  // Detach "call" from the global so only the local handles keep it alive.
  USE(global->SetProperty(*name, Smi::FromInt(0), NONE, kNonStrictMode));
  isolate->compilation_cache()->Clear();
  // Age the shared info past the global IC age to force IC clearing.
  call->shared()->set_ic_age(heap->global_ic_age() + 1);
  Handle<Object> call_code(call->code(), isolate);
  Handle<Object> call_function(call, isolate);

  // Now we are ready to mess up the heap.
  heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);

  // Either heap verification caught the problem already or we go kaboom once
  // the CallIC is executed the next time.
  USE(global->SetProperty(*name, *call_function, NONE, kNonStrictMode));
  CompileRun("call();");
}
2955
2956
// Regression test for issue 159140: optimized code held alive only through
// a handle must not have its unoptimized counterpart flushed, otherwise
// deoptimization ("g('bozo')" forces a type mismatch) has no code to
// fall back to.
TEST(Regress159140) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Prepare several closures that are all eligible for code flushing
  // because all reachable ones are not optimized. Make sure that the
  // optimized code object is directly reachable through a handle so
  // that it is marked black during incremental marking.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function h(x) {}"
               "function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "var g = mkClosure();"
               "f(1); f(2);"
               "g(1); g(2);"
               "h(1); h(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);"
               "%OptimizeFunctionOnNextCall(h); h(3);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    CompileRun("f = null;");

    Handle<JSFunction> g =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("g"))));
    CHECK(g->is_compiled());
    // Age g's code far enough that the flusher considers it a candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    // Keep f's (optimized) code alive via a handle that escapes this scope.
    code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then optimize one function. Finally
  // finish the GC to complete code flushing.
  SimulateIncrementalMarking();
  CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
  heap->CollectAllGarbage(Heap::kNoGCFlags);

  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("g('bozo');");
}
3017
3018
// Regression test for issue 165495: unoptimized code cached in the
// optimized code map must not be flushed while a new closure can still be
// created from the map; running the fresh closure with a deopt-triggering
// argument would otherwise find no unoptimized code to fall back to.
TEST(Regress165495) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Prepare an optimized closure that the optimized code map will get
  // populated. Then age the unoptimized code to trigger code flushing
  // but make sure the optimized code is unreachable.
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    // Age the unoptimized code so the flusher treats it as stale.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    CompileRun("f = null;");
  }

  // Simulate incremental marking so that unoptimized code is flushed
  // even though it still is cached in the optimized code map.
  SimulateIncrementalMarking();
  heap->CollectAllGarbage(Heap::kNoGCFlags);

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var g = mkClosure(); g('bozo');");
}
3064
3065
// Regression test for issue 169209: the code-flushing candidate list must
// stay consistent when a candidate's unoptimized code is replaced through
// optimization mid-cycle, leaving a dangling tail (shared2) on the list.
// The gc_metadata checks observe enqueue (non-NULL) and dequeue (NULL).
TEST(Regress169209) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;

  // Experimental natives are compiled during snapshot deserialization.
  // This test breaks because heap layout changes in a way that closure
  // is visited before shared function info.
  i::FLAG_harmony_typed_arrays = false;
  i::FLAG_harmony_array_buffer = false;

  // Disable loading the i18n extension which breaks the assumptions of this
  // test about the heap layout.
  i::FLAG_enable_i18n = false;

  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Prepare a shared function info eligible for code flushing for which
  // the unoptimized code will be replaced during optimization.
  Handle<SharedFunctionInfo> shared1;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 'foobar'; }"
               "function g(x) { if (x) f(); }"
               "f();"
               "g(false);"
               "g(false);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    // Age the code so it becomes a flushing candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Prepare a shared function info eligible for code flushing that will
  // represent the dangling tail of the candidate list.
  Handle<SharedFunctionInfo> shared2;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function flushMe() { return 0; }"
               "flushMe(1);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("flushMe"))));
    CHECK(f->is_compiled());
    // Age this code as well so it joins the candidate list.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Simulate incremental marking and collect code flushing candidates.
  SimulateIncrementalMarking();
  CHECK(shared1->code()->gc_metadata() != NULL);

  // Optimize function and make sure the unoptimized code is replaced.
#ifdef DEBUG
  FLAG_stop_at = "f";
#endif
  CompileRun("%OptimizeFunctionOnNextCall(g);"
             "g(false);");

  // Finish garbage collection cycle.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  CHECK(shared1->code()->gc_metadata() == NULL);
}
3149
3150
3151 // Helper function that simulates a fill new-space in the heap.
3152 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
3153                                         int extra_bytes) {
3154   int space_remaining = static_cast<int>(
3155       *space->allocation_limit_address() - *space->allocation_top_address());
3156   CHECK(space_remaining >= extra_bytes);
3157   int new_linear_size = space_remaining - extra_bytes;
3158   v8::internal::MaybeObject* maybe = space->AllocateRaw(new_linear_size);
3159   v8::internal::FreeListNode* node = v8::internal::FreeListNode::cast(maybe);
3160   node->set_size(space->heap(), new_linear_size);
3161 }
3162
3163
// Regression test for issue 169928: running the array-literal stub against
// an array whose trailing AllocationMemento slot has been overwritten by a
// filler must not crash. The new-space layout is arranged byte-exactly, so
// no stray allocations (e.g. strings) may happen between the steps below.
TEST(Regress169928) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_crankshaft = false;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  // Prepare the environment
  CompileRun("function fastliteralcase(literal, value) {"
             "    literal[0] = value;"
             "    return literal;"
             "}"
             "function get_standard_literal() {"
             "    var literal = [1, 2, 3];"
             "    return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // prepare the heap
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  v8::Context::GetCurrent()->Global()->Set(array_name, v8::Int32::New(0));

  // First make sure we flip spaces
  HEAP->CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  // Leave exactly enough room for the array plus a memento and one word.
  AllocateAllButNBytes(HEAP->new_space(),
                       JSArray::kSize + AllocationMemento::kSize +
                       kPointerSize);

  Handle<JSArray> array = factory->NewJSArrayWithElements(array_data,
                                                          FAST_SMI_ELEMENTS,
                                                          NOT_TENURED);

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasFastSmiOrObjectElements());

  // We need filler the size of AllocationMemento object, plus an extra
  // fill pointer value.
  MaybeObject* maybe_object = HEAP->AllocateRaw(
      AllocationMemento::kSize + kPointerSize, NEW_SPACE, OLD_POINTER_SPACE);
  Object* obj = NULL;
  CHECK(maybe_object->ToObject(&obj));
  // NOTE(review): this untags via Object* pointer arithmetic, which only
  // subtracts kHeapObjectTag bytes if sizeof(Object) == 1 — confirm against
  // the Object class definition.
  Address addr_obj = reinterpret_cast<Address>(
      reinterpret_cast<byte*>(obj - kHeapObjectTag));
  HEAP->CreateFillerObjectAt(addr_obj,
                             AllocationMemento::kSize + kPointerSize);

  // Give the array a name, making sure not to allocate strings.
  v8::Handle<v8::Object> array_obj = v8::Utils::ToLocal(array);
  v8::Context::GetCurrent()->Global()->Set(array_name, array_obj);

  // This should crash with a protection violation if we are running a build
  // with the bug.
  AlwaysAllocateScope aa_scope;
  v8::Script::Compile(mote_code_string)->Run();
}
3235
3236
// Regression test for issue 168801: a function enqueued as a code-flushing
// candidate and then optimized (taken off the list) while its code sits on
// an evacuation candidate must not corrupt the heap during the compacting
// GC cycles that follow.
TEST(Regress168801) {
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    // Age the code so the flusher considers it a candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    // Keep the function alive across the inner scope.
    function = inner_scope.CloseAndEscape(handle(*f, isolate));
  }

  // Simulate incremental marking so that unoptimized function is enqueued as a
  // candidate for code flushing. The shared function info however will not be
  // explicitly enqueued.
  SimulateIncrementalMarking();

  // Now optimize the function so that it is taken off the candidate list.
  {
    HandleScope inner_scope(isolate);
    CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
  }

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}
3291
3292
// Regression test for crbug.com/173458: like Regress168801, an unoptimized
// function becomes a code-flushing candidate during incremental marking,
// but here the debugger is loaded afterwards (which disables code flushing)
// before the full GCs run. The GCs must complete without heap corruption.
TEST(Regress173458) {
  // Force compaction so code objects actually move, and disable the
  // optimized-code cache; natives syntax is enabled for test intrinsics.
  i::FLAG_always_compact = true;
  i::FLAG_cache_optimized_code = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_flush_code_incrementally = true;
  CcTest::InitializeVM();
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);

  // Ensure the code ends up on an evacuation candidate.
  SimulateFullSpace(heap->code_space());

  // Prepare an unoptimized function that is eligible for code flushing.
  Handle<JSFunction> function;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);");

    Handle<JSFunction> f =
        v8::Utils::OpenHandle(
            *v8::Handle<v8::Function>::Cast(
                v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
    CHECK(f->is_compiled());
    // Artificially age the unoptimized code so it becomes eligible for
    // flushing; the parity alternates as it would across successive GCs.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    // Escape the function to the outer scope so the closure itself stays
    // alive while its code is a flushing candidate.
    function = inner_scope.CloseAndEscape(handle(*f, isolate));
  }

  // Simulate incremental marking so that unoptimized function is enqueued as a
  // candidate for code flushing. The shared function info however will not be
  // explicitly enqueued.
  SimulateIncrementalMarking();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Now enable the debugger which in turn will disable code flushing.
  CHECK(isolate->debug()->Load());
#endif  // ENABLE_DEBUGGER_SUPPORT

  // This cycle will bust the heap and subsequent cycles will go ballistic.
  heap->CollectAllGarbage(Heap::kNoGCFlags);
  heap->CollectAllGarbage(Heap::kNoGCFlags);
}
3346
3347
3348 class DummyVisitor : public ObjectVisitor {
3349  public:
3350   void VisitPointers(Object** start, Object** end) { }
3351 };
3352
3353
3354 TEST(DeferredHandles) {
3355   CcTest::InitializeVM();
3356   Isolate* isolate = Isolate::Current();
3357   Heap* heap = isolate->heap();
3358   v8::HandleScope scope;
3359   v8::ImplementationUtilities::HandleScopeData* data =
3360       isolate->handle_scope_data();
3361   Handle<Object> init(heap->empty_string(), isolate);
3362   while (data->next < data->limit) {
3363     Handle<Object> obj(heap->empty_string(), isolate);
3364   }
3365   // An entire block of handles has been filled.
3366   // Next handle would require a new block.
3367   ASSERT(data->next == data->limit);
3368
3369   DeferredHandleScope deferred(isolate);
3370   DummyVisitor visitor;
3371   isolate->handle_scope_implementer()->Iterate(&visitor);
3372   deferred.Detach();
3373 }
3374
3375
// Checks that one incremental-marking step with a very large budget
// (100 MB) is enough to finish marking a heap that contains a huge array.
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  // Allocate a very large (10M-element), sparsely initialized array.
  CompileRun("function f(n) {"
             "    var a = new Array(n);"
             "    for (var i = 0; i < n; i += 100) a[i] = i;"
             "};"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = HEAP->incremental_marking();
  if (marking->IsStopped()) marking->Start();
  // This big step should be sufficient to mark the whole array.
  marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  ASSERT(marking->IsComplete());
}