1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
10 #include "src/compilation-cache.h"
11 #include "src/execution.h"
12 #include "src/factory.h"
13 #include "src/global-handles.h"
14 #include "src/ic/ic.h"
15 #include "src/macro-assembler.h"
16 #include "test/cctest/cctest.h"
18 using namespace v8::base;
19 using namespace v8::internal;
21 #if V8_DOUBLE_FIELDS_UNBOXING
// Installs |descriptors| and |layout_descriptor| on |map|, then verifies
// that the layout descriptor is consistent with the map (strict check).
29 static void InitializeVerifiedMapDescriptors(
30 Map* map, DescriptorArray* descriptors,
31 LayoutDescriptor* layout_descriptor) {
32 map->InitializeDescriptors(descriptors, layout_descriptor);
33 CHECK(layout_descriptor->IsConsistentWithMap(map, true));
// Returns an internalized string created from the given C string.
37 static Handle<String> MakeString(const char* str) {
38 Isolate* isolate = CcTest::i_isolate();
39 Factory* factory = isolate->factory();
40 return factory->InternalizeUtf8String(str);
// Returns an internalized string "<str><suffix>", e.g. MakeName("prop", 5)
// yields "prop5".
44 static Handle<String> MakeName(const char* str, int suffix) {
45 EmbeddedVector<char, 128> buffer;
46 SNPrintF(buffer, "%s%d", str, suffix);
47 return MakeString(buffer.start());
// Looks up property |name| on the test's global object and returns it as an
// internal JSObject handle.
51 Handle<JSObject> GetObject(const char* name) {
52 return v8::Utils::OpenHandle(
53 *v8::Handle<v8::Object>::Cast(CcTest::global()->Get(v8_str(name))));
// Reads the double value of a field regardless of its storage: either an
// unboxed raw double inside the object, or a boxed mutable HeapNumber.
57 static double GetDoubleFieldValue(JSObject* obj, FieldIndex field_index) {
58 if (obj->IsUnboxedDoubleField(field_index)) {
59 return obj->RawFastDoublePropertyAt(field_index);
// Boxed case: the slot holds a mutable HeapNumber wrapping the double.
61 Object* value = obj->RawFastPropertyAt(field_index);
62 DCHECK(value->IsMutableHeapNumber());
63 return HeapNumber::cast(value)->value();
67 const int kNumberOfBits = 32;
// Property kinds used to drive descriptor-array construction in these tests.
// (The enumerator list is not fully visible in this excerpt; presumably it
// defines PROP_CONSTANT, PROP_SMI, PROP_DOUBLE, PROP_TAGGED and
// PROP_KIND_NUMBER — confirm against the full file.)
70 enum TestPropertyKind {
// Field representation for each TestPropertyKind, indexed by the enum value.
78 static Representation representations[PROP_KIND_NUMBER] = {
79 Representation::None(), Representation::Smi(), Representation::Double(),
80 Representation::Tagged()};
// Builds a DescriptorArray with properties named "prop<i>": PROP_CONSTANT
// entries become data-constant descriptors referring to a shared function,
// all other kinds become data-field descriptors with the representation
// from |representations| indexed by props[i].
83 static Handle<DescriptorArray> CreateDescriptorArray(Isolate* isolate,
84 TestPropertyKind* props,
86 Factory* factory = isolate->factory();
88 Handle<String> func_name = factory->InternalizeUtf8String("func");
89 Handle<JSFunction> func = factory->NewFunction(func_name);
91 Handle<DescriptorArray> descriptors =
92 DescriptorArray::Allocate(isolate, 0, kPropsCount);
94 int next_field_offset = 0;
95 for (int i = 0; i < kPropsCount; i++) {
96 EmbeddedVector<char, 64> buffer;
97 SNPrintF(buffer, "prop%d", i);
98 Handle<String> name = factory->InternalizeUtf8String(buffer.start());
100 TestPropertyKind kind = props[i];
102 if (kind == PROP_CONSTANT) {
103 DataConstantDescriptor d(name, func, NONE);
104 descriptors->Append(&d);
// Field case: advance the offset by the field's width in words (doubles
// may occupy more than one word, depending on the target).
107 DataDescriptor f(name, next_field_offset, NONE, representations[kind]);
108 next_field_offset += f.GetDetails().field_width_in_words();
109 descriptors->Append(&f);
// Exercises the fast (Smi-encoded, all-tagged) layout descriptor: capacity,
// out-of-range queries, bit-flip round-trips, and sequence-length queries.
116 TEST(LayoutDescriptorBasicFast) {
117 CcTest::InitializeVM();
118 v8::HandleScope scope(CcTest::isolate());
120 LayoutDescriptor* layout_desc = LayoutDescriptor::FastPointerLayout();
122 CHECK(!layout_desc->IsSlowLayout());
123 CHECK(layout_desc->IsFastPointerLayout());
124 CHECK_EQ(kSmiValueSize, layout_desc->capacity());
// Queries past the capacity — and even negative indices — must still report
// "tagged" for the fast pointer layout.
126 for (int i = 0; i < kSmiValueSize + 13; i++) {
127 CHECK_EQ(true, layout_desc->IsTagged(i));
129 CHECK_EQ(true, layout_desc->IsTagged(-1));
130 CHECK_EQ(true, layout_desc->IsTagged(-12347));
131 CHECK_EQ(true, layout_desc->IsTagged(15635));
132 CHECK(layout_desc->IsFastPointerLayout());
// Flip every bit off and back on; the descriptor must end up unchanged and
// still be the canonical fast pointer layout.
134 for (int i = 0; i < kSmiValueSize; i++) {
135 layout_desc = layout_desc->SetTaggedForTesting(i, false);
136 CHECK_EQ(false, layout_desc->IsTagged(i));
137 layout_desc = layout_desc->SetTaggedForTesting(i, true);
138 CHECK_EQ(true, layout_desc->IsTagged(i));
140 CHECK(layout_desc->IsFastPointerLayout());
// Sequence-length queries on an all-tagged layout: the run is reported as
// unlimited (INT_MAX) or capped by the caller-supplied maximum.
143 CHECK_EQ(true, layout_desc->IsTagged(0, std::numeric_limits<int>::max(),
145 CHECK_EQ(std::numeric_limits<int>::max(), sequence_length);
147 CHECK_EQ(true, layout_desc->IsTagged(0, 7, &sequence_length));
148 CHECK_EQ(7, sequence_length);
// Exercises the transition from the fast layout descriptor to the slow
// (out-of-line) one as double fields are added and the number of in-object
// properties grows beyond kSmiValueSize.
152 TEST(LayoutDescriptorBasicSlow) {
153 CcTest::InitializeVM();
154 Isolate* isolate = CcTest::i_isolate();
155 v8::HandleScope scope(CcTest::isolate());
157 Handle<LayoutDescriptor> layout_descriptor;
158 const int kPropsCount = kSmiValueSize * 3;
159 TestPropertyKind props[kPropsCount];
160 for (int i = 0; i < kPropsCount; i++) {
161 // All properties tagged.
162 props[i] = PROP_TAGGED;
// Case 1: all-tagged properties must yield the canonical fast layout.
166 Handle<DescriptorArray> descriptors =
167 CreateDescriptorArray(isolate, props, kPropsCount);
169 Handle<Map> map = Map::Create(isolate, kPropsCount);
171 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
172 CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
173 CHECK_EQ(kSmiValueSize, layout_descriptor->capacity());
174 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
// Case 2: first and last properties are doubles, but the last one is not
// in-object, so only bit 0 matters and the layout stays fast (non-canonical).
177 props[0] = PROP_DOUBLE;
178 props[kPropsCount - 1] = PROP_DOUBLE;
180 Handle<DescriptorArray> descriptors =
181 CreateDescriptorArray(isolate, props, kPropsCount);
184 int inobject_properties = kPropsCount - 1;
185 Handle<Map> map = Map::Create(isolate, inobject_properties);
187 // Should be fast as the only double property is the first one.
188 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
189 CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
190 CHECK(!layout_descriptor->IsSlowLayout());
191 CHECK(!layout_descriptor->IsFastPointerLayout());
193 CHECK_EQ(false, layout_descriptor->IsTagged(0));
194 for (int i = 1; i < kPropsCount; i++) {
195 CHECK_EQ(true, layout_descriptor->IsTagged(i));
197 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
// Case 3: with all properties in-object, the untagged last field no longer
// fits into the Smi-encoded form, forcing the slow layout.
201 int inobject_properties = kPropsCount;
202 Handle<Map> map = Map::Create(isolate, inobject_properties);
204 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
205 CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
206 CHECK(layout_descriptor->IsSlowLayout());
207 CHECK(!layout_descriptor->IsFastPointerLayout());
208 CHECK(layout_descriptor->capacity() > kSmiValueSize);
210 CHECK_EQ(false, layout_descriptor->IsTagged(0));
211 CHECK_EQ(false, layout_descriptor->IsTagged(kPropsCount - 1));
212 for (int i = 1; i < kPropsCount - 1; i++) {
213 CHECK_EQ(true, layout_descriptor->IsTagged(i));
216 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
218 // Here we have truly slow layout descriptor, so play with the bits.
219 CHECK_EQ(true, layout_descriptor->IsTagged(-1));
220 CHECK_EQ(true, layout_descriptor->IsTagged(-12347));
221 CHECK_EQ(true, layout_descriptor->IsTagged(15635));
223 LayoutDescriptor* layout_desc = *layout_descriptor;
224 // Play with the bits but leave it in consistent state with map at the end.
225 for (int i = 1; i < kPropsCount - 1; i++) {
226 layout_desc = layout_desc->SetTaggedForTesting(i, false);
227 CHECK_EQ(false, layout_desc->IsTagged(i));
228 layout_desc = layout_desc->SetTaggedForTesting(i, true);
229 CHECK_EQ(true, layout_desc->IsTagged(i));
231 CHECK(layout_desc->IsSlowLayout());
232 CHECK(!layout_desc->IsFastPointerLayout());
233 CHECK(layout_descriptor->IsConsistentWithMap(*map, true));
// Builds a layout descriptor of the given length whose tagged/untagged state
// flips at each index listed in |bit_flip_positions| (terminated by an
// out-of-range sentinel), then verifies IsTagged() for every index including
// the expected tagged/untagged sequence lengths, capped by
// |max_sequence_length|.
238 static void TestLayoutDescriptorQueries(int layout_descriptor_length,
239 int* bit_flip_positions,
240 int max_sequence_length) {
241 Handle<LayoutDescriptor> layout_descriptor = LayoutDescriptor::NewForTesting(
242 CcTest::i_isolate(), layout_descriptor_length);
// NewForTesting may round the length up; use the actual capacity.
243 layout_descriptor_length = layout_descriptor->capacity();
244 LayoutDescriptor* layout_desc = *layout_descriptor;
247 // Fill in the layout descriptor.
248 int cur_bit_flip_index = 0;
250 for (int i = 0; i < layout_descriptor_length; i++) {
251 if (i == bit_flip_positions[cur_bit_flip_index]) {
253 ++cur_bit_flip_index;
254 CHECK(i < bit_flip_positions[cur_bit_flip_index]); // check test data
256 layout_desc = layout_desc->SetTaggedForTesting(i, tagged);
260 if (layout_desc->IsFastPointerLayout()) {
// Verify the descriptor bit-by-bit against the same flip schedule.
266 int cur_bit_flip_index = 0;
268 for (int i = 0; i < layout_descriptor_length; i++) {
269 if (i == bit_flip_positions[cur_bit_flip_index]) {
271 ++cur_bit_flip_index;
273 CHECK_EQ(tagged, layout_desc->IsTagged(i));
275 int next_bit_flip_position = bit_flip_positions[cur_bit_flip_index];
276 int expected_sequence_length;
277 if (next_bit_flip_position < layout_desc->capacity()) {
278 expected_sequence_length = next_bit_flip_position - i;
// No further flips: a tagged run extends indefinitely (INT_MAX), an
// untagged run only to the end of the descriptor.
280 expected_sequence_length = tagged ? std::numeric_limits<int>::max()
281 : (layout_desc->capacity() - i);
283 expected_sequence_length =
284 Min(expected_sequence_length, max_sequence_length);
287 layout_desc->IsTagged(i, max_sequence_length, &sequence_length));
288 CHECK(sequence_length > 0);
290 CHECK_EQ(expected_sequence_length, sequence_length);
// Querying at the very end of the descriptor reports a tagged run of the
// maximum requested length.
295 layout_desc->IsTagged(layout_descriptor_length,
296 max_sequence_length, &sequence_length));
297 CHECK_EQ(max_sequence_length, sequence_length);
// Runs sequence-length query checks against fast layout descriptors with
// several bit-flip patterns; every pattern fits within kSmiValueSize bits.
302 static void TestLayoutDescriptorQueriesFast(int max_sequence_length) {
// The canonical all-tagged layout always reports the capped maximum run.
304 LayoutDescriptor* layout_desc = LayoutDescriptor::FastPointerLayout();
306 for (int i = 0; i < kNumberOfBits; i++) {
308 layout_desc->IsTagged(i, max_sequence_length, &sequence_length));
309 CHECK(sequence_length > 0);
310 CHECK_EQ(max_sequence_length, sequence_length);
// No flips at all (sentinel only): fully tagged.
315 int bit_flip_positions[] = {1000};
316 TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
317 max_sequence_length);
// Single flip at 0: fully untagged.
321 int bit_flip_positions[] = {0, 1000};
322 TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
323 max_sequence_length);
// Alternating pattern: flip at every bit.
327 int bit_flip_positions[kNumberOfBits + 1];
328 for (int i = 0; i <= kNumberOfBits; i++) {
329 bit_flip_positions[i] = i;
331 TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
332 max_sequence_length);
// Irregular flip pattern starting tagged.
336 int bit_flip_positions[] = {3, 7, 8, 10, 15, 21, 30, 1000};
337 TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
338 max_sequence_length);
// Irregular flip pattern starting untagged (flip at 0).
342 int bit_flip_positions[] = {0, 1, 2, 3, 5, 7, 9,
343 12, 15, 18, 22, 26, 29, 1000};
344 TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
345 max_sequence_length);
// Fast-layout queries with the sequence length capped at 7.
350 TEST(LayoutDescriptorQueriesFastLimited7) {
351 CcTest::InitializeVM();
352 v8::HandleScope scope(CcTest::isolate());
354 TestLayoutDescriptorQueriesFast(7);
// Fast-layout queries with the sequence length capped at 13.
358 TEST(LayoutDescriptorQueriesFastLimited13) {
359 CcTest::InitializeVM();
360 v8::HandleScope scope(CcTest::isolate());
362 TestLayoutDescriptorQueriesFast(13);
// Fast-layout queries with no sequence-length cap (INT_MAX).
366 TEST(LayoutDescriptorQueriesFastUnlimited) {
367 CcTest::InitializeVM();
368 v8::HandleScope scope(CcTest::isolate());
370 TestLayoutDescriptorQueriesFast(std::numeric_limits<int>::max());
// Runs sequence-length query checks against slow layout descriptors sized
// for kMaxNumberOfDescriptors bits, with several bit-flip patterns.
374 static void TestLayoutDescriptorQueriesSlow(int max_sequence_length) {
// No flips (sentinel only): fully tagged.
376 int bit_flip_positions[] = {10000};
377 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
378 max_sequence_length);
// Single flip at 0: fully untagged.
382 int bit_flip_positions[] = {0, 10000};
383 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
384 max_sequence_length);
// Alternating pattern: flip at every index.
388 int bit_flip_positions[kMaxNumberOfDescriptors + 1];
389 for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
390 bit_flip_positions[i] = i;
392 bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
393 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
394 max_sequence_length);
// Irregular flip pattern starting tagged.
398 int bit_flip_positions[] = {3, 7, 8, 10, 15, 21, 30,
399 37, 54, 80, 99, 383, 10000};
400 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
401 max_sequence_length);
// Irregular flip pattern starting untagged (flip at 0).
405 int bit_flip_positions[] = {0, 10, 20, 30, 50, 70, 90,
406 120, 150, 180, 220, 260, 290, 10000};
407 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
408 max_sequence_length);
// Generated flip pattern (step logic not visible in this excerpt).
412 int bit_flip_positions[kMaxNumberOfDescriptors + 1];
414 for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
415 bit_flip_positions[i] = cur;
419 bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
420 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
421 max_sequence_length);
// Another generated flip pattern with a different progression.
425 int bit_flip_positions[kMaxNumberOfDescriptors + 1];
427 for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
428 bit_flip_positions[i] = cur;
432 bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
433 TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
434 max_sequence_length);
// Slow-layout queries with the sequence length capped at 7.
439 TEST(LayoutDescriptorQueriesSlowLimited7) {
440 CcTest::InitializeVM();
441 v8::HandleScope scope(CcTest::isolate());
443 TestLayoutDescriptorQueriesSlow(7);
// Slow-layout queries with the sequence length capped at 13.
447 TEST(LayoutDescriptorQueriesSlowLimited13) {
448 CcTest::InitializeVM();
449 v8::HandleScope scope(CcTest::isolate());
451 TestLayoutDescriptorQueriesSlow(13);
// Slow-layout queries with the sequence length capped at 42.
455 TEST(LayoutDescriptorQueriesSlowLimited42) {
456 CcTest::InitializeVM();
457 v8::HandleScope scope(CcTest::isolate());
459 TestLayoutDescriptorQueriesSlow(42);
// Slow-layout queries with no sequence-length cap (INT_MAX).
463 TEST(LayoutDescriptorQueriesSlowUnlimited) {
464 CcTest::InitializeVM();
465 v8::HandleScope scope(CcTest::isolate());
467 TestLayoutDescriptorQueriesSlow(std::numeric_limits<int>::max());
// Checks LayoutDescriptor::New() for a small tagged/double/tagged property
// set while varying how many of the fields are in-object: the layout stays
// canonical-fast until the double field itself becomes in-object.
471 TEST(LayoutDescriptorCreateNewFast) {
472 CcTest::InitializeVM();
473 Isolate* isolate = CcTest::i_isolate();
474 v8::HandleScope scope(CcTest::isolate());
476 Handle<LayoutDescriptor> layout_descriptor;
477 TestPropertyKind props[] = {
479 PROP_TAGGED, // field #0
481 PROP_DOUBLE, // field #1
483 PROP_TAGGED, // field #2
486 const int kPropsCount = arraysize(props);
488 Handle<DescriptorArray> descriptors =
489 CreateDescriptorArray(isolate, props, kPropsCount);
// No in-object properties: the double is out-of-object, canonical layout.
492 Handle<Map> map = Map::Create(isolate, 0);
493 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
494 CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
495 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
// One in-object property (the tagged field #0): still canonical.
499 Handle<Map> map = Map::Create(isolate, 1);
500 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
501 CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
502 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
// Two in-object properties: field #1 (double) is now in-object, so a
// non-canonical (but still fast) layout with bit 1 cleared is needed.
506 Handle<Map> map = Map::Create(isolate, 2);
507 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
508 CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
509 CHECK(!layout_descriptor->IsSlowLayout());
510 CHECK_EQ(true, layout_descriptor->IsTagged(0));
511 CHECK_EQ(false, layout_descriptor->IsTagged(1));
512 CHECK_EQ(true, layout_descriptor->IsTagged(2));
513 CHECK_EQ(true, layout_descriptor->IsTagged(125));
514 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
// Like LayoutDescriptorCreateNewFast but with enough mixed-kind properties
// to force the slow layout, plus a check of LayoutDescriptor::cast_gc_safe()
// against a fake forwarding pointer.
519 TEST(LayoutDescriptorCreateNewSlow) {
520 CcTest::InitializeVM();
521 Isolate* isolate = CcTest::i_isolate();
522 v8::HandleScope scope(CcTest::isolate());
524 Handle<LayoutDescriptor> layout_descriptor;
525 const int kPropsCount = kSmiValueSize * 3;
526 TestPropertyKind props[kPropsCount];
527 for (int i = 0; i < kPropsCount; i++) {
// Cycle through every property kind.
528 props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
531 Handle<DescriptorArray> descriptors =
532 CreateDescriptorArray(isolate, props, kPropsCount);
// No in-object properties: canonical fast layout.
535 Handle<Map> map = Map::Create(isolate, 0);
536 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
537 CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
538 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
// One in-object property: still canonical.
542 Handle<Map> map = Map::Create(isolate, 1);
543 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
544 CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
545 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
// Two in-object properties: the in-object double forces a non-canonical
// fast layout with bit 1 cleared.
549 Handle<Map> map = Map::Create(isolate, 2);
550 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
551 CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
552 CHECK(!layout_descriptor->IsSlowLayout());
553 CHECK_EQ(true, layout_descriptor->IsTagged(0));
554 CHECK_EQ(false, layout_descriptor->IsTagged(1));
555 CHECK_EQ(true, layout_descriptor->IsTagged(2));
556 CHECK_EQ(true, layout_descriptor->IsTagged(125));
557 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
// Many in-object properties: slow layout; verify the tagged pattern for
// in-object fields and that everything after them reads as tagged.
561 int inobject_properties = kPropsCount / 2;
562 Handle<Map> map = Map::Create(isolate, inobject_properties);
563 layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
564 CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
565 CHECK(layout_descriptor->IsSlowLayout());
566 for (int i = 0; i < inobject_properties; i++) {
567 // PROP_DOUBLE has index 1 among DATA properties.
568 const bool tagged = (i % (PROP_KIND_NUMBER - 1)) != 1;
569 CHECK_EQ(tagged, layout_descriptor->IsTagged(i));
571 // Every property after inobject_properties must be tagged.
572 for (int i = inobject_properties; i < kPropsCount; i++) {
573 CHECK_EQ(true, layout_descriptor->IsTagged(i));
575 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
577 // Now test LayoutDescriptor::cast_gc_safe().
578 Handle<LayoutDescriptor> layout_descriptor_copy =
579 LayoutDescriptor::New(map, descriptors, kPropsCount);
581 LayoutDescriptor* layout_desc = *layout_descriptor;
582 CHECK_EQ(layout_desc, LayoutDescriptor::cast(layout_desc));
583 CHECK_EQ(layout_desc, LayoutDescriptor::cast_gc_safe(layout_desc));
// Slow layout descriptors are backed by a fixed typed array.
584 CHECK(layout_descriptor->IsFixedTypedArrayBase());
585 // Now make it look like a forwarding pointer to layout_descriptor_copy.
586 MapWord map_word = layout_desc->map_word();
587 CHECK(!map_word.IsForwardingAddress());
588 layout_desc->set_map_word(
589 MapWord::FromForwardingAddress(*layout_descriptor_copy));
590 CHECK(layout_desc->map_word().IsForwardingAddress());
// cast_gc_safe() must follow the forwarding pointer to the copy.
591 CHECK_EQ(*layout_descriptor_copy,
592 LayoutDescriptor::cast_gc_safe(layout_desc));
// Restore the original map word so the heap stays valid.
595 layout_desc->set_map_word(map_word);
596 CHECK_EQ(layout_desc, LayoutDescriptor::cast(layout_desc));
// Builds a map by appending |kPropsCount| descriptors one at a time through
// LayoutDescriptor::ShareAppend(), checking after every append that exactly
// the in-object double fields are marked untagged. Returns the map's final
// layout descriptor (verified consistent with the map).
601 static Handle<LayoutDescriptor> TestLayoutDescriptorAppend(
602 Isolate* isolate, int inobject_properties, TestPropertyKind* props,
604 Factory* factory = isolate->factory();
606 Handle<String> func_name = factory->InternalizeUtf8String("func");
607 Handle<JSFunction> func = factory->NewFunction(func_name);
609 Handle<DescriptorArray> descriptors =
610 DescriptorArray::Allocate(isolate, 0, kPropsCount);
612 Handle<Map> map = Map::Create(isolate, inobject_properties);
613 map->InitializeDescriptors(*descriptors,
614 LayoutDescriptor::FastPointerLayout());
616 int next_field_offset = 0;
617 for (int i = 0; i < kPropsCount; i++) {
618 EmbeddedVector<char, 64> buffer;
619 SNPrintF(buffer, "prop%d", i);
620 Handle<String> name = factory->InternalizeUtf8String(buffer.start());
622 Handle<LayoutDescriptor> layout_descriptor;
623 TestPropertyKind kind = props[i];
624 if (kind == PROP_CONSTANT) {
625 DataConstantDescriptor d(name, func, NONE);
626 layout_descriptor = LayoutDescriptor::ShareAppend(map, d.GetDetails());
627 descriptors->Append(&d);
// Field descriptor: append and verify the per-word tagged bits.
630 DataDescriptor f(name, next_field_offset, NONE, representations[kind]);
631 int field_width_in_words = f.GetDetails().field_width_in_words();
632 next_field_offset += field_width_in_words;
633 layout_descriptor = LayoutDescriptor::ShareAppend(map, f.GetDetails());
634 descriptors->Append(&f);
// Only in-object double fields may be untagged.
636 int field_index = f.GetDetails().field_index();
637 bool is_inobject = field_index < map->inobject_properties();
638 for (int bit = 0; bit < field_width_in_words; bit++) {
639 CHECK_EQ(is_inobject && (kind == PROP_DOUBLE),
640 !layout_descriptor->IsTagged(field_index + bit));
// The word just past the appended field must still read as tagged.
642 CHECK(layout_descriptor->IsTagged(next_field_offset));
644 map->InitializeDescriptors(*descriptors, *layout_descriptor);
646 Handle<LayoutDescriptor> layout_descriptor(map->layout_descriptor(), isolate);
647 CHECK(layout_descriptor->IsConsistentWithMap(*map, true));
648 return layout_descriptor;
// Appends mixed-kind properties for varying in-object counts and checks
// where the layout descriptor switches from fast to slow.
652 TEST(LayoutDescriptorAppend) {
653 CcTest::InitializeVM();
654 Isolate* isolate = CcTest::i_isolate();
655 v8::HandleScope scope(CcTest::isolate());
657 Handle<LayoutDescriptor> layout_descriptor;
658 const int kPropsCount = kSmiValueSize * 3;
659 TestPropertyKind props[kPropsCount];
660 for (int i = 0; i < kPropsCount; i++) {
// Cycle through every property kind.
661 props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
// Few (or no) in-object properties: fast layout suffices.
665 TestLayoutDescriptorAppend(isolate, 0, props, kPropsCount);
666 CHECK(!layout_descriptor->IsSlowLayout());
669 TestLayoutDescriptorAppend(isolate, 13, props, kPropsCount);
670 CHECK(!layout_descriptor->IsSlowLayout());
673 TestLayoutDescriptorAppend(isolate, kSmiValueSize, props, kPropsCount);
674 CHECK(!layout_descriptor->IsSlowLayout());
// Beyond kSmiValueSize in-object fields the descriptor must go slow.
676 layout_descriptor = TestLayoutDescriptorAppend(isolate, kSmiValueSize * 2,
678 CHECK(layout_descriptor->IsSlowLayout());
681 TestLayoutDescriptorAppend(isolate, kPropsCount, props, kPropsCount);
682 CHECK(layout_descriptor->IsSlowLayout());
// Same as LayoutDescriptorAppend but with every property a double, which
// pins down the exact boundary (kSmiValueSize) of the fast-to-slow switch.
686 TEST(LayoutDescriptorAppendAllDoubles) {
687 CcTest::InitializeVM();
688 Isolate* isolate = CcTest::i_isolate();
689 v8::HandleScope scope(CcTest::isolate());
691 Handle<LayoutDescriptor> layout_descriptor;
692 const int kPropsCount = kSmiValueSize * 3;
693 TestPropertyKind props[kPropsCount];
694 for (int i = 0; i < kPropsCount; i++) {
695 props[i] = PROP_DOUBLE;
// Up to kSmiValueSize in-object doubles still fit the fast encoding.
699 TestLayoutDescriptorAppend(isolate, 0, props, kPropsCount);
700 CHECK(!layout_descriptor->IsSlowLayout());
703 TestLayoutDescriptorAppend(isolate, 13, props, kPropsCount);
704 CHECK(!layout_descriptor->IsSlowLayout());
707 TestLayoutDescriptorAppend(isolate, kSmiValueSize, props, kPropsCount);
708 CHECK(!layout_descriptor->IsSlowLayout());
// One more in-object double crosses the Smi bit capacity: slow layout.
710 layout_descriptor = TestLayoutDescriptorAppend(isolate, kSmiValueSize + 1,
712 CHECK(layout_descriptor->IsSlowLayout());
714 layout_descriptor = TestLayoutDescriptorAppend(isolate, kSmiValueSize * 2,
716 CHECK(layout_descriptor->IsSlowLayout());
719 TestLayoutDescriptorAppend(isolate, kPropsCount, props, kPropsCount);
720 CHECK(layout_descriptor->IsSlowLayout());
723 // Ensure layout descriptor switches into slow mode at the right moment.
725 TestLayoutDescriptorAppend(isolate, kPropsCount, props, kSmiValueSize);
726 CHECK(!layout_descriptor->IsSlowLayout());
728 layout_descriptor = TestLayoutDescriptorAppend(isolate, kPropsCount, props,
730 CHECK(layout_descriptor->IsSlowLayout());
// Installs descriptors incrementally via Map::CopyInstallDescriptorsForTesting
// (which uses LayoutDescriptor::AppendIfFastOrUseFull internally): the map's
// layout stays fast until it switches once to the precomputed full (slow)
// descriptor, after which it must stay on that exact object. Returns the
// final layout descriptor.
735 static Handle<LayoutDescriptor> TestLayoutDescriptorAppendIfFastOrUseFull(
736 Isolate* isolate, int inobject_properties,
737 Handle<DescriptorArray> descriptors, int number_of_descriptors) {
738 Handle<Map> map = Map::Create(isolate, inobject_properties);
// Precompute the full layout descriptor for the whole descriptor array.
740 Handle<LayoutDescriptor> full_layout_descriptor = LayoutDescriptor::New(
741 map, descriptors, descriptors->number_of_descriptors());
744 bool switched_to_slow_mode = false;
746 for (int i = 0; i < number_of_descriptors; i++) {
747 PropertyDetails details = descriptors->GetDetails(i);
749 // This method calls LayoutDescriptor::AppendIfFastOrUseFull() internally
750 // and does all the required map-descriptors related book keeping.
751 map = Map::CopyInstallDescriptorsForTesting(map, i, descriptors,
752 full_layout_descriptor);
754 LayoutDescriptor* layout_desc = map->layout_descriptor();
756 if (layout_desc->IsSlowLayout()) {
757 switched_to_slow_mode = true;
// Once slow, it must be exactly the precomputed full descriptor.
758 CHECK_EQ(*full_layout_descriptor, layout_desc);
// A fast descriptor after a slow one would mean we switched back: bug.
760 CHECK(!switched_to_slow_mode);
761 if (details.type() == DATA) {
763 int field_index = details.field_index();
764 int field_width_in_words = details.field_width_in_words();
// Only in-object double fields may be untagged.
766 bool is_inobject = field_index < map->inobject_properties();
767 for (int bit = 0; bit < field_width_in_words; bit++) {
768 CHECK_EQ(is_inobject && details.representation().IsDouble(),
769 !layout_desc->IsTagged(field_index + bit));
771 CHECK(layout_desc->IsTagged(field_index + field_width_in_words));
774 CHECK(map->layout_descriptor()->IsConsistentWithMap(*map));
777 Handle<LayoutDescriptor> layout_descriptor(map->GetLayoutDescriptor(),
779 CHECK(layout_descriptor->IsConsistentWithMap(*map));
780 return layout_descriptor;
// Incremental descriptor installation with mixed-kind properties: the layout
// stays fast up to kSmiValueSize in-object fields, then goes slow.
784 TEST(LayoutDescriptorAppendIfFastOrUseFull) {
785 CcTest::InitializeVM();
786 Isolate* isolate = CcTest::i_isolate();
787 v8::HandleScope scope(CcTest::isolate());
789 Handle<LayoutDescriptor> layout_descriptor;
790 const int kPropsCount = kSmiValueSize * 3;
791 TestPropertyKind props[kPropsCount];
792 for (int i = 0; i < kPropsCount; i++) {
// Cycle through every property kind.
793 props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
795 Handle<DescriptorArray> descriptors =
796 CreateDescriptorArray(isolate, props, kPropsCount);
798 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
799 isolate, 0, descriptors, kPropsCount);
800 CHECK(!layout_descriptor->IsSlowLayout());
802 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
803 isolate, 13, descriptors, kPropsCount);
804 CHECK(!layout_descriptor->IsSlowLayout());
806 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
807 isolate, kSmiValueSize, descriptors, kPropsCount);
808 CHECK(!layout_descriptor->IsSlowLayout());
// Beyond kSmiValueSize in-object fields the descriptor must go slow.
810 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
811 isolate, kSmiValueSize * 2, descriptors, kPropsCount);
812 CHECK(layout_descriptor->IsSlowLayout());
814 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
815 isolate, kPropsCount, descriptors, kPropsCount);
816 CHECK(layout_descriptor->IsSlowLayout());
// Same as LayoutDescriptorAppendIfFastOrUseFull but with all-double
// properties, pinning the fast/slow boundary exactly at kSmiValueSize.
820 TEST(LayoutDescriptorAppendIfFastOrUseFullAllDoubles) {
821 CcTest::InitializeVM();
822 Isolate* isolate = CcTest::i_isolate();
823 v8::HandleScope scope(CcTest::isolate());
825 Handle<LayoutDescriptor> layout_descriptor;
826 const int kPropsCount = kSmiValueSize * 3;
827 TestPropertyKind props[kPropsCount];
828 for (int i = 0; i < kPropsCount; i++) {
829 props[i] = PROP_DOUBLE;
831 Handle<DescriptorArray> descriptors =
832 CreateDescriptorArray(isolate, props, kPropsCount);
// Up to kSmiValueSize in-object doubles: still fast.
834 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
835 isolate, 0, descriptors, kPropsCount);
836 CHECK(!layout_descriptor->IsSlowLayout());
838 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
839 isolate, 13, descriptors, kPropsCount);
840 CHECK(!layout_descriptor->IsSlowLayout());
842 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
843 isolate, kSmiValueSize, descriptors, kPropsCount);
844 CHECK(!layout_descriptor->IsSlowLayout());
// One more in-object double exceeds the Smi bit capacity: slow layout.
846 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
847 isolate, kSmiValueSize + 1, descriptors, kPropsCount);
848 CHECK(layout_descriptor->IsSlowLayout());
850 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
851 isolate, kSmiValueSize * 2, descriptors, kPropsCount);
852 CHECK(layout_descriptor->IsSlowLayout());
854 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
855 isolate, kPropsCount, descriptors, kPropsCount);
856 CHECK(layout_descriptor->IsSlowLayout());
859 // Ensure layout descriptor switches into slow mode at the right moment.
860 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
861 isolate, kPropsCount, descriptors, kSmiValueSize);
862 CHECK(!layout_descriptor->IsSlowLayout());
864 layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
865 isolate, kPropsCount, descriptors, kSmiValueSize + 1);
866 CHECK(layout_descriptor->IsSlowLayout());
// Regression test for crbug/436816: unboxed double fields whose bit pattern
// looks like a heap pointer must not be misinterpreted by the GC, including
// across map normalization (which re-boxes the fields).
871 TEST(Regress436816) {
872 CcTest::InitializeVM();
873 Isolate* isolate = CcTest::i_isolate();
874 Factory* factory = isolate->factory();
875 v8::HandleScope scope(CcTest::isolate());
877 const int kPropsCount = kSmiValueSize * 3;
878 TestPropertyKind props[kPropsCount];
879 for (int i = 0; i < kPropsCount; i++) {
880 props[i] = PROP_DOUBLE;
882 Handle<DescriptorArray> descriptors =
883 CreateDescriptorArray(isolate, props, kPropsCount);
885 Handle<Map> map = Map::Create(isolate, kPropsCount);
886 Handle<LayoutDescriptor> layout_descriptor =
887 LayoutDescriptor::New(map, descriptors, kPropsCount);
888 map->InitializeDescriptors(*descriptors, *layout_descriptor);
890 Handle<JSObject> object = factory->NewJSObjectFromMap(map, TENURED);
// Craft a double whose bit pattern is a plausible tagged heap pointer.
892 Address fake_address = reinterpret_cast<Address>(~kHeapObjectTagMask);
893 HeapObject* fake_object = HeapObject::FromAddress(fake_address);
894 CHECK(fake_object->IsHeapObject());
896 double boom_value = bit_cast<double>(fake_object);
897 for (int i = 0; i < kPropsCount; i++) {
898 FieldIndex index = FieldIndex::ForDescriptor(*map, i);
899 CHECK(map->IsUnboxedDoubleField(index));
900 object->RawFastDoublePropertyAtPut(index, boom_value);
902 CHECK(object->HasFastProperties());
903 CHECK(!object->map()->HasFastPointerLayout());
// Normalizing re-boxes the doubles; the layout becomes all-pointer again.
905 Handle<Map> normalized_map =
906 Map::Normalize(map, KEEP_INOBJECT_PROPERTIES, "testing");
907 JSObject::MigrateToMap(object, normalized_map);
908 CHECK(!object->HasFastProperties());
909 CHECK(object->map()->HasFastPointerLayout());
911 // Trigger GCs and heap verification.
912 CcTest::heap()->CollectAllGarbage();
// Checks that GC trims a shared descriptor array together with its slow
// layout descriptor, and that the trimmed layout stays consistent when new
// tagged or double fields are added afterwards.
916 TEST(DescriptorArrayTrimming) {
917 CcTest::InitializeVM();
918 v8::HandleScope scope(CcTest::isolate());
919 Isolate* isolate = CcTest::i_isolate();
921 const int kFieldCount = 128;
922 const int kSplitFieldIndex = 32;
923 const int kTrimmedLayoutDescriptorLength = 64;
925 Handle<HeapType> any_type = HeapType::Any(isolate);
926 Handle<Map> map = Map::Create(isolate, kFieldCount);
// Smi fields up to the split point, then one double to force slow layout.
927 for (int i = 0; i < kSplitFieldIndex; i++) {
928 map = Map::CopyWithField(map, MakeName("prop", i), any_type, NONE,
929 Representation::Smi(),
930 INSERT_TRANSITION).ToHandleChecked();
932 map = Map::CopyWithField(map, MakeName("dbl", kSplitFieldIndex), any_type,
933 NONE, Representation::Double(),
934 INSERT_TRANSITION).ToHandleChecked();
935 CHECK(map->layout_descriptor()->IsConsistentWithMap(*map, true));
936 CHECK(map->layout_descriptor()->IsSlowLayout());
937 CHECK(map->owns_descriptors());
938 CHECK_EQ(2, map->layout_descriptor()->length());
941 // Add transitions to double fields.
// Inner scope so the extra transition maps can die before the GC below.
942 v8::HandleScope scope(CcTest::isolate());
944 Handle<Map> tmp_map = map;
945 for (int i = kSplitFieldIndex + 1; i < kFieldCount; i++) {
946 tmp_map = Map::CopyWithField(tmp_map, MakeName("dbl", i), any_type, NONE,
947 Representation::Double(),
948 INSERT_TRANSITION).ToHandleChecked();
949 CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
951 // Check that descriptors are shared.
952 CHECK(tmp_map->owns_descriptors());
953 CHECK_EQ(map->instance_descriptors(), tmp_map->instance_descriptors());
954 CHECK_EQ(map->layout_descriptor(), tmp_map->layout_descriptor());
956 CHECK(map->layout_descriptor()->IsSlowLayout());
957 CHECK_EQ(4, map->layout_descriptor()->length());
959 // The unused tail of the layout descriptor is now "durty" because of sharing.
960 CHECK(map->layout_descriptor()->IsConsistentWithMap(*map));
961 for (int i = kSplitFieldIndex + 1; i < kTrimmedLayoutDescriptorLength; i++) {
962 CHECK(!map->layout_descriptor()->IsTagged(i));
964 CHECK_LT(map->NumberOfOwnDescriptors(),
965 map->instance_descriptors()->number_of_descriptors());
967 // Call GC that should trim both |map|'s descriptor array and layout
969 CcTest::heap()->CollectAllGarbage();
971 // The unused tail of the layout descriptor is now "clean" again.
972 CHECK(map->layout_descriptor()->IsConsistentWithMap(*map, true));
973 CHECK(map->owns_descriptors());
974 CHECK_EQ(map->NumberOfOwnDescriptors(),
975 map->instance_descriptors()->number_of_descriptors());
976 CHECK(map->layout_descriptor()->IsSlowLayout());
977 CHECK_EQ(2, map->layout_descriptor()->length());
980 // Add transitions to tagged fields.
// Verify that growing the trimmed descriptor array again keeps the shared
// layout descriptor consistent (tagged fields, then one more double).
981 v8::HandleScope scope(CcTest::isolate());
983 Handle<Map> tmp_map = map;
984 for (int i = kSplitFieldIndex + 1; i < kFieldCount - 1; i++) {
985 tmp_map = Map::CopyWithField(tmp_map, MakeName("tagged", i), any_type,
986 NONE, Representation::Tagged(),
987 INSERT_TRANSITION).ToHandleChecked();
988 CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
990 tmp_map = Map::CopyWithField(tmp_map, MakeString("dbl"), any_type, NONE,
991 Representation::Double(),
992 INSERT_TRANSITION).ToHandleChecked();
993 CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
994 // Check that descriptors are shared.
995 CHECK(tmp_map->owns_descriptors());
996 CHECK_EQ(map->instance_descriptors(), tmp_map->instance_descriptors());
998 CHECK(map->layout_descriptor()->IsSlowLayout());
1003 CcTest::InitializeVM();
1004 v8::HandleScope scope(CcTest::isolate());
1005 Isolate* isolate = CcTest::i_isolate();
1006 Factory* factory = isolate->factory();
  // The plan: create |obj| with double field in new space, do scavenge so
1009 // that |obj| is moved to old space, construct a double value that looks like
1010 // a pointer to "from space" pointer. Do scavenge one more time and ensure
1011 // that it didn't crash or corrupt the double value stored in the object.
1013 Handle<HeapType> any_type = HeapType::Any(isolate);
1014 Handle<Map> map = Map::Create(isolate, 10);
1015 map = Map::CopyWithField(map, MakeName("prop", 0), any_type, NONE,
1016 Representation::Double(),
1017 INSERT_TRANSITION).ToHandleChecked();
1019 // Create object in new space.
1020 Handle<JSObject> obj = factory->NewJSObjectFromMap(map, NOT_TENURED);
1022 Handle<HeapNumber> heap_number = factory->NewHeapNumber(42.5);
1023 obj->WriteToField(0, *heap_number);
1026 // Ensure the object is properly set up.
1027 FieldIndex field_index = FieldIndex::ForDescriptor(*map, 0);
1028 CHECK(field_index.is_inobject() && field_index.is_double());
1029 CHECK_EQ(FLAG_unbox_double_fields, map->IsUnboxedDoubleField(field_index));
1030 CHECK_EQ(42.5, GetDoubleFieldValue(*obj, field_index));
1032 CHECK(isolate->heap()->new_space()->Contains(*obj));
1034 // Do scavenge so that |obj| is moved to survivor space.
1035 CcTest::heap()->CollectGarbage(i::NEW_SPACE);
1037 // Create temp object in the new space.
1038 Handle<JSArray> temp = factory->NewJSArray(FAST_ELEMENTS);
1039 CHECK(isolate->heap()->new_space()->Contains(*temp));
1041 // Construct a double value that looks like a pointer to the new space object
1042 // and store it into the obj.
1043 Address fake_object = reinterpret_cast<Address>(*temp) + kPointerSize;
1044 double boom_value = bit_cast<double>(fake_object);
1046 FieldIndex field_index = FieldIndex::ForDescriptor(obj->map(), 0);
1047 Handle<HeapNumber> boom_number = factory->NewHeapNumber(boom_value, MUTABLE);
1048 obj->FastPropertyAtPut(field_index, *boom_number);
1050 // Now |obj| moves to old gen and it has a double field that looks like
1051 // a pointer to a from semi-space.
1052 CcTest::heap()->CollectGarbage(i::NEW_SPACE, "boom");
1054 CHECK(isolate->heap()->old_space()->Contains(*obj));
1056 CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index));
// Tests that the incremental write barrier triggered during scavenge keeps a
// tagged field (pointing at an evacuation-candidate object) correct while a
// sibling unboxed double field is present in the same object.
TEST(DoScavengeWithIncrementalWriteBarrier) {
  // Test is meaningless without compaction + incremental marking.
  if (FLAG_never_compact || !FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  PagedSpace* old_space = heap->old_space();

  // The plan: create |obj_value| in old space and ensure that it is allocated
  // on evacuation candidate page, create |obj| with double and tagged fields
  // in new space and write |obj_value| to tagged field of |obj|, do two
  // scavenges to promote |obj| to old space, a GC in old space and ensure that
  // the tagged value was properly updated after candidates evacuation.

  Handle<HeapType> any_type = HeapType::Any(isolate);
  Handle<Map> map = Map::Create(isolate, 10);
  // Descriptor 0: unboxed double field; descriptor 1: tagged field.
  map = Map::CopyWithField(map, MakeName("prop", 0), any_type, NONE,
                           Representation::Double(),
                           INSERT_TRANSITION).ToHandleChecked();
  map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE,
                           Representation::Tagged(),
                           INSERT_TRANSITION).ToHandleChecked();

  // Create |obj_value| in old space.
  Handle<HeapObject> obj_value;
    AlwaysAllocateScope always_allocate(isolate);
    // Make sure |obj_value| is placed on an old-space evacuation candidate.
    SimulateFullSpace(old_space);
    obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS,
                                    Strength::WEAK, TENURED);
    // NOTE(review): |ec_page|'s declaration (a Page* local) is not visible in
    // this listing — confirm against the original file.
    ec_page = Page::FromAddress(obj_value->address());

  // Create object in new space.
  Handle<JSObject> obj = factory->NewJSObjectFromMap(map, NOT_TENURED);

  Handle<HeapNumber> heap_number = factory->NewHeapNumber(42.5);
  obj->WriteToField(0, *heap_number);
  obj->WriteToField(1, *obj_value);

    // Ensure the object is properly set up.
    FieldIndex field_index = FieldIndex::ForDescriptor(*map, 0);
    CHECK(field_index.is_inobject() && field_index.is_double());
    CHECK_EQ(FLAG_unbox_double_fields, map->IsUnboxedDoubleField(field_index));
    CHECK_EQ(42.5, GetDoubleFieldValue(*obj, field_index));

    field_index = FieldIndex::ForDescriptor(*map, 1);
    CHECK(field_index.is_inobject() && !field_index.is_double());
    CHECK(!map->IsUnboxedDoubleField(field_index));

  CHECK(isolate->heap()->new_space()->Contains(*obj));

  // Heap is ready, force |ec_page| to become an evacuation candidate and
  // simulate incremental marking.
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  SimulateIncrementalMarking(heap);
  // Disable stress compaction mode in order to let GC do scavenge.
  FLAG_stress_compaction = false;

  // Check that everything is ready for triggering incremental write barrier
  // during scavenge (i.e. that |obj| is black and incremental marking is
  // in compacting mode and |obj_value|'s page is an evacuation candidate).
  IncrementalMarking* marking = heap->incremental_marking();
  CHECK(marking->IsCompacting());
  CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj)));
  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  // Trigger GCs so that |obj| moves to old gen.
  heap->CollectGarbage(i::NEW_SPACE);  // in survivor space now
  heap->CollectGarbage(i::NEW_SPACE);  // in old gen now

  CHECK(isolate->heap()->old_space()->Contains(*obj));
  CHECK(isolate->heap()->old_space()->Contains(*obj_value));
  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  heap->CollectGarbage(i::OLD_SPACE, "boom");

  // |obj_value| must be evacuated.
  CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  // The tagged field must have been updated to |obj_value|'s new location.
  FieldIndex field_index = FieldIndex::ForDescriptor(*map, 1);
  CHECK_EQ(*obj_value, obj->RawFastPropertyAt(field_index));
1151 static void TestLayoutDescriptorHelper(Isolate* isolate,
1152 int inobject_properties,
1153 Handle<DescriptorArray> descriptors,
1154 int number_of_descriptors) {
1155 Handle<Map> map = Map::Create(isolate, inobject_properties);
1157 Handle<LayoutDescriptor> layout_descriptor = LayoutDescriptor::New(
1158 map, descriptors, descriptors->number_of_descriptors());
1159 InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
1161 LayoutDescriptorHelper helper(*map);
1162 bool all_fields_tagged = true;
1164 int instance_size = map->instance_size();
1166 int end_offset = instance_size * 2;
1167 int first_non_tagged_field_offset = end_offset;
1168 for (int i = 0; i < number_of_descriptors; i++) {
1169 PropertyDetails details = descriptors->GetDetails(i);
1170 if (details.type() != DATA) continue;
1171 FieldIndex index = FieldIndex::ForDescriptor(*map, i);
1172 if (!index.is_inobject()) continue;
1173 all_fields_tagged &= !details.representation().IsDouble();
1174 bool expected_tagged = !index.is_double();
1175 if (!expected_tagged) {
1176 first_non_tagged_field_offset =
1177 Min(first_non_tagged_field_offset, index.offset());
1180 int end_of_region_offset;
1181 CHECK_EQ(expected_tagged, helper.IsTagged(index.offset()));
1182 CHECK_EQ(expected_tagged, helper.IsTagged(index.offset(), instance_size,
1183 &end_of_region_offset));
1184 CHECK(end_of_region_offset > 0);
1185 CHECK(end_of_region_offset % kPointerSize == 0);
1186 CHECK(end_of_region_offset <= instance_size);
1188 for (int offset = index.offset(); offset < end_of_region_offset;
1189 offset += kPointerSize) {
1190 CHECK_EQ(expected_tagged, helper.IsTagged(index.offset()));
1192 if (end_of_region_offset < instance_size) {
1193 CHECK_EQ(!expected_tagged, helper.IsTagged(end_of_region_offset));
1195 CHECK_EQ(true, helper.IsTagged(end_of_region_offset));
1199 for (int offset = 0; offset < JSObject::kHeaderSize; offset += kPointerSize) {
1201 CHECK_EQ(true, helper.IsTagged(offset));
1202 int end_of_region_offset;
1203 CHECK_EQ(true, helper.IsTagged(offset, end_offset, &end_of_region_offset));
1204 CHECK_EQ(first_non_tagged_field_offset, end_of_region_offset);
1206 // Out of bounds queries
1207 CHECK_EQ(true, helper.IsTagged(offset + instance_size));
1210 CHECK_EQ(all_fields_tagged, helper.all_fields_tagged());
1214 TEST(LayoutDescriptorHelperMixed) {
1215 CcTest::InitializeVM();
1216 Isolate* isolate = CcTest::i_isolate();
1217 v8::HandleScope scope(CcTest::isolate());
1219 Handle<LayoutDescriptor> layout_descriptor;
1220 const int kPropsCount = kSmiValueSize * 3;
1221 TestPropertyKind props[kPropsCount];
1222 for (int i = 0; i < kPropsCount; i++) {
1223 props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
1225 Handle<DescriptorArray> descriptors =
1226 CreateDescriptorArray(isolate, props, kPropsCount);
1228 TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);
1230 TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);
1232 TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);
1234 TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
1237 TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
1241 TEST(LayoutDescriptorHelperAllTagged) {
1242 CcTest::InitializeVM();
1243 Isolate* isolate = CcTest::i_isolate();
1244 v8::HandleScope scope(CcTest::isolate());
1246 Handle<LayoutDescriptor> layout_descriptor;
1247 const int kPropsCount = kSmiValueSize * 3;
1248 TestPropertyKind props[kPropsCount];
1249 for (int i = 0; i < kPropsCount; i++) {
1250 props[i] = PROP_TAGGED;
1252 Handle<DescriptorArray> descriptors =
1253 CreateDescriptorArray(isolate, props, kPropsCount);
1255 TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);
1257 TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);
1259 TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);
1261 TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
1264 TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
1268 TEST(LayoutDescriptorHelperAllDoubles) {
1269 CcTest::InitializeVM();
1270 Isolate* isolate = CcTest::i_isolate();
1271 v8::HandleScope scope(CcTest::isolate());
1273 Handle<LayoutDescriptor> layout_descriptor;
1274 const int kPropsCount = kSmiValueSize * 3;
1275 TestPropertyKind props[kPropsCount];
1276 for (int i = 0; i < kPropsCount; i++) {
1277 props[i] = PROP_DOUBLE;
1279 Handle<DescriptorArray> descriptors =
1280 CreateDescriptorArray(isolate, props, kPropsCount);
1282 TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);
1284 TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);
1286 TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);
1288 TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
1291 TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
// Tests sharing of slow-mode layout descriptors across map transitions:
// a compatible extension (another double) keeps the exact same layout
// descriptor object, while an incompatible one (tagged field over what was
// a double slot in the sibling) gets its own.
TEST(LayoutDescriptorSharing) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Handle<HeapType> any_type = HeapType::Any(isolate);

  Handle<Map> split_map;
    // 32 smi fields followed by a double force the layout descriptor into
    // slow (out-of-object) mode.
    Handle<Map> map = Map::Create(isolate, 64);
    for (int i = 0; i < 32; i++) {
      Handle<String> name = MakeName("prop", i);
      map = Map::CopyWithField(map, name, any_type, NONE, Representation::Smi(),
                               INSERT_TRANSITION).ToHandleChecked();
    }
    split_map = Map::CopyWithField(map, MakeString("dbl"), any_type, NONE,
                                   Representation::Double(),
                                   INSERT_TRANSITION).ToHandleChecked();
  Handle<LayoutDescriptor> split_layout_descriptor(
      split_map->layout_descriptor(), isolate);
  CHECK(split_layout_descriptor->IsConsistentWithMap(*split_map, true));
  CHECK(split_layout_descriptor->IsSlowLayout());
  CHECK(split_map->owns_descriptors());

  // Extending |split_map| transfers descriptor ownership to the child but
  // must keep the same layout descriptor object in |split_map|.
  Handle<Map> map1 = Map::CopyWithField(split_map, MakeString("foo"), any_type,
                                        NONE, Representation::Double(),
                                        INSERT_TRANSITION).ToHandleChecked();
  CHECK(!split_map->owns_descriptors());
  CHECK_EQ(*split_layout_descriptor, split_map->layout_descriptor());

  // Layout descriptors should be shared with |split_map|.
  CHECK(map1->owns_descriptors());
  CHECK_EQ(*split_layout_descriptor, map1->layout_descriptor());
  CHECK(map1->layout_descriptor()->IsConsistentWithMap(*map1, true));

  Handle<Map> map2 = Map::CopyWithField(split_map, MakeString("bar"), any_type,
                                        NONE, Representation::Tagged(),
                                        INSERT_TRANSITION).ToHandleChecked();

  // Layout descriptors should not be shared with |split_map|.
  CHECK(map2->owns_descriptors());
  CHECK_NE(*split_layout_descriptor, map2->layout_descriptor());
  CHECK(map2->layout_descriptor()->IsConsistentWithMap(*map2, true));
// Tests that scan-on-scavenge store-buffer processing does not misinterpret
// an unboxed double field whose bit pattern happens to look like a pointer
// into new space.
TEST(StoreBufferScanOnScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  Handle<HeapType> any_type = HeapType::Any(isolate);
  Handle<Map> map = Map::Create(isolate, 10);
  // Single in-object field with double representation (unboxed when the
  // unboxing flag is on).
  map = Map::CopyWithField(map, MakeName("prop", 0), any_type, NONE,
                           Representation::Double(),
                           INSERT_TRANSITION).ToHandleChecked();

  // Create object in new space.
  Handle<JSObject> obj = factory->NewJSObjectFromMap(map, NOT_TENURED);

  Handle<HeapNumber> heap_number = factory->NewHeapNumber(42.5);
  obj->WriteToField(0, *heap_number);

    // Ensure the object is properly set up.
    DescriptorArray* descriptors = map->instance_descriptors();
    CHECK(descriptors->GetDetails(0).representation().IsDouble());
    FieldIndex field_index = FieldIndex::ForDescriptor(*map, 0);
    CHECK(field_index.is_inobject() && field_index.is_double());
    CHECK_EQ(FLAG_unbox_double_fields, map->IsUnboxedDoubleField(field_index));
    CHECK_EQ(42.5, GetDoubleFieldValue(*obj, field_index));

  CHECK(isolate->heap()->new_space()->Contains(*obj));

  // Trigger GCs so that the newly allocated object moves to old gen.
  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now

  CHECK(isolate->heap()->old_space()->Contains(*obj));

  // Create temp object in the new space.
  Handle<JSArray> temp = factory->NewJSArray(FAST_ELEMENTS);
  CHECK(isolate->heap()->new_space()->Contains(*temp));

  // Construct a double value that looks like a pointer to the new space object
  // and store it into the obj.
  Address fake_object = reinterpret_cast<Address>(*temp) + kPointerSize;
  double boom_value = bit_cast<double>(fake_object);

  // Store the suspicious bit pattern into the double field of the (now
  // old-space) object.
  FieldIndex field_index = FieldIndex::ForDescriptor(obj->map(), 0);
  Handle<HeapNumber> boom_number = factory->NewHeapNumber(boom_value, MUTABLE);
  obj->FastPropertyAtPut(field_index, *boom_number);

  // Enforce scan on scavenge for the obj's page.
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  chunk->set_scan_on_scavenge(true);

  // Trigger GCs and force evacuation. Should not crash there.
  CcTest::heap()->CollectAllGarbage();

  // The raw double bits must survive unchanged.
  CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index));
1400 static int LenFromSize(int size) {
1401 return (size - FixedArray::kHeaderSize) / kPointerSize;
// Tests that Heap::CopyJSObject emits the write barriers needed so that a GC
// immediately after cloning does not treat a raw unboxed double in the clone
// (whose bits mimic a new-space pointer) as a reference.
TEST(WriteBarriersInCopyJSObject) {
  FLAG_max_semi_space_size = 1;  // Ensure new space is not growing.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  TestHeap* heap = CcTest::test_heap();

  v8::HandleScope scope(CcTest::isolate());

  // The plan: create JSObject which contains unboxed double value that looks
  // like a reference to an object in new space.
  // Then clone this object (forcing it to go into old space) and check
  // that the value of the unboxed double property of the cloned object
  // was not corrupted by GC.

  // Step 1: prepare a map for the object. We add unboxed double property to it.
  // Create a map with single inobject property.
  Handle<Map> my_map = Map::Create(isolate, 1);
  Handle<String> name = isolate->factory()->InternalizeUtf8String("foo");
  my_map = Map::CopyWithField(my_map, name, HeapType::Any(isolate), NONE,
                              Representation::Double(),
                              INSERT_TRANSITION).ToHandleChecked();

  int object_size = my_map->instance_size();

  // Step 2: allocate a lot of objects so to almost fill new space: we need
  // just enough room to allocate JSObject and thus fill the new space.

  int allocation_amount =
      Min(FixedArray::kMaxSize, Page::kMaxRegularHeapObjectSize + kPointerSize);
  int allocation_len = LenFromSize(allocation_amount);
  NewSpace* new_space = heap->new_space();
  // Raw top/limit pointers let us compute exactly how much room remains.
  Address* top_addr = new_space->allocation_top_address();
  Address* limit_addr = new_space->allocation_limit_address();
  while ((*limit_addr - *top_addr) > allocation_amount) {
    CHECK(!heap->always_allocate());
    Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked();
    CHECK(new_space->Contains(array));

  // Step 3: now allocate fixed array and JSObject to fill the whole new space.
  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
  int fixed_array_len = LenFromSize(to_fill);
  CHECK(fixed_array_len < FixedArray::kMaxLength);

  CHECK(!heap->always_allocate());
  Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked();
  CHECK(new_space->Contains(array));

  Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked();
  CHECK(new_space->Contains(object));
  JSObject* jsobject = JSObject::cast(object);
  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
  CHECK_EQ(0, jsobject->properties()->length());

  // Construct a double value that looks like a pointer to the new space object
  // and store it into the obj.
  Address fake_object = reinterpret_cast<Address>(array) + kPointerSize;
  double boom_value = bit_cast<double>(fake_object);
  FieldIndex index = FieldIndex::ForDescriptor(*my_map, 0);
  jsobject->RawFastDoublePropertyAtPut(index, boom_value);

  // New space must be exactly full now.
  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));

  // Step 4: clone jsobject, but force always allocate first to create a clone
  // in old pointer space.
  AlwaysAllocateScope aa_scope(isolate);
  Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
  Handle<JSObject> clone(JSObject::cast(clone_obj));
  CHECK(heap->old_space()->Contains(clone->address()));

  CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");

  // The value in cloned object should not be corrupted by GC.
  CHECK_EQ(boom_value, clone->RawFastDoublePropertyAt(index));
// Checks that the regular write barrier records slots for stores into
// property |tagged_descriptor| of |obj|, and that those recorded slots are
// not misinterpreted after |obj| migrates to |new_map| and the same slot
// starts holding a raw unboxed double.
static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map,
                             int tagged_descriptor, int double_descriptor,
                             bool check_tagged_value = true) {
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  PagedSpace* old_space = heap->old_space();

  // The plan: create |obj| by |map| in old space, create |obj_value| in
  // new space and ensure that write barrier is triggered when |obj_value| is
  // written to property |tagged_descriptor| of |obj|.
  // Then migrate object to |new_map| and set proper value for property
  // |double_descriptor|. Call GC and ensure that it did not crash during
  // store buffer entries updating.

  Handle<JSObject> obj;
  Handle<HeapObject> obj_value;
    AlwaysAllocateScope always_allocate(isolate);
    obj = factory->NewJSObjectFromMap(map, TENURED);
    CHECK(old_space->Contains(*obj));

    obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS);

  CHECK(heap->InNewSpace(*obj_value));

    // Repeated old-to-new stores; each one must go through the write barrier.
    // NOTE(review): the declaration of |n| is not visible in this listing
    // (elided line) — presumably a SlotsBuffer-derived count; confirm against
    // the original file.
    FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
    for (int i = 0; i < n; i++) {
      obj->FastPropertyAtPut(index, *obj_value);

  // Migrate |obj| to |new_map| which should shift fields and put the
  // |boom_value| to the slot that was earlier recorded by write barrier.
  JSObject::MigrateToMap(obj, new_map);

  // A double whose bit pattern looks like a heap pointer; GC must treat it
  // as raw data rather than as a recorded slot.
  Address fake_object = reinterpret_cast<Address>(*obj_value) + kPointerSize;
  double boom_value = bit_cast<double>(fake_object);

  FieldIndex double_field_index =
      FieldIndex::ForDescriptor(*new_map, double_descriptor);
  CHECK(obj->IsUnboxedDoubleField(double_field_index));
  obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);

  // Trigger GC to evacuate all candidates.
  CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");

  if (check_tagged_value) {
    FieldIndex tagged_field_index =
        FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
    CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
  // The raw double bits must survive the GC unchanged.
  CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
// Checks that the incremental-marking write barrier records slots-buffer
// entries for stores into property |tagged_descriptor| of |obj|, and that
// those entries are not misinterpreted after |obj| migrates to |new_map| and
// the recorded slot starts holding a raw unboxed double.
static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
                                        int tagged_descriptor,
                                        int double_descriptor,
                                        bool check_tagged_value = true) {
  // Test is meaningless without compaction + incremental marking.
  if (FLAG_never_compact || !FLAG_incremental_marking) return;
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  PagedSpace* old_space = heap->old_space();

  // The plan: create |obj| by |map| in old space, create |obj_value| in
  // old space and ensure it end up in evacuation candidate page. Start
  // incremental marking and ensure that incremental write barrier is triggered
  // when |obj_value| is written to property |tagged_descriptor| of |obj|.
  // Then migrate object to |new_map| and set proper value for property
  // |double_descriptor|. Call GC and ensure that it did not crash during
  // slots buffer entries updating.

  Handle<JSObject> obj;
  Handle<HeapObject> obj_value;
    AlwaysAllocateScope always_allocate(isolate);
    obj = factory->NewJSObjectFromMap(map, TENURED);
    CHECK(old_space->Contains(*obj));

    // Make sure |obj_value| is placed on an old-space evacuation candidate.
    SimulateFullSpace(old_space);
    obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS,
                                    Strength::WEAK, TENURED);
    // NOTE(review): |ec_page|'s declaration (a Page* local) is not visible in
    // this listing — confirm against the original file.
    ec_page = Page::FromAddress(obj_value->address());
    CHECK_NE(ec_page, Page::FromAddress(obj->address()));

  // Heap is ready, force |ec_page| to become an evacuation candidate and
  // simulate incremental marking.
  ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  SimulateIncrementalMarking(heap);

  // Check that everything is ready for triggering incremental write barrier
  // (i.e. that both |obj| and |obj_value| are black and the marking phase is
  // still active and |obj_value|'s page is indeed an evacuation candidate).
  IncrementalMarking* marking = heap->incremental_marking();
  CHECK(marking->IsMarking());
  CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj)));
  CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj_value)));
  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  // Trigger incremental write barrier, which should add a slot to |ec_page|'s
    int slots_buffer_len = SlotsBuffer::SizeOfChain(ec_page->slots_buffer());
    FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
    // Overflow a single slots-buffer chunk so buffer chaining is exercised.
    const int n = SlotsBuffer::kNumberOfElements + 10;
    for (int i = 0; i < n; i++) {
      obj->FastPropertyAtPut(index, *obj_value);

    // Ensure that the slot was actually added to the |ec_page|'s slots buffer.
    CHECK_EQ(slots_buffer_len + n,
             SlotsBuffer::SizeOfChain(ec_page->slots_buffer()));

  // Migrate |obj| to |new_map| which should shift fields and put the
  // |boom_value| to the slot that was earlier recorded by incremental write
  JSObject::MigrateToMap(obj, new_map);

  // A double whose bit pattern is an obviously-bogus pointer; GC must treat
  // it as raw data, not as a recorded slot.
  double boom_value = bit_cast<double>(UINT64_C(0xbaad0176a37c28e1));

  FieldIndex double_field_index =
      FieldIndex::ForDescriptor(*new_map, double_descriptor);
  CHECK(obj->IsUnboxedDoubleField(double_field_index));
  obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);

  // Trigger GC to evacuate all candidates.
  CcTest::heap()->CollectGarbage(OLD_SPACE, "boom");

  // Ensure that the values are still there and correct.
  CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  if (check_tagged_value) {
    FieldIndex tagged_field_index =
        FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
    CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
  CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
// Which write-barrier mechanism a field-shifting test should exercise:
// old-to-old (incremental marking slots buffer) or old-to-new (store buffer).
enum WriteBarrierKind { OLD_TO_OLD_WRITE_BARRIER, OLD_TO_NEW_WRITE_BARRIER };
1635 static void TestWriteBarrierObjectShiftFieldsRight(
1636 WriteBarrierKind write_barrier_kind) {
1637 CcTest::InitializeVM();
1638 Isolate* isolate = CcTest::i_isolate();
1639 v8::HandleScope scope(CcTest::isolate());
1641 Handle<HeapType> any_type = HeapType::Any(isolate);
1643 CompileRun("function func() { return 1; }");
1645 Handle<JSObject> func = GetObject("func");
1647 Handle<Map> map = Map::Create(isolate, 10);
1648 map = Map::CopyWithConstant(map, MakeName("prop", 0), func, NONE,
1649 INSERT_TRANSITION).ToHandleChecked();
1650 map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE,
1651 Representation::Double(),
1652 INSERT_TRANSITION).ToHandleChecked();
1653 map = Map::CopyWithField(map, MakeName("prop", 2), any_type, NONE,
1654 Representation::Tagged(),
1655 INSERT_TRANSITION).ToHandleChecked();
1657 // Shift fields right by turning constant property to a field.
1658 Handle<Map> new_map = Map::ReconfigureProperty(
1659 map, 0, kData, NONE, Representation::Tagged(), any_type, FORCE_FIELD);
1661 if (write_barrier_kind == OLD_TO_NEW_WRITE_BARRIER) {
1662 TestWriteBarrier(map, new_map, 2, 1);
1664 CHECK_EQ(OLD_TO_OLD_WRITE_BARRIER, write_barrier_kind);
1665 TestIncrementalWriteBarrier(map, new_map, 2, 1);
// TODO(ishell): enable when this issue is fixed.
// Old-to-new (store buffer) variant; currently disabled (see TODO above).
DISABLED_TEST(WriteBarrierObjectShiftFieldsRight) {
  TestWriteBarrierObjectShiftFieldsRight(OLD_TO_NEW_WRITE_BARRIER);
// Old-to-old (incremental marking slots buffer) variant of the field-shift
// write barrier test.
TEST(IncrementalWriteBarrierObjectShiftFieldsRight) {
  TestWriteBarrierObjectShiftFieldsRight(OLD_TO_OLD_WRITE_BARRIER);
1681 // TODO(ishell): add respective tests for property kind reconfiguring from
1682 // accessor field to double, once accessor fields are supported by
1683 // Map::ReconfigureProperty().
1686 // TODO(ishell): add respective tests for fast property removal case once
1687 // Map::ReconfigureProperty() supports that.