From 4473edd7f19cc16567f241392ec83c4bb9fc42be Mon Sep 17 00:00:00 2001 From: "ishell@chromium.org" Date: Wed, 4 Jun 2014 09:35:41 +0000 Subject: [PATCH] Implemented folding of constant size allocation followed by dynamic size allocation. Manually folded allocations (JSArray, JSRegExpResult) are split into two separate allocations. R=hpayer@chromium.org Review URL: https://codereview.chromium.org/304153009 git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21671 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- src/code-stubs-hydrogen.cc | 1 + src/hydrogen-instructions.cc | 99 +++++++-- src/hydrogen-instructions.h | 19 +- src/hydrogen.cc | 467 +++++++++++++++++++++---------------------- src/hydrogen.h | 74 ++++--- 5 files changed, 369 insertions(+), 291 deletions(-) diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc index 6cf8bcb..9e97413 100644 --- a/src/code-stubs-hydrogen.cc +++ b/src/code-stubs-hydrogen.cc @@ -724,6 +724,7 @@ HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, + max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc index 981aec0..44a6dde 100644 --- a/src/hydrogen-instructions.cc +++ b/src/hydrogen-instructions.cc @@ -724,6 +724,21 @@ void HInstruction::InsertAfter(HInstruction* previous) { } +bool HInstruction::Dominates(HInstruction* other) { + if (block() != other->block()) { + return block()->Dominates(other->block()); + } + // Both instructions are in the same basic block. This instruction + // should precede the other one in order to dominate it. + for (HInstruction* instr = next(); instr != NULL; instr = instr->next()) { + if (instr == other) { + return true; + } + } + return false; +} + + #ifdef DEBUG void HInstruction::Verify() { // Verify that input operands are defined before use. @@ -3746,10 +3761,10 @@ bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect, HValue* current_size = size(); // TODO(hpayer): Add support for non-constant allocation in dominator. - if (!current_size->IsInteger32Constant() || - !dominator_size->IsInteger32Constant()) { + if (!dominator_size->IsInteger32Constant()) { if (FLAG_trace_allocation_folding) { - PrintF("#%d (%s) cannot fold into #%d (%s), dynamic allocation size\n", + PrintF("#%d (%s) cannot fold into #%d (%s), " + "dynamic allocation size in dominator\n", id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); } return false; @@ -3760,6 +3775,32 @@ bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect, return false; } + if (!has_size_upper_bound()) { + if (FLAG_trace_allocation_folding) { + PrintF("#%d (%s) cannot fold into #%d (%s), " + "can't estimate total allocation size\n", + id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); + } + return false; + } + + if (!current_size->IsInteger32Constant()) { + // If it's not constant then it is a size_in_bytes calculation graph + // like this: (const_header_size + const_element_size * size). 
+ ASSERT(current_size->IsInstruction()); + + HInstruction* current_instr = HInstruction::cast(current_size); + if (!current_instr->Dominates(dominator_allocate)) { + if (FLAG_trace_allocation_folding) { + PrintF("#%d (%s) cannot fold into #%d (%s), dynamic size " + "value does not dominate target allocation\n", + id(), Mnemonic(), dominator_allocate->id(), + dominator_allocate->Mnemonic()); + } + return false; + } + } + ASSERT((IsNewSpaceAllocation() && dominator_allocate->IsNewSpaceAllocation()) || (IsOldDataSpaceAllocation() && @@ -3772,20 +3813,16 @@ bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect, int32_t original_object_size = HConstant::cast(dominator_size)->GetInteger32Constant(); int32_t dominator_size_constant = original_object_size; - int32_t current_size_constant = - HConstant::cast(current_size)->GetInteger32Constant(); - int32_t new_dominator_size = dominator_size_constant + current_size_constant; if (MustAllocateDoubleAligned()) { - if (!dominator_allocate->MustAllocateDoubleAligned()) { - dominator_allocate->MakeDoubleAligned(); - } if ((dominator_size_constant & kDoubleAlignmentMask) != 0) { dominator_size_constant += kDoubleSize / 2; - new_dominator_size += kDoubleSize / 2; } } + int32_t current_size_max_value = size_upper_bound()->GetInteger32Constant(); + int32_t new_dominator_size = dominator_size_constant + current_size_max_value; + // Since we clear the first word after folded memory, we cannot use the // whole Page::kMaxRegularHeapObjectSize memory. if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) { @@ -3797,13 +3834,41 @@ bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect, return false; } - HInstruction* new_dominator_size_constant = HConstant::CreateAndInsertBefore( - zone, - context(), - new_dominator_size, - Representation::None(), - dominator_allocate); - dominator_allocate->UpdateSize(new_dominator_size_constant); + HInstruction* new_dominator_size_value; + + if (current_size->IsInteger32Constant()) { + new_dominator_size_value = + HConstant::CreateAndInsertBefore(zone, + context(), + new_dominator_size, + Representation::None(), + dominator_allocate); + } else { + HValue* new_dominator_size_constant = + HConstant::CreateAndInsertBefore(zone, + context(), + dominator_size_constant, + Representation::Integer32(), + dominator_allocate); + + // Add old and new size together and insert. 
+ current_size->ChangeRepresentation(Representation::Integer32()); + + new_dominator_size_value = HAdd::New(zone, context(), + new_dominator_size_constant, current_size); + new_dominator_size_value->ClearFlag(HValue::kCanOverflow); + new_dominator_size_value->ChangeRepresentation(Representation::Integer32()); + + new_dominator_size_value->InsertBefore(dominator_allocate); + } + + dominator_allocate->UpdateSize(new_dominator_size_value); + + if (MustAllocateDoubleAligned()) { + if (!dominator_allocate->MustAllocateDoubleAligned()) { + dominator_allocate->MakeDoubleAligned(); + } + } bool keep_new_space_iterable = FLAG_log_gc || FLAG_heap_stats; #ifdef VERIFY_HEAP diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h index f34c1e0..9e6f0f0 100644 --- a/src/hydrogen-instructions.h +++ b/src/hydrogen-instructions.h @@ -1158,6 +1158,7 @@ class HInstruction : public HValue { position_.set_operand_position(index, pos); } + bool Dominates(HInstruction* other); bool CanTruncateToSmi() const { return CheckFlag(kTruncatingToSmi); } bool CanTruncateToInt32() const { return CheckFlag(kTruncatingToInt32); } @@ -5458,6 +5459,13 @@ class HAllocate V8_FINAL : public HTemplateInstruction<2> { HValue* context() { return OperandAt(0); } HValue* size() { return OperandAt(1); } + bool has_size_upper_bound() { return size_upper_bound_ != NULL; } + HConstant* size_upper_bound() { return size_upper_bound_; } + void set_size_upper_bound(HConstant* value) { + ASSERT(size_upper_bound_ == NULL); + size_upper_bound_ = value; + } + virtual Representation RequiredInputRepresentation(int index) V8_OVERRIDE { if (index == 0) { return Representation::Tagged(); @@ -5533,9 +5541,10 @@ class HAllocate V8_FINAL : public HTemplateInstruction<2> { : HTemplateInstruction<2>(type), flags_(ComputeFlags(pretenure_flag, instance_type)), dominating_allocate_(NULL), - filler_free_space_size_(NULL) { + filler_free_space_size_(NULL), + size_upper_bound_(NULL) { SetOperandAt(0, context); - SetOperandAt(1, size); + UpdateSize(size); set_representation(Representation::Tagged()); SetFlag(kTrackSideEffectDominators); SetChangesFlag(kNewSpacePromotion); @@ -5582,6 +5591,11 @@ class HAllocate V8_FINAL : public HTemplateInstruction<2> { void UpdateSize(HValue* size) { SetOperandAt(1, size); + if (size->IsInteger32Constant()) { + size_upper_bound_ = HConstant::cast(size); + } else { + size_upper_bound_ = NULL; + } } HAllocate* GetFoldableDominator(HAllocate* dominator); @@ -5603,6 +5617,7 @@ class HAllocate V8_FINAL : public HTemplateInstruction<2> { Handle known_initial_map_; HAllocate* dominating_allocate_; HStoreNamedField* filler_free_space_size_; + HConstant* size_upper_bound_; }; diff --git a/src/hydrogen.cc b/src/hydrogen.cc index aedf3a1..36d62d2 100644 --- a/src/hydrogen.cc +++ b/src/hydrogen.cc @@ -1535,25 +1535,18 @@ HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length, HValue* index, HValue* input) { NoObservableSideEffectsScope scope(this); + HConstant* max_length = Add(JSObject::kInitialMaxFastElementArray); + Add(length, max_length); - // Compute the size of the RegExpResult followed by FixedArray with length. - HValue* size = length; - // Make sure size does not exceed max regular heap object size. 
- const int kHeaderSize = JSRegExpResult::kSize + FixedArray::kHeaderSize; - const int kMaxLength = - (Page::kMaxRegularHeapObjectSize - kHeaderSize) >> kPointerSizeLog2; - Add(size, Add(kMaxLength)); - - size = AddUncasted(size, Add(kPointerSizeLog2)); - size = AddUncasted(size, Add(kHeaderSize)); + // Generate size calculation code here in order to make it dominate + // the JSRegExpResult allocation. + ElementsKind elements_kind = FAST_ELEMENTS; + HValue* size = BuildCalculateElementsSize(elements_kind, length); // Allocate the JSRegExpResult and the FixedArray in one step. HValue* result = Add( - size, HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE); - - // Determine the elements FixedArray. - HValue* elements = Add( - result, Add(JSRegExpResult::kSize), HType::HeapObject()); + Add(JSRegExpResult::kSize), HType::JSArray(), + NOT_TENURED, JS_ARRAY_TYPE); // Initialize the JSRegExpResult header. HValue* global_object = Add( @@ -1567,12 +1560,14 @@ HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length, Add( native_context, static_cast(NULL), HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX))); + HConstant* empty_fixed_array = + Add(isolate()->factory()->empty_fixed_array()); Add( result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset), - Add(isolate()->factory()->empty_fixed_array())); + empty_fixed_array); Add( result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset), - elements); + empty_fixed_array); Add( result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length); @@ -1584,18 +1579,22 @@ HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length, result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset), input); - // Initialize the elements header. - AddStoreMapConstant(elements, isolate()->factory()->fixed_array_map()); - Add(elements, HObjectAccess::ForFixedArrayLength(), length); + // Allocate and initialize the elements header. + HAllocate* elements = BuildAllocateElements(elements_kind, size); + BuildInitializeElementsHeader(elements, elements_kind, length); + + HConstant* size_in_bytes_upper_bound = EstablishElementsAllocationSize( + elements_kind, max_length->Integer32Value()); + elements->set_size_upper_bound(size_in_bytes_upper_bound); + + Add( + result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset), + elements); // Initialize the elements contents with undefined. - LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement); - index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT); - { - Add(elements, index, graph()->GetConstantUndefined(), - FAST_ELEMENTS); - } - loop.EndBody(); + BuildFillElementsWithValue( + elements, elements_kind, graph()->GetConstant0(), length, + graph()->GetConstantUndefined()); return result; } @@ -2234,17 +2233,19 @@ HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess( } - HValue* HGraphBuilder::BuildAllocateArrayFromLength( JSArrayBuilder* array_builder, HValue* length_argument) { if (length_argument->IsConstant() && HConstant::cast(length_argument)->HasSmiValue()) { int array_length = HConstant::cast(length_argument)->Integer32Value(); - HValue* new_object = array_length == 0 - ? 
array_builder->AllocateEmptyArray() - : array_builder->AllocateArray(length_argument, length_argument); - return new_object; + if (array_length == 0) { + return array_builder->AllocateEmptyArray(); + } else { + return array_builder->AllocateArray(length_argument, + array_length, + length_argument); + } } HValue* constant_zero = graph()->GetConstant0(); @@ -2274,32 +2275,61 @@ HValue* HGraphBuilder::BuildAllocateArrayFromLength( // Figure out total size HValue* length = Pop(); HValue* capacity = Pop(); - return array_builder->AllocateArray(capacity, length); + return array_builder->AllocateArray(capacity, max_alloc_length, length); } -HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind, - HValue* capacity) { - int elements_size; - InstanceType instance_type; - - if (IsFastDoubleElementsKind(kind)) { - elements_size = kDoubleSize; - instance_type = FIXED_DOUBLE_ARRAY_TYPE; - } else { - elements_size = kPointerSize; - instance_type = FIXED_ARRAY_TYPE; - } +HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind, + HValue* capacity) { + int elements_size = IsFastDoubleElementsKind(kind) + ? kDoubleSize + : kPointerSize; HConstant* elements_size_value = Add(elements_size); - HValue* mul = AddUncasted(capacity, elements_size_value); + HInstruction* mul = HMul::NewImul(zone(), context(), + capacity->ActualValue(), + elements_size_value); + AddInstruction(mul); mul->ClearFlag(HValue::kCanOverflow); + STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize); + HConstant* header_size = Add(FixedArray::kHeaderSize); HValue* total_size = AddUncasted(mul, header_size); total_size->ClearFlag(HValue::kCanOverflow); + return total_size; +} + + +HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) { + int base_size = JSArray::kSize; + if (mode == TRACK_ALLOCATION_SITE) { + base_size += AllocationMemento::kSize; + } + HConstant* size_in_bytes = Add(base_size); + return Add( + size_in_bytes, HType::JSArray(), NOT_TENURED, JS_OBJECT_TYPE); +} + + +HConstant* HGraphBuilder::EstablishElementsAllocationSize( + ElementsKind kind, + int capacity) { + int base_size = IsFastDoubleElementsKind(kind) + ? FixedDoubleArray::SizeFor(capacity) + : FixedArray::SizeFor(capacity); - return Add(total_size, HType::HeapObject(), NOT_TENURED, + return Add(base_size); +} + + +HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind, + HValue* size_in_bytes) { + InstanceType instance_type = IsFastDoubleElementsKind(kind) + ? FIXED_DOUBLE_ARRAY_TYPE + : FIXED_ARRAY_TYPE; + + return Add(size_in_bytes, HType::HeapObject(), NOT_TENURED, instance_type); } @@ -2324,43 +2354,39 @@ HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader( // The HForceRepresentation is to prevent possible deopt on int-smi // conversion after allocation but before the new object fields are set. 
capacity = AddUncasted(capacity, Representation::Smi()); - HValue* new_elements = BuildAllocateElements(kind, capacity); + HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity); + HValue* new_elements = BuildAllocateElements(kind, size_in_bytes); BuildInitializeElementsHeader(new_elements, kind, capacity); return new_elements; } -HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array, - HValue* array_map, - AllocationSiteMode mode, - ElementsKind elements_kind, - HValue* allocation_site_payload, - HValue* length_field) { - +void HGraphBuilder::BuildJSArrayHeader(HValue* array, + HValue* array_map, + HValue* elements, + AllocationSiteMode mode, + ElementsKind elements_kind, + HValue* allocation_site_payload, + HValue* length_field) { Add(array, HObjectAccess::ForMap(), array_map); HConstant* empty_fixed_array = Add(isolate()->factory()->empty_fixed_array()); - HObjectAccess access = HObjectAccess::ForPropertiesPointer(); - Add(array, access, empty_fixed_array); - Add(array, HObjectAccess::ForArrayLength(elements_kind), - length_field); + Add( + array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array); + + Add( + array, HObjectAccess::ForElementsPointer(), + elements != NULL ? elements : empty_fixed_array); + + Add( + array, HObjectAccess::ForArrayLength(elements_kind), length_field); if (mode == TRACK_ALLOCATION_SITE) { BuildCreateAllocationMemento( array, Add(JSArray::kSize), allocation_site_payload); } - - int elements_location = JSArray::kSize; - if (mode == TRACK_ALLOCATION_SITE) { - elements_location += AllocationMemento::kSize; - } - - HInnerAllocatedObject* elements = Add( - array, Add(elements_location), HType::HeapObject()); - Add(array, HObjectAccess::ForElementsPointer(), elements); - return elements; } @@ -2397,6 +2423,12 @@ HInstruction* HGraphBuilder::AddElementAccess( } +HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object, + HValue* dependency) { + return Add(object, dependency, HObjectAccess::ForMap()); +} + + HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object, HValue* dependency) { return Add( @@ -2449,7 +2481,7 @@ HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object, HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader( new_kind, new_capacity); - BuildCopyElements(object, elements, kind, new_elements, + BuildCopyElements(elements, kind, new_elements, new_kind, length, new_capacity); Add(object, HObjectAccess::ForElementsPointer(), @@ -2459,19 +2491,11 @@ HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object, } -void HGraphBuilder::BuildFillElementsWithHole(HValue* elements, - ElementsKind elements_kind, - HValue* from, - HValue* to) { - // Fast elements kinds need to be initialized in case statements below cause a - // garbage collection. - Factory* factory = isolate()->factory(); - - double nan_double = FixedDoubleArray::hole_nan_as_double(); - HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind) - ? 
Add(factory->the_hole_value()) - : Add(nan_double); - +void HGraphBuilder::BuildFillElementsWithValue(HValue* elements, + ElementsKind elements_kind, + HValue* from, + HValue* to, + HValue* value) { if (to == NULL) { to = AddLoadFixedArrayLength(elements); } @@ -2498,7 +2522,7 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements, if (initial_capacity >= 0) { for (int i = 0; i < initial_capacity; i++) { HInstruction* key = Add(i); - Add(elements, key, hole, elements_kind); + Add(elements, key, value, elements_kind); } } else { // Carefully loop backwards so that the "from" remains live through the loop @@ -2512,15 +2536,31 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements, HValue* adjusted_key = AddUncasted(key, graph()->GetConstant1()); adjusted_key->ClearFlag(HValue::kCanOverflow); - Add(elements, adjusted_key, hole, elements_kind); + Add(elements, adjusted_key, value, elements_kind); builder.EndBody(); } } -void HGraphBuilder::BuildCopyElements(HValue* array, - HValue* from_elements, +void HGraphBuilder::BuildFillElementsWithHole(HValue* elements, + ElementsKind elements_kind, + HValue* from, + HValue* to) { + // Fast elements kinds need to be initialized in case statements below cause a + // garbage collection. + Factory* factory = isolate()->factory(); + + double nan_double = FixedDoubleArray::hole_nan_as_double(); + HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind) + ? Add(factory->the_hole_value()) + : Add(nan_double); + + BuildFillElementsWithValue(elements, elements_kind, from, to, hole); +} + + +void HGraphBuilder::BuildCopyElements(HValue* from_elements, ElementsKind from_elements_kind, HValue* to_elements, ElementsKind to_elements_kind, @@ -2607,88 +2647,43 @@ void HGraphBuilder::BuildCopyElements(HValue* array, AddIncrementCounter(counters->inlined_copied_elements()); } -HValue* HGraphBuilder::BuildCloneShallowArrayCommon( - HValue* boilerplate, - HValue* allocation_site, - HValue* extra_size, - HValue** return_elements, - AllocationSiteMode mode) { - // All sizes here are multiples of kPointerSize. - int array_size = JSArray::kSize; - if (mode == TRACK_ALLOCATION_SITE) { - array_size += AllocationMemento::kSize; - } - - HValue* size_in_bytes = Add(array_size); - if (extra_size != NULL) { - size_in_bytes = AddUncasted(extra_size, size_in_bytes); - size_in_bytes->ClearFlag(HValue::kCanOverflow); - } - - HInstruction* object = Add(size_in_bytes, - HType::JSObject(), - NOT_TENURED, - JS_OBJECT_TYPE); - - // Copy the JS array part. - HValue* map = Add(boilerplate, - static_cast(NULL), HObjectAccess::ForMap()); - Add(object, HObjectAccess::ForPropertiesPointer(), - Add(isolate()->factory()->empty_fixed_array()), - INITIALIZING_STORE); - Add(object, HObjectAccess::ForMap(), map, - INITIALIZING_STORE); - - // Create an allocation site info if requested. 
- if (mode == TRACK_ALLOCATION_SITE) { - BuildCreateAllocationMemento( - object, Add(JSArray::kSize), allocation_site); - } - - if (extra_size != NULL) { - HValue* elements = Add(object, - Add(array_size), HType::HeapObject()); - if (return_elements != NULL) *return_elements = elements; - } - - return object; -} - HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate, HValue* allocation_site, AllocationSiteMode mode, ElementsKind kind) { - HValue* result = BuildCloneShallowArrayCommon(boilerplate, - allocation_site, NULL, NULL, mode); + HAllocate* array = AllocateJSArrayObject(mode); + HValue* map = AddLoadMap(boilerplate); HValue* elements = AddLoadElements(boilerplate); - HObjectAccess access = HObjectAccess::ForElementsPointer(); - Add(result, access, elements, INITIALIZING_STORE); - HValue* length = AddLoadArrayLength(boilerplate, kind); - access = HObjectAccess::ForArrayLength(kind); - Add(result, access, length, INITIALIZING_STORE); - return result; + BuildJSArrayHeader(array, + map, + elements, + mode, + FAST_ELEMENTS, + allocation_site, + length); + return array; } HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate, HValue* allocation_site, AllocationSiteMode mode) { - HValue* result = BuildCloneShallowArrayCommon(boilerplate, - allocation_site, NULL, NULL, mode); + HAllocate* array = AllocateJSArrayObject(mode); - HObjectAccess access = HObjectAccess::ForArrayLength(FAST_ELEMENTS); - Add(result, access, graph()->GetConstant0(), - INITIALIZING_STORE); - access = HObjectAccess::ForElementsPointer(); - Add(result, access, - Add(isolate()->factory()->empty_fixed_array()), - INITIALIZING_STORE); + HValue* map = AddLoadMap(boilerplate); - return result; + BuildJSArrayHeader(array, + map, + NULL, // set elements to empty fixed array + mode, + FAST_ELEMENTS, + allocation_site, + graph()->GetConstant0()); + return array; } @@ -2696,27 +2691,31 @@ HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate, HValue* allocation_site, AllocationSiteMode mode, ElementsKind kind) { - int elements_kind_size = IsFastDoubleElementsKind(kind) - ? kDoubleSize : kPointerSize; - HValue* boilerplate_elements = AddLoadElements(boilerplate); HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements); - HValue* extra = AddUncasted(capacity, - Add(elements_kind_size)); - extra->ClearFlag(HValue::kCanOverflow); - extra = AddUncasted(extra, Add(FixedArray::kHeaderSize)); - extra->ClearFlag(HValue::kCanOverflow); + + // Generate size calculation code here in order to make it dominate + // the JSArray allocation. + HValue* elements_size = BuildCalculateElementsSize(kind, capacity); + + // Create empty JSArray object for now, store elimination should remove + // redundant initialization of elements and length fields and at the same + // time the object will be fully prepared for GC if it happens during + // elements allocation. + HValue* result = BuildCloneShallowArrayEmpty( + boilerplate, allocation_site, mode); + + HAllocate* elements = BuildAllocateElements(kind, elements_size); + // This function implicitly relies on the fact that the // FastCloneShallowArrayStub is called only for literals shorter than - // JSObject::kInitialMaxFastElementArray and therefore the size of the - // resulting folded allocation will always be in allowed range. + // JSObject::kInitialMaxFastElementArray. // Can't add HBoundsCheck here because otherwise the stub will eager a frame. 
+ HConstant* size_upper_bound = EstablishElementsAllocationSize( + kind, JSObject::kInitialMaxFastElementArray); + elements->set_size_upper_bound(size_upper_bound); - HValue* elements = NULL; - HValue* result = BuildCloneShallowArrayCommon(boilerplate, - allocation_site, extra, &elements, mode); - Add(result, HObjectAccess::ForElementsPointer(), - elements, INITIALIZING_STORE); + Add(result, HObjectAccess::ForElementsPointer(), elements); // The allocation for the cloned array above causes register pressure on // machines with low register counts. Force a reload of the boilerplate @@ -2730,19 +2729,15 @@ HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate, HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i); Add(elements, access, Add(boilerplate_elements, - static_cast(NULL), access), - INITIALIZING_STORE); + static_cast(NULL), access)); } // And the result of the length - HValue* length = Add(boilerplate, static_cast(NULL), - HObjectAccess::ForArrayLength(kind)); - Add(result, HObjectAccess::ForArrayLength(kind), - length, INITIALIZING_STORE); + HValue* length = AddLoadArrayLength(boilerplate, kind); + Add(result, HObjectAccess::ForArrayLength(kind), length); - BuildCopyElements(result, boilerplate_elements, kind, elements, + BuildCopyElements(boilerplate_elements, kind, elements, kind, length, NULL); - return result; } @@ -2937,67 +2932,47 @@ HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() { } -HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize( - HValue* length_node) { - ASSERT(length_node != NULL); - - int base_size = JSArray::kSize; - if (mode_ == TRACK_ALLOCATION_SITE) { - base_size += AllocationMemento::kSize; - } - - STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize); - base_size += FixedArray::kHeaderSize; - - HInstruction* elements_size_value = - builder()->Add(elements_size()); - HInstruction* mul = HMul::NewImul(builder()->zone(), builder()->context(), - length_node, elements_size_value); - builder()->AddInstruction(mul); - HInstruction* base = builder()->Add(base_size); - HInstruction* total_size = HAdd::New(builder()->zone(), builder()->context(), - base, mul); - total_size->ClearFlag(HValue::kCanOverflow); - builder()->AddInstruction(total_size); - return total_size; +HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() { + HConstant* capacity = builder()->Add(initial_capacity()); + return AllocateArray(capacity, + capacity, + builder()->graph()->GetConstant0()); } -HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() { - int base_size = JSArray::kSize; - if (mode_ == TRACK_ALLOCATION_SITE) { - base_size += AllocationMemento::kSize; - } - - base_size += IsFastDoubleElementsKind(kind_) - ? 
FixedDoubleArray::SizeFor(initial_capacity()) - : FixedArray::SizeFor(initial_capacity()); - - return builder()->Add(base_size); +HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray( + HValue* capacity, + HConstant* capacity_upper_bound, + HValue* length_field, + FillMode fill_mode) { + return AllocateArray(capacity, + capacity_upper_bound->GetInteger32Constant(), + length_field, + fill_mode); } -HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() { - HValue* size_in_bytes = EstablishEmptyArrayAllocationSize(); - HConstant* capacity = builder()->Add(initial_capacity()); - return AllocateArray(size_in_bytes, - capacity, - builder()->graph()->GetConstant0()); -} - +HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray( + HValue* capacity, + int capacity_upper_bound, + HValue* length_field, + FillMode fill_mode) { + HConstant* elememts_size_upper_bound = capacity->IsInteger32Constant() + ? HConstant::cast(capacity) + : builder()->EstablishElementsAllocationSize(kind_, capacity_upper_bound); -HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity, - HValue* length_field, - FillMode fill_mode) { - HValue* size_in_bytes = EstablishAllocationSize(capacity); - return AllocateArray(size_in_bytes, capacity, length_field, fill_mode); + HAllocate* array = AllocateArray(capacity, length_field, fill_mode); + if (!elements_location_->has_size_upper_bound()) { + elements_location_->set_size_upper_bound(elememts_size_upper_bound); + } + return array; } -HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes, - HValue* capacity, - HValue* length_field, - FillMode fill_mode) { +HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray( + HValue* capacity, + HValue* length_field, + FillMode fill_mode) { // These HForceRepresentations are because we store these as fields in the // objects we construct, and an int32-to-smi HChange could deopt. Accept // the deopt possibility now, before allocation occurs. @@ -3007,14 +2982,14 @@ HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes, length_field = builder()->AddUncasted(length_field, Representation::Smi()); - // Allocate (dealing with failure appropriately) - HAllocate* new_object = builder()->Add(size_in_bytes, - HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE); - // Folded array allocation should be aligned if it has fast double elements. - if (IsFastDoubleElementsKind(kind_)) { - new_object->MakeDoubleAligned(); - } + // Generate size calculation code here in order to make it dominate + // the JSArray allocation. 
+ HValue* elements_size = + builder()->BuildCalculateElementsSize(kind_, capacity); + + // Allocate (dealing with failure appropriately) + HAllocate* array_object = builder()->AllocateJSArrayObject(mode_); // Fill in the fields: map, properties, length HValue* map; @@ -3023,22 +2998,30 @@ HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes, } else { map = EmitMapCode(); } - elements_location_ = builder()->BuildJSArrayHeader(new_object, - map, - mode_, - kind_, - allocation_site_payload_, - length_field); - // Initialize the elements + builder()->BuildJSArrayHeader(array_object, + map, + NULL, // set elements to empty fixed array + mode_, + kind_, + allocation_site_payload_, + length_field); + + // Allocate and initialize the elements + elements_location_ = builder()->BuildAllocateElements(kind_, elements_size); + builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity); + // Set the elements + builder()->Add( + array_object, HObjectAccess::ForElementsPointer(), elements_location_); + if (fill_mode == FILL_WITH_HOLE) { builder()->BuildFillElementsWithHole(elements_location_, kind_, graph()->GetConstant0(), capacity); } - return new_object; + return array_object; } @@ -10514,7 +10497,7 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral( HConstant* empty_fixed_array = Add( isolate()->factory()->empty_fixed_array()); Add(object, HObjectAccess::ForElementsPointer(), - empty_fixed_array, INITIALIZING_STORE); + empty_fixed_array); BuildEmitObjectHeader(boilerplate_object, object); diff --git a/src/hydrogen.h b/src/hydrogen.h index 04b1b16..acf1d15 100644 --- a/src/hydrogen.h +++ b/src/hydrogen.h @@ -1396,6 +1396,8 @@ class HGraphBuilder { return Add(object, HObjectAccess::ForMap(), Add(map)); } + HLoadNamedField* AddLoadMap(HValue* object, + HValue* dependency = NULL); HLoadNamedField* AddLoadElements(HValue* object, HValue* dependency = NULL); @@ -1688,10 +1690,24 @@ class HGraphBuilder { }; ElementsKind kind() { return kind_; } - - HValue* AllocateEmptyArray(); - HValue* AllocateArray(HValue* capacity, HValue* length_field, - FillMode fill_mode = FILL_WITH_HOLE); + HAllocate* elements_location() { return elements_location_; } + + HAllocate* AllocateEmptyArray(); + HAllocate* AllocateArray(HValue* capacity, + HValue* length_field, + FillMode fill_mode = FILL_WITH_HOLE); + // Use these allocators when capacity could be unknown at compile time + // but its limit is known. For constant |capacity| the value of + // |capacity_upper_bound| is ignored and the actual |capacity| + // value is used as an upper bound. 
+ HAllocate* AllocateArray(HValue* capacity, + int capacity_upper_bound, + HValue* length_field, + FillMode fill_mode = FILL_WITH_HOLE); + HAllocate* AllocateArray(HValue* capacity, + HConstant* capacity_upper_bound, + HValue* length_field, + FillMode fill_mode = FILL_WITH_HOLE); HValue* GetElementsLocation() { return elements_location_; } HValue* EmitMapCode(); @@ -1708,25 +1724,23 @@ class HGraphBuilder { } HValue* EmitInternalMapCode(); - HValue* EstablishEmptyArrayAllocationSize(); - HValue* EstablishAllocationSize(HValue* length_node); - HValue* AllocateArray(HValue* size_in_bytes, HValue* capacity, - HValue* length_field, - FillMode fill_mode = FILL_WITH_HOLE); HGraphBuilder* builder_; ElementsKind kind_; AllocationSiteMode mode_; HValue* allocation_site_payload_; HValue* constructor_function_; - HInnerAllocatedObject* elements_location_; + HAllocate* elements_location_; }; HValue* BuildAllocateArrayFromLength(JSArrayBuilder* array_builder, HValue* length_argument); + HValue* BuildCalculateElementsSize(ElementsKind kind, + HValue* capacity); + HAllocate* AllocateJSArrayObject(AllocationSiteMode mode); + HConstant* EstablishElementsAllocationSize(ElementsKind kind, int capacity); - HValue* BuildAllocateElements(ElementsKind kind, - HValue* capacity); + HAllocate* BuildAllocateElements(ElementsKind kind, HValue* size_in_bytes); void BuildInitializeElementsHeader(HValue* elements, ElementsKind kind, @@ -1735,16 +1749,17 @@ class HGraphBuilder { HValue* BuildAllocateElementsAndInitializeElementsHeader(ElementsKind kind, HValue* capacity); - // array must have been allocated with enough room for - // 1) the JSArray, 2) a AllocationMemento if mode requires it, - // 3) a FixedArray or FixedDoubleArray. - // A pointer to the Fixed(Double)Array is returned. - HInnerAllocatedObject* BuildJSArrayHeader(HValue* array, - HValue* array_map, - AllocationSiteMode mode, - ElementsKind elements_kind, - HValue* allocation_site_payload, - HValue* length_field); + // |array| must have been allocated with enough room for + // 1) the JSArray and 2) an AllocationMemento if mode requires it. + // If the |elements| value provided is NULL then the array elements storage + // is initialized with empty array. + void BuildJSArrayHeader(HValue* array, + HValue* array_map, + HValue* elements, + AllocationSiteMode mode, + ElementsKind elements_kind, + HValue* allocation_site_payload, + HValue* length_field); HValue* BuildGrowElementsCapacity(HValue* object, HValue* elements, @@ -1753,25 +1768,24 @@ class HGraphBuilder { HValue* length, HValue* new_capacity); + void BuildFillElementsWithValue(HValue* elements, + ElementsKind elements_kind, + HValue* from, + HValue* to, + HValue* value); + void BuildFillElementsWithHole(HValue* elements, ElementsKind elements_kind, HValue* from, HValue* to); - void BuildCopyElements(HValue* array, - HValue* from_elements, + void BuildCopyElements(HValue* from_elements, ElementsKind from_elements_kind, HValue* to_elements, ElementsKind to_elements_kind, HValue* length, HValue* capacity); - HValue* BuildCloneShallowArrayCommon(HValue* boilerplate, - HValue* allocation_site, - HValue* extra_size, - HValue** return_elements, - AllocationSiteMode mode); - HValue* BuildCloneShallowArrayCow(HValue* boilerplate, HValue* allocation_site, AllocationSiteMode mode, -- 2.7.4
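
Notes on the new folding path in HAllocate::HandleSideEffectDominator: a dominated allocation may now have a dynamic size, provided the dominating allocation's size is a constant and the dominated HAllocate carries a size_upper_bound(). The dominator is then grown by that upper bound, after the usual double-alignment padding, and the fold is rejected if the combined size would exceed the regular-page object limit. The arithmetic, restated as a minimal standalone sketch (parameter names and constants are illustrative stand-ins, not the V8 definitions):

// Standalone sketch of the folding size computation; not Hydrogen code.
#include <cstdint>
#include <optional>

struct FoldParams {
  int32_t dominator_size;            // constant size of the dominating allocation
  int32_t current_size_upper_bound;  // size_upper_bound() of the dominated allocation
  bool needs_double_alignment;       // MustAllocateDoubleAligned() on the dominated one
  int32_t double_size;               // stand-in for kDoubleSize / kDoubleAlignment
  int32_t pointer_size;              // stand-in for kPointerSize
  int32_t max_regular_object_size;   // stand-in for Page::kMaxRegularHeapObjectSize
};

// Returns the new constant size for the dominating allocation, or nullopt if
// the fold must be rejected because the combined region would be too large.
std::optional<int32_t> FoldedDominatorSize(const FoldParams& p) {
  int32_t dominator_size = p.dominator_size;
  // Pad the dominator so the folded object starts double aligned
  // (mirrors the kDoubleAlignmentMask / kDoubleSize adjustment above).
  if (p.needs_double_alignment && (dominator_size % p.double_size) != 0) {
    dominator_size += p.double_size / 2;
  }
  // Grow by the worst-case size of the dominated allocation.
  int32_t new_size = dominator_size + p.current_size_upper_bound;
  // One word is kept free because the first word after the folded memory is
  // cleared, so the full regular-object limit cannot be used.
  if (new_size > p.max_regular_object_size - p.pointer_size) {
    return std::nullopt;
  }
  return new_size;
}

When the dominated size is itself a constant this reduces to the old constant-plus-constant fold; when it is dynamic, the patch instead inserts an HAdd of the (padded) dominator constant and the dynamic size value and uses that sum as the dominator's new size operand.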
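
The fold is only legal if the dynamic size value is available at the point of the dominating allocation, which is what the new HInstruction::Dominates helper checks: for instructions in different basic blocks it defers to block dominance, and within a single block it walks the next() chain, so an instruction dominates exactly the instructions emitted after it. A small standalone model of the same-block walk (illustrative types, not the Hydrogen classes):

// Same-basic-block dominance as a walk over a singly linked instruction list.
#include <cassert>

struct Instr {
  int block_id;
  Instr* next = nullptr;  // next instruction in the same basic block
};

bool DominatesSameBlock(const Instr* a, const Instr* b) {
  assert(a->block_id == b->block_id);
  // a dominates b iff b appears later in the block's instruction chain.
  for (const Instr* cur = a->next; cur != nullptr; cur = cur->next) {
    if (cur == b) return true;
  }
  return false;
}

int main() {
  Instr i1{0}, i2{0}, i3{0};
  i1.next = &i2;
  i2.next = &i3;
  assert(DominatesSameBlock(&i1, &i3));
  assert(!DominatesSameBlock(&i3, &i1));
  return 0;
}

This is also why the builders now emit the elements size calculation before allocating the JSArray or JSRegExpResult header: the size graph must dominate the header allocation for the elements allocation to remain foldable into it.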
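
Both builders obtain the dynamic byte size from BuildCalculateElementsSize (header plus capacity times element size, emitted as an Imul and an Add) and the constant upper bound from EstablishElementsAllocationSize evaluated at the maximum capacity: BuildRegExpConstructResult first bounds the result length against JSObject::kInitialMaxFastElementArray, and BuildCloneShallowArrayNonEmpty relies on literals never exceeding that same limit. Restated as plain arithmetic, with stand-in layout constants (the real kPointerSize and FixedArray::kHeaderSize values are architecture dependent and not reproduced here):

// Plain-arithmetic sketch of the two size helpers; constants are stand-ins.
#include <cstdint>

constexpr int32_t kTaggedSize = 8;                // stand-in for kPointerSize
constexpr int32_t kDoubleSize = 8;
constexpr int32_t kHeaderSize = 2 * kTaggedSize;  // stand-in for FixedArray::kHeaderSize

// BuildCalculateElementsSize: size of a Fixed(Double)Array backing store.
int32_t ElementsSizeInBytes(bool double_elements, int32_t capacity) {
  int32_t element_size = double_elements ? kDoubleSize : kTaggedSize;
  return kHeaderSize + capacity * element_size;
}

// EstablishElementsAllocationSize: the constant recorded through
// set_size_upper_bound() so the elements allocation stays foldable even
// though its actual size operand is dynamic.
int32_t ElementsSizeUpperBound(bool double_elements, int32_t max_capacity) {
  return ElementsSizeInBytes(double_elements, max_capacity);
}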
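
JSArrayBuilder follows the same pattern: AllocateArray now takes a capacity upper bound (as an int or an HConstant), allocates the fixed-size JSArray header with AllocateJSArrayObject, then allocates and wires up the elements separately, attaching the computed size upper bound to the elements HAllocate when it does not already have one. Assuming the fold succeeds, the resulting layout matches the existing constant-size fold: the elements object becomes an inner object at the end of the (padded) header allocation, and any difference between the recorded upper bound and the actual dynamic size is slack inside the enlarged reservation, handled by the existing free-space-filler machinery when --log-gc, heap stats, or heap verification require new space to stay iterable. Illustrative arithmetic only:

// Layout sketch for a successful fold; this mirrors the intent of the patch,
// not a specific V8 function.
#include <cstdint>

struct FoldedLayout {
  int32_t inner_offset;  // offset of the folded (second) object in the block
  int32_t slack_bytes;   // unused tail when the dynamic size < upper bound
};

FoldedLayout ComputeFoldedLayout(int32_t padded_dominator_size,
                                 int32_t dynamic_size,
                                 int32_t size_upper_bound) {
  FoldedLayout layout;
  layout.inner_offset = padded_dominator_size;
  layout.slack_bytes = size_upper_bound - dynamic_size;
  return layout;
}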