static Register registers[] = { r3, r2, r1 };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
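+ // Record each register parameter's representation so Hydrogen stubs can
+ // type their HParameters (r3: literals array, r2: literal index as a Smi,
+ // r1: constant elements).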
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Smi(),
+ Representation::Tagged() };
+ descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
descriptor->stack_parameter_count_ = r0;
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers_variable_args;
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Tagged(),
+ Representation::Integer32() };
+ descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
descriptor->stack_parameter_count_ = r0;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Integer32() };
+ descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
__ mov(r1, Operand(constant_elements));
- if (has_fast_elements && constant_elements_values->map() ==
- isolate()->heap()->fixed_cow_array_map()) {
- FastCloneShallowArrayStub stub(
- isolate(),
- FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
- allocation_site_mode,
- length);
- __ CallStub(&stub);
- __ IncrementCounter(
- isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
- } else if (expr->depth() > 1 || Serializer::enabled() ||
- length > FastCloneShallowArrayStub::kMaximumClonedLength) {
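+ // The stub now handles COW, empty, and arbitrary-length shallow literals;
+ // only nested literals (depth > 1) still go through the runtime.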
+ if (expr->depth() > 1) {
__ mov(r0, Operand(Smi::FromInt(flags)));
__ Push(r3, r2, r1, r0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
- ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
- FLAG_smi_only_arrays);
- FastCloneShallowArrayStub::Mode mode =
- FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
- if (has_fast_elements) {
- mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
- }
-
- FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
- length);
+ FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}
static Register registers[] = { x3, x2, x1 };
descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
descriptor->register_params_ = registers;
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Smi(),
+ Representation::Tagged() };
+ descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
descriptor->register_param_count_ =
sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
descriptor->register_params_ = registers_variable_args;
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Tagged(),
+ Representation::Integer32() };
+ descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
descriptor->register_param_count_ =
sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
descriptor->register_params_ = registers_variable_args;
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Integer32() };
+ descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
__ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
__ Mov(x2, Smi::FromInt(expr->literal_index()));
__ Mov(x1, Operand(constant_elements));
- if (has_fast_elements && constant_elements_values->map() ==
- isolate()->heap()->fixed_cow_array_map()) {
- FastCloneShallowArrayStub stub(
- isolate(),
- FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
- allocation_site_mode,
- length);
- __ CallStub(&stub);
- __ IncrementCounter(
- isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
- } else if ((expr->depth() > 1) || Serializer::enabled() ||
- length > FastCloneShallowArrayStub::kMaximumClonedLength) {
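+ // The stub now handles COW, empty, and arbitrary-length shallow literals;
+ // only nested literals (depth > 1) still go through the runtime.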
+ if (expr->depth() > 1) {
__ Mov(x0, Smi::FromInt(flags));
__ Push(x3, x2, x1, x0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
- ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
- FLAG_smi_only_arrays);
- FastCloneShallowArrayStub::Mode mode =
- FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
- if (has_fast_elements) {
- mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
- }
-
- FastCloneShallowArrayStub stub(isolate(),
- mode,
- allocation_site_mode,
- length);
+ FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}
bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
HInstruction* stack_parameter_count = NULL;
for (int i = 0; i < param_count; ++i) {
- Representation r = descriptor_->IsParameterCountRegister(i)
- ? Representation::Integer32()
- : Representation::Tagged();
+ // Use the representation recorded in the descriptor when present;
+ // stubs that do not supply representations get tagged parameters.
+ Representation r = descriptor_->register_param_representations_ == NULL
+ ? Representation::Tagged()
+ : descriptor_->register_param_representations_[i];
HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
start_environment->Bind(i, param);
parameters_[i] = param;
Factory* factory = isolate()->factory();
HValue* undefined = graph()->GetConstantUndefined();
AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
- FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
- int length = casted_stub()->length();
HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
GetParameter(1),
static_cast<HValue*>(NULL),
FAST_ELEMENTS);
HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
AllocationSite::kTransitionInfoOffset);
HInstruction* boilerplate = Add<HLoadNamedField>(
allocation_site, static_cast<HValue*>(NULL), access);
- HValue* push_value;
- if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
- HValue* elements = AddLoadElements(boilerplate);
-
- IfBuilder if_fixed_cow(this);
- if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
- if_fixed_cow.Then();
- push_value = BuildCloneShallowArray(boilerplate,
- allocation_site,
- alloc_site_mode,
- FAST_ELEMENTS,
- 0/*copy-on-write*/);
- environment()->Push(push_value);
- if_fixed_cow.Else();
-
- IfBuilder if_fixed(this);
- if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
- if_fixed.Then();
- push_value = BuildCloneShallowArray(boilerplate,
- allocation_site,
- alloc_site_mode,
- FAST_ELEMENTS,
- length);
- environment()->Push(push_value);
- if_fixed.Else();
- push_value = BuildCloneShallowArray(boilerplate,
- allocation_site,
- alloc_site_mode,
- FAST_DOUBLE_ELEMENTS,
- length);
- environment()->Push(push_value);
- } else {
- ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
- push_value = BuildCloneShallowArray(boilerplate,
- allocation_site,
- alloc_site_mode,
- elements_kind,
- length);
- environment()->Push(push_value);
- }
+ HValue* elements = AddLoadElements(boilerplate);
+ HValue* capacity = AddLoadFixedArrayLength(elements);
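+ // Dispatch on the boilerplate's elements: empty arrays share the canonical
+ // empty fixed array, COW elements are shared by reference, and all other
+ // arrays get a freshly allocated FixedArray or FixedDoubleArray.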
+ IfBuilder zero_capacity(this);
+ zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
+ Token::EQ);
+ zero_capacity.Then();
+ Push(BuildCloneShallowArrayEmpty(boilerplate,
+ allocation_site,
+ alloc_site_mode));
+ zero_capacity.Else();
+ IfBuilder if_fixed_cow(this);
+ if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
+ if_fixed_cow.Then();
+ Push(BuildCloneShallowArrayCow(boilerplate,
+ allocation_site,
+ alloc_site_mode,
+ FAST_ELEMENTS));
+ if_fixed_cow.Else();
+ IfBuilder if_fixed(this);
+ if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
+ if_fixed.Then();
+ Push(BuildCloneShallowArrayNonEmpty(boilerplate,
+ allocation_site,
+ alloc_site_mode,
+ FAST_ELEMENTS));
+ if_fixed.Else();
+ Push(BuildCloneShallowArrayNonEmpty(boilerplate,
+ allocation_site,
+ alloc_site_mode,
+ FAST_DOUBLE_ELEMENTS));
+ if_fixed.End();
+ if_fixed_cow.End();
+ zero_capacity.End();
checker.ElseDeopt("Uninitialized boilerplate literals");
checker.End();
hint_stack_parameter_count_(-1),
function_mode_(NOT_JS_FUNCTION_STUB_MODE),
register_params_(NULL),
+ register_param_representations_(NULL),
deoptimization_handler_(NULL),
handler_arguments_mode_(DONT_PASS_ARGUMENTS),
miss_handler_(),
// static
void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) {
- FastCloneShallowArrayStub stub(isolate,
- FastCloneShallowArrayStub::CLONE_ELEMENTS,
- DONT_TRACK_ALLOCATION_SITE, 0);
+ FastCloneShallowArrayStub stub(isolate, DONT_TRACK_ALLOCATION_SITE);
InstallDescriptor(isolate, &stub);
}
int hint_stack_parameter_count_;
StubFunctionMode function_mode_;
Register* register_params_;
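+ // If non-NULL, one Representation per register parameter; when absent,
+ // parameters default to Representation::Tagged().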
+ Representation* register_param_representations_;
Address deoptimization_handler_;
HandlerArgumentsMode handler_arguments_mode_;
class FastCloneShallowArrayStub : public HydrogenCodeStub {
public:
- // Maximum length of copied elements array.
- static const int kMaximumClonedLength = 8;
- enum Mode {
- CLONE_ELEMENTS,
- CLONE_DOUBLE_ELEMENTS,
- COPY_ON_WRITE_ELEMENTS,
- CLONE_ANY_ELEMENTS,
- LAST_CLONE_MODE = CLONE_ANY_ELEMENTS
- };
-
- static const int kFastCloneModeCount = LAST_CLONE_MODE + 1;
+ // Maximum number of elements the unrolled (inlined) clone loop will copy.
+ static const int kMaximumInlinedCloneLength = 8;
FastCloneShallowArrayStub(Isolate* isolate,
- Mode mode,
- AllocationSiteMode allocation_site_mode,
- int length)
+ AllocationSiteMode allocation_site_mode)
: HydrogenCodeStub(isolate),
- mode_(mode),
- allocation_site_mode_(allocation_site_mode),
- length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
- ASSERT_GE(length_, 0);
- ASSERT_LE(length_, kMaximumClonedLength);
- }
+ allocation_site_mode_(allocation_site_mode) {}
- Mode mode() const { return mode_; }
- int length() const { return length_; }
AllocationSiteMode allocation_site_mode() const {
return allocation_site_mode_;
}
- ElementsKind ComputeElementsKind() const {
- switch (mode()) {
- case CLONE_ELEMENTS:
- case COPY_ON_WRITE_ELEMENTS:
- return FAST_ELEMENTS;
- case CLONE_DOUBLE_ELEMENTS:
- return FAST_DOUBLE_ELEMENTS;
- case CLONE_ANY_ELEMENTS:
- /*fall-through*/;
- }
- UNREACHABLE();
- return LAST_ELEMENTS_KIND;
- }
-
virtual Handle<Code> GenerateCode() V8_OVERRIDE;
virtual void InitializeInterfaceDescriptor(
CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;
static void InstallDescriptors(Isolate* isolate);
private:
- Mode mode_;
AllocationSiteMode allocation_site_mode_;
- int length_;
class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
- class ModeBits: public BitField<Mode, 1, 4> {};
- class LengthBits: public BitField<int, 5, 4> {};
- // Ensure data fits within available bits.
- STATIC_ASSERT(LAST_ALLOCATION_SITE_MODE == 1);
- STATIC_ASSERT(kFastCloneModeCount < 16);
- STATIC_ASSERT(kMaximumClonedLength < 16);
Major MajorKey() { return FastCloneShallowArray; }
int NotMissMinorKey() {
- return AllocationSiteModeBits::encode(allocation_site_mode_)
- | ModeBits::encode(mode_)
- | LengthBits::encode(length_);
+ return AllocationSiteModeBits::encode(allocation_site_mode_);
}
};
}
-HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
+HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
+ HValue* dependency) {
return Add<HLoadNamedField>(
- object, static_cast<HValue*>(NULL), HObjectAccess::ForElementsPointer());
+ object, dependency, HObjectAccess::ForElementsPointer());
}
-HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
+HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
+ HValue* array,
+ HValue* dependency) {
return Add<HLoadNamedField>(
- object, static_cast<HValue*>(NULL), HObjectAccess::ForFixedArrayLength());
+ array, dependency, HObjectAccess::ForFixedArrayLength());
+}
+
+
+HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
+ ElementsKind kind,
+ HValue* dependency) {
+ return Add<HLoadNamedField>(
+ array, dependency, HObjectAccess::ForArrayLength(kind));
}
HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
new_kind, new_capacity);
- BuildCopyElements(elements, kind,
- new_elements, new_kind,
- length, new_capacity);
+ BuildCopyElements(object, elements, kind, new_elements,
+ new_kind, length, new_capacity);
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
new_elements);
ElementsKind elements_kind,
HValue* from,
HValue* to) {
- // Fast elements kinds need to be initialized in case statements below cause
- // a garbage collection.
+ // Fast elements kinds need to be initialized in case the statements below
+ // cause a garbage collection.
Factory* factory = isolate()->factory();
double nan_double = FixedDoubleArray::hole_nan_as_double();
}
-void HGraphBuilder::BuildCopyElements(HValue* from_elements,
+void HGraphBuilder::BuildCopyElements(HValue* array,
+ HValue* from_elements,
ElementsKind from_elements_kind,
HValue* to_elements,
ElementsKind to_elements_kind,
HValue* length,
HValue* capacity) {
- bool pre_fill_with_holes =
- IsFastDoubleElementsKind(from_elements_kind) &&
- IsFastObjectElementsKind(to_elements_kind);
-
- if (pre_fill_with_holes) {
- // If the copy might trigger a GC, make sure that the FixedArray is
- // pre-initialized with holes to make sure that it's always in a consistent
- // state.
- BuildFillElementsWithHole(to_elements, to_elements_kind,
- graph()->GetConstant0(), capacity);
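+ // Unroll the copy only for small, compile-time-constant capacities
+ // (at most kMaximumInlinedCloneLength elements).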
+ int constant_capacity = -1;
+ if (capacity->IsConstant() &&
+ HConstant::cast(capacity)->HasInteger32Value()) {
+ int constant_candidate = HConstant::cast(capacity)->Integer32Value();
+ if (constant_candidate <=
+ FastCloneShallowArrayStub::kMaximumInlinedCloneLength) {
+ constant_capacity = constant_candidate;
+ }
}
- LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
-
- HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
+ if (constant_capacity != -1) {
+ // Unroll the loop for small elements kinds.
+ for (int i = 0; i < constant_capacity; i++) {
+ HValue* key_constant = Add<HConstant>(i);
+ HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
+ static_cast<HValue*>(NULL),
+ from_elements_kind);
+ Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
+ }
+ } else {
+ bool pre_fill_with_holes =
+ IsFastDoubleElementsKind(from_elements_kind) &&
+ IsFastObjectElementsKind(to_elements_kind);
- HValue* element = Add<HLoadKeyed>(from_elements, key,
- static_cast<HValue*>(NULL),
- from_elements_kind,
- ALLOW_RETURN_HOLE);
+ if (pre_fill_with_holes) {
+ // If the copy might trigger a GC, make sure that the FixedArray is
+ // pre-initialized with holes to make sure that it's always in a
+ // consistent state.
+ BuildFillElementsWithHole(to_elements, to_elements_kind,
+ graph()->GetConstant0(), capacity);
+ }
+
+ LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
+
+ // Be very careful to copy the elements up to length backwards down to
+ // zero. This eliminates the need to keep length alive through the loop,
+ // since the termination condition compares to a constant. This reduces
+ // register pressure in code stubs that otherwise would spill and create
+ // a stack frame.
+ HValue* decremented_length = AddUncasted<HSub>(length,
+ graph()->GetConstant1());
+ decremented_length->ClearFlag(HValue::kCanOverflow);
+ HValue* key = builder.BeginBody(decremented_length, Add<HConstant>(-1),
+ Token::NE);
+ HValue* element = Add<HLoadKeyed>(from_elements, key,
+ static_cast<HValue*>(NULL),
+ from_elements_kind,
+ ALLOW_RETURN_HOLE);
- ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
- IsFastSmiElementsKind(to_elements_kind))
+ ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
+ IsFastSmiElementsKind(to_elements_kind))
? FAST_HOLEY_ELEMENTS : to_elements_kind;
- if (IsHoleyElementsKind(from_elements_kind) &&
- from_elements_kind != to_elements_kind) {
- IfBuilder if_hole(this);
- if_hole.If<HCompareHoleAndBranch>(element);
- if_hole.Then();
- HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
+ if (IsHoleyElementsKind(from_elements_kind) &&
+ from_elements_kind != to_elements_kind) {
+ IfBuilder if_hole(this);
+ if_hole.If<HCompareHoleAndBranch>(element);
+ if_hole.Then();
+ HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
: graph()->GetConstantHole();
- Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
- if_hole.Else();
- HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
- store->SetFlag(HValue::kAllowUndefinedAsNaN);
- if_hole.End();
- } else {
- HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
- store->SetFlag(HValue::kAllowUndefinedAsNaN);
- }
+ Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
+ if_hole.Else();
+ HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
+ store->SetFlag(HValue::kAllowUndefinedAsNaN);
+ if_hole.End();
+ } else {
+ HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
+ store->SetFlag(HValue::kAllowUndefinedAsNaN);
+ }
- builder.EndBody();
+ builder.EndBody();
- if (!pre_fill_with_holes && length != capacity) {
- // Fill unused capacity with the hole.
- BuildFillElementsWithHole(to_elements, to_elements_kind,
- key, capacity);
+ if (!pre_fill_with_holes && !length->Equals(capacity)) {
+ // Force an explicit reload of capacity and length from locations where we
+ // know they are available. This caps their live ranges before entering
+ // the element copy loop above, reducing register pressure enough to
+ // not spill and create stack frames for performance-critical array stubs
+ // on platforms with a small register set.
+ capacity = AddLoadFixedArrayLength(to_elements);
+ capacity->ClearFlag(HValue::kUseGVN);
+ length = AddLoadArrayLength(array, to_elements_kind);
+ length->ClearFlag(HValue::kUseGVN);
+ // Fill unused capacity with the hole.
+ BuildFillElementsWithHole(to_elements, to_elements_kind,
+ length, capacity);
+ }
}
-}
+ Counters* counters = isolate()->counters();
+ AddIncrementCounter(counters->inlined_copied_elements());
+}
-HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
- HValue* allocation_site,
- AllocationSiteMode mode,
- ElementsKind kind,
- int length) {
- NoObservableSideEffectsScope no_effects(this);
-
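+// Shared tail of the three clone helpers below: allocates the JSArray header
+// (plus an AllocationMemento when tracking allocation sites) and, when
+// extra_size is non-NULL, the elements storage in the same allocation,
+// returned through *return_elements.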
+HValue* HGraphBuilder::BuildCloneShallowArrayCommon(
+ HValue* boilerplate,
+ HValue* allocation_site,
+ HValue* extra_size,
+ HValue** return_elements,
+ AllocationSiteMode mode) {
// All sizes here are multiples of kPointerSize.
- int size = JSArray::kSize;
+ int array_size = JSArray::kSize;
if (mode == TRACK_ALLOCATION_SITE) {
- size += AllocationMemento::kSize;
+ array_size += AllocationMemento::kSize;
+ }
+
+ HValue* size_in_bytes = Add<HConstant>(array_size);
+ if (extra_size != NULL) {
+ size_in_bytes = AddUncasted<HAdd>(extra_size, size_in_bytes);
+ size_in_bytes->ClearFlag(HValue::kCanOverflow);
}
- HValue* size_in_bytes = Add<HConstant>(size);
HInstruction* object = Add<HAllocate>(size_in_bytes,
HType::JSObject(),
NOT_TENURED,
JS_OBJECT_TYPE);
- // Copy the JS array part.
- for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
- if ((i != JSArray::kElementsOffset) || (length == 0)) {
- HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
- Add<HStoreNamedField>(
- object, access, Add<HLoadNamedField>(
- boilerplate, static_cast<HValue*>(NULL), access));
- }
- }
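+ // Rather than copying the whole JSArray header from the boilerplate, set
+ // up the map and empty properties explicitly; the callers below install
+ // the appropriate elements pointer and length.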
+ HValue* map = Add<HLoadNamedField>(boilerplate,
+ static_cast<HValue*>(NULL), HObjectAccess::ForMap());
+ Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
+ Add<HConstant>(isolate()->factory()->empty_fixed_array()),
+ INITIALIZING_STORE);
+ Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map,
+ INITIALIZING_STORE);
// Create an allocation site info if requested.
if (mode == TRACK_ALLOCATION_SITE) {
BuildCreateAllocationMemento(
object, Add<HConstant>(JSArray::kSize), allocation_site);
}
- if (length > 0) {
- // We have to initialize the elements pointer if allocation folding is
- // turned off.
- if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
- HConstant* empty_fixed_array = Add<HConstant>(
- isolate()->factory()->empty_fixed_array());
- Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
- empty_fixed_array, INITIALIZING_STORE);
- }
-
- HValue* boilerplate_elements = AddLoadElements(boilerplate);
- HValue* object_elements;
- if (IsFastDoubleElementsKind(kind)) {
- HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
- object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
- NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
- } else {
- HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
- object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
- NOT_TENURED, FIXED_ARRAY_TYPE);
- }
- Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
- object_elements);
-
- // Copy the elements array header.
- for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
- HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
- Add<HStoreNamedField>(
- object_elements, access, Add<HLoadNamedField>(
- boilerplate_elements, static_cast<HValue*>(NULL), access));
- }
-
- // Copy the elements array contents.
- // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
- // copying loops with constant length up to a given boundary and use this
- // helper here instead.
- for (int i = 0; i < length; i++) {
- HValue* key_constant = Add<HConstant>(i);
- HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
- static_cast<HValue*>(NULL), kind);
- Add<HStoreKeyed>(object_elements, key_constant, value, kind);
- }
+ if (extra_size != NULL) {
+ HValue* elements = Add<HInnerAllocatedObject>(object,
+ Add<HConstant>(array_size));
+ if (return_elements != NULL) *return_elements = elements;
}
return object;
}
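+// Clone with copy-on-write elements: share the boilerplate's COW backing
+// store by reference; a later write to the clone copies it lazily.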
+HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
+ HValue* allocation_site,
+ AllocationSiteMode mode,
+ ElementsKind kind) {
+ HValue* result = BuildCloneShallowArrayCommon(boilerplate,
+ allocation_site, NULL, NULL, mode);
+
+ HValue* length = AddLoadArrayLength(boilerplate, kind);
+ HValue* elements = AddLoadElements(boilerplate);
+ HObjectAccess access1 = HObjectAccess::ForArrayLength(kind);
+ HObjectAccess access2 = HObjectAccess::ForElementsPointer();
+ Add<HStoreNamedField>(result, access1, length, INITIALIZING_STORE);
+ Add<HStoreNamedField>(result, access2, elements, INITIALIZING_STORE);
+
+ return result;
+}
+
+
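+// Clone an empty array: length 0 and the canonical empty_fixed_array, so no
+// backing store needs to be allocated or copied.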
+HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
+ HValue* allocation_site,
+ AllocationSiteMode mode) {
+ HValue* result = BuildCloneShallowArrayCommon(boilerplate,
+ allocation_site, NULL, NULL, mode);
+
+ HObjectAccess access = HObjectAccess::ForArrayLength(FAST_ELEMENTS);
+ Add<HStoreNamedField>(result, access, graph()->GetConstant0(),
+ INITIALIZING_STORE);
+ access = HObjectAccess::ForElementsPointer();
+ Add<HStoreNamedField>(result, access,
+ Add<HConstant>(isolate()->factory()->empty_fixed_array()),
+ INITIALIZING_STORE);
+
+ return result;
+}
+
+
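+// Clone a non-empty array: fold the backing store into the JSArray
+// allocation, sized by the boilerplate's capacity, then copy the elements.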
+HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
+ HValue* allocation_site,
+ AllocationSiteMode mode,
+ ElementsKind kind) {
+ int elements_kind_size = IsFastDoubleElementsKind(kind)
+ ? kDoubleSize : kPointerSize;
+
+ HValue* boilerplate_elements = AddLoadElements(boilerplate);
+ HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);
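+ // Backing store size: capacity * element size + FixedArray::kHeaderSize.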
+ HValue* extra = AddUncasted<HMul>(capacity,
+ Add<HConstant>(elements_kind_size));
+ extra->ClearFlag(HValue::kCanOverflow);
+ extra = AddUncasted<HAdd>(extra, Add<HConstant>(FixedArray::kHeaderSize));
+ extra->ClearFlag(HValue::kCanOverflow);
+ HValue* elements = NULL;
+ HValue* result = BuildCloneShallowArrayCommon(boilerplate,
+ allocation_site, extra, &elements, mode);
+
+ // Explicitly reload the boilerplate's elements. This frees up a register
+ // during the allocation which otherwise causes spillage in many common code
+ // sequences on platforms with tight register constraints.
+ boilerplate_elements = AddLoadElements(boilerplate);
+ boilerplate_elements->ClearFlag(HValue::kUseGVN);
+ HValue* length = Add<HLoadNamedField>(boilerplate, static_cast<HValue*>(NULL),
+ HObjectAccess::ForArrayLength(kind));
+ Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
+ elements, INITIALIZING_STORE);
+ Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind),
+ length, INITIALIZING_STORE);
+
+ // Copy the elements array header.
+ for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
+ HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
+ Add<HStoreNamedField>(elements, access,
+ Add<HLoadNamedField>(boilerplate_elements,
+ static_cast<HValue*>(NULL), access));
+ }
+
+ BuildCopyElements(result, boilerplate_elements, kind, elements,
+ kind, length, capacity);
+
+ return result;
+}
+
+
void HGraphBuilder::BuildCompareNil(
HValue* value,
Type* type,
store_map->SkipWriteBarrier();
return store_map;
}
- HLoadNamedField* AddLoadElements(HValue* object);
+ HLoadNamedField* AddLoadElements(HValue* object,
+ HValue* dependency = NULL);
bool MatchRotateRight(HValue* left,
HValue* right,
Maybe<int> fixed_right_arg,
HAllocationMode allocation_mode);
- HLoadNamedField* AddLoadFixedArrayLength(HValue *object);
+ HLoadNamedField* AddLoadFixedArrayLength(HValue *object,
+ HValue *dependency = NULL);
+
+ HLoadNamedField* AddLoadArrayLength(HValue *object,
+ ElementsKind kind,
+ HValue *dependency = NULL);
HValue* AddLoadJSBuiltin(Builtins::JavaScript builtin);
HValue* from,
HValue* to);
- void BuildCopyElements(HValue* from_elements,
+ void BuildCopyElements(HValue* array,
+ HValue* from_elements,
ElementsKind from_elements_kind,
HValue* to_elements,
ElementsKind to_elements_kind,
HValue* length,
HValue* capacity);
- HValue* BuildCloneShallowArray(HValue* boilerplate,
- HValue* allocation_site,
- AllocationSiteMode mode,
- ElementsKind kind,
- int length);
+ HValue* BuildCloneShallowArrayCommon(HValue* boilerplate,
+ HValue* allocation_site,
+ HValue* extra_size,
+ HValue** return_elements,
+ AllocationSiteMode mode);
+
+ HValue* BuildCloneShallowArrayCow(HValue* boilerplate,
+ HValue* allocation_site,
+ AllocationSiteMode mode,
+ ElementsKind kind);
+
+ HValue* BuildCloneShallowArrayEmpty(HValue* boilerplate,
+ HValue* allocation_site,
+ AllocationSiteMode mode);
+
+ HValue* BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
+ HValue* allocation_site,
+ AllocationSiteMode mode,
+ ElementsKind kind);
HValue* BuildElementIndexHash(HValue* index);
static Register registers[] = { eax, ebx, ecx };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Smi(),
+ Representation::Tagged() };
+ descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
descriptor->stack_parameter_count_ = eax;
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers_variable_args;
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Tagged(),
+ Representation::Integer32() };
+ descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
descriptor->stack_parameter_count_ = eax;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Integer32() };
+ descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
- Heap* heap = isolate()->heap();
- if (has_constant_fast_elements &&
- constant_elements_values->map() == heap->fixed_cow_array_map()) {
- // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
- // change, so it's possible to specialize the stub in advance.
- __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
- __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
- __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
- __ mov(ecx, Immediate(constant_elements));
- FastCloneShallowArrayStub stub(
- isolate(),
- FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
- allocation_site_mode,
- length);
- __ CallStub(&stub);
- } else if (expr->depth() > 1 || Serializer::enabled() ||
- length > FastCloneShallowArrayStub::kMaximumClonedLength) {
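+ // The stub now handles COW, empty, and arbitrary-length shallow literals;
+ // only nested literals (depth > 1) still go through the runtime.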
+ if (expr->depth() > 1) {
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(expr->literal_index())));
__ push(Immediate(Smi::FromInt(flags)));
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
- ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
- FLAG_smi_only_arrays);
- FastCloneShallowArrayStub::Mode mode =
- FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
- // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
- // change, so it's possible to specialize the stub in advance.
- if (has_constant_fast_elements) {
- mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
- }
-
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
- FastCloneShallowArrayStub stub(isolate(),
- mode,
- allocation_site_mode,
- length);
+ FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1, a1, a2);
} else if (expr->depth() > 1 || Serializer::enabled() ||
- length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+ length > FastCloneShallowArrayStub::kMaximumInlinedCloneLength) {
__ li(a0, Operand(Smi::FromInt(flags)));
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs) \
SC(call_normal_stubs, V8.CallNormalStubs) \
SC(call_megamorphic_stubs, V8.CallMegamorphicStubs) \
+ SC(inlined_copied_elements, V8.InlinedCopiedElements) \
SC(arguments_adaptors, V8.ArgumentsAdaptors) \
SC(compilation_cache_hits, V8.CompilationCacheHits) \
SC(compilation_cache_misses, V8.CompilationCacheMisses) \
static Register registers[] = { rax, rbx, rcx };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Smi(),
+ Representation::Tagged() };
+ descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
descriptor->stack_parameter_count_ = rax;
descriptor->register_param_count_ = 3;
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Tagged(),
+ Representation::Integer32() };
+ descriptor->register_param_representations_ = representations;
descriptor->register_params_ = registers_variable_args;
}
descriptor->stack_parameter_count_ = rax;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
+ static Representation representations[] = {
+ Representation::Tagged(),
+ Representation::Integer32() };
+ descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
- Heap* heap = isolate()->heap();
- if (has_constant_fast_elements &&
- constant_elements_values->map() == heap->fixed_cow_array_map()) {
- // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
- // change, so it's possible to specialize the stub in advance.
- __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
- __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
- __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
- __ Move(rbx, Smi::FromInt(expr->literal_index()));
- __ Move(rcx, constant_elements);
- FastCloneShallowArrayStub stub(
- isolate(),
- FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
- allocation_site_mode,
- length);
- __ CallStub(&stub);
- } else if (expr->depth() > 1 || Serializer::enabled() ||
- length > FastCloneShallowArrayStub::kMaximumClonedLength) {
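+ // The stub now handles COW, empty, and arbitrary-length shallow literals;
+ // only nested literals (depth > 1) still go through the runtime.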
+ if (expr->depth() > 1) {
__ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(Smi::FromInt(flags));
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
- ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
- FLAG_smi_only_arrays);
- FastCloneShallowArrayStub::Mode mode =
- FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
-
- // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
- // change, so it's possible to specialize the stub in advance.
- if (has_constant_fast_elements) {
- mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
- }
-
__ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
- FastCloneShallowArrayStub stub(isolate(),
- mode,
- allocation_site_mode, length);
+ FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}