// entering the generic code. In both cases argc in r0 needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// construct call and normal call.
-static void ArrayNativeCode(MacroAssembler* masm,
- Label* call_generic_code) {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code) {
Counters* counters = masm->isolate()->counters();
Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
has_non_smi_element, finish, cant_transition_map, not_double;
}
-void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
+void Builtins::Generate_CommonArrayConstructCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : number of arguments
// -- r1 : constructor function
__ Assert(ne, "Unexpected initial map for Array function");
__ CompareObjectType(r3, r3, r4, MAP_TYPE);
__ Assert(eq, "Unexpected initial map for Array function");
-
- if (FLAG_optimize_constructed_arrays) {
- // We should either have undefined in r2 or a valid jsglobalpropertycell
- Label okay_here;
- Handle<Object> undefined_sentinel(
- masm->isolate()->heap()->undefined_value(), masm->isolate());
- Handle<Map> global_property_cell_map(
- masm->isolate()->heap()->global_property_cell_map());
- __ cmp(r2, Operand(undefined_sentinel));
- __ b(eq, &okay_here);
- __ ldr(r3, FieldMemOperand(r2, 0));
- __ cmp(r3, Operand(global_property_cell_map));
- __ Assert(eq, "Expected property cell in register ebx");
- __ bind(&okay_here);
- }
- }
-
- if (FLAG_optimize_constructed_arrays) {
- Label not_zero_case, not_one_case;
- __ tst(r0, r0);
- __ b(ne, ¬_zero_case);
- ArrayNoArgumentConstructorStub no_argument_stub;
- __ TailCallStub(&no_argument_stub);
-
- __ bind(¬_zero_case);
- __ cmp(r0, Operand(1));
- __ b(gt, ¬_one_case);
- ArraySingleArgumentConstructorStub single_argument_stub;
- __ TailCallStub(&single_argument_stub);
-
- __ bind(¬_one_case);
- ArrayNArgumentsConstructorStub n_argument_stub;
- __ TailCallStub(&n_argument_stub);
- } else {
- Label generic_constructor;
- // Run the native code for the Array function called as a constructor.
- ArrayNativeCode(masm, &generic_constructor);
-
- // Jump to the generic construct code in case the specialized code cannot
- // handle the construction.
- __ bind(&generic_constructor);
- Handle<Code> generic_construct_stub =
- masm->isolate()->builtins()->JSConstructStubGeneric();
- __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
+ Label generic_constructor;
+ // Run the native code for the Array function called as a constructor.
+ ArrayNativeCode(masm, &generic_constructor);
+
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
}
-static void InitializeArrayConstructorDescriptor(Isolate* isolate,
- CodeStubInterfaceDescriptor* descriptor) {
+static void InitializeArrayConstructorDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor,
+ int constant_stack_parameter_count) {
// register state
- // r1 -- constructor function
+ // r0 -- number of arguments
// r2 -- type info cell with elements kind
- // r0 -- number of arguments to the constructor function
- static Register registers[] = { r1, r2 };
- descriptor->register_param_count_ = 2;
- // stack param count needs (constructor pointer, and single argument)
- descriptor->stack_parameter_count_ = &r0;
+ static Register registers[] = { r2 };
+ descriptor->register_param_count_ = 1;
+ if (constant_stack_parameter_count != 0) {
+ // stack param count needs (constructor pointer, and single argument)
+ descriptor->stack_parameter_count_ = &r0;
+ }
+ descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
descriptor->register_params_ = registers;
descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
descriptor->deoptimization_handler_ =
void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}
void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}
void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
+ if (FLAG_optimize_constructed_arrays) {
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+ }
}
TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
LAST_FAST_ELEMENTS_KIND);
__ cmp(r3, Operand(terminal_kind_sentinel));
- __ b(ne, &miss);
+ __ b(gt, &miss);
// Make sure the function is the Array() function
__ LoadArrayFunction(r3);
__ cmp(r1, r3);
__ Ret();
}
+
+template<class T>
+static void CreateArrayDispatch(MacroAssembler* masm) {
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ cmp(r3, Operand(kind));
+ __ b(ne, &next);
+ T stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
+ // r2 - type info cell
+ // r3 - kind
+ // r0 - number of arguments
+ // r1 - constructor?
+ // sp[0] - last argument
+ ASSERT(FAST_SMI_ELEMENTS == 0);
+ ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ ASSERT(FAST_ELEMENTS == 2);
+ ASSERT(FAST_HOLEY_ELEMENTS == 3);
+ ASSERT(FAST_DOUBLE_ELEMENTS == 4);
+ ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
+
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+  // Holey kinds have the low bit set (see the ASSERTs above); if set, done.
+ __ tst(r3, Operand(1));
+ Label normal_sequence;
+ __ b(ne, &normal_sequence);
+
+ // look at the first argument
+ __ ldr(r5, MemOperand(sp, 0));
+ __ cmp(r5, Operand::Zero());
+ __ b(eq, &normal_sequence);
+
+ // We are going to create a holey array, but our kind is non-holey.
+ // Fix kind and retry
+ __ add(r3, r3, Operand(1));
+ __ cmp(r2, Operand(undefined_sentinel));
+ __ b(eq, &normal_sequence);
+
+ // Save the resulting elements kind in type info
+ __ SmiTag(r3);
+ __ str(r3, FieldMemOperand(r2, kPointerSize));
+ __ SmiUntag(r3);
+
+ __ bind(&normal_sequence);
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ cmp(r3, Operand(kind));
+ __ b(ne, &next);
+ ArraySingleArgumentConstructorStub stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+template<class T>
+static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
+ int to_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= to_index; ++i) {
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ T stub(kind);
+ stub.GetCode(isolate)->set_is_pregenerated(true);
+ }
+}
+
+
+void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
+ isolate);
+}
+
+
+void ArrayConstructorStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- r0 : argc (only if argument_count_ == ANY)
+ // -- r1 : constructor
+ // -- r2 : type info cell
+ // -- sp[0] : return address
+ // -- sp[4] : last argument
+ // -----------------------------------
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ if (FLAG_debug_code) {
+ // The array construct code is only set for the global and natives
+ // builtin Array functions which always have maps.
+
+ // Initial map for the builtin Array function should be a map.
+ __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
+    // The Smi check below catches both a NULL and a Smi.
+ __ tst(r3, Operand(kSmiTagMask));
+ __ Assert(ne, "Unexpected initial map for Array function");
+ __ CompareObjectType(r3, r3, r4, MAP_TYPE);
+ __ Assert(eq, "Unexpected initial map for Array function");
+
+  // We should either have undefined in r2 or a valid jsglobalpropertycell
+ Label okay_here;
+ Handle<Map> global_property_cell_map(
+ masm->isolate()->heap()->global_property_cell_map());
+ __ cmp(r2, Operand(undefined_sentinel));
+ __ b(eq, &okay_here);
+ __ ldr(r3, FieldMemOperand(r2, 0));
+ __ cmp(r3, Operand(global_property_cell_map));
+ __ Assert(eq, "Expected property cell in register ebx");
+ __ bind(&okay_here);
+ }
+
+ if (FLAG_optimize_constructed_arrays) {
+ Label no_info, switch_ready;
+ // Get the elements kind and case on that.
+ __ cmp(r2, Operand(undefined_sentinel));
+ __ b(eq, &no_info);
+ __ ldr(r3, FieldMemOperand(r2, kPointerSize));
+
+ // There is no info if the call site went megamorphic either
+ // TODO(mvstanton): Really? I thought if it was the array function that
+ // the cell wouldn't get stamped as megamorphic.
+ __ cmp(r3,
+ Operand(TypeFeedbackCells::MegamorphicSentinel(masm->isolate())));
+ __ b(eq, &no_info);
+ __ SmiUntag(r3);
+ __ jmp(&switch_ready);
+ __ bind(&no_info);
+ __ mov(r3, Operand(GetInitialFastElementsKind()));
+ __ bind(&switch_ready);
+
+ if (argument_count_ == ANY) {
+ Label not_zero_case, not_one_case;
+ __ tst(r0, r0);
+ __ b(ne, ¬_zero_case);
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+
+ __ bind(¬_zero_case);
+ __ cmp(r0, Operand(1));
+ __ b(gt, ¬_one_case);
+ CreateArrayDispatchOneArgument(masm);
+
+ __ bind(¬_one_case);
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else if (argument_count_ == NONE) {
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+ } else if (argument_count_ == ONE) {
+ CreateArrayDispatchOneArgument(masm);
+ } else if (argument_count_ == MORE_THAN_ONE) {
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else {
+ UNREACHABLE();
+ }
+ } else {
+ Label generic_constructor;
+ // Run the native code for the Array function called as a constructor.
+ ArrayNativeCode(masm, &generic_constructor);
+
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
+ }
+}
+
+
#undef __
} } // namespace v8::internal
namespace internal {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
+
+
// Compute a transcendental math function natively, or call the
// TranscendentalCache runtime function.
class TranscendentalCacheStub: public PlatformCodeStub {
ASSERT(info()->IsStub());
CodeStubInterfaceDescriptor* descriptor =
info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
- Register reg = descriptor->register_params_[instr->index()];
+ int index = static_cast<int>(instr->index());
+ Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index);
return DefineFixed(result, reg);
}
}
if (NeedsEagerFrame()) {
__ mov(sp, fp);
__ ldm(ia_w, sp, fp.bit() | lr.bit());
-
- if (instr->has_constant_parameter_count()) {
- int parameter_count = ToInteger32(instr->constant_parameter_count());
- int32_t sp_delta = (parameter_count + 1) * kPointerSize;
- if (sp_delta != 0) {
- __ add(sp, sp, Operand(sp_delta));
- }
- } else {
- Register reg = ToRegister(instr->parameter_count());
- __ add(reg, reg, Operand(1));
- __ add(sp, sp, Operand(reg, LSL, kPointerSizeLog2));
+ }
+ if (instr->has_constant_parameter_count()) {
+ int parameter_count = ToInteger32(instr->constant_parameter_count());
+ int32_t sp_delta = (parameter_count + 1) * kPointerSize;
+ if (sp_delta != 0) {
+ __ add(sp, sp, Operand(sp_delta));
}
+ } else {
+ Register reg = ToRegister(instr->parameter_count());
+ // The argument count parameter is a smi
+ __ SmiUntag(reg);
+ __ add(sp, sp, Operand(reg, LSL, kPointerSizeLog2));
}
+
__ Jump(lr);
}
__ mov(r0, Operand(instr->arity()));
__ mov(r2, Operand(instr->hydrogen()->property_cell()));
- Handle<Code> array_construct_code =
- isolate()->builtins()->ArrayConstructCode();
-
- CallCode(array_construct_code, RelocInfo::CONSTRUCT_CALL, instr);
+ Object* cell_value = instr->hydrogen()->property_cell()->value();
+ ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+ if (instr->arity() == 0) {
+ ArrayNoArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else if (instr->arity() == 1) {
+ ArraySingleArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else {
+ ArrayNArgumentsConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ }
}
#include "extensions/externalize-string-extension.h"
#include "extensions/gc-extension.h"
#include "extensions/statistics-extension.h"
+#include "code-stubs.h"
namespace v8 {
namespace internal {
InstallFunction(global, "Array", JS_ARRAY_TYPE, JSArray::kSize,
isolate->initial_object_prototype(),
Builtins::kArrayCode, true);
- array_function->shared()->set_construct_stub(
- isolate->builtins()->builtin(Builtins::kArrayConstructCode));
array_function->shared()->DontAdaptArguments();
// This seems a bit hackish, but we need to make sure Array.length
// as the constructor. 'Array' property on a global object can be
// overwritten by JS code.
native_context()->set_array_function(*array_function);
+
+ // Cache the array maps
+ MaybeObject* cache_result = CacheInitialJSArrayMaps(*native_context(),
+ *initial_map);
+ if (cache_result->IsFailure()) return false;
+
+ if (FLAG_optimize_constructed_arrays) {
+ ArrayConstructorStub array_constructor_stub(isolate);
+ array_function->shared()->set_construct_stub(
+ *array_constructor_stub.GetCode(isolate));
+ } else {
+ array_function->shared()->set_construct_stub(
+ isolate->builtins()->builtin(Builtins::kCommonArrayConstructCode));
+ }
}
{ // --- N u m b e r ---
factory()->NewJSObject(isolate()->object_function(), TENURED);
SetPrototype(array_function, prototype);
- // TODO(mvstanton): For performance reasons, this code would have to
- // be changed to successfully run with FLAG_optimize_constructed_arrays.
- // The next checkin to enable FLAG_optimize_constructed_arrays by
- // default will address this.
- CHECK(!FLAG_optimize_constructed_arrays);
array_function->shared()->set_construct_stub(
- isolate()->builtins()->builtin(Builtins::kArrayConstructCode));
+ isolate()->builtins()->builtin(Builtins::kCommonArrayConstructCode));
array_function->shared()->DontAdaptArguments();
RUNTIME_FUNCTION(MaybeObject*, ArrayConstructor_StubFailure) {
CONVERT_ARG_STUB_CALLER_ARGS(caller_args);
- // ASSERT(args.length() == 3);
- Handle<JSFunction> function = args.at<JSFunction>(1);
- Handle<Object> type_info = args.at<Object>(2);
+ ASSERT(args.length() == 2);
+ Handle<Object> type_info = args.at<Object>(1);
JSArray* array = NULL;
bool holey = false;
}
}
- ASSERT(function->has_initial_map());
- ElementsKind kind = function->initial_map()->elements_kind();
+ ElementsKind kind = GetInitialFastElementsKind();
if (holey) {
kind = GetHoleyElementsKind(kind);
}
Code::kNoExtraICState) \
V(ArrayCode, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
- V(ArrayConstructCode, BUILTIN, UNINITIALIZED, \
+ V(CommonArrayConstructCode, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
\
V(StringConstructCode, BUILTIN, UNINITIALIZED, \
static void Generate_InternalArrayCode(MacroAssembler* masm);
static void Generate_ArrayCode(MacroAssembler* masm);
- static void Generate_ArrayConstructCode(MacroAssembler* masm);
+ static void Generate_CommonArrayConstructCode(MacroAssembler* masm);
static void Generate_StringConstructCode(MacroAssembler* masm);
static void Generate_OnStackReplacement(MacroAssembler* masm);
stack_parameter_count = new(zone) HParameter(param_count,
HParameter::REGISTER_PARAMETER,
Representation::Integer32());
+ stack_parameter_count->set_type(HType::Smi());
// it's essential to bind this value to the environment in case of deopt
- start_environment->Bind(param_count, stack_parameter_count);
AddInstruction(stack_parameter_count);
+ start_environment->Bind(param_count, stack_parameter_count);
arguments_length_ = stack_parameter_count;
} else {
ASSERT(descriptor_->environment_length() == param_count);
// arguments above
HInstruction* stack_pop_count = stack_parameter_count;
if (descriptor_->function_mode_ == JS_FUNCTION_STUB_MODE) {
- HInstruction* amount = graph()->GetConstant1();
- stack_pop_count = AddInstruction(
- HAdd::New(zone, context_, stack_parameter_count, amount));
- stack_pop_count->ChangeRepresentation(Representation::Integer32());
- stack_pop_count->ClearFlag(HValue::kCanOverflow);
+ if (!stack_parameter_count->IsConstant() &&
+ descriptor_->hint_stack_parameter_count_ < 0) {
+ HInstruction* amount = graph()->GetConstant1();
+ stack_pop_count = AddInstruction(
+ HAdd::New(zone, context_, stack_parameter_count, amount));
+ stack_pop_count->ChangeRepresentation(Representation::Integer32());
+ stack_pop_count->ClearFlag(HValue::kCanOverflow);
+ } else {
+ int count = descriptor_->hint_stack_parameter_count_;
+ stack_pop_count = AddInstruction(new(zone)
+ HConstant(count, Representation::Integer32()));
+ }
}
if (!current_block()->IsFinished()) {
template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
- HInstruction* deopt = new(zone()) HSoftDeoptimize();
- AddInstruction(deopt);
- current_block()->MarkAsDeoptimizing();
- return GetParameter(0);
+ // ----------- S t a t e -------------
+ // -- Parameter 1 : type info cell
+ // -- Parameter 0 : constructor
+ // -----------------------------------
+ // Get the right map
+ // Should be a constant
+ JSArrayBuilder array_builder(
+ this,
+ casted_stub()->elements_kind(),
+ GetParameter(ArrayConstructorStubBase::kPropertyCell),
+ casted_stub()->mode());
+ return array_builder.AllocateEmptyArray();
}
template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
BuildCodeStub() {
- HInstruction* deopt = new(zone()) HSoftDeoptimize();
- AddInstruction(deopt);
- current_block()->MarkAsDeoptimizing();
- return GetParameter(0);
+ // Smi check and range check on the input arg.
+ HValue* constant_one = graph()->GetConstant1();
+ HValue* constant_zero = graph()->GetConstant0();
+
+ HInstruction* elements = AddInstruction(
+ new(zone()) HArgumentsElements(false));
+ HInstruction* argument = AddInstruction(
+ new(zone()) HAccessArgumentsAt(elements, constant_one, constant_zero));
+
+ HConstant* max_alloc_length =
+ new(zone()) HConstant(JSObject::kInitialMaxFastElementArray,
+ Representation::Tagged());
+ AddInstruction(max_alloc_length);
+ const int initial_capacity = JSArray::kPreallocatedArrayElements;
+ HConstant* initial_capacity_node =
+ new(zone()) HConstant(initial_capacity, Representation::Tagged());
+ AddInstruction(initial_capacity_node);
+
+  // The bounds check allows a Smi key (ALLOW_SMI_KEY) with a Tagged
+  // representation, so no separate Smi check of the argument is needed.
+ HBoundsCheck* checked_arg = AddBoundsCheck(argument, max_alloc_length,
+ ALLOW_SMI_KEY,
+ Representation::Tagged());
+ IfBuilder if_builder(this);
+ if_builder.IfCompare(checked_arg, constant_zero, Token::EQ);
+ if_builder.Then();
+ Push(initial_capacity_node); // capacity
+ Push(constant_zero); // length
+ if_builder.Else();
+ Push(checked_arg); // capacity
+ Push(checked_arg); // length
+ if_builder.End();
+
+ // Figure out total size
+ HValue* length = Pop();
+ HValue* capacity = Pop();
+
+ JSArrayBuilder array_builder(
+ this,
+ casted_stub()->elements_kind(),
+ GetParameter(ArrayConstructorStubBase::kPropertyCell),
+ casted_stub()->mode());
+ return array_builder.AllocateArray(capacity, length, true);
}
template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
- HInstruction* deopt = new(zone()) HSoftDeoptimize();
- AddInstruction(deopt);
- current_block()->MarkAsDeoptimizing();
- return GetParameter(0);
+ ElementsKind kind = casted_stub()->elements_kind();
+ HValue* length = GetArgumentsLength();
+
+ JSArrayBuilder array_builder(
+ this,
+ kind,
+ GetParameter(ArrayConstructorStubBase::kPropertyCell),
+ casted_stub()->mode());
+
+ // We need to fill with the hole if it's a smi array in the multi-argument
+ // case because we might have to bail out while copying arguments into
+ // the array because they aren't compatible with a smi array.
+ // If it's a double array, no problem, and if it's fast then no
+ // problem either because doubles are boxed.
+ bool fill_with_hole = IsFastSmiElementsKind(kind);
+ HValue* new_object = array_builder.AllocateArray(length,
+ length,
+ fill_with_hole);
+ HValue* elements = array_builder.GetElementsLocation();
+ ASSERT(elements != NULL);
+
+ // Now populate the elements correctly.
+ LoopBuilder builder(this,
+ context(),
+ LoopBuilder::kPostIncrement);
+ HValue* start = graph()->GetConstant0();
+ HValue* key = builder.BeginBody(start, length, Token::LT);
+ HInstruction* argument_elements = AddInstruction(
+ new(zone()) HArgumentsElements(false));
+ HInstruction* argument = AddInstruction(new(zone()) HAccessArgumentsAt(
+ argument_elements, length, key));
+
+ // Checks to prevent incompatible stores
+ if (IsFastSmiElementsKind(kind)) {
+ AddInstruction(new(zone()) HCheckSmi(argument));
+ }
+
+ AddInstruction(new(zone()) HStoreKeyed(elements, key, argument, kind));
+ builder.EndBody();
+ return new_object;
}
CodeStubInterfaceDescriptor::CodeStubInterfaceDescriptor()
: register_param_count_(-1),
stack_parameter_count_(NULL),
+ hint_stack_parameter_count_(-1),
function_mode_(NOT_JS_FUNCTION_STUB_MODE),
register_params_(NULL),
deoptimization_handler_(NULL),
}
+static void InstallDescriptor(Isolate* isolate, HydrogenCodeStub* stub) {
+ int major_key = stub->MajorKey();
+ CodeStubInterfaceDescriptor* descriptor =
+ isolate->code_stub_interface_descriptor(major_key);
+ if (!descriptor->initialized()) {
+ stub->InitializeInterfaceDescriptor(isolate, descriptor);
+ }
+}
+
+
+void ArrayConstructorStubBase::InstallDescriptors(Isolate* isolate) {
+ ArrayNoArgumentConstructorStub stub1(GetInitialFastElementsKind());
+ InstallDescriptor(isolate, &stub1);
+ ArraySingleArgumentConstructorStub stub2(GetInitialFastElementsKind());
+ InstallDescriptor(isolate, &stub2);
+ ArrayNArgumentsConstructorStub stub3(GetInitialFastElementsKind());
+ InstallDescriptor(isolate, &stub3);
+}
+
+
+ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate)
+ : argument_count_(ANY) {
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+}
+
+
+ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate,
+ int argument_count) {
+ if (argument_count == 0) {
+ argument_count_ = NONE;
+ } else if (argument_count == 1) {
+ argument_count_ = ONE;
+ } else if (argument_count >= 2) {
+ argument_count_ = MORE_THAN_ONE;
+ } else {
+ UNREACHABLE();
+ }
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+}
+
+
} } // namespace v8::internal
V(TransitionElementsKind) \
V(StoreArrayLiteralElement) \
V(StubFailureTrampoline) \
+ V(ArrayConstructor) \
V(ProfileEntryHook) \
/* IC Handler stubs */ \
V(LoadField)
CodeStubInterfaceDescriptor();
int register_param_count_;
const Register* stack_parameter_count_;
+  // if hint_stack_parameter_count_ >= 0, the code stub can optimize the
+  // return sequence. Default value is -1, which means it is ignored.
+ int hint_stack_parameter_count_;
StubFunctionMode function_mode_;
Register* register_params_;
Address deoptimization_handler_;
}
return register_param_count_;
}
+
+ bool initialized() const { return register_param_count_ >= 0; }
};
+// A helper to make up for the fact that type Register is not fully
+// defined outside of the platform directories
+#define DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index) \
+ ((index) == (descriptor)->register_param_count_) \
+ ? *((descriptor)->stack_parameter_count_) \
+ : (descriptor)->register_params_[(index)]
+
class HydrogenCodeStub : public CodeStub {
public:
};
+class ArrayConstructorStub: public PlatformCodeStub {
+ public:
+ enum ArgumentCountKey { ANY, NONE, ONE, MORE_THAN_ONE };
+ ArrayConstructorStub(Isolate* isolate, int argument_count);
+ explicit ArrayConstructorStub(Isolate* isolate);
+
+ void Generate(MacroAssembler* masm);
+
+ private:
+ virtual CodeStub::Major MajorKey() { return ArrayConstructor; }
+ virtual int MinorKey() { return argument_count_; }
+
+ ArgumentCountKey argument_count_;
+};
+
+
class MathPowStub: public PlatformCodeStub {
public:
enum ExponentType { INTEGER, DOUBLE, TAGGED, ON_STACK};
};
-class ArrayNoArgumentConstructorStub : public HydrogenCodeStub {
+class ArrayConstructorStubBase : public HydrogenCodeStub {
public:
- ArrayNoArgumentConstructorStub()
+ ArrayConstructorStubBase(ElementsKind kind, AllocationSiteMode mode)
: HydrogenCodeStub(CODE_STUB_IS_NOT_MISS) {
+ bit_field_ = ElementsKindBits::encode(kind) |
+ AllocationSiteModeBits::encode(mode == TRACK_ALLOCATION_SITE);
+ }
+
+ ElementsKind elements_kind() const {
+ return ElementsKindBits::decode(bit_field_);
+ }
+
+ AllocationSiteMode mode() const {
+ return AllocationSiteModeBits::decode(bit_field_)
+ ? TRACK_ALLOCATION_SITE
+ : DONT_TRACK_ALLOCATION_SITE;
+ }
+
+ virtual bool IsPregenerated() { return true; }
+ static void GenerateStubsAheadOfTime(Isolate* isolate);
+ static void InstallDescriptors(Isolate* isolate);
+
+ // Parameters accessed via CodeStubGraphBuilder::GetParameter()
+ static const int kPropertyCell = 0;
+
+ private:
+ int NotMissMinorKey() { return bit_field_; }
+
+ class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
+ class AllocationSiteModeBits: public BitField<bool, 8, 1> {};
+ uint32_t bit_field_;
+
+ DISALLOW_COPY_AND_ASSIGN(ArrayConstructorStubBase);
+};
+
+
+class ArrayNoArgumentConstructorStub : public ArrayConstructorStubBase {
+ public:
+ ArrayNoArgumentConstructorStub(
+ ElementsKind kind,
+ AllocationSiteMode mode = TRACK_ALLOCATION_SITE)
+ : ArrayConstructorStubBase(kind, mode) {
}
virtual Handle<Code> GenerateCode();
private:
Major MajorKey() { return ArrayNoArgumentConstructor; }
- int NotMissMinorKey() { return 0; }
DISALLOW_COPY_AND_ASSIGN(ArrayNoArgumentConstructorStub);
};
-class ArraySingleArgumentConstructorStub : public HydrogenCodeStub {
+class ArraySingleArgumentConstructorStub : public ArrayConstructorStubBase {
public:
- ArraySingleArgumentConstructorStub()
- : HydrogenCodeStub(CODE_STUB_IS_NOT_MISS) {}
+ ArraySingleArgumentConstructorStub(
+ ElementsKind kind,
+ AllocationSiteMode mode = TRACK_ALLOCATION_SITE)
+ : ArrayConstructorStubBase(kind, mode) {
+ }
virtual Handle<Code> GenerateCode();
private:
Major MajorKey() { return ArraySingleArgumentConstructor; }
- int NotMissMinorKey() { return 0; }
DISALLOW_COPY_AND_ASSIGN(ArraySingleArgumentConstructorStub);
};
-class ArrayNArgumentsConstructorStub : public HydrogenCodeStub {
+class ArrayNArgumentsConstructorStub : public ArrayConstructorStubBase {
public:
- ArrayNArgumentsConstructorStub()
- : HydrogenCodeStub(CODE_STUB_IS_NOT_MISS) {}
+ ArrayNArgumentsConstructorStub(
+ ElementsKind kind,
+ AllocationSiteMode mode = TRACK_ALLOCATION_SITE) :
+ ArrayConstructorStubBase(kind, mode) {
+ }
virtual Handle<Code> GenerateCode();
private:
Major MajorKey() { return ArrayNArgumentsConstructor; }
- int NotMissMinorKey() { return 0; }
DISALLOW_COPY_AND_ASSIGN(ArrayNArgumentsConstructorStub);
};
int CompilationInfo::num_parameters() const {
- if (IsStub()) {
- return 0;
- } else {
- return scope()->num_parameters();
- }
+ ASSERT(!IsStub());
+ return scope()->num_parameters();
}
V(elements_field_string, "%elements") \
V(length_field_string, "%length") \
V(function_class_string, "Function") \
+ V(properties_field_symbol, "%properties") \
+ V(payload_field_symbol, "%payload") \
V(illegal_argument_string, "illegal argument") \
V(MakeReferenceError_string, "MakeReferenceError") \
V(MakeSyntaxError_string, "MakeSyntaxError") \
SetGVNFlag(kChangesNewSpacePromotion);
}
+ static Flags DefaultFlags() {
+ return CAN_ALLOCATE_IN_NEW_SPACE;
+ }
+
+ static Flags DefaultFlags(ElementsKind kind) {
+ Flags flags = CAN_ALLOCATE_IN_NEW_SPACE;
+ if (IsFastDoubleElementsKind(kind)) {
+ flags = static_cast<HAllocate::Flags>(
+ flags | HAllocate::ALLOCATE_DOUBLE_ALIGNED);
+ }
+ return flags;
+ }
+
HValue* context() { return OperandAt(0); }
HValue* size() { return OperandAt(1); }
total_size->ChangeRepresentation(Representation::Integer32());
total_size->ClearFlag(HValue::kCanOverflow);
- HAllocate::Flags flags = HAllocate::CAN_ALLOCATE_IN_NEW_SPACE;
+ HAllocate::Flags flags = HAllocate::DefaultFlags(kind);
if (FLAG_pretenure_literals) {
+ // TODO(hpayer): When pretenuring can be internalized, flags can become
+ // private to HAllocate.
if (IsFastDoubleElementsKind(kind)) {
flags = static_cast<HAllocate::Flags>(
flags | HAllocate::CAN_ALLOCATE_IN_OLD_DATA_SPACE);
flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
}
}
- if (IsFastDoubleElementsKind(kind)) {
- flags = static_cast<HAllocate::Flags>(
- flags | HAllocate::ALLOCATE_DOUBLE_ALIGNED);
- }
HValue* elements =
AddInstruction(new(zone) HAllocate(context, total_size,
}
+HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
+ HValue* array_map,
+ AllocationSiteMode mode,
+ HValue* allocation_site_payload,
+ HValue* length_field) {
+
+ BuildStoreMap(array, array_map);
+
+ HConstant* empty_fixed_array =
+ new(zone()) HConstant(
+ Handle<FixedArray>(isolate()->heap()->empty_fixed_array()),
+ Representation::Tagged());
+ AddInstruction(empty_fixed_array);
+
+ AddInstruction(new(zone()) HStoreNamedField(array,
+ isolate()->factory()->properties_field_symbol(),
+ empty_fixed_array,
+ true,
+ JSArray::kPropertiesOffset));
+
+ HInstruction* length_store = AddInstruction(
+ new(zone()) HStoreNamedField(array,
+ isolate()->factory()->length_field_string(),
+ length_field,
+ true,
+ JSArray::kLengthOffset));
+ length_store->SetGVNFlag(kChangesArrayLengths);
+
+ if (mode == TRACK_ALLOCATION_SITE) {
+ BuildCreateAllocationSiteInfo(array,
+ JSArray::kSize,
+ allocation_site_payload);
+ }
+
+ int elements_location = JSArray::kSize;
+ if (mode == TRACK_ALLOCATION_SITE) {
+ elements_location += AllocationSiteInfo::kSize;
+ }
+
+ HInnerAllocatedObject* elements = new(zone()) HInnerAllocatedObject(
+ array,
+ elements_location);
+ AddInstruction(elements);
+
+ HInstruction* elements_store = AddInstruction(
+ new(zone()) HStoreNamedField(
+ array,
+ isolate()->factory()->elements_field_string(),
+ elements,
+ true,
+ JSArray::kElementsOffset));
+ elements_store->SetGVNFlag(kChangesElementsPointer);
+
+ return elements;
+}
+
+
HInstruction* HGraphBuilder::BuildStoreMap(HValue* object,
HValue* map) {
Zone* zone = this->zone();
: AddInstruction(new(zone) HConstant(nan_double,
Representation::Double()));
- LoopBuilder builder(this, context, LoopBuilder::kPostIncrement);
+ // Special loop unfolding case
+ static const int kLoopUnfoldLimit = 4;
+ bool unfold_loop = false;
+ int initial_capacity = JSArray::kPreallocatedArrayElements;
+ if (from->IsConstant() && to->IsConstant() &&
+ initial_capacity <= kLoopUnfoldLimit) {
+ HConstant* constant_from = HConstant::cast(from);
+ HConstant* constant_to = HConstant::cast(to);
+
+ if (constant_from->HasInteger32Value() &&
+ constant_from->Integer32Value() == 0 &&
+ constant_to->HasInteger32Value() &&
+ constant_to->Integer32Value() == initial_capacity) {
+ unfold_loop = true;
+ }
+ }
+
+ if (unfold_loop) {
+ for (int i = 0; i < initial_capacity; i++) {
+ HInstruction* key = AddInstruction(new(zone)
+ HConstant(i, Representation::Integer32()));
+ AddInstruction(new(zone) HStoreKeyed(elements, key, hole, elements_kind));
+ }
+ } else {
+ LoopBuilder builder(this, context, LoopBuilder::kPostIncrement);
- HValue* key = builder.BeginBody(from, to, Token::LT);
+ HValue* key = builder.BeginBody(from, to, Token::LT);
- AddInstruction(new(zone) HStoreKeyed(elements, key, hole, elements_kind));
+ AddInstruction(new(zone) HStoreKeyed(elements, key, hole, elements_kind));
- builder.EndBody();
+ builder.EndBody();
+ }
}
: FixedArray::SizeFor(length);
}
- HAllocate::Flags allocate_flags = HAllocate::CAN_ALLOCATE_IN_NEW_SPACE;
- if (IsFastDoubleElementsKind(kind)) {
- allocate_flags = static_cast<HAllocate::Flags>(
- allocate_flags | HAllocate::ALLOCATE_DOUBLE_ALIGNED);
- }
-
+ HAllocate::Flags allocate_flags = HAllocate::DefaultFlags(kind);
// Allocate both the JS array and the elements array in one big
// allocation. This avoids multiple limit checks.
HValue* size_in_bytes =
// Create an allocation site info if requested.
if (mode == TRACK_ALLOCATION_SITE) {
- HValue* alloc_site =
- AddInstruction(new(zone) HInnerAllocatedObject(object, JSArray::kSize));
- Handle<Map> alloc_site_map(isolate()->heap()->allocation_site_info_map());
- BuildStoreMap(alloc_site, alloc_site_map);
- int alloc_payload_offset = AllocationSiteInfo::kPayloadOffset;
- AddInstruction(new(zone) HStoreNamedField(alloc_site,
- factory->empty_string(),
- boilerplate,
- true, alloc_payload_offset));
+ BuildCreateAllocationSiteInfo(object, JSArray::kSize, boilerplate);
}
if (length > 0) {
}
+// Emits code that writes an AllocationSiteInfo object immediately after
+// |previous_object| (at byte offset |previous_object_size| inside the same
+// allocation): first its map, then |payload| at kPayloadOffset.
+// Returns the inner-allocated site-info object.
+HValue* HGraphBuilder::BuildCreateAllocationSiteInfo(HValue* previous_object,
+                                                     int previous_object_size,
+                                                     HValue* payload) {
+  HInnerAllocatedObject* alloc_site = new(zone())
+        HInnerAllocatedObject(previous_object, previous_object_size);
+  AddInstruction(alloc_site);
+  Handle<Map> alloc_site_map(isolate()->heap()->allocation_site_info_map());
+  BuildStoreMap(alloc_site, alloc_site_map);
+  AddInstruction(new(zone()) HStoreNamedField(alloc_site,
+                                              isolate()->factory()->payload_string(),
+                                              payload,
+                                              true,
+                                              AllocationSiteInfo::kPayloadOffset));
+  return alloc_site;
+}
+
+
+// The effective mode is downgraded from the requested one: tracking is only
+// kept if AllocationSiteInfo::GetMode(kind) says this elements kind is worth
+// tracking; DONT_TRACK is always honored as-is.
+HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
+                                              ElementsKind kind,
+                                              HValue* allocation_site_payload,
+                                              AllocationSiteMode mode) :
+        builder_(builder),
+        kind_(kind),
+        allocation_site_payload_(allocation_site_payload) {
+  if (mode == DONT_TRACK_ALLOCATION_SITE) {
+    mode_ = mode;
+  } else {
+    mode_ = AllocationSiteInfo::GetMode(kind);
+  }
+}
+
+
+// Emits the loads that fetch the JSArray map for kind_: global object ->
+// native context -> JS_ARRAY_MAPS_INDEX array -> entry indexed by kind_.
+HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode(HValue* context) {
+  // Get the global context, the native context, the map array
+  HInstruction* global_object = AddInstruction(new(zone())
+      HGlobalObject(context));
+  HInstruction* native_context = AddInstruction(new(zone())
+      HLoadNamedField(global_object, true, GlobalObject::kNativeContextOffset));
+  size_t offset = Context::kHeaderSize +
+      kPointerSize * Context::JS_ARRAY_MAPS_INDEX;
+  HInstruction* map_array = AddInstruction(new(zone())
+      HLoadNamedField(native_context, true, offset));
+  // The maps array is laid out like a FixedArray: header, then one map
+  // per elements kind.
+  offset = kind_ * kPointerSize + FixedArrayBase::kHeaderSize;
+  return AddInstruction(new(zone()) HLoadNamedField(map_array, true, offset));
+}
+
+
+// Computes the total allocation size in bytes for an array of the given
+// (dynamic) length: JSArray header + optional AllocationSiteInfo +
+// Fixed(Double)Array header + length * per-element size.
+// NOTE(review): kCanOverflow is cleared on both the multiply and the add —
+// this assumes the caller has already bounded the length; confirm at call
+// sites.
+HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
+    HValue* length_node) {
+  HValue* context = builder()->environment()->LookupContext();
+  ASSERT(length_node != NULL);
+
+  int base_size = JSArray::kSize;
+  if (mode_ == TRACK_ALLOCATION_SITE) {
+    base_size += AllocationSiteInfo::kSize;
+  }
+
+  if (IsFastDoubleElementsKind(kind_)) {
+    base_size += FixedDoubleArray::kHeaderSize;
+  } else {
+    base_size += FixedArray::kHeaderSize;
+  }
+
+  HInstruction* elements_size_value = new(zone())
+      HConstant(elements_size(), Representation::Integer32());
+  AddInstruction(elements_size_value);
+  HInstruction* mul = HMul::New(zone(), context, length_node,
+                                elements_size_value);
+  mul->ChangeRepresentation(Representation::Integer32());
+  mul->ClearFlag(HValue::kCanOverflow);
+  AddInstruction(mul);
+
+  HInstruction* base = new(zone()) HConstant(base_size,
+                                             Representation::Integer32());
+  AddInstruction(base);
+  HInstruction* total_size = HAdd::New(zone(), context, base, mul);
+  total_size->ChangeRepresentation(Representation::Integer32());
+  total_size->ClearFlag(HValue::kCanOverflow);
+  AddInstruction(total_size);
+  return total_size;
+}
+
+
+// Compile-time-constant size for an "empty" array: JSArray header +
+// optional AllocationSiteInfo + a backing store of initial_capacity()
+// elements (double or tagged, depending on kind_).
+HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() {
+  int base_size = JSArray::kSize;
+  if (mode_ == TRACK_ALLOCATION_SITE) {
+    base_size += AllocationSiteInfo::kSize;
+  }
+
+  base_size += IsFastDoubleElementsKind(kind_)
+      ? FixedDoubleArray::SizeFor(initial_capacity())
+      : FixedArray::SizeFor(initial_capacity());
+
+  HConstant* array_size =
+      new(zone()) HConstant(base_size, Representation::Integer32());
+  AddInstruction(array_size);
+  return array_size;
+}
+
+
+// Allocates an array with capacity initial_capacity(), length 0, and the
+// backing store filled with the hole (fill_with_hole == true).
+HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
+  HValue* size_in_bytes = EstablishEmptyArrayAllocationSize();
+  HConstant* capacity =
+      new(zone()) HConstant(initial_capacity(), Representation::Integer32());
+  AddInstruction(capacity);
+  return AllocateArray(size_in_bytes,
+                       capacity,
+                       builder()->graph()->GetConstant0(),
+                       true);
+}
+
+
+// Convenience overload: derives the allocation size from |capacity| and
+// forwards to the four-argument AllocateArray.
+HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
+                                                     HValue* length_field,
+                                                     bool fill_with_hole) {
+  HValue* size_in_bytes = EstablishAllocationSize(capacity);
+  return AllocateArray(size_in_bytes, capacity, length_field, fill_with_hole);
+}
+
+
+// Emits allocation of the whole array object plus initialization of its
+// header and elements store. Side effect: remembers the elements location
+// in elements_location_ (see GetElementsLocation()). Returns the new array.
+HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
+                                                     HValue* capacity,
+                                                     HValue* length_field,
+                                                     bool fill_with_hole) {
+  HValue* context = builder()->environment()->LookupContext();
+
+  // Allocate (dealing with failure appropriately)
+  HAllocate::Flags flags = HAllocate::DefaultFlags(kind_);
+  HAllocate* new_object = new(zone()) HAllocate(context, size_in_bytes,
+                                                HType::JSArray(), flags);
+  AddInstruction(new_object);
+
+  // Fill in the fields: map, properties, length
+  HValue* map = EmitMapCode(context);
+  elements_location_ = builder()->BuildJSArrayHeader(new_object,
+                                                     map,
+                                                     mode_,
+                                                     allocation_site_payload_,
+                                                     length_field);
+
+  // Initialize the elements
+  builder()->BuildInitializeElements(elements_location_, kind_, capacity);
+
+  if (fill_with_hole) {
+    builder()->BuildFillElementsWithHole(context, elements_location_, kind_,
+                                         graph()->GetConstant0(), capacity);
+  }
+
+  return new_object;
+}
+
+
HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info,
TypeFeedbackOracle* oracle)
: HGraphBuilder(info),
} else {
// The constructor function is both an operand to the instruction and an
// argument to the construct call.
+ bool use_call_new_array = FLAG_optimize_constructed_arrays &&
+ !(expr->target().is_null()) &&
+ *(expr->target()) == isolate()->global_context()->array_function();
+
CHECK_ALIVE(VisitArgument(expr->expression()));
HValue* constructor = HPushArgument::cast(Top())->argument();
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
HCallNew* call;
- if (FLAG_optimize_constructed_arrays &&
- !(expr->target().is_null()) &&
- *(expr->target()) == isolate()->global_context()->array_function()) {
+ if (use_call_new_array) {
+ AddInstruction(new(zone()) HCheckFunction(constructor,
+ Handle<JSFunction>(isolate()->global_context()->array_function())));
Handle<Object> feedback = oracle()->GetInfo(expr->CallNewFeedbackId());
ASSERT(feedback->IsSmi());
+
+ // TODO(mvstanton): It would be better to use the already created global
+ // property cell that is shared by full code gen. That way, any transition
+ // information that happened after crankshaft won't be lost. The right
+ // way to do that is to begin passing the cell to the type feedback oracle
+ // instead of just the value in the cell. Do this in a follow-up checkin.
Handle<JSGlobalPropertyCell> cell =
isolate()->factory()->NewJSGlobalPropertyCell(feedback);
- AddInstruction(new(zone()) HCheckFunction(constructor,
- Handle<JSFunction>(isolate()->global_context()->array_function())));
+
+ // TODO(mvstanton): Here we should probably insert code to check if the
+ // type cell elements kind is different from when we compiled, and deopt
+  // in that case. Do this in a follow-up checkin.
call = new(zone()) HCallNewArray(context, constructor, argument_count,
cell);
} else {
// Build Allocation Site Info if desired
if (create_allocation_site_info) {
- HValue* alloc_site =
- AddInstruction(new(zone) HInnerAllocatedObject(target, JSArray::kSize));
- Handle<Map> alloc_site_map(isolate()->heap()->allocation_site_info_map());
- BuildStoreMap(alloc_site, alloc_site_map);
- int alloc_payload_offset = AllocationSiteInfo::kPayloadOffset;
- AddInstruction(new(zone) HStoreNamedField(alloc_site,
- factory->payload_string(),
- original_boilerplate,
- true, alloc_payload_offset));
+ BuildCreateAllocationSiteInfo(target, JSArray::kSize, original_boilerplate);
}
if (object_elements != NULL) {
#include "hydrogen-instructions.h"
#include "type-info.h"
#include "zone.h"
+#include "scopes.h"
namespace v8 {
namespace internal {
HGraph* CreateGraph();
+ // Bailout environment manipulation.
+ void Push(HValue* value) { environment()->Push(value); }
+ HValue* Pop() { return environment()->Pop(); }
+
// Adding instructions.
HInstruction* AddInstruction(HInstruction* instr);
void AddSimulate(BailoutId id,
void BuildNewSpaceArrayCheck(HValue* length,
ElementsKind kind);
+  // Helper for emitting Hydrogen code that allocates and initializes a
+  // JSArray of a fixed elements kind, optionally tracking allocation sites
+  // (mode is resolved against the kind in the constructor).
+  class JSArrayBuilder {
+   public:
+    JSArrayBuilder(HGraphBuilder* builder,
+                   ElementsKind kind,
+                   HValue* allocation_site_payload,
+                   AllocationSiteMode mode);
+
+    // Allocates an array of initial_capacity() elements with length 0.
+    HValue* AllocateEmptyArray();
+    // Allocates an array with the given capacity; fills the backing store
+    // with the hole when requested.
+    HValue* AllocateArray(HValue* capacity, HValue* length_field,
+                          bool fill_with_hole);
+    // Only valid after one of the Allocate* calls above has been emitted.
+    HValue* GetElementsLocation() { return elements_location_; }
+
+   private:
+    Zone* zone() const { return builder_->zone(); }
+    // Per-element size in bytes for kind_.
+    int elements_size() const {
+      return IsFastDoubleElementsKind(kind_) ? kDoubleSize : kPointerSize;
+    }
+    HInstruction* AddInstruction(HInstruction* instr) {
+      return builder_->AddInstruction(instr);
+    }
+    HGraphBuilder* builder() { return builder_; }
+    HGraph* graph() { return builder_->graph(); }
+    int initial_capacity() {
+      STATIC_ASSERT(JSArray::kPreallocatedArrayElements > 0);
+      return JSArray::kPreallocatedArrayElements;
+    }
+
+    HValue* EmitMapCode(HValue* context);
+    HValue* EstablishEmptyArrayAllocationSize();
+    HValue* EstablishAllocationSize(HValue* length_node);
+    HValue* AllocateArray(HValue* size_in_bytes, HValue* capacity,
+                          HValue* length_field, bool fill_with_hole);
+
+    HGraphBuilder* builder_;
+    ElementsKind kind_;
+    AllocationSiteMode mode_;
+    HValue* allocation_site_payload_;
+    HInnerAllocatedObject* elements_location_;
+  };
+
HValue* BuildAllocateElements(HValue* context,
ElementsKind kind,
HValue* capacity);
ElementsKind kind,
HValue* capacity);
+ // array must have been allocated with enough room for
+ // 1) the JSArray, 2) a AllocationSiteInfo if mode requires it,
+ // 3) a FixedArray or FixedDoubleArray.
+ // A pointer to the Fixed(Double)Array is returned.
+ HInnerAllocatedObject* BuildJSArrayHeader(HValue* array,
+ HValue* array_map,
+ AllocationSiteMode mode,
+ HValue* allocation_site_payload,
+ HValue* length_field);
+
HValue* BuildGrowElementsCapacity(HValue* object,
HValue* elements,
ElementsKind kind,
int position,
HIfContinuation* continuation);
+ HValue* BuildCreateAllocationSiteInfo(HValue* previous_object,
+ int previous_object_size,
+ HValue* payload);
+
private:
HGraphBuilder();
CompilationInfo* info_;
void AddSoftDeoptimize();
- // Bailout environment manipulation.
- void Push(HValue* value) { environment()->Push(value); }
- HValue* Pop() { return environment()->Pop(); }
-
void Bailout(const char* reason);
HBasicBlock* CreateJoin(HBasicBlock* first,
// that for a construct call the constructor function in edi needs to be
// preserved for entering the generic code. In both cases argc in eax needs to
// be preserved.
-static void ArrayNativeCode(MacroAssembler* masm,
- bool construct_call,
- Label* call_generic_code) {
+void ArrayNativeCode(MacroAssembler* masm,
+ bool construct_call,
+ Label* call_generic_code) {
Label argc_one_or_more, argc_two_or_more, prepare_generic_code_call,
empty_array, not_empty_array, finish, cant_transition_map, not_double;
}
-void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
+void Builtins::Generate_CommonArrayConstructCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : argc
// -- ebx : type info cell
__ Assert(not_zero, "Unexpected initial map for Array function");
__ CmpObjectType(ecx, MAP_TYPE, ecx);
__ Assert(equal, "Unexpected initial map for Array function");
-
- if (FLAG_optimize_constructed_arrays) {
- // We should either have undefined in ebx or a valid jsglobalpropertycell
- Label okay_here;
- Handle<Object> undefined_sentinel(
- masm->isolate()->heap()->undefined_value(), masm->isolate());
- Handle<Map> global_property_cell_map(
- masm->isolate()->heap()->global_property_cell_map());
- __ cmp(ebx, Immediate(undefined_sentinel));
- __ j(equal, &okay_here);
- __ cmp(FieldOperand(ebx, 0), Immediate(global_property_cell_map));
- __ Assert(equal, "Expected property cell in register ebx");
- __ bind(&okay_here);
- }
}
- if (FLAG_optimize_constructed_arrays) {
- Label not_zero_case, not_one_case;
- __ test(eax, eax);
- __ j(not_zero, ¬_zero_case);
- ArrayNoArgumentConstructorStub no_argument_stub;
- __ TailCallStub(&no_argument_stub);
-
- __ bind(¬_zero_case);
- __ cmp(eax, 1);
- __ j(greater, ¬_one_case);
- ArraySingleArgumentConstructorStub single_argument_stub;
- __ TailCallStub(&single_argument_stub);
-
- __ bind(¬_one_case);
- ArrayNArgumentsConstructorStub n_argument_stub;
- __ TailCallStub(&n_argument_stub);
- } else {
- Label generic_constructor;
- // Run the native code for the Array function called as constructor.
- ArrayNativeCode(masm, true, &generic_constructor);
-
- // Jump to the generic construct code in case the specialized code cannot
- // handle the construction.
- __ bind(&generic_constructor);
- Handle<Code> generic_construct_stub =
- masm->isolate()->builtins()->JSConstructStubGeneric();
- __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
- }
+ Label generic_constructor;
+ // Run the native code for the Array function called as constructor.
+ ArrayNativeCode(masm, true, &generic_constructor);
+
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
}
}
-static void InitializeArrayConstructorDescriptor(Isolate* isolate,
- CodeStubInterfaceDescriptor* descriptor) {
+static void InitializeArrayConstructorDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor,
+ int constant_stack_parameter_count) {
// register state
- // edi -- constructor function
+ // eax -- number of arguments
// ebx -- type info cell with elements kind
- // eax -- number of arguments to the constructor function
- static Register registers[] = { edi, ebx };
- descriptor->register_param_count_ = 2;
- // stack param count needs (constructor pointer, and single argument)
- descriptor->stack_parameter_count_ = &eax;
+ static Register registers[] = { ebx };
+ descriptor->register_param_count_ = 1;
+
+ if (constant_stack_parameter_count != 0) {
+ // stack param count needs (constructor pointer, and single argument)
+ descriptor->stack_parameter_count_ = &eax;
+ }
+ descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
descriptor->register_params_ = registers;
descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
descriptor->deoptimization_handler_ =
+// constant_stack_parameter_count == 0: no stack arguments.
void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
-  InitializeArrayConstructorDescriptor(isolate, descriptor);
+  InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}
+// constant_stack_parameter_count == 1: exactly one stack argument.
void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
-  InitializeArrayConstructorDescriptor(isolate, descriptor);
+  InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}
+// constant_stack_parameter_count == -1: argument count is not a
+// compile-time constant (taken from eax at runtime).
void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
-  InitializeArrayConstructorDescriptor(isolate, descriptor);
+  InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
// It is important that the store buffer overflow stubs are generated first.
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
+ if (FLAG_optimize_constructed_arrays) {
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+ }
}
__ ret(0);
}
+
+// Emits a dispatch chain that compares edx against every fast ElementsKind
+// (in sequence order) and tail-calls the stub T specialized for the matching
+// kind. Aborts if edx holds a kind outside the fast sequence.
+template<class T>
+static void CreateArrayDispatch(MacroAssembler* masm) {
+  int last_index = GetSequenceIndexFromFastElementsKind(
+      TERMINAL_FAST_ELEMENTS_KIND);
+  for (int i = 0; i <= last_index; ++i) {
+    Label next;
+    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+    __ cmp(edx, kind);
+    __ j(not_equal, &next);
+    T stub(kind);
+    __ TailCallStub(&stub);
+    __ bind(&next);
+  }
+
+  // If we reached this point there is a problem.
+  __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+// Dispatcher for the one-argument Array(len) case. If the single argument
+// is non-zero and the feedback kind is a packed (non-holey) kind, the array
+// will contain holes, so the kind is upgraded to its holey counterpart
+// (holey kinds have the low bit set — see the ASSERTs below) and, when a
+// type info cell is present, the upgraded kind is written back into it.
+// Register state on entry:
+//   ebx - type info cell (or the undefined sentinel)
+//   edx - elements kind to dispatch on
+//   eax - number of arguments
+//   esp[0] - return address
+//   esp[4] - last argument
+static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
+  // The kind-upgrade trick below relies on this exact ordering of the fast
+  // elements kinds (holey == packed + 1).
+  ASSERT(FAST_SMI_ELEMENTS == 0);
+  ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+  ASSERT(FAST_ELEMENTS == 2);
+  ASSERT(FAST_HOLEY_ELEMENTS == 3);
+  ASSERT(FAST_DOUBLE_ELEMENTS == 4);
+  ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
+
+  Handle<Object> undefined_sentinel(
+      masm->isolate()->heap()->undefined_value(),
+      masm->isolate());
+
+  // is the low bit set? If so, we are holey and that is good.
+  __ test_b(edx, 1);
+  Label normal_sequence;
+  __ j(not_zero, &normal_sequence);
+
+  // look at the first argument
+  __ mov(ecx, Operand(esp, kPointerSize));
+  __ test(ecx, ecx);
+  __ j(zero, &normal_sequence);
+
+  // We are going to create a holey array, but our kind is non-holey.
+  // Fix kind and retry
+  __ inc(edx);
+  __ cmp(ebx, Immediate(undefined_sentinel));
+  __ j(equal, &normal_sequence);
+
+  // Save the resulting elements kind in type info
+  __ SmiTag(edx);
+  __ mov(FieldOperand(ebx, kPointerSize), edx);
+  __ SmiUntag(edx);
+
+  __ bind(&normal_sequence);
+  // The tail dispatch is identical to the generic dispatcher above; reuse
+  // it instead of duplicating the kind-comparison loop here.
+  CreateArrayDispatch<ArraySingleArgumentConstructorStub>(masm);
+}
+
+
+// Pre-generates one stub of type T per fast ElementsKind and marks each
+// resulting code object as pregenerated.
+template<class T>
+static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
+  int to_index = GetSequenceIndexFromFastElementsKind(
+      TERMINAL_FAST_ELEMENTS_KIND);
+  for (int i = 0; i <= to_index; ++i) {
+    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+    T stub(kind);
+    stub.GetCode(isolate)->set_is_pregenerated(true);
+  }
+}
+
+
+// Pre-generates the no-argument, single-argument and N-argument array
+// constructor stubs for every fast elements kind, so CreateArrayDispatch
+// can tail-call them without triggering lazy compilation.
+void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
+  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
+      isolate);
+  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
+      isolate);
+  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
+      isolate);
+}
+
+
+// Entry point for the Array constructor stub: reads the elements kind from
+// the type info cell (falling back to the initial fast kind when the cell
+// is absent or megamorphic) and dispatches to the kind-specialized stub for
+// the compile-time argument-count class. Without
+// --optimize-constructed-arrays, falls through to the old native/generic
+// construction path.
+void ArrayConstructorStub::Generate(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- eax : argc (only if argument_count_ == ANY)
+  //  -- ebx : type info cell
+  //  -- edi : constructor
+  //  -- esp[0] : return address
+  //  -- esp[4] : last argument
+  // -----------------------------------
+  Handle<Object> undefined_sentinel(
+      masm->isolate()->heap()->undefined_value(),
+      masm->isolate());
+
+  if (FLAG_debug_code) {
+    // The array construct code is only set for the global and natives
+    // builtin Array functions which always have maps.
+
+    // Initial map for the builtin Array function should be a map.
+    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
+    // Will both indicate a NULL and a Smi.
+    __ test(ecx, Immediate(kSmiTagMask));
+    __ Assert(not_zero, "Unexpected initial map for Array function");
+    __ CmpObjectType(ecx, MAP_TYPE, ecx);
+    __ Assert(equal, "Unexpected initial map for Array function");
+
+    // We should either have undefined in ebx or a valid jsglobalpropertycell
+    Label okay_here;
+    Handle<Map> global_property_cell_map(
+        masm->isolate()->heap()->global_property_cell_map());
+    __ cmp(ebx, Immediate(undefined_sentinel));
+    __ j(equal, &okay_here);
+    __ cmp(FieldOperand(ebx, 0), Immediate(global_property_cell_map));
+    __ Assert(equal, "Expected property cell in register ebx");
+    __ bind(&okay_here);
+  }
+
+  if (FLAG_optimize_constructed_arrays) {
+    Label no_info, switch_ready;
+    // Get the elements kind and case on that.
+    __ cmp(ebx, Immediate(undefined_sentinel));
+    __ j(equal, &no_info);
+    // The recorded kind lives in the second word of the cell (as a Smi).
+    __ mov(edx, FieldOperand(ebx, kPointerSize));
+
+    // There is no info if the call site went megamorphic either
+
+    // TODO(mvstanton): Really? I thought if it was the array function that
+    // the cell wouldn't get stamped as megamorphic.
+    __ cmp(edx, Immediate(TypeFeedbackCells::MegamorphicSentinel(
+        masm->isolate())));
+    __ j(equal, &no_info);
+    __ SmiUntag(edx);
+    __ jmp(&switch_ready);
+    __ bind(&no_info);
+    // No usable feedback: assume the initial fast elements kind.
+    __ mov(edx, Immediate(GetInitialFastElementsKind()));
+    __ bind(&switch_ready);
+
+    if (argument_count_ == ANY) {
+      Label not_zero_case, not_one_case;
+      __ test(eax, eax);
+      __ j(not_zero, &not_zero_case);
+      CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+
+      __ bind(&not_zero_case);
+      __ cmp(eax, 1);
+      __ j(greater, &not_one_case);
+      CreateArrayDispatchOneArgument(masm);
+
+      __ bind(&not_one_case);
+      CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+    } else if (argument_count_ == NONE) {
+      CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+    } else if (argument_count_ == ONE) {
+      CreateArrayDispatchOneArgument(masm);
+    } else if (argument_count_ == MORE_THAN_ONE) {
+      CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+    } else {
+      UNREACHABLE();
+    }
+  } else {
+    Label generic_constructor;
+    // Run the native code for the Array function called as constructor.
+    ArrayNativeCode(masm, true, &generic_constructor);
+
+    // Jump to the generic construct code in case the specialized code cannot
+    // handle the construction.
+    __ bind(&generic_constructor);
+    Handle<Code> generic_construct_stub =
+        masm->isolate()->builtins()->JSConstructStubGeneric();
+    __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
+  }
+}
+
+
#undef __
} } // namespace v8::internal
namespace internal {
+void ArrayNativeCode(MacroAssembler* masm,
+ bool construct_call,
+ Label* call_generic_code);
+
// Compute a transcendental math function natively, or call the
// TranscendentalCache runtime function.
class TranscendentalCacheStub: public PlatformCodeStub {
__ Ret((parameter_count + extra_value_count) * kPointerSize, ecx);
} else {
Register reg = ToRegister(instr->parameter_count());
+ // The argument count parameter is a smi
+ __ SmiUntag(reg);
Register return_addr_reg = reg.is(ecx) ? ebx : ecx;
if (dynamic_frame_alignment && FLAG_debug_code) {
ASSERT(extra_value_count == 2);
ASSERT(ToRegister(instr->result()).is(eax));
ASSERT(FLAG_optimize_constructed_arrays);
- __ mov(ebx, instr->hydrogen()->property_cell());
- Handle<Code> array_construct_code =
- isolate()->builtins()->ArrayConstructCode();
__ Set(eax, Immediate(instr->arity()));
- CallCode(array_construct_code, RelocInfo::CONSTRUCT_CALL, instr);
+ __ mov(ebx, instr->hydrogen()->property_cell());
+ Object* cell_value = instr->hydrogen()->property_cell()->value();
+ ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+ if (instr->arity() == 0) {
+ ArrayNoArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else if (instr->arity() == 1) {
+ ArraySingleArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else {
+ ArrayNArgumentsConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ }
}
bool LInstruction::HasDoubleRegisterInput() {
for (int i = 0; i < InputCount(); i++) {
LOperand* op = InputAt(i);
- if (op->IsDoubleRegister()) {
+ if (op != NULL && op->IsDoubleRegister()) {
return true;
}
}
LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
info()->MarkAsDeferredCalling();
LOperand* context = UseAny(instr->context());
+ // TODO(mvstanton): why can't size be a constant if possible?
LOperand* size = UseTempRegister(instr->size());
LOperand* temp = TempRegister();
LAllocate* result = new(zone()) LAllocate(context, size, temp);
ASSERT(info()->IsStub());
CodeStubInterfaceDescriptor* descriptor =
info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
- Register reg = descriptor->register_params_[instr->index()];
+ int index = static_cast<int>(instr->index());
+ Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index);
return DefineFixed(result, reg);
}
}
LOperand* parameter_count() { return inputs_[2]; }
DECLARE_CONCRETE_INSTRUCTION(Return, "return")
+ DECLARE_HYDROGEN_ACCESSOR(Return)
};
stub.InitializeInterfaceDescriptor(
this, code_stub_interface_descriptor(CodeStub::FastCloneShallowArray));
CompareNilICStub::InitializeForIsolate(this);
+ ArrayConstructorStubBase::InstallDescriptors(this);
}
if (FLAG_parallel_recompilation) optimizing_compiler_thread_.Start();
}
+// Returns true when allocations of this object should carry an
+// AllocationSiteInfo. Non-JSArray objects qualify whenever their map can
+// track allocation sites; JSArrays additionally require that their current
+// elements kind is one we track (AllocationSiteInfo::GetMode).
+bool JSObject::ShouldTrackAllocationInfo() {
+  if (map()->CanTrackAllocationSite()) {
+    if (!IsJSArray()) {
+      return true;
+    }
+
+    return AllocationSiteInfo::GetMode(GetElementsKind()) ==
+        TRACK_ALLOCATION_SITE;
+  }
+  return false;
+}
+
+
+// Heuristic: We only need to create allocation site info if the boilerplate
+// elements kind is the initial elements kind (fast smi-only) — presumably
+// because transitions away from the initial kind are the events worth
+// recording; confirm against the transition-tracking callers.
+AllocationSiteMode AllocationSiteInfo::GetMode(
+    ElementsKind boilerplate_elements_kind) {
+  if (FLAG_track_allocation_sites &&
+      IsFastSmiElementsKind(boilerplate_elements_kind)) {
+    return TRACK_ALLOCATION_SITE;
+  }
+
+  return DONT_TRACK_ALLOCATION_SITE;
+}
+
+
+// Transition variant: tracks only smi-elements -> object/double-elements
+// transitions, and only when --track-allocation-sites is enabled.
+AllocationSiteMode AllocationSiteInfo::GetMode(ElementsKind from,
+                                               ElementsKind to) {
+  if (FLAG_track_allocation_sites &&
+      IsFastSmiElementsKind(from) &&
+      (IsFastObjectElementsKind(to) || IsFastDoubleElementsKind(to))) {
+    return TRACK_ALLOCATION_SITE;
+  }
+
+  return DONT_TRACK_ALLOCATION_SITE;
+}
+
+
MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
ValidateElements();
ElementsKind elements_kind = map()->elements_kind();
bool Code::is_pregenerated() {
-  return kind() == STUB && IsPregeneratedField::decode(flags());
+  // Only STUB-kind code objects can be marked pregenerated.
+  return (kind() == STUB && IsPregeneratedField::decode(flags()));
}
}
-// Heuristic: We only need to create allocation site info if the boilerplate
-// elements kind is the initial elements kind.
-AllocationSiteMode AllocationSiteInfo::GetMode(
- ElementsKind boilerplate_elements_kind) {
- if (FLAG_track_allocation_sites &&
- IsFastSmiElementsKind(boilerplate_elements_kind)) {
- return TRACK_ALLOCATION_SITE;
- }
-
- return DONT_TRACK_ALLOCATION_SITE;
-}
-
-
-AllocationSiteMode AllocationSiteInfo::GetMode(ElementsKind from,
- ElementsKind to) {
- if (FLAG_track_allocation_sites &&
- IsFastSmiElementsKind(from) &&
- (IsFastObjectElementsKind(to) || IsFastDoubleElementsKind(to))) {
- return TRACK_ALLOCATION_SITE;
- }
-
- return DONT_TRACK_ALLOCATION_SITE;
-}
-
-
uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
// For array indexes mix the length into the hash as an array index could
// be zero.
}
-MUST_USE_RESULT static MaybeObject* CacheInitialJSArrayMaps(
+MUST_USE_RESULT MaybeObject* CacheInitialJSArrayMaps(
Context* native_context, Map* initial_map) {
// Replace all of the cached initial array maps in the native context with
// the appropriate transitioned elements kind maps.
Heap* heap = native_context->GetHeap();
MaybeObject* maybe_maps =
- heap->AllocateFixedArrayWithHoles(kElementsKindCount);
+ heap->AllocateFixedArrayWithHoles(kElementsKindCount, TENURED);
FixedArray* maps;
if (!maybe_maps->To(&maps)) return maybe_maps;
bool HasDictionaryArgumentsElements();
inline SeededNumberDictionary* element_dictionary(); // Gets slow elements.
+ inline bool ShouldTrackAllocationInfo();
+
inline void set_map_and_elements(
Map* map,
FixedArrayBase* value,
// Returns NULL if no AllocationSiteInfo is available for object.
static AllocationSiteInfo* FindForJSObject(JSObject* object);
-
- static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind);
- static AllocationSiteMode GetMode(ElementsKind from, ElementsKind to);
+ static inline AllocationSiteMode GetMode(
+ ElementsKind boilerplate_elements_kind);
+ static inline AllocationSiteMode GetMode(ElementsKind from, ElementsKind to);
static const int kPayloadOffset = HeapObject::kHeaderSize;
static const int kSize = kPayloadOffset + kPointerSize;
};
+MUST_USE_RESULT MaybeObject* CacheInitialJSArrayMaps(
+ Context* native_context, Map* initial_map);
+
+
// JSRegExpResult is just a JSArray with a specific initial map.
// This initial map adds in-object properties for "index" and "input"
// properties, as assigned by RegExp.prototype.exec, which allows
// entering the generic code. In both cases argc in rax needs to be preserved.
// Both registers are preserved by this code so no need to differentiate between
// a construct call and a normal call.
-static void ArrayNativeCode(MacroAssembler* masm,
- Label* call_generic_code) {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code) {
Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array,
has_non_smi_element, finish, cant_transition_map, not_double;
}
-void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
+void Builtins::Generate_CommonArrayConstructCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : argc
// -- rdi : constructor
__ Check(not_smi, "Unexpected initial map for Array function");
__ CmpObjectType(rcx, MAP_TYPE, rcx);
__ Check(equal, "Unexpected initial map for Array function");
-
- if (FLAG_optimize_constructed_arrays) {
- // We should either have undefined in ebx or a valid jsglobalpropertycell
- Label okay_here;
- Handle<Object> undefined_sentinel(
- masm->isolate()->factory()->undefined_value());
- Handle<Map> global_property_cell_map(
- masm->isolate()->heap()->global_property_cell_map());
- __ Cmp(rbx, undefined_sentinel);
- __ j(equal, &okay_here);
- __ Cmp(FieldOperand(rbx, 0), global_property_cell_map);
- __ Assert(equal, "Expected property cell in register rbx");
- __ bind(&okay_here);
- }
}
- if (FLAG_optimize_constructed_arrays) {
- Label not_zero_case, not_one_case;
- __ testq(rax, rax);
- __ j(not_zero, ¬_zero_case);
- ArrayNoArgumentConstructorStub no_argument_stub;
- __ TailCallStub(&no_argument_stub);
-
- __ bind(¬_zero_case);
- __ cmpq(rax, Immediate(1));
- __ j(greater, ¬_one_case);
- ArraySingleArgumentConstructorStub single_argument_stub;
- __ TailCallStub(&single_argument_stub);
-
- __ bind(¬_one_case);
- ArrayNArgumentsConstructorStub n_argument_stub;
- __ TailCallStub(&n_argument_stub);
- } else {
- Label generic_constructor;
- // Run the native code for the Array function called as constructor.
- ArrayNativeCode(masm, &generic_constructor);
-
- // Jump to the generic construct code in case the specialized code cannot
- // handle the construction.
- __ bind(&generic_constructor);
- Handle<Code> generic_construct_stub =
- masm->isolate()->builtins()->JSConstructStubGeneric();
- __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
- }
+ Label generic_constructor;
+ // Run the native code for the Array function called as constructor.
+ ArrayNativeCode(masm, &generic_constructor);
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
+
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : number of arguments
}
-static void InitializeArrayConstructorDescriptor(Isolate* isolate,
- CodeStubInterfaceDescriptor* descriptor) {
+static void InitializeArrayConstructorDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor,
+ int constant_stack_parameter_count) {
// register state
- // rdi -- constructor function
+ // rax -- number of arguments
// rbx -- type info cell with elements kind
- // rax -- number of arguments to the constructor function
- static Register registers[] = { rdi, rbx };
- descriptor->register_param_count_ = 2;
- // stack param count needs (constructor pointer, and single argument)
- descriptor->stack_parameter_count_ = &rax;
+ static Register registers[] = { rbx };
+ descriptor->register_param_count_ = 1;
+ if (constant_stack_parameter_count != 0) {
+ // stack param count needs (constructor pointer, and single argument)
+ descriptor->stack_parameter_count_ = &rax;
+ }
+ descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
descriptor->register_params_ = registers;
descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
descriptor->deoptimization_handler_ =
void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}
void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}
void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate, descriptor);
+ InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}
TypeFeedbackCells::MonomorphicArraySentinel(isolate,
LAST_FAST_ELEMENTS_KIND);
__ Cmp(rcx, terminal_kind_sentinel);
- __ j(not_equal, &miss);
+ __ j(above, &miss);
// Make sure the function is the Array() function
__ LoadArrayFunction(rcx);
__ cmpq(rdi, rcx);
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
// It is important that the store buffer overflow stubs are generated first.
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
+ if (FLAG_optimize_constructed_arrays) {
+ ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+ }
}
__ Ret();
}
+
+template<class T>
+static void CreateArrayDispatch(MacroAssembler* masm) {
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ cmpl(rdx, Immediate(kind));
+ __ j(not_equal, &next);
+ T stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
+ // rbx - type info cell
+ // rdx - kind
+ // rax - number of arguments
+ // rdi - constructor?
+ // esp[0] - return address
+ // esp[4] - last argument
+ ASSERT(FAST_SMI_ELEMENTS == 0);
+ ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ ASSERT(FAST_ELEMENTS == 2);
+ ASSERT(FAST_HOLEY_ELEMENTS == 3);
+ ASSERT(FAST_DOUBLE_ELEMENTS == 4);
+ ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
+
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ // is the low bit set? If so, we are holey and that is good.
+ __ testb(rdx, Immediate(1));
+ Label normal_sequence;
+ __ j(not_zero, &normal_sequence);
+
+ // look at the first argument
+ __ movq(rcx, Operand(rsp, kPointerSize));
+ __ testq(rcx, rcx);
+ __ j(zero, &normal_sequence);
+
+ // We are going to create a holey array, but our kind is non-holey.
+ // Fix kind and retry
+ __ incl(rdx);
+ __ Cmp(rbx, undefined_sentinel);
+ __ j(equal, &normal_sequence);
+
+ // Save the resulting elements kind in type info
+ __ Integer32ToSmi(rdx, rdx);
+ __ movq(FieldOperand(rbx, kPointerSize), rdx);
+ __ SmiToInteger32(rdx, rdx);
+
+ __ bind(&normal_sequence);
+ int last_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= last_index; ++i) {
+ Label next;
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ __ cmpl(rdx, Immediate(kind));
+ __ j(not_equal, &next);
+ ArraySingleArgumentConstructorStub stub(kind);
+ __ TailCallStub(&stub);
+ __ bind(&next);
+ }
+
+ // If we reached this point there is a problem.
+ __ Abort("Unexpected ElementsKind in array constructor");
+}
+
+
+template<class T>
+static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
+ int to_index = GetSequenceIndexFromFastElementsKind(
+ TERMINAL_FAST_ELEMENTS_KIND);
+ for (int i = 0; i <= to_index; ++i) {
+ ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
+ T stub(kind);
+ stub.GetCode(isolate)->set_is_pregenerated(true);
+ }
+}
+
+
+void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
+ isolate);
+ ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
+ isolate);
+}
+
+
+
+void ArrayConstructorStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- rax : argc
+ // -- rbx : type info cell
+ // -- rdi : constructor
+ // -- rsp[0] : return address
+ // -- rsp[4] : last argument
+ // -----------------------------------
+ Handle<Object> undefined_sentinel(
+ masm->isolate()->heap()->undefined_value(),
+ masm->isolate());
+
+ if (FLAG_debug_code) {
+ // The array construct code is only set for the global and natives
+ // builtin Array functions which always have maps.
+
+ // Initial map for the builtin Array function should be a map.
+ __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
+ // Will both indicate a NULL and a Smi.
+ STATIC_ASSERT(kSmiTag == 0);
+ Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
+ __ Check(not_smi, "Unexpected initial map for Array function");
+ __ CmpObjectType(rcx, MAP_TYPE, rcx);
+ __ Check(equal, "Unexpected initial map for Array function");
+
+ // We should either have undefined in ebx or a valid jsglobalpropertycell
+ Label okay_here;
+ Handle<Map> global_property_cell_map(
+ masm->isolate()->heap()->global_property_cell_map());
+ __ Cmp(rbx, undefined_sentinel);
+ __ j(equal, &okay_here);
+ __ Cmp(FieldOperand(rbx, 0), global_property_cell_map);
+ __ Assert(equal, "Expected property cell in register rbx");
+ __ bind(&okay_here);
+ }
+
+ if (FLAG_optimize_constructed_arrays) {
+ Label no_info, switch_ready;
+ // Get the elements kind and case on that.
+ __ Cmp(rbx, undefined_sentinel);
+ __ j(equal, &no_info);
+ __ movq(rdx, FieldOperand(rbx, kPointerSize));
+
+ // There is no info if the call site went megamorphic either
+
+ // TODO(mvstanton): Really? I thought if it was the array function that
+ // the cell wouldn't get stamped as megamorphic.
+ __ Cmp(rdx, TypeFeedbackCells::MegamorphicSentinel(masm->isolate()));
+ __ j(equal, &no_info);
+ __ SmiToInteger32(rdx, rdx);
+ __ jmp(&switch_ready);
+ __ bind(&no_info);
+ __ movq(rdx, Immediate(GetInitialFastElementsKind()));
+ __ bind(&switch_ready);
+
+ if (argument_count_ == ANY) {
+ Label not_zero_case, not_one_case;
+ __ testq(rax, rax);
+ __ j(not_zero, &not_zero_case);
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+
+ __ bind(&not_zero_case);
+ __ cmpl(rax, Immediate(1));
+ __ j(greater, &not_one_case);
+ CreateArrayDispatchOneArgument(masm);
+
+ __ bind(&not_one_case);
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else if (argument_count_ == NONE) {
+ CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm);
+ } else if (argument_count_ == ONE) {
+ CreateArrayDispatchOneArgument(masm);
+ } else if (argument_count_ == MORE_THAN_ONE) {
+ CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm);
+ } else {
+ UNREACHABLE();
+ }
+ } else {
+ Label generic_constructor;
+ // Run the native code for the Array function called as constructor.
+ ArrayNativeCode(masm, &generic_constructor);
+
+ // Jump to the generic construct code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_constructor);
+ Handle<Code> generic_construct_stub =
+ masm->isolate()->builtins()->JSConstructStubGeneric();
+ __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
+ }
+}
+
+
#undef __
} } // namespace v8::internal
namespace internal {
+void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
+
// Compute a transcendental math function natively, or call the
// TranscendentalCache runtime function.
class TranscendentalCacheStub: public PlatformCodeStub {
rcx);
} else {
Register reg = ToRegister(instr->parameter_count());
+ // The argument count parameter is a smi
+ __ SmiToInteger32(reg, reg);
Register return_addr_reg = reg.is(rcx) ? rbx : rcx;
__ pop(return_addr_reg);
__ shl(reg, Immediate(kPointerSizeLog2));
__ Set(rax, instr->arity());
__ Move(rbx, instr->hydrogen()->property_cell());
- Handle<Code> array_construct_code =
- isolate()->builtins()->ArrayConstructCode();
- CallCode(array_construct_code, RelocInfo::CONSTRUCT_CALL, instr);
+ Object* cell_value = instr->hydrogen()->property_cell()->value();
+ ElementsKind kind = static_cast<ElementsKind>(Smi::cast(cell_value)->value());
+ if (instr->arity() == 0) {
+ ArrayNoArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else if (instr->arity() == 1) {
+ ArraySingleArgumentConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ } else {
+ ArrayNArgumentsConstructorStub stub(kind);
+ CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
+ }
}
ASSERT(info()->IsStub());
CodeStubInterfaceDescriptor* descriptor =
info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
- Register reg = descriptor->register_params_[instr->index()];
+ int index = static_cast<int>(instr->index());
+ Register reg = DESCRIPTOR_GET_PARAMETER_REGISTER(descriptor, index);
return DefineFixed(result, reg);
}
}
LOperand* parameter_count() { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(Return, "return")
+ DECLARE_HYDROGEN_ACCESSOR(Return)
};
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax --smi-only-arrays --expose-gc
-// Flags: --track-allocation-sites --nooptimize-constructed-arrays
+// Flags: --track-allocation-sites --noalways-opt
// TODO(mvstanton): remove --nooptimize-constructed-arrays and enable
// the constructed array code below when the feature is turned on
// in this test case. Depending on whether smi-only arrays are actually
// enabled, this test takes the appropriate code path to check smi-only arrays.
-support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6,7,8));
+// support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6,7,8));
+support_smi_only_arrays = true;
optimize_constructed_arrays = false;
if (support_smi_only_arrays) {
// sites work again for fast literals
//assertKind(elements_kind.fast_double, obj);
- obj = fastliteralcase([5, 3, 2], 1.5);
- assertKind(elements_kind.fast_double, obj);
- obj = fastliteralcase([3, 6, 2], 1.5);
- assertKind(elements_kind.fast_double, obj);
- obj = fastliteralcase([2, 6, 3], 2);
- assertKind(elements_kind.fast_smi_only, obj);
+ // The test below is in a loop because arrays that live
+ // at global scope without the chance of being recreated
+ // don't have allocation site information attached.
+ for (i = 0; i < 2; i++) {
+ obj = fastliteralcase([5, 3, 2], 1.5);
+ assertKind(elements_kind.fast_double, obj);
+ obj = fastliteralcase([3, 6, 2], 1.5);
+ assertKind(elements_kind.fast_double, obj);
+ obj = fastliteralcase([2, 6, 3], 2);
+ assertKind(elements_kind.fast_smi_only, obj);
+ }
// Verify that we will not pretransition the double->fast path.
obj = fastliteralcase(get_standard_literal(), "elliot");
// obj = fastliteralcase(3);
// assertKind(elements_kind.fast_double, obj);
+ // Make sure this works in crankshafted code too.
+ %OptimizeFunctionOnNextCall(get_standard_literal);
+ get_standard_literal();
+ obj = get_standard_literal();
+ assertKind(elements_kind.fast_double, obj);
+
function fastliteralcase_smifast(value) {
var literal = [1, 2, 3, 4];
literal[0] = value;
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax --smi-only-arrays --noparallel-recompilation
+// Flags: --notrack-allocation-sites
+
+// No tracking of allocation sites because it interferes with the semantics
+// the test is trying to ensure.
// Ensure that ElementsKind transitions in various situations are hoisted (or
// not hoisted) correctly, don't change the semantics programs and don't trigger