}
+// Calling convention for KeyedArrayCallStub on ARM: a single register
+// parameter in r2 (the key — presumably; confirm against the stub's
+// GetParameter(0) use), a tail-call continuation, and a miss handler that
+// also receives the caller's stack arguments.
+void KeyedArrayCallStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { r2 };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  // The stub ends in a tail call rather than returning to its caller.
+  descriptor->continuation_type_ = TAIL_CALL_CONTINUATION;
+  // On miss, the deoptimization handler is passed the caller's arguments.
+  descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
+}
+
+
void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
}
+// ARM trampoline entered after a tail-call-continuation stub fails: call
+// into the runtime via CEntryStub to obtain the function to invoke, then
+// tail-call it with the caller's original arguments.
+void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
+  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+  // The runtime call's result (the function to invoke) is in r0; move it to
+  // r1 for InvokeFunction and reuse r0 for the argument count.
+  __ mov(r1, r0);
+  int parameter_count_offset =
+      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
+  __ ldr(r0, MemOperand(fp, parameter_count_offset));
+  // The parameter count above includes the receiver for the arguments passed to
+  // the deoptimization handler. Subtract the receiver for the parameter count
+  // for the call.
+  __ sub(r0, r0, Operand(1));
+  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
+  ParameterCount argument_count(r0);
+  // JUMP_FUNCTION: tail call, so the callee returns directly to the
+  // original caller.
+  __ InvokeFunction(
+      r1, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+}
+
+
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
PredictableCodeSizeScope predictable(masm, 4 * Assembler::kInstrSize);
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
LOperand* context = UseFixed(instr->context(), cp);
LOperand* function = UseFixed(instr->function(), r1);
- return MarkAsCall(
- DefineFixed(new(zone()) LCallFunction(context, function), r0), instr);
+ LCallFunction* call = new(zone()) LCallFunction(context, function);
+ LInstruction* result = DefineFixed(call, r0);
+ if (instr->IsTailCall()) return result;
+ return MarkAsCall(result, instr);
}
}
+// Maps a (negative) parameter spill-slot index to an offset relative to the
+// stack pointer, for code generated without an eager frame (see
+// LCodeGen::ToMemOperand below).
+static int ArgumentsOffsetWithoutFrame(int index) {
+  ASSERT(index < 0);
+  return -(index + 1) * kPointerSize;
+}
+
+
MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
ASSERT(!op->IsRegister());
ASSERT(!op->IsDoubleRegister());
ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
- return MemOperand(fp, StackSlotOffset(op->index()));
+ if (NeedsEagerFrame()) {
+ return MemOperand(fp, StackSlotOffset(op->index()));
+ } else {
+ // Retrieve parameter without eager stack-frame relative to the
+ // stack-pointer.
+ return MemOperand(sp, ArgumentsOffsetWithoutFrame(op->index()));
+ }
}
MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
ASSERT(op->IsDoubleStackSlot());
- return MemOperand(fp, StackSlotOffset(op->index()) + kPointerSize);
+ if (NeedsEagerFrame()) {
+ return MemOperand(fp, StackSlotOffset(op->index()) + kPointerSize);
+ } else {
+ // Retrieve parameter without eager stack-frame relative to the
+ // stack-pointer.
+ return MemOperand(
+ sp, ArgumentsOffsetWithoutFrame(op->index()) + kPointerSize);
+ }
}
int arity = instr->arity();
CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
- CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+ if (instr->hydrogen()->IsTailCall()) {
+ if (NeedsEagerFrame()) __ mov(sp, fp);
+ __ Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
+ } else {
+ CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+ }
}
if (property == NULL) {
// Function call. Specialize for monomorphic calls.
if (is_monomorphic_) target_ = oracle->GetCallTarget(this);
- } else {
+ } else if (property->key()->IsPropertyName()) {
// Method call. Specialize for the receiver types seen at runtime.
Literal* key = property->key()->AsLiteral();
ASSERT(key != NULL && key->value()->IsString());
Handle<Map> map = receiver_types_.first();
is_monomorphic_ = ComputeTarget(map, name);
}
+ } else {
+ if (is_monomorphic_) {
+ keyed_array_call_is_holey_ = oracle->KeyedArrayCallIsHoley(this);
+ }
}
}
return &receiver_types_;
}
virtual bool IsMonomorphic() V8_OVERRIDE { return is_monomorphic_; }
+ bool KeyedArrayCallIsHoley() { return keyed_array_call_is_holey_; }
CheckType check_type() const { return check_type_; }
void set_string_check(Handle<JSObject> holder) {
expression_(expression),
arguments_(arguments),
is_monomorphic_(false),
+ keyed_array_call_is_holey_(true),
check_type_(RECEIVER_MAP_CHECK),
return_id_(GetNextId(isolate)) { }
ZoneList<Expression*>* arguments_;
bool is_monomorphic_;
+ bool keyed_array_call_is_holey_;
CheckType check_type_;
SmallMapList receiver_types_;
Handle<JSFunction> target_;
}
+// Hydrogen graph for KeyedArrayCallStub: verify that the receiver still has
+// the expected initial JSArray map, load the callee from the receiver's
+// elements at the key, and tail-call it.
+template<>
+HValue* CodeStubGraphBuilder<KeyedArrayCallStub>::BuildCodeStub() {
+  // +1 — presumably for the receiver; confirm against the trampoline's
+  // parameter-count handling.
+  int argc = casted_stub()->argc() + 1;
+  info()->set_parameter_count(argc);
+
+  HValue* receiver = Add<HParameter>(1);
+
+  // Load the expected initial array map from the context.
+  JSArrayBuilder array_builder(this, casted_stub()->elements_kind());
+  HValue* map = array_builder.EmitMapCode();
+
+  // Map mismatch bails out to the miss handler.
+  HValue* checked_receiver = Add<HCheckMapValue>(receiver, map);
+
+  // GetParameter(0) is the stub's single register parameter (the key).
+  HValue* function = BuildUncheckedMonomorphicElementAccess(
+      checked_receiver, GetParameter(0),
+      NULL, true, casted_stub()->elements_kind(),
+      false, NEVER_RETURN_HOLE, STANDARD_STORE);
+  // TAIL_CALL: the generated code jumps to the callee instead of calling.
+  return Add<HCallFunction>(function, argc, TAIL_CALL);
+}
+
+
+// Standard Hydrogen-stub entry point: compile the graph built above.
+Handle<Code> KeyedArrayCallStub::GenerateCode(Isolate* isolate) {
+  return DoGenerateCode(isolate, this);
+}
+
+
template <>
HValue* CodeStubGraphBuilder<KeyedStoreFastElementStub>::BuildCodeStub() {
BuildUncheckedMonomorphicElementAccess(
: register_param_count_(-1),
stack_parameter_count_(no_reg),
hint_stack_parameter_count_(-1),
+ continuation_type_(NORMAL_CONTINUATION),
function_mode_(NOT_JS_FUNCTION_STUB_MODE),
register_params_(NULL),
deoptimization_handler_(NULL),
has_miss_handler_(false) { }
+// Pregenerates stubs that can only be compiled once builtins exist; invoked
+// from Heap::CreateStubsRequiringBuiltins().
+void CodeStub::GenerateStubsRequiringBuiltinsAheadOfTime(Isolate* isolate) {
+  StubFailureTailCallTrampolineStub::GenerateAheadOfTime(isolate);
+}
+
+
bool CodeStub::FindCodeInCache(Code** code_out, Isolate* isolate) {
UnseededNumberDictionary* stubs = isolate->heap()->code_stubs();
int index = stubs->FindEntry(GetKey());
}
+// Compiles the (single) tail-call trampoline and marks the code object
+// pregenerated, so the deoptimizer can later locate it via FindCodeInCache.
+void StubFailureTailCallTrampolineStub::GenerateAheadOfTime(Isolate* isolate) {
+  StubFailureTailCallTrampolineStub stub;
+  stub.GetCode(isolate)->set_is_pregenerated(true);
+}
+
+
void ProfileEntryHookStub::EntryHookTrampoline(intptr_t function,
intptr_t stack_pointer,
Isolate* isolate) {
V(TransitionElementsKind) \
V(StoreArrayLiteralElement) \
V(StubFailureTrampoline) \
+ V(StubFailureTailCallTrampoline) \
V(ArrayConstructor) \
V(InternalArrayConstructor) \
V(ProfileEntryHook) \
V(StoreGlobal) \
/* IC Handler stubs */ \
V(LoadField) \
- V(KeyedLoadField)
+ V(KeyedLoadField) \
+ V(KeyedArrayCall)
// List of code stubs only used on ARM platforms.
#if V8_TARGET_ARCH_ARM
virtual bool IsPregenerated(Isolate* isolate) { return false; }
static void GenerateStubsAheadOfTime(Isolate* isolate);
+ static void GenerateStubsRequiringBuiltinsAheadOfTime(Isolate* isolate);
static void GenerateFPStubs(Isolate* isolate);
// Some stubs put untagged junk on the stack that cannot be scanned by the
enum StubFunctionMode { NOT_JS_FUNCTION_STUB_MODE, JS_FUNCTION_STUB_MODE };
enum HandlerArgumentsMode { DONT_PASS_ARGUMENTS, PASS_ARGUMENTS };
+enum ContinuationType { NORMAL_CONTINUATION, TAIL_CALL_CONTINUATION };
+
+
struct CodeStubInterfaceDescriptor {
CodeStubInterfaceDescriptor();
int register_param_count_;
// if hint_stack_parameter_count_ > 0, the code stub can optimize the
// return sequence. Default value is -1, which means it is ignored.
int hint_stack_parameter_count_;
+ ContinuationType continuation_type_;
StubFunctionMode function_mode_;
Register* register_params_;
Address deoptimization_handler_;
HandlerArgumentsMode handler_arguments_mode_;
+ bool initialized() const { return register_param_count_ >= 0; }
+
+ bool HasTailCallContinuation() const {
+ return continuation_type_ == TAIL_CALL_CONTINUATION;
+ }
+
int environment_length() const {
return register_param_count_;
}
- bool initialized() const { return register_param_count_ >= 0; }
-
void SetMissHandler(ExternalReference handler) {
miss_handler_ = handler;
has_miss_handler_ = true;
public:
virtual Code::Kind GetCodeKind() const { return Code::HANDLER; }
virtual int GetStubFlags() { return kind(); }
+
+ protected:
+ HandlerStub() : HICStub() { }
+ virtual int NotMissMinorKey() { return bit_field_; }
+ int bit_field_;
};
class IndexBits: public BitField<int, 5, 11> {};
class UnboxedDoubleBits: public BitField<bool, 16, 1> {};
virtual CodeStub::Major MajorKey() { return LoadField; }
- virtual int NotMissMinorKey() { return bit_field_; }
-
- int bit_field_;
};
};
+// Hydrogen IC stub for keyed calls on fast-object-elements JSArrays
+// (array[smi](...)). The code kind and the "holey elements" bit live in
+// bit_field_, which doubles as the code object's extra IC state; the
+// argument count additionally distinguishes entries in the stub cache key.
+class KeyedArrayCallStub: public HICStub {
+ public:
+  KeyedArrayCallStub(bool holey, int argc) : HICStub(), argc_(argc) {
+    bit_field_ = KindBits::encode(Code::KEYED_CALL_IC)
+        | HoleyBits::encode(holey);
+  }
+
+  virtual Code::Kind kind() const {
+    return KindBits::decode(bit_field_);
+  }
+
+  virtual Code::ExtraICState GetExtraICState() { return bit_field_; }
+
+  // FAST_HOLEY_ELEMENTS vs FAST_ELEMENTS, from the holey bit.
+  ElementsKind elements_kind() {
+    return HoleyBits::decode(bit_field_) ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+  }
+
+  int argc() { return argc_; }
+  virtual int GetStubFlags() { return argc(); }
+
+  // Recovers the holey bit from a compiled stub's extra IC state (used by
+  // TypeFeedbackOracle::KeyedArrayCallIsHoley).
+  static bool IsHoley(Handle<Code> code) {
+    Code::ExtraICState state = code->extra_ic_state();
+    return HoleyBits::decode(state);
+  }
+
+  virtual void InitializeInterfaceDescriptor(
+      Isolate* isolate,
+      CodeStubInterfaceDescriptor* descriptor);
+
+  virtual Handle<Code> GenerateCode(Isolate* isolate);
+
+ private:
+  // Cache key combines the IC-state bits with the argument count.
+  virtual int NotMissMinorKey() {
+    return GetExtraICState() | ArgcBits::encode(argc_);
+  }
+
+  STATIC_ASSERT(KindBits::kSize == 4);
+  class HoleyBits: public BitField<bool, 4, 1> {};
+  STATIC_ASSERT(Code::kArgumentsBits <= kStubMinorKeyBits - 5);
+  class ArgcBits: public BitField<int, 5, Code::kArgumentsBits> {};
+  virtual CodeStub::Major MajorKey() { return KeyedArrayCall; }
+  int bit_field_;
+  int argc_;
+};
+
+
class BinaryOpStub: public HydrogenCodeStub {
public:
BinaryOpStub(Token::Value op, OverwriteMode mode)
};
+// Platform stub entered when a stub with a TAIL_CALL_CONTINUATION descriptor
+// misses; its Generate() (per-platform) calls the runtime and then
+// tail-calls the resulting function. Always compiled ahead of time, hence
+// IsPregenerated() == true.
+class StubFailureTailCallTrampolineStub : public PlatformCodeStub {
+ public:
+  StubFailureTailCallTrampolineStub() : fp_registers_(CanUseFPRegisters()) {}
+
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
+
+  static void GenerateAheadOfTime(Isolate* isolate);
+
+ private:
+  // Minor key records whether FP registers must be preserved across the
+  // runtime call.
+  class FPRegisters: public BitField<bool, 0, 1> {};
+  Major MajorKey() { return StubFailureTailCallTrampoline; }
+  int MinorKey() { return FPRegisters::encode(fp_registers_); }
+
+  void Generate(MacroAssembler* masm);
+
+  bool fp_registers_;
+
+  DISALLOW_COPY_AND_ASSIGN(StubFailureTailCallTrampolineStub);
+};
+
+
class ProfileEntryHookStub : public PlatformCodeStub {
public:
explicit ProfileEntryHookStub() {}
: flags_(LanguageModeField::encode(CLASSIC_MODE)),
script_(script),
osr_ast_id_(BailoutId::None()),
- osr_pc_offset_(0) {
+ osr_pc_offset_(0),
+ parameter_count_(0) {
Initialize(script->GetIsolate(), BASE, zone);
}
shared_info_(shared_info),
script_(Handle<Script>(Script::cast(shared_info->script()))),
osr_ast_id_(BailoutId::None()),
- osr_pc_offset_(0) {
+ osr_pc_offset_(0),
+ parameter_count_(0) {
Initialize(script_->GetIsolate(), BASE, zone);
}
script_(Handle<Script>(Script::cast(shared_info_->script()))),
context_(closure->context()),
osr_ast_id_(BailoutId::None()),
- osr_pc_offset_(0) {
+ osr_pc_offset_(0),
+ parameter_count_(0) {
Initialize(script_->GetIsolate(), BASE, zone);
}
: flags_(LanguageModeField::encode(CLASSIC_MODE) |
IsLazy::encode(true)),
osr_ast_id_(BailoutId::None()),
- osr_pc_offset_(0) {
+ osr_pc_offset_(0),
+ parameter_count_(0) {
Initialize(isolate, STUB, zone);
code_stub_ = stub;
}
int CompilationInfo::num_parameters() const {
- ASSERT(!IsStub());
- return scope()->num_parameters();
+ if (IsStub()) {
+ ASSERT(parameter_count_ > 0);
+ return parameter_count_;
+ } else {
+ return scope()->num_parameters();
+ }
}
ASSERT(!is_lazy());
flags_ |= IsGlobal::encode(true);
}
+ void set_parameter_count(int parameter_count) {
+ ASSERT(IsStub());
+ parameter_count_ = parameter_count;
+ }
void SetLanguageMode(LanguageMode language_mode) {
ASSERT(this->language_mode() == CLASSIC_MODE ||
this->language_mode() == language_mode ||
// during graph optimization.
int opt_count_;
+ // Number of parameters used for compilation of stubs that require arguments.
+ int parameter_count_;
+
Handle<Foreign> object_wrapper_;
DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
int output_frame_size = height_in_bytes + fixed_frame_size;
if (trace_scope_ != NULL) {
PrintF(trace_scope_->file(),
- " translating %s => StubFailureTrampolineStub, height=%d\n",
+ " translating %s => StubFailure%sTrampolineStub, height=%d\n",
CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
+ descriptor->HasTailCallContinuation() ? "TailCall" : "",
height_in_bytes);
}
top_address + output_frame_offset, output_frame_offset, value);
}
- intptr_t caller_arg_count = 0;
+ intptr_t caller_arg_count = descriptor->HasTailCallContinuation()
+ ? compiled_code_->arguments_count() + 1 : 0;
bool arg_count_known = !descriptor->stack_parameter_count_.is_valid();
// Build the Arguments object for the caller's parameters and a pointer to it.
// Compute this frame's PC, state, and continuation.
Code* trampoline = NULL;
- StubFunctionMode function_mode = descriptor->function_mode_;
- StubFailureTrampolineStub(function_mode).FindCodeInCache(&trampoline,
- isolate_);
+ if (descriptor->HasTailCallContinuation()) {
+ StubFailureTailCallTrampolineStub().FindCodeInCache(&trampoline, isolate_);
+ } else {
+ StubFunctionMode function_mode = descriptor->function_mode_;
+ StubFailureTrampolineStub(function_mode).FindCodeInCache(&trampoline,
+ isolate_);
+ }
ASSERT(trampoline != NULL);
output_frame->SetPc(reinterpret_cast<intptr_t>(
trampoline->instruction_start()));
return trampoline;
}
+ StubFailureTailCallTrampolineStub().FindCodeInCache(&trampoline, isolate());
+ if (trampoline->contains(pc())) {
+ return trampoline;
+ }
+
UNREACHABLE();
return NULL;
}
}
+// Pregenerates code stubs that depend on builtins already being set up;
+// called from isolate initialization right after builtins_.SetUp().
+void Heap::CreateStubsRequiringBuiltins() {
+  HandleScope scope(isolate());
+  CodeStub::GenerateStubsRequiringBuiltinsAheadOfTime(isolate());
+}
+
+
bool Heap::CreateInitialObjects() {
Object* obj;
NO_INLINE(void CreateJSConstructEntryStub());
void CreateFixedStubs();
+ void CreateStubsRequiringBuiltins();
MUST_USE_RESULT MaybeObject* CreateOddball(const char* to_string,
Object* to_number,
};
+// Distinguishes ordinary calls from tail calls for HCallFunction; a
+// TAIL_CALL leaves no frame behind (see HCallFunction::IsTailCall users).
+enum CallMode {
+  NORMAL_CALL,
+  TAIL_CALL
+};
+
+
class HCallFunction V8_FINAL : public HBinaryCall {
public:
DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P2(HCallFunction, HValue*, int);
+ DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P3(
+ HCallFunction, HValue*, int, CallMode);
+
+ bool IsTailCall() const { return call_mode_ == TAIL_CALL; }
HValue* context() { return first(); }
HValue* function() { return second(); }
DECLARE_CONCRETE_INSTRUCTION(CallFunction)
+ virtual int argument_delta() const V8_OVERRIDE {
+ if (IsTailCall()) return 0;
+ return -argument_count();
+ }
+
private:
- HCallFunction(HValue* context, HValue* function, int argument_count)
- : HBinaryCall(context, function, argument_count) {
+ HCallFunction(HValue* context,
+ HValue* function,
+ int argument_count,
+ CallMode mode = NORMAL_CALL)
+ : HBinaryCall(context, function, argument_count), call_mode_(mode) {
}
+ CallMode call_mode_;
};
DECLARE_CONCRETE_INSTRUCTION(CheckMapValue)
protected:
+ virtual int RedefinedOperandIndex() { return 0; }
+
virtual bool DataEquals(HValue* other) V8_OVERRIDE {
return true;
}
return builder()->Add<HConstant>(map);
}
- if (kind_ == GetInitialFastElementsKind()) {
+ if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
// No need for a context lookup if the kind_ matches the initial
// map, because we can just load the map in that case.
HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
if (prop != NULL) {
if (!prop->key()->IsPropertyName()) {
// Keyed function call.
- CHECK_ALIVE(VisitArgument(prop->obj()));
-
+ CHECK_ALIVE(VisitForValue(prop->obj()));
CHECK_ALIVE(VisitForValue(prop->key()));
+
// Push receiver and key like the non-optimized code generator expects it.
HValue* key = Pop();
HValue* receiver = Pop();
Push(key);
- Push(receiver);
-
+ Push(Add<HPushArgument>(receiver));
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
- call = New<HCallKeyed>(key, argument_count);
+ if (expr->IsMonomorphic()) {
+ BuildCheckHeapObject(receiver);
+ ElementsKind kind = expr->KeyedArrayCallIsHoley()
+ ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+
+ Handle<Map> map(isolate()->get_initial_js_array_map(kind));
+
+ HValue* function = BuildMonomorphicElementAccess(
+ receiver, key, NULL, NULL, map, false, STANDARD_STORE);
+
+ call = New<HCallFunction>(function, argument_count);
+ } else {
+ call = New<HCallKeyed>(key, argument_count);
+ }
Drop(argument_count + 1); // 1 is the key.
return ast_context()->ReturnInstruction(call, expr->id());
}
JSArrayBuilder(HGraphBuilder* builder,
ElementsKind kind,
- HValue* constructor_function);
+ HValue* constructor_function = NULL);
enum FillMode {
DONT_FILL_WITH_HOLE,
HValue* AllocateArray(HValue* capacity, HValue* length_field,
FillMode fill_mode = FILL_WITH_HOLE);
HValue* GetElementsLocation() { return elements_location_; }
+ HValue* EmitMapCode();
private:
Zone* zone() const { return builder_->zone(); }
return JSArray::kPreallocatedArrayElements;
}
- HValue* EmitMapCode();
HValue* EmitInternalMapCode();
HValue* EstablishEmptyArrayAllocationSize();
HValue* EstablishAllocationSize(HValue* length_node);
}
+// Calling convention for KeyedArrayCallStub on ia32: a single register
+// parameter in ecx (the key — presumably; confirm against the stub's
+// GetParameter(0) use), a tail-call continuation, and a miss handler that
+// also receives the caller's stack arguments.
+void KeyedArrayCallStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { ecx };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  // The stub ends in a tail call rather than returning to its caller.
+  descriptor->continuation_type_ = TAIL_CALL_CONTINUATION;
+  // On miss, the deoptimization handler is passed the caller's arguments.
+  descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
+}
+
+
void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
}
+// ia32 trampoline entered after a tail-call-continuation stub fails: call
+// into the runtime via CEntryStub to obtain the function to invoke, then
+// tail-call it with the caller's original arguments.
+void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
+  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+  __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+  // The runtime call's result (the function to invoke) is in eax; move it
+  // to edi for InvokeFunction and reuse eax for the argument count.
+  __ mov(edi, eax);
+  int parameter_count_offset =
+      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
+  __ mov(eax, MemOperand(ebp, parameter_count_offset));
+  // The parameter count above includes the receiver for the arguments passed to
+  // the deoptimization handler. Subtract the receiver for the parameter count
+  // for the call.
+  __ sub(eax, Immediate(1));
+  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
+  ParameterCount argument_count(eax);
+  // JUMP_FUNCTION: tail call, so the callee returns directly to the
+  // original caller.
+  __ InvokeFunction(
+      edi, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+}
+
+
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
// It's always safe to call the entry hook stub, as the hook itself
}
+// Maps a (negative) parameter spill-slot index to an offset relative to
+// esp, for code generated without an eager frame; kPCOnStackSize skips the
+// return address pushed by the caller.
+static int ArgumentsOffsetWithoutFrame(int index) {
+  ASSERT(index < 0);
+  return -(index + 1) * kPointerSize + kPCOnStackSize;
+}
+
+
Operand LCodeGen::ToOperand(LOperand* op) const {
if (op->IsRegister()) return Operand(ToRegister(op));
if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op));
ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
- return Operand(ebp, StackSlotOffset(op->index()));
+ if (NeedsEagerFrame()) {
+ return Operand(ebp, StackSlotOffset(op->index()));
+ } else {
+ // Retrieve parameter without eager stack-frame relative to the
+ // stack-pointer.
+ return Operand(esp, ArgumentsOffsetWithoutFrame(op->index()));
+ }
}
Operand LCodeGen::HighOperand(LOperand* op) {
ASSERT(op->IsDoubleStackSlot());
- return Operand(ebp, StackSlotOffset(op->index()) + kPointerSize);
+ if (NeedsEagerFrame()) {
+ return Operand(ebp, StackSlotOffset(op->index()) + kPointerSize);
+ } else {
+ // Retrieve parameter without eager stack-frame relative to the
+ // stack-pointer.
+ return Operand(
+ esp, ArgumentsOffsetWithoutFrame(op->index()) + kPointerSize);
+ }
}
int arity = instr->arity();
CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
- CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+ if (instr->hydrogen()->IsTailCall()) {
+ if (NeedsEagerFrame()) __ leave();
+ __ jmp(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
+ } else {
+ CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+ }
}
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
LOperand* context = UseFixed(instr->context(), esi);
LOperand* function = UseFixed(instr->function(), edi);
- LCallFunction* result = new(zone()) LCallFunction(context, function);
- return MarkAsCall(DefineFixed(result, eax), instr);
+ LCallFunction* call = new(zone()) LCallFunction(context, function);
+ LInstruction* result = DefineFixed(call, eax);
+ if (instr->IsTailCall()) return result;
+ return MarkAsCall(result, instr);
}
if (use_ic && state() != MEGAMORPHIC) {
ASSERT(!object->IsJSGlobalProxy());
int argc = target()->arguments_count();
- Handle<Code> stub = isolate()->stub_cache()->ComputeCallMegamorphic(
- argc, Code::KEYED_CALL_IC, Code::kNoExtraICState);
- if (object->IsJSObject()) {
- Handle<JSObject> receiver = Handle<JSObject>::cast(object);
- if (receiver->elements()->map() ==
- isolate()->heap()->non_strict_arguments_elements_map()) {
- stub = isolate()->stub_cache()->ComputeCallArguments(argc);
+ Handle<Code> stub;
+
+ // Use the KeyedArrayCallStub if the call is of the form array[smi](...),
+ // where array is an instance of one of the initial array maps (without
+ // extra named properties).
+ // TODO(verwaest): Also support keyed calls on instances of other maps.
+ if (object->IsJSArray() && key->IsSmi()) {
+ Handle<JSArray> array = Handle<JSArray>::cast(object);
+ ElementsKind kind = array->map()->elements_kind();
+ if (IsFastObjectElementsKind(kind) &&
+ array->map() == isolate()->get_initial_js_array_map(kind)) {
+ KeyedArrayCallStub stub_gen(IsHoleyElementsKind(kind), argc);
+ stub = stub_gen.GetCode(isolate());
}
}
- ASSERT(!stub.is_null());
+
+ if (stub.is_null()) {
+ stub = isolate()->stub_cache()->ComputeCallMegamorphic(
+ argc, Code::KEYED_CALL_IC, Code::kNoExtraICState);
+ if (object->IsJSObject()) {
+ Handle<JSObject> receiver = Handle<JSObject>::cast(object);
+ if (receiver->elements()->map() ==
+ isolate()->heap()->non_strict_arguments_elements_map()) {
+ stub = isolate()->stub_cache()->ComputeCallArguments(argc);
+ }
+ }
+ ASSERT(!stub.is_null());
+ }
set_target(*stub);
TRACE_IC("CallIC", key);
}
}
+// Runtime entry for a KeyedArrayCallStub miss. args[0] points at the
+// caller's Arguments (receiver at index 0), args[1] is the key. Updates the
+// keyed-call IC state, resolves the function to invoke, and lazily compiles
+// it so the trampoline can jump straight into it.
+RUNTIME_FUNCTION(MaybeObject*, KeyedCallIC_MissFromStubFailure) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 2);
+  KeyedCallIC ic(isolate);
+  Arguments* caller_args = reinterpret_cast<Arguments*>(args[0]);
+  Handle<Object> key = args.at<Object>(1);
+  Handle<Object> receiver((*caller_args)[0], isolate);
+
+  ic.UpdateState(receiver, key);
+  MaybeObject* maybe_result = ic.LoadFunction(receiver, key);
+  // Result could be a function or a failure.
+  JSFunction* raw_function = NULL;
+  if (!maybe_result->To(&raw_function)) return maybe_result;
+
+  // Already has code: return it directly.
+  if (raw_function->is_compiled()) return raw_function;
+
+  // Re-handlify before compiling, which can allocate.
+  Handle<JSFunction> function(raw_function, isolate);
+  JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
+  return *function;
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, StoreIC_ArrayLength) {
SealHandleScope shs(isolate);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissFromStubFailure);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, UnaryOpIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreIC_MissFromStubFailure);
+DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedCallIC_MissFromStubFailure);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, ElementsTransitionAndStoreIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, BinaryOpIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, CompareNilIC_Miss);
bootstrapper_->Initialize(create_heap_objects);
builtins_.SetUp(this, create_heap_objects);
+ if (create_heap_objects) heap_.CreateStubsRequiringBuiltins();
+
// Only preallocate on the first initialization.
if (FLAG_preallocate_message_memory && preallocated_message_space_ == NULL) {
// Start the thread which will set aside some memory.
CodeStub::GenerateFPStubs(this);
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(this);
StubFailureTrampolineStub::GenerateAheadOfTime(this);
+ StubFailureTailCallTrampolineStub::GenerateAheadOfTime(this);
// TODO(mstarzinger): The following is an ugly hack to make sure the
// interface descriptor is initialized even when stubs have been
// deserialized out of the snapshot without the graph builder.
// shift all parameter indexes down by the number of parameters, and
// make sure they end up negative so they are distinguishable from
// spill slots.
- int result = index - info()->scope()->num_parameters() - 1;
+ int result = index - info()->num_parameters() - 1;
+
ASSERT(result < 0);
return result;
}
kind() == STORE_IC ||
kind() == LOAD_IC ||
kind() == KEYED_LOAD_IC ||
+ kind() == KEYED_CALL_IC ||
kind() == TO_BOOLEAN_IC);
return StubMajorKeyField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
kind() == KEYED_LOAD_IC ||
kind() == STORE_IC ||
kind() == KEYED_STORE_IC ||
+ kind() == KEYED_CALL_IC ||
kind() == TO_BOOLEAN_IC);
ASSERT(0 <= major && major < 256);
int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
bool TypeFeedbackOracle::CallIsMonomorphic(Call* expr) {
Handle<Object> value = GetInfo(expr->CallFeedbackId());
return value->IsMap() || value->IsAllocationSite() || value->IsJSFunction() ||
- value->IsSmi();
+ value->IsSmi() ||
+ (value->IsCode() && Handle<Code>::cast(value)->ic_state() == MONOMORPHIC);
+}
+
+
+// Whether the KeyedArrayCallStub recorded at this call site was built for a
+// holey elements kind. Only valid when the feedback value is a code object
+// (the cast below assumes the monomorphic Code case of CallIsMonomorphic).
+bool TypeFeedbackOracle::KeyedArrayCallIsHoley(Call* expr) {
+  Handle<Object> value = GetInfo(expr->CallFeedbackId());
+  Handle<Code> code = Handle<Code>::cast(value);
+  return KeyedArrayCallStub::IsHoley(code);
+}
case Code::LOAD_IC:
case Code::STORE_IC:
case Code::CALL_IC:
- case Code::KEYED_CALL_IC:
if (target->ic_state() == MONOMORPHIC) {
if (target->kind() == Code::CALL_IC &&
target->check_type() != RECEIVER_MAP_CHECK) {
}
break;
+ case Code::KEYED_CALL_IC:
case Code::KEYED_LOAD_IC:
case Code::KEYED_STORE_IC:
case Code::BINARY_OP_IC:
bool StoreIsPreMonomorphic(TypeFeedbackId ast_id);
bool StoreIsKeyedPolymorphic(TypeFeedbackId ast_id);
bool CallIsMonomorphic(Call* expr);
+ bool KeyedArrayCallIsHoley(Call* expr);
bool CallNewIsMonomorphic(CallNew* expr);
bool ObjectLiteralStoreIsMonomorphic(ObjectLiteralProperty* prop);
Expression* callee = expr->expression();
Property* prop = callee->AsProperty();
if (prop != NULL) {
- if (prop->key()->IsPropertyName())
- expr->RecordTypeFeedback(oracle(), CALL_AS_METHOD);
+ expr->RecordTypeFeedback(oracle(), CALL_AS_METHOD);
} else {
expr->RecordTypeFeedback(oracle(), CALL_AS_FUNCTION);
}
}
+// Calling convention for KeyedArrayCallStub on x64: a single register
+// parameter in rcx (the key — presumably; confirm against the stub's
+// GetParameter(0) use), a tail-call continuation, and a miss handler that
+// also receives the caller's stack arguments.
+void KeyedArrayCallStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rcx };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  // The stub ends in a tail call rather than returning to its caller.
+  descriptor->continuation_type_ = TAIL_CALL_CONTINUATION;
+  // On miss, the deoptimization handler is passed the caller's arguments.
+  descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
+}
+
+
void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
}
+// x64 trampoline entered after a tail-call-continuation stub fails: call
+// into the runtime via CEntryStub to obtain the function to invoke, then
+// tail-call it with the caller's original arguments.
+void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
+  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+  // The runtime call's result (the function to invoke) is in rax; move it
+  // to rdi for InvokeFunction and reuse rax for the argument count.
+  __ movq(rdi, rax);
+  int parameter_count_offset =
+      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
+  __ movq(rax, MemOperand(rbp, parameter_count_offset));
+  // The parameter count above includes the receiver for the arguments passed to
+  // the deoptimization handler. Subtract the receiver for the parameter count
+  // for the call.
+  __ subl(rax, Immediate(1));
+  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
+  ParameterCount argument_count(rax);
+  // JUMP_FUNCTION: tail call, so the callee returns directly to the
+  // original caller.
+  __ InvokeFunction(
+      rdi, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+}
+
+
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
// It's always safe to call the entry hook stub, as the hook itself
}
+// Maps a (negative) parameter spill-slot index to an offset relative to
+// rsp, for code generated without an eager frame; kPCOnStackSize skips the
+// return address pushed by the caller.
+static int ArgumentsOffsetWithoutFrame(int index) {
+  ASSERT(index < 0);
+  return -(index + 1) * kPointerSize + kPCOnStackSize;
+}
+
+
Operand LCodeGen::ToOperand(LOperand* op) const {
// Does not handle registers. In X64 assembler, plain registers are not
// representable as an Operand.
ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
- return Operand(rbp, StackSlotOffset(op->index()));
+ if (NeedsEagerFrame()) {
+ return Operand(rbp, StackSlotOffset(op->index()));
+ } else {
+ // Retrieve parameter without eager stack-frame relative to the
+ // stack-pointer.
+ return Operand(rsp, ArgumentsOffsetWithoutFrame(op->index()));
+ }
}
int arity = instr->arity();
CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
- CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+ if (instr->hydrogen()->IsTailCall()) {
+ if (NeedsEagerFrame()) __ leave();
+ __ jmp(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
+ } else {
+ CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+ }
}
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
LOperand* context = UseFixed(instr->context(), rsi);
LOperand* function = UseFixed(instr->function(), rdi);
- LCallFunction* result = new(zone()) LCallFunction(context, function);
- return MarkAsCall(DefineFixed(result, rax), instr);
+ LCallFunction* call = new(zone()) LCallFunction(context, function);
+ LInstruction* result = DefineFixed(call, rax);
+ if (instr->IsTailCall()) return result;
+ return MarkAsCall(result, instr);
}
--- /dev/null
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Keyed calls a[i](...) with smi keys on a packed array of functions; each
+// call site runs twice so the second execution hits warmed-up feedback.
+var a = [function(a) { return a+10; },
+         function(a) { return a+20; }];
+a.__proto__.test = function(a) { return a+30; }
+function f(i) {
+  return "r" + (1, a[i](i+1), a[i](i+2));
+}
+
+assertEquals("r12", f(0));
+assertEquals("r12", f(0));
+assertEquals("r23", f(1));
+assertEquals("r23", f(1));
+
+// Deopt the stub.
+assertEquals("rtest230", f("test"));
+
+// Same pattern on a holey array (note the elided element at index 1).
+var a2 = [function(a) { return a+10; },,
+          function(a) { return a+20; }];
+a2.__proto__.test = function(a) { return a+30; }
+function f2(i) {
+  return "r" + (1, a2[i](i+1), a2[i](i+2));
+}
+
+assertEquals("r12", f2(0));
+assertEquals("r12", f2(0));
+assertEquals("r24", f2(2));
+assertEquals("r24", f2(2));
+
+// Deopt the stub. This will throw given that undefined is not a function.
+assertThrows(function() { f2(1) });