int arity = instr->arity();
CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ after_push_argument_ = false;
}
CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
+ after_push_argument_ = false;
ASSERT(ToRegister(instr->result()).is(x0));
}
ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
}
+ after_push_argument_ = false;
ASSERT(ToRegister(instr->result()).is(x0));
}
if (context->IsRegister()) {
__ Mov(cp, ToRegister(context));
} else if (context->IsStackSlot()) {
- __ Ldr(cp, ToMemOperand(context));
+ __ Ldr(cp, ToMemOperand(context, kMustUseFramePointer));
} else if (context->IsConstantOperand()) {
HConstant* constant =
chunk_->LookupConstant(LConstantOperand::cast(context));
}
-MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
+MemOperand LCodeGen::ToMemOperand(LOperand* op, StackMode stack_mode) const {
ASSERT(op != NULL);
ASSERT(!op->IsRegister());
ASSERT(!op->IsDoubleRegister());
ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
if (NeedsEagerFrame()) {
- return MemOperand(fp, StackSlotOffset(op->index()));
+ int fp_offset = StackSlotOffset(op->index());
+ if (op->index() >= 0) {
+ // Loads and stores have a bigger reach with positive than negative offsets.
+ // When the load or the store can't be done in one instruction via fp
+ // (too big negative offset), we try to access via jssp (positive offset).
+ // We can reference a stack slot from jssp only if jssp references the end
+ // of the stack slots. It's not the case when:
+ // - stack_mode != kCanUseStackPointer: this is the case when deferred
+ // code has saved the registers.
+ // - after_push_argument_: arguments have been pushed for a call.
+ // - inlined_arguments_: inlined arguments have been pushed once. All the
+ // remainder of the function cannot trust jssp any longer.
+ // - saves_caller_doubles: some double registers have been pushed, jssp
+ // references the end of the double registers and not the end of the
+ // stack slots.
+ // Also, if the offset from fp is small enough to make a load/store in
+ // one instruction, we use a fp access.
+ if ((stack_mode == kCanUseStackPointer) && !after_push_argument_ &&
+ !inlined_arguments_ && !is_int9(fp_offset) &&
+ !info()->saves_caller_doubles()) {
+ int jssp_offset =
+ (GetStackSlotCount() - op->index() - 1) * kPointerSize;
+ return MemOperand(masm()->StackPointer(), jssp_offset);
+ }
+ }
+ return MemOperand(fp, fp_offset);
} else {
// Retrieve parameter without eager stack-frame relative to the
// stack-pointer.
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
+ // We push some arguments and they will be popped in another block. We can't
+ // trust that jssp references the end of the stack slots until the end of
+ // the function.
+ inlined_arguments_ = true;
Register result = ToRegister(instr->result());
if (instr->hydrogen()->from_inlined()) {
__ Call(target);
}
generator.AfterCall();
+ after_push_argument_ = false;
}
__ Call(x10);
RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
+ after_push_argument_ = false;
}
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
CallRuntime(instr->function(), instr->arity(), instr);
+ // The runtime call has consumed the pushed arguments, so jssp is assumed to
+ // point at the end of the stack slots again and may be used for
+ // stack-slot addressing (see ToMemOperand).
after_push_argument_ = false;
}
default:
UNREACHABLE();
}
+ after_push_argument_ = false;
}
instr,
x1);
}
+ after_push_argument_ = false;
}
Abort(kDoPushArgumentNotImplementedForDoubleType);
} else {
__ Push(ToRegister(argument));
+ after_push_argument_ = true;
}
}
frame_is_built_(false),
safepoints_(info->zone()),
resolver_(this),
- expected_safepoint_kind_(Safepoint::kSimple) {
+ expected_safepoint_kind_(Safepoint::kSimple),
+ after_push_argument_(false),
+ inlined_arguments_(false) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
}
+ // Sanity-check the jssp-tracking invariant at teardown: every pushed
+ // argument should have been consumed by a call (resetting
+ // after_push_argument_), unless inlined arguments made jssp untrackable
+ // for the rest of the function (inlined_arguments_).
+ ~LCodeGen() {
+ ASSERT(!after_push_argument_ || inlined_arguments_);
+ }
+
// Simple accessors.
Scope* scope() const { return scope_; }
Operand ToOperand(LOperand* op);
Operand ToOperand32I(LOperand* op);
Operand ToOperand32U(LOperand* op);
- MemOperand ToMemOperand(LOperand* op) const;
+ enum StackMode { kMustUseFramePointer, kCanUseStackPointer };
+ MemOperand ToMemOperand(LOperand* op,
+ StackMode stack_mode = kCanUseStackPointer) const;
Handle<Object> ToHandle(LConstantOperand* op) const;
// TODO(jbramley): Examine these helpers and check that they make sense.
Safepoint::Kind expected_safepoint_kind_;
+ // This flag is true when we are after a push (but before a call).
+ // In this situation, jssp no longer references the end of the stack slots,
+ // so we can only reference a stack slot via fp.
+ bool after_push_argument_;
+ // If we have inlined arguments, we are no longer able to use jssp because
+ // jssp is modified and we never know if we are in a block after or before
+ // the pop of the arguments (which restores jssp).
+ bool inlined_arguments_;
+
int old_position_;
class PushSafepointRegistersScope BASE_EMBEDDED {