Handle<Name>(), Type::Any(), kMachAnyTagged};
}
+
+// static
+// Describes an untagged load of the caller's saved frame pointer from a
+// stack frame: raw-pointer sized (kMachPtr), typed Internal since it is a
+// machine address, with no symbolic field name.
+FieldAccess AccessBuilder::ForFrameCallerFramePtr() {
+ return {kUntaggedBase, StandardFrameConstants::kCallerFPOffset,
+ MaybeHandle<Name>(), Type::Internal(), kMachPtr};
+}
+
+
+// static
+// Describes an untagged load of the marker slot of a stack frame.
+// NOTE(review): for JavaScript frames this slot holds the JSFunction
+// (kMarkerOffset aliases the function slot) — confirm against frames.h,
+// since ReduceGetCallerJSFunction relies on that to fetch the caller.
+FieldAccess AccessBuilder::ForFrameMarker() {
+ return {kUntaggedBase, StandardFrameConstants::kMarkerOffset,
+ MaybeHandle<Name>(), Type::Tagged(), kMachAnyTagged};
+}
+
} // namespace compiler
} // namespace internal
} // namespace v8
// Provides access to the TypeFeedbackVector in SharedFunctionInfo.
static FieldAccess ForSharedFunctionInfoTypeFeedbackVector();
+ // Provides access to the next frame pointer in a stack frame.
+ static FieldAccess ForFrameCallerFramePtr();
+
+ // Provides access to the marker in a stack frame.
+ static FieldAccess ForFrameMarker();
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(AccessBuilder);
};
__ mov(i.OutputRegister(), sp);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
+ case kArchFramePointer:
+ __ mov(i.OutputRegister(), fp);
+ DCHECK_EQ(LeaveCC, i.OutputSBit());
+ break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputFloat64Register(0));
DCHECK_EQ(LeaveCC, i.OutputSBit());
case kArchStackPointer:
__ mov(i.OutputRegister(), masm()->StackPointer());
break;
+ case kArchFramePointer:
+ __ mov(i.OutputRegister(), fp);
+ break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
break;
case kArchStackPointer:
__ mov(i.OutputRegister(), esp);
break;
+ case kArchFramePointer:
+ __ mov(i.OutputRegister(), ebp);
+ break;
case kArchTruncateDoubleToI: {
auto result = i.OutputRegister();
auto input = i.InputDoubleRegister(0);
V(ArchDeoptimize) \
V(ArchRet) \
V(ArchStackPointer) \
+ V(ArchFramePointer) \
V(ArchTruncateDoubleToI) \
V(CheckedLoadInt8) \
V(CheckedLoadUint8) \
return MarkAsFloat64(node), VisitFloat64InsertHighWord32(node);
case IrOpcode::kLoadStackPointer:
return VisitLoadStackPointer(node);
+ case IrOpcode::kLoadFramePointer:
+ return VisitLoadFramePointer(node);
case IrOpcode::kCheckedLoad: {
MachineType rep = OpParameter<MachineType>(node);
MarkAsRepresentation(rep, node);
}
+// Lowers a LoadFramePointer node to the architecture-independent
+// kArchFramePointer instruction; each backend emits a move of its frame
+// pointer register (fp/ebp/rbp) into the node's output register.
+void InstructionSelector::VisitLoadFramePointer(Node* node) {
+ OperandGenerator g(this);
+ Emit(kArchFramePointer, g.DefineAsRegister(node));
+}
+
+
void InstructionSelector::EmitTableSwitch(const SwitchInfo& sw,
InstructionOperand& index_operand) {
OperandGenerator g(this);
return ReduceFixedArraySet(node);
case Runtime::kInlineGetTypeFeedbackVector:
return ReduceGetTypeFeedbackVector(node);
+ case Runtime::kInlineGetCallerJSFunction:
+ return ReduceGetCallerJSFunction(node);
default:
break;
}
}
+// Lowers %_GetCallerJSFunction to two raw frame loads: follow the current
+// frame pointer to the caller's frame, then read that frame's marker slot.
+// Bails out (NoChange) when the containing function is inlined, so the
+// runtime implementation can throw the appropriate error instead.
+Reduction JSIntrinsicLowering::ReduceGetCallerJSFunction(Node* node) {
+ Node* effect = NodeProperties::GetEffectInput(node);
+ Node* control = NodeProperties::GetControlInput(node);
+
+ // An outer FrameState on the frame state input means this intrinsic call
+ // was inlined into another function, so there is no physical caller frame
+ // directly above us to walk to.
+ Node* const frame_state = NodeProperties::GetFrameStateInput(node, 0);
+ Node* outer_frame = frame_state->InputAt(kFrameStateOuterStateInput);
+ if (outer_frame->opcode() == IrOpcode::kFrameState) {
+ // Use the runtime implementation to throw the appropriate error if the
+ // containing function is inlined.
+ return NoChange();
+ }
+
+ // TODO(danno): This implementation forces intrinsic lowering to happen after
+ // inlining, which is fine for now, but eventually the frame-querying logic
+ // probably should go later, e.g. in instruction selection, so that there is
+ // no phase-ordering dependency.
+ FieldAccess access = AccessBuilder::ForFrameCallerFramePtr();
+ // Current frame pointer, then the caller's frame pointer stored in it.
+ Node* fp = graph()->NewNode(machine()->LoadFramePointer());
+ Node* next_fp =
+ graph()->NewNode(simplified()->LoadField(access), fp, effect, control);
+ // Replace the intrinsic with a load of the caller frame's marker slot.
+ // NOTE(review): assumes the marker slot of a JS frame holds the JSFunction
+ // — verify against StandardFrameConstants.
+ return Change(node, simplified()->LoadField(AccessBuilder::ForFrameMarker()),
+ next_fp, effect, control);
+}
+
+
Reduction JSIntrinsicLowering::Change(Node* node, const Operator* op, Node* a,
Node* b) {
node->set_op(op);
Reduction ReduceValueOf(Node* node);
Reduction ReduceFixedArraySet(Node* node);
Reduction ReduceGetTypeFeedbackVector(Node* node);
+ Reduction ReduceGetCallerJSFunction(Node* node);
Reduction Change(Node* node, const Operator* op);
Reduction Change(Node* node, const Operator* op, Node* a, Node* b);
case Runtime::kInlineCallFunction:
case Runtime::kInlineDateField: // TODO(bmeurer): Remove this.
case Runtime::kInlineDeoptimizeNow:
+ case Runtime::kInlineGetCallerJSFunction:
case Runtime::kInlineGetPrototype:
case Runtime::kInlineRegExpExec:
case Runtime::kInlineThrowIfNotADate:
V(Float32Min, Operator::kNoProperties, 2, 0, 1) \
V(Float64Max, Operator::kNoProperties, 2, 0, 1) \
V(Float64Min, Operator::kNoProperties, 2, 0, 1) \
- V(LoadStackPointer, Operator::kNoProperties, 0, 0, 1)
+ V(LoadStackPointer, Operator::kNoProperties, 0, 0, 1) \
+ V(LoadFramePointer, Operator::kNoProperties, 0, 0, 1)
#define MACHINE_TYPE_LIST(V) \
// Access to the machine stack.
const Operator* LoadStackPointer();
+ const Operator* LoadFramePointer();
// checked-load heap, index, length
const Operator* CheckedLoad(CheckedLoadRepresentation);
case kArchStackPointer:
__ mov(i.OutputRegister(), sp);
break;
+ case kArchFramePointer:
+ __ mov(i.OutputRegister(), fp);
+ break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
break;
case kArchStackPointer:
__ mov(i.OutputRegister(), sp);
break;
+ case kArchFramePointer:
+ __ mov(i.OutputRegister(), fp);
+ break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
break;
V(Float64InsertLowWord32) \
V(Float64InsertHighWord32) \
V(LoadStackPointer) \
+ V(LoadFramePointer) \
V(CheckedLoad) \
V(CheckedStore)
__ mr(i.OutputRegister(), sp);
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
+ case kArchFramePointer:
+ __ mr(i.OutputRegister(), fp);
+ DCHECK_EQ(LeaveRC, i.OutputRCBit());
+ break;
case kArchTruncateDoubleToI:
// TODO(mbrandy): move slow call to stub out of line.
__ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
// Stack operations.
Node* LoadStackPointer() { return NewNode(machine()->LoadStackPointer()); }
+ Node* LoadFramePointer() { return NewNode(machine()->LoadFramePointer()); }
// Parameters.
Node* Parameter(size_t index);
case IrOpcode::kFloat64InsertHighWord32:
return VisitBinop(node, kMachFloat64, kMachInt32, kMachFloat64);
case IrOpcode::kLoadStackPointer:
+ case IrOpcode::kLoadFramePointer:
return VisitLeaf(node, kMachPtr);
case IrOpcode::kStateValues:
VisitStateValues(node);
}
+// The frame pointer is a raw machine address, so type it as Internal.
+Bounds Typer::Visitor::TypeLoadFramePointer(Node* node) {
+ return Bounds(Type::Internal());
+}
+
+
// A checked load may produce any machine value, so leave it unbounded.
Bounds Typer::Visitor::TypeCheckedLoad(Node* node) {
  return Bounds::Unbounded(zone());
}
case IrOpcode::kFloat64InsertLowWord32:
case IrOpcode::kFloat64InsertHighWord32:
case IrOpcode::kLoadStackPointer:
+ case IrOpcode::kLoadFramePointer:
case IrOpcode::kCheckedLoad:
case IrOpcode::kCheckedStore:
// TODO(rossberg): Check.
case kArchStackPointer:
__ movq(i.OutputRegister(), rsp);
break;
+ case kArchFramePointer:
+ __ movq(i.OutputRegister(), rbp);
+ break;
case kArchTruncateDoubleToI: {
auto result = i.OutputRegister();
auto input = i.InputDoubleRegister(0);
CONVERT_ARG_CHECKED(JSFunction, function, 0);
return function->shared()->feedback_vector();
}
+
+
+
+
+// Runtime fallback for %_GetCallerJSFunction: only legal when invoked from
+// a STUB frame whose immediate caller is a JavaScript frame; otherwise the
+// RUNTIME_ASSERTs fail (e.g. when called from an ordinary JS function).
+RUNTIME_FUNCTION(Runtime_GetCallerJSFunction) {
+ SealHandleScope shs(isolate);
+ StackFrameIterator it(isolate);
+ RUNTIME_ASSERT(it.frame()->type() == StackFrame::STUB);
+ it.Advance();
+ RUNTIME_ASSERT(it.frame()->type() == StackFrame::JAVA_SCRIPT);
+ return JavaScriptFrame::cast(it.frame())->function();
+}
} // namespace internal
} // namespace v8
F(Likely, 1, 1) \
F(Unlikely, 1, 1) \
F(HarmonyToString, 0, 1) \
- F(GetTypeFeedbackVector, 1, 1)
+ F(GetTypeFeedbackVector, 1, 1) \
+ F(GetCallerJSFunction, 0, 1)
#define FOR_EACH_INTRINSIC_JSON(F) \
--- /dev/null
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax --noalways-opt --nostress-opt
+
+// Ensure that "real" js functions that call GetCallerJSFunction get an
+// exception, since they are not stubs.
+(function() {
+ // `a` is a plain (non-stub) JS function, so the runtime's STUB-frame
+ // check fails and the intrinsic call throws.
+ var a = function() {
+ return %_GetCallerJSFunction();
+ }
+ assertThrows(a);
+}());
--- /dev/null
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax --turbo-filter=* --nostress-opt
+
+// Test that for fully optimized but non inlined code, GetCallerJSFunction walks
+// up a single stack frame to get the calling function. Full optimization elides
+// the check in the runtime version of the intrinsic that would throw since the
+// caller isn't a stub. It's a bit of a hack, but allows minimal testing of the
+// intrinsic without writing a full-blown cctest.
+(function() {
+ var a = function() {
+ return %_GetCallerJSFunction();
+ };
+ var b = function() {
+ return a();
+ };
+ // Optimizing `a` makes TurboFan lower the intrinsic to direct frame-walk
+ // loads (skipping the runtime's stub check), so it returns its caller `b`.
+ %OptimizeFunctionOnNextCall(a);
+ assertEquals(b, b());
+}());