}
-static void CallFunctionNoFeedback(MacroAssembler* masm,
- int argc, bool needs_checks,
- bool call_as_method) {
+void CallFunctionStub::Generate(MacroAssembler* masm) {
// r1 : the function to call
Label slow, non_function, wrap, cont;
- if (needs_checks) {
+ if (NeedsChecks()) {
// Check that the function is really a JavaScript function.
// r1: pushed function (to be verified)
__ JumpIfSmi(r1, &non_function);
// Fast-case: Invoke the function now.
// r1: pushed function
+ int argc = argc_;
ParameterCount actual(argc);
- if (call_as_method) {
- if (needs_checks) {
+ if (CallAsMethod()) {
+ if (NeedsChecks()) {
EmitContinueIfStrictOrNative(masm, &cont);
}
// Compute the receiver in sloppy mode.
__ ldr(r3, MemOperand(sp, argc * kPointerSize));
- if (needs_checks) {
+ if (NeedsChecks()) {
__ JumpIfSmi(r3, &wrap);
__ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
__ b(lt, &wrap);
__ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());
- if (needs_checks) {
+ if (NeedsChecks()) {
// Slow-case: Non-function called.
__ bind(&slow);
EmitSlowCase(masm, argc, &non_function);
}
- if (call_as_method) {
+ if (CallAsMethod()) {
__ bind(&wrap);
EmitWrapCase(masm, argc, &cont);
}
}
-void CallFunctionStub::Generate(MacroAssembler* masm) {
- CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
-}
-
-
void CallConstructStub::Generate(MacroAssembler* masm) {
// r0 : number of arguments
// r1 : the function to call
__ bind(&do_call);
// Set expected number of arguments to zero (not changing r0).
__ mov(r2, Operand::Zero());
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
}
-void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
- // r1 - function
- // r2 - feedback vector
- // r3 - slot id
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
- __ cmp(r1, r4);
- __ b(ne, miss);
-
- __ mov(r0, Operand(arg_count()));
- __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
- __ ldr(r2, FieldMemOperand(r4, FixedArray::kHeaderSize));
- // Verify that r2 contains an AllocationSite
- __ AssertUndefinedOrAllocationSite(r2, r4);
- ArrayConstructorStub stub(masm->isolate(), arg_count());
- __ TailCallStub(&stub);
-}
-
-
-void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
- // r1 - function
- // r2 - feedback vector
- // r3 - slot id
- Label miss;
-
- if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
- Generate_MonomorphicArray(masm, &miss);
- } else {
- // So far there is only one customer for our custom feedback scheme.
- UNREACHABLE();
- }
-
- __ bind(&miss);
- GenerateMiss(masm);
-
- // The slow case, we need this no matter what to complete a call after a miss.
- CallFunctionNoFeedback(masm,
- arg_count(),
- true,
- CallAsMethod());
-
- // Unreachable.
- __ stop("Unexpected code address");
-}
-
-
void CallICStub::Generate(MacroAssembler* masm) {
// r1 - function
// r3 - slot id (Smi)
EmitLoadTypeFeedbackVector(masm, r2);
- if (state_.stub_type() != CallIC::DEFAULT) {
- Generate_CustomFeedbackCall(masm);
- return;
- }
-
// The checks. First, does r1 match the recorded monomorphic target?
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
}
-static void CallFunctionNoFeedback(MacroAssembler* masm,
- int argc, bool needs_checks,
- bool call_as_method) {
+void CallFunctionStub::Generate(MacroAssembler* masm) {
+ ASM_LOCATION("CallFunctionStub::Generate");
// x1 function the function to call
+
Register function = x1;
Register type = x4;
Label slow, non_function, wrap, cont;
// TODO(jbramley): This function has a lot of unnamed registers. Name them,
// and tidy things up a bit.
- if (needs_checks) {
+ if (NeedsChecks()) {
// Check that the function is really a JavaScript function.
__ JumpIfSmi(function, &non_function);
// Fast-case: Invoke the function now.
// x1 function pushed function
+ int argc = argc_;
ParameterCount actual(argc);
- if (call_as_method) {
- if (needs_checks) {
+ if (CallAsMethod()) {
+ if (NeedsChecks()) {
EmitContinueIfStrictOrNative(masm, &cont);
}
// Compute the receiver in sloppy mode.
__ Peek(x3, argc * kPointerSize);
- if (needs_checks) {
+ if (NeedsChecks()) {
__ JumpIfSmi(x3, &wrap);
__ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt);
} else {
actual,
JUMP_FUNCTION,
NullCallWrapper());
- if (needs_checks) {
+
+ if (NeedsChecks()) {
// Slow-case: Non-function called.
__ Bind(&slow);
EmitSlowCase(masm, argc, function, type, &non_function);
}
- if (call_as_method) {
+ if (CallAsMethod()) {
__ Bind(&wrap);
EmitWrapCase(masm, argc, &cont);
}
}
-void CallFunctionStub::Generate(MacroAssembler* masm) {
- ASM_LOCATION("CallFunctionStub::Generate");
- CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
-}
-
-
void CallConstructStub::Generate(MacroAssembler* masm) {
ASM_LOCATION("CallConstructStub::Generate");
// x0 : number of arguments
}
-void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
- // x1 - function
- // x2 - feedback vector
- // x3 - slot id
- Register function = x1;
- Register feedback_vector = x2;
- Register index = x3;
- Register scratch = x4;
-
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch);
- __ Cmp(function, scratch);
- __ B(ne, miss);
-
- Register allocation_site = feedback_vector;
- __ Mov(x0, Operand(arg_count()));
-
- __ Add(scratch, feedback_vector,
- Operand::UntagSmiAndScale(index, kPointerSizeLog2));
- __ Ldr(allocation_site, FieldMemOperand(scratch, FixedArray::kHeaderSize));
-
- // Verify that x2 contains an AllocationSite
- __ AssertUndefinedOrAllocationSite(allocation_site, scratch);
- ArrayConstructorStub stub(masm->isolate(), arg_count());
- __ TailCallStub(&stub);
-}
-
-
-void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
- // x1 - function
- // x2 - feedback vector
- // x3 - slot id
- Label miss;
-
- if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
- Generate_MonomorphicArray(masm, &miss);
- } else {
- // So far there is only one customer for our custom feedback scheme.
- UNREACHABLE();
- }
-
- __ bind(&miss);
- GenerateMiss(masm);
-
- // The slow case, we need this no matter what to complete a call after a miss.
- CallFunctionNoFeedback(masm,
- arg_count(),
- true,
- CallAsMethod());
-
- __ Unreachable();
-}
-
-
void CallICStub::Generate(MacroAssembler* masm) {
ASM_LOCATION("CallICStub");
EmitLoadTypeFeedbackVector(masm, feedback_vector);
- if (state_.stub_type() != CallIC::DEFAULT) {
- Generate_CustomFeedbackCall(masm);
- return;
- }
-
// The checks. First, does x1 match the recorded monomorphic target?
__ Add(x4, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
return !target_.is_null();
}
- bool global_call() const {
- VariableProxy* proxy = expression_->AsVariableProxy();
- return proxy != NULL && proxy->var()->IsUnallocated();
- }
-
- bool known_global_function() const {
- return global_call() && !target_.is_null();
- }
-
Handle<JSFunction> target() { return target_; }
Handle<Cell> cell() { return cell_; }
- Handle<AllocationSite> allocation_site() { return allocation_site_; }
-
void set_target(Handle<JSFunction> target) { target_ = target; }
- void set_allocation_site(Handle<AllocationSite> site) {
- allocation_site_ = site;
- }
bool ComputeGlobalTarget(Handle<GlobalObject> global, LookupResult* lookup);
BailoutId ReturnId() const { return return_id_; }
Handle<JSFunction> target_;
Handle<Cell> cell_;
- Handle<AllocationSite> allocation_site_;
int call_feedback_slot_;
const BailoutId return_id_;
// Code generation helpers.
void GenerateMiss(MacroAssembler* masm);
- void Generate_CustomFeedbackCall(MacroAssembler* masm);
- void Generate_MonomorphicArray(MacroAssembler* masm, Label* miss);
CallIC::State state_;
};
}
// Special loop unfolding case
- STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
- kElementLoopUnrollThreshold);
+ static const int kLoopUnfoldLimit = 8;
+ STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
int initial_capacity = -1;
if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
int constant_from = from->GetInteger32Constant();
int constant_to = to->GetInteger32Constant();
- if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
+ if (constant_from == 0 && constant_to <= kLoopUnfoldLimit) {
initial_capacity = constant_to;
}
}
}
-void HOptimizedGraphBuilder::BuildArrayCall(Expression* expression,
- int arguments_count,
- HValue* function,
- Handle<AllocationSite> site) {
- Add<HCheckValue>(function, array_function());
-
- if (IsCallArrayInlineable(arguments_count, site)) {
- BuildInlinedCallArray(expression, arguments_count, site);
- return;
- }
-
- HInstruction* call = PreProcessCall(New<HCallNewArray>(
- function, arguments_count + 1, site->GetElementsKind()));
- if (expression->IsCall()) {
- Drop(1);
- }
- ast_context()->ReturnInstruction(call, expression->id());
-}
-
-
-bool HOptimizedGraphBuilder::TryHandleArrayCall(Call* expr, HValue* function) {
- if (!array_function().is_identical_to(expr->target())) {
- return false;
- }
-
- Handle<AllocationSite> site = expr->allocation_site();
- if (site.is_null()) return false;
-
- BuildArrayCall(expr,
- expr->arguments()->length(),
- function,
- site);
- return true;
-}
-
-
-bool HOptimizedGraphBuilder::TryHandleArrayCallNew(CallNew* expr,
- HValue* function) {
- if (!array_function().is_identical_to(expr->target())) {
- return false;
- }
-
- BuildArrayCall(expr,
- expr->arguments()->length(),
- function,
- expr->allocation_site());
- return true;
-}
-
-
void HOptimizedGraphBuilder::VisitCall(Call* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
// evaluation of the arguments.
CHECK_ALIVE(VisitForValue(expr->expression()));
HValue* function = Top();
- if (expr->global_call()) {
+ bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
+ if (global_call) {
Variable* var = proxy->var();
bool known_global_function = false;
// If there is a global property cell for the name at compile time and
return;
}
if (TryInlineApiFunctionCall(expr, receiver)) return;
- if (TryHandleArrayCall(expr, function)) return;
if (TryInlineCall(expr)) return;
PushArgumentsFromEnvironment(argument_count);
}
-void HOptimizedGraphBuilder::BuildInlinedCallArray(
- Expression* expression,
- int argument_count,
- Handle<AllocationSite> site) {
- ASSERT(!site.is_null());
- ASSERT(argument_count >= 0 && argument_count <= 1);
+void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
NoObservableSideEffectsScope no_effects(this);
+ int argument_count = expr->arguments()->length();
// We should at least have the constructor on the expression stack.
HValue* constructor = environment()->ExpressionStackAt(argument_count);
+ ElementsKind kind = expr->elements_kind();
+ Handle<AllocationSite> site = expr->allocation_site();
+ ASSERT(!site.is_null());
+
// Register on the site for deoptimization if the transition feedback changes.
AllocationSite::AddDependentCompilationInfo(
site, AllocationSite::TRANSITIONS, top_info());
- ElementsKind kind = site->GetElementsKind();
HInstruction* site_instruction = Add<HConstant>(site);
// In the single constant argument case, we may have to adjust elements kind
site_instruction,
constructor,
DISABLE_ALLOCATION_SITES);
- HValue* new_object = argument_count == 0
- ? array_builder.AllocateEmptyArray()
- : BuildAllocateArrayFromLength(&array_builder, Top());
-
- int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
- Drop(args_to_drop);
+ HValue* new_object;
+ if (argument_count == 0) {
+ new_object = array_builder.AllocateEmptyArray();
+ } else if (argument_count == 1) {
+ HValue* argument = environment()->Top();
+ new_object = BuildAllocateArrayFromLength(&array_builder, argument);
+ } else {
+ HValue* length = Add<HConstant>(argument_count);
+ // Smi arrays need to initialize array elements with the hole because
+ // bailout could occur if the arguments don't fit in a smi.
+ //
+ // TODO(mvstanton): If all the arguments are constants in smi range, then
+ // we could set fill_with_hole to false and save a few instructions.
+ JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
+ ? JSArrayBuilder::FILL_WITH_HOLE
+ : JSArrayBuilder::DONT_FILL_WITH_HOLE;
+ new_object = array_builder.AllocateArray(length, length, fill_mode);
+ HValue* elements = array_builder.GetElementsLocation();
+ for (int i = 0; i < argument_count; i++) {
+ HValue* value = environment()->ExpressionStackAt(argument_count - i - 1);
+ HValue* constant_i = Add<HConstant>(i);
+ Add<HStoreKeyed>(elements, constant_i, value, kind);
+ }
+ }
+
+ Drop(argument_count + 1); // drop constructor and args.
ast_context()->ReturnValue(new_object);
}
}
-bool HOptimizedGraphBuilder::IsCallArrayInlineable(
- int argument_count,
- Handle<AllocationSite> site) {
+bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
Handle<JSFunction> caller = current_info()->closure();
- Handle<JSFunction> target = array_function();
+ Handle<JSFunction> target(isolate()->native_context()->array_function(),
+ isolate());
+ int argument_count = expr->arguments()->length();
// We should have the function plus array arguments on the environment stack.
ASSERT(environment()->length() >= (argument_count + 1));
+ Handle<AllocationSite> site = expr->allocation_site();
ASSERT(!site.is_null());
bool inline_ok = false;
HValue* argument = Top();
if (argument->IsConstant()) {
// Do not inline if the constant length argument is not a smi or
- // outside the valid range for unrolled loop initialization.
+ // outside the valid range for a fast array.
HConstant* constant_argument = HConstant::cast(argument);
if (constant_argument->HasSmiValue()) {
int value = constant_argument->Integer32Value();
- inline_ok = value >= 0 && value <= kElementLoopUnrollThreshold;
+ inline_ok = value >= 0 &&
+ value < JSObject::kInitialMaxFastElementArray;
if (!inline_ok) {
TraceInline(target, caller,
- "Constant length outside of valid inlining range.");
+ "Length outside of valid array range");
}
}
} else {
- TraceInline(target, caller,
- "Dont inline [new] Array(n) where n isn't constant.");
+ inline_ok = true;
}
- } else if (argument_count == 0) {
- inline_ok = true;
} else {
- TraceInline(target, caller, "Too many arguments to inline.");
+ inline_ok = true;
}
} else {
TraceInline(target, caller, "AllocationSite requested no inlining.");
} else {
// The constructor function is both an operand to the instruction and an
// argument to the construct call.
- if (TryHandleArrayCallNew(expr, function)) return;
+ Handle<JSFunction> array_function(
+ isolate()->native_context()->array_function(), isolate());
+ bool use_call_new_array = expr->target().is_identical_to(array_function);
+ if (use_call_new_array && IsCallNewArrayInlineable(expr)) {
+ // Verify we are still calling the array function for our native context.
+ Add<HCheckValue>(function, array_function);
+ BuildInlinedCallNewArray(expr);
+ return;
+ }
- HInstruction* call =
- PreProcessCall(New<HCallNew>(function, argument_count));
+ HBinaryCall* call;
+ if (use_call_new_array) {
+ Add<HCheckValue>(function, array_function);
+ call = New<HCallNewArray>(function, argument_count,
+ expr->elements_kind());
+ } else {
+ call = New<HCallNew>(function, argument_count);
+ }
+ PreProcessCall(call);
return ast_context()->ReturnInstruction(call, expr->id());
}
}
void AddSimulate(BailoutId id, RemovableSimulate removable = FIXED_SIMULATE);
- // When initializing arrays, we'll unfold the loop if the number of elements
- // is known at compile time and is <= kElementLoopUnrollThreshold.
- static const int kElementLoopUnrollThreshold = 8;
-
protected:
virtual bool BuildGraph() = 0;
// Try to optimize fun.apply(receiver, arguments) pattern.
bool TryCallApply(Call* expr);
- bool TryHandleArrayCall(Call* expr, HValue* function);
- bool TryHandleArrayCallNew(CallNew* expr, HValue* function);
- void BuildArrayCall(Expression* expr, int arguments_count, HValue* function,
- Handle<AllocationSite> cell);
-
HValue* ImplicitReceiverFor(HValue* function,
Handle<JSFunction> target);
ElementsKind fixed_elements_kind,
HValue* byte_length, HValue* length);
- Handle<JSFunction> array_function() {
- return handle(isolate()->native_context()->array_function());
- }
-
- bool IsCallArrayInlineable(int argument_count, Handle<AllocationSite> site);
- void BuildInlinedCallArray(Expression* expression, int argument_count,
- Handle<AllocationSite> site);
+ bool IsCallNewArrayInlineable(CallNew* expr);
+ void BuildInlinedCallNewArray(CallNew* expr);
class PropertyAccessInfo {
public:
}
-static void CallFunctionNoFeedback(MacroAssembler* masm,
- int argc, bool needs_checks,
- bool call_as_method) {
+void CallFunctionStub::Generate(MacroAssembler* masm) {
// edi : the function to call
Label slow, non_function, wrap, cont;
- if (needs_checks) {
+ if (NeedsChecks()) {
// Check that the function really is a JavaScript function.
__ JumpIfSmi(edi, &non_function);
}
// Fast-case: Just invoke the function.
- ParameterCount actual(argc);
+ ParameterCount actual(argc_);
- if (call_as_method) {
- if (needs_checks) {
+ if (CallAsMethod()) {
+ if (NeedsChecks()) {
EmitContinueIfStrictOrNative(masm, &cont);
}
// Load the receiver from the stack.
- __ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
+ __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
- if (call_as_method) {
+ if (NeedsChecks()) {
__ JumpIfSmi(eax, &wrap);
__ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
__ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
- if (needs_checks) {
+ if (NeedsChecks()) {
// Slow-case: Non-function called.
__ bind(&slow);
// (non_function is bound in EmitSlowCase)
- EmitSlowCase(masm->isolate(), masm, argc, &non_function);
+ EmitSlowCase(isolate(), masm, argc_, &non_function);
}
- if (call_as_method) {
+ if (CallAsMethod()) {
__ bind(&wrap);
- EmitWrapCase(masm, argc, &cont);
+ EmitWrapCase(masm, argc_, &cont);
}
}
-void CallFunctionStub::Generate(MacroAssembler* masm) {
- CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
-}
-
-
void CallConstructStub::Generate(MacroAssembler* masm) {
// eax : number of arguments
// ebx : feedback vector
}
-void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
- // edi - function
- // ebx - feedback vector
- // edx - slot id
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
- __ cmp(edi, ecx);
- __ j(not_equal, miss);
-
- __ mov(eax, arg_count());
- __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
- FixedArray::kHeaderSize));
- // Verify that ecx contains an AllocationSite
- __ AssertUndefinedOrAllocationSite(ebx);
- ArrayConstructorStub stub(masm->isolate(), arg_count());
- __ TailCallStub(&stub);
-}
-
-
-void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
- // edi - function
- // ebx - feedback vector
- // edx - slot id
- Label miss;
-
- if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
- Generate_MonomorphicArray(masm, &miss);
- } else {
- // So far there is only one customer for our custom feedback scheme.
- UNREACHABLE();
- }
-
- __ bind(&miss);
- GenerateMiss(masm);
-
- // The slow case, we need this no matter what to complete a call after a miss.
- CallFunctionNoFeedback(masm,
- arg_count(),
- true,
- CallAsMethod());
-
- // Unreachable.
- __ int3();
-}
-
-
void CallICStub::Generate(MacroAssembler* masm) {
// edi - function
// edx - slot id
EmitLoadTypeFeedbackVector(masm, ebx);
- if (state_.stub_type() != CallIC::DEFAULT) {
- Generate_CustomFeedbackCall(masm);
- return;
- }
-
// The checks. First, does edi match the recorded monomorphic target?
__ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
Code* target,
ConstantPoolArray* constant_pool) {
// Currently, CallIC doesn't have state changes.
- if (target->ic_state() != v8::internal::MONOMORPHIC) return;
- CallIC::State existing_state(target->extra_ic_state());
-
- // Monomorphic array stubs don't need to be cleared because
- // 1) the stub doesn't store information that should be cleared, and
- // 2) the AllocationSite stored in the type feedback vector is immune
- // from gc type feedback clearing.
- ASSERT(existing_state.stub_type() == MONOMORPHIC_ARRAY);
+ ASSERT(target->ic_state() == v8::internal::GENERIC);
}
CallIC::State::State(ExtraICState extra_ic_state)
: argc_(ArgcBits::decode(extra_ic_state)),
- call_type_(CallTypeBits::decode(extra_ic_state)),
- stub_type_(StubTypeBits::decode(extra_ic_state)) {
+ call_type_(CallTypeBits::decode(extra_ic_state)) {
}
ExtraICState CallIC::State::GetExtraICState() const {
ExtraICState extra_ic_state =
ArgcBits::encode(argc_) |
- CallTypeBits::encode(call_type_) |
- StubTypeBits::encode(stub_type_);
+ CallTypeBits::encode(call_type_);
return extra_ic_state;
}
-bool CallIC::DoCustomHandler(Handle<Object> receiver,
- Handle<Object> function,
- Handle<FixedArray> vector,
- Handle<Smi> slot,
- const State& state) {
- ASSERT(function->IsJSFunction());
- // Are we the array function?
- Handle<JSFunction> array_function = Handle<JSFunction>(
- isolate()->context()->native_context()->array_function(), isolate());
- if (array_function.is_identical_to(Handle<JSFunction>::cast(function))) {
- // Alter the slot.
- Handle<AllocationSite> new_site = isolate()->factory()->NewAllocationSite();
- vector->set(slot->value(), *new_site);
- State new_state = state.ToMonomorphicArrayCallState();
- CallICStub stub(isolate(), new_state);
- set_target(*stub.GetCode());
- Handle<String> name;
- if (array_function->shared()->name()->IsString()) {
- name = Handle<String>(String::cast(array_function->shared()->name()),
- isolate());
- }
-
- TRACE_IC("CallIC (Array call)", name);
- return true;
- }
- return false;
-}
-
-
void CallIC::HandleMiss(Handle<Object> receiver,
Handle<Object> function,
Handle<FixedArray> vector,
State state(target()->extra_ic_state());
Object* feedback = vector->get(slot->value());
- if (feedback->IsJSFunction() || !function->IsJSFunction() ||
- state.stub_type() != DEFAULT) {
+ if (feedback->IsJSFunction() || !function->IsJSFunction()) {
// We are going generic.
+ ASSERT(!function->IsJSFunction() || *function != feedback);
+
vector->set(slot->value(),
*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
SKIP_WRITE_BARRIER);
-
- State new_state = state.ToGenericState();
- if (new_state != state) {
- // Only happens when the array ic goes generic.
- ASSERT(state.stub_type() == MONOMORPHIC_ARRAY);
- CallICStub stub(isolate(), new_state);
- Handle<Code> code = stub.GetCode();
- set_target(*code);
- }
-
TRACE_GENERIC_IC(isolate(), "CallIC", "megamorphic");
} else {
// If we came here feedback must be the uninitialized sentinel,
// and we are going monomorphic.
ASSERT(feedback == *TypeFeedbackInfo::UninitializedSentinel(isolate()));
-
- // Do we want to install a custom handler?
- if (DoCustomHandler(receiver, function, vector, slot, state)) {
- return;
- }
-
Handle<JSFunction> js_function = Handle<JSFunction>::cast(function);
Handle<Object> name(js_function->shared()->name(), isolate());
TRACE_IC("CallIC", name);
class CallIC: public IC {
public:
enum CallType { METHOD, FUNCTION };
- enum StubType { DEFAULT, MONOMORPHIC_ARRAY };
class State V8_FINAL BASE_EMBEDDED {
public:
explicit State(ExtraICState extra_ic_state);
- static State MonomorphicArrayCallState(int argc, CallType call_type) {
- return State(argc, call_type, MONOMORPHIC_ARRAY);
- }
-
static State DefaultCallState(int argc, CallType call_type) {
- return State(argc, call_type, DEFAULT);
+ return State(argc, call_type);
}
- // Transition from the current state to another.
- State ToGenericState() const {
- return DefaultCallState(arg_count(), call_type());
+ static State MegamorphicCallState(int argc, CallType call_type) {
+ return State(argc, call_type);
}
- State ToMonomorphicArrayCallState() const {
- return MonomorphicArrayCallState(arg_count(), call_type());
- }
-
- InlineCacheState GetICState() const {
- return stub_type_ == CallIC::DEFAULT
- ? ::v8::internal::GENERIC
- : ::v8::internal::MONOMORPHIC;
- }
+ InlineCacheState GetICState() const { return ::v8::internal::GENERIC; }
ExtraICState GetExtraICState() const;
int arg_count() const { return argc_; }
CallType call_type() const { return call_type_; }
- StubType stub_type() const { return stub_type_; }
bool CallAsMethod() const { return call_type_ == METHOD; }
}
private:
- State(int argc, CallType call_type, StubType stub_type)
+ State(int argc,
+ CallType call_type)
: argc_(argc),
- call_type_(call_type),
- stub_type_(stub_type) {
+ call_type_(call_type) {
}
class ArgcBits: public BitField<int, 0, Code::kArgumentsBits> {};
class CallTypeBits: public BitField<CallType, Code::kArgumentsBits, 1> {};
- class StubTypeBits:
- public BitField<StubType, Code::kArgumentsBits + 1, 1> {}; // NOLINT
const int argc_;
const CallType call_type_;
- const StubType stub_type_;
};
explicit CallIC(Isolate* isolate)
Handle<FixedArray> vector,
Handle<Smi> slot);
- // Returns true if a custom handler was installed.
- bool DoCustomHandler(Handle<Object> receiver,
- Handle<Object> function,
- Handle<FixedArray> vector,
- Handle<Smi> slot,
- const State& new_state);
-
// Code generator routines.
static Handle<Code> initialize_stub(Isolate* isolate,
int argc,
}
-static void CallFunctionNoFeedback(MacroAssembler* masm,
- int argc, bool needs_checks,
- bool call_as_method) {
+void CallFunctionStub::Generate(MacroAssembler* masm) {
// a1 : the function to call
Label slow, non_function, wrap, cont;
- if (needs_checks) {
+ if (NeedsChecks()) {
// Check that the function is really a JavaScript function.
// a1: pushed function (to be verified)
__ JumpIfSmi(a1, &non_function);
// Fast-case: Invoke the function now.
// a1: pushed function
+ int argc = argc_;
ParameterCount actual(argc);
- if (call_as_method) {
- if (needs_checks) {
+ if (CallAsMethod()) {
+ if (NeedsChecks()) {
EmitContinueIfStrictOrNative(masm, &cont);
}
// Compute the receiver in sloppy mode.
__ lw(a3, MemOperand(sp, argc * kPointerSize));
- if (needs_checks) {
+ if (NeedsChecks()) {
__ JumpIfSmi(a3, &wrap);
__ GetObjectType(a3, t0, t0);
__ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE));
__ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper());
- if (needs_checks) {
+ if (NeedsChecks()) {
// Slow-case: Non-function called.
__ bind(&slow);
EmitSlowCase(masm, argc, &non_function);
}
- if (call_as_method) {
+ if (CallAsMethod()) {
__ bind(&wrap);
// Wrap the receiver and patch it back onto the stack.
EmitWrapCase(masm, argc, &cont);
}
-void CallFunctionStub::Generate(MacroAssembler* masm) {
- CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
-}
-
-
void CallConstructStub::Generate(MacroAssembler* masm) {
// a0 : number of arguments
// a1 : the function to call
__ bind(&do_call);
// Set expected number of arguments to zero (not changing r0).
__ li(a2, Operand(0, RelocInfo::NONE32));
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
- RelocInfo::CODE_TARGET);
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ RelocInfo::CODE_TARGET);
}
}
-void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
- // a1 - function
- // a2 - feedback vector
- // a3 - slot id
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, at);
- __ Branch(miss, ne, a1, Operand(at));
-
- __ li(a0, Operand(arg_count()));
- __ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
- __ Addu(at, a2, Operand(at));
- __ lw(a2, FieldMemOperand(at, FixedArray::kHeaderSize));
- // Verify that a2 contains an AllocationSite
- __ AssertUndefinedOrAllocationSite(a2, at);
- ArrayConstructorStub stub(masm->isolate(), arg_count());
- __ TailCallStub(&stub);
-}
-
-
-void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
- // a1 - function
- // a2 - feedback vector
- // a3 - slot id
- Label miss;
-
- if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
- Generate_MonomorphicArray(masm, &miss);
- } else {
- // So far there is only one customer for our custom feedback scheme.
- UNREACHABLE();
- }
-
- __ bind(&miss);
- GenerateMiss(masm);
-
- // The slow case, we need this no matter what to complete a call after a miss.
- CallFunctionNoFeedback(masm,
- arg_count(),
- true,
- CallAsMethod());
-
- // Unreachable.
- __ stop("Unexpected code address");
-}
-
-
void CallICStub::Generate(MacroAssembler* masm) {
// r1 - function
// r3 - slot id (Smi)
EmitLoadTypeFeedbackVector(masm, a2);
- if (state_.stub_type() != CallIC::DEFAULT) {
- Generate_CustomFeedbackCall(masm);
- return;
- }
-
// The checks. First, does r1 match the recorded monomorphic target?
__ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t0, a2, Operand(t0));
void SharedFunctionInfo::ClearTypeFeedbackInfo() {
FixedArray* vector = feedback_vector();
Heap* heap = GetHeap();
- int length = vector->length();
-
- for (int i = 0; i < length; i++) {
+ for (int i = 0; i < vector->length(); i++) {
Object* obj = vector->get(i);
- if (obj->IsHeapObject()) {
- InstanceType instance_type =
- HeapObject::cast(obj)->map()->instance_type();
- switch (instance_type) {
- case ALLOCATION_SITE_TYPE:
- // AllocationSites are not cleared because they do not store
- // information that leaks.
- break;
- case JS_FUNCTION_TYPE:
- // No need to clear the native context array function.
- if (obj == JSFunction::cast(obj)->context()->native_context()->
- get(Context::ARRAY_FUNCTION_INDEX)) {
- break;
- }
- // Fall through...
-
- default:
- vector->set(i, TypeFeedbackInfo::RawUninitializedSentinel(heap),
- SKIP_WRITE_BARRIER);
- }
+ if (!obj->IsAllocationSite()) {
+ vector->set(
+ i,
+ TypeFeedbackInfo::RawUninitializedSentinel(heap),
+ SKIP_WRITE_BARRIER);
}
}
}
bool TypeFeedbackOracle::CallIsMonomorphic(int slot) {
Handle<Object> value = GetInfo(slot);
- return value->IsAllocationSite() || value->IsJSFunction();
+ return FLAG_pretenuring_call_new
+ ? value->IsJSFunction()
+ : value->IsAllocationSite() || value->IsJSFunction();
}
Handle<JSFunction> TypeFeedbackOracle::GetCallTarget(int slot) {
Handle<Object> info = GetInfo(slot);
- if (info->IsAllocationSite()) {
- ASSERT(!FLAG_pretenuring_call_new);
- return Handle<JSFunction>(isolate()->native_context()->array_function());
- } else {
+ if (FLAG_pretenuring_call_new || info->IsJSFunction()) {
return Handle<JSFunction>::cast(info);
}
}
-Handle<AllocationSite> TypeFeedbackOracle::GetCallAllocationSite(int slot) {
- Handle<Object> info = GetInfo(slot);
- if (info->IsAllocationSite()) {
- return Handle<AllocationSite>::cast(info);
- }
- return Handle<AllocationSite>::null();
-}
-
-
Handle<AllocationSite> TypeFeedbackOracle::GetCallNewAllocationSite(int slot) {
Handle<Object> info = GetInfo(slot);
if (FLAG_pretenuring_call_new || info->IsAllocationSite()) {
Context* native_context);
Handle<JSFunction> GetCallTarget(int slot);
- Handle<AllocationSite> GetCallAllocationSite(int slot);
Handle<JSFunction> GetCallNewTarget(int slot);
Handle<AllocationSite> GetCallNewAllocationSite(int slot);
expr->IsUsingCallFeedbackSlot(isolate()) &&
oracle()->CallIsMonomorphic(expr->CallFeedbackSlot())) {
expr->set_target(oracle()->GetCallTarget(expr->CallFeedbackSlot()));
- Handle<AllocationSite> site =
- oracle()->GetCallAllocationSite(expr->CallFeedbackSlot());
- expr->set_allocation_site(site);
}
ZoneList<Expression*>* args = expr->arguments();
}
-static void CallFunctionNoFeedback(MacroAssembler* masm,
- int argc, bool needs_checks,
- bool call_as_method) {
+void CallFunctionStub::Generate(MacroAssembler* masm) {
// rdi : the function to call
// wrap_and_call can only be true if we are compiling a monomorphic method.
Isolate* isolate = masm->isolate();
Label slow, non_function, wrap, cont;
+ int argc = argc_;
StackArgumentsAccessor args(rsp, argc);
- if (needs_checks) {
+ if (NeedsChecks()) {
// Check that the function really is a JavaScript function.
__ JumpIfSmi(rdi, &non_function);
// Fast-case: Just invoke the function.
ParameterCount actual(argc);
- if (call_as_method) {
- if (needs_checks) {
+ if (CallAsMethod()) {
+ if (NeedsChecks()) {
EmitContinueIfStrictOrNative(masm, &cont);
}
// Load the receiver from the stack.
__ movp(rax, args.GetReceiverOperand());
- if (needs_checks) {
+ if (NeedsChecks()) {
__ JumpIfSmi(rax, &wrap);
__ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
__ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
- if (needs_checks) {
+ if (NeedsChecks()) {
// Slow-case: Non-function called.
__ bind(&slow);
EmitSlowCase(isolate, masm, &args, argc, &non_function);
}
- if (call_as_method) {
+ if (CallAsMethod()) {
__ bind(&wrap);
EmitWrapCase(masm, &args, &cont);
}
}
-void CallFunctionStub::Generate(MacroAssembler* masm) {
- CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
-}
-
-
void CallConstructStub::Generate(MacroAssembler* masm) {
// rax : number of arguments
// rbx : feedback vector
}
-void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
- // rdi - function
- // rbx - feedback vector
- // rdx - slot id (as integer)
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
- __ cmpq(rdi, rcx);
- __ j(not_equal, miss);
-
- __ movq(rax, Immediate(arg_count()));
- __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
- FixedArray::kHeaderSize));
-
- // Verify that ecx contains an AllocationSite
- __ AssertUndefinedOrAllocationSite(rbx);
- ArrayConstructorStub stub(masm->isolate(), arg_count());
- __ TailCallStub(&stub);
-}
-
-
-void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
- // rdi - function
- // rbx - feedback vector
- // rdx - slot id
- Label miss;
-
- __ SmiToInteger32(rdx, rdx);
-
- if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
- Generate_MonomorphicArray(masm, &miss);
- } else {
- // So far there is only one customer for our custom feedback scheme.
- UNREACHABLE();
- }
-
- __ bind(&miss);
- GenerateMiss(masm);
-
- // The slow case, we need this no matter what to complete a call after a miss.
- CallFunctionNoFeedback(masm,
- arg_count(),
- true,
- CallAsMethod());
-
- // Unreachable.
- __ int3();
-}
-
-
void CallICStub::Generate(MacroAssembler* masm) {
// rdi - function
// rbx - vector
EmitLoadTypeFeedbackVector(masm, rbx);
- if (state_.stub_type() != CallIC::DEFAULT) {
- Generate_CustomFeedbackCall(masm);
- return;
- }
-
// The checks. First, does rdi match the recorded monomorphic target?
__ SmiToInteger32(rdx, rdx);
__ cmpq(rdi, FieldOperand(rbx, rdx, times_pointer_size,
a = bar(10);
assertKind(elements_kind.fast, a);
assertOptimized(bar);
+ // bar should deopt because the length is too large.
+ a = bar(100000);
+ assertUnoptimized(bar);
+ assertKind(elements_kind.dictionary, a);
+ // The allocation site now has feedback that means the array constructor
+ // will not be inlined.
+ %OptimizeFunctionOnNextCall(bar);
a = bar(100000);
assertKind(elements_kind.dictionary, a);
assertOptimized(bar);
- // If the argument isn't a smi, things should still work.
+ // If the argument isn't a smi, it bails out as well.
a = bar("oops");
assertOptimized(bar);
assertKind(elements_kind.fast, a);
barn(1, 2, 3);
assertOptimized(barn);
a = barn(1, "oops", 3);
+ // The method should deopt, but learn from the failure to avoid inlining
+ // the array.
+ assertKind(elements_kind.fast, a);
+ assertUnoptimized(barn);
+ %OptimizeFunctionOnNextCall(barn);
+ a = barn(1, "oops", 3);
assertOptimized(barn);
})();
assertTrue(Realm.eval(contextB, "bar2() instanceof Array"));
})();
- // Test: create array with packed feedback, then optimize function, which
- // should deal with arguments that create holey arrays.
+ // Test: create array with packed feedback, then optimize/inline
+ // function. Verify that if we ask for a holey array then we deopt.
+ // Reoptimization will proceed with the correct feedback and we
+ // won't deopt anymore.
(function() {
function bar(len) { return new Array(len); }
bar(0);
assertOptimized(bar);
assertFalse(isHoley(a));
a = bar(1); // ouch!
- assertOptimized(bar);
+ assertUnoptimized(bar);
assertTrue(isHoley(a));
+ // Try again
+ %OptimizeFunctionOnNextCall(bar);
a = bar(100);
+ assertOptimized(bar);
assertTrue(isHoley(a));
a = bar(0);
assertOptimized(bar);
- // Crankshafted functions don't use mementos, so feedback still
- // indicates a packed array is desired. (unless --nocrankshaft is in use).
- if (4 != %GetOptimizationStatus(bar)) {
- assertFalse(isHoley(a));
- }
+ assertTrue(isHoley(a));
})();
}
// Verify that basic elements kind feedback works for non-constructor
// array calls (as long as the call is made through an IC, and not
// a CallStub).
- (function (){
- function create0() {
- return Array();
- }
-
- // Calls through ICs need warm up through uninitialized, then
- // premonomorphic first.
- create0();
- a = create0();
- assertKind(elements_kind.fast_smi_only, a);
- a[0] = 3.5;
- b = create0();
- assertKind(elements_kind.fast_double, b);
-
- function create1(arg) {
- return Array(arg);
- }
-
- create1(0);
- create1(0);
- a = create1(0);
- assertFalse(isHoley(a));
- assertKind(elements_kind.fast_smi_only, a);
- a[0] = "hello";
- b = create1(10);
- assertTrue(isHoley(b));
- assertKind(elements_kind.fast, b);
-
- a = create1(100000);
- assertKind(elements_kind.dictionary, a);
-
- function create3(arg1, arg2, arg3) {
- return Array(arg1, arg2, arg3);
- }
-
- create3(1,2,3);
- create3(1,2,3);
- a = create3(1,2,3);
- a[0] = 3.035;
- assertKind(elements_kind.fast_double, a);
- b = create3(1,2,3);
- assertKind(elements_kind.fast_double, b);
- assertFalse(isHoley(b));
- })();
+ // (function (){
+ // function create0() {
+ // return Array();
+ // }
+
+ // // Calls through ICs need warm up through uninitialized, then
+ // // premonomorphic first.
+ // create0();
+ // create0();
+ // a = create0();
+ // assertKind(elements_kind.fast_smi_only, a);
+ // a[0] = 3.5;
+ // b = create0();
+ // assertKind(elements_kind.fast_double, b);
+
+ // function create1(arg) {
+ // return Array(arg);
+ // }
+
+ // create1(0);
+ // create1(0);
+ // a = create1(0);
+ // assertFalse(isHoley(a));
+ // assertKind(elements_kind.fast_smi_only, a);
+ // a[0] = "hello";
+ // b = create1(10);
+ // assertTrue(isHoley(b));
+ // assertKind(elements_kind.fast, b);
+
+ // a = create1(100000);
+ // assertKind(elements_kind.dictionary, a);
+
+ // function create3(arg1, arg2, arg3) {
+ // return Array(arg1, arg2, arg3);
+ // }
+
+ // create3();
+ // create3();
+ // a = create3(1,2,3);
+ // a[0] = 3.5;
+ // b = create3(1,2,3);
+ // assertKind(elements_kind.fast_double, b);
+ // assertFalse(isHoley(b));
+ // })();
// Verify that keyed calls work
- (function (){
- function create0(name) {
- return this[name]();
- }
-
- name = "Array";
- create0(name);
- create0(name);
- a = create0(name);
- a[0] = 3.5;
- b = create0(name);
- assertKind(elements_kind.fast_double, b);
- })();
-
-
- // Verify that crankshaft consumes type feedback.
+ // (function (){
+ // function create0(name) {
+ // return this[name]();
+ // }
+
+ // name = "Array";
+ // create0(name);
+ // create0(name);
+ // a = create0(name);
+ // a[0] = 3.5;
+ // b = create0(name);
+ // assertKind(elements_kind.fast_double, b);
+ // })();
+
+
+ // Verify that the IC can't be spoofed by patching the global Array function.
(function (){
function create0() {
return Array();
create0();
create0();
a = create0();
- a[0] = 3.5;
- %OptimizeFunctionOnNextCall(create0);
- create0();
- create0();
+ assertKind(elements_kind.fast_smi_only, a);
+ var oldArray = this.Array;
+ this.Array = function() { return ["hi"]; };
b = create0();
- assertKind(elements_kind.fast_double, b);
- assertOptimized(create0);
-
- function create1(arg) {
- return Array(arg);
- }
+ assertEquals(["hi"], b);
+ this.Array = oldArray;
+ })();
- create1(8);
- create1(8);
- a = create1(8);
- a[0] = 3.5;
- %OptimizeFunctionOnNextCall(create1);
- b = create1(8);
- assertKind(elements_kind.fast_double, b);
- assertOptimized(create1);
-
- function createN(arg1, arg2, arg3) {
- return Array(arg1, arg2, arg3);
- }
+ // Verify that calls are still made through an IC after crankshaft,
+ // though the type information is reset.
+ // TODO(mvstanton): instead, consume the type feedback gathered up
+ // until crankshaft time.
+ // (function (){
+ // function create0() {
+ // return Array();
+ // }
+
+ // create0();
+ // create0();
+ // a = create0();
+ // a[0] = 3.5;
+ // %OptimizeFunctionOnNextCall(create0);
+ // create0();
+ // // This test only makes sense if crankshaft is allowed
+ // if (4 != %GetOptimizationStatus(create0)) {
+ // create0();
+ // b = create0();
+ // assertKind(elements_kind.fast_smi_only, b);
+ // b[0] = 3.5;
+ // c = create0();
+ // assertKind(elements_kind.fast_double, c);
+ // assertOptimized(create0);
+ // }
+ // })();
- createN(1, 2, 3);
- createN(1, 2, 3);
- a = createN(1, 2, 3);
- a[0] = 3.5;
- %OptimizeFunctionOnNextCall(createN);
- b = createN(1, 2, 3);
- assertKind(elements_kind.fast_double, b);
- assertOptimized(createN);
- })();
// Verify that cross context calls work
(function (){