CallICStub with a "never patch" approach by default. Patching will
occur only when custom feedback needs to be gathered (future CLs).

author     mvstanton@chromium.org <mvstanton@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Wed, 30 Apr 2014 14:33:35 +0000 (14:33 +0000)
committer  mvstanton@chromium.org <mvstanton@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Wed, 30 Apr 2014 14:33:35 +0000 (14:33 +0000)

Now rebased on https://codereview.chromium.org/254623002/, which moves the type feedback vector to the SharedFunctionInfo.

R=verwaest@chromium.org

Review URL: https://codereview.chromium.org/247373002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@21093 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

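In outline, the new stub behaves as sketched below. This is condensed pseudocode of the control flow that CallICStub::Generate emits, drawn from the ARM version in this diff; it is not literal code. The feedback vector is no longer passed in a register: the stub loads it from the SharedFunctionInfo of the function in the current frame, and the only write it ever performs is the transition of its own slot to the megamorphic sentinel; the call site itself is never patched:

    // Inputs: function (the callee) and slot (a Smi index into the vector).
    vector = frame_function->shared()->feedback_vector();
    if (vector[slot] == function) goto have_js_function;        // monomorphic hit
    // extra_checks_or_miss:
    if (vector[slot] == MegamorphicSentinel)   goto slow_start; // already generic
    if (vector[slot] == UninitializedSentinel) goto miss;       // let the runtime record feedback
    if (!FLAG_trace_ic) {
      vector[slot] = MegamorphicSentinel;                       // go megamorphic in place
      goto slow_start;
    }
    // miss: call the IC::kCallIC_Miss runtime entry, then fall through to
    // slow_start, which re-checks that the callee is a JSFunction and jumps
    // back to have_js_function.
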
42 files changed:
src/arm/builtins-arm.cc
src/arm/code-stubs-arm.cc
src/arm/debug-arm.cc
src/arm/full-codegen-arm.cc
src/arm/lithium-codegen-arm.cc
src/arm64/builtins-arm64.cc
src/arm64/code-stubs-arm64.cc
src/arm64/debug-arm64.cc
src/arm64/full-codegen-arm64.cc
src/arm64/lithium-codegen-arm64.cc
src/ast.cc
src/builtins.cc
src/builtins.h
src/code-stubs.cc
src/code-stubs.h
src/debug.cc
src/debug.h
src/factory.h
src/full-codegen.h
src/ia32/builtins-ia32.cc
src/ia32/code-stubs-ia32.cc
src/ia32/debug-ia32.cc
src/ia32/full-codegen-ia32.cc
src/ia32/lithium-codegen-ia32.cc
src/ic.cc
src/ic.h
src/liveedit.cc
src/liveedit.h
src/log.cc
src/log.h
src/objects-inl.h
src/objects-visiting-inl.h
src/objects.cc
src/objects.h
src/spaces.cc
src/v8globals.h
src/x64/builtins-x64.cc
src/x64/code-stubs-x64.cc
src/x64/debug-x64.cc
src/x64/full-codegen-x64.cc
src/x64/lithium-codegen-x64.cc
test/mjsunit/debug-liveedit-newsource.js

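The call-site change in full-codegen (repeated per architecture below) is small: instead of materializing the feedback vector and emitting a CallFunctionStub with RECORD_CALL_TARGET, EmitCall now obtains the IC from CallIC::initialize_stub(isolate, arg_count, call_type), loads the slot index into the slot register (r3/x3/edx), and calls the IC. The body of initialize_stub lives in src/ic.cc and is not shown in this excerpt; given the CallICStub constructor added to code-stubs.h, a plausible shape would be the following sketch (an assumption, not the committed code):

    Handle<Code> CallIC::initialize_stub(Isolate* isolate, int argc,
                                         CallType call_type) {
      // Hypothetical: wrap the argument count and call type in a CallIC::State
      // and return the stub's code object.
      CallICStub stub(isolate, State(argc, call_type));
      return stub.GetCode();
    }
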
src/arm/builtins-arm.cc
index 0083f23..74fe5f1 100644 (file)
@@ -782,7 +782,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     if (is_construct) {
       // No type feedback cell is available
       __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
-      CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
+      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
       __ CallStub(&stub);
     } else {
       ParameterCount actual(r0);
src/arm/code-stubs-arm.cc
index 44ed3a6..7b29351 100644 (file)
@@ -2856,11 +2856,62 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 }
 
 
+static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
+  // Do not transform the receiver for strict mode functions.
+  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+  __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset));
+  __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
+                           kSmiTagSize)));
+  __ b(ne, cont);
+
+  // Do not transform the receiver for native (compiler hints already in r4).
+  __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
+  __ b(ne, cont);
+}
+
+
+static void EmitSlowCase(MacroAssembler* masm,
+                         int argc,
+                         Label* non_function) {
+  // Check for function proxy.
+  __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE));
+  __ b(ne, non_function);
+  __ push(r1);  // put proxy as additional argument
+  __ mov(r0, Operand(argc + 1, RelocInfo::NONE32));
+  __ mov(r2, Operand::Zero());
+  __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
+  {
+    Handle<Code> adaptor =
+        masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+    __ Jump(adaptor, RelocInfo::CODE_TARGET);
+  }
+
+  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
+  // of the original receiver from the call site).
+  __ bind(non_function);
+  __ str(r1, MemOperand(sp, argc * kPointerSize));
+  __ mov(r0, Operand(argc));  // Set up the number of arguments.
+  __ mov(r2, Operand::Zero());
+  __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
+  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+          RelocInfo::CODE_TARGET);
+}
+
+
+static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
+  // Wrap the receiver and patch it back onto the stack.
+  { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
+    __ Push(r1, r3);
+    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+    __ pop(r1);
+  }
+  __ str(r0, MemOperand(sp, argc * kPointerSize));
+  __ jmp(cont);
+}
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // r1 : the function to call
-  // r2 : feedback vector
-  // r3 : (only if r2 is not the megamorphic symbol) slot in feedback
-  //      vector (Smi)
   Label slow, non_function, wrap, cont;
 
   if (NeedsChecks()) {
@@ -2871,36 +2922,20 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     // Goto slow case if we do not have a function.
     __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
     __ b(ne, &slow);
-
-    if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm);
-      // Type information was updated. Because we may call Array, which
-      // expects either undefined or an AllocationSite in ebx we need
-      // to set ebx to undefined.
-      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
-    }
   }
 
   // Fast-case: Invoke the function now.
   // r1: pushed function
-  ParameterCount actual(argc_);
+  int argc = argc_;
+  ParameterCount actual(argc);
 
   if (CallAsMethod()) {
     if (NeedsChecks()) {
-      // Do not transform the receiver for strict mode functions.
-      __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
-      __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset));
-      __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
-                               kSmiTagSize)));
-      __ b(ne, &cont);
-
-      // Do not transform the receiver for native (Compilerhints already in r3).
-      __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
-      __ b(ne, &cont);
+      EmitContinueIfStrictOrNative(masm, &cont);
     }
 
     // Compute the receiver in sloppy mode.
-    __ ldr(r3, MemOperand(sp, argc_ * kPointerSize));
+    __ ldr(r3, MemOperand(sp, argc * kPointerSize));
 
     if (NeedsChecks()) {
       __ JumpIfSmi(r3, &wrap);
@@ -2912,55 +2947,18 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
 
     __ bind(&cont);
   }
+
   __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());
 
   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
-    if (RecordCallTarget()) {
-      // If there is a call target cache, mark it megamorphic in the
-      // non-function case.  MegamorphicSentinel is an immortal immovable
-      // object (megamorphic symbol) so no write barrier is needed.
-      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
-                isolate()->heap()->megamorphic_symbol());
-      __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
-      __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);
-      __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
-    }
-    // Check for function proxy.
-    __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE));
-    __ b(ne, &non_function);
-    __ push(r1);  // put proxy as additional argument
-    __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32));
-    __ mov(r2, Operand::Zero());
-    __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
-    {
-      Handle<Code> adaptor =
-        isolate()->builtins()->ArgumentsAdaptorTrampoline();
-      __ Jump(adaptor, RelocInfo::CODE_TARGET);
-    }
-
-    // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
-    // of the original receiver from the call site).
-    __ bind(&non_function);
-    __ str(r1, MemOperand(sp, argc_ * kPointerSize));
-    __ mov(r0, Operand(argc_));  // Set up the number of arguments.
-    __ mov(r2, Operand::Zero());
-    __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
-    __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
-            RelocInfo::CODE_TARGET);
+    EmitSlowCase(masm, argc, &non_function);
   }
 
   if (CallAsMethod()) {
     __ bind(&wrap);
-    // Wrap the receiver and patch it back onto the stack.
-    { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
-      __ Push(r1, r3);
-      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
-      __ pop(r1);
-    }
-    __ str(r0, MemOperand(sp, argc_ * kPointerSize));
-    __ jmp(&cont);
+    EmitWrapCase(masm, argc, &cont);
   }
 }
 
@@ -3029,6 +3027,109 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 }
 
 
+static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
+  __ ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  __ ldr(vector, FieldMemOperand(vector,
+                                 JSFunction::kSharedFunctionInfoOffset));
+  __ ldr(vector, FieldMemOperand(vector,
+                                 SharedFunctionInfo::kFeedbackVectorOffset));
+}
+
+
+void CallICStub::Generate(MacroAssembler* masm) {
+  // r1 - function
+  // r3 - slot id (Smi)
+  Label extra_checks_or_miss, slow_start;
+  Label slow, non_function, wrap, cont;
+  Label have_js_function;
+  int argc = state_.arg_count();
+  ParameterCount actual(argc);
+
+  EmitLoadTypeFeedbackVector(masm, r2);
+
+  // The checks. First, does r1 match the recorded monomorphic target?
+  __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
+  __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
+  __ cmp(r1, r4);
+  __ b(ne, &extra_checks_or_miss);
+
+  __ bind(&have_js_function);
+  if (state_.CallAsMethod()) {
+    EmitContinueIfStrictOrNative(masm, &cont);
+    // Compute the receiver in sloppy mode.
+    __ ldr(r3, MemOperand(sp, argc * kPointerSize));
+
+    __ JumpIfSmi(r3, &wrap);
+    __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
+    __ b(lt, &wrap);
+
+    __ bind(&cont);
+  }
+
+  __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());
+
+  __ bind(&slow);
+  EmitSlowCase(masm, argc, &non_function);
+
+  if (state_.CallAsMethod()) {
+    __ bind(&wrap);
+    EmitWrapCase(masm, argc, &cont);
+  }
+
+  __ bind(&extra_checks_or_miss);
+  Label miss;
+
+  __ CompareRoot(r4, Heap::kMegamorphicSymbolRootIndex);
+  __ b(eq, &slow_start);
+  __ CompareRoot(r4, Heap::kUninitializedSymbolRootIndex);
+  __ b(eq, &miss);
+
+  if (!FLAG_trace_ic) {
+    // We are going megamorphic, and we don't want to visit the runtime.
+    __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
+    __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);
+    __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
+    __ jmp(&slow_start);
+  }
+
+  // We are here because tracing is on or we are going monomorphic.
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+  // the slow case
+  __ bind(&slow_start);
+  // Check that the function is really a JavaScript function.
+  // r1: pushed function (to be verified)
+  __ JumpIfSmi(r1, &non_function);
+
+  // Goto slow case if we do not have a function.
+  __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
+  __ b(ne, &slow);
+  __ jmp(&have_js_function);
+}
+
+
+void CallICStub::GenerateMiss(MacroAssembler* masm) {
+  // Get the receiver of the function from the stack; 1 ~ return address.
+  __ ldr(r4, MemOperand(sp, (state_.arg_count() + 1) * kPointerSize));
+
+  {
+    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+
+    // Push the receiver and the function and feedback info.
+    __ Push(r4, r1, r2, r3);
+
+    // Call the entry.
+    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
+                                               masm->isolate());
+    __ CallExternalReference(miss, 4);
+
+    // Move result to r1 and exit the internal frame.
+    __ mov(r1, r0);
+  }
+}
+
+
 // StringCharCodeAtGenerator
 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   Label flat_string;
src/arm/debug-arm.cc
index 6aaeb02..cbc0c99 100644 (file)
@@ -155,6 +155,16 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
 }
 
 
+void Debug::GenerateCallICStubDebugBreak(MacroAssembler* masm) {
+  // Register state for CallICStub
+  // ----------- S t a t e -------------
+  //  -- r1 : function
+  //  -- r3 : slot in feedback array (smi)
+  // -----------------------------------
+  Generate_DebugBreakCallHelper(masm, r1.bit() | r3.bit(), 0);
+}
+
+
 void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) {
   // Calling convention for IC load (from ic-arm.cc).
   // ----------- S t a t e -------------
@@ -211,15 +221,6 @@ void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) {
 }
 
 
-void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
-  // Calling convention for IC call (from ic-arm.cc)
-  // ----------- S t a t e -------------
-  //  -- r2     : name
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, r2.bit(), 0);
-}
-
-
 void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
   // In places other than IC call sites it is expected that r0 is TOS which
   // is an object - this is not generally the case so this should be used with
@@ -237,17 +238,6 @@ void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) {
 }
 
 
-void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) {
-  // Register state for CallFunctionStub (from code-stubs-arm.cc).
-  // ----------- S t a t e -------------
-  //  -- r1 : function
-  //  -- r2 : feedback array
-  //  -- r3 : slot in feedback array
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, r1.bit() | r2.bit() | r3.bit(), 0);
-}
-
-
 void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) {
   // Calling convention for CallConstructStub (from code-stubs-arm.cc)
   // ----------- S t a t e -------------
src/arm/full-codegen-arm.cc
index 1e7f201..08aee9c 100644 (file)
@@ -2609,14 +2609,15 @@ void FullCodeGenerator::CallIC(Handle<Code> code,
 
 
 // Code common for calls using the IC.
-void FullCodeGenerator::EmitCallWithIC(Call* expr) {
+void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();
 
-  CallFunctionFlags flags;
+  CallIC::CallType call_type = callee->IsVariableProxy()
+      ? CallIC::FUNCTION
+      : CallIC::METHOD;
+
   // Get the target function.
-  if (callee->IsVariableProxy()) {
+  if (call_type == CallIC::FUNCTION) {
     { StackValueContext context(this);
       EmitVariableLoad(callee->AsVariableProxy());
       PrepareForBailout(callee, NO_REGISTERS);
@@ -2624,7 +2625,6 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
     // Push undefined as receiver. This is patched in the method prologue if it
     // is a sloppy mode method.
     __ Push(isolate()->factory()->undefined_value());
-    flags = NO_CALL_FUNCTION_FLAGS;
   } else {
     // Load the function from the receiver.
     ASSERT(callee->IsProperty());
@@ -2635,40 +2635,19 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
     __ ldr(ip, MemOperand(sp, 0));
     __ push(ip);
     __ str(r0, MemOperand(sp, kPointerSize));
-    flags = CALL_AS_METHOD;
-  }
-
-  // Load the arguments.
-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
   }
 
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
-  CallFunctionStub stub(isolate(), arg_count, flags);
-  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
-
-  RecordJSReturnSite(expr);
-
-  // Restore context register.
-  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, r0);
+  EmitCall(expr, call_type);
 }
 
 
 // Code common for calls using the IC.
-void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
-                                            Expression* key) {
+void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
+                                                Expression* key) {
   // Load the key.
   VisitForAccumulatorValue(key);
 
   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();
 
   // Load the function from the receiver.
   ASSERT(callee->IsProperty());
@@ -2681,28 +2660,12 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   __ push(ip);
   __ str(r0, MemOperand(sp, kPointerSize));
 
-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
-  }
-
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
-  CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
-  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
-
-  RecordJSReturnSite(expr);
-  // Restore context register.
-  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, r0);
+  EmitCall(expr, CallIC::METHOD);
 }
 
 
-void FullCodeGenerator::EmitCallWithStub(Call* expr) {
-  // Code common for calls using the call stub.
+void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
+  // Load the arguments.
   ZoneList<Expression*>* args = expr->arguments();
   int arg_count = args->length();
   { PreservePositionScope scope(masm()->positions_recorder());
@@ -2710,16 +2673,17 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
       VisitForStackValue(args->at(i));
     }
   }
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
 
-  __ Move(r2, FeedbackVector());
+  // Record source position of the IC call.
+  SetSourcePosition(expr->position());
+  Handle<Code> ic = CallIC::initialize_stub(
+      isolate(), arg_count, call_type);
   __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
-
-  // Record call targets in unoptimized code.
-  CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
   __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
+  // Don't assign a type feedback id to the IC, since type feedback is provided
+  // by the vector above.
+  CallIC(ic);
+
   RecordJSReturnSite(expr);
   // Restore context register.
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2802,7 +2766,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
     context()->DropAndPlug(1, r0);
   } else if (call_type == Call::GLOBAL_CALL) {
-    EmitCallWithIC(expr);
+    EmitCallWithLoadIC(expr);
 
   } else if (call_type == Call::LOOKUP_SLOT_CALL) {
     // Call to a lookup slot (dynamically introduced variable).
@@ -2842,16 +2806,16 @@ void FullCodeGenerator::VisitCall(Call* expr) {
 
     // The receiver is either the global receiver or an object found
     // by LoadContextSlot.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
   } else if (call_type == Call::PROPERTY_CALL) {
     Property* property = callee->AsProperty();
     { PreservePositionScope scope(masm()->positions_recorder());
       VisitForStackValue(property->obj());
     }
     if (property->key()->IsPropertyName()) {
-      EmitCallWithIC(expr);
+      EmitCallWithLoadIC(expr);
     } else {
-      EmitKeyedCallWithIC(expr, property->key());
+      EmitKeyedCallWithLoadIC(expr, property->key());
     }
   } else {
     ASSERT(call_type == Call::OTHER_CALL);
@@ -2862,7 +2826,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
     __ push(r1);
     // Emit function call.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
   }
 
 #ifdef DEBUG
@@ -2908,7 +2872,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ Move(r2, FeedbackVector());
   __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
 
-  CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
+  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
   __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(r0);
src/arm/lithium-codegen-arm.cc
index 2148383..7ddbaa9 100644 (file)
@@ -3984,7 +3984,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   __ mov(r0, Operand(instr->arity()));
   // No cell in r2 for construct type feedback in optimized code
   __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
-  CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
+  CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
   CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
 }
 
src/arm64/builtins-arm64.cc
index dd01cec..2aed6b3 100644 (file)
@@ -760,7 +760,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
       // No type feedback cell is available.
       __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
 
-      CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
+      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
       __ CallStub(&stub);
     } else {
       ParameterCount actual(x0);
src/arm64/code-stubs-arm64.cc
index 884be58..9a75499 100644 (file)
@@ -3133,14 +3133,64 @@ static void GenerateRecordCallTarget(MacroAssembler* masm,
 }
 
 
+static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
+  // Do not transform the receiver for strict mode functions.
+  __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+  __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset));
+  __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, cont);
+
+  // Do not transform the receiver for native (compiler hints already in w4).
+  __ Tbnz(w4, SharedFunctionInfo::kNative, cont);
+}
+
+
+static void EmitSlowCase(MacroAssembler* masm,
+                         int argc,
+                         Register function,
+                         Register type,
+                         Label* non_function) {
+  // Check for function proxy.
+  // x10 : function type.
+  __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, non_function);
+  __ Push(function);  // put proxy as additional argument
+  __ Mov(x0, argc + 1);
+  __ Mov(x2, 0);
+  __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
+  {
+    Handle<Code> adaptor =
+        masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+    __ Jump(adaptor, RelocInfo::CODE_TARGET);
+  }
+
+  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
+  // of the original receiver from the call site).
+  __ Bind(non_function);
+  __ Poke(function, argc * kXRegSize);
+  __ Mov(x0, argc);  // Set up the number of arguments.
+  __ Mov(x2, 0);
+  __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
+  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+          RelocInfo::CODE_TARGET);
+}
+
+
+static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
+  // Wrap the receiver and patch it back onto the stack.
+  { FrameScope frame_scope(masm, StackFrame::INTERNAL);
+    __ Push(x1, x3);
+    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+    __ Pop(x1);
+  }
+  __ Poke(x0, argc * kPointerSize);
+  __ B(cont);
+}
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   ASM_LOCATION("CallFunctionStub::Generate");
   // x1  function    the function to call
-  // x2 : feedback vector
-  // x3 : slot in feedback vector (smi) (if x2 is not the megamorphic symbol)
+
   Register function = x1;
-  Register cache_cell = x2;
-  Register slot = x3;
   Register type = x4;
   Label slow, non_function, wrap, cont;
 
@@ -3153,33 +3203,20 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
 
     // Goto slow case if we do not have a function.
     __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow);
-
-    if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm, x0, function, cache_cell, slot, x4, x5);
-      // Type information was updated. Because we may call Array, which
-      // expects either undefined or an AllocationSite in ebx we need
-      // to set ebx to undefined.
-      __ LoadRoot(cache_cell, Heap::kUndefinedValueRootIndex);
-    }
   }
 
   // Fast-case: Invoke the function now.
   // x1  function  pushed function
-  ParameterCount actual(argc_);
+  int argc = argc_;
+  ParameterCount actual(argc);
 
   if (CallAsMethod()) {
     if (NeedsChecks()) {
-      // Do not transform the receiver for strict mode functions.
-      __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
-      __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset));
-      __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, &cont);
-
-      // Do not transform the receiver for native (Compilerhints already in x3).
-      __ Tbnz(w4, SharedFunctionInfo::kNative, &cont);
+      EmitContinueIfStrictOrNative(masm, &cont);
     }
 
     // Compute the receiver in sloppy mode.
-    __ Peek(x3, argc_ * kPointerSize);
+    __ Peek(x3, argc * kPointerSize);
 
     if (NeedsChecks()) {
       __ JumpIfSmi(x3, &wrap);
@@ -3190,6 +3227,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
 
     __ Bind(&cont);
   }
+
   __ InvokeFunction(function,
                     actual,
                     JUMP_FUNCTION,
@@ -3198,51 +3236,12 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ Bind(&slow);
-    if (RecordCallTarget()) {
-      // If there is a call target cache, mark it megamorphic in the
-      // non-function case. MegamorphicSentinel is an immortal immovable object
-      // (megamorphic symbol) so no write barrier is needed.
-      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
-                isolate()->heap()->megamorphic_symbol());
-      __ Add(x12, cache_cell, Operand::UntagSmiAndScale(slot,
-                                                        kPointerSizeLog2));
-      __ LoadRoot(x11, Heap::kMegamorphicSymbolRootIndex);
-      __ Str(x11, FieldMemOperand(x12, FixedArray::kHeaderSize));
-    }
-    // Check for function proxy.
-    // x10 : function type.
-    __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, &non_function);
-    __ Push(function);  // put proxy as additional argument
-    __ Mov(x0, argc_ + 1);
-    __ Mov(x2, 0);
-    __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
-    {
-      Handle<Code> adaptor =
-          isolate()->builtins()->ArgumentsAdaptorTrampoline();
-      __ Jump(adaptor, RelocInfo::CODE_TARGET);
-    }
-
-    // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
-    // of the original receiver from the call site).
-    __ Bind(&non_function);
-    __ Poke(function, argc_ * kXRegSize);
-    __ Mov(x0, argc_);  // Set up the number of arguments.
-    __ Mov(x2, 0);
-    __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
-    __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
-            RelocInfo::CODE_TARGET);
+    EmitSlowCase(masm, argc, function, type, &non_function);
   }
 
   if (CallAsMethod()) {
     __ Bind(&wrap);
-    // Wrap the receiver and patch it back onto the stack.
-    { FrameScope frame_scope(masm, StackFrame::INTERNAL);
-      __ Push(x1, x3);
-      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
-      __ Pop(x1);
-    }
-    __ Poke(x0, argc_ * kPointerSize);
-    __ B(&cont);
+    EmitWrapCase(masm, argc, &cont);
   }
 }
 
@@ -3317,6 +3316,121 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 }
 
 
+static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
+  __ Ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  __ Ldr(vector, FieldMemOperand(vector,
+                                 JSFunction::kSharedFunctionInfoOffset));
+  __ Ldr(vector, FieldMemOperand(vector,
+                                 SharedFunctionInfo::kFeedbackVectorOffset));
+}
+
+
+void CallICStub::Generate(MacroAssembler* masm) {
+  ASM_LOCATION("CallICStub");
+
+  // x1 - function
+  // x3 - slot id (Smi)
+  Label extra_checks_or_miss, slow_start;
+  Label slow, non_function, wrap, cont;
+  Label have_js_function;
+  int argc = state_.arg_count();
+  ParameterCount actual(argc);
+
+  Register function = x1;
+  Register feedback_vector = x2;
+  Register index = x3;
+  Register type = x4;
+
+  EmitLoadTypeFeedbackVector(masm, feedback_vector);
+
+  // The checks. First, does x1 match the recorded monomorphic target?
+  __ Add(x4, feedback_vector,
+         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
+  __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize));
+
+  __ Cmp(x4, function);
+  __ B(ne, &extra_checks_or_miss);
+
+  __ bind(&have_js_function);
+  if (state_.CallAsMethod()) {
+    EmitContinueIfStrictOrNative(masm, &cont);
+
+    // Compute the receiver in sloppy mode.
+    __ Peek(x3, argc * kPointerSize);
+
+    __ JumpIfSmi(x3, &wrap);
+    __ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt);
+
+    __ Bind(&cont);
+  }
+
+  __ InvokeFunction(function,
+                    actual,
+                    JUMP_FUNCTION,
+                    NullCallWrapper());
+
+  __ bind(&slow);
+  EmitSlowCase(masm, argc, function, type, &non_function);
+
+  if (state_.CallAsMethod()) {
+    __ bind(&wrap);
+    EmitWrapCase(masm, argc, &cont);
+  }
+
+  __ bind(&extra_checks_or_miss);
+  Label miss;
+
+  __ JumpIfRoot(x4, Heap::kMegamorphicSymbolRootIndex, &slow_start);
+  __ JumpIfRoot(x4, Heap::kUninitializedSymbolRootIndex, &miss);
+
+  if (!FLAG_trace_ic) {
+    // We are going megamorphic, and we don't want to visit the runtime.
+    __ Add(x4, feedback_vector,
+           Operand::UntagSmiAndScale(index, kPointerSizeLog2));
+    __ LoadRoot(x5, Heap::kMegamorphicSymbolRootIndex);
+    __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize));
+    __ B(&slow_start);
+  }
+
+  // We are here because tracing is on or we are going monomorphic.
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+  // the slow case
+  __ bind(&slow_start);
+
+  // Check that the function is really a JavaScript function.
+  __ JumpIfSmi(function, &non_function);
+
+  // Goto slow case if we do not have a function.
+  __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow);
+  __ B(&have_js_function);
+}
+
+
+void CallICStub::GenerateMiss(MacroAssembler* masm) {
+  ASM_LOCATION("CallICStub[Miss]");
+
+  // Get the receiver of the function from the stack; 1 ~ return address.
+  __ Peek(x4, (state_.arg_count() + 1) * kPointerSize);
+
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Push the receiver and the function and feedback info.
+    __ Push(x4, x1, x2, x3);
+
+    // Call the entry.
+    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
+                                               masm->isolate());
+    __ CallExternalReference(miss, 4);
+
+    // Move result to x1 and exit the internal frame.
+    __ Mov(x1, x0);
+  }
+}
+
+
 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   // If the receiver is a smi trigger the non-string case.
   __ JumpIfSmi(object_, receiver_not_string_);
src/arm64/debug-arm64.cc
index c7d3267..309a1b5 100644 (file)
@@ -215,6 +215,16 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
 }
 
 
+void Debug::GenerateCallICStubDebugBreak(MacroAssembler* masm) {
+  // Register state for CallICStub
+  // ----------- S t a t e -------------
+  //  -- x1 : function
+  //  -- x3 : slot in feedback array
+  // -----------------------------------
+  Generate_DebugBreakCallHelper(masm, x1.Bit() | x3.Bit(), 0, x10);
+}
+
+
 void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) {
   // Calling convention for IC load (from ic-arm.cc).
   // ----------- S t a t e -------------
@@ -271,15 +281,6 @@ void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) {
 }
 
 
-void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
-  // Calling convention for IC call (from ic-arm.cc)
-  // ----------- S t a t e -------------
-  //  -- x2     : name
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, x2.Bit(), 0, x10);
-}
-
-
 void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
   // In places other than IC call sites it is expected that r0 is TOS which
   // is an object - this is not generally the case so this should be used with
@@ -297,17 +298,6 @@ void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) {
 }
 
 
-void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) {
-  // Register state for CallFunctionStub (from code-stubs-arm64.cc).
-  // ----------- S t a t e -------------
-  //  -- x1 : function
-  //  -- x2 : feedback array
-  //  -- x3 : slot in feedback array
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, x1.Bit() | x2.Bit() | x3.Bit(), 0, x10);
-}
-
-
 void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) {
   // Calling convention for CallConstructStub (from code-stubs-arm64.cc).
   // ----------- S t a t e -------------
src/arm64/full-codegen-arm64.cc
index ca25bd7..d3b1c70 100644 (file)
@@ -2319,16 +2319,15 @@ void FullCodeGenerator::CallIC(Handle<Code> code,
 
 
 // Code common for calls using the IC.
-void FullCodeGenerator::EmitCallWithIC(Call* expr) {
-  ASM_LOCATION("EmitCallWithIC");
-
+void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();
 
-  CallFunctionFlags flags;
+  CallIC::CallType call_type = callee->IsVariableProxy()
+      ? CallIC::FUNCTION
+      : CallIC::METHOD;
+
   // Get the target function.
-  if (callee->IsVariableProxy()) {
+  if (call_type == CallIC::FUNCTION) {
     { StackValueContext context(this);
       EmitVariableLoad(callee->AsVariableProxy());
       PrepareForBailout(callee, NO_REGISTERS);
@@ -2336,7 +2335,6 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
     // Push undefined as receiver. This is patched in the method prologue if it
     // is a sloppy mode method.
     __ Push(isolate()->factory()->undefined_value());
-    flags = NO_CALL_FUNCTION_FLAGS;
   } else {
     // Load the function from the receiver.
     ASSERT(callee->IsProperty());
@@ -2346,40 +2344,19 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
     // Push the target function under the receiver.
     __ Pop(x10);
     __ Push(x0, x10);
-    flags = CALL_AS_METHOD;
-  }
-
-  // Load the arguments.
-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
   }
 
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
-  CallFunctionStub stub(isolate(), arg_count, flags);
-  __ Peek(x1, (arg_count + 1) * kPointerSize);
-  __ CallStub(&stub);
-
-  RecordJSReturnSite(expr);
-
-  // Restore context register.
-  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, x0);
+  EmitCall(expr, call_type);
 }
 
 
 // Code common for calls using the IC.
-void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
-                                            Expression* key) {
+void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
+                                                Expression* key) {
   // Load the key.
   VisitForAccumulatorValue(key);
 
   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();
 
   // Load the function from the receiver.
   ASSERT(callee->IsProperty());
@@ -2391,28 +2368,12 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   __ Pop(x10);
   __ Push(x0, x10);
 
-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
-  }
-
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
-  CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
-  __ Peek(x1, (arg_count + 1) * kPointerSize);
-  __ CallStub(&stub);
-
-  RecordJSReturnSite(expr);
-  // Restore context register.
-  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, x0);
+  EmitCall(expr, CallIC::METHOD);
 }
 
 
-void FullCodeGenerator::EmitCallWithStub(Call* expr) {
-  // Code common for calls using the call stub.
+void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
+  // Load the arguments.
   ZoneList<Expression*>* args = expr->arguments();
   int arg_count = args->length();
   { PreservePositionScope scope(masm()->positions_recorder());
@@ -2420,16 +2381,17 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
       VisitForStackValue(args->at(i));
     }
   }
-  // Record source position for debugger.
+  // Record source position of the IC call.
   SetSourcePosition(expr->position());
 
-  __ LoadObject(x2, FeedbackVector());
+  Handle<Code> ic = CallIC::initialize_stub(
+      isolate(), arg_count, call_type);
   __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));
-
-  // Record call targets in unoptimized code.
-  CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
   __ Peek(x1, (arg_count + 1) * kXRegSize);
-  __ CallStub(&stub);
+  // Don't assign a type feedback id to the IC, since type feedback is provided
+  // by the vector above.
+  CallIC(ic);
+
   RecordJSReturnSite(expr);
   // Restore context register.
   __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2521,7 +2483,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     context()->DropAndPlug(1, x0);
 
   } else if (call_type == Call::GLOBAL_CALL) {
-    EmitCallWithIC(expr);
+    EmitCallWithLoadIC(expr);
 
   } else if (call_type == Call::LOOKUP_SLOT_CALL) {
     // Call to a lookup slot (dynamically introduced variable).
@@ -2561,16 +2523,16 @@ void FullCodeGenerator::VisitCall(Call* expr) {
 
     // The receiver is either the global receiver or an object found
     // by LoadContextSlot.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
   } else if (call_type == Call::PROPERTY_CALL) {
     Property* property = callee->AsProperty();
     { PreservePositionScope scope(masm()->positions_recorder());
       VisitForStackValue(property->obj());
     }
     if (property->key()->IsPropertyName()) {
-      EmitCallWithIC(expr);
+      EmitCallWithLoadIC(expr);
     } else {
-      EmitKeyedCallWithIC(expr, property->key());
+      EmitKeyedCallWithLoadIC(expr, property->key());
     }
 
   } else {
@@ -2582,7 +2544,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
     __ Push(x1);
     // Emit function call.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
   }
 
 #ifdef DEBUG
@@ -2628,7 +2590,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ LoadObject(x2, FeedbackVector());
   __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));
 
-  CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
+  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
   __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(x0);
src/arm64/lithium-codegen-arm64.cc
index bc57bc9..dce5a1a 100644 (file)
@@ -421,7 +421,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   // No cell in x2 for construct type feedback in optimized code.
   __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
 
-  CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
+  CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
   CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
 
   ASSERT(ToRegister(instr->result()).is(x0));
src/ast.cc
index aacad68..303c442 100644 (file)
@@ -573,7 +573,7 @@ void Expression::RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) {
 
 bool Call::IsUsingCallFeedbackSlot(Isolate* isolate) const {
   CallType call_type = GetCallType(isolate);
-  return call_type == LOOKUP_SLOT_CALL || call_type == OTHER_CALL;
+  return (call_type != POSSIBLY_EVAL_CALL);
 }
 
 
src/builtins.cc
index e5a15a3..d0c1a44 100644 (file)
@@ -1420,6 +1420,11 @@ static void Generate_KeyedStoreIC_SloppyArguments(MacroAssembler* masm) {
 }
 
 
+static void Generate_CallICStub_DebugBreak(MacroAssembler* masm) {
+  Debug::GenerateCallICStubDebugBreak(masm);
+}
+
+
 static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
   Debug::GenerateLoadICDebugBreak(masm);
 }
@@ -1455,12 +1460,6 @@ static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
 }
 
 
-static void Generate_CallFunctionStub_Recording_DebugBreak(
-    MacroAssembler* masm) {
-  Debug::GenerateCallFunctionStubRecordDebugBreak(masm);
-}
-
-
 static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
   Debug::GenerateCallConstructStubDebugBreak(masm);
 }
src/builtins.h
index 6f9b760..e6b60c7 100644 (file)
@@ -180,12 +180,12 @@ enum BuiltinExtraArguments {
                                                DEBUG_BREAK)                   \
   V(CallFunctionStub_DebugBreak,               BUILTIN, DEBUG_STUB,           \
                                                DEBUG_BREAK)                   \
-  V(CallFunctionStub_Recording_DebugBreak,     BUILTIN, DEBUG_STUB,           \
-                                               DEBUG_BREAK)                   \
   V(CallConstructStub_DebugBreak,              BUILTIN, DEBUG_STUB,           \
                                                DEBUG_BREAK)                   \
   V(CallConstructStub_Recording_DebugBreak,    BUILTIN, DEBUG_STUB,           \
                                                DEBUG_BREAK)                   \
+  V(CallICStub_DebugBreak,                     CALL_IC, DEBUG_STUB,           \
+                                               DEBUG_BREAK)                   \
   V(LoadIC_DebugBreak,                         LOAD_IC, DEBUG_STUB,           \
                                                DEBUG_BREAK)                   \
   V(KeyedLoadIC_DebugBreak,                    KEYED_LOAD_IC, DEBUG_STUB,     \
src/code-stubs.cc
index a79d8d3..24f60ed 100644 (file)
@@ -478,6 +478,11 @@ Type* CompareNilICStub::GetInputType(Zone* zone, Handle<Map> map) {
 }
 
 
+void CallICStub::PrintState(StringStream* stream) {
+  state_.Print(stream);
+}
+
+
 void InstanceofStub::PrintName(StringStream* stream) {
   const char* args = "";
   if (HasArgsInRegisters()) {
@@ -560,7 +565,6 @@ void ArgumentsAccessStub::PrintName(StringStream* stream) {
 
 void CallFunctionStub::PrintName(StringStream* stream) {
   stream->Add("CallFunctionStub_Args%d", argc_);
-  if (RecordCallTarget()) stream->Add("_Recording");
 }
 
 
src/code-stubs.h
index 852a970..8380266 100644 (file)
@@ -28,6 +28,7 @@ namespace internal {
   V(CompareIC)                           \
   V(CompareNilIC)                        \
   V(MathPow)                             \
+  V(CallIC)                              \
   V(FunctionPrototype)                   \
   V(RecordWrite)                         \
   V(StoreBufferOverflow)                 \
@@ -823,6 +824,48 @@ class ICStub: public PlatformCodeStub {
 };
 
 
+class CallICStub: public PlatformCodeStub {
+ public:
+  CallICStub(Isolate* isolate, const CallIC::State& state)
+      : PlatformCodeStub(isolate), state_(state) {}
+
+  bool CallAsMethod() const { return state_.CallAsMethod(); }
+
+  int arg_count() const { return state_.arg_count(); }
+
+  static int ExtractArgcFromMinorKey(int minor_key) {
+    CallIC::State state((ExtraICState) minor_key);
+    return state.arg_count();
+  }
+
+  virtual void Generate(MacroAssembler* masm);
+
+  virtual Code::Kind GetCodeKind() const V8_OVERRIDE {
+    return Code::CALL_IC;
+  }
+
+  virtual InlineCacheState GetICState() V8_FINAL V8_OVERRIDE {
+    return state_.GetICState();
+  }
+
+  virtual ExtraICState GetExtraICState() V8_FINAL V8_OVERRIDE {
+    return state_.GetExtraICState();
+  }
+
+ protected:
+  virtual int MinorKey() { return GetExtraICState(); }
+  virtual void PrintState(StringStream* stream) V8_FINAL V8_OVERRIDE;
+
+ private:
+  virtual CodeStub::Major MajorKey() { return CallIC; }
+
+  // Code generation helpers.
+  void GenerateMiss(MacroAssembler* masm);
+
+  CallIC::State state_;
+};
+
+
 class FunctionPrototypeStub: public ICStub {
  public:
   FunctionPrototypeStub(Isolate* isolate, Code::Kind kind)
@@ -1600,10 +1643,6 @@ class CallFunctionStub: public PlatformCodeStub {
 
   void Generate(MacroAssembler* masm);
 
-  virtual void FinishCode(Handle<Code> code) {
-    code->set_has_function_cache(RecordCallTarget());
-  }
-
   static int ExtractArgcFromMinorKey(int minor_key) {
     return ArgcBits::decode(minor_key);
   }
@@ -1624,10 +1663,6 @@ class CallFunctionStub: public PlatformCodeStub {
     return FlagBits::encode(flags_) | ArgcBits::encode(argc_);
   }
 
-  bool RecordCallTarget() {
-    return flags_ == RECORD_CALL_TARGET;
-  }
-
   bool CallAsMethod() {
     return flags_ == CALL_AS_METHOD || flags_ == WRAP_AND_CALL;
   }
@@ -1640,7 +1675,7 @@ class CallFunctionStub: public PlatformCodeStub {
 
 class CallConstructStub: public PlatformCodeStub {
  public:
-  CallConstructStub(Isolate* isolate, CallFunctionFlags flags)
+  CallConstructStub(Isolate* isolate, CallConstructorFlags flags)
       : PlatformCodeStub(isolate), flags_(flags) {}
 
   void Generate(MacroAssembler* masm);
@@ -1650,7 +1685,7 @@ class CallConstructStub: public PlatformCodeStub {
   }
 
  private:
-  CallFunctionFlags flags_;
+  CallConstructorFlags flags_;
 
   virtual void PrintName(StringStream* stream);
 
@@ -1658,11 +1693,7 @@ class CallConstructStub: public PlatformCodeStub {
   int MinorKey() { return flags_; }
 
   bool RecordCallTarget() {
-    return (flags_ & RECORD_CALL_TARGET) != 0;
-  }
-
-  bool CallAsMethod() {
-    return (flags_ & CALL_AS_METHOD) != 0;
+    return (flags_ & RECORD_CONSTRUCTOR_TARGET) != 0;
   }
 };
 
src/debug.cc
index 641d7e5..ecbc0a1 100644 (file)
@@ -381,6 +381,7 @@ bool BreakLocationIterator::IsStepInLocation(Isolate* isolate) {
     if (target_code->kind() == Code::STUB) {
       return target_code->major_key() == CodeStub::CallFunction;
     }
+    return target_code->is_call_stub();
   }
   return false;
 }
@@ -1442,6 +1443,9 @@ void Debug::PrepareStep(StepAction step_action,
       bool is_call_target = false;
       Address target = it.rinfo()->target_address();
       Code* code = Code::GetCodeFromTargetAddress(target);
+      if (code->is_call_stub()) {
+        is_call_target = true;
+      }
       if (code->is_inline_cache_stub()) {
         is_inline_cache_stub = true;
         is_load_or_store = !is_call_target;
@@ -1456,8 +1460,9 @@ void Debug::PrepareStep(StepAction step_action,
         maybe_call_function_stub =
             Code::GetCodeFromTargetAddress(original_target);
       }
-      if (maybe_call_function_stub->kind() == Code::STUB &&
-          maybe_call_function_stub->major_key() == CodeStub::CallFunction) {
+      if ((maybe_call_function_stub->kind() == Code::STUB &&
+           maybe_call_function_stub->major_key() == CodeStub::CallFunction) ||
+          maybe_call_function_stub->kind() == Code::CALL_IC) {
         // Save reference to the code as we may need it to find out arguments
         // count for 'step in' later.
         call_function_stub = Handle<Code>(maybe_call_function_stub);
@@ -1513,6 +1518,7 @@ void Debug::PrepareStep(StepAction step_action,
     } else if (!call_function_stub.is_null()) {
       // If it's CallFunction stub ensure target function is compiled and flood
       // it with one shot breakpoints.
+      bool is_call_ic = call_function_stub->kind() == Code::CALL_IC;
 
       // Find out number of arguments from the stub minor key.
       // Reverse lookup required as the minor key cannot be retrieved
@@ -1528,11 +1534,13 @@ void Debug::PrepareStep(StepAction step_action,
       uint32_t key = Smi::cast(*obj)->value();
       // Argc in the stub is the number of arguments passed - not the
       // expected arguments of the called function.
-      int call_function_arg_count =
-          CallFunctionStub::ExtractArgcFromMinorKey(
+      int call_function_arg_count = is_call_ic
+          ? CallICStub::ExtractArgcFromMinorKey(CodeStub::MinorKeyFromKey(key))
+          : CallFunctionStub::ExtractArgcFromMinorKey(
               CodeStub::MinorKeyFromKey(key));
-      ASSERT(call_function_stub->major_key() ==
-             CodeStub::MajorKeyFromKey(key));
+
+      ASSERT(is_call_ic ||
+             call_function_stub->major_key() == CodeStub::MajorKeyFromKey(key));
 
       // Find target function on the expression stack.
       // Expression stack looks like this (top to bottom):
@@ -1660,6 +1668,9 @@ Handle<Code> Debug::FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode) {
   // used by the call site.
   if (code->is_inline_cache_stub()) {
     switch (code->kind()) {
+      case Code::CALL_IC:
+        return isolate->builtins()->CallICStub_DebugBreak();
+
       case Code::LOAD_IC:
         return isolate->builtins()->LoadIC_DebugBreak();
 
@@ -1688,11 +1699,7 @@ Handle<Code> Debug::FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode) {
   }
   if (code->kind() == Code::STUB) {
     ASSERT(code->major_key() == CodeStub::CallFunction);
-    if (code->has_function_cache()) {
-      return isolate->builtins()->CallFunctionStub_Recording_DebugBreak();
-    } else {
-      return isolate->builtins()->CallFunctionStub_DebugBreak();
-    }
+    return isolate->builtins()->CallFunctionStub_DebugBreak();
   }
 
   UNREACHABLE();
src/debug.h
index b76c512..a4220a2 100644 (file)
@@ -412,6 +412,7 @@ class Debug {
 
   // Code generator routines.
   static void GenerateSlot(MacroAssembler* masm);
+  static void GenerateCallICStubDebugBreak(MacroAssembler* masm);
   static void GenerateLoadICDebugBreak(MacroAssembler* masm);
   static void GenerateStoreICDebugBreak(MacroAssembler* masm);
   static void GenerateKeyedLoadICDebugBreak(MacroAssembler* masm);
@@ -419,7 +420,6 @@ class Debug {
   static void GenerateCompareNilICDebugBreak(MacroAssembler* masm);
   static void GenerateReturnDebugBreak(MacroAssembler* masm);
   static void GenerateCallFunctionStubDebugBreak(MacroAssembler* masm);
-  static void GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm);
   static void GenerateCallConstructStubDebugBreak(MacroAssembler* masm);
   static void GenerateCallConstructStubRecordDebugBreak(MacroAssembler* masm);
   static void GenerateSlotDebugBreak(MacroAssembler* masm);
@@ -431,9 +431,6 @@ class Debug {
   // called, it only gets returned to.
   static void GenerateFrameDropperLiveEdit(MacroAssembler* masm);
 
-  // Called from stub-cache.cc.
-  static void GenerateCallICDebugBreak(MacroAssembler* masm);
-
   // Describes how exactly a frame has been dropped from stack.
   enum FrameDropMode {
     // No frame has been dropped.
src/factory.h
index bbc5de9..a525fd1 100644 (file)
@@ -600,8 +600,6 @@ class Factory V8_FINAL {
     return Handle<String>(&isolate()->heap()->hidden_string_);
   }
 
-  Handle<FixedArray> NewTypeFeedbackVector(int slot_count);
-
   // Allocates a new SharedFunctionInfo object.
   Handle<SharedFunctionInfo> NewSharedFunctionInfo(
       Handle<String> name,
@@ -612,6 +610,9 @@ class Factory V8_FINAL {
       Handle<FixedArray> feedback_vector);
   Handle<SharedFunctionInfo> NewSharedFunctionInfo(Handle<String> name);
 
+  // Allocate a new type feedback vector
+  Handle<FixedArray> NewTypeFeedbackVector(int slot_count);
+
   // Allocates a new JSMessageObject object.
   Handle<JSMessageObject> NewJSMessageObject(
       Handle<String> type,
src/full-codegen.h
index 31d2c6d..167538e 100644 (file)
@@ -99,20 +99,20 @@ class FullCodeGenerator: public AstVisitor {
 
   // Platform-specific code size multiplier.
 #if V8_TARGET_ARCH_IA32
-  static const int kCodeSizeMultiplier = 100;
+  static const int kCodeSizeMultiplier = 105;
   static const int kBootCodeSizeMultiplier = 100;
 #elif V8_TARGET_ARCH_X64
-  static const int kCodeSizeMultiplier = 162;
+  static const int kCodeSizeMultiplier = 170;
   static const int kBootCodeSizeMultiplier = 140;
 #elif V8_TARGET_ARCH_ARM
-  static const int kCodeSizeMultiplier = 142;
+  static const int kCodeSizeMultiplier = 149;
   static const int kBootCodeSizeMultiplier = 110;
 #elif V8_TARGET_ARCH_ARM64
 // TODO(all): Copied ARM value. Check this is sensible for ARM64.
-  static const int kCodeSizeMultiplier = 142;
+  static const int kCodeSizeMultiplier = 149;
   static const int kBootCodeSizeMultiplier = 110;
 #elif V8_TARGET_ARCH_MIPS
-  static const int kCodeSizeMultiplier = 142;
+  static const int kCodeSizeMultiplier = 149;
   static const int kBootCodeSizeMultiplier = 120;
 #else
 #error Unsupported target architecture.
@@ -464,9 +464,9 @@ class FullCodeGenerator: public AstVisitor {
   void EmitReturnSequence();
 
   // Platform-specific code sequences for calls
-  void EmitCallWithStub(Call* expr);
-  void EmitCallWithIC(Call* expr);
-  void EmitKeyedCallWithIC(Call* expr, Expression* key);
+  void EmitCall(Call* expr, CallIC::CallType = CallIC::FUNCTION);
+  void EmitCallWithLoadIC(Call* expr);
+  void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);
 
   // Platform-specific code for inline runtime calls.
   InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id);
src/ia32/builtins-ia32.cc
index 3f77758..b3af2b2 100644 (file)
@@ -515,7 +515,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     if (is_construct) {
       // No type feedback cell is available
       __ mov(ebx, masm->isolate()->factory()->undefined_value());
-      CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
+      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
       __ CallStub(&stub);
     } else {
       ParameterCount actual(eax);
src/ia32/code-stubs-ia32.cc
index 3887f92..174ebbb 100644 (file)
@@ -2303,10 +2303,64 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 }
 
 
+static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
+  // Do not transform the receiver for strict mode functions.
+  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+  __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
+            1 << SharedFunctionInfo::kStrictModeBitWithinByte);
+  __ j(not_equal, cont);
+
+  // Do not transform the receiver for natives (shared already in ecx).
+  __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
+            1 << SharedFunctionInfo::kNativeBitWithinByte);
+  __ j(not_equal, cont);
+}
+
+
+static void EmitSlowCase(Isolate* isolate,
+                         MacroAssembler* masm,
+                         int argc,
+                         Label* non_function) {
+  // Check for function proxy.
+  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
+  __ j(not_equal, non_function);
+  __ pop(ecx);
+  __ push(edi);  // put proxy as additional argument under return address
+  __ push(ecx);
+  __ Move(eax, Immediate(argc + 1));
+  __ Move(ebx, Immediate(0));
+  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
+  {
+    Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
+    __ jmp(adaptor, RelocInfo::CODE_TARGET);
+  }
+
+  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
+  // of the original receiver from the call site).
+  __ bind(non_function);
+  __ mov(Operand(esp, (argc + 1) * kPointerSize), edi);
+  __ Move(eax, Immediate(argc));
+  __ Move(ebx, Immediate(0));
+  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
+  Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
+  __ jmp(adaptor, RelocInfo::CODE_TARGET);
+}
+
+
+static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
+  // Wrap the receiver and patch it back onto the stack.
+  { FrameScope frame_scope(masm, StackFrame::INTERNAL);
+    __ push(edi);
+    __ push(eax);
+    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+    __ pop(edi);
+  }
+  __ mov(Operand(esp, (argc + 1) * kPointerSize), eax);
+  __ jmp(cont);
+}
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
-  // ebx : feedback vector
-  // edx : (only if ebx is not the megamorphic symbol) slot in feedback
-  //       vector (Smi)
   // edi : the function to call
   Label slow, non_function, wrap, cont;
 
@@ -2317,14 +2371,6 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     // Goto slow case if we do not have a function.
     __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
     __ j(not_equal, &slow);
-
-    if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm);
-      // Type information was updated. Because we may call Array, which
-      // expects either undefined or an AllocationSite in ebx we need
-      // to set ebx to undefined.
-      __ mov(ebx, Immediate(isolate()->factory()->undefined_value()));
-    }
   }
 
   // Fast-case: Just invoke the function.
@@ -2332,16 +2378,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
 
   if (CallAsMethod()) {
     if (NeedsChecks()) {
-      // Do not transform the receiver for strict mode functions.
-      __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
-      __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
-                1 << SharedFunctionInfo::kStrictModeBitWithinByte);
-      __ j(not_equal, &cont);
-
-      // Do not transform the receiver for natives (shared already in ecx).
-      __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
-                1 << SharedFunctionInfo::kNativeBitWithinByte);
-      __ j(not_equal, &cont);
+      EmitContinueIfStrictOrNative(masm, &cont);
     }
 
     // Load the receiver from the stack.
@@ -2364,51 +2401,13 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
-    if (RecordCallTarget()) {
-      // If there is a call target cache, mark it megamorphic in the
-      // non-function case.  MegamorphicSentinel is an immortal immovable
-      // object (megamorphic symbol) so no write barrier is needed.
-      __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
-                          FixedArray::kHeaderSize),
-             Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
-    }
-    // Check for function proxy.
-    __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
-    __ j(not_equal, &non_function);
-    __ pop(ecx);
-    __ push(edi);  // put proxy as additional argument under return address
-    __ push(ecx);
-    __ Move(eax, Immediate(argc_ + 1));
-    __ Move(ebx, Immediate(0));
-    __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
-    {
-      Handle<Code> adaptor =
-          isolate()->builtins()->ArgumentsAdaptorTrampoline();
-      __ jmp(adaptor, RelocInfo::CODE_TARGET);
-    }
-
-    // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
-    // of the original receiver from the call site).
-    __ bind(&non_function);
-    __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
-    __ Move(eax, Immediate(argc_));
-    __ Move(ebx, Immediate(0));
-    __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
-    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
-    __ jmp(adaptor, RelocInfo::CODE_TARGET);
+    // (non_function is bound in EmitSlowCase)
+    EmitSlowCase(isolate(), masm, argc_, &non_function);
   }
 
   if (CallAsMethod()) {
     __ bind(&wrap);
-    // Wrap the receiver and patch it back onto the stack.
-    { FrameScope frame_scope(masm, StackFrame::INTERNAL);
-      __ push(edi);
-      __ push(eax);
-      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
-      __ pop(edi);
-    }
-    __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
-    __ jmp(&cont);
+    EmitWrapCase(masm, argc_, &cont);
   }
 }
 
@@ -2481,6 +2480,118 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 }
 
 
+static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
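+  // The feedback vector is reached through the SharedFunctionInfo of the
+  // function in the current JavaScript frame.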
+  __ mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+  __ mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
+  __ mov(vector, FieldOperand(vector,
+                              SharedFunctionInfo::kFeedbackVectorOffset));
+}
+
+
+void CallICStub::Generate(MacroAssembler* masm) {
+  // edi - function
+  // edx - slot id
+  Isolate* isolate = masm->isolate();
+  Label extra_checks_or_miss, slow_start;
+  Label slow, non_function, wrap, cont;
+  Label have_js_function;
+  int argc = state_.arg_count();
+  ParameterCount actual(argc);
+
+  EmitLoadTypeFeedbackVector(masm, ebx);
+
+  // The checks. First, does edi match the recorded monomorphic target?
+  __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size,
+                           FixedArray::kHeaderSize));
+  __ j(not_equal, &extra_checks_or_miss);
+
+  __ bind(&have_js_function);
+  if (state_.CallAsMethod()) {
+    EmitContinueIfStrictOrNative(masm, &cont);
+
+    // Load the receiver from the stack.
+    __ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
+
+    __ JumpIfSmi(eax, &wrap);
+
+    __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
+    __ j(below, &wrap);
+
+    __ bind(&cont);
+  }
+
+  __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
+
+  __ bind(&slow);
+  EmitSlowCase(isolate, masm, argc, &non_function);
+
+  if (state_.CallAsMethod()) {
+    __ bind(&wrap);
+    EmitWrapCase(masm, argc, &cont);
+  }
+
+  __ bind(&extra_checks_or_miss);
+  Label miss;
+
+  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
+                           FixedArray::kHeaderSize));
+  __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
+  __ j(equal, &slow_start);
+  __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
+  __ j(equal, &miss);
+
+  if (!FLAG_trace_ic) {
+    // We are going megamorphic, and we don't want to visit the runtime.
+    __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
+                        FixedArray::kHeaderSize),
+           Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
+    __ jmp(&slow_start);
+  }
+
+  // We are here because tracing is on or we are going monomorphic.
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+  // the slow case
+  __ bind(&slow_start);
+
+  // Check that the function really is a JavaScript function.
+  __ JumpIfSmi(edi, &non_function);
+
+  // Goto slow case if we do not have a function.
+  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
+  __ j(not_equal, &slow);
+  __ jmp(&have_js_function);
+
+  // Unreachable
+  __ int3();
+}
+
+
+void CallICStub::GenerateMiss(MacroAssembler* masm) {
+  // Get the receiver of the function from the stack; 1 ~ return address.
+  __ mov(ecx, Operand(esp, (state_.arg_count() + 1) * kPointerSize));
+
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Push the receiver and the function and feedback info.
+    __ push(ecx);
+    __ push(edi);
+    __ push(ebx);
+    __ push(edx);
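+    // The argument order matches CallIC_Miss: receiver, function, vector,
+    // slot.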
+
+    // Call the entry.
+    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
+                                               masm->isolate());
+    __ CallExternalReference(miss, 4);
+
+    // Move result to edi and exit the internal frame.
+    __ mov(edi, eax);
+  }
+}
+
+
 bool CEntryStub::NeedsImmovableCode() {
   return false;
 }
index cb12b0e..563199b 100644 (file)
@@ -172,6 +172,17 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
 }
 
 
+void Debug::GenerateCallICStubDebugBreak(MacroAssembler* masm) {
+  // Register state for CallICStub
+  // ----------- S t a t e -------------
+  //  -- edx    : type feedback slot (smi)
+  //  -- edi    : function
+  // -----------------------------------
+  Generate_DebugBreakCallHelper(masm, edx.bit() | edi.bit(),
+                                0, false);
+}
+
+
 void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) {
   // Register state for IC load call (from ic-ia32.cc).
   // ----------- S t a t e -------------
@@ -225,15 +236,6 @@ void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) {
 }
 
 
-void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
-  // Register state for keyed IC call call (from ic-ia32.cc)
-  // ----------- S t a t e -------------
-  //  -- ecx: name
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, ecx.bit(), 0, false);
-}
-
-
 void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
   // Register state just before return from JS function (from codegen-ia32.cc).
   // ----------- S t a t e -------------
@@ -252,18 +254,6 @@ void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) {
 }
 
 
-void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) {
-  // Register state for CallFunctionStub (from code-stubs-ia32.cc).
-  // ----------- S t a t e -------------
-  //  -- ebx: feedback array
-  //  -- edx: slot in feedback array
-  //  -- edi: function
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, ebx.bit() | edx.bit() | edi.bit(),
-                                0, false);
-}
-
-
 void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) {
   // Register state for CallConstructStub (from code-stubs-ia32.cc).
   // eax is the actual number of arguments not encoded as a smi see comment
index dd980b8..5696764 100644 (file)
@@ -2557,17 +2557,15 @@ void FullCodeGenerator::CallIC(Handle<Code> code,
 }
 
 
-
-
 // Code common for calls using the IC.
-void FullCodeGenerator::EmitCallWithIC(Call* expr) {
+void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();
 
-  CallFunctionFlags flags;
+  CallIC::CallType call_type = callee->IsVariableProxy()
+      ? CallIC::FUNCTION
+      : CallIC::METHOD;
   // Get the target function.
-  if (callee->IsVariableProxy()) {
+  if (call_type == CallIC::FUNCTION) {
     { StackValueContext context(this);
       EmitVariableLoad(callee->AsVariableProxy());
       PrepareForBailout(callee, NO_REGISTERS);
@@ -2575,7 +2573,6 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
     // Push undefined as receiver. This is patched in the method prologue if it
     // is a sloppy mode method.
     __ push(Immediate(isolate()->factory()->undefined_value()));
-    flags = NO_CALL_FUNCTION_FLAGS;
   } else {
     // Load the function from the receiver.
     ASSERT(callee->IsProperty());
@@ -2585,39 +2582,19 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
     // Push the target function under the receiver.
     __ push(Operand(esp, 0));
     __ mov(Operand(esp, kPointerSize), eax);
-    flags = CALL_AS_METHOD;
   }
 
-  // Load the arguments.
-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
-  }
-
-  // Record source position of the IC call.
-  SetSourcePosition(expr->position());
-  CallFunctionStub stub(isolate(), arg_count, flags);
-  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
-  RecordJSReturnSite(expr);
-
-  // Restore context register.
-  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, eax);
+  EmitCall(expr, call_type);
 }
 
 
 // Code common for calls using the IC.
-void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
-                                            Expression* key) {
+void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
+                                                Expression* key) {
   // Load the key.
   VisitForAccumulatorValue(key);
 
   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();
 
   // Load the function from the receiver.
   ASSERT(callee->IsProperty());
@@ -2631,29 +2608,12 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   __ push(Operand(esp, 0));
   __ mov(Operand(esp, kPointerSize), eax);
 
-  // Load the arguments.
-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
-  }
-
-  // Record source position of the IC call.
-  SetSourcePosition(expr->position());
-  CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
-  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
-  RecordJSReturnSite(expr);
-
-  // Restore context register.
-  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, eax);
+  EmitCall(expr, CallIC::METHOD);
 }
 
 
-void FullCodeGenerator::EmitCallWithStub(Call* expr) {
-  // Code common for calls using the call stub.
+void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
+  // Load the arguments.
   ZoneList<Expression*>* args = expr->arguments();
   int arg_count = args->length();
   { PreservePositionScope scope(masm()->positions_recorder());
@@ -2661,20 +2621,22 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
       VisitForStackValue(args->at(i));
     }
   }
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
-
-  __ LoadHeapObject(ebx, FeedbackVector());
-  __ mov(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
 
-  // Record call targets in unoptimized code.
-  CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
+  // Record source position of the IC call.
+  SetSourcePosition(expr->position());
+  Handle<Code> ic = CallIC::initialize_stub(
+      isolate(), arg_count, call_type);
+  __ Move(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
   __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
+  // Don't assign a type feedback id to the IC, since type feedback is provided
+  // by the vector above.
+  CallIC(ic);
 
   RecordJSReturnSite(expr);
+
   // Restore context register.
   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+
   context()->DropAndPlug(1, eax);
 }
 
@@ -2747,7 +2709,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     context()->DropAndPlug(1, eax);
 
   } else if (call_type == Call::GLOBAL_CALL) {
-    EmitCallWithIC(expr);
+    EmitCallWithLoadIC(expr);
 
   } else if (call_type == Call::LOOKUP_SLOT_CALL) {
     // Call to a lookup slot (dynamically introduced variable).
@@ -2783,7 +2745,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
 
     // The receiver is either the global receiver or an object found by
     // LoadContextSlot.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
 
   } else if (call_type == Call::PROPERTY_CALL) {
     Property* property = callee->AsProperty();
@@ -2791,9 +2753,9 @@ void FullCodeGenerator::VisitCall(Call* expr) {
       VisitForStackValue(property->obj());
     }
     if (property->key()->IsPropertyName()) {
-      EmitCallWithIC(expr);
+      EmitCallWithLoadIC(expr);
     } else {
-      EmitKeyedCallWithIC(expr, property->key());
+      EmitKeyedCallWithLoadIC(expr, property->key());
     }
 
   } else {
@@ -2804,7 +2766,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     }
     __ push(Immediate(isolate()->factory()->undefined_value()));
     // Emit function call.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
   }
 
 #ifdef DEBUG
@@ -2850,7 +2812,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ LoadHeapObject(ebx, FeedbackVector());
   __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));
 
-  CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
+  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
   __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(eax);
index 3a3d50f..e1dce51 100644 (file)
@@ -4265,7 +4265,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
 
   // No cell in ebx for construct type feedback in optimized code
   __ mov(ebx, isolate()->factory()->undefined_value());
-  CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
+  CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
   __ Move(eax, Immediate(instr->arity()));
   CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
 }
index 82aa218..6ecb462 100644 (file)
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -68,9 +68,11 @@ void IC::TraceIC(const char* type,
     }
     JavaScriptFrame::PrintTop(isolate(), stdout, false, true);
     ExtraICState extra_state = new_target->extra_ic_state();
-    const char* modifier =
-        GetTransitionMarkModifier(
-            KeyedStoreIC::GetKeyedAccessStoreMode(extra_state));
+    const char* modifier = "";
+    if (new_target->kind() == Code::KEYED_STORE_IC) {
+      modifier = GetTransitionMarkModifier(
+          KeyedStoreIC::GetKeyedAccessStoreMode(extra_state));
+    }
     PrintF(" (%c->%c%s)",
            TransitionMarkFromState(state()),
            TransitionMarkFromState(new_state),
@@ -390,6 +392,10 @@ void IC::PostPatching(Address address, Code* target, Code* old_target) {
       target->is_inline_cache_stub()) {
     int delta = ComputeTypeInfoCountDelta(old_target->ic_state(),
                                           target->ic_state());
+    // Call ICs don't have interesting state changes from this point
+    // of view.
+    ASSERT(target->kind() != Code::CALL_IC || delta == 0);
+
     // Not all Code objects have TypeFeedbackInfo.
     if (host->type_feedback_info()->IsTypeFeedbackInfo() && delta != 0) {
       TypeFeedbackInfo* info =
@@ -462,6 +468,8 @@ void IC::Clear(Isolate* isolate, Address address,
       return StoreIC::Clear(isolate, address, target, constant_pool);
     case Code::KEYED_STORE_IC:
       return KeyedStoreIC::Clear(isolate, address, target, constant_pool);
+    case Code::CALL_IC:
+      return CallIC::Clear(isolate, address, target, constant_pool);
     case Code::COMPARE_IC:
       return CompareIC::Clear(isolate, address, target, constant_pool);
     case Code::COMPARE_NIL_IC:
@@ -488,6 +496,15 @@ void KeyedLoadIC::Clear(Isolate* isolate,
 }
 
 
+void CallIC::Clear(Isolate* isolate,
+                   Address address,
+                   Code* target,
+                   ConstantPoolArray* constant_pool) {
+  // Currently, CallIC doesn't have state changes.
+  ASSERT(target->ic_state() == v8::internal::GENERIC);
+}
+
+
 void LoadIC::Clear(Isolate* isolate,
                    Address address,
                    Code* target,
@@ -1316,6 +1333,23 @@ MaybeHandle<Object> StoreIC::Store(Handle<Object> object,
 }
 
 
+void CallIC::State::Print(StringStream* stream) const {
+  stream->Add("(args(%d), ",
+              argc_);
+  stream->Add("%s, ",
+              call_type_ == CallIC::METHOD ? "METHOD" : "FUNCTION");
+}
+
+
+Handle<Code> CallIC::initialize_stub(Isolate* isolate,
+                                     int argc,
+                                     CallType call_type) {
+  CallICStub stub(isolate, State::DefaultCallState(argc, call_type));
+  Handle<Code> code = stub.GetCode();
+  return code;
+}
+
+
 Handle<Code> StoreIC::initialize_stub(Isolate* isolate,
                                       StrictMode strict_mode) {
   ExtraICState extra_state = ComputeExtraICState(strict_mode);
@@ -1780,6 +1814,47 @@ MaybeHandle<Object> KeyedStoreIC::Store(Handle<Object> object,
 }
 
 
+CallIC::State::State(ExtraICState extra_ic_state)
+    : argc_(ArgcBits::decode(extra_ic_state)),
+      call_type_(CallTypeBits::decode(extra_ic_state)) {
+}
+
+
+ExtraICState CallIC::State::GetExtraICState() const {
+  ExtraICState extra_ic_state =
+      ArgcBits::encode(argc_) |
+      CallTypeBits::encode(call_type_);
+  return extra_ic_state;
+}
+
+
+void CallIC::HandleMiss(Handle<Object> receiver,
+                        Handle<Object> function,
+                        Handle<FixedArray> vector,
+                        Handle<Smi> slot) {
+  State state(target()->extra_ic_state());
+  Object* feedback = vector->get(slot->value());
+
+  if (feedback->IsJSFunction() || !function->IsJSFunction()) {
+    // We are going generic.
+    ASSERT(!function->IsJSFunction() || *function != feedback);
+
+    vector->set(slot->value(),
+                *TypeFeedbackInfo::MegamorphicSentinel(isolate()),
+                SKIP_WRITE_BARRIER);
+    TRACE_GENERIC_IC(isolate(), "CallIC", "megamorphic");
+  } else {
+    // If we came here, feedback must be the uninitialized sentinel,
+    // and we are going monomorphic.
+    ASSERT(feedback == *TypeFeedbackInfo::UninitializedSentinel(isolate()));
+    Handle<JSFunction> js_function = Handle<JSFunction>::cast(function);
+    Handle<Object> name(js_function->shared()->name(), isolate());
+    TRACE_IC("CallIC", name);
+    vector->set(slot->value(), *function);
+  }
+}
+
+
 #undef TRACE_IC
 
 
@@ -1788,6 +1863,19 @@ MaybeHandle<Object> KeyedStoreIC::Store(Handle<Object> object,
 //
 
 // Used from ic-<arch>.cc.
+RUNTIME_FUNCTION(CallIC_Miss) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 4);
+  CallIC ic(isolate);
+  Handle<Object> receiver = args.at<Object>(0);
+  Handle<Object> function = args.at<Object>(1);
+  Handle<FixedArray> vector = args.at<FixedArray>(2);
+  Handle<Smi> slot = args.at<Smi>(3);
+  ic.HandleMiss(receiver, function, vector, slot);
+  return *function;
+}
+
+
 // Used from ic-<arch>.cc.
 RUNTIME_FUNCTION(LoadIC_Miss) {
   HandleScope scope(isolate);
index eefd653..b42b5fa 100644 (file)
--- a/src/ic.h
+++ b/src/ic.h
@@ -19,6 +19,7 @@ const int kMaxKeyedPolymorphism = 4;
 #define IC_UTIL_LIST(ICU)                             \
   ICU(LoadIC_Miss)                                    \
   ICU(KeyedLoadIC_Miss)                               \
+  ICU(CallIC_Miss)                                    \
   ICU(StoreIC_Miss)                                   \
   ICU(StoreIC_ArrayLength)                            \
   ICU(StoreIC_Slow)                                   \
@@ -104,6 +105,10 @@ class IC {
   bool IsStoreStub() const {
     return target()->is_store_stub() || target()->is_keyed_store_stub();
   }
+
+  bool IsCallStub() const {
+    return target()->is_call_stub();
+  }
 #endif
 
   // Determines which map must be used for keeping the code stub.
@@ -325,6 +330,78 @@ class IC_Utility {
 };
 
 
+class CallIC: public IC {
+ public:
+  enum CallType { METHOD, FUNCTION };
+
+  class State V8_FINAL BASE_EMBEDDED {
+   public:
+    explicit State(ExtraICState extra_ic_state);
+
+    static State DefaultCallState(int argc, CallType call_type) {
+      return State(argc, call_type);
+    }
+
+    static State MegamorphicCallState(int argc, CallType call_type) {
+      return State(argc, call_type);
+    }
+
+    InlineCacheState GetICState() const { return ::v8::internal::GENERIC; }
+
+    ExtraICState GetExtraICState() const;
+
+    static void GenerateAheadOfTime(
+        Isolate*, void (*Generate)(Isolate*, const State&));
+
+    int arg_count() const { return argc_; }
+    CallType call_type() const { return call_type_; }
+
+    bool CallAsMethod() const { return call_type_ == METHOD; }
+
+    void Print(StringStream* stream) const;
+
+    bool operator==(const State& other_state) const {
+      return (argc_ == other_state.argc_ &&
+              call_type_ == other_state.call_type_);
+    }
+
+    bool operator!=(const State& other_state) const {
+      return !(*this == other_state);
+    }
+
+   private:
+    State(int argc,
+          CallType call_type)
+        : argc_(argc),
+          call_type_(call_type) {
+    }
+
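+    // Encoding of the ExtraICState: the argument count occupies the low
+    // Code::kArgumentsBits bits, followed by a single bit for the call type.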
+    class ArgcBits: public BitField<int, 0, Code::kArgumentsBits> {};
+    class CallTypeBits: public BitField<CallType, Code::kArgumentsBits, 1> {};
+
+    const int argc_;
+    const CallType call_type_;
+  };
+
+  explicit CallIC(Isolate* isolate)
+      : IC(EXTRA_CALL_FRAME, isolate) {
+  }
+
+  void HandleMiss(Handle<Object> receiver,
+                  Handle<Object> function,
+                  Handle<FixedArray> vector,
+                  Handle<Smi> slot);
+
+  // Code generator routines.
+  static Handle<Code> initialize_stub(Isolate* isolate,
+                                      int argc,
+                                      CallType call_type);
+
+  static void Clear(Isolate* isolate, Address address, Code* target,
+                    ConstantPoolArray* constant_pool);
+};
+
+
 class LoadIC: public IC {
  public:
   // ExtraICState bits
index e4d35d6..5449541 100644 (file)
@@ -610,6 +610,7 @@ void FunctionInfoWrapper::SetInitialProperties(Handle<String> name,
                                                int end_position,
                                                int param_num,
                                                int literal_count,
+                                               int slot_count,
                                                int parent_index) {
   HandleScope scope(isolate());
   this->SetField(kFunctionNameOffset_, name);
@@ -617,6 +618,7 @@ void FunctionInfoWrapper::SetInitialProperties(Handle<String> name,
   this->SetSmiValueField(kEndPositionOffset_, end_position);
   this->SetSmiValueField(kParamNumOffset_, param_num);
   this->SetSmiValueField(kLiteralNumOffset_, literal_count);
+  this->SetSmiValueField(kSlotNumOffset_, slot_count);
   this->SetSmiValueField(kParentIndexOffset_, parent_index);
 }
 
@@ -647,6 +649,26 @@ Handle<Code> FunctionInfoWrapper::GetFunctionCode() {
 }
 
 
+Handle<FixedArray> FunctionInfoWrapper::GetFeedbackVector() {
+  Handle<Object> element = this->GetField(kSharedFunctionInfoOffset_);
+  Handle<FixedArray> result;
+  if (element->IsJSValue()) {
+    Handle<JSValue> value_wrapper = Handle<JSValue>::cast(element);
+    Handle<Object> raw_result = UnwrapJSValue(value_wrapper);
+    Handle<SharedFunctionInfo> shared =
+        Handle<SharedFunctionInfo>::cast(raw_result);
+    result = Handle<FixedArray>(shared->feedback_vector(), isolate());
+    CHECK_EQ(result->length(), GetSlotCount());
+  } else {
+    // Scripts may never have a SharedFunctionInfo created, so
+    // create a type feedback vector here.
+    int slot_count = GetSlotCount();
+    result = isolate()->factory()->NewTypeFeedbackVector(slot_count);
+  }
+  return result;
+}
+
+
 Handle<Object> FunctionInfoWrapper::GetCodeScopeInfo() {
   Handle<Object> element = this->GetField(kCodeScopeInfoOffset_);
   return UnwrapJSValue(Handle<JSValue>::cast(element));
@@ -687,6 +709,7 @@ class FunctionInfoListener {
     info.SetInitialProperties(fun->name(), fun->start_position(),
                               fun->end_position(), fun->parameter_count(),
                               fun->materialized_literal_count(),
+                              fun->slot_count(),
                               current_parent_index_);
     current_parent_index_ = len_;
     SetElementSloppy(result_, len_, info.GetJSArray());
@@ -1152,6 +1175,10 @@ void LiveEdit::ReplaceFunctionCode(
       shared_info->set_scope_info(ScopeInfo::cast(*code_scope_info));
     }
     shared_info->DisableOptimization(kLiveEdit);
+    // Update the type feedback vector
+    Handle<FixedArray> feedback_vector =
+        compile_info_wrapper.GetFeedbackVector();
+    shared_info->set_feedback_vector(*feedback_vector);
   }
 
   if (shared_info->debug_info()->IsDebugInfo()) {
index 6858941..5be63ac 100644 (file)
@@ -218,6 +218,7 @@ class FunctionInfoWrapper : public JSArrayBasedStruct<FunctionInfoWrapper> {
                             int end_position,
                             int param_num,
                             int literal_count,
+                            int slot_count,
                             int parent_index);
 
   void SetFunctionCode(Handle<Code> function_code,
@@ -239,6 +240,8 @@ class FunctionInfoWrapper : public JSArrayBasedStruct<FunctionInfoWrapper> {
 
   Handle<Code> GetFunctionCode();
 
+  Handle<FixedArray> GetFeedbackVector();
+
   Handle<Object> GetCodeScopeInfo();
 
   int GetStartPosition() {
@@ -247,6 +250,10 @@ class FunctionInfoWrapper : public JSArrayBasedStruct<FunctionInfoWrapper> {
 
   int GetEndPosition() { return this->GetSmiValueField(kEndPositionOffset_); }
 
+  int GetSlotCount() {
+    return this->GetSmiValueField(kSlotNumOffset_);
+  }
+
  private:
   static const int kFunctionNameOffset_ = 0;
   static const int kStartPositionOffset_ = 1;
@@ -258,7 +265,8 @@ class FunctionInfoWrapper : public JSArrayBasedStruct<FunctionInfoWrapper> {
   static const int kParentIndexOffset_ = 7;
   static const int kSharedFunctionInfoOffset_ = 8;
   static const int kLiteralNumOffset_ = 9;
-  static const int kSize_ = 10;
+  static const int kSlotNumOffset_ = 10;
+  static const int kSize_ = 11;
 
   friend class JSArrayBasedStruct<FunctionInfoWrapper>;
 };
index 5dc4687..ecacad1 100644 (file)
@@ -1853,6 +1853,10 @@ void Logger::LogCodeObject(Object* object) {
       description = "A load IC from the snapshot";
       tag = Logger::LOAD_IC_TAG;
       break;
+    case Code::CALL_IC:
+      description = "A call IC from the snapshot";
+      tag = Logger::CALL_IC_TAG;
+      break;
     case Code::STORE_IC:
       description = "A store IC from the snapshot";
       tag = Logger::STORE_IC_TAG;
index 0299e71..f307d91 100644 (file)
--- a/src/log.h
+++ b/src/log.h
@@ -121,6 +121,7 @@ struct TickSample;
   V(KEYED_STORE_POLYMORPHIC_IC_TAG, "KeyedStorePolymorphicIC")          \
   V(KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, "KeyedExternalArrayStoreIC")     \
   V(LAZY_COMPILE_TAG,               "LazyCompile")                      \
+  V(CALL_IC_TAG,                    "CallIC")                           \
   V(LOAD_IC_TAG,                    "LoadIC")                           \
   V(LOAD_POLYMORPHIC_IC_TAG,        "LoadPolymorphicIC")                \
   V(REG_EXP_TAG,                    "RegExp")                           \
index 2f3de9d..520a110 100644 (file)
@@ -4336,6 +4336,7 @@ bool Code::has_major_key() {
       kind() == LOAD_IC ||
       kind() == KEYED_LOAD_IC ||
       kind() == STORE_IC ||
+      kind() == CALL_IC ||
       kind() == KEYED_STORE_IC ||
       kind() == TO_BOOLEAN_IC;
 }
@@ -5799,7 +5800,7 @@ void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
 
 int Code::stub_info() {
   ASSERT(kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
-         kind() == BINARY_OP_IC || kind() == LOAD_IC);
+         kind() == BINARY_OP_IC || kind() == LOAD_IC || kind() == CALL_IC);
   return Smi::cast(raw_type_feedback_info())->value();
 }
 
@@ -5810,6 +5811,7 @@ void Code::set_stub_info(int value) {
          kind() == BINARY_OP_IC ||
          kind() == STUB ||
          kind() == LOAD_IC ||
+         kind() == CALL_IC ||
          kind() == KEYED_LOAD_IC ||
          kind() == STORE_IC ||
          kind() == KEYED_STORE_IC);
index 28c8e94..bb2e992 100644 (file)
@@ -286,6 +286,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
   // Monomorphic ICs are preserved when possible, but need to be flushed
   // when they might be keeping a Context alive, or when the heap is about
   // to be serialized.
+
   if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
       && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
           target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
index 1daf1eb..363d105 100644 (file)
@@ -10730,6 +10730,9 @@ void SharedFunctionInfo::AttachInitialMap(Map* map) {
 
 void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
   code()->ClearInlineCaches();
+  // If we clear ICs, we need to clear the type feedback vector too, since
+  // CallICs are synced with a feedback vector slot.
+  ClearTypeFeedbackInfo();
   set_ic_age(new_ic_age);
   if (code()->kind() == Code::FUNCTION) {
     code()->set_profiler_ticks(0);
index dc928a6..4f91688 100644 (file)
@@ -5240,6 +5240,7 @@ class Code: public HeapObject {
 #define IC_KIND_LIST(V) \
   V(LOAD_IC)            \
   V(KEYED_LOAD_IC)      \
+  V(CALL_IC)            \
   V(STORE_IC)           \
   V(KEYED_STORE_IC)     \
   V(BINARY_OP_IC)       \
@@ -5349,6 +5350,7 @@ class Code: public HeapObject {
   inline bool is_keyed_load_stub() { return kind() == KEYED_LOAD_IC; }
   inline bool is_store_stub() { return kind() == STORE_IC; }
   inline bool is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
+  inline bool is_call_stub() { return kind() == CALL_IC; }
   inline bool is_binary_op_stub() { return kind() == BINARY_OP_IC; }
   inline bool is_compare_ic_stub() { return kind() == COMPARE_IC; }
   inline bool is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; }
index 6d25d75..8e923af 100644 (file)
@@ -1057,7 +1057,9 @@ intptr_t PagedSpace::SizeOfFirstPage() {
         // upgraded to handle small pages.
         size = AreaSize();
       } else {
-        size = 480 * KB * FullCodeGenerator::kBootCodeSizeMultiplier / 100;
+        size = RoundUp(
+            480 * KB * FullCodeGenerator::kBootCodeSizeMultiplier / 100,
+            kPointerSize);
       }
       break;
     default:
index 4c9da40..b9ca952 100644 (file)
@@ -261,8 +261,6 @@ enum InlineCacheState {
 
 enum CallFunctionFlags {
   NO_CALL_FUNCTION_FLAGS,
-  // The call target is cached in the instruction stream.
-  RECORD_CALL_TARGET,
   CALL_AS_METHOD,
   // Always wrap the receiver and call to the JSFunction. Only use this flag
   // both the receiver type and the target method are statically known.
@@ -270,6 +268,13 @@ enum CallFunctionFlags {
 };
 
 
+enum CallConstructorFlags {
+  NO_CALL_CONSTRUCTOR_FLAGS,
+  // The call target is cached in the instruction stream.
+  RECORD_CONSTRUCTOR_TARGET
+};
+
+
 enum InlineCacheHolderFlag {
   OWN_MAP,  // For fast properties objects.
   PROTOTYPE_MAP  // For slow properties objects (except GlobalObjects).
index f35efd0..9e3b89a 100644 (file)
@@ -575,7 +575,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
       // No type feedback cell is available
       __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
       // Expects rdi to hold function pointer.
-      CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
+      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
       __ CallStub(&stub);
     } else {
       ParameterCount actual(rax);
index feb2281..546595a 100644 (file)
@@ -2141,13 +2141,77 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 }
 
 
+static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
+  // Do not transform the receiver for strict mode functions.
+  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+  __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset),
+           Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
+  __ j(not_equal, cont);
+
+  // Do not transform the receiver for natives.
+  // SharedFunctionInfo is already loaded into rcx.
+  __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
+           Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
+  __ j(not_equal, cont);
+}
+
+
+static void EmitSlowCase(Isolate* isolate,
+                         MacroAssembler* masm,
+                         StackArgumentsAccessor* args,
+                         int argc,
+                         Label* non_function) {
+  // Check for function proxy.
+  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
+  __ j(not_equal, non_function);
+  __ PopReturnAddressTo(rcx);
+  __ Push(rdi);  // put proxy as additional argument under return address
+  __ PushReturnAddressFrom(rcx);
+  __ Set(rax, argc + 1);
+  __ Set(rbx, 0);
+  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
+  {
+    Handle<Code> adaptor =
+        masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+    __ jmp(adaptor, RelocInfo::CODE_TARGET);
+  }
+
+  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
+  // of the original receiver from the call site).
+  __ bind(non_function);
+  __ movp(args->GetReceiverOperand(), rdi);
+  __ Set(rax, argc);
+  __ Set(rbx, 0);
+  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
+  Handle<Code> adaptor =
+      isolate->builtins()->ArgumentsAdaptorTrampoline();
+  __ Jump(adaptor, RelocInfo::CODE_TARGET);
+}
+
+
+static void EmitWrapCase(MacroAssembler* masm,
+                         StackArgumentsAccessor* args,
+                         Label* cont) {
+  // Wrap the receiver and patch it back onto the stack.
+  { FrameScope frame_scope(masm, StackFrame::INTERNAL);
+    __ Push(rdi);
+    __ Push(rax);
+    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+    __ Pop(rdi);
+  }
+  __ movp(args->GetReceiverOperand(), rax);
+  __ jmp(cont);
+}
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
-  // rbx : feedback vector
-  // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
-  //       vector (Smi)
   // rdi : the function to call
+
+  // wrap_and_call can only be true if we are compiling a monomorphic method.
+  Isolate* isolate = masm->isolate();
   Label slow, non_function, wrap, cont;
-  StackArgumentsAccessor args(rsp, argc_);
+  int argc = argc_;
+  StackArgumentsAccessor args(rsp, argc);
 
   if (NeedsChecks()) {
     // Check that the function really is a JavaScript function.
@@ -2156,35 +2220,16 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     // Goto slow case if we do not have a function.
     __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
     __ j(not_equal, &slow);
-
-    if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm);
-      // Type information was updated. Because we may call Array, which
-      // expects either undefined or an AllocationSite in rbx we need
-      // to set rbx to undefined.
-      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
-    }
   }
 
   // Fast-case: Just invoke the function.
-  ParameterCount actual(argc_);
+  ParameterCount actual(argc);
 
   if (CallAsMethod()) {
     if (NeedsChecks()) {
-      // Do not transform the receiver for strict mode functions.
-      __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
-      __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset),
-               Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
-      __ j(not_equal, &cont);
-
-      // Do not transform the receiver for natives.
-      // SharedFunctionInfo is already loaded into rcx.
-      __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
-               Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
-      __ j(not_equal, &cont);
+      EmitContinueIfStrictOrNative(masm, &cont);
     }
 
-
     // Load the receiver from the stack.
     __ movp(rax, args.GetReceiverOperand());
 
@@ -2199,59 +2244,18 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
 
     __ bind(&cont);
   }
+
   __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
 
   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
-    if (RecordCallTarget()) {
-      // If there is a call target cache, mark it megamorphic in the
-      // non-function case.  MegamorphicSentinel is an immortal immovable
-      // object (megamorphic symbol) so no write barrier is needed.
-      __ SmiToInteger32(rdx, rdx);
-      __ Move(FieldOperand(rbx, rdx, times_pointer_size,
-                           FixedArray::kHeaderSize),
-              TypeFeedbackInfo::MegamorphicSentinel(isolate()));
-      __ Integer32ToSmi(rdx, rdx);
-    }
-    // Check for function proxy.
-    __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
-    __ j(not_equal, &non_function);
-    __ PopReturnAddressTo(rcx);
-    __ Push(rdi);  // put proxy as additional argument under return address
-    __ PushReturnAddressFrom(rcx);
-    __ Set(rax, argc_ + 1);
-    __ Set(rbx, 0);
-    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
-    {
-      Handle<Code> adaptor =
-        isolate()->builtins()->ArgumentsAdaptorTrampoline();
-      __ jmp(adaptor, RelocInfo::CODE_TARGET);
-    }
-
-    // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
-    // of the original receiver from the call site).
-    __ bind(&non_function);
-    __ movp(args.GetReceiverOperand(), rdi);
-    __ Set(rax, argc_);
-    __ Set(rbx, 0);
-    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
-    Handle<Code> adaptor =
-        isolate()->builtins()->ArgumentsAdaptorTrampoline();
-    __ Jump(adaptor, RelocInfo::CODE_TARGET);
+    EmitSlowCase(isolate, masm, &args, argc, &non_function);
   }
 
   if (CallAsMethod()) {
     __ bind(&wrap);
-    // Wrap the receiver and patch it back onto the stack.
-    { FrameScope frame_scope(masm, StackFrame::INTERNAL);
-      __ Push(rdi);
-      __ Push(rax);
-      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
-      __ Pop(rdi);
-    }
-    __ movp(args.GetReceiverOperand(), rax);
-    __ jmp(&cont);
+    EmitWrapCase(masm, &args, &cont);
   }
 }
 
@@ -2322,6 +2326,120 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 }
 
 
+static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
+  __ movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+  __ movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
+  __ movp(vector, FieldOperand(vector,
+                               SharedFunctionInfo::kFeedbackVectorOffset));
+}
+
+
+void CallICStub::Generate(MacroAssembler* masm) {
+  // rdi - function
+  // rbx - vector
+  // rdx - slot id
+  Isolate* isolate = masm->isolate();
+  Label extra_checks_or_miss, slow_start;
+  Label slow, non_function, wrap, cont;
+  Label have_js_function;
+  int argc = state_.arg_count();
+  StackArgumentsAccessor args(rsp, argc);
+  ParameterCount actual(argc);
+
+  EmitLoadTypeFeedbackVector(masm, rbx);
+
+  // The checks. First, does rdi match the recorded monomorphic target?
+  __ SmiToInteger32(rdx, rdx);
+  __ cmpq(rdi, FieldOperand(rbx, rdx, times_pointer_size,
+                            FixedArray::kHeaderSize));
+  __ j(not_equal, &extra_checks_or_miss);
+
+  __ bind(&have_js_function);
+  if (state_.CallAsMethod()) {
+    EmitContinueIfStrictOrNative(masm, &cont);
+
+    // Load the receiver from the stack.
+    __ movp(rax, args.GetReceiverOperand());
+
+    __ JumpIfSmi(rax, &wrap);
+
+    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
+    __ j(below, &wrap);
+
+    __ bind(&cont);
+  }
+
+  __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
+
+  __ bind(&slow);
+  EmitSlowCase(isolate, masm, &args, argc, &non_function);
+
+  if (state_.CallAsMethod()) {
+    __ bind(&wrap);
+    EmitWrapCase(masm, &args, &cont);
+  }
+
+  __ bind(&extra_checks_or_miss);
+  Label miss;
+
+  __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
+                            FixedArray::kHeaderSize));
+  __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate));
+  __ j(equal, &slow_start);
+  __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
+  __ j(equal, &miss);
+
+  if (!FLAG_trace_ic) {
+    // We are going megamorphic, and we don't want to visit the runtime.
+    __ Move(FieldOperand(rbx, rdx, times_pointer_size,
+                         FixedArray::kHeaderSize),
+            TypeFeedbackInfo::MegamorphicSentinel(isolate));
+    __ jmp(&slow_start);
+  }
+
+  // We are here because tracing is on or we are going monomorphic.
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+  // the slow case
+  __ bind(&slow_start);
+  // Check that function is not a smi.
+  __ JumpIfSmi(rdi, &non_function);
+  // Check that function is a JSFunction.
+  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
+  __ j(not_equal, &slow);
+  __ jmp(&have_js_function);
+
+  // Unreachable
+  __ int3();
+}
+
+
+void CallICStub::GenerateMiss(MacroAssembler* masm) {
+  // Get the receiver of the function from the stack; 1 ~ return address.
+  __ movp(rcx, Operand(rsp, (state_.arg_count() + 1) * kPointerSize));
+
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Push the receiver and the function and feedback info.
+    __ Push(rcx);
+    __ Push(rdi);
+    __ Push(rbx);
+    __ Integer32ToSmi(rdx, rdx);
+    __ Push(rdx);
+
+    // Call the entry.
+    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
+                                               masm->isolate());
+    __ CallExternalReference(miss, 4);
+
+    // Move result to rdi and exit the internal frame.
+    __ movp(rdi, rax);
+  }
+}
+
+
 bool CEntryStub::NeedsImmovableCode() {
   return false;
 }
index 36a403b..dfe5ba1 100644 (file)
@@ -152,6 +152,16 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
 }
 
 
+void Debug::GenerateCallICStubDebugBreak(MacroAssembler* masm) {
+  // Register state for CallICStub
+  // ----------- S t a t e -------------
+  //  -- rdx    : type feedback slot (smi)
+  //  -- rdi    : function
+  // -----------------------------------
+  Generate_DebugBreakCallHelper(masm, rdx.bit() | rdi.bit(), 0, false);
+}
+
+
 void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) {
   // Register state for IC load call (from ic-x64.cc).
   // ----------- S t a t e -------------
@@ -205,15 +215,6 @@ void Debug::GenerateCompareNilICDebugBreak(MacroAssembler* masm) {
 }
 
 
-void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
-  // Register state for IC call call (from ic-x64.cc)
-  // ----------- S t a t e -------------
-  //  -- rcx: function name
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, rcx.bit(), 0, false);
-}
-
-
 void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
   // Register state just before return from JS function (from codegen-x64.cc).
   // ----------- S t a t e -------------
@@ -232,18 +233,6 @@ void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) {
 }
 
 
-void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) {
-  // Register state for CallFunctionStub (from code-stubs-x64.cc).
-  // ----------- S t a t e -------------
-  //  -- rdi : function
-  //  -- rbx: feedback array
-  //  -- rdx: slot in feedback array
-  // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, rbx.bit() | rdx.bit() | rdi.bit(),
-                                0, false);
-}
-
-
 void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) {
   // Register state for CallConstructStub (from code-stubs-x64.cc).
   // rax is the actual number of arguments not encoded as a smi, see comment
index 36361a8..1e3b87e 100644 (file)
@@ -2554,14 +2554,14 @@ void FullCodeGenerator::CallIC(Handle<Code> code,
 
 
 // Code common for calls using the IC.
-void FullCodeGenerator::EmitCallWithIC(Call* expr) {
+void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();
 
-  CallFunctionFlags flags;
-  // Get the target function;
-  if (callee->IsVariableProxy()) {
+  CallIC::CallType call_type = callee->IsVariableProxy()
+      ? CallIC::FUNCTION
+      : CallIC::METHOD;
+  // Get the target function.
+  if (call_type == CallIC::FUNCTION) {
     { StackValueContext context(this);
       EmitVariableLoad(callee->AsVariableProxy());
       PrepareForBailout(callee, NO_REGISTERS);
@@ -2569,7 +2569,6 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
     // Push undefined as receiver. This is patched in the method prologue if it
     // is a sloppy mode method.
     __ Push(isolate()->factory()->undefined_value());
-    flags = NO_CALL_FUNCTION_FLAGS;
   } else {
     // Load the function from the receiver.
     ASSERT(callee->IsProperty());
@@ -2579,40 +2578,19 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
     // Push the target function under the receiver.
     __ Push(Operand(rsp, 0));
     __ movp(Operand(rsp, kPointerSize), rax);
-    flags = CALL_AS_METHOD;
-  }
-
-  // Load the arguments.
-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
   }
 
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
-  CallFunctionStub stub(isolate(), arg_count, flags);
-  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
-
-  RecordJSReturnSite(expr);
-
-  // Restore context register.
-  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, rax);
+  EmitCall(expr, call_type);
 }
 
 
 // Common code for calls using the IC.
-void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
-                                            Expression* key) {
+void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
+                                                Expression* key) {
   // Load the key.
   VisitForAccumulatorValue(key);
 
   Expression* callee = expr->expression();
-  ZoneList<Expression*>* args = expr->arguments();
-  int arg_count = args->length();
 
   // Load the function from the receiver.
   ASSERT(callee->IsProperty());
@@ -2624,29 +2602,12 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
   __ Push(Operand(rsp, 0));
   __ movp(Operand(rsp, kPointerSize), rax);
 
-  // Load the arguments.
-  { PreservePositionScope scope(masm()->positions_recorder());
-    for (int i = 0; i < arg_count; i++) {
-      VisitForStackValue(args->at(i));
-    }
-  }
-
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
-  CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
-  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
-
-  RecordJSReturnSite(expr);
-  // Restore context register.
-  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
-
-  context()->DropAndPlug(1, rax);
+  EmitCall(expr, CallIC::METHOD);
 }
 
 
-void FullCodeGenerator::EmitCallWithStub(Call* expr) {
-  // Code common for calls using the call stub.
+void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
+  // Load the arguments.
   ZoneList<Expression*>* args = expr->arguments();
   int arg_count = args->length();
   { PreservePositionScope scope(masm()->positions_recorder());
@@ -2654,17 +2615,19 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
       VisitForStackValue(args->at(i));
     }
   }
-  // Record source position for debugger.
-  SetSourcePosition(expr->position());
 
-  __ Move(rbx, FeedbackVector());
+  // Record source position of the IC call.
+  SetSourcePosition(expr->position());
+  Handle<Code> ic = CallIC::initialize_stub(
+      isolate(), arg_count, call_type);
   __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
-
-  // Record call targets in unoptimized code.
-  CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
-  __ CallStub(&stub);
+  // Don't assign a type feedback id to the IC, since type feedback is provided
+  // by the vector above.
+  CallIC(ic);
+
   RecordJSReturnSite(expr);
+
   // Restore context register.
   __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
   // Discard the function left on TOS.
@@ -2741,7 +2704,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
     context()->DropAndPlug(1, rax);
   } else if (call_type == Call::GLOBAL_CALL) {
-    EmitCallWithIC(expr);
+    EmitCallWithLoadIC(expr);
 
   } else if (call_type == Call::LOOKUP_SLOT_CALL) {
     // Call to a lookup slot (dynamically introduced variable).
@@ -2778,16 +2741,16 @@ void FullCodeGenerator::VisitCall(Call* expr) {
 
     // The receiver is either the global receiver or an object found by
     // LoadContextSlot.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
   } else if (call_type == Call::PROPERTY_CALL) {
     Property* property = callee->AsProperty();
     { PreservePositionScope scope(masm()->positions_recorder());
       VisitForStackValue(property->obj());
     }
     if (property->key()->IsPropertyName()) {
-      EmitCallWithIC(expr);
+      EmitCallWithLoadIC(expr);
     } else {
-      EmitKeyedCallWithIC(expr, property->key());
+      EmitKeyedCallWithLoadIC(expr, property->key());
     }
   } else {
     ASSERT(call_type == Call::OTHER_CALL);
@@ -2797,7 +2760,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     }
     __ PushRoot(Heap::kUndefinedValueRootIndex);
     // Emit function call.
-    EmitCallWithStub(expr);
+    EmitCall(expr);
   }
 
 #ifdef DEBUG
@@ -2843,7 +2806,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ Move(rbx, FeedbackVector());
   __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
 
-  CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
+  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
   __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(rax);
index 654e092..421aba8 100644 (file)
@@ -3898,7 +3898,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   __ Set(rax, instr->arity());
   // No cell in ebx for construct type feedback in optimized code
   __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
-  CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
+  CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
   CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
 }
 
index a60e69f..49fde66 100644 (file)
@@ -32,6 +32,7 @@ Debug = debug.Debug
 
 eval("var something1 = 25; \n"
      + "var something2 = 2010; \n"
+     + "// Array(); \n"
      + "function ChooseAnimal() {\n"
      + "  return 'Cat';\n"
      + "} \n"
@@ -54,6 +55,13 @@ var new_source = script.source.replace("Cat", "Cap' + 'yb' + 'ara");
 var new_source = new_source.replace("25", "26");
 var new_source = new_source.replace("Help", "Hello");
 var new_source = new_source.replace("17", "18");
+// The call to Array() causes a change in the number of type feedback slots
+// for the script.
+//
+// TODO(mvstanton): For now, the inclusion of the Array() call at the top level
+// of the script causes us to visit a corner case, but I'd like to validate
+// correctness more explicitly.
+var new_source = new_source.replace("// Array", "Array");
 print("new source: " + new_source);
 
 var change_log = new Array();