Landing for Zaheer Ahmad.

author     antonm@chromium.org <antonm@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Fri, 4 Feb 2011 13:43:38 +0000 (13:43 +0000)
committer  antonm@chromium.org <antonm@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Fri, 4 Feb 2011 13:43:38 +0000 (13:43 +0000)
Direct call api functions (arm implementation)

See: http://codereview.chromium.org/6170001/

Review URL: http://codereview.chromium.org/6286078

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6639 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
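
Note (not part of the patch): the change teaches the ARM port to call an API function's native callback (v8's InvocationCallback, i.e. Handle<Value> f(const Arguments&)) directly from generated code instead of routing through the FastHandleApiCall builtin. A minimal, self-contained C++ sketch of the call shape involved; MockArguments, MockHandle and DemoCallback are stand-ins invented for this example, not v8 declarations:

#include <cstdio>

struct MockArguments { int length; };                 // stand-in for v8::Arguments
typedef void* MockHandle;                             // stand-in for v8::Handle<v8::Value>
typedef MockHandle (*MockInvocationCallback)(const MockArguments& args);

static MockHandle DemoCallback(const MockArguments& args) {
  std::printf("API callback invoked with %d JS arguments\n", args.length);
  return 0;  // empty handle; the caller substitutes undefined
}

int main() {
  MockArguments args = { 2 };
  MockInvocationCallback cb = &DemoCallback;
  cb(args);  // before this patch the ARM code reached the callback via a builtin
  return 0;
}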

15 files changed:
AUTHORS
src/arm/code-stubs-arm.cc
src/arm/code-stubs-arm.h
src/arm/macro-assembler-arm.cc
src/arm/macro-assembler-arm.h
src/arm/simulator-arm.cc
src/arm/simulator-arm.h
src/arm/stub-cache-arm.cc
src/assembler.cc
src/assembler.h
src/code-stubs.h
src/heap.cc
src/heap.h
src/top.h
test/cctest/test-api.cc

diff --git a/AUTHORS b/AUTHORS
index da86488..1b756ca 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -26,6 +26,7 @@ Kun Zhang <zhangk@codeaurora.org>
 Matt Hanselman <mjhanselman@gmail.com>
 Martyn Capewell <martyn.capewell@arm.com>
 Michael Smith <mike@w3.org>
+Mike Gilbert <floppymaster@gmail.com>
 Paolo Giarrusso <p.giarrusso@gmail.com>
 Patrick Gansterer <paroga@paroga.com>
 Rafal Krypa <rafal@krypa.net>
@@ -35,4 +36,4 @@ Ryan Dahl <coldredlemur@gmail.com>
 Sanjoy Das <sanjoy@playingwithpointers.com>
 Subrato K De <subratokde@codeaurora.org>
 Vlad Burlik <vladbph@gmail.com>
-Mike Gilbert <floppymaster@gmail.com>
+Zaheer Ahmad <zahmad@codeaurora.org>
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index aa8ebb7..437dfd2 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -3501,9 +3501,17 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   // this by performing a garbage collection and retrying the
   // builtin once.
 
+  // Compute the argv pointer in a callee-saved register.
+  __ add(r6, sp, Operand(r0, LSL, kPointerSizeLog2));
+  __ sub(r6, r6, Operand(kPointerSize));
+
   // Enter the exit frame that transitions from JavaScript to C++.
   __ EnterExitFrame(save_doubles_);
 
+  // Setup argc and the builtin function in callee-saved registers.
+  __ mov(r4, Operand(r0));
+  __ mov(r5, Operand(r1));
+
   // r4: number of arguments (C callee-saved)
   // r5: pointer to builtin function (C callee-saved)
   // r6: pointer to first argument (C callee-saved)
@@ -5906,6 +5914,23 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
 }
 
 
+void DirectCEntryStub::Generate(MacroAssembler* masm) {
+  __ ldr(pc, MemOperand(sp, 0));
+}
+
+
+void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
+                                    ApiFunction *function) {
+  __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
+                     RelocInfo::CODE_TARGET));
+  // Push return address (accessible to GC through exit frame pc).
+  __ mov(r2,
+         Operand(ExternalReference(function, ExternalReference::DIRECT_CALL)));
+  __ str(pc, MemOperand(sp, 0));
+  __ Jump(r2);  // Call the api function.
+}
+
+
 void GenerateFastPixelArrayLoad(MacroAssembler* masm,
                                 Register receiver,
                                 Register key,
diff --git a/src/arm/code-stubs-arm.h b/src/arm/code-stubs-arm.h
index 0b1c59e..bf7d635 100644
--- a/src/arm/code-stubs-arm.h
+++ b/src/arm/code-stubs-arm.h
@@ -571,6 +571,24 @@ class RegExpCEntryStub: public CodeStub {
 };
 
 
+// Trampoline stub to call into native code. To call safely into native code
+// in the presence of a compacting GC (which can move code objects), we need
+// to keep the code that called into native code pinned in memory. Currently
+// the simplest approach is to generate such a stub early enough that it can
+// never be moved by the GC.
+class DirectCEntryStub: public CodeStub {
+ public:
+  DirectCEntryStub() {}
+  void Generate(MacroAssembler* masm);
+  void GenerateCall(MacroAssembler* masm, ApiFunction *function);
+
+ private:
+  Major MajorKey() { return DirectCEntry; }
+  int MinorKey() { return 0; }
+  const char* GetName() { return "DirectCEntryStub"; }
+};
+
+
 // Generate code to load an element from a pixel array. The receiver is
 // assumed to not be a smi and to have elements, the caller must guarantee this
 // precondition. If the receiver does not have elements that are pixel arrays,
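
Aside (not from the patch): the comment above is the heart of the change. Because a compacting GC may move the code object that performed the native call, the return address is kept in a stack slot of the exit frame, where the GC can update it, and the never-moving DirectCEntryStub returns through that slot (its whole body is ldr pc, [sp, #0]; see the code-stubs-arm.cc hunk above). A rough plain-C++ analogy of returning through a rewritable slot rather than a fixed address; everything here is illustrative, not V8 code:

#include <cstdio>

static void (*return_slot)();   // stands in for the return-address slot on the exit frame

static void relocated_return_point() { std::printf("returned through the slot\n"); }

// The ARM stub does the equivalent of this: fetch the (possibly GC-updated)
// return target from memory and jump to it.
static void trampoline() {
  return_slot();
}

int main() {
  return_slot = &relocated_return_point;  // the GC may rewrite this slot if code moves
  trampoline();
  return 0;
}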
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 86ebcf5..c11d664 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -632,11 +632,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
 }
 
 
-void MacroAssembler::EnterExitFrame(bool save_doubles) {
-  // Compute the argv pointer in a callee-saved register.
-  add(r6, sp, Operand(r0, LSL, kPointerSizeLog2));
-  sub(r6, r6, Operand(kPointerSize));
-
+void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
   // Setup the frame structure on the stack.
   ASSERT_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
   ASSERT_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
@@ -658,10 +654,6 @@ void MacroAssembler::EnterExitFrame(bool save_doubles) {
   mov(ip, Operand(ExternalReference(Top::k_context_address)));
   str(cp, MemOperand(ip));
 
-  // Setup argc and the builtin function in callee-saved registers.
-  mov(r4, Operand(r0));
-  mov(r5, Operand(r1));
-
   // Optionally save all double registers.
   if (save_doubles) {
     sub(sp, sp, Operand(DwVfpRegister::kNumRegisters * kDoubleSize));
@@ -675,10 +667,10 @@ void MacroAssembler::EnterExitFrame(bool save_doubles) {
     // since the sp slot and code slot were pushed after the fp.
   }
 
-  // Reserve place for the return address and align the frame preparing for
-  // calling the runtime function.
+  // Reserve place for the return address and stack space and align the frame
+  // preparing for calling the runtime function.
   const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
-  sub(sp, sp, Operand(kPointerSize));
+  sub(sp, sp, Operand((stack_space + 1) * kPointerSize));
   if (frame_alignment > 0) {
     ASSERT(IsPowerOf2(frame_alignment));
     and_(sp, sp, Operand(-frame_alignment));
@@ -1475,17 +1467,115 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
 
 
 void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
-  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
+  ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
   Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
 }
 
 
 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
-  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
+  ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
   Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
 }
 
 
+MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, Condition cond) {
+  ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
+  Object* result;
+  { MaybeObject* maybe_result = stub->TryGetCode();
+    if (!maybe_result->ToObject(&result)) return maybe_result;
+  }
+  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
+  return result;
+}
+
+
+static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
+  return ref0.address() - ref1.address();
+}
+
+
+MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
+    ApiFunction* function, int stack_space) {
+  ExternalReference next_address =
+      ExternalReference::handle_scope_next_address();
+  const int kNextOffset = 0;
+  const int kLimitOffset = AddressOffset(
+      ExternalReference::handle_scope_limit_address(),
+      next_address);
+  const int kLevelOffset = AddressOffset(
+      ExternalReference::handle_scope_level_address(),
+      next_address);
+
+  // Allocate HandleScope in callee-save registers.
+  mov(r7, Operand(next_address));
+  ldr(r4, MemOperand(r7, kNextOffset));
+  ldr(r5, MemOperand(r7, kLimitOffset));
+  ldr(r6, MemOperand(r7, kLevelOffset));
+  add(r6, r6, Operand(1));
+  str(r6, MemOperand(r7, kLevelOffset));
+
+  // Native call returns to the DirectCEntry stub which redirects to the
+  // return address pushed on stack (could have moved after GC).
+  // DirectCEntry stub itself is generated early and never moves.
+  DirectCEntryStub stub;
+  stub.GenerateCall(this, function);
+
+  Label promote_scheduled_exception;
+  Label delete_allocated_handles;
+  Label leave_exit_frame;
+
+  // If result is non-zero, dereference to get the result value
+  // otherwise set it to undefined.
+  cmp(r0, Operand(0));
+  LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
+  ldr(r0, MemOperand(r0), ne);
+
+  // No more valid handles (the result handle was the last one). Restore
+  // previous handle scope.
+  str(r4, MemOperand(r7, kNextOffset));
+  if (FLAG_debug_code) {
+    ldr(r1, MemOperand(r7, kLevelOffset));
+    cmp(r1, r6);
+    Check(eq, "Unexpected level after return from api call");
+  }
+  sub(r6, r6, Operand(1));
+  str(r6, MemOperand(r7, kLevelOffset));
+  ldr(ip, MemOperand(r7, kLimitOffset));
+  cmp(r5, ip);
+  b(ne, &delete_allocated_handles);
+
+  // Check if the function scheduled an exception.
+  bind(&leave_exit_frame);
+  LoadRoot(r4, Heap::kTheHoleValueRootIndex);
+  mov(ip, Operand(ExternalReference::scheduled_exception_address()));
+  ldr(r5, MemOperand(ip));
+  cmp(r4, r5);
+  b(ne, &promote_scheduled_exception);
+
+  // LeaveExitFrame expects unwind space to be in r4.
+  mov(r4, Operand(stack_space));
+  LeaveExitFrame(false);
+
+  bind(&promote_scheduled_exception);
+  MaybeObject* result = TryTailCallExternalReference(
+      ExternalReference(Runtime::kPromoteScheduledException), 0, 1);
+  if (result->IsFailure()) {
+    return result;
+  }
+
+  // HandleScope limit has changed. Delete allocated extensions.
+  bind(&delete_allocated_handles);
+  str(r5, MemOperand(r7, kLimitOffset));
+  mov(r4, r0);
+  PrepareCallCFunction(0, r5);
+  CallCFunction(ExternalReference::delete_handle_scope_extensions(), 0);
+  mov(r0, r4);
+  jmp(&leave_exit_frame);
+
+  return result;
+}
+
+
 void MacroAssembler::IllegalOperation(int num_arguments) {
   if (num_arguments > 0) {
     add(sp, sp, Operand(num_arguments * kPointerSize));
@@ -1740,6 +1830,17 @@ void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
 }
 
 
+MaybeObject* MacroAssembler::TryTailCallExternalReference(
+    const ExternalReference& ext, int num_arguments, int result_size) {
+  // TODO(1236192): Most runtime routines don't need the number of
+  // arguments passed in because it is constant. At some point we
+  // should remove this need and make the runtime routine entry code
+  // smarter.
+  mov(r0, Operand(num_arguments));
+  return TryJumpToExternalReference(ext);
+}
+
+
 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                      int num_arguments,
                                      int result_size) {
@@ -1758,6 +1859,18 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
 }
 
 
+MaybeObject* MacroAssembler::TryJumpToExternalReference(
+    const ExternalReference& builtin) {
+#if defined(__thumb__)
+  // Thumb mode builtin.
+  ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
+#endif
+  mov(r1, Operand(builtin));
+  CEntryStub stub(1);
+  return TryTailCallStub(&stub);
+}
+
+
 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                    InvokeJSFlags flags,
                                    PostCallGenerator* post_call_generator) {
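
Aside (not from the patch): TryCallApiFunctionAndReturn above wraps the direct call in HandleScope bookkeeping held in callee-saved registers (r4 = next, r5 = limit, r6 = level, r7 = base address). A simplified, self-contained C++ model of that sequence; the struct and helper names below are stand-ins that only mirror the fields the ARM code touches:

#include <cstdio>

struct HandleScopeData {   // mirrors the next/limit/level words the stub reads
  void** next;
  void** limit;
  int    level;
};

static void DeleteExtensions(HandleScopeData*) {
  // Stands in for ExternalReference::delete_handle_scope_extensions().
  std::printf("limit changed: deleting handle scope extensions\n");
}

static void CallApiFunctionAndReturn(HandleScopeData* scope, void (*api_call)()) {
  void** saved_next  = scope->next;
  void** saved_limit = scope->limit;
  scope->level++;                         // ldr/add/str on kLevelOffset

  api_call();                             // reaches the callback via DirectCEntryStub

  scope->next = saved_next;               // drop handles created by the callee
  scope->level--;
  if (scope->limit != saved_limit) {      // callee allocated extra handle blocks
    scope->limit = saved_limit;           // restore the limit, then free the blocks
    DeleteExtensions(scope);
  }
  // The real code additionally checks scheduled_exception_address() and
  // tail-calls Runtime::kPromoteScheduledException before LeaveExitFrame.
}

static void FakeApiCall() { std::printf("native callback ran\n"); }

int main() {
  HandleScopeData scope = { 0, 0, 0 };
  CallApiFunctionAndReturn(&scope, &FakeApiCall);
  return 0;
}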
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index 80986b1..c9ffde8 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -287,10 +287,8 @@ class MacroAssembler: public Assembler {
   void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
 
   // Enter exit frame.
-  // Expects the number of arguments in register r0 and
-  // the builtin function to call in register r1. Exits with argc in
-  // r4, argv in r6, and and the builtin function to call in r5.
-  void EnterExitFrame(bool save_doubles);
+  // stack_space - extra stack space, used for alignment before call to C.
+  void EnterExitFrame(bool save_doubles, int stack_space = 0);
 
   // Leave the current exit frame. Expects the return value in r0.
   void LeaveExitFrame(bool save_doubles);
@@ -616,6 +614,12 @@ class MacroAssembler: public Assembler {
   // Call a code stub.
   void TailCallStub(CodeStub* stub, Condition cond = al);
 
+  // Tail call a code stub (jump) and return the code object called.  Try to
+  // generate the code if necessary.  Do not perform a GC but instead return
+  // a retry after GC failure.
+  MUST_USE_RESULT MaybeObject* TryTailCallStub(CodeStub* stub,
+                                               Condition cond = al);
+
   // Call a runtime routine.
   void CallRuntime(Runtime::Function* f, int num_arguments);
   void CallRuntimeSaveDoubles(Runtime::FunctionId id);
@@ -634,6 +638,12 @@ class MacroAssembler: public Assembler {
                                  int num_arguments,
                                  int result_size);
 
+  // Tail call of a runtime routine (jump). Try to generate the code if
+  // necessary. Do not perform a GC but instead return a retry after GC
+  // failure.
+  MUST_USE_RESULT MaybeObject* TryTailCallExternalReference(
+      const ExternalReference& ext, int num_arguments, int result_size);
+
   // Convenience function: tail call a runtime routine (jump).
   void TailCallRuntime(Runtime::FunctionId fid,
                        int num_arguments,
@@ -657,9 +667,18 @@ class MacroAssembler: public Assembler {
   void CallCFunction(ExternalReference function, int num_arguments);
   void CallCFunction(Register function, int num_arguments);
 
+  // Calls an API function. Allocates HandleScope, extracts returned value
+  // from handle and propagates exceptions. Restores context.
+  // stack_space - space to be unwound on exit (includes the call js
+  // arguments space and the additional space allocated for the fast call).
+  MaybeObject* TryCallApiFunctionAndReturn(ApiFunction* function,
+                                           int stack_space);
+
   // Jump to a runtime routine.
   void JumpToExternalReference(const ExternalReference& builtin);
 
+  MaybeObject* TryJumpToExternalReference(const ExternalReference& ext);
+
   // Invoke specified builtin JavaScript function. Adds an entry to
   // the unresolved list if the name does not resolve.
   void InvokeBuiltin(Builtins::JavaScript id,
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index fcefdee..8104747 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -744,10 +744,10 @@ Simulator::Simulator() {
 // offset from the svc instruction so the simulator knows what to call.
 class Redirection {
  public:
-  Redirection(void* external_function, bool fp_return)
+  Redirection(void* external_function, ExternalReference::Type type)
       : external_function_(external_function),
         swi_instruction_(al | (0xf*B24) | kCallRtRedirected),
-        fp_return_(fp_return),
+        type_(type),
         next_(list_) {
     Simulator::current()->
         FlushICache(reinterpret_cast<void*>(&swi_instruction_),
@@ -760,14 +760,15 @@ class Redirection {
   }
 
   void* external_function() { return external_function_; }
-  bool fp_return() { return fp_return_; }
+  ExternalReference::Type type() { return type_; }
 
-  static Redirection* Get(void* external_function, bool fp_return) {
+  static Redirection* Get(void* external_function,
+                          ExternalReference::Type type) {
     Redirection* current;
     for (current = list_; current != NULL; current = current->next_) {
       if (current->external_function_ == external_function) return current;
     }
-    return new Redirection(external_function, fp_return);
+    return new Redirection(external_function, type);
   }
 
   static Redirection* FromSwiInstruction(Instruction* swi_instruction) {
@@ -780,7 +781,7 @@ class Redirection {
  private:
   void* external_function_;
   uint32_t swi_instruction_;
-  bool fp_return_;
+  ExternalReference::Type type_;
   Redirection* next_;
   static Redirection* list_;
 };
@@ -790,8 +791,8 @@ Redirection* Redirection::list_ = NULL;
 
 
 void* Simulator::RedirectExternalReference(void* external_function,
-                                           bool fp_return) {
-  Redirection* redirection = Redirection::Get(external_function, fp_return);
+                                           ExternalReference::Type type) {
+  Redirection* redirection = Redirection::Get(external_function, type);
   return redirection->address_of_swi_instruction();
 }
 
@@ -1528,6 +1529,9 @@ typedef double (*SimulatorRuntimeFPCall)(int32_t arg0,
                                          int32_t arg2,
                                          int32_t arg3);
 
+// This signature supports direct calls into an API function's native callback
+// (refer to InvocationCallback in v8.h).
+typedef v8::Handle<v8::Value> (*SimulatorRuntimeApiCall)(int32_t arg0);
 
 // Software interrupt instructions are used by the simulator to call into the
 // C-based V8 runtime.
@@ -1550,9 +1554,9 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
       // This is dodgy but it works because the C entry stubs are never moved.
       // See comment in codegen-arm.cc and bug 1242173.
       int32_t saved_lr = get_register(lr);
-      if (redirection->fp_return()) {
-        intptr_t external =
-            reinterpret_cast<intptr_t>(redirection->external_function());
+      intptr_t external =
+          reinterpret_cast<intptr_t>(redirection->external_function());
+      if (redirection->type() == ExternalReference::FP_RETURN_CALL) {
         SimulatorRuntimeFPCall target =
             reinterpret_cast<SimulatorRuntimeFPCall>(external);
         if (::v8::internal::FLAG_trace_sim || !stack_aligned) {
@@ -1568,9 +1572,28 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
         CHECK(stack_aligned);
         double result = target(arg0, arg1, arg2, arg3);
         SetFpResult(result);
+      } else if (redirection->type() == ExternalReference::DIRECT_CALL) {
+        SimulatorRuntimeApiCall target =
+            reinterpret_cast<SimulatorRuntimeApiCall>(external);
+        if (::v8::internal::FLAG_trace_sim || !stack_aligned) {
+          PrintF(
+              "Call to host function at %p args %08x",
+              FUNCTION_ADDR(target),
+              arg0);
+          if (!stack_aligned) {
+            PrintF(" with unaligned stack %08x\n", get_register(sp));
+          }
+          PrintF("\n");
+        }
+        CHECK(stack_aligned);
+        v8::Handle<v8::Value> result = target(arg0);
+        if (::v8::internal::FLAG_trace_sim) {
+          PrintF("Returned %p\n", reinterpret_cast<void *>(*result));
+        }
+        set_register(r0, (int32_t) *result);
       } else {
-        intptr_t external =
-            reinterpret_cast<int32_t>(redirection->external_function());
+        // builtin call.
+        ASSERT(redirection->type() == ExternalReference::BUILTIN_CALL);
         SimulatorRuntimeCall target =
             reinterpret_cast<SimulatorRuntimeCall>(external);
         if (::v8::internal::FLAG_trace_sim || !stack_aligned) {
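
Aside (not from the patch): the simulator hunk above replaces the old fp_return flag with ExternalReference::Type, because the software-interrupt handler now has to choose between three unrelated C signatures. A condensed, self-contained sketch of that dispatch; the function-pointer casts mirror what the simulator itself does, and the helper names and simplified FP signature are made up for the example:

#include <cstdio>
#include <cstdint>

enum Type { BUILTIN_CALL, FP_RETURN_CALL, DIRECT_CALL };

typedef intptr_t (*RuntimeCall)(intptr_t, intptr_t, intptr_t, intptr_t);
typedef double   (*RuntimeFPCall)(double, double);  // simplified: the simulator passes raw register words
typedef void*    (*RuntimeApiCall)(intptr_t);       // Handle<Value> f(Arguments&)

static intptr_t Builtin(intptr_t a, intptr_t, intptr_t, intptr_t) { return a + 1; }
static double   FpOp(double a, double b) { return a * b; }
static void*    ApiCallback(intptr_t) { return 0; }  // empty handle

static void Dispatch(Type type, void* external, intptr_t* regs) {
  if (type == FP_RETURN_CALL) {
    double result = reinterpret_cast<RuntimeFPCall>(external)(1.5, 2.0);
    std::printf("fp result %f\n", result);               // real code: SetFpResult(result)
  } else if (type == DIRECT_CALL) {
    void* result = reinterpret_cast<RuntimeApiCall>(external)(regs[0]);
    regs[0] = reinterpret_cast<intptr_t>(result);        // real code: set_register(r0, *result)
  } else {                                               // BUILTIN_CALL
    regs[0] = reinterpret_cast<RuntimeCall>(external)(regs[0], regs[1], regs[2], regs[3]);
  }
}

int main() {
  intptr_t regs[4] = { 41, 0, 0, 0 };
  Dispatch(BUILTIN_CALL,   reinterpret_cast<void*>(&Builtin), regs);
  Dispatch(FP_RETURN_CALL, reinterpret_cast<void*>(&FpOp), regs);
  Dispatch(DIRECT_CALL,    reinterpret_cast<void*>(&ApiCallback), regs);
  std::printf("r0 = %ld\n", static_cast<long>(regs[0]));
  return 0;
}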
diff --git a/src/arm/simulator-arm.h b/src/arm/simulator-arm.h
index 40a3fce..5256ae3 100644
--- a/src/arm/simulator-arm.h
+++ b/src/arm/simulator-arm.h
@@ -79,6 +79,7 @@ class SimulatorStack : public v8::internal::AllStatic {
 
 #include "constants-arm.h"
 #include "hashmap.h"
+#include "assembler.h"
 
 namespace v8 {
 namespace internal {
@@ -285,8 +286,9 @@ class Simulator {
   static CachePage* GetCachePage(void* page);
 
   // Runtime call support.
-  static void* RedirectExternalReference(void* external_function,
-                                         bool fp_return);
+  static void* RedirectExternalReference(
+      void* external_function,
+      v8::internal::ExternalReference::Type type);
 
   // For use in calls that take two double values, constructed from r0, r1, r2
   // and r3.
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 5c5d245..9ef6115 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -575,72 +575,94 @@ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
   __ CallStub(&stub);
 }
 
+static const int kFastApiCallArguments = 3;
 
 // Reserves space for the extra arguments to FastHandleApiCall in the
 // caller's frame.
 //
-// These arguments are set by CheckPrototypes and GenerateFastApiCall.
+// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
 static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                        Register scratch) {
   __ mov(scratch, Operand(Smi::FromInt(0)));
-  __ push(scratch);
-  __ push(scratch);
-  __ push(scratch);
-  __ push(scratch);
+  for (int i = 0; i < kFastApiCallArguments; i++) {
+    __ push(scratch);
+  }
 }
 
 
 // Undoes the effects of ReserveSpaceForFastApiCall.
 static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
-  __ Drop(4);
+  __ Drop(kFastApiCallArguments);
 }
 
 
-// Generates call to FastHandleApiCall builtin.
-static void GenerateFastApiCall(MacroAssembler* masm,
-                                const CallOptimization& optimization,
-                                int argc) {
+static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
+                                      const CallOptimization& optimization,
+                                      int argc) {
+  // ----------- S t a t e -------------
+  //  -- sp[0]              : holder (set by CheckPrototypes)
+  //  -- sp[4]              : callee js function
+  //  -- sp[8]              : call data
+  //  -- sp[12]             : last js argument
+  //  -- ...
+  //  -- sp[(argc + 3) * 4] : first js argument
+  //  -- sp[(argc + 4) * 4] : receiver
+  // -----------------------------------
   // Get the function and setup the context.
   JSFunction* function = optimization.constant_function();
   __ mov(r5, Operand(Handle<JSFunction>(function)));
   __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));
 
   // Pass the additional arguments FastHandleApiCall expects.
-  bool info_loaded = false;
-  Object* callback = optimization.api_call_info()->callback();
-  if (Heap::InNewSpace(callback)) {
-    info_loaded = true;
-    __ Move(r0, Handle<CallHandlerInfo>(optimization.api_call_info()));
-    __ ldr(r7, FieldMemOperand(r0, CallHandlerInfo::kCallbackOffset));
-  } else {
-    __ Move(r7, Handle<Object>(callback));
-  }
   Object* call_data = optimization.api_call_info()->data();
+  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
   if (Heap::InNewSpace(call_data)) {
-    if (!info_loaded) {
-      __ Move(r0, Handle<CallHandlerInfo>(optimization.api_call_info()));
-    }
+    __ Move(r0, api_call_info_handle);
     __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
   } else {
     __ Move(r6, Handle<Object>(call_data));
   }
+  // Store js function and call data.
+  __ stm(ib, sp, r5.bit() | r6.bit());
 
-  __ add(sp, sp, Operand(1 * kPointerSize));
-  __ stm(ia, sp, r5.bit() | r6.bit() | r7.bit());
-  __ sub(sp, sp, Operand(1 * kPointerSize));
-
-  // Set the number of arguments.
-  __ mov(r0, Operand(argc + 4));
+  // r2 points to call data as expected by Arguments
+  // (refer to layout above).
+  __ add(r2, sp, Operand(2 * kPointerSize));
 
-  // Jump to the fast api call builtin (tail call).
-  Handle<Code> code = Handle<Code>(
-      Builtins::builtin(Builtins::FastHandleApiCall));
-  ParameterCount expected(0);
-  __ InvokeCode(code, expected, expected,
-                RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+  Object* callback = optimization.api_call_info()->callback();
+  Address api_function_address = v8::ToCData<Address>(callback);
+  ApiFunction fun(api_function_address);
+
+  const int kApiStackSpace = 4;
+  __ EnterExitFrame(false, kApiStackSpace);
+
+  // r0 = v8::Arguments&
+  // Arguments is after the return address.
+  __ add(r0, sp, Operand(1 * kPointerSize));
+  // v8::Arguments::implicit_args = data
+  __ str(r2, MemOperand(r0, 0 * kPointerSize));
+  // v8::Arguments::values = last argument
+  __ add(ip, r2, Operand(argc * kPointerSize));
+  __ str(ip, MemOperand(r0, 1 * kPointerSize));
+  // v8::Arguments::length_ = argc
+  __ mov(ip, Operand(argc));
+  __ str(ip, MemOperand(r0, 2 * kPointerSize));
+  // v8::Arguments::is_construct_call = 0
+  __ mov(ip, Operand(0));
+  __ str(ip, MemOperand(r0, 3 * kPointerSize));
+
+  // Emitting a stub call may try to allocate (if the code is not
+  // already generated). Do not allow the assembler to perform a
+  // garbage collection but instead return the allocation failure
+  // object.
+  MaybeObject* result = masm->TryCallApiFunctionAndReturn(
+      &fun, argc + kFastApiCallArguments + 1);
+  if (result->IsFailure()) {
+    return result;
+  }
+  return Heap::undefined_value();
 }
 
-
 class CallInterceptorCompiler BASE_EMBEDDED {
  public:
   CallInterceptorCompiler(StubCompiler* stub_compiler,
@@ -650,16 +672,16 @@ class CallInterceptorCompiler BASE_EMBEDDED {
         arguments_(arguments),
         name_(name) {}
 
-  void Compile(MacroAssembler* masm,
-               JSObject* object,
-               JSObject* holder,
-               String* name,
-               LookupResult* lookup,
-               Register receiver,
-               Register scratch1,
-               Register scratch2,
-               Register scratch3,
-               Label* miss) {
+  MaybeObject* Compile(MacroAssembler* masm,
+                       JSObject* object,
+                       JSObject* holder,
+                       String* name,
+                       LookupResult* lookup,
+                       Register receiver,
+                       Register scratch1,
+                       Register scratch2,
+                       Register scratch3,
+                       Label* miss) {
     ASSERT(holder->HasNamedInterceptor());
     ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
 
@@ -669,17 +691,17 @@ class CallInterceptorCompiler BASE_EMBEDDED {
     CallOptimization optimization(lookup);
 
     if (optimization.is_constant_call()) {
-      CompileCacheable(masm,
-                       object,
-                       receiver,
-                       scratch1,
-                       scratch2,
-                       scratch3,
-                       holder,
-                       lookup,
-                       name,
-                       optimization,
-                       miss);
+      return CompileCacheable(masm,
+                              object,
+                              receiver,
+                              scratch1,
+                              scratch2,
+                              scratch3,
+                              holder,
+                              lookup,
+                              name,
+                              optimization,
+                              miss);
     } else {
       CompileRegular(masm,
                      object,
@@ -690,21 +712,22 @@ class CallInterceptorCompiler BASE_EMBEDDED {
                      name,
                      holder,
                      miss);
+      return Heap::undefined_value();
     }
   }
 
  private:
-  void CompileCacheable(MacroAssembler* masm,
-                       JSObject* object,
-                       Register receiver,
-                       Register scratch1,
-                       Register scratch2,
-                       Register scratch3,
-                       JSObject* interceptor_holder,
-                       LookupResult* lookup,
-                       String* name,
-                       const CallOptimization& optimization,
-                       Label* miss_label) {
+  MaybeObject* CompileCacheable(MacroAssembler* masm,
+                                JSObject* object,
+                                Register receiver,
+                                Register scratch1,
+                                Register scratch2,
+                                Register scratch3,
+                                JSObject* interceptor_holder,
+                                LookupResult* lookup,
+                                String* name,
+                                const CallOptimization& optimization,
+                                Label* miss_label) {
     ASSERT(optimization.is_constant_call());
     ASSERT(!lookup->holder()->IsGlobalObject());
 
@@ -768,7 +791,10 @@ class CallInterceptorCompiler BASE_EMBEDDED {
 
     // Invoke function.
     if (can_do_fast_api_call) {
-      GenerateFastApiCall(masm, optimization, arguments_.immediate());
+      MaybeObject* result = GenerateFastApiDirectCall(masm,
+                                                      optimization,
+                                                      arguments_.immediate());
+      if (result->IsFailure()) return result;
     } else {
       __ InvokeFunction(optimization.constant_function(), arguments_,
                         JUMP_FUNCTION);
@@ -786,6 +812,8 @@ class CallInterceptorCompiler BASE_EMBEDDED {
     if (can_do_fast_api_call) {
       FreeSpaceForFastApiCall(masm);
     }
+
+    return Heap::undefined_value();
   }
 
   void CompileRegular(MacroAssembler* masm,
@@ -2368,7 +2396,8 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
   }
 
   if (depth != kInvalidProtoDepth) {
-    GenerateFastApiCall(masm(), optimization, argc);
+    MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
+    if (result->IsFailure()) return result;
   } else {
     __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
   }
@@ -2412,16 +2441,19 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
   __ ldr(r1, MemOperand(sp, argc * kPointerSize));
 
   CallInterceptorCompiler compiler(this, arguments(), r2);
-  compiler.Compile(masm(),
-                   object,
-                   holder,
-                   name,
-                   &lookup,
-                   r1,
-                   r3,
-                   r4,
-                   r0,
-                   &miss);
+  MaybeObject* result = compiler.Compile(masm(),
+                                         object,
+                                         holder,
+                                         name,
+                                         &lookup,
+                                         r1,
+                                         r3,
+                                         r4,
+                                         r0,
+                                         &miss);
+  if (result->IsFailure()) {
+    return result;
+  }
 
   // Move returned value, the function to call, to r1.
   __ mov(r1, r0);
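
Aside (not from the patch): GenerateFastApiDirectCall above materializes the v8::Arguments object as four words written into the exit frame (implicit_args, values, length, is_construct_call), with implicit_args pointing at the call-data slot and values at the last JS argument, per the comments in that hunk. A stand-alone mirror of that block for orientation; the struct and the values in main are placeholders, not the real v8::Arguments declaration:

#include <cstdio>
#include <cstdint>

struct FastApiArgumentsBlock {
  intptr_t* implicit_args;     // r2: points at the call data slot (see layout above)
  intptr_t* values;            // last JS argument; earlier arguments sit above it
  intptr_t  length;            // argc
  intptr_t  is_construct_call; // always 0 on this path
};

int main() {
  intptr_t fake_slot = 0;
  FastApiArgumentsBlock block = { &fake_slot, &fake_slot, 2, 0 };
  std::printf("argc=%ld construct=%ld\n",
              static_cast<long>(block.length),
              static_cast<long>(block.is_construct_call));
  return 0;
}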
diff --git a/src/assembler.cc b/src/assembler.cc
index ca72d63..ef2094f 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -553,8 +553,9 @@ ExternalReference::ExternalReference(Builtins::CFunctionId id)
   : address_(Redirect(Builtins::c_function_address(id))) {}
 
 
-ExternalReference::ExternalReference(ApiFunction* fun)
-  : address_(Redirect(fun->address())) {}
+ExternalReference::ExternalReference(
+    ApiFunction* fun, Type type = ExternalReference::BUILTIN_CALL)
+  : address_(Redirect(fun->address(), type)) {}
 
 
 ExternalReference::ExternalReference(Builtins::Name name)
@@ -888,17 +889,18 @@ ExternalReference ExternalReference::double_fp_operation(
       UNREACHABLE();
   }
   // Passing true as 2nd parameter indicates that they return an fp value.
-  return ExternalReference(Redirect(FUNCTION_ADDR(function), true));
+  return ExternalReference(Redirect(FUNCTION_ADDR(function), FP_RETURN_CALL));
 }
 
 
 ExternalReference ExternalReference::compare_doubles() {
   return ExternalReference(Redirect(FUNCTION_ADDR(native_compare_doubles),
-                                    false));
+                                    BUILTIN_CALL));
 }
 
 
-ExternalReferenceRedirector* ExternalReference::redirector_ = NULL;
+ExternalReference::ExternalReferenceRedirector*
+    ExternalReference::redirector_ = NULL;
 
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
diff --git a/src/assembler.h b/src/assembler.h
index a29aa06..e8bc5d6 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -459,9 +459,6 @@ class Debug_Address;
 #endif
 
 
-typedef void* ExternalReferenceRedirector(void* original, bool fp_return);
-
-
 // An ExternalReference represents a C++ address used in the generated
 // code. All references to C++ functions and variables must be encapsulated in
 // an ExternalReference instance. This is done in order to track the origin of
@@ -469,9 +466,29 @@ typedef void* ExternalReferenceRedirector(void* original, bool fp_return);
 // addresses when deserializing a heap.
 class ExternalReference BASE_EMBEDDED {
  public:
+  // Used in the simulator to support different native api calls.
+  //
+  // BUILTIN_CALL - builtin call.
+  // MaybeObject* f(v8::internal::Arguments).
+  //
+  // FP_RETURN_CALL - builtin call that returns floating point.
+  // double f(double, double).
+  //
+  // DIRECT_CALL - direct call to API function native callback
+  // from generated code.
+  // Handle<Value> f(v8::Arguments&)
+  //
+  enum Type {
+    BUILTIN_CALL,  // default
+    FP_RETURN_CALL,
+    DIRECT_CALL
+  };
+
+  typedef void* ExternalReferenceRedirector(void* original, Type type);
+
   explicit ExternalReference(Builtins::CFunctionId id);
 
-  explicit ExternalReference(ApiFunction* ptr);
+  explicit ExternalReference(ApiFunction* ptr, Type type);
 
   explicit ExternalReference(Builtins::Name name);
 
@@ -599,17 +616,19 @@ class ExternalReference BASE_EMBEDDED {
 
   static ExternalReferenceRedirector* redirector_;
 
-  static void* Redirect(void* address, bool fp_return = false) {
+  static void* Redirect(void* address,
+                        Type type = ExternalReference::BUILTIN_CALL) {
     if (redirector_ == NULL) return address;
-    void* answer = (*redirector_)(address, fp_return);
+    void* answer = (*redirector_)(address, type);
     return answer;
   }
 
-  static void* Redirect(Address address_arg, bool fp_return = false) {
+  static void* Redirect(Address address_arg,
+                        Type type = ExternalReference::BUILTIN_CALL) {
     void* address = reinterpret_cast<void*>(address_arg);
     void* answer = (redirector_ == NULL) ?
                    address :
-                   (*redirector_)(address, fp_return);
+                   (*redirector_)(address, type);
     return answer;
   }
 
diff --git a/src/code-stubs.h b/src/code-stubs.h
index f80c89b..0d0e37f 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -75,7 +75,8 @@ namespace internal {
   V(GetProperty)               \
   V(SetProperty)               \
   V(InvokeBuiltin)             \
-  V(RegExpCEntry)
+  V(RegExpCEntry)              \
+  V(DirectCEntry)
 #else
 #define CODE_STUB_LIST_ARM(V)
 #endif
diff --git a/src/heap.cc b/src/heap.cc
index 0093829..0e3a2b8 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -1943,6 +1943,14 @@ void Heap::CreateJSConstructEntryStub() {
 }
 
 
+#if V8_TARGET_ARCH_ARM
+void Heap::CreateDirectCEntryStub() {
+  DirectCEntryStub stub;
+  set_direct_c_entry_code(*stub.GetCode());
+}
+#endif
+
+
 void Heap::CreateFixedStubs() {
   // Here we create roots for fixed stubs. They are needed at GC
   // for cooking and uncooking (check out frames.cc).
@@ -1963,6 +1971,9 @@ void Heap::CreateFixedStubs() {
 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
   Heap::CreateRegExpCEntryStub();
 #endif
+#if V8_TARGET_ARCH_ARM
+  Heap::CreateDirectCEntryStub();
+#endif
 }
 
 
diff --git a/src/heap.h b/src/heap.h
index 19fb89f..dcd813b 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -122,7 +122,12 @@ namespace internal {
 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
 #define STRONG_ROOT_LIST(V)                                                    \
   UNCONDITIONAL_STRONG_ROOT_LIST(V)                                            \
-  V(Code, re_c_entry_code, RegExpCEntryCode)
+  V(Code, re_c_entry_code, RegExpCEntryCode)                                   \
+  V(Code, direct_c_entry_code, DirectCEntryCode)
+#elif V8_TARGET_ARCH_ARM
+#define STRONG_ROOT_LIST(V)                                                    \
+  UNCONDITIONAL_STRONG_ROOT_LIST(V)                                            \
+  V(Code, direct_c_entry_code, DirectCEntryCode)
 #else
 #define STRONG_ROOT_LIST(V) UNCONDITIONAL_STRONG_ROOT_LIST(V)
 #endif
@@ -1320,12 +1325,13 @@ class Heap : public AllStatic {
   static bool CreateInitialMaps();
   static bool CreateInitialObjects();
 
-  // These four Create*EntryStub functions are here and forced to not be inlined
+  // These five Create*EntryStub functions are here and forced to not be inlined
   // because of a gcc-4.4 bug that assigns wrong vtable entries.
   NO_INLINE(static void CreateCEntryStub());
   NO_INLINE(static void CreateJSEntryStub());
   NO_INLINE(static void CreateJSConstructEntryStub());
   NO_INLINE(static void CreateRegExpCEntryStub());
+  NO_INLINE(static void CreateDirectCEntryStub());
 
   static void CreateFixedStubs();
 
diff --git a/src/top.h b/src/top.h
index 5b0fd61..9d8aa82 100644
--- a/src/top.h
+++ b/src/top.h
 #include "compilation-cache.h"
 #include "frames-inl.h"
 #include "runtime-profiler.h"
-#include "simulator.h"
 
 namespace v8 {
 namespace internal {
 
+class Simulator;
 
 #define RETURN_IF_SCHEDULED_EXCEPTION() \
   if (Top::has_scheduled_exception()) return Top::PromoteScheduledException()
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index afc973b..f88ba38 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -7525,6 +7525,61 @@ static void GenerateSomeGarbage() {
       "garbage = undefined;");
 }
 
+v8::Handle<v8::Value> DirectApiCallback(const v8::Arguments& args) {
+  static int count = 0;
+  if (count++ % 3 == 0) {
+    v8::V8::LowMemoryNotification();  // This should move the stub
+    GenerateSomeGarbage();  // This should ensure the old stub memory is flushed
+  }
+  return v8::Handle<v8::Value>();
+}
+
+
+THREADED_TEST(CallICFastApi_DirectCall_GCMoveStub) {
+  v8::HandleScope scope;
+  LocalContext context;
+  v8::Handle<v8::ObjectTemplate> nativeobject_templ = v8::ObjectTemplate::New();
+  nativeobject_templ->Set("callback",
+                          v8::FunctionTemplate::New(DirectApiCallback));
+  v8::Local<v8::Object> nativeobject_obj = nativeobject_templ->NewInstance();
+  context->Global()->Set(v8_str("nativeobject"), nativeobject_obj);
+  // call the api function multiple times to ensure direct call stub creation.
+  CompileRun(
+        "function f() {"
+        "  for (var i = 1; i <= 30; i++) {"
+        "    nativeobject.callback();"
+        "  }"
+        "}"
+        "f();");
+}
+
+
+v8::Handle<v8::Value> ThrowingDirectApiCallback(const v8::Arguments& args) {
+  return v8::ThrowException(v8_str("g"));
+}
+
+
+THREADED_TEST(CallICFastApi_DirectCall_Throw) {
+  v8::HandleScope scope;
+  LocalContext context;
+  v8::Handle<v8::ObjectTemplate> nativeobject_templ = v8::ObjectTemplate::New();
+  nativeobject_templ->Set("callback",
+                          v8::FunctionTemplate::New(ThrowingDirectApiCallback));
+  v8::Local<v8::Object> nativeobject_obj = nativeobject_templ->NewInstance();
+  context->Global()->Set(v8_str("nativeobject"), nativeobject_obj);
+  // call the api function multiple times to ensure direct call stub creation.
+  v8::Handle<Value> result = CompileRun(
+      "var result = '';"
+      "function f() {"
+      "  for (var i = 1; i <= 5; i++) {"
+      "    try { nativeobject.callback(); } catch (e) { result += e; }"
+      "  }"
+      "}"
+      "f(); result;");
+  CHECK_EQ(v8_str("ggggg"), result);
+}
+
+
 THREADED_TEST(InterceptorCallICFastApi_TrivialSignature) {
   int interceptor_call_count = 0;
   v8::HandleScope scope;