MIPS: Minor fixes to simulator and builtins-mips.
author     sgjesse@chromium.org <sgjesse@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Mon, 23 May 2011 07:12:58 +0000 (07:12 +0000)
committer  sgjesse@chromium.org <sgjesse@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Mon, 23 May 2011 07:12:58 +0000 (07:12 +0000)
Updated to include fixes to several MIPS arch-specific files, corresponding to recent changes in r7944, r7935, r7926, r7914, r7910, r7895, and parts of r7423, which had previously been missed for MIPS. Rebased on r7964.

The simulator changes were missed in r7893 for code-stubs-mips,
where the DirectCEntry support was added.
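
For readers of the message only: a minimal, standalone sketch (not part of the commit, using simplified stand-in types rather than v8::Handle<v8::Value> or the simulator's register file) of the DirectCEntry return convention the simulator now emulates for DIRECT_API_CALL redirections in the simulator-mips.cc hunk below. The stub passes the address of a result slot in a0, the callback's result is stored through that slot, and the slot address is handed back in v0.

    #include <cstdint>
    #include <cstdio>

    // Simplified stand-in for the object pointer wrapped by v8::Handle<v8::Value>.
    typedef intptr_t ObjectPtr;

    // Stand-in for a native API callback the simulator redirects to.
    static ObjectPtr FakeDirectApiCall(intptr_t arg1) {
      return static_cast<ObjectPtr>(arg1 + 0x40);  // pretend result object
    }

    int main() {
      ObjectPtr result_slot = 0;                               // slot set up by the stub
      intptr_t a0 = reinterpret_cast<intptr_t>(&result_slot);  // slot address, passed in a0
      intptr_t a1 = 0x1000;                                    // first real argument

      // What the simulator does for a DIRECT_API_CALL redirection: call the
      // target with the real argument, write the result through the slot
      // whose address arrived in a0, and return that address in v0.
      ObjectPtr result = FakeDirectApiCall(a1);
      *reinterpret_cast<ObjectPtr*>(a0) = result;
      intptr_t v0 = a0;

      std::printf("v0 = %p, *v0 = 0x%lx\n",
                  reinterpret_cast<void*>(v0),
                  static_cast<unsigned long>(*reinterpret_cast<ObjectPtr*>(v0)));
      return 0;
    }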

There are also a couple of small changes to builtins-mips, following
r7879 for the other architectures.
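
A rough, standalone illustration (not part of the commit; the bit positions below are hypothetical stand-ins for V8's SharedFunctionInfo::kES5Native and kSmiTagSize) of the r7879-style check mirrored here: instead of loading the shared function info's script and comparing its type against Script::TYPE_NATIVE, the builtin now tests a single bit of the compiler-hints smi that is already in a register.

    #include <cstdint>
    #include <iostream>

    // Hypothetical stand-ins for the real constants defined in V8's objects.h.
    const int kSmiTagSize = 1;
    const int kES5NativeBit = 10;

    // One AND plus a branch against zero replaces two dependent loads and a
    // compare (a3 holds the hints in Generate_FunctionCall, a2 in
    // Generate_FunctionApply).
    static bool IsES5Native(uint32_t compiler_hints_smi) {
      return (compiler_hints_smi & (1u << (kES5NativeBit + kSmiTagSize))) != 0;
    }

    int main() {
      uint32_t native_hints = 1u << (kES5NativeBit + kSmiTagSize);
      std::cout << std::boolalpha << IsES5Native(native_hints) << " "
                << IsES5Native(0) << std::endl;  // prints: true false
      return 0;
    }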

BUG=
TEST=

Review URL: http://codereview.chromium.org//7042031

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7977 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

src/mips/builtins-mips.cc
src/mips/code-stubs-mips.cc
src/mips/full-codegen-mips.cc
src/mips/ic-mips.cc
src/mips/macro-assembler-mips.cc
src/mips/macro-assembler-mips.h
src/mips/simulator-mips.cc
src/mips/simulator-mips.h
src/mips/stub-cache-mips.cc

diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc
index d41b0f13fd44ea54dd96e7c64d7dc4079dac8328..fdcb7c73c0ff65fdb67a111805c23e365c8bc32b 100644
--- a/src/mips/builtins-mips.cc
+++ b/src/mips/builtins-mips.cc
@@ -1202,13 +1202,10 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
                                  kSmiTagSize)));
     __ Branch(&shift_arguments, ne, t0, Operand(zero_reg));
 
-    // Do not transform the receiver for native (shared already in r2).
-    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kScriptOffset));
-    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
-    __ Branch(&shift_arguments, eq, a2, Operand(a3));
-    __ lw(a2, FieldMemOperand(a2, Script::kTypeOffset));
-    __ sra(a2, a2, kSmiTagSize);
-    __ Branch(&shift_arguments, eq, a2, Operand(Script::TYPE_NATIVE));
+    // Do not transform the receiver for native (Compilerhints already in a3).
+    __ And(t0, a3, Operand(1 << (SharedFunctionInfo::kES5Native +
+                                 kSmiTagSize)));
+    __ Branch(&shift_arguments, ne, t0, Operand(zero_reg));
 
     // Compute the receiver in non-strict mode.
     // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
@@ -1220,7 +1217,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
     // a2: first argument
     __ JumpIfSmi(a2, &convert_to_object, t2);
 
-    // Heap::kUndefinedValueRootIndex is already in a3.
+    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
     __ Branch(&use_global_receiver, eq, a2, Operand(a3));
     __ LoadRoot(a3, Heap::kNullValueRootIndex);
     __ Branch(&use_global_receiver, eq, a2, Operand(a3));
@@ -1389,20 +1386,17 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
                                kSmiTagSize)));
   __ Branch(&push_receiver, ne, t0, Operand(zero_reg));
 
-  // Do not transform the receiver for native (shared already in a1).
-  __ lw(a1, FieldMemOperand(a1, SharedFunctionInfo::kScriptOffset));
-  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-  __ Branch(&push_receiver, eq, a1, Operand(a2));
-  __ lw(a1, FieldMemOperand(a1, Script::kTypeOffset));
-  __ sra(a1, a1, kSmiTagSize);
-  __ Branch(&push_receiver, eq, a1, Operand(Script::TYPE_NATIVE));
+  // Do not transform the receiver for native (Compilerhints already in a2).
+  __ And(t0, a2, Operand(1 << (SharedFunctionInfo::kES5Native +
+                               kSmiTagSize)));
+  __ Branch(&push_receiver, ne, t0, Operand(zero_reg));
 
   // Compute the receiver in non-strict mode.
   __ And(t0, a0, Operand(kSmiTagMask));
   __ Branch(&call_to_object, eq, t0, Operand(zero_reg));
   __ LoadRoot(a1, Heap::kNullValueRootIndex);
   __ Branch(&use_global_receiver, eq, a0, Operand(a1));
-  // Heap::kUndefinedValueRootIndex is already in a2.
+  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
   __ Branch(&use_global_receiver, eq, a0, Operand(a2));
 
   // Check if the receiver is already a JavaScript object.
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index dd4e7c12fa3beb690199437613d73f790753072b..bb009218db07bc33f3bbc94fb9eeeea56bb19b26 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -1472,7 +1472,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                   scratch1,
                   Heap::kHeapNumberMapRootIndex,
                   not_found,
-                  true);
+                  DONT_DO_SMI_CHECK);
 
       STATIC_ASSERT(8 == kDoubleSize);
       __ Addu(scratch1,
@@ -1736,12 +1736,33 @@ void CompareStub::Generate(MacroAssembler* masm) {
 // The stub returns zero for false, and a non-zero value for true.
 void ToBooleanStub::Generate(MacroAssembler* masm) {
   // This stub uses FPU instructions.
-  ASSERT(CpuFeatures::IsEnabled(FPU));
+  CpuFeatures::Scope scope(FPU);
 
   Label false_result;
   Label not_heap_number;
   Register scratch0 = t5.is(tos_) ? t3 : t5;
 
+  // undefined -> false
+  __ LoadRoot(scratch0, Heap::kUndefinedValueRootIndex);
+  __ Branch(&false_result, eq, tos_, Operand(scratch0));
+
+  // Boolean -> its value
+  __ LoadRoot(scratch0, Heap::kFalseValueRootIndex);
+  __ Branch(&false_result, eq, tos_, Operand(scratch0));
+  __ LoadRoot(scratch0, Heap::kTrueValueRootIndex);
+  // "tos_" is a register and contains a non-zero value.  Hence we implicitly
+  // return true if the equal condition is satisfied.
+  __ Ret(eq, tos_, Operand(scratch0));
+
+  // Smis: 0 -> false, all other -> true
+  __ And(scratch0, tos_, tos_);
+  __ Branch(&false_result, eq, scratch0, Operand(zero_reg));
+  __ And(scratch0, tos_, Operand(kSmiTagMask));
+  // "tos_" is a register and contains a non-zero value.  Hence we implicitly
+  // return true if the not equal condition is satisfied.
+  __ Ret(eq, scratch0, Operand(zero_reg));
+
+  // 'null' -> false
   __ LoadRoot(scratch0, Heap::kNullValueRootIndex);
   __ Branch(&false_result, eq, tos_, Operand(scratch0));
 
@@ -1750,8 +1771,7 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
   __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
   __ Branch(&not_heap_number, ne, scratch0, Operand(at));
 
-  __ Subu(at, tos_, Operand(kHeapObjectTag));
-  __ ldc1(f12, MemOperand(at, HeapNumber::kValueOffset));
+  __ ldc1(f12, FieldMemOperand(tos_, HeapNumber::kValueOffset));
   __ fcmp(f12, 0.0, UEQ);
 
   // "tos_" is a register, and contains a non zero value by default.
@@ -1762,11 +1782,6 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
 
   __ bind(&not_heap_number);
 
-  // Check if the value is 'null'.
-  // 'null' => false.
-  __ LoadRoot(at, Heap::kNullValueRootIndex);
-  __ Branch(&false_result, eq, tos_, Operand(at));
-
   // It can be an undetectable object.
   // Undetectable => false.
   __ lw(at, FieldMemOperand(tos_, HeapObject::kMapOffset));
@@ -1944,12 +1959,14 @@ void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
 
 
 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &slow);
+  Label non_smi, slow, call_builtin;
+  GenerateSmiCodeSub(masm, &non_smi, &call_builtin);
   __ bind(&non_smi);
   GenerateHeapNumberCodeSub(masm, &slow);
   __ bind(&slow);
   GenerateTypeTransition(masm);
+  __ bind(&call_builtin);
+  GenerateGenericCodeFallback(masm);
 }
 
 
@@ -3185,7 +3202,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
                   a1,
                   Heap::kHeapNumberMapRootIndex,
                   &calculate,
-                  true);
+                  DONT_DO_SMI_CHECK);
       // Input is a HeapNumber. Store the
       // low and high words into a2, a3.
       __ lw(a2, FieldMemOperand(a0, HeapNumber::kValueOffset));
@@ -3764,16 +3781,21 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 
   #ifdef ENABLE_LOGGING_AND_PROFILING
     // If this is the outermost JS call, set js_entry_sp value.
+    Label non_outermost_js;
     ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
                                   masm->isolate());
     __ li(t1, Operand(ExternalReference(js_entry_sp)));
     __ lw(t2, MemOperand(t1));
-    {
-      Label skip;
-      __ Branch(&skip, ne, t2, Operand(zero_reg));
-      __ sw(fp, MemOperand(t1));
-      __ bind(&skip);
-    }
+    __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
+    __ sw(fp, MemOperand(t1));
+    __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
+    Label cont;
+    __ b(&cont);
+    __ nop();   // Branch delay slot nop.
+    __ bind(&non_outermost_js);
+    __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
+    __ bind(&cont);
+    __ push(t0);
   #endif
 
   // Call a faked try-block that does the invoke.
@@ -3839,32 +3861,21 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
   __ Call(t9);
 
-  // Unlink this frame from the handler chain. When reading the
-  // address of the next handler, there is no need to use the address
-  // displacement since the current stack pointer (sp) points directly
-  // to the stack handler.
-  __ lw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset));
-  __ li(t0, Operand(ExternalReference(Isolate::k_handler_address,
-                                      masm->isolate())));
-  __ sw(t1, MemOperand(t0));
+  // Unlink this frame from the handler chain.
+  __ PopTryHandler();
 
-  // This restores sp to its position before PushTryHandler.
-  __ addiu(sp, sp, StackHandlerConstants::kSize);
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  // If current FP value is the same as js_entry_sp value, it means that
-  // the current function is the outermost.
-  __ li(t1, Operand(ExternalReference(js_entry_sp)));
-  __ lw(t2, MemOperand(t1));
-  {
-    Label skip;
-    __ Branch(&skip, ne, fp, Operand(t2));
+  __ bind(&exit);  // v0 holds result
+  #ifdef ENABLE_LOGGING_AND_PROFILING
+    // Check if the current stack frame is marked as the outermost JS frame.
+    Label non_outermost_js_2;
+    __ pop(t1);
+    __ Branch(&non_outermost_js_2, ne, t1,
+              Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
+    __ li(t1, Operand(ExternalReference(js_entry_sp)));
     __ sw(zero_reg, MemOperand(t1));
-    __ bind(&skip);
-  }
-#endif
+    __ bind(&non_outermost_js_2);
+  #endif
 
-  __ bind(&exit);  // v0 holds result.
   // Restore the top frame descriptors from the stack.
   __ pop(t1);
   __ li(t0, Operand(ExternalReference(Isolate::k_c_entry_fp_address,
@@ -4846,7 +4857,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
               scratch_,
               Heap::kHeapNumberMapRootIndex,
               index_not_number_,
-              true);
+              DONT_DO_SMI_CHECK);
   call_helper.BeforeCall(masm);
   // Consumed by runtime conversion function:
   __ Push(object_, index_, index_);
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index 8c26fb3301efb98d59b84e75e2c85a257cb0ce0c..6f0cd2a6be64eba63284980c49b8b6bef6dc6de5 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -566,19 +566,6 @@ void FullCodeGenerator::DoTest(Label* if_true,
                                Label* if_false,
                                Label* fall_through) {
   if (CpuFeatures::IsSupported(FPU)) {
-    CpuFeatures::Scope scope(FPU);
-    // Emit the inlined tests assumed by the stub.
-    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
-    __ Branch(if_false, eq, result_register(), Operand(at));
-    __ LoadRoot(at, Heap::kTrueValueRootIndex);
-    __ Branch(if_true, eq, result_register(), Operand(at));
-    __ LoadRoot(at, Heap::kFalseValueRootIndex);
-    __ Branch(if_false, eq, result_register(), Operand(at));
-    STATIC_ASSERT(kSmiTag == 0);
-    __ Branch(if_false, eq, result_register(), Operand(zero_reg));
-    __ JumpIfSmi(result_register(), if_true);
-
-    // Call the ToBoolean stub for all other cases.
     ToBooleanStub stub(result_register());
     __ CallStub(&stub);
     __ mov(at, zero_reg);
@@ -589,8 +576,6 @@ void FullCodeGenerator::DoTest(Label* if_true,
     __ CallRuntime(Runtime::kToBool, 1);
     __ LoadRoot(at, Heap::kFalseValueRootIndex);
   }
-
-  // The stub returns nonzero for true.
   Split(ne, v0, Operand(at), if_true, if_false, fall_through);
 }
 
diff --git a/src/mips/ic-mips.cc b/src/mips/ic-mips.cc
index c454d98bebd6cca425328d471721f97041eebd9a..1946bf18a1abac37b73ceb71029ab063efb6ed6b 100644
--- a/src/mips/ic-mips.cc
+++ b/src/mips/ic-mips.cc
@@ -874,10 +874,7 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) {
 }
 
 
-Object* KeyedLoadIC_Miss(Arguments args);
-
-
-void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
+void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
   // ---------- S t a t e --------------
   //  -- ra     : return address
   //  -- a0     : key
@@ -889,8 +886,11 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
 
   __ Push(a1, a0);
 
-  ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss),
-                                            isolate);
+  // Perform tail call to the entry.
+  ExternalReference ref = force_generic
+      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
+      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
+
   __ TailCallExternalReference(ref, 2, 1);
 }
 
@@ -1097,7 +1097,7 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
   char_at_generator.GenerateSlow(masm, call_helper);
 
   __ bind(&miss);
-  GenerateMiss(masm);
+  GenerateMiss(masm, false);
 }
 
 
@@ -1263,11 +1263,30 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
        IC_Utility(kKeyedLoadPropertyWithInterceptor), masm->isolate()), 2, 1);
 
   __ bind(&slow);
-  GenerateMiss(masm);
+  GenerateMiss(masm, false);
+}
+
+
+void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
+  // ---------- S t a t e --------------
+  //  -- a0     : value
+  //  -- a1     : key
+  //  -- a2     : receiver
+  //  -- ra     : return address
+  // -----------------------------------
+
+  // Push receiver, key and value for runtime call.
+  __ Push(a2, a1, a0);
+
+  ExternalReference ref = force_generic
+      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
+                          masm->isolate())
+      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
+  __ TailCallExternalReference(ref, 3, 1);
 }
 
 
-void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
+void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
   // ---------- S t a t e --------------
   //  -- a0     : value
   //  -- a1     : key
@@ -1279,8 +1298,11 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
   // We can't use MultiPush as the order of the registers is important.
   __ Push(a2, a1, a0);
 
-  ExternalReference ref = ExternalReference(IC_Utility(kKeyedStoreIC_Miss),
-                                            masm->isolate());
+  // The slow case calls into the runtime to complete the store without causing
+  // an IC miss that would otherwise cause a transition to the generic stub.
+  ExternalReference ref =
+      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
+
   __ TailCallExternalReference(ref, 3, 1);
 }
 
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 8591698c11c844af2a02efa7088c19cb6453b1b8..0d1eed56c572a66749c1c6fa70c10e694604023c 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -2545,8 +2545,8 @@ void MacroAssembler::CheckMap(Register obj,
                               Register scratch,
                               Handle<Map> map,
                               Label* fail,
-                              bool is_heap_object) {
-  if (!is_heap_object) {
+                              SmiCheckType smi_check_type) {
+  if (smi_check_type == DO_SMI_CHECK) {
     JumpIfSmi(obj, fail);
   }
   lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
@@ -2555,12 +2555,27 @@ void MacroAssembler::CheckMap(Register obj,
 }
 
 
+void MacroAssembler::DispatchMap(Register obj,
+                                 Register scratch,
+                                 Handle<Map> map,
+                                 Handle<Code> success,
+                                 SmiCheckType smi_check_type) {
+  Label fail;
+  if (smi_check_type == DO_SMI_CHECK) {
+    JumpIfSmi(obj, &fail);
+  }
+  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
+  Jump(success, RelocInfo::CODE_TARGET, eq, scratch, Operand(map));
+  bind(&fail);
+}
+
+
 void MacroAssembler::CheckMap(Register obj,
                               Register scratch,
                               Heap::RootListIndex index,
                               Label* fail,
-                              bool is_heap_object) {
-  if (!is_heap_object) {
+                              SmiCheckType smi_check_type) {
+  if (smi_check_type == DO_SMI_CHECK) {
     JumpIfSmi(obj, fail);
   }
   lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
@@ -2860,7 +2875,7 @@ MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub,
   { MaybeObject* maybe_result = stub->TryGetCode();
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
-  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2);
+  Jump(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2);
   return result;
 }
 
@@ -3410,7 +3425,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
   lw(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   if (emit_debug_code()) {
     Label ok, fail;
-    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, false);
+    CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
     Branch(&ok);
     bind(&fail);
     Abort("Global functions must have initial map");
diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h
index ea49dfa30c33b851895a1e3dea27c5f1d51d2410..d4da96fa7f4d91160552d5ee0d7ee9fb818f54d6 100644
--- a/src/mips/macro-assembler-mips.h
+++ b/src/mips/macro-assembler-mips.h
@@ -30,6 +30,7 @@
 
 #include "assembler.h"
 #include "mips/assembler-mips.h"
+#include "v8globals.h"
 
 namespace v8 {
 namespace internal {
@@ -710,13 +711,22 @@ DECLARE_NOTARGET_PROTOTYPE(Ret)
                 Register scratch,
                 Handle<Map> map,
                 Label* fail,
-                bool is_heap_object);
+                SmiCheckType smi_check_type);
 
   void CheckMap(Register obj,
                 Register scratch,
                 Heap::RootListIndex index,
                 Label* fail,
-                bool is_heap_object);
+                SmiCheckType smi_check_type);
+
+  // Check if the map of an object is equal to a specified map and branch to a
+  // specified target if equal. Skip the smi check if not required (object is
+  // known to be a heap object)
+  void DispatchMap(Register obj,
+                   Register scratch,
+                   Handle<Map> map,
+                   Handle<Code> success,
+                   SmiCheckType smi_check_type);
 
   // Generates code for reporting that an illegal operation has
   // occurred.
diff --git a/src/mips/simulator-mips.cc b/src/mips/simulator-mips.cc
index f9ee00fb2731ec9123f6d1bd698d412526bbe056..8a335441b32e3eb8d2ce22bbf7bc2466ff002102 100644
--- a/src/mips/simulator-mips.cc
+++ b/src/mips/simulator-mips.cc
@@ -1165,7 +1165,11 @@ typedef double (*SimulatorRuntimeFPCall)(int32_t arg0,
 
 // This signature supports direct call in to API function native callback
 // (refer to InvocationCallback in v8.h).
-typedef v8::Handle<v8::Value> (*SimulatorRuntimeApiCall)(int32_t arg0);
+typedef v8::Handle<v8::Value> (*SimulatorRuntimeDirectApiCall)(int32_t arg0);
+
+// This signature supports direct call to accessor getter callback.
+typedef v8::Handle<v8::Value> (*SimulatorRuntimeDirectGetterCall)(int32_t arg0,
+                                                                  int32_t arg1);
 
 // Software interrupt instructions are used by the simulator to call into the
 // C-based V8 runtime. They are also used for debugging with simulator.
@@ -1240,15 +1244,27 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
       set_register(v0, gpreg_pair[0]);
       set_register(v1, gpreg_pair[1]);
     } else if (redirection->type() == ExternalReference::DIRECT_API_CALL) {
-      SimulatorRuntimeApiCall target =
-                  reinterpret_cast<SimulatorRuntimeApiCall>(external);
+      // See DirectCEntryStub::GenerateCall for explanation of register usage.
+      SimulatorRuntimeDirectApiCall target =
+                  reinterpret_cast<SimulatorRuntimeDirectApiCall>(external);
       if (::v8::internal::FLAG_trace_sim) {
         PrintF("Call to host function at %p args %08x\n",
-               FUNCTION_ADDR(target),
-               arg0);
+               FUNCTION_ADDR(target), arg1);
+      }
+      v8::Handle<v8::Value> result = target(arg1);
+      *(reinterpret_cast<int*>(arg0)) = (int32_t) *result;
+      set_register(v0, arg0);
+    } else if (redirection->type() == ExternalReference::DIRECT_GETTER_CALL) {
+      // See DirectCEntryStub::GenerateCall for explanation of register usage.
+      SimulatorRuntimeDirectGetterCall target =
+                  reinterpret_cast<SimulatorRuntimeDirectGetterCall>(external);
+      if (::v8::internal::FLAG_trace_sim) {
+        PrintF("Call to host function at %p args %08x %08x\n",
+               FUNCTION_ADDR(target), arg1, arg2);
       }
-      v8::Handle<v8::Value> result = target(arg0);
-      set_register(v0, (int32_t) *result);
+      v8::Handle<v8::Value> result = target(arg1, arg2);
+      *(reinterpret_cast<int*>(arg0)) = (int32_t) *result;
+      set_register(v0, arg0);
     } else {
       SimulatorRuntimeCall target =
                   reinterpret_cast<SimulatorRuntimeCall>(external);
diff --git a/src/mips/simulator-mips.h b/src/mips/simulator-mips.h
index 0a8de9497adc21176bc72566553769f6d11bc3da..38d097480df70b24ef4026f1898468da6a90ca94 100644
--- a/src/mips/simulator-mips.h
+++ b/src/mips/simulator-mips.h
@@ -50,7 +50,7 @@ namespace internal {
   entry(p0, p1, p2, p3, p4)
 
 typedef int (*mips_regexp_matcher)(String*, int, const byte*, const byte*,
-                                   int*, Address, int, Isolate*);
+                                   void*, int*, Address, int, Isolate*);
 
 
 // Call the generated regexp code directly. The code at the entry address
@@ -58,7 +58,8 @@ typedef int (*mips_regexp_matcher)(String*, int, const byte*, const byte*,
 // The fifth argument is a dummy that reserves the space used for
 // the return address added by the ExitFrame in native calls.
 #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
-  (FUNCTION_CAST<mips_regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7))
+  (FUNCTION_CAST<mips_regexp_matcher>(entry)( \
+      p0, p1, p2, p3, NULL, p4, p5, p6, p7))
 
 #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
   reinterpret_cast<TryCatch*>(try_catch_address)
@@ -361,7 +362,7 @@ class Simulator {
 
 #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
     Simulator::current(Isolate::Current())->Call( \
-        entry, 8, p0, p1, p2, p3, p4, p5, p6, p7)
+        entry, 9, p0, p1, p2, p3, NULL, p4, p5, p6, p7)
 
 #define TRY_CATCH_FROM_ADDRESS(try_catch_address)                              \
   try_catch_address == NULL ?                                                  \
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index 5ab8e119815dfe9cfe86eb954265343d766ddc8d..cb65d88dd586e9361a52c915e843356db559d910 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -1561,8 +1561,11 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
     __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
 
     // Check that the elements are in fast mode and writable.
-    __ CheckMap(elements, v0,
-                Heap::kFixedArrayMapRootIndex, &call_builtin, true);
+    __ CheckMap(elements,
+                v0,
+                Heap::kFixedArrayMapRootIndex,
+                &call_builtin,
+                DONT_DO_SMI_CHECK);
 
     if (argc == 1) {  // Otherwise fall through to call the builtin.
       Label exit, with_write_barrier, attempt_to_grow_elements;
@@ -1710,7 +1713,11 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
   __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
 
   // Check that the elements are in fast mode and writable.
-  __ CheckMap(elements, v0, Heap::kFixedArrayMapRootIndex, &call_builtin, true);
+  __ CheckMap(elements,
+              v0,
+              Heap::kFixedArrayMapRootIndex,
+              &call_builtin,
+              DONT_DO_SMI_CHECK);
 
   // Get the array's length into t0 and calculate new length.
   __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
@@ -2049,7 +2056,7 @@ MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
   __ Drop(argc + 1, eq, t0, Operand(zero_reg));
   __ Ret(eq, t0, Operand(zero_reg));
 
-  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, true);
+  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
 
   Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;
 
@@ -2200,7 +2207,7 @@ MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
   // Check if the argument is a heap number and load its exponent and
   // sign.
   __ bind(&not_smi);
-  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, true);
+  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
   __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
 
   // Check the sign of the argument. If the argument is positive,
@@ -2246,19 +2253,17 @@ MaybeObject* CallStubCompiler::CompileFastApiCall(
     JSFunction* function,
     String* name) {
 
-  Isolate* isolate = masm()->isolate();
-  Heap* heap = isolate->heap();
-  Counters* counters = isolate->counters();
+  Counters* counters = isolate()->counters();
 
   ASSERT(optimization.is_simple_api_call());
   // Bail out if object is a global object as we don't want to
   // repatch it to global receiver.
-  if (object->IsGlobalObject()) return heap->undefined_value();
-  if (cell != NULL) return heap->undefined_value();
+  if (object->IsGlobalObject()) return heap()->undefined_value();
+  if (cell != NULL) return heap()->undefined_value();
   if (!object->IsJSObject()) return heap()->undefined_value();
   int depth = optimization.GetPrototypeDepthOfExpectedType(
             JSObject::cast(object), holder);
-  if (depth == kInvalidProtoDepth) return heap->undefined_value();
+  if (depth == kInvalidProtoDepth) return heap()->undefined_value();
 
   Label miss, miss_before_stack_reserved;
 
@@ -3067,48 +3072,54 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
+MaybeObject* KeyedLoadStubCompiler::CompileLoadFastElement(Map* receiver_map) {
   // ----------- S t a t e -------------
   //  -- ra    : return address
   //  -- a0    : key
   //  -- a1    : receiver
   // -----------------------------------
-  Label miss;
-
-  // Check that the receiver isn't a smi.
-  __ JumpIfSmi(a1, &miss);
-
-  // Check that the map matches.
-  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
-  __ Branch(&miss, ne, a2, Operand(Handle<Map>(receiver->map())));
+  MaybeObject* maybe_stub = KeyedLoadFastElementStub().TryGetCode();
+  Code* stub;
+  if (!maybe_stub->To(&stub)) return maybe_stub;
+  __ DispatchMap(a1,
+                 a2,
+                 Handle<Map>(receiver_map),
+                 Handle<Code>(stub),
+                 DO_SMI_CHECK);
+
+  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(a0, &miss);
+  // Return the generated code.
+  return GetCode(NORMAL, NULL);
+}
 
-  // Get the elements array.
-  __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
-  __ AssertFastElements(a2);
 
-  // Check that the key is within bounds.
-  __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
-  __ Branch(&miss, hs, a0, Operand(a3));
+MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic(
+    MapList* receiver_maps,
+    CodeList* handler_ics) {
+  // ----------- S t a t e -------------
+  //  -- ra    : return address
+  //  -- a0    : key
+  //  -- a1    : receiver
+  // -----------------------------------
+  Label miss;
+  __ JumpIfSmi(a1, &miss);
 
-  // Load the result and make sure it's not the hole.
-  __ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
-  __ sll(t1, a0, kPointerSizeLog2 - kSmiTagSize);
-  __ Addu(t1, t1, a3);
-  __ lw(t0, MemOperand(t1));
-  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
-  __ Branch(&miss, eq, t0, Operand(t1));
-  __ mov(v0, t0);
-  __ Ret();
+  int receiver_count = receiver_maps->length();
+  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
+  for (int current = 0; current < receiver_count; ++current) {
+    Handle<Map> map(receiver_maps->at(current));
+    Handle<Code> code(handler_ics->at(current));
+    __ Jump(code, RelocInfo::CODE_TARGET, eq, a2, Operand(map));
+  }
 
   __ bind(&miss);
-  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+  Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
+  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL);
+  return GetCode(NORMAL, NULL, MEGAMORPHIC);
 }
 
 
@@ -3150,67 +3161,61 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
 }
 
 
-MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
-    JSObject* receiver) {
+MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement(
+    Map* receiver_map) {
   // ----------- S t a t e -------------
   //  -- a0    : value
   //  -- a1    : key
   //  -- a2    : receiver
   //  -- ra    : return address
   //  -- a3    : scratch
-  //  -- t0    : scratch (elements)
   // -----------------------------------
-  Label miss;
-  Register value_reg = a0;
-  Register key_reg = a1;
-  Register receiver_reg = a2;
-  Register scratch = a3;
-  Register elements_reg = t0;
-
-  // Check that the receiver isn't a smi.
-  __ JumpIfSmi(receiver_reg, &miss);
+  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
+  MaybeObject* maybe_stub =
+      KeyedStoreFastElementStub(is_js_array).TryGetCode();
+  Code* stub;
+  if (!maybe_stub->To(&stub)) return maybe_stub;
+  __ DispatchMap(a2,
+                 a3,
+                 Handle<Map>(receiver_map),
+                 Handle<Code>(stub),
+                 DO_SMI_CHECK);
+
+  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 
-  // Check that the map matches.
-  __ lw(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
-  __ Branch(&miss, ne, scratch, Operand(Handle<Map>(receiver->map())));
+  // Return the generated code.
+  return GetCode(NORMAL, NULL);
+}
 
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(key_reg, &miss);
 
-  // Get the elements array and make sure it is a fast element array, not 'cow'.
-  __ lw(elements_reg,
-        FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
-  __ lw(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset));
-  __ Branch(&miss, ne, scratch,
-      Operand(Handle<Map>(FACTORY->fixed_array_map())));
-
-  // Check that the key is within bounds.
-  if (receiver->IsJSArray()) {
-    __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
-  } else {
-    __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
+MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic(
+    MapList* receiver_maps,
+    CodeList* handler_ics) {
+  // ----------- S t a t e -------------
+  //  -- a0    : value
+  //  -- a1    : key
+  //  -- a2    : receiver
+  //  -- ra    : return address
+  //  -- a3    : scratch
+  // -----------------------------------
+  Label miss;
+  __ JumpIfSmi(a2, &miss);
+
+  int receiver_count = receiver_maps->length();
+  __ lw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
+  for (int current = 0; current < receiver_count; ++current) {
+    Handle<Map> map(receiver_maps->at(current));
+    Handle<Code> code(handler_ics->at(current));
+    __ Jump(code, RelocInfo::CODE_TARGET, eq, a3, Operand(map));
   }
-  // Compare smis.
-  __ Branch(&miss, hs, key_reg, Operand(scratch));
-  __ Addu(scratch,
-          elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
-  __ sll(key_reg, key_reg, kPointerSizeLog2 - kSmiTagSize);
-  __ Addu(v0, scratch, key_reg);
-  __ sw(value_reg, MemOperand(v0));
-  __ RecordWrite(scratch, Operand(key_reg), receiver_reg , elements_reg);
-
-  // value_reg (a0) is preserved.
-  // Done.
-  __ mov(v0, value_reg);
-  __ Ret();
 
   __ bind(&miss);
-  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
-  __ Jump(ic, RelocInfo::CODE_TARGET);
+  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
+  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL);
+  return GetCode(NORMAL, NULL, MEGAMORPHIC);
 }
 
 
@@ -3364,6 +3369,60 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
 }
 
 
+MaybeObject* ExternalArrayLoadStubCompiler::CompileLoad(
+    JSObject*receiver, ExternalArrayType array_type) {
+  // ----------- S t a t e -------------
+  //  -- ra    : return address
+  //  -- a0    : key
+  //  -- a1    : receiver
+  // -----------------------------------
+  MaybeObject* maybe_stub =
+      KeyedLoadExternalArrayStub(array_type).TryGetCode();
+  Code* stub;
+  if (!maybe_stub->To(&stub)) return maybe_stub;
+  __ DispatchMap(a1,
+                 a2,
+                 Handle<Map>(receiver->map()),
+                 Handle<Code>(stub),
+                 DO_SMI_CHECK);
+
+  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
+  __ Jump(ic, RelocInfo::CODE_TARGET);
+
+  // Return the generated code.
+  return GetCode();
+}
+
+
+MaybeObject* ExternalArrayStoreStubCompiler::CompileStore(
+    JSObject* receiver, ExternalArrayType array_type) {
+  // ----------- S t a t e -------------
+  //  -- a0    : value
+  //  -- a1    : name
+  //  -- a2    : receiver
+  //  -- ra    : return address
+  // -----------------------------------
+  MaybeObject* maybe_stub =
+      KeyedStoreExternalArrayStub(array_type).TryGetCode();
+  Code* stub;
+  if (!maybe_stub->To(&stub)) return maybe_stub;
+  __ DispatchMap(a2,
+                 a3,
+                 Handle<Map>(receiver->map()),
+                 Handle<Code>(stub),
+                 DONT_DO_SMI_CHECK);
+
+  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
+  __ Jump(ic, RelocInfo::CODE_TARGET);
+
+  return GetCode();
+}
+
+
+#undef __
+#define __ ACCESS_MASM(masm)
+
+
 static bool IsElementTypeSigned(ExternalArrayType array_type) {
   switch (array_type) {
     case kExternalByteArray:
@@ -3383,29 +3442,24 @@ static bool IsElementTypeSigned(ExternalArrayType array_type) {
 }
 
 
-MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
-    JSObject* receiver_object,
-    ExternalArrayType array_type,
-    Code::Flags flags) {
+void KeyedLoadStubCompiler::GenerateLoadExternalArray(
+    MacroAssembler* masm,
+    ExternalArrayType array_type) {
   // ---------- S t a t e --------------
   //  -- ra     : return address
   //  -- a0     : key
   //  -- a1     : receiver
   // -----------------------------------
-  Label slow, failed_allocation;
+  Label miss_force_generic, slow, failed_allocation;
 
   Register key = a0;
   Register receiver = a1;
 
-  // Check that the object isn't a smi.
-  __ JumpIfSmi(receiver, &slow);
+  // This stub is meant to be tail-jumped to, the receiver must already
+  // have been verified by the caller to not be a smi.
 
   // Check that the key is a smi.
-  __ JumpIfNotSmi(key, &slow);
-
-  // Make sure that we've got the right map.
-  __ lw(a2, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  __ Branch(&slow, ne, a2, Operand(Handle<Map>(receiver_object->map())));
+  __ JumpIfNotSmi(key, &miss_force_generic);
 
   __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
   // a3: elements array
@@ -3414,8 +3468,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
   __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
   __ sra(t2, key, kSmiTagSize);
   // Unsigned comparison catches both negative and too-large values.
-  __ Branch(&slow, Uless, t1, Operand(t2));
-
+  __ Branch(&miss_force_generic, Uless, t1, Operand(t2));
 
   __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
   // a3: base pointer of external storage
@@ -3565,12 +3618,12 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
 
       __ bind(&box_int_0);
       // Integer does not have leading zeros.
-      GenerateUInt2Double(masm(), hiword, loword, t0, 0);
+      GenerateUInt2Double(masm, hiword, loword, t0, 0);
       __ Branch(&done);
 
       __ bind(&box_int_1);
       // Integer has one leading zero.
-      GenerateUInt2Double(masm(), hiword, loword, t0, 1);
+      GenerateUInt2Double(masm, hiword, loword, t0, 1);
 
 
       __ bind(&done);
@@ -3686,7 +3739,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
   // Slow case, key and receiver still in a0 and a1.
   __ bind(&slow);
   __ IncrementCounter(
-      masm()->isolate()->counters()->keyed_load_external_array_slow(),
+      masm->isolate()->counters()->keyed_load_external_array_slow(),
       1, a2, a3);
 
   // ---------- S t a t e --------------
@@ -3699,16 +3752,16 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
 
   __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
 
-  return GetCode(flags);
+  __ bind(&miss_force_generic);
+  Code* stub = masm->isolate()->builtins()->builtin(
+      Builtins::kKeyedLoadIC_MissForceGeneric);
+  __ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET);
 }
 
 
-
-
-MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
-    JSObject* receiver_object,
-    ExternalArrayType array_type,
-    Code::Flags flags) {
+void KeyedStoreStubCompiler::GenerateStoreExternalArray(
+    MacroAssembler* masm,
+    ExternalArrayType array_type) {
   // ---------- S t a t e --------------
   //  -- a0     : value
   //  -- a1     : key
@@ -3716,7 +3769,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
   //  -- ra     : return address
   // -----------------------------------
 
-  Label slow, check_heap_number;
+  Label slow, check_heap_number, miss_force_generic;
 
   // Register usage.
   Register value = a0;
@@ -3724,23 +3777,19 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
   Register receiver = a2;
   // a3 mostly holds the elements array or the destination external array.
 
-  // Check that the object isn't a smi.
-  __ JumpIfSmi(receiver, &slow);
-
-  // Make sure that we've got the right map.
-  __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  __ Branch(&slow, ne, a3, Operand(Handle<Map>(receiver_object->map())));
+  // This stub is meant to be tail-jumped to, the receiver must already
+  // have been verified by the caller to not be a smi.
 
   __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
 
   // Check that the key is a smi.
-  __ JumpIfNotSmi(key, &slow);
+  __ JumpIfNotSmi(key, &miss_force_generic);
 
   // Check that the index is in range.
   __ SmiUntag(t0, key);
   __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
   // Unsigned comparison catches both negative and too-large values.
-  __ Branch(&slow, Ugreater_equal, t0, Operand(t1));
+  __ Branch(&miss_force_generic, Ugreater_equal, t0, Operand(t1));
 
   // Handle both smis and HeapNumbers in the fast path. Go to the
   // runtime for all other kinds of values.
@@ -3797,7 +3846,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
       break;
     case kExternalFloatArray:
       // Perform int-to-float conversion and store to memory.
-      StoreIntAsFloat(masm(), a3, t0, t1, t2, t3, t4);
+      StoreIntAsFloat(masm, a3, t0, t1, t2, t3, t4);
       break;
     case kExternalDoubleArray:
       __ sll(t8, t0, 3);
@@ -3810,7 +3859,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
         destination = FloatingPointHelper::kCoreRegisters;
       }
       FloatingPointHelper::ConvertIntToDouble(
-          masm(), t1, destination,
+          masm, t1, destination,
           f0, t2, t3,  // These are: double_dst, dst1, dst2.
           t0, f2);  // These are: scratch2, single_scratch.
       if (destination == FloatingPointHelper::kFPURegisters) {
@@ -4072,27 +4121,136 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
     }
   }
 
-  // Slow case: call runtime.
+  // Slow case, key and receiver still in a0 and a1.
   __ bind(&slow);
+  __ IncrementCounter(
+      masm->isolate()->counters()->keyed_load_external_array_slow(),
+      1, a2, a3);
   // Entry registers are intact.
   // ---------- S t a t e --------------
-  //  -- a0     : value
-  //  -- a1     : key
-  //  -- a2     : receiver
   //  -- ra     : return address
+  //  -- a0     : key
+  //  -- a1     : receiver
   // -----------------------------------
+  Handle<Code> slow_ic =
+      masm->isolate()->builtins()->KeyedStoreIC_Slow();
+  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
 
-  // Push receiver, key and value for runtime call.
-  __ Push(a2, a1, a0);
+  // Miss case, call the runtime.
+  __ bind(&miss_force_generic);
 
-  __ li(a1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes.
-  __ li(a0, Operand(Smi::FromInt(
-       Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
-  __ Push(a1, a0);
+  // ---------- S t a t e --------------
+  //  -- ra     : return address
+  //  -- a0     : key
+  //  -- a1     : receiver
+  // -----------------------------------
+
+  Handle<Code> miss_ic =
+     masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
+  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
+}
+
+
+void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- ra    : return address
+  //  -- a0    : key
+  //  -- a1    : receiver
+  // -----------------------------------
+  Label miss_force_generic;
+
+  // This stub is meant to be tail-jumped to, the receiver must already
+  // have been verified by the caller to not be a smi.
+
+  // Check that the key is a smi.
+  __ JumpIfNotSmi(a0, &miss_force_generic);
+
+  // Get the elements array.
+  __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
+  __ AssertFastElements(a2);
+
+  // Check that the key is within bounds.
+  __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
+  __ Branch(&miss_force_generic, hs, a0, Operand(a3));
+
+  // Load the result and make sure it's not the hole.
+  __ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(t0, t0, a3);
+  __ lw(t0, MemOperand(t0));
+  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
+  __ Branch(&miss_force_generic, eq, t0, Operand(t1));
+  __ mov(v0, t0);
+  __ Ret();
+
+  __ bind(&miss_force_generic);
+  Code* stub = masm->isolate()->builtins()->builtin(
+      Builtins::kKeyedLoadIC_MissForceGeneric);
+  __ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET);
+}
+
+
+void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
+                                                      bool is_js_array) {
+  // ----------- S t a t e -------------
+  //  -- a0    : value
+  //  -- a1    : key
+  //  -- a2    : receiver
+  //  -- ra    : return address
+  //  -- a3    : scratch
+  //  -- a4    : scratch (elements)
+  // -----------------------------------
+  Label miss_force_generic;
 
-  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
+  Register value_reg = a0;
+  Register key_reg = a1;
+  Register receiver_reg = a2;
+  Register scratch = a3;
+  Register elements_reg = t0;
+  Register scratch2 = t1;
+  Register scratch3 = t2;
 
-  return GetCode(flags);
+  // This stub is meant to be tail-jumped to, the receiver must already
+  // have been verified by the caller to not be a smi.
+
+  // Check that the key is a smi.
+  __ JumpIfNotSmi(a0, &miss_force_generic);
+
+  // Get the elements array and make sure it is a fast element array, not 'cow'.
+  __ lw(elements_reg,
+        FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+  __ CheckMap(elements_reg,
+              scratch,
+              Heap::kFixedArrayMapRootIndex,
+              &miss_force_generic,
+              DONT_DO_SMI_CHECK);
+
+  // Check that the key is within bounds.
+  if (is_js_array) {
+    __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+  } else {
+    __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
+  }
+  // Compare smis.
+  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
+
+  __ Addu(scratch,
+          elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+  __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(scratch3, scratch2, scratch);
+  __ sw(value_reg, MemOperand(scratch3));
+  __ RecordWrite(scratch, Operand(scratch2), receiver_reg , elements_reg);
+
+  // value_reg (a0) is preserved.
+  // Done.
+  __ Ret();
+
+  __ bind(&miss_force_generic);
+  Handle<Code> ic =
+      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
+  __ Jump(ic, RelocInfo::CODE_TARGET);
 }