}
-// Generate code to check if an object is a string. If the object is
-// a string, the map's instance type is left in the scratch1 register.
-static void GenerateStringCheck(MacroAssembler* masm,
- Register receiver,
- Register scratch1,
- Register scratch2,
- Label* smi,
- Label* non_string_object) {
- // Check that the receiver isn't a smi.
- __ tst(receiver, Operand(kSmiTagMask));
- __ b(eq, smi);
-
- // Check that the object is a string.
- __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
- __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
- __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
- // The cast is to resolve the overload for the argument of 0x0.
- __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
- __ b(ne, non_string_object);
-}
-
-
void LoadIC::GenerateStringLength(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r2 : name
// -- lr : return address
// -- [sp] : receiver
// -----------------------------------
- Label miss, load_length, check_wrapper;
+ Label miss;
__ ldr(r0, MemOperand(sp, 0));
- // Check if the object is a string leaving the instance type in the
- // r1 register.
- GenerateStringCheck(masm, r0, r1, r3, &miss, &check_wrapper);
-
- // Load length directly from the string.
- __ bind(&load_length);
- __ and_(r1, r1, Operand(kStringSizeMask));
- __ add(r1, r1, Operand(String::kHashShift));
- __ ldr(r0, FieldMemOperand(r0, String::kLengthOffset));
- __ mov(r0, Operand(r0, LSR, r1));
- __ mov(r0, Operand(r0, LSL, kSmiTagSize));
- __ Ret();
-
- // Check if the object is a JSValue wrapper.
- __ bind(&check_wrapper);
- __ cmp(r1, Operand(JS_VALUE_TYPE));
- __ b(ne, &miss);
-
- // Check if the wrapped value is a string and load the length
- // directly if it is.
- __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
- GenerateStringCheck(masm, r0, r1, r3, &miss, &miss);
- __ b(&load_length);
-
+ StubCompiler::GenerateLoadStringLength2(masm, r0, r1, r3, &miss);
// Cache miss: Jump to runtime.
__ bind(&miss);
StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
-// TODO(1224671): ICs for keyed load/store is not completed on ARM.
Object* KeyedLoadIC_Miss(Arguments args);
}
-// TODO(1224671): implement the fast case.
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// ---------- S t a t e --------------
// -- lr : return address
// -- sp[0] : key
// -- sp[4] : receiver
+ Label slow, fast;
- KeyedLoadIC::Generate(masm, ExternalReference(Runtime::kKeyedGetProperty));
+ // Get the key and receiver object from the stack.
+ __ ldm(ia, sp, r0.bit() | r1.bit());
+ // Check that the key is a smi.
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(ne, &slow);
+ __ mov(r0, Operand(r0, ASR, kSmiTagSize));
+ // Check that the object isn't a smi.
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(eq, &slow);
+
+ // Check that the object is some kind of JS object EXCEPT JS Value type.
+ // In the case that the object is a value-wrapper object,
+ // we enter the runtime system to make sure that indexing into string
+ // objects works as intended.
+ ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
+ __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
+ __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
+ __ cmp(r2, Operand(JS_OBJECT_TYPE));
+ __ b(lt, &slow);
+
+ // Get the elements array of the object.
+ __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
+ // Check that the object is in fast mode (not dictionary).
+ __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
+ __ cmp(r3, Operand(Factory::hash_table_map()));
+ __ b(eq, &slow);
+ // Check that the key (index) is within bounds.
+ __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
+ __ cmp(r0, Operand(r3));
+ __ b(lo, &fast);
+
+ // Slow case: Push extra copies of the arguments (2).
+ __ bind(&slow);
+ __ IncrementCounter(&Counters::keyed_load_generic_slow, 1, r0, r1);
+ __ ldm(ia, sp, r0.bit() | r1.bit());
+ __ stm(db_w, sp, r0.bit() | r1.bit());
+ // Do tail-call to runtime routine.
+ __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2);
+
+ // Fast case: Do the load.
+ __ bind(&fast);
+ __ add(r3, r1, Operand(Array::kHeaderSize - kHeapObjectTag));
+ __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
+ __ cmp(r0, Operand(Factory::the_hole_value()));
+ // In case the loaded value is the_hole we have to consult GetProperty
+ // to ensure the prototype chain is searched.
+ __ b(eq, &slow);
+
+ __ Ret();
}
}
-// TODO(1224671): implement the fast case.
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// ---------- S t a t e --------------
// -- r0 : value
// -- lr : return address
// -- sp[0] : key
// -- sp[1] : receiver
+ Label slow, fast, array, extra, exit;
+ // Get the key and the object from the stack.
+ __ ldm(ia, sp, r1.bit() | r3.bit()); // r1 = key, r3 = receiver
+ // Check that the key is a smi.
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(ne, &slow);
+ // Check that the object isn't a smi.
+ __ tst(r3, Operand(kSmiTagMask));
+ __ b(eq, &slow);
+ // Get the type of the object from its map.
+ __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
+ __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
+ // Check if the object is a JS array or not.
+ __ cmp(r2, Operand(JS_ARRAY_TYPE));
+ // r1 == key.
+ __ b(eq, &array);
+ // Check that the object is some kind of JS object.
+ __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
+ __ b(lt, &slow);
+
+
+ // Object case: Check key against length in the elements array.
+ __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
+ // Check that the object is in fast mode (not dictionary).
+ __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
+ __ cmp(r2, Operand(Factory::hash_table_map()));
+ __ b(eq, &slow);
+ // Untag the key (for checking against untagged length in the fixed array).
+ __ mov(r1, Operand(r1, ASR, kSmiTagSize));
+ // Compute address to store into and check array bounds.
+ __ add(r2, r3, Operand(Array::kHeaderSize - kHeapObjectTag));
+ __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
+ __ ldr(ip, FieldMemOperand(r3, Array::kLengthOffset));
+ __ cmp(r1, Operand(ip));
+ __ b(lo, &fast);
+
+
+ // Slow case: Push extra copies of the arguments (3).
+ __ bind(&slow);
+ __ ldm(ia, sp, r1.bit() | r3.bit()); // r0 == value, r1 == key, r3 == object
+ __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());
+ // Do tail-call to runtime routine.
+ __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);
+
+ // Extra capacity case: Check if there is extra capacity to
+ // perform the store and update the length. Used for adding one
+ // element to the array by writing to array[array.length].
+ // r0 == value, r1 == key, r2 == elements, r3 == object
+ __ bind(&extra);
+ __ b(ne, &slow); // do not leave holes in the array
+ __ mov(r1, Operand(r1, ASR, kSmiTagSize)); // untag
+ __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
+ __ cmp(r1, Operand(ip));
+ __ b(hs, &slow);
+ __ mov(r1, Operand(r1, LSL, kSmiTagSize)); // restore tag
+ __ add(r1, r1, Operand(1 << kSmiTagSize)); // and increment
+ __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
+ __ mov(r3, Operand(r2));
+ // NOTE: Computing the address to store into must take the fact
+ // that the key has been incremented into account.
+ int displacement = Array::kHeaderSize - kHeapObjectTag -
+ ((1 << kSmiTagSize) * 2);
+ __ add(r2, r2, Operand(displacement));
+ __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ b(&fast);
+
+
+ // Array case: Get the length and the elements array from the JS
+ // array. Check that the array is in fast mode; if it is the
+ // length is always a smi.
+ // r0 == value, r1 == key, r3 == object
+ __ bind(&array);
+ __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
+ __ ldr(ip, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ cmp(ip, Operand(Factory::hash_table_map()));
+ __ b(eq, &slow);
+
+ // Check the key against the length in the array, compute the
+ // address to store into and fall through to fast case.
+ // r0 == value, r1 == key, r2 == elements, r3 == object.
+ __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
+ __ cmp(r1, Operand(ip));
+ __ b(hs, &extra);
+ __ mov(r3, Operand(r2));
+ __ add(r2, r2, Operand(Array::kHeaderSize - kHeapObjectTag));
+ __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
+
+
+ // Fast case: Do the store.
+ // r0 == value, r2 == address to store into, r3 == elements
+ __ bind(&fast);
+ __ str(r0, MemOperand(r2));
+ // Skip write barrier if the written value is a smi.
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(eq, &exit);
+ // Update write barrier for the elements array address.
+ __ sub(r1, r2, Operand(r3));
+ __ RecordWrite(r3, r1, r2);
- KeyedStoreIC::Generate(masm, ExternalReference(Runtime::kSetProperty));
+ __ bind(&exit);
+ __ Ret();
}
}
+void MacroAssembler::SetCounter(StatsCounter* counter, int value,
+ Register scratch1, Register scratch2) {
+ if (FLAG_native_code_counters && counter->Enabled()) {
+ mov(scratch1, Operand(value));
+ mov(scratch2, Operand(ExternalReference(counter)));
+ str(scratch1, MemOperand(scratch2));
+ }
+}
+
+
+void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
+ Register scratch1, Register scratch2) {
+ ASSERT(value > 0);
+ if (FLAG_native_code_counters && counter->Enabled()) {
+ mov(scratch2, Operand(ExternalReference(counter)));
+ ldr(scratch1, MemOperand(scratch2));
+ add(scratch1, scratch1, Operand(value));
+ str(scratch1, MemOperand(scratch2));
+ }
+}
+
+
+void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
+ Register scratch1, Register scratch2) {
+ ASSERT(value > 0);
+ if (FLAG_native_code_counters && counter->Enabled()) {
+ mov(scratch2, Operand(ExternalReference(counter)));
+ ldr(scratch1, MemOperand(scratch2));
+ sub(scratch1, scratch1, Operand(value));
+ str(scratch1, MemOperand(scratch2));
+ }
+}
+
+
void MacroAssembler::Assert(Condition cc, const char* msg) {
if (FLAG_debug_code)
Check(cc, msg);
// ---------------------------------------------------------------------------
+ // StatsCounter support
+
+ void SetCounter(StatsCounter* counter, int value,
+ Register scratch1, Register scratch2);
+ void IncrementCounter(StatsCounter* counter, int value,
+ Register scratch1, Register scratch2);
+ void DecrementCounter(StatsCounter* counter, int value,
+ Register scratch1, Register scratch2);
+
+
+ // ---------------------------------------------------------------------------
// Debugging
// Calls Abort(msg) if the condition cc is not satisfied.
}
+// Generate code to check if an object is a string. If the object is
+// a string, the map's instance type is left in the scratch1 register.
+static void GenerateStringCheck(MacroAssembler* masm,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Label* smi,
+ Label* non_string_object) {
+ // Check that the receiver isn't a smi.
+ __ tst(receiver, Operand(kSmiTagMask));
+ __ b(eq, smi);
+
+ // Check that the object is a string.
+ __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
+ __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
+ __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
+ // The cast is to resolve the overload for the argument of 0x0.
+ __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
+ __ b(ne, non_string_object);
+}
+
+
+void StubCompiler::GenerateLoadStringLength2(MacroAssembler* masm,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Label* miss) {
+ Label load_length, check_wrapper;
+
+ // Check if the object is a string leaving the instance type in the
+ // scratch1 register.
+ GenerateStringCheck(masm, receiver, scratch1, scratch2,
+ miss, &check_wrapper);
+
+ // Load length directly from the string.
+ __ bind(&load_length);
+ __ and_(scratch1, scratch1, Operand(kStringSizeMask));
+ __ add(scratch1, scratch1, Operand(String::kHashShift));
+ __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
+ __ mov(r0, Operand(r0, LSR, scratch1));
+ __ mov(r0, Operand(r0, LSL, kSmiTagSize));
+ __ Ret();
+
+ // Check if the object is a JSValue wrapper.
+ __ bind(&check_wrapper);
+ __ cmp(scratch1, Operand(JS_VALUE_TYPE));
+ __ b(ne, miss);
+
+ // Check if the wrapped value is a string and load the length
+ // directly if it is.
+ __ ldr(r0, FieldMemOperand(receiver, JSValue::kValueOffset));
+ GenerateStringCheck(masm, receiver, scratch1, scratch1, miss, miss);
+ __ b(&load_length);
+}
+
+
+// Generate StoreField code, value is passed in r0 register.
+// After executing generated code, the receiver_reg and name_reg
+// may be clobbered.
+void StubCompiler::GenerateStoreField(MacroAssembler* masm,
+ Builtins::Name storage_extend,
+ JSObject* object,
+ int index,
+ Map* transition,
+ Register receiver_reg,
+ Register name_reg,
+ Register scratch,
+ Label* miss_label) {
+ // r0 : value
+ Label exit;
+
+ // Check that the receiver isn't a smi.
+ __ tst(receiver_reg, Operand(kSmiTagMask));
+ __ b(eq, miss_label);
+
+ // Check that the map of the receiver hasn't changed.
+ __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+ __ cmp(scratch, Operand(Handle<Map>(object->map())));
+ __ b(ne, miss_label);
+
+ // Perform global security token check if needed.
+ if (object->IsJSGlobalProxy()) {
+ __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
+ }
+
+ // Stub never generated for non-global objects that require access
+ // checks.
+ ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
+
+ // Perform map transition for the receiver if necessary.
+ if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
+ // The properties must be extended before we can store the value.
+ // We jump to a runtime call that extends the properties array.
+ __ mov(r2, Operand(Handle<Map>(transition)));
+ // Please note, if we implement keyed store for arm we need
+ // to call the Builtins::KeyedStoreIC_ExtendStorage.
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_ExtendStorage));
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+ return;
+ }
+
+ if (transition != NULL) {
+ // Update the map of the object; no write barrier updating is
+ // needed because the map is never in new space.
+ __ mov(ip, Operand(Handle<Map>(transition)));
+ __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+ }
+
+ // Adjust for the number of properties stored in the object. Even in the
+ // face of a transition we can use the old map here because the size of the
+ // object and the number of in-object properties is not going to change.
+ index -= object->map()->inobject_properties();
+
+ if (index < 0) {
+ // Set the property straight into the object.
+ int offset = object->map()->instance_size() + (index * kPointerSize);
+ __ str(r0, FieldMemOperand(receiver_reg, offset));
+
+ // Skip updating write barrier if storing a smi.
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(eq, &exit);
+
+ // Update the write barrier for the array address.
+ // Pass the value being stored in the now unused name_reg.
+ __ mov(name_reg, Operand(offset));
+ __ RecordWrite(receiver_reg, name_reg, scratch);
+ } else {
+ // Write to the properties array.
+ int offset = index * kPointerSize + Array::kHeaderSize;
+ // Get the properties array
+ __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
+ __ str(r0, FieldMemOperand(scratch, offset));
+
+ // Skip updating write barrier if storing a smi.
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(eq, &exit);
+
+ // Update the write barrier for the array address.
+ // Ok to clobber receiver_reg and name_reg, since we return.
+ __ mov(name_reg, Operand(offset));
+ __ RecordWrite(scratch, name_reg, receiver_reg);
+ }
+
+ // Return the value (register r0).
+ __ bind(&exit);
+ __ Ret();
+}
+
+
void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
Code* code = NULL;
// -----------------------------------
HandleScope scope;
- Label miss, exit;
+ Label miss;
// Get the receiver from the stack.
__ ldr(r3, MemOperand(sp, 0 * kPointerSize));
- // Check that the receiver isn't a smi.
- __ tst(r3, Operand(kSmiTagMask));
- __ b(eq, &miss);
-
- // Check that the map of the receiver hasn't changed.
- __ ldr(r1, FieldMemOperand(r3, HeapObject::kMapOffset));
- __ cmp(r1, Operand(Handle<Map>(object->map())));
- __ b(ne, &miss);
-
- // Perform global security token check if needed.
- if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(r3, r1, &miss);
- }
-
- // Stub never generated for non-global objects that require access
- // checks.
- ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
-
- // Perform map transition for the receiver if necessary.
- if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
- // The properties must be extended before we can store the value.
- // We jump to a runtime call that extends the propeties array.
- __ mov(r2, Operand(Handle<Map>(transition)));
- // Please note, if we implement keyed store for arm we need
- // to call the Builtins::KeyedStoreIC_ExtendStorage.
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_ExtendStorage));
- __ Jump(ic, RelocInfo::CODE_TARGET);
- } else {
- // Adjust for the number of properties stored in the object. Even in the
- // face of a transition we can use the old map here because the size of the
- // object and the number of in-object properties is not going to change.
- index -= object->map()->inobject_properties();
-
- if (index >= 0) {
- // Get the properties array
- __ ldr(r1, FieldMemOperand(r3, JSObject::kPropertiesOffset));
- }
-
- if (transition != NULL) {
- // Update the map of the object; no write barrier updating is
- // needed because the map is never in new space.
- __ mov(ip, Operand(Handle<Map>(transition)));
- __ str(ip, FieldMemOperand(r3, HeapObject::kMapOffset));
- }
-
- if (index < 0) {
- // Set the property straight into the object.
- int offset = object->map()->instance_size() + (index * kPointerSize);
- __ str(r0, FieldMemOperand(r3, offset));
-
- // Skip updating write barrier if storing a smi.
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, &exit);
-
- // Update the write barrier for the array address.
- __ mov(r1, Operand(offset));
- __ RecordWrite(r3, r1, r2);
- } else {
- // Write to the properties array.
- int offset = index * kPointerSize + Array::kHeaderSize;
- __ str(r0, FieldMemOperand(r1, offset));
-
- // Skip updating write barrier if storing a smi.
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, &exit);
-
- // Update the write barrier for the array address.
- __ mov(r3, Operand(offset));
- __ RecordWrite(r1, r3, r2); // OK to clobber r2, since we return
- }
-
- // Return the value (register r0).
- __ bind(&exit);
- __ Ret();
- }
- // Handle store cache miss.
+ // name register might be clobbered.
+ GenerateStoreField(masm(),
+ Builtins::StoreIC_ExtendStorage,
+ object,
+ index,
+ transition,
+ r3, r2, r1,
+ &miss);
__ bind(&miss);
__ mov(r2, Operand(Handle<String>(name))); // restore name
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
}
-// TODO(1224671): implement the fast case.
Object* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
// ----------- S t a t e -------------
// -- lr : return address
// -- sp[4] : receiver
// -----------------------------------
HandleScope scope;
+
+ Label miss;
+ __ IncrementCounter(&Counters::keyed_load_string_length, 1, r1, r3);
+
+ __ ldr(r2, MemOperand(sp));
+ __ ldr(r0, MemOperand(sp, kPointerSize)); // receiver
+
+ __ cmp(r2, Operand(Handle<String>(name)));
+ __ b(ne, &miss);
+
+ GenerateLoadStringLength2(masm(), r0, r1, r3, &miss);
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::keyed_load_string_length, 1, r1, r3);
+
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
return GetCode(CALLBACKS);
}
-// TODO(1224671): implement the fast case.
Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
int index,
Map* transition,
// -- [sp] : receiver
// -----------------------------------
HandleScope scope;
+ Label miss;
+
+ __ IncrementCounter(&Counters::keyed_store_field, 1, r1, r3);
+
+ // Check that the name has not changed.
+ __ cmp(r2, Operand(Handle<String>(name)));
+ __ b(ne, &miss);
+
+ // Load receiver from the stack.
+ __ ldr(r3, MemOperand(sp));
+ // r1 is used as scratch register, r3 and r2 might be clobbered.
+ GenerateStoreField(masm(),
+ Builtins::StoreIC_ExtendStorage,
+ object,
+ index,
+ transition,
+ r3, r2, r1,
+ &miss);
+ __ bind(&miss);
+
+ __ DecrementCounter(&Counters::keyed_store_field, 1, r1, r3);
+ __ mov(r2, Operand(Handle<String>(name))); // restore name register.
Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
__ Jump(ic, RelocInfo::CODE_TARGET);
Register receiver,
Register scratch,
Label* miss_label);
+ static void GenerateLoadStringLength2(MacroAssembler* masm,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Label* miss_label);
static void GenerateLoadFunctionPrototype(MacroAssembler* masm,
Register receiver,
Register scratch1,