}
-static void Generate_KeyedStoreIC_SloppyArguments(MacroAssembler* masm) {
- KeyedStoreIC::GenerateSloppyArguments(masm);
-}
-
-
static void Generate_CallICStub_DebugBreak(MacroAssembler* masm) {
DebugCodegen::GenerateCallICStubDebugBreak(masm);
}
StoreICState::kStrictModeState) \
V(KeyedStoreIC_Megamorphic_Strict, KEYED_STORE_IC, MEGAMORPHIC, \
StoreICState::kStrictModeState) \
- V(KeyedStoreIC_SloppyArguments, KEYED_STORE_IC, MONOMORPHIC, \
- kNoExtraICState) \
\
/* Uses KeyedLoadIC_Initialize; must be after in list. */ \
V(FunctionCall, BUILTIN, UNINITIALIZED, kNoExtraICState) \
MULTIPLE
};
- HValue* UnmappedCase(HValue* elements, HValue* key);
+ HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value);
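+ // Shared graph builder for keyed loads and stores on sloppy arguments
+ // receivers; value == NULL emits the load path, non-NULL the store path.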
+ HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key,
+ HValue* value);
HValue* BuildArrayConstructor(ElementsKind kind,
AllocationSiteOverrideMode override_mode,
Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }
-HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key) {
- HValue* result;
+HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
+ HValue* value) {
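+ // |value| is NULL on the load path; on the store path the value is written
+ // into the unmapped backing store and |result| stays NULL.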
+ HValue* result = NULL;
HInstruction* backing_store =
Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, FAST_ELEMENTS,
ALLOW_RETURN_HOLE);
Token::LT);
in_unmapped_range.Then();
{
- result = Add<HLoadKeyed>(backing_store, key, nullptr, FAST_HOLEY_ELEMENTS,
- NEVER_RETURN_HOLE);
+ if (value == NULL) {
+ result = Add<HLoadKeyed>(backing_store, key, nullptr, FAST_HOLEY_ELEMENTS,
+ NEVER_RETURN_HOLE);
+ } else {
+ Add<HStoreKeyed>(backing_store, key, value, FAST_HOLEY_ELEMENTS);
+ }
}
in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
in_unmapped_range.End();
}
-template <>
-HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
- HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
- HValue* key = GetParameter(LoadDescriptor::kNameIndex);
-
+HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
+ HValue* key,
+ HValue* value) {
// Mapped arguments are actual arguments. Unmapped arguments are values added
// to the arguments object after it was created for the call. Mapped arguments
// are stored in the context at indexes given by elements[key + 2]; unmapped
// arguments live in the backing store FixedArray held at elements[1]. If
// elements[key + 2] is the hole, the lookup falls through to the unmapped
// case; otherwise it is a Smi index t into the context array given at
// elements[0], and the result is the value at context[t].
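//
// For reference, the sloppy arguments elements array has the layout:
//   elements[0]   : the context
//   elements[1]   : the unmapped (backing store) FixedArray
//   elements[2+i] : context slot index for formal parameter i, or the hole
//                   once that parameter is no longer mapped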
+ bool is_load = value == NULL;
+
key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
IfBuilder positive_smi(this);
positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
nullptr, FAST_ELEMENTS);
STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
- HValue* result = Add<HLoadKeyed>(the_context, mapped_index, nullptr,
- FAST_ELEMENTS, ALLOW_RETURN_HOLE);
- environment()->Push(result);
+ if (is_load) {
+ HValue* result = Add<HLoadKeyed>(the_context, mapped_index, nullptr,
+ FAST_ELEMENTS, ALLOW_RETURN_HOLE);
+ environment()->Push(result);
+ } else {
+ DCHECK(value != NULL);
+ Add<HStoreKeyed>(the_context, mapped_index, value, FAST_ELEMENTS);
+ environment()->Push(value);
+ }
}
is_valid.Else();
{
- HValue* result = UnmappedCase(elements, key);
- environment()->Push(result);
+ HValue* result = UnmappedCase(elements, key, value);
+ environment()->Push(is_load ? result : value);
}
is_valid.End();
}
in_range.Else();
{
- HValue* result = UnmappedCase(elements, key);
- environment()->Push(result);
+ HValue* result = UnmappedCase(elements, key, value);
+ environment()->Push(is_load ? result : value);
}
in_range.End();
}
+template <>
+HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
+ HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
+ HValue* key = GetParameter(LoadDescriptor::kNameIndex);
+
+ return EmitKeyedSloppyArguments(receiver, key, NULL);
+}
+
+
Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
return DoGenerateCode(this);
}
+template <>
+HValue* CodeStubGraphBuilder<KeyedStoreSloppyArgumentsStub>::BuildCodeStub() {
+ HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
+ HValue* key = GetParameter(StoreDescriptor::kNameIndex);
+ HValue* value = GetParameter(StoreDescriptor::kValueIndex);
+
+ return EmitKeyedSloppyArguments(receiver, key, value);
+}
+
+
+Handle<Code> KeyedStoreSloppyArgumentsStub::GenerateCode() {
+ return DoGenerateCode(this);
+}
+
+
void CodeStubGraphBuilderBase::BuildStoreNamedField(
HValue* object, HValue* value, FieldIndex index,
Representation representation, bool transition_to_field) {
descriptor->Initialize(FUNCTION_ADDR(StoreIC_MissFromStubFailure));
} else if (kind() == Code::KEYED_LOAD_IC) {
descriptor->Initialize(FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure));
+ } else if (kind() == Code::KEYED_STORE_IC) {
+ descriptor->Initialize(FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure));
}
}
if (kind() == Code::LOAD_IC || kind() == Code::KEYED_LOAD_IC) {
return LoadWithVectorDescriptor(isolate());
} else {
- DCHECK_EQ(Code::STORE_IC, kind());
+ DCHECK(kind() == Code::STORE_IC || kind() == Code::KEYED_STORE_IC);
return StoreDescriptor(isolate());
}
}
V(LoadFastElement) \
V(LoadField) \
V(KeyedLoadSloppyArguments) \
+ V(KeyedStoreSloppyArguments) \
V(StoreField) \
V(StoreGlobal) \
V(StoreTransition) \
};
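+// Handler stub for keyed stores to receivers with sloppy arguments elements;
+// its code is built by CodeStubGraphBuilderBase::EmitKeyedSloppyArguments.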
+class KeyedStoreSloppyArgumentsStub : public HandlerStub {
+ public:
+ explicit KeyedStoreSloppyArgumentsStub(Isolate* isolate)
+ : HandlerStub(isolate) {}
+
+ protected:
+ Code::Kind kind() const override { return Code::KEYED_STORE_IC; }
+ Code::StubType GetStubType() const override { return Code::FAST; }
+
+ private:
+ DEFINE_HANDLER_CODE_STUB(KeyedStoreSloppyArguments, HandlerStub);
+};
+
+
class LoadConstantStub : public HandlerStub {
public:
LoadConstantStub(Isolate* isolate, int constant_index)
}
-static MemOperand GenerateMappedArgumentsLookup(
- MacroAssembler* masm, Register object, Register key, Register scratch1,
- Register scratch2, Register scratch3, Label* unmapped_case,
- Label* slow_case) {
- Heap* heap = masm->isolate()->heap();
-
- // Check that the receiver is a JSObject. Because of the map check
- // later, we do not need to check for interceptors or whether it
- // requires access checks.
- __ JumpIfSmi(object, slow_case);
- // Check that the object is some kind of JSObject.
- __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE);
- __ b(lt, slow_case);
-
- // Check that the key is a positive smi.
- __ tst(key, Operand(0x80000001));
- __ b(ne, slow_case);
-
- // Load the elements into scratch1 and check its map.
- Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
- __ ldr(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
- __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);
-
- // Check if element is in the range of mapped arguments. If not, jump
- // to the unmapped lookup with the parameter map in scratch1.
- __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
- __ sub(scratch2, scratch2, Operand(Smi::FromInt(2)));
- __ cmp(key, Operand(scratch2));
- __ b(cs, unmapped_case);
-
- // Load element index and check whether it is the hole.
- const int kOffset =
- FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
-
- __ mov(scratch3, Operand(kPointerSize >> 1));
- __ mul(scratch3, key, scratch3);
- __ add(scratch3, scratch3, Operand(kOffset));
-
- __ ldr(scratch2, MemOperand(scratch1, scratch3));
- __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
- __ cmp(scratch2, scratch3);
- __ b(eq, unmapped_case);
-
- // Load value from context and return it. We can reuse scratch1 because
- // we do not jump to the unmapped lookup (which requires the parameter
- // map in scratch1).
- __ ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
- __ mov(scratch3, Operand(kPointerSize >> 1));
- __ mul(scratch3, scratch2, scratch3);
- __ add(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
- return MemOperand(scratch1, scratch3);
-}
-
-
-static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
- Register key,
- Register parameter_map,
- Register scratch,
- Label* slow_case) {
- // Element is in arguments backing store, which is referenced by the
- // second element of the parameter_map. The parameter_map register
- // must be loaded with the parameter map of the arguments object and is
- // overwritten.
- const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
- Register backing_store = parameter_map;
- __ ldr(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
- Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
- __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
- DONT_DO_SMI_CHECK);
- __ ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
- __ cmp(key, Operand(scratch));
- __ b(cs, slow_case);
- __ mov(scratch, Operand(kPointerSize >> 1));
- __ mul(scratch, key, scratch);
- __ add(scratch, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- return MemOperand(backing_store, scratch);
-}
-
-
-void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
- Register receiver = StoreDescriptor::ReceiverRegister();
- Register key = StoreDescriptor::NameRegister();
- Register value = StoreDescriptor::ValueRegister();
- DCHECK(receiver.is(r1));
- DCHECK(key.is(r2));
- DCHECK(value.is(r0));
-
- Label slow, notin;
- MemOperand mapped_location = GenerateMappedArgumentsLookup(
- masm, receiver, key, r3, r4, r5, &notin, &slow);
- __ str(value, mapped_location);
- __ add(r6, r3, r5);
- __ mov(r9, value);
- __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
- __ Ret();
- __ bind(&notin);
- // The unmapped lookup expects that the parameter map is in r3.
- MemOperand unmapped_location =
- GenerateUnmappedArgumentsLookup(masm, key, r3, r4, &slow);
- __ str(value, unmapped_location);
- __ add(r6, r3, r4);
- __ mov(r9, value);
- __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
- __ Ret();
- __ bind(&slow);
- GenerateMiss(masm);
-}
-
-
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
// The return address is in lr.
Isolate* isolate = masm->isolate();
}
-// Neither 'object' nor 'key' are modified by this function.
-//
-// If the 'unmapped_case' or 'slow_case' exit is taken, the 'map' register is
-// left with the object's elements map. Otherwise, it is used as a scratch
-// register.
-static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
- Register object, Register key,
- Register map, Register scratch1,
- Register scratch2,
- Label* unmapped_case,
- Label* slow_case) {
- DCHECK(!AreAliased(object, key, map, scratch1, scratch2));
-
- Heap* heap = masm->isolate()->heap();
-
- // Check that the receiver is a JSObject. Because of the elements
- // map check later, we do not need to check for interceptors or
- // whether it requires access checks.
- __ JumpIfSmi(object, slow_case);
- // Check that the object is some kind of JSObject.
- __ JumpIfObjectType(object, map, scratch1, FIRST_JS_RECEIVER_TYPE, slow_case,
- lt);
-
- // Check that the key is a positive smi.
- __ JumpIfNotSmi(key, slow_case);
- __ Tbnz(key, kXSignBit, slow_case);
-
- // Load the elements object and check its map.
- Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
- __ Ldr(map, FieldMemOperand(object, JSObject::kElementsOffset));
- __ CheckMap(map, scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
-
- // Check if element is in the range of mapped arguments. If not, jump
- // to the unmapped lookup.
- __ Ldr(scratch1, FieldMemOperand(map, FixedArray::kLengthOffset));
- __ Sub(scratch1, scratch1, Smi::FromInt(2));
- __ Cmp(key, scratch1);
- __ B(hs, unmapped_case);
-
- // Load element index and check whether it is the hole.
- static const int offset =
- FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
-
- __ Add(scratch1, map, offset);
- __ SmiUntag(scratch2, key);
- __ Ldr(scratch1, MemOperand(scratch1, scratch2, LSL, kPointerSizeLog2));
- __ JumpIfRoot(scratch1, Heap::kTheHoleValueRootIndex, unmapped_case);
-
- // Load value from context and return it.
- __ Ldr(scratch2, FieldMemOperand(map, FixedArray::kHeaderSize));
- __ SmiUntag(scratch1);
- __ Lsl(scratch1, scratch1, kPointerSizeLog2);
- __ Add(scratch1, scratch1, Context::kHeaderSize - kHeapObjectTag);
- // The base of the result (scratch2) is passed to RecordWrite in
- // KeyedStoreIC::GenerateSloppyArguments and it must be a HeapObject.
- return MemOperand(scratch2, scratch1);
-}
-
-
-// The 'parameter_map' register must be loaded with the parameter map of the
-// arguments object and is overwritten.
-static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
- Register key,
- Register parameter_map,
- Register scratch,
- Label* slow_case) {
- DCHECK(!AreAliased(key, parameter_map, scratch));
-
- // Element is in arguments backing store, which is referenced by the
- // second element of the parameter_map.
- const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
- Register backing_store = parameter_map;
- __ Ldr(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
- Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
- __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
- DONT_DO_SMI_CHECK);
- __ Ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
- __ Cmp(key, scratch);
- __ B(hs, slow_case);
-
- __ Add(backing_store, backing_store,
- FixedArray::kHeaderSize - kHeapObjectTag);
- __ SmiUntag(scratch, key);
- return MemOperand(backing_store, scratch, LSL, kPointerSizeLog2);
-}
-
-
void LoadIC::GenerateNormal(MacroAssembler* masm) {
Register dictionary = x0;
DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister()));
}
-void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
- ASM_LOCATION("KeyedStoreIC::GenerateSloppyArguments");
- Label slow, notin;
- Register value = StoreDescriptor::ValueRegister();
- Register key = StoreDescriptor::NameRegister();
- Register receiver = StoreDescriptor::ReceiverRegister();
- DCHECK(receiver.is(x1));
- DCHECK(key.is(x2));
- DCHECK(value.is(x0));
-
- Register map = x3;
-
- // These registers are used by GenerateMappedArgumentsLookup to build a
- // MemOperand. They are live for as long as the MemOperand is live.
- Register mapped1 = x4;
- Register mapped2 = x5;
-
- MemOperand mapped = GenerateMappedArgumentsLookup(
- masm, receiver, key, map, mapped1, mapped2, &notin, &slow);
- Operand mapped_offset = mapped.OffsetAsOperand();
- __ Str(value, mapped);
- __ Add(x10, mapped.base(), mapped_offset);
- __ Mov(x11, value);
- __ RecordWrite(mapped.base(), x10, x11, kLRHasNotBeenSaved, kDontSaveFPRegs);
- __ Ret();
-
- __ Bind(&notin);
-
- // These registers are used by GenerateMappedArgumentsLookup to build a
- // MemOperand. They are live for as long as the MemOperand is live.
- Register unmapped1 = map; // This is assumed to alias 'map'.
- Register unmapped2 = x4;
- MemOperand unmapped =
- GenerateUnmappedArgumentsLookup(masm, key, unmapped1, unmapped2, &slow);
- Operand unmapped_offset = unmapped.OffsetAsOperand();
- __ Str(value, unmapped);
- __ Add(x10, unmapped.base(), unmapped_offset);
- __ Mov(x11, value);
- __ RecordWrite(unmapped.base(), x10, x11, kLRHasNotBeenSaved,
- kDontSaveFPRegs);
- __ Ret();
- __ Bind(&slow);
- GenerateMiss(masm);
-}
-
-
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
// The return address is in lr.
Isolate* isolate = masm->isolate();
}
-static Operand GenerateMappedArgumentsLookup(
- MacroAssembler* masm, Register object, Register key, Register scratch1,
- Register scratch2, Label* unmapped_case, Label* slow_case) {
- Heap* heap = masm->isolate()->heap();
- Factory* factory = masm->isolate()->factory();
-
- // Check that the receiver is a JSObject. Because of the elements
- // map check later, we do not need to check for interceptors or
- // whether it requires access checks.
- __ JumpIfSmi(object, slow_case);
- // Check that the object is some kind of JSObject.
- __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
- __ j(below, slow_case);
-
- // Check that the key is a positive smi.
- __ test(key, Immediate(0x80000001));
- __ j(not_zero, slow_case);
-
- // Load the elements into scratch1 and check its map.
- Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
- __ mov(scratch1, FieldOperand(object, JSObject::kElementsOffset));
- __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
-
- // Check if element is in the range of mapped arguments. If not, jump
- // to the unmapped lookup with the parameter map in scratch1.
- __ mov(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
- __ sub(scratch2, Immediate(Smi::FromInt(2)));
- __ cmp(key, scratch2);
- __ j(above_equal, unmapped_case);
-
- // Load element index and check whether it is the hole.
- const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
- __ mov(scratch2,
- FieldOperand(scratch1, key, times_half_pointer_size, kHeaderSize));
- __ cmp(scratch2, factory->the_hole_value());
- __ j(equal, unmapped_case);
-
- // Load value from context and return it. We can reuse scratch1 because
- // we do not jump to the unmapped lookup (which requires the parameter
- // map in scratch1).
- const int kContextOffset = FixedArray::kHeaderSize;
- __ mov(scratch1, FieldOperand(scratch1, kContextOffset));
- return FieldOperand(scratch1, scratch2, times_half_pointer_size,
- Context::kHeaderSize);
-}
-
-
-static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
- Register key,
- Register parameter_map,
- Register scratch,
- Label* slow_case) {
- // Element is in arguments backing store, which is referenced by the
- // second element of the parameter_map.
- const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
- Register backing_store = parameter_map;
- __ mov(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
- Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
- __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
- __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
- __ cmp(key, scratch);
- __ j(greater_equal, slow_case);
- return FieldOperand(backing_store, key, times_half_pointer_size,
- FixedArray::kHeaderSize);
-}
-
-
void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
// The return address is on the stack.
Label slow, check_name, index_smi, index_name, property_array_property;
}
-void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
- // Return address is on the stack.
- Label slow, notin;
- Register receiver = StoreDescriptor::ReceiverRegister();
- Register name = StoreDescriptor::NameRegister();
- Register value = StoreDescriptor::ValueRegister();
- DCHECK(receiver.is(edx));
- DCHECK(name.is(ecx));
- DCHECK(value.is(eax));
-
- Operand mapped_location = GenerateMappedArgumentsLookup(
- masm, receiver, name, ebx, edi, &notin, &slow);
- __ mov(mapped_location, value);
- __ lea(ecx, mapped_location);
- __ mov(edx, value);
- __ RecordWrite(ebx, ecx, edx, kDontSaveFPRegs);
- __ Ret();
- __ bind(&notin);
- // The unmapped lookup expects that the parameter map is in ebx.
- Operand unmapped_location =
- GenerateUnmappedArgumentsLookup(masm, name, ebx, edi, &slow);
- __ mov(unmapped_location, value);
- __ lea(edi, unmapped_location);
- __ mov(edx, value);
- __ RecordWrite(ebx, edi, edx, kDontSaveFPRegs);
- __ Ret();
- __ bind(&slow);
- GenerateMiss(masm);
-}
-
-
static void KeyedStoreGenerateMegamorphicHelper(
MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) {
} else if (receiver_map->instance_type() < FIRST_JS_RECEIVER_TYPE) {
cached_stub = isolate()->builtins()->KeyedStoreIC_Slow();
} else {
- if (receiver_map->has_fast_elements() ||
- receiver_map->has_external_array_elements() ||
- receiver_map->has_fixed_typed_array_elements()) {
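+ // Sloppy arguments elements now get a dedicated handler stub instead of
+ // the removed KeyedStoreIC_SloppyArguments builtin.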
+ if (IsSloppyArgumentsElements(elements_kind)) {
+ cached_stub = KeyedStoreSloppyArgumentsStub(isolate()).GetCode();
+ } else if (receiver_map->has_fast_elements() ||
+ receiver_map->has_external_array_elements() ||
+ receiver_map->has_fixed_typed_array_elements()) {
cached_stub = StoreFastElementStub(isolate(), is_js_array,
elements_kind, store_mode).GetCode();
} else {
ElementsKind elements_kind = receiver_map->elements_kind();
bool is_jsarray = receiver_map->instance_type() == JS_ARRAY_TYPE;
Handle<Code> stub;
- if (receiver_map->has_fast_elements() ||
- receiver_map->has_external_array_elements() ||
- receiver_map->has_fixed_typed_array_elements()) {
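+ // Receivers with sloppy arguments elements use the dedicated store stub.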
+ if (receiver_map->has_sloppy_arguments_elements()) {
+ stub = KeyedStoreSloppyArgumentsStub(isolate()).GetCode();
+ } else if (receiver_map->has_fast_elements() ||
+ receiver_map->has_external_array_elements() ||
+ receiver_map->has_fixed_typed_array_elements()) {
stub = StoreFastElementStub(isolate(), is_jsarray, elements_kind,
store_mode).GetCode();
} else {
Handle<JSObject> receiver = Handle<JSObject>::cast(object);
bool key_is_smi_like = !Object::ToSmi(isolate(), key).is_null();
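+ // A strict-mode keyed store to a receiver with sloppy arguments elements
+ // still goes generic.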
if (receiver->elements()->map() ==
- isolate()->heap()->sloppy_arguments_elements_map()) {
- if (is_sloppy(language_mode())) {
- stub = sloppy_arguments_stub();
- } else {
- TRACE_GENERIC_IC(isolate(), "KeyedStoreIC", "arguments receiver");
- }
- } else if (key_is_smi_like &&
- !(target().is_identical_to(sloppy_arguments_stub()))) {
+ isolate()->heap()->sloppy_arguments_elements_map() &&
+ !is_sloppy(language_mode())) {
+ TRACE_GENERIC_IC(isolate(), "KeyedStoreIC", "arguments receiver");
+ } else if (key_is_smi_like) {
// We should go generic if receiver isn't a dictionary, but our
// prototype chain does have dictionary elements. This ensures that
// other non-dictionary receivers in the polymorphic case benefit
static void GenerateSlow(MacroAssembler* masm);
static void GenerateMegamorphic(MacroAssembler* masm,
LanguageMode language_mode);
- static void GenerateSloppyArguments(MacroAssembler* masm);
static Handle<Code> initialize_stub(Isolate* isolate,
LanguageMode language_mode,
private:
inline void set_target(Code* code);
- // Stub accessors.
- Handle<Code> sloppy_arguments_stub() {
- return isolate()->builtins()->KeyedStoreIC_SloppyArguments();
- }
-
static void Clear(Isolate* isolate, Address address, Code* target,
ConstantPoolArray* constant_pool);
}
-static MemOperand GenerateMappedArgumentsLookup(
- MacroAssembler* masm, Register object, Register key, Register scratch1,
- Register scratch2, Register scratch3, Label* unmapped_case,
- Label* slow_case) {
- Heap* heap = masm->isolate()->heap();
-
- // Check that the receiver is a JSObject. Because of the map check
- // later, we do not need to check for interceptors or whether it
- // requires access checks.
- __ JumpIfSmi(object, slow_case);
- // Check that the object is some kind of JSObject.
- __ GetObjectType(object, scratch1, scratch2);
- __ Branch(slow_case, lt, scratch2, Operand(FIRST_JS_RECEIVER_TYPE));
-
- // Check that the key is a positive smi.
- __ And(scratch1, key, Operand(0x80000001));
- __ Branch(slow_case, ne, scratch1, Operand(zero_reg));
-
- // Load the elements into scratch1 and check its map.
- Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
- __ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
- __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);
- // Check if element is in the range of mapped arguments. If not, jump
- // to the unmapped lookup with the parameter map in scratch1.
- __ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
- __ Subu(scratch2, scratch2, Operand(Smi::FromInt(2)));
- __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2));
-
- // Load element index and check whether it is the hole.
- const int kOffset =
- FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
-
- __ li(scratch3, Operand(kPointerSize >> 1));
- __ Mul(scratch3, key, scratch3);
- __ Addu(scratch3, scratch3, Operand(kOffset));
-
- __ Addu(scratch2, scratch1, scratch3);
- __ lw(scratch2, MemOperand(scratch2));
- __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
- __ Branch(unmapped_case, eq, scratch2, Operand(scratch3));
-
- // Load value from context and return it. We can reuse scratch1 because
- // we do not jump to the unmapped lookup (which requires the parameter
- // map in scratch1).
- __ lw(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
- __ li(scratch3, Operand(kPointerSize >> 1));
- __ Mul(scratch3, scratch2, scratch3);
- __ Addu(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
- __ Addu(scratch2, scratch1, scratch3);
- return MemOperand(scratch2);
-}
-
-
-static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
- Register key,
- Register parameter_map,
- Register scratch,
- Label* slow_case) {
- // Element is in arguments backing store, which is referenced by the
- // second element of the parameter_map. The parameter_map register
- // must be loaded with the parameter map of the arguments object and is
- // overwritten.
- const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
- Register backing_store = parameter_map;
- __ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
- __ CheckMap(backing_store, scratch, Heap::kFixedArrayMapRootIndex, slow_case,
- DONT_DO_SMI_CHECK);
- __ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
- __ Branch(slow_case, Ugreater_equal, key, Operand(scratch));
- __ li(scratch, Operand(kPointerSize >> 1));
- __ Mul(scratch, key, scratch);
- __ Addu(scratch, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ Addu(scratch, backing_store, scratch);
- return MemOperand(scratch);
-}
-
-
-void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
- Register receiver = StoreDescriptor::ReceiverRegister();
- Register key = StoreDescriptor::NameRegister();
- Register value = StoreDescriptor::ValueRegister();
- DCHECK(value.is(a0));
-
- Label slow, notin;
- // Store address is returned in register (of MemOperand) mapped_location.
- MemOperand mapped_location = GenerateMappedArgumentsLookup(
- masm, receiver, key, a3, t0, t1, &notin, &slow);
- __ sw(value, mapped_location);
- __ mov(t5, value);
- DCHECK_EQ(mapped_location.offset(), 0);
- __ RecordWrite(a3, mapped_location.rm(), t5, kRAHasNotBeenSaved,
- kDontSaveFPRegs);
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, value); // (In delay slot) return the value stored in v0.
- __ bind(&notin);
- // The unmapped lookup expects that the parameter map is in a3.
- // Store address is returned in register (of MemOperand) unmapped_location.
- MemOperand unmapped_location =
- GenerateUnmappedArgumentsLookup(masm, key, a3, t0, &slow);
- __ sw(value, unmapped_location);
- __ mov(t5, value);
- DCHECK_EQ(unmapped_location.offset(), 0);
- __ RecordWrite(a3, unmapped_location.rm(), t5, kRAHasNotBeenSaved,
- kDontSaveFPRegs);
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a0); // (In delay slot) return the value stored in v0.
- __ bind(&slow);
- GenerateMiss(masm);
-}
-
-
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
// The return address is in ra.
Isolate* isolate = masm->isolate();
}
-static MemOperand GenerateMappedArgumentsLookup(
- MacroAssembler* masm, Register object, Register key, Register scratch1,
- Register scratch2, Register scratch3, Label* unmapped_case,
- Label* slow_case) {
- Heap* heap = masm->isolate()->heap();
-
- // Check that the receiver is a JSObject. Because of the map check
- // later, we do not need to check for interceptors or whether it
- // requires access checks.
- __ JumpIfSmi(object, slow_case);
- // Check that the object is some kind of JSObject.
- __ GetObjectType(object, scratch1, scratch2);
- __ Branch(slow_case, lt, scratch2, Operand(FIRST_JS_RECEIVER_TYPE));
-
- // Check that the key is a positive smi.
- __ NonNegativeSmiTst(key, scratch1);
- __ Branch(slow_case, ne, scratch1, Operand(zero_reg));
-
- // Load the elements into scratch1 and check its map.
- Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
- __ ld(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
- __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);
- // Check if element is in the range of mapped arguments. If not, jump
- // to the unmapped lookup with the parameter map in scratch1.
- __ ld(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
- __ Dsubu(scratch2, scratch2, Operand(Smi::FromInt(2)));
- __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2));
-
- // Load element index and check whether it is the hole.
- const int kOffset =
- FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
-
- __ SmiUntag(scratch3, key);
- __ dsll(scratch3, scratch3, kPointerSizeLog2);
- __ Daddu(scratch3, scratch3, Operand(kOffset));
-
- __ Daddu(scratch2, scratch1, scratch3);
- __ ld(scratch2, MemOperand(scratch2));
- __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
- __ Branch(unmapped_case, eq, scratch2, Operand(scratch3));
-
- // Load value from context and return it. We can reuse scratch1 because
- // we do not jump to the unmapped lookup (which requires the parameter
- // map in scratch1).
- __ ld(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
- __ SmiUntag(scratch3, scratch2);
- __ dsll(scratch3, scratch3, kPointerSizeLog2);
- __ Daddu(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
- __ Daddu(scratch2, scratch1, scratch3);
- return MemOperand(scratch2);
-}
-
-
-static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
- Register key,
- Register parameter_map,
- Register scratch,
- Label* slow_case) {
- // Element is in arguments backing store, which is referenced by the
- // second element of the parameter_map. The parameter_map register
- // must be loaded with the parameter map of the arguments object and is
- // overwritten.
- const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
- Register backing_store = parameter_map;
- __ ld(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
- __ CheckMap(backing_store, scratch, Heap::kFixedArrayMapRootIndex, slow_case,
- DONT_DO_SMI_CHECK);
- __ ld(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
- __ Branch(slow_case, Ugreater_equal, key, Operand(scratch));
- __ SmiUntag(scratch, key);
- __ dsll(scratch, scratch, kPointerSizeLog2);
- __ Daddu(scratch, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ Daddu(scratch, backing_store, scratch);
- return MemOperand(scratch);
-}
-
-
-void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
- Register receiver = StoreDescriptor::ReceiverRegister();
- Register key = StoreDescriptor::NameRegister();
- Register value = StoreDescriptor::ValueRegister();
- DCHECK(value.is(a0));
-
- Label slow, notin;
- // Store address is returned in register (of MemOperand) mapped_location.
- MemOperand mapped_location = GenerateMappedArgumentsLookup(
- masm, receiver, key, a3, a4, a5, &notin, &slow);
- __ sd(value, mapped_location);
- __ mov(t1, value);
- DCHECK_EQ(mapped_location.offset(), 0);
- __ RecordWrite(a3, mapped_location.rm(), t1, kRAHasNotBeenSaved,
- kDontSaveFPRegs);
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, value); // (In delay slot) return the value stored in v0.
- __ bind(&notin);
- // The unmapped lookup expects that the parameter map is in a3.
- // Store address is returned in register (of MemOperand) unmapped_location.
- MemOperand unmapped_location =
- GenerateUnmappedArgumentsLookup(masm, key, a3, a4, &slow);
- __ sd(value, unmapped_location);
- __ mov(t1, value);
- DCHECK_EQ(unmapped_location.offset(), 0);
- __ RecordWrite(a3, unmapped_location.rm(), t1, kRAHasNotBeenSaved,
- kDontSaveFPRegs);
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a0); // (In delay slot) return the value stored in v0.
- __ bind(&slow);
- GenerateMiss(masm);
-}
-
-
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
// The return address is in ra.
Isolate* isolate = masm->isolate();
}
-static Operand GenerateMappedArgumentsLookup(
- MacroAssembler* masm, Register object, Register key, Register scratch1,
- Register scratch2, Register scratch3, Label* unmapped_case,
- Label* slow_case) {
- Heap* heap = masm->isolate()->heap();
-
- // Check that the receiver is a JSObject. Because of the elements
- // map check later, we do not need to check for interceptors or
- // whether it requires access checks.
- __ JumpIfSmi(object, slow_case);
- // Check that the object is some kind of JSObject.
- __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
- __ j(below, slow_case);
-
- // Check that the key is a positive smi.
- Condition check = masm->CheckNonNegativeSmi(key);
- __ j(NegateCondition(check), slow_case);
-
- // Load the elements into scratch1 and check its map. If not, jump
- // to the unmapped lookup with the parameter map in scratch1.
- Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
- __ movp(scratch1, FieldOperand(object, JSObject::kElementsOffset));
- __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
-
- // Check if element is in the range of mapped arguments.
- __ movp(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
- __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
- __ cmpp(key, scratch2);
- __ j(greater_equal, unmapped_case);
-
- // Load element index and check whether it is the hole.
- const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
- __ SmiToInteger64(scratch3, key);
- __ movp(scratch2,
- FieldOperand(scratch1, scratch3, times_pointer_size, kHeaderSize));
- __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
- __ j(equal, unmapped_case);
-
- // Load value from context and return it. We can reuse scratch1 because
- // we do not jump to the unmapped lookup (which requires the parameter
- // map in scratch1).
- __ movp(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
- __ SmiToInteger64(scratch3, scratch2);
- return FieldOperand(scratch1, scratch3, times_pointer_size,
- Context::kHeaderSize);
-}
-
-
-static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
- Register key,
- Register parameter_map,
- Register scratch,
- Label* slow_case) {
- // Element is in arguments backing store, which is referenced by the
- // second element of the parameter_map. The parameter_map register
- // must be loaded with the parameter map of the arguments object and is
- // overwritten.
- const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
- Register backing_store = parameter_map;
- __ movp(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
- Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
- __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
- __ movp(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
- __ cmpp(key, scratch);
- __ j(greater_equal, slow_case);
- __ SmiToInteger64(scratch, key);
- return FieldOperand(backing_store, scratch, times_pointer_size,
- FixedArray::kHeaderSize);
-}
-
-
-void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
- // The return address is on the stack.
- Label slow, notin;
- Register receiver = StoreDescriptor::ReceiverRegister();
- Register name = StoreDescriptor::NameRegister();
- Register value = StoreDescriptor::ValueRegister();
- DCHECK(receiver.is(rdx));
- DCHECK(name.is(rcx));
- DCHECK(value.is(rax));
-
- Operand mapped_location = GenerateMappedArgumentsLookup(
- masm, receiver, name, rbx, rdi, r8, &notin, &slow);
- __ movp(mapped_location, value);
- __ leap(r9, mapped_location);
- __ movp(r8, value);
- __ RecordWrite(rbx, r9, r8, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
- INLINE_SMI_CHECK);
- __ Ret();
- __ bind(&notin);
- // The unmapped lookup expects that the parameter map is in rbx.
- Operand unmapped_location =
- GenerateUnmappedArgumentsLookup(masm, name, rbx, rdi, &slow);
- __ movp(unmapped_location, value);
- __ leap(r9, unmapped_location);
- __ movp(r8, value);
- __ RecordWrite(rbx, r9, r8, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
- INLINE_SMI_CHECK);
- __ Ret();
- __ bind(&slow);
- GenerateMiss(masm);
-}
-
-
void LoadIC::GenerateNormal(MacroAssembler* masm) {
Register dictionary = rax;
DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister()));