}
+void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
+  // Loads a global variable that lives in a script context.  The context
+  // chain depth is compiled into the stub (see depth()), so the walk below
+  // is fully unrolled.
+  Register context = cp;
+  Register result = r0;
+  Register slot = r2;  // Untagged slot index (smi-tagged only for the runtime).
+  Register name = r3;
+  Label slow_case;
+
+  // Go up the context chain to the script context.
+  for (int i = 0; i < depth(); ++i) {
+    __ ldr(result, ContextOperand(context, Context::PREVIOUS_INDEX));
+    context = result;
+  }
+
+  // Load the PropertyCell value at the specified slot.
+  __ add(result, context, Operand(slot, LSL, kPointerSizeLog2));
+  __ ldr(result, ContextOperand(result));
+  __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));
+
+  // If the result is not the_hole, return. Otherwise, handle in the runtime.
+  __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
+  __ Ret(ne);
+
+  // Fallback to runtime.  Falls through from the hole check above.
+  __ bind(&slow_case);
+  __ SmiTag(slot);  // The runtime expects a smi-tagged slot index.
+  __ push(slot);
+  __ push(name);
+  __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
+}
+
+
+void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
+  // Stores a global variable that lives in a script context.  Fast paths
+  // handle mutable cells and constant-type cells whose invariants still
+  // hold; everything else falls back to the runtime.
+  // NOTE: the label references below were corrupted by HTML-entity decoding
+  // ("&not" -> U+00AC); they are restored to "&not_...".
+  Register value = r0;
+  Register slot = r2;  // Untagged slot index (smi-tagged only for the runtime).
+  Register name = r3;
+
+  Register cell = r1;
+  Register cell_details = r4;
+  Register cell_value = r5;
+  Register cell_value_map = r6;
+  Register scratch = r9;
+
+  Register context = cp;
+  // The cell register doubles as scratch while walking the context chain;
+  // it is not live until after the walk.
+  Register context_temp = cell;
+
+  Label fast_heapobject_case, fast_smi_case, slow_case;
+
+  if (FLAG_debug_code) {
+    __ CompareRoot(value, Heap::kTheHoleValueRootIndex);
+    __ Check(ne, kUnexpectedValue);
+    __ AssertName(name);
+  }
+
+  // Go up the context chain to the script context.
+  for (int i = 0; i < depth(); i++) {
+    __ ldr(context_temp, ContextOperand(context, Context::PREVIOUS_INDEX));
+    context = context_temp;
+  }
+
+  // Load the PropertyCell at the specified slot.
+  __ add(cell, context, Operand(slot, LSL, kPointerSizeLog2));
+  __ ldr(cell, ContextOperand(cell));
+
+  // Load PropertyDetails for the cell (actually only the cell_type and kind).
+  __ ldr(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset));
+  __ SmiUntag(cell_details);
+  __ and_(cell_details, cell_details,
+          Operand(PropertyDetails::PropertyCellTypeField::kMask |
+                  PropertyDetails::KindField::kMask));
+
+  // Check if PropertyCell holds mutable data.
+  Label not_mutable_data;
+  __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
+                                   PropertyCellType::kMutable) |
+                               PropertyDetails::KindField::encode(kData)));
+  __ b(ne, &not_mutable_data);
+  __ JumpIfSmi(value, &fast_smi_case);
+
+  __ bind(&fast_heapobject_case);
+  __ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+  // RecordWriteField clobbers the value register, so we copy it before the
+  // call.  r4 (cell_details) is dead at this point and is reused for the copy.
+  __ mov(r4, Operand(value));
+  __ RecordWriteField(cell, PropertyCell::kValueOffset, r4, scratch,
+                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  __ Ret();
+
+  __ bind(&not_mutable_data);
+  // Check if PropertyCell value matches the new value (relevant for Constant,
+  // ConstantType and Undefined cells).
+  Label not_same_value;
+  __ ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+  __ cmp(cell_value, value);
+  __ b(ne, &not_same_value);
+
+  if (FLAG_debug_code) {
+    Label done;
+    // This can only be true for Constant, ConstantType and Undefined cells,
+    // because we never store the_hole via this stub.
+    __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
+                                     PropertyCellType::kConstant) |
+                                 PropertyDetails::KindField::encode(kData)));
+    __ b(eq, &done);
+    __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
+                                     PropertyCellType::kConstantType) |
+                                 PropertyDetails::KindField::encode(kData)));
+    __ b(eq, &done);
+    __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
+                                     PropertyCellType::kUndefined) |
+                                 PropertyDetails::KindField::encode(kData)));
+    __ Check(eq, kUnexpectedValue);
+    __ bind(&done);
+  }
+  // Storing the same value: nothing to do.
+  __ Ret();
+  __ bind(&not_same_value);
+
+  // Check if PropertyCell contains data with constant type.
+  __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
+                                   PropertyCellType::kConstantType) |
+                               PropertyDetails::KindField::encode(kData)));
+  __ b(ne, &slow_case);
+
+  // Now either both old and new values must be smis or both must be heap
+  // objects with same map.
+  Label value_is_heap_object;
+  __ JumpIfNotSmi(value, &value_is_heap_object);
+  __ JumpIfNotSmi(cell_value, &slow_case);
+  // Old and new values are smis, no need for a write barrier here.
+  __ bind(&fast_smi_case);
+  __ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+  __ Ret();
+
+  __ bind(&value_is_heap_object);
+  __ JumpIfSmi(cell_value, &slow_case);
+
+  __ ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
+  __ ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
+  __ cmp(cell_value_map, scratch);
+  __ b(eq, &fast_heapobject_case);
+
+  // Fallback to runtime.
+  __ bind(&slow_case);
+  __ SmiTag(slot);  // The runtime expects a smi-tagged slot index.
+  __ push(slot);
+  __ push(name);
+  __ push(value);
+  __ TailCallRuntime(is_strict(language_mode())
+                         ? Runtime::kStoreGlobalViaContext_Strict
+                         : Runtime::kStoreGlobalViaContext_Sloppy,
+                     3, 1);
+}
+
+
// Returns the distance in bytes between two external references.
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
  const int byte_distance = static_cast<int>(ref0.address() - ref1.address());
  return byte_distance;
}
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index();
- int depth = scope()->ContextChainLength(var->scope());
- __ mov(LoadGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(depth)));
- __ mov(LoadGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(slot_index)));
- __ mov(LoadGlobalViaContextDescriptor::NameRegister(),
- Operand(var->name()));
- LoadGlobalViaContextStub stub(isolate(), depth);
- __ CallStub(&stub);
-
+ const int slot = var->index();
+ const int depth = scope()->ContextChainLength(var->scope());
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ mov(LoadGlobalViaContextDescriptor::NameRegister(),
+ Operand(var->name()));
+ LoadGlobalViaContextStub stub(isolate(), depth);
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
} else {
__ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
__ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index() + 1;
- int depth = scope()->ContextChainLength(var->scope());
- __ mov(StoreGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(depth)));
- __ mov(StoreGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(slot_index)));
- __ mov(StoreGlobalViaContextDescriptor::NameRegister(),
- Operand(var->name()));
- DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r0));
- StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
- __ CallStub(&stub);
-
+ const int slot = var->index() + 1;
+ const int depth = scope()->ContextChainLength(var->scope());
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ mov(StoreGlobalViaContextDescriptor::NameRegister(),
+ Operand(var->name()));
+ DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r0));
+ StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ push(r0);
+ __ CallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.
DCHECK(!var->IsLookupSlot());
const Register StoreTransitionDescriptor::MapRegister() { return r3; }
-const Register LoadGlobalViaContextDescriptor::DepthRegister() { return r1; }
// Calling convention for the {Load,Store}GlobalViaContext stubs: the context
// depth is baked into the stub (DepthRegister removed), the slot index is
// passed in r2, the name in r3, and stores take the value in r0.
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return r2; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return r3; }
-const Register StoreGlobalViaContextDescriptor::DepthRegister() { return r1; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return r2; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return r3; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return r0; }
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->result()).is(r0));
- __ mov(LoadGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(instr->depth())));
- __ mov(LoadGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(instr->slot_index())));
- __ mov(LoadGlobalViaContextDescriptor::NameRegister(),
- Operand(instr->name()));
-
- Handle<Code> stub =
- CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ mov(LoadGlobalViaContextDescriptor::NameRegister(),
+ Operand(instr->name()));
+ Handle<Code> stub =
+ CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
}
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
- __ mov(StoreGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(instr->depth())));
- __ mov(StoreGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(instr->slot_index())));
- __ mov(StoreGlobalViaContextDescriptor::NameRegister(),
- Operand(instr->name()));
-
- Handle<Code> stub =
- CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
- instr->language_mode()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ mov(StoreGlobalViaContextDescriptor::NameRegister(),
+ Operand(instr->name()));
+ Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
+ isolate(), depth, instr->language_mode())
+ .code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ push(StoreGlobalViaContextDescriptor::ValueRegister());
+ __ CallRuntime(is_strict(instr->language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
}
// |object| is the object being stored into, |value| is the object being
// stored. value and scratch registers are clobbered by the operation.
// The offset is the offset from the start of the object, not the offset from
- // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
+ // the tagged HeapObject pointer. For use with FieldMemOperand(reg, off).
void RecordWriteField(
Register object,
int offset,
// -----------------------------------------------------------------------------
// Static helper functions.
-inline MemOperand ContextOperand(Register context, int index) {
+// Computes the MemOperand for a context slot.  The index defaults to 0 so
+// that ContextOperand(reg) addresses the slot a register already points at
+// (used after computing a slot address explicitly, as in the ViaContext
+// stubs).
+inline MemOperand ContextOperand(Register context, int index = 0) {
  return MemOperand(context, Context::SlotOffset(index));
}
}
+void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
+  // Loads a global variable that lives in a script context.  The context
+  // chain depth is compiled into the stub (see depth()), so the walk below
+  // is fully unrolled.
+  Register context = cp;
+  Register result = x0;
+  Register slot = x2;  // Untagged slot index (smi-tagged only for the runtime).
+  Register name = x3;
+  Label slow_case;
+
+  // Go up the context chain to the script context.
+  for (int i = 0; i < depth(); ++i) {
+    __ Ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX));
+    context = result;
+  }
+
+  // Load the PropertyCell value at the specified slot.
+  __ Add(result, context, Operand(slot, LSL, kPointerSizeLog2));
+  __ Ldr(result, ContextMemOperand(result));
+  __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));
+
+  // If the result is not the_hole, return. Otherwise, handle in the runtime.
+  __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &slow_case);
+  __ Ret();
+
+  // Fallback to runtime.
+  __ Bind(&slow_case);
+  __ SmiTag(slot);  // The runtime expects a smi-tagged slot index.
+  __ Push(slot, name);
+  __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
+}
+
+
+void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
+  // Stores a global variable that lives in a script context.  Fast paths
+  // handle mutable cells and constant-type cells whose invariants still
+  // hold; everything else falls back to the runtime.
+  // NOTE: the label references below were corrupted by HTML-entity decoding
+  // ("&not" -> U+00AC); they are restored to "&not_...".
+  Register context = cp;
+  Register value = x0;
+  Register slot = x2;  // Untagged slot index (smi-tagged only for the runtime).
+  Register name = x3;
+  // context_temp and cell deliberately share x10: the scratch used for the
+  // context walk is dead before the cell is loaded.
+  Register context_temp = x10;
+  Register cell = x10;
+  Register cell_details = x11;
+  Register cell_value = x12;
+  Register cell_value_map = x13;
+  Register value_map = x14;
+  Label fast_heapobject_case, fast_smi_case, slow_case;
+
+  if (FLAG_debug_code) {
+    __ CompareRoot(value, Heap::kTheHoleValueRootIndex);
+    __ Check(ne, kUnexpectedValue);
+    __ AssertName(name);
+  }
+
+  // Go up the context chain to the script context.
+  for (int i = 0; i < depth(); i++) {
+    __ Ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX));
+    context = context_temp;
+  }
+
+  // Load the PropertyCell at the specified slot.
+  __ Add(cell, context, Operand(slot, LSL, kPointerSizeLog2));
+  __ Ldr(cell, ContextMemOperand(cell));
+
+  // Load PropertyDetails for the cell (actually only the cell_type and kind).
+  __ Ldr(cell_details,
+         UntagSmiFieldMemOperand(cell, PropertyCell::kDetailsOffset));
+  __ And(cell_details, cell_details,
+         PropertyDetails::PropertyCellTypeField::kMask |
+             PropertyDetails::KindField::kMask);
+
+  // Check if PropertyCell holds mutable data.
+  Label not_mutable_data;
+  __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+                           PropertyCellType::kMutable) |
+                           PropertyDetails::KindField::encode(kData));
+  __ B(ne, &not_mutable_data);
+  __ JumpIfSmi(value, &fast_smi_case);
+  __ Bind(&fast_heapobject_case);
+  __ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+  // RecordWriteField clobbers the value register, so we copy it before the
+  // call.  x11 (cell_details) is dead at this point and is reused.
+  __ Mov(x11, value);
+  __ RecordWriteField(cell, PropertyCell::kValueOffset, x11, x12,
+                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  __ Ret();
+
+  __ Bind(&not_mutable_data);
+  // Check if PropertyCell value matches the new value (relevant for Constant,
+  // ConstantType and Undefined cells).
+  Label not_same_value;
+  __ Ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+  __ Cmp(cell_value, value);
+  __ B(ne, &not_same_value);
+
+  if (FLAG_debug_code) {
+    Label done;
+    // This can only be true for Constant, ConstantType and Undefined cells,
+    // because we never store the_hole via this stub.
+    __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+                             PropertyCellType::kConstant) |
+                             PropertyDetails::KindField::encode(kData));
+    __ B(eq, &done);
+    __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+                             PropertyCellType::kConstantType) |
+                             PropertyDetails::KindField::encode(kData));
+    __ B(eq, &done);
+    __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+                             PropertyCellType::kUndefined) |
+                             PropertyDetails::KindField::encode(kData));
+    __ Check(eq, kUnexpectedValue);
+    __ Bind(&done);
+  }
+  // Storing the same value: nothing to do.
+  __ Ret();
+  __ Bind(&not_same_value);
+
+  // Check if PropertyCell contains data with constant type.
+  __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+                           PropertyCellType::kConstantType) |
+                           PropertyDetails::KindField::encode(kData));
+  __ B(ne, &slow_case);
+
+  // Now either both old and new values must be smis or both must be heap
+  // objects with same map.
+  Label value_is_heap_object;
+  __ JumpIfNotSmi(value, &value_is_heap_object);
+  __ JumpIfNotSmi(cell_value, &slow_case);
+  // Old and new values are smis, no need for a write barrier here.
+  __ Bind(&fast_smi_case);
+  __ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+  __ Ret();
+
+  __ Bind(&value_is_heap_object);
+  __ JumpIfSmi(cell_value, &slow_case);
+
+  __ Ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
+  __ Ldr(value_map, FieldMemOperand(value, HeapObject::kMapOffset));
+  __ Cmp(cell_value_map, value_map);
+  __ B(eq, &fast_heapobject_case);
+
+  // Fall back to the runtime.
+  __ Bind(&slow_case);
+  __ SmiTag(slot);  // The runtime expects a smi-tagged slot index.
+  __ Push(slot, name, value);
+  __ TailCallRuntime(is_strict(language_mode())
+                         ? Runtime::kStoreGlobalViaContext_Strict
+                         : Runtime::kStoreGlobalViaContext_Sloppy,
+                     3, 1);
+}
+
+
// The number of register that CallApiFunctionAndReturn will need to save on
// the stack. The space for these registers need to be allocated in the
// ExitFrame before calling CallApiFunctionAndReturn.
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index();
- int depth = scope()->ContextChainLength(var->scope());
- __ Mov(LoadGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(depth)));
- __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(slot_index)));
- __ Mov(LoadGlobalViaContextDescriptor::NameRegister(),
- Operand(var->name()));
- LoadGlobalViaContextStub stub(isolate(), depth);
- __ CallStub(&stub);
-
+ int const slot = var->index();
+ int const depth = scope()->ContextChainLength(var->scope());
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
+ __ Mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
+ LoadGlobalViaContextStub stub(isolate(), depth);
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
} else {
__ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
__ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index() + 1;
- int depth = scope()->ContextChainLength(var->scope());
- __ Mov(StoreGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(depth)));
- __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(slot_index)));
- __ Mov(StoreGlobalViaContextDescriptor::NameRegister(),
- Operand(var->name()));
- DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(x0));
- StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
- __ CallStub(&stub);
-
+ int const slot = var->index() + 1;
+ int const depth = scope()->ContextChainLength(var->scope());
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
+ __ Mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
+ DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(x0));
+ StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ Push(x0);
+ __ CallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.
DCHECK(!var->IsLookupSlot());
const Register StoreTransitionDescriptor::MapRegister() { return x3; }
-const Register LoadGlobalViaContextDescriptor::DepthRegister() { return x1; }
// Calling convention for the {Load,Store}GlobalViaContext stubs: the context
// depth is baked into the stub (DepthRegister removed), the slot index is
// passed in x2, the name in x3, and stores take the value in x0.
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return x2; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return x3; }
-const Register StoreGlobalViaContextDescriptor::DepthRegister() { return x1; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return x2; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return x3; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return x0; }
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->result()).is(x0));
- __ Mov(LoadGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(instr->depth())));
- __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(instr->slot_index())));
- __ Mov(LoadGlobalViaContextDescriptor::NameRegister(),
- Operand(instr->name()));
-
- Handle<Code> stub =
- CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ Mov(LoadGlobalViaContextDescriptor::NameRegister(),
+ Operand(instr->name()));
+ Handle<Code> stub =
+ CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
}
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
- __ Mov(StoreGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(instr->depth())));
- __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(instr->slot_index())));
- __ Mov(StoreGlobalViaContextDescriptor::NameRegister(),
- Operand(instr->name()));
-
- Handle<Code> stub =
- CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
- instr->language_mode()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ Mov(StoreGlobalViaContextDescriptor::NameRegister(),
+ Operand(instr->name()));
+ Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
+ isolate(), depth, instr->language_mode())
+ .code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ Push(StoreGlobalViaContextDescriptor::ValueRegister());
+ __ CallRuntime(is_strict(instr->language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
}
// |object| is the object being stored into, |value| is the object being
// stored. value and scratch registers are clobbered by the operation.
// The offset is the offset from the start of the object, not the offset from
- // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
+ // the tagged HeapObject pointer. For use with FieldMemOperand(reg, off).
void RecordWriteField(
Register object,
int offset,
};
-inline MemOperand ContextMemOperand(Register context, int index) {
+// Computes the MemOperand for a context slot.  The index defaults to 0 so
+// that ContextMemOperand(reg) addresses the slot a register already points
+// at (used after computing a slot address explicitly, as in the ViaContext
+// stubs).
+inline MemOperand ContextMemOperand(Register context, int index = 0) {
  return MemOperand(context, Context::SlotOffset(index));
}
template <>
-HValue* CodeStubGraphBuilder<LoadGlobalViaContextStub>::BuildCodeStub() {
- LoadGlobalViaContextStub* stub = casted_stub();
- int depth_value = stub->depth();
- HValue* depth = GetParameter(0);
- HValue* slot_index = GetParameter(1);
- HValue* name = GetParameter(2);
-
- // Choose between dynamic or static context script fetching versions.
- depth = depth_value < LoadGlobalViaContextStub::kDynamicDepth
- ? nullptr
- : AddUncasted<HForceRepresentation>(depth, Representation::Smi());
- slot_index =
- AddUncasted<HForceRepresentation>(slot_index, Representation::Smi());
-
- HValue* script_context = BuildGetParentContext(depth, depth_value);
- HValue* cell =
- Add<HLoadKeyed>(script_context, slot_index, nullptr, FAST_ELEMENTS);
-
- HValue* value = Add<HLoadNamedField>(cell, nullptr,
- HObjectAccess::ForPropertyCellValue());
-
- IfBuilder builder(this);
- HValue* hole_value = graph()->GetConstantHole();
- builder.IfNot<HCompareObjectEqAndBranch, HValue*>(value, hole_value);
- builder.Then();
- { Push(value); }
- builder.Else();
- {
- Add<HPushArguments>(script_context, slot_index, name);
- Push(Add<HCallRuntime>(
- isolate()->factory()->empty_string(),
- Runtime::FunctionForId(Runtime::kLoadGlobalViaContext), 3));
- }
- builder.End();
- return Pop();
-}
-
-
-Handle<Code> LoadGlobalViaContextStub::GenerateCode() {
- return DoGenerateCode(this);
-}
-
-
-template <>
-HValue* CodeStubGraphBuilder<StoreGlobalViaContextStub>::BuildCodeStub() {
- StoreGlobalViaContextStub* stub = casted_stub();
- int depth_value = stub->depth();
- HValue* depth = GetParameter(0);
- HValue* slot_index = GetParameter(1);
- HValue* name = GetParameter(2);
- HValue* value = GetParameter(3);
-
- // Choose between dynamic or static context script fetching versions.
- depth = depth_value < StoreGlobalViaContextStub::kDynamicDepth
- ? nullptr
- : AddUncasted<HForceRepresentation>(depth, Representation::Smi());
- slot_index =
- AddUncasted<HForceRepresentation>(slot_index, Representation::Smi());
-
- HValue* script_context = BuildGetParentContext(depth, depth_value);
- HValue* cell =
- Add<HLoadKeyed>(script_context, slot_index, nullptr, FAST_ELEMENTS);
-
- // Fast case that requires storing to cell.
- HIfContinuation if_fast_store_continuation(graph()->CreateBasicBlock(),
- graph()->CreateBasicBlock());
-
- // Fast case that does not require storing to cell.
- HIfContinuation if_fast_no_store_continuation(graph()->CreateBasicBlock(),
- graph()->CreateBasicBlock());
-
- // This stub does the same as StoreGlobalStub but in a dynamic manner.
-
- HValue* cell_contents = Add<HLoadNamedField>(
- cell, nullptr, HObjectAccess::ForPropertyCellValue());
-
- IfBuilder if_hole(this);
- HValue* hole_value = graph()->GetConstantHole();
- if_hole.IfNot<HCompareObjectEqAndBranch, HValue*>(cell_contents, hole_value);
- if_hole.Then();
- {
- HValue* details = Add<HLoadNamedField>(
- cell, nullptr, HObjectAccess::ForPropertyCellDetails());
- HValue* cell_type =
- BuildDecodeField<PropertyDetails::PropertyCellTypeField>(details);
-
- // The code below relies on this.
- STATIC_ASSERT(PropertyCellType::kUndefined < PropertyCellType::kConstant);
- STATIC_ASSERT(PropertyCellType::kConstant <
- PropertyCellType::kConstantType);
- STATIC_ASSERT(PropertyCellType::kConstant < PropertyCellType::kMutable);
-
- // Handle all cell type cases.
- IfBuilder if_not_const(this);
-
- int cell_type_constant = static_cast<int>(PropertyCellType::kConstant);
- if_not_const.If<HCompareNumericAndBranch, HValue*>(
- cell_type, Add<HConstant>(cell_type_constant), Token::GT);
- if_not_const.Then();
- {
- // kConstantType or kMutable.
- IfBuilder if_const_type(this);
- int cell_type_constant_type =
- static_cast<int>(PropertyCellType::kConstantType);
- if_const_type.If<HCompareNumericAndBranch, HValue*>(
- cell_type, Add<HConstant>(cell_type_constant_type), Token::EQ);
- if_const_type.Then();
- {
- // Check that either both value and cell_contents are smi or
- // both have the same map.
- IfBuilder if_cell_is_smi(this);
- if_cell_is_smi.If<HIsSmiAndBranch>(cell_contents);
- if_cell_is_smi.Then();
- {
- IfBuilder if_value_is_smi(this);
- if_value_is_smi.If<HIsSmiAndBranch>(value);
- if_value_is_smi.Then();
- {
- // Both cell_contents and value are smis, do store.
- }
- if_value_is_smi.Else(); // Slow case.
- if_value_is_smi.JoinContinuation(&if_fast_store_continuation);
- }
- if_cell_is_smi.Else();
- {
- IfBuilder if_value_is_heap_object(this);
- if_value_is_heap_object.IfNot<HIsSmiAndBranch>(value);
- if_value_is_heap_object.Then();
- {
- // Both cell_contents and value are heap objects, do store.
- HValue* expected_map = Add<HLoadNamedField>(
- cell_contents, nullptr, HObjectAccess::ForMap());
- HValue* map =
- Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
- IfBuilder map_check(this);
- map_check.If<HCompareObjectEqAndBranch>(expected_map, map);
- map_check.Then();
- map_check.Else(); // Slow case.
- map_check.JoinContinuation(&if_fast_store_continuation);
-
- // The accessor case is handled by the map check above, since
- // the value must not have a AccessorPair map.
- }
- if_value_is_heap_object.Else(); // Slow case.
- if_value_is_heap_object.JoinContinuation(&if_fast_store_continuation);
- }
- if_cell_is_smi.EndUnreachable();
- }
- if_const_type.Else();
- {
- // Check that the property kind is kData.
- HValue* kind = BuildDecodeField<PropertyDetails::KindField>(details);
- HValue* data_kind_value = Add<HConstant>(kData);
-
- IfBuilder builder(this);
- builder.If<HCompareNumericAndBranch, HValue*>(kind, data_kind_value,
- Token::EQ);
- builder.Then();
- builder.Else(); // Slow case.
- builder.JoinContinuation(&if_fast_store_continuation);
- }
- if_const_type.EndUnreachable();
- }
- if_not_const.Else();
- {
- // kUndefined or kConstant, just check that the value matches.
- IfBuilder builder(this);
- builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
- builder.Then();
- builder.Else(); // Slow case.
- builder.JoinContinuation(&if_fast_no_store_continuation);
- }
- if_not_const.EndUnreachable();
- }
- if_hole.Else(); // Slow case.
- if_hole.JoinContinuation(&if_fast_store_continuation);
-
- // Do store for fast case.
- IfBuilder if_fast_store(this, &if_fast_store_continuation);
- if_fast_store.Then();
- {
- // All checks are done, store the value to the cell.
- Add<HStoreNamedField>(cell, HObjectAccess::ForPropertyCellValue(), value);
- }
- if_fast_store.Else();
- if_fast_store.JoinContinuation(&if_fast_no_store_continuation);
-
- // Bailout to runtime call for slow case.
- IfBuilder if_no_fast_store(this, &if_fast_no_store_continuation);
- if_no_fast_store.Then();
- {
- // Nothing else to do.
- }
- if_no_fast_store.Else();
- {
- // Slow case, call runtime.
- HInstruction* lang_mode = Add<HConstant>(casted_stub()->language_mode());
- Add<HPushArguments>(script_context, slot_index, name, value);
- Add<HPushArguments>(lang_mode);
- Add<HCallRuntime>(isolate()->factory()->empty_string(),
- Runtime::FunctionForId(Runtime::kStoreGlobalViaContext),
- 5);
- }
- if_no_fast_store.End();
- return value;
-}
-
-
-Handle<Code> StoreGlobalViaContextStub::GenerateCode() {
- return DoGenerateCode(this);
-}
-
-
-template <>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
HValue* object = GetParameter(StoreTransitionDescriptor::kReceiverIndex);
HValue* key = GetParameter(StoreTransitionDescriptor::kNameIndex);
}
-void LoadGlobalViaContextStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- // Must never deoptimize.
- descriptor->Initialize(FUNCTION_ADDR(UnexpectedStubMiss));
-}
-
-
-void StoreGlobalViaContextStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- // Must never deoptimize.
- descriptor->Initialize(FUNCTION_ADDR(UnexpectedStubMiss));
-}
-
-
void TransitionElementsKindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
descriptor->Initialize(
};
-class LoadGlobalViaContextStub : public HydrogenCodeStub {
+class LoadGlobalViaContextStub final : public PlatformCodeStub {
public:
- // Use the loop version for depths higher than this one.
- static const int kDynamicDepth = 7;
+ static const int kMaximumDepth = 15;
LoadGlobalViaContextStub(Isolate* isolate, int depth)
- : HydrogenCodeStub(isolate) {
- if (depth > kDynamicDepth) depth = kDynamicDepth;
- set_sub_minor_key(DepthBits::encode(depth));
+ : PlatformCodeStub(isolate) {
+ minor_key_ = DepthBits::encode(depth);
}
- int depth() const { return DepthBits::decode(sub_minor_key()); }
+ int depth() const { return DepthBits::decode(minor_key_); }
private:
- class DepthBits : public BitField<unsigned int, 0, 3> {};
- STATIC_ASSERT(kDynamicDepth <= DepthBits::kMax);
+ class DepthBits : public BitField<int, 0, 4> {};
+ STATIC_ASSERT(DepthBits::kMax == kMaximumDepth);
DEFINE_CALL_INTERFACE_DESCRIPTOR(LoadGlobalViaContext);
- DEFINE_HYDROGEN_CODE_STUB(LoadGlobalViaContext, HydrogenCodeStub);
+ DEFINE_PLATFORM_CODE_STUB(LoadGlobalViaContext, PlatformCodeStub);
};
-class StoreGlobalViaContextStub : public HydrogenCodeStub {
+class StoreGlobalViaContextStub final : public PlatformCodeStub {
public:
- // Use the loop version for depths higher than this one.
- static const int kDynamicDepth = 7;
+ static const int kMaximumDepth = 15;
StoreGlobalViaContextStub(Isolate* isolate, int depth,
LanguageMode language_mode)
- : HydrogenCodeStub(isolate) {
- if (depth > kDynamicDepth) depth = kDynamicDepth;
- set_sub_minor_key(DepthBits::encode(depth) |
- LanguageModeBits::encode(language_mode));
+ : PlatformCodeStub(isolate) {
+ minor_key_ =
+ DepthBits::encode(depth) | LanguageModeBits::encode(language_mode);
}
- int depth() const { return DepthBits::decode(sub_minor_key()); }
-
+ int depth() const { return DepthBits::decode(minor_key_); }
LanguageMode language_mode() const {
- return LanguageModeBits::decode(sub_minor_key());
+ return LanguageModeBits::decode(minor_key_);
}
private:
- class DepthBits : public BitField<unsigned int, 0, 4> {};
- STATIC_ASSERT(kDynamicDepth <= DepthBits::kMax);
-
+ class DepthBits : public BitField<int, 0, 4> {};
+ STATIC_ASSERT(DepthBits::kMax == kMaximumDepth);
class LanguageModeBits : public BitField<LanguageMode, 4, 2> {};
STATIC_ASSERT(LANGUAGE_END == 3);
- private:
DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreGlobalViaContext);
- DEFINE_HYDROGEN_CODE_STUB(StoreGlobalViaContext, HydrogenCodeStub);
+ DEFINE_PLATFORM_CODE_STUB(StoreGlobalViaContext, PlatformCodeStub);
};
if (p.slot_index() >= 0) {
Callable callable = CodeFactory::LoadGlobalViaContext(isolate(), 0);
Node* script_context = node->InputAt(0);
- node->ReplaceInput(0, jsgraph()->SmiConstant(0));
- node->ReplaceInput(1, jsgraph()->SmiConstant(p.slot_index()));
- node->ReplaceInput(2, jsgraph()->HeapConstant(p.name()));
- node->ReplaceInput(3, script_context); // Replace old context.
+ node->ReplaceInput(0, jsgraph()->Int32Constant(p.slot_index()));
+ node->ReplaceInput(1, jsgraph()->HeapConstant(p.name()));
+ node->ReplaceInput(2, script_context); // Set new context...
+ node->RemoveInput(3); // ...instead of old one.
ReplaceWithStubCall(node, callable, flags);
} else {
Callable callable = CodeFactory::StoreGlobalViaContext(isolate(), 0, p.language_mode());
Node* script_context = node->InputAt(0);
Node* value = node->InputAt(2);
- node->ReplaceInput(0, jsgraph()->SmiConstant(0));
- node->ReplaceInput(1, jsgraph()->SmiConstant(p.slot_index()));
- node->ReplaceInput(2, jsgraph()->HeapConstant(p.name()));
- node->ReplaceInput(3, value);
- node->ReplaceInput(4, script_context); // Replace old context.
+ node->ReplaceInput(0, jsgraph()->Int32Constant(p.slot_index()));
+ node->ReplaceInput(1, jsgraph()->HeapConstant(p.name()));
+ node->ReplaceInput(2, value);
+ node->ReplaceInput(3, script_context); // Set new context...
+ node->RemoveInput(4); // ...instead of old one.
ReplaceWithStubCall(node, callable, flags);
} else {
}
+void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context_reg = esi;
+ Register slot_reg = ebx;
+ Register name_reg = ecx;
+ Register result_reg = eax;
+ Label slow_case;
+
+ // Go up context chain to the script context.
+ for (int i = 0; i < depth(); ++i) {
+ __ mov(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = result_reg;
+ }
+
+ // Load the PropertyCell value at the specified slot.
+ __ mov(result_reg, ContextOperand(context_reg, slot_reg));
+ __ mov(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));
+
+ // Check that value is not the_hole.
+ __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
+ __ j(equal, &slow_case, Label::kNear);
+ __ Ret();
+
+ // Fallback to the runtime.
+ __ bind(&slow_case);
+ __ SmiTag(slot_reg);
+ __ Pop(result_reg); // Pop return address.
+ __ Push(slot_reg);
+ __ Push(name_reg);
+ __ Push(result_reg); // Push return address.
+ __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
+}
+
+
+void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context_reg = esi;
+ Register slot_reg = ebx;
+ Register name_reg = ecx;
+ Register value_reg = eax;
+ Register cell_reg = edi;
+ Register cell_details_reg = edx;
+ Label fast_heapobject_case, fast_smi_case, slow_case;
+
+ if (FLAG_debug_code) {
+ __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
+ __ Check(not_equal, kUnexpectedValue);
+ __ AssertName(name_reg);
+ }
+
+ // Go up context chain to the script context.
+ for (int i = 0; i < depth(); ++i) {
+ __ mov(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = cell_reg;
+ }
+
+ // Load the PropertyCell at the specified slot.
+ __ mov(cell_reg, ContextOperand(context_reg, slot_reg));
+
+ // Load PropertyDetails for the cell (actually only the cell_type and kind).
+ __ mov(cell_details_reg,
+ FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
+ __ SmiUntag(cell_details_reg);
+ __ and_(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::kMask |
+ PropertyDetails::KindField::kMask));
+
+
+ // Check if PropertyCell holds mutable data.
+ Label not_mutable_data;
+ __ cmp(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kMutable) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(not_equal, &not_mutable_data);
+ __ JumpIfSmi(value_reg, &fast_smi_case);
+ __ bind(&fast_heapobject_case);
+ __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
+ __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
+ cell_details_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ // RecordWriteField clobbers the value register, so we need to reload.
+ __ mov(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
+ __ Ret();
+ __ bind(&not_mutable_data);
+
+ // Check if PropertyCell value matches the new value (relevant for Constant,
+ // ConstantType and Undefined cells).
+ Label not_same_value;
+ __ cmp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
+ __ j(not_equal, &not_same_value,
+ FLAG_debug_code ? Label::kFar : Label::kNear);
+ if (FLAG_debug_code) {
+ Label done;
+ // This can only be true for Constant, ConstantType and Undefined cells,
+ // because we never store the_hole via this stub.
+ __ cmp(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstant) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(equal, &done);
+ __ cmp(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(equal, &done);
+ __ cmp(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kUndefined) |
+ PropertyDetails::KindField::encode(kData)));
+ __ Check(equal, kUnexpectedValue);
+ __ bind(&done);
+ }
+ __ Ret();
+ __ bind(&not_same_value);
+
+ // Check if PropertyCell contains data with constant type.
+ __ cmp(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(not_equal, &slow_case, Label::kNear);
+
+ // Now either both old and new values must be SMIs or both must be heap
+ // objects with same map.
+ Label value_is_heap_object;
+ Register cell_value_reg = cell_details_reg;
+ __ mov(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
+ __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
+ __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
+ // Old and new values are SMIs, no need for a write barrier here.
+ __ bind(&fast_smi_case);
+ __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
+ __ Ret();
+ __ bind(&value_is_heap_object);
+ __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
+ Register cell_value_map_reg = cell_value_reg;
+ __ mov(cell_value_map_reg,
+ FieldOperand(cell_value_reg, HeapObject::kMapOffset));
+ __ cmp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
+ __ j(equal, &fast_heapobject_case);
+
+ // Fallback to the runtime.
+ __ bind(&slow_case);
+ __ SmiTag(slot_reg);
+ __ Pop(cell_reg); // Pop return address.
+ __ Push(slot_reg);
+ __ Push(name_reg);
+ __ Push(value_reg);
+ __ Push(cell_reg); // Push return address.
+ __ TailCallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3, 1);
+}
+
+
// Generates an Operand for saving parameters after PrepareCallApiFunction.
static Operand ApiParameterOperand(int index) {
return Operand(esp, index * kPointerSize);
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index();
- int depth = scope()->ContextChainLength(var->scope());
- __ mov(LoadGlobalViaContextDescriptor::DepthRegister(),
- Immediate(Smi::FromInt(depth)));
- __ mov(LoadGlobalViaContextDescriptor::SlotRegister(),
- Immediate(Smi::FromInt(slot_index)));
- __ mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
- LoadGlobalViaContextStub stub(isolate(), depth);
- __ CallStub(&stub);
+ int const slot = var->index();
+ int const depth = scope()->ContextChainLength(var->scope());
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ Move(LoadGlobalViaContextDescriptor::SlotRegister(), Immediate(slot));
+ __ mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
+ LoadGlobalViaContextStub stub(isolate(), depth);
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
} else {
__ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index() + 1;
- int depth = scope()->ContextChainLength(var->scope());
- __ mov(StoreGlobalViaContextDescriptor::DepthRegister(),
- Immediate(Smi::FromInt(depth)));
- __ mov(StoreGlobalViaContextDescriptor::SlotRegister(),
- Immediate(Smi::FromInt(slot_index)));
- __ mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
- DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(eax));
- StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
- __ CallStub(&stub);
+ int const slot = var->index() + 1;
+ int const depth = scope()->ContextChainLength(var->scope());
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ Move(StoreGlobalViaContextDescriptor::SlotRegister(), Immediate(slot));
+ __ mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
+ DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(eax));
+ StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ Push(eax);
+ __ CallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.
}
-const Register LoadGlobalViaContextDescriptor::DepthRegister() { return edx; }
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return ebx; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return ecx; }
-const Register StoreGlobalViaContextDescriptor::DepthRegister() { return edx; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return ebx; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return ecx; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return eax; }
DCHECK(ToRegister(instr->context()).is(esi));
DCHECK(ToRegister(instr->result()).is(eax));
- __ mov(LoadGlobalViaContextDescriptor::DepthRegister(),
- Immediate(Smi::FromInt(instr->depth())));
- __ mov(LoadGlobalViaContextDescriptor::SlotRegister(),
- Immediate(Smi::FromInt(instr->slot_index())));
- __ mov(LoadGlobalViaContextDescriptor::NameRegister(), instr->name());
-
- Handle<Code> stub =
- CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
+ __ mov(LoadGlobalViaContextDescriptor::NameRegister(), instr->name());
+ Handle<Code> stub =
+ CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
}
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
- __ mov(StoreGlobalViaContextDescriptor::DepthRegister(),
- Immediate(Smi::FromInt(instr->depth())));
- __ mov(StoreGlobalViaContextDescriptor::SlotRegister(),
- Immediate(Smi::FromInt(instr->slot_index())));
- __ mov(StoreGlobalViaContextDescriptor::NameRegister(), instr->name());
-
- Handle<Code> stub =
- CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
- instr->language_mode()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
+ __ mov(StoreGlobalViaContextDescriptor::NameRegister(), instr->name());
+ Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
+ isolate(), depth, instr->language_mode())
+ .code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ Push(StoreGlobalViaContextDescriptor::ValueRegister());
+ __ CallRuntime(is_strict(instr->language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
}
}
+inline Operand ContextOperand(Register context, Register index) {
+ return Operand(context, index, times_pointer_size, Context::SlotOffset(0));
+}
+
+
inline Operand GlobalObjectOperand() {
return ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX);
}
LoadGlobalViaContextDescriptor::BuildCallInterfaceDescriptorFunctionType(
Isolate* isolate, int paramater_count) {
Type::FunctionType* function = Type::FunctionType::New(
- AnyTagged(), Type::Undefined(), 3, isolate->interface_descriptor_zone());
- function->InitParameter(0, SmiType());
- function->InitParameter(1, SmiType());
- function->InitParameter(2, AnyTagged());
+ AnyTagged(), Type::Undefined(), 2, isolate->interface_descriptor_zone());
+ function->InitParameter(0, UntaggedSigned32());
+ function->InitParameter(1, AnyTagged());
return function;
}
void LoadGlobalViaContextDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
- Register registers[] = {DepthRegister(), SlotRegister(), NameRegister()};
+ Register registers[] = {SlotRegister(), NameRegister()};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
StoreGlobalViaContextDescriptor::BuildCallInterfaceDescriptorFunctionType(
Isolate* isolate, int paramater_count) {
Type::FunctionType* function = Type::FunctionType::New(
- AnyTagged(), Type::Undefined(), 4, isolate->interface_descriptor_zone());
- function->InitParameter(0, SmiType());
- function->InitParameter(1, SmiType());
+ AnyTagged(), Type::Undefined(), 3, isolate->interface_descriptor_zone());
+ function->InitParameter(0, UntaggedSigned32());
+ function->InitParameter(1, AnyTagged());
function->InitParameter(2, AnyTagged());
- function->InitParameter(3, AnyTagged());
return function;
}
void StoreGlobalViaContextDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
- Register registers[] = {DepthRegister(), SlotRegister(), NameRegister(),
- ValueRegister()};
+ Register registers[] = {SlotRegister(), NameRegister(), ValueRegister()};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(LoadGlobalViaContextDescriptor,
CallInterfaceDescriptor)
- static const Register DepthRegister();
static const Register SlotRegister();
static const Register NameRegister();
};
DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(StoreGlobalViaContextDescriptor,
CallInterfaceDescriptor)
- static const Register DepthRegister();
static const Register SlotRegister();
static const Register NameRegister();
static const Register ValueRegister();
}
+void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context_reg = cp;
+ Register slot_reg = a2;
+ Register name_reg = a3;
+ Register result_reg = v0;
+ Label slow_case;
+
+ // Go up context chain to the script context.
+ for (int i = 0; i < depth(); ++i) {
+ __ lw(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = result_reg;
+ }
+
+ // Load the PropertyCell value at the specified slot.
+ __ sll(at, slot_reg, kPointerSizeLog2);
+ __ Addu(at, at, Operand(context_reg));
+ __ Addu(at, at, Context::SlotOffset(0));
+ __ lw(result_reg, MemOperand(at));
+ __ lw(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset));
+
+ // Check that value is not the_hole.
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+ __ Branch(&slow_case, eq, result_reg, Operand(at));
+ __ Ret();
+
+ // Fallback to the runtime.
+ __ bind(&slow_case);
+ __ SmiTag(slot_reg);
+ __ Push(slot_reg, name_reg);
+ __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
+}
+
+
+void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context_reg = cp;
+ Register slot_reg = a2;
+ Register name_reg = a3;
+ Register value_reg = a0;
+ Register cell_reg = t0;
+ Register cell_details_reg = t1;
+ Label fast_heapobject_case, fast_smi_case, slow_case;
+
+ if (FLAG_debug_code) {
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+ __ Check(ne, kUnexpectedValue, value_reg, Operand(at));
+ __ AssertName(name_reg);
+ }
+
+ // Go up context chain to the script context.
+ for (int i = 0; i < depth(); ++i) {
+ __ lw(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = cell_reg;
+ }
+
+ // Load the PropertyCell at the specified slot.
+ __ sll(at, slot_reg, kPointerSizeLog2);
+ __ Addu(at, at, Operand(context_reg));
+ __ Addu(at, at, Context::SlotOffset(0));
+ __ lw(cell_reg, MemOperand(at));
+
+ // Load PropertyDetails for the cell (actually only the cell_type and kind).
+ __ lw(cell_details_reg,
+ FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset));
+ __ SmiUntag(cell_details_reg);
+ __ And(cell_details_reg, cell_details_reg,
+ PropertyDetails::PropertyCellTypeField::kMask |
+ PropertyDetails::KindField::kMask);
+
+ // Check if PropertyCell holds mutable data.
+ Label not_mutable_data;
+ __ Branch(&not_mutable_data, ne, cell_details_reg,
+ Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kMutable) |
+ PropertyDetails::KindField::encode(kData)));
+ __ JumpIfSmi(value_reg, &fast_smi_case);
+ __ bind(&fast_heapobject_case);
+ __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
+ cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+ // RecordWriteField clobbers the value register, so we need to reload.
+ __ lw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ Ret();
+ __ bind(&not_mutable_data);
+
+ // Check if PropertyCell value matches the new value (relevant for Constant,
+ // ConstantType and Undefined cells).
+ Label not_same_value;
+ __ lw(at, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ Branch(&not_same_value, ne, value_reg, Operand(at));
+ if (FLAG_debug_code) {
+ Label done;
+ // This can only be true for Constant, ConstantType and Undefined cells,
+ // because we never store the_hole via this stub.
+ __ Branch(&done, eq, cell_details_reg,
+ Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstant) |
+ PropertyDetails::KindField::encode(kData)));
+ __ Branch(&done, eq, cell_details_reg,
+ Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+ __ Check(eq, kUnexpectedValue, cell_details_reg,
+ Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kUndefined) |
+ PropertyDetails::KindField::encode(kData)));
+ __ bind(&done);
+ }
+ __ Ret();
+ __ bind(&not_same_value);
+
+ // Check if PropertyCell contains data with constant type.
+ __ Branch(&slow_case, ne, cell_details_reg,
+ Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+
+ // Now either both old and new values must be SMIs or both must be heap
+ // objects with same map.
+ Label value_is_heap_object;
+ Register cell_value_reg = cell_details_reg;
+ __ lw(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ JumpIfNotSmi(value_reg, &value_is_heap_object);
+ __ JumpIfNotSmi(cell_value_reg, &slow_case);
+ // Old and new values are SMIs, no need for a write barrier here.
+ __ bind(&fast_smi_case);
+ __ Ret(USE_DELAY_SLOT);
+ __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ bind(&value_is_heap_object);
+ __ JumpIfSmi(cell_value_reg, &slow_case);
+ Register cell_value_map_reg = cell_value_reg;
+ __ lw(cell_value_map_reg,
+ FieldMemOperand(cell_value_reg, HeapObject::kMapOffset));
+ __ lw(at, FieldMemOperand(value_reg, HeapObject::kMapOffset));
+ __ Branch(&fast_heapobject_case, eq, cell_value_map_reg, Operand(at));
+
+ // Fallback to the runtime.
+ __ bind(&slow_case);
+ __ SmiTag(slot_reg);
+ __ Push(slot_reg, name_reg, value_reg);
+ __ TailCallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3, 1);
+}
+
+
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index();
- int depth = scope()->ContextChainLength(var->scope());
- __ li(LoadGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(depth)));
- __ li(LoadGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(slot_index)));
- __ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(var->name()));
- LoadGlobalViaContextStub stub(isolate(), depth);
- __ CallStub(&stub);
+ int const slot = var->index();
+ int const depth = scope()->ContextChainLength(var->scope());
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ li(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
+ LoadGlobalViaContextStub stub(isolate(), depth);
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
} else {
__ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
// Global var, const, or let.
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
- // Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index() + 1;
- int depth = scope()->ContextChainLength(var->scope());
- __ li(StoreGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(depth)));
- __ li(StoreGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(slot_index)));
- __ li(StoreGlobalViaContextDescriptor::NameRegister(),
- Operand(var->name()));
+ DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(a0));
__ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register());
- StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
- __ CallStub(&stub);
+ // Each var occupies two slots in the context: for reads and writes.
+ int const slot = var->index() + 1;
+ int const depth = scope()->ContextChainLength(var->scope());
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ li(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
+ StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ Push(a0);
+ __ CallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.
const Register StoreTransitionDescriptor::MapRegister() { return a3; }
-const Register LoadGlobalViaContextDescriptor::DepthRegister() { return a1; }
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return a2; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return a3; }
-const Register StoreGlobalViaContextDescriptor::DepthRegister() { return a1; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return a2; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return a3; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return a0; }
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->result()).is(v0));
- __ li(LoadGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(instr->depth())));
- __ li(LoadGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(instr->slot_index())));
- __ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(instr->name()));
-
- Handle<Code> stub =
- CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ li(LoadGlobalViaContextDescriptor::NameRegister(),
+ Operand(instr->name()));
+ Handle<Code> stub =
+ CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
}
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
- __ li(StoreGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(instr->depth())));
- __ li(StoreGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(instr->slot_index())));
- __ li(StoreGlobalViaContextDescriptor::NameRegister(),
- Operand(instr->name()));
-
- Handle<Code> stub =
- CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
- instr->language_mode()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ li(StoreGlobalViaContextDescriptor::NameRegister(),
+ Operand(instr->name()));
+ Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
+ isolate(), depth, instr->language_mode())
+ .code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ Push(StoreGlobalViaContextDescriptor::ValueRegister());
+ __ CallRuntime(is_strict(instr->language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
}
}
+void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context_reg = cp;
+ Register slot_reg = a2;
+ Register name_reg = a3;
+ Register result_reg = v0;
+ Label slow_case;
+
+ // Go up context chain to the script context.
+ for (int i = 0; i < depth(); ++i) {
+ __ lw(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = result_reg;
+ }
+
+ // Load the PropertyCell value at the specified slot.
+ __ dsll(at, slot_reg, kPointerSizeLog2);
+ __ Daddu(at, at, Operand(context_reg));
+ __ Daddu(at, at, Context::SlotOffset(0));
+ __ ld(result_reg, MemOperand(at));
+ __ ld(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset));
+
+ // Check that value is not the_hole.
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+ __ Branch(&slow_case, eq, result_reg, Operand(at));
+ __ Ret();
+
+ // Fallback to the runtime.
+ __ bind(&slow_case);
+ __ SmiTag(slot_reg);
+ __ Push(slot_reg, name_reg);
+ __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
+}
+
+
+void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context_reg = cp;
+ Register slot_reg = a2;
+ Register name_reg = a3;
+ Register value_reg = a0;
+ Register cell_reg = a4;
+ Register cell_details_reg = a5;
+ Label fast_heapobject_case, fast_smi_case, slow_case;
+
+ if (FLAG_debug_code) {
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+ __ Check(ne, kUnexpectedValue, value_reg, Operand(at));
+ __ AssertName(name_reg);
+ }
+
+ // Go up context chain to the script context.
+ for (int i = 0; i < depth(); ++i) {
+ __ ld(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = cell_reg;
+ }
+
+ // Load the PropertyCell at the specified slot.
+ __ dsll(at, slot_reg, kPointerSizeLog2);
+ __ Daddu(at, at, Operand(context_reg));
+ __ Daddu(at, at, Context::SlotOffset(0));
+ __ ld(cell_reg, MemOperand(at));
+
+ // Load PropertyDetails for the cell (actually only the cell_type and kind).
+ __ ld(cell_details_reg,
+ FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset));
+ __ SmiUntag(cell_details_reg);
+ __ And(cell_details_reg, cell_details_reg,
+ PropertyDetails::PropertyCellTypeField::kMask |
+ PropertyDetails::KindField::kMask);
+
+ // Check if PropertyCell holds mutable data.
+ Label not_mutable_data;
+ __ Branch(&not_mutable_data, ne, cell_details_reg,
+ Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kMutable) |
+ PropertyDetails::KindField::encode(kData)));
+ __ JumpIfSmi(value_reg, &fast_smi_case);
+ __ bind(&fast_heapobject_case);
+ __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
+ cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+ // RecordWriteField clobbers the value register, so we need to reload.
+ __ Ret(USE_DELAY_SLOT);
+ __ ld(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ bind(&not_mutable_data);
+
+ // Check if PropertyCell value matches the new value (relevant for Constant,
+ // ConstantType and Undefined cells).
+ Label not_same_value;
+ __ ld(at, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ Branch(&not_same_value, ne, value_reg, Operand(at));
+ if (FLAG_debug_code) {
+ Label done;
+ // This can only be true for Constant, ConstantType and Undefined cells,
+ // because we never store the_hole via this stub.
+ __ Branch(&done, eq, cell_details_reg,
+ Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstant) |
+ PropertyDetails::KindField::encode(kData)));
+ __ Branch(&done, eq, cell_details_reg,
+ Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+ __ Check(eq, kUnexpectedValue, cell_details_reg,
+ Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kUndefined) |
+ PropertyDetails::KindField::encode(kData)));
+ __ bind(&done);
+ }
+ __ Ret();
+ __ bind(&not_same_value);
+
+ // Check if PropertyCell contains data with constant type.
+ __ Branch(&slow_case, ne, cell_details_reg,
+ Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+
+ // Now either both old and new values must be SMIs or both must be heap
+ // objects with same map.
+ Label value_is_heap_object;
+ Register cell_value_reg = cell_details_reg;
+ __ ld(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ JumpIfNotSmi(value_reg, &value_is_heap_object);
+ __ JumpIfNotSmi(cell_value_reg, &slow_case);
+ // Old and new values are SMIs, no need for a write barrier here.
+ __ bind(&fast_smi_case);
+ __ Ret(USE_DELAY_SLOT);
+ __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ bind(&value_is_heap_object);
+ __ JumpIfSmi(cell_value_reg, &slow_case);
+ Register cell_value_map_reg = cell_value_reg;
+ __ ld(cell_value_map_reg,
+ FieldMemOperand(cell_value_reg, HeapObject::kMapOffset));
+ __ ld(at, FieldMemOperand(value_reg, HeapObject::kMapOffset));
+ __ Branch(&fast_heapobject_case, eq, cell_value_map_reg, Operand(at));
+
+ // Fallback to the runtime.
+ __ bind(&slow_case);
+ __ SmiTag(slot_reg);
+ __ Push(slot_reg, name_reg, value_reg);
+ __ TailCallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3, 1);
+}
+
+
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
int64_t offset = (ref0.address() - ref1.address());
DCHECK(static_cast<int>(offset) == offset);
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index();
- int depth = scope()->ContextChainLength(var->scope());
- __ li(LoadGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(depth)));
- __ li(LoadGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(slot_index)));
- __ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(var->name()));
- LoadGlobalViaContextStub stub(isolate(), depth);
- __ CallStub(&stub);
+ int const slot = var->index();
+ int const depth = scope()->ContextChainLength(var->scope());
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ li(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
+ LoadGlobalViaContextStub stub(isolate(), depth);
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
} else {
__ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
// Global var, const, or let.
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
- // Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index() + 1;
- int depth = scope()->ContextChainLength(var->scope());
- __ li(StoreGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(depth)));
- __ li(StoreGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(slot_index)));
- __ li(StoreGlobalViaContextDescriptor::NameRegister(),
- Operand(var->name()));
+ DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(a0));
__ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register());
- StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
- __ CallStub(&stub);
+ // Each var occupies two slots in the context: for reads and writes.
+ int const slot = var->index() + 1;
+ int const depth = scope()->ContextChainLength(var->scope());
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ li(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
+ StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ Push(a0);
+ __ CallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.
const Register StoreTransitionDescriptor::MapRegister() { return a3; }
-const Register LoadGlobalViaContextDescriptor::DepthRegister() { return a1; }
+// The context-chain depth is now a compile-time parameter of the stubs (or
+// the call falls back to the runtime for deep chains), so no DepthRegister
+// is needed in these descriptors.
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return a2; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return a3; }
-const Register StoreGlobalViaContextDescriptor::DepthRegister() { return a1; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return a2; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return a3; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return a0; }
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->result()).is(v0));
- __ li(LoadGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(instr->depth())));
- __ li(LoadGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(instr->slot_index())));
- __ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(instr->name()));
-
- Handle<Code> stub =
- CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ li(LoadGlobalViaContextDescriptor::NameRegister(),
+ Operand(instr->name()));
+ Handle<Code> stub =
+ CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
}
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
- __ li(StoreGlobalViaContextDescriptor::DepthRegister(),
- Operand(Smi::FromInt(instr->depth())));
- __ li(StoreGlobalViaContextDescriptor::SlotRegister(),
- Operand(Smi::FromInt(instr->slot_index())));
- __ li(StoreGlobalViaContextDescriptor::NameRegister(),
- Operand(instr->name()));
-
- Handle<Code> stub =
- CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
- instr->language_mode()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ // Shallow chains use the stub (depth is baked into its minor key); deeper
+ // chains fall back to the runtime, which walks to the script context itself.
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ __ li(StoreGlobalViaContextDescriptor::NameRegister(),
+ Operand(instr->name()));
+ Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
+ isolate(), depth, instr->language_mode())
+ .code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ Push(StoreGlobalViaContextDescriptor::ValueRegister());
+ // Read the language mode from the instruction — LCodeGen has no
+ // language_mode() accessor of its own (matches the x64 port).
+ __ CallRuntime(is_strict(instr->language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
}
+// %LoadGlobalViaContext(slot, name): slow path behind the
+// LoadGlobalViaContextStub. Takes the script-context slot index (as a Smi)
+// and the property name; the script context itself is reached from the
+// current context, so it is no longer passed as an argument.
RUNTIME_FUNCTION(Runtime_LoadGlobalViaContext) {
HandleScope scope(isolate);
- DCHECK(args.length() == 3);
- CONVERT_ARG_HANDLE_CHECKED(Context, script_context, 0);
- CONVERT_SMI_ARG_CHECKED(index, 1);
- CONVERT_ARG_HANDLE_CHECKED(Name, name, 2);
+ DCHECK_EQ(2, args.length());
+ CONVERT_SMI_ARG_CHECKED(slot, 0);
+ CONVERT_ARG_HANDLE_CHECKED(Name, name, 1);
+
+ // Go up context chain to the script context.
+ Handle<Context> script_context(isolate->context()->script_context(), isolate);
DCHECK(script_context->IsScriptContext());
- DCHECK(script_context->get(index)->IsPropertyCell());
+ DCHECK(script_context->get(slot)->IsPropertyCell());
- Handle<GlobalObject> global(script_context->global_object());
+ // Lookup the named property on the global object.
+ Handle<GlobalObject> global_object(script_context->global_object(), isolate);
+ LookupIterator it(global_object, name, LookupIterator::HIDDEN);
- LookupIterator it(global, name, LookupIterator::HIDDEN);
// Switch to fast mode only if there is a data property and it's not on
// a hidden prototype.
- if (LookupIterator::DATA == it.state() &&
+ if (it.state() == LookupIterator::DATA &&
it.GetHolder<Object>()->IsJSGlobalObject()) {
- // Now update cell in the script context.
+ // Now update the cell in the script context.
Handle<PropertyCell> cell = it.GetPropertyCell();
- script_context->set(index, *cell);
+ script_context->set(slot, *cell);
} else {
// This is not a fast case, so keep this access in a slow mode.
// Store empty_property_cell here to release the outdated property cell.
- script_context->set(index, isolate->heap()->empty_property_cell());
+ script_context->set(slot, isolate->heap()->empty_property_cell());
}
Handle<Object> result;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result, Object::GetProperty(&it));
-
return *result;
}
-RUNTIME_FUNCTION(Runtime_StoreGlobalViaContext) {
- HandleScope scope(isolate);
- DCHECK(args.length() == 5);
- CONVERT_ARG_HANDLE_CHECKED(Context, script_context, 0);
- CONVERT_SMI_ARG_CHECKED(index, 1);
- CONVERT_ARG_HANDLE_CHECKED(Name, name, 2);
- CONVERT_ARG_HANDLE_CHECKED(Object, value, 3);
- CONVERT_LANGUAGE_MODE_ARG_CHECKED(language_mode_arg, 4);
- DCHECK(script_context->IsScriptContext());
- DCHECK(script_context->get(index)->IsPropertyCell());
- LanguageMode language_mode = language_mode_arg;
+namespace {
+// Shared implementation for the _Strict and _Sloppy StoreGlobalViaContext
+// runtime entries: refreshes the script context's property cell for |slot|
+// and performs the store through a LookupIterator on the global object.
- Handle<GlobalObject> global(script_context->global_object());
+Object* StoreGlobalViaContext(Isolate* isolate, int slot, Handle<Name> name,
+ Handle<Object> value,
+ LanguageMode language_mode) {
+ // Go up context chain to the script context.
+ Handle<Context> script_context(isolate->context()->script_context(), isolate);
+ DCHECK(script_context->IsScriptContext());
+ DCHECK(script_context->get(slot)->IsPropertyCell());
- LookupIterator it(global, name, LookupIterator::HIDDEN);
+ // Lookup the named property on the global object.
+ Handle<GlobalObject> global_object(script_context->global_object(), isolate);
+ LookupIterator it(global_object, name, LookupIterator::HIDDEN);
// Switch to fast mode only if there is a data property and it's not on
// a hidden prototype.
if (LookupIterator::DATA == it.state() &&
it.GetHolder<Object>()->IsJSGlobalObject()) {
// Now update cell in the script context.
Handle<PropertyCell> cell = it.GetPropertyCell();
- script_context->set(index, *cell);
+ script_context->set(slot, *cell);
} else {
// This is not a fast case, so keep this access in a slow mode.
// Store empty_property_cell here to release the outdated property cell.
- script_context->set(index, isolate->heap()->empty_property_cell());
+ script_context->set(slot, isolate->heap()->empty_property_cell());
}
Handle<Object> result;
isolate, result,
Object::SetProperty(&it, value, language_mode,
Object::CERTAINLY_NOT_STORE_FROM_KEYED));
-
return *result;
}
+} // namespace
+
+
+// The language mode is encoded in the runtime-function choice rather than
+// passed as an argument, so each entry takes exactly three arguments.
+RUNTIME_FUNCTION(Runtime_StoreGlobalViaContext_Sloppy) {
+ HandleScope scope(isolate);
+ DCHECK_EQ(3, args.length());
+ CONVERT_SMI_ARG_CHECKED(slot, 0);
+ CONVERT_ARG_HANDLE_CHECKED(Name, name, 1);
+ CONVERT_ARG_HANDLE_CHECKED(Object, value, 2);
+
+ return StoreGlobalViaContext(isolate, slot, name, value, SLOPPY);
+}
+
+
+RUNTIME_FUNCTION(Runtime_StoreGlobalViaContext_Strict) {
+ HandleScope scope(isolate);
+ DCHECK_EQ(3, args.length());
+ CONVERT_SMI_ARG_CHECKED(slot, 0);
+ CONVERT_ARG_HANDLE_CHECKED(Name, name, 1);
+ CONVERT_ARG_HANDLE_CHECKED(Object, value, 2);
+
+ return StoreGlobalViaContext(isolate, slot, name, value, STRICT);
+}
+
RUNTIME_FUNCTION(Runtime_GetProperty) {
HandleScope scope(isolate);
F(GetPropertyStrong, 2, 1) \
F(KeyedGetProperty, 2, 1) \
F(KeyedGetPropertyStrong, 2, 1) \
- F(LoadGlobalViaContext, 3, 1) \
- F(StoreGlobalViaContext, 5, 1) \
+ F(LoadGlobalViaContext, 2, 1) \
+ F(StoreGlobalViaContext_Sloppy, 3, 1) \
+ F(StoreGlobalViaContext_Strict, 3, 1) \
F(AddNamedProperty, 4, 1) \
F(SetProperty, 4, 1) \
F(AddElement, 3, 1) \
}
+// Loads a global variable through the PropertyCell stored in the script
+// context; the context-chain depth is a compile-time constant of the stub
+// (depth()), so the chain walk is fully unrolled.
+void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context_reg = rsi;
+ Register slot_reg = rbx;
+ Register name_reg = rcx;
+ Register result_reg = rax;
+ Label slow_case;
+
+ // Go up context chain to the script context.
+ for (int i = 0; i < depth(); ++i) {
+ __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = rdi;
+ }
+
+ // Load the PropertyCell value at the specified slot.
+ __ movp(result_reg, ContextOperand(context_reg, slot_reg));
+ __ movp(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));
+
+ // Check that value is not the_hole.
+ __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
+ __ j(equal, &slow_case, Label::kNear);
+ __ Ret();
+
+ // Fallback to the runtime.
+ __ bind(&slow_case);
+ // The runtime expects the slot index as a Smi.
+ __ Integer32ToSmi(slot_reg, slot_reg);
+ // Re-push the return address so it ends up above the two arguments.
+ __ PopReturnAddressTo(kScratchRegister);
+ __ Push(slot_reg);
+ __ Push(name_reg);
+ __ Push(kScratchRegister);
+ __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
+}
+
+
+// Stores a value through the script context's PropertyCell, handling
+// Mutable and ConstantType cells inline; anything else (or a changed
+// constant) falls back to the runtime.
+void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context_reg = rsi;
+ Register slot_reg = rbx;
+ Register name_reg = rcx;
+ Register value_reg = rax;
+ Register cell_reg = r8;
+ Register cell_details_reg = rdx;
+ Register cell_value_reg = r9;
+ Label fast_heapobject_case, fast_smi_case, slow_case;
+
+ if (FLAG_debug_code) {
+ __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
+ __ Check(not_equal, kUnexpectedValue);
+ __ AssertName(name_reg);
+ }
+
+ // Go up context chain to the script context.
+ for (int i = 0; i < depth(); ++i) {
+ __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = rdi;
+ }
+
+ // Load the PropertyCell at the specified slot.
+ __ movp(cell_reg, ContextOperand(context_reg, slot_reg));
+
+ // Load PropertyDetails for the cell (actually only the cell_type and kind).
+ __ SmiToInteger32(cell_details_reg,
+ FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
+ __ andl(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::kMask |
+ PropertyDetails::KindField::kMask));
+
+
+ // Check if PropertyCell holds mutable data.
+ Label not_mutable_data;
+ __ cmpl(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kMutable) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(not_equal, &not_mutable_data);
+ __ JumpIfSmi(value_reg, &fast_smi_case);
+ __ bind(&fast_heapobject_case);
+ __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
+ __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
+ cell_value_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ // RecordWriteField clobbers the value register, so we need to reload.
+ __ movp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
+ __ Ret();
+ __ bind(&not_mutable_data);
+
+ // Check if PropertyCell value matches the new value (relevant for Constant,
+ // ConstantType and Undefined cells).
+ Label not_same_value;
+ __ movp(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
+ __ cmpp(cell_value_reg, value_reg);
+ // With debug checks enabled the verification code below is too large for a
+ // near jump.
+ __ j(not_equal, &not_same_value,
+ FLAG_debug_code ? Label::kFar : Label::kNear);
+ if (FLAG_debug_code) {
+ Label done;
+ // This can only be true for Constant, ConstantType and Undefined cells,
+ // because we never store the_hole via this stub.
+ __ cmpl(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstant) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(equal, &done);
+ __ cmpl(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(equal, &done);
+ __ cmpl(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kUndefined) |
+ PropertyDetails::KindField::encode(kData)));
+ __ Check(equal, kUnexpectedValue);
+ __ bind(&done);
+ }
+ __ Ret();
+ __ bind(&not_same_value);
+
+ // Check if PropertyCell contains data with constant type.
+ __ cmpl(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(not_equal, &slow_case, Label::kNear);
+
+ // Now either both old and new values must be SMIs or both must be heap
+ // objects with same map.
+ Label value_is_heap_object;
+ __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
+ __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
+ // Old and new values are SMIs, no need for a write barrier here.
+ __ bind(&fast_smi_case);
+ __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
+ __ Ret();
+ __ bind(&value_is_heap_object);
+ __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
+ Register cell_value_map_reg = cell_value_reg;
+ __ movp(cell_value_map_reg,
+ FieldOperand(cell_value_reg, HeapObject::kMapOffset));
+ __ cmpp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
+ __ j(equal, &fast_heapobject_case);
+
+ // Fallback to the runtime.
+ __ bind(&slow_case);
+ // The runtime expects the slot index as a Smi.
+ __ Integer32ToSmi(slot_reg, slot_reg);
+ __ PopReturnAddressTo(kScratchRegister);
+ __ Push(slot_reg);
+ __ Push(name_reg);
+ __ Push(value_reg);
+ __ Push(kScratchRegister);
+ __ TailCallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3, 1);
+}
+
+
static int Offset(ExternalReference ref0, ExternalReference ref1) {
int64_t offset = (ref0.address() - ref1.address());
// Check that fits into int.
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index();
- int depth = scope()->ContextChainLength(var->scope());
- __ Move(LoadGlobalViaContextDescriptor::DepthRegister(),
- Smi::FromInt(depth));
- __ Move(LoadGlobalViaContextDescriptor::SlotRegister(),
- Smi::FromInt(slot_index));
- __ Move(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
- LoadGlobalViaContextStub stub(isolate(), depth);
- __ CallStub(&stub);
+ int const slot = var->index();
+ int const depth = scope()->ContextChainLength(var->scope());
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ Set(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
+ __ Move(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
+ LoadGlobalViaContextStub stub(isolate(), depth);
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
} else {
__ Move(LoadDescriptor::NameRegister(), var->name());
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index() + 1;
- int depth = scope()->ContextChainLength(var->scope());
- __ Move(StoreGlobalViaContextDescriptor::DepthRegister(),
- Smi::FromInt(depth));
- __ Move(StoreGlobalViaContextDescriptor::SlotRegister(),
- Smi::FromInt(slot_index));
- __ Move(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
- DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(rax));
- StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
- __ CallStub(&stub);
+ int const slot = var->index() + 1;
+ int const depth = scope()->ContextChainLength(var->scope());
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ Set(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
+ __ Move(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
+ DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(rax));
+ StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
+ __ CallStub(&stub);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(var->name());
+ __ Push(rax);
+ __ CallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.
const Register StoreTransitionDescriptor::MapRegister() { return rbx; }
-const Register LoadGlobalViaContextDescriptor::DepthRegister() { return rdx; }
+// The context-chain depth is now a compile-time parameter of the stubs (or
+// the call falls back to the runtime for deep chains), so no DepthRegister
+// is needed in these descriptors.
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return rbx; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return rcx; }
-const Register StoreGlobalViaContextDescriptor::DepthRegister() { return rdx; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return rbx; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return rcx; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return rax; }
void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) {
DCHECK(ToRegister(instr->context()).is(rsi));
DCHECK(ToRegister(instr->result()).is(rax));
-
- __ Move(LoadGlobalViaContextDescriptor::DepthRegister(),
- Smi::FromInt(instr->depth()));
- __ Move(LoadGlobalViaContextDescriptor::SlotRegister(),
- Smi::FromInt(instr->slot_index()));
- __ Move(LoadGlobalViaContextDescriptor::NameRegister(), instr->name());
-
- Handle<Code> stub =
- CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ Set(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
+ __ Move(LoadGlobalViaContextDescriptor::NameRegister(), instr->name());
+ Handle<Code> stub =
+ CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
+ }
}
DCHECK(ToRegister(instr->context()).is(rsi));
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
-
- __ Move(StoreGlobalViaContextDescriptor::DepthRegister(),
- Smi::FromInt(instr->depth()));
- __ Move(StoreGlobalViaContextDescriptor::SlotRegister(),
- Smi::FromInt(instr->slot_index()));
- __ Move(StoreGlobalViaContextDescriptor::NameRegister(), instr->name());
-
- Handle<Code> stub =
- CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
- instr->language_mode()).code();
- CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ Set(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
+ __ Move(StoreGlobalViaContextDescriptor::NameRegister(), instr->name());
+ Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
+ isolate(), depth, instr->language_mode())
+ .code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(instr->name());
+ __ Push(StoreGlobalViaContextDescriptor::ValueRegister());
+ __ CallRuntime(is_strict(instr->language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3);
+ }
}
}
+// Operand for a context slot whose (untagged) index is only known at
+// runtime and held in |index|; scaled by the pointer size.
+inline Operand ContextOperand(Register context, Register index) {
+ return Operand(context, index, times_pointer_size, Context::SlotOffset(0));
+}
+
+
inline Operand GlobalObjectOperand() {
+ // The global object lives in a fixed slot of the current context (rsi).
return ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX);
}