From: bmeurer Date: Fri, 24 Jul 2015 07:16:46 +0000 (-0700) Subject: [stubs] Optimize LoadGlobalViaContextStub and StoreGlobalViaContextStub. X-Git-Tag: upstream/4.7.83~1206 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=d6ee366d5c5aef7c6bc550889a33520058b4e33a;p=platform%2Fupstream%2Fv8.git [stubs] Optimize LoadGlobalViaContextStub and StoreGlobalViaContextStub. This is the initial round of optimizations for the LoadGlobalViaContextStub and StoreGlobalViaContextStub, basically turning them into platform code stubs to avoid the Crankshaft overhead in the fast case, and making the runtime interface cheaper. R=ishell@chromium.org BUG=chromium:510694 LOG=n Review URL: https://codereview.chromium.org/1238143002 Cr-Commit-Position: refs/heads/master@{#29834} --- diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc index 4aeeb60b6..0ca4ff1be 100644 --- a/src/arm/code-stubs-arm.cc +++ b/src/arm/code-stubs-arm.cc @@ -5057,6 +5057,160 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { } +void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { + Register context = cp; + Register result = r0; + Register slot = r2; + Register name = r3; + Label slow_case; + + // Go up the context chain to the script context. + for (int i = 0; i < depth(); ++i) { + __ ldr(result, ContextOperand(context, Context::PREVIOUS_INDEX)); + context = result; + } + + // Load the PropertyCell value at the specified slot. + __ add(result, context, Operand(slot, LSL, kPointerSizeLog2)); + __ ldr(result, ContextOperand(result)); + __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); + + // If the result is not the_hole, return. Otherwise, handle in the runtime. + __ CompareRoot(result, Heap::kTheHoleValueRootIndex); + __ Ret(ne); + + // Fallback to runtime. + __ bind(&slow_case); + __ SmiTag(slot); + __ push(slot); + __ push(name); + __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1); +} + + +void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { + Register value = r0; + Register slot = r2; + Register name = r3; + + Register cell = r1; + Register cell_details = r4; + Register cell_value = r5; + Register cell_value_map = r6; + Register scratch = r9; + + Register context = cp; + Register context_temp = cell; + + Label fast_heapobject_case, fast_smi_case, slow_case; + + if (FLAG_debug_code) { + __ CompareRoot(value, Heap::kTheHoleValueRootIndex); + __ Check(ne, kUnexpectedValue); + __ AssertName(name); + } + + // Go up the context chain to the script context. + for (int i = 0; i < depth(); i++) { + __ ldr(context_temp, ContextOperand(context, Context::PREVIOUS_INDEX)); + context = context_temp; + } + + // Load the PropertyCell at the specified slot. + __ add(cell, context, Operand(slot, LSL, kPointerSizeLog2)); + __ ldr(cell, ContextOperand(cell)); + + // Load PropertyDetails for the cell (actually only the cell_type and kind). + __ ldr(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset)); + __ SmiUntag(cell_details); + __ and_(cell_details, cell_details, + Operand(PropertyDetails::PropertyCellTypeField::kMask | + PropertyDetails::KindField::kMask)); + + // Check if PropertyCell holds mutable data. 
+ Label not_mutable_data;
+ __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kMutable) |
+ PropertyDetails::KindField::encode(kData)));
+ __ b(ne, &not_mutable_data);
+ __ JumpIfSmi(value, &fast_smi_case);
+
+ __ bind(&fast_heapobject_case);
+ __ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+ // RecordWriteField clobbers the value register, so we copy it before the
+ // call.
+ __ mov(r4, Operand(value));
+ __ RecordWriteField(cell, PropertyCell::kValueOffset, r4, scratch,
+ kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ __ Ret();
+
+ __ bind(&not_mutable_data);
+ // Check if PropertyCell value matches the new value (relevant for Constant,
+ // ConstantType and Undefined cells).
+ Label not_same_value;
+ __ ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+ __ cmp(cell_value, value);
+ __ b(ne, &not_same_value);
+
+ if (FLAG_debug_code) {
+ Label done;
+ // This can only be true for Constant, ConstantType and Undefined cells,
+ // because we never store the_hole via this stub.
+ __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstant) |
+ PropertyDetails::KindField::encode(kData)));
+ __ b(eq, &done);
+ __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+ __ b(eq, &done);
+ __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kUndefined) |
+ PropertyDetails::KindField::encode(kData)));
+ __ Check(eq, kUnexpectedValue);
+ __ bind(&done);
+ }
+ __ Ret();
+ __ bind(&not_same_value);
+
+ // Check if PropertyCell contains data with constant type.
+ __ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+ __ b(ne, &slow_case);
+
+ // Now either both old and new values must be smis or both must be heap
+ // objects with same map.
+ Label value_is_heap_object;
+ __ JumpIfNotSmi(value, &value_is_heap_object);
+ __ JumpIfNotSmi(cell_value, &slow_case);
+ // Old and new values are smis, no need for a write barrier here.
+ __ bind(&fast_smi_case);
+ __ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+ __ Ret();
+
+ __ bind(&value_is_heap_object);
+ __ JumpIfSmi(cell_value, &slow_case);
+
+ __ ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
+ __ ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
+ __ cmp(cell_value_map, scratch);
+ __ b(eq, &fast_heapobject_case);
+
+ // Fallback to runtime.
+ __ bind(&slow_case);
+ __ SmiTag(slot);
+ __ push(slot);
+ __ push(name);
+ __ push(value);
+ __ TailCallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3, 1);
+}
+
+
 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
   return ref0.address() - ref1.address();
 }
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 34ac1a542..67840ee67 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -1415,17 +1415,19 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
     DCHECK(var->index() > 0);
     DCHECK(var->IsStaticGlobalObjectProperty());
     // Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index(); - int depth = scope()->ContextChainLength(var->scope()); - __ mov(LoadGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(depth))); - __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(slot_index))); - __ mov(LoadGlobalViaContextDescriptor::NameRegister(), - Operand(var->name())); - LoadGlobalViaContextStub stub(isolate(), depth); - __ CallStub(&stub); - + const int slot = var->index(); + const int depth = scope()->ContextChainLength(var->scope()); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ mov(LoadGlobalViaContextDescriptor::NameRegister(), + Operand(var->name())); + LoadGlobalViaContextStub stub(isolate(), depth); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } else { __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); __ mov(LoadDescriptor::NameRegister(), Operand(var->name())); @@ -2715,18 +2717,24 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, DCHECK(var->index() > 0); DCHECK(var->IsStaticGlobalObjectProperty()); // Each var occupies two slots in the context: for reads and writes. - int slot_index = var->index() + 1; - int depth = scope()->ContextChainLength(var->scope()); - __ mov(StoreGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(depth))); - __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(slot_index))); - __ mov(StoreGlobalViaContextDescriptor::NameRegister(), - Operand(var->name())); - DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r0)); - StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); - __ CallStub(&stub); - + const int slot = var->index() + 1; + const int depth = scope()->ContextChainLength(var->scope()); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ mov(StoreGlobalViaContextDescriptor::NameRegister(), + Operand(var->name())); + DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r0)); + StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ push(r0); + __ CallRuntime(is_strict(language_mode()) + ? Runtime::kStoreGlobalViaContext_Strict + : Runtime::kStoreGlobalViaContext_Sloppy, + 3); + } } else if (var->mode() == LET && op != Token::INIT_LET) { // Non-initializing assignment to let variable needs a write barrier. 
DCHECK(!var->IsLookupSlot()); diff --git a/src/arm/interface-descriptors-arm.cc b/src/arm/interface-descriptors-arm.cc index 0fd64b71b..5e98b98a7 100644 --- a/src/arm/interface-descriptors-arm.cc +++ b/src/arm/interface-descriptors-arm.cc @@ -36,12 +36,10 @@ const Register VectorStoreICDescriptor::VectorRegister() { return r3; } const Register StoreTransitionDescriptor::MapRegister() { return r3; } -const Register LoadGlobalViaContextDescriptor::DepthRegister() { return r1; } const Register LoadGlobalViaContextDescriptor::SlotRegister() { return r2; } const Register LoadGlobalViaContextDescriptor::NameRegister() { return r3; } -const Register StoreGlobalViaContextDescriptor::DepthRegister() { return r1; } const Register StoreGlobalViaContextDescriptor::SlotRegister() { return r2; } const Register StoreGlobalViaContextDescriptor::NameRegister() { return r3; } const Register StoreGlobalViaContextDescriptor::ValueRegister() { return r0; } diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc index 194b5fe0c..97881ba1a 100644 --- a/src/arm/lithium-codegen-arm.cc +++ b/src/arm/lithium-codegen-arm.cc @@ -2987,16 +2987,20 @@ void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) { DCHECK(ToRegister(instr->context()).is(cp)); DCHECK(ToRegister(instr->result()).is(r0)); - __ mov(LoadGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(instr->depth()))); - __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(instr->slot_index()))); - __ mov(LoadGlobalViaContextDescriptor::NameRegister(), - Operand(instr->name())); - - Handle stub = - CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ mov(LoadGlobalViaContextDescriptor::NameRegister(), + Operand(instr->name())); + Handle stub = + CodeFactory::LoadGlobalViaContext(isolate(), depth).code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } @@ -4249,17 +4253,25 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) { DCHECK(ToRegister(instr->value()) .is(StoreGlobalViaContextDescriptor::ValueRegister())); - __ mov(StoreGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(instr->depth()))); - __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(instr->slot_index()))); - __ mov(StoreGlobalViaContextDescriptor::NameRegister(), - Operand(instr->name())); - - Handle stub = - CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(), - instr->language_mode()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ mov(StoreGlobalViaContextDescriptor::NameRegister(), + Operand(instr->name())); + Handle stub = CodeFactory::StoreGlobalViaContext( + isolate(), depth, instr->language_mode()) + .code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ push(StoreGlobalViaContextDescriptor::ValueRegister()); + __ 
CallRuntime(is_strict(instr->language_mode()) + ? Runtime::kStoreGlobalViaContext_Strict + : Runtime::kStoreGlobalViaContext_Sloppy, + 3); + } } diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h index 962662887..a9c220352 100644 --- a/src/arm/macro-assembler-arm.h +++ b/src/arm/macro-assembler-arm.h @@ -250,7 +250,7 @@ class MacroAssembler: public Assembler { // |object| is the object being stored into, |value| is the object being // stored. value and scratch registers are clobbered by the operation. // The offset is the offset from the start of the object, not the offset from - // the tagged HeapObject pointer. For use with FieldOperand(reg, off). + // the tagged HeapObject pointer. For use with FieldMemOperand(reg, off). void RecordWriteField( Register object, int offset, @@ -1532,7 +1532,7 @@ class CodePatcher { // ----------------------------------------------------------------------------- // Static helper functions. -inline MemOperand ContextOperand(Register context, int index) { +inline MemOperand ContextOperand(Register context, int index = 0) { return MemOperand(context, Context::SlotOffset(index)); } diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc index 01189f212..d0ee80e68 100644 --- a/src/arm64/code-stubs-arm64.cc +++ b/src/arm64/code-stubs-arm64.cc @@ -5494,6 +5494,153 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { } +void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { + Register context = cp; + Register result = x0; + Register slot = x2; + Register name = x3; + Label slow_case; + + // Go up the context chain to the script context. + for (int i = 0; i < depth(); ++i) { + __ Ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX)); + context = result; + } + + // Load the PropertyCell value at the specified slot. + __ Add(result, context, Operand(slot, LSL, kPointerSizeLog2)); + __ Ldr(result, ContextMemOperand(result)); + __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); + + // If the result is not the_hole, return. Otherwise, handle in the runtime. + __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &slow_case); + __ Ret(); + + // Fallback to runtime. + __ Bind(&slow_case); + __ SmiTag(slot); + __ Push(slot, name); + __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1); +} + + +void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { + Register context = cp; + Register value = x0; + Register slot = x2; + Register name = x3; + Register context_temp = x10; + Register cell = x10; + Register cell_details = x11; + Register cell_value = x12; + Register cell_value_map = x13; + Register value_map = x14; + Label fast_heapobject_case, fast_smi_case, slow_case; + + if (FLAG_debug_code) { + __ CompareRoot(value, Heap::kTheHoleValueRootIndex); + __ Check(ne, kUnexpectedValue); + __ AssertName(name); + } + + // Go up the context chain to the script context. + for (int i = 0; i < depth(); i++) { + __ Ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX)); + context = context_temp; + } + + // Load the PropertyCell at the specified slot. + __ Add(cell, context, Operand(slot, LSL, kPointerSizeLog2)); + __ Ldr(cell, ContextMemOperand(cell)); + + // Load PropertyDetails for the cell (actually only the cell_type and kind). 
+ __ Ldr(cell_details,
+ UntagSmiFieldMemOperand(cell, PropertyCell::kDetailsOffset));
+ __ And(cell_details, cell_details,
+ PropertyDetails::PropertyCellTypeField::kMask |
+ PropertyDetails::KindField::kMask);
+
+ // Check if PropertyCell holds mutable data.
+ Label not_mutable_data;
+ __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kMutable) |
+ PropertyDetails::KindField::encode(kData));
+ __ B(ne, &not_mutable_data);
+ __ JumpIfSmi(value, &fast_smi_case);
+ __ Bind(&fast_heapobject_case);
+ __ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+ // RecordWriteField clobbers the value register, so we copy it before the
+ // call.
+ __ Mov(x11, value);
+ __ RecordWriteField(cell, PropertyCell::kValueOffset, x11, x12,
+ kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ __ Ret();
+
+ __ Bind(&not_mutable_data);
+ // Check if PropertyCell value matches the new value (relevant for Constant,
+ // ConstantType and Undefined cells).
+ Label not_same_value;
+ __ Ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+ __ Cmp(cell_value, value);
+ __ B(ne, &not_same_value);
+
+ if (FLAG_debug_code) {
+ Label done;
+ // This can only be true for Constant, ConstantType and Undefined cells,
+ // because we never store the_hole via this stub.
+ __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstant) |
+ PropertyDetails::KindField::encode(kData));
+ __ B(eq, &done);
+ __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData));
+ __ B(eq, &done);
+ __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kUndefined) |
+ PropertyDetails::KindField::encode(kData));
+ __ Check(eq, kUnexpectedValue);
+ __ Bind(&done);
+ }
+ __ Ret();
+ __ Bind(&not_same_value);
+
+ // Check if PropertyCell contains data with constant type.
+ __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData));
+ __ B(ne, &slow_case);
+
+ // Now either both old and new values must be smis or both must be heap
+ // objects with same map.
+ Label value_is_heap_object;
+ __ JumpIfNotSmi(value, &value_is_heap_object);
+ __ JumpIfNotSmi(cell_value, &slow_case);
+ // Old and new values are smis, no need for a write barrier here.
+ __ Bind(&fast_smi_case);
+ __ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+ __ Ret();
+
+ __ Bind(&value_is_heap_object);
+ __ JumpIfSmi(cell_value, &slow_case);
+
+ __ Ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
+ __ Ldr(value_map, FieldMemOperand(value, HeapObject::kMapOffset));
+ __ Cmp(cell_value_map, value_map);
+ __ B(eq, &fast_heapobject_case);
+
+ // Fall back to the runtime.
+ __ Bind(&slow_case);
+ __ SmiTag(slot);
+ __ Push(slot, name, value);
+ __ TailCallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3, 1);
+}
+
+
 // The number of register that CallApiFunctionAndReturn will need to save on
 // the stack. The space for these registers need to be allocated in the
 // ExitFrame before calling CallApiFunctionAndReturn.
diff --git a/src/arm64/full-codegen-arm64.cc b/src/arm64/full-codegen-arm64.cc index 10d1cc2b0..b06f2f7ce 100644 --- a/src/arm64/full-codegen-arm64.cc +++ b/src/arm64/full-codegen-arm64.cc @@ -1398,17 +1398,18 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy, DCHECK(var->index() > 0); DCHECK(var->IsStaticGlobalObjectProperty()); // Each var occupies two slots in the context: for reads and writes. - int slot_index = var->index(); - int depth = scope()->ContextChainLength(var->scope()); - __ Mov(LoadGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(depth))); - __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(slot_index))); - __ Mov(LoadGlobalViaContextDescriptor::NameRegister(), - Operand(var->name())); - LoadGlobalViaContextStub stub(isolate(), depth); - __ CallStub(&stub); - + int const slot = var->index(); + int const depth = scope()->ContextChainLength(var->scope()); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), slot); + __ Mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name()); + LoadGlobalViaContextStub stub(isolate(), depth); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } else { __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand()); __ Mov(LoadDescriptor::NameRegister(), Operand(var->name())); @@ -2401,18 +2402,23 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, DCHECK(var->index() > 0); DCHECK(var->IsStaticGlobalObjectProperty()); // Each var occupies two slots in the context: for reads and writes. - int slot_index = var->index() + 1; - int depth = scope()->ContextChainLength(var->scope()); - __ Mov(StoreGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(depth))); - __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(slot_index))); - __ Mov(StoreGlobalViaContextDescriptor::NameRegister(), - Operand(var->name())); - DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(x0)); - StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); - __ CallStub(&stub); - + int const slot = var->index() + 1; + int const depth = scope()->ContextChainLength(var->scope()); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), slot); + __ Mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name()); + DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(x0)); + StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ Push(x0); + __ CallRuntime(is_strict(language_mode()) + ? Runtime::kStoreGlobalViaContext_Strict + : Runtime::kStoreGlobalViaContext_Sloppy, + 3); + } } else if (var->mode() == LET && op != Token::INIT_LET) { // Non-initializing assignment to let variable needs a write barrier. 
DCHECK(!var->IsLookupSlot()); diff --git a/src/arm64/interface-descriptors-arm64.cc b/src/arm64/interface-descriptors-arm64.cc index 18f8210a5..a3366cd10 100644 --- a/src/arm64/interface-descriptors-arm64.cc +++ b/src/arm64/interface-descriptors-arm64.cc @@ -36,12 +36,10 @@ const Register VectorStoreICDescriptor::VectorRegister() { return x3; } const Register StoreTransitionDescriptor::MapRegister() { return x3; } -const Register LoadGlobalViaContextDescriptor::DepthRegister() { return x1; } const Register LoadGlobalViaContextDescriptor::SlotRegister() { return x2; } const Register LoadGlobalViaContextDescriptor::NameRegister() { return x3; } -const Register StoreGlobalViaContextDescriptor::DepthRegister() { return x1; } const Register StoreGlobalViaContextDescriptor::SlotRegister() { return x2; } const Register StoreGlobalViaContextDescriptor::NameRegister() { return x3; } const Register StoreGlobalViaContextDescriptor::ValueRegister() { return x0; } diff --git a/src/arm64/lithium-codegen-arm64.cc b/src/arm64/lithium-codegen-arm64.cc index 836a0609b..cac7f92ec 100644 --- a/src/arm64/lithium-codegen-arm64.cc +++ b/src/arm64/lithium-codegen-arm64.cc @@ -3384,16 +3384,20 @@ void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) { DCHECK(ToRegister(instr->context()).is(cp)); DCHECK(ToRegister(instr->result()).is(x0)); - __ Mov(LoadGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(instr->depth()))); - __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(instr->slot_index()))); - __ Mov(LoadGlobalViaContextDescriptor::NameRegister(), - Operand(instr->name())); - - Handle stub = - CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ Mov(LoadGlobalViaContextDescriptor::NameRegister(), + Operand(instr->name())); + Handle stub = + CodeFactory::LoadGlobalViaContext(isolate(), depth).code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } @@ -5540,17 +5544,25 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) { DCHECK(ToRegister(instr->value()) .is(StoreGlobalViaContextDescriptor::ValueRegister())); - __ Mov(StoreGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(instr->depth()))); - __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(instr->slot_index()))); - __ Mov(StoreGlobalViaContextDescriptor::NameRegister(), - Operand(instr->name())); - - Handle stub = - CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(), - instr->language_mode()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ Mov(StoreGlobalViaContextDescriptor::NameRegister(), + Operand(instr->name())); + Handle stub = CodeFactory::StoreGlobalViaContext( + isolate(), depth, instr->language_mode()) + .code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ Push(StoreGlobalViaContextDescriptor::ValueRegister()); + __ 
CallRuntime(is_strict(instr->language_mode()) + ? Runtime::kStoreGlobalViaContext_Strict + : Runtime::kStoreGlobalViaContext_Sloppy, + 3); + } } diff --git a/src/arm64/macro-assembler-arm64.h b/src/arm64/macro-assembler-arm64.h index f9aa8aa8e..135b9887e 100644 --- a/src/arm64/macro-assembler-arm64.h +++ b/src/arm64/macro-assembler-arm64.h @@ -1769,7 +1769,7 @@ class MacroAssembler : public Assembler { // |object| is the object being stored into, |value| is the object being // stored. value and scratch registers are clobbered by the operation. // The offset is the offset from the start of the object, not the offset from - // the tagged HeapObject pointer. For use with FieldOperand(reg, off). + // the tagged HeapObject pointer. For use with FieldMemOperand(reg, off). void RecordWriteField( Register object, int offset, @@ -2233,7 +2233,7 @@ class UseScratchRegisterScope { }; -inline MemOperand ContextMemOperand(Register context, int index) { +inline MemOperand ContextMemOperand(Register context, int index = 0) { return MemOperand(context, Context::SlotOffset(index)); } diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc index 7ee424e75..d5addc036 100644 --- a/src/code-stubs-hydrogen.cc +++ b/src/code-stubs-hydrogen.cc @@ -1622,220 +1622,6 @@ Handle StoreGlobalStub::GenerateCode() { } -template <> -HValue* CodeStubGraphBuilder::BuildCodeStub() { - LoadGlobalViaContextStub* stub = casted_stub(); - int depth_value = stub->depth(); - HValue* depth = GetParameter(0); - HValue* slot_index = GetParameter(1); - HValue* name = GetParameter(2); - - // Choose between dynamic or static context script fetching versions. - depth = depth_value < LoadGlobalViaContextStub::kDynamicDepth - ? nullptr - : AddUncasted(depth, Representation::Smi()); - slot_index = - AddUncasted(slot_index, Representation::Smi()); - - HValue* script_context = BuildGetParentContext(depth, depth_value); - HValue* cell = - Add(script_context, slot_index, nullptr, FAST_ELEMENTS); - - HValue* value = Add(cell, nullptr, - HObjectAccess::ForPropertyCellValue()); - - IfBuilder builder(this); - HValue* hole_value = graph()->GetConstantHole(); - builder.IfNot(value, hole_value); - builder.Then(); - { Push(value); } - builder.Else(); - { - Add(script_context, slot_index, name); - Push(Add( - isolate()->factory()->empty_string(), - Runtime::FunctionForId(Runtime::kLoadGlobalViaContext), 3)); - } - builder.End(); - return Pop(); -} - - -Handle LoadGlobalViaContextStub::GenerateCode() { - return DoGenerateCode(this); -} - - -template <> -HValue* CodeStubGraphBuilder::BuildCodeStub() { - StoreGlobalViaContextStub* stub = casted_stub(); - int depth_value = stub->depth(); - HValue* depth = GetParameter(0); - HValue* slot_index = GetParameter(1); - HValue* name = GetParameter(2); - HValue* value = GetParameter(3); - - // Choose between dynamic or static context script fetching versions. - depth = depth_value < StoreGlobalViaContextStub::kDynamicDepth - ? nullptr - : AddUncasted(depth, Representation::Smi()); - slot_index = - AddUncasted(slot_index, Representation::Smi()); - - HValue* script_context = BuildGetParentContext(depth, depth_value); - HValue* cell = - Add(script_context, slot_index, nullptr, FAST_ELEMENTS); - - // Fast case that requires storing to cell. - HIfContinuation if_fast_store_continuation(graph()->CreateBasicBlock(), - graph()->CreateBasicBlock()); - - // Fast case that does not require storing to cell. 
- HIfContinuation if_fast_no_store_continuation(graph()->CreateBasicBlock(), - graph()->CreateBasicBlock()); - - // This stub does the same as StoreGlobalStub but in a dynamic manner. - - HValue* cell_contents = Add( - cell, nullptr, HObjectAccess::ForPropertyCellValue()); - - IfBuilder if_hole(this); - HValue* hole_value = graph()->GetConstantHole(); - if_hole.IfNot(cell_contents, hole_value); - if_hole.Then(); - { - HValue* details = Add( - cell, nullptr, HObjectAccess::ForPropertyCellDetails()); - HValue* cell_type = - BuildDecodeField(details); - - // The code below relies on this. - STATIC_ASSERT(PropertyCellType::kUndefined < PropertyCellType::kConstant); - STATIC_ASSERT(PropertyCellType::kConstant < - PropertyCellType::kConstantType); - STATIC_ASSERT(PropertyCellType::kConstant < PropertyCellType::kMutable); - - // Handle all cell type cases. - IfBuilder if_not_const(this); - - int cell_type_constant = static_cast(PropertyCellType::kConstant); - if_not_const.If( - cell_type, Add(cell_type_constant), Token::GT); - if_not_const.Then(); - { - // kConstantType or kMutable. - IfBuilder if_const_type(this); - int cell_type_constant_type = - static_cast(PropertyCellType::kConstantType); - if_const_type.If( - cell_type, Add(cell_type_constant_type), Token::EQ); - if_const_type.Then(); - { - // Check that either both value and cell_contents are smi or - // both have the same map. - IfBuilder if_cell_is_smi(this); - if_cell_is_smi.If(cell_contents); - if_cell_is_smi.Then(); - { - IfBuilder if_value_is_smi(this); - if_value_is_smi.If(value); - if_value_is_smi.Then(); - { - // Both cell_contents and value are smis, do store. - } - if_value_is_smi.Else(); // Slow case. - if_value_is_smi.JoinContinuation(&if_fast_store_continuation); - } - if_cell_is_smi.Else(); - { - IfBuilder if_value_is_heap_object(this); - if_value_is_heap_object.IfNot(value); - if_value_is_heap_object.Then(); - { - // Both cell_contents and value are heap objects, do store. - HValue* expected_map = Add( - cell_contents, nullptr, HObjectAccess::ForMap()); - HValue* map = - Add(value, nullptr, HObjectAccess::ForMap()); - IfBuilder map_check(this); - map_check.If(expected_map, map); - map_check.Then(); - map_check.Else(); // Slow case. - map_check.JoinContinuation(&if_fast_store_continuation); - - // The accessor case is handled by the map check above, since - // the value must not have a AccessorPair map. - } - if_value_is_heap_object.Else(); // Slow case. - if_value_is_heap_object.JoinContinuation(&if_fast_store_continuation); - } - if_cell_is_smi.EndUnreachable(); - } - if_const_type.Else(); - { - // Check that the property kind is kData. - HValue* kind = BuildDecodeField(details); - HValue* data_kind_value = Add(kData); - - IfBuilder builder(this); - builder.If(kind, data_kind_value, - Token::EQ); - builder.Then(); - builder.Else(); // Slow case. - builder.JoinContinuation(&if_fast_store_continuation); - } - if_const_type.EndUnreachable(); - } - if_not_const.Else(); - { - // kUndefined or kConstant, just check that the value matches. - IfBuilder builder(this); - builder.If(cell_contents, value); - builder.Then(); - builder.Else(); // Slow case. - builder.JoinContinuation(&if_fast_no_store_continuation); - } - if_not_const.EndUnreachable(); - } - if_hole.Else(); // Slow case. - if_hole.JoinContinuation(&if_fast_store_continuation); - - // Do store for fast case. - IfBuilder if_fast_store(this, &if_fast_store_continuation); - if_fast_store.Then(); - { - // All checks are done, store the value to the cell. 
- Add(cell, HObjectAccess::ForPropertyCellValue(), value); - } - if_fast_store.Else(); - if_fast_store.JoinContinuation(&if_fast_no_store_continuation); - - // Bailout to runtime call for slow case. - IfBuilder if_no_fast_store(this, &if_fast_no_store_continuation); - if_no_fast_store.Then(); - { - // Nothing else to do. - } - if_no_fast_store.Else(); - { - // Slow case, call runtime. - HInstruction* lang_mode = Add(casted_stub()->language_mode()); - Add(script_context, slot_index, name, value); - Add(lang_mode); - Add(isolate()->factory()->empty_string(), - Runtime::FunctionForId(Runtime::kStoreGlobalViaContext), - 5); - } - if_no_fast_store.End(); - return value; -} - - -Handle StoreGlobalViaContextStub::GenerateCode() { - return DoGenerateCode(this); -} - - template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionDescriptor::kReceiverIndex); diff --git a/src/code-stubs.cc b/src/code-stubs.cc index e6720dfe7..877da682f 100644 --- a/src/code-stubs.cc +++ b/src/code-stubs.cc @@ -726,20 +726,6 @@ void RegExpConstructResultStub::InitializeDescriptor( } -void LoadGlobalViaContextStub::InitializeDescriptor( - CodeStubDescriptor* descriptor) { - // Must never deoptimize. - descriptor->Initialize(FUNCTION_ADDR(UnexpectedStubMiss)); -} - - -void StoreGlobalViaContextStub::InitializeDescriptor( - CodeStubDescriptor* descriptor) { - // Must never deoptimize. - descriptor->Initialize(FUNCTION_ADDR(UnexpectedStubMiss)); -} - - void TransitionElementsKindStub::InitializeDescriptor( CodeStubDescriptor* descriptor) { descriptor->Initialize( diff --git a/src/code-stubs.h b/src/code-stubs.h index f83e73640..372b788fe 100644 --- a/src/code-stubs.h +++ b/src/code-stubs.h @@ -1396,57 +1396,50 @@ class StoreGlobalStub : public HandlerStub { }; -class LoadGlobalViaContextStub : public HydrogenCodeStub { +class LoadGlobalViaContextStub final : public PlatformCodeStub { public: - // Use the loop version for depths higher than this one. - static const int kDynamicDepth = 7; + static const int kMaximumDepth = 15; LoadGlobalViaContextStub(Isolate* isolate, int depth) - : HydrogenCodeStub(isolate) { - if (depth > kDynamicDepth) depth = kDynamicDepth; - set_sub_minor_key(DepthBits::encode(depth)); + : PlatformCodeStub(isolate) { + minor_key_ = DepthBits::encode(depth); } - int depth() const { return DepthBits::decode(sub_minor_key()); } + int depth() const { return DepthBits::decode(minor_key_); } private: - class DepthBits : public BitField {}; - STATIC_ASSERT(kDynamicDepth <= DepthBits::kMax); + class DepthBits : public BitField {}; + STATIC_ASSERT(DepthBits::kMax == kMaximumDepth); DEFINE_CALL_INTERFACE_DESCRIPTOR(LoadGlobalViaContext); - DEFINE_HYDROGEN_CODE_STUB(LoadGlobalViaContext, HydrogenCodeStub); + DEFINE_PLATFORM_CODE_STUB(LoadGlobalViaContext, PlatformCodeStub); }; -class StoreGlobalViaContextStub : public HydrogenCodeStub { +class StoreGlobalViaContextStub final : public PlatformCodeStub { public: - // Use the loop version for depths higher than this one. 
- static const int kDynamicDepth = 7; + static const int kMaximumDepth = 15; StoreGlobalViaContextStub(Isolate* isolate, int depth, LanguageMode language_mode) - : HydrogenCodeStub(isolate) { - if (depth > kDynamicDepth) depth = kDynamicDepth; - set_sub_minor_key(DepthBits::encode(depth) | - LanguageModeBits::encode(language_mode)); + : PlatformCodeStub(isolate) { + minor_key_ = + DepthBits::encode(depth) | LanguageModeBits::encode(language_mode); } - int depth() const { return DepthBits::decode(sub_minor_key()); } - + int depth() const { return DepthBits::decode(minor_key_); } LanguageMode language_mode() const { - return LanguageModeBits::decode(sub_minor_key()); + return LanguageModeBits::decode(minor_key_); } private: - class DepthBits : public BitField {}; - STATIC_ASSERT(kDynamicDepth <= DepthBits::kMax); - + class DepthBits : public BitField {}; + STATIC_ASSERT(DepthBits::kMax == kMaximumDepth); class LanguageModeBits : public BitField {}; STATIC_ASSERT(LANGUAGE_END == 3); - private: DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreGlobalViaContext); - DEFINE_HYDROGEN_CODE_STUB(StoreGlobalViaContext, HydrogenCodeStub); + DEFINE_PLATFORM_CODE_STUB(StoreGlobalViaContext, PlatformCodeStub); }; diff --git a/src/compiler/js-generic-lowering.cc b/src/compiler/js-generic-lowering.cc index e3e4cddd8..17b1597ed 100644 --- a/src/compiler/js-generic-lowering.cc +++ b/src/compiler/js-generic-lowering.cc @@ -338,10 +338,10 @@ void JSGenericLowering::LowerJSLoadGlobal(Node* node) { if (p.slot_index() >= 0) { Callable callable = CodeFactory::LoadGlobalViaContext(isolate(), 0); Node* script_context = node->InputAt(0); - node->ReplaceInput(0, jsgraph()->SmiConstant(0)); - node->ReplaceInput(1, jsgraph()->SmiConstant(p.slot_index())); - node->ReplaceInput(2, jsgraph()->HeapConstant(p.name())); - node->ReplaceInput(3, script_context); // Replace old context. + node->ReplaceInput(0, jsgraph()->Int32Constant(p.slot_index())); + node->ReplaceInput(1, jsgraph()->HeapConstant(p.name())); + node->ReplaceInput(2, script_context); // Set new context... + node->RemoveInput(3); // ...instead of old one. ReplaceWithStubCall(node, callable, flags); } else { @@ -397,11 +397,11 @@ void JSGenericLowering::LowerJSStoreGlobal(Node* node) { CodeFactory::StoreGlobalViaContext(isolate(), 0, p.language_mode()); Node* script_context = node->InputAt(0); Node* value = node->InputAt(2); - node->ReplaceInput(0, jsgraph()->SmiConstant(0)); - node->ReplaceInput(1, jsgraph()->SmiConstant(p.slot_index())); - node->ReplaceInput(2, jsgraph()->HeapConstant(p.name())); - node->ReplaceInput(3, value); - node->ReplaceInput(4, script_context); // Replace old context. + node->ReplaceInput(0, jsgraph()->Int32Constant(p.slot_index())); + node->ReplaceInput(1, jsgraph()->HeapConstant(p.name())); + node->ReplaceInput(2, value); + node->ReplaceInput(3, script_context); // Set new context... + node->RemoveInput(4); // ...instead of old one. ReplaceWithStubCall(node, callable, flags); } else { diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc index 2cc75bf11..c034b223d 100644 --- a/src/ia32/code-stubs-ia32.cc +++ b/src/ia32/code-stubs-ia32.cc @@ -5116,6 +5116,161 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { } +void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { + Register context_reg = esi; + Register slot_reg = ebx; + Register name_reg = ecx; + Register result_reg = eax; + Label slow_case; + + // Go up context chain to the script context. 
+ for (int i = 0; i < depth(); ++i) {
+ __ mov(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = result_reg;
+ }
+
+ // Load the PropertyCell value at the specified slot.
+ __ mov(result_reg, ContextOperand(context_reg, slot_reg));
+ __ mov(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));
+
+ // Check that value is not the_hole.
+ __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
+ __ j(equal, &slow_case, Label::kNear);
+ __ Ret();
+
+ // Fallback to the runtime.
+ __ bind(&slow_case);
+ __ SmiTag(slot_reg);
+ __ Pop(result_reg); // Pop return address.
+ __ Push(slot_reg);
+ __ Push(name_reg);
+ __ Push(result_reg); // Push return address.
+ __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
+}
+
+
+void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context_reg = esi;
+ Register slot_reg = ebx;
+ Register name_reg = ecx;
+ Register value_reg = eax;
+ Register cell_reg = edi;
+ Register cell_details_reg = edx;
+ Label fast_heapobject_case, fast_smi_case, slow_case;
+
+ if (FLAG_debug_code) {
+ __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
+ __ Check(not_equal, kUnexpectedValue);
+ __ AssertName(name_reg);
+ }
+
+ // Go up context chain to the script context.
+ for (int i = 0; i < depth(); ++i) {
+ __ mov(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = cell_reg;
+ }
+
+ // Load the PropertyCell at the specified slot.
+ __ mov(cell_reg, ContextOperand(context_reg, slot_reg));
+
+ // Load PropertyDetails for the cell (actually only the cell_type and kind).
+ __ mov(cell_details_reg,
+ FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
+ __ SmiUntag(cell_details_reg);
+ __ and_(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::kMask |
+ PropertyDetails::KindField::kMask));
+
+
+ // Check if PropertyCell holds mutable data.
+ Label not_mutable_data;
+ __ cmp(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kMutable) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(not_equal, &not_mutable_data);
+ __ JumpIfSmi(value_reg, &fast_smi_case);
+ __ bind(&fast_heapobject_case);
+ __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
+ __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
+ cell_details_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ // RecordWriteField clobbers the value register, so we need to reload.
+ __ mov(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
+ __ Ret();
+ __ bind(&not_mutable_data);
+
+ // Check if PropertyCell value matches the new value (relevant for Constant,
+ // ConstantType and Undefined cells).
+ Label not_same_value;
+ __ cmp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
+ __ j(not_equal, &not_same_value,
+ FLAG_debug_code ? Label::kFar : Label::kNear);
+ if (FLAG_debug_code) {
+ Label done;
+ // This can only be true for Constant, ConstantType and Undefined cells,
+ // because we never store the_hole via this stub.
+ __ cmp(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstant) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(equal, &done);
+ __ cmp(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(equal, &done);
+ __ cmp(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kUndefined) |
+ PropertyDetails::KindField::encode(kData)));
+ __ Check(equal, kUnexpectedValue);
+ __ bind(&done);
+ }
+ __ Ret();
+ __ bind(&not_same_value);
+
+ // Check if PropertyCell contains data with constant type.
+ __ cmp(cell_details_reg,
+ Immediate(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData)));
+ __ j(not_equal, &slow_case, Label::kNear);
+
+ // Now either both old and new values must be SMIs or both must be heap
+ // objects with same map.
+ Label value_is_heap_object;
+ Register cell_value_reg = cell_details_reg;
+ __ mov(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
+ __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
+ __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
+ // Old and new values are SMIs, no need for a write barrier here.
+ __ bind(&fast_smi_case);
+ __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
+ __ Ret();
+ __ bind(&value_is_heap_object);
+ __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
+ Register cell_value_map_reg = cell_value_reg;
+ __ mov(cell_value_map_reg,
+ FieldOperand(cell_value_reg, HeapObject::kMapOffset));
+ __ cmp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
+ __ j(equal, &fast_heapobject_case);
+
+ // Fallback to the runtime.
+ __ bind(&slow_case);
+ __ SmiTag(slot_reg);
+ __ Pop(cell_reg); // Pop return address.
+ __ Push(slot_reg);
+ __ Push(name_reg);
+ __ Push(value_reg);
+ __ Push(cell_reg); // Push return address.
+ __ TailCallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3, 1);
+}
+
+
 // Generates an Operand for saving parameters after PrepareCallApiFunction.
 static Operand ApiParameterOperand(int index) {
   return Operand(esp, index * kPointerSize);
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 588b04e2b..3fc4020a1 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1341,15 +1341,18 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
     DCHECK(var->index() > 0);
     DCHECK(var->IsStaticGlobalObjectProperty());
     // Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index(); - int depth = scope()->ContextChainLength(var->scope()); - __ mov(LoadGlobalViaContextDescriptor::DepthRegister(), - Immediate(Smi::FromInt(depth))); - __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), - Immediate(Smi::FromInt(slot_index))); - __ mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name()); - LoadGlobalViaContextStub stub(isolate(), depth); - __ CallStub(&stub); + int const slot = var->index(); + int const depth = scope()->ContextChainLength(var->scope()); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ Move(LoadGlobalViaContextDescriptor::SlotRegister(), Immediate(slot)); + __ mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name()); + LoadGlobalViaContextStub stub(isolate(), depth); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } else { __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); @@ -2620,16 +2623,23 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, DCHECK(var->index() > 0); DCHECK(var->IsStaticGlobalObjectProperty()); // Each var occupies two slots in the context: for reads and writes. - int slot_index = var->index() + 1; - int depth = scope()->ContextChainLength(var->scope()); - __ mov(StoreGlobalViaContextDescriptor::DepthRegister(), - Immediate(Smi::FromInt(depth))); - __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), - Immediate(Smi::FromInt(slot_index))); - __ mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name()); - DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(eax)); - StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); - __ CallStub(&stub); + int const slot = var->index() + 1; + int const depth = scope()->ContextChainLength(var->scope()); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ Move(StoreGlobalViaContextDescriptor::SlotRegister(), Immediate(slot)); + __ mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name()); + DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(eax)); + StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ Push(eax); + __ CallRuntime(is_strict(language_mode()) + ? Runtime::kStoreGlobalViaContext_Strict + : Runtime::kStoreGlobalViaContext_Sloppy, + 3); + } } else if (var->mode() == LET && op != Token::INIT_LET) { // Non-initializing assignment to let variable needs a write barrier. 
diff --git a/src/ia32/interface-descriptors-ia32.cc b/src/ia32/interface-descriptors-ia32.cc index 17b053c57..a91e1efec 100644 --- a/src/ia32/interface-descriptors-ia32.cc +++ b/src/ia32/interface-descriptors-ia32.cc @@ -37,12 +37,10 @@ const Register StoreTransitionDescriptor::MapRegister() { } -const Register LoadGlobalViaContextDescriptor::DepthRegister() { return edx; } const Register LoadGlobalViaContextDescriptor::SlotRegister() { return ebx; } const Register LoadGlobalViaContextDescriptor::NameRegister() { return ecx; } -const Register StoreGlobalViaContextDescriptor::DepthRegister() { return edx; } const Register StoreGlobalViaContextDescriptor::SlotRegister() { return ebx; } const Register StoreGlobalViaContextDescriptor::NameRegister() { return ecx; } const Register StoreGlobalViaContextDescriptor::ValueRegister() { return eax; } diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc index 4bed05280..db2602c17 100644 --- a/src/ia32/lithium-codegen-ia32.cc +++ b/src/ia32/lithium-codegen-ia32.cc @@ -2870,15 +2870,19 @@ void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) { DCHECK(ToRegister(instr->context()).is(esi)); DCHECK(ToRegister(instr->result()).is(eax)); - __ mov(LoadGlobalViaContextDescriptor::DepthRegister(), - Immediate(Smi::FromInt(instr->depth()))); - __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), - Immediate(Smi::FromInt(instr->slot_index()))); - __ mov(LoadGlobalViaContextDescriptor::NameRegister(), instr->name()); - - Handle stub = - CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), slot); + __ mov(LoadGlobalViaContextDescriptor::NameRegister(), instr->name()); + Handle stub = + CodeFactory::LoadGlobalViaContext(isolate(), depth).code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } @@ -4143,16 +4147,24 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) { DCHECK(ToRegister(instr->value()) .is(StoreGlobalViaContextDescriptor::ValueRegister())); - __ mov(StoreGlobalViaContextDescriptor::DepthRegister(), - Immediate(Smi::FromInt(instr->depth()))); - __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), - Immediate(Smi::FromInt(instr->slot_index()))); - __ mov(StoreGlobalViaContextDescriptor::NameRegister(), instr->name()); - - Handle stub = - CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(), - instr->language_mode()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), slot); + __ mov(StoreGlobalViaContextDescriptor::NameRegister(), instr->name()); + Handle stub = CodeFactory::StoreGlobalViaContext( + isolate(), depth, instr->language_mode()) + .code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ Push(StoreGlobalViaContextDescriptor::ValueRegister()); + __ CallRuntime(is_strict(instr->language_mode()) + ? 
Runtime::kStoreGlobalViaContext_Strict + : Runtime::kStoreGlobalViaContext_Sloppy, + 3); + } } diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h index d4d4fd6b9..15dbaed69 100644 --- a/src/ia32/macro-assembler-ia32.h +++ b/src/ia32/macro-assembler-ia32.h @@ -1068,6 +1068,11 @@ inline Operand ContextOperand(Register context, int index) { } +inline Operand ContextOperand(Register context, Register index) { + return Operand(context, index, times_pointer_size, Context::SlotOffset(0)); +} + + inline Operand GlobalObjectOperand() { return ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX); } diff --git a/src/interface-descriptors.cc b/src/interface-descriptors.cc index 20cd7802d..0526bf998 100644 --- a/src/interface-descriptors.cc +++ b/src/interface-descriptors.cc @@ -117,17 +117,16 @@ Type::FunctionType* LoadGlobalViaContextDescriptor::BuildCallInterfaceDescriptorFunctionType( Isolate* isolate, int paramater_count) { Type::FunctionType* function = Type::FunctionType::New( - AnyTagged(), Type::Undefined(), 3, isolate->interface_descriptor_zone()); - function->InitParameter(0, SmiType()); - function->InitParameter(1, SmiType()); - function->InitParameter(2, AnyTagged()); + AnyTagged(), Type::Undefined(), 2, isolate->interface_descriptor_zone()); + function->InitParameter(0, UntaggedSigned32()); + function->InitParameter(1, AnyTagged()); return function; } void LoadGlobalViaContextDescriptor::InitializePlatformSpecific( CallInterfaceDescriptorData* data) { - Register registers[] = {DepthRegister(), SlotRegister(), NameRegister()}; + Register registers[] = {SlotRegister(), NameRegister()}; data->InitializePlatformSpecific(arraysize(registers), registers); } @@ -136,19 +135,17 @@ Type::FunctionType* StoreGlobalViaContextDescriptor::BuildCallInterfaceDescriptorFunctionType( Isolate* isolate, int paramater_count) { Type::FunctionType* function = Type::FunctionType::New( - AnyTagged(), Type::Undefined(), 4, isolate->interface_descriptor_zone()); - function->InitParameter(0, SmiType()); - function->InitParameter(1, SmiType()); + AnyTagged(), Type::Undefined(), 3, isolate->interface_descriptor_zone()); + function->InitParameter(0, UntaggedSigned32()); + function->InitParameter(1, AnyTagged()); function->InitParameter(2, AnyTagged()); - function->InitParameter(3, AnyTagged()); return function; } void StoreGlobalViaContextDescriptor::InitializePlatformSpecific( CallInterfaceDescriptorData* data) { - Register registers[] = {DepthRegister(), SlotRegister(), NameRegister(), - ValueRegister()}; + Register registers[] = {SlotRegister(), NameRegister(), ValueRegister()}; data->InitializePlatformSpecific(arraysize(registers), registers); } diff --git a/src/interface-descriptors.h b/src/interface-descriptors.h index 328d69c5e..94b1a8166 100644 --- a/src/interface-descriptors.h +++ b/src/interface-descriptors.h @@ -423,7 +423,6 @@ class LoadGlobalViaContextDescriptor : public CallInterfaceDescriptor { DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(LoadGlobalViaContextDescriptor, CallInterfaceDescriptor) - static const Register DepthRegister(); static const Register SlotRegister(); static const Register NameRegister(); }; @@ -434,7 +433,6 @@ class StoreGlobalViaContextDescriptor : public CallInterfaceDescriptor { DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(StoreGlobalViaContextDescriptor, CallInterfaceDescriptor) - static const Register DepthRegister(); static const Register SlotRegister(); static const Register NameRegister(); static const Register ValueRegister(); diff --git 
a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 6a3d15043..272fedafe 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -5270,6 +5270,155 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
 }
 
 
+void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context_reg = cp;
+ Register slot_reg = a2;
+ Register name_reg = a3;
+ Register result_reg = v0;
+ Label slow_case;
+
+ // Go up context chain to the script context.
+ for (int i = 0; i < depth(); ++i) {
+ __ lw(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = result_reg;
+ }
+
+ // Load the PropertyCell value at the specified slot.
+ __ sll(at, slot_reg, kPointerSizeLog2);
+ __ Addu(at, at, Operand(cp));
+ __ Addu(at, at, Context::SlotOffset(0));
+ __ lw(result_reg, MemOperand(at));
+ __ lw(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset));
+
+ // Check that value is not the_hole.
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+ __ Branch(&slow_case, eq, result_reg, Operand(at));
+ __ Ret();
+
+ // Fallback to the runtime.
+ __ bind(&slow_case);
+ __ SmiTag(slot_reg);
+ __ Drop(1); // Pop return address.
+ __ Push(slot_reg, name_reg, result_reg);
+ __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
+}
+
+
+void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context_reg = cp;
+ Register slot_reg = a2;
+ Register name_reg = a3;
+ Register value_reg = a0;
+ Register cell_reg = t0;
+ Register cell_details_reg = t1;
+ Label fast_heapobject_case, fast_smi_case, slow_case;
+
+ if (FLAG_debug_code) {
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+ __ Check(ne, kUnexpectedValue, value_reg, Operand(at));
+ __ AssertName(name_reg);
+ }
+
+ // Go up context chain to the script context.
+ for (int i = 0; i < depth(); ++i) {
+ __ lw(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ context_reg = cell_reg;
+ }
+
+ // Load the PropertyCell at the specified slot.
+ __ sll(at, slot_reg, kPointerSizeLog2);
+ __ Addu(at, at, Operand(cp));
+ __ Addu(at, at, Context::SlotOffset(0));
+ __ lw(cell_reg, MemOperand(at));
+
+ // Load PropertyDetails for the cell (actually only the cell_type and kind).
+ __ lw(cell_details_reg,
+ FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset));
+ __ SmiUntag(cell_details_reg);
+ __ And(cell_details_reg, cell_details_reg,
+ PropertyDetails::PropertyCellTypeField::kMask |
+ PropertyDetails::KindField::kMask);
+
+ // Check if PropertyCell holds mutable data.
+ Label not_mutable_data;
+ __ Branch(&not_mutable_data, ne, cell_details_reg,
+ Operand(PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kMutable) |
+ PropertyDetails::KindField::encode(kData)));
+ __ JumpIfSmi(value_reg, &fast_smi_case);
+ __ bind(&fast_heapobject_case);
+ __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
+ cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+ // RecordWriteField clobbers the value register, so we need to reload.
+ __ lw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+ __ Ret();
+ __ bind(&not_mutable_data);
+
+ // Check if PropertyCell value matches the new value (relevant for Constant,
+ // ConstantType and Undefined cells).
+  Label not_same_value;
+  __ lw(at, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+  __ Branch(&not_same_value, ne, value_reg, Operand(at));
+  if (FLAG_debug_code) {
+    Label done;
+    // This can only be true for Constant, ConstantType and Undefined cells,
+    // because we never store the_hole via this stub.
+    __ Branch(&done, eq, cell_details_reg,
+              Operand(PropertyDetails::PropertyCellTypeField::encode(
+                          PropertyCellType::kConstant) |
+                      PropertyDetails::KindField::encode(kData)));
+    __ Branch(&done, eq, cell_details_reg,
+              Operand(PropertyDetails::PropertyCellTypeField::encode(
+                          PropertyCellType::kConstantType) |
+                      PropertyDetails::KindField::encode(kData)));
+    __ Check(eq, kUnexpectedValue, cell_details_reg,
+             Operand(PropertyDetails::PropertyCellTypeField::encode(
+                         PropertyCellType::kUndefined) |
+                     PropertyDetails::KindField::encode(kData)));
+    __ bind(&done);
+  }
+  __ Ret();
+  __ bind(&not_same_value);
+
+  // Check if PropertyCell contains data with constant type.
+  __ Branch(&slow_case, ne, cell_details_reg,
+            Operand(PropertyDetails::PropertyCellTypeField::encode(
+                        PropertyCellType::kConstantType) |
+                    PropertyDetails::KindField::encode(kData)));
+
+  // Now either both old and new values must be SMIs or both must be heap
+  // objects with same map.
+  Label value_is_heap_object;
+  Register cell_value_reg = cell_details_reg;
+  __ lw(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+  __ JumpIfNotSmi(value_reg, &value_is_heap_object);
+  __ JumpIfNotSmi(cell_value_reg, &slow_case);
+  // Old and new values are SMIs, no need for a write barrier here.
+  __ bind(&fast_smi_case);
+  __ Ret(USE_DELAY_SLOT);
+  __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+  __ bind(&value_is_heap_object);
+  __ JumpIfSmi(cell_value_reg, &slow_case);
+  Register cell_value_map_reg = cell_value_reg;
+  __ lw(cell_value_map_reg,
+        FieldMemOperand(cell_value_reg, HeapObject::kMapOffset));
+  __ Branch(&fast_heapobject_case, eq, cell_value_map_reg,
+            FieldMemOperand(value_reg, HeapObject::kMapOffset));
+
+  // Fallback to the runtime.
+  __ bind(&slow_case);
+  __ SmiTag(slot_reg);
+  __ Push(slot_reg, name_reg, value_reg);
+  __ TailCallRuntime(is_strict(language_mode())
+                         ? Runtime::kStoreGlobalViaContext_Strict
+                         : Runtime::kStoreGlobalViaContext_Sloppy,
+                     3, 1);
+}
+
+
 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
   return ref0.address() - ref1.address();
 }
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index d0904a6d8..b501413be 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -1407,15 +1407,18 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
     DCHECK(var->index() > 0);
     DCHECK(var->IsStaticGlobalObjectProperty());
     // Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index(); - int depth = scope()->ContextChainLength(var->scope()); - __ li(LoadGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(depth))); - __ li(LoadGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(slot_index))); - __ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(var->name())); - LoadGlobalViaContextStub stub(isolate(), depth); - __ CallStub(&stub); + int const slot = var->index(); + int const depth = scope()->ContextChainLength(var->scope()); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ li(LoadGlobalViaContextDescriptor::NameRegister(), var->name()); + LoadGlobalViaContextStub stub(isolate(), depth); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } else { __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); @@ -2700,18 +2703,25 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, // Global var, const, or let. DCHECK(var->index() > 0); DCHECK(var->IsStaticGlobalObjectProperty()); - // Each var occupies two slots in the context: for reads and writes. - int slot_index = var->index() + 1; - int depth = scope()->ContextChainLength(var->scope()); - __ li(StoreGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(depth))); - __ li(StoreGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(slot_index))); - __ li(StoreGlobalViaContextDescriptor::NameRegister(), - Operand(var->name())); + DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(a0)); __ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register()); - StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); - __ CallStub(&stub); + // Each var occupies two slots in the context: for reads and writes. + int const slot = var->index() + 1; + int const depth = scope()->ContextChainLength(var->scope()); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ li(StoreGlobalViaContextDescriptor::NameRegister(), var->name()); + StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ Push(a0); + __ CallRuntime(is_strict(language_mode()) + ? Runtime::kStoreGlobalViaContext_Strict + : Runtime::kStoreGlobalViaContext_Sloppy, + 3); + } } else if (var->mode() == LET && op != Token::INIT_LET) { // Non-initializing assignment to let variable needs a write barrier. 
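
For reference: the MIPS stub code above and the other platform versions in this patch all generate the same load fast path. A minimal stand-alone C++ sketch of that logic follows; the types and the function name (ScriptContextChain, LoadGlobalViaContextFastPath) are hand-rolled stand-ins for illustration, not V8's own classes.

  // Stand-in types; in V8 these are Context and PropertyCell heap objects.
  struct PropertyCell { const void* value; };
  struct ScriptContextChain {
    ScriptContextChain* previous;   // what Context::PREVIOUS_INDEX points to
    PropertyCell** slots;           // the script context slots
  };

  // depth() is a compile-time constant of the stub (its minor key), so the
  // loop below is fully unrolled in the generated code; the slot index
  // arrives untagged in SlotRegister().
  const void* LoadGlobalViaContextFastPath(ScriptContextChain* context,
                                           int depth, int slot,
                                           const void* the_hole,
                                           bool* call_runtime) {
    for (int i = 0; i < depth; ++i) context = context->previous;
    const void* value = context->slots[slot]->value;
    // the_hole in the cell means the fast path is invalid; the stub then
    // tail-calls Runtime::kLoadGlobalViaContext with the Smi-tagged slot
    // and the name.
    *call_runtime = (value == the_hole);
    return value;
  }
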
diff --git a/src/mips/interface-descriptors-mips.cc b/src/mips/interface-descriptors-mips.cc index 6733c63fd..91690497c 100644 --- a/src/mips/interface-descriptors-mips.cc +++ b/src/mips/interface-descriptors-mips.cc @@ -36,12 +36,10 @@ const Register VectorStoreICDescriptor::VectorRegister() { return a3; } const Register StoreTransitionDescriptor::MapRegister() { return a3; } -const Register LoadGlobalViaContextDescriptor::DepthRegister() { return a1; } const Register LoadGlobalViaContextDescriptor::SlotRegister() { return a2; } const Register LoadGlobalViaContextDescriptor::NameRegister() { return a3; } -const Register StoreGlobalViaContextDescriptor::DepthRegister() { return a1; } const Register StoreGlobalViaContextDescriptor::SlotRegister() { return a2; } const Register StoreGlobalViaContextDescriptor::NameRegister() { return a3; } const Register StoreGlobalViaContextDescriptor::ValueRegister() { return a0; } diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc index 2703bdbe1..c5db715d6 100644 --- a/src/mips/lithium-codegen-mips.cc +++ b/src/mips/lithium-codegen-mips.cc @@ -2899,15 +2899,20 @@ void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) { DCHECK(ToRegister(instr->context()).is(cp)); DCHECK(ToRegister(instr->result()).is(v0)); - __ li(LoadGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(instr->depth()))); - __ li(LoadGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(instr->slot_index()))); - __ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(instr->name())); - - Handle stub = - CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ li(LoadGlobalViaContextDescriptor::NameRegister(), + Operand(instr->name())); + Handle stub = + CodeFactory::LoadGlobalViaContext(isolate(), depth).code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } @@ -4209,17 +4214,25 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) { DCHECK(ToRegister(instr->value()) .is(StoreGlobalViaContextDescriptor::ValueRegister())); - __ li(StoreGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(instr->depth()))); - __ li(StoreGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(instr->slot_index()))); - __ li(StoreGlobalViaContextDescriptor::NameRegister(), - Operand(instr->name())); - - Handle stub = - CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(), - instr->language_mode()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ li(StoreGlobalViaContextDescriptor::NameRegister(), + Operand(instr->name())); + Handle stub = CodeFactory::StoreGlobalViaContext( + isolate(), depth, instr->language_mode()) + .code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ Push(StoreGlobalViaContextDescriptor::ValueRegister()); + __ CallRuntime(is_strict(language_mode()) + ? 
Runtime::kStoreGlobalViaContext_Strict
+                       : Runtime::kStoreGlobalViaContext_Sloppy,
+                   3);
+  }
 }
diff --git a/src/mips64/code-stubs-mips64.cc b/src/mips64/code-stubs-mips64.cc
index b04b04217..d8797ffab 100644
--- a/src/mips64/code-stubs-mips64.cc
+++ b/src/mips64/code-stubs-mips64.cc
@@ -5301,6 +5301,155 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
 }
 
 
+void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
+  Register context_reg = cp;
+  Register slot_reg = a2;
+  Register name_reg = a3;
+  Register result_reg = v0;
+  Label slow_case;
+
+  // Go up context chain to the script context.
+  for (int i = 0; i < depth(); ++i) {
+    __ ld(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+    context_reg = result_reg;
+  }
+
+  // Load the PropertyCell value at the specified slot.
+  __ dsll(at, slot_reg, kPointerSizeLog2);
+  __ Daddu(at, at, Operand(context_reg));
+  __ Daddu(at, at, Context::SlotOffset(0));
+  __ ld(result_reg, MemOperand(at));
+  __ ld(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset));
+
+  // Check that value is not the_hole.
+  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+  __ Branch(&slow_case, eq, result_reg, Operand(at));
+  __ Ret();
+
+  // Fallback to the runtime.
+  __ bind(&slow_case);
+  __ SmiTag(slot_reg);
+  __ Push(slot_reg, name_reg);
+  __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
+}
+
+
+void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
+  Register context_reg = cp;
+  Register slot_reg = a2;
+  Register name_reg = a3;
+  Register value_reg = a0;
+  Register cell_reg = a4;
+  Register cell_details_reg = a5;
+  Label fast_heapobject_case, fast_smi_case, slow_case;
+
+  if (FLAG_debug_code) {
+    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+    __ Check(ne, kUnexpectedValue, value_reg, Operand(at));
+    __ AssertName(name_reg);
+  }
+
+  // Go up context chain to the script context.
+  for (int i = 0; i < depth(); ++i) {
+    __ ld(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+    context_reg = cell_reg;
+  }
+
+  // Load the PropertyCell at the specified slot.
+  __ dsll(at, slot_reg, kPointerSizeLog2);
+  __ Daddu(at, at, Operand(context_reg));
+  __ Daddu(at, at, Context::SlotOffset(0));
+  __ ld(cell_reg, MemOperand(at));
+
+  // Load PropertyDetails for the cell (actually only the cell_type and kind).
+  __ ld(cell_details_reg,
+        FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset));
+  __ SmiUntag(cell_details_reg);
+  __ And(cell_details_reg, cell_details_reg,
+         PropertyDetails::PropertyCellTypeField::kMask |
+             PropertyDetails::KindField::kMask);
+
+  // Check if PropertyCell holds mutable data.
+  Label not_mutable_data;
+  __ Branch(&not_mutable_data, ne, cell_details_reg,
+            Operand(PropertyDetails::PropertyCellTypeField::encode(
+                        PropertyCellType::kMutable) |
+                    PropertyDetails::KindField::encode(kData)));
+  __ JumpIfSmi(value_reg, &fast_smi_case);
+  __ bind(&fast_heapobject_case);
+  __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+  __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
+                      cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs,
+                      EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+  // RecordWriteField clobbers the value register, so we need to reload.
+  __ Ret(USE_DELAY_SLOT);
+  __ ld(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+  __ bind(&not_mutable_data);
+
+  // Check if PropertyCell value matches the new value (relevant for Constant,
+  // ConstantType and Undefined cells).
+  Label not_same_value;
+  __ ld(at, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+  __ Branch(&not_same_value, ne, value_reg, Operand(at));
+  if (FLAG_debug_code) {
+    Label done;
+    // This can only be true for Constant, ConstantType and Undefined cells,
+    // because we never store the_hole via this stub.
+    __ Branch(&done, eq, cell_details_reg,
+              Operand(PropertyDetails::PropertyCellTypeField::encode(
+                          PropertyCellType::kConstant) |
+                      PropertyDetails::KindField::encode(kData)));
+    __ Branch(&done, eq, cell_details_reg,
+              Operand(PropertyDetails::PropertyCellTypeField::encode(
+                          PropertyCellType::kConstantType) |
+                      PropertyDetails::KindField::encode(kData)));
+    __ Check(eq, kUnexpectedValue, cell_details_reg,
+             Operand(PropertyDetails::PropertyCellTypeField::encode(
+                         PropertyCellType::kUndefined) |
+                     PropertyDetails::KindField::encode(kData)));
+    __ bind(&done);
+  }
+  __ Ret();
+  __ bind(&not_same_value);
+
+  // Check if PropertyCell contains data with constant type.
+  __ Branch(&slow_case, ne, cell_details_reg,
+            Operand(PropertyDetails::PropertyCellTypeField::encode(
+                        PropertyCellType::kConstantType) |
+                    PropertyDetails::KindField::encode(kData)));
+
+  // Now either both old and new values must be SMIs or both must be heap
+  // objects with same map.
+  Label value_is_heap_object;
+  Register cell_value_reg = cell_details_reg;
+  __ ld(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+  __ JumpIfNotSmi(value_reg, &value_is_heap_object);
+  __ JumpIfNotSmi(cell_value_reg, &slow_case);
+  // Old and new values are SMIs, no need for a write barrier here.
+  __ bind(&fast_smi_case);
+  __ Ret(USE_DELAY_SLOT);
+  __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
+  __ bind(&value_is_heap_object);
+  __ JumpIfSmi(cell_value_reg, &slow_case);
+  Register cell_value_map_reg = cell_value_reg;
+  __ ld(cell_value_map_reg,
+        FieldMemOperand(cell_value_reg, HeapObject::kMapOffset));
+  __ Branch(&fast_heapobject_case, eq, cell_value_map_reg,
+            FieldMemOperand(value_reg, HeapObject::kMapOffset));
+
+  // Fallback to the runtime.
+  __ bind(&slow_case);
+  __ SmiTag(slot_reg);
+  __ Push(slot_reg, name_reg, value_reg);
+  __ TailCallRuntime(is_strict(language_mode())
+                         ? Runtime::kStoreGlobalViaContext_Strict
+                         : Runtime::kStoreGlobalViaContext_Sloppy,
+                     3, 1);
+}
+
+
 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
   int64_t offset = (ref0.address() - ref1.address());
   DCHECK(static_cast<int>(offset) == offset);
diff --git a/src/mips64/full-codegen-mips64.cc b/src/mips64/full-codegen-mips64.cc
index 1aacde6f5..7894a1803 100644
--- a/src/mips64/full-codegen-mips64.cc
+++ b/src/mips64/full-codegen-mips64.cc
@@ -1403,15 +1403,18 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
     DCHECK(var->index() > 0);
     DCHECK(var->IsStaticGlobalObjectProperty());
     // Each var occupies two slots in the context: for reads and writes.
- int slot_index = var->index(); - int depth = scope()->ContextChainLength(var->scope()); - __ li(LoadGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(depth))); - __ li(LoadGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(slot_index))); - __ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(var->name())); - LoadGlobalViaContextStub stub(isolate(), depth); - __ CallStub(&stub); + int const slot = var->index(); + int const depth = scope()->ContextChainLength(var->scope()); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ li(LoadGlobalViaContextDescriptor::NameRegister(), var->name()); + LoadGlobalViaContextStub stub(isolate(), depth); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } else { __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); @@ -2697,18 +2700,25 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, // Global var, const, or let. DCHECK(var->index() > 0); DCHECK(var->IsStaticGlobalObjectProperty()); - // Each var occupies two slots in the context: for reads and writes. - int slot_index = var->index() + 1; - int depth = scope()->ContextChainLength(var->scope()); - __ li(StoreGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(depth))); - __ li(StoreGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(slot_index))); - __ li(StoreGlobalViaContextDescriptor::NameRegister(), - Operand(var->name())); + DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(a0)); __ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register()); - StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); - __ CallStub(&stub); + // Each var occupies two slots in the context: for reads and writes. + int const slot = var->index() + 1; + int const depth = scope()->ContextChainLength(var->scope()); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ li(StoreGlobalViaContextDescriptor::NameRegister(), var->name()); + StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ Push(a0); + __ CallRuntime(is_strict(language_mode()) + ? Runtime::kStoreGlobalViaContext_Strict + : Runtime::kStoreGlobalViaContext_Sloppy, + 3); + } } else if (var->mode() == LET && op != Token::INIT_LET) { // Non-initializing assignment to let variable needs a write barrier. 
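
For reference, the StoreGlobalViaContextStub bodies above (MIPS, MIPS64, and the ARM and x64 versions elsewhere in this patch) implement the same decision tree over the PropertyCell's type. A stand-alone C++ sketch of which stores stay on the fast path is given below; the enums, struct names and ClassifyGlobalStore are illustrative stand-ins, not V8's PropertyDetails encoding.

  // Which action the generated stub takes for a given cell/value pair.
  enum CellType { kMutable, kConstant, kConstantType, kUndefined };
  enum StoreAction {
    kNothingToDo,            // new value == old value: just return
    kStoreWithoutBarrier,    // smi store into the cell
    kStoreWithWriteBarrier,  // heap-object store, needs RecordWriteField
    kCallRuntime             // StoreGlobalViaContext_{Sloppy,Strict}
  };

  struct ValueDesc { bool is_smi; const void* map; const void* ptr; };
  struct CellDesc { CellType type; ValueDesc value; };

  StoreAction ClassifyGlobalStore(const CellDesc& cell,
                                  const ValueDesc& new_value) {
    if (cell.type == kMutable) {
      // Mutable data cells always take the fast path.
      return new_value.is_smi ? kStoreWithoutBarrier : kStoreWithWriteBarrier;
    }
    // Constant, ConstantType and Undefined cells: re-storing the value the
    // cell already holds changes nothing.
    if (cell.value.ptr == new_value.ptr) return kNothingToDo;
    if (cell.type == kConstantType) {
      // "Same type" means both smis, or both heap objects with the same map.
      if (cell.value.is_smi && new_value.is_smi) return kStoreWithoutBarrier;
      if (!cell.value.is_smi && !new_value.is_smi &&
          cell.value.map == new_value.map) {
        return kStoreWithWriteBarrier;
      }
    }
    // Everything else (cell type transitions, constant invalidation, ...)
    // is left to the runtime.
    return kCallRuntime;
  }
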
diff --git a/src/mips64/interface-descriptors-mips64.cc b/src/mips64/interface-descriptors-mips64.cc index b0f28d15d..13a09c383 100644 --- a/src/mips64/interface-descriptors-mips64.cc +++ b/src/mips64/interface-descriptors-mips64.cc @@ -36,12 +36,10 @@ const Register VectorStoreICDescriptor::VectorRegister() { return a3; } const Register StoreTransitionDescriptor::MapRegister() { return a3; } -const Register LoadGlobalViaContextDescriptor::DepthRegister() { return a1; } const Register LoadGlobalViaContextDescriptor::SlotRegister() { return a2; } const Register LoadGlobalViaContextDescriptor::NameRegister() { return a3; } -const Register StoreGlobalViaContextDescriptor::DepthRegister() { return a1; } const Register StoreGlobalViaContextDescriptor::SlotRegister() { return a2; } const Register StoreGlobalViaContextDescriptor::NameRegister() { return a3; } const Register StoreGlobalViaContextDescriptor::ValueRegister() { return a0; } diff --git a/src/mips64/lithium-codegen-mips64.cc b/src/mips64/lithium-codegen-mips64.cc index de7d51892..0313878b2 100644 --- a/src/mips64/lithium-codegen-mips64.cc +++ b/src/mips64/lithium-codegen-mips64.cc @@ -3003,15 +3003,20 @@ void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) { DCHECK(ToRegister(instr->context()).is(cp)); DCHECK(ToRegister(instr->result()).is(v0)); - __ li(LoadGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(instr->depth()))); - __ li(LoadGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(instr->slot_index()))); - __ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(instr->name())); - - Handle stub = - CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ li(LoadGlobalViaContextDescriptor::NameRegister(), + Operand(instr->name())); + Handle stub = + CodeFactory::LoadGlobalViaContext(isolate(), depth).code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } @@ -4402,17 +4407,25 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) { DCHECK(ToRegister(instr->value()) .is(StoreGlobalViaContextDescriptor::ValueRegister())); - __ li(StoreGlobalViaContextDescriptor::DepthRegister(), - Operand(Smi::FromInt(instr->depth()))); - __ li(StoreGlobalViaContextDescriptor::SlotRegister(), - Operand(Smi::FromInt(instr->slot_index()))); - __ li(StoreGlobalViaContextDescriptor::NameRegister(), - Operand(instr->name())); - - Handle stub = - CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(), - instr->language_mode()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot)); + __ li(StoreGlobalViaContextDescriptor::NameRegister(), + Operand(instr->name())); + Handle stub = CodeFactory::StoreGlobalViaContext( + isolate(), depth, instr->language_mode()) + .code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ Push(StoreGlobalViaContextDescriptor::ValueRegister()); + __ 
CallRuntime(is_strict(language_mode())
+                       ? Runtime::kStoreGlobalViaContext_Strict
+                       : Runtime::kStoreGlobalViaContext_Sloppy,
+                   3);
+  }
 }
diff --git a/src/runtime/runtime-object.cc b/src/runtime/runtime-object.cc
index 0e5c66fc0..ebaf006a4 100644
--- a/src/runtime/runtime-object.cc
+++ b/src/runtime/runtime-object.cc
@@ -420,62 +420,62 @@ RUNTIME_FUNCTION(Runtime_ObjectSeal) {
 
 RUNTIME_FUNCTION(Runtime_LoadGlobalViaContext) {
   HandleScope scope(isolate);
-  DCHECK(args.length() == 3);
-  CONVERT_ARG_HANDLE_CHECKED(Context, script_context, 0);
-  CONVERT_SMI_ARG_CHECKED(index, 1);
-  CONVERT_ARG_HANDLE_CHECKED(Name, name, 2);
+  DCHECK_EQ(2, args.length());
+  CONVERT_SMI_ARG_CHECKED(slot, 0);
+  CONVERT_ARG_HANDLE_CHECKED(Name, name, 1);
+
+  // Go up context chain to the script context.
+  Handle<Context> script_context(isolate->context()->script_context(), isolate);
   DCHECK(script_context->IsScriptContext());
-  DCHECK(script_context->get(index)->IsPropertyCell());
+  DCHECK(script_context->get(slot)->IsPropertyCell());
 
-  Handle<GlobalObject> global(script_context->global_object());
+  // Lookup the named property on the global object.
+  Handle<GlobalObject> global_object(script_context->global_object(), isolate);
+  LookupIterator it(global_object, name, LookupIterator::HIDDEN);
 
-  LookupIterator it(global, name, LookupIterator::HIDDEN);
   // Switch to fast mode only if there is a data property and it's not on
   // a hidden prototype.
-  if (LookupIterator::DATA == it.state() &&
+  if (it.state() == LookupIterator::DATA &&
       it.GetHolder()->IsJSGlobalObject()) {
-    // Now update cell in the script context.
+    // Now update the cell in the script context.
     Handle<PropertyCell> cell = it.GetPropertyCell();
-    script_context->set(index, *cell);
+    script_context->set(slot, *cell);
   } else {
     // This is not a fast case, so keep this access in a slow mode.
     // Store empty_property_cell here to release the outdated property cell.
-    script_context->set(index, isolate->heap()->empty_property_cell());
+    script_context->set(slot, isolate->heap()->empty_property_cell());
   }
 
   Handle<Object> result;
   ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
                                      Object::GetProperty(&it));
-
   return *result;
 }
 
 
-RUNTIME_FUNCTION(Runtime_StoreGlobalViaContext) {
-  HandleScope scope(isolate);
-  DCHECK(args.length() == 5);
-  CONVERT_ARG_HANDLE_CHECKED(Context, script_context, 0);
-  CONVERT_SMI_ARG_CHECKED(index, 1);
-  CONVERT_ARG_HANDLE_CHECKED(Name, name, 2);
-  CONVERT_ARG_HANDLE_CHECKED(Object, value, 3);
-  CONVERT_LANGUAGE_MODE_ARG_CHECKED(language_mode_arg, 4);
-  DCHECK(script_context->IsScriptContext());
-  DCHECK(script_context->get(index)->IsPropertyCell());
-  LanguageMode language_mode = language_mode_arg;
+namespace {
 
-  Handle<GlobalObject> global(script_context->global_object());
+Object* StoreGlobalViaContext(Isolate* isolate, int slot, Handle<Name> name,
+                              Handle<Object> value,
+                              LanguageMode language_mode) {
+  // Go up context chain to the script context.
+  Handle<Context> script_context(isolate->context()->script_context(), isolate);
+  DCHECK(script_context->IsScriptContext());
+  DCHECK(script_context->get(slot)->IsPropertyCell());
 
-  LookupIterator it(global, name, LookupIterator::HIDDEN);
+  // Lookup the named property on the global object.
+  Handle<GlobalObject> global_object(script_context->global_object(), isolate);
+  LookupIterator it(global_object, name, LookupIterator::HIDDEN);
 
   // Switch to fast mode only if there is a data property and it's not on
   // a hidden prototype.
   if (LookupIterator::DATA == it.state() &&
       it.GetHolder()->IsJSGlobalObject()) {
     // Now update cell in the script context.
     Handle<PropertyCell> cell = it.GetPropertyCell();
-    script_context->set(index, *cell);
+    script_context->set(slot, *cell);
   } else {
     // This is not a fast case, so keep this access in a slow mode.
     // Store empty_property_cell here to release the outdated property cell.
-    script_context->set(index, isolate->heap()->empty_property_cell());
+    script_context->set(slot, isolate->heap()->empty_property_cell());
   }
 
   Handle<Object> result;
@@ -483,10 +483,33 @@ RUNTIME_FUNCTION(Runtime_StoreGlobalViaContext) {
       isolate, result,
       Object::SetProperty(&it, value, language_mode,
                           Object::CERTAINLY_NOT_STORE_FROM_KEYED));
-
   return *result;
 }
 
+}  // namespace
+
+
+RUNTIME_FUNCTION(Runtime_StoreGlobalViaContext_Sloppy) {
+  HandleScope scope(isolate);
+  DCHECK_EQ(3, args.length());
+  CONVERT_SMI_ARG_CHECKED(slot, 0);
+  CONVERT_ARG_HANDLE_CHECKED(Name, name, 1);
+  CONVERT_ARG_HANDLE_CHECKED(Object, value, 2);
+
+  return StoreGlobalViaContext(isolate, slot, name, value, SLOPPY);
+}
+
+
+RUNTIME_FUNCTION(Runtime_StoreGlobalViaContext_Strict) {
+  HandleScope scope(isolate);
+  DCHECK_EQ(3, args.length());
+  CONVERT_SMI_ARG_CHECKED(slot, 0);
+  CONVERT_ARG_HANDLE_CHECKED(Name, name, 1);
+  CONVERT_ARG_HANDLE_CHECKED(Object, value, 2);
+
+  return StoreGlobalViaContext(isolate, slot, name, value, STRICT);
+}
+
 
 RUNTIME_FUNCTION(Runtime_GetProperty) {
   HandleScope scope(isolate);
diff --git a/src/runtime/runtime.h b/src/runtime/runtime.h
index 9a885c9df..1e2471cf9 100644
--- a/src/runtime/runtime.h
+++ b/src/runtime/runtime.h
@@ -447,8 +447,9 @@ namespace internal {
   F(GetPropertyStrong, 2, 1)            \
   F(KeyedGetProperty, 2, 1)             \
   F(KeyedGetPropertyStrong, 2, 1)       \
-  F(LoadGlobalViaContext, 3, 1)         \
-  F(StoreGlobalViaContext, 5, 1)        \
+  F(LoadGlobalViaContext, 2, 1)         \
+  F(StoreGlobalViaContext_Sloppy, 3, 1) \
+  F(StoreGlobalViaContext_Strict, 3, 1) \
   F(AddNamedProperty, 4, 1)             \
   F(SetProperty, 4, 1)                  \
   F(AddElement, 3, 1)                   \
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index d3de24377..303e5954c 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -5033,6 +5033,160 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
 }
 
 
+void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
+  Register context_reg = rsi;
+  Register slot_reg = rbx;
+  Register name_reg = rcx;
+  Register result_reg = rax;
+  Label slow_case;
+
+  // Go up context chain to the script context.
+  for (int i = 0; i < depth(); ++i) {
+    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+    context_reg = rdi;
+  }
+
+  // Load the PropertyCell value at the specified slot.
+  __ movp(result_reg, ContextOperand(context_reg, slot_reg));
+  __ movp(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));
+
+  // Check that value is not the_hole.
+  __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
+  __ j(equal, &slow_case, Label::kNear);
+  __ Ret();
+
+  // Fallback to the runtime.
+  __ bind(&slow_case);
+  __ Integer32ToSmi(slot_reg, slot_reg);
+  __ PopReturnAddressTo(kScratchRegister);
+  __ Push(slot_reg);
+  __ Push(name_reg);
+  __ Push(kScratchRegister);
+  __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
+}
+
+
+void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
+  Register context_reg = rsi;
+  Register slot_reg = rbx;
+  Register name_reg = rcx;
+  Register value_reg = rax;
+  Register cell_reg = r8;
+  Register cell_details_reg = rdx;
+  Register cell_value_reg = r9;
+  Label fast_heapobject_case, fast_smi_case, slow_case;
+
+  if (FLAG_debug_code) {
+    __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
+    __ Check(not_equal, kUnexpectedValue);
+    __ AssertName(name_reg);
+  }
+
+  // Go up context chain to the script context.
+  for (int i = 0; i < depth(); ++i) {
+    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+    context_reg = rdi;
+  }
+
+  // Load the PropertyCell at the specified slot.
+  __ movp(cell_reg, ContextOperand(context_reg, slot_reg));
+
+  // Load PropertyDetails for the cell (actually only the cell_type and kind).
+  __ SmiToInteger32(cell_details_reg,
+                    FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
+  __ andl(cell_details_reg,
+          Immediate(PropertyDetails::PropertyCellTypeField::kMask |
+                    PropertyDetails::KindField::kMask));
+
+
+  // Check if PropertyCell holds mutable data.
+  Label not_mutable_data;
+  __ cmpl(cell_details_reg,
+          Immediate(PropertyDetails::PropertyCellTypeField::encode(
+                        PropertyCellType::kMutable) |
+                    PropertyDetails::KindField::encode(kData)));
+  __ j(not_equal, &not_mutable_data);
+  __ JumpIfSmi(value_reg, &fast_smi_case);
+  __ bind(&fast_heapobject_case);
+  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
+  __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
+                      cell_value_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+                      OMIT_SMI_CHECK);
+  // RecordWriteField clobbers the value register, so we need to reload.
+  __ movp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
+  __ Ret();
+  __ bind(&not_mutable_data);
+
+  // Check if PropertyCell value matches the new value (relevant for Constant,
+  // ConstantType and Undefined cells).
+  Label not_same_value;
+  __ movp(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
+  __ cmpp(cell_value_reg, value_reg);
+  __ j(not_equal, &not_same_value,
+       FLAG_debug_code ? Label::kFar : Label::kNear);
+  if (FLAG_debug_code) {
+    Label done;
+    // This can only be true for Constant, ConstantType and Undefined cells,
+    // because we never store the_hole via this stub.
+    __ cmpl(cell_details_reg,
+            Immediate(PropertyDetails::PropertyCellTypeField::encode(
+                          PropertyCellType::kConstant) |
+                      PropertyDetails::KindField::encode(kData)));
+    __ j(equal, &done);
+    __ cmpl(cell_details_reg,
+            Immediate(PropertyDetails::PropertyCellTypeField::encode(
+                          PropertyCellType::kConstantType) |
+                      PropertyDetails::KindField::encode(kData)));
+    __ j(equal, &done);
+    __ cmpl(cell_details_reg,
+            Immediate(PropertyDetails::PropertyCellTypeField::encode(
+                          PropertyCellType::kUndefined) |
+                      PropertyDetails::KindField::encode(kData)));
+    __ Check(equal, kUnexpectedValue);
+    __ bind(&done);
+  }
+  __ Ret();
+  __ bind(&not_same_value);
+
+  // Check if PropertyCell contains data with constant type.
+ __ cmpl(cell_details_reg, + Immediate(PropertyDetails::PropertyCellTypeField::encode( + PropertyCellType::kConstantType) | + PropertyDetails::KindField::encode(kData))); + __ j(not_equal, &slow_case, Label::kNear); + + // Now either both old and new values must be SMIs or both must be heap + // objects with same map. + Label value_is_heap_object; + __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear); + __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear); + // Old and new values are SMIs, no need for a write barrier here. + __ bind(&fast_smi_case); + __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg); + __ Ret(); + __ bind(&value_is_heap_object); + __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear); + Register cell_value_map_reg = cell_value_reg; + __ movp(cell_value_map_reg, + FieldOperand(cell_value_reg, HeapObject::kMapOffset)); + __ cmpp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset)); + __ j(equal, &fast_heapobject_case); + + // Fallback to the runtime. + __ bind(&slow_case); + __ Integer32ToSmi(slot_reg, slot_reg); + __ PopReturnAddressTo(kScratchRegister); + __ Push(slot_reg); + __ Push(name_reg); + __ Push(value_reg); + __ Push(kScratchRegister); + __ TailCallRuntime(is_strict(language_mode()) + ? Runtime::kStoreGlobalViaContext_Strict + : Runtime::kStoreGlobalViaContext_Sloppy, + 3, 1); +} + + static int Offset(ExternalReference ref0, ExternalReference ref1) { int64_t offset = (ref0.address() - ref1.address()); // Check that fits into int. diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc index acd47cb1f..7a15602ed 100644 --- a/src/x64/full-codegen-x64.cc +++ b/src/x64/full-codegen-x64.cc @@ -1370,15 +1370,18 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy, DCHECK(var->index() > 0); DCHECK(var->IsStaticGlobalObjectProperty()); // Each var occupies two slots in the context: for reads and writes. - int slot_index = var->index(); - int depth = scope()->ContextChainLength(var->scope()); - __ Move(LoadGlobalViaContextDescriptor::DepthRegister(), - Smi::FromInt(depth)); - __ Move(LoadGlobalViaContextDescriptor::SlotRegister(), - Smi::FromInt(slot_index)); - __ Move(LoadGlobalViaContextDescriptor::NameRegister(), var->name()); - LoadGlobalViaContextStub stub(isolate(), depth); - __ CallStub(&stub); + int const slot = var->index(); + int const depth = scope()->ContextChainLength(var->scope()); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ Set(LoadGlobalViaContextDescriptor::SlotRegister(), slot); + __ Move(LoadGlobalViaContextDescriptor::NameRegister(), var->name()); + LoadGlobalViaContextStub stub(isolate(), depth); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } else { __ Move(LoadDescriptor::NameRegister(), var->name()); @@ -2615,16 +2618,23 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, DCHECK(var->index() > 0); DCHECK(var->IsStaticGlobalObjectProperty()); // Each var occupies two slots in the context: for reads and writes. 
- int slot_index = var->index() + 1; - int depth = scope()->ContextChainLength(var->scope()); - __ Move(StoreGlobalViaContextDescriptor::DepthRegister(), - Smi::FromInt(depth)); - __ Move(StoreGlobalViaContextDescriptor::SlotRegister(), - Smi::FromInt(slot_index)); - __ Move(StoreGlobalViaContextDescriptor::NameRegister(), var->name()); - DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(rax)); - StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); - __ CallStub(&stub); + int const slot = var->index() + 1; + int const depth = scope()->ContextChainLength(var->scope()); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ Set(StoreGlobalViaContextDescriptor::SlotRegister(), slot); + __ Move(StoreGlobalViaContextDescriptor::NameRegister(), var->name()); + DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(rax)); + StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); + __ CallStub(&stub); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(var->name()); + __ Push(rax); + __ CallRuntime(is_strict(language_mode()) + ? Runtime::kStoreGlobalViaContext_Strict + : Runtime::kStoreGlobalViaContext_Sloppy, + 3); + } } else if (var->mode() == LET && op != Token::INIT_LET) { // Non-initializing assignment to let variable needs a write barrier. diff --git a/src/x64/interface-descriptors-x64.cc b/src/x64/interface-descriptors-x64.cc index f59b67d78..e9579899a 100644 --- a/src/x64/interface-descriptors-x64.cc +++ b/src/x64/interface-descriptors-x64.cc @@ -36,12 +36,10 @@ const Register VectorStoreICDescriptor::VectorRegister() { return rbx; } const Register StoreTransitionDescriptor::MapRegister() { return rbx; } -const Register LoadGlobalViaContextDescriptor::DepthRegister() { return rdx; } const Register LoadGlobalViaContextDescriptor::SlotRegister() { return rbx; } const Register LoadGlobalViaContextDescriptor::NameRegister() { return rcx; } -const Register StoreGlobalViaContextDescriptor::DepthRegister() { return rdx; } const Register StoreGlobalViaContextDescriptor::SlotRegister() { return rbx; } const Register StoreGlobalViaContextDescriptor::NameRegister() { return rcx; } const Register StoreGlobalViaContextDescriptor::ValueRegister() { return rax; } diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc index 92688ce49..a4c36b893 100644 --- a/src/x64/lithium-codegen-x64.cc +++ b/src/x64/lithium-codegen-x64.cc @@ -2898,16 +2898,19 @@ void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) { DCHECK(ToRegister(instr->context()).is(rsi)); DCHECK(ToRegister(instr->result()).is(rax)); - - __ Move(LoadGlobalViaContextDescriptor::DepthRegister(), - Smi::FromInt(instr->depth())); - __ Move(LoadGlobalViaContextDescriptor::SlotRegister(), - Smi::FromInt(instr->slot_index())); - __ Move(LoadGlobalViaContextDescriptor::NameRegister(), instr->name()); - - Handle stub = - CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= LoadGlobalViaContextStub::kMaximumDepth) { + __ Set(LoadGlobalViaContextDescriptor::SlotRegister(), slot); + __ Move(LoadGlobalViaContextDescriptor::NameRegister(), instr->name()); + Handle stub = + CodeFactory::LoadGlobalViaContext(isolate(), depth).code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ 
CallRuntime(Runtime::kLoadGlobalViaContext, 2); + } } @@ -4272,17 +4275,24 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) { DCHECK(ToRegister(instr->context()).is(rsi)); DCHECK(ToRegister(instr->value()) .is(StoreGlobalViaContextDescriptor::ValueRegister())); - - __ Move(StoreGlobalViaContextDescriptor::DepthRegister(), - Smi::FromInt(instr->depth())); - __ Move(StoreGlobalViaContextDescriptor::SlotRegister(), - Smi::FromInt(instr->slot_index())); - __ Move(StoreGlobalViaContextDescriptor::NameRegister(), instr->name()); - - Handle stub = - CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(), - instr->language_mode()).code(); - CallCode(stub, RelocInfo::CODE_TARGET, instr); + int const slot = instr->slot_index(); + int const depth = instr->depth(); + if (depth <= StoreGlobalViaContextStub::kMaximumDepth) { + __ Set(StoreGlobalViaContextDescriptor::SlotRegister(), slot); + __ Move(StoreGlobalViaContextDescriptor::NameRegister(), instr->name()); + Handle stub = CodeFactory::StoreGlobalViaContext( + isolate(), depth, instr->language_mode()) + .code(); + CallCode(stub, RelocInfo::CODE_TARGET, instr); + } else { + __ Push(Smi::FromInt(slot)); + __ Push(instr->name()); + __ Push(StoreGlobalViaContextDescriptor::ValueRegister()); + __ CallRuntime(is_strict(instr->language_mode()) + ? Runtime::kStoreGlobalViaContext_Strict + : Runtime::kStoreGlobalViaContext_Sloppy, + 3); + } } diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h index 892bff674..f8276d6f1 100644 --- a/src/x64/macro-assembler-x64.h +++ b/src/x64/macro-assembler-x64.h @@ -1568,6 +1568,11 @@ inline Operand ContextOperand(Register context, int index) { } +inline Operand ContextOperand(Register context, Register index) { + return Operand(context, index, times_pointer_size, Context::SlotOffset(0)); +} + + inline Operand GlobalObjectOperand() { return ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX); }
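
The new two-register ContextOperand overload above (a matching one is added to macro-assembler-ia32.h earlier in this patch) is what lets the stubs index the script context directly with the untagged slot register, as in `__ movp(result_reg, ContextOperand(context_reg, slot_reg));` from the x64 load stub. The effective address it encodes is plain pointer arithmetic; a hedged stand-alone sketch follows, with the header-size and tag constants written out as assumptions rather than taken from the V8 headers.

  #include <cstdint>

  // Address of script-context slot `index`:
  //   context + index * kPointerSize + Context::SlotOffset(0)
  // where SlotOffset(0) is the fixed-array header size minus the heap-object tag.
  uintptr_t ContextSlotAddress(uintptr_t context_tagged, uintptr_t index) {
    const uintptr_t kPointerSize = sizeof(void*);
    const uintptr_t kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length (assumed)
    const uintptr_t kHeapObjectTag = 1;                        // assumed tag value
    return context_tagged + index * kPointerSize +
           (kFixedArrayHeaderSize - kHeapObjectTag);
  }
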