From: ager@chromium.org
Date: Thu, 9 Sep 2010 10:44:24 +0000 (+0000)
Subject: Handle global variables potentially shadowed by eval-introduced
X-Git-Tag: upstream/4.7.83~21228
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=e5b63e72138548665aa3eafa1e65bc29137642c2;p=platform%2Fupstream%2Fv8.git

Handle global variables potentially shadowed by eval-introduced
variables in full-codegen.

Review URL: http://codereview.chromium.org/3295022

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5430 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---

diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index f32da6d..c76ca8c 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -493,7 +493,7 @@ MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
       int context_chain_length =
           scope()->ContextChainLength(slot->var()->scope());
       __ LoadContext(scratch, context_chain_length);
-      return CodeGenerator::ContextOperand(scratch, slot->index());
+      return ContextOperand(scratch, slot->index());
     }
     case Slot::LOOKUP:
       UNREACHABLE();
@@ -557,19 +557,17 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
         ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
         if (FLAG_debug_code) {
           // Check if we have the correct context pointer.
-          __ ldr(r1,
-                 CodeGenerator::ContextOperand(cp, Context::FCONTEXT_INDEX));
+          __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
           __ cmp(r1, cp);
           __ Check(eq, "Unexpected declaration in current context.");
         }
         if (mode == Variable::CONST) {
           __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-          __ str(ip, CodeGenerator::ContextOperand(cp, slot->index()));
+          __ str(ip, ContextOperand(cp, slot->index()));
           // No write barrier since the_hole_value is in old space.
         } else if (function != NULL) {
           VisitForValue(function, kAccumulator);
-          __ str(result_register(),
-                 CodeGenerator::ContextOperand(cp, slot->index()));
+          __ str(result_register(), ContextOperand(cp, slot->index()));
           int offset = Context::SlotOffset(slot->index());
           // We know that we have written a function, which is not a smi.
           __ mov(r1, Operand(cp));
@@ -881,6 +879,68 @@ void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
 }
 
 
+void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
+    Slot* slot,
+    TypeofState typeof_state,
+    Label* slow) {
+  Register current = cp;
+  Register next = r1;
+  Register temp = r2;
+
+  Scope* s = scope();
+  while (s != NULL) {
+    if (s->num_heap_slots() > 0) {
+      if (s->calls_eval()) {
+        // Check that extension is NULL.
+        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
+        __ tst(temp, temp);
+        __ b(ne, slow);
+      }
+      // Load next context in chain.
+      __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
+      __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
+      // Walk the rest of the chain using a single register without
+      // clobbering cp.
+      current = next;
+    }
+    // If no outer scope calls eval, we do not need to check more
+    // context extensions.
+    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    s = s->outer_scope();
+  }
+
+  if (s->is_eval_scope()) {
+    Label loop, fast;
+    if (!current.is(next)) {
+      __ Move(next, current);
+    }
+    __ bind(&loop);
+    // Terminate at global context.
+    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
+    __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
+    __ cmp(temp, ip);
+    __ b(eq, &fast);
+    // Check that extension is NULL.
+    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
+    __ tst(temp, temp);
+    __ b(ne, slow);
+    // Load next context in chain.
+    __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
+    __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
+    __ b(&loop);
+    __ bind(&fast);
+  }
+
+  __ ldr(r0, CodeGenerator::GlobalObject());
+  __ mov(r2, Operand(slot->var()->name()));
+  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
+      ? RelocInfo::CODE_TARGET
+      : RelocInfo::CODE_TARGET_CONTEXT;
+  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  __ Call(ic, mode);
+}
+
+
 void FullCodeGenerator::EmitVariableLoad(Variable* var,
                                          Expression::Context context) {
   // Four cases: non-this global variables, lookup slots, all other
@@ -900,11 +960,26 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var,
     Apply(context, r0);
 
   } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
+    Label done, slow;
+
+    // Generate fast-case code for variables that might be shadowed by
+    // eval-introduced variables.  Eval is used a lot without
+    // introducing variables.  In those cases, we do not want to
+    // perform a runtime call for all variables in the scope
+    // containing the eval.
+    if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
+      EmitLoadGlobalSlotCheckExtensions(slot, NOT_INSIDE_TYPEOF, &slow);
+      Apply(context, r0);
+      __ jmp(&done);
+    }
+
+    __ bind(&slow);
     Comment cmnt(masm_, "Lookup slot");
     __ mov(r1, Operand(var->name()));
     __ Push(cp, r1);  // Context and name.
     __ CallRuntime(Runtime::kLoadContextSlot, 2);
     Apply(context, r0);
+    __ bind(&done);
 
   } else if (slot != NULL) {
     Comment cmnt(masm_,
                  (slot->type() == Slot::CONTEXT)
@@ -2464,11 +2539,9 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
 
   Register key = r0;
   Register cache = r1;
-  __ ldr(cache, CodeGenerator::ContextOperand(cp, Context::GLOBAL_INDEX));
+  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
   __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
-  __ ldr(cache,
-         CodeGenerator::ContextOperand(
-             cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
+  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
   __ ldr(cache,
          FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
@@ -3187,7 +3260,7 @@ void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
 
 
 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
-  __ ldr(dst, CodeGenerator::ContextOperand(cp, context_index));
+  __ ldr(dst, ContextOperand(cp, context_index));
 }
 
 
diff --git a/src/full-codegen.cc b/src/full-codegen.cc
index 5ffebfb..7de4a00 100644
--- a/src/full-codegen.cc
+++ b/src/full-codegen.cc
@@ -298,6 +298,11 @@ Handle<Code> FullCodeGenerator::MakeCode(CompilationInfo* info) {
 }
 
 
+MemOperand FullCodeGenerator::ContextOperand(Register context, int index) {
+  return CodeGenerator::ContextOperand(context, index);
+}
+
+
 int FullCodeGenerator::SlotOffset(Slot* slot) {
   ASSERT(slot != NULL);
   // Offset is negative because higher indexes are at lower addresses.
diff --git a/src/full-codegen.h b/src/full-codegen.h
index 840c825..bc3589f 100644
--- a/src/full-codegen.h
+++ b/src/full-codegen.h
@@ -381,6 +381,9 @@ class FullCodeGenerator: public AstVisitor {
 #undef EMIT_INLINE_RUNTIME_CALL
 
   // Platform-specific code for loading variables.
+  void EmitLoadGlobalSlotCheckExtensions(Slot* slot,
+                                         TypeofState typeof_state,
+                                         Label* slow);
   void EmitVariableLoad(Variable* expr, Expression::Context context);
 
   // Platform-specific support for allocating a new closure based on
@@ -500,6 +503,9 @@ class FullCodeGenerator: public AstVisitor {
   // in v8::internal::Context.
   void LoadContextField(Register dst, int context_index);
 
+  // Create an operand for a context field.
+  MemOperand ContextOperand(Register context, int context_index);
+
   // AST node visit functions.
 #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
   AST_NODE_LIST(DECLARE_VISIT)
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 1631b04..ca0313e 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -514,7 +514,7 @@ MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
       int context_chain_length =
           scope()->ContextChainLength(slot->var()->scope());
       __ LoadContext(scratch, context_chain_length);
-      return CodeGenerator::ContextOperand(scratch, slot->index());
+      return ContextOperand(scratch, slot->index());
     }
     case Slot::LOOKUP:
       UNREACHABLE();
@@ -574,19 +574,17 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
         ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
         if (FLAG_debug_code) {
           // Check if we have the correct context pointer.
-          __ mov(ebx,
-                 CodeGenerator::ContextOperand(esi, Context::FCONTEXT_INDEX));
+          __ mov(ebx, ContextOperand(esi, Context::FCONTEXT_INDEX));
           __ cmp(ebx, Operand(esi));
           __ Check(equal, "Unexpected declaration in current context.");
         }
         if (mode == Variable::CONST) {
-          __ mov(CodeGenerator::ContextOperand(esi, slot->index()),
+          __ mov(ContextOperand(esi, slot->index()),
                  Immediate(Factory::the_hole_value()));
           // No write barrier since the hole value is in old space.
         } else if (function != NULL) {
           VisitForValue(function, kAccumulator);
-          __ mov(CodeGenerator::ContextOperand(esi, slot->index()),
-                 result_register());
+          __ mov(ContextOperand(esi, slot->index()), result_register());
           int offset = Context::SlotOffset(slot->index());
           __ mov(ebx, esi);
           __ RecordWrite(ebx, offset, result_register(), ecx);
@@ -885,6 +883,70 @@ void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
 }
 
 
+void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
+    Slot* slot,
+    TypeofState typeof_state,
+    Label* slow) {
+  Register context = esi;
+  Register temp = edx;
+
+  Scope* s = scope();
+  while (s != NULL) {
+    if (s->num_heap_slots() > 0) {
+      if (s->calls_eval()) {
+        // Check that extension is NULL.
+        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
+               Immediate(0));
+        __ j(not_equal, slow);
+      }
+      // Load next context in chain.
+      __ mov(temp, ContextOperand(context, Context::CLOSURE_INDEX));
+      __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
+      // Walk the rest of the chain using a single register without
+      // clobbering esi.
+      context = temp;
+    }
+    // If no outer scope calls eval, we do not need to check more
+    // context extensions.  If we have reached an eval scope, we check
+    // all extensions from this point.
+    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    s = s->outer_scope();
+  }
+
+  if (s != NULL && s->is_eval_scope()) {
+    // Loop up the context chain.  There is no frame effect so it is
+    // safe to use raw labels here.
+    Label next, fast;
+    if (!context.is(temp)) {
+      __ mov(temp, context);
+    }
+    __ bind(&next);
+    // Terminate at global context.
+    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
+           Immediate(Factory::global_context_map()));
+    __ j(equal, &fast);
+    // Check that extension is NULL.
+    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
+    __ j(not_equal, slow);
+    // Load next context in chain.
+    __ mov(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
+    __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
+    __ jmp(&next);
+    __ bind(&fast);
+  }
+
+  // All extension objects were empty and it is safe to use a global
+  // load IC call.
+  __ mov(eax, CodeGenerator::GlobalObject());
+  __ mov(ecx, slot->var()->name());
+  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
+      ? RelocInfo::CODE_TARGET
+      : RelocInfo::CODE_TARGET_CONTEXT;
+  __ call(ic, mode);
+}
+
+
 void FullCodeGenerator::EmitVariableLoad(Variable* var,
                                          Expression::Context context) {
   // Four cases: non-this global variables, lookup slots, all other
@@ -909,11 +971,26 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var,
     Apply(context, eax);
 
   } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
+    Label done, slow;
+
+    // Generate fast-case code for variables that might be shadowed by
+    // eval-introduced variables.  Eval is used a lot without
+    // introducing variables.  In those cases, we do not want to
+    // perform a runtime call for all variables in the scope
+    // containing the eval.
+    if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
+      EmitLoadGlobalSlotCheckExtensions(slot, NOT_INSIDE_TYPEOF, &slow);
+      Apply(context, eax);
+      __ jmp(&done);
+    }
+
+    __ bind(&slow);
     Comment cmnt(masm_, "Lookup slot");
     __ push(esi);  // Context.
     __ push(Immediate(var->name()));
     __ CallRuntime(Runtime::kLoadContextSlot, 2);
     Apply(context, eax);
+    __ bind(&done);
 
   } else if (slot != NULL) {
     Comment cmnt(masm_,
                  (slot->type() == Slot::CONTEXT)
@@ -2781,12 +2858,10 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
 
   Register key = eax;
   Register cache = ebx;
   Register tmp = ecx;
-  __ mov(cache, CodeGenerator::ContextOperand(esi, Context::GLOBAL_INDEX));
+  __ mov(cache, ContextOperand(esi, Context::GLOBAL_INDEX));
   __ mov(cache, FieldOperand(cache, GlobalObject::kGlobalContextOffset));
-  __ mov(cache,
-         CodeGenerator::ContextOperand(
-             cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
+  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
   __ mov(cache,
          FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
@@ -3512,7 +3587,7 @@ void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
 
 
 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
-  __ mov(dst, CodeGenerator::ContextOperand(esi, context_index));
+  __ mov(dst, ContextOperand(esi, context_index));
 }
 
 
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index ccd0392..74699b5 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -507,7 +507,7 @@ MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
      int context_chain_length =
          scope()->ContextChainLength(slot->var()->scope());
      __ LoadContext(scratch, context_chain_length);
-      return CodeGenerator::ContextOperand(scratch, slot->index());
+      return ContextOperand(scratch, slot->index());
     }
     case Slot::LOOKUP:
       UNREACHABLE();
@@ -568,20 +568,17 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
         ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
         if (FLAG_debug_code) {
           // Check if we have the correct context pointer.
-          __ movq(rbx,
-                  CodeGenerator::ContextOperand(rsi, Context::FCONTEXT_INDEX));
+          __ movq(rbx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
           __ cmpq(rbx, rsi);
           __ Check(equal, "Unexpected declaration in current context.");
         }
         if (mode == Variable::CONST) {
           __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
-          __ movq(CodeGenerator::ContextOperand(rsi, slot->index()),
-                  kScratchRegister);
+          __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
           // No write barrier since the hole value is in old space.
         } else if (function != NULL) {
           VisitForValue(function, kAccumulator);
-          __ movq(CodeGenerator::ContextOperand(rsi, slot->index()),
-                  result_register());
+          __ movq(ContextOperand(rsi, slot->index()), result_register());
           int offset = Context::SlotOffset(slot->index());
           __ movq(rbx, rsi);
           __ RecordWrite(rbx, offset, result_register(), rcx);
@@ -881,6 +878,71 @@ void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
 }
 
 
+void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
+    Slot* slot,
+    TypeofState typeof_state,
+    Label* slow) {
+  Register context = rsi;
+  Register temp = rdx;
+
+  Scope* s = scope();
+  while (s != NULL) {
+    if (s->num_heap_slots() > 0) {
+      if (s->calls_eval()) {
+        // Check that extension is NULL.
+        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
+                Immediate(0));
+        __ j(not_equal, slow);
+      }
+      // Load next context in chain.
+      __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
+      __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
+      // Walk the rest of the chain using a single register without
+      // clobbering rsi.
+      context = temp;
+    }
+    // If no outer scope calls eval, we do not need to check more
+    // context extensions.  If we have reached an eval scope, we check
+    // all extensions from this point.
+    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    s = s->outer_scope();
+  }
+
+  if (s != NULL && s->is_eval_scope()) {
+    // Loop up the context chain.  There is no frame effect so it is
+    // safe to use raw labels here.
+    Label next, fast;
+    if (!context.is(temp)) {
+      __ movq(temp, context);
+    }
+    // Load map for comparison into register, outside loop.
+    __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
+    __ bind(&next);
+    // Terminate at global context.
+    __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
+    __ j(equal, &fast);
+    // Check that extension is NULL.
+    __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
+    __ j(not_equal, slow);
+    // Load next context in chain.
+    __ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
+    __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
+    __ jmp(&next);
+    __ bind(&fast);
+  }
+
+  // All extension objects were empty and it is safe to use a global
+  // load IC call.
+  __ movq(rax, CodeGenerator::GlobalObject());
+  __ Move(rcx, slot->var()->name());
+  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
+      ? RelocInfo::CODE_TARGET
+      : RelocInfo::CODE_TARGET_CONTEXT;
+  __ call(ic, mode);
+}
+
+
 void FullCodeGenerator::EmitVariableLoad(Variable* var,
                                          Expression::Context context) {
   // Four cases: non-this global variables, lookup slots, all other
@@ -904,11 +966,26 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var,
     Apply(context, rax);
 
   } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
+    Label done, slow;
+
+    // Generate fast-case code for variables that might be shadowed by
+    // eval-introduced variables.  Eval is used a lot without
+    // introducing variables.  In those cases, we do not want to
+    // perform a runtime call for all variables in the scope
+    // containing the eval.
+    if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
+      EmitLoadGlobalSlotCheckExtensions(slot, NOT_INSIDE_TYPEOF, &slow);
+      Apply(context, rax);
+      __ jmp(&done);
+    }
+
+    __ bind(&slow);
     Comment cmnt(masm_, "Lookup slot");
     __ push(rsi);  // Context.
     __ Push(var->name());
     __ CallRuntime(Runtime::kLoadContextSlot, 2);
     Apply(context, rax);
+    __ bind(&done);
 
   } else if (slot != NULL) {
     Comment cmnt(masm_,
                  (slot->type() == Slot::CONTEXT)
@@ -2522,12 +2599,11 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
 
   Register key = rax;
   Register cache = rbx;
   Register tmp = rcx;
-  __ movq(cache, CodeGenerator::ContextOperand(rsi, Context::GLOBAL_INDEX));
+  __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
   __ movq(cache,
           FieldOperand(cache, GlobalObject::kGlobalContextOffset));
   __ movq(cache,
-          CodeGenerator::ContextOperand(
-              cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
+          ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
   __ movq(cache,
           FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
@@ -3243,7 +3319,7 @@ void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
 
 
 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
-  __ movq(dst, CodeGenerator::ContextOperand(rsi, context_index));
+  __ movq(dst, ContextOperand(rsi, context_index));
 }
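
All three back ends emit the same run-time check before falling through to the
contextual load IC: walk the context chain, verify that no relevant context
carries an extension object (which is where eval-introduced bindings live), and
bail out to the slow label otherwise. The sketch below is an illustrative C++
model of that check, not V8 source; ModelContext and its fields are hypothetical
stand-ins for the heap slots the generated code reads via Context::CLOSURE_INDEX
(followed by JSFunction::kContextOffset), Context::EXTENSION_INDEX, and the
context map. It also simplifies by checking every context, whereas the emitted
code statically skips scopes that are known not to call eval.

  #include <cstddef>

  // Illustrative model only: approximates what the emitted fast path
  // verifies at run time before using the global load IC.
  struct ModelContext {
    ModelContext* closure_context;  // stand-in for CLOSURE_INDEX -> kContextOffset
    void* extension;                // stand-in for EXTENSION_INDEX (eval bindings)
    bool is_global;                 // map equals the global context map
  };

  // True: the contextual LoadIC may be used (fast path).
  // False: models the jump to the slow label, i.e. Runtime::kLoadContextSlot.
  bool CanUseGlobalLoadIC(const ModelContext* c) {
    for (; c != NULL; c = c->closure_context) {
      if (c->extension != NULL) return false;  // an eval added bindings here
      if (c->is_global) return true;           // reached the global context
    }
    return true;
  }

If every extension slot along the chain is empty, no eval call can have
introduced a shadowing binding, so loading the DYNAMIC_GLOBAL variable through
the global load IC is safe; otherwise the generated code falls back to the
Runtime::kLoadContextSlot path bound at the slow label in the hunks above.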