From: sgjesse@chromium.org
Date: Thu, 24 Feb 2011 07:17:43 +0000 (+0000)
Subject: ARM: Port r6635 and r6659
X-Git-Tag: upstream/4.7.83~20104
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=acd4f8999902e0bdea3b3b16b1fe74216cd974ef;p=platform%2Fupstream%2Fv8.git

ARM: Port r6635 and r6659

r6635: Remove the redundant load on every context lookup.
r6659: Do not compile the unreachable body of functions with illegal redeclarations.

Review URL: http://codereview.chromium.org/6572003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6920 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---

diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 9d57076..fea9a8c 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -219,46 +219,47 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
     Move(dot_arguments_slot, r3, r1, r2);
   }
 
-  { Comment cmnt(masm_, "[ Declarations");
-    // For named function expressions, declare the function name as a
-    // constant.
-    if (scope()->is_function_scope() && scope()->function() != NULL) {
-      EmitDeclaration(scope()->function(), Variable::CONST, NULL);
-    }
-    // Visit all the explicit declarations unless there is an illegal
-    // redeclaration.
-    if (scope()->HasIllegalRedeclaration()) {
-      scope()->VisitIllegalRedeclaration(this);
-    } else {
-      VisitDeclarations(scope()->declarations());
-    }
-  }
-
   if (FLAG_trace) {
     __ CallRuntime(Runtime::kTraceEnter, 0);
   }
 
-  // Check the stack for overflow or break request.
-  { Comment cmnt(masm_, "[ Stack check");
-    PrepareForBailout(info->function(), NO_REGISTERS);
-    Label ok;
-    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
-    __ cmp(sp, Operand(ip));
-    __ b(hs, &ok);
-    StackCheckStub stub;
-    __ CallStub(&stub);
-    __ bind(&ok);
-  }
+  // Visit the declarations and body unless there is an illegal
+  // redeclaration.
+  if (scope()->HasIllegalRedeclaration()) {
+    Comment cmnt(masm_, "[ Declarations");
+    scope()->VisitIllegalRedeclaration(this);
+
+  } else {
+    { Comment cmnt(masm_, "[ Declarations");
+      // For named function expressions, declare the function name as a
+      // constant.
+      if (scope()->is_function_scope() && scope()->function() != NULL) {
+        EmitDeclaration(scope()->function(), Variable::CONST, NULL);
+      }
+      VisitDeclarations(scope()->declarations());
+    }
 
-  { Comment cmnt(masm_, "[ Body");
-    ASSERT(loop_depth() == 0);
-    VisitStatements(function()->body());
-    ASSERT(loop_depth() == 0);
+    { Comment cmnt(masm_, "[ Stack check");
+      PrepareForBailout(info->function(), NO_REGISTERS);
+      Label ok;
+      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+      __ cmp(sp, Operand(ip));
+      __ b(hs, &ok);
+      StackCheckStub stub;
+      __ CallStub(&stub);
+      __ bind(&ok);
+    }
+
+    { Comment cmnt(masm_, "[ Body");
+      ASSERT(loop_depth() == 0);
+      VisitStatements(function()->body());
+      ASSERT(loop_depth() == 0);
+    }
   }
 
+  // Always emit a 'return undefined' in case control fell off the end of
+  // the body.
   { Comment cmnt(masm_, "[ return <undefined>;");
-    // Emit a 'return undefined' in case control fell off the end of the
-    // body.
     __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   }
   EmitReturnSequence();
@@ -694,10 +695,11 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
         // We bypass the general EmitSlotSearch because we know more about
         // this specific context.
 
-        // The variable in the decl always resides in the current context.
+        // The variable in the decl always resides in the current function
+        // context.
         ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
         if (FLAG_debug_code) {
-          // Check if we have the correct context pointer.
+          // Check that we're not inside a 'with'.
           __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
           __ cmp(r1, cp);
           __ Check(eq, "Unexpected declaration in current context.");
@@ -1037,7 +1039,7 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
     Slot* slot,
     Label* slow) {
   ASSERT(slot->type() == Slot::CONTEXT);
-  Register current = cp;
+  Register context = cp;
   Register next = r3;
   Register temp = r4;
 
@@ -1045,22 +1047,25 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
     if (s->num_heap_slots() > 0) {
       if (s->calls_eval()) {
         // Check that extension is NULL.
-        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
+        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
         __ tst(temp, temp);
         __ b(ne, slow);
       }
-      __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
+      __ ldr(next, ContextOperand(context, Context::CLOSURE_INDEX));
       __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
       // Walk the rest of the chain without clobbering cp.
-      current = next;
+      context = next;
     }
   }
   // Check that last extension is NULL.
-  __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
+  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
   __ tst(temp, temp);
   __ b(ne, slow);
-  __ ldr(temp, ContextOperand(current, Context::FCONTEXT_INDEX));
-  return ContextOperand(temp, slot->index());
+
+  // This function is used only for loads, not stores, so it's safe to
+  // return an cp-based operand (the write barrier cannot be allowed to
+  // destroy the cp register).
+  return ContextOperand(context, slot->index());
 }
 
 
@@ -2004,34 +2009,60 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                             : Builtins::StoreIC_Initialize));
     EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
 
-  } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) {
-    // Perform the assignment for non-const variables and for initialization
-    // of const variables. Const assignments are simply skipped.
-    Label done;
+  } else if (op == Token::INIT_CONST) {
+    // Like var declarations, const declarations are hoisted to function
+    // scope. However, unlike var initializers, const initializers are able
+    // to drill a hole to that function context, even from inside a 'with'
+    // context. We thus bypass the normal static scope lookup.
+    Slot* slot = var->AsSlot();
+    Label skip;
+    switch (slot->type()) {
+      case Slot::PARAMETER:
+        // No const parameters.
+        UNREACHABLE();
+        break;
+      case Slot::LOCAL:
+        // Detect const reinitialization by checking for the hole value.
+        __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
+        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+        __ cmp(r1, ip);
+        __ b(ne, &skip);
+        __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
+        break;
+      case Slot::CONTEXT: {
+        __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
+        __ ldr(r2, ContextOperand(r1, slot->index()));
+        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+        __ cmp(r2, ip);
+        __ b(ne, &skip);
+        __ str(r0, ContextOperand(r1, slot->index()));
+        int offset = Context::SlotOffset(slot->index());
+        __ mov(r3, r0);  // Preserve the stored value in r0.
+        __ RecordWrite(r1, Operand(offset), r3, r2);
+        break;
+      }
+      case Slot::LOOKUP:
+        __ push(r0);
+        __ mov(r0, Operand(slot->var()->name()));
+        __ Push(cp, r0);  // Context and name.
+        __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
+        break;
+    }
+    __ bind(&skip);
+
+  } else if (var->mode() != Variable::CONST) {
+    // Perform the assignment for non-const variables. Const assignments
+    // are simply skipped.
     Slot* slot = var->AsSlot();
     switch (slot->type()) {
       case Slot::PARAMETER:
       case Slot::LOCAL:
-        if (op == Token::INIT_CONST) {
-          // Detect const reinitialization by checking for the hole value.
-          __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
-          __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-          __ cmp(r1, ip);
-          __ b(ne, &done);
-        }
         // Perform the assignment.
         __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
         break;
 
       case Slot::CONTEXT: {
         MemOperand target = EmitSlotSearch(slot, r1);
-        if (op == Token::INIT_CONST) {
-          // Detect const reinitialization by checking for the hole value.
-          __ ldr(r2, target);
-          __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-          __ cmp(r2, ip);
-          __ b(ne, &done);
-        }
         // Perform the assignment and issue the write barrier.
         __ str(result_register(), target);
         // RecordWrite may destroy all its register arguments.
@@ -2042,20 +2073,13 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
       }
 
      case Slot::LOOKUP:
-        // Call the runtime for the assignment. The runtime will ignore
-        // const reinitialization.
+        // Call the runtime for the assignment.
         __ push(r0);  // Value.
         __ mov(r0, Operand(slot->var()->name()));
         __ Push(cp, r0);  // Context and name.
-        if (op == Token::INIT_CONST) {
-          // The runtime will ignore const redeclaration.
-          __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
-        } else {
-          __ CallRuntime(Runtime::kStoreContextSlot, 3);
-        }
+        __ CallRuntime(Runtime::kStoreContextSlot, 3);
         break;
     }
-    __ bind(&done);
   }
 }
 
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index 15498cb..d3c9fee 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -1737,11 +1737,13 @@ LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
 
 
 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
-  LOperand* context = UseTempRegister(instr->context());
+  LOperand* context;
   LOperand* value;
   if (instr->NeedsWriteBarrier()) {
+    context = UseTempRegister(instr->context());
     value = UseTempRegister(instr->value());
   } else {
+    context = UseRegister(instr->context());
     value = UseRegister(instr->value());
   }
   return new LStoreContextSlot(context, value);
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 94f5116..ca64442 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -2112,17 +2112,13 @@ void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
   Register context = ToRegister(instr->context());
   Register result = ToRegister(instr->result());
-  __ ldr(result,
-         MemOperand(context, Context::SlotOffset(Context::FCONTEXT_INDEX)));
-  __ ldr(result, ContextOperand(result, instr->slot_index()));
+  __ ldr(result, ContextOperand(context, instr->slot_index()));
 }
 
 
 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
   Register context = ToRegister(instr->context());
   Register value = ToRegister(instr->value());
-  __ ldr(context,
-         MemOperand(context, Context::SlotOffset(Context::FCONTEXT_INDEX)));
   __ str(value, ContextOperand(context, instr->slot_index()));
   if (instr->needs_write_barrier()) {
     int offset = Context::SlotOffset(instr->slot_index());
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 4abf02b..29e6418 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -2163,12 +2163,24 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
       ldr(dst, MemOperand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
       ldr(dst, FieldMemOperand(dst, JSFunction::kContextOffset));
     }
-    // The context may be an intermediate context, not a function context.
-    ldr(dst, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
-  } else {  // Slot is in the current function context.
-    // The context may be an intermediate context, not a function context.
-    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::FCONTEXT_INDEX)));
+  } else {
+    // Slot is in the current function context. Move it into the
+    // destination register in case we store into it (the write barrier
+    // cannot be allowed to destroy the context in esi).
+    mov(dst, cp);
   }
+
+  // We should not have found a 'with' context by walking the context chain
+  // (i.e., the static scope chain and runtime context chain do not agree).
+  // A variable occurring in such a scope should have slot type LOOKUP and
+  // not CONTEXT.
+  if (FLAG_debug_code) {
+    ldr(ip, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
+    cmp(dst, ip);
+    Check(eq, "Yo dawg, I heard you liked function contexts "
+              "so I put function contexts in all your contexts");
+  }
+}
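
For illustration only, a hypothetical JavaScript snippet (not part of this patch), assuming the classic illegal-redeclaration case of a const redeclared as a var in the same function scope. For such a function, Generate() above now emits only the scope()->VisitIllegalRedeclaration(this) code and skips the declarations, the stack check, and the unreachable body:

    // Hypothetical example: the conflicting declarations of 'x' mark the
    // scope as having an illegal redeclaration, so the body below is
    // unreachable and is no longer compiled.
    function f() {
      const x = 1;
      var x = 2;
      return x + 41;
    }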