// Copy any necessary parameters into the context.
int num_parameters = info->scope()->num_parameters();
for (int i = 0; i < num_parameters; i++) {
- Slot* slot = scope()->parameter(i)->AsSlot();
- if (slot != NULL && slot->type() == Slot::CONTEXT) {
+ Variable* var = scope()->parameter(i);
+ if (var->IsContextSlot()) {
int parameter_offset = StandardFrameConstants::kCallerSPOffset +
(num_parameters - 1 - i) * kPointerSize;
// Load parameter from stack.
__ ldr(r0, MemOperand(fp, parameter_offset));
// Store it in the context.
- __ mov(r1, Operand(Context::SlotOffset(slot->index())));
+ __ mov(r1, Operand(Context::SlotOffset(var->index())));
__ str(r0, MemOperand(cp, r1));
// Update the write barrier. This clobbers all involved
// registers, so we have to use two more registers to avoid
ArgumentsAccessStub stub(type);
__ CallStub(&stub);
- Move(arguments->AsSlot(), r0, r1, r2);
+ SetVar(arguments, r0, r1, r2);
}
if (FLAG_trace) {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- EmitDeclaration(scope()->function(), Variable::CONST, NULL);
+ int ignored = 0;
+ EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored);
}
VisitDeclarations(scope()->declarations());
}
}
-void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
+void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
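+ // In an effect context the value is discarded, and reading a stack or
+ // context variable has no side effects, so no code is emitted.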
}
-void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
- codegen()->Move(result_register(), slot);
+void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
+ codegen()->GetVar(result_register(), var);
}
-void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
- codegen()->Move(result_register(), slot);
+void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
+ codegen()->GetVar(result_register(), var);
__ push(result_register());
}
-void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
+void FullCodeGenerator::TestContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
// For simplicity we always test the accumulator register.
- codegen()->Move(result_register(), slot);
+ codegen()->GetVar(result_register(), var);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(this);
}
}
-MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- return MemOperand(fp, SlotOffset(slot));
- case Slot::CONTEXT: {
- int context_chain_length =
- scope()->ContextChainLength(slot->var()->scope());
- __ LoadContext(scratch, context_chain_length);
- return ContextOperand(scratch, slot->index());
- }
- case Slot::LOOKUP:
- UNREACHABLE();
+MemOperand FullCodeGenerator::StackOperand(Variable* var) {
+ ASSERT(var->IsStackAllocated());
+ // Offset is negative because higher indexes are at lower addresses.
+ int offset = -var->index() * kPointerSize;
+ // Adjust by a (parameter or local) base offset.
+ if (var->IsParameter()) {
+ offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
+ } else {
+ offset += JavaScriptFrameConstants::kLocal0Offset;
}
- UNREACHABLE();
- return MemOperand(r0, 0);
+ return MemOperand(fp, offset);
}
-void FullCodeGenerator::Move(Register destination, Slot* source) {
+MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
+ ASSERT(var->IsContextSlot() || var->IsStackAllocated());
+ if (var->IsContextSlot()) {
+ int context_chain_length = scope()->ContextChainLength(var->scope());
+ __ LoadContext(scratch, context_chain_length);
+ return ContextOperand(scratch, var->index());
+ } else {
+ return StackOperand(var);
+ }
+}
+
+
+void FullCodeGenerator::GetVar(Register dest, Variable* var) {
// Use destination as scratch.
- MemOperand slot_operand = EmitSlotSearch(source, destination);
- __ ldr(destination, slot_operand);
+ MemOperand location = VarOperand(var, dest);
+ __ ldr(dest, location);
}
-void FullCodeGenerator::Move(Slot* dst,
- Register src,
- Register scratch1,
- Register scratch2) {
- ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
- ASSERT(!scratch1.is(src) && !scratch2.is(src));
- MemOperand location = EmitSlotSearch(dst, scratch1);
+void FullCodeGenerator::SetVar(Variable* var,
+ Register src,
+ Register scratch0,
+ Register scratch1) {
+ ASSERT(var->IsContextSlot() || var->IsStackAllocated());
+ ASSERT(!scratch0.is(src));
+ ASSERT(!scratch0.is(scratch1));
+ ASSERT(!scratch1.is(src));
+ MemOperand location = VarOperand(var, scratch0);
__ str(src, location);
// Emit the write barrier code if the location is in the heap.
- if (dst->type() == Slot::CONTEXT) {
- __ RecordWrite(scratch1,
- Operand(Context::SlotOffset(dst->index())),
- scratch2,
+ if (var->IsContextSlot()) {
+ __ RecordWrite(scratch0,
+ Operand(Context::SlotOffset(var->index())),
+ scratch1,
src);
}
}
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
Variable::Mode mode,
- FunctionLiteral* function) {
- Comment cmnt(masm_, "[ Declaration");
+ FunctionLiteral* function,
+ int* global_count) {
+ // If it was not possible to allocate the variable at compile time, we
+ // need to "declare" it at runtime to make sure it actually exists in the
+ // local context.
Variable* variable = proxy->var();
- ASSERT(variable != NULL); // Must have been resolved.
- Slot* slot = variable->AsSlot();
- ASSERT(slot != NULL);
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
+ switch (variable->location()) {
+ case Variable::UNALLOCATED:
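+ // Globals are declared in a batch by DeclareGlobals; just count them here.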
+ ++(*global_count);
+ break;
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
if (function != NULL) {
+ Comment cmnt(masm_, "[ Declaration");
VisitForAccumulatorValue(function);
- __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
+ __ str(result_register(), StackOperand(variable));
} else if (mode == Variable::CONST || mode == Variable::LET) {
+ Comment cmnt(masm_, "[ Declaration");
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ str(ip, MemOperand(fp, SlotOffset(slot)));
+ __ str(ip, StackOperand(variable));
}
break;
- case Slot::CONTEXT:
- // We bypass the general EmitSlotSearch because we know more about
- // this specific context.
-
+ case Variable::CONTEXT:
// The variable in the decl always resides in the current function
// context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
__ Check(ne, "Declaration in catch context.");
}
if (function != NULL) {
+ Comment cmnt(masm_, "[ Declaration");
VisitForAccumulatorValue(function);
- __ str(result_register(), ContextOperand(cp, slot->index()));
- int offset = Context::SlotOffset(slot->index());
+ __ str(result_register(), ContextOperand(cp, variable->index()));
+ int offset = Context::SlotOffset(variable->index());
// We know that we have written a function, which is not a smi.
__ mov(r1, Operand(cp));
__ RecordWrite(r1, Operand(offset), r2, result_register());
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
} else if (mode == Variable::CONST || mode == Variable::LET) {
+ Comment cmnt(masm_, "[ Declaration");
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ str(ip, ContextOperand(cp, slot->index()));
+ __ str(ip, ContextOperand(cp, variable->index()));
// No write barrier since the_hole_value is in old space.
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
}
break;
- case Slot::LOOKUP: {
+ case Variable::LOOKUP: {
+ Comment cmnt(masm_, "[ Declaration");
__ mov(r2, Operand(variable->name()));
- // Declaration nodes are always introduced in one of two modes.
+ // Declaration nodes are always introduced in one of three modes.
ASSERT(mode == Variable::VAR ||
mode == Variable::CONST ||
mode == Variable::LET);
__ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
__ Push(cp, r2, r1, r0);
} else {
- __ mov(r0, Operand(Smi::FromInt(0))); // No initial value!
+ __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
__ Push(cp, r2, r1, r0);
}
__ CallRuntime(Runtime::kDeclareContextSlot, 4);
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
- EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
-}
+void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
}
-void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
- Slot* slot,
- TypeofState typeof_state,
- Label* slow) {
+void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
+ TypeofState typeof_state,
+ Label* slow) {
Register current = cp;
Register next = r1;
Register temp = r2;
}
__ ldr(r0, GlobalObjectOperand());
- __ mov(r2, Operand(slot->var()->name()));
+ __ mov(r2, Operand(var->name()));
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
}
-MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
- Slot* slot,
- Label* slow) {
- ASSERT(slot->type() == Slot::CONTEXT);
+MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
+ Label* slow) {
+ ASSERT(var->IsContextSlot());
Register context = cp;
Register next = r3;
Register temp = r4;
- for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
+ for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
if (s->num_heap_slots() > 0) {
if (s->calls_eval()) {
// Check that extension is NULL.
// This function is used only for loads, not stores, so it's safe to
// return an cp-based operand (the write barrier cannot be allowed to
// destroy the cp register).
- return ContextOperand(context, slot->index());
+ return ContextOperand(context, var->index());
}
-void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
- Slot* slot,
- TypeofState typeof_state,
- Label* slow,
- Label* done) {
+void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
+ TypeofState typeof_state,
+ Label* slow,
+ Label* done) {
// Generate fast-case code for variables that might be shadowed by
// eval-introduced variables. Eval is used a lot without
// introducing variables. In those cases, we do not want to
// perform a runtime call for all variables in the scope
// containing the eval.
- if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
- EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
+ if (var->mode() == Variable::DYNAMIC_GLOBAL) {
+ EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
__ jmp(done);
- } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
- Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
- Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
- if (potential_slot != NULL) {
- // Generate fast case for locals that rewrite to slots.
- __ ldr(r0, ContextSlotOperandCheckExtensions(potential_slot, slow));
- if (potential_slot->var()->mode() == Variable::CONST) {
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r0, ip);
- __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
- }
- __ jmp(done);
- } else if (rewrite != NULL) {
- // Generate fast case for calls of an argument function.
- Property* property = rewrite->AsProperty();
- if (property != NULL) {
- VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
- Literal* key_literal = property->key()->AsLiteral();
- if (obj_proxy != NULL &&
- key_literal != NULL &&
- obj_proxy->IsArguments() &&
- key_literal->handle()->IsSmi()) {
- // Load arguments object if there are no eval-introduced
- // variables. Then load the argument from the arguments
- // object using keyed load.
- __ ldr(r1,
- ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
- slow));
- __ mov(r0, Operand(key_literal->handle()));
- Handle<Code> ic =
- isolate()->builtins()->KeyedLoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
- __ jmp(done);
- }
- }
+ } else if (var->mode() == Variable::DYNAMIC_LOCAL) {
+ Variable* local = var->local_if_not_shadowed();
+ __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
+ if (local->mode() == Variable::CONST) {
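+ // An uninitialized const still holds the hole; read it as undefined.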
+ __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
}
+ __ jmp(done);
}
}
SetSourcePosition(proxy->position());
Variable* var = proxy->var();
- // Three cases: non-this global variables, lookup slots, and all other
- // types of slots.
- Slot* slot = var->AsSlot();
- ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
-
- if (slot == NULL) {
- Comment cmnt(masm_, "Global variable");
- // Use inline caching. Variable name is passed in r2 and the global
- // object (receiver) in r0.
- __ ldr(r0, GlobalObjectOperand());
- __ mov(r2, Operand(var->name()));
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
- context()->Plug(r0);
-
- } else if (slot->type() == Slot::LOOKUP) {
- Label done, slow;
-
- // Generate code for loading from variables potentially shadowed
- // by eval-introduced variables.
- EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
-
- __ bind(&slow);
- Comment cmnt(masm_, "Lookup slot");
- __ mov(r1, Operand(var->name()));
- __ Push(cp, r1); // Context and name.
- __ CallRuntime(Runtime::kLoadContextSlot, 2);
- __ bind(&done);
+ // Three cases: global variables, lookup variables, and all other types of
+ // variables.
+ switch (var->location()) {
+ case Variable::UNALLOCATED: {
+ Comment cmnt(masm_, "Global variable");
+ // Use inline caching. Variable name is passed in r2 and the global
+ // object (receiver) in r0.
+ __ ldr(r0, GlobalObjectOperand());
+ __ mov(r2, Operand(var->name()));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ context()->Plug(r0);
+ break;
+ }
- context()->Plug(r0);
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, var->IsContextSlot()
+ ? "Context variable"
+ : "Stack variable");
+ if (var->mode() != Variable::LET && var->mode() != Variable::CONST) {
+ context()->Plug(var);
+ } else {
+ // Let and const need a read barrier.
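+ // An uninitialized binding still holds the hole: 'let' throws a
+ // ReferenceError, 'const' simply reads as undefined.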
+ GetVar(r0, var);
+ __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
+ if (var->mode() == Variable::LET) {
+ Label done;
+ __ b(ne, &done);
+ __ mov(r0, Operand(var->name()));
+ __ push(r0);
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ bind(&done);
+ } else {
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
+ }
+ context()->Plug(r0);
+ }
+ break;
+ }
- } else {
- Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
- ? "Context slot"
- : "Stack slot");
- if (var->mode() == Variable::CONST) {
- // Constants may be the hole value if they have not been initialized.
- // Unhole them.
- MemOperand slot_operand = EmitSlotSearch(slot, r0);
- __ ldr(r0, slot_operand);
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r0, ip);
- __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
- context()->Plug(r0);
- } else if (var->mode() == Variable::LET) {
- // Let bindings may be the hole value if they have not been initialized.
- // Throw a type error in this case.
- Label done;
- MemOperand slot_operand = EmitSlotSearch(slot, r0);
- __ ldr(r0, slot_operand);
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r0, ip);
- __ b(ne, &done);
- __ mov(r0, Operand(var->name()));
- __ push(r0);
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ case Variable::LOOKUP: {
+ Label done, slow;
+ // Generate code for loading from variables potentially shadowed
+ // by eval-introduced variables.
+ EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
+ __ bind(&slow);
+ Comment cmnt(masm_, "Lookup variable");
+ __ mov(r1, Operand(var->name()));
+ __ Push(cp, r1); // Context and name.
+ __ CallRuntime(Runtime::kLoadContextSlot, 2);
__ bind(&done);
context()->Plug(r0);
- } else {
- context()->Plug(slot);
}
}
}
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
- ASSERT(var != NULL);
- ASSERT(var->is_global() || var->AsSlot() != NULL);
-
- if (var->is_global()) {
- ASSERT(!var->is_this());
- // Assignment to a global variable. Use inline caching for the
- // assignment. Right-hand-side value is passed in r0, variable name in
- // r2, and the global object in r1.
+ if (var->IsUnallocated()) {
+ // Global var, const, or let.
__ mov(r2, Operand(var->name()));
__ ldr(r1, GlobalObjectOperand());
Handle<Code> ic = is_strict_mode()
__ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
- // Like var declarations, const declarations are hoisted to function
- // scope. However, unlike var initializers, const initializers are able
- // to drill a hole to that function context, even from inside a 'with'
- // context. We thus bypass the normal static scope lookup.
- Slot* slot = var->AsSlot();
- Label skip;
- switch (slot->type()) {
- case Slot::PARAMETER:
- // No const parameters.
- UNREACHABLE();
- break;
- case Slot::LOCAL:
- // Detect const reinitialization by checking for the hole value.
- __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r1, ip);
- __ b(ne, &skip);
- __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
- break;
- case Slot::CONTEXT:
- case Slot::LOOKUP:
- __ push(r0);
- __ mov(r0, Operand(slot->var()->name()));
- __ Push(cp, r0); // Context and name.
- __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
- break;
+ // Const initializers need a write barrier.
+ ASSERT(!var->IsParameter()); // No const parameters.
+ if (var->IsStackLocal()) {
+ Label skip;
+ __ ldr(r1, StackOperand(var));
+ __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
+ __ b(ne, &skip);
+ __ str(result_register(), StackOperand(var));
+ __ bind(&skip);
+ } else {
+ ASSERT(var->IsContextSlot() || var->IsLookupSlot());
+ // Like var declarations, const declarations are hoisted to function
+ // scope. However, unlike var initializers, const initializers are
+ // able to drill a hole to that function context, even from inside a
+ // 'with' context. We thus bypass the normal static scope lookup for
+ // var->IsContextSlot().
+ __ push(r0);
+ __ mov(r0, Operand(var->name()));
+ __ Push(cp, r0); // Context and name.
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
}
- __ bind(&skip);
} else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
- // Perform the assignment for non-const variables. Const assignments
- // are simply skipped.
- Slot* slot = var->AsSlot();
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL: {
- Label assign;
- // Check for an initialized let binding.
- __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r1, ip);
- __ b(ne, &assign);
- __ mov(r1, Operand(var->name()));
- __ push(r1);
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
- // Perform the assignment.
- __ bind(&assign);
- __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
- break;
- }
- case Slot::CONTEXT: {
- // Let variables may be the hole value if they have not been
- // initialized. Throw a type error in this case.
- Label assign;
- MemOperand target = EmitSlotSearch(slot, r1);
- // Check for an initialized let binding.
- __ ldr(r3, target);
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r3, ip);
- __ b(ne, &assign);
- __ mov(r3, Operand(var->name()));
- __ push(r3);
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
- // Perform the assignment.
- __ bind(&assign);
- __ str(result_register(), target);
+ // Non-initializing assignment to let variable needs a write barrier.
+ if (var->IsLookupSlot()) {
+ __ push(r0); // Value.
+ __ mov(r1, Operand(var->name()));
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ Push(cp, r1, r0); // Context, name, strict mode.
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
+ } else {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
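+ // If the binding still holds the hole it has not been initialized yet;
+ // throw a ReferenceError instead of performing the assignment.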
+ Label assign;
+ MemOperand location = VarOperand(var, r1);
+ __ ldr(r3, location);
+ __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
+ __ b(ne, &assign);
+ __ mov(r3, Operand(var->name()));
+ __ push(r3);
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ // Perform the assignment.
+ __ bind(&assign);
+ __ str(result_register(), location);
+ if (var->IsContextSlot()) {
// RecordWrite may destroy all its register arguments.
__ mov(r3, result_register());
- int offset = Context::SlotOffset(slot->index());
+ int offset = Context::SlotOffset(var->index());
__ RecordWrite(r1, Operand(offset), r2, r3);
- break;
}
- case Slot::LOOKUP:
- // Call the runtime for the assignment.
- __ push(r0); // Value.
- __ mov(r1, Operand(slot->var()->name()));
- __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
- __ Push(cp, r1, r0); // Context, name, strict mode.
- __ CallRuntime(Runtime::kStoreContextSlot, 4);
- break;
}
} else if (var->mode() != Variable::CONST) {
- // Perform the assignment for non-const variables. Const assignments
- // are simply skipped.
- Slot* slot = var->AsSlot();
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- if (FLAG_debug_code && op == Token::INIT_LET) {
- // Check for an uninitialized let binding.
- __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r1, ip);
- __ Check(eq, "Let binding re-initialization.");
- }
- // Perform the assignment.
- __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
- break;
-
- case Slot::CONTEXT: {
- MemOperand target = EmitSlotSearch(slot, r1);
- if (FLAG_debug_code && op == Token::INIT_LET) {
- // Check for an uninitialized let binding.
- __ ldr(r3, target);
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r3, ip);
- __ Check(eq, "Let binding re-initialization.");
- }
- // Perform the assignment and issue the write barrier.
- __ str(result_register(), target);
- // RecordWrite may destroy all its register arguments.
- __ mov(r3, result_register());
- int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
- __ RecordWrite(r1, Operand(offset), r2, r3);
- break;
+ // Assignment to var or initializing assignment to let.
+ if (var->IsStackAllocated() || var->IsContextSlot()) {
+ MemOperand location = VarOperand(var, r1);
+ if (FLAG_debug_code && op == Token::INIT_LET) {
+ // Check for an uninitialized let binding.
+ __ ldr(r2, location);
+ __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
+ __ Check(eq, "Let binding re-initialization.");
}
-
- case Slot::LOOKUP:
- ASSERT(op != Token::INIT_LET);
- // Call the runtime for the assignment.
- __ push(r0); // Value.
- __ mov(r1, Operand(slot->var()->name()));
- __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
- __ Push(cp, r1, r0); // Context, name, strict mode.
- __ CallRuntime(Runtime::kStoreContextSlot, 4);
- break;
+ // Perform the assignment.
+ __ str(r0, location);
+ if (var->IsContextSlot()) {
+ __ mov(r3, r0);
+ __ RecordWrite(r1, Operand(Context::SlotOffset(var->index())), r2, r3);
+ }
+ } else {
+ ASSERT(var->IsLookupSlot());
+ __ push(r0); // Value.
+ __ mov(r1, Operand(var->name()));
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ Push(cp, r1, r0); // Context, name, strict mode.
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
}
}
+ // Non-initializing assignments to consts are ignored.
}
#endif
Comment cmnt(masm_, "[ Call");
- Expression* fun = expr->expression();
- Variable* var = fun->AsVariableProxy()->AsVariable();
+ Expression* callee = expr->expression();
+ VariableProxy* proxy = callee->AsVariableProxy();
+ Property* property = callee->AsProperty();
- if (var != NULL && var->is_possibly_eval()) {
+ if (proxy != NULL && proxy->var()->is_possibly_eval()) {
// In a call to eval, we first call %ResolvePossiblyDirectEval to
// resolve the function we need to call and the receiver of the
// call. Then we call the resolved function using the given
int arg_count = args->length();
{ PreservePositionScope pos_scope(masm()->positions_recorder());
- VisitForStackValue(fun);
+ VisitForStackValue(callee);
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ push(r2); // Reserved receiver slot.
// in generated code. If we succeed, there is no need to perform a
// context lookup in the runtime system.
Label done;
- if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+ Variable* var = proxy->var();
+ if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) {
Label slow;
- EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
- NOT_INSIDE_TYPEOF,
- &slow);
+ EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow);
// Push the function and resolve eval.
__ push(r0);
EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
__ bind(&slow);
}
- // Push copy of the function (found below the arguments) and
+ // Push a copy of the function (found below the arguments) and
// resolve eval.
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ push(r1);
EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
- if (done.is_linked()) {
- __ bind(&done);
- }
+ __ bind(&done);
// The runtime call returns a pair of values in r0 (function) and
// r1 (receiver). Touch up the stack with the right values.
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, r0);
- } else if (var != NULL && !var->is_this() && var->is_global()) {
+ } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
// Push global object as receiver for the call IC.
__ ldr(r0, GlobalObjectOperand());
__ push(r0);
- EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
- } else if (var != NULL && var->AsSlot() != NULL &&
- var->AsSlot()->type() == Slot::LOOKUP) {
+ EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
+ } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
// Call to a lookup slot (dynamically introduced variable).
Label slow, done;
{ PreservePositionScope scope(masm()->positions_recorder());
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
- EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
- NOT_INSIDE_TYPEOF,
- &slow,
- &done);
+ EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
}
__ bind(&slow);
// Call the runtime to find the function to call (returned in r0)
// and the object holding it (returned in edx).
__ push(context_register());
- __ mov(r2, Operand(var->name()));
+ __ mov(r2, Operand(proxy->name()));
__ push(r2);
__ CallRuntime(Runtime::kLoadContextSlot, 2);
__ Push(r0, r1); // Function, receiver.
// by LoadContextSlot. That object could be the hole if the
// receiver is implicitly the global object.
EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
- } else if (fun->AsProperty() != NULL) {
- // Call to an object property.
- Property* prop = fun->AsProperty();
- Literal* key = prop->key()->AsLiteral();
- if (key != NULL && key->handle()->IsSymbol()) {
- // Call to a named property, use call IC.
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
+ } else if (property != NULL) {
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(property->obj());
+ }
+ if (property->key()->IsPropertyName()) {
+ EmitCallWithIC(expr,
+ property->key()->AsLiteral()->handle(),
+ RelocInfo::CODE_TARGET);
} else {
- // Call to a keyed property.
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitKeyedCallWithIC(expr, prop->key());
+ EmitKeyedCallWithIC(expr, property->key());
}
} else {
+ // Call to an arbitrary expression not handled specially above.
{ PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(fun);
+ VisitForStackValue(callee);
}
// Load global receiver object.
__ ldr(r1, GlobalObjectOperand());
switch (expr->op()) {
case Token::DELETE: {
Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
- Property* prop = expr->expression()->AsProperty();
- Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
+ Property* property = expr->expression()->AsProperty();
+ VariableProxy* proxy = expr->expression()->AsVariableProxy();
- if (prop != NULL) {
- VisitForStackValue(prop->obj());
- VisitForStackValue(prop->key());
+ if (property != NULL) {
+ VisitForStackValue(property->obj());
+ VisitForStackValue(property->key());
__ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
__ push(r1);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(r0);
- } else if (var != NULL) {
+ } else if (proxy != NULL) {
+ Variable* var = proxy->var();
// Delete of an unqualified identifier is disallowed in strict mode
- // but "delete this" is.
+ // but "delete this" is allowed.
ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
- if (var->is_global()) {
+ if (var->IsUnallocated()) {
__ ldr(r2, GlobalObjectOperand());
__ mov(r1, Operand(var->name()));
__ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
__ Push(r2, r1, r0);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(r0);
- } else if (var->AsSlot() != NULL &&
- var->AsSlot()->type() != Slot::LOOKUP) {
+ } else if (var->IsStackAllocated() || var->IsContextSlot()) {
// Result of deleting non-global, non-dynamic variables is false.
// The subexpression does not have side effects.
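+ // Deleting 'this' (which is also routed here) yields true.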
- context()->Plug(false);
+ context()->Plug(var->is_this());
} else {
// Non-global variable. Call the runtime to try to delete from the
// context where the variable was introduced.
ASSERT(!context()->IsEffect());
ASSERT(!context()->IsTest());
VariableProxy* proxy = expr->AsVariableProxy();
- if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
+ if (proxy != NULL && proxy->var()->IsUnallocated()) {
Comment cmnt(masm_, "Global variable");
__ ldr(r0, GlobalObjectOperand());
__ mov(r2, Operand(proxy->name()));
__ Call(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(r0);
- } else if (proxy != NULL &&
- proxy->var()->AsSlot() != NULL &&
- proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
+ } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
Label done, slow;
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
- Slot* slot = proxy->var()->AsSlot();
- EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
+ EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
__ bind(&slow);
__ mov(r0, Operand(proxy->name()));
// Copy any necessary parameters into the context.
int num_parameters = scope()->num_parameters();
for (int i = 0; i < num_parameters; i++) {
- Slot* slot = scope()->parameter(i)->AsSlot();
- if (slot != NULL && slot->type() == Slot::CONTEXT) {
+ Variable* var = scope()->parameter(i);
+ if (var->IsContextSlot()) {
int parameter_offset = StandardFrameConstants::kCallerSPOffset +
(num_parameters - 1 - i) * kPointerSize;
// Load parameter from stack.
__ ldr(r0, MemOperand(fp, parameter_offset));
// Store it in the context.
- __ mov(r1, Operand(Context::SlotOffset(slot->index())));
+ __ mov(r1, Operand(Context::SlotOffset(var->index())));
__ str(r0, MemOperand(cp, r1));
// Update the write barrier. This clobbers all involved
// registers, so we have to use two more registers to avoid
// ----------------------------------------------------------------------------
// All the Accept member functions for each syntax tree node type.
-void Slot::Accept(AstVisitor* v) { v->VisitSlot(this); }
-
#define DECL_ACCEPT(type) \
void type::Accept(AstVisitor* v) { v->Visit##type(this); }
AST_NODE_LIST(DECL_ACCEPT)
}
-bool Slot::IsInlineable() const {
- UNREACHABLE();
- return false;
-}
-
-
bool ForInStatement::IsInlineable() const {
return false;
}
bool VariableProxy::IsInlineable() const {
- return var()->is_global() || var()->IsStackAllocated();
+ return var()->IsUnallocated() || var()->IsStackAllocated();
}
virtual BreakableStatement* AsBreakableStatement() { return NULL; }
virtual IterationStatement* AsIterationStatement() { return NULL; }
virtual MaterializedLiteral* AsMaterializedLiteral() { return NULL; }
- virtual Slot* AsSlot() { return NULL; }
// True if the node is simple enough for us to inline calls containing it.
virtual bool IsInlineable() const = 0;
DECLARE_NODE_TYPE(VariableProxy)
- // Type testing & conversion
- Variable* AsVariable() { return (this == NULL) ? NULL : var_; }
-
virtual bool IsValidLeftHandSide() {
return var_ == NULL ? true : var_->IsValidLeftHandSide();
}
return !is_this() && name().is_identical_to(n);
}
- bool IsArguments() {
- Variable* variable = AsVariable();
- return (variable == NULL) ? false : variable->is_arguments();
- }
+ bool IsArguments() { return var_ != NULL && var_->is_arguments(); }
Handle<String> name() const { return name_; }
Variable* var() const { return var_; }
};
-class Slot: public Expression {
- public:
- enum Type {
- // A slot in the parameter section on the stack. index() is
- // the parameter index, counting left-to-right, starting at 0.
- PARAMETER,
-
- // A slot in the local section on the stack. index() is
- // the variable index in the stack frame, starting at 0.
- LOCAL,
-
- // An indexed slot in a heap context. index() is the
- // variable index in the context object on the heap,
- // starting at 0. var()->scope() is the corresponding
- // scope.
- CONTEXT,
-
- // A named slot in a heap context. var()->name() is the
- // variable name in the context object on the heap,
- // with lookup starting at the current context. index()
- // is invalid.
- LOOKUP
- };
-
- Slot(Isolate* isolate, Variable* var, Type type, int index)
- : Expression(isolate), var_(var), type_(type), index_(index) {
- ASSERT(var != NULL);
- }
-
- virtual void Accept(AstVisitor* v);
-
- virtual Slot* AsSlot() { return this; }
-
- bool IsStackAllocated() { return type_ == PARAMETER || type_ == LOCAL; }
-
- // Accessors
- Variable* var() const { return var_; }
- Type type() const { return type_; }
- int index() const { return index_; }
- bool is_arguments() const { return var_->is_arguments(); }
- virtual bool IsInlineable() const;
-
- private:
- Variable* var_;
- Type type_;
- int index_;
-};
-
-
class Property: public Expression {
public:
Property(Isolate* isolate,
void SetStackOverflow() { stack_overflow_ = true; }
void ClearStackOverflow() { stack_overflow_ = false; }
- // Nodes not appearing in the AST, including slots.
- virtual void VisitSlot(Slot* node) { UNREACHABLE(); }
-
// Individual AST nodes.
#define DEF_VISIT(type) \
virtual void Visit##type(type* node) = 0;
void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
// If assigning to a property (including a global property) the assignment is
// breakable.
- Variable* var = expr->target()->AsVariableProxy()->AsVariable();
+ VariableProxy* proxy = expr->target()->AsVariableProxy();
Property* prop = expr->target()->AsProperty();
- if (prop != NULL || (var != NULL && var->is_global())) {
+ if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
is_breakable_ = true;
return;
}
}
-int FullCodeGenerator::SlotOffset(Slot* slot) {
- ASSERT(slot != NULL);
- // Offset is negative because higher indexes are at lower addresses.
- int offset = -slot->index() * kPointerSize;
- // Adjust by a (parameter or local) base offset.
- switch (slot->type()) {
- case Slot::PARAMETER:
- offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
- break;
- case Slot::LOCAL:
- offset += JavaScriptFrameConstants::kLocal0Offset;
- break;
- case Slot::CONTEXT:
- case Slot::LOOKUP:
- UNREACHABLE();
- }
- return offset;
-}
-
-
bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
// Inline smi case inside loops, but not division and modulo which
// are too complicated and take up too much space.
void FullCodeGenerator::VisitDeclarations(
ZoneList<Declaration*>* declarations) {
int length = declarations->length();
- int globals = 0;
+ int global_count = 0;
for (int i = 0; i < length; i++) {
Declaration* decl = declarations->at(i);
- Variable* var = decl->proxy()->var();
- Slot* slot = var->AsSlot();
-
- // If it was not possible to allocate the variable at compile
- // time, we need to "declare" it at runtime to make sure it
- // actually exists in the local context.
- if ((slot != NULL && slot->type() == Slot::LOOKUP) || !var->is_global()) {
- VisitDeclaration(decl);
- } else {
- // Count global variables and functions for later processing
- globals++;
- }
+ EmitDeclaration(decl->proxy(), decl->mode(), decl->fun(), &global_count);
}
- // Compute array of global variable and function declarations.
- // Do nothing in case of no declared global functions or variables.
- if (globals > 0) {
+ // Batch declare global functions and variables.
+ if (global_count > 0) {
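+ // Each global declaration contributes a (name, function-or-value) pair.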
Handle<FixedArray> array =
- isolate()->factory()->NewFixedArray(2 * globals, TENURED);
+ isolate()->factory()->NewFixedArray(2 * global_count, TENURED);
for (int j = 0, i = 0; i < length; i++) {
Declaration* decl = declarations->at(i);
Variable* var = decl->proxy()->var();
- Slot* slot = var->AsSlot();
- if ((slot == NULL || slot->type() != Slot::LOOKUP) && var->is_global()) {
+ if (var->IsUnallocated()) {
array->set(j++, *(var->name()));
if (decl->fun() == NULL) {
if (var->mode() == Variable::CONST) {
}
}
// Invoke the platform-dependent code generator to do the actual
- // declaration the global variables and functions.
+ // declaration of the global functions and variables.
DeclareGlobals(array);
}
}
// with a GC-safe value.
void ClearAccumulator();
- // Compute the frame pointer relative offset for a given local or
- // parameter slot.
- int SlotOffset(Slot* slot);
-
// Determine whether or not to inline the smi case for the given
// operation.
bool ShouldInlineSmiCase(Token::Value op);
Label* fall_through);
#endif // V8_TARGET_ARCH_MIPS
- void Move(Slot* dst, Register source, Register scratch1, Register scratch2);
- void Move(Register dst, Slot* source);
-
- // Return an operand used to read/write to a known (ie, non-LOOKUP) slot.
- // May emit code to traverse the context chain, destroying the scratch
- // register.
- MemOperand EmitSlotSearch(Slot* slot, Register scratch);
+ // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
+ // a register. Emits a context chain walk if necessary (as does SetVar),
+ // so avoid calling both on the same variable.
+ void GetVar(Register destination, Variable* var);
+
+ // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable. If it's in
+ // the context, the write barrier will be emitted and source, scratch0,
+ // scratch1 will be clobbered. Emits a context chain walk if necessary
+ // (as does GetVar), so avoid calling both on the same variable.
+ void SetVar(Variable* var,
+ Register source,
+ Register scratch0,
+ Register scratch1);
+
+ // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
+ // variable. Writing does not need the write barrier.
+ MemOperand StackOperand(Variable* var);
+
+ // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
+ // variable. May emit code to traverse the context chain, loading the
+ // found context into the scratch register. Writing to this operand will
+ // need the write barrier if location is CONTEXT.
+ MemOperand VarOperand(Variable* var, Register scratch);
// Forward the bailout responsibility for the given expression to
// the next child visited (which must be in a test context).
// declaration. Functions have an initial value.
void EmitDeclaration(VariableProxy* proxy,
Variable::Mode mode,
- FunctionLiteral* function);
+ FunctionLiteral* function,
+ int* global_count);
// Platform-specific code for checking the stack limit at the back edge of
// a loop.
#undef EMIT_INLINE_RUNTIME_CALL
// Platform-specific code for loading variables.
- void EmitLoadGlobalSlotCheckExtensions(Slot* slot,
- TypeofState typeof_state,
- Label* slow);
- MemOperand ContextSlotOperandCheckExtensions(Slot* slot, Label* slow);
- void EmitDynamicLoadFromSlotFastCase(Slot* slot,
- TypeofState typeof_state,
- Label* slow,
- Label* done);
+ void EmitLoadGlobalCheckExtensions(Variable* var,
+ TypeofState typeof_state,
+ Label* slow);
+ MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
+ void EmitDynamicLookupFastCase(Variable* var,
+ TypeofState typeof_state,
+ Label* slow,
+ Label* done);
void EmitVariableLoad(VariableProxy* proxy);
enum ResolveEvalFlag {
// this expression context.
virtual void Plug(bool flag) const = 0;
- // Emit code to convert a pure value (in a register, slot, as a literal,
- // or on top of the stack) into the result expected according to this
- // expression context.
+ // Emit code to convert a pure value (in a register, known variable
+ // location, as a literal, or on top of the stack) into the result
+ // expected according to this expression context.
virtual void Plug(Register reg) const = 0;
- virtual void Plug(Slot* slot) const = 0;
+ virtual void Plug(Variable* var) const = 0;
virtual void Plug(Handle<Object> lit) const = 0;
virtual void Plug(Heap::RootListIndex index) const = 0;
virtual void PlugTOS() const = 0;
virtual void Plug(bool flag) const;
virtual void Plug(Register reg) const;
virtual void Plug(Label* materialize_true, Label* materialize_false) const;
- virtual void Plug(Slot* slot) const;
+ virtual void Plug(Variable* var) const;
virtual void Plug(Handle<Object> lit) const;
virtual void Plug(Heap::RootListIndex) const;
virtual void PlugTOS() const;
virtual void Plug(bool flag) const;
virtual void Plug(Register reg) const;
virtual void Plug(Label* materialize_true, Label* materialize_false) const;
- virtual void Plug(Slot* slot) const;
+ virtual void Plug(Variable* var) const;
virtual void Plug(Handle<Object> lit) const;
virtual void Plug(Heap::RootListIndex) const;
virtual void PlugTOS() const;
virtual void Plug(bool flag) const;
virtual void Plug(Register reg) const;
virtual void Plug(Label* materialize_true, Label* materialize_false) const;
- virtual void Plug(Slot* slot) const;
+ virtual void Plug(Variable* var) const;
virtual void Plug(Handle<Object> lit) const;
virtual void Plug(Heap::RootListIndex) const;
virtual void PlugTOS() const;
virtual void Plug(bool flag) const;
virtual void Plug(Register reg) const;
virtual void Plug(Label* materialize_true, Label* materialize_false) const;
- virtual void Plug(Slot* slot) const;
+ virtual void Plug(Variable* var) const;
virtual void Plug(Handle<Object> lit) const;
virtual void Plug(Heap::RootListIndex) const;
virtual void PlugTOS() const;
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- Variable* variable = expr->AsVariable();
- if (variable == NULL) {
- return Bailout("reference to rewritten variable");
- } else if (variable->mode() == Variable::LET) {
+ Variable* variable = expr->var();
+ if (variable->mode() == Variable::LET) {
return Bailout("reference to let variable");
- } else if (variable->IsStackAllocated()) {
- HValue* value = environment()->Lookup(variable);
- if (variable->mode() == Variable::CONST &&
- value == graph()->GetConstantHole()) {
- return Bailout("reference to uninitialized const variable");
- }
- return ast_context()->ReturnValue(value);
- } else if (variable->IsContextSlot()) {
- if (variable->mode() == Variable::CONST) {
- return Bailout("reference to const context slot");
- }
- HValue* context = BuildContextChainWalk(variable);
- int index = variable->AsSlot()->index();
- HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, index);
- return ast_context()->ReturnInstruction(instr, expr->id());
- } else if (variable->is_global()) {
- LookupResult lookup;
- GlobalPropertyAccess type = LookupGlobalProperty(variable, &lookup, false);
+ }
+ switch (variable->location()) {
+ case Variable::UNALLOCATED: {
+ LookupResult lookup;
+ GlobalPropertyAccess type = LookupGlobalProperty(variable, &lookup, false);
+
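+ // An access-checked global object cannot safely be read through its
+ // property cell; fall back to the generic load.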
+ if (type == kUseCell &&
+ info()->global_object()->IsAccessCheckNeeded()) {
+ type = kUseGeneric;
+ }
- if (type == kUseCell &&
- info()->global_object()->IsAccessCheckNeeded()) {
- type = kUseGeneric;
+ if (type == kUseCell) {
+ Handle<GlobalObject> global(info()->global_object());
+ Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
+ bool check_hole = !lookup.IsDontDelete() || lookup.IsReadOnly();
+ HLoadGlobalCell* instr = new(zone()) HLoadGlobalCell(cell, check_hole);
+ return ast_context()->ReturnInstruction(instr, expr->id());
+ } else {
+ HValue* context = environment()->LookupContext();
+ HGlobalObject* global_object = new(zone()) HGlobalObject(context);
+ AddInstruction(global_object);
+ HLoadGlobalGeneric* instr =
+ new(zone()) HLoadGlobalGeneric(context,
+ global_object,
+ variable->name(),
+ ast_context()->is_for_typeof());
+ instr->set_position(expr->position());
+ return ast_context()->ReturnInstruction(instr, expr->id());
+ }
}
- if (type == kUseCell) {
- Handle<GlobalObject> global(info()->global_object());
- Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(&lookup));
- bool check_hole = !lookup.IsDontDelete() || lookup.IsReadOnly();
- HLoadGlobalCell* instr = new(zone()) HLoadGlobalCell(cell, check_hole);
- return ast_context()->ReturnInstruction(instr, expr->id());
- } else {
- HValue* context = environment()->LookupContext();
- HGlobalObject* global_object = new(zone()) HGlobalObject(context);
- AddInstruction(global_object);
- HLoadGlobalGeneric* instr =
- new(zone()) HLoadGlobalGeneric(context,
- global_object,
- variable->name(),
- ast_context()->is_for_typeof());
- instr->set_position(expr->position());
- ASSERT(instr->HasSideEffects());
+ case Variable::PARAMETER:
+ case Variable::LOCAL: {
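+ // Stack-allocated variables are tracked directly in the HEnvironment.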
+ HValue* value = environment()->Lookup(variable);
+ if (variable->mode() == Variable::CONST &&
+ value == graph()->GetConstantHole()) {
+ return Bailout("reference to uninitialized const variable");
+ }
+ return ast_context()->ReturnValue(value);
+ }
+
+ case Variable::CONTEXT: {
+ if (variable->mode() == Variable::CONST) {
+ return Bailout("reference to const context slot");
+ }
+ HValue* context = BuildContextChainWalk(variable);
+ HLoadContextSlot* instr =
+ new(zone()) HLoadContextSlot(context, variable->index());
return ast_context()->ReturnInstruction(instr, expr->id());
}
- } else {
- return Bailout("reference to a variable which requires dynamic lookup");
+
+ case Variable::LOOKUP:
+ return Bailout("reference to a variable which requires dynamic lookup");
}
}
void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
Expression* target = expr->target();
VariableProxy* proxy = target->AsVariableProxy();
- Variable* var = proxy->AsVariable();
Property* prop = target->AsProperty();
- ASSERT(var == NULL || prop == NULL);
+ ASSERT(proxy == NULL || prop == NULL);
// We have a second position recorded in the FullCodeGenerator to have
// type feedback for the binary operation.
BinaryOperation* operation = expr->binary_operation();
- if (var != NULL) {
- if (var->mode() == Variable::CONST ||
- var->mode() == Variable::LET) {
+ if (proxy != NULL) {
+ Variable* var = proxy->var();
+ if (var->mode() == Variable::CONST || var->mode() == Variable::LET) {
return Bailout("unsupported let or const compound assignment");
}
CHECK_ALIVE(VisitForValue(operation));
- if (var->is_global()) {
- HandleGlobalVariableAssignment(var,
- Top(),
- expr->position(),
- expr->AssignmentId());
- } else if (var->IsStackAllocated()) {
- Bind(var, Top());
- } else if (var->IsContextSlot()) {
- // Bail out if we try to mutate a parameter value in a function using
- // the arguments object. We do not (yet) correctly handle the
- // arguments property of the function.
- if (info()->scope()->arguments() != NULL) {
- // Parameters will rewrite to context slots. We have no direct way
- // to detect that the variable is a parameter.
- int count = info()->scope()->num_parameters();
- for (int i = 0; i < count; ++i) {
- if (var == info()->scope()->parameter(i)) {
- Bailout("assignment to parameter, function uses arguments object");
+ switch (var->location()) {
+ case Variable::UNALLOCATED:
+ HandleGlobalVariableAssignment(var,
+ Top(),
+ expr->position(),
+ expr->AssignmentId());
+ break;
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ Bind(var, Top());
+ break;
+
+ case Variable::CONTEXT: {
+ // Bail out if we try to mutate a parameter value in a function
+ // using the arguments object. We do not (yet) correctly handle the
+ // arguments property of the function.
+ if (info()->scope()->arguments() != NULL) {
+ // Parameters will be allocated to context slots. We have no
+ // direct way to detect that the variable is a parameter so we do
+ // a linear search of the parameter variables.
+ int count = info()->scope()->num_parameters();
+ for (int i = 0; i < count; ++i) {
+ if (var == info()->scope()->parameter(i)) {
+ Bailout("assignment to parameter, function uses arguments object");
+ }
}
}
+
+ HValue* context = BuildContextChainWalk(var);
+ HStoreContextSlot* instr =
+ new(zone()) HStoreContextSlot(context, var->index(), Top());
+ AddInstruction(instr);
+ if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
+ break;
}
- HValue* context = BuildContextChainWalk(var);
- int index = var->AsSlot()->index();
- HStoreContextSlot* instr =
- new(zone()) HStoreContextSlot(context, index, Top());
- AddInstruction(instr);
- if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
- } else {
- return Bailout("compound assignment to lookup slot");
+ case Variable::LOOKUP:
+ return Bailout("compound assignment to lookup slot");
}
return ast_context()->ReturnValue(Pop());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
VariableProxy* proxy = expr->target()->AsVariableProxy();
- Variable* var = proxy->AsVariable();
Property* prop = expr->target()->AsProperty();
- ASSERT(var == NULL || prop == NULL);
+ ASSERT(proxy == NULL || prop == NULL);
if (expr->is_compound()) {
HandleCompoundAssignment(expr);
return;
}
- if (var != NULL) {
+ if (prop != NULL) {
+ HandlePropertyAssignment(expr);
+ } else if (proxy != NULL) {
+ Variable* var = proxy->var();
if (var->mode() == Variable::CONST) {
if (expr->op() != Token::INIT_CONST) {
return Bailout("non-initializer assignment to const");
if (proxy->IsArguments()) return Bailout("assignment to arguments");
// Handle the assignment.
- if (var->IsStackAllocated()) {
- // We do not allow the arguments object to occur in a context where it
- // may escape, but assignments to stack-allocated locals are
- // permitted.
- CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
- HValue* value = Pop();
- Bind(var, value);
- return ast_context()->ReturnValue(value);
+ switch (var->location()) {
+ case Variable::UNALLOCATED:
+ CHECK_ALIVE(VisitForValue(expr->value()));
+ HandleGlobalVariableAssignment(var,
+ Top(),
+ expr->position(),
+ expr->AssignmentId());
+ return ast_context()->ReturnValue(Pop());
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL: {
+ // We do not allow the arguments object to occur in a context where it
+ // may escape, but assignments to stack-allocated locals are
+ // permitted.
+ CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
+ HValue* value = Pop();
+ Bind(var, value);
+ return ast_context()->ReturnValue(value);
+ }
- } else if (var->IsContextSlot()) {
- ASSERT(var->mode() != Variable::CONST);
- // Bail out if we try to mutate a parameter value in a function using
- // the arguments object. We do not (yet) correctly handle the
- // arguments property of the function.
- if (info()->scope()->arguments() != NULL) {
- // Parameters will rewrite to context slots. We have no direct way
- // to detect that the variable is a parameter.
- int count = info()->scope()->num_parameters();
- for (int i = 0; i < count; ++i) {
- if (var == info()->scope()->parameter(i)) {
- Bailout("assignment to parameter, function uses arguments object");
+ case Variable::CONTEXT: {
+ ASSERT(var->mode() != Variable::CONST);
+ // Bail out if we try to mutate a parameter value in a function using
+ // the arguments object. We do not (yet) correctly handle the
+ // arguments property of the function.
+ if (info()->scope()->arguments() != NULL) {
+ // Parameters will rewrite to context slots. We have no direct way
+ // to detect that the variable is a parameter.
+ int count = info()->scope()->num_parameters();
+ for (int i = 0; i < count; ++i) {
+ if (var == info()->scope()->parameter(i)) {
+ return Bailout("assignment to parameter in arguments object");
+ }
}
}
- }
-
- CHECK_ALIVE(VisitForValue(expr->value()));
- HValue* context = BuildContextChainWalk(var);
- int index = var->AsSlot()->index();
- HStoreContextSlot* instr =
- new(zone()) HStoreContextSlot(context, index, Top());
- AddInstruction(instr);
- if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
- return ast_context()->ReturnValue(Pop());
- } else if (var->is_global()) {
- CHECK_ALIVE(VisitForValue(expr->value()));
- HandleGlobalVariableAssignment(var,
- Top(),
- expr->position(),
- expr->AssignmentId());
- return ast_context()->ReturnValue(Pop());
+ CHECK_ALIVE(VisitForValue(expr->value()));
+ HValue* context = BuildContextChainWalk(var);
+ HStoreContextSlot* instr =
+ new(zone()) HStoreContextSlot(context, var->index(), Top());
+ AddInstruction(instr);
+ if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
+ return ast_context()->ReturnValue(Pop());
+ }
- } else {
- return Bailout("assignment to LOOKUP or const CONTEXT variable");
+ case Variable::LOOKUP:
+ return Bailout("assignment to LOOKUP variable");
}
-
- } else if (prop != NULL) {
- HandlePropertyAssignment(expr);
} else {
return Bailout("invalid left-hand side in assignment");
}
}
} else {
- Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
- bool global_call = (var != NULL) && var->is_global() && !var->is_this();
+ VariableProxy* proxy = expr->expression()->AsVariableProxy();
+ // FIXME.
+ bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
if (global_call) {
+ Variable* var = proxy->var();
bool known_global_function = false;
// If there is a global property cell for the name at compile time and
// access check is not enabled we assume that the function will not change
void HGraphBuilder::VisitDelete(UnaryOperation* expr) {
Property* prop = expr->expression()->AsProperty();
- Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
- if (prop == NULL && var == NULL) {
- // Result of deleting non-property, non-variable reference is true.
- // Evaluate the subexpression for side effects.
- CHECK_ALIVE(VisitForEffect(expr->expression()));
- return ast_context()->ReturnValue(graph()->GetConstantTrue());
- } else if (var != NULL &&
- !var->is_global() &&
- var->AsSlot() != NULL &&
- var->AsSlot()->type() != Slot::LOOKUP) {
- // Result of deleting non-global, non-dynamic variables is false.
- // The subexpression does not have side effects.
- return ast_context()->ReturnValue(graph()->GetConstantFalse());
- } else if (prop != NULL) {
+ VariableProxy* proxy = expr->expression()->AsVariableProxy();
+ if (prop != NULL) {
CHECK_ALIVE(VisitForValue(prop->obj()));
CHECK_ALIVE(VisitForValue(prop->key()));
HValue* key = Pop();
HValue* context = environment()->LookupContext();
HDeleteProperty* instr = new(zone()) HDeleteProperty(context, obj, key);
return ast_context()->ReturnInstruction(instr, expr->id());
- } else if (var->is_global()) {
- Bailout("delete with global variable");
+ } else if (proxy != NULL) {
+ Variable* var = proxy->var();
+ if (var->IsUnallocated()) {
+ Bailout("delete with global variable");
+ } else if (var->IsStackAllocated() || var->IsContextSlot()) {
+ // Result of deleting non-global variables is false. 'this' is not
+ // really a variable, though we implement it as one. The
+ // subexpression does not have side effects.
+ HValue* value = var->is_this()
+ ? graph()->GetConstantTrue()
+ : graph()->GetConstantFalse();
+ return ast_context()->ReturnValue(value);
+ } else {
+ Bailout("delete with non-global variable");
+ }
} else {
- Bailout("delete with non-global variable");
+ // Result of deleting non-property, non-variable reference is true.
+ // Evaluate the subexpression for side effects.
+ CHECK_ALIVE(VisitForEffect(expr->expression()));
+ return ast_context()->ReturnValue(graph()->GetConstantTrue());
}
}
ASSERT(current_block()->HasPredecessor());
Expression* target = expr->expression();
VariableProxy* proxy = target->AsVariableProxy();
- Variable* var = proxy->AsVariable();
Property* prop = target->AsProperty();
- if (var == NULL && prop == NULL) {
+ if (proxy == NULL && prop == NULL) {
return Bailout("invalid lhs in count operation");
}
HValue* input = NULL; // ToNumber(original_input).
HValue* after = NULL; // The result after incrementing or decrementing.
- if (var != NULL) {
+ if (proxy != NULL) {
+ Variable* var = proxy->var();
if (var->mode() == Variable::CONST) {
return Bailout("unsupported count operation with const");
}
input = returns_original_input ? Top() : Pop();
Push(after);
- if (var->is_global()) {
- HandleGlobalVariableAssignment(var,
- after,
- expr->position(),
- expr->AssignmentId());
- } else if (var->IsStackAllocated()) {
- Bind(var, after);
- } else if (var->IsContextSlot()) {
- // Bail out if we try to mutate a parameter value in a function using
- // the arguments object. We do not (yet) correctly handle the
- // arguments property of the function.
- if (info()->scope()->arguments() != NULL) {
- // Parameters will rewrite to context slots. We have no direct way
- // to detect that the variable is a parameter.
- int count = info()->scope()->num_parameters();
- for (int i = 0; i < count; ++i) {
- if (var == info()->scope()->parameter(i)) {
- Bailout("assignment to parameter, function uses arguments object");
+ switch (var->location()) {
+ case Variable::UNALLOCATED:
+ HandleGlobalVariableAssignment(var,
+ after,
+ expr->position(),
+ expr->AssignmentId());
+ break;
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ Bind(var, after);
+ break;
+
+ case Variable::CONTEXT: {
+ // Bail out if we try to mutate a parameter value in a function
+ // using the arguments object. We do not (yet) correctly handle the
+ // arguments property of the function.
+ if (info()->scope()->arguments() != NULL) {
+ // Parameters will rewrite to context slots. We have no direct
+ // way to detect that the variable is a parameter so we use a
+ // linear search of the parameter list.
+ int count = info()->scope()->num_parameters();
+ for (int i = 0; i < count; ++i) {
+ if (var == info()->scope()->parameter(i)) {
+ return Bailout("assignment to parameter in arguments object");
+ }
}
}
+
+ HValue* context = BuildContextChainWalk(var);
+ HStoreContextSlot* instr =
+ new(zone()) HStoreContextSlot(context, var->index(), after);
+ AddInstruction(instr);
+ if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
+ break;
}
- HValue* context = BuildContextChainWalk(var);
- int index = var->AsSlot()->index();
- HStoreContextSlot* instr =
- new(zone()) HStoreContextSlot(context, index, after);
- AddInstruction(instr);
- if (instr->HasSideEffects()) AddSimulate(expr->AssignmentId());
- } else {
- return Bailout("lookup variable in count operation");
+ case Variable::LOOKUP:
+ return Bailout("lookup variable in count operation");
}
} else {
// residing in new space. If it is we assume that the function will stay the
// same.
Handle<JSFunction> target = Handle<JSFunction>::null();
- Variable* var = expr->right()->AsVariableProxy()->AsVariable();
- bool global_function = (var != NULL) && var->is_global() && !var->is_this();
+ VariableProxy* proxy = expr->right()->AsVariableProxy();
+ bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
if (global_function &&
info()->has_global_object() &&
!info()->global_object()->IsAccessCheckNeeded()) {
- Handle<String> name = var->name();
+ Handle<String> name = proxy->name();
Handle<GlobalObject> global(info()->global_object());
LookupResult lookup;
global->Lookup(*name, &lookup);
void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
- Variable::Mode mode,
- FunctionLiteral* function) {
+ Variable::Mode mode,
+ FunctionLiteral* function) {
if (mode == Variable::LET) return Bailout("unsupported let declaration");
Variable* var = proxy->var();
- Slot* slot = var->AsSlot();
- ASSERT(slot != NULL);
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- if (mode == Variable::CONST) {
- environment()->Bind(var, graph()->GetConstantHole());
- } else if (function != NULL) {
- VisitForValue(function);
- HValue* function_value = Pop();
- environment()->Bind(var, function_value);
- }
- break;
- case Slot::CONTEXT: {
- HValue* context = environment()->LookupContext();
- if (mode == Variable::CONST) {
- HStoreContextSlot* store =
- new HStoreContextSlot(context,
- slot->index(),
- graph()->GetConstantHole());
- AddInstruction(store);
- if (store->HasSideEffects()) AddSimulate(proxy->id());
- } else if (function != NULL) {
- VisitForValue(function);
- HValue* function_value = Pop();
- HStoreContextSlot* store =
- new HStoreContextSlot(context,
- slot->index(),
- function_value);
- AddInstruction(store);
- if (store->HasSideEffects()) AddSimulate(proxy->id());
+ switch (var->location()) {
+ case Variable::UNALLOCATED:
+ return Bailout("unsupported global declaration");
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::CONTEXT:
+ if (mode == Variable::CONST || function != NULL) {
+ HValue* value = NULL;
+ if (mode == Variable::CONST) {
+ value = graph()->GetConstantHole();
+ } else {
+ VisitForValue(function);
+ value = Pop();
+ }
+ if (var->IsContextSlot()) {
+ HValue* context = environment()->LookupContext();
+ HStoreContextSlot* store =
+ new HStoreContextSlot(context, var->index(), value);
+ AddInstruction(store);
+ if (store->HasSideEffects()) AddSimulate(proxy->id());
+ } else {
+ environment()->Bind(var, value);
+ }
}
break;
- }
- case Slot::LOOKUP:
+ case Variable::LOOKUP:
return Bailout("unsupported lookup slot in declaration");
}
}
// by 1 (receiver is parameter index -1 but environment index 0).
// Stack-allocated local indices are shifted by the number of parameters.
int IndexFor(Variable* variable) const {
- Slot* slot = variable->AsSlot();
- ASSERT(slot != NULL && slot->IsStackAllocated());
- int shift = (slot->type() == Slot::PARAMETER)
+ ASSERT(variable->IsStackAllocated());
+ int shift = variable->IsParameter()
? 1
: parameter_count_ + specials_count_;
- return slot->index() + shift;
+ return variable->index() + shift;
}
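The shift arithmetic is worth seeing in isolation. A minimal standalone sketch follows; the helper name is invented, and the concrete counts (the receiver included in the parameter count, one special slot for the context) are illustrative assumptions:

// Hypothetical model of IndexFor(); not taken from the patched sources.
int EnvironmentIndexFor(bool is_parameter, int var_index,
                        int parameter_count, int specials_count) {
  // Parameters shift by 1: the receiver has variable index -1 but
  // environment index 0. Locals follow all parameters and specials.
  int shift = is_parameter ? 1 : parameter_count + specials_count;
  return var_index + shift;
}
// With parameter_count == 3 (receiver plus two parameters) and
// specials_count == 1: receiver -> 0, parameter 0 -> 1, parameter 1 -> 2,
// local 0 -> 4, local 1 -> 5.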
Handle<JSFunction> closure_;
namespace v8 {
namespace internal {
-
#define __ ACCESS_MASM(masm_)
// Copy parameters into context if necessary.
int num_parameters = info->scope()->num_parameters();
for (int i = 0; i < num_parameters; i++) {
- Slot* slot = scope()->parameter(i)->AsSlot();
- if (slot != NULL && slot->type() == Slot::CONTEXT) {
+ Variable* var = scope()->parameter(i);
+ if (var->IsContextSlot()) {
int parameter_offset = StandardFrameConstants::kCallerSPOffset +
(num_parameters - 1 - i) * kPointerSize;
// Load parameter from stack.
__ mov(eax, Operand(ebp, parameter_offset));
// Store it in the context.
- int context_offset = Context::SlotOffset(slot->index());
+ int context_offset = Context::SlotOffset(var->index());
__ mov(Operand(esi, context_offset), eax);
// Update the write barrier. This clobbers all involved
// registers, so we have to use a third register to avoid

ArgumentsAccessStub stub(type);
__ CallStub(&stub);
- Move(arguments->AsSlot(), eax, ebx, edx);
+ SetVar(arguments, eax, ebx, edx);
}
if (FLAG_trace) {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- EmitDeclaration(scope()->function(), Variable::CONST, NULL);
+ int ignored = 0;
+ EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored);
}
VisitDeclarations(scope()->declarations());
}
}
-void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
+void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}
-void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
- MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
- __ mov(result_register(), slot_operand);
+void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
+ codegen()->GetVar(result_register(), var);
}
-void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
- MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
+void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
+ MemOperand operand = codegen()->VarOperand(var, result_register());
// Memory operands can be pushed directly.
- __ push(slot_operand);
+ __ push(operand);
codegen()->increment_stack_height();
}
-void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
+void FullCodeGenerator::TestContext::Plug(Variable* var) const {
// For simplicity we always test the accumulator register.
- codegen()->Move(result_register(), slot);
+ codegen()->GetVar(result_register(), var);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(this);
}
}
-MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- return Operand(ebp, SlotOffset(slot));
- case Slot::CONTEXT: {
- int context_chain_length =
- scope()->ContextChainLength(slot->var()->scope());
- __ LoadContext(scratch, context_chain_length);
- return ContextOperand(scratch, slot->index());
- }
- case Slot::LOOKUP:
- UNREACHABLE();
+MemOperand FullCodeGenerator::StackOperand(Variable* var) {
+ ASSERT(var->IsStackAllocated());
+ // Offset is negative because higher indexes are at lower addresses.
+ int offset = -var->index() * kPointerSize;
+ // Adjust by a (parameter or local) base offset.
+ if (var->IsParameter()) {
+ offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
+ } else {
+ offset += JavaScriptFrameConstants::kLocal0Offset;
}
- UNREACHABLE();
- return Operand(eax, 0);
+ return Operand(ebp, offset);
}
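The branch above folds the old PARAMETER/LOCAL offset calculations into one place. A minimal standalone sketch of the arithmetic, under the stated assumptions that kPointerSize is 4 and JavaScriptFrameConstants::kLocal0Offset is -3 * kPointerSize:

// Hypothetical model of StackOperand()'s offset math; illustrative only.
int StackOffset(bool is_parameter, int var_index, int num_parameters) {
  int offset = -var_index * 4;             // assumes kPointerSize == 4
  if (is_parameter) {
    offset += (num_parameters + 1) * 4;    // parameters sit above ebp
  } else {
    offset += -3 * 4;                      // assumes kLocal0Offset == -12
  }
  return offset;
}
// For a function with two parameters this gives: receiver (index -1) at
// ebp+16, parameter 0 at ebp+12, parameter 1 at ebp+8, local 0 at ebp-12.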
-void FullCodeGenerator::Move(Register destination, Slot* source) {
- MemOperand location = EmitSlotSearch(source, destination);
- __ mov(destination, location);
+MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
+ ASSERT(var->IsContextSlot() || var->IsStackAllocated());
+ if (var->IsContextSlot()) {
+ int context_chain_length = scope()->ContextChainLength(var->scope());
+ __ LoadContext(scratch, context_chain_length);
+ return ContextOperand(scratch, var->index());
+ } else {
+ return StackOperand(var);
+ }
}
-void FullCodeGenerator::Move(Slot* dst,
- Register src,
- Register scratch1,
- Register scratch2) {
- ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
- ASSERT(!scratch1.is(src) && !scratch2.is(src));
- MemOperand location = EmitSlotSearch(dst, scratch1);
+void FullCodeGenerator::GetVar(Register dest, Variable* var) {
+ ASSERT(var->IsContextSlot() || var->IsStackAllocated());
+ MemOperand location = VarOperand(var, dest);
+ __ mov(dest, location);
+}
+
+
+void FullCodeGenerator::SetVar(Variable* var,
+ Register src,
+ Register scratch0,
+ Register scratch1) {
+ ASSERT(var->IsContextSlot() || var->IsStackAllocated());
+ ASSERT(!scratch0.is(src));
+ ASSERT(!scratch0.is(scratch1));
+ ASSERT(!scratch1.is(src));
+ MemOperand location = VarOperand(var, scratch0);
__ mov(location, src);
// Emit the write barrier code if the location is in the heap.
- if (dst->type() == Slot::CONTEXT) {
- int offset = Context::SlotOffset(dst->index());
- ASSERT(!scratch1.is(esi) && !src.is(esi) && !scratch2.is(esi));
- __ RecordWrite(scratch1, offset, src, scratch2);
+ if (var->IsContextSlot()) {
+ int offset = Context::SlotOffset(var->index());
+ ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
+ __ RecordWrite(scratch0, offset, src, scratch1);
}
}
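The write barrier is needed only in the context-slot case because that store writes a pointer into a heap-allocated Context object, and the generational collector has to be informed of such pointers; stores into the stack frame never need it. RecordWrite clobbers its register arguments, which is why SetVar takes two scratch registers and asserts that none of them aliases esi, the register holding the current context.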
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
Variable::Mode mode,
- FunctionLiteral* function) {
- Comment cmnt(masm_, "[ Declaration");
+ FunctionLiteral* function,
+ int* global_count) {
+ // If it was not possible to allocate the variable at compile time, we
+ // need to "declare" it at runtime to make sure it actually exists in the
+ // local context.
Variable* variable = proxy->var();
- ASSERT(variable != NULL); // Must have been resolved.
- Slot* slot = variable->AsSlot();
- ASSERT(slot != NULL);
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
+ switch (variable->location()) {
+ case Variable::UNALLOCATED:
+ ++(*global_count);
+ break;
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
if (function != NULL) {
+ Comment cmnt(masm_, "[ Declaration");
VisitForAccumulatorValue(function);
- __ mov(Operand(ebp, SlotOffset(slot)), result_register());
+ __ mov(StackOperand(variable), result_register());
} else if (mode == Variable::CONST || mode == Variable::LET) {
- __ mov(Operand(ebp, SlotOffset(slot)),
+ Comment cmnt(masm_, "[ Declaration");
+ __ mov(StackOperand(variable),
Immediate(isolate()->factory()->the_hole_value()));
}
break;
- case Slot::CONTEXT:
- // We bypass the general EmitSlotSearch because we know more about
- // this specific context.
-
+ case Variable::CONTEXT:
// The variable in the decl always resides in the current function
// context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
__ Check(not_equal, "Declaration in catch context.");
}
if (function != NULL) {
+ Comment cmnt(masm_, "[ Declaration");
VisitForAccumulatorValue(function);
- __ mov(ContextOperand(esi, slot->index()), result_register());
- int offset = Context::SlotOffset(slot->index());
+ __ mov(ContextOperand(esi, variable->index()), result_register());
+ int offset = Context::SlotOffset(variable->index());
__ mov(ebx, esi);
__ RecordWrite(ebx, offset, result_register(), ecx);
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
} else if (mode == Variable::CONST || mode == Variable::LET) {
- __ mov(ContextOperand(esi, slot->index()),
+ Comment cmnt(masm_, "[ Declaration");
+ __ mov(ContextOperand(esi, variable->index()),
Immediate(isolate()->factory()->the_hole_value()));
// No write barrier since the hole value is in old space.
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
}
break;
- case Slot::LOOKUP: {
+ case Variable::LOOKUP: {
+ Comment cmnt(masm_, "[ Declaration");
__ push(esi);
__ push(Immediate(variable->name()));
- // Declaration nodes are always introduced in one of two modes.
+ // Declaration nodes are always introduced in one of three modes.
ASSERT(mode == Variable::VAR ||
mode == Variable::CONST ||
mode == Variable::LET);
__ push(Immediate(isolate()->factory()->the_hole_value()));
increment_stack_height();
} else {
- __ push(Immediate(Smi::FromInt(0))); // No initial value!
+ __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
increment_stack_height();
}
__ CallRuntime(Runtime::kDeclareContextSlot, 4);
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
- EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
-}
+void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
}
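Taken together with the now-empty VisitDeclaration, the new global_count out-parameter suggests the intended flow: EmitDeclaration handles PARAMETER, LOCAL, CONTEXT and LOOKUP declarations directly and merely counts UNALLOCATED ones, so that the shared declarations walk can afterwards emit one batched DeclareGlobals call for all globals instead of declaring them one at a time. Illustratively, a top-level var is only counted here; a function declaration or the hole initialization for const/let is written straight into its stack or context slot (a plain var needs no code at all); and only dynamically declared names reach the kDeclareContextSlot runtime call.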
-void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
- Slot* slot,
- TypeofState typeof_state,
- Label* slow) {
+void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
+ TypeofState typeof_state,
+ Label* slow) {
Register context = esi;
Register temp = edx;
// All extension objects were empty and it is safe to use a global
// load IC call.
__ mov(eax, GlobalObjectOperand());
- __ mov(ecx, slot->var()->name());
+ __ mov(ecx, var->name());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
}
-MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
- Slot* slot,
- Label* slow) {
- ASSERT(slot->type() == Slot::CONTEXT);
+MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
+ Label* slow) {
+ ASSERT(var->IsContextSlot());
Register context = esi;
Register temp = ebx;
- for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
+ for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
if (s->num_heap_slots() > 0) {
if (s->calls_eval()) {
// Check that extension is NULL.
// This function is used only for loads, not stores, so it's safe to
// return an esi-based operand (the write barrier cannot be allowed to
// destroy the esi register).
- return ContextOperand(context, slot->index());
+ return ContextOperand(context, var->index());
}
-void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
- Slot* slot,
- TypeofState typeof_state,
- Label* slow,
- Label* done) {
+void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
+ TypeofState typeof_state,
+ Label* slow,
+ Label* done) {
// Generate fast-case code for variables that might be shadowed by
// eval-introduced variables. Eval is used a lot without
// introducing variables. In those cases, we do not want to
// perform a runtime call for all variables in the scope
// containing the eval.
- if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
- EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
+ if (var->mode() == Variable::DYNAMIC_GLOBAL) {
+ EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
__ jmp(done);
- } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
- Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
- Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
- if (potential_slot != NULL) {
- // Generate fast case for locals that rewrite to slots.
- __ mov(eax,
- ContextSlotOperandCheckExtensions(potential_slot, slow));
- if (potential_slot->var()->mode() == Variable::CONST) {
- __ cmp(eax, isolate()->factory()->the_hole_value());
- __ j(not_equal, done);
- __ mov(eax, isolate()->factory()->undefined_value());
- }
- __ jmp(done);
- } else if (rewrite != NULL) {
- // Generate fast case for calls of an argument function.
- Property* property = rewrite->AsProperty();
- if (property != NULL) {
- VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
- Literal* key_literal = property->key()->AsLiteral();
- if (obj_proxy != NULL &&
- key_literal != NULL &&
- obj_proxy->IsArguments() &&
- key_literal->handle()->IsSmi()) {
- // Load arguments object if there are no eval-introduced
- // variables. Then load the argument from the arguments
- // object using keyed load.
- __ mov(edx,
- ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
- slow));
- __ SafeSet(eax, Immediate(key_literal->handle()));
- Handle<Code> ic =
- isolate()->builtins()->KeyedLoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
- __ jmp(done);
- }
- }
+ } else if (var->mode() == Variable::DYNAMIC_LOCAL) {
+ Variable* local = var->local_if_not_shadowed();
+ __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
+ if (local->mode() == Variable::CONST) {
+ __ cmp(eax, isolate()->factory()->the_hole_value());
+ __ j(not_equal, done);
+ __ mov(eax, isolate()->factory()->undefined_value());
}
+ __ jmp(done);
}
}
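The fast case targets references that would bind statically were it not for an intervening eval. Illustratively, in function outer() { var x = 1; function inner(s) { eval(s); return x; } }, the x read inside inner is DYNAMIC_LOCAL: it refers to outer's context slot unless the eval happens to introduce a shadowing x. The emitted code therefore checks that no context on the chain has acquired an extension object and, when that holds, loads the slot directly (or, in the DYNAMIC_GLOBAL case, uses a global load IC); otherwise it falls through to the slow runtime lookup.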
SetSourcePosition(proxy->position());
Variable* var = proxy->var();
- // Three cases: non-this global variables, lookup slots, and all other
- // types of slots.
- Slot* slot = var->AsSlot();
- ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
-
- if (slot == NULL) {
- Comment cmnt(masm_, "Global variable");
- // Use inline caching. Variable name is passed in ecx and the global
- // object on the stack.
- __ mov(eax, GlobalObjectOperand());
- __ mov(ecx, var->name());
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
- context()->Plug(eax);
-
- } else if (slot->type() == Slot::LOOKUP) {
- Label done, slow;
-
- // Generate code for loading from variables potentially shadowed
- // by eval-introduced variables.
- EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
-
- __ bind(&slow);
- Comment cmnt(masm_, "Lookup slot");
- __ push(esi); // Context.
- __ push(Immediate(var->name()));
- __ CallRuntime(Runtime::kLoadContextSlot, 2);
- __ bind(&done);
+ // Three cases: global variables, lookup variables, and all other types of
+ // variables.
+ switch (var->location()) {
+ case Variable::UNALLOCATED: {
+ Comment cmnt(masm_, "Global variable");
+ // Use inline caching. Variable name is passed in ecx and the global
+ // object in eax.
+ __ mov(eax, GlobalObjectOperand());
+ __ mov(ecx, var->name());
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ context()->Plug(eax);
+ break;
+ }
- context()->Plug(eax);
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, var->IsContextSlot()
+ ? "Context variable"
+ : "Stack variable");
+ if (var->mode() != Variable::LET && var->mode() != Variable::CONST) {
+ context()->Plug(var);
+ } else {
+ // Let and const need a read barrier.
+ Label done;
+ GetVar(eax, var);
+ __ cmp(eax, isolate()->factory()->the_hole_value());
+ __ j(not_equal, &done, Label::kNear);
+ if (var->mode() == Variable::LET) {
+ __ push(Immediate(var->name()));
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ } else { // Variable::CONST
+ __ mov(eax, isolate()->factory()->undefined_value());
+ }
+ __ bind(&done);
+ context()->Plug(eax);
+ }
+ break;
+ }
- } else {
- Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
- ? "Context slot"
- : "Stack slot");
- if (var->mode() == Variable::CONST) {
- // Constants may be the hole value if they have not been initialized.
- // Unhole them.
- Label done;
- MemOperand slot_operand = EmitSlotSearch(slot, eax);
- __ mov(eax, slot_operand);
- __ cmp(eax, isolate()->factory()->the_hole_value());
- __ j(not_equal, &done, Label::kNear);
- __ mov(eax, isolate()->factory()->undefined_value());
- __ bind(&done);
- context()->Plug(eax);
- } else if (var->mode() == Variable::LET) {
- // Let bindings may be the hole value if they have not been initialized.
- // Throw a type error in this case.
- Label done;
- MemOperand slot_operand = EmitSlotSearch(slot, eax);
- __ mov(eax, slot_operand);
- __ cmp(eax, isolate()->factory()->the_hole_value());
- __ j(not_equal, &done, Label::kNear);
+ case Variable::LOOKUP: {
+ Label done, slow;
+ // Generate code for loading from variables potentially shadowed
+ // by eval-introduced variables.
+ EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
+ __ bind(&slow);
+ Comment cmnt(masm_, "Lookup variable");
+ __ push(esi); // Context.
__ push(Immediate(var->name()));
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ CallRuntime(Runtime::kLoadContextSlot, 2);
__ bind(&done);
context()->Plug(eax);
- } else {
- context()->Plug(slot);
+ break;
}
}
}
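The hole comparison in the let/const path is the read barrier the comment refers to. Illustratively, in { x; let x = 1; } the declaration code pre-fills x's slot with the hole, so the use before the initializer loads the hole and reaches the kThrowReferenceError call; under the legacy const semantics the analogous situation, reading c before const c = 1 has executed, silently produces undefined instead.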
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
- ASSERT(var != NULL);
- ASSERT(var->is_global() || var->AsSlot() != NULL);
-
- if (var->is_global()) {
- ASSERT(!var->is_this());
- // Assignment to a global variable. Use inline caching for the
- // assignment. Right-hand-side value is passed in eax, variable name in
- // ecx, and the global object on the stack.
+ if (var->IsUnallocated()) {
+ // Global var, const, or let.
__ mov(ecx, var->name());
__ mov(edx, GlobalObjectOperand());
Handle<Code> ic = is_strict_mode()
__ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
- // Like var declarations, const declarations are hoisted to function
- // scope. However, unlike var initializers, const initializers are able
- // to drill a hole to that function context, even from inside a 'with'
- // context. We thus bypass the normal static scope lookup.
- Slot* slot = var->AsSlot();
- Label skip;
- switch (slot->type()) {
- case Slot::PARAMETER:
- // No const parameters.
- UNREACHABLE();
- break;
- case Slot::LOCAL:
- __ mov(edx, Operand(ebp, SlotOffset(slot)));
- __ cmp(edx, isolate()->factory()->the_hole_value());
- __ j(not_equal, &skip);
- __ mov(Operand(ebp, SlotOffset(slot)), eax);
- break;
- case Slot::CONTEXT:
- case Slot::LOOKUP:
- __ push(eax);
- __ push(esi);
- __ push(Immediate(var->name()));
- __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
- break;
+ // Const initializers need a write barrier.
+ ASSERT(!var->IsParameter()); // No const parameters.
+ if (var->IsStackLocal()) {
+ Label skip;
+ __ mov(edx, StackOperand(var));
+ __ cmp(edx, isolate()->factory()->the_hole_value());
+ __ j(not_equal, &skip);
+ __ mov(StackOperand(var), eax);
+ __ bind(&skip);
+ } else {
+ ASSERT(var->IsContextSlot() || var->IsLookupSlot());
+ // Like var declarations, const declarations are hoisted to function
+ // scope. However, unlike var initializers, const initializers are
+ // able to drill a hole to that function context, even from inside a
+ // 'with' context. We thus bypass the normal static scope lookup for
+ // var->IsContextSlot().
+ __ push(eax);
+ __ push(esi);
+ __ push(Immediate(var->name()));
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
}
- __ bind(&skip);
} else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
- // Perform the assignment for non-const variables. Const assignments
- // are simply skipped.
- Slot* slot = var->AsSlot();
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL: {
- Label assign;
- // Check for an initialized let binding.
- __ mov(edx, Operand(ebp, SlotOffset(slot)));
- __ cmp(edx, isolate()->factory()->the_hole_value());
- __ j(not_equal, &assign);
- __ push(Immediate(var->name()));
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
- // Perform the assignment.
- __ bind(&assign);
- __ mov(Operand(ebp, SlotOffset(slot)), eax);
- break;
- }
-
- case Slot::CONTEXT: {
- // Let variables may be the hole value if they have not been
- // initialized. Throw a type error in this case.
- Label assign;
- MemOperand target = EmitSlotSearch(slot, ecx);
- // Check for an initialized let binding.
- __ mov(edx, target);
- __ cmp(edx, isolate()->factory()->the_hole_value());
- __ j(not_equal, &assign, Label::kNear);
- __ push(Immediate(var->name()));
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
- // Perform the assignment.
- __ bind(&assign);
- __ mov(target, eax);
- // The value of the assignment is in eax. RecordWrite clobbers its
- // register arguments.
+ // Non-initializing assignment to let variable needs a write barrier.
+ if (var->IsLookupSlot()) {
+ __ push(eax); // Value.
+ __ push(esi); // Context.
+ __ push(Immediate(var->name()));
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
+ } else {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
+ Label assign;
+ MemOperand location = VarOperand(var, ecx);
+ __ mov(edx, location);
+ __ cmp(edx, isolate()->factory()->the_hole_value());
+ __ j(not_equal, &assign, Label::kNear);
+ __ push(Immediate(var->name()));
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ bind(&assign);
+ __ mov(location, eax);
+ if (var->IsContextSlot()) {
__ mov(edx, eax);
- int offset = Context::SlotOffset(slot->index());
- __ RecordWrite(ecx, offset, edx, ebx);
- break;
+ __ RecordWrite(ecx, Context::SlotOffset(var->index()), edx, ebx);
}
-
- case Slot::LOOKUP:
- // Call the runtime for the assignment.
- __ push(eax); // Value.
- __ push(esi); // Context.
- __ push(Immediate(var->name()));
- __ push(Immediate(Smi::FromInt(strict_mode_flag())));
- __ CallRuntime(Runtime::kStoreContextSlot, 4);
- break;
}
- } else if (var->mode() != Variable::CONST) {
- // Perform the assignment for non-const variables. Const assignments
- // are simply skipped.
- Slot* slot = var->AsSlot();
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- if (FLAG_debug_code && op == Token::INIT_LET) {
- // Check for an uninitialized let binding.
- __ mov(edx, Operand(ebp, SlotOffset(slot)));
- __ cmp(edx, isolate()->factory()->the_hole_value());
- __ Check(equal, "Let binding re-initialization.");
- }
- // Perform the assignment.
- __ mov(Operand(ebp, SlotOffset(slot)), eax);
- break;
- case Slot::CONTEXT: {
- MemOperand target = EmitSlotSearch(slot, ecx);
- if (FLAG_debug_code && op == Token::INIT_LET) {
- // Check for an uninitialized let binding.
- __ mov(edx, target);
- __ cmp(edx, isolate()->factory()->the_hole_value());
- __ Check(equal, "Let binding re-initialization.");
- }
- // Perform the assignment and issue the write barrier.
- __ mov(target, eax);
- // The value of the assignment is in eax. RecordWrite clobbers its
- // register arguments.
+ } else if (var->mode() != Variable::CONST) {
+ // Assignment to var or initializing assignment to let.
+ if (var->IsStackAllocated() || var->IsContextSlot()) {
+ MemOperand location = VarOperand(var, ecx);
+ if (FLAG_debug_code && op == Token::INIT_LET) {
+ // Check for an uninitialized let binding.
+ __ mov(edx, location);
+ __ cmp(edx, isolate()->factory()->the_hole_value());
+ __ Check(equal, "Let binding re-initialization.");
+ }
+ // Perform the assignment.
+ __ mov(location, eax);
+ if (var->IsContextSlot()) {
__ mov(edx, eax);
- int offset = Context::SlotOffset(slot->index());
- __ RecordWrite(ecx, offset, edx, ebx);
- break;
+ __ RecordWrite(ecx, Context::SlotOffset(var->index()), edx, ebx);
}
-
- case Slot::LOOKUP:
- ASSERT(op != Token::INIT_LET);
- // Call the runtime for the assignment.
- __ push(eax); // Value.
- __ push(esi); // Context.
- __ push(Immediate(var->name()));
- __ push(Immediate(Smi::FromInt(strict_mode_flag())));
- __ CallRuntime(Runtime::kStoreContextSlot, 4);
- break;
+ } else {
+ ASSERT(var->IsLookupSlot());
+ __ push(eax); // Value.
+ __ push(esi); // Context.
+ __ push(Immediate(var->name()));
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
}
}
+ // Non-initializing assignments to consts are ignored.
}
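The 'drill a hole' remark covers initializations such as with (o) { const c = 1; } inside a function (illustrative): c is hoisted to the function scope, but its initializing store runs with a with-context on the chain, so it cannot be a simple static store. The kInitializeConstContextSlot runtime call walks past such intermediate contexts to the hoisted slot and only writes if the slot still holds the hole, and ordinary non-initializing assignments to consts are dropped entirely, as the final comment notes.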
#endif
Comment cmnt(masm_, "[ Call");
- Expression* fun = expr->expression();
- Variable* var = fun->AsVariableProxy()->AsVariable();
+ Expression* callee = expr->expression();
+ VariableProxy* proxy = callee->AsVariableProxy();
+ Property* property = callee->AsProperty();
- if (var != NULL && var->is_possibly_eval()) {
+ if (proxy != NULL && proxy->var()->is_possibly_eval()) {
// In a call to eval, we first call %ResolvePossiblyDirectEval to
- // resolve the function we need to call and the receiver of the
- // call. Then we call the resolved function using the given
- // arguments.
+ // resolve the function we need to call and the receiver of the call.
+ // Then we call the resolved function using the given arguments.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
{ PreservePositionScope pos_scope(masm()->positions_recorder());
- VisitForStackValue(fun);
+ VisitForStackValue(callee);
// Reserved receiver slot.
__ push(Immediate(isolate()->factory()->undefined_value()));
increment_stack_height();
}
// If we know that eval can only be shadowed by eval-introduced
- // variables we attempt to load the global eval function directly
- // in generated code. If we succeed, there is no need to perform a
+ // variables we attempt to load the global eval function directly in
+ // generated code. If we succeed, there is no need to perform a
// context lookup in the runtime system.
Label done;
- if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+ Variable* var = proxy->var();
+ if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) {
Label slow;
- EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
- NOT_INSIDE_TYPEOF,
- &slow);
+ EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow);
// Push the function and resolve eval.
__ push(eax);
EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
__ bind(&slow);
}
- // Push copy of the function (found below the arguments) and
+ // Push a copy of the function (found below the arguments) and
// resolve eval.
__ push(Operand(esp, (arg_count + 1) * kPointerSize));
EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
- if (done.is_linked()) {
- __ bind(&done);
- }
+ __ bind(&done);
// The runtime call returns a pair of values in eax (function) and
// edx (receiver). Touch up the stack with the right values.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
decrement_stack_height(arg_count + 1); // Function is left on the stack.
context()->DropAndPlug(1, eax);
- } else if (var != NULL && !var->is_this() && var->is_global()) {
+
+ } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
// Push global object as receiver for the call IC.
__ push(GlobalObjectOperand());
increment_stack_height();
- EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
- } else if (var != NULL && var->AsSlot() != NULL &&
- var->AsSlot()->type() == Slot::LOOKUP) {
+ EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
+
+ } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
// Call to a lookup slot (dynamically introduced variable).
Label slow, done;
-
{ PreservePositionScope scope(masm()->positions_recorder());
- // Generate code for loading from variables potentially shadowed
- // by eval-introduced variables.
- EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
- NOT_INSIDE_TYPEOF,
- &slow,
- &done);
+ // Generate code for loading from variables potentially shadowed by
+ // eval-introduced variables.
+ EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
}
-
__ bind(&slow);
- // Call the runtime to find the function to call (returned in eax)
- // and the object holding it (returned in edx).
+ // Call the runtime to find the function to call (returned in eax) and
+ // the object holding it (returned in edx).
__ push(context_register());
- __ push(Immediate(var->name()));
+ __ push(Immediate(proxy->name()));
__ CallRuntime(Runtime::kLoadContextSlot, 2);
__ push(eax); // Function.
- increment_stack_height();
__ push(edx); // Receiver.
- increment_stack_height();
+ increment_stack_height(2);
- // If fast case code has been generated, emit code to push the
- // function and receiver and have the slow path jump around this
- // code.
+ // If fast case code has been generated, emit code to push the function
+ // and receiver and have the slow path jump around this code.
if (done.is_linked()) {
Label call;
- __ jmp(&call);
+ __ jmp(&call, Label::kNear);
__ bind(&done);
- // Push function. Stack height already incremented in slow case above.
+ // Push function. Stack height already incremented in slow case
+ // above.
__ push(eax);
- // The receiver is implicitly the global receiver. Indicate this
- // by passing the hole to the call function stub.
+ // The receiver is implicitly the global receiver. Indicate this by
+ // passing the hole to the call function stub.
__ push(Immediate(isolate()->factory()->the_hole_value()));
__ bind(&call);
}
- // The receiver is either the global receiver or an object found
- // by LoadContextSlot. That object could be the hole if the
- // receiver is implicitly the global object.
+ // The receiver is either the global receiver or an object found by
+ // LoadContextSlot. That object could be the hole if the receiver is
+ // implicitly the global object.
EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
- } else if (fun->AsProperty() != NULL) {
- // Call to an object property.
- Property* prop = fun->AsProperty();
- Literal* key = prop->key()->AsLiteral();
- if (key != NULL && key->handle()->IsSymbol()) {
- // Call to a named property, use call IC.
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
+
+ } else if (property != NULL) {
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(property->obj());
+ }
+ if (property->key()->IsPropertyName()) {
+ EmitCallWithIC(expr,
+ property->key()->AsLiteral()->handle(),
+ RelocInfo::CODE_TARGET);
} else {
- // Call to a keyed property.
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitKeyedCallWithIC(expr, prop->key());
+ EmitKeyedCallWithIC(expr, property->key());
}
+
} else {
+ // Call to an arbitrary expression not handled specially above.
{ PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(fun);
+ VisitForStackValue(callee);
}
// Load global receiver object.
__ mov(ebx, GlobalObjectOperand());
switch (expr->op()) {
case Token::DELETE: {
Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
- Property* prop = expr->expression()->AsProperty();
- Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
+ Property* property = expr->expression()->AsProperty();
+ VariableProxy* proxy = expr->expression()->AsVariableProxy();
- if (prop != NULL) {
- VisitForStackValue(prop->obj());
- VisitForStackValue(prop->key());
+ if (property != NULL) {
+ VisitForStackValue(property->obj());
+ VisitForStackValue(property->key());
__ push(Immediate(Smi::FromInt(strict_mode_flag())));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
decrement_stack_height(2);
context()->Plug(eax);
- } else if (var != NULL) {
+ } else if (proxy != NULL) {
+ Variable* var = proxy->var();
// Delete of an unqualified identifier is disallowed in strict mode
- // but "delete this" is.
+ // but "delete this" is allowed.
ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
- if (var->is_global()) {
+ if (var->IsUnallocated()) {
__ push(GlobalObjectOperand());
__ push(Immediate(var->name()));
__ push(Immediate(Smi::FromInt(kNonStrictMode)));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(eax);
- } else if (var->AsSlot() != NULL &&
- var->AsSlot()->type() != Slot::LOOKUP) {
- // Result of deleting non-global, non-dynamic variables is false.
- // The subexpression does not have side effects.
- context()->Plug(false);
+ } else if (var->IsStackAllocated() || var->IsContextSlot()) {
+ // Result of deleting non-global variables is false. 'this' is
+ // not really a variable, though we implement it as one. The
+ // subexpression does not have side effects.
+ context()->Plug(var->is_this());
} else {
// Non-global variable. Call the runtime to try to delete from the
// context where the variable was introduced.
ASSERT(!context()->IsEffect());
ASSERT(!context()->IsTest());
- if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
+ if (proxy != NULL && proxy->var()->IsUnallocated()) {
Comment cmnt(masm_, "Global variable");
__ mov(eax, GlobalObjectOperand());
__ mov(ecx, Immediate(proxy->name()));
__ call(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(eax);
- } else if (proxy != NULL &&
- proxy->var()->AsSlot() != NULL &&
- proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
+ } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
Label done, slow;
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
- Slot* slot = proxy->var()->AsSlot();
- EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
+ EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
__ bind(&slow);
__ push(esi);
// Copy parameters into context if necessary.
int num_parameters = scope()->num_parameters();
for (int i = 0; i < num_parameters; i++) {
- Slot* slot = scope()->parameter(i)->AsSlot();
- if (slot != NULL && slot->type() == Slot::CONTEXT) {
+ Variable* var = scope()->parameter(i);
+ if (var->IsContextSlot()) {
int parameter_offset = StandardFrameConstants::kCallerSPOffset +
(num_parameters - 1 - i) * kPointerSize;
// Load parameter from stack.
__ mov(eax, Operand(ebp, parameter_offset));
// Store it in the context.
- int context_offset = Context::SlotOffset(slot->index());
+ int context_offset = Context::SlotOffset(var->index());
__ mov(Operand(esi, context_offset), eax);
// Update the write barrier. This clobbers all involved
// registers, so we have to use a third register to avoid
int j = 0;
for (int i = 0; i < list.length(); i++) {
Variable* var1 = list[i];
- Slot* slot = var1->AsSlot();
- if (slot != NULL && slot->type() == Slot::CONTEXT) {
+ if (var1->IsContextSlot()) {
if (j != i) {
list[j] = var1;
}
for (int k = 1; k < j; k++) {
int l = k;
for (int m = k + 1; m < j; m++) {
- if (list[l]->AsSlot()->index() > list[m]->AsSlot()->index()) {
+ if (list[l]->index() > list[m]->index()) {
l = m;
}
}
SetElementNonStrict(
scope_info_list,
scope_info_length,
- Handle<Smi>(Smi::FromInt(list[i]->AsSlot()->index())));
+ Handle<Smi>(Smi::FromInt(list[i]->index())));
scope_info_length++;
}
SetElementNonStrict(scope_info_list,
// Copy any necessary parameters into the context.
int num_parameters = info->scope()->num_parameters();
for (int i = 0; i < num_parameters; i++) {
- Slot* slot = scope()->parameter(i)->AsSlot();
+ Slot* slot = scope()->parameter(i)->rewrite();
if (slot != NULL && slot->type() == Slot::CONTEXT) {
int parameter_offset = StandardFrameConstants::kCallerSPOffset +
(num_parameters - 1 - i) * kPointerSize;
ArgumentsAccessStub stub(type);
__ CallStub(&stub);
- Move(arguments->AsSlot(), v0, a1, a2);
+ Move(arguments->rewrite(), v0, a1, a2);
}
if (FLAG_trace) {
return ContextOperand(scratch, slot->index());
}
case Slot::LOOKUP:
+ case Slot::GLOBAL:
UNREACHABLE();
}
UNREACHABLE();
Comment cmnt(masm_, "[ Declaration");
Variable* variable = proxy->var();
ASSERT(variable != NULL); // Must have been resolved.
- Slot* slot = variable->AsSlot();
+ Slot* slot = variable->rewrite();
ASSERT(slot != NULL);
switch (slot->type()) {
case Slot::PARAMETER:
__ CallRuntime(Runtime::kDeclareContextSlot, 4);
break;
}
+
+ case Slot::GLOBAL:
+ UNREACHABLE();
}
}
EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
__ Branch(done);
} else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
- Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
+ Slot* potential_slot = slot->var()->local_if_not_shadowed()->rewrite();
Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
if (potential_slot != NULL) {
// Generate fast case for locals that rewrite to slots.
// variables. Then load the argument from the arguments
// object using keyed load.
__ lw(a1,
- ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
+ ContextSlotOperandCheckExtensions(obj_proxy->var()->rewrite(),
slow));
__ li(a0, Operand(key_literal->handle()));
Handle<Code> ic =
// Three cases: non-this global variables, lookup slots, and all other
// types of slots.
- Slot* slot = var->AsSlot();
+ Slot* slot = var->rewrite();
ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
if (slot == NULL) {
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
ASSERT(var != NULL);
- ASSERT(var->is_global() || var->AsSlot() != NULL);
+ ASSERT(var->is_global() || var->rewrite() != NULL);
if (var->is_global()) {
ASSERT(!var->is_this());
// scope. However, unlike var initializers, const initializers are able
// to drill a hole to that function context, even from inside a 'with'
// context. We thus bypass the normal static scope lookup.
- Slot* slot = var->AsSlot();
+ Slot* slot = var->rewrite();
Label skip;
switch (slot->type()) {
case Slot::PARAMETER:
__ Push(cp, a0); // Context and name.
__ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
break;
+ case Slot::GLOBAL:
+ UNREACHABLE();
}
__ bind(&skip);
} else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
} else if (var->mode() != Variable::CONST) {
// Perform the assignment for non-const variables. Const assignments
// are simply skipped.
- Slot* slot = var->AsSlot();
+ Slot* slot = var->rewrite();
switch (slot->type()) {
case Slot::PARAMETER:
case Slot::LOCAL:
__ Push(cp, a1, a0); // Context, name, strict mode.
__ CallRuntime(Runtime::kStoreContextSlot, 4);
break;
+
+ case Slot::GLOBAL:
+ UNREACHABLE();
}
}
}
// in generated code. If we succeed, there is no need to perform a
// context lookup in the runtime system.
Label done;
- if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+ if (var->rewrite() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
Label slow;
- EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
+ EmitLoadGlobalSlotCheckExtensions(var->rewrite(),
NOT_INSIDE_TYPEOF,
&slow);
// Push the function and resolve eval.
__ lw(a0, GlobalObjectOperand());
__ push(a0);
EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
- } else if (var != NULL && var->AsSlot() != NULL &&
- var->AsSlot()->type() == Slot::LOOKUP) {
+ } else if (var != NULL && var->rewrite() != NULL &&
+ var->rewrite()->type() == Slot::LOOKUP) {
// Call to a lookup slot (dynamically introduced variable).
Label slow, done;
{ PreservePositionScope scope(masm()->positions_recorder());
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
- EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
+ EmitDynamicLoadFromSlotFastCase(var->rewrite(),
NOT_INSIDE_TYPEOF,
&slow,
&done);
__ Push(a2, a1, a0);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(v0);
- } else if (var->AsSlot() != NULL &&
- var->AsSlot()->type() != Slot::LOOKUP) {
+ } else if (var->rewrite() != NULL &&
+ var->rewrite()->type() != Slot::LOOKUP) {
// Result of deleting non-global, non-dynamic variables is false.
// The subexpression does not have side effects.
context()->Plug(false);
PrepareForBailout(expr, TOS_REG);
context()->Plug(v0);
} else if (proxy != NULL &&
- proxy->var()->AsSlot() != NULL &&
- proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
+ proxy->var()->rewrite() != NULL &&
+ proxy->var()->rewrite()->type() == Slot::LOOKUP) {
Label done, slow;
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
- Slot* slot = proxy->var()->AsSlot();
+ Slot* slot = proxy->var()->rewrite();
EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
__ bind(&slow);
}
-void PrettyPrinter::VisitSlot(Slot* node) {
- switch (node->type()) {
- case Slot::PARAMETER:
- Print("parameter[%d]", node->index());
- break;
- case Slot::LOCAL:
- Print("local[%d]", node->index());
- break;
- case Slot::CONTEXT:
- Print("context[%d]", node->index());
- break;
- case Slot::LOOKUP:
- Print("lookup[");
- PrintLiteral(node->var()->name(), false);
- Print("]");
- break;
- default:
- UNREACHABLE();
- }
-}
-
-
void PrettyPrinter::VisitVariableProxy(VariableProxy* node) {
PrintLiteral(node->name(), false);
}
if (node->fun() == NULL) {
// var or const declarations
PrintLiteralWithModeIndented(Variable::Mode2String(node->mode()),
- node->proxy()->AsVariable(),
+ node->proxy()->var(),
node->proxy()->name());
} else {
// function declarations
}
-void AstPrinter::VisitSlot(Slot* node) {
- PrintIndented("SLOT ");
- PrettyPrinter::VisitSlot(node);
- Print("\n");
-}
-
-
void AstPrinter::VisitVariableProxy(VariableProxy* node) {
- PrintLiteralWithModeIndented("VAR PROXY", node->AsVariable(), node->name());
Variable* var = node->var();
- if (var != NULL && var->rewrite() != NULL) {
- IndentedScope indent(this);
- Visit(var->rewrite());
+ PrintLiteralWithModeIndented("VAR PROXY", var, node->name());
+ { IndentedScope indent(this);
+ switch (var->location()) {
+ case Variable::UNALLOCATED:
+ break;
+ case Variable::PARAMETER:
+ Print("parameter[%d]", var->index());
+ break;
+ case Variable::LOCAL:
+ Print("local[%d]", var->index());
+ break;
+ case Variable::CONTEXT:
+ Print("context[%d]", var->index());
+ break;
+ case Variable::LOOKUP:
+ Print("lookup");
+ break;
+ }
}
}
}
-void JsonAstBuilder::VisitSlot(Slot* expr) {
- TagScope tag(this, "Slot");
+void JsonAstBuilder::VisitVariableProxy(VariableProxy* expr) {
+ TagScope tag(this, "Variable");
{
AttributesScope attributes(this);
- switch (expr->type()) {
- case Slot::PARAMETER:
- AddAttribute("type", "PARAMETER");
+ Variable* var = expr->var();
+ AddAttribute("name", var->name());
+ switch (var->location()) {
+ case Variable::UNALLOCATED:
+ AddAttribute("location", "UNALLOCATED");
break;
- case Slot::LOCAL:
- AddAttribute("type", "LOCAL");
+ case Variable::PARAMETER:
+ AddAttribute("location", "PARAMETER");
+ AddAttribute("index", var->index());
break;
- case Slot::CONTEXT:
- AddAttribute("type", "CONTEXT");
+ case Variable::LOCAL:
+ AddAttribute("location", "LOCAL");
+ AddAttribute("index", var->index());
break;
- case Slot::LOOKUP:
- AddAttribute("type", "LOOKUP");
+ case Variable::CONTEXT:
+ AddAttribute("location", "CONTEXT");
+ AddAttribute("index", var->index());
+ break;
+ case Variable::LOOKUP:
+ AddAttribute("location", "LOOKUP");
break;
}
- AddAttribute("index", expr->index());
- }
-}
-
-
-void JsonAstBuilder::VisitVariableProxy(VariableProxy* expr) {
- if (expr->var()->rewrite() == NULL) {
- TagScope tag(this, "VariableProxy");
- {
- AttributesScope attributes(this);
- AddAttribute("name", expr->name());
- AddAttribute("mode", Variable::Mode2String(expr->var()->mode()));
- }
- } else {
- Visit(expr->var()->rewrite());
}
}
// Print a node to stdout.
static void PrintOut(AstNode* node);
- virtual void VisitSlot(Slot* node);
// Individual nodes
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
const char* PrintProgram(FunctionLiteral* program);
// Individual nodes
- virtual void VisitSlot(Slot* node);
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
void AddAttribute(const char* name, bool value);
// AST node visit functions.
- virtual void VisitSlot(Slot* node);
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
static int CompareLocal(Variable* const* v, Variable* const* w) {
- Slot* s = (*v)->AsSlot();
- Slot* t = (*w)->AsSlot();
- // We may have rewritten parameters (that are in the arguments object)
- // and which may have a NULL slot... - find a better solution...
- int x = (s != NULL ? s->index() : 0);
- int y = (t != NULL ? t->index() : 0);
+ int x = (*v)->index();
+ int y = (*w)->index();
// Consider sorting them according to type as well?
return x - y;
}
for (int i = 0; i < locals.length(); i++) {
Variable* var = locals[i];
if (var->is_used()) {
- Slot* slot = var->AsSlot();
- if (slot != NULL) {
- switch (slot->type()) {
- case Slot::PARAMETER:
- // explicitly added to parameters_ above - ignore
- break;
-
- case Slot::LOCAL:
- ASSERT(stack_slots_.length() == slot->index());
- stack_slots_.Add(var->name());
- break;
-
- case Slot::CONTEXT:
- heap_locals.Add(var);
- break;
-
- case Slot::LOOKUP:
- // This is currently not used.
- UNREACHABLE();
- break;
- }
+ switch (var->location()) {
+ case Variable::UNALLOCATED:
+ case Variable::PARAMETER:
+ break;
+
+ case Variable::LOCAL:
+ ASSERT(stack_slots_.length() == var->index());
+ stack_slots_.Add(var->name());
+ break;
+
+ case Variable::CONTEXT:
+ heap_locals.Add(var);
+ break;
+
+ case Variable::LOOKUP:
+ // We don't expect lookup variables in the locals list.
+ UNREACHABLE();
+ break;
}
}
}
if (scope->num_heap_slots() > 0) {
// Add user-defined slots.
for (int i = 0; i < heap_locals.length(); i++) {
- ASSERT(heap_locals[i]->AsSlot()->index() - Context::MIN_CONTEXT_SLOTS ==
+ ASSERT(heap_locals[i]->index() - Context::MIN_CONTEXT_SLOTS ==
context_slots_.length());
- ASSERT(heap_locals[i]->AsSlot()->index() - Context::MIN_CONTEXT_SLOTS ==
+ ASSERT(heap_locals[i]->index() - Context::MIN_CONTEXT_SLOTS ==
context_modes_.length());
context_slots_.Add(heap_locals[i]->name());
context_modes_.Add(heap_locals[i]->mode());
// list - instead it must be handled separately in the
// Contexts::Lookup() function. Thus record an empty symbol here so we
// get the correct number of context slots.
- ASSERT(proxy->var()->AsSlot()->index() - Context::MIN_CONTEXT_SLOTS ==
+ ASSERT(proxy->var()->index() - Context::MIN_CONTEXT_SLOTS ==
context_slots_.length());
- ASSERT(proxy->var()->AsSlot()->index() - Context::MIN_CONTEXT_SLOTS ==
+ ASSERT(proxy->var()->index() - Context::MIN_CONTEXT_SLOTS ==
context_modes_.length());
context_slots_.Add(FACTORY->empty_symbol());
context_modes_.Add(Variable::INTERNAL);
#include "bootstrapper.h"
#include "compiler.h"
-#include "prettyprinter.h"
#include "scopeinfo.h"
#include "allocation-inl.h"
Variable::VAR,
false,
Variable::THIS);
- var->set_rewrite(NewSlot(var, Slot::PARAMETER, -1));
+ var->AllocateTo(Variable::PARAMETER, -1);
receiver_ = var;
}
Variable* var =
variables_.Declare(this, name, mode, true, Variable::NORMAL);
- var->set_rewrite(NewSlot(var, Slot::CONTEXT, index));
+ var->AllocateTo(Variable::CONTEXT, index);
return var;
}
Variable* function_var =
new Variable(this, name, Variable::CONST, true, Variable::NORMAL);
function_ = new(isolate_->zone()) VariableProxy(isolate_, function_var);
- return function_->var();
+ return function_var;
}
Variable* Scope::DeclareGlobal(Handle<String> name) {
ASSERT(is_global_scope());
- return variables_.Declare(this, name, Variable::DYNAMIC_GLOBAL, true,
+ return variables_.Declare(this, name, Variable::DYNAMIC_GLOBAL,
+ true,
Variable::NORMAL);
}
Variable* Scope::NewTemporary(Handle<String> name) {
ASSERT(!already_resolved());
- Variable* var =
- new Variable(this, name, Variable::TEMPORARY, true, Variable::NORMAL);
+ Variable* var = new Variable(this,
+ name,
+ Variable::TEMPORARY,
+ true,
+ Variable::NORMAL);
temps_.Add(var);
return var;
}
}
-static void PrintVar(PrettyPrinter* printer, int indent, Variable* var) {
- if (var->is_used() || var->rewrite() != NULL) {
+static void PrintLocation(Variable* var) {
+ switch (var->location()) {
+ case Variable::UNALLOCATED:
+ break;
+ case Variable::PARAMETER:
+ PrintF("parameter[%d]", var->index());
+ break;
+ case Variable::LOCAL:
+ PrintF("local[%d]", var->index());
+ break;
+ case Variable::CONTEXT:
+ PrintF("context[%d]", var->index());
+ break;
+ case Variable::LOOKUP:
+ PrintF("lookup");
+ break;
+ }
+}
+
+
+static void PrintVar(int indent, Variable* var) {
+ if (var->is_used() || !var->IsUnallocated()) {
Indent(indent, Variable::Mode2String(var->mode()));
PrintF(" ");
PrintName(var->name());
PrintF("; // ");
- if (var->rewrite() != NULL) {
- PrintF("%s, ", printer->Print(var->rewrite()));
- if (var->is_accessed_from_inner_function_scope()) PrintF(", ");
- }
+ PrintLocation(var);
if (var->is_accessed_from_inner_function_scope()) {
+ if (!var->IsUnallocated()) PrintF(", ");
PrintF("inner scope access");
}
PrintF("\n");
}
-static void PrintMap(PrettyPrinter* printer, int indent, VariableMap* map) {
+static void PrintMap(int indent, VariableMap* map) {
for (VariableMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) {
Variable* var = reinterpret_cast<Variable*>(p->value);
- PrintVar(printer, indent, var);
+ PrintVar(indent, var);
}
}
PrintF("%d heap slots\n", num_heap_slots_); }
// Print locals.
- PrettyPrinter printer;
Indent(n1, "// function var\n");
if (function_ != NULL) {
- PrintVar(&printer, n1, function_->var());
+ PrintVar(n1, function_->var());
}
Indent(n1, "// temporary vars\n");
for (int i = 0; i < temps_.length(); i++) {
- PrintVar(&printer, n1, temps_[i]);
+ PrintVar(n1, temps_[i]);
}
Indent(n1, "// local vars\n");
- PrintMap(&printer, n1, &variables_);
+ PrintMap(n1, &variables_);
Indent(n1, "// dynamic vars\n");
if (dynamics_ != NULL) {
- PrintMap(&printer, n1, dynamics_->GetMap(Variable::DYNAMIC));
- PrintMap(&printer, n1, dynamics_->GetMap(Variable::DYNAMIC_LOCAL));
- PrintMap(&printer, n1, dynamics_->GetMap(Variable::DYNAMIC_GLOBAL));
+ PrintMap(n1, dynamics_->GetMap(Variable::DYNAMIC));
+ PrintMap(n1, dynamics_->GetMap(Variable::DYNAMIC_LOCAL));
+ PrintMap(n1, dynamics_->GetMap(Variable::DYNAMIC_GLOBAL));
}
// Print inner scopes (disable by providing negative n).
// Declare a new non-local.
var = map->Declare(NULL, name, mode, true, Variable::NORMAL);
// Allocate it by giving it a dynamic lookup.
- var->set_rewrite(NewSlot(var, Slot::LOOKUP, -1));
+ var->AllocateTo(Variable::LOOKUP, -1);
}
return var;
}
void Scope::AllocateStackSlot(Variable* var) {
- var->set_rewrite(NewSlot(var, Slot::LOCAL, num_stack_slots_++));
+ var->AllocateTo(Variable::LOCAL, num_stack_slots_++);
}
void Scope::AllocateHeapSlot(Variable* var) {
- var->set_rewrite(NewSlot(var, Slot::CONTEXT, num_heap_slots_++));
+ var->AllocateTo(Variable::CONTEXT, num_heap_slots_++);
}
if (MustAllocate(var)) {
if (MustAllocateInContext(var)) {
- ASSERT(var->rewrite() == NULL || var->IsContextSlot());
- if (var->rewrite() == NULL) {
+ ASSERT(var->IsUnallocated() || var->IsContextSlot());
+ if (var->IsUnallocated()) {
AllocateHeapSlot(var);
}
} else {
- ASSERT(var->rewrite() == NULL || var->IsParameter());
- if (var->rewrite() == NULL) {
- var->set_rewrite(NewSlot(var, Slot::PARAMETER, i));
+ ASSERT(var->IsUnallocated() || var->IsParameter());
+ if (var->IsUnallocated()) {
+ var->AllocateTo(Variable::PARAMETER, i);
}
}
}
void Scope::AllocateNonParameterLocal(Variable* var) {
ASSERT(var->scope() == this);
- ASSERT(var->rewrite() == NULL ||
- !var->IsVariable(isolate_->factory()->result_symbol()) ||
- var->AsSlot() == NULL ||
- var->AsSlot()->type() != Slot::LOCAL);
- if (var->rewrite() == NULL && MustAllocate(var)) {
+ ASSERT(!var->IsVariable(isolate_->factory()->result_symbol()) ||
+ !var->IsStackLocal());
+ if (var->IsUnallocated() && MustAllocate(var)) {
if (MustAllocateInContext(var)) {
AllocateHeapSlot(var);
} else {
// Construct a catch scope with a binding for the name.
Scope(Scope* inner_scope, Handle<String> catch_variable_name);
- inline Slot* NewSlot(Variable* var, Slot::Type type, int index) {
- return new(isolate_->zone()) Slot(isolate_, var, type, index);
- }
-
void AddInnerScope(Scope* inner_scope) {
if (inner_scope != NULL) {
inner_scopes_.Add(inner_scope);
}
-Property* Variable::AsProperty() const {
- return rewrite_ == NULL ? NULL : rewrite_->AsProperty();
-}
-
-
-Slot* Variable::AsSlot() const { return rewrite_; }
-
-
-bool Variable::IsStackAllocated() const {
- return rewrite_ != NULL && rewrite_->IsStackAllocated();
-}
-
-
-bool Variable::IsParameter() const {
- return rewrite_ != NULL && rewrite_->type() == Slot::PARAMETER;
-}
-
-
-bool Variable::IsStackLocal() const {
- return rewrite_ != NULL && rewrite_->type() == Slot::LOCAL;
-}
-
-
-bool Variable::IsContextSlot() const {
- return rewrite_ != NULL && rewrite_->type() == Slot::CONTEXT;
-}
-
-
Variable::Variable(Scope* scope,
Handle<String> name,
Mode mode,
name_(name),
mode_(mode),
kind_(kind),
+ location_(UNALLOCATED),
+ index_(-1),
local_if_not_shadowed_(NULL),
- rewrite_(NULL),
is_valid_LHS_(is_valid_LHS),
is_accessed_from_inner_function_scope_(false),
is_used_(false) {
ARGUMENTS
};
+ enum Location {
+ // Before and during variable allocation, a variable whose location is
+ // not yet determined. After allocation, a variable looked up as a
+ // property on the global object (and possibly absent). name() is the
+ // variable name, index() is invalid.
+ UNALLOCATED,
+
+ // A slot in the parameter section on the stack. index() is the
+ // parameter index, counting left-to-right. The receiver is index -1;
+ // the first parameter is index 0.
+ PARAMETER,
+
+ // A slot in the local section on the stack. index() is the variable
+ // index in the stack frame, starting at 0.
+ LOCAL,
+
+ // An indexed slot in a heap context. index() is the variable index in
+ // the context object on the heap, starting at 0. scope() is the
+ // corresponding scope.
+ CONTEXT,
+
+ // A named slot in a heap context. name() is the variable name in the
+ // context object on the heap, with lookup starting at the current
+ // context. index() is invalid.
+ LOOKUP
+ };
+
Variable(Scope* scope,
Handle<String> name,
Mode mode,
// Printing support
static const char* Mode2String(Mode mode);
- // Type testing & conversion. Global variables are not slots.
- Property* AsProperty() const;
- Slot* AsSlot() const;
-
bool IsValidLeftHandSide() { return is_valid_LHS_; }
// The source code for an eval() call may refer to a variable that is
return !is_this() && name().is_identical_to(n);
}
- bool IsStackAllocated() const;
- bool IsParameter() const; // Includes 'this'.
- bool IsStackLocal() const;
- bool IsContextSlot() const;
+ bool IsUnallocated() const { return location_ == UNALLOCATED; }
+ bool IsParameter() const { return location_ == PARAMETER; }
+ bool IsStackLocal() const { return location_ == LOCAL; }
+ bool IsStackAllocated() const { return IsParameter() || IsStackLocal(); }
+ bool IsContextSlot() const { return location_ == CONTEXT; }
+ bool IsLookupSlot() const { return location_ == LOOKUP; }
bool is_dynamic() const {
return (mode_ == DYNAMIC ||
local_if_not_shadowed_ = local;
}
- Slot* rewrite() const { return rewrite_; }
- void set_rewrite(Slot* slot) { rewrite_ = slot; }
+ Location location() const { return location_; }
+ int index() const { return index_; }
+
+ void AllocateTo(Location location, int index) {
+ location_ = location;
+ index_ = index;
+ }
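// Illustrative use of the new allocation API (sketch, not part of the patch;
// 'v' stands for a hypothetical Variable*):
//
//   ASSERT(v->IsUnallocated());            // freshly declared
//   v->AllocateTo(Variable::LOCAL, 0);     // give it stack slot 0
//   ASSERT(v->IsStackLocal() && v->IsStackAllocated());
//   ASSERT(v->location() == Variable::LOCAL && v->index() == 0);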
private:
Scope* scope_;
Handle<String> name_;
Mode mode_;
Kind kind_;
+ Location location_;
+ int index_;
Variable* local_if_not_shadowed_;
- // Code generation.
- Slot* rewrite_;
-
// Valid as a LHS? (const and this are not valid LHS, for example)
bool is_valid_LHS_;
class JumpPatchSite BASE_EMBEDDED {
public:
- explicit JumpPatchSite(MacroAssembler* masm)
- : masm_(masm) {
+ explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
info_emitted_ = false;
#endif
// Copy any necessary parameters into the context.
int num_parameters = info->scope()->num_parameters();
for (int i = 0; i < num_parameters; i++) {
- Slot* slot = scope()->parameter(i)->AsSlot();
- if (slot != NULL && slot->type() == Slot::CONTEXT) {
+ Variable* var = scope()->parameter(i);
+ if (var->IsContextSlot()) {
int parameter_offset = StandardFrameConstants::kCallerSPOffset +
(num_parameters - 1 - i) * kPointerSize;
// Load parameter from stack.
__ movq(rax, Operand(rbp, parameter_offset));
// Store it in the context.
- int context_offset = Context::SlotOffset(slot->index());
+ int context_offset = Context::SlotOffset(var->index());
__ movq(Operand(rsi, context_offset), rax);
// Update the write barrier. This clobbers all involved
// registers, so we have to use a third register to avoid
: ArgumentsAccessStub::NEW_NON_STRICT_SLOW);
__ CallStub(&stub);
- Move(arguments->AsSlot(), rax, rbx, rdx);
+ SetVar(arguments, rax, rbx, rdx);
}
if (FLAG_trace) {
if (scope()->HasIllegalRedeclaration()) {
Comment cmnt(masm_, "[ Declarations");
scope()->VisitIllegalRedeclaration(this);
+
} else {
PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
{ Comment cmnt(masm_, "[ Declarations");
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- EmitDeclaration(scope()->function(), Variable::CONST, NULL);
+ int ignored = 0;
+ EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored);
}
VisitDeclarations(scope()->declarations());
}
}
-void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
+void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}
-void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
- MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
- __ movq(result_register(), slot_operand);
+void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
+ codegen()->GetVar(result_register(), var);
}
-void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
- MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
- __ push(slot_operand);
+void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
+ MemOperand operand = codegen()->VarOperand(var, result_register());
+ __ push(operand);
}
-void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
- codegen()->Move(result_register(), slot);
+void FullCodeGenerator::TestContext::Plug(Variable* var) const {
+ codegen()->GetVar(result_register(), var);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(this);
}
}
-MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- return Operand(rbp, SlotOffset(slot));
- case Slot::CONTEXT: {
- int context_chain_length =
- scope()->ContextChainLength(slot->var()->scope());
- __ LoadContext(scratch, context_chain_length);
- return ContextOperand(scratch, slot->index());
- }
- case Slot::LOOKUP:
- UNREACHABLE();
+MemOperand FullCodeGenerator::StackOperand(Variable* var) {
+ ASSERT(var->IsStackAllocated());
+ // Offset is negative because higher indexes are at lower addresses.
+ int offset = -var->index() * kPointerSize;
+ // Adjust by a (parameter or local) base offset.
+ if (var->IsParameter()) {
+ offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
+ } else {
+ offset += JavaScriptFrameConstants::kLocal0Offset;
}
- UNREACHABLE();
- return Operand(rax, 0);
+ return Operand(rbp, offset);
}
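// Worked example for StackOperand (illustrative; assumes x64 with
// kPointerSize == 8 and num_parameters == 2): the parameter at index 0
// yields offset -0*8 + (2+1)*8 = 24, i.e. Operand(rbp, 24), with the
// receiver (index -1) one slot higher at rbp+32; the local at index 1
// yields kLocal0Offset - 8, one slot below local 0 in the frame.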
-void FullCodeGenerator::Move(Register destination, Slot* source) {
- MemOperand location = EmitSlotSearch(source, destination);
- __ movq(destination, location);
+MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
+ ASSERT(var->IsContextSlot() || var->IsStackAllocated());
+ if (var->IsContextSlot()) {
+ int context_chain_length = scope()->ContextChainLength(var->scope());
+ __ LoadContext(scratch, context_chain_length);
+ return ContextOperand(scratch, var->index());
+ } else {
+ return StackOperand(var);
+ }
}
-void FullCodeGenerator::Move(Slot* dst,
- Register src,
- Register scratch1,
- Register scratch2) {
- ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
- ASSERT(!scratch1.is(src) && !scratch2.is(src));
- MemOperand location = EmitSlotSearch(dst, scratch1);
+void FullCodeGenerator::GetVar(Register dest, Variable* var) {
+ ASSERT(var->IsContextSlot() || var->IsStackAllocated());
+ MemOperand location = VarOperand(var, dest);
+ __ movq(dest, location);
+}
+
+
+void FullCodeGenerator::SetVar(Variable* var,
+ Register src,
+ Register scratch0,
+ Register scratch1) {
+ ASSERT(var->IsContextSlot() || var->IsStackAllocated());
+ ASSERT(!scratch0.is(src));
+ ASSERT(!scratch0.is(scratch1));
+ ASSERT(!scratch1.is(src));
+ MemOperand location = VarOperand(var, scratch0);
__ movq(location, src);
// Emit the write barrier code if the location is in the heap.
- if (dst->type() == Slot::CONTEXT) {
- int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
- __ RecordWrite(scratch1, offset, src, scratch2);
+ if (var->IsContextSlot()) {
+ int offset = Context::SlotOffset(var->index());
+ __ RecordWrite(scratch0, offset, src, scratch1);
}
}
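// Illustrative use of SetVar (sketch): the prologue hunks above store the
// freshly created arguments object with SetVar(arguments, rax, rbx, rdx).
// For context slots, VarOperand leaves the target context in the first
// scratch register, so SetVar can pass it straight to RecordWrite as the
// object register; stack stores need no write barrier.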
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
Variable::Mode mode,
- FunctionLiteral* function) {
- Comment cmnt(masm_, "[ Declaration");
+ FunctionLiteral* function,
+ int* global_count) {
+ // If it was not possible to allocate the variable at compile time, we
+ // need to "declare" it at runtime to make sure it actually exists in the
+ // local context.
Variable* variable = proxy->var();
- ASSERT(variable != NULL); // Must have been resolved.
- Slot* slot = variable->AsSlot();
- ASSERT(slot != NULL);
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
+ switch (variable->location()) {
+ case Variable::UNALLOCATED:
+ ++(*global_count);
+ break;
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
if (function != NULL) {
+ Comment cmnt(masm_, "[ Declaration");
VisitForAccumulatorValue(function);
- __ movq(Operand(rbp, SlotOffset(slot)), result_register());
+ __ movq(StackOperand(variable), result_register());
} else if (mode == Variable::CONST || mode == Variable::LET) {
+ Comment cmnt(masm_, "[ Declaration");
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
- __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
+ __ movq(StackOperand(variable), kScratchRegister);
}
break;
- case Slot::CONTEXT:
- // We bypass the general EmitSlotSearch because we know more about
- // this specific context.
-
+ case Variable::CONTEXT:
// The variable in the decl always resides in the current function
// context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
__ Check(not_equal, "Declaration in catch context.");
}
if (function != NULL) {
+ Comment cmnt(masm_, "[ Declaration");
VisitForAccumulatorValue(function);
- __ movq(ContextOperand(rsi, slot->index()), result_register());
- int offset = Context::SlotOffset(slot->index());
+ __ movq(ContextOperand(rsi, variable->index()), result_register());
+ int offset = Context::SlotOffset(variable->index());
__ movq(rbx, rsi);
__ RecordWrite(rbx, offset, result_register(), rcx);
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
} else if (mode == Variable::CONST || mode == Variable::LET) {
+ Comment cmnt(masm_, "[ Declaration");
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
- __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
+ __ movq(ContextOperand(rsi, variable->index()), kScratchRegister);
// No write barrier since the hole value is in old space.
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
}
break;
- case Slot::LOOKUP: {
+ case Variable::LOOKUP: {
+ Comment cmnt(masm_, "[ Declaration");
__ push(rsi);
__ Push(variable->name());
- // Declaration nodes are always introduced in one of two modes.
+ // Declaration nodes are always introduced in one of three modes.
ASSERT(mode == Variable::VAR ||
mode == Variable::CONST ||
mode == Variable::LET);
} else if (mode == Variable::CONST || mode == Variable::LET) {
__ PushRoot(Heap::kTheHoleValueRootIndex);
} else {
- __ Push(Smi::FromInt(0)); // no initial value!
+ __ Push(Smi::FromInt(0)); // Indicates no initial value.
}
__ CallRuntime(Runtime::kDeclareContextSlot, 4);
break;
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
- EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
-}
+void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
}
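// Note (editorial inference, not stated in this hunk): VisitDeclaration is
// now empty because the declarations pass presumably calls EmitDeclaration
// directly, accumulating *global_count for UNALLOCATED variables and then
// materializing all global declarations with a single DeclareGlobals(pairs)
// call rather than one runtime call per declaration.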
-void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
- Slot* slot,
- TypeofState typeof_state,
- Label* slow) {
+void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
+ TypeofState typeof_state,
+ Label* slow) {
Register context = rsi;
Register temp = rdx;
// All extension objects were empty and it is safe to use a global
// load IC call.
__ movq(rax, GlobalObjectOperand());
- __ Move(rcx, slot->var()->name());
+ __ Move(rcx, var->name());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
}
-MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
- Slot* slot,
- Label* slow) {
- ASSERT(slot->type() == Slot::CONTEXT);
+MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
+ Label* slow) {
+ ASSERT(var->IsContextSlot());
Register context = rsi;
Register temp = rbx;
- for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
+ for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
if (s->num_heap_slots() > 0) {
if (s->calls_eval()) {
// Check that extension is NULL.
// This function is used only for loads, not stores, so it's safe to
// return an rsi-based operand (the write barrier cannot be allowed to
// destroy the rsi register).
- return ContextOperand(context, slot->index());
+ return ContextOperand(context, var->index());
}
-void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
- Slot* slot,
- TypeofState typeof_state,
- Label* slow,
- Label* done) {
+void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
+ TypeofState typeof_state,
+ Label* slow,
+ Label* done) {
// Generate fast-case code for variables that might be shadowed by
// eval-introduced variables. Eval is used a lot without
// introducing variables. In those cases, we do not want to
// perform a runtime call for all variables in the scope
// containing the eval.
- if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
- EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
+ if (var->mode() == Variable::DYNAMIC_GLOBAL) {
+ EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
__ jmp(done);
- } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
- Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
- Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
- if (potential_slot != NULL) {
- // Generate fast case for locals that rewrite to slots.
- __ movq(rax,
- ContextSlotOperandCheckExtensions(potential_slot, slow));
- if (potential_slot->var()->mode() == Variable::CONST) {
- __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
- __ j(not_equal, done);
- __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
- }
- __ jmp(done);
- } else if (rewrite != NULL) {
- // Generate fast case for calls of an argument function.
- Property* property = rewrite->AsProperty();
- if (property != NULL) {
- VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
- Literal* key_literal = property->key()->AsLiteral();
- if (obj_proxy != NULL &&
- key_literal != NULL &&
- obj_proxy->IsArguments() &&
- key_literal->handle()->IsSmi()) {
- // Load arguments object if there are no eval-introduced
- // variables. Then load the argument from the arguments
- // object using keyed load.
- __ movq(rdx,
- ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
- slow));
- __ Move(rax, key_literal->handle());
- Handle<Code> ic =
- isolate()->builtins()->KeyedLoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
- __ jmp(done);
- }
- }
+ } else if (var->mode() == Variable::DYNAMIC_LOCAL) {
+ Variable* local = var->local_if_not_shadowed();
+ __ movq(rax, ContextSlotOperandCheckExtensions(local, slow));
+ if (local->mode() == Variable::CONST) {
+ __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, done);
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}
+ __ jmp(done);
}
}
SetSourcePosition(proxy->position());
Variable* var = proxy->var();
- // Three cases: non-this global variables, lookup slots, and all other
- // types of slots.
- Slot* slot = var->AsSlot();
- ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
-
- if (slot == NULL) {
- Comment cmnt(masm_, "Global variable");
- // Use inline caching. Variable name is passed in rcx and the global
- // object on the stack.
- __ Move(rcx, var->name());
- __ movq(rax, GlobalObjectOperand());
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
- context()->Plug(rax);
-
- } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
- Label done, slow;
-
- // Generate code for loading from variables potentially shadowed
- // by eval-introduced variables.
- EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
-
- __ bind(&slow);
- Comment cmnt(masm_, "Lookup slot");
- __ push(rsi); // Context.
- __ Push(var->name());
- __ CallRuntime(Runtime::kLoadContextSlot, 2);
- __ bind(&done);
+ // Three cases: global variables, lookup variables, and all other types of
+ // variables.
+ switch (var->location()) {
+ case Variable::UNALLOCATED: {
+ Comment cmnt(masm_, "Global variable");
+ // Use inline caching. Variable name is passed in rcx and the global
+ // object on the stack.
+ __ Move(rcx, var->name());
+ __ movq(rax, GlobalObjectOperand());
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ context()->Plug(rax);
+ break;
+ }
- context()->Plug(rax);
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
+ if (var->mode() != Variable::LET && var->mode() != Variable::CONST) {
+ context()->Plug(var);
+ } else {
+ // Let and const need a read barrier.
+ Label done;
+ GetVar(rax, var);
+ __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, &done, Label::kNear);
+ if (var->mode() == Variable::LET) {
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ } else { // Variable::CONST
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
+ }
+ __ bind(&done);
+ context()->Plug(rax);
+ }
+ break;
+ }
- } else {
- Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
- ? "Context slot"
- : "Stack slot");
- if (var->mode() == Variable::CONST) {
- // Constants may be the hole value if they have not been initialized.
- // Unhole them.
- Label done;
- MemOperand slot_operand = EmitSlotSearch(slot, rax);
- __ movq(rax, slot_operand);
- __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
- __ j(not_equal, &done, Label::kNear);
- __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
- __ bind(&done);
- context()->Plug(rax);
- } else if (var->mode() == Variable::LET) {
- // Let bindings may be the hole value if they have not been initialized.
- // Throw a type error in this case.
- Label done;
- MemOperand slot_operand = EmitSlotSearch(slot, rax);
- __ movq(rax, slot_operand);
- __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
- __ j(not_equal, &done, Label::kNear);
+ case Variable::LOOKUP: {
+ Label done, slow;
+ // Generate code for loading from variables potentially shadowed
+ // by eval-introduced variables.
+ EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
+ __ bind(&slow);
+ Comment cmnt(masm_, "Lookup slot");
+ __ push(rsi); // Context.
__ Push(var->name());
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ CallRuntime(Runtime::kLoadContextSlot, 2);
__ bind(&done);
context()->Plug(rax);
- } else {
- context()->Plug(slot);
+ break;
}
}
}
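// Illustrative JavaScript-level effect of the read barrier above (sketch):
// reading a let-bound variable before its initializing assignment finds the
// hole and throws a ReferenceError, e.g.
//
//   { x;          // throws: 'x' still holds the hole
//     let x = 1; }
//
// while reading an uninitialized const finds the hole and yields undefined.
// Plain var, parameter, and context reads skip the barrier and Plug the
// variable directly.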
FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
__ movq(rbx, FieldOperand(rcx, literal_offset));
__ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
- __ j(not_equal, &materialized);
+ __ j(not_equal, &materialized, Label::kNear);
// Create regexp literal using runtime function
// Result will be in rax.
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
- ASSERT(var != NULL);
- ASSERT(var->is_global() || var->AsSlot() != NULL);
-
- if (var->is_global()) {
- ASSERT(!var->is_this());
- // Assignment to a global variable. Use inline caching for the
- // assignment. Right-hand-side value is passed in rax, variable name in
- // rcx, and the global object on the stack.
+ if (var->IsUnallocated()) {
+ // Global var, const, or let.
__ Move(rcx, var->name());
__ movq(rdx, GlobalObjectOperand());
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
__ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
-
} else if (op == Token::INIT_CONST) {
- // Like var declarations, const declarations are hoisted to function
- // scope. However, unlike var initializers, const initializers are able
- // to drill a hole to that function context, even from inside a 'with'
- // context. We thus bypass the normal static scope lookup.
- Slot* slot = var->AsSlot();
- Label skip;
- switch (slot->type()) {
- case Slot::PARAMETER:
- // No const parameters.
- UNREACHABLE();
- break;
- case Slot::LOCAL:
- __ movq(rdx, Operand(rbp, SlotOffset(slot)));
- __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
- __ j(not_equal, &skip);
- __ movq(Operand(rbp, SlotOffset(slot)), rax);
- break;
- case Slot::CONTEXT:
- case Slot::LOOKUP:
- __ push(rax);
- __ push(rsi);
- __ Push(var->name());
- __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
- break;
+ // Const initializers need a write barrier.
+ ASSERT(!var->IsParameter()); // No const parameters.
+ if (var->IsStackLocal()) {
+ Label skip;
+ __ movq(rdx, StackOperand(var));
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, &skip);
+ __ movq(StackOperand(var), rax);
+ __ bind(&skip);
+ } else {
+ ASSERT(var->IsContextSlot() || var->IsLookupSlot());
+ // Like var declarations, const declarations are hoisted to function
+ // scope. However, unlike var initializers, const initializers are
+ // able to drill a hole to that function context, even from inside a
+ // 'with' context. We thus bypass the normal static scope lookup for
+ // var->IsContextSlot().
+ __ push(rax);
+ __ push(rsi);
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
}
- __ bind(&skip);
} else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
- // Perform the assignment for non-const variables. Const assignments
- // are simply skipped.
- Slot* slot = var->AsSlot();
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL: {
- Label assign;
- // Check for an initialized let binding.
- __ movq(rdx, Operand(rbp, SlotOffset(slot)));
- __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
- __ j(not_equal, &assign);
- __ Push(var->name());
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
- // Perform the assignment.
- __ bind(&assign);
- __ movq(Operand(rbp, SlotOffset(slot)), rax);
- break;
- }
-
- case Slot::CONTEXT: {
- // Let variables may be the hole value if they have not been
- // initialized. Throw a type error in this case.
- Label assign;
- MemOperand target = EmitSlotSearch(slot, rcx);
- // Check for an initialized let binding.
- __ movq(rdx, target);
- __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
- __ j(not_equal, &assign, Label::kNear);
- __ Push(var->name());
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
- // Perform the assignment.
- __ bind(&assign);
- __ movq(target, rax);
- // The value of the assignment is in eax. RecordWrite clobbers its
- // register arguments.
+ // Non-initializing assignment to let variable needs a write barrier.
+ if (var->IsLookupSlot()) {
+ __ push(rax); // Value.
+ __ push(rsi); // Context.
+ __ Push(var->name());
+ __ Push(Smi::FromInt(strict_mode_flag()));
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
+ } else {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
+ Label assign;
+ MemOperand location = VarOperand(var, rcx);
+ __ movq(rdx, location);
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, &assign, Label::kNear);
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ bind(&assign);
+ __ movq(location, rax);
+ if (var->IsContextSlot()) {
__ movq(rdx, rax);
- int offset = Context::SlotOffset(slot->index());
- __ RecordWrite(rcx, offset, rdx, rbx);
- break;
+ __ RecordWrite(rcx, Context::SlotOffset(var->index()), rdx, rbx);
}
-
- case Slot::LOOKUP:
- // Call the runtime for the assignment.
- __ push(rax); // Value.
- __ push(rsi); // Context.
- __ Push(var->name());
- __ Push(Smi::FromInt(strict_mode_flag()));
- __ CallRuntime(Runtime::kStoreContextSlot, 4);
- break;
}
- } else if (var->mode() != Variable::CONST) {
- // Perform the assignment for non-const variables. Const assignments
- // are simply skipped.
- Slot* slot = var->AsSlot();
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- if (FLAG_debug_code && op == Token::INIT_LET) {
- // Check for an uninitialized let binding.
- __ movq(rdx, Operand(rbp, SlotOffset(slot)));
- __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
- __ Check(equal, "Let binding re-initialization.");
- }
- // Perform the assignment.
- __ movq(Operand(rbp, SlotOffset(slot)), rax);
- break;
- case Slot::CONTEXT: {
- MemOperand target = EmitSlotSearch(slot, rcx);
- if (FLAG_debug_code && op == Token::INIT_LET) {
- // Check for an uninitialized let binding.
- __ movq(rdx, target);
- __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
- __ Check(equal, "Let binding re-initialization.");
- }
- // Perform the assignment and issue the write barrier.
- __ movq(target, rax);
- // The value of the assignment is in rax. RecordWrite clobbers its
- // register arguments.
+ } else if (var->mode() != Variable::CONST) {
+ // Assignment to var or initializing assignment to let.
+ if (var->IsStackAllocated() || var->IsContextSlot()) {
+ MemOperand location = VarOperand(var, rcx);
+ if (FLAG_debug_code && op == Token::INIT_LET) {
+ // Check for an uninitialized let binding.
+ __ movq(rdx, location);
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
+ __ Check(equal, "Let binding re-initialization.");
+ }
+ // Perform the assignment.
+ __ movq(location, rax);
+ if (var->IsContextSlot()) {
__ movq(rdx, rax);
- int offset = Context::SlotOffset(slot->index());
- __ RecordWrite(rcx, offset, rdx, rbx);
- break;
+ __ RecordWrite(rcx, Context::SlotOffset(var->index()), rdx, rbx);
}
-
- case Slot::LOOKUP:
- ASSERT(op != Token::INIT_LET);
- // Call the runtime for the assignment.
- __ push(rax); // Value.
- __ push(rsi); // Context.
- __ Push(var->name());
- __ Push(Smi::FromInt(strict_mode_flag()));
- __ CallRuntime(Runtime::kStoreContextSlot, 4);
- break;
+ } else {
+ ASSERT(var->IsLookupSlot());
+ __ push(rax); // Value.
+ __ push(rsi); // Context.
+ __ Push(var->name());
+ __ Push(Smi::FromInt(strict_mode_flag()));
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
}
}
+ // Non-initializing assignments to consts are ignored.
}
#endif
Comment cmnt(masm_, "[ Call");
- Expression* fun = expr->expression();
- Variable* var = fun->AsVariableProxy()->AsVariable();
+ Expression* callee = expr->expression();
+ VariableProxy* proxy = callee->AsVariableProxy();
+ Property* property = callee->AsProperty();
- if (var != NULL && var->is_possibly_eval()) {
+ if (proxy != NULL && proxy->var()->is_possibly_eval()) {
// In a call to eval, we first call %ResolvePossiblyDirectEval to
- // resolve the function we need to call and the receiver of the
- // call. Then we call the resolved function using the given
- // arguments.
+ // resolve the function we need to call and the receiver of the call.
+ // Then we call the resolved function using the given arguments.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
{ PreservePositionScope pos_scope(masm()->positions_recorder());
- VisitForStackValue(fun);
+ VisitForStackValue(callee);
__ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
// Push the arguments.
}
// If we know that eval can only be shadowed by eval-introduced
- // variables we attempt to load the global eval function directly
- // in generated code. If we succeed, there is no need to perform a
+ // variables we attempt to load the global eval function directly in
+ // generated code. If we succeed, there is no need to perform a
// context lookup in the runtime system.
Label done;
- if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+ Variable* var = proxy->var();
+ if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) {
Label slow;
- EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
- NOT_INSIDE_TYPEOF,
- &slow);
+ EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow);
// Push the function and resolve eval.
__ push(rax);
EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
__ bind(&slow);
}
- // Push copy of the function (found below the arguments) and
- // resolve eval.
+ // Push a copy of the function (found below the arguments) and resolve
+ // eval.
__ push(Operand(rsp, (arg_count + 1) * kPointerSize));
EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
- if (done.is_linked()) {
- __ bind(&done);
- }
+ __ bind(&done);
// The runtime call returns a pair of values in rax (function) and
// rdx (receiver). Touch up the stack with the right values.
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, rax);
- } else if (var != NULL && !var->is_this() && var->is_global()) {
- // Call to a global variable.
- // Push global object as receiver for the call IC lookup.
+ } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
+ // Call to a global variable. Push global object as receiver for the
+ // call IC lookup.
__ push(GlobalObjectOperand());
- EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
- } else if (var != NULL && var->AsSlot() != NULL &&
- var->AsSlot()->type() == Slot::LOOKUP) {
+ EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
+ } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
// Call to a lookup slot (dynamically introduced variable).
Label slow, done;
{ PreservePositionScope scope(masm()->positions_recorder());
- // Generate code for loading from variables potentially shadowed
- // by eval-introduced variables.
- EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
- NOT_INSIDE_TYPEOF,
- &slow,
- &done);
-
- __ bind(&slow);
+ // Generate code for loading from variables potentially shadowed by
+ // eval-introduced variables.
+ EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
}
- // Call the runtime to find the function to call (returned in rax)
- // and the object holding it (returned in rdx).
+ __ bind(&slow);
+ // Call the runtime to find the function to call (returned in rax) and
+ // the object holding it (returned in rdx).
__ push(context_register());
- __ Push(var->name());
+ __ Push(proxy->name());
__ CallRuntime(Runtime::kLoadContextSlot, 2);
__ push(rax); // Function.
__ push(rdx); // Receiver.
- // If fast case code has been generated, emit code to push the
- // function and receiver and have the slow path jump around this
- // code.
+ // If fast case code has been generated, emit code to push the function
+ // and receiver and have the slow path jump around this code.
if (done.is_linked()) {
Label call;
__ jmp(&call, Label::kNear);
__ bind(&done);
// Push function.
__ push(rax);
- // The receiver is implicitly the global receiver. Indicate this
- // by passing the hole to the call function stub.
+ // The receiver is implicitly the global receiver. Indicate this by
+ // passing the hole to the call function stub.
__ PushRoot(Heap::kTheHoleValueRootIndex);
__ bind(&call);
}
- // The receiver is either the global receiver or an object found
- // by LoadContextSlot. That object could be the hole if the
- // receiver is implicitly the global object.
+ // The receiver is either the global receiver or an object found by
+ // LoadContextSlot. That object could be the hole if the receiver is
+ // implicitly the global object.
EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
- } else if (fun->AsProperty() != NULL) {
- // Call to an object property.
- Property* prop = fun->AsProperty();
- Literal* key = prop->key()->AsLiteral();
- if (key != NULL && key->handle()->IsSymbol()) {
- // Call to a named property, use call IC.
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
+ } else if (property != NULL) {
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(property->obj());
+ }
+ if (property->key()->IsPropertyName()) {
+ EmitCallWithIC(expr,
+ property->key()->AsLiteral()->handle(),
+ RelocInfo::CODE_TARGET);
} else {
- // Call to a keyed property.
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitKeyedCallWithIC(expr, prop->key());
+ EmitKeyedCallWithIC(expr, property->key());
}
} else {
+ // Call to an arbitrary expression not handled specially above.
{ PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(fun);
+ VisitForStackValue(callee);
}
// Load global receiver object.
__ movq(rbx, GlobalObjectOperand());
switch (expr->op()) {
case Token::DELETE: {
Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
- Property* prop = expr->expression()->AsProperty();
- Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
+ Property* property = expr->expression()->AsProperty();
+ VariableProxy* proxy = expr->expression()->AsVariableProxy();
- if (prop != NULL) {
- VisitForStackValue(prop->obj());
- VisitForStackValue(prop->key());
+ if (property != NULL) {
+ VisitForStackValue(property->obj());
+ VisitForStackValue(property->key());
__ Push(Smi::FromInt(strict_mode_flag()));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(rax);
- } else if (var != NULL) {
+ } else if (proxy != NULL) {
+ Variable* var = proxy->var();
// Delete of an unqualified identifier is disallowed in strict mode
- // but "delete this" is.
+ // but "delete this" is allowed.
ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
- if (var->is_global()) {
+ if (var->IsUnallocated()) {
__ push(GlobalObjectOperand());
__ Push(var->name());
__ Push(Smi::FromInt(kNonStrictMode));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(rax);
- } else if (var->AsSlot() != NULL &&
- var->AsSlot()->type() != Slot::LOOKUP) {
- // Result of deleting non-global, non-dynamic variables is false.
- // The subexpression does not have side effects.
- context()->Plug(false);
+ } else if (var->IsStackAllocated() || var->IsContextSlot()) {
+ // Result of deleting non-global variables is false; 'this' is not
+ // really a variable, though we implement it as one, so deleting it
+ // yields true. The subexpression does not have side effects.
+ context()->Plug(var->is_this());
} else {
// Non-global variable. Call the runtime to try to delete from the
// context where the variable was introduced.
ASSERT(!context()->IsEffect());
ASSERT(!context()->IsTest());
- if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
+ if (proxy != NULL && proxy->var()->IsUnallocated()) {
Comment cmnt(masm_, "Global variable");
__ Move(rcx, proxy->name());
__ movq(rax, GlobalObjectOperand());
__ call(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(rax);
- } else if (proxy != NULL &&
- proxy->var()->AsSlot() != NULL &&
- proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
+ } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
Label done, slow;
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
- Slot* slot = proxy->var()->AsSlot();
- EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
+ EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
__ bind(&slow);
__ push(rsi);
// Copy any necessary parameters into the context.
int num_parameters = scope()->num_parameters();
for (int i = 0; i < num_parameters; i++) {
- Slot* slot = scope()->parameter(i)->AsSlot();
- if (slot != NULL && slot->type() == Slot::CONTEXT) {
+ Variable* var = scope()->parameter(i);
+ if (var->IsContextSlot()) {
int parameter_offset = StandardFrameConstants::kCallerSPOffset +
(num_parameters - 1 - i) * kPointerSize;
// Load parameter from stack.
__ movq(rax, Operand(rbp, parameter_offset));
// Store it in the context.
- int context_offset = Context::SlotOffset(slot->index());
+ int context_offset = Context::SlotOffset(var->index());
__ movq(Operand(rsi, context_offset), rax);
// Update the write barrier. This clobbers all involved
// registers, so we have to use a third register to avoid
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
assertEquals(3, (function (x) { return ((delete x) || 2) + 1; })(0));
-// 'this' at toplevel is different from all other global variables---not
-// deletable.
+// 'this' is not a Reference so delete returns true (see section 11.4.1,
+// step 2 of ES 5.1).
assertEquals(true, delete this);
+assertEquals(true, (function () { return delete this; })());