void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
+ ASSERT(!destination().is(no_reg));
ASSERT(cell->IsJSGlobalPropertyCell());
- __ mov(accumulator0(), Operand(cell));
- __ ldr(accumulator0(),
- FieldMemOperand(accumulator0(), JSGlobalPropertyCell::kValueOffset));
+
+ __ mov(destination(), Operand(cell));
+ __ ldr(destination(),
+ FieldMemOperand(destination(), JSGlobalPropertyCell::kValueOffset));
if (FLAG_debug_code) {
__ mov(ip, Operand(Factory::the_hole_value()));
- __ cmp(accumulator0(), ip);
+ __ cmp(destination(), ip);
__ Check(ne, "DontDelete cells can't contain the hole");
}
}
__ str(accumulator0(), FieldMemOperand(scratch0(), offset));
__ mov(scratch1(), Operand(offset));
__ RecordWrite(scratch0(), scratch1(), ip);
+ if (destination().is(accumulator1())) {
+ __ mov(accumulator1(), accumulator0());
+ }
+}
+
+
+void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
+ ASSERT(!destination().is(no_reg));
+ LookupResult lookup;
+ info()->receiver()->Lookup(*name, &lookup);
+
+ ASSERT(lookup.holder() == *info()->receiver());
+ ASSERT(lookup.type() == FIELD);
+ Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
+ int index = lookup.GetFieldIndex() - map->inobject_properties();
+ int offset = index * kPointerSize;
+
+ // Perform the load. Negative offsets are inobject properties.
+ if (offset < 0) {
+ offset += map->instance_size();
+ __ ldr(destination(), FieldMemOperand(receiver_reg(), offset));
+ } else {
+ offset += FixedArray::kHeaderSize;
+ __ ldr(scratch0(),
+ FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset));
+ __ ldr(destination(), FieldMemOperand(scratch0(), offset));
+ }
+}
+
+
+void FastCodeGenerator::EmitBitOr() {
+ Register check; // A register is used for the smi check/operation.
+ if (destination().is(no_reg)) {
+ check = scratch0(); // Do not clobber either operand register.
+ } else {
+ // Preserve whichever operand shares the destination register in case we
+ // have to bail out.
+ __ mov(scratch0(), destination());
+ check = destination();
+ }
+ __ orr(check, accumulator1(), Operand(accumulator0()));
+ // Restore the clobbered operand if necessary.
+ if (destination().is(no_reg)) {
+ __ BranchOnNotSmi(check, bailout());
+ } else {
+ Label done;
+ __ BranchOnSmi(check, &done);
+ __ mov(destination(), scratch0());
+ __ jmp(bailout());
+ __ bind(&done);
+ }
}
// names because [] for string objects is handled only by keyed ICs.
virtual bool IsPropertyName() { return false; }
+ // True if the expression does not have (evaluated) subexpressions.
+ // Function literals are leaves because their subexpressions are not
+ // evaluated.
+ virtual bool IsLeaf() { return false; }
+
// Mark the expression as being compiled as an expression
// statement. This is used to transform postfix increments to
// (faster) prefix increments.
return false;
}
+ virtual bool IsLeaf() { return true; }
+
// Identity testers.
bool IsNull() const { return handle_.is_identical_to(Factory::null_value()); }
bool IsTrue() const { return handle_.is_identical_to(Factory::true_value()); }
virtual ObjectLiteral* AsObjectLiteral() { return this; }
virtual void Accept(AstVisitor* v);
+ virtual bool IsLeaf() { return properties()->is_empty(); }
+
Handle<FixedArray> constant_properties() const {
return constant_properties_;
}
virtual void Accept(AstVisitor* v);
+ virtual bool IsLeaf() { return true; }
+
Handle<String> pattern() const { return pattern_; }
Handle<String> flags() const { return flags_; }
virtual void Accept(AstVisitor* v);
virtual ArrayLiteral* AsArrayLiteral() { return this; }
+ virtual bool IsLeaf() { return values()->is_empty(); }
+
Handle<FixedArray> constant_elements() const { return constant_elements_; }
ZoneList<Expression*>* values() const { return values_; }
return var_ == NULL ? true : var_->IsValidLeftHandSide();
}
+ virtual bool IsLeaf() {
+ ASSERT(var_ != NULL); // Variable must be resolved.
+ return var()->is_global() || var()->rewrite()->IsLeaf();
+ }
+
bool IsVariable(Handle<String> n) {
return !is_this() && name().is_identical_to(n);
}
// Type testing & conversion
virtual Slot* AsSlot() { return this; }
+ virtual bool IsLeaf() { return true; }
+
// Accessors
Variable* var() const { return var_; }
Type type() const { return type_; }
// Type testing & conversion
virtual FunctionLiteral* AsFunctionLiteral() { return this; }
+ virtual bool IsLeaf() { return true; }
+
Handle<String> name() const { return name_; }
Scope* scope() const { return scope_; }
ZoneList<Statement*>* body() const { return body_; }
Handle<JSFunction> boilerplate() const { return boilerplate_; }
+ virtual bool IsLeaf() { return true; }
+
virtual void Accept(AstVisitor* v);
private:
class ThisFunction: public Expression {
public:
virtual void Accept(AstVisitor* v);
+ virtual bool IsLeaf() { return true; }
};
void AstLabeler::VisitAssignment(Assignment* expr) {
Property* prop = expr->target()->AsProperty();
ASSERT(prop != NULL);
- if (prop != NULL) {
- ASSERT(prop->key()->IsPropertyName());
- VariableProxy* proxy = prop->obj()->AsVariableProxy();
- if (proxy != NULL && proxy->var()->is_this()) {
- info()->set_has_this_properties(true);
- } else {
- Visit(prop->obj());
- }
- }
+ ASSERT(prop->key()->IsPropertyName());
+ VariableProxy* proxy = prop->obj()->AsVariableProxy();
+ USE(proxy);
+ ASSERT(proxy != NULL && proxy->var()->is_this());
+ info()->set_has_this_properties(true);
Visit(expr->value());
expr->set_num(next_number_++);
}
void AstLabeler::VisitProperty(Property* expr) {
- UNREACHABLE();
+ ASSERT(expr->key()->IsPropertyName());
+ VariableProxy* proxy = expr->obj()->AsVariableProxy();
+ USE(proxy);
+ ASSERT(proxy != NULL && proxy->var()->is_this());
+ info()->set_has_this_properties(true);
+ expr->set_num(next_number_++);
}
void FastCodeGenSyntaxChecker::VisitProperty(Property* expr) {
-  BAILOUT("Property");
+  // We support named this property references.
+  VariableProxy* proxy = expr->obj()->AsVariableProxy();
+  if (proxy == NULL || !proxy->var()->is_this()) {
+    BAILOUT("Non-this-property reference");
+  }
+  if (!expr->key()->IsPropertyName()) {
+    BAILOUT("Non-named-property reference");
+  }
+
+  // We will only specialize for fields on the object itself.
+  // Expression::IsPropertyName implies that the name is a literal
+  // symbol but we do not assume that.
+  Literal* key = expr->key()->AsLiteral();
+  if (key != NULL && key->handle()->IsString()) {
+    Handle<Object> receiver = info()->receiver();
+    Handle<String> name = Handle<String>::cast(key->handle());
+    LookupResult lookup;
+    receiver->Lookup(*name, &lookup);
+    if (lookup.holder() != *receiver) BAILOUT("Non-own property reference");
+    // Note: `!lookup.type() == FIELD` would negate the enum value before the
+    // comparison ((type() == 0) == FIELD), so non-field properties could slip
+    // through the check.  Use an explicit inequality test instead.
+    if (lookup.type() != FIELD) BAILOUT("Non-field property reference");
+  } else {
+    UNREACHABLE();
+    BAILOUT("Unexpected non-string-literal property key");
+  }
}
void FastCodeGenSyntaxChecker::VisitBinaryOperation(BinaryOperation* expr) {
- BAILOUT("BinaryOperation");
+ // We support bitwise OR.
+ switch (expr->op()) {
+ case Token::COMMA:
+ BAILOUT("BinaryOperation COMMA");
+ case Token::OR:
+ BAILOUT("BinaryOperation OR");
+ case Token::AND:
+ BAILOUT("BinaryOperation AND");
+
+ case Token::BIT_OR:
+ // We support expressions nested on the left because they only require
+ // a pair of registers to keep all intermediate values in registers
+ // (i.e., the expression stack has height no more than two).
+ if (!expr->right()->IsLeaf()) BAILOUT("expression nested on right");
+ Visit(expr->left());
+ CHECK_BAILOUT;
+ Visit(expr->right());
+ break;
+
+ case Token::BIT_XOR:
+ BAILOUT("BinaryOperation BIT_XOR");
+ case Token::BIT_AND:
+ BAILOUT("BinaryOperation BIT_AND");
+ case Token::SHL:
+ BAILOUT("BinaryOperation SHL");
+ case Token::SAR:
+ BAILOUT("BinaryOperation SAR");
+ case Token::SHR:
+ BAILOUT("BinaryOperation SHR");
+ case Token::ADD:
+ BAILOUT("BinaryOperation ADD");
+ case Token::SUB:
+ BAILOUT("BinaryOperation SUB");
+ case Token::MUL:
+ BAILOUT("BinaryOperation MUL");
+ case Token::DIV:
+ BAILOUT("BinaryOperation DIV");
+ case Token::MOD:
+ BAILOUT("BinaryOperation MOD");
+ default:
+ UNREACHABLE();
+ }
}
void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
ASSERT(expr->var()->is_global() && !expr->var()->is_this());
- Comment cmnt(masm(), ";; Global");
- if (FLAG_print_ir) {
- SmartPointer<char> name = expr->name()->ToCString();
- PrintF("%d: t%d = Global(%s)\n", expr->num(), expr->num(), *name);
- }
-
// Check if we can compile a global variable load directly from the cell.
ASSERT(info()->has_global_object());
LookupResult lookup;
ASSERT(lookup.IsValid());
ASSERT(lookup.IsDontDelete());
Handle<Object> cell(info()->global_object()->GetPropertyCell(&lookup));
- EmitGlobalVariableLoad(cell);
+
+ // Global variable lookups do not have side effects, so we do not need to
+ // emit code if we are in an effect context.
+ if (!destination().is(no_reg)) {
+ Comment cmnt(masm(), ";; Global");
+ if (FLAG_print_ir) {
+ SmartPointer<char> name = expr->name()->ToCString();
+ PrintF("%d: t%d = Global(%s)\n", expr->num(), expr->num(), *name);
+ }
+ EmitGlobalVariableLoad(cell);
+ }
}
void FastCodeGenerator::VisitAssignment(Assignment* expr) {
- // Known to be a simple this property assignment.
- Visit(expr->value());
+ // Known to be a simple this property assignment. Effectively a unary
+ // operation.
+ { Register my_destination = destination();
+ set_destination(accumulator0());
+ Visit(expr->value());
+ set_destination(my_destination);
+ }
Property* prop = expr->target()->AsProperty();
ASSERT_NOT_NULL(prop);
Handle<String> name =
Handle<String>::cast(prop->key()->AsLiteral()->handle());
- Comment cmnt(masm(), ";; Store(this)");
+ Comment cmnt(masm(), ";; Store to this");
if (FLAG_print_ir) {
SmartPointer<char> name_string = name->ToCString();
- PrintF("%d: t%d = Store(this, \"%s\", t%d)\n",
- expr->num(), expr->num(), *name_string, expr->value()->num());
+ PrintF("%d: ", expr->num());
+ if (!destination().is(no_reg)) PrintF("t%d = ", expr->num());
+ PrintF("Store(this, \"%s\", t%d)\n", *name_string, expr->value()->num());
}
EmitThisPropertyStore(name);
void FastCodeGenerator::VisitProperty(Property* expr) {
- UNREACHABLE();
+ ASSERT_NOT_NULL(expr->obj()->AsVariableProxy());
+ ASSERT(expr->obj()->AsVariableProxy()->var()->is_this());
+ ASSERT(expr->key()->IsPropertyName());
+ if (!destination().is(no_reg)) {
+ Handle<String> name =
+ Handle<String>::cast(expr->key()->AsLiteral()->handle());
+
+ Comment cmnt(masm(), ";; Load from this");
+ if (FLAG_print_ir) {
+ SmartPointer<char> name_string = name->ToCString();
+ PrintF("%d: t%d = Load(this, \"%s\")\n",
+ expr->num(), expr->num(), *name_string);
+ }
+ EmitThisPropertyLoad(name);
+ }
}
void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
- UNREACHABLE();
+ // We support limited binary operations: bitwise OR only allowed to be
+ // nested on the left.
+ ASSERT(expr->op() == Token::BIT_OR);
+ ASSERT(expr->right()->IsLeaf());
+
+ { Register my_destination = destination();
+ set_destination(accumulator1());
+ Visit(expr->left());
+ set_destination(accumulator0());
+ Visit(expr->right());
+ set_destination(my_destination);
+ }
+
+ Comment cmnt(masm(), ";; BIT_OR");
+ if (FLAG_print_ir) {
+ PrintF("%d: ", expr->num());
+ if (!destination().is(no_reg)) PrintF("t%d = ", expr->num());
+ PrintF("BIT_OR(t%d, t%d)\n", expr->left()->num(), expr->right()->num());
+ }
+ EmitBitOr();
}
class FastCodeGenerator: public AstVisitor {
public:
- explicit FastCodeGenerator(MacroAssembler* masm) : masm_(masm), info_(NULL) {}
+ explicit FastCodeGenerator(MacroAssembler* masm)
+ : masm_(masm), info_(NULL), destination_(no_reg) {
+ }
static Handle<Code> MakeCode(CompilationInfo* info);
CompilationInfo* info() { return info_; }
Label* bailout() { return &bailout_; }
+ Register destination() { return destination_; }
+ void set_destination(Register reg) { destination_ = reg; }
+
FunctionLiteral* function() { return info_->function(); }
Scope* scope() { return info_->scope(); }
Register receiver_reg();
Register context_reg();
+ Register other_accumulator(Register reg) {
+ ASSERT(reg.is(accumulator0()) || reg.is(accumulator1()));
+ return (reg.is(accumulator0())) ? accumulator1() : accumulator0();
+ }
+
// AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
- // Emit code to load the receiver from the stack into the fixed receiver
- // register.
+ // Emit code to load the receiver from the stack into receiver_reg.
void EmitLoadReceiver();
- // Emit code to check that the receiver has the same map as the
- // compile-time receiver. Receiver is expected in {ia32-edx, x64-rdx,
- // arm-r1}. Emit a branch to the (single) bailout label if check fails.
- void EmitReceiverMapCheck();
-
- // Emit code to check that the global object has the same map as the
- // global object seen at compile time.
- void EmitGlobalMapCheck();
-
- // Emit code to load a global variable directly from a global
- // property cell into {ia32-eax, x64-rax, arm-r0}.
+ // Emit code to load a global variable directly from a global property
+ // cell into the destination register.
void EmitGlobalVariableLoad(Handle<Object> cell);
// Emit a store to an own property of this. The stored value is expected
- // in {ia32-eax, x64-rax, arm-r0} and the receiver in {is32-edx, x64-rdx,
- // arm-r1}. Both are preserve.
+ // in accumulator0 and the receiver in receiver_reg. The receiver
+ // register is preserved and the result (the stored value) is left in the
+ // destination register.
void EmitThisPropertyStore(Handle<String> name);
- MacroAssembler* masm_;
+ // Emit a load from an own property of this. The receiver is expected in
+ // receiver_reg. The receiver register is preserved and the result is
+ // left in the destination register.
+ void EmitThisPropertyLoad(Handle<String> name);
- CompilationInfo* info_;
+ // Emit a bitwise or operation. The left operand is in accumulator1 and
+ // the right is in accumulator0. The result should be left in the
+ // destination register.
+ void EmitBitOr();
+ MacroAssembler* masm_;
+ CompilationInfo* info_;
Label bailout_;
+ Register destination_;
DISALLOW_COPY_AND_ASSIGN(FastCodeGenerator);
};
void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
+ ASSERT(!destination().is(no_reg));
ASSERT(cell->IsJSGlobalPropertyCell());
- __ mov(accumulator0(), Immediate(cell));
- __ mov(accumulator0(),
- FieldOperand(accumulator0(), JSGlobalPropertyCell::kValueOffset));
+
+ __ mov(destination(), Immediate(cell));
+ __ mov(destination(),
+ FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset));
if (FLAG_debug_code) {
- __ cmp(accumulator0(), Factory::the_hole_value());
+ __ cmp(destination(), Factory::the_hole_value());
__ Check(not_equal, "DontDelete cells can't contain the hole");
}
}
}
// Perform the store.
__ mov(FieldOperand(scratch0(), offset), accumulator0());
- // Preserve value from write barrier in case it's needed.
- __ mov(accumulator1(), accumulator0());
- // The other accumulator register is available as a scratch register
- // because this is not an AST leaf node.
- __ RecordWrite(scratch0(), offset, accumulator1(), scratch1());
+ if (destination().is(no_reg)) {
+ __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
+ } else {
+ // Copy the value to the other accumulator to preserve a copy from the
+ // write barrier. One of the accumulators is available as a scratch
+ // register.
+ __ mov(accumulator1(), accumulator0());
+ Register value_scratch = other_accumulator(destination());
+ __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
+ }
+}
+
+
+void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
+ ASSERT(!destination().is(no_reg));
+ LookupResult lookup;
+ info()->receiver()->Lookup(*name, &lookup);
+
+ ASSERT(lookup.holder() == *info()->receiver());
+ ASSERT(lookup.type() == FIELD);
+ Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
+ int index = lookup.GetFieldIndex() - map->inobject_properties();
+ int offset = index * kPointerSize;
+
+ // Perform the load. Negative offsets are inobject properties.
+ if (offset < 0) {
+ offset += map->instance_size();
+ __ mov(destination(), FieldOperand(receiver_reg(), offset));
+ } else {
+ offset += FixedArray::kHeaderSize;
+ __ mov(scratch0(),
+ FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
+ __ mov(destination(), FieldOperand(scratch0(), offset));
+ }
+}
+
+
+void FastCodeGenerator::EmitBitOr() {
+ Register copied; // One operand is copied to a scratch register.
+ Register other; // The other is not modified by the operation.
+ Register check; // A register is used for the smi check/operation.
+ if (destination().is(no_reg)) {
+ copied = accumulator1(); // Arbitrary choice of operand to copy.
+ other = accumulator0();
+ check = scratch0(); // Do not clobber either operand register.
+ } else {
+ copied = destination();
+ other = other_accumulator(destination());
+ check = destination();
+ }
+ __ mov(scratch0(), copied);
+ __ or_(check, Operand(other));
+ __ test(check, Immediate(kSmiTagMask));
+
+ // Restore the clobbered operand if necessary.
+ if (destination().is(no_reg)) {
+ __ j(not_zero, bailout(), not_taken);
+ } else {
+ Label done;
+ __ j(zero, &done, taken);
+ __ mov(copied, scratch0());
+ __ jmp(bailout());
+ __ bind(&done);
+ }
}
if (info()->has_this_properties()) {
Comment cmnt(masm(), ";; MapCheck(this)");
if (FLAG_print_ir) {
- PrintF("MapCheck(this)\n");
+ PrintF("#: MapCheck(this)\n");
}
ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
if (info()->has_globals()) {
Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
if (FLAG_print_ir) {
- PrintF("MapCheck(GLOBAL)\n");
+ PrintF("#: MapCheck(GLOBAL)\n");
}
ASSERT(info()->has_global_object());
Handle<Map> map(info()->global_object()->map());
Comment return_cmnt(masm(), ";; Return(<undefined>)");
if (FLAG_print_ir) {
- PrintF("Return(<undefined>)\n");
+ PrintF("#: Return(<undefined>)\n");
}
__ mov(eax, Factory::undefined_value());
__ mov(esp, ebp);
#define __ ACCESS_MASM(masm())
-// Registers rcx, rdi, and r8-r15 are free to use as scratch registers
-// without saving and restoring any other registers.
Register FastCodeGenerator::accumulator0() { return rax; }
Register FastCodeGenerator::accumulator1() { return rdx; }
Register FastCodeGenerator::scratch0() { return rcx; }
void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
+ ASSERT(!destination().is(no_reg));
ASSERT(cell->IsJSGlobalPropertyCell());
- __ Move(accumulator0(), cell);
- __ movq(accumulator0(),
- FieldOperand(accumulator0(), JSGlobalPropertyCell::kValueOffset));
+ __ Move(destination(), cell);
+ __ movq(destination(),
+ FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset));
if (FLAG_debug_code) {
- __ Cmp(accumulator0(), Factory::the_hole_value());
+ __ Cmp(destination(), Factory::the_hole_value());
__ Check(not_equal, "DontDelete cells can't contain the hole");
}
}
}
// Perform the store.
__ movq(FieldOperand(scratch0(), offset), accumulator0());
- // Preserve value from write barrier in case it's needed.
- __ movq(accumulator1(), accumulator0());
- // The other accumulator register is available as a scratch register
- // because this is not an AST leaf node.
- __ RecordWrite(scratch0(), offset, accumulator1(), scratch1());
+ if (destination().is(no_reg)) {
+ __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
+ } else {
+ // Copy the value to the other accumulator to preserve a copy from the
+ // write barrier. One of the accumulators is available as a scratch
+ // register.
+ __ movq(accumulator1(), accumulator0());
+ Register value_scratch = other_accumulator(destination());
+ __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
+ }
+}
+
+
+void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
+ ASSERT(!destination().is(no_reg));
+ LookupResult lookup;
+ info()->receiver()->Lookup(*name, &lookup);
+
+ ASSERT(lookup.holder() == *info()->receiver());
+ ASSERT(lookup.type() == FIELD);
+ Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
+ int index = lookup.GetFieldIndex() - map->inobject_properties();
+ int offset = index * kPointerSize;
+
+ // Perform the load. Negative offsets are inobject properties.
+ if (offset < 0) {
+ offset += map->instance_size();
+ __ movq(destination(), FieldOperand(receiver_reg(), offset));
+ } else {
+ offset += FixedArray::kHeaderSize;
+ __ movq(scratch0(),
+ FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
+ __ movq(destination(), FieldOperand(scratch0(), offset));
+ }
+}
+
+
+void FastCodeGenerator::EmitBitOr() {
+ Register copied; // One operand is copied to a scratch register.
+ Register other; // The other is not modified by the operation.
+ Register check; // A register is used for the smi check/operation.
+ if (destination().is(no_reg)) {
+ copied = accumulator1(); // Arbitrary choice of operand to copy.
+ other = accumulator0();
+ check = scratch0(); // Do not clobber either operand register.
+ } else {
+ copied = destination();
+ other = other_accumulator(destination());
+ check = destination();
+ }
+ __ movq(scratch0(), copied);
+ __ or_(check, other);
+ // Restore the clobbered operand if necessary.
+ if (destination().is(no_reg)) {
+ __ JumpIfNotSmi(check, bailout());
+ } else {
+ Label done;
+ __ JumpIfSmi(check, &done);
+ __ movq(copied, scratch0());
+ __ jmp(bailout());
+ __ bind(&done);
+ }
}
--- /dev/null
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --fast-compiler
+
+// Test references to properties of this.
+function Test() {
+ this.a = 0;
+ this.b = 1;
+ this.c = 2;
+ this.d = 3;
+}
+
+Test.prototype.test0 = function () {
+ this.a = this.b;
+};
+
+Test.prototype.test1 = function() {
+ this.a = this.b = this.c;
+};
+
+Test.prototype.test2 = function() {
+ this.c = this.d;
+ this.b = this.c;
+ this.a = this.b;
+};
+
+var t = new Test();
+
+t.test0();
+assertEquals(1, t.a);
+
+t.test1();
+assertEquals(2, t.a);
+assertEquals(2, t.b);
+
+t.test2();
+assertEquals(3, t.a);
+assertEquals(3, t.b);
+assertEquals(3, t.c);