From: ager@chromium.org Date: Thu, 20 May 2010 13:50:09 +0000 (+0000) Subject: Complete the full code generator on x64. Complete the full code generator on x64. Review URL: http://codereview.chromium.org/2078022 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@4686 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- diff --git a/src/compiler.cc b/src/compiler.cc index 5f37951..27d4835 100755 --- a/src/compiler.cc +++ b/src/compiler.cc @@ -121,7 +121,7 @@ static Handle<Code> MakeCode(Handle<Context> context, CompilationInfo* info) { : (shared->is_toplevel() || shared->try_full_codegen()); bool force_full_compiler = false; -#ifdef V8_TARGET_ARCH_IA32 +#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_X64) // On ia32 the full compiler can compile all code, whereas on the other platforms // the supported constructs are checked by the associated syntax checker. When // --always-full-compiler is used on ia32 the syntax checker is still in diff --git a/src/flag-definitions.h b/src/flag-definitions.h index 0760d44..a96264b 100644 --- a/src/flag-definitions.h +++ b/src/flag-definitions.h @@ -149,7 +149,7 @@ DEFINE_bool(full_compiler, true, "enable dedicated backend for run-once code") DEFINE_bool(fast_compiler, false, "enable speculative optimizing backend") DEFINE_bool(always_full_compiler, false, "try to use the dedicated run-once backend for all code") -#ifdef V8_TARGET_ARCH_IA32 +#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_X64) DEFINE_bool(force_full_compiler, false, "force use of the dedicated run-once backend for all code") #endif diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc index 431f1f3..28b8a34 100644 --- a/src/ia32/codegen-ia32.cc +++ b/src/ia32/codegen-ia32.cc @@ -6163,11 +6163,11 @@ void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) { __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset)); __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset)); __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE); - destination()->false_target()->Branch(less); + destination()->false_target()->Branch(below); __ cmp(map.reg(), LAST_JS_OBJECT_TYPE); obj.Unuse(); map.Unuse(); - destination()->Split(less_equal); + destination()->Split(below_equal); } @@ -6280,7 +6280,7 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) { __ mov(obj.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset)); __ movzx_b(tmp.reg(), FieldOperand(obj.reg(), Map::kInstanceTypeOffset)); __ cmp(tmp.reg(), FIRST_JS_OBJECT_TYPE); - null.Branch(below); // As long as JS_FUNCTION_TYPE is the last instance type and it is // right after LAST_JS_OBJECT_TYPE, we can avoid checking for @@ -6869,7 +6869,7 @@ void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) { // Check that object doesn't require security checks and // has no indexed interceptor.
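A note for readers on the recurring condition-code substitutions in this patch (less -> below, less_equal -> below_equal, greater -> above): instance types are unsigned byte values, so range checks against FIRST_JS_OBJECT_TYPE/LAST_JS_OBJECT_TYPE belong on the unsigned flags. A minimal C++ sketch of the failure mode, with invented type-code values and an int8_t cast to mimic a signed byte-width compare:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint8_t kFirstJSObjectType = 0x70;  // invented values; only the
      const uint8_t t = 0x85;                   // ordering matters here
      // Unsigned compare, i.e. what 'below' tests after a cmp: correct.
      bool below = t < kFirstJSObjectType;                       // false
      // Signed byte compare, i.e. what 'less' tests: 0x85 reads as -123.
      bool less = static_cast<int8_t>(t) <
                  static_cast<int8_t>(kFirstJSObjectType);       // true!
      std::printf("below=%d less=%d\n", below, less);
      return 0;
    }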
__ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg()); - deferred->Branch(less); + deferred->Branch(below); __ movzx_b(tmp1.reg(), FieldOperand(tmp1.reg(), Map::kBitFieldOffset)); __ test(tmp1.reg(), Immediate(KeyedLoadIC::kSlowCaseBitFieldMask)); deferred->Branch(not_zero); @@ -8185,11 +8185,11 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) { __ mov(map.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset)); __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE); - destination()->false_target()->Branch(less); + destination()->false_target()->Branch(below); __ cmp(map.reg(), LAST_JS_OBJECT_TYPE); answer.Unuse(); map.Unuse(); - destination()->Split(less_equal); + destination()->Split(below_equal); } else { // Uncommon case: typeof testing against a string literal that is // never returned from the typeof operator. @@ -11586,7 +11586,7 @@ void CompareStub::Generate(MacroAssembler* masm) { ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); Label first_non_object; __ cmp(ecx, FIRST_JS_OBJECT_TYPE); - __ j(less, &first_non_object); + __ j(below, &first_non_object); // Return non-zero (eax is not zero) Label return_not_equal; @@ -11603,7 +11603,7 @@ void CompareStub::Generate(MacroAssembler* masm) { __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); __ cmp(ecx, FIRST_JS_OBJECT_TYPE); - __ j(greater_equal, &return_not_equal); + __ j(above_equal, &return_not_equal); // Check for oddballs: true, false, null, undefined. __ cmp(ecx, ODDBALL_TYPE); @@ -12251,9 +12251,9 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); // eax - object map __ movzx_b(ecx, FieldOperand(eax, Map::kInstanceTypeOffset)); // ecx - type __ cmp(ecx, FIRST_JS_OBJECT_TYPE); - __ j(less, &slow, not_taken); + __ j(below, &slow, not_taken); __ cmp(ecx, LAST_JS_OBJECT_TYPE); - __ j(greater, &slow, not_taken); + __ j(above, &slow, not_taken); // Get the prototype of the function. __ mov(edx, Operand(esp, 1 * kPointerSize)); // 1 ~ return address @@ -12281,9 +12281,9 @@ void InstanceofStub::Generate(MacroAssembler* masm) { __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset)); __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); __ cmp(ecx, FIRST_JS_OBJECT_TYPE); - __ j(less, &slow, not_taken); + __ j(below, &slow, not_taken); __ cmp(ecx, LAST_JS_OBJECT_TYPE); - __ j(greater, &slow, not_taken); + __ j(above, &slow, not_taken); // Register mapping: // eax is object map. diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc index b894064..329f13d 100644 --- a/src/ia32/full-codegen-ia32.cc +++ b/src/ia32/full-codegen-ia32.cc @@ -806,8 +806,8 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable, __ Check(equal, "Unexpected declaration in current context."); } if (mode == Variable::CONST) { - __ mov(eax, Immediate(Factory::the_hole_value())); - __ mov(CodeGenerator::ContextOperand(esi, slot->index()), eax); + __ mov(CodeGenerator::ContextOperand(esi, slot->index()), + Immediate(Factory::the_hole_value())); // No write barrier since the hole value is in old space. } else if (function != NULL) { VisitForValue(function, kAccumulator); @@ -823,10 +823,8 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable, __ push(esi); __ push(Immediate(variable->name())); // Declaration nodes are always introduced in one of two modes. - ASSERT(mode == Variable::VAR || - mode == Variable::CONST); - PropertyAttributes attr = - (mode == Variable::VAR) ? 
NONE : READ_ONLY; + ASSERT(mode == Variable::VAR || mode == Variable::CONST); + PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY; __ push(Immediate(Smi::FromInt(attr))); // Push initial value, if any. // Note: For variables we must not push an initial value (such as @@ -1068,8 +1066,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { __ StackLimitCheck(&stack_limit_hit); __ bind(&stack_check_done); - // Generate code for the going to the next element by incrementing - // the index (smi) stored on top of the stack. + // Generate code for going to the next element by incrementing the + // index (smi) stored on top of the stack. __ bind(loop_statement.continue_target()); __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1))); __ jmp(&loop); @@ -2031,9 +2029,9 @@ void FullCodeGenerator::EmitIsObject(ZoneList* args) { __ j(not_zero, if_false); __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset)); __ cmp(ecx, FIRST_JS_OBJECT_TYPE); - __ j(less, if_false); + __ j(below, if_false); __ cmp(ecx, LAST_JS_OBJECT_TYPE); - __ j(less_equal, if_true); + __ j(below_equal, if_true); __ jmp(if_false); Apply(context_, if_true, if_false); @@ -2225,7 +2223,7 @@ void FullCodeGenerator::EmitClassOf(ZoneList* args) { __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); __ movzx_b(ebx, FieldOperand(eax, Map::kInstanceTypeOffset)); __ cmp(ebx, FIRST_JS_OBJECT_TYPE); - __ j(less, &null); + __ j(below, &null); // As long as JS_FUNCTION_TYPE is the last instance type and it is // right after LAST_JS_OBJECT_TYPE, we can avoid checking for diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc index f35289a..0dbd1b2 100644 --- a/src/x64/codegen-x64.cc +++ b/src/x64/codegen-x64.cc @@ -3840,11 +3840,13 @@ void CodeGenerator::GenerateIsObject(ZoneList* args) { __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset), Immediate(1 << Map::kIsUndetectable)); destination()->false_target()->Branch(not_zero); - __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE); - destination()->false_target()->Branch(less); - __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE); + __ movzxbq(kScratchRegister, + FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); + __ cmpq(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE)); + destination()->false_target()->Branch(below); + __ cmpq(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE)); obj.Unuse(); - destination()->Split(less_equal); + destination()->Split(below_equal); } @@ -4336,7 +4338,7 @@ void CodeGenerator::GenerateRandomHeapNumber( __ PrepareCallCFunction(0); __ CallCFunction(ExternalReference::random_uint32_function(), 0); - // Convert 32 random bits in eax to 0.(32 random bits) in a double + // Convert 32 random bits in rax to 0.(32 random bits) in a double // by computing: // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single. diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc index 8b9358e..d090485 100644 --- a/src/x64/full-codegen-x64.cc +++ b/src/x64/full-codegen-x64.cc @@ -81,11 +81,17 @@ void FullCodeGenerator::Generate(CompilationInfo* info, Mode mode) { bool function_in_register = true; // Possibly allocate a local context. - if (scope()->num_heap_slots() > 0) { + int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; + if (heap_slots > 0) { Comment cmnt(masm_, "[ Allocate local context"); // Argument to NewContext is the function, which is still in rdi. 
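The GenerateRandomHeapNumber comment above compresses a nice bit trick. In scalar C++ (a sketch, not V8 code): splice 32 random bits into the low half of the mantissa of 1.0 x 2^20, then subtract 2^20; what remains is 0.(32 random bits), uniformly distributed in [0, 1):

    #include <cstdint>
    #include <cstring>

    double RandomBitsToDouble(uint32_t bits) {
      // Bit pattern of 1.0 x 2^20: biased exponent 1023 + 20, mantissa 0.
      const uint64_t kOnePow20 = static_cast<uint64_t>(1023 + 20) << 52;
      // The random bits become the low 32 mantissa bits (same effect as
      // the xorpd in the generated code, since the bit ranges are disjoint).
      uint64_t pattern = kOnePow20 | bits;
      double d;
      std::memcpy(&d, &pattern, sizeof d);
      return d - 1048576.0;  // subtract 1.0 x 2^20, leaving bits / 2^32
    }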
__ push(rdi); - __ CallRuntime(Runtime::kNewContext, 1); + if (heap_slots <= FastNewContextStub::kMaximumSlots) { + FastNewContextStub stub(heap_slots); + __ CallStub(&stub); + } else { + __ CallRuntime(Runtime::kNewContext, 1); + } function_in_register = false; // Context is returned in both rax and rsi. It replaces the context // passed to us. It's saved in the stack and kept live in rsi. @@ -145,7 +151,18 @@ void FullCodeGenerator::Generate(CompilationInfo* info, Mode mode) { } { Comment cmnt(masm_, "[ Declarations"); - VisitDeclarations(scope()->declarations()); + // For named function expressions, declare the function name as a + // constant. + if (scope()->is_function_scope() && scope()->function() != NULL) { + EmitDeclaration(scope()->function(), Variable::CONST, NULL); + } + // Visit all the explicit declarations unless there is an illegal + // redeclaration. + if (scope()->HasIllegalRedeclaration()) { + scope()->VisitIllegalRedeclaration(this); + } else { + VisitDeclarations(scope()->declarations()); + } } { Comment cmnt(masm_, "[ Stack check"); @@ -429,6 +446,39 @@ void FullCodeGenerator::DropAndApply(int count, } +void FullCodeGenerator::PrepareTest(Label* materialize_true, + Label* materialize_false, + Label** if_true, + Label** if_false) { + switch (context_) { + case Expression::kUninitialized: + UNREACHABLE(); + break; + case Expression::kEffect: + // In an effect context, the true and the false case branch to the + // same label. + *if_true = *if_false = materialize_true; + break; + case Expression::kValue: + *if_true = materialize_true; + *if_false = materialize_false; + break; + case Expression::kTest: + *if_true = true_label_; + *if_false = false_label_; + break; + case Expression::kValueTest: + *if_true = materialize_true; + *if_false = false_label_; + break; + case Expression::kTestValue: + *if_true = true_label_; + *if_false = materialize_false; + break; + } +} + + void FullCodeGenerator::Apply(Expression::Context context, Label* materialize_true, Label* materialize_false) { @@ -494,6 +544,61 @@ void FullCodeGenerator::Apply(Expression::Context context, } +// Convert constant control flow (true or false) to the result expected for +// a given expression context. +void FullCodeGenerator::Apply(Expression::Context context, bool flag) { + switch (context) { + case Expression::kUninitialized: + UNREACHABLE(); + break; + case Expression::kEffect: + break; + case Expression::kValue: { + Heap::RootListIndex value_root_index = + flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; + switch (location_) { + case kAccumulator: + __ LoadRoot(result_register(), value_root_index); + break; + case kStack: + __ PushRoot(value_root_index); + break; + } + break; + } + case Expression::kTest: + __ jmp(flag ? true_label_ : false_label_); + break; + case Expression::kTestValue: + switch (location_) { + case kAccumulator: + // If value is false it's needed. + if (!flag) __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); + break; + case kStack: + // If value is false it's needed. + if (!flag) __ PushRoot(Heap::kFalseValueRootIndex); + break; + } + __ jmp(flag ? true_label_ : false_label_); + break; + case Expression::kValueTest: + switch (location_) { + case kAccumulator: + // If value is true it's needed. + if (flag) __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); + break; + case kStack: + // If value is true it's needed. + if (flag) __ PushRoot(Heap::kTrueValueRootIndex); + break; + } + __ jmp(flag ? 
true_label_ : false_label_); + break; + } +} + + void FullCodeGenerator::DoTest(Expression::Context context) { // The value to test is in the accumulator. If the value might be needed // on the stack (value/test and test/value contexts with a stack location @@ -669,22 +774,23 @@ void FullCodeGenerator::Move(Slot* dst, } -void FullCodeGenerator::VisitDeclaration(Declaration* decl) { +void FullCodeGenerator::EmitDeclaration(Variable* variable, + Variable::Mode mode, + FunctionLiteral* function) { Comment cmnt(masm_, "[ Declaration"); - Variable* var = decl->proxy()->var(); - ASSERT(var != NULL); // Must have been resolved. - Slot* slot = var->slot(); - Property* prop = var->AsProperty(); + ASSERT(variable != NULL); // Must have been resolved. + Slot* slot = variable->slot(); + Property* prop = variable->AsProperty(); if (slot != NULL) { switch (slot->type()) { case Slot::PARAMETER: case Slot::LOCAL: - if (decl->mode() == Variable::CONST) { + if (mode == Variable::CONST) { __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister); - } else if (decl->fun() != NULL) { - VisitForValue(decl->fun(), kAccumulator); + } else if (function != NULL) { + VisitForValue(function, kAccumulator); __ movq(Operand(rbp, SlotOffset(slot)), result_register()); } break; @@ -694,7 +800,7 @@ void FullCodeGenerator::VisitDeclaration(Declaration* decl) { // this specific context. // The variable in the decl always resides in the current context. - ASSERT_EQ(0, scope()->ContextChainLength(var->scope())); + ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); if (FLAG_debug_code) { // Check if we have the correct context pointer. __ movq(rbx, @@ -702,13 +808,13 @@ void FullCodeGenerator::VisitDeclaration(Declaration* decl) { __ cmpq(rbx, rsi); __ Check(equal, "Unexpected declaration in current context."); } - if (decl->mode() == Variable::CONST) { + if (mode == Variable::CONST) { __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); __ movq(CodeGenerator::ContextOperand(rsi, slot->index()), kScratchRegister); // No write barrier since the hole value is in old space. - } else if (decl->fun() != NULL) { - VisitForValue(decl->fun(), kAccumulator); + } else if (function != NULL) { + VisitForValue(function, kAccumulator); __ movq(CodeGenerator::ContextOperand(rsi, slot->index()), result_register()); int offset = Context::SlotOffset(slot->index()); @@ -719,21 +825,19 @@ void FullCodeGenerator::VisitDeclaration(Declaration* decl) { case Slot::LOOKUP: { __ push(rsi); - __ Push(var->name()); + __ Push(variable->name()); // Declaration nodes are always introduced in one of two modes. - ASSERT(decl->mode() == Variable::VAR || - decl->mode() == Variable::CONST); - PropertyAttributes attr = - (decl->mode() == Variable::VAR) ? NONE : READ_ONLY; + ASSERT(mode == Variable::VAR || mode == Variable::CONST); + PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY; __ Push(Smi::FromInt(attr)); // Push initial value, if any. // Note: For variables we must not push an initial value (such as // 'undefined') because we may have a (legal) redeclaration and we // must not destroy the current value. - if (decl->mode() == Variable::CONST) { + if (mode == Variable::CONST) { __ PushRoot(Heap::kTheHoleValueRootIndex); - } else if (decl->fun() != NULL) { - VisitForValue(decl->fun(), kStack); + } else if (function != NULL) { + VisitForValue(function, kStack); } else { __ Push(Smi::FromInt(0)); // no initial value! 
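The asymmetric kValueTest and kTestValue cases in PrepareTest and Apply above come from the short-circuit operators: in `x || y` the value of x is needed only on its true edge, and in `x && y` only on its false edge. The same asymmetry in plain C++, purely as illustration:

    // kValueTest, e.g. the left operand of `x || y`: the value is kept
    // only when the test succeeds; on the false edge it is dead.
    int LogicalOr(int x, int (*y)()) {
      if (x != 0) return x;  // true edge: x's value is the result
      return y();            // false edge: result comes from y
    }

    // kTestValue, e.g. the left operand of `x && y`, is the mirror image.
    int LogicalAnd(int x, int (*y)()) {
      if (x == 0) return x;  // false edge: x's value is the result
      return y();            // true edge: result comes from y
    }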
} @@ -743,14 +847,14 @@ void FullCodeGenerator::VisitDeclaration(Declaration* decl) { } } else if (prop != NULL) { - if (decl->fun() != NULL || decl->mode() == Variable::CONST) { + if (function != NULL || mode == Variable::CONST) { // We are declaring a function or constant that rewrites to a // property. Use (keyed) IC to set the initial value. VisitForValue(prop->obj(), kStack); VisitForValue(prop->key(), kStack); - if (decl->fun() != NULL) { - VisitForValue(decl->fun(), kAccumulator); + if (function != NULL) { + VisitForValue(function, kAccumulator); } else { __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex); } @@ -769,6 +873,11 @@ void FullCodeGenerator::VisitDeclaration(Declaration* decl) { } +void FullCodeGenerator::VisitDeclaration(Declaration* decl) { + EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun()); +} + + void FullCodeGenerator::DeclareGlobals(Handle pairs) { // Call the runtime to declare the globals. __ push(rsi); // The context is the first argument. @@ -780,12 +889,208 @@ void FullCodeGenerator::DeclareGlobals(Handle pairs) { void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { - UNREACHABLE(); + Comment cmnt(masm_, "[ SwitchStatement"); + Breakable nested_statement(this, stmt); + SetStatementPosition(stmt); + // Keep the switch value on the stack until a case matches. + VisitForValue(stmt->tag(), kStack); + + ZoneList* clauses = stmt->cases(); + CaseClause* default_clause = NULL; // Can occur anywhere in the list. + + Label next_test; // Recycled for each test. + // Compile all the tests with branches to their bodies. + for (int i = 0; i < clauses->length(); i++) { + CaseClause* clause = clauses->at(i); + // The default is not a test, but remember it as final fall through. + if (clause->is_default()) { + default_clause = clause; + continue; + } + + Comment cmnt(masm_, "[ Case comparison"); + __ bind(&next_test); + next_test.Unuse(); + + // Compile the label expression. + VisitForValue(clause->label(), kAccumulator); + + // Perform the comparison as if via '==='. The comparison stub expects + // the smi vs. smi case to be handled before it is called. + Label slow_case; + __ movq(rdx, Operand(rsp, 0)); // Switch value. + __ JumpIfNotBothSmi(rdx, rax, &slow_case); + __ SmiCompare(rdx, rax); + __ j(not_equal, &next_test); + __ Drop(1); // Switch value is no longer needed. + __ jmp(clause->body_target()->entry_label()); + + __ bind(&slow_case); + CompareStub stub(equal, true); + __ CallStub(&stub); + __ testq(rax, rax); + __ j(not_equal, &next_test); + __ Drop(1); // Switch value is no longer needed. + __ jmp(clause->body_target()->entry_label()); + } + + // Discard the test value and jump to the default if present, otherwise to + // the end of the statement. + __ bind(&next_test); + __ Drop(1); // Switch value is no longer needed. + if (default_clause == NULL) { + __ jmp(nested_statement.break_target()); + } else { + __ jmp(default_clause->body_target()->entry_label()); + } + + // Compile all the case bodies. 
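VisitSwitchStatement above lowers a switch in two passes: first a chain of '===' tests (inlined smi/smi comparison, CompareStub otherwise), then all the case bodies, with the default clause taken only after every test fails. The emitted control-flow shape, rendered as C++ with gotos (a sketch; note that fall-through between bodies is preserved):

    // Shape emitted for: switch (v) { case a: A; case b: B; default: D; }
    int SwitchShape(int v, int a, int b) {
      int r = 0;
      if (v == a) goto body_a;  // test chain: one compare per labeled case
      if (v == b) goto body_b;
      goto body_default;        // no test matched: default (or break target)
    body_a:
      r += 1;                   // falls through, like a case without 'break'
    body_b:
      r += 2;
    body_default:
      r += 4;
      return r;
    }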
+ for (int i = 0; i < clauses->length(); i++) { + Comment cmnt(masm_, "[ Case body"); + CaseClause* clause = clauses->at(i); + __ bind(clause->body_target()->entry_label()); + VisitStatements(clause->statements()); + } + + __ bind(nested_statement.break_target()); } void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { - UNREACHABLE(); + Comment cmnt(masm_, "[ ForInStatement"); + SetStatementPosition(stmt); + + Label loop, exit; + ForIn loop_statement(this, stmt); + increment_loop_depth(); + + // Get the object to enumerate over. Both SpiderMonkey and JSC + // ignore null and undefined in contrast to the specification; see + // ECMA-262 section 12.6.4. + VisitForValue(stmt->enumerable(), kAccumulator); + __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); + __ j(equal, &exit); + __ CompareRoot(rax, Heap::kNullValueRootIndex); + __ j(equal, &exit); + + // Convert the object to a JS object. + Label convert, done_convert; + __ JumpIfSmi(rax, &convert); + __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); + __ j(above_equal, &done_convert); + __ bind(&convert); + __ push(rax); + __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); + __ bind(&done_convert); + __ push(rax); + + // TODO(kasperl): Check cache validity in generated code. This is a + // fast case for the JSObject::IsSimpleEnum cache validity + // checks. If we cannot guarantee cache validity, call the runtime + // system to check cache validity or get the property names in a + // fixed array. + + // Get the set of properties to enumerate. + __ push(rax); // Duplicate the enumerable object on the stack. + __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); + + // If we got a map from the runtime call, we can do a fast + // modification check. Otherwise, we got a fixed array, and we have + // to do a slow check. + Label fixed_array; + __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), + Heap::kMetaMapRootIndex); + __ j(not_equal, &fixed_array); + + // We got a map in register rax. Get the enumeration cache from it. + __ movq(rcx, FieldOperand(rax, Map::kInstanceDescriptorsOffset)); + __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset)); + __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); + + // Setup the four remaining stack slots. + __ push(rax); // Map. + __ push(rdx); // Enumeration cache. + __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset)); + __ push(rax); // Enumeration cache length (as smi). + __ Push(Smi::FromInt(0)); // Initial index. + __ jmp(&loop); + + // We got a fixed array in register rax. Iterate through that. + __ bind(&fixed_array); + __ Push(Smi::FromInt(0)); // Map (0) - force slow check. + __ push(rax); + __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset)); + __ push(rax); // Fixed array length (as smi). + __ Push(Smi::FromInt(0)); // Initial index. + + // Generate code for doing the condition check. + __ bind(&loop); + __ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index. + __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length. + __ j(above_equal, loop_statement.break_target()); + + // Get the current entry of the array into register rbx. + __ movq(rbx, Operand(rsp, 2 * kPointerSize)); + SmiIndex index = __ SmiToIndex(rax, rax, kPointerSizeLog2); + __ movq(rbx, FieldOperand(rbx, + index.reg, + index.scale, + FixedArray::kHeaderSize)); + + // Get the expected map from the stack or a zero map in the + // permanent slow case into register rdx. 
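The for-in code above keeps its whole loop state in five fixed stack slots rather than registers; the rsp offsets used in the loop body decode as follows. A struct-level mirror of that layout (top of stack first; field names are descriptive, not V8's):

    #include <cstdint>

    struct ForInState {
      intptr_t index;         // rsp + 0*kPointerSize: current index (smi)
      intptr_t length;        // rsp + 1*kPointerSize: key-array length (smi)
      void*    keys;          // rsp + 2*kPointerSize: FixedArray of names
      void*    expected_map;  // rsp + 3*kPointerSize: map, or Smi 0 to force
                              //   the slow FILTER_KEY path on every key
      void*    enumerable;    // rsp + 4*kPointerSize: object being iterated
    };

The final addq(rsp, Immediate(5 * kPointerSize)) at the break target pops exactly this record.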
+ __ movq(rdx, Operand(rsp, 3 * kPointerSize)); + + // Check if the expected map still matches that of the enumerable. + // If not, we have to filter the key. + Label update_each; + __ movq(rcx, Operand(rsp, 4 * kPointerSize)); + __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset)); + __ j(equal, &update_each); + + // Convert the entry to a string or null if it isn't a property + // anymore. If the property has been removed while iterating, we + // just skip it. + __ push(rcx); // Enumerable. + __ push(rbx); // Current entry. + __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); + __ CompareRoot(rax, Heap::kNullValueRootIndex); + __ j(equal, loop_statement.continue_target()); + __ movq(rbx, rax); + + // Update the 'each' property or variable from the possibly filtered + // entry in register rbx. + __ bind(&update_each); + __ movq(result_register(), rbx); + // Perform the assignment as if via '='. + EmitAssignment(stmt->each()); + + // Generate code for the body of the loop. + Label stack_limit_hit, stack_check_done; + Visit(stmt->body()); + + __ StackLimitCheck(&stack_limit_hit); + __ bind(&stack_check_done); + + // Generate code for going to the next element by incrementing the + // index (smi) stored on top of the stack. + __ bind(loop_statement.continue_target()); + __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1)); + __ jmp(&loop); + + // Slow case for the stack limit check. + StackCheckStub stack_check_stub; + __ bind(&stack_limit_hit); + __ CallStub(&stack_check_stub); + __ jmp(&stack_check_done); + + // Remove the pointers stored on the stack. + __ bind(loop_statement.break_target()); + __ addq(rsp, Immediate(5 * kPointerSize)); + + // Exit and decrement the loop depth. + __ bind(&exit); + decrement_loop_depth(); } @@ -844,7 +1149,20 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var, Comment cmnt(masm_, (slot->type() == Slot::CONTEXT) ? "Context slot" : "Stack slot"); - Apply(context, slot); + if (var->mode() == Variable::CONST) { + // Constants may be the hole value if they have not been initialized. + // Unhole them. + Label done; + MemOperand slot_operand = EmitSlotSearch(slot, rax); + __ movq(rax, slot_operand); + __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); + __ j(not_equal, &done); + __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); + __ bind(&done); + Apply(context, rax); + } else { + Apply(context, slot); + } } else { Comment cmnt(masm_, "Rewritten parameter"); @@ -980,22 +1298,28 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { Comment cmnt(masm_, "[ ArrayLiteral"); + + ZoneList* subexprs = expr->values(); + int length = subexprs->length(); + __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); __ Push(Smi::FromInt(expr->literal_index())); __ Push(expr->constant_elements()); if (expr->depth() > 1) { __ CallRuntime(Runtime::kCreateArrayLiteral, 3); - } else { + } else if (length > FastCloneShallowArrayStub::kMaximumLength) { __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); + } else { + FastCloneShallowArrayStub stub(length); + __ CallStub(&stub); } bool result_saved = false; // Is the result saved to the stack? // Emit code to evaluate all the non-constant subexpressions and to store // them into the newly cloned array. 
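VisitArrayLiteral above picks among three cloning paths for the literal's boilerplate. Schematically, with stub bodies standing in for the real allocators and an invented threshold (FastCloneShallowArrayStub::kMaximumLength's value is not shown in the patch):

    #include <cstdlib>

    constexpr int kMaximumCloneLength = 8;  // illustrative bound only

    void* CloneDeepViaRuntime()    { return std::calloc(1, 8); }  // nested
    void* CloneShallowViaRuntime() { return std::calloc(1, 8); }  // flat, long
    void* CloneShallowViaStub()    { return std::calloc(1, 8); }  // flat, short

    void* CloneArrayLiteral(int depth, int length) {
      if (depth > 1) return CloneDeepViaRuntime();
      if (length > kMaximumCloneLength) return CloneShallowViaRuntime();
      return CloneShallowViaStub();  // common case: generated-code stub
    }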
- ZoneList* subexprs = expr->values(); - for (int i = 0, len = subexprs->length(); i < len; i++) { + for (int i = 0; i < length; i++) { Expression* subexpr = subexprs->at(i); // If the subexpression is a literal or a simple materialized literal it // is already set in the cloned array. @@ -1157,6 +1481,57 @@ void FullCodeGenerator::EmitBinaryOp(Token::Value op, } +void FullCodeGenerator::EmitAssignment(Expression* expr) { + // Invalid left-hand sides are rewritten to have a 'throw + // ReferenceError' on the left-hand side. + if (!expr->IsValidLeftHandSide()) { + VisitForEffect(expr); + return; + } + + // Left-hand side can only be a property, a global or a (parameter or local) + // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY. + enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; + LhsKind assign_type = VARIABLE; + Property* prop = expr->AsProperty(); + if (prop != NULL) { + assign_type = (prop->key()->IsPropertyName()) + ? NAMED_PROPERTY + : KEYED_PROPERTY; + } + + switch (assign_type) { + case VARIABLE: { + Variable* var = expr->AsVariableProxy()->var(); + EmitVariableAssignment(var, Token::ASSIGN, Expression::kEffect); + break; + } + case NAMED_PROPERTY: { + __ push(rax); // Preserve value. + VisitForValue(prop->obj(), kAccumulator); + __ movq(rdx, rax); + __ pop(rax); // Restore value. + __ Move(rcx, prop->key()->AsLiteral()->handle()); + Handle ic(Builtins::builtin(Builtins::StoreIC_Initialize)); + __ call(ic, RelocInfo::CODE_TARGET); + __ nop(); // Signal no inlined code. + break; + } + case KEYED_PROPERTY: { + __ push(rax); // Preserve value. + VisitForValue(prop->obj(), kStack); + VisitForValue(prop->key(), kStack); + __ movq(rax, Operand(rsp, 2 * kPointerSize)); + Handle ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); + __ call(ic, RelocInfo::CODE_TARGET); + __ nop(); // Signal no inlined code. + __ Drop(3); // Receiver, key, and extra copy of value. + break; + } + } +} + + void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, Expression::Context context) { @@ -1187,7 +1562,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, if (op == Token::INIT_CONST) { // Detect const reinitialization by checking for the hole value. __ movq(rdx, Operand(rbp, SlotOffset(slot))); - __ Cmp(rdx, Factory::the_hole_value()); + __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); __ j(not_equal, &done); } // Perform the assignment. @@ -1199,7 +1574,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, if (op == Token::INIT_CONST) { // Detect const reinitialization by checking for the hole value. __ movq(rdx, target); - __ Cmp(rdx, Factory::the_hole_value()); + __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); __ j(not_equal, &done); } // Perform the assignment and issue the write barrier. @@ -1362,7 +1737,8 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) { } // Record source position for debugger. SetSourcePosition(expr->position()); - CallFunctionStub stub(arg_count, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); + InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; + CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); __ CallStub(&stub); // Restore context register. __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); @@ -1377,8 +1753,47 @@ void FullCodeGenerator::VisitCall(Call* expr) { Variable* var = fun->AsVariableProxy()->AsVariable(); if (var != NULL && var->is_possibly_eval()) { - // Call to the identifier 'eval'. 
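Const handling threads through several hunks above: declarations store the hole as an "uninitialized" sentinel (with no write barrier, since the hole is an immortal old-space value), EmitVariableLoad "unholes" reads into undefined, and INIT_CONST in EmitVariableAssignment stores only while the slot still holds the hole. A schematic model of that lifecycle:

    enum class Value { kTheHole, kUndefined, kInitialized };

    struct Slot { Value v = Value::kTheHole; };  // declaration stores the hole

    // Loads unhole: an uninitialized const reads as undefined.
    Value Load(const Slot& s) {
      return s.v == Value::kTheHole ? Value::kUndefined : s.v;
    }

    // INIT_CONST takes effect only while the slot is still the hole, so a
    // legal redeclaration cannot clobber an already-initialized const.
    void InitConst(Slot& s, Value v) {
      if (s.v == Value::kTheHole) s.v = v;
    }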
- UNREACHABLE(); + // In a call to eval, we first call %ResolvePossiblyDirectEval to + // resolve the function we need to call and the receiver of the + // call. Then we call the resolved function using the given + // arguments. + VisitForValue(fun, kStack); + __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot. + + // Push the arguments. + ZoneList<Expression*>* args = expr->arguments(); + int arg_count = args->length(); + for (int i = 0; i < arg_count; i++) { + VisitForValue(args->at(i), kStack); + } + + // Push copy of the function - found below the arguments. + __ push(Operand(rsp, (arg_count + 1) * kPointerSize)); + + // Push copy of the first argument or undefined if it doesn't exist. + if (arg_count > 0) { + __ push(Operand(rsp, arg_count * kPointerSize)); + } else { + __ PushRoot(Heap::kUndefinedValueRootIndex); + } + + // Push the receiver of the enclosing function and do runtime call. + __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize)); + __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3); + + // The runtime call returns a pair of values in rax (function) and + // rdx (receiver). Touch up the stack with the right values. + __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx); + __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax); + + // Record source position for debugger. + SetSourcePosition(expr->position()); + InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; + CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); + __ CallStub(&stub); + // Restore context register. + __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); + DropAndApply(1, context_, rax); } else if (var != NULL && !var->is_this() && var->is_global()) { // Call to a global variable. // Push global object as receiver for the call IC lookup. @@ -1386,8 +1801,15 @@ void FullCodeGenerator::VisitCall(Call* expr) { EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); } else if (var != NULL && var->slot() != NULL && var->slot()->type() == Slot::LOOKUP) { - // Call to a lookup slot. - UNREACHABLE(); + // Call to a lookup slot (dynamically introduced variable). Call + // the runtime to find the function to call (returned in rax) and + // the object holding it (returned in rdx). + __ push(context_register()); + __ Push(var->name()); + __ CallRuntime(Runtime::kLoadContextSlot, 2); + __ push(rax); // Function. + __ push(rdx); // Receiver. + EmitCallWithStub(expr); } else if (fun->AsProperty() != NULL) { // Call to an object property.
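The eval path above is easiest to read as an annotated stack script (this is a rough reading of the emitted sequence, not generated code; n = arg_count, top of stack on the right):

    //   f, hole, a1..an                  push eval value + reserved receiver
    //   f, hole, a1..an, f               copy of the function for the resolver
    //   f, hole, a1..an, f, a1-or-undef  copy of the source-string argument
    //   f, hole, a1..an, f, a1', recv    enclosing function's receiver
    //   %ResolvePossiblyDirectEval       consumes the 3 copies;
    //                                    returns rax = function, rdx = receiver
    //   [f] = rax, [hole] = rdx          patch the two bottom slots in place
    //   CallFunctionStub(n)              ordinary call on the patched slots
    //   drop 1                           discard the original f; result in rax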
Property* prop = fun->AsProperty(); @@ -1478,7 +1900,711 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) { } +void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) { + Handle name = expr->name(); + if (strcmp("_IsSmi", *name->ToCString()) == 0) { + EmitIsSmi(expr->arguments()); + } else if (strcmp("_IsNonNegativeSmi", *name->ToCString()) == 0) { + EmitIsNonNegativeSmi(expr->arguments()); + } else if (strcmp("_IsObject", *name->ToCString()) == 0) { + EmitIsObject(expr->arguments()); + } else if (strcmp("_IsUndetectableObject", *name->ToCString()) == 0) { + EmitIsUndetectableObject(expr->arguments()); + } else if (strcmp("_IsFunction", *name->ToCString()) == 0) { + EmitIsFunction(expr->arguments()); + } else if (strcmp("_IsArray", *name->ToCString()) == 0) { + EmitIsArray(expr->arguments()); + } else if (strcmp("_IsRegExp", *name->ToCString()) == 0) { + EmitIsRegExp(expr->arguments()); + } else if (strcmp("_IsConstructCall", *name->ToCString()) == 0) { + EmitIsConstructCall(expr->arguments()); + } else if (strcmp("_ObjectEquals", *name->ToCString()) == 0) { + EmitObjectEquals(expr->arguments()); + } else if (strcmp("_Arguments", *name->ToCString()) == 0) { + EmitArguments(expr->arguments()); + } else if (strcmp("_ArgumentsLength", *name->ToCString()) == 0) { + EmitArgumentsLength(expr->arguments()); + } else if (strcmp("_ClassOf", *name->ToCString()) == 0) { + EmitClassOf(expr->arguments()); + } else if (strcmp("_Log", *name->ToCString()) == 0) { + EmitLog(expr->arguments()); + } else if (strcmp("_RandomHeapNumber", *name->ToCString()) == 0) { + EmitRandomHeapNumber(expr->arguments()); + } else if (strcmp("_SubString", *name->ToCString()) == 0) { + EmitSubString(expr->arguments()); + } else if (strcmp("_RegExpExec", *name->ToCString()) == 0) { + EmitRegExpExec(expr->arguments()); + } else if (strcmp("_ValueOf", *name->ToCString()) == 0) { + EmitValueOf(expr->arguments()); + } else if (strcmp("_SetValueOf", *name->ToCString()) == 0) { + EmitSetValueOf(expr->arguments()); + } else if (strcmp("_NumberToString", *name->ToCString()) == 0) { + EmitNumberToString(expr->arguments()); + } else if (strcmp("_CharFromCode", *name->ToCString()) == 0) { + EmitCharFromCode(expr->arguments()); + } else if (strcmp("_FastCharCodeAt", *name->ToCString()) == 0) { + EmitFastCharCodeAt(expr->arguments()); + } else if (strcmp("_StringAdd", *name->ToCString()) == 0) { + EmitStringAdd(expr->arguments()); + } else if (strcmp("_StringCompare", *name->ToCString()) == 0) { + EmitStringCompare(expr->arguments()); + } else if (strcmp("_MathPow", *name->ToCString()) == 0) { + EmitMathPow(expr->arguments()); + } else if (strcmp("_MathSin", *name->ToCString()) == 0) { + EmitMathSin(expr->arguments()); + } else if (strcmp("_MathCos", *name->ToCString()) == 0) { + EmitMathCos(expr->arguments()); + } else if (strcmp("_MathSqrt", *name->ToCString()) == 0) { + EmitMathSqrt(expr->arguments()); + } else if (strcmp("_CallFunction", *name->ToCString()) == 0) { + EmitCallFunction(expr->arguments()); + } else if (strcmp("_RegExpConstructResult", *name->ToCString()) == 0) { + EmitRegExpConstructResult(expr->arguments()); + } else if (strcmp("_SwapElements", *name->ToCString()) == 0) { + EmitSwapElements(expr->arguments()); + } else if (strcmp("_GetFromCache", *name->ToCString()) == 0) { + EmitGetFromCache(expr->arguments()); + } else { + UNREACHABLE(); + } +} + + +void FullCodeGenerator::EmitIsSmi(ZoneList* args) { + ASSERT(args->length() == 1); + + VisitForValue(args->at(0), kAccumulator); + + Label materialize_true, 
materialize_false; + Label* if_true = NULL; + Label* if_false = NULL; + PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); + + __ JumpIfSmi(rax, if_true); + __ jmp(if_false); + + Apply(context_, if_true, if_false); +} + + +void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList* args) { + ASSERT(args->length() == 1); + + VisitForValue(args->at(0), kAccumulator); + + Label materialize_true, materialize_false; + Label* if_true = NULL; + Label* if_false = NULL; + PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); + + Condition positive_smi = __ CheckPositiveSmi(rax); + __ j(positive_smi, if_true); + __ jmp(if_false); + + Apply(context_, if_true, if_false); +} + + +void FullCodeGenerator::EmitIsObject(ZoneList* args) { + ASSERT(args->length() == 1); + + VisitForValue(args->at(0), kAccumulator); + + Label materialize_true, materialize_false; + Label* if_true = NULL; + Label* if_false = NULL; + PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); + + __ JumpIfSmi(rax, if_false); + __ CompareRoot(rax, Heap::kNullValueRootIndex); + __ j(equal, if_true); + __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); + // Undetectable objects behave like undefined when tested with typeof. + __ testb(FieldOperand(rbx, Map::kBitFieldOffset), + Immediate(1 << Map::kIsUndetectable)); + __ j(not_zero, if_false); + __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); + __ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE)); + __ j(below, if_false); + __ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE)); + __ j(below_equal, if_true); + __ jmp(if_false); + + Apply(context_, if_true, if_false); +} + + +void FullCodeGenerator::EmitIsUndetectableObject(ZoneList* args) { + ASSERT(args->length() == 1); + + VisitForValue(args->at(0), kAccumulator); + + Label materialize_true, materialize_false; + Label* if_true = NULL; + Label* if_false = NULL; + PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); + + __ JumpIfSmi(rax, if_false); + __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); + __ testb(FieldOperand(rbx, Map::kBitFieldOffset), + Immediate(1 << Map::kIsUndetectable)); + __ j(not_zero, if_true); + __ jmp(if_false); + + Apply(context_, if_true, if_false); +} + + +void FullCodeGenerator::EmitIsFunction(ZoneList* args) { + ASSERT(args->length() == 1); + + VisitForValue(args->at(0), kAccumulator); + + Label materialize_true, materialize_false; + Label* if_true = NULL; + Label* if_false = NULL; + PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); + + __ JumpIfSmi(rax, if_false); + __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); + __ j(equal, if_true); + __ jmp(if_false); + + Apply(context_, if_true, if_false); +} + + +void FullCodeGenerator::EmitIsArray(ZoneList* args) { + ASSERT(args->length() == 1); + + VisitForValue(args->at(0), kAccumulator); + + Label materialize_true, materialize_false; + Label* if_true = NULL; + Label* if_false = NULL; + PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); + + __ JumpIfSmi(rax, if_false); + __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx); + __ j(equal, if_true); + __ jmp(if_false); + + Apply(context_, if_true, if_false); +} + + +void FullCodeGenerator::EmitIsRegExp(ZoneList* args) { + ASSERT(args->length() == 1); + + VisitForValue(args->at(0), kAccumulator); + + Label materialize_true, materialize_false; + Label* if_true = NULL; + Label* if_false = NULL; + PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); + + __ JumpIfSmi(rax, if_false); + __ 
CmpObjectType(rax, JS_REGEXP_TYPE, rbx); + __ j(equal, if_true); + __ jmp(if_false); + + Apply(context_, if_true, if_false); +} + + + +void FullCodeGenerator::EmitIsConstructCall(ZoneList* args) { + ASSERT(args->length() == 0); + + Label materialize_true, materialize_false; + Label* if_true = NULL; + Label* if_false = NULL; + PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); + + // Get the frame pointer for the calling frame. + __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); + + // Skip the arguments adaptor frame if it exists. + Label check_frame_marker; + __ SmiCompare(Operand(rax, StandardFrameConstants::kContextOffset), + Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); + __ j(not_equal, &check_frame_marker); + __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset)); + + // Check the marker in the calling frame. + __ bind(&check_frame_marker); + __ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset), + Smi::FromInt(StackFrame::CONSTRUCT)); + __ j(equal, if_true); + __ jmp(if_false); + + Apply(context_, if_true, if_false); +} + + +void FullCodeGenerator::EmitObjectEquals(ZoneList* args) { + ASSERT(args->length() == 2); + + // Load the two objects into registers and perform the comparison. + VisitForValue(args->at(0), kStack); + VisitForValue(args->at(1), kAccumulator); + + Label materialize_true, materialize_false; + Label* if_true = NULL; + Label* if_false = NULL; + PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); + + __ pop(rbx); + __ cmpq(rax, rbx); + __ j(equal, if_true); + __ jmp(if_false); + + Apply(context_, if_true, if_false); +} + + +void FullCodeGenerator::EmitArguments(ZoneList* args) { + ASSERT(args->length() == 1); + + // ArgumentsAccessStub expects the key in edx and the formal + // parameter count in eax. + VisitForValue(args->at(0), kAccumulator); + __ movq(rdx, rax); + __ Move(rax, Smi::FromInt(scope()->num_parameters())); + ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); + __ CallStub(&stub); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitArgumentsLength(ZoneList* args) { + ASSERT(args->length() == 0); + + Label exit; + // Get the number of formal parameters. + __ Move(rax, Smi::FromInt(scope()->num_parameters())); + + // Check if the calling frame is an arguments adaptor frame. + __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); + __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset), + Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); + __ j(not_equal, &exit); + + // Arguments adaptor case: Read the arguments length from the + // adaptor frame. + __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); + + __ bind(&exit); + if (FLAG_debug_code) __ AbortIfNotSmi(rax); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitClassOf(ZoneList* args) { + ASSERT(args->length() == 1); + Label done, null, function, non_function_constructor; + + VisitForValue(args->at(0), kAccumulator); + + // If the object is a smi, we return null. + __ JumpIfSmi(rax, &null); + + // Check that the object is a JS object but take special care of JS + // functions to make sure they have 'Function' as their class. + __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); + __ j(below, &null); + + // As long as JS_FUNCTION_TYPE is the last instance type and it is + // right after LAST_JS_OBJECT_TYPE, we can avoid checking for + // LAST_JS_OBJECT_TYPE. 
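EmitIsConstructCall above answers %_IsConstructCall() purely from the frame chain: take the caller's frame, skip one arguments-adaptor frame if present, then test the frame-type marker. A data-structure model of that walk (not V8's real frame layout; markers are smis in the generated code):

    #include <cstdint>

    struct Frame {
      const Frame* caller;  // saved frame pointer
      intptr_t context;     // context, or a marker value for special frames
      intptr_t marker;      // frame-type marker slot
    };
    enum : intptr_t { kArgumentsAdaptor = 1, kConstruct = 2 };

    bool IsConstructCall(const Frame* current) {
      const Frame* f = current->caller;
      // An adaptor frame is inserted on argument-count mismatch; skip it.
      if (f->context == kArgumentsAdaptor) f = f->caller;
      return f->marker == kConstruct;  // 'new' leaves this marker behind
    }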
+ ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); + ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); + __ CmpInstanceType(rax, JS_FUNCTION_TYPE); + __ j(equal, &function); + + // Check if the constructor in the map is a function. + __ movq(rax, FieldOperand(rax, Map::kConstructorOffset)); + __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); + __ j(not_equal, &non_function_constructor); + + // rax now contains the constructor function. Grab the + // instance class name from there. + __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); + __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset)); + __ jmp(&done); + + // Functions have class 'Function'. + __ bind(&function); + __ Move(rax, Factory::function_class_symbol()); + __ jmp(&done); + + // Objects with a non-function constructor have class 'Object'. + __ bind(&non_function_constructor); + __ Move(rax, Factory::Object_symbol()); + __ jmp(&done); + + // Non-JS objects have class null. + __ bind(&null); + __ LoadRoot(rax, Heap::kNullValueRootIndex); + + // All done. + __ bind(&done); + + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitLog(ZoneList* args) { + // Conditionally generate a log call. + // Args: + // 0 (literal string): The type of logging (corresponds to the flags). + // This is used to determine whether or not to generate the log call. + // 1 (string): Format string. Access the string at argument index 2 + // with '%2s' (see Logger::LogRuntime for all the formats). + // 2 (array): Arguments to the format string. + ASSERT_EQ(args->length(), 3); +#ifdef ENABLE_LOGGING_AND_PROFILING + if (CodeGenerator::ShouldGenerateLog(args->at(0))) { + VisitForValue(args->at(1), kStack); + VisitForValue(args->at(2), kStack); + __ CallRuntime(Runtime::kLog, 2); + } +#endif + // Finally, we're expected to leave a value on the top of the stack. + __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitRandomHeapNumber(ZoneList* args) { + ASSERT(args->length() == 0); + + Label slow_allocate_heapnumber; + Label heapnumber_allocated; + + __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber); + __ jmp(&heapnumber_allocated); + + __ bind(&slow_allocate_heapnumber); + // To allocate a heap number, and ensure that it is not a smi, we + // call the runtime function FUnaryMinus on 0, returning the double + // -0.0. A new, distinct heap number is returned each time. + __ Push(Smi::FromInt(0)); + __ CallRuntime(Runtime::kNumberUnaryMinus, 1); + __ movq(rbx, rax); + + __ bind(&heapnumber_allocated); + + // Return a random uint32 number in rax. + // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs. + __ PrepareCallCFunction(0); + __ CallCFunction(ExternalReference::random_uint32_function(), 0); + + // Convert 32 random bits in rax to 0.(32 random bits) in a double + // by computing: + // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). + __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single. + __ movd(xmm1, rcx); + __ movd(xmm0, rax); + __ cvtss2sd(xmm1, xmm1); + __ xorpd(xmm0, xmm1); + __ subsd(xmm0, xmm1); + __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0); + + __ movq(rax, rbx); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitSubString(ZoneList* args) { + // Load the arguments on the stack and call the stub. 
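EmitClassOf, completed above, is a classification ladder; the two ASSERTs let a single unsigned compare cover "object or function" while one equality test still singles out functions. Summarized as ordinary code (string results stand in for the symbols the generated code loads):

    const char* ClassOf(bool is_smi, bool is_js_object, bool is_function,
                        bool constructor_is_function,
                        const char* instance_class_name) {
      if (is_smi || !is_js_object) return nullptr;  // class is null
      if (is_function) return "Function";           // functions come first
      if (constructor_is_function) return instance_class_name;
      return "Object";                              // non-function constructor
    }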
+ SubStringStub stub; + ASSERT(args->length() == 3); + VisitForValue(args->at(0), kStack); + VisitForValue(args->at(1), kStack); + VisitForValue(args->at(2), kStack); + __ CallStub(&stub); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitRegExpExec(ZoneList* args) { + // Load the arguments on the stack and call the stub. + RegExpExecStub stub; + ASSERT(args->length() == 4); + VisitForValue(args->at(0), kStack); + VisitForValue(args->at(1), kStack); + VisitForValue(args->at(2), kStack); + VisitForValue(args->at(3), kStack); + __ CallStub(&stub); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitValueOf(ZoneList* args) { + ASSERT(args->length() == 1); + + VisitForValue(args->at(0), kAccumulator); // Load the object. + + Label done; + // If the object is a smi return the object. + __ JumpIfSmi(rax, &done); + // If the object is not a value type, return the object. + __ CmpObjectType(rax, JS_VALUE_TYPE, rbx); + __ j(not_equal, &done); + __ movq(rax, FieldOperand(rax, JSValue::kValueOffset)); + + __ bind(&done); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitMathPow(ZoneList* args) { + // Load the arguments on the stack and call the runtime function. + ASSERT(args->length() == 2); + VisitForValue(args->at(0), kStack); + VisitForValue(args->at(1), kStack); + __ CallRuntime(Runtime::kMath_pow, 2); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitSetValueOf(ZoneList* args) { + ASSERT(args->length() == 2); + + VisitForValue(args->at(0), kStack); // Load the object. + VisitForValue(args->at(1), kAccumulator); // Load the value. + __ pop(rbx); // rax = value. ebx = object. + + Label done; + // If the object is a smi, return the value. + __ JumpIfSmi(rbx, &done); + + // If the object is not a value type, return the value. + __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx); + __ j(not_equal, &done); + + // Store the value. + __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax); + // Update the write barrier. Save the value as it will be + // overwritten by the write barrier code and is needed afterward. + __ movq(rdx, rax); + __ RecordWrite(rbx, JSValue::kValueOffset, rdx, rcx); + + __ bind(&done); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitNumberToString(ZoneList* args) { + ASSERT_EQ(args->length(), 1); + + // Load the argument on the stack and call the stub. + VisitForValue(args->at(0), kStack); + + NumberToStringStub stub; + __ CallStub(&stub); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitCharFromCode(ZoneList* args) { + ASSERT(args->length() == 1); + + VisitForValue(args->at(0), kAccumulator); + + Label slow_case, done; + // Fast case of Heap::LookupSingleCharacterStringFromCode. + __ JumpIfNotSmi(rax, &slow_case); + __ SmiToInteger32(rcx, rax); + __ cmpl(rcx, Immediate(String::kMaxAsciiCharCode)); + __ j(above, &slow_case); + + __ Move(rbx, Factory::single_character_string_cache()); + __ movq(rbx, FieldOperand(rbx, + rcx, + times_pointer_size, + FixedArray::kHeaderSize)); + + __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); + __ j(equal, &slow_case); + __ movq(rax, rbx); + __ jmp(&done); + + __ bind(&slow_case); + __ push(rax); + __ CallRuntime(Runtime::kCharFromCode, 1); + + __ bind(&done); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitFastCharCodeAt(ZoneList* args) { + // TODO(fsc): Port the complete implementation from the classic back-end. + // Move the undefined value into the result register, which will + // trigger the slow case. 
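EmitCharFromCode's fast path above mirrors Heap::LookupSingleCharacterStringFromCode: small char codes index a cache of interned one-character strings, and an undefined entry (or a non-smi or out-of-range code) falls back to the runtime. Modeled roughly:

    #include <array>
    #include <string>

    std::string SlowCharFromCode(int code) {   // models the runtime call
      return std::string(1, static_cast<char>(code));
    }

    std::array<const std::string*, 128> single_char_cache{};  // ASCII range

    std::string CharFromCode(int code) {
      if (code >= 0 && code <= 127 && single_char_cache[code] != nullptr)
        return *single_char_cache[code];  // fast path: cached 1-char string
      return SlowCharFromCode(code);      // miss: go to the runtime
    }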
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); + Apply(context_, rax); +} + +void FullCodeGenerator::EmitStringAdd(ZoneList* args) { + ASSERT_EQ(2, args->length()); + + VisitForValue(args->at(0), kStack); + VisitForValue(args->at(1), kStack); + + StringAddStub stub(NO_STRING_ADD_FLAGS); + __ CallStub(&stub); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitStringCompare(ZoneList* args) { + ASSERT_EQ(2, args->length()); + + VisitForValue(args->at(0), kStack); + VisitForValue(args->at(1), kStack); + + StringCompareStub stub; + __ CallStub(&stub); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitMathSin(ZoneList* args) { + // Load the argument on the stack and call the stub. + TranscendentalCacheStub stub(TranscendentalCache::SIN); + ASSERT(args->length() == 1); + VisitForValue(args->at(0), kStack); + __ CallStub(&stub); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitMathCos(ZoneList* args) { + // Load the argument on the stack and call the stub. + TranscendentalCacheStub stub(TranscendentalCache::COS); + ASSERT(args->length() == 1); + VisitForValue(args->at(0), kStack); + __ CallStub(&stub); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitMathSqrt(ZoneList* args) { + // Load the argument on the stack and call the runtime function. + ASSERT(args->length() == 1); + VisitForValue(args->at(0), kStack); + __ CallRuntime(Runtime::kMath_sqrt, 1); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitCallFunction(ZoneList* args) { + ASSERT(args->length() >= 2); + + int arg_count = args->length() - 2; // For receiver and function. + VisitForValue(args->at(0), kStack); // Receiver. + for (int i = 0; i < arg_count; i++) { + VisitForValue(args->at(i + 1), kStack); + } + VisitForValue(args->at(arg_count + 1), kAccumulator); // Function. + + // InvokeFunction requires function in rdi. Move it in there. 
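The TranscendentalCacheStub used for sin and cos above is, at heart, a small direct-mapped memo table keyed on the argument's raw bit pattern; the real stub's size and hashing are not shown in this patch. A rough scalar model:

    #include <cmath>
    #include <cstdint>
    #include <cstring>

    struct CacheLine { uint64_t key; double value; bool valid; };
    static CacheLine sin_cache[256];

    double CachedSin(double x) {
      uint64_t bits;
      std::memcpy(&bits, &x, sizeof bits);      // key on the exact bits
      CacheLine& line = sin_cache[bits & 255];  // direct-mapped slot
      if (!line.valid || line.key != bits) {    // miss: compute and fill
        line.key = bits;
        line.value = std::sin(x);
        line.valid = true;
      }
      return line.value;
    }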
+ if (!result_register().is(rdi)) __ movq(rdi, result_register()); + ParameterCount count(arg_count); + __ InvokeFunction(rdi, count, CALL_FUNCTION); + __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) { + ASSERT(args->length() == 3); + VisitForValue(args->at(0), kStack); + VisitForValue(args->at(1), kStack); + VisitForValue(args->at(2), kStack); + __ CallRuntime(Runtime::kRegExpConstructResult, 3); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) { + ASSERT(args->length() == 3); + VisitForValue(args->at(0), kStack); + VisitForValue(args->at(1), kStack); + VisitForValue(args->at(2), kStack); + __ CallRuntime(Runtime::kSwapElements, 3); + Apply(context_, rax); +} + + +void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) { + ASSERT_EQ(2, args->length()); + + ASSERT_NE(NULL, args->at(0)->AsLiteral()); + int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); + + Handle<FixedArray> jsfunction_result_caches( + Top::global_context()->jsfunction_result_caches()); + if (jsfunction_result_caches->length() <= cache_id) { + __ Abort("Attempt to use undefined cache."); + __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); + Apply(context_, rax); + return; + } + + VisitForValue(args->at(1), kAccumulator); + + Register key = rax; + Register cache = rbx; + Register tmp = rcx; + __ movq(cache, CodeGenerator::ContextOperand(rsi, Context::GLOBAL_INDEX)); + __ movq(cache, + FieldOperand(cache, GlobalObject::kGlobalContextOffset)); + __ movq(cache, + CodeGenerator::ContextOperand( + cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); + __ movq(cache, + FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); + + Label done, not_found; + // tmp now holds finger offset as a smi. + ASSERT(kSmiTag == 0 && kSmiTagSize == 1); + __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset)); + SmiIndex index = + __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2); + __ cmpq(key, FieldOperand(cache, + index.reg, + index.scale, + FixedArray::kHeaderSize)); + __ j(not_equal, &not_found); + __ movq(rax, FieldOperand(cache, + index.reg, + index.scale, + FixedArray::kHeaderSize + kPointerSize)); + __ jmp(&done); + + __ bind(&not_found); + // Call runtime to perform the lookup. + __ push(cache); + __ push(key); + __ CallRuntime(Runtime::kGetFromCache, 2); + + __ bind(&done); + Apply(context_, rax); +} + + void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { + Handle<String> name = expr->name(); + if (name->length() > 0 && name->Get(0) == '_') { + Comment cmnt(masm_, "[ InlineRuntimeCall"); + EmitInlineRuntimeCall(expr); + return; + } + Comment cmnt(masm_, "[ CallRuntime"); ZoneList<Expression*>* args = expr->arguments(); @@ -1511,6 +2637,46 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { switch (expr->op()) { + case Token::DELETE: { + Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); + Property* prop = expr->expression()->AsProperty(); + Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); + if (prop == NULL && var == NULL) { + // Result of deleting non-property, non-variable reference is true. + // The subexpression may have side effects.
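EmitGetFromCache above probes a JSFunctionResultCache: a flat array of (key, value) pairs plus a "finger" index remembering the most recent hit. The generated fast path makes exactly one probe at the finger; anything else goes to the runtime. In miniature:

    struct ResultCache {
      int finger = 0;        // index of the most recently hit key
      int entries[16] = {};  // flattened (key, value) pairs
    };

    int SlowGetFromCache(ResultCache* c, int key) {  // models the runtime
      for (int i = 0; i < 16; i += 2) {
        if (c->entries[i] == key) { c->finger = i; return c->entries[i + 1]; }
      }
      return -1;  // the real runtime would compute and insert the value
    }

    int GetFromCache(ResultCache* c, int key) {
      if (c->entries[c->finger] == key)    // single probe at the finger...
        return c->entries[c->finger + 1];  // ...hit: value sits next to key
      return SlowGetFromCache(c, key);     // miss: full lookup
    }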
+ VisitForEffect(expr->expression()); + Apply(context_, true); + } else if (var != NULL && + !var->is_global() && + var->slot() != NULL && + var->slot()->type() != Slot::LOOKUP) { + // Result of deleting non-global, non-dynamic variables is false. + // The subexpression does not have side effects. + Apply(context_, false); + } else { + // Property or variable reference. Call the delete builtin with + // object and property name as arguments. + if (prop != NULL) { + VisitForValue(prop->obj(), kStack); + VisitForValue(prop->key(), kStack); + } else if (var->is_global()) { + __ push(CodeGenerator::GlobalObject()); + __ Push(var->name()); + } else { + // Non-global variable. Call the runtime to look up the context + // where the variable was introduced. + __ push(context_register()); + __ Push(var->name()); + __ CallRuntime(Runtime::kLookupContext, 2); + __ push(rax); + __ Push(var->name()); + } + __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); + Apply(context_, rax); + } + break; + } + case Token::VOID: { Comment cmnt(masm_, "[ UnaryOperation (VOID)"); VisitForEffect(expr->expression()); @@ -1551,33 +2717,15 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { case Token::NOT: { Comment cmnt(masm_, "[ UnaryOperation (NOT)"); - Label materialize_true, materialize_false, done; - // Initially assume a pure test context. Notice that the labels are - // swapped. - Label* if_true = false_label_; - Label* if_false = true_label_; - switch (context_) { - case Expression::kUninitialized: - UNREACHABLE(); - break; - case Expression::kEffect: - if_true = &done; - if_false = &done; - break; - case Expression::kValue: - if_true = &materialize_false; - if_false = &materialize_true; - break; - case Expression::kTest: - break; - case Expression::kValueTest: - if_false = &materialize_true; - break; - case Expression::kTestValue: - if_true = &materialize_false; - break; - } + Label materialize_true, materialize_false; + Label* if_true = NULL; + Label* if_false = NULL; + + // Notice that the labels are swapped. + PrepareTest(&materialize_true, &materialize_false, &if_false, &if_true); + VisitForControl(expr->expression(), if_true, if_false); + Apply(context_, if_false, if_true); // Labels swapped. break; } @@ -1673,6 +2821,13 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { Comment cmnt(masm_, "[ CountOperation"); + // Invalid left-hand-sides are rewritten to have a 'throw + // ReferenceError' as the left-hand side. + if (!expr->expression()->IsValidLeftHandSide()) { + VisitForEffect(expr->expression()); + return; + } + // Expression can only be a property, a global or a (parameter or local) // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY. enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; @@ -1693,7 +2848,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { EmitVariableLoad(expr->expression()->AsVariableProxy()->var(), Expression::kValue); location_ = saved_location; - } else { + } else { // Reserve space for result of postfix operation. 
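The DELETE case that begins above sorts expressions into three buckets before any code is emitted. As plain code (parameter names are descriptive only):

    enum class DeleteResult { kTrue, kFalse, kCallBuiltin };

    DeleteResult ClassifyDelete(bool is_property, bool is_variable,
                                bool is_global, bool is_lookup_slot) {
      if (!is_property && !is_variable)
        return DeleteResult::kTrue;       // e.g. `delete 1`: evaluate for
                                          // effect, result is true
      if (is_variable && !is_global && !is_lookup_slot)
        return DeleteResult::kFalse;      // parameters/locals: not deletable
      return DeleteResult::kCallBuiltin;  // properties, globals, dynamic slots
    }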
if (expr->is_postfix() && context_ != Expression::kEffect) { __ Push(Smi::FromInt(0)); @@ -1865,36 +3020,40 @@ void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { } + void FullCodeGenerator::EmitNullCompare(bool strict, + Register obj, + Register null_const, + Label* if_true, + Label* if_false, + Register scratch) { + __ cmpq(obj, null_const); + if (strict) { + __ j(equal, if_true); + } else { + __ j(equal, if_true); + __ CompareRoot(obj, Heap::kUndefinedValueRootIndex); + __ j(equal, if_true); + __ JumpIfSmi(obj, if_false); + // It can be an undetectable object. + __ movq(scratch, FieldOperand(obj, HeapObject::kMapOffset)); + __ testb(FieldOperand(scratch, Map::kBitFieldOffset), + Immediate(1 << Map::kIsUndetectable)); + __ j(not_zero, if_true); + } + __ jmp(if_false); + } + + + void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { Comment cmnt(masm_, "[ CompareOperation"); // Always perform the comparison for its control flow. Pack the result // into the expression's context after the comparison is performed. - Label materialize_true, materialize_false, done; - // Initially assume we are in a test context. - Label* if_true = true_label_; - Label* if_false = false_label_; - switch (context_) { - case Expression::kUninitialized: - UNREACHABLE(); - break; - case Expression::kEffect: - if_true = &done; - if_false = &done; - break; - case Expression::kValue: - if_true = &materialize_true; - if_false = &materialize_false; - break; - case Expression::kTest: - break; - case Expression::kValueTest: - if_true = &materialize_true; - break; - case Expression::kTestValue: - if_false = &materialize_false; - break; - } + Label materialize_true, materialize_false; + Label* if_true = NULL; + Label* if_false = NULL; + PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); VisitForValue(expr->left(), kStack); switch (expr->op()) { @@ -1924,10 +3083,24 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { case Token::EQ_STRICT: strict = true; // Fall through. - case Token::EQ: + case Token::EQ: { cc = equal; __ pop(rdx); + // If either operand is constant null we do a fast compare + // against null. + Literal* right_literal = expr->right()->AsLiteral(); + Literal* left_literal = expr->left()->AsLiteral(); + if (right_literal != NULL && right_literal->handle()->IsNull()) { + EmitNullCompare(strict, rdx, rax, if_true, if_false, rcx); + Apply(context_, if_true, if_false); + return; + } else if (left_literal != NULL && left_literal->handle()->IsNull()) { + EmitNullCompare(strict, rax, rdx, if_true, if_false, rcx); + Apply(context_, if_true, if_false); + return; + } break; + } case Token::LT: cc = less; __ pop(rdx); diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc index 45e6b6b..42caa92 100644 --- a/src/x64/macro-assembler-x64.cc +++ b/src/x64/macro-assembler-x64.cc @@ -747,7 +747,7 @@ void MacroAssembler::SmiSub(Register dst, void MacroAssembler::SmiSub(Register dst, Register src1, - Operand const& src2, + const Operand& src2, Label* on_not_smi_result) { if (on_not_smi_result == NULL) { // No overflow checking. 
Use only when it's known that @@ -866,6 +866,7 @@ void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) { void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) { + ASSERT(!dst.is(kScratchRegister)); if (constant->value() != 0) { Move(kScratchRegister, constant); addq(dst, kScratchRegister); diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h index 82ab45e..bec142e 100644 --- a/src/x64/macro-assembler-x64.h +++ b/src/x64/macro-assembler-x64.h @@ -360,7 +360,7 @@ class MacroAssembler: public Assembler { void SmiSub(Register dst, Register src1, - Operand const& src2, + const Operand& src2, Label* on_not_smi_result); // Multiplies smi values and return the result as a smi, diff --git a/test/cctest/test-log-stack-tracer.cc b/test/cctest/test-log-stack-tracer.cc index 261222e..3fd5c69 100644 --- a/test/cctest/test-log-stack-tracer.cc +++ b/test/cctest/test-log-stack-tracer.cc @@ -273,7 +273,7 @@ static void CreateTraceCallerFunction(const char* func_name, // StackTracer uses Top::c_entry_fp as a starting point for stack // walking. TEST(CFromJSStackTrace) { -#ifdef V8_HOST_ARCH_IA32 +#if defined(V8_HOST_ARCH_IA32) || defined(V8_HOST_ARCH_X64) // TODO(711) The hack of replacing the inline runtime function // RandomHeapNumber with GetFrameNumber does not work with the way the full // compiler generates inline runtime calls. @@ -315,7 +315,7 @@ TEST(CFromJSStackTrace) { // Top::c_entry_fp value. In this case, StackTracer uses passed frame // pointer value as a starting point for stack walking. TEST(PureJSStackTrace) { -#ifdef V8_HOST_ARCH_IA32 +#if defined(V8_HOST_ARCH_IA32) || defined(V8_HOST_ARCH_X64) // TODO(711) The hack of replacing the inline runtime function // RandomHeapNumber with GetFrameNumber does not work with the way the full // compiler generates inline runtime calls.
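The new ASSERT in SmiAddConstant(const Operand&, Smi*) guards an aliasing hazard visible in the body above: the helper materializes the constant in kScratchRegister before the addq, so an operand that addresses memory through kScratchRegister would be clobbered first. The same hazard expressed in C:

    #include <cassert>

    // dst models a memory operand; scratch models kScratchRegister. If dst
    // were reached *via* scratch, writing the constant into scratch first
    // would redirect the store.
    void SmiAddConstantModel(long* dst, long* scratch, long constant) {
      assert(dst != scratch);  // the ASSERT added by this patch
      *scratch = constant;     // Move(kScratchRegister, constant)
      *dst += *scratch;        // addq(dst, kScratchRegister)
    }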