From 9cee28138a1bfc86d6fd89dbfd0e6b02a29d84f4 Mon Sep 17 00:00:00 2001
From: "whesse@chromium.org"
Date: Tue, 18 Jan 2011 14:32:13 +0000
Subject: [PATCH] X64 Crankshaft: Add addition of tagged and int32 values to
 Crankshaft on x64. Enable CallIC to a global function to call optimized
 code.

Review URL: http://codereview.chromium.org/6329006

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6366 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/x64/code-stubs-x64.h       |  1 +
 src/x64/lithium-codegen-x64.cc | 25 +++++++++++++++++++++++--
 src/x64/lithium-x64.cc         | 33 ++++++++++++++++++++++++++++++---
 src/x64/stub-cache-x64.cc      | 17 ++++++++++++-----
 4 files changed, 66 insertions(+), 10 deletions(-)

diff --git a/src/x64/code-stubs-x64.h b/src/x64/code-stubs-x64.h
index 5056f34..9feced2 100644
--- a/src/x64/code-stubs-x64.h
+++ b/src/x64/code-stubs-x64.h
@@ -198,6 +198,7 @@ class GenericBinaryOpStub: public CodeStub {
   }
 
   friend class CodeGenerator;
+  friend class LCodeGen;
 };
 
 
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index e06b536..151fad7 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -885,7 +885,22 @@ void LCodeGen::DoThrow(LThrow* instr) {
 
 
 void LCodeGen::DoAddI(LAddI* instr) {
-  Abort("Unimplemented: %s", "DoAddI");
+  LOperand* left = instr->InputAt(0);
+  LOperand* right = instr->InputAt(1);
+  ASSERT(left->Equals(instr->result()));
+
+  if (right->IsConstantOperand()) {
+    __ addl(ToRegister(left),
+            Immediate(ToInteger32(LConstantOperand::cast(right))));
+  } else if (right->IsRegister()) {
+    __ addl(ToRegister(left), ToRegister(right));
+  } else {
+    __ addl(ToRegister(left), ToOperand(right));
+  }
+
+  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
+    DeoptimizeIf(overflow, instr->environment());
+  }
 }
 
 
@@ -895,7 +910,13 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
 
 
 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
-  Abort("Unimplemented: %s", "DoArithmeticT");
+  ASSERT(ToRegister(instr->InputAt(0)).is(rdx));
+  ASSERT(ToRegister(instr->InputAt(1)).is(rax));
+  ASSERT(ToRegister(instr->result()).is(rax));
+
+  GenericBinaryOpStub stub(instr->op(), NO_OVERWRITE, NO_GENERIC_BINARY_FLAGS);
+  stub.SetArgsInRegisters();
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
 }
 
 
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index a384510..5ef6eb7 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -754,8 +754,19 @@ LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
 
 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
                                            HArithmeticBinaryOperation* instr) {
-  Abort("Unimplemented: %s", "DoArithmeticT");
-  return NULL;
+  ASSERT(op == Token::ADD ||
+         op == Token::DIV ||
+         op == Token::MOD ||
+         op == Token::MUL ||
+         op == Token::SUB);
+  HValue* left = instr->left();
+  HValue* right = instr->right();
+  ASSERT(left->representation().IsTagged());
+  ASSERT(right->representation().IsTagged());
+  LOperand* left_operand = UseFixed(left, rdx);
+  LOperand* right_operand = UseFixed(right, rax);
+  LArithmeticT* result = new LArithmeticT(op, left_operand, right_operand);
+  return MarkAsCall(DefineFixed(result, rax), instr);
 }
 
 
@@ -1085,7 +1096,23 @@ LInstruction* LChunkBuilder::DoSub(HSub* instr) {
 
 
 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
-  Abort("Unimplemented: %s", "DoAdd");
+  if (instr->representation().IsInteger32()) {
+    ASSERT(instr->left()->representation().IsInteger32());
+    ASSERT(instr->right()->representation().IsInteger32());
+    LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
+    LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
+    LAddI* add = new LAddI(left, right);
+    LInstruction* result = DefineSameAsFirst(add);
+    if (instr->CheckFlag(HValue::kCanOverflow)) {
+      result = AssignEnvironment(result);
+    }
+    return result;
+  } else if (instr->representation().IsDouble()) {
+    Abort("Unimplemented: %s", "DoAdd on Doubles");
+  } else {
+    ASSERT(instr->representation().IsTagged());
+    return DoArithmeticT(Token::ADD, instr);
+  }
   return NULL;
 }
 
 
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 57cba14..9b3719a 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -2262,17 +2262,24 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
     __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
   }
 
-  // Setup the context (function already in edi).
+  // Setup the context (function already in rdi).
   __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
 
   // Jump to the cached code (tail call).
   __ IncrementCounter(&Counters::call_global_inline, 1);
   ASSERT(function->is_compiled());
-  Handle<Code> code(function->code());
   ParameterCount expected(function->shared()->formal_parameter_count());
-  __ InvokeCode(code, expected, arguments(),
-                RelocInfo::CODE_TARGET, JUMP_FUNCTION);
-
+  if (V8::UseCrankshaft()) {
+    // TODO(kasperl): For now, we always call indirectly through the
+    // code field in the function to allow recompilation to take effect
+    // without changing any of the call sites.
+    __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
+    __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION);
+  } else {
+    Handle<Code> code(function->code());
+    __ InvokeCode(code, expected, arguments(),
+                  RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+  }
   // Handle call cache miss.
   __ bind(&miss);
   __ IncrementCounter(&Counters::call_global_inline_miss, 1);
-- 
2.7.4
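
A note on the overflow handling in DoAddI above: on x86-64, addl sets the
CPU overflow flag when the signed 32-bit sum wraps, and
DeoptimizeIf(overflow, ...) tests that flag, so an add whose hydrogen value
carries HValue::kCanOverflow bails out to unoptimized code instead of
returning a wrapped result. The following is a minimal standalone C++
sketch of that check, not V8 code; AddWithOverflowCheck is an illustrative
name:

    #include <cstdint>
    #include <cstdio>

    // Adds two int32 values, reporting overflow instead of wrapping --
    // the analogue of the addl + DeoptimizeIf(overflow, ...) pair in
    // LCodeGen::DoAddI. Returns false on the "deoptimize" path.
    bool AddWithOverflowCheck(int32_t left, int32_t right, int32_t* result) {
      int64_t wide = static_cast<int64_t>(left) + static_cast<int64_t>(right);
      if (wide < INT32_MIN || wide > INT32_MAX) {
        return false;  // sum does not fit in 32 bits: deoptimize
      }
      *result = static_cast<int32_t>(wide);
      return true;
    }

    int main() {
      int32_t sum = 0;
      if (AddWithOverflowCheck(INT32_MAX, 1, &sum)) {
        std::printf("sum = %d\n", sum);
      } else {
        std::printf("overflow: fall back to the generic path\n");
      }
      return 0;
    }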
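
The stub-cache change follows the scheme described in the TODO(kasperl)
comment: when Crankshaft is in use, the call site loads the function's code
entry at call time instead of baking in a direct code target, so
recompilation takes effect without patching any caller. Below is a
standalone sketch of that indirection, again with illustrative names
(JSFunctionLike is not a V8 type):

    #include <cstdio>

    // A function object carrying a mutable code-entry pointer, akin to the
    // field behind JSFunction::kCodeEntryOffset. Callers always load the
    // pointer at call time, so swapping in optimized code needs no
    // call-site patching.
    struct JSFunctionLike {
      int (*code_entry)(int);
    };

    int unoptimized_add_one(int x) { return x + 1; }
    int optimized_add_one(int x) { return x + 1; }  // stand-in for optimized code

    int main() {
      JSFunctionLike f = { unoptimized_add_one };
      std::printf("%d\n", f.code_entry(41));  // runs the unoptimized code
      f.code_entry = optimized_add_one;       // "recompilation" swaps the entry
      std::printf("%d\n", f.code_entry(41));  // same call site, new code
      return 0;
    }

Both entry points compute the same result by design: recompilation may only
change how the code runs, never what it returns, which is why the swap is
safe to perform without coordinating with callers.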