From 950a372834ad4252cade7c92e6565012a9d50865 Mon Sep 17 00:00:00 2001 From: "olivf@chromium.org" Date: Wed, 3 Jul 2013 19:57:25 +0000 Subject: [PATCH] Revert "Convert UnaryOpStub to a HydrogenCodeStub" The problem is the HCallConstantFunction which is not context sensitive, so we leak the builtin. We first need a Hydrogen version of __ IvokeBuiltin. BUG= R=danno@chromium.org, machenbach@chromium.org Review URL: https://codereview.chromium.org/18650003 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@15486 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- src/arm/code-stubs-arm.cc | 283 +++++++++++++++++++++++++++++++-- src/arm/code-stubs-arm.h | 65 ++++++++ src/arm/full-codegen-arm.cc | 5 +- src/arm/lithium-arm.cc | 12 -- src/arm/lithium-arm.h | 14 -- src/arm/lithium-codegen-arm.cc | 23 +-- src/code-stubs-hydrogen.cc | 38 ----- src/code-stubs.cc | 146 ++++------------- src/code-stubs.h | 97 ++---------- src/hydrogen-instructions.cc | 5 - src/hydrogen-instructions.h | 45 ------ src/hydrogen.cc | 111 +++++++------ src/hydrogen.h | 10 +- src/ia32/code-stubs-ia32.cc | 330 +++++++++++++++++++++++++++++++++++++-- src/ia32/code-stubs-ia32.h | 74 +++++++++ src/ia32/full-codegen-ia32.cc | 5 +- src/ia32/lithium-codegen-ia32.cc | 33 +--- src/ia32/lithium-ia32.cc | 24 +-- src/ia32/lithium-ia32.h | 27 ---- src/ic.cc | 145 +++++++++++++++-- src/ic.h | 25 ++- src/objects.cc | 8 +- src/objects.h | 3 +- src/property-details.h | 6 - src/type-info.cc | 15 +- src/types.cc | 9 -- src/x64/code-stubs-x64.cc | 266 +++++++++++++++++++++++++++++-- src/x64/code-stubs-x64.h | 71 +++++++++ src/x64/full-codegen-x64.cc | 5 +- src/x64/lithium-codegen-x64.cc | 23 +-- src/x64/lithium-x64.cc | 12 -- src/x64/lithium-x64.h | 14 -- 32 files changed, 1336 insertions(+), 613 deletions(-) diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc index b8e8cb3..6af5cce 100755 --- a/src/arm/code-stubs-arm.cc +++ b/src/arm/code-stubs-arm.cc @@ -226,20 +226,8 @@ void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( } -void UnaryOpStub::InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor) { - static Register registers[] = { r0 }; - descriptor->register_param_count_ = 1; - descriptor->register_params_ = registers; - descriptor->deoptimization_handler_ = - FUNCTION_ADDR(UnaryOpIC_Miss); -} - - #define __ ACCESS_MASM(masm) - static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow, Condition cond); @@ -1301,6 +1289,277 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { } +void UnaryOpStub::PrintName(StringStream* stream) { + const char* op_name = Token::Name(op_); + const char* overwrite_name = NULL; // Make g++ happy. + switch (mode_) { + case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; + case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; + } + stream->Add("UnaryOpStub_%s_%s_%s", + op_name, + overwrite_name, + UnaryOpIC::GetName(operand_type_)); +} + + +// TODO(svenpanne): Use virtual functions instead of switch. 
+void UnaryOpStub::Generate(MacroAssembler* masm) { + switch (operand_type_) { + case UnaryOpIC::UNINITIALIZED: + GenerateTypeTransition(masm); + break; + case UnaryOpIC::SMI: + GenerateSmiStub(masm); + break; + case UnaryOpIC::NUMBER: + GenerateNumberStub(masm); + break; + case UnaryOpIC::GENERIC: + GenerateGenericStub(masm); + break; + } +} + + +void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { + __ mov(r3, Operand(r0)); // the operand + __ mov(r2, Operand(Smi::FromInt(op_))); + __ mov(r1, Operand(Smi::FromInt(mode_))); + __ mov(r0, Operand(Smi::FromInt(operand_type_))); + __ Push(r3, r2, r1, r0); + + __ TailCallExternalReference( + ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1); +} + + +// TODO(svenpanne): Use virtual functions instead of switch. +void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { + switch (op_) { + case Token::SUB: + GenerateSmiStubSub(masm); + break; + case Token::BIT_NOT: + GenerateSmiStubBitNot(masm); + break; + default: + UNREACHABLE(); + } +} + + +void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { + Label non_smi, slow; + GenerateSmiCodeSub(masm, &non_smi, &slow); + __ bind(&non_smi); + __ bind(&slow); + GenerateTypeTransition(masm); +} + + +void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { + Label non_smi; + GenerateSmiCodeBitNot(masm, &non_smi); + __ bind(&non_smi); + GenerateTypeTransition(masm); +} + + +void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, + Label* non_smi, + Label* slow) { + __ JumpIfNotSmi(r0, non_smi); + + // The result of negating zero or the smallest negative smi is not a smi. + __ bic(ip, r0, Operand(0x80000000), SetCC); + __ b(eq, slow); + + // Return '0 - value'. + __ rsb(r0, r0, Operand::Zero()); + __ Ret(); +} + + +void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, + Label* non_smi) { + __ JumpIfNotSmi(r0, non_smi); + + // Flip bits and revert inverted smi-tag. + __ mvn(r0, Operand(r0)); + __ bic(r0, r0, Operand(kSmiTagMask)); + __ Ret(); +} + + +// TODO(svenpanne): Use virtual functions instead of switch. +void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) { + switch (op_) { + case Token::SUB: + GenerateNumberStubSub(masm); + break; + case Token::BIT_NOT: + GenerateNumberStubBitNot(masm); + break; + default: + UNREACHABLE(); + } +} + + +void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) { + Label non_smi, slow, call_builtin; + GenerateSmiCodeSub(masm, &non_smi, &call_builtin); + __ bind(&non_smi); + GenerateHeapNumberCodeSub(masm, &slow); + __ bind(&slow); + GenerateTypeTransition(masm); + __ bind(&call_builtin); + GenerateGenericCodeFallback(masm); +} + + +void UnaryOpStub::GenerateNumberStubBitNot(MacroAssembler* masm) { + Label non_smi, slow; + GenerateSmiCodeBitNot(masm, &non_smi); + __ bind(&non_smi); + GenerateHeapNumberCodeBitNot(masm, &slow); + __ bind(&slow); + GenerateTypeTransition(masm); +} + +void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, + Label* slow) { + EmitCheckForHeapNumber(masm, r0, r1, r6, slow); + // r0 is a heap number. Get a new heap number in r1. + if (mode_ == UNARY_OVERWRITE) { + __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); + __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. 
+ __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); + } else { + Label slow_allocate_heapnumber, heapnumber_allocated; + __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber); + __ jmp(&heapnumber_allocated); + + __ bind(&slow_allocate_heapnumber); + { + FrameScope scope(masm, StackFrame::INTERNAL); + __ push(r0); + __ CallRuntime(Runtime::kNumberAlloc, 0); + __ mov(r1, Operand(r0)); + __ pop(r0); + } + + __ bind(&heapnumber_allocated); + __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); + __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); + __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset)); + __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. + __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset)); + __ mov(r0, Operand(r1)); + } + __ Ret(); +} + + +void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm, + Label* slow) { + EmitCheckForHeapNumber(masm, r0, r1, r6, slow); + + // Convert the heap number in r0 to an untagged integer in r1. + __ vldr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset)); + __ ECMAToInt32(r1, d0, r2, r3, r4, d1); + + // Do the bitwise operation and check if the result fits in a smi. + Label try_float; + __ mvn(r1, Operand(r1)); + __ cmn(r1, Operand(0x40000000)); + __ b(mi, &try_float); + + // Tag the result as a smi and we're done. + __ SmiTag(r0, r1); + __ Ret(); + + // Try to store the result in a heap number. + __ bind(&try_float); + if (mode_ == UNARY_NO_OVERWRITE) { + Label slow_allocate_heapnumber, heapnumber_allocated; + __ AllocateHeapNumber(r0, r3, r4, r6, &slow_allocate_heapnumber); + __ jmp(&heapnumber_allocated); + + __ bind(&slow_allocate_heapnumber); + { + FrameScope scope(masm, StackFrame::INTERNAL); + // Push the lower bit of the result (left shifted to look like a smi). + __ mov(r2, Operand(r1, LSL, 31)); + // Push the 31 high bits (bit 0 cleared to look like a smi). + __ bic(r1, r1, Operand(1)); + __ Push(r2, r1); + __ CallRuntime(Runtime::kNumberAlloc, 0); + __ Pop(r2, r1); // Restore the result. + __ orr(r1, r1, Operand(r2, LSR, 31)); + } + __ bind(&heapnumber_allocated); + } + + __ vmov(s0, r1); + __ vcvt_f64_s32(d0, s0); + __ vstr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset)); + __ Ret(); +} + + +// TODO(svenpanne): Use virtual functions instead of switch. +void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { + switch (op_) { + case Token::SUB: + GenerateGenericStubSub(masm); + break; + case Token::BIT_NOT: + GenerateGenericStubBitNot(masm); + break; + default: + UNREACHABLE(); + } +} + + +void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { + Label non_smi, slow; + GenerateSmiCodeSub(masm, &non_smi, &slow); + __ bind(&non_smi); + GenerateHeapNumberCodeSub(masm, &slow); + __ bind(&slow); + GenerateGenericCodeFallback(masm); +} + + +void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { + Label non_smi, slow; + GenerateSmiCodeBitNot(masm, &non_smi); + __ bind(&non_smi); + GenerateHeapNumberCodeBitNot(masm, &slow); + __ bind(&slow); + GenerateGenericCodeFallback(masm); +} + + +void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) { + // Handle the slow case by jumping to the JavaScript builtin. + __ push(r0); + switch (op_) { + case Token::SUB: + __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); + break; + case Token::BIT_NOT: + __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); + break; + default: + UNREACHABLE(); + } +} + + // Generates code to call a C function to do a double operation. 
// This code never falls through, but returns with a heap number containing // the result in r0. diff --git a/src/arm/code-stubs-arm.h b/src/arm/code-stubs-arm.h index 0202a8a..1f663f5 100644 --- a/src/arm/code-stubs-arm.h +++ b/src/arm/code-stubs-arm.h @@ -80,6 +80,71 @@ class StoreBufferOverflowStub: public PlatformCodeStub { }; +class UnaryOpStub: public PlatformCodeStub { + public: + UnaryOpStub(Token::Value op, + UnaryOverwriteMode mode, + UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED) + : op_(op), + mode_(mode), + operand_type_(operand_type) { + } + + private: + Token::Value op_; + UnaryOverwriteMode mode_; + + // Operand type information determined at runtime. + UnaryOpIC::TypeInfo operand_type_; + + virtual void PrintName(StringStream* stream); + + class ModeBits: public BitField {}; + class OpBits: public BitField {}; + class OperandTypeInfoBits: public BitField {}; + + Major MajorKey() { return UnaryOp; } + int MinorKey() { + return ModeBits::encode(mode_) + | OpBits::encode(op_) + | OperandTypeInfoBits::encode(operand_type_); + } + + // Note: A lot of the helper functions below will vanish when we use virtual + // function instead of switch more often. + void Generate(MacroAssembler* masm); + + void GenerateTypeTransition(MacroAssembler* masm); + + void GenerateSmiStub(MacroAssembler* masm); + void GenerateSmiStubSub(MacroAssembler* masm); + void GenerateSmiStubBitNot(MacroAssembler* masm); + void GenerateSmiCodeSub(MacroAssembler* masm, Label* non_smi, Label* slow); + void GenerateSmiCodeBitNot(MacroAssembler* masm, Label* slow); + + void GenerateNumberStub(MacroAssembler* masm); + void GenerateNumberStubSub(MacroAssembler* masm); + void GenerateNumberStubBitNot(MacroAssembler* masm); + void GenerateHeapNumberCodeSub(MacroAssembler* masm, Label* slow); + void GenerateHeapNumberCodeBitNot(MacroAssembler* masm, Label* slow); + + void GenerateGenericStub(MacroAssembler* masm); + void GenerateGenericStubSub(MacroAssembler* masm); + void GenerateGenericStubBitNot(MacroAssembler* masm); + void GenerateGenericCodeFallback(MacroAssembler* masm); + + virtual Code::Kind GetCodeKind() const { return Code::UNARY_OP_IC; } + + virtual InlineCacheState GetICState() { + return UnaryOpIC::ToState(operand_type_); + } + + virtual void FinishCode(Handle code) { + code->set_unary_op_type(operand_type_); + } +}; + + class StringHelper : public AllStatic { public: // Generate code for copying characters using a simple loop. This should only diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc index 7716e85..41f02be 100644 --- a/src/arm/full-codegen-arm.cc +++ b/src/arm/full-codegen-arm.cc @@ -4366,7 +4366,10 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr, const char* comment) { // TODO(svenpanne): Allowing format strings in Comment would be nice here... Comment cmt(masm_, comment); - UnaryOpStub stub(expr->op()); + bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); + UnaryOverwriteMode overwrite = + can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; + UnaryOpStub stub(expr->op(), overwrite); // UnaryOpStub expects the argument to be in the // accumulator register r0. 
VisitForAccumulatorValue(expr->expression()); diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc index 854ea15..b08353e 100644 --- a/src/arm/lithium-arm.cc +++ b/src/arm/lithium-arm.cc @@ -1998,18 +1998,6 @@ LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) { } -LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) { - LOperand* value = UseRegisterAtStart(instr->value()); - return AssignEnvironment(new(zone()) LCheckSmi(value)); -} - - -LInstruction* LChunkBuilder::DoIsNumberAndBranch(HIsNumberAndBranch* instr) { - return new(zone()) - LIsNumberAndBranch(UseRegisterOrConstantAtStart(instr->value())); -} - - LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) { LOperand* value = UseRegisterAtStart(instr->value()); LInstruction* result = new(zone()) LCheckInstanceType(value); diff --git a/src/arm/lithium-arm.h b/src/arm/lithium-arm.h index b9d6783..39cab8f 100644 --- a/src/arm/lithium-arm.h +++ b/src/arm/lithium-arm.h @@ -118,7 +118,6 @@ class LCodeGen; V(IsConstructCallAndBranch) \ V(IsObjectAndBranch) \ V(IsStringAndBranch) \ - V(IsNumberAndBranch) \ V(IsSmiAndBranch) \ V(IsUndetectableAndBranch) \ V(Label) \ @@ -926,19 +925,6 @@ class LIsObjectAndBranch: public LControlInstruction<1, 1> { }; -class LIsNumberAndBranch: public LControlInstruction<1, 0> { - public: - explicit LIsNumberAndBranch(LOperand* value) { - inputs_[0] = value; - } - - LOperand* value() { return inputs_[0]; } - - DECLARE_CONCRETE_INSTRUCTION(IsNumberAndBranch, "is-number-and-branch") - DECLARE_HYDROGEN_ACCESSOR(IsNumberAndBranch) -}; - - class LIsStringAndBranch: public LControlInstruction<1, 1> { public: LIsStringAndBranch(LOperand* value, LOperand* temp) { diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc index f445bf0..272db15 100644 --- a/src/arm/lithium-codegen-arm.cc +++ b/src/arm/lithium-codegen-arm.cc @@ -2130,12 +2130,12 @@ int LCodeGen::GetNextEmittedBlock() const { template void LCodeGen::EmitBranch(InstrType instr, Condition cc) { - int left_block = instr->TrueDestination(chunk_); int right_block = instr->FalseDestination(chunk_); + int left_block = instr->TrueDestination(chunk_); int next_block = GetNextEmittedBlock(); - if (right_block == left_block || cc == al) { + if (right_block == left_block) { EmitGoto(left_block); } else if (left_block == next_block) { __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block)); @@ -2153,25 +2153,6 @@ void LCodeGen::DoDebugBreak(LDebugBreak* instr) { } -void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) { - Representation r = instr->hydrogen()->value()->representation(); - if (r.IsSmiOrInteger32() || r.IsDouble()) { - EmitBranch(instr, al); - } else { - ASSERT(r.IsTagged()); - Register reg = ToRegister(instr->value()); - HType type = instr->hydrogen()->value()->type(); - if (type.IsTaggedNumber()) { - EmitBranch(instr, al); - } - __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); - __ ldr(scratch0(), FieldMemOperand(reg, HeapObject::kMapOffset)); - __ CompareRoot(scratch0(), Heap::kHeapNumberMapRootIndex); - EmitBranch(instr, eq); - } -} - - void LCodeGen::DoBranch(LBranch* instr) { Representation r = instr->hydrogen()->value()->representation(); if (r.IsInteger32() || r.IsSmi()) { diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc index d14435e..96266af 100644 --- a/src/code-stubs-hydrogen.cc +++ b/src/code-stubs-hydrogen.cc @@ -764,44 +764,6 @@ Handle CompareNilICStub::GenerateCode() { template <> -HValue* 
CodeStubGraphBuilder::BuildCodeInitializedStub() { - UnaryOpStub* stub = casted_stub(); - Handle type = stub->GetType(graph()->isolate()); - HValue* input = GetParameter(0); - - // Prevent unwanted HChange being inserted to ensure that the stub - // deopts on newly encountered types. - if (!type->Maybe(Type::Double())) { - input = AddInstruction(new(zone()) - HForceRepresentation(input, Representation::Smi())); - } - - if (!type->Is(Type::Number())) { - // If we expect to see other things than Numbers, we will create a generic - // stub, which handles all numbers and calls into the runtime for the rest. - IfBuilder if_number(this); - if_number.If(input); - if_number.Then(); - HInstruction* res = BuildUnaryMathOp(input, type, stub->operation()); - if_number.Return(AddInstruction(res)); - if_number.Else(); - AddInstruction(new(zone()) HPushArgument(GetParameter(0))); - if_number.Return(AddInstruction(new(zone()) HCallConstantFunction( - stub->ToJSFunction(isolate()), 1))); - if_number.End(); - return graph()->GetConstantUndefined(); - } - - return AddInstruction(BuildUnaryMathOp(input, type, stub->operation())); -} - - -Handle UnaryOpStub::GenerateCode() { - return DoGenerateCode(this); -} - - -template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); diff --git a/src/code-stubs.cc b/src/code-stubs.cc index fdc6a15..2ed2ba3 100644 --- a/src/code-stubs.cc +++ b/src/code-stubs.cc @@ -184,79 +184,9 @@ const char* CodeStub::MajorName(CodeStub::Major major_key, } } -void CodeStub::PrintBaseName(StringStream* stream) { - stream->Add("%s", MajorName(MajorKey(), false)); -} - void CodeStub::PrintName(StringStream* stream) { - PrintBaseName(stream); - PrintState(stream); -} - - -Builtins::JavaScript UnaryOpStub::ToJSBuiltin() { - switch (operation_) { - default: - UNREACHABLE(); - case Token::SUB: - return Builtins::UNARY_MINUS; - case Token::BIT_NOT: - return Builtins::BIT_NOT; - } -} - - -Handle UnaryOpStub::ToJSFunction(Isolate* isolate) { - Handle builtins(isolate->js_builtins_object()); - Object* builtin = builtins->javascript_builtin(ToJSBuiltin()); - return Handle(JSFunction::cast(builtin), isolate); -} - - -MaybeObject* UnaryOpStub::Result(Handle object, Isolate* isolate) { - Handle builtin_function = ToJSFunction(isolate); - bool caught_exception; - Handle result = Execution::Call(builtin_function, object, - 0, NULL, &caught_exception); - if (caught_exception) { - return Failure::Exception(); - } - return *result; -} - - -void UnaryOpStub::UpdateStatus(Handle object) { - State old_state(state_); - if (object->IsSmi()) { - state_.Add(SMI); - if (operation_ == Token::SUB && *object == 0) { - // The result (-0) has to be represented as double. 
- state_.Add(HEAP_NUMBER); - } - } else if (object->IsHeapNumber()) { - state_.Add(HEAP_NUMBER); - } else { - state_.Add(GENERIC); - } - TraceTransition(old_state, state_); -} - - -Handle UnaryOpStub::GetType(Isolate* isolate) { - if (state_.Contains(GENERIC)) { - return handle(Type::Any(), isolate); - } - Handle type = handle(Type::None(), isolate); - if (state_.Contains(SMI)) { - type = handle( - Type::Union(type, handle(Type::Smi(), isolate)), isolate); - } - if (state_.Contains(HEAP_NUMBER)) { - type = handle( - Type::Union(type, handle(Type::Double(), isolate)), isolate); - } - return type; + stream->Add("%s", MajorName(MajorKey(), false)); } @@ -345,29 +275,6 @@ void BinaryOpStub::GenerateCallRuntime(MacroAssembler* masm) { #undef __ -void UnaryOpStub::PrintBaseName(StringStream* stream) { - CodeStub::PrintBaseName(stream); - if (operation_ == Token::SUB) stream->Add("Minus"); - if (operation_ == Token::BIT_NOT) stream->Add("Not"); -} - - -void UnaryOpStub::PrintState(StringStream* stream) { - state_.Print(stream); -} - - -void UnaryOpStub::State::Print(StringStream* stream) const { - stream->Add("("); - SimpleListPrinter printer(stream); - if (IsEmpty()) printer.Add("None"); - if (Contains(GENERIC)) printer.Add("Generic"); - if (Contains(HEAP_NUMBER)) printer.Add("HeapNumber"); - if (Contains(SMI)) printer.Add("Smi"); - stream->Add(")"); -} - - void BinaryOpStub::PrintName(StringStream* stream) { const char* op_name = Token::Name(op_); const char* overwrite_name; @@ -524,9 +431,8 @@ void ICCompareStub::Generate(MacroAssembler* masm) { } -void CompareNilICStub::UpdateStatus(Handle object) { +void CompareNilICStub::Record(Handle object) { ASSERT(state_ != State::Generic()); - State old_state(state_); if (object->IsNull()) { state_.Add(NULL_TYPE); } else if (object->IsUndefined()) { @@ -540,22 +446,18 @@ void CompareNilICStub::UpdateStatus(Handle object) { } else { state_.Add(MONOMORPHIC_MAP); } - TraceTransition(old_state, state_); } -template -void HydrogenCodeStub::TraceTransition(StateType from, StateType to) { +void CompareNilICStub::State::TraceTransition(State to) const { #ifdef DEBUG if (!FLAG_trace_ic) return; char buffer[100]; NoAllocationStringAllocator allocator(buffer, static_cast(sizeof(buffer))); StringStream stream(&allocator); - stream.Add("["); - PrintBaseName(&stream); - stream.Add(": "); - from.Print(&stream); + stream.Add("[CompareNilIC : "); + Print(&stream); stream.Add("=>"); to.Print(&stream); stream.Add("]\n"); @@ -563,14 +465,12 @@ void HydrogenCodeStub::TraceTransition(StateType from, StateType to) { #endif } -void CompareNilICStub::PrintBaseName(StringStream* stream) { - CodeStub::PrintBaseName(stream); - stream->Add((nil_value_ == kNullValue) ? "(NullValue)": - "(UndefinedValue)"); -} -void CompareNilICStub::PrintState(StringStream* stream) { +void CompareNilICStub::PrintName(StringStream* stream) { + stream->Add("CompareNilICStub_"); state_.Print(stream); + stream->Add((nil_value_ == kNullValue) ? 
"(NullValue|": + "(UndefinedValue|"); } @@ -715,15 +615,16 @@ void CallConstructStub::PrintName(StringStream* stream) { } -bool ToBooleanStub::UpdateStatus(Handle object) { +bool ToBooleanStub::Record(Handle object) { Types old_types(types_); - bool to_boolean_value = types_.UpdateStatus(object); - TraceTransition(old_types, types_); + bool to_boolean_value = types_.Record(object); + old_types.TraceTransition(types_); return to_boolean_value; } -void ToBooleanStub::PrintState(StringStream* stream) { +void ToBooleanStub::PrintName(StringStream* stream) { + stream->Add("ToBooleanStub_"); types_.Print(stream); } @@ -744,7 +645,24 @@ void ToBooleanStub::Types::Print(StringStream* stream) const { } -bool ToBooleanStub::Types::UpdateStatus(Handle object) { +void ToBooleanStub::Types::TraceTransition(Types to) const { + #ifdef DEBUG + if (!FLAG_trace_ic) return; + char buffer[100]; + NoAllocationStringAllocator allocator(buffer, + static_cast(sizeof(buffer))); + StringStream stream(&allocator); + stream.Add("[ToBooleanIC : "); + Print(&stream); + stream.Add("=>"); + to.Print(&stream); + stream.Add("]\n"); + stream.OutputToStdOut(); + #endif +} + + +bool ToBooleanStub::Types::Record(Handle object) { if (object->IsUndefined()) { Add(UNDEFINED); return false; diff --git a/src/code-stubs.h b/src/code-stubs.h index 4b341bb..d197c84 100644 --- a/src/code-stubs.h +++ b/src/code-stubs.h @@ -123,6 +123,8 @@ namespace internal { // Mode to overwrite BinaryExpression values. enum OverwriteMode { NO_OVERWRITE, OVERWRITE_LEFT, OVERWRITE_RIGHT }; +enum UnaryOverwriteMode { UNARY_OVERWRITE, UNARY_NO_OVERWRITE }; + // Stub is base classes of all stubs. class CodeStub BASE_EMBEDDED { @@ -195,8 +197,6 @@ class CodeStub BASE_EMBEDDED { return -1; } - virtual void PrintName(StringStream* stream); - protected: static bool CanUseFPRegisters(); @@ -208,11 +208,6 @@ class CodeStub BASE_EMBEDDED { // a fixed (non-moveable) code object. virtual bool NeedsImmovableCode() { return false; } - // Returns a name for logging/debugging purposes. - SmartArrayPointer GetName(); - virtual void PrintBaseName(StringStream* stream); - virtual void PrintState(StringStream* stream) { } - private: // Perform bookkeeping required after code generation when stub code is // initially generated. @@ -241,6 +236,10 @@ class CodeStub BASE_EMBEDDED { // If a stub uses a special cache override this. virtual bool UseSpecialCache() { return false; } + // Returns a name for logging/debugging purposes. + SmartArrayPointer GetName(); + virtual void PrintName(StringStream* stream); + // Computes the key based on major and minor. 
uint32_t GetKey() { ASSERT(static_cast(MajorKey()) < NUMBER_OF_IDS); @@ -355,9 +354,6 @@ class HydrogenCodeStub : public CodeStub { Handle GenerateLightweightMissCode(Isolate* isolate); - template - void TraceTransition(StateType from, StateType to); - private: class MinorKeyBits: public BitField {}; class IsMissBits: public BitField {}; @@ -524,74 +520,6 @@ class FastNewBlockContextStub : public PlatformCodeStub { }; -class UnaryOpStub : public HydrogenCodeStub { - public: - // Stub without type info available -> construct uninitialized - explicit UnaryOpStub(Token::Value operation) - : HydrogenCodeStub(UNINITIALIZED), operation_(operation) { } - explicit UnaryOpStub(Code::ExtraICState ic_state) : - state_(StateBits::decode(ic_state)), - operation_(OperatorBits::decode(ic_state)) { } - - virtual void InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor); - - virtual Code::Kind GetCodeKind() const { return Code::UNARY_OP_IC; } - virtual InlineCacheState GetICState() { - if (state_.Contains(GENERIC)) { - return MEGAMORPHIC; - } else if (state_.IsEmpty()) { - return PREMONOMORPHIC; - } else { - return MONOMORPHIC; - } - } - virtual Code::ExtraICState GetExtraICState() { - return OperatorBits::encode(operation_) | - StateBits::encode(state_.ToIntegral()); - } - - Token::Value operation() { return operation_; } - Handle ToJSFunction(Isolate* isolate); - - void UpdateStatus(Handle object); - MaybeObject* Result(Handle object, Isolate* isolate); - Handle GenerateCode(); - Handle GetType(Isolate* isolate); - - protected: - void PrintState(StringStream* stream); - void PrintBaseName(StringStream* stream); - - private: - Builtins::JavaScript ToJSBuiltin(); - - enum UnaryOpType { - SMI, - HEAP_NUMBER, - GENERIC, - NUMBER_OF_TYPES - }; - - class State : public EnumSet { - public: - State() : EnumSet() { } - explicit State(byte bits) : EnumSet(bits) { } - void Print(StringStream* stream) const; - }; - - class StateBits : public BitField { }; - class OperatorBits : public BitField { }; - - State state_; - Token::Value operation_; - - virtual CodeStub::Major MajorKey() { return UnaryOp; } - virtual int NotMissMinorKey() { return GetExtraICState(); } -}; - - class FastCloneShallowArrayStub : public HydrogenCodeStub { public: // Maximum length of copied elements array. 
@@ -1215,6 +1143,7 @@ class CompareNilICStub : public HydrogenCodeStub { } void Print(StringStream* stream) const; + void TraceTransition(State to) const; }; static Handle StateToType( @@ -1277,15 +1206,14 @@ class CompareNilICStub : public HydrogenCodeStub { return NilValueField::decode(state); } - void UpdateStatus(Handle object); + void Record(Handle object); bool IsMonomorphic() const { return state_.Contains(MONOMORPHIC_MAP); } NilValue GetNilValue() const { return nil_value_; } State GetState() const { return state_; } void ClearState() { state_.RemoveAll(); } - virtual void PrintState(StringStream* stream); - virtual void PrintBaseName(StringStream* stream); + virtual void PrintName(StringStream* stream); private: friend class CompareNilIC; @@ -2081,7 +2009,8 @@ class ToBooleanStub: public HydrogenCodeStub { byte ToByte() const { return ToIntegral(); } void Print(StringStream* stream) const; - bool UpdateStatus(Handle object); + void TraceTransition(Types to) const; + bool Record(Handle object); bool NeedsMap() const; bool CanBeUndetectable() const; bool IsGeneric() const { return ToIntegral() == Generic().ToIntegral(); } @@ -2094,7 +2023,7 @@ class ToBooleanStub: public HydrogenCodeStub { explicit ToBooleanStub(Code::ExtraICState state) : types_(static_cast(state)) { } - bool UpdateStatus(Handle object); + bool Record(Handle object); Types GetTypes() { return types_; } virtual Handle GenerateCode(); @@ -2103,7 +2032,7 @@ class ToBooleanStub: public HydrogenCodeStub { CodeStubInterfaceDescriptor* descriptor); virtual Code::Kind GetCodeKind() const { return Code::TO_BOOLEAN_IC; } - virtual void PrintState(StringStream* stream); + virtual void PrintName(StringStream* stream); virtual bool SometimesSetsUpAFrame() { return false; } diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc index 08837c0..932fd47 100644 --- a/src/hydrogen-instructions.cc +++ b/src/hydrogen-instructions.cc @@ -3078,11 +3078,6 @@ HType HCheckHeapObject::CalculateInferredType() { } -HType HCheckSmi::CalculateInferredType() { - return HType::Smi(); -} - - HType HPhi::CalculateInferredType() { HType result = HType::Uninitialized(); for (int i = 0; i < OperandCount(); ++i) { diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h index 9893918..26bda87 100644 --- a/src/hydrogen-instructions.h +++ b/src/hydrogen-instructions.h @@ -92,7 +92,6 @@ class LChunkBuilder; V(CheckHeapObject) \ V(CheckInstanceType) \ V(CheckMaps) \ - V(CheckSmi) \ V(CheckPrototypeMaps) \ V(ClampToUint8) \ V(ClassOfTestAndBranch) \ @@ -129,7 +128,6 @@ class LChunkBuilder; V(InvokeFunction) \ V(IsConstructCallAndBranch) \ V(IsObjectAndBranch) \ - V(IsNumberAndBranch) \ V(IsStringAndBranch) \ V(IsSmiAndBranch) \ V(IsUndetectableAndBranch) \ @@ -2936,49 +2934,6 @@ class HCheckInstanceType: public HUnaryOperation { }; -class HCheckSmi: public HUnaryOperation { - public: - explicit HCheckSmi(HValue* value) : HUnaryOperation(value) { - set_representation(Representation::Smi()); - SetFlag(kUseGVN); - } - - virtual Representation RequiredInputRepresentation(int index) { - return Representation::Tagged(); - } - - virtual HType CalculateInferredType(); - - virtual HValue* Canonicalize() { - HType value_type = value()->type(); - if (value_type.IsSmi()) { - return NULL; - } - return this; - } - - DECLARE_CONCRETE_INSTRUCTION(CheckSmi) - - protected: - virtual bool DataEquals(HValue* other) { return true; } -}; - - -class HIsNumberAndBranch: public HUnaryControlInstruction { - public: - explicit HIsNumberAndBranch(HValue* 
value) - : HUnaryControlInstruction(value, NULL, NULL) { - SetFlag(kFlexibleRepresentation); - } - - virtual Representation RequiredInputRepresentation(int index) { - return Representation::None(); - } - - DECLARE_CONCRETE_INSTRUCTION(IsNumberAndBranch) -}; - - class HCheckHeapObject: public HUnaryOperation { public: explicit HCheckHeapObject(HValue* value) : HUnaryOperation(value) { diff --git a/src/hydrogen.cc b/src/hydrogen.cc index d3e67bb..5c50ed0 100644 --- a/src/hydrogen.cc +++ b/src/hydrogen.cc @@ -1003,17 +1003,6 @@ HReturn* HGraphBuilder::AddReturn(HValue* value) { } -void HGraphBuilder::AddSoftDeoptimize() { - isolate()->counters()->soft_deopts_requested()->Increment(); - if (FLAG_always_opt) return; - if (current_block()->IsDeoptimizing()) return; - Add(); - isolate()->counters()->soft_deopts_inserted()->Increment(); - current_block()->MarkAsDeoptimizing(); - graph()->set_has_soft_deoptimize(true); -} - - HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) { HBasicBlock* b = graph()->CreateBasicBlock(); b->SetInitialEnvironment(env); @@ -1663,39 +1652,6 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context, } -HInstruction* HGraphBuilder::BuildUnaryMathOp( - HValue* input, Handle type, Token::Value operation) { - // We only handle the numeric cases here - type = handle( - Type::Intersect(type, handle(Type::Number(), isolate())), isolate()); - - switch (operation) { - default: - UNREACHABLE(); - case Token::SUB: { - HInstruction* instr = - HMul::New(zone(), environment()->LookupContext(), - input, graph()->GetConstantMinus1()); - Representation rep = Representation::FromType(type); - if (type->Is(Type::None())) { - AddSoftDeoptimize(); - } - if (instr->IsBinaryOperation()) { - HBinaryOperation* binop = HBinaryOperation::cast(instr); - binop->set_observed_input_representation(1, rep); - binop->set_observed_input_representation(2, rep); - } - return instr; - } - case Token::BIT_NOT: - if (type->Is(Type::None())) { - AddSoftDeoptimize(); - } - return new(zone()) HBitNot(input); - } -} - - void HGraphBuilder::BuildCompareNil( HValue* value, Handle type, @@ -4275,6 +4231,17 @@ void HOptimizedGraphBuilder::PushAndAdd(HInstruction* instr) { } +void HOptimizedGraphBuilder::AddSoftDeoptimize() { + isolate()->counters()->soft_deopts_requested()->Increment(); + if (FLAG_always_opt) return; + if (current_block()->IsDeoptimizing()) return; + Add(); + isolate()->counters()->soft_deopts_inserted()->Increment(); + current_block()->MarkAsDeoptimizing(); + graph()->set_has_soft_deoptimize(true); +} + + template HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) { int count = call->argument_count(); @@ -8559,8 +8526,18 @@ void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) { void HOptimizedGraphBuilder::VisitSub(UnaryOperation* expr) { CHECK_ALIVE(VisitForValue(expr->expression())); HValue* value = Pop(); + HValue* context = environment()->LookupContext(); + HInstruction* instr = + HMul::New(zone(), context, value, graph()->GetConstantMinus1()); Handle operand_type = expr->expression()->lower_type(); - HInstruction* instr = BuildUnaryMathOp(value, operand_type, Token::SUB); + Representation rep = ToRepresentation(operand_type); + if (operand_type->Is(Type::None())) { + AddSoftDeoptimize(); + } + if (instr->IsBinaryOperation()) { + HBinaryOperation::cast(instr)->set_observed_input_representation(1, rep); + HBinaryOperation::cast(instr)->set_observed_input_representation(2, rep); + } return ast_context()->ReturnInstruction(instr, expr->id()); 
} @@ -8569,7 +8546,10 @@ void HOptimizedGraphBuilder::VisitBitNot(UnaryOperation* expr) { CHECK_ALIVE(VisitForValue(expr->expression())); HValue* value = Pop(); Handle operand_type = expr->expression()->lower_type(); - HInstruction* instr = BuildUnaryMathOp(value, operand_type, Token::BIT_NOT); + if (operand_type->Is(Type::None())) { + AddSoftDeoptimize(); + } + HInstruction* instr = new(zone()) HBitNot(value); return ast_context()->ReturnInstruction(instr, expr->id()); } @@ -8623,7 +8603,7 @@ HInstruction* HOptimizedGraphBuilder::BuildIncrement( CountOperation* expr) { // The input to the count operation is on top of the expression stack. TypeInfo info = expr->type(); - Representation rep = Representation::FromType(info); + Representation rep = ToRepresentation(info); if (rep.IsNone() || rep.IsTagged()) { rep = Representation::Smi(); } @@ -8931,10 +8911,9 @@ HInstruction* HOptimizedGraphBuilder::BuildBinaryOperation( Handle right_type = expr->right()->lower_type(); Handle result_type = expr->result_type(); Maybe fixed_right_arg = expr->fixed_right_arg(); - Representation left_rep = Representation::FromType(left_type); - Representation right_rep = Representation::FromType(right_type); - Representation result_rep = Representation::FromType(result_type); - + Representation left_rep = ToRepresentation(left_type); + Representation right_rep = ToRepresentation(right_type); + Representation result_rep = ToRepresentation(result_type); if (left_type->Is(Type::None())) { AddSoftDeoptimize(); // TODO(rossberg): we should be able to get rid of non-continuous defaults. @@ -9162,6 +9141,26 @@ void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) { } +// TODO(rossberg): this should die eventually. +Representation HOptimizedGraphBuilder::ToRepresentation(TypeInfo info) { + if (info.IsUninitialized()) return Representation::None(); + // TODO(verwaest): Return Smi rather than Integer32. 
+ if (info.IsSmi()) return Representation::Integer32(); + if (info.IsInteger32()) return Representation::Integer32(); + if (info.IsDouble()) return Representation::Double(); + if (info.IsNumber()) return Representation::Double(); + return Representation::Tagged(); +} + + +Representation HOptimizedGraphBuilder::ToRepresentation(Handle type) { + if (type->Is(Type::None())) return Representation::None(); + if (type->Is(Type::Signed32())) return Representation::Integer32(); + if (type->Is(Type::Number())) return Representation::Double(); + return Representation::Tagged(); +} + + void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr, HTypeof* typeof_expr, Handle check) { @@ -9254,9 +9253,9 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) { Handle left_type = expr->left()->lower_type(); Handle right_type = expr->right()->lower_type(); Handle combined_type = expr->combined_type(); - Representation combined_rep = Representation::FromType(combined_type); - Representation left_rep = Representation::FromType(left_type); - Representation right_rep = Representation::FromType(right_type); + Representation combined_rep = ToRepresentation(combined_type); + Representation left_rep = ToRepresentation(left_type); + Representation right_rep = ToRepresentation(right_type); CHECK_ALIVE(VisitForValue(expr->left())); CHECK_ALIVE(VisitForValue(expr->right())); @@ -9385,8 +9384,8 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) { result->set_position(expr->position()); return ast_context()->ReturnInstruction(result, expr->id()); } else { - // TODO(verwaest): Remove once Representation::FromType properly - // returns Smi when the IC measures Smi. + // TODO(verwaest): Remove once ToRepresentation properly returns Smi when + // the IC measures Smi. if (left_type->Is(Type::Smi())) left_rep = Representation::Smi(); if (right_type->Is(Type::Smi())) right_rep = Representation::Smi(); HCompareIDAndBranch* result = diff --git a/src/hydrogen.h b/src/hydrogen.h index 2f3414e..3a8d242 100644 --- a/src/hydrogen.h +++ b/src/hydrogen.h @@ -1137,8 +1137,6 @@ class HGraphBuilder { HLoadNamedField* AddLoadFixedArrayLength(HValue *object); - void AddSoftDeoptimize(); - class IfBuilder { public: explicit IfBuilder(HGraphBuilder* builder, @@ -1419,9 +1417,6 @@ class HGraphBuilder { ElementsKind kind, int length); - HInstruction* BuildUnaryMathOp( - HValue* value, Handle type, Token::Value token); - void BuildCompareNil( HValue* value, Handle type, @@ -1509,6 +1504,8 @@ class HOptimizedGraphBuilder: public HGraphBuilder, public AstVisitor { bool inline_bailout() { return inline_bailout_; } + void AddSoftDeoptimize(); + void Bailout(const char* reason); HBasicBlock* CreateJoin(HBasicBlock* first, @@ -1690,6 +1687,9 @@ class HOptimizedGraphBuilder: public HGraphBuilder, public AstVisitor { // to push them as outgoing parameters. 
template HInstruction* PreProcessCall(Instruction* call); + static Representation ToRepresentation(TypeInfo info); + static Representation ToRepresentation(Handle type); + void SetUpScope(Scope* scope); virtual void VisitStatements(ZoneList* statements); diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc index b4a06b4..29a4be2 100644 --- a/src/ia32/code-stubs-ia32.cc +++ b/src/ia32/code-stubs-ia32.cc @@ -230,17 +230,6 @@ void ToBooleanStub::InitializeInterfaceDescriptor( } -void UnaryOpStub::InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor) { - static Register registers[] = { eax }; - descriptor->register_param_count_ = 1; - descriptor->register_params_ = registers; - descriptor->deoptimization_handler_ = - FUNCTION_ADDR(UnaryOpIC_Miss); -} - - #define __ ACCESS_MASM(masm) @@ -770,6 +759,325 @@ static void ConvertHeapNumberToInt32(MacroAssembler* masm, } +void UnaryOpStub::PrintName(StringStream* stream) { + const char* op_name = Token::Name(op_); + const char* overwrite_name = NULL; // Make g++ happy. + switch (mode_) { + case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; + case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; + } + stream->Add("UnaryOpStub_%s_%s_%s", + op_name, + overwrite_name, + UnaryOpIC::GetName(operand_type_)); +} + + +// TODO(svenpanne): Use virtual functions instead of switch. +void UnaryOpStub::Generate(MacroAssembler* masm) { + switch (operand_type_) { + case UnaryOpIC::UNINITIALIZED: + GenerateTypeTransition(masm); + break; + case UnaryOpIC::SMI: + GenerateSmiStub(masm); + break; + case UnaryOpIC::NUMBER: + GenerateNumberStub(masm); + break; + case UnaryOpIC::GENERIC: + GenerateGenericStub(masm); + break; + } +} + + +void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { + __ pop(ecx); // Save return address. + + __ push(eax); // the operand + __ push(Immediate(Smi::FromInt(op_))); + __ push(Immediate(Smi::FromInt(mode_))); + __ push(Immediate(Smi::FromInt(operand_type_))); + + __ push(ecx); // Push return address. + + // Patch the caller to an appropriate specialized stub and return the + // operation result to the caller of the stub. + __ TailCallExternalReference( + ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1); +} + + +// TODO(svenpanne): Use virtual functions instead of switch. +void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { + switch (op_) { + case Token::SUB: + GenerateSmiStubSub(masm); + break; + case Token::BIT_NOT: + GenerateSmiStubBitNot(masm); + break; + default: + UNREACHABLE(); + } +} + + +void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { + Label non_smi, undo, slow; + GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, + Label::kNear, Label::kNear, Label::kNear); + __ bind(&undo); + GenerateSmiCodeUndo(masm); + __ bind(&non_smi); + __ bind(&slow); + GenerateTypeTransition(masm); +} + + +void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { + Label non_smi; + GenerateSmiCodeBitNot(masm, &non_smi); + __ bind(&non_smi); + GenerateTypeTransition(masm); +} + + +void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, + Label* non_smi, + Label* undo, + Label* slow, + Label::Distance non_smi_near, + Label::Distance undo_near, + Label::Distance slow_near) { + // Check whether the value is a smi. + __ JumpIfNotSmi(eax, non_smi, non_smi_near); + + // We can't handle -0 with smis, so use a type transition for that case. 
+ __ test(eax, eax); + __ j(zero, slow, slow_near); + + // Try optimistic subtraction '0 - value', saving operand in eax for undo. + __ mov(edx, eax); + __ Set(eax, Immediate(0)); + __ sub(eax, edx); + __ j(overflow, undo, undo_near); + __ ret(0); +} + + +void UnaryOpStub::GenerateSmiCodeBitNot( + MacroAssembler* masm, + Label* non_smi, + Label::Distance non_smi_near) { + // Check whether the value is a smi. + __ JumpIfNotSmi(eax, non_smi, non_smi_near); + + // Flip bits and revert inverted smi-tag. + __ not_(eax); + __ and_(eax, ~kSmiTagMask); + __ ret(0); +} + + +void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) { + __ mov(eax, edx); +} + + +// TODO(svenpanne): Use virtual functions instead of switch. +void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) { + switch (op_) { + case Token::SUB: + GenerateNumberStubSub(masm); + break; + case Token::BIT_NOT: + GenerateNumberStubBitNot(masm); + break; + default: + UNREACHABLE(); + } +} + + +void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) { + Label non_smi, undo, slow, call_builtin; + GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear); + __ bind(&non_smi); + GenerateHeapNumberCodeSub(masm, &slow); + __ bind(&undo); + GenerateSmiCodeUndo(masm); + __ bind(&slow); + GenerateTypeTransition(masm); + __ bind(&call_builtin); + GenerateGenericCodeFallback(masm); +} + + +void UnaryOpStub::GenerateNumberStubBitNot( + MacroAssembler* masm) { + Label non_smi, slow; + GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); + __ bind(&non_smi); + GenerateHeapNumberCodeBitNot(masm, &slow); + __ bind(&slow); + GenerateTypeTransition(masm); +} + + +void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, + Label* slow) { + __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); + __ cmp(edx, masm->isolate()->factory()->heap_number_map()); + __ j(not_equal, slow); + + if (mode_ == UNARY_OVERWRITE) { + __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset), + Immediate(HeapNumber::kSignMask)); // Flip sign. + } else { + __ mov(edx, eax); + // edx: operand + + Label slow_allocate_heapnumber, heapnumber_allocated; + __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber); + __ jmp(&heapnumber_allocated, Label::kNear); + + __ bind(&slow_allocate_heapnumber); + { + FrameScope scope(masm, StackFrame::INTERNAL); + __ push(edx); + __ CallRuntime(Runtime::kNumberAlloc, 0); + __ pop(edx); + } + + __ bind(&heapnumber_allocated); + // eax: allocated 'empty' number + __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset)); + __ xor_(ecx, HeapNumber::kSignMask); // Flip sign. + __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx); + __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset)); + __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx); + } + __ ret(0); +} + + +void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm, + Label* slow) { + __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); + __ cmp(edx, masm->isolate()->factory()->heap_number_map()); + __ j(not_equal, slow); + + // Convert the heap number in eax to an untagged integer in ecx. + IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow); + + // Do the bitwise operation and check if the result fits in a smi. + Label try_float; + __ not_(ecx); + __ cmp(ecx, 0xc0000000); + __ j(sign, &try_float, Label::kNear); + + // Tag the result as a smi and we're done. + STATIC_ASSERT(kSmiTagSize == 1); + __ lea(eax, Operand(ecx, times_2, kSmiTag)); + __ ret(0); + + // Try to store the result in a heap number. 
+ __ bind(&try_float); + if (mode_ == UNARY_NO_OVERWRITE) { + Label slow_allocate_heapnumber, heapnumber_allocated; + __ mov(ebx, eax); + __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber); + __ jmp(&heapnumber_allocated); + + __ bind(&slow_allocate_heapnumber); + { + FrameScope scope(masm, StackFrame::INTERNAL); + // Push the original HeapNumber on the stack. The integer value can't + // be stored since it's untagged and not in the smi range (so we can't + // smi-tag it). We'll recalculate the value after the GC instead. + __ push(ebx); + __ CallRuntime(Runtime::kNumberAlloc, 0); + // New HeapNumber is in eax. + __ pop(edx); + } + // IntegerConvert uses ebx and edi as scratch registers. + // This conversion won't go slow-case. + IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow); + __ not_(ecx); + + __ bind(&heapnumber_allocated); + } + if (CpuFeatures::IsSupported(SSE2)) { + CpuFeatureScope use_sse2(masm, SSE2); + __ cvtsi2sd(xmm0, ecx); + __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); + } else { + __ push(ecx); + __ fild_s(Operand(esp, 0)); + __ pop(ecx); + __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); + } + __ ret(0); +} + + +// TODO(svenpanne): Use virtual functions instead of switch. +void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { + switch (op_) { + case Token::SUB: + GenerateGenericStubSub(masm); + break; + case Token::BIT_NOT: + GenerateGenericStubBitNot(masm); + break; + default: + UNREACHABLE(); + } +} + + +void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { + Label non_smi, undo, slow; + GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear); + __ bind(&non_smi); + GenerateHeapNumberCodeSub(masm, &slow); + __ bind(&undo); + GenerateSmiCodeUndo(masm); + __ bind(&slow); + GenerateGenericCodeFallback(masm); +} + + +void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { + Label non_smi, slow; + GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); + __ bind(&non_smi); + GenerateHeapNumberCodeBitNot(masm, &slow); + __ bind(&slow); + GenerateGenericCodeFallback(masm); +} + + +void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) { + // Handle the slow case by jumping to the corresponding JavaScript builtin. + __ pop(ecx); // pop return address. + __ push(eax); + __ push(ecx); // push return address + switch (op_) { + case Token::SUB: + __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); + break; + case Token::BIT_NOT: + __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); + break; + default: + UNREACHABLE(); + } +} + + void BinaryOpStub::Initialize() { platform_specific_bit_ = CpuFeatures::IsSupported(SSE3); } diff --git a/src/ia32/code-stubs-ia32.h b/src/ia32/code-stubs-ia32.h index dc21d40..6dc63bd 100644 --- a/src/ia32/code-stubs-ia32.h +++ b/src/ia32/code-stubs-ia32.h @@ -86,6 +86,80 @@ class StoreBufferOverflowStub: public PlatformCodeStub { }; +class UnaryOpStub: public PlatformCodeStub { + public: + UnaryOpStub(Token::Value op, + UnaryOverwriteMode mode, + UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED) + : op_(op), + mode_(mode), + operand_type_(operand_type) { + } + + private: + Token::Value op_; + UnaryOverwriteMode mode_; + + // Operand type information determined at runtime. 
+ UnaryOpIC::TypeInfo operand_type_; + + virtual void PrintName(StringStream* stream); + + class ModeBits: public BitField {}; + class OpBits: public BitField {}; + class OperandTypeInfoBits: public BitField {}; + + Major MajorKey() { return UnaryOp; } + int MinorKey() { + return ModeBits::encode(mode_) + | OpBits::encode(op_) + | OperandTypeInfoBits::encode(operand_type_); + } + + // Note: A lot of the helper functions below will vanish when we use virtual + // function instead of switch more often. + void Generate(MacroAssembler* masm); + + void GenerateTypeTransition(MacroAssembler* masm); + + void GenerateSmiStub(MacroAssembler* masm); + void GenerateSmiStubSub(MacroAssembler* masm); + void GenerateSmiStubBitNot(MacroAssembler* masm); + void GenerateSmiCodeSub(MacroAssembler* masm, + Label* non_smi, + Label* undo, + Label* slow, + Label::Distance non_smi_near = Label::kFar, + Label::Distance undo_near = Label::kFar, + Label::Distance slow_near = Label::kFar); + void GenerateSmiCodeBitNot(MacroAssembler* masm, + Label* non_smi, + Label::Distance non_smi_near = Label::kFar); + void GenerateSmiCodeUndo(MacroAssembler* masm); + + void GenerateNumberStub(MacroAssembler* masm); + void GenerateNumberStubSub(MacroAssembler* masm); + void GenerateNumberStubBitNot(MacroAssembler* masm); + void GenerateHeapNumberCodeSub(MacroAssembler* masm, Label* slow); + void GenerateHeapNumberCodeBitNot(MacroAssembler* masm, Label* slow); + + void GenerateGenericStub(MacroAssembler* masm); + void GenerateGenericStubSub(MacroAssembler* masm); + void GenerateGenericStubBitNot(MacroAssembler* masm); + void GenerateGenericCodeFallback(MacroAssembler* masm); + + virtual Code::Kind GetCodeKind() const { return Code::UNARY_OP_IC; } + + virtual InlineCacheState GetICState() { + return UnaryOpIC::ToState(operand_type_); + } + + virtual void FinishCode(Handle code) { + code->set_unary_op_type(operand_type_); + } +}; + + class StringHelper : public AllStatic { public: // Generate code for copying characters using a simple loop. This should only diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc index 6e98645..cf3132d 100644 --- a/src/ia32/full-codegen-ia32.cc +++ b/src/ia32/full-codegen-ia32.cc @@ -4363,7 +4363,10 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr, const char* comment) { Comment cmt(masm_, comment); - UnaryOpStub stub(expr->op()); + bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); + UnaryOverwriteMode overwrite = + can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; + UnaryOpStub stub(expr->op(), overwrite); // UnaryOpStub expects the argument to be in the // accumulator register eax. 
VisitForAccumulatorValue(expr->expression()); diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc index 2ddeb27..defae1c 100644 --- a/src/ia32/lithium-codegen-ia32.cc +++ b/src/ia32/lithium-codegen-ia32.cc @@ -2036,16 +2036,6 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) { } -void LCodeGen::DoNegateNoSSE2D(LNegateNoSSE2D* instr) { - __ push(Immediate(-1)); - __ fild_s(Operand(esp, 0)); - __ add(esp, Immediate(kPointerSize)); - __ fmulp(); - CurrentInstructionReturnsX87Result(); -} - - - void LCodeGen::DoArithmeticT(LArithmeticT* instr) { ASSERT(ToRegister(instr->context()).is(esi)); ASSERT(ToRegister(instr->left()).is(edx)); @@ -2068,12 +2058,12 @@ int LCodeGen::GetNextEmittedBlock() const { template void LCodeGen::EmitBranch(InstrType instr, Condition cc) { - int left_block = instr->TrueDestination(chunk_); int right_block = instr->FalseDestination(chunk_); + int left_block = instr->TrueDestination(chunk_); int next_block = GetNextEmittedBlock(); - if (right_block == left_block || cc == no_condition) { + if (right_block == left_block) { EmitGoto(left_block); } else if (left_block == next_block) { __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block)); @@ -2086,25 +2076,6 @@ void LCodeGen::EmitBranch(InstrType instr, Condition cc) { } -void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) { - Representation r = instr->hydrogen()->value()->representation(); - if (r.IsSmiOrInteger32() || r.IsDouble()) { - EmitBranch(instr, no_condition); - } else { - ASSERT(r.IsTagged()); - Register reg = ToRegister(instr->value()); - HType type = instr->hydrogen()->value()->type(); - if (type.IsTaggedNumber()) { - EmitBranch(instr, no_condition); - } - __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); - __ cmp(FieldOperand(reg, HeapObject::kMapOffset), - factory()->heap_number_map()); - EmitBranch(instr, equal); - } -} - - void LCodeGen::DoBranch(LBranch* instr) { Representation r = instr->hydrogen()->value()->representation(); if (r.IsSmiOrInteger32()) { diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc index c16ada1..8231c4e 100644 --- a/src/ia32/lithium-ia32.cc +++ b/src/ia32/lithium-ia32.cc @@ -1578,17 +1578,7 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) { } return DefineSameAsFirst(mul); } else if (instr->representation().IsDouble()) { - if (CpuFeatures::IsSafeForSnapshot(SSE2)) { - return DoArithmeticD(Token::MUL, instr); - } - ASSERT(instr->right()->IsConstant() && - static_cast(instr->right())->DoubleValue() == -1); - // TODO(olivf) This is currently just a hack to support the UnaryOp Minus - // Stub. This will go away once we can use more than one X87 register, - // thus fully support binary instructions without SSE2. 
- LOperand* left = UseX87TopOfStack(instr->left()); - LNegateNoSSE2D* result = new(zone()) LNegateNoSSE2D(left); - return DefineX87TOS(result); + return DoArithmeticD(Token::MUL, instr); } else { ASSERT(instr->representation().IsSmiOrTagged()); return DoArithmeticT(Token::MUL, instr); @@ -2048,18 +2038,6 @@ LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) { } -LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) { - LOperand* value = UseRegisterAtStart(instr->value()); - return AssignEnvironment(new(zone()) LCheckSmi(value)); -} - - -LInstruction* LChunkBuilder::DoIsNumberAndBranch(HIsNumberAndBranch* instr) { - return new(zone()) - LIsNumberAndBranch(UseRegisterOrConstantAtStart(instr->value())); -} - - LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) { LOperand* value = UseRegisterAtStart(instr->value()); LOperand* temp = TempRegister(); diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h index 04edb11..e48e881 100644 --- a/src/ia32/lithium-ia32.h +++ b/src/ia32/lithium-ia32.h @@ -114,7 +114,6 @@ class LCodeGen; V(IsObjectAndBranch) \ V(IsStringAndBranch) \ V(IsSmiAndBranch) \ - V(IsNumberAndBranch) \ V(IsUndetectableAndBranch) \ V(Label) \ V(LazyBailout) \ @@ -143,7 +142,6 @@ class LCodeGen; V(MathTan) \ V(ModI) \ V(MulI) \ - V(NegateNoSSE2D) \ V(NumberTagD) \ V(NumberTagI) \ V(NumberTagU) \ @@ -661,18 +659,6 @@ class LMathFloorOfDiv: public LTemplateInstruction<1, 2, 1> { }; -class LNegateNoSSE2D: public LTemplateInstruction<1, 1, 0> { - public: - explicit LNegateNoSSE2D(LOperand* value) { - inputs_[0] = value; - } - - LOperand* value() { return inputs_[0]; } - - DECLARE_CONCRETE_INSTRUCTION(NegateNoSSE2D, "negate-no-sse2-d") -}; - - class LMulI: public LTemplateInstruction<1, 2, 1> { public: LMulI(LOperand* left, LOperand* right, LOperand* temp) { @@ -897,19 +883,6 @@ class LIsObjectAndBranch: public LControlInstruction<1, 1> { }; -class LIsNumberAndBranch: public LControlInstruction<1, 0> { - public: - explicit LIsNumberAndBranch(LOperand* value) { - inputs_[0] = value; - } - - LOperand* value() { return inputs_[0]; } - - DECLARE_CONCRETE_INSTRUCTION(IsNumberAndBranch, "is-number-and-branch") - DECLARE_HYDROGEN_ACCESSOR(IsNumberAndBranch) -}; - - class LIsStringAndBranch: public LControlInstruction<1, 1> { public: LIsStringAndBranch(LOperand* value, LOperand* temp) { diff --git a/src/ic.cc b/src/ic.cc index 8c1df53..ff3a94d 100644 --- a/src/ic.cc +++ b/src/ic.cc @@ -2401,6 +2401,86 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissForceGeneric) { } +void UnaryOpIC::patch(Code* code) { + set_target(code); +} + + +const char* UnaryOpIC::GetName(TypeInfo type_info) { + switch (type_info) { + case UNINITIALIZED: return "Uninitialized"; + case SMI: return "Smi"; + case NUMBER: return "Number"; + case GENERIC: return "Generic"; + default: return "Invalid"; + } +} + + +UnaryOpIC::State UnaryOpIC::ToState(TypeInfo type_info) { + switch (type_info) { + case UNINITIALIZED: + return v8::internal::UNINITIALIZED; + case SMI: + case NUMBER: + return MONOMORPHIC; + case GENERIC: + return v8::internal::GENERIC; + } + UNREACHABLE(); + return v8::internal::UNINITIALIZED; +} + + +Handle UnaryOpIC::TypeInfoToType(TypeInfo type_info, Isolate* isolate) { + switch (type_info) { + case UNINITIALIZED: + return handle(Type::None(), isolate); + case SMI: + return handle(Type::Smi(), isolate); + case NUMBER: + return handle(Type::Number(), isolate); + case GENERIC: + return handle(Type::Any(), isolate); + } + UNREACHABLE(); + return 
handle(Type::Any(), isolate); +} + + +UnaryOpIC::TypeInfo UnaryOpIC::GetTypeInfo(Handle operand) { + v8::internal::TypeInfo operand_type = + v8::internal::TypeInfo::FromValue(operand); + if (operand_type.IsSmi()) { + return SMI; + } else if (operand_type.IsNumber()) { + return NUMBER; + } else { + return GENERIC; + } +} + + +UnaryOpIC::TypeInfo UnaryOpIC::ComputeNewType( + TypeInfo current_type, + TypeInfo previous_type) { + switch (previous_type) { + case UNINITIALIZED: + return current_type; + case SMI: + return (current_type == GENERIC) ? GENERIC : NUMBER; + case NUMBER: + return GENERIC; + case GENERIC: + // We should never do patching if we are in GENERIC state. + UNREACHABLE(); + return GENERIC; + } + UNREACHABLE(); + return GENERIC; +} + + void BinaryOpIC::patch(Code* code) { set_target(code); } @@ -2478,24 +2558,57 @@ void BinaryOpIC::StubInfoToType(int minor_key, } -MaybeObject* UnaryOpIC::Transition(Handle object) { - Code::ExtraICState extra_ic_state = target()->extended_extra_ic_state(); - UnaryOpStub stub(extra_ic_state); +RUNTIME_FUNCTION(MaybeObject*, UnaryOp_Patch) { + ASSERT(args.length() == 4); - stub.UpdateStatus(object); + HandleScope scope(isolate); + Handle operand = args.at(0); + Token::Value op = static_cast(args.smi_at(1)); + UnaryOverwriteMode mode = static_cast(args.smi_at(2)); + UnaryOpIC::TypeInfo previous_type = + static_cast(args.smi_at(3)); - Handle code = stub.GetCode(isolate()); - set_target(*code); + UnaryOpIC::TypeInfo type = UnaryOpIC::GetTypeInfo(operand); + type = UnaryOpIC::ComputeNewType(type, previous_type); - return stub.Result(object, isolate()); -} + UnaryOpStub stub(op, mode, type); + Handle code = stub.GetCode(isolate); + if (!code.is_null()) { + if (FLAG_trace_ic) { + PrintF("[UnaryOpIC in "); + JavaScriptFrame::PrintTop(isolate, stdout, false, true); + PrintF(" %s => %s #%s @ %p]\n", + UnaryOpIC::GetName(previous_type), + UnaryOpIC::GetName(type), + Token::Name(op), + static_cast(*code)); + } + UnaryOpIC ic(isolate); + ic.patch(*code); + } + + Handle builtins(isolate->js_builtins_object()); + Object* builtin = NULL; // Initialization calms down the compiler. + switch (op) { + case Token::SUB: + builtin = builtins->javascript_builtin(Builtins::UNARY_MINUS); + break; + case Token::BIT_NOT: + builtin = builtins->javascript_builtin(Builtins::BIT_NOT); + break; + default: + UNREACHABLE(); + } + Handle builtin_function(JSFunction::cast(builtin), isolate); -RUNTIME_FUNCTION(MaybeObject*, UnaryOpIC_Miss) { - HandleScope scope(isolate); - Handle object = args.at(0); - UnaryOpIC ic(isolate); - return ic.Transition(object); + bool caught_exception; + Handle result = Execution::Call(builtin_function, operand, 0, NULL, + &caught_exception); + if (caught_exception) { + return Failure::Exception(); + } + return *result; } @@ -2956,7 +3069,9 @@ MaybeObject* CompareNilIC::CompareNil(Handle object) { // types must be supported as a result of the miss. 
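// --- Illustrative sketch, not part of the patch ----------------------------
// Both the UnaryOpIC::ComputeNewType added above and the CompareNilIC
// recording below only ever widen the feedback they have recorded.  The unary
// lattice (UNINITIALIZED -> SMI -> NUMBER -> GENERIC), written out as a
// standalone function with hypothetical names:
enum UnaryFeedbackSketch { kUninitializedS, kSmiS, kNumberS, kGenericS };
static UnaryFeedbackSketch WidenUnaryFeedback(UnaryFeedbackSketch seen,
                                              UnaryFeedbackSketch previous) {
  if (previous == kUninitializedS) return seen;  // first observation wins
  if (previous == kSmiS) return seen == kGenericS ? kGenericS : kNumberS;
  return kGenericS;  // NUMBER widens to GENERIC; a GENERIC stub never re-patches
}
// ----------------------------------------------------------------------------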
bool already_monomorphic = stub.IsMonomorphic(); - stub.UpdateStatus(object); + CompareNilICStub::State old_state = stub.GetState(); + stub.Record(object); + old_state.TraceTransition(stub.GetState()); NilValue nil = stub.GetNilValue(); @@ -2993,7 +3108,7 @@ RUNTIME_FUNCTION(MaybeObject*, Unreachable) { MaybeObject* ToBooleanIC::ToBoolean(Handle object, Code::ExtraICState extra_ic_state) { ToBooleanStub stub(extra_ic_state); - bool to_boolean_value = stub.UpdateStatus(object); + bool to_boolean_value = stub.Record(object); Handle code = stub.GetCode(isolate()); set_target(*code); return Smi::FromInt(to_boolean_value ? 1 : 0); diff --git a/src/ic.h b/src/ic.h index f6b4800..829c6b1 100644 --- a/src/ic.h +++ b/src/ic.h @@ -57,6 +57,7 @@ namespace internal { ICU(LoadPropertyWithInterceptorForCall) \ ICU(KeyedLoadPropertyWithInterceptor) \ ICU(StoreInterceptorProperty) \ + ICU(UnaryOp_Patch) \ ICU(BinaryOp_Patch) \ ICU(CompareIC_Miss) \ ICU(CompareNilIC_Miss) \ @@ -680,9 +681,28 @@ class KeyedStoreIC: public StoreIC { class UnaryOpIC: public IC { public: - explicit UnaryOpIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) { } + // sorted: increasingly more unspecific (ignoring UNINITIALIZED) + // TODO(svenpanne) Using enums+switch is an antipattern, use a class instead. + enum TypeInfo { + UNINITIALIZED, + SMI, + NUMBER, + GENERIC + }; + + static Handle TypeInfoToType(TypeInfo info, Isolate* isolate); + + explicit UnaryOpIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { } + + void patch(Code* code); + + static const char* GetName(TypeInfo type_info); + + static State ToState(TypeInfo type_info); + + static TypeInfo GetTypeInfo(Handle operand); - MUST_USE_RESULT MaybeObject* Transition(Handle object); + static TypeInfo ComputeNewType(TypeInfo type, TypeInfo previous); }; @@ -818,7 +838,6 @@ void PatchInlinedSmiCode(Address address, InlinedSmiCheck check); DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissFromStubFailure); DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissFromStubFailure); -DECLARE_RUNTIME_FUNCTION(MaybeObject*, UnaryOpIC_Miss); DECLARE_RUNTIME_FUNCTION(MaybeObject*, CompareNilIC_Miss); DECLARE_RUNTIME_FUNCTION(MaybeObject*, ToBooleanIC_Miss); diff --git a/src/objects.cc b/src/objects.cc index ba894e9..aa67876 100644 --- a/src/objects.cc +++ b/src/objects.cc @@ -10610,7 +10610,6 @@ const char* Code::StubType2String(StubType type) { void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) { - PrintF(out, "extra_ic_state = "); const char* name = NULL; switch (kind) { case CALL_IC: @@ -10628,9 +10627,9 @@ void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) { break; } if (name != NULL) { - PrintF(out, "%s\n", name); + PrintF(out, "extra_ic_state = %s\n", name); } else { - PrintF(out, "%d\n", extra); + PrintF(out, "extra_ic_state = %d\n", extra); } } @@ -10639,8 +10638,7 @@ void Code::Disassemble(const char* name, FILE* out) { PrintF(out, "kind = %s\n", Kind2String(kind())); if (is_inline_cache_stub()) { PrintF(out, "ic_state = %s\n", ICState2String(ic_state())); - PrintExtraICState(out, kind(), needs_extended_extra_ic_state(kind()) ? 
- extended_extra_ic_state() : extra_ic_state()); + PrintExtraICState(out, kind(), extra_ic_state()); if (ic_state() == MONOMORPHIC) { PrintF(out, "type = %s\n", StubType2String(type())); } diff --git a/src/objects.h b/src/objects.h index 7c7f7ed..416ed7f 100644 --- a/src/objects.h +++ b/src/objects.h @@ -4567,8 +4567,7 @@ class Code: public HeapObject { // TODO(danno): This is a bit of a hack right now since there are still // clients of this API that pass "extra" values in for argc. These clients // should be retrofitted to used ExtendedExtraICState. - return kind == COMPARE_NIL_IC || kind == TO_BOOLEAN_IC || - kind == UNARY_OP_IC; + return kind == COMPARE_NIL_IC || kind == TO_BOOLEAN_IC; } inline StubType type(); // Only valid for monomorphic IC stubs. diff --git a/src/property-details.h b/src/property-details.h index ac36563..b0d10e1 100644 --- a/src/property-details.h +++ b/src/property-details.h @@ -55,8 +55,6 @@ namespace v8 { namespace internal { class Smi; -class Type; -class TypeInfo; // Type of properties. // Order of properties is significant. @@ -103,10 +101,6 @@ class Representation { static Representation FromKind(Kind kind) { return Representation(kind); } - // TODO(rossberg): this should die eventually. - static Representation FromType(TypeInfo info); - static Representation FromType(Handle type); - bool Equals(const Representation& other) const { return kind_ == other.kind_; } diff --git a/src/type-info.cc b/src/type-info.cc index e2be3aa..83eb9c4 100644 --- a/src/type-info.cc +++ b/src/type-info.cc @@ -395,7 +395,8 @@ Handle TypeFeedbackOracle::UnaryType(TypeFeedbackId id) { } Handle code = Handle::cast(object); ASSERT(code->is_unary_op_stub()); - return UnaryOpStub(code->extra_ic_state()).GetType(isolate()); + return UnaryOpIC::TypeInfoToType( + static_cast(code->unary_op_type()), isolate()); } @@ -697,16 +698,4 @@ void TypeFeedbackOracle::SetInfo(TypeFeedbackId ast_id, Object* target) { #endif } - -Representation Representation::FromType(TypeInfo info) { - if (info.IsUninitialized()) return Representation::None(); - // TODO(verwaest): Return Smi rather than Integer32. 
- if (info.IsSmi()) return Representation::Integer32(); - if (info.IsInteger32()) return Representation::Integer32(); - if (info.IsDouble()) return Representation::Double(); - if (info.IsNumber()) return Representation::Double(); - return Representation::Tagged(); -} - - } } // namespace v8::internal diff --git a/src/types.cc b/src/types.cc index d39a22e..1275dea 100644 --- a/src/types.cc +++ b/src/types.cc @@ -476,13 +476,4 @@ Type* Type::Optional(Handle type) { : Union(type, Undefined()->handle_via_isolate_of(*type)); } - -Representation Representation::FromType(Handle type) { - if (type->Is(Type::None())) return Representation::None(); - if (type->Is(Type::Signed32())) return Representation::Integer32(); - if (type->Is(Type::Number())) return Representation::Double(); - return Representation::Tagged(); -} - - } } // namespace v8::internal diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc index 31e2353..9233848 100644 --- a/src/x64/code-stubs-x64.cc +++ b/src/x64/code-stubs-x64.cc @@ -222,18 +222,7 @@ void ToBooleanStub::InitializeInterfaceDescriptor( descriptor->deoptimization_handler_ = FUNCTION_ADDR(ToBooleanIC_Miss); descriptor->SetMissHandler( - ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); -} - - -void UnaryOpStub::InitializeInterfaceDescriptor( - Isolate* isolate, - CodeStubInterfaceDescriptor* descriptor) { - static Register registers[] = { rax }; - descriptor->register_param_count_ = 1; - descriptor->register_params_ = registers; - descriptor->deoptimization_handler_ = - FUNCTION_ADDR(UnaryOpIC_Miss); + ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); } @@ -654,6 +643,259 @@ void IntegerConvert(MacroAssembler* masm, } +void UnaryOpStub::Generate(MacroAssembler* masm) { + switch (operand_type_) { + case UnaryOpIC::UNINITIALIZED: + GenerateTypeTransition(masm); + break; + case UnaryOpIC::SMI: + GenerateSmiStub(masm); + break; + case UnaryOpIC::NUMBER: + GenerateNumberStub(masm); + break; + case UnaryOpIC::GENERIC: + GenerateGenericStub(masm); + break; + } +} + + +void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { + __ pop(rcx); // Save return address. + + __ push(rax); // the operand + __ Push(Smi::FromInt(op_)); + __ Push(Smi::FromInt(mode_)); + __ Push(Smi::FromInt(operand_type_)); + + __ push(rcx); // Push return address. + + // Patch the caller to an appropriate specialized stub and return the + // operation result to the caller of the stub. + __ TailCallExternalReference( + ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1); +} + + +// TODO(svenpanne): Use virtual functions instead of switch. 
+void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { + switch (op_) { + case Token::SUB: + GenerateSmiStubSub(masm); + break; + case Token::BIT_NOT: + GenerateSmiStubBitNot(masm); + break; + default: + UNREACHABLE(); + } +} + + +void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { + Label slow; + GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear); + __ bind(&slow); + GenerateTypeTransition(masm); +} + + +void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { + Label non_smi; + GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); + __ bind(&non_smi); + GenerateTypeTransition(masm); +} + + +void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, + Label* non_smi, + Label* slow, + Label::Distance non_smi_near, + Label::Distance slow_near) { + Label done; + __ JumpIfNotSmi(rax, non_smi, non_smi_near); + __ SmiNeg(rax, rax, &done, Label::kNear); + __ jmp(slow, slow_near); + __ bind(&done); + __ ret(0); +} + + +void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, + Label* non_smi, + Label::Distance non_smi_near) { + __ JumpIfNotSmi(rax, non_smi, non_smi_near); + __ SmiNot(rax, rax); + __ ret(0); +} + + +// TODO(svenpanne): Use virtual functions instead of switch. +void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) { + switch (op_) { + case Token::SUB: + GenerateNumberStubSub(masm); + break; + case Token::BIT_NOT: + GenerateNumberStubBitNot(masm); + break; + default: + UNREACHABLE(); + } +} + + +void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) { + Label non_smi, slow, call_builtin; + GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear); + __ bind(&non_smi); + GenerateHeapNumberCodeSub(masm, &slow); + __ bind(&slow); + GenerateTypeTransition(masm); + __ bind(&call_builtin); + GenerateGenericCodeFallback(masm); +} + + +void UnaryOpStub::GenerateNumberStubBitNot( + MacroAssembler* masm) { + Label non_smi, slow; + GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); + __ bind(&non_smi); + GenerateHeapNumberCodeBitNot(masm, &slow); + __ bind(&slow); + GenerateTypeTransition(masm); +} + + +void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, + Label* slow) { + // Check if the operand is a heap number. + __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), + Heap::kHeapNumberMapRootIndex); + __ j(not_equal, slow); + + // Operand is a float, negate its value by flipping the sign bit. + if (mode_ == UNARY_OVERWRITE) { + __ Set(kScratchRegister, 0x01); + __ shl(kScratchRegister, Immediate(63)); + __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); + } else { + // Allocate a heap number before calculating the answer, + // so we don't have an untagged double around during GC. + Label slow_allocate_heapnumber, heapnumber_allocated; + __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); + __ jmp(&heapnumber_allocated); + + __ bind(&slow_allocate_heapnumber); + { + FrameScope scope(masm, StackFrame::INTERNAL); + __ push(rax); + __ CallRuntime(Runtime::kNumberAlloc, 0); + __ movq(rcx, rax); + __ pop(rax); + } + __ bind(&heapnumber_allocated); + // rcx: allocated 'empty' number + + // Copy the double value to the new heap number, flipping the sign. + __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); + __ Set(kScratchRegister, 0x01); + __ shl(kScratchRegister, Immediate(63)); + __ xor_(rdx, kScratchRegister); // Flip sign. 
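// --- Illustrative sketch, not part of the patch ----------------------------
// The xor with kScratchRegister above negates the boxed double purely by
// flipping its IEEE-754 sign bit.  The same trick in standalone C++ (the
// helper name is hypothetical):
#include <cstdint>
#include <cstring>
static double NegateViaSignBit(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);  // bit-cast without aliasing UB
  bits ^= 1ULL << 63;                       // flip only the sign bit
  std::memcpy(&value, &bits, sizeof bits);
  return value;  // handles +/-0 and infinities; NaN keeps its payload
}
// ----------------------------------------------------------------------------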
+ __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); + __ movq(rax, rcx); + } + __ ret(0); +} + + +void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm, + Label* slow) { + // Check if the operand is a heap number. + __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), + Heap::kHeapNumberMapRootIndex); + __ j(not_equal, slow); + + // Convert the heap number in rax to an untagged integer in rcx. + IntegerConvert(masm, rax, rax); + + // Do the bitwise operation and smi tag the result. + __ notl(rax); + __ Integer32ToSmi(rax, rax); + __ ret(0); +} + + +// TODO(svenpanne): Use virtual functions instead of switch. +void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { + switch (op_) { + case Token::SUB: + GenerateGenericStubSub(masm); + break; + case Token::BIT_NOT: + GenerateGenericStubBitNot(masm); + break; + default: + UNREACHABLE(); + } +} + + +void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { + Label non_smi, slow; + GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear); + __ bind(&non_smi); + GenerateHeapNumberCodeSub(masm, &slow); + __ bind(&slow); + GenerateGenericCodeFallback(masm); +} + + +void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { + Label non_smi, slow; + GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); + __ bind(&non_smi); + GenerateHeapNumberCodeBitNot(masm, &slow); + __ bind(&slow); + GenerateGenericCodeFallback(masm); +} + + +void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) { + // Handle the slow case by jumping to the JavaScript builtin. + __ pop(rcx); // pop return address + __ push(rax); + __ push(rcx); // push return address + switch (op_) { + case Token::SUB: + __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); + break; + case Token::BIT_NOT: + __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); + break; + default: + UNREACHABLE(); + } +} + + +void UnaryOpStub::PrintName(StringStream* stream) { + const char* op_name = Token::Name(op_); + const char* overwrite_name = NULL; // Make g++ happy. + switch (mode_) { + case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; + case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; + } + stream->Add("UnaryOpStub_%s_%s_%s", + op_name, + overwrite_name, + UnaryOpIC::GetName(operand_type_)); +} + + void BinaryOpStub::Initialize() {} diff --git a/src/x64/code-stubs-x64.h b/src/x64/code-stubs-x64.h index 228a82a..f6cfad0 100644 --- a/src/x64/code-stubs-x64.h +++ b/src/x64/code-stubs-x64.h @@ -81,6 +81,77 @@ class StoreBufferOverflowStub: public PlatformCodeStub { }; +class UnaryOpStub: public PlatformCodeStub { + public: + UnaryOpStub(Token::Value op, + UnaryOverwriteMode mode, + UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED) + : op_(op), + mode_(mode), + operand_type_(operand_type) { + } + + private: + Token::Value op_; + UnaryOverwriteMode mode_; + + // Operand type information determined at runtime. + UnaryOpIC::TypeInfo operand_type_; + + virtual void PrintName(StringStream* stream); + + class ModeBits: public BitField {}; + class OpBits: public BitField {}; + class OperandTypeInfoBits: public BitField {}; + + Major MajorKey() { return UnaryOp; } + int MinorKey() { + return ModeBits::encode(mode_) + | OpBits::encode(op_) + | OperandTypeInfoBits::encode(operand_type_); + } + + // Note: A lot of the helper functions below will vanish when we use virtual + // function instead of switch more often. 
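// --- Illustrative sketch, not part of the patch ----------------------------
// MinorKey() above packs (mode, op, operand type) into one small integer via
// BitField so that each specialization of the stub gets its own cache entry.
// A plain-integer sketch of the same packing by a hypothetical helper; the
// field widths used here (1-bit mode, 7-bit token, 3-bit type info) are
// assumptions for illustration, not necessarily the widths of the real
// BitField typedefs:
static unsigned EncodeUnaryMinorKeySketch(unsigned mode, unsigned op,
                                          unsigned type_info) {
  return (mode & 0x1u) | ((op & 0x7Fu) << 1) | ((type_info & 0x7u) << 8);
}
// ----------------------------------------------------------------------------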
+ void Generate(MacroAssembler* masm); + + void GenerateTypeTransition(MacroAssembler* masm); + + void GenerateSmiStub(MacroAssembler* masm); + void GenerateSmiStubSub(MacroAssembler* masm); + void GenerateSmiStubBitNot(MacroAssembler* masm); + void GenerateSmiCodeSub(MacroAssembler* masm, + Label* non_smi, + Label* slow, + Label::Distance non_smi_near = Label::kFar, + Label::Distance slow_near = Label::kFar); + void GenerateSmiCodeBitNot(MacroAssembler* masm, + Label* non_smi, + Label::Distance non_smi_near); + + void GenerateNumberStub(MacroAssembler* masm); + void GenerateNumberStubSub(MacroAssembler* masm); + void GenerateNumberStubBitNot(MacroAssembler* masm); + void GenerateHeapNumberCodeSub(MacroAssembler* masm, Label* slow); + void GenerateHeapNumberCodeBitNot(MacroAssembler* masm, Label* slow); + + void GenerateGenericStub(MacroAssembler* masm); + void GenerateGenericStubSub(MacroAssembler* masm); + void GenerateGenericStubBitNot(MacroAssembler* masm); + void GenerateGenericCodeFallback(MacroAssembler* masm); + + virtual Code::Kind GetCodeKind() const { return Code::UNARY_OP_IC; } + + virtual InlineCacheState GetICState() { + return UnaryOpIC::ToState(operand_type_); + } + + virtual void FinishCode(Handle code) { + code->set_unary_op_type(operand_type_); + } +}; + + class StringHelper : public AllStatic { public: // Generate code for copying characters using a simple loop. This should only diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc index 66bc38b..9ad7f58 100644 --- a/src/x64/full-codegen-x64.cc +++ b/src/x64/full-codegen-x64.cc @@ -4353,7 +4353,10 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr, const char* comment) { // TODO(svenpanne): Allowing format strings in Comment would be nice here... Comment cmt(masm_, comment); - UnaryOpStub stub(expr->op()); + bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); + UnaryOverwriteMode overwrite = + can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; + UnaryOpStub stub(expr->op(), overwrite); // UnaryOpStub expects the argument to be in the // accumulator register rax. 
VisitForAccumulatorValue(expr->expression()); diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc index 2c3b74f..de43f86 100644 --- a/src/x64/lithium-codegen-x64.cc +++ b/src/x64/lithium-codegen-x64.cc @@ -1834,12 +1834,12 @@ int LCodeGen::GetNextEmittedBlock() const { template void LCodeGen::EmitBranch(InstrType instr, Condition cc) { - int left_block = instr->TrueDestination(chunk_); int right_block = instr->FalseDestination(chunk_); + int left_block = instr->TrueDestination(chunk_); int next_block = GetNextEmittedBlock(); - if (right_block == left_block || cc == no_condition) { + if (right_block == left_block) { EmitGoto(left_block); } else if (left_block == next_block) { __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block)); @@ -1859,25 +1859,6 @@ void LCodeGen::DoDebugBreak(LDebugBreak* instr) { } -void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) { - Representation r = instr->hydrogen()->value()->representation(); - if (r.IsSmiOrInteger32() || r.IsDouble()) { - EmitBranch(instr, no_condition); - } else { - ASSERT(r.IsTagged()); - Register reg = ToRegister(instr->value()); - HType type = instr->hydrogen()->value()->type(); - if (type.IsTaggedNumber()) { - EmitBranch(instr, no_condition); - } - __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); - __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset), - Heap::kHeapNumberMapRootIndex); - EmitBranch(instr, equal); - } -} - - void LCodeGen::DoBranch(LBranch* instr) { Representation r = instr->hydrogen()->value()->representation(); if (r.IsInteger32()) { diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc index f7e006b..95a44f0 100644 --- a/src/x64/lithium-x64.cc +++ b/src/x64/lithium-x64.cc @@ -1911,18 +1911,6 @@ LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) { } -LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) { - LOperand* value = UseRegisterAtStart(instr->value()); - return AssignEnvironment(new(zone()) LCheckSmi(value)); -} - - -LInstruction* LChunkBuilder::DoIsNumberAndBranch(HIsNumberAndBranch* instr) { - return new(zone()) LIsNumberAndBranch( - UseRegisterOrConstantAtStart(instr->value())); -} - - LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) { LOperand* value = UseRegisterAtStart(instr->value()); LCheckInstanceType* result = new(zone()) LCheckInstanceType(value); diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h index 5397c52..a7530be 100644 --- a/src/x64/lithium-x64.h +++ b/src/x64/lithium-x64.h @@ -120,7 +120,6 @@ class LCodeGen; V(IsObjectAndBranch) \ V(IsStringAndBranch) \ V(IsSmiAndBranch) \ - V(IsNumberAndBranch) \ V(IsUndetectableAndBranch) \ V(Label) \ V(LazyBailout) \ @@ -867,19 +866,6 @@ class LIsObjectAndBranch: public LControlInstruction<1, 0> { }; -class LIsNumberAndBranch: public LControlInstruction<1, 0> { - public: - explicit LIsNumberAndBranch(LOperand* value) { - inputs_[0] = value; - } - - LOperand* value() { return inputs_[0]; } - - DECLARE_CONCRETE_INSTRUCTION(IsNumberAndBranch, "is-number-and-branch") - DECLARE_HYDROGEN_ACCESSOR(IsNumberAndBranch) -}; - - class LIsStringAndBranch: public LControlInstruction<1, 1> { public: explicit LIsStringAndBranch(LOperand* value, LOperand* temp) { -- 2.7.4
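// --- Illustrative sketch, not part of the patch ----------------------------
// The smi fast path restored for unary minus (GenerateSmiCodeSub) must bail
// out to the heap-number path for exactly two inputs whose negation is not a
// smi: zero (since -0 is a heap number) and the most negative smi (whose
// negation overflows the smi range).  A standalone sketch of that predicate,
// assuming a 32-bit smi payload (the real payload width differs per
// architecture, and the helper name is hypothetical):
#include <cstdint>
#include <limits>
static bool NegatedValueFitsInSmi(int32_t payload) {
  return payload != 0 && payload != std::numeric_limits<int32_t>::min();
}
// ----------------------------------------------------------------------------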