From 996fa7d36e4b033794a306f89aae58557e70bc8b Mon Sep 17 00:00:00 2001
From: dcarney
Date: Fri, 6 Feb 2015 01:00:40 -0800
Subject: [PATCH] [turbofan] Remove global InstructionOperand caches.

Review URL: https://codereview.chromium.org/904693002

Cr-Commit-Position: refs/heads/master@{#26479}
---
 src/compiler/graph-visualizer.cc                   |  8 +-
 src/compiler/instruction-selector-impl.h           |  2 +-
 src/compiler/instruction-selector.cc               | 10 +--
 src/compiler/instruction.cc                        | 40 +--------
 src/compiler/instruction.h                         | 95 +++++++++-------
 src/compiler/pipeline.cc                           | 10 ---
 src/compiler/pipeline.h                            |  6 +-
 src/compiler/register-allocator-verifier.cc        |  8 +-
 src/compiler/register-allocator.cc                 | 82 +++++++++++++------
 src/compiler/register-allocator.h                  | 38 +++++++--
 src/v8.cc                                          |  3 -
 test/cctest/compiler/test-gap-resolver.cc          | 10 +--
 test/cctest/compiler/test-jump-threading.cc        |  8 +-
 test/unittests/compiler/move-optimizer-unittest.cc |  6 +-
 14 files changed, 155 insertions(+), 171 deletions(-)

diff --git a/src/compiler/graph-visualizer.cc b/src/compiler/graph-visualizer.cc
index abdd1c9..3cec026 100644
--- a/src/compiler/graph-visualizer.cc
+++ b/src/compiler/graph-visualizer.cc
@@ -698,13 +698,13 @@ void GraphC1Visualizer::PrintLiveRange(LiveRange* range, const char* type) {
     PrintIndent();
     os_ << range->id() << " " << type;
     if (range->HasRegisterAssigned()) {
-      InstructionOperand* op = range->CreateAssignedOperand(zone());
-      int assigned_reg = op->index();
-      if (op->IsDoubleRegister()) {
+      InstructionOperand op = range->GetAssignedOperand();
+      int assigned_reg = op.index();
+      if (op.IsDoubleRegister()) {
         os_ << " \"" << DoubleRegister::AllocationIndexToString(assigned_reg)
             << "\"";
       } else {
-        DCHECK(op->IsRegister());
+        DCHECK(op.IsRegister());
         os_ << " \"" << Register::AllocationIndexToString(assigned_reg) << "\"";
       }
     } else if (range->IsSpilled()) {
diff --git a/src/compiler/instruction-selector-impl.h b/src/compiler/instruction-selector-impl.h
index 0eb3fbe..90898ba 100644
--- a/src/compiler/instruction-selector-impl.h
+++ b/src/compiler/instruction-selector-impl.h
@@ -128,7 +128,7 @@ class OperandGenerator {
   InstructionOperand TempRegister(Register reg) {
     return UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                               Register::ToAllocationIndex(reg),
-                              UnallocatedOperand::kInvalidVirtualRegister);
+                              InstructionOperand::kInvalidVirtualRegister);
   }
 
   InstructionOperand TempImmediate(int32_t imm) {
diff --git a/src/compiler/instruction-selector.cc b/src/compiler/instruction-selector.cc
index 65f1b00..11f455c 100644
--- a/src/compiler/instruction-selector.cc
+++ b/src/compiler/instruction-selector.cc
@@ -30,7 +30,7 @@ InstructionSelector::InstructionSelector(Zone* zone, size_t node_count,
       defined_(node_count, false, zone),
       used_(node_count, false, zone),
       virtual_registers_(node_count,
-                         UnallocatedOperand::kInvalidVirtualRegister, zone) {
+                         InstructionOperand::kInvalidVirtualRegister, zone) {
   instructions_.reserve(node_count);
 }
 
@@ -182,7 +182,7 @@ int InstructionSelector::GetVirtualRegister(const Node* node) {
   size_t const id = node->id();
   DCHECK_LT(id, virtual_registers_.size());
   int virtual_register = virtual_registers_[id];
-  if (virtual_register == UnallocatedOperand::kInvalidVirtualRegister) {
+  if (virtual_register == InstructionOperand::kInvalidVirtualRegister) {
    virtual_register = sequence()->NextVirtualRegister();
    virtual_registers_[id] = virtual_register;
  }
@@ -194,7 +194,7 @@ const std::map<NodeId, int>
 InstructionSelector::GetVirtualRegistersForTesting() const {
   std::map<NodeId, int> virtual_registers;
   for (size_t n = 0; n < virtual_registers_.size(); ++n) {
-    if (virtual_registers_[n] != UnallocatedOperand::kInvalidVirtualRegister) {
+    if (virtual_registers_[n] != InstructionOperand::kInvalidVirtualRegister) {
       NodeId const id = static_cast<NodeId>(n);
       virtual_registers.insert(std::make_pair(id, virtual_registers_[n]));
     }
@@ -239,7 +239,7 @@ void InstructionSelector::MarkAsUsed(Node* node) {
 bool InstructionSelector::IsDouble(const Node* node) const {
   DCHECK_NOT_NULL(node);
   int const virtual_register = virtual_registers_[node->id()];
-  if (virtual_register == UnallocatedOperand::kInvalidVirtualRegister) {
+  if (virtual_register == InstructionOperand::kInvalidVirtualRegister) {
     return false;
   }
   return sequence()->IsDouble(virtual_register);
@@ -256,7 +256,7 @@ void InstructionSelector::MarkAsDouble(Node* node) {
 bool InstructionSelector::IsReference(const Node* node) const {
   DCHECK_NOT_NULL(node);
   int const virtual_register = virtual_registers_[node->id()];
-  if (virtual_register == UnallocatedOperand::kInvalidVirtualRegister) {
+  if (virtual_register == InstructionOperand::kInvalidVirtualRegister) {
     return false;
   }
   return sequence()->IsReference(virtual_register);
diff --git a/src/compiler/instruction.cc b/src/compiler/instruction.cc
index 9ec2fb9..23f33ac 100644
--- a/src/compiler/instruction.cc
+++ b/src/compiler/instruction.cc
@@ -58,44 +58,6 @@ std::ostream& operator<<(std::ostream& os,
 }
 
 
-template <InstructionOperand::Kind kOperandKind, int kNumCachedOperands>
-SubKindOperand<kOperandKind, kNumCachedOperands>*
-    SubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;
-
-
-template <InstructionOperand::Kind kOperandKind, int kNumCachedOperands>
-void SubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
-  if (cache) return;
-  cache = new SubKindOperand[kNumCachedOperands];
-  for (int i = 0; i < kNumCachedOperands; i++) {
-    cache[i].ConvertTo(kOperandKind, i);
-  }
-}
-
-
-template <InstructionOperand::Kind kOperandKind, int kNumCachedOperands>
-void SubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
-  delete[] cache;
-  cache = NULL;
-}
-
-
-void InstructionOperand::SetUpCaches() {
-#define INSTRUCTION_OPERAND_SETUP(name, type, number) \
-  name##Operand::SetUpCache();
-  INSTRUCTION_OPERAND_LIST(INSTRUCTION_OPERAND_SETUP)
-#undef INSTRUCTION_OPERAND_SETUP
-}
-
-
-void InstructionOperand::TearDownCaches() {
-#define INSTRUCTION_OPERAND_TEARDOWN(name, type, number) \
-  name##Operand::TearDownCache();
-  INSTRUCTION_OPERAND_LIST(INSTRUCTION_OPERAND_TEARDOWN)
-#undef INSTRUCTION_OPERAND_TEARDOWN
-}
-
-
 std::ostream& operator<<(std::ostream& os,
                          const PrintableMoveOperands& printable) {
   const MoveOperands& mo = *printable.move_operands_;
@@ -488,7 +450,7 @@ InstructionSequence::InstructionSequence(Isolate* isolate,
 
 int InstructionSequence::NextVirtualRegister() {
   int virtual_register = next_virtual_register_++;
-  CHECK_NE(virtual_register, UnallocatedOperand::kInvalidVirtualRegister);
+  CHECK_NE(virtual_register, InstructionOperand::kInvalidVirtualRegister);
   return virtual_register;
 }
 
diff --git a/src/compiler/instruction.h b/src/compiler/instruction.h
index 59caf48..b5299b3 100644
--- a/src/compiler/instruction.h
+++ b/src/compiler/instruction.h
@@ -27,13 +27,13 @@ namespace compiler {
 const InstructionCode kGapInstruction = -1;
 const InstructionCode kSourcePositionInstruction = -2;
 
-#define INSTRUCTION_OPERAND_LIST(V) \
-  V(Constant, CONSTANT, 0) \
-  V(Immediate, IMMEDIATE, 0) \
-  V(StackSlot, STACK_SLOT, 128) \
-  V(DoubleStackSlot, DOUBLE_STACK_SLOT, 128) \
-  V(Register, REGISTER, RegisterConfiguration::kMaxGeneralRegisters) \
-  V(DoubleRegister, DOUBLE_REGISTER, RegisterConfiguration::kMaxDoubleRegisters)
+#define INSTRUCTION_OPERAND_LIST(V) \
+  V(Constant, CONSTANT) \
+  V(Immediate, IMMEDIATE) \
+  V(StackSlot, STACK_SLOT) \
+  V(DoubleStackSlot, DOUBLE_STACK_SLOT) \
+  V(Register, REGISTER) \
+  V(DoubleRegister, DOUBLE_REGISTER)
 
 class InstructionOperand {
  public:
@@ -62,11 +62,11 @@ class InstructionOperand {
   Kind kind() const { return KindField::decode(value_); }
   int index() const { return static_cast<int>(value_) >> KindField::kSize; }
-#define INSTRUCTION_OPERAND_PREDICATE(name, type, number) \
+#define INSTRUCTION_OPERAND_PREDICATE(name, type) \
   bool Is##name() const { return kind() == type; }
   INSTRUCTION_OPERAND_LIST(INSTRUCTION_OPERAND_PREDICATE)
-  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED, 0)
-  INSTRUCTION_OPERAND_PREDICATE(Invalid, INVALID, 0)
+  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
+  INSTRUCTION_OPERAND_PREDICATE(Invalid, INVALID)
 #undef INSTRUCTION_OPERAND_PREDICATE
   bool Equals(const InstructionOperand* other) const {
     return value_ == other->value_;
   }
@@ -80,10 +80,6 @@ class InstructionOperand {
     if (kind != UNALLOCATED) virtual_register_ = kInvalidVirtualRegister;
   }
 
-  // Calls SetUpCache()/TearDownCache() for each subclass.
-  static void SetUpCaches();
-  static void TearDownCaches();
-
   // TODO(dcarney): get rid of these
   void* operator new(size_t, void* location) { return location; }
   void* operator new(size_t size, Zone* zone) {
@@ -138,9 +134,6 @@ class UnallocatedOperand : public InstructionOperand {
     USED_AT_END
   };
 
-  // TODO(dcarney): remove this.
-  static const int kInvalidVirtualRegister = -1;
-
   UnallocatedOperand(ExtendedPolicy policy, int virtual_register)
       : InstructionOperand(UNALLOCATED, 0, virtual_register) {
     value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
@@ -349,47 +342,33 @@ struct PrintableMoveOperands {
 std::ostream& operator<<(std::ostream& os, const PrintableMoveOperands& mo);
 
 
-template <InstructionOperand::Kind kOperandKind, int kNumCachedOperands>
-class SubKindOperand FINAL : public InstructionOperand {
- public:
-  explicit SubKindOperand(int index)
-      : InstructionOperand(kOperandKind, index) {}
-
-  static SubKindOperand* Create(int index, Zone* zone) {
-    DCHECK(index >= 0);
-    if (index < kNumCachedOperands) return &cache[index];
-    return new (zone) SubKindOperand(index);
-  }
-
-  static SubKindOperand* cast(InstructionOperand* op) {
-    DCHECK(op->kind() == kOperandKind);
-    return reinterpret_cast<SubKindOperand*>(op);
-  }
-
-  static const SubKindOperand* cast(const InstructionOperand* op) {
-    DCHECK(op->kind() == kOperandKind);
-    return reinterpret_cast<const SubKindOperand*>(op);
-  }
-
-  static SubKindOperand cast(const InstructionOperand& op) {
-    DCHECK(op.kind() == kOperandKind);
-    return *static_cast<const SubKindOperand*>(&op);
-  }
-
-  static void SetUpCache();
-  static void TearDownCache();
-
- private:
-  static SubKindOperand* cache;
-
-  SubKindOperand() : InstructionOperand(kOperandKind, 0) {}  // For the caches.
-};
-
-
-#define INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS(name, type, number) \
-  typedef SubKindOperand<InstructionOperand::type, number> name##Operand;
-INSTRUCTION_OPERAND_LIST(INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS)
-#undef INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS
+#define INSTRUCTION_SUBKIND_OPERAND_CLASS(SubKind, kOperandKind) \
+  class SubKind##Operand FINAL : public InstructionOperand { \
+   public: \
+    explicit SubKind##Operand(int index) \
+        : InstructionOperand(kOperandKind, index) {} \
+ \
+    static SubKind##Operand* New(int index, Zone* zone) { \
+      return new (zone) SubKind##Operand(index); \
+    } \
+ \
+    static SubKind##Operand* cast(InstructionOperand* op) { \
+      DCHECK(op->kind() == kOperandKind); \
+      return reinterpret_cast<SubKind##Operand*>(op); \
+    } \
+ \
+    static const SubKind##Operand* cast(const InstructionOperand* op) { \
+      DCHECK(op->kind() == kOperandKind); \
+      return reinterpret_cast<const SubKind##Operand*>(op); \
+    } \
+ \
+    static SubKind##Operand cast(const InstructionOperand& op) { \
+      DCHECK(op.kind() == kOperandKind); \
+      return *static_cast<const SubKind##Operand*>(&op); \
+    } \
+  };
+INSTRUCTION_OPERAND_LIST(INSTRUCTION_SUBKIND_OPERAND_CLASS)
+#undef INSTRUCTION_SUBKIND_OPERAND_CLASS
 
 
 class ParallelMove FINAL : public ZoneObject {
diff --git a/src/compiler/pipeline.cc b/src/compiler/pipeline.cc
index 0153d62..59089e0 100644
--- a/src/compiler/pipeline.cc
+++ b/src/compiler/pipeline.cc
@@ -1174,16 +1174,6 @@ void Pipeline::AllocateRegisters(const RegisterConfiguration* config,
   }
 }
 
-
-void Pipeline::SetUp() {
-  InstructionOperand::SetUpCaches();
-}
-
-
-void Pipeline::TearDown() {
-  InstructionOperand::TearDownCaches();
-}
-
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
diff --git a/src/compiler/pipeline.h b/src/compiler/pipeline.h
index 03d889b..b2ea193 100644
--- a/src/compiler/pipeline.h
+++ b/src/compiler/pipeline.h
@@ -7,13 +7,14 @@
 
 #include "src/v8.h"
 
+// Clients of this interface shouldn't depend on lots of compiler internals.
+// Do not include anything from src/compiler here!
 #include "src/compiler.h"
 
 namespace v8 {
 namespace internal {
 namespace compiler {
 
-// Clients of this interface shouldn't depend on lots of compiler internals.
 class CallDescriptor;
 class Graph;
 class InstructionSequence;
@@ -50,9 +51,6 @@ class Pipeline {
   static inline bool SupportedBackend() { return V8_TURBOFAN_BACKEND != 0; }
   static inline bool SupportedTarget() { return V8_TURBOFAN_TARGET != 0; }
 
-  static void SetUp();
-  static void TearDown();
-
  private:
   static Handle<Code> GenerateCodeForTesting(CompilationInfo* info,
                                              CallDescriptor* call_descriptor,
diff --git a/src/compiler/register-allocator-verifier.cc b/src/compiler/register-allocator-verifier.cc
index 5afbf72..45bf5ec 100644
--- a/src/compiler/register-allocator-verifier.cc
+++ b/src/compiler/register-allocator-verifier.cc
@@ -29,7 +29,7 @@ void RegisterAllocatorVerifier::VerifyInput(
     const OperandConstraint& constraint) {
   CHECK_NE(kSameAsFirst, constraint.type_);
   if (constraint.type_ != kImmediate) {
-    CHECK_NE(UnallocatedOperand::kInvalidVirtualRegister,
+    CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
              constraint.virtual_register_);
   }
 }
@@ -46,7 +46,7 @@ void RegisterAllocatorVerifier::VerifyTemp(
 void RegisterAllocatorVerifier::VerifyOutput(
     const OperandConstraint& constraint) {
   CHECK_NE(kImmediate, constraint.type_);
-  CHECK_NE(UnallocatedOperand::kInvalidVirtualRegister,
+  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
            constraint.virtual_register_);
 }
 
@@ -120,7 +120,7 @@ void RegisterAllocatorVerifier::VerifyAssignment() {
 void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                 OperandConstraint* constraint) {
   constraint->value_ = kMinInt;
-  constraint->virtual_register_ = UnallocatedOperand::kInvalidVirtualRegister;
+  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
   if (op->IsConstant()) {
     constraint->type_ = kConstant;
     constraint->value_ = ConstantOperand::cast(op)->index();
@@ -217,7 +217,7 @@ namespace {
 
 typedef BasicBlock::RpoNumber Rpo;
 
-static const int kInvalidVreg = UnallocatedOperand::kInvalidVirtualRegister;
+static const int kInvalidVreg = InstructionOperand::kInvalidVirtualRegister;
 
 struct PhiData : public ZoneObject {
   PhiData(Rpo definition_rpo, const PhiInstruction* phi, int first_pred_vreg,
diff --git a/src/compiler/register-allocator.cc b/src/compiler/register-allocator.cc
index 7a72b9f..38efdd4 100644
--- a/src/compiler/register-allocator.cc
+++ b/src/compiler/register-allocator.cc
@@ -135,11 +135,12 @@ LiveRange::LiveRange(int id, Zone* zone)
       spills_at_definition_(nullptr) {}
 
 
-void LiveRange::set_assigned_register(int reg, Zone* zone) {
+void LiveRange::set_assigned_register(int reg,
+                                      InstructionOperandCache* operand_cache) {
   DCHECK(!HasRegisterAssigned() && !IsSpilled());
   assigned_register_ = reg;
   // TODO(dcarney): stop aliasing hint operands.
-  ConvertUsesToOperand(CreateAssignedOperand(zone));
+  ConvertUsesToOperand(GetAssignedOperand(operand_cache));
 }
 
 
@@ -250,30 +251,47 @@ bool LiveRange::CanBeSpilled(LifetimePosition pos) {
 }
 
 
-InstructionOperand* LiveRange::CreateAssignedOperand(Zone* zone) const {
-  InstructionOperand* op = nullptr;
+InstructionOperand* LiveRange::GetAssignedOperand(
+    InstructionOperandCache* cache) const {
   if (HasRegisterAssigned()) {
     DCHECK(!IsSpilled());
     switch (Kind()) {
       case GENERAL_REGISTERS:
-        op = RegisterOperand::Create(assigned_register(), zone);
-        break;
+        return cache->RegisterOperand(assigned_register());
       case DOUBLE_REGISTERS:
-        op = DoubleRegisterOperand::Create(assigned_register(), zone);
-        break;
+        return cache->DoubleRegisterOperand(assigned_register());
       default:
         UNREACHABLE();
     }
-  } else {
-    DCHECK(IsSpilled());
-    DCHECK(!HasRegisterAssigned());
-    op = TopLevel()->GetSpillOperand();
-    DCHECK(!op->IsUnallocated());
   }
+  DCHECK(IsSpilled());
+  DCHECK(!HasRegisterAssigned());
+  auto op = TopLevel()->GetSpillOperand();
+  DCHECK(!op->IsUnallocated());
   return op;
 }
 
 
+InstructionOperand LiveRange::GetAssignedOperand() const {
+  if (HasRegisterAssigned()) {
+    DCHECK(!IsSpilled());
+    switch (Kind()) {
+      case GENERAL_REGISTERS:
+        return RegisterOperand(assigned_register());
+      case DOUBLE_REGISTERS:
+        return DoubleRegisterOperand(assigned_register());
+      default:
+        UNREACHABLE();
+    }
+  }
+  DCHECK(IsSpilled());
+  DCHECK(!HasRegisterAssigned());
+  auto op = TopLevel()->GetSpillOperand();
+  DCHECK(!op->IsUnallocated());
+  return *op;
+}
+
+
 UseInterval* LiveRange::FirstSearchIntervalForPosition(
     LifetimePosition position) const {
   if (current_interval_ == nullptr) return first_interval_;
@@ -553,6 +571,18 @@ LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
 }
 
 
+InstructionOperandCache::InstructionOperandCache() {
+  for (size_t i = 0; i < arraysize(general_register_operands_); ++i) {
+    general_register_operands_[i] =
+        i::compiler::RegisterOperand(static_cast<int>(i));
+  }
+  for (size_t i = 0; i < arraysize(double_register_operands_); ++i) {
+    double_register_operands_[i] =
+        i::compiler::DoubleRegisterOperand(static_cast<int>(i));
+  }
+}
+
+
 RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config,
                                      Zone* zone, Frame* frame,
                                      InstructionSequence* code,
@@ -562,6 +592,7 @@ RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config,
       code_(code),
       debug_name_(debug_name),
       config_(config),
+      operand_cache_(new (code_zone()) InstructionOperandCache()),
       phi_map_(PhiMap::key_compare(), PhiMap::allocator_type(local_zone())),
       live_in_sets_(code->InstructionBlockCount(), nullptr, local_zone()),
       live_ranges_(code->VirtualRegisterCount() * 2, nullptr, local_zone()),
@@ -572,7 +603,6 @@ RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config,
       unhandled_live_ranges_(local_zone()),
       active_live_ranges_(local_zone()),
       inactive_live_ranges_(local_zone()),
-      reusable_slots_(local_zone()),
       spill_ranges_(local_zone()),
       mode_(UNALLOCATED_REGISTERS),
       num_registers_(-1) {
@@ -588,7 +618,6 @@ RegisterAllocator::RegisterAllocator(const RegisterConfiguration* config,
       static_cast<size_t>(code->VirtualRegisterCount() * 2));
   active_live_ranges().reserve(8);
   inactive_live_ranges().reserve(8);
-  reusable_slots().reserve(8);
   spill_ranges().reserve(8);
   assigned_registers_ =
       new (code_zone()) BitVector(config->num_general_registers(), code_zone());
@@ -930,7 +959,7 @@ void RegisterAllocator::CommitAssignment() {
     if (range == nullptr || range->IsEmpty()) continue;
     // Register assignments were committed in set_assigned_register.
     if (range->HasRegisterAssigned()) continue;
-    auto assigned = range->CreateAssignedOperand(code_zone());
+    auto assigned = range->GetAssignedOperand(operand_cache());
     range->ConvertUsesToOperand(assigned);
     if (range->IsSpilled()) {
       range->CommitSpillsAtDefinition(code(), assigned);
@@ -1425,8 +1454,10 @@ void RegisterAllocator::ConnectRanges() {
         }
         if (should_insert) {
           auto move = GetConnectingParallelMove(pos);
-          auto prev_operand = first_range->CreateAssignedOperand(code_zone());
-          auto cur_operand = second_range->CreateAssignedOperand(code_zone());
+          auto prev_operand =
+              first_range->GetAssignedOperand(operand_cache());
+          auto cur_operand =
+              second_range->GetAssignedOperand(operand_cache());
           move->AddMove(prev_operand, cur_operand, code_zone());
         }
       }
@@ -1590,14 +1621,15 @@ void RegisterAllocator::ResolveControlFlow() {
       auto* block_bound =
           finder.ArrayFor(phi->virtual_register())->FindSucc(block);
       auto phi_output =
-          block_bound->range_->CreateAssignedOperand(code_zone());
+          block_bound->range_->GetAssignedOperand(operand_cache());
       phi->output()->ConvertTo(phi_output->kind(), phi_output->index());
       size_t pred_index = 0;
       for (auto pred : block->predecessors()) {
        const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
        auto* pred_bound = finder.ArrayFor(phi->operands()[pred_index])
                               ->FindPred(pred_block);
-        auto pred_op = pred_bound->range_->CreateAssignedOperand(code_zone());
+        auto pred_op =
+            pred_bound->range_->GetAssignedOperand(operand_cache());
        phi->inputs()[pred_index] = pred_op;
        ResolveControlFlow(block, phi_output, pred_block, pred_op);
        pred_index++;
@@ -1615,8 +1647,8 @@ void RegisterAllocator::ResolveControlFlow() {
       if (result.cur_cover_ == result.pred_cover_ ||
           result.cur_cover_->IsSpilled())
         continue;
-      auto pred_op = result.pred_cover_->CreateAssignedOperand(code_zone());
-      auto cur_op = result.cur_cover_->CreateAssignedOperand(code_zone());
+      auto pred_op = result.pred_cover_->GetAssignedOperand(operand_cache());
+      auto cur_op = result.cur_cover_->GetAssignedOperand(operand_cache());
       ResolveControlFlow(block, cur_op, pred_block, pred_op);
     }
     iterator.Advance();
@@ -1838,7 +1870,7 @@ void RegisterAllocator::PopulatePointerMaps() {
                    "Pointer in register for range %d (start at %d) "
                    "at safe point %d\n",
                    cur->id(), cur->Start().Value(), safe_point);
-        InstructionOperand* operand = cur->CreateAssignedOperand(code_zone());
+        InstructionOperand* operand = cur->GetAssignedOperand(operand_cache());
         DCHECK(!operand->IsStackSlot());
         map->RecordPointer(operand, code_zone());
       }
@@ -1873,7 +1905,6 @@ void RegisterAllocator::AllocateRegisters() {
   SortUnhandled();
   DCHECK(UnhandledIsSorted());
 
-  DCHECK(reusable_slots().empty());
   DCHECK(active_live_ranges().empty());
   DCHECK(inactive_live_ranges().empty());
 
@@ -1960,7 +1991,6 @@ void RegisterAllocator::AllocateRegisters() {
     }
   }
 
-  reusable_slots().clear();
   active_live_ranges().clear();
   inactive_live_ranges().clear();
 }
@@ -2475,7 +2505,7 @@ void RegisterAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
     DCHECK(range->Kind() == GENERAL_REGISTERS);
     assigned_registers_->Add(reg);
   }
-  range->set_assigned_register(reg, code_zone());
+  range->set_assigned_register(reg, operand_cache());
 }
 
 }  // namespace compiler
diff --git a/src/compiler/register-allocator.h b/src/compiler/register-allocator.h
index 2c77059..bd9bd55 100644
--- a/src/compiler/register-allocator.h
+++ b/src/compiler/register-allocator.h
@@ -173,6 +173,33 @@ class UsePosition FINAL : public ZoneObject {
 
 class SpillRange;
 
+
+// TODO(dcarney): remove this cache.
+class InstructionOperandCache FINAL : public ZoneObject {
+ public:
+  InstructionOperandCache();
+
+  InstructionOperand* RegisterOperand(int index) {
+    DCHECK(index >= 0 &&
+           index < static_cast<int>(arraysize(general_register_operands_)));
+    return &general_register_operands_[index];
+  }
+  InstructionOperand* DoubleRegisterOperand(int index) {
+    DCHECK(index >= 0 &&
+           index < static_cast<int>(arraysize(double_register_operands_)));
+    return &double_register_operands_[index];
+  }
+
+ private:
+  InstructionOperand
+      general_register_operands_[RegisterConfiguration::kMaxGeneralRegisters];
+  InstructionOperand
+      double_register_operands_[RegisterConfiguration::kMaxDoubleRegisters];
+
+  DISALLOW_COPY_AND_ASSIGN(InstructionOperandCache);
+};
+
+
 // Representation of SSA values' live ranges as a collection of (continuous)
 // intervals over the instruction ordering.
 class LiveRange FINAL : public ZoneObject {
@@ -193,10 +220,12 @@ class LiveRange FINAL : public ZoneObject {
   int id() const { return id_; }
   bool IsFixed() const { return id_ < 0; }
   bool IsEmpty() const { return first_interval() == nullptr; }
-  InstructionOperand* CreateAssignedOperand(Zone* zone) const;
+  // TODO(dcarney): remove this.
+  InstructionOperand* GetAssignedOperand(InstructionOperandCache* cache) const;
+  InstructionOperand GetAssignedOperand() const;
   int assigned_register() const { return assigned_register_; }
   int spill_start_index() const { return spill_start_index_; }
-  void set_assigned_register(int reg, Zone* zone);
+  void set_assigned_register(int reg, InstructionOperandCache* cache);
   void MakeSpilled();
   bool is_phi() const { return is_phi_; }
   void set_is_phi(bool is_phi) { is_phi_ = is_phi; }
@@ -558,6 +587,7 @@ class RegisterAllocator FINAL : public ZoneObject {
   Frame* frame() const { return frame_; }
   const char* debug_name() const { return debug_name_; }
   const RegisterConfiguration* config() const { return config_; }
+  InstructionOperandCache* operand_cache() const { return operand_cache_; }
   ZoneVector<LiveRange*>& live_ranges() { return live_ranges_; }
   ZoneVector<LiveRange*>& fixed_live_ranges() { return fixed_live_ranges_; }
   ZoneVector<LiveRange*>& fixed_double_live_ranges() {
@@ -570,7 +600,6 @@ class RegisterAllocator FINAL : public ZoneObject {
   ZoneVector<LiveRange*>& inactive_live_ranges() {
     return inactive_live_ranges_;
   }
-  ZoneVector<InstructionOperand*>& reusable_slots() { return reusable_slots_; }
   ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
 
   struct PhiMapValue {
@@ -588,7 +617,7 @@ class RegisterAllocator FINAL : public ZoneObject {
   const char* const debug_name_;
 
   const RegisterConfiguration* config_;
-
+  InstructionOperandCache* const operand_cache_;
   PhiMap phi_map_;
 
   // During liveness analysis keep a mapping from block id to live_in sets
@@ -604,7 +633,6 @@ class RegisterAllocator FINAL : public ZoneObject {
   ZoneVector<LiveRange*> unhandled_live_ranges_;
   ZoneVector<LiveRange*> active_live_ranges_;
   ZoneVector<LiveRange*> inactive_live_ranges_;
-  ZoneVector<InstructionOperand*> reusable_slots_;
   ZoneVector<SpillRange*> spill_ranges_;
 
   RegisterKind mode_;
diff --git a/src/v8.cc b/src/v8.cc
index b04ccc0..495921e 100644
--- a/src/v8.cc
+++ b/src/v8.cc
@@ -8,7 +8,6 @@
 #include "src/base/once.h"
 #include "src/base/platform/platform.h"
 #include "src/bootstrapper.h"
-#include "src/compiler/pipeline.h"
 #include "src/debug.h"
 #include "src/deoptimizer.h"
 #include "src/elements.h"
@@ -50,7 +49,6 @@ void V8::TearDown() {
   Bootstrapper::TearDownExtensions();
   ElementsAccessor::TearDown();
   LOperand::TearDownCaches();
-  compiler::Pipeline::TearDown();
   ExternalReference::TearDownMathExpData();
   RegisteredExtension::UnregisterAll();
   Isolate::GlobalTearDown();
@@ -94,7 +92,6 @@ void V8::InitializeOncePerProcessImpl() {
 #endif
   ElementsAccessor::InitializeOncePerProcess();
   LOperand::SetUpCaches();
-  compiler::Pipeline::SetUp();
   SetUpJSCallerSavedCodeData();
   ExternalReference::SetUp();
   Bootstrapper::InitializeOncePerProcess();
diff --git a/test/cctest/compiler/test-gap-resolver.cc b/test/cctest/compiler/test-gap-resolver.cc
index ea6f4ee..818a0bd 100644
--- a/test/cctest/compiler/test-gap-resolver.cc
+++ b/test/cctest/compiler/test-gap-resolver.cc
@@ -137,15 +137,15 @@ class ParallelMoveCreator : public HandleAndZoneScope {
     int index = rng_->NextInt(6);
     switch (rng_->NextInt(5)) {
       case 0:
-        return ConstantOperand::Create(index, main_zone());
+        return ConstantOperand::New(index, main_zone());
       case 1:
-        return StackSlotOperand::Create(index, main_zone());
+        return StackSlotOperand::New(index, main_zone());
       case 2:
-        return DoubleStackSlotOperand::Create(index, main_zone());
+        return DoubleStackSlotOperand::New(index, main_zone());
       case 3:
-        return RegisterOperand::Create(index, main_zone());
+        return RegisterOperand::New(index, main_zone());
       case 4:
-        return DoubleRegisterOperand::Create(index, main_zone());
+        return DoubleRegisterOperand::New(index, main_zone());
     }
     UNREACHABLE();
     return NULL;
diff --git a/test/cctest/compiler/test-jump-threading.cc b/test/cctest/compiler/test-jump-threading.cc
index 549e4ac..d9de18e 100644
--- a/test/cctest/compiler/test-jump-threading.cc
+++ b/test/cctest/compiler/test-jump-threading.cc
@@ -61,15 +61,15 @@ class TestCode : public HandleAndZoneScope {
     Start();
     sequence_.AddInstruction(Instruction::New(main_zone(), kArchNop));
     int index = static_cast<int>(sequence_.instructions().size()) - 2;
-    sequence_.AddGapMove(index, RegisterOperand::Create(13, main_zone()),
-                         RegisterOperand::Create(13, main_zone()));
+    sequence_.AddGapMove(index, RegisterOperand::New(13, main_zone()),
+                         RegisterOperand::New(13, main_zone()));
   }
   void NonRedundantMoves() {
     Start();
     sequence_.AddInstruction(Instruction::New(main_zone(), kArchNop));
     int index = static_cast<int>(sequence_.instructions().size()) - 2;
-    sequence_.AddGapMove(index, ImmediateOperand::Create(11, main_zone()),
-                         RegisterOperand::Create(11, main_zone()));
+    sequence_.AddGapMove(index, ImmediateOperand::New(11, main_zone()),
+                         RegisterOperand::New(11, main_zone()));
   }
   void Other() {
     Start();
diff --git a/test/unittests/compiler/move-optimizer-unittest.cc b/test/unittests/compiler/move-optimizer-unittest.cc
index 2452d19..b8375fa 100644
--- a/test/unittests/compiler/move-optimizer-unittest.cc
+++ b/test/unittests/compiler/move-optimizer-unittest.cc
@@ -69,12 +69,12 @@ class MoveOptimizerTest : public InstructionSequenceTest {
     CHECK_NE(kNoValue, op.value_);
     switch (op.type_) {
       case kConstant:
-        return ConstantOperand::Create(op.value_, zone());
+        return ConstantOperand::New(op.value_, zone());
       case kFixedSlot:
-        return StackSlotOperand::Create(op.value_, zone());
+        return StackSlotOperand::New(op.value_, zone());
       case kFixedRegister:
         CHECK(0 <= op.value_ && op.value_ < num_general_registers());
-        return RegisterOperand::Create(op.value_, zone());
+        return RegisterOperand::New(op.value_, zone());
       default:
         break;
     }
-- 
2.7.4
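
For readers skimming the patch, the following is a minimal, self-contained sketch (not part of the change) of the allocation pattern it moves to: operands are placement-new'd into a zone (arena) via a New(index, zone) factory, and commonly used register operands come from a small cache owned by each register allocator instead of process-global caches that had to be set up and torn down in V8::InitializeOncePerProcessImpl()/V8::TearDown(). The names SimpleZone, Operand, and OperandCache below are invented for illustration and are not V8 APIs.

// Illustrative sketch only -- not V8 code. SimpleZone, Operand and
// OperandCache loosely mirror Zone, InstructionOperand and
// InstructionOperandCache from the patch above.
#include <cassert>
#include <cstddef>
#include <new>
#include <vector>

class SimpleZone {  // stand-in for a zone/arena: bulk-freed, no per-object delete
 public:
  ~SimpleZone() {
    for (void* block : blocks_) ::operator delete(block);
  }
  void* Allocate(std::size_t size) {
    void* block = ::operator new(size);
    blocks_.push_back(block);
    return block;
  }

 private:
  std::vector<void*> blocks_;
};

struct Operand {
  enum Kind { REGISTER, DOUBLE_REGISTER, CONSTANT };

  Operand() : kind(REGISTER), index(0) {}
  Operand(Kind k, int i) : kind(k), index(i) {}

  // Zone-allocated creation, analogous to SubKind##Operand::New(index, zone).
  static Operand* New(Kind k, int i, SimpleZone* zone) {
    return new (zone->Allocate(sizeof(Operand))) Operand(k, i);
  }

  Kind kind;
  int index;
};

// Analogous to InstructionOperandCache: owned by one allocator instance, so
// there is no process-global state and nothing to tear down at shutdown.
class OperandCache {
 public:
  OperandCache() {
    for (int i = 0; i < kMaxRegisters; ++i) {
      registers_[i] = Operand(Operand::REGISTER, i);
    }
  }
  Operand* RegisterOperand(int index) {
    assert(index >= 0 && index < kMaxRegisters);
    return &registers_[index];
  }

 private:
  enum { kMaxRegisters = 32 };  // arbitrary bound for the sketch
  Operand registers_[kMaxRegisters];
};

int main() {
  SimpleZone zone;     // lives as long as one compilation
  OperandCache cache;  // per-allocator, not a global singleton
  Operand* c = Operand::New(Operand::CONSTANT, 7, &zone);
  Operand* r = cache.RegisterOperand(3);
  assert(c->kind == Operand::CONSTANT && c->index == 7);
  assert(r->kind == Operand::REGISTER && r->index == 3);
  return 0;
}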