DCHECK(!op->IsDoubleRegister());
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
- FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0);
+ FrameOffset offset = linkage()->GetFrameOffset(
+ AllocatedOperand::cast(op)->index(), frame(), 0);
return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
}
};
DCHECK(!op->IsDoubleRegister());
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
- FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0);
+ FrameOffset offset = linkage()->GetFrameOffset(
+ AllocatedOperand::cast(op)->index(), frame(), 0);
return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp,
offset.offset());
}
}
// Returns the architecture Register assigned to an allocated register
// operand. RegisterOperand::cast DCHECKs the operand kind, so no separate
// DCHECK(op->IsRegister()) is needed here.
Register ToRegister(InstructionOperand* op) {
  return Register::FromAllocationIndex(RegisterOperand::cast(op)->index());
}
// Returns the DoubleRegister assigned to an allocated double-register
// operand. DoubleRegisterOperand::cast DCHECKs the operand kind.
DoubleRegister ToDoubleRegister(InstructionOperand* op) {
  return DoubleRegister::FromAllocationIndex(
      DoubleRegisterOperand::cast(op)->index());
}
// Materializes the Constant behind either an immediate operand (looked up by
// table index) or a constant operand (looked up by the virtual register that
// produced it).
Constant ToConstant(InstructionOperand* op) {
  if (op->IsImmediate()) {
    return gen_->code()->GetImmediate(ImmediateOperand::cast(op)->index());
  }
  return gen_->code()->GetConstant(
      ConstantOperand::cast(op)->virtual_register());
}
// Materializes the operand's constant and returns it as a float64 value.
double ToDouble(InstructionOperand* op) { return ToConstant(op).ToFloat64(); }
for (int i = 0; i < operands->length(); i++) {
InstructionOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
- safepoint.DefinePointerSlot(pointer->index(), zone());
+ safepoint.DefinePointerSlot(StackSlotOperand::cast(pointer)->index(),
+ zone());
} else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
- Register reg = Register::FromAllocationIndex(pointer->index());
+ Register reg = Register::FromAllocationIndex(
+ RegisterOperand::cast(pointer)->index());
safepoint.DefinePointerRegister(reg, zone());
}
}
// rather than creating an int value.
if (type == kMachBool || type == kRepBit || type == kMachInt32 ||
type == kMachInt8 || type == kMachInt16) {
- translation->StoreInt32StackSlot(op->index());
+ translation->StoreInt32StackSlot(StackSlotOperand::cast(op)->index());
} else if (type == kMachUint32 || type == kMachUint16 ||
type == kMachUint8) {
- translation->StoreUint32StackSlot(op->index());
+ translation->StoreUint32StackSlot(StackSlotOperand::cast(op)->index());
} else if ((type & kRepMask) == kRepTagged) {
- translation->StoreStackSlot(op->index());
+ translation->StoreStackSlot(StackSlotOperand::cast(op)->index());
} else {
CHECK(false);
}
} else if (op->IsDoubleStackSlot()) {
DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
- translation->StoreDoubleStackSlot(op->index());
+ translation->StoreDoubleStackSlot(
+ DoubleStackSlotOperand::cast(op)->index());
} else if (op->IsRegister()) {
InstructionOperandConverter converter(this, instr);
// TODO(jarin) kMachBool and kRepBit should materialize true and false
typedef ZoneList<MoveOperands>::iterator op_iterator;
#ifdef ENABLE_SLOW_DCHECKS
-// TODO(svenpanne) Brush up InstructionOperand with comparison?
// Strict weak ordering over InstructionOperands for use as a map/set key
// comparator; delegates to InstructionOperand::operator<, which compares the
// encoded value_ (kind plus payload) in one step.
struct InstructionOperandComparator {
  bool operator()(const InstructionOperand* x,
                  const InstructionOperand* y) const {
    return *x < *y;
  }
};
#endif
PrintIndent();
os_ << range->id() << " " << type;
if (range->HasRegisterAssigned()) {
- InstructionOperand op = range->GetAssignedOperand();
+ AllocatedOperand op = AllocatedOperand::cast(range->GetAssignedOperand());
int assigned_reg = op.index();
if (op.IsDoubleRegister()) {
os_ << " \"" << DoubleRegister::AllocationIndexToString(assigned_reg)
os_ << " \"" << Register::AllocationIndexToString(assigned_reg) << "\"";
}
} else if (range->IsSpilled()) {
+ auto top = range->TopLevel();
int index = -1;
- if (range->TopLevel()->HasSpillRange()) {
+ if (top->HasSpillRange()) {
index = kMaxInt; // This hasn't been set yet.
+ } else if (top->GetSpillOperand()->IsConstant()) {
+ os_ << " \"const(nostack):"
+ << ConstantOperand::cast(top->GetSpillOperand())->virtual_register()
+ << "\"";
} else {
- index = range->TopLevel()->GetSpillOperand()->index();
- }
- if (range->TopLevel()->Kind() == DOUBLE_REGISTERS) {
- os_ << " \"double_stack:" << index << "\"";
- } else if (range->TopLevel()->Kind() == GENERAL_REGISTERS) {
- os_ << " \"stack:" << index << "\"";
- } else {
- os_ << " \"const(nostack):" << index << "\"";
+ index = AllocatedOperand::cast(top->GetSpillOperand())->index();
+ if (top->Kind() == DOUBLE_REGISTERS) {
+ os_ << " \"double_stack:" << index << "\"";
+ } else if (top->Kind() == GENERAL_REGISTERS) {
+ os_ << " \"stack:" << index << "\"";
+ }
}
}
int parent_index = -1;
}
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
- FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), extra);
+ FrameOffset offset = linkage()->GetFrameOffset(
+ AllocatedOperand::cast(op)->index(), frame(), extra);
return Operand(offset.from_stack_pointer() ? esp : ebp, offset.offset());
}
}
}
case InstructionOperand::CONSTANT:
- return os << "[constant:" << op.index() << "]";
+ return os << "[constant:" << ConstantOperand::cast(op).virtual_register()
+ << "]";
case InstructionOperand::IMMEDIATE:
- return os << "[immediate:" << op.index() << "]";
+ return os << "[immediate:" << ImmediateOperand::cast(op).index() << "]";
case InstructionOperand::STACK_SLOT:
- return os << "[stack:" << op.index() << "]";
+ return os << "[stack:" << StackSlotOperand::cast(op).index() << "]";
case InstructionOperand::DOUBLE_STACK_SLOT:
- return os << "[double_stack:" << op.index() << "]";
+ return os << "[double_stack:" << DoubleStackSlotOperand::cast(op).index()
+ << "]";
case InstructionOperand::REGISTER:
- return os << "[" << conf->general_register_name(op.index()) << "|R]";
+ return os << "["
+ << conf->general_register_name(
+ RegisterOperand::cast(op).index()) << "|R]";
case InstructionOperand::DOUBLE_REGISTER:
- return os << "[" << conf->double_register_name(op.index()) << "|R]";
+ return os << "["
+ << conf->double_register_name(
+ DoubleRegisterOperand::cast(op).index()) << "|R]";
case InstructionOperand::INVALID:
return os << "(x)";
}
// Records an operand as containing a tagged pointer for GC purposes.
void PointerMap::RecordPointer(InstructionOperand* op, Zone* zone) {
  // Do not record arguments as pointers: argument slots are identified by
  // negative stack-slot indices.
  if (op->IsStackSlot() && StackSlotOperand::cast(op)->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}
void PointerMap::RemovePointer(InstructionOperand* op) {
// Do not record arguments as pointers.
- if (op->IsStackSlot() && op->index() < 0) return;
+ if (op->IsStackSlot() && StackSlotOperand::cast(op)->index() < 0) return;
DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
for (int i = 0; i < pointer_operands_.length(); ++i) {
if (pointer_operands_[i]->Equals(op)) {
// Records an operand as holding an untagged value (must not be visited as a
// pointer by the GC).
void PointerMap::RecordUntagged(InstructionOperand* op, Zone* zone) {
  // Do not record arguments as pointers: argument slots are identified by
  // negative stack-slot indices.
  if (op->IsStackSlot() && StackSlotOperand::cast(op)->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}
}
+RpoNumber InstructionSequence::InputRpo(Instruction* instr, size_t index) {
+ InstructionOperand* operand = instr->InputAt(index);
+ Constant constant =
+ operand->IsImmediate()
+ ? GetImmediate(ImmediateOperand::cast(operand)->index())
+ : GetConstant(ConstantOperand::cast(operand)->virtual_register());
+ return constant.ToRpoNumber();
+}
+
+
FrameStateDescriptor::FrameStateDescriptor(
Zone* zone, const FrameStateCallInfo& state_info, size_t parameters_count,
size_t locals_count, size_t stack_count, FrameStateDescriptor* outer_state)
// A couple of reserved opcodes are used for internal use.
const InstructionCode kSourcePositionInstruction = -1;
-#define INSTRUCTION_OPERAND_LIST(V) \
- V(Constant, CONSTANT) \
- V(Immediate, IMMEDIATE) \
+#define ALLOCATED_OPERAND_LIST(V) \
V(StackSlot, STACK_SLOT) \
V(DoubleStackSlot, DOUBLE_STACK_SLOT) \
V(Register, REGISTER) \
DOUBLE_REGISTER
};
- InstructionOperand() { ConvertTo(INVALID, 0, kInvalidVirtualRegister); }
-
- InstructionOperand(Kind kind, int index) {
- DCHECK(kind != UNALLOCATED && kind != INVALID);
- ConvertTo(kind, index, kInvalidVirtualRegister);
- }
-
- static InstructionOperand* New(Zone* zone, Kind kind, int index) {
- return New(zone, InstructionOperand(kind, index));
- }
+ InstructionOperand()
+ : InstructionOperand(INVALID, 0, kInvalidVirtualRegister) {}
Kind kind() const { return KindField::decode(value_); }
- // TODO(dcarney): move this to subkind operand.
- int index() const {
- DCHECK(kind() != UNALLOCATED && kind() != INVALID);
- return static_cast<int64_t>(value_) >> IndexField::kShift;
- }
+
#define INSTRUCTION_OPERAND_PREDICATE(name, type) \
bool Is##name() const { return kind() == type; }
- INSTRUCTION_OPERAND_LIST(INSTRUCTION_OPERAND_PREDICATE)
+ ALLOCATED_OPERAND_LIST(INSTRUCTION_OPERAND_PREDICATE)
+ INSTRUCTION_OPERAND_PREDICATE(Constant, CONSTANT)
+ INSTRUCTION_OPERAND_PREDICATE(Immediate, IMMEDIATE)
INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
INSTRUCTION_OPERAND_PREDICATE(Invalid, INVALID)
#undef INSTRUCTION_OPERAND_PREDICATE
+
bool Equals(const InstructionOperand* other) const {
return value_ == other->value_;
}
- void ConvertTo(Kind kind, int index) {
- DCHECK(kind != UNALLOCATED && kind != INVALID);
- ConvertTo(kind, index, kInvalidVirtualRegister);
- }
-
// Useful for map/set keys.
bool operator<(const InstructionOperand& op) const {
return value_ < op.value_;
}
- protected:
template <typename SubKindOperand>
static SubKindOperand* New(Zone* zone, const SubKindOperand& op) {
void* buffer = zone->New(sizeof(op));
return new (buffer) SubKindOperand(op);
}
- InstructionOperand(Kind kind, int index, int virtual_register) {
- ConvertTo(kind, index, virtual_register);
+ static void ReplaceWith(InstructionOperand* dest,
+ const InstructionOperand* src) {
+ *dest = *src;
}
- void ConvertTo(Kind kind, int index, int virtual_register) {
+ protected:
+ InstructionOperand(Kind kind, int index, int virtual_register) {
if (kind == REGISTER || kind == DOUBLE_REGISTER) DCHECK(index >= 0);
- if (kind != UNALLOCATED) {
+ if (kind != UNALLOCATED && kind != CONSTANT) {
DCHECK(virtual_register == kInvalidVirtualRegister);
}
value_ = KindField::encode(kind);
value_ |=
VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
value_ |= static_cast<int64_t>(index) << IndexField::kShift;
- DCHECK(((kind == UNALLOCATED || kind == INVALID) && index == 0) ||
- this->index() == index);
}
typedef BitField64<Kind, 0, 3> KindField;
};
+class ConstantOperand : public InstructionOperand {
+ public:
+ explicit ConstantOperand(int virtual_register)
+ : InstructionOperand(CONSTANT, 0, virtual_register) {}
+
+ int32_t virtual_register() const {
+ return static_cast<int32_t>(VirtualRegisterField::decode(value_));
+ }
+
+ static ConstantOperand* New(Zone* zone, int virtual_register) {
+ return InstructionOperand::New(zone, ConstantOperand(virtual_register));
+ }
+
+ static ConstantOperand* cast(InstructionOperand* op) {
+ DCHECK(op->kind() == CONSTANT);
+ return static_cast<ConstantOperand*>(op);
+ }
+
+ static const ConstantOperand* cast(const InstructionOperand* op) {
+ DCHECK(op->kind() == CONSTANT);
+ return static_cast<const ConstantOperand*>(op);
+ }
+
+ static ConstantOperand cast(const InstructionOperand& op) {
+ DCHECK(op.kind() == CONSTANT);
+ return *static_cast<const ConstantOperand*>(&op);
+ }
+};
+
+
+class ImmediateOperand : public InstructionOperand {
+ public:
+ explicit ImmediateOperand(int index)
+ : InstructionOperand(IMMEDIATE, index, kInvalidVirtualRegister) {}
+
+ int index() const {
+ return static_cast<int64_t>(value_) >> IndexField::kShift;
+ }
+
+ static ImmediateOperand* New(Zone* zone, int index) {
+ return InstructionOperand::New(zone, ImmediateOperand(index));
+ }
+
+ static ImmediateOperand* cast(InstructionOperand* op) {
+ DCHECK(op->kind() == IMMEDIATE);
+ return static_cast<ImmediateOperand*>(op);
+ }
+
+ static const ImmediateOperand* cast(const InstructionOperand* op) {
+ DCHECK(op->kind() == IMMEDIATE);
+ return static_cast<const ImmediateOperand*>(op);
+ }
+
+ static ImmediateOperand cast(const InstructionOperand& op) {
+ DCHECK(op.kind() == IMMEDIATE);
+ return *static_cast<const ImmediateOperand*>(&op);
+ }
+};
+
+
+class AllocatedOperand : public InstructionOperand {
+#define ALLOCATED_OPERAND_CHECK(Name, Kind) || kind == Kind
+#define CHECK_ALLOCATED_KIND() \
+ DCHECK(false ALLOCATED_OPERAND_LIST(ALLOCATED_OPERAND_CHECK)); \
+ USE(kind);
+
+ public:
+ int index() const {
+ return static_cast<int64_t>(value_) >> IndexField::kShift;
+ }
+
+ AllocatedOperand(Kind kind, int index)
+ : InstructionOperand(kind, index, kInvalidVirtualRegister) {
+ CHECK_ALLOCATED_KIND();
+ }
+
+ static AllocatedOperand* New(Zone* zone, Kind kind, int index) {
+ return InstructionOperand::New(zone, AllocatedOperand(kind, index));
+ }
+
+ static AllocatedOperand* cast(InstructionOperand* op) {
+ Kind kind = op->kind();
+ CHECK_ALLOCATED_KIND();
+ return static_cast<AllocatedOperand*>(op);
+ }
+
+ static const AllocatedOperand* cast(const InstructionOperand* op) {
+ Kind kind = op->kind();
+ CHECK_ALLOCATED_KIND();
+ return static_cast<const AllocatedOperand*>(op);
+ }
+
+ static AllocatedOperand cast(const InstructionOperand& op) {
+ Kind kind = op.kind();
+ CHECK_ALLOCATED_KIND();
+ return *static_cast<const AllocatedOperand*>(&op);
+ }
+
+#undef CHECK_ALLOCATED_KIND
+#undef ALLOCATED_OPERAND_CAST_CHECK
+};
+
+
+#define INSTRUCTION_SUBKIND_OPERAND_CLASS(SubKind, kOperandKind) \
+ class SubKind##Operand FINAL : public AllocatedOperand { \
+ public: \
+ explicit SubKind##Operand(int index) \
+ : AllocatedOperand(kOperandKind, index) {} \
+ \
+ static SubKind##Operand* New(Zone* zone, int index) { \
+ return InstructionOperand::New(zone, SubKind##Operand(index)); \
+ } \
+ \
+ static SubKind##Operand* cast(InstructionOperand* op) { \
+ DCHECK(op->kind() == kOperandKind); \
+ return reinterpret_cast<SubKind##Operand*>(op); \
+ } \
+ \
+ static const SubKind##Operand* cast(const InstructionOperand* op) { \
+ DCHECK(op->kind() == kOperandKind); \
+ return reinterpret_cast<const SubKind##Operand*>(op); \
+ } \
+ \
+ static SubKind##Operand cast(const InstructionOperand& op) { \
+ DCHECK(op.kind() == kOperandKind); \
+ return *static_cast<const SubKind##Operand*>(&op); \
+ } \
+ };
+ALLOCATED_OPERAND_LIST(INSTRUCTION_SUBKIND_OPERAND_CLASS)
+#undef INSTRUCTION_SUBKIND_OPERAND_CLASS
+
+
class MoveOperands FINAL {
public:
MoveOperands(InstructionOperand* source, InstructionOperand* destination)
return !IsEliminated() && source()->Equals(operand);
}
- // A move is redundant if it's been eliminated, if its source and
- // destination are the same, or if its destination is constant.
+ // A move is redundant if it's been eliminated or if its source and
+ // destination are the same.
bool IsRedundant() const {
- return IsEliminated() || source_->Equals(destination_) ||
- (destination_ != NULL && destination_->IsConstant());
+ DCHECK_IMPLIES(destination_ != nullptr, !destination_->IsConstant());
+ return IsEliminated() || source_->Equals(destination_);
}
// We clear both operands to indicate move that's been eliminated.
std::ostream& operator<<(std::ostream& os, const PrintableMoveOperands& mo);
-#define INSTRUCTION_SUBKIND_OPERAND_CLASS(SubKind, kOperandKind) \
- class SubKind##Operand FINAL : public InstructionOperand { \
- public: \
- explicit SubKind##Operand(int index) \
- : InstructionOperand(kOperandKind, index) {} \
- \
- static SubKind##Operand* New(int index, Zone* zone) { \
- return InstructionOperand::New(zone, SubKind##Operand(index)); \
- } \
- \
- static SubKind##Operand* cast(InstructionOperand* op) { \
- DCHECK(op->kind() == kOperandKind); \
- return reinterpret_cast<SubKind##Operand*>(op); \
- } \
- \
- static const SubKind##Operand* cast(const InstructionOperand* op) { \
- DCHECK(op->kind() == kOperandKind); \
- return reinterpret_cast<const SubKind##Operand*>(op); \
- } \
- \
- static SubKind##Operand cast(const InstructionOperand& op) { \
- DCHECK(op.kind() == kOperandKind); \
- return *static_cast<const SubKind##Operand*>(&op); \
- } \
- };
-INSTRUCTION_OPERAND_LIST(INSTRUCTION_SUBKIND_OPERAND_CLASS)
-#undef INSTRUCTION_SUBKIND_OPERAND_CLASS
-
-
class ParallelMove FINAL : public ZoneObject {
public:
explicit ParallelMove(Zone* zone) : move_operands_(4, zone) {}
FrameStateDescriptor* GetFrameStateDescriptor(StateId deoptimization_id);
int GetFrameStateDescriptorCount();
- RpoNumber InputRpo(Instruction* instr, size_t index) {
- InstructionOperand* operand = instr->InputAt(index);
- Constant constant = operand->IsImmediate() ? GetImmediate(operand->index())
- : GetConstant(operand->index());
- return constant.ToRpoNumber();
- }
+ RpoNumber InputRpo(Instruction* instr, size_t index);
private:
friend std::ostream& operator<<(std::ostream& os,
DCHECK(!op->IsDoubleRegister());
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
- FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0);
+ FrameOffset offset = linkage()->GetFrameOffset(
+ AllocatedOperand::cast(op)->index(), frame(), 0);
return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
}
};
DCHECK(!op->IsDoubleRegister());
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
- FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0);
+ FrameOffset offset = linkage()->GetFrameOffset(
+ AllocatedOperand::cast(op)->index(), frame(), 0);
return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
}
};
loads.push_back(move);
// Replace source with copy for later use.
auto dest = move->destination();
- move->set_destination(
- InstructionOperand::New(code_zone(), dest->kind(), dest->index()));
+ move->set_destination(InstructionOperand::New(code_zone(), *dest));
continue;
}
if ((found->destination()->IsStackSlot() ||
move->destination()->IsDoubleStackSlot())) {
// Found a better source for this load. Smash it in place to affect other
// loads that have already been split.
- InstructionOperand::Kind found_kind = found->destination()->kind();
- int found_index = found->destination()->index();
auto next_dest =
- InstructionOperand::New(code_zone(), found_kind, found_index);
+ InstructionOperand::New(code_zone(), *found->destination());
auto dest = move->destination();
- found->destination()->ConvertTo(dest->kind(), dest->index());
+ InstructionOperand::ReplaceWith(found->destination(), dest);
move->set_destination(next_dest);
}
// move from load destination.
DCHECK(!op->IsDoubleRegister());
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
- FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), 0);
+ FrameOffset offset = linkage()->GetFrameOffset(
+ AllocatedOperand::cast(op)->index(), frame(), 0);
return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
}
};
constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
if (op->IsConstant()) {
constraint->type_ = kConstant;
- constraint->value_ = ConstantOperand::cast(op)->index();
+ constraint->value_ = ConstantOperand::cast(op)->virtual_register();
constraint->virtual_register_ = constraint->value_;
} else if (op->IsImmediate()) {
constraint->type_ = kImmediate;
switch (constraint->type_) {
case kConstant:
CHECK(op->IsConstant());
- CHECK_EQ(op->index(), constraint->value_);
+ CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
+ constraint->value_);
return;
case kImmediate:
CHECK(op->IsImmediate());
- CHECK_EQ(op->index(), constraint->value_);
+ CHECK_EQ(ImmediateOperand::cast(op)->index(), constraint->value_);
return;
case kRegister:
CHECK(op->IsRegister());
return;
case kFixedRegister:
CHECK(op->IsRegister());
- CHECK_EQ(op->index(), constraint->value_);
+ CHECK_EQ(RegisterOperand::cast(op)->index(), constraint->value_);
return;
case kDoubleRegister:
CHECK(op->IsDoubleRegister());
return;
case kFixedDoubleRegister:
CHECK(op->IsDoubleRegister());
- CHECK_EQ(op->index(), constraint->value_);
+ CHECK_EQ(DoubleRegisterOperand::cast(op)->index(), constraint->value_);
return;
case kFixedSlot:
CHECK(op->IsStackSlot());
- CHECK_EQ(op->index(), constraint->value_);
+ CHECK_EQ(StackSlotOperand::cast(op)->index(), constraint->value_);
return;
case kSlot:
CHECK(op->IsStackSlot());
// Drops every general and double register in the given configuration, using
// the typed RegisterOperand/DoubleRegisterOperand constructors.
void DropRegisters(const RegisterConfiguration* config) {
  for (int i = 0; i < config->num_general_registers(); ++i) {
    RegisterOperand op(i);
    Drop(&op);
  }
  for (int i = 0; i < config->num_double_registers(); ++i) {
    DoubleRegisterOperand op(i);
    Drop(&op);
  }
}
void LiveRange::CommitSpillsAtDefinition(InstructionSequence* sequence,
InstructionOperand* op,
bool might_be_duplicated) {
+ DCHECK_IMPLIES(op->IsConstant(), spills_at_definition_ == nullptr);
DCHECK(!IsChild());
auto zone = sequence->zone();
for (auto to_spill = spills_at_definition_; to_spill != nullptr;
// Pins this live range's spill location to a concrete operand. Unallocated
// and immediate operands can never serve as spill locations; allocated
// operands and constants are the valid inputs here.
void LiveRange::SetSpillOperand(InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
  spill_type_ = SpillType::kSpillOperand;
  spill_operand_ = operand;
}
}
-void LiveRange::CommitSpillOperand(InstructionOperand* operand) {
+void LiveRange::CommitSpillOperand(AllocatedOperand* operand) {
DCHECK(HasSpillRange());
- DCHECK(!operand->IsUnallocated());
DCHECK(!IsChild());
spill_type_ = SpillType::kSpillOperand;
spill_operand_ = operand;
DCHECK(!IsSpilled());
switch (Kind()) {
case GENERAL_REGISTERS:
- return cache->RegisterOperand(assigned_register());
+ return cache->GetRegisterOperand(assigned_register());
case DOUBLE_REGISTERS:
- return cache->DoubleRegisterOperand(assigned_register());
+ return cache->GetDoubleRegisterOperand(assigned_register());
default:
UNREACHABLE();
}
switch (pos->type()) {
case UsePositionType::kRequiresSlot:
if (spill_op != nullptr) {
- pos->operand()->ConvertTo(spill_op->kind(), spill_op->index());
+ InstructionOperand::ReplaceWith(pos->operand(), spill_op);
}
break;
case UsePositionType::kRequiresRegister:
DCHECK(op->IsRegister() || op->IsDoubleRegister());
// Fall through.
case UsePositionType::kAny:
- pos->operand()->ConvertTo(op->kind(), op->index());
+ InstructionOperand::ReplaceWith(pos->operand(), op);
break;
}
}
// Pre-populates canonical operands for every register allocation index so
// callers can share pointers into these arrays instead of allocating fresh
// operands per use.
InstructionOperandCache::InstructionOperandCache() {
  for (size_t i = 0; i < arraysize(general_register_operands_); ++i) {
    general_register_operands_[i] = RegisterOperand(static_cast<int>(i));
  }
  for (size_t i = 0; i < arraysize(double_register_operands_); ++i) {
    double_register_operands_[i] = DoubleRegisterOperand(static_cast<int>(i));
  }
}
UnallocatedOperand* operand, int pos, bool is_tagged) {
TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
DCHECK(operand->HasFixedPolicy());
+ InstructionOperand allocated;
if (operand->HasFixedSlotPolicy()) {
- operand->ConvertTo(InstructionOperand::STACK_SLOT,
- operand->fixed_slot_index());
+ allocated = AllocatedOperand(InstructionOperand::STACK_SLOT,
+ operand->fixed_slot_index());
} else if (operand->HasFixedRegisterPolicy()) {
- int reg_index = operand->fixed_register_index();
- operand->ConvertTo(InstructionOperand::REGISTER, reg_index);
+ allocated = AllocatedOperand(InstructionOperand::REGISTER,
+ operand->fixed_register_index());
} else if (operand->HasFixedDoubleRegisterPolicy()) {
- int reg_index = operand->fixed_register_index();
- operand->ConvertTo(InstructionOperand::DOUBLE_REGISTER, reg_index);
+ allocated = AllocatedOperand(InstructionOperand::DOUBLE_REGISTER,
+ operand->fixed_register_index());
} else {
UNREACHABLE();
}
+ InstructionOperand::ReplaceWith(operand, &allocated);
if (is_tagged) {
TRACE("Fixed reg is tagged at %d\n", pos);
auto instr = InstructionAt(pos);
if (operand->IsUnallocated()) {
return LiveRangeFor(UnallocatedOperand::cast(operand)->virtual_register());
} else if (operand->IsConstant()) {
- return LiveRangeFor(ConstantOperand::cast(operand)->index());
+ return LiveRangeFor(ConstantOperand::cast(operand)->virtual_register());
} else if (operand->IsRegister()) {
- return FixedLiveRangeFor(operand->index());
+ return FixedLiveRangeFor(RegisterOperand::cast(operand)->index());
} else if (operand->IsDoubleRegister()) {
- return FixedDoubleLiveRangeFor(operand->index());
+ return FixedDoubleLiveRangeFor(
+ DoubleRegisterOperand::cast(operand)->index());
} else {
return nullptr;
}
}
-void SpillRange::SetOperand(InstructionOperand* op) {
+void SpillRange::SetOperand(AllocatedOperand* op) {
for (auto range : live_ranges()) {
DCHECK(range->GetSpillRange() == this);
range->CommitSpillOperand(op);
auto op_kind = kind == DOUBLE_REGISTERS
? InstructionOperand::DOUBLE_STACK_SLOT
: InstructionOperand::STACK_SLOT;
- auto op = InstructionOperand::New(code_zone(), op_kind, index);
+ auto op = AllocatedOperand::New(code_zone(), op_kind, index);
range->SetOperand(op);
}
}
AllocateFixed(output, -1, false);
// This value is produced on the stack, we never need to spill it.
if (output->IsStackSlot()) {
- DCHECK(output->index() < frame_->GetSpillSlotCount());
- range->SetSpillOperand(output);
+ DCHECK(StackSlotOperand::cast(output)->index() <
+ frame_->GetSpillSlotCount());
+ range->SetSpillOperand(StackSlotOperand::cast(output));
range->SetSpillStartIndex(end);
assigned = true;
}
for (size_t i = 0; i < first->OutputCount(); i++) {
InstructionOperand* output = first->OutputAt(i);
if (output->IsConstant()) {
- int output_vreg = output->index();
+ int output_vreg = ConstantOperand::cast(output)->virtual_register();
auto range = LiveRangeFor(output_vreg);
range->SetSpillStartIndex(instr_index + 1);
range->SetSpillOperand(output);
// This value is produced on the stack, we never need to spill it.
if (first_output->IsStackSlot()) {
- DCHECK(first_output->index() < frame_->GetSpillSlotCount());
- range->SetSpillOperand(first_output);
+ DCHECK(StackSlotOperand::cast(first_output)->index() <
+ frame_->GetSpillSlotCount());
+ range->SetSpillOperand(StackSlotOperand::cast(first_output));
range->SetSpillStartIndex(instr_index + 1);
assigned = true;
}
// Returns true if any output of `instr` is the general register with the
// given allocation index.
bool RegisterAllocator::IsOutputRegisterOf(Instruction* instr, int index) {
  for (size_t i = 0; i < instr->OutputCount(); i++) {
    auto output = instr->OutputAt(i);
    if (output->IsRegister() && RegisterOperand::cast(output)->index() == index)
      return true;
  }
  return false;
}
int index) {
for (size_t i = 0; i < instr->OutputCount(); i++) {
auto output = instr->OutputAt(i);
- if (output->IsDoubleRegister() && output->index() == index) return true;
+ if (output->IsDoubleRegister() &&
+ DoubleRegisterOperand::cast(output)->index() == index)
+ return true;
}
return false;
}
int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
live->Remove(out_vreg);
} else if (output->IsConstant()) {
- int out_vreg = output->index();
+ int out_vreg = ConstantOperand::cast(output)->virtual_register();
live->Remove(out_vreg);
}
Define(curr_position, output, nullptr);
int to_vreg = UnallocatedOperand::cast(to)->virtual_register();
auto to_range = LiveRangeFor(to_vreg);
if (to_range->is_phi()) {
- DCHECK(!FLAG_turbo_delay_ssa_decon);
if (to_range->is_non_loop_phi()) {
hint = to_range->current_hint_operand();
}
DCHECK(res.second);
USE(res);
auto& output = phi->output();
- if (!FLAG_turbo_delay_ssa_decon) {
- for (size_t i = 0; i < phi->operands().size(); ++i) {
- InstructionBlock* cur_block =
- code()->InstructionBlockAt(block->predecessors()[i]);
- AddGapMove(cur_block->last_instruction_index(), Instruction::END,
- &phi->inputs()[i], &output);
- DCHECK(!InstructionAt(cur_block->last_instruction_index())
- ->HasPointerMap());
- }
+ for (size_t i = 0; i < phi->operands().size(); ++i) {
+ InstructionBlock* cur_block =
+ code()->InstructionBlockAt(block->predecessors()[i]);
+ AddGapMove(cur_block->last_instruction_index(), Instruction::END,
+ &phi->inputs()[i], &output);
+ DCHECK(
+ !InstructionAt(cur_block->last_instruction_index())->HasPointerMap());
}
auto live_range = LiveRangeFor(phi_vreg);
int gap_index = block->first_instruction_index();
LiveRangeFinder finder(*this);
for (auto block : code()->instruction_blocks()) {
if (CanEagerlyResolveControlFlow(block)) continue;
- if (FLAG_turbo_delay_ssa_decon) {
- // resolve phis
- for (auto phi : block->phis()) {
- auto* block_bound =
- finder.ArrayFor(phi->virtual_register())->FindSucc(block);
- auto phi_output =
- block_bound->range_->GetAssignedOperand(operand_cache());
- phi->output().ConvertTo(phi_output->kind(), phi_output->index());
- size_t pred_index = 0;
- for (auto pred : block->predecessors()) {
- const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
- auto* pred_bound = finder.ArrayFor(phi->operands()[pred_index])
- ->FindPred(pred_block);
- auto pred_op =
- pred_bound->range_->GetAssignedOperand(operand_cache());
- phi->inputs()[pred_index] = *pred_op;
- ResolveControlFlow(block, phi_output, pred_block, pred_op);
- pred_index++;
- }
- }
- }
auto live = live_in_sets_[block->rpo_number().ToInt()];
BitVector::Iterator iterator(live);
while (!iterator.Done()) {
// block.
int phi_vreg = phi->virtual_register();
live->Remove(phi_vreg);
- if (!FLAG_turbo_delay_ssa_decon) {
- InstructionOperand* hint = nullptr;
- InstructionOperand* phi_operand = nullptr;
- auto instr = GetLastInstruction(
- code()->InstructionBlockAt(block->predecessors()[0]));
- auto move = instr->GetParallelMove(Instruction::END);
- for (int j = 0; j < move->move_operands()->length(); ++j) {
- auto to = move->move_operands()->at(j).destination();
- if (to->IsUnallocated() &&
- UnallocatedOperand::cast(to)->virtual_register() == phi_vreg) {
- hint = move->move_operands()->at(j).source();
- phi_operand = to;
- break;
- }
+ InstructionOperand* hint = nullptr;
+ InstructionOperand* phi_operand = nullptr;
+ auto instr = GetLastInstruction(
+ code()->InstructionBlockAt(block->predecessors()[0]));
+ auto move = instr->GetParallelMove(Instruction::END);
+ for (int j = 0; j < move->move_operands()->length(); ++j) {
+ auto to = move->move_operands()->at(j).destination();
+ if (to->IsUnallocated() &&
+ UnallocatedOperand::cast(to)->virtual_register() == phi_vreg) {
+ hint = move->move_operands()->at(j).source();
+ phi_operand = to;
+ break;
}
- DCHECK(hint != nullptr);
- auto block_start = LifetimePosition::GapFromInstructionIndex(
- block->first_instruction_index());
- Define(block_start, phi_operand, hint);
}
+ DCHECK(hint != nullptr);
+ auto block_start = LifetimePosition::GapFromInstructionIndex(
+ block->first_instruction_index());
+ Define(block_start, phi_operand, hint);
}
// Now live is live_in for this block except not including values live
auto hint = current->FirstHint();
if (hint != nullptr && (hint->IsRegister() || hint->IsDoubleRegister())) {
- int register_index = hint->index();
+ int register_index = AllocatedOperand::cast(hint)->index();
TRACE("Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
RegisterName(register_index), free_until_pos[register_index].Value(),
current->id(), current->End().Value());
public:
InstructionOperandCache();
- InstructionOperand* RegisterOperand(int index) {
+ RegisterOperand* GetRegisterOperand(int index) {
DCHECK(index >= 0 &&
index < static_cast<int>(arraysize(general_register_operands_)));
- return &general_register_operands_[index];
+ return RegisterOperand::cast(&general_register_operands_[index]);
}
- InstructionOperand* DoubleRegisterOperand(int index) {
+ DoubleRegisterOperand* GetDoubleRegisterOperand(int index) {
DCHECK(index >= 0 &&
index < static_cast<int>(arraysize(double_register_operands_)));
- return &double_register_operands_[index];
+ return DoubleRegisterOperand::cast(&double_register_operands_[index]);
}
private:
InstructionOperand* operand);
void SetSpillOperand(InstructionOperand* operand);
void SetSpillRange(SpillRange* spill_range);
- void CommitSpillOperand(InstructionOperand* operand);
+ void CommitSpillOperand(AllocatedOperand* operand);
void CommitSpillsAtDefinition(InstructionSequence* sequence,
InstructionOperand* operand,
bool might_be_duplicated);
RegisterKind Kind() const { return live_ranges_[0]->Kind(); }
bool IsEmpty() const { return live_ranges_.empty(); }
bool TryMerge(SpillRange* other);
- void SetOperand(InstructionOperand* op);
+ void SetOperand(AllocatedOperand* op);
private:
LifetimePosition End() const { return end_position_; }
Operand ToOperand(InstructionOperand* op, int extra = 0) {
DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
// The linkage computes where all spill slots are located.
- FrameOffset offset = linkage()->GetFrameOffset(op->index(), frame(), extra);
+ FrameOffset offset = linkage()->GetFrameOffset(
+ AllocatedOperand::cast(op)->index(), frame(), extra);
return Operand(offset.from_stack_pointer() ? rsp : rbp, offset.offset());
}
DEFINE_BOOL(trace_turbo_inlining, false, "trace TurboFan inlining")
DEFINE_BOOL(loop_assignment_analysis, true, "perform loop assignment analysis")
DEFINE_BOOL(turbo_profiling, false, "enable profiling in TurboFan")
-// TODO(dcarney): this is just for experimentation, remove when default.
-DEFINE_BOOL(turbo_delay_ssa_decon, false,
- "delay ssa deconstruction in TurboFan register allocator")
DEFINE_BOOL(turbo_verify_allocation, DEBUG_BOOL,
"verify register allocation in TurboFan")
DEFINE_BOOL(turbo_move_optimization, true, "optimize gap moves in TurboFan")
}
static Key KeyFor(const InstructionOperand* op) {
- return Key(op->kind(), op->index());
+ int v = op->IsConstant() ? ConstantOperand::cast(op)->virtual_register()
+ : AllocatedOperand::cast(op)->index();
+ return Key(op->kind(), v);
}
static Value ValueFor(const InstructionOperand* op) {
- return Value(op->kind(), op->index());
+ int v = op->IsConstant() ? ConstantOperand::cast(op)->virtual_register()
+ : AllocatedOperand::cast(op)->index();
+ return Value(op->kind(), v);
+ }
+
+ static InstructionOperand FromKey(Key key) {
+ if (key.first == InstructionOperand::CONSTANT) {
+ return ConstantOperand(key.second);
+ }
+ return AllocatedOperand(key.first, key.second);
}
friend std::ostream& operator<<(std::ostream& os,
for (OperandMap::const_iterator it = is.values_.begin();
it != is.values_.end(); ++it) {
if (it != is.values_.begin()) os << " ";
- InstructionOperand source(it->first.first, it->first.second);
- InstructionOperand destination(it->second.first, it->second.second);
+ InstructionOperand source = FromKey(it->first);
+ InstructionOperand destination = FromKey(it->second);
MoveOperands mo(&source, &destination);
PrintableMoveOperands pmo = {RegisterConfiguration::ArchDefault(), &mo};
os << pmo;
ParallelMove* parallel_move = new (main_zone()) ParallelMove(main_zone());
std::set<InstructionOperand*, InstructionOperandComparator> seen;
for (int i = 0; i < size; ++i) {
- MoveOperands mo(CreateRandomOperand(), CreateRandomOperand());
+ MoveOperands mo(CreateRandomOperand(true), CreateRandomOperand(false));
if (!mo.IsRedundant() && seen.find(mo.destination()) == seen.end()) {
parallel_move->AddMove(mo.source(), mo.destination(), main_zone());
seen.insert(mo.destination());
struct InstructionOperandComparator {
bool operator()(const InstructionOperand* x,
const InstructionOperand* y) const {
- return (x->kind() < y->kind()) ||
- (x->kind() == y->kind() && x->index() < y->index());
+ return *x < *y;
}
};
- InstructionOperand* CreateRandomOperand() {
+ InstructionOperand* CreateRandomOperand(bool is_source) {
int index = rng_->NextInt(6);
- switch (rng_->NextInt(5)) {
+ // destination can't be Constant.
+ switch (rng_->NextInt(is_source ? 5 : 4)) {
case 0:
- return ConstantOperand::New(index, main_zone());
+ return StackSlotOperand::New(main_zone(), index);
case 1:
- return StackSlotOperand::New(index, main_zone());
+ return DoubleStackSlotOperand::New(main_zone(), index);
case 2:
- return DoubleStackSlotOperand::New(index, main_zone());
+ return RegisterOperand::New(main_zone(), index);
case 3:
- return RegisterOperand::New(index, main_zone());
+ return DoubleRegisterOperand::New(main_zone(), index);
case 4:
- return DoubleRegisterOperand::New(index, main_zone());
+ return ConstantOperand::New(main_zone(), index);
}
UNREACHABLE();
return NULL;
Start();
sequence_.AddInstruction(Instruction::New(main_zone(), kArchNop));
int index = static_cast<int>(sequence_.instructions().size()) - 1;
- AddGapMove(index, RegisterOperand::New(13, main_zone()),
- RegisterOperand::New(13, main_zone()));
+ AddGapMove(index, RegisterOperand::New(main_zone(), 13),
+ RegisterOperand::New(main_zone(), 13));
}
void NonRedundantMoves() {
Start();
sequence_.AddInstruction(Instruction::New(main_zone(), kArchNop));
int index = static_cast<int>(sequence_.instructions().size()) - 1;
- AddGapMove(index, ImmediateOperand::New(11, main_zone()),
- RegisterOperand::New(11, main_zone()));
+ AddGapMove(index, ImmediateOperand::New(main_zone(), 11),
+ RegisterOperand::New(main_zone(), 11));
}
void Other() {
Start();
InstructionOperand* output = instr->OutputAt(i);
EXPECT_NE(InstructionOperand::IMMEDIATE, output->kind());
if (output->IsConstant()) {
- s.constants_.insert(std::make_pair(
- output->index(), sequence.GetConstant(output->index())));
+ int vreg = ConstantOperand::cast(output)->virtual_register();
+ s.constants_.insert(std::make_pair(vreg, sequence.GetConstant(vreg)));
}
}
for (size_t i = 0; i < instr->InputCount(); ++i) {
InstructionOperand* input = instr->InputAt(i);
EXPECT_NE(InstructionOperand::CONSTANT, input->kind());
if (input->IsImmediate()) {
- s.immediates_.insert(std::make_pair(
- input->index(), sequence.GetImmediate(input->index())));
+ int index = ImmediateOperand::cast(input)->index();
+ s.immediates_.insert(
+ std::make_pair(index, sequence.GetImmediate(index)));
}
}
s.instructions_.push_back(instr);
}
int ToVreg(const InstructionOperand* operand) const {
- if (operand->IsConstant()) return operand->index();
+ if (operand->IsConstant()) {
+ return ConstantOperand::cast(operand)->virtual_register();
+ }
EXPECT_EQ(InstructionOperand::UNALLOCATED, operand->kind());
return UnallocatedOperand::cast(operand)->virtual_register();
}
Constant ToConstant(const InstructionOperand* operand) const {
ConstantMap::const_iterator i;
if (operand->IsConstant()) {
- i = constants_.find(operand->index());
+ i = constants_.find(ConstantOperand::cast(operand)->virtual_register());
EXPECT_FALSE(constants_.end() == i);
+ EXPECT_EQ(ConstantOperand::cast(operand)->virtual_register(), i->first);
} else {
EXPECT_EQ(InstructionOperand::IMMEDIATE, operand->kind());
- i = immediates_.find(operand->index());
+ i = immediates_.find(ImmediateOperand::cast(operand)->index());
EXPECT_FALSE(immediates_.end() == i);
+ EXPECT_EQ(ImmediateOperand::cast(operand)->index(), i->first);
}
- EXPECT_EQ(operand->index(), i->first);
return i->second;
}
CHECK_NE(kNoValue, op.value_);
switch (op.type_) {
case kConstant:
- return ConstantOperand::New(op.value_, zone());
+ return ConstantOperand::New(zone(), op.value_);
case kFixedSlot:
- return StackSlotOperand::New(op.value_, zone());
+ return StackSlotOperand::New(zone(), op.value_);
case kFixedRegister:
CHECK(0 <= op.value_ && op.value_ < num_general_registers());
- return RegisterOperand::New(op.value_, zone());
+ return RegisterOperand::New(zone(), op.value_);
default:
break;
}