// Check that the TableGen'ed-like file is in sync with our expectations.
// First, the Idx.
- assert(AArch64::PartialMappingIdx::GPR32 ==
- AArch64::PartialMappingIdx::FirstGPR &&
+ assert(AArch64::PartialMappingIdx::PMI_GPR32 ==
+ AArch64::PartialMappingIdx::PMI_FirstGPR &&
"GPR32 index not first in the GPR list");
- assert(AArch64::PartialMappingIdx::GPR64 ==
- AArch64::PartialMappingIdx::LastGPR &&
+ assert(AArch64::PartialMappingIdx::PMI_GPR64 ==
+ AArch64::PartialMappingIdx::PMI_LastGPR &&
"GPR64 index not last in the GPR list");
- assert(AArch64::PartialMappingIdx::FirstGPR <=
- AArch64::PartialMappingIdx::LastGPR &&
+ assert(AArch64::PartialMappingIdx::PMI_FirstGPR <=
+ AArch64::PartialMappingIdx::PMI_LastGPR &&
"GPR list is backward");
- assert(AArch64::PartialMappingIdx::FPR32 ==
- AArch64::PartialMappingIdx::FirstFPR &&
+ assert(AArch64::PartialMappingIdx::PMI_FPR32 ==
+ AArch64::PartialMappingIdx::PMI_FirstFPR &&
"FPR32 index not first in the FPR list");
- assert(AArch64::PartialMappingIdx::FPR512 ==
- AArch64::PartialMappingIdx::LastFPR &&
+ assert(AArch64::PartialMappingIdx::PMI_FPR512 ==
+ AArch64::PartialMappingIdx::PMI_LastFPR &&
"FPR512 index not last in the FPR list");
- assert(AArch64::PartialMappingIdx::FirstFPR <=
- AArch64::PartialMappingIdx::LastFPR &&
+ assert(AArch64::PartialMappingIdx::PMI_FirstFPR <=
+ AArch64::PartialMappingIdx::PMI_LastFPR &&
"FPR list is backward");
- assert(AArch64::PartialMappingIdx::FPR32 + 1 ==
- AArch64::PartialMappingIdx::FPR64 &&
- AArch64::PartialMappingIdx::FPR64 + 1 ==
- AArch64::PartialMappingIdx::FPR128 &&
- AArch64::PartialMappingIdx::FPR128 + 1 ==
- AArch64::PartialMappingIdx::FPR256 &&
- AArch64::PartialMappingIdx::FPR256 + 1 ==
- AArch64::PartialMappingIdx::FPR512 &&
+ assert(AArch64::PartialMappingIdx::PMI_FPR32 + 1 ==
+ AArch64::PartialMappingIdx::PMI_FPR64 &&
+ AArch64::PartialMappingIdx::PMI_FPR64 + 1 ==
+ AArch64::PartialMappingIdx::PMI_FPR128 &&
+ AArch64::PartialMappingIdx::PMI_FPR128 + 1 ==
+ AArch64::PartialMappingIdx::PMI_FPR256 &&
+ AArch64::PartialMappingIdx::PMI_FPR256 + 1 ==
+ AArch64::PartialMappingIdx::PMI_FPR512 &&
"FPR indices not properly ordered");
// Now, the content.
// Check partial mapping.
Map.RegBank == &RB && #Idx " is incorrectly initialized"); \
} while (0)
- CHECK_PARTIALMAP(GPR32, 0, 32, RBGPR);
- CHECK_PARTIALMAP(GPR64, 0, 64, RBGPR);
- CHECK_PARTIALMAP(FPR32, 0, 32, RBFPR);
- CHECK_PARTIALMAP(FPR64, 0, 64, RBFPR);
- CHECK_PARTIALMAP(FPR128, 0, 128, RBFPR);
- CHECK_PARTIALMAP(FPR256, 0, 256, RBFPR);
- CHECK_PARTIALMAP(FPR512, 0, 512, RBFPR);
+ CHECK_PARTIALMAP(PMI_GPR32, 0, 32, RBGPR);
+ CHECK_PARTIALMAP(PMI_GPR64, 0, 64, RBGPR);
+ CHECK_PARTIALMAP(PMI_FPR32, 0, 32, RBFPR);
+ CHECK_PARTIALMAP(PMI_FPR64, 0, 64, RBFPR);
+ CHECK_PARTIALMAP(PMI_FPR128, 0, 128, RBFPR);
+ CHECK_PARTIALMAP(PMI_FPR256, 0, 256, RBFPR);
+ CHECK_PARTIALMAP(PMI_FPR512, 0, 512, RBFPR);
// Check value mapping.
#define CHECK_VALUEMAP_IMPL(RBName, Size, Offset) \
do { \
AArch64::PartialMappingIdx PartialMapBaseIdx = \
- AArch64::PartialMappingIdx::RBName##Size; \
+ AArch64::PartialMappingIdx::PMI_##RBName##Size; \
(void) PartialMapBaseIdx; \
const ValueMapping &Map = \
- AArch64::getValueMapping(AArch64::First##RBName, Size)[Offset]; \
+ AArch64::getValueMapping(AArch64::PMI_First##RBName, Size)[Offset]; \
(void) Map; \
assert(Map.BreakDown == &AArch64::PartMappings[PartialMapBaseIdx] && \
Map.NumBreakDowns == 1 && #RBName #Size \
#define CHECK_VALUEMAP_CROSSREGCPY(RBNameDst, RBNameSrc, Size) \
do { \
AArch64::PartialMappingIdx PartialMapDstIdx = \
- AArch64::PartialMappingIdx::RBNameDst##Size; \
+ AArch64::PartialMappingIdx::PMI_##RBNameDst##Size; \
AArch64::PartialMappingIdx PartialMapSrcIdx = \
- AArch64::PartialMappingIdx::RBNameSrc##Size; \
+ AArch64::PartialMappingIdx::PMI_##RBNameSrc##Size; \
(void) PartialMapDstIdx; \
(void) PartialMapSrcIdx; \
const ValueMapping *Map = AArch64::getCopyMapping( \
- AArch64::First##RBNameDst == AArch64::FirstGPR, \
- AArch64::First##RBNameSrc == AArch64::FirstGPR, Size); \
+ AArch64::PMI_First##RBNameDst == AArch64::PMI_FirstGPR, \
+ AArch64::PMI_First##RBNameSrc == AArch64::PMI_FirstGPR, Size); \
(void) Map; \
assert(Map[0].BreakDown == &AArch64::PartMappings[PartialMapDstIdx] && \
Map[0].NumBreakDowns == 1 && #RBNameDst #Size \
break;
InstructionMappings AltMappings;
InstructionMapping GPRMapping(
- /*ID*/ 1, /*Cost*/ 1, AArch64::getValueMapping(AArch64::FirstGPR, Size),
+ /*ID*/ 1, /*Cost*/ 1,
+ AArch64::getValueMapping(AArch64::PMI_FirstGPR, Size),
/*NumOperands*/ 3);
InstructionMapping FPRMapping(
- /*ID*/ 2, /*Cost*/ 1, AArch64::getValueMapping(AArch64::FirstFPR, Size),
+ /*ID*/ 2, /*Cost*/ 1,
+ AArch64::getValueMapping(AArch64::PMI_FirstFPR, Size),
/*NumOperands*/ 3);
AltMappings.emplace_back(std::move(GPRMapping));
InstructionMappings AltMappings;
InstructionMapping GPRMapping(
/*ID*/ 1, /*Cost*/ 1,
- getOperandsMapping({AArch64::getValueMapping(AArch64::FirstGPR, Size),
- // Addresses are GPR 64-bit.
- AArch64::getValueMapping(AArch64::FirstGPR, 64)}),
+ getOperandsMapping(
+ {AArch64::getValueMapping(AArch64::PMI_FirstGPR, Size),
+ // Addresses are GPR 64-bit.
+ AArch64::getValueMapping(AArch64::PMI_FirstGPR, 64)}),
/*NumOperands*/ 2);
InstructionMapping FPRMapping(
/*ID*/ 2, /*Cost*/ 1,
- getOperandsMapping({AArch64::getValueMapping(AArch64::FirstFPR, Size),
- // Addresses are GPR 64-bit.
- AArch64::getValueMapping(AArch64::FirstGPR, 64)}),
+ getOperandsMapping(
+ {AArch64::getValueMapping(AArch64::PMI_FirstFPR, Size),
+ // Addresses are GPR 64-bit.
+ AArch64::getValueMapping(AArch64::PMI_FirstGPR, 64)}),
/*NumOperands*/ 2);
AltMappings.emplace_back(std::move(GPRMapping));
#endif // End NDEBUG.
AArch64::PartialMappingIdx RBIdx =
- IsFPR ? AArch64::FirstFPR : AArch64::FirstGPR;
+ IsFPR ? AArch64::PMI_FirstFPR : AArch64::PMI_FirstGPR;
return InstructionMapping{DefaultMappingID, 1,
AArch64::getValueMapping(RBIdx, Size), NumOperands};
// As a top-level guess, vectors go in FPRs, scalars and pointers in GPRs.
// For floating-point instructions, scalars go in FPRs.
if (Ty.isVector() || isPreISelGenericFloatingPointOpcode(Opc))
- OpRegBankIdx[Idx] = AArch64::FirstFPR;
+ OpRegBankIdx[Idx] = AArch64::PMI_FirstFPR;
else
- OpRegBankIdx[Idx] = AArch64::FirstGPR;
+ OpRegBankIdx[Idx] = AArch64::PMI_FirstGPR;
}
unsigned Cost = 1;
switch (Opc) {
case TargetOpcode::G_SITOFP:
case TargetOpcode::G_UITOFP: {
- OpRegBankIdx = {AArch64::FirstFPR, AArch64::FirstGPR};
+ OpRegBankIdx = {AArch64::PMI_FirstFPR, AArch64::PMI_FirstGPR};
break;
}
case TargetOpcode::G_FPTOSI:
case TargetOpcode::G_FPTOUI: {
- OpRegBankIdx = {AArch64::FirstGPR, AArch64::FirstFPR};
+ OpRegBankIdx = {AArch64::PMI_FirstGPR, AArch64::PMI_FirstFPR};
break;
}
case TargetOpcode::G_FCMP: {
- OpRegBankIdx = {AArch64::FirstGPR,
- /* Predicate */ AArch64::PartialMappingIdx::None,
- AArch64::FirstFPR, AArch64::FirstFPR};
+ OpRegBankIdx = {AArch64::PMI_FirstGPR,
+ /* Predicate */ AArch64::PMI_None, AArch64::PMI_FirstFPR,
+ AArch64::PMI_FirstFPR};
break;
}
case TargetOpcode::G_BITCAST: {
// for the greedy mode the cost of the cross bank copy will
// offset this number.
// FIXME: Should be derived from the scheduling model.
- if (OpRegBankIdx[0] >= AArch64::FirstFPR)
+ if (OpRegBankIdx[0] >= AArch64::PMI_FirstFPR)
Cost = 2;
}
}