// is one VP intrinsic that maps directly to one SDNode that goes by the
// same name. Since the operands are also the same, we open the property
// scopes for both the VPIntrinsic and the SDNode at once.
-// \p SDOPC The SelectionDAG Node id (eg VP_ADD).
+// \p VPSD The SelectionDAG Node id (eg VP_ADD).
// \p LEGALPOS The operand position of the SDNode that is used for legalizing
// this SDNode. This can be `-1`, in which case the return type of
// the SDNode is used.
// \p MASKPOS The mask operand position.
// \p EVLPOS The explicit vector length operand position.
#ifndef BEGIN_REGISTER_VP_SDNODE
-#define BEGIN_REGISTER_VP_SDNODE(SDOPC, LEGALPOS, TDNAME, MASKPOS, EVLPOS)
+#define BEGIN_REGISTER_VP_SDNODE(VPSD, LEGALPOS, TDNAME, MASKPOS, EVLPOS)
#endif
// End the property scope of a new VP SDNode.
#ifndef END_REGISTER_VP_SDNODE
-#define END_REGISTER_VP_SDNODE(SDOPC)
+#define END_REGISTER_VP_SDNODE(VPSD)
#endif
// Helper macros for the common "1:1 - Intrinsic : SDNode" case.
// There is one VP intrinsic that maps directly to one SDNode that goes by the
// same name. Since the operands are also the same, we open the property
// scopes for both the VPIntrinsic and the SDNode at once.
//
-// \p INTRIN The canonical name (eg `vp_add`, which at the same time is the
+// \p VPID The canonical name (eg `vp_add`, which at the same time is the
// name of the intrinsic and the TableGen def of the SDNode).
// \p MASKPOS The mask operand position.
// \p EVLPOS The explicit vector length operand position.
-// \p SDOPC The SelectionDAG Node id (eg VP_ADD).
+// \p VPSD The SelectionDAG Node id (eg VP_ADD).
// \p LEGALPOS The operand position of the SDNode that is used for legalizing
// this SDNode. This can be `-1`, in which case the return type of
// the SDNode is used.
-#define BEGIN_REGISTER_VP(INTRIN, MASKPOS, EVLPOS, SDOPC, LEGALPOS) \
-BEGIN_REGISTER_VP_INTRINSIC(INTRIN, MASKPOS, EVLPOS) \
-BEGIN_REGISTER_VP_SDNODE(SDOPC, LEGALPOS, INTRIN, MASKPOS, EVLPOS)
-
-#define END_REGISTER_VP(INTRIN, SDOPC) \
-END_REGISTER_VP_INTRINSIC(INTRIN) \
-END_REGISTER_VP_SDNODE(SDOPC)
+#define BEGIN_REGISTER_VP(VPID, MASKPOS, EVLPOS, VPSD, LEGALPOS) \
+ BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, EVLPOS) \
+ BEGIN_REGISTER_VP_SDNODE(VPSD, LEGALPOS, VPID, MASKPOS, EVLPOS)
+#define END_REGISTER_VP(VPID, VPSD) \
+ END_REGISTER_VP_INTRINSIC(VPID) \
+ END_REGISTER_VP_SDNODE(VPSD)
// The following macros attach properties to the scope they are placed in. This
// assigns the property to the VP Intrinsic and/or SDNode that belongs to that
// scope.
// Property Macros {
// The intrinsic and/or SDNode has the same function as this LLVM IR Opcode.
-// \p OPC The standard IR opcode.
-#ifndef HANDLE_VP_TO_OPC
-#define HANDLE_VP_TO_OPC(OPC)
+// \p OPC The opcode of the instruction with the same function.
+#ifndef VP_PROPERTY_FUNCTIONAL_OPC
+#define VP_PROPERTY_FUNCTIONAL_OPC(OPC)
#endif
// Whether the intrinsic may have a rounding mode or exception behavior operand
// bundle.
// \p HASROUND '1' if the intrinsic can have a rounding mode operand bundle,
// '0' otherwise.
// \p HASEXCEPT '1' if the intrinsic can have an exception behavior operand
// bundle, '0' otherwise.
// \p INTRINID The constrained fp intrinsic this VP intrinsic corresponds to.
-#ifndef HANDLE_VP_TO_CONSTRAINEDFP
-#define HANDLE_VP_TO_CONSTRAINEDFP(HASROUND, HASEXCEPT, INTRINID)
+#ifndef VP_PROPERTY_CONSTRAINEDFP
+#define VP_PROPERTY_CONSTRAINEDFP(HASROUND, HASEXCEPT, INTRINID)
#endif
// Map this VP intrinsic to its canonical functional intrinsic.
-#ifndef HANDLE_VP_TO_INTRIN
-#define HANDLE_VP_TO_INTRIN(ID)
+// \p INTRIN The non-VP intrinsic with the same function.
+#ifndef VP_PROPERTY_FUNCTIONAL_INTRINSIC
+#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN)
#endif
// This VP Intrinsic is a memory operation
// The pointer arg is at POINTERPOS and the data arg is at DATAPOS.
-#ifndef HANDLE_VP_IS_MEMOP
-#define HANDLE_VP_IS_MEMOP(VPID, POINTERPOS, DATAPOS)
+#ifndef VP_PROPERTY_MEMOP
+#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS)
#endif
// Map this VP reduction intrinsic to its reduction operand positions.
-#ifndef HANDLE_VP_REDUCTION
-#define HANDLE_VP_REDUCTION(ID, STARTPOS, VECTORPOS)
+#ifndef VP_PROPERTY_REDUCTION
+#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS)
#endif
// A property to infer VP binary-op SDNode opcodes automatically.
-#ifndef PROPERTY_VP_BINARYOP_SDNODE
-#define PROPERTY_VP_BINARYOP_SDNODE(ID)
-#endif
-
-// A property to infer VP reduction SDNode opcodes automatically.
-#ifndef PROPERTY_VP_REDUCTION_SDNODE
-#define PROPERTY_VP_REDUCTION_SDNODE(ID)
+#ifndef VP_PROPERTY_BINARYOP
+#define VP_PROPERTY_BINARYOP
#endif
/// } Property Macros
// Specialized helper macro for integer binary operators (%x, %y, %mask, %evl).
#ifdef HELPER_REGISTER_BINARY_INT_VP
-#error "The internal helper macro HELPER_REGISTER_BINARY_INT_VP is already defined!"
+#error \
+ "The internal helper macro HELPER_REGISTER_BINARY_INT_VP is already defined!"
#endif
-#define HELPER_REGISTER_BINARY_INT_VP(INTRIN, SDOPC, OPC) \
-BEGIN_REGISTER_VP(INTRIN, 2, 3, SDOPC, -1) \
-HANDLE_VP_TO_OPC(OPC) \
-PROPERTY_VP_BINARYOP_SDNODE(SDOPC) \
-END_REGISTER_VP(INTRIN, SDOPC)
-
-
+#define HELPER_REGISTER_BINARY_INT_VP(VPID, VPSD, IROPC) \
+ BEGIN_REGISTER_VP(VPID, 2, 3, VPSD, -1) \
+ VP_PROPERTY_FUNCTIONAL_OPC(IROPC) \
+ VP_PROPERTY_BINARYOP \
+ END_REGISTER_VP(VPID, VPSD)
// llvm.vp.add(x,y,mask,vlen)
HELPER_REGISTER_BINARY_INT_VP(vp_add, VP_ADD, Add)
#error \
"The internal helper macro HELPER_REGISTER_BINARY_FP_VP is already defined!"
#endif
-#define HELPER_REGISTER_BINARY_FP_VP(OPSUFFIX, SDOPC, OPC) \
- BEGIN_REGISTER_VP(vp_##OPSUFFIX, 2, 3, SDOPC, -1) \
- HANDLE_VP_TO_OPC(OPC) \
- HANDLE_VP_TO_CONSTRAINEDFP(1, 1, experimental_constrained_##OPSUFFIX) \
- PROPERTY_VP_BINARYOP_SDNODE(SDOPC) \
- END_REGISTER_VP(vp_##OPSUFFIX, SDOPC)
+#define HELPER_REGISTER_BINARY_FP_VP(OPSUFFIX, VPSD, IROPC) \
+ BEGIN_REGISTER_VP(vp_##OPSUFFIX, 2, 3, VPSD, -1) \
+ VP_PROPERTY_FUNCTIONAL_OPC(IROPC) \
+ VP_PROPERTY_CONSTRAINEDFP(1, 1, experimental_constrained_##OPSUFFIX) \
+ VP_PROPERTY_BINARYOP \
+ END_REGISTER_VP(vp_##OPSUFFIX, VPSD)
// llvm.vp.fadd(x,y,mask,vlen)
HELPER_REGISTER_BINARY_FP_VP(fadd, VP_FADD, FAdd)
BEGIN_REGISTER_VP_INTRINSIC(vp_store, 2, 3)
// chain = VP_STORE chain,val,base,offset,mask,evl
BEGIN_REGISTER_VP_SDNODE(VP_STORE, 0, vp_store, 4, 5)
-HANDLE_VP_TO_OPC(Store)
-HANDLE_VP_TO_INTRIN(masked_store)
-HANDLE_VP_IS_MEMOP(vp_store, 1, 0)
+VP_PROPERTY_FUNCTIONAL_OPC(Store)
+VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_store)
+VP_PROPERTY_MEMOP(1, 0)
END_REGISTER_VP(vp_store, VP_STORE)
// llvm.vp.scatter(ptr,val,mask,vlen)
BEGIN_REGISTER_VP_INTRINSIC(vp_scatter, 2, 3)
// chain = VP_SCATTER chain,val,base,indices,scale,mask,evl
BEGIN_REGISTER_VP_SDNODE(VP_SCATTER, -1, vp_scatter, 5, 6)
-HANDLE_VP_TO_INTRIN(masked_scatter)
-HANDLE_VP_IS_MEMOP(vp_scatter, 1, 0)
+VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_scatter)
+VP_PROPERTY_MEMOP(1, 0)
END_REGISTER_VP(vp_scatter, VP_SCATTER)
// llvm.vp.load(ptr,mask,vlen)
BEGIN_REGISTER_VP_INTRINSIC(vp_load, 1, 2)
// val,chain = VP_LOAD chain,base,offset,mask,evl
BEGIN_REGISTER_VP_SDNODE(VP_LOAD, -1, vp_load, 3, 4)
-HANDLE_VP_TO_OPC(Load)
-HANDLE_VP_TO_INTRIN(masked_load)
-HANDLE_VP_IS_MEMOP(vp_load, 0, None)
+VP_PROPERTY_FUNCTIONAL_OPC(Load)
+VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_load)
+VP_PROPERTY_MEMOP(0, None)
END_REGISTER_VP(vp_load, VP_LOAD)
// llvm.vp.gather(ptr,mask,vlen)
BEGIN_REGISTER_VP_INTRINSIC(vp_gather, 1, 2)
// val,chain = VP_GATHER chain,base,indices,scale,mask,evl
BEGIN_REGISTER_VP_SDNODE(VP_GATHER, -1, vp_gather, 4, 5)
-HANDLE_VP_TO_INTRIN(masked_gather)
-HANDLE_VP_IS_MEMOP(vp_gather, 0, None)
+VP_PROPERTY_FUNCTIONAL_INTRINSIC(masked_gather)
+VP_PROPERTY_MEMOP(0, None)
END_REGISTER_VP(vp_gather, VP_GATHER)
///// } Memory Operations
// Specialized helper macro for VP reductions (%start, %x, %mask, %evl).
#ifdef HELPER_REGISTER_REDUCTION_VP
-#error "The internal helper macro HELPER_REGISTER_REDUCTION_VP is already defined!"
+#error \
+ "The internal helper macro HELPER_REGISTER_REDUCTION_VP is already defined!"
#endif
-#define HELPER_REGISTER_REDUCTION_VP(VPINTRIN, SDOPC, INTRIN) \
-BEGIN_REGISTER_VP(VPINTRIN, 2, 3, SDOPC, -1) \
-HANDLE_VP_TO_INTRIN(INTRIN) \
-HANDLE_VP_REDUCTION(VPINTRIN, 0, 1) \
-PROPERTY_VP_REDUCTION_SDNODE(SDOPC) \
-END_REGISTER_VP(VPINTRIN, SDOPC)
+#define HELPER_REGISTER_REDUCTION_VP(VPID, VPSD, INTRIN) \
+ BEGIN_REGISTER_VP(VPID, 2, 3, VPSD, -1) \
+ VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) \
+ VP_PROPERTY_REDUCTION(0, 1) \
+ END_REGISTER_VP(VPID, VPSD)
// llvm.vp.reduce.add(start,x,mask,vlen)
HELPER_REGISTER_REDUCTION_VP(vp_reduce_add, VP_REDUCE_ADD,
// fast-math flags in the IR and as two distinct ISD opcodes in the
// SelectionDAG.
#ifdef HELPER_REGISTER_REDUCTION_SEQ_VP
-#error "The internal helper macro HELPER_REGISTER_REDUCTION_SEQ_VP is already defined!"
+#error \
+ "The internal helper macro HELPER_REGISTER_REDUCTION_SEQ_VP is already defined!"
#endif
-#define HELPER_REGISTER_REDUCTION_SEQ_VP(VPINTRIN, SDOPC, SEQ_SDOPC, INTRIN) \
-BEGIN_REGISTER_VP_INTRINSIC(VPINTRIN, 2, 3) \
-BEGIN_REGISTER_VP_SDNODE(SDOPC, -1, VPINTRIN, 2, 3) \
-END_REGISTER_VP_SDNODE(SDOPC) \
-BEGIN_REGISTER_VP_SDNODE(SEQ_SDOPC, -1, VPINTRIN, 2, 3) \
-END_REGISTER_VP_SDNODE(SEQ_SDOPC) \
-HANDLE_VP_TO_INTRIN(INTRIN) \
-HANDLE_VP_REDUCTION(VPINTRIN, 0, 1) \
-PROPERTY_VP_REDUCTION_SDNODE(SDOPC) \
-PROPERTY_VP_REDUCTION_SDNODE(SEQ_SDOPC) \
-END_REGISTER_VP_INTRINSIC(VPINTRIN)
+#define HELPER_REGISTER_REDUCTION_SEQ_VP(VPID, VPSD, SEQ_VPSD, INTRIN) \
+ BEGIN_REGISTER_VP_INTRINSIC(VPID, 2, 3) \
+ BEGIN_REGISTER_VP_SDNODE(VPSD, -1, VPID, 2, 3) \
+ VP_PROPERTY_REDUCTION(0, 1) \
+ END_REGISTER_VP_SDNODE(VPSD) \
+ BEGIN_REGISTER_VP_SDNODE(SEQ_VPSD, -1, VPID, 2, 3) \
+ VP_PROPERTY_REDUCTION(0, 1) \
+ END_REGISTER_VP_SDNODE(SEQ_VPSD) \
+ VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) \
+ END_REGISTER_VP_INTRINSIC(VPID)
// llvm.vp.reduce.fadd(start,x,mask,vlen)
HELPER_REGISTER_REDUCTION_SEQ_VP(vp_reduce_fadd, VP_REDUCE_FADD,
// END_REGISTER_CASES(vp_select, VP_SELECT)
END_REGISTER_VP_INTRINSIC(vp_select)
-BEGIN_REGISTER_VP(experimental_vp_splice, 3, 5,
- EXPERIMENTAL_VP_SPLICE, -1)
+BEGIN_REGISTER_VP(experimental_vp_splice, 3, 5, EXPERIMENTAL_VP_SPLICE, -1)
END_REGISTER_VP(experimental_vp_splice, EXPERIMENTAL_VP_SPLICE)
///// } Shuffles
#undef END_REGISTER_VP
#undef END_REGISTER_VP_INTRINSIC
#undef END_REGISTER_VP_SDNODE
-#undef HANDLE_VP_TO_OPC
-#undef HANDLE_VP_TO_CONSTRAINEDFP
-#undef HANDLE_VP_TO_INTRIN
-#undef HANDLE_VP_IS_MEMOP
-#undef HANDLE_VP_REDUCTION
-#undef PROPERTY_VP_BINARYOP_SDNODE
-#undef PROPERTY_VP_REDUCTION_SDNODE
+#undef VP_PROPERTY_BINARYOP
+#undef VP_PROPERTY_CONSTRAINEDFP
+#undef VP_PROPERTY_FUNCTIONAL_INTRINSIC
+#undef VP_PROPERTY_FUNCTIONAL_OPC
+#undef VP_PROPERTY_MEMOP
+#undef VP_PROPERTY_REDUCTION
Optional<unsigned> VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
switch (VPID) {
default:
- return None;
-
-#define HANDLE_VP_IS_MEMOP(VPID, POINTERPOS, DATAPOS) \
- case Intrinsic::VPID: \
- return POINTERPOS;
+ break;
+#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
+#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
+#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
}
+ return None;
}
/// \return The data (payload) operand of this store or scatter.
Optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
switch (VPID) {
default:
- return None;
-
-#define HANDLE_VP_IS_MEMOP(VPID, POINTERPOS, DATAPOS) \
- case Intrinsic::VPID: \
- return DATAPOS;
+ break;
+#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
+#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
+#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
}
+ return None;
}
bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
switch (ID) {
default:
- return false;
-
+ break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS) \
case Intrinsic::VPID: \
- break;
+ return true;
#include "llvm/IR/VPIntrinsics.def"
}
- return true;
+ return false;
}
// Equivalent non-predicated opcode
Optional<unsigned> VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
- Optional<unsigned> FunctionalOC;
switch (ID) {
default:
break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
-#define HANDLE_VP_TO_OPC(OPC) FunctionalOC = Instruction::OPC;
-#define END_REGISTER_VP_INTRINSIC(...) break;
+#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
+#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
}
-
- return FunctionalOC;
+ return None;
}
Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
switch (IROPC) {
default:
- return Intrinsic::not_intrinsic;
+ break;
-#define HANDLE_VP_TO_OPC(OPC) case Instruction::OPC:
+#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
+#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
}
+ return Intrinsic::not_intrinsic;
}
bool VPIntrinsic::canIgnoreVectorLengthParam() const {
bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
switch (ID) {
default:
- return false;
-#define HANDLE_VP_REDUCTION(VPID, STARTPOS, VECTORPOS) \
- case Intrinsic::VPID: \
break;
+#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
+#define VP_PROPERTY_REDUCTION(STARTPOS, ...) return true;
+#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
}
- return true;
+ return false;
}
unsigned VPReductionIntrinsic::getVectorParamPos() const {
Optional<unsigned> VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
switch (ID) {
-#define HANDLE_VP_REDUCTION(VPID, STARTPOS, VECTORPOS) \
- case Intrinsic::VPID: \
- return VECTORPOS;
+#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
+#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return VECTORPOS;
+#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
default:
- return None;
+ break;
}
+ return None;
}
Optional<unsigned> VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
switch (ID) {
-#define HANDLE_VP_REDUCTION(VPID, STARTPOS, VECTORPOS) \
- case Intrinsic::VPID: \
- return STARTPOS;
+#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
+#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return STARTPOS;
+#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
default:
- return None;
+ break;
}
+ return None;
}
Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {