// For unit stride mask-register load (vlm): result vector, pointer, VL.
// IntrArgMemOnly: only accesses memory through its pointer argument.
class RISCVUSMLoad
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [llvm_ptr_ty, llvm_anyint_ty],
                            [NoCapture<ArgIndex<0>>, IntrReadMem, IntrArgMemOnly]>,
      RISCVVIntrinsic {
  let VLOperand = 1;
}
// For unit stride load: passthru operand, pointer, VL.
// IntrArgMemOnly: only accesses memory through its pointer argument.
class RISCVUSLoad
    : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
                            [LLVMMatchType<0>, llvm_ptr_ty, llvm_anyint_ty],
                            [NoCapture<ArgIndex<1>>, IntrReadMem, IntrArgMemOnly]>,
      RISCVVIntrinsic {
  let VLOperand = 2;
}
// For unit stride fault-only-first load
// NOTE(review): class header is outside this hunk; mask operand and
// ImmArg policy operand suggest the masked variant — confirm upstream.
       [LLVMMatchType<0>, llvm_ptr_ty,
        LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
        llvm_anyint_ty, LLVMMatchType<1>],
       [NoCapture<ArgIndex<1>>, ImmArg<ArgIndex<4>>, IntrReadMem,
        IntrArgMemOnly]>,
      RISCVVIntrinsic {
  let VLOperand = 3;
}
// For unit stride store: value to store, pointer, VL.
// IntrArgMemOnly: only accesses memory through its pointer argument.
class RISCVUSStore
    : DefaultAttrsIntrinsic<[],
                            [llvm_anyvector_ty, llvm_ptr_ty, llvm_anyint_ty],
                            [NoCapture<ArgIndex<1>>, IntrWriteMem, IntrArgMemOnly]>,
      RISCVVIntrinsic {
  let VLOperand = 2;
}
// For unit stride store with mask
// NOTE(review): class header is outside this hunk.
       [llvm_anyvector_ty, llvm_ptr_ty,
        LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
        llvm_anyint_ty],
       [NoCapture<ArgIndex<1>>, IntrWriteMem, IntrArgMemOnly]>,
      RISCVVIntrinsic {
  let VLOperand = 3;
}
// For strided store
// NOTE(review): class header is outside this hunk; IntrReadMem plus nf
// result-type splat indicates this tail belongs to a segment *load*
// class, not the strided store named above — confirm upstream.
                                 !add(nf, -1))),
                    !listconcat(!listsplat(LLVMMatchType<0>, nf),
                                [llvm_ptr_ty, llvm_anyint_ty]),
                    [NoCapture<ArgIndex<nf>>, IntrReadMem, IntrArgMemOnly]>,
      RISCVVIntrinsic {
  let VLOperand = !add(nf, 1);
}
// For unit stride segment load with mask
// NOTE(review): class header is outside this hunk.
        [llvm_ptr_ty,
         LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
         llvm_anyint_ty, LLVMMatchType<1>]),
       [ImmArg<ArgIndex<!add(nf, 3)>>, NoCapture<ArgIndex<nf>>,
        IntrReadMem, IntrArgMemOnly]>,
      RISCVVIntrinsic {
  let VLOperand = !add(nf, 2);
}
// NOTE(review): class header is outside this hunk; nf stored values,
// pointer, VL with IntrWriteMem — tail of a segment store class.
       !listconcat([llvm_anyvector_ty],
                   !listsplat(LLVMMatchType<0>, !add(nf, -1)),
                   [llvm_ptr_ty, llvm_anyint_ty]),
       [NoCapture<ArgIndex<nf>>, IntrWriteMem, IntrArgMemOnly]>,
      RISCVVIntrinsic {
  let VLOperand = !add(nf, 1);
}
// For unit stride segment store with mask
// NOTE(review): class header is outside this hunk.
        [llvm_ptr_ty,
         LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
         llvm_anyint_ty]),
       [NoCapture<ArgIndex<nf>>, IntrWriteMem, IntrArgMemOnly]>,
      RISCVVIntrinsic {
  let VLOperand = !add(nf, 2);
}