auto IsRegMask = [](const MachineOperand &MO) { return MO.isRegMask(); };
(void)IsRegMask;
- assert(!llvm::any_of(MI->operands(), IsRegMask) &&
+ assert(llvm::none_of(MI->operands(), IsRegMask) &&
"Calls were filtered out above!");
auto IsUnordered = [](MachineMemOperand *MMO) { return MMO->isUnordered(); };
// all stack slots), but we need to handle the different type of stackmap
// operands and memory effects here.
- if (!llvm::any_of(MI->operands(),
+ if (llvm::none_of(MI->operands(),
[](MachineOperand &Operand) { return Operand.isFI(); }))
return MBB;
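// Illustrative sketch (not part of the patch): the early return above bails out
// when no operand is a frame index. Same guard shape on hypothetical standalone
// data, with std::none_of in place of the range-based llvm::none_of:
#include <algorithm>
#include <vector>

struct Operand { bool IsFrameIndex; };

bool rewriteIfNeeded(const std::vector<Operand> &Ops) {
  // Bail out when no operand refers to a frame index (mirrors 'return MBB;').
  if (std::none_of(Ops.begin(), Ops.end(),
                   [](const Operand &Op) { return Op.IsFrameIndex; }))
    return false;
  // ... rewriting would happen here ...
  return true;
}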
// If any of the instructions between the VCMP and VPST are predicated
// then a different code path is expected to have merged the VCMP and
// VPST already.
- if (!std::any_of(++MachineBasicBlock::iterator(VCMP),
+ if (std::none_of(++MachineBasicBlock::iterator(VCMP),
MachineBasicBlock::iterator(VPST), hasVPRUse) &&
RDA->hasSameReachingDef(VCMP, VPST, VCMP->getOperand(1).getReg()) &&
RDA->hasSameReachingDef(VCMP, VPST, VCMP->getOperand(2).getReg())) {
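// Illustrative sketch (not part of the patch): like the VCMP..VPST window above,
// std::none_of also accepts an explicit iterator pair, so the check can be
// limited to a sub-range instead of a whole container. Hypothetical data:
#include <algorithm>
#include <cstddef>
#include <vector>

bool windowHasNoZero(const std::vector<int> &Seq, std::size_t Begin, std::size_t End) {
  // Inspect only the half-open window [Begin, End), not the full sequence.
  return std::none_of(Seq.begin() + Begin, Seq.begin() + End,
                      [](int V) { return V == 0; });
}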
erase_if(AddrGroups, [](auto &G) { return G.second.size() == 1; });
// Remove groups that don't use HVX types.
erase_if(AddrGroups, [&](auto &G) {
- return !llvm::any_of(
+ return llvm::none_of(
G.second, [&](auto &I) { return HVC.HST.isTypeForHVX(I.ValTy); });
});
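// Illustrative sketch (not part of the patch): the two erase_if calls above
// combine container-level filtering with an element-level none_of test. A
// standalone C++20 equivalent on a hypothetical group map:
#include <algorithm>
#include <map>
#include <string>
#include <vector>

void dropGroupsWithoutLargeValues(std::map<std::string, std::vector<int>> &Groups) {
  // Drop every group in which no element exceeds the threshold.
  std::erase_if(Groups, [](const auto &G) {
    return std::none_of(G.second.begin(), G.second.end(),
                        [](int V) { return V > 128; });
  });
}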
bool X86WinCOFFTargetStreamer::emitFPOStackAlign(unsigned Align, SMLoc L) {
if (checkInFPOPrologue(L))
return true;
- if (!llvm::any_of(CurFPOData->Instructions, [](const FPOInstruction &Inst) {
+ if (llvm::none_of(CurFPOData->Instructions, [](const FPOInstruction &Inst) {
return Inst.Op == FPOInstruction::SetFrame;
})) {
getContext().reportError(
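// Illustrative sketch (not part of the patch): the check above uses none_of to
// diagnose a *missing* required entry and report an error. Same shape in
// standalone C++ with a hypothetical record type and plain stderr reporting:
#include <algorithm>
#include <cstdio>
#include <vector>

struct Record { bool IsFrameSetup; };

bool checkHasFrameSetup(const std::vector<Record> &Records) {
  // Error out when no record establishes the frame.
  if (std::none_of(Records.begin(), Records.end(),
                   [](const Record &R) { return R.IsFrameSetup; })) {
    std::fprintf(stderr, "error: no frame-setup record seen\n");
    return false;
  }
  return true;
}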
if (collectCmovCandidates(Blocks, AllCmovGroups, /*IncludeLoads*/ true)) {
for (auto &Group : AllCmovGroups) {
// Skip any group that doesn't do at least one memory operand cmov.
- if (!llvm::any_of(Group, [&](MachineInstr *I) { return I->mayLoad(); }))
+ if (llvm::none_of(Group, [&](MachineInstr *I) { return I->mayLoad(); }))
continue;
// For CMOV groups which we can rewrite and which contain a memory load,
return false; // TODO
// Use lambda to lazily compute expensive condition after cheap ones.
auto NoSideEffects = [](BasicBlock &BB) {
- return !llvm::any_of(BB, [](const Instruction &I) {
+ return llvm::none_of(BB, [](const Instruction &I) {
return I.mayWriteToMemory() || I.mayHaveSideEffects();
});
};
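// Illustrative sketch (not part of the patch): the NoSideEffects lambda above
// wraps an expensive linear scan so it only runs after cheaper checks pass.
// Standalone version with hypothetical block/instruction types:
#include <algorithm>
#include <vector>

struct Inst { bool WritesMemory; };
struct Block { std::vector<Inst> Insts; bool IsEntry; };

bool canHoistInto(const Block &BB) {
  // Expensive scan, deferred behind a lambda so it is evaluated lazily.
  auto NoWrites = [&BB]() {
    return std::none_of(BB.Insts.begin(), BB.Insts.end(),
                        [](const Inst &I) { return I.WritesMemory; });
  };
  // Cheap check first; short-circuiting skips the scan when it fails.
  return !BB.IsEntry && NoWrites();
}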