if (IsLoadCSE)
*IsLoadCSE = false;
+ TypeSize LoadTypeSize = DL.getTypeSizeInBits(AccessTy);
+ if (LoadTypeSize.isScalable())
+ return nullptr;
+
// Make sure the read bytes are contained in the memset.
- TypeSize LoadSize = DL.getTypeSizeInBits(AccessTy);
- if (LoadSize.isScalable() ||
- (Len->getValue() * 8).ult(LoadSize.getFixedSize()))
+ uint64_t LoadSize = LoadTypeSize.getFixedSize();
+ if ((Len->getValue() * 8).ult(LoadSize))
return nullptr;
- APInt Splat = APInt::getSplat(LoadSize.getFixedSize(), Val->getValue());
+ APInt Splat = LoadSize >= 8 ? APInt::getSplat(LoadSize, Val->getValue())
+ : Val->getValue().trunc(LoadSize);
ConstantInt *SplatC = ConstantInt::get(MSI->getContext(), Splat);
if (CastInst::isBitOrNoopPointerCastable(SplatC->getType(), AccessTy, DL))
return SplatC;
ret i27 %v
}
+; Sub-byte load folded from a memset value: for loads narrower than 8 bits
+; the splat is Val truncated to the load width, so an i1 load after
+; memset(ptr, 0, ...) folds to the low bit of 0x00, i.e. `false`.
+define i1 @load_after_memset_0_i1(ptr %a) {
+; CHECK-LABEL: @load_after_memset_0_i1(
+; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr noundef nonnull align 1 dereferenceable(16) [[A:%.*]], i8 0, i64 16, i1 false)
+; CHECK-NEXT: ret i1 false
+;
+ call void @llvm.memset.p0.i64(ptr %a, i8 0, i64 16, i1 false)
+ %v = load i1, ptr %a
+ ret i1 %v
+}
+
define <4 x i8> @load_after_memset_0_vec(ptr %a) {
; CHECK-LABEL: @load_after_memset_0_vec(
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr noundef nonnull align 1 dereferenceable(16) [[A:%.*]], i8 0, i64 16, i1 false)
ret i27 %v
}
+; Companion to @load_after_memset_0_i1 with a nonzero fill byte: 0x01
+; truncated to i1 keeps the low bit set, so the load folds to `true`.
+define i1 @load_after_memset_1_i1(ptr %a) {
+; CHECK-LABEL: @load_after_memset_1_i1(
+; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr noundef nonnull align 1 dereferenceable(16) [[A:%.*]], i8 1, i64 16, i1 false)
+; CHECK-NEXT: ret i1 true
+;
+ call void @llvm.memset.p0.i64(ptr %a, i8 1, i64 16, i1 false)
+ %v = load i1, ptr %a
+ ret i1 %v
+}
+
define <4 x i8> @load_after_memset_1_vec(ptr %a) {
; CHECK-LABEL: @load_after_memset_1_vec(
; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr noundef nonnull align 1 dereferenceable(16) [[A:%.*]], i8 1, i64 16, i1 false)