--- /dev/null
+++ b/llvm/test/CodeGen/RISCV/wide-scalar-shift-by-byte-multiple-legalization.ll
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s | FileCheck %s -check-prefixes=ALL,RV64I
+; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s | FileCheck %s -check-prefixes=ALL,RV32I
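+
+; These tests exercise legalization of wide scalar shifts whose shift amount
+; is stored in memory as a byte offset and scaled to a bit count (byteOff << 3).
+; Source, shift amount, and result are all accessed with align 1, so every
+; value is assembled from (and decomposed into) individual byte loads/stores.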
+
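+; i32 (4-byte) cases: the shift lowers to a single native shift on both
+; targets (srlw/sllw/sraw on RV64I, srl/sll/sra on RV32I).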
+define void @lshr_4bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: lshr_4bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: lbu a3, 1(a0)
+; RV64I-NEXT: lbu a4, 0(a0)
+; RV64I-NEXT: lbu a5, 2(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: lb a0, 3(a0)
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: lbu a1, 0(a1)
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, a5
+; RV64I-NEXT: or a0, a0, a3
+; RV64I-NEXT: slli a1, a1, 3
+; RV64I-NEXT: srlw a0, a0, a1
+; RV64I-NEXT: sb a0, 0(a2)
+; RV64I-NEXT: srli a1, a0, 16
+; RV64I-NEXT: sb a1, 2(a2)
+; RV64I-NEXT: srli a1, a0, 24
+; RV64I-NEXT: sb a1, 3(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 1(a2)
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: lshr_4bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: lbu a3, 1(a0)
+; RV32I-NEXT: lbu a4, 0(a0)
+; RV32I-NEXT: lbu a5, 2(a0)
+; RV32I-NEXT: lbu a0, 3(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a0, a0, a5
+; RV32I-NEXT: or a0, a0, a3
+; RV32I-NEXT: lbu a3, 1(a1)
+; RV32I-NEXT: lbu a4, 0(a1)
+; RV32I-NEXT: lbu a5, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, a5
+; RV32I-NEXT: or a1, a1, a3
+; RV32I-NEXT: slli a1, a1, 3
+; RV32I-NEXT: srl a0, a0, a1
+; RV32I-NEXT: sb a0, 0(a2)
+; RV32I-NEXT: srli a1, a0, 16
+; RV32I-NEXT: sb a1, 2(a2)
+; RV32I-NEXT: srli a1, a0, 24
+; RV32I-NEXT: sb a1, 3(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 1(a2)
+; RV32I-NEXT: ret
+ %src = load i32, ptr %src.ptr, align 1
+ %byteOff = load i32, ptr %byteOff.ptr, align 1
+ %bitOff = shl i32 %byteOff, 3
+ %res = lshr i32 %src, %bitOff
+ store i32 %res, ptr %dst, align 1
+ ret void
+}
+define void @shl_4bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: shl_4bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: lbu a3, 1(a0)
+; RV64I-NEXT: lbu a4, 0(a0)
+; RV64I-NEXT: lbu a5, 2(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: lb a0, 3(a0)
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: lbu a1, 0(a1)
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, a5
+; RV64I-NEXT: or a0, a0, a3
+; RV64I-NEXT: slli a1, a1, 3
+; RV64I-NEXT: sllw a0, a0, a1
+; RV64I-NEXT: sb a0, 0(a2)
+; RV64I-NEXT: srli a1, a0, 16
+; RV64I-NEXT: sb a1, 2(a2)
+; RV64I-NEXT: srli a1, a0, 24
+; RV64I-NEXT: sb a1, 3(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 1(a2)
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: shl_4bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: lbu a3, 1(a0)
+; RV32I-NEXT: lbu a4, 0(a0)
+; RV32I-NEXT: lbu a5, 2(a0)
+; RV32I-NEXT: lbu a0, 3(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a0, a0, a5
+; RV32I-NEXT: or a0, a0, a3
+; RV32I-NEXT: lbu a3, 1(a1)
+; RV32I-NEXT: lbu a4, 0(a1)
+; RV32I-NEXT: lbu a5, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, a5
+; RV32I-NEXT: or a1, a1, a3
+; RV32I-NEXT: slli a1, a1, 3
+; RV32I-NEXT: sll a0, a0, a1
+; RV32I-NEXT: sb a0, 0(a2)
+; RV32I-NEXT: srli a1, a0, 16
+; RV32I-NEXT: sb a1, 2(a2)
+; RV32I-NEXT: srli a1, a0, 24
+; RV32I-NEXT: sb a1, 3(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 1(a2)
+; RV32I-NEXT: ret
+ %src = load i32, ptr %src.ptr, align 1
+ %byteOff = load i32, ptr %byteOff.ptr, align 1
+ %bitOff = shl i32 %byteOff, 3
+ %res = shl i32 %src, %bitOff
+ store i32 %res, ptr %dst, align 1
+ ret void
+}
+define void @ashr_4bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: ashr_4bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: lbu a3, 1(a0)
+; RV64I-NEXT: lbu a4, 0(a0)
+; RV64I-NEXT: lbu a5, 2(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: lb a0, 3(a0)
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: lbu a1, 0(a1)
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, a5
+; RV64I-NEXT: or a0, a0, a3
+; RV64I-NEXT: slli a1, a1, 3
+; RV64I-NEXT: sraw a0, a0, a1
+; RV64I-NEXT: sb a0, 0(a2)
+; RV64I-NEXT: srli a1, a0, 16
+; RV64I-NEXT: sb a1, 2(a2)
+; RV64I-NEXT: srli a1, a0, 24
+; RV64I-NEXT: sb a1, 3(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 1(a2)
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: ashr_4bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: lbu a3, 1(a0)
+; RV32I-NEXT: lbu a4, 0(a0)
+; RV32I-NEXT: lbu a5, 2(a0)
+; RV32I-NEXT: lbu a0, 3(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a0, a0, a5
+; RV32I-NEXT: or a0, a0, a3
+; RV32I-NEXT: lbu a3, 1(a1)
+; RV32I-NEXT: lbu a4, 0(a1)
+; RV32I-NEXT: lbu a5, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, a5
+; RV32I-NEXT: or a1, a1, a3
+; RV32I-NEXT: slli a1, a1, 3
+; RV32I-NEXT: sra a0, a0, a1
+; RV32I-NEXT: sb a0, 0(a2)
+; RV32I-NEXT: srli a1, a0, 16
+; RV32I-NEXT: sb a1, 2(a2)
+; RV32I-NEXT: srli a1, a0, 24
+; RV32I-NEXT: sb a1, 3(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 1(a2)
+; RV32I-NEXT: ret
+ %src = load i32, ptr %src.ptr, align 1
+ %byteOff = load i32, ptr %byteOff.ptr, align 1
+ %bitOff = shl i32 %byteOff, 3
+ %res = ashr i32 %src, %bitOff
+ store i32 %res, ptr %dst, align 1
+ ret void
+}
+
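+; i64 (8-byte) cases: RV64I still uses a single native 64-bit shift, while
+; RV32I splits the value into two 32-bit words and selects between the
+; in-range and cross-word sequences depending on whether the bit offset
+; reaches 32.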
+define void @lshr_8bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: lshr_8bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: lbu a3, 1(a0)
+; RV64I-NEXT: lbu a4, 0(a0)
+; RV64I-NEXT: lbu a5, 2(a0)
+; RV64I-NEXT: lbu a6, 3(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a4, a6, a5
+; RV64I-NEXT: or a3, a4, a3
+; RV64I-NEXT: lbu a4, 5(a0)
+; RV64I-NEXT: lbu a5, 4(a0)
+; RV64I-NEXT: lbu a6, 6(a0)
+; RV64I-NEXT: lbu a0, 7(a0)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, a6
+; RV64I-NEXT: or a0, a0, a4
+; RV64I-NEXT: slli a0, a0, 32
+; RV64I-NEXT: or a0, a0, a3
+; RV64I-NEXT: lbu a3, 5(a1)
+; RV64I-NEXT: lbu a4, 4(a1)
+; RV64I-NEXT: lbu a5, 6(a1)
+; RV64I-NEXT: lbu a6, 7(a1)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a4, a6, a5
+; RV64I-NEXT: or a3, a4, a3
+; RV64I-NEXT: lbu a4, 1(a1)
+; RV64I-NEXT: lbu a5, 0(a1)
+; RV64I-NEXT: lbu a6, 2(a1)
+; RV64I-NEXT: lbu a1, 3(a1)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a1, a1, 24
+; RV64I-NEXT: or a1, a1, a6
+; RV64I-NEXT: or a1, a1, a4
+; RV64I-NEXT: slli a1, a1, 3
+; RV64I-NEXT: slli a3, a3, 35
+; RV64I-NEXT: or a1, a3, a1
+; RV64I-NEXT: srl a0, a0, a1
+; RV64I-NEXT: sb a0, 0(a2)
+; RV64I-NEXT: srli a1, a0, 48
+; RV64I-NEXT: sb a1, 6(a2)
+; RV64I-NEXT: srli a1, a0, 56
+; RV64I-NEXT: sb a1, 7(a2)
+; RV64I-NEXT: srli a1, a0, 32
+; RV64I-NEXT: sb a1, 4(a2)
+; RV64I-NEXT: srli a1, a0, 40
+; RV64I-NEXT: sb a1, 5(a2)
+; RV64I-NEXT: srli a1, a0, 16
+; RV64I-NEXT: sb a1, 2(a2)
+; RV64I-NEXT: srli a1, a0, 24
+; RV64I-NEXT: sb a1, 3(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 1(a2)
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: lshr_8bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: lbu a3, 5(a0)
+; RV32I-NEXT: lbu a4, 4(a0)
+; RV32I-NEXT: lbu a5, 6(a0)
+; RV32I-NEXT: lbu a6, 7(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a4, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a6, a6, 24
+; RV32I-NEXT: or a5, a6, a5
+; RV32I-NEXT: lbu a3, 1(a1)
+; RV32I-NEXT: lbu a6, 0(a1)
+; RV32I-NEXT: lbu a7, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a6
+; RV32I-NEXT: slli a7, a7, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, a7
+; RV32I-NEXT: or a1, a1, a3
+; RV32I-NEXT: slli a1, a1, 3
+; RV32I-NEXT: addi a3, a1, -32
+; RV32I-NEXT: or a4, a5, a4
+; RV32I-NEXT: bltz a3, .LBB3_2
+; RV32I-NEXT: # %bb.1:
+; RV32I-NEXT: srl a0, a4, a3
+; RV32I-NEXT: j .LBB3_3
+; RV32I-NEXT: .LBB3_2:
+; RV32I-NEXT: lbu a5, 1(a0)
+; RV32I-NEXT: lbu a6, 0(a0)
+; RV32I-NEXT: lbu a7, 2(a0)
+; RV32I-NEXT: lbu a0, 3(a0)
+; RV32I-NEXT: slli a5, a5, 8
+; RV32I-NEXT: or a5, a5, a6
+; RV32I-NEXT: slli a7, a7, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a0, a0, a7
+; RV32I-NEXT: or a0, a0, a5
+; RV32I-NEXT: srl a0, a0, a1
+; RV32I-NEXT: slli a5, a4, 1
+; RV32I-NEXT: xori a6, a1, 31
+; RV32I-NEXT: sll a5, a5, a6
+; RV32I-NEXT: or a0, a0, a5
+; RV32I-NEXT: .LBB3_3:
+; RV32I-NEXT: srl a1, a4, a1
+; RV32I-NEXT: slti a3, a3, 0
+; RV32I-NEXT: neg a3, a3
+; RV32I-NEXT: and a1, a3, a1
+; RV32I-NEXT: sb a1, 4(a2)
+; RV32I-NEXT: sb a0, 0(a2)
+; RV32I-NEXT: srli a3, a1, 16
+; RV32I-NEXT: sb a3, 6(a2)
+; RV32I-NEXT: srli a3, a1, 24
+; RV32I-NEXT: sb a3, 7(a2)
+; RV32I-NEXT: srli a1, a1, 8
+; RV32I-NEXT: sb a1, 5(a2)
+; RV32I-NEXT: srli a1, a0, 16
+; RV32I-NEXT: sb a1, 2(a2)
+; RV32I-NEXT: srli a1, a0, 24
+; RV32I-NEXT: sb a1, 3(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 1(a2)
+; RV32I-NEXT: ret
+ %src = load i64, ptr %src.ptr, align 1
+ %byteOff = load i64, ptr %byteOff.ptr, align 1
+ %bitOff = shl i64 %byteOff, 3
+ %res = lshr i64 %src, %bitOff
+ store i64 %res, ptr %dst, align 1
+ ret void
+}
+define void @shl_8bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: shl_8bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: lbu a3, 1(a0)
+; RV64I-NEXT: lbu a4, 0(a0)
+; RV64I-NEXT: lbu a5, 2(a0)
+; RV64I-NEXT: lbu a6, 3(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a4, a6, a5
+; RV64I-NEXT: or a3, a4, a3
+; RV64I-NEXT: lbu a4, 5(a0)
+; RV64I-NEXT: lbu a5, 4(a0)
+; RV64I-NEXT: lbu a6, 6(a0)
+; RV64I-NEXT: lbu a0, 7(a0)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, a6
+; RV64I-NEXT: or a0, a0, a4
+; RV64I-NEXT: slli a0, a0, 32
+; RV64I-NEXT: or a0, a0, a3
+; RV64I-NEXT: lbu a3, 5(a1)
+; RV64I-NEXT: lbu a4, 4(a1)
+; RV64I-NEXT: lbu a5, 6(a1)
+; RV64I-NEXT: lbu a6, 7(a1)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a4, a6, a5
+; RV64I-NEXT: or a3, a4, a3
+; RV64I-NEXT: lbu a4, 1(a1)
+; RV64I-NEXT: lbu a5, 0(a1)
+; RV64I-NEXT: lbu a6, 2(a1)
+; RV64I-NEXT: lbu a1, 3(a1)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a1, a1, 24
+; RV64I-NEXT: or a1, a1, a6
+; RV64I-NEXT: or a1, a1, a4
+; RV64I-NEXT: slli a1, a1, 3
+; RV64I-NEXT: slli a3, a3, 35
+; RV64I-NEXT: or a1, a3, a1
+; RV64I-NEXT: sll a0, a0, a1
+; RV64I-NEXT: sb a0, 0(a2)
+; RV64I-NEXT: srli a1, a0, 48
+; RV64I-NEXT: sb a1, 6(a2)
+; RV64I-NEXT: srli a1, a0, 56
+; RV64I-NEXT: sb a1, 7(a2)
+; RV64I-NEXT: srli a1, a0, 32
+; RV64I-NEXT: sb a1, 4(a2)
+; RV64I-NEXT: srli a1, a0, 40
+; RV64I-NEXT: sb a1, 5(a2)
+; RV64I-NEXT: srli a1, a0, 16
+; RV64I-NEXT: sb a1, 2(a2)
+; RV64I-NEXT: srli a1, a0, 24
+; RV64I-NEXT: sb a1, 3(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 1(a2)
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: shl_8bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: lbu a3, 1(a0)
+; RV32I-NEXT: lbu a4, 0(a0)
+; RV32I-NEXT: lbu a5, 2(a0)
+; RV32I-NEXT: lbu a6, 3(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a4, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a6, a6, 24
+; RV32I-NEXT: or a5, a6, a5
+; RV32I-NEXT: lbu a3, 1(a1)
+; RV32I-NEXT: lbu a6, 0(a1)
+; RV32I-NEXT: lbu a7, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a6
+; RV32I-NEXT: slli a7, a7, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, a7
+; RV32I-NEXT: or a1, a1, a3
+; RV32I-NEXT: slli a1, a1, 3
+; RV32I-NEXT: addi a3, a1, -32
+; RV32I-NEXT: or a4, a5, a4
+; RV32I-NEXT: bltz a3, .LBB4_2
+; RV32I-NEXT: # %bb.1:
+; RV32I-NEXT: sll a0, a4, a3
+; RV32I-NEXT: j .LBB4_3
+; RV32I-NEXT: .LBB4_2:
+; RV32I-NEXT: lbu a5, 5(a0)
+; RV32I-NEXT: lbu a6, 4(a0)
+; RV32I-NEXT: lbu a7, 6(a0)
+; RV32I-NEXT: lbu a0, 7(a0)
+; RV32I-NEXT: slli a5, a5, 8
+; RV32I-NEXT: or a5, a5, a6
+; RV32I-NEXT: slli a7, a7, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a0, a0, a7
+; RV32I-NEXT: or a0, a0, a5
+; RV32I-NEXT: sll a0, a0, a1
+; RV32I-NEXT: srli a5, a4, 1
+; RV32I-NEXT: xori a6, a1, 31
+; RV32I-NEXT: srl a5, a5, a6
+; RV32I-NEXT: or a0, a0, a5
+; RV32I-NEXT: .LBB4_3:
+; RV32I-NEXT: sll a1, a4, a1
+; RV32I-NEXT: slti a3, a3, 0
+; RV32I-NEXT: neg a3, a3
+; RV32I-NEXT: and a1, a3, a1
+; RV32I-NEXT: sb a0, 4(a2)
+; RV32I-NEXT: sb a1, 0(a2)
+; RV32I-NEXT: srli a3, a0, 16
+; RV32I-NEXT: sb a3, 6(a2)
+; RV32I-NEXT: srli a3, a0, 24
+; RV32I-NEXT: sb a3, 7(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 5(a2)
+; RV32I-NEXT: srli a0, a1, 16
+; RV32I-NEXT: sb a0, 2(a2)
+; RV32I-NEXT: srli a0, a1, 24
+; RV32I-NEXT: sb a0, 3(a2)
+; RV32I-NEXT: srli a1, a1, 8
+; RV32I-NEXT: sb a1, 1(a2)
+; RV32I-NEXT: ret
+ %src = load i64, ptr %src.ptr, align 1
+ %byteOff = load i64, ptr %byteOff.ptr, align 1
+ %bitOff = shl i64 %byteOff, 3
+ %res = shl i64 %src, %bitOff
+ store i64 %res, ptr %dst, align 1
+ ret void
+}
+define void @ashr_8bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: ashr_8bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: lbu a3, 1(a0)
+; RV64I-NEXT: lbu a4, 0(a0)
+; RV64I-NEXT: lbu a5, 2(a0)
+; RV64I-NEXT: lbu a6, 3(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a4, a6, a5
+; RV64I-NEXT: or a3, a4, a3
+; RV64I-NEXT: lbu a4, 5(a0)
+; RV64I-NEXT: lbu a5, 4(a0)
+; RV64I-NEXT: lbu a6, 6(a0)
+; RV64I-NEXT: lbu a0, 7(a0)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, a6
+; RV64I-NEXT: or a0, a0, a4
+; RV64I-NEXT: slli a0, a0, 32
+; RV64I-NEXT: or a0, a0, a3
+; RV64I-NEXT: lbu a3, 5(a1)
+; RV64I-NEXT: lbu a4, 4(a1)
+; RV64I-NEXT: lbu a5, 6(a1)
+; RV64I-NEXT: lbu a6, 7(a1)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a4, a6, a5
+; RV64I-NEXT: or a3, a4, a3
+; RV64I-NEXT: lbu a4, 1(a1)
+; RV64I-NEXT: lbu a5, 0(a1)
+; RV64I-NEXT: lbu a6, 2(a1)
+; RV64I-NEXT: lbu a1, 3(a1)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a1, a1, 24
+; RV64I-NEXT: or a1, a1, a6
+; RV64I-NEXT: or a1, a1, a4
+; RV64I-NEXT: slli a1, a1, 3
+; RV64I-NEXT: slli a3, a3, 35
+; RV64I-NEXT: or a1, a3, a1
+; RV64I-NEXT: sra a0, a0, a1
+; RV64I-NEXT: sb a0, 0(a2)
+; RV64I-NEXT: srli a1, a0, 48
+; RV64I-NEXT: sb a1, 6(a2)
+; RV64I-NEXT: srli a1, a0, 56
+; RV64I-NEXT: sb a1, 7(a2)
+; RV64I-NEXT: srli a1, a0, 32
+; RV64I-NEXT: sb a1, 4(a2)
+; RV64I-NEXT: srli a1, a0, 40
+; RV64I-NEXT: sb a1, 5(a2)
+; RV64I-NEXT: srli a1, a0, 16
+; RV64I-NEXT: sb a1, 2(a2)
+; RV64I-NEXT: srli a1, a0, 24
+; RV64I-NEXT: sb a1, 3(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 1(a2)
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: ashr_8bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: lbu a3, 5(a0)
+; RV32I-NEXT: lbu a4, 4(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: lbu a5, 1(a1)
+; RV32I-NEXT: or a6, a3, a4
+; RV32I-NEXT: lbu a3, 6(a0)
+; RV32I-NEXT: lbu a4, 0(a1)
+; RV32I-NEXT: slli a5, a5, 8
+; RV32I-NEXT: lbu a7, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: or a5, a5, a4
+; RV32I-NEXT: lbu a4, 7(a0)
+; RV32I-NEXT: slli a7, a7, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, a7
+; RV32I-NEXT: slli a3, a3, 16
+; RV32I-NEXT: slli a4, a4, 24
+; RV32I-NEXT: or a7, a4, a3
+; RV32I-NEXT: or a1, a1, a5
+; RV32I-NEXT: slli a3, a1, 3
+; RV32I-NEXT: addi a5, a3, -32
+; RV32I-NEXT: or a1, a7, a6
+; RV32I-NEXT: bltz a5, .LBB5_2
+; RV32I-NEXT: # %bb.1:
+; RV32I-NEXT: sra a0, a1, a5
+; RV32I-NEXT: srai a1, a4, 31
+; RV32I-NEXT: j .LBB5_3
+; RV32I-NEXT: .LBB5_2:
+; RV32I-NEXT: lbu a4, 1(a0)
+; RV32I-NEXT: lbu a5, 0(a0)
+; RV32I-NEXT: lbu a6, 2(a0)
+; RV32I-NEXT: lbu a0, 3(a0)
+; RV32I-NEXT: slli a4, a4, 8
+; RV32I-NEXT: or a4, a4, a5
+; RV32I-NEXT: slli a6, a6, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a0, a0, a6
+; RV32I-NEXT: or a0, a0, a4
+; RV32I-NEXT: srl a0, a0, a3
+; RV32I-NEXT: slli a4, a1, 1
+; RV32I-NEXT: xori a5, a3, 31
+; RV32I-NEXT: sll a4, a4, a5
+; RV32I-NEXT: or a0, a0, a4
+; RV32I-NEXT: sra a1, a1, a3
+; RV32I-NEXT: .LBB5_3:
+; RV32I-NEXT: sb a1, 4(a2)
+; RV32I-NEXT: srli a3, a1, 16
+; RV32I-NEXT: sb a3, 6(a2)
+; RV32I-NEXT: srli a3, a1, 24
+; RV32I-NEXT: sb a3, 7(a2)
+; RV32I-NEXT: srli a1, a1, 8
+; RV32I-NEXT: sb a1, 5(a2)
+; RV32I-NEXT: sb a0, 0(a2)
+; RV32I-NEXT: srli a1, a0, 16
+; RV32I-NEXT: sb a1, 2(a2)
+; RV32I-NEXT: srli a1, a0, 24
+; RV32I-NEXT: sb a1, 3(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 1(a2)
+; RV32I-NEXT: ret
+ %src = load i64, ptr %src.ptr, align 1
+ %byteOff = load i64, ptr %byteOff.ptr, align 1
+ %bitOff = shl i64 %byteOff, 3
+ %res = ashr i64 %src, %bitOff
+ store i64 %res, ptr %dst, align 1
+ ret void
+}
+
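+; i128 (16-byte) cases: RV64I now does the two-word split, and RV32I expands
+; into a four-word sequence that branches on the 32-, 64-, and 96-bit
+; crossover points and masks away out-of-range partial results.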
+define void @lshr_16bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: lshr_16bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: lbu a3, 9(a0)
+; RV64I-NEXT: lbu a4, 8(a0)
+; RV64I-NEXT: lbu a5, 10(a0)
+; RV64I-NEXT: lbu a6, 11(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a4, a6, a5
+; RV64I-NEXT: or a4, a4, a3
+; RV64I-NEXT: lbu a3, 13(a0)
+; RV64I-NEXT: lbu a5, 12(a0)
+; RV64I-NEXT: lbu a6, 14(a0)
+; RV64I-NEXT: lbu a7, 15(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a7, a7, 24
+; RV64I-NEXT: or a5, a7, a6
+; RV64I-NEXT: or a3, a5, a3
+; RV64I-NEXT: lbu a5, 5(a1)
+; RV64I-NEXT: lbu a6, 4(a1)
+; RV64I-NEXT: lbu a7, 6(a1)
+; RV64I-NEXT: lbu t0, 7(a1)
+; RV64I-NEXT: slli a5, a5, 8
+; RV64I-NEXT: or a5, a5, a6
+; RV64I-NEXT: slli a7, a7, 16
+; RV64I-NEXT: slli t0, t0, 24
+; RV64I-NEXT: or a6, t0, a7
+; RV64I-NEXT: or a5, a6, a5
+; RV64I-NEXT: lbu a6, 1(a1)
+; RV64I-NEXT: lbu a7, 0(a1)
+; RV64I-NEXT: lbu t0, 2(a1)
+; RV64I-NEXT: lbu a1, 3(a1)
+; RV64I-NEXT: slli a6, a6, 8
+; RV64I-NEXT: or a6, a6, a7
+; RV64I-NEXT: slli t0, t0, 16
+; RV64I-NEXT: slli a1, a1, 24
+; RV64I-NEXT: or a1, a1, t0
+; RV64I-NEXT: slli a7, a3, 32
+; RV64I-NEXT: or a1, a1, a6
+; RV64I-NEXT: slli a1, a1, 3
+; RV64I-NEXT: slli a5, a5, 35
+; RV64I-NEXT: or a1, a5, a1
+; RV64I-NEXT: addi a3, a1, -64
+; RV64I-NEXT: or a4, a7, a4
+; RV64I-NEXT: bltz a3, .LBB6_2
+; RV64I-NEXT: # %bb.1:
+; RV64I-NEXT: srl a0, a4, a3
+; RV64I-NEXT: j .LBB6_3
+; RV64I-NEXT: .LBB6_2:
+; RV64I-NEXT: lbu a5, 1(a0)
+; RV64I-NEXT: lbu a6, 0(a0)
+; RV64I-NEXT: lbu a7, 2(a0)
+; RV64I-NEXT: lbu t0, 3(a0)
+; RV64I-NEXT: slli a5, a5, 8
+; RV64I-NEXT: or a5, a5, a6
+; RV64I-NEXT: slli a7, a7, 16
+; RV64I-NEXT: slli t0, t0, 24
+; RV64I-NEXT: or a6, t0, a7
+; RV64I-NEXT: or a5, a6, a5
+; RV64I-NEXT: lbu a6, 5(a0)
+; RV64I-NEXT: lbu a7, 4(a0)
+; RV64I-NEXT: lbu t0, 6(a0)
+; RV64I-NEXT: lbu a0, 7(a0)
+; RV64I-NEXT: slli a6, a6, 8
+; RV64I-NEXT: or a6, a6, a7
+; RV64I-NEXT: slli t0, t0, 16
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, t0
+; RV64I-NEXT: or a0, a0, a6
+; RV64I-NEXT: slli a0, a0, 32
+; RV64I-NEXT: or a0, a0, a5
+; RV64I-NEXT: srl a0, a0, a1
+; RV64I-NEXT: xori a5, a1, 63
+; RV64I-NEXT: slli a6, a4, 1
+; RV64I-NEXT: sll a5, a6, a5
+; RV64I-NEXT: or a0, a0, a5
+; RV64I-NEXT: .LBB6_3:
+; RV64I-NEXT: srl a1, a4, a1
+; RV64I-NEXT: slti a3, a3, 0
+; RV64I-NEXT: neg a3, a3
+; RV64I-NEXT: and a1, a3, a1
+; RV64I-NEXT: sb a1, 8(a2)
+; RV64I-NEXT: sb a0, 0(a2)
+; RV64I-NEXT: srli a3, a1, 56
+; RV64I-NEXT: sb a3, 15(a2)
+; RV64I-NEXT: srli a3, a1, 48
+; RV64I-NEXT: sb a3, 14(a2)
+; RV64I-NEXT: srli a3, a1, 40
+; RV64I-NEXT: sb a3, 13(a2)
+; RV64I-NEXT: srli a3, a1, 32
+; RV64I-NEXT: sb a3, 12(a2)
+; RV64I-NEXT: srli a3, a1, 24
+; RV64I-NEXT: sb a3, 11(a2)
+; RV64I-NEXT: srli a3, a1, 16
+; RV64I-NEXT: sb a3, 10(a2)
+; RV64I-NEXT: srli a1, a1, 8
+; RV64I-NEXT: sb a1, 9(a2)
+; RV64I-NEXT: srli a1, a0, 56
+; RV64I-NEXT: sb a1, 7(a2)
+; RV64I-NEXT: srli a1, a0, 48
+; RV64I-NEXT: sb a1, 6(a2)
+; RV64I-NEXT: srli a1, a0, 40
+; RV64I-NEXT: sb a1, 5(a2)
+; RV64I-NEXT: srli a1, a0, 32
+; RV64I-NEXT: sb a1, 4(a2)
+; RV64I-NEXT: srli a1, a0, 24
+; RV64I-NEXT: sb a1, 3(a2)
+; RV64I-NEXT: srli a1, a0, 16
+; RV64I-NEXT: sb a1, 2(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 1(a2)
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: lshr_16bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: addi sp, sp, -16
+; RV32I-NEXT: sw s0, 12(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s1, 8(sp) # 4-byte Folded Spill
+; RV32I-NEXT: lbu a3, 5(a0)
+; RV32I-NEXT: lbu a4, 4(a0)
+; RV32I-NEXT: lbu a5, 6(a0)
+; RV32I-NEXT: lbu a6, 7(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a7, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a6, a6, 24
+; RV32I-NEXT: or t0, a6, a5
+; RV32I-NEXT: lbu a3, 1(a0)
+; RV32I-NEXT: lbu a4, 0(a0)
+; RV32I-NEXT: lbu a5, 2(a0)
+; RV32I-NEXT: lbu a6, 3(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or t3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a6, a6, 24
+; RV32I-NEXT: or t4, a6, a5
+; RV32I-NEXT: lbu a3, 13(a0)
+; RV32I-NEXT: lbu a4, 12(a0)
+; RV32I-NEXT: lbu a5, 14(a0)
+; RV32I-NEXT: lbu a6, 15(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a6, a6, 24
+; RV32I-NEXT: or a4, a6, a5
+; RV32I-NEXT: or a3, a4, a3
+; RV32I-NEXT: lbu a4, 9(a0)
+; RV32I-NEXT: lbu a5, 8(a0)
+; RV32I-NEXT: lbu a6, 10(a0)
+; RV32I-NEXT: lbu a0, 11(a0)
+; RV32I-NEXT: slli a4, a4, 8
+; RV32I-NEXT: or a4, a4, a5
+; RV32I-NEXT: slli a6, a6, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a5, a0, a6
+; RV32I-NEXT: or a5, a5, a4
+; RV32I-NEXT: lbu a0, 1(a1)
+; RV32I-NEXT: lbu a4, 0(a1)
+; RV32I-NEXT: lbu a6, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: slli a0, a0, 8
+; RV32I-NEXT: or a0, a0, a4
+; RV32I-NEXT: slli a6, a6, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, a6
+; RV32I-NEXT: or a0, a1, a0
+; RV32I-NEXT: slli a4, a0, 3
+; RV32I-NEXT: addi t1, a4, -64
+; RV32I-NEXT: addi t2, a4, -96
+; RV32I-NEXT: slli a6, a3, 1
+; RV32I-NEXT: bltz t2, .LBB6_2
+; RV32I-NEXT: # %bb.1:
+; RV32I-NEXT: srl t5, a3, t2
+; RV32I-NEXT: j .LBB6_3
+; RV32I-NEXT: .LBB6_2:
+; RV32I-NEXT: srl a0, a5, t1
+; RV32I-NEXT: xori a1, t1, 31
+; RV32I-NEXT: sll a1, a6, a1
+; RV32I-NEXT: or t5, a0, a1
+; RV32I-NEXT: .LBB6_3:
+; RV32I-NEXT: or a0, t0, a7
+; RV32I-NEXT: or a1, t4, t3
+; RV32I-NEXT: addi t0, a4, -32
+; RV32I-NEXT: xori a7, a4, 31
+; RV32I-NEXT: bltz t0, .LBB6_5
+; RV32I-NEXT: # %bb.4:
+; RV32I-NEXT: srl s1, a0, t0
+; RV32I-NEXT: j .LBB6_6
+; RV32I-NEXT: .LBB6_5:
+; RV32I-NEXT: srl t3, a1, a4
+; RV32I-NEXT: slli t4, a0, 1
+; RV32I-NEXT: sll t4, t4, a7
+; RV32I-NEXT: or s1, t3, t4
+; RV32I-NEXT: .LBB6_6:
+; RV32I-NEXT: neg t3, a4
+; RV32I-NEXT: sll t4, a5, t3
+; RV32I-NEXT: li s0, 32
+; RV32I-NEXT: li t6, 64
+; RV32I-NEXT: sub s0, s0, a4
+; RV32I-NEXT: bltu a4, t6, .LBB6_12
+; RV32I-NEXT: # %bb.7:
+; RV32I-NEXT: bnez a4, .LBB6_13
+; RV32I-NEXT: .LBB6_8:
+; RV32I-NEXT: bgez s0, .LBB6_10
+; RV32I-NEXT: .LBB6_9:
+; RV32I-NEXT: sll t3, a3, t3
+; RV32I-NEXT: srli t4, a5, 1
+; RV32I-NEXT: sub t5, t6, a4
+; RV32I-NEXT: xori t5, t5, 31
+; RV32I-NEXT: srl t4, t4, t5
+; RV32I-NEXT: or t4, t3, t4
+; RV32I-NEXT: .LBB6_10:
+; RV32I-NEXT: slti t3, t0, 0
+; RV32I-NEXT: neg t3, t3
+; RV32I-NEXT: bltu a4, t6, .LBB6_14
+; RV32I-NEXT: # %bb.11:
+; RV32I-NEXT: srl t1, a3, t1
+; RV32I-NEXT: slti t2, t2, 0
+; RV32I-NEXT: neg t2, t2
+; RV32I-NEXT: and t1, t2, t1
+; RV32I-NEXT: bnez a4, .LBB6_15
+; RV32I-NEXT: j .LBB6_16
+; RV32I-NEXT: .LBB6_12:
+; RV32I-NEXT: slti t5, s0, 0
+; RV32I-NEXT: neg t5, t5
+; RV32I-NEXT: and t5, t5, t4
+; RV32I-NEXT: or t5, s1, t5
+; RV32I-NEXT: beqz a4, .LBB6_8
+; RV32I-NEXT: .LBB6_13:
+; RV32I-NEXT: mv a1, t5
+; RV32I-NEXT: bltz s0, .LBB6_9
+; RV32I-NEXT: j .LBB6_10
+; RV32I-NEXT: .LBB6_14:
+; RV32I-NEXT: srl t1, a0, a4
+; RV32I-NEXT: and t1, t3, t1
+; RV32I-NEXT: or t1, t1, t4
+; RV32I-NEXT: beqz a4, .LBB6_16
+; RV32I-NEXT: .LBB6_15:
+; RV32I-NEXT: mv a0, t1
+; RV32I-NEXT: .LBB6_16:
+; RV32I-NEXT: bltz t0, .LBB6_18
+; RV32I-NEXT: # %bb.17:
+; RV32I-NEXT: srl a5, a3, t0
+; RV32I-NEXT: j .LBB6_19
+; RV32I-NEXT: .LBB6_18:
+; RV32I-NEXT: srl a5, a5, a4
+; RV32I-NEXT: sll a6, a6, a7
+; RV32I-NEXT: or a5, a5, a6
+; RV32I-NEXT: .LBB6_19:
+; RV32I-NEXT: sltiu a6, a4, 64
+; RV32I-NEXT: neg a6, a6
+; RV32I-NEXT: and a5, a6, a5
+; RV32I-NEXT: srl a3, a3, a4
+; RV32I-NEXT: and a3, t3, a3
+; RV32I-NEXT: and a3, a6, a3
+; RV32I-NEXT: sb a5, 8(a2)
+; RV32I-NEXT: sb a3, 12(a2)
+; RV32I-NEXT: srli a4, a5, 16
+; RV32I-NEXT: sb a4, 10(a2)
+; RV32I-NEXT: srli a4, a5, 24
+; RV32I-NEXT: sb a4, 11(a2)
+; RV32I-NEXT: srli a5, a5, 8
+; RV32I-NEXT: sb a5, 9(a2)
+; RV32I-NEXT: srli a4, a3, 16
+; RV32I-NEXT: sb a4, 14(a2)
+; RV32I-NEXT: srli a4, a3, 24
+; RV32I-NEXT: sb a4, 15(a2)
+; RV32I-NEXT: srli a3, a3, 8
+; RV32I-NEXT: sb a3, 13(a2)
+; RV32I-NEXT: sb a1, 0(a2)
+; RV32I-NEXT: srli a3, a1, 16
+; RV32I-NEXT: sb a3, 2(a2)
+; RV32I-NEXT: srli a3, a1, 24
+; RV32I-NEXT: sb a3, 3(a2)
+; RV32I-NEXT: srli a1, a1, 8
+; RV32I-NEXT: sb a1, 1(a2)
+; RV32I-NEXT: sb a0, 4(a2)
+; RV32I-NEXT: srli a1, a0, 16
+; RV32I-NEXT: sb a1, 6(a2)
+; RV32I-NEXT: srli a1, a0, 24
+; RV32I-NEXT: sb a1, 7(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 5(a2)
+; RV32I-NEXT: lw s0, 12(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s1, 8(sp) # 4-byte Folded Reload
+; RV32I-NEXT: addi sp, sp, 16
+; RV32I-NEXT: ret
+ %src = load i128, ptr %src.ptr, align 1
+ %byteOff = load i128, ptr %byteOff.ptr, align 1
+ %bitOff = shl i128 %byteOff, 3
+ %res = lshr i128 %src, %bitOff
+ store i128 %res, ptr %dst, align 1
+ ret void
+}
+define void @shl_16bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: shl_16bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: lbu a3, 1(a0)
+; RV64I-NEXT: lbu a4, 0(a0)
+; RV64I-NEXT: lbu a5, 2(a0)
+; RV64I-NEXT: lbu a6, 3(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a4, a6, a5
+; RV64I-NEXT: or a4, a4, a3
+; RV64I-NEXT: lbu a3, 5(a0)
+; RV64I-NEXT: lbu a5, 4(a0)
+; RV64I-NEXT: lbu a6, 6(a0)
+; RV64I-NEXT: lbu a7, 7(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a7, a7, 24
+; RV64I-NEXT: or a5, a7, a6
+; RV64I-NEXT: or a3, a5, a3
+; RV64I-NEXT: lbu a5, 5(a1)
+; RV64I-NEXT: lbu a6, 4(a1)
+; RV64I-NEXT: lbu a7, 6(a1)
+; RV64I-NEXT: lbu t0, 7(a1)
+; RV64I-NEXT: slli a5, a5, 8
+; RV64I-NEXT: or a5, a5, a6
+; RV64I-NEXT: slli a7, a7, 16
+; RV64I-NEXT: slli t0, t0, 24
+; RV64I-NEXT: or a6, t0, a7
+; RV64I-NEXT: or a5, a6, a5
+; RV64I-NEXT: lbu a6, 1(a1)
+; RV64I-NEXT: lbu a7, 0(a1)
+; RV64I-NEXT: lbu t0, 2(a1)
+; RV64I-NEXT: lbu a1, 3(a1)
+; RV64I-NEXT: slli a6, a6, 8
+; RV64I-NEXT: or a6, a6, a7
+; RV64I-NEXT: slli t0, t0, 16
+; RV64I-NEXT: slli a1, a1, 24
+; RV64I-NEXT: or a1, a1, t0
+; RV64I-NEXT: slli a7, a3, 32
+; RV64I-NEXT: or a1, a1, a6
+; RV64I-NEXT: slli a1, a1, 3
+; RV64I-NEXT: slli a5, a5, 35
+; RV64I-NEXT: or a1, a5, a1
+; RV64I-NEXT: addi a3, a1, -64
+; RV64I-NEXT: or a4, a7, a4
+; RV64I-NEXT: bltz a3, .LBB7_2
+; RV64I-NEXT: # %bb.1:
+; RV64I-NEXT: sll a0, a4, a3
+; RV64I-NEXT: j .LBB7_3
+; RV64I-NEXT: .LBB7_2:
+; RV64I-NEXT: lbu a5, 9(a0)
+; RV64I-NEXT: lbu a6, 8(a0)
+; RV64I-NEXT: lbu a7, 10(a0)
+; RV64I-NEXT: lbu t0, 11(a0)
+; RV64I-NEXT: slli a5, a5, 8
+; RV64I-NEXT: or a5, a5, a6
+; RV64I-NEXT: slli a7, a7, 16
+; RV64I-NEXT: slli t0, t0, 24
+; RV64I-NEXT: or a6, t0, a7
+; RV64I-NEXT: or a5, a6, a5
+; RV64I-NEXT: lbu a6, 13(a0)
+; RV64I-NEXT: lbu a7, 12(a0)
+; RV64I-NEXT: lbu t0, 14(a0)
+; RV64I-NEXT: lbu a0, 15(a0)
+; RV64I-NEXT: slli a6, a6, 8
+; RV64I-NEXT: or a6, a6, a7
+; RV64I-NEXT: slli t0, t0, 16
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, t0
+; RV64I-NEXT: or a0, a0, a6
+; RV64I-NEXT: slli a0, a0, 32
+; RV64I-NEXT: or a0, a0, a5
+; RV64I-NEXT: sll a0, a0, a1
+; RV64I-NEXT: xori a5, a1, 63
+; RV64I-NEXT: srli a6, a4, 1
+; RV64I-NEXT: srl a5, a6, a5
+; RV64I-NEXT: or a0, a0, a5
+; RV64I-NEXT: .LBB7_3:
+; RV64I-NEXT: sll a1, a4, a1
+; RV64I-NEXT: slti a3, a3, 0
+; RV64I-NEXT: neg a3, a3
+; RV64I-NEXT: and a1, a3, a1
+; RV64I-NEXT: sb a0, 8(a2)
+; RV64I-NEXT: sb a1, 0(a2)
+; RV64I-NEXT: srli a3, a0, 56
+; RV64I-NEXT: sb a3, 15(a2)
+; RV64I-NEXT: srli a3, a0, 48
+; RV64I-NEXT: sb a3, 14(a2)
+; RV64I-NEXT: srli a3, a0, 40
+; RV64I-NEXT: sb a3, 13(a2)
+; RV64I-NEXT: srli a3, a0, 32
+; RV64I-NEXT: sb a3, 12(a2)
+; RV64I-NEXT: srli a3, a0, 24
+; RV64I-NEXT: sb a3, 11(a2)
+; RV64I-NEXT: srli a3, a0, 16
+; RV64I-NEXT: sb a3, 10(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 9(a2)
+; RV64I-NEXT: srli a0, a1, 56
+; RV64I-NEXT: sb a0, 7(a2)
+; RV64I-NEXT: srli a0, a1, 48
+; RV64I-NEXT: sb a0, 6(a2)
+; RV64I-NEXT: srli a0, a1, 40
+; RV64I-NEXT: sb a0, 5(a2)
+; RV64I-NEXT: srli a0, a1, 32
+; RV64I-NEXT: sb a0, 4(a2)
+; RV64I-NEXT: srli a0, a1, 24
+; RV64I-NEXT: sb a0, 3(a2)
+; RV64I-NEXT: srli a0, a1, 16
+; RV64I-NEXT: sb a0, 2(a2)
+; RV64I-NEXT: srli a1, a1, 8
+; RV64I-NEXT: sb a1, 1(a2)
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: shl_16bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: addi sp, sp, -16
+; RV32I-NEXT: sw s0, 12(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s1, 8(sp) # 4-byte Folded Spill
+; RV32I-NEXT: lbu a3, 9(a0)
+; RV32I-NEXT: lbu a4, 8(a0)
+; RV32I-NEXT: lbu a5, 10(a0)
+; RV32I-NEXT: lbu a6, 11(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a7, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a6, a6, 24
+; RV32I-NEXT: or t0, a6, a5
+; RV32I-NEXT: lbu a3, 13(a0)
+; RV32I-NEXT: lbu a4, 12(a0)
+; RV32I-NEXT: lbu a5, 14(a0)
+; RV32I-NEXT: lbu a6, 15(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or t3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a6, a6, 24
+; RV32I-NEXT: or t4, a6, a5
+; RV32I-NEXT: lbu a3, 1(a0)
+; RV32I-NEXT: lbu a4, 0(a0)
+; RV32I-NEXT: lbu a5, 2(a0)
+; RV32I-NEXT: lbu a6, 3(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a6, a6, 24
+; RV32I-NEXT: or a4, a6, a5
+; RV32I-NEXT: or a3, a4, a3
+; RV32I-NEXT: lbu a4, 5(a0)
+; RV32I-NEXT: lbu a5, 4(a0)
+; RV32I-NEXT: lbu a6, 6(a0)
+; RV32I-NEXT: lbu a0, 7(a0)
+; RV32I-NEXT: slli a4, a4, 8
+; RV32I-NEXT: or a4, a4, a5
+; RV32I-NEXT: slli a6, a6, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a5, a0, a6
+; RV32I-NEXT: or a5, a5, a4
+; RV32I-NEXT: lbu a0, 1(a1)
+; RV32I-NEXT: lbu a4, 0(a1)
+; RV32I-NEXT: lbu a6, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: slli a0, a0, 8
+; RV32I-NEXT: or a0, a0, a4
+; RV32I-NEXT: slli a6, a6, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, a6
+; RV32I-NEXT: or a0, a1, a0
+; RV32I-NEXT: slli a4, a0, 3
+; RV32I-NEXT: addi t1, a4, -64
+; RV32I-NEXT: addi t2, a4, -96
+; RV32I-NEXT: srli a6, a3, 1
+; RV32I-NEXT: bltz t2, .LBB7_2
+; RV32I-NEXT: # %bb.1:
+; RV32I-NEXT: sll t5, a3, t2
+; RV32I-NEXT: j .LBB7_3
+; RV32I-NEXT: .LBB7_2:
+; RV32I-NEXT: sll a0, a5, t1
+; RV32I-NEXT: xori a1, t1, 31
+; RV32I-NEXT: srl a1, a6, a1
+; RV32I-NEXT: or t5, a0, a1
+; RV32I-NEXT: .LBB7_3:
+; RV32I-NEXT: or a0, t0, a7
+; RV32I-NEXT: or a1, t4, t3
+; RV32I-NEXT: addi t0, a4, -32
+; RV32I-NEXT: xori a7, a4, 31
+; RV32I-NEXT: bltz t0, .LBB7_5
+; RV32I-NEXT: # %bb.4:
+; RV32I-NEXT: sll s1, a0, t0
+; RV32I-NEXT: j .LBB7_6
+; RV32I-NEXT: .LBB7_5:
+; RV32I-NEXT: sll t3, a1, a4
+; RV32I-NEXT: srli t4, a0, 1
+; RV32I-NEXT: srl t4, t4, a7
+; RV32I-NEXT: or s1, t3, t4
+; RV32I-NEXT: .LBB7_6:
+; RV32I-NEXT: neg t3, a4
+; RV32I-NEXT: srl t4, a5, t3
+; RV32I-NEXT: li s0, 32
+; RV32I-NEXT: li t6, 64
+; RV32I-NEXT: sub s0, s0, a4
+; RV32I-NEXT: bltu a4, t6, .LBB7_12
+; RV32I-NEXT: # %bb.7:
+; RV32I-NEXT: bnez a4, .LBB7_13
+; RV32I-NEXT: .LBB7_8:
+; RV32I-NEXT: bgez s0, .LBB7_10
+; RV32I-NEXT: .LBB7_9:
+; RV32I-NEXT: srl t3, a3, t3
+; RV32I-NEXT: slli t4, a5, 1
+; RV32I-NEXT: sub t5, t6, a4
+; RV32I-NEXT: xori t5, t5, 31
+; RV32I-NEXT: sll t4, t4, t5
+; RV32I-NEXT: or t4, t3, t4
+; RV32I-NEXT: .LBB7_10:
+; RV32I-NEXT: slti t3, t0, 0
+; RV32I-NEXT: neg t3, t3
+; RV32I-NEXT: bltu a4, t6, .LBB7_14
+; RV32I-NEXT: # %bb.11:
+; RV32I-NEXT: sll t1, a3, t1
+; RV32I-NEXT: slti t2, t2, 0
+; RV32I-NEXT: neg t2, t2
+; RV32I-NEXT: and t1, t2, t1
+; RV32I-NEXT: bnez a4, .LBB7_15
+; RV32I-NEXT: j .LBB7_16
+; RV32I-NEXT: .LBB7_12:
+; RV32I-NEXT: slti t5, s0, 0
+; RV32I-NEXT: neg t5, t5
+; RV32I-NEXT: and t5, t5, t4
+; RV32I-NEXT: or t5, s1, t5
+; RV32I-NEXT: beqz a4, .LBB7_8
+; RV32I-NEXT: .LBB7_13:
+; RV32I-NEXT: mv a1, t5
+; RV32I-NEXT: bltz s0, .LBB7_9
+; RV32I-NEXT: j .LBB7_10
+; RV32I-NEXT: .LBB7_14:
+; RV32I-NEXT: sll t1, a0, a4
+; RV32I-NEXT: and t1, t3, t1
+; RV32I-NEXT: or t1, t1, t4
+; RV32I-NEXT: beqz a4, .LBB7_16
+; RV32I-NEXT: .LBB7_15:
+; RV32I-NEXT: mv a0, t1
+; RV32I-NEXT: .LBB7_16:
+; RV32I-NEXT: bltz t0, .LBB7_18
+; RV32I-NEXT: # %bb.17:
+; RV32I-NEXT: sll a5, a3, t0
+; RV32I-NEXT: j .LBB7_19
+; RV32I-NEXT: .LBB7_18:
+; RV32I-NEXT: sll a5, a5, a4
+; RV32I-NEXT: srl a6, a6, a7
+; RV32I-NEXT: or a5, a5, a6
+; RV32I-NEXT: .LBB7_19:
+; RV32I-NEXT: sltiu a6, a4, 64
+; RV32I-NEXT: neg a6, a6
+; RV32I-NEXT: and a5, a6, a5
+; RV32I-NEXT: sll a3, a3, a4
+; RV32I-NEXT: and a3, t3, a3
+; RV32I-NEXT: and a3, a6, a3
+; RV32I-NEXT: sb a3, 0(a2)
+; RV32I-NEXT: sb a5, 4(a2)
+; RV32I-NEXT: srli a4, a3, 16
+; RV32I-NEXT: sb a4, 2(a2)
+; RV32I-NEXT: srli a4, a3, 24
+; RV32I-NEXT: sb a4, 3(a2)
+; RV32I-NEXT: srli a3, a3, 8
+; RV32I-NEXT: sb a3, 1(a2)
+; RV32I-NEXT: srli a3, a5, 16
+; RV32I-NEXT: sb a3, 6(a2)
+; RV32I-NEXT: srli a3, a5, 24
+; RV32I-NEXT: sb a3, 7(a2)
+; RV32I-NEXT: srli a5, a5, 8
+; RV32I-NEXT: sb a5, 5(a2)
+; RV32I-NEXT: sb a1, 12(a2)
+; RV32I-NEXT: sb a0, 8(a2)
+; RV32I-NEXT: srli a3, a1, 16
+; RV32I-NEXT: sb a3, 14(a2)
+; RV32I-NEXT: srli a3, a1, 24
+; RV32I-NEXT: sb a3, 15(a2)
+; RV32I-NEXT: srli a1, a1, 8
+; RV32I-NEXT: sb a1, 13(a2)
+; RV32I-NEXT: srli a1, a0, 16
+; RV32I-NEXT: sb a1, 10(a2)
+; RV32I-NEXT: srli a1, a0, 24
+; RV32I-NEXT: sb a1, 11(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 9(a2)
+; RV32I-NEXT: lw s0, 12(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s1, 8(sp) # 4-byte Folded Reload
+; RV32I-NEXT: addi sp, sp, 16
+; RV32I-NEXT: ret
+ %src = load i128, ptr %src.ptr, align 1
+ %byteOff = load i128, ptr %byteOff.ptr, align 1
+ %bitOff = shl i128 %byteOff, 3
+ %res = shl i128 %src, %bitOff
+ store i128 %res, ptr %dst, align 1
+ ret void
+}
+define void @ashr_16bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: ashr_16bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: lbu a3, 9(a0)
+; RV64I-NEXT: lbu a4, 8(a0)
+; RV64I-NEXT: lbu a5, 10(a0)
+; RV64I-NEXT: lbu a6, 11(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a4, a6, a5
+; RV64I-NEXT: or a3, a4, a3
+; RV64I-NEXT: lbu a4, 13(a0)
+; RV64I-NEXT: lbu a5, 12(a0)
+; RV64I-NEXT: lbu a6, 14(a0)
+; RV64I-NEXT: lbu a7, 15(a0)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a7, a7, 24
+; RV64I-NEXT: or a5, a7, a6
+; RV64I-NEXT: or a4, a5, a4
+; RV64I-NEXT: lbu a5, 5(a1)
+; RV64I-NEXT: lbu a6, 4(a1)
+; RV64I-NEXT: lbu a7, 6(a1)
+; RV64I-NEXT: lbu t0, 7(a1)
+; RV64I-NEXT: slli a5, a5, 8
+; RV64I-NEXT: or a5, a5, a6
+; RV64I-NEXT: slli a7, a7, 16
+; RV64I-NEXT: slli t0, t0, 24
+; RV64I-NEXT: or a6, t0, a7
+; RV64I-NEXT: or a5, a6, a5
+; RV64I-NEXT: lbu a6, 1(a1)
+; RV64I-NEXT: lbu a7, 0(a1)
+; RV64I-NEXT: lbu t0, 2(a1)
+; RV64I-NEXT: lbu a1, 3(a1)
+; RV64I-NEXT: slli a6, a6, 8
+; RV64I-NEXT: or a6, a6, a7
+; RV64I-NEXT: slli t0, t0, 16
+; RV64I-NEXT: slli a1, a1, 24
+; RV64I-NEXT: or a1, a1, t0
+; RV64I-NEXT: slli a7, a4, 32
+; RV64I-NEXT: or a1, a1, a6
+; RV64I-NEXT: slli a1, a1, 3
+; RV64I-NEXT: slli a5, a5, 35
+; RV64I-NEXT: or a1, a5, a1
+; RV64I-NEXT: addi a5, a1, -64
+; RV64I-NEXT: or a3, a7, a3
+; RV64I-NEXT: bltz a5, .LBB8_2
+; RV64I-NEXT: # %bb.1:
+; RV64I-NEXT: sra a0, a3, a5
+; RV64I-NEXT: sraiw a1, a4, 31
+; RV64I-NEXT: j .LBB8_3
+; RV64I-NEXT: .LBB8_2:
+; RV64I-NEXT: lbu a4, 1(a0)
+; RV64I-NEXT: lbu a5, 0(a0)
+; RV64I-NEXT: lbu a6, 2(a0)
+; RV64I-NEXT: lbu a7, 3(a0)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a7, a7, 24
+; RV64I-NEXT: or a5, a7, a6
+; RV64I-NEXT: or a4, a5, a4
+; RV64I-NEXT: lbu a5, 5(a0)
+; RV64I-NEXT: lbu a6, 4(a0)
+; RV64I-NEXT: lbu a7, 6(a0)
+; RV64I-NEXT: lbu a0, 7(a0)
+; RV64I-NEXT: slli a5, a5, 8
+; RV64I-NEXT: or a5, a5, a6
+; RV64I-NEXT: slli a7, a7, 16
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, a7
+; RV64I-NEXT: or a0, a0, a5
+; RV64I-NEXT: slli a0, a0, 32
+; RV64I-NEXT: or a0, a0, a4
+; RV64I-NEXT: srl a0, a0, a1
+; RV64I-NEXT: xori a4, a1, 63
+; RV64I-NEXT: slli a5, a3, 1
+; RV64I-NEXT: sll a4, a5, a4
+; RV64I-NEXT: or a0, a0, a4
+; RV64I-NEXT: sra a1, a3, a1
+; RV64I-NEXT: .LBB8_3:
+; RV64I-NEXT: sb a1, 8(a2)
+; RV64I-NEXT: srli a3, a1, 56
+; RV64I-NEXT: sb a3, 15(a2)
+; RV64I-NEXT: srli a3, a1, 48
+; RV64I-NEXT: sb a3, 14(a2)
+; RV64I-NEXT: srli a3, a1, 40
+; RV64I-NEXT: sb a3, 13(a2)
+; RV64I-NEXT: srli a3, a1, 32
+; RV64I-NEXT: sb a3, 12(a2)
+; RV64I-NEXT: srli a3, a1, 24
+; RV64I-NEXT: sb a3, 11(a2)
+; RV64I-NEXT: srli a3, a1, 16
+; RV64I-NEXT: sb a3, 10(a2)
+; RV64I-NEXT: srli a1, a1, 8
+; RV64I-NEXT: sb a1, 9(a2)
+; RV64I-NEXT: sb a0, 0(a2)
+; RV64I-NEXT: srli a1, a0, 56
+; RV64I-NEXT: sb a1, 7(a2)
+; RV64I-NEXT: srli a1, a0, 48
+; RV64I-NEXT: sb a1, 6(a2)
+; RV64I-NEXT: srli a1, a0, 40
+; RV64I-NEXT: sb a1, 5(a2)
+; RV64I-NEXT: srli a1, a0, 32
+; RV64I-NEXT: sb a1, 4(a2)
+; RV64I-NEXT: srli a1, a0, 24
+; RV64I-NEXT: sb a1, 3(a2)
+; RV64I-NEXT: srli a1, a0, 16
+; RV64I-NEXT: sb a1, 2(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 1(a2)
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: ashr_16bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: addi sp, sp, -16
+; RV32I-NEXT: sw s0, 12(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s1, 8(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s2, 4(sp) # 4-byte Folded Spill
+; RV32I-NEXT: lbu a3, 5(a0)
+; RV32I-NEXT: lbu a4, 4(a0)
+; RV32I-NEXT: lbu a5, 6(a0)
+; RV32I-NEXT: lbu a6, 7(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a7, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a6, a6, 24
+; RV32I-NEXT: or t1, a6, a5
+; RV32I-NEXT: lbu a3, 1(a0)
+; RV32I-NEXT: lbu a4, 0(a0)
+; RV32I-NEXT: lbu a5, 2(a0)
+; RV32I-NEXT: lbu a6, 3(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or t2, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli a6, a6, 24
+; RV32I-NEXT: or t5, a6, a5
+; RV32I-NEXT: lbu a3, 13(a0)
+; RV32I-NEXT: lbu a4, 12(a0)
+; RV32I-NEXT: lbu a5, 14(a0)
+; RV32I-NEXT: lbu t0, 15(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli t0, t0, 24
+; RV32I-NEXT: or a4, t0, a5
+; RV32I-NEXT: or a3, a4, a3
+; RV32I-NEXT: lbu a4, 9(a0)
+; RV32I-NEXT: lbu a5, 8(a0)
+; RV32I-NEXT: lbu a6, 10(a0)
+; RV32I-NEXT: lbu a0, 11(a0)
+; RV32I-NEXT: slli a4, a4, 8
+; RV32I-NEXT: or a4, a4, a5
+; RV32I-NEXT: slli a6, a6, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a5, a0, a6
+; RV32I-NEXT: or a5, a5, a4
+; RV32I-NEXT: lbu a0, 1(a1)
+; RV32I-NEXT: lbu a4, 0(a1)
+; RV32I-NEXT: lbu a6, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: slli a0, a0, 8
+; RV32I-NEXT: or a0, a0, a4
+; RV32I-NEXT: slli a6, a6, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, a6
+; RV32I-NEXT: or a0, a1, a0
+; RV32I-NEXT: slli a4, a0, 3
+; RV32I-NEXT: addi t3, a4, -64
+; RV32I-NEXT: addi t4, a4, -96
+; RV32I-NEXT: slli a6, a3, 1
+; RV32I-NEXT: bltz t4, .LBB8_2
+; RV32I-NEXT: # %bb.1:
+; RV32I-NEXT: sra t6, a3, t4
+; RV32I-NEXT: j .LBB8_3
+; RV32I-NEXT: .LBB8_2:
+; RV32I-NEXT: srl a0, a5, t3
+; RV32I-NEXT: xori a1, t3, 31
+; RV32I-NEXT: sll a1, a6, a1
+; RV32I-NEXT: or t6, a0, a1
+; RV32I-NEXT: .LBB8_3:
+; RV32I-NEXT: or a0, t1, a7
+; RV32I-NEXT: or a1, t5, t2
+; RV32I-NEXT: addi a7, a4, -32
+; RV32I-NEXT: xori t2, a4, 31
+; RV32I-NEXT: bltz a7, .LBB8_5
+; RV32I-NEXT: # %bb.4:
+; RV32I-NEXT: srl s2, a0, a7
+; RV32I-NEXT: j .LBB8_6
+; RV32I-NEXT: .LBB8_5:
+; RV32I-NEXT: srl t1, a1, a4
+; RV32I-NEXT: slli t5, a0, 1
+; RV32I-NEXT: sll t5, t5, t2
+; RV32I-NEXT: or s2, t1, t5
+; RV32I-NEXT: .LBB8_6:
+; RV32I-NEXT: neg s0, a4
+; RV32I-NEXT: sll t5, a5, s0
+; RV32I-NEXT: li s1, 32
+; RV32I-NEXT: li t1, 64
+; RV32I-NEXT: sub s1, s1, a4
+; RV32I-NEXT: bltu a4, t1, .LBB8_11
+; RV32I-NEXT: # %bb.7:
+; RV32I-NEXT: bnez a4, .LBB8_12
+; RV32I-NEXT: .LBB8_8:
+; RV32I-NEXT: bltz s1, .LBB8_13
+; RV32I-NEXT: .LBB8_9:
+; RV32I-NEXT: srai t0, t0, 31
+; RV32I-NEXT: bltz t4, .LBB8_14
+; RV32I-NEXT: .LBB8_10:
+; RV32I-NEXT: mv t3, t0
+; RV32I-NEXT: bltu a4, t1, .LBB8_15
+; RV32I-NEXT: j .LBB8_16
+; RV32I-NEXT: .LBB8_11:
+; RV32I-NEXT: slti t6, s1, 0
+; RV32I-NEXT: neg t6, t6
+; RV32I-NEXT: and t6, t6, t5
+; RV32I-NEXT: or t6, s2, t6
+; RV32I-NEXT: beqz a4, .LBB8_8
+; RV32I-NEXT: .LBB8_12:
+; RV32I-NEXT: mv a1, t6
+; RV32I-NEXT: bgez s1, .LBB8_9
+; RV32I-NEXT: .LBB8_13:
+; RV32I-NEXT: sll t5, a3, s0
+; RV32I-NEXT: srli t6, a5, 1
+; RV32I-NEXT: sub s0, t1, a4
+; RV32I-NEXT: xori s0, s0, 31
+; RV32I-NEXT: srl t6, t6, s0
+; RV32I-NEXT: or t5, t5, t6
+; RV32I-NEXT: srai t0, t0, 31
+; RV32I-NEXT: bgez t4, .LBB8_10
+; RV32I-NEXT: .LBB8_14:
+; RV32I-NEXT: sra t3, a3, t3
+; RV32I-NEXT: bgeu a4, t1, .LBB8_16
+; RV32I-NEXT: .LBB8_15:
+; RV32I-NEXT: slti t3, a7, 0
+; RV32I-NEXT: srl t4, a0, a4
+; RV32I-NEXT: neg t3, t3
+; RV32I-NEXT: and t3, t3, t4
+; RV32I-NEXT: or t3, t3, t5
+; RV32I-NEXT: .LBB8_16:
+; RV32I-NEXT: bnez a4, .LBB8_19
+; RV32I-NEXT: # %bb.17:
+; RV32I-NEXT: bltz a7, .LBB8_20
+; RV32I-NEXT: .LBB8_18:
+; RV32I-NEXT: sra a5, a3, a7
+; RV32I-NEXT: bgeu a4, t1, .LBB8_21
+; RV32I-NEXT: j .LBB8_22
+; RV32I-NEXT: .LBB8_19:
+; RV32I-NEXT: mv a0, t3
+; RV32I-NEXT: bgez a7, .LBB8_18
+; RV32I-NEXT: .LBB8_20:
+; RV32I-NEXT: srl a5, a5, a4
+; RV32I-NEXT: sll a6, a6, t2
+; RV32I-NEXT: or a5, a5, a6
+; RV32I-NEXT: bltu a4, t1, .LBB8_22
+; RV32I-NEXT: .LBB8_21:
+; RV32I-NEXT: mv a5, t0
+; RV32I-NEXT: .LBB8_22:
+; RV32I-NEXT: bltz a7, .LBB8_24
+; RV32I-NEXT: # %bb.23:
+; RV32I-NEXT: mv a3, t0
+; RV32I-NEXT: bgeu a4, t1, .LBB8_25
+; RV32I-NEXT: j .LBB8_26
+; RV32I-NEXT: .LBB8_24:
+; RV32I-NEXT: sra a3, a3, a4
+; RV32I-NEXT: bltu a4, t1, .LBB8_26
+; RV32I-NEXT: .LBB8_25:
+; RV32I-NEXT: mv a3, t0
+; RV32I-NEXT: .LBB8_26:
+; RV32I-NEXT: sb a3, 12(a2)
+; RV32I-NEXT: srli a4, a3, 16
+; RV32I-NEXT: sb a4, 14(a2)
+; RV32I-NEXT: srli a4, a3, 24
+; RV32I-NEXT: sb a4, 15(a2)
+; RV32I-NEXT: srli a3, a3, 8
+; RV32I-NEXT: sb a3, 13(a2)
+; RV32I-NEXT: sb a5, 8(a2)
+; RV32I-NEXT: srli a3, a5, 16
+; RV32I-NEXT: sb a3, 10(a2)
+; RV32I-NEXT: srli a3, a5, 24
+; RV32I-NEXT: sb a3, 11(a2)
+; RV32I-NEXT: srli a5, a5, 8
+; RV32I-NEXT: sb a5, 9(a2)
+; RV32I-NEXT: sb a1, 0(a2)
+; RV32I-NEXT: srli a3, a1, 16
+; RV32I-NEXT: sb a3, 2(a2)
+; RV32I-NEXT: srli a3, a1, 24
+; RV32I-NEXT: sb a3, 3(a2)
+; RV32I-NEXT: srli a1, a1, 8
+; RV32I-NEXT: sb a1, 1(a2)
+; RV32I-NEXT: sb a0, 4(a2)
+; RV32I-NEXT: srli a1, a0, 16
+; RV32I-NEXT: sb a1, 6(a2)
+; RV32I-NEXT: srli a1, a0, 24
+; RV32I-NEXT: sb a1, 7(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 5(a2)
+; RV32I-NEXT: lw s0, 12(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s1, 8(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s2, 4(sp) # 4-byte Folded Reload
+; RV32I-NEXT: addi sp, sp, 16
+; RV32I-NEXT: ret
+ %src = load i128, ptr %src.ptr, align 1
+ %byteOff = load i128, ptr %byteOff.ptr, align 1
+ %bitOff = shl i128 %byteOff, 3
+ %res = ashr i128 %src, %bitOff
+ store i128 %res, ptr %dst, align 1
+ ret void
+}
+
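+; i256 (32-byte) cases: both targets need the full multi-word expansion;
+; RV64I operates on four 64-bit words and RV32I on eight 32-bit words,
+; spilling callee-saved registers to hold the intermediate pieces.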
+define void @lshr_32bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: lshr_32bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: addi sp, sp, -16
+; RV64I-NEXT: sd s0, 8(sp) # 8-byte Folded Spill
+; RV64I-NEXT: sd s1, 0(sp) # 8-byte Folded Spill
+; RV64I-NEXT: lbu a3, 9(a0)
+; RV64I-NEXT: lbu a4, 8(a0)
+; RV64I-NEXT: lbu a5, 10(a0)
+; RV64I-NEXT: lbu a6, 11(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a5, a6, a5
+; RV64I-NEXT: or a5, a5, a3
+; RV64I-NEXT: lbu a3, 13(a0)
+; RV64I-NEXT: lbu a4, 12(a0)
+; RV64I-NEXT: lbu a6, 14(a0)
+; RV64I-NEXT: lbu a7, 15(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a7, a7, 24
+; RV64I-NEXT: or a4, a7, a6
+; RV64I-NEXT: or a6, a4, a3
+; RV64I-NEXT: lbu a3, 1(a0)
+; RV64I-NEXT: lbu a4, 0(a0)
+; RV64I-NEXT: lbu a7, 2(a0)
+; RV64I-NEXT: lbu t0, 3(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a7, a7, 16
+; RV64I-NEXT: slli t0, t0, 24
+; RV64I-NEXT: or a4, t0, a7
+; RV64I-NEXT: or a7, a4, a3
+; RV64I-NEXT: lbu a3, 5(a0)
+; RV64I-NEXT: lbu a4, 4(a0)
+; RV64I-NEXT: lbu t0, 6(a0)
+; RV64I-NEXT: lbu t1, 7(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli t0, t0, 16
+; RV64I-NEXT: slli t1, t1, 24
+; RV64I-NEXT: or a4, t1, t0
+; RV64I-NEXT: or t0, a4, a3
+; RV64I-NEXT: lbu a3, 25(a0)
+; RV64I-NEXT: lbu a4, 24(a0)
+; RV64I-NEXT: lbu t1, 26(a0)
+; RV64I-NEXT: lbu t2, 27(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli t1, t1, 16
+; RV64I-NEXT: slli t2, t2, 24
+; RV64I-NEXT: or a4, t2, t1
+; RV64I-NEXT: or a3, a4, a3
+; RV64I-NEXT: lbu a4, 29(a0)
+; RV64I-NEXT: lbu t1, 28(a0)
+; RV64I-NEXT: lbu t2, 30(a0)
+; RV64I-NEXT: lbu t3, 31(a0)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, t1
+; RV64I-NEXT: slli t2, t2, 16
+; RV64I-NEXT: slli t3, t3, 24
+; RV64I-NEXT: or t1, t3, t2
+; RV64I-NEXT: or a4, t1, a4
+; RV64I-NEXT: slli a4, a4, 32
+; RV64I-NEXT: or a3, a4, a3
+; RV64I-NEXT: lbu a4, 17(a0)
+; RV64I-NEXT: lbu t1, 16(a0)
+; RV64I-NEXT: lbu t2, 18(a0)
+; RV64I-NEXT: lbu t3, 19(a0)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, t1
+; RV64I-NEXT: slli t2, t2, 16
+; RV64I-NEXT: slli t3, t3, 24
+; RV64I-NEXT: or t1, t3, t2
+; RV64I-NEXT: or a4, t1, a4
+; RV64I-NEXT: lbu t1, 21(a0)
+; RV64I-NEXT: lbu t2, 20(a0)
+; RV64I-NEXT: lbu t3, 22(a0)
+; RV64I-NEXT: lbu a0, 23(a0)
+; RV64I-NEXT: slli t1, t1, 8
+; RV64I-NEXT: or t1, t1, t2
+; RV64I-NEXT: slli t3, t3, 16
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, t3
+; RV64I-NEXT: or a0, a0, t1
+; RV64I-NEXT: slli a0, a0, 32
+; RV64I-NEXT: or a4, a0, a4
+; RV64I-NEXT: lbu a0, 5(a1)
+; RV64I-NEXT: lbu t1, 4(a1)
+; RV64I-NEXT: lbu t2, 6(a1)
+; RV64I-NEXT: lbu t3, 7(a1)
+; RV64I-NEXT: slli a0, a0, 8
+; RV64I-NEXT: or a0, a0, t1
+; RV64I-NEXT: slli t2, t2, 16
+; RV64I-NEXT: slli t3, t3, 24
+; RV64I-NEXT: lbu t1, 1(a1)
+; RV64I-NEXT: lbu t4, 0(a1)
+; RV64I-NEXT: or t2, t3, t2
+; RV64I-NEXT: or t2, t2, a0
+; RV64I-NEXT: slli t1, t1, 8
+; RV64I-NEXT: or t1, t1, t4
+; RV64I-NEXT: lbu t3, 2(a1)
+; RV64I-NEXT: lbu t4, 3(a1)
+; RV64I-NEXT: slli a0, a6, 32
+; RV64I-NEXT: slli a1, t0, 32
+; RV64I-NEXT: slli t3, t3, 16
+; RV64I-NEXT: slli t4, t4, 24
+; RV64I-NEXT: or a6, t4, t3
+; RV64I-NEXT: or a6, a6, t1
+; RV64I-NEXT: slli a6, a6, 3
+; RV64I-NEXT: slli t2, t2, 35
+; RV64I-NEXT: or a6, t2, a6
+; RV64I-NEXT: addi t1, a6, -128
+; RV64I-NEXT: addi t2, a6, -192
+; RV64I-NEXT: slli t0, a3, 1
+; RV64I-NEXT: bltz t2, .LBB9_2
+; RV64I-NEXT: # %bb.1:
+; RV64I-NEXT: srl t3, a3, t2
+; RV64I-NEXT: j .LBB9_3
+; RV64I-NEXT: .LBB9_2:
+; RV64I-NEXT: srl t3, a4, t1
+; RV64I-NEXT: xori t4, t1, 63
+; RV64I-NEXT: sll t4, t0, t4
+; RV64I-NEXT: or t3, t3, t4
+; RV64I-NEXT: .LBB9_3:
+; RV64I-NEXT: or a0, a0, a5
+; RV64I-NEXT: or a1, a1, a7
+; RV64I-NEXT: addi a7, a6, -64
+; RV64I-NEXT: xori a5, a6, 63
+; RV64I-NEXT: bltz a7, .LBB9_5
+; RV64I-NEXT: # %bb.4:
+; RV64I-NEXT: srl s1, a0, a7
+; RV64I-NEXT: j .LBB9_6
+; RV64I-NEXT: .LBB9_5:
+; RV64I-NEXT: srl t4, a1, a6
+; RV64I-NEXT: slli t5, a0, 1
+; RV64I-NEXT: sll t5, t5, a5
+; RV64I-NEXT: or s1, t4, t5
+; RV64I-NEXT: .LBB9_6:
+; RV64I-NEXT: negw t6, a6
+; RV64I-NEXT: sll t4, a4, t6
+; RV64I-NEXT: li s0, 64
+; RV64I-NEXT: li t5, 128
+; RV64I-NEXT: sub s0, s0, a6
+; RV64I-NEXT: bltu a6, t5, .LBB9_12
+; RV64I-NEXT: # %bb.7:
+; RV64I-NEXT: bnez a6, .LBB9_13
+; RV64I-NEXT: .LBB9_8:
+; RV64I-NEXT: bgez s0, .LBB9_10
+; RV64I-NEXT: .LBB9_9:
+; RV64I-NEXT: sll t3, a3, t6
+; RV64I-NEXT: srli t4, a4, 1
+; RV64I-NEXT: sub t6, t5, a6
+; RV64I-NEXT: xori t6, t6, 63
+; RV64I-NEXT: srl t4, t4, t6
+; RV64I-NEXT: or t4, t3, t4
+; RV64I-NEXT: .LBB9_10:
+; RV64I-NEXT: slti t3, a7, 0
+; RV64I-NEXT: neg t3, t3
+; RV64I-NEXT: bltu a6, t5, .LBB9_14
+; RV64I-NEXT: # %bb.11:
+; RV64I-NEXT: srl t1, a3, t1
+; RV64I-NEXT: slti t2, t2, 0
+; RV64I-NEXT: neg t2, t2
+; RV64I-NEXT: and t1, t2, t1
+; RV64I-NEXT: bnez a6, .LBB9_15
+; RV64I-NEXT: j .LBB9_16
+; RV64I-NEXT: .LBB9_12:
+; RV64I-NEXT: slti t3, s0, 0
+; RV64I-NEXT: neg t3, t3
+; RV64I-NEXT: and t3, t3, t4
+; RV64I-NEXT: or t3, s1, t3
+; RV64I-NEXT: beqz a6, .LBB9_8
+; RV64I-NEXT: .LBB9_13:
+; RV64I-NEXT: mv a1, t3
+; RV64I-NEXT: bltz s0, .LBB9_9
+; RV64I-NEXT: j .LBB9_10
+; RV64I-NEXT: .LBB9_14:
+; RV64I-NEXT: srl t1, a0, a6
+; RV64I-NEXT: and t1, t3, t1
+; RV64I-NEXT: or t1, t1, t4
+; RV64I-NEXT: beqz a6, .LBB9_16
+; RV64I-NEXT: .LBB9_15:
+; RV64I-NEXT: mv a0, t1
+; RV64I-NEXT: .LBB9_16:
+; RV64I-NEXT: bltz a7, .LBB9_18
+; RV64I-NEXT: # %bb.17:
+; RV64I-NEXT: srl a4, a3, a7
+; RV64I-NEXT: j .LBB9_19
+; RV64I-NEXT: .LBB9_18:
+; RV64I-NEXT: srl a4, a4, a6
+; RV64I-NEXT: sll a5, t0, a5
+; RV64I-NEXT: or a4, a4, a5
+; RV64I-NEXT: .LBB9_19:
+; RV64I-NEXT: sltiu a5, a6, 128
+; RV64I-NEXT: neg a5, a5
+; RV64I-NEXT: and a4, a5, a4
+; RV64I-NEXT: srl a3, a3, a6
+; RV64I-NEXT: and a3, t3, a3
+; RV64I-NEXT: and a3, a5, a3
+; RV64I-NEXT: sb a4, 16(a2)
+; RV64I-NEXT: sb a3, 24(a2)
+; RV64I-NEXT: srli a5, a4, 56
+; RV64I-NEXT: sb a5, 23(a2)
+; RV64I-NEXT: srli a5, a4, 48
+; RV64I-NEXT: sb a5, 22(a2)
+; RV64I-NEXT: srli a5, a4, 40
+; RV64I-NEXT: sb a5, 21(a2)
+; RV64I-NEXT: srli a5, a4, 32
+; RV64I-NEXT: sb a5, 20(a2)
+; RV64I-NEXT: srli a5, a4, 24
+; RV64I-NEXT: sb a5, 19(a2)
+; RV64I-NEXT: srli a5, a4, 16
+; RV64I-NEXT: sb a5, 18(a2)
+; RV64I-NEXT: srli a4, a4, 8
+; RV64I-NEXT: sb a4, 17(a2)
+; RV64I-NEXT: srli a4, a3, 56
+; RV64I-NEXT: sb a4, 31(a2)
+; RV64I-NEXT: srli a4, a3, 48
+; RV64I-NEXT: sb a4, 30(a2)
+; RV64I-NEXT: srli a4, a3, 40
+; RV64I-NEXT: sb a4, 29(a2)
+; RV64I-NEXT: srli a4, a3, 32
+; RV64I-NEXT: sb a4, 28(a2)
+; RV64I-NEXT: srli a4, a3, 24
+; RV64I-NEXT: sb a4, 27(a2)
+; RV64I-NEXT: srli a4, a3, 16
+; RV64I-NEXT: sb a4, 26(a2)
+; RV64I-NEXT: srli a3, a3, 8
+; RV64I-NEXT: sb a3, 25(a2)
+; RV64I-NEXT: sb a1, 0(a2)
+; RV64I-NEXT: srli a3, a1, 56
+; RV64I-NEXT: sb a3, 7(a2)
+; RV64I-NEXT: srli a3, a1, 48
+; RV64I-NEXT: sb a3, 6(a2)
+; RV64I-NEXT: srli a3, a1, 40
+; RV64I-NEXT: sb a3, 5(a2)
+; RV64I-NEXT: srli a3, a1, 32
+; RV64I-NEXT: sb a3, 4(a2)
+; RV64I-NEXT: srli a3, a1, 24
+; RV64I-NEXT: sb a3, 3(a2)
+; RV64I-NEXT: srli a3, a1, 16
+; RV64I-NEXT: sb a3, 2(a2)
+; RV64I-NEXT: srli a1, a1, 8
+; RV64I-NEXT: sb a1, 1(a2)
+; RV64I-NEXT: sb a0, 8(a2)
+; RV64I-NEXT: srli a1, a0, 56
+; RV64I-NEXT: sb a1, 15(a2)
+; RV64I-NEXT: srli a1, a0, 48
+; RV64I-NEXT: sb a1, 14(a2)
+; RV64I-NEXT: srli a1, a0, 40
+; RV64I-NEXT: sb a1, 13(a2)
+; RV64I-NEXT: srli a1, a0, 32
+; RV64I-NEXT: sb a1, 12(a2)
+; RV64I-NEXT: srli a1, a0, 24
+; RV64I-NEXT: sb a1, 11(a2)
+; RV64I-NEXT: srli a1, a0, 16
+; RV64I-NEXT: sb a1, 10(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 9(a2)
+; RV64I-NEXT: ld s0, 8(sp) # 8-byte Folded Reload
+; RV64I-NEXT: ld s1, 0(sp) # 8-byte Folded Reload
+; RV64I-NEXT: addi sp, sp, 16
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: lshr_32bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: addi sp, sp, -128
+; RV32I-NEXT: sw ra, 124(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s0, 120(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s1, 116(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s2, 112(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s3, 108(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s4, 104(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s5, 100(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s6, 96(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s7, 92(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s8, 88(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s9, 84(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s10, 80(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s11, 76(sp) # 4-byte Folded Spill
+; RV32I-NEXT: lbu t0, 4(a0)
+; RV32I-NEXT: lbu a6, 5(a0)
+; RV32I-NEXT: lbu t2, 6(a0)
+; RV32I-NEXT: lbu t4, 7(a0)
+; RV32I-NEXT: lbu t1, 0(a0)
+; RV32I-NEXT: lbu t5, 1(a0)
+; RV32I-NEXT: lbu t6, 2(a0)
+; RV32I-NEXT: lbu s0, 3(a0)
+; RV32I-NEXT: lbu t3, 12(a0)
+; RV32I-NEXT: lbu a7, 13(a0)
+; RV32I-NEXT: lbu s1, 14(a0)
+; RV32I-NEXT: lbu s6, 15(a0)
+; RV32I-NEXT: lbu s2, 8(a0)
+; RV32I-NEXT: lbu s3, 9(a0)
+; RV32I-NEXT: lbu s4, 10(a0)
+; RV32I-NEXT: lbu s5, 11(a0)
+; RV32I-NEXT: lbu a3, 21(a0)
+; RV32I-NEXT: lbu a4, 20(a0)
+; RV32I-NEXT: lbu a5, 22(a0)
+; RV32I-NEXT: lbu s7, 23(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli s7, s7, 24
+; RV32I-NEXT: or a4, s7, a5
+; RV32I-NEXT: or a3, a4, a3
+; RV32I-NEXT: lbu a4, 17(a0)
+; RV32I-NEXT: lbu a5, 16(a0)
+; RV32I-NEXT: lbu s8, 18(a0)
+; RV32I-NEXT: lbu s9, 19(a0)
+; RV32I-NEXT: slli a4, a4, 8
+; RV32I-NEXT: or s7, a4, a5
+; RV32I-NEXT: slli s8, s8, 16
+; RV32I-NEXT: slli s9, s9, 24
+; RV32I-NEXT: or s9, s9, s8
+; RV32I-NEXT: lbu a4, 29(a0)
+; RV32I-NEXT: lbu a5, 28(a0)
+; RV32I-NEXT: lbu s8, 30(a0)
+; RV32I-NEXT: lbu s10, 31(a0)
+; RV32I-NEXT: slli a4, a4, 8
+; RV32I-NEXT: or a4, a4, a5
+; RV32I-NEXT: slli s8, s8, 16
+; RV32I-NEXT: slli s10, s10, 24
+; RV32I-NEXT: or a5, s10, s8
+; RV32I-NEXT: or a4, a5, a4
+; RV32I-NEXT: lbu a5, 25(a0)
+; RV32I-NEXT: lbu s8, 24(a0)
+; RV32I-NEXT: lbu s10, 26(a0)
+; RV32I-NEXT: lbu a0, 27(a0)
+; RV32I-NEXT: slli a5, a5, 8
+; RV32I-NEXT: or a5, a5, s8
+; RV32I-NEXT: slli s10, s10, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a0, a0, s10
+; RV32I-NEXT: or ra, a0, a5
+; RV32I-NEXT: lbu a0, 1(a1)
+; RV32I-NEXT: lbu a5, 0(a1)
+; RV32I-NEXT: lbu s8, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: slli a0, a0, 8
+; RV32I-NEXT: or a0, a0, a5
+; RV32I-NEXT: slli s8, s8, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, s8
+; RV32I-NEXT: or a0, a1, a0
+; RV32I-NEXT: slli a0, a0, 3
+; RV32I-NEXT: addi a5, a0, -192
+; RV32I-NEXT: addi a1, a0, -224
+; RV32I-NEXT: slli s8, a4, 1
+; RV32I-NEXT: sw s8, 60(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw a5, 28(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw a1, 24(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz a1, .LBB9_2
+; RV32I-NEXT: # %bb.1:
+; RV32I-NEXT: srl s8, a4, a1
+; RV32I-NEXT: j .LBB9_3
+; RV32I-NEXT: .LBB9_2:
+; RV32I-NEXT: srl a1, ra, a5
+; RV32I-NEXT: xori a5, a5, 31
+; RV32I-NEXT: sll a5, s8, a5
+; RV32I-NEXT: or s8, a1, a5
+; RV32I-NEXT: .LBB9_3:
+; RV32I-NEXT: slli a5, a7, 8
+; RV32I-NEXT: slli s10, s1, 16
+; RV32I-NEXT: slli s6, s6, 24
+; RV32I-NEXT: or a7, s9, s7
+; RV32I-NEXT: addi s1, a0, -128
+; RV32I-NEXT: slli a1, a3, 1
+; RV32I-NEXT: addi s9, a0, -160
+; RV32I-NEXT: xori s11, s1, 31
+; RV32I-NEXT: sw a1, 48(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s11, 36(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz s9, .LBB9_5
+; RV32I-NEXT: # %bb.4:
+; RV32I-NEXT: srl s7, a3, s9
+; RV32I-NEXT: j .LBB9_6
+; RV32I-NEXT: .LBB9_5:
+; RV32I-NEXT: srl s7, a7, s1
+; RV32I-NEXT: sll s11, a1, s11
+; RV32I-NEXT: or s7, s7, s11
+; RV32I-NEXT: .LBB9_6:
+; RV32I-NEXT: slli s3, s3, 8
+; RV32I-NEXT: slli s4, s4, 16
+; RV32I-NEXT: slli s5, s5, 24
+; RV32I-NEXT: or a5, a5, t3
+; RV32I-NEXT: or s6, s6, s10
+; RV32I-NEXT: neg s11, a0
+; RV32I-NEXT: sll s10, ra, s11
+; RV32I-NEXT: li t3, 160
+; RV32I-NEXT: li a1, 64
+; RV32I-NEXT: sub t3, t3, a0
+; RV32I-NEXT: sw s10, 64(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw t3, 40(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bgeu s1, a1, .LBB9_8
+; RV32I-NEXT: # %bb.7:
+; RV32I-NEXT: slti t3, t3, 0
+; RV32I-NEXT: neg t3, t3
+; RV32I-NEXT: and t3, t3, s10
+; RV32I-NEXT: or s8, s7, t3
+; RV32I-NEXT: .LBB9_8:
+; RV32I-NEXT: slli s10, a6, 8
+; RV32I-NEXT: slli t2, t2, 16
+; RV32I-NEXT: slli t4, t4, 24
+; RV32I-NEXT: slli t5, t5, 8
+; RV32I-NEXT: slli t6, t6, 16
+; RV32I-NEXT: slli s0, s0, 24
+; RV32I-NEXT: or s2, s3, s2
+; RV32I-NEXT: or s3, s5, s4
+; RV32I-NEXT: or a6, s6, a5
+; RV32I-NEXT: mv s7, a7
+; RV32I-NEXT: beqz s1, .LBB9_10
+; RV32I-NEXT: # %bb.9:
+; RV32I-NEXT: mv s7, s8
+; RV32I-NEXT: .LBB9_10:
+; RV32I-NEXT: or t0, s10, t0
+; RV32I-NEXT: or t2, t4, t2
+; RV32I-NEXT: or t1, t5, t1
+; RV32I-NEXT: or t4, s0, t6
+; RV32I-NEXT: or s5, s3, s2
+; RV32I-NEXT: addi a1, a0, -64
+; RV32I-NEXT: slli t5, a6, 1
+; RV32I-NEXT: addi s4, a0, -96
+; RV32I-NEXT: xori t3, a1, 31
+; RV32I-NEXT: sw a1, 56(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw t3, 32(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw t5, 12(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz s4, .LBB9_12
+; RV32I-NEXT: # %bb.11:
+; RV32I-NEXT: srl a5, a6, s4
+; RV32I-NEXT: j .LBB9_13
+; RV32I-NEXT: .LBB9_12:
+; RV32I-NEXT: srl a5, s5, a1
+; RV32I-NEXT: sll t3, t5, t3
+; RV32I-NEXT: or a5, a5, t3
+; RV32I-NEXT: .LBB9_13:
+; RV32I-NEXT: li t5, 64
+; RV32I-NEXT: or s3, t2, t0
+; RV32I-NEXT: or t1, t4, t1
+; RV32I-NEXT: addi t6, a0, -32
+; RV32I-NEXT: xori s10, a0, 31
+; RV32I-NEXT: bltz t6, .LBB9_15
+; RV32I-NEXT: # %bb.14:
+; RV32I-NEXT: srl t4, s3, t6
+; RV32I-NEXT: j .LBB9_16
+; RV32I-NEXT: .LBB9_15:
+; RV32I-NEXT: srl t0, t1, a0
+; RV32I-NEXT: slli t2, s3, 1
+; RV32I-NEXT: sll t2, t2, s10
+; RV32I-NEXT: or t4, t0, t2
+; RV32I-NEXT: .LBB9_16:
+; RV32I-NEXT: sll t2, s5, s11
+; RV32I-NEXT: li t0, 32
+; RV32I-NEXT: sub s0, t0, a0
+; RV32I-NEXT: slti t3, s0, 0
+; RV32I-NEXT: neg a1, t3
+; RV32I-NEXT: bgeu a0, t5, .LBB9_18
+; RV32I-NEXT: # %bb.17:
+; RV32I-NEXT: and a5, a1, t2
+; RV32I-NEXT: or a5, t4, a5
+; RV32I-NEXT: .LBB9_18:
+; RV32I-NEXT: mv s8, t1
+; RV32I-NEXT: beqz a0, .LBB9_20
+; RV32I-NEXT: # %bb.19:
+; RV32I-NEXT: mv s8, a5
+; RV32I-NEXT: .LBB9_20:
+; RV32I-NEXT: sll a5, a7, s11
+; RV32I-NEXT: li t3, 96
+; RV32I-NEXT: sub s6, t3, a0
+; RV32I-NEXT: slti t3, s6, 0
+; RV32I-NEXT: neg t4, t3
+; RV32I-NEXT: li s2, 128
+; RV32I-NEXT: sub t5, s2, a0
+; RV32I-NEXT: sltiu t3, t5, 64
+; RV32I-NEXT: neg t3, t3
+; RV32I-NEXT: sw t3, 8(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bgeu a0, s2, .LBB9_22
+; RV32I-NEXT: # %bb.21:
+; RV32I-NEXT: mv s2, t3
+; RV32I-NEXT: and t3, t4, a5
+; RV32I-NEXT: and t3, s2, t3
+; RV32I-NEXT: or s7, s8, t3
+; RV32I-NEXT: .LBB9_22:
+; RV32I-NEXT: li s8, 64
+; RV32I-NEXT: sw s0, 72(sp) # 4-byte Folded Spill
+; RV32I-NEXT: beqz a0, .LBB9_24
+; RV32I-NEXT: # %bb.23:
+; RV32I-NEXT: mv t1, s7
+; RV32I-NEXT: .LBB9_24:
+; RV32I-NEXT: neg t3, t5
+; RV32I-NEXT: sub s0, t0, t5
+; RV32I-NEXT: srl t0, a3, t3
+; RV32I-NEXT: sw a1, 52(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s0, 16(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw t0, 20(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bgez s0, .LBB9_26
+; RV32I-NEXT: # %bb.25:
+; RV32I-NEXT: srl t0, a7, t3
+; RV32I-NEXT: sub t3, s8, t5
+; RV32I-NEXT: xori t3, t3, 31
+; RV32I-NEXT: lw a1, 48(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll t3, a1, t3
+; RV32I-NEXT: lw a1, 52(sp) # 4-byte Folded Reload
+; RV32I-NEXT: or t0, t0, t3
+; RV32I-NEXT: .LBB9_26:
+; RV32I-NEXT: bltu t5, s8, .LBB9_28
+; RV32I-NEXT: # %bb.27:
+; RV32I-NEXT: and t3, a1, a5
+; RV32I-NEXT: mv t0, ra
+; RV32I-NEXT: bnez t5, .LBB9_29
+; RV32I-NEXT: j .LBB9_30
+; RV32I-NEXT: .LBB9_28:
+; RV32I-NEXT: lw t3, 64(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and t3, t4, t3
+; RV32I-NEXT: or t3, t3, t0
+; RV32I-NEXT: mv t0, ra
+; RV32I-NEXT: beqz t5, .LBB9_30
+; RV32I-NEXT: .LBB9_29:
+; RV32I-NEXT: mv t0, t3
+; RV32I-NEXT: .LBB9_30:
+; RV32I-NEXT: bltz t6, .LBB9_32
+; RV32I-NEXT: # %bb.31:
+; RV32I-NEXT: srl t4, a6, t6
+; RV32I-NEXT: j .LBB9_33
+; RV32I-NEXT: .LBB9_32:
+; RV32I-NEXT: srl t3, s5, a0
+; RV32I-NEXT: lw a1, 12(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll t4, a1, s10
+; RV32I-NEXT: or t4, t3, t4
+; RV32I-NEXT: .LBB9_33:
+; RV32I-NEXT: sltiu s0, a0, 64
+; RV32I-NEXT: sw s10, 44(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz s9, .LBB9_35
+; RV32I-NEXT: # %bb.34:
+; RV32I-NEXT: srl a1, a4, s9
+; RV32I-NEXT: j .LBB9_36
+; RV32I-NEXT: .LBB9_35:
+; RV32I-NEXT: srl t3, ra, s1
+; RV32I-NEXT: lw s7, 60(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw a1, 36(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll a1, s7, a1
+; RV32I-NEXT: or a1, t3, a1
+; RV32I-NEXT: .LBB9_36:
+; RV32I-NEXT: neg s10, s0
+; RV32I-NEXT: sltiu t3, s1, 64
+; RV32I-NEXT: neg s0, t3
+; RV32I-NEXT: li t3, 128
+; RV32I-NEXT: sw ra, 68(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltu a0, t3, .LBB9_38
+; RV32I-NEXT: # %bb.37:
+; RV32I-NEXT: and a1, s0, a1
+; RV32I-NEXT: j .LBB9_39
+; RV32I-NEXT: .LBB9_38:
+; RV32I-NEXT: and a1, s10, t4
+; RV32I-NEXT: or a1, a1, t0
+; RV32I-NEXT: .LBB9_39:
+; RV32I-NEXT: lw t3, 40(sp) # 4-byte Folded Reload
+; RV32I-NEXT: mv ra, s5
+; RV32I-NEXT: beqz a0, .LBB9_41
+; RV32I-NEXT: # %bb.40:
+; RV32I-NEXT: mv ra, a1
+; RV32I-NEXT: .LBB9_41:
+; RV32I-NEXT: sub a1, s8, a0
+; RV32I-NEXT: xori t4, a1, 31
+; RV32I-NEXT: lw a1, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sw s0, 4(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bgez a1, .LBB9_43
+; RV32I-NEXT: # %bb.42:
+; RV32I-NEXT: sll a1, a6, s11
+; RV32I-NEXT: srli t0, s5, 1
+; RV32I-NEXT: srl t0, t0, t4
+; RV32I-NEXT: or t2, a1, t0
+; RV32I-NEXT: .LBB9_43:
+; RV32I-NEXT: slti a1, t6, 0
+; RV32I-NEXT: neg s2, a1
+; RV32I-NEXT: slti t0, s4, 0
+; RV32I-NEXT: neg s0, t0
+; RV32I-NEXT: bltu a0, s8, .LBB9_45
+; RV32I-NEXT: # %bb.44:
+; RV32I-NEXT: lw a1, 56(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl t0, a6, a1
+; RV32I-NEXT: and t2, s0, t0
+; RV32I-NEXT: j .LBB9_46
+; RV32I-NEXT: .LBB9_45:
+; RV32I-NEXT: srl t0, s3, a0
+; RV32I-NEXT: and t0, s2, t0
+; RV32I-NEXT: or t2, t0, t2
+; RV32I-NEXT: .LBB9_46:
+; RV32I-NEXT: sw s0, 12(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw t4, 36(sp) # 4-byte Folded Spill
+; RV32I-NEXT: li a1, 64
+; RV32I-NEXT: mv t0, s3
+; RV32I-NEXT: beqz a0, .LBB9_48
+; RV32I-NEXT: # %bb.47:
+; RV32I-NEXT: mv t0, t2
+; RV32I-NEXT: .LBB9_48:
+; RV32I-NEXT: sll s7, a3, s11
+; RV32I-NEXT: srli s8, a7, 1
+; RV32I-NEXT: xori s0, t5, 31
+; RV32I-NEXT: bltz s6, .LBB9_50
+; RV32I-NEXT: # %bb.49:
+; RV32I-NEXT: mv t4, a5
+; RV32I-NEXT: j .LBB9_51
+; RV32I-NEXT: .LBB9_50:
+; RV32I-NEXT: srl t2, s8, s0
+; RV32I-NEXT: or t4, s7, t2
+; RV32I-NEXT: .LBB9_51:
+; RV32I-NEXT: sll s5, a4, s11
+; RV32I-NEXT: lw t2, 68(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srli s11, t2, 1
+; RV32I-NEXT: bltz t3, .LBB9_53
+; RV32I-NEXT: # %bb.52:
+; RV32I-NEXT: lw t3, 64(sp) # 4-byte Folded Reload
+; RV32I-NEXT: j .LBB9_54
+; RV32I-NEXT: .LBB9_53:
+; RV32I-NEXT: li t2, 192
+; RV32I-NEXT: sub t2, t2, a0
+; RV32I-NEXT: xori t2, t2, 31
+; RV32I-NEXT: srl t2, s11, t2
+; RV32I-NEXT: or t3, s5, t2
+; RV32I-NEXT: .LBB9_54:
+; RV32I-NEXT: slti t2, s9, 0
+; RV32I-NEXT: neg t2, t2
+; RV32I-NEXT: bltu s1, a1, .LBB9_56
+; RV32I-NEXT: # %bb.55:
+; RV32I-NEXT: lw a1, 28(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl t3, a4, a1
+; RV32I-NEXT: lw a1, 24(sp) # 4-byte Folded Reload
+; RV32I-NEXT: slti s9, a1, 0
+; RV32I-NEXT: neg s9, s9
+; RV32I-NEXT: and t3, s9, t3
+; RV32I-NEXT: mv s9, a3
+; RV32I-NEXT: bnez s1, .LBB9_57
+; RV32I-NEXT: j .LBB9_58
+; RV32I-NEXT: .LBB9_56:
+; RV32I-NEXT: srl s9, a3, s1
+; RV32I-NEXT: and s9, t2, s9
+; RV32I-NEXT: or t3, s9, t3
+; RV32I-NEXT: mv s9, a3
+; RV32I-NEXT: beqz s1, .LBB9_58
+; RV32I-NEXT: .LBB9_57:
+; RV32I-NEXT: mv s9, t3
+; RV32I-NEXT: .LBB9_58:
+; RV32I-NEXT: li a1, 128
+; RV32I-NEXT: bltu a0, a1, .LBB9_63
+; RV32I-NEXT: # %bb.59:
+; RV32I-NEXT: lw t3, 60(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bnez a0, .LBB9_64
+; RV32I-NEXT: .LBB9_60:
+; RV32I-NEXT: lw a1, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltz a1, .LBB9_65
+; RV32I-NEXT: .LBB9_61:
+; RV32I-NEXT: li s7, 64
+; RV32I-NEXT: bltz s6, .LBB9_66
+; RV32I-NEXT: .LBB9_62:
+; RV32I-NEXT: lw t4, 64(sp) # 4-byte Folded Reload
+; RV32I-NEXT: mv t0, t4
+; RV32I-NEXT: bltu t5, s7, .LBB9_67
+; RV32I-NEXT: j .LBB9_68
+; RV32I-NEXT: .LBB9_63:
+; RV32I-NEXT: lw a1, 8(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and t3, a1, t4
+; RV32I-NEXT: or s9, t0, t3
+; RV32I-NEXT: lw t3, 60(sp) # 4-byte Folded Reload
+; RV32I-NEXT: beqz a0, .LBB9_60
+; RV32I-NEXT: .LBB9_64:
+; RV32I-NEXT: mv s3, s9
+; RV32I-NEXT: lw a1, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bgez a1, .LBB9_61
+; RV32I-NEXT: .LBB9_65:
+; RV32I-NEXT: lw a1, 36(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a5, s8, a1
+; RV32I-NEXT: or a5, s7, a5
+; RV32I-NEXT: li s7, 64
+; RV32I-NEXT: bgez s6, .LBB9_62
+; RV32I-NEXT: .LBB9_66:
+; RV32I-NEXT: srl t0, s11, s0
+; RV32I-NEXT: or t0, s5, t0
+; RV32I-NEXT: lw t4, 64(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bgeu t5, s7, .LBB9_68
+; RV32I-NEXT: .LBB9_67:
+; RV32I-NEXT: lw a1, 16(sp) # 4-byte Folded Reload
+; RV32I-NEXT: slti a5, a1, 0
+; RV32I-NEXT: neg a5, a5
+; RV32I-NEXT: lw a1, 20(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and a5, a5, a1
+; RV32I-NEXT: or a5, t0, a5
+; RV32I-NEXT: .LBB9_68:
+; RV32I-NEXT: mv t0, a4
+; RV32I-NEXT: bnez t5, .LBB9_71
+; RV32I-NEXT: # %bb.69:
+; RV32I-NEXT: li a1, 128
+; RV32I-NEXT: bltu a0, a1, .LBB9_72
+; RV32I-NEXT: .LBB9_70:
+; RV32I-NEXT: srl a5, a4, s1
+; RV32I-NEXT: and a5, t2, a5
+; RV32I-NEXT: lw a1, 4(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and a5, a1, a5
+; RV32I-NEXT: lw t5, 56(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bnez a0, .LBB9_73
+; RV32I-NEXT: j .LBB9_74
+; RV32I-NEXT: .LBB9_71:
+; RV32I-NEXT: mv t0, a5
+; RV32I-NEXT: li a1, 128
+; RV32I-NEXT: bgeu a0, a1, .LBB9_70
+; RV32I-NEXT: .LBB9_72:
+; RV32I-NEXT: srl a5, a6, a0
+; RV32I-NEXT: and a5, s2, a5
+; RV32I-NEXT: and a5, s10, a5
+; RV32I-NEXT: or a5, a5, t0
+; RV32I-NEXT: lw t5, 56(sp) # 4-byte Folded Reload
+; RV32I-NEXT: beqz a0, .LBB9_74
+; RV32I-NEXT: .LBB9_73:
+; RV32I-NEXT: mv a6, a5
+; RV32I-NEXT: .LBB9_74:
+; RV32I-NEXT: bltz s4, .LBB9_77
+; RV32I-NEXT: # %bb.75:
+; RV32I-NEXT: srl a5, a4, s4
+; RV32I-NEXT: bgez t6, .LBB9_78
+; RV32I-NEXT: .LBB9_76:
+; RV32I-NEXT: srl t0, a7, a0
+; RV32I-NEXT: lw a1, 48(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw t2, 44(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll t2, a1, t2
+; RV32I-NEXT: or t0, t0, t2
+; RV32I-NEXT: lw a1, 52(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltu a0, s7, .LBB9_79
+; RV32I-NEXT: j .LBB9_80
+; RV32I-NEXT: .LBB9_77:
+; RV32I-NEXT: lw a5, 68(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a5, a5, t5
+; RV32I-NEXT: lw a1, 32(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll t0, t3, a1
+; RV32I-NEXT: or a5, a5, t0
+; RV32I-NEXT: bltz t6, .LBB9_76
+; RV32I-NEXT: .LBB9_78:
+; RV32I-NEXT: srl t0, a3, t6
+; RV32I-NEXT: lw a1, 52(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bgeu a0, s7, .LBB9_80
+; RV32I-NEXT: .LBB9_79:
+; RV32I-NEXT: and a5, a1, t4
+; RV32I-NEXT: or a5, t0, a5
+; RV32I-NEXT: .LBB9_80:
+; RV32I-NEXT: bnez a0, .LBB9_84
+; RV32I-NEXT: # %bb.81:
+; RV32I-NEXT: lw a1, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltz a1, .LBB9_85
+; RV32I-NEXT: .LBB9_82:
+; RV32I-NEXT: sltiu a5, a0, 128
+; RV32I-NEXT: bltu a0, s7, .LBB9_86
+; RV32I-NEXT: .LBB9_83:
+; RV32I-NEXT: srl t0, a4, t5
+; RV32I-NEXT: lw a1, 12(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and t2, a1, t0
+; RV32I-NEXT: neg t0, a5
+; RV32I-NEXT: bnez a0, .LBB9_87
+; RV32I-NEXT: j .LBB9_88
+; RV32I-NEXT: .LBB9_84:
+; RV32I-NEXT: mv a7, a5
+; RV32I-NEXT: lw a1, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bgez a1, .LBB9_82
+; RV32I-NEXT: .LBB9_85:
+; RV32I-NEXT: lw a1, 36(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a5, s11, a1
+; RV32I-NEXT: or t4, s5, a5
+; RV32I-NEXT: sltiu a5, a0, 128
+; RV32I-NEXT: bgeu a0, s7, .LBB9_83
+; RV32I-NEXT: .LBB9_86:
+; RV32I-NEXT: srl t0, a3, a0
+; RV32I-NEXT: and t0, s2, t0
+; RV32I-NEXT: or t2, t0, t4
+; RV32I-NEXT: neg t0, a5
+; RV32I-NEXT: beqz a0, .LBB9_88
+; RV32I-NEXT: .LBB9_87:
+; RV32I-NEXT: mv a3, t2
+; RV32I-NEXT: .LBB9_88:
+; RV32I-NEXT: and a5, t0, a7
+; RV32I-NEXT: and a3, t0, a3
+; RV32I-NEXT: bltz t6, .LBB9_90
+; RV32I-NEXT: # %bb.89:
+; RV32I-NEXT: srl a7, a4, t6
+; RV32I-NEXT: j .LBB9_91
+; RV32I-NEXT: .LBB9_90:
+; RV32I-NEXT: lw a7, 68(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a7, a7, a0
+; RV32I-NEXT: lw a1, 44(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll t2, t3, a1
+; RV32I-NEXT: or a7, a7, t2
+; RV32I-NEXT: .LBB9_91:
+; RV32I-NEXT: and a7, s10, a7
+; RV32I-NEXT: and a7, t0, a7
+; RV32I-NEXT: srl a0, a4, a0
+; RV32I-NEXT: and a0, s2, a0
+; RV32I-NEXT: and a0, s10, a0
+; RV32I-NEXT: and a0, t0, a0
+; RV32I-NEXT: sb a7, 24(a2)
+; RV32I-NEXT: sb a0, 28(a2)
+; RV32I-NEXT: srli a1, a7, 24
+; RV32I-NEXT: sb a1, 27(a2)
+; RV32I-NEXT: srli a1, a7, 16
+; RV32I-NEXT: sb a1, 26(a2)
+; RV32I-NEXT: srli a1, a7, 8
+; RV32I-NEXT: sb a1, 25(a2)
+; RV32I-NEXT: srli a1, a0, 24
+; RV32I-NEXT: sb a1, 31(a2)
+; RV32I-NEXT: srli a1, a0, 16
+; RV32I-NEXT: sb a1, 30(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 29(a2)
+; RV32I-NEXT: sb a5, 16(a2)
+; RV32I-NEXT: srli a0, a5, 24
+; RV32I-NEXT: sb a0, 19(a2)
+; RV32I-NEXT: srli a0, a5, 16
+; RV32I-NEXT: sb a0, 18(a2)
+; RV32I-NEXT: srli a5, a5, 8
+; RV32I-NEXT: sb a5, 17(a2)
+; RV32I-NEXT: sb a3, 20(a2)
+; RV32I-NEXT: srli a0, a3, 24
+; RV32I-NEXT: sb a0, 23(a2)
+; RV32I-NEXT: srli a0, a3, 16
+; RV32I-NEXT: sb a0, 22(a2)
+; RV32I-NEXT: srli a3, a3, 8
+; RV32I-NEXT: sb a3, 21(a2)
+; RV32I-NEXT: sb t1, 0(a2)
+; RV32I-NEXT: sb a6, 12(a2)
+; RV32I-NEXT: srli a0, t1, 24
+; RV32I-NEXT: sb a0, 3(a2)
+; RV32I-NEXT: srli a0, t1, 16
+; RV32I-NEXT: sb a0, 2(a2)
+; RV32I-NEXT: srli a0, t1, 8
+; RV32I-NEXT: sb a0, 1(a2)
+; RV32I-NEXT: sb s3, 4(a2)
+; RV32I-NEXT: sb ra, 8(a2)
+; RV32I-NEXT: srli a0, a6, 24
+; RV32I-NEXT: sb a0, 15(a2)
+; RV32I-NEXT: srli a0, a6, 16
+; RV32I-NEXT: sb a0, 14(a2)
+; RV32I-NEXT: srli a0, a6, 8
+; RV32I-NEXT: sb a0, 13(a2)
+; RV32I-NEXT: srli a0, s3, 24
+; RV32I-NEXT: sb a0, 7(a2)
+; RV32I-NEXT: srli a0, s3, 16
+; RV32I-NEXT: sb a0, 6(a2)
+; RV32I-NEXT: srli a0, s3, 8
+; RV32I-NEXT: sb a0, 5(a2)
+; RV32I-NEXT: srli a0, ra, 24
+; RV32I-NEXT: sb a0, 11(a2)
+; RV32I-NEXT: srli a0, ra, 16
+; RV32I-NEXT: sb a0, 10(a2)
+; RV32I-NEXT: srli a0, ra, 8
+; RV32I-NEXT: sb a0, 9(a2)
+; RV32I-NEXT: lw ra, 124(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s0, 120(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s1, 116(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s2, 112(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s3, 108(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s4, 104(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s5, 100(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s6, 96(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s7, 92(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s8, 88(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s9, 84(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s10, 80(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s11, 76(sp) # 4-byte Folded Reload
+; RV32I-NEXT: addi sp, sp, 128
+; RV32I-NEXT: ret
+ %src = load i256, ptr %src.ptr, align 1
+ %byteOff = load i256, ptr %byteOff.ptr, align 1
+ %bitOff = shl i256 %byteOff, 3
+ %res = lshr i256 %src, %bitOff
+ store i256 %res, ptr %dst, align 1
+ ret void
+}
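+
+; A sketch of the recurring expansion visible in the checks above (a summary
+; of the emitted pattern, not additional autogenerated output): the align-1
+; loads and stores force byte-sized lbu/sb sequences, and the variable i256
+; lshr is expanded into per-limb shifts. For a shift amount n within one
+; limb of w bits (w = 32 for RV32I, 64 for RV64I) the cross-limb case is
+;   lo' = (lo >> n) | ((hi << 1) << (w-1 - n))
+; where xori n, w-1 computes the complementary amount and the pre-shift of
+; hi by 1 avoids the w-bit shift that (w - n) would require when n == 0,
+; since RISC-V shifts mask their amount. The bltz/bgeu branches on n-w,
+; n-2w, ... select between the in-limb and cross-limb cases for each
+; result word.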
+define void @shl_32bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: shl_32bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: addi sp, sp, -16
+; RV64I-NEXT: sd s0, 8(sp) # 8-byte Folded Spill
+; RV64I-NEXT: sd s1, 0(sp) # 8-byte Folded Spill
+; RV64I-NEXT: lbu a3, 17(a0)
+; RV64I-NEXT: lbu a4, 16(a0)
+; RV64I-NEXT: lbu a5, 18(a0)
+; RV64I-NEXT: lbu a6, 19(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a5, a6, a5
+; RV64I-NEXT: or a5, a5, a3
+; RV64I-NEXT: lbu a3, 21(a0)
+; RV64I-NEXT: lbu a4, 20(a0)
+; RV64I-NEXT: lbu a6, 22(a0)
+; RV64I-NEXT: lbu a7, 23(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli a7, a7, 24
+; RV64I-NEXT: or a4, a7, a6
+; RV64I-NEXT: or a6, a4, a3
+; RV64I-NEXT: lbu a3, 25(a0)
+; RV64I-NEXT: lbu a4, 24(a0)
+; RV64I-NEXT: lbu a7, 26(a0)
+; RV64I-NEXT: lbu t0, 27(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a7, a7, 16
+; RV64I-NEXT: slli t0, t0, 24
+; RV64I-NEXT: or a4, t0, a7
+; RV64I-NEXT: or a7, a4, a3
+; RV64I-NEXT: lbu a3, 29(a0)
+; RV64I-NEXT: lbu a4, 28(a0)
+; RV64I-NEXT: lbu t0, 30(a0)
+; RV64I-NEXT: lbu t1, 31(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli t0, t0, 16
+; RV64I-NEXT: slli t1, t1, 24
+; RV64I-NEXT: or a4, t1, t0
+; RV64I-NEXT: or t0, a4, a3
+; RV64I-NEXT: lbu a3, 1(a0)
+; RV64I-NEXT: lbu a4, 0(a0)
+; RV64I-NEXT: lbu t1, 2(a0)
+; RV64I-NEXT: lbu t2, 3(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli t1, t1, 16
+; RV64I-NEXT: slli t2, t2, 24
+; RV64I-NEXT: or a4, t2, t1
+; RV64I-NEXT: or a3, a4, a3
+; RV64I-NEXT: lbu a4, 5(a0)
+; RV64I-NEXT: lbu t1, 4(a0)
+; RV64I-NEXT: lbu t2, 6(a0)
+; RV64I-NEXT: lbu t3, 7(a0)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, t1
+; RV64I-NEXT: slli t2, t2, 16
+; RV64I-NEXT: slli t3, t3, 24
+; RV64I-NEXT: or t1, t3, t2
+; RV64I-NEXT: or a4, t1, a4
+; RV64I-NEXT: slli a4, a4, 32
+; RV64I-NEXT: or a3, a4, a3
+; RV64I-NEXT: lbu a4, 9(a0)
+; RV64I-NEXT: lbu t1, 8(a0)
+; RV64I-NEXT: lbu t2, 10(a0)
+; RV64I-NEXT: lbu t3, 11(a0)
+; RV64I-NEXT: slli a4, a4, 8
+; RV64I-NEXT: or a4, a4, t1
+; RV64I-NEXT: slli t2, t2, 16
+; RV64I-NEXT: slli t3, t3, 24
+; RV64I-NEXT: or t1, t3, t2
+; RV64I-NEXT: or a4, t1, a4
+; RV64I-NEXT: lbu t1, 13(a0)
+; RV64I-NEXT: lbu t2, 12(a0)
+; RV64I-NEXT: lbu t3, 14(a0)
+; RV64I-NEXT: lbu a0, 15(a0)
+; RV64I-NEXT: slli t1, t1, 8
+; RV64I-NEXT: or t1, t1, t2
+; RV64I-NEXT: slli t3, t3, 16
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, t3
+; RV64I-NEXT: or a0, a0, t1
+; RV64I-NEXT: slli a0, a0, 32
+; RV64I-NEXT: or a4, a0, a4
+; RV64I-NEXT: lbu a0, 5(a1)
+; RV64I-NEXT: lbu t1, 4(a1)
+; RV64I-NEXT: lbu t2, 6(a1)
+; RV64I-NEXT: lbu t3, 7(a1)
+; RV64I-NEXT: slli a0, a0, 8
+; RV64I-NEXT: or a0, a0, t1
+; RV64I-NEXT: slli t2, t2, 16
+; RV64I-NEXT: slli t3, t3, 24
+; RV64I-NEXT: lbu t1, 1(a1)
+; RV64I-NEXT: lbu t4, 0(a1)
+; RV64I-NEXT: or t2, t3, t2
+; RV64I-NEXT: or t2, t2, a0
+; RV64I-NEXT: slli t1, t1, 8
+; RV64I-NEXT: or t1, t1, t4
+; RV64I-NEXT: lbu t3, 2(a1)
+; RV64I-NEXT: lbu t4, 3(a1)
+; RV64I-NEXT: slli a0, a6, 32
+; RV64I-NEXT: slli a1, t0, 32
+; RV64I-NEXT: slli t3, t3, 16
+; RV64I-NEXT: slli t4, t4, 24
+; RV64I-NEXT: or a6, t4, t3
+; RV64I-NEXT: or a6, a6, t1
+; RV64I-NEXT: slli a6, a6, 3
+; RV64I-NEXT: slli t2, t2, 35
+; RV64I-NEXT: or a6, t2, a6
+; RV64I-NEXT: addi t1, a6, -128
+; RV64I-NEXT: addi t2, a6, -192
+; RV64I-NEXT: srli t0, a3, 1
+; RV64I-NEXT: bltz t2, .LBB10_2
+; RV64I-NEXT: # %bb.1:
+; RV64I-NEXT: sll t3, a3, t2
+; RV64I-NEXT: j .LBB10_3
+; RV64I-NEXT: .LBB10_2:
+; RV64I-NEXT: sll t3, a4, t1
+; RV64I-NEXT: xori t4, t1, 63
+; RV64I-NEXT: srl t4, t0, t4
+; RV64I-NEXT: or t3, t3, t4
+; RV64I-NEXT: .LBB10_3:
+; RV64I-NEXT: or a0, a0, a5
+; RV64I-NEXT: or a1, a1, a7
+; RV64I-NEXT: addi a7, a6, -64
+; RV64I-NEXT: xori a5, a6, 63
+; RV64I-NEXT: bltz a7, .LBB10_5
+; RV64I-NEXT: # %bb.4:
+; RV64I-NEXT: sll s1, a0, a7
+; RV64I-NEXT: j .LBB10_6
+; RV64I-NEXT: .LBB10_5:
+; RV64I-NEXT: sll t4, a1, a6
+; RV64I-NEXT: srli t5, a0, 1
+; RV64I-NEXT: srl t5, t5, a5
+; RV64I-NEXT: or s1, t4, t5
+; RV64I-NEXT: .LBB10_6:
+; RV64I-NEXT: negw t6, a6
+; RV64I-NEXT: srl t4, a4, t6
+; RV64I-NEXT: li s0, 64
+; RV64I-NEXT: li t5, 128
+; RV64I-NEXT: sub s0, s0, a6
+; RV64I-NEXT: bltu a6, t5, .LBB10_12
+; RV64I-NEXT: # %bb.7:
+; RV64I-NEXT: bnez a6, .LBB10_13
+; RV64I-NEXT: .LBB10_8:
+; RV64I-NEXT: bgez s0, .LBB10_10
+; RV64I-NEXT: .LBB10_9:
+; RV64I-NEXT: srl t3, a3, t6
+; RV64I-NEXT: slli t4, a4, 1
+; RV64I-NEXT: sub t6, t5, a6
+; RV64I-NEXT: xori t6, t6, 63
+; RV64I-NEXT: sll t4, t4, t6
+; RV64I-NEXT: or t4, t3, t4
+; RV64I-NEXT: .LBB10_10:
+; RV64I-NEXT: slti t3, a7, 0
+; RV64I-NEXT: neg t3, t3
+; RV64I-NEXT: bltu a6, t5, .LBB10_14
+; RV64I-NEXT: # %bb.11:
+; RV64I-NEXT: sll t1, a3, t1
+; RV64I-NEXT: slti t2, t2, 0
+; RV64I-NEXT: neg t2, t2
+; RV64I-NEXT: and t1, t2, t1
+; RV64I-NEXT: bnez a6, .LBB10_15
+; RV64I-NEXT: j .LBB10_16
+; RV64I-NEXT: .LBB10_12:
+; RV64I-NEXT: slti t3, s0, 0
+; RV64I-NEXT: neg t3, t3
+; RV64I-NEXT: and t3, t3, t4
+; RV64I-NEXT: or t3, s1, t3
+; RV64I-NEXT: beqz a6, .LBB10_8
+; RV64I-NEXT: .LBB10_13:
+; RV64I-NEXT: mv a1, t3
+; RV64I-NEXT: bltz s0, .LBB10_9
+; RV64I-NEXT: j .LBB10_10
+; RV64I-NEXT: .LBB10_14:
+; RV64I-NEXT: sll t1, a0, a6
+; RV64I-NEXT: and t1, t3, t1
+; RV64I-NEXT: or t1, t1, t4
+; RV64I-NEXT: beqz a6, .LBB10_16
+; RV64I-NEXT: .LBB10_15:
+; RV64I-NEXT: mv a0, t1
+; RV64I-NEXT: .LBB10_16:
+; RV64I-NEXT: bltz a7, .LBB10_18
+; RV64I-NEXT: # %bb.17:
+; RV64I-NEXT: sll a4, a3, a7
+; RV64I-NEXT: j .LBB10_19
+; RV64I-NEXT: .LBB10_18:
+; RV64I-NEXT: sll a4, a4, a6
+; RV64I-NEXT: srl a5, t0, a5
+; RV64I-NEXT: or a4, a4, a5
+; RV64I-NEXT: .LBB10_19:
+; RV64I-NEXT: sltiu a5, a6, 128
+; RV64I-NEXT: neg a5, a5
+; RV64I-NEXT: and a4, a5, a4
+; RV64I-NEXT: sll a3, a3, a6
+; RV64I-NEXT: and a3, t3, a3
+; RV64I-NEXT: and a3, a5, a3
+; RV64I-NEXT: sb a3, 0(a2)
+; RV64I-NEXT: sb a4, 8(a2)
+; RV64I-NEXT: srli a5, a3, 56
+; RV64I-NEXT: sb a5, 7(a2)
+; RV64I-NEXT: srli a5, a3, 48
+; RV64I-NEXT: sb a5, 6(a2)
+; RV64I-NEXT: srli a5, a3, 40
+; RV64I-NEXT: sb a5, 5(a2)
+; RV64I-NEXT: srli a5, a3, 32
+; RV64I-NEXT: sb a5, 4(a2)
+; RV64I-NEXT: srli a5, a3, 24
+; RV64I-NEXT: sb a5, 3(a2)
+; RV64I-NEXT: srli a5, a3, 16
+; RV64I-NEXT: sb a5, 2(a2)
+; RV64I-NEXT: srli a3, a3, 8
+; RV64I-NEXT: sb a3, 1(a2)
+; RV64I-NEXT: srli a3, a4, 56
+; RV64I-NEXT: sb a3, 15(a2)
+; RV64I-NEXT: srli a3, a4, 48
+; RV64I-NEXT: sb a3, 14(a2)
+; RV64I-NEXT: srli a3, a4, 40
+; RV64I-NEXT: sb a3, 13(a2)
+; RV64I-NEXT: srli a3, a4, 32
+; RV64I-NEXT: sb a3, 12(a2)
+; RV64I-NEXT: srli a3, a4, 24
+; RV64I-NEXT: sb a3, 11(a2)
+; RV64I-NEXT: srli a3, a4, 16
+; RV64I-NEXT: sb a3, 10(a2)
+; RV64I-NEXT: srli a4, a4, 8
+; RV64I-NEXT: sb a4, 9(a2)
+; RV64I-NEXT: sb a1, 24(a2)
+; RV64I-NEXT: sb a0, 16(a2)
+; RV64I-NEXT: srli a3, a1, 56
+; RV64I-NEXT: sb a3, 31(a2)
+; RV64I-NEXT: srli a3, a1, 48
+; RV64I-NEXT: sb a3, 30(a2)
+; RV64I-NEXT: srli a3, a1, 40
+; RV64I-NEXT: sb a3, 29(a2)
+; RV64I-NEXT: srli a3, a1, 32
+; RV64I-NEXT: sb a3, 28(a2)
+; RV64I-NEXT: srli a3, a1, 24
+; RV64I-NEXT: sb a3, 27(a2)
+; RV64I-NEXT: srli a3, a1, 16
+; RV64I-NEXT: sb a3, 26(a2)
+; RV64I-NEXT: srli a1, a1, 8
+; RV64I-NEXT: sb a1, 25(a2)
+; RV64I-NEXT: srli a1, a0, 56
+; RV64I-NEXT: sb a1, 23(a2)
+; RV64I-NEXT: srli a1, a0, 48
+; RV64I-NEXT: sb a1, 22(a2)
+; RV64I-NEXT: srli a1, a0, 40
+; RV64I-NEXT: sb a1, 21(a2)
+; RV64I-NEXT: srli a1, a0, 32
+; RV64I-NEXT: sb a1, 20(a2)
+; RV64I-NEXT: srli a1, a0, 24
+; RV64I-NEXT: sb a1, 19(a2)
+; RV64I-NEXT: srli a1, a0, 16
+; RV64I-NEXT: sb a1, 18(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 17(a2)
+; RV64I-NEXT: ld s0, 8(sp) # 8-byte Folded Reload
+; RV64I-NEXT: ld s1, 0(sp) # 8-byte Folded Reload
+; RV64I-NEXT: addi sp, sp, 16
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: shl_32bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: addi sp, sp, -128
+; RV32I-NEXT: sw ra, 124(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s0, 120(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s1, 116(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s2, 112(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s3, 108(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s4, 104(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s5, 100(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s6, 96(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s7, 92(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s8, 88(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s9, 84(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s10, 80(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s11, 76(sp) # 4-byte Folded Spill
+; RV32I-NEXT: lbu a7, 24(a0)
+; RV32I-NEXT: lbu t3, 25(a0)
+; RV32I-NEXT: lbu t4, 26(a0)
+; RV32I-NEXT: lbu t5, 27(a0)
+; RV32I-NEXT: lbu t0, 28(a0)
+; RV32I-NEXT: lbu s0, 29(a0)
+; RV32I-NEXT: lbu s1, 30(a0)
+; RV32I-NEXT: lbu s3, 31(a0)
+; RV32I-NEXT: lbu a6, 16(a0)
+; RV32I-NEXT: lbu t6, 17(a0)
+; RV32I-NEXT: lbu s2, 18(a0)
+; RV32I-NEXT: lbu s6, 19(a0)
+; RV32I-NEXT: lbu s4, 20(a0)
+; RV32I-NEXT: lbu t1, 21(a0)
+; RV32I-NEXT: lbu t2, 22(a0)
+; RV32I-NEXT: lbu s5, 23(a0)
+; RV32I-NEXT: lbu a3, 9(a0)
+; RV32I-NEXT: lbu a4, 8(a0)
+; RV32I-NEXT: lbu a5, 10(a0)
+; RV32I-NEXT: lbu s7, 11(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a5, a5, 16
+; RV32I-NEXT: slli s7, s7, 24
+; RV32I-NEXT: or a4, s7, a5
+; RV32I-NEXT: or a3, a4, a3
+; RV32I-NEXT: lbu a4, 13(a0)
+; RV32I-NEXT: lbu a5, 12(a0)
+; RV32I-NEXT: lbu s7, 14(a0)
+; RV32I-NEXT: lbu s9, 15(a0)
+; RV32I-NEXT: slli a4, a4, 8
+; RV32I-NEXT: or s8, a4, a5
+; RV32I-NEXT: slli s7, s7, 16
+; RV32I-NEXT: slli s9, s9, 24
+; RV32I-NEXT: or s9, s9, s7
+; RV32I-NEXT: lbu a4, 1(a0)
+; RV32I-NEXT: lbu a5, 0(a0)
+; RV32I-NEXT: lbu s7, 2(a0)
+; RV32I-NEXT: lbu s10, 3(a0)
+; RV32I-NEXT: slli a4, a4, 8
+; RV32I-NEXT: or a4, a4, a5
+; RV32I-NEXT: slli s7, s7, 16
+; RV32I-NEXT: slli s10, s10, 24
+; RV32I-NEXT: or a5, s10, s7
+; RV32I-NEXT: or a4, a5, a4
+; RV32I-NEXT: lbu a5, 5(a0)
+; RV32I-NEXT: lbu s7, 4(a0)
+; RV32I-NEXT: lbu s10, 6(a0)
+; RV32I-NEXT: lbu a0, 7(a0)
+; RV32I-NEXT: slli a5, a5, 8
+; RV32I-NEXT: or a5, a5, s7
+; RV32I-NEXT: slli s10, s10, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a0, a0, s10
+; RV32I-NEXT: or s10, a0, a5
+; RV32I-NEXT: lbu a0, 1(a1)
+; RV32I-NEXT: lbu a5, 0(a1)
+; RV32I-NEXT: lbu s7, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: slli a0, a0, 8
+; RV32I-NEXT: or a0, a0, a5
+; RV32I-NEXT: slli s7, s7, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, s7
+; RV32I-NEXT: or a0, a1, a0
+; RV32I-NEXT: slli a0, a0, 3
+; RV32I-NEXT: addi a5, a0, -192
+; RV32I-NEXT: addi a1, a0, -224
+; RV32I-NEXT: srli s7, a4, 1
+; RV32I-NEXT: sw s10, 68(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s7, 52(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw a5, 20(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw a1, 16(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz a1, .LBB10_2
+; RV32I-NEXT: # %bb.1:
+; RV32I-NEXT: sll s7, a4, a1
+; RV32I-NEXT: j .LBB10_3
+; RV32I-NEXT: .LBB10_2:
+; RV32I-NEXT: sll a1, s10, a5
+; RV32I-NEXT: xori a5, a5, 31
+; RV32I-NEXT: srl a5, s7, a5
+; RV32I-NEXT: or s7, a1, a5
+; RV32I-NEXT: .LBB10_3:
+; RV32I-NEXT: slli s10, t6, 8
+; RV32I-NEXT: slli ra, s2, 16
+; RV32I-NEXT: slli s6, s6, 24
+; RV32I-NEXT: or t6, s9, s8
+; RV32I-NEXT: addi s2, a0, -128
+; RV32I-NEXT: srli a1, a3, 1
+; RV32I-NEXT: addi s11, a0, -160
+; RV32I-NEXT: xori s8, s2, 31
+; RV32I-NEXT: sw a1, 44(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s8, 28(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz s11, .LBB10_5
+; RV32I-NEXT: # %bb.4:
+; RV32I-NEXT: sll s8, a3, s11
+; RV32I-NEXT: j .LBB10_6
+; RV32I-NEXT: .LBB10_5:
+; RV32I-NEXT: sll a5, t6, s2
+; RV32I-NEXT: srl s8, a1, s8
+; RV32I-NEXT: or s8, a5, s8
+; RV32I-NEXT: .LBB10_6:
+; RV32I-NEXT: slli t1, t1, 8
+; RV32I-NEXT: slli a5, t2, 16
+; RV32I-NEXT: slli s5, s5, 24
+; RV32I-NEXT: or a6, s10, a6
+; RV32I-NEXT: or s6, s6, ra
+; RV32I-NEXT: neg s10, a0
+; RV32I-NEXT: lw t2, 68(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl s9, t2, s10
+; RV32I-NEXT: li t2, 160
+; RV32I-NEXT: li ra, 64
+; RV32I-NEXT: sub t2, t2, a0
+; RV32I-NEXT: li a1, 64
+; RV32I-NEXT: sw s9, 60(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw t2, 36(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bgeu s2, ra, .LBB10_8
+; RV32I-NEXT: # %bb.7:
+; RV32I-NEXT: slti t2, t2, 0
+; RV32I-NEXT: neg t2, t2
+; RV32I-NEXT: and t2, t2, s9
+; RV32I-NEXT: or s7, s8, t2
+; RV32I-NEXT: .LBB10_8:
+; RV32I-NEXT: slli t3, t3, 8
+; RV32I-NEXT: slli t4, t4, 16
+; RV32I-NEXT: slli t5, t5, 24
+; RV32I-NEXT: slli s0, s0, 8
+; RV32I-NEXT: slli s1, s1, 16
+; RV32I-NEXT: slli s3, s3, 24
+; RV32I-NEXT: or s4, t1, s4
+; RV32I-NEXT: or s5, s5, a5
+; RV32I-NEXT: or ra, s6, a6
+; RV32I-NEXT: sw t6, 72(sp) # 4-byte Folded Spill
+; RV32I-NEXT: mv a6, t6
+; RV32I-NEXT: beqz s2, .LBB10_10
+; RV32I-NEXT: # %bb.9:
+; RV32I-NEXT: mv a6, s7
+; RV32I-NEXT: .LBB10_10:
+; RV32I-NEXT: or a5, t3, a7
+; RV32I-NEXT: or a7, t5, t4
+; RV32I-NEXT: or t0, s0, t0
+; RV32I-NEXT: or t1, s3, s1
+; RV32I-NEXT: or s6, s5, s4
+; RV32I-NEXT: addi t4, a0, -64
+; RV32I-NEXT: srli s0, ra, 1
+; RV32I-NEXT: addi t6, a0, -96
+; RV32I-NEXT: xori t3, t4, 31
+; RV32I-NEXT: sw t3, 12(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz t6, .LBB10_12
+; RV32I-NEXT: # %bb.11:
+; RV32I-NEXT: sll t3, ra, t6
+; RV32I-NEXT: j .LBB10_13
+; RV32I-NEXT: .LBB10_12:
+; RV32I-NEXT: sll t2, s6, t4
+; RV32I-NEXT: srl t3, s0, t3
+; RV32I-NEXT: or t3, t2, t3
+; RV32I-NEXT: .LBB10_13:
+; RV32I-NEXT: or a7, a7, a5
+; RV32I-NEXT: or t0, t1, t0
+; RV32I-NEXT: addi t5, a0, -32
+; RV32I-NEXT: xori s4, a0, 31
+; RV32I-NEXT: bltz t5, .LBB10_15
+; RV32I-NEXT: # %bb.14:
+; RV32I-NEXT: sll a5, a7, t5
+; RV32I-NEXT: j .LBB10_16
+; RV32I-NEXT: .LBB10_15:
+; RV32I-NEXT: sll a5, t0, a0
+; RV32I-NEXT: srli t1, a7, 1
+; RV32I-NEXT: srl t1, t1, s4
+; RV32I-NEXT: or a5, a5, t1
+; RV32I-NEXT: .LBB10_16:
+; RV32I-NEXT: srl s1, s6, s10
+; RV32I-NEXT: li t1, 32
+; RV32I-NEXT: sub t2, t1, a0
+; RV32I-NEXT: sw t2, 64(sp) # 4-byte Folded Spill
+; RV32I-NEXT: slti t2, t2, 0
+; RV32I-NEXT: neg s9, t2
+; RV32I-NEXT: sw s1, 32(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bgeu a0, a1, .LBB10_18
+; RV32I-NEXT: # %bb.17:
+; RV32I-NEXT: and t2, s9, s1
+; RV32I-NEXT: or t3, a5, t2
+; RV32I-NEXT: .LBB10_18:
+; RV32I-NEXT: sw t4, 48(sp) # 4-byte Folded Spill
+; RV32I-NEXT: mv s1, t0
+; RV32I-NEXT: beqz a0, .LBB10_20
+; RV32I-NEXT: # %bb.19:
+; RV32I-NEXT: mv s1, t3
+; RV32I-NEXT: .LBB10_20:
+; RV32I-NEXT: lw a1, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a1, a1, s10
+; RV32I-NEXT: li t2, 96
+; RV32I-NEXT: sub t4, t2, a0
+; RV32I-NEXT: slti t2, t4, 0
+; RV32I-NEXT: neg t3, t2
+; RV32I-NEXT: li a5, 128
+; RV32I-NEXT: sub s7, a5, a0
+; RV32I-NEXT: sltiu t2, s7, 64
+; RV32I-NEXT: neg t2, t2
+; RV32I-NEXT: bgeu a0, a5, .LBB10_22
+; RV32I-NEXT: # %bb.21:
+; RV32I-NEXT: and a6, t3, a1
+; RV32I-NEXT: and a6, t2, a6
+; RV32I-NEXT: or a6, s1, a6
+; RV32I-NEXT: .LBB10_22:
+; RV32I-NEXT: lw s3, 68(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sw a1, 56(sp) # 4-byte Folded Spill
+; RV32I-NEXT: beqz a0, .LBB10_24
+; RV32I-NEXT: # %bb.23:
+; RV32I-NEXT: mv t0, a6
+; RV32I-NEXT: .LBB10_24:
+; RV32I-NEXT: neg a6, s7
+; RV32I-NEXT: sub s8, t1, s7
+; RV32I-NEXT: sll t1, a3, a6
+; RV32I-NEXT: sw t2, 4(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz s8, .LBB10_27
+; RV32I-NEXT: # %bb.25:
+; RV32I-NEXT: mv a6, t1
+; RV32I-NEXT: li a1, 64
+; RV32I-NEXT: li a5, 64
+; RV32I-NEXT: bgeu s7, a1, .LBB10_28
+; RV32I-NEXT: .LBB10_26:
+; RV32I-NEXT: lw t2, 60(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and t2, t3, t2
+; RV32I-NEXT: or t2, t2, a6
+; RV32I-NEXT: mv a6, s3
+; RV32I-NEXT: bnez s7, .LBB10_29
+; RV32I-NEXT: j .LBB10_30
+; RV32I-NEXT: .LBB10_27:
+; RV32I-NEXT: lw a1, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll a6, a1, a6
+; RV32I-NEXT: li a1, 64
+; RV32I-NEXT: sub t2, a1, s7
+; RV32I-NEXT: xori t2, t2, 31
+; RV32I-NEXT: lw a5, 44(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl t2, a5, t2
+; RV32I-NEXT: or a6, a6, t2
+; RV32I-NEXT: li a5, 64
+; RV32I-NEXT: bltu s7, a1, .LBB10_26
+; RV32I-NEXT: .LBB10_28:
+; RV32I-NEXT: lw a1, 56(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and t2, s9, a1
+; RV32I-NEXT: mv a6, s3
+; RV32I-NEXT: beqz s7, .LBB10_30
+; RV32I-NEXT: .LBB10_29:
+; RV32I-NEXT: mv a6, t2
+; RV32I-NEXT: .LBB10_30:
+; RV32I-NEXT: bltz t5, .LBB10_32
+; RV32I-NEXT: # %bb.31:
+; RV32I-NEXT: sll s0, ra, t5
+; RV32I-NEXT: j .LBB10_33
+; RV32I-NEXT: .LBB10_32:
+; RV32I-NEXT: sll t2, s6, a0
+; RV32I-NEXT: srl t3, s0, s4
+; RV32I-NEXT: or s0, t2, t3
+; RV32I-NEXT: .LBB10_33:
+; RV32I-NEXT: sltiu t3, a0, 64
+; RV32I-NEXT: sw s4, 40(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s9, 8(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz s11, .LBB10_35
+; RV32I-NEXT: # %bb.34:
+; RV32I-NEXT: sll a1, a4, s11
+; RV32I-NEXT: j .LBB10_36
+; RV32I-NEXT: .LBB10_35:
+; RV32I-NEXT: sll t2, s3, s2
+; RV32I-NEXT: lw s4, 52(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw a1, 28(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a1, s4, a1
+; RV32I-NEXT: or a1, t2, a1
+; RV32I-NEXT: .LBB10_36:
+; RV32I-NEXT: neg s5, t3
+; RV32I-NEXT: sltiu t2, s2, 64
+; RV32I-NEXT: neg t3, t2
+; RV32I-NEXT: li t2, 128
+; RV32I-NEXT: bltu a0, t2, .LBB10_38
+; RV32I-NEXT: # %bb.37:
+; RV32I-NEXT: and a1, t3, a1
+; RV32I-NEXT: mv s0, s6
+; RV32I-NEXT: bnez a0, .LBB10_39
+; RV32I-NEXT: j .LBB10_40
+; RV32I-NEXT: .LBB10_38:
+; RV32I-NEXT: and a1, s5, s0
+; RV32I-NEXT: or a1, a1, a6
+; RV32I-NEXT: mv s0, s6
+; RV32I-NEXT: beqz a0, .LBB10_40
+; RV32I-NEXT: .LBB10_39:
+; RV32I-NEXT: mv s0, a1
+; RV32I-NEXT: .LBB10_40:
+; RV32I-NEXT: srl a1, a3, s10
+; RV32I-NEXT: lw a6, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: slli a6, a6, 1
+; RV32I-NEXT: sub t2, a5, a0
+; RV32I-NEXT: xori t2, t2, 31
+; RV32I-NEXT: lw s1, 64(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sw t2, 28(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz s1, .LBB10_42
+; RV32I-NEXT: # %bb.41:
+; RV32I-NEXT: lw s4, 56(sp) # 4-byte Folded Reload
+; RV32I-NEXT: j .LBB10_43
+; RV32I-NEXT: .LBB10_42:
+; RV32I-NEXT: sll t2, a6, t2
+; RV32I-NEXT: or s4, a1, t2
+; RV32I-NEXT: .LBB10_43:
+; RV32I-NEXT: srl s1, a4, s10
+; RV32I-NEXT: slli s3, s3, 1
+; RV32I-NEXT: xori s9, s7, 31
+; RV32I-NEXT: sw s3, 24(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz t4, .LBB10_45
+; RV32I-NEXT: # %bb.44:
+; RV32I-NEXT: mv s3, s1
+; RV32I-NEXT: lw t2, 60(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltu s7, a5, .LBB10_46
+; RV32I-NEXT: j .LBB10_47
+; RV32I-NEXT: .LBB10_45:
+; RV32I-NEXT: sll t2, s3, s9
+; RV32I-NEXT: mv s3, s1
+; RV32I-NEXT: or t2, s1, t2
+; RV32I-NEXT: bgeu s7, a5, .LBB10_47
+; RV32I-NEXT: .LBB10_46:
+; RV32I-NEXT: slti s4, s8, 0
+; RV32I-NEXT: neg s4, s4
+; RV32I-NEXT: and t1, s4, t1
+; RV32I-NEXT: or s4, t2, t1
+; RV32I-NEXT: .LBB10_47:
+; RV32I-NEXT: mv s8, a4
+; RV32I-NEXT: beqz s7, .LBB10_49
+; RV32I-NEXT: # %bb.48:
+; RV32I-NEXT: mv s8, s4
+; RV32I-NEXT: .LBB10_49:
+; RV32I-NEXT: slti t1, t5, 0
+; RV32I-NEXT: neg s7, t1
+; RV32I-NEXT: slti t1, s11, 0
+; RV32I-NEXT: neg t1, t1
+; RV32I-NEXT: li a5, 128
+; RV32I-NEXT: bltu a0, a5, .LBB10_51
+; RV32I-NEXT: # %bb.50:
+; RV32I-NEXT: sll t2, a4, s2
+; RV32I-NEXT: and t2, t1, t2
+; RV32I-NEXT: and t2, t3, t2
+; RV32I-NEXT: mv s11, ra
+; RV32I-NEXT: bnez a0, .LBB10_52
+; RV32I-NEXT: j .LBB10_53
+; RV32I-NEXT: .LBB10_51:
+; RV32I-NEXT: sll t2, ra, a0
+; RV32I-NEXT: and t2, s7, t2
+; RV32I-NEXT: and t2, s5, t2
+; RV32I-NEXT: or t2, t2, s8
+; RV32I-NEXT: mv s11, ra
+; RV32I-NEXT: beqz a0, .LBB10_53
+; RV32I-NEXT: .LBB10_52:
+; RV32I-NEXT: mv s11, t2
+; RV32I-NEXT: .LBB10_53:
+; RV32I-NEXT: lw a5, 64(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bgez a5, .LBB10_55
+; RV32I-NEXT: # %bb.54:
+; RV32I-NEXT: srl t2, ra, s10
+; RV32I-NEXT: slli s6, s6, 1
+; RV32I-NEXT: lw a5, 28(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll t3, s6, a5
+; RV32I-NEXT: or a5, t2, t3
+; RV32I-NEXT: sw a5, 32(sp) # 4-byte Folded Spill
+; RV32I-NEXT: .LBB10_55:
+; RV32I-NEXT: slti t2, t6, 0
+; RV32I-NEXT: neg s6, t2
+; RV32I-NEXT: li s10, 64
+; RV32I-NEXT: bltu a0, s10, .LBB10_57
+; RV32I-NEXT: # %bb.56:
+; RV32I-NEXT: lw a5, 48(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll t2, ra, a5
+; RV32I-NEXT: and t2, s6, t2
+; RV32I-NEXT: j .LBB10_58
+; RV32I-NEXT: .LBB10_57:
+; RV32I-NEXT: sll t2, a7, a0
+; RV32I-NEXT: and t2, s7, t2
+; RV32I-NEXT: lw a5, 32(sp) # 4-byte Folded Reload
+; RV32I-NEXT: or t2, t2, a5
+; RV32I-NEXT: .LBB10_58:
+; RV32I-NEXT: lw s4, 52(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s8, 60(sp) # 4-byte Folded Reload
+; RV32I-NEXT: mv t3, a7
+; RV32I-NEXT: beqz a0, .LBB10_60
+; RV32I-NEXT: # %bb.59:
+; RV32I-NEXT: mv t3, t2
+; RV32I-NEXT: .LBB10_60:
+; RV32I-NEXT: bgez t4, .LBB10_62
+; RV32I-NEXT: # %bb.61:
+; RV32I-NEXT: sll a5, a6, s9
+; RV32I-NEXT: or a1, a1, a5
+; RV32I-NEXT: sw a1, 56(sp) # 4-byte Folded Spill
+; RV32I-NEXT: .LBB10_62:
+; RV32I-NEXT: lw t2, 68(sp) # 4-byte Folded Reload
+; RV32I-NEXT: mv s1, s3
+; RV32I-NEXT: lw t4, 48(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw a1, 36(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltz a1, .LBB10_65
+; RV32I-NEXT: # %bb.63:
+; RV32I-NEXT: mv a1, s8
+; RV32I-NEXT: bgeu s2, s10, .LBB10_66
+; RV32I-NEXT: .LBB10_64:
+; RV32I-NEXT: sll a6, a3, s2
+; RV32I-NEXT: and a6, t1, a6
+; RV32I-NEXT: or a6, a6, a1
+; RV32I-NEXT: mv a1, a3
+; RV32I-NEXT: bnez s2, .LBB10_67
+; RV32I-NEXT: j .LBB10_68
+; RV32I-NEXT: .LBB10_65:
+; RV32I-NEXT: li a1, 192
+; RV32I-NEXT: sub a1, a1, a0
+; RV32I-NEXT: xori a1, a1, 31
+; RV32I-NEXT: lw a5, 24(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll a1, a5, a1
+; RV32I-NEXT: or a1, s1, a1
+; RV32I-NEXT: bltu s2, s10, .LBB10_64
+; RV32I-NEXT: .LBB10_66:
+; RV32I-NEXT: lw a1, 20(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll a1, a4, a1
+; RV32I-NEXT: lw a5, 16(sp) # 4-byte Folded Reload
+; RV32I-NEXT: slti a6, a5, 0
+; RV32I-NEXT: neg a6, a6
+; RV32I-NEXT: and a6, a6, a1
+; RV32I-NEXT: mv a1, a3
+; RV32I-NEXT: beqz s2, .LBB10_68
+; RV32I-NEXT: .LBB10_67:
+; RV32I-NEXT: mv a1, a6
+; RV32I-NEXT: .LBB10_68:
+; RV32I-NEXT: li a5, 128
+; RV32I-NEXT: bltu a0, a5, .LBB10_73
+; RV32I-NEXT: # %bb.69:
+; RV32I-NEXT: bnez a0, .LBB10_74
+; RV32I-NEXT: .LBB10_70:
+; RV32I-NEXT: bltz t6, .LBB10_75
+; RV32I-NEXT: .LBB10_71:
+; RV32I-NEXT: sll a1, a4, t6
+; RV32I-NEXT: lw t3, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bgez t5, .LBB10_76
+; RV32I-NEXT: .LBB10_72:
+; RV32I-NEXT: sll a5, t3, a0
+; RV32I-NEXT: lw a6, 44(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw t1, 40(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a6, a6, t1
+; RV32I-NEXT: or a5, a5, a6
+; RV32I-NEXT: bltu a0, s10, .LBB10_77
+; RV32I-NEXT: j .LBB10_78
+; RV32I-NEXT: .LBB10_73:
+; RV32I-NEXT: lw a1, 56(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw a5, 4(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and a1, a5, a1
+; RV32I-NEXT: or a1, t3, a1
+; RV32I-NEXT: beqz a0, .LBB10_70
+; RV32I-NEXT: .LBB10_74:
+; RV32I-NEXT: mv a7, a1
+; RV32I-NEXT: bgez t6, .LBB10_71
+; RV32I-NEXT: .LBB10_75:
+; RV32I-NEXT: sll a1, t2, t4
+; RV32I-NEXT: lw a5, 12(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a5, s4, a5
+; RV32I-NEXT: or a1, a1, a5
+; RV32I-NEXT: lw t3, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltz t5, .LBB10_72
+; RV32I-NEXT: .LBB10_76:
+; RV32I-NEXT: sll a5, a3, t5
+; RV32I-NEXT: bgeu a0, s10, .LBB10_78
+; RV32I-NEXT: .LBB10_77:
+; RV32I-NEXT: lw a1, 8(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and a1, a1, s8
+; RV32I-NEXT: or a1, a5, a1
+; RV32I-NEXT: .LBB10_78:
+; RV32I-NEXT: bnez a0, .LBB10_82
+; RV32I-NEXT: # %bb.79:
+; RV32I-NEXT: lw a1, 64(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltz a1, .LBB10_83
+; RV32I-NEXT: .LBB10_80:
+; RV32I-NEXT: sltiu a1, a0, 128
+; RV32I-NEXT: bltu a0, s10, .LBB10_84
+; RV32I-NEXT: .LBB10_81:
+; RV32I-NEXT: sll a5, a4, t4
+; RV32I-NEXT: and a6, s6, a5
+; RV32I-NEXT: neg a5, a1
+; RV32I-NEXT: bnez a0, .LBB10_85
+; RV32I-NEXT: j .LBB10_86
+; RV32I-NEXT: .LBB10_82:
+; RV32I-NEXT: mv t3, a1
+; RV32I-NEXT: lw a1, 64(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bgez a1, .LBB10_80
+; RV32I-NEXT: .LBB10_83:
+; RV32I-NEXT: lw a1, 28(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw a5, 24(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll a1, a5, a1
+; RV32I-NEXT: or s8, s1, a1
+; RV32I-NEXT: sltiu a1, a0, 128
+; RV32I-NEXT: bgeu a0, s10, .LBB10_81
+; RV32I-NEXT: .LBB10_84:
+; RV32I-NEXT: sll a5, a3, a0
+; RV32I-NEXT: and a5, s7, a5
+; RV32I-NEXT: or a6, a5, s8
+; RV32I-NEXT: neg a5, a1
+; RV32I-NEXT: beqz a0, .LBB10_86
+; RV32I-NEXT: .LBB10_85:
+; RV32I-NEXT: mv a3, a6
+; RV32I-NEXT: .LBB10_86:
+; RV32I-NEXT: and a6, a5, t3
+; RV32I-NEXT: and a1, a5, a3
+; RV32I-NEXT: bltz t5, .LBB10_88
+; RV32I-NEXT: # %bb.87:
+; RV32I-NEXT: sll a3, a4, t5
+; RV32I-NEXT: j .LBB10_89
+; RV32I-NEXT: .LBB10_88:
+; RV32I-NEXT: sll a3, t2, a0
+; RV32I-NEXT: lw t1, 40(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl t1, s4, t1
+; RV32I-NEXT: or a3, a3, t1
+; RV32I-NEXT: .LBB10_89:
+; RV32I-NEXT: and a3, s5, a3
+; RV32I-NEXT: and a3, a5, a3
+; RV32I-NEXT: sll a0, a4, a0
+; RV32I-NEXT: and a0, s7, a0
+; RV32I-NEXT: and a0, s5, a0
+; RV32I-NEXT: and a0, a5, a0
+; RV32I-NEXT: sb a0, 0(a2)
+; RV32I-NEXT: sb a3, 4(a2)
+; RV32I-NEXT: srli a4, a0, 24
+; RV32I-NEXT: sb a4, 3(a2)
+; RV32I-NEXT: srli a4, a0, 16
+; RV32I-NEXT: sb a4, 2(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 1(a2)
+; RV32I-NEXT: srli a0, a3, 24
+; RV32I-NEXT: sb a0, 7(a2)
+; RV32I-NEXT: srli a0, a3, 16
+; RV32I-NEXT: sb a0, 6(a2)
+; RV32I-NEXT: srli a3, a3, 8
+; RV32I-NEXT: sb a3, 5(a2)
+; RV32I-NEXT: sb a6, 12(a2)
+; RV32I-NEXT: sb a1, 8(a2)
+; RV32I-NEXT: srli a0, a6, 24
+; RV32I-NEXT: sb a0, 15(a2)
+; RV32I-NEXT: srli a0, a6, 16
+; RV32I-NEXT: sb a0, 14(a2)
+; RV32I-NEXT: srli a0, a6, 8
+; RV32I-NEXT: sb a0, 13(a2)
+; RV32I-NEXT: sb t0, 28(a2)
+; RV32I-NEXT: srli a0, a1, 24
+; RV32I-NEXT: sb a0, 11(a2)
+; RV32I-NEXT: srli a0, a1, 16
+; RV32I-NEXT: sb a0, 10(a2)
+; RV32I-NEXT: srli a1, a1, 8
+; RV32I-NEXT: sb a1, 9(a2)
+; RV32I-NEXT: sb a7, 24(a2)
+; RV32I-NEXT: srli a0, t0, 24
+; RV32I-NEXT: sb a0, 31(a2)
+; RV32I-NEXT: srli a0, t0, 16
+; RV32I-NEXT: sb a0, 30(a2)
+; RV32I-NEXT: srli a0, t0, 8
+; RV32I-NEXT: sb a0, 29(a2)
+; RV32I-NEXT: sb s11, 16(a2)
+; RV32I-NEXT: srli a0, a7, 24
+; RV32I-NEXT: sb a0, 27(a2)
+; RV32I-NEXT: srli a0, a7, 16
+; RV32I-NEXT: sb a0, 26(a2)
+; RV32I-NEXT: srli a0, a7, 8
+; RV32I-NEXT: sb a0, 25(a2)
+; RV32I-NEXT: srli a0, s11, 24
+; RV32I-NEXT: sb a0, 19(a2)
+; RV32I-NEXT: srli a0, s11, 16
+; RV32I-NEXT: sb a0, 18(a2)
+; RV32I-NEXT: srli a0, s11, 8
+; RV32I-NEXT: sb a0, 17(a2)
+; RV32I-NEXT: sb s0, 20(a2)
+; RV32I-NEXT: srli a0, s0, 24
+; RV32I-NEXT: sb a0, 23(a2)
+; RV32I-NEXT: srli a0, s0, 16
+; RV32I-NEXT: sb a0, 22(a2)
+; RV32I-NEXT: srli s0, s0, 8
+; RV32I-NEXT: sb s0, 21(a2)
+; RV32I-NEXT: lw ra, 124(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s0, 120(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s1, 116(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s2, 112(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s3, 108(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s4, 104(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s5, 100(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s6, 96(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s7, 92(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s8, 88(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s9, 84(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s10, 80(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s11, 76(sp) # 4-byte Folded Reload
+; RV32I-NEXT: addi sp, sp, 128
+; RV32I-NEXT: ret
+ %src = load i256, ptr %src.ptr, align 1
+ %byteOff = load i256, ptr %byteOff.ptr, align 1
+ %bitOff = shl i256 %byteOff, 3
+ %res = shl i256 %src, %bitOff
+ store i256 %res, ptr %dst, align 1
+ ret void
+}
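+
+; shl_32bytes above mirrors the lshr expansion with the limb recurrence
+; reversed (again a summary of the checked pattern, with w the limb width):
+;   hi' = (hi << n) | ((lo >> 1) >> (w-1 - n))
+; ashr_32bytes below follows the lshr structure but additionally
+; materialises a sign word (srai/sraiw by 31 of the word holding byte 31)
+; so that limbs shifted fully out of range are filled with copies of the
+; sign bit rather than with zero.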
+define void @ashr_32bytes(ptr %src.ptr, ptr %byteOff.ptr, ptr %dst) nounwind {
+; RV64I-LABEL: ashr_32bytes:
+; RV64I: # %bb.0:
+; RV64I-NEXT: addi sp, sp, -32
+; RV64I-NEXT: sd s0, 24(sp) # 8-byte Folded Spill
+; RV64I-NEXT: sd s1, 16(sp) # 8-byte Folded Spill
+; RV64I-NEXT: sd s2, 8(sp) # 8-byte Folded Spill
+; RV64I-NEXT: lbu a3, 9(a0)
+; RV64I-NEXT: lbu a4, 8(a0)
+; RV64I-NEXT: lbu a5, 10(a0)
+; RV64I-NEXT: lbu a6, 11(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a4, a6, a5
+; RV64I-NEXT: or a7, a4, a3
+; RV64I-NEXT: lbu a3, 13(a0)
+; RV64I-NEXT: lbu a4, 12(a0)
+; RV64I-NEXT: lbu a5, 14(a0)
+; RV64I-NEXT: lbu a6, 15(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a4
+; RV64I-NEXT: slli a5, a5, 16
+; RV64I-NEXT: slli a6, a6, 24
+; RV64I-NEXT: or a4, a6, a5
+; RV64I-NEXT: or a4, a4, a3
+; RV64I-NEXT: lbu a3, 1(a0)
+; RV64I-NEXT: lbu a5, 0(a0)
+; RV64I-NEXT: lbu a6, 2(a0)
+; RV64I-NEXT: lbu t0, 3(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli t0, t0, 24
+; RV64I-NEXT: or a5, t0, a6
+; RV64I-NEXT: or t1, a5, a3
+; RV64I-NEXT: lbu a3, 5(a0)
+; RV64I-NEXT: lbu a5, 4(a0)
+; RV64I-NEXT: lbu a6, 6(a0)
+; RV64I-NEXT: lbu t0, 7(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli t0, t0, 24
+; RV64I-NEXT: or a5, t0, a6
+; RV64I-NEXT: or t0, a5, a3
+; RV64I-NEXT: lbu a3, 25(a0)
+; RV64I-NEXT: lbu a5, 24(a0)
+; RV64I-NEXT: lbu a6, 26(a0)
+; RV64I-NEXT: lbu t2, 27(a0)
+; RV64I-NEXT: slli a3, a3, 8
+; RV64I-NEXT: or a3, a3, a5
+; RV64I-NEXT: slli a6, a6, 16
+; RV64I-NEXT: slli t2, t2, 24
+; RV64I-NEXT: or a5, t2, a6
+; RV64I-NEXT: or a3, a5, a3
+; RV64I-NEXT: lbu a5, 29(a0)
+; RV64I-NEXT: lbu a6, 28(a0)
+; RV64I-NEXT: lbu t2, 30(a0)
+; RV64I-NEXT: lbu t3, 31(a0)
+; RV64I-NEXT: slli a5, a5, 8
+; RV64I-NEXT: or a5, a5, a6
+; RV64I-NEXT: slli t2, t2, 16
+; RV64I-NEXT: slli t3, t3, 24
+; RV64I-NEXT: or a6, t3, t2
+; RV64I-NEXT: or a6, a6, a5
+; RV64I-NEXT: slli a5, a6, 32
+; RV64I-NEXT: or a3, a5, a3
+; RV64I-NEXT: lbu a5, 17(a0)
+; RV64I-NEXT: lbu t2, 16(a0)
+; RV64I-NEXT: lbu t3, 18(a0)
+; RV64I-NEXT: lbu t4, 19(a0)
+; RV64I-NEXT: slli a5, a5, 8
+; RV64I-NEXT: or a5, a5, t2
+; RV64I-NEXT: slli t3, t3, 16
+; RV64I-NEXT: slli t4, t4, 24
+; RV64I-NEXT: or t2, t4, t3
+; RV64I-NEXT: or a5, t2, a5
+; RV64I-NEXT: lbu t2, 21(a0)
+; RV64I-NEXT: lbu t3, 20(a0)
+; RV64I-NEXT: lbu t4, 22(a0)
+; RV64I-NEXT: lbu a0, 23(a0)
+; RV64I-NEXT: slli t2, t2, 8
+; RV64I-NEXT: or t2, t2, t3
+; RV64I-NEXT: slli t4, t4, 16
+; RV64I-NEXT: slli a0, a0, 24
+; RV64I-NEXT: or a0, a0, t4
+; RV64I-NEXT: or a0, a0, t2
+; RV64I-NEXT: slli a0, a0, 32
+; RV64I-NEXT: or a5, a0, a5
+; RV64I-NEXT: lbu a0, 5(a1)
+; RV64I-NEXT: lbu t2, 4(a1)
+; RV64I-NEXT: lbu t3, 6(a1)
+; RV64I-NEXT: lbu t4, 7(a1)
+; RV64I-NEXT: slli a0, a0, 8
+; RV64I-NEXT: or a0, a0, t2
+; RV64I-NEXT: slli t3, t3, 16
+; RV64I-NEXT: slli t4, t4, 24
+; RV64I-NEXT: lbu t2, 1(a1)
+; RV64I-NEXT: lbu t5, 0(a1)
+; RV64I-NEXT: or t3, t4, t3
+; RV64I-NEXT: or t3, t3, a0
+; RV64I-NEXT: slli t2, t2, 8
+; RV64I-NEXT: or t2, t2, t5
+; RV64I-NEXT: lbu t4, 2(a1)
+; RV64I-NEXT: lbu t5, 3(a1)
+; RV64I-NEXT: slli a0, a4, 32
+; RV64I-NEXT: slli a1, t0, 32
+; RV64I-NEXT: slli t4, t4, 16
+; RV64I-NEXT: slli t5, t5, 24
+; RV64I-NEXT: or a4, t5, t4
+; RV64I-NEXT: or a4, a4, t2
+; RV64I-NEXT: slli a4, a4, 3
+; RV64I-NEXT: slli t3, t3, 35
+; RV64I-NEXT: or a4, t3, a4
+; RV64I-NEXT: addi t3, a4, -128
+; RV64I-NEXT: addi t4, a4, -192
+; RV64I-NEXT: slli t0, a3, 1
+; RV64I-NEXT: bltz t4, .LBB11_2
+; RV64I-NEXT: # %bb.1:
+; RV64I-NEXT: sra t6, a3, t4
+; RV64I-NEXT: j .LBB11_3
+; RV64I-NEXT: .LBB11_2:
+; RV64I-NEXT: srl t2, a5, t3
+; RV64I-NEXT: xori t5, t3, 63
+; RV64I-NEXT: sll t5, t0, t5
+; RV64I-NEXT: or t6, t2, t5
+; RV64I-NEXT: .LBB11_3:
+; RV64I-NEXT: or a0, a0, a7
+; RV64I-NEXT: or a1, a1, t1
+; RV64I-NEXT: addi a7, a4, -64
+; RV64I-NEXT: xori t2, a4, 63
+; RV64I-NEXT: bltz a7, .LBB11_5
+; RV64I-NEXT: # %bb.4:
+; RV64I-NEXT: srl s2, a0, a7
+; RV64I-NEXT: j .LBB11_6
+; RV64I-NEXT: .LBB11_5:
+; RV64I-NEXT: srl t1, a1, a4
+; RV64I-NEXT: slli t5, a0, 1
+; RV64I-NEXT: sll t5, t5, t2
+; RV64I-NEXT: or s2, t1, t5
+; RV64I-NEXT: .LBB11_6:
+; RV64I-NEXT: negw s0, a4
+; RV64I-NEXT: sll t5, a5, s0
+; RV64I-NEXT: li s1, 64
+; RV64I-NEXT: li t1, 128
+; RV64I-NEXT: sub s1, s1, a4
+; RV64I-NEXT: bltu a4, t1, .LBB11_11
+; RV64I-NEXT: # %bb.7:
+; RV64I-NEXT: bnez a4, .LBB11_12
+; RV64I-NEXT: .LBB11_8:
+; RV64I-NEXT: bltz s1, .LBB11_13
+; RV64I-NEXT: .LBB11_9:
+; RV64I-NEXT: sraiw a6, a6, 31
+; RV64I-NEXT: bltz t4, .LBB11_14
+; RV64I-NEXT: .LBB11_10:
+; RV64I-NEXT: mv t3, a6
+; RV64I-NEXT: bltu a4, t1, .LBB11_15
+; RV64I-NEXT: j .LBB11_16
+; RV64I-NEXT: .LBB11_11:
+; RV64I-NEXT: slti t6, s1, 0
+; RV64I-NEXT: neg t6, t6
+; RV64I-NEXT: and t6, t6, t5
+; RV64I-NEXT: or t6, s2, t6
+; RV64I-NEXT: beqz a4, .LBB11_8
+; RV64I-NEXT: .LBB11_12:
+; RV64I-NEXT: mv a1, t6
+; RV64I-NEXT: bgez s1, .LBB11_9
+; RV64I-NEXT: .LBB11_13:
+; RV64I-NEXT: sll t5, a3, s0
+; RV64I-NEXT: srli t6, a5, 1
+; RV64I-NEXT: sub s0, t1, a4
+; RV64I-NEXT: xori s0, s0, 63
+; RV64I-NEXT: srl t6, t6, s0
+; RV64I-NEXT: or t5, t5, t6
+; RV64I-NEXT: sraiw a6, a6, 31
+; RV64I-NEXT: bgez t4, .LBB11_10
+; RV64I-NEXT: .LBB11_14:
+; RV64I-NEXT: sra t3, a3, t3
+; RV64I-NEXT: bgeu a4, t1, .LBB11_16
+; RV64I-NEXT: .LBB11_15:
+; RV64I-NEXT: slti t3, a7, 0
+; RV64I-NEXT: srl t4, a0, a4
+; RV64I-NEXT: neg t3, t3
+; RV64I-NEXT: and t3, t3, t4
+; RV64I-NEXT: or t3, t3, t5
+; RV64I-NEXT: .LBB11_16:
+; RV64I-NEXT: bnez a4, .LBB11_19
+; RV64I-NEXT: # %bb.17:
+; RV64I-NEXT: bltz a7, .LBB11_20
+; RV64I-NEXT: .LBB11_18:
+; RV64I-NEXT: sra a5, a3, a7
+; RV64I-NEXT: bgeu a4, t1, .LBB11_21
+; RV64I-NEXT: j .LBB11_22
+; RV64I-NEXT: .LBB11_19:
+; RV64I-NEXT: mv a0, t3
+; RV64I-NEXT: bgez a7, .LBB11_18
+; RV64I-NEXT: .LBB11_20:
+; RV64I-NEXT: srl a5, a5, a4
+; RV64I-NEXT: sll t0, t0, t2
+; RV64I-NEXT: or a5, a5, t0
+; RV64I-NEXT: bltu a4, t1, .LBB11_22
+; RV64I-NEXT: .LBB11_21:
+; RV64I-NEXT: mv a5, a6
+; RV64I-NEXT: .LBB11_22:
+; RV64I-NEXT: bltz a7, .LBB11_24
+; RV64I-NEXT: # %bb.23:
+; RV64I-NEXT: mv a3, a6
+; RV64I-NEXT: bgeu a4, t1, .LBB11_25
+; RV64I-NEXT: j .LBB11_26
+; RV64I-NEXT: .LBB11_24:
+; RV64I-NEXT: sra a3, a3, a4
+; RV64I-NEXT: bltu a4, t1, .LBB11_26
+; RV64I-NEXT: .LBB11_25:
+; RV64I-NEXT: mv a3, a6
+; RV64I-NEXT: .LBB11_26:
+; RV64I-NEXT: sb a3, 24(a2)
+; RV64I-NEXT: srli a4, a3, 56
+; RV64I-NEXT: sb a4, 31(a2)
+; RV64I-NEXT: srli a4, a3, 48
+; RV64I-NEXT: sb a4, 30(a2)
+; RV64I-NEXT: srli a4, a3, 40
+; RV64I-NEXT: sb a4, 29(a2)
+; RV64I-NEXT: srli a4, a3, 32
+; RV64I-NEXT: sb a4, 28(a2)
+; RV64I-NEXT: srli a4, a3, 24
+; RV64I-NEXT: sb a4, 27(a2)
+; RV64I-NEXT: srli a4, a3, 16
+; RV64I-NEXT: sb a4, 26(a2)
+; RV64I-NEXT: srli a3, a3, 8
+; RV64I-NEXT: sb a3, 25(a2)
+; RV64I-NEXT: sb a5, 16(a2)
+; RV64I-NEXT: srli a3, a5, 56
+; RV64I-NEXT: sb a3, 23(a2)
+; RV64I-NEXT: srli a3, a5, 48
+; RV64I-NEXT: sb a3, 22(a2)
+; RV64I-NEXT: srli a3, a5, 40
+; RV64I-NEXT: sb a3, 21(a2)
+; RV64I-NEXT: srli a3, a5, 32
+; RV64I-NEXT: sb a3, 20(a2)
+; RV64I-NEXT: srli a3, a5, 24
+; RV64I-NEXT: sb a3, 19(a2)
+; RV64I-NEXT: srli a3, a5, 16
+; RV64I-NEXT: sb a3, 18(a2)
+; RV64I-NEXT: srli a5, a5, 8
+; RV64I-NEXT: sb a5, 17(a2)
+; RV64I-NEXT: sb a1, 0(a2)
+; RV64I-NEXT: srli a3, a1, 56
+; RV64I-NEXT: sb a3, 7(a2)
+; RV64I-NEXT: srli a3, a1, 48
+; RV64I-NEXT: sb a3, 6(a2)
+; RV64I-NEXT: srli a3, a1, 40
+; RV64I-NEXT: sb a3, 5(a2)
+; RV64I-NEXT: srli a3, a1, 32
+; RV64I-NEXT: sb a3, 4(a2)
+; RV64I-NEXT: srli a3, a1, 24
+; RV64I-NEXT: sb a3, 3(a2)
+; RV64I-NEXT: srli a3, a1, 16
+; RV64I-NEXT: sb a3, 2(a2)
+; RV64I-NEXT: srli a1, a1, 8
+; RV64I-NEXT: sb a1, 1(a2)
+; RV64I-NEXT: sb a0, 8(a2)
+; RV64I-NEXT: srli a1, a0, 56
+; RV64I-NEXT: sb a1, 15(a2)
+; RV64I-NEXT: srli a1, a0, 48
+; RV64I-NEXT: sb a1, 14(a2)
+; RV64I-NEXT: srli a1, a0, 40
+; RV64I-NEXT: sb a1, 13(a2)
+; RV64I-NEXT: srli a1, a0, 32
+; RV64I-NEXT: sb a1, 12(a2)
+; RV64I-NEXT: srli a1, a0, 24
+; RV64I-NEXT: sb a1, 11(a2)
+; RV64I-NEXT: srli a1, a0, 16
+; RV64I-NEXT: sb a1, 10(a2)
+; RV64I-NEXT: srli a0, a0, 8
+; RV64I-NEXT: sb a0, 9(a2)
+; RV64I-NEXT: ld s0, 24(sp) # 8-byte Folded Reload
+; RV64I-NEXT: ld s1, 16(sp) # 8-byte Folded Reload
+; RV64I-NEXT: ld s2, 8(sp) # 8-byte Folded Reload
+; RV64I-NEXT: addi sp, sp, 32
+; RV64I-NEXT: ret
+;
+; RV32I-LABEL: ashr_32bytes:
+; RV32I: # %bb.0:
+; RV32I-NEXT: addi sp, sp, -128
+; RV32I-NEXT: sw ra, 124(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s0, 120(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s1, 116(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s2, 112(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s3, 108(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s4, 104(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s5, 100(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s6, 96(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s7, 92(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s8, 88(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s9, 84(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s10, 80(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s11, 76(sp) # 4-byte Folded Spill
+; RV32I-NEXT: lbu a7, 4(a0)
+; RV32I-NEXT: lbu a5, 5(a0)
+; RV32I-NEXT: lbu t2, 6(a0)
+; RV32I-NEXT: lbu t3, 7(a0)
+; RV32I-NEXT: lbu t0, 0(a0)
+; RV32I-NEXT: lbu t4, 1(a0)
+; RV32I-NEXT: lbu s9, 2(a0)
+; RV32I-NEXT: lbu s0, 3(a0)
+; RV32I-NEXT: lbu t1, 12(a0)
+; RV32I-NEXT: lbu t6, 13(a0)
+; RV32I-NEXT: lbu s3, 14(a0)
+; RV32I-NEXT: lbu s5, 15(a0)
+; RV32I-NEXT: lbu s1, 8(a0)
+; RV32I-NEXT: lbu s2, 9(a0)
+; RV32I-NEXT: lbu s6, 10(a0)
+; RV32I-NEXT: lbu s7, 11(a0)
+; RV32I-NEXT: lbu a3, 21(a0)
+; RV32I-NEXT: lbu a4, 20(a0)
+; RV32I-NEXT: lbu a6, 22(a0)
+; RV32I-NEXT: lbu t5, 23(a0)
+; RV32I-NEXT: slli a3, a3, 8
+; RV32I-NEXT: or a3, a3, a4
+; RV32I-NEXT: slli a6, a6, 16
+; RV32I-NEXT: slli t5, t5, 24
+; RV32I-NEXT: or a4, t5, a6
+; RV32I-NEXT: or a3, a4, a3
+; RV32I-NEXT: lbu a4, 17(a0)
+; RV32I-NEXT: lbu a6, 16(a0)
+; RV32I-NEXT: lbu t5, 18(a0)
+; RV32I-NEXT: lbu s4, 19(a0)
+; RV32I-NEXT: slli a4, a4, 8
+; RV32I-NEXT: or s8, a4, a6
+; RV32I-NEXT: slli t5, t5, 16
+; RV32I-NEXT: slli s4, s4, 24
+; RV32I-NEXT: or a6, s4, t5
+; RV32I-NEXT: lbu a4, 29(a0)
+; RV32I-NEXT: lbu t5, 28(a0)
+; RV32I-NEXT: lbu s4, 30(a0)
+; RV32I-NEXT: lbu s10, 31(a0)
+; RV32I-NEXT: slli a4, a4, 8
+; RV32I-NEXT: or a4, a4, t5
+; RV32I-NEXT: slli t5, s4, 16
+; RV32I-NEXT: slli s4, s10, 24
+; RV32I-NEXT: or t5, s4, t5
+; RV32I-NEXT: or a4, t5, a4
+; RV32I-NEXT: lbu t5, 25(a0)
+; RV32I-NEXT: lbu s10, 24(a0)
+; RV32I-NEXT: lbu s11, 26(a0)
+; RV32I-NEXT: lbu a0, 27(a0)
+; RV32I-NEXT: slli t5, t5, 8
+; RV32I-NEXT: or t5, t5, s10
+; RV32I-NEXT: slli s11, s11, 16
+; RV32I-NEXT: slli a0, a0, 24
+; RV32I-NEXT: or a0, a0, s11
+; RV32I-NEXT: or s11, a0, t5
+; RV32I-NEXT: lbu a0, 1(a1)
+; RV32I-NEXT: lbu t5, 0(a1)
+; RV32I-NEXT: lbu s10, 2(a1)
+; RV32I-NEXT: lbu a1, 3(a1)
+; RV32I-NEXT: slli a0, a0, 8
+; RV32I-NEXT: or a0, a0, t5
+; RV32I-NEXT: slli s10, s10, 16
+; RV32I-NEXT: slli a1, a1, 24
+; RV32I-NEXT: or a1, a1, s10
+; RV32I-NEXT: or a0, a1, a0
+; RV32I-NEXT: slli a1, a0, 3
+; RV32I-NEXT: addi t5, a1, -192
+; RV32I-NEXT: addi a0, a1, -224
+; RV32I-NEXT: slli s10, a4, 1
+; RV32I-NEXT: sw s11, 72(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s10, 64(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw t5, 20(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw a0, 40(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz a0, .LBB11_2
+; RV32I-NEXT: # %bb.1:
+; RV32I-NEXT: sra a0, a4, a0
+; RV32I-NEXT: j .LBB11_3
+; RV32I-NEXT: .LBB11_2:
+; RV32I-NEXT: srl a0, s11, t5
+; RV32I-NEXT: xori t5, t5, 31
+; RV32I-NEXT: sll t5, s10, t5
+; RV32I-NEXT: or a0, a0, t5
+; RV32I-NEXT: .LBB11_3:
+; RV32I-NEXT: slli s10, t6, 8
+; RV32I-NEXT: slli s11, s3, 16
+; RV32I-NEXT: slli ra, s5, 24
+; RV32I-NEXT: or t5, a6, s8
+; RV32I-NEXT: addi s3, a1, -128
+; RV32I-NEXT: slli t6, a3, 1
+; RV32I-NEXT: addi s5, a1, -160
+; RV32I-NEXT: xori s8, s3, 31
+; RV32I-NEXT: sw t6, 60(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s8, 32(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz s5, .LBB11_5
+; RV32I-NEXT: # %bb.4:
+; RV32I-NEXT: srl t6, a3, s5
+; RV32I-NEXT: j .LBB11_6
+; RV32I-NEXT: .LBB11_5:
+; RV32I-NEXT: srl a6, t5, s3
+; RV32I-NEXT: sll t6, t6, s8
+; RV32I-NEXT: or t6, a6, t6
+; RV32I-NEXT: .LBB11_6:
+; RV32I-NEXT: slli s2, s2, 8
+; RV32I-NEXT: slli s6, s6, 16
+; RV32I-NEXT: slli s7, s7, 24
+; RV32I-NEXT: or a6, s10, t1
+; RV32I-NEXT: or s8, ra, s11
+; RV32I-NEXT: neg ra, a1
+; RV32I-NEXT: lw t1, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll s11, t1, ra
+; RV32I-NEXT: li s10, 160
+; RV32I-NEXT: li t1, 64
+; RV32I-NEXT: sub s10, s10, a1
+; RV32I-NEXT: sw s11, 68(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bgeu s3, t1, .LBB11_8
+; RV32I-NEXT: # %bb.7:
+; RV32I-NEXT: slti a0, s10, 0
+; RV32I-NEXT: neg a0, a0
+; RV32I-NEXT: and a0, a0, s11
+; RV32I-NEXT: or a0, t6, a0
+; RV32I-NEXT: .LBB11_8:
+; RV32I-NEXT: slli t6, a5, 8
+; RV32I-NEXT: slli t2, t2, 16
+; RV32I-NEXT: slli t3, t3, 24
+; RV32I-NEXT: slli t4, t4, 8
+; RV32I-NEXT: slli s9, s9, 16
+; RV32I-NEXT: slli s0, s0, 24
+; RV32I-NEXT: or s1, s2, s1
+; RV32I-NEXT: or s2, s7, s6
+; RV32I-NEXT: or a5, s8, a6
+; RV32I-NEXT: mv s7, t5
+; RV32I-NEXT: beqz s3, .LBB11_10
+; RV32I-NEXT: # %bb.9:
+; RV32I-NEXT: mv s7, a0
+; RV32I-NEXT: .LBB11_10:
+; RV32I-NEXT: or a0, t6, a7
+; RV32I-NEXT: or a7, t3, t2
+; RV32I-NEXT: or t0, t4, t0
+; RV32I-NEXT: or t2, s0, s9
+; RV32I-NEXT: or s1, s2, s1
+; RV32I-NEXT: addi t6, a1, -64
+; RV32I-NEXT: slli s8, a5, 1
+; RV32I-NEXT: addi s0, a1, -96
+; RV32I-NEXT: xori t3, t6, 31
+; RV32I-NEXT: sw t3, 24(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz s0, .LBB11_12
+; RV32I-NEXT: # %bb.11:
+; RV32I-NEXT: srl a6, a5, s0
+; RV32I-NEXT: j .LBB11_13
+; RV32I-NEXT: .LBB11_12:
+; RV32I-NEXT: srl a6, s1, t6
+; RV32I-NEXT: sll t3, s8, t3
+; RV32I-NEXT: or a6, a6, t3
+; RV32I-NEXT: .LBB11_13:
+; RV32I-NEXT: or s11, a7, a0
+; RV32I-NEXT: or t2, t2, t0
+; RV32I-NEXT: addi t4, a1, -32
+; RV32I-NEXT: xori s9, a1, 31
+; RV32I-NEXT: bltz t4, .LBB11_15
+; RV32I-NEXT: # %bb.14:
+; RV32I-NEXT: srl a7, s11, t4
+; RV32I-NEXT: j .LBB11_16
+; RV32I-NEXT: .LBB11_15:
+; RV32I-NEXT: srl a0, t2, a1
+; RV32I-NEXT: slli a7, s11, 1
+; RV32I-NEXT: sll a7, a7, s9
+; RV32I-NEXT: or a7, a0, a7
+; RV32I-NEXT: .LBB11_16:
+; RV32I-NEXT: sll t3, s1, ra
+; RV32I-NEXT: li a0, 32
+; RV32I-NEXT: sub s6, a0, a1
+; RV32I-NEXT: slti t0, s6, 0
+; RV32I-NEXT: neg t0, t0
+; RV32I-NEXT: bgeu a1, t1, .LBB11_18
+; RV32I-NEXT: # %bb.17:
+; RV32I-NEXT: and a6, t0, t3
+; RV32I-NEXT: or a6, a7, a6
+; RV32I-NEXT: .LBB11_18:
+; RV32I-NEXT: sw s10, 36(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw t0, 44(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw s0, 52(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw t6, 56(sp) # 4-byte Folded Spill
+; RV32I-NEXT: mv t0, t2
+; RV32I-NEXT: beqz a1, .LBB11_20
+; RV32I-NEXT: # %bb.19:
+; RV32I-NEXT: mv t0, a6
+; RV32I-NEXT: .LBB11_20:
+; RV32I-NEXT: sll a6, t5, ra
+; RV32I-NEXT: li a7, 96
+; RV32I-NEXT: sub s10, a7, a1
+; RV32I-NEXT: slti a7, s10, 0
+; RV32I-NEXT: neg a7, a7
+; RV32I-NEXT: li s0, 128
+; RV32I-NEXT: sub s2, s0, a1
+; RV32I-NEXT: sltiu t6, s2, 64
+; RV32I-NEXT: neg t6, t6
+; RV32I-NEXT: sw t6, 8(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bgeu a1, s0, .LBB11_22
+; RV32I-NEXT: # %bb.21:
+; RV32I-NEXT: mv s0, t6
+; RV32I-NEXT: and t6, a7, a6
+; RV32I-NEXT: and t6, s0, t6
+; RV32I-NEXT: or s7, t0, t6
+; RV32I-NEXT: .LBB11_22:
+; RV32I-NEXT: beqz a1, .LBB11_24
+; RV32I-NEXT: # %bb.23:
+; RV32I-NEXT: mv t2, s7
+; RV32I-NEXT: .LBB11_24:
+; RV32I-NEXT: neg t0, s2
+; RV32I-NEXT: sub t6, a0, s2
+; RV32I-NEXT: srl a0, a3, t0
+; RV32I-NEXT: sw t6, 12(sp) # 4-byte Folded Spill
+; RV32I-NEXT: sw a0, 16(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bgez t6, .LBB11_26
+; RV32I-NEXT: # %bb.25:
+; RV32I-NEXT: srl a0, t5, t0
+; RV32I-NEXT: sub t0, t1, s2
+; RV32I-NEXT: xori t0, t0, 31
+; RV32I-NEXT: lw t6, 60(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll t0, t6, t0
+; RV32I-NEXT: or a0, a0, t0
+; RV32I-NEXT: .LBB11_26:
+; RV32I-NEXT: lw s7, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltu s2, t1, .LBB11_28
+; RV32I-NEXT: # %bb.27:
+; RV32I-NEXT: lw a0, 44(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and a7, a0, a6
+; RV32I-NEXT: mv a0, s7
+; RV32I-NEXT: bnez s2, .LBB11_29
+; RV32I-NEXT: j .LBB11_30
+; RV32I-NEXT: .LBB11_28:
+; RV32I-NEXT: lw t0, 68(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and a7, a7, t0
+; RV32I-NEXT: or a7, a7, a0
+; RV32I-NEXT: mv a0, s7
+; RV32I-NEXT: beqz s2, .LBB11_30
+; RV32I-NEXT: .LBB11_29:
+; RV32I-NEXT: mv a0, a7
+; RV32I-NEXT: .LBB11_30:
+; RV32I-NEXT: bltz t4, .LBB11_32
+; RV32I-NEXT: # %bb.31:
+; RV32I-NEXT: srl a7, a5, t4
+; RV32I-NEXT: j .LBB11_33
+; RV32I-NEXT: .LBB11_32:
+; RV32I-NEXT: srl a7, s1, a1
+; RV32I-NEXT: sll t0, s8, s9
+; RV32I-NEXT: or a7, a7, t0
+; RV32I-NEXT: .LBB11_33:
+; RV32I-NEXT: li s8, 128
+; RV32I-NEXT: sw s9, 48(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltz s5, .LBB11_35
+; RV32I-NEXT: # %bb.34:
+; RV32I-NEXT: sra t0, a4, s5
+; RV32I-NEXT: j .LBB11_36
+; RV32I-NEXT: .LBB11_35:
+; RV32I-NEXT: srl t0, s7, s3
+; RV32I-NEXT: lw t6, 64(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s9, 32(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll t6, t6, s9
+; RV32I-NEXT: or t0, t0, t6
+; RV32I-NEXT: .LBB11_36:
+; RV32I-NEXT: sltiu t6, a1, 64
+; RV32I-NEXT: srai s9, s4, 31
+; RV32I-NEXT: bgeu s3, t1, .LBB11_44
+; RV32I-NEXT: # %bb.37:
+; RV32I-NEXT: neg s0, t6
+; RV32I-NEXT: bltu a1, s8, .LBB11_45
+; RV32I-NEXT: .LBB11_38:
+; RV32I-NEXT: mv s4, s1
+; RV32I-NEXT: beqz a1, .LBB11_40
+; RV32I-NEXT: .LBB11_39:
+; RV32I-NEXT: mv s4, t0
+; RV32I-NEXT: .LBB11_40:
+; RV32I-NEXT: sub a0, t1, a1
+; RV32I-NEXT: xori t0, a0, 31
+; RV32I-NEXT: bgez s6, .LBB11_42
+; RV32I-NEXT: # %bb.41:
+; RV32I-NEXT: sll a0, a5, ra
+; RV32I-NEXT: srli s1, s1, 1
+; RV32I-NEXT: srl a7, s1, t0
+; RV32I-NEXT: or t3, a0, a7
+; RV32I-NEXT: .LBB11_42:
+; RV32I-NEXT: slti a0, t4, 0
+; RV32I-NEXT: neg a7, a0
+; RV32I-NEXT: sw a7, 32(sp) # 4-byte Folded Spill
+; RV32I-NEXT: bltu a1, t1, .LBB11_46
+; RV32I-NEXT: # %bb.43:
+; RV32I-NEXT: lw a0, 56(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a0, a5, a0
+; RV32I-NEXT: lw a7, 52(sp) # 4-byte Folded Reload
+; RV32I-NEXT: slti a7, a7, 0
+; RV32I-NEXT: neg a7, a7
+; RV32I-NEXT: and a0, a7, a0
+; RV32I-NEXT: j .LBB11_47
+; RV32I-NEXT: .LBB11_44:
+; RV32I-NEXT: mv t0, s9
+; RV32I-NEXT: neg s0, t6
+; RV32I-NEXT: bgeu a1, s8, .LBB11_38
+; RV32I-NEXT: .LBB11_45:
+; RV32I-NEXT: and a7, s0, a7
+; RV32I-NEXT: or t0, a7, a0
+; RV32I-NEXT: mv s4, s1
+; RV32I-NEXT: bnez a1, .LBB11_39
+; RV32I-NEXT: j .LBB11_40
+; RV32I-NEXT: .LBB11_46:
+; RV32I-NEXT: srl a0, s11, a1
+; RV32I-NEXT: and a0, a7, a0
+; RV32I-NEXT: or a0, a0, t3
+; RV32I-NEXT: .LBB11_47:
+; RV32I-NEXT: sw t0, 28(sp) # 4-byte Folded Spill
+; RV32I-NEXT: mv t0, s11
+; RV32I-NEXT: beqz a1, .LBB11_49
+; RV32I-NEXT: # %bb.48:
+; RV32I-NEXT: mv t0, a0
+; RV32I-NEXT: .LBB11_49:
+; RV32I-NEXT: sll t6, a3, ra
+; RV32I-NEXT: srli a0, t5, 1
+; RV32I-NEXT: xori t3, s2, 31
+; RV32I-NEXT: bltz s10, .LBB11_51
+; RV32I-NEXT: # %bb.50:
+; RV32I-NEXT: mv a7, a6
+; RV32I-NEXT: j .LBB11_52
+; RV32I-NEXT: .LBB11_51:
+; RV32I-NEXT: srl a7, a0, t3
+; RV32I-NEXT: or a7, t6, a7
+; RV32I-NEXT: .LBB11_52:
+; RV32I-NEXT: sll ra, a4, ra
+; RV32I-NEXT: srli s1, s7, 1
+; RV32I-NEXT: lw s7, 36(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltz s7, .LBB11_55
+; RV32I-NEXT: # %bb.53:
+; RV32I-NEXT: lw s7, 68(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s8, 40(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bgez s8, .LBB11_56
+; RV32I-NEXT: .LBB11_54:
+; RV32I-NEXT: lw s8, 20(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sra s8, a4, s8
+; RV32I-NEXT: bltu s3, t1, .LBB11_57
+; RV32I-NEXT: j .LBB11_58
+; RV32I-NEXT: .LBB11_55:
+; RV32I-NEXT: li s7, 192
+; RV32I-NEXT: sub s7, s7, a1
+; RV32I-NEXT: xori s7, s7, 31
+; RV32I-NEXT: srl s7, s1, s7
+; RV32I-NEXT: or s7, ra, s7
+; RV32I-NEXT: lw s8, 40(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltz s8, .LBB11_54
+; RV32I-NEXT: .LBB11_56:
+; RV32I-NEXT: mv s8, s9
+; RV32I-NEXT: bgeu s3, t1, .LBB11_58
+; RV32I-NEXT: .LBB11_57:
+; RV32I-NEXT: slti s8, s5, 0
+; RV32I-NEXT: mv t1, a4
+; RV32I-NEXT: mv a4, t2
+; RV32I-NEXT: mv t2, s6
+; RV32I-NEXT: mv s6, s1
+; RV32I-NEXT: mv s1, ra
+; RV32I-NEXT: srl ra, a3, s3
+; RV32I-NEXT: neg s8, s8
+; RV32I-NEXT: and s8, s8, ra
+; RV32I-NEXT: mv ra, s1
+; RV32I-NEXT: mv s1, s6
+; RV32I-NEXT: mv s6, t2
+; RV32I-NEXT: mv t2, a4
+; RV32I-NEXT: mv a4, t1
+; RV32I-NEXT: li t1, 64
+; RV32I-NEXT: or s8, s8, s7
+; RV32I-NEXT: .LBB11_58:
+; RV32I-NEXT: mv s7, a3
+; RV32I-NEXT: bnez s3, .LBB11_65
+; RV32I-NEXT: # %bb.59:
+; RV32I-NEXT: li s8, 128
+; RV32I-NEXT: bltu a1, s8, .LBB11_66
+; RV32I-NEXT: .LBB11_60:
+; RV32I-NEXT: lw a7, 60(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bnez a1, .LBB11_67
+; RV32I-NEXT: .LBB11_61:
+; RV32I-NEXT: bgez s6, .LBB11_63
+; RV32I-NEXT: .LBB11_62:
+; RV32I-NEXT: lw a6, 28(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a0, a0, a6
+; RV32I-NEXT: or a6, t6, a0
+; RV32I-NEXT: .LBB11_63:
+; RV32I-NEXT: lw t0, 72(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw t6, 68(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltz s10, .LBB11_68
+; RV32I-NEXT: # %bb.64:
+; RV32I-NEXT: mv a0, t6
+; RV32I-NEXT: lw t3, 64(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltu s2, t1, .LBB11_69
+; RV32I-NEXT: j .LBB11_70
+; RV32I-NEXT: .LBB11_65:
+; RV32I-NEXT: mv s7, s8
+; RV32I-NEXT: li s8, 128
+; RV32I-NEXT: bgeu a1, s8, .LBB11_60
+; RV32I-NEXT: .LBB11_66:
+; RV32I-NEXT: lw s7, 8(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and a7, s7, a7
+; RV32I-NEXT: or s7, t0, a7
+; RV32I-NEXT: lw a7, 60(sp) # 4-byte Folded Reload
+; RV32I-NEXT: beqz a1, .LBB11_61
+; RV32I-NEXT: .LBB11_67:
+; RV32I-NEXT: mv s11, s7
+; RV32I-NEXT: bltz s6, .LBB11_62
+; RV32I-NEXT: j .LBB11_63
+; RV32I-NEXT: .LBB11_68:
+; RV32I-NEXT: srl a0, s1, t3
+; RV32I-NEXT: or a0, ra, a0
+; RV32I-NEXT: lw t3, 64(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bgeu s2, t1, .LBB11_70
+; RV32I-NEXT: .LBB11_69:
+; RV32I-NEXT: lw a6, 12(sp) # 4-byte Folded Reload
+; RV32I-NEXT: slti a6, a6, 0
+; RV32I-NEXT: neg a6, a6
+; RV32I-NEXT: lw s7, 16(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and a6, a6, s7
+; RV32I-NEXT: or a6, a0, a6
+; RV32I-NEXT: .LBB11_70:
+; RV32I-NEXT: mv a0, a4
+; RV32I-NEXT: bnez s2, .LBB11_73
+; RV32I-NEXT: # %bb.71:
+; RV32I-NEXT: bltz s5, .LBB11_74
+; RV32I-NEXT: .LBB11_72:
+; RV32I-NEXT: mv a6, s9
+; RV32I-NEXT: lw s2, 52(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bgeu s3, t1, .LBB11_75
+; RV32I-NEXT: j .LBB11_76
+; RV32I-NEXT: .LBB11_73:
+; RV32I-NEXT: mv a0, a6
+; RV32I-NEXT: bgez s5, .LBB11_72
+; RV32I-NEXT: .LBB11_74:
+; RV32I-NEXT: sra a6, a4, s3
+; RV32I-NEXT: lw s2, 52(sp) # 4-byte Folded Reload
+; RV32I-NEXT: bltu s3, t1, .LBB11_76
+; RV32I-NEXT: .LBB11_75:
+; RV32I-NEXT: mv a6, s9
+; RV32I-NEXT: .LBB11_76:
+; RV32I-NEXT: bltu a1, s8, .LBB11_81
+; RV32I-NEXT: # %bb.77:
+; RV32I-NEXT: bnez a1, .LBB11_82
+; RV32I-NEXT: .LBB11_78:
+; RV32I-NEXT: bltz s2, .LBB11_83
+; RV32I-NEXT: .LBB11_79:
+; RV32I-NEXT: sra a0, a4, s2
+; RV32I-NEXT: bgez t4, .LBB11_84
+; RV32I-NEXT: .LBB11_80:
+; RV32I-NEXT: srl a6, t5, a1
+; RV32I-NEXT: lw s0, 48(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll a7, a7, s0
+; RV32I-NEXT: or a6, a6, a7
+; RV32I-NEXT: bltu a1, t1, .LBB11_85
+; RV32I-NEXT: j .LBB11_86
+; RV32I-NEXT: .LBB11_81:
+; RV32I-NEXT: srl a6, a5, a1
+; RV32I-NEXT: lw s3, 32(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and a6, s3, a6
+; RV32I-NEXT: and a6, s0, a6
+; RV32I-NEXT: or a6, a6, a0
+; RV32I-NEXT: beqz a1, .LBB11_78
+; RV32I-NEXT: .LBB11_82:
+; RV32I-NEXT: mv a5, a6
+; RV32I-NEXT: bgez s2, .LBB11_79
+; RV32I-NEXT: .LBB11_83:
+; RV32I-NEXT: lw a0, 56(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a0, t0, a0
+; RV32I-NEXT: lw a6, 24(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll a6, t3, a6
+; RV32I-NEXT: or a0, a0, a6
+; RV32I-NEXT: bltz t4, .LBB11_80
+; RV32I-NEXT: .LBB11_84:
+; RV32I-NEXT: srl a6, a3, t4
+; RV32I-NEXT: bgeu a1, t1, .LBB11_86
+; RV32I-NEXT: .LBB11_85:
+; RV32I-NEXT: lw a0, 44(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and a0, a0, t6
+; RV32I-NEXT: or a0, a6, a0
+; RV32I-NEXT: .LBB11_86:
+; RV32I-NEXT: bnez a1, .LBB11_91
+; RV32I-NEXT: # %bb.87:
+; RV32I-NEXT: bgeu a1, s8, .LBB11_92
+; RV32I-NEXT: .LBB11_88:
+; RV32I-NEXT: bltz s6, .LBB11_93
+; RV32I-NEXT: .LBB11_89:
+; RV32I-NEXT: bltz s2, .LBB11_94
+; RV32I-NEXT: .LBB11_90:
+; RV32I-NEXT: mv a0, s9
+; RV32I-NEXT: bltu a1, t1, .LBB11_95
+; RV32I-NEXT: j .LBB11_96
+; RV32I-NEXT: .LBB11_91:
+; RV32I-NEXT: mv t5, a0
+; RV32I-NEXT: bltu a1, s8, .LBB11_88
+; RV32I-NEXT: .LBB11_92:
+; RV32I-NEXT: mv t5, s9
+; RV32I-NEXT: bgez s6, .LBB11_89
+; RV32I-NEXT: .LBB11_93:
+; RV32I-NEXT: lw a0, 28(sp) # 4-byte Folded Reload
+; RV32I-NEXT: srl a0, s1, a0
+; RV32I-NEXT: or t6, ra, a0
+; RV32I-NEXT: bgez s2, .LBB11_90
+; RV32I-NEXT: .LBB11_94:
+; RV32I-NEXT: lw a0, 56(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sra a0, a4, a0
+; RV32I-NEXT: bgeu a1, t1, .LBB11_96
+; RV32I-NEXT: .LBB11_95:
+; RV32I-NEXT: srl a0, a3, a1
+; RV32I-NEXT: lw a6, 32(sp) # 4-byte Folded Reload
+; RV32I-NEXT: and a0, a6, a0
+; RV32I-NEXT: or a0, a0, t6
+; RV32I-NEXT: .LBB11_96:
+; RV32I-NEXT: bnez a1, .LBB11_100
+; RV32I-NEXT: # %bb.97:
+; RV32I-NEXT: bgeu a1, s8, .LBB11_101
+; RV32I-NEXT: .LBB11_98:
+; RV32I-NEXT: bltz t4, .LBB11_102
+; RV32I-NEXT: .LBB11_99:
+; RV32I-NEXT: sra a0, a4, t4
+; RV32I-NEXT: bgeu a1, t1, .LBB11_103
+; RV32I-NEXT: j .LBB11_104
+; RV32I-NEXT: .LBB11_100:
+; RV32I-NEXT: mv a3, a0
+; RV32I-NEXT: bltu a1, s8, .LBB11_98
+; RV32I-NEXT: .LBB11_101:
+; RV32I-NEXT: mv a3, s9
+; RV32I-NEXT: bgez t4, .LBB11_99
+; RV32I-NEXT: .LBB11_102:
+; RV32I-NEXT: srl a0, t0, a1
+; RV32I-NEXT: lw a6, 48(sp) # 4-byte Folded Reload
+; RV32I-NEXT: sll a6, t3, a6
+; RV32I-NEXT: or a0, a0, a6
+; RV32I-NEXT: bltu a1, t1, .LBB11_104
+; RV32I-NEXT: .LBB11_103:
+; RV32I-NEXT: mv a0, s9
+; RV32I-NEXT: .LBB11_104:
+; RV32I-NEXT: bgeu a1, s8, .LBB11_107
+; RV32I-NEXT: # %bb.105:
+; RV32I-NEXT: bltz t4, .LBB11_108
+; RV32I-NEXT: .LBB11_106:
+; RV32I-NEXT: mv a4, s9
+; RV32I-NEXT: bgeu a1, t1, .LBB11_109
+; RV32I-NEXT: j .LBB11_110
+; RV32I-NEXT: .LBB11_107:
+; RV32I-NEXT: mv a0, s9
+; RV32I-NEXT: bgez t4, .LBB11_106
+; RV32I-NEXT: .LBB11_108:
+; RV32I-NEXT: sra a4, a4, a1
+; RV32I-NEXT: bltu a1, t1, .LBB11_110
+; RV32I-NEXT: .LBB11_109:
+; RV32I-NEXT: mv a4, s9
+; RV32I-NEXT: .LBB11_110:
+; RV32I-NEXT: bltu a1, s8, .LBB11_112
+; RV32I-NEXT: # %bb.111:
+; RV32I-NEXT: mv a4, s9
+; RV32I-NEXT: .LBB11_112:
+; RV32I-NEXT: sb a4, 28(a2)
+; RV32I-NEXT: srli a1, a4, 24
+; RV32I-NEXT: sb a1, 31(a2)
+; RV32I-NEXT: srli a1, a4, 16
+; RV32I-NEXT: sb a1, 30(a2)
+; RV32I-NEXT: srli a4, a4, 8
+; RV32I-NEXT: sb a4, 29(a2)
+; RV32I-NEXT: sb a0, 24(a2)
+; RV32I-NEXT: srli a1, a0, 24
+; RV32I-NEXT: sb a1, 27(a2)
+; RV32I-NEXT: srli a1, a0, 16
+; RV32I-NEXT: sb a1, 26(a2)
+; RV32I-NEXT: srli a0, a0, 8
+; RV32I-NEXT: sb a0, 25(a2)
+; RV32I-NEXT: sb t5, 16(a2)
+; RV32I-NEXT: srli a0, t5, 24
+; RV32I-NEXT: sb a0, 19(a2)
+; RV32I-NEXT: srli a0, t5, 16
+; RV32I-NEXT: sb a0, 18(a2)
+; RV32I-NEXT: srli a0, t5, 8
+; RV32I-NEXT: sb a0, 17(a2)
+; RV32I-NEXT: sb a3, 20(a2)
+; RV32I-NEXT: srli a0, a3, 24
+; RV32I-NEXT: sb a0, 23(a2)
+; RV32I-NEXT: srli a0, a3, 16
+; RV32I-NEXT: sb a0, 22(a2)
+; RV32I-NEXT: srli a3, a3, 8
+; RV32I-NEXT: sb a3, 21(a2)
+; RV32I-NEXT: sb t2, 0(a2)
+; RV32I-NEXT: sb a5, 12(a2)
+; RV32I-NEXT: srli a0, t2, 24
+; RV32I-NEXT: sb a0, 3(a2)
+; RV32I-NEXT: srli a0, t2, 16
+; RV32I-NEXT: sb a0, 2(a2)
+; RV32I-NEXT: srli a0, t2, 8
+; RV32I-NEXT: sb a0, 1(a2)
+; RV32I-NEXT: sb s11, 4(a2)
+; RV32I-NEXT: sb s4, 8(a2)
+; RV32I-NEXT: srli a0, a5, 24
+; RV32I-NEXT: sb a0, 15(a2)
+; RV32I-NEXT: srli a0, a5, 16
+; RV32I-NEXT: sb a0, 14(a2)
+; RV32I-NEXT: srli a5, a5, 8
+; RV32I-NEXT: sb a5, 13(a2)
+; RV32I-NEXT: srli a0, s11, 24
+; RV32I-NEXT: sb a0, 7(a2)
+; RV32I-NEXT: srli a0, s11, 16
+; RV32I-NEXT: sb a0, 6(a2)
+; RV32I-NEXT: srli a0, s11, 8
+; RV32I-NEXT: sb a0, 5(a2)
+; RV32I-NEXT: srli a0, s4, 24
+; RV32I-NEXT: sb a0, 11(a2)
+; RV32I-NEXT: srli a0, s4, 16
+; RV32I-NEXT: sb a0, 10(a2)
+; RV32I-NEXT: srli a0, s4, 8
+; RV32I-NEXT: sb a0, 9(a2)
+; RV32I-NEXT: lw ra, 124(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s0, 120(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s1, 116(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s2, 112(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s3, 108(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s4, 104(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s5, 100(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s6, 96(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s7, 92(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s8, 88(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s9, 84(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s10, 80(sp) # 4-byte Folded Reload
+; RV32I-NEXT: lw s11, 76(sp) # 4-byte Folded Reload
+; RV32I-NEXT: addi sp, sp, 128
+; RV32I-NEXT: ret
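+; The IR below exercises a 32-byte (i256) arithmetic right shift: the source
+; value and the byte offset are loaded with align 1 (so the backend expands
+; them into byte loads), the byte offset is scaled to a bit offset with
+; `shl i256 %byteOff, 3`, and the result is stored back a byte at a time.
+; Because the shift is arithmetic, the lowering needs a sign-fill word,
+; visible in the RV32I output above as `srai s9, s4, 31`.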
+ %src = load i256, ptr %src.ptr, align 1
+ %byteOff = load i256, ptr %byteOff.ptr, align 1
+ %bitOff = shl i256 %byteOff, 3
+ %res = ashr i256 %src, %bitOff
+ store i256 %res, ptr %dst, align 1
+ ret void
+}
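+; Note: RV32I has no shift wider than XLEN, so the i256 expansion above
+; selects among word-granularity shift/fill combinations by comparing the
+; bit offset against the 32/64/96/128/160/192/224 thresholds with the
+; compare-and-branch chains through .LBB11_112.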
+;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
+; ALL: {{.*}}