;
; RV64I-LABEL: rotl_64_mask:
; RV64I: # %bb.0:
-; RV64I-NEXT: neg a2, a1
+; RV64I-NEXT: negw a2, a1
; RV64I-NEXT: sll a1, a0, a1
; RV64I-NEXT: srl a0, a0, a2
; RV64I-NEXT: or a0, a1, a0
; RV64I-LABEL: rotl_64_mask_and_127_and_63:
; RV64I: # %bb.0:
; RV64I-NEXT: sll a2, a0, a1
-; RV64I-NEXT: neg a1, a1
+; RV64I-NEXT: negw a1, a1
; RV64I-NEXT: srl a0, a0, a1
; RV64I-NEXT: or a0, a2, a0
; RV64I-NEXT: ret
;
; RV64I-LABEL: rotr_64_mask:
; RV64I: # %bb.0:
-; RV64I-NEXT: neg a2, a1
+; RV64I-NEXT: negw a2, a1
; RV64I-NEXT: srl a1, a0, a1
; RV64I-NEXT: sll a0, a0, a2
; RV64I-NEXT: or a0, a1, a0
; RV64I-LABEL: rotr_64_mask_and_127_and_63:
; RV64I: # %bb.0:
; RV64I-NEXT: srl a2, a0, a1
-; RV64I-NEXT: neg a1, a1
+; RV64I-NEXT: negw a1, a1
; RV64I-NEXT: sll a0, a0, a1
; RV64I-NEXT: or a0, a2, a0
; RV64I-NEXT: ret
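;
; The rotl/rotr checks above all expect the same lowering for a variable
; 64-bit rotate without Zbb: shift one way by the amount, negate the amount,
; shift the other way, and OR the halves. SLL/SRL on RV64 consume only bits
; [5:0] of the amount register, and NEG and NEGW agree in those bits, which is
; why NEGW is a correct replacement for NEG in these sequences. A minimal IR
; sketch of the pattern being lowered (function and value names are
; illustrative, not taken from the test file):
define i64 @rotl_64_sketch(i64 %x, i64 %n) {
  %shamt = and i64 %n, 63        ; rotate amount, already < 64
  %neg = sub i64 0, %n           ; only bits [5:0] of this reach the SRL
  %negamt = and i64 %neg, 63
  %hi = shl i64 %x, %shamt
  %lo = lshr i64 %x, %negamt
  %res = or i64 %hi, %lo         ; rotl(x, n mod 64)
  ret i64 %res
}
; rotr is the mirror image: lshr by the amount, shl by its negation, then OR.
;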
; RV64I-LABEL: rotl_64_mask_shared:
; RV64I: # %bb.0:
; RV64I-NEXT: sll a3, a0, a2
-; RV64I-NEXT: neg a4, a2
+; RV64I-NEXT: negw a4, a2
; RV64I-NEXT: srl a0, a0, a4
; RV64I-NEXT: or a0, a3, a0
; RV64I-NEXT: sll a1, a1, a2
; RV64I-LABEL: rotr_64_mask_shared:
; RV64I: # %bb.0:
; RV64I-NEXT: srl a3, a0, a2
-; RV64I-NEXT: neg a4, a2
+; RV64I-NEXT: negw a4, a2
; RV64I-NEXT: sll a0, a0, a4
; RV64I-NEXT: or a0, a3, a0
; RV64I-NEXT: sll a1, a1, a2
; RV64I-LABEL: rotl_64_mask_multiple:
; RV64I: # %bb.0:
; RV64I-NEXT: sll a3, a0, a2
-; RV64I-NEXT: neg a4, a2
+; RV64I-NEXT: negw a4, a2
; RV64I-NEXT: srl a0, a0, a4
; RV64I-NEXT: or a0, a3, a0
; RV64I-NEXT: sll a2, a1, a2
; RV64I-LABEL: rotr_64_mask_multiple:
; RV64I: # %bb.0:
; RV64I-NEXT: srl a3, a0, a2
-; RV64I-NEXT: neg a4, a2
+; RV64I-NEXT: negw a4, a2
; RV64I-NEXT: sll a0, a0, a4
; RV64I-NEXT: or a0, a3, a0
; RV64I-NEXT: srl a2, a1, a2
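;
; The *_mask_shared and *_mask_multiple checks cover the case where the masked
; amount feeds more than one shift or rotate: the negated amount should be
; materialized once (the single negw above) and reused. A hedged IR sketch of
; the "multiple" shape, with illustrative names and an explicit shl/lshr/or
; rotate rather than a reproduction of the real test body:
define i64 @rotl_64_multiple_sketch(i64 %a, i64 %b, i64 %amt) {
  %shamt = and i64 %amt, 63
  %neg = sub i64 0, %amt         ; computed once, used by both rotates
  %negamt = and i64 %neg, 63
  %ahi = shl i64 %a, %shamt
  %alo = lshr i64 %a, %negamt
  %arot = or i64 %ahi, %alo
  %bhi = shl i64 %b, %shamt
  %blo = lshr i64 %b, %negamt
  %brot = or i64 %bhi, %blo
  %res = add i64 %arot, %brot
  ret i64 %res
}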
;
; RV64I-LABEL: sll_redundant_mask_zeros_i64:
; RV64I: # %bb.0:
-; RV64I-NEXT: slli a1, a1, 2
+; RV64I-NEXT: slliw a1, a1, 2
; RV64I-NEXT: sll a0, a0, a1
; RV64I-NEXT: ret
%1 = shl i64 %b, 2
;
; RV64I-LABEL: srl_redundant_mask_zeros_i64:
; RV64I: # %bb.0:
-; RV64I-NEXT: slli a1, a1, 3
+; RV64I-NEXT: slliw a1, a1, 3
; RV64I-NEXT: srl a0, a0, a1
; RV64I-NEXT: ret
%1 = shl i64 %b, 3
;
; RV64I-LABEL: sra_redundant_mask_zeros_i64:
; RV64I: # %bb.0:
-; RV64I-NEXT: slli a1, a1, 4
+; RV64I-NEXT: slliw a1, a1, 4
; RV64I-NEXT: sra a0, a0, a1
; RV64I-NEXT: ret
%1 = shl i64 %b, 4
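;
; The *_redundant_mask_zeros checks shift the amount left first, so its low
; bits are known zero, and then mask it. Because SLL/SRL/SRA read only bits
; [5:0] of the amount register, the AND folds away entirely, and the amount
; computation itself may use SLLIW instead of SLLI, since the two agree in
; bits [5:0]. A hedged IR sketch of the sra variant (the function name and the
; mask constant 48 are illustrative; 48 keeps bits [5:4], which together with
; the known-zero low bits is the same as masking with 63):
define i64 @sra_redundant_mask_zeros_sketch(i64 %a, i64 %b) {
  %shamt = shl i64 %b, 4         ; bits [3:0] of the amount are known zero
  %masked = and i64 %shamt, 48   ; redundant: SRA only consumes bits [5:0]
  %res = ashr i64 %a, %masked
  ret i64 %res
}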