; SI-LABEL: name: test_load_private_v3s16_align8
; SI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
- ; SI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 2, align 8, addrspace 5)
- ; SI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD]](s32)
- ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 2
+ ; SI: [[LOAD:%[0-9]+]]:_(<2 x s16>) = G_LOAD [[COPY]](p5) :: (load 4, align 8, addrspace 5)
+ ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; SI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
- ; SI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, addrspace 5)
- ; SI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
- ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
- ; SI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
- ; SI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 2, align 4, addrspace 5)
- ; SI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD2]](s32)
- ; SI: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
- ; SI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC2]](s16), [[DEF]](s16)
- ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; SI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; SI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, align 4, addrspace 5)
+ ; SI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
+ ; SI: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; SI: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; SI: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; SI: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; SI: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[LOAD]](<2 x s16>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; SI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; SI: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; SI: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[TRUNC]](s16), 32
+ ; SI: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; SI: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; SI: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; SI: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
; CI-LABEL: name: test_load_private_v3s16_align8
; CI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
- ; CI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 2, align 8, addrspace 5)
- ; CI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD]](s32)
- ; CI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 2
+ ; CI: [[LOAD:%[0-9]+]]:_(<2 x s16>) = G_LOAD [[COPY]](p5) :: (load 4, align 8, addrspace 5)
+ ; CI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
- ; CI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, addrspace 5)
- ; CI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
- ; CI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
- ; CI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
- ; CI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 2, align 4, addrspace 5)
- ; CI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD2]](s32)
- ; CI: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; CI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
- ; CI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC2]](s16), [[DEF]](s16)
- ; CI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; CI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; CI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, align 4, addrspace 5)
+ ; CI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
+ ; CI: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; CI: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; CI: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; CI: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; CI: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[LOAD]](<2 x s16>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; CI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; CI: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; CI: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[TRUNC]](s16), 32
+ ; CI: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; CI: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; CI: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; CI: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
; VI-LABEL: name: test_load_private_v3s16_align8
; VI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
- ; VI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 2, align 8, addrspace 5)
- ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD]](s32)
- ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 2
+ ; VI: [[LOAD:%[0-9]+]]:_(<2 x s16>) = G_LOAD [[COPY]](p5) :: (load 4, align 8, addrspace 5)
+ ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; VI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
- ; VI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, addrspace 5)
- ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
- ; VI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
- ; VI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
- ; VI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 2, align 4, addrspace 5)
- ; VI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD2]](s32)
- ; VI: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
- ; VI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC2]](s16), [[DEF]](s16)
- ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; VI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; VI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, align 4, addrspace 5)
+ ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
+ ; VI: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; VI: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; VI: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; VI: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; VI: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[LOAD]](<2 x s16>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; VI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; VI: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; VI: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[TRUNC]](s16), 32
+ ; VI: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; VI: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; VI: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; VI: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
; GFX9-LABEL: name: test_load_private_v3s16_align8
; GFX9: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
- ; GFX9: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 2, align 8, addrspace 5)
- ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD]](s32)
- ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 2
+ ; GFX9: [[LOAD:%[0-9]+]]:_(<2 x s16>) = G_LOAD [[COPY]](p5) :: (load 4, align 8, addrspace 5)
+ ; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
- ; GFX9: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, addrspace 5)
- ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
- ; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
- ; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
- ; GFX9: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 2, align 4, addrspace 5)
- ; GFX9: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD2]](s32)
- ; GFX9: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
- ; GFX9: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC2]](s16), [[DEF]](s16)
- ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; GFX9: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; GFX9: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, align 4, addrspace 5)
+ ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
+ ; GFX9: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; GFX9: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; GFX9: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; GFX9: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; GFX9: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[LOAD]](<2 x s16>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; GFX9: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; GFX9: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; GFX9: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[TRUNC]](s16), 32
+ ; GFX9: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; GFX9: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; GFX9: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; GFX9: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
%0:_(p5) = COPY $vgpr0
%1:_(<3 x s16>) = G_LOAD %0 :: (load 6, align 8, addrspace 5)
    %2:_(<4 x s16>) = G_IMPLICIT_DEF
    %3:_(<4 x s16>) = G_INSERT %2, %1, 0
    $vgpr0_vgpr1 = COPY %3
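; Note: the align 2 and align 1 variants below reassemble the result the same
; way (G_INSERT of the loaded pieces into an implicit-def <4 x s16>, copied out
; as the padded vector); only the load sequence changes with the source
; alignment. At align 2 the low dword can no longer be loaded as a single
; <2 x s16>, so it is built from two s16 loads instead.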
; SI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; SI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, addrspace 5)
; SI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
+ ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; SI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; SI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 2, addrspace 5)
; SI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD2]](s32)
- ; SI: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
- ; SI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC2]](s16), [[DEF]](s16)
- ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; SI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; SI: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; SI: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; SI: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; SI: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; SI: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[BUILD_VECTOR]](<2 x s16>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; SI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; SI: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; SI: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[TRUNC2]](s16), 32
+ ; SI: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; SI: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; SI: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; SI: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
; CI-LABEL: name: test_load_private_v3s16_align2
; CI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; CI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 2, addrspace 5)
; CI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; CI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, addrspace 5)
; CI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
+ ; CI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
; CI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; CI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 2, addrspace 5)
; CI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD2]](s32)
- ; CI: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; CI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
- ; CI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC2]](s16), [[DEF]](s16)
- ; CI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; CI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; CI: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; CI: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; CI: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; CI: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; CI: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[BUILD_VECTOR]](<2 x s16>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; CI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; CI: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; CI: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[TRUNC2]](s16), 32
+ ; CI: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; CI: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; CI: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; CI: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
; VI-LABEL: name: test_load_private_v3s16_align2
; VI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; VI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 2, addrspace 5)
; VI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; VI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, addrspace 5)
; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
+ ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
; VI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; VI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; VI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 2, addrspace 5)
; VI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD2]](s32)
- ; VI: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
- ; VI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC2]](s16), [[DEF]](s16)
- ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; VI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; VI: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; VI: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; VI: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; VI: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; VI: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[BUILD_VECTOR]](<2 x s16>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; VI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; VI: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; VI: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[TRUNC2]](s16), 32
+ ; VI: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; VI: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; VI: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; VI: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
; GFX9-LABEL: name: test_load_private_v3s16_align2
; GFX9: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; GFX9: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 2, addrspace 5)
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; GFX9: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 2, addrspace 5)
; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD1]](s32)
+ ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; GFX9: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 2, addrspace 5)
; GFX9: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[LOAD2]](s32)
- ; GFX9: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC]](s16), [[TRUNC1]](s16)
- ; GFX9: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[TRUNC2]](s16), [[DEF]](s16)
- ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; GFX9: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; GFX9: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; GFX9: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; GFX9: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; GFX9: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; GFX9: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[BUILD_VECTOR]](<2 x s16>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; GFX9: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; GFX9: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; GFX9: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[TRUNC2]](s16), 32
+ ; GFX9: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; GFX9: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; GFX9: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; GFX9: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
%0:_(p5) = COPY $vgpr0
%1:_(<3 x s16>) = G_LOAD %0 :: (load 6, align 2, addrspace 5)
    %2:_(<4 x s16>) = G_IMPLICIT_DEF
    %3:_(<4 x s16>) = G_INSERT %2, %1, 0
    $vgpr0_vgpr1 = COPY %3
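; Note: at align 1 each s16 half is first assembled from two byte loads with
; G_AND/G_SHL/G_OR before the same BUILD_VECTOR and G_INSERT reassembly as
; above.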
; SI: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[COPY2]](s32)
; SI: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[SHL1]](s32)
; SI: [[OR1:%[0-9]+]]:_(s16) = G_OR [[AND2]], [[TRUNC3]]
+ ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR]](s16), [[OR1]](s16)
; SI: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; SI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
; SI: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 1, addrspace 5)
; SI: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[COPY4]](s32)
; SI: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[SHL2]](s32)
; SI: [[OR2:%[0-9]+]]:_(s16) = G_OR [[AND4]], [[TRUNC5]]
- ; SI: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR]](s16), [[OR1]](s16)
- ; SI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR2]](s16), [[DEF]](s16)
- ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; SI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; SI: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; SI: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; SI: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; SI: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; SI: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[BUILD_VECTOR]](<2 x s16>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; SI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; SI: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; SI: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[OR2]](s16), 32
+ ; SI: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; SI: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; SI: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; SI: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
; CI-LABEL: name: test_load_private_v3s16_align1
; CI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; CI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 1, addrspace 5)
; CI: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND3]], [[COPY2]](s32)
; CI: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[SHL1]](s32)
; CI: [[OR1:%[0-9]+]]:_(s16) = G_OR [[AND2]], [[TRUNC3]]
+ ; CI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR]](s16), [[OR1]](s16)
; CI: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
; CI: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 1, addrspace 5)
; CI: [[SHL2:%[0-9]+]]:_(s32) = G_SHL [[AND5]], [[COPY4]](s32)
; CI: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[SHL2]](s32)
; CI: [[OR2:%[0-9]+]]:_(s16) = G_OR [[AND4]], [[TRUNC5]]
- ; CI: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; CI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR]](s16), [[OR1]](s16)
- ; CI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR2]](s16), [[DEF]](s16)
- ; CI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; CI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; CI: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; CI: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; CI: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; CI: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; CI: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[BUILD_VECTOR]](<2 x s16>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; CI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; CI: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; CI: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[OR2]](s16), 32
+ ; CI: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; CI: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; CI: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; CI: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
; VI-LABEL: name: test_load_private_v3s16_align1
; VI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; VI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 1, addrspace 5)
; VI: [[AND3:%[0-9]+]]:_(s16) = G_AND [[TRUNC3]], [[C1]]
; VI: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[AND3]], [[C2]](s16)
; VI: [[OR1:%[0-9]+]]:_(s16) = G_OR [[AND2]], [[SHL1]]
+ ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR]](s16), [[OR1]](s16)
; VI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; VI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
; VI: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 1, addrspace 5)
; VI: [[AND5:%[0-9]+]]:_(s16) = G_AND [[TRUNC5]], [[C1]]
; VI: [[SHL2:%[0-9]+]]:_(s16) = G_SHL [[AND5]], [[C2]](s16)
; VI: [[OR2:%[0-9]+]]:_(s16) = G_OR [[AND4]], [[SHL2]]
- ; VI: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR]](s16), [[OR1]](s16)
- ; VI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR2]](s16), [[DEF]](s16)
- ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; VI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; VI: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; VI: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; VI: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; VI: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; VI: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[BUILD_VECTOR]](<2 x s16>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; VI: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; VI: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; VI: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[OR2]](s16), 32
+ ; VI: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; VI: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; VI: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; VI: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
; GFX9-LABEL: name: test_load_private_v3s16_align1
; GFX9: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; GFX9: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 1, addrspace 5)
; GFX9: [[AND3:%[0-9]+]]:_(s16) = G_AND [[TRUNC3]], [[C1]]
; GFX9: [[SHL1:%[0-9]+]]:_(s16) = G_SHL [[AND3]], [[C2]](s16)
; GFX9: [[OR1:%[0-9]+]]:_(s16) = G_OR [[AND2]], [[SHL1]]
+ ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR]](s16), [[OR1]](s16)
; GFX9: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
; GFX9: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 1, addrspace 5)
; GFX9: [[AND5:%[0-9]+]]:_(s16) = G_AND [[TRUNC5]], [[C1]]
; GFX9: [[SHL2:%[0-9]+]]:_(s16) = G_SHL [[AND5]], [[C2]](s16)
; GFX9: [[OR2:%[0-9]+]]:_(s16) = G_OR [[AND4]], [[SHL2]]
- ; GFX9: [[DEF:%[0-9]+]]:_(s16) = G_IMPLICIT_DEF
- ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR]](s16), [[OR1]](s16)
- ; GFX9: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s16>) = G_BUILD_VECTOR [[OR2]](s16), [[DEF]](s16)
- ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s16>), [[BUILD_VECTOR1]](<2 x s16>)
- ; GFX9: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[CONCAT_VECTORS]](<4 x s16>), 0
+ ; GFX9: [[DEF:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<4 x s16>), 0
; GFX9: [[DEF1:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
; GFX9: [[INSERT:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF1]], [[EXTRACT]](<3 x s16>), 0
- ; GFX9: $vgpr0_vgpr1 = COPY [[INSERT]](<4 x s16>)
+ ; GFX9: [[INSERT1:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT]], [[BUILD_VECTOR]](<2 x s16>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT1]](<4 x s16>), 0
+ ; GFX9: [[DEF2:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; GFX9: [[INSERT2:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF2]], [[EXTRACT1]](<3 x s16>), 0
+ ; GFX9: [[INSERT3:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[INSERT2]], [[OR2]](s16), 32
+ ; GFX9: [[EXTRACT2:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[INSERT3]](<4 x s16>), 0
+ ; GFX9: [[DEF3:%[0-9]+]]:_(<4 x s16>) = G_IMPLICIT_DEF
+ ; GFX9: [[INSERT4:%[0-9]+]]:_(<4 x s16>) = G_INSERT [[DEF3]], [[EXTRACT2]](<3 x s16>), 0
+ ; GFX9: $vgpr0_vgpr1 = COPY [[INSERT4]](<4 x s16>)
%0:_(p5) = COPY $vgpr0
%1:_(<3 x s16>) = G_LOAD %0 :: (load 6, align 1, addrspace 5)
    %2:_(<4 x s16>) = G_IMPLICIT_DEF
    %3:_(<4 x s16>) = G_INSERT %2, %1, 0
    $vgpr0_vgpr1 = COPY %3
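; Note: for wide s32 vectors each element is loaded as a separate dword at its
; own constant offset from the base pointer, and the results feed a single
; G_BUILD_VECTOR rather than <2 x s32> pieces joined by G_CONCAT_VECTORS.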
; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; SI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; SI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 4, addrspace 5)
- ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32)
; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
; SI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; SI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 4, align 8, addrspace 5)
- ; SI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD1]], [[C]](s32)
+ ; SI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
+ ; SI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; SI: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; SI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD2]](s32), [[LOAD3]](s32)
- ; SI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; SI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
+ ; SI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+ ; SI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
; SI: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 4, align 16, addrspace 5)
- ; SI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C]](s32)
+ ; SI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
+ ; SI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
; SI: [[LOAD5:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD4]](p5) :: (load 4, addrspace 5)
- ; SI: [[BUILD_VECTOR2:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD4]](s32), [[LOAD5]](s32)
- ; SI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
- ; SI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
+ ; SI: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+ ; SI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
; SI: [[LOAD6:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD5]](p5) :: (load 4, align 8, addrspace 5)
- ; SI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD5]], [[C]](s32)
+ ; SI: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
+ ; SI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C6]](s32)
; SI: [[LOAD7:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD6]](p5) :: (load 4, addrspace 5)
- ; SI: [[BUILD_VECTOR3:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD6]](s32), [[LOAD7]](s32)
- ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<8 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s32>), [[BUILD_VECTOR1]](<2 x s32>), [[BUILD_VECTOR2]](<2 x s32>), [[BUILD_VECTOR3]](<2 x s32>)
- ; SI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[CONCAT_VECTORS]](<8 x s32>)
+ ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<8 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32), [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32)
+ ; SI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[BUILD_VECTOR]](<8 x s32>)
; CI-LABEL: name: test_load_private_v8s32_align32
; CI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; CI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 4, align 32, addrspace 5)
; CI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; CI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 4, addrspace 5)
- ; CI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32)
; CI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
; CI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; CI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 4, align 8, addrspace 5)
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD1]], [[C]](s32)
+ ; CI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
+ ; CI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; CI: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; CI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD2]](s32), [[LOAD3]](s32)
- ; CI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; CI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
+ ; CI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+ ; CI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
; CI: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 4, align 16, addrspace 5)
- ; CI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C]](s32)
+ ; CI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
+ ; CI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
; CI: [[LOAD5:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD4]](p5) :: (load 4, addrspace 5)
- ; CI: [[BUILD_VECTOR2:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD4]](s32), [[LOAD5]](s32)
- ; CI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
- ; CI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
+ ; CI: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+ ; CI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
; CI: [[LOAD6:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD5]](p5) :: (load 4, align 8, addrspace 5)
- ; CI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD5]], [[C]](s32)
+ ; CI: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
+ ; CI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C6]](s32)
; CI: [[LOAD7:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD6]](p5) :: (load 4, addrspace 5)
- ; CI: [[BUILD_VECTOR3:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD6]](s32), [[LOAD7]](s32)
- ; CI: [[CONCAT_VECTORS:%[0-9]+]]:_(<8 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s32>), [[BUILD_VECTOR1]](<2 x s32>), [[BUILD_VECTOR2]](<2 x s32>), [[BUILD_VECTOR3]](<2 x s32>)
- ; CI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[CONCAT_VECTORS]](<8 x s32>)
+ ; CI: [[BUILD_VECTOR:%[0-9]+]]:_(<8 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32), [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32)
+ ; CI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[BUILD_VECTOR]](<8 x s32>)
; VI-LABEL: name: test_load_private_v8s32_align32
; VI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; VI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 4, align 32, addrspace 5)
; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; VI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; VI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 4, addrspace 5)
- ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32)
; VI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
; VI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; VI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 4, align 8, addrspace 5)
- ; VI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD1]], [[C]](s32)
+ ; VI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
+ ; VI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; VI: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; VI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD2]](s32), [[LOAD3]](s32)
- ; VI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; VI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
+ ; VI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+ ; VI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
; VI: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 4, align 16, addrspace 5)
- ; VI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C]](s32)
+ ; VI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
+ ; VI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
; VI: [[LOAD5:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD4]](p5) :: (load 4, addrspace 5)
- ; VI: [[BUILD_VECTOR2:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD4]](s32), [[LOAD5]](s32)
- ; VI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
- ; VI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
+ ; VI: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+ ; VI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
; VI: [[LOAD6:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD5]](p5) :: (load 4, align 8, addrspace 5)
- ; VI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD5]], [[C]](s32)
+ ; VI: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
+ ; VI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C6]](s32)
; VI: [[LOAD7:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD6]](p5) :: (load 4, addrspace 5)
- ; VI: [[BUILD_VECTOR3:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD6]](s32), [[LOAD7]](s32)
- ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<8 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s32>), [[BUILD_VECTOR1]](<2 x s32>), [[BUILD_VECTOR2]](<2 x s32>), [[BUILD_VECTOR3]](<2 x s32>)
- ; VI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[CONCAT_VECTORS]](<8 x s32>)
+ ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<8 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32), [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32)
+ ; VI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[BUILD_VECTOR]](<8 x s32>)
; GFX9-LABEL: name: test_load_private_v8s32_align32
; GFX9: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; GFX9: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 4, align 32, addrspace 5)
; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; GFX9: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32)
; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; GFX9: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 4, align 8, addrspace 5)
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD1]], [[C]](s32)
+ ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
+ ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; GFX9: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD2]](s32), [[LOAD3]](s32)
- ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
+ ; GFX9: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+ ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
; GFX9: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 4, align 16, addrspace 5)
- ; GFX9: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C]](s32)
+ ; GFX9: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
+ ; GFX9: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
; GFX9: [[LOAD5:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD4]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[BUILD_VECTOR2:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD4]](s32), [[LOAD5]](s32)
- ; GFX9: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
- ; GFX9: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
+ ; GFX9: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+ ; GFX9: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
; GFX9: [[LOAD6:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD5]](p5) :: (load 4, align 8, addrspace 5)
- ; GFX9: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD5]], [[C]](s32)
+ ; GFX9: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
+ ; GFX9: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C6]](s32)
; GFX9: [[LOAD7:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD6]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[BUILD_VECTOR3:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD6]](s32), [[LOAD7]](s32)
- ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<8 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s32>), [[BUILD_VECTOR1]](<2 x s32>), [[BUILD_VECTOR2]](<2 x s32>), [[BUILD_VECTOR3]](<2 x s32>)
- ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[CONCAT_VECTORS]](<8 x s32>)
+ ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<8 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32), [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32)
+ ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY [[BUILD_VECTOR]](<8 x s32>)
%0:_(p5) = COPY $vgpr0
    %1:_(<8 x s32>) = G_LOAD %0 :: (load 32, align 32, addrspace 5)
$vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7 = COPY %1
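; Note: the <16 x s32> case below scales the same scheme to sixteen dword
; loads at offsets 0..60, combined by one G_BUILD_VECTOR.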
; SI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
; SI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; SI: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32)
; SI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
; SI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
; SI: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 4, align 16, addrspace 5)
- ; SI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C]](s32)
+ ; SI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
+ ; SI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
; SI: [[LOAD5:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD4]](p5) :: (load 4, addrspace 5)
- ; SI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C1]](s32)
+ ; SI: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+ ; SI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
; SI: [[LOAD6:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD5]](p5) :: (load 4, align 8, addrspace 5)
- ; SI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C2]](s32)
+ ; SI: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
+ ; SI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C6]](s32)
; SI: [[LOAD7:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD6]](p5) :: (load 4, addrspace 5)
- ; SI: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32)
- ; SI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
- ; SI: [[PTR_ADD7:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
+ ; SI: [[C7:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
+ ; SI: [[PTR_ADD7:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C7]](s32)
; SI: [[LOAD8:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD7]](p5) :: (load 4, align 32, addrspace 5)
- ; SI: [[PTR_ADD8:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C]](s32)
+ ; SI: [[C8:%[0-9]+]]:_(s32) = G_CONSTANT i32 36
+ ; SI: [[PTR_ADD8:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C8]](s32)
; SI: [[LOAD9:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD8]](p5) :: (load 4, addrspace 5)
- ; SI: [[PTR_ADD9:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C1]](s32)
+ ; SI: [[C9:%[0-9]+]]:_(s32) = G_CONSTANT i32 40
+ ; SI: [[PTR_ADD9:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C9]](s32)
; SI: [[LOAD10:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD9]](p5) :: (load 4, align 8, addrspace 5)
- ; SI: [[PTR_ADD10:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C2]](s32)
+ ; SI: [[C10:%[0-9]+]]:_(s32) = G_CONSTANT i32 44
+ ; SI: [[PTR_ADD10:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C10]](s32)
; SI: [[LOAD11:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD10]](p5) :: (load 4, addrspace 5)
- ; SI: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD8]](s32), [[LOAD9]](s32), [[LOAD10]](s32), [[LOAD11]](s32)
- ; SI: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 48
- ; SI: [[PTR_ADD11:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
+ ; SI: [[C11:%[0-9]+]]:_(s32) = G_CONSTANT i32 48
+ ; SI: [[PTR_ADD11:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C11]](s32)
; SI: [[LOAD12:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD11]](p5) :: (load 4, align 16, addrspace 5)
- ; SI: [[PTR_ADD12:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C]](s32)
+ ; SI: [[C12:%[0-9]+]]:_(s32) = G_CONSTANT i32 52
+ ; SI: [[PTR_ADD12:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C12]](s32)
; SI: [[LOAD13:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD12]](p5) :: (load 4, addrspace 5)
- ; SI: [[PTR_ADD13:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C1]](s32)
+ ; SI: [[C13:%[0-9]+]]:_(s32) = G_CONSTANT i32 56
+ ; SI: [[PTR_ADD13:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C13]](s32)
; SI: [[LOAD14:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD13]](p5) :: (load 4, align 8, addrspace 5)
- ; SI: [[PTR_ADD14:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C2]](s32)
+ ; SI: [[C14:%[0-9]+]]:_(s32) = G_CONSTANT i32 60
+ ; SI: [[PTR_ADD14:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C14]](s32)
; SI: [[LOAD15:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD14]](p5) :: (load 4, addrspace 5)
- ; SI: [[BUILD_VECTOR3:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD12]](s32), [[LOAD13]](s32), [[LOAD14]](s32), [[LOAD15]](s32)
- ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<16 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<4 x s32>), [[BUILD_VECTOR1]](<4 x s32>), [[BUILD_VECTOR2]](<4 x s32>), [[BUILD_VECTOR3]](<4 x s32>)
- ; SI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY [[CONCAT_VECTORS]](<16 x s32>)
+ ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<16 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32), [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32), [[LOAD8]](s32), [[LOAD9]](s32), [[LOAD10]](s32), [[LOAD11]](s32), [[LOAD12]](s32), [[LOAD13]](s32), [[LOAD14]](s32), [[LOAD15]](s32)
+ ; SI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY [[BUILD_VECTOR]](<16 x s32>)
; CI-LABEL: name: test_load_private_v16s32_align32
; CI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; CI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 4, align 32, addrspace 5)
; CI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
; CI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; CI: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; CI: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32)
; CI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
; CI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
; CI: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 4, align 16, addrspace 5)
- ; CI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C]](s32)
+ ; CI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
+ ; CI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
; CI: [[LOAD5:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD4]](p5) :: (load 4, addrspace 5)
- ; CI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C1]](s32)
+ ; CI: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+ ; CI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
; CI: [[LOAD6:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD5]](p5) :: (load 4, align 8, addrspace 5)
- ; CI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C2]](s32)
+ ; CI: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
+ ; CI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C6]](s32)
; CI: [[LOAD7:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD6]](p5) :: (load 4, addrspace 5)
- ; CI: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32)
- ; CI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
- ; CI: [[PTR_ADD7:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
+ ; CI: [[C7:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
+ ; CI: [[PTR_ADD7:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C7]](s32)
; CI: [[LOAD8:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD7]](p5) :: (load 4, align 32, addrspace 5)
- ; CI: [[PTR_ADD8:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C]](s32)
+ ; CI: [[C8:%[0-9]+]]:_(s32) = G_CONSTANT i32 36
+ ; CI: [[PTR_ADD8:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C8]](s32)
; CI: [[LOAD9:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD8]](p5) :: (load 4, addrspace 5)
- ; CI: [[PTR_ADD9:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C1]](s32)
+ ; CI: [[C9:%[0-9]+]]:_(s32) = G_CONSTANT i32 40
+ ; CI: [[PTR_ADD9:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C9]](s32)
; CI: [[LOAD10:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD9]](p5) :: (load 4, align 8, addrspace 5)
- ; CI: [[PTR_ADD10:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C2]](s32)
+ ; CI: [[C10:%[0-9]+]]:_(s32) = G_CONSTANT i32 44
+ ; CI: [[PTR_ADD10:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C10]](s32)
; CI: [[LOAD11:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD10]](p5) :: (load 4, addrspace 5)
- ; CI: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD8]](s32), [[LOAD9]](s32), [[LOAD10]](s32), [[LOAD11]](s32)
- ; CI: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 48
- ; CI: [[PTR_ADD11:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
+ ; CI: [[C11:%[0-9]+]]:_(s32) = G_CONSTANT i32 48
+ ; CI: [[PTR_ADD11:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C11]](s32)
; CI: [[LOAD12:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD11]](p5) :: (load 4, align 16, addrspace 5)
- ; CI: [[PTR_ADD12:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C]](s32)
+ ; CI: [[C12:%[0-9]+]]:_(s32) = G_CONSTANT i32 52
+ ; CI: [[PTR_ADD12:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C12]](s32)
; CI: [[LOAD13:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD12]](p5) :: (load 4, addrspace 5)
- ; CI: [[PTR_ADD13:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C1]](s32)
+ ; CI: [[C13:%[0-9]+]]:_(s32) = G_CONSTANT i32 56
+ ; CI: [[PTR_ADD13:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C13]](s32)
; CI: [[LOAD14:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD13]](p5) :: (load 4, align 8, addrspace 5)
- ; CI: [[PTR_ADD14:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C2]](s32)
+ ; CI: [[C14:%[0-9]+]]:_(s32) = G_CONSTANT i32 60
+ ; CI: [[PTR_ADD14:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C14]](s32)
; CI: [[LOAD15:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD14]](p5) :: (load 4, addrspace 5)
- ; CI: [[BUILD_VECTOR3:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD12]](s32), [[LOAD13]](s32), [[LOAD14]](s32), [[LOAD15]](s32)
- ; CI: [[CONCAT_VECTORS:%[0-9]+]]:_(<16 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<4 x s32>), [[BUILD_VECTOR1]](<4 x s32>), [[BUILD_VECTOR2]](<4 x s32>), [[BUILD_VECTOR3]](<4 x s32>)
- ; CI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY [[CONCAT_VECTORS]](<16 x s32>)
+ ; CI: [[BUILD_VECTOR:%[0-9]+]]:_(<16 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32), [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32), [[LOAD8]](s32), [[LOAD9]](s32), [[LOAD10]](s32), [[LOAD11]](s32), [[LOAD12]](s32), [[LOAD13]](s32), [[LOAD14]](s32), [[LOAD15]](s32)
+ ; CI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY [[BUILD_VECTOR]](<16 x s32>)
; VI-LABEL: name: test_load_private_v16s32_align32
; VI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; VI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 4, align 32, addrspace 5)
; VI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
; VI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; VI: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32)
; VI: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
; VI: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
; VI: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 4, align 16, addrspace 5)
- ; VI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C]](s32)
+ ; VI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
+ ; VI: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
; VI: [[LOAD5:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD4]](p5) :: (load 4, addrspace 5)
- ; VI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C1]](s32)
+ ; VI: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+ ; VI: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
; VI: [[LOAD6:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD5]](p5) :: (load 4, align 8, addrspace 5)
- ; VI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C2]](s32)
+ ; VI: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
+ ; VI: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C6]](s32)
; VI: [[LOAD7:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD6]](p5) :: (load 4, addrspace 5)
- ; VI: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32)
- ; VI: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
- ; VI: [[PTR_ADD7:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
+ ; VI: [[C7:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
+ ; VI: [[PTR_ADD7:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C7]](s32)
; VI: [[LOAD8:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD7]](p5) :: (load 4, align 32, addrspace 5)
- ; VI: [[PTR_ADD8:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C]](s32)
+ ; VI: [[C8:%[0-9]+]]:_(s32) = G_CONSTANT i32 36
+ ; VI: [[PTR_ADD8:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C8]](s32)
; VI: [[LOAD9:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD8]](p5) :: (load 4, addrspace 5)
- ; VI: [[PTR_ADD9:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C1]](s32)
+ ; VI: [[C9:%[0-9]+]]:_(s32) = G_CONSTANT i32 40
+ ; VI: [[PTR_ADD9:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C9]](s32)
; VI: [[LOAD10:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD9]](p5) :: (load 4, align 8, addrspace 5)
- ; VI: [[PTR_ADD10:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C2]](s32)
+ ; VI: [[C10:%[0-9]+]]:_(s32) = G_CONSTANT i32 44
+ ; VI: [[PTR_ADD10:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C10]](s32)
; VI: [[LOAD11:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD10]](p5) :: (load 4, addrspace 5)
- ; VI: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD8]](s32), [[LOAD9]](s32), [[LOAD10]](s32), [[LOAD11]](s32)
- ; VI: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 48
- ; VI: [[PTR_ADD11:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
+ ; VI: [[C11:%[0-9]+]]:_(s32) = G_CONSTANT i32 48
+ ; VI: [[PTR_ADD11:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C11]](s32)
; VI: [[LOAD12:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD11]](p5) :: (load 4, align 16, addrspace 5)
- ; VI: [[PTR_ADD12:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C]](s32)
+ ; VI: [[C12:%[0-9]+]]:_(s32) = G_CONSTANT i32 52
+ ; VI: [[PTR_ADD12:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C12]](s32)
; VI: [[LOAD13:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD12]](p5) :: (load 4, addrspace 5)
- ; VI: [[PTR_ADD13:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C1]](s32)
+ ; VI: [[C13:%[0-9]+]]:_(s32) = G_CONSTANT i32 56
+ ; VI: [[PTR_ADD13:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C13]](s32)
; VI: [[LOAD14:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD13]](p5) :: (load 4, align 8, addrspace 5)
- ; VI: [[PTR_ADD14:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C2]](s32)
+ ; VI: [[C14:%[0-9]+]]:_(s32) = G_CONSTANT i32 60
+ ; VI: [[PTR_ADD14:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C14]](s32)
; VI: [[LOAD15:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD14]](p5) :: (load 4, addrspace 5)
- ; VI: [[BUILD_VECTOR3:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD12]](s32), [[LOAD13]](s32), [[LOAD14]](s32), [[LOAD15]](s32)
- ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<16 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<4 x s32>), [[BUILD_VECTOR1]](<4 x s32>), [[BUILD_VECTOR2]](<4 x s32>), [[BUILD_VECTOR3]](<4 x s32>)
- ; VI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY [[CONCAT_VECTORS]](<16 x s32>)
+ ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<16 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32), [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32), [[LOAD8]](s32), [[LOAD9]](s32), [[LOAD10]](s32), [[LOAD11]](s32), [[LOAD12]](s32), [[LOAD13]](s32), [[LOAD14]](s32), [[LOAD15]](s32)
+ ; VI: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY [[BUILD_VECTOR]](<16 x s32>)
; GFX9-LABEL: name: test_load_private_v16s32_align32
; GFX9: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; GFX9: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 4, align 32, addrspace 5)
; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; GFX9: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32)
; GFX9: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C3]](s32)
; GFX9: [[LOAD4:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD3]](p5) :: (load 4, align 16, addrspace 5)
- ; GFX9: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C]](s32)
+ ; GFX9: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 20
+ ; GFX9: [[PTR_ADD4:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
; GFX9: [[LOAD5:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD4]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C1]](s32)
+ ; GFX9: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
+ ; GFX9: [[PTR_ADD5:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
; GFX9: [[LOAD6:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD5]](p5) :: (load 4, align 8, addrspace 5)
- ; GFX9: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD3]], [[C2]](s32)
+ ; GFX9: [[C6:%[0-9]+]]:_(s32) = G_CONSTANT i32 28
+ ; GFX9: [[PTR_ADD6:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C6]](s32)
; GFX9: [[LOAD7:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD6]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32)
- ; GFX9: [[C4:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
- ; GFX9: [[PTR_ADD7:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C4]](s32)
+ ; GFX9: [[C7:%[0-9]+]]:_(s32) = G_CONSTANT i32 32
+ ; GFX9: [[PTR_ADD7:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C7]](s32)
; GFX9: [[LOAD8:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD7]](p5) :: (load 4, align 32, addrspace 5)
- ; GFX9: [[PTR_ADD8:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C]](s32)
+ ; GFX9: [[C8:%[0-9]+]]:_(s32) = G_CONSTANT i32 36
+ ; GFX9: [[PTR_ADD8:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C8]](s32)
; GFX9: [[LOAD9:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD8]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[PTR_ADD9:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C1]](s32)
+ ; GFX9: [[C9:%[0-9]+]]:_(s32) = G_CONSTANT i32 40
+ ; GFX9: [[PTR_ADD9:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C9]](s32)
; GFX9: [[LOAD10:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD9]](p5) :: (load 4, align 8, addrspace 5)
- ; GFX9: [[PTR_ADD10:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD7]], [[C2]](s32)
+ ; GFX9: [[C10:%[0-9]+]]:_(s32) = G_CONSTANT i32 44
+ ; GFX9: [[PTR_ADD10:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C10]](s32)
; GFX9: [[LOAD11:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD10]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD8]](s32), [[LOAD9]](s32), [[LOAD10]](s32), [[LOAD11]](s32)
- ; GFX9: [[C5:%[0-9]+]]:_(s32) = G_CONSTANT i32 48
- ; GFX9: [[PTR_ADD11:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C5]](s32)
+ ; GFX9: [[C11:%[0-9]+]]:_(s32) = G_CONSTANT i32 48
+ ; GFX9: [[PTR_ADD11:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C11]](s32)
; GFX9: [[LOAD12:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD11]](p5) :: (load 4, align 16, addrspace 5)
- ; GFX9: [[PTR_ADD12:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C]](s32)
+ ; GFX9: [[C12:%[0-9]+]]:_(s32) = G_CONSTANT i32 52
+ ; GFX9: [[PTR_ADD12:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C12]](s32)
; GFX9: [[LOAD13:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD12]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[PTR_ADD13:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C1]](s32)
+ ; GFX9: [[C13:%[0-9]+]]:_(s32) = G_CONSTANT i32 56
+ ; GFX9: [[PTR_ADD13:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C13]](s32)
; GFX9: [[LOAD14:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD13]](p5) :: (load 4, align 8, addrspace 5)
- ; GFX9: [[PTR_ADD14:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD11]], [[C2]](s32)
+ ; GFX9: [[C14:%[0-9]+]]:_(s32) = G_CONSTANT i32 60
+ ; GFX9: [[PTR_ADD14:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C14]](s32)
; GFX9: [[LOAD15:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD14]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[BUILD_VECTOR3:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD12]](s32), [[LOAD13]](s32), [[LOAD14]](s32), [[LOAD15]](s32)
- ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<16 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<4 x s32>), [[BUILD_VECTOR1]](<4 x s32>), [[BUILD_VECTOR2]](<4 x s32>), [[BUILD_VECTOR3]](<4 x s32>)
- ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY [[CONCAT_VECTORS]](<16 x s32>)
+ ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<16 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32), [[LOAD4]](s32), [[LOAD5]](s32), [[LOAD6]](s32), [[LOAD7]](s32), [[LOAD8]](s32), [[LOAD9]](s32), [[LOAD10]](s32), [[LOAD11]](s32), [[LOAD12]](s32), [[LOAD13]](s32), [[LOAD14]](s32), [[LOAD15]](s32)
+ ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY [[BUILD_VECTOR]](<16 x s32>)
%0:_(p5) = COPY $vgpr0
%1:_(<16 x s32>) = G_LOAD %0 :: (load 16, align 32, addrspace 5)
$vgpr0_vgpr1_vgpr2_vgpr3_vgpr4_vgpr5_vgpr6_vgpr7_vgpr8_vgpr9_vgpr10_vgpr11_vgpr12_vgpr13_vgpr14_vgpr15 = COPY %1
; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; SI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; SI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 4, addrspace 5)
- ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32)
; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
; SI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; SI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 4, addrspace 5)
- ; SI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD1]], [[C]](s32)
+ ; SI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
+ ; SI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; SI: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; SI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD2]](s32), [[LOAD3]](s32)
- ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s32>), [[BUILD_VECTOR1]](<2 x s32>)
- ; SI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[CONCAT_VECTORS]](<4 x s32>)
+ ; SI: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32)
+ ; SI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<4 x s32>)
; CI-LABEL: name: test_extload_private_v4s32_from_8_align4
; CI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; CI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 4, addrspace 5)
; CI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; CI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; CI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 4, addrspace 5)
- ; CI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32)
; CI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
; CI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; CI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 4, addrspace 5)
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD1]], [[C]](s32)
+ ; CI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
+ ; CI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; CI: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; CI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD2]](s32), [[LOAD3]](s32)
- ; CI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s32>), [[BUILD_VECTOR1]](<2 x s32>)
- ; CI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[CONCAT_VECTORS]](<4 x s32>)
+ ; CI: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32)
+ ; CI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<4 x s32>)
; VI-LABEL: name: test_extload_private_v4s32_from_8_align4
; VI: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; VI: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 4, addrspace 5)
; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; VI: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; VI: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 4, addrspace 5)
- ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32)
; VI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
; VI: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; VI: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 4, addrspace 5)
- ; VI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD1]], [[C]](s32)
+ ; VI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
+ ; VI: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; VI: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; VI: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD2]](s32), [[LOAD3]](s32)
- ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s32>), [[BUILD_VECTOR1]](<2 x s32>)
- ; VI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[CONCAT_VECTORS]](<4 x s32>)
+ ; VI: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32)
+ ; VI: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<4 x s32>)
; GFX9-LABEL: name: test_extload_private_v4s32_from_8_align4
; GFX9: [[COPY:%[0-9]+]]:_(p5) = COPY $vgpr0
; GFX9: [[LOAD:%[0-9]+]]:_(s32) = G_LOAD [[COPY]](p5) :: (load 4, addrspace 5)
; GFX9: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C]](s32)
; GFX9: [[LOAD1:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32)
; GFX9: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C1]](s32)
; GFX9: [[LOAD2:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD1]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[PTR_ADD1]], [[C]](s32)
+ ; GFX9: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 12
+ ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p5) = G_PTR_ADD [[COPY]], [[C2]](s32)
; GFX9: [[LOAD3:%[0-9]+]]:_(s32) = G_LOAD [[PTR_ADD2]](p5) :: (load 4, addrspace 5)
- ; GFX9: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[LOAD2]](s32), [[LOAD3]](s32)
- ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s32>) = G_CONCAT_VECTORS [[BUILD_VECTOR]](<2 x s32>), [[BUILD_VECTOR1]](<2 x s32>)
- ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[CONCAT_VECTORS]](<4 x s32>)
+ ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[LOAD]](s32), [[LOAD1]](s32), [[LOAD2]](s32), [[LOAD3]](s32)
+ ; GFX9: $vgpr0_vgpr1_vgpr2_vgpr3 = COPY [[BUILD_VECTOR]](<4 x s32>)
%0:_(p5) = COPY $vgpr0
%1:_(<4 x s32>) = G_LOAD %0 :: (load 8, align 4, addrspace 5)
$vgpr0_vgpr1_vgpr2_vgpr3 = COPY %1
; SI-LABEL: name: test_store_global_v5s32_align1
; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; SI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[EXTRACT]](<4 x s32>)
; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
; SI: G_STORE [[COPY32]](s32), [[PTR_ADD14]](p1) :: (store 1, addrspace 1)
; SI: [[C9:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; SI: [[PTR_ADD15:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C9]](s64)
- ; SI: [[LSHR12:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
+ ; SI: [[LSHR12:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT1]], [[C]](s32)
; SI: [[COPY33:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
- ; SI: [[COPY34:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
+ ; SI: [[COPY34:%[0-9]+]]:_(s32) = COPY [[EXTRACT1]](s32)
; SI: [[AND8:%[0-9]+]]:_(s32) = G_AND [[COPY34]], [[C2]]
; SI: [[LSHR13:%[0-9]+]]:_(s32) = G_LSHR [[AND8]], [[COPY33]](s32)
; SI: [[COPY35:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
; SI: [[COPY36:%[0-9]+]]:_(s32) = COPY [[LSHR12]](s32)
; SI: [[AND9:%[0-9]+]]:_(s32) = G_AND [[COPY36]], [[C2]]
; SI: [[LSHR14:%[0-9]+]]:_(s32) = G_LSHR [[AND9]], [[COPY35]](s32)
- ; SI: [[COPY37:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
+ ; SI: [[COPY37:%[0-9]+]]:_(s32) = COPY [[EXTRACT1]](s32)
; SI: G_STORE [[COPY37]](s32), [[PTR_ADD15]](p1) :: (store 1, addrspace 1)
; SI: [[PTR_ADD16:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C3]](s64)
; SI: [[COPY38:%[0-9]+]]:_(s32) = COPY [[LSHR13]](s32)
; CI-LABEL: name: test_store_global_v5s32_align1
; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; CI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; CI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; CI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; CI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 1, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; CI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; CI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; CI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; CI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; CI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; CI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 1, addrspace 1)
+ ; CI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 1, addrspace 1)
; VI-LABEL: name: test_store_global_v5s32_align1
; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; VI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[EXTRACT]](<4 x s32>)
; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[UV]](s32)
; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
; VI: G_STORE [[ANYEXT7]](s32), [[PTR_ADD14]](p1) :: (store 1, addrspace 1)
; VI: [[C8:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; VI: [[PTR_ADD15:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C8]](s64)
- ; VI: [[TRUNC8:%[0-9]+]]:_(s16) = G_TRUNC [[UV4]](s32)
- ; VI: [[LSHR12:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
+ ; VI: [[TRUNC8:%[0-9]+]]:_(s16) = G_TRUNC [[EXTRACT1]](s32)
+ ; VI: [[LSHR12:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT1]], [[C]](s32)
; VI: [[TRUNC9:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR12]](s32)
; VI: [[LSHR13:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC8]], [[C1]](s16)
; VI: [[LSHR14:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC9]], [[C1]](s16)
- ; VI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
+ ; VI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[EXTRACT1]](s32)
; VI: G_STORE [[COPY10]](s32), [[PTR_ADD15]](p1) :: (store 1, addrspace 1)
; VI: [[PTR_ADD16:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C2]](s64)
; VI: [[ANYEXT8:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR13]](s16)
; GFX9-LABEL: name: test_store_global_v5s32_align1
; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; GFX9: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; GFX9: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 1, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; GFX9: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; GFX9: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; GFX9: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; GFX9: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 1, addrspace 1)
+ ; GFX9: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 1, addrspace 1)
%0:_(p1) = COPY $vgpr0_vgpr1
%1:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
G_STORE %1, %0 :: (store 20, align 1, addrspace 1)
; SI-LABEL: name: test_store_global_v5s32_align2
; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; SI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; SI: [[UV:%[0-9]+]]:_(<2 x s32>), [[UV1:%[0-9]+]]:_(<2 x s32>) = G_UNMERGE_VALUES [[EXTRACT]](<4 x s32>)
+ ; SI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](<2 x s32>)
; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
- ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[UV]](s32)
+ ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV2]], [[C]](s32)
+ ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[UV2]](s32)
; SI: G_STORE [[COPY2]](s32), [[COPY]](p1) :: (store 2, addrspace 1)
; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 2
; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
; SI: G_STORE [[COPY3]](s32), [[PTR_ADD]](p1) :: (store 2, addrspace 1)
; SI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
; SI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[UV1]], [[C]](s32)
- ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UV1]](s32)
+ ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[UV3]], [[C]](s32)
+ ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UV3]](s32)
; SI: G_STORE [[COPY4]](s32), [[PTR_ADD1]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD1]], [[C1]](s64)
; SI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
; SI: G_STORE [[COPY5]](s32), [[PTR_ADD2]](p1) :: (store 2, addrspace 1)
; SI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
; SI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; SI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[UV2]], [[C]](s32)
- ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[UV2]](s32)
+ ; SI: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](<2 x s32>)
+ ; SI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
+ ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
; SI: G_STORE [[COPY6]](s32), [[PTR_ADD3]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C1]](s64)
; SI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
; SI: G_STORE [[COPY7]](s32), [[PTR_ADD4]](p1) :: (store 2, addrspace 1)
- ; SI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; SI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; SI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[UV3]], [[C]](s32)
- ; SI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[UV3]](s32)
+ ; SI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C2]](s64)
+ ; SI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[UV5]], [[C]](s32)
+ ; SI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[UV5]](s32)
; SI: G_STORE [[COPY8]](s32), [[PTR_ADD5]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD5]], [[C1]](s64)
; SI: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
; SI: G_STORE [[COPY9]](s32), [[PTR_ADD6]](p1) :: (store 2, addrspace 1)
- ; SI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; SI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; SI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
- ; SI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
+ ; SI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; SI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
+ ; SI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT1]], [[C]](s32)
+ ; SI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[EXTRACT1]](s32)
+ ; SI: G_STORE [[COPY10]](s32), [[PTR_ADD7]](p1) :: (store 2, addrspace 1)
+ ; SI: [[PTR_ADD8:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C1]](s64)
+ ; SI: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
+ ; SI: G_STORE [[COPY11]](s32), [[PTR_ADD8]](p1) :: (store 2, addrspace 1)
+ ; CI-LABEL: name: test_store_global_v5s32_align2
+ ; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; CI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; CI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 2, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; CI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 2, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v5s32_align2
+ ; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; VI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; VI: [[UV:%[0-9]+]]:_(<2 x s32>), [[UV1:%[0-9]+]]:_(<2 x s32>) = G_UNMERGE_VALUES [[EXTRACT]](<4 x s32>)
+ ; VI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](<2 x s32>)
+ ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+ ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV2]], [[C]](s32)
+ ; VI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[UV2]](s32)
+ ; VI: G_STORE [[COPY2]](s32), [[COPY]](p1) :: (store 2, addrspace 1)
+ ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 2
+ ; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
+ ; VI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
+ ; VI: G_STORE [[COPY3]](s32), [[PTR_ADD]](p1) :: (store 2, addrspace 1)
+ ; VI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; VI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
+ ; VI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[UV3]], [[C]](s32)
+ ; VI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UV3]](s32)
+ ; VI: G_STORE [[COPY4]](s32), [[PTR_ADD1]](p1) :: (store 2, addrspace 1)
+ ; VI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD1]], [[C1]](s64)
+ ; VI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
+ ; VI: G_STORE [[COPY5]](s32), [[PTR_ADD2]](p1) :: (store 2, addrspace 1)
+ ; VI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; VI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
+ ; VI: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](<2 x s32>)
+ ; VI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
+ ; VI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
+ ; VI: G_STORE [[COPY6]](s32), [[PTR_ADD3]](p1) :: (store 2, addrspace 1)
+ ; VI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C1]](s64)
+ ; VI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
+ ; VI: G_STORE [[COPY7]](s32), [[PTR_ADD4]](p1) :: (store 2, addrspace 1)
+ ; VI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C2]](s64)
+ ; VI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[UV5]], [[C]](s32)
+ ; VI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[UV5]](s32)
+ ; VI: G_STORE [[COPY8]](s32), [[PTR_ADD5]](p1) :: (store 2, addrspace 1)
+ ; VI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD5]], [[C1]](s64)
+ ; VI: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
+ ; VI: G_STORE [[COPY9]](s32), [[PTR_ADD6]](p1) :: (store 2, addrspace 1)
+ ; VI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; VI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
+ ; VI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT1]], [[C]](s32)
+ ; VI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[EXTRACT1]](s32)
+ ; VI: G_STORE [[COPY10]](s32), [[PTR_ADD7]](p1) :: (store 2, addrspace 1)
+ ; VI: [[PTR_ADD8:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C1]](s64)
+ ; VI: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
+ ; VI: G_STORE [[COPY11]](s32), [[PTR_ADD8]](p1) :: (store 2, addrspace 1)
+ ; GFX9-LABEL: name: test_store_global_v5s32_align2
+ ; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 2, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; GFX9: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 2, addrspace 1)
+ %0:_(p1) = COPY $vgpr0_vgpr1
+ %1:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ G_STORE %1, %0 :: (store 20, align 2, addrspace 1)
+...
+
+---
+name: test_store_global_v5s32_align4
+body: |
+ bb.0:
+ liveins: $vgpr0_vgpr1, $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+
+ ; SI-LABEL: name: test_store_global_v5s32_align4
+ ; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; SI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; SI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; SI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
+ ; CI-LABEL: name: test_store_global_v5s32_align4
+ ; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; CI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; CI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; CI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v5s32_align4
+ ; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; VI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; VI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; VI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
+ ; GFX9-LABEL: name: test_store_global_v5s32_align4
+ ; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; GFX9: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
+ %0:_(p1) = COPY $vgpr0_vgpr1
+ %1:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ G_STORE %1, %0 :: (store 20, align 4, addrspace 1)
+...
+
+---
+name: test_store_global_v5s32_align8
+body: |
+ bb.0:
+ liveins: $vgpr0_vgpr1, $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+
+ ; SI-LABEL: name: test_store_global_v5s32_align8
+ ; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; SI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; SI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; SI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 8, addrspace 1)
+ ; CI-LABEL: name: test_store_global_v5s32_align8
+ ; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; CI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; CI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; CI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 8, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v5s32_align8
+ ; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; VI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; VI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; VI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 8, addrspace 1)
+ ; GFX9-LABEL: name: test_store_global_v5s32_align8
+ ; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; GFX9: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 8, addrspace 1)
+ %0:_(p1) = COPY $vgpr0_vgpr1
+ %1:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ G_STORE %1, %0 :: (store 20, align 8, addrspace 1)
+...
+
+---
+name: test_store_global_v5s32_align16
+body: |
+ bb.0:
+ liveins: $vgpr0_vgpr1, $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+
+ ; SI-LABEL: name: test_store_global_v5s32_align16
+ ; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; SI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; SI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; SI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
+ ; CI-LABEL: name: test_store_global_v5s32_align16
+ ; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; CI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; CI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; CI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v5s32_align16
+ ; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; VI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; VI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; VI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
+ ; GFX9-LABEL: name: test_store_global_v5s32_align16
+ ; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[COPY1]](<5 x s32>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x s32>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; GFX9: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
+ %0:_(p1) = COPY $vgpr0_vgpr1
+ %1:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ G_STORE %1, %0 :: (store 20, align 16, addrspace 1)
+...
+
+---
+name: test_store_global_v5p3_align1
+body: |
+ bb.0:
+ liveins: $vgpr0_vgpr1, $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; SI-LABEL: name: test_store_global_v5p3_align1
+ ; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; SI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; SI: [[UV:%[0-9]+]]:_(p3), [[UV1:%[0-9]+]]:_(p3), [[UV2:%[0-9]+]]:_(p3), [[UV3:%[0-9]+]]:_(p3) = G_UNMERGE_VALUES [[EXTRACT]](<4 x p3>)
+ ; SI: [[PTRTOINT:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV]](p3)
+ ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+ ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT]], [[C]](s32)
+ ; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
+ ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
+ ; SI: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
+ ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[PTRTOINT]](s32)
+ ; SI: [[AND:%[0-9]+]]:_(s32) = G_AND [[COPY3]], [[C2]]
+ ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[AND]], [[COPY2]](s32)
+ ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
+ ; SI: [[AND1:%[0-9]+]]:_(s32) = G_AND [[COPY4]], [[C2]]
+ ; SI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[AND1]], [[C1]](s32)
+ ; SI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[PTRTOINT]](s32)
+ ; SI: G_STORE [[COPY5]](s32), [[COPY]](p1) :: (store 1, addrspace 1)
+ ; SI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
+ ; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
+ ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
+ ; SI: G_STORE [[COPY6]](s32), [[PTR_ADD]](p1) :: (store 1, addrspace 1)
+ ; SI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 2
+ ; SI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
+ ; SI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
+ ; SI: G_STORE [[COPY7]](s32), [[PTR_ADD1]](p1) :: (store 1, addrspace 1)
+ ; SI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 3
+ ; SI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
+ ; SI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
+ ; SI: G_STORE [[COPY8]](s32), [[PTR_ADD2]](p1) :: (store 1, addrspace 1)
+ ; SI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; SI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
+ ; SI: [[PTRTOINT1:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV1]](p3)
+ ; SI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT1]], [[C]](s32)
+ ; SI: [[COPY9:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
+ ; SI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[PTRTOINT1]](s32)
+ ; SI: [[AND2:%[0-9]+]]:_(s32) = G_AND [[COPY10]], [[C2]]
+ ; SI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[AND2]], [[COPY9]](s32)
+ ; SI: [[COPY11:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
+ ; SI: [[COPY12:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
+ ; SI: [[AND3:%[0-9]+]]:_(s32) = G_AND [[COPY12]], [[C2]]
+ ; SI: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[AND3]], [[COPY11]](s32)
+ ; SI: [[COPY13:%[0-9]+]]:_(s32) = COPY [[PTRTOINT1]](s32)
+ ; SI: G_STORE [[COPY13]](s32), [[PTR_ADD3]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C3]](s64)
+ ; SI: [[COPY14:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
+ ; SI: G_STORE [[COPY14]](s32), [[PTR_ADD4]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C4]](s64)
+ ; SI: [[COPY15:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
+ ; SI: G_STORE [[COPY15]](s32), [[PTR_ADD5]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C5]](s64)
+ ; SI: [[COPY16:%[0-9]+]]:_(s32) = COPY [[LSHR5]](s32)
+ ; SI: G_STORE [[COPY16]](s32), [[PTR_ADD6]](p1) :: (store 1, addrspace 1)
+ ; SI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; SI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
+ ; SI: [[PTRTOINT2:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV2]](p3)
+ ; SI: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT2]], [[C]](s32)
+ ; SI: [[COPY17:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
+ ; SI: [[COPY18:%[0-9]+]]:_(s32) = COPY [[PTRTOINT2]](s32)
+ ; SI: [[AND4:%[0-9]+]]:_(s32) = G_AND [[COPY18]], [[C2]]
+ ; SI: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[AND4]], [[COPY17]](s32)
+ ; SI: [[COPY19:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
+ ; SI: [[COPY20:%[0-9]+]]:_(s32) = COPY [[LSHR6]](s32)
+ ; SI: [[AND5:%[0-9]+]]:_(s32) = G_AND [[COPY20]], [[C2]]
+ ; SI: [[LSHR8:%[0-9]+]]:_(s32) = G_LSHR [[AND5]], [[COPY19]](s32)
+ ; SI: [[COPY21:%[0-9]+]]:_(s32) = COPY [[PTRTOINT2]](s32)
+ ; SI: G_STORE [[COPY21]](s32), [[PTR_ADD7]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD8:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C3]](s64)
+ ; SI: [[COPY22:%[0-9]+]]:_(s32) = COPY [[LSHR7]](s32)
+ ; SI: G_STORE [[COPY22]](s32), [[PTR_ADD8]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD9:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C4]](s64)
+ ; SI: [[COPY23:%[0-9]+]]:_(s32) = COPY [[LSHR6]](s32)
+ ; SI: G_STORE [[COPY23]](s32), [[PTR_ADD9]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD10:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C5]](s64)
+ ; SI: [[COPY24:%[0-9]+]]:_(s32) = COPY [[LSHR8]](s32)
+ ; SI: G_STORE [[COPY24]](s32), [[PTR_ADD10]](p1) :: (store 1, addrspace 1)
+ ; SI: [[C8:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
+ ; SI: [[PTR_ADD11:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C8]](s64)
+ ; SI: [[PTRTOINT3:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV3]](p3)
+ ; SI: [[LSHR9:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT3]], [[C]](s32)
+ ; SI: [[COPY25:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
+ ; SI: [[COPY26:%[0-9]+]]:_(s32) = COPY [[PTRTOINT3]](s32)
+ ; SI: [[AND6:%[0-9]+]]:_(s32) = G_AND [[COPY26]], [[C2]]
+ ; SI: [[LSHR10:%[0-9]+]]:_(s32) = G_LSHR [[AND6]], [[COPY25]](s32)
+ ; SI: [[COPY27:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
+ ; SI: [[COPY28:%[0-9]+]]:_(s32) = COPY [[LSHR9]](s32)
+ ; SI: [[AND7:%[0-9]+]]:_(s32) = G_AND [[COPY28]], [[C2]]
+ ; SI: [[LSHR11:%[0-9]+]]:_(s32) = G_LSHR [[AND7]], [[COPY27]](s32)
+ ; SI: [[COPY29:%[0-9]+]]:_(s32) = COPY [[PTRTOINT3]](s32)
+ ; SI: G_STORE [[COPY29]](s32), [[PTR_ADD11]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD12:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD11]], [[C3]](s64)
+ ; SI: [[COPY30:%[0-9]+]]:_(s32) = COPY [[LSHR10]](s32)
+ ; SI: G_STORE [[COPY30]](s32), [[PTR_ADD12]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD13:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD11]], [[C4]](s64)
+ ; SI: [[COPY31:%[0-9]+]]:_(s32) = COPY [[LSHR9]](s32)
+ ; SI: G_STORE [[COPY31]](s32), [[PTR_ADD13]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD14:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD11]], [[C5]](s64)
+ ; SI: [[COPY32:%[0-9]+]]:_(s32) = COPY [[LSHR11]](s32)
+ ; SI: G_STORE [[COPY32]](s32), [[PTR_ADD14]](p1) :: (store 1, addrspace 1)
+ ; SI: [[C9:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; SI: [[PTR_ADD15:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C9]](s64)
+ ; SI: [[LSHR12:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT1]], [[C]](s32)
+ ; SI: [[COPY33:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
+ ; SI: [[COPY34:%[0-9]+]]:_(s32) = COPY [[EXTRACT1]](s32)
+ ; SI: [[AND8:%[0-9]+]]:_(s32) = G_AND [[COPY34]], [[C2]]
+ ; SI: [[LSHR13:%[0-9]+]]:_(s32) = G_LSHR [[AND8]], [[COPY33]](s32)
+ ; SI: [[COPY35:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
+ ; SI: [[COPY36:%[0-9]+]]:_(s32) = COPY [[LSHR12]](s32)
+ ; SI: [[AND9:%[0-9]+]]:_(s32) = G_AND [[COPY36]], [[C2]]
+ ; SI: [[LSHR14:%[0-9]+]]:_(s32) = G_LSHR [[AND9]], [[COPY35]](s32)
+ ; SI: [[COPY37:%[0-9]+]]:_(s32) = COPY [[EXTRACT1]](s32)
+ ; SI: G_STORE [[COPY37]](s32), [[PTR_ADD15]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD16:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C3]](s64)
+ ; SI: [[COPY38:%[0-9]+]]:_(s32) = COPY [[LSHR13]](s32)
+ ; SI: G_STORE [[COPY38]](s32), [[PTR_ADD16]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD17:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C4]](s64)
+ ; SI: [[COPY39:%[0-9]+]]:_(s32) = COPY [[LSHR12]](s32)
+ ; SI: G_STORE [[COPY39]](s32), [[PTR_ADD17]](p1) :: (store 1, addrspace 1)
+ ; SI: [[PTR_ADD18:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C5]](s64)
+ ; SI: [[COPY40:%[0-9]+]]:_(s32) = COPY [[LSHR14]](s32)
+ ; SI: G_STORE [[COPY40]](s32), [[PTR_ADD18]](p1) :: (store 1, addrspace 1)
+ ; CI-LABEL: name: test_store_global_v5p3_align1
+ ; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; CI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; CI: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 1, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; CI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 1, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v5p3_align1
+ ; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; VI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; VI: [[UV:%[0-9]+]]:_(p3), [[UV1:%[0-9]+]]:_(p3), [[UV2:%[0-9]+]]:_(p3), [[UV3:%[0-9]+]]:_(p3) = G_UNMERGE_VALUES [[EXTRACT]](<4 x p3>)
+ ; VI: [[PTRTOINT:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV]](p3)
+ ; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[PTRTOINT]](s32)
+ ; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+ ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT]], [[C]](s32)
+ ; VI: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR]](s32)
+ ; VI: [[C1:%[0-9]+]]:_(s16) = G_CONSTANT i16 8
+ ; VI: [[LSHR1:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC]], [[C1]](s16)
+ ; VI: [[LSHR2:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC1]], [[C1]](s16)
+ ; VI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[PTRTOINT]](s32)
+ ; VI: G_STORE [[COPY2]](s32), [[COPY]](p1) :: (store 1, addrspace 1)
+ ; VI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
+ ; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
+ ; VI: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR1]](s16)
+ ; VI: G_STORE [[ANYEXT]](s32), [[PTR_ADD]](p1) :: (store 1, addrspace 1)
+ ; VI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 2
+ ; VI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
+ ; VI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
+ ; VI: G_STORE [[COPY3]](s32), [[PTR_ADD1]](p1) :: (store 1, addrspace 1)
+ ; VI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 3
+ ; VI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
+ ; VI: [[ANYEXT1:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR2]](s16)
+ ; VI: G_STORE [[ANYEXT1]](s32), [[PTR_ADD2]](p1) :: (store 1, addrspace 1)
+ ; VI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; VI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
+ ; VI: [[PTRTOINT1:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV1]](p3)
+ ; VI: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[PTRTOINT1]](s32)
+ ; VI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT1]], [[C]](s32)
+ ; VI: [[TRUNC3:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR3]](s32)
+ ; VI: [[LSHR4:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC2]], [[C1]](s16)
+ ; VI: [[LSHR5:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC3]], [[C1]](s16)
+ ; VI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[PTRTOINT1]](s32)
+ ; VI: G_STORE [[COPY4]](s32), [[PTR_ADD3]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C2]](s64)
+ ; VI: [[ANYEXT2:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR4]](s16)
+ ; VI: G_STORE [[ANYEXT2]](s32), [[PTR_ADD4]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C3]](s64)
+ ; VI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
+ ; VI: G_STORE [[COPY5]](s32), [[PTR_ADD5]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C4]](s64)
+ ; VI: [[ANYEXT3:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR5]](s16)
+ ; VI: G_STORE [[ANYEXT3]](s32), [[PTR_ADD6]](p1) :: (store 1, addrspace 1)
+ ; VI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; VI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
+ ; VI: [[PTRTOINT2:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV2]](p3)
+ ; VI: [[TRUNC4:%[0-9]+]]:_(s16) = G_TRUNC [[PTRTOINT2]](s32)
+ ; VI: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT2]], [[C]](s32)
+ ; VI: [[TRUNC5:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR6]](s32)
+ ; VI: [[LSHR7:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC4]], [[C1]](s16)
+ ; VI: [[LSHR8:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC5]], [[C1]](s16)
+ ; VI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[PTRTOINT2]](s32)
+ ; VI: G_STORE [[COPY6]](s32), [[PTR_ADD7]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD8:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C2]](s64)
+ ; VI: [[ANYEXT4:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR7]](s16)
+ ; VI: G_STORE [[ANYEXT4]](s32), [[PTR_ADD8]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD9:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C3]](s64)
+ ; VI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR6]](s32)
+ ; VI: G_STORE [[COPY7]](s32), [[PTR_ADD9]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD10:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C4]](s64)
+ ; VI: [[ANYEXT5:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR8]](s16)
+ ; VI: G_STORE [[ANYEXT5]](s32), [[PTR_ADD10]](p1) :: (store 1, addrspace 1)
+ ; VI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
+ ; VI: [[PTR_ADD11:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
+ ; VI: [[PTRTOINT3:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV3]](p3)
+ ; VI: [[TRUNC6:%[0-9]+]]:_(s16) = G_TRUNC [[PTRTOINT3]](s32)
+ ; VI: [[LSHR9:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT3]], [[C]](s32)
+ ; VI: [[TRUNC7:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR9]](s32)
+ ; VI: [[LSHR10:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC6]], [[C1]](s16)
+ ; VI: [[LSHR11:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC7]], [[C1]](s16)
+ ; VI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[PTRTOINT3]](s32)
+ ; VI: G_STORE [[COPY8]](s32), [[PTR_ADD11]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD12:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD11]], [[C2]](s64)
+ ; VI: [[ANYEXT6:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR10]](s16)
+ ; VI: G_STORE [[ANYEXT6]](s32), [[PTR_ADD12]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD13:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD11]], [[C3]](s64)
+ ; VI: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR9]](s32)
+ ; VI: G_STORE [[COPY9]](s32), [[PTR_ADD13]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD14:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD11]], [[C4]](s64)
+ ; VI: [[ANYEXT7:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR11]](s16)
+ ; VI: G_STORE [[ANYEXT7]](s32), [[PTR_ADD14]](p1) :: (store 1, addrspace 1)
+ ; VI: [[C8:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; VI: [[PTR_ADD15:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C8]](s64)
+ ; VI: [[TRUNC8:%[0-9]+]]:_(s16) = G_TRUNC [[EXTRACT1]](s32)
+ ; VI: [[LSHR12:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT1]], [[C]](s32)
+ ; VI: [[TRUNC9:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR12]](s32)
+ ; VI: [[LSHR13:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC8]], [[C1]](s16)
+ ; VI: [[LSHR14:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC9]], [[C1]](s16)
+ ; VI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[EXTRACT1]](s32)
+ ; VI: G_STORE [[COPY10]](s32), [[PTR_ADD15]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD16:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C2]](s64)
+ ; VI: [[ANYEXT8:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR13]](s16)
+ ; VI: G_STORE [[ANYEXT8]](s32), [[PTR_ADD16]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD17:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C3]](s64)
+ ; VI: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR12]](s32)
+ ; VI: G_STORE [[COPY11]](s32), [[PTR_ADD17]](p1) :: (store 1, addrspace 1)
+ ; VI: [[PTR_ADD18:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C4]](s64)
+ ; VI: [[ANYEXT9:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR14]](s16)
+ ; VI: G_STORE [[ANYEXT9]](s32), [[PTR_ADD18]](p1) :: (store 1, addrspace 1)
+ ; GFX9-LABEL: name: test_store_global_v5p3_align1
+ ; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 1, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; GFX9: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 1, addrspace 1)
+ %0:_(p1) = COPY $vgpr0_vgpr1
+ %1:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ G_STORE %1, %0 :: (store 20, align 1, addrspace 1)
+...
+
+---
+name: test_store_global_v5p3_align2
+body: |
+ bb.0:
+ liveins: $vgpr0_vgpr1, $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+
+ ; SI-LABEL: name: test_store_global_v5p3_align2
+ ; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; SI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; SI: [[UV:%[0-9]+]]:_(<2 x p3>), [[UV1:%[0-9]+]]:_(<2 x p3>) = G_UNMERGE_VALUES [[EXTRACT]](<4 x p3>)
+ ; SI: [[UV2:%[0-9]+]]:_(p3), [[UV3:%[0-9]+]]:_(p3) = G_UNMERGE_VALUES [[UV]](<2 x p3>)
+ ; SI: [[PTRTOINT:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV2]](p3)
+ ; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
+ ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT]], [[C]](s32)
+ ; SI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[PTRTOINT]](s32)
+ ; SI: G_STORE [[COPY2]](s32), [[COPY]](p1) :: (store 2, addrspace 1)
+ ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 2
+ ; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
+ ; SI: [[COPY3:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
+ ; SI: G_STORE [[COPY3]](s32), [[PTR_ADD]](p1) :: (store 2, addrspace 1)
+ ; SI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; SI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
+ ; SI: [[PTRTOINT1:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV3]](p3)
+ ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT1]], [[C]](s32)
+ ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[PTRTOINT1]](s32)
+ ; SI: G_STORE [[COPY4]](s32), [[PTR_ADD1]](p1) :: (store 2, addrspace 1)
+ ; SI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD1]], [[C1]](s64)
+ ; SI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
+ ; SI: G_STORE [[COPY5]](s32), [[PTR_ADD2]](p1) :: (store 2, addrspace 1)
+ ; SI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; SI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
+ ; SI: [[UV4:%[0-9]+]]:_(p3), [[UV5:%[0-9]+]]:_(p3) = G_UNMERGE_VALUES [[UV1]](<2 x p3>)
+ ; SI: [[PTRTOINT2:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV4]](p3)
+ ; SI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT2]], [[C]](s32)
+ ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[PTRTOINT2]](s32)
+ ; SI: G_STORE [[COPY6]](s32), [[PTR_ADD3]](p1) :: (store 2, addrspace 1)
+ ; SI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C1]](s64)
+ ; SI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
+ ; SI: G_STORE [[COPY7]](s32), [[PTR_ADD4]](p1) :: (store 2, addrspace 1)
+ ; SI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C2]](s64)
+ ; SI: [[PTRTOINT3:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV5]](p3)
+ ; SI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT3]], [[C]](s32)
+ ; SI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[PTRTOINT3]](s32)
+ ; SI: G_STORE [[COPY8]](s32), [[PTR_ADD5]](p1) :: (store 2, addrspace 1)
+ ; SI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD5]], [[C1]](s64)
+ ; SI: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
+ ; SI: G_STORE [[COPY9]](s32), [[PTR_ADD6]](p1) :: (store 2, addrspace 1)
+ ; SI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; SI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
+ ; SI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT1]], [[C]](s32)
+ ; SI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[EXTRACT1]](s32)
; SI: G_STORE [[COPY10]](s32), [[PTR_ADD7]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD8:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C1]](s64)
; SI: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
; SI: G_STORE [[COPY11]](s32), [[PTR_ADD8]](p1) :: (store 2, addrspace 1)
- ; CI-LABEL: name: test_store_global_v5s32_align2
+ ; CI-LABEL: name: test_store_global_v5p3_align2
; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; CI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; CI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; CI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; CI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; CI: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 2, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; CI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; CI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; CI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; CI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; CI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; CI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 2, addrspace 1)
- ; VI-LABEL: name: test_store_global_v5s32_align2
+ ; CI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 2, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v5p3_align2
; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; VI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
+ ; VI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; VI: [[UV:%[0-9]+]]:_(<2 x p3>), [[UV1:%[0-9]+]]:_(<2 x p3>) = G_UNMERGE_VALUES [[EXTRACT]](<4 x p3>)
+ ; VI: [[UV2:%[0-9]+]]:_(p3), [[UV3:%[0-9]+]]:_(p3) = G_UNMERGE_VALUES [[UV]](<2 x p3>)
+ ; VI: [[PTRTOINT:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV2]](p3)
; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
- ; VI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[UV]](s32)
+ ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT]], [[C]](s32)
+ ; VI: [[COPY2:%[0-9]+]]:_(s32) = COPY [[PTRTOINT]](s32)
; VI: G_STORE [[COPY2]](s32), [[COPY]](p1) :: (store 2, addrspace 1)
; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 2
; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
; VI: G_STORE [[COPY3]](s32), [[PTR_ADD]](p1) :: (store 2, addrspace 1)
; VI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
; VI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; VI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[UV1]], [[C]](s32)
- ; VI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UV1]](s32)
+ ; VI: [[PTRTOINT1:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV3]](p3)
+ ; VI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT1]], [[C]](s32)
+ ; VI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[PTRTOINT1]](s32)
; VI: G_STORE [[COPY4]](s32), [[PTR_ADD1]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD1]], [[C1]](s64)
; VI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
; VI: G_STORE [[COPY5]](s32), [[PTR_ADD2]](p1) :: (store 2, addrspace 1)
; VI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
; VI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; VI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[UV2]], [[C]](s32)
- ; VI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[UV2]](s32)
+ ; VI: [[UV4:%[0-9]+]]:_(p3), [[UV5:%[0-9]+]]:_(p3) = G_UNMERGE_VALUES [[UV1]](<2 x p3>)
+ ; VI: [[PTRTOINT2:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV4]](p3)
+ ; VI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT2]], [[C]](s32)
+ ; VI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[PTRTOINT2]](s32)
; VI: G_STORE [[COPY6]](s32), [[PTR_ADD3]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C1]](s64)
; VI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
; VI: G_STORE [[COPY7]](s32), [[PTR_ADD4]](p1) :: (store 2, addrspace 1)
- ; VI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; VI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; VI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[UV3]], [[C]](s32)
- ; VI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[UV3]](s32)
+ ; VI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C2]](s64)
+ ; VI: [[PTRTOINT3:%[0-9]+]]:_(s32) = G_PTRTOINT [[UV5]](p3)
+ ; VI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[PTRTOINT3]], [[C]](s32)
+ ; VI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[PTRTOINT3]](s32)
; VI: G_STORE [[COPY8]](s32), [[PTR_ADD5]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD5]], [[C1]](s64)
; VI: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
; VI: G_STORE [[COPY9]](s32), [[PTR_ADD6]](p1) :: (store 2, addrspace 1)
- ; VI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; VI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; VI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
- ; VI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
+ ; VI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; VI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
+ ; VI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT1]], [[C]](s32)
+ ; VI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[EXTRACT1]](s32)
; VI: G_STORE [[COPY10]](s32), [[PTR_ADD7]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD8:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C1]](s64)
; VI: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
; VI: G_STORE [[COPY11]](s32), [[PTR_ADD8]](p1) :: (store 2, addrspace 1)
- ; GFX9-LABEL: name: test_store_global_v5s32_align2
+ ; GFX9-LABEL: name: test_store_global_v5p3_align2
; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; GFX9: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 2, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; GFX9: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; GFX9: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; GFX9: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; GFX9: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 2, addrspace 1)
+ ; GFX9: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 2, addrspace 1)
%0:_(p1) = COPY $vgpr0_vgpr1
- %1:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ %1:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
G_STORE %1, %0 :: (store 20, align 2, addrspace 1)
...
---
-name: test_store_global_v5s32_align4
+name: test_store_global_v5p3_align4
body: |
bb.0:
liveins: $vgpr0_vgpr1, $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; SI-LABEL: name: test_store_global_v5s32_align4
+ ; SI-LABEL: name: test_store_global_v5p3_align4
; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; SI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; SI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, addrspace 1)
- ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; SI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; SI: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; SI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; SI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; SI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
- ; SI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; SI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; SI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; SI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; SI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; SI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, addrspace 1)
- ; CI-LABEL: name: test_store_global_v5s32_align4
+ ; SI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
+ ; CI-LABEL: name: test_store_global_v5p3_align4
; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; CI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; CI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; CI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, addrspace 1)
- ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; CI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; CI: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; CI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; CI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; CI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
- ; CI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; CI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; CI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; CI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; CI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, addrspace 1)
- ; VI-LABEL: name: test_store_global_v5s32_align4
+ ; CI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v5p3_align4
; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; VI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; VI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, addrspace 1)
- ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; VI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; VI: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; VI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; VI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; VI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
- ; VI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; VI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; VI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; VI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; VI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; VI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, addrspace 1)
- ; GFX9-LABEL: name: test_store_global_v5s32_align4
+ ; VI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
+ ; GFX9-LABEL: name: test_store_global_v5p3_align4
; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; GFX9: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; GFX9: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; GFX9: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; GFX9: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; GFX9: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, addrspace 1)
+ ; GFX9: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
%0:_(p1) = COPY $vgpr0_vgpr1
- %1:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ %1:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
G_STORE %1, %0 :: (store 20, align 4, addrspace 1)
...
---
-name: test_store_global_v5s32_align8
+name: test_store_global_v5p3_align8
body: |
bb.0:
liveins: $vgpr0_vgpr1, $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; SI-LABEL: name: test_store_global_v5s32_align8
+ ; SI-LABEL: name: test_store_global_v5p3_align8
; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; SI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; SI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 8, addrspace 1)
- ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; SI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; SI: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; SI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; SI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; SI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; SI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; SI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; SI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; SI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; SI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; SI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 8, addrspace 1)
- ; CI-LABEL: name: test_store_global_v5s32_align8
+ ; SI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 8, addrspace 1)
+ ; CI-LABEL: name: test_store_global_v5p3_align8
; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; CI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; CI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; CI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 8, addrspace 1)
- ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; CI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; CI: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; CI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; CI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; CI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; CI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; CI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; CI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; CI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; CI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 8, addrspace 1)
- ; VI-LABEL: name: test_store_global_v5s32_align8
+ ; CI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 8, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v5p3_align8
; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; VI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; VI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 8, addrspace 1)
- ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; VI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; VI: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; VI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; VI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; VI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; VI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; VI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; VI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; VI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; VI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; VI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 8, addrspace 1)
- ; GFX9-LABEL: name: test_store_global_v5s32_align8
+ ; VI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 8, addrspace 1)
+ ; GFX9-LABEL: name: test_store_global_v5p3_align8
; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; GFX9: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 8, addrspace 1)
- ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; GFX9: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; GFX9: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; GFX9: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; GFX9: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 8, addrspace 1)
+ ; GFX9: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 8, addrspace 1)
%0:_(p1) = COPY $vgpr0_vgpr1
- %1:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ %1:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
G_STORE %1, %0 :: (store 20, align 8, addrspace 1)
...
---
-name: test_store_global_v5s32_align16
+name: test_store_global_v5p3_align16
body: |
bb.0:
liveins: $vgpr0_vgpr1, $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; SI-LABEL: name: test_store_global_v5s32_align16
+ ; SI-LABEL: name: test_store_global_v5p3_align16
; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; SI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; SI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 16, addrspace 1)
- ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; SI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; SI: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; SI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; SI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; SI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; SI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; SI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; SI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; SI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; SI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; SI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 16, addrspace 1)
- ; CI-LABEL: name: test_store_global_v5s32_align16
+ ; SI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
+ ; CI-LABEL: name: test_store_global_v5p3_align16
; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; CI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; CI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; CI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 16, addrspace 1)
- ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; CI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; CI: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; CI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; CI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; CI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; CI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; CI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; CI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; CI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; CI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 16, addrspace 1)
- ; VI-LABEL: name: test_store_global_v5s32_align16
+ ; CI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v5p3_align16
; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; VI: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; VI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 16, addrspace 1)
- ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; VI: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; VI: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; VI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; VI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; VI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; VI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; VI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; VI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; VI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; VI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; VI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 16, addrspace 1)
- ; GFX9-LABEL: name: test_store_global_v5s32_align16
+ ; VI: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
+ ; GFX9-LABEL: name: test_store_global_v5p3_align16
; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
- ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
- ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<5 x s32>)
- ; GFX9: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 16, addrspace 1)
- ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; GFX9: [[COPY1:%[0-9]+]]:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x p3>) = G_EXTRACT [[COPY1]](<5 x p3>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY1]](<5 x p3>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<4 x p3>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; GFX9: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
- ; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; GFX9: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; GFX9: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; GFX9: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 16, addrspace 1)
+ ; GFX9: G_STORE [[EXTRACT1]](s32), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
%0:_(p1) = COPY $vgpr0_vgpr1
- %1:_(<5 x s32>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ %1:_(<5 x p3>) = COPY $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+ G_STORE %1, %0 :: (store 20, align 16, addrspace 1)
+...
+
+---
+name: test_store_global_v10s16_align4
+body: |
+ bb.0:
+ liveins: $vgpr0_vgpr1, $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+
+ ; SI-LABEL: name: test_store_global_v10s16_align4
+ ; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; SI: [[DEF:%[0-9]+]]:_(<10 x s32>) = G_IMPLICIT_DEF
+ ; SI: [[TRUNC:%[0-9]+]]:_(<10 x s16>) = G_TRUNC [[DEF]](<10 x s32>)
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[TRUNC]](<10 x s16>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(<2 x s16>) = G_EXTRACT [[TRUNC]](<10 x s16>), 128
+ ; SI: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; SI: G_STORE [[EXTRACT1]](<2 x s16>), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
+ ; CI-LABEL: name: test_store_global_v10s16_align4
+ ; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; CI: [[DEF:%[0-9]+]]:_(<10 x s32>) = G_IMPLICIT_DEF
+ ; CI: [[TRUNC:%[0-9]+]]:_(<10 x s16>) = G_TRUNC [[DEF]](<10 x s32>)
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[TRUNC]](<10 x s16>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(<2 x s16>) = G_EXTRACT [[TRUNC]](<10 x s16>), 128
+ ; CI: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; CI: G_STORE [[EXTRACT1]](<2 x s16>), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v10s16_align4
+ ; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; VI: [[DEF:%[0-9]+]]:_(<10 x s32>) = G_IMPLICIT_DEF
+ ; VI: [[TRUNC:%[0-9]+]]:_(<10 x s16>) = G_TRUNC [[DEF]](<10 x s32>)
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[TRUNC]](<10 x s16>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(<2 x s16>) = G_EXTRACT [[TRUNC]](<10 x s16>), 128
+ ; VI: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; VI: G_STORE [[EXTRACT1]](<2 x s16>), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
+ ; GFX9-LABEL: name: test_store_global_v10s16_align4
+ ; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; GFX9: [[DEF:%[0-9]+]]:_(<10 x s32>) = G_IMPLICIT_DEF
+ ; GFX9: [[TRUNC:%[0-9]+]]:_(<10 x s16>) = G_TRUNC [[DEF]](<10 x s32>)
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[TRUNC]](<10 x s16>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(<2 x s16>) = G_EXTRACT [[TRUNC]](<10 x s16>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; GFX9: G_STORE [[EXTRACT1]](<2 x s16>), [[PTR_ADD]](p1) :: (store 4, align 16, addrspace 1)
+ %0:_(p1) = COPY $vgpr0_vgpr1
+ %1:_(<10 x s16>) = G_IMPLICIT_DEF
+ G_STORE %1, %0 :: (store 20, align 16, addrspace 1)
+...
+
+---
+name: test_store_global_v11s16_align4
+body: |
+ bb.0:
+ liveins: $vgpr0_vgpr1, $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+
+ ; SI-LABEL: name: test_store_global_v11s16_align4
+ ; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; SI: [[DEF:%[0-9]+]]:_(<11 x s16>) = G_IMPLICIT_DEF
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[DEF]](<11 x s16>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<11 x s16>), 128
+ ; SI: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; SI: G_STORE [[EXTRACT1]](<3 x s16>), [[PTR_ADD]](p1) :: (store 6, align 16, addrspace 1)
+ ; CI-LABEL: name: test_store_global_v11s16_align4
+ ; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; CI: [[DEF:%[0-9]+]]:_(<11 x s16>) = G_IMPLICIT_DEF
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[DEF]](<11 x s16>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<11 x s16>), 128
+ ; CI: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; CI: G_STORE [[EXTRACT1]](<3 x s16>), [[PTR_ADD]](p1) :: (store 6, align 16, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v11s16_align4
+ ; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; VI: [[DEF:%[0-9]+]]:_(<11 x s16>) = G_IMPLICIT_DEF
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[DEF]](<11 x s16>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<11 x s16>), 128
+ ; VI: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; VI: G_STORE [[EXTRACT1]](<3 x s16>), [[PTR_ADD]](p1) :: (store 6, align 16, addrspace 1)
+ ; GFX9-LABEL: name: test_store_global_v11s16_align4
+ ; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; GFX9: [[DEF:%[0-9]+]]:_(<11 x s16>) = G_IMPLICIT_DEF
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[DEF]](<11 x s16>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(<3 x s16>) = G_EXTRACT [[DEF]](<11 x s16>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; GFX9: G_STORE [[EXTRACT1]](<3 x s16>), [[PTR_ADD]](p1) :: (store 6, align 16, addrspace 1)
+ %0:_(p1) = COPY $vgpr0_vgpr1
+ %1:_(<11 x s16>) = G_IMPLICIT_DEF
+ G_STORE %1, %0 :: (store 20, align 16, addrspace 1)
+...
+
+---
+name: test_store_global_v12s16_align4
+body: |
+ bb.0:
+ liveins: $vgpr0_vgpr1, $vgpr2_vgpr3_vgpr4_vgpr5_vgpr6
+
+ ; SI-LABEL: name: test_store_global_v12s16_align4
+ ; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; SI: [[DEF:%[0-9]+]]:_(<12 x s32>) = G_IMPLICIT_DEF
+ ; SI: [[TRUNC:%[0-9]+]]:_(<12 x s16>) = G_TRUNC [[DEF]](<12 x s32>)
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[TRUNC]](<12 x s16>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(<4 x s16>) = G_EXTRACT [[TRUNC]](<12 x s16>), 128
+ ; SI: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; SI: G_STORE [[EXTRACT1]](<4 x s16>), [[PTR_ADD]](p1) :: (store 8, align 16, addrspace 1)
+ ; CI-LABEL: name: test_store_global_v12s16_align4
+ ; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; CI: [[DEF:%[0-9]+]]:_(<12 x s32>) = G_IMPLICIT_DEF
+ ; CI: [[TRUNC:%[0-9]+]]:_(<12 x s16>) = G_TRUNC [[DEF]](<12 x s32>)
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[TRUNC]](<12 x s16>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(<4 x s16>) = G_EXTRACT [[TRUNC]](<12 x s16>), 128
+ ; CI: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; CI: G_STORE [[EXTRACT1]](<4 x s16>), [[PTR_ADD]](p1) :: (store 8, align 16, addrspace 1)
+ ; VI-LABEL: name: test_store_global_v12s16_align4
+ ; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; VI: [[DEF:%[0-9]+]]:_(<12 x s32>) = G_IMPLICIT_DEF
+ ; VI: [[TRUNC:%[0-9]+]]:_(<12 x s16>) = G_TRUNC [[DEF]](<12 x s32>)
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[TRUNC]](<12 x s16>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(<4 x s16>) = G_EXTRACT [[TRUNC]](<12 x s16>), 128
+ ; VI: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; VI: G_STORE [[EXTRACT1]](<4 x s16>), [[PTR_ADD]](p1) :: (store 8, align 16, addrspace 1)
+ ; GFX9-LABEL: name: test_store_global_v12s16_align4
+ ; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
+ ; GFX9: [[DEF:%[0-9]+]]:_(<12 x s32>) = G_IMPLICIT_DEF
+ ; GFX9: [[TRUNC:%[0-9]+]]:_(<12 x s16>) = G_TRUNC [[DEF]](<12 x s32>)
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<8 x s16>) = G_EXTRACT [[TRUNC]](<12 x s16>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(<4 x s16>) = G_EXTRACT [[TRUNC]](<12 x s16>), 128
+ ; GFX9: G_STORE [[EXTRACT]](<8 x s16>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
+ ; GFX9: G_STORE [[EXTRACT1]](<4 x s16>), [[PTR_ADD]](p1) :: (store 8, align 16, addrspace 1)
+ %0:_(p1) = COPY $vgpr0_vgpr1
+ %1:_(<12 x s16>) = G_IMPLICIT_DEF
G_STORE %1, %0 :: (store 20, align 16, addrspace 1)
...
; SI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; SI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; SI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; SI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; SI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
+ ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; SI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[EXTRACT]](<4 x s32>)
; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
; SI: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
; SI: G_STORE [[COPY34]](s32), [[PTR_ADD14]](p1) :: (store 1, addrspace 1)
; SI: [[C9:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; SI: [[PTR_ADD15:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C9]](s64)
+ ; SI: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32), [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[EXTRACT1]](<4 x s32>)
; SI: [[LSHR12:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
; SI: [[COPY35:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
; SI: [[COPY36:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
; SI: [[PTR_ADD18:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C5]](s64)
; SI: [[COPY42:%[0-9]+]]:_(s32) = COPY [[LSHR14]](s32)
; SI: G_STORE [[COPY42]](s32), [[PTR_ADD18]](p1) :: (store 1, addrspace 1)
- ; SI: [[C10:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; SI: [[PTR_ADD19:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C10]](s64)
+ ; SI: [[PTR_ADD19:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C6]](s64)
; SI: [[LSHR15:%[0-9]+]]:_(s32) = G_LSHR [[UV5]], [[C]](s32)
; SI: [[COPY43:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
; SI: [[COPY44:%[0-9]+]]:_(s32) = COPY [[UV5]](s32)
; SI: [[PTR_ADD22:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD19]], [[C5]](s64)
; SI: [[COPY50:%[0-9]+]]:_(s32) = COPY [[LSHR17]](s32)
; SI: G_STORE [[COPY50]](s32), [[PTR_ADD22]](p1) :: (store 1, addrspace 1)
- ; SI: [[C11:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; SI: [[PTR_ADD23:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C11]](s64)
+ ; SI: [[PTR_ADD23:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C7]](s64)
; SI: [[LSHR18:%[0-9]+]]:_(s32) = G_LSHR [[UV6]], [[C]](s32)
; SI: [[COPY51:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
; SI: [[COPY52:%[0-9]+]]:_(s32) = COPY [[UV6]](s32)
; SI: [[PTR_ADD26:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD23]], [[C5]](s64)
; SI: [[COPY58:%[0-9]+]]:_(s32) = COPY [[LSHR20]](s32)
; SI: G_STORE [[COPY58]](s32), [[PTR_ADD26]](p1) :: (store 1, addrspace 1)
- ; SI: [[C12:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; SI: [[PTR_ADD27:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C12]](s64)
+ ; SI: [[PTR_ADD27:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C8]](s64)
; SI: [[LSHR21:%[0-9]+]]:_(s32) = G_LSHR [[UV7]], [[C]](s32)
; SI: [[COPY59:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
; SI: [[COPY60:%[0-9]+]]:_(s32) = COPY [[UV7]](s32)
; SI: [[PTR_ADD30:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD27]], [[C5]](s64)
; SI: [[COPY66:%[0-9]+]]:_(s32) = COPY [[LSHR23]](s32)
; SI: G_STORE [[COPY66]](s32), [[PTR_ADD30]](p1) :: (store 1, addrspace 1)
- ; SI: [[C13:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; SI: [[PTR_ADD31:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C13]](s64)
- ; SI: [[LSHR24:%[0-9]+]]:_(s32) = G_LSHR [[UV8]], [[C]](s32)
+ ; SI: [[C10:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
+ ; SI: [[PTR_ADD31:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C10]](s64)
+ ; SI: [[LSHR24:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT2]], [[C]](s32)
; SI: [[COPY67:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
- ; SI: [[COPY68:%[0-9]+]]:_(s32) = COPY [[UV8]](s32)
+ ; SI: [[COPY68:%[0-9]+]]:_(s32) = COPY [[EXTRACT2]](s32)
; SI: [[AND16:%[0-9]+]]:_(s32) = G_AND [[COPY68]], [[C2]]
; SI: [[LSHR25:%[0-9]+]]:_(s32) = G_LSHR [[AND16]], [[COPY67]](s32)
; SI: [[COPY69:%[0-9]+]]:_(s32) = COPY [[C1]](s32)
; SI: [[COPY70:%[0-9]+]]:_(s32) = COPY [[LSHR24]](s32)
; SI: [[AND17:%[0-9]+]]:_(s32) = G_AND [[COPY70]], [[C2]]
; SI: [[LSHR26:%[0-9]+]]:_(s32) = G_LSHR [[AND17]], [[COPY69]](s32)
- ; SI: [[COPY71:%[0-9]+]]:_(s32) = COPY [[UV8]](s32)
+ ; SI: [[COPY71:%[0-9]+]]:_(s32) = COPY [[EXTRACT2]](s32)
; SI: G_STORE [[COPY71]](s32), [[PTR_ADD31]](p1) :: (store 1, addrspace 1)
; SI: [[PTR_ADD32:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD31]], [[C3]](s64)
; SI: [[COPY72:%[0-9]+]]:_(s32) = COPY [[LSHR25]](s32)
; CI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; CI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; CI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; CI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; CI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; CI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; CI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; CI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; CI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; CI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 1, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; CI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; CI: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 1, addrspace 1)
+ ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; CI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; CI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; CI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; CI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; CI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; CI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; CI: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; CI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; CI: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; CI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; CI: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, align 1, addrspace 1)
- ; CI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; CI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; CI: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 1, addrspace 1)
+ ; CI: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 1, addrspace 1)
; VI-LABEL: name: test_store_global_v9s32_align1
; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; VI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; VI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; VI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; VI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; VI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
+ ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; VI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[EXTRACT]](<4 x s32>)
; VI: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[UV]](s32)
; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
; VI: G_STORE [[ANYEXT7]](s32), [[PTR_ADD14]](p1) :: (store 1, addrspace 1)
; VI: [[C8:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; VI: [[PTR_ADD15:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C8]](s64)
+ ; VI: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32), [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[EXTRACT1]](<4 x s32>)
; VI: [[TRUNC8:%[0-9]+]]:_(s16) = G_TRUNC [[UV4]](s32)
; VI: [[LSHR12:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
; VI: [[TRUNC9:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR12]](s32)
; VI: [[PTR_ADD18:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C4]](s64)
; VI: [[ANYEXT9:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR14]](s16)
; VI: G_STORE [[ANYEXT9]](s32), [[PTR_ADD18]](p1) :: (store 1, addrspace 1)
- ; VI: [[C9:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; VI: [[PTR_ADD19:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C9]](s64)
+ ; VI: [[PTR_ADD19:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C5]](s64)
; VI: [[TRUNC10:%[0-9]+]]:_(s16) = G_TRUNC [[UV5]](s32)
; VI: [[LSHR15:%[0-9]+]]:_(s32) = G_LSHR [[UV5]], [[C]](s32)
; VI: [[TRUNC11:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR15]](s32)
; VI: [[PTR_ADD22:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD19]], [[C4]](s64)
; VI: [[ANYEXT11:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR17]](s16)
; VI: G_STORE [[ANYEXT11]](s32), [[PTR_ADD22]](p1) :: (store 1, addrspace 1)
- ; VI: [[C10:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; VI: [[PTR_ADD23:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C10]](s64)
+ ; VI: [[PTR_ADD23:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C6]](s64)
; VI: [[TRUNC12:%[0-9]+]]:_(s16) = G_TRUNC [[UV6]](s32)
; VI: [[LSHR18:%[0-9]+]]:_(s32) = G_LSHR [[UV6]], [[C]](s32)
; VI: [[TRUNC13:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR18]](s32)
; VI: [[PTR_ADD26:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD23]], [[C4]](s64)
; VI: [[ANYEXT13:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR20]](s16)
; VI: G_STORE [[ANYEXT13]](s32), [[PTR_ADD26]](p1) :: (store 1, addrspace 1)
- ; VI: [[C11:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; VI: [[PTR_ADD27:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C11]](s64)
+ ; VI: [[PTR_ADD27:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C7]](s64)
; VI: [[TRUNC14:%[0-9]+]]:_(s16) = G_TRUNC [[UV7]](s32)
; VI: [[LSHR21:%[0-9]+]]:_(s32) = G_LSHR [[UV7]], [[C]](s32)
; VI: [[TRUNC15:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR21]](s32)
; VI: [[PTR_ADD30:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD27]], [[C4]](s64)
; VI: [[ANYEXT15:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR23]](s16)
; VI: G_STORE [[ANYEXT15]](s32), [[PTR_ADD30]](p1) :: (store 1, addrspace 1)
- ; VI: [[C12:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; VI: [[PTR_ADD31:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C12]](s64)
- ; VI: [[TRUNC16:%[0-9]+]]:_(s16) = G_TRUNC [[UV8]](s32)
- ; VI: [[LSHR24:%[0-9]+]]:_(s32) = G_LSHR [[UV8]], [[C]](s32)
+ ; VI: [[C9:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
+ ; VI: [[PTR_ADD31:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C9]](s64)
+ ; VI: [[TRUNC16:%[0-9]+]]:_(s16) = G_TRUNC [[EXTRACT2]](s32)
+ ; VI: [[LSHR24:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT2]], [[C]](s32)
; VI: [[TRUNC17:%[0-9]+]]:_(s16) = G_TRUNC [[LSHR24]](s32)
; VI: [[LSHR25:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC16]], [[C1]](s16)
; VI: [[LSHR26:%[0-9]+]]:_(s16) = G_LSHR [[TRUNC17]], [[C1]](s16)
- ; VI: [[COPY20:%[0-9]+]]:_(s32) = COPY [[UV8]](s32)
+ ; VI: [[COPY20:%[0-9]+]]:_(s32) = COPY [[EXTRACT2]](s32)
; VI: G_STORE [[COPY20]](s32), [[PTR_ADD31]](p1) :: (store 1, addrspace 1)
; VI: [[PTR_ADD32:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD31]], [[C2]](s64)
; VI: [[ANYEXT16:%[0-9]+]]:_(s32) = G_ANYEXT [[LSHR25]](s16)
; GFX9: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; GFX9: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; GFX9: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; GFX9: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; GFX9: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; GFX9: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; GFX9: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; GFX9: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 1, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; GFX9: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; GFX9: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 1, addrspace 1)
+ ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; GFX9: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; GFX9: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; GFX9: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; GFX9: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; GFX9: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; GFX9: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; GFX9: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; GFX9: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; GFX9: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, align 1, addrspace 1)
- ; GFX9: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; GFX9: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; GFX9: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 1, addrspace 1)
+ ; GFX9: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 1, addrspace 1)
%0:_(p1) = COPY $vgpr0_vgpr1
%1:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
%2:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; SI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; SI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; SI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; SI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; SI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
+ ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; SI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; SI: [[UV:%[0-9]+]]:_(<2 x s32>), [[UV1:%[0-9]+]]:_(<2 x s32>) = G_UNMERGE_VALUES [[EXTRACT]](<4 x s32>)
+ ; SI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](<2 x s32>)
; SI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
- ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UV]](s32)
+ ; SI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV2]], [[C]](s32)
+ ; SI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UV2]](s32)
; SI: G_STORE [[COPY4]](s32), [[COPY]](p1) :: (store 2, addrspace 1)
; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 2
; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
 ; SI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
 ; SI: G_STORE [[COPY5]](s32), [[PTR_ADD]](p1) :: (store 2, addrspace 1)
; SI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
; SI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[UV1]], [[C]](s32)
- ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[UV1]](s32)
+ ; SI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[UV3]], [[C]](s32)
+ ; SI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[UV3]](s32)
; SI: G_STORE [[COPY6]](s32), [[PTR_ADD1]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD1]], [[C1]](s64)
; SI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
; SI: G_STORE [[COPY7]](s32), [[PTR_ADD2]](p1) :: (store 2, addrspace 1)
; SI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
; SI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; SI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[UV2]], [[C]](s32)
- ; SI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[UV2]](s32)
+ ; SI: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](<2 x s32>)
+ ; SI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
+ ; SI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
; SI: G_STORE [[COPY8]](s32), [[PTR_ADD3]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C1]](s64)
; SI: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
; SI: G_STORE [[COPY9]](s32), [[PTR_ADD4]](p1) :: (store 2, addrspace 1)
- ; SI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; SI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; SI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[UV3]], [[C]](s32)
- ; SI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[UV3]](s32)
+ ; SI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C2]](s64)
+ ; SI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[UV5]], [[C]](s32)
+ ; SI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[UV5]](s32)
; SI: G_STORE [[COPY10]](s32), [[PTR_ADD5]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD5]], [[C1]](s64)
; SI: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
; SI: G_STORE [[COPY11]](s32), [[PTR_ADD6]](p1) :: (store 2, addrspace 1)
- ; SI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; SI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; SI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
- ; SI: [[COPY12:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
+ ; SI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; SI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
+ ; SI: [[UV6:%[0-9]+]]:_(<2 x s32>), [[UV7:%[0-9]+]]:_(<2 x s32>) = G_UNMERGE_VALUES [[EXTRACT1]](<4 x s32>)
+ ; SI: [[UV8:%[0-9]+]]:_(s32), [[UV9:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV6]](<2 x s32>)
+ ; SI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[UV8]], [[C]](s32)
+ ; SI: [[COPY12:%[0-9]+]]:_(s32) = COPY [[UV8]](s32)
; SI: G_STORE [[COPY12]](s32), [[PTR_ADD7]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD8:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C1]](s64)
; SI: [[COPY13:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
; SI: G_STORE [[COPY13]](s32), [[PTR_ADD8]](p1) :: (store 2, addrspace 1)
- ; SI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; SI: [[PTR_ADD9:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; SI: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[UV5]], [[C]](s32)
- ; SI: [[COPY14:%[0-9]+]]:_(s32) = COPY [[UV5]](s32)
+ ; SI: [[PTR_ADD9:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C2]](s64)
+ ; SI: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[UV9]], [[C]](s32)
+ ; SI: [[COPY14:%[0-9]+]]:_(s32) = COPY [[UV9]](s32)
; SI: G_STORE [[COPY14]](s32), [[PTR_ADD9]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD10:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD9]], [[C1]](s64)
; SI: [[COPY15:%[0-9]+]]:_(s32) = COPY [[LSHR5]](s32)
; SI: G_STORE [[COPY15]](s32), [[PTR_ADD10]](p1) :: (store 2, addrspace 1)
- ; SI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; SI: [[PTR_ADD11:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; SI: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[UV6]], [[C]](s32)
- ; SI: [[COPY16:%[0-9]+]]:_(s32) = COPY [[UV6]](s32)
+ ; SI: [[PTR_ADD11:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C3]](s64)
+ ; SI: [[UV10:%[0-9]+]]:_(s32), [[UV11:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV7]](<2 x s32>)
+ ; SI: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[UV10]], [[C]](s32)
+ ; SI: [[COPY16:%[0-9]+]]:_(s32) = COPY [[UV10]](s32)
; SI: G_STORE [[COPY16]](s32), [[PTR_ADD11]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD12:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD11]], [[C1]](s64)
; SI: [[COPY17:%[0-9]+]]:_(s32) = COPY [[LSHR6]](s32)
; SI: G_STORE [[COPY17]](s32), [[PTR_ADD12]](p1) :: (store 2, addrspace 1)
- ; SI: [[C8:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; SI: [[PTR_ADD13:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C8]](s64)
- ; SI: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[UV7]], [[C]](s32)
- ; SI: [[COPY18:%[0-9]+]]:_(s32) = COPY [[UV7]](s32)
+ ; SI: [[PTR_ADD13:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD11]], [[C2]](s64)
+ ; SI: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[UV11]], [[C]](s32)
+ ; SI: [[COPY18:%[0-9]+]]:_(s32) = COPY [[UV11]](s32)
; SI: G_STORE [[COPY18]](s32), [[PTR_ADD13]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD14:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD13]], [[C1]](s64)
; SI: [[COPY19:%[0-9]+]]:_(s32) = COPY [[LSHR7]](s32)
; SI: G_STORE [[COPY19]](s32), [[PTR_ADD14]](p1) :: (store 2, addrspace 1)
- ; SI: [[C9:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; SI: [[PTR_ADD15:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C9]](s64)
- ; SI: [[LSHR8:%[0-9]+]]:_(s32) = G_LSHR [[UV8]], [[C]](s32)
- ; SI: [[COPY20:%[0-9]+]]:_(s32) = COPY [[UV8]](s32)
+ ; SI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
+ ; SI: [[PTR_ADD15:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
+ ; SI: [[LSHR8:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT2]], [[C]](s32)
+ ; SI: [[COPY20:%[0-9]+]]:_(s32) = COPY [[EXTRACT2]](s32)
; SI: G_STORE [[COPY20]](s32), [[PTR_ADD15]](p1) :: (store 2, addrspace 1)
; SI: [[PTR_ADD16:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C1]](s64)
; SI: [[COPY21:%[0-9]+]]:_(s32) = COPY [[LSHR8]](s32)
; CI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; CI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; CI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; CI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; CI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; CI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; CI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; CI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; CI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; CI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 2, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; CI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; CI: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 2, addrspace 1)
+ ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; CI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; CI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; CI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; CI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; CI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; CI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; CI: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; CI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; CI: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; CI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; CI: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, align 2, addrspace 1)
- ; CI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; CI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; CI: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 2, addrspace 1)
+ ; CI: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 2, addrspace 1)
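; NOTE: CI and GFX9 keep the two 16-byte stores plus the trailing dword even
; at align 2, while SI and VI fall back to the halfword splitting above; the
; wide form survives where the target treats under-aligned global access as
; legal.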
; VI-LABEL: name: test_store_global_v9s32_align2
; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; VI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; VI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; VI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; VI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; VI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
+ ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; VI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; VI: [[UV:%[0-9]+]]:_(<2 x s32>), [[UV1:%[0-9]+]]:_(<2 x s32>) = G_UNMERGE_VALUES [[EXTRACT]](<4 x s32>)
+ ; VI: [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV]](<2 x s32>)
; VI: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
- ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV]], [[C]](s32)
- ; VI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UV]](s32)
+ ; VI: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[UV2]], [[C]](s32)
+ ; VI: [[COPY4:%[0-9]+]]:_(s32) = COPY [[UV2]](s32)
; VI: G_STORE [[COPY4]](s32), [[COPY]](p1) :: (store 2, addrspace 1)
; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 2
; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
; VI: [[COPY5:%[0-9]+]]:_(s32) = COPY [[LSHR]](s32)
; VI: G_STORE [[COPY5]](s32), [[PTR_ADD]](p1) :: (store 2, addrspace 1)
; VI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
; VI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; VI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[UV1]], [[C]](s32)
- ; VI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[UV1]](s32)
+ ; VI: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[UV3]], [[C]](s32)
+ ; VI: [[COPY6:%[0-9]+]]:_(s32) = COPY [[UV3]](s32)
; VI: G_STORE [[COPY6]](s32), [[PTR_ADD1]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD1]], [[C1]](s64)
; VI: [[COPY7:%[0-9]+]]:_(s32) = COPY [[LSHR1]](s32)
; VI: G_STORE [[COPY7]](s32), [[PTR_ADD2]](p1) :: (store 2, addrspace 1)
; VI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
; VI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; VI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[UV2]], [[C]](s32)
- ; VI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[UV2]](s32)
+ ; VI: [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV1]](<2 x s32>)
+ ; VI: [[LSHR2:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
+ ; VI: [[COPY8:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
; VI: G_STORE [[COPY8]](s32), [[PTR_ADD3]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C1]](s64)
; VI: [[COPY9:%[0-9]+]]:_(s32) = COPY [[LSHR2]](s32)
; VI: G_STORE [[COPY9]](s32), [[PTR_ADD4]](p1) :: (store 2, addrspace 1)
- ; VI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; VI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; VI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[UV3]], [[C]](s32)
- ; VI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[UV3]](s32)
+ ; VI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD3]], [[C2]](s64)
+ ; VI: [[LSHR3:%[0-9]+]]:_(s32) = G_LSHR [[UV5]], [[C]](s32)
+ ; VI: [[COPY10:%[0-9]+]]:_(s32) = COPY [[UV5]](s32)
; VI: G_STORE [[COPY10]](s32), [[PTR_ADD5]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD5]], [[C1]](s64)
; VI: [[COPY11:%[0-9]+]]:_(s32) = COPY [[LSHR3]](s32)
; VI: G_STORE [[COPY11]](s32), [[PTR_ADD6]](p1) :: (store 2, addrspace 1)
- ; VI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; VI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; VI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[UV4]], [[C]](s32)
- ; VI: [[COPY12:%[0-9]+]]:_(s32) = COPY [[UV4]](s32)
+ ; VI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
+ ; VI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
+ ; VI: [[UV6:%[0-9]+]]:_(<2 x s32>), [[UV7:%[0-9]+]]:_(<2 x s32>) = G_UNMERGE_VALUES [[EXTRACT1]](<4 x s32>)
+ ; VI: [[UV8:%[0-9]+]]:_(s32), [[UV9:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV6]](<2 x s32>)
+ ; VI: [[LSHR4:%[0-9]+]]:_(s32) = G_LSHR [[UV8]], [[C]](s32)
+ ; VI: [[COPY12:%[0-9]+]]:_(s32) = COPY [[UV8]](s32)
; VI: G_STORE [[COPY12]](s32), [[PTR_ADD7]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD8:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C1]](s64)
; VI: [[COPY13:%[0-9]+]]:_(s32) = COPY [[LSHR4]](s32)
; VI: G_STORE [[COPY13]](s32), [[PTR_ADD8]](p1) :: (store 2, addrspace 1)
- ; VI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; VI: [[PTR_ADD9:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; VI: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[UV5]], [[C]](s32)
- ; VI: [[COPY14:%[0-9]+]]:_(s32) = COPY [[UV5]](s32)
+ ; VI: [[PTR_ADD9:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C2]](s64)
+ ; VI: [[LSHR5:%[0-9]+]]:_(s32) = G_LSHR [[UV9]], [[C]](s32)
+ ; VI: [[COPY14:%[0-9]+]]:_(s32) = COPY [[UV9]](s32)
; VI: G_STORE [[COPY14]](s32), [[PTR_ADD9]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD10:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD9]], [[C1]](s64)
; VI: [[COPY15:%[0-9]+]]:_(s32) = COPY [[LSHR5]](s32)
; VI: G_STORE [[COPY15]](s32), [[PTR_ADD10]](p1) :: (store 2, addrspace 1)
- ; VI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; VI: [[PTR_ADD11:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; VI: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[UV6]], [[C]](s32)
- ; VI: [[COPY16:%[0-9]+]]:_(s32) = COPY [[UV6]](s32)
+ ; VI: [[PTR_ADD11:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD7]], [[C3]](s64)
+ ; VI: [[UV10:%[0-9]+]]:_(s32), [[UV11:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[UV7]](<2 x s32>)
+ ; VI: [[LSHR6:%[0-9]+]]:_(s32) = G_LSHR [[UV10]], [[C]](s32)
+ ; VI: [[COPY16:%[0-9]+]]:_(s32) = COPY [[UV10]](s32)
; VI: G_STORE [[COPY16]](s32), [[PTR_ADD11]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD12:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD11]], [[C1]](s64)
; VI: [[COPY17:%[0-9]+]]:_(s32) = COPY [[LSHR6]](s32)
; VI: G_STORE [[COPY17]](s32), [[PTR_ADD12]](p1) :: (store 2, addrspace 1)
- ; VI: [[C8:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; VI: [[PTR_ADD13:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C8]](s64)
- ; VI: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[UV7]], [[C]](s32)
- ; VI: [[COPY18:%[0-9]+]]:_(s32) = COPY [[UV7]](s32)
+ ; VI: [[PTR_ADD13:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD11]], [[C2]](s64)
+ ; VI: [[LSHR7:%[0-9]+]]:_(s32) = G_LSHR [[UV11]], [[C]](s32)
+ ; VI: [[COPY18:%[0-9]+]]:_(s32) = COPY [[UV11]](s32)
; VI: G_STORE [[COPY18]](s32), [[PTR_ADD13]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD14:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD13]], [[C1]](s64)
; VI: [[COPY19:%[0-9]+]]:_(s32) = COPY [[LSHR7]](s32)
; VI: G_STORE [[COPY19]](s32), [[PTR_ADD14]](p1) :: (store 2, addrspace 1)
- ; VI: [[C9:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; VI: [[PTR_ADD15:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C9]](s64)
- ; VI: [[LSHR8:%[0-9]+]]:_(s32) = G_LSHR [[UV8]], [[C]](s32)
- ; VI: [[COPY20:%[0-9]+]]:_(s32) = COPY [[UV8]](s32)
+ ; VI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
+ ; VI: [[PTR_ADD15:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
+ ; VI: [[LSHR8:%[0-9]+]]:_(s32) = G_LSHR [[EXTRACT2]], [[C]](s32)
+ ; VI: [[COPY20:%[0-9]+]]:_(s32) = COPY [[EXTRACT2]](s32)
; VI: G_STORE [[COPY20]](s32), [[PTR_ADD15]](p1) :: (store 2, addrspace 1)
; VI: [[PTR_ADD16:%[0-9]+]]:_(p1) = G_PTR_ADD [[PTR_ADD15]], [[C1]](s64)
; VI: [[COPY21:%[0-9]+]]:_(s32) = COPY [[LSHR8]](s32)
; VI: G_STORE [[COPY21]](s32), [[PTR_ADD16]](p1) :: (store 2, addrspace 1)
; GFX9-LABEL: name: test_store_global_v9s32_align2
; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; GFX9: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; GFX9: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; GFX9: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; GFX9: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; GFX9: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; GFX9: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; GFX9: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; GFX9: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 2, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; GFX9: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; GFX9: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 2, addrspace 1)
+ ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; GFX9: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; GFX9: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; GFX9: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; GFX9: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; GFX9: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; GFX9: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; GFX9: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; GFX9: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; GFX9: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, align 2, addrspace 1)
- ; GFX9: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; GFX9: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; GFX9: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 2, addrspace 1)
+ ; GFX9: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 2, addrspace 1)
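; NOTE: Every run line now shares the same shape: the three <3 x s32> operands
; are concatenated to <9 x s32>, two <4 x s32> chunks are taken with G_EXTRACT
; at bit offsets 0 and 128, the ninth element is read from [[COPY3]] at bit
; offset 64, and the pieces land at byte offsets 0, 16 and 32 from the base
; pointer.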
%0:_(p1) = COPY $vgpr0_vgpr1
%1:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
%2:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
%3:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
%4:_(<9 x s32>) = G_CONCAT_VECTORS %1(<3 x s32>), %2(<3 x s32>), %3(<3 x s32>)
G_STORE %4(<9 x s32>), %0(p1) :: (store 36, align 2, addrspace 1)
; SI-LABEL: name: test_store_global_v9s32_align4
; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; SI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; SI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; SI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; SI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; SI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; SI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, addrspace 1)
- ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; SI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; SI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; SI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; SI: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 4, addrspace 1)
+ ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; SI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; SI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
- ; SI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; SI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; SI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; SI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; SI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; SI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, addrspace 1)
- ; SI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; SI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; SI: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; SI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; SI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; SI: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, addrspace 1)
- ; SI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; SI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; SI: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; SI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; SI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; SI: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, addrspace 1)
+ ; SI: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
; CI-LABEL: name: test_store_global_v9s32_align4
; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; CI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; CI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; CI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; CI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; CI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; CI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; CI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, addrspace 1)
- ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; CI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; CI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; CI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; CI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; CI: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 4, addrspace 1)
+ ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; CI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; CI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
- ; CI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; CI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; CI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; CI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; CI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, addrspace 1)
- ; CI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; CI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; CI: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; CI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; CI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; CI: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, addrspace 1)
- ; CI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; CI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; CI: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; CI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; CI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; CI: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, addrspace 1)
+ ; CI: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
; VI-LABEL: name: test_store_global_v9s32_align4
; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; VI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; VI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; VI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; VI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; VI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; VI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, addrspace 1)
- ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; VI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; VI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; VI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; VI: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 4, addrspace 1)
+ ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; VI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; VI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
- ; VI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; VI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; VI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; VI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; VI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; VI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, addrspace 1)
- ; VI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; VI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; VI: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; VI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; VI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; VI: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, addrspace 1)
- ; VI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; VI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; VI: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; VI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; VI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; VI: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, addrspace 1)
+ ; VI: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
; GFX9-LABEL: name: test_store_global_v9s32_align4
; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; GFX9: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; GFX9: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; GFX9: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; GFX9: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; GFX9: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; GFX9: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; GFX9: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; GFX9: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 4, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; GFX9: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; GFX9: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 4, addrspace 1)
+ ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; GFX9: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; GFX9: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; GFX9: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; GFX9: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; GFX9: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; GFX9: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; GFX9: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; GFX9: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; GFX9: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; GFX9: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; GFX9: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, addrspace 1)
+ ; GFX9: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, addrspace 1)
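; NOTE: The align 8 and align 16 variants below reuse this exact sequence;
; only the alignment recorded on each MachineMemOperand changes across the
; four run lines.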
%0:_(p1) = COPY $vgpr0_vgpr1
%1:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
%2:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
%3:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
%4:_(<9 x s32>) = G_CONCAT_VECTORS %1(<3 x s32>), %2(<3 x s32>), %3(<3 x s32>)
G_STORE %4(<9 x s32>), %0(p1) :: (store 36, align 4, addrspace 1)
; SI-LABEL: name: test_store_global_v9s32_align8
; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; SI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; SI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; SI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; SI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; SI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; SI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 8, addrspace 1)
- ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; SI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; SI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; SI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; SI: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 8, addrspace 1)
+ ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; SI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; SI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; SI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; SI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; SI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; SI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; SI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; SI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 8, addrspace 1)
- ; SI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; SI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; SI: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; SI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; SI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; SI: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 8, addrspace 1)
- ; SI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; SI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; SI: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; SI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; SI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; SI: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 8, addrspace 1)
+ ; SI: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
; CI-LABEL: name: test_store_global_v9s32_align8
; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; CI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; CI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; CI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; CI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; CI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; CI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; CI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 8, addrspace 1)
- ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; CI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; CI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; CI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; CI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; CI: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 8, addrspace 1)
+ ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; CI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; CI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; CI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; CI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; CI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; CI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; CI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 8, addrspace 1)
- ; CI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; CI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; CI: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; CI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; CI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; CI: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 8, addrspace 1)
- ; CI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; CI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; CI: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; CI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; CI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; CI: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 8, addrspace 1)
+ ; CI: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
; VI-LABEL: name: test_store_global_v9s32_align8
; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; VI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; VI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; VI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; VI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; VI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; VI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 8, addrspace 1)
- ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; VI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; VI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; VI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; VI: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 8, addrspace 1)
+ ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; VI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; VI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; VI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; VI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; VI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; VI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; VI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; VI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 8, addrspace 1)
- ; VI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; VI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; VI: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; VI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; VI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; VI: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 8, addrspace 1)
- ; VI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; VI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; VI: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; VI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; VI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; VI: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 8, addrspace 1)
+ ; VI: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
; GFX9-LABEL: name: test_store_global_v9s32_align8
; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; GFX9: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; GFX9: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; GFX9: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; GFX9: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; GFX9: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; GFX9: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 8, addrspace 1)
- ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; GFX9: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; GFX9: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, align 8, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; GFX9: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; GFX9: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, align 8, addrspace 1)
+ ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; GFX9: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; GFX9: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; GFX9: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 8, addrspace 1)
- ; GFX9: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; GFX9: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; GFX9: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; GFX9: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; GFX9: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 8, addrspace 1)
- ; GFX9: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; GFX9: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; GFX9: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; GFX9: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; GFX9: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 8, addrspace 1)
+ ; GFX9: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
%0:_(p1) = COPY $vgpr0_vgpr1
%1:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
%2:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
%3:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
%4:_(<9 x s32>) = G_CONCAT_VECTORS %1(<3 x s32>), %2(<3 x s32>), %3(<3 x s32>)
G_STORE %4(<9 x s32>), %0(p1) :: (store 36, align 8, addrspace 1)
; SI-LABEL: name: test_store_global_v9s32_align16
; SI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; SI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; SI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; SI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; SI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; SI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; SI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; SI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 16, addrspace 1)
- ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; SI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; SI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; SI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; SI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; SI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; SI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; SI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; SI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; SI: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, addrspace 1)
+ ; SI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; SI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; SI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; SI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; SI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; SI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; SI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; SI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; SI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 16, addrspace 1)
- ; SI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; SI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; SI: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; SI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; SI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; SI: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 8, addrspace 1)
- ; SI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; SI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; SI: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; SI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; SI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; SI: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 16, addrspace 1)
+ ; SI: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 16, addrspace 1)
; CI-LABEL: name: test_store_global_v9s32_align16
; CI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; CI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; CI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; CI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; CI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; CI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; CI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; CI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 16, addrspace 1)
- ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; CI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; CI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; CI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; CI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; CI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; CI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; CI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; CI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; CI: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, addrspace 1)
+ ; CI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; CI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; CI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; CI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; CI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; CI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; CI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; CI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; CI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 16, addrspace 1)
- ; CI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; CI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; CI: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; CI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; CI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; CI: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 8, addrspace 1)
- ; CI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; CI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; CI: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; CI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; CI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; CI: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 16, addrspace 1)
+ ; CI: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 16, addrspace 1)
; VI-LABEL: name: test_store_global_v9s32_align16
; VI: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; VI: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; VI: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; VI: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; VI: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; VI: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; VI: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; VI: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 16, addrspace 1)
- ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; VI: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; VI: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; VI: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; VI: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; VI: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; VI: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; VI: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; VI: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; VI: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, addrspace 1)
+ ; VI: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; VI: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; VI: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; VI: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; VI: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; VI: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; VI: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; VI: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; VI: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 16, addrspace 1)
- ; VI: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; VI: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; VI: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; VI: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; VI: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; VI: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 8, addrspace 1)
- ; VI: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; VI: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; VI: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; VI: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; VI: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; VI: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 16, addrspace 1)
+ ; VI: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 16, addrspace 1)
; GFX9-LABEL: name: test_store_global_v9s32_align16
; GFX9: [[COPY:%[0-9]+]]:_(p1) = COPY $vgpr0_vgpr1
; GFX9: [[COPY1:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
; GFX9: [[COPY2:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
; GFX9: [[COPY3:%[0-9]+]]:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
- ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY1]](<3 x s32>)
- ; GFX9: [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY2]](<3 x s32>)
- ; GFX9: [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32), [[UV8:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[COPY3]](<3 x s32>)
- ; GFX9: G_STORE [[UV]](s32), [[COPY]](p1) :: (store 4, align 16, addrspace 1)
- ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4
+ ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<9 x s32>) = G_CONCAT_VECTORS [[COPY1]](<3 x s32>), [[COPY2]](<3 x s32>), [[COPY3]](<3 x s32>)
+ ; GFX9: [[EXTRACT:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 0
+ ; GFX9: [[EXTRACT1:%[0-9]+]]:_(<4 x s32>) = G_EXTRACT [[CONCAT_VECTORS]](<9 x s32>), 128
+ ; GFX9: [[EXTRACT2:%[0-9]+]]:_(s32) = G_EXTRACT [[COPY3]](<3 x s32>), 64
+ ; GFX9: G_STORE [[EXTRACT]](<4 x s32>), [[COPY]](p1) :: (store 16, addrspace 1)
+ ; GFX9: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
; GFX9: [[PTR_ADD:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C]](s64)
- ; GFX9: G_STORE [[UV1]](s32), [[PTR_ADD]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
+ ; GFX9: G_STORE [[EXTRACT1]](<4 x s32>), [[PTR_ADD]](p1) :: (store 16, addrspace 1)
+ ; GFX9: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
; GFX9: [[PTR_ADD1:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C1]](s64)
- ; GFX9: G_STORE [[UV2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 8, addrspace 1)
- ; GFX9: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 12
- ; GFX9: [[PTR_ADD2:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C2]](s64)
- ; GFX9: G_STORE [[UV3]](s32), [[PTR_ADD2]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
- ; GFX9: [[PTR_ADD3:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C3]](s64)
- ; GFX9: G_STORE [[UV4]](s32), [[PTR_ADD3]](p1) :: (store 4, align 16, addrspace 1)
- ; GFX9: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 20
- ; GFX9: [[PTR_ADD4:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C4]](s64)
- ; GFX9: G_STORE [[UV5]](s32), [[PTR_ADD4]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
- ; GFX9: [[PTR_ADD5:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C5]](s64)
- ; GFX9: G_STORE [[UV6]](s32), [[PTR_ADD5]](p1) :: (store 4, align 8, addrspace 1)
- ; GFX9: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 28
- ; GFX9: [[PTR_ADD6:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C6]](s64)
- ; GFX9: G_STORE [[UV7]](s32), [[PTR_ADD6]](p1) :: (store 4, addrspace 1)
- ; GFX9: [[C7:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
- ; GFX9: [[PTR_ADD7:%[0-9]+]]:_(p1) = G_PTR_ADD [[COPY]], [[C7]](s64)
- ; GFX9: G_STORE [[UV8]](s32), [[PTR_ADD7]](p1) :: (store 4, align 16, addrspace 1)
+ ; GFX9: G_STORE [[EXTRACT2]](s32), [[PTR_ADD1]](p1) :: (store 4, align 16, addrspace 1)
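; NOTE: At align 16 the two <4 x s32> stores need no align annotation (16 is
; their natural alignment), and the dword at +32 still prints "align 16",
; since a 32-byte offset from a 16-aligned base stays 16-aligned.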
%0:_(p1) = COPY $vgpr0_vgpr1
%1:_(<3 x s32>) = COPY $vgpr2_vgpr3_vgpr4
%2:_(<3 x s32>) = COPY $vgpr5_vgpr6_vgpr7
%3:_(<3 x s32>) = COPY $vgpr7_vgpr8_vgpr9
%4:_(<9 x s32>) = G_CONCAT_VECTORS %1(<3 x s32>), %2(<3 x s32>), %3(<3 x s32>)
G_STORE %4(<9 x s32>), %0(p1) :: (store 36, align 16, addrspace 1)