Lines Matching refs:p3
34 %0:vgpr(p3) = COPY $vgpr0
53 ; GFX6: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
55 … ; GFX6: [[LOAD:%[0-9]+]]:vgpr_32(<2 x s16>) = G_LOAD [[COPY]](p3) :: (load seq_cst 4, addrspace 3)
59 ; GFX7: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
61 … ; GFX7: [[LOAD:%[0-9]+]]:vgpr_32(<2 x s16>) = G_LOAD [[COPY]](p3) :: (load seq_cst 4, addrspace 3)
65 ; GFX9: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
66 … ; GFX9: [[LOAD:%[0-9]+]]:vgpr_32(<2 x s16>) = G_LOAD [[COPY]](p3) :: (load seq_cst 4, addrspace 3)
68 %0:vgpr(p3) = COPY $vgpr0
87 ; GFX6: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
89 ; GFX6: [[LOAD:%[0-9]+]]:vgpr_32(p3) = G_LOAD [[COPY]](p3) :: (load seq_cst 4, addrspace 3)
90 ; GFX6: $vgpr0 = COPY [[LOAD]](p3)
93 ; GFX7: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
95 ; GFX7: [[LOAD:%[0-9]+]]:vgpr_32(p3) = G_LOAD [[COPY]](p3) :: (load seq_cst 4, addrspace 3)
96 ; GFX7: $vgpr0 = COPY [[LOAD]](p3)
99 ; GFX9: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
100 ; GFX9: [[LOAD:%[0-9]+]]:vgpr_32(p3) = G_LOAD [[COPY]](p3) :: (load seq_cst 4, addrspace 3)
101 ; GFX9: $vgpr0 = COPY [[LOAD]](p3)
102 %0:vgpr(p3) = COPY $vgpr0
103 %1:vgpr(p3) = G_LOAD %0 :: (load seq_cst 4, align 4, addrspace 3)
136 %0:vgpr(p3) = COPY $vgpr0
155 ; GFX6: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
157 … ; GFX6: [[LOAD:%[0-9]+]]:vreg_64(<2 x s32>) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
161 ; GFX7: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
163 … ; GFX7: [[LOAD:%[0-9]+]]:vreg_64(<2 x s32>) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
167 ; GFX9: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
168 … ; GFX9: [[LOAD:%[0-9]+]]:vreg_64(<2 x s32>) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
170 %0:vgpr(p3) = COPY $vgpr0
189 ; GFX6: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
191 … ; GFX6: [[LOAD:%[0-9]+]]:vreg_64(<4 x s16>) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
195 ; GFX7: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
197 … ; GFX7: [[LOAD:%[0-9]+]]:vreg_64(<4 x s16>) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
201 ; GFX9: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
202 … ; GFX9: [[LOAD:%[0-9]+]]:vreg_64(<4 x s16>) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
204 %0:vgpr(p3) = COPY $vgpr0
223 ; GFX6: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
225 ; GFX6: [[LOAD:%[0-9]+]]:vreg_64(p1) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
229 ; GFX7: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
231 ; GFX7: [[LOAD:%[0-9]+]]:vreg_64(p1) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
235 ; GFX9: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
236 ; GFX9: [[LOAD:%[0-9]+]]:vreg_64(p1) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
238 %0:vgpr(p3) = COPY $vgpr0
257 ; GFX6: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
259 ; GFX6: [[LOAD:%[0-9]+]]:vreg_64(p0) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
263 ; GFX7: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
265 ; GFX7: [[LOAD:%[0-9]+]]:vreg_64(p0) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
269 ; GFX9: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
270 ; GFX9: [[LOAD:%[0-9]+]]:vreg_64(p0) = G_LOAD [[COPY]](p3) :: (load seq_cst 8, addrspace 3)
272 %0:vgpr(p3) = COPY $vgpr0
308 %0:vgpr(p3) = COPY $vgpr0
310 %2:vgpr(p3) = G_PTR_ADD %0, %1
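
For context, the matches at file lines 87-103 above all come from one test function that atomically loads a p3 (local/LDS, addrspace 3) value with seq_cst ordering. Below is a minimal sketch of how those fragments sit inside a complete MIR test body; the function name, the legalized/regBankSelected/tracksRegLiveness flags, the liveins line, and the closing copy are assumptions, and the non-matching check lines between file lines 87 and 103 are omitted.

---
name:            load_atomic_local_p3_seq_cst    # hypothetical name, not taken from the match output
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $vgpr0

    ; GFX9: [[COPY:%[0-9]+]]:vgpr(p3) = COPY $vgpr0
    ; GFX9: [[LOAD:%[0-9]+]]:vgpr_32(p3) = G_LOAD [[COPY]](p3) :: (load seq_cst 4, addrspace 3)
    ; GFX9: $vgpr0 = COPY [[LOAD]](p3)
    %0:vgpr(p3) = COPY $vgpr0
    %1:vgpr(p3) = G_LOAD %0 :: (load seq_cst 4, align 4, addrspace 3)
    $vgpr0 = COPY %1(p3)    ; assumed closing copy, not in the match list
...

The match at file line 310 is the only p3 pointer-arithmetic result; assuming the unlisted %1 at file line 309 is a 32-bit offset (its definition did not match refs:p3), the input sequence would read roughly:

    %0:vgpr(p3) = COPY $vgpr0
    %1:vgpr(s32) = G_CONSTANT i32 4    ; assumed offset definition, not in the match list
    %2:vgpr(p3) = G_PTR_ADD %0, %1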