
Lines Matching refs:LOAD

68 ; GFX6: [[LOAD:%[0-9]+]]:vgpr_32(<2 x s16>) = G_LOAD [[COPY]](p1) :: (load seq_cst 4, addrspace 1)
69 ; GFX6: $vgpr0 = COPY [[LOAD]](<2 x s16>)
73 ; GFX7: [[LOAD:%[0-9]+]]:vgpr_32(<2 x s16>) = G_LOAD [[COPY]](p1) :: (load seq_cst 4, addrspace 1)
74 ; GFX7: $vgpr0 = COPY [[LOAD]](<2 x s16>)
78 ; GFX7-FLAT: [[LOAD:%[0-9]+]]:vgpr_32(<2 x s16>) = G_LOAD [[COPY]](p1) :: (load seq_cst 4, addrspace 1)
79 ; GFX7-FLAT: $vgpr0 = COPY [[LOAD]](<2 x s16>)
83 ; GFX9: [[LOAD:%[0-9]+]]:vgpr_32(<2 x s16>) = G_LOAD [[COPY]](p1) :: (load seq_cst 4, addrspace 1)
84 ; GFX9: $vgpr0 = COPY [[LOAD]](<2 x s16>)
105 ; GFX6: [[LOAD:%[0-9]+]]:vgpr_32(p3) = G_LOAD [[COPY]](p1) :: (load seq_cst 4, addrspace 1)
106 ; GFX6: $vgpr0 = COPY [[LOAD]](p3)
110 ; GFX7: [[LOAD:%[0-9]+]]:vgpr_32(p3) = G_LOAD [[COPY]](p1) :: (load seq_cst 4, addrspace 1)
111 ; GFX7: $vgpr0 = COPY [[LOAD]](p3)
115 ; GFX7-FLAT: [[LOAD:%[0-9]+]]:vgpr_32(p3) = G_LOAD [[COPY]](p1) :: (load seq_cst 4, addrspace 1)
116 ; GFX7-FLAT: $vgpr0 = COPY [[LOAD]](p3)
120 ; GFX9: [[LOAD:%[0-9]+]]:vgpr_32(p3) = G_LOAD [[COPY]](p1) :: (load seq_cst 4, addrspace 1)
121 ; GFX9: $vgpr0 = COPY [[LOAD]](p3)
189 ; GFX6: [[LOAD:%[0-9]+]]:vreg_64(<2 x s32>) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
190 ; GFX6: $vgpr0_vgpr1 = COPY [[LOAD]](<2 x s32>)
194 ; GFX7: [[LOAD:%[0-9]+]]:vreg_64(<2 x s32>) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
195 ; GFX7: $vgpr0_vgpr1 = COPY [[LOAD]](<2 x s32>)
199 ; GFX7-FLAT: [[LOAD:%[0-9]+]]:vreg_64(<2 x s32>) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
200 ; GFX7-FLAT: $vgpr0_vgpr1 = COPY [[LOAD]](<2 x s32>)
204 ; GFX9: [[LOAD:%[0-9]+]]:vreg_64(<2 x s32>) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
205 ; GFX9: $vgpr0_vgpr1 = COPY [[LOAD]](<2 x s32>)
226 ; GFX6: [[LOAD:%[0-9]+]]:vreg_64(<4 x s16>) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
227 ; GFX6: $vgpr0_vgpr1 = COPY [[LOAD]](<4 x s16>)
231 ; GFX7: [[LOAD:%[0-9]+]]:vreg_64(<4 x s16>) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
232 ; GFX7: $vgpr0_vgpr1 = COPY [[LOAD]](<4 x s16>)
236 ; GFX7-FLAT: [[LOAD:%[0-9]+]]:vreg_64(<4 x s16>) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
237 ; GFX7-FLAT: $vgpr0_vgpr1 = COPY [[LOAD]](<4 x s16>)
241 ; GFX9: [[LOAD:%[0-9]+]]:vreg_64(<4 x s16>) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
242 ; GFX9: $vgpr0_vgpr1 = COPY [[LOAD]](<4 x s16>)
263 ; GFX6: [[LOAD:%[0-9]+]]:vreg_64(p1) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
264 ; GFX6: $vgpr0_vgpr1 = COPY [[LOAD]](p1)
268 ; GFX7: [[LOAD:%[0-9]+]]:vreg_64(p1) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
269 ; GFX7: $vgpr0_vgpr1 = COPY [[LOAD]](p1)
273 ; GFX7-FLAT: [[LOAD:%[0-9]+]]:vreg_64(p1) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
274 ; GFX7-FLAT: $vgpr0_vgpr1 = COPY [[LOAD]](p1)
278 ; GFX9: [[LOAD:%[0-9]+]]:vreg_64(p1) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
279 ; GFX9: $vgpr0_vgpr1 = COPY [[LOAD]](p1)
300 ; GFX6: [[LOAD:%[0-9]+]]:vreg_64(p0) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
301 ; GFX6: $vgpr0_vgpr1 = COPY [[LOAD]](p0)
305 ; GFX7: [[LOAD:%[0-9]+]]:vreg_64(p0) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
306 ; GFX7: $vgpr0_vgpr1 = COPY [[LOAD]](p0)
310 ; GFX7-FLAT: [[LOAD:%[0-9]+]]:vreg_64(p0) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
311 ; GFX7-FLAT: $vgpr0_vgpr1 = COPY [[LOAD]](p0)
315 ; GFX9: [[LOAD:%[0-9]+]]:vreg_64(p0) = G_LOAD [[COPY]](p1) :: (load seq_cst 8, addrspace 1)
316 ; GFX9: $vgpr0_vgpr1 = COPY [[LOAD]](p0)
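
For context, every matched line above is a FileCheck directive verifying the G_LOAD that GlobalISel emits for a sequentially consistent atomic load from global memory (addrspace 1), followed by the COPY of the result into the return VGPRs. A minimal sketch of the kind of MIR input these checks run against is shown below for the p3 (local pointer) case; the function name and virtual register numbers are hypothetical and not taken from the test file, and the memory-operand syntax follows the older "(load seq_cst 4, addrspace 1)" form used in the checks above.

    ---
    name:            load_atomic_global_p3
    legalized:       true
    regBankSelected: true
    body:             |
      bb.0:
        liveins: $vgpr0_vgpr1
        ; hypothetical incoming global (p1) pointer in $vgpr0_vgpr1
        %0:vgpr(p1) = COPY $vgpr0_vgpr1
        ; 4-byte seq_cst atomic load of a p3 value from global memory,
        ; corresponding to the "(load seq_cst 4, addrspace 1)" checks above
        %1:vgpr(p3) = G_LOAD %0(p1) :: (load seq_cst 4, addrspace 1)
        $vgpr0 = COPY %1(p3)
    ...

The 8-byte cases in the listing (<2 x s32>, <4 x s16>, p1, p0) follow the same shape, with the result landing in a 64-bit register pair and copied out through $vgpr0_vgpr1.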