
Lines Matching refs:sub0

20 ; CHECK: %2.sub0:sgpr_128 = COPY [[S_LOAD_DWORDX4_IMM]].sub0
22 ; CHECK: undef %3.sub0:sgpr_128 = COPY [[S_LOAD_DWORDX4_IMM]].sub2
34 …; CHECK: undef %52.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET]].sub0, impl…
41 …; CHECK: undef %71.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET1]].sub0, imp…
48 …; CHECK: undef %90.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET2]].sub0, imp…
55 …; CHECK: undef %109.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET3]].sub0, im…
61 …; CHECK: undef %126.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET4]].sub0, im…
68 …; CHECK: undef %144.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET5]].sub0, im…
75 …; CHECK: undef %36.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET6]].sub0, imp…
80 …; CHECK: undef %41.sub2:vreg_128 = V_LSHRREV_B32_e32 16, [[BUFFER_LOAD_DWORDX4_OFFSET7]].sub0, imp…
85 …; CHECK: [[SI_SPILL_V128_RESTORE]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWO…
88 …; CHECK: [[SI_SPILL_V128_RESTORE1]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DW…
91 …; CHECK: [[SI_SPILL_V128_RESTORE2]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DW…
94 …; CHECK: [[SI_SPILL_V128_RESTORE3]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DW…
97 …; CHECK: %68.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET1]].sub1, i…
99 …; CHECK: [[SI_SPILL_V128_RESTORE4]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWO…
102 …; CHECK: [[SI_SPILL_V128_RESTORE5]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DW…
105 …; CHECK: [[SI_SPILL_V128_RESTORE6]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DW…
108 …; CHECK: %87.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET2]].sub1, i…
110 …; CHECK: [[SI_SPILL_V128_RESTORE7]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWO…
113 …; CHECK: [[SI_SPILL_V128_RESTORE8]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DW…
116 …; CHECK: [[SI_SPILL_V128_RESTORE9]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DW…
119 …; CHECK: %106.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET3]].sub1, …
121 …; CHECK: %110.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET3]].sub0, …
123 …; CHECK: %114.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET3]].sub3, …
125 …; CHECK: [[SI_SPILL_V128_RESTORE10]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_D…
128 …; CHECK: %123.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET4]].sub1, …
130 …; CHECK: %127.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET4]].sub0, …
132 …; CHECK: [[SI_SPILL_V128_RESTORE11]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_D…
135 …; CHECK: [[SI_SPILL_V128_RESTORE12]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_D…
138 …; CHECK: %141.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET5]].sub1, …
140 …; CHECK: [[SI_SPILL_V128_RESTORE13]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWO…
143 …; CHECK: [[SI_SPILL_V128_RESTORE14]].sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_D…
146 …; CHECK: %155.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET5]].sub2, …
148 …; CHECK: %159.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET6]].sub1, …
149 …; CHECK: %36.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET6]].sub0, i…
150 …; CHECK: %37.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET6]].sub3, i…
151 …; CHECK: %38.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET6]].sub2, i…
152 …; CHECK: %40.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET7]].sub1, i…
153 …; CHECK: %41.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET7]].sub0, i…
154 …; CHECK: %42.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET7]].sub3, i…
155 …; CHECK: %43.sub0:vreg_128 = V_AND_B32_e32 [[S_MOV_B32_]], [[BUFFER_LOAD_DWORDX4_OFFSET7]].sub2, i…
177 ; CHECK: undef %157.sub0:vreg_128 = COPY %159.sub0 {
183 ; CHECK: undef %153.sub0:vreg_128 = COPY %155.sub0 {
190 ; CHECK: undef %148.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE15]].sub0 {
197 ; CHECK: undef %143.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE16]].sub0 {
203 ; CHECK: undef %139.sub0:vreg_128 = COPY %141.sub0 {
210 ; CHECK: undef %134.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE17]].sub0 {
217 ; CHECK: undef %129.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE18]].sub0 {
223 ; CHECK: undef %125.sub0:vreg_128 = COPY %127.sub0 {
229 ; CHECK: undef %121.sub0:vreg_128 = COPY %123.sub0 {
236 ; CHECK: undef %116.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE19]].sub0 {
242 ; CHECK: undef %112.sub0:vreg_128 = COPY %114.sub0 {
248 ; CHECK: undef %108.sub0:vreg_128 = COPY %110.sub0 {
254 ; CHECK: undef %104.sub0:vreg_128 = COPY %106.sub0 {
261 ; CHECK: undef %99.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE20]].sub0 {
268 ; CHECK: undef %94.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE21]].sub0 {
275 ; CHECK: undef %89.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE22]].sub0 {
281 ; CHECK: undef %85.sub0:vreg_128 = COPY %87.sub0 {
288 ; CHECK: undef %80.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE23]].sub0 {
295 ; CHECK: undef %75.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE24]].sub0 {
302 ; CHECK: undef %70.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE25]].sub0 {
308 ; CHECK: undef %66.sub0:vreg_128 = COPY %68.sub0 {
315 ; CHECK: undef %61.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE26]].sub0 {
322 ; CHECK: undef %56.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE27]].sub0 {
329 ; CHECK: undef %51.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE28]].sub0 {
336 ; CHECK: undef %46.sub0:vreg_128 = COPY [[SI_SPILL_V128_RESTORE29]].sub0 {
347 %2.sub0:sgpr_128 = COPY %1.sub0
349 undef %3.sub0:sgpr_128 = COPY %1.sub2
360 undef %9.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %7.sub0, implicit $exec
364 undef %13.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %5.sub0, implicit $exec
368 undef %17.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %4.sub0, implicit $exec
372 undef %21.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %6.sub0, implicit $exec
377 undef %26.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %24.sub0, implicit $exec
382 undef %31.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %29.sub0, implicit $exec
387 undef %36.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %34.sub0, implicit $exec
392 undef %41.sub2:vreg_128 = V_LSHRREV_B32_e32 16, %39.sub0, implicit $exec
396 %8.sub0:vreg_128 = V_AND_B32_e32 %44, %7.sub1, implicit $exec
397 %9.sub0:vreg_128 = V_AND_B32_e32 %44, %7.sub0, implicit $exec
398 %10.sub0:vreg_128 = V_AND_B32_e32 %44, %7.sub3, implicit $exec
399 %11.sub0:vreg_128 = V_AND_B32_e32 %44, %7.sub2, implicit $exec
400 %12.sub0:vreg_128 = V_AND_B32_e32 %44, %5.sub1, implicit $exec
401 %13.sub0:vreg_128 = V_AND_B32_e32 %44, %5.sub0, implicit $exec
402 %14.sub0:vreg_128 = V_AND_B32_e32 %44, %5.sub3, implicit $exec
403 %15.sub0:vreg_128 = V_AND_B32_e32 %44, %5.sub2, implicit $exec
404 %16.sub0:vreg_128 = V_AND_B32_e32 %44, %4.sub1, implicit $exec
405 %17.sub0:vreg_128 = V_AND_B32_e32 %44, %4.sub0, implicit $exec
406 %18.sub0:vreg_128 = V_AND_B32_e32 %44, %4.sub3, implicit $exec
407 %19.sub0:vreg_128 = V_AND_B32_e32 %44, %4.sub2, implicit $exec
408 %20.sub0:vreg_128 = V_AND_B32_e32 %44, %6.sub1, implicit $exec
409 %21.sub0:vreg_128 = V_AND_B32_e32 %44, %6.sub0, implicit $exec
410 %22.sub0:vreg_128 = V_AND_B32_e32 %44, %6.sub3, implicit $exec
411 %23.sub0:vreg_128 = V_AND_B32_e32 %44, %6.sub2, implicit $exec
412 %25.sub0:vreg_128 = V_AND_B32_e32 %44, %24.sub1, implicit $exec
413 %26.sub0:vreg_128 = V_AND_B32_e32 %44, %24.sub0, implicit $exec
414 %27.sub0:vreg_128 = V_AND_B32_e32 %44, %24.sub3, implicit $exec
415 %28.sub0:vreg_128 = V_AND_B32_e32 %44, %24.sub2, implicit $exec
416 %30.sub0:vreg_128 = V_AND_B32_e32 %44, %29.sub1, implicit $exec
417 %31.sub0:vreg_128 = V_AND_B32_e32 %44, %29.sub0, implicit $exec
418 %32.sub0:vreg_128 = V_AND_B32_e32 %44, %29.sub3, implicit $exec
419 %33.sub0:vreg_128 = V_AND_B32_e32 %44, %29.sub2, implicit $exec
420 %35.sub0:vreg_128 = V_AND_B32_e32 %44, %34.sub1, implicit $exec
421 %36.sub0:vreg_128 = V_AND_B32_e32 %44, %34.sub0, implicit $exec
422 %37.sub0:vreg_128 = V_AND_B32_e32 %44, %34.sub3, implicit $exec
423 %38.sub0:vreg_128 = V_AND_B32_e32 %44, %34.sub2, implicit $exec
424 %40.sub0:vreg_128 = V_AND_B32_e32 %44, %39.sub1, implicit $exec
425 %41.sub0:vreg_128 = V_AND_B32_e32 %44, %39.sub0, implicit $exec
426 %42.sub0:vreg_128 = V_AND_B32_e32 %44, %39.sub3, implicit $exec
427 %43.sub0:vreg_128 = V_AND_B32_e32 %44, %39.sub2, implicit $exec
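A note on the recurring pattern in the matches above: each 32-bit element of a
BUFFER_LOAD_DWORDX4 result is split into its two 16-bit halves, the low half
with V_AND_B32_e32 against the S_MOV_B32 constant (%44 in the input MIR) and
the high half with V_LSHRREV_B32_e32 by 16, each landing in the sub0 or sub2
lane of a vreg_128. The constant's value is not visible in this listing;
assuming it is the usual 0xffff half-word mask, a minimal C sketch of what
each AND/shift pair computes per dword:

    #include <stdint.h>
    #include <stdio.h>

    /* One 32-bit element of a DWORDX4 load, unpacked the way the matched
       instructions do it (assumption: the %44 / S_MOV_B32 mask is 0xffff). */
    static void unpack_halves(uint32_t dword, uint32_t *lo, uint32_t *hi) {
        *lo = dword & 0xffffu;  /* V_AND_B32_e32 %44, %x.subN, implicit $exec */
        *hi = dword >> 16;      /* V_LSHRREV_B32_e32 16, %x.subN, implicit $exec */
    }

    int main(void) {
        uint32_t lo, hi;
        unpack_halves(0x12345678u, &lo, &hi);
        printf("lo=0x%04x hi=0x%04x\n", lo, hi);  /* lo=0x5678 hi=0x1234 */
        return 0;
    }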