Lines matching refs:regClass (a short illustrative sketch of the recurring regClass() dispatch pattern follows the listing):
133 assert(mask.isUndefined() || mask.regClass() == bld.lm); in emit_mbcnt()
165 dst = bld.tmp(src.regClass()); in emit_wqm()
184 if (index.regClass() == s1) in emit_bpermute()
321 if (src.regClass() == dst_rc) { in emit_extract_vector()
329 if (it != ctx->allocated_vec.end() && dst_rc.bytes() == it->second[idx].regClass().bytes()) { in emit_extract_vector()
330 if (it->second[idx].regClass() == dst_rc) { in emit_extract_vector()
495 if (vec.regClass() == dst.regClass()) { in byte_align_vector()
538 assert(val.regClass() == s1); in bool_to_vector_condition()
539 assert(dst.regClass() == bld.lm); in bool_to_vector_condition()
550 assert(val.regClass() == bld.lm); in bool_to_scalar_condition()
551 assert(dst.regClass() == s1); in bool_to_scalar_condition()
578 } else if (src.regClass() == s1) { in convert_int()
584 assert(src_bits != 8 || src.regClass() == v1b); in convert_int()
585 assert(src_bits != 16 || src.regClass() == v2b); in convert_int()
602 if (is_signed && dst.regClass() == s2) { in convert_int()
605 } else if (is_signed && dst.regClass() == v2) { in convert_int()
636 Temp tmp = dst.regClass() == s2 ? bld.tmp(s1) : dst; in extract_8_16_bit_sgpr_element()
651 if (dst.regClass() == s2) in extract_8_16_bit_sgpr_element()
805 tmp = bld.vop3(op, bld.def(dst.regClass()), src[0], src[1], src[2]); in emit_vop3a_instruction()
807 tmp = bld.vop3(op, bld.def(dst.regClass()), src[0], src[1]); in emit_vop3a_instruction()
916 assert(dst.regClass() == bld.lm); in emit_sopc_instruction()
919 assert(src0.regClass() == src1.regClass()); in emit_sopc_instruction()
938 assert(dst.regClass() == ctx->program->lane_mask); in emit_comparison()
952 assert(dst.regClass() == bld.lm); in emit_boolean_logic()
953 assert(src0.regClass() == bld.lm); in emit_boolean_logic()
954 assert(src1.regClass() == bld.lm); in emit_boolean_logic()
966 assert(cond.regClass() == bld.lm); in emit_bcsel()
992 assert(dst.regClass() == bld.lm); in emit_bcsel()
993 assert(then.regClass() == bld.lm); in emit_bcsel()
994 assert(els.regClass() == bld.lm); in emit_bcsel()
998 if (dst.regClass() == s1 || dst.regClass() == s2) { in emit_bcsel()
999 … assert((then.regClass() == s1 || then.regClass() == s2) && els.regClass() == then.regClass()); in emit_bcsel()
1001 … aco_opcode op = dst.regClass() == s1 ? aco_opcode::s_cselect_b32 : aco_opcode::s_cselect_b64; in emit_bcsel()
1224 assert(src.regClass() == bld.lm); in visit_alu_instr()
1225 assert(dst.regClass() == bld.lm); in visit_alu_instr()
1229 } else if (dst.regClass() == v1 || dst.regClass() == v2b || dst.regClass() == v1b) { in visit_alu_instr()
1231 } else if (dst.regClass() == v2) { in visit_alu_instr()
1247 if (dst.regClass() == v1) { in visit_alu_instr()
1249 } else if (dst.regClass() == s1) { in visit_alu_instr()
1256 if (dst.regClass() == s2) { in visit_alu_instr()
1274 if (dst.regClass() == s1) { in visit_alu_instr()
1276 } else if (dst.regClass() == v1) { in visit_alu_instr()
1285 if (dst.regClass() == s1) { in visit_alu_instr()
1288 } else if (dst.regClass() == s2) { in visit_alu_instr()
1297 } else if (dst.regClass() == v1) { in visit_alu_instr()
1299 } else if (dst.regClass() == v2) { in visit_alu_instr()
1312 if (dst.regClass() == v2b && ctx->program->chip_class >= GFX10) { in visit_alu_instr()
1314 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1316 } else if (dst.regClass() == v1) { in visit_alu_instr()
1318 } else if (dst.regClass() == s1) { in visit_alu_instr()
1326 if (dst.regClass() == v2b && ctx->program->chip_class >= GFX10) { in visit_alu_instr()
1328 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1330 } else if (dst.regClass() == v1) { in visit_alu_instr()
1332 } else if (dst.regClass() == s1) { in visit_alu_instr()
1340 if (dst.regClass() == v2b && ctx->program->chip_class >= GFX10) { in visit_alu_instr()
1342 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1344 } else if (dst.regClass() == v1) { in visit_alu_instr()
1346 } else if (dst.regClass() == s1) { in visit_alu_instr()
1354 if (dst.regClass() == v2b && ctx->program->chip_class >= GFX10) { in visit_alu_instr()
1356 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1358 } else if (dst.regClass() == v1) { in visit_alu_instr()
1360 } else if (dst.regClass() == s1) { in visit_alu_instr()
1370 } else if (dst.regClass() == v1 || dst.regClass() == v2b || dst.regClass() == v1b) { in visit_alu_instr()
1372 } else if (dst.regClass() == v2) { in visit_alu_instr()
1374 } else if (dst.regClass() == s1) { in visit_alu_instr()
1376 } else if (dst.regClass() == s2) { in visit_alu_instr()
1386 } else if (dst.regClass() == v1 || dst.regClass() == v2b || dst.regClass() == v1b) { in visit_alu_instr()
1388 } else if (dst.regClass() == v2) { in visit_alu_instr()
1390 } else if (dst.regClass() == s1) { in visit_alu_instr()
1392 } else if (dst.regClass() == s2) { in visit_alu_instr()
1402 } else if (dst.regClass() == v1 || dst.regClass() == v2b || dst.regClass() == v1b) { in visit_alu_instr()
1404 } else if (dst.regClass() == v2) { in visit_alu_instr()
1406 } else if (dst.regClass() == s1) { in visit_alu_instr()
1408 } else if (dst.regClass() == s2) { in visit_alu_instr()
1416 if (dst.regClass() == v2b && ctx->program->chip_class >= GFX10) { in visit_alu_instr()
1418 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1420 } else if (dst.regClass() == v1) { in visit_alu_instr()
1422 } else if (dst.regClass() == v2 && ctx->program->chip_class >= GFX8) { in visit_alu_instr()
1425 } else if (dst.regClass() == v2) { in visit_alu_instr()
1427 } else if (dst.regClass() == s2) { in visit_alu_instr()
1429 } else if (dst.regClass() == s1) { in visit_alu_instr()
1437 if (dst.regClass() == v2b && ctx->program->chip_class >= GFX10) { in visit_alu_instr()
1439 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1441 } else if (dst.regClass() == v1) { in visit_alu_instr()
1443 } else if (dst.regClass() == v2 && ctx->program->chip_class >= GFX8) { in visit_alu_instr()
1446 } else if (dst.regClass() == v2) { in visit_alu_instr()
1448 } else if (dst.regClass() == s1) { in visit_alu_instr()
1450 } else if (dst.regClass() == s2) { in visit_alu_instr()
1458 if (dst.regClass() == v2b && ctx->program->chip_class >= GFX10) { in visit_alu_instr()
1460 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1462 } else if (dst.regClass() == v1) { in visit_alu_instr()
1464 } else if (dst.regClass() == v2 && ctx->program->chip_class >= GFX8) { in visit_alu_instr()
1467 } else if (dst.regClass() == v2) { in visit_alu_instr()
1469 } else if (dst.regClass() == s1) { in visit_alu_instr()
1471 } else if (dst.regClass() == s2) { in visit_alu_instr()
1480 if (src.regClass() == s1) { in visit_alu_instr()
1482 } else if (src.regClass() == v1) { in visit_alu_instr()
1484 } else if (src.regClass() == s2) { in visit_alu_instr()
1494 if (src.regClass() == s1 || src.regClass() == s2) { in visit_alu_instr()
1495 aco_opcode op = src.regClass() == s2 ? in visit_alu_instr()
1506 } else if (src.regClass() == v1) { in visit_alu_instr()
1519 if (dst.regClass() == s1) { in visit_alu_instr()
1521 } else if (dst.regClass() == v1) { in visit_alu_instr()
1529 if (dst.regClass() == s1) { in visit_alu_instr()
1555 if (dst.regClass() == s2) { in visit_alu_instr()
1560 } else if (dst.regClass() == v2) { in visit_alu_instr()
1573 if (dst.regClass() == s1) { in visit_alu_instr()
1578 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1588 } else if (dst.regClass() == v1) { in visit_alu_instr()
1597 if (src1.regClass() != v1) in visit_alu_instr()
1599 assert(src1.regClass() == v1); in visit_alu_instr()
1612 if (dst.regClass() == s1) { in visit_alu_instr()
1616 if (dst.regClass() == v1) { in visit_alu_instr()
1628 if (dst.regClass() == s2) { in visit_alu_instr()
1633 } else if (dst.regClass() == v2) { in visit_alu_instr()
1644 if (dst.regClass() == s1) { in visit_alu_instr()
1651 if (dst.regClass() == v1) { in visit_alu_instr()
1672 if (dst.regClass() == s2) { in visit_alu_instr()
1677 } else if (dst.regClass() == v2) { in visit_alu_instr()
1690 if (dst.regClass() == s1) { in visit_alu_instr()
1693 } else if (dst.regClass() == v1) { in visit_alu_instr()
1705 if (dst.regClass() == s2) { in visit_alu_instr()
1710 } else if (dst.regClass() == v2) { in visit_alu_instr()
1734 } else if (dst.regClass() == s1) { in visit_alu_instr()
1742 if (dst.regClass() == s1 && ctx->options->chip_class >= GFX9) { in visit_alu_instr()
1748 Temp tmp = dst.regClass() == s1 ? bld.tmp(v1) : dst; in visit_alu_instr()
1755 if (dst.regClass() == s1) in visit_alu_instr()
1763 if (dst.regClass() == v1) { in visit_alu_instr()
1765 } else if (dst.regClass() == s1 && ctx->options->chip_class >= GFX9) { in visit_alu_instr()
1767 } else if (dst.regClass() == s1) { in visit_alu_instr()
1777 if (dst.regClass() == v2b) { in visit_alu_instr()
1779 } else if (dst.regClass() == v1) { in visit_alu_instr()
1781 } else if (dst.regClass() == v2) { in visit_alu_instr()
1789 if (dst.regClass() == v2b) { in visit_alu_instr()
1791 } else if (dst.regClass() == v1) { in visit_alu_instr()
1793 } else if (dst.regClass() == v2) { in visit_alu_instr()
1803 if (dst.regClass() == v2b) { in visit_alu_instr()
1808 } else if (dst.regClass() == v1) { in visit_alu_instr()
1813 } else if (dst.regClass() == v2) { in visit_alu_instr()
1824 if (dst.regClass() == v2b) { in visit_alu_instr()
1827 } else if (dst.regClass() == v1) { in visit_alu_instr()
1829 } else if (dst.regClass() == v2) { in visit_alu_instr()
1837 if (dst.regClass() == v2b) { in visit_alu_instr()
1840 } else if (dst.regClass() == v1) { in visit_alu_instr()
1842 } else if (dst.regClass() == v2) { in visit_alu_instr()
1880 if (dst.regClass() == v2b) { in visit_alu_instr()
1882 } else if (dst.regClass() == v1) { in visit_alu_instr()
1885 } else if (dst.regClass() == v2) { in visit_alu_instr()
1895 if (dst.regClass() == v2b) { in visit_alu_instr()
1899 } else if (dst.regClass() == v1) { in visit_alu_instr()
1903 } else if (dst.regClass() == v2) { in visit_alu_instr()
1917 if (dst.regClass() == v2b) { in visit_alu_instr()
1921 } else if (dst.regClass() == v1) { in visit_alu_instr()
1925 } else if (dst.regClass() == v2) { in visit_alu_instr()
1939 if (dst.regClass() == v2b) { in visit_alu_instr()
1941 } else if (dst.regClass() == v1) { in visit_alu_instr()
1945 } else if (dst.regClass() == v2) { in visit_alu_instr()
1955 if (dst.regClass() == v2b) { in visit_alu_instr()
1957 } else if (dst.regClass() == v1) { in visit_alu_instr()
1966 if (dst.regClass() == v2b) { in visit_alu_instr()
1968 } else if (dst.regClass() == v1) { in visit_alu_instr()
1971 } else if (dst.regClass() == v2) { in visit_alu_instr()
1980 if (dst.regClass() == v2b) { in visit_alu_instr()
1982 } else if (dst.regClass() == v1) { in visit_alu_instr()
1990 if (dst.regClass() == v2b) { in visit_alu_instr()
1992 } else if (dst.regClass() == v1) { in visit_alu_instr()
1995 } else if (dst.regClass() == v2) { in visit_alu_instr()
2004 if (dst.regClass() == v2b) { in visit_alu_instr()
2006 } else if (dst.regClass() == v1) { in visit_alu_instr()
2008 } else if (dst.regClass() == v2) { in visit_alu_instr()
2016 if (dst.regClass() == v2b) { in visit_alu_instr()
2018 } else if (dst.regClass() == v1) { in visit_alu_instr()
2020 } else if (dst.regClass() == v2) { in visit_alu_instr()
2029 if (dst.regClass() == v2b) { in visit_alu_instr()
2031 } else if (dst.regClass() == v1) { in visit_alu_instr()
2033 } else if (dst.regClass() == v2) { in visit_alu_instr()
2057 if (dst.regClass() == v2b) { in visit_alu_instr()
2059 } else if (dst.regClass() == v1) { in visit_alu_instr()
2061 } else if (dst.regClass() == v2) { in visit_alu_instr()
2070 if (dst.regClass() == v2b) { in visit_alu_instr()
2072 } else if (dst.regClass() == v1) { in visit_alu_instr()
2074 } else if (dst.regClass() == v2) { in visit_alu_instr()
2111 if (dst.regClass() == v2b) { in visit_alu_instr()
2116 } else if (dst.regClass() == v1) { in visit_alu_instr()
2132 if (dst.regClass() == v2b) { in visit_alu_instr()
2134 } else if (dst.regClass() == v1) { in visit_alu_instr()
2136 } else if (dst.regClass() == v2) { in visit_alu_instr()
2144 if (dst.regClass() == v2b) { in visit_alu_instr()
2146 } else if (dst.regClass() == v1) { in visit_alu_instr()
2148 } else if (dst.regClass() == v2) { in visit_alu_instr()
2172 if (dst.regClass() == v2b) { in visit_alu_instr()
2178 } else if (dst.regClass() == v1) { in visit_alu_instr()
2182 } else if (dst.regClass() == v2) { in visit_alu_instr()
2241 assert(dst.regClass() == v2b); in visit_alu_instr()
2280 assert(dst.regClass() == v2b); in visit_alu_instr()
2528 assert(src.regClass() == bld.lm); in visit_alu_instr()
2530 if (dst.regClass() == s1) { in visit_alu_instr()
2533 } else if (dst.regClass() == v2b) { in visit_alu_instr()
2543 assert(src.regClass() == bld.lm); in visit_alu_instr()
2545 if (dst.regClass() == s1) { in visit_alu_instr()
2548 } else if (dst.regClass() == v1) { in visit_alu_instr()
2557 assert(src.regClass() == bld.lm); in visit_alu_instr()
2559 if (dst.regClass() == s2) { in visit_alu_instr()
2562 } else if (dst.regClass() == v2) { in visit_alu_instr()
2607 assert(src.regClass() == bld.lm); in visit_alu_instr()
2610 if (tmp.regClass() == s1) { in visit_alu_instr()
2626 assert(dst.regClass() == bld.lm); in visit_alu_instr()
2629 assert(src.regClass() == v1 || src.regClass() == v2); in visit_alu_instr()
2630 assert(dst.regClass() == bld.lm); in visit_alu_instr()
2634 assert(src.regClass() == s1 || src.regClass() == s2); in visit_alu_instr()
2636 if (src.regClass() == s2 && ctx->program->chip_class <= GFX7) { in visit_alu_instr()
2660 …bld.pseudo(aco_opcode::p_split_vector, Definition(dst), bld.def(dst.regClass()), get_alu_src(ctx, … in visit_alu_instr()
2663 …bld.pseudo(aco_opcode::p_split_vector, bld.def(dst.regClass()), Definition(dst), get_alu_src(ctx, … in visit_alu_instr()
2667 …bld.pseudo(aco_opcode::p_split_vector, Definition(dst), bld.def(dst.regClass()), get_alu_src(ctx, … in visit_alu_instr()
2674 …bld.pseudo(aco_opcode::p_split_vector, bld.def(dst.regClass()), Definition(dst), get_alu_src(ctx, … in visit_alu_instr()
2682 if (dst.regClass() == v1) { in visit_alu_instr()
2694 if (dst.regClass() == v1) { in visit_alu_instr()
2717 if (src.regClass() == v1) in visit_alu_instr()
2719 if (dst.regClass() == v1) { in visit_alu_instr()
2730 if (src.regClass() == s1) in visit_alu_instr()
2734 if (dst.regClass() == v1) { in visit_alu_instr()
2774 if (dst.regClass() == s1) { in visit_alu_instr()
2776 } else if (dst.regClass() == v1) { in visit_alu_instr()
2786 if (dst.regClass() == s1) { in visit_alu_instr()
2812 } else if (dst.regClass() == v1) { in visit_alu_instr()
2860 if (src.regClass() == s1) { in visit_alu_instr()
2862 } else if (src.regClass() == v1) { in visit_alu_instr()
2864 } else if (src.regClass() == v2) { in visit_alu_instr()
2869 } else if (src.regClass() == s2) { in visit_alu_instr()
2976 assert(dst.regClass() == bld.lm); in visit_load_const()
3107 } else if (offset_tmp.regClass() == s1) { in emit_load()
3110 } else if (offset_tmp.regClass() == v1) { in emit_load()
3117 if (offset_tmp.regClass() == s2) { in emit_load()
3139 } else if (offset_tmp.regClass() == s1) { in emit_load()
3141 } else if (offset_tmp.regClass() == s2) { in emit_load()
3143 } else if (offset_tmp.regClass() == v1) { in emit_load()
3145 } else if (offset_tmp.regClass() == v2) { in emit_load()
3294 offset = offset.regClass() == s1 ? bld.copy(bld.def(v1), offset) : offset; in lds_load_callback()
3343 Temp val = rc == info.dst.regClass() && dst_hint.id() ? dst_hint : bld.tmp(rc); in lds_load_callback()
3391 Temp val = dst_hint.id() && dst_hint.regClass() == rc ? dst_hint : bld.tmp(rc); in smem_load_callback()
3449 Temp val = dst_hint.id() && rc == dst_hint.regClass() ? dst_hint : bld.tmp(rc); in mubuf_load_callback()
3498 Temp val = dst_hint.id() && rc == dst_hint.regClass() ? dst_hint : bld.tmp(rc); in global_load_callback()
3513 offset = offset.regClass() == s2 ? bld.copy(bld.def(v2), offset) : offset; in global_load_callback()
3902 else if (unlikely(voffset.regClass() == s1)) in resolve_excess_vmem_const_offset()
3904 else if (likely(voffset.regClass() == v1)) in resolve_excess_vmem_const_offset()
4051 if (likely(indirect_offset_arg.regClass() == v1)) in offset_add_from_nir()
4053 else if (indirect_offset_arg.regClass() == s1) in offset_add_from_nir()
4061 else if (unlikely(offset.regClass() == s1 && with_stride.regClass() == s1)) in offset_add_from_nir()
4081 if (unlikely(off1.first.regClass() == s1 && off2.first.regClass() == s1)) in offset_add()
4102 Temp offset = unlikely(offs.first.regClass() == s1) in offset_mul()
4308 create_vec_from_array(ctx, src, dst.size(), dst.regClass().type(), 4u, 0, dst); in load_input_from_temps()
4475 if (dst.regClass() == v2b) { in emit_interp_instr()
5342 assert(src.regClass() == bld.lm); in visit_discard_if()
5656 } else if (sample_index.regClass() == s1) { in adjust_sample_index_using_fmask()
5659 assert(sample_index.regClass() == v1); in adjust_sample_index_using_fmask()
7208 assert(src.regClass() == bld.lm); in emit_boolean_exclusive_scan()
7281 assert(dst.regClass().type() != RegType::vgpr); in emit_uniform_subgroup()
7282 if (src.regClass().type() == RegType::vgpr) in emit_uniform_subgroup()
7295 Temp tmp = dst.regClass() == s1 ? bld.tmp(src_tmp.regClass()) : dst.getTemp(); in emit_addition_uniform_reduce()
7312 if (dst.regClass() == s1) in emit_addition_uniform_reduce()
7902 Definition tmp = bld.def(dst.regClass()); in visit_intrinsic()
7903 Definition lanemask_tmp = dst.size() == bld.lm.size() ? tmp : bld.def(src.regClass()); in visit_intrinsic()
7905 assert(src.regClass() == bld.lm); in visit_intrinsic()
7907 } else if (instr->src[0].ssa->bit_size == 32 && src.regClass() == v1) { in visit_intrinsic()
7909 } else if (instr->src[0].ssa->bit_size == 64 && src.regClass() == v2) { in visit_intrinsic()
7931 if (src.regClass() == v1b || src.regClass() == v2b) { in visit_intrinsic()
7935 …bld.pseudo(aco_opcode::p_split_vector, Definition(dst), bld.def(src.regClass() == v1b ? v3b : v2b)… in visit_intrinsic()
7938 } else if (src.regClass() == v1) { in visit_intrinsic()
7940 } else if (src.regClass() == v2) { in visit_intrinsic()
7947 } else if (instr->dest.ssa.bit_size == 1 && tid.regClass() == s1) { in visit_intrinsic()
7948 assert(src.regClass() == bld.lm); in visit_intrinsic()
7951 } else if (instr->dest.ssa.bit_size == 1 && tid.regClass() == v1) { in visit_intrinsic()
7952 assert(src.regClass() == bld.lm); in visit_intrinsic()
7981 if (src.regClass() == v1b || src.regClass() == v2b || src.regClass() == v1) { in visit_intrinsic()
7985 } else if (src.regClass() == v2) { in visit_intrinsic()
7993 assert(src.regClass() == bld.lm); in visit_intrinsic()
8005 assert(src.regClass() == bld.lm); in visit_intrinsic()
8006 assert(dst.regClass() == bld.lm); in visit_intrinsic()
8016 assert(src.regClass() == bld.lm); in visit_intrinsic()
8017 assert(dst.regClass() == bld.lm); in visit_intrinsic()
8090 …tmp_dst = emit_reduction_instr(ctx, aco_op, reduce_op, cluster_size, bld.def(dst.regClass()), src); in visit_intrinsic()
8105 assert(src.regClass() == bld.lm); in visit_intrinsic()
8106 assert(dst.regClass() == bld.lm); in visit_intrinsic()
8182 assert(src.regClass() == bld.lm); in visit_intrinsic()
8237 assert(src.regClass() == bld.lm); in visit_intrinsic()
8242 } else if (dst.regClass() == v1b) { in visit_intrinsic()
8245 } else if (dst.regClass() == v2b) { in visit_intrinsic()
8248 } else if (dst.regClass() == v1) { in visit_intrinsic()
8250 } else if (dst.regClass() == v2) { in visit_intrinsic()
8267 if (dst.regClass() == v1) { in visit_intrinsic()
8270 } else if (dst.regClass() == v2) { in visit_intrinsic()
8315 assert(src.regClass() == bld.lm); in visit_intrinsic()
8921 tmp_dst = bld.tmp(tmp_dst.regClass()); in visit_tex()
9029 resource = bld.tmp(resource.regClass()); in visit_tex()
9124 assert(dmask == 1 && dst.regClass() == bld.lm); in visit_tex()
9251 arg = emit_wqm(ctx, arg, bld.tmp(arg.regClass()), true); in visit_tex()
9278 Temp tmp = dst.regClass() == v4 ? dst : bld.tmp(v4); in visit_tex()
9307 assert(instr->dest.ssa.bit_size != 1 || dst.regClass() == ctx->program->lane_mask); in visit_phi()
9332 operands[num_operands++] = Operand(dst.regClass()); in visit_phi()
9343 Operand op = get_phi_operand(ctx, src.second, dst.regClass(), logical); in visit_phi()
9349 operands[num_operands++] = Operand(dst.regClass()); in visit_phi()
9382 phi->operands[1] = Operand(dst.regClass()); in visit_phi()
9700 RegClass rc = vals[0].regClass(); in create_continue_phis()
9796 assert(cond.regClass() == ctx->program->lane_mask); in begin_divergent_if_then()
9951 assert(cond.regClass() == s1); in begin_uniform_if_then()
10066 assert(cond.regClass() == ctx->program->lane_mask); in visit_if()
10373 bool is_16bit = values[0].regClass() == v2b; in export_fs_mrt_color()
10928 if (startpgm->definitions[i].regClass().size() > 1) { in split_arguments()
10930 startpgm->definitions[i].regClass().size()); in split_arguments()
11042 assert(count.regClass() == s1); in lanecount_to_mask()
11357 assert(src_mask.regClass() == bld.lm); in ngg_gs_workgroup_reduce_and_scan()
11406 sgincl = bld.pseudo(aco_opcode::p_phi, bld.def(sgincl.regClass()), sgincl, Operand(v1)); in ngg_gs_workgroup_reduce_and_scan()
11434 Temp incremented_counter = bld.tmp(counter_init.regClass()); in ngg_gs_clear_primflags()
11436 …Temp counter = bld.pseudo(aco_opcode::p_phi, bld.def(counter_init.regClass()), Operand(counter_ini… in ngg_gs_clear_primflags()
11533 …prim_flag_0 = bld.pseudo(aco_opcode::p_phi, bld.def(prim_flag_0.regClass()), Operand(prim_flag_0),… in ngg_gs_load_prim_flag_0()
11542 assert(vertex_live.regClass() == bld.lm); in ngg_gs_setup_vertex_compaction()
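
The matches above all follow the same shape: instruction selection inspects Temp::regClass() to decide whether a value is uniform (SGPR classes such as s1/s2), divergent (VGPR classes such as v1/v2/v2b/v1b), or a lane-mask boolean (bld.lm), and then emits the matching scalar or vector opcode. Below is a minimal sketch of that dispatch, assuming the ACO types from aco_ir.h/aco_builder.h and an isel_context like the one used by the functions named above; emit_ineg_sketch, its signature, and the chosen opcodes are illustrative assumptions, not code taken from the listed file.

/* Hypothetical helper illustrating the regClass()-based dispatch seen in the
 * matches above; it is a sketch, not a function from the file. */
#include "aco_ir.h"
#include "aco_builder.h"

namespace aco {

void emit_ineg_sketch(isel_context* ctx, Temp src, Temp dst)
{
   Builder bld(ctx->program, ctx->block);
   if (dst.regClass() == s1) {
      /* uniform 32-bit value: scalar ALU computes 0 - src (clobbers scc) */
      bld.sop2(aco_opcode::s_sub_i32, Definition(dst), bld.def(s1, scc), Operand(0u), src);
   } else if (dst.regClass() == v1) {
      /* divergent 32-bit value: vector ALU computes 0 - src per lane */
      bld.vsub32(Definition(dst), Operand(0u), src);
   } else {
      /* other classes (v2b, v2, s2, bld.lm, ...) would need their own branches,
       * exactly as the per-case chains in visit_alu_instr() do */
      unreachable("register class not handled in this sketch");
   }
}

} /* namespace aco */

The same structure repeats for most nir_op_* cases in visit_alu_instr(): one branch per destination register class, with asserts (as in emit_bcsel(), emit_boolean_logic(), and the visit_intrinsic() matches) guarding that boolean sources and destinations stay in the lane-mask class bld.lm.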