Lines matching refs:regClass (each entry: source line number, matching line, enclosing function)
151 RegClass rc = RegClass(mask.regClass().type(), 1); in emit_mbcnt()
178 dst = bld.tmp(src.regClass()); in emit_wqm()
190 if (index.regClass() == s1) in emit_bpermute()
350 if (src.regClass() == dst_rc) { in emit_extract_vector()
358 if (it != ctx->allocated_vec.end() && dst_rc.bytes() == it->second[idx].regClass().bytes()) { in emit_extract_vector()
359 if (it->second[idx].regClass() == dst_rc) { in emit_extract_vector()
554 if (vec.regClass() == dst.regClass()) { in byte_align_vector()
610 assert(val.regClass() == s1); in bool_to_vector_condition()
611 assert(dst.regClass() == bld.lm); in bool_to_vector_condition()
624 assert(val.regClass() == bld.lm); in bool_to_scalar_condition()
625 assert(dst.regClass() == s1); in bool_to_scalar_condition()
671 } else if (src.regClass() == s1) { in convert_int()
682 if (sign_extend && dst.regClass() == s2) { in convert_int()
686 } else if (sign_extend && dst.regClass() == v2) { in convert_int()
717 Temp tmp = dst.regClass() == s2 ? bld.tmp(s1) : dst; in extract_8_16_bit_sgpr_element()
726 if (dst.regClass() == s2) in extract_8_16_bit_sgpr_element()
807 if (it->second[index].regClass() == v2b) in get_alu_src_vop3p()
815 assert(tmp.regClass() == v6b && dword == 1); in get_alu_src_vop3p()
957 tmp = bld.vop3(op, bld.def(dst.regClass()), src[0], src[1], src[2]); in emit_vop3a_instruction()
959 tmp = bld.vop3(op, bld.def(dst.regClass()), src[0], src[1]); in emit_vop3a_instruction()
1074 assert(dst.regClass() == bld.lm); in emit_sopc_instruction()
1077 assert(src0.regClass() == src1.regClass()); in emit_sopc_instruction()
1101 assert(dst.regClass() == ctx->program->lane_mask); in emit_comparison()
1117 assert(dst.regClass() == bld.lm); in emit_boolean_logic()
1118 assert(src0.regClass() == bld.lm); in emit_boolean_logic()
1119 assert(src1.regClass() == bld.lm); in emit_boolean_logic()
1132 assert(cond.regClass() == bld.lm); in emit_bcsel()
1158 assert(dst.regClass() == bld.lm); in emit_bcsel()
1159 assert(then.regClass() == bld.lm); in emit_bcsel()
1160 assert(els.regClass() == bld.lm); in emit_bcsel()
1164 if (dst.regClass() == s1 || dst.regClass() == s2) { in emit_bcsel()
1165 assert((then.regClass() == s1 || then.regClass() == s2) && in emit_bcsel()
1166 els.regClass() == then.regClass()); in emit_bcsel()
1169 dst.regClass() == s1 ? aco_opcode::s_cselect_b32 : aco_opcode::s_cselect_b64; in emit_bcsel()
1492 if (dst.regClass() == v1 || dst.regClass() == v2b || dst.regClass() == v1b) { in visit_alu_instr()
1494 } else if (dst.regClass() == v2) { in visit_alu_instr()
1509 if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
1521 if (dst.regClass() == s1) { in visit_alu_instr()
1523 } else if (dst.regClass() == v1) { in visit_alu_instr()
1526 } else if (dst.regClass() == v2b && ctx->program->gfx_level >= GFX10) { in visit_alu_instr()
1530 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1541 if (dst.regClass() == s1) { in visit_alu_instr()
1545 } else if (dst.regClass() == s2) { in visit_alu_instr()
1558 } else if (dst.regClass() == v1) { in visit_alu_instr()
1560 } else if (dst.regClass() == v2b && ctx->program->gfx_level >= GFX9) { in visit_alu_instr()
1562 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1566 } else if (dst.regClass() == v2) { in visit_alu_instr()
1579 if (dst.regClass() == v2b && ctx->program->gfx_level >= GFX10) { in visit_alu_instr()
1581 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1583 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
1585 } else if (dst.regClass() == v1) { in visit_alu_instr()
1587 } else if (dst.regClass() == s1) { in visit_alu_instr()
1595 if (dst.regClass() == v2b && ctx->program->gfx_level >= GFX10) { in visit_alu_instr()
1597 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1599 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
1601 } else if (dst.regClass() == v1) { in visit_alu_instr()
1603 } else if (dst.regClass() == s1) { in visit_alu_instr()
1611 if (dst.regClass() == v2b && ctx->program->gfx_level >= GFX10) { in visit_alu_instr()
1613 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1615 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
1617 } else if (dst.regClass() == v1) { in visit_alu_instr()
1619 } else if (dst.regClass() == s1) { in visit_alu_instr()
1627 if (dst.regClass() == v2b && ctx->program->gfx_level >= GFX10) { in visit_alu_instr()
1629 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1631 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
1633 } else if (dst.regClass() == v1) { in visit_alu_instr()
1635 } else if (dst.regClass() == s1) { in visit_alu_instr()
1645 } else if (dst.regClass() == v1 || dst.regClass() == v2b || dst.regClass() == v1b) { in visit_alu_instr()
1647 } else if (dst.regClass() == v2) { in visit_alu_instr()
1649 } else if (dst.regClass() == s1) { in visit_alu_instr()
1651 } else if (dst.regClass() == s2) { in visit_alu_instr()
1661 } else if (dst.regClass() == v1 || dst.regClass() == v2b || dst.regClass() == v1b) { in visit_alu_instr()
1663 } else if (dst.regClass() == v2) { in visit_alu_instr()
1665 } else if (dst.regClass() == s1) { in visit_alu_instr()
1667 } else if (dst.regClass() == s2) { in visit_alu_instr()
1677 } else if (dst.regClass() == v1 || dst.regClass() == v2b || dst.regClass() == v1b) { in visit_alu_instr()
1679 } else if (dst.regClass() == v2) { in visit_alu_instr()
1681 } else if (dst.regClass() == s1) { in visit_alu_instr()
1683 } else if (dst.regClass() == s2) { in visit_alu_instr()
1691 if (dst.regClass() == v2b && ctx->program->gfx_level >= GFX10) { in visit_alu_instr()
1693 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1695 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
1697 } else if (dst.regClass() == v1) { in visit_alu_instr()
1699 } else if (dst.regClass() == v2 && ctx->program->gfx_level >= GFX8) { in visit_alu_instr()
1702 } else if (dst.regClass() == v2) { in visit_alu_instr()
1704 } else if (dst.regClass() == s2) { in visit_alu_instr()
1706 } else if (dst.regClass() == s1) { in visit_alu_instr()
1714 if (dst.regClass() == v2b && ctx->program->gfx_level >= GFX10) { in visit_alu_instr()
1716 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1718 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
1720 } else if (dst.regClass() == v1) { in visit_alu_instr()
1723 } else if (dst.regClass() == v2 && ctx->program->gfx_level >= GFX8) { in visit_alu_instr()
1726 } else if (dst.regClass() == v2) { in visit_alu_instr()
1728 } else if (dst.regClass() == s1) { in visit_alu_instr()
1730 } else if (dst.regClass() == s2) { in visit_alu_instr()
1738 if (dst.regClass() == v2b && ctx->program->gfx_level >= GFX10) { in visit_alu_instr()
1740 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1742 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
1744 } else if (dst.regClass() == v1) { in visit_alu_instr()
1746 } else if (dst.regClass() == v2 && ctx->program->gfx_level >= GFX8) { in visit_alu_instr()
1749 } else if (dst.regClass() == v2) { in visit_alu_instr()
1751 } else if (dst.regClass() == s1) { in visit_alu_instr()
1753 } else if (dst.regClass() == s2) { in visit_alu_instr()
1762 if (src.regClass() == s1) { in visit_alu_instr()
1764 } else if (src.regClass() == v1) { in visit_alu_instr()
1766 } else if (src.regClass() == s2) { in visit_alu_instr()
1776 if (src.regClass() == s1 || src.regClass() == s2) { in visit_alu_instr()
1777 aco_opcode op = src.regClass() == s2 in visit_alu_instr()
1791 } else if (src.regClass() == v1) { in visit_alu_instr()
1800 } else if (src.regClass() == v2) { in visit_alu_instr()
1825 if (src.regClass() == s1) { in visit_alu_instr()
1828 } else if (src.regClass() == v1) { in visit_alu_instr()
1837 if (dst.regClass() == s1) { in visit_alu_instr()
1839 } else if (dst.regClass() == v1) { in visit_alu_instr()
1847 if (dst.regClass() == s1) { in visit_alu_instr()
1856 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
1876 if (dst.regClass() == s2) { in visit_alu_instr()
1883 } else if (dst.regClass() == v2) { in visit_alu_instr()
1894 if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
1902 if (dst.regClass() == s1) { in visit_alu_instr()
1908 } else if (dst.regClass() == v2b) { in visit_alu_instr()
1920 } else if (dst.regClass() == v1) { in visit_alu_instr()
1934 if (dst.regClass() == s2) { in visit_alu_instr()
1947 } else if (dst.regClass() == v2) { in visit_alu_instr()
1977 if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
1985 if (dst.regClass() == s1) { in visit_alu_instr()
1998 if (dst.regClass() == v2b) { in visit_alu_instr()
2002 } else if (dst.regClass() == v1) { in visit_alu_instr()
2014 if (dst.regClass() == s1) { in visit_alu_instr()
2018 if (dst.regClass() == v1) { in visit_alu_instr()
2031 if (dst.regClass() == s2) { in visit_alu_instr()
2039 } else if (dst.regClass() == v2) { in visit_alu_instr()
2051 if (dst.regClass() == s1) { in visit_alu_instr()
2054 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2061 if (dst.regClass() == v1) { in visit_alu_instr()
2082 if (dst.regClass() == s2) { in visit_alu_instr()
2089 } else if (dst.regClass() == v2) { in visit_alu_instr()
2102 if (dst.regClass() == s1) { in visit_alu_instr()
2105 } else if (dst.regClass() == v1) { in visit_alu_instr()
2118 if (dst.regClass() == s2) { in visit_alu_instr()
2126 } else if (dst.regClass() == v2) { in visit_alu_instr()
2138 if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2145 if (dst.regClass() == s1) { in visit_alu_instr()
2150 } else if (dst.regClass() == v2b) { in visit_alu_instr()
2164 } else if (dst.regClass() == v1) { in visit_alu_instr()
2177 if (dst.regClass() == s2) { in visit_alu_instr()
2190 } else if (dst.regClass() == v2) { in visit_alu_instr()
2220 if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2227 if (dst.regClass() == s1) { in visit_alu_instr()
2240 if (dst.regClass() == v2b) { in visit_alu_instr()
2244 } else if (dst.regClass() == v1) { in visit_alu_instr()
2258 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2277 } else if (dst.regClass() == s1) { in visit_alu_instr()
2285 if (dst.regClass() == s1 && ctx->options->gfx_level >= GFX9) { in visit_alu_instr()
2291 Temp tmp = dst.regClass() == s1 ? bld.tmp(v1) : dst; in visit_alu_instr()
2298 if (dst.regClass() == s1) in visit_alu_instr()
2306 if (dst.regClass() == v1) { in visit_alu_instr()
2308 } else if (dst.regClass() == s1 && ctx->options->gfx_level >= GFX9) { in visit_alu_instr()
2310 } else if (dst.regClass() == s1) { in visit_alu_instr()
2320 if (dst.regClass() == v2b) { in visit_alu_instr()
2322 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2324 } else if (dst.regClass() == v1) { in visit_alu_instr()
2326 } else if (dst.regClass() == v2) { in visit_alu_instr()
2334 if (dst.regClass() == v1) { in visit_alu_instr()
2342 if (dst.regClass() == v2b) { in visit_alu_instr()
2344 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2346 } else if (dst.regClass() == v1) { in visit_alu_instr()
2348 } else if (dst.regClass() == v2) { in visit_alu_instr()
2356 if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2366 if (dst.regClass() == v2b) { in visit_alu_instr()
2371 } else if (dst.regClass() == v1) { in visit_alu_instr()
2376 } else if (dst.regClass() == v2) { in visit_alu_instr()
2386 if (dst.regClass() == v2b) { in visit_alu_instr()
2388 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2403 } else if (dst.regClass() == v1) { in visit_alu_instr()
2406 } else if (dst.regClass() == v2) { in visit_alu_instr()
2414 if (dst.regClass() == v1) { in visit_alu_instr()
2423 if (dst.regClass() == v2b) { in visit_alu_instr()
2426 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2428 } else if (dst.regClass() == v1) { in visit_alu_instr()
2431 } else if (dst.regClass() == v2) { in visit_alu_instr()
2440 if (dst.regClass() == v2b) { in visit_alu_instr()
2443 } else if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2445 } else if (dst.regClass() == v1) { in visit_alu_instr()
2448 } else if (dst.regClass() == v2) { in visit_alu_instr()
2515 if (dst.regClass() == v2b) { in visit_alu_instr()
2517 } else if (dst.regClass() == v1) { in visit_alu_instr()
2520 } else if (dst.regClass() == v2) { in visit_alu_instr()
2529 if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2539 if (dst.regClass() == v2b) { in visit_alu_instr()
2541 } else if (dst.regClass() == v1) { in visit_alu_instr()
2544 } else if (dst.regClass() == v2) { in visit_alu_instr()
2558 if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2569 if (dst.regClass() == v2b) { in visit_alu_instr()
2574 } else if (dst.regClass() == v1) { in visit_alu_instr()
2579 } else if (dst.regClass() == v2) { in visit_alu_instr()
2593 if (dst.regClass() == v1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
2602 if (dst.regClass() == v2b) { in visit_alu_instr()
2605 } else if (dst.regClass() == v1) { in visit_alu_instr()
2611 } else if (dst.regClass() == v2) { in visit_alu_instr()
2620 if (dst.regClass() == v2b) { in visit_alu_instr()
2622 } else if (dst.regClass() == v1) { in visit_alu_instr()
2631 if (dst.regClass() == v2b) { in visit_alu_instr()
2633 } else if (dst.regClass() == v1) { in visit_alu_instr()
2636 } else if (dst.regClass() == v2) { in visit_alu_instr()
2645 if (dst.regClass() == v2b) { in visit_alu_instr()
2647 } else if (dst.regClass() == v1) { in visit_alu_instr()
2655 if (dst.regClass() == v2b) { in visit_alu_instr()
2657 } else if (dst.regClass() == v1) { in visit_alu_instr()
2660 } else if (dst.regClass() == v2) { in visit_alu_instr()
2669 if (dst.regClass() == v2b) { in visit_alu_instr()
2671 } else if (dst.regClass() == v1) { in visit_alu_instr()
2673 } else if (dst.regClass() == v2) { in visit_alu_instr()
2681 if (dst.regClass() == v2b) { in visit_alu_instr()
2683 } else if (dst.regClass() == v1) { in visit_alu_instr()
2685 } else if (dst.regClass() == v2) { in visit_alu_instr()
2694 if (dst.regClass() == v2b) { in visit_alu_instr()
2696 } else if (dst.regClass() == v1) { in visit_alu_instr()
2698 } else if (dst.regClass() == v2) { in visit_alu_instr()
2726 if (dst.regClass() == v2b) { in visit_alu_instr()
2728 } else if (dst.regClass() == v1) { in visit_alu_instr()
2730 } else if (dst.regClass() == v2) { in visit_alu_instr()
2739 if (dst.regClass() == v2b) { in visit_alu_instr()
2741 } else if (dst.regClass() == v1) { in visit_alu_instr()
2743 } else if (dst.regClass() == v2) { in visit_alu_instr()
2790 if (dst.regClass() == v2b) { in visit_alu_instr()
2794 } else if (dst.regClass() == v1) { in visit_alu_instr()
2808 if (dst.regClass() == v2b) { in visit_alu_instr()
2810 } else if (dst.regClass() == v1) { in visit_alu_instr()
2812 } else if (dst.regClass() == v2) { in visit_alu_instr()
2820 if (dst.regClass() == v2b) { in visit_alu_instr()
2822 } else if (dst.regClass() == v1) { in visit_alu_instr()
2824 } else if (dst.regClass() == v2) { in visit_alu_instr()
2848 if (dst.regClass() == v2b) { in visit_alu_instr()
2855 } else if (dst.regClass() == v1) { in visit_alu_instr()
2860 } else if (dst.regClass() == v2) { in visit_alu_instr()
2920 assert(dst.regClass() == v2b); in visit_alu_instr()
3000 assert(dst.regClass() == v2b); in visit_alu_instr()
3343 assert(src.regClass() == bld.lm); in visit_alu_instr()
3345 if (dst.regClass() == s1) { in visit_alu_instr()
3348 } else if (dst.regClass() == v2b) { in visit_alu_instr()
3358 assert(src.regClass() == bld.lm); in visit_alu_instr()
3360 if (dst.regClass() == s1) { in visit_alu_instr()
3363 } else if (dst.regClass() == v1) { in visit_alu_instr()
3373 assert(src.regClass() == bld.lm); in visit_alu_instr()
3375 if (dst.regClass() == s2) { in visit_alu_instr()
3379 } else if (dst.regClass() == v2) { in visit_alu_instr()
3429 assert(src.regClass() == bld.lm); in visit_alu_instr()
3432 if (tmp.regClass() == s1) { in visit_alu_instr()
3448 assert(dst.regClass() == bld.lm); in visit_alu_instr()
3451 assert(src.regClass() == v1 || src.regClass() == v2); in visit_alu_instr()
3452 assert(dst.regClass() == bld.lm); in visit_alu_instr()
3456 assert(src.regClass() == s1 || src.regClass() == s2); in visit_alu_instr()
3458 if (src.regClass() == s2 && ctx->program->gfx_level <= GFX7) { in visit_alu_instr()
3485 bld.pseudo(aco_opcode::p_split_vector, Definition(dst), bld.def(dst.regClass()), in visit_alu_instr()
3489 bld.pseudo(aco_opcode::p_split_vector, bld.def(dst.regClass()), Definition(dst), in visit_alu_instr()
3494 bld.pseudo(aco_opcode::p_split_vector, Definition(dst), bld.def(dst.regClass()), in visit_alu_instr()
3502 bld.pseudo(aco_opcode::p_split_vector, bld.def(dst.regClass()), Definition(dst), in visit_alu_instr()
3513 if (dst.regClass() == v1) { in visit_alu_instr()
3528 if (dst.regClass() == v1) { in visit_alu_instr()
3561 if (src.regClass() == v1) in visit_alu_instr()
3563 if (dst.regClass() == v1) { in visit_alu_instr()
3575 if (src.regClass() == s1) in visit_alu_instr()
3581 if (dst.regClass() == v1) { in visit_alu_instr()
3591 assert(dst.regClass() == v1); in visit_alu_instr()
3631 if (dst.regClass() == s1) { in visit_alu_instr()
3633 } else if (dst.regClass() == v1) { in visit_alu_instr()
3643 if (dst.regClass() == s1) { in visit_alu_instr()
3670 } else if (dst.regClass() == v1) { in visit_alu_instr()
3734 } else if (dst.regClass() == s1 && instr->dest.dest.ssa.bit_size == 16) { in visit_alu_instr()
3753 if (def.regClass() == s1) { in visit_alu_instr()
3757 src = emit_extract_vector(ctx, src, 0, def.regClass()); in visit_alu_instr()
3785 if (def.regClass() == s1) { in visit_alu_instr()
3789 src = emit_extract_vector(ctx, src, 0, def.regClass()); in visit_alu_instr()
3804 if (src.regClass() == s1) { in visit_alu_instr()
3806 } else if (src.regClass() == v1) { in visit_alu_instr()
3808 } else if (src.regClass() == v2) { in visit_alu_instr()
3812 } else if (src.regClass() == s2) { in visit_alu_instr()
3939 assert(dst.regClass() == bld.lm); in visit_load_const()
4077 } else if (offset_tmp.regClass() == s1) { in emit_load()
4080 } else if (offset_tmp.regClass() == v1) { in emit_load()
4087 if (offset_tmp.regClass() == s2) { in emit_load()
4111 } else if (offset_tmp.regClass() == s1) { in emit_load()
4114 } else if (offset_tmp.regClass() == s2) { in emit_load()
4117 } else if (offset_tmp.regClass() == v1) { in emit_load()
4120 } else if (offset_tmp.regClass() == v2) { in emit_load()
4274 offset = offset.regClass() == s1 ? bld.copy(bld.def(v1), offset) : offset; in lds_load_callback()
4324 Temp val = rc == info.dst.regClass() && dst_hint.id() ? dst_hint : bld.tmp(rc); in lds_load_callback()
4391 Temp val = dst_hint.id() && dst_hint.regClass() == rc ? dst_hint : bld.tmp(rc); in smem_load_callback()
4449 Temp val = dst_hint.id() && rc == dst_hint.regClass() ? dst_hint : bld.tmp(rc); in mubuf_load_callback()
4484 Temp val = dst_hint.id() && rc == dst_hint.regClass() ? dst_hint : bld.tmp(rc); in scratch_load_callback()
4486 flat->operands[0] = offset.regClass() == s1 ? Operand(v1) : Operand(offset); in scratch_load_callback()
4487 flat->operands[1] = offset.regClass() == s1 ? Operand(offset) : Operand(s1); in scratch_load_callback()
4647 Temp val = dst_hint.id() && rc == dst_hint.regClass() ? dst_hint : bld.tmp(rc); in global_load_callback()
4665 if (addr.regClass() == s2) { in global_load_callback()
5058 else if (unlikely(voffset.regClass() == s1)) in resolve_excess_vmem_const_offset()
5061 else if (likely(voffset.regClass() == v1)) in resolve_excess_vmem_const_offset()
5234 create_vec_from_array(ctx, src, dst.size(), dst.regClass().type(), 4u, 0, dst); in load_input_from_temps()
5267 if (dst.regClass() == v2b) { in emit_interp_instr()
6048 coord = emit_wqm(bld, coord, bld.tmp(coord.regClass()), true); in emit_mimg()
6056 coords[i] = emit_wqm(bld, coords[i], bld.tmp(coords[i].regClass()), true); in emit_mimg()
6194 Temp tmp = bld.tmp(dst.regClass()); in emit_tfe_init()
6535 return_previous ? (cmpswap ? bld.def(data.regClass()) : Definition(dst)) : Definition(); in visit_image_atomic()
6554 return_previous ? (cmpswap ? bld.def(data.regClass()) : Definition(dst)) : Definition(); in visit_image_atomic()
6824 return_previous ? (cmpswap ? bld.def(data.regClass()) : Definition(dst)) : Definition(); in visit_atomic_ssbo()
6954 if (write_address.regClass() == s2) { in visit_store_global()
7078 if (addr.regClass() == s2) { in visit_global_atomic()
7166 return_previous ? (cmpswap ? bld.def(data.regClass()) : Definition(dst)) : Definition(); in visit_global_atomic()
7673 Operand addr = offset.regClass() == s1 ? Operand(v1) : Operand(offset); in visit_store_scratch()
7674 Operand saddr = offset.regClass() == s1 ? Operand(offset) : Operand(s1); in visit_store_scratch()
7887 assert(src.regClass() == bld.lm); in emit_boolean_exclusive_scan()
7971 assert(dst.regClass().type() != RegType::vgpr); in emit_uniform_subgroup()
7972 if (src.regClass().type() == RegType::vgpr) in emit_uniform_subgroup()
7986 Temp tmp = dst.regClass() == s1 ? bld.tmp(RegClass::get(RegType::vgpr, src.ssa->bit_size / 8)) in emit_addition_uniform_reduce()
8004 if (dst.regClass() == s1) in emit_addition_uniform_reduce()
8492 assert(addr.regClass() == s2); in visit_intrinsic()
8503 assert(addr.regClass() == s2); in visit_intrinsic()
8625 assert(src.regClass() == bld.lm); in visit_intrinsic()
8626 } else if (instr->src[0].ssa->bit_size == 32 && src.regClass() == v1) { in visit_intrinsic()
8628 } else if (instr->src[0].ssa->bit_size == 64 && src.regClass() == v2) { in visit_intrinsic()
8640 bld.pseudo(aco_opcode::p_create_vector, bld.def(dst.regClass()), src, Operand::zero()); in visit_intrinsic()
8661 if (src.regClass() == v1b || src.regClass() == v2b) { in visit_intrinsic()
8666 bld.def(src.regClass() == v1b ? v3b : v2b), tmp); in visit_intrinsic()
8669 } else if (src.regClass() == v1) { in visit_intrinsic()
8671 } else if (src.regClass() == v2) { in visit_intrinsic()
8678 } else if (instr->dest.ssa.bit_size == 1 && tid.regClass() == s1) { in visit_intrinsic()
8679 assert(src.regClass() == bld.lm); in visit_intrinsic()
8682 } else if (instr->dest.ssa.bit_size == 1 && tid.regClass() == v1) { in visit_intrinsic()
8683 assert(src.regClass() == bld.lm); in visit_intrinsic()
8709 if (src.regClass() == v1b || src.regClass() == v2b || src.regClass() == v1) { in visit_intrinsic()
8711 } else if (src.regClass() == v2) { in visit_intrinsic()
8719 assert(src.regClass() == bld.lm); in visit_intrinsic()
8731 assert(src.regClass() == bld.lm); in visit_intrinsic()
8732 assert(dst.regClass() == bld.lm); in visit_intrinsic()
8745 assert(src.regClass() == bld.lm); in visit_intrinsic()
8746 assert(dst.regClass() == bld.lm); in visit_intrinsic()
8819 bld.def(dst.regClass()), src); in visit_intrinsic()
8870 tmp = bld.tmp(dst.regClass()); in visit_intrinsic()
8874 assert(src.regClass() == bld.lm && tmp.regClass() == bld.lm); in visit_intrinsic()
8939 assert(src.regClass() == bld.lm); in visit_intrinsic()
8945 } else if (dst.regClass() == v1b) { in visit_intrinsic()
8948 } else if (dst.regClass() == v2b) { in visit_intrinsic()
8951 } else if (dst.regClass() == v1) { in visit_intrinsic()
8953 } else if (dst.regClass() == v2) { in visit_intrinsic()
8970 if (dst.regClass() == v1) { in visit_intrinsic()
8973 } else if (dst.regClass() == v2) { in visit_intrinsic()
8999 assert(dst.regClass() == v1); in visit_intrinsic()
9011 if (src.regClass() == s1) { in visit_intrinsic()
9013 } else if (dst.regClass() == v1 && src.regClass() == v1) { in visit_intrinsic()
9042 assert(src.regClass() == bld.lm); in visit_intrinsic()
9061 assert(src.regClass() == bld.lm); in visit_intrinsic()
9237 assert(addr.regClass() == s2); in visit_intrinsic()
9783 resource = bld.tmp(resource.regClass()); in visit_tex()
9878 if (dst.regClass() == s1) { in visit_tex()
10056 Temp tmp = dst.regClass() == tmp_dst.regClass() ? dst : bld.tmp(tmp_dst.regClass()); in visit_tex()
10092 assert(instr->dest.ssa.bit_size != 1 || dst.regClass() == ctx->program->lane_mask); in visit_phi()
10119 operands[num_operands++] = Operand(dst.regClass()); in visit_phi()
10130 Operand op = get_phi_operand(ctx, src.second, dst.regClass(), logical); in visit_phi()
10136 operands[num_operands++] = Operand(dst.regClass()); in visit_phi()
10170 phi->operands[1] = Operand(dst.regClass()); in visit_phi()
10434 RegClass rc = vals[0].regClass(); in create_continue_phis()
10551 assert(cond.regClass() == ctx->program->lane_mask); in begin_divergent_if_then()
10703 assert(cond.regClass() == s1); in begin_uniform_if_then()
10819 assert(cond.regClass() == ctx->program->lane_mask); in visit_if()
11673 if (startpgm->definitions[i].regClass().size() > 1) { in split_arguments()
11675 startpgm->definitions[i].regClass().size()); in split_arguments()
11804 assert(count.regClass() == s1); in lanecount_to_mask()
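
The listing is dominated by one recurring pattern: instruction selection reads a value's register class via regClass() and dispatches to a scalar (s1/s2), vector (v1/v2, or sub-dword v1b/v2b), or lane-mask path, often additionally gated on ctx->program->gfx_level. The many asserts against bld.lm check the lane-mask class, which is s1 or s2 depending on the wave size. The sketch below is a minimal, self-contained illustration of that dispatch, under stated assumptions: RegClass, Temp, and select_not here are simplified stand-ins written for this example, not ACO's real classes, and the opcode strings are plausible placeholders that mirror, but do not reproduce, the per-class cases in visit_alu_instr() above (e.g. source lines 1645-1683).

```cpp
// Hypothetical, simplified model of the regClass() dispatch pattern seen in
// the listing. These types are illustrative stand-ins, not ACO's real classes.
#include <iostream>
#include <stdexcept>
#include <string>

enum class RegType { sgpr, vgpr };

struct RegClass {
   RegType type;
   unsigned bytes; // size of the value in bytes
   bool operator==(const RegClass& o) const { return type == o.type && bytes == o.bytes; }
};

// Counterparts of the classes compared against in the listing.
constexpr RegClass s1{RegType::sgpr, 4}, s2{RegType::sgpr, 8};
constexpr RegClass v1{RegType::vgpr, 4}, v2{RegType::vgpr, 8};
constexpr RegClass v1b{RegType::vgpr, 1}, v2b{RegType::vgpr, 2};

struct Temp {
   RegClass rc;
   RegClass regClass() const { return rc; }
};

// Pick an opcode name for a bitwise NOT by dispatching on the destination's
// register class, mirroring the v1/v2b/v1b vs. v2 vs. s1 vs. s2 split that
// repeats throughout visit_alu_instr(). Opcode choices are illustrative.
std::string select_not(const Temp& dst)
{
   RegClass rc = dst.regClass();
   if (rc == v1 || rc == v2b || rc == v1b)
      return "v_not_b32";      // 32-bit (or sub-dword) VGPR path
   else if (rc == v2)
      return "v_not_b32 x2";   // 64-bit VGPR value, e.g. handled per 32-bit half
   else if (rc == s1)
      return "s_not_b32";      // scalar 32-bit path
   else if (rc == s2)
      return "s_not_b64";      // scalar 64-bit path
   throw std::runtime_error("unsupported register class");
}

int main()
{
   Temp dst_v{v1}, dst_s{s2};
   std::cout << select_not(dst_v) << '\n'; // v_not_b32
   std::cout << select_not(dst_s) << '\n'; // s_not_b64
}
```

The same shape explains most of the other hits: callbacks such as lds_load_callback and mubuf_load_callback only reuse dst_hint when its regClass() matches the class they are about to produce, and the boolean helpers assert regClass() == bld.lm before emitting lane-mask arithmetic.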