/external/mesa3d/src/compiler/spirv/ |
D | vtn_alu.c |
    161  dest->ssa = vtn_create_ssa_value(b, src0->type);   in vtn_handle_matrix_alu()
    164  dest->ssa->elems[i]->def = nir_fneg(&b->nb, src0->elems[i]->def);   in vtn_handle_matrix_alu()
    169  dest->ssa = vtn_create_ssa_value(b, src0->type);   in vtn_handle_matrix_alu()
    172  dest->ssa->elems[i]->def =   in vtn_handle_matrix_alu()
    178  dest->ssa = vtn_create_ssa_value(b, src0->type);   in vtn_handle_matrix_alu()
    181  dest->ssa->elems[i]->def =   in vtn_handle_matrix_alu()
    187  dest->ssa = vtn_ssa_transpose(b, src0);   in vtn_handle_matrix_alu()
    192  dest->ssa = vtn_ssa_transpose(b, mat_times_scalar(b, src0->transposed,   in vtn_handle_matrix_alu()
    195  dest->ssa = mat_times_scalar(b, src0, src1->def);   in vtn_handle_matrix_alu()
    203  dest->ssa = matrix_multiply(b, vtn_ssa_transpose(b, src1), src0);   in vtn_handle_matrix_alu()
    [all …]
|
D | vtn_glsl450.c |
    424  val->ssa = vtn_create_ssa_value(b, dest_type);   in handle_glsl450_alu()
    434  val->ssa->def = nir_fmul(nb, src[0], nir_imm_float(nb, 0.01745329251));   in handle_glsl450_alu()
    437  val->ssa->def = nir_fmul(nb, src[0], nir_imm_float(nb, 57.2957795131));   in handle_glsl450_alu()
    440  val->ssa->def = nir_fdiv(nb, nir_fsin(nb, src[0]),   in handle_glsl450_alu()
    447  val->ssa->def = nir_fmul(nb, sign, nir_ffract(nb, abs));   in handle_glsl450_alu()
    456  assert(glsl_type_is_struct(val->ssa->type));   in handle_glsl450_alu()
    457  val->ssa->elems[0]->def = nir_fmul(nb, sign, nir_ffract(nb, abs));   in handle_glsl450_alu()
    458  val->ssa->elems[1]->def = nir_fmul(nb, sign, nir_ffloor(nb, abs));   in handle_glsl450_alu()
    463  val->ssa->def = nir_sge(nb, src[1], src[0]);   in handle_glsl450_alu()
    467  val->ssa->def = build_length(nb, src[0]);   in handle_glsl450_alu()
    [all …]
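
The Radians/Degrees hits above reduce to a single constant multiply built with nir_builder. A minimal sketch of that pattern, assuming the nir_builder.h include path; the helper name is illustrative, not part of vtn:

    #include "nir_builder.h"   /* assumed include path */

    /* GLSL.std.450 Radians: radians = degrees * (pi / 180).  The literal
     * matches the one used by handle_glsl450_alu() in the hits above. */
    static nir_ssa_def *
    build_radians(nir_builder *nb, nir_ssa_def *degrees)
    {
       return nir_fmul(nb, degrees, nir_imm_float(nb, 0.01745329251f));
    }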
|
/external/mesa3d/src/compiler/nir/ |
D | nir_lower_alu_to_scalar.c |
    51   nir_alu_ssa_dest_init(chan, 1, instr->dest.dest.ssa.bit_size);   in lower_reduction()
    64   last = &chan->dest.dest.ssa;   in lower_reduction()
    67   last, &chan->dest.dest.ssa, NULL, NULL);   in lower_reduction()
    72   nir_ssa_def_rewrite_uses(&instr->dest.dest.ssa, nir_src_for_ssa(last));   in lower_reduction()
    109  nir_pack_half_2x16_split(b, nir_channel(b, instr->src[0].src.ssa,   in lower_alu_instr_scalar()
    111  nir_channel(b, instr->src[0].src.ssa,   in lower_alu_instr_scalar()
    114  nir_ssa_def_rewrite_uses(&instr->dest.dest.ssa, nir_src_for_ssa(val));   in lower_alu_instr_scalar()
    132  comps[0] = nir_unpack_half_2x16_split_x(b, instr->src[0].src.ssa);   in lower_alu_instr_scalar()
    133  comps[1] = nir_unpack_half_2x16_split_y(b, instr->src[0].src.ssa);   in lower_alu_instr_scalar()
    136  nir_ssa_def_rewrite_uses(&instr->dest.dest.ssa, nir_src_for_ssa(vec));   in lower_alu_instr_scalar()
    [all …]
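
The pattern behind these hits is the standard NIR rewrite: build a per-channel replacement, recombine it, point every use of the old vector def at the new one, and delete the original instruction. A minimal sketch of that shape for a scalar-capable opcode such as fneg; it is not the pass itself:

    #include "nir_builder.h"   /* assumed include path */

    static void
    scalarize_fneg(nir_builder *b, nir_alu_instr *alu)
    {
       /* assumes alu->op == nir_op_fneg with an SSA destination */
       b->cursor = nir_before_instr(&alu->instr);

       nir_ssa_def *src = nir_ssa_for_alu_src(b, alu, 0);
       unsigned n = alu->dest.dest.ssa.num_components;

       nir_ssa_def *comps[4];
       for (unsigned i = 0; i < n; i++)
          comps[i] = nir_fneg(b, nir_channel(b, src, i));

       /* recombine the scalars and retarget all users of the old def */
       nir_ssa_def *vec = nir_vec(b, comps, n);
       nir_ssa_def_rewrite_uses(&alu->dest.dest.ssa, nir_src_for_ssa(vec));
       nir_instr_remove(&alu->instr);
    }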
|
D | nir_opt_remove_phis.c |
    32   get_parent_mov(nir_ssa_def *ssa)   in get_parent_mov() argument
    34   if (ssa->parent_instr->type != nir_instr_type_alu)   in get_parent_mov()
    37   nir_alu_instr *alu = nir_instr_as_alu(ssa->parent_instr);   in get_parent_mov()
    42   matching_mov(nir_alu_instr *mov1, nir_ssa_def *ssa)   in matching_mov() argument
    47   nir_alu_instr *mov2 = get_parent_mov(ssa);   in matching_mov()
    95   if (src->src.ssa == &phi->dest.ssa)   in remove_phis_block()
    99   def = src->src.ssa;   in remove_phis_block()
    102  if (src->src.ssa != def && !matching_mov(mov, src->src.ssa)) {   in remove_phis_block()
    133  nir_ssa_def_rewrite_uses(&phi->dest.ssa, nir_src_for_ssa(def));   in remove_phis_block()
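
What remove_phis_block() is doing above: a phi whose incoming values are all the same SSA def (ignoring self-references) is redundant and can be replaced by that def. A condensed sketch, leaving out the matching_mov() refinement the file also applies:

    #include "nir.h"

    static bool
    try_remove_trivial_phi(nir_phi_instr *phi)
    {
       nir_ssa_def *def = NULL;

       nir_foreach_phi_src(src, phi) {
          if (src->src.ssa == &phi->dest.ssa)
             continue;                    /* self-reference through the loop */
          if (def == NULL)
             def = src->src.ssa;
          else if (src->src.ssa != def)
             return false;                /* genuinely merges two values */
       }

       if (def == NULL)
          return false;

       nir_ssa_def_rewrite_uses(&phi->dest.ssa, nir_src_for_ssa(def));
       nir_instr_remove(&phi->instr);
       return true;
    }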
|
D | nir_search_helpers.h |
    120  bool zero_if_use = list_empty(&instr->dest.dest.ssa.if_uses);   in is_used_more_than_once()
    121  bool zero_use = list_empty(&instr->dest.dest.ssa.uses);   in is_used_more_than_once()
    125  else if (zero_use && list_is_singular(&instr->dest.dest.ssa.if_uses))   in is_used_more_than_once()
    127  else if (zero_if_use && list_is_singular(&instr->dest.dest.ssa.uses))   in is_used_more_than_once()
    136  bool zero_if_use = list_empty(&instr->dest.dest.ssa.if_uses);   in is_used_once()
    137  bool zero_use = list_empty(&instr->dest.dest.ssa.uses);   in is_used_once()
    142  if (!zero_if_use && list_is_singular(&instr->dest.dest.ssa.uses))   in is_used_once()
    145  if (!zero_use && list_is_singular(&instr->dest.dest.ssa.if_uses))   in is_used_once()
    148  if (!list_is_singular(&instr->dest.dest.ssa.if_uses) &&   in is_used_once()
    149  !list_is_singular(&instr->dest.dest.ssa.uses))   in is_used_once()
    [all …]
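
The two helpers above count a def's users across its two use lists (regular uses and if-condition uses). The single-use test condenses to the following sketch; the helper name is illustrative:

    #include "nir.h"

    static inline bool
    ssa_def_used_once(nir_ssa_def *def)
    {
       /* exactly one user: one list empty, the other singular */
       if (list_empty(&def->uses))
          return list_is_singular(&def->if_uses);
       if (list_empty(&def->if_uses))
          return list_is_singular(&def->uses);
       return false;
    }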
|
D | nir_lower_phis_to_scalar.c |
    57   nir_instr *src_instr = src->src.ssa->parent_instr;   in is_phi_src_scalarizable()
    134  if (phi->dest.ssa.num_components == 1)   in should_lower_phi()
    192  unsigned bit_size = phi->dest.ssa.bit_size;   in lower_phis_to_scalar_block()
    199  switch (phi->dest.ssa.num_components) {   in lower_phis_to_scalar_block()
    208  phi->dest.ssa.num_components,   in lower_phis_to_scalar_block()
    210  vec->dest.write_mask = (1 << phi->dest.ssa.num_components) - 1;   in lower_phis_to_scalar_block()
    212  for (unsigned i = 0; i < phi->dest.ssa.num_components; i++) {   in lower_phis_to_scalar_block()
    215  phi->dest.ssa.bit_size, NULL);   in lower_phis_to_scalar_block()
    217  vec->src[i].src = nir_src_for_ssa(&new_phi->dest.ssa);   in lower_phis_to_scalar_block()
    237  new_src->src = nir_src_for_ssa(&mov->dest.dest.ssa);   in lower_phis_to_scalar_block()
    [all …]
|
D | nir_from_ssa.c |
    342  phi->dest.ssa.num_components,   in isolate_phi_nodes_block()
    343  phi->dest.ssa.bit_size, src->src.ssa->name);   in isolate_phi_nodes_block()
    350  nir_src_for_ssa(&entry->dest.ssa));   in isolate_phi_nodes_block()
    356  phi->dest.ssa.num_components, phi->dest.ssa.bit_size,   in isolate_phi_nodes_block()
    357  phi->dest.ssa.name);   in isolate_phi_nodes_block()
    360  nir_ssa_def_rewrite_uses(&phi->dest.ssa,   in isolate_phi_nodes_block()
    361  nir_src_for_ssa(&entry->dest.ssa));   in isolate_phi_nodes_block()
    364  nir_src_for_ssa(&phi->dest.ssa));   in isolate_phi_nodes_block()
    381  merge_node *dest_node = get_merge_node(&phi->dest.ssa, state);   in coalesce_phi_nodes_block()
    385  merge_node *src_node = get_merge_node(src->src.ssa, state);   in coalesce_phi_nodes_block()
    [all …]
|
D | nir_opt_copy_prop_vars.c |
    58   nir_ssa_def *ssa[4];   member
    170  if (a_arr->indirect.ssa == b_arr->indirect.ssa) {   in compare_derefs()
    340  entry->src.ssa[i] = value->ssa[i];   in store_to_entry()
    390  if (value->ssa[i])   in load_from_ssa_entry_value()
    393  if (value->ssa[i] != value->ssa[0])   in load_from_ssa_entry_value()
    406  (available & nir_ssa_def_components_read(&intrin->dest.ssa)) == 0) {   in load_from_ssa_entry_value()
    417  intrin->intrinsic == nir_intrinsic_load_var ? &intrin->dest.ssa : NULL;   in load_from_ssa_entry_value()
    422  if (value->ssa[i]) {   in load_from_ssa_entry_value()
    423  comps[i] = nir_channel(b, value->ssa[i], i);   in load_from_ssa_entry_value()
    440  value->ssa[i] = vec;   in load_from_ssa_entry_value()
    [all …]
|
D | nir_instr_set.c |
    33   hash = HASH(hash, src->ssa);   in hash_src()
    54   hash = HASH(hash, instr->dest.dest.ssa.num_components);   in hash_alu()
    55   hash = HASH(hash, instr->dest.dest.ssa.bit_size);   in hash_alu()
    130  hash = HASH(hash, instr->dest.ssa.num_components);   in hash_intrinsic()
    131  hash = HASH(hash, instr->dest.ssa.bit_size);   in hash_intrinsic()
    209  return src1.ssa == src2.ssa;   in nir_srcs_equal()
    270  if (alu1->dest.dest.ssa.num_components != alu2->dest.dest.ssa.num_components)   in nir_instrs_equal()
    273  if (alu1->dest.dest.ssa.bit_size != alu2->dest.dest.ssa.bit_size)   in nir_instrs_equal()
    369  if (info->has_dest && intrinsic1->dest.ssa.num_components !=   in nir_instrs_equal()
    370  intrinsic2->dest.ssa.num_components)   in nir_instrs_equal()
    [all …]
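
These hits come from the CSE machinery: instructions are hashed and compared on their operands plus the destination's shape (component count and bit size), so only value-identical instructions can land in the same set entry. A toy illustration of the destination part; HASH() in the real file is a macro over Mesa's FNV-1a helpers, replaced here by a stand-in mix function:

    #include "nir.h"

    static uint32_t
    hash_combine_u32(uint32_t hash, uint32_t data)
    {
       /* stand-in for the file's HASH() macro (FNV-1a accumulate) */
       return (hash ^ data) * 0x01000193u;
    }

    static uint32_t
    hash_alu_dest_shape(uint32_t hash, const nir_alu_instr *alu)
    {
       hash = hash_combine_u32(hash, alu->dest.dest.ssa.num_components);
       hash = hash_combine_u32(hash, alu->dest.dest.ssa.bit_size);
       return hash;
    }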
|
D | nir_search.c |
    62   if (src.ssa->parent_instr->type == nir_instr_type_alu) {   in src_is_type()
    63   nir_alu_instr *src_alu = nir_instr_as_alu(src.ssa->parent_instr);   in src_is_type()
    81   } else if (src.ssa->parent_instr->type == nir_instr_type_intrinsic) {   in src_is_type()
    82   nir_intrinsic_instr *intr = nir_instr_as_intrinsic(src.ssa->parent_instr);   in src_is_type()
    129  if (instr->src[src].src.ssa->parent_instr->type != nir_instr_type_alu)   in match_value()
    133  nir_instr_as_alu(instr->src[src].src.ssa->parent_instr),   in match_value()
    141  if (state->variables[var->variable].src.ssa != instr->src[src].src.ssa)   in match_value()
    154  instr->src[src].src.ssa->parent_instr->type != nir_instr_type_load_const)   in match_value()
    186  if (instr->src[src].src.ssa->parent_instr->type != nir_instr_type_load_const)   in match_value()
    190  nir_instr_as_load_const(instr->src[src].src.ssa->parent_instr);   in match_value()
    [all …]
|
D | nir_lower_atomics.c |
    135  mul->src[1].src.ssa = &atomic_counter_size->def;   in lower_instr()
    142  add->src[0].src.ssa = &mul->dest.dest.ssa;   in lower_instr()
    144  add->src[1].src.ssa = offset_def;   in lower_instr()
    147  offset_def = &add->dest.dest.ssa;   in lower_instr()
    152  new_instr->src[0].ssa = offset_def;   in lower_instr()
    162  instr->dest.ssa.num_components, 32, NULL);   in lower_instr()
    163  nir_ssa_def_rewrite_uses(&instr->dest.ssa,   in lower_instr()
    164  nir_src_for_ssa(&new_instr->dest.ssa));   in lower_instr()
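
The mul/add pair above computes a buffer offset for an indexed atomic counter: offset = base + array_index * counter_size. The same arithmetic expressed with nir_builder helpers, as a sketch; the constant and helper name are assumptions, not the pass's interface:

    #include "nir_builder.h"   /* assumed include path */

    static nir_ssa_def *
    build_counter_offset(nir_builder *b, nir_ssa_def *base, nir_ssa_def *index)
    {
       const int counter_size = 4;   /* bytes per atomic counter, assumed */
       return nir_iadd(b, base,
                       nir_imul(b, index, nir_imm_int(b, counter_size)));
    }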
|
D | nir_opt_constant_folding.c |
    60   bit_size = instr->dest.dest.ssa.bit_size;   in constant_fold_alu_instr()
    68   bit_size = instr->src[i].src.ssa->bit_size;   in constant_fold_alu_instr()
    71   nir_instr *src_instr = instr->src[i].src.ssa->parent_instr;   in constant_fold_alu_instr()
    96   nir_eval_const_opcode(instr->op, instr->dest.dest.ssa.num_components,   in constant_fold_alu_instr()
    101  instr->dest.dest.ssa.num_components,   in constant_fold_alu_instr()
    102  instr->dest.dest.ssa.bit_size);   in constant_fold_alu_instr()
    108  nir_ssa_def_rewrite_uses(&instr->dest.dest.ssa,   in constant_fold_alu_instr()
    130  arr->indirect.ssa->parent_instr->type == nir_instr_type_load_const) {   in constant_fold_deref()
    132  nir_instr_as_load_const(arr->indirect.ssa->parent_instr);   in constant_fold_deref()
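
The shape of constant_fold_alu_instr() above: when every source comes from a load_const, evaluate the opcode at compile time and rewrite all users to a fresh constant. A toy version restricted to a scalar 32-bit iadd, so it can avoid nir_eval_const_opcode() entirely:

    #include "nir_builder.h"   /* assumed include path */

    static bool
    fold_scalar_iadd(nir_builder *b, nir_alu_instr *alu)
    {
       if (alu->op != nir_op_iadd ||
           alu->dest.dest.ssa.num_components != 1 ||
           alu->dest.dest.ssa.bit_size != 32)
          return false;

       uint32_t v[2];
       for (unsigned i = 0; i < 2; i++) {
          nir_instr *parent = alu->src[i].src.ssa->parent_instr;
          if (parent->type != nir_instr_type_load_const)
             return false;
          v[i] = nir_instr_as_load_const(parent)->value.u32[alu->src[i].swizzle[0]];
       }

       b->cursor = nir_before_instr(&alu->instr);
       nir_ssa_def *folded = nir_imm_int(b, (int32_t)(v[0] + v[1]));  /* wraps like iadd */
       nir_ssa_def_rewrite_uses(&alu->dest.dest.ssa, nir_src_for_ssa(folded));
       nir_instr_remove(&alu->instr);
       return true;
    }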
|
D | nir_opt_copy_propagate.c |
    90   def = instr->src[i].src.ssa;   in is_swizzleless_move()
    91   } else if (instr->src[i].src.ssa != def) {   in is_swizzleless_move()
    110  nir_instr *src_instr = src->ssa->parent_instr;   in copy_prop_src()
    125  if (phi->dest.ssa.num_components !=   in copy_prop_src()
    126  alu_instr->src[0].src.ssa->num_components)   in copy_prop_src()
    132  nir_src_for_ssa(alu_instr->src[0].src.ssa));   in copy_prop_src()
    136  nir_src_for_ssa(alu_instr->src[0].src.ssa));   in copy_prop_src()
    153  nir_instr *src_instr = src->src.ssa->parent_instr;   in copy_prop_alu_src()
    168  def = alu_instr->src[0].src.ssa;   in copy_prop_alu_src()
    176  nir_ssa_def *new_def = alu_instr->src[src->swizzle[i]].src.ssa;   in copy_prop_alu_src()
|
D | nir_opt_peephole_select.c |
    128  if (!list_empty(&mov->dest.dest.ssa.if_uses))   in block_check_for_allowed_instrs()
    132  nir_foreach_use(use, &mov->dest.dest.ssa) {   in block_check_for_allowed_instrs()
    222  phi->dest.ssa.num_components,   in nir_opt_peephole_select_block()
    223  phi->dest.ssa.bit_size, phi->dest.ssa.name);   in nir_opt_peephole_select_block()
    224  sel->dest.write_mask = (1 << phi->dest.ssa.num_components) - 1;   in nir_opt_peephole_select_block()
    226  nir_ssa_def_rewrite_uses(&phi->dest.ssa,   in nir_opt_peephole_select_block()
    227  nir_src_for_ssa(&sel->dest.dest.ssa));   in nir_opt_peephole_select_block()
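
The bcsel construction above is the heart of the peephole: once both sides of an if are shown to be trivial, each phi that merged the two arms is replaced by a select on the if condition. A simplified sketch using the nir_builder helper instead of hand-building the ALU; then_def/else_def stand for the values coming from the two predecessors:

    #include "nir_builder.h"   /* assumed include path */

    static void
    phi_to_bcsel(nir_builder *b, nir_phi_instr *phi, nir_ssa_def *cond,
                 nir_ssa_def *then_def, nir_ssa_def *else_def)
    {
       /* the select must land after the block's phis */
       b->cursor = nir_after_phis(phi->instr.block);

       nir_ssa_def *sel = nir_bcsel(b, cond, then_def, else_def);
       nir_ssa_def_rewrite_uses(&phi->dest.ssa, nir_src_for_ssa(sel));
       nir_instr_remove(&phi->instr);
    }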
|
D | nir_lower_vec_to_movs.c |
    129  assert(vec->src[start_idx].src.ssa);   in try_coalesce()
    134  nir_foreach_use(src, vec->src[start_idx].src.ssa) {   in try_coalesce()
    143  if (!list_empty(&vec->src[start_idx].src.ssa->if_uses))   in try_coalesce()
    146  if (vec->src[start_idx].src.ssa->parent_instr->type != nir_instr_type_alu)   in try_coalesce()
    150  nir_instr_as_alu(vec->src[start_idx].src.ssa->parent_instr);   in try_coalesce()
    186  vec->src[i].src.ssa != &src_alu->dest.dest.ssa)   in try_coalesce()
    236  reg->num_components = vec->dest.dest.ssa.num_components;   in lower_vec_to_movs_block()
    237  reg->bit_size = vec->dest.dest.ssa.bit_size;   in lower_vec_to_movs_block()
    239  nir_ssa_def_rewrite_uses(&vec->dest.dest.ssa, nir_src_for_reg(reg));   in lower_vec_to_movs_block()
|
D | nir_lower_to_source_mods.c |
    49   if (alu->src[i].src.ssa->parent_instr->type != nir_instr_type_alu)   in nir_lower_to_source_mods_block()
    52   nir_alu_instr *parent = nir_instr_as_alu(alu->src[i].src.ssa->parent_instr);   in nir_lower_to_source_mods_block()
    91   if (list_empty(&parent->dest.dest.ssa.uses) &&   in nir_lower_to_source_mods_block()
    92   list_empty(&parent->dest.dest.ssa.if_uses))   in nir_lower_to_source_mods_block()
    135  if (!list_empty(&alu->dest.dest.ssa.if_uses))   in nir_lower_to_source_mods_block()
    139  nir_foreach_use(child_src, &alu->dest.dest.ssa) {   in nir_lower_to_source_mods_block()
    165  nir_foreach_use(child_src, &alu->dest.dest.ssa) {   in nir_lower_to_source_mods_block()
|
D | nir_builder.h |
    201  instr->src[i].src.ssa->num_components);   in nir_build_alu()
    212  unsigned src_bit_size = instr->src[i].src.ssa->bit_size;   in nir_build_alu()
    229  for (unsigned j = instr->src[i].src.ssa->num_components; j < 4; j++) {   in nir_build_alu()
    230  instr->src[i].swizzle[j] = instr->src[i].src.ssa->num_components - 1;   in nir_build_alu()
    240  return &instr->dest.dest.ssa;   in nir_build_alu()
    277  return &mov->dest.dest.ssa;   in nir_fmov_alu()
    291  return &mov->dest.dest.ssa;   in nir_imov_alu()
    376  if (src.is_ssa && src.ssa->num_components == num_components)   in nir_ssa_for_src()
    377  return src.ssa;   in nir_ssa_for_src()
    398  if (src->src.is_ssa && (src->src.ssa->num_components == num_components) &&   in nir_ssa_for_alu_src()
    [all …]
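
nir_build_alu() is the funnel that every generated nir_fxxx()/nir_ixxx() helper goes through: it infers the destination width from the sources, pads swizzles, and returns the new SSA def. A small usage example of the resulting API, assumed caller-side code rather than part of the header:

    #include "nir_builder.h"   /* assumed include path */

    /* clamp(x * scale, 0.0, 1.0) */
    static nir_ssa_def *
    emit_saturated_scale(nir_builder *b, nir_ssa_def *x, float scale)
    {
       return nir_fsat(b, nir_fmul(b, x, nir_imm_float(b, scale)));
    }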
|
D | nir_lower_tex.c |
    116  nir_ssa_def *offset = tex->src[offset_index].src.ssa;   in lower_offset()
    117  nir_ssa_def *coord = tex->src[coord_index].src.ssa;   in lower_offset()
    178  return nir_i2f(b, &txs->dest.ssa);   in get_texture_size()
    230  return &plane_tex->dest.ssa;   in sample_plane()
    257  nir_ssa_def_rewrite_uses(&tex->dest.ssa, nir_src_for_ssa(result));   in convert_yuv_to_rgb()
    347  tex->dest.ssa.num_components, 32, NULL);   in replace_gradient_with_lod()
    350  nir_ssa_def_rewrite_uses(&tex->dest.ssa, nir_src_for_ssa(&txl->dest.ssa));   in replace_gradient_with_lod()
    423  tex->src[nir_tex_instr_src_index(tex, nir_tex_src_coord)].src.ssa;   in lower_gradient_cube_map()
    427  tex->src[nir_tex_instr_src_index(tex, nir_tex_src_ddx)].src.ssa;   in lower_gradient_cube_map()
    429  tex->src[nir_tex_instr_src_index(tex, nir_tex_src_ddy)].src.ssa;   in lower_gradient_cube_map()
    [all …]
|
D | nir_loop_analyze.c |
    169  if (!mark_invariant(alu->src[i].src.ssa, state)) {   in mark_invariant()
    232  nir_loop_variable *src_var = get_loop_var(src->src.ssa, state);   in compute_induction_information()
    251  if (alu->src[i].src.ssa->parent_instr->type == nir_instr_type_load_const &&   in compute_induction_information()
    252  alu->src[1-i].src.ssa == &phi->dest.ssa)   in compute_induction_information()
    253  biv->invariant = get_loop_var(alu->src[i].src.ssa, state);   in compute_induction_information()
    338  if (nif->condition.ssa->parent_instr->type == nir_instr_type_phi)   in find_loop_terminators()
    351  terminator->conditional_instr = nif->condition.ssa->parent_instr;   in find_loop_terminators()
    488  if (cond_alu->src[0].src.ssa == alu_def->def ||   in calculate_iterations()
    489  cond_alu->src[1].src.ssa == alu_def->def) {   in calculate_iterations()
    563  basic_ind = get_loop_var(alu->src[0].src.ssa, state);   in find_trip_count()
    [all …]
|
D | nir_move_vec_src_uses_to_dest.c |
    114  if (vec->src[j].src.ssa != vec->src[i].src.ssa)   in move_vec_src_uses_to_dest_block()
    124  nir_foreach_use_safe(use, vec->src[i].src.ssa) {   in move_vec_src_uses_to_dest_block()
    129  if (!ssa_def_dominates_instr(&vec->dest.dest.ssa, use->parent_instr))   in move_vec_src_uses_to_dest_block()
    164  nir_src_for_ssa(&vec->dest.dest.ssa));   in move_vec_src_uses_to_dest_block()
|
D | nir_opt_undef.c |
    50   nir_instr *parent = instr->src[i].src.ssa->parent_instr;   in opt_undef_csel()
    91   alu->src[i].src.ssa->parent_instr->type != nir_instr_type_ssa_undef)   in opt_undef_vecN()
    96   nir_ssa_def *undef = nir_ssa_undef(b, alu->dest.dest.ssa.num_components,   in opt_undef_vecN()
    98   nir_ssa_def_rewrite_uses(&alu->dest.dest.ssa, nir_src_for_ssa(undef));   in opt_undef_vecN()
    122  intrin->src[0].ssa->parent_instr->type != nir_instr_type_ssa_undef)   in opt_undef_store()
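
opt_undef_vecN() above relies on the fact that an undef is unconstrained: an instruction whose sources are all undef can itself be replaced by one undef of the destination's width. A rough sketch of that rewrite; the real pass restricts itself to vec2/vec3/vec4:

    #include "nir_builder.h"   /* assumed include path */

    static bool
    fold_all_undef_sources(nir_builder *b, nir_alu_instr *alu)
    {
       unsigned num_srcs = nir_op_infos[alu->op].num_inputs;
       for (unsigned i = 0; i < num_srcs; i++) {
          if (alu->src[i].src.ssa->parent_instr->type != nir_instr_type_ssa_undef)
             return false;
       }

       b->cursor = nir_before_instr(&alu->instr);
       nir_ssa_def *undef = nir_ssa_undef(b, alu->dest.dest.ssa.num_components,
                                          alu->dest.dest.ssa.bit_size);
       nir_ssa_def_rewrite_uses(&alu->dest.dest.ssa, nir_src_for_ssa(undef));
       nir_instr_remove(&alu->instr);
       return true;
    }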
|
/external/mesa3d/src/mesa/drivers/dri/i965/ |
D | brw_nir_opt_peephole_ffma.c |
    58   if (!are_all_uses_fadd(&use_alu->dest.dest.ssa))   in are_all_uses_fadd()
    77   nir_instr *instr = src->src.ssa->parent_instr;   in get_mul_for_src()
    115  if (!are_all_uses_fadd(&alu->dest.dest.ssa))   in get_mul_for_src()
    150  if (srcs[i].src.ssa->parent_instr->type == nir_instr_type_load_const) {   in any_alu_src_is_a_constant()
    152  nir_instr_as_load_const (srcs[i].src.ssa->parent_instr);   in any_alu_src_is_a_constant()
    188  if (add->src[0].src.ssa == add->src[1].src.ssa)   in brw_nir_opt_peephole_ffma_block()
    202  add->dest.dest.ssa.num_components,   in brw_nir_opt_peephole_ffma_block()
    212  unsigned bit_size = add->dest.dest.ssa.bit_size;   in brw_nir_opt_peephole_ffma_block()
    215  mul_src[0] = mul->src[0].src.ssa;   in brw_nir_opt_peephole_ffma_block()
    216  mul_src[1] = mul->src[1].src.ssa;   in brw_nir_opt_peephole_ffma_block()
    [all …]
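
The pass fuses an fadd with the fmul feeding it into a single ffma. A heavily reduced sketch of that rewrite, restricted to the all-scalar case so swizzles can be ignored; the use-count, constant, and precision checks visible in the hits above are omitted:

    #include "nir_builder.h"   /* assumed include path */

    static bool
    fuse_scalar_ffma(nir_builder *b, nir_alu_instr *add)
    {
       if (add->op != nir_op_fadd || add->dest.dest.ssa.num_components != 1)
          return false;

       /* the first source must be a plain (unmodified) scalar fmul result */
       if (add->src[0].abs || add->src[0].negate)
          return false;

       nir_instr *parent = add->src[0].src.ssa->parent_instr;
       if (parent->type != nir_instr_type_alu)
          return false;

       nir_alu_instr *mul = nir_instr_as_alu(parent);
       if (mul->op != nir_op_fmul || mul->dest.dest.ssa.num_components != 1)
          return false;

       b->cursor = nir_before_instr(&add->instr);
       nir_ssa_def *fused = nir_ffma(b,
                                     nir_ssa_for_alu_src(b, mul, 0),
                                     nir_ssa_for_alu_src(b, mul, 1),
                                     nir_ssa_for_alu_src(b, add, 1));
       nir_ssa_def_rewrite_uses(&add->dest.dest.ssa, nir_src_for_ssa(fused));
       nir_instr_remove(&add->instr);
       return true;
    }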
|
/external/mesa3d/src/gallium/drivers/freedreno/ir3/ |
D | ir3_nir_lower_if_else.c |
    72   assert(list_empty(&dest->ssa.if_uses));   in valid_dest()
    77   nir_foreach_use(use, &dest->ssa) {   in valid_dest()
    182  discard_cond = nir_iand(bld, condition, intr->src[0].ssa);   in flatten_block()
    258  if_stmt->condition.ssa, false);   in lower_if_else_block()
    260  if_stmt->condition.ssa, true);   in lower_if_else_block()
    282  phi->dest.ssa.num_components, 32, phi->dest.ssa.name);   in lower_if_else_block()
    283  sel->dest.write_mask = (1 << phi->dest.ssa.num_components) - 1;   in lower_if_else_block()
    285  nir_ssa_def_rewrite_uses(&phi->dest.ssa,   in lower_if_else_block()
    286  nir_src_for_ssa(&sel->dest.dest.ssa));   in lower_if_else_block()
|
D | ir3_cp.c |
    50   struct ir3_instruction *src_instr = ssa(src);   in is_eligible_mov()
    243  struct ir3_instruction *srcsrc = ssa(src->regs[1]);   in combine_flags()
    315  struct ir3_instruction *src = ssa(reg);   in reg_cp()
    336  reg->instr = ssa(src_reg);   in reg_cp()
    339  src = ssa(reg); /* could be null for IR3_REG_ARRAY case */   in reg_cp()
    478  struct ir3_instruction *src_instr = ssa(reg);   in eliminate_output_mov()
    503  struct ir3_instruction *src = ssa(reg);   in instr_cp()
    520  struct ir3_instruction *src = ssa(instr->regs[0]);   in instr_cp()
    541  ssa(instr->regs[1]) &&   in instr_cp()
    544  struct ir3_instruction *cond = ssa(instr->regs[1]);   in instr_cp()
|
/external/mesa3d/src/intel/vulkan/ |
D | anv_nir_apply_dynamic_offsets.c |
    54   nir_instr *res_instr = intrin->src[block_idx_src].ssa->parent_instr;   in apply_dynamic_offsets_block()
    78   offset_load->src[0] = nir_src_for_ssa(nir_imul(b, res_intrin->src[0].ssa,   in apply_dynamic_offsets_block()
    86   nir_ssa_def *new_offset = nir_iadd(b, old_offset, &offset_load->dest.ssa);   in apply_dynamic_offsets_block()
    94   nir_ssa_def *pred = nir_uge(b, nir_channel(b, &offset_load->dest.ssa, 1),   in apply_dynamic_offsets_block()
    108  intrin->dest.ssa.bit_size, NULL);   in apply_dynamic_offsets_block()
    113  src1->src = nir_src_for_ssa(&intrin->dest.ssa);   in apply_dynamic_offsets_block()
    119  intrin->dest.ssa.bit_size, zero_val);   in apply_dynamic_offsets_block()
    128  nir_ssa_def_rewrite_uses(&intrin->dest.ssa,   in apply_dynamic_offsets_block()
    129  nir_src_for_ssa(&phi->dest.ssa));   in apply_dynamic_offsets_block()
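
A hypothetical condensation of the arithmetic visible above: the dynamic (offset, range) pair loaded for the descriptor is added to the original offset, and a predicate flags accesses past the dynamic range so the load result can later be zeroed through a phi. Parameter names and the exact bounds test are illustrative, not the pass's real interface:

    #include "nir_builder.h"   /* assumed include path */

    static nir_ssa_def *
    apply_dynamic_offset(nir_builder *b, nir_ssa_def *old_offset,
                         nir_ssa_def *offset_and_range, /* vec2: (offset, range) */
                         nir_ssa_def **out_of_bounds)
    {
       nir_ssa_def *new_offset =
          nir_iadd(b, old_offset, nir_channel(b, offset_and_range, 0));
       *out_of_bounds =
          nir_uge(b, new_offset, nir_channel(b, offset_and_range, 1));
       return new_offset;
    }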
|