References to the identifier `intrin` in the Mesa sources, grouped by directory and file. Excerpts keep their original source line numbers; "[all …]" marks matches truncated by the search tool.

/third_party/mesa3d/src/compiler/nir/

nir_lower_subgroups.c

  in lower_subgroups_64bit_split_intrinsic(), where intrin is an argument:
     33  lower_subgroups_64bit_split_intrinsic(nir_builder *b, nir_intrinsic_instr *intrin,
     38     comp = nir_unpack_64_2x32_split_x(b, intrin->src[0].ssa);
     40     comp = nir_unpack_64_2x32_split_y(b, intrin->src[0].ssa);
     42     nir_intrinsic_instr *intr = nir_intrinsic_instr_create(b->shader, intrin->intrinsic);
     44     intr->const_index[0] = intrin->const_index[0];
     45     intr->const_index[1] = intrin->const_index[1];
     47     if (nir_intrinsic_infos[intrin->intrinsic].num_srcs == 2)
     48        nir_src_copy(&intr->src[1], &intrin->src[1]);

  in lower_subgroup_op_to_32bit(), where intrin is an argument:
     56  lower_subgroup_op_to_32bit(nir_builder *b, nir_intrinsic_instr *intrin)
     58     assert(intrin->src[0].ssa->bit_size == 64);
  [all …]

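This pass shows the standard NIR recipe for 64-bit lowering: pull each 32-bit half out of the source with nir_unpack_64_2x32_split_x/_y, run the same intrinsic on each half, and fuse the halves back together. A minimal sketch of that shape, assuming a single-source subgroup intrinsic with a scalar destination (the helper name is hypothetical):

    #include "nir_builder.h"

    /* Sketch: rebuild a 64-bit subgroup op as two 32-bit ops. */
    static nir_ssa_def *
    build_32bit_halves(nir_builder *b, nir_intrinsic_instr *intrin)
    {
       assert(intrin->src[0].ssa->bit_size == 64);

       nir_ssa_def *halves[2] = {
          nir_unpack_64_2x32_split_x(b, intrin->src[0].ssa),
          nir_unpack_64_2x32_split_y(b, intrin->src[0].ssa),
       };

       for (unsigned i = 0; i < 2; i++) {
          /* Clone the opcode and const indices, but feed one 32-bit half. */
          nir_intrinsic_instr *intr =
             nir_intrinsic_instr_create(b->shader, intrin->intrinsic);
          intr->num_components = 1;
          intr->const_index[0] = intrin->const_index[0];
          intr->const_index[1] = intrin->const_index[1];
          intr->src[0] = nir_src_for_ssa(halves[i]);
          nir_ssa_dest_init(&intr->instr, &intr->dest, 1, 32, NULL);
          nir_builder_instr_insert(b, &intr->instr);
          halves[i] = &intr->dest.ssa;
       }

       /* Fuse the two 32-bit results back into one 64-bit value. */
       return nir_pack_64_2x32_split(b, halves[0], halves[1]);
    }
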
nir_opt_uniform_atomics.c

  in parse_atomic_op(), where intrin is a macro parameter:
     47  #define OP_NOIMG(intrin, alu) \
     48     case nir_intrinsic_ssbo_atomic_##intrin: \
     53     case nir_intrinsic_shared_atomic_##intrin: \
     54     case nir_intrinsic_global_atomic_##intrin: \
     55     case nir_intrinsic_deref_atomic_##intrin: \
     60     case nir_intrinsic_global_atomic_##intrin##_amd: \
     65  #define OP(intrin, alu) \
     66     OP_NOIMG(intrin, alu) \
     67     case nir_intrinsic_image_deref_atomic_##intrin: \
     68     case nir_intrinsic_image_atomic_##intrin: \
  [all …]

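Here intrin is not an instruction pointer but a token-pasting macro parameter: each OP/OP_NOIMG expansion fans a single atomic name out across every storage class that carries it. A compressed sketch of what such a switch looks like once the macro is applied (the exact case list and the default arm are assumptions, and the intrinsic names follow the pre-unification scheme visible above):

    #include "nir.h"

    /* Sketch: map each flavour of an atomic onto the ALU op that
     * combines its operands, in the style of parse_atomic_op(). */
    static nir_op
    atomic_op_to_alu(nir_intrinsic_op op)
    {
    #define OP_NOIMG(intrin, alu)                    \
       case nir_intrinsic_ssbo_atomic_##intrin:      \
       case nir_intrinsic_shared_atomic_##intrin:    \
       case nir_intrinsic_global_atomic_##intrin:    \
       case nir_intrinsic_deref_atomic_##intrin:     \
          return nir_op_##alu;

       switch (op) {
       OP_NOIMG(add, iadd)
       OP_NOIMG(umin, umin)
       OP_NOIMG(and, iand)
       default:
          unreachable("not a handled atomic op");
       }
    #undef OP_NOIMG
    }
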
nir_opt_offsets.c

  in try_fold_load_store(), where intrin is an argument:
    106                       nir_intrinsic_instr *intrin,
    116     unsigned off_const = nir_intrinsic_base(intrin);
    117     nir_src *off_src = &intrin->src[offset_src_idx];
    130     b->cursor = nir_before_instr(&intrin->instr);
    134     b->cursor = nir_before_instr(&intrin->instr);
    141     … nir_instr_rewrite_src(&intrin->instr, &intrin->src[offset_src_idx], nir_src_for_ssa(replace_src));
    142     nir_intrinsic_set_base(intrin, off_const);

  in try_fold_shared2(), where intrin is an argument:
    148                       nir_intrinsic_instr *intrin,
    152     unsigned comp_size = (intrin->intrinsic == nir_intrinsic_load_shared2_amd ?
    153                           intrin->dest.ssa.bit_size : intrin->src[0].ssa->bit_size) / 8;
  [all …]

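try_fold_load_store() folds additions feeding the offset source into the intrinsic's constant base index, so later passes see one immediate instead of an ALU chain. The simplest case, sketched under the assumption that the whole offset source has become constant (the helper name is mine):

    #include "nir_builder.h"

    /* Sketch: absorb a constant offset source into nir_intrinsic_base(). */
    static void
    fold_const_offset(nir_builder *b, nir_intrinsic_instr *intrin,
                      unsigned offset_src_idx)
    {
       nir_src *off_src = &intrin->src[offset_src_idx];
       if (!nir_src_is_const(*off_src))
          return;

       /* Accumulate the constant into the base index... */
       unsigned off_const = nir_intrinsic_base(intrin) +
                            nir_src_as_uint(*off_src);

       /* ...and leave a zero behind as the variable part. */
       b->cursor = nir_before_instr(&intrin->instr);
       nir_instr_rewrite_src(&intrin->instr, off_src,
                             nir_src_for_ssa(nir_imm_int(b, 0)));
       nir_intrinsic_set_base(intrin, off_const);
    }
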
nir_lower_scratch.c

  in lower_load_store(), where intrin is an argument:
     39                    nir_intrinsic_instr *intrin,
     42     b->cursor = nir_before_instr(&intrin->instr);
     44     nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
     54     if (intrin->intrinsic == nir_intrinsic_load_deref) {
     55        unsigned bit_size = intrin->dest.ssa.bit_size;
     57           b, intrin->num_components, bit_size == 1 ? 32 : bit_size, offset, .align_mul=align);
     61        nir_ssa_def_rewrite_uses(&intrin->dest.ssa, value);
     63        assert(intrin->intrinsic == nir_intrinsic_store_deref);
     65        assert(intrin->src[1].is_ssa);
     66        nir_ssa_def *value = intrin->src[1].ssa;
  [all …]

nir_lower_vec3_to_vec4.c

  in lower_vec3_to_vec4_impl(), where intrin is a local:
     63     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     64     switch (intrin->intrinsic) {
     66        if (intrin->num_components != 3)
     69        nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
     73        assert(intrin->dest.is_ssa);
     74        intrin->num_components = 4;
     75        intrin->dest.ssa.num_components = 4;
     77        b.cursor = nir_after_instr(&intrin->instr);
     78        nir_ssa_def *vec3 = nir_channels(&b, &intrin->dest.ssa, 0x7);
     79        nir_ssa_def_rewrite_uses_after(&intrin->dest.ssa,
  [all …]

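The widen-then-reswizzle idiom above deserves spelling out: the load is grown from three to four components in place, and every existing user is then re-pointed at a vec3 swizzle of the widened result. A sketch of just that tail (the surrounding instruction walk is omitted):

    #include "nir_builder.h"

    /* Sketch: grow a vec3 load to vec4, then hand prior users a
     * three-channel view of the new destination. */
    static void
    widen_vec3_load(nir_builder *b, nir_intrinsic_instr *intrin)
    {
       intrin->num_components = 4;
       intrin->dest.ssa.num_components = 4;

       /* The swizzle must sit after the load so it can read the dest. */
       b->cursor = nir_after_instr(&intrin->instr);
       nir_ssa_def *vec3 = nir_channels(b, &intrin->dest.ssa, 0x7);

       /* Re-point every use except the swizzle itself. */
       nir_ssa_def_rewrite_uses_after(&intrin->dest.ssa, vec3,
                                      vec3->parent_instr);
    }
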
nir_lower_array_deref_of_vec.c

  in nir_lower_array_deref_of_vec_impl(), where intrin is a local:
     76     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     77     assert(intrin->intrinsic != nir_intrinsic_copy_deref);
     79     if (intrin->intrinsic != nir_intrinsic_load_deref &&
     80         intrin->intrinsic != nir_intrinsic_interp_deref_at_centroid &&
     81         intrin->intrinsic != nir_intrinsic_interp_deref_at_sample &&
     82         intrin->intrinsic != nir_intrinsic_interp_deref_at_offset &&
     83         intrin->intrinsic != nir_intrinsic_interp_deref_at_vertex &&
     84         intrin->intrinsic != nir_intrinsic_store_deref)
     87     nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
    103     assert(intrin->num_components == 1);
  [all …]

nir_opt_constant_folding.c

  in try_fold_intrinsic(), where intrin is an argument:
    182  try_fold_intrinsic(nir_builder *b, nir_intrinsic_instr *intrin,
    185     switch (intrin->intrinsic) {
    189        if (nir_src_is_const(intrin->src[0])) {
    190           if (nir_src_as_bool(intrin->src[0])) {
    191              b->cursor = nir_before_instr(&intrin->instr);
    193              switch (intrin->intrinsic) {
    210           nir_instr_remove(&intrin->instr);
    216        nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
    219        b->cursor = nir_before_instr(&intrin->instr);
    220        nir_ssa_def *val = nir_build_imm(b, intrin->dest.ssa.num_components,
  [all …]

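The first arm of try_fold_intrinsic() handles conditional intrinsics whose condition has folded to a constant: a constant-true discard_if becomes an unconditional discard, and a constant-false one is simply deleted. That arm, sketched with the pair hard-coded to discard_if/discard for brevity:

    #include "nir_builder.h"

    /* Sketch: fold discard_if(true) to discard; drop discard_if(false). */
    static bool
    fold_const_discard_if(nir_builder *b, nir_intrinsic_instr *intrin)
    {
       if (intrin->intrinsic != nir_intrinsic_discard_if ||
           !nir_src_is_const(intrin->src[0]))
          return false;

       if (nir_src_as_bool(intrin->src[0])) {
          /* Always taken: emit the unconditional form in its place. */
          b->cursor = nir_before_instr(&intrin->instr);
          nir_intrinsic_instr *discard =
             nir_intrinsic_instr_create(b->shader, nir_intrinsic_discard);
          nir_builder_instr_insert(b, &discard->instr);
       }

       /* Either way, the conditional form is now dead. */
       nir_instr_remove(&intrin->instr);
       return true;
    }
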
nir_opt_dead_write_vars.c

  as a struct member:
     50     nir_intrinsic_instr *intrin;

  in update_unused_writes(), where intrin is an argument:
     75                          nir_intrinsic_instr *intrin,
     92        nir_instr_remove(&entry->intrin->instr);
    101           .intrin = intrin,

  in remove_dead_write_vars_local(), where intrin is a local:
    133     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    134     switch (intrin->intrinsic) {
    159        if (nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_RELEASE) {
    161           nir_intrinsic_memory_modes(intrin));
    175        nir_deref_instr *src = nir_src_as_deref(intrin->src[1]);
    183        nir_deref_instr *src = nir_src_as_deref(intrin->src[1]);
  [all …]

nir_lower_io.c

  in lower_load(), where intrin is an argument:
    392  lower_load(nir_intrinsic_instr *intrin, struct lower_io_state *state,
    396     assert(intrin->dest.is_ssa);
    397     if (intrin->dest.ssa.bit_size == 64 &&
    406        while (dest_comp < intrin->dest.ssa.num_components) {
    408              MIN2(intrin->dest.ssa.num_components - dest_comp,
    425        return nir_vec(b, comp64, intrin->dest.ssa.num_components);
    426     } else if (intrin->dest.ssa.bit_size == 1) {
    431        intrin->dest.ssa.num_components, 32,
    435        intrin->dest.ssa.num_components,
    436        intrin->dest.ssa.bit_size,
  [all …]

nir_opt_copy_prop_vars.c

  in value_equals_store_src(), where intrin is an argument:
    108  value_equals_store_src(struct value *value, nir_intrinsic_instr *intrin)
    110     assert(intrin->intrinsic == nir_intrinsic_store_deref);
    111     nir_component_mask_t write_mask = nir_intrinsic_write_mask(intrin);
    113     for (unsigned i = 0; i < intrin->num_components; i++) {
    115        (value->ssa.def[i] != intrin->src[1].ssa ||

  in gather_vars_written(), where intrin is a local:
    166     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    167     switch (intrin->intrinsic) {
    178        if (nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_ACQUIRE)
    179           written->modes |= nir_intrinsic_memory_modes(intrin);
    192        nir_src_as_deref(*nir_get_shader_call_payload_src(intrin));
  [all …]

nir_lower_image.c

  in lower_cube_size(), where intrin is an argument:
     35  lower_cube_size(nir_builder *b, nir_intrinsic_instr *intrin)
     37     assert(nir_intrinsic_image_dim(intrin) == GLSL_SAMPLER_DIM_CUBE);
     39     b->cursor = nir_before_instr(&intrin->instr);
     42        nir_instr_as_intrinsic(nir_instr_clone(b->shader, &intrin->instr));
     49     unsigned coord_comps = intrin->dest.ssa.num_components;
     58     nir_ssa_def *vec = nir_vec_scalars(b, comps, intrin->dest.ssa.num_components);
     59     nir_ssa_def_rewrite_uses(&intrin->dest.ssa, vec);
     60     nir_instr_remove(&intrin->instr);
     61     nir_instr_free(&intrin->instr);

  in lower_image_instr(), where intrin is a local:
     71     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
  [all …]

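lower_cube_size() answers a cube-size query by cloning the intrinsic, asking for the size of the underlying 2D array instead, and rebuilding the cube answer from it; since a cube map stores six faces per cube, the layer count divided by six gives the cube count. A sketch of the recombination step, assuming size holds the cloned 2D-array result (names are mine):

    #include "nir_builder.h"

    /* Sketch: derive a cube size from a 2D-array size query.
     * Layers come in groups of six faces, so layers / 6 = cubes. */
    static nir_ssa_def *
    cube_size_from_array_size(nir_builder *b, nir_ssa_def *size,
                              unsigned coord_comps)
    {
       nir_ssa_def *comps[3];
       comps[0] = nir_channel(b, size, 0);   /* width  */
       comps[1] = nir_channel(b, size, 1);   /* height */
       if (coord_comps >= 3)
          comps[2] = nir_udiv_imm(b, nir_channel(b, size, 2), 6);
       return nir_vec(b, comps, coord_comps);
    }
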
nir_opt_ray_queries.c

  in mark_query_read(), where intrin is an argument:
     37                    nir_intrinsic_instr *intrin)
     39     nir_ssa_def *rq_def = intrin->src[0].ssa;

  in nir_find_ray_queries_read(), where intrin is a local:
     74     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     75     switch (intrin->intrinsic) {
     77        if (list_length(&intrin->dest.ssa.uses) > 0 ||
     78            list_length(&intrin->dest.ssa.if_uses) > 0)
     79           mark_query_read(queries, intrin);
     82        mark_query_read(queries, intrin);

  in nir_replace_unread_queries_instr(), where intrin is a local:
    100     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    101     switch (intrin->intrinsic) {
  [all …]

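The read test above relies on NIR of this vintage keeping two use lists per SSA def, one for ordinary sources and one for if-conditions; a query result counts as read only if at least one list is non-empty. The check as a standalone helper (the name is mine):

    #include "nir.h"

    /* Sketch: a def is read if either use list has an entry. */
    static bool
    ssa_def_is_read(nir_ssa_def *def)
    {
       return list_length(&def->uses) > 0 ||
              list_length(&def->if_uses) > 0;
    }

list_length() walks the whole list; list_is_empty() would short-circuit, but this mirrors the pass as written.
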
nir_lower_readonly_images_to_tex.c

  in is_readonly_image_op(), where intrin is a local:
     77     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     78     if (intrin->intrinsic != nir_intrinsic_image_deref_load &&
     79         intrin->intrinsic != nir_intrinsic_image_deref_size)
     82     nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
     97     access = nir_intrinsic_access(intrin);

  in lower_readonly_image_op(), where intrin is a local:
    108     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    111     switch (intrin->intrinsic) {
    124     nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
    146     switch (intrin->intrinsic) {
    148        assert(intrin->src[1].is_ssa);
  [all …]

nir_lower_io_to_vector.c

  in nir_lower_io_to_vector_impl(), where intrin is a local:
    457     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    459     switch (intrin->intrinsic) {
    465        nir_deref_instr *old_deref = nir_src_as_deref(intrin->src[0]);
    487           ((1 << intrin->num_components) - 1) << old_frac;
    489        b.cursor = nir_before_instr(&intrin->instr);
    503        nir_instr_rewrite_src(&intrin->instr, &intrin->src[0],
    506        intrin->num_components =
    508        intrin->dest.ssa.num_components = intrin->num_components;
    510        b.cursor = nir_after_instr(&intrin->instr);
    512        nir_ssa_def *new_vec = nir_channels(&b, &intrin->dest.ssa,
  [all …]

nir_lower_task_shader.c

  in lower_nv_task_output(), where intrin is a local:
     52     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     54     switch (intrin->intrinsic) {
     59        nir_ssa_def_rewrite_uses(&intrin->dest.ssa, load);
     66        nir_ssa_def *store_val = intrin->src[0].ssa;

  in lower_task_payload_to_shared(), where intrin is an argument:
    178                                nir_intrinsic_instr *intrin,
    184     unsigned base = nir_intrinsic_base(intrin);
    185     intrin->intrinsic = shared_opcode_for_task_payload(intrin->intrinsic);
    186     nir_intrinsic_set_base(intrin, base + s->payload_shared_addr);

  in lower_task_launch_mesh_workgroups(), where intrin is an argument:
    231                                     nir_intrinsic_instr *intrin,
    246     uint32_t payload_addr = nir_intrinsic_base(intrin);
  [all …]

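lower_task_payload_to_shared() is the cheapest kind of lowering: no new instruction is created, the intrinsic is retargeted in place by overwriting its opcode and rebasing its constant offset. A sketch of that move, assuming a load_task_payload to load_shared mapping and a hypothetical shared_base relocation:

    #include "nir.h"

    /* Sketch: retarget a task-payload access onto shared memory in
     * place; sources and destination are left untouched. */
    static void
    retarget_payload_to_shared(nir_intrinsic_instr *intrin,
                               unsigned shared_base)
    {
       assert(intrin->intrinsic == nir_intrinsic_load_task_payload);

       unsigned base = nir_intrinsic_base(intrin);
       intrin->intrinsic = nir_intrinsic_load_shared;
       nir_intrinsic_set_base(intrin, base + shared_base);
    }
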
/third_party/mesa3d/src/amd/common/

ac_nir_lower_tess_io_to_mem.c

  in match_mask(), where intrin is an argument:
    171                nir_intrinsic_instr *intrin,
    175     bool indirect = !nir_src_is_const(*nir_get_io_offset_src(intrin));
    179     uint64_t slot = nir_intrinsic_io_semantics(intrin).location;
    181        intrin->intrinsic != nir_intrinsic_load_per_vertex_input &&
    182        intrin->intrinsic != nir_intrinsic_store_per_vertex_output)

  in tcs_output_needs_vmem(), where intrin is an argument:
    189  tcs_output_needs_vmem(nir_intrinsic_instr *intrin,
    192     uint64_t mask = intrin->intrinsic == nir_intrinsic_store_per_vertex_output
    196     return match_mask(MESA_SHADER_TESS_CTRL, intrin, mask, true);

  in tcs_output_needs_lds(), where intrin is an argument:
    200  tcs_output_needs_lds(nir_intrinsic_instr *intrin,
    203     uint64_t mask = intrin->intrinsic == nir_intrinsic_store_per_vertex_output
  [all …]

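match_mask() reduces the question of where a tessellation output lives to a 64-bit mask test keyed on the intrinsic's I/O semantic location, with indirectly addressed slots conservatively matching everything. The core test, simplified (per-vertex special cases omitted, helper name is mine):

    #include "nir.h"

    /* Sketch: does this I/O intrinsic touch a slot in `mask`?
     * Indirect offsets can't be proven, so count them as a hit. */
    static bool
    io_slot_in_mask(nir_intrinsic_instr *intrin, uint64_t mask)
    {
       if (!nir_src_is_const(*nir_get_io_offset_src(intrin)))
          return true;

       uint64_t slot = nir_intrinsic_io_semantics(intrin).location;
       return (mask & (UINT64_C(1) << slot)) != 0;
    }
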
ac_nir_lower_taskmesh_io_to_mem.c

  in filter_workgroup_id(), where intrin is a local:
     54     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     55     return intrin->intrinsic == nir_intrinsic_load_workgroup_id;

  in replace_workgroup_id_use_first_task(), where intrin is a local:
     64     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     68     if (s->hw_workgroup_id == &intrin->dest.ssa)

  in filter_task_intrinsics(), where intrin is a local:
    255     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    256     return intrin->intrinsic == nir_intrinsic_launch_mesh_workgroups ||
    257            intrin->intrinsic == nir_intrinsic_store_task_payload ||
    258            intrin->intrinsic == nir_intrinsic_load_task_payload;

  in lower_task_launch_mesh_workgroups(), where intrin is an argument:
    263                                     nir_intrinsic_instr *intrin,
    281     nir_ssa_def *dimensions = intrin->src[0].ssa;
  [all …]

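The filter_*/lower_* pairs in this file follow the standard shape for nir_shader_lower_instructions(): a cheap predicate picks out instructions, and the lowering callback returns the def that replaces each one. A minimal sketch of such a pair (the lowering body is a placeholder, not what this pass actually emits):

    #include "nir_builder.h"

    static bool
    filter_example(const nir_instr *instr, UNUSED const void *state)
    {
       if (instr->type != nir_instr_type_intrinsic)
          return false;
       return nir_instr_as_intrinsic(instr)->intrinsic ==
              nir_intrinsic_load_workgroup_id;
    }

    static nir_ssa_def *
    lower_example(nir_builder *b, nir_instr *instr, UNUSED void *state)
    {
       assert(instr->type == nir_instr_type_intrinsic);
       /* Placeholder: pretend the workgroup id is always (0, 0, 0). */
       return nir_imm_zero(b, 3, 32);
    }

    /* Wired up from the pass entry point:
     *    nir_shader_lower_instructions(shader, filter_example,
     *                                  lower_example, NULL);
     */
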
ac_nir_lower_global_access.c

  in process_instr(), where intrin is a local:
     76     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     79     switch (intrin->intrinsic) {
    134     nir_src *addr_src = &intrin->src[addr_src_idx];
    143     b->cursor = nir_before_instr(&intrin->instr);
    152     new_intrin->num_components = intrin->num_components;
    155     nir_ssa_dest_init(&new_intrin->instr, &new_intrin->dest, intrin->dest.ssa.num_components,
    156                       intrin->dest.ssa.bit_size, NULL);
    158     unsigned num_src = nir_intrinsic_infos[intrin->intrinsic].num_srcs;
    160        new_intrin->src[i] = nir_src_for_ssa(intrin->src[i].ssa);
    164     if (nir_intrinsic_has_access(intrin))
  [all …]

/third_party/mesa3d/src/intel/compiler/

brw_nir_lower_mem_access_bit_sizes.c

  in dup_mem_intrinsic(), where intrin is an argument:
     30  dup_mem_intrinsic(nir_builder *b, nir_intrinsic_instr *intrin,
     35     const nir_intrinsic_info *info = &nir_intrinsic_infos[intrin->intrinsic];
     38        nir_intrinsic_instr_create(b->shader, intrin->intrinsic);
     40     nir_src *intrin_offset_src = nir_get_io_offset_src(intrin);
     42        assert(intrin->src[i].is_ssa);
     45        assert(&intrin->src[i] != intrin_offset_src);
     47     } else if (&intrin->src[i] == intrin_offset_src) {
     48        dup->src[i] = nir_src_for_ssa(nir_iadd_imm(b, intrin->src[i].ssa,
     51        dup->src[i] = nir_src_for_ssa(intrin->src[i].ssa);
     56     if (intrin->intrinsic == nir_intrinsic_load_scratch ||
  [all …]

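dup_mem_intrinsic() is the general recipe for splitting one wide memory access into several narrow ones: clone the intrinsic, copy every source through unchanged except the offset, and bump that one by the byte distance of the new chunk. The source-copy loop, condensed (destination setup and index copying are elided and left to the caller):

    #include "nir_builder.h"

    /* Sketch: clone a memory intrinsic with its offset source advanced
     * by `offset` bytes; all other sources pass through unchanged. */
    static nir_intrinsic_instr *
    clone_with_offset(nir_builder *b, nir_intrinsic_instr *intrin,
                      unsigned offset)
    {
       const nir_intrinsic_info *info = &nir_intrinsic_infos[intrin->intrinsic];
       nir_intrinsic_instr *dup =
          nir_intrinsic_instr_create(b->shader, intrin->intrinsic);

       nir_src *offset_src = nir_get_io_offset_src(intrin);
       for (unsigned i = 0; i < info->num_srcs; i++) {
          if (&intrin->src[i] == offset_src) {
             dup->src[i] =
                nir_src_for_ssa(nir_iadd_imm(b, intrin->src[i].ssa, offset));
          } else {
             dup->src[i] = nir_src_for_ssa(intrin->src[i].ssa);
          }
       }
       return dup;
    }
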
/third_party/mesa3d/src/intel/vulkan/

anv_nir_apply_pipeline_layout.c

  in get_used_bindings(), where intrin is a local:
    129     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    130     switch (intrin->intrinsic) {
    132        add_binding(state, nir_intrinsic_desc_set(intrin),
    133                    nir_intrinsic_binding(intrin));
    154        add_deref_src_binding(state, intrin->src[0]);

  in find_descriptor_for_index_src(), where intrin is a local:
    183     nir_intrinsic_instr *intrin = nir_src_as_intrinsic(src);
    185     while (intrin && intrin->intrinsic == nir_intrinsic_vulkan_resource_reindex)
    186        intrin = nir_src_as_intrinsic(intrin->src[0]);
    188     if (!intrin || intrin->intrinsic != nir_intrinsic_vulkan_resource_index)
    191     return intrin;
  [all …]

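find_descriptor_for_index_src() walks backwards through any number of vulkan_resource_reindex hops until it reaches the vulkan_resource_index that roots the chain, and gives up if the chain starts anywhere else. The same walk, assembled from the lines above into a standalone helper (the name is mine):

    #include "nir.h"

    /* Sketch: chase a descriptor index back to its resource_index. */
    static nir_intrinsic_instr *
    chase_resource_index(nir_src src)
    {
       nir_intrinsic_instr *intrin = nir_src_as_intrinsic(src);

       while (intrin &&
              intrin->intrinsic == nir_intrinsic_vulkan_resource_reindex)
          intrin = nir_src_as_intrinsic(intrin->src[0]);

       /* NULL when the chain is not rooted in a resource_index. */
       if (!intrin || intrin->intrinsic != nir_intrinsic_vulkan_resource_index)
          return NULL;
       return intrin;
    }
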
/third_party/mesa3d/src/amd/vulkan/

radv_nir_lower_abi.c

  in lower_abi_instr(), where intrin is a local:
     64     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     67     switch (intrin->intrinsic) {
    114     … return ac_nir_load_arg(b, &s->args->ac, s->args->ac.gs_vtx_offset[nir_intrinsic_base(intrin)]);

  in filter_abi_instr(), where intrin is a local:
    222     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
    223     return (intrin->intrinsic == nir_intrinsic_load_ring_tess_factors_amd && !s->use_llvm) ||
    224            (intrin->intrinsic == nir_intrinsic_load_ring_tess_offchip_amd && !s->use_llvm) ||
    225            (intrin->intrinsic == nir_intrinsic_load_ring_esgs_amd && !s->use_llvm) ||
    226            intrin->intrinsic == nir_intrinsic_load_ring_tess_factors_offset_amd ||
    227            intrin->intrinsic == nir_intrinsic_load_ring_tess_offchip_offset_amd ||
    228            intrin->intrinsic == nir_intrinsic_load_patch_vertices_in ||
  [all …]

radv_nir_apply_pipeline_layout.c

  in visit_vulkan_resource_index(), where intrin is an argument:
     70  visit_vulkan_resource_index(nir_builder *b, apply_layout_state *state, nir_intrinsic_instr *intrin)
     72     unsigned desc_set = nir_intrinsic_desc_set(intrin);
     73     unsigned binding = nir_intrinsic_binding(intrin);
     91     nir_ssa_def *binding_ptr = nir_imul_imm(b, intrin->src[0].ssa, stride);
     99     nir_ssa_def_rewrite_uses(&intrin->dest.ssa, nir_pack_64_2x32_split(b, set_ptr, binding_ptr));
    101     nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
    104     nir_instr_remove(&intrin->instr);

  in visit_vulkan_resource_reindex(), where intrin is an argument:
    109                                nir_intrinsic_instr *intrin)
    111     VkDescriptorType desc_type = nir_intrinsic_desc_type(intrin);
    113     nir_ssa_def *set_ptr = nir_unpack_64_2x32_split_x(b, intrin->src[0].ssa);
  [all …]

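visit_vulkan_resource_reindex() treats the packed (set, binding) pair as plain pointer arithmetic: unpack the two 32-bit halves, advance the binding half by index times the descriptor stride, and pack the result back into 64 bits. A sketch of that arithmetic (the stride parameter and helper name are assumptions):

    #include "nir_builder.h"

    /* Sketch: advance a packed (set_ptr, binding_ptr) descriptor pair
     * by `index` bindings of `stride` bytes each. */
    static nir_ssa_def *
    reindex_packed_descriptor(nir_builder *b, nir_ssa_def *packed,
                              nir_ssa_def *index, unsigned stride)
    {
       nir_ssa_def *set_ptr     = nir_unpack_64_2x32_split_x(b, packed);
       nir_ssa_def *binding_ptr = nir_unpack_64_2x32_split_y(b, packed);

       binding_ptr = nir_iadd(b, binding_ptr, nir_imul_imm(b, index, stride));

       return nir_pack_64_2x32_split(b, set_ptr, binding_ptr);
    }
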
/third_party/mesa3d/src/compiler/spirv/

vtn_subgroup.c

  in vtn_build_subgroup_instr(), where intrin is a local:
     53     nir_intrinsic_instr *intrin =
     55     nir_ssa_dest_init_for_type(&intrin->instr, &intrin->dest,
     57     intrin->num_components = intrin->dest.ssa.num_components;
     59     intrin->src[0] = nir_src_for_ssa(src0->def);
     61        intrin->src[1] = nir_src_for_ssa(index);
     63     intrin->const_index[0] = const_idx0;
     64     intrin->const_index[1] = const_idx1;
     66     nir_builder_instr_insert(&b->nb, &intrin->instr);
     68     dst->def = &intrin->dest.ssa;

  in vtn_handle_subgroup(), where intrin is a local:
    112     nir_intrinsic_instr *intrin =
  [all …]

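vtn_build_subgroup_instr() walks through the whole life cycle of hand-building an intrinsic: create it, size its destination, wire up sources and const indices, insert it, then publish the dest. The same sequence for one concrete case, a subgroup read_invocation (builder and operands are assumed to exist):

    #include "nir_builder.h"

    /* Sketch: hand-build read_invocation(value, invocation). */
    static nir_ssa_def *
    build_read_invocation(nir_builder *b, nir_ssa_def *value,
                          nir_ssa_def *invocation)
    {
       nir_intrinsic_instr *intrin =
          nir_intrinsic_instr_create(b->shader, nir_intrinsic_read_invocation);

       nir_ssa_dest_init(&intrin->instr, &intrin->dest,
                         value->num_components, value->bit_size, NULL);
       intrin->num_components = intrin->dest.ssa.num_components;

       intrin->src[0] = nir_src_for_ssa(value);
       intrin->src[1] = nir_src_for_ssa(invocation);

       nir_builder_instr_insert(b, &intrin->instr);
       return &intrin->dest.ssa;
    }
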
vtn_amd.c

  in vtn_handle_amd_shader_ballot_instruction(), where intrin is a local:
     82     nir_intrinsic_instr *intrin = nir_intrinsic_instr_create(b->nb.shader, op);
     83     nir_ssa_dest_init_for_type(&intrin->instr, &intrin->dest, dest_type, NULL);
     85     intrin->num_components = intrin->dest.ssa.num_components;
     88        intrin->src[i] = nir_src_for_ssa(vtn_get_nir_ssa(b, w[i + 5]));
     90     if (intrin->intrinsic == nir_intrinsic_quad_swizzle_amd) {
     96        nir_intrinsic_set_swizzle_mask(intrin, mask);
     98     } else if (intrin->intrinsic == nir_intrinsic_masked_swizzle_amd) {
    103        nir_intrinsic_set_swizzle_mask(intrin, mask);
    104     } else if (intrin->intrinsic == nir_intrinsic_mbcnt_amd) {
    108        intrin->src[1] = nir_src_for_ssa(nir_imm_int(&b->nb, 0));
  [all …]

/third_party/mesa3d/src/gallium/frontends/lavapipe/

lvp_lower_vulkan_resource.c

  in lower_vulkan_resource_index(), where intrin is a local:
     33     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     34     switch (intrin->intrinsic) {

  in lower_uniform_block_access(), where intrin is a local:
     72     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     73     if (intrin->intrinsic != nir_intrinsic_load_deref)
     75     nir_deref_instr *deref = nir_instr_as_deref(intrin->src[0].ssa->parent_instr);

  in lower_block_instr(), where intrin is a local:
     82     nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
     83     nir_binding nb = nir_chase_binding(intrin->src[0]);
     92     assert(intrin->src[0].ssa->num_components == 2);
    102     nir_ssa_def *added = nir_iadd(b, intrin->src[0].ssa, offset);
    103     nir_deref_instr *deref = nir_instr_as_deref(intrin->src[0].ssa->parent_instr);
  [all …]