Lines matching refs:ureg (symbol cross-reference over Mesa's TGSI ureg shader builder, tgsi_ureg.c)
241 static void set_bad( struct ureg_program *ureg ) in set_bad() argument
243 tokens_error(&ureg->domain[0]); in set_bad()
248 static union tgsi_any_token *get_tokens( struct ureg_program *ureg, in get_tokens() argument
252 struct ureg_tokens *tokens = &ureg->domain[domain]; in get_tokens()
264 static union tgsi_any_token *retrieve_token( struct ureg_program *ureg, in retrieve_token() argument
268 if (ureg->domain[domain].tokens == error_tokens) in retrieve_token()
271 return &ureg->domain[domain].tokens[nr]; in retrieve_token()
276 ureg_property(struct ureg_program *ureg, unsigned name, unsigned value) in ureg_property() argument
278 assert(name < ARRAY_SIZE(ureg->properties)); in ureg_property()
279 ureg->properties[name] = value; in ureg_property()
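A minimal usage sketch for ureg_property(), assuming an existing struct ureg_program *ureg; the specific property chosen is illustrative:

   /* Properties default to ~0 (unset) and are written out later by emit_decls(). */
   ureg_property(ureg, TGSI_PROPERTY_FS_COLOR0_WRITES_ALL_CBUFS, 1);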
283 ureg_DECL_fs_input_cyl_centroid_layout(struct ureg_program *ureg, in ureg_DECL_fs_input_cyl_centroid_layout() argument
299 for (i = 0; i < ureg->nr_inputs; i++) { in ureg_DECL_fs_input_cyl_centroid_layout()
300 if (ureg->input[i].semantic_name == semantic_name && in ureg_DECL_fs_input_cyl_centroid_layout()
301 ureg->input[i].semantic_index == semantic_index) { in ureg_DECL_fs_input_cyl_centroid_layout()
302 assert(ureg->input[i].interp == interp_mode); in ureg_DECL_fs_input_cyl_centroid_layout()
303 assert(ureg->input[i].cylindrical_wrap == cylindrical_wrap); in ureg_DECL_fs_input_cyl_centroid_layout()
304 assert(ureg->input[i].interp_location == interp_location); in ureg_DECL_fs_input_cyl_centroid_layout()
305 if (ureg->input[i].array_id == array_id) { in ureg_DECL_fs_input_cyl_centroid_layout()
306 ureg->input[i].usage_mask |= usage_mask; in ureg_DECL_fs_input_cyl_centroid_layout()
309 assert((ureg->input[i].usage_mask & usage_mask) == 0); in ureg_DECL_fs_input_cyl_centroid_layout()
313 if (ureg->nr_inputs < UREG_MAX_INPUT) { in ureg_DECL_fs_input_cyl_centroid_layout()
315 ureg->input[i].semantic_name = semantic_name; in ureg_DECL_fs_input_cyl_centroid_layout()
316 ureg->input[i].semantic_index = semantic_index; in ureg_DECL_fs_input_cyl_centroid_layout()
317 ureg->input[i].interp = interp_mode; in ureg_DECL_fs_input_cyl_centroid_layout()
318 ureg->input[i].cylindrical_wrap = cylindrical_wrap; in ureg_DECL_fs_input_cyl_centroid_layout()
319 ureg->input[i].interp_location = interp_location; in ureg_DECL_fs_input_cyl_centroid_layout()
320 ureg->input[i].first = index; in ureg_DECL_fs_input_cyl_centroid_layout()
321 ureg->input[i].last = index + array_size - 1; in ureg_DECL_fs_input_cyl_centroid_layout()
322 ureg->input[i].array_id = array_id; in ureg_DECL_fs_input_cyl_centroid_layout()
323 ureg->input[i].usage_mask = usage_mask; in ureg_DECL_fs_input_cyl_centroid_layout()
324 ureg->nr_input_regs = MAX2(ureg->nr_input_regs, index + array_size); in ureg_DECL_fs_input_cyl_centroid_layout()
325 ureg->nr_inputs++; in ureg_DECL_fs_input_cyl_centroid_layout()
327 set_bad(ureg); in ureg_DECL_fs_input_cyl_centroid_layout()
331 return ureg_src_array_register(TGSI_FILE_INPUT, ureg->input[i].first, in ureg_DECL_fs_input_cyl_centroid_layout()
336 ureg_DECL_fs_input_cyl_centroid(struct ureg_program *ureg, in ureg_DECL_fs_input_cyl_centroid() argument
345 return ureg_DECL_fs_input_cyl_centroid_layout(ureg, in ureg_DECL_fs_input_cyl_centroid()
347 ureg->nr_input_regs, TGSI_WRITEMASK_XYZW, array_id, array_size); in ureg_DECL_fs_input_cyl_centroid()
352 ureg_DECL_vs_input( struct ureg_program *ureg, in ureg_DECL_vs_input() argument
355 assert(ureg->processor == PIPE_SHADER_VERTEX); in ureg_DECL_vs_input()
356 assert(index / 32 < ARRAY_SIZE(ureg->vs_inputs)); in ureg_DECL_vs_input()
358 ureg->vs_inputs[index/32] |= 1 << (index % 32); in ureg_DECL_vs_input()
364 ureg_DECL_input_layout(struct ureg_program *ureg, in ureg_DECL_input_layout() argument
372 return ureg_DECL_fs_input_cyl_centroid_layout(ureg, in ureg_DECL_input_layout()
379 ureg_DECL_input(struct ureg_program *ureg, in ureg_DECL_input() argument
385 return ureg_DECL_fs_input_cyl_centroid(ureg, semantic_name, semantic_index, in ureg_DECL_input()
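Callers normally declare inputs through the thin wrappers in tgsi_ureg.h; a hedged sketch, assuming an existing ureg and with variable names invented for illustration:

   /* Vertex shaders only flag the attribute slot... */
   struct ureg_src vs_pos = ureg_DECL_vs_input(ureg, 0);

   /* ...while fragment shaders record semantic + interpolation and are
    * deduplicated against earlier declarations with the same semantic. */
   struct ureg_src fs_col = ureg_DECL_fs_input(ureg, TGSI_SEMANTIC_COLOR, 0,
                                               TGSI_INTERPOLATE_PERSPECTIVE);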
391 ureg_DECL_system_value(struct ureg_program *ureg, in ureg_DECL_system_value() argument
397 for (i = 0; i < ureg->nr_system_values; i++) { in ureg_DECL_system_value()
398 if (ureg->system_value[i].semantic_name == semantic_name && in ureg_DECL_system_value()
399 ureg->system_value[i].semantic_index == semantic_index) { in ureg_DECL_system_value()
404 if (ureg->nr_system_values < UREG_MAX_SYSTEM_VALUE) { in ureg_DECL_system_value()
405 ureg->system_value[ureg->nr_system_values].semantic_name = semantic_name; in ureg_DECL_system_value()
406 ureg->system_value[ureg->nr_system_values].semantic_index = semantic_index; in ureg_DECL_system_value()
407 i = ureg->nr_system_values; in ureg_DECL_system_value()
408 ureg->nr_system_values++; in ureg_DECL_system_value()
410 set_bad(ureg); in ureg_DECL_system_value()
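A sketch of declaring a system value, assuming the two-argument form used above (semantic name plus semantic index; the chosen semantic is illustrative):

   /* Deduplicated on (semantic_name, semantic_index); the register index is
    * the running nr_system_values counter. */
   struct ureg_src vertex_id = ureg_DECL_system_value(ureg, TGSI_SEMANTIC_VERTEXID, 0);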
419 ureg_DECL_output_layout(struct ureg_program *ureg, in ureg_DECL_output_layout() argument
436 for (i = 0; i < ureg->nr_outputs; i++) { in ureg_DECL_output_layout()
437 if (ureg->output[i].semantic_name == semantic_name && in ureg_DECL_output_layout()
438 ureg->output[i].semantic_index == semantic_index) { in ureg_DECL_output_layout()
439 if (ureg->output[i].array_id == array_id) { in ureg_DECL_output_layout()
440 ureg->output[i].usage_mask |= usage_mask; in ureg_DECL_output_layout()
443 assert((ureg->output[i].usage_mask & usage_mask) == 0); in ureg_DECL_output_layout()
447 if (ureg->nr_outputs < UREG_MAX_OUTPUT) { in ureg_DECL_output_layout()
448 ureg->output[i].semantic_name = semantic_name; in ureg_DECL_output_layout()
449 ureg->output[i].semantic_index = semantic_index; in ureg_DECL_output_layout()
450 ureg->output[i].usage_mask = usage_mask; in ureg_DECL_output_layout()
451 ureg->output[i].first = index; in ureg_DECL_output_layout()
452 ureg->output[i].last = index + array_size - 1; in ureg_DECL_output_layout()
453 ureg->output[i].array_id = array_id; in ureg_DECL_output_layout()
454 ureg->nr_output_regs = MAX2(ureg->nr_output_regs, index + array_size); in ureg_DECL_output_layout()
455 ureg->nr_outputs++; in ureg_DECL_output_layout()
458 set_bad( ureg ); in ureg_DECL_output_layout()
463 ureg->output[i].streams |= streams; in ureg_DECL_output_layout()
465 return ureg_dst_array_register(TGSI_FILE_OUTPUT, ureg->output[i].first, in ureg_DECL_output_layout()
471 ureg_DECL_output_masked(struct ureg_program *ureg, in ureg_DECL_output_masked() argument
478 return ureg_DECL_output_layout(ureg, name, index, 0, in ureg_DECL_output_masked()
479 ureg->nr_output_regs, usage_mask, array_id, array_size); in ureg_DECL_output_masked()
484 ureg_DECL_output(struct ureg_program *ureg, in ureg_DECL_output() argument
488 return ureg_DECL_output_masked(ureg, name, index, TGSI_WRITEMASK_XYZW, in ureg_DECL_output()
493 ureg_DECL_output_array(struct ureg_program *ureg, in ureg_DECL_output_array() argument
499 return ureg_DECL_output_masked(ureg, semantic_name, semantic_index, in ureg_DECL_output_array()
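Caller-side sketch for outputs (assuming an existing ureg; names are illustrative):

   /* Deduplicated on (semantic_name, semantic_index); the plain variant
    * declares the full XYZW write mask. */
   struct ureg_dst out_pos = ureg_DECL_output(ureg, TGSI_SEMANTIC_POSITION, 0);
   struct ureg_dst out_col = ureg_DECL_output(ureg, TGSI_SEMANTIC_COLOR, 0);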
516 ureg_DECL_constant2D(struct ureg_program *ureg, in ureg_DECL_constant2D() argument
521 struct const_decl *decl = &ureg->const_decls[index2D]; in ureg_DECL_constant2D()
540 ureg_DECL_constant(struct ureg_program *ureg, in ureg_DECL_constant() argument
543 struct const_decl *decl = &ureg->const_decls[0]; in ureg_DECL_constant()
603 ureg_DECL_hw_atomic(struct ureg_program *ureg, in ureg_DECL_hw_atomic() argument
609 struct hw_atomic_decl *decl = &ureg->hw_atomic_decls[buffer_id]; in ureg_DECL_hw_atomic()
618 set_bad(ureg); in ureg_DECL_hw_atomic()
622 static struct ureg_dst alloc_temporary( struct ureg_program *ureg, in alloc_temporary() argument
629 for (i = util_bitmask_get_first_index(ureg->free_temps); in alloc_temporary()
631 i = util_bitmask_get_next_index(ureg->free_temps, i + 1)) { in alloc_temporary()
632 if (util_bitmask_get(ureg->local_temps, i) == local) in alloc_temporary()
639 i = ureg->nr_temps++; in alloc_temporary()
642 util_bitmask_set(ureg->local_temps, i); in alloc_temporary()
645 if (!i || util_bitmask_get(ureg->local_temps, i - 1) != local) in alloc_temporary()
646 util_bitmask_set(ureg->decl_temps, i); in alloc_temporary()
649 util_bitmask_clear(ureg->free_temps, i); in alloc_temporary()
654 struct ureg_dst ureg_DECL_temporary( struct ureg_program *ureg ) in ureg_DECL_temporary() argument
656 return alloc_temporary(ureg, FALSE); in ureg_DECL_temporary()
659 struct ureg_dst ureg_DECL_local_temporary( struct ureg_program *ureg ) in ureg_DECL_local_temporary() argument
661 return alloc_temporary(ureg, TRUE); in ureg_DECL_local_temporary()
664 struct ureg_dst ureg_DECL_array_temporary( struct ureg_program *ureg, in ureg_DECL_array_temporary() argument
668 unsigned i = ureg->nr_temps; in ureg_DECL_array_temporary()
672 util_bitmask_set(ureg->local_temps, i); in ureg_DECL_array_temporary()
675 util_bitmask_set(ureg->decl_temps, i); in ureg_DECL_array_temporary()
677 ureg->nr_temps += size; in ureg_DECL_array_temporary()
680 util_bitmask_set(ureg->decl_temps, ureg->nr_temps); in ureg_DECL_array_temporary()
682 if (ureg->nr_array_temps < UREG_MAX_ARRAY_TEMPS) { in ureg_DECL_array_temporary()
683 ureg->array_temps[ureg->nr_array_temps++] = i; in ureg_DECL_array_temporary()
684 dst.ArrayID = ureg->nr_array_temps; in ureg_DECL_array_temporary()
690 void ureg_release_temporary( struct ureg_program *ureg, in ureg_release_temporary() argument
694 util_bitmask_set(ureg->free_temps, tmp.Index); in ureg_release_temporary()
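Typical temporary-register usage, as a sketch (assuming an existing ureg):

   struct ureg_dst tmp = ureg_DECL_temporary(ureg);   /* reuses a freed index if one exists */
   /* ... use tmp as an instruction destination ... */
   ureg_release_temporary(ureg, tmp);                  /* marks the index free for reuse */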
700 struct ureg_dst ureg_DECL_address( struct ureg_program *ureg ) in ureg_DECL_address() argument
702 if (ureg->nr_addrs < UREG_MAX_ADDR) in ureg_DECL_address()
703 return ureg_dst_register( TGSI_FILE_ADDRESS, ureg->nr_addrs++ ); in ureg_DECL_address()
711 struct ureg_src ureg_DECL_sampler( struct ureg_program *ureg, in ureg_DECL_sampler() argument
716 for (i = 0; i < ureg->nr_samplers; i++) in ureg_DECL_sampler()
717 if (ureg->sampler[i].Index == nr) in ureg_DECL_sampler()
718 return ureg->sampler[i]; in ureg_DECL_sampler()
721 ureg->sampler[i] = ureg_src_register( TGSI_FILE_SAMPLER, nr ); in ureg_DECL_sampler()
722 ureg->nr_samplers++; in ureg_DECL_sampler()
723 return ureg->sampler[i]; in ureg_DECL_sampler()
727 return ureg->sampler[0]; in ureg_DECL_sampler()
734 ureg_DECL_sampler_view(struct ureg_program *ureg, in ureg_DECL_sampler_view() argument
745 for (i = 0; i < ureg->nr_sampler_views; i++) { in ureg_DECL_sampler_view()
746 if (ureg->sampler_view[i].index == index) { in ureg_DECL_sampler_view()
752 ureg->sampler_view[i].index = index; in ureg_DECL_sampler_view()
753 ureg->sampler_view[i].target = target; in ureg_DECL_sampler_view()
754 ureg->sampler_view[i].return_type_x = return_type_x; in ureg_DECL_sampler_view()
755 ureg->sampler_view[i].return_type_y = return_type_y; in ureg_DECL_sampler_view()
756 ureg->sampler_view[i].return_type_z = return_type_z; in ureg_DECL_sampler_view()
757 ureg->sampler_view[i].return_type_w = return_type_w; in ureg_DECL_sampler_view()
758 ureg->nr_sampler_views++; in ureg_DECL_sampler_view()
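A sketch pairing a sampler with a sampler view (assuming an existing ureg; slot 0 and float return types are illustrative):

   struct ureg_src samp = ureg_DECL_sampler(ureg, 0);
   struct ureg_src view = ureg_DECL_sampler_view(ureg, 0, TGSI_TEXTURE_2D,
                                                 TGSI_RETURN_TYPE_FLOAT,
                                                 TGSI_RETURN_TYPE_FLOAT,
                                                 TGSI_RETURN_TYPE_FLOAT,
                                                 TGSI_RETURN_TYPE_FLOAT);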
769 ureg_DECL_image(struct ureg_program *ureg, in ureg_DECL_image() argument
779 for (i = 0; i < ureg->nr_images; i++) in ureg_DECL_image()
780 if (ureg->image[i].index == index) in ureg_DECL_image()
784 ureg->image[i].index = index; in ureg_DECL_image()
785 ureg->image[i].target = target; in ureg_DECL_image()
786 ureg->image[i].wr = wr; in ureg_DECL_image()
787 ureg->image[i].raw = raw; in ureg_DECL_image()
788 ureg->image[i].format = format; in ureg_DECL_image()
789 ureg->nr_images++; in ureg_DECL_image()
799 struct ureg_src ureg_DECL_buffer(struct ureg_program *ureg, unsigned nr, in ureg_DECL_buffer() argument
805 for (i = 0; i < ureg->nr_buffers; i++) in ureg_DECL_buffer()
806 if (ureg->buffer[i].index == nr) in ureg_DECL_buffer()
810 ureg->buffer[i].index = nr; in ureg_DECL_buffer()
811 ureg->buffer[i].atomic = atomic; in ureg_DECL_buffer()
812 ureg->nr_buffers++; in ureg_DECL_buffer()
822 struct ureg_src ureg_DECL_memory(struct ureg_program *ureg, in ureg_DECL_memory() argument
827 ureg->use_memory[memory_type] = true; in ureg_DECL_memory()
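Buffer and shared-memory declarations from the caller's side, sketched (assuming an existing ureg; the slot number is illustrative):

   struct ureg_src ssbo   = ureg_DECL_buffer(ureg, 0, false /* atomic */);
   struct ureg_src shared = ureg_DECL_memory(ureg, TGSI_MEMORY_TYPE_SHARED);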
918 decl_immediate( struct ureg_program *ureg, in decl_immediate() argument
930 for (i = 0; i < ureg->nr_immediates; i++) { in decl_immediate()
931 if (ureg->immediate[i].type != type) { in decl_immediate()
937 ureg->immediate[i].value.u, in decl_immediate()
938 &ureg->immediate[i].nr, in decl_immediate()
944 if (ureg->nr_immediates < UREG_MAX_IMMEDIATE) { in decl_immediate()
945 i = ureg->nr_immediates++; in decl_immediate()
946 ureg->immediate[i].type = type; in decl_immediate()
950 ureg->immediate[i].value.u, in decl_immediate()
951 &ureg->immediate[i].nr, in decl_immediate()
957 set_bad(ureg); in decl_immediate()
983 ureg_DECL_immediate( struct ureg_program *ureg, in ureg_DECL_immediate() argument
997 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_FLOAT32); in ureg_DECL_immediate()
1001 ureg_DECL_immediate_f64( struct ureg_program *ureg, in ureg_DECL_immediate_f64() argument
1016 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_FLOAT64); in ureg_DECL_immediate_f64()
1020 ureg_DECL_immediate_uint( struct ureg_program *ureg, in ureg_DECL_immediate_uint() argument
1024 return decl_immediate(ureg, v, nr, TGSI_IMM_UINT32); in ureg_DECL_immediate_uint()
1029 ureg_DECL_immediate_block_uint( struct ureg_program *ureg, in ureg_DECL_immediate_block_uint() argument
1036 if (ureg->nr_immediates + (nr + 3) / 4 > UREG_MAX_IMMEDIATE) { in ureg_DECL_immediate_block_uint()
1037 set_bad(ureg); in ureg_DECL_immediate_block_uint()
1041 index = ureg->nr_immediates; in ureg_DECL_immediate_block_uint()
1042 ureg->nr_immediates += (nr + 3) / 4; in ureg_DECL_immediate_block_uint()
1044 for (i = index; i < ureg->nr_immediates; i++) { in ureg_DECL_immediate_block_uint()
1045 ureg->immediate[i].type = TGSI_IMM_UINT32; in ureg_DECL_immediate_block_uint()
1046 ureg->immediate[i].nr = nr > 4 ? 4 : nr; in ureg_DECL_immediate_block_uint()
1047 memcpy(ureg->immediate[i].value.u, in ureg_DECL_immediate_block_uint()
1049 ureg->immediate[i].nr * sizeof(uint)); in ureg_DECL_immediate_block_uint()
1058 ureg_DECL_immediate_int( struct ureg_program *ureg, in ureg_DECL_immediate_int() argument
1062 return decl_immediate(ureg, (const unsigned *)v, nr, TGSI_IMM_INT32); in ureg_DECL_immediate_int()
1066 ureg_DECL_immediate_uint64( struct ureg_program *ureg, in ureg_DECL_immediate_uint64() argument
1081 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_UINT64); in ureg_DECL_immediate_uint64()
1085 ureg_DECL_immediate_int64( struct ureg_program *ureg, in ureg_DECL_immediate_int64() argument
1100 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_INT64); in ureg_DECL_immediate_int64()
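Immediate declaration from the caller's side, as a sketch (assuming an existing ureg):

   static const float half[4] = { 0.5f, 0.5f, 0.5f, 0.5f };
   /* Matching components are folded into already-declared immediate slots,
    * so the returned source carries the appropriate swizzle. */
   struct ureg_src imm = ureg_DECL_immediate(ureg, half, 4);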
1104 ureg_emit_src( struct ureg_program *ureg, in ureg_emit_src() argument
1110 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_INSN, size ); in ureg_emit_src()
1133 if (!ureg->supports_any_inout_decl_range && in ureg_emit_src()
1153 if (!ureg->supports_any_inout_decl_range && in ureg_emit_src()
1170 ureg_emit_dst( struct ureg_program *ureg, in ureg_emit_dst() argument
1176 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_INSN, size ); in ureg_emit_dst()
1197 if (!ureg->supports_any_inout_decl_range && in ureg_emit_dst()
1217 if (!ureg->supports_any_inout_decl_range && in ureg_emit_dst()
1248 ureg_emit_insn(struct ureg_program *ureg, in ureg_emit_insn() argument
1261 out = get_tokens( ureg, DOMAIN_INSN, count ); in ureg_emit_insn()
1269 result.insn_token = ureg->domain[DOMAIN_INSN].count - count; in ureg_emit_insn()
1272 ureg->nr_instructions++; in ureg_emit_insn()
1285 ureg_emit_label(struct ureg_program *ureg, in ureg_emit_label() argument
1294 out = get_tokens( ureg, DOMAIN_INSN, 1 ); in ureg_emit_label()
1297 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token ); in ureg_emit_label()
1300 *label_token = ureg->domain[DOMAIN_INSN].count - 1; in ureg_emit_label()
1307 ureg_get_instruction_number( struct ureg_program *ureg ) in ureg_get_instruction_number() argument
1309 return ureg->nr_instructions; in ureg_get_instruction_number()
1316 ureg_fixup_label(struct ureg_program *ureg, in ureg_fixup_label() argument
1320 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_INSN, label_token ); in ureg_fixup_label()
1327 ureg_emit_texture(struct ureg_program *ureg, in ureg_emit_texture() argument
1333 out = get_tokens( ureg, DOMAIN_INSN, 1 ); in ureg_emit_texture()
1334 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token ); in ureg_emit_texture()
1345 ureg_emit_texture_offset(struct ureg_program *ureg, in ureg_emit_texture_offset() argument
1350 out = get_tokens( ureg, DOMAIN_INSN, 1); in ureg_emit_texture_offset()
1358 ureg_emit_memory(struct ureg_program *ureg, in ureg_emit_memory() argument
1366 out = get_tokens( ureg, DOMAIN_INSN, 1 ); in ureg_emit_memory()
1367 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token ); in ureg_emit_memory()
1378 ureg_fixup_insn_size(struct ureg_program *ureg, in ureg_fixup_insn_size() argument
1381 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_INSN, insn ); in ureg_fixup_insn_size()
1384 out->insn.NrTokens = ureg->domain[DOMAIN_INSN].count - insn - 1; in ureg_fixup_insn_size()
1389 ureg_insn(struct ureg_program *ureg, in ureg_insn() argument
1407 insn = ureg_emit_insn(ureg, in ureg_insn()
1415 ureg_emit_dst( ureg, dst[i] ); in ureg_insn()
1418 ureg_emit_src( ureg, src[i] ); in ureg_insn()
1420 ureg_fixup_insn_size( ureg, insn.insn_token ); in ureg_insn()
1424 ureg_tex_insn(struct ureg_program *ureg, in ureg_tex_insn() argument
1445 insn = ureg_emit_insn(ureg, in ureg_tex_insn()
1452 ureg_emit_texture( ureg, insn.extended_token, target, return_type, in ureg_tex_insn()
1456 ureg_emit_texture_offset( ureg, &texoffsets[i]); in ureg_tex_insn()
1459 ureg_emit_dst( ureg, dst[i] ); in ureg_tex_insn()
1462 ureg_emit_src( ureg, src[i] ); in ureg_tex_insn()
1464 ureg_fixup_insn_size( ureg, insn.insn_token ); in ureg_tex_insn()
1469 ureg_memory_insn(struct ureg_program *ureg, in ureg_memory_insn() argument
1482 insn = ureg_emit_insn(ureg, in ureg_memory_insn()
1489 ureg_emit_memory(ureg, insn.extended_token, qualifier, texture, format); in ureg_memory_insn()
1492 ureg_emit_dst(ureg, dst[i]); in ureg_memory_insn()
1495 ureg_emit_src(ureg, src[i]); in ureg_memory_insn()
1497 ureg_fixup_insn_size(ureg, insn.insn_token); in ureg_memory_insn()
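Callers normally reach ureg_insn()/ureg_tex_insn() through the generated opcode helpers (ureg_MOV, ureg_MUL, ureg_END, ...); a hedged sketch assuming an existing ureg:

   /* Each helper emits the instruction token, then the dst and src operands,
    * and finally patches NrTokens via ureg_fixup_insn_size(). */
   struct ureg_dst t = ureg_DECL_temporary(ureg);
   ureg_MOV(ureg, t, ureg_DECL_constant(ureg, 0));
   ureg_MOV(ureg, ureg_DECL_output(ureg, TGSI_SEMANTIC_COLOR, 0), ureg_src(t));
   ureg_END(ureg);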
1502 emit_decl_semantic(struct ureg_program *ureg, in emit_decl_semantic() argument
1512 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, array_id ? 4 : 3); in emit_decl_semantic()
1541 emit_decl_atomic_2d(struct ureg_program *ureg, in emit_decl_atomic_2d() argument
1547 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, array_id ? 4 : 3); in emit_decl_atomic_2d()
1571 emit_decl_fs(struct ureg_program *ureg, in emit_decl_fs() argument
1583 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, in emit_decl_fs()
1615 emit_decl_temps( struct ureg_program *ureg, in emit_decl_temps() argument
1620 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, in emit_decl_temps()
1641 static void emit_decl_range( struct ureg_program *ureg, in emit_decl_range() argument
1646 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 2 ); in emit_decl_range()
1661 emit_decl_range2D(struct ureg_program *ureg, in emit_decl_range2D() argument
1667 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3); in emit_decl_range2D()
1685 emit_decl_sampler_view(struct ureg_program *ureg, in emit_decl_sampler_view() argument
1693 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3); in emit_decl_sampler_view()
1714 emit_decl_image(struct ureg_program *ureg, in emit_decl_image() argument
1721 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3); in emit_decl_image()
1741 emit_decl_buffer(struct ureg_program *ureg, in emit_decl_buffer() argument
1745 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2); in emit_decl_buffer()
1760 emit_decl_memory(struct ureg_program *ureg, unsigned memory_type) in emit_decl_memory() argument
1762 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2); in emit_decl_memory()
1777 emit_immediate( struct ureg_program *ureg, in emit_immediate() argument
1781 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 5 ); in emit_immediate()
1796 emit_property(struct ureg_program *ureg, in emit_property() argument
1800 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2); in emit_property()
1811 static void emit_decls( struct ureg_program *ureg ) in emit_decls() argument
1815 for (i = 0; i < ARRAY_SIZE(ureg->properties); i++) in emit_decls()
1816 if (ureg->properties[i] != ~0) in emit_decls()
1817 emit_property(ureg, i, ureg->properties[i]); in emit_decls()
1819 if (ureg->processor == PIPE_SHADER_VERTEX) { in emit_decls()
1821 if (ureg->vs_inputs[i/32] & (1u << (i%32))) { in emit_decls()
1822 emit_decl_range( ureg, TGSI_FILE_INPUT, i, 1 ); in emit_decls()
1825 } else if (ureg->processor == PIPE_SHADER_FRAGMENT) { in emit_decls()
1826 if (ureg->supports_any_inout_decl_range) { in emit_decls()
1827 for (i = 0; i < ureg->nr_inputs; i++) { in emit_decls()
1828 emit_decl_fs(ureg, in emit_decls()
1830 ureg->input[i].first, in emit_decls()
1831 ureg->input[i].last, in emit_decls()
1832 ureg->input[i].semantic_name, in emit_decls()
1833 ureg->input[i].semantic_index, in emit_decls()
1834 ureg->input[i].interp, in emit_decls()
1835 ureg->input[i].cylindrical_wrap, in emit_decls()
1836 ureg->input[i].interp_location, in emit_decls()
1837 ureg->input[i].array_id, in emit_decls()
1838 ureg->input[i].usage_mask); in emit_decls()
1842 for (i = 0; i < ureg->nr_inputs; i++) { in emit_decls()
1843 for (j = ureg->input[i].first; j <= ureg->input[i].last; j++) { in emit_decls()
1844 emit_decl_fs(ureg, in emit_decls()
1847 ureg->input[i].semantic_name, in emit_decls()
1848 ureg->input[i].semantic_index + in emit_decls()
1849 (j - ureg->input[i].first), in emit_decls()
1850 ureg->input[i].interp, in emit_decls()
1851 ureg->input[i].cylindrical_wrap, in emit_decls()
1852 ureg->input[i].interp_location, 0, in emit_decls()
1853 ureg->input[i].usage_mask); in emit_decls()
1858 if (ureg->supports_any_inout_decl_range) { in emit_decls()
1859 for (i = 0; i < ureg->nr_inputs; i++) { in emit_decls()
1860 emit_decl_semantic(ureg, in emit_decls()
1862 ureg->input[i].first, in emit_decls()
1863 ureg->input[i].last, in emit_decls()
1864 ureg->input[i].semantic_name, in emit_decls()
1865 ureg->input[i].semantic_index, in emit_decls()
1868 ureg->input[i].array_id); in emit_decls()
1872 for (i = 0; i < ureg->nr_inputs; i++) { in emit_decls()
1873 for (j = ureg->input[i].first; j <= ureg->input[i].last; j++) { in emit_decls()
1874 emit_decl_semantic(ureg, in emit_decls()
1877 ureg->input[i].semantic_name, in emit_decls()
1878 ureg->input[i].semantic_index + in emit_decls()
1879 (j - ureg->input[i].first), in emit_decls()
1887 for (i = 0; i < ureg->nr_system_values; i++) { in emit_decls()
1888 emit_decl_semantic(ureg, in emit_decls()
1892 ureg->system_value[i].semantic_name, in emit_decls()
1893 ureg->system_value[i].semantic_index, in emit_decls()
1898 if (ureg->supports_any_inout_decl_range) { in emit_decls()
1899 for (i = 0; i < ureg->nr_outputs; i++) { in emit_decls()
1900 emit_decl_semantic(ureg, in emit_decls()
1902 ureg->output[i].first, in emit_decls()
1903 ureg->output[i].last, in emit_decls()
1904 ureg->output[i].semantic_name, in emit_decls()
1905 ureg->output[i].semantic_index, in emit_decls()
1906 ureg->output[i].streams, in emit_decls()
1907 ureg->output[i].usage_mask, in emit_decls()
1908 ureg->output[i].array_id); in emit_decls()
1912 for (i = 0; i < ureg->nr_outputs; i++) { in emit_decls()
1913 for (j = ureg->output[i].first; j <= ureg->output[i].last; j++) { in emit_decls()
1914 emit_decl_semantic(ureg, in emit_decls()
1917 ureg->output[i].semantic_name, in emit_decls()
1918 ureg->output[i].semantic_index + in emit_decls()
1919 (j - ureg->output[i].first), in emit_decls()
1920 ureg->output[i].streams, in emit_decls()
1921 ureg->output[i].usage_mask, 0); in emit_decls()
1926 for (i = 0; i < ureg->nr_samplers; i++) { in emit_decls()
1927 emit_decl_range( ureg, in emit_decls()
1929 ureg->sampler[i].Index, 1 ); in emit_decls()
1932 for (i = 0; i < ureg->nr_sampler_views; i++) { in emit_decls()
1933 emit_decl_sampler_view(ureg, in emit_decls()
1934 ureg->sampler_view[i].index, in emit_decls()
1935 ureg->sampler_view[i].target, in emit_decls()
1936 ureg->sampler_view[i].return_type_x, in emit_decls()
1937 ureg->sampler_view[i].return_type_y, in emit_decls()
1938 ureg->sampler_view[i].return_type_z, in emit_decls()
1939 ureg->sampler_view[i].return_type_w); in emit_decls()
1942 for (i = 0; i < ureg->nr_images; i++) { in emit_decls()
1943 emit_decl_image(ureg, in emit_decls()
1944 ureg->image[i].index, in emit_decls()
1945 ureg->image[i].target, in emit_decls()
1946 ureg->image[i].format, in emit_decls()
1947 ureg->image[i].wr, in emit_decls()
1948 ureg->image[i].raw); in emit_decls()
1951 for (i = 0; i < ureg->nr_buffers; i++) { in emit_decls()
1952 emit_decl_buffer(ureg, ureg->buffer[i].index, ureg->buffer[i].atomic); in emit_decls()
1956 if (ureg->use_memory[i]) in emit_decls()
1957 emit_decl_memory(ureg, i); in emit_decls()
1961 struct const_decl *decl = &ureg->const_decls[i]; in emit_decls()
1967 emit_decl_range2D(ureg, in emit_decls()
1977 struct hw_atomic_decl *decl = &ureg->hw_atomic_decls[i]; in emit_decls()
1983 emit_decl_atomic_2d(ureg, in emit_decls()
1992 if (ureg->nr_temps) { in emit_decls()
1994 for (i = 0; i < ureg->nr_temps;) { in emit_decls()
1995 boolean local = util_bitmask_get(ureg->local_temps, i); in emit_decls()
1997 i = util_bitmask_get_next_index(ureg->decl_temps, i + 1); in emit_decls()
1999 i = ureg->nr_temps; in emit_decls()
2001 if (array < ureg->nr_array_temps && ureg->array_temps[array] == first) in emit_decls()
2002 emit_decl_temps( ureg, first, i - 1, local, ++array ); in emit_decls()
2004 emit_decl_temps( ureg, first, i - 1, local, 0 ); in emit_decls()
2008 if (ureg->nr_addrs) { in emit_decls()
2009 emit_decl_range( ureg, in emit_decls()
2011 0, ureg->nr_addrs ); in emit_decls()
2014 for (i = 0; i < ureg->nr_immediates; i++) { in emit_decls()
2015 emit_immediate( ureg, in emit_decls()
2016 ureg->immediate[i].value.u, in emit_decls()
2017 ureg->immediate[i].type ); in emit_decls()
2024 static void copy_instructions( struct ureg_program *ureg ) in copy_instructions() argument
2026 unsigned nr_tokens = ureg->domain[DOMAIN_INSN].count; in copy_instructions()
2027 union tgsi_any_token *out = get_tokens( ureg, in copy_instructions()
2032 ureg->domain[DOMAIN_INSN].tokens, in copy_instructions()
2038 fixup_header_size(struct ureg_program *ureg) in fixup_header_size() argument
2040 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_DECL, 0 ); in fixup_header_size()
2042 out->header.BodySize = ureg->domain[DOMAIN_DECL].count - 2; in fixup_header_size()
2047 emit_header( struct ureg_program *ureg ) in emit_header() argument
2049 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 2 ); in emit_header()
2054 out[1].processor.Processor = ureg->processor; in emit_header()
2059 const struct tgsi_token *ureg_finalize( struct ureg_program *ureg ) in ureg_finalize() argument
2063 switch (ureg->processor) { in ureg_finalize()
2066 ureg_property(ureg, TGSI_PROPERTY_NEXT_SHADER, in ureg_finalize()
2067 ureg->next_shader_processor == -1 ? in ureg_finalize()
2069 ureg->next_shader_processor); in ureg_finalize()
2073 emit_header( ureg ); in ureg_finalize()
2074 emit_decls( ureg ); in ureg_finalize()
2075 copy_instructions( ureg ); in ureg_finalize()
2076 fixup_header_size( ureg ); in ureg_finalize()
2078 if (ureg->domain[0].tokens == error_tokens || in ureg_finalize()
2079 ureg->domain[1].tokens == error_tokens) { in ureg_finalize()
2085 tokens = &ureg->domain[DOMAIN_DECL].tokens[0].token; in ureg_finalize()
2089 ureg->domain[DOMAIN_DECL].count); in ureg_finalize()
2106 void *ureg_create_shader( struct ureg_program *ureg, in ureg_create_shader() argument
2112 pipe_shader_state_from_tgsi(&state, ureg_finalize(ureg)); in ureg_create_shader()
2119 switch (ureg->processor) { in ureg_create_shader()
2136 const struct tgsi_token *ureg_get_tokens( struct ureg_program *ureg, in ureg_get_tokens() argument
2141 ureg_finalize(ureg); in ureg_get_tokens()
2143 tokens = &ureg->domain[DOMAIN_DECL].tokens[0].token; in ureg_get_tokens()
2146 *nr_tokens = ureg->domain[DOMAIN_DECL].count; in ureg_get_tokens()
2148 ureg->domain[DOMAIN_DECL].tokens = 0; in ureg_get_tokens()
2149 ureg->domain[DOMAIN_DECL].size = 0; in ureg_get_tokens()
2150 ureg->domain[DOMAIN_DECL].order = 0; in ureg_get_tokens()
2151 ureg->domain[DOMAIN_DECL].count = 0; in ureg_get_tokens()
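Sketch of taking ownership of the finished token stream (assuming an existing, fully built ureg):

   unsigned nr_tokens;
   const struct tgsi_token *tokens = ureg_get_tokens(ureg, &nr_tokens);
   /* The DECL domain is detached from ureg above, so the caller now owns
    * the buffer... */
   /* ... consume tokens ... */
   ureg_free_tokens(tokens);   /* ...and releases it when done. */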
2174 struct ureg_program *ureg = CALLOC_STRUCT( ureg_program ); in ureg_create_with_screen() local
2175 if (!ureg) in ureg_create_with_screen()
2178 ureg->processor = processor; in ureg_create_with_screen()
2179 ureg->supports_any_inout_decl_range = in ureg_create_with_screen()
2183 ureg->next_shader_processor = -1; in ureg_create_with_screen()
2185 for (i = 0; i < ARRAY_SIZE(ureg->properties); i++) in ureg_create_with_screen()
2186 ureg->properties[i] = ~0; in ureg_create_with_screen()
2188 ureg->free_temps = util_bitmask_create(); in ureg_create_with_screen()
2189 if (ureg->free_temps == NULL) in ureg_create_with_screen()
2192 ureg->local_temps = util_bitmask_create(); in ureg_create_with_screen()
2193 if (ureg->local_temps == NULL) in ureg_create_with_screen()
2196 ureg->decl_temps = util_bitmask_create(); in ureg_create_with_screen()
2197 if (ureg->decl_temps == NULL) in ureg_create_with_screen()
2200 return ureg; in ureg_create_with_screen()
2203 util_bitmask_destroy(ureg->local_temps); in ureg_create_with_screen()
2205 util_bitmask_destroy(ureg->free_temps); in ureg_create_with_screen()
2207 FREE(ureg); in ureg_create_with_screen()
2214 ureg_set_next_shader_processor(struct ureg_program *ureg, unsigned processor) in ureg_set_next_shader_processor() argument
2216 ureg->next_shader_processor = processor; in ureg_set_next_shader_processor()
2221 ureg_get_nr_outputs( const struct ureg_program *ureg ) in ureg_get_nr_outputs() argument
2223 if (!ureg) in ureg_get_nr_outputs()
2225 return ureg->nr_outputs; in ureg_get_nr_outputs()
2229 void ureg_destroy( struct ureg_program *ureg ) in ureg_destroy() argument
2233 for (i = 0; i < ARRAY_SIZE(ureg->domain); i++) { in ureg_destroy()
2234 if (ureg->domain[i].tokens && in ureg_destroy()
2235 ureg->domain[i].tokens != error_tokens) in ureg_destroy()
2236 FREE(ureg->domain[i].tokens); in ureg_destroy()
2239 util_bitmask_destroy(ureg->free_temps); in ureg_destroy()
2240 util_bitmask_destroy(ureg->local_temps); in ureg_destroy()
2241 util_bitmask_destroy(ureg->decl_temps); in ureg_destroy()
2243 FREE(ureg); in ureg_destroy()
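An end-to-end sketch tying the pieces together: build a trivial pass-through fragment shader and hand it to the driver. This assumes a caller-provided struct pipe_context *pipe; names and semantics are illustrative, and any error recorded via set_bad() surfaces when ureg_finalize() runs inside shader creation.

   static void *
   build_passthrough_fs(struct pipe_context *pipe)
   {
      struct ureg_program *ureg = ureg_create(PIPE_SHADER_FRAGMENT);
      if (!ureg)
         return NULL;

      struct ureg_src in_col  = ureg_DECL_fs_input(ureg, TGSI_SEMANTIC_COLOR, 0,
                                                   TGSI_INTERPOLATE_PERSPECTIVE);
      struct ureg_dst out_col = ureg_DECL_output(ureg, TGSI_SEMANTIC_COLOR, 0);

      ureg_MOV(ureg, out_col, in_col);
      ureg_END(ureg);

      /* ureg_create_shader_and_destroy() = ureg_create_shader() + ureg_destroy(). */
      return ureg_create_shader_and_destroy(ureg, pipe);
   }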