Lines Matching refs:ve
Cross-reference hits for the identifier "ve" in Mesa's u_vbuf.c (the gallium auxiliary vertex buffer manager). Each hit lists the source line number, the matching source line, and the enclosing function; declaration hits carry an extra local, member, or argument tag.
106 struct pipe_vertex_element ve[PIPE_MAX_ATTRIBS]; member
177 struct u_vbuf_elements *ve, *ve_saved; member
391 struct u_vbuf_elements *ve; in u_vbuf_set_vertex_elements_internal() local
407 ve = cso->data; in u_vbuf_set_vertex_elements_internal()
409 ve = ((struct cso_velements *)cso_hash_iter_data(iter))->data; in u_vbuf_set_vertex_elements_internal()
412 assert(ve); in u_vbuf_set_vertex_elements_internal()
414 if (ve != mgr->ve) in u_vbuf_set_vertex_elements_internal()
415 pipe->bind_vertex_elements_state(pipe, ve->driver_cso); in u_vbuf_set_vertex_elements_internal()
417 return ve; in u_vbuf_set_vertex_elements_internal()
423 mgr->ve = u_vbuf_set_vertex_elements_internal(mgr, velems); in u_vbuf_set_vertex_elements()
433 mgr->ve = NULL; in u_vbuf_unset_vertex_elements()
624 mgr->ve->incompatible_vb_mask_all | mgr->incompatible_vb_mask | in u_vbuf_translate_find_free_vb_slots()
690 mgr->ve->used_vb_mask; in u_vbuf_translate_begin()
709 for (i = 0; i < mgr->ve->count; i++) { in u_vbuf_translate_begin()
710 unsigned vb_index = mgr->ve->ve[i].vertex_buffer_index; in u_vbuf_translate_begin()
713 if (!(mgr->ve->incompatible_elem_mask & (1 << i)) && in u_vbuf_translate_begin()
718 } else if (mgr->ve->ve[i].instance_divisor) { in u_vbuf_translate_begin()
719 if (!(mgr->ve->incompatible_elem_mask & (1 << i)) && in u_vbuf_translate_begin()
726 !(mgr->ve->incompatible_elem_mask & (1 << i)) && in u_vbuf_translate_begin()
743 for (i = 0; i < mgr->ve->count; i++) { in u_vbuf_translate_begin()
746 enum pipe_format output_format = mgr->ve->native_format[i]; in u_vbuf_translate_begin()
747 unsigned bit, vb_index = mgr->ve->ve[i].vertex_buffer_index; in u_vbuf_translate_begin()
750 if (!(mgr->ve->incompatible_elem_mask & (1 << i)) && in u_vbuf_translate_begin()
764 if (mgr->ve->ve[i].src_format != output_format) in u_vbuf_translate_begin()
776 te->input_format = mgr->ve->ve[i].src_format; in u_vbuf_translate_begin()
777 te->input_offset = mgr->ve->ve[i].src_offset; in u_vbuf_translate_begin()
782 te->output_offset % mgr->ve->component_size[i] != 0) { in u_vbuf_translate_begin()
783 unsigned aligned = align(te->output_offset, mgr->ve->component_size[i]); in u_vbuf_translate_begin()
788 k->output_stride += mgr->ve->native_format_size[i] + adjustment; in u_vbuf_translate_begin()
790 min_alignment[type] = MAX2(min_alignment[type], mgr->ve->component_size[i]); in u_vbuf_translate_begin()
814 for (i = 0; i < mgr->ve->count; i++) { in u_vbuf_translate_begin()
818 mgr->fallback_velems.velems[i].instance_divisor = mgr->ve->ve[i].instance_divisor; in u_vbuf_translate_begin()
831 memcpy(&mgr->fallback_velems.velems[i], &mgr->ve->ve[i], in u_vbuf_translate_begin()
836 mgr->fallback_velems.count = mgr->ve->count; in u_vbuf_translate_begin()
848 mgr->pipe->bind_vertex_elements_state(mgr->pipe, mgr->ve->driver_cso); in u_vbuf_translate_end()
874 struct u_vbuf_elements *ve = CALLOC_STRUCT(u_vbuf_elements); in u_vbuf_create_vertex_elements() local
877 ve->count = count; in u_vbuf_create_vertex_elements()
879 memcpy(ve->ve, attribs, sizeof(struct pipe_vertex_element) * count); in u_vbuf_create_vertex_elements()
885 enum pipe_format format = ve->ve[i].src_format; in u_vbuf_create_vertex_elements()
886 unsigned vb_index_bit = 1 << ve->ve[i].vertex_buffer_index; in u_vbuf_create_vertex_elements()
888 ve->src_format_size[i] = util_format_get_blocksize(format); in u_vbuf_create_vertex_elements()
891 ve->interleaved_vb_mask |= vb_index_bit; in u_vbuf_create_vertex_elements()
895 if (!ve->ve[i].instance_divisor) { in u_vbuf_create_vertex_elements()
896 ve->noninstance_vb_mask_any |= vb_index_bit; in u_vbuf_create_vertex_elements()
902 ve->native_format[i] = format; in u_vbuf_create_vertex_elements()
903 ve->native_format_size[i] = in u_vbuf_create_vertex_elements()
904 util_format_get_blocksize(ve->native_format[i]); in u_vbuf_create_vertex_elements()
911 … ve->native_format_size[i] : (ve->native_format_size[i] / desc->nr_channels); in u_vbuf_create_vertex_elements()
912 ve->component_size[i] = component_size; in u_vbuf_create_vertex_elements()
914 if (ve->ve[i].src_format != format || in u_vbuf_create_vertex_elements()
916 ve->ve[i].src_offset % 4 != 0) || in u_vbuf_create_vertex_elements()
918 ve->ve[i].src_offset % component_size != 0)) { in u_vbuf_create_vertex_elements()
919 ve->incompatible_elem_mask |= 1 << i; in u_vbuf_create_vertex_elements()
920 ve->incompatible_vb_mask_any |= vb_index_bit; in u_vbuf_create_vertex_elements()
922 ve->compatible_vb_mask_any |= vb_index_bit; in u_vbuf_create_vertex_elements()
924 ve->vb_align_mask[0] |= vb_index_bit; in u_vbuf_create_vertex_elements()
926 ve->vb_align_mask[1] |= vb_index_bit; in u_vbuf_create_vertex_elements()
936 ve->incompatible_vb_mask_any = used_buffers; in u_vbuf_create_vertex_elements()
937 ve->compatible_vb_mask_any = 0; in u_vbuf_create_vertex_elements()
938 ve->incompatible_elem_mask = u_bit_consecutive(0, count); in u_vbuf_create_vertex_elements()
941 ve->used_vb_mask = used_buffers; in u_vbuf_create_vertex_elements()
942 ve->compatible_vb_mask_all = ~ve->incompatible_vb_mask_any & used_buffers; in u_vbuf_create_vertex_elements()
943 ve->incompatible_vb_mask_all = ~ve->compatible_vb_mask_any & used_buffers; in u_vbuf_create_vertex_elements()
948 ve->native_format_size[i] = align(ve->native_format_size[i], 4); in u_vbuf_create_vertex_elements()
949 driver_attribs[i].src_offset = align(ve->ve[i].src_offset, 4); in u_vbuf_create_vertex_elements()
954 if (!ve->incompatible_elem_mask) { in u_vbuf_create_vertex_elements()
955 ve->driver_cso = in u_vbuf_create_vertex_elements()
959 return ve; in u_vbuf_create_vertex_elements()
967 struct u_vbuf_elements *ve = (struct u_vbuf_elements*)cso->data; in u_vbuf_delete_vertex_elements() local
969 if (ve->driver_cso) in u_vbuf_delete_vertex_elements()
970 pipe->delete_vertex_elements_state(pipe, ve->driver_cso); in u_vbuf_delete_vertex_elements()
971 FREE(ve); in u_vbuf_delete_vertex_elements()
1120 struct u_vbuf_elements *ve, in get_upload_offset_size() argument
1136 *size = ve->src_format_size[velem_index]; in get_upload_offset_size()
1150 *size = vb->stride * (count - 1) + ve->src_format_size[velem_index]; in get_upload_offset_size()
1154 *size = vb->stride * (num_vertices - 1) + ve->src_format_size[velem_index]; in get_upload_offset_size()
1166 struct u_vbuf_elements *ve = mgr->ve; in u_vbuf_upload_buffers() local
1167 unsigned nr_velems = ve->count; in u_vbuf_upload_buffers()
1169 mgr->using_translate ? mgr->fallback_velems.velems : ve->ve; in u_vbuf_upload_buffers()
1172 if ((ve->interleaved_vb_mask & mgr->user_vb_mask) == 0) { in u_vbuf_upload_buffers()
1179 if (!get_upload_offset_size(mgr, vb, ve, velem, index, i, start_vertex, in u_vbuf_upload_buffers()
1211 if (!get_upload_offset_size(mgr, vb, ve, velem, index, i, start_vertex, in u_vbuf_upload_buffers()
1265 return (mgr->ve->used_vb_mask & in u_vbuf_need_minmax_index()
1269 mgr->ve->incompatible_vb_mask_any) & in u_vbuf_need_minmax_index()
1270 mgr->ve->noninstance_vb_mask_any & in u_vbuf_need_minmax_index()
1280 return (mgr->ve->used_vb_mask & in u_vbuf_mapping_vertex_buffer_blocks()
1284 mgr->ve->compatible_vb_mask_all & in u_vbuf_mapping_vertex_buffer_blocks()
1285 mgr->ve->noninstance_vb_mask_any & in u_vbuf_mapping_vertex_buffer_blocks()
1468 const uint32_t used_vb_mask = mgr->ve->used_vb_mask; in u_vbuf_draw_vbo()
1477 misaligned |= mgr->ve->vb_align_mask[i] & mgr->unaligned_vb_mask[i]; in u_vbuf_draw_vbo()
1485 !mgr->ve->incompatible_elem_mask && in u_vbuf_draw_vbo()
1550 mgr->ve->incompatible_elem_mask) { in u_vbuf_draw_vbo()
1705 mgr->ve->noninstance_vb_mask_any); in u_vbuf_draw_vbo()
1722 mgr->ve->incompatible_elem_mask) { in u_vbuf_draw_vbo()
1740 mgr->ve->incompatible_vb_mask_all); in u_vbuf_draw_vbo()
1806 mgr->ve_saved = mgr->ve; in u_vbuf_save_vertex_elements()
1811 if (mgr->ve != mgr->ve_saved) { in u_vbuf_restore_vertex_elements()
1814 mgr->ve = mgr->ve_saved; in u_vbuf_restore_vertex_elements()
1816 mgr->ve ? mgr->ve->driver_cso : NULL); in u_vbuf_restore_vertex_elements()
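Taken together, the hits in u_vbuf_create_vertex_elements() (lines 874-959 above) outline the bookkeeping kept per vertex-elements state. The C sketch below reconstructs struct u_vbuf_elements purely from the field names referenced in this listing; the integer types, array bounds, and comments are editorial assumptions for readability, not a verbatim copy of the declaration in u_vbuf.c.

```c
/* Sketch of struct u_vbuf_elements as implied by the hits above.
 * Field names come from the listing; types and comments are assumptions. */
#include <stdint.h>
#include "pipe/p_state.h"   /* struct pipe_vertex_element, PIPE_MAX_ATTRIBS */

struct u_vbuf_elements {
   unsigned count;                                   /* number of active elements (line 877) */
   struct pipe_vertex_element ve[PIPE_MAX_ATTRIBS];  /* copy of the application state (lines 106, 879) */

   /* Per-element format bookkeeping filled in around lines 885-912. */
   unsigned src_format_size[PIPE_MAX_ATTRIBS];       /* blocksize of the requested src_format */
   enum pipe_format native_format[PIPE_MAX_ATTRIBS]; /* format the driver can actually consume */
   unsigned native_format_size[PIPE_MAX_ATTRIBS];
   unsigned component_size[PIPE_MAX_ATTRIBS];

   /* Per-vertex-buffer bitmasks built around lines 891-943; one bit per
    * vertex_buffer_index. "any" means at least one element referencing the
    * buffer has the property, "all" means every referencing element does. */
   uint32_t used_vb_mask;
   uint32_t interleaved_vb_mask;        /* buffers referenced by more than one element */
   uint32_t noninstance_vb_mask_any;    /* buffers with a non-instanced element (line 895-896) */
   uint32_t incompatible_elem_mask;     /* per-element mask, not per-buffer (line 919) */
   uint32_t incompatible_vb_mask_any;
   uint32_t incompatible_vb_mask_all;
   uint32_t compatible_vb_mask_any;
   uint32_t compatible_vb_mask_all;
   uint32_t vb_align_mask[2];           /* buffers with alignment requirements, checked
                                           against unaligned_vb_mask at line 1477 */

   void *driver_cso;                    /* driver state object, bound at lines 415 and 848 */
};
```

As lines 942-943 show, the "all" masks are derived rather than accumulated: a buffer lands in compatible_vb_mask_all when no element referencing it is incompatible (~incompatible_vb_mask_any & used_buffers), and symmetrically for incompatible_vb_mask_all.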