Lines matching refs:ve (cross-reference hits for the symbol ve in Mesa's Gallium vertex buffer manager, u_vbuf.c)
103 struct pipe_vertex_element ve[PIPE_MAX_ATTRIBS]; member
170 struct u_vbuf_elements *ve, *ve_saved; member
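The two member hits above come from u_vbuf's vertex-element bookkeeping. As a reading aid, here is a minimal sketch of struct u_vbuf_elements assembled only from the fields that appear later in this listing; field order, exact integer types, and any members not referenced here are assumptions, not the real definition in u_vbuf.c.

/* Sketch only: reconstructed from the fields referenced in this listing,
 * assuming the usual Gallium headers for pipe_vertex_element,
 * pipe_format and PIPE_MAX_ATTRIBS. */
struct u_vbuf_elements {
   unsigned count;                                   /* number of vertex elements */
   struct pipe_vertex_element ve[PIPE_MAX_ATTRIBS];  /* the original elements (line 103) */

   /* per-element format information */
   enum pipe_format native_format[PIPE_MAX_ATTRIBS];
   unsigned native_format_size[PIPE_MAX_ATTRIBS];
   unsigned src_format_size[PIPE_MAX_ATTRIBS];

   /* per-element and per-vertex-buffer bitmasks driving the fallback paths */
   uint32_t incompatible_elem_mask;
   uint32_t incompatible_vb_mask_any, incompatible_vb_mask_all;
   uint32_t compatible_vb_mask_any, compatible_vb_mask_all;
   uint32_t noninstance_vb_mask_any;
   uint32_t interleaved_vb_mask;
   uint32_t used_vb_mask;

   void *driver_cso;  /* driver CSO, created only when no element is incompatible */
};
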
346 struct u_vbuf_elements *ve; in u_vbuf_set_vertex_elements_internal() local
364 ve = cso->data; in u_vbuf_set_vertex_elements_internal()
366 ve = ((struct cso_velements *)cso_hash_iter_data(iter))->data; in u_vbuf_set_vertex_elements_internal()
369 assert(ve); in u_vbuf_set_vertex_elements_internal()
371 if (ve != mgr->ve) in u_vbuf_set_vertex_elements_internal()
372 pipe->bind_vertex_elements_state(pipe, ve->driver_cso); in u_vbuf_set_vertex_elements_internal()
374 return ve; in u_vbuf_set_vertex_elements_internal()
380 mgr->ve = u_vbuf_set_vertex_elements_internal(mgr, velems); in u_vbuf_set_vertex_elements()
385 mgr->ve = NULL; in u_vbuf_unset_vertex_elements()
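The hits between lines 346 and 385 show the CSO-cache pattern used for vertex element state: the incoming state is resolved to a cached u_vbuf_elements, and the driver CSO is rebound only when the pointer actually changes. A minimal sketch of that flow; lookup_or_create_velems() is a hypothetical stand-in for the cso_hash lookup/creation code that the listing elides.

/* Sketch only: lookup_or_create_velems() is hypothetical and stands in for
 * the cso_hash lookup (line 366) or creation (line 364) of the CSO data. */
struct u_vbuf_elements *ve = lookup_or_create_velems(mgr, velems);
assert(ve);

if (ve != mgr->ve)                                /* rebind only on a real change */
   pipe->bind_vertex_elements_state(pipe, ve->driver_cso);

/* u_vbuf_set_vertex_elements() then stores the result in mgr->ve;
 * u_vbuf_unset_vertex_elements() simply clears mgr->ve to NULL. */
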
558 mgr->ve->incompatible_vb_mask_all | mgr->incompatible_vb_mask | in u_vbuf_translate_find_free_vb_slots()
622 mgr->ve->used_vb_mask; in u_vbuf_translate_begin()
641 for (i = 0; i < mgr->ve->count; i++) { in u_vbuf_translate_begin()
642 unsigned vb_index = mgr->ve->ve[i].vertex_buffer_index; in u_vbuf_translate_begin()
645 if (!(mgr->ve->incompatible_elem_mask & (1 << i)) && in u_vbuf_translate_begin()
650 } else if (mgr->ve->ve[i].instance_divisor) { in u_vbuf_translate_begin()
651 if (!(mgr->ve->incompatible_elem_mask & (1 << i)) && in u_vbuf_translate_begin()
658 !(mgr->ve->incompatible_elem_mask & (1 << i)) && in u_vbuf_translate_begin()
674 for (i = 0; i < mgr->ve->count; i++) { in u_vbuf_translate_begin()
677 enum pipe_format output_format = mgr->ve->native_format[i]; in u_vbuf_translate_begin()
678 unsigned bit, vb_index = mgr->ve->ve[i].vertex_buffer_index; in u_vbuf_translate_begin()
681 if (!(mgr->ve->incompatible_elem_mask & (1 << i)) && in u_vbuf_translate_begin()
695 if (mgr->ve->ve[i].src_format != output_format) in u_vbuf_translate_begin()
707 te->input_format = mgr->ve->ve[i].src_format; in u_vbuf_translate_begin()
708 te->input_offset = mgr->ve->ve[i].src_offset; in u_vbuf_translate_begin()
712 k->output_stride += mgr->ve->native_format_size[i]; in u_vbuf_translate_begin()
735 for (i = 0; i < mgr->ve->count; i++) { in u_vbuf_translate_begin()
739 mgr->fallback_velems.velems[i].instance_divisor = mgr->ve->ve[i].instance_divisor; in u_vbuf_translate_begin()
752 memcpy(&mgr->fallback_velems.velems[i], &mgr->ve->ve[i], in u_vbuf_translate_begin()
757 mgr->fallback_velems.count = mgr->ve->count; in u_vbuf_translate_begin()
769 mgr->pipe->bind_vertex_elements_state(mgr->pipe, mgr->ve->driver_cso); in u_vbuf_translate_end()
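Within u_vbuf_translate_begin() (lines 622-757), after the per-element translate keys are built, a fallback vertex-element array is assembled: elements that were translated keep their instance divisor but are repointed at the translated output (the repointing itself is not part of this listing), while untouched elements are copied verbatim. A sketch under those assumptions; translated_elem_mask is a hypothetical name for "this element was rewritten", not a field from the source.

/* Sketch of the fallback vertex-element construction (lines 735-757). */
for (i = 0; i < mgr->ve->count; i++) {
   if (translated_elem_mask & (1u << i)) {   /* hypothetical mask */
      /* rewritten element: keep the divisor; the format/offset/buffer index
       * come from the translate output (elided here) */
      mgr->fallback_velems.velems[i].instance_divisor =
         mgr->ve->ve[i].instance_divisor;
   } else {
      /* element untouched by translation: copy it as-is */
      memcpy(&mgr->fallback_velems.velems[i], &mgr->ve->ve[i],
             sizeof(struct pipe_vertex_element));
   }
}
mgr->fallback_velems.count = mgr->ve->count;
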
792 struct u_vbuf_elements *ve = CALLOC_STRUCT(u_vbuf_elements); in u_vbuf_create_vertex_elements() local
795 ve->count = count; in u_vbuf_create_vertex_elements()
797 memcpy(ve->ve, attribs, sizeof(struct pipe_vertex_element) * count); in u_vbuf_create_vertex_elements()
803 enum pipe_format format = ve->ve[i].src_format; in u_vbuf_create_vertex_elements()
804 unsigned vb_index_bit = 1 << ve->ve[i].vertex_buffer_index; in u_vbuf_create_vertex_elements()
806 ve->src_format_size[i] = util_format_get_blocksize(format); in u_vbuf_create_vertex_elements()
809 ve->interleaved_vb_mask |= vb_index_bit; in u_vbuf_create_vertex_elements()
813 if (!ve->ve[i].instance_divisor) { in u_vbuf_create_vertex_elements()
814 ve->noninstance_vb_mask_any |= vb_index_bit; in u_vbuf_create_vertex_elements()
820 ve->native_format[i] = format; in u_vbuf_create_vertex_elements()
821 ve->native_format_size[i] = in u_vbuf_create_vertex_elements()
822 util_format_get_blocksize(ve->native_format[i]); in u_vbuf_create_vertex_elements()
824 if (ve->ve[i].src_format != format || in u_vbuf_create_vertex_elements()
826 ve->ve[i].src_offset % 4 != 0)) { in u_vbuf_create_vertex_elements()
827 ve->incompatible_elem_mask |= 1 << i; in u_vbuf_create_vertex_elements()
828 ve->incompatible_vb_mask_any |= vb_index_bit; in u_vbuf_create_vertex_elements()
830 ve->compatible_vb_mask_any |= vb_index_bit; in u_vbuf_create_vertex_elements()
840 ve->incompatible_vb_mask_any = used_buffers; in u_vbuf_create_vertex_elements()
841 ve->compatible_vb_mask_any = 0; in u_vbuf_create_vertex_elements()
842 ve->incompatible_elem_mask = u_bit_consecutive(0, count); in u_vbuf_create_vertex_elements()
845 ve->used_vb_mask = used_buffers; in u_vbuf_create_vertex_elements()
846 ve->compatible_vb_mask_all = ~ve->incompatible_vb_mask_any & used_buffers; in u_vbuf_create_vertex_elements()
847 ve->incompatible_vb_mask_all = ~ve->compatible_vb_mask_any & used_buffers; in u_vbuf_create_vertex_elements()
852 ve->native_format_size[i] = align(ve->native_format_size[i], 4); in u_vbuf_create_vertex_elements()
853 driver_attribs[i].src_offset = align(ve->ve[i].src_offset, 4); in u_vbuf_create_vertex_elements()
858 if (!ve->incompatible_elem_mask) { in u_vbuf_create_vertex_elements()
859 ve->driver_cso = in u_vbuf_create_vertex_elements()
863 return ve; in u_vbuf_create_vertex_elements()
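The mask derivation at lines 840-847 is easiest to see with concrete numbers. A self-contained example with made-up masks, not values from any real driver: the _any masks record buffers that have at least one (in)compatible element, and the _all masks derived from them keep only buffers whose elements are uniformly one or the other.

#include <assert.h>
#include <stdint.h>

int main(void)
{
   /* Hypothetical scenario: vertex buffers 0, 1 and 3 are referenced.
    * Buffer 0: all elements compatible; buffer 1: mixed; buffer 3: all incompatible. */
   uint32_t used_buffers             = 0xB;  /* buffers 0, 1, 3 */
   uint32_t compatible_vb_mask_any   = 0x3;  /* buffers 0, 1 have >= 1 good element */
   uint32_t incompatible_vb_mask_any = 0xA;  /* buffers 1, 3 have >= 1 bad element  */

   /* Same derivation as lines 845-847. */
   uint32_t used_vb_mask             = used_buffers;
   uint32_t compatible_vb_mask_all   = ~incompatible_vb_mask_any & used_buffers;
   uint32_t incompatible_vb_mask_all = ~compatible_vb_mask_any & used_buffers;

   assert(used_vb_mask == 0xB);
   assert(compatible_vb_mask_all == 0x1);   /* only buffer 0 is fully compatible   */
   assert(incompatible_vb_mask_all == 0x8); /* only buffer 3 is fully incompatible */
   return 0;
}
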
869 struct u_vbuf_elements *ve = cso; in u_vbuf_delete_vertex_elements() local
871 if (ve->driver_cso) in u_vbuf_delete_vertex_elements()
872 pipe->delete_vertex_elements_state(pipe, ve->driver_cso); in u_vbuf_delete_vertex_elements()
873 FREE(ve); in u_vbuf_delete_vertex_elements()
967 struct u_vbuf_elements *ve, in get_upload_offset_size() argument
983 *size = ve->src_format_size[velem_index]; in get_upload_offset_size()
997 *size = vb->stride * (count - 1) + ve->src_format_size[velem_index]; in get_upload_offset_size()
1001 *size = vb->stride * (num_vertices - 1) + ve->src_format_size[velem_index]; in get_upload_offset_size()
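The size computation at lines 997 and 1001 follows the usual "the last vertex needs only the element, not a full stride" rule: the upload covers count - 1 full strides plus the size of one element in its source format. A tiny worked example with hypothetical numbers:

#include <assert.h>

int main(void)
{
   /* Hypothetical numbers: 4 vertices, a 20-byte stride, a 12-byte attribute. */
   unsigned stride = 20, src_format_size = 12, count = 4;
   unsigned size = stride * (count - 1) + src_format_size;
   assert(size == 72);   /* 3 full strides + one attribute's worth of bytes */
   return 0;
}
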
1013 struct u_vbuf_elements *ve = mgr->ve; in u_vbuf_upload_buffers() local
1014 unsigned nr_velems = ve->count; in u_vbuf_upload_buffers()
1016 mgr->using_translate ? mgr->fallback_velems.velems : ve->ve; in u_vbuf_upload_buffers()
1019 if ((ve->interleaved_vb_mask & mgr->user_vb_mask) == 0) { in u_vbuf_upload_buffers()
1026 if (!get_upload_offset_size(mgr, vb, ve, velem, index, i, start_vertex, in u_vbuf_upload_buffers()
1058 if (!get_upload_offset_size(mgr, vb, ve, velem, index, i, start_vertex, in u_vbuf_upload_buffers()
1112 return (mgr->ve->used_vb_mask & in u_vbuf_need_minmax_index()
1115 mgr->ve->incompatible_vb_mask_any) & in u_vbuf_need_minmax_index()
1116 mgr->ve->noninstance_vb_mask_any & in u_vbuf_need_minmax_index()
1126 return (mgr->ve->used_vb_mask & in u_vbuf_mapping_vertex_buffer_blocks()
1129 mgr->ve->compatible_vb_mask_all & in u_vbuf_mapping_vertex_buffer_blocks()
1130 mgr->ve->noninstance_vb_mask_any & in u_vbuf_mapping_vertex_buffer_blocks()
1286 const uint32_t used_vb_mask = mgr->ve->used_vb_mask; in u_vbuf_draw_vbo()
1294 !mgr->ve->incompatible_elem_mask && in u_vbuf_draw_vbo()
1344 mgr->ve->incompatible_elem_mask) { in u_vbuf_draw_vbo()
1496 mgr->ve->noninstance_vb_mask_any); in u_vbuf_draw_vbo()
1513 mgr->ve->incompatible_elem_mask) { in u_vbuf_draw_vbo()
1529 mgr->ve->incompatible_vb_mask_all); in u_vbuf_draw_vbo()
1578 mgr->ve_saved = mgr->ve; in u_vbuf_save_vertex_elements()
1583 if (mgr->ve != mgr->ve_saved) { in u_vbuf_restore_vertex_elements()
1586 mgr->ve = mgr->ve_saved; in u_vbuf_restore_vertex_elements()
1588 mgr->ve ? mgr->ve->driver_cso : NULL); in u_vbuf_restore_vertex_elements()
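The last four hits show the save/restore helpers used around meta operations: saving just stashes the current pointer, and restoring rebinds the driver CSO only if the vertex-element state changed in between, binding NULL when nothing was bound. A minimal sketch assembled from lines 1578-1588; the function signatures and surrounding plumbing are assumed rather than copied from the source.

/* Sketch only; signatures and the mgr/pipe plumbing are assumed. */
static void u_vbuf_save_vertex_elements(struct u_vbuf *mgr)
{
   mgr->ve_saved = mgr->ve;
}

static void u_vbuf_restore_vertex_elements(struct u_vbuf *mgr)
{
   if (mgr->ve != mgr->ve_saved) {
      mgr->ve = mgr->ve_saved;
      mgr->pipe->bind_vertex_elements_state(mgr->pipe,
                                            mgr->ve ? mgr->ve->driver_cso : NULL);
   }
}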