Lines matching the whole-word search `exec` in Mesa's immediate-mode VBO execution (vbo_exec) code. Each hit shows its source line number, the matching line, and the enclosing function.
55 vbo_reset_all_attr(struct vbo_exec_context *exec);
64 vbo_exec_wrap_buffers(struct vbo_exec_context *exec) in vbo_exec_wrap_buffers() argument
66 if (exec->vtx.prim_count == 0) { in vbo_exec_wrap_buffers()
67 exec->vtx.copied.nr = 0; in vbo_exec_wrap_buffers()
68 exec->vtx.vert_count = 0; in vbo_exec_wrap_buffers()
69 exec->vtx.buffer_ptr = exec->vtx.buffer_map; in vbo_exec_wrap_buffers()
72 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_wrap_buffers()
73 unsigned last = exec->vtx.prim_count - 1; in vbo_exec_wrap_buffers()
74 struct pipe_draw_start_count_bias *last_draw = &exec->vtx.draw[last]; in vbo_exec_wrap_buffers()
75 const bool last_begin = exec->vtx.markers[last].begin; in vbo_exec_wrap_buffers()
79 last_draw->count = exec->vtx.vert_count - last_draw->start; in vbo_exec_wrap_buffers()
81 exec->vtx.markers[last].end = 0; in vbo_exec_wrap_buffers()
85 if (exec->vtx.mode[last] == GL_LINE_LOOP && in vbo_exec_wrap_buffers()
87 !exec->vtx.markers[last].end) { in vbo_exec_wrap_buffers()
89 exec->vtx.mode[last] = GL_LINE_STRIP; in vbo_exec_wrap_buffers()
102 if (exec->vtx.vert_count) in vbo_exec_wrap_buffers()
103 vbo_exec_vtx_flush(exec); in vbo_exec_wrap_buffers()
105 exec->vtx.prim_count = 0; in vbo_exec_wrap_buffers()
106 exec->vtx.copied.nr = 0; in vbo_exec_wrap_buffers()
111 assert(exec->vtx.prim_count == 0); in vbo_exec_wrap_buffers()
114 exec->vtx.mode[0] = ctx->Driver.CurrentExecPrimitive; in vbo_exec_wrap_buffers()
115 exec->vtx.draw[0].start = 0; in vbo_exec_wrap_buffers()
116 exec->vtx.markers[0].begin = 0; in vbo_exec_wrap_buffers()
117 exec->vtx.prim_count++; in vbo_exec_wrap_buffers()
119 if (exec->vtx.copied.nr == last_count) in vbo_exec_wrap_buffers()
120 exec->vtx.markers[0].begin = last_begin; in vbo_exec_wrap_buffers()
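
The hits above trace vbo_exec_wrap_buffers(): when the vertex store fills mid-primitive, the open draw is terminated at the current vertex count, everything recorded so far is flushed, and the primitive is reopened at vertex 0 of a fresh buffer (a wrapped GL_LINE_LOOP is demoted to GL_LINE_STRIP along the way). A minimal sketch of that control flow, using simplified stand-in types rather than Mesa's vbo_exec_context (the 32-draw cap and field names are illustrative):

    #include <stdbool.h>

    struct wrap_sketch {
       unsigned prim_count, vert_count;
       unsigned mode[32];
       struct { unsigned start, count; } draw[32];
       bool begin[32];            /* does this draw contain the real glBegin? */
       unsigned copied_nr;        /* vertices carried into the next buffer */
       unsigned current_mode;     /* mode of the still-open glBegin/glEnd */
    };

    /* Stand-in for vbo_exec_vtx_flush: submit the recorded draws, save the
     * trailing vertices needed to continue the primitive, rewind counters. */
    static void flush_draws(struct wrap_sketch *e)
    {
       e->prim_count = 0;
       e->vert_count = 0;
    }

    static void wrap_buffers(struct wrap_sketch *e)
    {
       if (e->prim_count == 0) {  /* nothing recorded: just rewind */
          e->copied_nr = 0;
          e->vert_count = 0;
          return;
       }

       unsigned last = e->prim_count - 1;
       bool last_begin = e->begin[last];

       /* Terminate the open draw at the current vertex count. */
       e->draw[last].count = e->vert_count - e->draw[last].start;
       unsigned last_count = e->draw[last].count;

       if (e->vert_count)
          flush_draws(e);
       else {
          e->prim_count = 0;
          e->copied_nr = 0;
       }

       /* Reopen the primitive at vertex 0 of the fresh buffer; it keeps
        * its begin flag only if every one of its vertices carried over. */
       e->mode[0] = e->current_mode;
       e->draw[0].start = 0;
       e->begin[0] = (e->copied_nr == last_count) && last_begin;
       e->prim_count = 1;
    }
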
131 vbo_exec_vtx_wrap(struct vbo_exec_context *exec) in vbo_exec_vtx_wrap() argument
136 * to exec->vtx.copied. in vbo_exec_vtx_wrap()
138 vbo_exec_wrap_buffers(exec); in vbo_exec_vtx_wrap()
140 if (!exec->vtx.buffer_ptr) { in vbo_exec_vtx_wrap()
147 assert(exec->vtx.max_vert - exec->vtx.vert_count > exec->vtx.copied.nr); in vbo_exec_vtx_wrap()
149 numComponents = exec->vtx.copied.nr * exec->vtx.vertex_size; in vbo_exec_vtx_wrap()
150 memcpy(exec->vtx.buffer_ptr, in vbo_exec_vtx_wrap()
151 exec->vtx.copied.buffer, in vbo_exec_vtx_wrap()
153 exec->vtx.buffer_ptr += numComponents; in vbo_exec_vtx_wrap()
154 exec->vtx.vert_count += exec->vtx.copied.nr; in vbo_exec_vtx_wrap()
156 exec->vtx.copied.nr = 0; in vbo_exec_vtx_wrap()
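
vbo_exec_vtx_wrap() then replays the saved vertices (e.g. the dangling vertex of a line strip) at the front of the new buffer. A sketch of that step, assuming plain float arrays in place of Mesa's fi_type buffer:

    #include <assert.h>
    #include <string.h>

    static void replay_copied(float **buffer_ptr, unsigned *vert_count,
                              unsigned max_vert,
                              const float *copied, unsigned *copied_nr,
                              unsigned vertex_size /* floats per vertex */)
    {
       /* The new buffer must have room for the carried-over vertices. */
       assert(max_vert - *vert_count > *copied_nr);

       unsigned num_components = *copied_nr * vertex_size;
       memcpy(*buffer_ptr, copied, num_components * sizeof(float));
       *buffer_ptr += num_components;
       *vert_count += *copied_nr;
       *copied_nr = 0;
    }
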
164 vbo_exec_copy_to_current(struct vbo_exec_context *exec) in vbo_exec_copy_to_current() argument
166 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_copy_to_current()
168 GLbitfield64 enabled = exec->vtx.enabled & (~BITFIELD64_BIT(VBO_ATTRIB_POS)); in vbo_exec_copy_to_current()
174 /* Note: the exec->vtx.current[i] pointers point into the in vbo_exec_copy_to_current()
181 assert(exec->vtx.attr[i].size); in vbo_exec_copy_to_current()
187 if (exec->vtx.attr[i].type == GL_DOUBLE || in vbo_exec_copy_to_current()
188 exec->vtx.attr[i].type == GL_UNSIGNED_INT64_ARB) { in vbo_exec_copy_to_current()
190 memcpy(tmp, exec->vtx.attrptr[i], exec->vtx.attr[i].size * sizeof(GLfloat)); in vbo_exec_copy_to_current()
194 exec->vtx.attr[i].size, in vbo_exec_copy_to_current()
195 exec->vtx.attrptr[i], in vbo_exec_copy_to_current()
196 exec->vtx.attr[i].type); in vbo_exec_copy_to_current()
225 if (exec->vtx.attr[i].type != vbo->current[i].Format.Type || in vbo_exec_copy_to_current()
226 (exec->vtx.attr[i].size >> dmul_shift) != vbo->current[i].Format.Size) { in vbo_exec_copy_to_current()
228 exec->vtx.attr[i].size >> dmul_shift, in vbo_exec_copy_to_current()
229 exec->vtx.attr[i].type); in vbo_exec_copy_to_current()
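
vbo_exec_copy_to_current() propagates the last-written value of every enabled attribute (position excluded) into the context's current-attribute state, so glGet of GL_CURRENT_COLOR and friends stays correct, and re-registers the attribute format when size or type changed. The core expansion step might look like this sketch; the (0,0,0,1) defaults are the conventional GL current-attribute defaults, and as the listing shows, GL_DOUBLE/GL_UNSIGNED_INT64 attributes are copied bit-exactly rather than converted:

    #include <string.h>

    /* Expand a 1..4 component float attribute into a full vec4 "current"
     * value, leaving unwritten components at the GL defaults. */
    static void copy_attr_to_current(float current[4],
                                     const float *attr, int size)
    {
       const float defaults[4] = { 0.0f, 0.0f, 0.0f, 1.0f };
       memcpy(current, defaults, sizeof(defaults));
       memcpy(current, attr, size * sizeof(float));
    }
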
249 vbo_exec_wrap_upgrade_vertex(struct vbo_exec_context *exec, in vbo_exec_wrap_upgrade_vertex() argument
252 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_wrap_upgrade_vertex()
254 const GLint lastcount = exec->vtx.vert_count; in vbo_exec_wrap_upgrade_vertex()
256 const GLuint old_vtx_size_no_pos = exec->vtx.vertex_size_no_pos; in vbo_exec_wrap_upgrade_vertex()
257 const GLuint old_vtx_size = exec->vtx.vertex_size; /* floats per vertex */ in vbo_exec_wrap_upgrade_vertex()
258 const GLuint oldSize = exec->vtx.attr[attr].size; in vbo_exec_wrap_upgrade_vertex()
263 if (unlikely(!exec->vtx.buffer_ptr)) { in vbo_exec_wrap_upgrade_vertex()
265 assert(exec->vtx.bufferobj); in vbo_exec_wrap_upgrade_vertex()
266 vbo_exec_vtx_map(exec); in vbo_exec_wrap_upgrade_vertex()
267 assert(exec->vtx.buffer_ptr); in vbo_exec_wrap_upgrade_vertex()
271 * to exec->vtx.copied. in vbo_exec_wrap_upgrade_vertex()
273 vbo_exec_wrap_buffers(exec); in vbo_exec_wrap_upgrade_vertex()
275 if (unlikely(exec->vtx.copied.nr)) { in vbo_exec_wrap_upgrade_vertex()
280 memcpy(old_attrptr, exec->vtx.attrptr, sizeof(old_attrptr)); in vbo_exec_wrap_upgrade_vertex()
287 !oldSize && lastcount > 8 && exec->vtx.vertex_size) { in vbo_exec_wrap_upgrade_vertex()
288 vbo_exec_copy_to_current(exec); in vbo_exec_wrap_upgrade_vertex()
289 vbo_reset_all_attr(exec); in vbo_exec_wrap_upgrade_vertex()
294 exec->vtx.attr[attr].size = newSize; in vbo_exec_wrap_upgrade_vertex()
295 exec->vtx.attr[attr].active_size = newSize; in vbo_exec_wrap_upgrade_vertex()
296 exec->vtx.attr[attr].type = newType; in vbo_exec_wrap_upgrade_vertex()
297 exec->vtx.vertex_size += newSize - oldSize; in vbo_exec_wrap_upgrade_vertex()
298 exec->vtx.vertex_size_no_pos = exec->vtx.vertex_size - exec->vtx.attr[0].size; in vbo_exec_wrap_upgrade_vertex()
299 exec->vtx.max_vert = vbo_compute_max_verts(exec); in vbo_exec_wrap_upgrade_vertex()
300 exec->vtx.vert_count = 0; in vbo_exec_wrap_upgrade_vertex()
301 exec->vtx.buffer_ptr = exec->vtx.buffer_map; in vbo_exec_wrap_upgrade_vertex()
302 exec->vtx.enabled |= BITFIELD64_BIT(attr); in vbo_exec_wrap_upgrade_vertex()
306 unsigned offset = exec->vtx.attrptr[attr] - exec->vtx.vertex; in vbo_exec_wrap_upgrade_vertex()
311 fi_type *old_first = exec->vtx.attrptr[attr] + oldSize; in vbo_exec_wrap_upgrade_vertex()
312 fi_type *new_first = exec->vtx.attrptr[attr] + newSize; in vbo_exec_wrap_upgrade_vertex()
313 fi_type *old_last = exec->vtx.vertex + old_vtx_size_no_pos - 1; in vbo_exec_wrap_upgrade_vertex()
314 fi_type *new_last = exec->vtx.vertex + exec->vtx.vertex_size_no_pos - 1; in vbo_exec_wrap_upgrade_vertex()
341 GLbitfield64 enabled = exec->vtx.enabled & in vbo_exec_wrap_upgrade_vertex()
347 if (exec->vtx.attrptr[i] > exec->vtx.attrptr[attr]) in vbo_exec_wrap_upgrade_vertex()
348 exec->vtx.attrptr[i] += size_diff; in vbo_exec_wrap_upgrade_vertex()
353 exec->vtx.attrptr[attr] = exec->vtx.vertex + in vbo_exec_wrap_upgrade_vertex()
354 exec->vtx.vertex_size_no_pos - newSize; in vbo_exec_wrap_upgrade_vertex()
359 exec->vtx.attrptr[0] = exec->vtx.vertex + exec->vtx.vertex_size_no_pos; in vbo_exec_wrap_upgrade_vertex()
366 if (unlikely(exec->vtx.copied.nr)) { in vbo_exec_wrap_upgrade_vertex()
367 fi_type *data = exec->vtx.copied.buffer; in vbo_exec_wrap_upgrade_vertex()
368 fi_type *dest = exec->vtx.buffer_ptr; in vbo_exec_wrap_upgrade_vertex()
370 assert(exec->vtx.buffer_ptr == exec->vtx.buffer_map); in vbo_exec_wrap_upgrade_vertex()
372 for (i = 0 ; i < exec->vtx.copied.nr ; i++) { in vbo_exec_wrap_upgrade_vertex()
373 GLbitfield64 enabled = exec->vtx.enabled; in vbo_exec_wrap_upgrade_vertex()
376 GLuint sz = exec->vtx.attr[j].size; in vbo_exec_wrap_upgrade_vertex()
377 GLint old_offset = old_attrptr[j] - exec->vtx.vertex; in vbo_exec_wrap_upgrade_vertex()
378 GLint new_offset = exec->vtx.attrptr[j] - exec->vtx.vertex; in vbo_exec_wrap_upgrade_vertex()
387 exec->vtx.attr[j].type); in vbo_exec_wrap_upgrade_vertex()
400 dest += exec->vtx.vertex_size; in vbo_exec_wrap_upgrade_vertex()
403 exec->vtx.buffer_ptr = dest; in vbo_exec_wrap_upgrade_vertex()
404 exec->vtx.vert_count += exec->vtx.copied.nr; in vbo_exec_wrap_upgrade_vertex()
405 exec->vtx.copied.nr = 0; in vbo_exec_wrap_upgrade_vertex()
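
vbo_exec_wrap_upgrade_vertex() handles an attribute growing mid-begin/end (say glColor3f followed by glColor4f): it wraps the buffer, rebuilds the per-attribute pointers around the new vertex size (position kept last), and replays any carried-over vertices, converting each from the old interleaved layout to the new one. A sketch of that per-vertex conversion, under the assumption of a float-only layout (the offset/size arrays are hypothetical stand-ins for Mesa's attrptr/attr bookkeeping):

    #include <string.h>

    static void convert_vertex(float *dst,
                               const unsigned *new_off, const unsigned *new_size,
                               const float *src,
                               const unsigned *old_off, const unsigned *old_size,
                               unsigned num_attrs)
    {
       static const float dflt[4] = { 0.0f, 0.0f, 0.0f, 1.0f };

       for (unsigned j = 0; j < num_attrs; j++) {
          unsigned n = old_size[j] < new_size[j] ? old_size[j] : new_size[j];
          memcpy(dst + new_off[j], src + old_off[j], n * sizeof(float));
          /* The upgraded attribute gained components: fill with defaults. */
          for (unsigned k = n; k < new_size[j]; k++)
             dst[new_off[j] + k] = dflt[k];
       }
    }
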
421 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; in vbo_exec_fixup_vertex() local
425 if (newSize > exec->vtx.attr[attr].size || in vbo_exec_fixup_vertex()
426 newType != exec->vtx.attr[attr].type) { in vbo_exec_fixup_vertex()
430 vbo_exec_wrap_upgrade_vertex(exec, attr, newSize, newType); in vbo_exec_fixup_vertex()
432 else if (newSize < exec->vtx.attr[attr].active_size) { in vbo_exec_fixup_vertex()
435 vbo_get_default_vals_as_union(exec->vtx.attr[attr].type); in vbo_exec_fixup_vertex()
440 for (i = newSize; i <= exec->vtx.attr[attr].size; i++) in vbo_exec_fixup_vertex()
441 exec->vtx.attrptr[attr][i-1] = id[i-1]; in vbo_exec_fixup_vertex()
443 exec->vtx.attr[attr].active_size = newSize; in vbo_exec_fixup_vertex()
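
The else-branch of vbo_exec_fixup_vertex() covers the opposite case: a smaller value arrives for an attribute that was stored wide (glColor4f then glColor3f). Rather than shrinking the vertex layout, it resets the now-unused trailing components to the type's default values and lowers active_size. Roughly, for float attributes (Mesa looks the defaults up per type via vbo_get_default_vals_as_union):

    static void shrink_attr(float *attrptr, int stored_size, int new_size)
    {
       static const float id[4] = { 0.0f, 0.0f, 0.0f, 1.0f };

       /* Trailing components fall back to the identity values; the next
        * attribute store rewrites components 0..new_size-1 anyway. */
       for (int i = new_size; i < stored_size; i++)
          attrptr[i] = id[i];
    }
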
486 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; \
490 /* store a copy of the attribute in exec except for glVertex */ \
493 if (unlikely(exec->vtx.attr[A].active_size != N * sz || \
494 exec->vtx.attr[A].type != T)) { \
498 C *dest = (C *)exec->vtx.attrptr[A]; \
503 assert(exec->vtx.attr[A].type == T); \
509 int size = exec->vtx.attr[0].size; \
513 exec->vtx.attr[0].type != T)) { \
514 vbo_exec_wrap_upgrade_vertex(exec, 0, N * sz, T); \
517 uint32_t *dst = (uint32_t *)exec->vtx.buffer_ptr; \
518 uint32_t *src = (uint32_t *)exec->vtx.vertex; \
519 unsigned vertex_size_no_pos = exec->vtx.vertex_size_no_pos; \
521 /* Copy over attributes from exec. */ \
554 exec->vtx.buffer_ptr = (fi_type*)dst; \
559 if (unlikely(++exec->vtx.vert_count >= exec->vtx.max_vert)) \
560 vbo_exec_vtx_wrap(exec); \
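
The backslash-continued fragments above belong to the attribute-store macro behind the glVertex/glColor/etc. entry points: storing any attribute first fixes up size/type if needed, then writes into a per-vertex scratch area; storing the position additionally copies the completed scratch vertex into the mapped buffer and wraps when full. A rough sketch of the emit half; the names and the position-last layout are inferred from the fragments (note the attrptr[0] assignment above), not copied from Mesa:

    #include <string.h>

    struct emit_sketch {
       float scratch[64];       /* one vertex worth of attributes */
       float *buffer_ptr;       /* write cursor into the mapped VBO */
       unsigned vertex_size;    /* floats per vertex, position included */
       unsigned pos_size;       /* floats used by the position attribute */
       unsigned vert_count, max_vert;
    };

    void wrap(struct emit_sketch *e);   /* flush + rewind, as sketched above */

    static void emit_vertex(struct emit_sketch *e, const float *pos)
    {
       /* Position sits last in the interleaved vertex in this sketch. */
       memcpy(e->scratch + e->vertex_size - e->pos_size,
              pos, e->pos_size * sizeof(float));

       memcpy(e->buffer_ptr, e->scratch, e->vertex_size * sizeof(float));
       e->buffer_ptr += e->vertex_size;

       if (++e->vert_count >= e->max_vert)
          wrap(e);              /* buffer full: wrap as sketched above */
    }
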
679 vbo_exec_FlushVertices_internal(struct vbo_exec_context *exec, unsigned flags) in vbo_exec_FlushVertices_internal() argument
681 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_FlushVertices_internal()
684 if (exec->vtx.vert_count) { in vbo_exec_FlushVertices_internal()
685 vbo_exec_vtx_flush(exec); in vbo_exec_FlushVertices_internal()
688 if (exec->vtx.vertex_size) { in vbo_exec_FlushVertices_internal()
689 vbo_exec_copy_to_current(exec); in vbo_exec_FlushVertices_internal()
690 vbo_reset_all_attr(exec); in vbo_exec_FlushVertices_internal()
701 vbo_exec_copy_to_current(exec); in vbo_exec_FlushVertices_internal()
713 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; in _mesa_EvalCoord1f() local
717 if (exec->eval.recalculate_maps) in _mesa_EvalCoord1f()
718 vbo_exec_eval_update(exec); in _mesa_EvalCoord1f()
721 if (exec->eval.map1[i].map) in _mesa_EvalCoord1f()
722 if (exec->vtx.attr[i].active_size != exec->eval.map1[i].sz) in _mesa_EvalCoord1f()
723 vbo_exec_fixup_vertex(ctx, i, exec->eval.map1[i].sz, GL_FLOAT); in _mesa_EvalCoord1f()
727 memcpy(exec->vtx.copied.buffer, exec->vtx.vertex, in _mesa_EvalCoord1f()
728 exec->vtx.vertex_size * sizeof(GLfloat)); in _mesa_EvalCoord1f()
730 vbo_exec_do_EvalCoord1f(exec, u); in _mesa_EvalCoord1f()
732 memcpy(exec->vtx.vertex, exec->vtx.copied.buffer, in _mesa_EvalCoord1f()
733 exec->vtx.vertex_size * sizeof(GLfloat)); in _mesa_EvalCoord1f()
741 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; in _mesa_EvalCoord2f() local
745 if (exec->eval.recalculate_maps) in _mesa_EvalCoord2f()
746 vbo_exec_eval_update(exec); in _mesa_EvalCoord2f()
749 if (exec->eval.map2[i].map) in _mesa_EvalCoord2f()
750 if (exec->vtx.attr[i].active_size != exec->eval.map2[i].sz) in _mesa_EvalCoord2f()
751 vbo_exec_fixup_vertex(ctx, i, exec->eval.map2[i].sz, GL_FLOAT); in _mesa_EvalCoord2f()
755 if (exec->vtx.attr[VBO_ATTRIB_NORMAL].active_size != 3) in _mesa_EvalCoord2f()
759 memcpy(exec->vtx.copied.buffer, exec->vtx.vertex, in _mesa_EvalCoord2f()
760 exec->vtx.vertex_size * sizeof(GLfloat)); in _mesa_EvalCoord2f()
762 vbo_exec_do_EvalCoord2f(exec, u, v); in _mesa_EvalCoord2f()
764 memcpy(exec->vtx.vertex, exec->vtx.copied.buffer, in _mesa_EvalCoord2f()
765 exec->vtx.vertex_size * sizeof(GLfloat)); in _mesa_EvalCoord2f()
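
Both EvalCoord entry points share one pattern: refresh the evaluator maps if stale, fix up attribute sizes to match the maps, then save the scratch vertex, evaluate (which emits a vertex and clobbers the scratch), and restore it. Distilled, with do_eval standing in for the map evaluation plus vertex emit:

    #include <string.h>

    static void eval_coord1(float *vertex, unsigned vertex_size,
                            float *spare /* >= vertex_size floats */,
                            void (*do_eval)(float), float u)
    {
       memcpy(spare, vertex, vertex_size * sizeof(float));   /* save    */
       do_eval(u);                                           /* clobber */
       memcpy(vertex, spare, vertex_size * sizeof(float));   /* restore */
    }
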
818 struct vbo_exec_context *exec = &vbo->exec; in _mesa_Begin() local
841 if (exec->vtx.vertex_size && !exec->vtx.attr[VBO_ATTRIB_POS].size) in _mesa_Begin()
842 vbo_exec_FlushVertices_internal(exec, FLUSH_STORED_VERTICES); in _mesa_Begin()
844 i = exec->vtx.prim_count++; in _mesa_Begin()
845 exec->vtx.mode[i] = mode; in _mesa_Begin()
846 exec->vtx.draw[i].start = exec->vtx.vert_count; in _mesa_Begin()
847 exec->vtx.markers[i].begin = 1; in _mesa_Begin()
851 ctx->Exec = _mesa_hw_select_enabled(ctx) ? in _mesa_Begin()
859 ctx->CurrentServerDispatch = ctx->Exec; in _mesa_Begin()
861 ctx->CurrentClientDispatch = ctx->CurrentServerDispatch = ctx->Exec; in _mesa_Begin()
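
_mesa_Begin() flushes any position-less leftovers, records a new draw, and swaps in the begin/end dispatch tables. The recording step amounts to this sketch (types and the fixed cap are illustrative):

    #include <stdbool.h>

    struct begin_sketch {
       unsigned prim_count, vert_count;
       unsigned mode[32];
       unsigned start[32];
       bool begin[32];
    };

    static void record_begin(struct begin_sketch *e, unsigned gl_mode)
    {
       unsigned i = e->prim_count++;
       e->mode[i] = gl_mode;
       e->start[i] = e->vert_count;   /* primitive starts at the next vertex */
       e->begin[i] = true;            /* a real glBegin, not a buffer wrap */
    }
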
873 try_vbo_merge(struct vbo_exec_context *exec) in try_vbo_merge() argument
875 unsigned cur = exec->vtx.prim_count - 1; in try_vbo_merge()
877 assert(exec->vtx.prim_count >= 1); in try_vbo_merge()
879 vbo_try_prim_conversion(&exec->vtx.mode[cur], &exec->vtx.draw[cur].count); in try_vbo_merge()
881 if (exec->vtx.prim_count >= 2) { in try_vbo_merge()
882 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in try_vbo_merge()
886 exec->vtx.mode[prev], in try_vbo_merge()
887 exec->vtx.mode[cur], in try_vbo_merge()
888 exec->vtx.draw[prev].start, in try_vbo_merge()
889 exec->vtx.draw[cur].start, in try_vbo_merge()
890 &exec->vtx.draw[prev].count, in try_vbo_merge()
891 exec->vtx.draw[cur].count, in try_vbo_merge()
893 &exec->vtx.markers[prev].end, in try_vbo_merge()
894 exec->vtx.markers[cur].begin, in try_vbo_merge()
895 exec->vtx.markers[cur].end)) in try_vbo_merge()
896 exec->vtx.prim_count--; /* drop the last primitive */ in try_vbo_merge()
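
try_vbo_merge() first lets vbo_try_prim_conversion() rewrite the just-ended primitive into a more mergeable mode, then attempts to fold it into the previous draw, decrementing prim_count on success. A simplified version of the merge test; Mesa's real vbo_merge_draws also weighs the begin/end flags and handles more mode combinations:

    #include <stdbool.h>

    struct mdraw { unsigned mode, start, count; };

    /* Two adjacent draws collapse into one when the modes match, the
     * vertex ranges are contiguous, and the mode is a plain list
     * (POINTS/LINES/TRIANGLES-style, not a strip or fan). */
    static bool merge_draws(struct mdraw *prev, const struct mdraw *cur,
                            bool mode_is_list)
    {
       if (!mode_is_list || prev->mode != cur->mode)
          return false;
       if (prev->start + prev->count != cur->start)
          return false;

       prev->count += cur->count;   /* caller drops the last draw record */
       return true;
    }
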
908 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; in _mesa_End() local
915 ctx->Exec = ctx->OutsideBeginEnd; in _mesa_End()
920 ctx->CurrentServerDispatch = ctx->Exec; in _mesa_End()
924 ctx->CurrentClientDispatch = ctx->CurrentServerDispatch = ctx->Exec; in _mesa_End()
928 if (exec->vtx.prim_count > 0) { in _mesa_End()
930 unsigned last = exec->vtx.prim_count - 1; in _mesa_End()
931 struct pipe_draw_start_count_bias *last_draw = &exec->vtx.draw[last]; in _mesa_End()
932 unsigned count = exec->vtx.vert_count - last_draw->start; in _mesa_End()
935 exec->vtx.markers[last].end = 1; in _mesa_End()
946 if (exec->vtx.mode[last] == GL_LINE_LOOP && in _mesa_End()
947 exec->vtx.markers[last].begin == 0) { in _mesa_End()
951 const fi_type *src = exec->vtx.buffer_map + in _mesa_End()
952 last_draw->start * exec->vtx.vertex_size; in _mesa_End()
953 fi_type *dst = exec->vtx.buffer_map + in _mesa_End()
954 exec->vtx.vert_count * exec->vtx.vertex_size; in _mesa_End()
957 memcpy(dst, src, exec->vtx.vertex_size * sizeof(fi_type)); in _mesa_End()
961 exec->vtx.mode[last] = GL_LINE_STRIP; in _mesa_End()
966 exec->vtx.vert_count++; in _mesa_End()
967 exec->vtx.buffer_ptr += exec->vtx.vertex_size; in _mesa_End()
970 try_vbo_merge(exec); in _mesa_End()
975 if (exec->vtx.prim_count == VBO_MAX_PRIM) in _mesa_End()
976 vbo_exec_vtx_flush(exec); in _mesa_End()
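
The fragments above show _mesa_End() closing the last draw and, for a GL_LINE_LOOP whose begin flag was consumed by an earlier buffer wrap, appending the loop's first vertex (carried at the draw's start) and demoting the mode to GL_LINE_STRIP, since the loop can no longer close implicitly. A sketch of that close-out, with plain float arrays standing in for Mesa's fi_type buffer:

    #include <string.h>

    static void close_line_loop(float *buffer_map, float **buffer_ptr,
                                unsigned *vert_count, unsigned draw_start,
                                unsigned vertex_size, unsigned *mode)
    {
       const float *first = buffer_map + draw_start * vertex_size;
       float *end = buffer_map + *vert_count * vertex_size;

       memcpy(end, first, vertex_size * sizeof(float));  /* repeat vertex 0 */
       (*vert_count)++;
       *buffer_ptr += vertex_size;
       *mode = 0x0003;              /* GL_LINE_STRIP, kept numeric here */
    }
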
1083 struct _glapi_table *tab = ctx->Exec; in vbo_install_exec_vtxfmt()
1094 vbo_reset_all_attr(struct vbo_exec_context *exec) in vbo_reset_all_attr() argument
1096 while (exec->vtx.enabled) { in vbo_reset_all_attr()
1097 const int i = u_bit_scan64(&exec->vtx.enabled); in vbo_reset_all_attr()
1100 exec->vtx.attr[i].size = 0; in vbo_reset_all_attr()
1101 exec->vtx.attr[i].type = GL_FLOAT; in vbo_reset_all_attr()
1102 exec->vtx.attr[i].active_size = 0; in vbo_reset_all_attr()
1103 exec->vtx.attrptr[i] = NULL; in vbo_reset_all_attr()
1106 exec->vtx.vertex_size = 0; in vbo_reset_all_attr()
1111 vbo_exec_vtx_init(struct vbo_exec_context *exec) in vbo_exec_vtx_init() argument
1113 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_vtx_init()
1115 exec->vtx.bufferobj = _mesa_bufferobj_alloc(ctx, IMM_BUFFER_NAME); in vbo_exec_vtx_init()
1117 exec->vtx.enabled = u_bit_consecutive64(0, VBO_ATTRIB_MAX); /* reset all */ in vbo_exec_vtx_init()
1118 vbo_reset_all_attr(exec); in vbo_exec_vtx_init()
1120 exec->vtx.info.instance_count = 1; in vbo_exec_vtx_init()
1121 exec->vtx.info.max_index = ~0; in vbo_exec_vtx_init()
1126 vbo_exec_vtx_destroy(struct vbo_exec_context *exec) in vbo_exec_vtx_destroy() argument
1129 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_vtx_destroy()
1133 if (exec->vtx.buffer_map) { in vbo_exec_vtx_destroy()
1134 assert(!exec->vtx.bufferobj || in vbo_exec_vtx_destroy()
1135 exec->vtx.bufferobj->Name == IMM_BUFFER_NAME); in vbo_exec_vtx_destroy()
1136 if (!exec->vtx.bufferobj) { in vbo_exec_vtx_destroy()
1137 align_free(exec->vtx.buffer_map); in vbo_exec_vtx_destroy()
1138 exec->vtx.buffer_map = NULL; in vbo_exec_vtx_destroy()
1139 exec->vtx.buffer_ptr = NULL; in vbo_exec_vtx_destroy()
1145 if (exec->vtx.bufferobj && in vbo_exec_vtx_destroy()
1146 _mesa_bufferobj_mapped(exec->vtx.bufferobj, MAP_INTERNAL)) { in vbo_exec_vtx_destroy()
1147 _mesa_bufferobj_unmap(ctx, exec->vtx.bufferobj, MAP_INTERNAL); in vbo_exec_vtx_destroy()
1149 _mesa_reference_buffer_object(ctx, &exec->vtx.bufferobj, NULL); in vbo_exec_vtx_destroy()
1167 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; in vbo_exec_FlushVertices() local
1171 exec->flush_call_depth++; in vbo_exec_FlushVertices()
1172 assert(exec->flush_call_depth == 1); in vbo_exec_FlushVertices()
1178 exec->flush_call_depth--; in vbo_exec_FlushVertices()
1179 assert(exec->flush_call_depth == 0); in vbo_exec_FlushVertices()
1185 vbo_exec_FlushVertices_internal(exec, flags); in vbo_exec_FlushVertices()
1188 exec->flush_call_depth--; in vbo_exec_FlushVertices()
1189 assert(exec->flush_call_depth == 0); in vbo_exec_FlushVertices()
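
Finally, vbo_exec_FlushVertices() wraps the internal flush in a call-depth counter, asserted so that it is effectively a debug-build check: flushing can call back into driver code, and the counter catches anything that re-enters the flush. The pattern in isolation:

    #include <assert.h>

    struct flush_guard { int depth; };

    static void guarded_flush(struct flush_guard *g, void (*do_flush)(void))
    {
       g->depth++;
       assert(g->depth == 1);       /* no recursive flushes allowed */

       do_flush();

       g->depth--;
       assert(g->depth == 0);
    }
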