Lines Matching full:exec

55 vbo_reset_all_attr(struct vbo_exec_context *exec);
64 vbo_exec_wrap_buffers(struct vbo_exec_context *exec) in vbo_exec_wrap_buffers() argument
66 if (exec->vtx.prim_count == 0) { in vbo_exec_wrap_buffers()
67 exec->vtx.copied.nr = 0; in vbo_exec_wrap_buffers()
68 exec->vtx.vert_count = 0; in vbo_exec_wrap_buffers()
69 exec->vtx.buffer_ptr = exec->vtx.buffer_map; in vbo_exec_wrap_buffers()
72 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_wrap_buffers()
73 unsigned last = exec->vtx.prim_count - 1; in vbo_exec_wrap_buffers()
74 struct pipe_draw_start_count_bias *last_draw = &exec->vtx.draw[last]; in vbo_exec_wrap_buffers()
75 const bool last_begin = exec->vtx.markers[last].begin; in vbo_exec_wrap_buffers()
79 last_draw->count = exec->vtx.vert_count - last_draw->start; in vbo_exec_wrap_buffers()
81 exec->vtx.markers[last].end = 0; in vbo_exec_wrap_buffers()
85 if (exec->vtx.mode[last] == GL_LINE_LOOP && in vbo_exec_wrap_buffers()
87 !exec->vtx.markers[last].end) { in vbo_exec_wrap_buffers()
89 exec->vtx.mode[last] = GL_LINE_STRIP; in vbo_exec_wrap_buffers()
102 if (exec->vtx.vert_count) in vbo_exec_wrap_buffers()
103 vbo_exec_vtx_flush(exec); in vbo_exec_wrap_buffers()
105 exec->vtx.prim_count = 0; in vbo_exec_wrap_buffers()
106 exec->vtx.copied.nr = 0; in vbo_exec_wrap_buffers()
111 assert(exec->vtx.prim_count == 0); in vbo_exec_wrap_buffers()
114 exec->vtx.mode[0] = ctx->Driver.CurrentExecPrimitive; in vbo_exec_wrap_buffers()
115 exec->vtx.draw[0].start = 0; in vbo_exec_wrap_buffers()
116 exec->vtx.markers[0].begin = 0; in vbo_exec_wrap_buffers()
117 exec->vtx.prim_count++; in vbo_exec_wrap_buffers()
119 if (exec->vtx.copied.nr == last_count) in vbo_exec_wrap_buffers()
120 exec->vtx.markers[0].begin = last_begin; in vbo_exec_wrap_buffers()
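
Taken together, these matches trace the wrap path: when the vertex store fills mid-primitive, the open draw's count is finalized, a split GL_LINE_LOOP is demoted to GL_LINE_STRIP (it can no longer close across buffers), everything recorded so far is flushed, and a continuation primitive is reopened at vertex 0. A minimal standalone sketch of that bookkeeping follows; the types and the flush_to_driver/PRIM_OUTSIDE_BEGIN_END names are simplified stand-ins, not Mesa's own.

#include <assert.h>

#define GL_LINE_LOOP  0x0002
#define GL_LINE_STRIP 0x0003
#define PRIM_OUTSIDE_BEGIN_END ~0u   /* hypothetical "no open glBegin" sentinel */

struct draw_rec { unsigned mode, start, count, begin, end; };

struct exec_state {
    unsigned prim_count, vert_count;
    struct draw_rec prim[8];
    unsigned current_mode;   /* mode of the open glBegin, or the sentinel */
};

static void flush_to_driver(struct exec_state *e) { (void)e; /* elided */ }

/* Finalize the open draw, flush, and reopen the interrupted primitive at
 * the start of the (conceptually) fresh buffer. */
static void wrap_buffers(struct exec_state *e)
{
    if (e->prim_count == 0) {
        e->vert_count = 0;
        return;
    }

    struct draw_rec *last = &e->prim[e->prim_count - 1];

    if (!last->end) {                       /* still inside glBegin/glEnd */
        last->count = e->vert_count - last->start;

        /* A split line loop cannot close across buffers: emit the flushed
         * part as a strip; the continuation finishes the loop. */
        if (last->mode == GL_LINE_LOOP)
            last->mode = GL_LINE_STRIP;
    }

    if (e->vert_count)
        flush_to_driver(e);

    e->prim_count = 0;
    e->vert_count = 0;
    assert(e->prim_count == 0);

    /* Reopen the primitive we interrupted; begin=0 marks a continuation
     * rather than a fresh glBegin (Mesa restores the real begin flag when
     * the carried-over vertices make up the whole primitive). */
    if (e->current_mode != PRIM_OUTSIDE_BEGIN_END) {
        struct draw_rec *first = &e->prim[e->prim_count++];
        first->mode  = e->current_mode;
        first->start = 0;
        first->begin = 0;
        first->end   = 0;
    }
}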
131 vbo_exec_vtx_wrap(struct vbo_exec_context *exec) in vbo_exec_vtx_wrap() argument
136 * to exec->vtx.copied. in vbo_exec_vtx_wrap()
138 vbo_exec_wrap_buffers(exec); in vbo_exec_vtx_wrap()
140 if (!exec->vtx.buffer_ptr) { in vbo_exec_vtx_wrap()
147 assert(exec->vtx.max_vert - exec->vtx.vert_count > exec->vtx.copied.nr); in vbo_exec_vtx_wrap()
149 numComponents = exec->vtx.copied.nr * exec->vtx.vertex_size; in vbo_exec_vtx_wrap()
150 memcpy(exec->vtx.buffer_ptr, in vbo_exec_vtx_wrap()
151 exec->vtx.copied.buffer, in vbo_exec_vtx_wrap()
153 exec->vtx.buffer_ptr += numComponents; in vbo_exec_vtx_wrap()
154 exec->vtx.vert_count += exec->vtx.copied.nr; in vbo_exec_vtx_wrap()
156 exec->vtx.copied.nr = 0; in vbo_exec_vtx_wrap()
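
Lines 147–156 show the second half of the wrap: the vertices saved from the tail of the split primitive are replayed at the head of the new buffer so the primitive continues without a visible seam. The same steps in isolation, with a hypothetical flat-float vertex store:

#include <assert.h>
#include <string.h>

/* Hypothetical vertex store: interleaved vertices as flat 32-bit floats. */
struct vtx_store {
    float *buffer_map, *buffer_ptr;   /* buffer start and write cursor */
    unsigned vert_count, max_vert;    /* used / capacity, in vertices */
    unsigned vertex_size;             /* floats per vertex */
    float copied[64 * 4];             /* tail vertices carried across the wrap */
    unsigned copied_nr;
};

static void replay_copied(struct vtx_store *s)
{
    /* The fresh buffer must hold more than what we carry over. */
    assert(s->max_vert - s->vert_count > s->copied_nr);

    unsigned num_components = s->copied_nr * s->vertex_size;
    memcpy(s->buffer_ptr, s->copied, num_components * sizeof(float));
    s->buffer_ptr += num_components;
    s->vert_count += s->copied_nr;
    s->copied_nr   = 0;
}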
164 vbo_exec_copy_to_current(struct vbo_exec_context *exec) in vbo_exec_copy_to_current() argument
166 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_copy_to_current()
168 GLbitfield64 enabled = exec->vtx.enabled & (~BITFIELD64_BIT(VBO_ATTRIB_POS)); in vbo_exec_copy_to_current()
174 /* Note: the exec->vtx.current[i] pointers point into the in vbo_exec_copy_to_current()
181 assert(exec->vtx.attr[i].size); in vbo_exec_copy_to_current()
187 if (exec->vtx.attr[i].type == GL_DOUBLE || in vbo_exec_copy_to_current()
188 exec->vtx.attr[i].type == GL_UNSIGNED_INT64_ARB) { in vbo_exec_copy_to_current()
190 memcpy(tmp, exec->vtx.attrptr[i], exec->vtx.attr[i].size * sizeof(GLfloat)); in vbo_exec_copy_to_current()
194 exec->vtx.attr[i].size, in vbo_exec_copy_to_current()
195 exec->vtx.attrptr[i], in vbo_exec_copy_to_current()
196 exec->vtx.attr[i].type); in vbo_exec_copy_to_current()
228 if (exec->vtx.attr[i].type != vbo->current[i].Format.User.Type || in vbo_exec_copy_to_current()
229 (exec->vtx.attr[i].size >> dmul_shift) != vbo->current[i].Format.User.Size) { in vbo_exec_copy_to_current()
231 exec->vtx.attr[i].size >> dmul_shift, in vbo_exec_copy_to_current()
232 exec->vtx.attr[i].type); in vbo_exec_copy_to_current()
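
vbo_exec_copy_to_current propagates the last-written per-vertex values into the context's "current" attribute state (what a later glGet or the next batch observes), skipping position, and re-derives the stored format when size or type changed. A simplified sketch; the real code also moves GL_DOUBLE/GL_UNSIGNED_INT64_ARB payloads as raw bits, as lines 187–190 hint:

#include <string.h>

#define MAX_ATTRIBS 16

/* Hypothetical mirrors of the exec-side and context-side attribute state;
 * sizes are at most 4 components. */
struct exec_attr {
    unsigned size, type;     /* components and GL type last written */
    const float *ptr;        /* location of the latest value in the vertex */
};

struct current_attr {
    float value[4];
    unsigned size, type;
};

static void copy_to_current(struct current_attr *cur,
                            const struct exec_attr *attr,
                            unsigned long long enabled)  /* bit 0 = position */
{
    enabled &= ~1ull;        /* position has no "current" value to maintain */

    for (int i = 0; i < MAX_ATTRIBS; i++) {
        if (!(enabled & (1ull << i)))
            continue;

        /* Double/uint64 attribs would need a raw bit copy here; plain
         * floats can be copied component-wise. */
        memcpy(cur[i].value, attr[i].ptr, attr[i].size * sizeof(float));

        if (cur[i].size != attr[i].size || cur[i].type != attr[i].type) {
            cur[i].size = attr[i].size;   /* re-derive the stored format */
            cur[i].type = attr[i].type;
        }
    }
}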
257 vbo_exec_wrap_upgrade_vertex(struct vbo_exec_context *exec, in vbo_exec_wrap_upgrade_vertex() argument
260 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_wrap_upgrade_vertex()
262 const GLint lastcount = exec->vtx.vert_count; in vbo_exec_wrap_upgrade_vertex()
264 const GLuint old_vtx_size_no_pos = exec->vtx.vertex_size_no_pos; in vbo_exec_wrap_upgrade_vertex()
265 const GLuint old_vtx_size = exec->vtx.vertex_size; /* floats per vertex */ in vbo_exec_wrap_upgrade_vertex()
266 const GLuint oldSize = exec->vtx.attr[attr].size; in vbo_exec_wrap_upgrade_vertex()
271 if (unlikely(!exec->vtx.buffer_ptr)) { in vbo_exec_wrap_upgrade_vertex()
273 assert(exec->vtx.bufferobj); in vbo_exec_wrap_upgrade_vertex()
274 vbo_exec_vtx_map(exec); in vbo_exec_wrap_upgrade_vertex()
275 assert(exec->vtx.buffer_ptr); in vbo_exec_wrap_upgrade_vertex()
279 * to exec->vtx.copied. in vbo_exec_wrap_upgrade_vertex()
281 vbo_exec_wrap_buffers(exec); in vbo_exec_wrap_upgrade_vertex()
283 if (unlikely(exec->vtx.copied.nr)) { in vbo_exec_wrap_upgrade_vertex()
288 memcpy(old_attrptr, exec->vtx.attrptr, sizeof(old_attrptr)); in vbo_exec_wrap_upgrade_vertex()
295 !oldSize && lastcount > 8 && exec->vtx.vertex_size) { in vbo_exec_wrap_upgrade_vertex()
296 vbo_exec_copy_to_current(exec); in vbo_exec_wrap_upgrade_vertex()
297 vbo_reset_all_attr(exec); in vbo_exec_wrap_upgrade_vertex()
302 exec->vtx.attr[attr].size = newSize; in vbo_exec_wrap_upgrade_vertex()
303 exec->vtx.attr[attr].active_size = newSize; in vbo_exec_wrap_upgrade_vertex()
304 exec->vtx.attr[attr].type = newType; in vbo_exec_wrap_upgrade_vertex()
305 exec->vtx.vertex_size += newSize - oldSize; in vbo_exec_wrap_upgrade_vertex()
306 exec->vtx.vertex_size_no_pos = exec->vtx.vertex_size - exec->vtx.attr[0].size; in vbo_exec_wrap_upgrade_vertex()
307 exec->vtx.max_vert = vbo_compute_max_verts(exec); in vbo_exec_wrap_upgrade_vertex()
308 exec->vtx.vert_count = 0; in vbo_exec_wrap_upgrade_vertex()
309 exec->vtx.buffer_ptr = exec->vtx.buffer_map; in vbo_exec_wrap_upgrade_vertex()
310 exec->vtx.enabled |= BITFIELD64_BIT(attr); in vbo_exec_wrap_upgrade_vertex()
314 unsigned offset = exec->vtx.attrptr[attr] - exec->vtx.vertex; in vbo_exec_wrap_upgrade_vertex()
319 fi_type *old_first = exec->vtx.attrptr[attr] + oldSize; in vbo_exec_wrap_upgrade_vertex()
320 fi_type *new_first = exec->vtx.attrptr[attr] + newSize; in vbo_exec_wrap_upgrade_vertex()
321 fi_type *old_last = exec->vtx.vertex + old_vtx_size_no_pos - 1; in vbo_exec_wrap_upgrade_vertex()
322 fi_type *new_last = exec->vtx.vertex + exec->vtx.vertex_size_no_pos - 1; in vbo_exec_wrap_upgrade_vertex()
349 GLbitfield64 enabled = exec->vtx.enabled & in vbo_exec_wrap_upgrade_vertex()
355 if (exec->vtx.attrptr[i] > exec->vtx.attrptr[attr]) in vbo_exec_wrap_upgrade_vertex()
356 exec->vtx.attrptr[i] += size_diff; in vbo_exec_wrap_upgrade_vertex()
361 exec->vtx.attrptr[attr] = exec->vtx.vertex + in vbo_exec_wrap_upgrade_vertex()
362 exec->vtx.vertex_size_no_pos - newSize; in vbo_exec_wrap_upgrade_vertex()
367 exec->vtx.attrptr[0] = exec->vtx.vertex + exec->vtx.vertex_size_no_pos; in vbo_exec_wrap_upgrade_vertex()
374 if (unlikely(exec->vtx.copied.nr)) { in vbo_exec_wrap_upgrade_vertex()
375 fi_type *data = exec->vtx.copied.buffer; in vbo_exec_wrap_upgrade_vertex()
376 fi_type *dest = exec->vtx.buffer_ptr; in vbo_exec_wrap_upgrade_vertex()
378 assert(exec->vtx.buffer_ptr == exec->vtx.buffer_map); in vbo_exec_wrap_upgrade_vertex()
380 for (i = 0 ; i < exec->vtx.copied.nr ; i++) { in vbo_exec_wrap_upgrade_vertex()
381 GLbitfield64 enabled = exec->vtx.enabled; in vbo_exec_wrap_upgrade_vertex()
384 GLuint sz = exec->vtx.attr[j].size; in vbo_exec_wrap_upgrade_vertex()
385 GLint old_offset = old_attrptr[j] - exec->vtx.vertex; in vbo_exec_wrap_upgrade_vertex()
386 GLint new_offset = exec->vtx.attrptr[j] - exec->vtx.vertex; in vbo_exec_wrap_upgrade_vertex()
395 exec->vtx.attr[j].type); in vbo_exec_wrap_upgrade_vertex()
408 dest += exec->vtx.vertex_size; in vbo_exec_wrap_upgrade_vertex()
411 exec->vtx.buffer_ptr = dest; in vbo_exec_wrap_upgrade_vertex()
412 exec->vtx.vert_count += exec->vtx.copied.nr; in vbo_exec_wrap_upgrade_vertex()
413 exec->vtx.copied.nr = 0; in vbo_exec_wrap_upgrade_vertex()
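
vbo_exec_wrap_upgrade_vertex is the heavy path: when an attribute shows up with more components (or a new type) mid-batch, the interleaved vertex layout grows, the attrptr pointers past the grown slot shift by the size difference, and any vertices carried over from the wrap are re-emitted in the new layout (lines 374–413). A sketch of that relayout loop under a simplified flat-float layout; components the old layout lacked get the GL defaults (0, 0, 0, 1):

#include <string.h>

#define MAX_ATTRIBS 16

/* Hypothetical interleaved layout: per-attribute float offsets and sizes
 * (at most 4 components each). */
struct layout {
    unsigned offset[MAX_ATTRIBS];
    unsigned size[MAX_ATTRIBS];
    unsigned vertex_size;        /* total floats per vertex */
};

/* Re-emit vertices recorded in the old layout into the new, wider one. */
static void convert_vertices(const float *src, float *dst, unsigned count,
                             const struct layout *oldl,
                             const struct layout *newl, unsigned nattribs)
{
    static const float defaults[4] = { 0, 0, 0, 1 };

    for (unsigned v = 0; v < count; v++) {
        for (unsigned a = 0; a < nattribs; a++) {
            const float *in = src + oldl->offset[a];
            float *out      = dst + newl->offset[a];

            for (unsigned c = 0; c < newl->size[a]; c++)
                out[c] = c < oldl->size[a] ? in[c] : defaults[c];
        }
        src += oldl->vertex_size;
        dst += newl->vertex_size;
    }
}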
429 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; in vbo_exec_fixup_vertex() local
433 if (newSize > exec->vtx.attr[attr].size || in vbo_exec_fixup_vertex()
434 newType != exec->vtx.attr[attr].type) { in vbo_exec_fixup_vertex()
438 vbo_exec_wrap_upgrade_vertex(exec, attr, newSize, newType); in vbo_exec_fixup_vertex()
440 else if (newSize < exec->vtx.attr[attr].active_size) { in vbo_exec_fixup_vertex()
443 vbo_get_default_vals_as_union(exec->vtx.attr[attr].type); in vbo_exec_fixup_vertex()
448 for (i = newSize; i <= exec->vtx.attr[attr].size; i++) in vbo_exec_fixup_vertex()
449 exec->vtx.attrptr[attr][i-1] = id[i-1]; in vbo_exec_fixup_vertex()
451 exec->vtx.attr[attr].active_size = newSize; in vbo_exec_fixup_vertex()
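
The shrink path in vbo_exec_fixup_vertex (lines 440–451) is much cheaper: the stored layout keeps the larger size, and only the now-inactive tail components are refilled with default values, so no relayout or flush is needed. Roughly:

#include <assert.h>

/* Hypothetical: deactivate the tail of an attribute in place by refilling
 * it with the GL identity defaults; the interleaved layout is unchanged. */
static void shrink_attr(float *attrptr, unsigned stored_size, unsigned new_size)
{
    static const float id[4] = { 0, 0, 0, 1 };

    assert(stored_size <= 4 && new_size <= stored_size);
    for (unsigned i = new_size; i < stored_size; i++)
        attrptr[i] = id[i];
}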
494 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; \
498 /* store a copy of the attribute in exec except for glVertex */ \
501 if (unlikely(exec->vtx.attr[A].active_size != N * sz || \
502 exec->vtx.attr[A].type != T)) { \
506 C *dest = (C *)exec->vtx.attrptr[A]; \
511 assert(exec->vtx.attr[A].type == T); \
517 int size = exec->vtx.attr[0].size; \
521 exec->vtx.attr[0].type != T)) { \
522 vbo_exec_wrap_upgrade_vertex(exec, 0, N * sz, T); \
525 uint32_t *dst = (uint32_t *)exec->vtx.buffer_ptr; \
526 uint32_t *src = (uint32_t *)exec->vtx.vertex; \
527 unsigned vertex_size_no_pos = exec->vtx.vertex_size_no_pos; \
529 /* Copy over attributes from exec. */ \
562 exec->vtx.buffer_ptr = (fi_type*)dst; \
567 if (unlikely(++exec->vtx.vert_count >= exec->vtx.max_vert)) \
568 vbo_exec_vtx_wrap(exec); \
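
Lines 494–568 are fragments of the ATTR macro that backs every glVertex/glColor/glTexCoord entry point: fix up size/type if the call signature changed, store the components into the staging vertex, and, for attribute 0 (position), emit the completed vertex into the mapped buffer and wrap when full. A function-form sketch of the same flow, with hypothetical types:

#include <string.h>

struct imm_store {
    float vertex[64];        /* staging area for the vertex being assembled */
    float *attrptr[16];      /* where each attribute lives inside vertex[] */
    unsigned vertex_size;    /* floats per vertex */
    float *buffer_ptr;       /* write cursor into the mapped VBO */
    unsigned vert_count, max_vert;
};

static void wrap(struct imm_store *im) { (void)im; /* flush + restart, elided */ }

/* Store one attribute; position (attribute 0) completes the vertex. */
static void attr_store(struct imm_store *im, unsigned a,
                       const float *v, unsigned n)
{
    /* A size/type change would be fixed up here before storing. */
    memcpy(im->attrptr[a], v, n * sizeof(float));

    if (a == 0) {
        memcpy(im->buffer_ptr, im->vertex, im->vertex_size * sizeof(float));
        im->buffer_ptr += im->vertex_size;

        if (++im->vert_count >= im->max_vert)
            wrap(im);
    }
}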
687 vbo_exec_FlushVertices_internal(struct vbo_exec_context *exec, unsigned flags) in vbo_exec_FlushVertices_internal() argument
689 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_FlushVertices_internal()
692 if (exec->vtx.vert_count) { in vbo_exec_FlushVertices_internal()
693 vbo_exec_vtx_flush(exec); in vbo_exec_FlushVertices_internal()
696 if (exec->vtx.vertex_size) { in vbo_exec_FlushVertices_internal()
697 vbo_exec_copy_to_current(exec); in vbo_exec_FlushVertices_internal()
698 vbo_reset_all_attr(exec); in vbo_exec_FlushVertices_internal()
709 vbo_exec_copy_to_current(exec); in vbo_exec_FlushVertices_internal()
721 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; in _mesa_EvalCoord1f() local
725 if (exec->eval.recalculate_maps) in _mesa_EvalCoord1f()
726 vbo_exec_eval_update(exec); in _mesa_EvalCoord1f()
729 if (exec->eval.map1[i].map) in _mesa_EvalCoord1f()
730 if (exec->vtx.attr[i].active_size != exec->eval.map1[i].sz) in _mesa_EvalCoord1f()
731 vbo_exec_fixup_vertex(ctx, i, exec->eval.map1[i].sz, GL_FLOAT); in _mesa_EvalCoord1f()
735 memcpy(exec->vtx.copied.buffer, exec->vtx.vertex, in _mesa_EvalCoord1f()
736 exec->vtx.vertex_size * sizeof(GLfloat)); in _mesa_EvalCoord1f()
738 vbo_exec_do_EvalCoord1f(exec, u); in _mesa_EvalCoord1f()
740 memcpy(exec->vtx.vertex, exec->vtx.copied.buffer, in _mesa_EvalCoord1f()
741 exec->vtx.vertex_size * sizeof(GLfloat)); in _mesa_EvalCoord1f()
749 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; in _mesa_EvalCoord2f() local
753 if (exec->eval.recalculate_maps) in _mesa_EvalCoord2f()
754 vbo_exec_eval_update(exec); in _mesa_EvalCoord2f()
757 if (exec->eval.map2[i].map) in _mesa_EvalCoord2f()
758 if (exec->vtx.attr[i].active_size != exec->eval.map2[i].sz) in _mesa_EvalCoord2f()
759 vbo_exec_fixup_vertex(ctx, i, exec->eval.map2[i].sz, GL_FLOAT); in _mesa_EvalCoord2f()
763 if (exec->vtx.attr[VBO_ATTRIB_NORMAL].active_size != 3) in _mesa_EvalCoord2f()
767 memcpy(exec->vtx.copied.buffer, exec->vtx.vertex, in _mesa_EvalCoord2f()
768 exec->vtx.vertex_size * sizeof(GLfloat)); in _mesa_EvalCoord2f()
770 vbo_exec_do_EvalCoord2f(exec, u, v); in _mesa_EvalCoord2f()
772 memcpy(exec->vtx.vertex, exec->vtx.copied.buffer, in _mesa_EvalCoord2f()
773 exec->vtx.vertex_size * sizeof(GLfloat)); in _mesa_EvalCoord2f()
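
Both glEvalCoord paths follow the same pattern: refresh the evaluator maps if stale, widen any attribute whose map needs more components, then save the staging vertex, let the evaluator emit attributes through the normal immediate-mode path, and restore the vertex so user-set state survives the call. The save/restore core, sketched with hypothetical names:

#include <assert.h>
#include <string.h>

/* Evaluators clobber the staging vertex as they emit synthesized
 * attributes; bracket the call with a save and restore. */
static void eval_coord1(float *vertex, unsigned vertex_size,
                        void (*do_eval1)(float u), float u)
{
    float saved[64];

    assert(vertex_size <= 64);
    memcpy(saved, vertex, vertex_size * sizeof(float));
    do_eval1(u);                         /* emits attribs and position */
    memcpy(vertex, saved, vertex_size * sizeof(float));
}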
826 struct vbo_exec_context *exec = &vbo->exec; in _mesa_Begin() local
849 if (exec->vtx.vertex_size && !exec->vtx.attr[VBO_ATTRIB_POS].size) in _mesa_Begin()
850 vbo_exec_FlushVertices_internal(exec, FLUSH_STORED_VERTICES); in _mesa_Begin()
852 i = exec->vtx.prim_count++; in _mesa_Begin()
853 exec->vtx.mode[i] = mode; in _mesa_Begin()
854 exec->vtx.draw[i].start = exec->vtx.vert_count; in _mesa_Begin()
855 exec->vtx.markers[i].begin = 1; in _mesa_Begin()
859 ctx->Dispatch.Exec = _mesa_hw_select_enabled(ctx) ? in _mesa_Begin()
867 ctx->Dispatch.Current = ctx->Dispatch.Exec; in _mesa_Begin()
869 ctx->GLApi = ctx->Dispatch.Current = ctx->Dispatch.Exec; in _mesa_Begin()
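
_mesa_Begin appends a new draw record (begin=1 marks a genuine glBegin rather than a wrap continuation) and swaps in the inside-Begin/End dispatch table so subsequent calls hit the immediate-mode entry points. The recording step, sketched:

/* Hypothetical draw-record table; Mesa's equivalent also points
 * ctx->Dispatch.Exec at the Begin/End table at this moment. */
struct prim_table {
    unsigned prim_count;
    unsigned mode[8], start[8], begin[8];
};

static void begin_prim(struct prim_table *t, unsigned mode,
                       unsigned vert_count)
{
    unsigned i = t->prim_count++;
    t->mode[i]  = mode;
    t->start[i] = vert_count;   /* draw starts at the current vertex */
    t->begin[i] = 1;            /* a real glBegin, not a continuation */
}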
881 try_vbo_merge(struct vbo_exec_context *exec) in try_vbo_merge() argument
883 unsigned cur = exec->vtx.prim_count - 1; in try_vbo_merge()
885 assert(exec->vtx.prim_count >= 1); in try_vbo_merge()
887 vbo_try_prim_conversion(&exec->vtx.mode[cur], &exec->vtx.draw[cur].count); in try_vbo_merge()
889 if (exec->vtx.prim_count >= 2) { in try_vbo_merge()
890 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in try_vbo_merge()
894 exec->vtx.mode[prev], in try_vbo_merge()
895 exec->vtx.mode[cur], in try_vbo_merge()
896 exec->vtx.draw[prev].start, in try_vbo_merge()
897 exec->vtx.draw[cur].start, in try_vbo_merge()
898 &exec->vtx.draw[prev].count, in try_vbo_merge()
899 exec->vtx.draw[cur].count, in try_vbo_merge()
901 &exec->vtx.markers[prev].end, in try_vbo_merge()
902 exec->vtx.markers[cur].begin, in try_vbo_merge()
903 exec->vtx.markers[cur].end)) in try_vbo_merge()
904 exec->vtx.prim_count--; /* drop the last primitive */ in try_vbo_merge()
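
try_vbo_merge first lets the just-closed draw be converted to a cheaper mode, then folds it into the previous draw when both are contiguous and the mode tolerates concatenation. A sketch of the merge test under those assumptions (the helper names are hypothetical; Mesa's vbo_merge_draws also weighs the begin/end markers):

struct rec_draw { unsigned mode, start, count; };

/* Modes with no cross-draw state can simply be concatenated. */
static int mergeable_mode(unsigned mode)
{
    return mode == 0x0000 /* GL_POINTS */    || mode == 0x0001 /* GL_LINES */ ||
           mode == 0x0004 /* GL_TRIANGLES */ || mode == 0x0007 /* GL_QUADS */;
}

/* Returns the (possibly reduced) primitive count. */
static unsigned try_merge(struct rec_draw *d, unsigned prim_count)
{
    if (prim_count < 2)
        return prim_count;

    struct rec_draw *prev = &d[prim_count - 2];
    struct rec_draw *cur  = &d[prim_count - 1];

    if (prev->mode == cur->mode && mergeable_mode(cur->mode) &&
        prev->start + prev->count == cur->start) {
        prev->count += cur->count;
        return prim_count - 1;       /* drop the last primitive */
    }
    return prim_count;
}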
916 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; in _mesa_End() local
923 ctx->Dispatch.Exec = ctx->Dispatch.OutsideBeginEnd; in _mesa_End()
928 ctx->Dispatch.Current = ctx->Dispatch.Exec; in _mesa_End()
932 ctx->GLApi = ctx->Dispatch.Current = ctx->Dispatch.Exec; in _mesa_End()
936 if (exec->vtx.prim_count > 0) { in _mesa_End()
938 unsigned last = exec->vtx.prim_count - 1; in _mesa_End()
939 struct pipe_draw_start_count_bias *last_draw = &exec->vtx.draw[last]; in _mesa_End()
940 unsigned count = exec->vtx.vert_count - last_draw->start; in _mesa_End()
943 exec->vtx.markers[last].end = 1; in _mesa_End()
956 if (exec->vtx.mode[last] == GL_LINE_LOOP && in _mesa_End()
957 (exec->vtx.markers[last].begin == 0 || !driver_supports_lineloop)) { in _mesa_End()
961 const fi_type *src = exec->vtx.buffer_map + in _mesa_End()
962 last_draw->start * exec->vtx.vertex_size; in _mesa_End()
963 fi_type *dst = exec->vtx.buffer_map + in _mesa_End()
964 exec->vtx.vert_count * exec->vtx.vertex_size; in _mesa_End()
967 memcpy(dst, src, exec->vtx.vertex_size * sizeof(fi_type)); in _mesa_End()
969 if (exec->vtx.markers[last].begin == 0) in _mesa_End()
973 exec->vtx.mode[last] = GL_LINE_STRIP; in _mesa_End()
978 exec->vtx.vert_count++; in _mesa_End()
979 exec->vtx.buffer_ptr += exec->vtx.vertex_size; in _mesa_End()
985 try_vbo_merge(exec); in _mesa_End()
990 if (exec->vtx.prim_count == VBO_MAX_PRIM) in _mesa_End()
991 vbo_exec_vtx_flush(exec); in _mesa_End()
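
_mesa_End finalizes the open draw and handles the one awkward case: a GL_LINE_LOOP that either lost its first segment to a wrap (begin == 0) or that the driver cannot draw natively. The fix, per lines 956–979, is to append a copy of the loop's first recorded vertex and demote the mode to GL_LINE_STRIP. Sketched:

#include <string.h>

#define GL_LINE_STRIP 0x0003

/* Close a line loop by hand: repeat its first vertex at the end and draw
 * the result as a strip. */
static void close_line_loop(float *buffer, unsigned vertex_size,
                            unsigned start, unsigned *vert_count,
                            unsigned *mode, unsigned *count)
{
    const float *src = buffer + (size_t)start * vertex_size;
    float *dst       = buffer + (size_t)*vert_count * vertex_size;

    memcpy(dst, src, vertex_size * sizeof(float));
    (*vert_count)++;
    (*count)++;
    *mode = GL_LINE_STRIP;
}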
1109 vbo_reset_all_attr(struct vbo_exec_context *exec) in vbo_reset_all_attr() argument
1111 while (exec->vtx.enabled) { in vbo_reset_all_attr()
1112 const int i = u_bit_scan64(&exec->vtx.enabled); in vbo_reset_all_attr()
1115 exec->vtx.attr[i].size = 0; in vbo_reset_all_attr()
1116 exec->vtx.attr[i].type = GL_FLOAT; in vbo_reset_all_attr()
1117 exec->vtx.attr[i].active_size = 0; in vbo_reset_all_attr()
1118 exec->vtx.attrptr[i] = NULL; in vbo_reset_all_attr()
1121 exec->vtx.vertex_size = 0; in vbo_reset_all_attr()
1126 vbo_exec_vtx_init(struct vbo_exec_context *exec) in vbo_exec_vtx_init() argument
1128 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_vtx_init()
1130 exec->vtx.bufferobj = _mesa_bufferobj_alloc(ctx, IMM_BUFFER_NAME); in vbo_exec_vtx_init()
1132 exec->vtx.enabled = u_bit_consecutive64(0, VBO_ATTRIB_MAX); /* reset all */ in vbo_exec_vtx_init()
1133 vbo_reset_all_attr(exec); in vbo_exec_vtx_init()
1135 exec->vtx.info.instance_count = 1; in vbo_exec_vtx_init()
1136 exec->vtx.info.max_index = ~0; in vbo_exec_vtx_init()
1141 vbo_exec_vtx_destroy(struct vbo_exec_context *exec) in vbo_exec_vtx_destroy() argument
1144 struct gl_context *ctx = gl_context_from_vbo_exec(exec); in vbo_exec_vtx_destroy()
1148 if (exec->vtx.buffer_map) { in vbo_exec_vtx_destroy()
1149 assert(!exec->vtx.bufferobj || in vbo_exec_vtx_destroy()
1150 exec->vtx.bufferobj->Name == IMM_BUFFER_NAME); in vbo_exec_vtx_destroy()
1151 if (!exec->vtx.bufferobj) { in vbo_exec_vtx_destroy()
1152 align_free(exec->vtx.buffer_map); in vbo_exec_vtx_destroy()
1153 exec->vtx.buffer_map = NULL; in vbo_exec_vtx_destroy()
1154 exec->vtx.buffer_ptr = NULL; in vbo_exec_vtx_destroy()
1160 if (exec->vtx.bufferobj && in vbo_exec_vtx_destroy()
1161 _mesa_bufferobj_mapped(exec->vtx.bufferobj, MAP_INTERNAL)) { in vbo_exec_vtx_destroy()
1162 _mesa_bufferobj_unmap(ctx, exec->vtx.bufferobj, MAP_INTERNAL); in vbo_exec_vtx_destroy()
1164 _mesa_reference_buffer_object(ctx, &exec->vtx.bufferobj, NULL); in vbo_exec_vtx_destroy()
1182 struct vbo_exec_context *exec = &vbo_context(ctx)->exec; in vbo_exec_FlushVertices() local
1186 exec->flush_call_depth++; in vbo_exec_FlushVertices()
1187 assert(exec->flush_call_depth == 1); in vbo_exec_FlushVertices()
1193 exec->flush_call_depth--; in vbo_exec_FlushVertices()
1194 assert(exec->flush_call_depth == 0); in vbo_exec_FlushVertices()
1200 vbo_exec_FlushVertices_internal(exec, flags); in vbo_exec_FlushVertices()
1203 exec->flush_call_depth--; in vbo_exec_FlushVertices()
1204 assert(exec->flush_call_depth == 0); in vbo_exec_FlushVertices()
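
vbo_exec_FlushVertices brackets the whole flush in a call-depth counter: a driver callback that re-entered the flush would corrupt the vertex store, so the asserts at lines 1187, 1194, and 1204 pin the depth to exactly one. The guard pattern distilled, with hypothetical names:

#include <assert.h>

struct flush_guard { int depth; };

static void flush_vertices(struct flush_guard *g, int have_vertices)
{
    g->depth++;
    assert(g->depth == 1);       /* a recursive flush is a bug */

    if (!have_vertices) {        /* early-out still unwinds the guard */
        g->depth--;
        assert(g->depth == 0);
        return;
    }

    /* ... flush the store and copy attribs to "current" here ... */

    g->depth--;
    assert(g->depth == 0);
}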