Lines Matching refs: vtx
42 static GLboolean match_fastpath( struct tnl_clipspace *vtx, in match_fastpath() argument
47 if (vtx->attr_count != fp->attr_count) in match_fastpath()
50 for (j = 0; j < vtx->attr_count; j++) in match_fastpath()
51 if (vtx->attr[j].format != fp->attr[j].format || in match_fastpath()
52 vtx->attr[j].inputsize != fp->attr[j].size || in match_fastpath()
53 vtx->attr[j].vertoffset != fp->attr[j].offset) in match_fastpath()
57 if (vtx->vertex_size != fp->vertex_size) in match_fastpath()
60 for (j = 0; j < vtx->attr_count; j++) in match_fastpath()
61 if (vtx->attr[j].inputstride != fp->attr[j].stride) in match_fastpath()
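Taken together, the checks at lines 42-61 compare the current vertex layout against a cached fastpath. A minimal sketch of that comparison, reconstructed only from the fields visible in the fragments above; the second parameter is inferred from the comparisons, and the control flow around the stride block (which the listing shows as a separate group of lines) is an assumption.

/* A cached fastpath is reusable only if every attribute's format, input
 * size and vertex offset match, and the input strides and total vertex
 * size are unchanged.  Whether the stride checks are conditional in the
 * original is not visible in the listing. */
static GLboolean match_fastpath(struct tnl_clipspace *vtx,
                                const struct tnl_clipspace_fastpath *fp)
{
   GLuint j;

   if (vtx->attr_count != fp->attr_count)
      return GL_FALSE;

   for (j = 0; j < vtx->attr_count; j++)
      if (vtx->attr[j].format != fp->attr[j].format ||
          vtx->attr[j].inputsize != fp->attr[j].size ||
          vtx->attr[j].vertoffset != fp->attr[j].offset)
         return GL_FALSE;

   if (vtx->vertex_size != fp->vertex_size)
      return GL_FALSE;

   for (j = 0; j < vtx->attr_count; j++)
      if (vtx->attr[j].inputstride != fp->attr[j].stride)
         return GL_FALSE;

   return GL_TRUE;
}
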
68 static GLboolean search_fastpath_emit( struct tnl_clipspace *vtx ) in search_fastpath_emit() argument
70 struct tnl_clipspace_fastpath *fp = vtx->fastpath; in search_fastpath_emit()
73 if (match_fastpath(vtx, fp)) { in search_fastpath_emit()
74 vtx->emit = fp->func; in search_fastpath_emit()
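Lines 68-74 show the cached-fastpath lookup: previously generated emit functions hang off vtx->fastpath, and a hit installs the cached function pointer. A hedged sketch of that walk; the singly linked ->next field is inferred from the registration code at line 112 below, and the return values are assumed.

/* Walk the per-context list of generated emit functions and reuse the
 * first one whose vertex layout matches the current setup. */
static GLboolean search_fastpath_emit(struct tnl_clipspace *vtx)
{
   struct tnl_clipspace_fastpath *fp;

   for (fp = vtx->fastpath; fp; fp = fp->next) {
      if (match_fastpath(vtx, fp)) {
         vtx->emit = fp->func;
         return GL_TRUE;
      }
   }

   return GL_FALSE;
}
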
82 void _tnl_register_fastpath( struct tnl_clipspace *vtx, in _tnl_register_fastpath() argument
93 fastpath->vertex_size = vtx->vertex_size; in _tnl_register_fastpath()
94 fastpath->attr_count = vtx->attr_count; in _tnl_register_fastpath()
96 fastpath->func = vtx->emit; in _tnl_register_fastpath()
97 fastpath->attr = malloc(vtx->attr_count * sizeof(fastpath->attr[0])); in _tnl_register_fastpath()
105 for (i = 0; i < vtx->attr_count; i++) { in _tnl_register_fastpath()
106 fastpath->attr[i].format = vtx->attr[i].format; in _tnl_register_fastpath()
107 fastpath->attr[i].stride = vtx->attr[i].inputstride; in _tnl_register_fastpath()
108 fastpath->attr[i].size = vtx->attr[i].inputsize; in _tnl_register_fastpath()
109 fastpath->attr[i].offset = vtx->attr[i].vertoffset; in _tnl_register_fastpath()
112 fastpath->next = vtx->fastpath; in _tnl_register_fastpath()
113 vtx->fastpath = fastpath; in _tnl_register_fastpath()
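Lines 82-113 record the layout that a freshly generated emit function was built for, so search_fastpath_emit() can find it again later. A sketch of the registration; the function's full parameter list and its allocation-failure handling are not visible above, so only vtx is taken here and the error path is assumed to bail out quietly.

/* Snapshot the current layout (attribute count, per-attribute format,
 * stride, size and offset, total vertex size) together with the emit
 * function built for it, and push the record onto the head of the
 * vtx->fastpath list. */
void _tnl_register_fastpath(struct tnl_clipspace *vtx)
{
   struct tnl_clipspace_fastpath *fastpath = calloc(1, sizeof(*fastpath));
   GLuint i;

   if (!fastpath)
      return;                          /* assumed failure handling */

   fastpath->vertex_size = vtx->vertex_size;
   fastpath->attr_count = vtx->attr_count;
   fastpath->func = vtx->emit;
   fastpath->attr = malloc(vtx->attr_count * sizeof(fastpath->attr[0]));
   if (!fastpath->attr) {
      free(fastpath);
      return;                          /* assumed failure handling */
   }

   for (i = 0; i < vtx->attr_count; i++) {
      fastpath->attr[i].format = vtx->attr[i].format;
      fastpath->attr[i].stride = vtx->attr[i].inputstride;
      fastpath->attr[i].size = vtx->attr[i].inputsize;
      fastpath->attr[i].offset = vtx->attr[i].vertoffset;
   }

   fastpath->next = vtx->fastpath;
   vtx->fastpath = fastpath;
}
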
124 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in choose_emit_func() local
125 struct tnl_clipspace_attr *a = vtx->attr; in choose_emit_func()
126 const GLuint attr_count = vtx->attr_count; in choose_emit_func()
136 vtx->emit = NULL; in choose_emit_func()
141 if (search_fastpath_emit(vtx)) { in choose_emit_func()
147 else if (vtx->codegen_emit) { in choose_emit_func()
148 vtx->codegen_emit(ctx); in choose_emit_func()
151 if (!vtx->emit) { in choose_emit_func()
157 if (!vtx->emit) in choose_emit_func()
158 vtx->emit = _tnl_generic_emit; in choose_emit_func()
160 vtx->emit( ctx, count, dest ); in choose_emit_func()
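Lines 124-160 show the emit-function selection cascade: try the cache of generated fastpaths, then runtime code generation, then fall back to the generic C emitter, and finally call whatever was chosen. A condensed sketch of that ordering; the per-attribute setup implied by the a/attr_count locals at lines 125-126, and whatever sits inside the first "if (!vtx->emit)" block at line 151, are not visible in the listing and are left as placeholders.

/* Selection order: cached fastpath -> runtime codegen -> generic
 * fallback.  Whatever ends up in vtx->emit is invoked directly, so
 * subsequent draws bypass this chooser entirely. */
static void choose_emit_func(struct gl_context *ctx, GLuint count, GLubyte *dest)
{
   struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx);

   vtx->emit = NULL;

   if (search_fastpath_emit(vtx)) {
      /* reuse a previously generated emit function for this layout */
   }
   else if (vtx->codegen_emit) {
      vtx->codegen_emit(ctx);          /* may set vtx->emit and register a fastpath */
   }

   if (!vtx->emit) {
      /* a further fallback attempt sits here in the original; its body is
       * not visible in the listing */
   }

   if (!vtx->emit)
      vtx->emit = _tnl_generic_emit;

   vtx->emit(ctx, count, dest);
}
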
170 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in choose_interp_func() local
175 if (vtx->need_extras && (twosided || unfilled)) { in choose_interp_func()
176 vtx->interp = _tnl_generic_interp_extras; in choose_interp_func()
178 vtx->interp = _tnl_generic_interp; in choose_interp_func()
181 vtx->interp( ctx, t, edst, eout, ein, force_boundary ); in choose_interp_func()
187 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in choose_copy_pv_func() local
193 if (vtx->need_extras && (twosided || unfilled)) { in choose_copy_pv_func()
194 vtx->copy_pv = _tnl_generic_copy_pv_extras; in choose_copy_pv_func()
196 vtx->copy_pv = _tnl_generic_copy_pv; in choose_copy_pv_func()
199 vtx->copy_pv( ctx, edst, esrc ); in choose_copy_pv_func()
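choose_interp_func() and choose_copy_pv_func() (lines 170-199) follow the same self-replacing pattern as the emit chooser: vtx->interp and vtx->copy_pv initially point at these stubs, which pick the _extras or plain generic variant based on two-sided/unfilled state, install it, and forward the call. A sketch of the interp side under that reading; how twosided and unfilled are derived from ctx is an assumption, since the listing only shows that they gate the _extras variant.

static void choose_interp_func(struct gl_context *ctx, GLfloat t,
                               GLuint edst, GLuint eout, GLuint ein,
                               GLboolean force_boundary)
{
   struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx);
   /* assumed derivation of the two flags */
   GLboolean twosided = ctx->Light.Enabled && ctx->Light.Model.TwoSide;
   GLboolean unfilled = (ctx->Polygon.FrontMode != GL_FILL ||
                         ctx->Polygon.BackMode != GL_FILL);

   if (vtx->need_extras && (twosided || unfilled))
      vtx->interp = _tnl_generic_interp_extras;
   else
      vtx->interp = _tnl_generic_interp;

   /* install, then forward: later calls go straight to the chosen function */
   vtx->interp(ctx, t, edst, eout, ein, force_boundary);
}

The selection cost is therefore paid only on the first call after invalidation; _tnl_interp() and _tnl_copy_pv() at lines 215-224 simply jump through the stored pointers.
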
215 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_interp() local
216 vtx->interp( ctx, t, edst, eout, ein, force_boundary ); in _tnl_interp()
223 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_copy_pv() local
224 vtx->copy_pv( ctx, edst, esrc ); in _tnl_copy_pv()
235 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_get_attr() local
236 const struct tnl_clipspace_attr *a = vtx->attr; in _tnl_get_attr()
237 const GLuint attr_count = vtx->attr_count; in _tnl_get_attr()
266 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_set_attr() local
267 const struct tnl_clipspace_attr *a = vtx->attr; in _tnl_set_attr()
268 const GLuint attr_count = vtx->attr_count; in _tnl_set_attr()
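The a/attr_count locals at lines 236-268 are used to look an attribute up in the packed vertex and convert it through the per-attribute extract/insert hooks that lines 351-352 install. A hedged sketch of the read side; the fallback for attributes that are not part of the vertex layout is simplified, and the write side (_tnl_set_attr) mirrors this with the insert() hook.

/* Locate the attribute in the packed hardware vertex and decode it via
 * the extract() hook chosen from _tnl_format_info for its format.  The
 * behaviour for attributes missing from the layout is simplified here to
 * "leave dest untouched". */
void _tnl_get_attr(struct gl_context *ctx, const void *vin,
                   GLenum attr, GLfloat *dest)
{
   struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx);
   const struct tnl_clipspace_attr *a = vtx->attr;
   const GLuint attr_count = vtx->attr_count;
   GLuint j;

   for (j = 0; j < attr_count; j++) {
      if (a[j].attrib == attr) {
         a[j].extract(&a[j], dest, (const GLubyte *)vin + a[j].vertoffset);
         return;
      }
   }
   /* attribute not emitted into the vertex: the original falls back to
    * current/context state here */
}
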
282 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_get_vertex() local
284 return vtx->vertex_buf + nr * vtx->vertex_size; in _tnl_get_vertex()
291 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_invalidate_vertex_state() local
292 vtx->new_inputs = ~0; in _tnl_invalidate_vertex_state()
293 vtx->interp = choose_interp_func; in _tnl_invalidate_vertex_state()
294 vtx->copy_pv = choose_copy_pv_func; in _tnl_invalidate_vertex_state()
298 static void invalidate_funcs( struct tnl_clipspace *vtx ) in invalidate_funcs() argument
300 vtx->emit = choose_emit_func; in invalidate_funcs()
301 vtx->interp = choose_interp_func; in invalidate_funcs()
302 vtx->copy_pv = choose_copy_pv_func; in invalidate_funcs()
303 vtx->new_inputs = ~0; in invalidate_funcs()
310 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_install_attrs() local
317 vtx->new_inputs = ~0; in _tnl_install_attrs()
318 vtx->need_viewport = GL_FALSE; in _tnl_install_attrs()
321 vtx->need_viewport = GL_TRUE; in _tnl_install_attrs()
342 if (vtx->attr_count != j || in _tnl_install_attrs()
343 vtx->attr[j].attrib != map[i].attrib || in _tnl_install_attrs()
344 vtx->attr[j].format != format || in _tnl_install_attrs()
345 vtx->attr[j].vertoffset != tmpoffset) { in _tnl_install_attrs()
346 invalidate_funcs(vtx); in _tnl_install_attrs()
348 vtx->attr[j].attrib = map[i].attrib; in _tnl_install_attrs()
349 vtx->attr[j].format = format; in _tnl_install_attrs()
350 vtx->attr[j].vp = vp; in _tnl_install_attrs()
351 vtx->attr[j].insert = _tnl_format_info[format].insert; in _tnl_install_attrs()
352 vtx->attr[j].extract = _tnl_format_info[format].extract; in _tnl_install_attrs()
353 vtx->attr[j].vertattrsize = _tnl_format_info[format].attrsize; in _tnl_install_attrs()
354 vtx->attr[j].vertoffset = tmpoffset; in _tnl_install_attrs()
361 vtx->attr[j].vertoffset); in _tnl_install_attrs()
368 vtx->attr_count = j; in _tnl_install_attrs()
371 vtx->vertex_size = unpacked_size; in _tnl_install_attrs()
373 vtx->vertex_size = offset; in _tnl_install_attrs()
375 assert(vtx->vertex_size <= vtx->max_vertex_size); in _tnl_install_attrs()
376 return vtx->vertex_size; in _tnl_install_attrs()
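Lines 310-376 are the driver-facing layout installer: it walks the requested attribute map, computes each attribute's offset in the packed vertex (either taken from an explicit unpacked layout or accumulated from per-format sizes), re-points the insert/extract hooks from _tnl_format_info, and drops all cached function choices whenever anything about the layout changed. A compact sketch of that loop; the tnl_attr_map fields, EMIT_PAD handling, and the debug output implied by line 361 are assumed or omitted where not visible above.

GLuint _tnl_install_attrs(struct gl_context *ctx, const struct tnl_attr_map *map,
                          GLuint nr, const GLfloat *vp, GLuint unpacked_size)
{
   struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx);
   GLuint offset = 0;
   GLuint i, j;

   vtx->new_inputs = ~0;
   vtx->need_viewport = GL_FALSE;
   if (vp)
      vtx->need_viewport = GL_TRUE;

   for (j = 0, i = 0; i < nr; i++) {
      const GLuint format = map[i].format;
      /* explicit offsets for unpacked layouts, running total otherwise */
      const GLuint tmpoffset = unpacked_size ? map[i].offset : offset;

      if (vtx->attr_count != j ||
          vtx->attr[j].attrib != map[i].attrib ||
          vtx->attr[j].format != format ||
          vtx->attr[j].vertoffset != tmpoffset) {
         /* layout changed: force re-selection of emit/interp/copy_pv */
         invalidate_funcs(vtx);

         vtx->attr[j].attrib = map[i].attrib;
         vtx->attr[j].format = format;
         vtx->attr[j].vp = vp;
         vtx->attr[j].insert = _tnl_format_info[format].insert;
         vtx->attr[j].extract = _tnl_format_info[format].extract;
         vtx->attr[j].vertattrsize = _tnl_format_info[format].attrsize;
         vtx->attr[j].vertoffset = tmpoffset;
      }

      j++;
      offset += _tnl_format_info[format].attrsize;
   }

   vtx->attr_count = j;
   vtx->vertex_size = unpacked_size ? unpacked_size : offset;

   assert(vtx->vertex_size <= vtx->max_vertex_size);
   return vtx->vertex_size;
}
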
383 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_invalidate_vertices() local
384 vtx->new_inputs |= newinputs; in _tnl_invalidate_vertices()
393 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_notify_pipeline_output_change() local
394 invalidate_funcs(vtx); in _tnl_notify_pipeline_output_change()
401 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in adjust_input_ptrs() local
402 struct tnl_clipspace_attr *a = vtx->attr; in adjust_input_ptrs()
403 const GLuint count = vtx->attr_count; in adjust_input_ptrs()
416 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in update_input_ptrs() local
417 struct tnl_clipspace_attr *a = vtx->attr; in update_input_ptrs()
418 const GLuint count = vtx->attr_count; in update_input_ptrs()
424 if (vtx->emit != choose_emit_func) { in update_input_ptrs()
433 vtx->vp_scale[0] = a->vp[MAT_SX]; in update_input_ptrs()
434 vtx->vp_scale[1] = a->vp[MAT_SY]; in update_input_ptrs()
435 vtx->vp_scale[2] = a->vp[MAT_SZ]; in update_input_ptrs()
436 vtx->vp_scale[3] = 1.0; in update_input_ptrs()
437 vtx->vp_xlate[0] = a->vp[MAT_TX]; in update_input_ptrs()
438 vtx->vp_xlate[1] = a->vp[MAT_TY]; in update_input_ptrs()
439 vtx->vp_xlate[2] = a->vp[MAT_TZ]; in update_input_ptrs()
440 vtx->vp_xlate[3] = 0.0; in update_input_ptrs()
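Lines 416-440 point each attribute at its source array for the coming emit and, when a viewport matrix was supplied to _tnl_install_attrs(), split it into a scale and a translate vector. A sketch under the assumption that the input arrays come from the TNL vertex_buffer, which is not visible in the fragments.

/* Rebase each attribute's input pointer at 'start' and cache the viewport
 * scale/translate taken from the matrix entries MAT_SX..MAT_TZ.  How the
 * source GLvector4f arrays are obtained is an assumption here. */
static void update_input_ptrs(struct gl_context *ctx, GLuint start)
{
   struct vertex_buffer *VB = &TNL_CONTEXT(ctx)->vb;   /* assumed source */
   struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx);
   struct tnl_clipspace_attr *a = vtx->attr;
   const GLuint count = vtx->attr_count;
   GLuint j;

   for (j = 0; j < count; j++) {
      GLvector4f *vptr = VB->AttribPtr[a[j].attrib];

      /* once a real emit function is installed, the recorded layout must
       * still match the incoming arrays */
      if (vtx->emit != choose_emit_func) {
         assert(a[j].inputstride == vptr->stride);
         assert(a[j].inputsize == vptr->size);
      }

      a[j].inputptr = (GLubyte *)vptr->data + start * vptr->stride;
   }

   if (a->vp) {
      vtx->vp_scale[0] = a->vp[MAT_SX];
      vtx->vp_scale[1] = a->vp[MAT_SY];
      vtx->vp_scale[2] = a->vp[MAT_SZ];
      vtx->vp_scale[3] = 1.0;
      vtx->vp_xlate[0] = a->vp[MAT_TX];
      vtx->vp_xlate[1] = a->vp[MAT_TY];
      vtx->vp_xlate[2] = a->vp[MAT_TZ];
      vtx->vp_xlate[3] = 0.0;
   }
}
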
450 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_build_vertices() local
452 vtx->emit( ctx, end - start, in _tnl_build_vertices()
453 (GLubyte *)(vtx->vertex_buf + in _tnl_build_vertices()
454 start * vtx->vertex_size)); in _tnl_build_vertices()
465 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_emit_vertices_to_buffer() local
470 vtx->emit( ctx, end - start, (GLubyte*) dest ); in _tnl_emit_vertices_to_buffer()
471 return (void *)((GLubyte *)dest + vtx->vertex_size * (end - start)); in _tnl_emit_vertices_to_buffer()
484 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_emit_indexed_vertices_to_buffer() local
489 vtx->emit( ctx, 1, cdest ); in _tnl_emit_indexed_vertices_to_buffer()
490 cdest += vtx->vertex_size; in _tnl_emit_indexed_vertices_to_buffer()
495 vtx->emit( ctx, 1, cdest); in _tnl_emit_indexed_vertices_to_buffer()
496 cdest += vtx->vertex_size; in _tnl_emit_indexed_vertices_to_buffer()
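Lines 484-496 show two emit call sites in the indexed path: one for the first element and one inside the per-element loop, with cdest advancing by vertex_size after each vertex. A sketch of how adjust_input_ptrs() (lines 401-403) plausibly drives that loop; the exact index-delta convention passed to it is an assumption, not visible in the listing.

/* Emit one vertex per element index into a caller-provided buffer.  The
 * input pointers are positioned once for the first element, then moved
 * by the delta between consecutive indices. */
void *_tnl_emit_indexed_vertices_to_buffer(struct gl_context *ctx,
                                           const GLuint *elts,
                                           GLuint start, GLuint end,
                                           void *dest)
{
   struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx);
   GLubyte *cdest = dest;
   GLuint prev;

   if (start >= end)
      return dest;

   /* first element: position the input pointers absolutely */
   prev = elts[start];
   update_input_ptrs(ctx, prev);
   vtx->emit(ctx, 1, cdest);
   cdest += vtx->vertex_size;

   /* remaining elements: step the input pointers relatively (assumed
    * delta convention) */
   for (start++; start < end; start++) {
      adjust_input_ptrs(ctx, (GLint)elts[start] - (GLint)prev);
      prev = elts[start];
      vtx->emit(ctx, 1, cdest);
      cdest += vtx->vertex_size;
   }

   return (void *)cdest;
}
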
507 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_init_vertices() local
511 vtx->need_extras = GL_TRUE; in _tnl_init_vertices()
512 if (max_vertex_size > vtx->max_vertex_size) { in _tnl_init_vertices()
514 vtx->max_vertex_size = max_vertex_size; in _tnl_init_vertices()
515 vtx->vertex_buf = align_calloc(vb_size * max_vertex_size, 32 ); in _tnl_init_vertices()
516 invalidate_funcs(vtx); in _tnl_init_vertices()
521 vtx->chan_scale[0] = 255.0; in _tnl_init_vertices()
522 vtx->chan_scale[1] = 255.0; in _tnl_init_vertices()
523 vtx->chan_scale[2] = 255.0; in _tnl_init_vertices()
524 vtx->chan_scale[3] = 255.0; in _tnl_init_vertices()
527 vtx->chan_scale[0] = 65535.0; in _tnl_init_vertices()
528 vtx->chan_scale[1] = 65535.0; in _tnl_init_vertices()
529 vtx->chan_scale[2] = 65535.0; in _tnl_init_vertices()
530 vtx->chan_scale[3] = 65535.0; in _tnl_init_vertices()
533 vtx->chan_scale[0] = 1.0; in _tnl_init_vertices()
534 vtx->chan_scale[1] = 1.0; in _tnl_init_vertices()
535 vtx->chan_scale[2] = 1.0; in _tnl_init_vertices()
536 vtx->chan_scale[3] = 1.0; in _tnl_init_vertices()
540 vtx->identity[0] = 0.0; in _tnl_init_vertices()
541 vtx->identity[1] = 0.0; in _tnl_init_vertices()
542 vtx->identity[2] = 0.0; in _tnl_init_vertices()
543 vtx->identity[3] = 1.0; in _tnl_init_vertices()
545 vtx->codegen_emit = NULL; in _tnl_init_vertices()
549 vtx->codegen_emit = _tnl_generate_sse_emit; in _tnl_init_vertices()
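Lines 507-549 are the one-time setup: (re)allocate the clip-space vertex buffer when a larger per-vertex size is requested, pick the colour channel scale for the compile-time channel type, seed the identity attribute value, and opt into SSE code generation when available. A sketch of that initialisation; the switch structure, the free-before-realloc step, and the USE_SSE_ASM / environment guard are assumptions based on the gaps between the visible fragments.

void _tnl_init_vertices(struct gl_context *ctx,
                        GLuint vb_size, GLuint max_vertex_size)
{
   struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx);

   vtx->need_extras = GL_TRUE;

   if (max_vertex_size > vtx->max_vertex_size) {
      _tnl_free_vertices(ctx);          /* assumed: drop any old buffer first */
      vtx->max_vertex_size = max_vertex_size;
      vtx->vertex_buf = align_calloc(vb_size * max_vertex_size, 32);
      invalidate_funcs(vtx);
   }

   switch (CHAN_TYPE) {                  /* compile-time colour channel type */
   case GL_UNSIGNED_BYTE:
      vtx->chan_scale[0] = vtx->chan_scale[1] =
      vtx->chan_scale[2] = vtx->chan_scale[3] = 255.0;
      break;
   case GL_UNSIGNED_SHORT:
      vtx->chan_scale[0] = vtx->chan_scale[1] =
      vtx->chan_scale[2] = vtx->chan_scale[3] = 65535.0;
      break;
   default:
      vtx->chan_scale[0] = vtx->chan_scale[1] =
      vtx->chan_scale[2] = vtx->chan_scale[3] = 1.0;
      break;
   }

   vtx->identity[0] = 0.0;
   vtx->identity[1] = 0.0;
   vtx->identity[2] = 0.0;
   vtx->identity[3] = 1.0;

   vtx->codegen_emit = NULL;
#ifdef USE_SSE_ASM                       /* assumed guard around the codegen hook */
   if (!getenv("MESA_NO_CODEGEN"))
      vtx->codegen_emit = _tnl_generate_sse_emit;
#endif
}
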
558 struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx); in _tnl_free_vertices() local
561 align_free(vtx->vertex_buf); in _tnl_free_vertices()
562 vtx->vertex_buf = NULL; in _tnl_free_vertices()
564 for (fp = vtx->fastpath ; fp ; fp = tmp) { in _tnl_free_vertices()
578 vtx->fastpath = NULL; in _tnl_free_vertices()
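Lines 558-578 imply the matching teardown: release the aligned vertex buffer and walk the fastpath list freeing each node. A sketch of that walk; releasing the generated fp->func code, which needs whatever executable-memory allocator produced it, is only hinted at since it is not visible in the fragments.

void _tnl_free_vertices(struct gl_context *ctx)
{
   struct tnl_clipspace *vtx = GET_VERTEX_STATE(ctx);
   struct tnl_clipspace_fastpath *fp, *tmp;

   align_free(vtx->vertex_buf);
   vtx->vertex_buf = NULL;

   for (fp = vtx->fastpath; fp; fp = tmp) {
      tmp = fp->next;
      free(fp->attr);
      /* the original also frees the generated fp->func here, using the
       * allocator that produced the executable code */
      free(fp);
   }

   vtx->fastpath = NULL;
}
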