1 /**************************************************************************
2 *
3 * Copyright 2007 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /**
29 * @file
30 *
31 * Wrap the cso cache & hash mechanisms in a simplified
32 * pipe-driver-specific interface.
33 *
34 * @author Zack Rusin <zackr@vmware.com>
35 * @author Keith Whitwell <keithw@vmware.com>
36 */
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
53 /**
54 * Per-shader sampler information.
55 */
56 struct sampler_info
57 {
58 struct cso_sampler *cso_samplers[PIPE_MAX_SAMPLERS];
59 void *samplers[PIPE_MAX_SAMPLERS];
60 unsigned nr_samplers;
61 };
62
63
64
65 struct cso_context {
66 struct pipe_context *pipe;
67 struct cso_cache *cache;
68 struct u_vbuf *vbuf;
69
70 boolean has_geometry_shader;
71 boolean has_tessellation;
72 boolean has_compute_shader;
73 boolean has_streamout;
74
75 unsigned saved_state; /**< bitmask of CSO_BIT_x flags */
76
77 struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
78 unsigned nr_fragment_views;
79
80 struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
81 unsigned nr_fragment_views_saved;
82
83 struct sampler_info fragment_samplers_saved;
84 struct sampler_info samplers[PIPE_SHADER_TYPES];
85
86 struct pipe_vertex_buffer aux_vertex_buffer_current;
87 struct pipe_vertex_buffer aux_vertex_buffer_saved;
88 unsigned aux_vertex_buffer_index;
89
90 struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
91 struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];
92
93 struct pipe_image_view fragment_image0_current;
94 struct pipe_image_view fragment_image0_saved;
95
96 unsigned nr_so_targets;
97 struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
98
99 unsigned nr_so_targets_saved;
100 struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
101
102 /** Current and saved state.
103 * The saved state is used as a 1-deep stack.
104 */
105 void *blend, *blend_saved;
106 void *depth_stencil, *depth_stencil_saved;
107 void *rasterizer, *rasterizer_saved;
108 void *fragment_shader, *fragment_shader_saved;
109 void *vertex_shader, *vertex_shader_saved;
110 void *geometry_shader, *geometry_shader_saved;
111 void *tessctrl_shader, *tessctrl_shader_saved;
112 void *tesseval_shader, *tesseval_shader_saved;
113 void *compute_shader;
114 void *velements, *velements_saved;
115 struct pipe_query *render_condition, *render_condition_saved;
116 uint render_condition_mode, render_condition_mode_saved;
117 boolean render_condition_cond, render_condition_cond_saved;
118
119 struct pipe_framebuffer_state fb, fb_saved;
120 struct pipe_viewport_state vp, vp_saved;
121 struct pipe_blend_color blend_color;
122 unsigned sample_mask, sample_mask_saved;
123 unsigned min_samples, min_samples_saved;
124 struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
125 };
126
127
static boolean delete_blend_state(struct cso_context *ctx, void *state)
129 {
130 struct cso_blend *cso = (struct cso_blend *)state;
131
132 if (ctx->blend == cso->data)
133 return FALSE;
134
135 if (cso->delete_state)
136 cso->delete_state(cso->context, cso->data);
137 FREE(state);
138 return TRUE;
139 }
140
static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
142 {
143 struct cso_depth_stencil_alpha *cso =
144 (struct cso_depth_stencil_alpha *)state;
145
146 if (ctx->depth_stencil == cso->data)
147 return FALSE;
148
149 if (cso->delete_state)
150 cso->delete_state(cso->context, cso->data);
151 FREE(state);
152
153 return TRUE;
154 }
155
static boolean delete_sampler_state(struct cso_context *ctx, void *state)
157 {
158 struct cso_sampler *cso = (struct cso_sampler *)state;
159 if (cso->delete_state)
160 cso->delete_state(cso->context, cso->data);
161 FREE(state);
162 return TRUE;
163 }
164
static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
166 {
167 struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
168
169 if (ctx->rasterizer == cso->data)
170 return FALSE;
171 if (cso->delete_state)
172 cso->delete_state(cso->context, cso->data);
173 FREE(state);
174 return TRUE;
175 }
176
static boolean delete_vertex_elements(struct cso_context *ctx,
178 void *state)
179 {
180 struct cso_velements *cso = (struct cso_velements *)state;
181
182 if (ctx->velements == cso->data)
183 return FALSE;
184
185 if (cso->delete_state)
186 cso->delete_state(cso->context, cso->data);
187 FREE(state);
188 return TRUE;
189 }
190
191
static inline boolean delete_cso(struct cso_context *ctx,
193 void *state, enum cso_cache_type type)
194 {
195 switch (type) {
196 case CSO_BLEND:
197 return delete_blend_state(ctx, state);
198 case CSO_SAMPLER:
199 return delete_sampler_state(ctx, state);
200 case CSO_DEPTH_STENCIL_ALPHA:
201 return delete_depth_stencil_state(ctx, state);
202 case CSO_RASTERIZER:
203 return delete_rasterizer_state(ctx, state);
204 case CSO_VELEMENTS:
205 return delete_vertex_elements(ctx, state);
206 default:
207 assert(0);
208 FREE(state);
209 }
210 return FALSE;
211 }
212
213 static inline void
sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
215 int max_size, void *user_data)
216 {
217 struct cso_context *ctx = (struct cso_context *)user_data;
/* If we're approaching the maximum size, remove a fourth of the entries;
* otherwise every subsequent call will go through the same thing. */
220 int hash_size = cso_hash_size(hash);
221 int max_entries = (max_size > hash_size) ? max_size : hash_size;
222 int to_remove = (max_size < max_entries) * max_entries/4;
223 struct cso_hash_iter iter;
224 struct cso_sampler **samplers_to_restore = NULL;
225 unsigned to_restore = 0;
226
227 if (hash_size > max_size)
228 to_remove += hash_size - max_size;
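/* Worked example (illustrative numbers, not from this file): with
 * max_size = 4 and hash_size = 5, max_entries is max(4, 5) = 5, so
 * to_remove starts at 5/4 = 1 and the overflow above adds 5 - 4 = 1,
 * for a total of 2 entries removed.
 */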
229
230 if (to_remove == 0)
231 return;
232
233 if (type == CSO_SAMPLER) {
234 int i, j;
235
236 samplers_to_restore = MALLOC(PIPE_SHADER_TYPES * PIPE_MAX_SAMPLERS *
237 sizeof(*samplers_to_restore));
238
239 /* Temporarily remove currently bound sampler states from the hash
240 * table, to prevent them from being deleted
241 */
242 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
243 for (j = 0; j < ctx->samplers[i].nr_samplers; j++) {
244 struct cso_sampler *sampler = ctx->samplers[i].cso_samplers[j];
245
246 if (sampler && cso_hash_take(hash, sampler->hash_key))
247 samplers_to_restore[to_restore++] = sampler;
248 }
249 }
250 }
251
252 iter = cso_hash_first_node(hash);
253 while (to_remove) {
/* remove elements until we're good */
/* FIXME: currently we pick the nodes to remove at random */
256 void *cso = cso_hash_iter_data(iter);
257
258 if (!cso)
259 break;
260
261 if (delete_cso(ctx, cso, type)) {
262 iter = cso_hash_erase(hash, iter);
263 --to_remove;
264 } else
265 iter = cso_hash_iter_next(iter);
266 }
267
268 if (type == CSO_SAMPLER) {
269 /* Put currently bound sampler states back into the hash table */
270 while (to_restore--) {
271 struct cso_sampler *sampler = samplers_to_restore[to_restore];
272
273 cso_hash_insert(hash, sampler->hash_key, sampler);
274 }
275
276 FREE(samplers_to_restore);
277 }
278 }
279
static void cso_init_vbuf(struct cso_context *cso)
281 {
282 struct u_vbuf_caps caps;
283
284 /* Install u_vbuf if there is anything unsupported. */
285 if (u_vbuf_get_caps(cso->pipe->screen, &caps)) {
286 cso->vbuf = u_vbuf_create(cso->pipe, &caps,
287 cso->aux_vertex_buffer_index);
288 }
289 }
290
struct cso_context *cso_create_context( struct pipe_context *pipe )
292 {
293 struct cso_context *ctx = CALLOC_STRUCT(cso_context);
294 if (!ctx)
295 return NULL;
296
297 ctx->cache = cso_cache_create();
298 if (ctx->cache == NULL)
299 goto out;
300 cso_cache_set_sanitize_callback(ctx->cache,
301 sanitize_hash,
302 ctx);
303
304 ctx->pipe = pipe;
305 ctx->sample_mask = ~0;
306
307 ctx->aux_vertex_buffer_index = 0; /* 0 for now */
308
309 cso_init_vbuf(ctx);
310
311 /* Enable for testing: */
312 if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
313
314 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
315 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
316 ctx->has_geometry_shader = TRUE;
317 }
318 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
319 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
320 ctx->has_tessellation = TRUE;
321 }
322 if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
323 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
324 int supported_irs =
325 pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
326 PIPE_SHADER_CAP_SUPPORTED_IRS);
327 if (supported_irs & (1 << PIPE_SHADER_IR_TGSI)) {
328 ctx->has_compute_shader = TRUE;
329 }
330 }
331 if (pipe->screen->get_param(pipe->screen,
332 PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
333 ctx->has_streamout = TRUE;
334 }
335
336 return ctx;
337
338 out:
339 cso_destroy_context( ctx );
340 return NULL;
341 }
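/*
 * Illustrative usage (a sketch, not part of this file): a state tracker
 * typically wraps its pipe context once at startup and tears the wrapper
 * down when the context is destroyed:
 *
 *    struct cso_context *cso = cso_create_context(pipe);
 *    if (!cso)
 *       return FALSE;
 *    ...
 *    cso_destroy_context(cso);
 */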
342
343 /**
344 * Free the CSO context.
345 */
void cso_destroy_context( struct cso_context *ctx )
347 {
348 unsigned i;
349
350 if (ctx->pipe) {
351 ctx->pipe->set_index_buffer(ctx->pipe, NULL);
352
353 ctx->pipe->bind_blend_state( ctx->pipe, NULL );
354 ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
355
356 {
357 static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
358 static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
359 struct pipe_screen *scr = ctx->pipe->screen;
360 enum pipe_shader_type sh;
361 for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
362 int maxsam = scr->get_shader_param(scr, sh,
363 PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
364 int maxview = scr->get_shader_param(scr, sh,
365 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
366 assert(maxsam <= PIPE_MAX_SAMPLERS);
367 assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
368 if (maxsam > 0) {
369 ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
370 }
371 if (maxview > 0) {
372 ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
373 }
374 }
375 }
376
377 ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
378 ctx->pipe->bind_fs_state( ctx->pipe, NULL );
379 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
380 ctx->pipe->bind_vs_state( ctx->pipe, NULL );
381 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
382 if (ctx->has_geometry_shader) {
383 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
384 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_GEOMETRY, 0, NULL);
385 }
386 if (ctx->has_tessellation) {
387 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
388 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_CTRL, 0, NULL);
389 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
390 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_EVAL, 0, NULL);
391 }
392 if (ctx->has_compute_shader) {
393 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
394 ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_COMPUTE, 0, NULL);
395 }
396 ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
397
398 if (ctx->has_streamout)
399 ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
400 }
401
402 for (i = 0; i < PIPE_MAX_SHADER_SAMPLER_VIEWS; i++) {
403 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
404 pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
405 }
406
407 util_unreference_framebuffer_state(&ctx->fb);
408 util_unreference_framebuffer_state(&ctx->fb_saved);
409
410 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer, NULL);
411 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
412
413 for (i = 0; i < PIPE_SHADER_TYPES; i++) {
414 pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
415 pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
416 }
417
418 pipe_resource_reference(&ctx->fragment_image0_current.resource, NULL);
419 pipe_resource_reference(&ctx->fragment_image0_saved.resource, NULL);
420
421 for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
422 pipe_so_target_reference(&ctx->so_targets[i], NULL);
423 pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
424 }
425
426 if (ctx->cache) {
427 cso_cache_delete( ctx->cache );
428 ctx->cache = NULL;
429 }
430
431 if (ctx->vbuf)
432 u_vbuf_destroy(ctx->vbuf);
433 FREE( ctx );
434 }
435
436
/* These functions will either find the state for the given template
* in the cache, or create a new state from the template, insert it
* into the cache and return it.
*/
441
/*
* If the driver returns 0 from the create method, the data member of
* the cso is set to the template itself.
*/
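/*
 * Illustrative usage (a sketch, not part of this file): callers fill out a
 * template on the stack and let the CSO context handle caching and binding:
 *
 *    struct pipe_blend_state blend;
 *    memset(&blend, 0, sizeof(blend));
 *    blend.rt[0].colormask = PIPE_MASK_RGBA;
 *    cso_set_blend(cso, &blend);
 */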
446
enum pipe_error cso_set_blend(struct cso_context *ctx,
448 const struct pipe_blend_state *templ)
449 {
450 unsigned key_size, hash_key;
451 struct cso_hash_iter iter;
452 void *handle;
453
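/* When independent blend is disabled, only rt[0] is meaningful, so it is
 * enough to hash the struct only up to the start of rt[1].
 */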
454 key_size = templ->independent_blend_enable ?
455 sizeof(struct pipe_blend_state) :
456 (char *)&(templ->rt[1]) - (char *)templ;
457 hash_key = cso_construct_key((void*)templ, key_size);
458 iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
459 (void*)templ, key_size);
460
461 if (cso_hash_iter_is_null(iter)) {
462 struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
463 if (!cso)
464 return PIPE_ERROR_OUT_OF_MEMORY;
465
466 memset(&cso->state, 0, sizeof cso->state);
467 memcpy(&cso->state, templ, key_size);
468 cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
469 cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
470 cso->context = ctx->pipe;
471
472 iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
473 if (cso_hash_iter_is_null(iter)) {
474 FREE(cso);
475 return PIPE_ERROR_OUT_OF_MEMORY;
476 }
477
478 handle = cso->data;
479 }
480 else {
481 handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
482 }
483
484 if (ctx->blend != handle) {
485 ctx->blend = handle;
486 ctx->pipe->bind_blend_state(ctx->pipe, handle);
487 }
488 return PIPE_OK;
489 }
490
491 static void
cso_save_blend(struct cso_context *ctx)
493 {
494 assert(!ctx->blend_saved);
495 ctx->blend_saved = ctx->blend;
496 }
497
498 static void
cso_restore_blend(struct cso_context *ctx)
500 {
501 if (ctx->blend != ctx->blend_saved) {
502 ctx->blend = ctx->blend_saved;
503 ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
504 }
505 ctx->blend_saved = NULL;
506 }
507
508
509
510 enum pipe_error
cso_set_depth_stencil_alpha(struct cso_context *ctx,
512 const struct pipe_depth_stencil_alpha_state *templ)
513 {
514 unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
515 unsigned hash_key = cso_construct_key((void*)templ, key_size);
516 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
517 hash_key,
518 CSO_DEPTH_STENCIL_ALPHA,
519 (void*)templ, key_size);
520 void *handle;
521
522 if (cso_hash_iter_is_null(iter)) {
523 struct cso_depth_stencil_alpha *cso =
524 MALLOC(sizeof(struct cso_depth_stencil_alpha));
525 if (!cso)
526 return PIPE_ERROR_OUT_OF_MEMORY;
527
528 memcpy(&cso->state, templ, sizeof(*templ));
529 cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
530 &cso->state);
531 cso->delete_state =
532 (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
533 cso->context = ctx->pipe;
534
535 iter = cso_insert_state(ctx->cache, hash_key,
536 CSO_DEPTH_STENCIL_ALPHA, cso);
537 if (cso_hash_iter_is_null(iter)) {
538 FREE(cso);
539 return PIPE_ERROR_OUT_OF_MEMORY;
540 }
541
542 handle = cso->data;
543 }
544 else {
545 handle = ((struct cso_depth_stencil_alpha *)
546 cso_hash_iter_data(iter))->data;
547 }
548
549 if (ctx->depth_stencil != handle) {
550 ctx->depth_stencil = handle;
551 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
552 }
553 return PIPE_OK;
554 }
555
556 static void
cso_save_depth_stencil_alpha(struct cso_context *ctx)
558 {
559 assert(!ctx->depth_stencil_saved);
560 ctx->depth_stencil_saved = ctx->depth_stencil;
561 }
562
563 static void
cso_restore_depth_stencil_alpha(struct cso_context *ctx)
565 {
566 if (ctx->depth_stencil != ctx->depth_stencil_saved) {
567 ctx->depth_stencil = ctx->depth_stencil_saved;
568 ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
569 ctx->depth_stencil_saved);
570 }
571 ctx->depth_stencil_saved = NULL;
572 }
573
574
575
enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
577 const struct pipe_rasterizer_state *templ)
578 {
579 unsigned key_size = sizeof(struct pipe_rasterizer_state);
580 unsigned hash_key = cso_construct_key((void*)templ, key_size);
581 struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
582 hash_key,
583 CSO_RASTERIZER,
584 (void*)templ, key_size);
585 void *handle = NULL;
586
587 if (cso_hash_iter_is_null(iter)) {
588 struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
589 if (!cso)
590 return PIPE_ERROR_OUT_OF_MEMORY;
591
592 memcpy(&cso->state, templ, sizeof(*templ));
593 cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
594 cso->delete_state =
595 (cso_state_callback)ctx->pipe->delete_rasterizer_state;
596 cso->context = ctx->pipe;
597
598 iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
599 if (cso_hash_iter_is_null(iter)) {
600 FREE(cso);
601 return PIPE_ERROR_OUT_OF_MEMORY;
602 }
603
604 handle = cso->data;
605 }
606 else {
607 handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
608 }
609
610 if (ctx->rasterizer != handle) {
611 ctx->rasterizer = handle;
612 ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
613 }
614 return PIPE_OK;
615 }
616
617 static void
cso_save_rasterizer(struct cso_context *ctx)
619 {
620 assert(!ctx->rasterizer_saved);
621 ctx->rasterizer_saved = ctx->rasterizer;
622 }
623
624 static void
cso_restore_rasterizer(struct cso_context *ctx)
626 {
627 if (ctx->rasterizer != ctx->rasterizer_saved) {
628 ctx->rasterizer = ctx->rasterizer_saved;
629 ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
630 }
631 ctx->rasterizer_saved = NULL;
632 }
633
634
void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
636 {
637 if (ctx->fragment_shader != handle) {
638 ctx->fragment_shader = handle;
639 ctx->pipe->bind_fs_state(ctx->pipe, handle);
640 }
641 }
642
void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
644 {
645 if (handle == ctx->fragment_shader) {
646 /* unbind before deleting */
647 ctx->pipe->bind_fs_state(ctx->pipe, NULL);
648 ctx->fragment_shader = NULL;
649 }
650 ctx->pipe->delete_fs_state(ctx->pipe, handle);
651 }
652
653 static void
cso_save_fragment_shader(struct cso_context *ctx)
655 {
656 assert(!ctx->fragment_shader_saved);
657 ctx->fragment_shader_saved = ctx->fragment_shader;
658 }
659
660 static void
cso_restore_fragment_shader(struct cso_context *ctx)
662 {
663 if (ctx->fragment_shader_saved != ctx->fragment_shader) {
664 ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
665 ctx->fragment_shader = ctx->fragment_shader_saved;
666 }
667 ctx->fragment_shader_saved = NULL;
668 }
669
670
void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
672 {
673 if (ctx->vertex_shader != handle) {
674 ctx->vertex_shader = handle;
675 ctx->pipe->bind_vs_state(ctx->pipe, handle);
676 }
677 }
678
void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
680 {
681 if (handle == ctx->vertex_shader) {
682 /* unbind before deleting */
683 ctx->pipe->bind_vs_state(ctx->pipe, NULL);
684 ctx->vertex_shader = NULL;
685 }
686 ctx->pipe->delete_vs_state(ctx->pipe, handle);
687 }
688
689 static void
cso_save_vertex_shader(struct cso_context *ctx)
691 {
692 assert(!ctx->vertex_shader_saved);
693 ctx->vertex_shader_saved = ctx->vertex_shader;
694 }
695
696 static void
cso_restore_vertex_shader(struct cso_context *ctx)
698 {
699 if (ctx->vertex_shader_saved != ctx->vertex_shader) {
700 ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
701 ctx->vertex_shader = ctx->vertex_shader_saved;
702 }
703 ctx->vertex_shader_saved = NULL;
704 }
705
706
void cso_set_framebuffer(struct cso_context *ctx,
708 const struct pipe_framebuffer_state *fb)
709 {
710 if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
711 util_copy_framebuffer_state(&ctx->fb, fb);
712 ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
713 }
714 }
715
716 static void
cso_save_framebuffer(struct cso_context *ctx)
718 {
719 util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
720 }
721
722 static void
cso_restore_framebuffer(struct cso_context *ctx)
724 {
725 if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
726 util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
727 ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
728 util_unreference_framebuffer_state(&ctx->fb_saved);
729 }
730 }
731
732
void cso_set_viewport(struct cso_context *ctx,
734 const struct pipe_viewport_state *vp)
735 {
736 if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
737 ctx->vp = *vp;
738 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
739 }
740 }
741
/**
* Set up viewport state for the given width and height (position is always (0,0)).
* Invert the Y axis if 'invert' is true.
*/
746 void
cso_set_viewport_dims(struct cso_context *ctx,
748 float width, float height, boolean invert)
749 {
750 struct pipe_viewport_state vp;
751 vp.scale[0] = width * 0.5f;
752 vp.scale[1] = height * (invert ? -0.5f : 0.5f);
753 vp.scale[2] = 0.5f;
754 vp.translate[0] = 0.5f * width;
755 vp.translate[1] = 0.5f * height;
756 vp.translate[2] = 0.5f;
757 cso_set_viewport(ctx, &vp);
758 }
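/* Worked example (illustrative numbers, not from this file): width = 640,
 * height = 480, invert = TRUE gives scale = (320, -240, 0.5) and
 * translate = (320, 240, 0.5), so NDC y = +1 maps to window y = 0 and
 * NDC y = -1 maps to window y = 480.
 */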
759
760 static void
cso_save_viewport(struct cso_context *ctx)
762 {
763 ctx->vp_saved = ctx->vp;
764 }
765
766
767 static void
cso_restore_viewport(struct cso_context *ctx)
769 {
770 if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
771 ctx->vp = ctx->vp_saved;
772 ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
773 }
774 }
775
776
void cso_set_blend_color(struct cso_context *ctx,
778 const struct pipe_blend_color *bc)
779 {
780 if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
781 ctx->blend_color = *bc;
782 ctx->pipe->set_blend_color(ctx->pipe, bc);
783 }
784 }
785
void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
787 {
788 if (ctx->sample_mask != sample_mask) {
789 ctx->sample_mask = sample_mask;
790 ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
791 }
792 }
793
794 static void
cso_save_sample_mask(struct cso_context *ctx)
796 {
797 ctx->sample_mask_saved = ctx->sample_mask;
798 }
799
800 static void
cso_restore_sample_mask(struct cso_context *ctx)
802 {
803 cso_set_sample_mask(ctx, ctx->sample_mask_saved);
804 }
805
void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
807 {
808 if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
809 ctx->min_samples = min_samples;
810 ctx->pipe->set_min_samples(ctx->pipe, min_samples);
811 }
812 }
813
814 static void
cso_save_min_samples(struct cso_context *ctx)
816 {
817 ctx->min_samples_saved = ctx->min_samples;
818 }
819
820 static void
cso_restore_min_samples(struct cso_context *ctx)
822 {
823 cso_set_min_samples(ctx, ctx->min_samples_saved);
824 }
825
void cso_set_stencil_ref(struct cso_context *ctx,
827 const struct pipe_stencil_ref *sr)
828 {
829 if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
830 ctx->stencil_ref = *sr;
831 ctx->pipe->set_stencil_ref(ctx->pipe, sr);
832 }
833 }
834
835 static void
cso_save_stencil_ref(struct cso_context *ctx)
837 {
838 ctx->stencil_ref_saved = ctx->stencil_ref;
839 }
840
841
842 static void
cso_restore_stencil_ref(struct cso_context *ctx)
844 {
845 if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
846 sizeof(ctx->stencil_ref))) {
847 ctx->stencil_ref = ctx->stencil_ref_saved;
848 ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
849 }
850 }
851
void cso_set_render_condition(struct cso_context *ctx,
853 struct pipe_query *query,
854 boolean condition, uint mode)
855 {
856 struct pipe_context *pipe = ctx->pipe;
857
858 if (ctx->render_condition != query ||
859 ctx->render_condition_mode != mode ||
860 ctx->render_condition_cond != condition) {
861 pipe->render_condition(pipe, query, condition, mode);
862 ctx->render_condition = query;
863 ctx->render_condition_cond = condition;
864 ctx->render_condition_mode = mode;
865 }
866 }
867
868 static void
cso_save_render_condition(struct cso_context *ctx)
870 {
871 ctx->render_condition_saved = ctx->render_condition;
872 ctx->render_condition_cond_saved = ctx->render_condition_cond;
873 ctx->render_condition_mode_saved = ctx->render_condition_mode;
874 }
875
876 static void
cso_restore_render_condition(struct cso_context *ctx)
878 {
879 cso_set_render_condition(ctx, ctx->render_condition_saved,
880 ctx->render_condition_cond_saved,
881 ctx->render_condition_mode_saved);
882 }
883
void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
885 {
886 assert(ctx->has_geometry_shader || !handle);
887
888 if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
889 ctx->geometry_shader = handle;
890 ctx->pipe->bind_gs_state(ctx->pipe, handle);
891 }
892 }
893
void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
895 {
896 if (handle == ctx->geometry_shader) {
897 /* unbind before deleting */
898 ctx->pipe->bind_gs_state(ctx->pipe, NULL);
899 ctx->geometry_shader = NULL;
900 }
901 ctx->pipe->delete_gs_state(ctx->pipe, handle);
902 }
903
904 static void
cso_save_geometry_shader(struct cso_context *ctx)
906 {
907 if (!ctx->has_geometry_shader) {
908 return;
909 }
910
911 assert(!ctx->geometry_shader_saved);
912 ctx->geometry_shader_saved = ctx->geometry_shader;
913 }
914
915 static void
cso_restore_geometry_shader(struct cso_context *ctx)
917 {
918 if (!ctx->has_geometry_shader) {
919 return;
920 }
921
922 if (ctx->geometry_shader_saved != ctx->geometry_shader) {
923 ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
924 ctx->geometry_shader = ctx->geometry_shader_saved;
925 }
926 ctx->geometry_shader_saved = NULL;
927 }
928
void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
930 {
931 assert(ctx->has_tessellation || !handle);
932
933 if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
934 ctx->tessctrl_shader = handle;
935 ctx->pipe->bind_tcs_state(ctx->pipe, handle);
936 }
937 }
938
void cso_delete_tessctrl_shader(struct cso_context *ctx, void *handle)
940 {
941 if (handle == ctx->tessctrl_shader) {
942 /* unbind before deleting */
943 ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
944 ctx->tessctrl_shader = NULL;
945 }
946 ctx->pipe->delete_tcs_state(ctx->pipe, handle);
947 }
948
949 static void
cso_save_tessctrl_shader(struct cso_context *ctx)
951 {
952 if (!ctx->has_tessellation) {
953 return;
954 }
955
956 assert(!ctx->tessctrl_shader_saved);
957 ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
958 }
959
960 static void
cso_restore_tessctrl_shader(struct cso_context *ctx)
962 {
963 if (!ctx->has_tessellation) {
964 return;
965 }
966
967 if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
968 ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
969 ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
970 }
971 ctx->tessctrl_shader_saved = NULL;
972 }
973
void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
975 {
976 assert(ctx->has_tessellation || !handle);
977
978 if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
979 ctx->tesseval_shader = handle;
980 ctx->pipe->bind_tes_state(ctx->pipe, handle);
981 }
982 }
983
void cso_delete_tesseval_shader(struct cso_context *ctx, void *handle)
985 {
986 if (handle == ctx->tesseval_shader) {
987 /* unbind before deleting */
988 ctx->pipe->bind_tes_state(ctx->pipe, NULL);
989 ctx->tesseval_shader = NULL;
990 }
991 ctx->pipe->delete_tes_state(ctx->pipe, handle);
992 }
993
994 static void
cso_save_tesseval_shader(struct cso_context *ctx)
996 {
997 if (!ctx->has_tessellation) {
998 return;
999 }
1000
1001 assert(!ctx->tesseval_shader_saved);
1002 ctx->tesseval_shader_saved = ctx->tesseval_shader;
1003 }
1004
1005 static void
cso_restore_tesseval_shader(struct cso_context *ctx)
1007 {
1008 if (!ctx->has_tessellation) {
1009 return;
1010 }
1011
1012 if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
1013 ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
1014 ctx->tesseval_shader = ctx->tesseval_shader_saved;
1015 }
1016 ctx->tesseval_shader_saved = NULL;
1017 }
1018
void cso_set_compute_shader_handle(struct cso_context *ctx, void *handle)
1020 {
1021 assert(ctx->has_compute_shader || !handle);
1022
1023 if (ctx->has_compute_shader && ctx->compute_shader != handle) {
1024 ctx->compute_shader = handle;
1025 ctx->pipe->bind_compute_state(ctx->pipe, handle);
1026 }
1027 }
1028
void cso_delete_compute_shader(struct cso_context *ctx, void *handle)
1030 {
1031 if (handle == ctx->compute_shader) {
1032 /* unbind before deleting */
1033 ctx->pipe->bind_compute_state(ctx->pipe, NULL);
1034 ctx->compute_shader = NULL;
1035 }
1036 ctx->pipe->delete_compute_state(ctx->pipe, handle);
1037 }
1038
1039 enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
1041 unsigned count,
1042 const struct pipe_vertex_element *states)
1043 {
1044 struct u_vbuf *vbuf = ctx->vbuf;
1045 unsigned key_size, hash_key;
1046 struct cso_hash_iter iter;
1047 void *handle;
1048 struct cso_velems_state velems_state;
1049
1050 if (vbuf) {
1051 u_vbuf_set_vertex_elements(vbuf, count, states);
1052 return PIPE_OK;
1053 }
1054
/* Need to include the count in the stored state data too.
* Otherwise the first 'count' pipe_vertex_elements could be identical
* even if the counts differ, and there's no guarantee the hash would
* differ in that case either.
*/
1060 key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
1061 velems_state.count = count;
1062 memcpy(velems_state.velems, states,
1063 sizeof(struct pipe_vertex_element) * count);
1064 hash_key = cso_construct_key((void*)&velems_state, key_size);
1065 iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
1066 (void*)&velems_state, key_size);
1067
1068 if (cso_hash_iter_is_null(iter)) {
1069 struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
1070 if (!cso)
1071 return PIPE_ERROR_OUT_OF_MEMORY;
1072
1073 memcpy(&cso->state, &velems_state, key_size);
1074 cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
1075 &cso->state.velems[0]);
1076 cso->delete_state =
1077 (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
1078 cso->context = ctx->pipe;
1079
1080 iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
1081 if (cso_hash_iter_is_null(iter)) {
1082 FREE(cso);
1083 return PIPE_ERROR_OUT_OF_MEMORY;
1084 }
1085
1086 handle = cso->data;
1087 }
1088 else {
1089 handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
1090 }
1091
1092 if (ctx->velements != handle) {
1093 ctx->velements = handle;
1094 ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
1095 }
1096 return PIPE_OK;
1097 }
1098
1099 static void
cso_save_vertex_elements(struct cso_context *ctx)
1101 {
1102 struct u_vbuf *vbuf = ctx->vbuf;
1103
1104 if (vbuf) {
1105 u_vbuf_save_vertex_elements(vbuf);
1106 return;
1107 }
1108
1109 assert(!ctx->velements_saved);
1110 ctx->velements_saved = ctx->velements;
1111 }
1112
1113 static void
cso_restore_vertex_elements(struct cso_context *ctx)
1115 {
1116 struct u_vbuf *vbuf = ctx->vbuf;
1117
1118 if (vbuf) {
1119 u_vbuf_restore_vertex_elements(vbuf);
1120 return;
1121 }
1122
1123 if (ctx->velements != ctx->velements_saved) {
1124 ctx->velements = ctx->velements_saved;
1125 ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
1126 }
1127 ctx->velements_saved = NULL;
1128 }
1129
1130 /* vertex buffers */
1131
void cso_set_vertex_buffers(struct cso_context *ctx,
1133 unsigned start_slot, unsigned count,
1134 const struct pipe_vertex_buffer *buffers)
1135 {
1136 struct u_vbuf *vbuf = ctx->vbuf;
1137
1138 if (vbuf) {
1139 u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
1140 return;
1141 }
1142
/* Keep track of what's in the auxiliary slot, so that we can save and
* restore it for meta ops. */
1145 if (start_slot <= ctx->aux_vertex_buffer_index &&
1146 start_slot+count > ctx->aux_vertex_buffer_index) {
1147 if (buffers) {
1148 const struct pipe_vertex_buffer *vb =
1149 buffers + (ctx->aux_vertex_buffer_index - start_slot);
1150
1151 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
1152 vb->buffer);
1153 memcpy(&ctx->aux_vertex_buffer_current, vb,
1154 sizeof(struct pipe_vertex_buffer));
1155 }
1156 else {
1157 pipe_resource_reference(&ctx->aux_vertex_buffer_current.buffer,
1158 NULL);
1159 ctx->aux_vertex_buffer_current.user_buffer = NULL;
1160 }
1161 }
1162
1163 ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
1164 }
1165
1166 static void
cso_save_aux_vertex_buffer_slot(struct cso_context *ctx)
1168 {
1169 struct u_vbuf *vbuf = ctx->vbuf;
1170
1171 if (vbuf) {
1172 u_vbuf_save_aux_vertex_buffer_slot(vbuf);
1173 return;
1174 }
1175
1176 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer,
1177 ctx->aux_vertex_buffer_current.buffer);
1178 memcpy(&ctx->aux_vertex_buffer_saved, &ctx->aux_vertex_buffer_current,
1179 sizeof(struct pipe_vertex_buffer));
1180 }
1181
1182 static void
cso_restore_aux_vertex_buffer_slot(struct cso_context *ctx)
1184 {
1185 struct u_vbuf *vbuf = ctx->vbuf;
1186
1187 if (vbuf) {
1188 u_vbuf_restore_aux_vertex_buffer_slot(vbuf);
1189 return;
1190 }
1191
1192 cso_set_vertex_buffers(ctx, ctx->aux_vertex_buffer_index, 1,
1193 &ctx->aux_vertex_buffer_saved);
1194 pipe_resource_reference(&ctx->aux_vertex_buffer_saved.buffer, NULL);
1195 }
1196
unsigned cso_get_aux_vertex_buffer_slot(struct cso_context *ctx)
1198 {
1199 return ctx->aux_vertex_buffer_index;
1200 }
1201
1202
1203
1204 enum pipe_error
cso_single_sampler(struct cso_context *ctx, unsigned shader_stage,
1206 unsigned idx, const struct pipe_sampler_state *templ)
1207 {
1208 if (templ) {
1209 unsigned key_size = sizeof(struct pipe_sampler_state);
1210 unsigned hash_key = cso_construct_key((void*)templ, key_size);
1211 struct cso_sampler *cso;
1212 struct cso_hash_iter iter =
1213 cso_find_state_template(ctx->cache,
1214 hash_key, CSO_SAMPLER,
1215 (void *) templ, key_size);
1216
1217 if (cso_hash_iter_is_null(iter)) {
1218 cso = MALLOC(sizeof(struct cso_sampler));
1219 if (!cso)
1220 return PIPE_ERROR_OUT_OF_MEMORY;
1221
1222 memcpy(&cso->state, templ, sizeof(*templ));
1223 cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
1224 cso->delete_state =
1225 (cso_state_callback) ctx->pipe->delete_sampler_state;
1226 cso->context = ctx->pipe;
1227 cso->hash_key = hash_key;
1228
1229 iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
1230 if (cso_hash_iter_is_null(iter)) {
1231 FREE(cso);
1232 return PIPE_ERROR_OUT_OF_MEMORY;
1233 }
1234 }
1235 else {
1236 cso = cso_hash_iter_data(iter);
1237 }
1238
1239 ctx->samplers[shader_stage].cso_samplers[idx] = cso;
1240 ctx->samplers[shader_stage].samplers[idx] = cso->data;
1241 } else {
1242 ctx->samplers[shader_stage].cso_samplers[idx] = NULL;
1243 ctx->samplers[shader_stage].samplers[idx] = NULL;
1244 }
1245
1246 return PIPE_OK;
1247 }
1248
1249
1250 /**
1251 * Send staged sampler state to the driver.
1252 */
1253 void
cso_single_sampler_done(struct cso_context *ctx,
1255 enum pipe_shader_type shader_stage)
1256 {
1257 struct sampler_info *info = &ctx->samplers[shader_stage];
1258 const unsigned old_nr_samplers = info->nr_samplers;
1259 unsigned i;
1260
1261 /* find highest non-null sampler */
1262 for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
1263 if (info->samplers[i - 1] != NULL)
1264 break;
1265 }
1266
1267 info->nr_samplers = i;
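/* Bind MAX2(old, new) entries so that any sampler slots which were bound
 * before but are no longer used get unbound via the trailing NULL entries.
 */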
1268 ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0,
1269 MAX2(old_nr_samplers, info->nr_samplers),
1270 info->samplers);
1271 }
1272
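/*
 * Illustrative usage (a sketch, not part of this file): stage two fragment
 * samplers one at a time, then flush the staged state to the driver; the
 * nearest_templ/linear_templ names are hypothetical pipe_sampler_state
 * templates:
 *
 *    cso_single_sampler(cso, PIPE_SHADER_FRAGMENT, 0, &nearest_templ);
 *    cso_single_sampler(cso, PIPE_SHADER_FRAGMENT, 1, &linear_templ);
 *    cso_single_sampler_done(cso, PIPE_SHADER_FRAGMENT);
 */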
1273
/*
* If the function encounters any errors it will return the
* last one, so that we always try to set as many samplers
* as possible.
*/
1279 enum pipe_error
cso_set_samplers(struct cso_context *ctx,
1281 enum pipe_shader_type shader_stage,
1282 unsigned nr,
1283 const struct pipe_sampler_state **templates)
1284 {
1285 struct sampler_info *info = &ctx->samplers[shader_stage];
1286 unsigned i;
1287 enum pipe_error temp, error = PIPE_OK;
1288
1289 for (i = 0; i < nr; i++) {
1290 temp = cso_single_sampler(ctx, shader_stage, i, templates[i]);
1291 if (temp != PIPE_OK)
1292 error = temp;
1293 }
1294
1295 for ( ; i < info->nr_samplers; i++) {
1296 temp = cso_single_sampler(ctx, shader_stage, i, NULL);
1297 if (temp != PIPE_OK)
1298 error = temp;
1299 }
1300
1301 cso_single_sampler_done(ctx, shader_stage);
1302
1303 return error;
1304 }
1305
1306 static void
cso_save_fragment_samplers(struct cso_context *ctx)
1308 {
1309 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1310 struct sampler_info *saved = &ctx->fragment_samplers_saved;
1311
1312 saved->nr_samplers = info->nr_samplers;
1313 memcpy(saved->cso_samplers, info->cso_samplers, info->nr_samplers *
1314 sizeof(*info->cso_samplers));
1315 memcpy(saved->samplers, info->samplers, info->nr_samplers *
1316 sizeof(*info->samplers));
1317 }
1318
1319
1320 static void
cso_restore_fragment_samplers(struct cso_context *ctx)
1322 {
1323 struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1324 struct sampler_info *saved = &ctx->fragment_samplers_saved;
1325 int delta = (int)info->nr_samplers - saved->nr_samplers;
1326
1327 memcpy(info->cso_samplers, saved->cso_samplers,
1328 saved->nr_samplers * sizeof(*info->cso_samplers));
1329 memcpy(info->samplers, saved->samplers,
1330 saved->nr_samplers * sizeof(*info->samplers));
1331
1332 if (delta > 0) {
1333 memset(&info->cso_samplers[saved->nr_samplers], 0,
1334 delta * sizeof(*info->cso_samplers));
1335 memset(&info->samplers[saved->nr_samplers], 0,
1336 delta * sizeof(*info->samplers));
1337 }
1338
1339 cso_single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
1340 }
1341
1342
1343 void
cso_set_sampler_views(struct cso_context *ctx,
1345 enum pipe_shader_type shader_stage,
1346 unsigned count,
1347 struct pipe_sampler_view **views)
1348 {
1349 if (shader_stage == PIPE_SHADER_FRAGMENT) {
1350 unsigned i;
1351 boolean any_change = FALSE;
1352
1353 /* reference new views */
1354 for (i = 0; i < count; i++) {
1355 any_change |= ctx->fragment_views[i] != views[i];
1356 pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
1357 }
1358 /* unref extra old views, if any */
1359 for (; i < ctx->nr_fragment_views; i++) {
1360 any_change |= ctx->fragment_views[i] != NULL;
1361 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1362 }
1363
1364 /* bind the new sampler views */
1365 if (any_change) {
1366 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
1367 MAX2(ctx->nr_fragment_views, count),
1368 ctx->fragment_views);
1369 }
1370
1371 ctx->nr_fragment_views = count;
1372 }
1373 else
1374 ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
1375 }
1376
1377
1378 static void
cso_save_fragment_sampler_views(struct cso_context *ctx)
1380 {
1381 unsigned i;
1382
1383 ctx->nr_fragment_views_saved = ctx->nr_fragment_views;
1384
1385 for (i = 0; i < ctx->nr_fragment_views; i++) {
1386 assert(!ctx->fragment_views_saved[i]);
1387 pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
1388 ctx->fragment_views[i]);
1389 }
1390 }
1391
1392
1393 static void
cso_restore_fragment_sampler_views(struct cso_context *ctx)
1395 {
1396 unsigned i, nr_saved = ctx->nr_fragment_views_saved;
1397 unsigned num;
1398
1399 for (i = 0; i < nr_saved; i++) {
1400 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1401 /* move the reference from one pointer to another */
1402 ctx->fragment_views[i] = ctx->fragment_views_saved[i];
1403 ctx->fragment_views_saved[i] = NULL;
1404 }
1405 for (; i < ctx->nr_fragment_views; i++) {
1406 pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1407 }
1408
1409 num = MAX2(ctx->nr_fragment_views, nr_saved);
1410
1411 /* bind the old/saved sampler views */
1412 ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
1413 ctx->fragment_views);
1414
1415 ctx->nr_fragment_views = nr_saved;
1416 ctx->nr_fragment_views_saved = 0;
1417 }
1418
1419
1420 void
cso_set_shader_images(struct cso_context *ctx,
1422 enum pipe_shader_type shader_stage,
1423 unsigned start, unsigned count,
1424 struct pipe_image_view *images)
1425 {
1426 if (shader_stage == PIPE_SHADER_FRAGMENT && start == 0 && count >= 1) {
1427 util_copy_image_view(&ctx->fragment_image0_current, &images[0]);
1428 }
1429
1430 ctx->pipe->set_shader_images(ctx->pipe, shader_stage, start, count, images);
1431 }
1432
1433
1434 static void
cso_save_fragment_image0(struct cso_context *ctx)
1436 {
1437 util_copy_image_view(&ctx->fragment_image0_saved,
1438 &ctx->fragment_image0_current);
1439 }
1440
1441
1442 static void
cso_restore_fragment_image0(struct cso_context *ctx)
1444 {
1445 cso_set_shader_images(ctx, PIPE_SHADER_FRAGMENT, 0, 1,
1446 &ctx->fragment_image0_saved);
1447 }
1448
1449
1450 void
cso_set_stream_outputs(struct cso_context *ctx,
1452 unsigned num_targets,
1453 struct pipe_stream_output_target **targets,
1454 const unsigned *offsets)
1455 {
1456 struct pipe_context *pipe = ctx->pipe;
1457 uint i;
1458
1459 if (!ctx->has_streamout) {
1460 assert(num_targets == 0);
1461 return;
1462 }
1463
1464 if (ctx->nr_so_targets == 0 && num_targets == 0) {
1465 /* Nothing to do. */
1466 return;
1467 }
1468
1469 /* reference new targets */
1470 for (i = 0; i < num_targets; i++) {
1471 pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1472 }
1473 /* unref extra old targets, if any */
1474 for (; i < ctx->nr_so_targets; i++) {
1475 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1476 }
1477
1478 pipe->set_stream_output_targets(pipe, num_targets, targets,
1479 offsets);
1480 ctx->nr_so_targets = num_targets;
1481 }
1482
1483 static void
cso_save_stream_outputs(struct cso_context *ctx)
1485 {
1486 uint i;
1487
1488 if (!ctx->has_streamout) {
1489 return;
1490 }
1491
1492 ctx->nr_so_targets_saved = ctx->nr_so_targets;
1493
1494 for (i = 0; i < ctx->nr_so_targets; i++) {
1495 assert(!ctx->so_targets_saved[i]);
1496 pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1497 }
1498 }
1499
1500 static void
cso_restore_stream_outputs(struct cso_context *ctx)
1502 {
1503 struct pipe_context *pipe = ctx->pipe;
1504 uint i;
1505 unsigned offset[PIPE_MAX_SO_BUFFERS];
1506
1507 if (!ctx->has_streamout) {
1508 return;
1509 }
1510
1511 if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1512 /* Nothing to do. */
1513 return;
1514 }
1515
1516 assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
1517 for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1518 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1519 /* move the reference from one pointer to another */
1520 ctx->so_targets[i] = ctx->so_targets_saved[i];
1521 ctx->so_targets_saved[i] = NULL;
1522 /* -1 means append */
1523 offset[i] = (unsigned)-1;
1524 }
1525 for (; i < ctx->nr_so_targets; i++) {
1526 pipe_so_target_reference(&ctx->so_targets[i], NULL);
1527 }
1528
1529 pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1530 ctx->so_targets, offset);
1531
1532 ctx->nr_so_targets = ctx->nr_so_targets_saved;
1533 ctx->nr_so_targets_saved = 0;
1534 }
1535
1536 /* constant buffers */
1537
1538 void
cso_set_constant_buffer(struct cso_context *cso, unsigned shader_stage,
1540 unsigned index, struct pipe_constant_buffer *cb)
1541 {
1542 struct pipe_context *pipe = cso->pipe;
1543
1544 pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1545
1546 if (index == 0) {
1547 util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1548 }
1549 }
1550
1551 void
cso_set_constant_buffer_resource(struct cso_context *cso,
1553 unsigned shader_stage,
1554 unsigned index,
1555 struct pipe_resource *buffer)
1556 {
1557 if (buffer) {
1558 struct pipe_constant_buffer cb;
1559 cb.buffer = buffer;
1560 cb.buffer_offset = 0;
1561 cb.buffer_size = buffer->width0;
1562 cb.user_buffer = NULL;
1563 cso_set_constant_buffer(cso, shader_stage, index, &cb);
1564 } else {
1565 cso_set_constant_buffer(cso, shader_stage, index, NULL);
1566 }
1567 }
1568
1569 void
cso_save_constant_buffer_slot0(struct cso_context *cso,
1571 unsigned shader_stage)
1572 {
1573 util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
1574 &cso->aux_constbuf_current[shader_stage]);
1575 }
1576
1577 void
cso_restore_constant_buffer_slot0(struct cso_context *cso,
1579 unsigned shader_stage)
1580 {
1581 cso_set_constant_buffer(cso, shader_stage, 0,
1582 &cso->aux_constbuf_saved[shader_stage]);
1583 pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1584 NULL);
1585 }
1586
1587
1588 /**
1589 * Save all the CSO state items specified by the state_mask bitmask
1590 * of CSO_BIT_x flags.
1591 */
1592 void
cso_save_state(struct cso_context *cso, unsigned state_mask)
1594 {
1595 assert(cso->saved_state == 0);
1596
1597 cso->saved_state = state_mask;
1598
1599 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1600 cso_save_aux_vertex_buffer_slot(cso);
1601 if (state_mask & CSO_BIT_BLEND)
1602 cso_save_blend(cso);
1603 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1604 cso_save_depth_stencil_alpha(cso);
1605 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1606 cso_save_fragment_samplers(cso);
1607 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1608 cso_save_fragment_sampler_views(cso);
1609 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1610 cso_save_fragment_shader(cso);
1611 if (state_mask & CSO_BIT_FRAMEBUFFER)
1612 cso_save_framebuffer(cso);
1613 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1614 cso_save_geometry_shader(cso);
1615 if (state_mask & CSO_BIT_MIN_SAMPLES)
1616 cso_save_min_samples(cso);
1617 if (state_mask & CSO_BIT_RASTERIZER)
1618 cso_save_rasterizer(cso);
1619 if (state_mask & CSO_BIT_RENDER_CONDITION)
1620 cso_save_render_condition(cso);
1621 if (state_mask & CSO_BIT_SAMPLE_MASK)
1622 cso_save_sample_mask(cso);
1623 if (state_mask & CSO_BIT_STENCIL_REF)
1624 cso_save_stencil_ref(cso);
1625 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1626 cso_save_stream_outputs(cso);
1627 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1628 cso_save_tessctrl_shader(cso);
1629 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1630 cso_save_tesseval_shader(cso);
1631 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1632 cso_save_vertex_elements(cso);
1633 if (state_mask & CSO_BIT_VERTEX_SHADER)
1634 cso_save_vertex_shader(cso);
1635 if (state_mask & CSO_BIT_VIEWPORT)
1636 cso_save_viewport(cso);
1637 if (state_mask & CSO_BIT_PAUSE_QUERIES)
1638 cso->pipe->set_active_query_state(cso->pipe, false);
1639 if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1640 cso_save_fragment_image0(cso);
1641 }
1642
1643
1644 /**
1645 * Restore the state which was saved by cso_save_state().
1646 */
1647 void
cso_restore_state(struct cso_context *cso)
1649 {
1650 unsigned state_mask = cso->saved_state;
1651
1652 assert(state_mask);
1653
1654 if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1655 cso_restore_aux_vertex_buffer_slot(cso);
1656 if (state_mask & CSO_BIT_BLEND)
1657 cso_restore_blend(cso);
1658 if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1659 cso_restore_depth_stencil_alpha(cso);
1660 if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1661 cso_restore_fragment_samplers(cso);
1662 if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1663 cso_restore_fragment_sampler_views(cso);
1664 if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1665 cso_restore_fragment_shader(cso);
1666 if (state_mask & CSO_BIT_FRAMEBUFFER)
1667 cso_restore_framebuffer(cso);
1668 if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1669 cso_restore_geometry_shader(cso);
1670 if (state_mask & CSO_BIT_MIN_SAMPLES)
1671 cso_restore_min_samples(cso);
1672 if (state_mask & CSO_BIT_RASTERIZER)
1673 cso_restore_rasterizer(cso);
1674 if (state_mask & CSO_BIT_RENDER_CONDITION)
1675 cso_restore_render_condition(cso);
1676 if (state_mask & CSO_BIT_SAMPLE_MASK)
1677 cso_restore_sample_mask(cso);
1678 if (state_mask & CSO_BIT_STENCIL_REF)
1679 cso_restore_stencil_ref(cso);
1680 if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1681 cso_restore_stream_outputs(cso);
1682 if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1683 cso_restore_tessctrl_shader(cso);
1684 if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1685 cso_restore_tesseval_shader(cso);
1686 if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1687 cso_restore_vertex_elements(cso);
1688 if (state_mask & CSO_BIT_VERTEX_SHADER)
1689 cso_restore_vertex_shader(cso);
1690 if (state_mask & CSO_BIT_VIEWPORT)
1691 cso_restore_viewport(cso);
1692 if (state_mask & CSO_BIT_PAUSE_QUERIES)
1693 cso->pipe->set_active_query_state(cso->pipe, true);
1694 if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1695 cso_restore_fragment_image0(cso);
1696
1697 cso->saved_state = 0;
1698 }
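/*
 * Illustrative usage (a sketch, not part of this file): a meta operation
 * brackets its temporary state changes with a save/restore pair:
 *
 *    cso_save_state(cso, CSO_BIT_BLEND |
 *                        CSO_BIT_FRAGMENT_SHADER |
 *                        CSO_BIT_FRAMEBUFFER |
 *                        CSO_BIT_VIEWPORT);
 *    ... bind temporary state and draw ...
 *    cso_restore_state(cso);
 */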
1699
1700
1701
1702 /* drawing */
1703
1704 void
cso_set_index_buffer(struct cso_context *cso,
1706 const struct pipe_index_buffer *ib)
1707 {
1708 struct u_vbuf *vbuf = cso->vbuf;
1709
1710 if (vbuf) {
1711 u_vbuf_set_index_buffer(vbuf, ib);
1712 } else {
1713 struct pipe_context *pipe = cso->pipe;
1714 pipe->set_index_buffer(pipe, ib);
1715 }
1716 }
1717
1718 void
cso_draw_vbo(struct cso_context *cso,
1720 const struct pipe_draw_info *info)
1721 {
1722 struct u_vbuf *vbuf = cso->vbuf;
1723
1724 if (vbuf) {
1725 u_vbuf_draw_vbo(vbuf, info);
1726 } else {
1727 struct pipe_context *pipe = cso->pipe;
1728 pipe->draw_vbo(pipe, info);
1729 }
1730 }
1731
1732 void
cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1734 {
1735 struct pipe_draw_info info;
1736
1737 util_draw_init_info(&info);
1738
1739 info.mode = mode;
1740 info.start = start;
1741 info.count = count;
1742 info.min_index = start;
1743 info.max_index = start + count - 1;
1744
1745 cso_draw_vbo(cso, &info);
1746 }
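/*
 * Illustrative usage (a sketch, not part of this file): draw a quad from
 * the four vertices currently bound in the vertex buffers:
 *
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLE_FAN, 0, 4);
 */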
1747
1748 void
cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1750 uint start, uint count,
1751 uint start_instance, uint instance_count)
1752 {
1753 struct pipe_draw_info info;
1754
1755 util_draw_init_info(&info);
1756
1757 info.mode = mode;
1758 info.start = start;
1759 info.count = count;
1760 info.min_index = start;
1761 info.max_index = start + count - 1;
1762 info.start_instance = start_instance;
1763 info.instance_count = instance_count;
1764
1765 cso_draw_vbo(cso, &info);
1766 }
1767