/**************************************************************************
 *
 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

/**
 * @file
 *
 * Wrap the cso cache & hash mechanisms in a simplified
 * pipe-driver-specific interface.
 *
 * @author Zack Rusin <zack@tungstengraphics.com>
 * @author Keith Whitwell <keith@tungstengraphics.com>
 */
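
/*
 * Example usage (a minimal sketch, not code from this module; "pipe" and
 * "blend_templ" stand for hypothetical caller state):
 *
 *    struct cso_context *cso = cso_create_context(pipe);
 *
 *    cso_save_blend(cso);
 *    cso_set_blend(cso, &blend_templ);   (cached create_blend_state + bind)
 *    ...draw via cso_draw_vbo() / cso_draw_arrays()...
 *    cso_restore_blend(cso);             (rebind the previous blend CSO)
 */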
37
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51
52
/**
 * Info related to samplers and sampler views.
 * We have one of these per shader stage (fragment, vertex, geometry).
 */
struct sampler_info
{
   struct {
      void *samplers[PIPE_MAX_SAMPLERS];
      unsigned nr_samplers;
   } hw;

   void *samplers[PIPE_MAX_SAMPLERS];
   unsigned nr_samplers;

   void *samplers_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_samplers_saved;

   struct pipe_sampler_view *views[PIPE_MAX_SAMPLERS];
   unsigned nr_views;

   struct pipe_sampler_view *views_saved[PIPE_MAX_SAMPLERS];
   unsigned nr_views_saved;
};


struct cso_context {
   struct pipe_context *pipe;
   struct cso_cache *cache;
   struct u_vbuf *vbuf;

   boolean has_geometry_shader;
   boolean has_streamout;

   struct sampler_info samplers[PIPE_SHADER_TYPES];

   uint nr_vertex_buffers;
   struct pipe_vertex_buffer vertex_buffers[PIPE_MAX_ATTRIBS];

   uint nr_vertex_buffers_saved;
   struct pipe_vertex_buffer vertex_buffers_saved[PIPE_MAX_ATTRIBS];

   unsigned nr_so_targets;
   struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];

   unsigned nr_so_targets_saved;
   struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];

   /** Current and saved state.
    * The saved state is used as a 1-deep stack (see the usage sketch
    * after this struct).
    */
   void *blend, *blend_saved;
   void *depth_stencil, *depth_stencil_saved;
   void *rasterizer, *rasterizer_saved;
   void *fragment_shader, *fragment_shader_saved;
   void *vertex_shader, *vertex_shader_saved;
   void *geometry_shader, *geometry_shader_saved;
   void *velements, *velements_saved;

   struct pipe_clip_state clip;
   struct pipe_clip_state clip_saved;

   struct pipe_framebuffer_state fb, fb_saved;
   struct pipe_viewport_state vp, vp_saved;
   struct pipe_blend_color blend_color;
   unsigned sample_mask, sample_mask_saved;
   struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
};
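
/*
 * Most of the cso_save_xxx() entry points below assert that their saved
 * slot is empty, so saves of the same piece of state must not be nested.
 * A minimal sketch of the intended call pattern (hypothetical caller code):
 *
 *    cso_save_rasterizer(cso);
 *    cso_set_rasterizer(cso, &temporary_rast_templ);
 *    ...draw...
 *    cso_restore_rasterizer(cso);
 */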


static boolean delete_blend_state(struct cso_context *ctx, void *state)
{
   struct cso_blend *cso = (struct cso_blend *)state;

   if (ctx->blend == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
{
   struct cso_depth_stencil_alpha *cso =
      (struct cso_depth_stencil_alpha *)state;

   if (ctx->depth_stencil == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);

   return TRUE;
}

static boolean delete_sampler_state(struct cso_context *ctx, void *state)
{
   struct cso_sampler *cso = (struct cso_sampler *)state;
   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
{
   struct cso_rasterizer *cso = (struct cso_rasterizer *)state;

   if (ctx->rasterizer == cso->data)
      return FALSE;
   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_vertex_elements(struct cso_context *ctx,
                                      void *state)
{
   struct cso_velements *cso = (struct cso_velements *)state;

   if (ctx->velements == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}


static INLINE boolean delete_cso(struct cso_context *ctx,
                                 void *state, enum cso_cache_type type)
{
   switch (type) {
   case CSO_BLEND:
      return delete_blend_state(ctx, state);
   case CSO_SAMPLER:
      return delete_sampler_state(ctx, state);
   case CSO_DEPTH_STENCIL_ALPHA:
      return delete_depth_stencil_state(ctx, state);
   case CSO_RASTERIZER:
      return delete_rasterizer_state(ctx, state);
   case CSO_VELEMENTS:
      return delete_vertex_elements(ctx, state);
   default:
      assert(0);
      FREE(state);
   }
   return FALSE;
}

static INLINE void
sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
              int max_size, void *user_data)
{
   struct cso_context *ctx = (struct cso_context *)user_data;
   /* If we're approaching the maximum size, remove a fourth of the entries;
    * otherwise every subsequent call would go through the same trimming.
    */
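   /* For example (numbers purely illustrative): with max_size = 32 and
    * hash_size = 33, max_entries is 33, so we remove 33/4 = 8 entries for
    * headroom plus the 1 entry over the limit, i.e. 9 in total.
    */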
   int hash_size = cso_hash_size(hash);
   int max_entries = (max_size > hash_size) ? max_size : hash_size;
   int to_remove = (max_size < max_entries) * max_entries/4;
   struct cso_hash_iter iter = cso_hash_first_node(hash);
   if (hash_size > max_size)
      to_remove += hash_size - max_size;
   while (to_remove) {
      /* remove elements until we're good */
      /* FIXME: currently we pick the nodes to remove at random */
      void *cso = cso_hash_iter_data(iter);
      if (delete_cso(ctx, cso, type)) {
         iter = cso_hash_erase(hash, iter);
         --to_remove;
      } else
         iter = cso_hash_iter_next(iter);
   }
}

static void cso_init_vbuf(struct cso_context *cso)
{
   struct u_vbuf_caps caps;

   u_vbuf_get_caps(cso->pipe->screen, &caps);

   /* Install u_vbuf if there is anything unsupported. */
   if (!caps.buffer_offset_unaligned ||
       !caps.buffer_stride_unaligned ||
       !caps.velem_src_offset_unaligned ||
       !caps.format_fixed32 ||
       !caps.format_float16 ||
       !caps.format_float64 ||
       !caps.format_norm32 ||
       !caps.format_scaled32 ||
       !caps.user_vertex_buffers) {
      cso->vbuf = u_vbuf_create(cso->pipe, &caps);
   }
}

struct cso_context *cso_create_context( struct pipe_context *pipe )
{
   struct cso_context *ctx = CALLOC_STRUCT(cso_context);
   if (ctx == NULL)
      goto out;

   ctx->cache = cso_cache_create();
   if (ctx->cache == NULL)
      goto out;
   cso_cache_set_sanitize_callback(ctx->cache,
                                   sanitize_hash,
                                   ctx);

   ctx->pipe = pipe;
   ctx->sample_mask_saved = ~0;

   cso_init_vbuf(ctx);

   /* Enable for testing: */
   if (0) cso_set_maximum_cache_size( ctx->cache, 4 );

   if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
                                      PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
      ctx->has_geometry_shader = TRUE;
   }
   if (pipe->screen->get_param(pipe->screen,
                               PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
      ctx->has_streamout = TRUE;
   }

   return ctx;

out:
   cso_destroy_context( ctx );
   return NULL;
}

/**
 * Prior to context destruction, this function unbinds all state objects.
 */
void cso_release_all( struct cso_context *ctx )
{
   unsigned i, shader;

   if (ctx->pipe) {
      ctx->pipe->bind_blend_state( ctx->pipe, NULL );
      ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
      ctx->pipe->bind_fragment_sampler_states( ctx->pipe, 0, NULL );
      if (ctx->pipe->bind_vertex_sampler_states)
         ctx->pipe->bind_vertex_sampler_states(ctx->pipe, 0, NULL);
      ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
      ctx->pipe->bind_fs_state( ctx->pipe, NULL );
      ctx->pipe->bind_vs_state( ctx->pipe, NULL );
      ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
      ctx->pipe->set_fragment_sampler_views(ctx->pipe, 0, NULL);
      if (ctx->pipe->set_vertex_sampler_views)
         ctx->pipe->set_vertex_sampler_views(ctx->pipe, 0, NULL);
      if (ctx->pipe->set_stream_output_targets)
         ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, 0);
   }

   /* free sampler views for each shader stage */
   for (shader = 0; shader < Elements(ctx->samplers); shader++) {
      struct sampler_info *info = &ctx->samplers[shader];
      for (i = 0; i < PIPE_MAX_SAMPLERS; i++) {
         pipe_sampler_view_reference(&info->views[i], NULL);
         pipe_sampler_view_reference(&info->views_saved[i], NULL);
      }
   }

   util_unreference_framebuffer_state(&ctx->fb);
   util_unreference_framebuffer_state(&ctx->fb_saved);

   util_copy_vertex_buffers(ctx->vertex_buffers,
                            &ctx->nr_vertex_buffers,
                            NULL, 0);
   util_copy_vertex_buffers(ctx->vertex_buffers_saved,
                            &ctx->nr_vertex_buffers_saved,
                            NULL, 0);

   for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
   }

   if (ctx->cache) {
      cso_cache_delete( ctx->cache );
      ctx->cache = NULL;
   }
}


/**
 * Free the CSO context.  NOTE: the state tracker should have previously
 * called cso_release_all().
 */
void cso_destroy_context( struct cso_context *ctx )
{
   if (ctx) {
      if (ctx->vbuf)
         u_vbuf_destroy(ctx->vbuf);
      FREE( ctx );
   }
}
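
/*
 * Expected teardown order (a sketch of the caller sequence; "st" is a
 * hypothetical state-tracker variable):
 *
 *    cso_release_all(st->cso);       unbind and unreference everything
 *    cso_destroy_context(st->cso);   then free the wrapper itself
 */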


/* These functions will either find the state of the given template
 * in the cache, or they will create a new state from the given
 * template, insert it in the cache and return it.
 */

/*
 * If the driver returns 0 from the create method, the data member of
 * the cso will be assigned the template itself.
 */
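
/*
 * For instance, a caller wanting standard alpha blending might fill in a
 * template like this (an illustrative sketch only; the fields come from
 * struct pipe_blend_state, the values are hypothetical):
 *
 *    struct pipe_blend_state blend;
 *    memset(&blend, 0, sizeof(blend));
 *    blend.rt[0].blend_enable = 1;
 *    blend.rt[0].rgb_src_factor   = PIPE_BLENDFACTOR_SRC_ALPHA;
 *    blend.rt[0].rgb_dst_factor   = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
 *    blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
 *    blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
 *    blend.rt[0].colormask = PIPE_MASK_RGBA;
 *    cso_set_blend(cso, &blend);   (repeat calls reuse the cached CSO)
 */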

enum pipe_error cso_set_blend(struct cso_context *ctx,
                              const struct pipe_blend_state *templ)
{
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;

   key_size = templ->independent_blend_enable ?
      sizeof(struct pipe_blend_state) :
      (char *)&(templ->rt[1]) - (char *)templ;
   hash_key = cso_construct_key((void*)templ, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
                                  (void*)templ, key_size);

   if (cso_hash_iter_is_null(iter)) {
      struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memset(&cso->state, 0, sizeof cso->state);
      memcpy(&cso->state, templ, key_size);
      cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
      cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->blend != handle) {
      ctx->blend = handle;
      ctx->pipe->bind_blend_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_blend(struct cso_context *ctx)
{
   assert(!ctx->blend_saved);
   ctx->blend_saved = ctx->blend;
}

void cso_restore_blend(struct cso_context *ctx)
{
   if (ctx->blend != ctx->blend_saved) {
      ctx->blend = ctx->blend_saved;
      ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
   }
   ctx->blend_saved = NULL;
}



enum pipe_error
cso_set_depth_stencil_alpha(struct cso_context *ctx,
                            const struct pipe_depth_stencil_alpha_state *templ)
{
   unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_DEPTH_STENCIL_ALPHA,
                                                       (void*)templ, key_size);
   void *handle;

   if (cso_hash_iter_is_null(iter)) {
      struct cso_depth_stencil_alpha *cso =
         MALLOC(sizeof(struct cso_depth_stencil_alpha));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
                                                              &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key,
                              CSO_DEPTH_STENCIL_ALPHA, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_depth_stencil_alpha *)
                cso_hash_iter_data(iter))->data;
   }

   if (ctx->depth_stencil != handle) {
      ctx->depth_stencil = handle;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_depth_stencil_alpha(struct cso_context *ctx)
{
   assert(!ctx->depth_stencil_saved);
   ctx->depth_stencil_saved = ctx->depth_stencil;
}

void cso_restore_depth_stencil_alpha(struct cso_context *ctx)
{
   if (ctx->depth_stencil != ctx->depth_stencil_saved) {
      ctx->depth_stencil = ctx->depth_stencil_saved;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
                                                ctx->depth_stencil_saved);
   }
   ctx->depth_stencil_saved = NULL;
}



enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
                                   const struct pipe_rasterizer_state *templ)
{
   unsigned key_size = sizeof(struct pipe_rasterizer_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_RASTERIZER,
                                                       (void*)templ, key_size);
   void *handle = NULL;

   if (cso_hash_iter_is_null(iter)) {
      struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_rasterizer_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->rasterizer != handle) {
      ctx->rasterizer = handle;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_rasterizer(struct cso_context *ctx)
{
   assert(!ctx->rasterizer_saved);
   ctx->rasterizer_saved = ctx->rasterizer;
}

void cso_restore_rasterizer(struct cso_context *ctx)
{
   if (ctx->rasterizer != ctx->rasterizer_saved) {
      ctx->rasterizer = ctx->rasterizer_saved;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
   }
   ctx->rasterizer_saved = NULL;
}



enum pipe_error cso_set_fragment_shader_handle(struct cso_context *ctx,
                                               void *handle )
{
   if (ctx->fragment_shader != handle) {
      ctx->fragment_shader = handle;
      ctx->pipe->bind_fs_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_delete_fragment_shader(struct cso_context *ctx, void *handle )
{
   if (handle == ctx->fragment_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_fs_state(ctx->pipe, NULL);
      ctx->fragment_shader = NULL;
   }
   ctx->pipe->delete_fs_state(ctx->pipe, handle);
}

void cso_save_fragment_shader(struct cso_context *ctx)
{
   assert(!ctx->fragment_shader_saved);
   ctx->fragment_shader_saved = ctx->fragment_shader;
}

void cso_restore_fragment_shader(struct cso_context *ctx)
{
   if (ctx->fragment_shader_saved != ctx->fragment_shader) {
      ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
      ctx->fragment_shader = ctx->fragment_shader_saved;
   }
   ctx->fragment_shader_saved = NULL;
}


enum pipe_error cso_set_vertex_shader_handle(struct cso_context *ctx,
                                             void *handle )
{
   if (ctx->vertex_shader != handle) {
      ctx->vertex_shader = handle;
      ctx->pipe->bind_vs_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_delete_vertex_shader(struct cso_context *ctx, void *handle )
{
   if (handle == ctx->vertex_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_vs_state(ctx->pipe, NULL);
      ctx->vertex_shader = NULL;
   }
   ctx->pipe->delete_vs_state(ctx->pipe, handle);
}

void cso_save_vertex_shader(struct cso_context *ctx)
{
   assert(!ctx->vertex_shader_saved);
   ctx->vertex_shader_saved = ctx->vertex_shader;
}

void cso_restore_vertex_shader(struct cso_context *ctx)
{
   if (ctx->vertex_shader_saved != ctx->vertex_shader) {
      ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
      ctx->vertex_shader = ctx->vertex_shader_saved;
   }
   ctx->vertex_shader_saved = NULL;
}


enum pipe_error cso_set_framebuffer(struct cso_context *ctx,
                                    const struct pipe_framebuffer_state *fb)
{
   if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
      util_copy_framebuffer_state(&ctx->fb, fb);
      ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
   }
   return PIPE_OK;
}

void cso_save_framebuffer(struct cso_context *ctx)
{
   util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
}

void cso_restore_framebuffer(struct cso_context *ctx)
{
   if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
      util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
      ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
      util_unreference_framebuffer_state(&ctx->fb_saved);
   }
}


enum pipe_error cso_set_viewport(struct cso_context *ctx,
                                 const struct pipe_viewport_state *vp)
{
   if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
      ctx->vp = *vp;
      ctx->pipe->set_viewport_state(ctx->pipe, vp);
   }
   return PIPE_OK;
}

void cso_save_viewport(struct cso_context *ctx)
{
   ctx->vp_saved = ctx->vp;
}


void cso_restore_viewport(struct cso_context *ctx)
{
   if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
      ctx->vp = ctx->vp_saved;
      ctx->pipe->set_viewport_state(ctx->pipe, &ctx->vp);
   }
}


enum pipe_error cso_set_blend_color(struct cso_context *ctx,
                                    const struct pipe_blend_color *bc)
{
   if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
      ctx->blend_color = *bc;
      ctx->pipe->set_blend_color(ctx->pipe, bc);
   }
   return PIPE_OK;
}

enum pipe_error cso_set_sample_mask(struct cso_context *ctx,
                                    unsigned sample_mask)
{
   if (ctx->sample_mask != sample_mask) {
      ctx->sample_mask = sample_mask;
      ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
   }
   return PIPE_OK;
}

void cso_save_sample_mask(struct cso_context *ctx)
{
   ctx->sample_mask_saved = ctx->sample_mask;
}

void cso_restore_sample_mask(struct cso_context *ctx)
{
   cso_set_sample_mask(ctx, ctx->sample_mask_saved);
}

enum pipe_error cso_set_stencil_ref(struct cso_context *ctx,
                                    const struct pipe_stencil_ref *sr)
{
   if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
      ctx->stencil_ref = *sr;
      ctx->pipe->set_stencil_ref(ctx->pipe, sr);
   }
   return PIPE_OK;
}

void cso_save_stencil_ref(struct cso_context *ctx)
{
   ctx->stencil_ref_saved = ctx->stencil_ref;
}


void cso_restore_stencil_ref(struct cso_context *ctx)
{
   if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
              sizeof(ctx->stencil_ref))) {
      ctx->stencil_ref = ctx->stencil_ref_saved;
      ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
   }
}

enum pipe_error cso_set_geometry_shader_handle(struct cso_context *ctx,
                                               void *handle)
{
   assert(ctx->has_geometry_shader || !handle);

   if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
      ctx->geometry_shader = handle;
      ctx->pipe->bind_gs_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_delete_geometry_shader(struct cso_context *ctx, void *handle)
{
   if (handle == ctx->geometry_shader) {
      /* unbind before deleting */
      ctx->pipe->bind_gs_state(ctx->pipe, NULL);
      ctx->geometry_shader = NULL;
   }
   ctx->pipe->delete_gs_state(ctx->pipe, handle);
}

void cso_save_geometry_shader(struct cso_context *ctx)
{
   if (!ctx->has_geometry_shader) {
      return;
   }

   assert(!ctx->geometry_shader_saved);
   ctx->geometry_shader_saved = ctx->geometry_shader;
}

void cso_restore_geometry_shader(struct cso_context *ctx)
{
   if (!ctx->has_geometry_shader) {
      return;
   }

   if (ctx->geometry_shader_saved != ctx->geometry_shader) {
      ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
      ctx->geometry_shader = ctx->geometry_shader_saved;
   }
   ctx->geometry_shader_saved = NULL;
}

/* clip state */

static INLINE void
clip_state_cpy(struct pipe_clip_state *dst,
               const struct pipe_clip_state *src)
{
   memcpy(dst->ucp, src->ucp, sizeof(dst->ucp));
}

static INLINE int
clip_state_cmp(const struct pipe_clip_state *a,
               const struct pipe_clip_state *b)
{
   return memcmp(a->ucp, b->ucp, sizeof(a->ucp));
}

void
cso_set_clip(struct cso_context *ctx,
             const struct pipe_clip_state *clip)
{
   if (clip_state_cmp(&ctx->clip, clip)) {
      clip_state_cpy(&ctx->clip, clip);
      ctx->pipe->set_clip_state(ctx->pipe, clip);
   }
}

void
cso_save_clip(struct cso_context *ctx)
{
   clip_state_cpy(&ctx->clip_saved, &ctx->clip);
}

void
cso_restore_clip(struct cso_context *ctx)
{
   if (clip_state_cmp(&ctx->clip, &ctx->clip_saved)) {
      clip_state_cpy(&ctx->clip, &ctx->clip_saved);
      ctx->pipe->set_clip_state(ctx->pipe, &ctx->clip_saved);
   }
}

enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
                        unsigned count,
                        const struct pipe_vertex_element *states)
{
   struct u_vbuf *vbuf = ctx->vbuf;
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;
   struct cso_velems_state velems_state;

   if (vbuf) {
      u_vbuf_set_vertex_elements(vbuf, count, states);
      return PIPE_OK;
   }
   /* Need to include the count in the stored state data too.
    * Otherwise the first 'count' pipe_vertex_elements could be identical
    * even when the counts differ, and there's no guarantee the hash would
    * be different in that case either.
    */
   key_size = sizeof(struct pipe_vertex_element) * count + sizeof(unsigned);
   velems_state.count = count;
   memcpy(velems_state.velems, states,
          sizeof(struct pipe_vertex_element) * count);
   hash_key = cso_construct_key((void*)&velems_state, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
                                  (void*)&velems_state, key_size);

   if (cso_hash_iter_is_null(iter)) {
      struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, &velems_state, key_size);
      cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe, count,
                                                      &cso->state.velems[0]);
      cso->delete_state =
         (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->velements != handle) {
      ctx->velements = handle;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

void cso_save_vertex_elements(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_save_vertex_elements(vbuf);
      return;
   }

   assert(!ctx->velements_saved);
   ctx->velements_saved = ctx->velements;
}

void cso_restore_vertex_elements(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_restore_vertex_elements(vbuf);
      return;
   }

   if (ctx->velements != ctx->velements_saved) {
      ctx->velements = ctx->velements_saved;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
   }
   ctx->velements_saved = NULL;
}

/* vertex buffers */

void cso_set_vertex_buffers(struct cso_context *ctx,
                            unsigned count,
                            const struct pipe_vertex_buffer *buffers)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_set_vertex_buffers(vbuf, count, buffers);
      return;
   }

   if (count != ctx->nr_vertex_buffers ||
       memcmp(buffers, ctx->vertex_buffers,
              sizeof(struct pipe_vertex_buffer) * count) != 0) {
      util_copy_vertex_buffers(ctx->vertex_buffers, &ctx->nr_vertex_buffers,
                               buffers, count);
      ctx->pipe->set_vertex_buffers(ctx->pipe, count, buffers);
   }
}

void cso_save_vertex_buffers(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_save_vertex_buffers(vbuf);
      return;
   }

   util_copy_vertex_buffers(ctx->vertex_buffers_saved,
                            &ctx->nr_vertex_buffers_saved,
                            ctx->vertex_buffers,
                            ctx->nr_vertex_buffers);
}

void cso_restore_vertex_buffers(struct cso_context *ctx)
{
   unsigned i;
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf) {
      u_vbuf_restore_vertex_buffers(vbuf);
      return;
   }

   util_copy_vertex_buffers(ctx->vertex_buffers,
                            &ctx->nr_vertex_buffers,
                            ctx->vertex_buffers_saved,
                            ctx->nr_vertex_buffers_saved);

   for (i = 0; i < ctx->nr_vertex_buffers_saved; i++) {
      pipe_resource_reference(&ctx->vertex_buffers_saved[i].buffer, NULL);
   }
   ctx->nr_vertex_buffers_saved = 0;

   ctx->pipe->set_vertex_buffers(ctx->pipe, ctx->nr_vertex_buffers,
                                 ctx->vertex_buffers);
}


/**************** fragment/vertex sampler view state *************************/

static enum pipe_error
single_sampler(struct cso_context *ctx,
               struct sampler_info *info,
               unsigned idx,
               const struct pipe_sampler_state *templ)
{
   void *handle = NULL;

   if (templ != NULL) {
      unsigned key_size = sizeof(struct pipe_sampler_state);
      unsigned hash_key = cso_construct_key((void*)templ, key_size);
      struct cso_hash_iter iter =
         cso_find_state_template(ctx->cache,
                                 hash_key, CSO_SAMPLER,
                                 (void *) templ, key_size);

      if (cso_hash_iter_is_null(iter)) {
         struct cso_sampler *cso = MALLOC(sizeof(struct cso_sampler));
         if (!cso)
            return PIPE_ERROR_OUT_OF_MEMORY;

         memcpy(&cso->state, templ, sizeof(*templ));
         cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
         cso->delete_state =
            (cso_state_callback) ctx->pipe->delete_sampler_state;
         cso->context = ctx->pipe;

         iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
         if (cso_hash_iter_is_null(iter)) {
            FREE(cso);
            return PIPE_ERROR_OUT_OF_MEMORY;
         }

         handle = cso->data;
      }
      else {
         handle = ((struct cso_sampler *)cso_hash_iter_data(iter))->data;
      }
   }

   info->samplers[idx] = handle;

   return PIPE_OK;
}

enum pipe_error
cso_single_sampler(struct cso_context *ctx,
                   unsigned shader_stage,
                   unsigned idx,
                   const struct pipe_sampler_state *templ)
{
   return single_sampler(ctx, &ctx->samplers[shader_stage], idx, templ);
}



static void
single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   /* find highest non-null sampler */
   for (i = PIPE_MAX_SAMPLERS; i > 0; i--) {
      if (info->samplers[i - 1] != NULL)
         break;
   }

   info->nr_samplers = i;

   if (info->hw.nr_samplers != info->nr_samplers ||
       memcmp(info->hw.samplers,
              info->samplers,
              info->nr_samplers * sizeof(void *)) != 0)
   {
      memcpy(info->hw.samplers,
             info->samplers,
             info->nr_samplers * sizeof(void *));
      info->hw.nr_samplers = info->nr_samplers;

      switch (shader_stage) {
      case PIPE_SHADER_FRAGMENT:
         ctx->pipe->bind_fragment_sampler_states(ctx->pipe,
                                                 info->nr_samplers,
                                                 info->samplers);
         break;
      case PIPE_SHADER_VERTEX:
         ctx->pipe->bind_vertex_sampler_states(ctx->pipe,
                                               info->nr_samplers,
                                               info->samplers);
         break;
      case PIPE_SHADER_GEOMETRY:
         ctx->pipe->bind_geometry_sampler_states(ctx->pipe,
                                                 info->nr_samplers,
                                                 info->samplers);
         break;
      default:
         assert(!"bad shader type in single_sampler_done()");
      }
   }
}

void
cso_single_sampler_done(struct cso_context *ctx, unsigned shader_stage)
{
   single_sampler_done(ctx, shader_stage);
}

/*
 * If the function encounters any errors it will return the last one.
 * This is done so that we always try to set as many samplers as possible.
 */
enum pipe_error
cso_set_samplers(struct cso_context *ctx,
                 unsigned shader_stage,
                 unsigned nr,
                 const struct pipe_sampler_state **templates)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;
   enum pipe_error temp, error = PIPE_OK;

   /* TODO: fastpath
    */

   for (i = 0; i < nr; i++) {
      temp = single_sampler(ctx, info, i, templates[i]);
      if (temp != PIPE_OK)
         error = temp;
   }

   for ( ; i < info->nr_samplers; i++) {
      temp = single_sampler(ctx, info, i, NULL);
      if (temp != PIPE_OK)
         error = temp;
   }

   single_sampler_done(ctx, shader_stage);

   return error;
}

void
cso_save_samplers(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   info->nr_samplers_saved = info->nr_samplers;
   memcpy(info->samplers_saved, info->samplers, sizeof(info->samplers));
}


void
cso_restore_samplers(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   info->nr_samplers = info->nr_samplers_saved;
   memcpy(info->samplers, info->samplers_saved, sizeof(info->samplers));
   single_sampler_done(ctx, shader_stage);
}


void
cso_set_sampler_views(struct cso_context *ctx,
                      unsigned shader_stage,
                      unsigned count,
                      struct pipe_sampler_view **views)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   /* reference new views */
   for (i = 0; i < count; i++) {
      pipe_sampler_view_reference(&info->views[i], views[i]);
   }
   /* unref extra old views, if any */
   for (; i < info->nr_views; i++) {
      pipe_sampler_view_reference(&info->views[i], NULL);
   }

   info->nr_views = count;

   /* bind the new sampler views */
   switch (shader_stage) {
   case PIPE_SHADER_FRAGMENT:
      ctx->pipe->set_fragment_sampler_views(ctx->pipe, count, info->views);
      break;
   case PIPE_SHADER_VERTEX:
      ctx->pipe->set_vertex_sampler_views(ctx->pipe, count, info->views);
      break;
   case PIPE_SHADER_GEOMETRY:
      ctx->pipe->set_geometry_sampler_views(ctx->pipe, count, info->views);
      break;
   default:
      assert(!"bad shader type in cso_set_sampler_views()");
   }
}


void
cso_save_sampler_views(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i;

   info->nr_views_saved = info->nr_views;

   for (i = 0; i < info->nr_views; i++) {
      assert(!info->views_saved[i]);
      pipe_sampler_view_reference(&info->views_saved[i], info->views[i]);
   }
}


void
cso_restore_sampler_views(struct cso_context *ctx, unsigned shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];
   unsigned i, nr_saved = info->nr_views_saved;

   for (i = 0; i < nr_saved; i++) {
      pipe_sampler_view_reference(&info->views[i], NULL);
      /* move the reference from one pointer to another */
      info->views[i] = info->views_saved[i];
      info->views_saved[i] = NULL;
   }
   for (; i < info->nr_views; i++) {
      pipe_sampler_view_reference(&info->views[i], NULL);
   }

   /* bind the old/saved sampler views */
   switch (shader_stage) {
   case PIPE_SHADER_FRAGMENT:
      ctx->pipe->set_fragment_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   case PIPE_SHADER_VERTEX:
      ctx->pipe->set_vertex_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   case PIPE_SHADER_GEOMETRY:
      ctx->pipe->set_geometry_sampler_views(ctx->pipe, nr_saved, info->views);
      break;
   default:
      assert(!"bad shader type in cso_restore_sampler_views()");
   }

   info->nr_views = nr_saved;
   info->nr_views_saved = 0;
}


void
cso_set_stream_outputs(struct cso_context *ctx,
                       unsigned num_targets,
                       struct pipe_stream_output_target **targets,
                       unsigned append_bitmask)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;

   if (!ctx->has_streamout) {
      assert(num_targets == 0);
      return;
   }

   if (ctx->nr_so_targets == 0 && num_targets == 0) {
      /* Nothing to do. */
      return;
   }

   /* reference new targets */
   for (i = 0; i < num_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
   }
   /* unref extra old targets, if any */
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   pipe->set_stream_output_targets(pipe, num_targets, targets,
                                   append_bitmask);
   ctx->nr_so_targets = num_targets;
}

void
cso_save_stream_outputs(struct cso_context *ctx)
{
   uint i;

   if (!ctx->has_streamout) {
      return;
   }

   ctx->nr_so_targets_saved = ctx->nr_so_targets;

   for (i = 0; i < ctx->nr_so_targets; i++) {
      assert(!ctx->so_targets_saved[i]);
      pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
   }
}

void
cso_restore_stream_outputs(struct cso_context *ctx)
{
   struct pipe_context *pipe = ctx->pipe;
   uint i;

   if (!ctx->has_streamout) {
      return;
   }

   if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
      /* Nothing to do. */
      return;
   }

   for (i = 0; i < ctx->nr_so_targets_saved; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      /* move the reference from one pointer to another */
      ctx->so_targets[i] = ctx->so_targets_saved[i];
      ctx->so_targets_saved[i] = NULL;
   }
   for (; i < ctx->nr_so_targets; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
   }

   /* ~0 means append */
   pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
                                   ctx->so_targets, ~0);

   ctx->nr_so_targets = ctx->nr_so_targets_saved;
   ctx->nr_so_targets_saved = 0;
}

/* drawing */

void
cso_set_index_buffer(struct cso_context *cso,
                     const struct pipe_index_buffer *ib)
{
   struct u_vbuf *vbuf = cso->vbuf;

   if (vbuf) {
      u_vbuf_set_index_buffer(vbuf, ib);
   } else {
      struct pipe_context *pipe = cso->pipe;
      pipe->set_index_buffer(pipe, ib);
   }
}

void
cso_draw_vbo(struct cso_context *cso,
             const struct pipe_draw_info *info)
{
   struct u_vbuf *vbuf = cso->vbuf;

   if (vbuf) {
      u_vbuf_draw_vbo(vbuf, info);
   } else {
      struct pipe_context *pipe = cso->pipe;
      pipe->draw_vbo(pipe, info);
   }
}

void
cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
{
   struct pipe_draw_info info;

   util_draw_init_info(&info);

   info.mode = mode;
   info.start = start;
   info.count = count;
   info.min_index = start;
   info.max_index = start + count - 1;

   cso_draw_vbo(cso, &info);
}