/**************************************************************************
 *
 * Copyright 2007 VMware, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

 /**
  * @file
  *
  * Wrap the cso cache & hash mechanisms in a simplified
  * pipe-driver-specific interface.
  *
  * @author Zack Rusin <zackr@vmware.com>
  * @author Keith Whitwell <keithw@vmware.com>
  */

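/*
 * Illustrative usage sketch (added for clarity, not taken verbatim from any
 * caller): a state tracker fills in pipe_* templates and hands them to the
 * cso_* setters defined below, which hash the template, reuse an existing
 * driver CSO on a cache hit, or create and cache a new one on a miss:
 *
 *    struct pipe_blend_state blend_templ = { 0 };    // hypothetical template
 *    cso_set_blend(cso, &blend_templ);               // cached create + bind
 *    cso_set_rasterizer(cso, &rast_templ);           // likewise
 *
 * Redundant create/bind calls into the driver are filtered out this way.
 */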
#include "pipe/p_state.h"
#include "util/u_draw.h"
#include "util/u_framebuffer.h"
#include "util/u_inlines.h"
#include "util/u_math.h"
#include "util/u_memory.h"
#include "util/u_vbuf.h"
#include "tgsi/tgsi_parse.h"

#include "cso_cache/cso_context.h"
#include "cso_cache/cso_cache.h"
#include "cso_cache/cso_hash.h"
#include "cso_context.h"


/**
 * Per-shader sampler information.
 */
struct sampler_info
{
   struct cso_sampler *cso_samplers[PIPE_MAX_SAMPLERS];
   void *samplers[PIPE_MAX_SAMPLERS];
};



struct cso_context {
   struct pipe_context *pipe;
   struct cso_cache *cache;

   struct u_vbuf *vbuf;
   struct u_vbuf *vbuf_current;
   bool always_use_vbuf;

   boolean has_geometry_shader;
   boolean has_tessellation;
   boolean has_compute_shader;
   boolean has_streamout;

   unsigned saved_state;  /**< bitmask of CSO_BIT_x flags */

   struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_fragment_views;

   struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
   unsigned nr_fragment_views_saved;

   struct sampler_info fragment_samplers_saved;
   struct sampler_info samplers[PIPE_SHADER_TYPES];

   /* Temporary number until cso_single_sampler_done is called.
    * It tracks the highest sampler seen in cso_single_sampler.
    */
   int max_sampler_seen;

   struct pipe_vertex_buffer vertex_buffer0_current;
   struct pipe_vertex_buffer vertex_buffer0_saved;

   struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
   struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];

   struct pipe_image_view fragment_image0_current;
   struct pipe_image_view fragment_image0_saved;

   unsigned nr_so_targets;
   struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];

   unsigned nr_so_targets_saved;
   struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];

   /** Current and saved state.
    * The saved state is used as a 1-deep stack.
    */
   void *blend, *blend_saved;
   void *depth_stencil, *depth_stencil_saved;
   void *rasterizer, *rasterizer_saved;
   void *fragment_shader, *fragment_shader_saved;
   void *vertex_shader, *vertex_shader_saved;
   void *geometry_shader, *geometry_shader_saved;
   void *tessctrl_shader, *tessctrl_shader_saved;
   void *tesseval_shader, *tesseval_shader_saved;
   void *compute_shader;
   void *velements, *velements_saved;
   struct pipe_query *render_condition, *render_condition_saved;
   uint render_condition_mode, render_condition_mode_saved;
   boolean render_condition_cond, render_condition_cond_saved;

   struct pipe_framebuffer_state fb, fb_saved;
   struct pipe_viewport_state vp, vp_saved;
   struct pipe_blend_color blend_color;
   unsigned sample_mask, sample_mask_saved;
   unsigned min_samples, min_samples_saved;
   struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
};

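/* Note added for clarity: the *_saved members above hold exactly one level of
 * saved state.  Internal "meta" operations are expected to save the pieces of
 * state they will clobber, do their work, and then restore them; nested saves
 * are not supported.
 */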
struct pipe_context *cso_get_pipe_context(struct cso_context *cso)
{
   return cso->pipe;
}

static boolean delete_blend_state(struct cso_context *ctx, void *state)
{
   struct cso_blend *cso = (struct cso_blend *)state;

   if (ctx->blend == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
{
   struct cso_depth_stencil_alpha *cso =
      (struct cso_depth_stencil_alpha *)state;

   if (ctx->depth_stencil == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);

   return TRUE;
}

static boolean delete_sampler_state(UNUSED struct cso_context *ctx, void *state)
{
   struct cso_sampler *cso = (struct cso_sampler *)state;
   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
{
   struct cso_rasterizer *cso = (struct cso_rasterizer *)state;

   if (ctx->rasterizer == cso->data)
      return FALSE;
   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}

static boolean delete_vertex_elements(struct cso_context *ctx,
                                      void *state)
{
   struct cso_velements *cso = (struct cso_velements *)state;

   if (ctx->velements == cso->data)
      return FALSE;

   if (cso->delete_state)
      cso->delete_state(cso->context, cso->data);
   FREE(state);
   return TRUE;
}


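/* Comment added for clarity: dispatch to the type-specific delete function
 * above.  A cached CSO is freed only if it is not currently bound; if it is
 * bound, FALSE is returned and the caller moves on to another cache entry.
 */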
static inline boolean delete_cso(struct cso_context *ctx,
                                 void *state, enum cso_cache_type type)
{
   switch (type) {
   case CSO_BLEND:
      return delete_blend_state(ctx, state);
   case CSO_SAMPLER:
      return delete_sampler_state(ctx, state);
   case CSO_DEPTH_STENCIL_ALPHA:
      return delete_depth_stencil_state(ctx, state);
   case CSO_RASTERIZER:
      return delete_rasterizer_state(ctx, state);
   case CSO_VELEMENTS:
      return delete_vertex_elements(ctx, state);
   default:
      assert(0);
      FREE(state);
   }
   return FALSE;
}

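/* Comment added for clarity: sanitize callback registered with the cso cache.
 * It is invoked when a hash table grows too large and evicts entries down to
 * the limit, taking care not to delete sampler states that are currently
 * bound.
 */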
static inline void
sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
              int max_size, void *user_data)
{
   struct cso_context *ctx = (struct cso_context *)user_data;
   /* If we're approaching the maximum size, remove a fourth of the entries;
    * otherwise every subsequent call will go through the same thing. */
   int hash_size = cso_hash_size(hash);
   int max_entries = (max_size > hash_size) ? max_size : hash_size;
   int to_remove =  (max_size < max_entries) * max_entries/4;
   struct cso_hash_iter iter;
   struct cso_sampler **samplers_to_restore = NULL;
   unsigned to_restore = 0;

   if (hash_size > max_size)
      to_remove += hash_size - max_size;

   if (to_remove == 0)
      return;

   if (type == CSO_SAMPLER) {
      int i, j;

      samplers_to_restore = MALLOC(PIPE_SHADER_TYPES * PIPE_MAX_SAMPLERS *
                                   sizeof(*samplers_to_restore));

      /* Temporarily remove currently bound sampler states from the hash
       * table, to prevent them from being deleted
       */
      for (i = 0; i < PIPE_SHADER_TYPES; i++) {
         for (j = 0; j < PIPE_MAX_SAMPLERS; j++) {
            struct cso_sampler *sampler = ctx->samplers[i].cso_samplers[j];

            if (sampler && cso_hash_take(hash, sampler->hash_key))
               samplers_to_restore[to_restore++] = sampler;
         }
      }
   }

   iter = cso_hash_first_node(hash);
   while (to_remove) {
      /* Remove elements until we're good. */
      /* FIXME: currently we pick the nodes to remove at random. */
      void *cso = cso_hash_iter_data(iter);

      if (!cso)
         break;

      if (delete_cso(ctx, cso, type)) {
         iter = cso_hash_erase(hash, iter);
         --to_remove;
      } else
         iter = cso_hash_iter_next(iter);
   }

   if (type == CSO_SAMPLER) {
      /* Put currently bound sampler states back into the hash table */
      while (to_restore--) {
         struct cso_sampler *sampler = samplers_to_restore[to_restore];

         cso_hash_insert(hash, sampler->hash_key, sampler);
      }

      FREE(samplers_to_restore);
   }
}

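/* Comment added for clarity: create the u_vbuf vertex-buffer translation
 * module when the driver needs it, either unconditionally (fallback_always)
 * or only when user vertex buffers may be used.
 */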
static void cso_init_vbuf(struct cso_context *cso, unsigned flags)
{
   struct u_vbuf_caps caps;
   bool uses_user_vertex_buffers = !(flags & CSO_NO_USER_VERTEX_BUFFERS);
   bool needs64b = !(flags & CSO_NO_64B_VERTEX_BUFFERS);

   u_vbuf_get_caps(cso->pipe->screen, &caps, needs64b);

   /* Enable u_vbuf if needed. */
   if (caps.fallback_always ||
       (uses_user_vertex_buffers &&
        caps.fallback_only_for_user_vbuffers)) {
      cso->vbuf = u_vbuf_create(cso->pipe, &caps);
      cso->vbuf_current = cso->vbuf;
      cso->always_use_vbuf = caps.fallback_always;
   }
}

struct cso_context *
cso_create_context(struct pipe_context *pipe, unsigned flags)
{
   struct cso_context *ctx = CALLOC_STRUCT(cso_context);
   if (!ctx)
      return NULL;

   ctx->cache = cso_cache_create();
   if (ctx->cache == NULL)
      goto out;
   cso_cache_set_sanitize_callback(ctx->cache,
                                   sanitize_hash,
                                   ctx);

   ctx->pipe = pipe;
   ctx->sample_mask = ~0;

   cso_init_vbuf(ctx, flags);

   /* Enable for testing: */
   if (0) cso_set_maximum_cache_size( ctx->cache, 4 );

   if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
                                PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
      ctx->has_geometry_shader = TRUE;
   }
   if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
                                PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
      ctx->has_tessellation = TRUE;
   }
   if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
                                      PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
      int supported_irs =
         pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
                                        PIPE_SHADER_CAP_SUPPORTED_IRS);
      if (supported_irs & ((1 << PIPE_SHADER_IR_TGSI) |
                           (1 << PIPE_SHADER_IR_NIR))) {
         ctx->has_compute_shader = TRUE;
      }
   }
   if (pipe->screen->get_param(pipe->screen,
                               PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
      ctx->has_streamout = TRUE;
   }

   ctx->max_sampler_seen = -1;
   return ctx;

out:
   cso_destroy_context( ctx );
   return NULL;
}

/**
 * Free the CSO context.
 */
void cso_destroy_context( struct cso_context *ctx )
{
   unsigned i;

   if (ctx->pipe) {
      ctx->pipe->bind_blend_state( ctx->pipe, NULL );
      ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );

      {
         static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
         static struct pipe_shader_buffer ssbos[PIPE_MAX_SHADER_BUFFERS] = { 0 };
         static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
         struct pipe_screen *scr = ctx->pipe->screen;
         enum pipe_shader_type sh;
         for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
            switch (sh) {
            case PIPE_SHADER_GEOMETRY:
               if (!ctx->has_geometry_shader)
                  continue;
               break;
            case PIPE_SHADER_TESS_CTRL:
            case PIPE_SHADER_TESS_EVAL:
               if (!ctx->has_tessellation)
                  continue;
               break;
            case PIPE_SHADER_COMPUTE:
               if (!ctx->has_compute_shader)
                  continue;
               break;
            default:
               break;
            }

            int maxsam = scr->get_shader_param(scr, sh,
                                               PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
            int maxview = scr->get_shader_param(scr, sh,
                                                PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
            int maxssbo = scr->get_shader_param(scr, sh,
                                                PIPE_SHADER_CAP_MAX_SHADER_BUFFERS);
            int maxcb = scr->get_shader_param(scr, sh,
                                              PIPE_SHADER_CAP_MAX_CONST_BUFFERS);
            int maximg = scr->get_shader_param(scr, sh,
                                              PIPE_SHADER_CAP_MAX_SHADER_IMAGES);
            assert(maxsam <= PIPE_MAX_SAMPLERS);
            assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
            assert(maxssbo <= PIPE_MAX_SHADER_BUFFERS);
            assert(maxcb <= PIPE_MAX_CONSTANT_BUFFERS);
            assert(maximg <= PIPE_MAX_SHADER_IMAGES);
            if (maxsam > 0) {
               ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
            }
            if (maxview > 0) {
               ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
            }
            if (maxssbo > 0) {
               ctx->pipe->set_shader_buffers(ctx->pipe, sh, 0, maxssbo, ssbos, 0);
            }
            if (maximg > 0) {
               ctx->pipe->set_shader_images(ctx->pipe, sh, 0, maximg, NULL);
            }
            for (int i = 0; i < maxcb; i++) {
               ctx->pipe->set_constant_buffer(ctx->pipe, sh, i, NULL);
            }
         }
      }

      ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
      ctx->pipe->bind_fs_state( ctx->pipe, NULL );
      ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
      ctx->pipe->bind_vs_state( ctx->pipe, NULL );
      ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
      if (ctx->has_geometry_shader) {
         ctx->pipe->bind_gs_state(ctx->pipe, NULL);
      }
      if (ctx->has_tessellation) {
         ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
         ctx->pipe->bind_tes_state(ctx->pipe, NULL);
      }
      if (ctx->has_compute_shader) {
         ctx->pipe->bind_compute_state(ctx->pipe, NULL);
      }
      ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );

      if (ctx->has_streamout)
         ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
   }

   for (i = 0; i < ctx->nr_fragment_views; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
   }
   for (i = 0; i < ctx->nr_fragment_views_saved; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
   }

   util_unreference_framebuffer_state(&ctx->fb);
   util_unreference_framebuffer_state(&ctx->fb_saved);

   pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_current);
   pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_saved);

   for (i = 0; i < PIPE_SHADER_TYPES; i++) {
      pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
      pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
   }

   pipe_resource_reference(&ctx->fragment_image0_current.resource, NULL);
   pipe_resource_reference(&ctx->fragment_image0_saved.resource, NULL);

   for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
      pipe_so_target_reference(&ctx->so_targets[i], NULL);
      pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
   }

   if (ctx->cache) {
      cso_cache_delete( ctx->cache );
      ctx->cache = NULL;
   }

   if (ctx->vbuf)
      u_vbuf_destroy(ctx->vbuf);
   FREE( ctx );
}


/* These functions will either find the state matching the given template
 * in the cache, or they will create a new state from the given template,
 * insert it into the cache and return it.
 */

/*
 * If the driver returns 0 from the create method, the data member of the
 * cso is set to the template itself.
 */

enum pipe_error cso_set_blend(struct cso_context *ctx,
                              const struct pipe_blend_state *templ)
{
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;

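   /* Comment added for clarity: when independent per-RT blending is not
    * enabled, only rt[0] is significant, so hash just the prefix of the
    * template up to rt[1]; otherwise-identical templates then share one
    * cached CSO.
    */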
   key_size = templ->independent_blend_enable ?
      sizeof(struct pipe_blend_state) :
      (char *)&(templ->rt[1]) - (char *)templ;
   hash_key = cso_construct_key((void*)templ, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
                                  (void*)templ, key_size);

   if (cso_hash_iter_is_null(iter)) {
      struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memset(&cso->state, 0, sizeof cso->state);
      memcpy(&cso->state, templ, key_size);
      cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
      cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->blend != handle) {
      ctx->blend = handle;
      ctx->pipe->bind_blend_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

static void
cso_save_blend(struct cso_context *ctx)
{
   assert(!ctx->blend_saved);
   ctx->blend_saved = ctx->blend;
}

static void
cso_restore_blend(struct cso_context *ctx)
{
   if (ctx->blend != ctx->blend_saved) {
      ctx->blend = ctx->blend_saved;
      ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
   }
   ctx->blend_saved = NULL;
}



enum pipe_error
cso_set_depth_stencil_alpha(struct cso_context *ctx,
                            const struct pipe_depth_stencil_alpha_state *templ)
{
   unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_DEPTH_STENCIL_ALPHA,
                                                       (void*)templ, key_size);
   void *handle;

   if (cso_hash_iter_is_null(iter)) {
      struct cso_depth_stencil_alpha *cso =
         MALLOC(sizeof(struct cso_depth_stencil_alpha));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
                                                              &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key,
                              CSO_DEPTH_STENCIL_ALPHA, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_depth_stencil_alpha *)
                cso_hash_iter_data(iter))->data;
   }

   if (ctx->depth_stencil != handle) {
      ctx->depth_stencil = handle;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

static void
cso_save_depth_stencil_alpha(struct cso_context *ctx)
{
   assert(!ctx->depth_stencil_saved);
   ctx->depth_stencil_saved = ctx->depth_stencil;
}

static void
cso_restore_depth_stencil_alpha(struct cso_context *ctx)
{
   if (ctx->depth_stencil != ctx->depth_stencil_saved) {
      ctx->depth_stencil = ctx->depth_stencil_saved;
      ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
                                                ctx->depth_stencil_saved);
   }
   ctx->depth_stencil_saved = NULL;
}



enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
                                   const struct pipe_rasterizer_state *templ)
{
   unsigned key_size = sizeof(struct pipe_rasterizer_state);
   unsigned hash_key = cso_construct_key((void*)templ, key_size);
   struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
                                                       hash_key,
                                                       CSO_RASTERIZER,
                                                       (void*)templ, key_size);
   void *handle = NULL;

   /* We can't have both point_quad_rasterization (sprites) and point_smooth
    * (round AA points) enabled at the same time.
    */
   assert(!(templ->point_quad_rasterization && templ->point_smooth));

   if (cso_hash_iter_is_null(iter)) {
      struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
      if (!cso)
         return PIPE_ERROR_OUT_OF_MEMORY;

      memcpy(&cso->state, templ, sizeof(*templ));
      cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
      cso->delete_state =
         (cso_state_callback)ctx->pipe->delete_rasterizer_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return PIPE_ERROR_OUT_OF_MEMORY;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->rasterizer != handle) {
      ctx->rasterizer = handle;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
   }
   return PIPE_OK;
}

static void
cso_save_rasterizer(struct cso_context *ctx)
{
   assert(!ctx->rasterizer_saved);
   ctx->rasterizer_saved = ctx->rasterizer;
}

static void
cso_restore_rasterizer(struct cso_context *ctx)
{
   if (ctx->rasterizer != ctx->rasterizer_saved) {
      ctx->rasterizer = ctx->rasterizer_saved;
      ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
   }
   ctx->rasterizer_saved = NULL;
}


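/* Comment added for clarity: unlike the blend/DSA/rasterizer setters above,
 * the shader "handle" setters below take already-created driver shader CSOs;
 * shaders are not hashed or cached here, cso_context only filters out
 * redundant bind calls.
 */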
void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
{
   if (ctx->fragment_shader != handle) {
      ctx->fragment_shader = handle;
      ctx->pipe->bind_fs_state(ctx->pipe, handle);
   }
}

static void
cso_save_fragment_shader(struct cso_context *ctx)
{
   assert(!ctx->fragment_shader_saved);
   ctx->fragment_shader_saved = ctx->fragment_shader;
}

static void
cso_restore_fragment_shader(struct cso_context *ctx)
{
   if (ctx->fragment_shader_saved != ctx->fragment_shader) {
      ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
      ctx->fragment_shader = ctx->fragment_shader_saved;
   }
   ctx->fragment_shader_saved = NULL;
}


void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
{
   if (ctx->vertex_shader != handle) {
      ctx->vertex_shader = handle;
      ctx->pipe->bind_vs_state(ctx->pipe, handle);
   }
}

static void
cso_save_vertex_shader(struct cso_context *ctx)
{
   assert(!ctx->vertex_shader_saved);
   ctx->vertex_shader_saved = ctx->vertex_shader;
}

static void
cso_restore_vertex_shader(struct cso_context *ctx)
{
   if (ctx->vertex_shader_saved != ctx->vertex_shader) {
      ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
      ctx->vertex_shader = ctx->vertex_shader_saved;
   }
   ctx->vertex_shader_saved = NULL;
}


void cso_set_framebuffer(struct cso_context *ctx,
                         const struct pipe_framebuffer_state *fb)
{
   if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
      util_copy_framebuffer_state(&ctx->fb, fb);
      ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
   }
}

static void
cso_save_framebuffer(struct cso_context *ctx)
{
   util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
}

static void
cso_restore_framebuffer(struct cso_context *ctx)
{
   if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
      util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
      ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
      util_unreference_framebuffer_state(&ctx->fb_saved);
   }
}


void cso_set_viewport(struct cso_context *ctx,
                      const struct pipe_viewport_state *vp)
{
   if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
      ctx->vp = *vp;
      ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
   }
}

/**
 * Setup viewport state for given width and height (position is always (0,0)).
 * Invert the Y axis if 'invert' is true.
 */
void
cso_set_viewport_dims(struct cso_context *ctx,
                      float width, float height, boolean invert)
{
   struct pipe_viewport_state vp;
   vp.scale[0] = width * 0.5f;
   vp.scale[1] = height * (invert ? -0.5f : 0.5f);
   vp.scale[2] = 0.5f;
   vp.translate[0] = 0.5f * width;
   vp.translate[1] = 0.5f * height;
   vp.translate[2] = 0.5f;
   vp.swizzle_x = PIPE_VIEWPORT_SWIZZLE_POSITIVE_X;
   vp.swizzle_y = PIPE_VIEWPORT_SWIZZLE_POSITIVE_Y;
   vp.swizzle_z = PIPE_VIEWPORT_SWIZZLE_POSITIVE_Z;
   vp.swizzle_w = PIPE_VIEWPORT_SWIZZLE_POSITIVE_W;
   cso_set_viewport(ctx, &vp);
}

static void
cso_save_viewport(struct cso_context *ctx)
{
   ctx->vp_saved = ctx->vp;
}


static void
cso_restore_viewport(struct cso_context *ctx)
{
   if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
      ctx->vp = ctx->vp_saved;
      ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
   }
}


void cso_set_blend_color(struct cso_context *ctx,
                         const struct pipe_blend_color *bc)
{
   if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
      ctx->blend_color = *bc;
      ctx->pipe->set_blend_color(ctx->pipe, bc);
   }
}

void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
{
   if (ctx->sample_mask != sample_mask) {
      ctx->sample_mask = sample_mask;
      ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
   }
}

static void
cso_save_sample_mask(struct cso_context *ctx)
{
   ctx->sample_mask_saved = ctx->sample_mask;
}

static void
cso_restore_sample_mask(struct cso_context *ctx)
{
   cso_set_sample_mask(ctx, ctx->sample_mask_saved);
}

void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
{
   if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
      ctx->min_samples = min_samples;
      ctx->pipe->set_min_samples(ctx->pipe, min_samples);
   }
}

static void
cso_save_min_samples(struct cso_context *ctx)
{
   ctx->min_samples_saved = ctx->min_samples;
}

static void
cso_restore_min_samples(struct cso_context *ctx)
{
   cso_set_min_samples(ctx, ctx->min_samples_saved);
}

void cso_set_stencil_ref(struct cso_context *ctx,
                         const struct pipe_stencil_ref *sr)
{
   if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
      ctx->stencil_ref = *sr;
      ctx->pipe->set_stencil_ref(ctx->pipe, sr);
   }
}

static void
cso_save_stencil_ref(struct cso_context *ctx)
{
   ctx->stencil_ref_saved = ctx->stencil_ref;
}


static void
cso_restore_stencil_ref(struct cso_context *ctx)
{
   if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
              sizeof(ctx->stencil_ref))) {
      ctx->stencil_ref = ctx->stencil_ref_saved;
      ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
   }
}

void cso_set_render_condition(struct cso_context *ctx,
                              struct pipe_query *query,
                              boolean condition,
                              enum pipe_render_cond_flag mode)
{
   struct pipe_context *pipe = ctx->pipe;

   if (ctx->render_condition != query ||
       ctx->render_condition_mode != mode ||
       ctx->render_condition_cond != condition) {
      pipe->render_condition(pipe, query, condition, mode);
      ctx->render_condition = query;
      ctx->render_condition_cond = condition;
      ctx->render_condition_mode = mode;
   }
}

static void
cso_save_render_condition(struct cso_context *ctx)
{
   ctx->render_condition_saved = ctx->render_condition;
   ctx->render_condition_cond_saved = ctx->render_condition_cond;
   ctx->render_condition_mode_saved = ctx->render_condition_mode;
}

static void
cso_restore_render_condition(struct cso_context *ctx)
{
   cso_set_render_condition(ctx, ctx->render_condition_saved,
                            ctx->render_condition_cond_saved,
                            ctx->render_condition_mode_saved);
}

void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
{
   assert(ctx->has_geometry_shader || !handle);

   if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
      ctx->geometry_shader = handle;
      ctx->pipe->bind_gs_state(ctx->pipe, handle);
   }
}

static void
cso_save_geometry_shader(struct cso_context *ctx)
{
   if (!ctx->has_geometry_shader) {
      return;
   }

   assert(!ctx->geometry_shader_saved);
   ctx->geometry_shader_saved = ctx->geometry_shader;
}

static void
cso_restore_geometry_shader(struct cso_context *ctx)
{
   if (!ctx->has_geometry_shader) {
      return;
   }

   if (ctx->geometry_shader_saved != ctx->geometry_shader) {
      ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
      ctx->geometry_shader = ctx->geometry_shader_saved;
   }
   ctx->geometry_shader_saved = NULL;
}

void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
{
   assert(ctx->has_tessellation || !handle);

   if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
      ctx->tessctrl_shader = handle;
      ctx->pipe->bind_tcs_state(ctx->pipe, handle);
   }
}

static void
cso_save_tessctrl_shader(struct cso_context *ctx)
{
   if (!ctx->has_tessellation) {
      return;
   }

   assert(!ctx->tessctrl_shader_saved);
   ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
}

static void
cso_restore_tessctrl_shader(struct cso_context *ctx)
{
   if (!ctx->has_tessellation) {
      return;
   }

   if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
      ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
      ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
   }
   ctx->tessctrl_shader_saved = NULL;
}

void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
{
   assert(ctx->has_tessellation || !handle);

   if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
      ctx->tesseval_shader = handle;
      ctx->pipe->bind_tes_state(ctx->pipe, handle);
   }
}

static void
cso_save_tesseval_shader(struct cso_context *ctx)
{
   if (!ctx->has_tessellation) {
      return;
   }

   assert(!ctx->tesseval_shader_saved);
   ctx->tesseval_shader_saved = ctx->tesseval_shader;
}

static void
cso_restore_tesseval_shader(struct cso_context *ctx)
{
   if (!ctx->has_tessellation) {
      return;
   }

   if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
      ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
      ctx->tesseval_shader = ctx->tesseval_shader_saved;
   }
   ctx->tesseval_shader_saved = NULL;
}

void cso_set_compute_shader_handle(struct cso_context *ctx, void *handle)
{
   assert(ctx->has_compute_shader || !handle);

   if (ctx->has_compute_shader && ctx->compute_shader != handle) {
      ctx->compute_shader = handle;
      ctx->pipe->bind_compute_state(ctx->pipe, handle);
   }
}

static void
cso_set_vertex_elements_direct(struct cso_context *ctx,
                               const struct cso_velems_state *velems)
{
   unsigned key_size, hash_key;
   struct cso_hash_iter iter;
   void *handle;

   /* Need to include the count in the stored state data too.
    * Otherwise the first 'count' pipe_vertex_element entries could be
    * identical even if the counts differ, and there's no guarantee the
    * hash would be different in that case either.
    */
   key_size = sizeof(struct pipe_vertex_element) * velems->count +
              sizeof(unsigned);
   hash_key = cso_construct_key((void*)velems, key_size);
   iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
                                  (void*)velems, key_size);

   if (cso_hash_iter_is_null(iter)) {
      struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
      if (!cso)
         return;

      memcpy(&cso->state, velems, key_size);
      cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe,
                                                          velems->count,
                                                      &cso->state.velems[0]);
      cso->delete_state =
         (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
      cso->context = ctx->pipe;

      iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
      if (cso_hash_iter_is_null(iter)) {
         FREE(cso);
         return;
      }

      handle = cso->data;
   }
   else {
      handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
   }

   if (ctx->velements != handle) {
      ctx->velements = handle;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
   }
}

enum pipe_error
cso_set_vertex_elements(struct cso_context *ctx,
                        const struct cso_velems_state *velems)
{
   struct u_vbuf *vbuf = ctx->vbuf_current;

   if (vbuf) {
      u_vbuf_set_vertex_elements(vbuf, velems);
      return PIPE_OK;
   }

   cso_set_vertex_elements_direct(ctx, velems);
   return PIPE_OK;
}

static void
cso_save_vertex_elements(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf_current;

   if (vbuf) {
      u_vbuf_save_vertex_elements(vbuf);
      return;
   }

   assert(!ctx->velements_saved);
   ctx->velements_saved = ctx->velements;
}

static void
cso_restore_vertex_elements(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf_current;

   if (vbuf) {
      u_vbuf_restore_vertex_elements(vbuf);
      return;
   }

   if (ctx->velements != ctx->velements_saved) {
      ctx->velements = ctx->velements_saved;
      ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
   }
   ctx->velements_saved = NULL;
}

/* vertex buffers */

static void
cso_set_vertex_buffers_direct(struct cso_context *ctx,
                              unsigned start_slot, unsigned count,
                              const struct pipe_vertex_buffer *buffers)
{
   /* Keep track of what's in the auxiliary slot (slot 0), so that we can
    * save and restore it for meta ops.
    */
   if (start_slot == 0) {
      if (buffers) {
         pipe_vertex_buffer_reference(&ctx->vertex_buffer0_current,
                                      buffers);
      } else {
         pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_current);
      }
   }

   ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
}


void cso_set_vertex_buffers(struct cso_context *ctx,
                            unsigned start_slot, unsigned count,
                            const struct pipe_vertex_buffer *buffers)
{
   struct u_vbuf *vbuf = ctx->vbuf_current;

   if (!count)
      return;

   if (vbuf) {
      u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
      return;
   }

   cso_set_vertex_buffers_direct(ctx, start_slot, count, buffers);
}

static void
cso_save_vertex_buffer0(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf_current;

   if (vbuf) {
      u_vbuf_save_vertex_buffer0(vbuf);
      return;
   }

   pipe_vertex_buffer_reference(&ctx->vertex_buffer0_saved,
                                &ctx->vertex_buffer0_current);
}

static void
cso_restore_vertex_buffer0(struct cso_context *ctx)
{
   struct u_vbuf *vbuf = ctx->vbuf_current;

   if (vbuf) {
      u_vbuf_restore_vertex_buffer0(vbuf);
      return;
   }

   cso_set_vertex_buffers(ctx, 0, 1, &ctx->vertex_buffer0_saved);
   pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_saved);
}

/**
 * Set vertex buffers and vertex elements. u_vbuf is skipped and disabled if
 * it is only needed for user vertex buffers and no user vertex buffers are
 * set by this call. To re-enable u_vbuf, call this function again.
 *
 * Skipping u_vbuf decreases CPU overhead for draw calls that don't need it,
 * such as VBOs, glBegin/End, and display lists.
 *
 * Internal operations that do "save states, draw, restore states" shouldn't
 * use this, because the states are only saved in either cso_context or
 * u_vbuf, not both.
 */
void
cso_set_vertex_buffers_and_elements(struct cso_context *ctx,
                                    const struct cso_velems_state *velems,
                                    unsigned vb_count,
                                    unsigned unbind_trailing_vb_count,
                                    const struct pipe_vertex_buffer *vbuffers,
                                    bool uses_user_vertex_buffers)
{
   struct u_vbuf *vbuf = ctx->vbuf;

   if (vbuf && (ctx->always_use_vbuf || uses_user_vertex_buffers)) {
      if (!ctx->vbuf_current) {
         /* Unbind all buffers in cso_context, because we'll use u_vbuf. */
         unsigned unbind_vb_count = vb_count + unbind_trailing_vb_count;
         if (unbind_vb_count)
            cso_set_vertex_buffers_direct(ctx, 0, unbind_vb_count, NULL);

         /* Unset this to make sure the CSO is re-bound on the next use. */
         ctx->velements = NULL;
         ctx->vbuf_current = vbuf;
      } else if (unbind_trailing_vb_count) {
         u_vbuf_set_vertex_buffers(vbuf, vb_count, unbind_trailing_vb_count,
                                   NULL);
      }

      if (vb_count)
         u_vbuf_set_vertex_buffers(vbuf, 0, vb_count, vbuffers);
      u_vbuf_set_vertex_elements(vbuf, velems);
      return;
   }

   if (ctx->vbuf_current) {
      /* Unbind all buffers in u_vbuf, because we'll use cso_context. */
      unsigned unbind_vb_count = vb_count + unbind_trailing_vb_count;
      if (unbind_vb_count)
         u_vbuf_set_vertex_buffers(vbuf, 0, unbind_vb_count, NULL);

      /* Unset this to make sure the CSO is re-bound on the next use. */
      u_vbuf_unset_vertex_elements(vbuf);
      ctx->vbuf_current = NULL;
   } else if (unbind_trailing_vb_count) {
      cso_set_vertex_buffers_direct(ctx, vb_count, unbind_trailing_vb_count,
                                    NULL);
   }

   if (vb_count)
      cso_set_vertex_buffers_direct(ctx, 0, vb_count, vbuffers);
   cso_set_vertex_elements_direct(ctx, velems);
}

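/* Comment added for clarity: stage a single sampler state.  The template is
 * looked up in (or added to) the cache and the resulting driver CSO is stored
 * in the per-stage table; nothing is sent to the driver until
 * cso_single_sampler_done() is called.
 */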
void
cso_single_sampler(struct cso_context *ctx, enum pipe_shader_type shader_stage,
                   unsigned idx, const struct pipe_sampler_state *templ)
{
   if (templ) {
      unsigned key_size = sizeof(struct pipe_sampler_state);
      unsigned hash_key = cso_construct_key((void*)templ, key_size);
      struct cso_sampler *cso;
      struct cso_hash_iter iter =
         cso_find_state_template(ctx->cache,
                                 hash_key, CSO_SAMPLER,
                                 (void *) templ, key_size);

      if (cso_hash_iter_is_null(iter)) {
         cso = MALLOC(sizeof(struct cso_sampler));
         if (!cso)
            return;

         memcpy(&cso->state, templ, sizeof(*templ));
         cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
         cso->delete_state =
            (cso_state_callback) ctx->pipe->delete_sampler_state;
         cso->context = ctx->pipe;
         cso->hash_key = hash_key;

         iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
         if (cso_hash_iter_is_null(iter)) {
            FREE(cso);
            return;
         }
      }
      else {
         cso = cso_hash_iter_data(iter);
      }

      ctx->samplers[shader_stage].cso_samplers[idx] = cso;
      ctx->samplers[shader_stage].samplers[idx] = cso->data;
      ctx->max_sampler_seen = MAX2(ctx->max_sampler_seen, (int)idx);
   }
}


/**
 * Send staged sampler state to the driver.
 */
void
cso_single_sampler_done(struct cso_context *ctx,
                        enum pipe_shader_type shader_stage)
{
   struct sampler_info *info = &ctx->samplers[shader_stage];

   if (ctx->max_sampler_seen == -1)
      return;

   ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0,
                                  ctx->max_sampler_seen + 1,
                                  info->samplers);
   ctx->max_sampler_seen = -1;
}


/*
 * If the function encounters any errors, only the last one is reported;
 * this is done so that we always try to set as many samplers as possible.
 */
void
cso_set_samplers(struct cso_context *ctx,
                 enum pipe_shader_type shader_stage,
                 unsigned nr,
                 const struct pipe_sampler_state **templates)
{
   for (unsigned i = 0; i < nr; i++)
      cso_single_sampler(ctx, shader_stage, i, templates[i]);

   cso_single_sampler_done(ctx, shader_stage);
}

static void
cso_save_fragment_samplers(struct cso_context *ctx)
{
   struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
   struct sampler_info *saved = &ctx->fragment_samplers_saved;

   memcpy(saved->cso_samplers, info->cso_samplers,
          sizeof(info->cso_samplers));
   memcpy(saved->samplers, info->samplers, sizeof(info->samplers));
}


static void
cso_restore_fragment_samplers(struct cso_context *ctx)
{
   struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
   struct sampler_info *saved = &ctx->fragment_samplers_saved;

   memcpy(info->cso_samplers, saved->cso_samplers,
          sizeof(info->cso_samplers));
   memcpy(info->samplers, saved->samplers, sizeof(info->samplers));

   for (int i = PIPE_MAX_SAMPLERS - 1; i >= 0; i--) {
      if (info->samplers[i]) {
         ctx->max_sampler_seen = i;
         break;
      }
   }

   cso_single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
}


void
cso_set_sampler_views(struct cso_context *ctx,
                      enum pipe_shader_type shader_stage,
                      unsigned count,
                      struct pipe_sampler_view **views)
{
   if (shader_stage == PIPE_SHADER_FRAGMENT) {
      unsigned i;
      boolean any_change = FALSE;

      /* reference new views */
      for (i = 0; i < count; i++) {
         any_change |= ctx->fragment_views[i] != views[i];
         pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
      }
      /* unref extra old views, if any */
      for (; i < ctx->nr_fragment_views; i++) {
         any_change |= ctx->fragment_views[i] != NULL;
         pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
      }

      /* bind the new sampler views */
      if (any_change) {
         ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
                                      MAX2(ctx->nr_fragment_views, count),
                                      ctx->fragment_views);
      }

      ctx->nr_fragment_views = count;
   }
   else
      ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
}


static void
cso_save_fragment_sampler_views(struct cso_context *ctx)
{
   unsigned i;

   ctx->nr_fragment_views_saved = ctx->nr_fragment_views;

   for (i = 0; i < ctx->nr_fragment_views; i++) {
      assert(!ctx->fragment_views_saved[i]);
      pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
                                  ctx->fragment_views[i]);
   }
}


static void
cso_restore_fragment_sampler_views(struct cso_context *ctx)
{
   unsigned i, nr_saved = ctx->nr_fragment_views_saved;
   unsigned num;

   for (i = 0; i < nr_saved; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
      /* move the reference from one pointer to another */
      ctx->fragment_views[i] = ctx->fragment_views_saved[i];
      ctx->fragment_views_saved[i] = NULL;
   }
   for (; i < ctx->nr_fragment_views; i++) {
      pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
   }

   num = MAX2(ctx->nr_fragment_views, nr_saved);

   /* bind the old/saved sampler views */
   ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
                                ctx->fragment_views);

   ctx->nr_fragment_views = nr_saved;
   ctx->nr_fragment_views_saved = 0;
}


1454 void
cso_set_shader_images(struct cso_context * ctx,enum pipe_shader_type shader_stage,unsigned start,unsigned count,struct pipe_image_view * images)1455 cso_set_shader_images(struct cso_context *ctx,
1456                       enum pipe_shader_type shader_stage,
1457                       unsigned start, unsigned count,
1458                       struct pipe_image_view *images)
1459 {
1460    if (shader_stage == PIPE_SHADER_FRAGMENT && start == 0 && count >= 1) {
1461       util_copy_image_view(&ctx->fragment_image0_current, &images[0]);
1462    }
1463 
1464    ctx->pipe->set_shader_images(ctx->pipe, shader_stage, start, count, images);
1465 }
1466 
1467 
1468 static void
cso_save_fragment_image0(struct cso_context * ctx)1469 cso_save_fragment_image0(struct cso_context *ctx)
1470 {
1471    util_copy_image_view(&ctx->fragment_image0_saved,
1472                         &ctx->fragment_image0_current);
1473 }
1474 
1475 
1476 static void
cso_restore_fragment_image0(struct cso_context * ctx)1477 cso_restore_fragment_image0(struct cso_context *ctx)
1478 {
1479    cso_set_shader_images(ctx, PIPE_SHADER_FRAGMENT, 0, 1,
1480                          &ctx->fragment_image0_saved);
1481 }
1482 
1483 
1484 void
cso_set_stream_outputs(struct cso_context * ctx,unsigned num_targets,struct pipe_stream_output_target ** targets,const unsigned * offsets)1485 cso_set_stream_outputs(struct cso_context *ctx,
1486                        unsigned num_targets,
1487                        struct pipe_stream_output_target **targets,
1488                        const unsigned *offsets)
1489 {
1490    struct pipe_context *pipe = ctx->pipe;
1491    uint i;
1492 
1493    if (!ctx->has_streamout) {
1494       assert(num_targets == 0);
1495       return;
1496    }
1497 
1498    if (ctx->nr_so_targets == 0 && num_targets == 0) {
1499       /* Nothing to do. */
1500       return;
1501    }
1502 
1503    /* reference new targets */
1504    for (i = 0; i < num_targets; i++) {
1505       pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1506    }
1507    /* unref extra old targets, if any */
1508    for (; i < ctx->nr_so_targets; i++) {
1509       pipe_so_target_reference(&ctx->so_targets[i], NULL);
1510    }
1511 
1512    pipe->set_stream_output_targets(pipe, num_targets, targets,
1513                                    offsets);
1514    ctx->nr_so_targets = num_targets;
1515 }
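/* Usage sketch (illustrative; not part of the original file).  Binds a
 * single stream-output target, draws, then unbinds it.  "buf" is an
 * assumed pipe_resource created with PIPE_BIND_STREAM_OUTPUT.
 *
 *    struct pipe_stream_output_target *tgt =
 *       pipe->create_stream_output_target(pipe, buf, 0, buf->width0);
 *    unsigned offset = 0;
 *
 *    cso_set_stream_outputs(cso, 1, &tgt, &offset);
 *    ... draw with stream output active ...
 *    cso_set_stream_outputs(cso, 0, NULL, NULL);
 *    pipe_so_target_reference(&tgt, NULL);   release the local reference
 */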
1516 
1517 static void
1518 cso_save_stream_outputs(struct cso_context *ctx)
1519 {
1520    uint i;
1521 
1522    if (!ctx->has_streamout) {
1523       return;
1524    }
1525 
1526    ctx->nr_so_targets_saved = ctx->nr_so_targets;
1527 
1528    for (i = 0; i < ctx->nr_so_targets; i++) {
1529       assert(!ctx->so_targets_saved[i]);
1530       pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1531    }
1532 }
1533 
1534 static void
1535 cso_restore_stream_outputs(struct cso_context *ctx)
1536 {
1537    struct pipe_context *pipe = ctx->pipe;
1538    uint i;
1539    unsigned offset[PIPE_MAX_SO_BUFFERS];
1540 
1541    if (!ctx->has_streamout) {
1542       return;
1543    }
1544 
1545    if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1546       /* Nothing to do. */
1547       return;
1548    }
1549 
1550    assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
1551    for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1552       pipe_so_target_reference(&ctx->so_targets[i], NULL);
1553       /* move the reference from one pointer to another */
1554       ctx->so_targets[i] = ctx->so_targets_saved[i];
1555       ctx->so_targets_saved[i] = NULL;
1556       /* -1 means append */
1557       offset[i] = (unsigned)-1;
1558    }
1559    for (; i < ctx->nr_so_targets; i++) {
1560       pipe_so_target_reference(&ctx->so_targets[i], NULL);
1561    }
1562 
1563    pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1564                                    ctx->so_targets, offset);
1565 
1566    ctx->nr_so_targets = ctx->nr_so_targets_saved;
1567    ctx->nr_so_targets_saved = 0;
1568 }
1569 
1570 /* constant buffers */
1571 
1572 void
1573 cso_set_constant_buffer(struct cso_context *cso,
1574                         enum pipe_shader_type shader_stage,
1575                         unsigned index, struct pipe_constant_buffer *cb)
1576 {
1577    struct pipe_context *pipe = cso->pipe;
1578 
1579    pipe->set_constant_buffer(pipe, shader_stage, index, cb);
1580 
1581    if (index == 0) {
1582       util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
1583    }
1584 }
1585 
1586 void
1587 cso_set_constant_buffer_resource(struct cso_context *cso,
1588                                  enum pipe_shader_type shader_stage,
1589                                  unsigned index,
1590                                  struct pipe_resource *buffer)
1591 {
1592    if (buffer) {
1593       struct pipe_constant_buffer cb;
1594       cb.buffer = buffer;
1595       cb.buffer_offset = 0;
1596       cb.buffer_size = buffer->width0;
1597       cb.user_buffer = NULL;
1598       cso_set_constant_buffer(cso, shader_stage, index, &cb);
1599    } else {
1600       cso_set_constant_buffer(cso, shader_stage, index, NULL);
1601    }
1602 }
1603 
1604 void
1605 cso_set_constant_user_buffer(struct cso_context *cso,
1606                              enum pipe_shader_type shader_stage,
1607                              unsigned index, void *ptr, unsigned size)
1608 {
1609    if (ptr) {
1610       struct pipe_constant_buffer cb;
1611       cb.buffer = NULL;
1612       cb.buffer_offset = 0;
1613       cb.buffer_size = size;
1614       cb.user_buffer = ptr;
1615       cso_set_constant_buffer(cso, shader_stage, index, &cb);
1616    } else {
1617       cso_set_constant_buffer(cso, shader_stage, index, NULL);
1618    }
1619 }
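/* Usage sketch (illustrative; not part of the original file).  Uploads a
 * small block of uniforms from user memory to constant buffer slot 0 of
 * the fragment shader; the data only needs to stay valid for the call.
 *
 *    const float consts[4] = { 1.0f, 0.0f, 0.0f, 1.0f };
 *    cso_set_constant_user_buffer(cso, PIPE_SHADER_FRAGMENT, 0,
 *                                 (void *) consts, sizeof(consts));
 */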
1620 
1621 void
1622 cso_save_constant_buffer_slot0(struct cso_context *cso,
1623                                enum pipe_shader_type shader_stage)
1624 {
1625    util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
1626                              &cso->aux_constbuf_current[shader_stage]);
1627 }
1628 
1629 void
1630 cso_restore_constant_buffer_slot0(struct cso_context *cso,
1631                                   enum pipe_shader_type shader_stage)
1632 {
1633    cso_set_constant_buffer(cso, shader_stage, 0,
1634                            &cso->aux_constbuf_saved[shader_stage]);
1635    pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
1636                            NULL);
1637 }
1638 
1639 
1640 /**
1641  * Save all the CSO state items specified by the state_mask bitmask
1642  * of CSO_BIT_x flags.
1643  */
1644 void
1645 cso_save_state(struct cso_context *cso, unsigned state_mask)
1646 {
1647    assert(cso->saved_state == 0);
1648 
1649    cso->saved_state = state_mask;
1650 
1651    if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1652       cso_save_vertex_buffer0(cso);
1653    if (state_mask & CSO_BIT_BLEND)
1654       cso_save_blend(cso);
1655    if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1656       cso_save_depth_stencil_alpha(cso);
1657    if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1658       cso_save_fragment_samplers(cso);
1659    if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1660       cso_save_fragment_sampler_views(cso);
1661    if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1662       cso_save_fragment_shader(cso);
1663    if (state_mask & CSO_BIT_FRAMEBUFFER)
1664       cso_save_framebuffer(cso);
1665    if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1666       cso_save_geometry_shader(cso);
1667    if (state_mask & CSO_BIT_MIN_SAMPLES)
1668       cso_save_min_samples(cso);
1669    if (state_mask & CSO_BIT_RASTERIZER)
1670       cso_save_rasterizer(cso);
1671    if (state_mask & CSO_BIT_RENDER_CONDITION)
1672       cso_save_render_condition(cso);
1673    if (state_mask & CSO_BIT_SAMPLE_MASK)
1674       cso_save_sample_mask(cso);
1675    if (state_mask & CSO_BIT_STENCIL_REF)
1676       cso_save_stencil_ref(cso);
1677    if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1678       cso_save_stream_outputs(cso);
1679    if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1680       cso_save_tessctrl_shader(cso);
1681    if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1682       cso_save_tesseval_shader(cso);
1683    if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1684       cso_save_vertex_elements(cso);
1685    if (state_mask & CSO_BIT_VERTEX_SHADER)
1686       cso_save_vertex_shader(cso);
1687    if (state_mask & CSO_BIT_VIEWPORT)
1688       cso_save_viewport(cso);
1689    if (state_mask & CSO_BIT_PAUSE_QUERIES)
1690       cso->pipe->set_active_query_state(cso->pipe, false);
1691    if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1692       cso_save_fragment_image0(cso);
1693 }
1694 
1695 
1696 /**
1697  * Restore the state which was saved by cso_save_state().
1698  */
1699 void
1700 cso_restore_state(struct cso_context *cso)
1701 {
1702    unsigned state_mask = cso->saved_state;
1703 
1704    assert(state_mask);
1705 
1706    if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
1707       cso_restore_vertex_buffer0(cso);
1708    if (state_mask & CSO_BIT_BLEND)
1709       cso_restore_blend(cso);
1710    if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
1711       cso_restore_depth_stencil_alpha(cso);
1712    if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
1713       cso_restore_fragment_samplers(cso);
1714    if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
1715       cso_restore_fragment_sampler_views(cso);
1716    if (state_mask & CSO_BIT_FRAGMENT_SHADER)
1717       cso_restore_fragment_shader(cso);
1718    if (state_mask & CSO_BIT_FRAMEBUFFER)
1719       cso_restore_framebuffer(cso);
1720    if (state_mask & CSO_BIT_GEOMETRY_SHADER)
1721       cso_restore_geometry_shader(cso);
1722    if (state_mask & CSO_BIT_MIN_SAMPLES)
1723       cso_restore_min_samples(cso);
1724    if (state_mask & CSO_BIT_RASTERIZER)
1725       cso_restore_rasterizer(cso);
1726    if (state_mask & CSO_BIT_RENDER_CONDITION)
1727       cso_restore_render_condition(cso);
1728    if (state_mask & CSO_BIT_SAMPLE_MASK)
1729       cso_restore_sample_mask(cso);
1730    if (state_mask & CSO_BIT_STENCIL_REF)
1731       cso_restore_stencil_ref(cso);
1732    if (state_mask & CSO_BIT_STREAM_OUTPUTS)
1733       cso_restore_stream_outputs(cso);
1734    if (state_mask & CSO_BIT_TESSCTRL_SHADER)
1735       cso_restore_tessctrl_shader(cso);
1736    if (state_mask & CSO_BIT_TESSEVAL_SHADER)
1737       cso_restore_tesseval_shader(cso);
1738    if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
1739       cso_restore_vertex_elements(cso);
1740    if (state_mask & CSO_BIT_VERTEX_SHADER)
1741       cso_restore_vertex_shader(cso);
1742    if (state_mask & CSO_BIT_VIEWPORT)
1743       cso_restore_viewport(cso);
1744    if (state_mask & CSO_BIT_PAUSE_QUERIES)
1745       cso->pipe->set_active_query_state(cso->pipe, true);
1746    if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
1747       cso_restore_fragment_image0(cso);
1748 
1749    cso->saved_state = 0;
1750 }
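/* Usage sketch (illustrative; not part of the original file).  A typical
 * "meta" operation saves exactly the bits it is about to clobber, programs
 * its own state, draws, and restores everything in one call.  The bits
 * shown are only an example.
 *
 *    cso_save_state(cso, CSO_BIT_FRAGMENT_SHADER |
 *                        CSO_BIT_VERTEX_SHADER |
 *                        CSO_BIT_FRAMEBUFFER |
 *                        CSO_BIT_VIEWPORT);
 *    ... bind blit shaders, framebuffer and viewport ...
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLE_FAN, 0, 4);
 *    cso_restore_state(cso);
 */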
1751 
1752 
1753 
1754 /* drawing */
1755 
1756 void
1757 cso_draw_vbo(struct cso_context *cso,
1758              const struct pipe_draw_info *info)
1759 {
1760    struct u_vbuf *vbuf = cso->vbuf_current;
1761 
1762    /* We can't have both indirect drawing and SO-vertex-count drawing */
1763    assert(info->indirect == NULL || info->count_from_stream_output == NULL);
1764 
1765    /* We can't have SO-vertex-count drawing with an index buffer */
1766    assert(info->count_from_stream_output == NULL || info->index_size == 0);
1767 
1768    if (vbuf) {
1769       u_vbuf_draw_vbo(vbuf, info);
1770    } else {
1771       struct pipe_context *pipe = cso->pipe;
1772       pipe->draw_vbo(pipe, info);
1773    }
1774 }
1775 
1776 void
1777 cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
1778 {
1779    struct pipe_draw_info info;
1780 
1781    util_draw_init_info(&info);
1782 
1783    info.mode = mode;
1784    info.start = start;
1785    info.count = count;
1786    info.min_index = start;
1787    info.max_index = start + count - 1;
1788 
1789    cso_draw_vbo(cso, &info);
1790 }
1791 
1792 void
1793 cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
1794                           uint start, uint count,
1795                           uint start_instance, uint instance_count)
1796 {
1797    struct pipe_draw_info info;
1798 
1799    util_draw_init_info(&info);
1800 
1801    info.mode = mode;
1802    info.start = start;
1803    info.count = count;
1804    info.min_index = start;
1805    info.max_index = start + count - 1;
1806    info.start_instance = start_instance;
1807    info.instance_count = instance_count;
1808 
1809    cso_draw_vbo(cso, &info);
1810 }
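/* Usage sketch (illustrative; not part of the original file).  Draws a
 * four-vertex triangle fan, then the same geometry 16 times using
 * instancing.  Vertex buffers and vertex elements are assumed to have
 * been bound through the cso_context beforehand.
 *
 *    cso_draw_arrays(cso, PIPE_PRIM_TRIANGLE_FAN, 0, 4);
 *    cso_draw_arrays_instanced(cso, PIPE_PRIM_TRIANGLE_FAN, 0, 4, 0, 16);
 */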
1811