/*
 * Copyright 2014, 2015 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "pipe/p_shader_tokens.h"

#include "pipe/p_context.h"
#include "pipe/p_defines.h"
#include "pipe/p_screen.h"
#include "pipe/p_state.h"
#include "util/u_inlines.h"
#include "util/u_memory.h"
#include "util/u_format.h"
#include "util/u_transfer.h"
#include "util/u_helpers.h"
#include "util/slab.h"
#include "util/u_upload_mgr.h"
#include "util/u_blitter.h"
#include "tgsi/tgsi_text.h"
#include "indices/u_primconvert.h"

#include "pipebuffer/pb_buffer.h"

#include "virgl_encode.h"
#include "virgl_context.h"
#include "virgl_protocol.h"
#include "virgl_resource.h"
#include "virgl_screen.h"

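/*
 * Object handles name the driver-created objects (surfaces, shaders, state
 * objects) on the host side; a simple monotonically increasing counter is
 * used, so the first handle handed out is 1.
 */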
static uint32_t next_handle;
uint32_t virgl_object_assign_handle(void)
{
   return ++next_handle;
}

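/*
 * Flush the valid range of a dirty buffer to the host with a single
 * transfer_put, then mark the tracked range empty again.
 */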
static void virgl_buffer_flush(struct virgl_context *vctx,
                               struct virgl_buffer *vbuf)
{
   struct virgl_screen *rs = virgl_screen(vctx->base.screen);
   struct pipe_box box;

   assert(vbuf->on_list);

   box.height = 1;
   box.depth = 1;
   box.y = 0;
   box.z = 0;

   box.x = vbuf->valid_buffer_range.start;
   box.width = MIN2(vbuf->valid_buffer_range.end - vbuf->valid_buffer_range.start,
                    vbuf->base.u.b.width0);

   vctx->num_transfers++;
   rs->vws->transfer_put(rs->vws, vbuf->base.hw_res,
                         &box, 0, 0, box.x, 0);

   util_range_set_empty(&vbuf->valid_buffer_range);
}

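/*
 * The virgl_attach_res_* helpers add the resources backing the currently
 * bound state (framebuffer, sampler views, vertex/index/uniform buffers,
 * stream-output targets) to the command buffer's reference list via
 * vws->emit_res().
 */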
static void virgl_attach_res_framebuffer(struct virgl_context *vctx)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct pipe_surface *surf;
   struct virgl_resource *res;
   unsigned i;

   surf = vctx->framebuffer.zsbuf;
   if (surf) {
      res = virgl_resource(surf->texture);
      if (res)
         vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
   }
   for (i = 0; i < vctx->framebuffer.nr_cbufs; i++) {
      surf = vctx->framebuffer.cbufs[i];
      if (surf) {
         res = virgl_resource(surf->texture);
         if (res)
            vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
      }
   }
}

static void virgl_attach_res_sampler_views(struct virgl_context *vctx,
                                           unsigned shader_type)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct virgl_textures_info *tinfo = &vctx->samplers[shader_type];
   struct virgl_resource *res;
   uint32_t remaining_mask = tinfo->enabled_mask;
   unsigned i;
   while (remaining_mask) {
      i = u_bit_scan(&remaining_mask);
      assert(tinfo->views[i]);

      res = virgl_resource(tinfo->views[i]->base.texture);
      if (res)
         vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
   }
}

static void virgl_attach_res_vertex_buffers(struct virgl_context *vctx)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct virgl_resource *res;
   unsigned i;

   for (i = 0; i < vctx->num_vertex_buffers; i++) {
      res = virgl_resource(vctx->vertex_buffer[i].buffer);
      if (res)
         vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
   }
}

static void virgl_attach_res_index_buffer(struct virgl_context *vctx)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct virgl_resource *res;

   res = virgl_resource(vctx->index_buffer.buffer);
   if (res)
      vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
}

static void virgl_attach_res_so_targets(struct virgl_context *vctx)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct virgl_resource *res;
   unsigned i;

   for (i = 0; i < vctx->num_so_targets; i++) {
      res = virgl_resource(vctx->so_targets[i].base.buffer);
      if (res)
         vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
   }
}

static void virgl_attach_res_uniform_buffers(struct virgl_context *vctx,
                                             unsigned shader_type)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct virgl_resource *res;
   unsigned i;
   for (i = 0; i < PIPE_MAX_CONSTANT_BUFFERS; i++) {
      res = virgl_resource(vctx->ubos[shader_type][i]);
      if (res) {
         vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
      }
   }
}

/*
 * after flushing, the hw context still has a bunch of
 * resources bound, so we need to rebind those here.
 */
static void virgl_reemit_res(struct virgl_context *vctx)
{
   unsigned shader_type;

   /* reattach any flushed resources */
   /* framebuffer, sampler views, vertex/index/uniform/stream buffers */
   virgl_attach_res_framebuffer(vctx);

   for (shader_type = 0; shader_type < PIPE_SHADER_TYPES; shader_type++) {
      virgl_attach_res_sampler_views(vctx, shader_type);
      virgl_attach_res_uniform_buffers(vctx, shader_type);
   }
   virgl_attach_res_index_buffer(vctx);
   virgl_attach_res_vertex_buffers(vctx);
   virgl_attach_res_so_targets(vctx);
}

static struct pipe_surface *virgl_create_surface(struct pipe_context *ctx,
                                                 struct pipe_resource *resource,
                                                 const struct pipe_surface *templ)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_surface *surf;
   struct virgl_resource *res = virgl_resource(resource);
   uint32_t handle;

   surf = CALLOC_STRUCT(virgl_surface);
   if (!surf)
      return NULL;

   res->clean = FALSE;
   handle = virgl_object_assign_handle();
   pipe_reference_init(&surf->base.reference, 1);
   pipe_resource_reference(&surf->base.texture, resource);
   surf->base.context = ctx;
   surf->base.format = templ->format;
   if (resource->target != PIPE_BUFFER) {
      surf->base.width = u_minify(resource->width0, templ->u.tex.level);
      surf->base.height = u_minify(resource->height0, templ->u.tex.level);
      surf->base.u.tex.level = templ->u.tex.level;
      surf->base.u.tex.first_layer = templ->u.tex.first_layer;
      surf->base.u.tex.last_layer = templ->u.tex.last_layer;
   } else {
      surf->base.width = templ->u.buf.last_element - templ->u.buf.first_element + 1;
      surf->base.height = resource->height0;
      surf->base.u.buf.first_element = templ->u.buf.first_element;
      surf->base.u.buf.last_element = templ->u.buf.last_element;
   }
   virgl_encoder_create_surface(vctx, handle, res, &surf->base);
   surf->handle = handle;
   return &surf->base;
}

static void virgl_surface_destroy(struct pipe_context *ctx,
                                  struct pipe_surface *psurf)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_surface *surf = virgl_surface(psurf);

   pipe_resource_reference(&surf->base.texture, NULL);
   virgl_encode_delete_object(vctx, surf->handle, VIRGL_OBJECT_SURFACE);
   FREE(surf);
}

static void *virgl_create_blend_state(struct pipe_context *ctx,
                                      const struct pipe_blend_state *blend_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle;
   handle = virgl_object_assign_handle();

   virgl_encode_blend_state(vctx, handle, blend_state);
   return (void *)(unsigned long)handle;
}

static void virgl_bind_blend_state(struct pipe_context *ctx,
                                   void *blend_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)blend_state;
   virgl_encode_bind_object(vctx, handle, VIRGL_OBJECT_BLEND);
}

static void virgl_delete_blend_state(struct pipe_context *ctx,
                                     void *blend_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)blend_state;
   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_BLEND);
}

static void *virgl_create_depth_stencil_alpha_state(struct pipe_context *ctx,
                                                    const struct pipe_depth_stencil_alpha_state *blend_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle;
   handle = virgl_object_assign_handle();

   virgl_encode_dsa_state(vctx, handle, blend_state);
   return (void *)(unsigned long)handle;
}

static void virgl_bind_depth_stencil_alpha_state(struct pipe_context *ctx,
                                                 void *blend_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)blend_state;
   virgl_encode_bind_object(vctx, handle, VIRGL_OBJECT_DSA);
}

static void virgl_delete_depth_stencil_alpha_state(struct pipe_context *ctx,
                                                   void *dsa_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)dsa_state;
   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_DSA);
}

static void *virgl_create_rasterizer_state(struct pipe_context *ctx,
                                           const struct pipe_rasterizer_state *rs_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle;
   handle = virgl_object_assign_handle();

   virgl_encode_rasterizer_state(vctx, handle, rs_state);
   return (void *)(unsigned long)handle;
}

static void virgl_bind_rasterizer_state(struct pipe_context *ctx,
                                        void *rs_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)rs_state;

   virgl_encode_bind_object(vctx, handle, VIRGL_OBJECT_RASTERIZER);
}

static void virgl_delete_rasterizer_state(struct pipe_context *ctx,
                                          void *rs_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)rs_state;
   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_RASTERIZER);
}

static void virgl_set_framebuffer_state(struct pipe_context *ctx,
                                        const struct pipe_framebuffer_state *state)
{
   struct virgl_context *vctx = virgl_context(ctx);

   vctx->framebuffer = *state;
   virgl_encoder_set_framebuffer_state(vctx, state);
   virgl_attach_res_framebuffer(vctx);
}

static void virgl_set_viewport_states(struct pipe_context *ctx,
                                      unsigned start_slot,
                                      unsigned num_viewports,
                                      const struct pipe_viewport_state *state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_viewport_states(vctx, start_slot, num_viewports, state);
}

static void *virgl_create_vertex_elements_state(struct pipe_context *ctx,
                                                unsigned num_elements,
                                                const struct pipe_vertex_element *elements)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = virgl_object_assign_handle();
   virgl_encoder_create_vertex_elements(vctx, handle,
                                        num_elements, elements);
   return (void *)(unsigned long)handle;
}

static void virgl_delete_vertex_elements_state(struct pipe_context *ctx,
                                               void *ve)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)ve;

   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_VERTEX_ELEMENTS);
}

static void virgl_bind_vertex_elements_state(struct pipe_context *ctx,
                                             void *ve)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)ve;
   virgl_encode_bind_object(vctx, handle, VIRGL_OBJECT_VERTEX_ELEMENTS);
}

static void virgl_set_vertex_buffers(struct pipe_context *ctx,
                                     unsigned start_slot,
                                     unsigned num_buffers,
                                     const struct pipe_vertex_buffer *buffers)
{
   struct virgl_context *vctx = virgl_context(ctx);

   util_set_vertex_buffers_count(vctx->vertex_buffer,
                                 &vctx->num_vertex_buffers,
                                 buffers, start_slot, num_buffers);

   vctx->vertex_array_dirty = TRUE;
}

static void virgl_hw_set_vertex_buffers(struct pipe_context *ctx)
{
   struct virgl_context *vctx = virgl_context(ctx);

   if (vctx->vertex_array_dirty) {
      virgl_encoder_set_vertex_buffers(vctx, vctx->num_vertex_buffers, vctx->vertex_buffer);
      virgl_attach_res_vertex_buffers(vctx);
   }
}

static void virgl_set_stencil_ref(struct pipe_context *ctx,
                                  const struct pipe_stencil_ref *ref)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_stencil_ref(vctx, ref);
}

static void virgl_set_blend_color(struct pipe_context *ctx,
                                  const struct pipe_blend_color *color)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_blend_color(vctx, color);
}

static void virgl_set_index_buffer(struct pipe_context *ctx,
                                   const struct pipe_index_buffer *ib)
{
   struct virgl_context *vctx = virgl_context(ctx);

   if (ib) {
      pipe_resource_reference(&vctx->index_buffer.buffer, ib->buffer);
      memcpy(&vctx->index_buffer, ib, sizeof(*ib));
   } else {
      pipe_resource_reference(&vctx->index_buffer.buffer, NULL);
   }
}

static void virgl_hw_set_index_buffer(struct pipe_context *ctx,
                                      struct pipe_index_buffer *ib)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_index_buffer(vctx, ib);
   virgl_attach_res_index_buffer(vctx);
}

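/*
 * Constant buffers backed by a pipe_resource are bound by reference and
 * remembered in vctx->ubos so they can be re-emitted after a flush;
 * user-pointer buffers are written inline into the command stream instead.
 */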
static void virgl_set_constant_buffer(struct pipe_context *ctx,
                                      uint shader, uint index,
                                      const struct pipe_constant_buffer *buf)
{
   struct virgl_context *vctx = virgl_context(ctx);

   if (buf) {
      if (!buf->user_buffer) {
         struct virgl_resource *res = virgl_resource(buf->buffer);
         virgl_encoder_set_uniform_buffer(vctx, shader, index, buf->buffer_offset,
                                          buf->buffer_size, res);
         pipe_resource_reference(&vctx->ubos[shader][index], buf->buffer);
         return;
      }
      pipe_resource_reference(&vctx->ubos[shader][index], NULL);
      virgl_encoder_write_constant_buffer(vctx, shader, index, buf->buffer_size / 4, buf->user_buffer);
   } else {
      virgl_encoder_write_constant_buffer(vctx, shader, index, 0, NULL);
      pipe_resource_reference(&vctx->ubos[shader][index], NULL);
   }
}

void virgl_transfer_inline_write(struct pipe_context *ctx,
                                 struct pipe_resource *res,
                                 unsigned level,
                                 unsigned usage,
                                 const struct pipe_box *box,
                                 const void *data,
                                 unsigned stride,
                                 unsigned layer_stride)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_screen *vs = virgl_screen(ctx->screen);
   struct virgl_resource *grres = virgl_resource(res);
   struct virgl_buffer *vbuf = virgl_buffer(res);

   grres->clean = FALSE;

   if (virgl_res_needs_flush_wait(vctx, &vbuf->base, usage)) {
      ctx->flush(ctx, NULL, 0);

      vs->vws->resource_wait(vs->vws, vbuf->base.hw_res);
   }

   virgl_encoder_inline_write(vctx, grres, level, usage,
                              box, data, stride, layer_stride);
}

static void *virgl_shader_encoder(struct pipe_context *ctx,
                                  const struct pipe_shader_state *shader,
                                  unsigned type)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle;
   struct tgsi_token *new_tokens;
   int ret;

   new_tokens = virgl_tgsi_transform(shader->tokens);
   if (!new_tokens)
      return NULL;

   handle = virgl_object_assign_handle();
   /* encode the shader state (VS/GS/FS, selected by 'type') */
   ret = virgl_encode_shader_state(vctx, handle, type,
                                   &shader->stream_output,
                                   new_tokens);
   if (ret) {
      FREE(new_tokens);
      return NULL;
   }

   FREE(new_tokens);
   return (void *)(unsigned long)handle;
}

static void *virgl_create_vs_state(struct pipe_context *ctx,
                                   const struct pipe_shader_state *shader)
{
   return virgl_shader_encoder(ctx, shader, PIPE_SHADER_VERTEX);
}

static void *virgl_create_gs_state(struct pipe_context *ctx,
                                   const struct pipe_shader_state *shader)
{
   return virgl_shader_encoder(ctx, shader, PIPE_SHADER_GEOMETRY);
}

static void *virgl_create_fs_state(struct pipe_context *ctx,
                                   const struct pipe_shader_state *shader)
{
   return virgl_shader_encoder(ctx, shader, PIPE_SHADER_FRAGMENT);
}

static void
virgl_delete_fs_state(struct pipe_context *ctx,
                      void *fs)
{
   uint32_t handle = (unsigned long)fs;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_SHADER);
}

static void
virgl_delete_gs_state(struct pipe_context *ctx,
                      void *gs)
{
   uint32_t handle = (unsigned long)gs;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_SHADER);
}

static void
virgl_delete_vs_state(struct pipe_context *ctx,
                      void *vs)
{
   uint32_t handle = (unsigned long)vs;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_SHADER);
}

static void virgl_bind_vs_state(struct pipe_context *ctx,
                                void *vss)
{
   uint32_t handle = (unsigned long)vss;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_bind_shader(vctx, handle, PIPE_SHADER_VERTEX);
}

static void virgl_bind_gs_state(struct pipe_context *ctx,
                                void *vss)
{
   uint32_t handle = (unsigned long)vss;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_bind_shader(vctx, handle, PIPE_SHADER_GEOMETRY);
}

static void virgl_bind_fs_state(struct pipe_context *ctx,
                                void *vss)
{
   uint32_t handle = (unsigned long)vss;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_bind_shader(vctx, handle, PIPE_SHADER_FRAGMENT);
}

static void virgl_clear(struct pipe_context *ctx,
                        unsigned buffers,
                        const union pipe_color_union *color,
                        double depth, unsigned stencil)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_clear(vctx, buffers, color, depth, stencil);
}

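/*
 * Draw path: primitives the host cannot render directly (per prim_mask)
 * are lowered with u_primconvert; user-pointer index data is uploaded to
 * a real buffer before the draw is encoded.
 */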
static void virgl_draw_vbo(struct pipe_context *ctx,
                           const struct pipe_draw_info *dinfo)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_screen *rs = virgl_screen(ctx->screen);
   struct pipe_index_buffer ib = {};
   struct pipe_draw_info info = *dinfo;

   if (!(rs->caps.caps.v1.prim_mask & (1 << dinfo->mode))) {
      util_primconvert_save_index_buffer(vctx->primconvert, &vctx->index_buffer);
      util_primconvert_draw_vbo(vctx->primconvert, dinfo);
      return;
   }
   if (info.indexed) {
      pipe_resource_reference(&ib.buffer, vctx->index_buffer.buffer);
      ib.user_buffer = vctx->index_buffer.user_buffer;
      ib.index_size = vctx->index_buffer.index_size;
      ib.offset = vctx->index_buffer.offset + info.start * ib.index_size;

      if (ib.user_buffer) {
         u_upload_data(vctx->uploader, 0, info.count * ib.index_size, 256,
                       ib.user_buffer, &ib.offset, &ib.buffer);
         ib.user_buffer = NULL;
      }
   }

   u_upload_unmap(vctx->uploader);

   vctx->num_draws++;
   virgl_hw_set_vertex_buffers(ctx);
   if (info.indexed)
      virgl_hw_set_index_buffer(ctx, &ib);

   virgl_encoder_draw_vbo(vctx, &info);

   pipe_resource_reference(&ib.buffer, NULL);
}

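/*
 * Submit the accumulated command stream to the host, then restore the
 * sub-context and re-reference the currently bound resources so the next
 * command buffer starts from a consistent state.
 */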
static void virgl_flush_eq(struct virgl_context *ctx, void *closure)
{
   struct virgl_screen *rs = virgl_screen(ctx->base.screen);

   /* send the buffer to the remote side for decoding */
   ctx->num_transfers = ctx->num_draws = 0;
   rs->vws->submit_cmd(rs->vws, ctx->cbuf);

   virgl_encoder_set_sub_ctx(ctx, ctx->hw_sub_ctx_id);

   /* add back current framebuffer resources to reference list? */
   virgl_reemit_res(ctx);
}

static void virgl_flush_from_st(struct pipe_context *ctx,
                                struct pipe_fence_handle **fence,
                                enum pipe_flush_flags flags)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_screen *rs = virgl_screen(ctx->screen);
   struct virgl_buffer *buf, *tmp;

   if (fence)
      *fence = rs->vws->cs_create_fence(rs->vws);

   LIST_FOR_EACH_ENTRY_SAFE(buf, tmp, &vctx->to_flush_bufs, flush_list) {
      struct pipe_resource *res = &buf->base.u.b;
      virgl_buffer_flush(vctx, buf);
      list_del(&buf->flush_list);
      buf->on_list = FALSE;
      pipe_resource_reference(&res, NULL);
   }
   virgl_flush_eq(vctx, vctx);
}

static struct pipe_sampler_view *virgl_create_sampler_view(struct pipe_context *ctx,
                                      struct pipe_resource *texture,
                                      const struct pipe_sampler_view *state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_sampler_view *grview;
   uint32_t handle;
   struct virgl_resource *res;

   if (!state)
      return NULL;

   grview = CALLOC_STRUCT(virgl_sampler_view);
   if (!grview)
      return NULL;

   res = virgl_resource(texture);
   handle = virgl_object_assign_handle();
   virgl_encode_sampler_view(vctx, handle, res, state);

   grview->base = *state;
   grview->base.reference.count = 1;

   grview->base.texture = NULL;
   grview->base.context = ctx;
   pipe_resource_reference(&grview->base.texture, texture);
   grview->handle = handle;
   return &grview->base;
}

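/*
 * tinfo->enabled_mask tracks which sampler-view slots currently hold a
 * view: enabled slots at or above num_views are released, changed slots
 * are re-referenced, and the resulting set is encoded and attached to the
 * command buffer.
 */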
static void virgl_set_sampler_views(struct pipe_context *ctx,
                                    enum pipe_shader_type shader_type,
                                    unsigned start_slot,
                                    unsigned num_views,
                                    struct pipe_sampler_view **views)
{
   struct virgl_context *vctx = virgl_context(ctx);
   int i;
   uint32_t disable_mask = ~((1ull << num_views) - 1);
   struct virgl_textures_info *tinfo = &vctx->samplers[shader_type];
   uint32_t new_mask = 0;
   uint32_t remaining_mask;

   remaining_mask = tinfo->enabled_mask & disable_mask;

   while (remaining_mask) {
      i = u_bit_scan(&remaining_mask);
      assert(tinfo->views[i]);

      pipe_sampler_view_reference((struct pipe_sampler_view **)&tinfo->views[i], NULL);
   }

   for (i = 0; i < num_views; i++) {
      struct virgl_sampler_view *grview = virgl_sampler_view(views[i]);

      if (views[i] == (struct pipe_sampler_view *)tinfo->views[i])
         continue;

      if (grview) {
         new_mask |= 1 << i;
         pipe_sampler_view_reference((struct pipe_sampler_view **)&tinfo->views[i], views[i]);
      } else {
         pipe_sampler_view_reference((struct pipe_sampler_view **)&tinfo->views[i], NULL);
         disable_mask |= 1 << i;
      }
   }

   tinfo->enabled_mask &= ~disable_mask;
   tinfo->enabled_mask |= new_mask;
   virgl_encode_set_sampler_views(vctx, shader_type, start_slot, num_views, tinfo->views);
   virgl_attach_res_sampler_views(vctx, shader_type);
}

static void virgl_destroy_sampler_view(struct pipe_context *ctx,
                                       struct pipe_sampler_view *view)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_sampler_view *grview = virgl_sampler_view(view);

   virgl_encode_delete_object(vctx, grview->handle, VIRGL_OBJECT_SAMPLER_VIEW);
   pipe_resource_reference(&view->texture, NULL);
   FREE(view);
}

static void *virgl_create_sampler_state(struct pipe_context *ctx,
                                        const struct pipe_sampler_state *state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle;

   handle = virgl_object_assign_handle();

   virgl_encode_sampler_state(vctx, handle, state);
   return (void *)(unsigned long)handle;
}

static void virgl_delete_sampler_state(struct pipe_context *ctx,
                                       void *ss)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)ss;

   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_SAMPLER_STATE);
}

static void virgl_bind_sampler_states(struct pipe_context *ctx,
                                      enum pipe_shader_type shader,
                                      unsigned start_slot,
                                      unsigned num_samplers,
                                      void **samplers)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handles[32];
   int i;
   for (i = 0; i < num_samplers; i++) {
      handles[i] = (unsigned long)(samplers[i]);
   }
   virgl_encode_bind_sampler_states(vctx, shader, start_slot, num_samplers, handles);
}

static void virgl_set_polygon_stipple(struct pipe_context *ctx,
                                      const struct pipe_poly_stipple *ps)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_polygon_stipple(vctx, ps);
}

static void virgl_set_scissor_states(struct pipe_context *ctx,
                                     unsigned start_slot,
                                     unsigned num_scissor,
                                     const struct pipe_scissor_state *ss)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_scissor_state(vctx, start_slot, num_scissor, ss);
}

static void virgl_set_sample_mask(struct pipe_context *ctx,
                                  unsigned sample_mask)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_sample_mask(vctx, sample_mask);
}

static void virgl_set_clip_state(struct pipe_context *ctx,
                                 const struct pipe_clip_state *clip)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_clip_state(vctx, clip);
}

static void virgl_resource_copy_region(struct pipe_context *ctx,
                                       struct pipe_resource *dst,
                                       unsigned dst_level,
                                       unsigned dstx, unsigned dsty, unsigned dstz,
                                       struct pipe_resource *src,
                                       unsigned src_level,
                                       const struct pipe_box *src_box)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_resource *dres = virgl_resource(dst);
   struct virgl_resource *sres = virgl_resource(src);

   dres->clean = FALSE;
   virgl_encode_resource_copy_region(vctx, dres,
                                     dst_level, dstx, dsty, dstz,
                                     sres, src_level,
                                     src_box);
}

static void
virgl_flush_resource(struct pipe_context *pipe,
                     struct pipe_resource *resource)
{
}

static void virgl_blit(struct pipe_context *ctx,
                       const struct pipe_blit_info *blit)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_resource *dres = virgl_resource(blit->dst.resource);
   struct virgl_resource *sres = virgl_resource(blit->src.resource);

   dres->clean = FALSE;
   virgl_encode_blit(vctx, dres, sres,
                     blit);
}

static void
virgl_context_destroy(struct pipe_context *ctx)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_screen *rs = virgl_screen(ctx->screen);

   vctx->framebuffer.zsbuf = NULL;
   vctx->framebuffer.nr_cbufs = 0;
   virgl_encoder_destroy_sub_ctx(vctx, vctx->hw_sub_ctx_id);
   virgl_flush_eq(vctx, vctx);

   rs->vws->cmd_buf_destroy(vctx->cbuf);
   if (vctx->uploader)
      u_upload_destroy(vctx->uploader);
   util_primconvert_destroy(vctx->primconvert);

   slab_destroy_child(&vctx->texture_transfer_pool);
   FREE(vctx);
}

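/*
 * Create a virgl pipe_context: allocate a command buffer from the winsys,
 * wire up the pipe_context vtable, and set up the per-context helpers
 * (primconvert, index upload manager, texture transfer slab) plus a
 * hardware sub-context for this context's state.
 */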
struct pipe_context *virgl_context_create(struct pipe_screen *pscreen,
                                          void *priv,
                                          unsigned flags)
{
   struct virgl_context *vctx;
   struct virgl_screen *rs = virgl_screen(pscreen);
   vctx = CALLOC_STRUCT(virgl_context);
   if (!vctx)
      return NULL;

   vctx->cbuf = rs->vws->cmd_buf_create(rs->vws);
   if (!vctx->cbuf) {
      FREE(vctx);
      return NULL;
   }

   vctx->base.destroy = virgl_context_destroy;
   vctx->base.create_surface = virgl_create_surface;
   vctx->base.surface_destroy = virgl_surface_destroy;
   vctx->base.set_framebuffer_state = virgl_set_framebuffer_state;
   vctx->base.create_blend_state = virgl_create_blend_state;
   vctx->base.bind_blend_state = virgl_bind_blend_state;
   vctx->base.delete_blend_state = virgl_delete_blend_state;
   vctx->base.create_depth_stencil_alpha_state = virgl_create_depth_stencil_alpha_state;
   vctx->base.bind_depth_stencil_alpha_state = virgl_bind_depth_stencil_alpha_state;
   vctx->base.delete_depth_stencil_alpha_state = virgl_delete_depth_stencil_alpha_state;
   vctx->base.create_rasterizer_state = virgl_create_rasterizer_state;
   vctx->base.bind_rasterizer_state = virgl_bind_rasterizer_state;
   vctx->base.delete_rasterizer_state = virgl_delete_rasterizer_state;

   vctx->base.set_viewport_states = virgl_set_viewport_states;
   vctx->base.create_vertex_elements_state = virgl_create_vertex_elements_state;
   vctx->base.bind_vertex_elements_state = virgl_bind_vertex_elements_state;
   vctx->base.delete_vertex_elements_state = virgl_delete_vertex_elements_state;
   vctx->base.set_vertex_buffers = virgl_set_vertex_buffers;
   vctx->base.set_index_buffer = virgl_set_index_buffer;
   vctx->base.set_constant_buffer = virgl_set_constant_buffer;

   vctx->base.create_vs_state = virgl_create_vs_state;
   vctx->base.create_gs_state = virgl_create_gs_state;
   vctx->base.create_fs_state = virgl_create_fs_state;

   vctx->base.bind_vs_state = virgl_bind_vs_state;
   vctx->base.bind_gs_state = virgl_bind_gs_state;
   vctx->base.bind_fs_state = virgl_bind_fs_state;

   vctx->base.delete_vs_state = virgl_delete_vs_state;
   vctx->base.delete_gs_state = virgl_delete_gs_state;
   vctx->base.delete_fs_state = virgl_delete_fs_state;

   vctx->base.clear = virgl_clear;
   vctx->base.draw_vbo = virgl_draw_vbo;
   vctx->base.flush = virgl_flush_from_st;
   vctx->base.screen = pscreen;
   vctx->base.create_sampler_view = virgl_create_sampler_view;
   vctx->base.sampler_view_destroy = virgl_destroy_sampler_view;
   vctx->base.set_sampler_views = virgl_set_sampler_views;

   vctx->base.create_sampler_state = virgl_create_sampler_state;
   vctx->base.delete_sampler_state = virgl_delete_sampler_state;
   vctx->base.bind_sampler_states = virgl_bind_sampler_states;

   vctx->base.set_polygon_stipple = virgl_set_polygon_stipple;
   vctx->base.set_scissor_states = virgl_set_scissor_states;
   vctx->base.set_sample_mask = virgl_set_sample_mask;
   vctx->base.set_stencil_ref = virgl_set_stencil_ref;
   vctx->base.set_clip_state = virgl_set_clip_state;

   vctx->base.set_blend_color = virgl_set_blend_color;

   vctx->base.resource_copy_region = virgl_resource_copy_region;
   vctx->base.flush_resource = virgl_flush_resource;
   vctx->base.blit = virgl_blit;

   virgl_init_context_resource_functions(&vctx->base);
   virgl_init_query_functions(vctx);
   virgl_init_so_functions(vctx);

   list_inithead(&vctx->to_flush_bufs);
   slab_create_child(&vctx->texture_transfer_pool, &rs->texture_transfer_pool);

   vctx->primconvert = util_primconvert_create(&vctx->base, rs->caps.caps.v1.prim_mask);
   vctx->uploader = u_upload_create(&vctx->base, 1024 * 1024,
                                    PIPE_BIND_INDEX_BUFFER, PIPE_USAGE_STREAM);
   if (!vctx->uploader)
      goto fail;

   vctx->hw_sub_ctx_id = rs->sub_ctx_id++;
   virgl_encoder_create_sub_ctx(vctx, vctx->hw_sub_ctx_id);

   virgl_encoder_set_sub_ctx(vctx, vctx->hw_sub_ctx_id);
   return &vctx->base;
fail:
   return NULL;
}