/**************************************************************************
 *
 * Copyright (C) 2014 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include <stdio.h>
#include <time.h>

#include <epoxy/gl.h>

#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>

#include "pipe/p_state.h"
#include "util/u_format.h"
#include "util/u_math.h"
#include "vrend_renderer.h"
#include "vrend_winsys.h"

#include "virglrenderer.h"
#include "virglrenderer_hw.h"

#include "virgl_context.h"
#include "virgl_resource.h"
#include "virgl_util.h"

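/* Library-wide singleton state.  virgl_renderer_init() fills this in step by
 * step and virgl_renderer_cleanup() tears it down; the *_initialized flags
 * record which subsystems (resource table, context table, winsys, vrend)
 * have been brought up so partial initialization can be unwound.
 */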
struct global_state {
   bool client_initialized;
   void *cookie;
   int flags;
   const struct virgl_renderer_callbacks *cbs;

   bool resource_initialized;
   bool context_initialized;
   bool winsys_initialized;
   bool vrend_initialized;
};

static struct global_state state;

/* new API - just wrap internal API for now */

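/* Common path for the two resource-create entry points below: translate the
 * public create args into vrend's argument struct, create the pipe resource
 * (optionally importing an EGLimage as its storage), and register it in the
 * global resource table under the caller-supplied handle.
 */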
static int virgl_renderer_resource_create_internal(struct virgl_renderer_resource_create_args *args,
                                                   UNUSED struct iovec *iov, UNUSED uint32_t num_iovs,
                                                   void *image)
{
   struct virgl_resource *res;
   struct pipe_resource *pipe_res;
   struct vrend_renderer_resource_create_args vrend_args = { 0 };

   /* do not accept handle 0 */
   if (args->handle == 0)
      return EINVAL;

   vrend_args.target = args->target;
   vrend_args.format = args->format;
   vrend_args.bind = args->bind;
   vrend_args.width = args->width;
   vrend_args.height = args->height;
   vrend_args.depth = args->depth;
   vrend_args.array_size = args->array_size;
   vrend_args.nr_samples = args->nr_samples;
   vrend_args.last_level = args->last_level;
   vrend_args.flags = args->flags;

   pipe_res = vrend_renderer_resource_create(&vrend_args, image);
   if (!pipe_res)
      return EINVAL;

   res = virgl_resource_create_from_pipe(args->handle, pipe_res, iov, num_iovs);
   if (!res) {
      vrend_renderer_resource_destroy((struct vrend_resource *)pipe_res);
      return -ENOMEM;
   }

   res->map_info = vrend_renderer_resource_get_map_info(pipe_res);

   return 0;
}

int virgl_renderer_resource_create(struct virgl_renderer_resource_create_args *args,
                                   struct iovec *iov, uint32_t num_iovs)
{
   TRACE_FUNC();
   return virgl_renderer_resource_create_internal(args, iov, num_iovs, NULL);
}

int virgl_renderer_resource_import_eglimage(struct virgl_renderer_resource_create_args *args, void *image)
{
   TRACE_FUNC();
   return virgl_renderer_resource_create_internal(args, NULL, 0, image);
}

void virgl_renderer_resource_set_priv(uint32_t res_handle, void *priv)
{
   struct virgl_resource *res = virgl_resource_lookup(res_handle);
   if (!res)
      return;

   res->private_data = priv;
}

void *virgl_renderer_resource_get_priv(uint32_t res_handle)
{
   struct virgl_resource *res = virgl_resource_lookup(res_handle);
   if (!res)
      return NULL;

   return res->private_data;
}

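/* virgl_context_foreach() callback used by virgl_renderer_resource_unref():
 * every context must drop its reference to the resource before the handle is
 * removed from the global table.
 */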
static bool detach_resource(struct virgl_context *ctx, void *data)
{
   struct virgl_resource *res = data;
   ctx->detach_resource(ctx, res);
   return true;
}

void virgl_renderer_resource_unref(uint32_t res_handle)
{
   struct virgl_resource *res = virgl_resource_lookup(res_handle);
   struct virgl_context_foreach_args args;

   if (!res)
      return;

   args.callback = detach_resource;
   args.data = res;
   virgl_context_foreach(&args);

   virgl_resource_remove(res->res_id);
}

void virgl_renderer_fill_caps(uint32_t set, uint32_t version,
                              void *caps)
{
   switch (set) {
   case VIRGL_RENDERER_CAPSET_VIRGL:
   case VIRGL_RENDERER_CAPSET_VIRGL2:
      vrend_renderer_fill_caps(set, version, (union virgl_caps *)caps);
      break;
   default:
      break;
   }
}

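/* Per-context fence retire hook installed on every context created below:
 * forwards the retired fence to the client via the write_context_fence
 * callback (requires callbacks version 3+, see the assert in
 * virgl_renderer_context_create_fence).
 */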
static void per_context_fence_retire(struct virgl_context *ctx,
                                     uint64_t queue_id,
                                     void *fence_cookie)
{
   state.cbs->write_context_fence(state.cookie,
                                  ctx->ctx_id,
                                  queue_id,
                                  fence_cookie);
}

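/* Create a context whose command format is selected by the capset id encoded
 * in the low bits of ctx_flags.  Calling this again for an existing ctx_id is
 * a no-op as long as the capset id matches; otherwise it fails with EINVAL.
 */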
int virgl_renderer_context_create_with_flags(uint32_t ctx_id,
                                             uint32_t ctx_flags,
                                             uint32_t nlen,
                                             const char *name)
{
   const enum virgl_renderer_capset capset_id =
      ctx_flags & VIRGL_RENDERER_CONTEXT_FLAG_CAPSET_ID_MASK;
   struct virgl_context *ctx;
   int ret;

   TRACE_FUNC();

   /* user context id must be greater than 0 */
   if (ctx_id == 0)
      return EINVAL;

   /* unsupported flags */
   if (ctx_flags & ~VIRGL_RENDERER_CONTEXT_FLAG_CAPSET_ID_MASK)
      return EINVAL;

   ctx = virgl_context_lookup(ctx_id);
   if (ctx) {
      return ctx->capset_id == capset_id ? 0 : EINVAL;
   }

   switch (capset_id) {
   case VIRGL_RENDERER_CAPSET_VIRGL:
   case VIRGL_RENDERER_CAPSET_VIRGL2:
      ctx = vrend_renderer_context_create(ctx_id, nlen, name);
      break;
   default:
      return EINVAL;
   }
   if (!ctx)
      return ENOMEM;

   ctx->ctx_id = ctx_id;
   ctx->capset_id = capset_id;
   ctx->fence_retire = per_context_fence_retire;

   ret = virgl_context_add(ctx);
   if (ret) {
      ctx->destroy(ctx);
      return ret;
   }

   return 0;
}

int virgl_renderer_context_create(uint32_t handle, uint32_t nlen, const char *name)
{
   return virgl_renderer_context_create_with_flags(handle,
                                                   VIRGL_RENDERER_CAPSET_VIRGL2,
                                                   nlen,
                                                   name);
}

void virgl_renderer_context_destroy(uint32_t handle)
{
   TRACE_FUNC();
   virgl_context_remove(handle);
}

int virgl_renderer_submit_cmd(void *buffer,
                              int ctx_id,
                              int ndw)
{
   TRACE_FUNC();
   struct virgl_context *ctx = virgl_context_lookup(ctx_id);
   if (!ctx)
      return EINVAL;
   return ctx->submit_cmd(ctx, buffer, sizeof(uint32_t) * ndw);
}

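/* Guest-to-host transfer.  With a non-zero ctx_id the transfer is routed
 * through that context's transfer_3d hook; with ctx_id == 0 it operates
 * directly on the resource's pipe resource (legacy/ctx0 path).  The read
 * variant below follows the same routing rules in the host-to-guest
 * direction.
 */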
int virgl_renderer_transfer_write_iov(uint32_t handle,
                                      uint32_t ctx_id,
                                      int level,
                                      uint32_t stride,
                                      uint32_t layer_stride,
                                      struct virgl_box *box,
                                      uint64_t offset,
                                      struct iovec *iovec,
                                      unsigned int iovec_cnt)
{
   TRACE_FUNC();

   struct virgl_resource *res = virgl_resource_lookup(handle);
   struct vrend_transfer_info transfer_info;

   if (!res)
      return EINVAL;

   transfer_info.level = level;
   transfer_info.stride = stride;
   transfer_info.layer_stride = layer_stride;
   transfer_info.box = (struct pipe_box *)box;
   transfer_info.offset = offset;
   transfer_info.iovec = iovec;
   transfer_info.iovec_cnt = iovec_cnt;
   transfer_info.synchronized = false;

   if (ctx_id) {
      struct virgl_context *ctx = virgl_context_lookup(ctx_id);
      if (!ctx)
         return EINVAL;

      return ctx->transfer_3d(ctx, res, &transfer_info,
                              VIRGL_TRANSFER_TO_HOST);
   } else {
      if (!res->pipe_resource)
         return EINVAL;

      return vrend_renderer_transfer_pipe(res->pipe_resource, &transfer_info,
                                          VIRGL_TRANSFER_TO_HOST);
   }
}

int virgl_renderer_transfer_read_iov(uint32_t handle, uint32_t ctx_id,
                                     uint32_t level, uint32_t stride,
                                     uint32_t layer_stride,
                                     struct virgl_box *box,
                                     uint64_t offset, struct iovec *iovec,
                                     int iovec_cnt)
{
   TRACE_FUNC();
   struct virgl_resource *res = virgl_resource_lookup(handle);
   struct vrend_transfer_info transfer_info;

   if (!res)
      return EINVAL;

   transfer_info.level = level;
   transfer_info.stride = stride;
   transfer_info.layer_stride = layer_stride;
   transfer_info.box = (struct pipe_box *)box;
   transfer_info.offset = offset;
   transfer_info.iovec = iovec;
   transfer_info.iovec_cnt = iovec_cnt;
   transfer_info.synchronized = false;

   if (ctx_id) {
      struct virgl_context *ctx = virgl_context_lookup(ctx_id);
      if (!ctx)
         return EINVAL;

      return ctx->transfer_3d(ctx, res, &transfer_info,
                              VIRGL_TRANSFER_FROM_HOST);
   } else {
      if (!res->pipe_resource)
         return EINVAL;

      return vrend_renderer_transfer_pipe(res->pipe_resource, &transfer_info,
                                          VIRGL_TRANSFER_FROM_HOST);
   }
}

int virgl_renderer_resource_attach_iov(int res_handle, struct iovec *iov,
                                       int num_iovs)
{
   TRACE_FUNC();
   struct virgl_resource *res = virgl_resource_lookup(res_handle);
   if (!res)
      return EINVAL;

   return virgl_resource_attach_iov(res, iov, num_iovs);
}

void virgl_renderer_resource_detach_iov(int res_handle, struct iovec **iov_p, int *num_iovs_p)
{
   TRACE_FUNC();
   struct virgl_resource *res = virgl_resource_lookup(res_handle);
   if (!res)
      return;

   if (iov_p)
      *iov_p = (struct iovec *)res->iov;
   if (num_iovs_p)
      *num_iovs_p = res->iov_count;

   virgl_resource_detach_iov(res);
}

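/* Fence submission.  virgl_renderer_create_fence() queues a fence on the
 * internal ctx0 timeline; virgl_renderer_context_create_fence() queues one on
 * a per-context queue and requires client callbacks version 3+ so the retire
 * can be delivered through write_context_fence.
 */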
int virgl_renderer_create_fence(int client_fence_id, UNUSED uint32_t ctx_id)
{
   TRACE_FUNC();
   const uint32_t fence_id = (uint32_t)client_fence_id;
   if (state.vrend_initialized)
      return vrend_renderer_create_ctx0_fence(fence_id);
   return EINVAL;
}

int virgl_renderer_context_create_fence(uint32_t ctx_id,
                                        uint32_t flags,
                                        uint64_t queue_id,
                                        void *fence_cookie)
{
   struct virgl_context *ctx = virgl_context_lookup(ctx_id);
   if (!ctx)
      return -EINVAL;

   assert(state.cbs->version >= 3 && state.cbs->write_context_fence);
   return ctx->submit_fence(ctx, flags, queue_id, fence_cookie);
}

void virgl_renderer_context_poll(uint32_t ctx_id)
{
   struct virgl_context *ctx = virgl_context_lookup(ctx_id);
   if (!ctx)
      return;

   ctx->retire_fences(ctx);
}

int virgl_renderer_context_get_poll_fd(uint32_t ctx_id)
{
   struct virgl_context *ctx = virgl_context_lookup(ctx_id);
   if (!ctx)
      return -1;

   return ctx->get_fencing_fd(ctx);
}

void virgl_renderer_force_ctx_0(void)
{
   TRACE_FUNC();
   vrend_renderer_force_ctx_0();
}

void virgl_renderer_ctx_attach_resource(int ctx_id, int res_handle)
{
   TRACE_FUNC();
   struct virgl_context *ctx = virgl_context_lookup(ctx_id);
   struct virgl_resource *res = virgl_resource_lookup(res_handle);
   if (!ctx || !res)
      return;
   ctx->attach_resource(ctx, res);
}

void virgl_renderer_ctx_detach_resource(int ctx_id, int res_handle)
{
   TRACE_FUNC();
   struct virgl_context *ctx = virgl_context_lookup(ctx_id);
   struct virgl_resource *res = virgl_resource_lookup(res_handle);
   if (!ctx || !res)
      return;
   ctx->detach_resource(ctx, res);
}

int virgl_renderer_resource_get_info(int res_handle,
                                     struct virgl_renderer_resource_info *info)
{
   TRACE_FUNC();
   struct virgl_resource *res = virgl_resource_lookup(res_handle);

   if (!res || !res->pipe_resource)
      return EINVAL;
   if (!info)
      return EINVAL;

   vrend_renderer_resource_get_info(res->pipe_resource,
                                    (struct vrend_renderer_resource_info *)info);
   info->handle = res_handle;

   if (state.winsys_initialized) {
      return vrend_winsys_get_fourcc_for_texture(info->tex_id,
                                                 info->virgl_format,
                                                 &info->drm_fourcc);
   }

   return 0;
}

void virgl_renderer_get_cap_set(uint32_t cap_set, uint32_t *max_ver,
                                uint32_t *max_size)
{
   TRACE_FUNC();
   switch (cap_set) {
   case VIRGL_RENDERER_CAPSET_VIRGL:
   case VIRGL_RENDERER_CAPSET_VIRGL2:
      vrend_renderer_get_cap_set(cap_set, max_ver, max_size);
      break;
   default:
      *max_ver = 0;
      *max_size = 0;
      break;
   }
}

void virgl_renderer_get_rect(int resource_id, struct iovec *iov, unsigned int num_iovs,
                             uint32_t offset, int x, int y, int width, int height)
{
   TRACE_FUNC();
   struct virgl_resource *res = virgl_resource_lookup(resource_id);
   if (!res || !res->pipe_resource)
      return;

   vrend_renderer_get_rect(res->pipe_resource, iov, num_iovs, offset, x, y,
                           width, height);
}

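/* Callbacks handed to vrend.  When no winsys (EGL/GLX) was initialized, GL
 * context management is delegated to the client's callbacks; ctx0 fence
 * retirement is always reported through the client's write_fence callback.
 */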
static void ctx0_fence_retire(void *fence_cookie,
                              UNUSED void *retire_data)
{
   const uint32_t fence_id = (uint32_t)(uintptr_t)fence_cookie;
   state.cbs->write_fence(state.cookie, fence_id);
}

static virgl_renderer_gl_context create_gl_context(int scanout_idx, struct virgl_gl_ctx_param *param)
{
   struct virgl_renderer_gl_ctx_param vparam;

   if (state.winsys_initialized)
      return vrend_winsys_create_context(param);

   vparam.version = 1;
   vparam.shared = param->shared;
   vparam.major_ver = param->major_ver;
   vparam.minor_ver = param->minor_ver;
   return state.cbs->create_gl_context(state.cookie, scanout_idx, &vparam);
}

static void destroy_gl_context(virgl_renderer_gl_context ctx)
{
   if (state.winsys_initialized) {
      vrend_winsys_destroy_context(ctx);
      return;
   }

   state.cbs->destroy_gl_context(state.cookie, ctx);
}

static int make_current(virgl_renderer_gl_context ctx)
{
   if (state.winsys_initialized)
      return vrend_winsys_make_context_current(ctx);

   return state.cbs->make_current(state.cookie, 0, ctx);
}

static const struct vrend_if_cbs vrend_cbs = {
   ctx0_fence_retire,
   create_gl_context,
   destroy_gl_context,
   make_current,
};

void *virgl_renderer_get_cursor_data(uint32_t resource_id, uint32_t *width, uint32_t *height)
{
   struct virgl_resource *res = virgl_resource_lookup(resource_id);
   if (!res || !res->pipe_resource)
      return NULL;

   vrend_renderer_force_ctx_0();
   return vrend_renderer_get_cursor_contents(res->pipe_resource,
                                             width,
                                             height);
}

void virgl_renderer_poll(void)
{
   TRACE_FUNC();
   if (state.vrend_initialized)
      vrend_renderer_check_fences();
}

void virgl_renderer_cleanup(UNUSED void *cookie)
{
   TRACE_FUNC();
   if (state.vrend_initialized)
      vrend_renderer_prepare_reset();

   if (state.context_initialized)
      virgl_context_table_cleanup();

   if (state.resource_initialized)
      virgl_resource_table_cleanup();

   if (state.vrend_initialized)
      vrend_renderer_fini();

   if (state.winsys_initialized)
      vrend_winsys_cleanup();

   memset(&state, 0, sizeof(state));
}

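/* Initialize the renderer.  Initialization is idempotent: repeated calls with
 * the same cookie/flags/cbs succeed, while a second client with different
 * arguments gets -EBUSY.  Each subsystem is brought up at most once; on
 * failure everything initialized so far is torn down via
 * virgl_renderer_cleanup().
 *
 * A minimal usage sketch from the embedder's side (the callback
 * implementations are hypothetical; only the callback members actually used
 * in this file are shown):
 *
 *    static struct virgl_renderer_callbacks my_cbs = {
 *       .version = 1,
 *       .write_fence = my_write_fence,
 *       .create_gl_context = my_create_gl_context,
 *       .destroy_gl_context = my_destroy_gl_context,
 *       .make_current = my_make_current,
 *    };
 *
 *    if (virgl_renderer_init(my_cookie, VIRGL_RENDERER_USE_EGL, &my_cbs))
 *       return error;
 *    virgl_renderer_context_create(1, strlen("guest"), "guest");
 *    ...
 *    virgl_renderer_cleanup(my_cookie);
 */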
int virgl_renderer_init(void *cookie, int flags, struct virgl_renderer_callbacks *cbs)
{
   TRACE_INIT();
   TRACE_FUNC();

   int ret;

   /* VIRGL_RENDERER_THREAD_SYNC is a hint and can be silently ignored */
   if (!has_eventfd() || getenv("VIRGL_DISABLE_MT"))
      flags &= ~VIRGL_RENDERER_THREAD_SYNC;

   if (state.client_initialized && (state.cookie != cookie ||
                                    state.flags != flags ||
                                    state.cbs != cbs))
      return -EBUSY;

   if (!state.client_initialized) {
      if (!cookie || !cbs)
         return -1;
      if (cbs->version < 1 || cbs->version > VIRGL_RENDERER_CALLBACKS_VERSION)
         return -1;

      state.cookie = cookie;
      state.flags = flags;
      state.cbs = cbs;
      state.client_initialized = true;
   }

   if (!state.resource_initialized) {
      ret = virgl_resource_table_init(vrend_renderer_get_pipe_callbacks());
      if (ret)
         goto fail;
      state.resource_initialized = true;
   }

   if (!state.context_initialized) {
      ret = virgl_context_table_init();
      if (ret)
         goto fail;
      state.context_initialized = true;
   }

   if (!state.winsys_initialized && (flags & (VIRGL_RENDERER_USE_EGL |
                                              VIRGL_RENDERER_USE_GLX))) {
      int drm_fd = -1;

      if (flags & VIRGL_RENDERER_USE_EGL) {
         if (cbs->version >= 2 && cbs->get_drm_fd)
            drm_fd = cbs->get_drm_fd(cookie);
      }

      ret = vrend_winsys_init(flags, drm_fd);
      if (ret) {
         if (drm_fd >= 0)
            close(drm_fd);
         goto fail;
      }
      state.winsys_initialized = true;
   }

   if (!state.vrend_initialized) {
      uint32_t renderer_flags = 0;

      if (flags & VIRGL_RENDERER_THREAD_SYNC)
         renderer_flags |= VREND_USE_THREAD_SYNC;
      if (flags & VIRGL_RENDERER_USE_EXTERNAL_BLOB)
         renderer_flags |= VREND_USE_EXTERNAL_BLOB;

      ret = vrend_renderer_init(&vrend_cbs, renderer_flags);
      if (ret)
         goto fail;
      state.vrend_initialized = true;
   }

   return 0;

fail:
   virgl_renderer_cleanup(NULL);
   return ret;
}

int virgl_renderer_get_fd_for_texture(uint32_t tex_id, int *fd)
{
   TRACE_FUNC();
   if (state.winsys_initialized)
      return vrend_winsys_get_fd_for_texture(tex_id, fd);
   return -1;
}

int virgl_renderer_get_fd_for_texture2(uint32_t tex_id, int *fd, int *stride, int *offset)
{
   TRACE_FUNC();
   if (state.winsys_initialized)
      return vrend_winsys_get_fd_for_texture2(tex_id, fd, stride, offset);
   return -1;
}

void virgl_renderer_reset(void)
{
   TRACE_FUNC();
   if (state.vrend_initialized)
      vrend_renderer_prepare_reset();

   if (state.context_initialized)
      virgl_context_table_reset();

   if (state.resource_initialized)
      virgl_resource_table_reset();

   if (state.vrend_initialized)
      vrend_renderer_reset();
}

int virgl_renderer_get_poll_fd(void)
{
   TRACE_FUNC();
   if (state.vrend_initialized)
      return vrend_renderer_get_poll_fd();

   return -1;
}

virgl_debug_callback_type virgl_set_debug_callback(virgl_debug_callback_type cb)
{
   return vrend_set_debug_callback(cb);
}

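/* VIRGL_RENDERER_STRUCTURE_TYPE_EXPORT_QUERY handler for
 * virgl_renderer_execute(): reports how a resource would be exported
 * (fd count, fourcc, strides/offsets, modifier).  Typed (pipe) resources are
 * handled by vrend; for untyped resources only a pure query (in_export_fds
 * unset) is answered, with placeholder values.
 */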
static int virgl_renderer_export_query(void *execute_args, uint32_t execute_size)
{
   struct virgl_resource *res;
   struct virgl_renderer_export_query *export_query = execute_args;
   if (execute_size != sizeof(struct virgl_renderer_export_query))
      return -EINVAL;

   if (export_query->hdr.size != sizeof(struct virgl_renderer_export_query))
      return -EINVAL;

   res = virgl_resource_lookup(export_query->in_resource_id);
   if (!res)
      return -EINVAL;

   if (res->pipe_resource) {
      return vrend_renderer_export_query(res->pipe_resource, export_query);
   } else if (!export_query->in_export_fds) {
      /* Untyped resources are expected to be exported with
       * virgl_renderer_resource_export_blob instead and have no type
       * information. But when this is called to query (in_export_fds is
       * false) an untyped resource, we should return sane values.
       */
      export_query->out_num_fds = 1;
      export_query->out_fourcc = 0;
      export_query->out_fds[0] = -1;
      export_query->out_strides[0] = 0;
      export_query->out_offsets[0] = 0;
      export_query->out_modifier = DRM_FORMAT_MOD_INVALID;
      return 0;
   } else {
      return -EINVAL;
   }
}

static int virgl_renderer_supported_structures(void *execute_args, uint32_t execute_size)
{
   struct virgl_renderer_supported_structures *supported_structures = execute_args;
   if (execute_size != sizeof(struct virgl_renderer_supported_structures))
      return -EINVAL;

   if (supported_structures->hdr.size != sizeof(struct virgl_renderer_supported_structures))
      return -EINVAL;

   if (supported_structures->in_stype_version == 0) {
      supported_structures->out_supported_structures_mask =
         VIRGL_RENDERER_STRUCTURE_TYPE_EXPORT_QUERY |
         VIRGL_RENDERER_STRUCTURE_TYPE_SUPPORTED_STRUCTURES;
   } else {
      supported_structures->out_supported_structures_mask = 0;
   }

   return 0;
}

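/* Generic "execute" entry point: dispatches on the structure type carried in
 * the virgl_renderer_hdr that prefixes every execute argument struct.
 */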
int virgl_renderer_execute(void *execute_args, uint32_t execute_size)
{
   TRACE_FUNC();
   struct virgl_renderer_hdr *hdr = execute_args;
   if (hdr->stype_version != 0)
      return -EINVAL;

   switch (hdr->stype) {
   case VIRGL_RENDERER_STRUCTURE_TYPE_SUPPORTED_STRUCTURES:
      return virgl_renderer_supported_structures(execute_args, execute_size);
   case VIRGL_RENDERER_STRUCTURE_TYPE_EXPORT_QUERY:
      return virgl_renderer_export_query(execute_args, execute_size);
   default:
      return -EINVAL;
   }
}

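/* Create a blob resource.  GUEST blobs are backed purely by the supplied
 * iovecs; HOST3D and HOST3D_GUEST blobs are backed by an object previously
 * created in the context under blob_id and retrieved via the context's
 * get_blob hook, which may hand back either an fd or a pipe resource.
 */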
int virgl_renderer_resource_create_blob(const struct virgl_renderer_resource_create_blob_args *args)
{
   TRACE_FUNC();
   struct virgl_resource *res;
   struct virgl_context *ctx;
   struct virgl_context_blob blob;
   bool has_host_storage;
   bool has_guest_storage;
   int ret;

   switch (args->blob_mem) {
   case VIRGL_RENDERER_BLOB_MEM_GUEST:
      has_host_storage = false;
      has_guest_storage = true;
      break;
   case VIRGL_RENDERER_BLOB_MEM_HOST3D:
      has_host_storage = true;
      has_guest_storage = false;
      break;
   case VIRGL_RENDERER_BLOB_MEM_HOST3D_GUEST:
      has_host_storage = true;
      has_guest_storage = true;
      break;
   default:
      return -EINVAL;
   }

   /* user resource id must be greater than 0 */
   if (args->res_handle == 0)
      return -EINVAL;

   if (args->size == 0)
      return -EINVAL;
   if (has_guest_storage) {
      const size_t iov_size = vrend_get_iovec_size(args->iovecs, args->num_iovs);
      if (iov_size < args->size)
         return -EINVAL;
   } else {
      if (args->num_iovs)
         return -EINVAL;
   }

   if (!has_host_storage) {
      res = virgl_resource_create_from_iov(args->res_handle,
                                           args->iovecs,
                                           args->num_iovs);
      if (!res)
         return -ENOMEM;

      res->map_info = VIRGL_RENDERER_MAP_CACHE_CACHED;
      return 0;
   }

   ctx = virgl_context_lookup(args->ctx_id);
   if (!ctx)
      return -EINVAL;

   ret = ctx->get_blob(ctx, args->blob_id, args->blob_flags, &blob);
   if (ret)
      return ret;

   if (blob.type != VIRGL_RESOURCE_FD_INVALID) {
      res = virgl_resource_create_from_fd(args->res_handle,
                                          blob.type,
                                          blob.u.fd,
                                          args->iovecs,
                                          args->num_iovs);
      if (!res) {
         close(blob.u.fd);
         return -ENOMEM;
      }
   } else {
      res = virgl_resource_create_from_pipe(args->res_handle,
                                            blob.u.pipe_resource,
                                            args->iovecs,
                                            args->num_iovs);
      if (!res) {
         vrend_renderer_resource_destroy((struct vrend_resource *)blob.u.pipe_resource);
         return -ENOMEM;
      }
   }

   res->map_info = blob.map_info;

   if (ctx->get_blob_done)
      ctx->get_blob_done(ctx, args->res_handle, &blob);

   return 0;
}

int virgl_renderer_resource_map(uint32_t res_handle, void **map, uint64_t *out_size)
{
   TRACE_FUNC();
   struct virgl_resource *res = virgl_resource_lookup(res_handle);
   if (!res || !res->pipe_resource)
      return -EINVAL;

   return vrend_renderer_resource_map(res->pipe_resource, map, out_size);
}

int virgl_renderer_resource_unmap(uint32_t res_handle)
{
   TRACE_FUNC();
   struct virgl_resource *res = virgl_resource_lookup(res_handle);
   if (!res || !res->pipe_resource)
      return -EINVAL;

   return vrend_renderer_resource_unmap(res->pipe_resource);
}

int virgl_renderer_resource_get_map_info(uint32_t res_handle, uint32_t *map_info)
{
   TRACE_FUNC();
   struct virgl_resource *res = virgl_resource_lookup(res_handle);
   if (!res)
      return -EINVAL;

   if ((res->map_info & VIRGL_RENDERER_MAP_CACHE_MASK) ==
       VIRGL_RENDERER_MAP_CACHE_NONE)
      return -EINVAL;

   *map_info = res->map_info;
   return 0;
}

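/* Export a blob resource as an fd.  *fd_type tells the caller whether it
 * received a dma-buf or an opaque handle.
 */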
int
virgl_renderer_resource_export_blob(uint32_t res_id, uint32_t *fd_type, int *fd)
{
   struct virgl_resource *res = virgl_resource_lookup(res_id);
   if (!res)
      return EINVAL;

   switch (virgl_resource_export_fd(res, fd)) {
   case VIRGL_RESOURCE_FD_DMABUF:
      *fd_type = VIRGL_RENDERER_BLOB_FD_TYPE_DMABUF;
      break;
   case VIRGL_RESOURCE_FD_OPAQUE:
      *fd_type = VIRGL_RENDERER_BLOB_FD_TYPE_OPAQUE;
      break;
   default:
      return EINVAL;
   }

   return 0;
}

int
virgl_renderer_export_fence(uint32_t client_fence_id, int *fd)
{
   TRACE_FUNC();
   return vrend_renderer_export_ctx0_fence(client_fence_id, fd);
}