/*
 * Copyright © 2013 Keith Packard
 * Copyright © 2015 Boyan Ding
 *
 * Permission to use, copy, modify, distribute, and sell this software and its
 * documentation for any purpose is hereby granted without fee, provided that
 * the above copyright notice appear in all copies and that both that copyright
 * notice and this permission notice appear in supporting documentation, and
 * that the name of the copyright holders not be used in advertising or
 * publicity pertaining to distribution of the software without specific,
 * written prior permission. The copyright holders make no representations
 * about the suitability of this software for any purpose. It is provided "as
 * is" without express or implied warranty.
 *
 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
 * OF THIS SOFTWARE.
 */

#include <fcntl.h>
#include <stdlib.h>
#include <unistd.h>

#include <X11/xshmfence.h>
#include <xcb/xcb.h>
#include <xcb/dri3.h>
#include <xcb/present.h>

#include <X11/Xlib-xcb.h>

#include "loader_dri3_helper.h"

/* From xmlpool/options.h, user exposed so should be stable */
#define DRI_CONF_VBLANK_NEVER 0
#define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
#define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
#define DRI_CONF_VBLANK_ALWAYS_SYNC 3

/**
 * A cached blit context.
 */
struct loader_dri3_blit_context {
   mtx_t mtx;
   __DRIcontext *ctx;
   __DRIscreen *cur_screen;
   const __DRIcoreExtension *core;
};

/* For simplicity we maintain the cache only for a single screen at a time */
static struct loader_dri3_blit_context blit_context = {
   _MTX_INITIALIZER_NP, NULL
};

static void
dri3_flush_present_events(struct loader_dri3_drawable *draw);

static struct loader_dri3_buffer *
dri3_find_back_alloc(struct loader_dri3_drawable *draw);

/**
 * Do we have blit functionality in the image blit extension?
 *
 * \param draw[in]  The drawable intended to blit from / to.
 * \return  true if we have blit functionality. false otherwise.
 */
static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
{
   return draw->ext->image->base.version >= 9 &&
          draw->ext->image->blitImage != NULL;
}

/**
 * Get and lock (for use with the current thread) a dri context associated
 * with the drawable's dri screen. The context is intended to be used with
 * the dri image extension's blitImage method.
 *
 * \param draw[in]  Pointer to the drawable whose dri screen we want a
 * dri context for.
 * \return A dri context or NULL if context creation failed.
 *
 * When the caller is done with the context (even if the context returned was
 * NULL), the caller must call loader_dri3_blit_context_put.
 */
static __DRIcontext *
loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
{
   mtx_lock(&blit_context.mtx);

   if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
      blit_context.core->destroyContext(blit_context.ctx);
      blit_context.ctx = NULL;
   }

   if (!blit_context.ctx) {
      blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
                                                           NULL, NULL, NULL);
      blit_context.cur_screen = draw->dri_screen;
      blit_context.core = draw->ext->core;
   }

   return blit_context.ctx;
}

/**
 * Release (for use with other threads) a dri context previously obtained using
 * loader_dri3_blit_context_get.
 */
static void
loader_dri3_blit_context_put(void)
{
   mtx_unlock(&blit_context.mtx);
}
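
/* Illustrative usage of the cached blit context (not called this way
 * verbatim anywhere; see loader_dri3_blit_image() for the real user):
 *
 *    __DRIcontext *ctx = loader_dri3_blit_context_get(draw);
 *    if (ctx)
 *       draw->ext->image->blitImage(ctx, dst, src, ...);
 *    loader_dri3_blit_context_put();
 *
 * loader_dri3_blit_context_put() must be called even when the returned
 * context is NULL, since the cache mutex is held either way.
 */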

/**
 * Blit (parts of) the contents of a DRI image to another dri image
 *
 * \param draw[in]  The drawable which owns the images.
 * \param dst[in]  The destination image.
 * \param src[in]  The source image.
 * \param dstx0[in]  Start destination coordinate.
 * \param dsty0[in]  Start destination coordinate.
 * \param width[in]  Blit width.
 * \param height[in]  Blit height.
 * \param srcx0[in]  Start source coordinate.
 * \param srcy0[in]  Start source coordinate.
 * \param flush_flag[in]  Image blit flush flag.
 * \return true iff successful.
 */
static bool
loader_dri3_blit_image(struct loader_dri3_drawable *draw,
                       __DRIimage *dst, __DRIimage *src,
                       int dstx0, int dsty0, int width, int height,
                       int srcx0, int srcy0, int flush_flag)
{
   __DRIcontext *dri_context;
   bool use_blit_context = false;

   if (!loader_dri3_have_image_blit(draw))
      return false;

   dri_context = draw->vtable->get_dri_context(draw);

   if (!dri_context || !draw->vtable->in_current_context(draw)) {
      dri_context = loader_dri3_blit_context_get(draw);
      use_blit_context = true;
      flush_flag |= __BLIT_FLAG_FLUSH;
   }

   if (dri_context)
      draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
                                  width, height, srcx0, srcy0,
                                  width, height, flush_flag);

   if (use_blit_context)
      loader_dri3_blit_context_put();

   return dri_context != NULL;
}

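/* Fence helpers. Each buffer carries two synchronization objects created from
 * the same file descriptor: an xshmfence (buffer->shm_fence) that the client
 * can reset, trigger and await directly, and an XCB sync fence
 * (buffer->sync_fence) through which the X server can trigger that same fence
 * once it is done with the buffer's pixmap.
 */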
static inline void
dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
{
   xshmfence_reset(buffer->shm_fence);
}

static inline void
dri3_fence_set(struct loader_dri3_buffer *buffer)
{
   xshmfence_trigger(buffer->shm_fence);
}

static inline void
dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
{
   xcb_sync_trigger_fence(c, buffer->sync_fence);
}

static inline void
dri3_fence_await(xcb_connection_t *c, struct loader_dri3_drawable *draw,
                 struct loader_dri3_buffer *buffer)
{
   xcb_flush(c);
   xshmfence_await(buffer->shm_fence);
   if (draw) {
      mtx_lock(&draw->mtx);
      dri3_flush_present_events(draw);
      mtx_unlock(&draw->mtx);
   }
}

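/* Pick the number of back buffers: three while the server is flipping, two
 * otherwise. The extra buffer when flipping helps avoid waiting on a buffer
 * that may still be in the flip chain or scanned out from (see the note in
 * dri3_get_buffer()).
 */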
static void
dri3_update_num_back(struct loader_dri3_drawable *draw)
{
   if (draw->flipping)
      draw->num_back = 3;
   else
      draw->num_back = 2;
}

void
loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
{
   draw->swap_interval = interval;
}

/** dri3_free_render_buffer
 *
 * Free everything associated with one render buffer including pixmap, fence
 * stuff and the driver image
 */
static void
dri3_free_render_buffer(struct loader_dri3_drawable *draw,
                        struct loader_dri3_buffer *buffer)
{
   if (buffer->own_pixmap)
      xcb_free_pixmap(draw->conn, buffer->pixmap);
   xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
   xshmfence_unmap_shm(buffer->shm_fence);
   draw->ext->image->destroyImage(buffer->image);
   if (buffer->linear_buffer)
      draw->ext->image->destroyImage(buffer->linear_buffer);
   free(buffer);
}

void
loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
{
   int i;

   draw->ext->core->destroyDrawable(draw->dri_drawable);

   for (i = 0; i < LOADER_DRI3_NUM_BUFFERS; i++) {
      if (draw->buffers[i])
         dri3_free_render_buffer(draw, draw->buffers[i]);
   }

   if (draw->special_event) {
      xcb_void_cookie_t cookie =
         xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
                                          XCB_PRESENT_EVENT_MASK_NO_EVENT);

      xcb_discard_reply(draw->conn, cookie.sequence);
      xcb_unregister_for_special_event(draw->conn, draw->special_event);
   }

   cnd_destroy(&draw->event_cnd);
   mtx_destroy(&draw->mtx);
}

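/** loader_dri3_drawable_init
 *
 * Initialize a loader_dri3_drawable: create the driver-side drawable, query
 * the initial geometry and derive the default swap interval from the
 * vblank_mode driconf option.
 *
 * \return 0 on success, 1 on failure.
 */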
int
loader_dri3_drawable_init(xcb_connection_t *conn,
                          xcb_drawable_t drawable,
                          __DRIscreen *dri_screen,
                          bool is_different_gpu,
                          const __DRIconfig *dri_config,
                          struct loader_dri3_extensions *ext,
                          const struct loader_dri3_vtable *vtable,
                          struct loader_dri3_drawable *draw)
{
   xcb_get_geometry_cookie_t cookie;
   xcb_get_geometry_reply_t *reply;
   xcb_generic_error_t *error;
   GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
   int swap_interval;

   draw->conn = conn;
   draw->ext = ext;
   draw->vtable = vtable;
   draw->drawable = drawable;
   draw->dri_screen = dri_screen;
   draw->is_different_gpu = is_different_gpu;

   draw->have_back = 0;
   draw->have_fake_front = 0;
   draw->first_init = true;

   draw->cur_blit_source = -1;
   draw->back_format = __DRI_IMAGE_FORMAT_NONE;
   mtx_init(&draw->mtx, mtx_plain);
   cnd_init(&draw->event_cnd);

   if (draw->ext->config)
      draw->ext->config->configQueryi(draw->dri_screen,
                                      "vblank_mode", &vblank_mode);

   switch (vblank_mode) {
   case DRI_CONF_VBLANK_NEVER:
   case DRI_CONF_VBLANK_DEF_INTERVAL_0:
      swap_interval = 0;
      break;
   case DRI_CONF_VBLANK_DEF_INTERVAL_1:
   case DRI_CONF_VBLANK_ALWAYS_SYNC:
   default:
      swap_interval = 1;
      break;
   }
   draw->swap_interval = swap_interval;

   dri3_update_num_back(draw);

   /* Create a new drawable */
   draw->dri_drawable =
      draw->ext->image_driver->createNewDrawable(dri_screen,
                                                 dri_config,
                                                 draw);

   if (!draw->dri_drawable)
      return 1;

   cookie = xcb_get_geometry(draw->conn, draw->drawable);
   reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
   if (reply == NULL || error != NULL) {
      draw->ext->core->destroyDrawable(draw->dri_drawable);
      return 1;
   }

   draw->width = reply->width;
   draw->height = reply->height;
   draw->depth = reply->depth;
   draw->vtable->set_drawable_size(draw, draw->width, draw->height);
   free(reply);

   draw->swap_method = __DRI_ATTRIB_SWAP_UNDEFINED;
   if (draw->ext->core->base.version >= 2) {
      (void) draw->ext->core->getConfigAttrib(dri_config,
                                              __DRI_ATTRIB_SWAP_METHOD,
                                              &draw->swap_method);
   }

   /*
    * Make sure server has the same swap interval we do for the new
    * drawable.
    */
   loader_dri3_set_swap_interval(draw, swap_interval);

   return 0;
}

/*
 * Process one Present event
 */
static void
dri3_handle_present_event(struct loader_dri3_drawable *draw,
                          xcb_present_generic_event_t *ge)
{
   switch (ge->evtype) {
   case XCB_PRESENT_CONFIGURE_NOTIFY: {
      xcb_present_configure_notify_event_t *ce = (void *) ge;

      draw->width = ce->width;
      draw->height = ce->height;
      draw->vtable->set_drawable_size(draw, draw->width, draw->height);
      draw->ext->flush->invalidate(draw->dri_drawable);
      break;
   }
   case XCB_PRESENT_COMPLETE_NOTIFY: {
      xcb_present_complete_notify_event_t *ce = (void *) ge;

      /* Compute the processed SBC number from the received 32-bit serial number
       * merged with the upper 32-bits of the sent 64-bit serial number while
       * checking for wrap.
       */
      if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
         draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
         if (draw->recv_sbc > draw->send_sbc)
            draw->recv_sbc -= 0x100000000;
         switch (ce->mode) {
         case XCB_PRESENT_COMPLETE_MODE_FLIP:
            draw->flipping = true;
            break;
         case XCB_PRESENT_COMPLETE_MODE_COPY:
            draw->flipping = false;
            break;
         }

         if (draw->vtable->show_fps)
            draw->vtable->show_fps(draw, ce->ust);

         draw->ust = ce->ust;
         draw->msc = ce->msc;
      } else if (ce->serial == draw->eid) {
         draw->notify_ust = ce->ust;
         draw->notify_msc = ce->msc;
      }
      break;
   }
   case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
      xcb_present_idle_notify_event_t *ie = (void *) ge;
      int b;

      for (b = 0; b < sizeof(draw->buffers) / sizeof(draw->buffers[0]); b++) {
         struct loader_dri3_buffer *buf = draw->buffers[b];

         if (buf && buf->pixmap == ie->pixmap)
            buf->busy = 0;

         if (buf && draw->num_back <= b && b < LOADER_DRI3_MAX_BACK &&
             draw->cur_blit_source != b &&
             !buf->busy) {
            dri3_free_render_buffer(draw, buf);
            draw->buffers[b] = NULL;
         }
      }
      break;
   }
   }
   free(ge);
}

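/* Wait for and process one Present special event. Must be called with
 * draw->mtx held. Only one thread blocks in xcb_wait_for_special_event() at a
 * time; other callers sleep on draw->event_cnd and return once the waiter has
 * handled an event, so that they can re-test the protected state.
 *
 * Returns false if no event could be retrieved.
 */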
static bool
dri3_wait_for_event_locked(struct loader_dri3_drawable *draw)
{
   xcb_generic_event_t *ev;
   xcb_present_generic_event_t *ge;

   xcb_flush(draw->conn);

   /* Only have one thread waiting for events at a time */
   if (draw->has_event_waiter) {
      cnd_wait(&draw->event_cnd, &draw->mtx);
      /* Another thread has updated the protected info, so retest. */
      return true;
   } else {
      draw->has_event_waiter = true;
      /* Allow other threads access to the drawable while we're waiting. */
      mtx_unlock(&draw->mtx);
      ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
      mtx_lock(&draw->mtx);
      draw->has_event_waiter = false;
      cnd_broadcast(&draw->event_cnd);
   }
   if (!ev)
      return false;
   ge = (void *) ev;
   dri3_handle_present_event(draw, ge);
   return true;
}

/** loader_dri3_wait_for_msc
 *
 * Get the X server to send an event when the target msc/divisor/remainder is
 * reached.
 */
bool
loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
                         int64_t target_msc,
                         int64_t divisor, int64_t remainder,
                         int64_t *ust, int64_t *msc, int64_t *sbc)
{
   xcb_void_cookie_t cookie = xcb_present_notify_msc(draw->conn,
                                                     draw->drawable,
                                                     draw->eid,
                                                     target_msc,
                                                     divisor,
                                                     remainder);
   xcb_generic_event_t *ev;
   unsigned full_sequence;

   mtx_lock(&draw->mtx);
   xcb_flush(draw->conn);

   /* Wait for the event */
   do {
      ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
      if (!ev) {
         mtx_unlock(&draw->mtx);
         return false;
      }

      full_sequence = ev->full_sequence;
      dri3_handle_present_event(draw, (void *) ev);
   } while (full_sequence != cookie.sequence || draw->notify_msc < target_msc);

   *ust = draw->notify_ust;
   *msc = draw->notify_msc;
   *sbc = draw->recv_sbc;
   mtx_unlock(&draw->mtx);

   return true;
}

/** loader_dri3_wait_for_sbc
 *
 * Wait for the completed swap buffer count to reach the specified
 * target. Presumably the application knows that this will be reached with
 * outstanding complete events, or we're going to be here awhile.
 */
int
loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
                         int64_t target_sbc, int64_t *ust,
                         int64_t *msc, int64_t *sbc)
{
   /* From the GLX_OML_sync_control spec:
    *
    *     "If <target_sbc> = 0, the function will block until all previous
    *      swaps requested with glXSwapBuffersMscOML for that window have
    *      completed."
    */
   mtx_lock(&draw->mtx);
   if (!target_sbc)
      target_sbc = draw->send_sbc;

   while (draw->recv_sbc < target_sbc) {
      if (!dri3_wait_for_event_locked(draw)) {
         mtx_unlock(&draw->mtx);
         return 0;
      }
   }

   *ust = draw->ust;
   *msc = draw->msc;
   *sbc = draw->recv_sbc;
   mtx_unlock(&draw->mtx);
   return 1;
}

/** dri3_find_back
 *
 * Find an idle back buffer. If there isn't one, then
 * wait for a present idle notify event from the X server
 */
static int
dri3_find_back(struct loader_dri3_drawable *draw)
{
   int b;
   int num_to_consider;

   mtx_lock(&draw->mtx);
   /* Increase the likelihood of reusing current buffer */
   dri3_flush_present_events(draw);

   /* Check whether we need to reuse the current back buffer as new back.
    * In that case, wait until it's not busy anymore.
    */
   dri3_update_num_back(draw);
   num_to_consider = draw->num_back;
   if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1) {
      num_to_consider = 1;
      draw->cur_blit_source = -1;
   }

   for (;;) {
      for (b = 0; b < num_to_consider; b++) {
         int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
         struct loader_dri3_buffer *buffer = draw->buffers[id];

         if (!buffer || !buffer->busy) {
            draw->cur_back = id;
            mtx_unlock(&draw->mtx);
            return id;
         }
      }
      if (!dri3_wait_for_event_locked(draw)) {
         mtx_unlock(&draw->mtx);
         return -1;
      }
   }
}

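/* Lazily create, and cache in the drawable, a graphics context for core X
 * copies, with graphics exposures disabled.
 */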
static xcb_gcontext_t
dri3_drawable_gc(struct loader_dri3_drawable *draw)
{
   if (!draw->gc) {
      uint32_t v = 0;
      xcb_create_gc(draw->conn,
                    (draw->gc = xcb_generate_id(draw->conn)),
                    draw->drawable,
                    XCB_GC_GRAPHICS_EXPOSURES,
                    &v);
   }
   return draw->gc;
}


static struct loader_dri3_buffer *
dri3_back_buffer(struct loader_dri3_drawable *draw)
{
   return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
}

static struct loader_dri3_buffer *
dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
{
   return draw->buffers[LOADER_DRI3_FRONT_ID];
}

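/* Issue a core X CopyArea request using the checked variant and discard the
 * reply, so that any resulting error is silently dropped.
 */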
static void
dri3_copy_area(xcb_connection_t *c,
               xcb_drawable_t src_drawable,
               xcb_drawable_t dst_drawable,
               xcb_gcontext_t gc,
               int16_t src_x,
               int16_t src_y,
               int16_t dst_x,
               int16_t dst_y,
               uint16_t width,
               uint16_t height)
{
   xcb_void_cookie_t cookie;

   cookie = xcb_copy_area_checked(c,
                                  src_drawable,
                                  dst_drawable,
                                  gc,
                                  src_x,
                                  src_y,
                                  dst_x,
                                  dst_y,
                                  width,
                                  height);
   xcb_discard_reply(c, cookie.sequence);
}

/**
 * Asks the driver to flush any queued work necessary for serializing with the
 * X command stream, and optionally the slightly more strict requirement of
 * glFlush() equivalence (which would require flushing even if nothing had
 * been drawn to a window system framebuffer, for example).
 */
void
loader_dri3_flush(struct loader_dri3_drawable *draw,
                  unsigned flags,
                  enum __DRI2throttleReason throttle_reason)
{
   /* NEED TO CHECK WHETHER CONTEXT IS NULL */
   __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);

   if (dri_context) {
      draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
                                         flags, throttle_reason);
   }
}

void
loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
                            int x, int y,
                            int width, int height,
                            bool flush)
{
   struct loader_dri3_buffer *back;
   unsigned flags = __DRI2_FLUSH_DRAWABLE;

   /* Check we have the right attachments */
   if (!draw->have_back || draw->is_pixmap)
      return;

   if (flush)
      flags |= __DRI2_FLUSH_CONTEXT;
   loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);

   back = dri3_find_back_alloc(draw);
   if (!back)
      return;

   y = draw->height - y - height;

   if (draw->is_different_gpu) {
      /* Update the linear buffer part of the back buffer
       * for the dri3_copy_area operation
       */
      (void) loader_dri3_blit_image(draw,
                                    back->linear_buffer,
                                    back->image,
                                    0, 0, back->width, back->height,
                                    0, 0, __BLIT_FLAG_FLUSH);
   }

   loader_dri3_swapbuffer_barrier(draw);
   dri3_fence_reset(draw->conn, back);
   dri3_copy_area(draw->conn,
                  back->pixmap,
                  draw->drawable,
                  dri3_drawable_gc(draw),
                  x, y, x, y, width, height);
   dri3_fence_trigger(draw->conn, back);
   /* Refresh the fake front (if present) after we just damaged the real
    * front.
    */
   if (draw->have_fake_front &&
       !loader_dri3_blit_image(draw,
                               dri3_fake_front_buffer(draw)->image,
                               back->image,
                               x, y, width, height,
                               x, y, __BLIT_FLAG_FLUSH) &&
       !draw->is_different_gpu) {
      dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
      dri3_copy_area(draw->conn,
                     back->pixmap,
                     dri3_fake_front_buffer(draw)->pixmap,
                     dri3_drawable_gc(draw),
                     x, y, x, y, width, height);
      dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
      dri3_fence_await(draw->conn, NULL, dri3_fake_front_buffer(draw));
   }
   dri3_fence_await(draw->conn, draw, back);
}

void
loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
                          xcb_drawable_t dest,
                          xcb_drawable_t src)
{
   loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);

   dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
   dri3_copy_area(draw->conn,
                  src, dest,
                  dri3_drawable_gc(draw),
                  0, 0, 0, 0, draw->width, draw->height);
   dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
   dri3_fence_await(draw->conn, draw, dri3_fake_front_buffer(draw));
}

void
loader_dri3_wait_x(struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *front;

   if (draw == NULL || !draw->have_fake_front)
      return;

   front = dri3_fake_front_buffer(draw);

   loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);

   /* In the psc->is_different_gpu case, the linear buffer has been updated,
    * but not yet the tiled buffer.
    * Copy back to the tiled buffer we use for rendering.
    * Note that we don't need flushing.
    */
   if (draw->is_different_gpu)
      (void) loader_dri3_blit_image(draw,
                                    front->image,
                                    front->linear_buffer,
                                    0, 0, front->width, front->height,
                                    0, 0, 0);
}

void
loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *front;

   if (draw == NULL || !draw->have_fake_front)
      return;

   front = dri3_fake_front_buffer(draw);

   /* In the psc->is_different_gpu case, we update the linear_buffer
    * before updating the real front.
    */
   if (draw->is_different_gpu)
      (void) loader_dri3_blit_image(draw,
                                    front->linear_buffer,
                                    front->image,
                                    0, 0, front->width, front->height,
                                    0, 0, __BLIT_FLAG_FLUSH);
   loader_dri3_swapbuffer_barrier(draw);
   loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
}

/** dri3_flush_present_events
 *
 * Process any present events that have been received from the X server
 */
static void
dri3_flush_present_events(struct loader_dri3_drawable *draw)
{
   /* Check to see if any configuration changes have occurred
    * since we were last invoked
    */
   if (draw->has_event_waiter)
      return;

   if (draw->special_event) {
      xcb_generic_event_t *ev;

      while ((ev = xcb_poll_for_special_event(draw->conn,
                                              draw->special_event)) != NULL) {
         xcb_present_generic_event_t *ge = (void *) ev;
         dri3_handle_present_event(draw, ge);
      }
   }
}

/** loader_dri3_swap_buffers_msc
 *
 * Make the current back buffer visible using the present extension
 */
int64_t
loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
                             int64_t target_msc, int64_t divisor,
                             int64_t remainder, unsigned flush_flags,
                             bool force_copy)
{
   struct loader_dri3_buffer *back;
   int64_t ret = 0;
   uint32_t options = XCB_PRESENT_OPTION_NONE;

   draw->vtable->flush_drawable(draw, flush_flags);

   back = dri3_find_back_alloc(draw);

   mtx_lock(&draw->mtx);
   if (draw->is_different_gpu && back) {
      /* Update the linear buffer before presenting the pixmap */
      (void) loader_dri3_blit_image(draw,
                                    back->linear_buffer,
                                    back->image,
                                    0, 0, back->width, back->height,
                                    0, 0, __BLIT_FLAG_FLUSH);
   }

   /* If we need to preload the new back buffer, remember the source.
    * The force_copy parameter is used by EGL to attempt to preserve
    * the back buffer across a call to this function.
    */
   if (draw->swap_method != __DRI_ATTRIB_SWAP_UNDEFINED || force_copy)
      draw->cur_blit_source = LOADER_DRI3_BACK_ID(draw->cur_back);

   /* Exchange the back and fake front. Even though the server knows about these
    * buffers, it has no notion of back and fake front.
    */
   if (back && draw->have_fake_front) {
      struct loader_dri3_buffer *tmp;

      tmp = dri3_fake_front_buffer(draw);
      draw->buffers[LOADER_DRI3_FRONT_ID] = back;
      draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)] = tmp;

      if (draw->swap_method == __DRI_ATTRIB_SWAP_COPY || force_copy)
         draw->cur_blit_source = LOADER_DRI3_FRONT_ID;
   }

   dri3_flush_present_events(draw);

   if (back && !draw->is_pixmap) {
      dri3_fence_reset(draw->conn, back);

      /* Compute when we want the frame shown by taking the last known
       * successful MSC and adding in a swap interval for each outstanding swap
       * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
       * semantic"
       */
      ++draw->send_sbc;
      if (target_msc == 0 && divisor == 0 && remainder == 0)
         target_msc = draw->msc + draw->swap_interval *
                      (draw->send_sbc - draw->recv_sbc);
      else if (divisor == 0 && remainder > 0) {
         /* From the GLX_OML_sync_control spec:
          *     "If <divisor> = 0, the swap will occur when MSC becomes
          *      greater than or equal to <target_msc>."
          *
          * Note that there's no mention of the remainder. The Present
          * extension throws BadValue for remainder != 0 with divisor == 0, so
          * just drop the passed in value.
          */
         remainder = 0;
      }

      /* From the GLX_EXT_swap_control spec
       * and the EGL 1.4 spec (page 53):
       *
       *     "If <interval> is set to a value of 0, buffer swaps are not
       *      synchronized to a video frame."
       *
       * Implementation note: It is possible to enable triple buffering
       * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
       * the default.
       */
      if (draw->swap_interval == 0)
         options |= XCB_PRESENT_OPTION_ASYNC;

      /* If we need to populate the new back, but need to reuse the back
       * buffer slot due to lack of local blit capabilities, make sure
       * the server doesn't flip and we deadlock.
       */
      if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1)
         options |= XCB_PRESENT_OPTION_COPY;

      back->busy = 1;
      back->last_swap = draw->send_sbc;
      xcb_present_pixmap(draw->conn,
                         draw->drawable,
                         back->pixmap,
                         (uint32_t) draw->send_sbc,
                         0,                                    /* valid */
                         0,                                    /* update */
                         0,                                    /* x_off */
                         0,                                    /* y_off */
                         None,                                 /* target_crtc */
                         None,
                         back->sync_fence,
                         options,
                         target_msc,
                         divisor,
                         remainder, 0, NULL);
      ret = (int64_t) draw->send_sbc;

      /* Schedule a server-side back-preserving blit if necessary.
       * This happens iff all conditions below are satisfied:
       * a) We have a fake front,
       * b) We need to preserve the back buffer,
       * c) We don't have local blit capabilities.
       */
      if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1 &&
          draw->cur_blit_source != LOADER_DRI3_BACK_ID(draw->cur_back)) {
         struct loader_dri3_buffer *new_back = dri3_back_buffer(draw);
         struct loader_dri3_buffer *src = draw->buffers[draw->cur_blit_source];

         dri3_fence_reset(draw->conn, new_back);
         dri3_copy_area(draw->conn, src->pixmap,
                        new_back->pixmap,
                        dri3_drawable_gc(draw),
                        0, 0, 0, 0, draw->width, draw->height);
         dri3_fence_trigger(draw->conn, new_back);
         new_back->last_swap = src->last_swap;
      }

      xcb_flush(draw->conn);
      if (draw->stamp)
         ++(*draw->stamp);
   }
   mtx_unlock(&draw->mtx);

   draw->ext->flush->invalidate(draw->dri_drawable);

   return ret;
}

int
loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *back = dri3_find_back_alloc(draw);
   int ret;

   mtx_lock(&draw->mtx);
   ret = (!back || back->last_swap == 0) ? 0 :
      draw->send_sbc - back->last_swap + 1;
   mtx_unlock(&draw->mtx);

   return ret;
}

/** loader_dri3_open
 *
 * Wrapper around xcb_dri3_open
 */
int
loader_dri3_open(xcb_connection_t *conn,
                 xcb_window_t root,
                 uint32_t provider)
{
   xcb_dri3_open_cookie_t cookie;
   xcb_dri3_open_reply_t *reply;
   int fd;

   cookie = xcb_dri3_open(conn,
                          root,
                          provider);

   reply = xcb_dri3_open_reply(conn, cookie, NULL);
   if (!reply)
      return -1;

   if (reply->nfd != 1) {
      free(reply);
      return -1;
   }

   fd = xcb_dri3_open_reply_fds(conn, reply)[0];
   free(reply);
   fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);

   return fd;
}

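/* Bytes per pixel for a __DRI_IMAGE_FORMAT code; returns 0 for
 * __DRI_IMAGE_FORMAT_NONE and unknown formats.
 */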
static uint32_t
dri3_cpp_for_format(uint32_t format) {
   switch (format) {
   case __DRI_IMAGE_FORMAT_R8:
      return 1;
   case __DRI_IMAGE_FORMAT_RGB565:
   case __DRI_IMAGE_FORMAT_GR88:
      return 2;
   case __DRI_IMAGE_FORMAT_XRGB8888:
   case __DRI_IMAGE_FORMAT_ARGB8888:
   case __DRI_IMAGE_FORMAT_ABGR8888:
   case __DRI_IMAGE_FORMAT_XBGR8888:
   case __DRI_IMAGE_FORMAT_XRGB2101010:
   case __DRI_IMAGE_FORMAT_ARGB2101010:
   case __DRI_IMAGE_FORMAT_SARGB8:
      return 4;
   case __DRI_IMAGE_FORMAT_NONE:
   default:
      return 0;
   }
}

/* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
 */
static int
image_format_to_fourcc(int format)
{

   /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
   switch (format) {
   case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
   case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
   case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
   case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
   case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
   case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
   case __DRI_IMAGE_FORMAT_XRGB2101010: return __DRI_IMAGE_FOURCC_XRGB2101010;
   case __DRI_IMAGE_FORMAT_ARGB2101010: return __DRI_IMAGE_FOURCC_ARGB2101010;
   }
   return 0;
}

/** dri3_alloc_render_buffer
 *
 * Use the driver createImage function to construct a __DRIimage, then
 * get a file descriptor for that and create an X pixmap from that
 *
 * Allocate an xshmfence for synchronization
 */
static struct loader_dri3_buffer *
dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
                         int width, int height, int depth)
{
   struct loader_dri3_buffer *buffer;
   __DRIimage *pixmap_buffer;
   xcb_pixmap_t pixmap;
   xcb_sync_fence_t sync_fence;
   struct xshmfence *shm_fence;
   int buffer_fd, fence_fd;
   int stride;

   /* Create an xshmfence object and
    * prepare to send that to the X server
    */

   fence_fd = xshmfence_alloc_shm();
   if (fence_fd < 0)
      return NULL;

   shm_fence = xshmfence_map_shm(fence_fd);
   if (shm_fence == NULL)
      goto no_shm_fence;

   /* Allocate the image from the driver
    */
   buffer = calloc(1, sizeof *buffer);
   if (!buffer)
      goto no_buffer;

   buffer->cpp = dri3_cpp_for_format(format);
   if (!buffer->cpp)
      goto no_image;

   if (!draw->is_different_gpu) {
      buffer->image = draw->ext->image->createImage(draw->dri_screen,
                                                    width, height,
                                                    format,
                                                    __DRI_IMAGE_USE_SHARE |
                                                    __DRI_IMAGE_USE_SCANOUT |
                                                    __DRI_IMAGE_USE_BACKBUFFER,
                                                    buffer);
      pixmap_buffer = buffer->image;

      if (!buffer->image)
         goto no_image;
   } else {
      buffer->image = draw->ext->image->createImage(draw->dri_screen,
                                                    width, height,
                                                    format,
                                                    0,
                                                    buffer);

      if (!buffer->image)
         goto no_image;

      buffer->linear_buffer =
         draw->ext->image->createImage(draw->dri_screen,
                                       width, height, format,
                                       __DRI_IMAGE_USE_SHARE |
                                       __DRI_IMAGE_USE_LINEAR |
                                       __DRI_IMAGE_USE_BACKBUFFER,
                                       buffer);
      pixmap_buffer = buffer->linear_buffer;

      if (!buffer->linear_buffer)
         goto no_linear_buffer;
   }

   /* X wants the stride, so ask the image for it
    */
   if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_STRIDE,
                                     &stride))
      goto no_buffer_attrib;

   buffer->pitch = stride;

   if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_FD,
                                     &buffer_fd))
      goto no_buffer_attrib;

   xcb_dri3_pixmap_from_buffer(draw->conn,
                               (pixmap = xcb_generate_id(draw->conn)),
                               draw->drawable,
                               buffer->size,
                               width, height, buffer->pitch,
                               depth, buffer->cpp * 8,
                               buffer_fd);

   xcb_dri3_fence_from_fd(draw->conn,
                          pixmap,
                          (sync_fence = xcb_generate_id(draw->conn)),
                          false,
                          fence_fd);

   buffer->pixmap = pixmap;
   buffer->own_pixmap = true;
   buffer->sync_fence = sync_fence;
   buffer->shm_fence = shm_fence;
   buffer->width = width;
   buffer->height = height;

   /* Mark the buffer as idle
    */
   dri3_fence_set(buffer);

   return buffer;

no_buffer_attrib:
   draw->ext->image->destroyImage(pixmap_buffer);
no_linear_buffer:
   if (draw->is_different_gpu)
      draw->ext->image->destroyImage(buffer->image);
no_image:
   free(buffer);
no_buffer:
   xshmfence_unmap_shm(shm_fence);
no_shm_fence:
   close(fence_fd);
   return NULL;
}

/** dri3_update_drawable
 *
 * Called the first time we use the drawable and then
 * after we receive present configure notify events to
 * track the geometry of the drawable
 */
static int
dri3_update_drawable(__DRIdrawable *driDrawable,
                     struct loader_dri3_drawable *draw)
{
   mtx_lock(&draw->mtx);
   if (draw->first_init) {
      xcb_get_geometry_cookie_t geom_cookie;
      xcb_get_geometry_reply_t *geom_reply;
      xcb_void_cookie_t cookie;
      xcb_generic_error_t *error;
      xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
      xcb_present_query_capabilities_reply_t *present_capabilities_reply;

      draw->first_init = false;

      /* Try to select for input on the window.
       *
       * If the drawable is a window, this will get our events
       * delivered.
       *
       * Otherwise, we'll get a BadWindow error back from this request which
       * will let us know that the drawable is a pixmap instead.
       */

      draw->eid = xcb_generate_id(draw->conn);
      cookie =
         xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
                                          XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
                                          XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
                                          XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);

      present_capabilities_cookie =
         xcb_present_query_capabilities(draw->conn, draw->drawable);

      /* Create an XCB event queue to hold present events outside of the usual
       * application event queue
       */
      draw->special_event = xcb_register_for_special_xge(draw->conn,
                                                         &xcb_present_id,
                                                         draw->eid,
                                                         draw->stamp);
      geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);

      geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);

      if (!geom_reply) {
         mtx_unlock(&draw->mtx);
         return false;
      }

      draw->width = geom_reply->width;
      draw->height = geom_reply->height;
      draw->depth = geom_reply->depth;
      draw->vtable->set_drawable_size(draw, draw->width, draw->height);

      free(geom_reply);

      draw->is_pixmap = false;

      /* Check to see if our select input call failed. If it failed with a
       * BadWindow error, then assume the drawable is a pixmap. Destroy the
       * special event queue created above and mark the drawable as a pixmap
       */

      error = xcb_request_check(draw->conn, cookie);

      present_capabilities_reply =
         xcb_present_query_capabilities_reply(draw->conn,
                                              present_capabilities_cookie,
                                              NULL);

      if (present_capabilities_reply) {
         draw->present_capabilities = present_capabilities_reply->capabilities;
         free(present_capabilities_reply);
      } else
         draw->present_capabilities = 0;

      if (error) {
         if (error->error_code != BadWindow) {
            free(error);
            mtx_unlock(&draw->mtx);
            return false;
         }
         draw->is_pixmap = true;
         xcb_unregister_for_special_event(draw->conn, draw->special_event);
         draw->special_event = NULL;
      }
   }
   dri3_flush_present_events(draw);
   mtx_unlock(&draw->mtx);
   return true;
}

__DRIimage *
loader_dri3_create_image(xcb_connection_t *c,
                         xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
                         unsigned int format,
                         __DRIscreen *dri_screen,
                         const __DRIimageExtension *image,
                         void *loaderPrivate)
{
   int *fds;
   __DRIimage *image_planar, *ret;
   int stride, offset;

   /* Get an FD for the pixmap object
    */
   fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);

   stride = bp_reply->stride;
   offset = 0;

   /* createImageFromFds creates a wrapper __DRIimage structure which
    * can deal with multiple planes for things like Yuv images. So, once
    * we've gotten the planar wrapper, pull the single plane out of it and
    * discard the wrapper.
    */
   image_planar = image->createImageFromFds(dri_screen,
                                            bp_reply->width,
                                            bp_reply->height,
                                            image_format_to_fourcc(format),
                                            fds, 1,
                                            &stride, &offset, loaderPrivate);
   close(fds[0]);
   if (!image_planar)
      return NULL;

   ret = image->fromPlanar(image_planar, 0, loaderPrivate);

   image->destroyImage(image_planar);

   return ret;
}

/** dri3_get_pixmap_buffer
 *
 * Get the DRM object for a pixmap from the X server and
 * wrap that with a __DRIimage structure using createImageFromFds
 */
static struct loader_dri3_buffer *
dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
                       enum loader_dri3_buffer_type buffer_type,
                       struct loader_dri3_drawable *draw)
{
   int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
   struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
   xcb_drawable_t pixmap;
   xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
   xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
   xcb_sync_fence_t sync_fence;
   struct xshmfence *shm_fence;
   int fence_fd;
   __DRIscreen *cur_screen;

   if (buffer)
      return buffer;

   pixmap = draw->drawable;

   buffer = calloc(1, sizeof *buffer);
   if (!buffer)
      goto no_buffer;

   fence_fd = xshmfence_alloc_shm();
   if (fence_fd < 0)
      goto no_fence;
   shm_fence = xshmfence_map_shm(fence_fd);
   if (shm_fence == NULL) {
      close(fence_fd);
      goto no_fence;
   }

   xcb_dri3_fence_from_fd(draw->conn,
                          pixmap,
                          (sync_fence = xcb_generate_id(draw->conn)),
                          false,
                          fence_fd);

   bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
   bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
   if (!bp_reply)
      goto no_image;

   /* Get the currently-bound screen or revert to using the drawable's screen if
    * no contexts are currently bound. The latter case is at least necessary for
    * obs-studio, when using Window Capture (Xcomposite) as a Source.
    */
   cur_screen = draw->vtable->get_dri_screen();
   if (!cur_screen) {
      cur_screen = draw->dri_screen;
   }

   buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
                                            cur_screen, draw->ext->image,
                                            buffer);
   if (!buffer->image)
      goto no_image;

   buffer->pixmap = pixmap;
   buffer->own_pixmap = false;
   buffer->width = bp_reply->width;
   buffer->height = bp_reply->height;
   buffer->shm_fence = shm_fence;
   buffer->sync_fence = sync_fence;

   draw->buffers[buf_id] = buffer;

   free(bp_reply);

   return buffer;

no_image:
   free(bp_reply);
   xcb_sync_destroy_fence(draw->conn, sync_fence);
   xshmfence_unmap_shm(shm_fence);
no_fence:
   free(buffer);
no_buffer:
   return NULL;
}

/** dri3_get_buffer
 *
 * Find a front or back buffer, allocating new ones as necessary
 */
static struct loader_dri3_buffer *
dri3_get_buffer(__DRIdrawable *driDrawable,
                unsigned int format,
                enum loader_dri3_buffer_type buffer_type,
                struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *buffer;
   int buf_id;

   if (buffer_type == loader_dri3_buffer_back) {
      draw->back_format = format;

      buf_id = dri3_find_back(draw);

      if (buf_id < 0)
         return NULL;
   } else {
      buf_id = LOADER_DRI3_FRONT_ID;
   }

   buffer = draw->buffers[buf_id];

   /* Allocate a new buffer if there isn't an old one, or if that
    * old one is the wrong size
    */
   if (!buffer || buffer->width != draw->width ||
       buffer->height != draw->height) {
      struct loader_dri3_buffer *new_buffer;

      /* Allocate the new buffers
       */
      new_buffer = dri3_alloc_render_buffer(draw,
                                            format,
                                            draw->width,
                                            draw->height,
                                            draw->depth);
      if (!new_buffer)
         return NULL;

      /* When resizing, copy the contents of the old buffer, waiting for that
       * copy to complete using our fences before proceeding
       */
      if ((buffer_type == loader_dri3_buffer_back ||
           (buffer_type == loader_dri3_buffer_front && draw->have_fake_front))
          && buffer) {

         /* Fill the new buffer with data from an old buffer */
         dri3_fence_await(draw->conn, draw, buffer);
         if (!loader_dri3_blit_image(draw,
                                     new_buffer->image,
                                     buffer->image,
                                     0, 0, draw->width, draw->height,
                                     0, 0, 0) &&
             !buffer->linear_buffer) {
            dri3_fence_reset(draw->conn, new_buffer);
            dri3_copy_area(draw->conn,
                           buffer->pixmap,
                           new_buffer->pixmap,
                           dri3_drawable_gc(draw),
                           0, 0, 0, 0,
                           draw->width, draw->height);
            dri3_fence_trigger(draw->conn, new_buffer);
         }
         dri3_free_render_buffer(draw, buffer);
      } else if (buffer_type == loader_dri3_buffer_front) {
         /* Fill the new fake front with data from a real front */
         loader_dri3_swapbuffer_barrier(draw);
         dri3_fence_reset(draw->conn, new_buffer);
         dri3_copy_area(draw->conn,
                        draw->drawable,
                        new_buffer->pixmap,
                        dri3_drawable_gc(draw),
                        0, 0, 0, 0,
                        draw->width, draw->height);
         dri3_fence_trigger(draw->conn, new_buffer);

         if (new_buffer->linear_buffer) {
            dri3_fence_await(draw->conn, draw, new_buffer);
            (void) loader_dri3_blit_image(draw,
                                          new_buffer->image,
                                          new_buffer->linear_buffer,
                                          0, 0, draw->width, draw->height,
                                          0, 0, 0);
         }
      }
      buffer = new_buffer;
      draw->buffers[buf_id] = buffer;
   }
   dri3_fence_await(draw->conn, draw, buffer);

   /*
    * Do we need to preserve the content of a previous buffer?
    *
    * Note that this blit is needed only to avoid a wait for a buffer that
    * is currently in the flip chain or being scanned out from. That's really
    * a tradeoff. If we're ok with the wait we can reduce the number of back
    * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
    * but in the latter case we must disallow page-flipping.
    */
   if (buffer_type == loader_dri3_buffer_back &&
       draw->cur_blit_source != -1 &&
       draw->buffers[draw->cur_blit_source] &&
       buffer != draw->buffers[draw->cur_blit_source]) {

      struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];

      /* Avoid flushing here. Will probably do good for tiling hardware. */
      (void) loader_dri3_blit_image(draw,
                                    buffer->image,
                                    source->image,
                                    0, 0, draw->width, draw->height,
                                    0, 0, 0);
      buffer->last_swap = source->last_swap;
      draw->cur_blit_source = -1;
   }
   /* Return the requested buffer */
   return buffer;
}

/** dri3_free_buffers
 *
 * Free the front buffer or all of the back buffers. Used
 * when the application changes which buffers it needs
 */
static void
dri3_free_buffers(__DRIdrawable *driDrawable,
                  enum loader_dri3_buffer_type buffer_type,
                  struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *buffer;
   int first_id;
   int n_id;
   int buf_id;

   switch (buffer_type) {
   case loader_dri3_buffer_back:
      first_id = LOADER_DRI3_BACK_ID(0);
      n_id = LOADER_DRI3_MAX_BACK;
      draw->cur_blit_source = -1;
      break;
   case loader_dri3_buffer_front:
      first_id = LOADER_DRI3_FRONT_ID;
      /* Don't free a fake front holding new backbuffer content. */
      n_id = (draw->cur_blit_source == LOADER_DRI3_FRONT_ID) ? 0 : 1;
   }

   for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
      buffer = draw->buffers[buf_id];
      if (buffer) {
         dri3_free_render_buffer(draw, buffer);
         draw->buffers[buf_id] = NULL;
      }
   }
}

/** loader_dri3_get_buffers
 *
 * The published buffer allocation API.
 * Returns all of the necessary buffers, allocating
 * as needed.
 */
int
loader_dri3_get_buffers(__DRIdrawable *driDrawable,
                        unsigned int format,
                        uint32_t *stamp,
                        void *loaderPrivate,
                        uint32_t buffer_mask,
                        struct __DRIimageList *buffers)
{
   struct loader_dri3_drawable *draw = loaderPrivate;
   struct loader_dri3_buffer *front, *back;

   buffers->image_mask = 0;
   buffers->front = NULL;
   buffers->back = NULL;

   front = NULL;
   back = NULL;

   if (!dri3_update_drawable(driDrawable, draw))
      return false;

   /* pixmaps always have front buffers.
    * Exchange swaps also mandate fake front buffers.
    */
   if (draw->is_pixmap || draw->swap_method == __DRI_ATTRIB_SWAP_EXCHANGE)
      buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;

   if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
      /* All pixmaps are owned by the server gpu.
       * When we use a different gpu, we can't use the pixmap
       * as buffer since it is potentially tiled a way
       * our device can't understand. In this case, use
       * a fake front buffer. Hopefully the pixmap
       * content will get synced with the fake front
       * buffer.
       */
      if (draw->is_pixmap && !draw->is_different_gpu)
         front = dri3_get_pixmap_buffer(driDrawable,
                                        format,
                                        loader_dri3_buffer_front,
                                        draw);
      else
         front = dri3_get_buffer(driDrawable,
                                 format,
                                 loader_dri3_buffer_front,
                                 draw);

      if (!front)
         return false;
   } else {
      dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
      draw->have_fake_front = 0;
   }

   if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
      back = dri3_get_buffer(driDrawable,
                             format,
                             loader_dri3_buffer_back,
                             draw);
      if (!back)
         return false;
      draw->have_back = 1;
   } else {
      dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
      draw->have_back = 0;
   }

   if (front) {
      buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
      buffers->front = front->image;
      draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
   }

   if (back) {
      buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
      buffers->back = back->image;
   }

   draw->stamp = stamp;

   return true;
}

/** loader_dri3_update_drawable_geometry
 *
 * Get the current drawable geometry.
 */
void
loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
{
   xcb_get_geometry_cookie_t geom_cookie;
   xcb_get_geometry_reply_t *geom_reply;

   geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);

   geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);

   if (geom_reply) {
      draw->width = geom_reply->width;
      draw->height = geom_reply->height;
      draw->vtable->set_drawable_size(draw, draw->width, draw->height);
      draw->ext->flush->invalidate(draw->dri_drawable);

      free(geom_reply);
   }
}


/**
 * Make sure the server has flushed all pending swap buffers to hardware
 * for this drawable. Ideally we'd want to send an X protocol request to
 * have the server block our connection until the swaps are complete. That
 * would avoid the potential round-trip here.
 */
void
loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
{
   int64_t ust, msc, sbc;

   (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
}

/**
 * Perform any cleanup associated with a close screen operation.
 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
 *
 * This function destroys the screen's cached swap context if any.
 */
void
loader_dri3_close_screen(__DRIscreen *dri_screen)
{
   mtx_lock(&blit_context.mtx);
   if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
      blit_context.core->destroyContext(blit_context.ctx);
      blit_context.ctx = NULL;
   }
   mtx_unlock(&blit_context.mtx);
}

/**
 * Find a backbuffer slot - potentially allocating a back buffer
 *
 * \param draw[in,out]  Pointer to the drawable for which to find back.
 * \return Pointer to a new back buffer or NULL if allocation failed or was
 * not mandated.
 *
 * Find a potentially new back buffer, and if it's not been allocated yet and
 * in addition needs initializing, then try to allocate and initialize it.
 */
static struct loader_dri3_buffer *
dri3_find_back_alloc(struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *back;
   int id;

   id = dri3_find_back(draw);
   if (id < 0)
      return NULL;

   back = draw->buffers[id];
   /* Allocate a new back if we haven't got one */
   if (!back && draw->back_format != __DRI_IMAGE_FORMAT_NONE &&
       dri3_update_drawable(draw->dri_drawable, draw))
      back = dri3_alloc_render_buffer(draw, draw->back_format,
                                      draw->width, draw->height, draw->depth);

   if (!back)
      return NULL;

   draw->buffers[id] = back;

   /* If necessary, prefill the back with data according to swap_method mode. */
   if (draw->cur_blit_source != -1 &&
       draw->buffers[draw->cur_blit_source] &&
       back != draw->buffers[draw->cur_blit_source]) {
      struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];

      dri3_fence_await(draw->conn, draw, source);
      dri3_fence_await(draw->conn, draw, back);
      (void) loader_dri3_blit_image(draw,
                                    back->image,
                                    source->image,
                                    0, 0, draw->width, draw->height,
                                    0, 0, 0);
      back->last_swap = source->last_swap;
      draw->cur_blit_source = -1;
   }

   return back;
}