1 /*
2  * Copyright © 2010 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
17  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
18  * NONINFRINGEMENT.  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
19  * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
20  * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22  * DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Kristian Høgsberg <krh@bitplanet.net>
26  */
27 
28 #include <dlfcn.h>
29 #include <errno.h>
30 #include <fcntl.h>
31 #include <limits.h>
32 #include <stdbool.h>
33 #include <stdint.h>
34 #include <stdio.h>
35 #include <stdlib.h>
36 #include <string.h>
37 #include <time.h>
38 #include <unistd.h>
39 #include <c11/threads.h>
40 #ifdef HAVE_LIBDRM
41 #include <xf86drm.h>
42 #include "drm-uapi/drm_fourcc.h"
43 #endif
44 #include <GL/gl.h>
45 #include "mesa_interface.h"
46 #include <sys/stat.h>
47 #include <sys/types.h>
48 #include "dri_screen.h"
49 
50 #ifdef HAVE_WAYLAND_PLATFORM
51 #include "linux-dmabuf-unstable-v1-client-protocol.h"
52 #include "wayland-drm-client-protocol.h"
53 #include "wayland-drm.h"
54 #include <wayland-client.h>
55 #endif
56 
57 #ifdef HAVE_X11_PLATFORM
58 #include "X11/Xlibint.h"
59 #include "loader_x11.h"
60 #endif
61 
62 #include "GL/mesa_glinterop.h"
63 #include "pipe-loader/pipe_loader.h"
64 #include "loader/loader.h"
65 #include "mapi/glapi/glapi.h"
66 #include "pipe/p_screen.h"
67 #include "util/bitscan.h"
68 #include "util/driconf.h"
69 #include "util/libsync.h"
70 #include "util/os_file.h"
71 #include "util/u_atomic.h"
72 #include "util/u_call_once.h"
73 #include "util/u_math.h"
74 #include "util/u_vector.h"
75 #include "egl_dri2.h"
76 #include "egldefines.h"
77 
78 #define NUM_ATTRIBS 16
79 
80 static const enum pipe_format dri2_pbuffer_visuals[] = {
81    PIPE_FORMAT_R16G16B16A16_FLOAT,
82    PIPE_FORMAT_R16G16B16X16_FLOAT,
83    PIPE_FORMAT_B10G10R10A2_UNORM,
84    PIPE_FORMAT_B10G10R10X2_UNORM,
85    PIPE_FORMAT_BGRA8888_UNORM,
86    PIPE_FORMAT_BGRX8888_UNORM,
87    PIPE_FORMAT_B5G6R5_UNORM,
88 };
89 
90 static void
91 dri_set_background_context(void *loaderPrivate)
92 {
93    _EGLContext *ctx = _eglGetCurrentContext();
94    _EGLThreadInfo *t = _eglGetCurrentThread();
95 
96    _eglBindContextToThread(ctx, t);
97 }
98 
99 static void
100 dri2_gl_flush_get(_glapi_proc *glFlush)
101 {
102    *glFlush = _mesa_glapi_get_proc_address("glFlush");
103 }
104 
105 static void
106 dri2_gl_flush()
107 {
108    static void (*glFlush)(void);
109    static util_once_flag once = UTIL_ONCE_FLAG_INIT;
110 
111    util_call_once_data(&once, (util_call_once_data_func)dri2_gl_flush_get,
112                        &glFlush);
113 
114    /* if glFlush is not available things are horribly broken */
115    if (!glFlush) {
116       _eglLog(_EGL_WARNING, "DRI2: failed to find glFlush entry point");
117       return;
118    }
119 
120    glFlush();
121 }
122 
123 static GLboolean
124 dri_is_thread_safe(UNUSED void *loaderPrivate)
125 {
126 #ifdef HAVE_X11_PLATFORM
127    struct dri2_egl_surface *dri2_surf = loaderPrivate;
128 
129    /* loader_dri3_blit_context_get creates a context with
130     * loaderPrivate being NULL. Enabling glthread for a blitting
131     * context isn't useful so return false.
132     */
133    if (!loaderPrivate)
134       return false;
135 
136    _EGLDisplay *display = dri2_surf->base.Resource.Display;
137 
138    Display *xdpy = (Display *)display->PlatformDisplay;
139 
140    /* Check Xlib is running in thread safe mode when running on EGL/X11-xlib
141     * platform
142     *
143     * 'lock_fns' is the XLockDisplay function pointer of the X11 display 'dpy'.
144     * It will be NULL if XInitThreads wasn't called.
145     */
146    if (display->Platform == _EGL_PLATFORM_X11 && xdpy && !xdpy->lock_fns)
147       return false;
148 #endif
149 
150    return true;
151 }
152 
153 const __DRIbackgroundCallableExtension background_callable_extension = {
154    .base = {__DRI_BACKGROUND_CALLABLE, 2},
155 
156    .setBackgroundContext = dri_set_background_context,
157    .isThreadSafe = dri_is_thread_safe,
158 };
159 
160 const __DRIuseInvalidateExtension use_invalidate = {
161    .base = {__DRI_USE_INVALIDATE, 1},
162 };
163 
164 static void
165 dri2_get_pbuffer_drawable_info(struct dri_drawable *draw, int *x, int *y, int *w,
166                                int *h, void *loaderPrivate)
167 {
168    struct dri2_egl_surface *dri2_surf = loaderPrivate;
169 
170    *x = *y = 0;
171    *w = dri2_surf->base.Width;
172    *h = dri2_surf->base.Height;
173 }
174 
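/* Helper for the swrast device buffer below: derive the bytes-per-pixel from
 * the EGLConfig's BufferSize, rounded up to the next power of two (e.g. a
 * 24-bit config still occupies 4 bytes per pixel); returns 0 for configs
 * without a BufferSize.
 */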
175 static int
176 dri2_get_bytes_per_pixel(struct dri2_egl_surface *dri2_surf)
177 {
178    const int depth = dri2_surf->base.Config->BufferSize;
179    return depth ? util_next_power_of_two(depth / 8) : 0;
180 }
181 
182 static void
183 dri2_put_image(struct dri_drawable *draw, int op, int x, int y, int w, int h,
184                char *data, void *loaderPrivate)
185 {
186    struct dri2_egl_surface *dri2_surf = loaderPrivate;
187    const int bpp = dri2_get_bytes_per_pixel(dri2_surf);
188    const int width = dri2_surf->base.Width;
189    const int height = dri2_surf->base.Height;
190    const int dst_stride = width * bpp;
191    const int src_stride = w * bpp;
192    const int x_offset = x * bpp;
193    int copy_width = src_stride;
194 
195    if (!dri2_surf->swrast_device_buffer)
196       dri2_surf->swrast_device_buffer = malloc(height * dst_stride);
197 
198    if (dri2_surf->swrast_device_buffer) {
199       const char *src = data;
200       char *dst = dri2_surf->swrast_device_buffer;
201 
202       dst += x_offset;
203       dst += y * dst_stride;
204 
205       /* Drivers are allowed to submit OOB PutImage requests, so clip here. */
206       if (copy_width > dst_stride - x_offset)
207          copy_width = dst_stride - x_offset;
208       if (h > height - y)
209          h = height - y;
210 
211       for (; 0 < h; --h) {
212          memcpy(dst, src, copy_width);
213          dst += dst_stride;
214          src += src_stride;
215       }
216    }
217 }
218 
219 static void
220 dri2_get_image(struct dri_drawable *read, int x, int y, int w, int h, char *data,
221                void *loaderPrivate)
222 {
223    struct dri2_egl_surface *dri2_surf = loaderPrivate;
224    const int bpp = dri2_get_bytes_per_pixel(dri2_surf);
225    const int width = dri2_surf->base.Width;
226    const int height = dri2_surf->base.Height;
227    const int src_stride = width * bpp;
228    const int dst_stride = w * bpp;
229    const int x_offset = x * bpp;
230    int copy_width = dst_stride;
231    const char *src = dri2_surf->swrast_device_buffer;
232    char *dst = data;
233 
234    if (!src) {
235       memset(data, 0, copy_width * h);
236       return;
237    }
238 
239    src += x_offset;
240    src += y * src_stride;
241 
242    /* Drivers are allowed to submit OOB GetImage requests, so clip here. */
243    if (copy_width > src_stride - x_offset)
244       copy_width = src_stride - x_offset;
245    if (h > height - y)
246       h = height - y;
247 
248    for (; 0 < h; --h) {
249       memcpy(dst, src, copy_width);
250       src += src_stride;
251       dst += dst_stride;
252    }
253 }
254 
255 /* HACK: technically we should have swrast_null, instead of these.
256  */
257 const __DRIswrastLoaderExtension swrast_pbuffer_loader_extension = {
258    .base = {__DRI_SWRAST_LOADER, 1},
259    .getDrawableInfo = dri2_get_pbuffer_drawable_info,
260    .putImage = dri2_put_image,
261    .getImage = dri2_get_image,
262 };
263 
264 static const EGLint dri2_to_egl_attribute_map[__DRI_ATTRIB_MAX] = {
265    [__DRI_ATTRIB_BUFFER_SIZE] = EGL_BUFFER_SIZE,
266    [__DRI_ATTRIB_LEVEL] = EGL_LEVEL,
267    [__DRI_ATTRIB_LUMINANCE_SIZE] = EGL_LUMINANCE_SIZE,
268    [__DRI_ATTRIB_DEPTH_SIZE] = EGL_DEPTH_SIZE,
269    [__DRI_ATTRIB_STENCIL_SIZE] = EGL_STENCIL_SIZE,
270    [__DRI_ATTRIB_SAMPLE_BUFFERS] = EGL_SAMPLE_BUFFERS,
271    [__DRI_ATTRIB_SAMPLES] = EGL_SAMPLES,
272    [__DRI_ATTRIB_MAX_PBUFFER_WIDTH] = EGL_MAX_PBUFFER_WIDTH,
273    [__DRI_ATTRIB_MAX_PBUFFER_HEIGHT] = EGL_MAX_PBUFFER_HEIGHT,
274    [__DRI_ATTRIB_MAX_PBUFFER_PIXELS] = EGL_MAX_PBUFFER_PIXELS,
275    [__DRI_ATTRIB_MAX_SWAP_INTERVAL] = EGL_MAX_SWAP_INTERVAL,
276    [__DRI_ATTRIB_MIN_SWAP_INTERVAL] = EGL_MIN_SWAP_INTERVAL,
277    [__DRI_ATTRIB_YINVERTED] = EGL_Y_INVERTED_NOK,
278 };
279 
280 const struct dri_config *
281 dri2_get_dri_config(struct dri2_egl_config *conf, EGLint surface_type,
282                     EGLenum colorspace)
283 {
284    const bool double_buffer = surface_type == EGL_WINDOW_BIT;
285    const bool srgb = colorspace == EGL_GL_COLORSPACE_SRGB_KHR;
286 
287    return conf->dri_config[double_buffer][srgb];
288 }
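/* Illustrative use (a sketch, not a call made in this file): selecting the
 * double-buffered sRGB variant for a window surface would look like
 *
 *    const struct dri_config *cfg =
 *       dri2_get_dri_config(conf, EGL_WINDOW_BIT, EGL_GL_COLORSPACE_SRGB_KHR);
 *
 * since dri_config is indexed as [double_buffer][srgb].
 */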
289 
290 static EGLBoolean
291 dri2_match_config(const _EGLConfig *conf, const _EGLConfig *criteria)
292 {
293 #ifdef HAVE_X11_PLATFORM
294    if (conf->Display->Platform == _EGL_PLATFORM_X11 &&
295        conf->AlphaSize > 0 &&
296        conf->NativeVisualID != criteria->NativeVisualID)
297       return EGL_FALSE;
298 #endif
299 
300    if (_eglCompareConfigs(conf, criteria, NULL, EGL_FALSE) != 0)
301       return EGL_FALSE;
302 
303    if (!_eglMatchConfig(conf, criteria))
304       return EGL_FALSE;
305 
306    return EGL_TRUE;
307 }
308 
309 void
310 dri2_get_shifts_and_sizes(const struct dri_config *config, int *shifts,
311                           unsigned int *sizes)
312 {
313    driGetConfigAttrib(config, __DRI_ATTRIB_RED_SHIFT,
314                          (unsigned int *)&shifts[0]);
315    driGetConfigAttrib(config, __DRI_ATTRIB_GREEN_SHIFT,
316                          (unsigned int *)&shifts[1]);
317    driGetConfigAttrib(config, __DRI_ATTRIB_BLUE_SHIFT,
318                          (unsigned int *)&shifts[2]);
319    driGetConfigAttrib(config, __DRI_ATTRIB_ALPHA_SHIFT,
320                          (unsigned int *)&shifts[3]);
321    driGetConfigAttrib(config, __DRI_ATTRIB_RED_SIZE, &sizes[0]);
322    driGetConfigAttrib(config, __DRI_ATTRIB_GREEN_SIZE, &sizes[1]);
323    driGetConfigAttrib(config, __DRI_ATTRIB_BLUE_SIZE, &sizes[2]);
324    driGetConfigAttrib(config, __DRI_ATTRIB_ALPHA_SIZE, &sizes[3]);
325 }
326 
327 enum pipe_format
328 dri2_image_format_for_pbuffer_config(struct dri2_egl_display *dri2_dpy,
329                                      const struct dri_config *config)
330 {
331    struct gl_config *gl_config = (struct gl_config *) config;
332    return gl_config->color_format;
333 }
334 
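/* Translate one DRI config into an EGLConfig and link it to the display.
 * Configs that differ only in double-buffering or sRGB-ness are folded into a
 * single EGLConfig, with the variants stored in conf->dri_config[db][srgb];
 * returns NULL when the config cannot or should not be exposed.
 */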
335 struct dri2_egl_config *
336 dri2_add_config(_EGLDisplay *disp, const struct dri_config *dri_config,
337                 EGLint surface_type, const EGLint *attr_list)
338 {
339    struct dri2_egl_config *conf;
340    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
341    _EGLConfig base;
342    unsigned int attrib, value, double_buffer;
343    bool srgb = false;
344    EGLint key, bind_to_texture_rgb, bind_to_texture_rgba;
345    _EGLConfig *matching_config;
346    EGLint num_configs = 0;
347    EGLint config_id;
348 
349    _eglInitConfig(&base, disp, _eglGetArraySize(disp->Configs) + 1);
350 
351    double_buffer = 0;
352    bind_to_texture_rgb = 0;
353    bind_to_texture_rgba = 0;
354 
355    for (int i = 0; i < __DRI_ATTRIB_MAX; ++i) {
356       if (!driIndexConfigAttrib(dri_config, i, &attrib, &value))
357          break;
358 
359       switch (attrib) {
360       case __DRI_ATTRIB_RENDER_TYPE:
361          if (value & __DRI_ATTRIB_FLOAT_BIT)
362             base.ComponentType = EGL_COLOR_COMPONENT_TYPE_FLOAT_EXT;
363          if (value & __DRI_ATTRIB_RGBA_BIT)
364             value = EGL_RGB_BUFFER;
365          else if (value & __DRI_ATTRIB_LUMINANCE_BIT)
366             value = EGL_LUMINANCE_BUFFER;
367          else
368             return NULL;
369          base.ColorBufferType = value;
370          break;
371 
372       case __DRI_ATTRIB_CONFIG_CAVEAT:
373          if (value & __DRI_ATTRIB_NON_CONFORMANT_CONFIG)
374             value = EGL_NON_CONFORMANT_CONFIG;
375          else if (value & __DRI_ATTRIB_SLOW_BIT)
376             value = EGL_SLOW_CONFIG;
377          else
378             value = EGL_NONE;
379          base.ConfigCaveat = value;
380          break;
381 
382       case __DRI_ATTRIB_BIND_TO_TEXTURE_RGB:
383          bind_to_texture_rgb = value;
384          break;
385 
386       case __DRI_ATTRIB_BIND_TO_TEXTURE_RGBA:
387          bind_to_texture_rgba = value;
388          break;
389 
390       case __DRI_ATTRIB_DOUBLE_BUFFER:
391          double_buffer = value;
392          break;
393 
394       case __DRI_ATTRIB_RED_SIZE:
395          base.RedSize = value;
396          break;
397 
398       case __DRI_ATTRIB_GREEN_SIZE:
399          base.GreenSize = value;
400          break;
401 
402       case __DRI_ATTRIB_BLUE_SIZE:
403          base.BlueSize = value;
404          break;
405 
406       case __DRI_ATTRIB_ALPHA_SIZE:
407          base.AlphaSize = value;
408          break;
409 
410       case __DRI_ATTRIB_ACCUM_RED_SIZE:
411       case __DRI_ATTRIB_ACCUM_GREEN_SIZE:
412       case __DRI_ATTRIB_ACCUM_BLUE_SIZE:
413       case __DRI_ATTRIB_ACCUM_ALPHA_SIZE:
414          /* Don't expose visuals with the accumulation buffer. */
415          if (value > 0)
416             return NULL;
417          break;
418 
419       case __DRI_ATTRIB_FRAMEBUFFER_SRGB_CAPABLE:
420          srgb = value != 0;
421          if (!disp->Extensions.KHR_gl_colorspace && srgb)
422             return NULL;
423          break;
424 
425       case __DRI_ATTRIB_MAX_PBUFFER_WIDTH:
426          base.MaxPbufferWidth = _EGL_MAX_PBUFFER_WIDTH;
427          break;
428       case __DRI_ATTRIB_MAX_PBUFFER_HEIGHT:
429          base.MaxPbufferHeight = _EGL_MAX_PBUFFER_HEIGHT;
430          break;
431       case __DRI_ATTRIB_MUTABLE_RENDER_BUFFER:
432          if (disp->Extensions.KHR_mutable_render_buffer)
433             surface_type |= EGL_MUTABLE_RENDER_BUFFER_BIT_KHR;
434          break;
435       default:
436          key = dri2_to_egl_attribute_map[attrib];
437          if (key != 0)
438             _eglSetConfigKey(&base, key, value);
439          break;
440       }
441    }
442 
443    if (attr_list)
444       for (int i = 0; attr_list[i] != EGL_NONE; i += 2)
445          _eglSetConfigKey(&base, attr_list[i], attr_list[i + 1]);
446 
447    base.NativeRenderable = EGL_TRUE;
448 
449    base.SurfaceType = surface_type;
450    if (surface_type &
451        (EGL_PBUFFER_BIT |
452         (disp->Extensions.NOK_texture_from_pixmap ? EGL_PIXMAP_BIT : 0))) {
453       base.BindToTextureRGB = bind_to_texture_rgb;
454       if (base.AlphaSize > 0)
455          base.BindToTextureRGBA = bind_to_texture_rgba;
456    }
457 
458    if (double_buffer) {
459       surface_type &= ~EGL_PIXMAP_BIT;
460    } else {
461       surface_type &= ~EGL_WINDOW_BIT;
462    }
463 
464    if (!surface_type)
465       return NULL;
466 
467    base.RenderableType = disp->ClientAPIs;
468    base.Conformant = disp->ClientAPIs;
469 
470    base.MinSwapInterval = dri2_dpy->min_swap_interval;
471    base.MaxSwapInterval = dri2_dpy->max_swap_interval;
472 
473    if (!_eglValidateConfig(&base, EGL_FALSE)) {
474       _eglLog(_EGL_DEBUG, "DRI2: failed to validate config %d", base.ConfigID);
475       return NULL;
476    }
477 
478    config_id = base.ConfigID;
479    base.ConfigID = EGL_DONT_CARE;
480    base.SurfaceType = EGL_DONT_CARE;
481    num_configs = _eglFilterArray(disp->Configs, (void **)&matching_config, 1,
482                                  (_EGLArrayForEach)dri2_match_config, &base);
483 
484    if (num_configs == 1) {
485       conf = (struct dri2_egl_config *)matching_config;
486 
487       if (!conf->dri_config[double_buffer][srgb])
488          conf->dri_config[double_buffer][srgb] = dri_config;
489       else
490          /* a similar config type is already added (unlikely) => discard */
491          return NULL;
492    } else if (num_configs == 0) {
493       conf = calloc(1, sizeof *conf);
494       if (conf == NULL)
495          return NULL;
496 
497       conf->dri_config[double_buffer][srgb] = dri_config;
498 
499       memcpy(&conf->base, &base, sizeof base);
500       conf->base.SurfaceType = 0;
501       conf->base.ConfigID = config_id;
502 
503       _eglLinkConfig(&conf->base);
504    } else {
505       unreachable("duplicates should not be possible");
506       return NULL;
507    }
508 
509    conf->base.SurfaceType |= surface_type;
510 
511    return conf;
512 }
513 
514 static int
515 dri2_pbuffer_visual_index(enum pipe_format format)
516 {
517    for (unsigned i = 0; i < ARRAY_SIZE(dri2_pbuffer_visuals); i++) {
518       if (dri2_pbuffer_visuals[i] == format)
519          return i;
520    }
521 
522    return -1;
523 }
524 
525 void
526 dri2_add_pbuffer_configs_for_visuals(_EGLDisplay *disp)
527 {
528    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
529    unsigned int format_count[ARRAY_SIZE(dri2_pbuffer_visuals)] = {0};
530 
531    for (unsigned i = 0; dri2_dpy->driver_configs[i] != NULL; i++) {
532       struct dri2_egl_config *dri2_conf;
533       struct gl_config *gl_config =
534          (struct gl_config *) dri2_dpy->driver_configs[i];
535       int idx = dri2_pbuffer_visual_index(gl_config->color_format);
536 
537       if (idx == -1)
538          continue;
539 
540       dri2_conf = dri2_add_config(disp, dri2_dpy->driver_configs[i],
541                                   EGL_PBUFFER_BIT, NULL);
542       if (dri2_conf)
543          format_count[idx]++;
544    }
545 
546    for (unsigned i = 0; i < ARRAY_SIZE(format_count); i++) {
547       if (!format_count[i]) {
548          _eglLog(_EGL_DEBUG, "No DRI config supports native format %s",
549                  util_format_name(dri2_pbuffer_visuals[i]));
550       }
551    }
552 }
553 
554 GLboolean
555 dri2_validate_egl_image(void *image, void *data)
556 {
557    _EGLDisplay *disp = _eglLockDisplay(data);
558    _EGLImage *img = _eglLookupImage(image, disp);
559    _eglUnlockDisplay(disp);
560 
561    if (img == NULL) {
562       _eglError(EGL_BAD_PARAMETER, "dri2_validate_egl_image");
563       return false;
564    }
565 
566    return true;
567 }
568 
569 struct dri_image *
570 dri2_lookup_egl_image_validated(void *image, void *data)
571 {
572    struct dri2_egl_image *dri2_img;
573 
574    (void)data;
575 
576    dri2_img = dri2_egl_image(image);
577 
578    return dri2_img->dri_image;
579 }
580 
581 const __DRIimageLookupExtension image_lookup_extension = {
582    .base = {__DRI_IMAGE_LOOKUP, 2},
583 
584    .validateEGLImage = dri2_validate_egl_image,
585    .lookupEGLImageValidated = dri2_lookup_egl_image_validated,
586 };
587 
588 EGLBoolean
589 dri2_load_driver(_EGLDisplay *disp)
590 {
591    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
592 
593    dri2_dpy->swrast = (disp->Options.ForceSoftware && !dri2_dpy->kopper && strcmp(dri2_dpy->driver_name, "vmwgfx")) ||
594                       !dri2_dpy->driver_name || strstr(dri2_dpy->driver_name, "swrast");
595    dri2_dpy->swrast_not_kms = dri2_dpy->swrast && (!dri2_dpy->driver_name || strcmp(dri2_dpy->driver_name, "kms_swrast"));
596 
597    return EGL_TRUE;
598 }
599 
600 static const char *
601 dri2_query_driver_name(_EGLDisplay *disp)
602 {
603    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
604    return dri2_dpy->driver_name;
605 }
606 
607 static char *
608 dri2_query_driver_config(_EGLDisplay *disp)
609 {
610    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
611    char *ret;
612 
613    ret = pipe_loader_get_driinfo_xml(dri2_dpy->driver_name);
614 
615    mtx_unlock(&dri2_dpy->lock);
616 
617    return ret;
618 }
619 
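/* Query the pipe_screen's capabilities and advertise the matching client APIs
 * and EGL extensions on the display; also set the default swap-interval range,
 * which window-system platforms may later override.
 */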
620 void
621 dri2_setup_screen(_EGLDisplay *disp)
622 {
623    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
624    struct dri_screen *screen = dri2_dpy->dri_screen_render_gpu;
625    struct pipe_screen *pscreen = screen->base.screen;
626    unsigned int api_mask = screen->api_mask;
627 
628 #ifdef HAVE_LIBDRM
629    unsigned caps = pscreen->caps.dmabuf;
630    /* set if both import and export are supported */
631    if (dri2_dpy->multibuffers_available) {
632       dri2_dpy->has_dmabuf_import = (caps & DRM_PRIME_CAP_IMPORT) > 0;
633       dri2_dpy->has_dmabuf_export = (caps & DRM_PRIME_CAP_EXPORT) > 0;
634    }
635 #endif
636 #ifdef HAVE_ANDROID_PLATFORM
637    dri2_dpy->has_native_fence_fd = pscreen->caps.native_fence_fd;
638 #endif
639    dri2_dpy->has_compression_modifiers = pscreen->query_compression_rates &&
640                                          (pscreen->query_compression_modifiers || dri2_dpy->kopper);
641 
642    /*
643     * EGL 1.5 specification defines the default value to 1. Moreover,
644     * eglSwapInterval() is required to clamp requested value to the supported
645     * range. Since the default value is implicitly assumed to be supported,
646     * use it as both minimum and maximum for the platforms that do not allow
647     * changing the interval. Platforms, which allow it (e.g. x11, wayland)
648     * override these values already.
649     */
650    dri2_dpy->min_swap_interval = 1;
651    dri2_dpy->max_swap_interval = 1;
652    dri2_dpy->default_swap_interval = 1;
653 
654    disp->ClientAPIs = 0;
655    if ((api_mask & (1 << __DRI_API_OPENGL)) && _eglIsApiValid(EGL_OPENGL_API))
656       disp->ClientAPIs |= EGL_OPENGL_BIT;
657    if ((api_mask & (1 << __DRI_API_GLES)) && _eglIsApiValid(EGL_OPENGL_ES_API))
658       disp->ClientAPIs |= EGL_OPENGL_ES_BIT;
659    if ((api_mask & (1 << __DRI_API_GLES2)) && _eglIsApiValid(EGL_OPENGL_ES_API))
660       disp->ClientAPIs |= EGL_OPENGL_ES2_BIT;
661    if ((api_mask & (1 << __DRI_API_GLES3)) && _eglIsApiValid(EGL_OPENGL_ES_API))
662       disp->ClientAPIs |= EGL_OPENGL_ES3_BIT_KHR;
663 
664    disp->Extensions.KHR_create_context = EGL_TRUE;
665    disp->Extensions.KHR_create_context_no_error = EGL_TRUE;
666    disp->Extensions.KHR_no_config_context = EGL_TRUE;
667    disp->Extensions.KHR_surfaceless_context = EGL_TRUE;
668 
669    disp->Extensions.MESA_gl_interop = EGL_TRUE;
670 
671    disp->Extensions.MESA_query_driver = EGL_TRUE;
672 
673    /* Report back to EGL the bitmask of priorities supported */
674    disp->Extensions.IMG_context_priority = pscreen->caps.context_priority_mask;
675 
676    /**
677     * FIXME: Some drivers currently misreport what context priorities the user
678     * can use and fail context creation. This causes issues on Android where the
679     * display process would try to use realtime priority. This is also a spec
680     * violation for IMG_context_priority.
681     */
682 #ifndef HAVE_ANDROID_PLATFORM
683    disp->Extensions.NV_context_priority_realtime =
684       disp->Extensions.IMG_context_priority &
685       (1 << __EGL_CONTEXT_PRIORITY_REALTIME_BIT);
686 #endif
687 
688    disp->Extensions.EXT_pixel_format_float = EGL_TRUE;
689 
690    if (pscreen->is_format_supported(pscreen, PIPE_FORMAT_B8G8R8A8_SRGB,
691                                     PIPE_TEXTURE_2D, 0, 0,
692                                     PIPE_BIND_RENDER_TARGET)) {
693       disp->Extensions.KHR_gl_colorspace = EGL_TRUE;
694    }
695 
696    disp->Extensions.EXT_config_select_group = EGL_TRUE;
697 
698    disp->Extensions.EXT_create_context_robustness =
699       pscreen->caps.device_reset_status_query;
700    disp->RobustBufferAccess = pscreen->caps.robust_buffer_access_behavior;
701 
702    /* EXT_query_reset_notification_strategy complements and requires
703     * EXT_create_context_robustness. */
704    disp->Extensions.EXT_query_reset_notification_strategy =
705       disp->Extensions.EXT_create_context_robustness;
706 
707    disp->Extensions.KHR_fence_sync = EGL_TRUE;
708    disp->Extensions.KHR_wait_sync = EGL_TRUE;
709    disp->Extensions.KHR_cl_event2 = EGL_TRUE;
710    if (dri_fence_get_caps(dri2_dpy->dri_screen_render_gpu)
711       & __DRI_FENCE_CAP_NATIVE_FD)
712       disp->Extensions.ANDROID_native_fence_sync = EGL_TRUE;
713 
714    if (dri_get_pipe_screen(dri2_dpy->dri_screen_render_gpu)->get_disk_shader_cache)
715       disp->Extensions.ANDROID_blob_cache = EGL_TRUE;
716 
717    disp->Extensions.KHR_reusable_sync = EGL_TRUE;
718 
719    int capabilities;
720    capabilities = dri2_get_capabilities(dri2_dpy->dri_screen_render_gpu);
721    disp->Extensions.MESA_drm_image = (capabilities & __DRI_IMAGE_CAP_GLOBAL_NAMES) != 0;
722 
723 #ifdef HAVE_LIBDRM
724    if (pscreen->caps.dmabuf & DRM_PRIME_CAP_EXPORT)
725       disp->Extensions.MESA_image_dma_buf_export = true;
726 
727    if (dri2_dpy->has_dmabuf_import) {
728       disp->Extensions.EXT_image_dma_buf_import = EGL_TRUE;
729       disp->Extensions.EXT_image_dma_buf_import_modifiers = EGL_TRUE;
730    }
731 #endif
732    disp->Extensions.MESA_x11_native_visual_id = EGL_TRUE;
733    disp->Extensions.EXT_surface_compression = EGL_TRUE;
734    disp->Extensions.KHR_image_base = EGL_TRUE;
735    disp->Extensions.KHR_gl_renderbuffer_image = EGL_TRUE;
736    disp->Extensions.KHR_gl_texture_2D_image = EGL_TRUE;
737    disp->Extensions.KHR_gl_texture_cubemap_image = EGL_TRUE;
738 
739    if (pscreen->caps.max_texture_3d_levels != 0)
740       disp->Extensions.KHR_gl_texture_3D_image = EGL_TRUE;
741 
742    disp->Extensions.KHR_context_flush_control = EGL_TRUE;
743 
744    if (dri_get_pipe_screen(dri2_dpy->dri_screen_render_gpu)->set_damage_region)
745       disp->Extensions.KHR_partial_update = EGL_TRUE;
746 
747    disp->Extensions.EXT_protected_surface = pscreen->caps.device_protected_surface;
748    disp->Extensions.EXT_protected_content = pscreen->caps.device_protected_context;
749 }
750 
751 void
752 dri2_setup_swap_interval(_EGLDisplay *disp, int max_swap_interval)
753 {
754    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
755    GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
756 
757    /* Allow driconf to override applications. */
758    dri2GalliumConfigQueryi(dri2_dpy->dri_screen_render_gpu, "vblank_mode", &vblank_mode);
759 
760    switch (vblank_mode) {
761    case DRI_CONF_VBLANK_NEVER:
762       dri2_dpy->min_swap_interval = 0;
763       dri2_dpy->max_swap_interval = 0;
764       dri2_dpy->default_swap_interval = 0;
765       break;
766    case DRI_CONF_VBLANK_ALWAYS_SYNC:
767       dri2_dpy->min_swap_interval = 1;
768       dri2_dpy->max_swap_interval = max_swap_interval;
769       dri2_dpy->default_swap_interval = 1;
770       break;
771    case DRI_CONF_VBLANK_DEF_INTERVAL_0:
772       dri2_dpy->min_swap_interval = 0;
773       dri2_dpy->max_swap_interval = max_swap_interval;
774       dri2_dpy->default_swap_interval = 0;
775       break;
776    default:
777    case DRI_CONF_VBLANK_DEF_INTERVAL_1:
778       dri2_dpy->min_swap_interval = 0;
779       dri2_dpy->max_swap_interval = max_swap_interval;
780       dri2_dpy->default_swap_interval = 1;
781       break;
782    }
783 }
784 
785 /* All platforms but DRM call this function to create the screen and populate
786  * the driver_configs. DRM inherits that information from its display - GBM.
787  */
788 EGLBoolean
789 dri2_create_screen(_EGLDisplay *disp)
790 {
791    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
792    char *driver_name_display_gpu;
793    enum dri_screen_type type = DRI_SCREEN_DRI3;
794 
795    if (dri2_dpy->kopper)
796       type = DRI_SCREEN_KOPPER;
797    else if (dri2_dpy->swrast_not_kms)
798       type = DRI_SCREEN_SWRAST;
799    else if (dri2_dpy->swrast)
800       type = DRI_SCREEN_KMS_SWRAST;
801 
802    if (dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu) {
803       driver_name_display_gpu =
804          loader_get_driver_for_fd(dri2_dpy->fd_display_gpu);
805       if (driver_name_display_gpu) {
806          /* Check that the driver names match so that non-Mesa drivers
807           * will not crash.
808           */
809          if (strcmp(dri2_dpy->driver_name, driver_name_display_gpu) == 0) {
810             dri2_dpy->dri_screen_display_gpu = driCreateNewScreen3(
811                0, dri2_dpy->fd_display_gpu, dri2_dpy->loader_extensions,
812                type, &dri2_dpy->driver_configs, false, dri2_dpy->multibuffers_available, disp);
813          }
814          free(driver_name_display_gpu);
815       }
816    }
817 
818    int screen_fd = dri2_dpy->swrast_not_kms ? -1 : dri2_dpy->fd_render_gpu;
819    dri2_dpy->dri_screen_render_gpu = driCreateNewScreen3(
820       0, screen_fd, dri2_dpy->loader_extensions, type,
821       &dri2_dpy->driver_configs, false, dri2_dpy->multibuffers_available, disp);
822 
823    if (dri2_dpy->dri_screen_render_gpu == NULL) {
824       _eglLog(_EGL_WARNING, "egl: failed to create dri2 screen");
825       return EGL_FALSE;
826    }
827 
828    if (dri2_dpy->fd_render_gpu == dri2_dpy->fd_display_gpu)
829       dri2_dpy->dri_screen_display_gpu = dri2_dpy->dri_screen_render_gpu;
830 
831    dri2_dpy->own_dri_screen = true;
832    return EGL_TRUE;
833 }
834 
835 EGLBoolean
836 dri2_setup_device(_EGLDisplay *disp, EGLBoolean software)
837 {
838    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
839    _EGLDevice *dev;
840    int render_fd;
841 
842    /* If we're not software, we need a DRM node FD */
843    assert(software || dri2_dpy->fd_render_gpu >= 0);
844 
845    /* fd_render_gpu is what we got from WSI, so might actually be a lie and
846     * not a render node... */
847    if (software) {
848       render_fd = -1;
849    } else if (loader_is_device_render_capable(dri2_dpy->fd_render_gpu)) {
850       render_fd = dri2_dpy->fd_render_gpu;
851    } else {
852       render_fd = dri_query_compatible_render_only_device_fd(
853          dri2_dpy->fd_render_gpu);
854       if (render_fd < 0)
855          return EGL_FALSE;
856    }
857 
858    dev = _eglFindDevice(render_fd, software);
859 
860    if (render_fd >= 0 && render_fd != dri2_dpy->fd_render_gpu)
861       close(render_fd);
862 
863    if (!dev)
864       return EGL_FALSE;
865 
866    disp->Device = dev;
867    return EGL_TRUE;
868 }
869 
870 /**
871  * Called via eglInitialize(), drv->Initialize().
872  *
873  * This must be guaranteed to be called exactly once, even if eglInitialize is
874  * called many times (without a eglTerminate in between).
875  */
876 static EGLBoolean
877 dri2_initialize(_EGLDisplay *disp)
878 {
879    EGLBoolean ret = EGL_FALSE;
880    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
881 
882    /* In the case where the application calls eglMakeCurrent(context1),
883     * eglTerminate, then eglInitialize again (without a call to eglReleaseThread
884     * or eglMakeCurrent(NULL) before that), dri2_dpy structure is still
885     * initialized, as we need it to be able to free context1 correctly.
886     *
887     * It would probably be safest to forcibly release the display with
888     * dri2_display_release, to make sure the display is reinitialized correctly.
889     * However, the EGL spec states that we need to keep a reference to the
890     * current context (so we cannot call dri2_make_current(NULL)), and therefore
891     * we would leak context1 as we would be missing the old display connection
892     * to free it up correctly.
893     */
894    if (dri2_dpy) {
895       p_atomic_inc(&dri2_dpy->ref_count);
896       return EGL_TRUE;
897    }
898    dri2_dpy = dri2_display_create(disp);
899    if (!dri2_dpy)
900       return EGL_FALSE;
901 
902    loader_set_logger(_eglLog);
903 
904    bool allow_dri2 = false;
905    switch (disp->Platform) {
906    case _EGL_PLATFORM_SURFACELESS:
907       ret = dri2_initialize_surfaceless(disp);
908       break;
909    case _EGL_PLATFORM_DEVICE:
910       ret = dri2_initialize_device(disp);
911       break;
912    case _EGL_PLATFORM_X11:
913    case _EGL_PLATFORM_XCB:
914       ret = dri2_initialize_x11(disp, &allow_dri2);
915       /* platform_x11 detects dri2 availability */
916       if (!ret && allow_dri2) {
917          /* This is a fallthrough using the same dri2_dpy from dri3,
918           * so the existing one must be destroyed and a new one created;
919           * the caller will switch to the new display automatically.
920           */
921          dri2_display_destroy(disp);
922          dri2_display_create(disp);
923          ret = dri2_initialize_x11_dri2(disp);
924       }
925       break;
926    case _EGL_PLATFORM_DRM:
927       ret = dri2_initialize_drm(disp);
928       break;
929    case _EGL_PLATFORM_WAYLAND:
930       ret = dri2_initialize_wayland(disp);
931       break;
932    case _EGL_PLATFORM_ANDROID:
933       ret = dri2_initialize_android(disp);
934       break;
935    case _EGL_PLATFORM_OHOS:
936       ret = dri2_initialize_ohos(disp);
937       break;
938    default:
939       unreachable("Callers ensure we cannot get here.");
940       return EGL_FALSE;
941    }
942 
943    if (!ret) {
944       dri2_display_destroy(disp);
945       return EGL_FALSE;
946    }
947 
948    if (_eglGetArraySize(disp->Configs) == 0) {
949       _eglError(EGL_NOT_INITIALIZED, "failed to add any EGLConfigs");
950       dri2_display_destroy(disp);
951       return EGL_FALSE;
952    }
953 
954    dri2_dpy = dri2_egl_display(disp);
955    p_atomic_inc(&dri2_dpy->ref_count);
956 
957    mtx_init(&dri2_dpy->lock, mtx_plain);
958 
959    return EGL_TRUE;
960 }
961 
962 /**
963  * Decrement display reference count, and free up display if necessary.
964  */
965 static void
966 dri2_display_release(_EGLDisplay *disp)
967 {
968    struct dri2_egl_display *dri2_dpy;
969 
970    if (!disp)
971       return;
972 
973    dri2_dpy = dri2_egl_display(disp);
974 
975    assert(dri2_dpy->ref_count > 0);
976 
977    if (!p_atomic_dec_zero(&dri2_dpy->ref_count))
978       return;
979 
980    _eglCleanupDisplay(disp);
981    dri2_display_destroy(disp);
982 }
983 
984 void
985 dri2_display_destroy(_EGLDisplay *disp)
986 {
987    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
988 
989    if (dri2_dpy->own_dri_screen) {
990       if (dri2_dpy->vtbl && dri2_dpy->vtbl->close_screen_notify)
991          dri2_dpy->vtbl->close_screen_notify(disp);
992 
993       driDestroyScreen(dri2_dpy->dri_screen_render_gpu);
994 
995       if (dri2_dpy->dri_screen_display_gpu &&
996           dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu)
997          driDestroyScreen(dri2_dpy->dri_screen_display_gpu);
998    }
999    if (dri2_dpy->fd_display_gpu >= 0 &&
1000        dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu)
1001       close(dri2_dpy->fd_display_gpu);
1002    if (dri2_dpy->fd_render_gpu >= 0)
1003       close(dri2_dpy->fd_render_gpu);
1004 
1005    free(dri2_dpy->driver_name);
1006 
1007 #ifdef HAVE_WAYLAND_PLATFORM
1008    free(dri2_dpy->device_name);
1009 #endif
1010 
1011    switch (disp->Platform) {
1012    case _EGL_PLATFORM_X11:
1013    case _EGL_PLATFORM_XCB:
1014       dri2_teardown_x11(dri2_dpy);
1015       break;
1016    case _EGL_PLATFORM_DRM:
1017       dri2_teardown_drm(dri2_dpy);
1018       break;
1019    case _EGL_PLATFORM_WAYLAND:
1020       dri2_teardown_wayland(dri2_dpy);
1021       break;
1022    case _EGL_PLATFORM_ANDROID:
1023 #ifdef HAVE_ANDROID_PLATFORM
1024       u_gralloc_destroy(&dri2_dpy->gralloc);
1025 #endif
1026       break;
1027    case _EGL_PLATFORM_SURFACELESS:
1028       break;
1029    case _EGL_PLATFORM_DEVICE:
1030       break;
1031    case _EGL_PLATFORM_OHOS:
1032       break;
1033    default:
1034       unreachable("Platform teardown is not properly hooked.");
1035       break;
1036    }
1037 
1038    /* The drm platform does not create the screen/driver_configs but reuses
1039     * the ones from the gbm device. As such the gbm itself is responsible
1040     * for the cleanup.
1041     */
1042    if (disp->Platform != _EGL_PLATFORM_DRM && dri2_dpy->driver_configs) {
1043       for (unsigned i = 0; dri2_dpy->driver_configs[i]; i++)
1044          free((struct dri_config *)dri2_dpy->driver_configs[i]);
1045       free(dri2_dpy->driver_configs);
1046    }
1047    free(dri2_dpy);
1048    disp->DriverData = NULL;
1049 }
1050 
1051 struct dri2_egl_display *
1052 dri2_display_create(_EGLDisplay *disp)
1053 {
1054    struct dri2_egl_display *dri2_dpy = calloc(1, sizeof *dri2_dpy);
1055    if (!dri2_dpy) {
1056       _eglError(EGL_BAD_ALLOC, "eglInitialize");
1057       return NULL;
1058    }
1059 
1060    dri2_dpy->fd_render_gpu = -1;
1061    dri2_dpy->fd_display_gpu = -1;
1062    dri2_dpy->multibuffers_available = true;
1063    dri2_dpy->kopper = disp->Options.Zink && !debug_get_bool_option("LIBGL_KOPPER_DISABLE", false);
1064    dri2_dpy->kopper_without_modifiers = dri2_dpy->kopper && debug_get_bool_option("LIBGL_KOPPER_DRI2", false);
1065    disp->DriverData = (void *)dri2_dpy;
1066 
1067    return dri2_dpy;
1068 }
1069 
1070 /**
1071  * Called via eglTerminate(), drv->Terminate().
1072  *
1073  * This must be guaranteed to be called exactly once, even if eglTerminate is
1074  * called many times (without a eglInitialize in between).
1075  */
1076 static EGLBoolean
1077 dri2_terminate(_EGLDisplay *disp)
1078 {
1079    /* Release all non-current Context/Surfaces. */
1080    _eglReleaseDisplayResources(disp);
1081 
1082    dri2_display_release(disp);
1083 
1084    return EGL_TRUE;
1085 }
1086 
1087 /**
1088  * Set the error code after a call to
1089  * dri2_egl_display::dri2::createContextAttribs.
1090  */
1091 static void
1092 dri2_create_context_attribs_error(int dri_error)
1093 {
1094    EGLint egl_error;
1095 
1096    switch (dri_error) {
1097    case __DRI_CTX_ERROR_SUCCESS:
1098       return;
1099 
1100    case __DRI_CTX_ERROR_NO_MEMORY:
1101       egl_error = EGL_BAD_ALLOC;
1102       break;
1103 
1104       /* From the EGL_KHR_create_context spec, section "Errors":
1105        *
1106        *   * If <config> does not support a client API context compatible
1107        *     with the requested API major and minor version, [...] context
1108        * flags, and context reset notification behavior (for client API types
1109        * where these attributes are supported), then an EGL_BAD_MATCH error is
1110        *     generated.
1111        *
1112        *   * If an OpenGL ES context is requested and the values for
1113        *     attributes EGL_CONTEXT_MAJOR_VERSION_KHR and
1114        *     EGL_CONTEXT_MINOR_VERSION_KHR specify an OpenGL ES version that
1115        *     is not defined, then an EGL_BAD_MATCH error is generated.
1116        *
1117        *   * If an OpenGL context is requested, the requested version is
1118        *     greater than 3.2, and the value for attribute
1119        *     EGL_CONTEXT_OPENGL_PROFILE_MASK_KHR has no bits set; has any
1120        *     bits set other than EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR and
1121        *     EGL_CONTEXT_OPENGL_COMPATIBILITY_PROFILE_BIT_KHR; has more than
1122        *     one of these bits set; or if the implementation does not support
1123        *     the requested profile, then an EGL_BAD_MATCH error is generated.
1124        */
1125    case __DRI_CTX_ERROR_BAD_API:
1126    case __DRI_CTX_ERROR_BAD_VERSION:
1127    case __DRI_CTX_ERROR_BAD_FLAG:
1128       egl_error = EGL_BAD_MATCH;
1129       break;
1130 
1131       /* From the EGL_KHR_create_context spec, section "Errors":
1132        *
1133        *   * If an attribute name or attribute value in <attrib_list> is not
1134        *     recognized (including unrecognized bits in bitmask attributes),
1135        *     then an EGL_BAD_ATTRIBUTE error is generated."
1136        */
1137    case __DRI_CTX_ERROR_UNKNOWN_ATTRIBUTE:
1138    case __DRI_CTX_ERROR_UNKNOWN_FLAG:
1139       egl_error = EGL_BAD_ATTRIBUTE;
1140       break;
1141 
1142    default:
1143       assert(!"unknown dri_error code");
1144       egl_error = EGL_BAD_MATCH;
1145       break;
1146    }
1147 
1148    _eglError(egl_error, "dri2_create_context");
1149 }
1150 
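/* Convert the EGL context state (version, flags, reset strategy, priority,
 * release behavior, no-error, protected) into the __DRI_CTX_ATTRIB_* key/value
 * pairs consumed by driCreateContextAttribs(); the caller passes an array of
 * at least NUM_ATTRIBS entries and receives back the number actually written.
 */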
1151 static bool
1152 dri2_fill_context_attribs(struct dri2_egl_context *dri2_ctx,
1153                           struct dri2_egl_display *dri2_dpy,
1154                           uint32_t *ctx_attribs, unsigned *num_attribs)
1155 {
1156    int pos = 0;
1157 
1158    assert(*num_attribs >= NUM_ATTRIBS);
1159 
1160    ctx_attribs[pos++] = __DRI_CTX_ATTRIB_MAJOR_VERSION;
1161    ctx_attribs[pos++] = dri2_ctx->base.ClientMajorVersion;
1162    ctx_attribs[pos++] = __DRI_CTX_ATTRIB_MINOR_VERSION;
1163    ctx_attribs[pos++] = dri2_ctx->base.ClientMinorVersion;
1164 
1165    if (dri2_ctx->base.Flags != 0) {
1166       ctx_attribs[pos++] = __DRI_CTX_ATTRIB_FLAGS;
1167       ctx_attribs[pos++] = dri2_ctx->base.Flags;
1168    }
1169 
1170    if (dri2_ctx->base.ResetNotificationStrategy !=
1171        EGL_NO_RESET_NOTIFICATION_KHR) {
1172       ctx_attribs[pos++] = __DRI_CTX_ATTRIB_RESET_STRATEGY;
1173       ctx_attribs[pos++] = __DRI_CTX_RESET_LOSE_CONTEXT;
1174    }
1175 
1176    if (dri2_ctx->base.ContextPriority != EGL_CONTEXT_PRIORITY_MEDIUM_IMG) {
1177       unsigned val;
1178 
1179       switch (dri2_ctx->base.ContextPriority) {
1180       case EGL_CONTEXT_PRIORITY_REALTIME_NV:
1181          val = __DRI_CTX_PRIORITY_REALTIME;
1182          break;
1183       case EGL_CONTEXT_PRIORITY_HIGH_IMG:
1184          val = __DRI_CTX_PRIORITY_HIGH;
1185          break;
1186       case EGL_CONTEXT_PRIORITY_MEDIUM_IMG:
1187          val = __DRI_CTX_PRIORITY_MEDIUM;
1188          break;
1189       case EGL_CONTEXT_PRIORITY_LOW_IMG:
1190          val = __DRI_CTX_PRIORITY_LOW;
1191          break;
1192       default:
1193          _eglError(EGL_BAD_CONFIG, "eglCreateContext");
1194          return false;
1195       }
1196 
1197       ctx_attribs[pos++] = __DRI_CTX_ATTRIB_PRIORITY;
1198       ctx_attribs[pos++] = val;
1199    }
1200 
1201    if (dri2_ctx->base.ReleaseBehavior ==
1202        EGL_CONTEXT_RELEASE_BEHAVIOR_NONE_KHR) {
1203       ctx_attribs[pos++] = __DRI_CTX_ATTRIB_RELEASE_BEHAVIOR;
1204       ctx_attribs[pos++] = __DRI_CTX_RELEASE_BEHAVIOR_NONE;
1205    }
1206 
1207    if (dri2_ctx->base.NoError) {
1208       ctx_attribs[pos++] = __DRI_CTX_ATTRIB_NO_ERROR;
1209       ctx_attribs[pos++] = true;
1210    }
1211 
1212    if (dri2_ctx->base.Protected) {
1213       ctx_attribs[pos++] = __DRI_CTX_ATTRIB_PROTECTED;
1214       ctx_attribs[pos++] = true;
1215    }
1216 
1217    *num_attribs = pos;
1218 
1219    return true;
1220 }
1221 
1222 /**
1223  * Called via eglCreateContext(), drv->CreateContext().
1224  */
1225 static _EGLContext *
1226 dri2_create_context(_EGLDisplay *disp, _EGLConfig *conf,
1227                     _EGLContext *share_list, const EGLint *attrib_list)
1228 {
1229    struct dri2_egl_context *dri2_ctx;
1230    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1231    struct dri2_egl_context *dri2_ctx_shared = dri2_egl_context(share_list);
1232    struct dri_context *shared = dri2_ctx_shared ? dri2_ctx_shared->dri_context : NULL;
1233    struct dri2_egl_config *dri2_config = dri2_egl_config(conf);
1234    const struct dri_config *dri_config;
1235    int api;
1236    unsigned error;
1237    unsigned num_attribs = NUM_ATTRIBS;
1238    uint32_t ctx_attribs[NUM_ATTRIBS];
1239 
1240    dri2_ctx = malloc(sizeof *dri2_ctx);
1241    if (!dri2_ctx) {
1242       dri2_egl_error_unlock(dri2_dpy, EGL_BAD_ALLOC, "eglCreateContext");
1243       return NULL;
1244    }
1245 
1246    if (!_eglInitContext(&dri2_ctx->base, disp, conf, share_list, attrib_list))
1247       goto cleanup;
1248 
1249    switch (dri2_ctx->base.ClientAPI) {
1250    case EGL_OPENGL_ES_API:
1251       switch (dri2_ctx->base.ClientMajorVersion) {
1252       case 1:
1253          api = __DRI_API_GLES;
1254          break;
1255       case 2:
1256          api = __DRI_API_GLES2;
1257          break;
1258       case 3:
1259          api = __DRI_API_GLES3;
1260          break;
1261       default:
1262          _eglError(EGL_BAD_PARAMETER, "eglCreateContext");
1263          goto cleanup;
1264       }
1265       break;
1266    case EGL_OPENGL_API:
1267       if ((dri2_ctx->base.ClientMajorVersion >= 4 ||
1268            (dri2_ctx->base.ClientMajorVersion == 3 &&
1269             dri2_ctx->base.ClientMinorVersion >= 2)) &&
1270           dri2_ctx->base.Profile == EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR)
1271          api = __DRI_API_OPENGL_CORE;
1272       else if (dri2_ctx->base.ClientMajorVersion == 3 &&
1273                dri2_ctx->base.ClientMinorVersion == 1)
1274          api = __DRI_API_OPENGL_CORE;
1275       else
1276          api = __DRI_API_OPENGL;
1277       break;
1278    default:
1279       _eglError(EGL_BAD_PARAMETER, "eglCreateContext");
1280       goto cleanup;
1281    }
1282 
1283    if (conf != NULL) {
1284       /* The config chosen here isn't necessarily
1285        * used for surfaces later.
1286        * A pixmap surface will use the single config.
1287        * This opportunity depends on disabling the
1288        * doubleBufferMode check in
1289        * src/mesa/main/context.c:check_compatible()
1290        */
1291       if (dri2_config->dri_config[1][0])
1292          dri_config = dri2_config->dri_config[1][0];
1293       else
1294          dri_config = dri2_config->dri_config[0][0];
1295    } else
1296       dri_config = NULL;
1297 
1298    if (!dri2_fill_context_attribs(dri2_ctx, dri2_dpy, ctx_attribs,
1299                                   &num_attribs))
1300       goto cleanup;
1301 
1302    dri2_ctx->dri_context = driCreateContextAttribs(
1303       dri2_dpy->dri_screen_render_gpu, api, dri_config, shared, num_attribs / 2,
1304       ctx_attribs, &error, dri2_ctx);
1305    dri2_create_context_attribs_error(error);
1306 
1307    if (!dri2_ctx->dri_context)
1308       goto cleanup;
1309 
1310    mtx_unlock(&dri2_dpy->lock);
1311 
1312    return &dri2_ctx->base;
1313 
1314 cleanup:
1315    mtx_unlock(&dri2_dpy->lock);
1316    free(dri2_ctx);
1317    return NULL;
1318 }
1319 
1320 /**
1321  * Called via eglDestroyContext(), drv->DestroyContext().
1322  */
1323 static EGLBoolean
1324 dri2_destroy_context(_EGLDisplay *disp, _EGLContext *ctx)
1325 {
1326    struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1327 
1328    if (_eglPutContext(ctx)) {
1329       driDestroyContext(dri2_ctx->dri_context);
1330       free(dri2_ctx);
1331    }
1332 
1333    return EGL_TRUE;
1334 }
1335 
1336 EGLBoolean
1337 dri2_init_surface(_EGLSurface *surf, _EGLDisplay *disp, EGLint type,
1338                   _EGLConfig *conf, const EGLint *attrib_list,
1339                   EGLBoolean enable_out_fence, void *native_surface)
1340 {
1341    struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1342 
1343    dri2_surf->out_fence_fd = -1;
1344    dri2_surf->enable_out_fence = false;
1345    if (disp->Extensions.ANDROID_native_fence_sync) {
1346       dri2_surf->enable_out_fence = enable_out_fence;
1347    }
1348 
1349    return _eglInitSurface(surf, disp, type, conf, attrib_list, native_surface);
1350 }
1351 
1352 static void
1353 dri2_surface_set_out_fence_fd(_EGLSurface *surf, int fence_fd)
1354 {
1355    struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1356 
1357    if (dri2_surf->out_fence_fd >= 0)
1358       close(dri2_surf->out_fence_fd);
1359 
1360    dri2_surf->out_fence_fd = fence_fd;
1361 }
1362 
1363 void
1364 dri2_fini_surface(_EGLSurface *surf)
1365 {
1366    struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1367 
1368    dri2_surface_set_out_fence_fd(surf, -1);
1369    dri2_surf->enable_out_fence = false;
1370 }
1371 
1372 static EGLBoolean
1373 dri2_destroy_surface(_EGLDisplay *disp, _EGLSurface *surf)
1374 {
1375    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1376    EGLBoolean ret = EGL_TRUE;
1377 
1378    if (_eglPutSurface(surf))
1379       ret = dri2_dpy->vtbl->destroy_surface(disp, surf);
1380 
1381    return ret;
1382 }
1383 
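/* When ANDROID_native_fence_sync is enabled for the surface, create a native
 * fence for the work submitted so far and stash its fd on the surface; the
 * previously stored fd, if any, is closed by dri2_surface_set_out_fence_fd().
 */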
1384 static void
1385 dri2_surf_update_fence_fd(_EGLContext *ctx, _EGLDisplay *disp,
1386                           _EGLSurface *surf)
1387 {
1388    struct dri_context *dri_ctx = dri2_egl_context(ctx)->dri_context;
1389    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1390    struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1391    int fence_fd = -1;
1392    void *fence;
1393 
1394    if (!dri2_surf->enable_out_fence)
1395       return;
1396 
1397    fence = dri_create_fence_fd(dri_ctx, -1);
1398    if (fence) {
1399       fence_fd = dri_get_fence_fd(dri2_dpy->dri_screen_render_gpu, fence);
1400       dri_destroy_fence(dri2_dpy->dri_screen_render_gpu, fence);
1401    }
1402    dri2_surface_set_out_fence_fd(surf, fence_fd);
1403 }
1404 
1405 EGLBoolean
1406 dri2_create_drawable(struct dri2_egl_display *dri2_dpy,
1407                      const struct dri_config *config,
1408                      struct dri2_egl_surface *dri2_surf, void *loaderPrivate)
1409 {
1410    bool is_pixmap = dri2_surf->base.Type == EGL_PBUFFER_BIT ||
1411                     dri2_surf->base.Type == EGL_PIXMAP_BIT;
1412    dri2_surf->dri_drawable = dri_create_drawable(dri2_dpy->dri_screen_render_gpu, config, is_pixmap, loaderPrivate);
1413    if (dri2_surf->dri_drawable == NULL)
1414       return _eglError(EGL_BAD_ALLOC, "createNewDrawable");
1415 
1416    return EGL_TRUE;
1417 }
1418 
1419 /**
1420  * Called via eglMakeCurrent(), drv->MakeCurrent().
1421  */
1422 static EGLBoolean
1423 dri2_make_current(_EGLDisplay *disp, _EGLSurface *dsurf, _EGLSurface *rsurf,
1424                   _EGLContext *ctx)
1425 {
1426    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1427    struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1428    _EGLDisplay *old_disp = NULL;
1429    struct dri2_egl_display *old_dri2_dpy = NULL;
1430    _EGLContext *old_ctx;
1431    _EGLSurface *old_dsurf, *old_rsurf;
1432    _EGLSurface *tmp_dsurf, *tmp_rsurf;
1433    struct dri_drawable *ddraw, *rdraw;
1434    struct dri_context *cctx;
1435    EGLint egl_error = EGL_SUCCESS;
1436 
1437    if (!dri2_dpy)
1438       return _eglError(EGL_NOT_INITIALIZED, "eglMakeCurrent");
1439 
1440    /* make new bindings, set the EGL error otherwise */
1441    if (!_eglBindContext(ctx, dsurf, rsurf, &old_ctx, &old_dsurf, &old_rsurf))
1442       return EGL_FALSE;
1443 
1444    if (old_ctx == ctx && old_dsurf == dsurf && old_rsurf == rsurf) {
1445       _eglPutSurface(old_dsurf);
1446       _eglPutSurface(old_rsurf);
1447       _eglPutContext(old_ctx);
1448       return EGL_TRUE;
1449    }
1450 
1451    if (old_ctx) {
1452       struct dri_context *old_cctx = dri2_egl_context(old_ctx)->dri_context;
1453       old_disp = old_ctx->Resource.Display;
1454       old_dri2_dpy = dri2_egl_display(old_disp);
1455 
1456       /* Disable shared buffer mode */
1457       if (old_dsurf && _eglSurfaceInSharedBufferMode(old_dsurf) &&
1458           old_dri2_dpy->vtbl->set_shared_buffer_mode) {
1459          old_dri2_dpy->vtbl->set_shared_buffer_mode(old_disp, old_dsurf, false);
1460       }
1461 
1462       driUnbindContext(old_cctx);
1463 
1464       if (old_dsurf)
1465          dri2_surf_update_fence_fd(old_ctx, old_disp, old_dsurf);
1466    }
1467 
1468    ddraw = (dsurf) ? dri2_dpy->vtbl->get_dri_drawable(dsurf) : NULL;
1469    rdraw = (rsurf) ? dri2_dpy->vtbl->get_dri_drawable(rsurf) : NULL;
1470    cctx = (dri2_ctx) ? dri2_ctx->dri_context : NULL;
1471 
1472    if (cctx) {
1473       if (!driBindContext(cctx, ddraw, rdraw)) {
1474          _EGLContext *tmp_ctx;
1475 
1476          /* driBindContext failed. We cannot tell for sure why, but
1477           * setting the error to EGL_BAD_MATCH is surely better than leaving it
1478           * as EGL_SUCCESS.
1479           */
1480          egl_error = EGL_BAD_MATCH;
1481 
1482          /* undo the previous _eglBindContext */
1483          _eglBindContext(old_ctx, old_dsurf, old_rsurf, &ctx, &tmp_dsurf,
1484                          &tmp_rsurf);
1485          assert(&dri2_ctx->base == ctx && tmp_dsurf == dsurf &&
1486                 tmp_rsurf == rsurf);
1487 
1488          _eglPutSurface(dsurf);
1489          _eglPutSurface(rsurf);
1490          _eglPutContext(ctx);
1491 
1492          _eglPutSurface(old_dsurf);
1493          _eglPutSurface(old_rsurf);
1494          _eglPutContext(old_ctx);
1495 
1496          ddraw =
1497             (old_dsurf) ? dri2_dpy->vtbl->get_dri_drawable(old_dsurf) : NULL;
1498          rdraw =
1499             (old_rsurf) ? dri2_dpy->vtbl->get_dri_drawable(old_rsurf) : NULL;
1500          cctx = (old_ctx) ? dri2_egl_context(old_ctx)->dri_context : NULL;
1501 
1502          /* undo the previous driUnbindContext */
1503          if (driBindContext(cctx, ddraw, rdraw)) {
1504             if (old_dsurf && _eglSurfaceInSharedBufferMode(old_dsurf) &&
1505                 old_dri2_dpy->vtbl->set_shared_buffer_mode) {
1506                old_dri2_dpy->vtbl->set_shared_buffer_mode(old_disp, old_dsurf,
1507                                                           true);
1508             }
1509 
1510             return _eglError(egl_error, "eglMakeCurrent");
1511          }
1512 
1513          /* We cannot restore the same state as it was before calling
1514           * eglMakeCurrent() and the spec isn't clear about what to do. We
1515           * can prevent EGL from calling into the DRI driver with no DRI
1516           * context bound.
1517           */
1518          dsurf = rsurf = NULL;
1519          ctx = NULL;
1520 
1521          _eglBindContext(ctx, dsurf, rsurf, &tmp_ctx, &tmp_dsurf, &tmp_rsurf);
1522          assert(tmp_ctx == old_ctx && tmp_dsurf == old_dsurf &&
1523                 tmp_rsurf == old_rsurf);
1524 
1525          _eglLog(_EGL_WARNING, "DRI2: failed to rebind the previous context");
1526       } else {
1527          /* driBindContext succeeded, so take a reference on the
1528           * dri2_dpy. This prevents dri2_dpy from being reinitialized when an
1529           * EGLDisplay is terminated and then initialized again while a
1530           * context is still bound. See dri2_initialize() for a more in-depth
1531           * explanation. */
1532          p_atomic_inc(&dri2_dpy->ref_count);
1533       }
1534    }
1535 
1536    dri2_destroy_surface(disp, old_dsurf);
1537    dri2_destroy_surface(disp, old_rsurf);
1538 
1539    if (old_ctx) {
1540       dri2_destroy_context(disp, old_ctx);
1541       dri2_display_release(old_disp);
1542    }
1543 
1544    if (egl_error != EGL_SUCCESS)
1545       return _eglError(egl_error, "eglMakeCurrent");
1546 
1547    if (dsurf && _eglSurfaceHasMutableRenderBuffer(dsurf) &&
1548        dri2_dpy->vtbl->set_shared_buffer_mode) {
1549       /* Always update the shared buffer mode. This is obviously needed when
1550        * the active EGL_RENDER_BUFFER is EGL_SINGLE_BUFFER. When
1551        * EGL_RENDER_BUFFER is EGL_BACK_BUFFER, the update protects us in the
1552        * case where an external non-EGL API may have changed the window's shared
1553        * buffer mode since we last saw it.
1554        */
1555       bool mode = (dsurf->ActiveRenderBuffer == EGL_SINGLE_BUFFER);
1556       dri2_dpy->vtbl->set_shared_buffer_mode(disp, dsurf, mode);
1557    }
1558 
1559    return EGL_TRUE;
1560 }
1561 
1562 struct dri_drawable *
1563 dri2_surface_get_dri_drawable(_EGLSurface *surf)
1564 {
1565    struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1566 
1567    return dri2_surf->dri_drawable;
1568 }
1569 
1570 static _EGLSurface *
1571 dri2_create_window_surface(_EGLDisplay *disp, _EGLConfig *conf,
1572                            void *native_window, const EGLint *attrib_list)
1573 {
1574    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1575    _EGLSurface *ret = dri2_dpy->vtbl->create_window_surface(
1576       disp, conf, native_window, attrib_list);
1577    mtx_unlock(&dri2_dpy->lock);
1578    return ret;
1579 }
1580 
1581 static _EGLSurface *
1582 dri2_create_pixmap_surface(_EGLDisplay *disp, _EGLConfig *conf,
1583                            void *native_pixmap, const EGLint *attrib_list)
1584 {
1585    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1586    _EGLSurface *ret = NULL;
1587 
1588    if (dri2_dpy->vtbl->create_pixmap_surface)
1589       ret = dri2_dpy->vtbl->create_pixmap_surface(disp, conf, native_pixmap,
1590                                                   attrib_list);
1591 
1592    mtx_unlock(&dri2_dpy->lock);
1593 
1594    return ret;
1595 }
1596 
1597 static _EGLSurface *
1598 dri2_create_pbuffer_surface(_EGLDisplay *disp, _EGLConfig *conf,
1599                             const EGLint *attrib_list)
1600 {
1601    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1602    _EGLSurface *ret = NULL;
1603 
1604    if (dri2_dpy->vtbl->create_pbuffer_surface)
1605       ret = dri2_dpy->vtbl->create_pbuffer_surface(disp, conf, attrib_list);
1606 
1607    mtx_unlock(&dri2_dpy->lock);
1608 
1609    return ret;
1610 }
1611 
1612 static EGLBoolean
1613 dri2_swap_interval(_EGLDisplay *disp, _EGLSurface *surf, EGLint interval)
1614 {
1615    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1616    EGLBoolean ret = EGL_TRUE;
1617 
1618    if (dri2_dpy->vtbl->swap_interval)
1619       ret = dri2_dpy->vtbl->swap_interval(disp, surf, interval);
1620 
1621    mtx_unlock(&dri2_dpy->lock);
1622 
1623    return ret;
1624 }
1625 
1626 /**
1627  * Asks the client API to flush any rendering to the drawable so that we can
1628  * do our swapbuffers.
1629  */
1630 void
1631 dri2_flush_drawable_for_swapbuffers_flags(
1632    _EGLDisplay *disp, _EGLSurface *draw,
1633    enum __DRI2throttleReason throttle_reason)
1634 {
1635    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1636    struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(draw);
1637 
1638    /* flush not available for swrast */
1639    if (dri2_dpy->swrast_not_kms)
1640       return;
1641 
1642    /* We know there's a current context because:
1643     *
1644     *     "If surface is not bound to the calling thread’s current
1645     *      context, an EGL_BAD_SURFACE error is generated."
1646     */
1647    _EGLContext *ctx = _eglGetCurrentContext();
1648    struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1649 
1650    /* From the EGL 1.4 spec (page 52):
1651     *
1652     *     "The contents of ancillary buffers are always undefined
1653     *      after calling eglSwapBuffers."
1654     */
1655    dri_flush(dri2_ctx->dri_context, dri_drawable,
1656       __DRI2_FLUSH_DRAWABLE | __DRI2_FLUSH_INVALIDATE_ANCILLARY,
1657       throttle_reason);
1658 }
1659 
1660 void
1661 dri2_flush_drawable_for_swapbuffers(_EGLDisplay *disp, _EGLSurface *draw)
1662 {
1663    dri2_flush_drawable_for_swapbuffers_flags(disp, draw,
1664                                              __DRI2_THROTTLE_SWAPBUFFER);
1665 }
1666 
1667 static EGLBoolean
1668 dri2_swap_buffers(_EGLDisplay *disp, _EGLSurface *surf)
1669 {
1670    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1671    struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1672    _EGLContext *ctx = _eglGetCurrentContext();
1673    EGLBoolean ret;
1674 
1675    if (ctx && surf)
1676       dri2_surf_update_fence_fd(ctx, disp, surf);
1677    ret = dri2_dpy->vtbl->swap_buffers(disp, surf);
1678 
1679    /* SwapBuffers marks the end of the frame; reset the damage region for
1680     * use again next time.
1681     */
1682    if (ret && disp->Extensions.KHR_partial_update)
1683       dri_set_damage_region(dri_drawable, 0, NULL);
1684 
1685    return ret;
1686 }
1687 
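/* Descriptive note: this backs the eglSwapBuffersWithDamage entry points.
 * Per EGL_KHR_swap_buffers_with_damage, each damage rectangle is four
 * EGLints (x, y, width, height) in surface coordinates.  Backends without a
 * dedicated swap_buffers_with_damage hook fall back to a plain swap below. */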
1688 static EGLBoolean
1689 dri2_swap_buffers_with_damage(_EGLDisplay *disp, _EGLSurface *surf,
1690                               const EGLint *rects, EGLint n_rects)
1691 {
1692    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1693    struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1694    _EGLContext *ctx = _eglGetCurrentContext();
1695    EGLBoolean ret;
1696 
1697    if (ctx && surf)
1698       dri2_surf_update_fence_fd(ctx, disp, surf);
1699    if (dri2_dpy->vtbl->swap_buffers_with_damage)
1700       ret =
1701          dri2_dpy->vtbl->swap_buffers_with_damage(disp, surf, rects, n_rects);
1702    else
1703       ret = dri2_dpy->vtbl->swap_buffers(disp, surf);
1704 
1705    /* SwapBuffers marks the end of the frame; reset the damage region for
1706     * use again next time.
1707     */
1708    if (ret && disp->Extensions.KHR_partial_update)
1709       dri_set_damage_region(dri_drawable, 0, NULL);
1710 
1711    return ret;
1712 }
1713 
1714 static EGLBoolean
1715 dri2_swap_buffers_region(_EGLDisplay *disp, _EGLSurface *surf, EGLint numRects,
1716                          const EGLint *rects)
1717 {
1718    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1719    struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1720    EGLBoolean ret;
1721 
1722    if (!dri2_dpy->vtbl->swap_buffers_region)
1723       return EGL_FALSE;
1724    ret = dri2_dpy->vtbl->swap_buffers_region(disp, surf, numRects, rects);
1725 
1726    /* SwapBuffers marks the end of the frame; reset the damage region for
1727     * use again next time.
1728     */
1729    if (ret && disp->Extensions.KHR_partial_update)
1730       dri_set_damage_region(dri_drawable, 0, NULL);
1731 
1732    return ret;
1733 }
1734 
1735 static EGLBoolean
1736 dri2_set_damage_region(_EGLDisplay *disp, _EGLSurface *surf, EGLint *rects,
1737                        EGLint n_rects)
1738 {
1739    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1740    struct dri_drawable *drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1741 
1742    if (!disp->Extensions.KHR_partial_update) {
1743       mtx_unlock(&dri2_dpy->lock);
1744       return EGL_FALSE;
1745    }
1746 
1747    dri_set_damage_region(drawable, n_rects, rects);
1748    mtx_unlock(&dri2_dpy->lock);
1749    return EGL_TRUE;
1750 }
1751 
1752 static EGLBoolean
1753 dri2_post_sub_buffer(_EGLDisplay *disp, _EGLSurface *surf, EGLint x, EGLint y,
1754                      EGLint width, EGLint height)
1755 {
1756    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1757    EGLBoolean ret = EGL_FALSE;
1758 
1759    if (dri2_dpy->vtbl->post_sub_buffer)
1760       ret = dri2_dpy->vtbl->post_sub_buffer(disp, surf, x, y, width, height);
1761 
1762    mtx_unlock(&dri2_dpy->lock);
1763 
1764    return ret;
1765 }
1766 
1767 static EGLBoolean
1768 dri2_copy_buffers(_EGLDisplay *disp, _EGLSurface *surf,
1769                   void *native_pixmap_target)
1770 {
1771    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1772    if (!dri2_dpy->vtbl->copy_buffers)
1773       return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_NATIVE_PIXMAP,
1774                                    "no support for native pixmaps");
1775    EGLBoolean ret =
1776       dri2_dpy->vtbl->copy_buffers(disp, surf, native_pixmap_target);
1777    mtx_unlock(&dri2_dpy->lock);
1778    return ret;
1779 }
1780 
1781 static EGLint
1782 dri2_query_buffer_age(_EGLDisplay *disp, _EGLSurface *surf)
1783 {
1784    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1785    if (!dri2_dpy->vtbl->query_buffer_age)
1786       return 0;
1787    return dri2_dpy->vtbl->query_buffer_age(disp, surf);
1788 }
1789 
1790 static EGLBoolean
1791 dri2_wait_client(_EGLDisplay *disp, _EGLContext *ctx)
1792 {
1793    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1794    _EGLSurface *surf = ctx->DrawSurface;
1795    struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1796 
1797    /* FIXME: If EGL allows frontbuffer rendering for window surfaces,
1798     * we need to copy fake to real here. */
1799 
1800    if (!dri2_dpy->swrast_not_kms)
1801       dri_flush_drawable(dri_drawable);
1802 
1803    return EGL_TRUE;
1804 }
1805 
1806 static EGLBoolean
1807 dri2_wait_native(EGLint engine)
1808 {
1809    if (engine != EGL_CORE_NATIVE_ENGINE)
1810       return _eglError(EGL_BAD_PARAMETER, "eglWaitNative");
1811    /* glXWaitX(); */
1812 
1813    return EGL_TRUE;
1814 }
1815 
1816 static EGLBoolean
1817 dri2_bind_tex_image(_EGLDisplay *disp, _EGLSurface *surf, EGLint buffer)
1818 {
1819    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1820    struct dri2_egl_context *dri2_ctx;
1821    _EGLContext *ctx;
1822    GLint format, target;
1823    struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1824 
1825    ctx = _eglGetCurrentContext();
1826    dri2_ctx = dri2_egl_context(ctx);
1827 
1828    if (!_eglBindTexImage(disp, surf, buffer)) {
1829       mtx_unlock(&dri2_dpy->lock);
1830       return EGL_FALSE;
1831    }
1832 
1833    switch (surf->TextureFormat) {
1834    case EGL_TEXTURE_RGB:
1835       format = __DRI_TEXTURE_FORMAT_RGB;
1836       break;
1837    case EGL_TEXTURE_RGBA:
1838       format = __DRI_TEXTURE_FORMAT_RGBA;
1839       break;
1840    default:
1841       assert(!"Unexpected texture format in dri2_bind_tex_image()");
1842       format = __DRI_TEXTURE_FORMAT_RGBA;
1843    }
1844 
1845    switch (surf->TextureTarget) {
1846    case EGL_TEXTURE_2D:
1847       target = GL_TEXTURE_2D;
1848       break;
1849    default:
1850       target = GL_TEXTURE_2D;
1851       assert(!"Unexpected texture target in dri2_bind_tex_image()");
1852    }
1853 
1854    dri_set_tex_buffer2(dri2_ctx->dri_context, target, format, dri_drawable);
1855 
1856    mtx_unlock(&dri2_dpy->lock);
1857 
1858    return EGL_TRUE;
1859 }
1860 
1861 static EGLBoolean
1862 dri2_release_tex_image(_EGLDisplay *disp, _EGLSurface *surf, EGLint buffer)
1863 {
1864    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1865 
1866    if (!_eglReleaseTexImage(disp, surf, buffer)) {
1867       mtx_unlock(&dri2_dpy->lock);
1868       return EGL_FALSE;
1869    }
1870 
1871    mtx_unlock(&dri2_dpy->lock);
1872 
1873    return EGL_TRUE;
1874 }
1875 
1876 static _EGLImage *
1877 dri2_create_image(_EGLDisplay *disp, _EGLContext *ctx, EGLenum target,
1878                   EGLClientBuffer buffer, const EGLint *attr_list)
1879 {
1880    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1881    _EGLImage *ret =
1882       dri2_dpy->vtbl->create_image(disp, ctx, target, buffer, attr_list);
1883    mtx_unlock(&dri2_dpy->lock);
1884    return ret;
1885 }
1886 
1887 _EGLImage *
1888 dri2_create_image_from_dri(_EGLDisplay *disp, struct dri_image *dri_image)
1889 {
1890    struct dri2_egl_image *dri2_img;
1891 
1892    if (dri_image == NULL) {
1893       _eglError(EGL_BAD_ALLOC, "dri2_create_image");
1894       return NULL;
1895    }
1896 
1897    dri2_img = malloc(sizeof *dri2_img);
1898    if (!dri2_img) {
1899       _eglError(EGL_BAD_ALLOC, "dri2_create_image");
1900       return NULL;
1901    }
1902 
1903    _eglInitImage(&dri2_img->base, disp);
1904 
1905    dri2_img->dri_image = dri_image;
1906 
1907    return &dri2_img->base;
1908 }
1909 
1910 /**
1911  * Translate a DRI Image extension error code into an EGL error code.
1912  */
1913 static EGLint
1914 egl_error_from_dri_image_error(int dri_error)
1915 {
1916    switch (dri_error) {
1917    case __DRI_IMAGE_ERROR_SUCCESS:
1918       return EGL_SUCCESS;
1919    case __DRI_IMAGE_ERROR_BAD_ALLOC:
1920       return EGL_BAD_ALLOC;
1921    case __DRI_IMAGE_ERROR_BAD_MATCH:
1922       return EGL_BAD_MATCH;
1923    case __DRI_IMAGE_ERROR_BAD_PARAMETER:
1924       return EGL_BAD_PARAMETER;
1925    case __DRI_IMAGE_ERROR_BAD_ACCESS:
1926       return EGL_BAD_ACCESS;
1927    default:
1928       assert(!"unknown dri_error code");
1929       return EGL_BAD_ALLOC;
1930    }
1931 }
1932 
1933 static _EGLImage *
1934 dri2_create_image_khr_renderbuffer(_EGLDisplay *disp, _EGLContext *ctx,
1935                                    EGLClientBuffer buffer,
1936                                    const EGLint *attr_list)
1937 {
1938    struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1939    GLuint renderbuffer = (GLuint)(uintptr_t)buffer;
1940    struct dri_image *dri_image;
1941 
1942    if (renderbuffer == 0) {
1943       _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
1944       return EGL_NO_IMAGE_KHR;
1945    }
1946 
1947    if (!disp->Extensions.KHR_gl_renderbuffer_image) {
1948       _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
1949       return EGL_NO_IMAGE_KHR;
1950    }
1951 
1952    unsigned error = ~0;
1953    dri_image = dri_create_image_from_renderbuffer(
1954       dri2_ctx->dri_context, renderbuffer, NULL, &error);
1955 
1956    assert(!!dri_image == (error == __DRI_IMAGE_ERROR_SUCCESS));
1957 
1958    if (!dri_image) {
1959       _eglError(egl_error_from_dri_image_error(error), "dri2_create_image_khr");
1960       return EGL_NO_IMAGE_KHR;
1961    }
1962 
1963    return dri2_create_image_from_dri(disp, dri_image);
1964 }
1965 
1966 #ifdef HAVE_WAYLAND_PLATFORM
1967 
1968 /* This structure describes how a wl_buffer maps to one or more
1969  * dri_image structures.  A wl_drm_buffer stores the wl_drm format code and the
1970  * offsets and strides of the planes in the buffer.  This table maps a
1971  * wl_drm format code to a description of the planes in the buffer
1972  * that lets us create a struct dri_image for each of the planes. */
1973 
1974 static const struct wl_drm_components_descriptor {
1975    uint32_t dri_components;
1976    EGLint components;
1977    int nplanes;
1978 } wl_drm_components[] = {
1979    {__DRI_IMAGE_COMPONENTS_RGB, EGL_TEXTURE_RGB, 1},
1980    {__DRI_IMAGE_COMPONENTS_RGBA, EGL_TEXTURE_RGBA, 1},
1981    {__DRI_IMAGE_COMPONENTS_Y_U_V, EGL_TEXTURE_Y_U_V_WL, 3},
1982    {__DRI_IMAGE_COMPONENTS_Y_UV, EGL_TEXTURE_Y_UV_WL, 2},
1983    {__DRI_IMAGE_COMPONENTS_Y_XUXV, EGL_TEXTURE_Y_XUXV_WL, 2},
1984 };
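/* Illustrative example (not an exhaustive mapping): an NV12 wl_drm buffer
 * carries a full-resolution Y plane plus an interleaved CbCr plane, so it
 * resolves to the __DRI_IMAGE_COMPONENTS_Y_UV / EGL_TEXTURE_Y_UV_WL entry
 * with nplanes == 2, and dri2_create_image_wayland_wl_buffer() below accepts
 * plane indices 0 and 1 for it. */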
1985 
1986 static _EGLImage *
1987 dri2_create_image_wayland_wl_buffer(_EGLDisplay *disp, _EGLContext *ctx,
1988                                     EGLClientBuffer _buffer,
1989                                     const EGLint *attr_list)
1990 {
1991    struct wl_drm_buffer *buffer;
1992    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1993    const struct wl_drm_components_descriptor *f;
1994    struct dri_image *dri_image;
1995    _EGLImageAttribs attrs;
1996    int32_t plane;
1997 
1998    buffer = wayland_drm_buffer_get(dri2_dpy->wl_server_drm,
1999                                    (struct wl_resource *)_buffer);
2000    if (!buffer)
2001       return NULL;
2002 
2003    if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2004       return NULL;
2005 
2006    plane = attrs.PlaneWL;
2007    f = buffer->driver_format;
2008    if (plane < 0 || plane >= f->nplanes) {
2009       _eglError(EGL_BAD_PARAMETER,
2010                 "dri2_create_image_wayland_wl_buffer (plane out of bounds)");
2011       return NULL;
2012    }
2013 
2014    dri_image = dri2_from_planar(buffer->driver_buffer, plane, NULL);
2015    if (dri_image == NULL && plane == 0)
2016       dri_image = dri2_dup_image(buffer->driver_buffer, NULL);
2017    if (dri_image == NULL) {
2018       _eglError(EGL_BAD_PARAMETER, "dri2_create_image_wayland_wl_buffer");
2019       return NULL;
2020    }
2021 
2022    return dri2_create_image_from_dri(disp, dri_image);
2023 }
2024 #endif
2025 
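/* Backs eglGetSyncValuesCHROMIUM() from EGL_CHROMIUM_sync_control: UST is the
 * unadjusted system time, MSC the media (vblank) counter and SBC the swap
 * buffer count.  Returns EGL_FALSE when the backend has no get_sync_values
 * hook. */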
2026 static EGLBoolean
2027 dri2_get_sync_values_chromium(_EGLDisplay *disp, _EGLSurface *surf,
2028                               EGLuint64KHR *ust, EGLuint64KHR *msc,
2029                               EGLuint64KHR *sbc)
2030 {
2031    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2032    EGLBoolean ret = EGL_FALSE;
2033 
2034    if (dri2_dpy->vtbl->get_sync_values)
2035       ret = dri2_dpy->vtbl->get_sync_values(disp, surf, ust, msc, sbc);
2036 
2037    return ret;
2038 }
2039 
2040 static EGLBoolean
2041 dri2_get_msc_rate_angle(_EGLDisplay *disp, _EGLSurface *surf, EGLint *numerator,
2042                         EGLint *denominator)
2043 {
2044    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2045    if (!dri2_dpy->vtbl->get_msc_rate)
2046       return EGL_FALSE;
2047    return dri2_dpy->vtbl->get_msc_rate(disp, surf, numerator, denominator);
2048 }
2049 
2050 /**
2051  * Set the EGL error code after a call to
2052  * dri2_create_from_texture().
2053  */
2054 static void
2055 dri2_create_image_khr_texture_error(int dri_error)
2056 {
2057    EGLint egl_error = egl_error_from_dri_image_error(dri_error);
2058 
2059    if (egl_error != EGL_SUCCESS)
2060       _eglError(egl_error, "dri2_create_image_khr_texture");
2061 }
2062 
2063 static _EGLImage *
2064 dri2_create_image_khr_texture(_EGLDisplay *disp, _EGLContext *ctx,
2065                               EGLenum target, EGLClientBuffer buffer,
2066                               const EGLint *attr_list)
2067 {
2068    struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
2069    struct dri2_egl_image *dri2_img;
2070    GLuint texture = (GLuint)(uintptr_t)buffer;
2071    _EGLImageAttribs attrs;
2072    GLuint depth;
2073    GLenum gl_target;
2074    unsigned error = __DRI_IMAGE_ERROR_SUCCESS;
2075 
2076    if (texture == 0) {
2077       _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2078       return EGL_NO_IMAGE_KHR;
2079    }
2080 
2081    if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2082       return EGL_NO_IMAGE_KHR;
2083 
2084    switch (target) {
2085    case EGL_GL_TEXTURE_2D_KHR:
2086       if (!disp->Extensions.KHR_gl_texture_2D_image) {
2087          _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2088          return EGL_NO_IMAGE_KHR;
2089       }
2090       depth = 0;
2091       gl_target = GL_TEXTURE_2D;
2092       break;
2093    case EGL_GL_TEXTURE_3D_KHR:
2094       if (!disp->Extensions.KHR_gl_texture_3D_image) {
2095          _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2096          return EGL_NO_IMAGE_KHR;
2097       }
2098 
2099       depth = attrs.GLTextureZOffset;
2100       gl_target = GL_TEXTURE_3D;
2101       break;
2102    case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR:
2103    case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X_KHR:
2104    case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y_KHR:
2105    case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_KHR:
2106    case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z_KHR:
2107    case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_KHR:
2108       if (!disp->Extensions.KHR_gl_texture_cubemap_image) {
2109          _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2110          return EGL_NO_IMAGE_KHR;
2111       }
2112 
2113       depth = target - EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR;
2114       gl_target = GL_TEXTURE_CUBE_MAP;
2115       break;
2116    default:
2117       unreachable("Unexpected target in dri2_create_image_khr_texture()");
2118       return EGL_NO_IMAGE_KHR;
2119    }
2120 
2121    dri2_img = malloc(sizeof *dri2_img);
2122    if (!dri2_img) {
2123       _eglError(EGL_BAD_ALLOC, "dri2_create_image_khr");
2124       return EGL_NO_IMAGE_KHR;
2125    }
2126 
2127    _eglInitImage(&dri2_img->base, disp);
2128 
2129    dri2_img->dri_image = dri2_create_from_texture(
2130       dri2_ctx->dri_context, gl_target, texture, depth, attrs.GLTextureLevel,
2131       &error, NULL);
2132    dri2_create_image_khr_texture_error(error);
2133 
2134    if (!dri2_img->dri_image) {
2135       free(dri2_img);
2136       return EGL_NO_IMAGE_KHR;
2137    }
2138    return &dri2_img->base;
2139 }
2140 
2141 static EGLBoolean
2142 dri2_query_surface(_EGLDisplay *disp, _EGLSurface *surf, EGLint attribute,
2143                    EGLint *value)
2144 {
2145    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2146    EGLBoolean ret;
2147 
2148    if (!dri2_dpy->vtbl->query_surface) {
2149       ret = _eglQuerySurface(disp, surf, attribute, value);
2150    } else {
2151       ret = dri2_dpy->vtbl->query_surface(disp, surf, attribute, value);
2152    }
2153 
2154    return ret;
2155 }
2156 
2157 static struct wl_buffer *
2158 dri2_create_wayland_buffer_from_image(_EGLDisplay *disp, _EGLImage *img)
2159 {
2160    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2161    struct wl_buffer *ret = NULL;
2162 
2163    if (dri2_dpy->vtbl->create_wayland_buffer_from_image)
2164       ret = dri2_dpy->vtbl->create_wayland_buffer_from_image(disp, img);
2165 
2166    mtx_unlock(&dri2_dpy->lock);
2167 
2168    return ret;
2169 }
2170 
2171 #ifdef HAVE_LIBDRM
2172 static _EGLImage *
2173 dri2_create_image_mesa_drm_buffer(_EGLDisplay *disp, _EGLContext *ctx,
2174                                   EGLClientBuffer buffer,
2175                                   const EGLint *attr_list)
2176 {
2177    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2178    EGLint name, pitch;
2179    uint32_t fourcc;
2180    _EGLImageAttribs attrs;
2181    struct dri_image *dri_image;
2182 
2183    name = (EGLint)(uintptr_t)buffer;
2184 
2185    if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2186       return NULL;
2187 
2188    if (attrs.Width <= 0 || attrs.Height <= 0 ||
2189        attrs.DRMBufferStrideMESA <= 0) {
2190       _eglError(EGL_BAD_PARAMETER, "bad width, height or stride");
2191       return NULL;
2192    }
2193 
2194    switch (attrs.DRMBufferFormatMESA) {
2195    case EGL_DRM_BUFFER_FORMAT_ARGB32_MESA:
2196       fourcc = DRM_FORMAT_ARGB8888;
2197       pitch = attrs.DRMBufferStrideMESA * 4;
2198       break;
2199    default:
2200       _eglError(EGL_BAD_PARAMETER,
2201                 "dri2_create_image_khr: unsupported pixmap depth");
2202       return NULL;
2203    }
2204 
2205    int offset = 0;
2206    dri_image = dri2_from_names(
2207       dri2_dpy->dri_screen_render_gpu, attrs.Width, attrs.Height, fourcc,
2208       (int *) &name, 1, (int *) &pitch, &offset, NULL);
2209 
2210    return dri2_create_image_from_dri(disp, dri_image);
2211 }
2212 
2213 static EGLBoolean
2214 dri2_check_dma_buf_attribs(const _EGLImageAttribs *attrs)
2215 {
2216    /**
2217     * The spec says:
2218     *
2219     * "Required attributes and their values are as follows:
2220     *
2221     *  * EGL_WIDTH & EGL_HEIGHT: The logical dimensions of the buffer in pixels
2222     *
2223     *  * EGL_LINUX_DRM_FOURCC_EXT: The pixel format of the buffer, as specified
2224     *    by drm_fourcc.h and used as the pixel_format parameter of the
2225     *    drm_mode_fb_cmd2 ioctl."
2226     *
2227     * and
2228     *
2229     * "* If <target> is EGL_LINUX_DMA_BUF_EXT, and the list of attributes is
2230     *    incomplete, EGL_BAD_PARAMETER is generated."
2231     */
2232    if (attrs->Width <= 0 || attrs->Height <= 0 ||
2233        !attrs->DMABufFourCC.IsPresent)
2234       return _eglError(EGL_BAD_PARAMETER, "attribute(s) missing");
2235 
2236    /**
2237     * Also:
2238     *
2239     * "If <target> is EGL_LINUX_DMA_BUF_EXT and one or more of the values
2240     *  specified for a plane's pitch or offset isn't supported by EGL,
2241     *  EGL_BAD_ACCESS is generated."
2242     */
2243    for (unsigned i = 0; i < ARRAY_SIZE(attrs->DMABufPlanePitches); ++i) {
2244       if (attrs->DMABufPlanePitches[i].IsPresent &&
2245           attrs->DMABufPlanePitches[i].Value <= 0)
2246          return _eglError(EGL_BAD_ACCESS, "invalid pitch");
2247    }
2248 
2249    /**
2250     * If <target> is EGL_LINUX_DMA_BUF_EXT, both or neither of the following
2251     * attribute values may be given.
2252     *
2253     * This is referring to EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT and
2254     * EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, and the same for other planes.
2255     */
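   /* Illustrative note (assumed values, not from the spec): a 64-bit DRM
    * format modifier 0x0123456789abcdef would be supplied as
    *
    *    EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT = 0x01234567
    *    EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT = 0x89abcdef
    *
    * and is recombined with combine_u32_into_u64() in
    * dri2_create_image_dma_buf(). */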
2256    for (unsigned i = 0; i < DMA_BUF_MAX_PLANES; ++i) {
2257       if (attrs->DMABufPlaneModifiersLo[i].IsPresent !=
2258           attrs->DMABufPlaneModifiersHi[i].IsPresent)
2259          return _eglError(EGL_BAD_PARAMETER,
2260                           "modifier attribute lo or hi missing");
2261    }
2262 
2263    /* Although the EGL_EXT_image_dma_buf_import_modifiers spec doesn't
2264     * mandate it, we only accept the same modifier across all planes. */
2265    for (unsigned i = 1; i < DMA_BUF_MAX_PLANES; ++i) {
2266       if (attrs->DMABufPlaneFds[i].IsPresent) {
2267          if ((attrs->DMABufPlaneModifiersLo[0].IsPresent !=
2268               attrs->DMABufPlaneModifiersLo[i].IsPresent) ||
2269              (attrs->DMABufPlaneModifiersLo[0].Value !=
2270               attrs->DMABufPlaneModifiersLo[i].Value) ||
2271              (attrs->DMABufPlaneModifiersHi[0].Value !=
2272               attrs->DMABufPlaneModifiersHi[i].Value))
2273             return _eglError(EGL_BAD_PARAMETER,
2274                              "modifier attributes not equal");
2275       }
2276    }
2277 
2278    return EGL_TRUE;
2279 }
2280 
2281 /* Returns the total number of planes for the format or zero if it isn't a
2282  * valid fourcc format.
2283  */
2284 static unsigned
2285 dri2_num_fourcc_format_planes(EGLint format)
2286 {
2287    switch (format) {
2288    case DRM_FORMAT_R8:
2289    case DRM_FORMAT_RG88:
2290    case DRM_FORMAT_GR88:
2291    case DRM_FORMAT_R16:
2292    case DRM_FORMAT_GR1616:
2293    case DRM_FORMAT_RGB332:
2294    case DRM_FORMAT_BGR233:
2295    case DRM_FORMAT_XRGB4444:
2296    case DRM_FORMAT_XBGR4444:
2297    case DRM_FORMAT_RGBX4444:
2298    case DRM_FORMAT_BGRX4444:
2299    case DRM_FORMAT_ARGB4444:
2300    case DRM_FORMAT_ABGR4444:
2301    case DRM_FORMAT_RGBA4444:
2302    case DRM_FORMAT_BGRA4444:
2303    case DRM_FORMAT_XRGB1555:
2304    case DRM_FORMAT_XBGR1555:
2305    case DRM_FORMAT_RGBX5551:
2306    case DRM_FORMAT_BGRX5551:
2307    case DRM_FORMAT_ARGB1555:
2308    case DRM_FORMAT_ABGR1555:
2309    case DRM_FORMAT_RGBA5551:
2310    case DRM_FORMAT_BGRA5551:
2311    case DRM_FORMAT_RGB565:
2312    case DRM_FORMAT_BGR565:
2313    case DRM_FORMAT_RGB888:
2314    case DRM_FORMAT_BGR888:
2315    case DRM_FORMAT_XRGB8888:
2316    case DRM_FORMAT_XBGR8888:
2317    case DRM_FORMAT_RGBX8888:
2318    case DRM_FORMAT_BGRX8888:
2319    case DRM_FORMAT_ARGB8888:
2320    case DRM_FORMAT_ABGR8888:
2321    case DRM_FORMAT_RGBA8888:
2322    case DRM_FORMAT_BGRA8888:
2323    case DRM_FORMAT_XRGB2101010:
2324    case DRM_FORMAT_XBGR2101010:
2325    case DRM_FORMAT_RGBX1010102:
2326    case DRM_FORMAT_BGRX1010102:
2327    case DRM_FORMAT_ARGB2101010:
2328    case DRM_FORMAT_ABGR2101010:
2329    case DRM_FORMAT_RGBA1010102:
2330    case DRM_FORMAT_BGRA1010102:
2331    case DRM_FORMAT_ABGR16161616:
2332    case DRM_FORMAT_XBGR16161616:
2333    case DRM_FORMAT_XBGR16161616F:
2334    case DRM_FORMAT_ABGR16161616F:
2335    case DRM_FORMAT_YUYV:
2336    case DRM_FORMAT_YVYU:
2337    case DRM_FORMAT_UYVY:
2338    case DRM_FORMAT_VYUY:
2339    case DRM_FORMAT_AYUV:
2340    case DRM_FORMAT_XYUV8888:
2341    case DRM_FORMAT_Y210:
2342    case DRM_FORMAT_Y212:
2343    case DRM_FORMAT_Y216:
2344    case DRM_FORMAT_Y410:
2345    case DRM_FORMAT_Y412:
2346    case DRM_FORMAT_Y416:
2347       return 1;
2348 
2349    case DRM_FORMAT_NV12:
2350    case DRM_FORMAT_NV21:
2351    case DRM_FORMAT_NV16:
2352    case DRM_FORMAT_NV61:
2353    case DRM_FORMAT_NV15:
2354    case DRM_FORMAT_NV20:
2355    case DRM_FORMAT_NV30:
2356    case DRM_FORMAT_P010:
2357    case DRM_FORMAT_P012:
2358    case DRM_FORMAT_P016:
2359    case DRM_FORMAT_P030:
2360       return 2;
2361 
2362    case DRM_FORMAT_YUV410:
2363    case DRM_FORMAT_YVU410:
2364    case DRM_FORMAT_YUV411:
2365    case DRM_FORMAT_YVU411:
2366    case DRM_FORMAT_YUV420:
2367    case DRM_FORMAT_YVU420:
2368    case DRM_FORMAT_YUV422:
2369    case DRM_FORMAT_YVU422:
2370    case DRM_FORMAT_YUV444:
2371    case DRM_FORMAT_YVU444:
2372       return 3;
2373 
2374    default:
2375       return 0;
2376    }
2377 }
2378 
2379 /* Returns the total number of file descriptors. Zero indicates an error. */
2380 static unsigned
2381 dri2_check_dma_buf_format(const _EGLImageAttribs *attrs)
2382 {
2383    unsigned plane_n = dri2_num_fourcc_format_planes(attrs->DMABufFourCC.Value);
2384    if (plane_n == 0) {
2385       _eglError(EGL_BAD_MATCH, "unknown drm fourcc format");
2386       return 0;
2387    }
2388 
2389    for (unsigned i = plane_n; i < DMA_BUF_MAX_PLANES; i++) {
2390       /**
2391        * The modifiers extension spec says:
2392        *
2393        * "Modifiers may modify any attribute of a buffer import, including
2394        *  but not limited to adding extra planes to a format which
2395        *  otherwise does not have those planes. As an example, a modifier
2396        *  may add a plane for an external compression buffer to a
2397        *  single-plane format. The exact meaning and effect of any
2398        *  modifier is canonically defined by drm_fourcc.h, not as part of
2399        *  this extension."
2400        */
2401       if (attrs->DMABufPlaneModifiersLo[i].IsPresent &&
2402           attrs->DMABufPlaneModifiersHi[i].IsPresent) {
2403          plane_n = i + 1;
2404       }
2405    }
2406 
2407    /**
2408     * The spec says:
2409     *
2410     * "* If <target> is EGL_LINUX_DMA_BUF_EXT, and the list of attributes is
2411     *    incomplete, EGL_BAD_PARAMETER is generated."
2412     */
2413    for (unsigned i = 0; i < plane_n; ++i) {
2414       if (!attrs->DMABufPlaneFds[i].IsPresent ||
2415           !attrs->DMABufPlaneOffsets[i].IsPresent ||
2416           !attrs->DMABufPlanePitches[i].IsPresent) {
2417          _eglError(EGL_BAD_PARAMETER, "plane attribute(s) missing");
2418          return 0;
2419       }
2420    }
2421 
2422    /**
2423     * The spec also says:
2424     *
2425     * "If <target> is EGL_LINUX_DMA_BUF_EXT, and the EGL_LINUX_DRM_FOURCC_EXT
2426     *  attribute indicates a single-plane format, EGL_BAD_ATTRIBUTE is
2427     *  generated if any of the EGL_DMA_BUF_PLANE1_* or EGL_DMA_BUF_PLANE2_*
2428     *  or EGL_DMA_BUF_PLANE3_* attributes are specified."
2429     */
2430    for (unsigned i = plane_n; i < DMA_BUF_MAX_PLANES; ++i) {
2431       if (attrs->DMABufPlaneFds[i].IsPresent ||
2432           attrs->DMABufPlaneOffsets[i].IsPresent ||
2433           attrs->DMABufPlanePitches[i].IsPresent) {
2434          _eglError(EGL_BAD_ATTRIBUTE, "too many plane attributes");
2435          return 0;
2436       }
2437    }
2438 
2439    return plane_n;
2440 }
2441 
2442 static EGLBoolean
2443 dri2_query_dma_buf_formats(_EGLDisplay *disp, EGLint max, EGLint *formats,
2444                            EGLint *count)
2445 {
2446    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2447    if (max < 0 || (max > 0 && formats == NULL)) {
2448       _eglError(EGL_BAD_PARAMETER, "invalid value for max count of formats");
2449       goto fail;
2450    }
2451 
2452    if (!dri2_dpy->has_dmabuf_import)
2453       goto fail;
2454 
2455    if (!dri_query_dma_buf_formats(dri2_dpy->dri_screen_render_gpu,
2456                                             max, formats, count))
2457       goto fail;
2458 
2459    if (max > 0) {
2460       /* Assert that all of the formats returned are actually fourcc formats.
2461        * Some day, if we want the internal interface function to be able to
2462        * return the fake fourcc formats defined in mesa_interface.h, we'll have
2463        * to do something more clever here to pare the list down to just real
2464        * fourcc formats so that we don't leak the fake internal ones.
2465        */
2466       for (int i = 0; i < *count; i++) {
2467          assert(dri2_num_fourcc_format_planes(formats[i]) > 0);
2468       }
2469    }
2470 
2471    mtx_unlock(&dri2_dpy->lock);
2472 
2473    return EGL_TRUE;
2474 
2475 fail:
2476    mtx_unlock(&dri2_dpy->lock);
2477    return EGL_FALSE;
2478 }
2479 
2480 static EGLBoolean
2481 dri2_query_dma_buf_modifiers(_EGLDisplay *disp, EGLint format, EGLint max,
2482                              EGLuint64KHR *modifiers, EGLBoolean *external_only,
2483                              EGLint *count)
2484 {
2485    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2486 
2487    if (dri2_num_fourcc_format_planes(format) == 0)
2488       return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2489                                    "invalid fourcc format");
2490 
2491    if (max < 0)
2492       return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2493                                    "invalid value for max count of formats");
2494 
2495    if (max > 0 && modifiers == NULL)
2496       return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2497                                    "invalid modifiers array");
2498 
2499    if (!dri2_dpy->has_dmabuf_import) {
2500       mtx_unlock(&dri2_dpy->lock);
2501       return EGL_FALSE;
2502    }
2503 
2504    if (dri_query_dma_buf_modifiers(
2505           dri2_dpy->dri_screen_render_gpu, format, max, modifiers,
2506           (unsigned int *)external_only, count) == false)
2507       return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2508                                    "invalid format");
2509 
2510    mtx_unlock(&dri2_dpy->lock);
2511 
2512    return EGL_TRUE;
2513 }
2514 
2515 /**
2516  * The spec says:
2517  *
2518  * "If eglCreateImageKHR is successful for a EGL_LINUX_DMA_BUF_EXT target, the
2519  *  EGL will take a reference to the dma_buf(s) which it will release at any
2520  *  time while the EGLDisplay is initialized. It is the responsibility of the
2521  *  application to close the dma_buf file descriptors."
2522  *
2523  * Therefore we must never close or otherwise modify the file descriptors.
2524  */
2525 _EGLImage *
2526 dri2_create_image_dma_buf(_EGLDisplay *disp, _EGLContext *ctx,
2527                           EGLClientBuffer buffer, const EGLint *attr_list)
2528 {
2529    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2530    _EGLImage *res;
2531    _EGLImageAttribs attrs;
2532    struct dri_image *dri_image;
2533    unsigned num_fds;
2534    int fds[DMA_BUF_MAX_PLANES];
2535    int pitches[DMA_BUF_MAX_PLANES];
2536    int offsets[DMA_BUF_MAX_PLANES];
2537    uint64_t modifier;
2538    unsigned error = __DRI_IMAGE_ERROR_SUCCESS;
2539    EGLint egl_error;
2540 
2541    /**
2542     * The spec says:
2543     *
2544     * ""* If <target> is EGL_LINUX_DMA_BUF_EXT and <buffer> is not NULL, the
2545     *     error EGL_BAD_PARAMETER is generated."
2546     */
2547    if (buffer != NULL) {
2548       _eglError(EGL_BAD_PARAMETER, "buffer not NULL");
2549       return NULL;
2550    }
2551 
2552    if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2553       return NULL;
2554 
2555    if (!dri2_check_dma_buf_attribs(&attrs))
2556       return NULL;
2557 
2558    num_fds = dri2_check_dma_buf_format(&attrs);
2559    if (!num_fds)
2560       return NULL;
2561 
2562    for (unsigned i = 0; i < num_fds; ++i) {
2563       fds[i] = attrs.DMABufPlaneFds[i].Value;
2564       pitches[i] = attrs.DMABufPlanePitches[i].Value;
2565       offsets[i] = attrs.DMABufPlaneOffsets[i].Value;
2566    }
2567 
2568    /* dri2_check_dma_buf_attribs ensures that the modifier, if available,
2569     * will be present in attrs.DMABufPlaneModifiersLo[0] and
2570     * attrs.DMABufPlaneModifiersHi[0] */
2571    if (attrs.DMABufPlaneModifiersLo[0].IsPresent) {
2572       modifier = combine_u32_into_u64(attrs.DMABufPlaneModifiersHi[0].Value,
2573                                       attrs.DMABufPlaneModifiersLo[0].Value);
2574    } else {
2575       modifier = DRM_FORMAT_MOD_INVALID;
2576    }
2577 
2578    uint32_t flags = 0;
2579    if (attrs.ProtectedContent)
2580       flags |= __DRI_IMAGE_PROTECTED_CONTENT_FLAG;
2581 
2582    dri_image = dri2_from_dma_bufs(
2583       dri2_dpy->dri_screen_render_gpu, attrs.Width, attrs.Height,
2584       attrs.DMABufFourCC.Value, modifier, fds, num_fds, pitches, offsets,
2585       attrs.DMABufYuvColorSpaceHint.Value, attrs.DMABufSampleRangeHint.Value,
2586       attrs.DMABufChromaHorizontalSiting.Value,
2587       attrs.DMABufChromaVerticalSiting.Value,
2588       flags, &error, NULL);
2589 
2590    egl_error = egl_error_from_dri_image_error(error);
2591    if (egl_error != EGL_SUCCESS)
2592       _eglError(egl_error, "createImageFromDmaBufs failed");
2593 
2594    if (!dri_image)
2595       return EGL_NO_IMAGE_KHR;
2596 
2597    res = dri2_create_image_from_dri(disp, dri_image);
2598 
2599    return res;
2600 }
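
/* A minimal sketch of the client-side call this import path serves, assuming
 * a single-plane linear XRGB8888 buffer (the fd, width, height and stride
 * variables are hypothetical):
 *
 *    EGLint attrs[] = {
 *       EGL_WIDTH, width,
 *       EGL_HEIGHT, height,
 *       EGL_LINUX_DRM_FOURCC_EXT, DRM_FORMAT_XRGB8888,
 *       EGL_DMA_BUF_PLANE0_FD_EXT, fd,
 *       EGL_DMA_BUF_PLANE0_OFFSET_EXT, 0,
 *       EGL_DMA_BUF_PLANE0_PITCH_EXT, stride,
 *       EGL_NONE,
 *    };
 *    EGLImageKHR img = eglCreateImageKHR(dpy, EGL_NO_CONTEXT,
 *                                        EGL_LINUX_DMA_BUF_EXT, NULL, attrs);
 *
 * The dma_buf fd stays owned by the application, per the spec text quoted
 * above dri2_create_image_dma_buf(). */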
2601 
2602 static _EGLImage *
2603 dri2_create_drm_image_mesa(_EGLDisplay *disp, const EGLint *attr_list)
2604 {
2605    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2606    struct dri2_egl_image *dri2_img;
2607    _EGLImageAttribs attrs;
2608    unsigned int dri_use, valid_mask;
2609    int format;
2610 
2611    if (!attr_list) {
2612       _eglError(EGL_BAD_PARAMETER, __func__);
2613       goto fail;
2614    }
2615 
2616    if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2617       goto fail;
2618 
2619    if (attrs.Width <= 0 || attrs.Height <= 0) {
2620       _eglError(EGL_BAD_PARAMETER, __func__);
2621       goto fail;
2622    }
2623 
2624    switch (attrs.DRMBufferFormatMESA) {
2625    case EGL_DRM_BUFFER_FORMAT_ARGB32_MESA:
2626       format = PIPE_FORMAT_BGRA8888_UNORM;
2627       break;
2628    default:
2629       _eglError(EGL_BAD_PARAMETER, __func__);
2630       goto fail;
2631    }
2632 
2633    valid_mask = EGL_DRM_BUFFER_USE_SCANOUT_MESA |
2634                 EGL_DRM_BUFFER_USE_SHARE_MESA | EGL_DRM_BUFFER_USE_CURSOR_MESA;
2635    if (attrs.DRMBufferUseMESA & ~valid_mask) {
2636       _eglError(EGL_BAD_PARAMETER, __func__);
2637       goto fail;
2638    }
2639 
2640    dri_use = 0;
2641    if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_SHARE_MESA)
2642       dri_use |= __DRI_IMAGE_USE_SHARE;
2643    if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_SCANOUT_MESA)
2644       dri_use |= __DRI_IMAGE_USE_SCANOUT;
2645    if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_CURSOR_MESA)
2646       dri_use |= __DRI_IMAGE_USE_CURSOR;
2647 
2648    dri2_img = malloc(sizeof *dri2_img);
2649    if (!dri2_img) {
2650       _eglError(EGL_BAD_ALLOC, "dri2_create_image_khr");
2651       goto fail;
2652    }
2653 
2654    _eglInitImage(&dri2_img->base, disp);
2655 
2656    dri2_img->dri_image =
2657       dri_create_image(dri2_dpy->dri_screen_render_gpu, attrs.Width,
2658                                    attrs.Height, format, NULL, 0, dri_use, dri2_img);
2659    if (dri2_img->dri_image == NULL) {
2660       free(dri2_img);
2661       _eglError(EGL_BAD_ALLOC, "dri2_create_drm_image_mesa");
2662       goto fail;
2663    }
2664 
2665    mtx_unlock(&dri2_dpy->lock);
2666 
2667    return &dri2_img->base;
2668 
2669 fail:
2670    mtx_unlock(&dri2_dpy->lock);
2671    return EGL_NO_IMAGE_KHR;
2672 }
2673 
2674 static EGLBoolean
2675 dri2_export_drm_image_mesa(_EGLDisplay *disp, _EGLImage *img, EGLint *name,
2676                            EGLint *handle, EGLint *stride)
2677 {
2678    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2679    struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2680 
2681    if (name && !dri2_query_image(dri2_img->dri_image,
2682                                             __DRI_IMAGE_ATTRIB_NAME, name))
2683       return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_ALLOC,
2684                                    "dri2_export_drm_image_mesa");
2685 
2686    if (handle)
2687       dri2_query_image(dri2_img->dri_image,
2688                                   __DRI_IMAGE_ATTRIB_HANDLE, handle);
2689 
2690    if (stride)
2691       dri2_query_image(dri2_img->dri_image,
2692                                   __DRI_IMAGE_ATTRIB_STRIDE, stride);
2693 
2694    mtx_unlock(&dri2_dpy->lock);
2695 
2696    return EGL_TRUE;
2697 }
2698 
2699 /**
2700  * Checks if we can support EGL_MESA_image_dma_buf_export on this image.
2701  *
2702  * The spec provides a boolean return for the driver to reject exporting for
2703  * basically any reason, but doesn't specify any particular error cases.  For
2704  * now, we just fail if we don't have a DRM fourcc for the format.
2705  */
2706 static bool
2707 dri2_can_export_dma_buf_image(_EGLDisplay *disp, _EGLImage *img)
2708 {
2709    struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2710    EGLint fourcc;
2711 
2712    if (!dri2_query_image(dri2_img->dri_image,
2713                                     __DRI_IMAGE_ATTRIB_FOURCC, &fourcc)) {
2714       return false;
2715    }
2716 
2717    return true;
2718 }
2719 
2720 static EGLBoolean
2721 dri2_export_dma_buf_image_query_mesa(_EGLDisplay *disp, _EGLImage *img,
2722                                      EGLint *fourcc, EGLint *nplanes,
2723                                      EGLuint64KHR *modifiers)
2724 {
2725    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2726    struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2727    int num_planes;
2728 
2729    if (!dri2_can_export_dma_buf_image(disp, img)) {
2730       mtx_unlock(&dri2_dpy->lock);
2731       return EGL_FALSE;
2732    }
2733 
2734    dri2_query_image(dri2_img->dri_image,
2735                                __DRI_IMAGE_ATTRIB_NUM_PLANES, &num_planes);
2736    if (nplanes)
2737       *nplanes = num_planes;
2738 
2739    if (fourcc)
2740       dri2_query_image(dri2_img->dri_image,
2741                                   __DRI_IMAGE_ATTRIB_FOURCC, fourcc);
2742 
2743    if (modifiers) {
2744       int mod_hi, mod_lo;
2745       uint64_t modifier = DRM_FORMAT_MOD_INVALID;
2746       bool query;
2747 
2748       query = dri2_query_image(
2749          dri2_img->dri_image, __DRI_IMAGE_ATTRIB_MODIFIER_UPPER, &mod_hi);
2750       query &= dri2_query_image(
2751          dri2_img->dri_image, __DRI_IMAGE_ATTRIB_MODIFIER_LOWER, &mod_lo);
2752       if (query)
2753          modifier = combine_u32_into_u64(mod_hi, mod_lo);
2754 
2755       for (int i = 0; i < num_planes; i++)
2756          modifiers[i] = modifier;
2757    }
2758 
2759    mtx_unlock(&dri2_dpy->lock);
2760 
2761    return EGL_TRUE;
2762 }
2763 
2764 static EGLBoolean
2765 dri2_export_dma_buf_image_mesa(_EGLDisplay *disp, _EGLImage *img, int *fds,
2766                                EGLint *strides, EGLint *offsets)
2767 {
2768    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2769    struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2770    EGLint nplanes;
2771 
2772    if (!dri2_can_export_dma_buf_image(disp, img)) {
2773       mtx_unlock(&dri2_dpy->lock);
2774       return EGL_FALSE;
2775    }
2776 
2777    /* EGL_MESA_image_dma_buf_export spec says:
2778     *    "If the number of fds is less than the number of planes, then
2779     *    subsequent fd slots should contain -1."
2780     */
2781    if (fds) {
2782       /* Query nplanes so that we know how big the given array is. */
2783       dri2_query_image(dri2_img->dri_image,
2784                                   __DRI_IMAGE_ATTRIB_NUM_PLANES, &nplanes);
2785       memset(fds, -1, nplanes * sizeof(int));
2786    }
2787 
2788    /* rework later to provide multiple fds/strides/offsets */
2789    if (fds)
2790       dri2_query_image(dri2_img->dri_image, __DRI_IMAGE_ATTRIB_FD,
2791                                   fds);
2792 
2793    if (strides)
2794       dri2_query_image(dri2_img->dri_image,
2795                                   __DRI_IMAGE_ATTRIB_STRIDE, strides);
2796 
2797    if (offsets) {
2798       int img_offset;
2799       bool ret = dri2_query_image(
2800          dri2_img->dri_image, __DRI_IMAGE_ATTRIB_OFFSET, &img_offset);
2801       if (ret)
2802          offsets[0] = img_offset;
2803       else
2804          offsets[0] = 0;
2805    }
2806 
2807    mtx_unlock(&dri2_dpy->lock);
2808 
2809    return EGL_TRUE;
2810 }
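
/* Sketch of the intended two-step export flow on the application side; the
 * entry points are the EGL_MESA_image_dma_buf_export ones, while the
 * surrounding variables are hypothetical:
 *
 *    int fourcc, nplanes;
 *    EGLuint64KHR modifiers[4];
 *    eglExportDMABUFImageQueryMESA(dpy, img, &fourcc, &nplanes, modifiers);
 *
 *    int fds[4];
 *    EGLint strides[4], offsets[4];
 *    eglExportDMABUFImageMESA(dpy, img, fds, strides, offsets);
 */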
2811 
2812 #endif
2813 
2814 _EGLImage *
2815 dri2_create_image_khr(_EGLDisplay *disp, _EGLContext *ctx, EGLenum target,
2816                       EGLClientBuffer buffer, const EGLint *attr_list)
2817 {
2818    switch (target) {
2819    case EGL_GL_TEXTURE_2D_KHR:
2820    case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR:
2821    case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X_KHR:
2822    case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y_KHR:
2823    case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_KHR:
2824    case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z_KHR:
2825    case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_KHR:
2826    case EGL_GL_TEXTURE_3D_KHR:
2827       return dri2_create_image_khr_texture(disp, ctx, target, buffer,
2828                                            attr_list);
2829    case EGL_GL_RENDERBUFFER_KHR:
2830       return dri2_create_image_khr_renderbuffer(disp, ctx, buffer, attr_list);
2831 #ifdef HAVE_LIBDRM
2832    case EGL_DRM_BUFFER_MESA:
2833       return dri2_create_image_mesa_drm_buffer(disp, ctx, buffer, attr_list);
2834    case EGL_LINUX_DMA_BUF_EXT:
2835       return dri2_create_image_dma_buf(disp, ctx, buffer, attr_list);
2836 #endif
2837 #ifdef HAVE_WAYLAND_PLATFORM
2838    case EGL_WAYLAND_BUFFER_WL:
2839       return dri2_create_image_wayland_wl_buffer(disp, ctx, buffer, attr_list);
2840 #endif
2841    default:
2842       _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2843       return EGL_NO_IMAGE_KHR;
2844    }
2845 }
2846 
2847 static EGLBoolean
2848 dri2_destroy_image_khr(_EGLDisplay *disp, _EGLImage *image)
2849 {
2850    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2851    struct dri2_egl_image *dri2_img = dri2_egl_image(image);
2852 
2853    dri2_destroy_image(dri2_img->dri_image);
2854    free(dri2_img);
2855 
2856    mtx_unlock(&dri2_dpy->lock);
2857 
2858    return EGL_TRUE;
2859 }
2860 
2861 #ifdef HAVE_WAYLAND_PLATFORM
2862 
2863 static void
2864 dri2_wl_reference_buffer(void *user_data, uint32_t name, int fd,
2865                          struct wl_drm_buffer *buffer)
2866 {
2867    _EGLDisplay *disp = user_data;
2868    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2869    struct dri_image *img;
2870    int dri_components = 0;
2871 
2872    if (fd == -1)
2873       img = dri2_from_names(
2874          dri2_dpy->dri_screen_render_gpu, buffer->width, buffer->height,
2875          buffer->format, (int *)&name, 1, buffer->stride, buffer->offset, NULL);
2876    else
2877       img = dri2_from_dma_bufs(
2878          dri2_dpy->dri_screen_render_gpu, buffer->width, buffer->height,
2879          buffer->format, DRM_FORMAT_MOD_INVALID, &fd, 1, buffer->stride,
2880          buffer->offset, 0, 0, 0, 0, 0, NULL, NULL);
2881 
2882    if (img == NULL)
2883       return;
2884 
2885    dri2_query_image(img, __DRI_IMAGE_ATTRIB_COMPONENTS,
2886                                &dri_components);
2887 
2888    buffer->driver_format = NULL;
2889    for (int i = 0; i < ARRAY_SIZE(wl_drm_components); i++)
2890       if (wl_drm_components[i].dri_components == dri_components)
2891          buffer->driver_format = &wl_drm_components[i];
2892 
2893    if (buffer->driver_format == NULL)
2894       dri2_destroy_image(img);
2895    else
2896       buffer->driver_buffer = img;
2897 }
2898 
2899 static void
2900 dri2_wl_release_buffer(void *user_data, struct wl_drm_buffer *buffer)
2901 {
2902    dri2_destroy_image(buffer->driver_buffer);
2903 }
2904 
2905 static EGLBoolean
2906 dri2_bind_wayland_display_wl(_EGLDisplay *disp, struct wl_display *wl_dpy)
2907 {
2908    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2909    const struct wayland_drm_callbacks wl_drm_callbacks = {
2910       .authenticate = (int (*)(void *, uint32_t))dri2_dpy->vtbl->authenticate,
2911       .reference_buffer = dri2_wl_reference_buffer,
2912       .release_buffer = dri2_wl_release_buffer,
2913       .is_format_supported = dri2_wl_is_format_supported,
2914    };
2915    int flags = 0;
2916    char *device_name;
2917 
2918    if (dri2_dpy->wl_server_drm)
2919       goto fail;
2920 
2921    device_name = drmGetRenderDeviceNameFromFd(dri2_dpy->fd_render_gpu);
2922    if (!device_name)
2923       device_name = strdup(dri2_dpy->device_name);
2924    if (!device_name)
2925       goto fail;
2926 
2927    if (dri2_dpy->has_dmabuf_import && dri2_dpy->has_dmabuf_export)
2928       flags |= WAYLAND_DRM_PRIME;
2929 
2930    dri2_dpy->wl_server_drm =
2931       wayland_drm_init(wl_dpy, device_name, &wl_drm_callbacks, disp, flags);
2932 
2933    free(device_name);
2934 
2935    if (!dri2_dpy->wl_server_drm)
2936       goto fail;
2937 
2938 #ifdef HAVE_DRM_PLATFORM
2939    /* We have to share the wl_drm instance with gbm, so gbm can convert
2940     * wl_buffers to gbm bos. */
2941    if (dri2_dpy->gbm_dri)
2942       dri2_dpy->gbm_dri->wl_drm = dri2_dpy->wl_server_drm;
2943 #endif
2944 
2945    mtx_unlock(&dri2_dpy->lock);
2946    return EGL_TRUE;
2947 
2948 fail:
2949    mtx_unlock(&dri2_dpy->lock);
2950    return EGL_FALSE;
2951 }
2952 
2953 static EGLBoolean
2954 dri2_unbind_wayland_display_wl(_EGLDisplay *disp, struct wl_display *wl_dpy)
2955 {
2956    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2957 
2958    if (!dri2_dpy->wl_server_drm)
2959       return EGL_FALSE;
2960 
2961    wayland_drm_uninit(dri2_dpy->wl_server_drm);
2962    dri2_dpy->wl_server_drm = NULL;
2963 
2964    return EGL_TRUE;
2965 }
2966 
2967 static EGLBoolean
2968 dri2_query_wayland_buffer_wl(_EGLDisplay *disp,
2969                              struct wl_resource *buffer_resource,
2970                              EGLint attribute, EGLint *value)
2971 {
2972    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2973    struct wl_drm_buffer *buffer;
2974    const struct wl_drm_components_descriptor *format;
2975 
2976    buffer = wayland_drm_buffer_get(dri2_dpy->wl_server_drm, buffer_resource);
2977    if (!buffer)
2978       return EGL_FALSE;
2979 
2980    format = buffer->driver_format;
2981    switch (attribute) {
2982    case EGL_TEXTURE_FORMAT:
2983       *value = format->components;
2984       return EGL_TRUE;
2985    case EGL_WIDTH:
2986       *value = buffer->width;
2987       return EGL_TRUE;
2988    case EGL_HEIGHT:
2989       *value = buffer->height;
2990       return EGL_TRUE;
2991    }
2992 
2993    return EGL_FALSE;
2994 }
2995 #endif
2996 
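/* EGLSync objects are reference counted: dri2_egl_ref_sync() takes a
 * reference and dri2_egl_unref_sync() drops one.  Only when the last
 * reference goes away are the condition variable (reusable syncs), the
 * native fence fd (Android syncs) and the DRI fence destroyed, and the
 * wrapper freed. */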
2997 static void
2998 dri2_egl_ref_sync(struct dri2_egl_sync *sync)
2999 {
3000    p_atomic_inc(&sync->refcount);
3001 }
3002 
3003 static void
3004 dri2_egl_unref_sync(struct dri2_egl_display *dri2_dpy,
3005                     struct dri2_egl_sync *dri2_sync)
3006 {
3007    if (p_atomic_dec_zero(&dri2_sync->refcount)) {
3008       switch (dri2_sync->base.Type) {
3009       case EGL_SYNC_REUSABLE_KHR:
3010          cnd_destroy(&dri2_sync->cond);
3011          break;
3012       case EGL_SYNC_NATIVE_FENCE_ANDROID:
3013          if (dri2_sync->base.SyncFd != EGL_NO_NATIVE_FENCE_FD_ANDROID)
3014             close(dri2_sync->base.SyncFd);
3015          break;
3016       default:
3017          break;
3018       }
3019 
3020       if (dri2_sync->fence)
3021          dri_destroy_fence(dri2_dpy->dri_screen_render_gpu,
3022                            dri2_sync->fence);
3023 
3024       free(dri2_sync);
3025    }
3026 }
3027 
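/* eglCreateSyncKHR/eglCreateSync: create a fence, CL event, reusable or
 * Android native fence sync. Reusable syncs are backed by a CLOCK_MONOTONIC
 * condition variable; the other types wrap a DRI fence.
 */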
3028 static _EGLSync *
3029 dri2_create_sync(_EGLDisplay *disp, EGLenum type, const EGLAttrib *attrib_list)
3030 {
3031    _EGLContext *ctx = _eglGetCurrentContext();
3032    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3033    struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3034    struct dri2_egl_sync *dri2_sync;
3035    EGLint ret;
3036    pthread_condattr_t attr;
3037 
3038    dri2_sync = calloc(1, sizeof(struct dri2_egl_sync));
3039    if (!dri2_sync) {
3040       _eglError(EGL_BAD_ALLOC, "eglCreateSyncKHR");
3041       goto fail;
3042    }
3043 
3044    if (!_eglInitSync(&dri2_sync->base, disp, type, attrib_list)) {
3045       goto fail;
3046    }
3047 
3048    switch (type) {
3049    case EGL_SYNC_FENCE_KHR:
3050       dri2_sync->fence = dri_create_fence(dri2_ctx->dri_context);
3051       if (!dri2_sync->fence) {
3052          /* Why did it fail? DRI doesn't return an error code, so we emit
3053           * a generic EGL error that doesn't communicate user error.
3054           */
3055          _eglError(EGL_BAD_ALLOC, "eglCreateSyncKHR");
3056          goto fail;
3057       }
3058       break;
3059 
3060    case EGL_SYNC_CL_EVENT_KHR:
3061       dri2_sync->fence = dri_get_fence_from_cl_event(
3062          dri2_dpy->dri_screen_render_gpu, dri2_sync->base.CLEvent);
3063       /* this can only happen if the cl_event passed in is invalid. */
3064       if (!dri2_sync->fence) {
3065          _eglError(EGL_BAD_ATTRIBUTE, "eglCreateSyncKHR");
3066          goto fail;
3067       }
3068 
3069       /* the initial status must be "signaled" if the cl_event is signaled */
3070       if (dri_client_wait_sync(dri2_ctx->dri_context,
3071                                dri2_sync->fence, 0, 0))
3072          dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
3073       break;
3074 
3075    case EGL_SYNC_REUSABLE_KHR:
3076       /* initialize attr */
3077       ret = pthread_condattr_init(&attr);
3078 
3079       if (ret) {
3080          _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
3081          goto fail;
3082       }
3083 
3084 #if !defined(__APPLE__) && !defined(__MACOSX)
3085       /* change clock attribute to CLOCK_MONOTONIC */
3086       ret = pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);
3087 
3088       if (ret) {
3089          _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
3090          goto fail;
3091       }
3092 #endif
3093 
3094       ret = pthread_cond_init(&dri2_sync->cond, &attr);
3095 
3096       if (ret) {
3097          _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
3098          goto fail;
3099       }
3100 
3101       /* initial status of reusable sync must be "unsignaled" */
3102       dri2_sync->base.SyncStatus = EGL_UNSIGNALED_KHR;
3103       break;
3104 
3105    case EGL_SYNC_NATIVE_FENCE_ANDROID:
3106       dri2_sync->fence = dri_create_fence_fd(
3107             dri2_ctx->dri_context, dri2_sync->base.SyncFd);
3108       if (!dri2_sync->fence) {
3109          _eglError(EGL_BAD_ATTRIBUTE, "eglCreateSyncKHR");
3110          goto fail;
3111       }
3112       break;
3113    }
3114 
3115    p_atomic_set(&dri2_sync->refcount, 1);
3116    mtx_unlock(&dri2_dpy->lock);
3117 
3118    return &dri2_sync->base;
3119 
3120 fail:
3121    free(dri2_sync);
3122    mtx_unlock(&dri2_dpy->lock);
3123    return NULL;
3124 }
3125 
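/* eglDestroySyncKHR: signal and wake any waiters of an unsignaled reusable
 * sync, then drop the handle's reference.
 */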
3126 static EGLBoolean
3127 dri2_destroy_sync(_EGLDisplay *disp, _EGLSync *sync)
3128 {
3129    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3130    struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3131    EGLint ret = EGL_TRUE;
3132    EGLint err;
3133 
3134    /* if type of sync is EGL_SYNC_REUSABLE_KHR and it is not signaled yet,
3135     * then unblock all threads possibly blocked by the reusable sync before
3136     * destroying it.
3137     */
3138    if (dri2_sync->base.Type == EGL_SYNC_REUSABLE_KHR &&
3139        dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR) {
3140       dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
3141       /* unblock all threads currently blocked by sync */
3142       err = cnd_broadcast(&dri2_sync->cond);
3143 
3144       if (err) {
3145          _eglError(EGL_BAD_ACCESS, "eglDestroySyncKHR");
3146          ret = EGL_FALSE;
3147       }
3148    }
3149 
3150    dri2_egl_unref_sync(dri2_dpy, dri2_sync);
3151 
3152    mtx_unlock(&dri2_dpy->lock);
3153 
3154    return ret;
3155 }
3156 
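/* eglDupNativeFenceFDANDROID: return a dup of the sync's native fence fd,
 * querying it from the DRI fence first if it has not been created yet.
 */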
3157 static EGLint
3158 dri2_dup_native_fence_fd(_EGLDisplay *disp, _EGLSync *sync)
3159 {
3160    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3161    struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3162 
3163    assert(sync->Type == EGL_SYNC_NATIVE_FENCE_ANDROID);
3164 
3165    if (sync->SyncFd == EGL_NO_NATIVE_FENCE_FD_ANDROID) {
3166       /* Try to retrieve the actual native fence fd. If rendering has not
3167        * been flushed yet, this will just return -1, aka NO_NATIVE_FENCE_FD.
3168        */
3169       sync->SyncFd = dri_get_fence_fd(
3170          dri2_dpy->dri_screen_render_gpu, dri2_sync->fence);
3171    }
3172 
3173    mtx_unlock(&dri2_dpy->lock);
3174 
3175    if (sync->SyncFd == EGL_NO_NATIVE_FENCE_FD_ANDROID) {
3176       /* If the native fence fd still wasn't created, return an error. */
3177       _eglError(EGL_BAD_PARAMETER, "eglDupNativeFenceFDANDROID");
3178       return EGL_NO_NATIVE_FENCE_FD_ANDROID;
3179    }
3180 
3181    assert(sync_valid_fd(sync->SyncFd));
3182 
3183    return os_dupfd_cloexec(sync->SyncFd);
3184 }
3185 
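/* eglSetBlobCacheFuncsANDROID: forward the application's shader/program
 * cache callbacks to the DRI screen.
 */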
3186 static void
3187 dri2_set_blob_cache_funcs(_EGLDisplay *disp, EGLSetBlobFuncANDROID set,
3188                           EGLGetBlobFuncANDROID get)
3189 {
3190    struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3191    dri_set_blob_cache_funcs(dri2_dpy->dri_screen_render_gpu, set, get);
3192    mtx_unlock(&dri2_dpy->lock);
3193 }
3194 
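/* eglClientWaitSyncKHR: wait on a sync from the CPU. Fence-backed syncs are
 * waited on through the DRI fence; reusable syncs block on the condition
 * variable, either forever or until the relative timeout expires.
 */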
3195 static EGLint
3196 dri2_client_wait_sync(_EGLDisplay *disp, _EGLSync *sync, EGLint flags,
3197                       EGLTime timeout)
3198 {
3199    _EGLContext *ctx = _eglGetCurrentContext();
3200    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
3201    struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3202    struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3203    unsigned wait_flags = 0;
3204 
3205    EGLint ret = EGL_CONDITION_SATISFIED_KHR;
3206 
3207    /* The EGL_KHR_fence_sync spec states:
3208     *
3209     *    "If no context is current for the bound API,
3210     *     the EGL_SYNC_FLUSH_COMMANDS_BIT_KHR bit is ignored."
3211     */
3212    if (dri2_ctx && flags & EGL_SYNC_FLUSH_COMMANDS_BIT_KHR)
3213       wait_flags |= __DRI2_FENCE_FLAG_FLUSH_COMMANDS;
3214 
3215    /* take a reference on the sync object while waiting */
3216    dri2_egl_ref_sync(dri2_sync);
3217 
3218    switch (sync->Type) {
3219    case EGL_SYNC_FENCE_KHR:
3220    case EGL_SYNC_NATIVE_FENCE_ANDROID:
3221    case EGL_SYNC_CL_EVENT_KHR:
3222       if (dri_client_wait_sync(
3223              dri2_ctx ? dri2_ctx->dri_context : NULL, dri2_sync->fence,
3224              wait_flags, timeout))
3225          dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
3226       else
3227          ret = EGL_TIMEOUT_EXPIRED_KHR;
3228       break;
3229 
3230    case EGL_SYNC_REUSABLE_KHR:
3231       if (dri2_ctx && dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR &&
3232           (flags & EGL_SYNC_FLUSH_COMMANDS_BIT_KHR)) {
3233          /* flush context if EGL_SYNC_FLUSH_COMMANDS_BIT_KHR is set */
3234          dri2_gl_flush();
3235       }
3236 
3237       /* if timeout is EGL_FOREVER_KHR, wait without any timeout. */
3238       if (timeout == EGL_FOREVER_KHR) {
3239          mtx_lock(&dri2_sync->mutex);
3240          cnd_wait(&dri2_sync->cond, &dri2_sync->mutex);
3241          mtx_unlock(&dri2_sync->mutex);
3242       } else {
3243          /* if the reusable sync has not yet been signaled */
3244          if (dri2_sync->base.SyncStatus != EGL_SIGNALED_KHR) {
3245             /* timespecs for cnd_timedwait */
3246             struct timespec current;
3247             struct timespec expire;
3248 
3249             /* We override the clock to monotonic when creating the condition
3250              * variable. */
3251             clock_gettime(CLOCK_MONOTONIC, &current);
3252 
3253             /* calculating when to expire */
3254             expire.tv_nsec = timeout % 1000000000L;
3255             expire.tv_sec = timeout / 1000000000L;
3256 
3257             expire.tv_nsec += current.tv_nsec;
3258             expire.tv_sec += current.tv_sec;
3259 
3260             /* expire.tv_nsec is now a number between 0 and 1999999998 */
3261             if (expire.tv_nsec > 999999999L) {
3262                expire.tv_sec++;
3263                expire.tv_nsec -= 1000000000L;
3264             }
3265 
3266             mtx_lock(&dri2_sync->mutex);
3267             ret = cnd_timedwait(&dri2_sync->cond, &dri2_sync->mutex, &expire);
3268             mtx_unlock(&dri2_sync->mutex);
3269 
3270             if (ret == thrd_timedout) {
3271                if (dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR) {
3272                   ret = EGL_TIMEOUT_EXPIRED_KHR;
3273                } else {
3274                   _eglError(EGL_BAD_ACCESS, "eglClientWaitSyncKHR");
3275                   ret = EGL_FALSE;
3276                }
3277             }
3278          }
3279       }
3280       break;
3281    }
3282 
3283    dri2_egl_unref_sync(dri2_dpy, dri2_sync);
3284 
3285    return ret;
3286 }
3287 
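/* eglSignalSyncKHR: only valid for reusable syncs; update the status and,
 * when signaling, broadcast to wake any blocked waiters.
 */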
3288 static EGLBoolean
3289 dri2_signal_sync(_EGLDisplay *disp, _EGLSync *sync, EGLenum mode)
3290 {
3291    struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3292    EGLint ret;
3293 
3294    if (sync->Type != EGL_SYNC_REUSABLE_KHR)
3295       return _eglError(EGL_BAD_MATCH, "eglSignalSyncKHR");
3296 
3297    if (mode != EGL_SIGNALED_KHR && mode != EGL_UNSIGNALED_KHR)
3298       return _eglError(EGL_BAD_ATTRIBUTE, "eglSignalSyncKHR");
3299 
3300    dri2_sync->base.SyncStatus = mode;
3301 
3302    if (mode == EGL_SIGNALED_KHR) {
3303       ret = cnd_broadcast(&dri2_sync->cond);
3304 
3305       /* failed to broadcast */
3306       if (ret)
3307          return _eglError(EGL_BAD_ACCESS, "eglSignalSyncKHR");
3308    }
3309 
3310    return EGL_TRUE;
3311 }
3312 
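/* eglWaitSyncKHR: make the GPU (server side) wait for the fence instead of
 * blocking the client.
 */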
3313 static EGLint
3314 dri2_server_wait_sync(_EGLDisplay *disp, _EGLSync *sync)
3315 {
3316    _EGLContext *ctx = _eglGetCurrentContext();
3317    struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3318    struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3319 
3320    dri_server_wait_sync(dri2_ctx->dri_context, dri2_sync->fence,
3321                         0);
3322    return EGL_TRUE;
3323 }
3324 
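/* mesa_glinterop entry points used for OpenGL/OpenCL interop (e.g.
 * cl_khr_gl_sharing): query device info, export GL objects and flush them.
 */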
3325 static int
3326 dri2_interop_query_device_info(_EGLDisplay *disp, _EGLContext *ctx,
3327                                struct mesa_glinterop_device_info *out)
3328 {
3329    struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3330 
3331    return dri_interop_query_device_info(dri2_ctx->dri_context, out);
3332 }
3333 
3334 static int
3335 dri2_interop_export_object(_EGLDisplay *disp, _EGLContext *ctx,
3336                            struct mesa_glinterop_export_in *in,
3337                            struct mesa_glinterop_export_out *out)
3338 {
3339    struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3340 
3341    return dri_interop_export_object(dri2_ctx->dri_context, in, out);
3342 }
3343 
3344 static int
3345 dri2_interop_flush_objects(_EGLDisplay *disp, _EGLContext *ctx, unsigned count,
3346                            struct mesa_glinterop_export_in *objects,
3347                            struct mesa_glinterop_flush_out *out)
3348 {
3349    struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3350 
3351    return dri_interop_flush_objects(dri2_ctx->dri_context, count,
3352                                     objects, out);
3353 }
3354 
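/* eglQuerySupportedCompressionRatesEXT (EGL_EXT_surface_compression): query
 * the fixed-rate compression values supported for this config, or report
 * zero rates when compression modifiers are unsupported.
 */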
3355 static EGLBoolean
3356 dri2_query_supported_compression_rates(_EGLDisplay *disp, _EGLConfig *config,
3357                                        const EGLAttrib *attr_list,
3358                                        EGLint *rates, EGLint rate_size,
3359                                        EGLint *num_rate)
3360 {
3361    struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
3362    struct dri2_egl_config *conf = dri2_egl_config(config);
3363    enum __DRIFixedRateCompression dri_rates[rate_size];
3364 
3365    if (dri2_dpy->has_compression_modifiers) {
3366       const struct dri_config *dri_conf =
3367          dri2_get_dri_config(conf, EGL_WINDOW_BIT, EGL_GL_COLORSPACE_LINEAR);
3368       if (!dri2_query_compression_rates(
3369              dri2_dpy->dri_screen_render_gpu, dri_conf, rate_size, dri_rates,
3370              num_rate))
3371          return EGL_FALSE;
3372 
3373       for (int i = 0; i < *num_rate && i < rate_size; ++i)
3374          rates[i] = dri_rates[i];
3375       return EGL_TRUE;
3376    }
3377    *num_rate = 0;
3378    return EGL_TRUE;
3379 }
3380 
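/* The EGL driver vtable: maps the generic EGL API entry points onto the DRI2
 * implementations above and in the platform backends.
 */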
3381 const _EGLDriver _eglDriver = {
3382    .Initialize = dri2_initialize,
3383    .Terminate = dri2_terminate,
3384    .CreateContext = dri2_create_context,
3385    .DestroyContext = dri2_destroy_context,
3386    .MakeCurrent = dri2_make_current,
3387    .CreateWindowSurface = dri2_create_window_surface,
3388    .CreatePixmapSurface = dri2_create_pixmap_surface,
3389    .CreatePbufferSurface = dri2_create_pbuffer_surface,
3390    .DestroySurface = dri2_destroy_surface,
3391    .WaitClient = dri2_wait_client,
3392    .WaitNative = dri2_wait_native,
3393    .BindTexImage = dri2_bind_tex_image,
3394    .ReleaseTexImage = dri2_release_tex_image,
3395    .SwapInterval = dri2_swap_interval,
3396    .SwapBuffers = dri2_swap_buffers,
3397    .SwapBuffersWithDamageEXT = dri2_swap_buffers_with_damage,
3398    .SwapBuffersRegionNOK = dri2_swap_buffers_region,
3399    .SetDamageRegion = dri2_set_damage_region,
3400    .PostSubBufferNV = dri2_post_sub_buffer,
3401    .CopyBuffers = dri2_copy_buffers,
3402    .QueryBufferAge = dri2_query_buffer_age,
3403    .CreateImageKHR = dri2_create_image,
3404    .DestroyImageKHR = dri2_destroy_image_khr,
3405    .CreateWaylandBufferFromImageWL = dri2_create_wayland_buffer_from_image,
3406    .QuerySurface = dri2_query_surface,
3407    .QueryDriverName = dri2_query_driver_name,
3408    .QueryDriverConfig = dri2_query_driver_config,
3409 #ifdef HAVE_LIBDRM
3410    .CreateDRMImageMESA = dri2_create_drm_image_mesa,
3411    .ExportDRMImageMESA = dri2_export_drm_image_mesa,
3412    .ExportDMABUFImageQueryMESA = dri2_export_dma_buf_image_query_mesa,
3413    .ExportDMABUFImageMESA = dri2_export_dma_buf_image_mesa,
3414    .QueryDmaBufFormatsEXT = dri2_query_dma_buf_formats,
3415    .QueryDmaBufModifiersEXT = dri2_query_dma_buf_modifiers,
3416 #endif
3417 #ifdef HAVE_WAYLAND_PLATFORM
3418    .BindWaylandDisplayWL = dri2_bind_wayland_display_wl,
3419    .UnbindWaylandDisplayWL = dri2_unbind_wayland_display_wl,
3420    .QueryWaylandBufferWL = dri2_query_wayland_buffer_wl,
3421 #endif
3422    .GetSyncValuesCHROMIUM = dri2_get_sync_values_chromium,
3423    .GetMscRateANGLE = dri2_get_msc_rate_angle,
3424    .CreateSyncKHR = dri2_create_sync,
3425    .ClientWaitSyncKHR = dri2_client_wait_sync,
3426    .SignalSyncKHR = dri2_signal_sync,
3427    .WaitSyncKHR = dri2_server_wait_sync,
3428    .DestroySyncKHR = dri2_destroy_sync,
3429    .GLInteropQueryDeviceInfo = dri2_interop_query_device_info,
3430    .GLInteropExportObject = dri2_interop_export_object,
3431    .GLInteropFlushObjects = dri2_interop_flush_objects,
3432    .DupNativeFenceFDANDROID = dri2_dup_native_fence_fd,
3433    .SetBlobCacheFuncsANDROID = dri2_set_blob_cache_funcs,
3434    .QuerySupportedCompressionRatesEXT = dri2_query_supported_compression_rates,
3435 };
3436