1 /*
2 * Copyright © 2010 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
17 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
18 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
19 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
20 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 *
24 * Authors:
25 * Kristian Høgsberg <krh@bitplanet.net>
26 */
27
28 #include <dlfcn.h>
29 #include <errno.h>
30 #include <fcntl.h>
31 #include <limits.h>
32 #include <stdbool.h>
33 #include <stdint.h>
34 #include <stdio.h>
35 #include <stdlib.h>
36 #include <string.h>
37 #include <time.h>
38 #include <unistd.h>
39 #include <c11/threads.h>
40 #ifdef HAVE_LIBDRM
41 #include <xf86drm.h>
42 #include "drm-uapi/drm_fourcc.h"
43 #endif
44 #include <GL/gl.h>
45 #include "mesa_interface.h"
46 #include <sys/stat.h>
47 #include <sys/types.h>
48 #include "dri_screen.h"
49
50 #ifdef HAVE_WAYLAND_PLATFORM
51 #include "linux-dmabuf-unstable-v1-client-protocol.h"
52 #include "wayland-drm-client-protocol.h"
53 #include "wayland-drm.h"
54 #include <wayland-client.h>
55 #endif
56
57 #ifdef HAVE_X11_PLATFORM
58 #include "X11/Xlibint.h"
59 #include "loader_x11.h"
60 #endif
61
62 #include "GL/mesa_glinterop.h"
63 #include "pipe-loader/pipe_loader.h"
64 #include "loader/loader.h"
65 #include "mapi/glapi/glapi.h"
66 #include "pipe/p_screen.h"
67 #include "util/bitscan.h"
68 #include "util/driconf.h"
69 #include "util/libsync.h"
70 #include "util/os_file.h"
71 #include "util/u_atomic.h"
72 #include "util/u_call_once.h"
73 #include "util/u_math.h"
74 #include "util/u_vector.h"
75 #include "egl_dri2.h"
76 #include "egldefines.h"
77
78 #define NUM_ATTRIBS 16
79
/* Pixel formats for which pbuffer-only EGLConfigs are exposed, indexed by
 * dri2_pbuffer_visual_index(). */
static const enum pipe_format dri2_pbuffer_visuals[] = {
   PIPE_FORMAT_R16G16B16A16_FLOAT,
   PIPE_FORMAT_R16G16B16X16_FLOAT,
   PIPE_FORMAT_B10G10R10A2_UNORM,
   PIPE_FORMAT_B10G10R10X2_UNORM,
   PIPE_FORMAT_BGRA8888_UNORM,
   PIPE_FORMAT_BGRX8888_UNORM,
   PIPE_FORMAT_B5G6R5_UNORM,
};
89
static void
dri_set_background_context(void *loaderPrivate)
{
   /* Bind whatever EGL context is current to the calling (background)
    * thread so the driver can issue GL work from it. */
   _eglBindContextToThread(_eglGetCurrentContext(), _eglGetCurrentThread());
}
98
99 static void
dri2_gl_flush_get(_glapi_proc * glFlush)100 dri2_gl_flush_get(_glapi_proc *glFlush)
101 {
102 *glFlush = _glapi_get_proc_address("glFlush");
103 }
104
105 static void
dri2_gl_flush()106 dri2_gl_flush()
107 {
108 static void (*glFlush)(void);
109 static util_once_flag once = UTIL_ONCE_FLAG_INIT;
110
111 util_call_once_data(&once, (util_call_once_data_func)dri2_gl_flush_get,
112 &glFlush);
113
114 /* if glFlush is not available things are horribly broken */
115 if (!glFlush) {
116 _eglLog(_EGL_WARNING, "DRI2: failed to find glFlush entry point");
117 return;
118 }
119
120 glFlush();
121 }
122
/**
 * Loader callback: report whether the driver may safely use an
 * application-invisible extra thread (e.g. glthread) for this context.
 *
 * Returns false for blit contexts and for EGL/Xlib displays where the
 * application never called XInitThreads().
 */
static GLboolean
dri_is_thread_safe(UNUSED void *loaderPrivate)
{
#ifdef HAVE_X11_PLATFORM
   struct dri2_egl_surface *dri2_surf = loaderPrivate;

   /* loader_dri3_blit_context_get creates a context with
    * loaderPrivate being NULL. Enabling glthread for a blitting
    * context isn't useful so return false.
    */
   if (!loaderPrivate)
      return false;

   _EGLDisplay *display = dri2_surf->base.Resource.Display;

   Display *xdpy = (Display *)display->PlatformDisplay;

   /* Check Xlib is running in thread safe mode when running on EGL/X11-xlib
    * platform
    *
    * 'lock_fns' is the XLockDisplay function pointer of the X11 display 'dpy'.
    * It will be NULL if XInitThreads wasn't called.
    */
   if (display->Platform == _EGL_PLATFORM_X11 && xdpy && !xdpy->lock_fns)
      return false;
#endif

   return true;
}
152
/* Loader vtable letting the driver bind the current EGL context to a
 * background thread, and query whether spawning such a thread is safe. */
const __DRIbackgroundCallableExtension background_callable_extension = {
   .base = {__DRI_BACKGROUND_CALLABLE, 2},

   .setBackgroundContext = dri_set_background_context,
   .isThreadSafe = dri_is_thread_safe,
};
159
/* Marker extension (__DRI_USE_INVALIDATE); carries no callbacks. */
const __DRIuseInvalidateExtension use_invalidate = {
   .base = {__DRI_USE_INVALIDATE, 1},
};
163
164 static void
dri2_get_pbuffer_drawable_info(struct dri_drawable * draw,int * x,int * y,int * w,int * h,void * loaderPrivate)165 dri2_get_pbuffer_drawable_info(struct dri_drawable *draw, int *x, int *y, int *w,
166 int *h, void *loaderPrivate)
167 {
168 struct dri2_egl_surface *dri2_surf = loaderPrivate;
169
170 *x = *y = 0;
171 *w = dri2_surf->base.Width;
172 *h = dri2_surf->base.Height;
173 }
174
175 static int
dri2_get_bytes_per_pixel(struct dri2_egl_surface * dri2_surf)176 dri2_get_bytes_per_pixel(struct dri2_egl_surface *dri2_surf)
177 {
178 const int depth = dri2_surf->base.Config->BufferSize;
179 return depth ? util_next_power_of_two(depth / 8) : 0;
180 }
181
182 static void
dri2_put_image(struct dri_drawable * draw,int op,int x,int y,int w,int h,char * data,void * loaderPrivate)183 dri2_put_image(struct dri_drawable *draw, int op, int x, int y, int w, int h,
184 char *data, void *loaderPrivate)
185 {
186 struct dri2_egl_surface *dri2_surf = loaderPrivate;
187 const int bpp = dri2_get_bytes_per_pixel(dri2_surf);
188 const int width = dri2_surf->base.Width;
189 const int height = dri2_surf->base.Height;
190 const int dst_stride = width * bpp;
191 const int src_stride = w * bpp;
192 const int x_offset = x * bpp;
193 int copy_width = src_stride;
194
195 if (!dri2_surf->swrast_device_buffer)
196 dri2_surf->swrast_device_buffer = malloc(height * dst_stride);
197
198 if (dri2_surf->swrast_device_buffer) {
199 const char *src = data;
200 char *dst = dri2_surf->swrast_device_buffer;
201
202 dst += x_offset;
203 dst += y * dst_stride;
204
205 /* Drivers are allowed to submit OOB PutImage requests, so clip here. */
206 if (copy_width > dst_stride - x_offset)
207 copy_width = dst_stride - x_offset;
208 if (h > height - y)
209 h = height - y;
210
211 for (; 0 < h; --h) {
212 memcpy(dst, src, copy_width);
213 dst += dst_stride;
214 src += src_stride;
215 }
216 }
217 }
218
219 static void
dri2_get_image(struct dri_drawable * read,int x,int y,int w,int h,char * data,void * loaderPrivate)220 dri2_get_image(struct dri_drawable *read, int x, int y, int w, int h, char *data,
221 void *loaderPrivate)
222 {
223 struct dri2_egl_surface *dri2_surf = loaderPrivate;
224 const int bpp = dri2_get_bytes_per_pixel(dri2_surf);
225 const int width = dri2_surf->base.Width;
226 const int height = dri2_surf->base.Height;
227 const int src_stride = width * bpp;
228 const int dst_stride = w * bpp;
229 const int x_offset = x * bpp;
230 int copy_width = dst_stride;
231 const char *src = dri2_surf->swrast_device_buffer;
232 char *dst = data;
233
234 if (!src) {
235 memset(data, 0, copy_width * h);
236 return;
237 }
238
239 src += x_offset;
240 src += y * src_stride;
241
242 /* Drivers are allowed to submit OOB GetImage requests, so clip here. */
243 if (copy_width > src_stride - x_offset)
244 copy_width = src_stride - x_offset;
245 if (h > height - y)
246 h = height - y;
247
248 for (; 0 < h; --h) {
249 memcpy(dst, src, copy_width);
250 src += src_stride;
251 dst += dst_stride;
252 }
253 }
254
/* HACK: technically we should have swrast_null, instead of these.
 */
/* Software-rasterizer loader callbacks that back pbuffer surfaces with a
 * malloc'ed shadow buffer (see dri2_put_image / dri2_get_image). */
const __DRIswrastLoaderExtension swrast_pbuffer_loader_extension = {
   .base = {__DRI_SWRAST_LOADER, 1},
   .getDrawableInfo = dri2_get_pbuffer_drawable_info,
   .putImage = dri2_put_image,
   .getImage = dri2_get_image,
};
263
/* DRI attribute -> EGL config key for attributes that translate 1:1.
 * Entries left zero are either unsupported or handled explicitly in
 * dri2_add_config()'s switch. */
static const EGLint dri2_to_egl_attribute_map[__DRI_ATTRIB_MAX] = {
   [__DRI_ATTRIB_BUFFER_SIZE] = EGL_BUFFER_SIZE,
   [__DRI_ATTRIB_LEVEL] = EGL_LEVEL,
   [__DRI_ATTRIB_LUMINANCE_SIZE] = EGL_LUMINANCE_SIZE,
   [__DRI_ATTRIB_DEPTH_SIZE] = EGL_DEPTH_SIZE,
   [__DRI_ATTRIB_STENCIL_SIZE] = EGL_STENCIL_SIZE,
   [__DRI_ATTRIB_SAMPLE_BUFFERS] = EGL_SAMPLE_BUFFERS,
   [__DRI_ATTRIB_SAMPLES] = EGL_SAMPLES,
   [__DRI_ATTRIB_MAX_PBUFFER_WIDTH] = EGL_MAX_PBUFFER_WIDTH,
   [__DRI_ATTRIB_MAX_PBUFFER_HEIGHT] = EGL_MAX_PBUFFER_HEIGHT,
   [__DRI_ATTRIB_MAX_PBUFFER_PIXELS] = EGL_MAX_PBUFFER_PIXELS,
   [__DRI_ATTRIB_MAX_SWAP_INTERVAL] = EGL_MAX_SWAP_INTERVAL,
   [__DRI_ATTRIB_MIN_SWAP_INTERVAL] = EGL_MIN_SWAP_INTERVAL,
   [__DRI_ATTRIB_YINVERTED] = EGL_Y_INVERTED_NOK,
};
279
280 const struct dri_config *
dri2_get_dri_config(struct dri2_egl_config * conf,EGLint surface_type,EGLenum colorspace)281 dri2_get_dri_config(struct dri2_egl_config *conf, EGLint surface_type,
282 EGLenum colorspace)
283 {
284 const bool double_buffer = surface_type == EGL_WINDOW_BIT;
285 const bool srgb = colorspace == EGL_GL_COLORSPACE_SRGB_KHR;
286
287 return conf->dri_config[double_buffer][srgb];
288 }
289
/**
 * Matcher passed to _eglFilterArray() when dri2_add_config() looks for an
 * already-linked EGLConfig equivalent to the one being added.
 */
static EGLBoolean
dri2_match_config(const _EGLConfig *conf, const _EGLConfig *criteria)
{
#ifdef HAVE_X11_PLATFORM
   /* On X11, alpha-bearing configs must also agree on the native visual.
    * NOTE(review): presumably so 32-bit visuals don't get merged with
    * other visuals — confirm against the X11 platform code. */
   if (conf->Display->Platform == _EGL_PLATFORM_X11 &&
       conf->AlphaSize > 0 &&
       conf->NativeVisualID != criteria->NativeVisualID)
      return EGL_FALSE;
#endif

   if (_eglCompareConfigs(conf, criteria, NULL, EGL_FALSE) != 0)
      return EGL_FALSE;

   if (!_eglMatchConfig(conf, criteria))
      return EGL_FALSE;

   return EGL_TRUE;
}
308
309 void
dri2_get_shifts_and_sizes(const struct dri_config * config,int * shifts,unsigned int * sizes)310 dri2_get_shifts_and_sizes(const struct dri_config *config, int *shifts,
311 unsigned int *sizes)
312 {
313 driGetConfigAttrib(config, __DRI_ATTRIB_RED_SHIFT,
314 (unsigned int *)&shifts[0]);
315 driGetConfigAttrib(config, __DRI_ATTRIB_GREEN_SHIFT,
316 (unsigned int *)&shifts[1]);
317 driGetConfigAttrib(config, __DRI_ATTRIB_BLUE_SHIFT,
318 (unsigned int *)&shifts[2]);
319 driGetConfigAttrib(config, __DRI_ATTRIB_ALPHA_SHIFT,
320 (unsigned int *)&shifts[3]);
321 driGetConfigAttrib(config, __DRI_ATTRIB_RED_SIZE, &sizes[0]);
322 driGetConfigAttrib(config, __DRI_ATTRIB_GREEN_SIZE, &sizes[1]);
323 driGetConfigAttrib(config, __DRI_ATTRIB_BLUE_SIZE, &sizes[2]);
324 driGetConfigAttrib(config, __DRI_ATTRIB_ALPHA_SIZE, &sizes[3]);
325 }
326
327 enum pipe_format
dri2_image_format_for_pbuffer_config(struct dri2_egl_display * dri2_dpy,const struct dri_config * config)328 dri2_image_format_for_pbuffer_config(struct dri2_egl_display *dri2_dpy,
329 const struct dri_config *config)
330 {
331 struct gl_config *gl_config = (struct gl_config *) config;
332 return gl_config->color_format;
333 }
334
/**
 * Translate a DRI config into an EGLConfig and publish it on the display.
 *
 * Each EGLConfig can back up to four DRI configs ({single,double} buffered
 * x {linear,sRGB}); if an equivalent EGLConfig is already linked, the DRI
 * config is attached to the free slot of that entry instead of creating a
 * duplicate.
 *
 * \param disp          display to add the config to
 * \param dri_config    driver-provided config to translate
 * \param surface_type  bitmask of EGL_*_BIT surface types to advertise
 * \param attr_list     optional EGL_NONE-terminated key/value overrides
 * \return the new or reused config, or NULL if the config was rejected
 */
struct dri2_egl_config *
dri2_add_config(_EGLDisplay *disp, const struct dri_config *dri_config,
                EGLint surface_type, const EGLint *attr_list)
{
   struct dri2_egl_config *conf;
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   _EGLConfig base;
   unsigned int attrib, value, double_buffer;
   bool srgb = false;
   EGLint key, bind_to_texture_rgb, bind_to_texture_rgba;
   _EGLConfig *matching_config;
   EGLint num_configs = 0;
   EGLint config_id;

   _eglInitConfig(&base, disp, _eglGetArraySize(disp->Configs) + 1);

   double_buffer = 0;
   bind_to_texture_rgb = 0;
   bind_to_texture_rgba = 0;

   /* Walk every attribute the driver exposes for this config and map it
    * onto the corresponding EGLConfig field. */
   for (int i = 0; i < __DRI_ATTRIB_MAX; ++i) {
      if (!driIndexConfigAttrib(dri_config, i, &attrib, &value))
         break;

      switch (attrib) {
      case __DRI_ATTRIB_RENDER_TYPE:
         if (value & __DRI_ATTRIB_FLOAT_BIT)
            base.ComponentType = EGL_COLOR_COMPONENT_TYPE_FLOAT_EXT;
         if (value & __DRI_ATTRIB_RGBA_BIT)
            value = EGL_RGB_BUFFER;
         else if (value & __DRI_ATTRIB_LUMINANCE_BIT)
            value = EGL_LUMINANCE_BUFFER;
         else
            return NULL;
         base.ColorBufferType = value;
         break;

      case __DRI_ATTRIB_CONFIG_CAVEAT:
         if (value & __DRI_ATTRIB_NON_CONFORMANT_CONFIG)
            value = EGL_NON_CONFORMANT_CONFIG;
         else if (value & __DRI_ATTRIB_SLOW_BIT)
            value = EGL_SLOW_CONFIG;
         else
            value = EGL_NONE;
         base.ConfigCaveat = value;
         break;

      case __DRI_ATTRIB_BIND_TO_TEXTURE_RGB:
         bind_to_texture_rgb = value;
         break;

      case __DRI_ATTRIB_BIND_TO_TEXTURE_RGBA:
         bind_to_texture_rgba = value;
         break;

      case __DRI_ATTRIB_DOUBLE_BUFFER:
         double_buffer = value;
         break;

      case __DRI_ATTRIB_RED_SIZE:
         base.RedSize = value;
         break;

      case __DRI_ATTRIB_GREEN_SIZE:
         base.GreenSize = value;
         break;

      case __DRI_ATTRIB_BLUE_SIZE:
         base.BlueSize = value;
         break;

      case __DRI_ATTRIB_ALPHA_SIZE:
         base.AlphaSize = value;
         break;

      case __DRI_ATTRIB_ACCUM_RED_SIZE:
      case __DRI_ATTRIB_ACCUM_GREEN_SIZE:
      case __DRI_ATTRIB_ACCUM_BLUE_SIZE:
      case __DRI_ATTRIB_ACCUM_ALPHA_SIZE:
         /* Don't expose visuals with the accumulation buffer. */
         if (value > 0)
            return NULL;
         break;

      case __DRI_ATTRIB_FRAMEBUFFER_SRGB_CAPABLE:
         /* sRGB configs are only exposed when KHR_gl_colorspace is. */
         srgb = value != 0;
         if (!disp->Extensions.KHR_gl_colorspace && srgb)
            return NULL;
         break;

      case __DRI_ATTRIB_MAX_PBUFFER_WIDTH:
         base.MaxPbufferWidth = _EGL_MAX_PBUFFER_WIDTH;
         break;
      case __DRI_ATTRIB_MAX_PBUFFER_HEIGHT:
         base.MaxPbufferHeight = _EGL_MAX_PBUFFER_HEIGHT;
         break;
      case __DRI_ATTRIB_MUTABLE_RENDER_BUFFER:
         if (disp->Extensions.KHR_mutable_render_buffer)
            surface_type |= EGL_MUTABLE_RENDER_BUFFER_BIT_KHR;
         break;
      default:
         /* Attributes with a 1:1 EGL equivalent are table-mapped. */
         key = dri2_to_egl_attribute_map[attrib];
         if (key != 0)
            _eglSetConfigKey(&base, key, value);
         break;
      }
   }

   /* Apply caller-supplied key/value overrides last. */
   if (attr_list)
      for (int i = 0; attr_list[i] != EGL_NONE; i += 2)
         _eglSetConfigKey(&base, attr_list[i], attr_list[i + 1]);

   base.NativeRenderable = EGL_TRUE;

   base.SurfaceType = surface_type;
   if (surface_type &
       (EGL_PBUFFER_BIT |
        (disp->Extensions.NOK_texture_from_pixmap ? EGL_PIXMAP_BIT : 0))) {
      base.BindToTextureRGB = bind_to_texture_rgb;
      if (base.AlphaSize > 0)
         base.BindToTextureRGBA = bind_to_texture_rgba;
   }

   /* Double-buffered DRI configs back windows; single-buffered ones back
    * pixmaps. */
   if (double_buffer) {
      surface_type &= ~EGL_PIXMAP_BIT;
   } else {
      surface_type &= ~EGL_WINDOW_BIT;
   }

   if (!surface_type)
      return NULL;

   base.RenderableType = disp->ClientAPIs;
   base.Conformant = disp->ClientAPIs;

   base.MinSwapInterval = dri2_dpy->min_swap_interval;
   base.MaxSwapInterval = dri2_dpy->max_swap_interval;

   if (!_eglValidateConfig(&base, EGL_FALSE)) {
      _eglLog(_EGL_DEBUG, "DRI2: failed to validate config %d", base.ConfigID);
      return NULL;
   }

   /* Look for an already-linked config that matches everything except
    * ConfigID and SurfaceType. */
   config_id = base.ConfigID;
   base.ConfigID = EGL_DONT_CARE;
   base.SurfaceType = EGL_DONT_CARE;
   num_configs = _eglFilterArray(disp->Configs, (void **)&matching_config, 1,
                                 (_EGLArrayForEach)dri2_match_config, &base);

   if (num_configs == 1) {
      conf = (struct dri2_egl_config *)matching_config;

      if (!conf->dri_config[double_buffer][srgb])
         conf->dri_config[double_buffer][srgb] = dri_config;
      else
         /* a similar config type is already added (unlikely) => discard */
         return NULL;
   } else if (num_configs == 0) {
      conf = calloc(1, sizeof *conf);
      if (conf == NULL)
         return NULL;

      conf->dri_config[double_buffer][srgb] = dri_config;

      memcpy(&conf->base, &base, sizeof base);
      conf->base.SurfaceType = 0;
      conf->base.ConfigID = config_id;

      _eglLinkConfig(&conf->base);
   } else {
      unreachable("duplicates should not be possible");
      return NULL;
   }

   conf->base.SurfaceType |= surface_type;

   return conf;
}
513
514 static int
dri2_pbuffer_visual_index(enum pipe_format format)515 dri2_pbuffer_visual_index(enum pipe_format format)
516 {
517 for (unsigned i = 0; i < ARRAY_SIZE(dri2_pbuffer_visuals); i++) {
518 if (dri2_pbuffer_visuals[i] == format)
519 return i;
520 }
521
522 return -1;
523 }
524
/**
 * Add pbuffer-only EGLConfigs for every driver config whose color format
 * is listed in dri2_pbuffer_visuals[], logging any listed format that no
 * driver config ended up supporting.
 */
void
dri2_add_pbuffer_configs_for_visuals(_EGLDisplay *disp)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   /* Per-visual count of configs successfully added. */
   unsigned int format_count[ARRAY_SIZE(dri2_pbuffer_visuals)] = {0};

   for (unsigned i = 0; dri2_dpy->driver_configs[i] != NULL; i++) {
      struct dri2_egl_config *dri2_conf;
      struct gl_config *gl_config =
         (struct gl_config *) dri2_dpy->driver_configs[i];
      int idx = dri2_pbuffer_visual_index(gl_config->color_format);

      /* Skip formats that are not exposed as pbuffer visuals. */
      if (idx == -1)
         continue;

      dri2_conf = dri2_add_config(disp, dri2_dpy->driver_configs[i],
                                  EGL_PBUFFER_BIT, NULL);
      if (dri2_conf)
         format_count[idx]++;
   }

   for (unsigned i = 0; i < ARRAY_SIZE(format_count); i++) {
      if (!format_count[i]) {
         _eglLog(_EGL_DEBUG, "No DRI config supports native format %s",
                 util_format_name(dri2_pbuffer_visuals[i]));
      }
   }
}
553
554 GLboolean
dri2_validate_egl_image(void * image,void * data)555 dri2_validate_egl_image(void *image, void *data)
556 {
557 _EGLDisplay *disp = _eglLockDisplay(data);
558 _EGLImage *img = _eglLookupImage(image, disp);
559 _eglUnlockDisplay(disp);
560
561 if (img == NULL) {
562 _eglError(EGL_BAD_PARAMETER, "dri2_validate_egl_image");
563 return false;
564 }
565
566 return true;
567 }
568
569 struct dri_image *
dri2_lookup_egl_image_validated(void * image,void * data)570 dri2_lookup_egl_image_validated(void *image, void *data)
571 {
572 struct dri2_egl_image *dri2_img;
573
574 (void)data;
575
576 dri2_img = dri2_egl_image(image);
577
578 return dri2_img->dri_image;
579 }
580
/* Loader vtable the driver uses to validate and resolve EGLImage handles. */
const __DRIimageLookupExtension image_lookup_extension = {
   .base = {__DRI_IMAGE_LOOKUP, 2},

   .validateEGLImage = dri2_validate_egl_image,
   .lookupEGLImageValidated = dri2_lookup_egl_image_validated,
};
587
/**
 * Classify the renderer for this display (kopper / swrast / kms_swrast)
 * based on display options, environment overrides and the driver name.
 * Always succeeds.
 */
EGLBoolean
dri2_load_driver(_EGLDisplay *disp)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);

   /* Kopper is used when Zink was requested and LIBGL_KOPPER_DISABLE is
    * not set. */
   dri2_dpy->kopper = disp->Options.Zink && !debug_get_bool_option("LIBGL_KOPPER_DISABLE", false);
   /* LIBGL_KOPPER_DRI2 forces kopper onto the modifier-less path. */
   dri2_dpy->kopper_without_modifiers = dri2_dpy->kopper && debug_get_bool_option("LIBGL_KOPPER_DRI2", false);
   /* Software rendering: explicitly forced (and not kopper), no driver
    * name found, or a "*swrast*" driver name. */
   dri2_dpy->swrast = (disp->Options.ForceSoftware && !dri2_dpy->kopper) ||
                      !dri2_dpy->driver_name || strstr(dri2_dpy->driver_name, "swrast");
   /* Plain swrast (strcmp != 0), as opposed to kms_swrast. */
   dri2_dpy->swrast_not_kms = dri2_dpy->swrast && (!dri2_dpy->driver_name || strcmp(dri2_dpy->driver_name, "kms_swrast"));

   return EGL_TRUE;
}
601
602 static const char *
dri2_query_driver_name(_EGLDisplay * disp)603 dri2_query_driver_name(_EGLDisplay *disp)
604 {
605 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
606 return dri2_dpy->driver_name;
607 }
608
609 static char *
dri2_query_driver_config(_EGLDisplay * disp)610 dri2_query_driver_config(_EGLDisplay *disp)
611 {
612 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
613 char *ret;
614
615 ret = pipe_loader_get_driinfo_xml(dri2_dpy->driver_name);
616
617 mtx_unlock(&dri2_dpy->lock);
618
619 return ret;
620 }
621
/**
 * Translate the capabilities of the created DRI screen into EGL display
 * extensions, client API bits and swap-interval defaults.
 */
void
dri2_setup_screen(_EGLDisplay *disp)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   struct dri_screen *screen = dri2_dpy->dri_screen_render_gpu;
   struct pipe_screen *pscreen = screen->base.screen;
   unsigned int api_mask = screen->api_mask;

#ifdef HAVE_LIBDRM
   unsigned caps = pscreen->caps.dmabuf;
   /* Only record dma-buf import/export support when multi-buffering is
    * available. */
   if (dri2_dpy->multibuffers_available) {
      dri2_dpy->has_dmabuf_import = (caps & DRM_PRIME_CAP_IMPORT) > 0;
      dri2_dpy->has_dmabuf_export = (caps & DRM_PRIME_CAP_EXPORT) > 0;
   }
#endif
#ifdef HAVE_ANDROID_PLATFORM
   dri2_dpy->has_native_fence_fd = pscreen->caps.native_fence_fd;
#endif
   dri2_dpy->has_compression_modifiers = pscreen->query_compression_rates &&
      (pscreen->query_compression_modifiers || dri2_dpy->kopper);

   /*
    * EGL 1.5 specification defines the default value to 1. Moreover,
    * eglSwapInterval() is required to clamp requested value to the supported
    * range. Since the default value is implicitly assumed to be supported,
    * use it as both minimum and maximum for the platforms that do not allow
    * changing the interval. Platforms, which allow it (e.g. x11, wayland)
    * override these values already.
    */
   dri2_dpy->min_swap_interval = 1;
   dri2_dpy->max_swap_interval = 1;
   dri2_dpy->default_swap_interval = 1;

   /* Expose each client API only when both the driver and this libEGL
    * build support it. */
   disp->ClientAPIs = 0;
   if ((api_mask & (1 << __DRI_API_OPENGL)) && _eglIsApiValid(EGL_OPENGL_API))
      disp->ClientAPIs |= EGL_OPENGL_BIT;
   if ((api_mask & (1 << __DRI_API_GLES)) && _eglIsApiValid(EGL_OPENGL_ES_API))
      disp->ClientAPIs |= EGL_OPENGL_ES_BIT;
   if ((api_mask & (1 << __DRI_API_GLES2)) && _eglIsApiValid(EGL_OPENGL_ES_API))
      disp->ClientAPIs |= EGL_OPENGL_ES2_BIT;
   if ((api_mask & (1 << __DRI_API_GLES3)) && _eglIsApiValid(EGL_OPENGL_ES_API))
      disp->ClientAPIs |= EGL_OPENGL_ES3_BIT_KHR;

   /* Extensions advertised unconditionally. */
   disp->Extensions.KHR_create_context = EGL_TRUE;
   disp->Extensions.KHR_create_context_no_error = EGL_TRUE;
   disp->Extensions.KHR_no_config_context = EGL_TRUE;
   disp->Extensions.KHR_surfaceless_context = EGL_TRUE;

   disp->Extensions.MESA_gl_interop = EGL_TRUE;

   disp->Extensions.MESA_query_driver = EGL_TRUE;

   /* Report back to EGL the bitmask of priorities supported */
   disp->Extensions.IMG_context_priority = pscreen->caps.context_priority_mask;

   /**
    * FIXME: Some drivers currently misreport what context priorities the user
    * can use and fail context creation. This cause issues on Android where the
    * display process would try to use realtime priority. This is also a spec
    * violation for IMG_context_priority.
    */
#ifndef HAVE_ANDROID_PLATFORM
   disp->Extensions.NV_context_priority_realtime =
      disp->Extensions.IMG_context_priority &
      (1 << __EGL_CONTEXT_PRIORITY_REALTIME_BIT);
#endif

   disp->Extensions.EXT_pixel_format_float = EGL_TRUE;

   /* Renderable sRGB support gates KHR_gl_colorspace. */
   if (pscreen->is_format_supported(pscreen, PIPE_FORMAT_B8G8R8A8_SRGB,
                                    PIPE_TEXTURE_2D, 0, 0,
                                    PIPE_BIND_RENDER_TARGET)) {
      disp->Extensions.KHR_gl_colorspace = EGL_TRUE;
   }

   disp->Extensions.EXT_config_select_group = EGL_TRUE;

   disp->Extensions.EXT_create_context_robustness =
      pscreen->caps.device_reset_status_query;
   disp->RobustBufferAccess = pscreen->caps.robust_buffer_access_behavior;

   /* EXT_query_reset_notification_strategy complements and requires
    * EXT_create_context_robustness. */
   disp->Extensions.EXT_query_reset_notification_strategy =
      disp->Extensions.EXT_create_context_robustness;

   disp->Extensions.KHR_fence_sync = EGL_TRUE;
   disp->Extensions.KHR_wait_sync = EGL_TRUE;
   disp->Extensions.KHR_cl_event2 = EGL_TRUE;
   if (dri_fence_get_caps(dri2_dpy->dri_screen_render_gpu)
       & __DRI_FENCE_CAP_NATIVE_FD)
      disp->Extensions.ANDROID_native_fence_sync = EGL_TRUE;

   if (dri_get_pipe_screen(dri2_dpy->dri_screen_render_gpu)->get_disk_shader_cache)
      disp->Extensions.ANDROID_blob_cache = EGL_TRUE;

   disp->Extensions.KHR_reusable_sync = EGL_TRUE;

   int capabilities;
   capabilities = dri2_get_capabilities(dri2_dpy->dri_screen_render_gpu);
   disp->Extensions.MESA_drm_image = (capabilities & __DRI_IMAGE_CAP_GLOBAL_NAMES) != 0;

#ifdef HAVE_LIBDRM
   if (pscreen->caps.dmabuf & DRM_PRIME_CAP_EXPORT)
      disp->Extensions.MESA_image_dma_buf_export = true;

   if (dri2_dpy->has_dmabuf_import) {
      disp->Extensions.EXT_image_dma_buf_import = EGL_TRUE;
      disp->Extensions.EXT_image_dma_buf_import_modifiers = EGL_TRUE;
   }
#endif
   disp->Extensions.MESA_x11_native_visual_id = EGL_TRUE;
   disp->Extensions.EXT_surface_compression = EGL_TRUE;
   disp->Extensions.KHR_image_base = EGL_TRUE;
   disp->Extensions.KHR_gl_renderbuffer_image = EGL_TRUE;
   disp->Extensions.KHR_gl_texture_2D_image = EGL_TRUE;
   disp->Extensions.KHR_gl_texture_cubemap_image = EGL_TRUE;

   if (pscreen->caps.max_texture_3d_levels != 0)
      disp->Extensions.KHR_gl_texture_3D_image = EGL_TRUE;

   disp->Extensions.KHR_context_flush_control = EGL_TRUE;

   if (dri_get_pipe_screen(dri2_dpy->dri_screen_render_gpu)->set_damage_region)
      disp->Extensions.KHR_partial_update = EGL_TRUE;

   disp->Extensions.EXT_protected_surface = pscreen->caps.device_protected_surface;
   disp->Extensions.EXT_protected_content = pscreen->caps.device_protected_context;
}
752
/**
 * Derive the display's min/max/default swap intervals from the driconf
 * "vblank_mode" option.
 *
 * \param disp              display being configured
 * \param max_swap_interval largest interval the platform can honor
 */
void
dri2_setup_swap_interval(_EGLDisplay *disp, int max_swap_interval)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;

   /* Allow driconf to override applications.*/
   dri2GalliumConfigQueryi(dri2_dpy->dri_screen_render_gpu, "vblank_mode", &vblank_mode);

   switch (vblank_mode) {
   case DRI_CONF_VBLANK_NEVER:
      /* Vsync completely disabled: only interval 0 is allowed. */
      dri2_dpy->min_swap_interval = 0;
      dri2_dpy->max_swap_interval = 0;
      dri2_dpy->default_swap_interval = 0;
      break;
   case DRI_CONF_VBLANK_ALWAYS_SYNC:
      /* Vsync forced on: interval 0 is never allowed. */
      dri2_dpy->min_swap_interval = 1;
      dri2_dpy->max_swap_interval = max_swap_interval;
      dri2_dpy->default_swap_interval = 1;
      break;
   case DRI_CONF_VBLANK_DEF_INTERVAL_0:
      /* Application may pick any interval; default to unsynchronized. */
      dri2_dpy->min_swap_interval = 0;
      dri2_dpy->max_swap_interval = max_swap_interval;
      dri2_dpy->default_swap_interval = 0;
      break;
   default:
   case DRI_CONF_VBLANK_DEF_INTERVAL_1:
      /* Application may pick any interval; default to synchronized. */
      dri2_dpy->min_swap_interval = 0;
      dri2_dpy->max_swap_interval = max_swap_interval;
      dri2_dpy->default_swap_interval = 1;
      break;
   }
}
786
/* All platforms but DRM call this function to create the screen and populate
 * the driver_configs. DRM inherits that information from its display - GBM.
 */
EGLBoolean
dri2_create_screen(_EGLDisplay *disp)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   char *driver_name_display_gpu;
   enum dri_screen_type type = DRI_SCREEN_DRI3;

   /* Pick the screen flavor matching what dri2_load_driver() detected. */
   if (dri2_dpy->kopper)
      type = DRI_SCREEN_KOPPER;
   else if (dri2_dpy->swrast_not_kms)
      type = DRI_SCREEN_SWRAST;
   else if (dri2_dpy->swrast)
      type = DRI_SCREEN_KMS_SWRAST;

   /* PRIME setup: when render and display GPUs differ, also create a
    * screen on the display GPU (same-driver case only). */
   if (dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu) {
      driver_name_display_gpu =
         loader_get_driver_for_fd(dri2_dpy->fd_display_gpu);
      if (driver_name_display_gpu) {
         /* check if driver name is matching so that non mesa drivers
          * will not crash.
          */
         if (strcmp(dri2_dpy->driver_name, driver_name_display_gpu) == 0) {
            dri2_dpy->dri_screen_display_gpu = driCreateNewScreen3(
               0, dri2_dpy->fd_display_gpu, dri2_dpy->loader_extensions,
               type, &dri2_dpy->driver_configs, false, dri2_dpy->multibuffers_available, disp);
         }
         free(driver_name_display_gpu);
      }
   }

   /* Plain swrast runs without any device fd. */
   int screen_fd = dri2_dpy->swrast_not_kms ? -1 : dri2_dpy->fd_render_gpu;
   dri2_dpy->dri_screen_render_gpu = driCreateNewScreen3(
      0, screen_fd, dri2_dpy->loader_extensions, type,
      &dri2_dpy->driver_configs, false, dri2_dpy->multibuffers_available, disp);

   if (dri2_dpy->dri_screen_render_gpu == NULL) {
      _eglLog(_EGL_WARNING, "egl: failed to create dri2 screen");
      return EGL_FALSE;
   }

   /* Single-GPU case: one screen serves both roles. */
   if (dri2_dpy->fd_render_gpu == dri2_dpy->fd_display_gpu)
      dri2_dpy->dri_screen_display_gpu = dri2_dpy->dri_screen_render_gpu;

   dri2_dpy->own_dri_screen = true;
   return EGL_TRUE;
}
836
/**
 * Look up the _EGLDevice for this display's render node (or the software
 * device) and store it in disp->Device.
 *
 * \param disp      display being initialized
 * \param software  true when using a software renderer (no DRM node)
 * \return EGL_TRUE on success
 */
EGLBoolean
dri2_setup_device(_EGLDisplay *disp, EGLBoolean software)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   _EGLDevice *dev;
   int render_fd;

   /* If we're not software, we need a DRM node FD */
   assert(software || dri2_dpy->fd_render_gpu >= 0);

   /* fd_render_gpu is what we got from WSI, so might actually be a lie and
    * not a render node... */
   if (software) {
      render_fd = -1;
   } else if (loader_is_device_render_capable(dri2_dpy->fd_render_gpu)) {
      render_fd = dri2_dpy->fd_render_gpu;
   } else {
      /* Fall back to a compatible render-only device fd. */
      render_fd = dri_query_compatible_render_only_device_fd(
         dri2_dpy->fd_render_gpu);
      if (render_fd < 0)
         return EGL_FALSE;
   }

   dev = _eglFindDevice(render_fd, software);

   /* Close the fd only when it was opened above, not when it is the
    * display's own render fd. */
   if (render_fd >= 0 && render_fd != dri2_dpy->fd_render_gpu)
      close(render_fd);

   if (!dev)
      return EGL_FALSE;

   disp->Device = dev;
   return EGL_TRUE;
}
871
872 /**
873 * Called via eglInitialize(), drv->Initialize().
874 *
875 * This must be guaranteed to be called exactly once, even if eglInitialize is
876 * called many times (without a eglTerminate in between).
877 */
878 static EGLBoolean
dri2_initialize(_EGLDisplay * disp)879 dri2_initialize(_EGLDisplay *disp)
880 {
881 EGLBoolean ret = EGL_FALSE;
882 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
883
884 /* In the case where the application calls eglMakeCurrent(context1),
885 * eglTerminate, then eglInitialize again (without a call to eglReleaseThread
886 * or eglMakeCurrent(NULL) before that), dri2_dpy structure is still
887 * initialized, as we need it to be able to free context1 correctly.
888 *
889 * It would probably be safest to forcibly release the display with
890 * dri2_display_release, to make sure the display is reinitialized correctly.
891 * However, the EGL spec states that we need to keep a reference to the
892 * current context (so we cannot call dri2_make_current(NULL)), and therefore
893 * we would leak context1 as we would be missing the old display connection
894 * to free it up correctly.
895 */
896 if (dri2_dpy) {
897 p_atomic_inc(&dri2_dpy->ref_count);
898 return EGL_TRUE;
899 }
900
901 loader_set_logger(_eglLog);
902
903 switch (disp->Platform) {
904 case _EGL_PLATFORM_SURFACELESS:
905 ret = dri2_initialize_surfaceless(disp);
906 break;
907 case _EGL_PLATFORM_DEVICE:
908 ret = dri2_initialize_device(disp);
909 break;
910 case _EGL_PLATFORM_X11:
911 case _EGL_PLATFORM_XCB:
912 ret = dri2_initialize_x11(disp);
913 break;
914 case _EGL_PLATFORM_DRM:
915 ret = dri2_initialize_drm(disp);
916 break;
917 case _EGL_PLATFORM_WAYLAND:
918 ret = dri2_initialize_wayland(disp);
919 break;
920 case _EGL_PLATFORM_ANDROID:
921 ret = dri2_initialize_android(disp);
922 break;
923 default:
924 unreachable("Callers ensure we cannot get here.");
925 return EGL_FALSE;
926 }
927
928 if (!ret)
929 return EGL_FALSE;
930
931 if (_eglGetArraySize(disp->Configs) == 0) {
932 _eglError(EGL_NOT_INITIALIZED, "failed to add any EGLConfigs");
933 dri2_display_destroy(disp);
934 return EGL_FALSE;
935 }
936
937 dri2_dpy = dri2_egl_display(disp);
938 p_atomic_inc(&dri2_dpy->ref_count);
939
940 mtx_init(&dri2_dpy->lock, mtx_plain);
941
942 return EGL_TRUE;
943 }
944
945 /**
946 * Decrement display reference count, and free up display if necessary.
947 */
948 static void
dri2_display_release(_EGLDisplay * disp)949 dri2_display_release(_EGLDisplay *disp)
950 {
951 struct dri2_egl_display *dri2_dpy;
952
953 if (!disp)
954 return;
955
956 dri2_dpy = dri2_egl_display(disp);
957
958 assert(dri2_dpy->ref_count > 0);
959
960 if (!p_atomic_dec_zero(&dri2_dpy->ref_count))
961 return;
962
963 _eglCleanupDisplay(disp);
964 dri2_display_destroy(disp);
965 }
966
/**
 * Free everything owned by the dri2 display and the display struct itself.
 *
 * Runs when the last reference is dropped (dri2_display_release) or on an
 * initialization failure path. Callers must not touch disp->DriverData
 * afterwards; it is reset to NULL here.
 */
void
dri2_display_destroy(_EGLDisplay *disp)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);

   /* Only destroy DRI screens we created ourselves; otherwise their real
    * owner (e.g. the gbm device on the drm platform) is responsible. */
   if (dri2_dpy->own_dri_screen) {
      if (dri2_dpy->vtbl && dri2_dpy->vtbl->close_screen_notify)
         dri2_dpy->vtbl->close_screen_notify(disp);

      driDestroyScreen(dri2_dpy->dri_screen_render_gpu);

      /* A distinct display-GPU screen only exists when the render and
       * display fds differ. */
      if (dri2_dpy->dri_screen_display_gpu &&
          dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu)
         driDestroyScreen(dri2_dpy->dri_screen_display_gpu);
   }
   /* The two fds may refer to the same GPU; close each at most once. */
   if (dri2_dpy->fd_display_gpu >= 0 &&
       dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu)
      close(dri2_dpy->fd_display_gpu);
   if (dri2_dpy->fd_render_gpu >= 0)
      close(dri2_dpy->fd_render_gpu);

   free(dri2_dpy->driver_name);

#ifdef HAVE_WAYLAND_PLATFORM
   free(dri2_dpy->device_name);
#endif

   /* Platform-specific teardown of window-system connections/state. */
   switch (disp->Platform) {
   case _EGL_PLATFORM_X11:
   case _EGL_PLATFORM_XCB:
      dri2_teardown_x11(dri2_dpy);
      break;
   case _EGL_PLATFORM_DRM:
      dri2_teardown_drm(dri2_dpy);
      break;
   case _EGL_PLATFORM_WAYLAND:
      dri2_teardown_wayland(dri2_dpy);
      break;
   case _EGL_PLATFORM_ANDROID:
#ifdef HAVE_ANDROID_PLATFORM
      u_gralloc_destroy(&dri2_dpy->gralloc);
#endif
      break;
   case _EGL_PLATFORM_SURFACELESS:
      break;
   case _EGL_PLATFORM_DEVICE:
      break;
   default:
      unreachable("Platform teardown is not properly hooked.");
      break;
   }

   /* The drm platform does not create the screen/driver_configs but reuses
    * the ones from the gbm device. As such the gbm itself is responsible
    * for the cleanup.
    */
   if (disp->Platform != _EGL_PLATFORM_DRM && dri2_dpy->driver_configs) {
      for (unsigned i = 0; dri2_dpy->driver_configs[i]; i++)
         free((struct dri_config *)dri2_dpy->driver_configs[i]);
      free(dri2_dpy->driver_configs);
   }
   free(dri2_dpy);
   disp->DriverData = NULL;
}
1031
1032 struct dri2_egl_display *
dri2_display_create(void)1033 dri2_display_create(void)
1034 {
1035 struct dri2_egl_display *dri2_dpy = calloc(1, sizeof *dri2_dpy);
1036 if (!dri2_dpy) {
1037 _eglError(EGL_BAD_ALLOC, "eglInitialize");
1038 return NULL;
1039 }
1040
1041 dri2_dpy->fd_render_gpu = -1;
1042 dri2_dpy->fd_display_gpu = -1;
1043 dri2_dpy->multibuffers_available = true;
1044
1045 return dri2_dpy;
1046 }
1047
1048 /**
1049 * Called via eglTerminate(), drv->Terminate().
1050 *
1051 * This must be guaranteed to be called exactly once, even if eglTerminate is
1052 * called many times (without a eglInitialize in between).
1053 */
1054 static EGLBoolean
dri2_terminate(_EGLDisplay * disp)1055 dri2_terminate(_EGLDisplay *disp)
1056 {
1057 /* Release all non-current Context/Surfaces. */
1058 _eglReleaseDisplayResources(disp);
1059
1060 dri2_display_release(disp);
1061
1062 return EGL_TRUE;
1063 }
1064
/**
 * Set the error code after a call to
 * dri2_egl_display::dri2::createContextAttribs.
 *
 * Maps a __DRI_CTX_ERROR_* code to the EGL error required by the
 * EGL_KHR_create_context spec; a success code sets no error at all.
 */
static void
dri2_create_context_attribs_error(int dri_error)
{
   EGLint egl_error;

   switch (dri_error) {
   case __DRI_CTX_ERROR_SUCCESS:
      /* Success: leave the thread's EGL error untouched. */
      return;

   case __DRI_CTX_ERROR_NO_MEMORY:
      egl_error = EGL_BAD_ALLOC;
      break;

   /* From the EGL_KHR_create_context spec, section "Errors":
    *
    *   * If <config> does not support a client API context compatible
    *     with the requested API major and minor version, [...] context
    *     flags, and context reset notification behavior (for client API types
    *     where these attributes are supported), then an EGL_BAD_MATCH error is
    *     generated.
    *
    *   * If an OpenGL ES context is requested and the values for
    *     attributes EGL_CONTEXT_MAJOR_VERSION_KHR and
    *     EGL_CONTEXT_MINOR_VERSION_KHR specify an OpenGL ES version that
    *     is not defined, than an EGL_BAD_MATCH error is generated.
    *
    *   * If an OpenGL context is requested, the requested version is
    *     greater than 3.2, and the value for attribute
    *     EGL_CONTEXT_OPENGL_PROFILE_MASK_KHR has no bits set; has any
    *     bits set other than EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR and
    *     EGL_CONTEXT_OPENGL_COMPATIBILITY_PROFILE_BIT_KHR; has more than
    *     one of these bits set; or if the implementation does not support
    *     the requested profile, then an EGL_BAD_MATCH error is generated.
    */
   case __DRI_CTX_ERROR_BAD_API:
   case __DRI_CTX_ERROR_BAD_VERSION:
   case __DRI_CTX_ERROR_BAD_FLAG:
      egl_error = EGL_BAD_MATCH;
      break;

   /* From the EGL_KHR_create_context spec, section "Errors":
    *
    *   * If an attribute name or attribute value in <attrib_list> is not
    *     recognized (including unrecognized bits in bitmask attributes),
    *     then an EGL_BAD_ATTRIBUTE error is generated."
    */
   case __DRI_CTX_ERROR_UNKNOWN_ATTRIBUTE:
   case __DRI_CTX_ERROR_UNKNOWN_FLAG:
      egl_error = EGL_BAD_ATTRIBUTE;
      break;

   default:
      /* Unknown driver error: EGL_BAD_MATCH is the least-wrong fallback. */
      assert(!"unknown dri_error code");
      egl_error = EGL_BAD_MATCH;
      break;
   }

   _eglError(egl_error, "dri2_create_context");
}
1128
/**
 * Convert the EGL context state in dri2_ctx->base into a flat DRI
 * attribute/value array for driCreateContextAttribs.
 *
 * \param ctx_attribs   output array; must hold at least NUM_ATTRIBS entries.
 * \param num_attribs   in: capacity of ctx_attribs; out: entries written.
 * \return false (with EGL_BAD_CONFIG set) on an unsupported context
 *         priority, true otherwise.
 */
static bool
dri2_fill_context_attribs(struct dri2_egl_context *dri2_ctx,
                          struct dri2_egl_display *dri2_dpy,
                          uint32_t *ctx_attribs, unsigned *num_attribs)
{
   int pos = 0;

   assert(*num_attribs >= NUM_ATTRIBS);

   /* Major/minor version are always present; everything below is
    * appended only when it differs from the default. */
   ctx_attribs[pos++] = __DRI_CTX_ATTRIB_MAJOR_VERSION;
   ctx_attribs[pos++] = dri2_ctx->base.ClientMajorVersion;
   ctx_attribs[pos++] = __DRI_CTX_ATTRIB_MINOR_VERSION;
   ctx_attribs[pos++] = dri2_ctx->base.ClientMinorVersion;

   if (dri2_ctx->base.Flags != 0) {
      ctx_attribs[pos++] = __DRI_CTX_ATTRIB_FLAGS;
      ctx_attribs[pos++] = dri2_ctx->base.Flags;
   }

   if (dri2_ctx->base.ResetNotificationStrategy !=
       EGL_NO_RESET_NOTIFICATION_KHR) {
      ctx_attribs[pos++] = __DRI_CTX_ATTRIB_RESET_STRATEGY;
      ctx_attribs[pos++] = __DRI_CTX_RESET_LOSE_CONTEXT;
   }

   /* Map the EGL_IMG_context_priority / EGL_NV_context_priority_realtime
    * hint onto the DRI priority levels; medium is the default and is
    * therefore omitted from the attribute list. */
   if (dri2_ctx->base.ContextPriority != EGL_CONTEXT_PRIORITY_MEDIUM_IMG) {
      unsigned val;

      switch (dri2_ctx->base.ContextPriority) {
      case EGL_CONTEXT_PRIORITY_REALTIME_NV:
         val = __DRI_CTX_PRIORITY_REALTIME;
         break;
      case EGL_CONTEXT_PRIORITY_HIGH_IMG:
         val = __DRI_CTX_PRIORITY_HIGH;
         break;
      case EGL_CONTEXT_PRIORITY_MEDIUM_IMG:
         val = __DRI_CTX_PRIORITY_MEDIUM;
         break;
      case EGL_CONTEXT_PRIORITY_LOW_IMG:
         val = __DRI_CTX_PRIORITY_LOW;
         break;
      default:
         _eglError(EGL_BAD_CONFIG, "eglCreateContext");
         return false;
      }

      ctx_attribs[pos++] = __DRI_CTX_ATTRIB_PRIORITY;
      ctx_attribs[pos++] = val;
   }

   if (dri2_ctx->base.ReleaseBehavior ==
       EGL_CONTEXT_RELEASE_BEHAVIOR_NONE_KHR) {
      ctx_attribs[pos++] = __DRI_CTX_ATTRIB_RELEASE_BEHAVIOR;
      ctx_attribs[pos++] = __DRI_CTX_RELEASE_BEHAVIOR_NONE;
   }

   if (dri2_ctx->base.NoError) {
      ctx_attribs[pos++] = __DRI_CTX_ATTRIB_NO_ERROR;
      ctx_attribs[pos++] = true;
   }

   if (dri2_ctx->base.Protected) {
      ctx_attribs[pos++] = __DRI_CTX_ATTRIB_PROTECTED;
      ctx_attribs[pos++] = true;
   }

   /* Report how many array slots were actually used. */
   *num_attribs = pos;

   return true;
}
1199
/**
 * Called via eglCreateContext(), drv->CreateContext().
 *
 * Selects the DRI API enum from the requested client API/version, packs the
 * context attributes, and asks the driver for a context. Returns NULL with
 * an appropriate EGL error on failure; on success the display lock taken by
 * dri2_egl_display_lock() is released before returning.
 */
static _EGLContext *
dri2_create_context(_EGLDisplay *disp, _EGLConfig *conf,
                    _EGLContext *share_list, const EGLint *attrib_list)
{
   struct dri2_egl_context *dri2_ctx;
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   struct dri2_egl_context *dri2_ctx_shared = dri2_egl_context(share_list);
   struct dri_context *shared = dri2_ctx_shared ? dri2_ctx_shared->dri_context : NULL;
   struct dri2_egl_config *dri2_config = dri2_egl_config(conf);
   const struct dri_config *dri_config;
   int api;
   unsigned error;
   unsigned num_attribs = NUM_ATTRIBS;
   uint32_t ctx_attribs[NUM_ATTRIBS];

   dri2_ctx = malloc(sizeof *dri2_ctx);
   if (!dri2_ctx) {
      dri2_egl_error_unlock(dri2_dpy, EGL_BAD_ALLOC, "eglCreateContext");
      return NULL;
   }

   /* Validates attrib_list and fills in dri2_ctx->base (version, flags,
    * priority, ...); sets the EGL error itself on failure. */
   if (!_eglInitContext(&dri2_ctx->base, disp, conf, share_list, attrib_list))
      goto cleanup;

   /* Translate client API + requested version into the DRI API enum. */
   switch (dri2_ctx->base.ClientAPI) {
   case EGL_OPENGL_ES_API:
      switch (dri2_ctx->base.ClientMajorVersion) {
      case 1:
         api = __DRI_API_GLES;
         break;
      case 2:
         api = __DRI_API_GLES2;
         break;
      case 3:
         api = __DRI_API_GLES3;
         break;
      default:
         _eglError(EGL_BAD_PARAMETER, "eglCreateContext");
         goto cleanup;
      }
      break;
   case EGL_OPENGL_API:
      /* GL >= 3.2 with the core profile bit, or exactly GL 3.1 (which has
       * no compatibility variant here), use the core API; everything else
       * gets the legacy/compatibility API. */
      if ((dri2_ctx->base.ClientMajorVersion >= 4 ||
           (dri2_ctx->base.ClientMajorVersion == 3 &&
            dri2_ctx->base.ClientMinorVersion >= 2)) &&
          dri2_ctx->base.Profile == EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR)
         api = __DRI_API_OPENGL_CORE;
      else if (dri2_ctx->base.ClientMajorVersion == 3 &&
               dri2_ctx->base.ClientMinorVersion == 1)
         api = __DRI_API_OPENGL_CORE;
      else
         api = __DRI_API_OPENGL;
      break;
   default:
      _eglError(EGL_BAD_PARAMETER, "eglCreateContext");
      goto cleanup;
   }

   if (conf != NULL) {
      /* The config chosen here isn't necessarily
       * used for surfaces later.
       * A pixmap surface will use the single config.
       * This opportunity depends on disabling the
       * doubleBufferMode check in
       * src/mesa/main/context.c:check_compatible()
       */
      if (dri2_config->dri_config[1][0])
         dri_config = dri2_config->dri_config[1][0];
      else
         dri_config = dri2_config->dri_config[0][0];
   } else
      dri_config = NULL;

   if (!dri2_fill_context_attribs(dri2_ctx, dri2_dpy, ctx_attribs,
                                  &num_attribs))
      goto cleanup;

   /* num_attribs counts uint32_t slots; the driver wants pair count. */
   dri2_ctx->dri_context = driCreateContextAttribs(
      dri2_dpy->dri_screen_render_gpu, api, dri_config, shared, num_attribs / 2,
      ctx_attribs, &error, dri2_ctx);
   /* Translates the driver error into an EGL error (no-op on success). */
   dri2_create_context_attribs_error(error);

   if (!dri2_ctx->dri_context)
      goto cleanup;

   mtx_unlock(&dri2_dpy->lock);

   return &dri2_ctx->base;

cleanup:
   mtx_unlock(&dri2_dpy->lock);
   free(dri2_ctx);
   return NULL;
}
1297
1298 /**
1299 * Called via eglDestroyContext(), drv->DestroyContext().
1300 */
1301 static EGLBoolean
dri2_destroy_context(_EGLDisplay * disp,_EGLContext * ctx)1302 dri2_destroy_context(_EGLDisplay *disp, _EGLContext *ctx)
1303 {
1304 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1305
1306 if (_eglPutContext(ctx)) {
1307 driDestroyContext(dri2_ctx->dri_context);
1308 free(dri2_ctx);
1309 }
1310
1311 return EGL_TRUE;
1312 }
1313
1314 EGLBoolean
dri2_init_surface(_EGLSurface * surf,_EGLDisplay * disp,EGLint type,_EGLConfig * conf,const EGLint * attrib_list,EGLBoolean enable_out_fence,void * native_surface)1315 dri2_init_surface(_EGLSurface *surf, _EGLDisplay *disp, EGLint type,
1316 _EGLConfig *conf, const EGLint *attrib_list,
1317 EGLBoolean enable_out_fence, void *native_surface)
1318 {
1319 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1320
1321 dri2_surf->out_fence_fd = -1;
1322 dri2_surf->enable_out_fence = false;
1323 if (disp->Extensions.ANDROID_native_fence_sync) {
1324 dri2_surf->enable_out_fence = enable_out_fence;
1325 }
1326
1327 return _eglInitSurface(surf, disp, type, conf, attrib_list, native_surface);
1328 }
1329
1330 static void
dri2_surface_set_out_fence_fd(_EGLSurface * surf,int fence_fd)1331 dri2_surface_set_out_fence_fd(_EGLSurface *surf, int fence_fd)
1332 {
1333 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1334
1335 if (dri2_surf->out_fence_fd >= 0)
1336 close(dri2_surf->out_fence_fd);
1337
1338 dri2_surf->out_fence_fd = fence_fd;
1339 }
1340
1341 void
dri2_fini_surface(_EGLSurface * surf)1342 dri2_fini_surface(_EGLSurface *surf)
1343 {
1344 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1345
1346 dri2_surface_set_out_fence_fd(surf, -1);
1347 dri2_surf->enable_out_fence = false;
1348 }
1349
1350 static EGLBoolean
dri2_destroy_surface(_EGLDisplay * disp,_EGLSurface * surf)1351 dri2_destroy_surface(_EGLDisplay *disp, _EGLSurface *surf)
1352 {
1353 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1354 EGLBoolean ret = EGL_TRUE;
1355
1356 if (_eglPutSurface(surf))
1357 ret = dri2_dpy->vtbl->destroy_surface(disp, surf);
1358
1359 return ret;
1360 }
1361
1362 static void
dri2_surf_update_fence_fd(_EGLContext * ctx,_EGLDisplay * disp,_EGLSurface * surf)1363 dri2_surf_update_fence_fd(_EGLContext *ctx, _EGLDisplay *disp,
1364 _EGLSurface *surf)
1365 {
1366 struct dri_context *dri_ctx = dri2_egl_context(ctx)->dri_context;
1367 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1368 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1369 int fence_fd = -1;
1370 void *fence;
1371
1372 if (!dri2_surf->enable_out_fence)
1373 return;
1374
1375 fence = dri_create_fence_fd(dri_ctx, -1);
1376 if (fence) {
1377 fence_fd = dri_get_fence_fd(dri2_dpy->dri_screen_render_gpu, fence);
1378 dri_destroy_fence(dri2_dpy->dri_screen_render_gpu, fence);
1379 }
1380 dri2_surface_set_out_fence_fd(surf, fence_fd);
1381 }
1382
1383 EGLBoolean
dri2_create_drawable(struct dri2_egl_display * dri2_dpy,const struct dri_config * config,struct dri2_egl_surface * dri2_surf,void * loaderPrivate)1384 dri2_create_drawable(struct dri2_egl_display *dri2_dpy,
1385 const struct dri_config *config,
1386 struct dri2_egl_surface *dri2_surf, void *loaderPrivate)
1387 {
1388 bool is_pixmap = dri2_surf->base.Type == EGL_PBUFFER_BIT ||
1389 dri2_surf->base.Type == EGL_PIXMAP_BIT;
1390 dri2_surf->dri_drawable = dri_create_drawable(dri2_dpy->dri_screen_render_gpu, config, is_pixmap, loaderPrivate);
1391 if (dri2_surf->dri_drawable == NULL)
1392 return _eglError(EGL_BAD_ALLOC, "createNewDrawable");
1393
1394 return EGL_TRUE;
1395 }
1396
/**
 * Called via eglMakeCurrent(), drv->MakeCurrent().
 *
 * Binds (ctx, dsurf, rsurf) on this thread, unbinding whatever was current
 * before. On driver bind failure it attempts to restore the previous
 * binding; reference counts on contexts/surfaces and the display are kept
 * balanced on every path.
 */
static EGLBoolean
dri2_make_current(_EGLDisplay *disp, _EGLSurface *dsurf, _EGLSurface *rsurf,
                  _EGLContext *ctx)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
   _EGLDisplay *old_disp = NULL;
   struct dri2_egl_display *old_dri2_dpy = NULL;
   _EGLContext *old_ctx;
   _EGLSurface *old_dsurf, *old_rsurf;
   _EGLSurface *tmp_dsurf, *tmp_rsurf;
   struct dri_drawable *ddraw, *rdraw;
   struct dri_context *cctx;
   EGLint egl_error = EGL_SUCCESS;

   if (!dri2_dpy)
      return _eglError(EGL_NOT_INITIALIZED, "eglMakeCurrent");

   /* make new bindings, set the EGL error otherwise */
   if (!_eglBindContext(ctx, dsurf, rsurf, &old_ctx, &old_dsurf, &old_rsurf))
      return EGL_FALSE;

   /* Rebinding the identical context/surfaces is a no-op: drop the extra
    * references _eglBindContext just took and report success. */
   if (old_ctx == ctx && old_dsurf == dsurf && old_rsurf == rsurf) {
      _eglPutSurface(old_dsurf);
      _eglPutSurface(old_rsurf);
      _eglPutContext(old_ctx);
      return EGL_TRUE;
   }

   if (old_ctx) {
      struct dri_context *old_cctx = dri2_egl_context(old_ctx)->dri_context;
      old_disp = old_ctx->Resource.Display;
      old_dri2_dpy = dri2_egl_display(old_disp);

      /* Disable shared buffer mode */
      if (old_dsurf && _eglSurfaceInSharedBufferMode(old_dsurf) &&
          old_dri2_dpy->vtbl->set_shared_buffer_mode) {
         old_dri2_dpy->vtbl->set_shared_buffer_mode(old_disp, old_dsurf, false);
      }

      driUnbindContext(old_cctx);

      /* Emit an out-fence for the outgoing draw surface, if enabled. */
      if (old_dsurf)
         dri2_surf_update_fence_fd(old_ctx, old_disp, old_dsurf);
   }

   ddraw = (dsurf) ? dri2_dpy->vtbl->get_dri_drawable(dsurf) : NULL;
   rdraw = (rsurf) ? dri2_dpy->vtbl->get_dri_drawable(rsurf) : NULL;
   cctx = (dri2_ctx) ? dri2_ctx->dri_context : NULL;

   if (cctx) {
      if (!driBindContext(cctx, ddraw, rdraw)) {
         _EGLContext *tmp_ctx;

         /* driBindContext failed. We cannot tell for sure why, but
          * setting the error to EGL_BAD_MATCH is surely better than leaving it
          * as EGL_SUCCESS.
          */
         egl_error = EGL_BAD_MATCH;

         /* undo the previous _eglBindContext */
         _eglBindContext(old_ctx, old_dsurf, old_rsurf, &ctx, &tmp_dsurf,
                         &tmp_rsurf);
         assert(&dri2_ctx->base == ctx && tmp_dsurf == dsurf &&
                tmp_rsurf == rsurf);

         /* Drop the references that belonged to the failed new binding... */
         _eglPutSurface(dsurf);
         _eglPutSurface(rsurf);
         _eglPutContext(ctx);

         /* ...and the ones _eglBindContext handed back for the old one. */
         _eglPutSurface(old_dsurf);
         _eglPutSurface(old_rsurf);
         _eglPutContext(old_ctx);

         ddraw =
            (old_dsurf) ? dri2_dpy->vtbl->get_dri_drawable(old_dsurf) : NULL;
         rdraw =
            (old_rsurf) ? dri2_dpy->vtbl->get_dri_drawable(old_rsurf) : NULL;
         cctx = (old_ctx) ? dri2_egl_context(old_ctx)->dri_context : NULL;

         /* undo the previous driUnbindContext */
         if (driBindContext(cctx, ddraw, rdraw)) {
            /* Restored successfully: re-enable shared buffer mode that was
             * switched off during the unbind above. */
            if (old_dsurf && _eglSurfaceInSharedBufferMode(old_dsurf) &&
                old_dri2_dpy->vtbl->set_shared_buffer_mode) {
               old_dri2_dpy->vtbl->set_shared_buffer_mode(old_disp, old_dsurf,
                                                          true);
            }

            return _eglError(egl_error, "eglMakeCurrent");
         }

         /* We cannot restore the same state as it was before calling
          * eglMakeCurrent() and the spec isn't clear about what to do. We
          * can prevent EGL from calling into the DRI driver with no DRI
          * context bound.
          */
         dsurf = rsurf = NULL;
         ctx = NULL;

         _eglBindContext(ctx, dsurf, rsurf, &tmp_ctx, &tmp_dsurf, &tmp_rsurf);
         assert(tmp_ctx == old_ctx && tmp_dsurf == old_dsurf &&
                tmp_rsurf == old_rsurf);

         _eglLog(_EGL_WARNING, "DRI2: failed to rebind the previous context");
      } else {
         /* driBindContext succeeded, so take a reference on the
          * dri2_dpy. This prevents dri2_dpy from being reinitialized when a
          * EGLDisplay is terminated and then initialized again while a
          * context is still bound. See dri2_initialize() for a more in depth
          * explanation. */
         p_atomic_inc(&dri2_dpy->ref_count);
      }
   }

   /* Release (and possibly destroy) the previously bound resources. */
   dri2_destroy_surface(disp, old_dsurf);
   dri2_destroy_surface(disp, old_rsurf);

   if (old_ctx) {
      dri2_destroy_context(disp, old_ctx);
      /* Drop the display reference taken when old_ctx became current. */
      dri2_display_release(old_disp);
   }

   if (egl_error != EGL_SUCCESS)
      return _eglError(egl_error, "eglMakeCurrent");

   if (dsurf && _eglSurfaceHasMutableRenderBuffer(dsurf) &&
       dri2_dpy->vtbl->set_shared_buffer_mode) {
      /* Always update the shared buffer mode. This is obviously needed when
       * the active EGL_RENDER_BUFFER is EGL_SINGLE_BUFFER. When
       * EGL_RENDER_BUFFER is EGL_BACK_BUFFER, the update protects us in the
       * case where external non-EGL API may have changed window's shared
       * buffer mode since we last saw it.
       */
      bool mode = (dsurf->ActiveRenderBuffer == EGL_SINGLE_BUFFER);
      dri2_dpy->vtbl->set_shared_buffer_mode(disp, dsurf, mode);
   }

   return EGL_TRUE;
}
1539
1540 struct dri_drawable *
dri2_surface_get_dri_drawable(_EGLSurface * surf)1541 dri2_surface_get_dri_drawable(_EGLSurface *surf)
1542 {
1543 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1544
1545 return dri2_surf->dri_drawable;
1546 }
1547
1548 static _EGLSurface *
dri2_create_window_surface(_EGLDisplay * disp,_EGLConfig * conf,void * native_window,const EGLint * attrib_list)1549 dri2_create_window_surface(_EGLDisplay *disp, _EGLConfig *conf,
1550 void *native_window, const EGLint *attrib_list)
1551 {
1552 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1553 _EGLSurface *ret = dri2_dpy->vtbl->create_window_surface(
1554 disp, conf, native_window, attrib_list);
1555 mtx_unlock(&dri2_dpy->lock);
1556 return ret;
1557 }
1558
1559 static _EGLSurface *
dri2_create_pixmap_surface(_EGLDisplay * disp,_EGLConfig * conf,void * native_pixmap,const EGLint * attrib_list)1560 dri2_create_pixmap_surface(_EGLDisplay *disp, _EGLConfig *conf,
1561 void *native_pixmap, const EGLint *attrib_list)
1562 {
1563 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1564 _EGLSurface *ret = NULL;
1565
1566 if (dri2_dpy->vtbl->create_pixmap_surface)
1567 ret = dri2_dpy->vtbl->create_pixmap_surface(disp, conf, native_pixmap,
1568 attrib_list);
1569
1570 mtx_unlock(&dri2_dpy->lock);
1571
1572 return ret;
1573 }
1574
1575 static _EGLSurface *
dri2_create_pbuffer_surface(_EGLDisplay * disp,_EGLConfig * conf,const EGLint * attrib_list)1576 dri2_create_pbuffer_surface(_EGLDisplay *disp, _EGLConfig *conf,
1577 const EGLint *attrib_list)
1578 {
1579 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1580 _EGLSurface *ret = NULL;
1581
1582 if (dri2_dpy->vtbl->create_pbuffer_surface)
1583 ret = dri2_dpy->vtbl->create_pbuffer_surface(disp, conf, attrib_list);
1584
1585 mtx_unlock(&dri2_dpy->lock);
1586
1587 return ret;
1588 }
1589
1590 static EGLBoolean
dri2_swap_interval(_EGLDisplay * disp,_EGLSurface * surf,EGLint interval)1591 dri2_swap_interval(_EGLDisplay *disp, _EGLSurface *surf, EGLint interval)
1592 {
1593 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1594 EGLBoolean ret = EGL_TRUE;
1595
1596 if (dri2_dpy->vtbl->swap_interval)
1597 ret = dri2_dpy->vtbl->swap_interval(disp, surf, interval);
1598
1599 mtx_unlock(&dri2_dpy->lock);
1600
1601 return ret;
1602 }
1603
/**
 * Asks the client API to flush any rendering to the drawable so that we can
 * do our swapbuffers.
 *
 * \param throttle_reason  passed through to the driver so it can apply the
 *                         appropriate throttling policy for this flush.
 */
void
dri2_flush_drawable_for_swapbuffers_flags(
   _EGLDisplay *disp, _EGLSurface *draw,
   enum __DRI2throttleReason throttle_reason)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(draw);

   /* flush not available for swrast */
   if (dri2_dpy->swrast_not_kms)
      return;

   /* We know there's a current context because:
    *
    *     "If surface is not bound to the calling thread’s current
    *      context, an EGL_BAD_SURFACE error is generated."
    */
   _EGLContext *ctx = _eglGetCurrentContext();
   struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);

   /* From the EGL 1.4 spec (page 52):
    *
    *     "The contents of ancillary buffers are always undefined
    *      after calling eglSwapBuffers."
    */
   dri_flush(dri2_ctx->dri_context, dri_drawable,
             __DRI2_FLUSH_DRAWABLE | __DRI2_FLUSH_INVALIDATE_ANCILLARY,
             throttle_reason);
}
1637
1638 void
dri2_flush_drawable_for_swapbuffers(_EGLDisplay * disp,_EGLSurface * draw)1639 dri2_flush_drawable_for_swapbuffers(_EGLDisplay *disp, _EGLSurface *draw)
1640 {
1641 dri2_flush_drawable_for_swapbuffers_flags(disp, draw,
1642 __DRI2_THROTTLE_SWAPBUFFER);
1643 }
1644
1645 static EGLBoolean
dri2_swap_buffers(_EGLDisplay * disp,_EGLSurface * surf)1646 dri2_swap_buffers(_EGLDisplay *disp, _EGLSurface *surf)
1647 {
1648 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1649 struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1650 _EGLContext *ctx = _eglGetCurrentContext();
1651 EGLBoolean ret;
1652
1653 if (ctx && surf)
1654 dri2_surf_update_fence_fd(ctx, disp, surf);
1655 ret = dri2_dpy->vtbl->swap_buffers(disp, surf);
1656
1657 /* SwapBuffers marks the end of the frame; reset the damage region for
1658 * use again next time.
1659 */
1660 if (ret && disp->Extensions.KHR_partial_update)
1661 dri_set_damage_region(dri_drawable, 0, NULL);
1662
1663 return ret;
1664 }
1665
1666 static EGLBoolean
dri2_swap_buffers_with_damage(_EGLDisplay * disp,_EGLSurface * surf,const EGLint * rects,EGLint n_rects)1667 dri2_swap_buffers_with_damage(_EGLDisplay *disp, _EGLSurface *surf,
1668 const EGLint *rects, EGLint n_rects)
1669 {
1670 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1671 struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1672 _EGLContext *ctx = _eglGetCurrentContext();
1673 EGLBoolean ret;
1674
1675 if (ctx && surf)
1676 dri2_surf_update_fence_fd(ctx, disp, surf);
1677 if (dri2_dpy->vtbl->swap_buffers_with_damage)
1678 ret =
1679 dri2_dpy->vtbl->swap_buffers_with_damage(disp, surf, rects, n_rects);
1680 else
1681 ret = dri2_dpy->vtbl->swap_buffers(disp, surf);
1682
1683 /* SwapBuffers marks the end of the frame; reset the damage region for
1684 * use again next time.
1685 */
1686 if (ret && disp->Extensions.KHR_partial_update)
1687 dri_set_damage_region(dri_drawable, 0, NULL);
1688
1689 return ret;
1690 }
1691
1692 static EGLBoolean
dri2_swap_buffers_region(_EGLDisplay * disp,_EGLSurface * surf,EGLint numRects,const EGLint * rects)1693 dri2_swap_buffers_region(_EGLDisplay *disp, _EGLSurface *surf, EGLint numRects,
1694 const EGLint *rects)
1695 {
1696 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1697 struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1698 EGLBoolean ret;
1699
1700 if (!dri2_dpy->vtbl->swap_buffers_region)
1701 return EGL_FALSE;
1702 ret = dri2_dpy->vtbl->swap_buffers_region(disp, surf, numRects, rects);
1703
1704 /* SwapBuffers marks the end of the frame; reset the damage region for
1705 * use again next time.
1706 */
1707 if (ret && disp->Extensions.KHR_partial_update)
1708 dri_set_damage_region(dri_drawable, 0, NULL);
1709
1710 return ret;
1711 }
1712
1713 static EGLBoolean
dri2_set_damage_region(_EGLDisplay * disp,_EGLSurface * surf,EGLint * rects,EGLint n_rects)1714 dri2_set_damage_region(_EGLDisplay *disp, _EGLSurface *surf, EGLint *rects,
1715 EGLint n_rects)
1716 {
1717 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1718 struct dri_drawable *drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1719
1720 if (!disp->Extensions.KHR_partial_update) {
1721 mtx_unlock(&dri2_dpy->lock);
1722 return EGL_FALSE;
1723 }
1724
1725 dri_set_damage_region(drawable, n_rects, rects);
1726 mtx_unlock(&dri2_dpy->lock);
1727 return EGL_TRUE;
1728 }
1729
1730 static EGLBoolean
dri2_post_sub_buffer(_EGLDisplay * disp,_EGLSurface * surf,EGLint x,EGLint y,EGLint width,EGLint height)1731 dri2_post_sub_buffer(_EGLDisplay *disp, _EGLSurface *surf, EGLint x, EGLint y,
1732 EGLint width, EGLint height)
1733 {
1734 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1735 EGLBoolean ret = EGL_FALSE;
1736
1737 if (dri2_dpy->vtbl->post_sub_buffer)
1738 ret = dri2_dpy->vtbl->post_sub_buffer(disp, surf, x, y, width, height);
1739
1740 mtx_unlock(&dri2_dpy->lock);
1741
1742 return ret;
1743 }
1744
1745 static EGLBoolean
dri2_copy_buffers(_EGLDisplay * disp,_EGLSurface * surf,void * native_pixmap_target)1746 dri2_copy_buffers(_EGLDisplay *disp, _EGLSurface *surf,
1747 void *native_pixmap_target)
1748 {
1749 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1750 if (!dri2_dpy->vtbl->copy_buffers)
1751 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_NATIVE_PIXMAP,
1752 "no support for native pixmaps");
1753 EGLBoolean ret =
1754 dri2_dpy->vtbl->copy_buffers(disp, surf, native_pixmap_target);
1755 mtx_unlock(&dri2_dpy->lock);
1756 return ret;
1757 }
1758
1759 static EGLint
dri2_query_buffer_age(_EGLDisplay * disp,_EGLSurface * surf)1760 dri2_query_buffer_age(_EGLDisplay *disp, _EGLSurface *surf)
1761 {
1762 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1763 if (!dri2_dpy->vtbl->query_buffer_age)
1764 return 0;
1765 return dri2_dpy->vtbl->query_buffer_age(disp, surf);
1766 }
1767
1768 static EGLBoolean
dri2_wait_client(_EGLDisplay * disp,_EGLContext * ctx)1769 dri2_wait_client(_EGLDisplay *disp, _EGLContext *ctx)
1770 {
1771 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1772 _EGLSurface *surf = ctx->DrawSurface;
1773 struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1774
1775 /* FIXME: If EGL allows frontbuffer rendering for window surfaces,
1776 * we need to copy fake to real here.*/
1777
1778 if (!dri2_dpy->swrast_not_kms)
1779 dri_flush_drawable(dri_drawable);
1780
1781 return EGL_TRUE;
1782 }
1783
1784 static EGLBoolean
dri2_wait_native(EGLint engine)1785 dri2_wait_native(EGLint engine)
1786 {
1787 if (engine != EGL_CORE_NATIVE_ENGINE)
1788 return _eglError(EGL_BAD_PARAMETER, "eglWaitNative");
1789 /* glXWaitX(); */
1790
1791 return EGL_TRUE;
1792 }
1793
/**
 * eglBindTexImage: bind the surface's color buffer to the currently bound
 * GL texture of the current context.
 *
 * Only EGL_TEXTURE_RGB/RGBA formats and the EGL_TEXTURE_2D target are
 * expected; anything else trips an assert and falls back to RGBA / 2D.
 */
static EGLBoolean
dri2_bind_tex_image(_EGLDisplay *disp, _EGLSurface *surf, EGLint buffer)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   struct dri2_egl_context *dri2_ctx;
   _EGLContext *ctx;
   GLint format, target;
   struct dri_drawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);

   /* The texture is bound into the calling thread's current context. */
   ctx = _eglGetCurrentContext();
   dri2_ctx = dri2_egl_context(ctx);

   /* Validates surf/buffer and records the binding; sets the EGL error
    * itself on failure. */
   if (!_eglBindTexImage(disp, surf, buffer)) {
      mtx_unlock(&dri2_dpy->lock);
      return EGL_FALSE;
   }

   /* Translate the EGL texture format into the DRI equivalent. */
   switch (surf->TextureFormat) {
   case EGL_TEXTURE_RGB:
      format = __DRI_TEXTURE_FORMAT_RGB;
      break;
   case EGL_TEXTURE_RGBA:
      format = __DRI_TEXTURE_FORMAT_RGBA;
      break;
   default:
      assert(!"Unexpected texture format in dri2_bind_tex_image()");
      format = __DRI_TEXTURE_FORMAT_RGBA;
   }

   /* EGL_TEXTURE_2D is the only target EGL defines for BindTexImage. */
   switch (surf->TextureTarget) {
   case EGL_TEXTURE_2D:
      target = GL_TEXTURE_2D;
      break;
   default:
      target = GL_TEXTURE_2D;
      assert(!"Unexpected texture target in dri2_bind_tex_image()");
   }

   dri_set_tex_buffer2(dri2_ctx->dri_context, target, format, dri_drawable);

   mtx_unlock(&dri2_dpy->lock);

   return EGL_TRUE;
}
1838
1839 static EGLBoolean
dri2_release_tex_image(_EGLDisplay * disp,_EGLSurface * surf,EGLint buffer)1840 dri2_release_tex_image(_EGLDisplay *disp, _EGLSurface *surf, EGLint buffer)
1841 {
1842 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1843
1844 if (!_eglReleaseTexImage(disp, surf, buffer)) {
1845 mtx_unlock(&dri2_dpy->lock);
1846 return EGL_FALSE;
1847 }
1848
1849 mtx_unlock(&dri2_dpy->lock);
1850
1851 return EGL_TRUE;
1852 }
1853
1854 static _EGLImage *
dri2_create_image(_EGLDisplay * disp,_EGLContext * ctx,EGLenum target,EGLClientBuffer buffer,const EGLint * attr_list)1855 dri2_create_image(_EGLDisplay *disp, _EGLContext *ctx, EGLenum target,
1856 EGLClientBuffer buffer, const EGLint *attr_list)
1857 {
1858 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1859 _EGLImage *ret =
1860 dri2_dpy->vtbl->create_image(disp, ctx, target, buffer, attr_list);
1861 mtx_unlock(&dri2_dpy->lock);
1862 return ret;
1863 }
1864
1865 _EGLImage *
dri2_create_image_from_dri(_EGLDisplay * disp,struct dri_image * dri_image)1866 dri2_create_image_from_dri(_EGLDisplay *disp, struct dri_image *dri_image)
1867 {
1868 struct dri2_egl_image *dri2_img;
1869
1870 if (dri_image == NULL) {
1871 _eglError(EGL_BAD_ALLOC, "dri2_create_image");
1872 return NULL;
1873 }
1874
1875 dri2_img = malloc(sizeof *dri2_img);
1876 if (!dri2_img) {
1877 _eglError(EGL_BAD_ALLOC, "dri2_create_image");
1878 return NULL;
1879 }
1880
1881 _eglInitImage(&dri2_img->base, disp);
1882
1883 dri2_img->dri_image = dri_image;
1884
1885 return &dri2_img->base;
1886 }
1887
1888 /**
1889 * Translate a DRI Image extension error code into an EGL error code.
1890 */
1891 static EGLint
egl_error_from_dri_image_error(int dri_error)1892 egl_error_from_dri_image_error(int dri_error)
1893 {
1894 switch (dri_error) {
1895 case __DRI_IMAGE_ERROR_SUCCESS:
1896 return EGL_SUCCESS;
1897 case __DRI_IMAGE_ERROR_BAD_ALLOC:
1898 return EGL_BAD_ALLOC;
1899 case __DRI_IMAGE_ERROR_BAD_MATCH:
1900 return EGL_BAD_MATCH;
1901 case __DRI_IMAGE_ERROR_BAD_PARAMETER:
1902 return EGL_BAD_PARAMETER;
1903 case __DRI_IMAGE_ERROR_BAD_ACCESS:
1904 return EGL_BAD_ACCESS;
1905 default:
1906 assert(!"unknown dri_error code");
1907 return EGL_BAD_ALLOC;
1908 }
1909 }
1910
1911 static _EGLImage *
dri2_create_image_khr_renderbuffer(_EGLDisplay * disp,_EGLContext * ctx,EGLClientBuffer buffer,const EGLint * attr_list)1912 dri2_create_image_khr_renderbuffer(_EGLDisplay *disp, _EGLContext *ctx,
1913 EGLClientBuffer buffer,
1914 const EGLint *attr_list)
1915 {
1916 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1917 GLuint renderbuffer = (GLuint)(uintptr_t)buffer;
1918 struct dri_image *dri_image;
1919
1920 if (renderbuffer == 0) {
1921 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
1922 return EGL_NO_IMAGE_KHR;
1923 }
1924
1925 if (!disp->Extensions.KHR_gl_renderbuffer_image) {
1926 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
1927 return EGL_NO_IMAGE_KHR;
1928 }
1929
1930 unsigned error = ~0;
1931 dri_image = dri_create_image_from_renderbuffer(
1932 dri2_ctx->dri_context, renderbuffer, NULL, &error);
1933
1934 assert(!!dri_image == (error == __DRI_IMAGE_ERROR_SUCCESS));
1935
1936 if (!dri_image) {
1937 _eglError(egl_error_from_dri_image_error(error), "dri2_create_image_khr");
1938 return EGL_NO_IMAGE_KHR;
1939 }
1940
1941 return dri2_create_image_from_dri(disp, dri_image);
1942 }
1943
1944 #ifdef HAVE_WAYLAND_PLATFORM
1945
1946 /* This structure describes how a wl_buffer maps to one or more
1947 * dri_image structures. A wl_drm_buffer stores the wl_drm format code and the
1948 * offsets and strides of the planes in the buffer. This table maps a
1949 * wl_drm format code to a description of the planes in the buffer
1950 * that lets us create a struct dri_image for each of the planes. */
1951
static const struct wl_drm_components_descriptor {
   uint32_t dri_components; /* __DRI_IMAGE_COMPONENTS_* layout code */
   EGLint components;       /* matching EGL_TEXTURE_* token for wl_drm clients */
   int nplanes;             /* number of planes a buffer of this layout has */
} wl_drm_components[] = {
   {__DRI_IMAGE_COMPONENTS_RGB, EGL_TEXTURE_RGB, 1},
   {__DRI_IMAGE_COMPONENTS_RGBA, EGL_TEXTURE_RGBA, 1},
   {__DRI_IMAGE_COMPONENTS_Y_U_V, EGL_TEXTURE_Y_U_V_WL, 3},
   {__DRI_IMAGE_COMPONENTS_Y_UV, EGL_TEXTURE_Y_UV_WL, 2},
   {__DRI_IMAGE_COMPONENTS_Y_XUXV, EGL_TEXTURE_Y_XUXV_WL, 2},
};
1963
static _EGLImage *
dri2_create_image_wayland_wl_buffer(_EGLDisplay *disp, _EGLContext *ctx,
                                    EGLClientBuffer _buffer,
                                    const EGLint *attr_list)
{
   /* EGL_WL_bind_wayland_display: create an EGLImage from a server-side
    * wl_buffer resource that was created through the wl_drm protocol. */
   struct wl_drm_buffer *buffer;
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   const struct wl_drm_components_descriptor *f;
   struct dri_image *dri_image;
   _EGLImageAttribs attrs;
   int32_t plane;

   /* Reject resources that were not created by our wl_drm instance. */
   buffer = wayland_drm_buffer_get(dri2_dpy->wl_server_drm,
                                   (struct wl_resource *)_buffer);
   if (!buffer)
      return NULL;

   if (!_eglParseImageAttribList(&attrs, disp, attr_list))
      return NULL;

   /* EGL_WAYLAND_PLANE_WL selects one plane of a multi-planar buffer; it
    * must fall within the plane count of the buffer's format. */
   plane = attrs.PlaneWL;
   f = buffer->driver_format;
   if (plane < 0 || plane >= f->nplanes) {
      _eglError(EGL_BAD_PARAMETER,
                "dri2_create_image_wayland_wl_buffer (plane out of bounds)");
      return NULL;
   }

   /* Prefer extracting the requested plane; for plane 0, fall back to
    * duplicating the whole image if the driver lacks planar support. */
   dri_image = dri2_from_planar(buffer->driver_buffer, plane, NULL);
   if (dri_image == NULL && plane == 0)
      dri_image = dri2_dup_image(buffer->driver_buffer, NULL);
   if (dri_image == NULL) {
      _eglError(EGL_BAD_PARAMETER, "dri2_create_image_wayland_wl_buffer");
      return NULL;
   }

   return dri2_create_image_from_dri(disp, dri_image);
}
2002 #endif
2003
2004 static EGLBoolean
dri2_get_sync_values_chromium(_EGLDisplay * disp,_EGLSurface * surf,EGLuint64KHR * ust,EGLuint64KHR * msc,EGLuint64KHR * sbc)2005 dri2_get_sync_values_chromium(_EGLDisplay *disp, _EGLSurface *surf,
2006 EGLuint64KHR *ust, EGLuint64KHR *msc,
2007 EGLuint64KHR *sbc)
2008 {
2009 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2010 EGLBoolean ret = EGL_FALSE;
2011
2012 if (dri2_dpy->vtbl->get_sync_values)
2013 ret = dri2_dpy->vtbl->get_sync_values(disp, surf, ust, msc, sbc);
2014
2015 return ret;
2016 }
2017
2018 static EGLBoolean
dri2_get_msc_rate_angle(_EGLDisplay * disp,_EGLSurface * surf,EGLint * numerator,EGLint * denominator)2019 dri2_get_msc_rate_angle(_EGLDisplay *disp, _EGLSurface *surf, EGLint *numerator,
2020 EGLint *denominator)
2021 {
2022 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2023 if (!dri2_dpy->vtbl->get_msc_rate)
2024 return EGL_FALSE;
2025 return dri2_dpy->vtbl->get_msc_rate(disp, surf, numerator, denominator);
2026 }
2027
2028 /**
2029 * Set the error code after a call to
2030 * dri2_egl_image::dri_image::createImageFromTexture.
2031 */
2032 static void
dri2_create_image_khr_texture_error(int dri_error)2033 dri2_create_image_khr_texture_error(int dri_error)
2034 {
2035 EGLint egl_error = egl_error_from_dri_image_error(dri_error);
2036
2037 if (egl_error != EGL_SUCCESS)
2038 _eglError(egl_error, "dri2_create_image_khr_texture");
2039 }
2040
static _EGLImage *
dri2_create_image_khr_texture(_EGLDisplay *disp, _EGLContext *ctx,
                              EGLenum target, EGLClientBuffer buffer,
                              const EGLint *attr_list)
{
   /* Create an EGLImage from a GL texture (EGL_KHR_gl_texture_*_image).
    * <buffer> carries the GL texture name; <target> selects the texture
    * kind and, for cube maps, the face. */
   struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
   struct dri2_egl_image *dri2_img;
   GLuint texture = (GLuint)(uintptr_t)buffer;
   _EGLImageAttribs attrs;
   GLuint depth;
   GLenum gl_target;
   unsigned error = __DRI_IMAGE_ERROR_SUCCESS;

   /* Texture name 0 can never name a valid texture object. */
   if (texture == 0) {
      _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
      return EGL_NO_IMAGE_KHR;
   }

   if (!_eglParseImageAttribList(&attrs, disp, attr_list))
      return EGL_NO_IMAGE_KHR;

   /* Map the EGL target to a GL texture target plus a layer/face index
    * ("depth"), checking that the matching extension is advertised. */
   switch (target) {
   case EGL_GL_TEXTURE_2D_KHR:
      if (!disp->Extensions.KHR_gl_texture_2D_image) {
         _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
         return EGL_NO_IMAGE_KHR;
      }
      depth = 0;
      gl_target = GL_TEXTURE_2D;
      break;
   case EGL_GL_TEXTURE_3D_KHR:
      if (!disp->Extensions.KHR_gl_texture_3D_image) {
         _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
         return EGL_NO_IMAGE_KHR;
      }

      depth = attrs.GLTextureZOffset;
      gl_target = GL_TEXTURE_3D;
      break;
   case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR:
   case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X_KHR:
   case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y_KHR:
   case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_KHR:
   case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z_KHR:
   case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_KHR:
      if (!disp->Extensions.KHR_gl_texture_cubemap_image) {
         _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
         return EGL_NO_IMAGE_KHR;
      }

      /* The EGL cube-face tokens are consecutive, so the offset from +X
       * gives the face index. */
      depth = target - EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR;
      gl_target = GL_TEXTURE_CUBE_MAP;
      break;
   default:
      unreachable("Unexpected target in dri2_create_image_khr_texture()");
      return EGL_NO_IMAGE_KHR;
   }

   dri2_img = malloc(sizeof *dri2_img);
   if (!dri2_img) {
      _eglError(EGL_BAD_ALLOC, "dri2_create_image_khr");
      return EGL_NO_IMAGE_KHR;
   }

   _eglInitImage(&dri2_img->base, disp);

   dri2_img->dri_image = dri2_create_from_texture(
      dri2_ctx->dri_context, gl_target, texture, depth, attrs.GLTextureLevel,
      &error, NULL);
   /* Raise the EGL error matching the driver result (no-op on success). */
   dri2_create_image_khr_texture_error(error);

   if (!dri2_img->dri_image) {
      free(dri2_img);
      return EGL_NO_IMAGE_KHR;
   }
   return &dri2_img->base;
}
2118
2119 static EGLBoolean
dri2_query_surface(_EGLDisplay * disp,_EGLSurface * surf,EGLint attribute,EGLint * value)2120 dri2_query_surface(_EGLDisplay *disp, _EGLSurface *surf, EGLint attribute,
2121 EGLint *value)
2122 {
2123 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2124 EGLBoolean ret;
2125
2126 if (!dri2_dpy->vtbl->query_surface) {
2127 ret = _eglQuerySurface(disp, surf, attribute, value);
2128 } else {
2129 ret = dri2_dpy->vtbl->query_surface(disp, surf, attribute, value);
2130 }
2131
2132 return ret;
2133 }
2134
2135 static struct wl_buffer *
dri2_create_wayland_buffer_from_image(_EGLDisplay * disp,_EGLImage * img)2136 dri2_create_wayland_buffer_from_image(_EGLDisplay *disp, _EGLImage *img)
2137 {
2138 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2139 struct wl_buffer *ret = NULL;
2140
2141 if (dri2_dpy->vtbl->create_wayland_buffer_from_image)
2142 ret = dri2_dpy->vtbl->create_wayland_buffer_from_image(disp, img);
2143
2144 mtx_unlock(&dri2_dpy->lock);
2145
2146 return ret;
2147 }
2148
2149 #ifdef HAVE_LIBDRM
static _EGLImage *
dri2_create_image_mesa_drm_buffer(_EGLDisplay *disp, _EGLContext *ctx,
                                  EGLClientBuffer buffer,
                                  const EGLint *attr_list)
{
   /* EGL_MESA_drm_image import path: <buffer> carries a GEM flink name and
    * the attribute list supplies width, height, format and stride. */
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   EGLint name, pitch;
   uint32_t fourcc;
   _EGLImageAttribs attrs;
   struct dri_image *dri_image;

   name = (EGLint)(uintptr_t)buffer;

   if (!_eglParseImageAttribList(&attrs, disp, attr_list))
      return NULL;

   if (attrs.Width <= 0 || attrs.Height <= 0 ||
       attrs.DRMBufferStrideMESA <= 0) {
      _eglError(EGL_BAD_PARAMETER, "bad width, height or stride");
      return NULL;
   }

   /* Only ARGB32 is importable; the MESA stride attribute is in pixels,
    * while the DRI interface expects bytes. */
   switch (attrs.DRMBufferFormatMESA) {
   case EGL_DRM_BUFFER_FORMAT_ARGB32_MESA:
      fourcc = DRM_FORMAT_ARGB8888;
      pitch = attrs.DRMBufferStrideMESA * 4;
      break;
   default:
      _eglError(EGL_BAD_PARAMETER,
                "dri2_create_image_khr: unsupported pixmap depth");
      return NULL;
   }

   int offset = 0;
   dri_image = dri2_from_names(
      dri2_dpy->dri_screen_render_gpu, attrs.Width, attrs.Height, fourcc,
      (int *) &name, 1, (int *) &pitch, &offset, NULL);

   /* dri2_create_image_from_dri reports EGL_BAD_ALLOC on a NULL image. */
   return dri2_create_image_from_dri(disp, dri_image);
}
2190
static EGLBoolean
dri2_check_dma_buf_attribs(const _EGLImageAttribs *attrs)
{
   /* Validate an EGL_LINUX_DMA_BUF_EXT attribute list for internal
    * consistency; raises the spec-mandated EGL error and returns EGL_FALSE
    * on any violation. */

   /**
    * The spec says:
    *
    * "Required attributes and their values are as follows:
    *
    *  * EGL_WIDTH & EGL_HEIGHT: The logical dimensions of the buffer in pixels
    *
    *  * EGL_LINUX_DRM_FOURCC_EXT: The pixel format of the buffer, as specified
    *    by drm_fourcc.h and used as the pixel_format parameter of the
    *    drm_mode_fb_cmd2 ioctl."
    *
    * and
    *
    * "* If <target> is EGL_LINUX_DMA_BUF_EXT, and the list of attributes is
    *    incomplete, EGL_BAD_PARAMETER is generated."
    */
   if (attrs->Width <= 0 || attrs->Height <= 0 ||
       !attrs->DMABufFourCC.IsPresent)
      return _eglError(EGL_BAD_PARAMETER, "attribute(s) missing");

   /**
    * Also:
    *
    * "If <target> is EGL_LINUX_DMA_BUF_EXT and one or more of the values
    *  specified for a plane's pitch or offset isn't supported by EGL,
    *  EGL_BAD_ACCESS is generated."
    */
   for (unsigned i = 0; i < ARRAY_SIZE(attrs->DMABufPlanePitches); ++i) {
      if (attrs->DMABufPlanePitches[i].IsPresent &&
          attrs->DMABufPlanePitches[i].Value <= 0)
         return _eglError(EGL_BAD_ACCESS, "invalid pitch");
   }

   /**
    * If <target> is EGL_LINUX_DMA_BUF_EXT, both or neither of the following
    * attribute values may be given.
    *
    * This is referring to EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT and
    * EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, and the same for other planes.
    */
   for (unsigned i = 0; i < DMA_BUF_MAX_PLANES; ++i) {
      if (attrs->DMABufPlaneModifiersLo[i].IsPresent !=
          attrs->DMABufPlaneModifiersHi[i].IsPresent)
         return _eglError(EGL_BAD_PARAMETER,
                          "modifier attribute lo or hi missing");
   }

   /* Although the EGL_EXT_image_dma_buf_import_modifiers spec doesn't
    * mandate it, we only accept the same modifier across all planes. */
   for (unsigned i = 1; i < DMA_BUF_MAX_PLANES; ++i) {
      if (attrs->DMABufPlaneFds[i].IsPresent) {
         if ((attrs->DMABufPlaneModifiersLo[0].IsPresent !=
              attrs->DMABufPlaneModifiersLo[i].IsPresent) ||
             (attrs->DMABufPlaneModifiersLo[0].Value !=
              attrs->DMABufPlaneModifiersLo[i].Value) ||
             (attrs->DMABufPlaneModifiersHi[0].Value !=
              attrs->DMABufPlaneModifiersHi[i].Value))
            return _eglError(EGL_BAD_PARAMETER,
                             "modifier attributes not equal");
      }
   }

   return EGL_TRUE;
}
2258
/* Returns the total number of planes for the format or zero if it isn't a
 * valid fourcc format.
 */
static unsigned
dri2_num_fourcc_format_planes(EGLint format)
{
   switch (format) {
   /* Single-plane formats: RGB variants and packed/interleaved YUV. */
   case DRM_FORMAT_R8:
   case DRM_FORMAT_RG88:
   case DRM_FORMAT_GR88:
   case DRM_FORMAT_R16:
   case DRM_FORMAT_GR1616:
   case DRM_FORMAT_RGB332:
   case DRM_FORMAT_BGR233:
   case DRM_FORMAT_XRGB4444:
   case DRM_FORMAT_XBGR4444:
   case DRM_FORMAT_RGBX4444:
   case DRM_FORMAT_BGRX4444:
   case DRM_FORMAT_ARGB4444:
   case DRM_FORMAT_ABGR4444:
   case DRM_FORMAT_RGBA4444:
   case DRM_FORMAT_BGRA4444:
   case DRM_FORMAT_XRGB1555:
   case DRM_FORMAT_XBGR1555:
   case DRM_FORMAT_RGBX5551:
   case DRM_FORMAT_BGRX5551:
   case DRM_FORMAT_ARGB1555:
   case DRM_FORMAT_ABGR1555:
   case DRM_FORMAT_RGBA5551:
   case DRM_FORMAT_BGRA5551:
   case DRM_FORMAT_RGB565:
   case DRM_FORMAT_BGR565:
   case DRM_FORMAT_RGB888:
   case DRM_FORMAT_BGR888:
   case DRM_FORMAT_XRGB8888:
   case DRM_FORMAT_XBGR8888:
   case DRM_FORMAT_RGBX8888:
   case DRM_FORMAT_BGRX8888:
   case DRM_FORMAT_ARGB8888:
   case DRM_FORMAT_ABGR8888:
   case DRM_FORMAT_RGBA8888:
   case DRM_FORMAT_BGRA8888:
   case DRM_FORMAT_XRGB2101010:
   case DRM_FORMAT_XBGR2101010:
   case DRM_FORMAT_RGBX1010102:
   case DRM_FORMAT_BGRX1010102:
   case DRM_FORMAT_ARGB2101010:
   case DRM_FORMAT_ABGR2101010:
   case DRM_FORMAT_RGBA1010102:
   case DRM_FORMAT_BGRA1010102:
   case DRM_FORMAT_ABGR16161616:
   case DRM_FORMAT_XBGR16161616:
   case DRM_FORMAT_XBGR16161616F:
   case DRM_FORMAT_ABGR16161616F:
   case DRM_FORMAT_YUYV:
   case DRM_FORMAT_YVYU:
   case DRM_FORMAT_UYVY:
   case DRM_FORMAT_VYUY:
   case DRM_FORMAT_AYUV:
   case DRM_FORMAT_XYUV8888:
   case DRM_FORMAT_Y210:
   case DRM_FORMAT_Y212:
   case DRM_FORMAT_Y216:
   case DRM_FORMAT_Y410:
   case DRM_FORMAT_Y412:
   case DRM_FORMAT_Y416:
      return 1;

   /* Two-plane formats: luma plane plus an interleaved chroma plane. */
   case DRM_FORMAT_NV12:
   case DRM_FORMAT_NV21:
   case DRM_FORMAT_NV16:
   case DRM_FORMAT_NV61:
   case DRM_FORMAT_NV15:
   case DRM_FORMAT_NV20:
   case DRM_FORMAT_NV30:
   case DRM_FORMAT_P010:
   case DRM_FORMAT_P012:
   case DRM_FORMAT_P016:
   case DRM_FORMAT_P030:
      return 2;

   /* Three-plane formats: fully planar YUV. */
   case DRM_FORMAT_YUV410:
   case DRM_FORMAT_YVU410:
   case DRM_FORMAT_YUV411:
   case DRM_FORMAT_YVU411:
   case DRM_FORMAT_YUV420:
   case DRM_FORMAT_YVU420:
   case DRM_FORMAT_YUV422:
   case DRM_FORMAT_YVU422:
   case DRM_FORMAT_YUV444:
   case DRM_FORMAT_YVU444:
      return 3;

   default:
      /* Unknown fourcc: caller treats 0 as "not a valid format". */
      return 0;
   }
}
2356
/* Returns the total number of file descriptors. Zero indicates an error. */
static unsigned
dri2_check_dma_buf_format(const _EGLImageAttribs *attrs)
{
   /* Start from the plane count the fourcc alone implies. */
   unsigned plane_n = dri2_num_fourcc_format_planes(attrs->DMABufFourCC.Value);
   if (plane_n == 0) {
      _eglError(EGL_BAD_MATCH, "unknown drm fourcc format");
      return 0;
   }

   for (unsigned i = plane_n; i < DMA_BUF_MAX_PLANES; i++) {
      /**
       * The modifiers extension spec says:
       *
       * "Modifiers may modify any attribute of a buffer import, including
       *  but not limited to adding extra planes to a format which
       *  otherwise does not have those planes. As an example, a modifier
       *  may add a plane for an external compression buffer to a
       *  single-plane format. The exact meaning and effect of any
       *  modifier is canonically defined by drm_fourcc.h, not as part of
       *  this extension."
       */
      if (attrs->DMABufPlaneModifiersLo[i].IsPresent &&
          attrs->DMABufPlaneModifiersHi[i].IsPresent) {
         plane_n = i + 1;
      }
   }

   /**
    * The spec says:
    *
    * "* If <target> is EGL_LINUX_DMA_BUF_EXT, and the list of attributes is
    *    incomplete, EGL_BAD_PARAMETER is generated."
    */
   for (unsigned i = 0; i < plane_n; ++i) {
      if (!attrs->DMABufPlaneFds[i].IsPresent ||
          !attrs->DMABufPlaneOffsets[i].IsPresent ||
          !attrs->DMABufPlanePitches[i].IsPresent) {
         _eglError(EGL_BAD_PARAMETER, "plane attribute(s) missing");
         return 0;
      }
   }

   /**
    * The spec also says:
    *
    * "If <target> is EGL_LINUX_DMA_BUF_EXT, and the EGL_LINUX_DRM_FOURCC_EXT
    *  attribute indicates a single-plane format, EGL_BAD_ATTRIBUTE is
    *  generated if any of the EGL_DMA_BUF_PLANE1_* or EGL_DMA_BUF_PLANE2_*
    *  or EGL_DMA_BUF_PLANE3_* attributes are specified."
    */
   for (unsigned i = plane_n; i < DMA_BUF_MAX_PLANES; ++i) {
      if (attrs->DMABufPlaneFds[i].IsPresent ||
          attrs->DMABufPlaneOffsets[i].IsPresent ||
          attrs->DMABufPlanePitches[i].IsPresent) {
         _eglError(EGL_BAD_ATTRIBUTE, "too many plane attributes");
         return 0;
      }
   }

   return plane_n;
}
2419
static EGLBoolean
dri2_query_dma_buf_formats(_EGLDisplay *disp, EGLint max, EGLint *formats,
                           EGLint *count)
{
   /* EGL_EXT_image_dma_buf_import_modifiers: list the fourcc formats the
    * driver can import.  With max == 0, only *count is written. */
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   if (max < 0 || (max > 0 && formats == NULL)) {
      _eglError(EGL_BAD_PARAMETER, "invalid value for max count of formats");
      goto fail;
   }

   if (!dri2_dpy->has_dmabuf_import)
      goto fail;

   if (!dri_query_dma_buf_formats(dri2_dpy->dri_screen_render_gpu,
                                  max, formats, count))
      goto fail;

   if (max > 0) {
      /* Assert that all of the formats returned are actually fourcc formats.
       * Some day, if we want the internal interface function to be able to
       * return the fake fourcc formats defined in mesa_interface.h, we'll have
       * to do something more clever here to pair the list down to just real
       * fourcc formats so that we don't leak the fake internal ones.
       */
      for (int i = 0; i < *count; i++) {
         assert(dri2_num_fourcc_format_planes(formats[i]) > 0);
      }
   }

   mtx_unlock(&dri2_dpy->lock);

   return EGL_TRUE;

fail:
   mtx_unlock(&dri2_dpy->lock);
   return EGL_FALSE;
}
2457
static EGLBoolean
dri2_query_dma_buf_modifiers(_EGLDisplay *disp, EGLint format, EGLint max,
                             EGLuint64KHR *modifiers, EGLBoolean *external_only,
                             EGLint *count)
{
   /* EGL_EXT_image_dma_buf_import_modifiers: list the modifiers the driver
    * supports for <format>.  With max == 0, only *count is written. */
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);

   if (dri2_num_fourcc_format_planes(format) == 0)
      return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
                                   "invalid fourcc format");

   if (max < 0)
      return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
                                   "invalid value for max count of formats");

   if (max > 0 && modifiers == NULL)
      return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
                                   "invalid modifiers array");

   if (!dri2_dpy->has_dmabuf_import) {
      mtx_unlock(&dri2_dpy->lock);
      return EGL_FALSE;
   }

   /* external_only is an EGLBoolean array; the DRI interface fills it as
    * unsigned ints of the same width. */
   if (dri_query_dma_buf_modifiers(
          dri2_dpy->dri_screen_render_gpu, format, max, modifiers,
          (unsigned int *)external_only, count) == false)
      return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
                                   "invalid format");

   mtx_unlock(&dri2_dpy->lock);

   return EGL_TRUE;
}
2492
/**
 * The spec says:
 *
 * "If eglCreateImageKHR is successful for a EGL_LINUX_DMA_BUF_EXT target, the
 *  EGL will take a reference to the dma_buf(s) which it will release at any
 *  time while the EGLDisplay is initialized. It is the responsibility of the
 *  application to close the dma_buf file descriptors."
 *
 * Therefore we must never close or otherwise modify the file descriptors.
 */
_EGLImage *
dri2_create_image_dma_buf(_EGLDisplay *disp, _EGLContext *ctx,
                          EGLClientBuffer buffer, const EGLint *attr_list)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   _EGLImage *res;
   _EGLImageAttribs attrs;
   struct dri_image *dri_image;
   unsigned num_fds;
   int fds[DMA_BUF_MAX_PLANES];
   int pitches[DMA_BUF_MAX_PLANES];
   int offsets[DMA_BUF_MAX_PLANES];
   uint64_t modifier;
   unsigned error = __DRI_IMAGE_ERROR_SUCCESS;
   EGLint egl_error;

   /**
    * The spec says:
    *
    * ""* If <target> is EGL_LINUX_DMA_BUF_EXT and <buffer> is not NULL, the
    *     error EGL_BAD_PARAMETER is generated."
    */
   if (buffer != NULL) {
      _eglError(EGL_BAD_PARAMETER, "buffer not NULL");
      return NULL;
   }

   if (!_eglParseImageAttribList(&attrs, disp, attr_list))
      return NULL;

   if (!dri2_check_dma_buf_attribs(&attrs))
      return NULL;

   /* Validates per-plane attributes and yields the plane (fd) count. */
   num_fds = dri2_check_dma_buf_format(&attrs);
   if (!num_fds)
      return NULL;

   for (unsigned i = 0; i < num_fds; ++i) {
      fds[i] = attrs.DMABufPlaneFds[i].Value;
      pitches[i] = attrs.DMABufPlanePitches[i].Value;
      offsets[i] = attrs.DMABufPlaneOffsets[i].Value;
   }

   /* dri2_check_dma_buf_attribs ensures that the modifier, if available,
    * will be present in attrs.DMABufPlaneModifiersLo[0] and
    * attrs.DMABufPlaneModifiersHi[0] */
   if (attrs.DMABufPlaneModifiersLo[0].IsPresent) {
      modifier = combine_u32_into_u64(attrs.DMABufPlaneModifiersHi[0].Value,
                                      attrs.DMABufPlaneModifiersLo[0].Value);
   } else {
      modifier = DRM_FORMAT_MOD_INVALID;
   }

   uint32_t flags = 0;
   if (attrs.ProtectedContent)
      flags |= __DRI_IMAGE_PROTECTED_CONTENT_FLAG;

   dri_image = dri2_from_dma_bufs(
      dri2_dpy->dri_screen_render_gpu, attrs.Width, attrs.Height,
      attrs.DMABufFourCC.Value, modifier, fds, num_fds, pitches, offsets,
      attrs.DMABufYuvColorSpaceHint.Value, attrs.DMABufSampleRangeHint.Value,
      attrs.DMABufChromaHorizontalSiting.Value,
      attrs.DMABufChromaVerticalSiting.Value,
      flags, &error, NULL);

   /* Translate the driver result into an EGL error (no-op on success). */
   egl_error = egl_error_from_dri_image_error(error);
   if (egl_error != EGL_SUCCESS)
      _eglError(egl_error, "createImageFromDmaBufs failed");

   if (!dri_image)
      return EGL_NO_IMAGE_KHR;

   res = dri2_create_image_from_dri(disp, dri_image);

   return res;
}
2579
static _EGLImage *
dri2_create_drm_image_mesa(_EGLDisplay *disp, const EGLint *attr_list)
{
   /* EGL_MESA_drm_image allocation path: create a fresh driver-allocated
    * image from the width/height/format/use attributes. */
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   struct dri2_egl_image *dri2_img;
   _EGLImageAttribs attrs;
   unsigned int dri_use, valid_mask;
   int format;

   if (!attr_list) {
      _eglError(EGL_BAD_PARAMETER, __func__);
      goto fail;
   }

   if (!_eglParseImageAttribList(&attrs, disp, attr_list))
      goto fail;

   if (attrs.Width <= 0 || attrs.Height <= 0) {
      _eglError(EGL_BAD_PARAMETER, __func__);
      goto fail;
   }

   /* Only ARGB32 can be allocated through this extension. */
   switch (attrs.DRMBufferFormatMESA) {
   case EGL_DRM_BUFFER_FORMAT_ARGB32_MESA:
      format = PIPE_FORMAT_BGRA8888_UNORM;
      break;
   default:
      _eglError(EGL_BAD_PARAMETER, __func__);
      goto fail;
   }

   /* Reject any use bits outside the three the extension defines. */
   valid_mask = EGL_DRM_BUFFER_USE_SCANOUT_MESA |
                EGL_DRM_BUFFER_USE_SHARE_MESA | EGL_DRM_BUFFER_USE_CURSOR_MESA;
   if (attrs.DRMBufferUseMESA & ~valid_mask) {
      _eglError(EGL_BAD_PARAMETER, __func__);
      goto fail;
   }

   /* Translate the EGL use flags into their DRI equivalents. */
   dri_use = 0;
   if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_SHARE_MESA)
      dri_use |= __DRI_IMAGE_USE_SHARE;
   if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_SCANOUT_MESA)
      dri_use |= __DRI_IMAGE_USE_SCANOUT;
   if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_CURSOR_MESA)
      dri_use |= __DRI_IMAGE_USE_CURSOR;

   dri2_img = malloc(sizeof *dri2_img);
   if (!dri2_img) {
      _eglError(EGL_BAD_ALLOC, "dri2_create_image_khr");
      goto fail;
   }

   _eglInitImage(&dri2_img->base, disp);

   dri2_img->dri_image =
      dri_create_image(dri2_dpy->dri_screen_render_gpu, attrs.Width,
                       attrs.Height, format, NULL, 0, dri_use, dri2_img);
   if (dri2_img->dri_image == NULL) {
      free(dri2_img);
      _eglError(EGL_BAD_ALLOC, "dri2_create_drm_image_mesa");
      goto fail;
   }

   mtx_unlock(&dri2_dpy->lock);

   return &dri2_img->base;

fail:
   mtx_unlock(&dri2_dpy->lock);
   return EGL_NO_IMAGE_KHR;
}
2651
2652 static EGLBoolean
dri2_export_drm_image_mesa(_EGLDisplay * disp,_EGLImage * img,EGLint * name,EGLint * handle,EGLint * stride)2653 dri2_export_drm_image_mesa(_EGLDisplay *disp, _EGLImage *img, EGLint *name,
2654 EGLint *handle, EGLint *stride)
2655 {
2656 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2657 struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2658
2659 if (name && !dri2_query_image(dri2_img->dri_image,
2660 __DRI_IMAGE_ATTRIB_NAME, name))
2661 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_ALLOC,
2662 "dri2_export_drm_image_mesa");
2663
2664 if (handle)
2665 dri2_query_image(dri2_img->dri_image,
2666 __DRI_IMAGE_ATTRIB_HANDLE, handle);
2667
2668 if (stride)
2669 dri2_query_image(dri2_img->dri_image,
2670 __DRI_IMAGE_ATTRIB_STRIDE, stride);
2671
2672 mtx_unlock(&dri2_dpy->lock);
2673
2674 return EGL_TRUE;
2675 }
2676
2677 /**
2678 * Checks if we can support EGL_MESA_image_dma_buf_export on this image.
2679
2680 * The spec provides a boolean return for the driver to reject exporting for
2681 * basically any reason, but doesn't specify any particular error cases. For
2682 * now, we just fail if we don't have a DRM fourcc for the format.
2683 */
2684 static bool
dri2_can_export_dma_buf_image(_EGLDisplay * disp,_EGLImage * img)2685 dri2_can_export_dma_buf_image(_EGLDisplay *disp, _EGLImage *img)
2686 {
2687 struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2688 EGLint fourcc;
2689
2690 if (!dri2_query_image(dri2_img->dri_image,
2691 __DRI_IMAGE_ATTRIB_FOURCC, &fourcc)) {
2692 return false;
2693 }
2694
2695 return true;
2696 }
2697
static EGLBoolean
dri2_export_dma_buf_image_query_mesa(_EGLDisplay *disp, _EGLImage *img,
                                     EGLint *fourcc, EGLint *nplanes,
                                     EGLuint64KHR *modifiers)
{
   /* EGL_MESA_image_dma_buf_export query: report the image's fourcc, plane
    * count and modifier.  Any out-pointer may be NULL to skip that item. */
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   struct dri2_egl_image *dri2_img = dri2_egl_image(img);
   int num_planes;

   if (!dri2_can_export_dma_buf_image(disp, img)) {
      mtx_unlock(&dri2_dpy->lock);
      return EGL_FALSE;
   }

   dri2_query_image(dri2_img->dri_image,
                    __DRI_IMAGE_ATTRIB_NUM_PLANES, &num_planes);
   if (nplanes)
      *nplanes = num_planes;

   if (fourcc)
      dri2_query_image(dri2_img->dri_image,
                       __DRI_IMAGE_ATTRIB_FOURCC, fourcc);

   if (modifiers) {
      int mod_hi, mod_lo;
      uint64_t modifier = DRM_FORMAT_MOD_INVALID;
      bool query;

      /* The modifier is only valid if both 32-bit halves are queryable. */
      query = dri2_query_image(
         dri2_img->dri_image, __DRI_IMAGE_ATTRIB_MODIFIER_UPPER, &mod_hi);
      query &= dri2_query_image(
         dri2_img->dri_image, __DRI_IMAGE_ATTRIB_MODIFIER_LOWER, &mod_lo);
      if (query)
         modifier = combine_u32_into_u64(mod_hi, mod_lo);

      /* The same modifier applies to every plane of the image. */
      for (int i = 0; i < num_planes; i++)
         modifiers[i] = modifier;
   }

   mtx_unlock(&dri2_dpy->lock);

   return EGL_TRUE;
}
2741
static EGLBoolean
dri2_export_dma_buf_image_mesa(_EGLDisplay *disp, _EGLImage *img, int *fds,
                               EGLint *strides, EGLint *offsets)
{
   /* EGL_MESA_image_dma_buf_export: export the image as dma-buf fd(s) with
    * per-plane strides and offsets.  Out-pointers may be NULL. */
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   struct dri2_egl_image *dri2_img = dri2_egl_image(img);
   EGLint nplanes;

   if (!dri2_can_export_dma_buf_image(disp, img)) {
      mtx_unlock(&dri2_dpy->lock);
      return EGL_FALSE;
   }

   /* EGL_MESA_image_dma_buf_export spec says:
    *    "If the number of fds is less than the number of planes, then
    *    subsequent fd slots should contain -1."
    */
   if (fds) {
      /* Query nplanes so that we know how big the given array is. */
      dri2_query_image(dri2_img->dri_image,
                       __DRI_IMAGE_ATTRIB_NUM_PLANES, &nplanes);
      /* memset with byte 0xff yields int value -1 in every slot. */
      memset(fds, -1, nplanes * sizeof(int));
   }

   /* rework later to provide multiple fds/strides/offsets */
   if (fds)
      dri2_query_image(dri2_img->dri_image, __DRI_IMAGE_ATTRIB_FD,
                       fds);

   if (strides)
      dri2_query_image(dri2_img->dri_image,
                       __DRI_IMAGE_ATTRIB_STRIDE, strides);

   if (offsets) {
      int img_offset;
      bool ret = dri2_query_image(
         dri2_img->dri_image, __DRI_IMAGE_ATTRIB_OFFSET, &img_offset);
      if (ret)
         offsets[0] = img_offset;
      else
         offsets[0] = 0;
   }

   mtx_unlock(&dri2_dpy->lock);

   return EGL_TRUE;
}
2789
2790 #endif
2791
_EGLImage *
dri2_create_image_khr(_EGLDisplay *disp, _EGLContext *ctx, EGLenum target,
                      EGLClientBuffer buffer, const EGLint *attr_list)
{
   /* eglCreateImageKHR entry point: dispatch on <target> to the importer
    * for each supported image source. */
   switch (target) {
   case EGL_GL_TEXTURE_2D_KHR:
   case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR:
   case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X_KHR:
   case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y_KHR:
   case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_KHR:
   case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z_KHR:
   case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_KHR:
   case EGL_GL_TEXTURE_3D_KHR:
      return dri2_create_image_khr_texture(disp, ctx, target, buffer,
                                           attr_list);
   case EGL_GL_RENDERBUFFER_KHR:
      return dri2_create_image_khr_renderbuffer(disp, ctx, buffer, attr_list);
#ifdef HAVE_LIBDRM
   case EGL_DRM_BUFFER_MESA:
      return dri2_create_image_mesa_drm_buffer(disp, ctx, buffer, attr_list);
   case EGL_LINUX_DMA_BUF_EXT:
      return dri2_create_image_dma_buf(disp, ctx, buffer, attr_list);
#endif
#ifdef HAVE_WAYLAND_PLATFORM
   case EGL_WAYLAND_BUFFER_WL:
      return dri2_create_image_wayland_wl_buffer(disp, ctx, buffer, attr_list);
#endif
   default:
      _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
      return EGL_NO_IMAGE_KHR;
   }
}
2824
2825 static EGLBoolean
dri2_destroy_image_khr(_EGLDisplay * disp,_EGLImage * image)2826 dri2_destroy_image_khr(_EGLDisplay *disp, _EGLImage *image)
2827 {
2828 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2829 struct dri2_egl_image *dri2_img = dri2_egl_image(image);
2830
2831 dri2_destroy_image(dri2_img->dri_image);
2832 free(dri2_img);
2833
2834 mtx_unlock(&dri2_dpy->lock);
2835
2836 return EGL_TRUE;
2837 }
2838
2839 #ifdef HAVE_WAYLAND_PLATFORM
2840
static void
dri2_wl_reference_buffer(void *user_data, uint32_t name, int fd,
                         struct wl_drm_buffer *buffer)
{
   /* wayland-drm callback: import a client buffer (a GEM flink name when
    * fd == -1, otherwise a dma-buf fd) and record its plane layout. */
   _EGLDisplay *disp = user_data;
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   struct dri_image *img;
   int dri_components = 0;

   if (fd == -1)
      img = dri2_from_names(
         dri2_dpy->dri_screen_render_gpu, buffer->width, buffer->height,
         buffer->format, (int *)&name, 1, buffer->stride, buffer->offset, NULL);
   else
      img = dri2_from_dma_bufs(
         dri2_dpy->dri_screen_render_gpu, buffer->width, buffer->height,
         buffer->format, DRM_FORMAT_MOD_INVALID, &fd, 1, buffer->stride,
         buffer->offset, 0, 0, 0, 0, 0, NULL, NULL);

   if (img == NULL)
      return;

   dri2_query_image(img, __DRI_IMAGE_ATTRIB_COMPONENTS,
                    &dri_components);

   /* Match the driver's component layout against the wl_drm table so we
    * know the EGL_TEXTURE_* type and plane count. */
   buffer->driver_format = NULL;
   for (int i = 0; i < ARRAY_SIZE(wl_drm_components); i++)
      if (wl_drm_components[i].dri_components == dri_components)
         buffer->driver_format = &wl_drm_components[i];

   /* Layouts we cannot describe are dropped; the buffer stays unusable. */
   if (buffer->driver_format == NULL)
      dri2_destroy_image(img);
   else
      buffer->driver_buffer = img;
}
2876
2877 static void
dri2_wl_release_buffer(void * user_data,struct wl_drm_buffer * buffer)2878 dri2_wl_release_buffer(void *user_data, struct wl_drm_buffer *buffer)
2879 {
2880 dri2_destroy_image(buffer->driver_buffer);
2881 }
2882
/* Implements eglBindWaylandDisplayWL: publish a wl_drm global on the
 * compositor's wl_display so clients can create wl_buffers from DRM/dma-buf
 * handles. Fails if a wl_drm instance is already bound to this display. */
static EGLBoolean
dri2_bind_wayland_display_wl(_EGLDisplay *disp, struct wl_display *wl_dpy)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   const struct wayland_drm_callbacks wl_drm_callbacks = {
      /* NOTE(review): authenticate is called through a cast function-pointer
       * type; assumes the vtbl signature matches (void *, uint32_t). */
      .authenticate = (int (*)(void *, uint32_t))dri2_dpy->vtbl->authenticate,
      .reference_buffer = dri2_wl_reference_buffer,
      .release_buffer = dri2_wl_release_buffer,
      .is_format_supported = dri2_wl_is_format_supported,
   };
   int flags = 0;
   char *device_name;

   /* Only one wl_drm instance may be bound per display. */
   if (dri2_dpy->wl_server_drm)
      goto fail;

   /* Prefer the render-node path; fall back to the probed device name. */
   device_name = drmGetRenderDeviceNameFromFd(dri2_dpy->fd_render_gpu);
   if (!device_name)
      device_name = strdup(dri2_dpy->device_name);
   if (!device_name)
      goto fail;

   /* Advertise PRIME (dma-buf) buffer sharing only when the screen can both
    * import and export dma-bufs. */
   if (dri2_dpy->has_dmabuf_import && dri2_dpy->has_dmabuf_export)
      flags |= WAYLAND_DRM_PRIME;

   dri2_dpy->wl_server_drm =
      wayland_drm_init(wl_dpy, device_name, &wl_drm_callbacks, disp, flags);

   /* wayland_drm_init copies what it needs; the name is ours to free. */
   free(device_name);

   if (!dri2_dpy->wl_server_drm)
      goto fail;

#ifdef HAVE_DRM_PLATFORM
   /* We have to share the wl_drm instance with gbm, so gbm can convert
    * wl_buffers to gbm bos. */
   if (dri2_dpy->gbm_dri)
      dri2_dpy->gbm_dri->wl_drm = dri2_dpy->wl_server_drm;
#endif

   mtx_unlock(&dri2_dpy->lock);
   return EGL_TRUE;

fail:
   mtx_unlock(&dri2_dpy->lock);
   return EGL_FALSE;
}
2930
2931 static EGLBoolean
dri2_unbind_wayland_display_wl(_EGLDisplay * disp,struct wl_display * wl_dpy)2932 dri2_unbind_wayland_display_wl(_EGLDisplay *disp, struct wl_display *wl_dpy)
2933 {
2934 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2935
2936 if (!dri2_dpy->wl_server_drm)
2937 return EGL_FALSE;
2938
2939 wayland_drm_uninit(dri2_dpy->wl_server_drm);
2940 dri2_dpy->wl_server_drm = NULL;
2941
2942 return EGL_TRUE;
2943 }
2944
2945 static EGLBoolean
dri2_query_wayland_buffer_wl(_EGLDisplay * disp,struct wl_resource * buffer_resource,EGLint attribute,EGLint * value)2946 dri2_query_wayland_buffer_wl(_EGLDisplay *disp,
2947 struct wl_resource *buffer_resource,
2948 EGLint attribute, EGLint *value)
2949 {
2950 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2951 struct wl_drm_buffer *buffer;
2952 const struct wl_drm_components_descriptor *format;
2953
2954 buffer = wayland_drm_buffer_get(dri2_dpy->wl_server_drm, buffer_resource);
2955 if (!buffer)
2956 return EGL_FALSE;
2957
2958 format = buffer->driver_format;
2959 switch (attribute) {
2960 case EGL_TEXTURE_FORMAT:
2961 *value = format->components;
2962 return EGL_TRUE;
2963 case EGL_WIDTH:
2964 *value = buffer->width;
2965 return EGL_TRUE;
2966 case EGL_HEIGHT:
2967 *value = buffer->height;
2968 return EGL_TRUE;
2969 }
2970
2971 return EGL_FALSE;
2972 }
2973 #endif
2974
/* Take one reference on a sync object; paired with dri2_egl_unref_sync(). */
static void
dri2_egl_ref_sync(struct dri2_egl_sync *sync)
{
   p_atomic_inc(&sync->refcount);
}
2980
2981 static void
dri2_egl_unref_sync(struct dri2_egl_display * dri2_dpy,struct dri2_egl_sync * dri2_sync)2982 dri2_egl_unref_sync(struct dri2_egl_display *dri2_dpy,
2983 struct dri2_egl_sync *dri2_sync)
2984 {
2985 if (p_atomic_dec_zero(&dri2_sync->refcount)) {
2986 switch (dri2_sync->base.Type) {
2987 case EGL_SYNC_REUSABLE_KHR:
2988 cnd_destroy(&dri2_sync->cond);
2989 break;
2990 case EGL_SYNC_NATIVE_FENCE_ANDROID:
2991 if (dri2_sync->base.SyncFd != EGL_NO_NATIVE_FENCE_FD_ANDROID)
2992 close(dri2_sync->base.SyncFd);
2993 break;
2994 default:
2995 break;
2996 }
2997
2998 if (dri2_sync->fence)
2999 dri_destroy_fence(dri2_dpy->dri_screen_render_gpu,
3000 dri2_sync->fence);
3001
3002 free(dri2_sync);
3003 }
3004 }
3005
/* Implements eglCreateSyncKHR for all supported sync types. Allocates the
 * wrapper, initializes the base sync from the attrib list, then performs
 * per-type setup; on any failure the wrapper is freed and NULL returned
 * with an EGL error already recorded. */
static _EGLSync *
dri2_create_sync(_EGLDisplay *disp, EGLenum type, const EGLAttrib *attrib_list)
{
   _EGLContext *ctx = _eglGetCurrentContext();
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   /* NOTE(review): dri2_ctx may be NULL when no context is current; the
    * fence/native-fence paths below dereference it — presumably a current
    * context is validated at a higher layer. Confirm against callers. */
   struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
   struct dri2_egl_sync *dri2_sync;
   EGLint ret;
   pthread_condattr_t attr;

   dri2_sync = calloc(1, sizeof(struct dri2_egl_sync));
   if (!dri2_sync) {
      _eglError(EGL_BAD_ALLOC, "eglCreateSyncKHR");
      goto fail;
   }

   /* Parses/validates attrib_list and fills in base fields (Type, SyncFd,
    * CLEvent, ...); it records its own EGL error on failure. */
   if (!_eglInitSync(&dri2_sync->base, disp, type, attrib_list)) {
      goto fail;
   }

   switch (type) {
   case EGL_SYNC_FENCE_KHR:
      dri2_sync->fence = dri_create_fence(dri2_ctx->dri_context);
      if (!dri2_sync->fence) {
         /* Why did it fail? DRI doesn't return an error code, so we emit
          * a generic EGL error that doesn't communicate user error.
          */
         _eglError(EGL_BAD_ALLOC, "eglCreateSyncKHR");
         goto fail;
      }
      break;

   case EGL_SYNC_CL_EVENT_KHR:
      dri2_sync->fence = dri_get_fence_from_cl_event(
         dri2_dpy->dri_screen_render_gpu, dri2_sync->base.CLEvent);
      /* this can only happen if the cl_event passed in is invalid. */
      if (!dri2_sync->fence) {
         _eglError(EGL_BAD_ATTRIBUTE, "eglCreateSyncKHR");
         goto fail;
      }

      /* the initial status must be "signaled" if the cl_event is signaled */
      if (dri_client_wait_sync(dri2_ctx->dri_context,
                               dri2_sync->fence, 0, 0))
         dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
      break;

   case EGL_SYNC_REUSABLE_KHR:
      /* initialize attr */
      ret = pthread_condattr_init(&attr);

      if (ret) {
         _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
         goto fail;
      }

#if !defined(__APPLE__) && !defined(__MACOSX)
      /* change clock attribute to CLOCK_MONOTONIC */
      /* Waiters compute deadlines with CLOCK_MONOTONIC (see
       * dri2_client_wait_sync), so the condvar must use the same clock. */
      ret = pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);

      if (ret) {
         _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
         goto fail;
      }
#endif

      /* NOTE(review): attr is never pthread_condattr_destroy()ed on either
       * path; harmless on glibc but worth confirming/portably cleaning up. */
      ret = pthread_cond_init(&dri2_sync->cond, &attr);

      if (ret) {
         _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
         goto fail;
      }

      /* initial status of reusable sync must be "unsignaled" */
      dri2_sync->base.SyncStatus = EGL_UNSIGNALED_KHR;
      break;

   case EGL_SYNC_NATIVE_FENCE_ANDROID:
      /* SyncFd may be EGL_NO_NATIVE_FENCE_FD_ANDROID (-1), in which case the
       * driver creates a fence that will materialize an fd on flush. */
      dri2_sync->fence = dri_create_fence_fd(
         dri2_ctx->dri_context, dri2_sync->base.SyncFd);
      if (!dri2_sync->fence) {
         _eglError(EGL_BAD_ATTRIBUTE, "eglCreateSyncKHR");
         goto fail;
      }
      break;
   }

   p_atomic_set(&dri2_sync->refcount, 1);
   mtx_unlock(&dri2_dpy->lock);

   return &dri2_sync->base;

fail:
   free(dri2_sync);
   mtx_unlock(&dri2_dpy->lock);
   return NULL;
}
3103
3104 static EGLBoolean
dri2_destroy_sync(_EGLDisplay * disp,_EGLSync * sync)3105 dri2_destroy_sync(_EGLDisplay *disp, _EGLSync *sync)
3106 {
3107 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3108 struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3109 EGLint ret = EGL_TRUE;
3110 EGLint err;
3111
3112 /* if type of sync is EGL_SYNC_REUSABLE_KHR and it is not signaled yet,
3113 * then unlock all threads possibly blocked by the reusable sync before
3114 * destroying it.
3115 */
3116 if (dri2_sync->base.Type == EGL_SYNC_REUSABLE_KHR &&
3117 dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR) {
3118 dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
3119 /* unblock all threads currently blocked by sync */
3120 err = cnd_broadcast(&dri2_sync->cond);
3121
3122 if (err) {
3123 _eglError(EGL_BAD_ACCESS, "eglDestroySyncKHR");
3124 ret = EGL_FALSE;
3125 }
3126 }
3127
3128 dri2_egl_unref_sync(dri2_dpy, dri2_sync);
3129
3130 mtx_unlock(&dri2_dpy->lock);
3131
3132 return ret;
3133 }
3134
3135 static EGLint
dri2_dup_native_fence_fd(_EGLDisplay * disp,_EGLSync * sync)3136 dri2_dup_native_fence_fd(_EGLDisplay *disp, _EGLSync *sync)
3137 {
3138 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3139 struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3140
3141 assert(sync->Type == EGL_SYNC_NATIVE_FENCE_ANDROID);
3142
3143 if (sync->SyncFd == EGL_NO_NATIVE_FENCE_FD_ANDROID) {
3144 /* try to retrieve the actual native fence fd.. if rendering is
3145 * not flushed this will just return -1, aka NO_NATIVE_FENCE_FD:
3146 */
3147 sync->SyncFd = dri_get_fence_fd(
3148 dri2_dpy->dri_screen_render_gpu, dri2_sync->fence);
3149 }
3150
3151 mtx_unlock(&dri2_dpy->lock);
3152
3153 if (sync->SyncFd == EGL_NO_NATIVE_FENCE_FD_ANDROID) {
3154 /* if native fence fd still not created, return an error: */
3155 _eglError(EGL_BAD_PARAMETER, "eglDupNativeFenceFDANDROID");
3156 return EGL_NO_NATIVE_FENCE_FD_ANDROID;
3157 }
3158
3159 assert(sync_valid_fd(sync->SyncFd));
3160
3161 return os_dupfd_cloexec(sync->SyncFd);
3162 }
3163
/* Implements eglSetBlobCacheFuncsANDROID: hand the application's shader-
 * cache set/get callbacks down to the DRI screen, under the display lock. */
static void
dri2_set_blob_cache_funcs(_EGLDisplay *disp, EGLSetBlobFuncANDROID set,
                          EGLGetBlobFuncANDROID get)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   dri_set_blob_cache_funcs(dri2_dpy->dri_screen_render_gpu, set, get);
   mtx_unlock(&dri2_dpy->lock);
}
3172
3173 static EGLint
dri2_client_wait_sync(_EGLDisplay * disp,_EGLSync * sync,EGLint flags,EGLTime timeout)3174 dri2_client_wait_sync(_EGLDisplay *disp, _EGLSync *sync, EGLint flags,
3175 EGLTime timeout)
3176 {
3177 _EGLContext *ctx = _eglGetCurrentContext();
3178 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
3179 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3180 struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3181 unsigned wait_flags = 0;
3182
3183 EGLint ret = EGL_CONDITION_SATISFIED_KHR;
3184
3185 /* The EGL_KHR_fence_sync spec states:
3186 *
3187 * "If no context is current for the bound API,
3188 * the EGL_SYNC_FLUSH_COMMANDS_BIT_KHR bit is ignored.
3189 */
3190 if (dri2_ctx && flags & EGL_SYNC_FLUSH_COMMANDS_BIT_KHR)
3191 wait_flags |= __DRI2_FENCE_FLAG_FLUSH_COMMANDS;
3192
3193 /* the sync object should take a reference while waiting */
3194 dri2_egl_ref_sync(dri2_sync);
3195
3196 switch (sync->Type) {
3197 case EGL_SYNC_FENCE_KHR:
3198 case EGL_SYNC_NATIVE_FENCE_ANDROID:
3199 case EGL_SYNC_CL_EVENT_KHR:
3200 if (dri_client_wait_sync(
3201 dri2_ctx ? dri2_ctx->dri_context : NULL, dri2_sync->fence,
3202 wait_flags, timeout))
3203 dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
3204 else
3205 ret = EGL_TIMEOUT_EXPIRED_KHR;
3206 break;
3207
3208 case EGL_SYNC_REUSABLE_KHR:
3209 if (dri2_ctx && dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR &&
3210 (flags & EGL_SYNC_FLUSH_COMMANDS_BIT_KHR)) {
3211 /* flush context if EGL_SYNC_FLUSH_COMMANDS_BIT_KHR is set */
3212 dri2_gl_flush();
3213 }
3214
3215 /* if timeout is EGL_FOREVER_KHR, it should wait without any timeout.*/
3216 if (timeout == EGL_FOREVER_KHR) {
3217 mtx_lock(&dri2_sync->mutex);
3218 cnd_wait(&dri2_sync->cond, &dri2_sync->mutex);
3219 mtx_unlock(&dri2_sync->mutex);
3220 } else {
3221 /* if reusable sync has not been yet signaled */
3222 if (dri2_sync->base.SyncStatus != EGL_SIGNALED_KHR) {
3223 /* timespecs for cnd_timedwait */
3224 struct timespec current;
3225 struct timespec expire;
3226
3227 /* We override the clock to monotonic when creating the condition
3228 * variable. */
3229 clock_gettime(CLOCK_MONOTONIC, ¤t);
3230
3231 /* calculating when to expire */
3232 expire.tv_nsec = timeout % 1000000000L;
3233 expire.tv_sec = timeout / 1000000000L;
3234
3235 expire.tv_nsec += current.tv_nsec;
3236 expire.tv_sec += current.tv_sec;
3237
3238 /* expire.nsec now is a number between 0 and 1999999998 */
3239 if (expire.tv_nsec > 999999999L) {
3240 expire.tv_sec++;
3241 expire.tv_nsec -= 1000000000L;
3242 }
3243
3244 mtx_lock(&dri2_sync->mutex);
3245 ret = cnd_timedwait(&dri2_sync->cond, &dri2_sync->mutex, &expire);
3246 mtx_unlock(&dri2_sync->mutex);
3247
3248 if (ret == thrd_timedout) {
3249 if (dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR) {
3250 ret = EGL_TIMEOUT_EXPIRED_KHR;
3251 } else {
3252 _eglError(EGL_BAD_ACCESS, "eglClientWaitSyncKHR");
3253 ret = EGL_FALSE;
3254 }
3255 }
3256 }
3257 }
3258 break;
3259 }
3260
3261 dri2_egl_unref_sync(dri2_dpy, dri2_sync);
3262
3263 return ret;
3264 }
3265
3266 static EGLBoolean
dri2_signal_sync(_EGLDisplay * disp,_EGLSync * sync,EGLenum mode)3267 dri2_signal_sync(_EGLDisplay *disp, _EGLSync *sync, EGLenum mode)
3268 {
3269 struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3270 EGLint ret;
3271
3272 if (sync->Type != EGL_SYNC_REUSABLE_KHR)
3273 return _eglError(EGL_BAD_MATCH, "eglSignalSyncKHR");
3274
3275 if (mode != EGL_SIGNALED_KHR && mode != EGL_UNSIGNALED_KHR)
3276 return _eglError(EGL_BAD_ATTRIBUTE, "eglSignalSyncKHR");
3277
3278 dri2_sync->base.SyncStatus = mode;
3279
3280 if (mode == EGL_SIGNALED_KHR) {
3281 ret = cnd_broadcast(&dri2_sync->cond);
3282
3283 /* fail to broadcast */
3284 if (ret)
3285 return _eglError(EGL_BAD_ACCESS, "eglSignalSyncKHR");
3286 }
3287
3288 return EGL_TRUE;
3289 }
3290
3291 static EGLint
dri2_server_wait_sync(_EGLDisplay * disp,_EGLSync * sync)3292 dri2_server_wait_sync(_EGLDisplay *disp, _EGLSync *sync)
3293 {
3294 _EGLContext *ctx = _eglGetCurrentContext();
3295 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3296 struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
3297
3298 dri_server_wait_sync(dri2_ctx->dri_context, dri2_sync->fence,
3299 0);
3300 return EGL_TRUE;
3301 }
3302
3303 static int
dri2_interop_query_device_info(_EGLDisplay * disp,_EGLContext * ctx,struct mesa_glinterop_device_info * out)3304 dri2_interop_query_device_info(_EGLDisplay *disp, _EGLContext *ctx,
3305 struct mesa_glinterop_device_info *out)
3306 {
3307 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3308
3309 return dri_interop_query_device_info(dri2_ctx->dri_context, out);
3310 }
3311
3312 static int
dri2_interop_export_object(_EGLDisplay * disp,_EGLContext * ctx,struct mesa_glinterop_export_in * in,struct mesa_glinterop_export_out * out)3313 dri2_interop_export_object(_EGLDisplay *disp, _EGLContext *ctx,
3314 struct mesa_glinterop_export_in *in,
3315 struct mesa_glinterop_export_out *out)
3316 {
3317 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3318
3319 return dri_interop_export_object(dri2_ctx->dri_context, in, out);
3320 }
3321
3322 static int
dri2_interop_flush_objects(_EGLDisplay * disp,_EGLContext * ctx,unsigned count,struct mesa_glinterop_export_in * objects,struct mesa_glinterop_flush_out * out)3323 dri2_interop_flush_objects(_EGLDisplay *disp, _EGLContext *ctx, unsigned count,
3324 struct mesa_glinterop_export_in *objects,
3325 struct mesa_glinterop_flush_out *out)
3326 {
3327 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3328
3329 return dri_interop_flush_objects(dri2_ctx->dri_context, count,
3330 objects, out);
3331 }
3332
3333 static EGLBoolean
dri2_query_supported_compression_rates(_EGLDisplay * disp,_EGLConfig * config,const EGLAttrib * attr_list,EGLint * rates,EGLint rate_size,EGLint * num_rate)3334 dri2_query_supported_compression_rates(_EGLDisplay *disp, _EGLConfig *config,
3335 const EGLAttrib *attr_list,
3336 EGLint *rates, EGLint rate_size,
3337 EGLint *num_rate)
3338 {
3339 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
3340 struct dri2_egl_config *conf = dri2_egl_config(config);
3341 enum __DRIFixedRateCompression dri_rates[rate_size];
3342
3343 if (dri2_dpy->has_compression_modifiers) {
3344 const struct dri_config *dri_conf =
3345 dri2_get_dri_config(conf, EGL_WINDOW_BIT, EGL_GL_COLORSPACE_LINEAR);
3346 if (!dri2_query_compression_rates(
3347 dri2_dpy->dri_screen_render_gpu, dri_conf, rate_size, dri_rates,
3348 num_rate))
3349 return EGL_FALSE;
3350
3351 for (int i = 0; i < *num_rate && i < rate_size; ++i)
3352 rates[i] = dri_rates[i];
3353 return EGL_TRUE;
3354 }
3355 *num_rate = 0;
3356 return EGL_TRUE;
3357 }
3358
/* The single driver vtable exported to the EGL core: maps every EGL API and
 * extension entry point to its dri2_* implementation. Platform-dependent
 * entries are compiled in only when the matching backend is built. */
const _EGLDriver _eglDriver = {
   /* Core display / context / surface lifecycle. */
   .Initialize = dri2_initialize,
   .Terminate = dri2_terminate,
   .CreateContext = dri2_create_context,
   .DestroyContext = dri2_destroy_context,
   .MakeCurrent = dri2_make_current,
   .CreateWindowSurface = dri2_create_window_surface,
   .CreatePixmapSurface = dri2_create_pixmap_surface,
   .CreatePbufferSurface = dri2_create_pbuffer_surface,
   .DestroySurface = dri2_destroy_surface,
   .WaitClient = dri2_wait_client,
   .WaitNative = dri2_wait_native,
   .BindTexImage = dri2_bind_tex_image,
   .ReleaseTexImage = dri2_release_tex_image,
   /* Presentation. */
   .SwapInterval = dri2_swap_interval,
   .SwapBuffers = dri2_swap_buffers,
   .SwapBuffersWithDamageEXT = dri2_swap_buffers_with_damage,
   .SwapBuffersRegionNOK = dri2_swap_buffers_region,
   .SetDamageRegion = dri2_set_damage_region,
   .PostSubBufferNV = dri2_post_sub_buffer,
   .CopyBuffers = dri2_copy_buffers,
   .QueryBufferAge = dri2_query_buffer_age,
   /* EGLImage. */
   .CreateImageKHR = dri2_create_image,
   .DestroyImageKHR = dri2_destroy_image_khr,
   .CreateWaylandBufferFromImageWL = dri2_create_wayland_buffer_from_image,
   .QuerySurface = dri2_query_surface,
   .QueryDriverName = dri2_query_driver_name,
   .QueryDriverConfig = dri2_query_driver_config,
#ifdef HAVE_LIBDRM
   /* DRM / dma-buf import and export. */
   .CreateDRMImageMESA = dri2_create_drm_image_mesa,
   .ExportDRMImageMESA = dri2_export_drm_image_mesa,
   .ExportDMABUFImageQueryMESA = dri2_export_dma_buf_image_query_mesa,
   .ExportDMABUFImageMESA = dri2_export_dma_buf_image_mesa,
   .QueryDmaBufFormatsEXT = dri2_query_dma_buf_formats,
   .QueryDmaBufModifiersEXT = dri2_query_dma_buf_modifiers,
#endif
#ifdef HAVE_WAYLAND_PLATFORM
   /* EGL_WL_bind_wayland_display (compositor-side wl_drm). */
   .BindWaylandDisplayWL = dri2_bind_wayland_display_wl,
   .UnbindWaylandDisplayWL = dri2_unbind_wayland_display_wl,
   .QueryWaylandBufferWL = dri2_query_wayland_buffer_wl,
#endif
   .GetSyncValuesCHROMIUM = dri2_get_sync_values_chromium,
   .GetMscRateANGLE = dri2_get_msc_rate_angle,
   /* Sync objects (fence, reusable, CL event, Android native fence). */
   .CreateSyncKHR = dri2_create_sync,
   .ClientWaitSyncKHR = dri2_client_wait_sync,
   .SignalSyncKHR = dri2_signal_sync,
   .WaitSyncKHR = dri2_server_wait_sync,
   .DestroySyncKHR = dri2_destroy_sync,
   /* GL/CL interop. */
   .GLInteropQueryDeviceInfo = dri2_interop_query_device_info,
   .GLInteropExportObject = dri2_interop_export_object,
   .GLInteropFlushObjects = dri2_interop_flush_objects,
   .DupNativeFenceFDANDROID = dri2_dup_native_fence_fd,
   .SetBlobCacheFuncsANDROID = dri2_set_blob_cache_funcs,
   .QuerySupportedCompressionRatesEXT = dri2_query_supported_compression_rates,
};
3414