1 /*
2 * Copyright © 2010 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
17 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
18 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
19 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
20 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 *
24 * Authors:
25 * Kristian Høgsberg <krh@bitplanet.net>
26 */
27
28 #include <dlfcn.h>
29 #include <errno.h>
30 #include <fcntl.h>
31 #include <limits.h>
32 #include <stdbool.h>
33 #include <stdint.h>
34 #include <stdio.h>
35 #include <stdlib.h>
36 #include <string.h>
37 #include <time.h>
38 #include <unistd.h>
39 #include <c11/threads.h>
40 #ifdef HAVE_LIBDRM
41 #include <xf86drm.h>
42 #include "drm-uapi/drm_fourcc.h"
43 #endif
44 #include <GL/gl.h>
45 #include <GL/internal/dri_interface.h>
46 #include <sys/stat.h>
47 #include <sys/types.h>
48 #include "dri_screen.h"
49
50 #ifdef HAVE_WAYLAND_PLATFORM
51 #include "linux-dmabuf-unstable-v1-client-protocol.h"
52 #include "wayland-drm-client-protocol.h"
53 #include "wayland-drm.h"
54 #include <wayland-client.h>
55 #endif
56
57 #ifdef HAVE_X11_PLATFORM
58 #include "X11/Xlibint.h"
59 #endif
60
61 #include "GL/mesa_glinterop.h"
62 #include "loader/loader.h"
63 #include "mapi/glapi/glapi.h"
64 #include "pipe/p_screen.h"
65 #include "util/bitscan.h"
66 #include "util/driconf.h"
67 #include "util/libsync.h"
68 #include "util/os_file.h"
69 #include "util/u_atomic.h"
70 #include "util/u_call_once.h"
71 #include "util/u_math.h"
72 #include "util/u_vector.h"
73 #include "egl_dri2.h"
74 #include "egldefines.h"
75
76 #define NUM_ATTRIBS 16
77
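/* Color formats for which pbuffer-only EGLConfigs are advertised; see
 * dri2_add_pbuffer_configs_for_visuals() below.
 */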
78 static const enum pipe_format dri2_pbuffer_visuals[] = {
79 PIPE_FORMAT_R16G16B16A16_FLOAT,
80 PIPE_FORMAT_R16G16B16X16_FLOAT,
81 PIPE_FORMAT_B10G10R10A2_UNORM,
82 PIPE_FORMAT_B10G10R10X2_UNORM,
83 PIPE_FORMAT_BGRA8888_UNORM,
84 PIPE_FORMAT_BGRX8888_UNORM,
85 PIPE_FORMAT_B5G6R5_UNORM,
86 };
87
88 static void
89 dri_set_background_context(void *loaderPrivate)
90 {
91 _EGLContext *ctx = _eglGetCurrentContext();
92 _EGLThreadInfo *t = _eglGetCurrentThread();
93
94 _eglBindContextToThread(ctx, t);
95 }
96
97 static void
98 dri2_gl_flush_get(_glapi_proc *glFlush)
99 {
100 *glFlush = _glapi_get_proc_address("glFlush");
101 }
102
103 static void
104 dri2_gl_flush()
105 {
106 static void (*glFlush)(void);
107 static util_once_flag once = UTIL_ONCE_FLAG_INIT;
108
109 util_call_once_data(&once, (util_call_once_data_func)dri2_gl_flush_get,
110 &glFlush);
111
112 /* if glFlush is not available things are horribly broken */
113 if (!glFlush) {
114 _eglLog(_EGL_WARNING, "DRI2: failed to find glFlush entry point");
115 return;
116 }
117
118 glFlush();
119 }
120
121 static GLboolean
122 dri_is_thread_safe(UNUSED void *loaderPrivate)
123 {
124 #ifdef HAVE_X11_PLATFORM
125 struct dri2_egl_surface *dri2_surf = loaderPrivate;
126
127 /* loader_dri3_blit_context_get creates a context with
128 * loaderPrivate being NULL. Enabling glthread for a blitting
129 * context isn't useful, so return false.
130 */
131 if (!loaderPrivate)
132 return false;
133
134 _EGLDisplay *display = dri2_surf->base.Resource.Display;
135
136 Display *xdpy = (Display *)display->PlatformDisplay;
137
138 /* Check that Xlib is running in thread-safe mode when on the
139 * EGL/X11-xlib platform.
140 *
141 * 'lock_fns' is the XLockDisplay function pointer of the X11 display 'dpy'.
142 * It will be NULL if XInitThreads wasn't called.
143 */
144 if (display->Platform == _EGL_PLATFORM_X11 && xdpy && !xdpy->lock_fns)
145 return false;
146 #endif
147
148 return true;
149 }
150
151 const __DRIbackgroundCallableExtension background_callable_extension = {
152 .base = {__DRI_BACKGROUND_CALLABLE, 2},
153
154 .setBackgroundContext = dri_set_background_context,
155 .isThreadSafe = dri_is_thread_safe,
156 };
157
158 const __DRIuseInvalidateExtension use_invalidate = {
159 .base = {__DRI_USE_INVALIDATE, 1},
160 };
161
162 static void
163 dri2_get_pbuffer_drawable_info(__DRIdrawable *draw, int *x, int *y, int *w,
164 int *h, void *loaderPrivate)
165 {
166 struct dri2_egl_surface *dri2_surf = loaderPrivate;
167
168 *x = *y = 0;
169 *w = dri2_surf->base.Width;
170 *h = dri2_surf->base.Height;
171 }
172
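/* Bytes per pixel for the software (swrast) shadow buffer, rounded up to a
 * power of two: e.g. a 24-bit config gives 24 / 8 = 3, rounded up to 4 bytes.
 */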
173 static int
174 dri2_get_bytes_per_pixel(struct dri2_egl_surface *dri2_surf)
175 {
176 const int depth = dri2_surf->base.Config->BufferSize;
177 return depth ? util_next_power_of_two(depth / 8) : 0;
178 }
179
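/* swrast loader callback: copy the rows the driver hands us into the lazily
 * allocated per-surface shadow buffer, clipping against the surface extent.
 */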
180 static void
181 dri2_put_image(__DRIdrawable *draw, int op, int x, int y, int w, int h,
182 char *data, void *loaderPrivate)
183 {
184 struct dri2_egl_surface *dri2_surf = loaderPrivate;
185 const int bpp = dri2_get_bytes_per_pixel(dri2_surf);
186 const int width = dri2_surf->base.Width;
187 const int height = dri2_surf->base.Height;
188 const int dst_stride = width * bpp;
189 const int src_stride = w * bpp;
190 const int x_offset = x * bpp;
191 int copy_width = src_stride;
192
193 if (!dri2_surf->swrast_device_buffer)
194 dri2_surf->swrast_device_buffer = malloc(height * dst_stride);
195
196 if (dri2_surf->swrast_device_buffer) {
197 const char *src = data;
198 char *dst = dri2_surf->swrast_device_buffer;
199
200 dst += x_offset;
201 dst += y * dst_stride;
202
203 /* Drivers are allowed to submit OOB PutImage requests, so clip here. */
204 if (copy_width > dst_stride - x_offset)
205 copy_width = dst_stride - x_offset;
206 if (h > height - y)
207 h = height - y;
208
209 for (; 0 < h; --h) {
210 memcpy(dst, src, copy_width);
211 dst += dst_stride;
212 src += src_stride;
213 }
214 }
215 }
216
217 static void
218 dri2_get_image(__DRIdrawable *read, int x, int y, int w, int h, char *data,
219 void *loaderPrivate)
220 {
221 struct dri2_egl_surface *dri2_surf = loaderPrivate;
222 const int bpp = dri2_get_bytes_per_pixel(dri2_surf);
223 const int width = dri2_surf->base.Width;
224 const int height = dri2_surf->base.Height;
225 const int src_stride = width * bpp;
226 const int dst_stride = w * bpp;
227 const int x_offset = x * bpp;
228 int copy_width = dst_stride;
229 const char *src = dri2_surf->swrast_device_buffer;
230 char *dst = data;
231
232 if (!src) {
233 memset(data, 0, copy_width * h);
234 return;
235 }
236
237 src += x_offset;
238 src += y * src_stride;
239
240 /* Drivers are allowed to submit OOB GetImage requests, so clip here. */
241 if (copy_width > src_stride - x_offset)
242 copy_width = src_stride - x_offset;
243 if (h > height - y)
244 h = height - y;
245
246 for (; 0 < h; --h) {
247 memcpy(dst, src, copy_width);
248 src += src_stride;
249 dst += dst_stride;
250 }
251 }
252
253 /* HACK: technically we should have swrast_null, instead of these.
254 */
255 const __DRIswrastLoaderExtension swrast_pbuffer_loader_extension = {
256 .base = {__DRI_SWRAST_LOADER, 1},
257 .getDrawableInfo = dri2_get_pbuffer_drawable_info,
258 .putImage = dri2_put_image,
259 .getImage = dri2_get_image,
260 };
261
262 static const EGLint dri2_to_egl_attribute_map[__DRI_ATTRIB_MAX] = {
263 [__DRI_ATTRIB_BUFFER_SIZE] = EGL_BUFFER_SIZE,
264 [__DRI_ATTRIB_LEVEL] = EGL_LEVEL,
265 [__DRI_ATTRIB_LUMINANCE_SIZE] = EGL_LUMINANCE_SIZE,
266 [__DRI_ATTRIB_DEPTH_SIZE] = EGL_DEPTH_SIZE,
267 [__DRI_ATTRIB_STENCIL_SIZE] = EGL_STENCIL_SIZE,
268 [__DRI_ATTRIB_SAMPLE_BUFFERS] = EGL_SAMPLE_BUFFERS,
269 [__DRI_ATTRIB_SAMPLES] = EGL_SAMPLES,
270 [__DRI_ATTRIB_MAX_PBUFFER_WIDTH] = EGL_MAX_PBUFFER_WIDTH,
271 [__DRI_ATTRIB_MAX_PBUFFER_HEIGHT] = EGL_MAX_PBUFFER_HEIGHT,
272 [__DRI_ATTRIB_MAX_PBUFFER_PIXELS] = EGL_MAX_PBUFFER_PIXELS,
273 [__DRI_ATTRIB_MAX_SWAP_INTERVAL] = EGL_MAX_SWAP_INTERVAL,
274 [__DRI_ATTRIB_MIN_SWAP_INTERVAL] = EGL_MIN_SWAP_INTERVAL,
275 [__DRI_ATTRIB_YINVERTED] = EGL_Y_INVERTED_NOK,
276 };
277
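/* Select the __DRIconfig variant for a surface: index [double_buffer][srgb].
 * For example (hypothetical usage):
 *
 *    dri_config = dri2_get_dri_config(dri2_conf, EGL_WINDOW_BIT,
 *                                     EGL_GL_COLORSPACE_SRGB_KHR);
 *    // yields conf->dri_config[1][1] (double-buffered, sRGB)
 */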
278 const __DRIconfig *
279 dri2_get_dri_config(struct dri2_egl_config *conf, EGLint surface_type,
280 EGLenum colorspace)
281 {
282 const bool double_buffer = surface_type == EGL_WINDOW_BIT;
283 const bool srgb = colorspace == EGL_GL_COLORSPACE_SRGB_KHR;
284
285 return conf->dri_config[double_buffer][srgb];
286 }
287
288 static EGLBoolean
289 dri2_match_config(const _EGLConfig *conf, const _EGLConfig *criteria)
290 {
291 if (_eglCompareConfigs(conf, criteria, NULL, EGL_FALSE) != 0)
292 return EGL_FALSE;
293
294 if (!_eglMatchConfig(conf, criteria))
295 return EGL_FALSE;
296
297 return EGL_TRUE;
298 }
299
300 void
301 dri2_get_shifts_and_sizes(const __DRIcoreExtension *core,
302 const __DRIconfig *config, int *shifts,
303 unsigned int *sizes)
304 {
305 core->getConfigAttrib(config, __DRI_ATTRIB_RED_SHIFT,
306 (unsigned int *)&shifts[0]);
307 core->getConfigAttrib(config, __DRI_ATTRIB_GREEN_SHIFT,
308 (unsigned int *)&shifts[1]);
309 core->getConfigAttrib(config, __DRI_ATTRIB_BLUE_SHIFT,
310 (unsigned int *)&shifts[2]);
311 core->getConfigAttrib(config, __DRI_ATTRIB_ALPHA_SHIFT,
312 (unsigned int *)&shifts[3]);
313 core->getConfigAttrib(config, __DRI_ATTRIB_RED_SIZE, &sizes[0]);
314 core->getConfigAttrib(config, __DRI_ATTRIB_GREEN_SIZE, &sizes[1]);
315 core->getConfigAttrib(config, __DRI_ATTRIB_BLUE_SIZE, &sizes[2]);
316 core->getConfigAttrib(config, __DRI_ATTRIB_ALPHA_SIZE, &sizes[3]);
317 }
318
319 void
320 dri2_get_render_type_float(const __DRIcoreExtension *core,
321 const __DRIconfig *config, bool *is_float)
322 {
323 unsigned int render_type;
324
325 core->getConfigAttrib(config, __DRI_ATTRIB_RENDER_TYPE, &render_type);
326 *is_float = (render_type & __DRI_ATTRIB_FLOAT_BIT) ? true : false;
327 }
328
329 enum pipe_format
330 dri2_image_format_for_pbuffer_config(struct dri2_egl_display *dri2_dpy,
331 const __DRIconfig *config)
332 {
333 struct gl_config *gl_config = (struct gl_config *) config;
334 return gl_config->color_format;
335 }
336
337 struct dri2_egl_config *
338 dri2_add_config(_EGLDisplay *disp, const __DRIconfig *dri_config,
339 EGLint surface_type, const EGLint *attr_list)
340 {
341 struct dri2_egl_config *conf;
342 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
343 _EGLConfig base;
344 unsigned int attrib, value, double_buffer;
345 bool srgb = false;
346 EGLint key, bind_to_texture_rgb, bind_to_texture_rgba;
347 _EGLConfig *matching_config;
348 EGLint num_configs = 0;
349 EGLint config_id;
350
351 _eglInitConfig(&base, disp, _eglGetArraySize(disp->Configs) + 1);
352
353 double_buffer = 0;
354 bind_to_texture_rgb = 0;
355 bind_to_texture_rgba = 0;
356
357 for (int i = 0; i < __DRI_ATTRIB_MAX; ++i) {
358 if (!dri2_dpy->core->indexConfigAttrib(dri_config, i, &attrib, &value))
359 break;
360
361 switch (attrib) {
362 case __DRI_ATTRIB_RENDER_TYPE:
363 if (value & __DRI_ATTRIB_FLOAT_BIT)
364 base.ComponentType = EGL_COLOR_COMPONENT_TYPE_FLOAT_EXT;
365 if (value & __DRI_ATTRIB_RGBA_BIT)
366 value = EGL_RGB_BUFFER;
367 else if (value & __DRI_ATTRIB_LUMINANCE_BIT)
368 value = EGL_LUMINANCE_BUFFER;
369 else
370 return NULL;
371 base.ColorBufferType = value;
372 break;
373
374 case __DRI_ATTRIB_CONFIG_CAVEAT:
375 if (value & __DRI_ATTRIB_NON_CONFORMANT_CONFIG)
376 value = EGL_NON_CONFORMANT_CONFIG;
377 else if (value & __DRI_ATTRIB_SLOW_BIT)
378 value = EGL_SLOW_CONFIG;
379 else
380 value = EGL_NONE;
381 base.ConfigCaveat = value;
382 break;
383
384 case __DRI_ATTRIB_BIND_TO_TEXTURE_RGB:
385 bind_to_texture_rgb = value;
386 break;
387
388 case __DRI_ATTRIB_BIND_TO_TEXTURE_RGBA:
389 bind_to_texture_rgba = value;
390 break;
391
392 case __DRI_ATTRIB_DOUBLE_BUFFER:
393 double_buffer = value;
394 break;
395
396 case __DRI_ATTRIB_RED_SIZE:
397 base.RedSize = value;
398 break;
399
400 case __DRI_ATTRIB_GREEN_SIZE:
401 base.GreenSize = value;
402 break;
403
404 case __DRI_ATTRIB_BLUE_SIZE:
405 base.BlueSize = value;
406 break;
407
408 case __DRI_ATTRIB_ALPHA_SIZE:
409 base.AlphaSize = value;
410 break;
411
412 case __DRI_ATTRIB_ACCUM_RED_SIZE:
413 case __DRI_ATTRIB_ACCUM_GREEN_SIZE:
414 case __DRI_ATTRIB_ACCUM_BLUE_SIZE:
415 case __DRI_ATTRIB_ACCUM_ALPHA_SIZE:
416 /* Don't expose visuals with the accumulation buffer. */
417 if (value > 0)
418 return NULL;
419 break;
420
421 case __DRI_ATTRIB_FRAMEBUFFER_SRGB_CAPABLE:
422 srgb = value != 0;
423 if (!disp->Extensions.KHR_gl_colorspace && srgb)
424 return NULL;
425 break;
426
427 case __DRI_ATTRIB_MAX_PBUFFER_WIDTH:
428 base.MaxPbufferWidth = _EGL_MAX_PBUFFER_WIDTH;
429 break;
430 case __DRI_ATTRIB_MAX_PBUFFER_HEIGHT:
431 base.MaxPbufferHeight = _EGL_MAX_PBUFFER_HEIGHT;
432 break;
433 case __DRI_ATTRIB_MUTABLE_RENDER_BUFFER:
434 if (disp->Extensions.KHR_mutable_render_buffer)
435 surface_type |= EGL_MUTABLE_RENDER_BUFFER_BIT_KHR;
436 break;
437 default:
438 key = dri2_to_egl_attribute_map[attrib];
439 if (key != 0)
440 _eglSetConfigKey(&base, key, value);
441 break;
442 }
443 }
444
445 if (attr_list)
446 for (int i = 0; attr_list[i] != EGL_NONE; i += 2)
447 _eglSetConfigKey(&base, attr_list[i], attr_list[i + 1]);
448
449 base.NativeRenderable = EGL_TRUE;
450
451 base.SurfaceType = surface_type;
452 if (surface_type &
453 (EGL_PBUFFER_BIT |
454 (disp->Extensions.NOK_texture_from_pixmap ? EGL_PIXMAP_BIT : 0))) {
455 base.BindToTextureRGB = bind_to_texture_rgb;
456 if (base.AlphaSize > 0)
457 base.BindToTextureRGBA = bind_to_texture_rgba;
458 }
459
460 if (double_buffer) {
461 surface_type &= ~EGL_PIXMAP_BIT;
462 } else {
463 surface_type &= ~EGL_WINDOW_BIT;
464 }
465
466 if (!surface_type)
467 return NULL;
468
469 base.RenderableType = disp->ClientAPIs;
470 base.Conformant = disp->ClientAPIs;
471
472 base.MinSwapInterval = dri2_dpy->min_swap_interval;
473 base.MaxSwapInterval = dri2_dpy->max_swap_interval;
474
475 if (!_eglValidateConfig(&base, EGL_FALSE)) {
476 _eglLog(_EGL_DEBUG, "DRI2: failed to validate config %d", base.ConfigID);
477 return NULL;
478 }
479
480 config_id = base.ConfigID;
481 base.ConfigID = EGL_DONT_CARE;
482 base.SurfaceType = EGL_DONT_CARE;
483 num_configs = _eglFilterArray(disp->Configs, (void **)&matching_config, 1,
484 (_EGLArrayForEach)dri2_match_config, &base);
485
486 if (num_configs == 1) {
487 conf = (struct dri2_egl_config *)matching_config;
488
489 if (!conf->dri_config[double_buffer][srgb])
490 conf->dri_config[double_buffer][srgb] = dri_config;
491 else
492 /* a similar config type is already added (unlikely) => discard */
493 return NULL;
494 } else if (num_configs == 0) {
495 conf = calloc(1, sizeof *conf);
496 if (conf == NULL)
497 return NULL;
498
499 conf->dri_config[double_buffer][srgb] = dri_config;
500
501 memcpy(&conf->base, &base, sizeof base);
502 conf->base.SurfaceType = 0;
503 conf->base.ConfigID = config_id;
504
505 _eglLinkConfig(&conf->base);
506 } else {
507 unreachable("duplicates should not be possible");
508 return NULL;
509 }
510
511 conf->base.SurfaceType |= surface_type;
512
513 return conf;
514 }
515
516 static int
517 dri2_pbuffer_visual_index(enum pipe_format format)
518 {
519 for (unsigned i = 0; i < ARRAY_SIZE(dri2_pbuffer_visuals); i++) {
520 if (dri2_pbuffer_visuals[i] == format)
521 return i;
522 }
523
524 return -1;
525 }
526
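/* Advertise a pbuffer-only EGLConfig for every driver config whose color
 * format appears in dri2_pbuffer_visuals[] above.
 */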
527 void
528 dri2_add_pbuffer_configs_for_visuals(_EGLDisplay *disp)
529 {
530 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
531 unsigned int format_count[ARRAY_SIZE(dri2_pbuffer_visuals)] = {0};
532
533 for (unsigned i = 0; dri2_dpy->driver_configs[i] != NULL; i++) {
534 struct dri2_egl_config *dri2_conf;
535 struct gl_config *gl_config =
536 (struct gl_config *) dri2_dpy->driver_configs[i];
537 int idx = dri2_pbuffer_visual_index(gl_config->color_format);
538
539 if (idx == -1)
540 continue;
541
542 dri2_conf = dri2_add_config(disp, dri2_dpy->driver_configs[i],
543 EGL_PBUFFER_BIT, NULL);
544 if (dri2_conf)
545 format_count[idx]++;
546 }
547
548 for (unsigned i = 0; i < ARRAY_SIZE(format_count); i++) {
549 if (!format_count[i]) {
550 _eglLog(_EGL_DEBUG, "No DRI config supports native format %s",
551 util_format_name(dri2_pbuffer_visuals[i]));
552 }
553 }
554 }
555
556 GLboolean
557 dri2_validate_egl_image(void *image, void *data)
558 {
559 _EGLDisplay *disp = _eglLockDisplay(data);
560 _EGLImage *img = _eglLookupImage(image, disp);
561 _eglUnlockDisplay(disp);
562
563 if (img == NULL) {
564 _eglError(EGL_BAD_PARAMETER, "dri2_validate_egl_image");
565 return false;
566 }
567
568 return true;
569 }
570
571 __DRIimage *
572 dri2_lookup_egl_image_validated(void *image, void *data)
573 {
574 struct dri2_egl_image *dri2_img;
575
576 (void)data;
577
578 dri2_img = dri2_egl_image(image);
579
580 return dri2_img->dri_image;
581 }
582
583 __DRIimage *
584 dri2_lookup_egl_image(__DRIscreen *screen, void *image, void *data)
585 {
586 (void)screen;
587
588 if (!dri2_validate_egl_image(image, data))
589 return NULL;
590
591 return dri2_lookup_egl_image_validated(image, data);
592 }
593
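/* EGLImage lookup callbacks handed to the driver: validateEGLImage() checks
 * the handle under the display lock, and lookupEGLImageValidated() then
 * returns the __DRIimage without re-validating.
 */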
594 const __DRIimageLookupExtension image_lookup_extension = {
595 .base = {__DRI_IMAGE_LOOKUP, 2},
596
597 .lookupEGLImage = dri2_lookup_egl_image,
598 .validateEGLImage = dri2_validate_egl_image,
599 .lookupEGLImageValidated = dri2_lookup_egl_image_validated,
600 };
601
602 static const struct dri_extension_match dri3_driver_extensions[] = {
603 {__DRI_CORE, 1, offsetof(struct dri2_egl_display, core), false},
604 {__DRI_MESA, 1, offsetof(struct dri2_egl_display, mesa), false},
605 {__DRI_IMAGE_DRIVER, 1, offsetof(struct dri2_egl_display, image_driver),
606 false},
607 {__DRI_CONFIG_OPTIONS, 2, offsetof(struct dri2_egl_display, configOptions),
608 true},
609 };
610
611 static const struct dri_extension_match dri2_driver_extensions[] = {
612 {__DRI_CORE, 1, offsetof(struct dri2_egl_display, core), false},
613 {__DRI_MESA, 1, offsetof(struct dri2_egl_display, mesa), false},
614 {__DRI_DRI2, 4, offsetof(struct dri2_egl_display, dri2), false},
615 {__DRI_CONFIG_OPTIONS, 2, offsetof(struct dri2_egl_display, configOptions),
616 true},
617 };
618
619 static const struct dri_extension_match dri2_core_extensions[] = {
620 {__DRI2_FLUSH, 1, offsetof(struct dri2_egl_display, flush), false},
621 {__DRI_TEX_BUFFER, 2, offsetof(struct dri2_egl_display, tex_buffer), false},
622 {__DRI_IMAGE, 6, offsetof(struct dri2_egl_display, image), false},
623 };
624
625 static const struct dri_extension_match swrast_driver_extensions[] = {
626 {__DRI_CORE, 1, offsetof(struct dri2_egl_display, core), false},
627 {__DRI_MESA, 1, offsetof(struct dri2_egl_display, mesa), false},
628 {__DRI_SWRAST, 4, offsetof(struct dri2_egl_display, swrast), false},
629 {__DRI_CONFIG_OPTIONS, 2, offsetof(struct dri2_egl_display, configOptions),
630 true},
631 };
632
633 static const struct dri_extension_match swrast_core_extensions[] = {
634 {__DRI_TEX_BUFFER, 2, offsetof(struct dri2_egl_display, tex_buffer), false},
635 {__DRI_IMAGE, 6, offsetof(struct dri2_egl_display, image), true},
636 };
637
638 static const struct dri_extension_match optional_core_extensions[] = {
639 {__DRI2_CONFIG_QUERY, 1, offsetof(struct dri2_egl_display, config), true},
640 {__DRI2_FENCE, 2, offsetof(struct dri2_egl_display, fence), true},
641 {__DRI2_BUFFER_DAMAGE, 1, offsetof(struct dri2_egl_display, buffer_damage),
642 true},
643 {__DRI2_INTEROP, 1, offsetof(struct dri2_egl_display, interop), true},
644 {__DRI2_FLUSH_CONTROL, 1, offsetof(struct dri2_egl_display, flush_control),
645 true},
646 {__DRI2_BLOB, 1, offsetof(struct dri2_egl_display, blob), true},
647 {__DRI_MUTABLE_RENDER_BUFFER_DRIVER, 1,
648 offsetof(struct dri2_egl_display, mutable_render_buffer), true},
649 {__DRI_KOPPER, 1, offsetof(struct dri2_egl_display, kopper), true},
650 };
651
652 static const __DRIextension **
653 dri2_open_driver(_EGLDisplay *disp)
654 {
655 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
656 static const char *search_path_vars[] = {
657 "LIBGL_DRIVERS_PATH",
658 NULL,
659 };
660
661 return loader_open_driver(dri2_dpy->driver_name, &dri2_dpy->driver,
662 search_path_vars);
663 }
664
665 static EGLBoolean
666 dri2_load_driver_common(_EGLDisplay *disp,
667 const struct dri_extension_match *driver_extensions,
668 int num_matches)
669 {
670 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
671 const __DRIextension **extensions;
672
673 extensions = dri2_open_driver(disp);
674 if (!extensions)
675 return EGL_FALSE;
676
677 if (!loader_bind_extensions(dri2_dpy, driver_extensions, num_matches,
678 extensions)) {
679 dlclose(dri2_dpy->driver);
680 dri2_dpy->driver = NULL;
681 return EGL_FALSE;
682 }
683 dri2_dpy->driver_extensions = extensions;
684
685 return EGL_TRUE;
686 }
687
688 EGLBoolean
689 dri2_load_driver(_EGLDisplay *disp)
690 {
691 return dri2_load_driver_common(disp, dri2_driver_extensions,
692 ARRAY_SIZE(dri2_driver_extensions));
693 }
694
695 EGLBoolean
696 dri2_load_driver_dri3(_EGLDisplay *disp)
697 {
698 return dri2_load_driver_common(disp, dri3_driver_extensions,
699 ARRAY_SIZE(dri3_driver_extensions));
700 }
701
702 EGLBoolean
703 dri2_load_driver_swrast(_EGLDisplay *disp)
704 {
705 return dri2_load_driver_common(disp, swrast_driver_extensions,
706 ARRAY_SIZE(swrast_driver_extensions));
707 }
708
709 static const char *
710 dri2_query_driver_name(_EGLDisplay *disp)
711 {
712 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
713 return dri2_dpy->driver_name;
714 }
715
716 static char *
717 dri2_query_driver_config(_EGLDisplay *disp)
718 {
719 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
720 char *ret;
721
722 ret = dri2_dpy->configOptions->getXml(dri2_dpy->driver_name);
723
724 mtx_unlock(&dri2_dpy->lock);
725
726 return ret;
727 }
728
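/* Convenience wrapper: query a gallium pipe cap on the render GPU's screen. */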
729 static int
730 get_screen_param(_EGLDisplay *disp, enum pipe_cap param)
731 {
732 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
733 struct dri_screen *screen = dri_screen(dri2_dpy->dri_screen_render_gpu);
734 return screen->base.screen->get_param(screen->base.screen, param);
735 }
736
737 void
738 dri2_setup_screen(_EGLDisplay *disp)
739 {
740 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
741 struct dri_screen *screen = dri_screen(dri2_dpy->dri_screen_render_gpu);
742 struct pipe_screen *pscreen = screen->base.screen;
743 unsigned int api_mask = screen->api_mask;
744
745 /*
746 * The EGL 1.5 specification defines the default swap interval as 1.
747 * Moreover, eglSwapInterval() is required to clamp the requested value to
748 * the supported range. Since the default value is implicitly assumed to
749 * be supported, use it as both minimum and maximum for platforms that do
750 * not allow changing the interval. Platforms that do allow it (e.g. x11,
751 * wayland) already override these values.
752 */
753 dri2_dpy->min_swap_interval = 1;
754 dri2_dpy->max_swap_interval = 1;
755 dri2_dpy->default_swap_interval = 1;
756
757 disp->ClientAPIs = 0;
758 if ((api_mask & (1 << __DRI_API_OPENGL)) && _eglIsApiValid(EGL_OPENGL_API))
759 disp->ClientAPIs |= EGL_OPENGL_BIT;
760 if ((api_mask & (1 << __DRI_API_GLES)) && _eglIsApiValid(EGL_OPENGL_ES_API))
761 disp->ClientAPIs |= EGL_OPENGL_ES_BIT;
762 if ((api_mask & (1 << __DRI_API_GLES2)) && _eglIsApiValid(EGL_OPENGL_ES_API))
763 disp->ClientAPIs |= EGL_OPENGL_ES2_BIT;
764 if ((api_mask & (1 << __DRI_API_GLES3)) && _eglIsApiValid(EGL_OPENGL_ES_API))
765 disp->ClientAPIs |= EGL_OPENGL_ES3_BIT_KHR;
766
767 assert(dri2_dpy->image_driver || dri2_dpy->dri2 || dri2_dpy->swrast);
768 disp->Extensions.KHR_create_context = EGL_TRUE;
769 disp->Extensions.KHR_create_context_no_error = EGL_TRUE;
770 disp->Extensions.KHR_no_config_context = EGL_TRUE;
771 disp->Extensions.KHR_surfaceless_context = EGL_TRUE;
772
773 if (dri2_dpy->interop) {
774 disp->Extensions.MESA_gl_interop = EGL_TRUE;
775 }
776
777 if (dri2_dpy->configOptions) {
778 disp->Extensions.MESA_query_driver = EGL_TRUE;
779 }
780
781 /* Report back to EGL the bitmask of priorities supported */
782 disp->Extensions.IMG_context_priority =
783 get_screen_param(disp, PIPE_CAP_CONTEXT_PRIORITY_MASK);
784
785 disp->Extensions.EXT_pixel_format_float = EGL_TRUE;
786
787 if (pscreen->is_format_supported(pscreen, PIPE_FORMAT_B8G8R8A8_SRGB,
788 PIPE_TEXTURE_2D, 0, 0,
789 PIPE_BIND_RENDER_TARGET)) {
790 disp->Extensions.KHR_gl_colorspace = EGL_TRUE;
791 }
792
793 disp->Extensions.EXT_create_context_robustness =
794 get_screen_param(disp, PIPE_CAP_DEVICE_RESET_STATUS_QUERY);
795 disp->RobustBufferAccess =
796 get_screen_param(disp, PIPE_CAP_ROBUST_BUFFER_ACCESS_BEHAVIOR);
797
798 /* EXT_query_reset_notification_strategy complements and requires
799 * EXT_create_context_robustness. */
800 disp->Extensions.EXT_query_reset_notification_strategy =
801 disp->Extensions.EXT_create_context_robustness;
802
803 if (dri2_dpy->fence) {
804 disp->Extensions.KHR_fence_sync = EGL_TRUE;
805 disp->Extensions.KHR_wait_sync = EGL_TRUE;
806 if (dri2_dpy->fence->get_fence_from_cl_event)
807 disp->Extensions.KHR_cl_event2 = EGL_TRUE;
808 unsigned capabilities =
809 dri2_dpy->fence->get_capabilities(dri2_dpy->dri_screen_render_gpu);
810 disp->Extensions.ANDROID_native_fence_sync =
811 (capabilities & __DRI_FENCE_CAP_NATIVE_FD) != 0;
812 }
813
814 if (dri2_dpy->blob)
815 disp->Extensions.ANDROID_blob_cache = EGL_TRUE;
816
817 disp->Extensions.KHR_reusable_sync = EGL_TRUE;
818
819 if (dri2_dpy->image) {
820 if (dri2_dpy->image->base.version >= 10 &&
821 dri2_dpy->image->getCapabilities != NULL) {
822 int capabilities;
823
824 capabilities =
825 dri2_dpy->image->getCapabilities(dri2_dpy->dri_screen_render_gpu);
826 disp->Extensions.MESA_drm_image =
827 (capabilities & __DRI_IMAGE_CAP_GLOBAL_NAMES) != 0;
828
829 if (dri2_dpy->image->base.version >= 11)
830 disp->Extensions.MESA_image_dma_buf_export = EGL_TRUE;
831 } else {
832 disp->Extensions.MESA_drm_image = EGL_TRUE;
833 if (dri2_dpy->image->base.version >= 11)
834 disp->Extensions.MESA_image_dma_buf_export = EGL_TRUE;
835 }
836
837 disp->Extensions.KHR_image_base = EGL_TRUE;
838 disp->Extensions.KHR_gl_renderbuffer_image = EGL_TRUE;
839 disp->Extensions.KHR_gl_texture_2D_image = EGL_TRUE;
840 disp->Extensions.KHR_gl_texture_cubemap_image = EGL_TRUE;
841
842 if (get_screen_param(disp, PIPE_CAP_MAX_TEXTURE_3D_LEVELS) != 0)
843 disp->Extensions.KHR_gl_texture_3D_image = EGL_TRUE;
844
845 #ifdef HAVE_LIBDRM
846 if (dri2_dpy->image->base.version >= 8 &&
847 dri2_dpy->image->createImageFromDmaBufs) {
848 disp->Extensions.EXT_image_dma_buf_import = EGL_TRUE;
849 disp->Extensions.EXT_image_dma_buf_import_modifiers = EGL_TRUE;
850 }
851 #endif
852 }
853
854 if (dri2_dpy->flush_control)
855 disp->Extensions.KHR_context_flush_control = EGL_TRUE;
856
857 if (dri2_dpy->buffer_damage && dri2_dpy->buffer_damage->set_damage_region)
858 disp->Extensions.KHR_partial_update = EGL_TRUE;
859
860 disp->Extensions.EXT_protected_surface =
861 get_screen_param(disp, PIPE_CAP_DEVICE_PROTECTED_SURFACE) != 0;
862 disp->Extensions.EXT_protected_content =
863 get_screen_param(disp, PIPE_CAP_DEVICE_PROTECTED_CONTEXT) != 0;
864 }
865
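/* Derive the supported swap-interval range from the driconf "vblank_mode"
 * option; see the switch below for the exact mapping.
 */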
866 void
867 dri2_setup_swap_interval(_EGLDisplay *disp, int max_swap_interval)
868 {
869 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
870 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
871
872 /* Allow driconf to override the application's swap interval. */
873 if (dri2_dpy->config)
874 dri2_dpy->config->configQueryi(dri2_dpy->dri_screen_render_gpu,
875 "vblank_mode", &vblank_mode);
876 switch (vblank_mode) {
877 case DRI_CONF_VBLANK_NEVER:
878 dri2_dpy->min_swap_interval = 0;
879 dri2_dpy->max_swap_interval = 0;
880 dri2_dpy->default_swap_interval = 0;
881 break;
882 case DRI_CONF_VBLANK_ALWAYS_SYNC:
883 dri2_dpy->min_swap_interval = 1;
884 dri2_dpy->max_swap_interval = max_swap_interval;
885 dri2_dpy->default_swap_interval = 1;
886 break;
887 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
888 dri2_dpy->min_swap_interval = 0;
889 dri2_dpy->max_swap_interval = max_swap_interval;
890 dri2_dpy->default_swap_interval = 0;
891 break;
892 default:
893 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
894 dri2_dpy->min_swap_interval = 0;
895 dri2_dpy->max_swap_interval = max_swap_interval;
896 dri2_dpy->default_swap_interval = 1;
897 break;
898 }
899 }
900
901 /* All platforms but DRM call this function to create the screen and populate
902 * the driver_configs. DRM inherits that information from its display - GBM.
903 */
904 EGLBoolean
905 dri2_create_screen(_EGLDisplay *disp)
906 {
907 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
908 char *driver_name_display_gpu;
909
910 if (dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu) {
911 driver_name_display_gpu =
912 loader_get_driver_for_fd(dri2_dpy->fd_display_gpu);
913 if (driver_name_display_gpu) {
914 /* Check that the driver names match so that non-Mesa drivers
915 * will not crash.
916 */
917 if (strcmp(dri2_dpy->driver_name, driver_name_display_gpu) == 0) {
918 dri2_dpy->dri_screen_display_gpu = dri2_dpy->mesa->createNewScreen(
919 0, dri2_dpy->fd_display_gpu, dri2_dpy->loader_extensions,
920 dri2_dpy->driver_extensions, &dri2_dpy->driver_configs, disp);
921 }
922 free(driver_name_display_gpu);
923 }
924 }
925
926 int screen_fd = dri2_dpy->swrast ? -1 : dri2_dpy->fd_render_gpu;
927 dri2_dpy->dri_screen_render_gpu = dri2_dpy->mesa->createNewScreen(
928 0, screen_fd, dri2_dpy->loader_extensions, dri2_dpy->driver_extensions,
929 &dri2_dpy->driver_configs, disp);
930
931 if (dri2_dpy->dri_screen_render_gpu == NULL) {
932 _eglLog(_EGL_WARNING, "egl: failed to create dri2 screen");
933 return EGL_FALSE;
934 }
935
936 if (dri2_dpy->fd_render_gpu == dri2_dpy->fd_display_gpu)
937 dri2_dpy->dri_screen_display_gpu = dri2_dpy->dri_screen_render_gpu;
938
939 dri2_dpy->own_dri_screen = true;
940 return EGL_TRUE;
941 }
942
943 EGLBoolean
944 dri2_setup_extensions(_EGLDisplay *disp)
945 {
946 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
947 const __DRIextension **extensions;
948
949 extensions = dri2_dpy->core->getExtensions(dri2_dpy->dri_screen_render_gpu);
950
951 if (dri2_dpy->image_driver || dri2_dpy->dri2 || disp->Options.Zink) {
952 if (!loader_bind_extensions(dri2_dpy, dri2_core_extensions,
953 ARRAY_SIZE(dri2_core_extensions), extensions))
954 return EGL_FALSE;
955 } else {
956 if (!loader_bind_extensions(dri2_dpy, swrast_core_extensions,
957 ARRAY_SIZE(swrast_core_extensions),
958 extensions))
959 return EGL_FALSE;
960 }
961
962 #ifdef HAVE_DRI3_MODIFIERS
963 dri2_dpy->multibuffers_available =
964 (dri2_dpy->dri3_major_version > 1 ||
965 (dri2_dpy->dri3_major_version == 1 &&
966 dri2_dpy->dri3_minor_version >= 2)) &&
967 (dri2_dpy->present_major_version > 1 ||
968 (dri2_dpy->present_major_version == 1 &&
969 dri2_dpy->present_minor_version >= 2)) &&
970 (dri2_dpy->image && dri2_dpy->image->base.version >= 15);
971 #endif
972 if (disp->Options.Zink && !disp->Options.ForceSoftware &&
973 #ifdef HAVE_DRI3_MODIFIERS
974 dri2_dpy->dri3_major_version != -1 &&
975 !dri2_dpy->multibuffers_available &&
976 #endif
977 (disp->Platform == EGL_PLATFORM_X11_KHR ||
978 disp->Platform == EGL_PLATFORM_XCB_EXT) &&
979 !debug_get_bool_option("LIBGL_KOPPER_DRI2", false))
980 return EGL_FALSE;
981
982 loader_bind_extensions(dri2_dpy, optional_core_extensions,
983 ARRAY_SIZE(optional_core_extensions), extensions);
984 return EGL_TRUE;
985 }
986
987 EGLBoolean
988 dri2_setup_device(_EGLDisplay *disp, EGLBoolean software)
989 {
990 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
991 _EGLDevice *dev;
992 int render_fd;
993
994 /* Extensions must be loaded before calling this function */
995 assert(dri2_dpy->mesa);
996 /* If we're not software, we need a DRM node FD */
997 assert(software || dri2_dpy->fd_render_gpu >= 0);
998
999 /* fd_render_gpu is what we got from WSI, so it might actually be a lie and
1000 * not a render node... */
1001 if (software) {
1002 render_fd = -1;
1003 } else if (loader_is_device_render_capable(dri2_dpy->fd_render_gpu)) {
1004 render_fd = dri2_dpy->fd_render_gpu;
1005 } else {
1006 render_fd = dri2_dpy->mesa->queryCompatibleRenderOnlyDeviceFd(
1007 dri2_dpy->fd_render_gpu);
1008 if (render_fd < 0)
1009 return EGL_FALSE;
1010 }
1011
1012 dev = _eglFindDevice(render_fd, software);
1013
1014 if (render_fd >= 0 && render_fd != dri2_dpy->fd_render_gpu)
1015 close(render_fd);
1016
1017 if (!dev)
1018 return EGL_FALSE;
1019
1020 disp->Device = dev;
1021 return EGL_TRUE;
1022 }
1023
1024 /**
1025 * Called via eglInitialize(), drv->Initialize().
1026 *
1027 * This must be guaranteed to be called exactly once, even if eglInitialize is
1028 * called many times (without an eglTerminate in between).
1029 */
1030 static EGLBoolean
1031 dri2_initialize(_EGLDisplay *disp)
1032 {
1033 EGLBoolean ret = EGL_FALSE;
1034 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1035
1036 /* In the case where the application calls eglMakeCurrent(context1),
1037 * eglTerminate, then eglInitialize again (without a call to eglReleaseThread
1038 * or eglMakeCurrent(NULL) before that), the dri2_dpy structure is still
1039 * initialized, as we need it to be able to free context1 correctly.
1040 *
1041 * It would probably be safest to forcibly release the display with
1042 * dri2_display_release, to make sure the display is reinitialized correctly.
1043 * However, the EGL spec states that we need to keep a reference to the
1044 * current context (so we cannot call dri2_make_current(NULL)), and therefore
1045 * we would leak context1 as we would be missing the old display connection
1046 * to free it up correctly.
1047 */
1048 if (dri2_dpy) {
1049 p_atomic_inc(&dri2_dpy->ref_count);
1050 return EGL_TRUE;
1051 }
1052
1053 loader_set_logger(_eglLog);
1054
1055 switch (disp->Platform) {
1056 case _EGL_PLATFORM_SURFACELESS:
1057 ret = dri2_initialize_surfaceless(disp);
1058 break;
1059 case _EGL_PLATFORM_DEVICE:
1060 ret = dri2_initialize_device(disp);
1061 break;
1062 case _EGL_PLATFORM_X11:
1063 case _EGL_PLATFORM_XCB:
1064 ret = dri2_initialize_x11(disp);
1065 break;
1066 case _EGL_PLATFORM_DRM:
1067 ret = dri2_initialize_drm(disp);
1068 break;
1069 case _EGL_PLATFORM_WAYLAND:
1070 ret = dri2_initialize_wayland(disp);
1071 break;
1072 case _EGL_PLATFORM_ANDROID:
1073 ret = dri2_initialize_android(disp);
1074 break;
1075 default:
1076 unreachable("Callers ensure we cannot get here.");
1077 return EGL_FALSE;
1078 }
1079
1080 if (!ret)
1081 return EGL_FALSE;
1082
1083 if (_eglGetArraySize(disp->Configs) == 0) {
1084 _eglError(EGL_NOT_INITIALIZED, "failed to add any EGLConfigs");
1085 dri2_display_destroy(disp);
1086 return EGL_FALSE;
1087 }
1088
1089 dri2_dpy = dri2_egl_display(disp);
1090 p_atomic_inc(&dri2_dpy->ref_count);
1091
1092 mtx_init(&dri2_dpy->lock, mtx_plain);
1093
1094 return EGL_TRUE;
1095 }
1096
1097 /**
1098 * Decrement display reference count, and free up display if necessary.
1099 */
1100 static void
1101 dri2_display_release(_EGLDisplay *disp)
1102 {
1103 struct dri2_egl_display *dri2_dpy;
1104
1105 if (!disp)
1106 return;
1107
1108 dri2_dpy = dri2_egl_display(disp);
1109
1110 assert(dri2_dpy->ref_count > 0);
1111
1112 if (!p_atomic_dec_zero(&dri2_dpy->ref_count))
1113 return;
1114
1115 _eglCleanupDisplay(disp);
1116 dri2_display_destroy(disp);
1117 }
1118
1119 void
1120 dri2_display_destroy(_EGLDisplay *disp)
1121 {
1122 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1123
1124 if (dri2_dpy->own_dri_screen) {
1125 if (dri2_dpy->vtbl && dri2_dpy->vtbl->close_screen_notify)
1126 dri2_dpy->vtbl->close_screen_notify(disp);
1127
1128 dri2_dpy->core->destroyScreen(dri2_dpy->dri_screen_render_gpu);
1129
1130 if (dri2_dpy->dri_screen_display_gpu &&
1131 dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu)
1132 dri2_dpy->core->destroyScreen(dri2_dpy->dri_screen_display_gpu);
1133 }
1134 if (dri2_dpy->fd_display_gpu >= 0 &&
1135 dri2_dpy->fd_render_gpu != dri2_dpy->fd_display_gpu)
1136 close(dri2_dpy->fd_display_gpu);
1137 if (dri2_dpy->fd_render_gpu >= 0)
1138 close(dri2_dpy->fd_render_gpu);
1139
1140 /* Don't dlclose the driver when building with the address sanitizer, so
1141 * you get good symbols from the leak reports.
1142 */
1143 #if !BUILT_WITH_ASAN || defined(NDEBUG)
1144 if (dri2_dpy->driver)
1145 dlclose(dri2_dpy->driver);
1146 #endif
1147
1148 free(dri2_dpy->driver_name);
1149
1150 #ifdef HAVE_WAYLAND_PLATFORM
1151 free(dri2_dpy->device_name);
1152 #endif
1153
1154 #ifdef HAVE_ANDROID_PLATFORM
1155 u_gralloc_destroy(&dri2_dpy->gralloc);
1156 #endif
1157
1158 switch (disp->Platform) {
1159 case _EGL_PLATFORM_X11:
1160 dri2_teardown_x11(dri2_dpy);
1161 break;
1162 case _EGL_PLATFORM_DRM:
1163 dri2_teardown_drm(dri2_dpy);
1164 break;
1165 case _EGL_PLATFORM_WAYLAND:
1166 dri2_teardown_wayland(dri2_dpy);
1167 break;
1168 default:
1169 /* TODO: add teardown for other platforms */
1170 break;
1171 }
1172
1173 /* The drm platform does not create the screen/driver_configs but reuses
1174 * the ones from the gbm device. As such, gbm itself is responsible
1175 * for the cleanup.
1176 */
1177 if (disp->Platform != _EGL_PLATFORM_DRM && dri2_dpy->driver_configs) {
1178 for (unsigned i = 0; dri2_dpy->driver_configs[i]; i++)
1179 free((__DRIconfig *)dri2_dpy->driver_configs[i]);
1180 free(dri2_dpy->driver_configs);
1181 }
1182 free(dri2_dpy);
1183 disp->DriverData = NULL;
1184 }
1185
1186 struct dri2_egl_display *
1187 dri2_display_create(void)
1188 {
1189 struct dri2_egl_display *dri2_dpy = calloc(1, sizeof *dri2_dpy);
1190 if (!dri2_dpy) {
1191 _eglError(EGL_BAD_ALLOC, "eglInitialize");
1192 return NULL;
1193 }
1194
1195 dri2_dpy->fd_render_gpu = -1;
1196 dri2_dpy->fd_display_gpu = -1;
1197
1198 #ifdef HAVE_DRI3_MODIFIERS
1199 dri2_dpy->dri3_major_version = -1;
1200 dri2_dpy->dri3_minor_version = -1;
1201 dri2_dpy->present_major_version = -1;
1202 dri2_dpy->present_minor_version = -1;
1203 #endif
1204
1205 return dri2_dpy;
1206 }
1207
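/* Lazily allocate and cache a DRI2 buffer for the given attachment; the
 * cached buffers are released in dri2_egl_surface_free_local_buffers().
 */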
1208 __DRIbuffer *
1209 dri2_egl_surface_alloc_local_buffer(struct dri2_egl_surface *dri2_surf,
1210 unsigned int att, unsigned int format)
1211 {
1212 struct dri2_egl_display *dri2_dpy =
1213 dri2_egl_display(dri2_surf->base.Resource.Display);
1214
1215 if (att >= ARRAY_SIZE(dri2_surf->local_buffers))
1216 return NULL;
1217
1218 if (!dri2_surf->local_buffers[att]) {
1219 dri2_surf->local_buffers[att] = dri2_dpy->dri2->allocateBuffer(
1220 dri2_dpy->dri_screen_render_gpu, att, format, dri2_surf->base.Width,
1221 dri2_surf->base.Height);
1222 }
1223
1224 return dri2_surf->local_buffers[att];
1225 }
1226
1227 void
1228 dri2_egl_surface_free_local_buffers(struct dri2_egl_surface *dri2_surf)
1229 {
1230 struct dri2_egl_display *dri2_dpy =
1231 dri2_egl_display(dri2_surf->base.Resource.Display);
1232
1233 for (int i = 0; i < ARRAY_SIZE(dri2_surf->local_buffers); i++) {
1234 if (dri2_surf->local_buffers[i]) {
1235 dri2_dpy->dri2->releaseBuffer(dri2_dpy->dri_screen_render_gpu,
1236 dri2_surf->local_buffers[i]);
1237 dri2_surf->local_buffers[i] = NULL;
1238 }
1239 }
1240 }
1241
1242 /**
1243 * Called via eglTerminate(), drv->Terminate().
1244 *
1245 * This must be guaranteed to be called exactly once, even if eglTerminate is
1246 * called many times (without an eglInitialize in between).
1247 */
1248 static EGLBoolean
1249 dri2_terminate(_EGLDisplay *disp)
1250 {
1251 /* Release all non-current Context/Surfaces. */
1252 _eglReleaseDisplayResources(disp);
1253
1254 dri2_display_release(disp);
1255
1256 return EGL_TRUE;
1257 }
1258
1259 /**
1260 * Set the error code after a call to
1261 * dri2_egl_display::dri2::createContextAttribs.
1262 */
1263 static void
1264 dri2_create_context_attribs_error(int dri_error)
1265 {
1266 EGLint egl_error;
1267
1268 switch (dri_error) {
1269 case __DRI_CTX_ERROR_SUCCESS:
1270 return;
1271
1272 case __DRI_CTX_ERROR_NO_MEMORY:
1273 egl_error = EGL_BAD_ALLOC;
1274 break;
1275
1276 /* From the EGL_KHR_create_context spec, section "Errors":
1277 *
1278 * * If <config> does not support a client API context compatible
1279 * with the requested API major and minor version, [...] context
1280 * flags, and context reset notification behavior (for client API types
1281 * where these attributes are supported), then an EGL_BAD_MATCH error is
1282 * generated.
1283 *
1284 * * If an OpenGL ES context is requested and the values for
1285 * attributes EGL_CONTEXT_MAJOR_VERSION_KHR and
1286 * EGL_CONTEXT_MINOR_VERSION_KHR specify an OpenGL ES version that
1287 * is not defined, then an EGL_BAD_MATCH error is generated.
1288 *
1289 * * If an OpenGL context is requested, the requested version is
1290 * greater than 3.2, and the value for attribute
1291 * EGL_CONTEXT_OPENGL_PROFILE_MASK_KHR has no bits set; has any
1292 * bits set other than EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR and
1293 * EGL_CONTEXT_OPENGL_COMPATIBILITY_PROFILE_BIT_KHR; has more than
1294 * one of these bits set; or if the implementation does not support
1295 * the requested profile, then an EGL_BAD_MATCH error is generated.
1296 */
1297 case __DRI_CTX_ERROR_BAD_API:
1298 case __DRI_CTX_ERROR_BAD_VERSION:
1299 case __DRI_CTX_ERROR_BAD_FLAG:
1300 egl_error = EGL_BAD_MATCH;
1301 break;
1302
1303 /* From the EGL_KHR_create_context spec, section "Errors":
1304 *
1305 * * If an attribute name or attribute value in <attrib_list> is not
1306 * recognized (including unrecognized bits in bitmask attributes),
1307 * then an EGL_BAD_ATTRIBUTE error is generated."
1308 */
1309 case __DRI_CTX_ERROR_UNKNOWN_ATTRIBUTE:
1310 case __DRI_CTX_ERROR_UNKNOWN_FLAG:
1311 egl_error = EGL_BAD_ATTRIBUTE;
1312 break;
1313
1314 default:
1315 assert(!"unknown dri_error code");
1316 egl_error = EGL_BAD_MATCH;
1317 break;
1318 }
1319
1320 _eglError(egl_error, "dri2_create_context");
1321 }
1322
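/* Pack the context attributes as (token, value) pairs. For example, an
 * OpenGL ES 3.1 request produces something like:
 *
 *    { __DRI_CTX_ATTRIB_MAJOR_VERSION, 3,
 *      __DRI_CTX_ATTRIB_MINOR_VERSION, 1, ... }
 *
 * The caller passes num_attribs / 2 as the pair count to createContext().
 */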
1323 static bool
1324 dri2_fill_context_attribs(struct dri2_egl_context *dri2_ctx,
1325 struct dri2_egl_display *dri2_dpy,
1326 uint32_t *ctx_attribs, unsigned *num_attribs)
1327 {
1328 int pos = 0;
1329
1330 assert(*num_attribs >= NUM_ATTRIBS);
1331
1332 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_MAJOR_VERSION;
1333 ctx_attribs[pos++] = dri2_ctx->base.ClientMajorVersion;
1334 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_MINOR_VERSION;
1335 ctx_attribs[pos++] = dri2_ctx->base.ClientMinorVersion;
1336
1337 if (dri2_ctx->base.Flags != 0) {
1338 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_FLAGS;
1339 ctx_attribs[pos++] = dri2_ctx->base.Flags;
1340 }
1341
1342 if (dri2_ctx->base.ResetNotificationStrategy !=
1343 EGL_NO_RESET_NOTIFICATION_KHR) {
1344 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_RESET_STRATEGY;
1345 ctx_attribs[pos++] = __DRI_CTX_RESET_LOSE_CONTEXT;
1346 }
1347
1348 if (dri2_ctx->base.ContextPriority != EGL_CONTEXT_PRIORITY_MEDIUM_IMG) {
1349 unsigned val;
1350
1351 switch (dri2_ctx->base.ContextPriority) {
1352 case EGL_CONTEXT_PRIORITY_HIGH_IMG:
1353 val = __DRI_CTX_PRIORITY_HIGH;
1354 break;
1355 case EGL_CONTEXT_PRIORITY_MEDIUM_IMG:
1356 val = __DRI_CTX_PRIORITY_MEDIUM;
1357 break;
1358 case EGL_CONTEXT_PRIORITY_LOW_IMG:
1359 val = __DRI_CTX_PRIORITY_LOW;
1360 break;
1361 default:
1362 _eglError(EGL_BAD_CONFIG, "eglCreateContext");
1363 return false;
1364 }
1365
1366 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_PRIORITY;
1367 ctx_attribs[pos++] = val;
1368 }
1369
1370 if (dri2_ctx->base.ReleaseBehavior ==
1371 EGL_CONTEXT_RELEASE_BEHAVIOR_NONE_KHR) {
1372 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_RELEASE_BEHAVIOR;
1373 ctx_attribs[pos++] = __DRI_CTX_RELEASE_BEHAVIOR_NONE;
1374 }
1375
1376 if (dri2_ctx->base.NoError) {
1377 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_NO_ERROR;
1378 ctx_attribs[pos++] = true;
1379 }
1380
1381 if (dri2_ctx->base.Protected) {
1382 ctx_attribs[pos++] = __DRI_CTX_ATTRIB_PROTECTED;
1383 ctx_attribs[pos++] = true;
1384 }
1385
1386 *num_attribs = pos;
1387
1388 return true;
1389 }
1390
1391 /**
1392 * Called via eglCreateContext(), drv->CreateContext().
1393 */
1394 static _EGLContext *
1395 dri2_create_context(_EGLDisplay *disp, _EGLConfig *conf,
1396 _EGLContext *share_list, const EGLint *attrib_list)
1397 {
1398 struct dri2_egl_context *dri2_ctx;
1399 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1400 struct dri2_egl_context *dri2_ctx_shared = dri2_egl_context(share_list);
1401 __DRIcontext *shared = dri2_ctx_shared ? dri2_ctx_shared->dri_context : NULL;
1402 struct dri2_egl_config *dri2_config = dri2_egl_config(conf);
1403 const __DRIconfig *dri_config;
1404 int api;
1405 unsigned error;
1406 unsigned num_attribs = NUM_ATTRIBS;
1407 uint32_t ctx_attribs[NUM_ATTRIBS];
1408
1409 dri2_ctx = malloc(sizeof *dri2_ctx);
1410 if (!dri2_ctx) {
1411 dri2_egl_error_unlock(dri2_dpy, EGL_BAD_ALLOC, "eglCreateContext");
1412 return NULL;
1413 }
1414
1415 if (!_eglInitContext(&dri2_ctx->base, disp, conf, share_list, attrib_list))
1416 goto cleanup;
1417
1418 switch (dri2_ctx->base.ClientAPI) {
1419 case EGL_OPENGL_ES_API:
1420 switch (dri2_ctx->base.ClientMajorVersion) {
1421 case 1:
1422 api = __DRI_API_GLES;
1423 break;
1424 case 2:
1425 api = __DRI_API_GLES2;
1426 break;
1427 case 3:
1428 api = __DRI_API_GLES3;
1429 break;
1430 default:
1431 _eglError(EGL_BAD_PARAMETER, "eglCreateContext");
1432 goto cleanup;
1433 }
1434 break;
1435 case EGL_OPENGL_API:
1436 if ((dri2_ctx->base.ClientMajorVersion >= 4 ||
1437 (dri2_ctx->base.ClientMajorVersion == 3 &&
1438 dri2_ctx->base.ClientMinorVersion >= 2)) &&
1439 dri2_ctx->base.Profile == EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR)
1440 api = __DRI_API_OPENGL_CORE;
1441 else if (dri2_ctx->base.ClientMajorVersion == 3 &&
1442 dri2_ctx->base.ClientMinorVersion == 1)
1443 api = __DRI_API_OPENGL_CORE;
1444 else
1445 api = __DRI_API_OPENGL;
1446 break;
1447 default:
1448 _eglError(EGL_BAD_PARAMETER, "eglCreateContext");
1449 goto cleanup;
1450 }
1451
1452 if (conf != NULL) {
1453 /* The config chosen here isn't necessarily
1454 * used for surfaces later.
1455 * A pixmap surface will use the single-buffered config.
1456 * This flexibility depends on disabling the
1457 * doubleBufferMode check in
1458 * src/mesa/main/context.c:check_compatible()
1459 */
1460 if (dri2_config->dri_config[1][0])
1461 dri_config = dri2_config->dri_config[1][0];
1462 else
1463 dri_config = dri2_config->dri_config[0][0];
1464 } else
1465 dri_config = NULL;
1466
1467 if (!dri2_fill_context_attribs(dri2_ctx, dri2_dpy, ctx_attribs,
1468 &num_attribs))
1469 goto cleanup;
1470
1471 dri2_ctx->dri_context = dri2_dpy->mesa->createContext(
1472 dri2_dpy->dri_screen_render_gpu, api, dri_config, shared, num_attribs / 2,
1473 ctx_attribs, &error, dri2_ctx);
1474 dri2_create_context_attribs_error(error);
1475
1476 if (!dri2_ctx->dri_context)
1477 goto cleanup;
1478
1479 mtx_unlock(&dri2_dpy->lock);
1480
1481 return &dri2_ctx->base;
1482
1483 cleanup:
1484 mtx_unlock(&dri2_dpy->lock);
1485 free(dri2_ctx);
1486 return NULL;
1487 }
1488
1489 /**
1490 * Called via eglDestroyContext(), drv->DestroyContext().
1491 */
1492 static EGLBoolean
1493 dri2_destroy_context(_EGLDisplay *disp, _EGLContext *ctx)
1494 {
1495 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1496 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1497
1498 if (_eglPutContext(ctx)) {
1499 dri2_dpy->core->destroyContext(dri2_ctx->dri_context);
1500 free(dri2_ctx);
1501 }
1502
1503 return EGL_TRUE;
1504 }
1505
1506 EGLBoolean
1507 dri2_init_surface(_EGLSurface *surf, _EGLDisplay *disp, EGLint type,
1508 _EGLConfig *conf, const EGLint *attrib_list,
1509 EGLBoolean enable_out_fence, void *native_surface)
1510 {
1511 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1512 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1513
1514 dri2_surf->out_fence_fd = -1;
1515 dri2_surf->enable_out_fence = false;
1516 if (dri2_dpy->fence &&
1517 (dri2_dpy->fence->get_capabilities(dri2_dpy->dri_screen_render_gpu) &
1518 __DRI_FENCE_CAP_NATIVE_FD)) {
1519 dri2_surf->enable_out_fence = enable_out_fence;
1520 }
1521
1522 return _eglInitSurface(surf, disp, type, conf, attrib_list, native_surface);
1523 }
1524
1525 static void
1526 dri2_surface_set_out_fence_fd(_EGLSurface *surf, int fence_fd)
1527 {
1528 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1529
1530 if (dri2_surf->out_fence_fd >= 0)
1531 close(dri2_surf->out_fence_fd);
1532
1533 dri2_surf->out_fence_fd = fence_fd;
1534 }
1535
1536 void
1537 dri2_fini_surface(_EGLSurface *surf)
1538 {
1539 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1540
1541 dri2_surface_set_out_fence_fd(surf, -1);
1542 dri2_surf->enable_out_fence = false;
1543 }
1544
1545 static EGLBoolean
1546 dri2_destroy_surface(_EGLDisplay *disp, _EGLSurface *surf)
1547 {
1548 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1549 EGLBoolean ret = EGL_TRUE;
1550
1551 if (_eglPutSurface(surf))
1552 ret = dri2_dpy->vtbl->destroy_surface(disp, surf);
1553
1554 return ret;
1555 }
1556
1557 static void
1558 dri2_surf_update_fence_fd(_EGLContext *ctx, _EGLDisplay *disp,
1559 _EGLSurface *surf)
1560 {
1561 __DRIcontext *dri_ctx = dri2_egl_context(ctx)->dri_context;
1562 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1563 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1564 int fence_fd = -1;
1565 void *fence;
1566
1567 if (!dri2_surf->enable_out_fence)
1568 return;
1569
1570 fence = dri2_dpy->fence->create_fence_fd(dri_ctx, -1);
1571 if (fence) {
1572 fence_fd =
1573 dri2_dpy->fence->get_fence_fd(dri2_dpy->dri_screen_render_gpu, fence);
1574 dri2_dpy->fence->destroy_fence(dri2_dpy->dri_screen_render_gpu, fence);
1575 }
1576 dri2_surface_set_out_fence_fd(surf, fence_fd);
1577 }
1578
1579 EGLBoolean
1580 dri2_create_drawable(struct dri2_egl_display *dri2_dpy,
1581 const __DRIconfig *config,
1582 struct dri2_egl_surface *dri2_surf, void *loaderPrivate)
1583 {
1584 if (dri2_dpy->kopper) {
1585 dri2_surf->dri_drawable = dri2_dpy->kopper->createNewDrawable(
1586 dri2_dpy->dri_screen_render_gpu, config, loaderPrivate,
1587 &(__DRIkopperDrawableInfo){
1588 #ifdef HAVE_X11_PLATFORM
1589 .multiplanes_available = dri2_dpy->multibuffers_available,
1590 #endif
1591 .is_pixmap = dri2_surf->base.Type == EGL_PBUFFER_BIT ||
1592 dri2_surf->base.Type == EGL_PIXMAP_BIT,
1593 });
1594 } else {
1595 __DRIcreateNewDrawableFunc createNewDrawable;
1596 if (dri2_dpy->image_driver)
1597 createNewDrawable = dri2_dpy->image_driver->createNewDrawable;
1598 else if (dri2_dpy->dri2)
1599 createNewDrawable = dri2_dpy->dri2->createNewDrawable;
1600 else if (dri2_dpy->swrast)
1601 createNewDrawable = dri2_dpy->swrast->createNewDrawable;
1602 else
1603 return _eglError(EGL_BAD_ALLOC, "no createNewDrawable");
1604
1605 dri2_surf->dri_drawable = createNewDrawable(
1606 dri2_dpy->dri_screen_render_gpu, config, loaderPrivate);
1607 }
1608 if (dri2_surf->dri_drawable == NULL)
1609 return _eglError(EGL_BAD_ALLOC, "createNewDrawable");
1610
1611 return EGL_TRUE;
1612 }
1613
1614 /**
1615 * Called via eglMakeCurrent(), drv->MakeCurrent().
1616 */
1617 static EGLBoolean
1618 dri2_make_current(_EGLDisplay *disp, _EGLSurface *dsurf, _EGLSurface *rsurf,
1619 _EGLContext *ctx)
1620 {
1621 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1622 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1623 _EGLDisplay *old_disp = NULL;
1624 struct dri2_egl_display *old_dri2_dpy = NULL;
1625 _EGLContext *old_ctx;
1626 _EGLSurface *old_dsurf, *old_rsurf;
1627 _EGLSurface *tmp_dsurf, *tmp_rsurf;
1628 __DRIdrawable *ddraw, *rdraw;
1629 __DRIcontext *cctx;
1630 EGLint egl_error = EGL_SUCCESS;
1631
1632 if (!dri2_dpy)
1633 return _eglError(EGL_NOT_INITIALIZED, "eglMakeCurrent");
1634
1635 /* make new bindings, set the EGL error otherwise */
1636 if (!_eglBindContext(ctx, dsurf, rsurf, &old_ctx, &old_dsurf, &old_rsurf))
1637 return EGL_FALSE;
1638
1639 if (old_ctx == ctx && old_dsurf == dsurf && old_rsurf == rsurf) {
1640 _eglPutSurface(old_dsurf);
1641 _eglPutSurface(old_rsurf);
1642 _eglPutContext(old_ctx);
1643 return EGL_TRUE;
1644 }
1645
1646 if (old_ctx) {
1647 __DRIcontext *old_cctx = dri2_egl_context(old_ctx)->dri_context;
1648 old_disp = old_ctx->Resource.Display;
1649 old_dri2_dpy = dri2_egl_display(old_disp);
1650
1651 /* Disable shared buffer mode */
1652 if (old_dsurf && _eglSurfaceInSharedBufferMode(old_dsurf) &&
1653 old_dri2_dpy->vtbl->set_shared_buffer_mode) {
1654 old_dri2_dpy->vtbl->set_shared_buffer_mode(old_disp, old_dsurf, false);
1655 }
1656
1657 old_dri2_dpy->core->unbindContext(old_cctx);
1658
1659 if (old_dsurf)
1660 dri2_surf_update_fence_fd(old_ctx, old_disp, old_dsurf);
1661 }
1662
1663 ddraw = (dsurf) ? dri2_dpy->vtbl->get_dri_drawable(dsurf) : NULL;
1664 rdraw = (rsurf) ? dri2_dpy->vtbl->get_dri_drawable(rsurf) : NULL;
1665 cctx = (dri2_ctx) ? dri2_ctx->dri_context : NULL;
1666
1667 if (cctx) {
1668 if (!dri2_dpy->core->bindContext(cctx, ddraw, rdraw)) {
1669 _EGLContext *tmp_ctx;
1670
1671 /* dri2_dpy->core->bindContext failed. We cannot tell for sure why, but
1672 * setting the error to EGL_BAD_MATCH is surely better than leaving it
1673 * as EGL_SUCCESS.
1674 */
1675 egl_error = EGL_BAD_MATCH;
1676
1677 /* undo the previous _eglBindContext */
1678 _eglBindContext(old_ctx, old_dsurf, old_rsurf, &ctx, &tmp_dsurf,
1679 &tmp_rsurf);
1680 assert(&dri2_ctx->base == ctx && tmp_dsurf == dsurf &&
1681 tmp_rsurf == rsurf);
1682
1683 _eglPutSurface(dsurf);
1684 _eglPutSurface(rsurf);
1685 _eglPutContext(ctx);
1686
1687 _eglPutSurface(old_dsurf);
1688 _eglPutSurface(old_rsurf);
1689 _eglPutContext(old_ctx);
1690
1691 ddraw =
1692 (old_dsurf) ? dri2_dpy->vtbl->get_dri_drawable(old_dsurf) : NULL;
1693 rdraw =
1694 (old_rsurf) ? dri2_dpy->vtbl->get_dri_drawable(old_rsurf) : NULL;
1695 cctx = (old_ctx) ? dri2_egl_context(old_ctx)->dri_context : NULL;
1696
1697 /* undo the previous dri2_dpy->core->unbindContext */
1698 if (dri2_dpy->core->bindContext(cctx, ddraw, rdraw)) {
1699 if (old_dsurf && _eglSurfaceInSharedBufferMode(old_dsurf) &&
1700 old_dri2_dpy->vtbl->set_shared_buffer_mode) {
1701 old_dri2_dpy->vtbl->set_shared_buffer_mode(old_disp, old_dsurf,
1702 true);
1703 }
1704
1705 return _eglError(egl_error, "eglMakeCurrent");
1706 }
1707
1708 /* We cannot restore the exact state that was in place before this
1709 * eglMakeCurrent() call, and the spec isn't clear about what to do. We
1710 * can at least prevent EGL from calling into the DRI driver with no DRI
1711 * context bound.
1712 */
1713 dsurf = rsurf = NULL;
1714 ctx = NULL;
1715
1716 _eglBindContext(ctx, dsurf, rsurf, &tmp_ctx, &tmp_dsurf, &tmp_rsurf);
1717 assert(tmp_ctx == old_ctx && tmp_dsurf == old_dsurf &&
1718 tmp_rsurf == old_rsurf);
1719
1720 _eglLog(_EGL_WARNING, "DRI2: failed to rebind the previous context");
1721 } else {
1722 /* dri2_dpy->core->bindContext succeeded, so take a reference on the
1723 * dri2_dpy. This prevents dri2_dpy from being reinitialized when an
1724 * EGLDisplay is terminated and then initialized again while a
1725 * context is still bound. See dri2_initialize() for a more in-depth
1726 * explanation. */
1727 p_atomic_inc(&dri2_dpy->ref_count);
1728 }
1729 }
1730
1731 dri2_destroy_surface(disp, old_dsurf);
1732 dri2_destroy_surface(disp, old_rsurf);
1733
1734 if (old_ctx) {
1735 dri2_destroy_context(disp, old_ctx);
1736 dri2_display_release(old_disp);
1737 }
1738
1739 if (egl_error != EGL_SUCCESS)
1740 return _eglError(egl_error, "eglMakeCurrent");
1741
1742 if (dsurf && _eglSurfaceHasMutableRenderBuffer(dsurf) &&
1743 dri2_dpy->vtbl->set_shared_buffer_mode) {
1744 /* Always update the shared buffer mode. This is obviously needed when
1745 * the active EGL_RENDER_BUFFER is EGL_SINGLE_BUFFER. When
1746 * EGL_RENDER_BUFFER is EGL_BACK_BUFFER, the update protects us in the
1747 * case where an external non-EGL API may have changed the window's shared
1748 * buffer mode since we last saw it.
1749 */
1750 bool mode = (dsurf->ActiveRenderBuffer == EGL_SINGLE_BUFFER);
1751 dri2_dpy->vtbl->set_shared_buffer_mode(disp, dsurf, mode);
1752 }
1753
1754 return EGL_TRUE;
1755 }
1756
1757 __DRIdrawable *
1758 dri2_surface_get_dri_drawable(_EGLSurface *surf)
1759 {
1760 struct dri2_egl_surface *dri2_surf = dri2_egl_surface(surf);
1761
1762 return dri2_surf->dri_drawable;
1763 }
1764
1765 static _EGLSurface *
1766 dri2_create_window_surface(_EGLDisplay *disp, _EGLConfig *conf,
1767 void *native_window, const EGLint *attrib_list)
1768 {
1769 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1770 _EGLSurface *ret = dri2_dpy->vtbl->create_window_surface(
1771 disp, conf, native_window, attrib_list);
1772 mtx_unlock(&dri2_dpy->lock);
1773 return ret;
1774 }
1775
1776 static _EGLSurface *
1777 dri2_create_pixmap_surface(_EGLDisplay *disp, _EGLConfig *conf,
1778 void *native_pixmap, const EGLint *attrib_list)
1779 {
1780 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1781 _EGLSurface *ret = NULL;
1782
1783 if (dri2_dpy->vtbl->create_pixmap_surface)
1784 ret = dri2_dpy->vtbl->create_pixmap_surface(disp, conf, native_pixmap,
1785 attrib_list);
1786
1787 mtx_unlock(&dri2_dpy->lock);
1788
1789 return ret;
1790 }
1791
1792 static _EGLSurface *
1793 dri2_create_pbuffer_surface(_EGLDisplay *disp, _EGLConfig *conf,
1794 const EGLint *attrib_list)
1795 {
1796 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1797 _EGLSurface *ret = NULL;
1798
1799 if (dri2_dpy->vtbl->create_pbuffer_surface)
1800 ret = dri2_dpy->vtbl->create_pbuffer_surface(disp, conf, attrib_list);
1801
1802 mtx_unlock(&dri2_dpy->lock);
1803
1804 return ret;
1805 }
1806
1807 static EGLBoolean
1808 dri2_swap_interval(_EGLDisplay *disp, _EGLSurface *surf, EGLint interval)
1809 {
1810 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1811 EGLBoolean ret = EGL_TRUE;
1812
1813 if (dri2_dpy->vtbl->swap_interval)
1814 ret = dri2_dpy->vtbl->swap_interval(disp, surf, interval);
1815
1816 mtx_unlock(&dri2_dpy->lock);
1817
1818 return ret;
1819 }
1820
1821 /**
1822 * Asks the client API to flush any rendering to the drawable so that we can
1823 * do our swapbuffers.
1824 */
1825 void
1826 dri2_flush_drawable_for_swapbuffers_flags(
1827 _EGLDisplay *disp, _EGLSurface *draw,
1828 enum __DRI2throttleReason throttle_reason)
1829 {
1830 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1831 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(draw);
1832
1833 if (dri2_dpy->flush) {
1834 if (dri2_dpy->flush->base.version >= 4) {
1835 /* We know there's a current context because:
1836 *
1837 * "If surface is not bound to the calling thread’s current
1838 * context, an EGL_BAD_SURFACE error is generated."
1839 */
1840 _EGLContext *ctx = _eglGetCurrentContext();
1841 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
1842
1843 /* From the EGL 1.4 spec (page 52):
1844 *
1845 * "The contents of ancillary buffers are always undefined
1846 * after calling eglSwapBuffers."
1847 */
1848 dri2_dpy->flush->flush_with_flags(
1849 dri2_ctx->dri_context, dri_drawable,
1850 __DRI2_FLUSH_DRAWABLE | __DRI2_FLUSH_INVALIDATE_ANCILLARY,
1851 throttle_reason);
1852 } else {
1853 dri2_dpy->flush->flush(dri_drawable);
1854 }
1855 }
1856 }
1857
1858 void
1859 dri2_flush_drawable_for_swapbuffers(_EGLDisplay *disp, _EGLSurface *draw)
1860 {
1861 dri2_flush_drawable_for_swapbuffers_flags(disp, draw,
1862 __DRI2_THROTTLE_SWAPBUFFER);
1863 }
1864
1865 static EGLBoolean
1866 dri2_swap_buffers(_EGLDisplay *disp, _EGLSurface *surf)
1867 {
1868 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1869 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1870 _EGLContext *ctx = _eglGetCurrentContext();
1871 EGLBoolean ret;
1872
1873 if (ctx && surf)
1874 dri2_surf_update_fence_fd(ctx, disp, surf);
1875 ret = dri2_dpy->vtbl->swap_buffers(disp, surf);
1876
1877 /* SwapBuffers marks the end of the frame; reset the damage region for
1878 * use again next time.
1879 */
1880 if (ret && dri2_dpy->buffer_damage &&
1881 dri2_dpy->buffer_damage->set_damage_region)
1882 dri2_dpy->buffer_damage->set_damage_region(dri_drawable, 0, NULL);
1883
1884 return ret;
1885 }
1886
1887 static EGLBoolean
1888 dri2_swap_buffers_with_damage(_EGLDisplay *disp, _EGLSurface *surf,
1889 const EGLint *rects, EGLint n_rects)
1890 {
1891 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1892 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1893 _EGLContext *ctx = _eglGetCurrentContext();
1894 EGLBoolean ret;
1895
1896 if (ctx && surf)
1897 dri2_surf_update_fence_fd(ctx, disp, surf);
1898 if (dri2_dpy->vtbl->swap_buffers_with_damage)
1899 ret =
1900 dri2_dpy->vtbl->swap_buffers_with_damage(disp, surf, rects, n_rects);
1901 else
1902 ret = dri2_dpy->vtbl->swap_buffers(disp, surf);
1903
1904 /* SwapBuffers marks the end of the frame; reset the damage region for
1905 * use again next time.
1906 */
1907 if (ret && dri2_dpy->buffer_damage &&
1908 dri2_dpy->buffer_damage->set_damage_region)
1909 dri2_dpy->buffer_damage->set_damage_region(dri_drawable, 0, NULL);
1910
1911 return ret;
1912 }
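/* Illustrative client-side usage (not part of this file): this path is
 * reached through eglSwapBuffersWithDamageKHR/EXT, where every damage
 * rectangle is four EGLints (x, y, width, height) in surface coordinates.
 * For a single 32x32 rectangle at the surface origin a caller might do:
 *
 *    EGLint rects[] = { 0, 0, 32, 32 };
 *    eglSwapBuffersWithDamageKHR(dpy, surface, rects, 1);
 *
 * "dpy" and "surface" stand in for the caller's own handles; the extension
 * entry point is obtained with eglGetProcAddress().
 */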
1913
1914 static EGLBoolean
1915 dri2_swap_buffers_region(_EGLDisplay *disp, _EGLSurface *surf, EGLint numRects,
1916 const EGLint *rects)
1917 {
1918 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1919 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1920 EGLBoolean ret;
1921
1922 if (!dri2_dpy->vtbl->swap_buffers_region)
1923 return EGL_FALSE;
1924 ret = dri2_dpy->vtbl->swap_buffers_region(disp, surf, numRects, rects);
1925
1926 /* SwapBuffers marks the end of the frame; reset the damage region for
1927 * use again next time.
1928 */
1929 if (ret && dri2_dpy->buffer_damage &&
1930 dri2_dpy->buffer_damage->set_damage_region)
1931 dri2_dpy->buffer_damage->set_damage_region(dri_drawable, 0, NULL);
1932
1933 return ret;
1934 }
1935
1936 static EGLBoolean
1937 dri2_set_damage_region(_EGLDisplay *disp, _EGLSurface *surf, EGLint *rects,
1938 EGLint n_rects)
1939 {
1940 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1941 __DRIdrawable *drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1942
1943 if (!dri2_dpy->buffer_damage ||
1944 !dri2_dpy->buffer_damage->set_damage_region) {
1945 mtx_unlock(&dri2_dpy->lock);
1946 return EGL_FALSE;
1947 }
1948
1949 dri2_dpy->buffer_damage->set_damage_region(drawable, n_rects, rects);
1950 mtx_unlock(&dri2_dpy->lock);
1951 return EGL_TRUE;
1952 }
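/* Illustrative client-side usage (not part of this file): the damage region
 * installed here comes from eglSetDamageRegionKHR (EGL_KHR_partial_update),
 * which uses the same (x, y, width, height) rectangle layout as the
 * swap-with-damage path above:
 *
 *    EGLint rects[] = { 0, 0, 32, 32 };
 *    eglSetDamageRegionKHR(dpy, surface, rects, 1);
 */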
1953
1954 static EGLBoolean
1955 dri2_post_sub_buffer(_EGLDisplay *disp, _EGLSurface *surf, EGLint x, EGLint y,
1956 EGLint width, EGLint height)
1957 {
1958 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1959 EGLBoolean ret = EGL_FALSE;
1960
1961 if (dri2_dpy->vtbl->post_sub_buffer)
1962 ret = dri2_dpy->vtbl->post_sub_buffer(disp, surf, x, y, width, height);
1963
1964 mtx_unlock(&dri2_dpy->lock);
1965
1966 return ret;
1967 }
1968
1969 static EGLBoolean
1970 dri2_copy_buffers(_EGLDisplay *disp, _EGLSurface *surf,
1971 void *native_pixmap_target)
1972 {
1973 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
1974 if (!dri2_dpy->vtbl->copy_buffers)
1975 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_NATIVE_PIXMAP,
1976 "no support for native pixmaps");
1977 EGLBoolean ret =
1978 dri2_dpy->vtbl->copy_buffers(disp, surf, native_pixmap_target);
1979 mtx_unlock(&dri2_dpy->lock);
1980 return ret;
1981 }
1982
1983 static EGLint
1984 dri2_query_buffer_age(_EGLDisplay *disp, _EGLSurface *surf)
1985 {
1986 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1987 if (!dri2_dpy->vtbl->query_buffer_age)
1988 return 0;
1989 return dri2_dpy->vtbl->query_buffer_age(disp, surf);
1990 }
1991
1992 static EGLBoolean
1993 dri2_wait_client(_EGLDisplay *disp, _EGLContext *ctx)
1994 {
1995 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
1996 _EGLSurface *surf = ctx->DrawSurface;
1997 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
1998
1999 /* FIXME: If EGL allows frontbuffer rendering for window surfaces,
2000 * we need to copy fake to real here. */
2001
2002 if (dri2_dpy->flush != NULL)
2003 dri2_dpy->flush->flush(dri_drawable);
2004
2005 return EGL_TRUE;
2006 }
2007
2008 static EGLBoolean
2009 dri2_wait_native(EGLint engine)
2010 {
2011 if (engine != EGL_CORE_NATIVE_ENGINE)
2012 return _eglError(EGL_BAD_PARAMETER, "eglWaitNative");
2013 /* glXWaitX(); */
2014
2015 return EGL_TRUE;
2016 }
2017
2018 static EGLBoolean
2019 dri2_bind_tex_image(_EGLDisplay *disp, _EGLSurface *surf, EGLint buffer)
2020 {
2021 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2022 struct dri2_egl_context *dri2_ctx;
2023 _EGLContext *ctx;
2024 GLint format, target;
2025 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
2026
2027 ctx = _eglGetCurrentContext();
2028 dri2_ctx = dri2_egl_context(ctx);
2029
2030 if (!_eglBindTexImage(disp, surf, buffer)) {
2031 mtx_unlock(&dri2_dpy->lock);
2032 return EGL_FALSE;
2033 }
2034
2035 switch (surf->TextureFormat) {
2036 case EGL_TEXTURE_RGB:
2037 format = __DRI_TEXTURE_FORMAT_RGB;
2038 break;
2039 case EGL_TEXTURE_RGBA:
2040 format = __DRI_TEXTURE_FORMAT_RGBA;
2041 break;
2042 default:
2043 assert(!"Unexpected texture format in dri2_bind_tex_image()");
2044 format = __DRI_TEXTURE_FORMAT_RGBA;
2045 }
2046
2047 switch (surf->TextureTarget) {
2048 case EGL_TEXTURE_2D:
2049 target = GL_TEXTURE_2D;
2050 break;
2051 default:
2052 target = GL_TEXTURE_2D;
2053 assert(!"Unexpected texture target in dri2_bind_tex_image()");
2054 }
2055
2056 dri2_dpy->tex_buffer->setTexBuffer2(dri2_ctx->dri_context, target, format,
2057 dri_drawable);
2058
2059 mtx_unlock(&dri2_dpy->lock);
2060
2061 return EGL_TRUE;
2062 }
2063
2064 static EGLBoolean
2065 dri2_release_tex_image(_EGLDisplay *disp, _EGLSurface *surf, EGLint buffer)
2066 {
2067 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2068 struct dri2_egl_context *dri2_ctx;
2069 _EGLContext *ctx;
2070 GLint target;
2071 __DRIdrawable *dri_drawable = dri2_dpy->vtbl->get_dri_drawable(surf);
2072
2073 ctx = _eglGetCurrentContext();
2074 dri2_ctx = dri2_egl_context(ctx);
2075
2076 if (!_eglReleaseTexImage(disp, surf, buffer)) {
2077 mtx_unlock(&dri2_dpy->lock);
2078 return EGL_FALSE;
2079 }
2080
2081 switch (surf->TextureTarget) {
2082 case EGL_TEXTURE_2D:
2083 target = GL_TEXTURE_2D;
2084 break;
2085 default:
2086 assert(!"missing texture target");
2087 }
2088
2089 if (dri2_dpy->tex_buffer->base.version >= 3 &&
2090 dri2_dpy->tex_buffer->releaseTexBuffer != NULL) {
2091 dri2_dpy->tex_buffer->releaseTexBuffer(dri2_ctx->dri_context, target,
2092 dri_drawable);
2093 }
2094
2095 mtx_unlock(&dri2_dpy->lock);
2096
2097 return EGL_TRUE;
2098 }
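/* Illustrative client-side usage (not part of this file): the bind/release
 * pair above implements pbuffer render-to-texture. Assuming a pbuffer
 * created with EGL_TEXTURE_FORMAT = EGL_TEXTURE_RGBA and EGL_TEXTURE_TARGET
 * = EGL_TEXTURE_2D, a caller might do:
 *
 *    glBindTexture(GL_TEXTURE_2D, tex);
 *    eglBindTexImage(dpy, pbuffer, EGL_BACK_BUFFER);
 *    // ... sample from "tex" ...
 *    eglReleaseTexImage(dpy, pbuffer, EGL_BACK_BUFFER);
 *
 * "dpy", "pbuffer" and "tex" are placeholders for the caller's own objects.
 */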
2099
2100 static _EGLImage *
2101 dri2_create_image(_EGLDisplay *disp, _EGLContext *ctx, EGLenum target,
2102 EGLClientBuffer buffer, const EGLint *attr_list)
2103 {
2104 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2105 _EGLImage *ret =
2106 dri2_dpy->vtbl->create_image(disp, ctx, target, buffer, attr_list);
2107 mtx_unlock(&dri2_dpy->lock);
2108 return ret;
2109 }
2110
2111 _EGLImage *
2112 dri2_create_image_from_dri(_EGLDisplay *disp, __DRIimage *dri_image)
2113 {
2114 struct dri2_egl_image *dri2_img;
2115
2116 if (dri_image == NULL) {
2117 _eglError(EGL_BAD_ALLOC, "dri2_create_image");
2118 return NULL;
2119 }
2120
2121 dri2_img = malloc(sizeof *dri2_img);
2122 if (!dri2_img) {
2123 _eglError(EGL_BAD_ALLOC, "dri2_create_image");
2124 return NULL;
2125 }
2126
2127 _eglInitImage(&dri2_img->base, disp);
2128
2129 dri2_img->dri_image = dri_image;
2130
2131 return &dri2_img->base;
2132 }
2133
2134 /**
2135 * Translate a DRI Image extension error code into an EGL error code.
2136 */
2137 static EGLint
2138 egl_error_from_dri_image_error(int dri_error)
2139 {
2140 switch (dri_error) {
2141 case __DRI_IMAGE_ERROR_SUCCESS:
2142 return EGL_SUCCESS;
2143 case __DRI_IMAGE_ERROR_BAD_ALLOC:
2144 return EGL_BAD_ALLOC;
2145 case __DRI_IMAGE_ERROR_BAD_MATCH:
2146 return EGL_BAD_MATCH;
2147 case __DRI_IMAGE_ERROR_BAD_PARAMETER:
2148 return EGL_BAD_PARAMETER;
2149 case __DRI_IMAGE_ERROR_BAD_ACCESS:
2150 return EGL_BAD_ACCESS;
2151 default:
2152 assert(!"unknown dri_error code");
2153 return EGL_BAD_ALLOC;
2154 }
2155 }
2156
2157 static _EGLImage *
2158 dri2_create_image_khr_renderbuffer(_EGLDisplay *disp, _EGLContext *ctx,
2159 EGLClientBuffer buffer,
2160 const EGLint *attr_list)
2161 {
2162 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2163 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
2164 GLuint renderbuffer = (GLuint)(uintptr_t)buffer;
2165 __DRIimage *dri_image;
2166
2167 if (renderbuffer == 0) {
2168 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2169 return EGL_NO_IMAGE_KHR;
2170 }
2171
2172 if (!disp->Extensions.KHR_gl_renderbuffer_image) {
2173 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2174 return EGL_NO_IMAGE_KHR;
2175 }
2176
2177 if (dri2_dpy->image->base.version >= 17 &&
2178 dri2_dpy->image->createImageFromRenderbuffer2) {
2179 unsigned error = ~0;
2180
2181 dri_image = dri2_dpy->image->createImageFromRenderbuffer2(
2182 dri2_ctx->dri_context, renderbuffer, NULL, &error);
2183
2184 assert(!!dri_image == (error == __DRI_IMAGE_ERROR_SUCCESS));
2185
2186 if (!dri_image) {
2187 _eglError(egl_error_from_dri_image_error(error),
2188 "dri2_create_image_khr");
2189 return EGL_NO_IMAGE_KHR;
2190 }
2191 } else {
2192 dri_image = dri2_dpy->image->createImageFromRenderbuffer(
2193 dri2_ctx->dri_context, renderbuffer, NULL);
2194 if (!dri_image) {
2195 _eglError(EGL_BAD_ALLOC, "dri2_create_image_khr");
2196 return EGL_NO_IMAGE_KHR;
2197 }
2198 }
2199
2200 return dri2_create_image_from_dri(disp, dri_image);
2201 }
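/* Illustrative client-side usage (not part of this file): wrapping a GL
 * renderbuffer in an EGLImage through EGL_KHR_gl_renderbuffer_image, where
 * "ctx" is the GL context that owns the renderbuffer "rbo":
 *
 *    EGLImageKHR img = eglCreateImageKHR(dpy, ctx, EGL_GL_RENDERBUFFER_KHR,
 *                                        (EGLClientBuffer)(uintptr_t)rbo,
 *                                        NULL);
 */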
2202
2203 #ifdef HAVE_WAYLAND_PLATFORM
2204
2205 /* This structure describes how a wl_buffer maps to one or more
2206 * __DRIimages. A wl_drm_buffer stores the wl_drm format code and the
2207 * offsets and strides of the planes in the buffer. This table maps a
2208 * wl_drm format code to a description of the planes in the buffer
2209 * that lets us create a __DRIimage for each of the planes. */
2210
2211 static const struct wl_drm_components_descriptor {
2212 uint32_t dri_components;
2213 EGLint components;
2214 int nplanes;
2215 } wl_drm_components[] = {
2216 {__DRI_IMAGE_COMPONENTS_RGB, EGL_TEXTURE_RGB, 1},
2217 {__DRI_IMAGE_COMPONENTS_RGBA, EGL_TEXTURE_RGBA, 1},
2218 {__DRI_IMAGE_COMPONENTS_Y_U_V, EGL_TEXTURE_Y_U_V_WL, 3},
2219 {__DRI_IMAGE_COMPONENTS_Y_UV, EGL_TEXTURE_Y_UV_WL, 2},
2220 {__DRI_IMAGE_COMPONENTS_Y_XUXV, EGL_TEXTURE_Y_XUXV_WL, 2},
2221 };
2222
2223 static _EGLImage *
2224 dri2_create_image_wayland_wl_buffer(_EGLDisplay *disp, _EGLContext *ctx,
2225 EGLClientBuffer _buffer,
2226 const EGLint *attr_list)
2227 {
2228 struct wl_drm_buffer *buffer;
2229 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2230 const struct wl_drm_components_descriptor *f;
2231 __DRIimage *dri_image;
2232 _EGLImageAttribs attrs;
2233 int32_t plane;
2234
2235 buffer = wayland_drm_buffer_get(dri2_dpy->wl_server_drm,
2236 (struct wl_resource *)_buffer);
2237 if (!buffer)
2238 return NULL;
2239
2240 if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2241 return NULL;
2242
2243 plane = attrs.PlaneWL;
2244 f = buffer->driver_format;
2245 if (plane < 0 || plane >= f->nplanes) {
2246 _eglError(EGL_BAD_PARAMETER,
2247 "dri2_create_image_wayland_wl_buffer (plane out of bounds)");
2248 return NULL;
2249 }
2250
2251 dri_image = dri2_dpy->image->fromPlanar(buffer->driver_buffer, plane, NULL);
2252 if (dri_image == NULL && plane == 0)
2253 dri_image = dri2_dpy->image->dupImage(buffer->driver_buffer, NULL);
2254 if (dri_image == NULL) {
2255 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_wayland_wl_buffer");
2256 return NULL;
2257 }
2258
2259 return dri2_create_image_from_dri(disp, dri_image);
2260 }
2261 #endif
2262
2263 static EGLBoolean
2264 dri2_get_sync_values_chromium(_EGLDisplay *disp, _EGLSurface *surf,
2265 EGLuint64KHR *ust, EGLuint64KHR *msc,
2266 EGLuint64KHR *sbc)
2267 {
2268 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2269 EGLBoolean ret = EGL_FALSE;
2270
2271 if (dri2_dpy->vtbl->get_sync_values)
2272 ret = dri2_dpy->vtbl->get_sync_values(disp, surf, ust, msc, sbc);
2273
2274 return ret;
2275 }
2276
2277 static EGLBoolean
2278 dri2_get_msc_rate_angle(_EGLDisplay *disp, _EGLSurface *surf, EGLint *numerator,
2279 EGLint *denominator)
2280 {
2281 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2282 if (!dri2_dpy->vtbl->get_msc_rate)
2283 return EGL_FALSE;
2284 return dri2_dpy->vtbl->get_msc_rate(disp, surf, numerator, denominator);
2285 }
2286
2287 /**
2288 * Set the error code after a call to
2289 * dri2_egl_image::dri_image::createImageFromTexture.
2290 */
2291 static void
2292 dri2_create_image_khr_texture_error(int dri_error)
2293 {
2294 EGLint egl_error = egl_error_from_dri_image_error(dri_error);
2295
2296 if (egl_error != EGL_SUCCESS)
2297 _eglError(egl_error, "dri2_create_image_khr_texture");
2298 }
2299
2300 static _EGLImage *
2301 dri2_create_image_khr_texture(_EGLDisplay *disp, _EGLContext *ctx,
2302 EGLenum target, EGLClientBuffer buffer,
2303 const EGLint *attr_list)
2304 {
2305 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2306 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
2307 struct dri2_egl_image *dri2_img;
2308 GLuint texture = (GLuint)(uintptr_t)buffer;
2309 _EGLImageAttribs attrs;
2310 GLuint depth;
2311 GLenum gl_target;
2312 unsigned error;
2313
2314 if (texture == 0) {
2315 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2316 return EGL_NO_IMAGE_KHR;
2317 }
2318
2319 if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2320 return EGL_NO_IMAGE_KHR;
2321
2322 switch (target) {
2323 case EGL_GL_TEXTURE_2D_KHR:
2324 if (!disp->Extensions.KHR_gl_texture_2D_image) {
2325 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2326 return EGL_NO_IMAGE_KHR;
2327 }
2328 depth = 0;
2329 gl_target = GL_TEXTURE_2D;
2330 break;
2331 case EGL_GL_TEXTURE_3D_KHR:
2332 if (!disp->Extensions.KHR_gl_texture_3D_image) {
2333 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2334 return EGL_NO_IMAGE_KHR;
2335 }
2336
2337 depth = attrs.GLTextureZOffset;
2338 gl_target = GL_TEXTURE_3D;
2339 break;
2340 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR:
2341 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X_KHR:
2342 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y_KHR:
2343 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_KHR:
2344 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z_KHR:
2345 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_KHR:
2346 if (!disp->Extensions.KHR_gl_texture_cubemap_image) {
2347 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
2348 return EGL_NO_IMAGE_KHR;
2349 }
2350
2351 depth = target - EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR;
2352 gl_target = GL_TEXTURE_CUBE_MAP;
2353 break;
2354 default:
2355 unreachable("Unexpected target in dri2_create_image_khr_texture()");
2356 return EGL_NO_IMAGE_KHR;
2357 }
2358
2359 dri2_img = malloc(sizeof *dri2_img);
2360 if (!dri2_img) {
2361 _eglError(EGL_BAD_ALLOC, "dri2_create_image_khr");
2362 return EGL_NO_IMAGE_KHR;
2363 }
2364
2365 _eglInitImage(&dri2_img->base, disp);
2366
2367 dri2_img->dri_image = dri2_dpy->image->createImageFromTexture(
2368 dri2_ctx->dri_context, gl_target, texture, depth, attrs.GLTextureLevel,
2369 &error, NULL);
2370 dri2_create_image_khr_texture_error(error);
2371
2372 if (!dri2_img->dri_image) {
2373 free(dri2_img);
2374 return EGL_NO_IMAGE_KHR;
2375 }
2376 return &dri2_img->base;
2377 }
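/* Illustrative client-side usage (not part of this file): creating an
 * EGLImage from level 0 of a GL 2D texture (EGL_KHR_gl_texture_2D_image),
 * with "tex" standing in for the caller's texture name:
 *
 *    const EGLint attribs[] = {
 *       EGL_GL_TEXTURE_LEVEL_KHR, 0,
 *       EGL_NONE,
 *    };
 *    EGLImageKHR img = eglCreateImageKHR(dpy, ctx, EGL_GL_TEXTURE_2D_KHR,
 *                                        (EGLClientBuffer)(uintptr_t)tex,
 *                                        attribs);
 */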
2378
2379 static EGLBoolean
2380 dri2_query_surface(_EGLDisplay *disp, _EGLSurface *surf, EGLint attribute,
2381 EGLint *value)
2382 {
2383 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2384 EGLBoolean ret;
2385
2386 if (!dri2_dpy->vtbl->query_surface) {
2387 ret = _eglQuerySurface(disp, surf, attribute, value);
2388 } else {
2389 ret = dri2_dpy->vtbl->query_surface(disp, surf, attribute, value);
2390 }
2391
2392 return ret;
2393 }
2394
2395 static struct wl_buffer *
2396 dri2_create_wayland_buffer_from_image(_EGLDisplay *disp, _EGLImage *img)
2397 {
2398 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2399 struct wl_buffer *ret = NULL;
2400
2401 if (dri2_dpy->vtbl->create_wayland_buffer_from_image)
2402 ret = dri2_dpy->vtbl->create_wayland_buffer_from_image(disp, img);
2403
2404 mtx_unlock(&dri2_dpy->lock);
2405
2406 return ret;
2407 }
2408
2409 #ifdef HAVE_LIBDRM
2410 static _EGLImage *
2411 dri2_create_image_mesa_drm_buffer(_EGLDisplay *disp, _EGLContext *ctx,
2412 EGLClientBuffer buffer,
2413 const EGLint *attr_list)
2414 {
2415 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2416 EGLint format, name, pitch;
2417 _EGLImageAttribs attrs;
2418 __DRIimage *dri_image;
2419
2420 name = (EGLint)(uintptr_t)buffer;
2421
2422 if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2423 return NULL;
2424
2425 if (attrs.Width <= 0 || attrs.Height <= 0 ||
2426 attrs.DRMBufferStrideMESA <= 0) {
2427 _eglError(EGL_BAD_PARAMETER, "bad width, height or stride");
2428 return NULL;
2429 }
2430
2431 switch (attrs.DRMBufferFormatMESA) {
2432 case EGL_DRM_BUFFER_FORMAT_ARGB32_MESA:
2433 format = PIPE_FORMAT_B8G8R8A8_UNORM;
2434 pitch = attrs.DRMBufferStrideMESA;
2435 break;
2436 default:
2437 _eglError(EGL_BAD_PARAMETER,
2438 "dri2_create_image_khr: unsupported pixmap depth");
2439 return NULL;
2440 }
2441
2442 dri_image = dri2_dpy->image->createImageFromName(
2443 dri2_dpy->dri_screen_render_gpu, attrs.Width, attrs.Height, format, name,
2444 pitch, NULL);
2445
2446 return dri2_create_image_from_dri(disp, dri_image);
2447 }
2448
2449 static EGLBoolean
2450 dri2_check_dma_buf_attribs(const _EGLImageAttribs *attrs)
2451 {
2452 /**
2453 * The spec says:
2454 *
2455 * "Required attributes and their values are as follows:
2456 *
2457 * * EGL_WIDTH & EGL_HEIGHT: The logical dimensions of the buffer in pixels
2458 *
2459 * * EGL_LINUX_DRM_FOURCC_EXT: The pixel format of the buffer, as specified
2460 * by drm_fourcc.h and used as the pixel_format parameter of the
2461 * drm_mode_fb_cmd2 ioctl."
2462 *
2463 * and
2464 *
2465 * "* If <target> is EGL_LINUX_DMA_BUF_EXT, and the list of attributes is
2466 * incomplete, EGL_BAD_PARAMETER is generated."
2467 */
2468 if (attrs->Width <= 0 || attrs->Height <= 0 ||
2469 !attrs->DMABufFourCC.IsPresent)
2470 return _eglError(EGL_BAD_PARAMETER, "attribute(s) missing");
2471
2472 /**
2473 * Also:
2474 *
2475 * "If <target> is EGL_LINUX_DMA_BUF_EXT and one or more of the values
2476 * specified for a plane's pitch or offset isn't supported by EGL,
2477 * EGL_BAD_ACCESS is generated."
2478 */
2479 for (unsigned i = 0; i < ARRAY_SIZE(attrs->DMABufPlanePitches); ++i) {
2480 if (attrs->DMABufPlanePitches[i].IsPresent &&
2481 attrs->DMABufPlanePitches[i].Value <= 0)
2482 return _eglError(EGL_BAD_ACCESS, "invalid pitch");
2483 }
2484
2485 /**
2486 * If <target> is EGL_LINUX_DMA_BUF_EXT, both or neither of the following
2487 * attribute values may be given.
2488 *
2489 * This is referring to EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT and
2490 * EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, and the same for other planes.
2491 */
2492 for (unsigned i = 0; i < DMA_BUF_MAX_PLANES; ++i) {
2493 if (attrs->DMABufPlaneModifiersLo[i].IsPresent !=
2494 attrs->DMABufPlaneModifiersHi[i].IsPresent)
2495 return _eglError(EGL_BAD_PARAMETER,
2496 "modifier attribute lo or hi missing");
2497 }
2498
2499 /* Although the EGL_EXT_image_dma_buf_import_modifiers spec doesn't
2500 * mandate it, we only accept the same modifier across all planes. */
2501 for (unsigned i = 1; i < DMA_BUF_MAX_PLANES; ++i) {
2502 if (attrs->DMABufPlaneFds[i].IsPresent) {
2503 if ((attrs->DMABufPlaneModifiersLo[0].IsPresent !=
2504 attrs->DMABufPlaneModifiersLo[i].IsPresent) ||
2505 (attrs->DMABufPlaneModifiersLo[0].Value !=
2506 attrs->DMABufPlaneModifiersLo[i].Value) ||
2507 (attrs->DMABufPlaneModifiersHi[0].Value !=
2508 attrs->DMABufPlaneModifiersHi[i].Value))
2509 return _eglError(EGL_BAD_PARAMETER,
2510 "modifier attributes not equal");
2511 }
2512 }
2513
2514 return EGL_TRUE;
2515 }
2516
2517 /* Returns the total number of planes for the format or zero if it isn't a
2518 * valid fourcc format.
2519 */
2520 static unsigned
2521 dri2_num_fourcc_format_planes(EGLint format)
2522 {
2523 switch (format) {
2524 case DRM_FORMAT_R8:
2525 case DRM_FORMAT_RG88:
2526 case DRM_FORMAT_GR88:
2527 case DRM_FORMAT_R16:
2528 case DRM_FORMAT_GR1616:
2529 case DRM_FORMAT_RGB332:
2530 case DRM_FORMAT_BGR233:
2531 case DRM_FORMAT_XRGB4444:
2532 case DRM_FORMAT_XBGR4444:
2533 case DRM_FORMAT_RGBX4444:
2534 case DRM_FORMAT_BGRX4444:
2535 case DRM_FORMAT_ARGB4444:
2536 case DRM_FORMAT_ABGR4444:
2537 case DRM_FORMAT_RGBA4444:
2538 case DRM_FORMAT_BGRA4444:
2539 case DRM_FORMAT_XRGB1555:
2540 case DRM_FORMAT_XBGR1555:
2541 case DRM_FORMAT_RGBX5551:
2542 case DRM_FORMAT_BGRX5551:
2543 case DRM_FORMAT_ARGB1555:
2544 case DRM_FORMAT_ABGR1555:
2545 case DRM_FORMAT_RGBA5551:
2546 case DRM_FORMAT_BGRA5551:
2547 case DRM_FORMAT_RGB565:
2548 case DRM_FORMAT_BGR565:
2549 case DRM_FORMAT_RGB888:
2550 case DRM_FORMAT_BGR888:
2551 case DRM_FORMAT_XRGB8888:
2552 case DRM_FORMAT_XBGR8888:
2553 case DRM_FORMAT_RGBX8888:
2554 case DRM_FORMAT_BGRX8888:
2555 case DRM_FORMAT_ARGB8888:
2556 case DRM_FORMAT_ABGR8888:
2557 case DRM_FORMAT_RGBA8888:
2558 case DRM_FORMAT_BGRA8888:
2559 case DRM_FORMAT_XRGB2101010:
2560 case DRM_FORMAT_XBGR2101010:
2561 case DRM_FORMAT_RGBX1010102:
2562 case DRM_FORMAT_BGRX1010102:
2563 case DRM_FORMAT_ARGB2101010:
2564 case DRM_FORMAT_ABGR2101010:
2565 case DRM_FORMAT_RGBA1010102:
2566 case DRM_FORMAT_BGRA1010102:
2567 case DRM_FORMAT_ABGR16161616:
2568 case DRM_FORMAT_XBGR16161616:
2569 case DRM_FORMAT_XBGR16161616F:
2570 case DRM_FORMAT_ABGR16161616F:
2571 case DRM_FORMAT_YUYV:
2572 case DRM_FORMAT_YVYU:
2573 case DRM_FORMAT_UYVY:
2574 case DRM_FORMAT_VYUY:
2575 case DRM_FORMAT_AYUV:
2576 case DRM_FORMAT_XYUV8888:
2577 case DRM_FORMAT_Y210:
2578 case DRM_FORMAT_Y212:
2579 case DRM_FORMAT_Y216:
2580 case DRM_FORMAT_Y410:
2581 case DRM_FORMAT_Y412:
2582 case DRM_FORMAT_Y416:
2583 return 1;
2584
2585 case DRM_FORMAT_NV12:
2586 case DRM_FORMAT_NV21:
2587 case DRM_FORMAT_NV16:
2588 case DRM_FORMAT_NV61:
2589 case DRM_FORMAT_P010:
2590 case DRM_FORMAT_P012:
2591 case DRM_FORMAT_P016:
2592 case DRM_FORMAT_P030:
2593 return 2;
2594
2595 case DRM_FORMAT_YUV410:
2596 case DRM_FORMAT_YVU410:
2597 case DRM_FORMAT_YUV411:
2598 case DRM_FORMAT_YVU411:
2599 case DRM_FORMAT_YUV420:
2600 case DRM_FORMAT_YVU420:
2601 case DRM_FORMAT_YUV422:
2602 case DRM_FORMAT_YVU422:
2603 case DRM_FORMAT_YUV444:
2604 case DRM_FORMAT_YVU444:
2605 return 3;
2606
2607 default:
2608 return 0;
2609 }
2610 }
2611
2612 /* Returns the total number of file descriptors. Zero indicates an error. */
2613 static unsigned
2614 dri2_check_dma_buf_format(const _EGLImageAttribs *attrs)
2615 {
2616 unsigned plane_n = dri2_num_fourcc_format_planes(attrs->DMABufFourCC.Value);
2617 if (plane_n == 0) {
2618 _eglError(EGL_BAD_MATCH, "unknown drm fourcc format");
2619 return 0;
2620 }
2621
2622 for (unsigned i = plane_n; i < DMA_BUF_MAX_PLANES; i++) {
2623 /**
2624 * The modifiers extension spec says:
2625 *
2626 * "Modifiers may modify any attribute of a buffer import, including
2627 * but not limited to adding extra planes to a format which
2628 * otherwise does not have those planes. As an example, a modifier
2629 * may add a plane for an external compression buffer to a
2630 * single-plane format. The exact meaning and effect of any
2631 * modifier is canonically defined by drm_fourcc.h, not as part of
2632 * this extension."
2633 */
2634 if (attrs->DMABufPlaneModifiersLo[i].IsPresent &&
2635 attrs->DMABufPlaneModifiersHi[i].IsPresent) {
2636 plane_n = i + 1;
2637 }
2638 }
2639
2640 /**
2641 * The spec says:
2642 *
2643 * "* If <target> is EGL_LINUX_DMA_BUF_EXT, and the list of attributes is
2644 * incomplete, EGL_BAD_PARAMETER is generated."
2645 */
2646 for (unsigned i = 0; i < plane_n; ++i) {
2647 if (!attrs->DMABufPlaneFds[i].IsPresent ||
2648 !attrs->DMABufPlaneOffsets[i].IsPresent ||
2649 !attrs->DMABufPlanePitches[i].IsPresent) {
2650 _eglError(EGL_BAD_PARAMETER, "plane attribute(s) missing");
2651 return 0;
2652 }
2653 }
2654
2655 /**
2656 * The spec also says:
2657 *
2658 * "If <target> is EGL_LINUX_DMA_BUF_EXT, and the EGL_LINUX_DRM_FOURCC_EXT
2659 * attribute indicates a single-plane format, EGL_BAD_ATTRIBUTE is
2660 * generated if any of the EGL_DMA_BUF_PLANE1_* or EGL_DMA_BUF_PLANE2_*
2661 * or EGL_DMA_BUF_PLANE3_* attributes are specified."
2662 */
2663 for (unsigned i = plane_n; i < DMA_BUF_MAX_PLANES; ++i) {
2664 if (attrs->DMABufPlaneFds[i].IsPresent ||
2665 attrs->DMABufPlaneOffsets[i].IsPresent ||
2666 attrs->DMABufPlanePitches[i].IsPresent) {
2667 _eglError(EGL_BAD_ATTRIBUTE, "too many plane attributes");
2668 return 0;
2669 }
2670 }
2671
2672 return plane_n;
2673 }
2674
2675 static EGLBoolean
2676 dri2_query_dma_buf_formats(_EGLDisplay *disp, EGLint max, EGLint *formats,
2677 EGLint *count)
2678 {
2679 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2680 if (max < 0 || (max > 0 && formats == NULL)) {
2681 _eglError(EGL_BAD_PARAMETER, "invalid value for max count of formats");
2682 goto fail;
2683 }
2684
2685 if (dri2_dpy->image->base.version < 15 ||
2686 dri2_dpy->image->queryDmaBufFormats == NULL)
2687 goto fail;
2688
2689 if (!dri2_dpy->image->queryDmaBufFormats(dri2_dpy->dri_screen_render_gpu,
2690 max, formats, count))
2691 goto fail;
2692
2693 if (max > 0) {
2694 /* Assert that all of the formats returned are actually fourcc formats.
2695 * Some day, if we want the internal interface function to be able to
2696 * return the fake fourcc formats defined in dri_interface.h, we'll have
2697 * to do something more clever here to pare the list down to just real
2698 * fourcc formats so that we don't leak the fake internal ones.
2699 */
2700 for (int i = 0; i < *count; i++) {
2701 assert(dri2_num_fourcc_format_planes(formats[i]) > 0);
2702 }
2703 }
2704
2705 mtx_unlock(&dri2_dpy->lock);
2706
2707 return EGL_TRUE;
2708
2709 fail:
2710 mtx_unlock(&dri2_dpy->lock);
2711 return EGL_FALSE;
2712 }
2713
2714 static EGLBoolean
2715 dri2_query_dma_buf_modifiers(_EGLDisplay *disp, EGLint format, EGLint max,
2716 EGLuint64KHR *modifiers, EGLBoolean *external_only,
2717 EGLint *count)
2718 {
2719 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2720
2721 if (dri2_num_fourcc_format_planes(format) == 0)
2722 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2723 "invalid fourcc format");
2724
2725 if (max < 0)
2726 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2727 "invalid value for max count of formats");
2728
2729 if (max > 0 && modifiers == NULL)
2730 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2731 "invalid modifiers array");
2732
2733 if (dri2_dpy->image->base.version < 15 ||
2734 dri2_dpy->image->queryDmaBufModifiers == NULL) {
2735 mtx_unlock(&dri2_dpy->lock);
2736 return EGL_FALSE;
2737 }
2738
2739 if (dri2_dpy->image->queryDmaBufModifiers(
2740 dri2_dpy->dri_screen_render_gpu, format, max, modifiers,
2741 (unsigned int *)external_only, count) == false)
2742 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_PARAMETER,
2743 "invalid format");
2744
2745 mtx_unlock(&dri2_dpy->lock);
2746
2747 return EGL_TRUE;
2748 }
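/* Illustrative client-side usage (not part of this file): the usual two-call
 * pattern from EGL_EXT_image_dma_buf_import_modifiers, first querying the
 * count and then the data (DRM_FORMAT_XRGB8888 comes from drm_fourcc.h):
 *
 *    EGLint num = 0;
 *    eglQueryDmaBufModifiersEXT(dpy, DRM_FORMAT_XRGB8888, 0, NULL, NULL,
 *                               &num);
 *    EGLuint64KHR *mods = malloc(num * sizeof(*mods));
 *    eglQueryDmaBufModifiersEXT(dpy, DRM_FORMAT_XRGB8888, num, mods, NULL,
 *                               &num);
 *
 * eglQueryDmaBufFormatsEXT() follows the same max/count convention handled
 * by dri2_query_dma_buf_formats() above.
 */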
2749
2750 /**
2751 * The spec says:
2752 *
2753 * "If eglCreateImageKHR is successful for a EGL_LINUX_DMA_BUF_EXT target, the
2754 * EGL will take a reference to the dma_buf(s) which it will release at any
2755 * time while the EGLDisplay is initialized. It is the responsibility of the
2756 * application to close the dma_buf file descriptors."
2757 *
2758 * Therefore we must never close or otherwise modify the file descriptors.
2759 */
2760 _EGLImage *
2761 dri2_create_image_dma_buf(_EGLDisplay *disp, _EGLContext *ctx,
2762 EGLClientBuffer buffer, const EGLint *attr_list)
2763 {
2764 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2765 _EGLImage *res;
2766 _EGLImageAttribs attrs;
2767 __DRIimage *dri_image;
2768 unsigned num_fds;
2769 int fds[DMA_BUF_MAX_PLANES];
2770 int pitches[DMA_BUF_MAX_PLANES];
2771 int offsets[DMA_BUF_MAX_PLANES];
2772 uint64_t modifier;
2773 bool has_modifier = false;
2774 unsigned error;
2775 EGLint egl_error;
2776
2777 /**
2778 * The spec says:
2779 *
2780 * ""* If <target> is EGL_LINUX_DMA_BUF_EXT and <buffer> is not NULL, the
2781 * error EGL_BAD_PARAMETER is generated."
2782 */
2783 if (buffer != NULL) {
2784 _eglError(EGL_BAD_PARAMETER, "buffer not NULL");
2785 return NULL;
2786 }
2787
2788 if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2789 return NULL;
2790
2791 if (!dri2_check_dma_buf_attribs(&attrs))
2792 return NULL;
2793
2794 num_fds = dri2_check_dma_buf_format(&attrs);
2795 if (!num_fds)
2796 return NULL;
2797
2798 for (unsigned i = 0; i < num_fds; ++i) {
2799 fds[i] = attrs.DMABufPlaneFds[i].Value;
2800 pitches[i] = attrs.DMABufPlanePitches[i].Value;
2801 offsets[i] = attrs.DMABufPlaneOffsets[i].Value;
2802 }
2803
2804 /* dri2_check_dma_buf_attribs ensures that the modifier, if available,
2805 * will be present in attrs.DMABufPlaneModifiersLo[0] and
2806 * attrs.DMABufPlaneModifiersHi[0] */
2807 if (attrs.DMABufPlaneModifiersLo[0].IsPresent) {
2808 modifier = combine_u32_into_u64(attrs.DMABufPlaneModifiersHi[0].Value,
2809 attrs.DMABufPlaneModifiersLo[0].Value);
2810 has_modifier = true;
2811 }
2812
2813 if (attrs.ProtectedContent) {
2814 if (dri2_dpy->image->base.version < 18 ||
2815 dri2_dpy->image->createImageFromDmaBufs3 == NULL) {
2816 _eglError(EGL_BAD_MATCH, "unsupported protected_content attribute");
2817 return EGL_NO_IMAGE_KHR;
2818 }
2819 if (!has_modifier)
2820 modifier = DRM_FORMAT_MOD_INVALID;
2821
2822 dri_image = dri2_dpy->image->createImageFromDmaBufs3(
2823 dri2_dpy->dri_screen_render_gpu, attrs.Width, attrs.Height,
2824 attrs.DMABufFourCC.Value, modifier, fds, num_fds, pitches, offsets,
2825 attrs.DMABufYuvColorSpaceHint.Value, attrs.DMABufSampleRangeHint.Value,
2826 attrs.DMABufChromaHorizontalSiting.Value,
2827 attrs.DMABufChromaVerticalSiting.Value,
2828 attrs.ProtectedContent ? __DRI_IMAGE_PROTECTED_CONTENT_FLAG : 0,
2829 &error, NULL);
2830 } else if (has_modifier) {
2831 if (dri2_dpy->image->base.version < 15 ||
2832 dri2_dpy->image->createImageFromDmaBufs2 == NULL) {
2833 _eglError(EGL_BAD_MATCH, "unsupported dma_buf format modifier");
2834 return EGL_NO_IMAGE_KHR;
2835 }
2836 dri_image = dri2_dpy->image->createImageFromDmaBufs2(
2837 dri2_dpy->dri_screen_render_gpu, attrs.Width, attrs.Height,
2838 attrs.DMABufFourCC.Value, modifier, fds, num_fds, pitches, offsets,
2839 attrs.DMABufYuvColorSpaceHint.Value, attrs.DMABufSampleRangeHint.Value,
2840 attrs.DMABufChromaHorizontalSiting.Value,
2841 attrs.DMABufChromaVerticalSiting.Value, &error, NULL);
2842 } else {
2843 dri_image = dri2_dpy->image->createImageFromDmaBufs(
2844 dri2_dpy->dri_screen_render_gpu, attrs.Width, attrs.Height,
2845 attrs.DMABufFourCC.Value, fds, num_fds, pitches, offsets,
2846 attrs.DMABufYuvColorSpaceHint.Value, attrs.DMABufSampleRangeHint.Value,
2847 attrs.DMABufChromaHorizontalSiting.Value,
2848 attrs.DMABufChromaVerticalSiting.Value, &error, NULL);
2849 }
2850
2851 egl_error = egl_error_from_dri_image_error(error);
2852 if (egl_error != EGL_SUCCESS)
2853 _eglError(egl_error, "createImageFromDmaBufs failed");
2854
2855 if (!dri_image)
2856 return EGL_NO_IMAGE_KHR;
2857
2858 res = dri2_create_image_from_dri(disp, dri_image);
2859
2860 return res;
2861 }
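
/* Illustrative client-side usage (not part of this file): importing a
 * single-plane dma_buf with an explicit format modifier. For the
 * EGL_LINUX_DMA_BUF_EXT target, <ctx> must be EGL_NO_CONTEXT and <buffer>
 * must be NULL:
 *
 *    const EGLint attribs[] = {
 *       EGL_WIDTH, width,
 *       EGL_HEIGHT, height,
 *       EGL_LINUX_DRM_FOURCC_EXT, DRM_FORMAT_XRGB8888,
 *       EGL_DMA_BUF_PLANE0_FD_EXT, fd,
 *       EGL_DMA_BUF_PLANE0_OFFSET_EXT, 0,
 *       EGL_DMA_BUF_PLANE0_PITCH_EXT, stride,
 *       EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT, (EGLint)(modifier & 0xffffffff),
 *       EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, (EGLint)(modifier >> 32),
 *       EGL_NONE,
 *    };
 *    EGLImageKHR img = eglCreateImageKHR(dpy, EGL_NO_CONTEXT,
 *                                        EGL_LINUX_DMA_BUF_EXT, NULL,
 *                                        attribs);
 *
 * "fd", "stride" and "modifier" stand in for values obtained from the
 * buffer's allocator; per the spec quoted above, the fd remains owned by the
 * application, which must close it.
 */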
2862 static _EGLImage *
2863 dri2_create_drm_image_mesa(_EGLDisplay *disp, const EGLint *attr_list)
2864 {
2865 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2866 struct dri2_egl_image *dri2_img;
2867 _EGLImageAttribs attrs;
2868 unsigned int dri_use, valid_mask;
2869 int format;
2870
2871 if (!attr_list) {
2872 _eglError(EGL_BAD_PARAMETER, __func__);
2873 goto fail;
2874 }
2875
2876 if (!_eglParseImageAttribList(&attrs, disp, attr_list))
2877 goto fail;
2878
2879 if (attrs.Width <= 0 || attrs.Height <= 0) {
2880 _eglError(EGL_BAD_PARAMETER, __func__);
2881 goto fail;
2882 }
2883
2884 switch (attrs.DRMBufferFormatMESA) {
2885 case EGL_DRM_BUFFER_FORMAT_ARGB32_MESA:
2886 format = PIPE_FORMAT_B8G8R8A8_UNORM;
2887 break;
2888 default:
2889 _eglError(EGL_BAD_PARAMETER, __func__);
2890 goto fail;
2891 }
2892
2893 valid_mask = EGL_DRM_BUFFER_USE_SCANOUT_MESA |
2894 EGL_DRM_BUFFER_USE_SHARE_MESA | EGL_DRM_BUFFER_USE_CURSOR_MESA;
2895 if (attrs.DRMBufferUseMESA & ~valid_mask) {
2896 _eglError(EGL_BAD_PARAMETER, __func__);
2897 goto fail;
2898 }
2899
2900 dri_use = 0;
2901 if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_SHARE_MESA)
2902 dri_use |= __DRI_IMAGE_USE_SHARE;
2903 if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_SCANOUT_MESA)
2904 dri_use |= __DRI_IMAGE_USE_SCANOUT;
2905 if (attrs.DRMBufferUseMESA & EGL_DRM_BUFFER_USE_CURSOR_MESA)
2906 dri_use |= __DRI_IMAGE_USE_CURSOR;
2907
2908 dri2_img = malloc(sizeof *dri2_img);
2909 if (!dri2_img) {
2910 _eglError(EGL_BAD_ALLOC, "dri2_create_image_khr");
2911 goto fail;
2912 }
2913
2914 _eglInitImage(&dri2_img->base, disp);
2915
2916 dri2_img->dri_image =
2917 dri2_dpy->image->createImage(dri2_dpy->dri_screen_render_gpu, attrs.Width,
2918 attrs.Height, format, dri_use, dri2_img);
2919 if (dri2_img->dri_image == NULL) {
2920 free(dri2_img);
2921 _eglError(EGL_BAD_ALLOC, "dri2_create_drm_image_mesa");
2922 goto fail;
2923 }
2924
2925 mtx_unlock(&dri2_dpy->lock);
2926
2927 return &dri2_img->base;
2928
2929 fail:
2930 mtx_unlock(&dri2_dpy->lock);
2931 return EGL_NO_IMAGE_KHR;
2932 }
2933
2934 static EGLBoolean
2935 dri2_export_drm_image_mesa(_EGLDisplay *disp, _EGLImage *img, EGLint *name,
2936 EGLint *handle, EGLint *stride)
2937 {
2938 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2939 struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2940
2941 if (name && !dri2_dpy->image->queryImage(dri2_img->dri_image,
2942 __DRI_IMAGE_ATTRIB_NAME, name))
2943 return dri2_egl_error_unlock(dri2_dpy, EGL_BAD_ALLOC,
2944 "dri2_export_drm_image_mesa");
2945
2946 if (handle)
2947 dri2_dpy->image->queryImage(dri2_img->dri_image,
2948 __DRI_IMAGE_ATTRIB_HANDLE, handle);
2949
2950 if (stride)
2951 dri2_dpy->image->queryImage(dri2_img->dri_image,
2952 __DRI_IMAGE_ATTRIB_STRIDE, stride);
2953
2954 mtx_unlock(&dri2_dpy->lock);
2955
2956 return EGL_TRUE;
2957 }
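/* Illustrative client-side usage (not part of this file): the
 * EGL_MESA_drm_image create/export pair implemented above, e.g. for a
 * shareable buffer:
 *
 *    const EGLint attribs[] = {
 *       EGL_WIDTH, width,
 *       EGL_HEIGHT, height,
 *       EGL_DRM_BUFFER_FORMAT_MESA, EGL_DRM_BUFFER_FORMAT_ARGB32_MESA,
 *       EGL_DRM_BUFFER_USE_MESA, EGL_DRM_BUFFER_USE_SHARE_MESA,
 *       EGL_NONE,
 *    };
 *    EGLImageKHR img = eglCreateDRMImageMESA(dpy, attribs);
 *    EGLint name, handle, stride;
 *    eglExportDRMImageMESA(dpy, img, &name, &handle, &stride);
 */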
2958
2959 /**
2960 * Checks if we can support EGL_MESA_image_dma_buf_export on this image.
2961 *
2962 * The spec provides a boolean return for the driver to reject exporting for
2963 * basically any reason, but doesn't specify any particular error cases. For
2964 * now, we just fail if we don't have a DRM fourcc for the format.
2965 */
2966 static bool
2967 dri2_can_export_dma_buf_image(_EGLDisplay *disp, _EGLImage *img)
2968 {
2969 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
2970 struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2971 EGLint fourcc;
2972
2973 if (!dri2_dpy->image->queryImage(dri2_img->dri_image,
2974 __DRI_IMAGE_ATTRIB_FOURCC, &fourcc)) {
2975 return false;
2976 }
2977
2978 return true;
2979 }
2980
2981 static EGLBoolean
2982 dri2_export_dma_buf_image_query_mesa(_EGLDisplay *disp, _EGLImage *img,
2983 EGLint *fourcc, EGLint *nplanes,
2984 EGLuint64KHR *modifiers)
2985 {
2986 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
2987 struct dri2_egl_image *dri2_img = dri2_egl_image(img);
2988 int num_planes;
2989
2990 if (!dri2_can_export_dma_buf_image(disp, img)) {
2991 mtx_unlock(&dri2_dpy->lock);
2992 return EGL_FALSE;
2993 }
2994
2995 dri2_dpy->image->queryImage(dri2_img->dri_image,
2996 __DRI_IMAGE_ATTRIB_NUM_PLANES, &num_planes);
2997 if (nplanes)
2998 *nplanes = num_planes;
2999
3000 if (fourcc)
3001 dri2_dpy->image->queryImage(dri2_img->dri_image,
3002 __DRI_IMAGE_ATTRIB_FOURCC, fourcc);
3003
3004 if (modifiers) {
3005 int mod_hi, mod_lo;
3006 uint64_t modifier = DRM_FORMAT_MOD_INVALID;
3007 bool query;
3008
3009 query = dri2_dpy->image->queryImage(
3010 dri2_img->dri_image, __DRI_IMAGE_ATTRIB_MODIFIER_UPPER, &mod_hi);
3011 query &= dri2_dpy->image->queryImage(
3012 dri2_img->dri_image, __DRI_IMAGE_ATTRIB_MODIFIER_LOWER, &mod_lo);
3013 if (query)
3014 modifier = combine_u32_into_u64(mod_hi, mod_lo);
3015
3016 for (int i = 0; i < num_planes; i++)
3017 modifiers[i] = modifier;
3018 }
3019
3020 mtx_unlock(&dri2_dpy->lock);
3021
3022 return EGL_TRUE;
3023 }
3024
3025 static EGLBoolean
3026 dri2_export_dma_buf_image_mesa(_EGLDisplay *disp, _EGLImage *img, int *fds,
3027 EGLint *strides, EGLint *offsets)
3028 {
3029 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3030 struct dri2_egl_image *dri2_img = dri2_egl_image(img);
3031 EGLint nplanes;
3032
3033 if (!dri2_can_export_dma_buf_image(disp, img)) {
3034 mtx_unlock(&dri2_dpy->lock);
3035 return EGL_FALSE;
3036 }
3037
3038 /* EGL_MESA_image_dma_buf_export spec says:
3039 * "If the number of fds is less than the number of planes, then
3040 * subsequent fd slots should contain -1."
3041 */
3042 if (fds) {
3043 /* Query nplanes so that we know how big the given array is. */
3044 dri2_dpy->image->queryImage(dri2_img->dri_image,
3045 __DRI_IMAGE_ATTRIB_NUM_PLANES, &nplanes);
3046 memset(fds, -1, nplanes * sizeof(int));
3047 }
3048
3049 /* rework later to provide multiple fds/strides/offsets */
3050 if (fds)
3051 dri2_dpy->image->queryImage(dri2_img->dri_image, __DRI_IMAGE_ATTRIB_FD,
3052 fds);
3053
3054 if (strides)
3055 dri2_dpy->image->queryImage(dri2_img->dri_image,
3056 __DRI_IMAGE_ATTRIB_STRIDE, strides);
3057
3058 if (offsets) {
3059 int img_offset;
3060 bool ret = dri2_dpy->image->queryImage(
3061 dri2_img->dri_image, __DRI_IMAGE_ATTRIB_OFFSET, &img_offset);
3062 if (ret)
3063 offsets[0] = img_offset;
3064 else
3065 offsets[0] = 0;
3066 }
3067
3068 mtx_unlock(&dri2_dpy->lock);
3069
3070 return EGL_TRUE;
3071 }
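/* Illustrative client-side usage (not part of this file): exporting an
 * EGLImage as dma_buf fds with EGL_MESA_image_dma_buf_export, querying the
 * plane count and fourcc first. The arrays are sized for up to four planes
 * (DMA_BUF_MAX_PLANES here):
 *
 *    int fourcc, nplanes;
 *    EGLuint64KHR modifiers[4];
 *    eglExportDMABUFImageQueryMESA(dpy, img, &fourcc, &nplanes, modifiers);
 *    int fds[4];
 *    EGLint strides[4], offsets[4];
 *    eglExportDMABUFImageMESA(dpy, img, fds, strides, offsets);
 *
 * Unlike the import case, the exported fds are owned by the application,
 * which must close them.
 */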
3072
3073 #endif
3074
3075 _EGLImage *
3076 dri2_create_image_khr(_EGLDisplay *disp, _EGLContext *ctx, EGLenum target,
3077 EGLClientBuffer buffer, const EGLint *attr_list)
3078 {
3079 switch (target) {
3080 case EGL_GL_TEXTURE_2D_KHR:
3081 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_X_KHR:
3082 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_X_KHR:
3083 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Y_KHR:
3084 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Y_KHR:
3085 case EGL_GL_TEXTURE_CUBE_MAP_POSITIVE_Z_KHR:
3086 case EGL_GL_TEXTURE_CUBE_MAP_NEGATIVE_Z_KHR:
3087 case EGL_GL_TEXTURE_3D_KHR:
3088 return dri2_create_image_khr_texture(disp, ctx, target, buffer,
3089 attr_list);
3090 case EGL_GL_RENDERBUFFER_KHR:
3091 return dri2_create_image_khr_renderbuffer(disp, ctx, buffer, attr_list);
3092 #ifdef HAVE_LIBDRM
3093 case EGL_DRM_BUFFER_MESA:
3094 return dri2_create_image_mesa_drm_buffer(disp, ctx, buffer, attr_list);
3095 case EGL_LINUX_DMA_BUF_EXT:
3096 return dri2_create_image_dma_buf(disp, ctx, buffer, attr_list);
3097 #endif
3098 #ifdef HAVE_WAYLAND_PLATFORM
3099 case EGL_WAYLAND_BUFFER_WL:
3100 return dri2_create_image_wayland_wl_buffer(disp, ctx, buffer, attr_list);
3101 #endif
3102 default:
3103 _eglError(EGL_BAD_PARAMETER, "dri2_create_image_khr");
3104 return EGL_NO_IMAGE_KHR;
3105 }
3106 }
3107
3108 static EGLBoolean
3109 dri2_destroy_image_khr(_EGLDisplay *disp, _EGLImage *image)
3110 {
3111 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3112 struct dri2_egl_image *dri2_img = dri2_egl_image(image);
3113
3114 dri2_dpy->image->destroyImage(dri2_img->dri_image);
3115 free(dri2_img);
3116
3117 mtx_unlock(&dri2_dpy->lock);
3118
3119 return EGL_TRUE;
3120 }
3121
3122 #ifdef HAVE_WAYLAND_PLATFORM
3123
3124 static void
3125 dri2_wl_reference_buffer(void *user_data, uint32_t name, int fd,
3126 struct wl_drm_buffer *buffer)
3127 {
3128 _EGLDisplay *disp = user_data;
3129 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
3130 __DRIimage *img;
3131 int dri_components = 0;
3132
3133 if (fd == -1)
3134 img = dri2_dpy->image->createImageFromNames(
3135 dri2_dpy->dri_screen_render_gpu, buffer->width, buffer->height,
3136 buffer->format, (int *)&name, 1, buffer->stride, buffer->offset, NULL);
3137 else
3138 img = dri2_dpy->image->createImageFromFds(
3139 dri2_dpy->dri_screen_render_gpu, buffer->width, buffer->height,
3140 buffer->format, &fd, 1, buffer->stride, buffer->offset, NULL);
3141
3142 if (img == NULL)
3143 return;
3144
3145 dri2_dpy->image->queryImage(img, __DRI_IMAGE_ATTRIB_COMPONENTS,
3146 &dri_components);
3147
3148 buffer->driver_format = NULL;
3149 for (int i = 0; i < ARRAY_SIZE(wl_drm_components); i++)
3150 if (wl_drm_components[i].dri_components == dri_components)
3151 buffer->driver_format = &wl_drm_components[i];
3152
3153 if (buffer->driver_format == NULL)
3154 dri2_dpy->image->destroyImage(img);
3155 else
3156 buffer->driver_buffer = img;
3157 }
3158
3159 static void
3160 dri2_wl_release_buffer(void *user_data, struct wl_drm_buffer *buffer)
3161 {
3162 _EGLDisplay *disp = user_data;
3163 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
3164
3165 dri2_dpy->image->destroyImage(buffer->driver_buffer);
3166 }
3167
3168 static EGLBoolean
3169 dri2_bind_wayland_display_wl(_EGLDisplay *disp, struct wl_display *wl_dpy)
3170 {
3171 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3172 const struct wayland_drm_callbacks wl_drm_callbacks = {
3173 .authenticate = (int (*)(void *, uint32_t))dri2_dpy->vtbl->authenticate,
3174 .reference_buffer = dri2_wl_reference_buffer,
3175 .release_buffer = dri2_wl_release_buffer,
3176 .is_format_supported = dri2_wl_is_format_supported,
3177 };
3178 int flags = 0;
3179 char *device_name;
3180 uint64_t cap;
3181
3182 if (dri2_dpy->wl_server_drm)
3183 goto fail;
3184
3185 device_name = drmGetRenderDeviceNameFromFd(dri2_dpy->fd_render_gpu);
3186 if (!device_name)
3187 device_name = strdup(dri2_dpy->device_name);
3188 if (!device_name)
3189 goto fail;
3190
3191 if (drmGetCap(dri2_dpy->fd_render_gpu, DRM_CAP_PRIME, &cap) == 0 &&
3192 cap == (DRM_PRIME_CAP_IMPORT | DRM_PRIME_CAP_EXPORT) &&
3193 dri2_dpy->image->base.version >= 7 &&
3194 dri2_dpy->image->createImageFromFds != NULL)
3195 flags |= WAYLAND_DRM_PRIME;
3196
3197 dri2_dpy->wl_server_drm =
3198 wayland_drm_init(wl_dpy, device_name, &wl_drm_callbacks, disp, flags);
3199
3200 free(device_name);
3201
3202 if (!dri2_dpy->wl_server_drm)
3203 goto fail;
3204
3205 #ifdef HAVE_DRM_PLATFORM
3206 /* We have to share the wl_drm instance with gbm, so gbm can convert
3207 * wl_buffers to gbm bos. */
3208 if (dri2_dpy->gbm_dri)
3209 dri2_dpy->gbm_dri->wl_drm = dri2_dpy->wl_server_drm;
3210 #endif
3211
3212 mtx_unlock(&dri2_dpy->lock);
3213 return EGL_TRUE;
3214
3215 fail:
3216 mtx_unlock(&dri2_dpy->lock);
3217 return EGL_FALSE;
3218 }
3219
3220 static EGLBoolean
3221 dri2_unbind_wayland_display_wl(_EGLDisplay *disp, struct wl_display *wl_dpy)
3222 {
3223 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
3224
3225 if (!dri2_dpy->wl_server_drm)
3226 return EGL_FALSE;
3227
3228 wayland_drm_uninit(dri2_dpy->wl_server_drm);
3229 dri2_dpy->wl_server_drm = NULL;
3230
3231 return EGL_TRUE;
3232 }
3233
3234 static EGLBoolean
3235 dri2_query_wayland_buffer_wl(_EGLDisplay *disp,
3236 struct wl_resource *buffer_resource,
3237 EGLint attribute, EGLint *value)
3238 {
3239 struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
3240 struct wl_drm_buffer *buffer;
3241 const struct wl_drm_components_descriptor *format;
3242
3243 buffer = wayland_drm_buffer_get(dri2_dpy->wl_server_drm, buffer_resource);
3244 if (!buffer)
3245 return EGL_FALSE;
3246
3247 format = buffer->driver_format;
3248 switch (attribute) {
3249 case EGL_TEXTURE_FORMAT:
3250 *value = format->components;
3251 return EGL_TRUE;
3252 case EGL_WIDTH:
3253 *value = buffer->width;
3254 return EGL_TRUE;
3255 case EGL_HEIGHT:
3256 *value = buffer->height;
3257 return EGL_TRUE;
3258 }
3259
3260 return EGL_FALSE;
3261 }
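/* Illustrative compositor-side usage (not part of this file): a Wayland
 * compositor enables the hooks above via EGL_WL_bind_wayland_display and can
 * then inspect client wl_buffers:
 *
 *    eglBindWaylandDisplayWL(egl_dpy, wl_display);
 *    ...
 *    EGLint format, width, height;
 *    eglQueryWaylandBufferWL(egl_dpy, buffer_resource, EGL_TEXTURE_FORMAT,
 *                            &format);
 *    eglQueryWaylandBufferWL(egl_dpy, buffer_resource, EGL_WIDTH, &width);
 *    eglQueryWaylandBufferWL(egl_dpy, buffer_resource, EGL_HEIGHT, &height);
 *
 * "wl_display" and "buffer_resource" stand in for the compositor's own
 * objects.
 */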
3262 #endif
3263
3264 static void
3265 dri2_egl_ref_sync(struct dri2_egl_sync *sync)
3266 {
3267 p_atomic_inc(&sync->refcount);
3268 }
3269
3270 static void
3271 dri2_egl_unref_sync(struct dri2_egl_display *dri2_dpy,
3272 struct dri2_egl_sync *dri2_sync)
3273 {
3274 if (p_atomic_dec_zero(&dri2_sync->refcount)) {
3275 switch (dri2_sync->base.Type) {
3276 case EGL_SYNC_REUSABLE_KHR:
3277 cnd_destroy(&dri2_sync->cond);
3278 break;
3279 case EGL_SYNC_NATIVE_FENCE_ANDROID:
3280 if (dri2_sync->base.SyncFd != EGL_NO_NATIVE_FENCE_FD_ANDROID)
3281 close(dri2_sync->base.SyncFd);
3282 break;
3283 default:
3284 break;
3285 }
3286
3287 if (dri2_sync->fence)
3288 dri2_dpy->fence->destroy_fence(dri2_dpy->dri_screen_render_gpu,
3289 dri2_sync->fence);
3290
3291 free(dri2_sync);
3292 }
3293 }
3294
3295 static _EGLSync *
3296 dri2_create_sync(_EGLDisplay *disp, EGLenum type, const EGLAttrib *attrib_list)
3297 {
3298 _EGLContext *ctx = _eglGetCurrentContext();
3299 struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
3300 struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
3301 struct dri2_egl_sync *dri2_sync;
3302 EGLint ret;
3303 pthread_condattr_t attr;
3304
3305 dri2_sync = calloc(1, sizeof(struct dri2_egl_sync));
3306 if (!dri2_sync) {
3307 _eglError(EGL_BAD_ALLOC, "eglCreateSyncKHR");
3308 goto fail;
3309 }
3310
3311 if (!_eglInitSync(&dri2_sync->base, disp, type, attrib_list)) {
3312 goto fail;
3313 }
3314
3315 switch (type) {
3316 case EGL_SYNC_FENCE_KHR:
3317 dri2_sync->fence = dri2_dpy->fence->create_fence(dri2_ctx->dri_context);
3318 if (!dri2_sync->fence) {
3319 /* Why did it fail? DRI doesn't return an error code, so we emit
3320 * a generic EGL error that doesn't communicate user error.
3321 */
3322 _eglError(EGL_BAD_ALLOC, "eglCreateSyncKHR");
3323 goto fail;
3324 }
3325 break;
3326
3327 case EGL_SYNC_CL_EVENT_KHR:
3328 dri2_sync->fence = dri2_dpy->fence->get_fence_from_cl_event(
3329 dri2_dpy->dri_screen_render_gpu, dri2_sync->base.CLEvent);
3330 /* this can only happen if the cl_event passed in is invalid. */
3331 if (!dri2_sync->fence) {
3332 _eglError(EGL_BAD_ATTRIBUTE, "eglCreateSyncKHR");
3333 goto fail;
3334 }
3335
3336 /* the initial status must be "signaled" if the cl_event is signaled */
3337 if (dri2_dpy->fence->client_wait_sync(dri2_ctx->dri_context,
3338 dri2_sync->fence, 0, 0))
3339 dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
3340 break;
3341
3342 case EGL_SYNC_REUSABLE_KHR:
3343 /* initialize attr */
3344 ret = pthread_condattr_init(&attr);
3345
3346 if (ret) {
3347 _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
3348 goto fail;
3349 }
3350
3351 /* change clock attribute to CLOCK_MONOTONIC */
3352 ret = pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);
3353
3354 if (ret) {
3355 _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
3356 goto fail;
3357 }
3358
3359 ret = pthread_cond_init(&dri2_sync->cond, &attr);
3360
3361 if (ret) {
3362 _eglError(EGL_BAD_ACCESS, "eglCreateSyncKHR");
3363 goto fail;
3364 }
3365
3366 /* initial status of reusable sync must be "unsignaled" */
3367 dri2_sync->base.SyncStatus = EGL_UNSIGNALED_KHR;
3368 break;
3369
3370 case EGL_SYNC_NATIVE_FENCE_ANDROID:
3371 if (dri2_dpy->fence->create_fence_fd) {
3372 dri2_sync->fence = dri2_dpy->fence->create_fence_fd(
3373 dri2_ctx->dri_context, dri2_sync->base.SyncFd);
3374 }
3375 if (!dri2_sync->fence) {
3376 _eglError(EGL_BAD_ATTRIBUTE, "eglCreateSyncKHR");
3377 goto fail;
3378 }
3379 break;
3380 }
3381
3382 p_atomic_set(&dri2_sync->refcount, 1);
3383 mtx_unlock(&dri2_dpy->lock);
3384
3385 return &dri2_sync->base;
3386
3387 fail:
3388 free(dri2_sync);
3389 mtx_unlock(&dri2_dpy->lock);
3390 return NULL;
3391 }
3392
static EGLBoolean
dri2_destroy_sync(_EGLDisplay *disp, _EGLSync *sync)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
   EGLint ret = EGL_TRUE;
   EGLint err;

   /* if type of sync is EGL_SYNC_REUSABLE_KHR and it is not signaled yet,
    * then unlock all threads possibly blocked by the reusable sync before
    * destroying it.
    */
   if (dri2_sync->base.Type == EGL_SYNC_REUSABLE_KHR &&
       dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR) {
      dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
      /* unblock all threads currently blocked by sync */
      err = cnd_broadcast(&dri2_sync->cond);

      if (err) {
         _eglError(EGL_BAD_ACCESS, "eglDestroySyncKHR");
         ret = EGL_FALSE;
      }
   }

   dri2_egl_unref_sync(dri2_dpy, dri2_sync);

   mtx_unlock(&dri2_dpy->lock);

   return ret;
}

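/* Entry point for eglDupNativeFenceFDANDROID (EGL_ANDROID_native_fence_sync).
 * The fd only materializes once the work behind the sync has been flushed to
 * the kernel, so callers are expected to flush first. An illustrative sketch
 * (variable names are placeholders, not from this file):
 *
 *    EGLSyncKHR sync =
 *       eglCreateSyncKHR(dpy, EGL_SYNC_NATIVE_FENCE_ANDROID, NULL);
 *    glFlush();
 *    int fd = eglDupNativeFenceFDANDROID(dpy, sync);
 *    // fd can be handed to another process or driver; close() it when done
 *
 * The returned fd is a dup of the one cached on the sync, so ownership of
 * the original stays with the sync object.
 */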
static EGLint
dri2_dup_native_fence_fd(_EGLDisplay *disp, _EGLSync *sync)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);

   assert(sync->Type == EGL_SYNC_NATIVE_FENCE_ANDROID);

   if (sync->SyncFd == EGL_NO_NATIVE_FENCE_FD_ANDROID) {
      /* Try to retrieve the actual native fence fd. If rendering has not
       * been flushed, this just returns -1, aka NO_NATIVE_FENCE_FD:
       */
      sync->SyncFd = dri2_dpy->fence->get_fence_fd(
         dri2_dpy->dri_screen_render_gpu, dri2_sync->fence);
   }

   mtx_unlock(&dri2_dpy->lock);

   if (sync->SyncFd == EGL_NO_NATIVE_FENCE_FD_ANDROID) {
      /* if the native fence fd still hasn't been created, return an error: */
      _eglError(EGL_BAD_PARAMETER, "eglDupNativeFenceFDANDROID");
      return EGL_NO_NATIVE_FENCE_FD_ANDROID;
   }

   assert(sync_valid_fd(sync->SyncFd));

   return os_dupfd_cloexec(sync->SyncFd);
}

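/* Entry point for eglSetBlobCacheFuncsANDROID (EGL_ANDROID_blob_cache).
 * The application's set/get callbacks are simply forwarded to the DRI
 * screen, which can then use them to persist compiled shaders and other
 * driver blobs across runs.
 */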
static void
dri2_set_blob_cache_funcs(_EGLDisplay *disp, EGLSetBlobFuncANDROID set,
                          EGLGetBlobFuncANDROID get)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display_lock(disp);
   dri2_dpy->blob->set_cache_funcs(dri2_dpy->dri_screen_render_gpu, set, get);
   mtx_unlock(&dri2_dpy->lock);
}

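/* Entry point for eglClientWaitSyncKHR. Fence-like syncs (fence, native
 * fence, CL event) are waited on through the driver's DRI fence extension;
 * reusable syncs block on the condition variable until they are signaled or
 * the timeout expires. The timeout is given in nanoseconds and is converted
 * to an absolute CLOCK_MONOTONIC deadline for cnd_timedwait, e.g.
 * (illustrative numbers): timeout = 2500000000 ns splits into 2 s +
 * 500000000 ns, which is added to the current monotonic time with a carry
 * into tv_sec if tv_nsec overflows one second.
 */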
static EGLint
dri2_client_wait_sync(_EGLDisplay *disp, _EGLSync *sync, EGLint flags,
                      EGLTime timeout)
{
   _EGLContext *ctx = _eglGetCurrentContext();
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
   struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
   unsigned wait_flags = 0;

   EGLint ret = EGL_CONDITION_SATISFIED_KHR;

   /* The EGL_KHR_fence_sync spec states:
    *
    *    "If no context is current for the bound API,
    *     the EGL_SYNC_FLUSH_COMMANDS_BIT_KHR bit is ignored."
    */
   if (dri2_ctx && flags & EGL_SYNC_FLUSH_COMMANDS_BIT_KHR)
      wait_flags |= __DRI2_FENCE_FLAG_FLUSH_COMMANDS;

   /* the sync object should take a reference while waiting */
   dri2_egl_ref_sync(dri2_sync);

   switch (sync->Type) {
   case EGL_SYNC_FENCE_KHR:
   case EGL_SYNC_NATIVE_FENCE_ANDROID:
   case EGL_SYNC_CL_EVENT_KHR:
      if (dri2_dpy->fence->client_wait_sync(
             dri2_ctx ? dri2_ctx->dri_context : NULL, dri2_sync->fence,
             wait_flags, timeout))
         dri2_sync->base.SyncStatus = EGL_SIGNALED_KHR;
      else
         ret = EGL_TIMEOUT_EXPIRED_KHR;
      break;

   case EGL_SYNC_REUSABLE_KHR:
      if (dri2_ctx && dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR &&
          (flags & EGL_SYNC_FLUSH_COMMANDS_BIT_KHR)) {
         /* flush context if EGL_SYNC_FLUSH_COMMANDS_BIT_KHR is set */
         dri2_gl_flush();
      }

      /* if timeout is EGL_FOREVER_KHR, wait without any timeout. */
      if (timeout == EGL_FOREVER_KHR) {
         mtx_lock(&dri2_sync->mutex);
         cnd_wait(&dri2_sync->cond, &dri2_sync->mutex);
         mtx_unlock(&dri2_sync->mutex);
      } else {
         /* if the reusable sync has not yet been signaled */
         if (dri2_sync->base.SyncStatus != EGL_SIGNALED_KHR) {
            /* timespecs for cnd_timedwait */
            struct timespec current;
            struct timespec expire;

            /* We override the clock to monotonic when creating the condition
             * variable. */
            clock_gettime(CLOCK_MONOTONIC, &current);

            /* calculate the absolute expiration time */
            expire.tv_nsec = timeout % 1000000000L;
            expire.tv_sec = timeout / 1000000000L;

            expire.tv_nsec += current.tv_nsec;
            expire.tv_sec += current.tv_sec;

            /* expire.tv_nsec is now a number between 0 and 1999999998 */
            if (expire.tv_nsec > 999999999L) {
               expire.tv_sec++;
               expire.tv_nsec -= 1000000000L;
            }

            mtx_lock(&dri2_sync->mutex);
            ret = cnd_timedwait(&dri2_sync->cond, &dri2_sync->mutex, &expire);
            mtx_unlock(&dri2_sync->mutex);

            if (ret == thrd_timedout) {
               if (dri2_sync->base.SyncStatus == EGL_UNSIGNALED_KHR) {
                  ret = EGL_TIMEOUT_EXPIRED_KHR;
               } else {
                  _eglError(EGL_BAD_ACCESS, "eglClientWaitSyncKHR");
                  ret = EGL_FALSE;
               }
            }
         }
      }
      break;
   }

   dri2_egl_unref_sync(dri2_dpy, dri2_sync);

   return ret;
}

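/* Entry point for eglSignalSyncKHR (EGL_KHR_reusable_sync). Only reusable
 * syncs can be signaled explicitly; signaling wakes every thread currently
 * blocked in dri2_client_wait_sync via cnd_broadcast, while switching back
 * to EGL_UNSIGNALED_KHR simply re-arms the sync.
 */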
static EGLBoolean
dri2_signal_sync(_EGLDisplay *disp, _EGLSync *sync, EGLenum mode)
{
   struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);
   EGLint ret;

   if (sync->Type != EGL_SYNC_REUSABLE_KHR)
      return _eglError(EGL_BAD_MATCH, "eglSignalSyncKHR");

   if (mode != EGL_SIGNALED_KHR && mode != EGL_UNSIGNALED_KHR)
      return _eglError(EGL_BAD_ATTRIBUTE, "eglSignalSyncKHR");

   dri2_sync->base.SyncStatus = mode;

   if (mode == EGL_SIGNALED_KHR) {
      ret = cnd_broadcast(&dri2_sync->cond);

      /* failed to broadcast */
      if (ret)
         return _eglError(EGL_BAD_ACCESS, "eglSignalSyncKHR");
   }

   return EGL_TRUE;
}

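/* Entry point for eglWaitSyncKHR. Unlike the client wait above, this queues
 * a wait on the GPU/server side through the DRI fence extension, so later
 * commands in the current context are ordered after the fence without
 * blocking the calling thread.
 */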
static EGLint
dri2_server_wait_sync(_EGLDisplay *disp, _EGLSync *sync)
{
   _EGLContext *ctx = _eglGetCurrentContext();
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);
   struct dri2_egl_sync *dri2_sync = dri2_egl_sync(sync);

   dri2_dpy->fence->server_wait_sync(dri2_ctx->dri_context, dri2_sync->fence,
                                     0);
   return EGL_TRUE;
}

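/* MESA_GLINTEROP hooks (see GL/mesa_glinterop.h): these let an API sharing
 * the device (typically an OpenCL implementation) query the EGL context and
 * export or flush GL objects for its own use. They are thin wrappers that
 * forward to the driver's interop extension when it is present and report
 * MESA_GLINTEROP_UNSUPPORTED otherwise.
 */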
static int
dri2_interop_query_device_info(_EGLDisplay *disp, _EGLContext *ctx,
                               struct mesa_glinterop_device_info *out)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);

   if (!dri2_dpy->interop)
      return MESA_GLINTEROP_UNSUPPORTED;

   return dri2_dpy->interop->query_device_info(dri2_ctx->dri_context, out);
}

static int
dri2_interop_export_object(_EGLDisplay *disp, _EGLContext *ctx,
                           struct mesa_glinterop_export_in *in,
                           struct mesa_glinterop_export_out *out)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);

   if (!dri2_dpy->interop)
      return MESA_GLINTEROP_UNSUPPORTED;

   return dri2_dpy->interop->export_object(dri2_ctx->dri_context, in, out);
}

static int
dri2_interop_flush_objects(_EGLDisplay *disp, _EGLContext *ctx, unsigned count,
                           struct mesa_glinterop_export_in *objects,
                           struct mesa_glinterop_flush_out *out)
{
   struct dri2_egl_display *dri2_dpy = dri2_egl_display(disp);
   struct dri2_egl_context *dri2_ctx = dri2_egl_context(ctx);

   if (!dri2_dpy->interop || dri2_dpy->interop->base.version < 2)
      return MESA_GLINTEROP_UNSUPPORTED;

   return dri2_dpy->interop->flush_objects(dri2_ctx->dri_context, count,
                                           objects, out);
}

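/* The driver vtable handed to the generic EGL core. Each entry implements
 * one EGL entry point (or extension entry point) for the DRI2-based
 * platforms; entries guarded by #ifdef are only provided when the
 * corresponding platform or libdrm support was compiled in.
 */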
const _EGLDriver _eglDriver = {
   .Initialize = dri2_initialize,
   .Terminate = dri2_terminate,
   .CreateContext = dri2_create_context,
   .DestroyContext = dri2_destroy_context,
   .MakeCurrent = dri2_make_current,
   .CreateWindowSurface = dri2_create_window_surface,
   .CreatePixmapSurface = dri2_create_pixmap_surface,
   .CreatePbufferSurface = dri2_create_pbuffer_surface,
   .DestroySurface = dri2_destroy_surface,
   .WaitClient = dri2_wait_client,
   .WaitNative = dri2_wait_native,
   .BindTexImage = dri2_bind_tex_image,
   .ReleaseTexImage = dri2_release_tex_image,
   .SwapInterval = dri2_swap_interval,
   .SwapBuffers = dri2_swap_buffers,
   .SwapBuffersWithDamageEXT = dri2_swap_buffers_with_damage,
   .SwapBuffersRegionNOK = dri2_swap_buffers_region,
   .SetDamageRegion = dri2_set_damage_region,
   .PostSubBufferNV = dri2_post_sub_buffer,
   .CopyBuffers = dri2_copy_buffers,
   .QueryBufferAge = dri2_query_buffer_age,
   .CreateImageKHR = dri2_create_image,
   .DestroyImageKHR = dri2_destroy_image_khr,
   .CreateWaylandBufferFromImageWL = dri2_create_wayland_buffer_from_image,
   .QuerySurface = dri2_query_surface,
   .QueryDriverName = dri2_query_driver_name,
   .QueryDriverConfig = dri2_query_driver_config,
#ifdef HAVE_LIBDRM
   .CreateDRMImageMESA = dri2_create_drm_image_mesa,
   .ExportDRMImageMESA = dri2_export_drm_image_mesa,
   .ExportDMABUFImageQueryMESA = dri2_export_dma_buf_image_query_mesa,
   .ExportDMABUFImageMESA = dri2_export_dma_buf_image_mesa,
   .QueryDmaBufFormatsEXT = dri2_query_dma_buf_formats,
   .QueryDmaBufModifiersEXT = dri2_query_dma_buf_modifiers,
#endif
#ifdef HAVE_WAYLAND_PLATFORM
   .BindWaylandDisplayWL = dri2_bind_wayland_display_wl,
   .UnbindWaylandDisplayWL = dri2_unbind_wayland_display_wl,
   .QueryWaylandBufferWL = dri2_query_wayland_buffer_wl,
#endif
   .GetSyncValuesCHROMIUM = dri2_get_sync_values_chromium,
   .GetMscRateANGLE = dri2_get_msc_rate_angle,
   .CreateSyncKHR = dri2_create_sync,
   .ClientWaitSyncKHR = dri2_client_wait_sync,
   .SignalSyncKHR = dri2_signal_sync,
   .WaitSyncKHR = dri2_server_wait_sync,
   .DestroySyncKHR = dri2_destroy_sync,
   .GLInteropQueryDeviceInfo = dri2_interop_query_device_info,
   .GLInteropExportObject = dri2_interop_export_object,
   .GLInteropFlushObjects = dri2_interop_flush_objects,
   .DupNativeFenceFDANDROID = dri2_dup_native_fence_fd,
   .SetBlobCacheFuncsANDROID = dri2_set_blob_cache_funcs,
};