/**************************************************************************
 *
 * Copyright 2010 Thomas Balling Sørensen.
 * Copyright 2011 Christian König.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include <assert.h>

#include "pipe/p_state.h"

#include "util/u_memory.h"
#include "util/u_debug.h"
#include "util/u_rect.h"
#include "util/u_surface.h"
#include "util/u_video.h"
#include "vl/vl_defines.h"

#include "frontend/drm_driver.h"

#include "vdpau_private.h"

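/*
 * In-place conversions that Get/PutBitsYCbCr can apply when the
 * application's YCbCr format differs from the video buffer's format.
 */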
enum getbits_conversion {
   CONVERSION_NONE,
   CONVERSION_NV12_TO_YV12,
   CONVERSION_YV12_TO_NV12,
   CONVERSION_SWAP_YUYV_UYVY,
};

/**
 * Create a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceCreate(VdpDevice device, VdpChromaType chroma_type,
                        uint32_t width, uint32_t height,
                        VdpVideoSurface *surface)
{
   struct pipe_context *pipe;
   vlVdpSurface *p_surf;
   VdpStatus ret;

   if (!(width && height)) {
      ret = VDP_STATUS_INVALID_SIZE;
      goto inv_size;
   }

   p_surf = CALLOC(1, sizeof(vlVdpSurface));
   if (!p_surf) {
      ret = VDP_STATUS_RESOURCES;
      goto no_res;
   }

   vlVdpDevice *dev = vlGetDataHTAB(device);
   if (!dev) {
      ret = VDP_STATUS_INVALID_HANDLE;
      goto inv_device;
   }

   DeviceReference(&p_surf->device, dev);
   pipe = dev->context;

   mtx_lock(&dev->mutex);
   memset(&p_surf->templat, 0, sizeof(p_surf->templat));
   /* TODO: buffer_format should be selected to match chroma_type */
   p_surf->templat.buffer_format = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERED_FORMAT
   );
   p_surf->templat.width = width;
   p_surf->templat.height = height;
   p_surf->templat.interlaced = pipe->screen->get_video_param
   (
      pipe->screen,
      PIPE_VIDEO_PROFILE_UNKNOWN,
      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
      PIPE_VIDEO_CAP_PREFERS_INTERLACED
   );
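   /* Only allocate the buffer up front when the driver reported a usable
    * preferred format; otherwise allocation is deferred until the buffer
    * is actually needed (e.g. PutBits, decoding, or interop). */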
   if (p_surf->templat.buffer_format != PIPE_FORMAT_NONE)
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);

   /* do not mandate early allocation of a video buffer */
   vlVdpVideoSurfaceClear(p_surf);
   mtx_unlock(&dev->mutex);

   *surface = vlAddDataHTAB(p_surf);
   if (*surface == 0) {
      ret = VDP_STATUS_ERROR;
      goto no_handle;
   }

   return VDP_STATUS_OK;

no_handle:
   if (p_surf->video_buffer)
      p_surf->video_buffer->destroy(p_surf->video_buffer);

inv_device:
   DeviceReference(&p_surf->device, NULL);
   FREE(p_surf);

no_res:
inv_size:
   return ret;
}

/**
 * Destroy a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceDestroy(VdpVideoSurface surface)
{
   vlVdpSurface *p_surf;

   p_surf = (vlVdpSurface *)vlGetDataHTAB((vlHandle)surface);
   if (!p_surf)
      return VDP_STATUS_INVALID_HANDLE;

   mtx_lock(&p_surf->device->mutex);
   if (p_surf->video_buffer)
      p_surf->video_buffer->destroy(p_surf->video_buffer);
   mtx_unlock(&p_surf->device->mutex);

   vlRemoveDataHTAB(surface);
   DeviceReference(&p_surf->device, NULL);
   FREE(p_surf);

   return VDP_STATUS_OK;
}

/**
 * Retrieve the parameters used to create a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfaceGetParameters(VdpVideoSurface surface,
                               VdpChromaType *chroma_type,
                               uint32_t *width, uint32_t *height)
{
   if (!(width && height && chroma_type))
      return VDP_STATUS_INVALID_POINTER;

   vlVdpSurface *p_surf = vlGetDataHTAB(surface);
   if (!p_surf)
      return VDP_STATUS_INVALID_HANDLE;

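   /* Report the dimensions and chroma type of the allocated buffer if we
    * have one; fall back to the creation template when allocation was
    * deferred. */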
   if (p_surf->video_buffer) {
      *width = p_surf->video_buffer->width;
      *height = p_surf->video_buffer->height;
      *chroma_type = PipeToChroma(pipe_format_to_chroma_format(p_surf->video_buffer->buffer_format));
   } else {
      *width = p_surf->templat.width;
      *height = p_surf->templat.height;
      *chroma_type = PipeToChroma(pipe_format_to_chroma_format(p_surf->templat.buffer_format));
   }

   return VDP_STATUS_OK;
}

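/**
 * Compute the size of one plane of the surface, taking chroma subsampling
 * and interlacing into account.
 */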
static void
vlVdpVideoSurfaceSize(vlVdpSurface *p_surf, int component,
                      unsigned *width, unsigned *height)
{
   *width = p_surf->templat.width;
   *height = p_surf->templat.height;

   vl_video_buffer_adjust_size(width, height, component,
                               pipe_format_to_chroma_format(p_surf->templat.buffer_format),
                               p_surf->templat.interlaced);
}

/**
 * Copy image data from a VdpVideoSurface to application memory in a specified
 * YCbCr format.
 */
VdpStatus
vlVdpVideoSurfaceGetBitsYCbCr(VdpVideoSurface surface,
                              VdpYCbCrFormat destination_ycbcr_format,
                              void *const *destination_data,
                              uint32_t const *destination_pitches)
{
   vlVdpSurface *vlsurface;
   struct pipe_context *pipe;
   enum pipe_format format, buffer_format;
   struct pipe_sampler_view **sampler_views;
   enum getbits_conversion conversion = CONVERSION_NONE;
   unsigned i, j;

   vlsurface = vlGetDataHTAB(surface);
   if (!vlsurface)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = vlsurface->device->context;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   if (!destination_data || !destination_pitches)
      return VDP_STATUS_INVALID_POINTER;

   format = FormatYCBCRToPipe(destination_ycbcr_format);
   if (format == PIPE_FORMAT_NONE)
      return VDP_STATUS_INVALID_Y_CB_CR_FORMAT;

   if (vlsurface->video_buffer == NULL)
      return VDP_STATUS_INVALID_VALUE;

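   /* If the requested format differs from the buffer's, only a small set of
    * in-place conversions is supported; anything else is unimplemented. */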
   buffer_format = vlsurface->video_buffer->buffer_format;
   if (format != buffer_format) {
      if (format == PIPE_FORMAT_YV12 && buffer_format == PIPE_FORMAT_NV12)
         conversion = CONVERSION_NV12_TO_YV12;
      else if (format == PIPE_FORMAT_NV12 && buffer_format == PIPE_FORMAT_YV12)
         conversion = CONVERSION_YV12_TO_NV12;
      else if ((format == PIPE_FORMAT_YUYV && buffer_format == PIPE_FORMAT_UYVY) ||
               (format == PIPE_FORMAT_UYVY && buffer_format == PIPE_FORMAT_YUYV))
         conversion = CONVERSION_SWAP_YUYV_UYVY;
      else
         return VDP_STATUS_NO_IMPLEMENTATION;
   }

   mtx_lock(&vlsurface->device->mutex);
   sampler_views = vlsurface->video_buffer->get_sampler_view_planes(vlsurface->video_buffer);
   if (!sampler_views) {
      mtx_unlock(&vlsurface->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

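   /* Copy out each plane; interlaced buffers store the two fields as
    * separate array layers, hence the inner loop over array_size. */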
   for (i = 0; i < 3; ++i) {
      unsigned width, height;
      struct pipe_sampler_view *sv = sampler_views[i];
      if (!sv) continue;

      vlVdpVideoSurfaceSize(vlsurface, i, &width, &height);

      for (j = 0; j < sv->texture->array_size; ++j) {
         struct pipe_box box = {
            0, 0, j,
            width, height, 1
         };
         struct pipe_transfer *transfer;
         uint8_t *map;

         map = pipe->transfer_map(pipe, sv->texture, 0,
                                  PIPE_MAP_READ, &box, &transfer);
         if (!map) {
            mtx_unlock(&vlsurface->device->mutex);
            return VDP_STATUS_RESOURCES;
         }

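         /* NV12 keeps Cb/Cr interleaved in a single plane while YV12 uses two
          * separate planes, so the chroma plane (i == 1) is de-interleaved on
          * the way out; the packed 4:2:2 case just swaps the byte order. */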
         if (conversion == CONVERSION_NV12_TO_YV12 && i == 1) {
            u_copy_nv12_to_yv12(destination_data, destination_pitches,
                                i, j, transfer->stride, sv->texture->array_size,
                                map, box.width, box.height);
         } else if (conversion == CONVERSION_YV12_TO_NV12 && i > 0) {
            u_copy_yv12_to_nv12(destination_data, destination_pitches,
                                i, j, transfer->stride, sv->texture->array_size,
                                map, box.width, box.height);
         } else if (conversion == CONVERSION_SWAP_YUYV_UYVY) {
            u_copy_swap422_packed(destination_data, destination_pitches,
                                  i, j, transfer->stride, sv->texture->array_size,
                                  map, box.width, box.height);
         } else {
            util_copy_rect(destination_data[i] + destination_pitches[i] * j, sv->texture->format,
                           destination_pitches[i] * sv->texture->array_size, 0, 0,
                           box.width, box.height, map, transfer->stride, 0, 0);
         }

         pipe_transfer_unmap(pipe, transfer);
      }
   }
   mtx_unlock(&vlsurface->device->mutex);

   return VDP_STATUS_OK;
}

/**
 * Copy image data from application memory in a specific YCbCr format to
 * a VdpVideoSurface.
 */
VdpStatus
vlVdpVideoSurfacePutBitsYCbCr(VdpVideoSurface surface,
                              VdpYCbCrFormat source_ycbcr_format,
                              void const *const *source_data,
                              uint32_t const *source_pitches)
{
   enum pipe_format pformat = FormatYCBCRToPipe(source_ycbcr_format);
   enum getbits_conversion conversion = CONVERSION_NONE;
   struct pipe_context *pipe;
   struct pipe_sampler_view **sampler_views;
   unsigned i, j;
   unsigned usage = PIPE_MAP_WRITE;

   vlVdpSurface *p_surf = vlGetDataHTAB(surface);
   if (!p_surf)
      return VDP_STATUS_INVALID_HANDLE;

   pipe = p_surf->device->context;
   if (!pipe)
      return VDP_STATUS_INVALID_HANDLE;

   if (!source_data || !source_pitches)
      return VDP_STATUS_INVALID_POINTER;

   mtx_lock(&p_surf->device->mutex);

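   /* If the surface has no buffer yet, or its format doesn't match the
    * incoming data, (re)create the buffer with the best format the driver
    * supports. */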
   if (p_surf->video_buffer == NULL ||
       pformat != p_surf->video_buffer->buffer_format) {
      enum pipe_format nformat = pformat;
      struct pipe_screen *screen = pipe->screen;

      /* Determine the most suitable format for the new surface */
      if (!screen->is_video_format_supported(screen, nformat,
                                             PIPE_VIDEO_PROFILE_UNKNOWN,
                                             PIPE_VIDEO_ENTRYPOINT_BITSTREAM)) {
         nformat = screen->get_video_param(screen,
                                           PIPE_VIDEO_PROFILE_UNKNOWN,
                                           PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                           PIPE_VIDEO_CAP_PREFERED_FORMAT);
         if (nformat == PIPE_FORMAT_NONE) {
            mtx_unlock(&p_surf->device->mutex);
            return VDP_STATUS_NO_IMPLEMENTATION;
         }
      }

      if (p_surf->video_buffer == NULL ||
          nformat != p_surf->video_buffer->buffer_format) {
         /* destroy the old one */
         if (p_surf->video_buffer)
            p_surf->video_buffer->destroy(p_surf->video_buffer);

         /* adjust the template parameters */
         p_surf->templat.buffer_format = nformat;
         if (nformat == PIPE_FORMAT_YUYV || nformat == PIPE_FORMAT_UYVY)
            p_surf->templat.interlaced = false;

         /* and try to create the video buffer with the new format */
         p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);

         /* still no luck? then we simply don't support this format */
         if (!p_surf->video_buffer) {
            mtx_unlock(&p_surf->device->mutex);
            return VDP_STATUS_NO_IMPLEMENTATION;
         }
         vlVdpVideoSurfaceClear(p_surf);
      }
   }

   if (pformat != p_surf->video_buffer->buffer_format) {
      if (pformat == PIPE_FORMAT_YV12 &&
          p_surf->video_buffer->buffer_format == PIPE_FORMAT_NV12)
         conversion = CONVERSION_YV12_TO_NV12;
      else {
         mtx_unlock(&p_surf->device->mutex);
         return VDP_STATUS_NO_IMPLEMENTATION;
      }
   }

   sampler_views = p_surf->video_buffer->get_sampler_view_planes(p_surf->video_buffer);
   if (!sampler_views) {
      mtx_unlock(&p_surf->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   for (i = 0; i < 3; ++i) {
      unsigned width, height;
      struct pipe_sampler_view *sv = sampler_views[i];
      struct pipe_resource *tex;
      if (!sv || !source_pitches[i]) continue;

      tex = sv->texture;
      vlVdpVideoSurfaceSize(p_surf, i, &width, &height);

      for (j = 0; j < tex->array_size; ++j) {
         struct pipe_box dst_box = {
            0, 0, j,
            width, height, 1
         };

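         /* Converting YV12 to NV12 means interleaving the separate Cb and Cr
          * source planes into the buffer's single chroma plane, which needs a
          * mapped write instead of a plain texture_subdata upload. */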
         if (conversion == CONVERSION_YV12_TO_NV12 && i == 1) {
            struct pipe_transfer *transfer;
            uint8_t *map;

            map = pipe->transfer_map(pipe, tex, 0, usage,
                                     &dst_box, &transfer);
            if (!map) {
               mtx_unlock(&p_surf->device->mutex);
               return VDP_STATUS_RESOURCES;
            }

            u_copy_nv12_from_yv12(source_data, source_pitches,
                                  i, j, transfer->stride, tex->array_size,
                                  map, dst_box.width, dst_box.height);

            pipe_transfer_unmap(pipe, transfer);
         } else {
            pipe->texture_subdata(pipe, tex, 0,
                                  PIPE_MAP_WRITE, &dst_box,
                                  source_data[i] + source_pitches[i] * j,
                                  source_pitches[i] * tex->array_size,
                                  0);
         }
         /*
          * The first mapping already synchronized this surface with any
          * pending GPU work, so later maps in this call can skip the wait.
          */
         usage |= PIPE_MAP_UNSYNCHRONIZED;
      }
   }
   mtx_unlock(&p_surf->device->mutex);

   return VDP_STATUS_OK;
}

/**
 * Helper function to initially clear the VideoSurface after (re-)creation.
 */
void
vlVdpVideoSurfaceClear(vlVdpSurface *vlsurf)
{
   struct pipe_context *pipe = vlsurf->device->context;
   struct pipe_surface **surfaces;
   unsigned i;

   if (!vlsurf->video_buffer)
      return;

   surfaces = vlsurf->video_buffer->get_surfaces(vlsurf->video_buffer);
   for (i = 0; i < VL_MAX_SURFACES; ++i) {
      union pipe_color_union c = {};

      if (!surfaces[i])
         continue;

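      /* Planes past the luma field(s) hold chroma; clear them to 0.5 (neutral
       * chroma) so an uninitialized surface comes out black instead of green. */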
      if (i > !!vlsurf->templat.interlaced)
         c.f[0] = c.f[1] = c.f[2] = c.f[3] = 0.5f;

      pipe->clear_render_target(pipe, surfaces[i], &c, 0, 0,
                                surfaces[i]->width, surfaces[i]->height, false);
   }
   pipe->flush(pipe, NULL, 0);
}

/**
 * Interop for the GL gallium frontend
 */
struct pipe_video_buffer *vlVdpVideoSurfaceGallium(VdpVideoSurface surface)
{
   vlVdpSurface *p_surf = vlGetDataHTAB(surface);
   if (!p_surf)
      return NULL;

   mtx_lock(&p_surf->device->mutex);
   if (p_surf->video_buffer == NULL) {
      struct pipe_context *pipe = p_surf->device->context;

      /* try to create a video buffer if we don't already have one */
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);
   }
   mtx_unlock(&p_surf->device->mutex);

   return p_surf->video_buffer;
}

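/**
 * Export one plane of a VdpVideoSurface as a DMA-buf descriptor for
 * zero-copy interop.
 */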
VdpStatus vlVdpVideoSurfaceDMABuf(VdpVideoSurface surface,
                                  VdpVideoSurfacePlane plane,
                                  struct VdpSurfaceDMABufDesc *result)
{
   vlVdpSurface *p_surf = vlGetDataHTAB(surface);

   struct pipe_screen *pscreen;
   struct winsys_handle whandle;

   struct pipe_surface *surf;

   if (!p_surf)
      return VDP_STATUS_INVALID_HANDLE;

   if (plane > 3)
      return VDP_STATUS_INVALID_VALUE;

   if (!result)
      return VDP_STATUS_INVALID_POINTER;

   memset(result, 0, sizeof(*result));
   result->handle = -1;

   mtx_lock(&p_surf->device->mutex);
   if (p_surf->video_buffer == NULL) {
      struct pipe_context *pipe = p_surf->device->context;

      /* try to create a video buffer if we don't already have one */
      p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);
   }

   /* Check that the surface meets the interop requirements: only interlaced
    * NV12 buffers can be exported through this path. */
   if (p_surf->video_buffer == NULL || !p_surf->video_buffer->interlaced ||
       p_surf->video_buffer->buffer_format != PIPE_FORMAT_NV12) {
      mtx_unlock(&p_surf->device->mutex);
      return VDP_STATUS_NO_IMPLEMENTATION;
   }

   surf = p_surf->video_buffer->get_surfaces(p_surf->video_buffer)[plane];
   if (!surf) {
      mtx_unlock(&p_surf->device->mutex);
      return VDP_STATUS_RESOURCES;
   }

   memset(&whandle, 0, sizeof(struct winsys_handle));
   whandle.type = WINSYS_HANDLE_TYPE_FD;
   whandle.layer = surf->u.tex.first_layer;

   pscreen = surf->texture->screen;
   if (!pscreen->resource_get_handle(pscreen, p_surf->device->context,
                                     surf->texture, &whandle,
                                     PIPE_HANDLE_USAGE_FRAMEBUFFER_WRITE)) {
      mtx_unlock(&p_surf->device->mutex);
      return VDP_STATUS_NO_IMPLEMENTATION;
   }

   mtx_unlock(&p_surf->device->mutex);

   result->handle = whandle.handle;
   result->width = surf->width;
   result->height = surf->height;
   result->offset = whandle.offset;
   result->stride = whandle.stride;

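   /* NV12 luma planes are exported as R8, the interleaved chroma plane as
    * R8G8. */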
   if (surf->format == PIPE_FORMAT_R8_UNORM)
      result->format = VDP_RGBA_FORMAT_R8;
   else
      result->format = VDP_RGBA_FORMAT_R8G8;

   return VDP_STATUS_OK;
}