/**************************************************************************
 *
 * Copyright 2010 Thomas Balling Sørensen & Orasanu Lucian.
 * Copyright 2014 Advanced Micro Devices, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include "pipe/p_screen.h"
#include "frontend/drm_driver.h"
#include "util/u_memory.h"
#include "util/u_handle_table.h"
#include "util/u_transfer.h"
#include "vl/vl_winsys.h"

#include "va_private.h"

#ifdef _WIN32
#include <va/va_win32.h>
#endif

#ifndef VA_MAPBUFFER_FLAG_DEFAULT
#define VA_MAPBUFFER_FLAG_DEFAULT 0
#define VA_MAPBUFFER_FLAG_READ 1
#define VA_MAPBUFFER_FLAG_WRITE 2
#endif

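/*
 * vaCreateBuffer entrypoint: allocates a vlVaBuffer of the requested type.
 * Coded (VAEncCodedBufferType) buffers get a single VACodedBufferSegment
 * that is filled in at map time; all other types get size * num_elements
 * bytes, optionally initialized from the caller-provided data.
 */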
VAStatus
vlVaCreateBuffer(VADriverContextP ctx, VAContextID context, VABufferType type,
                 unsigned int size, unsigned int num_elements, void *data,
                 VABufferID *buf_id)
{
   vlVaDriver *drv;
   vlVaBuffer *buf;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   buf = CALLOC(1, sizeof(vlVaBuffer));
   if (!buf)
      return VA_STATUS_ERROR_ALLOCATION_FAILED;

   buf->type = type;
   buf->size = size;
   buf->num_elements = num_elements;

   if (buf->type == VAEncCodedBufferType)
      buf->data = CALLOC(1, sizeof(VACodedBufferSegment));
   else
      buf->data = MALLOC(size * num_elements);

   if (!buf->data) {
      FREE(buf);
      return VA_STATUS_ERROR_ALLOCATION_FAILED;
   }

   if (data)
      memcpy(buf->data, data, size * num_elements);

   drv = VL_VA_DRIVER(ctx);
   mtx_lock(&drv->mutex);
   *buf_id = handle_table_add(drv->htab, buf);
   mtx_unlock(&drv->mutex);

   return VA_STATUS_SUCCESS;
}

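/*
 * vaBufferSetNumElements entrypoint: grows or shrinks the CPU backing store
 * of a plain buffer. Buffers backed by a derived surface resource cannot be
 * resized.
 */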
VAStatus
vlVaBufferSetNumElements(VADriverContextP ctx, VABufferID buf_id,
                         unsigned int num_elements)
{
   vlVaDriver *drv;
   vlVaBuffer *buf;
   void *data;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   mtx_lock(&drv->mutex);
   buf = handle_table_get(drv->htab, buf_id);
   mtx_unlock(&drv->mutex);
   if (!buf)
      return VA_STATUS_ERROR_INVALID_BUFFER;

   if (buf->derived_surface.resource)
      return VA_STATUS_ERROR_INVALID_BUFFER;

   data = REALLOC(buf->data, buf->size * buf->num_elements,
                  buf->size * num_elements);
   if (!data)
      return VA_STATUS_ERROR_ALLOCATION_FAILED;

   buf->data = data;
   buf->num_elements = num_elements;

   return VA_STATUS_SUCCESS;
}

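/* vaMapBuffer entrypoint: maps with the default (type-derived) access flags. */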
VAStatus
vlVaMapBuffer(VADriverContextP ctx, VABufferID buf_id, void **pbuff)
{
   return vlVaMapBuffer2(ctx, buf_id, pbuff, VA_MAPBUFFER_FLAG_DEFAULT);
}

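/*
 * vaMapBuffer2 entrypoint: returns a CPU pointer to the buffer contents.
 * Buffers backed by a derived surface resource are mapped through the pipe
 * context with an access mask derived from the map flags and buffer type;
 * plain buffers simply expose their malloc'd storage. For coded buffers the
 * encoder feedback is turned into a VACodedBufferSegment list describing the
 * compressed bitstream.
 */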
VAStatus vlVaMapBuffer2(VADriverContextP ctx, VABufferID buf_id,
                        void **pbuff, uint32_t flags)
{
   vlVaDriver *drv;
   vlVaBuffer *buf;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   if (!pbuff)
      return VA_STATUS_ERROR_INVALID_PARAMETER;

   mtx_lock(&drv->mutex);
   buf = handle_table_get(drv->htab, buf_id);
   if (!buf || buf->export_refcount > 0) {
      mtx_unlock(&drv->mutex);
      return VA_STATUS_ERROR_INVALID_BUFFER;
   }

   if (buf->derived_surface.resource) {
      struct pipe_resource *resource;
      struct pipe_box box;
      unsigned usage = 0;
      void *(*map_func)(struct pipe_context *,
                        struct pipe_resource *resource,
                        unsigned level,
                        unsigned usage, /* a combination of PIPE_MAP_x */
                        const struct pipe_box *,
                        struct pipe_transfer **out_transfer);

      memset(&box, 0, sizeof(box));
      resource = buf->derived_surface.resource;
      box.width = resource->width0;
      box.height = resource->height0;
      box.depth = resource->depth0;

      if (resource->target == PIPE_BUFFER)
         map_func = drv->pipe->buffer_map;
      else
         map_func = drv->pipe->texture_map;

      if (flags == VA_MAPBUFFER_FLAG_DEFAULT) {
         /* For VAImageBufferType, use PIPE_MAP_WRITE for now;
          * PIPE_MAP_READ_WRITE degrades performance with two copies on map/unmap. */
         if (buf->type == VAEncCodedBufferType)
            usage = PIPE_MAP_READ;
         else
            usage = PIPE_MAP_WRITE;

         /* Map decoder and postproc surfaces also for reading. */
         if (buf->derived_surface.entrypoint == PIPE_VIDEO_ENTRYPOINT_BITSTREAM ||
             buf->derived_surface.entrypoint == PIPE_VIDEO_ENTRYPOINT_PROCESSING)
            usage |= PIPE_MAP_READ;
      }

      if (flags & VA_MAPBUFFER_FLAG_READ)
         usage |= PIPE_MAP_READ;
      if (flags & VA_MAPBUFFER_FLAG_WRITE)
         usage |= PIPE_MAP_WRITE;

      assert(usage);

      *pbuff = map_func(drv->pipe, resource, 0, usage,
                        &box, &buf->derived_surface.transfer);
      mtx_unlock(&drv->mutex);

      if (!buf->derived_surface.transfer || !*pbuff)
         return VA_STATUS_ERROR_INVALID_BUFFER;

      if (buf->type == VAEncCodedBufferType) {
         VACodedBufferSegment *curr_buf_ptr = (VACodedBufferSegment *)buf->data;

         if ((buf->extended_metadata.present_metadata & PIPE_VIDEO_FEEDBACK_METADATA_TYPE_ENCODE_RESULT) &&
             (buf->extended_metadata.encode_result & PIPE_VIDEO_FEEDBACK_METADATA_ENCODE_FLAG_FAILED)) {
            curr_buf_ptr->status = VA_CODED_BUF_STATUS_BAD_BITSTREAM;
            return VA_STATUS_ERROR_OPERATION_FAILED;
         }

         curr_buf_ptr->status = (buf->extended_metadata.average_frame_qp & VA_CODED_BUF_STATUS_PICTURE_AVE_QP_MASK);
         if (buf->extended_metadata.encode_result & PIPE_VIDEO_FEEDBACK_METADATA_ENCODE_FLAG_MAX_FRAME_SIZE_OVERFLOW)
            curr_buf_ptr->status |= VA_CODED_BUF_STATUS_FRAME_SIZE_OVERFLOW;

         if ((buf->extended_metadata.present_metadata & PIPE_VIDEO_FEEDBACK_METADATA_TYPE_CODEC_UNIT_LOCATION) == 0) {
            curr_buf_ptr->buf = *pbuff;
            curr_buf_ptr->size = buf->coded_size;
            *pbuff = buf->data;
         } else {
            uint8_t *compressed_bitstream_data = *pbuff;
            *pbuff = buf->data;

            /* Grow the segment list to one node per codec unit. */
            for (size_t i = 0; i < buf->extended_metadata.codec_unit_metadata_count - 1; i++) {
               curr_buf_ptr->next = CALLOC(1, sizeof(VACodedBufferSegment));
               if (!curr_buf_ptr->next)
                  return VA_STATUS_ERROR_ALLOCATION_FAILED;
               curr_buf_ptr = curr_buf_ptr->next;
            }
            curr_buf_ptr->next = NULL;

            /* Point each segment at its codec unit inside the mapped bitstream. */
            curr_buf_ptr = buf->data;
            for (size_t i = 0; i < buf->extended_metadata.codec_unit_metadata_count; i++) {
               curr_buf_ptr->status |= VA_CODED_BUF_STATUS_SINGLE_NALU;
               curr_buf_ptr->size = buf->extended_metadata.codec_unit_metadata[i].size;
               curr_buf_ptr->buf = compressed_bitstream_data + buf->extended_metadata.codec_unit_metadata[i].offset;
               if (buf->extended_metadata.codec_unit_metadata[i].flags & PIPE_VIDEO_CODEC_UNIT_LOCATION_FLAG_MAX_SLICE_SIZE_OVERFLOW)
                  curr_buf_ptr->status |= VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;

               curr_buf_ptr = curr_buf_ptr->next;
            }
         }
      }
   } else {
      mtx_unlock(&drv->mutex);
      *pbuff = buf->data;
   }

   return VA_STATUS_SUCCESS;
}

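/*
 * vaUnmapBuffer entrypoint: releases the pipe transfer created by
 * vlVaMapBuffer2 for derived-surface buffers (and flushes for image
 * buffers). Plain buffers need no unmapping.
 */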
VAStatus
vlVaUnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
{
   vlVaDriver *drv;
   vlVaBuffer *buf;
   struct pipe_resource *resource;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   mtx_lock(&drv->mutex);
   buf = handle_table_get(drv->htab, buf_id);
   if (!buf || buf->export_refcount > 0) {
      mtx_unlock(&drv->mutex);
      return VA_STATUS_ERROR_INVALID_BUFFER;
   }

   resource = buf->derived_surface.resource;
   if (resource) {
      void (*unmap_func)(struct pipe_context *pipe,
                         struct pipe_transfer *transfer);

      if (!buf->derived_surface.transfer) {
         mtx_unlock(&drv->mutex);
         return VA_STATUS_ERROR_INVALID_BUFFER;
      }

      if (resource->target == PIPE_BUFFER)
         unmap_func = pipe_buffer_unmap;
      else
         unmap_func = pipe_texture_unmap;

      unmap_func(drv->pipe, buf->derived_surface.transfer);
      buf->derived_surface.transfer = NULL;

      if (buf->type == VAImageBufferType)
         drv->pipe->flush(drv->pipe, NULL, 0);
   }
   mtx_unlock(&drv->mutex);

   return VA_STATUS_SUCCESS;
}

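/*
 * vaDestroyBuffer entrypoint: drops the derived surface resource (if any),
 * frees the coded buffer segment list or the plain data store, and removes
 * the handle.
 */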
VAStatus
vlVaDestroyBuffer(VADriverContextP ctx, VABufferID buf_id)
{
   vlVaDriver *drv;
   vlVaBuffer *buf;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   mtx_lock(&drv->mutex);
   buf = handle_table_get(drv->htab, buf_id);
   if (!buf) {
      mtx_unlock(&drv->mutex);
      return VA_STATUS_ERROR_INVALID_BUFFER;
   }

   if (buf->derived_surface.resource) {
      pipe_resource_reference(&buf->derived_surface.resource, NULL);

      if (buf->derived_image_buffer)
         buf->derived_image_buffer->destroy(buf->derived_image_buffer);
   }

   if (buf->type == VAEncCodedBufferType) {
      /* Free the whole VACodedBufferSegment chain built at map time. */
      VACodedBufferSegment *node = buf->data;
      while (node) {
         VACodedBufferSegment *next = (VACodedBufferSegment *)node->next;
         FREE(node);
         node = next;
      }
   } else {
      FREE(buf->data);
   }

   FREE(buf);
   handle_table_remove(VL_VA_DRIVER(ctx)->htab, buf_id);
   mtx_unlock(&drv->mutex);

   return VA_STATUS_SUCCESS;
}

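/* vaBufferInfo entrypoint: reports the buffer type, element size and element count. */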
VAStatus
vlVaBufferInfo(VADriverContextP ctx, VABufferID buf_id, VABufferType *type,
               unsigned int *size, unsigned int *num_elements)
{
   vlVaDriver *drv;
   vlVaBuffer *buf;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   mtx_lock(&drv->mutex);
   buf = handle_table_get(drv->htab, buf_id);
   mtx_unlock(&drv->mutex);
   if (!buf)
      return VA_STATUS_ERROR_INVALID_BUFFER;

   *type = buf->type;
   *size = buf->size;
   *num_elements = buf->num_elements;

   return VA_STATUS_SUCCESS;
}

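/*
 * vaAcquireBufferHandle entrypoint: exports the derived surface resource of
 * an image buffer as a DRM PRIME fd (or an NT handle / D3D12 resource on
 * Windows) and tracks the export with a refcount, so repeated acquires must
 * use the same memory type.
 */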
VAStatus
vlVaAcquireBufferHandle(VADriverContextP ctx, VABufferID buf_id,
                        VABufferInfo *out_buf_info)
{
   vlVaDriver *drv;
   uint32_t i;
   uint32_t mem_type;
   vlVaBuffer *buf;
   struct pipe_screen *screen;

   /* List of supported memory types, in preferred order. */
   static const uint32_t mem_types[] = {
#ifdef _WIN32
      VA_SURFACE_ATTRIB_MEM_TYPE_NTHANDLE,
      VA_SURFACE_ATTRIB_MEM_TYPE_D3D12_RESOURCE,
#else
      VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME,
#endif
      0
   };

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   screen = VL_VA_PSCREEN(ctx);
   mtx_lock(&drv->mutex);
   buf = handle_table_get(VL_VA_DRIVER(ctx)->htab, buf_id);
   mtx_unlock(&drv->mutex);

   if (!buf)
      return VA_STATUS_ERROR_INVALID_BUFFER;

   /* Only VA surface|image like buffers are supported for now. */
   if (buf->type != VAImageBufferType)
      return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;

   if (!out_buf_info)
      return VA_STATUS_ERROR_INVALID_PARAMETER;

   if (!out_buf_info->mem_type)
      mem_type = mem_types[0];
   else {
      mem_type = 0;
      for (i = 0; mem_types[i] != 0; i++) {
         if (out_buf_info->mem_type & mem_types[i]) {
            mem_type = out_buf_info->mem_type;
            break;
         }
      }
      if (!mem_type)
         return VA_STATUS_ERROR_UNSUPPORTED_MEMORY_TYPE;
   }

   if (!buf->derived_surface.resource)
      return VA_STATUS_ERROR_INVALID_BUFFER;

   if (buf->export_refcount > 0) {
      if (buf->export_state.mem_type != mem_type)
         return VA_STATUS_ERROR_INVALID_PARAMETER;
   } else {
      VABufferInfo * const buf_info = &buf->export_state;

      switch (mem_type) {
#ifdef _WIN32
      case VA_SURFACE_ATTRIB_MEM_TYPE_D3D12_RESOURCE:
      case VA_SURFACE_ATTRIB_MEM_TYPE_NTHANDLE:
#else
      case VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME:
#endif
      {
         struct winsys_handle whandle;

         mtx_lock(&drv->mutex);
         drv->pipe->flush(drv->pipe, NULL, 0);

         memset(&whandle, 0, sizeof(whandle));
         whandle.type = WINSYS_HANDLE_TYPE_FD;

#ifdef _WIN32
         if (mem_type == VA_SURFACE_ATTRIB_MEM_TYPE_D3D12_RESOURCE)
            whandle.type = WINSYS_HANDLE_TYPE_D3D12_RES;
#endif
         if (!screen->resource_get_handle(screen, drv->pipe,
                                          buf->derived_surface.resource,
                                          &whandle, PIPE_HANDLE_USAGE_FRAMEBUFFER_WRITE)) {
            mtx_unlock(&drv->mutex);
            return VA_STATUS_ERROR_INVALID_BUFFER;
         }

         mtx_unlock(&drv->mutex);

         buf_info->handle = (intptr_t)whandle.handle;

#ifdef _WIN32
         if (mem_type == VA_SURFACE_ATTRIB_MEM_TYPE_D3D12_RESOURCE)
            buf_info->handle = (intptr_t)whandle.com_obj;
#endif
         break;
      }
      default:
         return VA_STATUS_ERROR_UNSUPPORTED_MEMORY_TYPE;
      }

      buf_info->type = buf->type;
      buf_info->mem_type = mem_type;
      buf_info->mem_size = buf->num_elements * buf->size;
   }

   buf->export_refcount++;

   *out_buf_info = buf->export_state;

   return VA_STATUS_SUCCESS;
}

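/*
 * vaReleaseBufferHandle entrypoint: drops one export reference and, when the
 * last one goes away, closes the fd / NT handle that was handed out by
 * vlVaAcquireBufferHandle.
 */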
VAStatus
vlVaReleaseBufferHandle(VADriverContextP ctx, VABufferID buf_id)
{
   vlVaDriver *drv;
   vlVaBuffer *buf;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   mtx_lock(&drv->mutex);
   buf = handle_table_get(drv->htab, buf_id);
   mtx_unlock(&drv->mutex);

   if (!buf)
      return VA_STATUS_ERROR_INVALID_BUFFER;

   if (buf->export_refcount == 0)
      return VA_STATUS_ERROR_INVALID_BUFFER;

   if (--buf->export_refcount == 0) {
      VABufferInfo * const buf_info = &buf->export_state;

      switch (buf_info->mem_type) {
#ifdef _WIN32
      case VA_SURFACE_ATTRIB_MEM_TYPE_D3D12_RESOURCE:
         /* Nothing to release for this case. */
         break;
      case VA_SURFACE_ATTRIB_MEM_TYPE_NTHANDLE:
         CloseHandle((HANDLE) buf_info->handle);
         break;
#else
      case VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME:
         close((intptr_t)buf_info->handle);
         break;
#endif
      default:
         return VA_STATUS_ERROR_INVALID_BUFFER;
      }

      buf_info->mem_type = 0;
   }

   return VA_STATUS_SUCCESS;
}

#if VA_CHECK_VERSION(1, 15, 0)
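/*
 * vaSyncBuffer entrypoint: waits for the encode operation that produced a
 * coded buffer and stores its feedback (coded size and extended metadata)
 * on the buffer, so a later map can build the VACodedBufferSegment list.
 */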
VAStatus
vlVaSyncBuffer(VADriverContextP ctx, VABufferID buf_id, uint64_t timeout_ns)
{
   vlVaDriver *drv;
   vlVaContext *context;
   vlVaBuffer *buf;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   /* Some apps (e.g. ffmpeg) check whether vaSyncBuffer is present to decide
    * whether to enqueue multiple vaEndPicture encode calls asynchronously and
    * only call vaSyncBuffer after a pre-defined latency. If vaSyncBuffer is
    * not implemented, they fall back to the usual synchronous pairs of
    * { vaEndPicture + vaSyncSurface }.
    *
    * As this might require the driver to support multiple in-flight
    * operations and/or store multiple feedback values before syncing, fall
    * back to the backward-compatible behaviour unless the driver explicitly
    * advertises PIPE_VIDEO_CAP_ENC_SUPPORTS_ASYNC_OPERATION.
    */
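   /* Illustrative sketch of the application-side pattern this enables
    * (not driver code, names abbreviated):
    *
    *    for (i = 0; i < async_depth; i++)
    *       vaEndPicture(dpy, enc_ctx);                       // queue encodes
    *    vaSyncBuffer(dpy, coded_buf, VA_TIMEOUT_INFINITE);   // sync oldest
    *
    * i.e. the app syncs on a coded buffer instead of calling vaSyncSurface
    * after every vaEndPicture.
    */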
   if (!drv->pipe->screen->get_video_param(drv->pipe->screen,
                                           PIPE_VIDEO_PROFILE_UNKNOWN,
                                           PIPE_VIDEO_ENTRYPOINT_ENCODE,
                                           PIPE_VIDEO_CAP_ENC_SUPPORTS_ASYNC_OPERATION))
      return VA_STATUS_ERROR_UNIMPLEMENTED;

   /* The vaSyncBuffer spec states: "If timeout is zero, the function returns immediately." */
   if (timeout_ns == 0)
      return VA_STATUS_ERROR_TIMEDOUT;

   if (timeout_ns != VA_TIMEOUT_INFINITE)
      return VA_STATUS_ERROR_UNIMPLEMENTED;

   mtx_lock(&drv->mutex);
   buf = handle_table_get(drv->htab, buf_id);

   if (!buf) {
      mtx_unlock(&drv->mutex);
      return VA_STATUS_ERROR_INVALID_BUFFER;
   }

   if (!buf->feedback) {
      /* No outstanding operation: nothing to do. */
      mtx_unlock(&drv->mutex);
      return VA_STATUS_SUCCESS;
   }

   context = handle_table_get(drv->htab, buf->ctx);
   if (!context) {
      mtx_unlock(&drv->mutex);
      return VA_STATUS_ERROR_INVALID_CONTEXT;
   }

   vlVaSurface *surf = handle_table_get(drv->htab, buf->associated_encode_input_surf);

   if (context->decoder->entrypoint == PIPE_VIDEO_ENTRYPOINT_ENCODE) {
      context->decoder->get_feedback(context->decoder, buf->feedback,
                                     &buf->coded_size, &buf->extended_metadata);
      buf->feedback = NULL;
      /* Also mark the associated render target (encode source texture) surface
       * as done, in case vaSyncSurface is called on it, to avoid fetching the
       * feedback twice. */
      if (surf) {
         surf->feedback = NULL;
         buf->associated_encode_input_surf = VA_INVALID_ID;
      }
   }

   mtx_unlock(&drv->mutex);
   return VA_STATUS_SUCCESS;
}
#endif