/*
 * Copyright © Microsoft Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "d3d12_context.h"
#include "d3d12_screen.h"
#include "d3d12_video_proc.h"
#include "d3d12_residency.h"
#include "d3d12_util.h"
#include "d3d12_resource.h"
#include "d3d12_video_buffer.h"
#include "d3d12_format.h"

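/*
 * High-level flow (summary of the entry points below): the frontend calls
 * begin_frame() once per output/target, then process_frame() once per input
 * surface to be composed into that target, then end_frame() to record the
 * ProcessFrames1 command, and finally flush() to submit the command list on
 * the video process queue and signal the batch fence.
 */
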
void
d3d12_video_processor_begin_frame(struct pipe_video_codec *codec,
                                  struct pipe_video_buffer *target,
                                  struct pipe_picture_desc *picture)
{
   struct d3d12_video_processor *pD3D12Proc = (struct d3d12_video_processor *) codec;
   debug_printf("[d3d12_video_processor] d3d12_video_processor_begin_frame - "
                "fenceValue: %" PRIu64 "\n",
                pD3D12Proc->m_fenceValue);

   ///
   /// Wait here to make sure the next in flight resource set is empty before using it
   ///
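   // Throttle to at most D3D12_VIDEO_PROC_ASYNC_DEPTH batches in flight: the
   // slot about to be reused (m_fenceValue % D3D12_VIDEO_PROC_ASYNC_DEPTH) was
   // last used by the batch signaled with (m_fenceValue - D3D12_VIDEO_PROC_ASYNC_DEPTH),
   // so waiting on that value (clamped to 0 for the first few frames) guarantees
   // that the slot's command allocator and pending fence entry are free to recycle.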
   uint64_t fenceValueToWaitOn = static_cast<uint64_t>(std::max(static_cast<int64_t>(0l), static_cast<int64_t>(pD3D12Proc->m_fenceValue) - static_cast<int64_t>(D3D12_VIDEO_PROC_ASYNC_DEPTH) ));

   debug_printf("[d3d12_video_processor] d3d12_video_processor_begin_frame Waiting for completion of in flight resource sets with previous work with fenceValue: %" PRIu64 "\n",
                fenceValueToWaitOn);

   ASSERTED bool wait_res = d3d12_video_processor_sync_completion(codec, fenceValueToWaitOn, OS_TIMEOUT_INFINITE);
   assert(wait_res);

   HRESULT hr = pD3D12Proc->m_spCommandList->Reset(pD3D12Proc->m_spCommandAllocators[d3d12_video_processor_pool_current_index(pD3D12Proc)].Get());
   if (FAILED(hr)) {
      debug_printf(
         "[d3d12_video_processor] resetting ID3D12GraphicsCommandList failed with HR %x\n",
         hr);
      assert(false);
   }

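   // Only the output/target arguments are captured here; the per-input stream
   // arguments are accumulated by process_frame() into m_ProcessInputs and
   // consumed later by end_frame() when ProcessFrames1 is recorded.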
   // Setup process frame arguments for output/target texture.
   struct d3d12_video_buffer *pOutputVideoBuffer = (struct d3d12_video_buffer *) target;

   ID3D12Resource *pDstD3D12Res = d3d12_resource_resource(pOutputVideoBuffer->texture);
   auto dstDesc = GetDesc(pDstD3D12Res);
   pD3D12Proc->m_OutputArguments = {
      //struct D3D12_VIDEO_PROCESS_OUTPUT_STREAM_ARGUMENTS args;
      {
         {
            {
               pDstD3D12Res, // ID3D12Resource *pTexture2D;
               0, // UINT Subresource;
            },
            {
               NULL, // ID3D12Resource *pTexture2D;
               0 // UINT Subresource;
            }
         },
         { 0, 0, (int) dstDesc.Width, (int) dstDesc.Height }
      },
      // struct d3d12_resource* buffer;
      pOutputVideoBuffer,
   };

   debug_printf("d3d12_video_processor_begin_frame: Beginning new scene with Output ID3D12Resource: %p (%d %d)\n", pDstD3D12Res, (int) dstDesc.Width, (int) dstDesc.Height);
}

int
d3d12_video_processor_end_frame(struct pipe_video_codec *codec,
                                struct pipe_video_buffer *target,
                                struct pipe_picture_desc *picture)
{
   struct d3d12_video_processor *pD3D12Proc = (struct d3d12_video_processor *) codec;
   debug_printf("[d3d12_video_processor] d3d12_video_processor_end_frame - "
                "fenceValue: %" PRIu64 "\n",
                pD3D12Proc->m_fenceValue);

   if(pD3D12Proc->m_ProcessInputs.size() > pD3D12Proc->m_vpMaxInputStreams.MaxInputStreams) {
      debug_printf("[d3d12_video_processor] ERROR: Requested number of input surfaces (%" PRIu64 ") exceeds underlying D3D12 driver capabilities (%d)\n", (uint64_t) pD3D12Proc->m_ProcessInputs.size(), pD3D12Proc->m_vpMaxInputStreams.MaxInputStreams);
      assert(false);
   }

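   // The ID3D12VideoProcessor is created against fixed input/output stream
   // descriptors (seeded with NV12 defaults in d3d12_video_processor_create),
   // so if the batch queued by process_frame() differs in input count or in
   // input/output formats, the processor object has to be re-created before
   // ProcessFrames1 can be recorded.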
   auto curOutputDesc = GetOutputStreamDesc(pD3D12Proc->m_spVideoProcessor.Get());
   auto curOutputTexFmt = GetDesc(pD3D12Proc->m_OutputArguments.args.OutputStream[0].pTexture2D).Format;

   bool inputFmtsMatch = pD3D12Proc->m_inputStreamDescs.size() == pD3D12Proc->m_ProcessInputs.size();
   unsigned curInputIdx = 0;
   while( (curInputIdx < pD3D12Proc->m_inputStreamDescs.size()) && inputFmtsMatch)
   {
      inputFmtsMatch = inputFmtsMatch && (pD3D12Proc->m_inputStreamDescs[curInputIdx].Format == GetDesc(pD3D12Proc->m_ProcessInputs[curInputIdx].InputStream[0].pTexture2D).Format);
      curInputIdx++;
   }

   bool inputCountMatches = (pD3D12Proc->m_ProcessInputs.size() == pD3D12Proc->m_spVideoProcessor->GetNumInputStreamDescs());
   bool outputFmtMatches = (curOutputDesc.Format == curOutputTexFmt);
   bool needsVPRecreation = (
      !inputCountMatches // Requested batch has a different number of inputs to be blitted
      || !outputFmtMatches // output texture format differs from what the video processor object expects
      || !inputFmtsMatch // input texture formats differ from what the video processor object expects
   );

   if(needsVPRecreation) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_end_frame - Attempting to re-create ID3D12VideoProcessor "
                   "inputCountMatches: %d inputFmtsMatch: %d outputFmtMatches: %d\n", inputCountMatches, inputFmtsMatch, outputFmtMatches);

      DXGI_COLOR_SPACE_TYPE OutputColorSpace = d3d12_convert_from_legacy_color_space(
         !util_format_is_yuv(d3d12_get_pipe_format(curOutputTexFmt)),
         util_format_get_blocksize(d3d12_get_pipe_format(curOutputTexFmt)) * 8 /*bytes to bits conversion*/,
         /* StudioRGB= */ false,
         /* P709= */ true,
         /* StudioYUV= */ true);

      std::vector<DXGI_FORMAT> InputFormats;
      for(D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS1 curInput : pD3D12Proc->m_ProcessInputs)
      {
         InputFormats.push_back(GetDesc(curInput.InputStream[0].pTexture2D).Format);
      }
      DXGI_COLOR_SPACE_TYPE InputColorSpace = d3d12_convert_from_legacy_color_space(
         !util_format_is_yuv(d3d12_get_pipe_format(InputFormats[0])),
         util_format_get_blocksize(d3d12_get_pipe_format(InputFormats[0])) * 8 /*bytes to bits conversion*/,
         /* StudioRGB= */ false,
         /* P709= */ true,
         /* StudioYUV= */ true);

      // Release previous allocation
      pD3D12Proc->m_spVideoProcessor.Reset();
      if(!d3d12_video_processor_check_caps_and_create_processor(pD3D12Proc, InputFormats, InputColorSpace, curOutputTexFmt, OutputColorSpace))
      {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_end_frame - Failure when "
                      "trying to re-create the ID3D12VideoProcessor for current batch streams configuration\n");
         assert(false);
      }
   }

   // Schedule barrier transitions
   std::vector<D3D12_RESOURCE_BARRIER> barrier_transitions;
   barrier_transitions.push_back(CD3DX12_RESOURCE_BARRIER::Transition(
      pD3D12Proc->m_OutputArguments.args.OutputStream[0].pTexture2D,
      D3D12_RESOURCE_STATE_COMMON,
      D3D12_RESOURCE_STATE_VIDEO_PROCESS_WRITE));

   for(D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS1 curInput : pD3D12Proc->m_ProcessInputs)
      barrier_transitions.push_back(CD3DX12_RESOURCE_BARRIER::Transition(
         curInput.InputStream[0].pTexture2D,
         D3D12_RESOURCE_STATE_COMMON,
         D3D12_RESOURCE_STATE_VIDEO_PROCESS_READ));

   pD3D12Proc->m_spCommandList->ResourceBarrier(static_cast<uint32_t>(barrier_transitions.size()), barrier_transitions.data());

   // Schedule process operation

   pD3D12Proc->m_spCommandList->ProcessFrames1(pD3D12Proc->m_spVideoProcessor.Get(), &pD3D12Proc->m_OutputArguments.args, static_cast<UINT>(pD3D12Proc->m_ProcessInputs.size()), pD3D12Proc->m_ProcessInputs.data());

   // Schedule reverse (back to common) transitions before command list closes for current frame

   for (auto &BarrierDesc : barrier_transitions)
      std::swap(BarrierDesc.Transition.StateBefore, BarrierDesc.Transition.StateAfter);

   pD3D12Proc->m_spCommandList->ResourceBarrier(static_cast<uint32_t>(barrier_transitions.size()), barrier_transitions.data());

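   // Bookkeeping for the async slot used by this batch: store the fence value
   // and the queue fence so fence_wait()/sync_completion() can wait on it.
   // The actual Signal of m_spFence with this value happens in
   // d3d12_video_processor_flush() after ExecuteCommandLists.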
   pD3D12Proc->m_PendingFences[d3d12_video_processor_pool_current_index(pD3D12Proc)].value = pD3D12Proc->m_fenceValue;
   pD3D12Proc->m_PendingFences[d3d12_video_processor_pool_current_index(pD3D12Proc)].cmdqueue_fence = pD3D12Proc->m_spFence.Get();
   *picture->fence = (pipe_fence_handle*) &pD3D12Proc->m_PendingFences[d3d12_video_processor_pool_current_index(pD3D12Proc)];
   return 0;
}

int
d3d12_video_processor_process_frame(struct pipe_video_codec *codec,
                                    struct pipe_video_buffer *input_texture,
                                    const struct pipe_vpp_desc *process_properties)
{
   struct d3d12_video_processor *pD3D12Proc = (struct d3d12_video_processor *) codec;

   // begin_frame only gets called once, so it wouldn't update process_properties->src_surface_fence correctly
   pD3D12Proc->input_surface_fence = (struct d3d12_fence*) process_properties->src_surface_fence;

   // Get the underlying resources from the pipe_video_buffers
   struct d3d12_video_buffer *pInputVideoBuffer = (struct d3d12_video_buffer *) input_texture;

   ID3D12Resource *pSrcD3D12Res = d3d12_resource_resource(pInputVideoBuffer->texture);

   // y0 = top
   // x0 = left
   // x1 = right
   // y1 = bottom

   debug_printf("d3d12_video_processor_process_frame: Adding Input ID3D12Resource: %p to scene (Output target %p)\n", pSrcD3D12Res, pD3D12Proc->m_OutputArguments.args.OutputStream[0].pTexture2D);
   debug_printf("d3d12_video_processor_process_frame: Input box: top: %d left: %d right: %d bottom: %d\n", process_properties->src_region.y0, process_properties->src_region.x0, process_properties->src_region.x1, process_properties->src_region.y1);
   debug_printf("d3d12_video_processor_process_frame: Output box: top: %d left: %d right: %d bottom: %d\n", process_properties->dst_region.y0, process_properties->dst_region.x0, process_properties->dst_region.x1, process_properties->dst_region.y1);
   debug_printf("d3d12_video_processor_process_frame: Requested alpha blend mode %d global alpha: %f \n", process_properties->blend.mode, process_properties->blend.global_alpha);

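   // Translate the pipe_vpp_desc for this input into a
   // D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS1 entry: only InputStream[0] is
   // populated (no stereo), no past/future reference frames are provided,
   // filter levels are left at zero, and alpha blending is enabled only for
   // PIPE_VIDEO_VPP_BLEND_MODE_GLOBAL_ALPHA.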
   // Setup process frame arguments for current input texture.

   D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS1 InputArguments = {
      {
         { // D3D12_VIDEO_PROCESS_INPUT_STREAM InputStream[0];
            pSrcD3D12Res, // ID3D12Resource *pTexture2D;
            0, // UINT Subresource
            { //D3D12_VIDEO_PROCESS_REFERENCE_SET ReferenceSet;
               0, //UINT NumPastFrames;
               NULL, //ID3D12Resource **ppPastFrames;
               NULL, //UINT *pPastSubresources;
               0, //UINT NumFutureFrames;
               NULL, //ID3D12Resource **ppFutureFrames;
               NULL //UINT *pFutureSubresources;
            }
         },
         { // D3D12_VIDEO_PROCESS_INPUT_STREAM InputStream[1];
            NULL, //ID3D12Resource *pTexture2D;
            0, //UINT Subresource;
            { //D3D12_VIDEO_PROCESS_REFERENCE_SET ReferenceSet;
               0, //UINT NumPastFrames;
               NULL, //ID3D12Resource **ppPastFrames;
               NULL, //UINT *pPastSubresources;
               0, //UINT NumFutureFrames;
               NULL, //ID3D12Resource **ppFutureFrames;
               NULL //UINT *pFutureSubresources;
            }
         }
      },
      { // D3D12_VIDEO_PROCESS_TRANSFORM Transform;
         // y0 = top
         // x0 = left
         // x1 = right
         // y1 = bottom
         // typedef struct _RECT
         // {
         //    int left;
         //    int top;
         //    int right;
         //    int bottom;
         // } RECT;
         { process_properties->src_region.x0/*left*/, process_properties->src_region.y0/*top*/, process_properties->src_region.x1/*right*/, process_properties->src_region.y1/*bottom*/ },
         { process_properties->dst_region.x0/*left*/, process_properties->dst_region.y0/*top*/, process_properties->dst_region.x1/*right*/, process_properties->dst_region.y1/*bottom*/ }, // D3D12_RECT DestinationRectangle;
         pD3D12Proc->m_inputStreamDescs[0].EnableOrientation ? d3d12_video_processor_convert_pipe_rotation(process_properties->orientation) : D3D12_VIDEO_PROCESS_ORIENTATION_DEFAULT, // D3D12_VIDEO_PROCESS_ORIENTATION Orientation;
      },
      D3D12_VIDEO_PROCESS_INPUT_STREAM_FLAG_NONE,
      { // D3D12_VIDEO_PROCESS_INPUT_STREAM_RATE RateInfo;
         0,
         0,
      },
      // INT FilterLevels[32];
      {
         0, // Trailing zeroes on the rest
      },
      //D3D12_VIDEO_PROCESS_ALPHA_BLENDING;
      {
         (process_properties->blend.mode == PIPE_VIDEO_VPP_BLEND_MODE_GLOBAL_ALPHA),
         process_properties->blend.global_alpha
      },
      // D3D12_VIDEO_FIELD_TYPE FieldType
      D3D12_VIDEO_FIELD_TYPE_NONE,
   };

   debug_printf("ProcessFrame InArgs Orientation %d \n\tSrc top: %d left: %d right: %d bottom: %d\n\tDst top: %d left: %d right: %d bottom: %d\n", InputArguments.Transform.Orientation,
                InputArguments.Transform.SourceRectangle.top, InputArguments.Transform.SourceRectangle.left, InputArguments.Transform.SourceRectangle.right, InputArguments.Transform.SourceRectangle.bottom,
                InputArguments.Transform.DestinationRectangle.top, InputArguments.Transform.DestinationRectangle.left, InputArguments.Transform.DestinationRectangle.right, InputArguments.Transform.DestinationRectangle.bottom);

   pD3D12Proc->m_ProcessInputs.push_back(InputArguments);
   pD3D12Proc->m_InputBuffers.push_back(pInputVideoBuffer);

   ///
   /// Mark pending work; the actual GPU submission happens in d3d12_video_processor_flush()
   ///
   pD3D12Proc->m_needsGPUFlush = true;
   return 0;
}

void
d3d12_video_processor_destroy(struct pipe_video_codec *codec)
{
   if (codec == nullptr) {
      return;
   }
   // Flush pending work before destroying.
   struct d3d12_video_processor *pD3D12Proc = (struct d3d12_video_processor *) codec;

   uint64_t curBatchFence = pD3D12Proc->m_fenceValue;
   if (pD3D12Proc->m_needsGPUFlush)
   {
      d3d12_video_processor_flush(codec);
      d3d12_video_processor_sync_completion(codec, curBatchFence, OS_TIMEOUT_INFINITE);
   }

   // Call dtor to make ComPtr work
   delete pD3D12Proc;
}

void
d3d12_video_processor_flush(struct pipe_video_codec *codec)
{
   struct d3d12_video_processor *pD3D12Proc = (struct d3d12_video_processor *) codec;
   assert(pD3D12Proc);
   assert(pD3D12Proc->m_spD3D12VideoDevice);
   assert(pD3D12Proc->m_spCommandQueue);

   debug_printf("[d3d12_video_processor] d3d12_video_processor_flush started. Will flush video queue work and CPU wait on "
                "fenceValue: %" PRIu64 "\n",
                pD3D12Proc->m_fenceValue);

   if (!pD3D12Proc->m_needsGPUFlush) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_flush started. Nothing to flush, all up to date.\n");
   } else {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_flush - Promoting the output texture %p to d3d12_permanently_resident.\n",
                   pD3D12Proc->m_OutputArguments.buffer->texture);

      // Make the resources permanently resident for video use
      d3d12_promote_to_permanent_residency(pD3D12Proc->m_pD3D12Screen, pD3D12Proc->m_OutputArguments.buffer->texture);

      for(auto curInput : pD3D12Proc->m_InputBuffers)
      {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_flush - Promoting the input texture %p to d3d12_permanently_resident.\n",
                      curInput->texture);
         // Make the resources permanently resident for video use
         d3d12_promote_to_permanent_residency(pD3D12Proc->m_pD3D12Screen, curInput->texture);
      }
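
      // Promoting to permanent residency keeps these textures resident without
      // going through the per-batch residency tracking used for regular
      // gallium submissions, which this direct video-queue submission bypasses.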

      HRESULT hr = pD3D12Proc->m_pD3D12Screen->dev->GetDeviceRemovedReason();
      if (hr != S_OK) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_flush"
                      " - D3D12Device was removed BEFORE commandlist "
                      "execution with HR %x.\n",
                      hr);
         goto flush_fail;
      }

      // Close and execute command list and wait for idle on CPU blocking
      // this method before resetting list and allocator for next submission.

      if (pD3D12Proc->m_transitionsBeforeCloseCmdList.size() > 0) {
         pD3D12Proc->m_spCommandList->ResourceBarrier(static_cast<UINT>(pD3D12Proc->m_transitionsBeforeCloseCmdList.size()),
                                                      pD3D12Proc->m_transitionsBeforeCloseCmdList.data());
         pD3D12Proc->m_transitionsBeforeCloseCmdList.clear();
      }

      hr = pD3D12Proc->m_spCommandList->Close();
      if (FAILED(hr)) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_flush - Can't close command list with HR %x\n", hr);
         goto flush_fail;
      }

      // Flush any work batched in the d3d12_screen and Wait on the m_spCommandQueue
      struct pipe_fence_handle *completion_fence = NULL;
      pD3D12Proc->base.context->flush(pD3D12Proc->base.context, &completion_fence, PIPE_FLUSH_ASYNC | PIPE_FLUSH_HINT_FINISH);
      struct d3d12_fence *casted_completion_fence = d3d12_fence(completion_fence);
      pD3D12Proc->m_spCommandQueue->Wait(casted_completion_fence->cmdqueue_fence, casted_completion_fence->value);
      pD3D12Proc->m_pD3D12Screen->base.fence_reference(&pD3D12Proc->m_pD3D12Screen->base, &completion_fence, NULL);

      struct d3d12_fence *input_surface_fence = pD3D12Proc->input_surface_fence;
      if (input_surface_fence)
         pD3D12Proc->m_spCommandQueue->Wait(input_surface_fence->cmdqueue_fence, input_surface_fence->value);

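      // At this point the video process queue has GPU-side waits queued on
      // both the pipe context's previously flushed work and (if provided) the
      // fence protecting the input surface, so the processing commands below
      // cannot start executing until their dependencies are done.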
      ID3D12CommandList *ppCommandLists[1] = { pD3D12Proc->m_spCommandList.Get() };
      pD3D12Proc->m_spCommandQueue->ExecuteCommandLists(1, ppCommandLists);
      pD3D12Proc->m_spCommandQueue->Signal(pD3D12Proc->m_spFence.Get(), pD3D12Proc->m_fenceValue);

      // Validate device was not removed
      hr = pD3D12Proc->m_pD3D12Screen->dev->GetDeviceRemovedReason();
      if (hr != S_OK) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_flush"
                      " - D3D12Device was removed AFTER commandlist "
                      "execution with HR %x, but wasn't before.\n",
                      hr);
         goto flush_fail;
      }

      debug_printf(
         "[d3d12_video_processor] d3d12_video_processor_flush - GPU signaled execution finalized for fenceValue: %" PRIu64 "\n",
         pD3D12Proc->m_fenceValue);

      pD3D12Proc->m_fenceValue++;
      pD3D12Proc->m_needsGPUFlush = false;
   }
   pD3D12Proc->m_ProcessInputs.clear();
   pD3D12Proc->m_InputBuffers.clear();
   // Free the fence after completion finished

   return;

flush_fail:
   debug_printf("[d3d12_video_processor] d3d12_video_processor_flush failed for fenceValue: %" PRIu64 "\n", pD3D12Proc->m_fenceValue);
   assert(false);
}

struct pipe_video_codec *
d3d12_video_processor_create(struct pipe_context *context, const struct pipe_video_codec *codec)
{
   ///
   /// Initialize d3d12_video_processor
   ///

   // Use new; a raw allocation would skip the constructor and lose the in-class member initializers.
   struct d3d12_video_processor *pD3D12Proc = new d3d12_video_processor;

   pD3D12Proc->m_PendingFences.resize(D3D12_VIDEO_PROC_ASYNC_DEPTH);
   pD3D12Proc->base = *codec;

   pD3D12Proc->base.context = context;
   pD3D12Proc->base.width = codec->width;
   pD3D12Proc->base.height = codec->height;
   pD3D12Proc->base.destroy = d3d12_video_processor_destroy;
   pD3D12Proc->base.begin_frame = d3d12_video_processor_begin_frame;
   pD3D12Proc->base.process_frame = d3d12_video_processor_process_frame;
   pD3D12Proc->base.end_frame = d3d12_video_processor_end_frame;
   pD3D12Proc->base.flush = d3d12_video_processor_flush;
   pD3D12Proc->base.fence_wait = d3d12_video_processor_fence_wait;

   ///

   ///
   /// Try initializing D3D12 Video device and check for device caps
   ///

   struct d3d12_context *pD3D12Ctx = (struct d3d12_context *) context;
   pD3D12Proc->m_pD3D12Context = pD3D12Ctx;
   pD3D12Proc->m_pD3D12Screen = d3d12_screen(pD3D12Ctx->base.screen);

   // Assume defaults for now, can re-create if necessary when d3d12_video_processor_end_frame kicks off the processing
   DXGI_COLOR_SPACE_TYPE InputColorSpace = DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709;
   std::vector<DXGI_FORMAT> InputFormats = { DXGI_FORMAT_NV12 };
   DXGI_FORMAT OutputFormat = DXGI_FORMAT_NV12;
   DXGI_COLOR_SPACE_TYPE OutputColorSpace = DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709;

   ///
   /// Create processor objects
   ///
   if (FAILED(pD3D12Proc->m_pD3D12Screen->dev->QueryInterface(
          IID_PPV_ARGS(pD3D12Proc->m_spD3D12VideoDevice.GetAddressOf())))) {
      debug_printf("[d3d12_video_processor] d3d12_video_create_processor - D3D12 Device has no Video support\n");
      goto failed;
   }

   if (FAILED(pD3D12Proc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_PROCESS_MAX_INPUT_STREAMS, &pD3D12Proc->m_vpMaxInputStreams, sizeof(pD3D12Proc->m_vpMaxInputStreams)))) {
      debug_printf("[d3d12_video_processor] d3d12_video_create_processor - Failed to query D3D12_FEATURE_VIDEO_PROCESS_MAX_INPUT_STREAMS\n");
      goto failed;
   }

   if (!d3d12_video_processor_check_caps_and_create_processor(pD3D12Proc, InputFormats, InputColorSpace, OutputFormat, OutputColorSpace)) {
      debug_printf("[d3d12_video_processor] d3d12_video_create_processor - Failure on "
                   "d3d12_video_processor_check_caps_and_create_processor\n");
      goto failed;
   }

   if (!d3d12_video_processor_create_command_objects(pD3D12Proc)) {
      debug_printf(
         "[d3d12_video_processor] d3d12_video_create_processor - Failure on d3d12_video_processor_create_command_objects\n");
      goto failed;
   }

   debug_printf("[d3d12_video_processor] d3d12_video_create_processor - Created successfully!\n");

   return &pD3D12Proc->base;

failed:
   if (pD3D12Proc != nullptr) {
      d3d12_video_processor_destroy(&pD3D12Proc->base);
   }

   return nullptr;
}

bool
d3d12_video_processor_check_caps_and_create_processor(struct d3d12_video_processor *pD3D12Proc,
                                                      std::vector<DXGI_FORMAT> InputFormats,
                                                      DXGI_COLOR_SPACE_TYPE InputColorSpace,
                                                      DXGI_FORMAT OutputFormat,
                                                      DXGI_COLOR_SPACE_TYPE OutputColorSpace)
{
   HRESULT hr = S_OK;

   D3D12_VIDEO_FIELD_TYPE FieldType = D3D12_VIDEO_FIELD_TYPE_NONE;
   D3D12_VIDEO_FRAME_STEREO_FORMAT StereoFormat = D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE;
   DXGI_RATIONAL FrameRate = { 30, 1 };
   DXGI_RATIONAL AspectRatio = { 1, 1 };

   struct ResolStruct {
      uint Width;
      uint Height;
   };

   ResolStruct resolutionsList[] = {
      { 8192, 8192 },   // 8k
      { 8192, 4320 },   // 8k - alternative
      { 7680, 4800 },   // 8k - alternative
      { 7680, 4320 },   // 8k - alternative
      { 4096, 2304 },   // 2160p (4K)
      { 4096, 2160 },   // 2160p (4K) - alternative
      { 2560, 1440 },   // 1440p
      { 1920, 1200 },   // 1200p
      { 1920, 1080 },   // 1080p
      { 1280, 720 },    // 720p
      { 800, 600 },
   };

   pD3D12Proc->m_SupportCaps =
   {
      0, // NodeIndex
      { resolutionsList[0].Width, resolutionsList[0].Height, { InputFormats[0], InputColorSpace } },
      FieldType,
      StereoFormat,
      FrameRate,
      { OutputFormat, OutputColorSpace },
      StereoFormat,
      FrameRate,
   };

   uint32_t idxResol = 0;
   bool bSupportsAny = false;
   while ((idxResol < ARRAY_SIZE(resolutionsList)) && !bSupportsAny) {
      pD3D12Proc->m_SupportCaps.InputSample.Width = resolutionsList[idxResol].Width;
      pD3D12Proc->m_SupportCaps.InputSample.Height = resolutionsList[idxResol].Height;
      if (SUCCEEDED(pD3D12Proc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_PROCESS_SUPPORT, &pD3D12Proc->m_SupportCaps, sizeof(pD3D12Proc->m_SupportCaps)))) {
         bSupportsAny = ((pD3D12Proc->m_SupportCaps.SupportFlags & D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED) != 0);
      }
      idxResol++;
   }
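
   // After the loop, m_SupportCaps holds the driver's answer for the largest
   // resolution it accepted (or the last one probed if none were supported);
   // its ScaleSupport and FeatureSupport fields are used below to build the
   // input/output stream descriptors passed to CreateVideoProcessor.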

   if ((pD3D12Proc->m_SupportCaps.SupportFlags & D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED) != D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED)
   {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_check_caps_and_create_processor - D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED not returned by driver. "
                   "failed with SupportFlags %x\n",
                   pD3D12Proc->m_SupportCaps.SupportFlags);
   }

   D3D12_VIDEO_PROCESS_FILTER_FLAGS enabledFilterFlags = D3D12_VIDEO_PROCESS_FILTER_FLAG_NONE;

   bool enableOrientation = (
      ((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ROTATION) != 0)
      || ((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_FLIP) != 0)
   );

   D3D12_VIDEO_PROCESS_INPUT_STREAM_DESC inputStreamDesc = {
      InputFormats[0],
      InputColorSpace,
      AspectRatio, // SourceAspectRatio;
      AspectRatio, // DestinationAspectRatio;
      FrameRate, // FrameRate
      pD3D12Proc->m_SupportCaps.ScaleSupport.OutputSizeRange, // SourceSizeRange
      pD3D12Proc->m_SupportCaps.ScaleSupport.OutputSizeRange, // DestinationSizeRange
      enableOrientation,
      enabledFilterFlags,
      StereoFormat,
      FieldType,
      D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_NONE,
      ((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_BLENDING) != 0)
         && ((pD3D12Proc->m_SupportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_FILL) != 0), // EnableAlphaBlending
      {}, // LumaKey
      0, // NumPastFrames
      0, // NumFutureFrames
      false // EnableAutoProcessing
   };

   D3D12_VIDEO_PROCESS_OUTPUT_STREAM_DESC outputStreamDesc =
   {
      pD3D12Proc->m_SupportCaps.OutputFormat.Format,
      OutputColorSpace,
      D3D12_VIDEO_PROCESS_ALPHA_FILL_MODE_OPAQUE, // AlphaFillMode
      0u, // AlphaFillModeSourceStreamIndex
      {0, 0, 0, 0}, // BackgroundColor
      FrameRate, // FrameRate
      false // EnableStereo
   };

   // gets the required past/future frames for VP creation
   {
      D3D12_FEATURE_DATA_VIDEO_PROCESS_REFERENCE_INFO referenceInfo = {};
      referenceInfo.NodeIndex = 0;
      D3D12_VIDEO_PROCESS_FEATURE_FLAGS featureFlags = D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
      featureFlags |= outputStreamDesc.AlphaFillMode ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_FILL : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
      featureFlags |= inputStreamDesc.LumaKey.Enable ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_LUMA_KEY : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
      featureFlags |= (inputStreamDesc.StereoFormat != D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE || outputStreamDesc.EnableStereo) ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_STEREO : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
      featureFlags |= inputStreamDesc.EnableOrientation ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_ROTATION | D3D12_VIDEO_PROCESS_FEATURE_FLAG_FLIP : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;
      featureFlags |= inputStreamDesc.EnableAlphaBlending ? D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_BLENDING : D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE;

      referenceInfo.DeinterlaceMode = inputStreamDesc.DeinterlaceMode;
      referenceInfo.Filters = inputStreamDesc.FilterFlags;
      referenceInfo.FeatureSupport = featureFlags;
      referenceInfo.InputFrameRate = inputStreamDesc.FrameRate;
      referenceInfo.OutputFrameRate = outputStreamDesc.FrameRate;
      referenceInfo.EnableAutoProcessing = inputStreamDesc.EnableAutoProcessing;

      hr = pD3D12Proc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_PROCESS_REFERENCE_INFO, &referenceInfo, sizeof(referenceInfo));
      if (FAILED(hr)) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_check_caps_and_create_processor - CheckFeatureSupport "
                      "failed with HR %x\n",
                      hr);
         return false;
      }

      inputStreamDesc.NumPastFrames = referenceInfo.PastFrames;
      inputStreamDesc.NumFutureFrames = referenceInfo.FutureFrames;
   }

   pD3D12Proc->m_outputStreamDesc = outputStreamDesc;

   debug_printf("[d3d12_video_processor]\t Creating Video Processor\n");
   debug_printf("[d3d12_video_processor]\t NumInputs: %d\n", (int) InputFormats.size());

   pD3D12Proc->m_inputStreamDescs.clear();
   for (unsigned i = 0; i < InputFormats.size(); i++)
   {
      inputStreamDesc.Format = InputFormats[i];
      pD3D12Proc->m_inputStreamDescs.push_back(inputStreamDesc);
      debug_printf("[d3d12_video_processor]\t Input Stream #%d Format: %d\n", i, inputStreamDesc.Format);
   }
   debug_printf("[d3d12_video_processor]\t Output Stream Format: %d\n", pD3D12Proc->m_outputStreamDesc.Format);

   hr = pD3D12Proc->m_spD3D12VideoDevice->CreateVideoProcessor(pD3D12Proc->m_NodeMask,
                                                               &pD3D12Proc->m_outputStreamDesc,
                                                               static_cast<UINT>(pD3D12Proc->m_inputStreamDescs.size()),
                                                               pD3D12Proc->m_inputStreamDescs.data(),
                                                               IID_PPV_ARGS(pD3D12Proc->m_spVideoProcessor.GetAddressOf()));
   if (FAILED(hr)) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_check_caps_and_create_processor - CreateVideoProcessor "
                   "failed with HR %x\n",
                   hr);
      return false;
   }

   return true;
}

bool
d3d12_video_processor_create_command_objects(struct d3d12_video_processor *pD3D12Proc)
{
   assert(pD3D12Proc->m_spD3D12VideoDevice);

   D3D12_COMMAND_QUEUE_DESC commandQueueDesc = { D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS };
   HRESULT hr = pD3D12Proc->m_pD3D12Screen->dev->CreateCommandQueue(
      &commandQueueDesc,
      IID_PPV_ARGS(pD3D12Proc->m_spCommandQueue.GetAddressOf()));

   if (FAILED(hr)) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to CreateCommandQueue "
                   "failed with HR %x\n",
                   hr);
      return false;
   }

   hr = pD3D12Proc->m_pD3D12Screen->dev->CreateFence(0,
                                                     D3D12_FENCE_FLAG_SHARED,
                                                     IID_PPV_ARGS(&pD3D12Proc->m_spFence));

   if (FAILED(hr)) {
      debug_printf(
         "[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to CreateFence failed with HR %x\n",
         hr);
      return false;
   }

   pD3D12Proc->m_spCommandAllocators.resize(D3D12_VIDEO_PROC_ASYNC_DEPTH);
   for (uint32_t i = 0; i < pD3D12Proc->m_spCommandAllocators.size() ; i++) {
      hr = pD3D12Proc->m_pD3D12Screen->dev->CreateCommandAllocator(
         D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
         IID_PPV_ARGS(pD3D12Proc->m_spCommandAllocators[i].GetAddressOf()));

      if (FAILED(hr)) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to "
                      "CreateCommandAllocator failed with HR %x\n",
                      hr);
         return false;
      }
   }
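
   // One command allocator per in-flight slot (D3D12_VIDEO_PROC_ASYNC_DEPTH),
   // but a single command list: begin_frame() resets the list against the
   // allocator selected by d3d12_video_processor_pool_current_index() once that
   // slot's previous batch is known to have completed.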

   ComPtr<ID3D12Device4> spD3D12Device4;
   if (FAILED(pD3D12Proc->m_pD3D12Screen->dev->QueryInterface(
          IID_PPV_ARGS(spD3D12Device4.GetAddressOf())))) {
      debug_printf(
         "[d3d12_video_processor] d3d12_video_processor_create_processor - D3D12 Device has no ID3D12Device4 support\n");
      return false;
   }

   hr = spD3D12Device4->CreateCommandList1(0,
                                           D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
                                           D3D12_COMMAND_LIST_FLAG_NONE,
                                           IID_PPV_ARGS(pD3D12Proc->m_spCommandList.GetAddressOf()));

   if (FAILED(hr)) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_create_command_objects - Call to CreateCommandList "
                   "failed with HR %x\n",
                   hr);
      return false;
   }

   return true;
}

D3D12_VIDEO_PROCESS_ORIENTATION
d3d12_video_processor_convert_pipe_rotation(enum pipe_video_vpp_orientation orientation_flags)
{
   D3D12_VIDEO_PROCESS_ORIENTATION result = D3D12_VIDEO_PROCESS_ORIENTATION_DEFAULT;

   if(orientation_flags & PIPE_VIDEO_VPP_ROTATION_90)
   {
      result = (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90_FLIP_HORIZONTAL : D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90;
      debug_printf("d3d12_video_processor_process_frame: Orientation Mode: %s\n", (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90_FLIP_HORIZONTAL" : "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_90");
   }
   else if(orientation_flags & PIPE_VIDEO_VPP_ROTATION_180)
   {
      result = D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_180;
      debug_printf("d3d12_video_processor_process_frame: Orientation Mode: D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_180\n");
   }
   else if(orientation_flags & PIPE_VIDEO_VPP_ROTATION_270)
   {
      result = (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270_FLIP_HORIZONTAL : D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270;
      debug_printf("d3d12_video_processor_process_frame: Orientation Mode: %s\n", (orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL) ? "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270_FLIP_HORIZONTAL" : "D3D12_VIDEO_PROCESS_ORIENTATION_CLOCKWISE_270");
   }
   else if(orientation_flags & PIPE_VIDEO_VPP_FLIP_HORIZONTAL)
   {
      result = D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_HORIZONTAL;
      debug_printf("d3d12_video_processor_process_frame: Orientation Mode: D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_HORIZONTAL\n");
   }
   else if(orientation_flags & PIPE_VIDEO_VPP_FLIP_VERTICAL)
   {
      result = D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_VERTICAL;
      debug_printf("d3d12_video_processor_process_frame: Orientation Mode: D3D12_VIDEO_PROCESS_ORIENTATION_FLIP_VERTICAL\n");
   }

   return result;
}

unsigned int
d3d12_video_processor_pool_current_index(struct d3d12_video_processor *pD3D12Proc)
{
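   // Round-robin slot selection: the slot index cycles with the monotonically
   // increasing batch fence value, so fence value N always maps to slot
   // N % D3D12_VIDEO_PROC_ASYNC_DEPTH (the same indexing used for
   // m_spCommandAllocators and m_PendingFences).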
   return pD3D12Proc->m_fenceValue % D3D12_VIDEO_PROC_ASYNC_DEPTH;
}


bool
d3d12_video_processor_ensure_fence_finished(struct pipe_video_codec *codec,
                                            uint64_t fenceValueToWaitOn,
                                            uint64_t timeout_ns)
{
   bool wait_result = true;
   struct d3d12_video_processor *pD3D12Proc = (struct d3d12_video_processor *) codec;
   HRESULT hr = S_OK;
   uint64_t completedValue = pD3D12Proc->m_spFence->GetCompletedValue();

   debug_printf(
      "[d3d12_video_processor] d3d12_video_processor_ensure_fence_finished - Waiting for fence (with timeout_ns %" PRIu64
      ") to finish with "
      "fenceValue: %" PRIu64 " - Current Fence Completed Value %" PRIu64 "\n",
      timeout_ns,
      fenceValueToWaitOn,
      completedValue);

   if (completedValue < fenceValueToWaitOn) {

      HANDLE event = {};
      int event_fd = 0;
      event = d3d12_fence_create_event(&event_fd);

      hr = pD3D12Proc->m_spFence->SetEventOnCompletion(fenceValueToWaitOn, event);
      if (FAILED(hr)) {
         debug_printf("[d3d12_video_processor] d3d12_video_processor_ensure_fence_finished - SetEventOnCompletion for "
                      "fenceValue %" PRIu64 " failed with HR %x\n",
                      fenceValueToWaitOn,
                      hr);
         goto ensure_fence_finished_fail;
      }

      wait_result = d3d12_fence_wait_event(event, event_fd, timeout_ns);
      d3d12_fence_close_event(event, event_fd);

      debug_printf("[d3d12_video_processor] d3d12_video_processor_ensure_fence_finished - Waiting on fence to be done with "
                   "fenceValue: %" PRIu64 " - current CompletedValue: %" PRIu64 "\n",
                   fenceValueToWaitOn,
                   completedValue);
   } else {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_ensure_fence_finished - Fence already done with "
                   "fenceValue: %" PRIu64 " - current CompletedValue: %" PRIu64 "\n",
                   fenceValueToWaitOn,
                   completedValue);
   }
   return wait_result;

ensure_fence_finished_fail:
   debug_printf("[d3d12_video_processor] d3d12_video_processor_sync_completion failed for fenceValue: %" PRIu64 "\n",
                fenceValueToWaitOn);
   assert(false);
   return false;
}

bool
d3d12_video_processor_sync_completion(struct pipe_video_codec *codec, uint64_t fenceValueToWaitOn, uint64_t timeout_ns)
{
   struct d3d12_video_processor *pD3D12Proc = (struct d3d12_video_processor *) codec;
   assert(pD3D12Proc);
   assert(pD3D12Proc->m_spD3D12VideoDevice);
   assert(pD3D12Proc->m_spCommandQueue);
   HRESULT hr = S_OK;

   ASSERTED bool wait_result = d3d12_video_processor_ensure_fence_finished(codec, fenceValueToWaitOn, timeout_ns);
   assert(wait_result);

   hr =
      pD3D12Proc->m_spCommandAllocators[fenceValueToWaitOn % D3D12_VIDEO_PROC_ASYNC_DEPTH]->Reset();
   if (FAILED(hr)) {
      debug_printf("m_spCommandAllocator->Reset() failed with %x.\n", hr);
      goto sync_with_token_fail;
   }

   // Validate device was not removed
   hr = pD3D12Proc->m_pD3D12Screen->dev->GetDeviceRemovedReason();
   if (hr != S_OK) {
      debug_printf("[d3d12_video_processor] d3d12_video_processor_sync_completion"
                   " - D3D12Device was removed AFTER d3d12_video_processor_ensure_fence_finished "
                   "execution with HR %x, but wasn't before.\n",
                   hr);
      goto sync_with_token_fail;
   }

   debug_printf(
      "[d3d12_video_processor] d3d12_video_processor_sync_completion - GPU execution finalized for fenceValue: %" PRIu64
      "\n",
      fenceValueToWaitOn);

   return wait_result;

sync_with_token_fail:
   debug_printf("[d3d12_video_processor] d3d12_video_processor_sync_completion failed for fenceValue: %" PRIu64 "\n",
                fenceValueToWaitOn);
   assert(false);
   return false;
}

int d3d12_video_processor_fence_wait(struct pipe_video_codec *codec,
                                     struct pipe_fence_handle *fence,
                                     uint64_t timeout)
{
   struct d3d12_fence *fenceValueToWaitOn = (struct d3d12_fence *) fence;
   assert(fenceValueToWaitOn);

   ASSERTED bool wait_res = d3d12_video_processor_sync_completion(codec, fenceValueToWaitOn->value, timeout);

   // Return semantics based on p_video_codec interface
   // ret == 0 -> work in progress
   // ret != 0 -> work completed
   return wait_res ? 1 : 0;
}