//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// SyncVk.cpp:
//    Implements the class methods for SyncVk.
//

#include "libANGLE/renderer/vulkan/SyncVk.h"

#include <cerrno>
#include <limits>
#include <memory>

#include "common/debug.h"
#include "libANGLE/Context.h"
#include "libANGLE/Display.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DisplayVk.h"

#if !defined(ANGLE_PLATFORM_WINDOWS)
#    include <poll.h>
#    include <unistd.h>
#else
#    include <io.h>
#endif
24
25 namespace
26 {
27 // Wait for file descriptor to be signaled
SyncWaitFd(int fd,uint64_t timeoutNs,VkResult timeoutResult=VK_TIMEOUT)28 VkResult SyncWaitFd(int fd, uint64_t timeoutNs, VkResult timeoutResult = VK_TIMEOUT)
29 {
30 #if !defined(ANGLE_PLATFORM_WINDOWS)
31 struct pollfd fds;
32 int ret;
33
34 // Convert nanoseconds to milliseconds
35 int timeoutMs = static_cast<int>(timeoutNs / 1000000);
36 // If timeoutNs was non-zero but less than one millisecond, make it a millisecond.
37 if (timeoutNs > 0 && timeoutNs < 1000000)
38 {
39 timeoutMs = 1;
40 }
41
42 ASSERT(fd >= 0);
43
44 fds.fd = fd;
45 fds.events = POLLIN;
46
47 do
48 {
49 ret = poll(&fds, 1, timeoutMs);
50 if (ret > 0)
51 {
52 if (fds.revents & (POLLERR | POLLNVAL))
53 {
54 return VK_ERROR_UNKNOWN;
55 }
56 return VK_SUCCESS;
57 }
58 else if (ret == 0)
59 {
60 return timeoutResult;
61 }
62 } while (ret == -1 && (errno == EINTR || errno == EAGAIN));
63
64 return VK_ERROR_UNKNOWN;
65 #else
66 UNREACHABLE();
67 return VK_ERROR_UNKNOWN;
68 #endif
69 }
70
71 // Map VkResult to GLenum
MapVkResultToGlenum(VkResult vkResult,angle::Result angleResult,void * outResult)72 void MapVkResultToGlenum(VkResult vkResult, angle::Result angleResult, void *outResult)
73 {
74 GLenum *glEnumOut = static_cast<GLenum *>(outResult);
75 ASSERT(glEnumOut);
76
77 if (angleResult != angle::Result::Continue)
78 {
79 *glEnumOut = GL_WAIT_FAILED;
80 return;
81 }
82
83 switch (vkResult)
84 {
85 case VK_EVENT_SET:
86 *glEnumOut = GL_ALREADY_SIGNALED;
87 break;
88 case VK_SUCCESS:
89 *glEnumOut = GL_CONDITION_SATISFIED;
90 break;
91 case VK_TIMEOUT:
92 *glEnumOut = GL_TIMEOUT_EXPIRED;
93 break;
94 default:
95 *glEnumOut = GL_WAIT_FAILED;
96 break;
97 }
98 }
99
100 // Map VkResult to EGLint
MapVkResultToEglint(VkResult result,angle::Result angleResult,void * outResult)101 void MapVkResultToEglint(VkResult result, angle::Result angleResult, void *outResult)
102 {
103 EGLint *eglIntOut = static_cast<EGLint *>(outResult);
104 ASSERT(eglIntOut);
105
106 if (angleResult != angle::Result::Continue)
107 {
108 *eglIntOut = EGL_FALSE;
109 return;
110 }
111
112 switch (result)
113 {
114 case VK_EVENT_SET:
115 // fall through. EGL doesn't differentiate between event being already set, or set
116 // before timeout.
117 case VK_SUCCESS:
118 *eglIntOut = EGL_CONDITION_SATISFIED_KHR;
119 break;
120 case VK_TIMEOUT:
121 *eglIntOut = EGL_TIMEOUT_EXPIRED_KHR;
122 break;
123 default:
124 *eglIntOut = EGL_FALSE;
125 break;
126 }
127 }
128
129 } // anonymous namespace
130
131 namespace rx
132 {
133 namespace vk
134 {
SyncHelper()135 SyncHelper::SyncHelper() {}
136
~SyncHelper()137 SyncHelper::~SyncHelper() {}
138
releaseToRenderer(Renderer * renderer)139 void SyncHelper::releaseToRenderer(Renderer *renderer) {}
140
// Registers this sync object with |contextVk|.  The context records the sync object so its
// resource use (mUse) becomes valid when the associated commands are submitted; that
// submission may be deferred (see submitSyncIfDeferred).
angle::Result SyncHelper::initialize(ContextVk *contextVk, SyncFenceScope scope)
{
    // A sync object must only be initialized once.
    ASSERT(!mUse.valid());
    return contextVk->onSyncObjectInit(this, scope);
}
146
prepareForClientWait(ErrorContext * context,ContextVk * contextVk,bool flushCommands,uint64_t timeout,VkResult * resultOut)147 angle::Result SyncHelper::prepareForClientWait(ErrorContext *context,
148 ContextVk *contextVk,
149 bool flushCommands,
150 uint64_t timeout,
151 VkResult *resultOut)
152 {
153 // If the event is already set, don't wait
154 bool alreadySignaled = false;
155 ANGLE_TRY(getStatus(context, contextVk, &alreadySignaled));
156 if (alreadySignaled)
157 {
158 *resultOut = VK_EVENT_SET;
159 return angle::Result::Continue;
160 }
161
162 // If timeout is zero, there's no need to wait, so return timeout already.
163 if (timeout == 0)
164 {
165 *resultOut = VK_TIMEOUT;
166 return angle::Result::Continue;
167 }
168
169 // Submit commands if requested
170 if (flushCommands && contextVk)
171 {
172 ANGLE_TRY(contextVk->flushCommandsAndEndRenderPassIfDeferredSyncInit(
173 RenderPassClosureReason::SyncObjectClientWait));
174 }
175
176 *resultOut = VK_INCOMPLETE;
177 return angle::Result::Continue;
178 }
179
// Client-side wait on this sync object.  Non-blocking outcomes are resolved immediately;
// an actual CPU wait is deferred to an unlocked tail call so locks are not held while
// blocking.  |mappingFunction| converts the VkResult to the API-specific value written to
// the appropriate result pointer.
angle::Result SyncHelper::clientWait(ErrorContext *context,
                                     ContextVk *contextVk,
                                     bool flushCommands,
                                     uint64_t timeout,
                                     MapVkResultToApiType mappingFunction,
                                     void *resultOut)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "SyncHelper::clientWait");

    VkResult status = VK_INCOMPLETE;
    ANGLE_TRY(prepareForClientWait(context, contextVk, flushCommands, timeout, &status));

    // VK_INCOMPLETE is the sentinel for "a blocking wait is required".
    if (status != VK_INCOMPLETE)
    {
        mappingFunction(status, angle::Result::Continue, resultOut);
        return angle::Result::Continue;
    }

    Renderer *renderer = context->getRenderer();

    // If we need to perform a CPU wait don't set the resultOut parameter passed into the
    // method, instead set the parameter passed into the unlocked tail call.
    // Note: mUse is captured by value, so the lambda does not depend on this SyncHelper
    // remaining alive.
    auto clientWaitUnlocked = [renderer, context, mappingFunction, use = mUse,
                               timeout](void *resultOut) {
        ANGLE_TRACE_EVENT0("gpu.angle", "SyncHelper::clientWait block (unlocked)");

        VkResult status = VK_INCOMPLETE;
        angle::Result angleResult =
            renderer->waitForResourceUseToFinishWithUserTimeout(context, use, timeout, &status);
        // Note: resultOut may be nullptr through the glFinishFenceNV path, which does not have a
        // return value.
        if (resultOut != nullptr)
        {
            mappingFunction(status, angleResult, resultOut);
        }
    };

    // Schedule the wait to be run at the tail of the current call.
    egl::Display::GetCurrentThreadUnlockedTailCall()->add(clientWaitUnlocked);
    return angle::Result::Continue;
}
221
// Flushing client wait with an effectively infinite timeout (used e.g. by the
// glFinishFenceNV path; see the note in clientWait about that path's result handling).
angle::Result SyncHelper::finish(ContextVk *contextVk)
{
    // |result| is only written on the non-blocking path; the blocking path runs as an
    // unlocked tail call that receives a different result pointer, so it is deliberately
    // discarded here.
    GLenum result;
    return clientWait(contextVk, contextVk, true, UINT64_MAX, MapVkResultToGlenum, &result);
}
227
// Server-side (GPU) wait on this sync object.  ANGLE's per-resource tracking already
// enforces ordering, so only a formal execution barrier is recorded.
angle::Result SyncHelper::serverWait(ContextVk *contextVk)
{
    // If already signaled, no need to wait
    bool alreadySignaled = false;
    ANGLE_TRY(getStatus(contextVk, contextVk, &alreadySignaled));
    if (alreadySignaled)
    {
        return angle::Result::Continue;
    }

    // Every resource already tracks its usage and issues the appropriate barriers, so there's
    // really nothing to do here. An execution barrier is issued to strictly satisfy what the
    // application asked for.
    vk::OutsideRenderPassCommandBuffer *commandBuffer;
    ANGLE_TRY(contextVk->getOutsideRenderPassCommandBuffer({}, &commandBuffer));
    commandBuffer->pipelineBarrier(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
                                   VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, nullptr, 0, nullptr, 0,
                                   nullptr);
    return angle::Result::Continue;
}
248
// Queries, without blocking, whether the commands associated with this sync object have
// finished.  First makes sure the sync has actually been submitted (submission may have
// been deferred), then consults the renderer's resource-use tracking.
angle::Result SyncHelper::getStatus(ErrorContext *context, ContextVk *contextVk, bool *signaledOut)
{
    // Submit commands if it was deferred on the context that issued the sync object
    ANGLE_TRY(submitSyncIfDeferred(contextVk, RenderPassClosureReason::SyncObjectClientWait));
    ASSERT(mUse.valid());
    Renderer *renderer = context->getRenderer();
    if (renderer->hasResourceUseFinished(mUse))
    {
        *signaledOut = true;
    }
    else
    {
        // Check completed commands once before returning, perhaps the serial is actually already
        // finished.
        // We don't call checkCompletedCommandsAndCleanup() to cleanup finished commands immediately
        // if isAsyncCommandBufferResetAndGarbageCleanupEnabled feature is turned off.
        // Because when that feature is turned off, vkResetCommandBuffer() is called in cleanup
        // step, and it must take the CommandPoolAccess::mCmdPoolMutex lock, see details in
        // CommandPoolAccess::collectPrimaryCommandBuffer. This means the cleanup step can
        // be blocked by command buffer recording if another thread calls
        // CommandPoolAccess::flushRenderPassCommands(), which is against EGL spec where
        // eglClientWaitSync() should return immediately with timeout == 0.
        if (renderer->isAsyncCommandBufferResetAndGarbageCleanupEnabled())
        {
            ANGLE_TRY(renderer->checkCompletedCommandsAndCleanup(context));
        }
        else
        {
            ANGLE_TRY(renderer->checkCompletedCommands(context));
        }

        *signaledOut = renderer->hasResourceUseFinished(mUse);
    }
    return angle::Result::Continue;
}
284
// Ensures the commands associated with this sync object have been submitted.  Submission
// may have been deferred on the issuing context or a share-group sibling; flush whichever
// context still holds the use unsubmitted.  |contextVk| may be null (EGL paths), in which
// case nothing is done here.
angle::Result SyncHelper::submitSyncIfDeferred(ContextVk *contextVk, RenderPassClosureReason reason)
{
    if (contextVk == nullptr)
    {
        return angle::Result::Continue;
    }

    if (contextVk->getRenderer()->hasResourceUseSubmitted(mUse))
    {
        return angle::Result::Continue;
    }

    // The submission of a sync object may be deferred to allow further optimizations to an open
    // render pass before a submission happens for another reason. If the sync object is being
    // waited on by the current context, the application must have used GL_SYNC_FLUSH_COMMANDS_BIT.
    // However, when waited on by other contexts, the application must have ensured the original
    // context is flushed. Due to deferred flushes, a glFlush is not sufficient to guarantee this.
    //
    // Deferring the submission is restricted to non-EGL sync objects, so it's sufficient to ensure
    // that the contexts in the share group issue their deferred flushes.
    for (auto context : contextVk->getShareGroup()->getContexts())
    {
        ContextVk *sharedContextVk = vk::GetImpl(context.second);
        if (sharedContextVk->hasUnsubmittedUse(mUse))
        {
            ANGLE_TRY(sharedContextVk->flushCommandsAndEndRenderPassIfDeferredSyncInit(reason));
            break;
        }
    }
    // Note mUse could still be invalid here if it is inserted on a fresh created context, i.e.,
    // fence is tracking nothing and is finished when inserted..
    ASSERT(contextVk->getRenderer()->hasResourceUseSubmitted(mUse));

    return angle::Result::Continue;
}
320
// Starts with no device, no fd, and fd status VK_INCOMPLETE (meaning "no fd import or
// export has happened yet" — see the ASSERTs in init()/exportFd()).
ExternalFence::ExternalFence()
    : mDevice(VK_NULL_HANDLE), mFenceFdStatus(VK_INCOMPLETE), mFenceFd(kInvalidFenceFd)
{}
324
// Destroys the owned Vulkan fence (if one was created) and closes the owned native fd
// (if one was imported or exported).
ExternalFence::~ExternalFence()
{
    if (mDevice != VK_NULL_HANDLE)
    {
        mFence.destroy(mDevice);
    }

    if (mFenceFd != kInvalidFenceFd)
    {
        close(mFenceFd);
    }
}
337
init(VkDevice device,const VkFenceCreateInfo & createInfo)338 VkResult ExternalFence::init(VkDevice device, const VkFenceCreateInfo &createInfo)
339 {
340 ASSERT(device != VK_NULL_HANDLE);
341 ASSERT(mFenceFdStatus == VK_INCOMPLETE && mFenceFd == kInvalidFenceFd);
342 ASSERT(mDevice == VK_NULL_HANDLE);
343 mDevice = device;
344 return mFence.init(device, createInfo);
345 }
346
init(int fenceFd)347 void ExternalFence::init(int fenceFd)
348 {
349 ASSERT(fenceFd != kInvalidFenceFd);
350 ASSERT(mFenceFdStatus == VK_INCOMPLETE && mFenceFd == kInvalidFenceFd);
351 mFenceFdStatus = VK_SUCCESS;
352 mFenceFd = fenceFd;
353 }
354
getStatus(VkDevice device) const355 VkResult ExternalFence::getStatus(VkDevice device) const
356 {
357 if (mFenceFdStatus == VK_SUCCESS)
358 {
359 return SyncWaitFd(mFenceFd, 0, VK_NOT_READY);
360 }
361 return mFence.getStatus(device);
362 }
363
wait(VkDevice device,uint64_t timeout) const364 VkResult ExternalFence::wait(VkDevice device, uint64_t timeout) const
365 {
366 if (mFenceFdStatus == VK_SUCCESS)
367 {
368 return SyncWaitFd(mFenceFd, timeout);
369 }
370 return mFence.wait(device, timeout);
371 }
372
// Exports a native fd from the Vulkan fence.  Must only be attempted once; the resulting
// status is cached and later retrieved via getFenceFdStatus() (see
// SyncHelperNativeFence::initializeWithFd).
void ExternalFence::exportFd(VkDevice device, const VkFenceGetFdInfoKHR &fenceGetFdInfo)
{
    ASSERT(mFenceFdStatus == VK_INCOMPLETE && mFenceFd == kInvalidFenceFd);
    mFenceFdStatus = mFence.exportFd(device, fenceGetFdInfo, &mFenceFd);
    ASSERT(mFenceFdStatus != VK_INCOMPLETE);
}
379
SyncHelperNativeFence()380 SyncHelperNativeFence::SyncHelperNativeFence()
381 {
382 mExternalFence = std::make_shared<ExternalFence>();
383 }
384
~SyncHelperNativeFence()385 SyncHelperNativeFence::~SyncHelperNativeFence() {}
386
// Drops this helper's reference to the external fence.  The fence itself is destroyed
// once all other holders (e.g. pending submissions or unlocked waits) release theirs.
void SyncHelperNativeFence::releaseToRenderer(Renderer *renderer)
{
    mExternalFence.reset();
}
391
// Initializes the sync object from |inFd| per EGL_ANDROID_native_fence_sync:
//  - A valid fd is adopted directly (ownership transfers to the ExternalFence).
//  - Otherwise an exportable VkFence is created and the pending commands are flushed and
//    submitted with it, producing a new native fence fd.
angle::Result SyncHelperNativeFence::initializeWithFd(ContextVk *contextVk, int inFd)
{
    ASSERT(inFd >= kInvalidFenceFd);

    // If valid FD provided by application - import it to fence.
    if (inFd > kInvalidFenceFd)
    {
        // File descriptor ownership: EGL_ANDROID_native_fence_sync
        // Whenever a file descriptor is passed into or returned from an
        // EGL call in this extension, ownership of that file descriptor is
        // transferred. The recipient of the file descriptor must close it when it is
        // no longer needed, and the provider of the file descriptor must dup it
        // before providing it if they require continued use of the native fence.
        mExternalFence->init(inFd);
        return angle::Result::Continue;
    }

    Renderer *renderer = contextVk->getRenderer();
    VkDevice device = renderer->getDevice();

    // Request sync-fd exportability for the fence about to be created.
    VkExportFenceCreateInfo exportCreateInfo = {};
    exportCreateInfo.sType = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO;
    exportCreateInfo.pNext = nullptr;
    exportCreateInfo.handleTypes = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;

    // Create fenceInfo base.
    VkFenceCreateInfo fenceCreateInfo = {};
    fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    fenceCreateInfo.flags = 0;
    fenceCreateInfo.pNext = &exportCreateInfo;

    // Initialize/create a VkFence handle
    ANGLE_VK_TRY(contextVk, mExternalFence->init(device, fenceCreateInfo));

    // invalid FD provided by application - create one with fence.
    /*
      Spec: "When a fence sync object is created or when an EGL native fence sync
      object is created with the EGL_SYNC_NATIVE_FENCE_FD_ANDROID attribute set to
      EGL_NO_NATIVE_FENCE_FD_ANDROID, eglCreateSyncKHR also inserts a fence command
      into the command stream of the bound client API's current context and associates it
      with the newly created sync object.
    */
    // Flush current pending set of commands providing the fence...
    ANGLE_TRY(contextVk->flushAndSubmitCommands(nullptr, &mExternalFence,
                                                RenderPassClosureReason::SyncObjectWithFdInit));

    // Propagate any failure recorded when the fd was exported during submission.
    ANGLE_VK_TRY(contextVk, mExternalFence->getFenceFdStatus());

    return angle::Result::Continue;
}
442
prepareForClientWait(ErrorContext * context,ContextVk * contextVk,bool flushCommands,uint64_t timeout,VkResult * resultOut)443 angle::Result SyncHelperNativeFence::prepareForClientWait(ErrorContext *context,
444 ContextVk *contextVk,
445 bool flushCommands,
446 uint64_t timeout,
447 VkResult *resultOut)
448 {
449 // If already signaled, don't wait
450 bool alreadySignaled = false;
451 ANGLE_TRY(getStatus(context, contextVk, &alreadySignaled));
452 if (alreadySignaled)
453 {
454 *resultOut = VK_SUCCESS;
455 return angle::Result::Continue;
456 }
457
458 // If timeout is zero, there's no need to wait, so return timeout already.
459 if (timeout == 0)
460 {
461 *resultOut = VK_TIMEOUT;
462 return angle::Result::Continue;
463 }
464
465 if (flushCommands && contextVk)
466 {
467 ANGLE_TRY(contextVk->flushAndSubmitCommands(nullptr, nullptr,
468 RenderPassClosureReason::SyncObjectClientWait));
469 }
470
471 *resultOut = VK_INCOMPLETE;
472 return angle::Result::Continue;
473 }
474
// Client wait on the native fence.  Fast paths are resolved by prepareForClientWait; the
// blocking wait runs as an unlocked tail call.  The ExternalFence shared_ptr is captured
// by value, keeping the fence alive even if this helper is destroyed first.
angle::Result SyncHelperNativeFence::clientWait(ErrorContext *context,
                                                ContextVk *contextVk,
                                                bool flushCommands,
                                                uint64_t timeout,
                                                MapVkResultToApiType mappingFunction,
                                                void *resultOut)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "SyncHelperNativeFence::clientWait");

    VkResult status = VK_INCOMPLETE;
    ANGLE_TRY(prepareForClientWait(context, contextVk, flushCommands, timeout, &status));

    // Anything other than VK_INCOMPLETE was resolved without blocking.
    if (status != VK_INCOMPLETE)
    {
        mappingFunction(status, angle::Result::Continue, resultOut);
        return angle::Result::Continue;
    }

    Renderer *renderer = context->getRenderer();

    // Block in the unlocked tail call, writing the result through the tail call's pointer.
    auto clientWaitUnlocked = [device = renderer->getDevice(), fence = mExternalFence,
                               mappingFunction, timeout](void *resultOut) {
        ANGLE_TRACE_EVENT0("gpu.angle", "SyncHelperNativeFence::clientWait block (unlocked)");
        ASSERT(resultOut);

        VkResult status = fence->wait(device, timeout);
        mappingFunction(status, angle::Result::Continue, resultOut);
    };

    egl::Display::GetCurrentThreadUnlockedTailCall()->add(clientWaitUnlocked);
    return angle::Result::Continue;
}
507
// Server-side wait: imports the native fence fd into a temporary semaphore that the next
// vkQueueSubmit will wait on.
angle::Result SyncHelperNativeFence::serverWait(ContextVk *contextVk)
{
    Renderer *renderer = contextVk->getRenderer();

    // If already signaled, no need to wait
    bool alreadySignaled = false;
    ANGLE_TRY(getStatus(contextVk, contextVk, &alreadySignaled));
    if (alreadySignaled)
    {
        return angle::Result::Continue;
    }

    VkDevice device = renderer->getDevice();
    DeviceScoped<Semaphore> waitSemaphore(device);
    // Wait semaphore for next vkQueueSubmit().
    // Create a Semaphore with imported fenceFd.
    ANGLE_VK_TRY(contextVk, waitSemaphore.get().init(device));

    VkImportSemaphoreFdInfoKHR importFdInfo = {};
    importFdInfo.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
    importFdInfo.semaphore = waitSemaphore.get().getHandle();
    importFdInfo.flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR;
    importFdInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
    // The semaphore consumes the imported fd, so hand it a dup and keep ours.
    // NOTE(review): dup() failure yields fd == -1, presumably rejected by importFd below —
    // confirm that error path.
    importFdInfo.fd = dup(mExternalFence->getFenceFd());
    ANGLE_VK_TRY(contextVk, waitSemaphore.get().importFd(device, importFdInfo));

    // Add semaphore to next submit job.
    contextVk->addWaitSemaphore(waitSemaphore.get().getHandle(),
                                VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
    contextVk->addGarbage(&waitSemaphore.get());  // This releases the handle.
    return angle::Result::Continue;
}
540
// Non-blocking signal query.  VK_NOT_READY means "unsignaled", not an error; any other
// non-success result is propagated as a failure.
angle::Result SyncHelperNativeFence::getStatus(ErrorContext *context,
                                               ContextVk *contextVk,
                                               bool *signaledOut)
{
    VkResult result = mExternalFence->getStatus(context->getDevice());
    if (result != VK_NOT_READY)
    {
        ANGLE_VK_TRY(context, result);
    }
    *signaledOut = (result == VK_SUCCESS);
    return angle::Result::Continue;
}
553
dupNativeFenceFD(ErrorContext * context,int * fdOut) const554 angle::Result SyncHelperNativeFence::dupNativeFenceFD(ErrorContext *context, int *fdOut) const
555 {
556 if (mExternalFence->getFenceFd() == kInvalidFenceFd)
557 {
558 return angle::Result::Stop;
559 }
560
561 *fdOut = dup(mExternalFence->getFenceFd());
562
563 return angle::Result::Continue;
564 }
565
566 } // namespace vk
567
SyncVk()568 SyncVk::SyncVk() : SyncImpl() {}
569
~SyncVk()570 SyncVk::~SyncVk() {}
571
// Releases the helper's renderer-side resources; the helper itself is a member and is
// destroyed with this object.
void SyncVk::onDestroy(const gl::Context *context)
{
    mSyncHelper.releaseToRenderer(vk::GetImpl(context)->getRenderer());
}
576
// Creates the fence sync (glFenceSync).  GL only allows GL_SYNC_GPU_COMMANDS_COMPLETE
// with zero flags, enforced here by assertion.
angle::Result SyncVk::set(const gl::Context *context, GLenum condition, GLbitfield flags)
{
    ASSERT(condition == GL_SYNC_GPU_COMMANDS_COMPLETE);
    ASSERT(flags == 0);

    return mSyncHelper.initialize(vk::GetImpl(context), SyncFenceScope::CurrentContextToShareGroup);
}
584
clientWait(const gl::Context * context,GLbitfield flags,GLuint64 timeout,GLenum * outResult)585 angle::Result SyncVk::clientWait(const gl::Context *context,
586 GLbitfield flags,
587 GLuint64 timeout,
588 GLenum *outResult)
589 {
590 ContextVk *contextVk = vk::GetImpl(context);
591
592 ASSERT((flags & ~GL_SYNC_FLUSH_COMMANDS_BIT) == 0);
593
594 bool flush = (flags & GL_SYNC_FLUSH_COMMANDS_BIT) != 0;
595
596 return mSyncHelper.clientWait(contextVk, contextVk, flush, static_cast<uint64_t>(timeout),
597 MapVkResultToGlenum, outResult);
598 }
599
// Server wait entry point (glWaitSync).  No flags are defined and the timeout must be
// GL_TIMEOUT_IGNORED, both enforced by assertion.
angle::Result SyncVk::serverWait(const gl::Context *context, GLbitfield flags, GLuint64 timeout)
{
    ASSERT(flags == 0);
    ASSERT(timeout == GL_TIMEOUT_IGNORED);

    ContextVk *contextVk = vk::GetImpl(context);
    return mSyncHelper.serverWait(contextVk);
}
608
getStatus(const gl::Context * context,GLint * outResult)609 angle::Result SyncVk::getStatus(const gl::Context *context, GLint *outResult)
610 {
611 ContextVk *contextVk = vk::GetImpl(context);
612 bool signaled = false;
613 ANGLE_TRY(mSyncHelper.getStatus(contextVk, contextVk, &signaled));
614
615 *outResult = signaled ? GL_SIGNALED : GL_UNSIGNALED;
616 return angle::Result::Continue;
617 }
618
EGLSyncVk()619 EGLSyncVk::EGLSyncVk() : EGLSyncImpl(), mSyncHelper(nullptr) {}
620
~EGLSyncVk()621 EGLSyncVk::~EGLSyncVk() {}
622
// Releases the helper's renderer-side resources via the display's renderer.
void EGLSyncVk::onDestroy(const egl::Display *display)
{
    mSyncHelper->releaseToRenderer(vk::GetImpl(display)->getRenderer());
}
627
initialize(const egl::Display * display,const gl::Context * context,EGLenum type,const egl::AttributeMap & attribs)628 egl::Error EGLSyncVk::initialize(const egl::Display *display,
629 const gl::Context *context,
630 EGLenum type,
631 const egl::AttributeMap &attribs)
632 {
633 ASSERT(context != nullptr);
634
635 switch (type)
636 {
637 case EGL_SYNC_FENCE_KHR:
638 case EGL_SYNC_GLOBAL_FENCE_ANGLE:
639 {
640 vk::SyncHelper *syncHelper = new vk::SyncHelper();
641 mSyncHelper.reset(syncHelper);
642 const SyncFenceScope scope = type == EGL_SYNC_GLOBAL_FENCE_ANGLE
643 ? SyncFenceScope::AllContextsToAllContexts
644 : SyncFenceScope::CurrentContextToAllContexts;
645 if (syncHelper->initialize(vk::GetImpl(context), scope) == angle::Result::Stop)
646 {
647 return egl::Error(EGL_BAD_ALLOC, "eglCreateSyncKHR failed to create sync object");
648 }
649 return egl::NoError();
650 }
651 case EGL_SYNC_NATIVE_FENCE_ANDROID:
652 {
653 vk::SyncHelperNativeFence *syncHelper = new vk::SyncHelperNativeFence();
654 mSyncHelper.reset(syncHelper);
655 EGLint nativeFenceFd =
656 attribs.getAsInt(EGL_SYNC_NATIVE_FENCE_FD_ANDROID, EGL_NO_NATIVE_FENCE_FD_ANDROID);
657 return angle::ToEGL(syncHelper->initializeWithFd(vk::GetImpl(context), nativeFenceFd),
658 EGL_BAD_ALLOC);
659 }
660 default:
661 UNREACHABLE();
662 return egl::Error(EGL_BAD_ALLOC);
663 }
664 }
665
// Client wait entry point (eglClientWaitSyncKHR).  |context| may be null (no context
// bound); a flush is only attempted when a context is provided and
// EGL_SYNC_FLUSH_COMMANDS_BIT_KHR is set.
egl::Error EGLSyncVk::clientWait(const egl::Display *display,
                                 const gl::Context *context,
                                 EGLint flags,
                                 EGLTime timeout,
                                 EGLint *outResult)
{
    ASSERT((flags & ~EGL_SYNC_FLUSH_COMMANDS_BIT_KHR) == 0);

    bool flush = (flags & EGL_SYNC_FLUSH_COMMANDS_BIT_KHR) != 0;

    // Only hand the helper a context when there is one to flush.
    ContextVk *contextVk = context != nullptr && flush ? vk::GetImpl(context) : nullptr;
    if (mSyncHelper->clientWait(vk::GetImpl(display), contextVk, flush,
                                static_cast<uint64_t>(timeout), MapVkResultToEglint,
                                outResult) == angle::Result::Stop)
    {
        return egl::Error(EGL_BAD_ALLOC);
    }

    return egl::NoError();
}
686
// Server wait entry point (eglWaitSyncKHR).  Delegates to the helper; failures map to
// EGL_BAD_ALLOC.
egl::Error EGLSyncVk::serverWait(const egl::Display *display,
                                 const gl::Context *context,
                                 EGLint flags)
{
    // Server wait requires a valid bound context.
    ASSERT(context);

    // No flags are currently implemented.
    ASSERT(flags == 0);

    ContextVk *contextVk = vk::GetImpl(context);
    return angle::ToEGL(mSyncHelper->serverWait(contextVk), EGL_BAD_ALLOC);
}
700
getStatus(const egl::Display * display,EGLint * outStatus)701 egl::Error EGLSyncVk::getStatus(const egl::Display *display, EGLint *outStatus)
702 {
703 bool signaled = false;
704 if (mSyncHelper->getStatus(vk::GetImpl(display), nullptr, &signaled) == angle::Result::Stop)
705 {
706 return egl::Error(EGL_BAD_ALLOC);
707 }
708
709 *outStatus = signaled ? EGL_SIGNALED_KHR : EGL_UNSIGNALED_KHR;
710 return egl::NoError();
711 }
712
// Duplicates the native fence fd (eglDupNativeFenceFDANDROID).  Failures map to
// EGL_BAD_PARAMETER.
egl::Error EGLSyncVk::dupNativeFenceFD(const egl::Display *display, EGLint *fdOut) const
{
    return angle::ToEGL(mSyncHelper->dupNativeFenceFD(vk::GetImpl(display), fdOut),
                        EGL_BAD_PARAMETER);
}
718
719 } // namespace rx
720