/*
 * Copyright 2023 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "tools/window/VulkanWindowContext.h"

#include "include/core/SkSurface.h"
#include "include/gpu/GrBackendSemaphore.h"
#include "include/gpu/GrBackendSurface.h"
#include "include/gpu/GrDirectContext.h"
#include "include/gpu/ganesh/SkSurfaceGanesh.h"
#include "include/gpu/ganesh/vk/GrVkBackendSemaphore.h"
#include "include/gpu/ganesh/vk/GrVkBackendSurface.h"
#include "include/gpu/ganesh/vk/GrVkDirectContext.h"
#include "include/gpu/vk/GrVkTypes.h"
#include "include/gpu/vk/VulkanExtensions.h"
#include "include/gpu/vk/VulkanMutableTextureState.h"
#include "src/base/SkAutoMalloc.h"
#include "src/gpu/ganesh/vk/GrVkImage.h"
#include "src/gpu/ganesh/vk/GrVkUtil.h"
#include "src/gpu/vk/VulkanInterface.h"

#ifdef VK_USE_PLATFORM_WIN32_KHR
// windows wants to define this as CreateSemaphoreA or CreateSemaphoreW
#undef CreateSemaphore
#endif

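// Helper macros that load instance-level and device-level Vulkan entry points
// through the backend context's fGetProc and store them in the corresponding
// f<Name> function-pointer members.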
#define GET_PROC(F) f ## F = \
    (PFN_vk ## F) backendContext.fGetProc("vk" #F, fInstance, VK_NULL_HANDLE)
#define GET_DEV_PROC(F) f ## F = \
    (PFN_vk ## F) backendContext.fGetProc("vk" #F, VK_NULL_HANDLE, fDevice)

namespace skwindow::internal {

VulkanWindowContext::VulkanWindowContext(const DisplayParams& params,
                                         CreateVkSurfaceFn createVkSurface,
                                         CanPresentFn canPresent,
                                         PFN_vkGetInstanceProcAddr instProc)
        : WindowContext(params)
        , fCreateVkSurfaceFn(std::move(createVkSurface))
        , fCanPresentFn(std::move(canPresent))
        , fSurface(VK_NULL_HANDLE)
        , fSwapchain(VK_NULL_HANDLE)
        , fImages(nullptr)
        , fImageLayouts(nullptr)
        , fSurfaces(nullptr)
        , fBackbuffers(nullptr) {
    fGetInstanceProcAddr = instProc;
    this->initializeContext();
}

void VulkanWindowContext::initializeContext() {
    SkASSERT(!fContext);
    // any config code here (particularly for msaa)?

    PFN_vkGetInstanceProcAddr getInstanceProc = fGetInstanceProcAddr;
    skgpu::VulkanBackendContext backendContext;
    skgpu::VulkanExtensions extensions;
    VkPhysicalDeviceFeatures2 features;
    if (!sk_gpu_test::CreateVkBackendContext(getInstanceProc, &backendContext, &extensions,
                                             &features, &fDebugCallback, &fPresentQueueIndex,
                                             fCanPresentFn,
                                             fDisplayParams.fCreateProtectedNativeBackend)) {
        sk_gpu_test::FreeVulkanFeaturesStructs(&features);
        return;
    }

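    // VK_KHR_surface and VK_KHR_swapchain (at least the requested extension revisions)
    // are required to present; bail out if either is missing.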
    if (!extensions.hasExtension(VK_KHR_SURFACE_EXTENSION_NAME, 25) ||
        !extensions.hasExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, 68)) {
        sk_gpu_test::FreeVulkanFeaturesStructs(&features);
        return;
    }

    fInstance = backendContext.fInstance;
    fPhysicalDevice = backendContext.fPhysicalDevice;
    fDevice = backendContext.fDevice;
    fGraphicsQueueIndex = backendContext.fGraphicsQueueIndex;
    fGraphicsQueue = backendContext.fQueue;

    PFN_vkGetPhysicalDeviceProperties localGetPhysicalDeviceProperties =
            reinterpret_cast<PFN_vkGetPhysicalDeviceProperties>(
                    backendContext.fGetProc("vkGetPhysicalDeviceProperties",
                                            backendContext.fInstance,
                                            VK_NULL_HANDLE));
    if (!localGetPhysicalDeviceProperties) {
        sk_gpu_test::FreeVulkanFeaturesStructs(&features);
        return;
    }
    VkPhysicalDeviceProperties physDeviceProperties;
    localGetPhysicalDeviceProperties(backendContext.fPhysicalDevice, &physDeviceProperties);
    uint32_t physDevVersion = physDeviceProperties.apiVersion;

    fInterface.reset(new skgpu::VulkanInterface(backendContext.fGetProc,
                                                fInstance,
                                                fDevice,
                                                backendContext.fMaxAPIVersion,
                                                physDevVersion,
                                                &extensions));

    GET_PROC(DestroyInstance);
    if (fDebugCallback != VK_NULL_HANDLE) {
        GET_PROC(DestroyDebugReportCallbackEXT);
    }
    GET_PROC(DestroySurfaceKHR);
    GET_PROC(GetPhysicalDeviceSurfaceSupportKHR);
    GET_PROC(GetPhysicalDeviceSurfaceCapabilitiesKHR);
    GET_PROC(GetPhysicalDeviceSurfaceFormatsKHR);
    GET_PROC(GetPhysicalDeviceSurfacePresentModesKHR);
    GET_DEV_PROC(DeviceWaitIdle);
    GET_DEV_PROC(QueueWaitIdle);
    GET_DEV_PROC(DestroyDevice);
    GET_DEV_PROC(CreateSwapchainKHR);
    GET_DEV_PROC(DestroySwapchainKHR);
    GET_DEV_PROC(GetSwapchainImagesKHR);
    GET_DEV_PROC(AcquireNextImageKHR);
    GET_DEV_PROC(QueuePresentKHR);
    GET_DEV_PROC(GetDeviceQueue);

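    // Create the Ganesh GrDirectContext on top of the Vulkan device; the swapchain
    // surfaces created below are wrapped against this context.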
    fContext = GrDirectContexts::MakeVulkan(backendContext, fDisplayParams.fGrContextOptions);

    fSurface = fCreateVkSurfaceFn(fInstance);
    if (VK_NULL_HANDLE == fSurface) {
        this->destroyContext();
        sk_gpu_test::FreeVulkanFeaturesStructs(&features);
        return;
    }

    VkBool32 supported;
    VkResult res = fGetPhysicalDeviceSurfaceSupportKHR(fPhysicalDevice, fPresentQueueIndex,
                                                       fSurface, &supported);
    if (VK_SUCCESS != res) {
        this->destroyContext();
        sk_gpu_test::FreeVulkanFeaturesStructs(&features);
        return;
    }

    if (!this->createSwapchain(-1, -1, fDisplayParams)) {
        this->destroyContext();
        sk_gpu_test::FreeVulkanFeaturesStructs(&features);
        return;
    }

    // create presentQueue
    fGetDeviceQueue(fDevice, fPresentQueueIndex, 0, &fPresentQueue);
    sk_gpu_test::FreeVulkanFeaturesStructs(&features);
}

bool VulkanWindowContext::createSwapchain(int width, int height,
                                          const DisplayParams& params) {
    // check for capabilities
    VkSurfaceCapabilitiesKHR caps;
    VkResult res = fGetPhysicalDeviceSurfaceCapabilitiesKHR(fPhysicalDevice, fSurface, &caps);
    if (VK_SUCCESS != res) {
        return false;
    }

    uint32_t surfaceFormatCount;
    res = fGetPhysicalDeviceSurfaceFormatsKHR(fPhysicalDevice, fSurface, &surfaceFormatCount,
                                              nullptr);
    if (VK_SUCCESS != res) {
        return false;
    }

    SkAutoMalloc surfaceFormatAlloc(surfaceFormatCount * sizeof(VkSurfaceFormatKHR));
    VkSurfaceFormatKHR* surfaceFormats = (VkSurfaceFormatKHR*)surfaceFormatAlloc.get();
    res = fGetPhysicalDeviceSurfaceFormatsKHR(fPhysicalDevice, fSurface, &surfaceFormatCount,
                                              surfaceFormats);
    if (VK_SUCCESS != res) {
        return false;
    }

    uint32_t presentModeCount;
    res = fGetPhysicalDeviceSurfacePresentModesKHR(fPhysicalDevice, fSurface, &presentModeCount,
                                                   nullptr);
    if (VK_SUCCESS != res) {
        return false;
    }

    SkAutoMalloc presentModeAlloc(presentModeCount * sizeof(VkPresentModeKHR));
    VkPresentModeKHR* presentModes = (VkPresentModeKHR*)presentModeAlloc.get();
    res = fGetPhysicalDeviceSurfacePresentModesKHR(fPhysicalDevice, fSurface, &presentModeCount,
                                                   presentModes);
    if (VK_SUCCESS != res) {
        return false;
    }

    VkExtent2D extent = caps.currentExtent;
    // use the hints
    if (extent.width == (uint32_t)-1) {
        extent.width = width;
        extent.height = height;
    }

    // clamp width; to protect us from broken hints
    if (extent.width < caps.minImageExtent.width) {
        extent.width = caps.minImageExtent.width;
    } else if (extent.width > caps.maxImageExtent.width) {
        extent.width = caps.maxImageExtent.width;
    }
    // clamp height
    if (extent.height < caps.minImageExtent.height) {
        extent.height = caps.minImageExtent.height;
    } else if (extent.height > caps.maxImageExtent.height) {
        extent.height = caps.maxImageExtent.height;
    }

    fWidth = (int)extent.width;
    fHeight = (int)extent.height;

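    // Request two images beyond the surface's minimum so acquire and present can be
    // pipelined; clamp to maxImageCount below (a value of 0 means no upper limit).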
    uint32_t imageCount = caps.minImageCount + 2;
    if (caps.maxImageCount > 0 && imageCount > caps.maxImageCount) {
        // Application must settle for fewer images than desired:
        imageCount = caps.maxImageCount;
    }

    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                                   VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    SkASSERT((caps.supportedUsageFlags & usageFlags) == usageFlags);
    if (caps.supportedUsageFlags & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) {
        usageFlags |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
    }
    if (caps.supportedUsageFlags & VK_IMAGE_USAGE_SAMPLED_BIT) {
        usageFlags |= VK_IMAGE_USAGE_SAMPLED_BIT;
    }
    SkASSERT(caps.supportedTransforms & caps.currentTransform);
    SkASSERT(caps.supportedCompositeAlpha & (VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
                                             VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR));
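    // Prefer the inherited composite alpha mode when the surface supports it,
    // otherwise fall back to opaque.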
    VkCompositeAlphaFlagBitsKHR composite_alpha =
            (caps.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR) ?
            VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR :
            VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;

    // Pick our surface format.
    VkFormat surfaceFormat = VK_FORMAT_UNDEFINED;
    VkColorSpaceKHR colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
    for (uint32_t i = 0; i < surfaceFormatCount; ++i) {
        VkFormat localFormat = surfaceFormats[i].format;
        if (GrVkFormatIsSupported(localFormat)) {
            surfaceFormat = localFormat;
            colorSpace = surfaceFormats[i].colorSpace;
            break;
        }
    }
    fDisplayParams = params;
    fSampleCount = std::max(1, params.fMSAASampleCount);
    fStencilBits = 8;

    if (VK_FORMAT_UNDEFINED == surfaceFormat) {
        return false;
    }

    SkColorType colorType;
    switch (surfaceFormat) {
        case VK_FORMAT_R8G8B8A8_UNORM: // fall through
        case VK_FORMAT_R8G8B8A8_SRGB:
            colorType = kRGBA_8888_SkColorType;
            break;
        case VK_FORMAT_B8G8R8A8_UNORM:
            colorType = kBGRA_8888_SkColorType;
            break;
        default:
            return false;
    }

    // If mailbox mode is available, use it, as it is the lowest-latency non-
    // tearing mode. If not, fall back to FIFO which is always available.
    VkPresentModeKHR mode = VK_PRESENT_MODE_FIFO_KHR;
    bool hasImmediate = false;
    for (uint32_t i = 0; i < presentModeCount; ++i) {
        // use mailbox
        if (VK_PRESENT_MODE_MAILBOX_KHR == presentModes[i]) {
            mode = VK_PRESENT_MODE_MAILBOX_KHR;
        }
        if (VK_PRESENT_MODE_IMMEDIATE_KHR == presentModes[i]) {
            hasImmediate = true;
        }
    }
    if (params.fDisableVsync && hasImmediate) {
        mode = VK_PRESENT_MODE_IMMEDIATE_KHR;
    }

    VkSwapchainCreateInfoKHR swapchainCreateInfo;
    memset(&swapchainCreateInfo, 0, sizeof(VkSwapchainCreateInfoKHR));
    swapchainCreateInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
    swapchainCreateInfo.flags = fDisplayParams.fCreateProtectedNativeBackend
                                        ? VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR
                                        : 0;
    swapchainCreateInfo.surface = fSurface;
    swapchainCreateInfo.minImageCount = imageCount;
    swapchainCreateInfo.imageFormat = surfaceFormat;
    swapchainCreateInfo.imageColorSpace = colorSpace;
    swapchainCreateInfo.imageExtent = extent;
    swapchainCreateInfo.imageArrayLayers = 1;
    swapchainCreateInfo.imageUsage = usageFlags;

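    // If the graphics and present queues come from different families, the swapchain
    // images must be shared between them (concurrent mode); otherwise exclusive
    // ownership is sufficient.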
    uint32_t queueFamilies[] = { fGraphicsQueueIndex, fPresentQueueIndex };
    if (fGraphicsQueueIndex != fPresentQueueIndex) {
        swapchainCreateInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
        swapchainCreateInfo.queueFamilyIndexCount = 2;
        swapchainCreateInfo.pQueueFamilyIndices = queueFamilies;
    } else {
        swapchainCreateInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
        swapchainCreateInfo.queueFamilyIndexCount = 0;
        swapchainCreateInfo.pQueueFamilyIndices = nullptr;
    }

    swapchainCreateInfo.preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
    swapchainCreateInfo.compositeAlpha = composite_alpha;
    swapchainCreateInfo.presentMode = mode;
    swapchainCreateInfo.clipped = true;
    swapchainCreateInfo.oldSwapchain = fSwapchain;

    res = fCreateSwapchainKHR(fDevice, &swapchainCreateInfo, nullptr, &fSwapchain);
    if (VK_SUCCESS != res) {
        return false;
    }

    // destroy the old swapchain
    if (swapchainCreateInfo.oldSwapchain != VK_NULL_HANDLE) {
        fDeviceWaitIdle(fDevice);

        this->destroyBuffers();

        fDestroySwapchainKHR(fDevice, swapchainCreateInfo.oldSwapchain, nullptr);
    }

    if (!this->createBuffers(swapchainCreateInfo.imageFormat, usageFlags, colorType,
                             swapchainCreateInfo.imageSharingMode)) {
        fDeviceWaitIdle(fDevice);

        this->destroyBuffers();

        fDestroySwapchainKHR(fDevice, swapchainCreateInfo.oldSwapchain, nullptr);
    }

    return true;
}

bool VulkanWindowContext::createBuffers(VkFormat format,
                                        VkImageUsageFlags usageFlags,
                                        SkColorType colorType,
                                        VkSharingMode sharingMode) {
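    // Standard two-call pattern: query the swapchain image count, then fetch the handles.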
    fGetSwapchainImagesKHR(fDevice, fSwapchain, &fImageCount, nullptr);
    SkASSERT(fImageCount);
    fImages = new VkImage[fImageCount];
    fGetSwapchainImagesKHR(fDevice, fSwapchain, &fImageCount, fImages);

    // set up initial image layouts and create surfaces
    fImageLayouts = new VkImageLayout[fImageCount];
    fSurfaces = new sk_sp<SkSurface>[fImageCount];
    for (uint32_t i = 0; i < fImageCount; ++i) {
        fImageLayouts[i] = VK_IMAGE_LAYOUT_UNDEFINED;

        GrVkImageInfo info;
        info.fImage = fImages[i];
        info.fAlloc = skgpu::VulkanAlloc();
        info.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        info.fImageTiling = VK_IMAGE_TILING_OPTIMAL;
        info.fFormat = format;
        info.fImageUsageFlags = usageFlags;
        info.fLevelCount = 1;
        info.fCurrentQueueFamily = fPresentQueueIndex;
        info.fProtected = skgpu::Protected(fDisplayParams.fCreateProtectedNativeBackend);
        info.fSharingMode = sharingMode;

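        // If the swapchain images can be sampled, wrap them as backend textures so Skia
        // can also read from them; otherwise wrap them as plain render targets.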
        if (usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT) {
            GrBackendTexture backendTexture = GrBackendTextures::MakeVk(fWidth, fHeight, info);
            fSurfaces[i] = SkSurfaces::WrapBackendTexture(fContext.get(),
                                                          backendTexture,
                                                          kTopLeft_GrSurfaceOrigin,
                                                          fDisplayParams.fMSAASampleCount,
                                                          colorType,
                                                          fDisplayParams.fColorSpace,
                                                          &fDisplayParams.fSurfaceProps);
        } else {
            if (fDisplayParams.fMSAASampleCount > 1) {
                return false;
            }
            info.fSampleCount = fSampleCount;
            GrBackendRenderTarget backendRT = GrBackendRenderTargets::MakeVk(fWidth, fHeight, info);
            fSurfaces[i] = SkSurfaces::WrapBackendRenderTarget(fContext.get(),
                                                               backendRT,
                                                               kTopLeft_GrSurfaceOrigin,
                                                               colorType,
                                                               fDisplayParams.fColorSpace,
                                                               &fDisplayParams.fSurfaceProps);
        }
        if (!fSurfaces[i]) {
            return false;
        }
    }

    // set up the backbuffers
    VkSemaphoreCreateInfo semaphoreInfo;
    memset(&semaphoreInfo, 0, sizeof(VkSemaphoreCreateInfo));
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = nullptr;
    semaphoreInfo.flags = 0;

    // we create one additional backbuffer structure here, because we want to
    // give the command buffers they contain a chance to finish before we cycle back
    fBackbuffers = new BackbufferInfo[fImageCount + 1];
    for (uint32_t i = 0; i < fImageCount + 1; ++i) {
        fBackbuffers[i].fImageIndex = -1;
        SkDEBUGCODE(VkResult result = )GR_VK_CALL(fInterface,
                CreateSemaphore(fDevice, &semaphoreInfo, nullptr,
                                &fBackbuffers[i].fRenderSemaphore));
        SkASSERT(result == VK_SUCCESS);
    }
    fCurrentBackbufferIndex = fImageCount;
    return true;
}

void VulkanWindowContext::destroyBuffers() {
    if (fBackbuffers) {
        for (uint32_t i = 0; i < fImageCount + 1; ++i) {
            fBackbuffers[i].fImageIndex = -1;
            GR_VK_CALL(fInterface,
                       DestroySemaphore(fDevice,
                                        fBackbuffers[i].fRenderSemaphore,
                                        nullptr));
        }
    }

    delete[] fBackbuffers;
    fBackbuffers = nullptr;

    // Deleting the array drops our refs on the SkSurfaces; they are freed once no
    // other refs remain.
    delete[] fSurfaces;
    fSurfaces = nullptr;
    delete[] fImageLayouts;
    fImageLayouts = nullptr;
    delete[] fImages;
    fImages = nullptr;
}

VulkanWindowContext::~VulkanWindowContext() {
    this->destroyContext();
}

void VulkanWindowContext::destroyContext() {
    if (this->isValid()) {
        fQueueWaitIdle(fPresentQueue);
        fDeviceWaitIdle(fDevice);

        this->destroyBuffers();

        if (VK_NULL_HANDLE != fSwapchain) {
            fDestroySwapchainKHR(fDevice, fSwapchain, nullptr);
            fSwapchain = VK_NULL_HANDLE;
        }

        if (VK_NULL_HANDLE != fSurface) {
            fDestroySurfaceKHR(fInstance, fSurface, nullptr);
            fSurface = VK_NULL_HANDLE;
        }
    }

    SkASSERT(fContext->unique());
    fContext.reset();
    fInterface.reset();

    if (VK_NULL_HANDLE != fDevice) {
        fDestroyDevice(fDevice, nullptr);
        fDevice = VK_NULL_HANDLE;
    }

#ifdef SK_ENABLE_VK_LAYERS
    if (fDebugCallback != VK_NULL_HANDLE) {
        fDestroyDebugReportCallbackEXT(fInstance, fDebugCallback, nullptr);
    }
#endif

    fPhysicalDevice = VK_NULL_HANDLE;

    if (VK_NULL_HANDLE != fInstance) {
        fDestroyInstance(fInstance, nullptr);
        fInstance = VK_NULL_HANDLE;
    }
}

VulkanWindowContext::BackbufferInfo* VulkanWindowContext::getAvailableBackbuffer() {
    SkASSERT(fBackbuffers);

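    // Cycle through the fImageCount + 1 backbuffer slots in round-robin order.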
    ++fCurrentBackbufferIndex;
    if (fCurrentBackbufferIndex > fImageCount) {
        fCurrentBackbufferIndex = 0;
    }

    BackbufferInfo* backbuffer = fBackbuffers + fCurrentBackbufferIndex;
    return backbuffer;
}

sk_sp<SkSurface> VulkanWindowContext::getBackbufferSurface() {
    BackbufferInfo* backbuffer = this->getAvailableBackbuffer();
    SkASSERT(backbuffer);

    // semaphores should be in unsignaled state
    VkSemaphoreCreateInfo semaphoreInfo;
    memset(&semaphoreInfo, 0, sizeof(VkSemaphoreCreateInfo));
    semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    semaphoreInfo.pNext = nullptr;
    semaphoreInfo.flags = 0;
    VkSemaphore semaphore;
    SkDEBUGCODE(VkResult result = )GR_VK_CALL(fInterface, CreateSemaphore(fDevice, &semaphoreInfo,
                                                                          nullptr, &semaphore));
    SkASSERT(result == VK_SUCCESS);

    // acquire the image
    VkResult res = fAcquireNextImageKHR(fDevice, fSwapchain, UINT64_MAX,
                                        semaphore, VK_NULL_HANDLE,
                                        &backbuffer->fImageIndex);
    if (VK_ERROR_SURFACE_LOST_KHR == res) {
        // need to figure out how to create a new vkSurface without the platformData*
        // maybe use attach somehow? but need a Window
        GR_VK_CALL(fInterface, DestroySemaphore(fDevice, semaphore, nullptr));
        return nullptr;
    }
    if (VK_ERROR_OUT_OF_DATE_KHR == res) {
        // tear swapchain down and try again
        if (!this->createSwapchain(-1, -1, fDisplayParams)) {
            GR_VK_CALL(fInterface, DestroySemaphore(fDevice, semaphore, nullptr));
            return nullptr;
        }
        backbuffer = this->getAvailableBackbuffer();

        // acquire the image
        res = fAcquireNextImageKHR(fDevice, fSwapchain, UINT64_MAX,
                                   semaphore, VK_NULL_HANDLE,
                                   &backbuffer->fImageIndex);

        if (VK_SUCCESS != res) {
            GR_VK_CALL(fInterface, DestroySemaphore(fDevice, semaphore, nullptr));
            return nullptr;
        }
    }

    SkSurface* surface = fSurfaces[backbuffer->fImageIndex].get();

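    // Make Skia's subsequent GPU work on this surface wait on the image-acquire
    // semaphore before rendering to the swapchain image.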
    GrBackendSemaphore beSemaphore = GrBackendSemaphores::MakeVk(semaphore);

    surface->wait(1, &beSemaphore);

    return sk_ref_sp(surface);
}

void VulkanWindowContext::onSwapBuffers() {
    BackbufferInfo* backbuffer = fBackbuffers + fCurrentBackbufferIndex;
    SkSurface* surface = fSurfaces[backbuffer->fImageIndex].get();

    GrBackendSemaphore beSemaphore = GrBackendSemaphores::MakeVk(backbuffer->fRenderSemaphore);

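    // Flush the surface's pending work, signal the backbuffer's render semaphore, and
    // transition the image to PRESENT_SRC on the present queue family so it can be
    // handed to the presentation engine.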
    GrFlushInfo info;
    info.fNumSemaphores = 1;
    info.fSignalSemaphores = &beSemaphore;
    skgpu::MutableTextureState presentState = skgpu::MutableTextureStates::MakeVulkan(
            VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, fPresentQueueIndex);
    auto dContext = surface->recordingContext()->asDirectContext();
    dContext->flush(surface, info, &presentState);
    dContext->submit();

    // Submit present operation to present queue
    const VkPresentInfoKHR presentInfo =
    {
        VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, // sType
        nullptr, // pNext
        1, // waitSemaphoreCount
        &backbuffer->fRenderSemaphore, // pWaitSemaphores
        1, // swapchainCount
        &fSwapchain, // pSwapchains
        &backbuffer->fImageIndex, // pImageIndices
        nullptr // pResults
    };

    fQueuePresentKHR(fPresentQueue, &presentInfo);
}

}   // namespace skwindow::internal