1 /*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "vulkan/vulkan_core.h"
18 #define ATRACE_TAG ATRACE_TAG_GRAPHICS
19
20 #include <aidl/android/hardware/graphics/common/Dataspace.h>
21 #include <aidl/android/hardware/graphics/common/PixelFormat.h>
22 #include <android/hardware/graphics/common/1.0/types.h>
23 #include <android/hardware_buffer.h>
24 #include <grallocusage/GrallocUsageConversion.h>
25 #include <graphicsenv/GraphicsEnv.h>
26 #include <hardware/gralloc.h>
27 #include <hardware/gralloc1.h>
28 #include <log/log.h>
29 #include <sync/sync.h>
30 #include <system/window.h>
31 #include <ui/BufferQueueDefs.h>
32 #include <utils/StrongPointer.h>
33 #include <utils/Timers.h>
34 #include <utils/Trace.h>
35
36 #include <algorithm>
37 #include <unordered_set>
38 #include <vector>
39
40 #include "driver.h"
41
42 using PixelFormat = aidl::android::hardware::graphics::common::PixelFormat;
43 using DataSpace = aidl::android::hardware::graphics::common::Dataspace;
44 using android::hardware::graphics::common::V1_0::BufferUsage;
45
46 namespace vulkan {
47 namespace driver {
48
49 namespace {
50
convertGralloc1ToBufferUsage(uint64_t producerUsage,uint64_t consumerUsage)51 static uint64_t convertGralloc1ToBufferUsage(uint64_t producerUsage,
52 uint64_t consumerUsage) {
53 static_assert(uint64_t(GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN) ==
54 uint64_t(GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN),
55 "expected ConsumerUsage and ProducerUsage CPU_READ_OFTEN "
56 "bits to match");
57 uint64_t merged = producerUsage | consumerUsage;
58 if ((merged & (GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN)) ==
59 GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN) {
60 merged &= ~uint64_t(GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN);
61 merged |= BufferUsage::CPU_READ_OFTEN;
62 }
63 if ((merged & (GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN)) ==
64 GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN) {
65 merged &= ~uint64_t(GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN);
66 merged |= BufferUsage::CPU_WRITE_OFTEN;
67 }
68 return merged;
69 }
70
71 const VkSurfaceTransformFlagsKHR kSupportedTransforms =
72 VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
73 VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
74 VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
75 VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
76 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
77 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
78 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
79 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
80 VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
81
TranslateNativeToVulkanTransform(int native)82 VkSurfaceTransformFlagBitsKHR TranslateNativeToVulkanTransform(int native) {
83 // Native and Vulkan transforms are isomorphic, but are represented
84 // differently. Vulkan transforms are built up of an optional horizontal
85 // mirror, followed by a clockwise 0/90/180/270-degree rotation. Native
86 // transforms are built up from a horizontal flip, vertical flip, and
87 // 90-degree rotation, all optional but always in that order.
88
89 switch (native) {
90 case 0:
91 return VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
92 case NATIVE_WINDOW_TRANSFORM_FLIP_H:
93 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR;
94 case NATIVE_WINDOW_TRANSFORM_FLIP_V:
95 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR;
96 case NATIVE_WINDOW_TRANSFORM_ROT_180:
97 return VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR;
98 case NATIVE_WINDOW_TRANSFORM_ROT_90:
99 return VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR;
100 case NATIVE_WINDOW_TRANSFORM_FLIP_H | NATIVE_WINDOW_TRANSFORM_ROT_90:
101 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR;
102 case NATIVE_WINDOW_TRANSFORM_FLIP_V | NATIVE_WINDOW_TRANSFORM_ROT_90:
103 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR;
104 case NATIVE_WINDOW_TRANSFORM_ROT_270:
105 return VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR;
106 case NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY:
107 default:
108 return VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
109 }
110 }
111
TranslateVulkanToNativeTransform(VkSurfaceTransformFlagBitsKHR transform)112 int TranslateVulkanToNativeTransform(VkSurfaceTransformFlagBitsKHR transform) {
113 switch (transform) {
114 case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
115 return NATIVE_WINDOW_TRANSFORM_ROT_90;
116 case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
117 return NATIVE_WINDOW_TRANSFORM_ROT_180;
118 case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
119 return NATIVE_WINDOW_TRANSFORM_ROT_270;
120 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR:
121 return NATIVE_WINDOW_TRANSFORM_FLIP_H;
122 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR:
123 return NATIVE_WINDOW_TRANSFORM_FLIP_H |
124 NATIVE_WINDOW_TRANSFORM_ROT_90;
125 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR:
126 return NATIVE_WINDOW_TRANSFORM_FLIP_V;
127 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR:
128 return NATIVE_WINDOW_TRANSFORM_FLIP_V |
129 NATIVE_WINDOW_TRANSFORM_ROT_90;
130 case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR:
131 case VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR:
132 default:
133 return 0;
134 }
135 }
136
InvertTransformToNative(VkSurfaceTransformFlagBitsKHR transform)137 int InvertTransformToNative(VkSurfaceTransformFlagBitsKHR transform) {
138 switch (transform) {
139 case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
140 return NATIVE_WINDOW_TRANSFORM_ROT_270;
141 case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
142 return NATIVE_WINDOW_TRANSFORM_ROT_180;
143 case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
144 return NATIVE_WINDOW_TRANSFORM_ROT_90;
145 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR:
146 return NATIVE_WINDOW_TRANSFORM_FLIP_H;
147 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR:
148 return NATIVE_WINDOW_TRANSFORM_FLIP_H |
149 NATIVE_WINDOW_TRANSFORM_ROT_90;
150 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR:
151 return NATIVE_WINDOW_TRANSFORM_FLIP_V;
152 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR:
153 return NATIVE_WINDOW_TRANSFORM_FLIP_V |
154 NATIVE_WINDOW_TRANSFORM_ROT_90;
155 case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR:
156 case VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR:
157 default:
158 return 0;
159 }
160 }
161
162 const static VkColorSpaceKHR colorSpaceSupportedByVkEXTSwapchainColorspace[] = {
163 VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
164 VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
165 VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
166 VK_COLOR_SPACE_BT709_LINEAR_EXT,
167 VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
168 VK_COLOR_SPACE_BT2020_LINEAR_EXT,
169 VK_COLOR_SPACE_HDR10_ST2084_EXT,
170 VK_COLOR_SPACE_HDR10_HLG_EXT,
171 VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
172 VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
173 VK_COLOR_SPACE_PASS_THROUGH_EXT,
174 VK_COLOR_SPACE_DCI_P3_LINEAR_EXT};
175
176 const static VkColorSpaceKHR
177 colorSpaceSupportedByVkEXTSwapchainColorspaceOnFP16SurfaceOnly[] = {
178 VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
179 VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT};
180
181 class TimingInfo {
182 public:
TimingInfo(const VkPresentTimeGOOGLE * qp,uint64_t nativeFrameId)183 TimingInfo(const VkPresentTimeGOOGLE* qp, uint64_t nativeFrameId)
184 : vals_{qp->presentID, qp->desiredPresentTime, 0, 0, 0},
185 native_frame_id_(nativeFrameId) {}
ready() const186 bool ready() const {
187 return (timestamp_desired_present_time_ !=
188 NATIVE_WINDOW_TIMESTAMP_PENDING &&
189 timestamp_actual_present_time_ !=
190 NATIVE_WINDOW_TIMESTAMP_PENDING &&
191 timestamp_render_complete_time_ !=
192 NATIVE_WINDOW_TIMESTAMP_PENDING &&
193 timestamp_composition_latch_time_ !=
194 NATIVE_WINDOW_TIMESTAMP_PENDING);
195 }
calculate(int64_t rdur)196 void calculate(int64_t rdur) {
197 bool anyTimestampInvalid =
198 (timestamp_actual_present_time_ ==
199 NATIVE_WINDOW_TIMESTAMP_INVALID) ||
200 (timestamp_render_complete_time_ ==
201 NATIVE_WINDOW_TIMESTAMP_INVALID) ||
202 (timestamp_composition_latch_time_ ==
203 NATIVE_WINDOW_TIMESTAMP_INVALID);
204 if (anyTimestampInvalid) {
205 ALOGE("Unexpectedly received invalid timestamp.");
206 vals_.actualPresentTime = 0;
207 vals_.earliestPresentTime = 0;
208 vals_.presentMargin = 0;
209 return;
210 }
211
212 vals_.actualPresentTime =
213 static_cast<uint64_t>(timestamp_actual_present_time_);
214 int64_t margin = (timestamp_composition_latch_time_ -
215 timestamp_render_complete_time_);
216 // Calculate vals_.earliestPresentTime, and potentially adjust
217 // vals_.presentMargin. The initial value of vals_.earliestPresentTime
218 // is vals_.actualPresentTime. If we can subtract rdur (the duration
219 // of a refresh cycle) from vals_.earliestPresentTime (and also from
220 // vals_.presentMargin) and still leave a positive margin, then we can
221 // report to the application that it could have presented earlier than
222 // it did (per the extension specification). If for some reason, we
223 // can do this subtraction repeatedly, we do, since
224 // vals_.earliestPresentTime really is supposed to be the "earliest".
225 int64_t early_time = timestamp_actual_present_time_;
226 while ((margin > rdur) &&
227 ((early_time - rdur) > timestamp_composition_latch_time_)) {
228 early_time -= rdur;
229 margin -= rdur;
230 }
231 vals_.earliestPresentTime = static_cast<uint64_t>(early_time);
232 vals_.presentMargin = static_cast<uint64_t>(margin);
233 }
get_values(VkPastPresentationTimingGOOGLE * values) const234 void get_values(VkPastPresentationTimingGOOGLE* values) const {
235 *values = vals_;
236 }
237
238 public:
239 VkPastPresentationTimingGOOGLE vals_ { 0, 0, 0, 0, 0 };
240
241 uint64_t native_frame_id_ { 0 };
242 int64_t timestamp_desired_present_time_{ NATIVE_WINDOW_TIMESTAMP_PENDING };
243 int64_t timestamp_actual_present_time_ { NATIVE_WINDOW_TIMESTAMP_PENDING };
244 int64_t timestamp_render_complete_time_ { NATIVE_WINDOW_TIMESTAMP_PENDING };
245 int64_t timestamp_composition_latch_time_
246 { NATIVE_WINDOW_TIMESTAMP_PENDING };
247 };
248
249 struct Surface {
250 android::sp<ANativeWindow> window;
251 VkSwapchainKHR swapchain_handle;
252 uint64_t consumer_usage;
253
254 // Indicate whether this surface has been used by a swapchain, no matter the
255 // swapchain is still current or has been destroyed.
256 bool used_by_swapchain;
257 };
258
HandleFromSurface(Surface * surface)259 VkSurfaceKHR HandleFromSurface(Surface* surface) {
260 return VkSurfaceKHR(reinterpret_cast<uint64_t>(surface));
261 }
262
SurfaceFromHandle(VkSurfaceKHR handle)263 Surface* SurfaceFromHandle(VkSurfaceKHR handle) {
264 return reinterpret_cast<Surface*>(handle);
265 }
266
267 // Maximum number of TimingInfo structs to keep per swapchain:
268 enum { MAX_TIMING_INFOS = 10 };
269 // Minimum number of frames to look for in the past (so we don't cause
270 // syncronous requests to Surface Flinger):
271 enum { MIN_NUM_FRAMES_AGO = 5 };
272
IsSharedPresentMode(VkPresentModeKHR mode)273 bool IsSharedPresentMode(VkPresentModeKHR mode) {
274 return mode == VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR ||
275 mode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
276 }
277
278 struct Swapchain {
Swapchainvulkan::driver::__anon65a7247f0111::Swapchain279 Swapchain(Surface& surface_,
280 uint32_t num_images_,
281 VkPresentModeKHR present_mode,
282 int pre_transform_,
283 int64_t refresh_duration_)
284 : surface(surface_),
285 num_images(num_images_),
286 mailbox_mode(present_mode == VK_PRESENT_MODE_MAILBOX_KHR),
287 pre_transform(pre_transform_),
288 frame_timestamps_enabled(false),
289 refresh_duration(refresh_duration_),
290 acquire_next_image_timeout(-1),
291 shared(IsSharedPresentMode(present_mode)) {
292 }
293
get_refresh_durationvulkan::driver::__anon65a7247f0111::Swapchain294 VkResult get_refresh_duration(uint64_t& outRefreshDuration)
295 {
296 ANativeWindow* window = surface.window.get();
297 int err = native_window_get_refresh_cycle_duration(
298 window,
299 &refresh_duration);
300 if (err != android::OK) {
301 ALOGE("%s:native_window_get_refresh_cycle_duration failed: %s (%d)",
302 __func__, strerror(-err), err );
303 return VK_ERROR_SURFACE_LOST_KHR;
304 }
305 outRefreshDuration = refresh_duration;
306 return VK_SUCCESS;
307 }
308
309 Surface& surface;
310 uint32_t num_images;
311 bool mailbox_mode;
312 int pre_transform;
313 bool frame_timestamps_enabled;
314 int64_t refresh_duration;
315 nsecs_t acquire_next_image_timeout;
316 bool shared;
317
318 struct Image {
Imagevulkan::driver::__anon65a7247f0111::Swapchain::Image319 Image()
320 : image(VK_NULL_HANDLE),
321 dequeue_fence(-1),
322 release_fence(-1),
323 dequeued(false) {}
324 VkImage image;
325 // If the image is bound to memory, an sp to the underlying gralloc buffer.
326 // Otherwise, nullptr; the image will be bound to memory as part of
327 // AcquireNextImage.
328 android::sp<ANativeWindowBuffer> buffer;
329 // The fence is only valid when the buffer is dequeued, and should be
330 // -1 any other time. When valid, we own the fd, and must ensure it is
331 // closed: either by closing it explicitly when queueing the buffer,
332 // or by passing ownership e.g. to ANativeWindow::cancelBuffer().
333 int dequeue_fence;
334 // This fence is a dup of the sync fd returned from the driver via
335 // vkQueueSignalReleaseImageANDROID upon vkQueuePresentKHR. We must
336 // ensure it is closed upon re-presenting or releasing the image.
337 int release_fence;
338 bool dequeued;
339 } images[android::BufferQueueDefs::NUM_BUFFER_SLOTS];
340
341 std::vector<TimingInfo> timing;
342 };
343
HandleFromSwapchain(Swapchain * swapchain)344 VkSwapchainKHR HandleFromSwapchain(Swapchain* swapchain) {
345 return VkSwapchainKHR(reinterpret_cast<uint64_t>(swapchain));
346 }
347
SwapchainFromHandle(VkSwapchainKHR handle)348 Swapchain* SwapchainFromHandle(VkSwapchainKHR handle) {
349 return reinterpret_cast<Swapchain*>(handle);
350 }
351
IsFencePending(int fd)352 static bool IsFencePending(int fd) {
353 if (fd < 0)
354 return false;
355
356 errno = 0;
357 return sync_wait(fd, 0 /* timeout */) == -1 && errno == ETIME;
358 }
359
ReleaseSwapchainImage(VkDevice device,bool shared_present,ANativeWindow * window,int release_fence,Swapchain::Image & image,bool defer_if_pending)360 void ReleaseSwapchainImage(VkDevice device,
361 bool shared_present,
362 ANativeWindow* window,
363 int release_fence,
364 Swapchain::Image& image,
365 bool defer_if_pending) {
366 ATRACE_CALL();
367
368 ALOG_ASSERT(release_fence == -1 || image.dequeued,
369 "ReleaseSwapchainImage: can't provide a release fence for "
370 "non-dequeued images");
371
372 if (image.dequeued) {
373 if (release_fence >= 0) {
374 // We get here from vkQueuePresentKHR. The application is
375 // responsible for creating an execution dependency chain from
376 // vkAcquireNextImage (dequeue_fence) to vkQueuePresentKHR
377 // (release_fence), so we can drop the dequeue_fence here.
378 if (image.dequeue_fence >= 0)
379 close(image.dequeue_fence);
380 } else {
381 // We get here during swapchain destruction, or various serious
382 // error cases e.g. when we can't create the release_fence during
383 // vkQueuePresentKHR. In non-error cases, the dequeue_fence should
384 // have already signalled, since the swapchain images are supposed
385 // to be idle before the swapchain is destroyed. In error cases,
386 // there may be rendering in flight to the image, but since we
387 // weren't able to create a release_fence, waiting for the
388 // dequeue_fence is about the best we can do.
389 release_fence = image.dequeue_fence;
390 }
391 image.dequeue_fence = -1;
392
393 // It's invalid to call cancelBuffer on a shared buffer
394 if (window && !shared_present) {
395 window->cancelBuffer(window, image.buffer.get(), release_fence);
396 } else {
397 if (release_fence >= 0) {
398 sync_wait(release_fence, -1 /* forever */);
399 close(release_fence);
400 }
401 }
402 release_fence = -1;
403 image.dequeued = false;
404 }
405
406 if (defer_if_pending && IsFencePending(image.release_fence))
407 return;
408
409 if (image.release_fence >= 0) {
410 close(image.release_fence);
411 image.release_fence = -1;
412 }
413
414 if (image.image) {
415 ATRACE_BEGIN("DestroyImage");
416 GetData(device).driver.DestroyImage(device, image.image, nullptr);
417 ATRACE_END();
418 image.image = VK_NULL_HANDLE;
419 }
420
421 image.buffer.clear();
422 }
423
OrphanSwapchain(VkDevice device,Swapchain * swapchain)424 void OrphanSwapchain(VkDevice device, Swapchain* swapchain) {
425 if (swapchain->surface.swapchain_handle != HandleFromSwapchain(swapchain))
426 return;
427 for (uint32_t i = 0; i < swapchain->num_images; i++) {
428 if (!swapchain->images[i].dequeued) {
429 ReleaseSwapchainImage(device, swapchain->shared, nullptr, -1,
430 swapchain->images[i], true);
431 }
432 }
433 swapchain->surface.swapchain_handle = VK_NULL_HANDLE;
434 swapchain->timing.clear();
435 }
436
get_num_ready_timings(Swapchain & swapchain)437 uint32_t get_num_ready_timings(Swapchain& swapchain) {
438 if (swapchain.timing.size() < MIN_NUM_FRAMES_AGO) {
439 return 0;
440 }
441
442 uint32_t num_ready = 0;
443 const size_t num_timings = swapchain.timing.size() - MIN_NUM_FRAMES_AGO + 1;
444 for (uint32_t i = 0; i < num_timings; i++) {
445 TimingInfo& ti = swapchain.timing[i];
446 if (ti.ready()) {
447 // This TimingInfo is ready to be reported to the user. Add it
448 // to the num_ready.
449 num_ready++;
450 continue;
451 }
452 // This TimingInfo is not yet ready to be reported to the user,
453 // and so we should look for any available timestamps that
454 // might make it ready.
455 int64_t desired_present_time = 0;
456 int64_t render_complete_time = 0;
457 int64_t composition_latch_time = 0;
458 int64_t actual_present_time = 0;
459 // Obtain timestamps:
460 int err = native_window_get_frame_timestamps(
461 swapchain.surface.window.get(), ti.native_frame_id_,
462 &desired_present_time, &render_complete_time,
463 &composition_latch_time,
464 nullptr, //&first_composition_start_time,
465 nullptr, //&last_composition_start_time,
466 nullptr, //&composition_finish_time,
467 &actual_present_time,
468 nullptr, //&dequeue_ready_time,
469 nullptr /*&reads_done_time*/);
470
471 if (err != android::OK) {
472 continue;
473 }
474
475 // Record the timestamp(s) we received, and then see if this TimingInfo
476 // is ready to be reported to the user:
477 ti.timestamp_desired_present_time_ = desired_present_time;
478 ti.timestamp_actual_present_time_ = actual_present_time;
479 ti.timestamp_render_complete_time_ = render_complete_time;
480 ti.timestamp_composition_latch_time_ = composition_latch_time;
481
482 if (ti.ready()) {
483 // The TimingInfo has received enough timestamps, and should now
484 // use those timestamps to calculate the info that should be
485 // reported to the user:
486 ti.calculate(swapchain.refresh_duration);
487 num_ready++;
488 }
489 }
490 return num_ready;
491 }
492
copy_ready_timings(Swapchain & swapchain,uint32_t * count,VkPastPresentationTimingGOOGLE * timings)493 void copy_ready_timings(Swapchain& swapchain,
494 uint32_t* count,
495 VkPastPresentationTimingGOOGLE* timings) {
496 if (swapchain.timing.empty()) {
497 *count = 0;
498 return;
499 }
500
501 size_t last_ready = swapchain.timing.size() - 1;
502 while (!swapchain.timing[last_ready].ready()) {
503 if (last_ready == 0) {
504 *count = 0;
505 return;
506 }
507 last_ready--;
508 }
509
510 uint32_t num_copied = 0;
511 int32_t num_to_remove = 0;
512 for (uint32_t i = 0; i <= last_ready && num_copied < *count; i++) {
513 const TimingInfo& ti = swapchain.timing[i];
514 if (ti.ready()) {
515 ti.get_values(&timings[num_copied]);
516 num_copied++;
517 }
518 num_to_remove++;
519 }
520
521 // Discard old frames that aren't ready if newer frames are ready.
522 // We don't expect to get the timing info for those old frames.
523 swapchain.timing.erase(swapchain.timing.begin(),
524 swapchain.timing.begin() + num_to_remove);
525
526 *count = num_copied;
527 }
528
GetNativePixelFormat(VkFormat format)529 PixelFormat GetNativePixelFormat(VkFormat format) {
530 PixelFormat native_format = PixelFormat::RGBA_8888;
531 switch (format) {
532 case VK_FORMAT_R8G8B8A8_UNORM:
533 case VK_FORMAT_R8G8B8A8_SRGB:
534 native_format = PixelFormat::RGBA_8888;
535 break;
536 case VK_FORMAT_R5G6B5_UNORM_PACK16:
537 native_format = PixelFormat::RGB_565;
538 break;
539 case VK_FORMAT_R16G16B16A16_SFLOAT:
540 native_format = PixelFormat::RGBA_FP16;
541 break;
542 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
543 native_format = PixelFormat::RGBA_1010102;
544 break;
545 case VK_FORMAT_R8_UNORM:
546 native_format = PixelFormat::R_8;
547 break;
548 case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16:
549 native_format = PixelFormat::RGBA_10101010;
550 break;
551 default:
552 ALOGV("unsupported swapchain format %d", format);
553 break;
554 }
555 return native_format;
556 }
557
GetNativeDataspace(VkColorSpaceKHR colorspace,VkFormat format)558 DataSpace GetNativeDataspace(VkColorSpaceKHR colorspace, VkFormat format) {
559 switch (colorspace) {
560 case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR:
561 return DataSpace::SRGB;
562 case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT:
563 return DataSpace::DISPLAY_P3;
564 case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT:
565 return DataSpace::SCRGB_LINEAR;
566 case VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT:
567 return DataSpace::SCRGB;
568 case VK_COLOR_SPACE_DCI_P3_LINEAR_EXT:
569 return DataSpace::DCI_P3_LINEAR;
570 case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT:
571 return DataSpace::DCI_P3;
572 case VK_COLOR_SPACE_BT709_LINEAR_EXT:
573 return DataSpace::SRGB_LINEAR;
574 case VK_COLOR_SPACE_BT709_NONLINEAR_EXT:
575 return DataSpace::SRGB;
576 case VK_COLOR_SPACE_BT2020_LINEAR_EXT:
577 if (format == VK_FORMAT_R16G16B16A16_SFLOAT) {
578 return DataSpace::BT2020_LINEAR_EXTENDED;
579 } else {
580 return DataSpace::BT2020_LINEAR;
581 }
582 case VK_COLOR_SPACE_HDR10_ST2084_EXT:
583 return DataSpace::BT2020_PQ;
584 case VK_COLOR_SPACE_DOLBYVISION_EXT:
585 return DataSpace::BT2020_PQ;
586 case VK_COLOR_SPACE_HDR10_HLG_EXT:
587 return DataSpace::BT2020_HLG;
588 case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT:
589 return DataSpace::ADOBE_RGB_LINEAR;
590 case VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT:
591 return DataSpace::ADOBE_RGB;
592 // Pass through is intended to allow app to provide data that is passed
593 // to the display system without modification.
594 case VK_COLOR_SPACE_PASS_THROUGH_EXT:
595 return DataSpace::ARBITRARY;
596
597 default:
598 // This indicates that we don't know about the
599 // dataspace specified and we should indicate that
600 // it's unsupported
601 return DataSpace::UNKNOWN;
602 }
603 }
604
605 } // anonymous namespace
606
607 VKAPI_ATTR
CreateAndroidSurfaceKHR(VkInstance instance,const VkAndroidSurfaceCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * allocator,VkSurfaceKHR * out_surface)608 VkResult CreateAndroidSurfaceKHR(
609 VkInstance instance,
610 const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
611 const VkAllocationCallbacks* allocator,
612 VkSurfaceKHR* out_surface) {
613 ATRACE_CALL();
614
615 if (!allocator)
616 allocator = &GetData(instance).allocator;
617 void* mem = allocator->pfnAllocation(allocator->pUserData, sizeof(Surface),
618 alignof(Surface),
619 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
620 if (!mem)
621 return VK_ERROR_OUT_OF_HOST_MEMORY;
622 Surface* surface = new (mem) Surface;
623
624 surface->window = pCreateInfo->window;
625 surface->swapchain_handle = VK_NULL_HANDLE;
626 surface->used_by_swapchain = false;
627 int err = native_window_get_consumer_usage(surface->window.get(),
628 &surface->consumer_usage);
629 if (err != android::OK) {
630 ALOGE("native_window_get_consumer_usage() failed: %s (%d)",
631 strerror(-err), err);
632 surface->~Surface();
633 allocator->pfnFree(allocator->pUserData, surface);
634 return VK_ERROR_SURFACE_LOST_KHR;
635 }
636
637 err =
638 native_window_api_connect(surface->window.get(), NATIVE_WINDOW_API_EGL);
639 if (err != android::OK) {
640 ALOGE("native_window_api_connect() failed: %s (%d)", strerror(-err),
641 err);
642 surface->~Surface();
643 allocator->pfnFree(allocator->pUserData, surface);
644 return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
645 }
646
647 *out_surface = HandleFromSurface(surface);
648 return VK_SUCCESS;
649 }
650
651 VKAPI_ATTR
DestroySurfaceKHR(VkInstance instance,VkSurfaceKHR surface_handle,const VkAllocationCallbacks * allocator)652 void DestroySurfaceKHR(VkInstance instance,
653 VkSurfaceKHR surface_handle,
654 const VkAllocationCallbacks* allocator) {
655 ATRACE_CALL();
656
657 Surface* surface = SurfaceFromHandle(surface_handle);
658 if (!surface)
659 return;
660 native_window_api_disconnect(surface->window.get(), NATIVE_WINDOW_API_EGL);
661 ALOGV_IF(surface->swapchain_handle != VK_NULL_HANDLE,
662 "destroyed VkSurfaceKHR 0x%" PRIx64
663 " has active VkSwapchainKHR 0x%" PRIx64,
664 reinterpret_cast<uint64_t>(surface_handle),
665 reinterpret_cast<uint64_t>(surface->swapchain_handle));
666 surface->~Surface();
667 if (!allocator)
668 allocator = &GetData(instance).allocator;
669 allocator->pfnFree(allocator->pUserData, surface);
670 }
671
672 VKAPI_ATTR
GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice,uint32_t,VkSurfaceKHR,VkBool32 * supported)673 VkResult GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice /*pdev*/,
674 uint32_t /*queue_family*/,
675 VkSurfaceKHR /*surface_handle*/,
676 VkBool32* supported) {
677 *supported = VK_TRUE;
678 return VK_SUCCESS;
679 }
680
681 VKAPI_ATTR
GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice pdev,VkSurfaceKHR surface,VkSurfaceCapabilitiesKHR * capabilities)682 VkResult GetPhysicalDeviceSurfaceCapabilitiesKHR(
683 VkPhysicalDevice pdev,
684 VkSurfaceKHR surface,
685 VkSurfaceCapabilitiesKHR* capabilities) {
686 ATRACE_CALL();
687
688 // Implement in terms of GetPhysicalDeviceSurfaceCapabilities2KHR
689
690 VkPhysicalDeviceSurfaceInfo2KHR info2 = {
691 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
692 nullptr,
693 surface
694 };
695
696 VkSurfaceCapabilities2KHR caps2 = {
697 VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
698 nullptr,
699 {},
700 };
701
702 VkResult result = GetPhysicalDeviceSurfaceCapabilities2KHR(pdev, &info2, &caps2);
703 *capabilities = caps2.surfaceCapabilities;
704 return result;
705 }
706
707 // Does the call-twice and VK_INCOMPLETE handling for querying lists
708 // of things, where we already have the full set built in a vector.
709 template <typename T>
CopyWithIncomplete(std::vector<T> const & things,T * callerPtr,uint32_t * callerCount)710 VkResult CopyWithIncomplete(std::vector<T> const& things,
711 T* callerPtr, uint32_t* callerCount) {
712 VkResult result = VK_SUCCESS;
713 if (callerPtr) {
714 if (things.size() > *callerCount)
715 result = VK_INCOMPLETE;
716 *callerCount = std::min(uint32_t(things.size()), *callerCount);
717 std::copy(things.begin(), things.begin() + *callerCount, callerPtr);
718 } else {
719 *callerCount = things.size();
720 }
721 return result;
722 }
723
724 VKAPI_ATTR
GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice pdev,VkSurfaceKHR surface_handle,uint32_t * count,VkSurfaceFormatKHR * formats)725 VkResult GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice pdev,
726 VkSurfaceKHR surface_handle,
727 uint32_t* count,
728 VkSurfaceFormatKHR* formats) {
729 ATRACE_CALL();
730
731 const InstanceData& instance_data = GetData(pdev);
732
733 uint64_t consumer_usage = 0;
734 bool colorspace_ext =
735 instance_data.hook_extensions.test(ProcHook::EXT_swapchain_colorspace);
736 if (surface_handle == VK_NULL_HANDLE) {
737 ProcHook::Extension surfaceless = ProcHook::GOOGLE_surfaceless_query;
738 bool surfaceless_enabled =
739 instance_data.hook_extensions.test(surfaceless);
740 if (!surfaceless_enabled) {
741 return VK_ERROR_SURFACE_LOST_KHR;
742 }
743 // Support for VK_GOOGLE_surfaceless_query.
744
745 // TODO(b/203826952): research proper value; temporarily use the
746 // values seen on Pixel
747 consumer_usage = AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY;
748 } else {
749 Surface& surface = *SurfaceFromHandle(surface_handle);
750 consumer_usage = surface.consumer_usage;
751 }
752
753 AHardwareBuffer_Desc desc = {};
754 desc.width = 1;
755 desc.height = 1;
756 desc.layers = 1;
757 desc.usage = consumer_usage | AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
758 AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;
759
760 // We must support R8G8B8A8
761 std::vector<VkSurfaceFormatKHR> all_formats = {
762 {VK_FORMAT_R8G8B8A8_UNORM, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR},
763 {VK_FORMAT_R8G8B8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR},
764 };
765
766 VkFormat format = VK_FORMAT_UNDEFINED;
767 if (colorspace_ext) {
768 for (VkColorSpaceKHR colorSpace :
769 colorSpaceSupportedByVkEXTSwapchainColorspace) {
770 format = VK_FORMAT_R8G8B8A8_UNORM;
771 if (GetNativeDataspace(colorSpace, format) != DataSpace::UNKNOWN) {
772 all_formats.emplace_back(
773 VkSurfaceFormatKHR{format, colorSpace});
774 }
775
776 format = VK_FORMAT_R8G8B8A8_SRGB;
777 if (GetNativeDataspace(colorSpace, format) != DataSpace::UNKNOWN) {
778 all_formats.emplace_back(
779 VkSurfaceFormatKHR{format, colorSpace});
780 }
781 }
782 }
783
784 // NOTE: Any new formats that are added must be coordinated across different
785 // Android users. This includes the ANGLE team (a layered implementation of
786 // OpenGL-ES).
787
788 format = VK_FORMAT_R5G6B5_UNORM_PACK16;
789 desc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
790 if (AHardwareBuffer_isSupported(&desc)) {
791 all_formats.emplace_back(
792 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
793 if (colorspace_ext) {
794 for (VkColorSpaceKHR colorSpace :
795 colorSpaceSupportedByVkEXTSwapchainColorspace) {
796 if (GetNativeDataspace(colorSpace, format) !=
797 DataSpace::UNKNOWN) {
798 all_formats.emplace_back(
799 VkSurfaceFormatKHR{format, colorSpace});
800 }
801 }
802 }
803 }
804
805 format = VK_FORMAT_R16G16B16A16_SFLOAT;
806 desc.format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
807 if (AHardwareBuffer_isSupported(&desc)) {
808 all_formats.emplace_back(
809 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
810 if (colorspace_ext) {
811 for (VkColorSpaceKHR colorSpace :
812 colorSpaceSupportedByVkEXTSwapchainColorspace) {
813 if (GetNativeDataspace(colorSpace, format) !=
814 DataSpace::UNKNOWN) {
815 all_formats.emplace_back(
816 VkSurfaceFormatKHR{format, colorSpace});
817 }
818 }
819
820 for (
821 VkColorSpaceKHR colorSpace :
822 colorSpaceSupportedByVkEXTSwapchainColorspaceOnFP16SurfaceOnly) {
823 if (GetNativeDataspace(colorSpace, format) !=
824 DataSpace::UNKNOWN) {
825 all_formats.emplace_back(
826 VkSurfaceFormatKHR{format, colorSpace});
827 }
828 }
829 }
830 }
831
832 format = VK_FORMAT_A2B10G10R10_UNORM_PACK32;
833 desc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
834 if (AHardwareBuffer_isSupported(&desc)) {
835 all_formats.emplace_back(
836 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
837 if (colorspace_ext) {
838 for (VkColorSpaceKHR colorSpace :
839 colorSpaceSupportedByVkEXTSwapchainColorspace) {
840 if (GetNativeDataspace(colorSpace, format) !=
841 DataSpace::UNKNOWN) {
842 all_formats.emplace_back(
843 VkSurfaceFormatKHR{format, colorSpace});
844 }
845 }
846 }
847 }
848
849 format = VK_FORMAT_R8_UNORM;
850 desc.format = AHARDWAREBUFFER_FORMAT_R8_UNORM;
851 if (AHardwareBuffer_isSupported(&desc)) {
852 if (colorspace_ext) {
853 all_formats.emplace_back(
854 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_PASS_THROUGH_EXT});
855 }
856 }
857
858 bool rgba10x6_formats_ext = false;
859 uint32_t exts_count;
860 const auto& driver = GetData(pdev).driver;
861 driver.EnumerateDeviceExtensionProperties(pdev, nullptr, &exts_count,
862 nullptr);
863 std::vector<VkExtensionProperties> props(exts_count);
864 driver.EnumerateDeviceExtensionProperties(pdev, nullptr, &exts_count,
865 props.data());
866 for (uint32_t i = 0; i < exts_count; i++) {
867 VkExtensionProperties prop = props[i];
868 if (strcmp(prop.extensionName,
869 VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME) == 0) {
870 rgba10x6_formats_ext = true;
871 }
872 }
873 format = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16;
874 desc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A10_UNORM;
875 if (AHardwareBuffer_isSupported(&desc) && rgba10x6_formats_ext) {
876 all_formats.emplace_back(
877 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
878 if (colorspace_ext) {
879 for (VkColorSpaceKHR colorSpace :
880 colorSpaceSupportedByVkEXTSwapchainColorspace) {
881 if (GetNativeDataspace(colorSpace, format) !=
882 DataSpace::UNKNOWN) {
883 all_formats.emplace_back(
884 VkSurfaceFormatKHR{format, colorSpace});
885 }
886 }
887 }
888 }
889
890 // NOTE: Any new formats that are added must be coordinated across different
891 // Android users. This includes the ANGLE team (a layered implementation of
892 // OpenGL-ES).
893
894 return CopyWithIncomplete(all_formats, formats, count);
895 }
896
897 VKAPI_ATTR
GetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,VkSurfaceCapabilities2KHR * pSurfaceCapabilities)898 VkResult GetPhysicalDeviceSurfaceCapabilities2KHR(
899 VkPhysicalDevice physicalDevice,
900 const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
901 VkSurfaceCapabilities2KHR* pSurfaceCapabilities) {
902 ATRACE_CALL();
903
904 auto surface = pSurfaceInfo->surface;
905 auto capabilities = &pSurfaceCapabilities->surfaceCapabilities;
906
907 VkSurfacePresentModeEXT const *pPresentMode = nullptr;
908 for (auto pNext = reinterpret_cast<VkBaseInStructure const *>(pSurfaceInfo->pNext);
909 pNext; pNext = reinterpret_cast<VkBaseInStructure const *>(pNext->pNext)) {
910 switch (pNext->sType) {
911 case VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT:
912 pPresentMode = reinterpret_cast<VkSurfacePresentModeEXT const *>(pNext);
913 break;
914
915 default:
916 break;
917 }
918 }
919
920 int err;
921 int width, height;
922 int transform_hint;
923 int max_buffer_count;
924 int min_undequeued_buffers;
925 if (surface == VK_NULL_HANDLE) {
926 const InstanceData& instance_data = GetData(physicalDevice);
927 ProcHook::Extension surfaceless = ProcHook::GOOGLE_surfaceless_query;
928 bool surfaceless_enabled =
929 instance_data.hook_extensions.test(surfaceless);
930 if (!surfaceless_enabled) {
931 // It is an error to pass a surface==VK_NULL_HANDLE unless the
932 // VK_GOOGLE_surfaceless_query extension is enabled
933 return VK_ERROR_SURFACE_LOST_KHR;
934 }
935 // Support for VK_GOOGLE_surfaceless_query. The primary purpose of this
936 // extension for this function is for
937 // VkSurfaceProtectedCapabilitiesKHR::supportsProtected. The following
938 // four values cannot be known without a surface. Default values will
939 // be supplied anyway, but cannot be relied upon.
940 width = 0xFFFFFFFF;
941 height = 0xFFFFFFFF;
942 transform_hint = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
943 capabilities->minImageCount = 0xFFFFFFFF;
944 capabilities->maxImageCount = 0xFFFFFFFF;
945 } else {
946 ANativeWindow* window = SurfaceFromHandle(surface)->window.get();
947
948 err = window->query(window, NATIVE_WINDOW_DEFAULT_WIDTH, &width);
949 if (err != android::OK) {
950 ALOGE("NATIVE_WINDOW_DEFAULT_WIDTH query failed: %s (%d)",
951 strerror(-err), err);
952 return VK_ERROR_SURFACE_LOST_KHR;
953 }
954 err = window->query(window, NATIVE_WINDOW_DEFAULT_HEIGHT, &height);
955 if (err != android::OK) {
956 ALOGE("NATIVE_WINDOW_DEFAULT_WIDTH query failed: %s (%d)",
957 strerror(-err), err);
958 return VK_ERROR_SURFACE_LOST_KHR;
959 }
960
961 err = window->query(window, NATIVE_WINDOW_TRANSFORM_HINT,
962 &transform_hint);
963 if (err != android::OK) {
964 ALOGE("NATIVE_WINDOW_TRANSFORM_HINT query failed: %s (%d)",
965 strerror(-err), err);
966 return VK_ERROR_SURFACE_LOST_KHR;
967 }
968
969 err = window->query(window, NATIVE_WINDOW_MAX_BUFFER_COUNT,
970 &max_buffer_count);
971 if (err != android::OK) {
972 ALOGE("NATIVE_WINDOW_MAX_BUFFER_COUNT query failed: %s (%d)",
973 strerror(-err), err);
974 return VK_ERROR_SURFACE_LOST_KHR;
975 }
976
977 err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
978 &min_undequeued_buffers);
979 if (err != android::OK) {
980 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
981 strerror(-err), err);
982 return VK_ERROR_SURFACE_LOST_KHR;
983 }
984
985 // Additional buffer count over min_undequeued_buffers in vulkan came from 2 total
986 // being technically enough for fifo (although a poor experience) vs 3 being the
987 // absolute minimum for mailbox to be useful. So min_undequeued_buffers + 2 is sensible
988 static constexpr int default_additional_buffers = 2;
989
990 if(pPresentMode != nullptr) {
991 switch (pPresentMode->presentMode) {
992 case VK_PRESENT_MODE_IMMEDIATE_KHR:
993 ALOGE("Swapchain present mode VK_PRESENT_MODE_IMMEDIATE_KHR is not supported");
994 break;
995 case VK_PRESENT_MODE_MAILBOX_KHR:
996 case VK_PRESENT_MODE_FIFO_KHR:
997 capabilities->minImageCount = std::min(max_buffer_count,
998 min_undequeued_buffers + default_additional_buffers);
999 capabilities->maxImageCount = static_cast<uint32_t>(max_buffer_count);
1000 break;
1001 case VK_PRESENT_MODE_FIFO_RELAXED_KHR:
1002 ALOGE("Swapchain present mode VK_PRESENT_MODE_FIFO_RELEAXED_KHR "
1003 "is not supported");
1004 break;
1005 case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
1006 case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
1007 capabilities->minImageCount = 1;
1008 capabilities->maxImageCount = 1;
1009 break;
1010
1011 default:
1012 ALOGE("Unrecognized swapchain present mode %u is not supported",
1013 pPresentMode->presentMode);
1014 break;
1015 }
1016 } else {
1017 capabilities->minImageCount = std::min(max_buffer_count,
1018 min_undequeued_buffers + default_additional_buffers);
1019 capabilities->maxImageCount = static_cast<uint32_t>(max_buffer_count);
1020 }
1021 }
1022
1023 capabilities->currentExtent =
1024 VkExtent2D{static_cast<uint32_t>(width), static_cast<uint32_t>(height)};
1025
1026 // TODO(http://b/134182502): Figure out what the max extent should be.
1027 capabilities->minImageExtent = VkExtent2D{1, 1};
1028 capabilities->maxImageExtent = VkExtent2D{4096, 4096};
1029
1030 if (capabilities->maxImageExtent.height <
1031 capabilities->currentExtent.height) {
1032 capabilities->maxImageExtent.height =
1033 capabilities->currentExtent.height;
1034 }
1035
1036 if (capabilities->maxImageExtent.width <
1037 capabilities->currentExtent.width) {
1038 capabilities->maxImageExtent.width = capabilities->currentExtent.width;
1039 }
1040
1041 capabilities->maxImageArrayLayers = 1;
1042
1043 capabilities->supportedTransforms = kSupportedTransforms;
1044 capabilities->currentTransform =
1045 TranslateNativeToVulkanTransform(transform_hint);
1046
1047 // On Android, window composition is a WindowManager property, not something
1048 // associated with the bufferqueue. It can't be changed from here.
1049 capabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
1050
1051 capabilities->supportedUsageFlags =
1052 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
1053 VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT |
1054 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
1055 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
1056
1057 for (auto pNext = reinterpret_cast<VkBaseOutStructure*>(pSurfaceCapabilities->pNext);
1058 pNext; pNext = reinterpret_cast<VkBaseOutStructure*>(pNext->pNext)) {
1059
1060 switch (pNext->sType) {
1061 case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR: {
1062 VkSharedPresentSurfaceCapabilitiesKHR* shared_caps =
1063 reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>(pNext);
1064 // Claim same set of usage flags are supported for
1065 // shared present modes as for other modes.
1066 shared_caps->sharedPresentSupportedUsageFlags =
1067 pSurfaceCapabilities->surfaceCapabilities
1068 .supportedUsageFlags;
1069 } break;
1070
1071 case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: {
1072 VkSurfaceProtectedCapabilitiesKHR* protected_caps =
1073 reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>(pNext);
1074 protected_caps->supportsProtected = VK_TRUE;
1075 } break;
1076
1077 case VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT: {
1078 VkSurfacePresentScalingCapabilitiesEXT* scaling_caps =
1079 reinterpret_cast<VkSurfacePresentScalingCapabilitiesEXT*>(pNext);
1080 // By default, Android stretches the buffer to fit the window,
1081 // without preserving aspect ratio. Other modes are technically possible
1082 // but consult with CoGS team before exposing them here!
1083 scaling_caps->supportedPresentScaling = VK_PRESENT_SCALING_STRETCH_BIT_EXT;
1084
1085 // Since we always scale, we don't support any gravity.
1086 scaling_caps->supportedPresentGravityX = 0;
1087 scaling_caps->supportedPresentGravityY = 0;
1088
1089 // Scaled image limits are just the basic image limits
1090 scaling_caps->minScaledImageExtent = capabilities->minImageExtent;
1091 scaling_caps->maxScaledImageExtent = capabilities->maxImageExtent;
1092 } break;
1093
1094 case VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT: {
1095 VkSurfacePresentModeCompatibilityEXT* mode_caps =
1096 reinterpret_cast<VkSurfacePresentModeCompatibilityEXT*>(pNext);
1097
1098 ALOG_ASSERT(pPresentMode,
1099 "querying VkSurfacePresentModeCompatibilityEXT "
1100 "requires VkSurfacePresentModeEXT to be provided");
1101 std::vector<VkPresentModeKHR> compatibleModes;
1102 compatibleModes.push_back(pPresentMode->presentMode);
1103
1104 switch (pPresentMode->presentMode) {
1105 // Shared modes are both compatible with each other.
1106 case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
1107 compatibleModes.push_back(VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR);
1108 break;
1109 case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
1110 compatibleModes.push_back(VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR);
1111 break;
1112 default:
1113 // Other modes are only compatible with themselves.
1114 // TODO: consider whether switching between FIFO and MAILBOX is reasonable
1115 break;
1116 }
1117
1118 // Note: this does not generate VK_INCOMPLETE since we're nested inside
1119 // a larger query and there would be no way to determine exactly where it came from.
1120 CopyWithIncomplete(compatibleModes, mode_caps->pPresentModes,
1121 &mode_caps->presentModeCount);
1122 } break;
1123
1124 default:
1125 // Ignore all other extension structs
1126 break;
1127 }
1128 }
1129
1130 return VK_SUCCESS;
1131 }
1132
1133 VKAPI_ATTR
GetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,uint32_t * pSurfaceFormatCount,VkSurfaceFormat2KHR * pSurfaceFormats)1134 VkResult GetPhysicalDeviceSurfaceFormats2KHR(
1135 VkPhysicalDevice physicalDevice,
1136 const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
1137 uint32_t* pSurfaceFormatCount,
1138 VkSurfaceFormat2KHR* pSurfaceFormats) {
1139 ATRACE_CALL();
1140
1141 if (!pSurfaceFormats) {
1142 return GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice,
1143 pSurfaceInfo->surface,
1144 pSurfaceFormatCount, nullptr);
1145 }
1146
1147 // temp vector for forwarding; we'll marshal it into the pSurfaceFormats
1148 // after the call.
1149 std::vector<VkSurfaceFormatKHR> surface_formats(*pSurfaceFormatCount);
1150 VkResult result = GetPhysicalDeviceSurfaceFormatsKHR(
1151 physicalDevice, pSurfaceInfo->surface, pSurfaceFormatCount,
1152 surface_formats.data());
1153
1154 if (result != VK_SUCCESS && result != VK_INCOMPLETE) {
1155 return result;
1156 }
1157
1158 const auto& driver = GetData(physicalDevice).driver;
1159
1160 // marshal results individually due to stride difference.
1161 uint32_t formats_to_marshal = *pSurfaceFormatCount;
1162 for (uint32_t i = 0u; i < formats_to_marshal; i++) {
1163 pSurfaceFormats[i].surfaceFormat = surface_formats[i];
1164
1165 // Query the compression properties for the surface format
1166 VkSurfaceFormat2KHR* pSurfaceFormat = &pSurfaceFormats[i];
1167 while (pSurfaceFormat->pNext) {
1168 pSurfaceFormat =
1169 reinterpret_cast<VkSurfaceFormat2KHR*>(pSurfaceFormat->pNext);
1170 switch (pSurfaceFormat->sType) {
1171 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT: {
1172 VkImageCompressionPropertiesEXT* surfaceCompressionProps =
1173 reinterpret_cast<VkImageCompressionPropertiesEXT*>(
1174 pSurfaceFormat);
1175
1176 if (surfaceCompressionProps &&
1177 (driver.GetPhysicalDeviceImageFormatProperties2KHR ||
1178 driver.GetPhysicalDeviceImageFormatProperties2)) {
1179 VkPhysicalDeviceImageFormatInfo2 imageFormatInfo = {};
1180 imageFormatInfo.sType =
1181 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
1182 imageFormatInfo.format =
1183 pSurfaceFormats[i].surfaceFormat.format;
1184 imageFormatInfo.type = VK_IMAGE_TYPE_2D;
1185 imageFormatInfo.usage =
1186 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
1187 imageFormatInfo.pNext = nullptr;
1188
1189 VkImageCompressionControlEXT compressionControl = {};
1190 compressionControl.sType =
1191 VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT;
1192 compressionControl.pNext = imageFormatInfo.pNext;
1193 compressionControl.flags =
1194 VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT;
1195
1196 imageFormatInfo.pNext = &compressionControl;
1197
1198 VkImageCompressionPropertiesEXT compressionProps = {};
1199 compressionProps.sType =
1200 VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT;
1201 compressionProps.pNext = nullptr;
1202
1203 VkImageFormatProperties2KHR imageFormatProps = {};
1204 imageFormatProps.sType =
1205 VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR;
1206 imageFormatProps.pNext = &compressionProps;
1207
1208 VkResult compressionRes =
1209 GetPhysicalDeviceImageFormatProperties2(
1210 physicalDevice, &imageFormatInfo,
1211 &imageFormatProps);
1212 if (compressionRes == VK_SUCCESS) {
1213 surfaceCompressionProps->imageCompressionFlags =
1214 compressionProps.imageCompressionFlags;
1215 surfaceCompressionProps
1216 ->imageCompressionFixedRateFlags =
1217 compressionProps.imageCompressionFixedRateFlags;
1218 } else {
1219 return compressionRes;
1220 }
1221 }
1222 } break;
1223
1224 default:
1225 // Ignore all other extension structs
1226 break;
1227 }
1228 }
1229 }
1230
1231 return result;
1232 }
1233
1234 VKAPI_ATTR
GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice pdev,VkSurfaceKHR surface,uint32_t * count,VkPresentModeKHR * modes)1235 VkResult GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice pdev,
1236 VkSurfaceKHR surface,
1237 uint32_t* count,
1238 VkPresentModeKHR* modes) {
1239 ATRACE_CALL();
1240
1241 int err;
1242 int query_value;
1243 std::vector<VkPresentModeKHR> present_modes;
1244 if (surface == VK_NULL_HANDLE) {
1245 const InstanceData& instance_data = GetData(pdev);
1246 ProcHook::Extension surfaceless = ProcHook::GOOGLE_surfaceless_query;
1247 bool surfaceless_enabled =
1248 instance_data.hook_extensions.test(surfaceless);
1249 if (!surfaceless_enabled) {
1250 return VK_ERROR_SURFACE_LOST_KHR;
1251 }
1252 // Support for VK_GOOGLE_surfaceless_query. The primary purpose of this
1253 // extension for this function is for
1254 // VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR and
1255 // VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR. We technically cannot
1256 // know if VK_PRESENT_MODE_SHARED_MAILBOX_KHR is supported without a
1257 // surface, and that cannot be relied upon. Therefore, don't return it.
1258 present_modes.push_back(VK_PRESENT_MODE_FIFO_KHR);
1259 } else {
1260 ANativeWindow* window = SurfaceFromHandle(surface)->window.get();
1261
1262 err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
1263 &query_value);
1264 if (err != android::OK || query_value < 0) {
1265 ALOGE(
1266 "NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d) "
1267 "value=%d",
1268 strerror(-err), err, query_value);
1269 return VK_ERROR_SURFACE_LOST_KHR;
1270 }
1271 uint32_t min_undequeued_buffers = static_cast<uint32_t>(query_value);
1272
1273 err =
1274 window->query(window, NATIVE_WINDOW_MAX_BUFFER_COUNT, &query_value);
1275 if (err != android::OK || query_value < 0) {
1276 ALOGE(
1277 "NATIVE_WINDOW_MAX_BUFFER_COUNT query failed: %s (%d) value=%d",
1278 strerror(-err), err, query_value);
1279 return VK_ERROR_SURFACE_LOST_KHR;
1280 }
1281 uint32_t max_buffer_count = static_cast<uint32_t>(query_value);
1282
1283 if (min_undequeued_buffers + 1 < max_buffer_count)
1284 present_modes.push_back(VK_PRESENT_MODE_MAILBOX_KHR);
1285 present_modes.push_back(VK_PRESENT_MODE_FIFO_KHR);
1286 }
1287
1288 VkPhysicalDevicePresentationPropertiesANDROID present_properties;
1289 QueryPresentationProperties(pdev, &present_properties);
1290 if (present_properties.sharedImage) {
1291 present_modes.push_back(VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR);
1292 present_modes.push_back(VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR);
1293 }
1294
1295 return CopyWithIncomplete(present_modes, modes, count);
1296 }
1297
1298 VKAPI_ATTR
GetDeviceGroupPresentCapabilitiesKHR(VkDevice,VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities)1299 VkResult GetDeviceGroupPresentCapabilitiesKHR(
1300 VkDevice,
1301 VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) {
1302 ATRACE_CALL();
1303
1304 ALOGV_IF(pDeviceGroupPresentCapabilities->sType !=
1305 VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
1306 "vkGetDeviceGroupPresentCapabilitiesKHR: invalid "
1307 "VkDeviceGroupPresentCapabilitiesKHR structure type %d",
1308 pDeviceGroupPresentCapabilities->sType);
1309
1310 memset(pDeviceGroupPresentCapabilities->presentMask, 0,
1311 sizeof(pDeviceGroupPresentCapabilities->presentMask));
1312
1313 // assume device group of size 1
1314 pDeviceGroupPresentCapabilities->presentMask[0] = 1 << 0;
1315 pDeviceGroupPresentCapabilities->modes =
1316 VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
1317
1318 return VK_SUCCESS;
1319 }
1320
1321 VKAPI_ATTR
GetDeviceGroupSurfacePresentModesKHR(VkDevice,VkSurfaceKHR,VkDeviceGroupPresentModeFlagsKHR * pModes)1322 VkResult GetDeviceGroupSurfacePresentModesKHR(
1323 VkDevice,
1324 VkSurfaceKHR,
1325 VkDeviceGroupPresentModeFlagsKHR* pModes) {
1326 ATRACE_CALL();
1327
1328 *pModes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
1329 return VK_SUCCESS;
1330 }
1331
1332 VKAPI_ATTR
GetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice,VkSurfaceKHR surface,uint32_t * pRectCount,VkRect2D * pRects)1333 VkResult GetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice,
1334 VkSurfaceKHR surface,
1335 uint32_t* pRectCount,
1336 VkRect2D* pRects) {
1337 ATRACE_CALL();
1338
1339 if (!pRects) {
1340 *pRectCount = 1;
1341 } else {
1342 uint32_t count = std::min(*pRectCount, 1u);
1343 bool incomplete = *pRectCount < 1;
1344
1345 *pRectCount = count;
1346
1347 if (incomplete) {
1348 return VK_INCOMPLETE;
1349 }
1350
1351 int err;
1352 ANativeWindow* window = SurfaceFromHandle(surface)->window.get();
1353
1354 int width = 0, height = 0;
1355 err = window->query(window, NATIVE_WINDOW_DEFAULT_WIDTH, &width);
1356 if (err != android::OK) {
1357 ALOGE("NATIVE_WINDOW_DEFAULT_WIDTH query failed: %s (%d)",
1358 strerror(-err), err);
1359 }
1360 err = window->query(window, NATIVE_WINDOW_DEFAULT_HEIGHT, &height);
1361 if (err != android::OK) {
1362 ALOGE("NATIVE_WINDOW_DEFAULT_WIDTH query failed: %s (%d)",
1363 strerror(-err), err);
1364 }
1365
1366 pRects[0].offset.x = 0;
1367 pRects[0].offset.y = 0;
1368 pRects[0].extent = VkExtent2D{static_cast<uint32_t>(width),
1369 static_cast<uint32_t>(height)};
1370 }
1371 return VK_SUCCESS;
1372 }
1373
DestroySwapchainInternal(VkDevice device,VkSwapchainKHR swapchain_handle,const VkAllocationCallbacks * allocator)1374 static void DestroySwapchainInternal(VkDevice device,
1375 VkSwapchainKHR swapchain_handle,
1376 const VkAllocationCallbacks* allocator) {
1377 ATRACE_CALL();
1378
1379 const auto& dispatch = GetData(device).driver;
1380 Swapchain* swapchain = SwapchainFromHandle(swapchain_handle);
1381 if (!swapchain) {
1382 return;
1383 }
1384
1385 bool active = swapchain->surface.swapchain_handle == swapchain_handle;
1386 ANativeWindow* window = active ? swapchain->surface.window.get() : nullptr;
1387
1388 if (window && swapchain->frame_timestamps_enabled) {
1389 native_window_enable_frame_timestamps(window, false);
1390 }
1391
1392 for (uint32_t i = 0; i < swapchain->num_images; i++) {
1393 ReleaseSwapchainImage(device, swapchain->shared, window, -1,
1394 swapchain->images[i], false);
1395 }
1396
1397 if (active) {
1398 swapchain->surface.swapchain_handle = VK_NULL_HANDLE;
1399 }
1400
1401 if (!allocator) {
1402 allocator = &GetData(device).allocator;
1403 }
1404
1405 swapchain->~Swapchain();
1406 allocator->pfnFree(allocator->pUserData, swapchain);
1407 }
1408
getProducerUsage(const VkDevice & device,const VkSwapchainCreateInfoKHR * create_info,const VkSwapchainImageUsageFlagsANDROID swapchain_image_usage,bool create_protected_swapchain,uint64_t * producer_usage)1409 static VkResult getProducerUsage(const VkDevice& device,
1410 const VkSwapchainCreateInfoKHR* create_info,
1411 const VkSwapchainImageUsageFlagsANDROID swapchain_image_usage,
1412 bool create_protected_swapchain,
1413 uint64_t* producer_usage) {
1414 // Get the physical device to query the appropriate producer usage
1415 const VkPhysicalDevice& pdev = GetData(device).driver_physical_device;
1416 const InstanceData& instance_data = GetData(pdev);
1417 const InstanceDriverTable& instance_dispatch = instance_data.driver;
1418 if (instance_dispatch.GetPhysicalDeviceImageFormatProperties2 ||
1419 instance_dispatch.GetPhysicalDeviceImageFormatProperties2KHR) {
1420 // Look through the create_info pNext chain passed to createSwapchainKHR
1421 // for an image compression control struct.
1422 // if one is found AND the appropriate extensions are enabled, create a
1423 // VkImageCompressionControlEXT structure to pass on to
1424 // GetPhysicalDeviceImageFormatProperties2
1425 void* compression_control_pNext = nullptr;
1426 VkImageCompressionControlEXT image_compression = {};
1427 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1428 while (create_infos->pNext) {
1429 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(create_infos->pNext);
1430 switch (create_infos->sType) {
1431 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1432 const VkImageCompressionControlEXT* compression_infos =
1433 reinterpret_cast<const VkImageCompressionControlEXT*>(create_infos);
1434 image_compression = *compression_infos;
1435 image_compression.pNext = nullptr;
1436 compression_control_pNext = &image_compression;
1437 } break;
1438 default:
1439 // Ignore all other info structs
1440 break;
1441 }
1442 }
1443
1444 // call GetPhysicalDeviceImageFormatProperties2KHR
1445 VkPhysicalDeviceExternalImageFormatInfo external_image_format_info = {
1446 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
1447 .pNext = compression_control_pNext,
1448 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
1449 };
1450
1451 // AHB does not have an sRGB format so we can't pass it to GPDIFP
1452 // We need to convert the format to unorm if it is srgb
1453 VkFormat format = create_info->imageFormat;
1454 if (format == VK_FORMAT_R8G8B8A8_SRGB) {
1455 format = VK_FORMAT_R8G8B8A8_UNORM;
1456 }
1457
1458 VkPhysicalDeviceImageFormatInfo2 image_format_info = {
1459 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
1460 .pNext = &external_image_format_info,
1461 .format = format,
1462 .type = VK_IMAGE_TYPE_2D,
1463 .tiling = VK_IMAGE_TILING_OPTIMAL,
1464 .usage = create_info->imageUsage,
1465 .flags = create_protected_swapchain ? VK_IMAGE_CREATE_PROTECTED_BIT : 0u,
1466 };
1467
1468 VkAndroidHardwareBufferUsageANDROID ahb_usage;
1469 ahb_usage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
1470 ahb_usage.pNext = nullptr;
1471
1472 VkImageFormatProperties2 image_format_properties;
1473 image_format_properties.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
1474 image_format_properties.pNext = &ahb_usage;
1475
1476 if (instance_dispatch.GetPhysicalDeviceImageFormatProperties2) {
1477 VkResult result = instance_dispatch.GetPhysicalDeviceImageFormatProperties2(
1478 pdev, &image_format_info, &image_format_properties);
1479 if (result != VK_SUCCESS) {
1480 ALOGE("VkGetPhysicalDeviceImageFormatProperties2 for AHB usage failed: %d", result);
1481 return VK_ERROR_SURFACE_LOST_KHR;
1482 }
1483 }
1484 else {
1485 VkResult result = instance_dispatch.GetPhysicalDeviceImageFormatProperties2KHR(
1486 pdev, &image_format_info,
1487 &image_format_properties);
1488 if (result != VK_SUCCESS) {
1489 ALOGE("VkGetPhysicalDeviceImageFormatProperties2KHR for AHB usage failed: %d",
1490 result);
1491 return VK_ERROR_SURFACE_LOST_KHR;
1492 }
1493 }
1494
1495 // Determine if USAGE_FRONT_BUFFER is needed.
1496 // GPDIFP2 has no means of using VkSwapchainImageUsageFlagsANDROID when
1497 // querying for producer_usage. So androidHardwareBufferUsage will not
1498 // contain USAGE_FRONT_BUFFER. We need to manually check for usage here.
1499 if (!(swapchain_image_usage & VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID)) {
1500 *producer_usage = ahb_usage.androidHardwareBufferUsage;
1501 return VK_SUCCESS;
1502 }
1503
1504 // Check if USAGE_FRONT_BUFFER is supported for this swapchain
1505 AHardwareBuffer_Desc ahb_desc = {
1506 .width = create_info->imageExtent.width,
1507 .height = create_info->imageExtent.height,
1508 .layers = create_info->imageArrayLayers,
1509 .format = create_info->imageFormat,
1510 .usage = ahb_usage.androidHardwareBufferUsage | AHARDWAREBUFFER_USAGE_FRONT_BUFFER,
1511 .stride = 0, // stride is always ignored when calling isSupported()
1512 };
1513
1514 // If FRONT_BUFFER is not supported,
1515 // then we need to call GetSwapchainGrallocUsageXAndroid below
1516 if (AHardwareBuffer_isSupported(&ahb_desc)) {
1517 *producer_usage = ahb_usage.androidHardwareBufferUsage;
1518 *producer_usage |= AHARDWAREBUFFER_USAGE_FRONT_BUFFER;
1519 return VK_SUCCESS;
1520 }
1521 }
1522
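    // Fallback path: either GetPhysicalDeviceImageFormatProperties2(KHR) is
    // unavailable or FRONT_BUFFER was not supported above, so derive the
    // producer usage from the vkGetSwapchainGrallocUsage*ANDROID entry points,
    // preferring the newest version the driver exposes.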
1523 uint64_t native_usage = 0;
1524 void* usage_info_pNext = nullptr;
1525 VkResult result;
1526 VkImageCompressionControlEXT image_compression = {};
1527 const auto& dispatch = GetData(device).driver;
1528 if (dispatch.GetSwapchainGrallocUsage4ANDROID) {
1529 ATRACE_BEGIN("GetSwapchainGrallocUsage4ANDROID");
1530 VkGrallocUsageInfo2ANDROID gralloc_usage_info = {};
1531 gralloc_usage_info.sType =
1532 VK_STRUCTURE_TYPE_GRALLOC_USAGE_INFO_2_ANDROID;
1533 gralloc_usage_info.format = create_info->imageFormat;
1534 gralloc_usage_info.imageUsage = create_info->imageUsage;
1535 gralloc_usage_info.swapchainImageUsage = swapchain_image_usage;
1536
1537         // Look through the pNext chain for an image compression control struct.
1538         // If one is found AND the appropriate extensions are enabled,
1539         // append it to the gralloc usage pNext chain.
1540 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1541 while (create_infos->pNext) {
1542 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(
1543 create_infos->pNext);
1544 switch (create_infos->sType) {
1545 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1546 const VkImageCompressionControlEXT* compression_infos =
1547 reinterpret_cast<const VkImageCompressionControlEXT*>(
1548 create_infos);
1549 image_compression = *compression_infos;
1550 image_compression.pNext = nullptr;
1551 usage_info_pNext = &image_compression;
1552 } break;
1553
1554 default:
1555 // Ignore all other info structs
1556 break;
1557 }
1558 }
1559 gralloc_usage_info.pNext = usage_info_pNext;
1560
1561 result = dispatch.GetSwapchainGrallocUsage4ANDROID(
1562 device, &gralloc_usage_info, &native_usage);
1563 ATRACE_END();
1564 if (result != VK_SUCCESS) {
1565 ALOGE("vkGetSwapchainGrallocUsage4ANDROID failed: %d", result);
1566 return VK_ERROR_SURFACE_LOST_KHR;
1567 }
1568 } else if (dispatch.GetSwapchainGrallocUsage3ANDROID) {
1569 ATRACE_BEGIN("GetSwapchainGrallocUsage3ANDROID");
1570 VkGrallocUsageInfoANDROID gralloc_usage_info = {};
1571 gralloc_usage_info.sType = VK_STRUCTURE_TYPE_GRALLOC_USAGE_INFO_ANDROID;
1572 gralloc_usage_info.format = create_info->imageFormat;
1573 gralloc_usage_info.imageUsage = create_info->imageUsage;
1574
1575         // Look through the pNext chain for an image compression control struct.
1576         // If one is found AND the appropriate extensions are enabled,
1577         // append it to the gralloc usage pNext chain.
1578 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1579 while (create_infos->pNext) {
1580 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(
1581 create_infos->pNext);
1582 switch (create_infos->sType) {
1583 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1584 const VkImageCompressionControlEXT* compression_infos =
1585 reinterpret_cast<const VkImageCompressionControlEXT*>(
1586 create_infos);
1587 image_compression = *compression_infos;
1588 image_compression.pNext = nullptr;
1589 usage_info_pNext = &image_compression;
1590 } break;
1591
1592 default:
1593 // Ignore all other info structs
1594 break;
1595 }
1596 }
1597 gralloc_usage_info.pNext = usage_info_pNext;
1598
1599 result = dispatch.GetSwapchainGrallocUsage3ANDROID(
1600 device, &gralloc_usage_info, &native_usage);
1601 ATRACE_END();
1602 if (result != VK_SUCCESS) {
1603 ALOGE("vkGetSwapchainGrallocUsage3ANDROID failed: %d", result);
1604 return VK_ERROR_SURFACE_LOST_KHR;
1605 }
1606 } else if (dispatch.GetSwapchainGrallocUsage2ANDROID) {
1607 uint64_t consumer_usage, producer_usage;
1608 ATRACE_BEGIN("GetSwapchainGrallocUsage2ANDROID");
1609 result = dispatch.GetSwapchainGrallocUsage2ANDROID(
1610 device, create_info->imageFormat, create_info->imageUsage,
1611 swapchain_image_usage, &consumer_usage, &producer_usage);
1612 ATRACE_END();
1613 if (result != VK_SUCCESS) {
1614 ALOGE("vkGetSwapchainGrallocUsage2ANDROID failed: %d", result);
1615 return VK_ERROR_SURFACE_LOST_KHR;
1616 }
1617 native_usage =
1618 convertGralloc1ToBufferUsage(producer_usage, consumer_usage);
1619 } else if (dispatch.GetSwapchainGrallocUsageANDROID) {
1620 ATRACE_BEGIN("GetSwapchainGrallocUsageANDROID");
1621 int32_t legacy_usage = 0;
1622 result = dispatch.GetSwapchainGrallocUsageANDROID(
1623 device, create_info->imageFormat, create_info->imageUsage,
1624 &legacy_usage);
1625 ATRACE_END();
1626 if (result != VK_SUCCESS) {
1627 ALOGE("vkGetSwapchainGrallocUsageANDROID failed: %d", result);
1628 return VK_ERROR_SURFACE_LOST_KHR;
1629 }
1630 native_usage = static_cast<uint64_t>(legacy_usage);
1631 }
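    // If the driver exposes none of the gralloc usage queries, native_usage
    // stays 0 and the caller falls back to the consumer-side usage bits alone.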
1632 *producer_usage = native_usage;
1633
1634 return VK_SUCCESS;
1635 }
1636
1637 VKAPI_ATTR
1638 VkResult CreateSwapchainKHR(VkDevice device,
1639 const VkSwapchainCreateInfoKHR* create_info,
1640 const VkAllocationCallbacks* allocator,
1641 VkSwapchainKHR* swapchain_handle) {
1642 ATRACE_CALL();
1643
1644 int err;
1645 VkResult result = VK_SUCCESS;
1646
1647 ALOGV("vkCreateSwapchainKHR: surface=0x%" PRIx64
1648 " minImageCount=%u imageFormat=%u imageColorSpace=%u"
1649 " imageExtent=%ux%u imageUsage=%#x preTransform=%u presentMode=%u"
1650 " oldSwapchain=0x%" PRIx64,
1651 reinterpret_cast<uint64_t>(create_info->surface),
1652 create_info->minImageCount, create_info->imageFormat,
1653 create_info->imageColorSpace, create_info->imageExtent.width,
1654 create_info->imageExtent.height, create_info->imageUsage,
1655 create_info->preTransform, create_info->presentMode,
1656 reinterpret_cast<uint64_t>(create_info->oldSwapchain));
1657
1658 if (!allocator)
1659 allocator = &GetData(device).allocator;
1660
1661 PixelFormat native_pixel_format =
1662 GetNativePixelFormat(create_info->imageFormat);
1663 DataSpace native_dataspace = GetNativeDataspace(
1664 create_info->imageColorSpace, create_info->imageFormat);
1665 if (native_dataspace == DataSpace::UNKNOWN) {
1666 ALOGE(
1667 "CreateSwapchainKHR(VkSwapchainCreateInfoKHR.imageColorSpace = %d) "
1668 "failed: Unsupported color space",
1669 create_info->imageColorSpace);
1670 return VK_ERROR_INITIALIZATION_FAILED;
1671 }
1672
1673 ALOGV_IF(create_info->imageArrayLayers != 1,
1674 "swapchain imageArrayLayers=%u not supported",
1675 create_info->imageArrayLayers);
1676 ALOGV_IF((create_info->preTransform & ~kSupportedTransforms) != 0,
1677 "swapchain preTransform=%#x not supported",
1678 create_info->preTransform);
1679 ALOGV_IF(!(create_info->presentMode == VK_PRESENT_MODE_FIFO_KHR ||
1680 create_info->presentMode == VK_PRESENT_MODE_MAILBOX_KHR ||
1681 create_info->presentMode == VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR ||
1682 create_info->presentMode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR),
1683 "swapchain presentMode=%u not supported",
1684 create_info->presentMode);
1685
1686 Surface& surface = *SurfaceFromHandle(create_info->surface);
1687
1688 if (surface.swapchain_handle != create_info->oldSwapchain) {
1689 ALOGV("Can't create a swapchain for VkSurfaceKHR 0x%" PRIx64
1690 " because it already has active swapchain 0x%" PRIx64
1691 " but VkSwapchainCreateInfo::oldSwapchain=0x%" PRIx64,
1692 reinterpret_cast<uint64_t>(create_info->surface),
1693 reinterpret_cast<uint64_t>(surface.swapchain_handle),
1694 reinterpret_cast<uint64_t>(create_info->oldSwapchain));
1695 return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
1696 }
1697 if (create_info->oldSwapchain != VK_NULL_HANDLE)
1698 OrphanSwapchain(device, SwapchainFromHandle(create_info->oldSwapchain));
1699
1700 // -- Reset the native window --
1701 // The native window might have been used previously, and had its properties
1702 // changed from defaults. That will affect the answer we get for queries
1703     // like MIN_UNDEQUEUED_BUFFERS. Reset to a known/default state before we
1704 // attempt such queries.
1705
1706 // The native window only allows dequeueing all buffers before any have
1707 // been queued, since after that point at least one is assumed to be in
1708 // non-FREE state at any given time. Disconnecting and re-connecting
1709 // orphans the previous buffers, getting us back to the state where we can
1710 // dequeue all buffers.
1711 //
1712 // This is not necessary if the surface was never used previously.
1713 //
1714 // TODO(http://b/134186185) recycle swapchain images more efficiently
1715 ANativeWindow* window = surface.window.get();
1716 if (surface.used_by_swapchain) {
1717 err = native_window_api_disconnect(window, NATIVE_WINDOW_API_EGL);
1718 ALOGW_IF(err != android::OK,
1719 "native_window_api_disconnect failed: %s (%d)", strerror(-err),
1720 err);
1721 err = native_window_api_connect(window, NATIVE_WINDOW_API_EGL);
1722 ALOGW_IF(err != android::OK,
1723 "native_window_api_connect failed: %s (%d)", strerror(-err),
1724 err);
1725 }
1726
1727 err =
1728 window->perform(window, NATIVE_WINDOW_SET_DEQUEUE_TIMEOUT, nsecs_t{-1});
1729 if (err != android::OK) {
1730 ALOGE("window->perform(SET_DEQUEUE_TIMEOUT) failed: %s (%d)",
1731 strerror(-err), err);
1732 return VK_ERROR_SURFACE_LOST_KHR;
1733 }
1734
1735 int swap_interval =
1736 create_info->presentMode == VK_PRESENT_MODE_MAILBOX_KHR ? 0 : 1;
1737 err = window->setSwapInterval(window, swap_interval);
1738 if (err != android::OK) {
1739         ALOGE("native_window->setSwapInterval(%d) failed: %s (%d)",
1740               swap_interval, strerror(-err), err);
1741 return VK_ERROR_SURFACE_LOST_KHR;
1742 }
1743
1744 err = native_window_set_shared_buffer_mode(window, false);
1745 if (err != android::OK) {
1746 ALOGE("native_window_set_shared_buffer_mode(false) failed: %s (%d)",
1747 strerror(-err), err);
1748 return VK_ERROR_SURFACE_LOST_KHR;
1749 }
1750
1751 err = native_window_set_auto_refresh(window, false);
1752 if (err != android::OK) {
1753 ALOGE("native_window_set_auto_refresh(false) failed: %s (%d)",
1754 strerror(-err), err);
1755 return VK_ERROR_SURFACE_LOST_KHR;
1756 }
1757
1758 // -- Configure the native window --
1759
1760 const auto& dispatch = GetData(device).driver;
1761
1762 err = native_window_set_buffers_format(
1763 window, static_cast<int>(native_pixel_format));
1764 if (err != android::OK) {
1765 ALOGE("native_window_set_buffers_format(%s) failed: %s (%d)",
1766 toString(native_pixel_format).c_str(), strerror(-err), err);
1767 return VK_ERROR_SURFACE_LOST_KHR;
1768 }
1769
1770     /* Respect the consumer's default dataspace when the resolved dataspace is HAL_DATASPACE_ARBITRARY. */
1771 if (native_dataspace != DataSpace::ARBITRARY) {
1772 err = native_window_set_buffers_data_space(
1773 window, static_cast<android_dataspace_t>(native_dataspace));
1774 if (err != android::OK) {
1775 ALOGE("native_window_set_buffers_data_space(%d) failed: %s (%d)",
1776 native_dataspace, strerror(-err), err);
1777 return VK_ERROR_SURFACE_LOST_KHR;
1778 }
1779 }
1780
1781 err = native_window_set_buffers_dimensions(
1782 window, static_cast<int>(create_info->imageExtent.width),
1783 static_cast<int>(create_info->imageExtent.height));
1784 if (err != android::OK) {
1785 ALOGE("native_window_set_buffers_dimensions(%d,%d) failed: %s (%d)",
1786 create_info->imageExtent.width, create_info->imageExtent.height,
1787 strerror(-err), err);
1788 return VK_ERROR_SURFACE_LOST_KHR;
1789 }
1790
1791 // VkSwapchainCreateInfo::preTransform indicates the transformation the app
1792 // applied during rendering. native_window_set_transform() expects the
1793 // inverse: the transform the app is requesting that the compositor perform
1794 // during composition. With native windows, pre-transform works by rendering
1795 // with the same transform the compositor is applying (as in Vulkan), but
1796 // then requesting the inverse transform, so that when the compositor does
1797     // its job the two transforms cancel each other out and the compositor ends
1798 // up applying an identity transform to the app's buffer.
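    // For example, if the app pre-rotates its rendering by 90 degrees
    // (preTransform = ROTATE_90), we ask the window for the inverse rotation,
    // so the compositor's own pass and the requested transform cancel out.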
1799 err = native_window_set_buffers_transform(
1800 window, InvertTransformToNative(create_info->preTransform));
1801 if (err != android::OK) {
1802 ALOGE("native_window_set_buffers_transform(%d) failed: %s (%d)",
1803 InvertTransformToNative(create_info->preTransform),
1804 strerror(-err), err);
1805 return VK_ERROR_SURFACE_LOST_KHR;
1806 }
1807
1808 err = native_window_set_scaling_mode(
1809 window, NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
1810 if (err != android::OK) {
1811 ALOGE("native_window_set_scaling_mode(SCALE_TO_WINDOW) failed: %s (%d)",
1812 strerror(-err), err);
1813 return VK_ERROR_SURFACE_LOST_KHR;
1814 }
1815
1816 VkSwapchainImageUsageFlagsANDROID swapchain_image_usage = 0;
1817 if (IsSharedPresentMode(create_info->presentMode)) {
1818 swapchain_image_usage |= VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID;
1819 err = native_window_set_shared_buffer_mode(window, true);
1820 if (err != android::OK) {
1821 ALOGE("native_window_set_shared_buffer_mode failed: %s (%d)", strerror(-err), err);
1822 return VK_ERROR_SURFACE_LOST_KHR;
1823 }
1824 }
1825
1826 if (create_info->presentMode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR) {
1827 err = native_window_set_auto_refresh(window, true);
1828 if (err != android::OK) {
1829 ALOGE("native_window_set_auto_refresh failed: %s (%d)", strerror(-err), err);
1830 return VK_ERROR_SURFACE_LOST_KHR;
1831 }
1832 }
1833
1834 int query_value;
1835     // TODO: Now that we are calling into GPDSC2 directly, this query may be redundant,
1836     // and the call to std::max(min_buffer_count, num_images) may be redundant as well.
1837 err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
1838 &query_value);
1839 if (err != android::OK || query_value < 0) {
1840 ALOGE("window->query failed: %s (%d) value=%d", strerror(-err), err,
1841 query_value);
1842 return VK_ERROR_SURFACE_LOST_KHR;
1843 }
1844 const uint32_t min_undequeued_buffers = static_cast<uint32_t>(query_value);
1845
1846 // Lower layer insists that we have at least min_undequeued_buffers + 1
1847 // buffers. This is wasteful and we'd like to relax it in the shared case,
1848 // but not all the pieces are in place for that to work yet. Note we only
1849 // lie to the lower layer--we don't want to give the app back a swapchain
1850 // with extra images (which they can't actually use!).
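    // e.g. with min_undequeued_buffers = 2 we allocate at least 3 buffers,
    // even when num_images (what the app sees) ends up smaller than that.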
1851 const uint32_t min_buffer_count = min_undequeued_buffers + 1;
1852
1853     // Call into GPDSC2 to get the minimum and maximum allowable image count for the surface
1854     // of interest. This step is only necessary if the app requests a number of images
1855     // (create_info->minImageCount) outside the range the surface capabilities allow.
1856     // An app should be calling GPDSC2 and using those values to set create_info, but in the
1857     // event that the app has hard-coded image counts, an error can occur.
1858 VkSurfacePresentModeEXT present_mode = {
1859 VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT,
1860 nullptr,
1861 create_info->presentMode
1862 };
1863 VkPhysicalDeviceSurfaceInfo2KHR surface_info2 = {
1864 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
1865 &present_mode,
1866 create_info->surface
1867 };
1868 VkSurfaceCapabilities2KHR surface_capabilities2 = {
1869 VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
1870 nullptr,
1871 {},
1872 };
1873 result = GetPhysicalDeviceSurfaceCapabilities2KHR(GetData(device).driver_physical_device,
1874 &surface_info2, &surface_capabilities2);
1875
1876 uint32_t num_images = create_info->minImageCount;
1877 num_images = std::clamp(num_images,
1878 surface_capabilities2.surfaceCapabilities.minImageCount,
1879 surface_capabilities2.surfaceCapabilities.maxImageCount);
1880
1881 const uint32_t buffer_count = std::max(min_buffer_count, num_images);
1882 err = native_window_set_buffer_count(window, buffer_count);
1883 if (err != android::OK) {
1884 ALOGE("native_window_set_buffer_count(%d) failed: %s (%d)", buffer_count,
1885 strerror(-err), err);
1886 return VK_ERROR_SURFACE_LOST_KHR;
1887 }
1888
1889 // In shared mode the num_images must be one regardless of how many
1890 // buffers were allocated for the buffer queue.
1891 if (swapchain_image_usage & VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID) {
1892 num_images = 1;
1893 }
1894
1895     // Look through the create_info pNext chain passed to vkCreateSwapchainKHR
1896     // for an image compression control struct.
1897     // If one is found AND the appropriate extensions are enabled, create a
1898     // VkImageCompressionControlEXT structure to pass on to VkImageCreateInfo.
1899     // TODO: check that the imageCompressionControlSwapchain feature is enabled.
1900 void* usage_info_pNext = nullptr;
1901 VkImageCompressionControlEXT image_compression = {};
1902 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1903 while (create_infos->pNext) {
1904 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(create_infos->pNext);
1905 switch (create_infos->sType) {
1906 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1907 const VkImageCompressionControlEXT* compression_infos =
1908 reinterpret_cast<const VkImageCompressionControlEXT*>(create_infos);
1909 image_compression = *compression_infos;
1910 image_compression.pNext = nullptr;
1911 usage_info_pNext = &image_compression;
1912 } break;
1913
1914 default:
1915 // Ignore all other info structs
1916 break;
1917 }
1918 }
1919
1920 // Get the appropriate native_usage for the images
1921 // Get the consumer usage
1922 uint64_t native_usage = surface.consumer_usage;
1923 // Determine if the swapchain is protected
1924 bool create_protected_swapchain = false;
1925 if (create_info->flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR) {
1926 create_protected_swapchain = true;
1927 native_usage |= BufferUsage::PROTECTED;
1928 }
1929 // Get the producer usage
1930 uint64_t producer_usage;
1931 result = getProducerUsage(device, create_info, swapchain_image_usage, create_protected_swapchain, &producer_usage);
1932 if (result != VK_SUCCESS) {
1933 return result;
1934 }
1935 native_usage |= producer_usage;
1936
1937 err = native_window_set_usage(window, native_usage);
1938 if (err != android::OK) {
1939 ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), err);
1940 return VK_ERROR_SURFACE_LOST_KHR;
1941 }
1942
1943 int transform_hint;
1944 err = window->query(window, NATIVE_WINDOW_TRANSFORM_HINT, &transform_hint);
1945 if (err != android::OK) {
1946 ALOGE("NATIVE_WINDOW_TRANSFORM_HINT query failed: %s (%d)",
1947 strerror(-err), err);
1948 return VK_ERROR_SURFACE_LOST_KHR;
1949 }
1950
1951 int64_t refresh_duration;
1952 err = native_window_get_refresh_cycle_duration(window, &refresh_duration);
1953 if (err != android::OK) {
1954 ALOGE("native_window_get_refresh_cycle_duration query failed: %s (%d)",
1955 strerror(-err), err);
1956 return VK_ERROR_SURFACE_LOST_KHR;
1957 }
1958 // -- Allocate our Swapchain object --
1959 // After this point, we must deallocate the swapchain on error.
1960
1961 void* mem = allocator->pfnAllocation(allocator->pUserData,
1962 sizeof(Swapchain), alignof(Swapchain),
1963 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1964
1965 if (!mem)
1966 return VK_ERROR_OUT_OF_HOST_MEMORY;
1967
1968 Swapchain* swapchain = new (mem)
1969 Swapchain(surface, num_images, create_info->presentMode,
1970 TranslateVulkanToNativeTransform(create_info->preTransform),
1971 refresh_duration);
1972 VkSwapchainImageCreateInfoANDROID swapchain_image_create = {
1973 #pragma clang diagnostic push
1974 #pragma clang diagnostic ignored "-Wold-style-cast"
1975 .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID,
1976 #pragma clang diagnostic pop
1977 .pNext = usage_info_pNext,
1978 .usage = swapchain_image_usage,
1979 };
1980 VkNativeBufferANDROID image_native_buffer = {
1981 #pragma clang diagnostic push
1982 #pragma clang diagnostic ignored "-Wold-style-cast"
1983 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
1984 #pragma clang diagnostic pop
1985 .pNext = &swapchain_image_create,
1986 };
1987
1988 VkImageCreateInfo image_create = {
1989 .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
1990 .pNext = nullptr,
1991 .flags = create_protected_swapchain ? VK_IMAGE_CREATE_PROTECTED_BIT : 0u,
1992 .imageType = VK_IMAGE_TYPE_2D,
1993 .format = create_info->imageFormat,
1994 .extent = {
1995 create_info->imageExtent.width,
1996 create_info->imageExtent.height,
1997 1
1998 },
1999 .mipLevels = 1,
2000 .arrayLayers = 1,
2001 .samples = VK_SAMPLE_COUNT_1_BIT,
2002 .tiling = VK_IMAGE_TILING_OPTIMAL,
2003 .usage = create_info->imageUsage,
2004 .sharingMode = create_info->imageSharingMode,
2005 .queueFamilyIndexCount = create_info->queueFamilyIndexCount,
2006 .pQueueFamilyIndices = create_info->pQueueFamilyIndices,
2007 };
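    // image_create.pNext is filled in per-path below: either a
    // VkImageSwapchainCreateInfoKHR for deferred allocation, or a
    // VkNativeBufferANDROID describing the dequeued gralloc buffer.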
2008
2009 // Note: don't do deferred allocation for shared present modes. There's only one buffer
2010 // involved so very little benefit.
2011 if ((create_info->flags & VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT) &&
2012 !IsSharedPresentMode(create_info->presentMode)) {
2013 // Don't want to touch the underlying gralloc buffers yet;
2014 // instead just create unbound VkImages which will later be bound to memory inside
2015 // AcquireNextImage.
2016 VkImageSwapchainCreateInfoKHR image_swapchain_create = {
2017 .sType = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
2018 .pNext = nullptr,
2019 .swapchain = HandleFromSwapchain(swapchain),
2020 };
2021 image_create.pNext = &image_swapchain_create;
2022
2023 for (uint32_t i = 0; i < num_images; i++) {
2024 Swapchain::Image& img = swapchain->images[i];
2025 img.buffer = nullptr;
2026 img.dequeued = false;
2027
2028 result = dispatch.CreateImage(device, &image_create, nullptr, &img.image);
2029 if (result != VK_SUCCESS) {
2030                 ALOGD("vkCreateImage for deferred swapchain image failed: %u", result);
2031 break;
2032 }
2033 }
2034 } else {
2035 // -- Dequeue all buffers and create a VkImage for each --
2036 // Any failures during or after this must cancel the dequeued buffers.
2037
2038 for (uint32_t i = 0; i < num_images; i++) {
2039 Swapchain::Image& img = swapchain->images[i];
2040
2041 ANativeWindowBuffer* buffer;
2042 err = window->dequeueBuffer(window, &buffer, &img.dequeue_fence);
2043 if (err != android::OK) {
2044 ALOGE("dequeueBuffer[%u] failed: %s (%d)", i, strerror(-err), err);
2045 switch (-err) {
2046 case ENOMEM:
2047 result = VK_ERROR_OUT_OF_DEVICE_MEMORY;
2048 break;
2049 default:
2050 result = VK_ERROR_SURFACE_LOST_KHR;
2051 break;
2052 }
2053 break;
2054 }
2055 img.buffer = buffer;
2056 img.dequeued = true;
2057
2058 image_native_buffer.handle = img.buffer->handle;
2059 image_native_buffer.stride = img.buffer->stride;
2060 image_native_buffer.format = img.buffer->format;
2061 image_native_buffer.usage = int(img.buffer->usage);
2062 android_convertGralloc0To1Usage(int(img.buffer->usage),
2063 &image_native_buffer.usage2.producer,
2064 &image_native_buffer.usage2.consumer);
2065 image_native_buffer.usage3 = img.buffer->usage;
2066 image_native_buffer.ahb =
2067 ANativeWindowBuffer_getHardwareBuffer(img.buffer.get());
2068 image_create.pNext = &image_native_buffer;
2069
2070 ATRACE_BEGIN("CreateImage");
2071 result =
2072 dispatch.CreateImage(device, &image_create, nullptr, &img.image);
2073 ATRACE_END();
2074 if (result != VK_SUCCESS) {
2075 ALOGD("vkCreateImage w/ native buffer failed: %u", result);
2076 break;
2077 }
2078 }
2079
2080 // -- Cancel all buffers, returning them to the queue --
2081 // If an error occurred before, also destroy the VkImage and release the
2082 // buffer reference. Otherwise, we retain a strong reference to the buffer.
2083 for (uint32_t i = 0; i < num_images; i++) {
2084 Swapchain::Image& img = swapchain->images[i];
2085 if (img.dequeued) {
2086 if (!swapchain->shared) {
2087 window->cancelBuffer(window, img.buffer.get(),
2088 img.dequeue_fence);
2089 img.dequeue_fence = -1;
2090 img.dequeued = false;
2091 }
2092 }
2093 }
2094 }
2095
2096 if (result != VK_SUCCESS) {
2097 DestroySwapchainInternal(device, HandleFromSwapchain(swapchain),
2098 allocator);
2099 return result;
2100 }
2101
2102 if (transform_hint != swapchain->pre_transform) {
2103 // Log that the app is not doing pre-rotation.
2104 android::GraphicsEnv::getInstance().setTargetStats(
2105 android::GpuStatsInfo::Stats::FALSE_PREROTATION);
2106 }
2107
2108 // Set stats for creating a Vulkan swapchain
2109 android::GraphicsEnv::getInstance().setTargetStats(
2110 android::GpuStatsInfo::Stats::CREATED_VULKAN_SWAPCHAIN);
2111
2112 surface.used_by_swapchain = true;
2113 surface.swapchain_handle = HandleFromSwapchain(swapchain);
2114 *swapchain_handle = surface.swapchain_handle;
2115 return VK_SUCCESS;
2116 }
2117
2118 VKAPI_ATTR
2119 void DestroySwapchainKHR(VkDevice device,
2120 VkSwapchainKHR swapchain_handle,
2121 const VkAllocationCallbacks* allocator) {
2122 ATRACE_CALL();
2123
2124 DestroySwapchainInternal(device, swapchain_handle, allocator);
2125 }
2126
2127 VKAPI_ATTR
2128 VkResult GetSwapchainImagesKHR(VkDevice,
2129 VkSwapchainKHR swapchain_handle,
2130 uint32_t* count,
2131 VkImage* images) {
2132 ATRACE_CALL();
2133
2134 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2135 ALOGW_IF(swapchain.surface.swapchain_handle != swapchain_handle,
2136 "getting images for non-active swapchain 0x%" PRIx64
2137 "; only dequeued image handles are valid",
2138 reinterpret_cast<uint64_t>(swapchain_handle));
2139 VkResult result = VK_SUCCESS;
2140 if (images) {
2141 uint32_t n = swapchain.num_images;
2142 if (*count < swapchain.num_images) {
2143 n = *count;
2144 result = VK_INCOMPLETE;
2145 }
2146 for (uint32_t i = 0; i < n; i++)
2147 images[i] = swapchain.images[i].image;
2148 *count = n;
2149 } else {
2150 *count = swapchain.num_images;
2151 }
2152 return result;
2153 }
2154
2155 VKAPI_ATTR
2156 VkResult AcquireNextImageKHR(VkDevice device,
2157 VkSwapchainKHR swapchain_handle,
2158 uint64_t timeout,
2159 VkSemaphore semaphore,
2160 VkFence vk_fence,
2161 uint32_t* image_index) {
2162 ATRACE_CALL();
2163
2164 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2165 ANativeWindow* window = swapchain.surface.window.get();
2166 VkResult result;
2167 int err;
2168
2169 if (swapchain.surface.swapchain_handle != swapchain_handle)
2170 return VK_ERROR_OUT_OF_DATE_KHR;
2171
2172 if (swapchain.shared) {
2173 // In shared mode, we keep the buffer dequeued all the time, so we don't
2174 // want to dequeue a buffer here. Instead, just ask the driver to ensure
2175 // the semaphore and fence passed to us will be signalled.
2176 *image_index = 0;
2177 result = GetData(device).driver.AcquireImageANDROID(
2178 device, swapchain.images[*image_index].image, -1, semaphore, vk_fence);
2179 return result;
2180 }
2181
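    // Vulkan timeouts are nanoseconds in a uint64_t; the window's dequeue
    // timeout is a signed nsecs_t where -1 means "wait forever". Anything that
    // doesn't fit is clamped to -1.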
2182 const nsecs_t acquire_next_image_timeout =
2183 timeout > (uint64_t)std::numeric_limits<nsecs_t>::max() ? -1 : timeout;
2184 if (acquire_next_image_timeout != swapchain.acquire_next_image_timeout) {
2185 // Cache the timeout to avoid the duplicate binder cost.
2186 err = window->perform(window, NATIVE_WINDOW_SET_DEQUEUE_TIMEOUT,
2187 acquire_next_image_timeout);
2188 if (err != android::OK) {
2189 ALOGE("window->perform(SET_DEQUEUE_TIMEOUT) failed: %s (%d)",
2190 strerror(-err), err);
2191 return VK_ERROR_SURFACE_LOST_KHR;
2192 }
2193 swapchain.acquire_next_image_timeout = acquire_next_image_timeout;
2194 }
2195
2196 ANativeWindowBuffer* buffer;
2197 int fence_fd;
2198 err = window->dequeueBuffer(window, &buffer, &fence_fd);
2199 if (err == android::TIMED_OUT || err == android::INVALID_OPERATION) {
2200 ALOGW("dequeueBuffer timed out: %s (%d)", strerror(-err), err);
2201 return timeout ? VK_TIMEOUT : VK_NOT_READY;
2202 } else if (err != android::OK) {
2203 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), err);
2204 return VK_ERROR_SURFACE_LOST_KHR;
2205 }
2206
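    // Match the dequeued buffer against the images recorded at swapchain
    // creation time.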
2207 uint32_t idx;
2208 for (idx = 0; idx < swapchain.num_images; idx++) {
2209 if (swapchain.images[idx].buffer.get() == buffer) {
2210 swapchain.images[idx].dequeued = true;
2211 swapchain.images[idx].dequeue_fence = fence_fd;
2212 break;
2213 }
2214 }
2215
2216 // If this is a deferred alloc swapchain, this may be the first time we've
2217 // seen a particular buffer. If so, there should be an empty slot. Find it,
2218 // and bind the gralloc buffer to the VkImage for that slot. If there is no
2219 // empty slot, then we dequeued an unexpected buffer. Non-deferred swapchains
2220 // will also take this path, but will never have an empty slot since we
2221 // populated them all upfront.
2222 if (idx == swapchain.num_images) {
2223 for (idx = 0; idx < swapchain.num_images; idx++) {
2224 if (!swapchain.images[idx].buffer) {
2225 // Note: this structure is technically required for
2226 // Vulkan correctness, even though the driver is probably going
2227 // to use everything from the VkNativeBufferANDROID below.
2228                 // This is kind of silly, but it's how we did the ANB
2229 // side of VK_KHR_swapchain v69, so we're stuck with it unless
2230 // we want to go tinkering with the ANB spec some more.
2231 VkBindImageMemorySwapchainInfoKHR bimsi = {
2232 .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
2233 .pNext = nullptr,
2234 .swapchain = swapchain_handle,
2235 .imageIndex = idx,
2236 };
2237 VkNativeBufferANDROID nb = {
2238 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
2239 .pNext = &bimsi,
2240 .handle = buffer->handle,
2241 .stride = buffer->stride,
2242 .format = buffer->format,
2243 .usage = int(buffer->usage),
2244 .usage3 = buffer->usage,
2245 .ahb = ANativeWindowBuffer_getHardwareBuffer(buffer),
2246 };
2247 android_convertGralloc0To1Usage(int(buffer->usage),
2248 &nb.usage2.producer,
2249 &nb.usage2.consumer);
2250 VkBindImageMemoryInfo bimi = {
2251 .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
2252 .pNext = &nb,
2253 .image = swapchain.images[idx].image,
2254 .memory = VK_NULL_HANDLE,
2255 .memoryOffset = 0,
2256 };
2257 result = GetData(device).driver.BindImageMemory2(device, 1, &bimi);
2258 if (result != VK_SUCCESS) {
2259 // This shouldn't really happen. If it does, something is probably
2260 // unrecoverably wrong with the swapchain and its images. Cancel
2261 // the buffer and declare the swapchain broken.
2262 ALOGE("failed to do deferred gralloc buffer bind");
2263 window->cancelBuffer(window, buffer, fence_fd);
2264 return VK_ERROR_OUT_OF_DATE_KHR;
2265 }
2266
2267 swapchain.images[idx].dequeued = true;
2268 swapchain.images[idx].dequeue_fence = fence_fd;
2269 swapchain.images[idx].buffer = buffer;
2270 break;
2271 }
2272 }
2273 }
2274
2275 // The buffer doesn't match any slot. This shouldn't normally happen, but is
2276 // possible if the bufferqueue is reconfigured behind libvulkan's back. If this
2277 // happens, just declare the swapchain to be broken and the app will recreate it.
2278 if (idx == swapchain.num_images) {
2279 ALOGE("dequeueBuffer returned unrecognized buffer");
2280 window->cancelBuffer(window, buffer, fence_fd);
2281 return VK_ERROR_OUT_OF_DATE_KHR;
2282 }
2283
2284 int fence_clone = -1;
2285 if (fence_fd != -1) {
2286 fence_clone = dup(fence_fd);
2287 if (fence_clone == -1) {
2288 ALOGE("dup(fence) failed, stalling until signalled: %s (%d)",
2289 strerror(errno), errno);
2290 sync_wait(fence_fd, -1 /* forever */);
2291 }
2292 }
2293
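    // The driver takes ownership of fence_clone below; the original fence_fd
    // stays with the image as its dequeue fence.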
2294 result = GetData(device).driver.AcquireImageANDROID(
2295 device, swapchain.images[idx].image, fence_clone, semaphore, vk_fence);
2296 if (result != VK_SUCCESS) {
2297 // NOTE: we're relying on AcquireImageANDROID to close fence_clone,
2298 // even if the call fails. We could close it ourselves on failure, but
2299 // that would create a race condition if the driver closes it on a
2300 // failure path: some other thread might create an fd with the same
2301 // number between the time the driver closes it and the time we close
2302 // it. We must assume one of: the driver *always* closes it even on
2303 // failure, or *never* closes it on failure.
2304 window->cancelBuffer(window, buffer, fence_fd);
2305 swapchain.images[idx].dequeued = false;
2306 swapchain.images[idx].dequeue_fence = -1;
2307 return result;
2308 }
2309
2310 *image_index = idx;
2311 return VK_SUCCESS;
2312 }
2313
2314 VKAPI_ATTR
2315 VkResult AcquireNextImage2KHR(VkDevice device,
2316 const VkAcquireNextImageInfoKHR* pAcquireInfo,
2317 uint32_t* pImageIndex) {
2318 ATRACE_CALL();
2319
2320 return AcquireNextImageKHR(device, pAcquireInfo->swapchain,
2321 pAcquireInfo->timeout, pAcquireInfo->semaphore,
2322 pAcquireInfo->fence, pImageIndex);
2323 }
2324
2325 static VkResult WorstPresentResult(VkResult a, VkResult b) {
2326 // See the error ranking for vkQueuePresentKHR at the end of section 29.6
2327 // (in spec version 1.0.14).
2328 static const VkResult kWorstToBest[] = {
2329 VK_ERROR_DEVICE_LOST,
2330 VK_ERROR_SURFACE_LOST_KHR,
2331 VK_ERROR_OUT_OF_DATE_KHR,
2332 VK_ERROR_OUT_OF_DEVICE_MEMORY,
2333 VK_ERROR_OUT_OF_HOST_MEMORY,
2334 VK_SUBOPTIMAL_KHR,
2335 };
2336 for (auto result : kWorstToBest) {
2337 if (a == result || b == result)
2338 return result;
2339 }
2340 ALOG_ASSERT(a == VK_SUCCESS, "invalid vkQueuePresentKHR result %d", a);
2341 ALOG_ASSERT(b == VK_SUCCESS, "invalid vkQueuePresentKHR result %d", b);
2342 return a != VK_SUCCESS ? a : b;
2343 }
2344
2345 // KHR_incremental_present aspect of QueuePresentKHR
2346 static void SetSwapchainSurfaceDamage(ANativeWindow *window, const VkPresentRegionKHR *pRegion) {
2347 std::vector<android_native_rect_t> rects(pRegion->rectangleCount);
2348 for (auto i = 0u; i < pRegion->rectangleCount; i++) {
2349 auto const& rect = pRegion->pRectangles[i];
2350 if (rect.layer > 0) {
2351 ALOGV("vkQueuePresentKHR ignoring invalid layer (%u); using layer 0 instead",
2352 rect.layer);
2353 }
2354
2355 rects[i].left = rect.offset.x;
2356 rects[i].bottom = rect.offset.y;
2357 rects[i].right = rect.offset.x + rect.extent.width;
2358 rects[i].top = rect.offset.y + rect.extent.height;
2359 }
2360 native_window_set_surface_damage(window, rects.data(), rects.size());
2361 }
2362
2363 // GOOGLE_display_timing aspect of QueuePresentKHR
2364 static void SetSwapchainFrameTimestamp(Swapchain &swapchain, const VkPresentTimeGOOGLE *pTime) {
2365 ANativeWindow *window = swapchain.surface.window.get();
2366
2367 // We don't know whether the app will actually use GOOGLE_display_timing
2368 // with a particular swapchain until QueuePresent; enable it on the BQ
2369 // now if needed
2370 if (!swapchain.frame_timestamps_enabled) {
2371 ALOGV("Calling native_window_enable_frame_timestamps(true)");
2372 native_window_enable_frame_timestamps(window, true);
2373 swapchain.frame_timestamps_enabled = true;
2374 }
2375
2376 // Record the nativeFrameId so it can be later correlated to
2377 // this present.
2378 uint64_t nativeFrameId = 0;
2379 int err = native_window_get_next_frame_id(
2380 window, &nativeFrameId);
2381 if (err != android::OK) {
2382 ALOGE("Failed to get next native frame ID.");
2383 }
2384
2385 // Add a new timing record with the user's presentID and
2386 // the nativeFrameId.
2387 swapchain.timing.emplace_back(pTime, nativeFrameId);
2388 if (swapchain.timing.size() > MAX_TIMING_INFOS) {
2389 swapchain.timing.erase(
2390 swapchain.timing.begin(),
2391 swapchain.timing.begin() + swapchain.timing.size() - MAX_TIMING_INFOS);
2392 }
2393 if (pTime->desiredPresentTime) {
2394 ALOGV(
2395 "Calling native_window_set_buffers_timestamp(%" PRId64 ")",
2396 pTime->desiredPresentTime);
2397 native_window_set_buffers_timestamp(
2398 window,
2399 static_cast<int64_t>(pTime->desiredPresentTime));
2400 }
2401 }
2402
2403 // EXT_swapchain_maintenance1 present mode change
2404 static bool SetSwapchainPresentMode(ANativeWindow *window, VkPresentModeKHR mode) {
2405 // There is no dynamic switching between non-shared present modes.
2406 // All we support is switching between demand and continuous refresh.
2407 if (!IsSharedPresentMode(mode))
2408 return true;
2409
2410 int err = native_window_set_auto_refresh(window,
2411 mode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR);
2412 if (err != android::OK) {
2413 ALOGE("native_window_set_auto_refresh() failed: %s (%d)",
2414 strerror(-err), err);
2415 return false;
2416 }
2417
2418 return true;
2419 }
2420
2421 static VkResult PresentOneSwapchain(
2422 VkQueue queue,
2423 Swapchain& swapchain,
2424 uint32_t imageIndex,
2425 const VkPresentRegionKHR *pRegion,
2426 const VkPresentTimeGOOGLE *pTime,
2427 VkFence presentFence,
2428 const VkPresentModeKHR *pPresentMode,
2429 uint32_t waitSemaphoreCount,
2430 const VkSemaphore *pWaitSemaphores) {
2431
2432 VkDevice device = GetData(queue).driver_device;
2433 const auto& dispatch = GetData(queue).driver;
2434
2435 Swapchain::Image& img = swapchain.images[imageIndex];
2436 VkResult swapchain_result = VK_SUCCESS;
2437 VkResult result;
2438 int err;
2439
2440 // XXX: long standing issue: QueueSignalReleaseImageANDROID consumes the
2441 // wait semaphores, so this doesn't actually work for the multiple swapchain
2442 // case.
2443 int fence = -1;
2444 result = dispatch.QueueSignalReleaseImageANDROID(
2445 queue, waitSemaphoreCount,
2446 pWaitSemaphores, img.image, &fence);
2447 if (result != VK_SUCCESS) {
2448 ALOGE("QueueSignalReleaseImageANDROID failed: %d", result);
2449 swapchain_result = result;
2450 }
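    // Keep our own duplicate of the release fence; the original fd is handed
    // off (and eventually closed) when the buffer is queued or released below.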
2451 if (img.release_fence >= 0)
2452 close(img.release_fence);
2453 img.release_fence = fence < 0 ? -1 : dup(fence);
2454
2455 if (swapchain.surface.swapchain_handle == HandleFromSwapchain(&swapchain)) {
2456 ANativeWindow* window = swapchain.surface.window.get();
2457 if (swapchain_result == VK_SUCCESS) {
2458
2459 if (presentFence != VK_NULL_HANDLE) {
2460 int fence_copy = fence < 0 ? -1 : dup(fence);
2461 VkImportFenceFdInfoKHR iffi = {
2462 VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
2463 nullptr,
2464 presentFence,
2465 VK_FENCE_IMPORT_TEMPORARY_BIT,
2466 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
2467 fence_copy,
2468 };
2469 if (VK_SUCCESS != dispatch.ImportFenceFdKHR(device, &iffi) && fence_copy >= 0) {
2470 // ImportFenceFdKHR takes ownership only if it succeeds
2471 close(fence_copy);
2472 }
2473 }
2474
2475 if (pRegion) {
2476 SetSwapchainSurfaceDamage(window, pRegion);
2477 }
2478 if (pTime) {
2479 SetSwapchainFrameTimestamp(swapchain, pTime);
2480 }
2481 if (pPresentMode) {
2482 if (!SetSwapchainPresentMode(window, *pPresentMode))
2483 swapchain_result = WorstPresentResult(swapchain_result,
2484 VK_ERROR_SURFACE_LOST_KHR);
2485 }
2486
2487 err = window->queueBuffer(window, img.buffer.get(), fence);
2488 // queueBuffer always closes fence, even on error
2489 if (err != android::OK) {
2490 ALOGE("queueBuffer failed: %s (%d)", strerror(-err), err);
2491 swapchain_result = WorstPresentResult(
2492 swapchain_result, VK_ERROR_SURFACE_LOST_KHR);
2493 } else {
2494 if (img.dequeue_fence >= 0) {
2495 close(img.dequeue_fence);
2496 img.dequeue_fence = -1;
2497 }
2498 img.dequeued = false;
2499 }
2500
2501 // If the swapchain is in shared mode, immediately dequeue the
2502 // buffer so it can be presented again without an intervening
2503 // call to AcquireNextImageKHR. We expect to get the same buffer
2504 // back from every call to dequeueBuffer in this mode.
2505 if (swapchain.shared && swapchain_result == VK_SUCCESS) {
2506 ANativeWindowBuffer* buffer;
2507 int fence_fd;
2508 err = window->dequeueBuffer(window, &buffer, &fence_fd);
2509 if (err != android::OK) {
2510 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), err);
2511 swapchain_result = WorstPresentResult(swapchain_result,
2512 VK_ERROR_SURFACE_LOST_KHR);
2513 } else if (img.buffer != buffer) {
2514 ALOGE("got wrong image back for shared swapchain");
2515 swapchain_result = WorstPresentResult(swapchain_result,
2516 VK_ERROR_SURFACE_LOST_KHR);
2517 } else {
2518 img.dequeue_fence = fence_fd;
2519 img.dequeued = true;
2520 }
2521 }
2522 }
2523 if (swapchain_result != VK_SUCCESS) {
2524 OrphanSwapchain(device, &swapchain);
2525 }
2526 // Android will only return VK_SUBOPTIMAL_KHR for vkQueuePresentKHR,
2527 // and only when the window's transform/rotation changes. Extent
2528 // changes will not cause VK_SUBOPTIMAL_KHR because of the
2529 // application issues that were caused when the following transform
2530 // change was added.
2531 int window_transform_hint;
2532 err = window->query(window, NATIVE_WINDOW_TRANSFORM_HINT,
2533 &window_transform_hint);
2534 if (err != android::OK) {
2535 ALOGE("NATIVE_WINDOW_TRANSFORM_HINT query failed: %s (%d)",
2536 strerror(-err), err);
2537 swapchain_result = WorstPresentResult(
2538 swapchain_result, VK_ERROR_SURFACE_LOST_KHR);
2539 }
2540 if (swapchain.pre_transform != window_transform_hint) {
2541 swapchain_result =
2542 WorstPresentResult(swapchain_result, VK_SUBOPTIMAL_KHR);
2543 }
2544 } else {
2545 ReleaseSwapchainImage(device, swapchain.shared, nullptr, fence,
2546 img, true);
2547 swapchain_result = VK_ERROR_OUT_OF_DATE_KHR;
2548 }
2549
2550 return swapchain_result;
2551 }
2552
2553 VKAPI_ATTR
2554 VkResult QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* present_info) {
2555 ATRACE_CALL();
2556
2557 ALOGV_IF(present_info->sType != VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
2558 "vkQueuePresentKHR: invalid VkPresentInfoKHR structure type %d",
2559 present_info->sType);
2560
2561 VkResult final_result = VK_SUCCESS;
2562
2563 // Look at the pNext chain for supported extension structs:
2564 const VkPresentRegionsKHR* present_regions = nullptr;
2565 const VkPresentTimesInfoGOOGLE* present_times = nullptr;
2566 const VkSwapchainPresentFenceInfoEXT* present_fences = nullptr;
2567 const VkSwapchainPresentModeInfoEXT* present_modes = nullptr;
2568
2569 const VkPresentRegionsKHR* next =
2570 reinterpret_cast<const VkPresentRegionsKHR*>(present_info->pNext);
2571 while (next) {
2572 switch (next->sType) {
2573 case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR:
2574 present_regions = next;
2575 break;
2576 case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE:
2577 present_times =
2578 reinterpret_cast<const VkPresentTimesInfoGOOGLE*>(next);
2579 break;
2580 case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT:
2581 present_fences =
2582 reinterpret_cast<const VkSwapchainPresentFenceInfoEXT*>(next);
2583 break;
2584 case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT:
2585 present_modes =
2586 reinterpret_cast<const VkSwapchainPresentModeInfoEXT*>(next);
2587 break;
2588 default:
2589 ALOGV("QueuePresentKHR ignoring unrecognized pNext->sType = %x",
2590 next->sType);
2591 break;
2592 }
2593 next = reinterpret_cast<const VkPresentRegionsKHR*>(next->pNext);
2594 }
2595 ALOGV_IF(
2596 present_regions &&
2597 present_regions->swapchainCount != present_info->swapchainCount,
2598 "VkPresentRegions::swapchainCount != VkPresentInfo::swapchainCount");
2599 ALOGV_IF(present_times &&
2600 present_times->swapchainCount != present_info->swapchainCount,
2601 "VkPresentTimesInfoGOOGLE::swapchainCount != "
2602 "VkPresentInfo::swapchainCount");
2603 ALOGV_IF(present_fences &&
2604 present_fences->swapchainCount != present_info->swapchainCount,
2605 "VkSwapchainPresentFenceInfoEXT::swapchainCount != "
2606 "VkPresentInfo::swapchainCount");
2607 ALOGV_IF(present_modes &&
2608 present_modes->swapchainCount != present_info->swapchainCount,
2609 "VkSwapchainPresentModeInfoEXT::swapchainCount != "
2610 "VkPresentInfo::swapchainCount");
2611
2612 const VkPresentRegionKHR* regions =
2613 (present_regions) ? present_regions->pRegions : nullptr;
2614 const VkPresentTimeGOOGLE* times =
2615 (present_times) ? present_times->pTimes : nullptr;
2616
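    // Present each swapchain in turn. Per-swapchain results are reported via
    // pResults (if provided); the overall return value is the "worst" of them.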
2617 for (uint32_t sc = 0; sc < present_info->swapchainCount; sc++) {
2618 Swapchain& swapchain =
2619 *SwapchainFromHandle(present_info->pSwapchains[sc]);
2620
2621 VkResult swapchain_result = PresentOneSwapchain(
2622 queue,
2623 swapchain,
2624 present_info->pImageIndices[sc],
2625             (regions && !swapchain.mailbox_mode) ? &regions[sc] : nullptr,
2626             times ? &times[sc] : nullptr,
2627 present_fences ? present_fences->pFences[sc] : VK_NULL_HANDLE,
2628 present_modes ? &present_modes->pPresentModes[sc] : nullptr,
2629 present_info->waitSemaphoreCount,
2630 present_info->pWaitSemaphores);
2631
2632 if (present_info->pResults)
2633 present_info->pResults[sc] = swapchain_result;
2634
2635 if (swapchain_result != final_result)
2636 final_result = WorstPresentResult(final_result, swapchain_result);
2637 }
2638
2639 return final_result;
2640 }
2641
2642 VKAPI_ATTR
2643 VkResult GetRefreshCycleDurationGOOGLE(
2644 VkDevice,
2645 VkSwapchainKHR swapchain_handle,
2646 VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) {
2647 ATRACE_CALL();
2648
2649 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2650 VkResult result = swapchain.get_refresh_duration(pDisplayTimingProperties->refreshDuration);
2651
2652 return result;
2653 }
2654
2655 VKAPI_ATTR
2656 VkResult GetPastPresentationTimingGOOGLE(
2657 VkDevice,
2658 VkSwapchainKHR swapchain_handle,
2659 uint32_t* count,
2660 VkPastPresentationTimingGOOGLE* timings) {
2661 ATRACE_CALL();
2662
2663 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2664 if (swapchain.surface.swapchain_handle != swapchain_handle) {
2665 return VK_ERROR_OUT_OF_DATE_KHR;
2666 }
2667
2668 ANativeWindow* window = swapchain.surface.window.get();
2669 VkResult result = VK_SUCCESS;
2670
2671 if (!swapchain.frame_timestamps_enabled) {
2672 ALOGV("Calling native_window_enable_frame_timestamps(true)");
2673 native_window_enable_frame_timestamps(window, true);
2674 swapchain.frame_timestamps_enabled = true;
2675 }
2676
2677 if (timings) {
2678 // Get the latest ready timing count before copying, since the copied
2679         // timing info will be erased in the copy_ready_timings function.
2680 uint32_t n = get_num_ready_timings(swapchain);
2681 copy_ready_timings(swapchain, count, timings);
2682 // Check the *count here against the recorded ready timing count, since
2683         // *count can be overwritten, as the spec describes.
2684 if (*count < n) {
2685 result = VK_INCOMPLETE;
2686 }
2687 } else {
2688 *count = get_num_ready_timings(swapchain);
2689 }
2690
2691 return result;
2692 }
2693
2694 VKAPI_ATTR
2695 VkResult GetSwapchainStatusKHR(
2696 VkDevice,
2697 VkSwapchainKHR swapchain_handle) {
2698 ATRACE_CALL();
2699
2700 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2701 VkResult result = VK_SUCCESS;
2702
2703 if (swapchain.surface.swapchain_handle != swapchain_handle) {
2704 return VK_ERROR_OUT_OF_DATE_KHR;
2705 }
2706
2707 // TODO(b/143296009): Implement this function properly
2708
2709 return result;
2710 }
2711
2712 VKAPI_ATTR void SetHdrMetadataEXT(
2713 VkDevice,
2714 uint32_t swapchainCount,
2715 const VkSwapchainKHR* pSwapchains,
2716 const VkHdrMetadataEXT* pHdrMetadataEXTs) {
2717 ATRACE_CALL();
2718
2719 for (uint32_t idx = 0; idx < swapchainCount; idx++) {
2720 Swapchain* swapchain = SwapchainFromHandle(pSwapchains[idx]);
2721 if (!swapchain)
2722 continue;
2723
2724 if (swapchain->surface.swapchain_handle != pSwapchains[idx]) continue;
2725
2726 ANativeWindow* window = swapchain->surface.window.get();
2727
2728 VkHdrMetadataEXT vulkanMetadata = pHdrMetadataEXTs[idx];
2729         const android_smpte2086_metadata smpteMetadata = {
2730 {vulkanMetadata.displayPrimaryRed.x,
2731 vulkanMetadata.displayPrimaryRed.y},
2732 {vulkanMetadata.displayPrimaryGreen.x,
2733 vulkanMetadata.displayPrimaryGreen.y},
2734 {vulkanMetadata.displayPrimaryBlue.x,
2735 vulkanMetadata.displayPrimaryBlue.y},
2736 {vulkanMetadata.whitePoint.x, vulkanMetadata.whitePoint.y},
2737 vulkanMetadata.maxLuminance,
2738 vulkanMetadata.minLuminance};
2739         native_window_set_buffers_smpte2086_metadata(window, &smpteMetadata);
2740
2741 const android_cta861_3_metadata cta8613Metadata = {
2742 vulkanMetadata.maxContentLightLevel,
2743 vulkanMetadata.maxFrameAverageLightLevel};
2744 native_window_set_buffers_cta861_3_metadata(window, &cta8613Metadata);
2745 }
2746
2747 return;
2748 }
2749
2750 static void InterceptBindImageMemory2(
2751 uint32_t bind_info_count,
2752 const VkBindImageMemoryInfo* bind_infos,
2753 std::vector<VkNativeBufferANDROID>* out_native_buffers,
2754 std::vector<VkBindImageMemoryInfo>* out_bind_infos) {
2755 out_native_buffers->clear();
2756 out_bind_infos->clear();
2757
2758 if (!bind_info_count)
2759 return;
2760
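    // For every bind info that carries a VkBindImageMemorySwapchainInfoKHR,
    // substitute a VkNativeBufferANDROID describing the swapchain image's
    // gralloc buffer so the driver can bind the image to it.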
2761 std::unordered_set<uint32_t> intercepted_indexes;
2762
2763 for (uint32_t idx = 0; idx < bind_info_count; idx++) {
2764 auto info = reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>(
2765 bind_infos[idx].pNext);
2766 while (info &&
2767 info->sType !=
2768 VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR) {
2769 info = reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>(
2770 info->pNext);
2771 }
2772
2773 if (!info)
2774 continue;
2775
2776 ALOG_ASSERT(info->swapchain != VK_NULL_HANDLE,
2777 "swapchain handle must not be NULL");
2778 const Swapchain* swapchain = SwapchainFromHandle(info->swapchain);
2779 ALOG_ASSERT(
2780 info->imageIndex < swapchain->num_images,
2781 "imageIndex must be less than the number of images in swapchain");
2782
2783 ANativeWindowBuffer* buffer =
2784 swapchain->images[info->imageIndex].buffer.get();
2785 VkNativeBufferANDROID native_buffer = {
2786 #pragma clang diagnostic push
2787 #pragma clang diagnostic ignored "-Wold-style-cast"
2788 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
2789 #pragma clang diagnostic pop
2790 .pNext = bind_infos[idx].pNext,
2791 .handle = buffer->handle,
2792 .stride = buffer->stride,
2793 .format = buffer->format,
2794 .usage = int(buffer->usage),
2795 .usage3 = buffer->usage,
2796 .ahb = ANativeWindowBuffer_getHardwareBuffer(buffer),
2797 };
2798 android_convertGralloc0To1Usage(int(buffer->usage),
2799 &native_buffer.usage2.producer,
2800 &native_buffer.usage2.consumer);
2801 // Reserve enough space to avoid letting re-allocation invalidate the
2802 // addresses of the elements inside.
2803 out_native_buffers->reserve(bind_info_count);
2804 out_native_buffers->emplace_back(native_buffer);
2805
2806 // Reserve the space now since we know how much is needed now.
2807 out_bind_infos->reserve(bind_info_count);
2808 out_bind_infos->emplace_back(bind_infos[idx]);
2809 out_bind_infos->back().pNext = &out_native_buffers->back();
2810
2811 intercepted_indexes.insert(idx);
2812 }
2813
2814 if (intercepted_indexes.empty())
2815 return;
2816
2817 for (uint32_t idx = 0; idx < bind_info_count; idx++) {
2818 if (intercepted_indexes.count(idx))
2819 continue;
2820 out_bind_infos->emplace_back(bind_infos[idx]);
2821 }
2822 }
2823
2824 VKAPI_ATTR
2825 VkResult BindImageMemory2(VkDevice device,
2826 uint32_t bindInfoCount,
2827 const VkBindImageMemoryInfo* pBindInfos) {
2828 ATRACE_CALL();
2829
2830 // out_native_buffers is for maintaining the lifecycle of the constructed
2831 // VkNativeBufferANDROID objects inside InterceptBindImageMemory2.
2832 std::vector<VkNativeBufferANDROID> out_native_buffers;
2833 std::vector<VkBindImageMemoryInfo> out_bind_infos;
2834 InterceptBindImageMemory2(bindInfoCount, pBindInfos, &out_native_buffers,
2835 &out_bind_infos);
2836 return GetData(device).driver.BindImageMemory2(
2837 device, bindInfoCount,
2838 out_bind_infos.empty() ? pBindInfos : out_bind_infos.data());
2839 }
2840
2841 VKAPI_ATTR
2842 VkResult BindImageMemory2KHR(VkDevice device,
2843 uint32_t bindInfoCount,
2844 const VkBindImageMemoryInfo* pBindInfos) {
2845 ATRACE_CALL();
2846
2847 std::vector<VkNativeBufferANDROID> out_native_buffers;
2848 std::vector<VkBindImageMemoryInfo> out_bind_infos;
2849 InterceptBindImageMemory2(bindInfoCount, pBindInfos, &out_native_buffers,
2850 &out_bind_infos);
2851 return GetData(device).driver.BindImageMemory2KHR(
2852 device, bindInfoCount,
2853 out_bind_infos.empty() ? pBindInfos : out_bind_infos.data());
2854 }
2855
2856 VKAPI_ATTR
2857 VkResult ReleaseSwapchainImagesEXT(VkDevice /*device*/,
2858 const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo) {
2859 ATRACE_CALL();
2860
2861 Swapchain& swapchain = *SwapchainFromHandle(pReleaseInfo->swapchain);
2862 ANativeWindow* window = swapchain.surface.window.get();
2863
2864 // If in shared present mode, don't actually release the image back to the BQ.
2865 // Both sides share it forever.
2866 if (swapchain.shared)
2867 return VK_SUCCESS;
2868
2869 for (uint32_t i = 0; i < pReleaseInfo->imageIndexCount; i++) {
2870 Swapchain::Image& img = swapchain.images[pReleaseInfo->pImageIndices[i]];
2871 window->cancelBuffer(window, img.buffer.get(), img.dequeue_fence);
2872
2873 // cancelBuffer has taken ownership of the dequeue fence
2874 img.dequeue_fence = -1;
2875 // if we're still holding a release fence, get rid of it now
2876 if (img.release_fence >= 0) {
2877 close(img.release_fence);
2878 img.release_fence = -1;
2879 }
2880 img.dequeued = false;
2881 }
2882
2883 return VK_SUCCESS;
2884 }
2885
2886 } // namespace driver
2887 } // namespace vulkan
2888