1 /*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "vulkan/vulkan_core.h"
18 #define ATRACE_TAG ATRACE_TAG_GRAPHICS
19
20 #include <aidl/android/hardware/graphics/common/Dataspace.h>
21 #include <aidl/android/hardware/graphics/common/PixelFormat.h>
22 #include <android/hardware/graphics/common/1.0/types.h>
23 #include <android/hardware_buffer.h>
24 #include <grallocusage/GrallocUsageConversion.h>
25 #include <graphicsenv/GraphicsEnv.h>
26 #include <hardware/gralloc.h>
27 #include <hardware/gralloc1.h>
28 #include <log/log.h>
29 #include <sync/sync.h>
30 #include <system/window.h>
31 #include <ui/BufferQueueDefs.h>
32 #include <utils/StrongPointer.h>
33 #include <utils/Timers.h>
34 #include <utils/Trace.h>
35
36 #include <algorithm>
37 #include <unordered_set>
38 #include <vector>
39
40 #include "driver.h"
41
42 using PixelFormat = aidl::android::hardware::graphics::common::PixelFormat;
43 using DataSpace = aidl::android::hardware::graphics::common::Dataspace;
44 using android::hardware::graphics::common::V1_0::BufferUsage;
45
46 namespace vulkan {
47 namespace driver {
48
49 namespace {
50
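// Merges gralloc1 producer and consumer usage bits into a single gralloc
// usage value, folding the CPU_READ_OFTEN/CPU_WRITE_OFTEN bits into their
// BufferUsage equivalents.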
51 static uint64_t convertGralloc1ToBufferUsage(uint64_t producerUsage,
52 uint64_t consumerUsage) {
53 static_assert(uint64_t(GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN) ==
54 uint64_t(GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN),
55 "expected ConsumerUsage and ProducerUsage CPU_READ_OFTEN "
56 "bits to match");
57 uint64_t merged = producerUsage | consumerUsage;
58 if ((merged & (GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN)) ==
59 GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN) {
60 merged &= ~uint64_t(GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN);
61 merged |= BufferUsage::CPU_READ_OFTEN;
62 }
63 if ((merged & (GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN)) ==
64 GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN) {
65 merged &= ~uint64_t(GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN);
66 merged |= BufferUsage::CPU_WRITE_OFTEN;
67 }
68 return merged;
69 }
70
71 const VkSurfaceTransformFlagsKHR kSupportedTransforms =
72 VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
73 VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
74 VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
75 VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
76 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
77 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
78 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
79 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
80 VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
81
82 VkSurfaceTransformFlagBitsKHR TranslateNativeToVulkanTransform(int native) {
83 // Native and Vulkan transforms are isomorphic, but are represented
84 // differently. Vulkan transforms are built up of an optional horizontal
85 // mirror, followed by a clockwise 0/90/180/270-degree rotation. Native
86 // transforms are built up from a horizontal flip, vertical flip, and
87 // 90-degree rotation, all optional but always in that order.
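    // For example, a native vertical flip (FLIP_V) is equivalent to a
    // horizontal mirror followed by a 180-degree rotation, which is why it
    // maps to VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR below.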
88
89 switch (native) {
90 case 0:
91 return VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
92 case NATIVE_WINDOW_TRANSFORM_FLIP_H:
93 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR;
94 case NATIVE_WINDOW_TRANSFORM_FLIP_V:
95 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR;
96 case NATIVE_WINDOW_TRANSFORM_ROT_180:
97 return VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR;
98 case NATIVE_WINDOW_TRANSFORM_ROT_90:
99 return VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR;
100 case NATIVE_WINDOW_TRANSFORM_FLIP_H | NATIVE_WINDOW_TRANSFORM_ROT_90:
101 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR;
102 case NATIVE_WINDOW_TRANSFORM_FLIP_V | NATIVE_WINDOW_TRANSFORM_ROT_90:
103 return VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR;
104 case NATIVE_WINDOW_TRANSFORM_ROT_270:
105 return VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR;
106 case NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY:
107 default:
108 return VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
109 }
110 }
111
112 int TranslateVulkanToNativeTransform(VkSurfaceTransformFlagBitsKHR transform) {
113 switch (transform) {
114 case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
115 return NATIVE_WINDOW_TRANSFORM_ROT_90;
116 case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
117 return NATIVE_WINDOW_TRANSFORM_ROT_180;
118 case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
119 return NATIVE_WINDOW_TRANSFORM_ROT_270;
120 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR:
121 return NATIVE_WINDOW_TRANSFORM_FLIP_H;
122 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR:
123 return NATIVE_WINDOW_TRANSFORM_FLIP_H |
124 NATIVE_WINDOW_TRANSFORM_ROT_90;
125 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR:
126 return NATIVE_WINDOW_TRANSFORM_FLIP_V;
127 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR:
128 return NATIVE_WINDOW_TRANSFORM_FLIP_V |
129 NATIVE_WINDOW_TRANSFORM_ROT_90;
130 case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR:
131 case VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR:
132 default:
133 return 0;
134 }
135 }
136
137 int InvertTransformToNative(VkSurfaceTransformFlagBitsKHR transform) {
138 switch (transform) {
139 case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
140 return NATIVE_WINDOW_TRANSFORM_ROT_270;
141 case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
142 return NATIVE_WINDOW_TRANSFORM_ROT_180;
143 case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
144 return NATIVE_WINDOW_TRANSFORM_ROT_90;
145 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR:
146 return NATIVE_WINDOW_TRANSFORM_FLIP_H;
147 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR:
148 return NATIVE_WINDOW_TRANSFORM_FLIP_H |
149 NATIVE_WINDOW_TRANSFORM_ROT_90;
150 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR:
151 return NATIVE_WINDOW_TRANSFORM_FLIP_V;
152 case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR:
153 return NATIVE_WINDOW_TRANSFORM_FLIP_V |
154 NATIVE_WINDOW_TRANSFORM_ROT_90;
155 case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR:
156 case VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR:
157 default:
158 return 0;
159 }
160 }
161
162 const static VkColorSpaceKHR colorSpaceSupportedByVkEXTSwapchainColorspace[] = {
163 VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
164 VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
165 VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
166 VK_COLOR_SPACE_BT709_LINEAR_EXT,
167 VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
168 VK_COLOR_SPACE_BT2020_LINEAR_EXT,
169 VK_COLOR_SPACE_HDR10_ST2084_EXT,
170 VK_COLOR_SPACE_HDR10_HLG_EXT,
171 VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
172 VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
173 VK_COLOR_SPACE_PASS_THROUGH_EXT,
174 VK_COLOR_SPACE_DCI_P3_LINEAR_EXT};
175
176 const static VkColorSpaceKHR
177 colorSpaceSupportedByVkEXTSwapchainColorspaceOnFP16SurfaceOnly[] = {
178 VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
179 VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT};
180
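// Bookkeeping for VK_GOOGLE_display_timing: a TimingInfo is recorded for a
// presented frame and filled in as frame timestamps become available from
// the native window.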
181 class TimingInfo {
182 public:
183     TimingInfo(const VkPresentTimeGOOGLE* qp, uint64_t nativeFrameId)
184 : vals_{qp->presentID, qp->desiredPresentTime, 0, 0, 0},
185 native_frame_id_(nativeFrameId) {}
186     bool ready() const {
187 return (timestamp_desired_present_time_ !=
188 NATIVE_WINDOW_TIMESTAMP_PENDING &&
189 timestamp_actual_present_time_ !=
190 NATIVE_WINDOW_TIMESTAMP_PENDING &&
191 timestamp_render_complete_time_ !=
192 NATIVE_WINDOW_TIMESTAMP_PENDING &&
193 timestamp_composition_latch_time_ !=
194 NATIVE_WINDOW_TIMESTAMP_PENDING);
195 }
196     void calculate(int64_t rdur) {
197 bool anyTimestampInvalid =
198 (timestamp_actual_present_time_ ==
199 NATIVE_WINDOW_TIMESTAMP_INVALID) ||
200 (timestamp_render_complete_time_ ==
201 NATIVE_WINDOW_TIMESTAMP_INVALID) ||
202 (timestamp_composition_latch_time_ ==
203 NATIVE_WINDOW_TIMESTAMP_INVALID);
204 if (anyTimestampInvalid) {
205 ALOGE("Unexpectedly received invalid timestamp.");
206 vals_.actualPresentTime = 0;
207 vals_.earliestPresentTime = 0;
208 vals_.presentMargin = 0;
209 return;
210 }
211
212 vals_.actualPresentTime =
213 static_cast<uint64_t>(timestamp_actual_present_time_);
214 int64_t margin = (timestamp_composition_latch_time_ -
215 timestamp_render_complete_time_);
216 // Calculate vals_.earliestPresentTime, and potentially adjust
217 // vals_.presentMargin. The initial value of vals_.earliestPresentTime
218 // is vals_.actualPresentTime. If we can subtract rdur (the duration
219 // of a refresh cycle) from vals_.earliestPresentTime (and also from
220 // vals_.presentMargin) and still leave a positive margin, then we can
221 // report to the application that it could have presented earlier than
222         // it did (per the extension specification). If, for some reason, we
223 // can do this subtraction repeatedly, we do, since
224 // vals_.earliestPresentTime really is supposed to be the "earliest".
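        // For example, with rdur of ~16.7ms, if rendering completed at t=0,
        // the frame was latched at t=40ms and actually presented at t=80ms,
        // the loop below walks early_time back two refresh cycles (to ~46.6ms)
        // and leaves a margin of ~6.6ms.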
225 int64_t early_time = timestamp_actual_present_time_;
226 while ((margin > rdur) &&
227 ((early_time - rdur) > timestamp_composition_latch_time_)) {
228 early_time -= rdur;
229 margin -= rdur;
230 }
231 vals_.earliestPresentTime = static_cast<uint64_t>(early_time);
232 vals_.presentMargin = static_cast<uint64_t>(margin);
233 }
234     void get_values(VkPastPresentationTimingGOOGLE* values) const {
235 *values = vals_;
236 }
237
238 public:
239 VkPastPresentationTimingGOOGLE vals_ { 0, 0, 0, 0, 0 };
240
241 uint64_t native_frame_id_ { 0 };
242 int64_t timestamp_desired_present_time_{ NATIVE_WINDOW_TIMESTAMP_PENDING };
243 int64_t timestamp_actual_present_time_ { NATIVE_WINDOW_TIMESTAMP_PENDING };
244 int64_t timestamp_render_complete_time_ { NATIVE_WINDOW_TIMESTAMP_PENDING };
245 int64_t timestamp_composition_latch_time_
246 { NATIVE_WINDOW_TIMESTAMP_PENDING };
247 };
248
249 struct Surface {
250 android::sp<ANativeWindow> window;
251 VkSwapchainKHR swapchain_handle;
252 uint64_t consumer_usage;
253
254     // Indicates whether this surface has ever been used by a swapchain,
255     // whether that swapchain is still current or has since been destroyed.
256 bool used_by_swapchain;
257 };
258
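// VkSurfaceKHR is a non-dispatchable handle, so the Surface pointer is
// carried directly in the 64-bit handle value.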
259 VkSurfaceKHR HandleFromSurface(Surface* surface) {
260 return VkSurfaceKHR(reinterpret_cast<uint64_t>(surface));
261 }
262
263 Surface* SurfaceFromHandle(VkSurfaceKHR handle) {
264 return reinterpret_cast<Surface*>(handle);
265 }
266
267 // Maximum number of TimingInfo structs to keep per swapchain:
268 enum { MAX_TIMING_INFOS = 10 };
269 // Minimum number of frames to look for in the past (so we don't cause
270 // synchronous requests to Surface Flinger):
271 enum { MIN_NUM_FRAMES_AGO = 5 };
272
273 bool IsSharedPresentMode(VkPresentModeKHR mode) {
274 return mode == VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR ||
275 mode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
276 }
277
278 struct Swapchain {
279     Swapchain(Surface& surface_,
280 uint32_t num_images_,
281 VkPresentModeKHR present_mode,
282 int pre_transform_,
283 int64_t refresh_duration_)
284 : surface(surface_),
285 num_images(num_images_),
286 mailbox_mode(present_mode == VK_PRESENT_MODE_MAILBOX_KHR),
287 pre_transform(pre_transform_),
288 frame_timestamps_enabled(false),
289 refresh_duration(refresh_duration_),
290 acquire_next_image_timeout(-1),
291 shared(IsSharedPresentMode(present_mode)) {
292 }
293
294     VkResult get_refresh_duration(uint64_t& outRefreshDuration)
295 {
296 ANativeWindow* window = surface.window.get();
297 int err = native_window_get_refresh_cycle_duration(
298 window,
299 &refresh_duration);
300 if (err != android::OK) {
301 ALOGE("%s:native_window_get_refresh_cycle_duration failed: %s (%d)",
302 __func__, strerror(-err), err );
303 return VK_ERROR_SURFACE_LOST_KHR;
304 }
305 outRefreshDuration = refresh_duration;
306 return VK_SUCCESS;
307 }
308
309 Surface& surface;
310 uint32_t num_images;
311 bool mailbox_mode;
312 int pre_transform;
313 bool frame_timestamps_enabled;
314 int64_t refresh_duration;
315 nsecs_t acquire_next_image_timeout;
316 bool shared;
317
318 struct Image {
319         Image()
320 : image(VK_NULL_HANDLE),
321 dequeue_fence(-1),
322 release_fence(-1),
323 dequeued(false) {}
324 VkImage image;
325 // If the image is bound to memory, an sp to the underlying gralloc buffer.
326 // Otherwise, nullptr; the image will be bound to memory as part of
327 // AcquireNextImage.
328 android::sp<ANativeWindowBuffer> buffer;
329 // The fence is only valid when the buffer is dequeued, and should be
330 // -1 any other time. When valid, we own the fd, and must ensure it is
331 // closed: either by closing it explicitly when queueing the buffer,
332 // or by passing ownership e.g. to ANativeWindow::cancelBuffer().
333 int dequeue_fence;
334 // This fence is a dup of the sync fd returned from the driver via
335 // vkQueueSignalReleaseImageANDROID upon vkQueuePresentKHR. We must
336 // ensure it is closed upon re-presenting or releasing the image.
337 int release_fence;
338 bool dequeued;
339 } images[android::BufferQueueDefs::NUM_BUFFER_SLOTS];
340
341 std::vector<TimingInfo> timing;
342 };
343
344 VkSwapchainKHR HandleFromSwapchain(Swapchain* swapchain) {
345 return VkSwapchainKHR(reinterpret_cast<uint64_t>(swapchain));
346 }
347
348 Swapchain* SwapchainFromHandle(VkSwapchainKHR handle) {
349 return reinterpret_cast<Swapchain*>(handle);
350 }
351
352 static bool IsFencePending(int fd) {
353 if (fd < 0)
354 return false;
355
356 errno = 0;
357 return sync_wait(fd, 0 /* timeout */) == -1 && errno == ETIME;
358 }
359
360 void ReleaseSwapchainImage(VkDevice device,
361 bool shared_present,
362 ANativeWindow* window,
363 int release_fence,
364 Swapchain::Image& image,
365 bool defer_if_pending) {
366 ATRACE_CALL();
367
368 ALOG_ASSERT(release_fence == -1 || image.dequeued,
369 "ReleaseSwapchainImage: can't provide a release fence for "
370 "non-dequeued images");
371
372 if (image.dequeued) {
373 if (release_fence >= 0) {
374 // We get here from vkQueuePresentKHR. The application is
375 // responsible for creating an execution dependency chain from
376 // vkAcquireNextImage (dequeue_fence) to vkQueuePresentKHR
377 // (release_fence), so we can drop the dequeue_fence here.
378 if (image.dequeue_fence >= 0)
379 close(image.dequeue_fence);
380 } else {
381 // We get here during swapchain destruction, or various serious
382 // error cases e.g. when we can't create the release_fence during
383 // vkQueuePresentKHR. In non-error cases, the dequeue_fence should
384 // have already signalled, since the swapchain images are supposed
385 // to be idle before the swapchain is destroyed. In error cases,
386 // there may be rendering in flight to the image, but since we
387 // weren't able to create a release_fence, waiting for the
388 // dequeue_fence is about the best we can do.
389 release_fence = image.dequeue_fence;
390 }
391 image.dequeue_fence = -1;
392
393 // It's invalid to call cancelBuffer on a shared buffer
394 if (window && !shared_present) {
395 window->cancelBuffer(window, image.buffer.get(), release_fence);
396 } else {
397 if (release_fence >= 0) {
398 sync_wait(release_fence, -1 /* forever */);
399 close(release_fence);
400 }
401 }
402 release_fence = -1;
403 image.dequeued = false;
404 }
405
406 if (defer_if_pending && IsFencePending(image.release_fence))
407 return;
408
409 if (image.release_fence >= 0) {
410 close(image.release_fence);
411 image.release_fence = -1;
412 }
413
414 if (image.image) {
415 ATRACE_BEGIN("DestroyImage");
416 GetData(device).driver.DestroyImage(device, image.image, nullptr);
417 ATRACE_END();
418 image.image = VK_NULL_HANDLE;
419 }
420
421 image.buffer.clear();
422 }
423
424 void OrphanSwapchain(VkDevice device, Swapchain* swapchain) {
425 if (swapchain->surface.swapchain_handle != HandleFromSwapchain(swapchain))
426 return;
427 for (uint32_t i = 0; i < swapchain->num_images; i++) {
428 if (!swapchain->images[i].dequeued) {
429 ReleaseSwapchainImage(device, swapchain->shared, nullptr, -1,
430 swapchain->images[i], true);
431 }
432 }
433 swapchain->surface.swapchain_handle = VK_NULL_HANDLE;
434 swapchain->timing.clear();
435 }
436
437 uint32_t get_num_ready_timings(Swapchain& swapchain) {
438 if (swapchain.timing.size() < MIN_NUM_FRAMES_AGO) {
439 return 0;
440 }
441
442 uint32_t num_ready = 0;
443 const size_t num_timings = swapchain.timing.size() - MIN_NUM_FRAMES_AGO + 1;
444 for (uint32_t i = 0; i < num_timings; i++) {
445 TimingInfo& ti = swapchain.timing[i];
446 if (ti.ready()) {
447 // This TimingInfo is ready to be reported to the user. Add it
448 // to the num_ready.
449 num_ready++;
450 continue;
451 }
452 // This TimingInfo is not yet ready to be reported to the user,
453 // and so we should look for any available timestamps that
454 // might make it ready.
455 int64_t desired_present_time = 0;
456 int64_t render_complete_time = 0;
457 int64_t composition_latch_time = 0;
458 int64_t actual_present_time = 0;
459 // Obtain timestamps:
460 int err = native_window_get_frame_timestamps(
461 swapchain.surface.window.get(), ti.native_frame_id_,
462 &desired_present_time, &render_complete_time,
463 &composition_latch_time,
464 nullptr, //&first_composition_start_time,
465 nullptr, //&last_composition_start_time,
466 nullptr, //&composition_finish_time,
467 &actual_present_time,
468 nullptr, //&dequeue_ready_time,
469 nullptr /*&reads_done_time*/);
470
471 if (err != android::OK) {
472 continue;
473 }
474
475 // Record the timestamp(s) we received, and then see if this TimingInfo
476 // is ready to be reported to the user:
477 ti.timestamp_desired_present_time_ = desired_present_time;
478 ti.timestamp_actual_present_time_ = actual_present_time;
479 ti.timestamp_render_complete_time_ = render_complete_time;
480 ti.timestamp_composition_latch_time_ = composition_latch_time;
481
482 if (ti.ready()) {
483 // The TimingInfo has received enough timestamps, and should now
484 // use those timestamps to calculate the info that should be
485 // reported to the user:
486 ti.calculate(swapchain.refresh_duration);
487 num_ready++;
488 }
489 }
490 return num_ready;
491 }
492
493 void copy_ready_timings(Swapchain& swapchain,
494 uint32_t* count,
495 VkPastPresentationTimingGOOGLE* timings) {
496 if (swapchain.timing.empty()) {
497 *count = 0;
498 return;
499 }
500
501 size_t last_ready = swapchain.timing.size() - 1;
502 while (!swapchain.timing[last_ready].ready()) {
503 if (last_ready == 0) {
504 *count = 0;
505 return;
506 }
507 last_ready--;
508 }
509
510 uint32_t num_copied = 0;
511 int32_t num_to_remove = 0;
512 for (uint32_t i = 0; i <= last_ready && num_copied < *count; i++) {
513 const TimingInfo& ti = swapchain.timing[i];
514 if (ti.ready()) {
515 ti.get_values(&timings[num_copied]);
516 num_copied++;
517 }
518 num_to_remove++;
519 }
520
521 // Discard old frames that aren't ready if newer frames are ready.
522 // We don't expect to get the timing info for those old frames.
523 swapchain.timing.erase(swapchain.timing.begin(),
524 swapchain.timing.begin() + num_to_remove);
525
526 *count = num_copied;
527 }
528
529 PixelFormat GetNativePixelFormat(VkFormat format) {
530 PixelFormat native_format = PixelFormat::RGBA_8888;
531 switch (format) {
532 case VK_FORMAT_R8G8B8A8_UNORM:
533 case VK_FORMAT_R8G8B8A8_SRGB:
534 native_format = PixelFormat::RGBA_8888;
535 break;
536 case VK_FORMAT_R5G6B5_UNORM_PACK16:
537 native_format = PixelFormat::RGB_565;
538 break;
539 case VK_FORMAT_R16G16B16A16_SFLOAT:
540 native_format = PixelFormat::RGBA_FP16;
541 break;
542 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
543 native_format = PixelFormat::RGBA_1010102;
544 break;
545 case VK_FORMAT_R8_UNORM:
546 native_format = PixelFormat::R_8;
547 break;
548 case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16:
549 native_format = PixelFormat::RGBA_10101010;
550 break;
551 default:
552 ALOGV("unsupported swapchain format %d", format);
553 break;
554 }
555 return native_format;
556 }
557
558 DataSpace GetNativeDataspace(VkColorSpaceKHR colorspace, VkFormat format) {
559 switch (colorspace) {
560 case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR:
561 return DataSpace::SRGB;
562 case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT:
563 return DataSpace::DISPLAY_P3;
564 case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT:
565 return DataSpace::SCRGB_LINEAR;
566 case VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT:
567 return DataSpace::SCRGB;
568 case VK_COLOR_SPACE_DCI_P3_LINEAR_EXT:
569 return DataSpace::DCI_P3_LINEAR;
570 case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT:
571 return DataSpace::DCI_P3;
572 case VK_COLOR_SPACE_BT709_LINEAR_EXT:
573 return DataSpace::SRGB_LINEAR;
574 case VK_COLOR_SPACE_BT709_NONLINEAR_EXT:
575 return DataSpace::SRGB;
576 case VK_COLOR_SPACE_BT2020_LINEAR_EXT:
577 if (format == VK_FORMAT_R16G16B16A16_SFLOAT) {
578 return DataSpace::BT2020_LINEAR_EXTENDED;
579 } else {
580 return DataSpace::BT2020_LINEAR;
581 }
582 case VK_COLOR_SPACE_HDR10_ST2084_EXT:
583 return DataSpace::BT2020_PQ;
584 case VK_COLOR_SPACE_DOLBYVISION_EXT:
585 return DataSpace::BT2020_PQ;
586 case VK_COLOR_SPACE_HDR10_HLG_EXT:
587 return DataSpace::BT2020_HLG;
588 case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT:
589 return DataSpace::ADOBE_RGB_LINEAR;
590 case VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT:
591 return DataSpace::ADOBE_RGB;
592         // Pass through is intended to allow the app to provide data that is
593         // passed to the display system without modification.
594 case VK_COLOR_SPACE_PASS_THROUGH_EXT:
595 return DataSpace::ARBITRARY;
596
597 default:
598             // We don't know how to map the specified colorspace,
599             // so report it as unsupported by returning an unknown
600             // dataspace.
601 return DataSpace::UNKNOWN;
602 }
603 }
604
605 } // anonymous namespace
606
607 VKAPI_ATTR
608 VkResult CreateAndroidSurfaceKHR(
609 VkInstance instance,
610 const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
611 const VkAllocationCallbacks* allocator,
612 VkSurfaceKHR* out_surface) {
613 ATRACE_CALL();
614
615 if (!allocator)
616 allocator = &GetData(instance).allocator;
617 void* mem = allocator->pfnAllocation(allocator->pUserData, sizeof(Surface),
618 alignof(Surface),
619 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
620 if (!mem)
621 return VK_ERROR_OUT_OF_HOST_MEMORY;
622 Surface* surface = new (mem) Surface;
623
624 surface->window = pCreateInfo->window;
625 surface->swapchain_handle = VK_NULL_HANDLE;
626 surface->used_by_swapchain = false;
627 int err = native_window_get_consumer_usage(surface->window.get(),
628 &surface->consumer_usage);
629 if (err != android::OK) {
630 ALOGE("native_window_get_consumer_usage() failed: %s (%d)",
631 strerror(-err), err);
632 surface->~Surface();
633 allocator->pfnFree(allocator->pUserData, surface);
634 return VK_ERROR_SURFACE_LOST_KHR;
635 }
636
637 err =
638 native_window_api_connect(surface->window.get(), NATIVE_WINDOW_API_EGL);
639 if (err != android::OK) {
640 ALOGE("native_window_api_connect() failed: %s (%d)", strerror(-err),
641 err);
642 surface->~Surface();
643 allocator->pfnFree(allocator->pUserData, surface);
644 return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
645 }
646
647 *out_surface = HandleFromSurface(surface);
648 return VK_SUCCESS;
649 }
650
651 VKAPI_ATTR
652 void DestroySurfaceKHR(VkInstance instance,
653 VkSurfaceKHR surface_handle,
654 const VkAllocationCallbacks* allocator) {
655 ATRACE_CALL();
656
657 Surface* surface = SurfaceFromHandle(surface_handle);
658 if (!surface)
659 return;
660 native_window_api_disconnect(surface->window.get(), NATIVE_WINDOW_API_EGL);
661 ALOGV_IF(surface->swapchain_handle != VK_NULL_HANDLE,
662 "destroyed VkSurfaceKHR 0x%" PRIx64
663 " has active VkSwapchainKHR 0x%" PRIx64,
664 reinterpret_cast<uint64_t>(surface_handle),
665 reinterpret_cast<uint64_t>(surface->swapchain_handle));
666 surface->~Surface();
667 if (!allocator)
668 allocator = &GetData(instance).allocator;
669 allocator->pfnFree(allocator->pUserData, surface);
670 }
671
672 VKAPI_ATTR
673 VkResult GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice /*pdev*/,
674 uint32_t /*queue_family*/,
675 VkSurfaceKHR /*surface_handle*/,
676 VkBool32* supported) {
677 *supported = VK_TRUE;
678 return VK_SUCCESS;
679 }
680
681 VKAPI_ATTR
682 VkResult GetPhysicalDeviceSurfaceCapabilitiesKHR(
683 VkPhysicalDevice pdev,
684 VkSurfaceKHR surface,
685 VkSurfaceCapabilitiesKHR* capabilities) {
686 ATRACE_CALL();
687
688 // Implement in terms of GetPhysicalDeviceSurfaceCapabilities2KHR
689
690 VkPhysicalDeviceSurfaceInfo2KHR info2 = {
691 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
692 nullptr,
693 surface
694 };
695
696 VkSurfaceCapabilities2KHR caps2 = {
697 VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
698 nullptr,
699 {},
700 };
701
702 VkResult result = GetPhysicalDeviceSurfaceCapabilities2KHR(pdev, &info2, &caps2);
703 *capabilities = caps2.surfaceCapabilities;
704 return result;
705 }
706
707 // Implements the call-twice / VK_INCOMPLETE protocol for querying lists
708 // of things where we already have the full set built in a vector.
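// For example, a caller typically passes callerPtr == nullptr first to learn
// the size, then calls again with a buffer of that size; if the provided
// buffer is smaller than the full set, only *callerCount entries are copied
// and VK_INCOMPLETE is returned.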
709 template <typename T>
710 VkResult CopyWithIncomplete(std::vector<T> const& things,
711 T* callerPtr, uint32_t* callerCount) {
712 VkResult result = VK_SUCCESS;
713 if (callerPtr) {
714 if (things.size() > *callerCount)
715 result = VK_INCOMPLETE;
716 *callerCount = std::min(uint32_t(things.size()), *callerCount);
717 std::copy(things.begin(), things.begin() + *callerCount, callerPtr);
718 } else {
719 *callerCount = things.size();
720 }
721 return result;
722 }
723
724 VKAPI_ATTR
725 VkResult GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice pdev,
726 VkSurfaceKHR surface_handle,
727 uint32_t* count,
728 VkSurfaceFormatKHR* formats) {
729 ATRACE_CALL();
730
731 const InstanceData& instance_data = GetData(pdev);
732
733 uint64_t consumer_usage = 0;
734 bool colorspace_ext =
735 instance_data.hook_extensions.test(ProcHook::EXT_swapchain_colorspace);
736 if (surface_handle == VK_NULL_HANDLE) {
737 ProcHook::Extension surfaceless = ProcHook::GOOGLE_surfaceless_query;
738 bool surfaceless_enabled =
739 instance_data.hook_extensions.test(surfaceless);
740 if (!surfaceless_enabled) {
741 return VK_ERROR_SURFACE_LOST_KHR;
742 }
743 // Support for VK_GOOGLE_surfaceless_query.
744
745 // TODO(b/203826952): research proper value; temporarily use the
746 // values seen on Pixel
747 consumer_usage = AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY;
748 } else {
749 Surface& surface = *SurfaceFromHandle(surface_handle);
750 consumer_usage = surface.consumer_usage;
751 }
752
753 AHardwareBuffer_Desc desc = {};
754 desc.width = 1;
755 desc.height = 1;
756 desc.layers = 1;
757 desc.usage = consumer_usage | AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
758 AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;
759
760 // We must support R8G8B8A8
761 std::vector<VkSurfaceFormatKHR> all_formats = {
762 {VK_FORMAT_R8G8B8A8_UNORM, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR},
763 {VK_FORMAT_R8G8B8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR},
764 };
765
766 VkFormat format = VK_FORMAT_UNDEFINED;
767 if (colorspace_ext) {
768 for (VkColorSpaceKHR colorSpace :
769 colorSpaceSupportedByVkEXTSwapchainColorspace) {
770 format = VK_FORMAT_R8G8B8A8_UNORM;
771 if (GetNativeDataspace(colorSpace, format) != DataSpace::UNKNOWN) {
772 all_formats.emplace_back(
773 VkSurfaceFormatKHR{format, colorSpace});
774 }
775
776 format = VK_FORMAT_R8G8B8A8_SRGB;
777 if (GetNativeDataspace(colorSpace, format) != DataSpace::UNKNOWN) {
778 all_formats.emplace_back(
779 VkSurfaceFormatKHR{format, colorSpace});
780 }
781 }
782 }
783
784 // NOTE: Any new formats that are added must be coordinated across different
785 // Android users. This includes the ANGLE team (a layered implementation of
786 // OpenGL-ES).
787
788 format = VK_FORMAT_R5G6B5_UNORM_PACK16;
789 desc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
790 if (AHardwareBuffer_isSupported(&desc)) {
791 all_formats.emplace_back(
792 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
793 if (colorspace_ext) {
794 for (VkColorSpaceKHR colorSpace :
795 colorSpaceSupportedByVkEXTSwapchainColorspace) {
796 if (GetNativeDataspace(colorSpace, format) !=
797 DataSpace::UNKNOWN) {
798 all_formats.emplace_back(
799 VkSurfaceFormatKHR{format, colorSpace});
800 }
801 }
802 }
803 }
804
805 format = VK_FORMAT_R16G16B16A16_SFLOAT;
806 desc.format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
807 if (AHardwareBuffer_isSupported(&desc)) {
808 all_formats.emplace_back(
809 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
810 if (colorspace_ext) {
811 for (VkColorSpaceKHR colorSpace :
812 colorSpaceSupportedByVkEXTSwapchainColorspace) {
813 if (GetNativeDataspace(colorSpace, format) !=
814 DataSpace::UNKNOWN) {
815 all_formats.emplace_back(
816 VkSurfaceFormatKHR{format, colorSpace});
817 }
818 }
819
820 for (
821 VkColorSpaceKHR colorSpace :
822 colorSpaceSupportedByVkEXTSwapchainColorspaceOnFP16SurfaceOnly) {
823 if (GetNativeDataspace(colorSpace, format) !=
824 DataSpace::UNKNOWN) {
825 all_formats.emplace_back(
826 VkSurfaceFormatKHR{format, colorSpace});
827 }
828 }
829 }
830 }
831
832 format = VK_FORMAT_A2B10G10R10_UNORM_PACK32;
833 desc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
834 if (AHardwareBuffer_isSupported(&desc)) {
835 all_formats.emplace_back(
836 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
837 if (colorspace_ext) {
838 for (VkColorSpaceKHR colorSpace :
839 colorSpaceSupportedByVkEXTSwapchainColorspace) {
840 if (GetNativeDataspace(colorSpace, format) !=
841 DataSpace::UNKNOWN) {
842 all_formats.emplace_back(
843 VkSurfaceFormatKHR{format, colorSpace});
844 }
845 }
846 }
847 }
848
849 format = VK_FORMAT_R8_UNORM;
850 desc.format = AHARDWAREBUFFER_FORMAT_R8_UNORM;
851 if (AHardwareBuffer_isSupported(&desc)) {
852 if (colorspace_ext) {
853 all_formats.emplace_back(
854 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_PASS_THROUGH_EXT});
855 }
856 }
857
858 bool rgba10x6_formats_ext = false;
859 uint32_t exts_count;
860 const auto& driver = GetData(pdev).driver;
861 driver.EnumerateDeviceExtensionProperties(pdev, nullptr, &exts_count,
862 nullptr);
863 std::vector<VkExtensionProperties> props(exts_count);
864 driver.EnumerateDeviceExtensionProperties(pdev, nullptr, &exts_count,
865 props.data());
866 for (uint32_t i = 0; i < exts_count; i++) {
867 VkExtensionProperties prop = props[i];
868 if (strcmp(prop.extensionName,
869 VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME) == 0) {
870 rgba10x6_formats_ext = true;
871 }
872 }
873 format = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16;
874 desc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A10_UNORM;
875 if (AHardwareBuffer_isSupported(&desc) && rgba10x6_formats_ext) {
876 all_formats.emplace_back(
877 VkSurfaceFormatKHR{format, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR});
878 if (colorspace_ext) {
879 for (VkColorSpaceKHR colorSpace :
880 colorSpaceSupportedByVkEXTSwapchainColorspace) {
881 if (GetNativeDataspace(colorSpace, format) !=
882 DataSpace::UNKNOWN) {
883 all_formats.emplace_back(
884 VkSurfaceFormatKHR{format, colorSpace});
885 }
886 }
887 }
888 }
889
890 // NOTE: Any new formats that are added must be coordinated across different
891 // Android users. This includes the ANGLE team (a layered implementation of
892 // OpenGL-ES).
893
894 return CopyWithIncomplete(all_formats, formats, count);
895 }
896
897 VKAPI_ATTR
898 VkResult GetPhysicalDeviceSurfaceCapabilities2KHR(
899 VkPhysicalDevice physicalDevice,
900 const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
901 VkSurfaceCapabilities2KHR* pSurfaceCapabilities) {
902 ATRACE_CALL();
903
904 auto surface = pSurfaceInfo->surface;
905 auto capabilities = &pSurfaceCapabilities->surfaceCapabilities;
906
907 VkSurfacePresentModeEXT const *pPresentMode = nullptr;
908 for (auto pNext = reinterpret_cast<VkBaseInStructure const *>(pSurfaceInfo->pNext);
909 pNext; pNext = reinterpret_cast<VkBaseInStructure const *>(pNext->pNext)) {
910 switch (pNext->sType) {
911 case VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT:
912 pPresentMode = reinterpret_cast<VkSurfacePresentModeEXT const *>(pNext);
913 break;
914
915 default:
916 break;
917 }
918 }
919
920 int err;
921 int width, height;
922 int transform_hint;
923 int max_buffer_count;
924 int min_undequeued_buffers;
925 if (surface == VK_NULL_HANDLE) {
926 const InstanceData& instance_data = GetData(physicalDevice);
927 ProcHook::Extension surfaceless = ProcHook::GOOGLE_surfaceless_query;
928 bool surfaceless_enabled =
929 instance_data.hook_extensions.test(surfaceless);
930 if (!surfaceless_enabled) {
931 // It is an error to pass a surface==VK_NULL_HANDLE unless the
932 // VK_GOOGLE_surfaceless_query extension is enabled
933 return VK_ERROR_SURFACE_LOST_KHR;
934 }
935 // Support for VK_GOOGLE_surfaceless_query. The primary purpose of this
936 // extension for this function is for
937 // VkSurfaceProtectedCapabilitiesKHR::supportsProtected. The following
938 // four values cannot be known without a surface. Default values will
939 // be supplied anyway, but cannot be relied upon.
940 width = 0xFFFFFFFF;
941 height = 0xFFFFFFFF;
942 transform_hint = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
943 capabilities->minImageCount = 0xFFFFFFFF;
944 capabilities->maxImageCount = 0xFFFFFFFF;
945 } else {
946 ANativeWindow* window = SurfaceFromHandle(surface)->window.get();
947
948 err = window->query(window, NATIVE_WINDOW_DEFAULT_WIDTH, &width);
949 if (err != android::OK) {
950 ALOGE("NATIVE_WINDOW_DEFAULT_WIDTH query failed: %s (%d)",
951 strerror(-err), err);
952 return VK_ERROR_SURFACE_LOST_KHR;
953 }
954 err = window->query(window, NATIVE_WINDOW_DEFAULT_HEIGHT, &height);
955 if (err != android::OK) {
956             ALOGE("NATIVE_WINDOW_DEFAULT_HEIGHT query failed: %s (%d)",
957 strerror(-err), err);
958 return VK_ERROR_SURFACE_LOST_KHR;
959 }
960
961 err = window->query(window, NATIVE_WINDOW_TRANSFORM_HINT,
962 &transform_hint);
963 if (err != android::OK) {
964 ALOGE("NATIVE_WINDOW_TRANSFORM_HINT query failed: %s (%d)",
965 strerror(-err), err);
966 return VK_ERROR_SURFACE_LOST_KHR;
967 }
968
969 err = window->query(window, NATIVE_WINDOW_MAX_BUFFER_COUNT,
970 &max_buffer_count);
971 if (err != android::OK) {
972 ALOGE("NATIVE_WINDOW_MAX_BUFFER_COUNT query failed: %s (%d)",
973 strerror(-err), err);
974 return VK_ERROR_SURFACE_LOST_KHR;
975 }
976
977 err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
978 &min_undequeued_buffers);
979 if (err != android::OK) {
980 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
981 strerror(-err), err);
982 return VK_ERROR_SURFACE_LOST_KHR;
983 }
984
985         // The additional buffer count over min_undequeued_buffers in Vulkan comes from 2 total
986         // being technically enough for FIFO (although a poor experience) vs 3 being the
987         // absolute minimum for mailbox to be useful. So min_undequeued_buffers + 2 is sensible.
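        // (For example, with min_undequeued_buffers == 1 and
        // max_buffer_count == 64, minImageCount comes out to 3.)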
988 static constexpr int default_additional_buffers = 2;
989
990 if(pPresentMode != nullptr) {
991 switch (pPresentMode->presentMode) {
992 case VK_PRESENT_MODE_IMMEDIATE_KHR:
993 ALOGE("Swapchain present mode VK_PRESENT_MODE_IMMEDIATE_KHR is not supported");
994 break;
995 case VK_PRESENT_MODE_MAILBOX_KHR:
996 case VK_PRESENT_MODE_FIFO_KHR:
997 capabilities->minImageCount = std::min(max_buffer_count,
998 min_undequeued_buffers + default_additional_buffers);
999 capabilities->maxImageCount = static_cast<uint32_t>(max_buffer_count);
1000 break;
1001 case VK_PRESENT_MODE_FIFO_RELAXED_KHR:
1002                     ALOGE("Swapchain present mode VK_PRESENT_MODE_FIFO_RELAXED_KHR "
1003 "is not supported");
1004 break;
1005 case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
1006 case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
1007 capabilities->minImageCount = 1;
1008 capabilities->maxImageCount = 1;
1009 break;
1010
1011 default:
1012 ALOGE("Unrecognized swapchain present mode %u is not supported",
1013 pPresentMode->presentMode);
1014 break;
1015 }
1016 } else {
1017 capabilities->minImageCount = std::min(max_buffer_count,
1018 min_undequeued_buffers + default_additional_buffers);
1019 capabilities->maxImageCount = static_cast<uint32_t>(max_buffer_count);
1020 }
1021 }
1022
1023 capabilities->currentExtent =
1024 VkExtent2D{static_cast<uint32_t>(width), static_cast<uint32_t>(height)};
1025
1026 // TODO(http://b/134182502): Figure out what the max extent should be.
1027 capabilities->minImageExtent = VkExtent2D{1, 1};
1028 capabilities->maxImageExtent = VkExtent2D{4096, 4096};
1029
1030 if (capabilities->maxImageExtent.height <
1031 capabilities->currentExtent.height) {
1032 capabilities->maxImageExtent.height =
1033 capabilities->currentExtent.height;
1034 }
1035
1036 if (capabilities->maxImageExtent.width <
1037 capabilities->currentExtent.width) {
1038 capabilities->maxImageExtent.width = capabilities->currentExtent.width;
1039 }
1040
1041 capabilities->maxImageArrayLayers = 1;
1042
1043 capabilities->supportedTransforms = kSupportedTransforms;
1044 capabilities->currentTransform =
1045 TranslateNativeToVulkanTransform(transform_hint);
1046
1047 // On Android, window composition is a WindowManager property, not something
1048 // associated with the bufferqueue. It can't be changed from here.
1049 capabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
1050
1051 capabilities->supportedUsageFlags =
1052 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
1053 VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT |
1054 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
1055 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
1056
1057 for (auto pNext = reinterpret_cast<VkBaseOutStructure*>(pSurfaceCapabilities->pNext);
1058 pNext; pNext = reinterpret_cast<VkBaseOutStructure*>(pNext->pNext)) {
1059
1060 switch (pNext->sType) {
1061 case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR: {
1062 VkSharedPresentSurfaceCapabilitiesKHR* shared_caps =
1063 reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>(pNext);
1064 // Claim same set of usage flags are supported for
1065 // shared present modes as for other modes.
1066 shared_caps->sharedPresentSupportedUsageFlags =
1067 pSurfaceCapabilities->surfaceCapabilities
1068 .supportedUsageFlags;
1069 } break;
1070
1071 case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: {
1072 VkSurfaceProtectedCapabilitiesKHR* protected_caps =
1073 reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>(pNext);
1074 protected_caps->supportsProtected = VK_TRUE;
1075 } break;
1076
1077 case VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT: {
1078 VkSurfacePresentScalingCapabilitiesEXT* scaling_caps =
1079 reinterpret_cast<VkSurfacePresentScalingCapabilitiesEXT*>(pNext);
1080 // By default, Android stretches the buffer to fit the window,
1081 // without preserving aspect ratio. Other modes are technically possible
1082 // but consult with CoGS team before exposing them here!
1083 scaling_caps->supportedPresentScaling = VK_PRESENT_SCALING_STRETCH_BIT_EXT;
1084
1085 // Since we always scale, we don't support any gravity.
1086 scaling_caps->supportedPresentGravityX = 0;
1087 scaling_caps->supportedPresentGravityY = 0;
1088
1089 // Scaled image limits are just the basic image limits
1090 scaling_caps->minScaledImageExtent = capabilities->minImageExtent;
1091 scaling_caps->maxScaledImageExtent = capabilities->maxImageExtent;
1092 } break;
1093
1094 case VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT: {
1095 VkSurfacePresentModeCompatibilityEXT* mode_caps =
1096 reinterpret_cast<VkSurfacePresentModeCompatibilityEXT*>(pNext);
1097
1098 ALOG_ASSERT(pPresentMode,
1099 "querying VkSurfacePresentModeCompatibilityEXT "
1100 "requires VkSurfacePresentModeEXT to be provided");
1101 std::vector<VkPresentModeKHR> compatibleModes;
1102 compatibleModes.push_back(pPresentMode->presentMode);
1103
1104 switch (pPresentMode->presentMode) {
1105 // Shared modes are both compatible with each other.
1106 case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
1107 compatibleModes.push_back(VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR);
1108 break;
1109 case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
1110 compatibleModes.push_back(VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR);
1111 break;
1112 default:
1113 // Other modes are only compatible with themselves.
1114 // TODO: consider whether switching between FIFO and MAILBOX is reasonable
1115 break;
1116 }
1117
1118 // Note: this does not generate VK_INCOMPLETE since we're nested inside
1119 // a larger query and there would be no way to determine exactly where it came from.
1120 CopyWithIncomplete(compatibleModes, mode_caps->pPresentModes,
1121 &mode_caps->presentModeCount);
1122 } break;
1123
1124 default:
1125 // Ignore all other extension structs
1126 break;
1127 }
1128 }
1129
1130 return VK_SUCCESS;
1131 }
1132
1133 VKAPI_ATTR
1134 VkResult GetPhysicalDeviceSurfaceFormats2KHR(
1135 VkPhysicalDevice physicalDevice,
1136 const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
1137 uint32_t* pSurfaceFormatCount,
1138 VkSurfaceFormat2KHR* pSurfaceFormats) {
1139 ATRACE_CALL();
1140
1141 if (!pSurfaceFormats) {
1142 return GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice,
1143 pSurfaceInfo->surface,
1144 pSurfaceFormatCount, nullptr);
1145 }
1146
1147 // temp vector for forwarding; we'll marshal it into the pSurfaceFormats
1148 // after the call.
1149 std::vector<VkSurfaceFormatKHR> surface_formats(*pSurfaceFormatCount);
1150 VkResult result = GetPhysicalDeviceSurfaceFormatsKHR(
1151 physicalDevice, pSurfaceInfo->surface, pSurfaceFormatCount,
1152 surface_formats.data());
1153
1154 if (result != VK_SUCCESS && result != VK_INCOMPLETE) {
1155 return result;
1156 }
1157
1158 const auto& driver = GetData(physicalDevice).driver;
1159
1160 // marshal results individually due to stride difference.
1161 uint32_t formats_to_marshal = *pSurfaceFormatCount;
1162 for (uint32_t i = 0u; i < formats_to_marshal; i++) {
1163 pSurfaceFormats[i].surfaceFormat = surface_formats[i];
1164
1165 // Query the compression properties for the surface format
1166 VkSurfaceFormat2KHR* pSurfaceFormat = &pSurfaceFormats[i];
1167 while (pSurfaceFormat->pNext) {
1168 pSurfaceFormat =
1169 reinterpret_cast<VkSurfaceFormat2KHR*>(pSurfaceFormat->pNext);
1170 switch (pSurfaceFormat->sType) {
1171 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT: {
1172 VkImageCompressionPropertiesEXT* surfaceCompressionProps =
1173 reinterpret_cast<VkImageCompressionPropertiesEXT*>(
1174 pSurfaceFormat);
1175
1176 if (surfaceCompressionProps &&
1177 (driver.GetPhysicalDeviceImageFormatProperties2KHR ||
1178 driver.GetPhysicalDeviceImageFormatProperties2)) {
1179 VkPhysicalDeviceImageFormatInfo2 imageFormatInfo = {};
1180 imageFormatInfo.sType =
1181 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
1182 imageFormatInfo.format =
1183 pSurfaceFormats[i].surfaceFormat.format;
1184 imageFormatInfo.type = VK_IMAGE_TYPE_2D;
1185 imageFormatInfo.usage =
1186 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
1187 imageFormatInfo.pNext = nullptr;
1188
1189 VkImageCompressionControlEXT compressionControl = {};
1190 compressionControl.sType =
1191 VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT;
1192 compressionControl.pNext = imageFormatInfo.pNext;
1193 compressionControl.flags =
1194 VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT;
1195
1196 imageFormatInfo.pNext = &compressionControl;
1197
1198 VkImageCompressionPropertiesEXT compressionProps = {};
1199 compressionProps.sType =
1200 VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT;
1201 compressionProps.pNext = nullptr;
1202
1203 VkImageFormatProperties2KHR imageFormatProps = {};
1204 imageFormatProps.sType =
1205 VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR;
1206 imageFormatProps.pNext = &compressionProps;
1207
1208 VkResult compressionRes =
1209 GetPhysicalDeviceImageFormatProperties2(
1210 physicalDevice, &imageFormatInfo,
1211 &imageFormatProps);
1212 if (compressionRes == VK_SUCCESS) {
1213 surfaceCompressionProps->imageCompressionFlags =
1214 compressionProps.imageCompressionFlags;
1215 surfaceCompressionProps
1216 ->imageCompressionFixedRateFlags =
1217 compressionProps.imageCompressionFixedRateFlags;
1218 } else if (compressionRes ==
1219 VK_ERROR_OUT_OF_HOST_MEMORY ||
1220 compressionRes ==
1221 VK_ERROR_OUT_OF_DEVICE_MEMORY) {
1222 return compressionRes;
1223 } else {
1224 // For any of the *_NOT_SUPPORTED errors we continue
1225 // onto the next format
1226 continue;
1227 }
1228 }
1229 } break;
1230
1231 default:
1232 // Ignore all other extension structs
1233 break;
1234 }
1235 }
1236 }
1237
1238 return result;
1239 }
1240
1241 VKAPI_ATTR
1242 VkResult GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice pdev,
1243 VkSurfaceKHR surface,
1244 uint32_t* count,
1245 VkPresentModeKHR* modes) {
1246 ATRACE_CALL();
1247
1248 int err;
1249 int query_value;
1250 std::vector<VkPresentModeKHR> present_modes;
1251 if (surface == VK_NULL_HANDLE) {
1252 const InstanceData& instance_data = GetData(pdev);
1253 ProcHook::Extension surfaceless = ProcHook::GOOGLE_surfaceless_query;
1254 bool surfaceless_enabled =
1255 instance_data.hook_extensions.test(surfaceless);
1256 if (!surfaceless_enabled) {
1257 return VK_ERROR_SURFACE_LOST_KHR;
1258 }
1259 // Support for VK_GOOGLE_surfaceless_query. The primary purpose of this
1260 // extension for this function is for
1261 // VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR and
1262 // VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR. We technically cannot
1263 // know if VK_PRESENT_MODE_SHARED_MAILBOX_KHR is supported without a
1264 // surface, and that cannot be relied upon. Therefore, don't return it.
1265 present_modes.push_back(VK_PRESENT_MODE_FIFO_KHR);
1266 } else {
1267 ANativeWindow* window = SurfaceFromHandle(surface)->window.get();
1268
1269 err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
1270 &query_value);
1271 if (err != android::OK || query_value < 0) {
1272 ALOGE(
1273 "NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d) "
1274 "value=%d",
1275 strerror(-err), err, query_value);
1276 return VK_ERROR_SURFACE_LOST_KHR;
1277 }
1278 uint32_t min_undequeued_buffers = static_cast<uint32_t>(query_value);
1279
1280 err =
1281 window->query(window, NATIVE_WINDOW_MAX_BUFFER_COUNT, &query_value);
1282 if (err != android::OK || query_value < 0) {
1283 ALOGE(
1284 "NATIVE_WINDOW_MAX_BUFFER_COUNT query failed: %s (%d) value=%d",
1285 strerror(-err), err, query_value);
1286 return VK_ERROR_SURFACE_LOST_KHR;
1287 }
1288 uint32_t max_buffer_count = static_cast<uint32_t>(query_value);
1289
1290 if (min_undequeued_buffers + 1 < max_buffer_count)
1291 present_modes.push_back(VK_PRESENT_MODE_MAILBOX_KHR);
1292 present_modes.push_back(VK_PRESENT_MODE_FIFO_KHR);
1293 }
1294
1295 VkPhysicalDevicePresentationPropertiesANDROID present_properties;
1296 QueryPresentationProperties(pdev, &present_properties);
1297 if (present_properties.sharedImage) {
1298 present_modes.push_back(VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR);
1299 present_modes.push_back(VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR);
1300 }
1301
1302 return CopyWithIncomplete(present_modes, modes, count);
1303 }
1304
1305 VKAPI_ATTR
1306 VkResult GetDeviceGroupPresentCapabilitiesKHR(
1307 VkDevice,
1308 VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) {
1309 ATRACE_CALL();
1310
1311 ALOGV_IF(pDeviceGroupPresentCapabilities->sType !=
1312 VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
1313 "vkGetDeviceGroupPresentCapabilitiesKHR: invalid "
1314 "VkDeviceGroupPresentCapabilitiesKHR structure type %d",
1315 pDeviceGroupPresentCapabilities->sType);
1316
1317 memset(pDeviceGroupPresentCapabilities->presentMask, 0,
1318 sizeof(pDeviceGroupPresentCapabilities->presentMask));
1319
1320 // assume device group of size 1
1321 pDeviceGroupPresentCapabilities->presentMask[0] = 1 << 0;
1322 pDeviceGroupPresentCapabilities->modes =
1323 VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
1324
1325 return VK_SUCCESS;
1326 }
1327
1328 VKAPI_ATTR
1329 VkResult GetDeviceGroupSurfacePresentModesKHR(
1330 VkDevice,
1331 VkSurfaceKHR,
1332 VkDeviceGroupPresentModeFlagsKHR* pModes) {
1333 ATRACE_CALL();
1334
1335 *pModes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
1336 return VK_SUCCESS;
1337 }
1338
1339 VKAPI_ATTR
1340 VkResult GetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice,
1341 VkSurfaceKHR surface,
1342 uint32_t* pRectCount,
1343 VkRect2D* pRects) {
1344 ATRACE_CALL();
1345
1346 if (!pRects) {
1347 *pRectCount = 1;
1348 } else {
1349 uint32_t count = std::min(*pRectCount, 1u);
1350 bool incomplete = *pRectCount < 1;
1351
1352 *pRectCount = count;
1353
1354 if (incomplete) {
1355 return VK_INCOMPLETE;
1356 }
1357
1358 int err;
1359 ANativeWindow* window = SurfaceFromHandle(surface)->window.get();
1360
1361 int width = 0, height = 0;
1362 err = window->query(window, NATIVE_WINDOW_DEFAULT_WIDTH, &width);
1363 if (err != android::OK) {
1364 ALOGE("NATIVE_WINDOW_DEFAULT_WIDTH query failed: %s (%d)",
1365 strerror(-err), err);
1366 }
1367 err = window->query(window, NATIVE_WINDOW_DEFAULT_HEIGHT, &height);
1368 if (err != android::OK) {
1369             ALOGE("NATIVE_WINDOW_DEFAULT_HEIGHT query failed: %s (%d)",
1370 strerror(-err), err);
1371 }
1372
1373 pRects[0].offset.x = 0;
1374 pRects[0].offset.y = 0;
1375 pRects[0].extent = VkExtent2D{static_cast<uint32_t>(width),
1376 static_cast<uint32_t>(height)};
1377 }
1378 return VK_SUCCESS;
1379 }
1380
1381 static void DestroySwapchainInternal(VkDevice device,
1382 VkSwapchainKHR swapchain_handle,
1383 const VkAllocationCallbacks* allocator) {
1384 ATRACE_CALL();
1385
1386 const auto& dispatch = GetData(device).driver;
1387 Swapchain* swapchain = SwapchainFromHandle(swapchain_handle);
1388 if (!swapchain) {
1389 return;
1390 }
1391
1392 bool active = swapchain->surface.swapchain_handle == swapchain_handle;
1393 ANativeWindow* window = active ? swapchain->surface.window.get() : nullptr;
1394
1395 if (window && swapchain->frame_timestamps_enabled) {
1396 native_window_enable_frame_timestamps(window, false);
1397 }
1398
1399 for (uint32_t i = 0; i < swapchain->num_images; i++) {
1400 ReleaseSwapchainImage(device, swapchain->shared, window, -1,
1401 swapchain->images[i], false);
1402 }
1403
1404 if (active) {
1405 swapchain->surface.swapchain_handle = VK_NULL_HANDLE;
1406 }
1407
1408 if (!allocator) {
1409 allocator = &GetData(device).allocator;
1410 }
1411
1412 swapchain->~Swapchain();
1413 allocator->pfnFree(allocator->pUserData, swapchain);
1414 }
1415
1416 static VkResult getProducerUsageGPDIFP2(
1417 const VkPhysicalDevice& pdev,
1418 const VkSwapchainCreateInfoKHR* create_info,
1419 const VkSwapchainImageUsageFlagsANDROID swapchain_image_usage,
1420 bool create_protected_swapchain,
1421 uint64_t* producer_usage) {
1422 // Look through the create_info pNext chain passed to createSwapchainKHR
1423 // for an image compression control struct.
1424     // If one is found AND the appropriate extensions are enabled, create a
1425 // VkImageCompressionControlEXT structure to pass on to
1426 // GetPhysicalDeviceImageFormatProperties2
1427 void* compression_control_pNext = nullptr;
1428 VkImageCompressionControlEXT image_compression = {};
1429 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1430 while (create_infos->pNext) {
1431 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(
1432 create_infos->pNext);
1433 switch (create_infos->sType) {
1434 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1435 const VkImageCompressionControlEXT* compression_infos =
1436 reinterpret_cast<const VkImageCompressionControlEXT*>(
1437 create_infos);
1438 image_compression = *compression_infos;
1439 image_compression.pNext = nullptr;
1440 compression_control_pNext = &image_compression;
1441 } break;
1442 default:
1443 // Ignore all other info structs
1444 break;
1445 }
1446 }
1447
1448 // call GetPhysicalDeviceImageFormatProperties2KHR
1449 VkPhysicalDeviceExternalImageFormatInfo external_image_format_info = {
1450 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
1451 .pNext = compression_control_pNext,
1452 .handleType =
1453 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
1454 };
1455
1456     // AHB does not have an sRGB format, so we can't pass it to GPDIFP2.
1457     // We need to convert the format to UNORM if it is sRGB.
1458 VkFormat format = create_info->imageFormat;
1459 if (format == VK_FORMAT_R8G8B8A8_SRGB) {
1460 format = VK_FORMAT_R8G8B8A8_UNORM;
1461 }
1462
1463 VkPhysicalDeviceImageFormatInfo2 image_format_info = {
1464 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
1465 .pNext = &external_image_format_info,
1466 .format = format,
1467 .type = VK_IMAGE_TYPE_2D,
1468 .tiling = VK_IMAGE_TILING_OPTIMAL,
1469 .usage = create_info->imageUsage,
1470 .flags =
1471 create_protected_swapchain ? VK_IMAGE_CREATE_PROTECTED_BIT : 0u,
1472 };
1473
1474 // If supporting mutable format swapchain add the mutable format flag
1475 if (create_info->flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
1476 image_format_info.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1477 image_format_info.flags |= VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR;
1478 }
1479
1480 VkAndroidHardwareBufferUsageANDROID ahb_usage;
1481 ahb_usage.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
1482 ahb_usage.pNext = nullptr;
1483
1484 VkImageFormatProperties2 image_format_properties;
1485 image_format_properties.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
1486 image_format_properties.pNext = &ahb_usage;
1487
1488 VkResult result = GetPhysicalDeviceImageFormatProperties2(
1489 pdev, &image_format_info, &image_format_properties);
1490 if (result != VK_SUCCESS) {
1491 ALOGE(
1492 "VkGetPhysicalDeviceImageFormatProperties2 for AHB usage "
1493 "failed: %d",
1494 result);
1495 return VK_ERROR_SURFACE_LOST_KHR;
1496 }
1497 // Determine if USAGE_FRONT_BUFFER is needed.
1498 // GPDIFP2 has no means of using VkSwapchainImageUsageFlagsANDROID when
1499 // querying for producer_usage. So androidHardwareBufferUsage will not
1500 // contain USAGE_FRONT_BUFFER. We need to manually check for usage here.
1501 if (!(swapchain_image_usage &
1502 VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID)) {
1503 *producer_usage = ahb_usage.androidHardwareBufferUsage;
1504 return VK_SUCCESS;
1505 }
1506
1507 // Check if USAGE_FRONT_BUFFER is supported for this swapchain
1508 AHardwareBuffer_Desc ahb_desc = {
1509 .width = create_info->imageExtent.width,
1510 .height = create_info->imageExtent.height,
1511 .layers = create_info->imageArrayLayers,
1512 .format = create_info->imageFormat,
1513 .usage = ahb_usage.androidHardwareBufferUsage |
1514 AHARDWAREBUFFER_USAGE_FRONT_BUFFER,
1515 .stride = 0, // stride is always ignored when calling isSupported()
1516 };
1517
1518 // If FRONT_BUFFER is not supported in the GPDIFP2 path
1519     // then we need to fall back to GetSwapchainGrallocUsageXAndroid
1520 if (AHardwareBuffer_isSupported(&ahb_desc)) {
1521 *producer_usage = ahb_usage.androidHardwareBufferUsage;
1522 *producer_usage |= AHARDWAREBUFFER_USAGE_FRONT_BUFFER;
1523 return VK_SUCCESS;
1524 }
1525
1526 return VK_ERROR_FORMAT_NOT_SUPPORTED;
1527 }
1528
1529 static VkResult getProducerUsage(const VkDevice& device,
1530 const VkSwapchainCreateInfoKHR* create_info,
1531 const VkSwapchainImageUsageFlagsANDROID swapchain_image_usage,
1532 bool create_protected_swapchain,
1533 uint64_t* producer_usage) {
1534 // Get the physical device to query the appropriate producer usage
1535 const VkPhysicalDevice& pdev = GetData(device).driver_physical_device;
1536 const InstanceData& instance_data = GetData(pdev);
1537 const InstanceDriverTable& instance_dispatch = instance_data.driver;
1538
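    // Preference order for deriving the producer usage: the
    // GetPhysicalDeviceImageFormatProperties2[KHR] path first, then the
    // versioned vkGetSwapchainGrallocUsage*ANDROID entry points below,
    // newest first, with the legacy single-usage query as the last resort.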
1539 if (instance_dispatch.GetPhysicalDeviceImageFormatProperties2 ||
1540 instance_dispatch.GetPhysicalDeviceImageFormatProperties2KHR) {
1541 VkResult result =
1542 getProducerUsageGPDIFP2(pdev, create_info, swapchain_image_usage,
1543 create_protected_swapchain, producer_usage);
1544 if (result == VK_SUCCESS) {
1545 return VK_SUCCESS;
1546 }
1547 // Fall through to gralloc path on error
1548 }
1549
1550 uint64_t native_usage = 0;
1551 void* usage_info_pNext = nullptr;
1552 VkResult result;
1553 VkImageCompressionControlEXT image_compression = {};
1554 const auto& dispatch = GetData(device).driver;
1555 if (dispatch.GetSwapchainGrallocUsage4ANDROID) {
1556 ATRACE_BEGIN("GetSwapchainGrallocUsage4ANDROID");
1557 VkGrallocUsageInfo2ANDROID gralloc_usage_info = {};
1558 gralloc_usage_info.sType =
1559 VK_STRUCTURE_TYPE_GRALLOC_USAGE_INFO_2_ANDROID;
1560 gralloc_usage_info.format = create_info->imageFormat;
1561 gralloc_usage_info.imageUsage = create_info->imageUsage;
1562 gralloc_usage_info.swapchainImageUsage = swapchain_image_usage;
1563
1564 // Look through the pNext chain for an image compression control struct
1565 // if one is found AND the appropriate extensions are enabled,
1566 // append it to be the gralloc usage pNext chain
1567 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1568 while (create_infos->pNext) {
1569 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(
1570 create_infos->pNext);
1571 switch (create_infos->sType) {
1572 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1573 const VkImageCompressionControlEXT* compression_infos =
1574 reinterpret_cast<const VkImageCompressionControlEXT*>(
1575 create_infos);
1576 image_compression = *compression_infos;
1577 image_compression.pNext = nullptr;
1578 usage_info_pNext = &image_compression;
1579 } break;
1580
1581 default:
1582 // Ignore all other info structs
1583 break;
1584 }
1585 }
1586 gralloc_usage_info.pNext = usage_info_pNext;
1587
1588 result = dispatch.GetSwapchainGrallocUsage4ANDROID(
1589 device, &gralloc_usage_info, &native_usage);
1590 ATRACE_END();
1591 if (result != VK_SUCCESS) {
1592 ALOGE("vkGetSwapchainGrallocUsage4ANDROID failed: %d", result);
1593 return VK_ERROR_SURFACE_LOST_KHR;
1594 }
1595 } else if (dispatch.GetSwapchainGrallocUsage3ANDROID) {
1596 ATRACE_BEGIN("GetSwapchainGrallocUsage3ANDROID");
1597 VkGrallocUsageInfoANDROID gralloc_usage_info = {};
1598 gralloc_usage_info.sType = VK_STRUCTURE_TYPE_GRALLOC_USAGE_INFO_ANDROID;
1599 gralloc_usage_info.format = create_info->imageFormat;
1600 gralloc_usage_info.imageUsage = create_info->imageUsage;
1601
1602 // Look through the pNext chain for an image compression control struct
1603 // if one is found AND the appropriate extensions are enabled,
1604 // append it to be the gralloc usage pNext chain
1605 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1606 while (create_infos->pNext) {
1607 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(
1608 create_infos->pNext);
1609 switch (create_infos->sType) {
1610 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1611 const VkImageCompressionControlEXT* compression_infos =
1612 reinterpret_cast<const VkImageCompressionControlEXT*>(
1613 create_infos);
1614 image_compression = *compression_infos;
1615 image_compression.pNext = nullptr;
1616 usage_info_pNext = &image_compression;
1617 } break;
1618
1619 default:
1620 // Ignore all other info structs
1621 break;
1622 }
1623 }
1624 gralloc_usage_info.pNext = usage_info_pNext;
1625
1626 result = dispatch.GetSwapchainGrallocUsage3ANDROID(
1627 device, &gralloc_usage_info, &native_usage);
1628 ATRACE_END();
1629 if (result != VK_SUCCESS) {
1630 ALOGE("vkGetSwapchainGrallocUsage3ANDROID failed: %d", result);
1631 return VK_ERROR_SURFACE_LOST_KHR;
1632 }
1633 } else if (dispatch.GetSwapchainGrallocUsage2ANDROID) {
1634 uint64_t consumer_usage, producer_usage;
1635 ATRACE_BEGIN("GetSwapchainGrallocUsage2ANDROID");
1636 result = dispatch.GetSwapchainGrallocUsage2ANDROID(
1637 device, create_info->imageFormat, create_info->imageUsage,
1638 swapchain_image_usage, &consumer_usage, &producer_usage);
1639 ATRACE_END();
1640 if (result != VK_SUCCESS) {
1641 ALOGE("vkGetSwapchainGrallocUsage2ANDROID failed: %d", result);
1642 return VK_ERROR_SURFACE_LOST_KHR;
1643 }
1644 native_usage =
1645 convertGralloc1ToBufferUsage(producer_usage, consumer_usage);
1646 } else if (dispatch.GetSwapchainGrallocUsageANDROID) {
1647 ATRACE_BEGIN("GetSwapchainGrallocUsageANDROID");
1648 int32_t legacy_usage = 0;
1649 result = dispatch.GetSwapchainGrallocUsageANDROID(
1650 device, create_info->imageFormat, create_info->imageUsage,
1651 &legacy_usage);
1652 ATRACE_END();
1653 if (result != VK_SUCCESS) {
1654 ALOGE("vkGetSwapchainGrallocUsageANDROID failed: %d", result);
1655 return VK_ERROR_SURFACE_LOST_KHR;
1656 }
1657 native_usage = static_cast<uint64_t>(legacy_usage);
1658 }
1659 *producer_usage = native_usage;
1660
1661 return VK_SUCCESS;
1662 }
1663
1664 VKAPI_ATTR
1665 VkResult CreateSwapchainKHR(VkDevice device,
1666 const VkSwapchainCreateInfoKHR* create_info,
1667 const VkAllocationCallbacks* allocator,
1668 VkSwapchainKHR* swapchain_handle) {
1669 ATRACE_CALL();
1670
1671 int err;
1672 VkResult result = VK_SUCCESS;
1673
1674 ALOGV("vkCreateSwapchainKHR: surface=0x%" PRIx64
1675 " minImageCount=%u imageFormat=%u imageColorSpace=%u"
1676 " imageExtent=%ux%u imageUsage=%#x preTransform=%u presentMode=%u"
1677 " oldSwapchain=0x%" PRIx64,
1678 reinterpret_cast<uint64_t>(create_info->surface),
1679 create_info->minImageCount, create_info->imageFormat,
1680 create_info->imageColorSpace, create_info->imageExtent.width,
1681 create_info->imageExtent.height, create_info->imageUsage,
1682 create_info->preTransform, create_info->presentMode,
1683 reinterpret_cast<uint64_t>(create_info->oldSwapchain));
1684
1685 if (!allocator)
1686 allocator = &GetData(device).allocator;
1687
1688 PixelFormat native_pixel_format =
1689 GetNativePixelFormat(create_info->imageFormat);
1690 DataSpace native_dataspace = GetNativeDataspace(
1691 create_info->imageColorSpace, create_info->imageFormat);
1692 if (native_dataspace == DataSpace::UNKNOWN) {
1693 ALOGE(
1694 "CreateSwapchainKHR(VkSwapchainCreateInfoKHR.imageColorSpace = %d) "
1695 "failed: Unsupported color space",
1696 create_info->imageColorSpace);
1697 return VK_ERROR_INITIALIZATION_FAILED;
1698 }
1699
1700 ALOGV_IF(create_info->imageArrayLayers != 1,
1701 "swapchain imageArrayLayers=%u not supported",
1702 create_info->imageArrayLayers);
1703 ALOGV_IF((create_info->preTransform & ~kSupportedTransforms) != 0,
1704 "swapchain preTransform=%#x not supported",
1705 create_info->preTransform);
1706 ALOGV_IF(!(create_info->presentMode == VK_PRESENT_MODE_FIFO_KHR ||
1707 create_info->presentMode == VK_PRESENT_MODE_MAILBOX_KHR ||
1708 create_info->presentMode == VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR ||
1709 create_info->presentMode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR),
1710 "swapchain presentMode=%u not supported",
1711 create_info->presentMode);
1712
1713 Surface& surface = *SurfaceFromHandle(create_info->surface);
1714
1715 if (surface.swapchain_handle != create_info->oldSwapchain) {
1716 ALOGV("Can't create a swapchain for VkSurfaceKHR 0x%" PRIx64
1717 " because it already has active swapchain 0x%" PRIx64
1718 " but VkSwapchainCreateInfo::oldSwapchain=0x%" PRIx64,
1719 reinterpret_cast<uint64_t>(create_info->surface),
1720 reinterpret_cast<uint64_t>(surface.swapchain_handle),
1721 reinterpret_cast<uint64_t>(create_info->oldSwapchain));
1722 return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
1723 }
1724 if (create_info->oldSwapchain != VK_NULL_HANDLE)
1725 OrphanSwapchain(device, SwapchainFromHandle(create_info->oldSwapchain));
1726
1727 // -- Reset the native window --
1728 // The native window might have been used previously, and had its properties
1729 // changed from defaults. That will affect the answer we get for queries
1730 // like MIN_UNDEQUED_BUFFERS. Reset to a known/default state before we
1731 // attempt such queries.
1732
1733 // The native window only allows dequeueing all buffers before any have
1734 // been queued, since after that point at least one is assumed to be in
1735 // non-FREE state at any given time. Disconnecting and re-connecting
1736 // orphans the previous buffers, getting us back to the state where we can
1737 // dequeue all buffers.
1738 //
1739 // This is not necessary if the surface was never used previously.
1740 //
1741 // TODO(http://b/134186185) recycle swapchain images more efficiently
1742 ANativeWindow* window = surface.window.get();
1743 if (surface.used_by_swapchain) {
1744 err = native_window_api_disconnect(window, NATIVE_WINDOW_API_EGL);
1745 ALOGW_IF(err != android::OK,
1746 "native_window_api_disconnect failed: %s (%d)", strerror(-err),
1747 err);
1748 err = native_window_api_connect(window, NATIVE_WINDOW_API_EGL);
1749 ALOGW_IF(err != android::OK,
1750 "native_window_api_connect failed: %s (%d)", strerror(-err),
1751 err);
1752 }
1753
1754 err =
1755 window->perform(window, NATIVE_WINDOW_SET_DEQUEUE_TIMEOUT, nsecs_t{-1});
1756 if (err != android::OK) {
1757 ALOGE("window->perform(SET_DEQUEUE_TIMEOUT) failed: %s (%d)",
1758 strerror(-err), err);
1759 return VK_ERROR_SURFACE_LOST_KHR;
1760 }
1761
1762 int swap_interval =
1763 create_info->presentMode == VK_PRESENT_MODE_MAILBOX_KHR ? 0 : 1;
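    // Illustrative note: interval 0 requests the window's "async" behaviour
    // used for mailbox, where a newly queued buffer may replace one that has
    // not been latched yet; interval 1 gives the usual vsync-paced FIFO
    // behaviour for the remaining present modes.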
1764 err = window->setSwapInterval(window, swap_interval);
1765 if (err != android::OK) {
1766         ALOGE("native_window->setSwapInterval(%d) failed: %s (%d)",
1767               swap_interval, strerror(-err), err);
1768 return VK_ERROR_SURFACE_LOST_KHR;
1769 }
1770
1771 err = native_window_set_shared_buffer_mode(window, false);
1772 if (err != android::OK) {
1773 ALOGE("native_window_set_shared_buffer_mode(false) failed: %s (%d)",
1774 strerror(-err), err);
1775 return VK_ERROR_SURFACE_LOST_KHR;
1776 }
1777
1778 err = native_window_set_auto_refresh(window, false);
1779 if (err != android::OK) {
1780 ALOGE("native_window_set_auto_refresh(false) failed: %s (%d)",
1781 strerror(-err), err);
1782 return VK_ERROR_SURFACE_LOST_KHR;
1783 }
1784
1785 // -- Configure the native window --
1786
1787 const auto& dispatch = GetData(device).driver;
1788
1789 err = native_window_set_buffers_format(
1790 window, static_cast<int>(native_pixel_format));
1791 if (err != android::OK) {
1792 ALOGE("native_window_set_buffers_format(%s) failed: %s (%d)",
1793 toString(native_pixel_format).c_str(), strerror(-err), err);
1794 return VK_ERROR_SURFACE_LOST_KHR;
1795 }
1796
1797 /* Respect consumer default dataspace upon HAL_DATASPACE_ARBITRARY. */
1798 if (native_dataspace != DataSpace::ARBITRARY) {
1799 err = native_window_set_buffers_data_space(
1800 window, static_cast<android_dataspace_t>(native_dataspace));
1801 if (err != android::OK) {
1802 ALOGE("native_window_set_buffers_data_space(%d) failed: %s (%d)",
1803 native_dataspace, strerror(-err), err);
1804 return VK_ERROR_SURFACE_LOST_KHR;
1805 }
1806 }
1807
1808 err = native_window_set_buffers_dimensions(
1809 window, static_cast<int>(create_info->imageExtent.width),
1810 static_cast<int>(create_info->imageExtent.height));
1811 if (err != android::OK) {
1812 ALOGE("native_window_set_buffers_dimensions(%d,%d) failed: %s (%d)",
1813 create_info->imageExtent.width, create_info->imageExtent.height,
1814 strerror(-err), err);
1815 return VK_ERROR_SURFACE_LOST_KHR;
1816 }
1817
1818 // VkSwapchainCreateInfo::preTransform indicates the transformation the app
1819 // applied during rendering. native_window_set_transform() expects the
1820 // inverse: the transform the app is requesting that the compositor perform
1821 // during composition. With native windows, pre-transform works by rendering
1822 // with the same transform the compositor is applying (as in Vulkan), but
1823 // then requesting the inverse transform, so that when the compositor does
1824     // its job the two transforms cancel each other out and the compositor ends
1825 // up applying an identity transform to the app's buffer.
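    // For example: if the compositor's hint is a 90-degree clockwise rotation
    // and the app therefore renders with preTransform = ROTATE_90, we ask the
    // window for the inverse (a 270-degree rotation), so the net transform
    // applied to the app's already-rotated buffer at composition time is
    // identity.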
1826 err = native_window_set_buffers_transform(
1827 window, InvertTransformToNative(create_info->preTransform));
1828 if (err != android::OK) {
1829 ALOGE("native_window_set_buffers_transform(%d) failed: %s (%d)",
1830 InvertTransformToNative(create_info->preTransform),
1831 strerror(-err), err);
1832 return VK_ERROR_SURFACE_LOST_KHR;
1833 }
1834
1835 err = native_window_set_scaling_mode(
1836 window, NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
1837 if (err != android::OK) {
1838 ALOGE("native_window_set_scaling_mode(SCALE_TO_WINDOW) failed: %s (%d)",
1839 strerror(-err), err);
1840 return VK_ERROR_SURFACE_LOST_KHR;
1841 }
1842
1843 VkSwapchainImageUsageFlagsANDROID swapchain_image_usage = 0;
1844 if (IsSharedPresentMode(create_info->presentMode)) {
1845 swapchain_image_usage |= VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID;
1846 err = native_window_set_shared_buffer_mode(window, true);
1847 if (err != android::OK) {
1848 ALOGE("native_window_set_shared_buffer_mode failed: %s (%d)", strerror(-err), err);
1849 return VK_ERROR_SURFACE_LOST_KHR;
1850 }
1851 }
1852
1853 if (create_info->presentMode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR) {
1854 err = native_window_set_auto_refresh(window, true);
1855 if (err != android::OK) {
1856 ALOGE("native_window_set_auto_refresh failed: %s (%d)", strerror(-err), err);
1857 return VK_ERROR_SURFACE_LOST_KHR;
1858 }
1859 }
1860
1861 int query_value;
1862 // TODO: Now that we are calling into GPDSC2 directly, this query may be redundant
1863 // the call to std::max(min_buffer_count, num_images) may be redundant as well
1864 err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
1865 &query_value);
1866 if (err != android::OK || query_value < 0) {
1867 ALOGE("window->query failed: %s (%d) value=%d", strerror(-err), err,
1868 query_value);
1869 return VK_ERROR_SURFACE_LOST_KHR;
1870 }
1871 const uint32_t min_undequeued_buffers = static_cast<uint32_t>(query_value);
1872
1873 // Lower layer insists that we have at least min_undequeued_buffers + 1
1874 // buffers. This is wasteful and we'd like to relax it in the shared case,
1875 // but not all the pieces are in place for that to work yet. Note we only
1876 // lie to the lower layer--we don't want to give the app back a swapchain
1877 // with extra images (which they can't actually use!).
1878 const uint32_t min_buffer_count = min_undequeued_buffers + 1;
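    // Illustrative arithmetic: if the query above returned 2, min_buffer_count
    // is 3, so a swapchain whose final image count below ends up as 2 still
    // gets 3 buffers set on the queue, while GetSwapchainImagesKHR only ever
    // reports the 2 swapchain images the app asked for.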
1879
1880 // Call into GPDSC2 to get the minimum and maximum allowable buffer count for the surface of
1881 // interest. This step is only necessary if the app requests a number of images
1882     // (create_info->minImageCount) that is outside the range the surface capabilities allow.
1883     // An app should call GPDSC2 itself and use those values to fill in create_info, but in the
1884     // event that the app has hard-coded an image count, an error can occur without this clamp.
1885 VkSurfacePresentModeEXT present_mode = {
1886 VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT,
1887 nullptr,
1888 create_info->presentMode
1889 };
1890 VkPhysicalDeviceSurfaceInfo2KHR surface_info2 = {
1891 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
1892 &present_mode,
1893 create_info->surface
1894 };
1895 VkSurfaceCapabilities2KHR surface_capabilities2 = {
1896 VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
1897 nullptr,
1898 {},
1899 };
1900 result = GetPhysicalDeviceSurfaceCapabilities2KHR(GetData(device).driver_physical_device,
1901 &surface_info2, &surface_capabilities2);
1902
1903 uint32_t num_images = create_info->minImageCount;
1904 num_images = std::clamp(num_images,
1905 surface_capabilities2.surfaceCapabilities.minImageCount,
1906 surface_capabilities2.surfaceCapabilities.maxImageCount);
1907
1908 const uint32_t buffer_count = std::max(min_buffer_count, num_images);
1909 err = native_window_set_buffer_count(window, buffer_count);
1910 if (err != android::OK) {
1911 ALOGE("native_window_set_buffer_count(%d) failed: %s (%d)", buffer_count,
1912 strerror(-err), err);
1913 return VK_ERROR_SURFACE_LOST_KHR;
1914 }
1915
1916 // In shared mode the num_images must be one regardless of how many
1917 // buffers were allocated for the buffer queue.
1918 if (swapchain_image_usage & VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID) {
1919 num_images = 1;
1920 }
1921
1922 VkImageFormatListCreateInfo extra_mutable_formats = {
1923 .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
1924 };
1925     VkImageFormatListCreateInfo* extra_mutable_formats_ptr = nullptr;
1926
1927 // Look through the create_info pNext chain passed to createSwapchainKHR
1928 // for an image compression control struct.
1929 // if one is found AND the appropriate extensions are enabled, create a
1930 // VkImageCompressionControlEXT structure to pass on to VkImageCreateInfo
1931     // TODO: check that the imageCompressionControlSwapchain feature is enabled
1932 void* usage_info_pNext = nullptr;
1933 VkImageCompressionControlEXT image_compression = {};
1934 const VkSwapchainCreateInfoKHR* create_infos = create_info;
1935 while (create_infos->pNext) {
1936 create_infos = reinterpret_cast<const VkSwapchainCreateInfoKHR*>(create_infos->pNext);
1937 switch (create_infos->sType) {
1938 case VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT: {
1939 const VkImageCompressionControlEXT* compression_infos =
1940 reinterpret_cast<const VkImageCompressionControlEXT*>(create_infos);
1941 image_compression = *compression_infos;
1942 image_compression.pNext = nullptr;
1943 usage_info_pNext = &image_compression;
1944 } break;
1945 case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO: {
1946 const VkImageFormatListCreateInfo* format_list =
1947 reinterpret_cast<const VkImageFormatListCreateInfo*>(
1948 create_infos);
1949 if (create_info->flags &
1950 VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
1951 if (format_list && format_list->viewFormatCount > 0 &&
1952 format_list->pViewFormats) {
1953 extra_mutable_formats.viewFormatCount =
1954 format_list->viewFormatCount;
1955 extra_mutable_formats.pViewFormats =
1956 format_list->pViewFormats;
1957 extra_mutable_formats_ptr = &extra_mutable_formats;
1958 } else {
1959 ALOGE(
1960 "vk_swapchain_create_mutable_format_bit_khr was "
1961 "set during swapchain creation but no valid "
1962 "vkimageformatlistcreateinfo was found in the "
1963 "pnext chain");
1964 return VK_ERROR_INITIALIZATION_FAILED;
1965 }
1966 }
1967 } break;
1968 default:
1969 // Ignore all other info structs
1970 break;
1971 }
1972 }
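    // Illustrative sketch (not part of this driver, names are placeholders):
    // a mutable-format swapchain request that satisfies the check above would
    // look roughly like this on the application side:
    //
    //   const VkFormat view_formats[] = {VK_FORMAT_R8G8B8A8_UNORM,
    //                                    VK_FORMAT_R8G8B8A8_SRGB};
    //   VkImageFormatListCreateInfo format_list = {
    //       .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
    //       .viewFormatCount = 2,
    //       .pViewFormats = view_formats,
    //   };
    //   VkSwapchainCreateInfoKHR info = { /* ... */ };
    //   info.flags |= VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR;
    //   info.pNext = &format_list;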
1973
1974 // Get the appropriate native_usage for the images
1975 // Get the consumer usage
1976 uint64_t native_usage = surface.consumer_usage;
1977 // Determine if the swapchain is protected
1978 bool create_protected_swapchain = false;
1979 if (create_info->flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR) {
1980 create_protected_swapchain = true;
1981 native_usage |= BufferUsage::PROTECTED;
1982 }
1983 // Get the producer usage
1984 uint64_t producer_usage;
1985 result = getProducerUsage(device, create_info, swapchain_image_usage, create_protected_swapchain, &producer_usage);
1986 if (result != VK_SUCCESS) {
1987 return result;
1988 }
1989 native_usage |= producer_usage;
1990
1991 err = native_window_set_usage(window, native_usage);
1992 if (err != android::OK) {
1993 ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), err);
1994 return VK_ERROR_SURFACE_LOST_KHR;
1995 }
1996
1997 int transform_hint;
1998 err = window->query(window, NATIVE_WINDOW_TRANSFORM_HINT, &transform_hint);
1999 if (err != android::OK) {
2000 ALOGE("NATIVE_WINDOW_TRANSFORM_HINT query failed: %s (%d)",
2001 strerror(-err), err);
2002 return VK_ERROR_SURFACE_LOST_KHR;
2003 }
2004
2005 int64_t refresh_duration;
2006 err = native_window_get_refresh_cycle_duration(window, &refresh_duration);
2007 if (err != android::OK) {
2008 ALOGE("native_window_get_refresh_cycle_duration query failed: %s (%d)",
2009 strerror(-err), err);
2010 return VK_ERROR_SURFACE_LOST_KHR;
2011 }
2012 // -- Allocate our Swapchain object --
2013 // After this point, we must deallocate the swapchain on error.
2014
2015 void* mem = allocator->pfnAllocation(allocator->pUserData,
2016 sizeof(Swapchain), alignof(Swapchain),
2017 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2018
2019 if (!mem)
2020 return VK_ERROR_OUT_OF_HOST_MEMORY;
2021
2022 Swapchain* swapchain = new (mem)
2023 Swapchain(surface, num_images, create_info->presentMode,
2024 TranslateVulkanToNativeTransform(create_info->preTransform),
2025 refresh_duration);
2026 VkSwapchainImageCreateInfoANDROID swapchain_image_create = {
2027 #pragma clang diagnostic push
2028 #pragma clang diagnostic ignored "-Wold-style-cast"
2029 .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID,
2030 #pragma clang diagnostic pop
2031 .pNext = usage_info_pNext,
2032 .usage = swapchain_image_usage,
2033 };
2034 VkNativeBufferANDROID image_native_buffer = {
2035 #pragma clang diagnostic push
2036 #pragma clang diagnostic ignored "-Wold-style-cast"
2037 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
2038 #pragma clang diagnostic pop
2039 .pNext = &swapchain_image_create,
2040 };
2041
2042 VkImageCreateInfo image_create = {
2043 .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2044 .pNext = nullptr,
2045 .flags = create_protected_swapchain ? VK_IMAGE_CREATE_PROTECTED_BIT : 0u,
2046 .imageType = VK_IMAGE_TYPE_2D,
2047 .format = create_info->imageFormat,
2048 .extent = {
2049 create_info->imageExtent.width,
2050 create_info->imageExtent.height,
2051 1
2052 },
2053 .mipLevels = 1,
2054 .arrayLayers = 1,
2055 .samples = VK_SAMPLE_COUNT_1_BIT,
2056 .tiling = VK_IMAGE_TILING_OPTIMAL,
2057 .usage = create_info->imageUsage,
2058 .sharingMode = create_info->imageSharingMode,
2059 .queueFamilyIndexCount = create_info->queueFamilyIndexCount,
2060 .pQueueFamilyIndices = create_info->pQueueFamilyIndices,
2061 };
2062
2063 if (create_info->flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
2064 image_create.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
2065 image_create.flags |= VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR;
2066 }
2067
2068 // Note: don't do deferred allocation for shared present modes. There's only one buffer
2069 // involved so very little benefit.
2070 if ((create_info->flags & VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT) &&
2071 !IsSharedPresentMode(create_info->presentMode)) {
2072 // Don't want to touch the underlying gralloc buffers yet;
2073 // instead just create unbound VkImages which will later be bound to memory inside
2074 // AcquireNextImage.
2075 VkImageSwapchainCreateInfoKHR image_swapchain_create = {
2076 .sType = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
2077 .pNext = extra_mutable_formats_ptr,
2078 .swapchain = HandleFromSwapchain(swapchain),
2079 };
2080 image_create.pNext = &image_swapchain_create;
2081
2082 for (uint32_t i = 0; i < num_images; i++) {
2083 Swapchain::Image& img = swapchain->images[i];
2084 img.buffer = nullptr;
2085 img.dequeued = false;
2086
2087 result = dispatch.CreateImage(device, &image_create, nullptr, &img.image);
2088 if (result != VK_SUCCESS) {
2089                 ALOGD("vkCreateImage for deferred swapchain image failed: %u", result);
2090 break;
2091 }
2092 }
2093 } else {
2094 // -- Dequeue all buffers and create a VkImage for each --
2095 // Any failures during or after this must cancel the dequeued buffers.
2096
2097 for (uint32_t i = 0; i < num_images; i++) {
2098 Swapchain::Image& img = swapchain->images[i];
2099
2100 ANativeWindowBuffer* buffer;
2101 err = window->dequeueBuffer(window, &buffer, &img.dequeue_fence);
2102 if (err != android::OK) {
2103 ALOGE("dequeueBuffer[%u] failed: %s (%d)", i, strerror(-err), err);
2104 switch (-err) {
2105 case ENOMEM:
2106 result = VK_ERROR_OUT_OF_DEVICE_MEMORY;
2107 break;
2108 default:
2109 result = VK_ERROR_SURFACE_LOST_KHR;
2110 break;
2111 }
2112 break;
2113 }
2114 img.buffer = buffer;
2115 img.dequeued = true;
2116
2117 image_native_buffer.handle = img.buffer->handle;
2118 image_native_buffer.stride = img.buffer->stride;
2119 image_native_buffer.format = img.buffer->format;
2120 image_native_buffer.usage = int(img.buffer->usage);
2121 android_convertGralloc0To1Usage(int(img.buffer->usage),
2122 &image_native_buffer.usage2.producer,
2123 &image_native_buffer.usage2.consumer);
2124 image_native_buffer.usage3 = img.buffer->usage;
2125 image_native_buffer.ahb =
2126 ANativeWindowBuffer_getHardwareBuffer(img.buffer.get());
2127 image_create.pNext = &image_native_buffer;
2128
2129 if (extra_mutable_formats_ptr) {
2130 extra_mutable_formats_ptr->pNext = image_create.pNext;
2131 image_create.pNext = extra_mutable_formats_ptr;
2132 }
2133
2134 ATRACE_BEGIN("CreateImage");
2135 result =
2136 dispatch.CreateImage(device, &image_create, nullptr, &img.image);
2137 ATRACE_END();
2138 if (result != VK_SUCCESS) {
2139 ALOGD("vkCreateImage w/ native buffer failed: %u", result);
2140 break;
2141 }
2142 }
2143
2144 // -- Cancel all buffers, returning them to the queue --
2145 // If an error occurred before, also destroy the VkImage and release the
2146 // buffer reference. Otherwise, we retain a strong reference to the buffer.
2147 for (uint32_t i = 0; i < num_images; i++) {
2148 Swapchain::Image& img = swapchain->images[i];
2149 if (img.dequeued) {
2150 if (!swapchain->shared) {
2151 window->cancelBuffer(window, img.buffer.get(),
2152 img.dequeue_fence);
2153 img.dequeue_fence = -1;
2154 img.dequeued = false;
2155 }
2156 }
2157 }
2158 }
2159
2160 if (result != VK_SUCCESS) {
2161 DestroySwapchainInternal(device, HandleFromSwapchain(swapchain),
2162 allocator);
2163 return result;
2164 }
2165
2166 if (transform_hint != swapchain->pre_transform) {
2167 // Log that the app is not doing pre-rotation.
2168 android::GraphicsEnv::getInstance().setTargetStats(
2169 android::GpuStatsInfo::Stats::FALSE_PREROTATION);
2170 }
2171
2172 // Set stats for creating a Vulkan swapchain
2173 android::GraphicsEnv::getInstance().setTargetStats(
2174 android::GpuStatsInfo::Stats::CREATED_VULKAN_SWAPCHAIN);
2175
2176 surface.used_by_swapchain = true;
2177 surface.swapchain_handle = HandleFromSwapchain(swapchain);
2178 *swapchain_handle = surface.swapchain_handle;
2179 return VK_SUCCESS;
2180 }
2181
2182 VKAPI_ATTR
2183 void DestroySwapchainKHR(VkDevice device,
2184 VkSwapchainKHR swapchain_handle,
2185 const VkAllocationCallbacks* allocator) {
2186 ATRACE_CALL();
2187
2188 DestroySwapchainInternal(device, swapchain_handle, allocator);
2189 }
2190
2191 VKAPI_ATTR
2192 VkResult GetSwapchainImagesKHR(VkDevice,
2193 VkSwapchainKHR swapchain_handle,
2194 uint32_t* count,
2195 VkImage* images) {
2196 ATRACE_CALL();
2197
2198 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2199 ALOGW_IF(swapchain.surface.swapchain_handle != swapchain_handle,
2200 "getting images for non-active swapchain 0x%" PRIx64
2201 "; only dequeued image handles are valid",
2202 reinterpret_cast<uint64_t>(swapchain_handle));
2203 VkResult result = VK_SUCCESS;
2204 if (images) {
2205 uint32_t n = swapchain.num_images;
2206 if (*count < swapchain.num_images) {
2207 n = *count;
2208 result = VK_INCOMPLETE;
2209 }
2210 for (uint32_t i = 0; i < n; i++)
2211 images[i] = swapchain.images[i].image;
2212 *count = n;
2213 } else {
2214 *count = swapchain.num_images;
2215 }
2216 return result;
2217 }
2218
2219 VKAPI_ATTR
2220 VkResult AcquireNextImageKHR(VkDevice device,
2221 VkSwapchainKHR swapchain_handle,
2222 uint64_t timeout,
2223 VkSemaphore semaphore,
2224 VkFence vk_fence,
2225 uint32_t* image_index) {
2226 ATRACE_CALL();
2227
2228 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2229 ANativeWindow* window = swapchain.surface.window.get();
2230 VkResult result;
2231 int err;
2232
2233 if (swapchain.surface.swapchain_handle != swapchain_handle)
2234 return VK_ERROR_OUT_OF_DATE_KHR;
2235
2236 if (swapchain.shared) {
2237 // In shared mode, we keep the buffer dequeued all the time, so we don't
2238 // want to dequeue a buffer here. Instead, just ask the driver to ensure
2239 // the semaphore and fence passed to us will be signalled.
2240 *image_index = 0;
2241 result = GetData(device).driver.AcquireImageANDROID(
2242 device, swapchain.images[*image_index].image, -1, semaphore, vk_fence);
2243 return result;
2244 }
2245
2246 const nsecs_t acquire_next_image_timeout =
2247 timeout > (uint64_t)std::numeric_limits<nsecs_t>::max() ? -1 : timeout;
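    // Anything larger than nsecs_t can represent (notably UINT64_MAX, the
    // spec's "wait indefinitely") collapses to -1, which the window treats as
    // no dequeue timeout; a timeout of 0 leaves dequeueBuffer non-blocking so
    // TIMED_OUT maps to VK_NOT_READY below.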
2248 if (acquire_next_image_timeout != swapchain.acquire_next_image_timeout) {
2249 // Cache the timeout to avoid the duplicate binder cost.
2250 err = window->perform(window, NATIVE_WINDOW_SET_DEQUEUE_TIMEOUT,
2251 acquire_next_image_timeout);
2252 if (err != android::OK) {
2253 ALOGE("window->perform(SET_DEQUEUE_TIMEOUT) failed: %s (%d)",
2254 strerror(-err), err);
2255 return VK_ERROR_SURFACE_LOST_KHR;
2256 }
2257 swapchain.acquire_next_image_timeout = acquire_next_image_timeout;
2258 }
2259
2260 ANativeWindowBuffer* buffer;
2261 int fence_fd;
2262 err = window->dequeueBuffer(window, &buffer, &fence_fd);
2263 if (err == android::TIMED_OUT || err == android::INVALID_OPERATION) {
2264 ALOGW("dequeueBuffer timed out: %s (%d)", strerror(-err), err);
2265 return timeout ? VK_TIMEOUT : VK_NOT_READY;
2266 } else if (err != android::OK) {
2267 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), err);
2268 return VK_ERROR_SURFACE_LOST_KHR;
2269 }
2270
2271 uint32_t idx;
2272 for (idx = 0; idx < swapchain.num_images; idx++) {
2273 if (swapchain.images[idx].buffer.get() == buffer) {
2274 swapchain.images[idx].dequeued = true;
2275 swapchain.images[idx].dequeue_fence = fence_fd;
2276 break;
2277 }
2278 }
2279
2280 // If this is a deferred alloc swapchain, this may be the first time we've
2281 // seen a particular buffer. If so, there should be an empty slot. Find it,
2282 // and bind the gralloc buffer to the VkImage for that slot. If there is no
2283 // empty slot, then we dequeued an unexpected buffer. Non-deferred swapchains
2284 // will also take this path, but will never have an empty slot since we
2285 // populated them all upfront.
2286 if (idx == swapchain.num_images) {
2287 for (idx = 0; idx < swapchain.num_images; idx++) {
2288 if (!swapchain.images[idx].buffer) {
2289 // Note: this structure is technically required for
2290 // Vulkan correctness, even though the driver is probably going
2291 // to use everything from the VkNativeBufferANDROID below.
2292                 // This is kind of silly, but it's how we did the ANB
2293 // side of VK_KHR_swapchain v69, so we're stuck with it unless
2294 // we want to go tinkering with the ANB spec some more.
2295 VkBindImageMemorySwapchainInfoKHR bimsi = {
2296 .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
2297 .pNext = nullptr,
2298 .swapchain = swapchain_handle,
2299 .imageIndex = idx,
2300 };
2301 VkNativeBufferANDROID nb = {
2302 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
2303 .pNext = &bimsi,
2304 .handle = buffer->handle,
2305 .stride = buffer->stride,
2306 .format = buffer->format,
2307 .usage = int(buffer->usage),
2308 .usage3 = buffer->usage,
2309 .ahb = ANativeWindowBuffer_getHardwareBuffer(buffer),
2310 };
2311 android_convertGralloc0To1Usage(int(buffer->usage),
2312 &nb.usage2.producer,
2313 &nb.usage2.consumer);
2314 VkBindImageMemoryInfo bimi = {
2315 .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
2316 .pNext = &nb,
2317 .image = swapchain.images[idx].image,
2318 .memory = VK_NULL_HANDLE,
2319 .memoryOffset = 0,
2320 };
2321 result = GetData(device).driver.BindImageMemory2(device, 1, &bimi);
2322 if (result != VK_SUCCESS) {
2323 // This shouldn't really happen. If it does, something is probably
2324 // unrecoverably wrong with the swapchain and its images. Cancel
2325 // the buffer and declare the swapchain broken.
2326 ALOGE("failed to do deferred gralloc buffer bind");
2327 window->cancelBuffer(window, buffer, fence_fd);
2328 return VK_ERROR_OUT_OF_DATE_KHR;
2329 }
2330
2331 swapchain.images[idx].dequeued = true;
2332 swapchain.images[idx].dequeue_fence = fence_fd;
2333 swapchain.images[idx].buffer = buffer;
2334 break;
2335 }
2336 }
2337 }
2338
2339 // The buffer doesn't match any slot. This shouldn't normally happen, but is
2340 // possible if the bufferqueue is reconfigured behind libvulkan's back. If this
2341 // happens, just declare the swapchain to be broken and the app will recreate it.
2342 if (idx == swapchain.num_images) {
2343 ALOGE("dequeueBuffer returned unrecognized buffer");
2344 window->cancelBuffer(window, buffer, fence_fd);
2345 return VK_ERROR_OUT_OF_DATE_KHR;
2346 }
2347
2348 int fence_clone = -1;
2349 if (fence_fd != -1) {
2350 fence_clone = dup(fence_fd);
2351 if (fence_clone == -1) {
2352 ALOGE("dup(fence) failed, stalling until signalled: %s (%d)",
2353 strerror(errno), errno);
2354 sync_wait(fence_fd, -1 /* forever */);
2355 }
2356 }
2357
2358 result = GetData(device).driver.AcquireImageANDROID(
2359 device, swapchain.images[idx].image, fence_clone, semaphore, vk_fence);
2360 if (result != VK_SUCCESS) {
2361 // NOTE: we're relying on AcquireImageANDROID to close fence_clone,
2362 // even if the call fails. We could close it ourselves on failure, but
2363 // that would create a race condition if the driver closes it on a
2364 // failure path: some other thread might create an fd with the same
2365 // number between the time the driver closes it and the time we close
2366 // it. We must assume one of: the driver *always* closes it even on
2367 // failure, or *never* closes it on failure.
2368 window->cancelBuffer(window, buffer, fence_fd);
2369 swapchain.images[idx].dequeued = false;
2370 swapchain.images[idx].dequeue_fence = -1;
2371 return result;
2372 }
2373
2374 *image_index = idx;
2375 return VK_SUCCESS;
2376 }
2377
2378 VKAPI_ATTR
2379 VkResult AcquireNextImage2KHR(VkDevice device,
2380 const VkAcquireNextImageInfoKHR* pAcquireInfo,
2381 uint32_t* pImageIndex) {
2382 ATRACE_CALL();
2383
2384 return AcquireNextImageKHR(device, pAcquireInfo->swapchain,
2385 pAcquireInfo->timeout, pAcquireInfo->semaphore,
2386 pAcquireInfo->fence, pImageIndex);
2387 }
2388
2389 static VkResult WorstPresentResult(VkResult a, VkResult b) {
2390 // See the error ranking for vkQueuePresentKHR at the end of section 29.6
2391 // (in spec version 1.0.14).
2392 static const VkResult kWorstToBest[] = {
2393 VK_ERROR_DEVICE_LOST,
2394 VK_ERROR_SURFACE_LOST_KHR,
2395 VK_ERROR_OUT_OF_DATE_KHR,
2396 VK_ERROR_OUT_OF_DEVICE_MEMORY,
2397 VK_ERROR_OUT_OF_HOST_MEMORY,
2398 VK_SUBOPTIMAL_KHR,
2399 };
2400 for (auto result : kWorstToBest) {
2401 if (a == result || b == result)
2402 return result;
2403 }
2404 ALOG_ASSERT(a == VK_SUCCESS, "invalid vkQueuePresentKHR result %d", a);
2405 ALOG_ASSERT(b == VK_SUCCESS, "invalid vkQueuePresentKHR result %d", b);
2406 return a != VK_SUCCESS ? a : b;
2407 }
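// For example, WorstPresentResult(VK_SUBOPTIMAL_KHR, VK_ERROR_OUT_OF_DATE_KHR)
// yields VK_ERROR_OUT_OF_DATE_KHR, since the out-of-date error ranks worse in
// kWorstToBest above.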
2408
2409 // KHR_incremental_present aspect of QueuePresentKHR
2410 static void SetSwapchainSurfaceDamage(ANativeWindow *window, const VkPresentRegionKHR *pRegion) {
2411 std::vector<android_native_rect_t> rects(pRegion->rectangleCount);
2412 for (auto i = 0u; i < pRegion->rectangleCount; i++) {
2413 auto const& rect = pRegion->pRectangles[i];
2414 if (rect.layer > 0) {
2415 ALOGV("vkQueuePresentKHR ignoring invalid layer (%u); using layer 0 instead",
2416 rect.layer);
2417 }
2418
2419 rects[i].left = rect.offset.x;
2420 rects[i].bottom = rect.offset.y;
2421 rects[i].right = rect.offset.x + rect.extent.width;
2422 rects[i].top = rect.offset.y + rect.extent.height;
2423 }
2424 native_window_set_surface_damage(window, rects.data(), rects.size());
2425 }
2426
2427 // GOOGLE_display_timing aspect of QueuePresentKHR
2428 static void SetSwapchainFrameTimestamp(Swapchain &swapchain, const VkPresentTimeGOOGLE *pTime) {
2429 ANativeWindow *window = swapchain.surface.window.get();
2430
2431 // We don't know whether the app will actually use GOOGLE_display_timing
2432 // with a particular swapchain until QueuePresent; enable it on the BQ
2433 // now if needed
2434 if (!swapchain.frame_timestamps_enabled) {
2435 ALOGV("Calling native_window_enable_frame_timestamps(true)");
2436 native_window_enable_frame_timestamps(window, true);
2437 swapchain.frame_timestamps_enabled = true;
2438 }
2439
2440 // Record the nativeFrameId so it can be later correlated to
2441 // this present.
2442 uint64_t nativeFrameId = 0;
2443 int err = native_window_get_next_frame_id(
2444 window, &nativeFrameId);
2445 if (err != android::OK) {
2446 ALOGE("Failed to get next native frame ID.");
2447 }
2448
2449 // Add a new timing record with the user's presentID and
2450 // the nativeFrameId.
2451 swapchain.timing.emplace_back(pTime, nativeFrameId);
2452 if (swapchain.timing.size() > MAX_TIMING_INFOS) {
2453 swapchain.timing.erase(
2454 swapchain.timing.begin(),
2455 swapchain.timing.begin() + swapchain.timing.size() - MAX_TIMING_INFOS);
2456 }
2457 if (pTime->desiredPresentTime) {
2458 ALOGV(
2459 "Calling native_window_set_buffers_timestamp(%" PRId64 ")",
2460 pTime->desiredPresentTime);
2461 native_window_set_buffers_timestamp(
2462 window,
2463 static_cast<int64_t>(pTime->desiredPresentTime));
2464 }
2465 }
2466
2467 // EXT_swapchain_maintenance1 present mode change
2468 static bool SetSwapchainPresentMode(ANativeWindow *window, VkPresentModeKHR mode) {
2469 // There is no dynamic switching between non-shared present modes.
2470 // All we support is switching between demand and continuous refresh.
2471 if (!IsSharedPresentMode(mode))
2472 return true;
2473
2474 int err = native_window_set_auto_refresh(window,
2475 mode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR);
2476 if (err != android::OK) {
2477 ALOGE("native_window_set_auto_refresh() failed: %s (%d)",
2478 strerror(-err), err);
2479 return false;
2480 }
2481
2482 return true;
2483 }
2484
2485 static VkResult PresentOneSwapchain(
2486 VkQueue queue,
2487 Swapchain& swapchain,
2488 uint32_t imageIndex,
2489 const VkPresentRegionKHR *pRegion,
2490 const VkPresentTimeGOOGLE *pTime,
2491 VkFence presentFence,
2492 const VkPresentModeKHR *pPresentMode,
2493 uint32_t waitSemaphoreCount,
2494 const VkSemaphore *pWaitSemaphores) {
2495
2496 VkDevice device = GetData(queue).driver_device;
2497 const auto& dispatch = GetData(queue).driver;
2498
2499 Swapchain::Image& img = swapchain.images[imageIndex];
2500 VkResult swapchain_result = VK_SUCCESS;
2501 VkResult result;
2502 int err;
2503
2504 // XXX: long standing issue: QueueSignalReleaseImageANDROID consumes the
2505 // wait semaphores, so this doesn't actually work for the multiple swapchain
2506 // case.
2507 int fence = -1;
2508 result = dispatch.QueueSignalReleaseImageANDROID(
2509 queue, waitSemaphoreCount,
2510 pWaitSemaphores, img.image, &fence);
2511 if (result != VK_SUCCESS) {
2512 ALOGE("QueueSignalReleaseImageANDROID failed: %d", result);
2513 swapchain_result = result;
2514 }
2515 if (img.release_fence >= 0)
2516 close(img.release_fence);
2517 img.release_fence = fence < 0 ? -1 : dup(fence);
2518
2519 if (swapchain.surface.swapchain_handle == HandleFromSwapchain(&swapchain)) {
2520 ANativeWindow* window = swapchain.surface.window.get();
2521 if (swapchain_result == VK_SUCCESS) {
2522
2523 if (presentFence != VK_NULL_HANDLE) {
2524 int fence_copy = fence < 0 ? -1 : dup(fence);
2525 VkImportFenceFdInfoKHR iffi = {
2526 VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
2527 nullptr,
2528 presentFence,
2529 VK_FENCE_IMPORT_TEMPORARY_BIT,
2530 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
2531 fence_copy,
2532 };
2533 if (VK_SUCCESS != dispatch.ImportFenceFdKHR(device, &iffi) && fence_copy >= 0) {
2534 // ImportFenceFdKHR takes ownership only if it succeeds
2535 close(fence_copy);
2536 }
2537 }
2538
2539 if (pRegion) {
2540 SetSwapchainSurfaceDamage(window, pRegion);
2541 }
2542 if (pTime) {
2543 SetSwapchainFrameTimestamp(swapchain, pTime);
2544 }
2545 if (pPresentMode) {
2546 if (!SetSwapchainPresentMode(window, *pPresentMode))
2547 swapchain_result = WorstPresentResult(swapchain_result,
2548 VK_ERROR_SURFACE_LOST_KHR);
2549 }
2550
2551 err = window->queueBuffer(window, img.buffer.get(), fence);
2552 // queueBuffer always closes fence, even on error
2553 if (err != android::OK) {
2554 ALOGE("queueBuffer failed: %s (%d)", strerror(-err), err);
2555 swapchain_result = WorstPresentResult(
2556 swapchain_result, VK_ERROR_SURFACE_LOST_KHR);
2557 } else {
2558 if (img.dequeue_fence >= 0) {
2559 close(img.dequeue_fence);
2560 img.dequeue_fence = -1;
2561 }
2562 img.dequeued = false;
2563 }
2564
2565 // If the swapchain is in shared mode, immediately dequeue the
2566 // buffer so it can be presented again without an intervening
2567 // call to AcquireNextImageKHR. We expect to get the same buffer
2568 // back from every call to dequeueBuffer in this mode.
2569 if (swapchain.shared && swapchain_result == VK_SUCCESS) {
2570 ANativeWindowBuffer* buffer;
2571 int fence_fd;
2572 err = window->dequeueBuffer(window, &buffer, &fence_fd);
2573 if (err != android::OK) {
2574 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), err);
2575 swapchain_result = WorstPresentResult(swapchain_result,
2576 VK_ERROR_SURFACE_LOST_KHR);
2577 } else if (img.buffer != buffer) {
2578 ALOGE("got wrong image back for shared swapchain");
2579 swapchain_result = WorstPresentResult(swapchain_result,
2580 VK_ERROR_SURFACE_LOST_KHR);
2581 } else {
2582 img.dequeue_fence = fence_fd;
2583 img.dequeued = true;
2584 }
2585 }
2586 }
2587 if (swapchain_result != VK_SUCCESS) {
2588 OrphanSwapchain(device, &swapchain);
2589 }
2590 // Android will only return VK_SUBOPTIMAL_KHR for vkQueuePresentKHR,
2591 // and only when the window's transform/rotation changes. Extent
2592 // changes will not cause VK_SUBOPTIMAL_KHR because of the
2593 // application issues that were caused when the following transform
2594 // change was added.
2595 int window_transform_hint;
2596 err = window->query(window, NATIVE_WINDOW_TRANSFORM_HINT,
2597 &window_transform_hint);
2598 if (err != android::OK) {
2599 ALOGE("NATIVE_WINDOW_TRANSFORM_HINT query failed: %s (%d)",
2600 strerror(-err), err);
2601 swapchain_result = WorstPresentResult(
2602 swapchain_result, VK_ERROR_SURFACE_LOST_KHR);
2603 }
2604 if (swapchain.pre_transform != window_transform_hint) {
2605 swapchain_result =
2606 WorstPresentResult(swapchain_result, VK_SUBOPTIMAL_KHR);
2607 }
2608 } else {
2609 ReleaseSwapchainImage(device, swapchain.shared, nullptr, fence,
2610 img, true);
2611 swapchain_result = VK_ERROR_OUT_OF_DATE_KHR;
2612 }
2613
2614 return swapchain_result;
2615 }
2616
2617 VKAPI_ATTR
2618 VkResult QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* present_info) {
2619 ATRACE_CALL();
2620
2621 ALOGV_IF(present_info->sType != VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
2622 "vkQueuePresentKHR: invalid VkPresentInfoKHR structure type %d",
2623 present_info->sType);
2624
2625 VkResult final_result = VK_SUCCESS;
2626
2627 // Look at the pNext chain for supported extension structs:
2628 const VkPresentRegionsKHR* present_regions = nullptr;
2629 const VkPresentTimesInfoGOOGLE* present_times = nullptr;
2630 const VkSwapchainPresentFenceInfoEXT* present_fences = nullptr;
2631 const VkSwapchainPresentModeInfoEXT* present_modes = nullptr;
2632
2633 const VkPresentRegionsKHR* next =
2634 reinterpret_cast<const VkPresentRegionsKHR*>(present_info->pNext);
2635 while (next) {
2636 switch (next->sType) {
2637 case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR:
2638 present_regions = next;
2639 break;
2640 case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE:
2641 present_times =
2642 reinterpret_cast<const VkPresentTimesInfoGOOGLE*>(next);
2643 break;
2644 case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT:
2645 present_fences =
2646 reinterpret_cast<const VkSwapchainPresentFenceInfoEXT*>(next);
2647 break;
2648 case VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT:
2649 present_modes =
2650 reinterpret_cast<const VkSwapchainPresentModeInfoEXT*>(next);
2651 break;
2652 default:
2653 ALOGV("QueuePresentKHR ignoring unrecognized pNext->sType = %x",
2654 next->sType);
2655 break;
2656 }
2657 next = reinterpret_cast<const VkPresentRegionsKHR*>(next->pNext);
2658 }
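    // Illustrative sketch (not part of this driver; width/height are
    // placeholders): an application using VK_KHR_incremental_present would
    // reach the present_regions branch above with a chain along these lines:
    //
    //   VkRectLayerKHR rect = {{0, 0}, {width, height}, /*layer*/ 0};
    //   VkPresentRegionKHR region = {.rectangleCount = 1, .pRectangles = &rect};
    //   VkPresentRegionsKHR regions_info = {
    //       .sType = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
    //       .swapchainCount = 1,
    //       .pRegions = &region,
    //   };
    //   VkPresentInfoKHR present = { /* ... */ };
    //   present.pNext = &regions_info;
    //   vkQueuePresentKHR(queue, &present);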
2659 ALOGV_IF(
2660 present_regions &&
2661 present_regions->swapchainCount != present_info->swapchainCount,
2662 "VkPresentRegions::swapchainCount != VkPresentInfo::swapchainCount");
2663 ALOGV_IF(present_times &&
2664 present_times->swapchainCount != present_info->swapchainCount,
2665 "VkPresentTimesInfoGOOGLE::swapchainCount != "
2666 "VkPresentInfo::swapchainCount");
2667 ALOGV_IF(present_fences &&
2668 present_fences->swapchainCount != present_info->swapchainCount,
2669 "VkSwapchainPresentFenceInfoEXT::swapchainCount != "
2670 "VkPresentInfo::swapchainCount");
2671 ALOGV_IF(present_modes &&
2672 present_modes->swapchainCount != present_info->swapchainCount,
2673 "VkSwapchainPresentModeInfoEXT::swapchainCount != "
2674 "VkPresentInfo::swapchainCount");
2675
2676 const VkPresentRegionKHR* regions =
2677 (present_regions) ? present_regions->pRegions : nullptr;
2678 const VkPresentTimeGOOGLE* times =
2679 (present_times) ? present_times->pTimes : nullptr;
2680
2681 for (uint32_t sc = 0; sc < present_info->swapchainCount; sc++) {
2682 Swapchain& swapchain =
2683 *SwapchainFromHandle(present_info->pSwapchains[sc]);
2684
2685 VkResult swapchain_result = PresentOneSwapchain(
2686 queue,
2687 swapchain,
2688 present_info->pImageIndices[sc],
2689             (regions && !swapchain.mailbox_mode) ? &regions[sc] : nullptr,
2690             times ? &times[sc] : nullptr,
2691 present_fences ? present_fences->pFences[sc] : VK_NULL_HANDLE,
2692 present_modes ? &present_modes->pPresentModes[sc] : nullptr,
2693 present_info->waitSemaphoreCount,
2694 present_info->pWaitSemaphores);
2695
2696 if (present_info->pResults)
2697 present_info->pResults[sc] = swapchain_result;
2698
2699 if (swapchain_result != final_result)
2700 final_result = WorstPresentResult(final_result, swapchain_result);
2701 }
2702
2703 return final_result;
2704 }
2705
2706 VKAPI_ATTR
2707 VkResult GetRefreshCycleDurationGOOGLE(
2708 VkDevice,
2709 VkSwapchainKHR swapchain_handle,
2710 VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) {
2711 ATRACE_CALL();
2712
2713 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2714 VkResult result = swapchain.get_refresh_duration(pDisplayTimingProperties->refreshDuration);
2715
2716 return result;
2717 }
2718
2719 VKAPI_ATTR
2720 VkResult GetPastPresentationTimingGOOGLE(
2721 VkDevice,
2722 VkSwapchainKHR swapchain_handle,
2723 uint32_t* count,
2724 VkPastPresentationTimingGOOGLE* timings) {
2725 ATRACE_CALL();
2726
2727 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2728 if (swapchain.surface.swapchain_handle != swapchain_handle) {
2729 return VK_ERROR_OUT_OF_DATE_KHR;
2730 }
2731
2732 ANativeWindow* window = swapchain.surface.window.get();
2733 VkResult result = VK_SUCCESS;
2734
2735 if (!swapchain.frame_timestamps_enabled) {
2736 ALOGV("Calling native_window_enable_frame_timestamps(true)");
2737 native_window_enable_frame_timestamps(window, true);
2738 swapchain.frame_timestamps_enabled = true;
2739 }
2740
2741 if (timings) {
2742 // Get the latest ready timing count before copying, since the copied
2743 // timing info will be erased in copy_ready_timings function.
2744 uint32_t n = get_num_ready_timings(swapchain);
2745 copy_ready_timings(swapchain, count, timings);
2746 // Check the *count here against the recorded ready timing count, since
2747         // *count can be overwritten, as the spec allows.
2748 if (*count < n) {
2749 result = VK_INCOMPLETE;
2750 }
2751 } else {
2752 *count = get_num_ready_timings(swapchain);
2753 }
2754
2755 return result;
2756 }
2757
2758 VKAPI_ATTR
2759 VkResult GetSwapchainStatusKHR(
2760 VkDevice,
2761 VkSwapchainKHR swapchain_handle) {
2762 ATRACE_CALL();
2763
2764 Swapchain& swapchain = *SwapchainFromHandle(swapchain_handle);
2765 VkResult result = VK_SUCCESS;
2766
2767 if (swapchain.surface.swapchain_handle != swapchain_handle) {
2768 return VK_ERROR_OUT_OF_DATE_KHR;
2769 }
2770
2771 // TODO(b/143296009): Implement this function properly
2772
2773 return result;
2774 }
2775
2776 VKAPI_ATTR void SetHdrMetadataEXT(
2777 VkDevice,
2778 uint32_t swapchainCount,
2779 const VkSwapchainKHR* pSwapchains,
2780 const VkHdrMetadataEXT* pHdrMetadataEXTs) {
2781 ATRACE_CALL();
2782
2783 for (uint32_t idx = 0; idx < swapchainCount; idx++) {
2784 Swapchain* swapchain = SwapchainFromHandle(pSwapchains[idx]);
2785 if (!swapchain)
2786 continue;
2787
2788 if (swapchain->surface.swapchain_handle != pSwapchains[idx]) continue;
2789
2790 ANativeWindow* window = swapchain->surface.window.get();
2791
2792 VkHdrMetadataEXT vulkanMetadata = pHdrMetadataEXTs[idx];
2793         const android_smpte2086_metadata smpteMetadata = {
2794 {vulkanMetadata.displayPrimaryRed.x,
2795 vulkanMetadata.displayPrimaryRed.y},
2796 {vulkanMetadata.displayPrimaryGreen.x,
2797 vulkanMetadata.displayPrimaryGreen.y},
2798 {vulkanMetadata.displayPrimaryBlue.x,
2799 vulkanMetadata.displayPrimaryBlue.y},
2800 {vulkanMetadata.whitePoint.x, vulkanMetadata.whitePoint.y},
2801 vulkanMetadata.maxLuminance,
2802 vulkanMetadata.minLuminance};
2803         native_window_set_buffers_smpte2086_metadata(window, &smpteMetadata);
2804
2805 const android_cta861_3_metadata cta8613Metadata = {
2806 vulkanMetadata.maxContentLightLevel,
2807 vulkanMetadata.maxFrameAverageLightLevel};
2808 native_window_set_buffers_cta861_3_metadata(window, &cta8613Metadata);
2809 }
2810
2811 return;
2812 }
2813
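// Rewrites vkBindImageMemory2 bind infos that carry a
// VkBindImageMemorySwapchainInfoKHR: for each such entry the pNext chain is
// fronted with a VkNativeBufferANDROID describing the gralloc buffer backing
// that swapchain image, so the driver can bind the image to the dequeued
// buffer (the deferred-allocation path in AcquireNextImageKHR relies on the
// same mechanism). Entries without swapchain info are passed through
// unchanged.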
2814 static void InterceptBindImageMemory2(
2815 uint32_t bind_info_count,
2816 const VkBindImageMemoryInfo* bind_infos,
2817 std::vector<VkNativeBufferANDROID>* out_native_buffers,
2818 std::vector<VkBindImageMemoryInfo>* out_bind_infos) {
2819 out_native_buffers->clear();
2820 out_bind_infos->clear();
2821
2822 if (!bind_info_count)
2823 return;
2824
2825 std::unordered_set<uint32_t> intercepted_indexes;
2826
2827 for (uint32_t idx = 0; idx < bind_info_count; idx++) {
2828 auto info = reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>(
2829 bind_infos[idx].pNext);
2830 while (info &&
2831 info->sType !=
2832 VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR) {
2833 info = reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>(
2834 info->pNext);
2835 }
2836
2837 if (!info)
2838 continue;
2839
2840 ALOG_ASSERT(info->swapchain != VK_NULL_HANDLE,
2841 "swapchain handle must not be NULL");
2842 const Swapchain* swapchain = SwapchainFromHandle(info->swapchain);
2843 ALOG_ASSERT(
2844 info->imageIndex < swapchain->num_images,
2845 "imageIndex must be less than the number of images in swapchain");
2846
2847 ANativeWindowBuffer* buffer =
2848 swapchain->images[info->imageIndex].buffer.get();
2849 VkNativeBufferANDROID native_buffer = {
2850 #pragma clang diagnostic push
2851 #pragma clang diagnostic ignored "-Wold-style-cast"
2852 .sType = VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID,
2853 #pragma clang diagnostic pop
2854 .pNext = bind_infos[idx].pNext,
2855 .handle = buffer->handle,
2856 .stride = buffer->stride,
2857 .format = buffer->format,
2858 .usage = int(buffer->usage),
2859 .usage3 = buffer->usage,
2860 .ahb = ANativeWindowBuffer_getHardwareBuffer(buffer),
2861 };
2862 android_convertGralloc0To1Usage(int(buffer->usage),
2863 &native_buffer.usage2.producer,
2864 &native_buffer.usage2.consumer);
2865 // Reserve enough space to avoid letting re-allocation invalidate the
2866 // addresses of the elements inside.
2867 out_native_buffers->reserve(bind_info_count);
2868 out_native_buffers->emplace_back(native_buffer);
2869
2870 // Reserve the space now since we know how much is needed now.
2871 out_bind_infos->reserve(bind_info_count);
2872 out_bind_infos->emplace_back(bind_infos[idx]);
2873 out_bind_infos->back().pNext = &out_native_buffers->back();
2874
2875 intercepted_indexes.insert(idx);
2876 }
2877
2878 if (intercepted_indexes.empty())
2879 return;
2880
2881 for (uint32_t idx = 0; idx < bind_info_count; idx++) {
2882 if (intercepted_indexes.count(idx))
2883 continue;
2884 out_bind_infos->emplace_back(bind_infos[idx]);
2885 }
2886 }
2887
2888 VKAPI_ATTR
2889 VkResult BindImageMemory2(VkDevice device,
2890 uint32_t bindInfoCount,
2891 const VkBindImageMemoryInfo* pBindInfos) {
2892 ATRACE_CALL();
2893
2894 // out_native_buffers is for maintaining the lifecycle of the constructed
2895 // VkNativeBufferANDROID objects inside InterceptBindImageMemory2.
2896 std::vector<VkNativeBufferANDROID> out_native_buffers;
2897 std::vector<VkBindImageMemoryInfo> out_bind_infos;
2898 InterceptBindImageMemory2(bindInfoCount, pBindInfos, &out_native_buffers,
2899 &out_bind_infos);
2900 return GetData(device).driver.BindImageMemory2(
2901 device, bindInfoCount,
2902 out_bind_infos.empty() ? pBindInfos : out_bind_infos.data());
2903 }
2904
2905 VKAPI_ATTR
2906 VkResult BindImageMemory2KHR(VkDevice device,
2907 uint32_t bindInfoCount,
2908 const VkBindImageMemoryInfo* pBindInfos) {
2909 ATRACE_CALL();
2910
2911 std::vector<VkNativeBufferANDROID> out_native_buffers;
2912 std::vector<VkBindImageMemoryInfo> out_bind_infos;
2913 InterceptBindImageMemory2(bindInfoCount, pBindInfos, &out_native_buffers,
2914 &out_bind_infos);
2915 return GetData(device).driver.BindImageMemory2KHR(
2916 device, bindInfoCount,
2917 out_bind_infos.empty() ? pBindInfos : out_bind_infos.data());
2918 }
2919
2920 VKAPI_ATTR
2921 VkResult ReleaseSwapchainImagesEXT(VkDevice /*device*/,
2922 const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo) {
2923 ATRACE_CALL();
2924
2925 Swapchain& swapchain = *SwapchainFromHandle(pReleaseInfo->swapchain);
2926 ANativeWindow* window = swapchain.surface.window.get();
2927
2928 // If in shared present mode, don't actually release the image back to the BQ.
2929 // Both sides share it forever.
2930 if (swapchain.shared)
2931 return VK_SUCCESS;
2932
2933 for (uint32_t i = 0; i < pReleaseInfo->imageIndexCount; i++) {
2934 Swapchain::Image& img = swapchain.images[pReleaseInfo->pImageIndices[i]];
2935 window->cancelBuffer(window, img.buffer.get(), img.dequeue_fence);
2936
2937 // cancelBuffer has taken ownership of the dequeue fence
2938 img.dequeue_fence = -1;
2939 // if we're still holding a release fence, get rid of it now
2940 if (img.release_fence >= 0) {
2941 close(img.release_fence);
2942 img.release_fence = -1;
2943 }
2944 img.dequeued = false;
2945 }
2946
2947 return VK_SUCCESS;
2948 }
2949
2950 } // namespace driver
2951 } // namespace vulkan
2952