/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef RS_VULKAN_CONTEXT_H
#define RS_VULKAN_CONTEXT_H

#include <atomic>
#include <functional>
#include <list>
#include <map>
#include <memory>
#include <mutex>
#include <string>
#include <vector>
#include "sync_fence.h"
#include "vulkan/vulkan_core.h"
#include "vulkan/vulkan_xeg.h"
#include "platform/ohos/backend/rs_vulkan_header_ext.h"

#define VK_NO_PROTOTYPES 1

#include "vulkan/vulkan.h"
#include "rs_vulkan_mem_statistic.h"

#include "draw/surface.h"
#include "image/gpu_context.h"
#include "rs_trace.h"

typedef enum VkSemaphoreExtTypeHUAWEI {
    VK_SEMAPHORE_EXT_TYPE_HTS_HUAWEI = 0x80000000,
    VK_SEMAPHORE_EXT_TYPE_FFTS_HUAWEI = 0x80000001,
} VkSemaphoreExtTypeHUAWEI;

typedef struct VkSemaphoreExtTypeCreateInfoHUAWEI {
    OHOS::Rosen::VkStructureTypeHUAWEI sType;
    const void* pNext;
    VkSemaphoreExtTypeHUAWEI semaphoreExtType;
    uint32_t eventId;
} VkSemaphoreExtTypeCreateInfoHUAWEI;

#ifdef USE_M133_SKIA
#include "include/gpu/vk/VulkanExtensions.h"
#include "include/gpu/vk/VulkanBackendContext.h"
#include "include/gpu/ganesh/GrDirectContext.h"
#else
#include "include/gpu/vk/GrVkExtensions.h"
#include "include/gpu/vk/GrVkBackendContext.h"
#include "include/gpu/GrDirectContext.h"
#endif

namespace OHOS {
namespace Rosen {
enum class VulkanInterfaceType : uint32_t {
    BASIC_RENDER = 0,
    PROTECTED_REDRAW,
    UNPROTECTED_REDRAW,
    MAX_INTERFACE_TYPE,
};

enum class VulkanDeviceStatus : uint32_t {
    UNINITIALIZED = 0,
    CREATE_SUCCESS,
    CREATE_FAIL,
    MAX_DEVICE_STATUS,
};
class MemoryHandler;
class RsVulkanInterface {
public:
    struct CallbackSemaphoreInfo {
        RsVulkanInterface& mVkContext;
        VkSemaphore mSemaphore;
        int mFenceFd;

        int mRefs = 2; // 2 : both skia and rs hold fence fd
        int mRSRefs = 1; // 1 : rs hold fence fd
        int m2DEngineRefs = 1; // 1 : skia or ddgr hold fence fd

        CallbackSemaphoreInfo(RsVulkanInterface& vkContext, VkSemaphore semaphore, int fenceFd)
            : mVkContext(vkContext),
              mSemaphore(semaphore),
              mFenceFd(fenceFd)
        {
        }

        static void DestroyCallbackRefs(void* context)
        {
            if (context == nullptr) {
                return;
            }
            CallbackSemaphoreInfo* info = reinterpret_cast<CallbackSemaphoreInfo*>(context);
            --info->mRefs;
            if (info->mRefs > 1 || info->mRefs < 0) {
                RS_LOGE("Destroy CallbackSemaphoreInfo error, mRefs=%{public}d", info->mRefs);
            }
            if (info->mRefs <= 0) {
                info->mVkContext.SendSemaphoreWithFd(info->mSemaphore, info->mFenceFd);
                delete info;
                info = nullptr;
            }
        }
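        // Usage sketch (illustrative only, not part of the API contract):
        // RS and the 2D engine each hold exactly one reference and release it
        // exactly once, e.g.
        //   auto* info = new CallbackSemaphoreInfo(vkInterface, semaphore, fenceFd);
        //   ... hand `info` to the 2D engine as its finished-proc context ...
        //   CallbackSemaphoreInfo::DestroyCallbackRefsFromRS(info);       // RS side done
        //   CallbackSemaphoreInfo::DestroyCallbackRefsFrom2DEngine(info); // engine side done
        // Only after both counters reach zero are the semaphore and fence fd
        // returned through SendSemaphoreWithFd() and the info deleted.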
        static void DestroyCallbackRefsFromRS(void* context)
        {
            if (context == nullptr) {
                return;
            }
            CallbackSemaphoreInfo* info = reinterpret_cast<CallbackSemaphoreInfo*>(context);
            --info->mRSRefs;
            RsVulkanInterface::callbackSemaphoreInfoRSDerefCnt_.fetch_add(+1, std::memory_order_relaxed);
            DestroyCallbackRefsInner(info);
        }

        static void DestroyCallbackRefsFrom2DEngine(void* context)
        {
            RsVulkanInterface::callbackSemaphoreInfo2DEngineCallCnt_.fetch_add(+1, std::memory_order_relaxed);
            if (context == nullptr) {
                return;
            }
            CallbackSemaphoreInfo* info = reinterpret_cast<CallbackSemaphoreInfo*>(context);
            --info->m2DEngineRefs;
            RsVulkanInterface::callbackSemaphoreInfo2DEngineDerefCnt_.fetch_add(+1, std::memory_order_relaxed);
            DestroyCallbackRefsInner(info);
        }

        static void DestroyCallbackRefsInner(CallbackSemaphoreInfo* info)
        {
            if (info == nullptr) {
                return;
            }
            if (info->mRSRefs > 1 || info->mRSRefs < 0 || info->m2DEngineRefs > 1 || info->m2DEngineRefs < 0) {
                RS_LOGE("Destroy CallbackSemaphoreInfo error, mRSRefs=%{public}d, m2DEngineRefs=%{public}d",
                    info->mRSRefs, info->m2DEngineRefs);
            }
            if (info->mRSRefs <= 0 && info->m2DEngineRefs <= 0) {
                info->mVkContext.SendSemaphoreWithFd(info->mSemaphore, info->mFenceFd);
                delete info;
                info = nullptr;
            }
        }
    };

    template <class T>
    class Func {
    public:
        using Proto = T;
        explicit Func(T proc = nullptr) : func_(proc) {}
        ~Func() { func_ = nullptr; }

        Func operator=(T proc)
        {
            func_ = proc;
            return *this;
        }

        Func operator=(PFN_vkVoidFunction proc)
        {
            func_ = reinterpret_cast<Proto>(proc);
            return *this;
        }

        operator bool() const { return func_ != nullptr; }
        operator T() const { return func_; }
    private:
        T func_;
    };

    RsVulkanInterface() {}
    ~RsVulkanInterface();
    void Init(VulkanInterfaceType vulkanInterfaceType, bool isProtected = false, bool isHtsEnable = false);
    bool CreateInstance();
    bool SelectPhysicalDevice(bool isProtected = false);
    bool CreateDevice(bool isProtected = false, bool isHtsEnable = false);
#ifdef USE_M133_SKIA
    bool CreateSkiaBackendContext(skgpu::VulkanBackendContext* context, bool isProtected = false);
#else
    bool CreateSkiaBackendContext(GrVkBackendContext* context, bool isProtected = false);
#endif
    RsVulkanMemStat& GetRsVkMemStat()
    {
        return mVkMemStat;
    }

    bool IsValid() const;
#ifdef USE_M133_SKIA
    skgpu::VulkanGetProc CreateSkiaGetProc() const;
#else
    GrVkGetProc CreateSkiaGetProc() const;
#endif
    const std::shared_ptr<MemoryHandler> GetMemoryHandler() const
    {
        return memHandler_;
    }

#define DEFINE_FUNC(name) Func<PFN_vk##name> vk##name
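    // For example, DEFINE_FUNC(CreateDevice) expands to
    //   Func<PFN_vkCreateDevice> vkCreateDevice;
    // so each entry point below is a null-checkable wrapper that can be assigned
    // directly from a vkGetInstanceProcAddr/vkGetDeviceProcAddr lookup.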
    DEFINE_FUNC(AcquireNextImageKHR);
    DEFINE_FUNC(AllocateCommandBuffers);
    DEFINE_FUNC(AllocateMemory);
    DEFINE_FUNC(BeginCommandBuffer);
    DEFINE_FUNC(BindImageMemory);
    DEFINE_FUNC(BindImageMemory2);
    DEFINE_FUNC(CmdPipelineBarrier);
    DEFINE_FUNC(CreateCommandPool);
    DEFINE_FUNC(CreateDebugReportCallbackEXT);
    DEFINE_FUNC(CreateDevice);
    DEFINE_FUNC(CreateFence);
    DEFINE_FUNC(CreateImage);
    DEFINE_FUNC(CreateImageView);
    DEFINE_FUNC(CreateInstance);
    DEFINE_FUNC(CreateSemaphore);
    DEFINE_FUNC(CreateSwapchainKHR);
    DEFINE_FUNC(DestroyCommandPool);
    DEFINE_FUNC(DestroyDebugReportCallbackEXT);
    DEFINE_FUNC(DestroyDevice);
    DEFINE_FUNC(DestroyFence);
    DEFINE_FUNC(DestroyImage);
    DEFINE_FUNC(DestroyImageView);
    DEFINE_FUNC(DestroyInstance);
    DEFINE_FUNC(DestroySemaphore);
    DEFINE_FUNC(DestroySurfaceKHR);
    DEFINE_FUNC(DestroySwapchainKHR);
    DEFINE_FUNC(DeviceWaitIdle);
    DEFINE_FUNC(EndCommandBuffer);
    DEFINE_FUNC(EnumerateDeviceExtensionProperties);
    DEFINE_FUNC(EnumerateDeviceLayerProperties);
    DEFINE_FUNC(EnumerateInstanceExtensionProperties);
    DEFINE_FUNC(EnumerateInstanceLayerProperties);
    DEFINE_FUNC(EnumeratePhysicalDevices);
    DEFINE_FUNC(FreeCommandBuffers);
    DEFINE_FUNC(FreeMemory);
    DEFINE_FUNC(GetDeviceProcAddr);
    DEFINE_FUNC(GetDeviceQueue);
    DEFINE_FUNC(GetImageMemoryRequirements);
    DEFINE_FUNC(GetInstanceProcAddr);
    DEFINE_FUNC(GetPhysicalDeviceFeatures);
    DEFINE_FUNC(GetPhysicalDeviceQueueFamilyProperties);
    DEFINE_FUNC(QueueSubmit);
    DEFINE_FUNC(QueueWaitIdle);
    DEFINE_FUNC(ResetCommandBuffer);
    DEFINE_FUNC(ResetFences);
    DEFINE_FUNC(WaitForFences);
    DEFINE_FUNC(GetPhysicalDeviceSurfaceCapabilitiesKHR);
    DEFINE_FUNC(GetPhysicalDeviceSurfaceFormatsKHR);
    DEFINE_FUNC(GetPhysicalDeviceSurfacePresentModesKHR);
    DEFINE_FUNC(GetPhysicalDeviceSurfaceSupportKHR);
    DEFINE_FUNC(GetSwapchainImagesKHR);
    DEFINE_FUNC(QueuePresentKHR);
    DEFINE_FUNC(CreateSurfaceOHOS);
    DEFINE_FUNC(GetPhysicalDeviceMemoryProperties);
    DEFINE_FUNC(GetPhysicalDeviceMemoryProperties2);
    DEFINE_FUNC(GetNativeBufferPropertiesOHOS);
    DEFINE_FUNC(QueueSignalReleaseImageOHOS);
    DEFINE_FUNC(ImportSemaphoreFdKHR);
    DEFINE_FUNC(GetPhysicalDeviceFeatures2);
    DEFINE_FUNC(SetFreqAdjustEnable);
    DEFINE_FUNC(GetSemaphoreFdKHR);
#undef DEFINE_FUNC

    VkPhysicalDevice GetPhysicalDevice() const
    {
        return physicalDevice_;
    }

    VkDevice GetDevice() const
    {
        return device_;
    }

    VkQueue GetQueue() const
    {
        return backendContext_.fQueue;
    }

#ifdef USE_M133_SKIA
    inline const skgpu::VulkanBackendContext& GetGrVkBackendContext() const noexcept
#else
    inline const GrVkBackendContext& GetGrVkBackendContext() const noexcept
#endif
    {
        return backendContext_;
    }

    inline const std::string GetVulkanVersion() const
    {
        return std::to_string(VK_API_VERSION_1_2);
    }

    std::shared_ptr<Drawing::GPUContext> CreateDrawingContext(std::string cacheDir = "");
    std::shared_ptr<Drawing::GPUContext> DoCreateDrawingContext(std::string cacheDir = "");
    std::shared_ptr<Drawing::GPUContext> GetDrawingContext();

    VulkanInterfaceType GetInterfaceType() const
    {
        return interfaceType_;
    }

    VkSemaphore RequireSemaphore();
    void SendSemaphoreWithFd(VkSemaphore semaphore, int fenceFd);
    void DestroyAllSemaphoreFence();
    VulkanDeviceStatus GetVulkanDeviceStatus();
    static std::atomic<uint64_t> callbackSemaphoreInfofdDupCnt_;
    static std::atomic<uint64_t> callbackSemaphoreInfoRSDerefCnt_;
    static std::atomic<uint64_t> callbackSemaphoreInfo2DEngineDerefCnt_;
    static std::atomic<uint64_t> callbackSemaphoreInfo2DEngineDefensiveDerefCnt_;
    static std::atomic<uint64_t> callbackSemaphoreInfoFlushCnt_;
    static std::atomic<uint64_t> callbackSemaphoreInfo2DEngineCallCnt_;
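    // Assumed flow, inferred from the declarations above (a sketch, not
    // normative): a caller obtains a semaphore with RequireSemaphore(), signals
    // it from a queue submission, then pairs it with the resulting fence fd via
    // SendSemaphoreWithFd(), which parks both in usedSemaphoreFenceList_ until
    // the fence signals, e.g.
    //   VkSemaphore sem = vkInterface.RequireSemaphore();
    //   ... use `sem` as a signal semaphore in a queue submission ...
    //   vkInterface.SendSemaphoreWithFd(sem, fenceFd); // recycled once fd signals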
    friend class RsVulkanContext;
private:
    std::mutex vkMutex_;
    std::mutex graphicsQueueMutex_;
    std::mutex hGraphicsQueueMutex_;
    static void* handle_;
    bool acquiredMandatoryProcAddresses_ = false;
    static VkInstance instance_;
    VkPhysicalDevice physicalDevice_ = VK_NULL_HANDLE;
    uint32_t graphicsQueueFamilyIndex_ = UINT32_MAX;
    VkDevice device_ = VK_NULL_HANDLE;
    VkQueue queue_ = VK_NULL_HANDLE;
    VkPhysicalDeviceFeatures2 physicalDeviceFeatures2_;
    VkPhysicalDeviceProtectedMemoryFeatures* protectedMemoryFeatures_ = nullptr;
    VkPhysicalDeviceSamplerYcbcrConversionFeatures ycbcrFeature_;
    VkPhysicalDeviceSynchronization2Features sync2Feature_;
    VkPhysicalDeviceDescriptorIndexingFeatures bindlessFeature_;
    VkPhysicalDeviceTimelineSemaphoreFeatures timelineFeature_;
    std::vector<const char*> deviceExtensions_;
    VkDeviceMemoryExclusiveThresholdHUAWEI deviceMemoryExclusiveThreshold_;
#ifdef USE_M133_SKIA
    skgpu::VulkanExtensions skVkExtensions_;
    skgpu::VulkanBackendContext backendContext_;
#else
    GrVkExtensions skVkExtensions_;
    GrVkBackendContext backendContext_;
#endif
    RsVulkanMemStat mVkMemStat;

    // static thread_local GrVkBackendContext backendContext_;
    VulkanInterfaceType interfaceType_ = VulkanInterfaceType::BASIC_RENDER;
    RsVulkanInterface(const RsVulkanInterface&) = delete;
    RsVulkanInterface& operator=(const RsVulkanInterface&) = delete;

    RsVulkanInterface(RsVulkanInterface&&) = delete;
    RsVulkanInterface& operator=(RsVulkanInterface&&) = delete;

    void SetVulkanDeviceStatus(VulkanDeviceStatus status);
    bool OpenLibraryHandle();
    bool SetupLoaderProcAddresses();
    bool CloseLibraryHandle();
    bool SetupDeviceProcAddresses(VkDevice device);
    void ConfigureFeatures(bool isProtected);
    void ConfigureExtensions();
    PFN_vkVoidFunction AcquireProc(
        const char* proc_name,
        const VkInstance& instance) const;
    PFN_vkVoidFunction AcquireProc(const char* proc_name, const VkDevice& device) const;
    std::shared_ptr<Drawing::GPUContext> CreateNewDrawingContext(bool isProtected = false);
    std::shared_ptr<MemoryHandler> memHandler_;

    struct semaphoreFence {
        VkSemaphore semaphore;
        std::unique_ptr<SyncFence> fence;
    };
    std::list<semaphoreFence> usedSemaphoreFenceList_;
    std::mutex semaphoreLock_;
    std::atomic<VulkanDeviceStatus> deviceStatus_ = VulkanDeviceStatus::UNINITIALIZED;
};

class RsVulkanContext {
public:
    class DrawContextHolder {
    public:
        DrawContextHolder(std::function<void()> callback) : destructCallback_(std::move(callback)) {}

        ~DrawContextHolder()
        {
            destructCallback_();
        }
    private:
        std::function<void()> destructCallback_;
    };
    static RsVulkanContext& GetSingleton(const std::string& cacheDir = "");
    static void ReleaseRecyclableSingleton();
    explicit RsVulkanContext(std::string cacheDir = "");
    void InitVulkanContextForHybridRender(const std::string& cacheDir);
    void InitVulkanContextForUniRender(const std::string& cacheDir);
    ~RsVulkanContext();

    RsVulkanContext(const RsVulkanContext&) = delete;
    RsVulkanContext& operator=(const RsVulkanContext&) = delete;

    RsVulkanContext(const RsVulkanContext&&) = delete;
    RsVulkanContext& operator=(const RsVulkanContext&&) = delete;

    void SetIsProtected(bool isProtected);

    RsVulkanInterface& GetRsVulkanInterface();

    bool IsValid()
    {
        return GetRsVulkanInterface().IsValid();
    }
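    // Typical render-thread usage (a sketch under the assumption that the
    // caller runs on a thread with a Vulkan-capable backend): fetch the
    // process-wide context, then the thread-local Skia GPU context, e.g.
    //   auto& vkContext = RsVulkanContext::GetSingleton();
    //   if (vkContext.IsValid()) {
    //       auto gpuContext = vkContext.GetDrawingContext();
    //       ... record and flush Drawing commands on `gpuContext` ...
    //   }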
#ifdef USE_M133_SKIA
    skgpu::VulkanGetProc CreateSkiaGetProc()
#else
    GrVkGetProc CreateSkiaGetProc()
#endif
    {
        return GetRsVulkanInterface().CreateSkiaGetProc();
    }

    RsVulkanMemStat& GetRsVkMemStat()
    {
        return GetRsVulkanInterface().GetRsVkMemStat();
    }

    VkPhysicalDevice GetPhysicalDevice()
    {
        return GetRsVulkanInterface().GetPhysicalDevice();
    }

    VkDevice GetDevice()
    {
        return GetRsVulkanInterface().GetDevice();
    }

    VkQueue GetQueue()
    {
        return GetRsVulkanInterface().GetQueue();
    }

#ifdef USE_M133_SKIA
    inline const skgpu::VulkanBackendContext& GetGrVkBackendContext() noexcept
#else
    inline const GrVkBackendContext& GetGrVkBackendContext() noexcept
#endif
    {
        return GetRsVulkanInterface().GetGrVkBackendContext();
    }

    inline const std::string GetVulkanVersion()
    {
        return std::to_string(VK_API_VERSION_1_2);
    }

    VulkanDeviceStatus GetVulkanDeviceStatus()
    {
        return GetRsVulkanInterface().GetVulkanDeviceStatus();
    }

    std::shared_ptr<Drawing::GPUContext> CreateDrawingContext();
    std::shared_ptr<Drawing::GPUContext> GetDrawingContext(const std::string& cacheDir = "");
    std::shared_ptr<Drawing::GPUContext> GetRecyclableDrawingContext(const std::string& cacheDir = "");
    static void ReleaseDrawingContextMap();
    static void ReleaseRecyclableDrawingContext();
    static void ReleaseDrawingContextForThread(int tid);

    void ClearGrContext(bool isProtected = false);

    static VKAPI_ATTR VkResult HookedVkQueueSubmit(VkQueue queue, uint32_t submitCount,
        VkSubmitInfo* pSubmits, VkFence fence);

    static VKAPI_ATTR VkResult HookedVkQueueSignalReleaseImageOHOS(VkQueue queue, uint32_t waitSemaphoreCount,
        const VkSemaphore* pWaitSemaphores, VkImage image, int32_t* pNativeFenceFd);

    const std::shared_ptr<MemoryHandler> GetMemoryHandler()
    {
        return GetRsVulkanInterface().GetMemoryHandler();
    }

    bool GetIsProtected() const;

    static bool IsRecyclable();

    static void SetRecyclable(bool isRecyclable);

    static void SaveNewDrawingContext(int tid, std::shared_ptr<Drawing::GPUContext> drawingContext);

    static bool GetIsInited();

    static bool IsRecyclableSingletonValid();

private:
    static RsVulkanContext& GetRecyclableSingleton(const std::string& cacheDir = "");
    static std::unique_ptr<RsVulkanContext>& GetRecyclableSingletonPtr(const std::string& cacheDir = "");
    static bool CheckDrawingContextRecyclable();
    static thread_local bool isProtected_;
    static thread_local VulkanInterfaceType vulkanInterfaceType_;
    std::vector<std::shared_ptr<RsVulkanInterface>> vulkanInterfaceVec_;
    // drawingContextMap_ : <tid, <drawingContext, isRecyclable>>
    static std::map<int, std::pair<std::shared_ptr<Drawing::GPUContext>, bool>> drawingContextMap_;
    static std::map<int, std::pair<std::shared_ptr<Drawing::GPUContext>, bool>> protectedDrawingContextMap_;
    static std::mutex drawingContextMutex_;
    // used for the recyclable singleton
    static std::recursive_mutex recyclableSingletonMutex_;
    static bool isRecyclable_;
    // isRecyclableSingletonValid_ : true -> has been initialized and is valid, false -> has been released
    static std::atomic<bool> isRecyclableSingletonValid_;
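    // Lifecycle sketch for the recyclable singleton (assumed from the flag
    // semantics documented above): IsRecyclableSingletonValid() lets callers
    // distinguish "initialized and usable" from "already released", e.g.
    //   if (RsVulkanContext::IsRecyclableSingletonValid()) {
    //       RsVulkanContext::ReleaseRecyclableSingleton(); // drop GPU resources
    //   }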
    // used to mark that the current process has created a Vulkan context at least once
    static std::atomic<bool> isInited_;
};

} // namespace Rosen
} // namespace OHOS

#endif