1 // Copyright 2018 The SwiftShader Authors. All Rights Reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #ifndef VK_DEVICE_HPP_
16 #define VK_DEVICE_HPP_
17
#include "VkImageView.hpp"
#include "VkSampler.hpp"
#include "Reactor/Routine.hpp"
#include "System/LRUCache.hpp"

#include "marl/mutex.h"
#include "marl/tsa.h"

#include <cstdint>
#include <map>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>
30
31 namespace marl {
32 class Scheduler;
33 }
34 namespace sw {
35 class Blitter;
36 }
37
38 namespace vk {
39
40 class PhysicalDevice;
41 class Queue;
42
43 namespace dbg {
44 class Context;
45 class Server;
46 } // namespace dbg
47
48 class Device
49 {
50 public:
GetAllocationScope()51 static constexpr VkSystemAllocationScope GetAllocationScope() { return VK_SYSTEM_ALLOCATION_SCOPE_DEVICE; }
52
53 Device(const VkDeviceCreateInfo *pCreateInfo, void *mem, PhysicalDevice *physicalDevice, const VkPhysicalDeviceFeatures *enabledFeatures, const std::shared_ptr<marl::Scheduler> &scheduler);
54 void destroy(const VkAllocationCallbacks *pAllocator);
55
56 static size_t ComputeRequiredAllocationSize(const VkDeviceCreateInfo *pCreateInfo);
57
58 bool hasExtension(const char *extensionName) const;
59 VkQueue getQueue(uint32_t queueFamilyIndex, uint32_t queueIndex) const;
60 VkResult waitForFences(uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, uint64_t timeout);
61 VkResult waitForSemaphores(const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout);
62 VkResult waitIdle();
63 void getDescriptorSetLayoutSupport(const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
64 VkDescriptorSetLayoutSupport *pSupport) const;
getPhysicalDevice() const65 PhysicalDevice *getPhysicalDevice() const { return physicalDevice; }
66 void updateDescriptorSets(uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites,
67 uint32_t descriptorCopyCount, const VkCopyDescriptorSet *pDescriptorCopies);
68 void getRequirements(VkMemoryDedicatedRequirements *requirements) const;
getEnabledFeatures() const69 const VkPhysicalDeviceFeatures &getEnabledFeatures() const { return enabledFeatures; }
getBlitter() const70 sw::Blitter *getBlitter() const { return blitter.get(); }
71
72 void registerImageView(ImageView *imageView);
73 void unregisterImageView(ImageView *imageView);
74 void prepareForSampling(ImageView *imageView);
75 void contentsChanged(ImageView *imageView);
76
77 class SamplingRoutineCache
78 {
79 public:
SamplingRoutineCache()80 SamplingRoutineCache()
81 : cache(1024)
82 {}
~SamplingRoutineCache()83 ~SamplingRoutineCache() {}
84
85 struct Key
86 {
87 uint32_t instruction;
88 uint32_t sampler;
89 uint32_t imageView;
90
91 inline bool operator==(const Key &rhs) const;
92
93 struct Hash
94 {
95 inline std::size_t operator()(const Key &key) const noexcept;
96 };
97 };
98
99 // getOrCreate() queries the cache for a Routine with the given key.
100 // If one is found, it is returned, otherwise createRoutine(key) is
101 // called, the returned Routine is added to the cache, and it is
102 // returned.
103 // Function must be a function of the signature:
104 // std::shared_ptr<rr::Routine>(const Key &)
105 template<typename Function>
getOrCreate(const Key & key,Function && createRoutine)106 std::shared_ptr<rr::Routine> getOrCreate(const Key &key, Function &&createRoutine)
107 {
108 auto it = snapshot.find(key);
109 if(it != snapshot.end()) { return it->second; }
110
111 marl::lock lock(mutex);
112 if(auto existingRoutine = cache.lookup(key))
113 {
114 return existingRoutine;
115 }
116
117 std::shared_ptr<rr::Routine> newRoutine = createRoutine(key);
118 cache.add(key, newRoutine);
119 snapshotNeedsUpdate = true;
120
121 return newRoutine;
122 }
123
124 void updateSnapshot();
125
126 private:
127 bool snapshotNeedsUpdate = false;
128 std::unordered_map<Key, std::shared_ptr<rr::Routine>, Key::Hash> snapshot;
129
130 marl::mutex mutex;
131 sw::LRUCache<Key, std::shared_ptr<rr::Routine>, Key::Hash> cache GUARDED_BY(mutex);
132 };
133
134 SamplingRoutineCache *getSamplingRoutineCache() const;
135 void updateSamplingRoutineSnapshotCache();
136
137 class SamplerIndexer
138 {
139 public:
140 ~SamplerIndexer();
141
142 uint32_t index(const SamplerState &samplerState);
143 void remove(const SamplerState &samplerState);
144
145 private:
146 struct Identifier
147 {
148 uint32_t id;
149 uint32_t count; // Number of samplers sharing this state identifier.
150 };
151
152 marl::mutex mutex;
153 std::map<SamplerState, Identifier> map GUARDED_BY(mutex);
154
155 uint32_t nextID = 0;
156 };
157
158 uint32_t indexSampler(const SamplerState &samplerState);
159 void removeSampler(const SamplerState &samplerState);
160
getDebuggerContext() const161 std::shared_ptr<vk::dbg::Context> getDebuggerContext() const
162 {
163 #ifdef ENABLE_VK_DEBUGGER
164 return debugger.context;
165 #else
166 return nullptr;
167 #endif // ENABLE_VK_DEBUGGER
168 }
169
170 VkResult setDebugUtilsObjectName(const VkDebugUtilsObjectNameInfoEXT *pNameInfo);
171 VkResult setDebugUtilsObjectTag(const VkDebugUtilsObjectTagInfoEXT *pTagInfo);
172
173 #ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
174 void emitDeviceMemoryReport(VkDeviceMemoryReportEventTypeEXT type, uint64_t memoryObjectId, VkDeviceSize size, VkObjectType objectType, uint64_t objectHandle, uint32_t heapIndex = 0);
175 #endif // SWIFTSHADER_DEVICE_MEMORY_REPORT
176
177 private:
178 PhysicalDevice *const physicalDevice = nullptr;
179 Queue *const queues = nullptr;
180 uint32_t queueCount = 0;
181 std::unique_ptr<sw::Blitter> blitter;
182 uint32_t enabledExtensionCount = 0;
183 typedef char ExtensionName[VK_MAX_EXTENSION_NAME_SIZE];
184 ExtensionName *extensions = nullptr;
185 const VkPhysicalDeviceFeatures enabledFeatures = {};
186
187 std::shared_ptr<marl::Scheduler> scheduler;
188 std::unique_ptr<SamplingRoutineCache> samplingRoutineCache;
189 std::unique_ptr<SamplerIndexer> samplerIndexer;
190
191 marl::mutex imageViewSetMutex;
192 std::unordered_set<ImageView *> imageViewSet GUARDED_BY(imageViewSetMutex);
193
194 #ifdef ENABLE_VK_DEBUGGER
195 struct
196 {
197 std::shared_ptr<vk::dbg::Context> context;
198 std::shared_ptr<vk::dbg::Server> server;
199 } debugger;
200 #endif // ENABLE_VK_DEBUGGER
201
202 #ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
203 std::vector<std::pair<PFN_vkDeviceMemoryReportCallbackEXT, void *>> deviceMemoryReportCallbacks;
204 #endif // SWIFTSHADER_DEVICE_MEMORY_REPORT
205 };
206
207 using DispatchableDevice = DispatchableObject<Device, VkDevice>;
208
Cast(VkDevice object)209 static inline Device *Cast(VkDevice object)
210 {
211 return DispatchableDevice::Cast(object);
212 }
213
operator ==(const Key & rhs) const214 inline bool vk::Device::SamplingRoutineCache::Key::operator==(const Key &rhs) const
215 {
216 return instruction == rhs.instruction && sampler == rhs.sampler && imageView == rhs.imageView;
217 }
218
operator ()(const Key & key) const219 inline std::size_t vk::Device::SamplingRoutineCache::Key::Hash::operator()(const Key &key) const noexcept
220 {
221 // Combine three 32-bit integers into a 64-bit hash.
222 // 2642239 is the largest prime which when cubed is smaller than 2^64.
223 uint64_t hash = key.instruction;
224 hash = (hash * 2642239) ^ key.sampler;
225 hash = (hash * 2642239) ^ key.imageView;
226 return static_cast<std::size_t>(hash); // Truncates to 32-bits on 32-bit platforms.
227 }
228
229 } // namespace vk
230
231 #endif // VK_DEVICE_HPP_
232