/*
 * Copyright © 2021 Collabora Ltd.
 * SPDX-License-Identifier: MIT
 */

#include "genxml/decode.h"

#include "vulkan/util/vk_util.h"

#include "panvk_device.h"
#include "panvk_device_memory.h"
#include "panvk_entrypoints.h"

#include "vk_log.h"

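/* Lazily CPU-map the memory object. The whole BO is mapped and the pointer
 * cached in mem->addr.host; returns NULL if the kernel mapping fails.
 */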
static void *
panvk_memory_mmap(struct panvk_device_memory *mem)
{
   if (!mem->addr.host) {
      void *addr = pan_kmod_bo_mmap(mem->bo, 0, pan_kmod_bo_size(mem->bo),
                                    PROT_READ | PROT_WRITE, MAP_SHARED, NULL);
      if (addr == MAP_FAILED)
         return NULL;

      mem->addr.host = addr;
   }

   return mem->addr.host;
}

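/* Drop the cached CPU mapping, if any. Safe to call on memory that was never
 * mapped.
 */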
static void
panvk_memory_munmap(struct panvk_device_memory *mem)
{
   if (mem->addr.host) {
      ASSERTED int ret =
         os_munmap((void *)mem->addr.host, pan_kmod_bo_size(mem->bo));

      assert(!ret);
      mem->addr.host = NULL;
   }
}

VKAPI_ATTR VkResult VKAPI_CALL
panvk_AllocateMemory(VkDevice _device,
                     const VkMemoryAllocateInfo *pAllocateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkDeviceMemory *pMem)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   struct panvk_instance *instance =
      to_panvk_instance(device->vk.physical->instance);
   struct panvk_device_memory *mem;
   bool can_be_exported = false;
   VkResult result;

   assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);

   const VkExportMemoryAllocateInfo *export_info =
      vk_find_struct_const(pAllocateInfo->pNext, EXPORT_MEMORY_ALLOCATE_INFO);

   if (export_info) {
      if (export_info->handleTypes &
          ~(VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT |
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT))
         return panvk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
      else if (export_info->handleTypes)
         can_be_exported = true;
   }

   mem = vk_device_memory_create(&device->vk, pAllocateInfo, pAllocator,
                                 sizeof(*mem));
   if (mem == NULL)
      return panvk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   const VkImportMemoryFdInfoKHR *fd_info =
      vk_find_struct_const(pAllocateInfo->pNext, IMPORT_MEMORY_FD_INFO_KHR);

   if (fd_info && !fd_info->handleType)
      fd_info = NULL;

   if (fd_info) {
      assert(
         fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
         fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);

      mem->bo = pan_kmod_bo_import(device->kmod.dev, fd_info->fd, 0);
      if (!mem->bo) {
         result = panvk_error(device, VK_ERROR_INVALID_EXTERNAL_HANDLE);
         goto err_destroy_mem;
      }
   } else {
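      /* BOs attached to the device's exclusive VM cannot be shared with
       * other processes, so allocations that may be exported are created
       * without one.
       */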
      mem->bo = pan_kmod_bo_alloc(device->kmod.dev,
                                  can_be_exported ? NULL : device->kmod.vm,
                                  pAllocateInfo->allocationSize, 0);
      if (!mem->bo) {
         result = panvk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
         goto err_destroy_mem;
      }
   }

   /* Always GPU-map at creation time. */
   struct pan_kmod_vm_op op = {
      .type = PAN_KMOD_VM_OP_TYPE_MAP,
      .va = {
         .start = PAN_KMOD_VM_MAP_AUTO_VA,
         .size = pan_kmod_bo_size(mem->bo),
      },
      .map = {
         .bo = mem->bo,
         .bo_offset = 0,
      },
   };

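   /* If the kernel VM does not hand out virtual addresses itself, carve a
    * range out of the userspace VA heap. Large buffers are aligned to 2 MiB,
    * presumably so the MMU can use larger block mappings for them.
    */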
   if (!(device->kmod.vm->flags & PAN_KMOD_VM_FLAG_AUTO_VA)) {
      simple_mtx_lock(&device->as.lock);
      op.va.start =
         util_vma_heap_alloc(&device->as.heap, op.va.size,
                             op.va.size > 0x200000 ? 0x200000 : 0x1000);
      simple_mtx_unlock(&device->as.lock);
      if (!op.va.start) {
         result = panvk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
         goto err_put_bo;
      }
   }

   int ret =
      pan_kmod_vm_bind(device->kmod.vm, PAN_KMOD_VM_OP_MODE_IMMEDIATE, &op, 1);
   if (ret) {
      result = panvk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
      goto err_return_va;
   }

   mem->addr.dev = op.va.start;

   if (fd_info) {
      /* From the Vulkan spec:
       *
       *    "Importing memory from a file descriptor transfers ownership of
       *    the file descriptor from the application to the Vulkan
       *    implementation. The application must not perform any operations
       *    on the file descriptor after a successful import."
       *
       * If the import fails, we leave the file descriptor open.
       */
      close(fd_info->fd);
   }

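   /* When a pandecode context is active, hand it a CPU view of the BO (only
    * created when dumping or tracing is enabled) so GPU jobs referencing this
    * memory can be decoded.
    */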
   if (device->debug.decode_ctx) {
      if (instance->debug_flags & (PANVK_DEBUG_DUMP | PANVK_DEBUG_TRACE)) {
         mem->debug.host_mapping =
            pan_kmod_bo_mmap(mem->bo, 0, pan_kmod_bo_size(mem->bo),
                             PROT_READ | PROT_WRITE, MAP_SHARED, NULL);
      }

      pandecode_inject_mmap(device->debug.decode_ctx, mem->addr.dev,
                            mem->debug.host_mapping, pan_kmod_bo_size(mem->bo),
                            NULL);
   }

   *pMem = panvk_device_memory_to_handle(mem);

   return VK_SUCCESS;

err_return_va:
   if (!(device->kmod.vm->flags & PAN_KMOD_VM_FLAG_AUTO_VA)) {
      simple_mtx_lock(&device->as.lock);
      util_vma_heap_free(&device->as.heap, op.va.start, op.va.size);
      simple_mtx_unlock(&device->as.lock);
   }

err_put_bo:
   pan_kmod_bo_put(mem->bo);

err_destroy_mem:
   vk_device_memory_destroy(&device->vk, pAllocator, &mem->vk);
   return result;
}

VKAPI_ATTR void VKAPI_CALL
panvk_FreeMemory(VkDevice _device, VkDeviceMemory _mem,
                 const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_device_memory, mem, _mem);

   if (mem == NULL)
      return;

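   /* Tear down the pandecode view of this allocation before the GPU mapping
    * and the BO go away.
    */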
   if (device->debug.decode_ctx) {
      pandecode_inject_free(device->debug.decode_ctx, mem->addr.dev,
                            pan_kmod_bo_size(mem->bo));

      if (mem->debug.host_mapping)
         os_munmap(mem->debug.host_mapping, pan_kmod_bo_size(mem->bo));
   }

   panvk_memory_munmap(mem);

   struct pan_kmod_vm_op op = {
      .type = PAN_KMOD_VM_OP_TYPE_UNMAP,
      .va = {
         .start = mem->addr.dev,
         .size = pan_kmod_bo_size(mem->bo),
      },
   };

   ASSERTED int ret =
      pan_kmod_vm_bind(device->kmod.vm, PAN_KMOD_VM_OP_MODE_IMMEDIATE, &op, 1);
   assert(!ret);

   if (!(device->kmod.vm->flags & PAN_KMOD_VM_FLAG_AUTO_VA)) {
      simple_mtx_lock(&device->as.lock);
      util_vma_heap_free(&device->as.heap, op.va.start, op.va.size);
      simple_mtx_unlock(&device->as.lock);
   }

   pan_kmod_bo_put(mem->bo);
   vk_device_memory_destroy(&device->vk, pAllocator, &mem->vk);
}

VKAPI_ATTR VkResult VKAPI_CALL
panvk_MapMemory2KHR(VkDevice _device, const VkMemoryMapInfoKHR *pMemoryMapInfo,
                    void **ppData)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_device_memory, mem, pMemoryMapInfo->memory);

   if (mem == NULL) {
      *ppData = NULL;
      return VK_SUCCESS;
   }

   const VkDeviceSize offset = pMemoryMapInfo->offset;
   const VkDeviceSize size = vk_device_memory_range(
      &mem->vk, pMemoryMapInfo->offset, pMemoryMapInfo->size);

   /* From the Vulkan spec version 1.0.32 docs for MapMemory:
    *
    *  * If size is not equal to VK_WHOLE_SIZE, size must be greater than 0
    *  * If size is not equal to VK_WHOLE_SIZE, size must be less than or
    *    equal to the size of the memory minus offset
    */
   assert(size > 0);
   assert(offset + size <= mem->bo->size);

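   /* Reject ranges whose size cannot be represented in a size_t, which can
    * happen with a 64-bit VkDeviceSize on 32-bit builds.
    */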
   if (size != (size_t)size) {
      return panvk_errorf(device, VK_ERROR_MEMORY_MAP_FAILED,
                          "requested size 0x%" PRIx64
                          " does not fit in %u bits",
                          size, (unsigned)(sizeof(size_t) * 8));
   }

   /* From the Vulkan 1.2.194 spec:
    *
    *    "memory must not be currently host mapped"
    */
   if (mem->addr.host)
      return panvk_errorf(device, VK_ERROR_MEMORY_MAP_FAILED,
                          "Memory object already mapped.");

   void *addr = panvk_memory_mmap(mem);
   if (!addr)
      return panvk_errorf(device, VK_ERROR_MEMORY_MAP_FAILED,
                          "Memory object couldn't be mapped.");

   *ppData = addr + offset;
   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
panvk_UnmapMemory2KHR(VkDevice _device,
                      const VkMemoryUnmapInfoKHR *pMemoryUnmapInfo)
{
   VK_FROM_HANDLE(panvk_device_memory, mem, pMemoryUnmapInfo->memory);

   panvk_memory_munmap(mem);

   return VK_SUCCESS;
}

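/* Flush/invalidate are no-ops: assuming every memory type panvk advertises is
 * HOST_COHERENT, there is no CPU cache maintenance to perform here.
 */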
VKAPI_ATTR VkResult VKAPI_CALL
panvk_FlushMappedMemoryRanges(VkDevice _device, uint32_t memoryRangeCount,
                              const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
panvk_InvalidateMappedMemoryRanges(VkDevice _device, uint32_t memoryRangeCount,
                                   const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
panvk_GetMemoryFdKHR(VkDevice _device, const VkMemoryGetFdInfoKHR *pGetFdInfo,
                     int *pFd)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_device_memory, memory, pGetFdInfo->memory);

   assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);

   /* At the moment, we support only the below handle types. */
   assert(
      pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
      pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);

   int prime_fd = pan_kmod_bo_export(memory->bo);
   if (prime_fd < 0)
      return panvk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);

   *pFd = prime_fd;
   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
panvk_GetMemoryFdPropertiesKHR(VkDevice _device,
                               VkExternalMemoryHandleTypeFlagBits handleType,
                               int fd,
                               VkMemoryFdPropertiesKHR *pMemoryFdProperties)
{
   assert(handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
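   /* panvk currently exposes a single memory type, so imported dma-bufs can
    * only use memory type index 0.
    */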
   pMemoryFdProperties->memoryTypeBits = 1;
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
panvk_GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
                                VkDeviceSize *pCommittedMemoryInBytes)
{
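   /* Assuming no lazily-allocated memory types are exposed, there is no
    * meaningful commitment to report.
    */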
   *pCommittedMemoryInBytes = 0;
}

VKAPI_ATTR uint64_t VKAPI_CALL
panvk_GetDeviceMemoryOpaqueCaptureAddress(
   VkDevice _device, const VkDeviceMemoryOpaqueCaptureAddressInfo *pInfo)
{
   VK_FROM_HANDLE(panvk_device_memory, memory, pInfo->memory);

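   /* The GPU VA assigned at allocation time doubles as the opaque capture
    * address.
    */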
   return memory->addr.dev;
}