/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * based in part on anv driver which is:
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "radv_private.h"

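/* Initialize a VkDeviceMemory object around an existing winsys BO. The BO may
 * be NULL when it is created or imported later (see radv_alloc_memory()).
 */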
void
radv_device_memory_init(struct radv_device_memory *mem, struct radv_device *device, struct radeon_winsys_bo *bo)
{
   memset(mem, 0, sizeof(*mem));
   vk_object_base_init(&device->vk, &mem->base, VK_OBJECT_TYPE_DEVICE_MEMORY);

   mem->bo = bo;
}

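/* Tear down the base object state only; the BO itself is owned by the caller
 * and destroyed separately (see radv_free_memory()).
 */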
void
radv_device_memory_finish(struct radv_device_memory *mem)
{
   vk_object_base_finish(&mem->base);
}

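/* Shared destruction path for vkFreeMemory and allocation-failure cleanup:
 * releases any Android hardware buffer reference, undoes the overallocation
 * accounting, drops global-BO-list residency and finally destroys the BO.
 */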
void
radv_free_memory(struct radv_device *device, const VkAllocationCallbacks *pAllocator, struct radv_device_memory *mem)
{
   if (mem == NULL)
      return;

#if RADV_SUPPORT_ANDROID_HARDWARE_BUFFER
   if (mem->android_hardware_buffer)
      AHardwareBuffer_release(mem->android_hardware_buffer);
#endif

   if (mem->bo) {
      radv_rmv_log_bo_destroy(device, mem->bo);

      if (device->overallocation_disallowed) {
         mtx_lock(&device->overallocation_mutex);
         device->allocated_memory_size[mem->heap_index] -= mem->alloc_size;
         mtx_unlock(&device->overallocation_mutex);
      }

      if (device->use_global_bo_list)
         device->ws->buffer_make_resident(device->ws, mem->bo, false);
      device->ws->buffer_destroy(device->ws, mem->bo);
      mem->bo = NULL;
   }

   radv_rmv_log_resource_destroy(device, (uint64_t)radv_device_memory_to_handle(mem));
   radv_device_memory_finish(mem);
   vk_free2(&device->vk.alloc, pAllocator, mem);
}

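/* Core implementation of vkAllocateMemory. Depending on the pNext chain this
 * imports an Android hardware buffer, imports an opaque fd/dma-buf, wraps a
 * host pointer, or creates a fresh BO in the requested memory domain.
 */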
VkResult
radv_alloc_memory(struct radv_device *device, const VkMemoryAllocateInfo *pAllocateInfo,
                  const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMem, bool is_internal)
{
   struct radv_device_memory *mem;
   VkResult result;
   enum radeon_bo_domain domain;
   uint32_t flags = 0;

   assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);

   const VkImportMemoryFdInfoKHR *import_info = vk_find_struct_const(pAllocateInfo->pNext, IMPORT_MEMORY_FD_INFO_KHR);
   const VkMemoryDedicatedAllocateInfo *dedicate_info =
      vk_find_struct_const(pAllocateInfo->pNext, MEMORY_DEDICATED_ALLOCATE_INFO);
   const VkExportMemoryAllocateInfo *export_info =
      vk_find_struct_const(pAllocateInfo->pNext, EXPORT_MEMORY_ALLOCATE_INFO);
   const struct VkImportAndroidHardwareBufferInfoANDROID *ahb_import_info =
      vk_find_struct_const(pAllocateInfo->pNext, IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID);
   const VkImportMemoryHostPointerInfoEXT *host_ptr_info =
      vk_find_struct_const(pAllocateInfo->pNext, IMPORT_MEMORY_HOST_POINTER_INFO_EXT);
   const struct VkMemoryAllocateFlagsInfo *flags_info =
      vk_find_struct_const(pAllocateInfo->pNext, MEMORY_ALLOCATE_FLAGS_INFO);

   const struct wsi_memory_allocate_info *wsi_info =
      vk_find_struct_const(pAllocateInfo->pNext, WSI_MEMORY_ALLOCATE_INFO_MESA);

   if (pAllocateInfo->allocationSize == 0 && !ahb_import_info &&
       !(export_info &&
         (export_info->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID))) {
      /* Apparently, this is allowed */
      *pMem = VK_NULL_HANDLE;
      return VK_SUCCESS;
   }

   mem = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*mem), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (mem == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   radv_device_memory_init(mem, device, NULL);

   if (dedicate_info) {
      mem->image = radv_image_from_handle(dedicate_info->image);
      mem->buffer = radv_buffer_from_handle(dedicate_info->buffer);
   } else {
      mem->image = NULL;
      mem->buffer = NULL;
   }

   if (wsi_info && wsi_info->implicit_sync) {
      flags |= RADEON_FLAG_IMPLICIT_SYNC;

      /* Mark the linear prime buffer (aka the destination of the prime
       * blit) as uncached.
       */
      if (mem->buffer)
         flags |= RADEON_FLAG_VA_UNCACHED;
   }

   float priority_float = 0.5;
   const struct VkMemoryPriorityAllocateInfoEXT *priority_ext =
      vk_find_struct_const(pAllocateInfo->pNext, MEMORY_PRIORITY_ALLOCATE_INFO_EXT);
   if (priority_ext)
      priority_float = priority_ext->priority;

   uint64_t replay_address = 0;
   const VkMemoryOpaqueCaptureAddressAllocateInfo *replay_info =
      vk_find_struct_const(pAllocateInfo->pNext, MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO);
   if (replay_info && replay_info->opaqueCaptureAddress)
      replay_address = replay_info->opaqueCaptureAddress;

   unsigned priority =
      MIN2(RADV_BO_PRIORITY_APPLICATION_MAX - 1, (int)(priority_float * RADV_BO_PRIORITY_APPLICATION_MAX));

   mem->user_ptr = NULL;

#if RADV_SUPPORT_ANDROID_HARDWARE_BUFFER
   mem->android_hardware_buffer = NULL;
#endif

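   /* Pick the allocation path: Android hardware buffer import/export first,
    * then fd (opaque fd or dma-buf) import, then host-pointer import, and
    * finally a plain BO allocation from the requested memory type.
    */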
   if (ahb_import_info) {
      result = radv_import_ahb_memory(device, mem, priority, ahb_import_info);
      if (result != VK_SUCCESS)
         goto fail;
   } else if (export_info &&
              (export_info->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
      result = radv_create_ahb_memory(device, mem, priority, pAllocateInfo);
      if (result != VK_SUCCESS)
         goto fail;
   } else if (import_info) {
      assert(import_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
             import_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
      result = device->ws->buffer_from_fd(device->ws, import_info->fd, priority, &mem->bo, NULL);
      if (result != VK_SUCCESS) {
         goto fail;
      } else {
         close(import_info->fd);
      }

      if (mem->image && mem->image->plane_count == 1 && !vk_format_is_depth_or_stencil(mem->image->vk.format) &&
          mem->image->vk.samples == 1 && mem->image->vk.tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
         struct radeon_bo_metadata metadata;
         device->ws->buffer_get_metadata(device->ws, mem->bo, &metadata);

         struct radv_image_create_info create_info = {.no_metadata_planes = true, .bo_metadata = &metadata};

         /* This gives a basic ability to import radeonsi images
          * that don't have DCC. This is not guaranteed by any
          * spec and can be removed after we support modifiers. */
         result = radv_image_create_layout(device, create_info, NULL, NULL, mem->image);
         if (result != VK_SUCCESS) {
            device->ws->buffer_destroy(device->ws, mem->bo);
            goto fail;
         }
      }
   } else if (host_ptr_info) {
      assert(host_ptr_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT);
      result = device->ws->buffer_from_ptr(device->ws, host_ptr_info->pHostPointer, pAllocateInfo->allocationSize,
                                           priority, &mem->bo);
      if (result != VK_SUCCESS) {
         goto fail;
      } else {
         mem->user_ptr = host_ptr_info->pHostPointer;
      }
   } else {
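      /* No import or export: allocate a fresh BO. The size is aligned up to
       * 4K pages, so mem->alloc_size may be larger than the application's
       * allocationSize.
       */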
      uint64_t alloc_size = align_u64(pAllocateInfo->allocationSize, 4096);
      uint32_t heap_index;

      heap_index = device->physical_device->memory_properties.memoryTypes[pAllocateInfo->memoryTypeIndex].heapIndex;
      domain = device->physical_device->memory_domains[pAllocateInfo->memoryTypeIndex];
      flags |= device->physical_device->memory_flags[pAllocateInfo->memoryTypeIndex];

      if (export_info && export_info->handleTypes) {
         /* Setting RADEON_FLAG_GTT_WC in case the bo is spilled to GTT.  This is important when the
          * foreign queue is the display engine of iGPU.  The carveout of iGPU can be tiny and the
          * kernel driver refuses to spill without the flag.
          *
          * This covers any external memory user, including WSI.
          */
         if (domain == RADEON_DOMAIN_VRAM)
            flags |= RADEON_FLAG_GTT_WC;
      } else if (!import_info) {
         /* neither export nor import */
         flags |= RADEON_FLAG_NO_INTERPROCESS_SHARING;
         if (device->use_global_bo_list) {
            flags |= RADEON_FLAG_PREFER_LOCAL_BO;
         }
      }

      if (flags_info && flags_info->flags & VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT)
         flags |= RADEON_FLAG_REPLAYABLE;

      if (device->instance->drirc.zero_vram)
         flags |= RADEON_FLAG_ZERO_VRAM;

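      /* VK_AMD_memory_overallocation_behavior: when overallocation is
       * disallowed, account every allocation against the heap size and fail
       * early instead of letting the kernel overcommit.
       */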
      if (device->overallocation_disallowed) {
         uint64_t total_size = device->physical_device->memory_properties.memoryHeaps[heap_index].size;

         mtx_lock(&device->overallocation_mutex);
         if (device->allocated_memory_size[heap_index] + alloc_size > total_size) {
            mtx_unlock(&device->overallocation_mutex);
            result = VK_ERROR_OUT_OF_DEVICE_MEMORY;
            goto fail;
         }
         device->allocated_memory_size[heap_index] += alloc_size;
         mtx_unlock(&device->overallocation_mutex);
      }

      result = device->ws->buffer_create(device->ws, alloc_size, device->physical_device->rad_info.max_alignment,
                                         domain, flags, priority, replay_address, &mem->bo);

      if (result != VK_SUCCESS) {
         if (device->overallocation_disallowed) {
            mtx_lock(&device->overallocation_mutex);
            device->allocated_memory_size[heap_index] -= alloc_size;
            mtx_unlock(&device->overallocation_mutex);
         }
         goto fail;
      }

      mem->heap_index = heap_index;
      mem->alloc_size = alloc_size;
   }

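   /* With the global BO list enabled, make the allocation resident up front
    * so individual submissions don't have to enumerate it; WSI allocations
    * are excluded here.
    */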
   if (!wsi_info) {
      if (device->use_global_bo_list) {
         result = device->ws->buffer_make_resident(device->ws, mem->bo, true);
         if (result != VK_SUCCESS)
            goto fail;
      }
   }

   *pMem = radv_device_memory_to_handle(mem);
   radv_rmv_log_heap_create(device, *pMem, is_internal, flags_info ? flags_info->flags : 0);
   return VK_SUCCESS;

fail:
   radv_free_memory(device, pAllocator, mem);

   return result;
}

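/* Public vkAllocateMemory entrypoint. is_internal is false so that RMV memory
 * traces attribute the allocation to the application rather than the driver.
 */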
VKAPI_ATTR VkResult VKAPI_CALL
radv_AllocateMemory(VkDevice _device, const VkMemoryAllocateInfo *pAllocateInfo,
                    const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMem)
{
   RADV_FROM_HANDLE(radv_device, device, _device);
   return radv_alloc_memory(device, pAllocateInfo, pAllocator, pMem, false);
}

VKAPI_ATTR void VKAPI_CALL
radv_FreeMemory(VkDevice _device, VkDeviceMemory _mem, const VkAllocationCallbacks *pAllocator)
{
   RADV_FROM_HANDLE(radv_device, device, _device);
   RADV_FROM_HANDLE(radv_device_memory, mem, _mem);

   radv_free_memory(device, pAllocator, mem);
}

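/* Host-pointer imports are already CPU-visible, so the user pointer is
 * returned directly; all other allocations are mapped through the winsys.
 */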
VKAPI_ATTR VkResult VKAPI_CALL
radv_MapMemory2KHR(VkDevice _device, const VkMemoryMapInfoKHR *pMemoryMapInfo, void **ppData)
{
   RADV_FROM_HANDLE(radv_device, device, _device);
   RADV_FROM_HANDLE(radv_device_memory, mem, pMemoryMapInfo->memory);

   if (mem->user_ptr)
      *ppData = mem->user_ptr;
   else
      *ppData = device->ws->buffer_map(mem->bo);

   if (*ppData) {
      vk_rmv_log_cpu_map(&device->vk, mem->bo->va, false);
      *ppData = (uint8_t *)*ppData + pMemoryMapInfo->offset;
      return VK_SUCCESS;
   }

   return vk_error(device, VK_ERROR_MEMORY_MAP_FAILED);
}

VKAPI_ATTR VkResult VKAPI_CALL
radv_UnmapMemory2KHR(VkDevice _device, const VkMemoryUnmapInfoKHR *pMemoryUnmapInfo)
{
   RADV_FROM_HANDLE(radv_device, device, _device);
   RADV_FROM_HANDLE(radv_device_memory, mem, pMemoryUnmapInfo->memory);

   vk_rmv_log_cpu_map(&device->vk, mem->bo->va, true);
   if (mem->user_ptr == NULL)
      device->ws->buffer_unmap(mem->bo);

   return VK_SUCCESS;
}

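/* RADV only exposes host-visible memory types that are also host-coherent,
 * so flushing and invalidating mapped ranges are no-ops.
 */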
VKAPI_ATTR VkResult VKAPI_CALL
radv_FlushMappedMemoryRanges(VkDevice _device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
radv_InvalidateMappedMemoryRanges(VkDevice _device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}

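/* The opaque capture address is simply the BO's GPU VA; combined with
 * RADEON_FLAG_REPLAYABLE and the replay_address handling in
 * radv_alloc_memory(), this lets captured allocations be replayed at the
 * same address.
 */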
VKAPI_ATTR uint64_t VKAPI_CALL
radv_GetDeviceMemoryOpaqueCaptureAddress(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo *pInfo)
{
   RADV_FROM_HANDLE(radv_device_memory, mem, pInfo->memory);
   return radv_buffer_get_va(mem->bo);
}

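/* Memory commitment is only meaningful for lazily-allocated memory types,
 * which RADV does not expose, so the committed size is always zero.
 */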
VKAPI_ATTR void VKAPI_CALL
radv_GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize *pCommittedMemoryInBytes)
{
   *pCommittedMemoryInBytes = 0;
}