/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_buffer.h"

#include "venus-protocol/vn_protocol_driver_buffer.h"
#include "venus-protocol/vn_protocol_driver_buffer_view.h"

#include "vn_android.h"
#include "vn_device.h"
#include "vn_device_memory.h"

/* buffer commands */

22 VkResult
vn_buffer_create(struct vn_device * dev,const VkBufferCreateInfo * create_info,const VkAllocationCallbacks * alloc,struct vn_buffer ** out_buf)23 vn_buffer_create(struct vn_device *dev,
24 const VkBufferCreateInfo *create_info,
25 const VkAllocationCallbacks *alloc,
26 struct vn_buffer **out_buf)
27 {
28 VkDevice device = vn_device_to_handle(dev);
29 struct vn_buffer *buf = NULL;
30 VkBuffer buffer = VK_NULL_HANDLE;
31 VkResult result;
32
33 buf = vk_zalloc(alloc, sizeof(*buf), VN_DEFAULT_ALIGN,
34 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
35 if (!buf)
36 return VK_ERROR_OUT_OF_HOST_MEMORY;
37
38 vn_object_base_init(&buf->base, VK_OBJECT_TYPE_BUFFER, &dev->base);
39
40 buffer = vn_buffer_to_handle(buf);
41 /* TODO async */
42 result = vn_call_vkCreateBuffer(dev->instance, device, create_info, NULL,
43 &buffer);
44 if (result != VK_SUCCESS) {
45 vn_object_base_fini(&buf->base);
46 vk_free(alloc, buf);
47 return result;
48 }
49
50 /* TODO add a per-device cache for the requirements */
51 buf->memory_requirements.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
52 buf->memory_requirements.pNext = &buf->dedicated_requirements;
53 buf->dedicated_requirements.sType =
54 VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
55 buf->dedicated_requirements.pNext = NULL;
56
57 vn_call_vkGetBufferMemoryRequirements2(
58 dev->instance, device,
59 &(VkBufferMemoryRequirementsInfo2){
60 .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
61 .buffer = buffer,
62 },
63 &buf->memory_requirements);
64
65 *out_buf = buf;
66
67 return VK_SUCCESS;
68 }
70 VkResult
vn_CreateBuffer(VkDevice device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)71 vn_CreateBuffer(VkDevice device,
72 const VkBufferCreateInfo *pCreateInfo,
73 const VkAllocationCallbacks *pAllocator,
74 VkBuffer *pBuffer)
75 {
76 struct vn_device *dev = vn_device_from_handle(device);
77 const VkAllocationCallbacks *alloc =
78 pAllocator ? pAllocator : &dev->base.base.alloc;
79 struct vn_buffer *buf = NULL;
80 VkResult result;
81
82 const VkExternalMemoryBufferCreateInfo *external_info =
83 vk_find_struct_const(pCreateInfo->pNext,
84 EXTERNAL_MEMORY_BUFFER_CREATE_INFO);
85 const bool ahb_info =
86 external_info &&
87 external_info->handleTypes ==
88 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
89
90 if (ahb_info)
91 result = vn_android_buffer_from_ahb(dev, pCreateInfo, alloc, &buf);
92 else
93 result = vn_buffer_create(dev, pCreateInfo, alloc, &buf);
94
95 if (result != VK_SUCCESS)
96 return vn_error(dev->instance, result);
97
98 *pBuffer = vn_buffer_to_handle(buf);
99
100 return VK_SUCCESS;
101 }
103 void
vn_DestroyBuffer(VkDevice device,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)104 vn_DestroyBuffer(VkDevice device,
105 VkBuffer buffer,
106 const VkAllocationCallbacks *pAllocator)
107 {
108 struct vn_device *dev = vn_device_from_handle(device);
109 struct vn_buffer *buf = vn_buffer_from_handle(buffer);
110 const VkAllocationCallbacks *alloc =
111 pAllocator ? pAllocator : &dev->base.base.alloc;
112
113 if (!buf)
114 return;
115
116 vn_async_vkDestroyBuffer(dev->instance, device, buffer, NULL);
117
118 vn_object_base_fini(&buf->base);
119 vk_free(alloc, buf);
120 }
122 VkDeviceAddress
vn_GetBufferDeviceAddress(VkDevice device,const VkBufferDeviceAddressInfo * pInfo)123 vn_GetBufferDeviceAddress(VkDevice device,
124 const VkBufferDeviceAddressInfo *pInfo)
125 {
126 struct vn_device *dev = vn_device_from_handle(device);
127
128 return vn_call_vkGetBufferDeviceAddress(dev->instance, device, pInfo);
129 }
131 uint64_t
vn_GetBufferOpaqueCaptureAddress(VkDevice device,const VkBufferDeviceAddressInfo * pInfo)132 vn_GetBufferOpaqueCaptureAddress(VkDevice device,
133 const VkBufferDeviceAddressInfo *pInfo)
134 {
135 struct vn_device *dev = vn_device_from_handle(device);
136
137 return vn_call_vkGetBufferOpaqueCaptureAddress(dev->instance, device,
138 pInfo);
139 }
141 void
vn_GetBufferMemoryRequirements(VkDevice device,VkBuffer buffer,VkMemoryRequirements * pMemoryRequirements)142 vn_GetBufferMemoryRequirements(VkDevice device,
143 VkBuffer buffer,
144 VkMemoryRequirements *pMemoryRequirements)
145 {
146 const struct vn_buffer *buf = vn_buffer_from_handle(buffer);
147
148 *pMemoryRequirements = buf->memory_requirements.memoryRequirements;
149 }
151 void
vn_GetBufferMemoryRequirements2(VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)152 vn_GetBufferMemoryRequirements2(VkDevice device,
153 const VkBufferMemoryRequirementsInfo2 *pInfo,
154 VkMemoryRequirements2 *pMemoryRequirements)
155 {
156 const struct vn_buffer *buf = vn_buffer_from_handle(pInfo->buffer);
157 union {
158 VkBaseOutStructure *pnext;
159 VkMemoryRequirements2 *two;
160 VkMemoryDedicatedRequirements *dedicated;
161 } u = { .two = pMemoryRequirements };
162
163 while (u.pnext) {
164 switch (u.pnext->sType) {
165 case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
166 u.two->memoryRequirements =
167 buf->memory_requirements.memoryRequirements;
168 break;
169 case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
170 u.dedicated->prefersDedicatedAllocation =
171 buf->dedicated_requirements.prefersDedicatedAllocation;
172 u.dedicated->requiresDedicatedAllocation =
173 buf->dedicated_requirements.requiresDedicatedAllocation;
174 break;
175 default:
176 break;
177 }
178 u.pnext = u.pnext->pNext;
179 }
180 }
182 VkResult
vn_BindBufferMemory(VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)183 vn_BindBufferMemory(VkDevice device,
184 VkBuffer buffer,
185 VkDeviceMemory memory,
186 VkDeviceSize memoryOffset)
187 {
188 struct vn_device *dev = vn_device_from_handle(device);
189 struct vn_device_memory *mem = vn_device_memory_from_handle(memory);
190
191 if (mem->base_memory) {
192 memory = vn_device_memory_to_handle(mem->base_memory);
193 memoryOffset += mem->base_offset;
194 }
195
196 vn_async_vkBindBufferMemory(dev->instance, device, buffer, memory,
197 memoryOffset);
198
199 return VK_SUCCESS;
200 }
202 VkResult
vn_BindBufferMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)203 vn_BindBufferMemory2(VkDevice device,
204 uint32_t bindInfoCount,
205 const VkBindBufferMemoryInfo *pBindInfos)
206 {
207 struct vn_device *dev = vn_device_from_handle(device);
208 const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
209
210 VkBindBufferMemoryInfo *local_infos = NULL;
211 for (uint32_t i = 0; i < bindInfoCount; i++) {
212 const VkBindBufferMemoryInfo *info = &pBindInfos[i];
213 struct vn_device_memory *mem =
214 vn_device_memory_from_handle(info->memory);
215 if (!mem->base_memory)
216 continue;
217
218 if (!local_infos) {
219 const size_t size = sizeof(*local_infos) * bindInfoCount;
220 local_infos = vk_alloc(alloc, size, VN_DEFAULT_ALIGN,
221 VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
222 if (!local_infos)
223 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
224
225 memcpy(local_infos, pBindInfos, size);
226 }
227
228 local_infos[i].memory = vn_device_memory_to_handle(mem->base_memory);
229 local_infos[i].memoryOffset += mem->base_offset;
230 }
231 if (local_infos)
232 pBindInfos = local_infos;
233
234 vn_async_vkBindBufferMemory2(dev->instance, device, bindInfoCount,
235 pBindInfos);
236
237 vk_free(alloc, local_infos);
238
239 return VK_SUCCESS;
240 }

/* buffer view commands */

244 VkResult
vn_CreateBufferView(VkDevice device,const VkBufferViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBufferView * pView)245 vn_CreateBufferView(VkDevice device,
246 const VkBufferViewCreateInfo *pCreateInfo,
247 const VkAllocationCallbacks *pAllocator,
248 VkBufferView *pView)
249 {
250 struct vn_device *dev = vn_device_from_handle(device);
251 const VkAllocationCallbacks *alloc =
252 pAllocator ? pAllocator : &dev->base.base.alloc;
253
254 struct vn_buffer_view *view =
255 vk_zalloc(alloc, sizeof(*view), VN_DEFAULT_ALIGN,
256 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
257 if (!view)
258 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
259
260 vn_object_base_init(&view->base, VK_OBJECT_TYPE_BUFFER_VIEW, &dev->base);
261
262 VkBufferView view_handle = vn_buffer_view_to_handle(view);
263 vn_async_vkCreateBufferView(dev->instance, device, pCreateInfo, NULL,
264 &view_handle);
265
266 *pView = view_handle;
267
268 return VK_SUCCESS;
269 }
271 void
vn_DestroyBufferView(VkDevice device,VkBufferView bufferView,const VkAllocationCallbacks * pAllocator)272 vn_DestroyBufferView(VkDevice device,
273 VkBufferView bufferView,
274 const VkAllocationCallbacks *pAllocator)
275 {
276 struct vn_device *dev = vn_device_from_handle(device);
277 struct vn_buffer_view *view = vn_buffer_view_from_handle(bufferView);
278 const VkAllocationCallbacks *alloc =
279 pAllocator ? pAllocator : &dev->base.base.alloc;
280
281 if (!view)
282 return;
283
284 vn_async_vkDestroyBufferView(dev->instance, device, bufferView, NULL);
285
286 vn_object_base_fini(&view->base);
287 vk_free(alloc, view);
288 }