1 /*
2 * Copyright © 2019 Red Hat.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include "lvp_private.h"
25 #include "util/format/u_format.h"
26 #include "util/u_inlines.h"
27 #include "util/u_surface.h"
28 #include "pipe/p_state.h"
29
30 static VkResult
lvp_image_create(VkDevice _device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * alloc,VkImage * pImage)31 lvp_image_create(VkDevice _device,
32 const VkImageCreateInfo *pCreateInfo,
33 const VkAllocationCallbacks* alloc,
34 VkImage *pImage)
35 {
36 LVP_FROM_HANDLE(lvp_device, device, _device);
37 struct lvp_image *image;
38
39 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO);
40
41 image = vk_image_create(&device->vk, pCreateInfo, alloc, sizeof(*image));
42 if (image == NULL)
43 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
44
45 image->alignment = 64;
46 image->plane_count = vk_format_get_plane_count(pCreateInfo->format);
47 image->disjoint = image->plane_count > 1 &&
48 (pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT);
49
50 const struct vk_format_ycbcr_info *ycbcr_info =
51 vk_format_get_ycbcr_info(pCreateInfo->format);
52 for (unsigned p = 0; p < image->plane_count; p++) {
53 struct pipe_resource template;
54 VkFormat format = ycbcr_info ?
55 ycbcr_info->planes[p].format : pCreateInfo->format;
56 const uint8_t width_scale = ycbcr_info ?
57 ycbcr_info->planes[p].denominator_scales[0] : 1;
58 const uint8_t height_scale = ycbcr_info ?
59 ycbcr_info->planes[p].denominator_scales[1] : 1;
60 memset(&template, 0, sizeof(template));
61
62 template.screen = device->pscreen;
63 switch (pCreateInfo->imageType) {
64 case VK_IMAGE_TYPE_1D:
65 template.target = pCreateInfo->arrayLayers > 1 ? PIPE_TEXTURE_1D_ARRAY : PIPE_TEXTURE_1D;
66 break;
67 default:
68 case VK_IMAGE_TYPE_2D:
69 template.target = pCreateInfo->arrayLayers > 1 ? PIPE_TEXTURE_2D_ARRAY : PIPE_TEXTURE_2D;
70 break;
71 case VK_IMAGE_TYPE_3D:
72 template.target = PIPE_TEXTURE_3D;
73 break;
74 }
75
76 template.format = lvp_vk_format_to_pipe_format(format);
77
78 bool is_ds = util_format_is_depth_or_stencil(template.format);
79
80 if (pCreateInfo->usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) {
81 template.bind |= PIPE_BIND_RENDER_TARGET;
82 /* sampler view is needed for resolve blits */
83 if (pCreateInfo->samples > 1)
84 template.bind |= PIPE_BIND_SAMPLER_VIEW;
85 }
86
87 if (pCreateInfo->usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT) {
88 if (!is_ds)
89 template.bind |= PIPE_BIND_RENDER_TARGET;
90 else
91 template.bind |= PIPE_BIND_DEPTH_STENCIL;
92 }
93
94 if (pCreateInfo->usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
95 template.bind |= PIPE_BIND_DEPTH_STENCIL;
96
97 if (pCreateInfo->usage & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
98 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
99 template.bind |= PIPE_BIND_SAMPLER_VIEW;
100
101 if (pCreateInfo->usage & (VK_IMAGE_USAGE_STORAGE_BIT |
102 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
103 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
104 template.bind |= PIPE_BIND_SHADER_IMAGE;
105
106 template.width0 = pCreateInfo->extent.width / width_scale;
107 template.height0 = pCreateInfo->extent.height / height_scale;
108 template.depth0 = pCreateInfo->extent.depth;
109 template.array_size = pCreateInfo->arrayLayers;
110 template.last_level = pCreateInfo->mipLevels - 1;
111 template.nr_samples = pCreateInfo->samples;
112 template.nr_storage_samples = pCreateInfo->samples;
113 image->planes[p].bo = device->pscreen->resource_create_unbacked(device->pscreen,
114 &template,
115 &image->planes[p].size);
116 if (!image->planes[p].bo)
117 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
118
119 image->size += image->planes[p].size;
120 }
121 *pImage = lvp_image_to_handle(image);
122
123 return VK_SUCCESS;
124 }
125
126 struct lvp_image *
lvp_swapchain_get_image(VkSwapchainKHR swapchain,uint32_t index)127 lvp_swapchain_get_image(VkSwapchainKHR swapchain,
128 uint32_t index)
129 {
130 VkImage image = wsi_common_get_image(swapchain, index);
131 return lvp_image_from_handle(image);
132 }
133
134 static VkResult
lvp_image_from_swapchain(VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkImageSwapchainCreateInfoKHR * swapchain_info,const VkAllocationCallbacks * pAllocator,VkImage * pImage)135 lvp_image_from_swapchain(VkDevice device,
136 const VkImageCreateInfo *pCreateInfo,
137 const VkImageSwapchainCreateInfoKHR *swapchain_info,
138 const VkAllocationCallbacks *pAllocator,
139 VkImage *pImage)
140 {
141 ASSERTED struct lvp_image *swapchain_image = lvp_swapchain_get_image(swapchain_info->swapchain, 0);
142 assert(swapchain_image);
143
144 assert(swapchain_image->vk.image_type == pCreateInfo->imageType);
145
146 VkImageCreateInfo local_create_info;
147 local_create_info = *pCreateInfo;
148 local_create_info.pNext = NULL;
149 /* The following parameters are implictly selected by the wsi code. */
150 local_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
151 local_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
152 local_create_info.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
153
154 assert(!(local_create_info.usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT));
155 return lvp_image_create(device, &local_create_info, pAllocator,
156 pImage);
157 }
158
159 VKAPI_ATTR VkResult VKAPI_CALL
lvp_CreateImage(VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage)160 lvp_CreateImage(VkDevice device,
161 const VkImageCreateInfo *pCreateInfo,
162 const VkAllocationCallbacks *pAllocator,
163 VkImage *pImage)
164 {
165 const VkImageSwapchainCreateInfoKHR *swapchain_info =
166 vk_find_struct_const(pCreateInfo->pNext, IMAGE_SWAPCHAIN_CREATE_INFO_KHR);
167 if (swapchain_info && swapchain_info->swapchain != VK_NULL_HANDLE)
168 return lvp_image_from_swapchain(device, pCreateInfo, swapchain_info,
169 pAllocator, pImage);
170 return lvp_image_create(device, pCreateInfo, pAllocator,
171 pImage);
172 }
173
174 VKAPI_ATTR void VKAPI_CALL
lvp_DestroyImage(VkDevice _device,VkImage _image,const VkAllocationCallbacks * pAllocator)175 lvp_DestroyImage(VkDevice _device, VkImage _image,
176 const VkAllocationCallbacks *pAllocator)
177 {
178 LVP_FROM_HANDLE(lvp_device, device, _device);
179 LVP_FROM_HANDLE(lvp_image, image, _image);
180
181 if (!_image)
182 return;
183 for (unsigned p = 0; p < image->plane_count; p++)
184 pipe_resource_reference(&image->planes[p].bo, NULL);
185 vk_image_destroy(&device->vk, pAllocator, &image->vk);
186 }
187
188 #include "lvp_conv.h"
189 #include "util/u_sampler.h"
190 #include "util/u_inlines.h"
191
conv_depth_swiz(char swiz)192 static inline char conv_depth_swiz(char swiz) {
193 switch (swiz) {
194 case PIPE_SWIZZLE_Y:
195 case PIPE_SWIZZLE_Z:
196 return PIPE_SWIZZLE_0;
197 case PIPE_SWIZZLE_W:
198 return PIPE_SWIZZLE_1;
199 default:
200 return swiz;
201 }
202 }
203
204 static struct pipe_sampler_view *
lvp_create_samplerview(struct pipe_context * pctx,struct lvp_image_view * iv,VkFormat plane_format,unsigned image_plane)205 lvp_create_samplerview(struct pipe_context *pctx, struct lvp_image_view *iv, VkFormat plane_format, unsigned image_plane)
206 {
207 if (!iv)
208 return NULL;
209
210 struct pipe_sampler_view templ;
211 enum pipe_format pformat;
212 if (iv->vk.aspects == VK_IMAGE_ASPECT_DEPTH_BIT)
213 pformat = lvp_vk_format_to_pipe_format(plane_format);
214 else if (iv->vk.aspects == VK_IMAGE_ASPECT_STENCIL_BIT)
215 pformat = util_format_stencil_only(lvp_vk_format_to_pipe_format(plane_format));
216 else
217 pformat = lvp_vk_format_to_pipe_format(plane_format);
218 u_sampler_view_default_template(&templ,
219 iv->image->planes[image_plane].bo,
220 pformat);
221 if (iv->vk.view_type == VK_IMAGE_VIEW_TYPE_1D)
222 templ.target = PIPE_TEXTURE_1D;
223 if (iv->vk.view_type == VK_IMAGE_VIEW_TYPE_2D)
224 templ.target = PIPE_TEXTURE_2D;
225 if (iv->vk.view_type == VK_IMAGE_VIEW_TYPE_CUBE)
226 templ.target = PIPE_TEXTURE_CUBE;
227 if (iv->vk.view_type == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)
228 templ.target = PIPE_TEXTURE_CUBE_ARRAY;
229 templ.u.tex.first_layer = iv->vk.base_array_layer;
230 templ.u.tex.last_layer = iv->vk.base_array_layer + iv->vk.layer_count - 1;
231 templ.u.tex.first_level = iv->vk.base_mip_level;
232 templ.u.tex.last_level = iv->vk.base_mip_level + iv->vk.level_count - 1;
233 templ.swizzle_r = vk_conv_swizzle(iv->vk.swizzle.r, PIPE_SWIZZLE_X);
234 templ.swizzle_g = vk_conv_swizzle(iv->vk.swizzle.g, PIPE_SWIZZLE_Y);
235 templ.swizzle_b = vk_conv_swizzle(iv->vk.swizzle.b, PIPE_SWIZZLE_Z);
236 templ.swizzle_a = vk_conv_swizzle(iv->vk.swizzle.a, PIPE_SWIZZLE_W);
237
238 /* depth stencil swizzles need special handling to pass VK CTS
239 * but also for zink GL tests.
240 * piping A swizzle into R fixes GL_ALPHA depth texture mode
241 * only swizzling from R/0/1 (for alpha) fixes VK CTS tests
242 * and a bunch of zink tests.
243 */
244 if (iv->vk.aspects == VK_IMAGE_ASPECT_DEPTH_BIT ||
245 iv->vk.aspects == VK_IMAGE_ASPECT_STENCIL_BIT) {
246 templ.swizzle_r = conv_depth_swiz(templ.swizzle_r);
247 templ.swizzle_g = conv_depth_swiz(templ.swizzle_g);
248 templ.swizzle_b = conv_depth_swiz(templ.swizzle_b);
249 templ.swizzle_a = conv_depth_swiz(templ.swizzle_a);
250 }
251
252 return pctx->create_sampler_view(pctx, iv->image->planes[image_plane].bo, &templ);
253 }
254
255 static struct pipe_image_view
lvp_create_imageview(const struct lvp_image_view * iv,VkFormat plane_format,unsigned image_plane)256 lvp_create_imageview(const struct lvp_image_view *iv, VkFormat plane_format, unsigned image_plane)
257 {
258 struct pipe_image_view view = {0};
259 if (!iv)
260 return view;
261
262 view.resource = iv->image->planes[image_plane].bo;
263 if (iv->vk.aspects == VK_IMAGE_ASPECT_DEPTH_BIT)
264 view.format = lvp_vk_format_to_pipe_format(plane_format);
265 else if (iv->vk.aspects == VK_IMAGE_ASPECT_STENCIL_BIT)
266 view.format = util_format_stencil_only(lvp_vk_format_to_pipe_format(plane_format));
267 else
268 view.format = lvp_vk_format_to_pipe_format(plane_format);
269
270 if (iv->vk.view_type == VK_IMAGE_VIEW_TYPE_3D) {
271 view.u.tex.first_layer = iv->vk.storage.z_slice_offset;
272 view.u.tex.last_layer = view.u.tex.first_layer + iv->vk.storage.z_slice_count - 1;
273 } else {
274 view.u.tex.first_layer = iv->vk.base_array_layer,
275 view.u.tex.last_layer = iv->vk.base_array_layer + iv->vk.layer_count - 1;
276 }
277 view.u.tex.level = iv->vk.base_mip_level;
278 return view;
279 }
280
281 VKAPI_ATTR VkResult VKAPI_CALL
lvp_CreateImageView(VkDevice _device,const VkImageViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImageView * pView)282 lvp_CreateImageView(VkDevice _device,
283 const VkImageViewCreateInfo *pCreateInfo,
284 const VkAllocationCallbacks *pAllocator,
285 VkImageView *pView)
286 {
287 LVP_FROM_HANDLE(lvp_device, device, _device);
288 LVP_FROM_HANDLE(lvp_image, image, pCreateInfo->image);
289 struct lvp_image_view *view;
290
291 view = vk_image_view_create(&device->vk, false, pCreateInfo,
292 pAllocator, sizeof(*view));
293 if (view == NULL)
294 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
295
296 view->pformat = lvp_vk_format_to_pipe_format(view->vk.format);
297 view->image = image;
298 view->surface = NULL;
299
300 if (image->vk.aspects & (VK_IMAGE_ASPECT_DEPTH_BIT |
301 VK_IMAGE_ASPECT_STENCIL_BIT)) {
302 assert(image->plane_count == 1);
303 assert(lvp_image_aspects_to_plane(image, view->vk.aspects) == 0);
304 view->plane_count = 1;
305 view->planes[0].image_plane = 0;
306 } else {
307 /* For other formats, retrieve the plane count from the aspect mask
308 * and then walk through the aspect mask to map each image plane
309 * to its corresponding view plane
310 */
311 assert(util_bitcount(view->vk.aspects) ==
312 vk_format_get_plane_count(view->vk.format));
313 view->plane_count = 0;
314 u_foreach_bit(aspect_bit, view->vk.aspects) {
315 uint8_t image_plane = lvp_image_aspects_to_plane(image, 1u << aspect_bit);
316 view->planes[view->plane_count++].image_plane = image_plane;
317 }
318 }
319
320 simple_mtx_lock(&device->queue.lock);
321
322 for (unsigned view_plane = 0; view_plane < view->plane_count; view_plane++) {
323 const uint8_t image_plane = view->planes[view_plane].image_plane;
324 const struct vk_format_ycbcr_info *ycbcr_info =
325 vk_format_get_ycbcr_info(view->vk.format);
326 assert(ycbcr_info || view_plane == 0);
327 VkFormat plane_format = ycbcr_info ?
328 ycbcr_info->planes[view_plane].format : view->vk.format;
329
330 if (image->planes[image_plane].bo->bind & PIPE_BIND_SHADER_IMAGE) {
331 view->planes[view_plane].iv = lvp_create_imageview(view, plane_format, image_plane);
332 view->planes[view_plane].image_handle = (void *)(uintptr_t)device->queue.ctx->create_image_handle(device->queue.ctx, &view->planes[view_plane].iv);
333 }
334
335 if (image->planes[image_plane].bo->bind & PIPE_BIND_SAMPLER_VIEW) {
336 view->planes[view_plane].sv = lvp_create_samplerview(device->queue.ctx, view, plane_format, image_plane);
337 view->planes[view_plane].texture_handle = (void *)(uintptr_t)device->queue.ctx->create_texture_handle(device->queue.ctx, view->planes[view_plane].sv, NULL);
338 }
339 }
340
341 simple_mtx_unlock(&device->queue.lock);
342
343 *pView = lvp_image_view_to_handle(view);
344
345 return VK_SUCCESS;
346 }
347
348 VKAPI_ATTR void VKAPI_CALL
lvp_DestroyImageView(VkDevice _device,VkImageView _iview,const VkAllocationCallbacks * pAllocator)349 lvp_DestroyImageView(VkDevice _device, VkImageView _iview,
350 const VkAllocationCallbacks *pAllocator)
351 {
352 LVP_FROM_HANDLE(lvp_device, device, _device);
353 LVP_FROM_HANDLE(lvp_image_view, iview, _iview);
354
355 if (!_iview)
356 return;
357
358 simple_mtx_lock(&device->queue.lock);
359
360 for (uint8_t plane = 0; plane < iview->plane_count; plane++) {
361 device->queue.ctx->delete_image_handle(device->queue.ctx, (uint64_t)(uintptr_t)iview->planes[plane].image_handle);
362
363 pipe_sampler_view_reference(&iview->planes[plane].sv, NULL);
364 device->queue.ctx->delete_texture_handle(device->queue.ctx, (uint64_t)(uintptr_t)iview->planes[plane].texture_handle);
365 }
366 simple_mtx_unlock(&device->queue.lock);
367
368 pipe_surface_reference(&iview->surface, NULL);
369 vk_image_view_destroy(&device->vk, pAllocator, &iview->vk);
370 }
371
lvp_GetImageSubresourceLayout(VkDevice _device,VkImage _image,const VkImageSubresource * pSubresource,VkSubresourceLayout * pLayout)372 VKAPI_ATTR void VKAPI_CALL lvp_GetImageSubresourceLayout(
373 VkDevice _device,
374 VkImage _image,
375 const VkImageSubresource* pSubresource,
376 VkSubresourceLayout* pLayout)
377 {
378 LVP_FROM_HANDLE(lvp_device, device, _device);
379 LVP_FROM_HANDLE(lvp_image, image, _image);
380 uint64_t value;
381
382 const uint8_t p = lvp_image_aspects_to_plane(image, pSubresource->aspectMask);
383 const struct lvp_image_plane *plane = &image->planes[p];
384
385 device->pscreen->resource_get_param(device->pscreen,
386 NULL,
387 plane->bo,
388 0,
389 pSubresource->arrayLayer,
390 pSubresource->mipLevel,
391 PIPE_RESOURCE_PARAM_STRIDE,
392 0, &value);
393
394 pLayout->rowPitch = value;
395
396 device->pscreen->resource_get_param(device->pscreen,
397 NULL,
398 plane->bo,
399 0,
400 pSubresource->arrayLayer,
401 pSubresource->mipLevel,
402 PIPE_RESOURCE_PARAM_OFFSET,
403 0, &value);
404
405 pLayout->offset = value;
406
407 device->pscreen->resource_get_param(device->pscreen,
408 NULL,
409 plane->bo,
410 0,
411 pSubresource->arrayLayer,
412 pSubresource->mipLevel,
413 PIPE_RESOURCE_PARAM_LAYER_STRIDE,
414 0, &value);
415
416 if (plane->bo->target == PIPE_TEXTURE_3D) {
417 pLayout->depthPitch = value;
418 pLayout->arrayPitch = 0;
419 } else {
420 pLayout->depthPitch = 0;
421 pLayout->arrayPitch = value;
422 }
423 pLayout->offset += plane->plane_offset;
424 pLayout->size = plane->size;
425 }
426
lvp_GetImageSubresourceLayout2KHR(VkDevice _device,VkImage _image,const VkImageSubresource2KHR * pSubresource,VkSubresourceLayout2KHR * pLayout)427 VKAPI_ATTR void VKAPI_CALL lvp_GetImageSubresourceLayout2KHR(
428 VkDevice _device,
429 VkImage _image,
430 const VkImageSubresource2KHR* pSubresource,
431 VkSubresourceLayout2KHR* pLayout)
432 {
433 lvp_GetImageSubresourceLayout(_device, _image, &pSubresource->imageSubresource, &pLayout->subresourceLayout);
434 VkSubresourceHostMemcpySizeEXT *size = vk_find_struct(pLayout, SUBRESOURCE_HOST_MEMCPY_SIZE_EXT);
435 if (size)
436 size->size = pLayout->subresourceLayout.size;
437 }
438
lvp_GetDeviceImageSubresourceLayoutKHR(VkDevice _device,const VkDeviceImageSubresourceInfoKHR * pInfo,VkSubresourceLayout2KHR * pLayout)439 VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceImageSubresourceLayoutKHR(
440 VkDevice _device,
441 const VkDeviceImageSubresourceInfoKHR* pInfo,
442 VkSubresourceLayout2KHR* pLayout)
443 {
444 VkImage image;
445 /* technically supposed to be able to do this without creating an image, but that's harder */
446 if (lvp_image_create(_device, pInfo->pCreateInfo, NULL, &image) != VK_SUCCESS)
447 return;
448 lvp_GetImageSubresourceLayout2KHR(_device, image, pInfo->pSubresource, pLayout);
449 lvp_DestroyImage(_device, image, NULL);
450 }
451
lvp_CreateBuffer(VkDevice _device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)452 VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateBuffer(
453 VkDevice _device,
454 const VkBufferCreateInfo* pCreateInfo,
455 const VkAllocationCallbacks* pAllocator,
456 VkBuffer* pBuffer)
457 {
458 LVP_FROM_HANDLE(lvp_device, device, _device);
459 struct lvp_buffer *buffer;
460
461 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
462
463 /* gallium has max 32-bit buffer sizes */
464 if (pCreateInfo->size > UINT32_MAX)
465 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
466
467 buffer = vk_buffer_create(&device->vk, pCreateInfo,
468 pAllocator, sizeof(*buffer));
469 if (buffer == NULL)
470 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
471
472 {
473 struct pipe_resource template;
474 memset(&template, 0, sizeof(struct pipe_resource));
475
476 if (pCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)
477 template.bind |= PIPE_BIND_CONSTANT_BUFFER;
478
479 template.screen = device->pscreen;
480 template.target = PIPE_BUFFER;
481 template.format = PIPE_FORMAT_R8_UNORM;
482 template.width0 = buffer->vk.size;
483 template.height0 = 1;
484 template.depth0 = 1;
485 template.array_size = 1;
486 if (buffer->vk.usage & VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR)
487 template.bind |= PIPE_BIND_SAMPLER_VIEW;
488 if (buffer->vk.usage & VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR)
489 template.bind |= PIPE_BIND_SHADER_BUFFER;
490 if (buffer->vk.usage & VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR)
491 template.bind |= PIPE_BIND_SHADER_IMAGE;
492 template.flags = PIPE_RESOURCE_FLAG_DONT_OVER_ALLOCATE;
493 buffer->bo = device->pscreen->resource_create_unbacked(device->pscreen,
494 &template,
495 &buffer->total_size);
496 if (!buffer->bo) {
497 vk_free2(&device->vk.alloc, pAllocator, buffer);
498 return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
499 }
500 }
501 *pBuffer = lvp_buffer_to_handle(buffer);
502
503 return VK_SUCCESS;
504 }
505
lvp_DestroyBuffer(VkDevice _device,VkBuffer _buffer,const VkAllocationCallbacks * pAllocator)506 VKAPI_ATTR void VKAPI_CALL lvp_DestroyBuffer(
507 VkDevice _device,
508 VkBuffer _buffer,
509 const VkAllocationCallbacks* pAllocator)
510 {
511 LVP_FROM_HANDLE(lvp_device, device, _device);
512 LVP_FROM_HANDLE(lvp_buffer, buffer, _buffer);
513
514 if (!_buffer)
515 return;
516
517 char *ptr = (char*)buffer->pmem + buffer->offset;
518 if (ptr) {
519 simple_mtx_lock(&device->bda_lock);
520 struct hash_entry *he = _mesa_hash_table_search(&device->bda, ptr);
521 if (he)
522 _mesa_hash_table_remove(&device->bda, he);
523 simple_mtx_unlock(&device->bda_lock);
524 }
525 pipe_resource_reference(&buffer->bo, NULL);
526 vk_buffer_destroy(&device->vk, pAllocator, &buffer->vk);
527 }
528
lvp_GetBufferDeviceAddress(VkDevice _device,const VkBufferDeviceAddressInfo * pInfo)529 VKAPI_ATTR VkDeviceAddress VKAPI_CALL lvp_GetBufferDeviceAddress(
530 VkDevice _device,
531 const VkBufferDeviceAddressInfo* pInfo)
532 {
533 LVP_FROM_HANDLE(lvp_device, device, _device);
534 LVP_FROM_HANDLE(lvp_buffer, buffer, pInfo->buffer);
535 char *ptr = (char*)buffer->pmem + buffer->offset;
536 simple_mtx_lock(&device->bda_lock);
537 _mesa_hash_table_insert(&device->bda, ptr, buffer);
538 simple_mtx_unlock(&device->bda_lock);
539
540 return (VkDeviceAddress)(uintptr_t)ptr;
541 }
542
VKAPI_ATTR uint64_t VKAPI_CALL lvp_GetBufferOpaqueCaptureAddress(
    VkDevice                                    device,
    const VkBufferDeviceAddressInfo*            pInfo)
{
   /* Opaque capture/replay addresses are not supported; 0 is the valid
    * return when the capture-replay feature is not in use.
    */
   return 0;
}
549
VKAPI_ATTR uint64_t VKAPI_CALL lvp_GetDeviceMemoryOpaqueCaptureAddress(
    VkDevice                                    device,
    const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo)
{
   /* Opaque capture/replay addresses are not supported; 0 is the valid
    * return when the capture-replay feature is not in use.
    */
   return 0;
}
556
557 static struct pipe_sampler_view *
lvp_create_samplerview_buffer(struct pipe_context * pctx,struct lvp_buffer_view * bv)558 lvp_create_samplerview_buffer(struct pipe_context *pctx, struct lvp_buffer_view *bv)
559 {
560 if (!bv)
561 return NULL;
562
563 struct pipe_resource *bo = ((struct lvp_buffer *)bv->vk.buffer)->bo;
564 struct pipe_sampler_view templ;
565 memset(&templ, 0, sizeof(templ));
566 templ.target = PIPE_BUFFER;
567 templ.swizzle_r = PIPE_SWIZZLE_X;
568 templ.swizzle_g = PIPE_SWIZZLE_Y;
569 templ.swizzle_b = PIPE_SWIZZLE_Z;
570 templ.swizzle_a = PIPE_SWIZZLE_W;
571 templ.format = bv->pformat;
572 templ.u.buf.offset = bv->vk.offset;
573 templ.u.buf.size = bv->vk.range;
574 templ.texture = bo;
575 templ.context = pctx;
576 return pctx->create_sampler_view(pctx, bo, &templ);
577 }
578
579 static struct pipe_image_view
lvp_create_imageview_buffer(const struct lvp_buffer_view * bv)580 lvp_create_imageview_buffer(const struct lvp_buffer_view *bv)
581 {
582 struct pipe_image_view view = {0};
583 if (!bv)
584 return view;
585 view.resource = ((struct lvp_buffer *)bv->vk.buffer)->bo;
586 view.format = bv->pformat;
587 view.u.buf.offset = bv->vk.offset;
588 view.u.buf.size = bv->vk.range;
589 return view;
590 }
591
592 VKAPI_ATTR VkResult VKAPI_CALL
lvp_CreateBufferView(VkDevice _device,const VkBufferViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBufferView * pView)593 lvp_CreateBufferView(VkDevice _device,
594 const VkBufferViewCreateInfo *pCreateInfo,
595 const VkAllocationCallbacks *pAllocator,
596 VkBufferView *pView)
597 {
598 LVP_FROM_HANDLE(lvp_device, device, _device);
599 LVP_FROM_HANDLE(lvp_buffer, buffer, pCreateInfo->buffer);
600 struct lvp_buffer_view *view;
601
602 view = vk_buffer_view_create(&device->vk,
603 pCreateInfo,
604 pAllocator,
605 sizeof(*view));
606 if (!view)
607 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
608
609 view->pformat = lvp_vk_format_to_pipe_format(pCreateInfo->format);
610
611 simple_mtx_lock(&device->queue.lock);
612
613 if (buffer->bo->bind & PIPE_BIND_SAMPLER_VIEW) {
614 view->sv = lvp_create_samplerview_buffer(device->queue.ctx, view);
615 view->texture_handle = (void *)(uintptr_t)device->queue.ctx->create_texture_handle(device->queue.ctx, view->sv, NULL);
616 }
617
618 if (buffer->bo->bind & PIPE_BIND_SHADER_IMAGE) {
619 view->iv = lvp_create_imageview_buffer(view);
620 view->image_handle = (void *)(uintptr_t)device->queue.ctx->create_image_handle(device->queue.ctx, &view->iv);
621 }
622
623 simple_mtx_unlock(&device->queue.lock);
624
625 *pView = lvp_buffer_view_to_handle(view);
626
627 return VK_SUCCESS;
628 }
629
630 VKAPI_ATTR void VKAPI_CALL
lvp_DestroyBufferView(VkDevice _device,VkBufferView bufferView,const VkAllocationCallbacks * pAllocator)631 lvp_DestroyBufferView(VkDevice _device, VkBufferView bufferView,
632 const VkAllocationCallbacks *pAllocator)
633 {
634 LVP_FROM_HANDLE(lvp_device, device, _device);
635 LVP_FROM_HANDLE(lvp_buffer_view, view, bufferView);
636
637 if (!bufferView)
638 return;
639
640 simple_mtx_lock(&device->queue.lock);
641
642 pipe_sampler_view_reference(&view->sv, NULL);
643 device->queue.ctx->delete_texture_handle(device->queue.ctx, (uint64_t)(uintptr_t)view->texture_handle);
644
645 device->queue.ctx->delete_image_handle(device->queue.ctx, (uint64_t)(uintptr_t)view->image_handle);
646
647 simple_mtx_unlock(&device->queue.lock);
648
649 vk_buffer_view_destroy(&device->vk, pAllocator, &view->vk);
650 }
651
652 VKAPI_ATTR VkResult VKAPI_CALL
lvp_CopyMemoryToImageEXT(VkDevice _device,const VkCopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo)653 lvp_CopyMemoryToImageEXT(VkDevice _device, const VkCopyMemoryToImageInfoEXT *pCopyMemoryToImageInfo)
654 {
655 LVP_FROM_HANDLE(lvp_device, device, _device);
656 LVP_FROM_HANDLE(lvp_image, image, pCopyMemoryToImageInfo->dstImage);
657 for (unsigned i = 0; i < pCopyMemoryToImageInfo->regionCount; i++) {
658 const VkMemoryToImageCopyEXT *copy = &pCopyMemoryToImageInfo->pRegions[i];
659 const VkImageAspectFlagBits aspects = copy->imageSubresource.aspectMask;
660 uint8_t plane = lvp_image_aspects_to_plane(image, aspects);
661 struct pipe_box box = {
662 .x = copy->imageOffset.x,
663 .y = copy->imageOffset.y,
664 .width = copy->imageExtent.width,
665 .height = copy->imageExtent.height,
666 .depth = 1,
667 };
668 switch (image->planes[plane].bo->target) {
669 case PIPE_TEXTURE_CUBE:
670 case PIPE_TEXTURE_CUBE_ARRAY:
671 case PIPE_TEXTURE_2D_ARRAY:
672 case PIPE_TEXTURE_1D_ARRAY:
673 /* these use layer */
674 box.z = copy->imageSubresource.baseArrayLayer;
675 box.depth = copy->imageSubresource.layerCount;
676 break;
677 case PIPE_TEXTURE_3D:
678 /* this uses depth */
679 box.z = copy->imageOffset.z;
680 box.depth = copy->imageExtent.depth;
681 break;
682 default:
683 break;
684 }
685
686 unsigned stride = util_format_get_stride(image->planes[plane].bo->format, copy->memoryRowLength ? copy->memoryRowLength : box.width);
687 unsigned layer_stride = util_format_get_2d_size(image->planes[plane].bo->format, stride, copy->memoryImageHeight ? copy->memoryImageHeight : box.height);
688 device->queue.ctx->texture_subdata(device->queue.ctx, image->planes[plane].bo, copy->imageSubresource.mipLevel, 0,
689 &box, copy->pHostPointer, stride, layer_stride);
690 }
691 return VK_SUCCESS;
692 }
693
694 VKAPI_ATTR VkResult VKAPI_CALL
lvp_CopyImageToMemoryEXT(VkDevice _device,const VkCopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo)695 lvp_CopyImageToMemoryEXT(VkDevice _device, const VkCopyImageToMemoryInfoEXT *pCopyImageToMemoryInfo)
696 {
697 LVP_FROM_HANDLE(lvp_device, device, _device);
698 LVP_FROM_HANDLE(lvp_image, image, pCopyImageToMemoryInfo->srcImage);
699
700 for (unsigned i = 0; i < pCopyImageToMemoryInfo->regionCount; i++) {
701 const VkImageToMemoryCopyEXT *copy = &pCopyImageToMemoryInfo->pRegions[i];
702
703 const VkImageAspectFlagBits aspects = copy->imageSubresource.aspectMask;
704 uint8_t plane = lvp_image_aspects_to_plane(image, aspects);
705
706 struct pipe_box box = {
707 .x = copy->imageOffset.x,
708 .y = copy->imageOffset.y,
709 .width = copy->imageExtent.width,
710 .height = copy->imageExtent.height,
711 .depth = 1,
712 };
713 switch (image->planes[plane].bo->target) {
714 case PIPE_TEXTURE_CUBE:
715 case PIPE_TEXTURE_CUBE_ARRAY:
716 case PIPE_TEXTURE_2D_ARRAY:
717 case PIPE_TEXTURE_1D_ARRAY:
718 /* these use layer */
719 box.z = copy->imageSubresource.baseArrayLayer;
720 box.depth = copy->imageSubresource.layerCount;
721 break;
722 case PIPE_TEXTURE_3D:
723 /* this uses depth */
724 box.z = copy->imageOffset.z;
725 box.depth = copy->imageExtent.depth;
726 break;
727 default:
728 break;
729 }
730 struct pipe_transfer *xfer;
731 uint8_t *data = device->queue.ctx->texture_map(device->queue.ctx, image->planes[plane].bo, copy->imageSubresource.mipLevel,
732 PIPE_MAP_READ | PIPE_MAP_UNSYNCHRONIZED | PIPE_MAP_THREAD_SAFE, &box, &xfer);
733 if (!data)
734 return VK_ERROR_MEMORY_MAP_FAILED;
735
736 unsigned stride = util_format_get_stride(image->planes[plane].bo->format, copy->memoryRowLength ? copy->memoryRowLength : box.width);
737 unsigned layer_stride = util_format_get_2d_size(image->planes[plane].bo->format, stride, copy->memoryImageHeight ? copy->memoryImageHeight : box.height);
738 util_copy_box(copy->pHostPointer, image->planes[plane].bo->format, stride, layer_stride,
739 /* offsets are all zero because texture_map handles the offset */
740 0, 0, 0, box.width, box.height, box.depth, data, xfer->stride, xfer->layer_stride, 0, 0, 0);
741 pipe_texture_unmap(device->queue.ctx, xfer);
742 }
743 return VK_SUCCESS;
744 }
745
746 VKAPI_ATTR VkResult VKAPI_CALL
lvp_CopyImageToImageEXT(VkDevice _device,const VkCopyImageToImageInfoEXT * pCopyImageToImageInfo)747 lvp_CopyImageToImageEXT(VkDevice _device, const VkCopyImageToImageInfoEXT *pCopyImageToImageInfo)
748 {
749 LVP_FROM_HANDLE(lvp_device, device, _device);
750 LVP_FROM_HANDLE(lvp_image, src_image, pCopyImageToImageInfo->srcImage);
751 LVP_FROM_HANDLE(lvp_image, dst_image, pCopyImageToImageInfo->dstImage);
752
753 /* basically the same as handle_copy_image() */
754 for (unsigned i = 0; i < pCopyImageToImageInfo->regionCount; i++) {
755
756 const VkImageAspectFlagBits src_aspects = pCopyImageToImageInfo->pRegions[i].srcSubresource.aspectMask;
757 uint8_t src_plane = lvp_image_aspects_to_plane(src_image, src_aspects);
758 const VkImageAspectFlagBits dst_aspects = pCopyImageToImageInfo->pRegions[i].dstSubresource.aspectMask;
759 uint8_t dst_plane = lvp_image_aspects_to_plane(dst_image, dst_aspects);
760
761 struct pipe_box src_box;
762 src_box.x = pCopyImageToImageInfo->pRegions[i].srcOffset.x;
763 src_box.y = pCopyImageToImageInfo->pRegions[i].srcOffset.y;
764 src_box.width = pCopyImageToImageInfo->pRegions[i].extent.width;
765 src_box.height = pCopyImageToImageInfo->pRegions[i].extent.height;
766 if (src_image->planes[src_plane].bo->target == PIPE_TEXTURE_3D) {
767 src_box.depth = pCopyImageToImageInfo->pRegions[i].extent.depth;
768 src_box.z = pCopyImageToImageInfo->pRegions[i].srcOffset.z;
769 } else {
770 src_box.depth = pCopyImageToImageInfo->pRegions[i].srcSubresource.layerCount;
771 src_box.z = pCopyImageToImageInfo->pRegions[i].srcSubresource.baseArrayLayer;
772 }
773
774 unsigned dstz = dst_image->planes[dst_plane].bo->target == PIPE_TEXTURE_3D ?
775 pCopyImageToImageInfo->pRegions[i].dstOffset.z :
776 pCopyImageToImageInfo->pRegions[i].dstSubresource.baseArrayLayer;
777 device->queue.ctx->resource_copy_region(device->queue.ctx, dst_image->planes[dst_plane].bo,
778 pCopyImageToImageInfo->pRegions[i].dstSubresource.mipLevel,
779 pCopyImageToImageInfo->pRegions[i].dstOffset.x,
780 pCopyImageToImageInfo->pRegions[i].dstOffset.y,
781 dstz,
782 src_image->planes[src_plane].bo,
783 pCopyImageToImageInfo->pRegions[i].srcSubresource.mipLevel,
784 &src_box);
785 }
786 return VK_SUCCESS;
787 }
788
/* VK_EXT_host_image_copy host-side layout transition: lavapipe keeps all
 * images in host-visible linear storage, so there is nothing to transition.
 */
VKAPI_ATTR VkResult VKAPI_CALL
lvp_TransitionImageLayoutEXT(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT *pTransitions)
{
   /* no-op */
   return VK_SUCCESS;
}
795