/*
 * Copyright 2018 Collabora Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "zink_resource.h"

#include "zink_batch.h"
#include "zink_context.h"
#include "zink_screen.h"

#include "vulkan/wsi/wsi_common.h"

#include "util/slab.h"
#include "util/u_debug.h"
#include "util/format/u_format.h"
#include "util/u_transfer_helper.h"
#include "util/u_inlines.h"
#include "util/u_memory.h"

#include "frontend/sw_winsys.h"

#include "drm-uapi/drm_fourcc.h"
static void
zink_resource_destroy(struct pipe_screen *pscreen,
                      struct pipe_resource *pres)
{
   struct zink_screen *screen = zink_screen(pscreen);
   struct zink_resource *res = zink_resource(pres);
   if (pres->target == PIPE_BUFFER)
      vkDestroyBuffer(screen->dev, res->buffer, NULL);
   else
      vkDestroyImage(screen->dev, res->image, NULL);

   vkFreeMemory(screen->dev, res->mem, NULL);
   FREE(res);
}
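/* Pick a Vulkan memory type for an allocation: the type must be permitted by
 * the object's VkMemoryRequirements and must expose all of the requested
 * property flags (e.g. HOST_VISIBLE or DEVICE_LOCAL).
 */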
static uint32_t
get_memory_type_index(struct zink_screen *screen,
                      const VkMemoryRequirements *reqs,
                      VkMemoryPropertyFlags props)
{
   for (uint32_t i = 0u; i < VK_MAX_MEMORY_TYPES; i++) {
      if (((reqs->memoryTypeBits >> i) & 1) == 1) {
         if ((screen->info.mem_props.memoryTypes[i].propertyFlags & props) == props)
            return i;
      }
   }

   unreachable("Unsupported memory-type");
   return 0;
}
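/* Map a gallium format onto the VkImageAspectFlags that address it: depth
 * and/or stencil bits for Z/S formats, the color bit for everything else.
 */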
static VkImageAspectFlags
aspect_from_format(enum pipe_format fmt)
{
   if (util_format_is_depth_or_stencil(fmt)) {
      VkImageAspectFlags aspect = 0;
      const struct util_format_description *desc = util_format_description(fmt);
      if (util_format_has_depth(desc))
         aspect |= VK_IMAGE_ASPECT_DEPTH_BIT;
      if (util_format_has_stencil(desc))
         aspect |= VK_IMAGE_ASPECT_STENCIL_BIT;
      return aspect;
   } else
      return VK_IMAGE_ASPECT_COLOR_BIT;
}
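/* Common creation path shared by zink_resource_create() and
 * zink_resource_from_handle(): builds a VkBuffer or VkImage from the gallium
 * template, picks memory properties, allocates (or imports) and binds memory,
 * and creates a sw_winsys display target for displayable resources.
 */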
static struct pipe_resource *
resource_create(struct pipe_screen *pscreen,
                const struct pipe_resource *templ,
                struct winsys_handle *whandle,
                unsigned external_usage)
{
   struct zink_screen *screen = zink_screen(pscreen);
   struct zink_resource *res = CALLOC_STRUCT(zink_resource);

   res->base = *templ;

   pipe_reference_init(&res->base.reference, 1);
   res->base.screen = pscreen;

   VkMemoryRequirements reqs;
   VkMemoryPropertyFlags flags = 0;

   res->internal_format = templ->format;
   if (templ->target == PIPE_BUFFER) {
      VkBufferCreateInfo bci = {};
      bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
      bci.size = templ->width0;

      bci.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
                  VK_BUFFER_USAGE_TRANSFER_DST_BIT;

      if (templ->bind & PIPE_BIND_SAMPLER_VIEW)
         bci.usage |= VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;

      if (templ->bind & PIPE_BIND_VERTEX_BUFFER)
         bci.usage |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                      VK_BUFFER_USAGE_INDEX_BUFFER_BIT |
                      VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;

      if (templ->bind & PIPE_BIND_INDEX_BUFFER)
         bci.usage |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT;

      if (templ->bind & PIPE_BIND_CONSTANT_BUFFER)
         bci.usage |= VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

      if (templ->bind & PIPE_BIND_SHADER_BUFFER)
         bci.usage |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

      if (templ->bind & PIPE_BIND_COMMAND_ARGS_BUFFER)
         bci.usage |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;

      if (templ->bind == (PIPE_BIND_STREAM_OUTPUT | PIPE_BIND_CUSTOM)) {
         bci.usage |= VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT;
      } else if (templ->bind & PIPE_BIND_STREAM_OUTPUT) {
         bci.usage |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                      VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT;
      }

      if (vkCreateBuffer(screen->dev, &bci, NULL, &res->buffer) !=
          VK_SUCCESS) {
         FREE(res);
         return NULL;
      }

      vkGetBufferMemoryRequirements(screen->dev, res->buffer, &reqs);
      flags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
   } else {
      res->format = zink_get_format(screen, templ->format);

      VkImageCreateInfo ici = {};
      ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
      ici.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;

      switch (templ->target) {
      case PIPE_TEXTURE_1D:
      case PIPE_TEXTURE_1D_ARRAY:
         ici.imageType = VK_IMAGE_TYPE_1D;
         break;

      case PIPE_TEXTURE_CUBE:
      case PIPE_TEXTURE_CUBE_ARRAY:
         ici.flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
         /* fall-through */
      case PIPE_TEXTURE_2D:
      case PIPE_TEXTURE_2D_ARRAY:
      case PIPE_TEXTURE_RECT:
         ici.imageType = VK_IMAGE_TYPE_2D;
         break;

      case PIPE_TEXTURE_3D:
         ici.imageType = VK_IMAGE_TYPE_3D;
         if (templ->bind & PIPE_BIND_RENDER_TARGET)
            ici.flags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
         break;

      case PIPE_BUFFER:
         unreachable("PIPE_BUFFER should already be handled");

      default:
         unreachable("Unknown target");
      }

      ici.format = res->format;
      ici.extent.width = templ->width0;
      ici.extent.height = templ->height0;
      ici.extent.depth = templ->depth0;
      ici.mipLevels = templ->last_level + 1;
      ici.arrayLayers = MAX2(templ->array_size, 1);
      ici.samples = templ->nr_samples ? templ->nr_samples : VK_SAMPLE_COUNT_1_BIT;
      ici.tiling = templ->bind & PIPE_BIND_LINEAR ? VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;

      if (templ->target == PIPE_TEXTURE_CUBE ||
          templ->target == PIPE_TEXTURE_CUBE_ARRAY)
         ici.arrayLayers *= 6;

      if (templ->bind & PIPE_BIND_SHARED)
         ici.tiling = VK_IMAGE_TILING_LINEAR;

      if (templ->usage == PIPE_USAGE_STAGING)
         ici.tiling = VK_IMAGE_TILING_LINEAR;

      /* sadly, gallium doesn't let us know if it'll ever need this, so we have to assume */
      ici.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                  VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                  VK_IMAGE_USAGE_SAMPLED_BIT;

      if (templ->bind & PIPE_BIND_SHADER_IMAGE)
         ici.usage |= VK_IMAGE_USAGE_STORAGE_BIT;

      if (templ->bind & PIPE_BIND_RENDER_TARGET)
         ici.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

      if (templ->bind & PIPE_BIND_DEPTH_STENCIL)
         ici.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;

      if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
         ici.usage |= VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;

      if (templ->bind & PIPE_BIND_STREAM_OUTPUT)
         ici.usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;

      ici.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
      ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
      res->layout = VK_IMAGE_LAYOUT_UNDEFINED;

      struct wsi_image_create_info image_wsi_info = {
         VK_STRUCTURE_TYPE_WSI_IMAGE_CREATE_INFO_MESA,
         NULL,
         .scanout = true,
      };

      if (templ->bind & PIPE_BIND_SCANOUT)
         ici.pNext = &image_wsi_info;

      VkResult result = vkCreateImage(screen->dev, &ici, NULL, &res->image);
      if (result != VK_SUCCESS) {
         FREE(res);
         return NULL;
      }

      res->optimial_tiling = ici.tiling != VK_IMAGE_TILING_LINEAR;
      res->aspect = aspect_from_format(templ->format);

      vkGetImageMemoryRequirements(screen->dev, res->image, &reqs);
      if (templ->usage == PIPE_USAGE_STAGING ||
          (screen->winsys && (templ->bind & (PIPE_BIND_SCANOUT |
                                             PIPE_BIND_DISPLAY_TARGET |
                                             PIPE_BIND_SHARED))))
         flags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
      else
         flags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
   }

   VkMemoryAllocateInfo mai = {};
   mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
   mai.allocationSize = reqs.size;
   mai.memoryTypeIndex = get_memory_type_index(screen, &reqs, flags);

   VkExportMemoryAllocateInfo emai = {};
   if (templ->bind & PIPE_BIND_SHARED) {
      emai.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
      emai.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;

      emai.pNext = mai.pNext;
      mai.pNext = &emai;
   }

   VkImportMemoryFdInfoKHR imfi = {
      VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
      NULL,
   };

   if (whandle && whandle->type == WINSYS_HANDLE_TYPE_FD) {
      imfi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
      imfi.fd = whandle->handle;

      imfi.pNext = mai.pNext;
      emai.pNext = &imfi;
   }

   struct wsi_memory_allocate_info memory_wsi_info = {
      VK_STRUCTURE_TYPE_WSI_MEMORY_ALLOCATE_INFO_MESA,
      NULL,
   };

   if (templ->bind & PIPE_BIND_SCANOUT) {
      memory_wsi_info.implicit_sync = true;

      memory_wsi_info.pNext = mai.pNext;
      mai.pNext = &memory_wsi_info;
   }

   if (vkAllocateMemory(screen->dev, &mai, NULL, &res->mem) != VK_SUCCESS)
      goto fail;

   res->offset = 0;
   res->size = reqs.size;

   if (templ->target == PIPE_BUFFER)
      vkBindBufferMemory(screen->dev, res->buffer, res->mem, res->offset);
   else
      vkBindImageMemory(screen->dev, res->image, res->mem, res->offset);

   if (screen->winsys && (templ->bind & (PIPE_BIND_DISPLAY_TARGET |
                                         PIPE_BIND_SCANOUT |
                                         PIPE_BIND_SHARED))) {
      struct sw_winsys *winsys = screen->winsys;
      res->dt = winsys->displaytarget_create(screen->winsys,
                                             res->base.bind,
                                             res->base.format,
                                             templ->width0,
                                             templ->height0,
                                             64, NULL,
                                             &res->dt_stride);
   }

   return &res->base;

fail:
   if (templ->target == PIPE_BUFFER)
      vkDestroyBuffer(screen->dev, res->buffer, NULL);
   else
      vkDestroyImage(screen->dev, res->image, NULL);

   FREE(res);

   return NULL;
}
static struct pipe_resource *
zink_resource_create(struct pipe_screen *pscreen,
                     const struct pipe_resource *templ)
{
   return resource_create(pscreen, templ, NULL, 0);
}

static bool
zink_resource_get_handle(struct pipe_screen *pscreen,
                         struct pipe_context *context,
                         struct pipe_resource *tex,
                         struct winsys_handle *whandle,
                         unsigned usage)
{
   struct zink_resource *res = zink_resource(tex);
   struct zink_screen *screen = zink_screen(pscreen);
   VkMemoryGetFdInfoKHR fd_info = {};
   int fd;

   if (res->base.target != PIPE_BUFFER) {
      VkImageSubresource sub_res = {};
      VkSubresourceLayout sub_res_layout = {};

      sub_res.aspectMask = res->aspect;

      vkGetImageSubresourceLayout(screen->dev, res->image, &sub_res, &sub_res_layout);

      whandle->stride = sub_res_layout.rowPitch;
   }

   if (whandle->type == WINSYS_HANDLE_TYPE_FD) {
      fd_info.sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
      fd_info.memory = res->mem;
      fd_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
      VkResult result = (*screen->vk_GetMemoryFdKHR)(screen->dev, &fd_info, &fd);
      if (result != VK_SUCCESS)
         return false;
      whandle->handle = fd;
      whandle->modifier = DRM_FORMAT_MOD_INVALID;
   }
   return true;
}

static struct pipe_resource *
zink_resource_from_handle(struct pipe_screen *pscreen,
                          const struct pipe_resource *templ,
                          struct winsys_handle *whandle,
                          unsigned usage)
{
   if (whandle->modifier != DRM_FORMAT_MOD_INVALID)
      return NULL;

   return resource_create(pscreen, templ, whandle, usage);
}
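/* Record a copy between an image and its staging buffer on the current batch:
 * transition the image into the required transfer layout, then emit
 * vkCmdCopyBufferToImage or vkCmdCopyImageToBuffer (one copy per aspect when
 * the transfer targets depth or stencil separately).
 */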
static bool
zink_transfer_copy_bufimage(struct zink_context *ctx,
                            struct zink_resource *res,
                            struct zink_resource *staging_res,
                            struct zink_transfer *trans,
                            bool buf2img)
{
   struct zink_batch *batch = zink_batch_no_rp(ctx);

   if (buf2img) {
      if (res->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
         zink_resource_barrier(batch->cmdbuf, res, res->aspect,
                               VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
      }
   } else {
      if (res->layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
         zink_resource_barrier(batch->cmdbuf, res, res->aspect,
                               VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
      }
   }

   VkBufferImageCopy copyRegion = {};
   copyRegion.bufferOffset = staging_res->offset;
   copyRegion.bufferRowLength = 0;
   copyRegion.bufferImageHeight = 0;
   copyRegion.imageSubresource.mipLevel = trans->base.level;
   copyRegion.imageSubresource.layerCount = 1;
   if (res->base.array_size > 1) {
      copyRegion.imageSubresource.baseArrayLayer = trans->base.box.z;
      copyRegion.imageSubresource.layerCount = trans->base.box.depth;
      copyRegion.imageExtent.depth = 1;
   } else {
      copyRegion.imageOffset.z = trans->base.box.z;
      copyRegion.imageExtent.depth = trans->base.box.depth;
   }
   copyRegion.imageOffset.x = trans->base.box.x;
   copyRegion.imageOffset.y = trans->base.box.y;

   copyRegion.imageExtent.width = trans->base.box.width;
   copyRegion.imageExtent.height = trans->base.box.height;

   zink_batch_reference_resource_rw(batch, res, buf2img);
   zink_batch_reference_resource_rw(batch, staging_res, !buf2img);

   /* we're using u_transfer_helper_deinterleave, which means we'll be getting PIPE_MAP_* usage
    * to indicate whether to copy either the depth or stencil aspects
    */
   unsigned aspects = 0;
   assert((trans->base.usage & (PIPE_MAP_DEPTH_ONLY | PIPE_MAP_STENCIL_ONLY)) !=
          (PIPE_MAP_DEPTH_ONLY | PIPE_MAP_STENCIL_ONLY));
   if (trans->base.usage & PIPE_MAP_DEPTH_ONLY)
      aspects = VK_IMAGE_ASPECT_DEPTH_BIT;
   else if (trans->base.usage & PIPE_MAP_STENCIL_ONLY)
      aspects = VK_IMAGE_ASPECT_STENCIL_BIT;
   else
      aspects = aspect_from_format(res->base.format);
   while (aspects) {
      int aspect = 1 << u_bit_scan(&aspects);
      copyRegion.imageSubresource.aspectMask = aspect;

      /* this may or may not work with multisampled depth/stencil buffers depending on the driver implementation:
       *
       * srcImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT
       * - vkCmdCopyImageToBuffer spec
       *
       * dstImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT
       * - vkCmdCopyBufferToImage spec
       */
      if (buf2img)
         vkCmdCopyBufferToImage(batch->cmdbuf, staging_res->buffer, res->image, res->layout, 1, &copyRegion);
      else
         vkCmdCopyImageToBuffer(batch->cmdbuf, res->image, res->layout, staging_res->buffer, 1, &copyRegion);
   }

   return true;
}
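/* Collapse the per-batch access flags into a single bitmask so callers can
 * quickly tell whether any in-flight batch still reads or writes the resource.
 */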
static uint32_t
get_resource_usage(struct zink_resource *res)
{
   uint32_t batch_uses = 0;
   for (unsigned i = 0; i < 4; i++)
      batch_uses |= p_atomic_read(&res->batch_uses[i]) << i;
   return batch_uses;
}
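/* Map a resource for CPU access. Buffers are mapped directly, waiting on any
 * conflicting in-flight batches unless the map is unsynchronized. Images with
 * optimal tiling, or images not created as staging resources, go through a
 * linear staging buffer that is filled from the image for reads and copied
 * back on unmap; linear staging images are mapped directly using the
 * subresource layout.
 */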
static void *
zink_transfer_map(struct pipe_context *pctx,
                  struct pipe_resource *pres,
                  unsigned level,
                  unsigned usage,
                  const struct pipe_box *box,
                  struct pipe_transfer **transfer)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   struct zink_resource *res = zink_resource(pres);
   uint32_t batch_uses = get_resource_usage(res);

   struct zink_transfer *trans = slab_alloc(&ctx->transfer_pool);
   if (!trans)
      return NULL;

   memset(trans, 0, sizeof(*trans));
   pipe_resource_reference(&trans->base.resource, pres);

   trans->base.resource = pres;
   trans->base.level = level;
   trans->base.usage = usage;
   trans->base.box = *box;

   void *ptr;
   if (pres->target == PIPE_BUFFER) {
      if (!(usage & PIPE_MAP_UNSYNCHRONIZED)) {
         if ((usage & PIPE_MAP_READ && batch_uses >= ZINK_RESOURCE_ACCESS_WRITE) ||
             (usage & PIPE_MAP_WRITE && batch_uses)) {
            /* need to wait for rendering to finish
             * TODO: optimize/fix this to be much less obtrusive
             * mesa/mesa#2966
             */
            zink_fence_wait(pctx);
         }
      }

      VkResult result = vkMapMemory(screen->dev, res->mem, res->offset, res->size, 0, &ptr);
      if (result != VK_SUCCESS)
         return NULL;

#if defined(__APPLE__)
      if (!(usage & PIPE_MAP_DISCARD_WHOLE_RESOURCE)) {
         // Work around a known MoltenVK limitation: mapped memory ranges can
         // come back blank when there should be data present, so flush them.
         // See https://github.com/KhronosGroup/MoltenVK/blob/master/Docs/MoltenVK_Runtime_UserGuide.md#known-moltenvk-limitations
         VkMappedMemoryRange range = {
            VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            NULL,
            res->mem,
            res->offset,
            res->size
         };
         result = vkFlushMappedMemoryRanges(screen->dev, 1, &range);
         if (result != VK_SUCCESS)
            return NULL;
      }
#endif

      trans->base.stride = 0;
      trans->base.layer_stride = 0;
      ptr = ((uint8_t *)ptr) + box->x;
   } else {
      if (res->optimial_tiling || res->base.usage != PIPE_USAGE_STAGING) {
         enum pipe_format format = pres->format;
         if (usage & PIPE_MAP_DEPTH_ONLY)
            format = util_format_get_depth_only(pres->format);
         else if (usage & PIPE_MAP_STENCIL_ONLY)
            format = PIPE_FORMAT_S8_UINT;
         trans->base.stride = util_format_get_stride(format, box->width);
         trans->base.layer_stride = util_format_get_2d_size(format,
                                                            trans->base.stride,
                                                            box->height);

         struct pipe_resource templ = *pres;
         templ.format = format;
         templ.usage = PIPE_USAGE_STAGING;
         templ.target = PIPE_BUFFER;
         templ.bind = 0;
         templ.width0 = trans->base.layer_stride * box->depth;
         templ.height0 = templ.depth0 = 0;
         templ.last_level = 0;
         templ.array_size = 1;
         templ.flags = 0;

         trans->staging_res = zink_resource_create(pctx->screen, &templ);
         if (!trans->staging_res)
            return NULL;

         struct zink_resource *staging_res = zink_resource(trans->staging_res);

         if (usage & PIPE_MAP_READ) {
            struct zink_context *ctx = zink_context(pctx);
            bool ret = zink_transfer_copy_bufimage(ctx, res,
                                                   staging_res, trans,
                                                   false);
            if (ret == false)
               return NULL;

            /* need to wait for rendering to finish */
            zink_fence_wait(pctx);
         }

         VkResult result = vkMapMemory(screen->dev, staging_res->mem,
                                       staging_res->offset,
                                       staging_res->size, 0, &ptr);
         if (result != VK_SUCCESS)
            return NULL;

      } else {
         assert(!res->optimial_tiling);
         if (batch_uses >= ZINK_RESOURCE_ACCESS_WRITE)
            zink_fence_wait(pctx);
         VkResult result = vkMapMemory(screen->dev, res->mem, res->offset, res->size, 0, &ptr);
         if (result != VK_SUCCESS)
            return NULL;
         VkImageSubresource isr = {
            res->aspect,
            level,
            0
         };
         VkSubresourceLayout srl;
         vkGetImageSubresourceLayout(screen->dev, res->image, &isr, &srl);
         trans->base.stride = srl.rowPitch;
         trans->base.layer_stride = srl.arrayPitch;
         const struct util_format_description *desc = util_format_description(res->base.format);
         unsigned offset = srl.offset +
                           box->z * srl.depthPitch +
                           (box->y / desc->block.height) * srl.rowPitch +
                           (box->x / desc->block.width) * (desc->block.bits / 8);
         ptr = ((uint8_t *)ptr) + offset;
      }
   }

   *transfer = &trans->base;
   return ptr;
}
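/* Finish a CPU mapping: unmap the memory and, when a staging buffer was used
 * for a write, copy its contents back into the image before releasing the
 * transfer.
 */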
static void
zink_transfer_unmap(struct pipe_context *pctx,
                    struct pipe_transfer *ptrans)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   struct zink_resource *res = zink_resource(ptrans->resource);
   struct zink_transfer *trans = (struct zink_transfer *)ptrans;
   if (trans->staging_res) {
      struct zink_resource *staging_res = zink_resource(trans->staging_res);
      vkUnmapMemory(screen->dev, staging_res->mem);

      if (trans->base.usage & PIPE_MAP_WRITE) {
         struct zink_context *ctx = zink_context(pctx);
         uint32_t batch_uses = get_resource_usage(res);
         if (batch_uses >= ZINK_RESOURCE_ACCESS_WRITE)
            zink_fence_wait(pctx);
         zink_transfer_copy_bufimage(ctx, res, staging_res, trans, true);
      }

      pipe_resource_reference(&trans->staging_res, NULL);
   } else
      vkUnmapMemory(screen->dev, res->mem);

   pipe_resource_reference(&trans->base.resource, NULL);
   slab_free(&ctx->transfer_pool, ptrans);
}

static struct pipe_resource *
zink_resource_get_separate_stencil(struct pipe_resource *pres)
{
   /* For packed depth-stencil, we treat depth as the primary resource
    * and store S8 as the "second plane" resource.
    */
   if (pres->next && pres->next->format == PIPE_FORMAT_S8_UINT)
      return pres->next;

   return NULL;
}
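/* Transition the source and destination images into layouts that are valid
 * for transfer/blit commands; when both are the same image, a single
 * VK_IMAGE_LAYOUT_GENERAL transition is used instead (see the comment below).
 */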
void
zink_resource_setup_transfer_layouts(struct zink_batch *batch, struct zink_resource *src, struct zink_resource *dst)
{
   if (src == dst) {
      /* The Vulkan 1.1 specification says the following about valid usage
       * of vkCmdBlitImage:
       *
       * "srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
       *  VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL"
       *
       * and:
       *
       * "dstImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
       *  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL"
       *
       * Since we can't have the same image in two states at the same time,
       * we're effectively left with VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR or
       * VK_IMAGE_LAYOUT_GENERAL. And since this isn't a present-related
       * operation, VK_IMAGE_LAYOUT_GENERAL seems most appropriate.
       */
      if (src->layout != VK_IMAGE_LAYOUT_GENERAL)
         zink_resource_barrier(batch->cmdbuf, src, src->aspect,
                               VK_IMAGE_LAYOUT_GENERAL);
   } else {
      if (src->layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
         zink_resource_barrier(batch->cmdbuf, src, src->aspect,
                               VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

      if (dst->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
         zink_resource_barrier(batch->cmdbuf, dst, dst->aspect,
                               VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
   }
}

void
zink_get_depth_stencil_resources(struct pipe_resource *res,
                                 struct zink_resource **out_z,
                                 struct zink_resource **out_s)
{
   if (!res) {
      if (out_z) *out_z = NULL;
      if (out_s) *out_s = NULL;
      return;
   }

   if (res->format != PIPE_FORMAT_S8_UINT) {
      if (out_z) *out_z = zink_resource(res);
      if (out_s) *out_s = zink_resource(zink_resource_get_separate_stencil(res));
   } else {
      if (out_z) *out_z = NULL;
      if (out_s) *out_s = zink_resource(res);
   }
}

static void
zink_resource_set_separate_stencil(struct pipe_resource *pres,
                                   struct pipe_resource *stencil)
{
   assert(util_format_has_depth(util_format_description(pres->format)));
   pipe_resource_reference(&pres->next, stencil);
}

static enum pipe_format
zink_resource_get_internal_format(struct pipe_resource *pres)
{
   struct zink_resource *res = zink_resource(pres);
   return res->internal_format;
}

static const struct u_transfer_vtbl transfer_vtbl = {
   .resource_create = zink_resource_create,
   .resource_destroy = zink_resource_destroy,
   .transfer_map = zink_transfer_map,
   .transfer_unmap = zink_transfer_unmap,
   .transfer_flush_region = u_default_transfer_flush_region,
   .get_internal_format = zink_resource_get_internal_format,
   .set_stencil = zink_resource_set_separate_stencil,
   .get_stencil = zink_resource_get_separate_stencil,
};

void
zink_screen_resource_init(struct pipe_screen *pscreen)
{
   pscreen->resource_create = zink_resource_create;
   pscreen->resource_destroy = zink_resource_destroy;
   pscreen->transfer_helper = u_transfer_helper_create(&transfer_vtbl, true, true, false, false);

   if (zink_screen(pscreen)->info.have_KHR_external_memory_fd) {
      pscreen->resource_get_handle = zink_resource_get_handle;
      pscreen->resource_from_handle = zink_resource_from_handle;
   }
}

void
zink_context_resource_init(struct pipe_context *pctx)
{
   pctx->transfer_map = u_transfer_helper_deinterleave_transfer_map;
   pctx->transfer_unmap = u_transfer_helper_deinterleave_transfer_unmap;

   pctx->transfer_flush_region = u_transfer_helper_transfer_flush_region;
   pctx->buffer_subdata = u_default_buffer_subdata;
   pctx->texture_subdata = u_default_texture_subdata;
}
759