1 /*
2 * Copyright 2018 Collabora Ltd.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * on the rights to use, copy, modify, merge, publish, distribute, sub
8 * license, and/or sell copies of the Software, and to permit persons to whom
9 * the Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
19 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
20 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
21 * USE OR OTHER DEALINGS IN THE SOFTWARE.
22 */
23
24 #include "zink_resource.h"
25
26 #include "zink_batch.h"
27 #include "zink_context.h"
28 #include "zink_fence.h"
29 #include "zink_program.h"
30 #include "zink_screen.h"
31 #include "zink_kopper.h"
32
33 #ifdef VK_USE_PLATFORM_METAL_EXT
34 #include "QuartzCore/CAMetalLayer.h"
35 #endif
36 #include "vulkan/wsi/wsi_common.h"
37
38 #include "vk_format.h"
39 #include "util/slab.h"
40 #include "util/u_blitter.h"
41 #include "util/u_debug.h"
42 #include "util/format/u_format.h"
43 #include "util/u_transfer_helper.h"
44 #include "util/u_inlines.h"
45 #include "util/u_memory.h"
46 #include "util/u_upload_mgr.h"
47 #include "util/os_file.h"
48 #include "frontend/winsys_handle.h"
49
50 #if !defined(__APPLE__)
51 #define ZINK_USE_DMABUF
52 #endif
53
54 #if defined(ZINK_USE_DMABUF) && !defined(_WIN32)
55 #include "drm-uapi/drm_fourcc.h"
56 #else
57 /* these won't actually be used */
58 #define DRM_FORMAT_MOD_INVALID 0
59 #define DRM_FORMAT_MOD_LINEAR 0
60 #endif
61
62 #if defined(__APPLE__)
63 // Source of MVK_VERSION
64 #include "MoltenVK/vk_mvk_moltenvk.h"
65 #endif
66
67 #define ZINK_EXTERNAL_MEMORY_HANDLE 999
68
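/* Hash-table key comparators for the buffer-view/surface caches set up in
 * resource_create(): compare the create-info structs starting at their 'flags'
 * member so that sType/pNext are ignored.
 */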
69 static bool
70 equals_ivci(const void *a, const void *b)
71 {
72 const uint8_t *pa = a;
73 const uint8_t *pb = b;
74 size_t offset = offsetof(VkImageViewCreateInfo, flags);
75 return memcmp(pa + offset, pb + offset, sizeof(VkImageViewCreateInfo) - offset) == 0;
76 }
77
78 static bool
79 equals_bvci(const void *a, const void *b)
80 {
81 const uint8_t *pa = a;
82 const uint8_t *pb = b;
83 size_t offset = offsetof(VkBufferViewCreateInfo, flags);
84 return memcmp(pa + offset, pb + offset, sizeof(VkBufferViewCreateInfo) - offset) == 0;
85 }
86
87 static void
88 zink_transfer_flush_region(struct pipe_context *pctx,
89 struct pipe_transfer *ptrans,
90 const struct pipe_box *box);
91
92 void
93 debug_describe_zink_resource_object(char *buf, const struct zink_resource_object *ptr)
94 {
95 sprintf(buf, "zink_resource_object");
96 }
97
98 void
99 zink_destroy_resource_object(struct zink_screen *screen, struct zink_resource_object *obj)
100 {
101 if (obj->is_buffer) {
102 VKSCR(DestroyBuffer)(screen->dev, obj->buffer, NULL);
103 VKSCR(DestroyBuffer)(screen->dev, obj->storage_buffer, NULL);
104 } else if (obj->dt) {
105 zink_kopper_displaytarget_destroy(screen, obj->dt);
106 } else if (!obj->is_aux) {
107 VKSCR(DestroyImage)(screen->dev, obj->image, NULL);
108 } else {
109 #if defined(ZINK_USE_DMABUF) && !defined(_WIN32)
110 close(obj->handle);
111 #endif
112 }
113
114 zink_descriptor_set_refs_clear(&obj->desc_set_refs, obj);
115 if (obj->dt) {
116 FREE(obj->bo); //this is a dummy struct
117 } else
118 zink_bo_unref(screen, obj->bo);
119 FREE(obj);
120 }
121
122 static void
123 zink_resource_destroy(struct pipe_screen *pscreen,
124 struct pipe_resource *pres)
125 {
126 struct zink_screen *screen = zink_screen(pscreen);
127 struct zink_resource *res = zink_resource(pres);
128 if (pres->target == PIPE_BUFFER) {
129 util_range_destroy(&res->valid_buffer_range);
130 util_idalloc_mt_free(&screen->buffer_ids, res->base.buffer_id_unique);
131 assert(!_mesa_hash_table_num_entries(&res->bufferview_cache));
132 simple_mtx_destroy(&res->bufferview_mtx);
133 ralloc_free(res->bufferview_cache.table);
134 } else {
135 assert(!_mesa_hash_table_num_entries(&res->surface_cache));
136 simple_mtx_destroy(&res->surface_mtx);
137 ralloc_free(res->surface_cache.table);
138 }
139 /* no need to do anything for the caches, these objects own the resource lifetimes */
140
141 zink_resource_object_reference(screen, &res->obj, NULL);
142 threaded_resource_deinit(pres);
143 FREE_CL(res);
144 }
145
146 static VkImageAspectFlags
147 aspect_from_format(enum pipe_format fmt)
148 {
149 if (util_format_is_depth_or_stencil(fmt)) {
150 VkImageAspectFlags aspect = 0;
151 const struct util_format_description *desc = util_format_description(fmt);
152 if (util_format_has_depth(desc))
153 aspect |= VK_IMAGE_ASPECT_DEPTH_BIT;
154 if (util_format_has_stencil(desc))
155 aspect |= VK_IMAGE_ASPECT_STENCIL_BIT;
156 return aspect;
157 } else
158 return VK_IMAGE_ASPECT_COLOR_BIT;
159 }
160
161 static VkBufferCreateInfo
162 create_bci(struct zink_screen *screen, const struct pipe_resource *templ, unsigned bind)
163 {
164 VkBufferCreateInfo bci;
165 bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
166 bci.pNext = NULL;
167 bci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
168 bci.queueFamilyIndexCount = 0;
169 bci.pQueueFamilyIndices = NULL;
170 bci.size = templ->width0;
171 bci.flags = 0;
172 assert(bci.size > 0);
173
174 bci.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
175 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
176 VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
177
178 bci.usage |= VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT |
179 VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT |
180 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
181 VK_BUFFER_USAGE_INDEX_BUFFER_BIT |
182 VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
183 VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT |
184 VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT;
185
186 if (bind & PIPE_BIND_SHADER_IMAGE)
187 bci.usage |= VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
188
189 if (bind & PIPE_BIND_QUERY_BUFFER)
190 bci.usage |= VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT;
191
192 if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
193 bci.flags |= VK_BUFFER_CREATE_SPARSE_BINDING_BIT;
194 return bci;
195 }
196
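/* Ask the driver whether this VkImageCreateInfo (optionally with an explicit DRM
 * modifier) is supported, and verify the requested extent/mip/layer counts fit the
 * reported limits.
 */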
197 static bool
198 check_ici(struct zink_screen *screen, VkImageCreateInfo *ici, uint64_t modifier)
199 {
200 VkImageFormatProperties image_props;
201 VkResult ret;
202 assert(modifier == DRM_FORMAT_MOD_INVALID ||
203 (VKSCR(GetPhysicalDeviceImageFormatProperties2) && screen->info.have_EXT_image_drm_format_modifier));
204 if (VKSCR(GetPhysicalDeviceImageFormatProperties2)) {
205 VkImageFormatProperties2 props2;
206 props2.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
207 props2.pNext = NULL;
208 VkSamplerYcbcrConversionImageFormatProperties ycbcr_props;
209 ycbcr_props.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES;
210 ycbcr_props.pNext = NULL;
211 if (screen->info.have_KHR_sampler_ycbcr_conversion)
212 props2.pNext = &ycbcr_props;
213 VkPhysicalDeviceImageFormatInfo2 info;
214 info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
215 /* possibly VkImageFormatListCreateInfo */
216 info.pNext = ici->pNext;
217 info.format = ici->format;
218 info.type = ici->imageType;
219 info.tiling = ici->tiling;
220 info.usage = ici->usage;
221 info.flags = ici->flags;
222
223 VkPhysicalDeviceImageDrmFormatModifierInfoEXT mod_info;
224 if (modifier != DRM_FORMAT_MOD_INVALID) {
225 mod_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT;
226 mod_info.pNext = info.pNext;
227 mod_info.drmFormatModifier = modifier;
228 mod_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
229 mod_info.queueFamilyIndexCount = 0;
230 info.pNext = &mod_info;
231 }
232
233 ret = VKSCR(GetPhysicalDeviceImageFormatProperties2)(screen->pdev, &info, &props2);
234 /* this is using VK_IMAGE_CREATE_EXTENDED_USAGE_BIT and can't be validated */
235 if (vk_format_aspects(ici->format) & VK_IMAGE_ASPECT_PLANE_1_BIT)
236 ret = VK_SUCCESS;
237 image_props = props2.imageFormatProperties;
238 } else
239 ret = VKSCR(GetPhysicalDeviceImageFormatProperties)(screen->pdev, ici->format, ici->imageType,
240 ici->tiling, ici->usage, ici->flags, &image_props);
241 if (ret != VK_SUCCESS)
242 return false;
243 if (ici->extent.depth > image_props.maxExtent.depth ||
244 ici->extent.height > image_props.maxExtent.height ||
245 ici->extent.width > image_props.maxExtent.width)
246 return false;
247 if (ici->mipLevels > image_props.maxMipLevels)
248 return false;
249 if (ici->arrayLayers > image_props.maxArrayLayers)
250 return false;
251 return true;
252 }
253
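/* Map gallium bind flags plus format features onto VkImageUsageFlags; when
 * color-attachment usage is required but the format doesn't support it,
 * *need_extended is set so the caller can retry with
 * VK_IMAGE_CREATE_EXTENDED_USAGE_BIT.
 */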
254 static VkImageUsageFlags
255 get_image_usage_for_feats(struct zink_screen *screen, VkFormatFeatureFlags feats, const struct pipe_resource *templ, unsigned bind, bool *need_extended)
256 {
257 VkImageUsageFlags usage = 0;
258 bool is_planar = util_format_get_num_planes(templ->format) > 1;
259 *need_extended = false;
260
261 if (bind & ZINK_BIND_TRANSIENT)
262 usage |= VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
263 else {
264 /* sadly, gallium doesn't let us know if it'll ever need this, so we have to assume */
265 if (is_planar || (feats & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT))
266 usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
267 if (is_planar || (feats & VK_FORMAT_FEATURE_TRANSFER_DST_BIT))
268 usage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
269 if (feats & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)
270 usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
271
272 if ((is_planar || (feats & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) && (bind & PIPE_BIND_SHADER_IMAGE)) {
273 assert(templ->nr_samples <= 1 || screen->info.feats.features.shaderStorageImageMultisample);
274 usage |= VK_IMAGE_USAGE_STORAGE_BIT;
275 }
276 }
277
278 if (bind & PIPE_BIND_RENDER_TARGET) {
279 if (feats & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
280 usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
281 if ((bind & (PIPE_BIND_LINEAR | PIPE_BIND_SHARED)) != (PIPE_BIND_LINEAR | PIPE_BIND_SHARED))
282 usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
283 } else {
284 /* trust that gallium isn't going to give us anything wild */
285 *need_extended = true;
286 return 0;
287 }
288 } else if ((bind & PIPE_BIND_SAMPLER_VIEW) && !util_format_is_depth_or_stencil(templ->format)) {
289 if (!(feats & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
290 /* ensure we can u_blitter this later */
291 *need_extended = true;
292 return 0;
293 }
294 usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
295 }
296
297 if (bind & PIPE_BIND_DEPTH_STENCIL) {
298 if (feats & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
299 usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
300 else
301 return 0;
302 /* this is unlikely to occur and has been included for completeness */
303 } else if (bind & PIPE_BIND_SAMPLER_VIEW && !(usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
304 if (feats & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)
305 usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
306 else
307 return 0;
308 }
309
310 if (bind & PIPE_BIND_STREAM_OUTPUT)
311 usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
312
313 return usage;
314 }
315
316 static VkFormatFeatureFlags
317 find_modifier_feats(const struct zink_modifier_prop *prop, uint64_t modifier, uint64_t *mod)
318 {
319 for (unsigned j = 0; j < prop->drmFormatModifierCount; j++) {
320 if (prop->pDrmFormatModifierProperties[j].drmFormatModifier == modifier) {
321 *mod = modifier;
322 return prop->pDrmFormatModifierProperties[j].drmFormatModifierTilingFeatures;
323 }
324 }
325 return 0;
326 }
327
328 /* If the driver can't do mutable with this ICI, then try again after removing mutable (and
329  * thus also the list of formats we might mutate to)
330 */
331 static bool
332 double_check_ici(struct zink_screen *screen, VkImageCreateInfo *ici, VkImageUsageFlags usage, uint64_t *mod)
333 {
334 if (!usage)
335 return false;
336
337 const void *pNext = ici->pNext;
338 ici->usage = usage;
339 if (check_ici(screen, ici, *mod))
340 return true;
341 if (pNext) {
342 ici->pNext = NULL;
343 ici->flags &= ~VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
344 if (check_ici(screen, ici, *mod))
345 return true;
346 ici->pNext = pNext;
347 ici->flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
348 }
349 return false;
350 }
351
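/* Pick usable usage flags for the image, trying the supplied DRM modifiers first and
 * falling back to DRM_FORMAT_MOD_LINEAR only if nothing else validates; *mod returns
 * the selected modifier (or DRM_FORMAT_MOD_INVALID).
 */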
352 static VkImageUsageFlags
353 get_image_usage(struct zink_screen *screen, VkImageCreateInfo *ici, const struct pipe_resource *templ, unsigned bind, unsigned modifiers_count, const uint64_t *modifiers, uint64_t *mod)
354 {
355 VkImageTiling tiling = ici->tiling;
356 bool need_extended = false;
357 *mod = DRM_FORMAT_MOD_INVALID;
358 if (modifiers_count) {
359 bool have_linear = false;
360 const struct zink_modifier_prop *prop = &screen->modifier_props[templ->format];
361 assert(tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);
362 for (unsigned i = 0; i < modifiers_count; i++) {
363 if (modifiers[i] == DRM_FORMAT_MOD_LINEAR) {
364 have_linear = true;
365 if (!screen->info.have_EXT_image_drm_format_modifier)
366 break;
367 continue;
368 }
369 VkFormatFeatureFlags feats = find_modifier_feats(prop, modifiers[i], mod);
370 if (feats) {
371 VkImageUsageFlags usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
372 assert(!need_extended);
373 if (double_check_ici(screen, ici, usage, mod))
374 return usage;
375 }
376 }
377 /* only try linear if no other options available */
378 if (have_linear) {
379 VkFormatFeatureFlags feats = find_modifier_feats(prop, DRM_FORMAT_MOD_LINEAR, mod);
380 if (feats) {
381 VkImageUsageFlags usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
382 assert(!need_extended);
383 if (double_check_ici(screen, ici, usage, mod))
384 return usage;
385 }
386 }
387 } else
388 {
389 VkFormatProperties props = screen->format_props[templ->format];
390 VkFormatFeatureFlags feats = tiling == VK_IMAGE_TILING_LINEAR ? props.linearTilingFeatures : props.optimalTilingFeatures;
391 if (ici->flags & VK_IMAGE_CREATE_EXTENDED_USAGE_BIT)
392 feats = UINT32_MAX;
393 VkImageUsageFlags usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
394 if (need_extended) {
395 ici->flags |= VK_IMAGE_CREATE_EXTENDED_USAGE_BIT;
396 feats = UINT32_MAX;
397 usage = get_image_usage_for_feats(screen, feats, templ, bind, &need_extended);
398 }
399 if (double_check_ici(screen, ici, usage, mod))
400 return usage;
401 }
402 *mod = DRM_FORMAT_MOD_INVALID;
403 return 0;
404 }
405
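/* Fill out the VkImageCreateInfo from the gallium template, cycling through tiling
 * modes (and EXTENDED_USAGE as a last resort) until a supported usage is found.
 * Returns the chosen DRM modifier, or DRM_FORMAT_MOD_INVALID if none applies or
 * creation parameters can't be satisfied (*success reports which).
 */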
406 static uint64_t
407 create_ici(struct zink_screen *screen, VkImageCreateInfo *ici, const struct pipe_resource *templ, bool dmabuf, unsigned bind, unsigned modifiers_count, const uint64_t *modifiers, bool *success)
408 {
409 ici->sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
410 /* pNext may already be set */
411 if (util_format_get_num_planes(templ->format) > 1)
412 ici->flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT;
413 else
414 ici->flags = modifiers_count || dmabuf || bind & (PIPE_BIND_SCANOUT | PIPE_BIND_DEPTH_STENCIL) ? 0 : VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
415 if (ici->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT)
416 /* unset VkImageFormatListCreateInfo if mutable */
417 ici->pNext = NULL;
418 else if (ici->pNext)
419 /* add mutable if VkImageFormatListCreateInfo */
420 ici->flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
421 ici->usage = 0;
422 ici->queueFamilyIndexCount = 0;
423
424 if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
425 ici->flags |= VK_IMAGE_CREATE_SPARSE_BINDING_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT;
426
427 bool need_2D = false;
428 switch (templ->target) {
429 case PIPE_TEXTURE_1D:
430 case PIPE_TEXTURE_1D_ARRAY:
431 if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
432 need_2D |= screen->need_2D_sparse;
433 if (util_format_is_depth_or_stencil(templ->format))
434 need_2D |= screen->need_2D_zs;
435 ici->imageType = need_2D ? VK_IMAGE_TYPE_2D : VK_IMAGE_TYPE_1D;
436 break;
437
438 case PIPE_TEXTURE_CUBE:
439 case PIPE_TEXTURE_CUBE_ARRAY:
440 case PIPE_TEXTURE_2D:
441 case PIPE_TEXTURE_2D_ARRAY:
442 case PIPE_TEXTURE_RECT:
443 ici->imageType = VK_IMAGE_TYPE_2D;
444 break;
445
446 case PIPE_TEXTURE_3D:
447 ici->imageType = VK_IMAGE_TYPE_3D;
448 ici->flags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
449 if (screen->info.have_EXT_image_2d_view_of_3d)
450 ici->flags |= VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT;
451 break;
452
453 case PIPE_BUFFER:
454 unreachable("PIPE_BUFFER should already be handled");
455
456 default:
457 unreachable("Unknown target");
458 }
459
460 if (screen->info.have_EXT_sample_locations &&
461 bind & PIPE_BIND_DEPTH_STENCIL &&
462 util_format_has_depth(util_format_description(templ->format)))
463 ici->flags |= VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT;
464
465 ici->format = zink_get_format(screen, templ->format);
466 ici->extent.width = templ->width0;
467 ici->extent.height = templ->height0;
468 ici->extent.depth = templ->depth0;
469 ici->mipLevels = templ->last_level + 1;
470 ici->arrayLayers = MAX2(templ->array_size, 1);
471 ici->samples = templ->nr_samples ? templ->nr_samples : VK_SAMPLE_COUNT_1_BIT;
472 ici->tiling = screen->info.have_EXT_image_drm_format_modifier && modifiers_count ?
473 VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT :
474 bind & (PIPE_BIND_LINEAR | ZINK_BIND_DMABUF) ? VK_IMAGE_TILING_LINEAR : VK_IMAGE_TILING_OPTIMAL;
475 ici->sharingMode = VK_SHARING_MODE_EXCLUSIVE;
476 ici->initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
477
478 /* sampleCounts will be set to VK_SAMPLE_COUNT_1_BIT if at least one of the following conditions is true:
479 * - flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT
480 *
481 * 44.1.1. Supported Sample Counts
482 */
483 bool want_cube = ici->samples == 1 &&
484 (templ->target == PIPE_TEXTURE_CUBE ||
485 templ->target == PIPE_TEXTURE_CUBE_ARRAY ||
486 (templ->target == PIPE_TEXTURE_2D_ARRAY && ici->extent.width == ici->extent.height && ici->arrayLayers >= 6));
487
488 if (templ->target == PIPE_TEXTURE_CUBE)
489 ici->arrayLayers *= 6;
490
491 if (templ->usage == PIPE_USAGE_STAGING &&
492 templ->format != PIPE_FORMAT_B4G4R4A4_UNORM &&
493 templ->format != PIPE_FORMAT_B4G4R4A4_UINT)
494 ici->tiling = VK_IMAGE_TILING_LINEAR;
495 if (ici->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT)
496 modifiers_count = 0;
497
498 bool first = true;
499 bool tried[2] = {0};
500 uint64_t mod = DRM_FORMAT_MOD_INVALID;
501 retry:
502 while (!ici->usage) {
503 if (!first) {
504 switch (ici->tiling) {
505 case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT:
506 ici->tiling = VK_IMAGE_TILING_OPTIMAL;
507 modifiers_count = 0;
508 break;
509 case VK_IMAGE_TILING_OPTIMAL:
510 ici->tiling = VK_IMAGE_TILING_LINEAR;
511 break;
512 case VK_IMAGE_TILING_LINEAR:
513 if (bind & PIPE_BIND_LINEAR) {
514 *success = false;
515 return DRM_FORMAT_MOD_INVALID;
516 }
517 ici->tiling = VK_IMAGE_TILING_OPTIMAL;
518 break;
519 default:
520 unreachable("unhandled tiling mode");
521 }
522 if (tried[ici->tiling]) {
523 if (ici->flags & VK_IMAGE_CREATE_EXTENDED_USAGE_BIT) {
524 *success = false;
525 return DRM_FORMAT_MOD_INVALID;
526 }
527 ici->flags |= VK_IMAGE_CREATE_EXTENDED_USAGE_BIT;
528 tried[0] = false;
529 tried[1] = false;
530 first = true;
531 goto retry;
532 }
533 }
534 ici->usage = get_image_usage(screen, ici, templ, bind, modifiers_count, modifiers, &mod);
535 first = false;
536 if (ici->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT)
537 tried[ici->tiling] = true;
538 }
539 if (want_cube) {
540 ici->flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
541 if (get_image_usage(screen, ici, templ, bind, modifiers_count, modifiers, &mod) != ici->usage)
542 ici->flags &= ~VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
543 }
544
545 *success = true;
546 return mod;
547 }
548
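/* Create the backing Vulkan object (VkBuffer or VkImage) and its memory allocation,
 * handling dma-buf import/export, multi-planar formats, and sparse resources.
 */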
549 static struct zink_resource_object *
550 resource_object_create(struct zink_screen *screen, const struct pipe_resource *templ, struct winsys_handle *whandle, bool *optimal_tiling,
551 const uint64_t *modifiers, int modifiers_count, const void *loader_private)
552 {
553 struct zink_resource_object *obj = CALLOC_STRUCT(zink_resource_object);
554 if (!obj)
555 return NULL;
556 obj->last_dt_idx = obj->dt_idx = UINT32_MAX; //TODO: unionize
557
558 VkMemoryRequirements reqs = {0};
559 VkMemoryPropertyFlags flags;
560
561 /* figure out aux plane count */
562 if (whandle && whandle->plane >= util_format_get_num_planes(whandle->format))
563 obj->is_aux = true;
564 struct pipe_resource *pnext = templ->next;
565 for (obj->plane_count = 1; pnext; obj->plane_count++, pnext = pnext->next) {
566 struct zink_resource *next = zink_resource(pnext);
567 if (!next->obj->is_aux)
568 break;
569 }
570
571 bool need_dedicated = false;
572 bool shared = templ->bind & PIPE_BIND_SHARED;
573 #if !defined(_WIN32)
574 VkExternalMemoryHandleTypeFlags export_types = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
575 #else
576 VkExternalMemoryHandleTypeFlags export_types = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
577 #endif
578 unsigned num_planes = util_format_get_num_planes(templ->format);
579 VkImageAspectFlags plane_aspects[] = {
580 VK_IMAGE_ASPECT_PLANE_0_BIT,
581 VK_IMAGE_ASPECT_PLANE_1_BIT,
582 VK_IMAGE_ASPECT_PLANE_2_BIT,
583 };
584 VkExternalMemoryHandleTypeFlags external = 0;
585 bool needs_export = (templ->bind & (ZINK_BIND_VIDEO | ZINK_BIND_DMABUF)) != 0;
586 if (whandle) {
587 if (whandle->type == WINSYS_HANDLE_TYPE_FD || whandle->type == ZINK_EXTERNAL_MEMORY_HANDLE)
588 needs_export |= true;
589 else
590 unreachable("unknown handle type");
591 }
592 if (needs_export) {
593 if (whandle && whandle->type == ZINK_EXTERNAL_MEMORY_HANDLE) {
594 #if !defined(_WIN32)
595 external = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
596 #else
597 external = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
598 #endif
599 } else {
600 external = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
601 export_types |= VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
602 }
603 }
604
605 /* we may export WINSYS_HANDLE_TYPE_FD handle which is dma-buf */
606 if (shared && screen->info.have_EXT_external_memory_dma_buf)
607 export_types |= VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
608
609 pipe_reference_init(&obj->reference, 1);
610 util_dynarray_init(&obj->desc_set_refs.refs, NULL);
611 if (loader_private) {
612 obj->bo = CALLOC_STRUCT(zink_bo);
613 obj->transfer_dst = true;
614 return obj;
615 } else if (templ->target == PIPE_BUFFER) {
616 VkBufferCreateInfo bci = create_bci(screen, templ, templ->bind);
617
618 if (VKSCR(CreateBuffer)(screen->dev, &bci, NULL, &obj->buffer) != VK_SUCCESS) {
619 mesa_loge("ZINK: vkCreateBuffer failed");
620 goto fail1;
621 }
622
623 if (!(templ->bind & PIPE_BIND_SHADER_IMAGE)) {
624 bci.usage |= VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
625 if (VKSCR(CreateBuffer)(screen->dev, &bci, NULL, &obj->storage_buffer) != VK_SUCCESS) {
626 mesa_loge("ZINK: vkCreateBuffer failed");
627 goto fail2;
628 }
629 }
630
631 VKSCR(GetBufferMemoryRequirements)(screen->dev, obj->buffer, &reqs);
632 if (templ->usage == PIPE_USAGE_STAGING)
633 flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
634 else if (templ->usage == PIPE_USAGE_STREAM)
635 flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
636 else if (templ->usage == PIPE_USAGE_IMMUTABLE)
637 flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
638 else
639 flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
640 obj->is_buffer = true;
641 obj->transfer_dst = true;
642 } else {
643 bool winsys_modifier = (export_types & VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT) && whandle && whandle->modifier != DRM_FORMAT_MOD_INVALID;
644 uint64_t mods[10];
645 bool try_modifiers = false;
646 if ((export_types & VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT) &&
647 whandle && whandle->modifier == DRM_FORMAT_MOD_INVALID && whandle->stride) {
648 modifiers = mods;
649 modifiers_count = screen->modifier_props[templ->format].drmFormatModifierCount;
650 for (unsigned j = 0; j < modifiers_count; j++)
651 mods[j] = screen->modifier_props[templ->format].pDrmFormatModifierProperties[j].drmFormatModifier;
652 if (modifiers_count > 1)
653 try_modifiers = true;
654 }
655 const uint64_t *ici_modifiers = winsys_modifier ? &whandle->modifier : modifiers;
656 unsigned ici_modifier_count = winsys_modifier ? 1 : modifiers_count;
657 bool success = false;
658 VkImageCreateInfo ici;
659 enum pipe_format srgb = PIPE_FORMAT_NONE;
660 /* We use modifiers as a proxy for "this surface is used as a window system render target".
661 * For winsys, we need to be able to mutate between srgb and linear, but we don't need general
662 * image view/shader image format compatibility (that path means losing fast clears or compression on some hardware).
663 */
664 if (ici_modifier_count) {
665 srgb = util_format_is_srgb(templ->format) ? util_format_linear(templ->format) : util_format_srgb(templ->format);
666 /* why do these helpers have different default return values? */
667 if (srgb == templ->format)
668 srgb = PIPE_FORMAT_NONE;
669 }
670 VkFormat formats[2];
671 VkImageFormatListCreateInfo format_list;
672 if (srgb) {
673 format_list.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO;
674 format_list.pNext = NULL;
675 format_list.viewFormatCount = 2;
676 format_list.pViewFormats = formats;
677
678 formats[0] = zink_get_format(screen, templ->format);
679 formats[1] = zink_get_format(screen, srgb);
680 ici.pNext = &format_list;
681 } else {
682 ici.pNext = NULL;
683 }
684 uint64_t mod = create_ici(screen, &ici, templ, external == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
685 templ->bind, ici_modifier_count, ici_modifiers, &success);
686 VkExternalMemoryImageCreateInfo emici;
687 VkImageDrmFormatModifierExplicitCreateInfoEXT idfmeci;
688 VkImageDrmFormatModifierListCreateInfoEXT idfmlci;
689 VkSubresourceLayout plane_layouts[4];
690 VkSubresourceLayout plane_layout = {
691 .offset = whandle ? whandle->offset : 0,
692 .size = 0,
693 .rowPitch = whandle ? whandle->stride : 0,
694 .arrayPitch = 0,
695 .depthPitch = 0,
696 };
697 if (!success)
698 goto fail1;
699
700 obj->render_target = (ici.usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0;
701
702 if (shared || external) {
703 emici.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
704 emici.pNext = ici.pNext;
705 emici.handleTypes = export_types;
706 ici.pNext = &emici;
707
708 assert(ici.tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT || mod != DRM_FORMAT_MOD_INVALID);
709 if (whandle && ici.tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
710 assert(mod == whandle->modifier || !winsys_modifier);
711 idfmeci.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT;
712 idfmeci.pNext = ici.pNext;
713 idfmeci.drmFormatModifier = mod;
714
715 idfmeci.drmFormatModifierPlaneCount = obj->plane_count;
716 plane_layouts[0] = plane_layout;
717 pnext = templ->next;
718 for (unsigned i = 1; i < obj->plane_count; i++, pnext = pnext->next) {
719 struct zink_resource *next = zink_resource(pnext);
720 obj->plane_offsets[i] = plane_layouts[i].offset = next->obj->plane_offsets[i];
721 obj->plane_strides[i] = plane_layouts[i].rowPitch = next->obj->plane_strides[i];
722 plane_layouts[i].size = 0;
723 plane_layouts[i].arrayPitch = 0;
724 plane_layouts[i].depthPitch = 0;
725 }
726 idfmeci.pPlaneLayouts = plane_layouts;
727
728 ici.pNext = &idfmeci;
729 } else if (ici.tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
730 idfmlci.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT;
731 idfmlci.pNext = ici.pNext;
732 idfmlci.drmFormatModifierCount = modifiers_count;
733 idfmlci.pDrmFormatModifiers = modifiers;
734 ici.pNext = &idfmlci;
735 } else if (ici.tiling == VK_IMAGE_TILING_OPTIMAL) {
736 shared = false;
737 }
738 }
739
740 if (optimal_tiling)
741 *optimal_tiling = ici.tiling == VK_IMAGE_TILING_OPTIMAL;
742
743 if (ici.usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)
744 obj->transfer_dst = true;
745
746 #if defined(ZINK_USE_DMABUF) && !defined(_WIN32)
747 if (obj->is_aux) {
748 obj->modifier = mod;
749 obj->modifier_aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT << whandle->plane;
750 obj->plane_offsets[whandle->plane] = whandle->offset;
751 obj->plane_strides[whandle->plane] = whandle->stride;
752 obj->handle = os_dupfd_cloexec(whandle->handle);
753 if (obj->handle < 0) {
754 mesa_loge("ZINK: failed to dup dmabuf fd: %s\n", strerror(errno));
755 goto fail1;
756 }
757 return obj;
758 }
759 #endif
760
761 VkFormatFeatureFlags feats = 0;
762 switch (ici.tiling) {
763 case VK_IMAGE_TILING_LINEAR:
764 feats = screen->format_props[templ->format].linearTilingFeatures;
765 break;
766 case VK_IMAGE_TILING_OPTIMAL:
767 feats = screen->format_props[templ->format].optimalTilingFeatures;
768 break;
769 case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT:
770 feats = VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM;
771 /*
772        If tiling is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, the value of
773 imageCreateFormatFeatures is found by calling vkGetPhysicalDeviceFormatProperties2
774 with VkImageFormatProperties::format equal to VkImageCreateInfo::format and with
775 VkDrmFormatModifierPropertiesListEXT chained into VkImageFormatProperties2; by
776 collecting all members of the returned array
777 VkDrmFormatModifierPropertiesListEXT::pDrmFormatModifierProperties
778 whose drmFormatModifier belongs to imageCreateDrmFormatModifiers; and by taking the bitwise
779 intersection, over the collected array members, of drmFormatModifierTilingFeatures.
780 (The resultant imageCreateFormatFeatures may be empty).
781 * -Chapter 12. Resource Creation
782 */
783 for (unsigned i = 0; i < screen->modifier_props[templ->format].drmFormatModifierCount; i++)
784 feats &= screen->modifier_props[templ->format].pDrmFormatModifierProperties[i].drmFormatModifierTilingFeatures;
785 break;
786 default:
787 unreachable("unknown tiling");
788 }
789 obj->vkfeats = feats;
790 if (util_format_is_yuv(templ->format)) {
791 if (feats & VK_FORMAT_FEATURE_DISJOINT_BIT)
792 ici.flags |= VK_IMAGE_CREATE_DISJOINT_BIT;
793 VkSamplerYcbcrConversionCreateInfo sycci = {0};
794 sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
795 sycci.pNext = NULL;
796 sycci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
797 sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709;
798 sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
799 sycci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
800 sycci.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
801 sycci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
802 sycci.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
803 if (!feats || (feats & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT)) {
804 sycci.xChromaOffset = VK_CHROMA_LOCATION_COSITED_EVEN;
805 sycci.yChromaOffset = VK_CHROMA_LOCATION_COSITED_EVEN;
806 } else {
807 assert(feats & VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT);
808 sycci.xChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
809 sycci.yChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
810 }
811 sycci.chromaFilter = VK_FILTER_LINEAR;
812 sycci.forceExplicitReconstruction = VK_FALSE;
813 VkResult res = VKSCR(CreateSamplerYcbcrConversion)(screen->dev, &sycci, NULL, &obj->sampler_conversion);
814 if (res != VK_SUCCESS) {
815 mesa_loge("ZINK: vkCreateSamplerYcbcrConversion failed");
816 goto fail1;
817 }
818 } else if (whandle) {
819 obj->plane_strides[whandle->plane] = whandle->stride;
820 }
821
822 VkResult result = VKSCR(CreateImage)(screen->dev, &ici, NULL, &obj->image);
823 if (result != VK_SUCCESS) {
824 if (try_modifiers) {
825 for (unsigned i = 0; i < modifiers_count; i++) {
826 if (modifiers[i] == mod)
827 continue;
828 idfmeci.drmFormatModifier = modifiers[i];
829 result = VKSCR(CreateImage)(screen->dev, &ici, NULL, &obj->image);
830 if (result == VK_SUCCESS)
831 break;
832 }
833 }
834 }
835 if (result != VK_SUCCESS) {
836 mesa_loge("ZINK: vkCreateImage failed (%s)", vk_Result_to_str(result));
837 goto fail1;
838 }
839
840 if (ici.tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
841 VkImageDrmFormatModifierPropertiesEXT modprops = {0};
842 modprops.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT;
843 result = VKSCR(GetImageDrmFormatModifierPropertiesEXT)(screen->dev, obj->image, &modprops);
844 if (result != VK_SUCCESS) {
845 mesa_loge("ZINK: vkGetImageDrmFormatModifierPropertiesEXT failed");
846 goto fail1;
847 }
848 obj->modifier = modprops.drmFormatModifier;
849 unsigned num_dmabuf_planes = screen->base.get_dmabuf_modifier_planes(&screen->base, obj->modifier, templ->format);
850 obj->modifier_aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
851 if (num_dmabuf_planes > 1)
852 obj->modifier_aspect |= VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
853 if (num_dmabuf_planes > 2)
854 obj->modifier_aspect |= VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
855 if (num_dmabuf_planes > 3)
856 obj->modifier_aspect |= VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT;
857 assert(num_dmabuf_planes <= 4);
858 }
859
860 if (VKSCR(GetImageMemoryRequirements2)) {
861 VkMemoryRequirements2 req2;
862 req2.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
863 VkImageMemoryRequirementsInfo2 info2;
864 info2.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
865 info2.pNext = NULL;
866 info2.image = obj->image;
867 VkMemoryDedicatedRequirements ded;
868 ded.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
869 ded.pNext = NULL;
870 req2.pNext = &ded;
871 VkImagePlaneMemoryRequirementsInfo plane;
872 plane.sType = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO;
873 plane.pNext = NULL;
874 if (num_planes > 1)
875 info2.pNext = &plane;
876 unsigned offset = 0;
877 for (unsigned i = 0; i < num_planes; i++) {
878 assert(i < ARRAY_SIZE(plane_aspects));
879 plane.planeAspect = plane_aspects[i];
880 VKSCR(GetImageMemoryRequirements2)(screen->dev, &info2, &req2);
881 if (!i)
882 reqs.alignment = req2.memoryRequirements.alignment;
883 obj->plane_offsets[i] = offset;
884 offset += req2.memoryRequirements.size;
885 reqs.size += req2.memoryRequirements.size;
886 reqs.memoryTypeBits |= req2.memoryRequirements.memoryTypeBits;
887 need_dedicated |= ded.prefersDedicatedAllocation || ded.requiresDedicatedAllocation;
888 }
889 } else {
890 VKSCR(GetImageMemoryRequirements)(screen->dev, obj->image, &reqs);
891 }
892 if (templ->usage == PIPE_USAGE_STAGING && ici.tiling == VK_IMAGE_TILING_LINEAR)
893 flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
894 else
895 flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
896
897 obj->vkflags = ici.flags;
898 obj->vkusage = ici.usage;
899 }
900 obj->alignment = reqs.alignment;
901
902 if (templ->flags & PIPE_RESOURCE_FLAG_MAP_COHERENT || templ->usage == PIPE_USAGE_DYNAMIC)
903 flags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
904 else if (!(flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) &&
905 templ->usage == PIPE_USAGE_STAGING)
906 flags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
907
908 if (templ->bind & ZINK_BIND_TRANSIENT)
909 flags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
910
911 VkMemoryAllocateInfo mai;
912 enum zink_alloc_flag aflags = templ->flags & PIPE_RESOURCE_FLAG_SPARSE ? ZINK_ALLOC_SPARSE : 0;
913 mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
914 mai.pNext = NULL;
915 mai.allocationSize = reqs.size;
916 enum zink_heap heap = zink_heap_from_domain_flags(flags, aflags);
917 mai.memoryTypeIndex = screen->heap_map[heap];
918 if (unlikely(!(reqs.memoryTypeBits & BITFIELD_BIT(mai.memoryTypeIndex)))) {
919 /* not valid based on reqs; demote to more compatible type */
920 switch (heap) {
921 case ZINK_HEAP_DEVICE_LOCAL_VISIBLE:
922 heap = ZINK_HEAP_DEVICE_LOCAL;
923 break;
924 case ZINK_HEAP_HOST_VISIBLE_CACHED:
925 heap = ZINK_HEAP_HOST_VISIBLE_COHERENT;
926 break;
927 default:
928 break;
929 }
930 mai.memoryTypeIndex = screen->heap_map[heap];
931 assert(reqs.memoryTypeBits & BITFIELD_BIT(mai.memoryTypeIndex));
932 }
933
934 VkMemoryDedicatedAllocateInfo ded_alloc_info = {
935 .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
936 .pNext = mai.pNext,
937 .image = obj->image,
938 .buffer = VK_NULL_HANDLE,
939 };
940
941 if (screen->info.have_KHR_dedicated_allocation && need_dedicated) {
942 ded_alloc_info.pNext = mai.pNext;
943 mai.pNext = &ded_alloc_info;
944 }
945
946 VkExportMemoryAllocateInfo emai;
947 if ((templ->bind & ZINK_BIND_VIDEO) || ((templ->bind & PIPE_BIND_SHARED) && shared) || (templ->bind & ZINK_BIND_DMABUF)) {
948 emai.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
949 emai.handleTypes = export_types;
950
951 emai.pNext = mai.pNext;
952 mai.pNext = &emai;
953 obj->exportable = true;
954 }
955
956 #ifdef ZINK_USE_DMABUF
957
958 #if !defined(_WIN32)
959 VkImportMemoryFdInfoKHR imfi = {
960 VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
961 NULL,
962 };
963
964 if (whandle) {
965 imfi.pNext = NULL;
966 imfi.handleType = external;
967 imfi.fd = os_dupfd_cloexec(whandle->handle);
968 if (imfi.fd < 0) {
969 mesa_loge("ZINK: failed to dup dmabuf fd: %s\n", strerror(errno));
970 goto fail1;
971 }
972
973 imfi.pNext = mai.pNext;
974 mai.pNext = &imfi;
975 }
976 #else
977 VkImportMemoryWin32HandleInfoKHR imfi = {
978 VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
979 NULL,
980 };
981
982 if (whandle) {
983 HANDLE source_target = GetCurrentProcess();
984 HANDLE out_handle;
985
986 bool result = DuplicateHandle(source_target, whandle->handle, source_target, &out_handle, 0, false, DUPLICATE_SAME_ACCESS);
987
988 if (!result || !out_handle) {
989 mesa_loge("ZINK: failed to DuplicateHandle with winerr: %08x\n", (int)GetLastError());
990 goto fail1;
991 }
992
993 imfi.pNext = NULL;
994 imfi.handleType = external;
995 imfi.handle = out_handle;
996
997 imfi.pNext = mai.pNext;
998 mai.pNext = &imfi;
999 }
1000 #endif
1001
1002 #endif
1003
1004 unsigned alignment = MAX2(reqs.alignment, 256);
1005 if (templ->usage == PIPE_USAGE_STAGING && obj->is_buffer)
1006 alignment = MAX2(alignment, screen->info.props.limits.minMemoryMapAlignment);
1007 obj->alignment = alignment;
1008 retry:
1009 obj->bo = zink_bo(zink_bo_create(screen, reqs.size, alignment, heap, mai.pNext ? ZINK_ALLOC_NO_SUBALLOC : 0, mai.pNext));
1010 if (!obj->bo) {
1011 if (heap == ZINK_HEAP_DEVICE_LOCAL_VISIBLE) {
1012 if (templ->flags & PIPE_RESOURCE_FLAG_MAP_COHERENT || templ->usage == PIPE_USAGE_DYNAMIC)
1013 heap = ZINK_HEAP_HOST_VISIBLE_COHERENT;
1014 else
1015 heap = ZINK_HEAP_DEVICE_LOCAL;
1016 goto retry;
1017 }
1018 goto fail2;
1019 }
1020 if (aflags == ZINK_ALLOC_SPARSE) {
1021 obj->size = templ->width0;
1022 } else {
1023 obj->offset = zink_bo_get_offset(obj->bo);
1024 obj->size = zink_bo_get_size(obj->bo);
1025 }
1026
1027 obj->coherent = obj->bo->base.placement & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1028 if (!(templ->flags & PIPE_RESOURCE_FLAG_SPARSE)) {
1029 obj->host_visible = obj->bo->base.placement & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
1030 }
1031
1032 if (templ->target == PIPE_BUFFER) {
1033 if (!(templ->flags & PIPE_RESOURCE_FLAG_SPARSE)) {
1034 if (VKSCR(BindBufferMemory)(screen->dev, obj->buffer, zink_bo_get_mem(obj->bo), obj->offset) != VK_SUCCESS) {
1035 mesa_loge("ZINK: vkBindBufferMemory failed");
1036 goto fail3;
1037 }
1038 if (obj->storage_buffer && VKSCR(BindBufferMemory)(screen->dev, obj->storage_buffer, zink_bo_get_mem(obj->bo), obj->offset) != VK_SUCCESS) {
1039 mesa_loge("ZINK: vkBindBufferMemory failed");
1040 goto fail3;
1041 }
1042 }
1043 } else {
1044 if (num_planes > 1) {
1045 VkBindImageMemoryInfo infos[3];
1046 VkBindImagePlaneMemoryInfo planes[3];
1047 for (unsigned i = 0; i < num_planes; i++) {
1048 infos[i].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
1049 infos[i].image = obj->image;
1050 infos[i].memory = zink_bo_get_mem(obj->bo);
1051 infos[i].memoryOffset = obj->plane_offsets[i];
1052 if (templ->bind & ZINK_BIND_VIDEO) {
1053 infos[i].pNext = &planes[i];
1054 planes[i].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
1055 planes[i].pNext = NULL;
1056 planes[i].planeAspect = plane_aspects[i];
1057 }
1058 }
1059 if (VKSCR(BindImageMemory2)(screen->dev, num_planes, infos) != VK_SUCCESS) {
1060 mesa_loge("ZINK: vkBindImageMemory2 failed");
1061 goto fail3;
1062 }
1063 } else {
1064 if (!(templ->flags & PIPE_RESOURCE_FLAG_SPARSE))
1065 if (VKSCR(BindImageMemory)(screen->dev, obj->image, zink_bo_get_mem(obj->bo), obj->offset) != VK_SUCCESS) {
1066 mesa_loge("ZINK: vkBindImageMemory failed");
1067 goto fail3;
1068 }
1069 }
1070 }
1071 return obj;
1072
1073 fail3:
1074 zink_bo_unref(screen, obj->bo);
1075
1076 fail2:
1077 if (templ->target == PIPE_BUFFER) {
1078 VKSCR(DestroyBuffer)(screen->dev, obj->buffer, NULL);
1079 VKSCR(DestroyBuffer)(screen->dev, obj->storage_buffer, NULL);
1080 } else
1081 VKSCR(DestroyImage)(screen->dev, obj->image, NULL);
1082 fail1:
1083 FREE(obj);
1084 return NULL;
1085 }
1086
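/* Common creation path behind zink_resource_create*, zink_resource_from_handle, and
 * drawable creation: wraps resource_object_create and sets up the per-resource view
 * caches and swapchain/display-target state.
 */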
1087 static struct pipe_resource *
1088 resource_create(struct pipe_screen *pscreen,
1089 const struct pipe_resource *templ,
1090 struct winsys_handle *whandle,
1091 unsigned external_usage,
1092 const uint64_t *modifiers, int modifiers_count,
1093 const void *loader_private)
1094 {
1095 struct zink_screen *screen = zink_screen(pscreen);
1096 struct zink_resource *res = CALLOC_STRUCT_CL(zink_resource);
1097
1098 if (modifiers_count > 0 && screen->info.have_EXT_image_drm_format_modifier) {
1099 /* for rebinds */
1100 res->modifiers_count = modifiers_count;
1101 res->modifiers = mem_dup(modifiers, modifiers_count * sizeof(uint64_t));
1102 if (!res->modifiers) {
1103 FREE_CL(res);
1104 return NULL;
1105 }
1106 }
1107
1108 res->base.b = *templ;
1109
1110 threaded_resource_init(&res->base.b, false);
1111 pipe_reference_init(&res->base.b.reference, 1);
1112 res->base.b.screen = pscreen;
1113
1114 bool optimal_tiling = false;
1115 struct pipe_resource templ2 = *templ;
1116 if (templ2.flags & PIPE_RESOURCE_FLAG_SPARSE)
1117 templ2.bind |= PIPE_BIND_SHADER_IMAGE;
1118 if (screen->faked_e5sparse && templ->format == PIPE_FORMAT_R9G9B9E5_FLOAT) {
1119 templ2.flags &= ~PIPE_RESOURCE_FLAG_SPARSE;
1120 res->base.b.flags &= ~PIPE_RESOURCE_FLAG_SPARSE;
1121 }
1122 res->obj = resource_object_create(screen, &templ2, whandle, &optimal_tiling, modifiers, modifiers_count, loader_private);
1123 if (!res->obj) {
1124 free(res->modifiers);
1125 FREE_CL(res);
1126 return NULL;
1127 }
1128
1129 res->internal_format = templ->format;
1130 if (templ->target == PIPE_BUFFER) {
1131 util_range_init(&res->valid_buffer_range);
1132 res->base.b.bind |= PIPE_BIND_SHADER_IMAGE;
1133 if (!screen->resizable_bar && templ->width0 >= 8196) {
1134 /* We don't want to evict buffers from VRAM by mapping them for CPU access,
1135 * because they might never be moved back again. If a buffer is large enough,
1136 * upload data by copying from a temporary GTT buffer. 8K might not seem much,
1137 * but there can be 100000 buffers.
1138 *
1139 * This tweak improves performance for viewperf.
1140 */
1141 res->base.b.flags |= PIPE_RESOURCE_FLAG_DONT_MAP_DIRECTLY;
1142 }
1143 } else {
1144 if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE)
1145 res->base.b.bind |= PIPE_BIND_SHADER_IMAGE;
1146 if (templ->flags & PIPE_RESOURCE_FLAG_SPARSE) {
1147 uint32_t count = 1;
1148 VKSCR(GetImageSparseMemoryRequirements)(screen->dev, res->obj->image, &count, &res->sparse);
1149 res->base.b.nr_sparse_levels = res->sparse.imageMipTailFirstLod;
1150 }
1151 res->format = zink_get_format(screen, templ->format);
1152 if (templ->target == PIPE_TEXTURE_1D || templ->target == PIPE_TEXTURE_1D_ARRAY) {
1153 res->need_2D = (screen->need_2D_zs && util_format_is_depth_or_stencil(templ->format)) ||
1154 (screen->need_2D_sparse && (templ->flags & PIPE_RESOURCE_FLAG_SPARSE));
1155 }
1157       res->dmabuf = res->dmabuf_acquire = whandle && whandle->type == WINSYS_HANDLE_TYPE_FD;
1158 res->layout = res->dmabuf_acquire ? VK_IMAGE_LAYOUT_PREINITIALIZED : VK_IMAGE_LAYOUT_UNDEFINED;
1159 res->optimal_tiling = optimal_tiling;
1160 res->aspect = aspect_from_format(templ->format);
1161 }
1162
1163 if (loader_private) {
1164 if (templ->bind & PIPE_BIND_DISPLAY_TARGET) {
1165 /* backbuffer */
1166 res->obj->dt = zink_kopper_displaytarget_create(screen,
1167 res->base.b.bind,
1168 res->base.b.format,
1169 templ->width0,
1170 templ->height0,
1171 64, loader_private,
1172 &res->dt_stride);
1173 assert(res->obj->dt);
1174 } else {
1175 /* frontbuffer */
1176 struct zink_resource *back = (void*)loader_private;
1177 struct kopper_displaytarget *cdt = back->obj->dt;
1178 cdt->refcount++;
1179 assert(back->obj->dt);
1180 res->obj->dt = back->obj->dt;
1181 }
1182 struct kopper_displaytarget *cdt = res->obj->dt;
1183 if (zink_kopper_has_srgb(cdt))
1184 res->obj->vkflags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1185 if (cdt->swapchain->scci.flags == VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
1186 res->obj->vkflags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT;
1187 res->obj->vkusage = cdt->swapchain->scci.imageUsage;
1188 res->base.b.bind |= PIPE_BIND_DISPLAY_TARGET;
1189 res->optimal_tiling = true;
1190 res->swapchain = true;
1191 }
1192 if (!res->obj->host_visible)
1193 res->base.b.flags |= PIPE_RESOURCE_FLAG_DONT_MAP_DIRECTLY;
1194 if (res->obj->is_buffer) {
1195 res->base.buffer_id_unique = util_idalloc_mt_alloc(&screen->buffer_ids);
1196 _mesa_hash_table_init(&res->bufferview_cache, NULL, NULL, equals_bvci);
1197 simple_mtx_init(&res->bufferview_mtx, mtx_plain);
1198 } else {
1199 _mesa_hash_table_init(&res->surface_cache, NULL, NULL, equals_ivci);
1200 simple_mtx_init(&res->surface_mtx, mtx_plain);
1201 }
1202 if (res->obj->exportable)
1203 res->base.b.bind |= ZINK_BIND_DMABUF;
1204 return &res->base.b;
1205 }
1206
1207 static struct pipe_resource *
1208 zink_resource_create(struct pipe_screen *pscreen,
1209 const struct pipe_resource *templ)
1210 {
1211 return resource_create(pscreen, templ, NULL, 0, NULL, 0, NULL);
1212 }
1213
1214 static struct pipe_resource *
1215 zink_resource_create_with_modifiers(struct pipe_screen *pscreen, const struct pipe_resource *templ,
1216 const uint64_t *modifiers, int modifiers_count)
1217 {
1218 return resource_create(pscreen, templ, NULL, 0, modifiers, modifiers_count, NULL);
1219 }
1220
1221 static struct pipe_resource *
1222 zink_resource_create_drawable(struct pipe_screen *pscreen,
1223 const struct pipe_resource *templ,
1224 const void *loader_private)
1225 {
1226 return resource_create(pscreen, templ, NULL, 0, NULL, 0, loader_private);
1227 }
1228
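/* Recreate the backing object with additional bind flags (e.g. ZINK_BIND_DMABUF for
 * export) and copy the old contents over with resource_copy_region.
 */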
1229 static bool
1230 add_resource_bind(struct zink_context *ctx, struct zink_resource *res, unsigned bind)
1231 {
1232 struct zink_screen *screen = zink_screen(ctx->base.screen);
1233 assert((res->base.b.bind & bind) == 0);
1234 zink_resource_image_barrier(ctx, res, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 0, 0);
1235 res->base.b.bind |= bind;
1236 struct zink_resource_object *old_obj = res->obj;
1237 if (bind & ZINK_BIND_DMABUF && !res->modifiers_count && screen->info.have_EXT_image_drm_format_modifier) {
1238 res->modifiers_count = screen->modifier_props[res->base.b.format].drmFormatModifierCount;
1239 res->modifiers = malloc(res->modifiers_count * sizeof(uint64_t));
1240 for (unsigned i = 0; i < screen->modifier_props[res->base.b.format].drmFormatModifierCount; i++)
1241 res->modifiers[i] = screen->modifier_props[res->base.b.format].pDrmFormatModifierProperties[i].drmFormatModifier;
1242 }
1243 struct zink_resource_object *new_obj = resource_object_create(screen, &res->base.b, NULL, &res->optimal_tiling, res->modifiers, res->modifiers_count, NULL);
1244 if (!new_obj) {
1245 debug_printf("new backing resource alloc failed!");
1246 res->base.b.bind &= ~bind;
1247 return false;
1248 }
1249 struct zink_resource staging = *res;
1250 staging.obj = old_obj;
1251 staging.all_binds = 0;
1252 res->layout = VK_IMAGE_LAYOUT_UNDEFINED;
1253 res->obj->access = 0;
1254 res->obj->access_stage = 0;
1255 bool needs_unref = true;
1256 if (zink_resource_has_usage(res)) {
1257 zink_batch_reference_resource_move(&ctx->batch, res);
1258 needs_unref = false;
1259 }
1260 res->obj = new_obj;
1261 zink_descriptor_set_refs_clear(&old_obj->desc_set_refs, old_obj);
1262 for (unsigned i = 0; i <= res->base.b.last_level; i++) {
1263 struct pipe_box box = {0, 0, 0,
1264 u_minify(res->base.b.width0, i),
1265 u_minify(res->base.b.height0, i), res->base.b.array_size};
1266 box.depth = util_num_layers(&res->base.b, i);
1267 ctx->base.resource_copy_region(&ctx->base, &res->base.b, i, 0, 0, 0, &staging.base.b, i, &box);
1268 }
1269 if (needs_unref)
1270 zink_resource_object_reference(screen, &old_obj, NULL);
1271 return true;
1272 }
1273
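/* Answer gallium's resource-param queries (plane count, stride, offset, modifier,
 * handles) using vkGetImageSubresourceLayout and the stored modifier/handle info.
 */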
1274 static bool
1275 zink_resource_get_param(struct pipe_screen *pscreen, struct pipe_context *pctx,
1276 struct pipe_resource *pres,
1277 unsigned plane,
1278 unsigned layer,
1279 unsigned level,
1280 enum pipe_resource_param param,
1281 unsigned handle_usage,
1282 uint64_t *value)
1283 {
1284 struct zink_screen *screen = zink_screen(pscreen);
1285 struct zink_resource *res = zink_resource(pres);
1286 struct zink_resource_object *obj = res->obj;
1287 struct winsys_handle whandle;
1288 VkImageAspectFlags aspect;
1289 if (obj->modifier_aspect) {
1290 switch (plane) {
1291 case 0:
1292 aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
1293 break;
1294 case 1:
1295 aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
1296 break;
1297 case 2:
1298 aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
1299 break;
1300 case 3:
1301 aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT;
1302 break;
1303 default:
1304 unreachable("how many planes you got in this thing?");
1305 }
1306 } else if (res->obj->sampler_conversion) {
1307 aspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
1308 } else {
1309 aspect = res->aspect;
1310 }
1311 switch (param) {
1312 case PIPE_RESOURCE_PARAM_NPLANES:
1313 if (screen->info.have_EXT_image_drm_format_modifier)
1314 *value = util_format_get_num_planes(res->drm_format);
1315 else
1316 *value = 1;
1317 break;
1318
1319 case PIPE_RESOURCE_PARAM_STRIDE: {
1320 VkImageSubresource sub_res = {0};
1321 VkSubresourceLayout sub_res_layout = {0};
1322
1323 sub_res.aspectMask = aspect;
1324
1325 VKSCR(GetImageSubresourceLayout)(screen->dev, obj->image, &sub_res, &sub_res_layout);
1326
1327 *value = sub_res_layout.rowPitch;
1328 break;
1329 }
1330
1331 case PIPE_RESOURCE_PARAM_OFFSET: {
1332 VkImageSubresource isr = {
1333 aspect,
1334 level,
1335 layer
1336 };
1337 VkSubresourceLayout srl;
1338 VKSCR(GetImageSubresourceLayout)(screen->dev, obj->image, &isr, &srl);
1339 *value = srl.offset;
1340 break;
1341 }
1342
1343 case PIPE_RESOURCE_PARAM_MODIFIER: {
1344 *value = obj->modifier;
1345 break;
1346 }
1347
1348 case PIPE_RESOURCE_PARAM_LAYER_STRIDE: {
1349 VkImageSubresource isr = {
1350 aspect,
1351 level,
1352 layer
1353 };
1354 VkSubresourceLayout srl;
1355 VKSCR(GetImageSubresourceLayout)(screen->dev, obj->image, &isr, &srl);
1356 if (res->base.b.target == PIPE_TEXTURE_3D)
1357 *value = srl.depthPitch;
1358 else
1359 *value = srl.arrayPitch;
1360 break;
1361 }
1362
1363 return false;
1364 case PIPE_RESOURCE_PARAM_HANDLE_TYPE_KMS:
1365 case PIPE_RESOURCE_PARAM_HANDLE_TYPE_SHARED:
1366 case PIPE_RESOURCE_PARAM_HANDLE_TYPE_FD: {
1367 #ifdef ZINK_USE_DMABUF
1368 memset(&whandle, 0, sizeof(whandle));
1369 if (param == PIPE_RESOURCE_PARAM_HANDLE_TYPE_SHARED)
1370 whandle.type = WINSYS_HANDLE_TYPE_SHARED;
1371 if (param == PIPE_RESOURCE_PARAM_HANDLE_TYPE_KMS)
1372 whandle.type = WINSYS_HANDLE_TYPE_KMS;
1373 else if (param == PIPE_RESOURCE_PARAM_HANDLE_TYPE_FD)
1374 whandle.type = WINSYS_HANDLE_TYPE_FD;
1375
1376 if (!pscreen->resource_get_handle(pscreen, pctx, pres, &whandle, handle_usage))
1377 return false;
1378
1379 #ifdef _WIN32
1380 *value = (uintptr_t)whandle.handle;
1381 #else
1382 *value = whandle.handle;
1383 #endif
1384 break;
1385 #else
1386 (void)whandle;
1387 return false;
1388 #endif
1389 }
1390 }
1391 return true;
1392 }
1393
1394 static bool
1395 zink_resource_get_handle(struct pipe_screen *pscreen,
1396 struct pipe_context *context,
1397 struct pipe_resource *tex,
1398 struct winsys_handle *whandle,
1399 unsigned usage)
1400 {
1401 if (whandle->type == WINSYS_HANDLE_TYPE_FD || whandle->type == WINSYS_HANDLE_TYPE_KMS) {
1402 #ifdef ZINK_USE_DMABUF
1403 struct zink_resource *res = zink_resource(tex);
1404 struct zink_screen *screen = zink_screen(pscreen);
1405 struct zink_resource_object *obj = res->obj;
1406
1407 #if !defined(_WIN32)
1408 if (whandle->type == WINSYS_HANDLE_TYPE_KMS && screen->drm_fd == -1) {
1409 whandle->handle = -1;
1410 } else {
1411 if (!res->obj->exportable) {
1412 assert(!res->all_binds); //TODO handle if problematic
1413 assert(!zink_resource_usage_is_unflushed(res));
1414 unsigned bind = ZINK_BIND_DMABUF;
1415 if (!(res->base.b.bind & PIPE_BIND_SHARED))
1416 bind |= PIPE_BIND_SHARED;
1417 if (!add_resource_bind(screen->copy_context, res, bind))
1418 return false;
1419 p_atomic_inc(&screen->image_rebind_counter);
1420 screen->copy_context->base.flush(&screen->copy_context->base, NULL, 0);
1421 obj = res->obj;
1422 }
1423
1424 VkMemoryGetFdInfoKHR fd_info = {0};
1425 int fd;
1426 fd_info.sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
1427 fd_info.memory = zink_bo_get_mem(obj->bo);
1428 if (whandle->type == WINSYS_HANDLE_TYPE_FD)
1429 fd_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
1430 else
1431 fd_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
1432 VkResult result = VKSCR(GetMemoryFdKHR)(screen->dev, &fd_info, &fd);
1433 if (result != VK_SUCCESS) {
1434 mesa_loge("ZINK: vkGetMemoryFdKHR failed");
1435 return false;
1436 }
1437 if (whandle->type == WINSYS_HANDLE_TYPE_KMS) {
1438 uint32_t h;
1439 bool ret = zink_bo_get_kms_handle(screen, obj->bo, fd, &h);
1440 close(fd);
1441 if (!ret)
1442 return false;
1443 fd = h;
1444 }
1445
1446 whandle->handle = fd;
1447 }
1448 #else
1449 VkMemoryGetWin32HandleInfoKHR handle_info = {0};
1450 HANDLE handle;
1451 handle_info.sType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR;
1452 //TODO: remove for wsi
1453 handle_info.memory = zink_bo_get_mem(obj->bo);
1454 handle_info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
1455 VkResult result = VKSCR(GetMemoryWin32HandleKHR)(screen->dev, &handle_info, &handle);
1456 if (result != VK_SUCCESS)
1457 return false;
1458 whandle->handle = handle;
1459 #endif
1460 uint64_t value;
1461 zink_resource_get_param(pscreen, context, tex, 0, 0, 0, PIPE_RESOURCE_PARAM_MODIFIER, 0, &value);
1462 whandle->modifier = value;
1463 zink_resource_get_param(pscreen, context, tex, 0, 0, 0, PIPE_RESOURCE_PARAM_OFFSET, 0, &value);
1464 whandle->offset = value;
1465 zink_resource_get_param(pscreen, context, tex, 0, 0, 0, PIPE_RESOURCE_PARAM_STRIDE, 0, &value);
1466 whandle->stride = value;
1467 #else
1468 return false;
1469 #endif
1470 }
1471 return true;
1472 }
1473
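/* Import a resource from a winsys handle.
 *
 * Imports with an explicit modifier require EXT_image_drm_format_modifier;
 * otherwise the import is rejected. When the template carries no format,
 * the format stored in the handle is used, and non-buffer imports are
 * assumed to contain valid data.
 */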
1474 static struct pipe_resource *
1475 zink_resource_from_handle(struct pipe_screen *pscreen,
1476 const struct pipe_resource *templ,
1477 struct winsys_handle *whandle,
1478 unsigned usage)
1479 {
1480 #ifdef ZINK_USE_DMABUF
1481 if (whandle->modifier != DRM_FORMAT_MOD_INVALID &&
1482 !zink_screen(pscreen)->info.have_EXT_image_drm_format_modifier)
1483 return NULL;
1484
1485 struct pipe_resource templ2 = *templ;
1486 if (templ->format == PIPE_FORMAT_NONE)
1487 templ2.format = whandle->format;
1488
1489 uint64_t modifier = DRM_FORMAT_MOD_INVALID;
1490 int modifier_count = 0;
1491 if (whandle->modifier != DRM_FORMAT_MOD_INVALID) {
1492 modifier = whandle->modifier;
1493 modifier_count = 1;
1494 }
1495 struct pipe_resource *pres = resource_create(pscreen, &templ2, whandle, usage, &modifier, modifier_count, NULL);
1496 if (pres) {
1497 struct zink_resource *res = zink_resource(pres);
1498 res->drm_format = whandle->format;
1499 if (pres->target != PIPE_BUFFER)
1500 res->valid = true;
1501 }
1502 return pres;
1503 #else
1504 return NULL;
1505 #endif
1506 }
1507
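/* A pipe_memory_object is just a duplicated copy of the imported winsys
 * handle (dup'd fd, or DuplicateHandle on Windows), retagged with
 * ZINK_EXTERNAL_MEMORY_HANDLE so that later resource creation in
 * zink_resource_from_memobj can tell it apart from a regular handle.
 */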
1508 struct zink_memory_object {
1509 struct pipe_memory_object b;
1510 struct winsys_handle whandle;
1511 };
1512
1513 static struct pipe_memory_object *
1514 zink_memobj_create_from_handle(struct pipe_screen *pscreen, struct winsys_handle *whandle, bool dedicated)
1515 {
1516 struct zink_memory_object *memobj = CALLOC_STRUCT(zink_memory_object);
1517 if (!memobj)
1518 return NULL;
1519 memcpy(&memobj->whandle, whandle, sizeof(struct winsys_handle));
1520 memobj->whandle.type = ZINK_EXTERNAL_MEMORY_HANDLE;
1521
1522 #ifdef ZINK_USE_DMABUF
1523
1524 #if !defined(_WIN32)
1525 memobj->whandle.handle = os_dupfd_cloexec(whandle->handle);
1526 #else
1527 HANDLE source_target = GetCurrentProcess();
1528 HANDLE out_handle;
1529
1530 DuplicateHandle(source_target, whandle->handle, source_target, &out_handle, 0, false, DUPLICATE_SAME_ACCESS);
1531 memobj->whandle.handle = out_handle;
1532
1533 #endif /* _WIN32 */
1534 #endif /* ZINK_USE_DMABUF */
1535
1536 return (struct pipe_memory_object *)memobj;
1537 }
1538
1539 static void
1540 zink_memobj_destroy(struct pipe_screen *pscreen, struct pipe_memory_object *pmemobj)
1541 {
1542 #ifdef ZINK_USE_DMABUF
1543 struct zink_memory_object *memobj = (struct zink_memory_object *)pmemobj;
1544
1545 #if !defined(_WIN32)
1546 close(memobj->whandle.handle);
1547 #else
1548 CloseHandle(memobj->whandle.handle);
1549 #endif /* _WIN32 */
1550 #endif /* ZINK_USE_DMABUF */
1551
1552 FREE(pmemobj);
1553 }
1554
1555 static struct pipe_resource *
1556 zink_resource_from_memobj(struct pipe_screen *pscreen,
1557 const struct pipe_resource *templ,
1558 struct pipe_memory_object *pmemobj,
1559 uint64_t offset)
1560 {
1561 struct zink_memory_object *memobj = (struct zink_memory_object *)pmemobj;
1562
1563 struct pipe_resource *pres = resource_create(pscreen, templ, &memobj->whandle, 0, NULL, 0, NULL);
1564 if (pres && pres->target != PIPE_BUFFER)
1565 zink_resource(pres)->valid = true;
1566 return pres;
1567 }
1568
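/* Orphan a buffer's backing storage.
 *
 * If the buffer can be invalidated (not sparse, has valid contents, and is
 * still referenced by the GPU), a fresh zink_resource_object is allocated,
 * the old one is handed off to the current batch, and all existing bindings
 * are rebound to the new storage. Returns false when invalidation isn't
 * needed or isn't possible, in which case callers fall back to other paths.
 */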
1569 static bool
1570 invalidate_buffer(struct zink_context *ctx, struct zink_resource *res)
1571 {
1572 struct zink_screen *screen = zink_screen(ctx->base.screen);
1573
1574 assert(res->base.b.target == PIPE_BUFFER);
1575
1576 if (res->base.b.flags & PIPE_RESOURCE_FLAG_SPARSE)
1577 return false;
1578
1579 if (res->valid_buffer_range.start > res->valid_buffer_range.end)
1580 return false;
1581
1582 if (res->so_valid)
1583 ctx->dirty_so_targets = true;
1584 /* force counter buffer reset */
1585 res->so_valid = false;
1586
1587 util_range_set_empty(&res->valid_buffer_range);
1588 if (!zink_resource_has_usage(res))
1589 return false;
1590
1591 struct zink_resource_object *old_obj = res->obj;
1592 struct zink_resource_object *new_obj = resource_object_create(screen, &res->base.b, NULL, NULL, NULL, 0, NULL);
1593 if (!new_obj) {
1594 debug_printf("new backing resource alloc failed!");
1595 return false;
1596 }
1597 /* this ref must be transferred before rebind or else BOOM */
1598 zink_batch_reference_resource_move(&ctx->batch, res);
1599 res->obj = new_obj;
1600 zink_resource_rebind(ctx, res);
1601 zink_descriptor_set_refs_clear(&old_obj->desc_set_refs, old_obj);
1602 return true;
1603 }
1604
1605
1606 static void
1607 zink_resource_invalidate(struct pipe_context *pctx, struct pipe_resource *pres)
1608 {
1609 if (pres->target == PIPE_BUFFER)
1610 invalidate_buffer(zink_context(pctx), zink_resource(pres));
1611 else {
1612 struct zink_resource *res = zink_resource(pres);
1613 if (res->valid && res->fb_binds)
1614 zink_context(pctx)->rp_loadop_changed = true;
1615 res->valid = false;
1616 }
1617 }
1618
1619 static void
1620 zink_transfer_copy_bufimage(struct zink_context *ctx,
1621 struct zink_resource *dst,
1622 struct zink_resource *src,
1623 struct zink_transfer *trans)
1624 {
1625 assert((trans->base.b.usage & (PIPE_MAP_DEPTH_ONLY | PIPE_MAP_STENCIL_ONLY)) !=
1626 (PIPE_MAP_DEPTH_ONLY | PIPE_MAP_STENCIL_ONLY));
1627
1628 bool buf2img = src->base.b.target == PIPE_BUFFER;
1629
1630 struct pipe_box box = trans->base.b.box;
1631 int x = box.x;
1632 if (buf2img)
1633 box.x = trans->offset;
1634
1635 if (dst->obj->transfer_dst)
1636 zink_copy_image_buffer(ctx, dst, src, trans->base.b.level, buf2img ? x : 0,
1637 box.y, box.z, trans->base.b.level, &box, trans->base.b.usage);
1638 else
1639 util_blitter_copy_texture(ctx->blitter, &dst->base.b, trans->base.b.level,
1640 x, box.y, box.z, &src->base.b,
1641 0, &box);
1642 }
1643
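/* Expand [offset, offset + size) so both ends are aligned to `alignment`
 * (normally nonCoherentAtomSize) without running past the end of the
 * allocation, as VkMappedMemoryRange requires for non-coherent memory.
 *
 * Illustrative values (assumed, not from the code): alignment = 64,
 * offset = 100, size = 10, obj_size = 4096 yields offset = 64, size = 64.
 */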
1644 ALWAYS_INLINE static void
1645 align_offset_size(const VkDeviceSize alignment, VkDeviceSize *offset, VkDeviceSize *size, VkDeviceSize obj_size)
1646 {
1647 VkDeviceSize align = *offset % alignment;
1648 if (alignment - 1 > *offset)
1649 *offset = 0;
1650 else
1651 *offset -= align, *size += align;
1652 align = alignment - (*size % alignment);
1653 if (*offset + *size + align > obj_size)
1654 *size = obj_size - *offset;
1655 else
1656 *size += align;
1657 }
1658
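/* Build a VkMappedMemoryRange covering [offset, offset + size) of the
 * object's memory, aligned via align_offset_size(). Illustrative use,
 * mirroring the callers below:
 *
 *    VkMappedMemoryRange range =
 *       zink_resource_init_mem_range(screen, res->obj, res->obj->offset + offset, size);
 *    VKSCR(InvalidateMappedMemoryRanges)(screen->dev, 1, &range);
 */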
1659 VkMappedMemoryRange
1660 zink_resource_init_mem_range(struct zink_screen *screen, struct zink_resource_object *obj, VkDeviceSize offset, VkDeviceSize size)
1661 {
1662 assert(obj->size);
1663 align_offset_size(screen->info.props.limits.nonCoherentAtomSize, &offset, &size, obj->size);
1664 VkMappedMemoryRange range = {
1665 VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
1666 NULL,
1667 zink_bo_get_mem(obj->bo),
1668 offset,
1669 size
1670 };
1671 assert(range.size);
1672 return range;
1673 }
1674
1675 static void *
1676 map_resource(struct zink_screen *screen, struct zink_resource *res)
1677 {
1678 assert(res->obj->host_visible);
1679 return zink_bo_map(screen, res->obj->bo);
1680 }
1681
1682 static void
1683 unmap_resource(struct zink_screen *screen, struct zink_resource *res)
1684 {
1685 zink_bo_unmap(screen, res->obj->bo);
1686 }
1687
1688 static struct zink_transfer *
1689 create_transfer(struct zink_context *ctx, struct pipe_resource *pres, unsigned usage, const struct pipe_box *box)
1690 {
1691 struct zink_transfer *trans;
1692
1693 if (usage & PIPE_MAP_THREAD_SAFE)
1694 trans = calloc(1, sizeof(*trans));
1695 else if (usage & TC_TRANSFER_MAP_THREADED_UNSYNC)
1696 trans = slab_zalloc(&ctx->transfer_pool_unsync);
1697 else
1698 trans = slab_zalloc(&ctx->transfer_pool);
1699 if (!trans)
1700 return NULL;
1701
1702 pipe_resource_reference(&trans->base.b.resource, pres);
1703
1704 trans->base.b.usage = usage;
1705 trans->base.b.box = *box;
1706 return trans;
1707 }
1708
1709 static void
1710 destroy_transfer(struct zink_context *ctx, struct zink_transfer *trans)
1711 {
1712 if (trans->base.b.usage & PIPE_MAP_THREAD_SAFE) {
1713 free(trans);
1714 } else {
1715 /* Don't use pool_transfers_unsync. We are always in the driver
1716 * thread. Freeing an object into a different pool is allowed.
1717 */
1718 slab_free(&ctx->transfer_pool, trans);
1719 }
1720 }
1721
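/* pipe_context::buffer_map implementation. Roughly:
 *  - writes to never-written ranges and whole-buffer discards are promoted
 *    to unsynchronized maps (the latter via invalidate_buffer);
 *  - discarded ranges on busy or non-host-visible buffers get a wait-free
 *    staging allocation from u_upload_mgr, copied back at flush/unmap time;
 *  - synchronized reads from non-STAGING or non-host-visible buffers go
 *    through a temporary PIPE_USAGE_STAGING copy;
 *  - anything else maps the BO directly, waiting on GPU usage and
 *    invalidating non-coherent memory as needed.
 */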
1722 static void *
1723 zink_buffer_map(struct pipe_context *pctx,
1724 struct pipe_resource *pres,
1725 unsigned level,
1726 unsigned usage,
1727 const struct pipe_box *box,
1728 struct pipe_transfer **transfer)
1729 {
1730 struct zink_context *ctx = zink_context(pctx);
1731 struct zink_screen *screen = zink_screen(pctx->screen);
1732 struct zink_resource *res = zink_resource(pres);
1733 struct zink_transfer *trans = create_transfer(ctx, pres, usage, box);
1734 if (!trans)
1735 return NULL;
1736
1737 void *ptr = NULL;
1738
1739 if (res->base.is_user_ptr)
1740 usage |= PIPE_MAP_PERSISTENT;
1741
1742 /* See if the buffer range being mapped has never been initialized,
1743 * in which case it can be mapped unsynchronized. */
1744 if (!(usage & (PIPE_MAP_UNSYNCHRONIZED | TC_TRANSFER_MAP_NO_INFER_UNSYNCHRONIZED)) &&
1745 usage & PIPE_MAP_WRITE && !res->base.is_shared &&
1746 !util_ranges_intersect(&res->valid_buffer_range, box->x, box->x + box->width)) {
1747 usage |= PIPE_MAP_UNSYNCHRONIZED;
1748 }
1749
1750 /* If discarding the entire range, discard the whole resource instead. */
1751 if (usage & PIPE_MAP_DISCARD_RANGE && box->x == 0 && box->width == res->base.b.width0) {
1752 usage |= PIPE_MAP_DISCARD_WHOLE_RESOURCE;
1753 }
1754
1755 /* If a buffer in VRAM is too large and the range is discarded, don't
1756 * map it directly. This makes sure that the buffer stays in VRAM.
1757 */
1758 bool force_discard_range = false;
1759 if (usage & (PIPE_MAP_DISCARD_WHOLE_RESOURCE | PIPE_MAP_DISCARD_RANGE) &&
1760 !(usage & PIPE_MAP_PERSISTENT) &&
1761 res->base.b.flags & PIPE_RESOURCE_FLAG_DONT_MAP_DIRECTLY) {
1762 usage &= ~(PIPE_MAP_DISCARD_WHOLE_RESOURCE | PIPE_MAP_UNSYNCHRONIZED);
1763 usage |= PIPE_MAP_DISCARD_RANGE;
1764 force_discard_range = true;
1765 }
1766
1767 if (usage & PIPE_MAP_DISCARD_WHOLE_RESOURCE &&
1768 !(usage & (PIPE_MAP_UNSYNCHRONIZED | TC_TRANSFER_MAP_NO_INVALIDATE))) {
1769 assert(usage & PIPE_MAP_WRITE);
1770
1771 if (invalidate_buffer(ctx, res)) {
1772 /* At this point, the buffer is always idle. */
1773 usage |= PIPE_MAP_UNSYNCHRONIZED;
1774 } else {
1775 /* Fall back to a temporary buffer. */
1776 usage |= PIPE_MAP_DISCARD_RANGE;
1777 }
1778 }
1779
1780 unsigned map_offset = box->x;
1781 if (usage & PIPE_MAP_DISCARD_RANGE &&
1782 (!res->obj->host_visible ||
1783 !(usage & (PIPE_MAP_UNSYNCHRONIZED | PIPE_MAP_PERSISTENT)))) {
1784
1785 /* Check if mapping this buffer would cause waiting for the GPU.
1786 */
1787
1788 if (!res->obj->host_visible || force_discard_range ||
1789 !zink_resource_usage_check_completion(screen, res, ZINK_RESOURCE_ACCESS_RW)) {
1790 /* Do a wait-free write-only transfer using a temporary buffer. */
1791 unsigned offset;
1792
1793 /* If we are not called from the driver thread, we have
1794 * to use the uploader from u_threaded_context, which is
1795 * local to the calling thread.
1796 */
1797 struct u_upload_mgr *mgr;
1798 if (usage & TC_TRANSFER_MAP_THREADED_UNSYNC)
1799 mgr = ctx->tc->base.stream_uploader;
1800 else
1801 mgr = ctx->base.stream_uploader;
1802 u_upload_alloc(mgr, 0, box->width,
1803 screen->info.props.limits.minMemoryMapAlignment, &offset,
1804 (struct pipe_resource **)&trans->staging_res, (void **)&ptr);
1805 res = zink_resource(trans->staging_res);
1806 trans->offset = offset;
1807 usage |= PIPE_MAP_UNSYNCHRONIZED;
1808 ptr = ((uint8_t *)ptr);
1809 } else {
1810 /* At this point, the buffer is always idle (we checked it above). */
1811 usage |= PIPE_MAP_UNSYNCHRONIZED;
1812 }
1813 } else if (usage & PIPE_MAP_DONTBLOCK) {
1814 /* sparse/device-local will always need to wait since it has to copy */
1815 if (!res->obj->host_visible)
1816 goto fail;
1817 if (!zink_resource_usage_check_completion(screen, res, ZINK_RESOURCE_ACCESS_WRITE))
1818 goto fail;
1819 usage |= PIPE_MAP_UNSYNCHRONIZED;
1820 } else if (!(usage & PIPE_MAP_UNSYNCHRONIZED) &&
1821 (((usage & PIPE_MAP_READ) && !(usage & PIPE_MAP_PERSISTENT) && res->base.b.usage != PIPE_USAGE_STAGING) || !res->obj->host_visible)) {
1822 assert(!(usage & (TC_TRANSFER_MAP_THREADED_UNSYNC | PIPE_MAP_THREAD_SAFE)));
1823 if (!res->obj->host_visible || !(usage & PIPE_MAP_ONCE)) {
1824 trans->offset = box->x % screen->info.props.limits.minMemoryMapAlignment;
1825 trans->staging_res = pipe_buffer_create(&screen->base, PIPE_BIND_LINEAR, PIPE_USAGE_STAGING, box->width + trans->offset);
1826 if (!trans->staging_res)
1827 goto fail;
1828 struct zink_resource *staging_res = zink_resource(trans->staging_res);
1829 zink_copy_buffer(ctx, staging_res, res, trans->offset, box->x, box->width);
1830 res = staging_res;
1831 usage &= ~PIPE_MAP_UNSYNCHRONIZED;
1832 map_offset = trans->offset;
1833 }
1834 } else if ((usage & PIPE_MAP_UNSYNCHRONIZED) && !res->obj->host_visible) {
1835 trans->offset = box->x % screen->info.props.limits.minMemoryMapAlignment;
1836 trans->staging_res = pipe_buffer_create(&screen->base, PIPE_BIND_LINEAR, PIPE_USAGE_STAGING, box->width + trans->offset);
1837 if (!trans->staging_res)
1838 goto fail;
1839 struct zink_resource *staging_res = zink_resource(trans->staging_res);
1840 res = staging_res;
1841 map_offset = trans->offset;
1842 }
1843
1844 if (!(usage & PIPE_MAP_UNSYNCHRONIZED)) {
1845 if (usage & PIPE_MAP_WRITE)
1846 zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_RW);
1847 else
1848 zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_WRITE);
1849 res->obj->access = 0;
1850 res->obj->access_stage = 0;
1851 }
1852
1853 if (!ptr) {
1854 /* if writing to a streamout buffer, ensure synchronization next time it's used */
1855 if (usage & PIPE_MAP_WRITE && res->so_valid) {
1856 ctx->dirty_so_targets = true;
1857 /* force counter buffer reset */
1858 res->so_valid = false;
1859 }
1860 ptr = map_resource(screen, res);
1861 if (!ptr)
1862 goto fail;
1863 ptr = ((uint8_t *)ptr) + map_offset;
1864 }
1865
1866 if (!res->obj->coherent
1867 #if defined(MVK_VERSION)
1868 // Workaround for a MoltenVK limitation on coherent memory:
1869 // MoltenVK can return blank memory ranges when there should be data present.
1870 // This is a known MoltenVK limitation; see
1871 // https://github.com/KhronosGroup/MoltenVK/blob/master/Docs/MoltenVK_Runtime_UserGuide.md#known-moltenvk-limitations
1872
1873 || screen->instance_info.have_MVK_moltenvk
1874 #endif
1875 ) {
1876 VkDeviceSize size = box->width;
1877 VkDeviceSize offset = res->obj->offset + trans->offset;
1878 VkMappedMemoryRange range = zink_resource_init_mem_range(screen, res->obj, offset, size);
1879 if (VKSCR(InvalidateMappedMemoryRanges)(screen->dev, 1, &range) != VK_SUCCESS) {
1880 mesa_loge("ZINK: vkInvalidateMappedMemoryRanges failed");
1881 zink_bo_unmap(screen, res->obj->bo);
1882 goto fail;
1883 }
1884 }
1885 trans->base.b.usage = usage;
1886 if (usage & PIPE_MAP_WRITE)
1887 util_range_add(&res->base.b, &res->valid_buffer_range, box->x, box->x + box->width);
1888 if ((usage & PIPE_MAP_PERSISTENT) && !(usage & PIPE_MAP_COHERENT))
1889 res->obj->persistent_maps++;
1890
1891 success:
1892 *transfer = &trans->base.b;
1893 return ptr;
1894
1895 fail:
1896 destroy_transfer(ctx, trans);
1897 return NULL;
1898 }
1899
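/* pipe_context::texture_map implementation (wrapped by u_transfer_helper).
 *
 * Optimally-tiled or non-host-visible images are mapped through a linear
 * staging buffer (filled by a GPU copy for reads); linear host-visible
 * images are mapped directly, with the host offset computed from
 * vkGetImageSubresourceLayout.
 */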
1900 static void *
1901 zink_image_map(struct pipe_context *pctx,
1902 struct pipe_resource *pres,
1903 unsigned level,
1904 unsigned usage,
1905 const struct pipe_box *box,
1906 struct pipe_transfer **transfer)
1907 {
1908 struct zink_context *ctx = zink_context(pctx);
1909 struct zink_screen *screen = zink_screen(pctx->screen);
1910 struct zink_resource *res = zink_resource(pres);
1911 struct zink_transfer *trans = create_transfer(ctx, pres, usage, box);
1912 if (!trans)
1913 return NULL;
1914
1915 trans->base.b.level = level;
1916 if (zink_is_swapchain(res))
1917 /* this is probably a multi-chain which has already been acquired */
1918 zink_kopper_acquire(ctx, res, 0);
1919
1920 void *ptr;
1921 if (usage & PIPE_MAP_WRITE && !(usage & PIPE_MAP_READ))
1922 /* this behaves like a blit: pending clears can either be discarded or must be applied first */
1923 zink_fb_clears_apply_or_discard(ctx, pres, zink_rect_from_box(box), false);
1924 else if (usage & PIPE_MAP_READ)
1925 /* if the map region intersects with any clears then we have to apply them */
1926 zink_fb_clears_apply_region(ctx, pres, zink_rect_from_box(box));
1927 if (res->optimal_tiling || !res->obj->host_visible) {
1928 enum pipe_format format = pres->format;
1929 if (usage & PIPE_MAP_DEPTH_ONLY)
1930 format = util_format_get_depth_only(pres->format);
1931 else if (usage & PIPE_MAP_STENCIL_ONLY)
1932 format = PIPE_FORMAT_S8_UINT;
1933 trans->base.b.stride = util_format_get_stride(format, box->width);
1934 trans->base.b.layer_stride = util_format_get_2d_size(format,
1935 trans->base.b.stride,
1936 box->height);
1937
1938 struct pipe_resource templ = *pres;
1939 templ.next = NULL;
1940 templ.format = format;
1941 templ.usage = usage & PIPE_MAP_READ ? PIPE_USAGE_STAGING : PIPE_USAGE_STREAM;
1942 templ.target = PIPE_BUFFER;
1943 templ.bind = PIPE_BIND_LINEAR;
1944 templ.width0 = trans->base.b.layer_stride * box->depth;
1945 templ.height0 = templ.depth0 = 0;
1946 templ.last_level = 0;
1947 templ.array_size = 1;
1948 templ.flags = 0;
1949
1950 trans->staging_res = zink_resource_create(pctx->screen, &templ);
1951 if (!trans->staging_res)
1952 goto fail;
1953
1954 struct zink_resource *staging_res = zink_resource(trans->staging_res);
1955
1956 if (usage & PIPE_MAP_READ) {
1957 /* force multi-context sync */
1958 if (zink_resource_usage_is_unflushed_write(res))
1959 zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_WRITE);
1960 zink_transfer_copy_bufimage(ctx, staging_res, res, trans);
1961 /* need to wait for rendering to finish */
1962 zink_fence_wait(pctx);
1963 }
1964
1965 ptr = map_resource(screen, staging_res);
1966 } else {
1967 assert(!res->optimal_tiling);
1968 ptr = map_resource(screen, res);
1969 if (!ptr)
1970 goto fail;
1971 if (zink_resource_has_usage(res)) {
1972 if (usage & PIPE_MAP_WRITE)
1973 zink_fence_wait(pctx);
1974 else
1975 zink_resource_usage_wait(ctx, res, ZINK_RESOURCE_ACCESS_WRITE);
1976 }
1977 VkImageSubresource isr = {
1978 res->modifiers ? res->obj->modifier_aspect : res->aspect,
1979 level,
1980 0
1981 };
1982 VkSubresourceLayout srl;
1983 VKSCR(GetImageSubresourceLayout)(screen->dev, res->obj->image, &isr, &srl);
1984 trans->base.b.stride = srl.rowPitch;
1985 if (res->base.b.target == PIPE_TEXTURE_3D)
1986 trans->base.b.layer_stride = srl.depthPitch;
1987 else
1988 trans->base.b.layer_stride = srl.arrayPitch;
1989 trans->offset = srl.offset;
1990 trans->depthPitch = srl.depthPitch;
1991 const struct util_format_description *desc = util_format_description(res->base.b.format);
1992 unsigned offset = srl.offset +
1993 box->z * srl.depthPitch +
1994 (box->y / desc->block.height) * srl.rowPitch +
1995 (box->x / desc->block.width) * (desc->block.bits / 8);
1996 if (!res->obj->coherent) {
1997 VkDeviceSize size = (VkDeviceSize)box->width * box->height * desc->block.bits / 8;
1998 VkMappedMemoryRange range = zink_resource_init_mem_range(screen, res->obj, res->obj->offset + offset, size);
1999 if (VKSCR(FlushMappedMemoryRanges)(screen->dev, 1, &range) != VK_SUCCESS) {
2000 mesa_loge("ZINK: vkFlushMappedMemoryRanges failed");
2001 }
2002 }
2003 ptr = ((uint8_t *)ptr) + offset;
2004 }
2005 if (!ptr)
2006 goto fail;
2007 if (usage & PIPE_MAP_WRITE) {
2008 if (!res->valid && res->fb_binds)
2009 ctx->rp_loadop_changed = true;
2010 res->valid = true;
2011 }
2012
2013 if (sizeof(void*) == 4)
2014 trans->base.b.usage |= ZINK_MAP_TEMPORARY;
2015 if ((usage & PIPE_MAP_PERSISTENT) && !(usage & PIPE_MAP_COHERENT))
2016 res->obj->persistent_maps++;
2017
2018 *transfer = &trans->base.b;
2019 return ptr;
2020
2021 fail:
2022 destroy_transfer(ctx, trans);
2023 return NULL;
2024 }
2025
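/* For write mappings: flush non-coherent memory, then, if a staging
 * resource was used, copy the flushed region back into the real resource
 * (buffer->buffer or buffer->image depending on the target).
 */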
2026 static void
2027 zink_transfer_flush_region(struct pipe_context *pctx,
2028 struct pipe_transfer *ptrans,
2029 const struct pipe_box *box)
2030 {
2031 struct zink_context *ctx = zink_context(pctx);
2032 struct zink_resource *res = zink_resource(ptrans->resource);
2033 struct zink_transfer *trans = (struct zink_transfer *)ptrans;
2034
2035 if (trans->base.b.usage & PIPE_MAP_WRITE) {
2036 struct zink_screen *screen = zink_screen(pctx->screen);
2037 struct zink_resource *m = trans->staging_res ? zink_resource(trans->staging_res) :
2038 res;
2039 ASSERTED VkDeviceSize size, src_offset, dst_offset = 0;
2040 if (m->obj->is_buffer) {
2041 size = box->width;
2042 src_offset = box->x + (trans->staging_res ? trans->offset : ptrans->box.x);
2043 dst_offset = box->x + ptrans->box.x;
2044 } else {
2045 size = (VkDeviceSize)box->width * box->height * util_format_get_blocksize(m->base.b.format);
2046 src_offset = trans->offset +
2047 box->z * trans->depthPitch +
2048 util_format_get_2d_size(m->base.b.format, trans->base.b.stride, box->y) +
2049 util_format_get_stride(m->base.b.format, box->x);
2050 assert(src_offset + size <= res->obj->size);
2051 }
2052 if (!m->obj->coherent) {
2053 VkMappedMemoryRange range = zink_resource_init_mem_range(screen, m->obj, m->obj->offset, m->obj->size);
2054 if (VKSCR(FlushMappedMemoryRanges)(screen->dev, 1, &range) != VK_SUCCESS) {
2055 mesa_loge("ZINK: vkFlushMappedMemoryRanges failed");
2056 }
2057 }
2058 if (trans->staging_res) {
2059 struct zink_resource *staging_res = zink_resource(trans->staging_res);
2060
2061 if (ptrans->resource->target == PIPE_BUFFER)
2062 zink_copy_buffer(ctx, res, staging_res, dst_offset, src_offset, size);
2063 else
2064 zink_transfer_copy_bufimage(ctx, res, staging_res, trans);
2065 }
2066 }
2067 }
2068
2069 static void
2070 transfer_unmap(struct pipe_context *pctx, struct pipe_transfer *ptrans)
2071 {
2072 struct zink_context *ctx = zink_context(pctx);
2073 struct zink_resource *res = zink_resource(ptrans->resource);
2074 struct zink_transfer *trans = (struct zink_transfer *)ptrans;
2075
2076 if (!(trans->base.b.usage & (PIPE_MAP_FLUSH_EXPLICIT | PIPE_MAP_COHERENT))) {
2077 /* flush_region is relative to the mapped region: use only the extents */
2078 struct pipe_box box = ptrans->box;
2079 box.x = box.y = box.z = 0;
2080 zink_transfer_flush_region(pctx, ptrans, &box);
2081 }
2082
2083 if ((trans->base.b.usage & PIPE_MAP_PERSISTENT) && !(trans->base.b.usage & PIPE_MAP_COHERENT))
2084 res->obj->persistent_maps--;
2085
2086 if (trans->staging_res)
2087 pipe_resource_reference(&trans->staging_res, NULL);
2088 pipe_resource_reference(&trans->base.b.resource, NULL);
2089
2090 destroy_transfer(ctx, trans);
2091 }
2092
2093 static void
2094 do_transfer_unmap(struct zink_screen *screen, struct zink_transfer *trans)
2095 {
2096 struct zink_resource *res = zink_resource(trans->staging_res);
2097 if (!res)
2098 res = zink_resource(trans->base.b.resource);
2099 unmap_resource(screen, res);
2100 }
2101
2102 static void
2103 zink_buffer_unmap(struct pipe_context *pctx, struct pipe_transfer *ptrans)
2104 {
2105 struct zink_screen *screen = zink_screen(pctx->screen);
2106 struct zink_transfer *trans = (struct zink_transfer *)ptrans;
2107 if (trans->base.b.usage & PIPE_MAP_ONCE && !trans->staging_res)
2108 do_transfer_unmap(screen, trans);
2109 transfer_unmap(pctx, ptrans);
2110 }
2111
2112 static void
2113 zink_image_unmap(struct pipe_context *pctx, struct pipe_transfer *ptrans)
2114 {
2115 struct zink_screen *screen = zink_screen(pctx->screen);
2116 struct zink_transfer *trans = (struct zink_transfer *)ptrans;
2117 if (sizeof(void*) == 4)
2118 do_transfer_unmap(screen, trans);
2119 transfer_unmap(pctx, ptrans);
2120 }
2121
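/* Direct buffer_subdata hook: a map/memcpy/unmap with DISCARD_RANGE unless
 * the caller asked for a direct map. Illustrative caller (hypothetical
 * buffer and data):
 *
 *    pctx->buffer_subdata(pctx, buf, PIPE_MAP_UNSYNCHRONIZED, 0, sizeof(data), data);
 */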
2122 static void
2123 zink_buffer_subdata(struct pipe_context *ctx, struct pipe_resource *buffer,
2124 unsigned usage, unsigned offset, unsigned size, const void *data)
2125 {
2126 struct pipe_transfer *transfer = NULL;
2127 struct pipe_box box;
2128 uint8_t *map = NULL;
2129
2130 usage |= PIPE_MAP_WRITE;
2131
2132 if (!(usage & PIPE_MAP_DIRECTLY))
2133 usage |= PIPE_MAP_DISCARD_RANGE;
2134
2135 u_box_1d(offset, size, &box);
2136 map = zink_buffer_map(ctx, buffer, 0, usage, &box, &transfer);
2137 if (!map)
2138 return;
2139
2140 memcpy(map, data, size);
2141 zink_buffer_unmap(ctx, transfer);
2142 }
2143
2144 static struct pipe_resource *
2145 zink_resource_get_separate_stencil(struct pipe_resource *pres)
2146 {
2147 /* For packed depth-stencil, we treat depth as the primary resource
2148 * and store S8 as the "second plane" resource.
2149 */
2150 if (pres->next && pres->next->format == PIPE_FORMAT_S8_UINT)
2151 return pres->next;
2152
2153 return NULL;
2154
2155 }
2156
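/* Lazily add shader-image (storage) usage to an image that was created
 * without PIPE_BIND_SHADER_IMAGE by recreating its backing object with the
 * extra bind flag; pending framebuffer clears are applied first.
 */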
2157 bool
2158 zink_resource_object_init_storage(struct zink_context *ctx, struct zink_resource *res)
2159 {
2160 /* base resource already has the cap */
2161 if (res->base.b.bind & PIPE_BIND_SHADER_IMAGE)
2162 return true;
2163 if (res->obj->is_buffer) {
2164 unreachable("zink: all buffers should have this bit");
2165 return true;
2166 }
2167 assert(!res->obj->dt);
2168 zink_fb_clears_apply_region(ctx, &res->base.b, (struct u_rect){0, res->base.b.width0, 0, res->base.b.height0});
2169 bool ret = add_resource_bind(ctx, res, PIPE_BIND_SHADER_IMAGE);
2170 if (ret)
2171 zink_resource_rebind(ctx, res);
2172
2173 return ret;
2174 }
2175
2176 void
2177 zink_resource_setup_transfer_layouts(struct zink_context *ctx, struct zink_resource *src, struct zink_resource *dst)
2178 {
2179 if (src == dst) {
2180 /* The Vulkan 1.1 specification says the following about valid usage
2181 * of vkCmdBlitImage:
2182 *
2183 * "srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
2184 * VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL"
2185 *
2186 * and:
2187 *
2188 * "dstImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
2189 * VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL"
2190 *
2191 * Since we can't have the same image in two layouts at the same time,
2192 * we're effectively left with VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR or
2193 * VK_IMAGE_LAYOUT_GENERAL. And since this isn't a present-related
2194 * operation, VK_IMAGE_LAYOUT_GENERAL seems most appropriate.
2195 */
2196 zink_resource_image_barrier(ctx, src,
2197 VK_IMAGE_LAYOUT_GENERAL,
2198 VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT,
2199 VK_PIPELINE_STAGE_TRANSFER_BIT);
2200 } else {
2201 zink_resource_image_barrier(ctx, src,
2202 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2203 VK_ACCESS_TRANSFER_READ_BIT,
2204 VK_PIPELINE_STAGE_TRANSFER_BIT);
2205
2206 zink_resource_image_barrier(ctx, dst,
2207 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2208 VK_ACCESS_TRANSFER_WRITE_BIT,
2209 VK_PIPELINE_STAGE_TRANSFER_BIT);
2210 }
2211 }
2212
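/* Split a (possibly packed) depth-stencil resource into its depth and
 * stencil zink_resources; an S8-only resource is returned as stencil.
 */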
2213 void
2214 zink_get_depth_stencil_resources(struct pipe_resource *res,
2215 struct zink_resource **out_z,
2216 struct zink_resource **out_s)
2217 {
2218 if (!res) {
2219 if (out_z) *out_z = NULL;
2220 if (out_s) *out_s = NULL;
2221 return;
2222 }
2223
2224 if (res->format != PIPE_FORMAT_S8_UINT) {
2225 if (out_z) *out_z = zink_resource(res);
2226 if (out_s) *out_s = zink_resource(zink_resource_get_separate_stencil(res));
2227 } else {
2228 if (out_z) *out_z = NULL;
2229 if (out_s) *out_s = zink_resource(res);
2230 }
2231 }
2232
2233 static void
2234 zink_resource_set_separate_stencil(struct pipe_resource *pres,
2235 struct pipe_resource *stencil)
2236 {
2237 assert(util_format_has_depth(util_format_description(pres->format)));
2238 pipe_resource_reference(&pres->next, stencil);
2239 }
2240
2241 static enum pipe_format
2242 zink_resource_get_internal_format(struct pipe_resource *pres)
2243 {
2244 struct zink_resource *res = zink_resource(pres);
2245 return res->internal_format;
2246 }
2247
2248 static const struct u_transfer_vtbl transfer_vtbl = {
2249 .resource_create = zink_resource_create,
2250 .resource_destroy = zink_resource_destroy,
2251 .transfer_map = zink_image_map,
2252 .transfer_unmap = zink_image_unmap,
2253 .transfer_flush_region = zink_transfer_flush_region,
2254 .get_internal_format = zink_resource_get_internal_format,
2255 .set_stencil = zink_resource_set_separate_stencil,
2256 .get_stencil = zink_resource_get_separate_stencil,
2257 };
2258
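/* Install the screen-level resource hooks. Handle import/export is only
 * exposed when the corresponding external-memory extensions are available,
 * and depth-stencil (de)interleaving is delegated to u_transfer_helper.
 */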
2259 bool
2260 zink_screen_resource_init(struct pipe_screen *pscreen)
2261 {
2262 struct zink_screen *screen = zink_screen(pscreen);
2263 pscreen->resource_create = zink_resource_create;
2264 pscreen->resource_create_with_modifiers = zink_resource_create_with_modifiers;
2265 pscreen->resource_create_drawable = zink_resource_create_drawable;
2266 pscreen->resource_destroy = zink_resource_destroy;
2267 pscreen->transfer_helper = u_transfer_helper_create(&transfer_vtbl, true, true, false, false, !screen->have_D24_UNORM_S8_UINT);
2268
2269 if (screen->info.have_KHR_external_memory_fd || screen->info.have_KHR_external_memory_win32) {
2270 pscreen->resource_get_handle = zink_resource_get_handle;
2271 pscreen->resource_from_handle = zink_resource_from_handle;
2272 }
2273 if (screen->instance_info.have_KHR_external_memory_capabilities) {
2274 pscreen->memobj_create_from_handle = zink_memobj_create_from_handle;
2275 pscreen->memobj_destroy = zink_memobj_destroy;
2276 pscreen->resource_from_memobj = zink_resource_from_memobj;
2277 }
2278 pscreen->resource_get_param = zink_resource_get_param;
2279 return true;
2280 }
2281
2282 void
2283 zink_context_resource_init(struct pipe_context *pctx)
2284 {
2285 pctx->buffer_map = zink_buffer_map;
2286 pctx->buffer_unmap = zink_buffer_unmap;
2287 pctx->texture_map = u_transfer_helper_deinterleave_transfer_map;
2288 pctx->texture_unmap = u_transfer_helper_deinterleave_transfer_unmap;
2289
2290 pctx->transfer_flush_region = u_transfer_helper_transfer_flush_region;
2291 pctx->buffer_subdata = zink_buffer_subdata;
2292 pctx->texture_subdata = u_default_texture_subdata;
2293 pctx->invalidate_resource = zink_resource_invalidate;
2294 }
2295