/external/mesa3d/src/amd/vulkan/ (references to meta_state)
radv_meta_fast_clear.c
  134  &device->meta_state.alloc,  in create_dcc_compress_compute()
  135  &device->meta_state.fast_clear_flush.dcc_decompress_compute_ds_layout);  in create_dcc_compress_compute()
  143  .pSetLayouts = &device->meta_state.fast_clear_flush.dcc_decompress_compute_ds_layout,  in create_dcc_compress_compute()
  150  &device->meta_state.alloc,  in create_dcc_compress_compute()
  151  &device->meta_state.fast_clear_flush.dcc_decompress_compute_p_layout);  in create_dcc_compress_compute()
  169  .layout = device->meta_state.fast_clear_flush.dcc_decompress_compute_p_layout,  in create_dcc_compress_compute()
  173  radv_pipeline_cache_to_handle(&device->meta_state.cache),  in create_dcc_compress_compute()
  175  &device->meta_state.fast_clear_flush.dcc_decompress_compute_pipeline);  in create_dcc_compress_compute()
  189  const VkAllocationCallbacks *alloc = &device->meta_state.alloc;  in create_pass()
  245  &device->meta_state.fast_clear_flush.pass);  in create_pass()
  [all …]
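The hits in radv_meta_fast_clear.c follow the recurring meta setup pattern: build a descriptor set layout, reference it from a pipeline layout, and route every allocation through device->meta_state.alloc. The sketch below only illustrates that shape; struct meta_ctx and create_meta_layouts() are hypothetical stand-ins, and it uses the public vkCreate* entry points rather than the driver-internal ones the real code calls.

/* Sketch only: `struct meta_ctx` is a hypothetical stand-in for the driver's
 * device/meta-state pair; the real code goes through driver-internal entry
 * points rather than the public vkCreate* functions used here. */
#include <vulkan/vulkan.h>

struct meta_ctx {
   VkDevice device;
   VkAllocationCallbacks alloc;      /* plays the role of meta_state.alloc */
   VkDescriptorSetLayout ds_layout;  /* ...dcc_decompress_compute_ds_layout */
   VkPipelineLayout p_layout;        /* ...dcc_decompress_compute_p_layout  */
};

static VkResult create_meta_layouts(struct meta_ctx *ctx)
{
   /* One storage-image binding, the minimum a compute meta pass needs. */
   const VkDescriptorSetLayoutBinding binding = {
      .binding = 0,
      .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
      .descriptorCount = 1,
      .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
   };
   const VkDescriptorSetLayoutCreateInfo ds_info = {
      .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
      .bindingCount = 1,
      .pBindings = &binding,
   };
   VkResult result = vkCreateDescriptorSetLayout(ctx->device, &ds_info,
                                                 &ctx->alloc, &ctx->ds_layout);
   if (result != VK_SUCCESS)
      return result;

   /* The pipeline layout then points at the descriptor set layout,
    * mirroring the .pSetLayouts hit on line 143. */
   const VkPipelineLayoutCreateInfo pl_info = {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
      .setLayoutCount = 1,
      .pSetLayouts = &ctx->ds_layout,
   };
   return vkCreatePipelineLayout(ctx->device, &pl_info, &ctx->alloc,
                                 &ctx->p_layout);
}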
radv_meta_bufimage.c
  169  &device->meta_state.alloc,  in radv_device_init_meta_itob_state()
  170  &device->meta_state.itob.img_ds_layout);  in radv_device_init_meta_itob_state()
  178  .pSetLayouts = &device->meta_state.itob.img_ds_layout,  in radv_device_init_meta_itob_state()
  185  &device->meta_state.alloc,  in radv_device_init_meta_itob_state()
  186  &device->meta_state.itob.img_p_layout);  in radv_device_init_meta_itob_state()
  204  .layout = device->meta_state.itob.img_p_layout,  in radv_device_init_meta_itob_state()
  208  radv_pipeline_cache_to_handle(&device->meta_state.cache),  in radv_device_init_meta_itob_state()
  210  &device->meta_state.itob.pipeline);  in radv_device_init_meta_itob_state()
  227  .layout = device->meta_state.itob.img_p_layout,  in radv_device_init_meta_itob_state()
  231  radv_pipeline_cache_to_handle(&device->meta_state.cache),  in radv_device_init_meta_itob_state()
  [all …]
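The itob ("image to buffer") hits show the second half of the pattern: the compute pipeline itself is created through the shared meta pipeline cache (radv_pipeline_cache_to_handle(&device->meta_state.cache)). A minimal sketch of that step, assuming the shader module and layouts already exist; the function name is hypothetical.

/* Sketch only: a compute pipeline created through a shared pipeline cache,
 * in the spirit of the itob hits above.  The function name is hypothetical
 * and the shader module is assumed to exist already. */
#include <vulkan/vulkan.h>

static VkResult create_meta_compute_pipeline(VkDevice device,
                                             VkPipelineCache cache,   /* meta_state.cache      */
                                             VkPipelineLayout layout, /* e.g. itob.img_p_layout */
                                             VkShaderModule cs_module,
                                             const VkAllocationCallbacks *alloc,
                                             VkPipeline *pipeline)
{
   const VkComputePipelineCreateInfo info = {
      .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
      .stage = {
         .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
         .stage = VK_SHADER_STAGE_COMPUTE_BIT,
         .module = cs_module,
         .pName = "main",
      },
      .layout = layout,
   };
   /* Routing creation through one cache lets all meta pipelines share a
    * single blob that can later be stored to and reloaded from disk. */
   return vkCreateComputePipelines(device, cache, 1, &info, alloc, pipeline);
}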
radv_meta_blit2d.c
  128  device->meta_state.blit2d[log2_samples].p_layouts[src_type],  in blit2d_bind_src()
  143  device->meta_state.blit2d[log2_samples].p_layouts[src_type],  in blit2d_bind_src()
  151  device->meta_state.blit2d[log2_samples].p_layouts[src_type],  in blit2d_bind_src()
  156  device->meta_state.blit2d[log2_samples].p_layouts[src_type],  in blit2d_bind_src()
  214  cmd_buffer->device->meta_state.blit2d[log2_samples].pipelines[src_type][fs_key];  in bind_pipeline()
  226  cmd_buffer->device->meta_state.blit2d[log2_samples].depth_only_pipeline[src_type];  in bind_depth_pipeline()
  238  cmd_buffer->device->meta_state.blit2d[log2_samples].stencil_only_pipeline[src_type];  in bind_stencil_pipeline()
  283  device->meta_state.blit2d[log2_samples].p_layouts[src_type],  in radv_meta_blit2d_normal_dst()
  294  if (device->meta_state.blit2d[log2_samples].pipelines[src_type][fs_key] == VK_NULL_HANDLE) {  in radv_meta_blit2d_normal_dst()
  305  .renderPass = device->meta_state.blit2d_render_passes[fs_key][dst_layout],  in radv_meta_blit2d_normal_dst()
  [all …]
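The blit2d hits index cached state by log2(sample count), source type and fragment-shader key, and fall back to on-demand creation when a slot is still VK_NULL_HANDLE (line 294). The sketch below shows that lookup shape under simplified, hypothetical dimensions; the real driver keeps one such table per log2(sample count) and uses its own source-type and fs-key enumerations.

/* Sketch only: the lookup shape behind the blit2d hits, with hypothetical,
 * trimmed dimensions.  Missing entries are built on first use. */
#include <vulkan/vulkan.h>

enum src_type { SRC_TYPE_IMAGE, SRC_TYPE_IMAGE_3D, SRC_TYPE_BUFFER, SRC_TYPE_COUNT };
#define NUM_FS_KEYS 12  /* assumed size of the fragment-shader key space */

struct blit2d_tables {
   VkPipelineLayout p_layouts[SRC_TYPE_COUNT];
   VkPipeline pipelines[SRC_TYPE_COUNT][NUM_FS_KEYS];
};

static VkResult get_blit2d_pipeline(struct blit2d_tables *t,
                                    enum src_type src, unsigned fs_key,
                                    VkResult (*create_missing)(struct blit2d_tables *,
                                                               enum src_type, unsigned),
                                    VkPipeline *out)
{
   /* A VK_NULL_HANDLE slot means this variant has never been needed before. */
   if (t->pipelines[src][fs_key] == VK_NULL_HANDLE) {
      VkResult result = create_missing(t, src, fs_key);
      if (result != VK_SUCCESS)
         return result;
   }
   *out = t->pipelines[src][fs_key];
   return VK_SUCCESS;
}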
radv_meta_resolve_fs.c
  124  &device->meta_state.alloc,  in create_layout()
  125  &device->meta_state.resolve_fragment.ds_layout);  in create_layout()
  133  .pSetLayouts = &device->meta_state.resolve_fragment.ds_layout,  in create_layout()
  140  &device->meta_state.alloc,  in create_layout()
  141  &device->meta_state.resolve_fragment.p_layout);  in create_layout()
  160  mtx_lock(&device->meta_state.mtx);  in create_resolve_pipeline()
  163  VkPipeline *pipeline = &device->meta_state.resolve_fragment.rc[samples_log2].pipeline[fs_key];  in create_resolve_pipeline()
  165  mtx_unlock(&device->meta_state.mtx);  in create_resolve_pipeline()
  183  VkRenderPass *rp = &device->meta_state.resolve_fragment.rc[samples_log2].render_pass[fs_key][0];  in create_resolve_pipeline()
  255  }, &device->meta_state.alloc, rp + dst_layout);  in create_resolve_pipeline()
  [all …]
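The mtx_lock()/mtx_unlock() pairs in create_resolve_pipeline() point at on-demand pipeline creation guarded by meta_state.mtx. Below is a minimal sketch of that double-checked pattern; struct lazy_pipeline and the build callback are hypothetical, and Mesa uses its own c11/threads.h wrapper rather than plain <threads.h>.

/* Sketch only: the double-checked, mutex-guarded creation pattern implied by
 * the mtx_lock()/mtx_unlock() hits. */
#include <threads.h>
#include <vulkan/vulkan.h>

struct lazy_pipeline {
   mtx_t mtx;            /* plays the role of meta_state.mtx */
   VkPipeline pipeline;  /* VK_NULL_HANDLE until first use   */
};

static VkResult get_or_create_pipeline(struct lazy_pipeline *lp,
                                       VkResult (*build_pipeline)(VkPipeline *))
{
   /* Fast path: some earlier command already built this variant. */
   if (lp->pipeline != VK_NULL_HANDLE)
      return VK_SUCCESS;

   mtx_lock(&lp->mtx);
   /* Re-check under the lock: another thread may have won the race. */
   if (lp->pipeline != VK_NULL_HANDLE) {
      mtx_unlock(&lp->mtx);
      return VK_SUCCESS;
   }
   VkResult result = build_pipeline(&lp->pipeline);
   mtx_unlock(&lp->mtx);
   return result;
}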
radv_meta_buffer.c
  130  &device->meta_state.alloc,  in radv_device_init_meta_buffer_state()
  131  &device->meta_state.buffer.fill_ds_layout);  in radv_device_init_meta_buffer_state()
  159  &device->meta_state.alloc,  in radv_device_init_meta_buffer_state()
  160  &device->meta_state.buffer.copy_ds_layout);  in radv_device_init_meta_buffer_state()
  168  .pSetLayouts = &device->meta_state.buffer.fill_ds_layout,  in radv_device_init_meta_buffer_state()
  175  &device->meta_state.alloc,  in radv_device_init_meta_buffer_state()
  176  &device->meta_state.buffer.fill_p_layout);  in radv_device_init_meta_buffer_state()
  183  .pSetLayouts = &device->meta_state.buffer.copy_ds_layout,  in radv_device_init_meta_buffer_state()
  189  &device->meta_state.alloc,  in radv_device_init_meta_buffer_state()
  190  &device->meta_state.buffer.copy_p_layout);  in radv_device_init_meta_buffer_state()
  [all …]
radv_meta_blit.c
  330  device->meta_state.blit.pipeline_layout,  in meta_emit_blit()
  356  .renderPass = device->meta_state.blit.render_pass[fs_key][dst_layout],  in meta_emit_blit()
  367  pipeline = &device->meta_state.blit.pipeline_1d_src[fs_key];  in meta_emit_blit()
  370  pipeline = &device->meta_state.blit.pipeline_2d_src[fs_key];  in meta_emit_blit()
  373  pipeline = &device->meta_state.blit.pipeline_3d_src[fs_key];  in meta_emit_blit()
  385  .renderPass = device->meta_state.blit.depth_only_rp[ds_layout],  in meta_emit_blit()
  396  pipeline = &device->meta_state.blit.depth_only_1d_pipeline;  in meta_emit_blit()
  399  pipeline = &device->meta_state.blit.depth_only_2d_pipeline;  in meta_emit_blit()
  402  pipeline = &device->meta_state.blit.depth_only_3d_pipeline;  in meta_emit_blit()
  414  .renderPass = device->meta_state.blit.stencil_only_rp[ds_layout],  in meta_emit_blit()
  [all …]
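meta_emit_blit() picks one of several prebuilt pipelines depending on the source image dimensionality (pipeline_1d_src/2d/3d, and depth/stencil-only variants) plus the destination layout. Below is a sketch of that selection step that ignores the per-fs-key indexing of the real arrays; the struct and function name are hypothetical reductions.

/* Sketch only: choosing a prebuilt blit pipeline by source dimensionality. */
#include <stddef.h>
#include <vulkan/vulkan.h>

struct blit_pipelines {
   VkPipeline pipeline_1d_src;
   VkPipeline pipeline_2d_src;
   VkPipeline pipeline_3d_src;
};

static VkPipeline *select_blit_pipeline(struct blit_pipelines *p,
                                        VkImageViewType src_view_type)
{
   switch (src_view_type) {
   case VK_IMAGE_VIEW_TYPE_1D:
      return &p->pipeline_1d_src;
   case VK_IMAGE_VIEW_TYPE_2D:
      return &p->pipeline_2d_src;
   case VK_IMAGE_VIEW_TYPE_3D:
      return &p->pipeline_3d_src;
   default:
      return NULL; /* other view types are not valid blit sources here */
   }
}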
radv_meta_resolve_cs.c
  334  &device->meta_state.alloc,  in create_layout()
  335  &device->meta_state.resolve_compute.ds_layout);  in create_layout()
  343  .pSetLayouts = &device->meta_state.resolve_compute.ds_layout,  in create_layout()
  350  &device->meta_state.alloc,  in create_layout()
  351  &device->meta_state.resolve_compute.p_layout);  in create_layout()
  369  mtx_lock(&device->meta_state.mtx);  in create_resolve_pipeline()
  371  mtx_unlock(&device->meta_state.mtx);  in create_resolve_pipeline()
  391  .layout = device->meta_state.resolve_compute.p_layout,  in create_resolve_pipeline()
  395  radv_pipeline_cache_to_handle(&device->meta_state.cache),  in create_resolve_pipeline()
  402  mtx_unlock(&device->meta_state.mtx);  in create_resolve_pipeline()
  [all …]
radv_meta.h
  257  struct radv_meta_state *meta_state = &cmd_buffer->device->meta_state;  in radv_is_fmask_decompress_pipeline() local
  261  meta_state->fast_clear_flush.fmask_decompress_pipeline;  in radv_is_fmask_decompress_pipeline()
  270  struct radv_meta_state *meta_state = &cmd_buffer->device->meta_state;  in radv_is_dcc_decompress_pipeline() local
  274  meta_state->fast_clear_flush.dcc_decompress_pipeline;  in radv_is_dcc_decompress_pipeline()
  283  struct radv_meta_state *meta_state = &cmd_buffer->device->meta_state;  in radv_is_hw_resolve_pipeline() local
  290  if (radv_pipeline_to_handle(pipeline) == meta_state->resolve.pipeline[i])  in radv_is_hw_resolve_pipeline()
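These radv_meta.h helpers decide whether the currently bound pipeline is one of the meta pipelines by comparing its Vulkan handle against the handles cached in meta_state. A sketch of the resolve variant, which scans a small array; the names and array-based layout here are hypothetical simplifications.

/* Sketch only: meta pipelines are created once and cached, so a plain handle
 * comparison is a reliable way to recognize them later. */
#include <stdbool.h>
#include <vulkan/vulkan.h>

static bool is_hw_resolve_pipeline(const VkPipeline *resolve_pipelines,
                                   unsigned count, VkPipeline bound)
{
   for (unsigned i = 0; i < count; i++) {
      if (bound == resolve_pipelines[i])
         return true;
   }
   return false;
}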
radv_meta_clear.c
  115  radv_pipeline_cache_to_handle(&device->meta_state.cache),  in create_pipeline()
  203  mtx_lock(&device->meta_state.mtx);  in create_color_renderpass()
  205  mtx_unlock (&device->meta_state.mtx);  in create_color_renderpass()
  259  }, &device->meta_state.alloc, pass);  in create_color_renderpass()
  260  mtx_unlock(&device->meta_state.mtx);  in create_color_renderpass()
  275  mtx_lock(&device->meta_state.mtx);  in create_color_pipeline()
  277  mtx_unlock(&device->meta_state.mtx);  in create_color_pipeline()
  319  device->meta_state.clear_color_p_layout,  in create_color_pipeline()
  320  &extra, &device->meta_state.alloc, pipeline);  in create_color_pipeline()
  322  mtx_unlock(&device->meta_state.mtx);  in create_color_pipeline()
  [all …]
radv_query.c
  817  mtx_lock(&device->meta_state.mtx);  in radv_device_init_meta_query_state_internal()
  818  if (device->meta_state.query.pipeline_statistics_query_pipeline) {  in radv_device_init_meta_query_state_internal()
  819  mtx_unlock(&device->meta_state.mtx);  in radv_device_init_meta_query_state_internal()
  851  &device->meta_state.alloc,  in radv_device_init_meta_query_state_internal()
  852  &device->meta_state.query.ds_layout);  in radv_device_init_meta_query_state_internal()
  859  .pSetLayouts = &device->meta_state.query.ds_layout,  in radv_device_init_meta_query_state_internal()
  866  &device->meta_state.alloc,  in radv_device_init_meta_query_state_internal()
  867  &device->meta_state.query.p_layout);  in radv_device_init_meta_query_state_internal()
  883  .layout = device->meta_state.query.p_layout,  in radv_device_init_meta_query_state_internal()
  887  radv_pipeline_cache_to_handle(&device->meta_state.cache),  in radv_device_init_meta_query_state_internal()
  [all …]
radv_meta_decompress.c
  49   const VkAllocationCallbacks *alloc = &device->meta_state.alloc;  in create_pass()
  122  &device->meta_state.alloc,  in create_pipeline_layout()
  138  mtx_lock(&device->meta_state.mtx);  in create_pipeline()
  140  mtx_unlock(&device->meta_state.mtx);  in create_pipeline()
  255  radv_pipeline_cache_to_handle(&device->meta_state.cache),  in create_pipeline()
  257  &device->meta_state.alloc,  in create_pipeline()
  263  mtx_unlock(&device->meta_state.mtx);  in create_pipeline()
  270  struct radv_meta_state *state = &device->meta_state;  in radv_device_finish_meta_depth_decomp_state()
  294  struct radv_meta_state *state = &device->meta_state;  in radv_device_init_meta_depth_decomp_state()
  346  struct radv_meta_state *state = &cmd_buffer->device->meta_state;  in radv_get_depth_pipeline()
  [all …]
radv_meta_resolve.c
  58   const VkAllocationCallbacks *alloc = &device->meta_state.alloc;  in create_pass()
  155  if (!device->meta_state.resolve.p_layout) {  in create_pipeline()
  158  &device->meta_state.alloc,  in create_pipeline()
  159  &device->meta_state.resolve.p_layout);  in create_pipeline()
  165  radv_pipeline_cache_to_handle(&device->meta_state.cache),  in create_pipeline()
  239  .layout = device->meta_state.resolve.p_layout,  in create_pipeline()
  247  &device->meta_state.alloc, pipeline);  in create_pipeline()
  261  struct radv_meta_state *state = &device->meta_state;  in radv_device_finish_meta_resolve_state()
  281  struct radv_meta_state *state = &device->meta_state;  in radv_device_init_meta_resolve_state()
  327  device->meta_state.resolve.pipeline[fs_key]);  in emit_resolve()
  [all …]
radv_meta.c
  337  ret = radv_pipeline_cache_load(&device->meta_state.cache, data, st.st_size);  in radv_load_meta_pipeline()
  351  if (!device->meta_state.cache.modified)  in radv_store_meta_pipeline()
  355  radv_pipeline_cache_to_handle(&device->meta_state.cache),  in radv_store_meta_pipeline()
  372  radv_pipeline_cache_to_handle(&device->meta_state.cache),  in radv_store_meta_pipeline()
  390  memset(&device->meta_state, 0, sizeof(device->meta_state));  in radv_device_init_meta()
  392  device->meta_state.alloc = (VkAllocationCallbacks) {  in radv_device_init_meta()
  399  device->meta_state.cache.alloc = device->meta_state.alloc;  in radv_device_init_meta()
  400  radv_pipeline_cache_init(&device->meta_state.cache, device);  in radv_device_init_meta()
  404  mtx_init(&device->meta_state.mtx, mtx_plain);  in radv_device_init_meta()
  479  mtx_destroy(&device->meta_state.mtx);  in radv_device_init_meta()
  [all …]
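radv_meta.c shows the lifecycle of meta_state itself: zero the struct, install the device allocator, initialize the pipeline cache (optionally preloading an on-disk blob), and create the mutex used for lazy pipeline creation; the cache is only written back later if it was modified. The sketch below follows that initialization order with hypothetical stand-in types and callbacks; the real driver uses its internal pipeline-cache type and error handling.

/* Sketch only: the initialization order suggested by the radv_meta.c hits. */
#include <stdbool.h>
#include <string.h>
#include <threads.h>
#include <vulkan/vulkan.h>

struct meta_state_sketch {
   VkAllocationCallbacks alloc;
   VkPipelineCache cache;
   bool cache_modified;   /* only a modified cache is written back to disk */
   mtx_t mtx;
};

static int init_meta_state(struct meta_state_sketch *state,
                           const VkAllocationCallbacks *device_alloc,
                           VkResult (*init_cache)(struct meta_state_sketch *),
                           void (*load_disk_cache)(struct meta_state_sketch *))
{
   /* Zero everything first so every pipeline handle starts as VK_NULL_HANDLE;
    * the lazy-creation checks elsewhere rely on that. */
   memset(state, 0, sizeof(*state));

   /* Meta objects are allocated through the allocator stored in the state. */
   state->alloc = *device_alloc;

   /* Create the shared pipeline cache, then try to pre-populate it from a
    * previously stored blob. */
   if (init_cache(state) != VK_SUCCESS)
      return -1;
   load_disk_cache(state);

   /* This mutex later guards on-demand pipeline/render-pass creation. */
   return mtx_init(&state->mtx, mtx_plain) == thrd_success ? 0 : -1;
}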
radv_meta_fmask_expand.c
  125  VkPipeline pipeline = device->meta_state.fmask_expand.pipeline[samples_log2];  in radv_expand_fmask_image_inplace()
  150  cmd_buffer->device->meta_state.fmask_expand.p_layout,  in radv_expand_fmask_image_inplace()
  184  struct radv_meta_state *state = &device->meta_state;  in radv_device_finish_meta_fmask_expand_state()
  205  struct radv_meta_state *state = &device->meta_state;  in create_fmask_expand_pipeline()
  238  struct radv_meta_state *state = &device->meta_state;  in radv_device_init_meta_fmask_expand_state()
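The fmask-expand hits illustrate the command-buffer side: pick a cached compute pipeline by log2(sample count) and bind it together with the matching pipeline layout. A sketch of that usage under assumed names; descriptor-set setup and the shader's actual workgroup size are simplifications, and the real code goes through radv's internal command-buffer helpers rather than the public vkCmd* calls shown here.

/* Sketch only: binding a per-sample-count meta compute pipeline. */
#include <stddef.h>
#include <vulkan/vulkan.h>

#define MAX_SAMPLES_LOG2 4

struct fmask_expand_sketch {
   VkPipelineLayout p_layout;
   VkPipeline pipeline[MAX_SAMPLES_LOG2];  /* indexed by samples_log2 */
};

static void emit_fmask_expand(VkCommandBuffer cmd,
                              const struct fmask_expand_sketch *state,
                              unsigned samples_log2,
                              VkDescriptorSet set,
                              uint32_t width, uint32_t height, uint32_t layers)
{
   vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_COMPUTE,
                     state->pipeline[samples_log2]);
   vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_COMPUTE,
                           state->p_layout, 0, 1, &set, 0, NULL);
   /* Assumes an 8x8 workgroup in the expansion shader. */
   vkCmdDispatch(cmd, (width + 7) / 8, (height + 7) / 8, layers);
}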
radv_private.h
  772  struct radv_meta_state meta_state;  member
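Finally, the radv_private.h hit shows that the meta state is embedded by value in the device struct, so it is created and torn down with the device. A trimmed, hypothetical sketch of that embedding:

/* Sketch only: how the meta state hangs off the device object; the real
 * structs carry many more fields. */
#include <threads.h>
#include <vulkan/vulkan.h>

struct meta_state_sketch {
   VkAllocationCallbacks alloc;
   VkPipelineCache cache;
   mtx_t mtx;
   /* ...plus the per-operation layouts, passes and pipelines listed above */
};

struct device_sketch {
   VkDevice vk_device;
   /* Embedded by value, matching the `member` hit: the meta state shares the
    * device's lifetime and needs no separate allocation. */
   struct meta_state_sketch meta_state;
};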