/third_party/mesa3d/src/amd/vulkan/
radv_cmd_buffer.c
     57  static void radv_handle_image_transition(struct radv_cmd_buffer *cmd_buffer,
     64  static void radv_set_rt_stack_size(struct radv_cmd_buffer *cmd_buffer, uint32_t size);
    125  radv_bind_dynamic_state(struct radv_cmd_buffer *cmd_buffer, const struct radv_dynamic_state *src)
    127     struct radv_dynamic_state *dest = &cmd_buffer->state.dynamic;
    349     cmd_buffer->state.dirty |= dest_mask;
    353  radv_cmd_buffer_uses_mec(struct radv_cmd_buffer *cmd_buffer)
    355     return cmd_buffer->qf == RADV_QUEUE_COMPUTE &&
    356            cmd_buffer->device->physical_device->rad_info.gfx_level >= GFX7;
    376  radv_emit_write_data_packet(struct radv_cmd_buffer *cmd_buffer, unsigned engine_sel, uint64_t va,
    379     struct radeon_cmdbuf *cs = cmd_buffer->cs;
    [all …]
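The matches at lines 353-356 flatten a small helper into one string. Reassembled as a sketch (the `static bool` signature is an assumption from context), it shows the check RADV uses to decide whether a command buffer targets the MEC (micro-engine compute) ring:

    /* Compute queues use the MEC ring on GFX7 and newer. */
    static bool
    radv_cmd_buffer_uses_mec(struct radv_cmd_buffer *cmd_buffer)
    {
       return cmd_buffer->qf == RADV_QUEUE_COMPUTE &&
              cmd_buffer->device->physical_device->rad_info.gfx_level >= GFX7;
    }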
|
radv_meta.c
     38  radv_suspend_queries(struct radv_meta_saved_state *state, struct radv_cmd_buffer *cmd_buffer)
     41     if (cmd_buffer->state.active_pipeline_queries > 0) {
     42        cmd_buffer->state.flush_bits &= ~RADV_CMD_FLAG_START_PIPELINE_STATS;
     43        cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_STOP_PIPELINE_STATS;
     47     if (cmd_buffer->state.active_occlusion_queries > 0) {
     48        radv_set_db_count_control(cmd_buffer, false);
     52     if (cmd_buffer->state.prims_gen_query_enabled) {
     53        cmd_buffer->state.suspend_streamout = true;
     54        radv_emit_streamout_enable(cmd_buffer);
     59     state->active_pipeline_gds_queries = cmd_buffer->state.active_pipeline_gds_queries;
    [all …]
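Reassembled from the matched lines (statements elided in the listing are marked `/* … */`), the suspend helper shows the three query families a RADV meta operation has to pause before it draws or dispatches on the application's behalf:

    static void
    radv_suspend_queries(struct radv_meta_saved_state *state, struct radv_cmd_buffer *cmd_buffer)
    {
       /* Pipeline-statistics queries: stop counting while the meta op runs. */
       if (cmd_buffer->state.active_pipeline_queries > 0) {
          cmd_buffer->state.flush_bits &= ~RADV_CMD_FLAG_START_PIPELINE_STATS;
          cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_STOP_PIPELINE_STATS;
       }

       /* Occlusion queries: disable DB sample counting. */
       if (cmd_buffer->state.active_occlusion_queries > 0) {
          radv_set_db_count_control(cmd_buffer, false);
       }

       /* Primitives-generated queries: pause streamout. */
       if (cmd_buffer->state.prims_gen_query_enabled) {
          cmd_buffer->state.suspend_streamout = true;
          radv_emit_streamout_enable(cmd_buffer);
       }

       /* Remember the GDS query state so the resume path can restore it. */
       state->active_pipeline_gds_queries = cmd_buffer->state.active_pipeline_gds_queries;
       /* … */
    }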
|
radv_sdma_copy_image.c
     71  radv_sdma_v4_v5_copy_image_to_buffer(struct radv_cmd_buffer *cmd_buffer, struct radv_image *image,
     76     struct radv_device *device = cmd_buffer->device;
     85     …uint32_t ib_pad_dw_mask = cmd_buffer->device->physical_device->rad_info.ib_pad_dw_mask[AMD_IP_SDMA…
     90     radeon_check_space(cmd_buffer->device->ws, cmd_buffer->cs, align(8, ib_pad_dw_mask + 1));
     96     radeon_emit(cmd_buffer->cs, 0x00000000);
    100     radeon_emit(cmd_buffer->cs, CIK_SDMA_PACKET(CIK_SDMA_OPCODE_COPY,
    102     radeon_emit(cmd_buffer->cs, bytes);
    103     radeon_emit(cmd_buffer->cs, 0);
    104     radeon_emit(cmd_buffer->cs, src_address);
    105     radeon_emit(cmd_buffer->cs, src_address >> 32);
    [all …]
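The emission sequence at lines 90-105 reads more clearly reassembled. A condensed sketch follows; the sub-opcode argument is cut off in the listing, so the plain linear-copy sub-opcode below is an assumption:

    struct radeon_cmdbuf *cs = cmd_buffer->cs;

    /* Reserve space, padded to the SDMA IB alignment mask. */
    radeon_check_space(cmd_buffer->device->ws, cs, align(8, ib_pad_dw_mask + 1));

    radeon_emit(cs, CIK_SDMA_PACKET(CIK_SDMA_OPCODE_COPY,
                                    CIK_SDMA_COPY_SUB_OPCODE_LINEAR, 0)); /* sub-op assumed */
    radeon_emit(cs, bytes);             /* byte count            */
    radeon_emit(cs, 0);                 /* parameters            */
    radeon_emit(cs, src_address);       /* source VA, bits 31:0  */
    radeon_emit(cs, src_address >> 32); /* source VA, bits 63:32 */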
|
radv_meta_buffer.c
    170  fill_buffer_shader(struct radv_cmd_buffer *cmd_buffer, uint64_t va, uint64_t size, uint32_t data)
    172     struct radv_device *device = cmd_buffer->device;
    176        &saved_state, cmd_buffer,
    179     radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer), VK_PIPELINE_BIND_POINT_COMPUTE,
    190     radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
    194     radv_unaligned_dispatch(cmd_buffer, DIV_ROUND_UP(size, 16), 1, 1);
    196     radv_meta_restore(&saved_state, cmd_buffer);
    200  copy_buffer_shader(struct radv_cmd_buffer *cmd_buffer, uint64_t src_va, uint64_t dst_va,
    203     struct radv_device *device = cmd_buffer->device;
    207        &saved_state, cmd_buffer,
    [all …]
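Joining the fill_buffer_shader matches back together gives the canonical RADV meta pattern: save the state about to be clobbered, bind an internal compute pipeline, feed it push constants, dispatch, restore. This sketch is not compilable as-is; arguments elided in the listing are marked `/* … */`:

    static void
    fill_buffer_shader(struct radv_cmd_buffer *cmd_buffer, uint64_t va, uint64_t size, uint32_t data)
    {
       struct radv_device *device = cmd_buffer->device;
       struct radv_meta_saved_state saved_state;

       /* Save whatever state the meta dispatch will clobber. */
       radv_meta_save(&saved_state, cmd_buffer, /* … save flags … */);

       radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer), VK_PIPELINE_BIND_POINT_COMPUTE,
                            /* … internal fill pipeline … */);
       radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
                             /* … layout, stage, offset, {va, size, data} … */);

       /* One thread per 16 bytes of buffer. */
       radv_unaligned_dispatch(cmd_buffer, DIV_ROUND_UP(size, 16), 1, 1);

       radv_meta_restore(&saved_state, cmd_buffer);
    }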
|
radv_meta_copy.c
     88  copy_buffer_to_image(struct radv_cmd_buffer *cmd_buffer, struct radv_buffer *buffer,
    100     cs = cmd_buffer->qf == RADV_QUEUE_COMPUTE ||
    101          !radv_image_is_renderable(cmd_buffer->device, image);
    106     radv_meta_save(&saved_state, cmd_buffer,
    135     uint32_t queue_mask = radv_image_queue_family_mask(image, cmd_buffer->qf,
    136                                                        cmd_buffer->qf);
    138     radv_layout_dcc_compressed(cmd_buffer->device, image, region->imageSubresource.mipLevel,
    141     radv_decompress_dcc(cmd_buffer, image,
    177     radv_meta_buffer_to_image_cs(cmd_buffer, &buf_bsurf, &img_bsurf, 1, &rect);
    179     radv_meta_blit2d(cmd_buffer, NULL, &buf_bsurf, &img_bsurf, 1, &rect);
    [all …]
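The scattered matches encode one decision: which engine performs the copy. Reassembled as a sketch (intervening code elided):

    /* Take the compute path on compute queues, or when the destination
     * format cannot be bound for rendering. */
    cs = cmd_buffer->qf == RADV_QUEUE_COMPUTE ||
         !radv_image_is_renderable(cmd_buffer->device, image);

    /* … per-region surface setup elided … */

    if (cs)
       radv_meta_buffer_to_image_cs(cmd_buffer, &buf_bsurf, &img_bsurf, 1, &rect);
    else
       radv_meta_blit2d(cmd_buffer, NULL, &buf_bsurf, &img_bsurf, 1, &rect);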
|
radv_meta_resolve.c
    233  emit_resolve(struct radv_cmd_buffer *cmd_buffer, const struct radv_image *src_image,
    237     struct radv_device *device = cmd_buffer->device;
    238     VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
    241     cmd_buffer->state.flush_bits |=
    242        radv_src_access_flush(cmd_buffer, VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, src_image) |
    243        radv_dst_access_flush(cmd_buffer, VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT, src_image) |
    244        radv_dst_access_flush(cmd_buffer, VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, dst_image);
    249     radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1,
    257     radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1,
    264     cmd_buffer->state.flush_bits |=
    [all …]
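Lines 241-244 join into a single statement; reassembled, it shows how the resolve accumulates cache-flush bits so that prior color-attachment traffic is ordered against the resolve draw:

    /* Make prior writes to the source visible, and the destination
     * writable, before the resolve is emitted. */
    cmd_buffer->state.flush_bits |=
       radv_src_access_flush(cmd_buffer, VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, src_image) |
       radv_dst_access_flush(cmd_buffer, VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT, src_image) |
       radv_dst_access_flush(cmd_buffer, VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, dst_image);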
|
radv_meta_fast_clear.c
    461  radv_emit_set_predication_state_from_image(struct radv_cmd_buffer *cmd_buffer,
    472     si_emit_set_predication_state(cmd_buffer, true, PREDICATION_OP_BOOL64, va);
    476  radv_process_color_image_layer(struct radv_cmd_buffer *cmd_buffer, struct radv_image *image,
    480     struct radv_device *device = cmd_buffer->device;
    523     radv_CmdBeginRendering(radv_cmd_buffer_to_handle(cmd_buffer), &rendering_info);
    526     cmd_buffer->state.flush_bits |=
    527        radv_dst_access_flush(cmd_buffer, VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, image);
    529     radv_CmdDraw(radv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);
    532     cmd_buffer->state.flush_bits |=
    533        radv_src_access_flush(cmd_buffer, VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, image);
    [all …]
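Two fragments are worth reassembling: the predication setup, and the flush/draw/flush bracket around the full-screen triangle at lines 526-533. A sketch, with surrounding code elided:

    /* Execute the following packets only if the 64-bit boolean at `va`
     * (a word in the image's clear metadata) is true. */
    si_emit_set_predication_state(cmd_buffer, true, PREDICATION_OP_BOOL64, va);

    /* Make the attachment writable, draw a full-screen triangle
     * (3 vertices, 1 instance), then make the result visible. */
    cmd_buffer->state.flush_bits |=
       radv_dst_access_flush(cmd_buffer, VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, image);
    radv_CmdDraw(radv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);
    cmd_buffer->state.flush_bits |=
       radv_src_access_flush(cmd_buffer, VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, image);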
|
radv_meta.h
    116  void radv_meta_save(struct radv_meta_saved_state *saved_state, struct radv_cmd_buffer *cmd_buffer,
    120                      struct radv_cmd_buffer *cmd_buffer);
    155  void radv_meta_begin_blit2d(struct radv_cmd_buffer *cmd_buffer, struct radv_meta_saved_state *save);
    157  void radv_meta_blit2d(struct radv_cmd_buffer *cmd_buffer, struct radv_meta_blit2d_surf *src_img,
    161  void radv_meta_end_blit2d(struct radv_cmd_buffer *cmd_buffer, struct radv_meta_saved_state *save);
    165  void radv_meta_image_to_buffer(struct radv_cmd_buffer *cmd_buffer,
    170  void radv_meta_buffer_to_image_cs(struct radv_cmd_buffer *cmd_buffer,
    174  void radv_meta_image_to_image_cs(struct radv_cmd_buffer *cmd_buffer,
    178  void radv_meta_clear_image_cs(struct radv_cmd_buffer *cmd_buffer, struct radv_meta_blit2d_surf *dst,
    181  void radv_expand_depth_stencil(struct radv_cmd_buffer *cmd_buffer, struct radv_image *image,
    [all …]
|
/third_party/mesa3d/src/broadcom/vulkan/
v3dv_cmd_buffer.c
     54  cmd_buffer_init(struct v3dv_cmd_buffer *cmd_buffer,
     62     uint8_t *cmd_buffer_driver_start = ((uint8_t *) cmd_buffer) + base_size;
     63     memset(cmd_buffer_driver_start, 0, sizeof(*cmd_buffer) - base_size);
     65     cmd_buffer->device = device;
     67     list_inithead(&cmd_buffer->private_objs);
     68     list_inithead(&cmd_buffer->jobs);
     69     list_inithead(&cmd_buffer->list_link);
     71     cmd_buffer->state.subpass_idx = -1;
     72     cmd_buffer->state.meta.subpass_idx = -1;
     74     cmd_buffer->status = V3DV_CMD_BUFFER_STATUS_INITIALIZED;
    [all …]
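cmd_buffer_init is almost fully visible in the matches. Reassembled as a sketch (base_size and device come from parameters elided in the listing):

    /* Zero only the driver-owned tail of the struct, past the common
     * base of size base_size, then set up list heads and sentinels. */
    uint8_t *cmd_buffer_driver_start = ((uint8_t *) cmd_buffer) + base_size;
    memset(cmd_buffer_driver_start, 0, sizeof(*cmd_buffer) - base_size);

    cmd_buffer->device = device;

    list_inithead(&cmd_buffer->private_objs);
    list_inithead(&cmd_buffer->jobs);
    list_inithead(&cmd_buffer->list_link);

    cmd_buffer->state.subpass_idx = -1;      /* -1 = not inside a render pass */
    cmd_buffer->state.meta.subpass_idx = -1;

    cmd_buffer->status = V3DV_CMD_BUFFER_STATUS_INITIALIZED;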
|
v3dv_uniforms.c
     94  check_push_constants_ubo(struct v3dv_cmd_buffer *cmd_buffer,
     97     if (!(cmd_buffer->state.dirty & V3DV_CMD_DIRTY_PUSH_CONSTANTS_UBO) ||
    101     if (cmd_buffer->push_constants_resource.bo == NULL) {
    102        cmd_buffer->push_constants_resource.bo =
    103           v3dv_bo_alloc(cmd_buffer->device, 4096, "push constants", true);
    105        v3dv_job_add_bo(cmd_buffer->state.job,
    106                        cmd_buffer->push_constants_resource.bo);
    108        if (!cmd_buffer->push_constants_resource.bo) {
    113        bool ok = v3dv_bo_map(cmd_buffer->device,
    114                              cmd_buffer->push_constants_resource.bo,
    [all …]
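A sketch of the lazy allocation reassembled from the matched lines; the exact nesting, the failure branch, and the map size are elided in the listing and marked `/* … */`:

    /* Allocate the 4 KiB push-constants BO the first time it is needed,
     * attach it to the current job, and map it for CPU writes. */
    if (cmd_buffer->push_constants_resource.bo == NULL) {
       cmd_buffer->push_constants_resource.bo =
          v3dv_bo_alloc(cmd_buffer->device, 4096, "push constants", true);

       v3dv_job_add_bo(cmd_buffer->state.job,
                       cmd_buffer->push_constants_resource.bo);

       if (!cmd_buffer->push_constants_resource.bo) {
          /* … allocation-failure handling elided … */
       }

       bool ok = v3dv_bo_map(cmd_buffer->device,
                             cmd_buffer->push_constants_resource.bo,
                             /* … size … */);
       /* … */
    }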
|
v3dvx_cmd_buffer.c
     77  v3dX(cmd_buffer_end_render_pass_secondary)(struct v3dv_cmd_buffer *cmd_buffer)
     79     assert(cmd_buffer->state.job);
     80     v3dv_cl_ensure_space_with_branch(&cmd_buffer->state.job->bcl,
     82     v3dv_return_if_oom(cmd_buffer, NULL);
     83     cl_emit(&cmd_buffer->state.job->bcl, RETURN_FROM_SUB_LIST, ret);
    103  cmd_buffer_render_pass_emit_load(struct v3dv_cmd_buffer *cmd_buffer,
    199  cmd_buffer_render_pass_emit_loads(struct v3dv_cmd_buffer *cmd_buffer,
    203     const struct v3dv_cmd_buffer_state *state = &cmd_buffer->state;
    247        cmd_buffer_render_pass_emit_load(cmd_buffer, cl, iview,
    291        cmd_buffer_render_pass_emit_load(cmd_buffer, cl,
    [all …]
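Lines 77-83 reassemble into the termination of a secondary command list: reserve control-list space, bail cleanly on OOM, then emit the packet. The size argument to the space check is elided in the listing:

    /* End a secondary command buffer's binning control list so the
     * primary can branch into it and return. */
    assert(cmd_buffer->state.job);
    v3dv_cl_ensure_space_with_branch(&cmd_buffer->state.job->bcl, /* … size … */);
    v3dv_return_if_oom(cmd_buffer, NULL);
    cl_emit(&cmd_buffer->state.job->bcl, RETURN_FROM_SUB_LIST, ret);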
|
/third_party/mesa3d/src/intel/vulkan/
anv_cmd_buffer.c
     45  anv_cmd_state_init(struct anv_cmd_buffer *cmd_buffer)
     47     struct anv_cmd_state *state = &cmd_buffer->state;
     57  anv_cmd_pipeline_state_finish(struct anv_cmd_buffer *cmd_buffer,
     62        anv_descriptor_set_layout_unref(cmd_buffer->device,
     64        vk_free(&cmd_buffer->vk.pool->alloc, pipe_state->push_descriptors[i]);
     70  anv_cmd_state_finish(struct anv_cmd_buffer *cmd_buffer)
     72     struct anv_cmd_state *state = &cmd_buffer->state;
     74     anv_cmd_pipeline_state_finish(cmd_buffer, &state->gfx.base);
     75     anv_cmd_pipeline_state_finish(cmd_buffer, &state->compute.base);
     79  anv_cmd_state_reset(struct anv_cmd_buffer *cmd_buffer)
    [all …]
|
genX_blorp_exec.c
     42     struct anv_cmd_buffer *cmd_buffer = _batch->driver_batch;   [in blorp_measure_start()]
     43     trace_intel_begin_blorp(&cmd_buffer->trace;   [in blorp_measure_start()]
     44     anv_measure_snapshot(cmd_buffer,   [in blorp_measure_start()]
     52     struct anv_cmd_buffer *cmd_buffer = _batch->driver_batch;   [in blorp_measure_end()]
     53     trace_intel_end_blorp(&cmd_buffer->trace,   [in blorp_measure_end()]
     65     struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;   [in blorp_emit_dwords()]
     66     return anv_batch_emit_dwords(&cmd_buffer->batch, n);   [in blorp_emit_dwords()]
     73     struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;   [in blorp_emit_reloc()]
     74     assert(cmd_buffer->batch.start <= location &&   [in blorp_emit_reloc()]
     75            location < cmd_buffer->batch.end);   [in blorp_emit_reloc()]
    [all …]
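Every match here follows the same shape: blorp calls back into the driver through blorp_batch's driver_batch pointer, which anv points at its command buffer. Reassembled from lines 65-66:

    /* blorp asks the driver for n dwords of batch space; anv satisfies it
     * straight out of the command buffer's batch. */
    static void *
    blorp_emit_dwords(struct blorp_batch *batch, unsigned n)
    {
       struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
       return anv_batch_emit_dwords(&cmd_buffer->batch, n);
    }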
|
genX_cmd_buffer.c
     56  static void genX(flush_pipeline_select)(struct anv_cmd_buffer *cmd_buffer,
     91  is_render_queue_cmd_buffer(const struct anv_cmd_buffer *cmd_buffer)
     93     struct anv_queue_family *queue_family = cmd_buffer->queue_family;
     98  genX(cmd_buffer_emit_state_base_address)(struct anv_cmd_buffer *cmd_buffer)
    100     struct anv_device *device = cmd_buffer->device;
    106     cmd_buffer->state.descriptors_dirty |= ~0;
    109     anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
    114        &cmd_buffer->batch, GENX(3DSTATE_BINDING_TABLE_POOL_ALLOC), btpa) {
    116           anv_cmd_buffer_surface_base_address(cmd_buffer);
    128     anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
    [all …]
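The `anv_batch_emit(…) { … }` matches are uses of anv's block-emit macro: the body fills in the packet's fields before the macro packs them into the batch. A minimal sketch of the idiom; the two fields shown are illustrative assumptions, not the ones this code sets:

    anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
       pc.CommandStreamerStallEnable = true;    /* illustrative */
       pc.RenderTargetCacheFlushEnable = true;  /* illustrative */
    }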
|
anv_measure.c
     88  config_from_command_buffer(struct anv_cmd_buffer *cmd_buffer)
     90     return cmd_buffer->device->physical->measure_device.config;
     94  anv_measure_init(struct anv_cmd_buffer *cmd_buffer)
     96     struct intel_measure_config *config = config_from_command_buffer(cmd_buffer);
     97     struct anv_device *device = cmd_buffer->device;
    100        cmd_buffer->measure = NULL;
    111        vk_alloc(&cmd_buffer->vk.pool->alloc,
    125     cmd_buffer->measure = measure;
    129  anv_measure_start_snapshot(struct anv_cmd_buffer *cmd_buffer,
    134     struct anv_batch *batch = &cmd_buffer->batch;
    [all …]
|
anv_measure.h
     33  void anv_measure_init(struct anv_cmd_buffer *cmd_buffer);
     34  void anv_measure_destroy(struct anv_cmd_buffer *cmd_buffer);
     35  void anv_measure_reset(struct anv_cmd_buffer *cmd_buffer);
     37  void _anv_measure_snapshot(struct anv_cmd_buffer *cmd_buffer,
     43  void _anv_measure_endcommandbuffer(struct anv_cmd_buffer *cmd_buffer);
     46  void _anv_measure_beginrenderpass(struct anv_cmd_buffer *cmd_buffer);
     52  void _anv_measure_submit(struct anv_cmd_buffer *cmd_buffer);
     62  #define anv_measure_snapshot(cmd_buffer, type, event_name, count) \
     63     if (unlikely(cmd_buffer->measure)) \
     64        _anv_measure_snapshot(cmd_buffer, type, event_name, count)
    [all …]
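Lines 62-64 reassemble into the public macro: command buffers that have measurement disabled (measure == NULL) pay only a predicted-not-taken branch, and the underscore-prefixed function does the real work:

    #define anv_measure_snapshot(cmd_buffer, type, event_name, count) \
       if (unlikely(cmd_buffer->measure)) \
          _anv_measure_snapshot(cmd_buffer, type, event_name, count)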
|
anv_batch_chain.c
    346  anv_batch_bo_create(struct anv_cmd_buffer *cmd_buffer,
    352     struct anv_batch_bo *bbo = vk_zalloc(&cmd_buffer->vk.pool->alloc, sizeof(*bbo),
    355        return vk_error(cmd_buffer, VK_ERROR_OUT_OF_HOST_MEMORY);
    357     result = anv_bo_pool_alloc(&cmd_buffer->device->batch_bo_pool,
    362     result = anv_reloc_list_init(&bbo->relocs, &cmd_buffer->vk.pool->alloc);
    371     anv_bo_pool_free(&cmd_buffer->device->batch_bo_pool, bbo->bo);
    373     vk_free(&cmd_buffer->vk.pool->alloc, bbo);
    379  anv_batch_bo_clone(struct anv_cmd_buffer *cmd_buffer,
    385     struct anv_batch_bo *bbo = vk_alloc(&cmd_buffer->vk.pool->alloc, sizeof(*bbo),
    388        return vk_error(cmd_buffer, VK_ERROR_OUT_OF_HOST_MEMORY);
    [all …]
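The create path is a textbook unwinding sequence: each successful step gains a cleanup that later failures run in reverse. A condensed sketch; elided arguments are marked `/* … */` and the label names are assumptions:

    VkResult result;

    struct anv_batch_bo *bbo = vk_zalloc(&cmd_buffer->vk.pool->alloc, sizeof(*bbo),
                                         /* … align, scope … */);
    if (bbo == NULL)
       return vk_error(cmd_buffer, VK_ERROR_OUT_OF_HOST_MEMORY);

    result = anv_bo_pool_alloc(&cmd_buffer->device->batch_bo_pool, /* … */);
    if (result != VK_SUCCESS)
       goto fail_alloc;                 /* label name assumed */

    result = anv_reloc_list_init(&bbo->relocs, &cmd_buffer->vk.pool->alloc);
    if (result != VK_SUCCESS)
       goto fail_bo_alloc;              /* label name assumed */

    /* … success path elided … */

 fail_bo_alloc:
    anv_bo_pool_free(&cmd_buffer->device->batch_bo_pool, bbo->bo);
 fail_alloc:
    vk_free(&cmd_buffer->vk.pool->alloc, bbo);
    return result;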
|
genX_query.c
    330  khr_perf_query_ensure_relocs(struct anv_cmd_buffer *cmd_buffer)
    332     if (anv_batch_has_error(&cmd_buffer->batch))
    335     if (cmd_buffer->self_mod_locations)
    338     struct anv_device *device = cmd_buffer->device;
    341     cmd_buffer->self_mod_locations =
    342        vk_alloc(&cmd_buffer->vk.pool->alloc,
    343                 pdevice->n_perf_query_commands * sizeof(*cmd_buffer->self_mod_locations), 8,
    346     if (!cmd_buffer->self_mod_locations) {
    347        anv_batch_set_error(&cmd_buffer->batch, VK_ERROR_OUT_OF_HOST_MEMORY);
    613  emit_ps_depth_count(struct anv_cmd_buffer *cmd_buffer,
    [all …]
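Reassembled sketch of the ensure-relocs helper: allocate, once per command buffer, the array of batch locations the perf-query code will patch later. Return values and the allocation scope are elided in the listing:

    /* Early outs: a batch that already failed, or an array that was
     * already allocated. */
    if (anv_batch_has_error(&cmd_buffer->batch))
       return;
    if (cmd_buffer->self_mod_locations)
       return;

    struct anv_device *device = cmd_buffer->device;  /* pdevice below derives from this (elided) */

    cmd_buffer->self_mod_locations =
       vk_alloc(&cmd_buffer->vk.pool->alloc,
                pdevice->n_perf_query_commands * sizeof(*cmd_buffer->self_mod_locations), 8,
                /* … allocation scope … */);

    if (!cmd_buffer->self_mod_locations)
       anv_batch_set_error(&cmd_buffer->batch, VK_ERROR_OUT_OF_HOST_MEMORY);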
|
gfx8_cmd_buffer.c
     36  genX(cmd_buffer_enable_pma_fix)(struct anv_cmd_buffer *cmd_buffer, bool enable)
     38     if (cmd_buffer->state.pma_fix_enabled == enable)
     41     cmd_buffer->state.pma_fix_enabled = enable;
     52     anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
     72     anv_batch_emit(&cmd_buffer->batch, GENX(MI_LOAD_REGISTER_IMM), lri) {
     85     anv_batch_emit(&cmd_buffer->batch, GENX(MI_LOAD_REGISTER_IMM), lri) {
     99     anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
    110  want_depth_pma_fix(struct anv_cmd_buffer *cmd_buffer,
    157     if (!cmd_buffer->state.hiz_enabled)
    161     struct anv_graphics_pipeline *pipeline = cmd_buffer->state.gfx.pipeline;
    [all …]
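The PMA-fix toggle reassembles into a cache-then-toggle pattern: skip the expensive sequence when the cached state already matches, otherwise stall, rewrite the workaround register(s), and stall again. The packet bodies are elided in the listing:

    if (cmd_buffer->state.pma_fix_enabled == enable)
       return;                          /* cached state already matches */

    cmd_buffer->state.pma_fix_enabled = enable;

    /* Stall, rewrite the register(s), stall again. */
    anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) { /* … */ }
    anv_batch_emit(&cmd_buffer->batch, GENX(MI_LOAD_REGISTER_IMM), lri) { /* … */ }
    anv_batch_emit(&cmd_buffer->batch, GENX(MI_LOAD_REGISTER_IMM), lri) { /* … */ }
    anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) { /* … */ }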
|
gfx7_cmd_buffer.c
     37  get_depth_format(struct anv_cmd_buffer *cmd_buffer)
     39     struct anv_cmd_graphics_state *gfx = &cmd_buffer->state.gfx;
     60  genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
     62     struct anv_graphics_pipeline *pipeline = cmd_buffer->state.gfx.pipeline;
     64        &cmd_buffer->vk.dynamic_graphics_state;
     66     if ((cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE |
     78        genX(raster_polygon_mode)(cmd_buffer->state.gfx.pipeline,
     89        .DepthBufferSurfaceFormat = get_depth_format(cmd_buffer),
    104        anv_batch_emit_merge(&cmd_buffer->batch, sf_dw, pipeline->gfx7.sf);
    110        anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
    [all …]
|
anv_blorp.c
    132  anv_blorp_batch_init(struct anv_cmd_buffer *cmd_buffer,
    135     if (!(cmd_buffer->queue_family->queueFlags & VK_QUEUE_GRAPHICS_BIT)) {
    136        assert(cmd_buffer->queue_family->queueFlags & VK_QUEUE_COMPUTE_BIT);
    140     blorp_batch_init(&cmd_buffer->device->blorp, batch, cmd_buffer, flags);
    300  copy_image(struct anv_cmd_buffer *cmd_buffer,
    345     get_blorp_surf_for_anv_image(cmd_buffer->device,
    350     get_blorp_surf_for_anv_image(cmd_buffer->device,
    355     anv_cmd_buffer_mark_image_written(cmd_buffer, dst_image,
    369     if (get_blorp_surf_for_anv_shadow_image(cmd_buffer->device,
    384     get_blorp_surf_for_anv_image(cmd_buffer->device, src_image, src_mask,
    [all …]
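Lines 132-140 reassemble into the batch-init gate: blorp operations recorded on a non-graphics queue must not assume 3D hardware state, so the queue's flags are checked up front. The branch body is elided in the listing; presumably it adjusts `flags` to select blorp's compute path:

    if (!(cmd_buffer->queue_family->queueFlags & VK_QUEUE_GRAPHICS_BIT)) {
       assert(cmd_buffer->queue_family->queueFlags & VK_QUEUE_COMPUTE_BIT);
       /* … */
    }

    blorp_batch_init(&cmd_buffer->device->blorp, batch, cmd_buffer, flags);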
|
/third_party/mesa3d/src/gallium/frontends/lavapipe/
lvp_cmd_buffer.c
     34     struct lvp_cmd_buffer *cmd_buffer;   [in lvp_create_cmd_buffer()]
     36     cmd_buffer = vk_alloc(&pool->vk.alloc, sizeof(*cmd_buffer), 8,
     38     if (cmd_buffer == NULL)
     41     VkResult result = vk_command_buffer_init(&cmd_buffer->vk, &pool->vk, level);
     43        vk_free(&pool->vk.alloc, cmd_buffer);
     47     cmd_buffer->device = device;
     48     cmd_buffer->pool = pool;
     50     cmd_buffer->status = LVP_CMD_BUFFER_STATUS_INITIAL;
     52     list_addtail(&cmd_buffer->pool_link, &pool->cmd_buffers);
     57     list_inithead(&cmd_buffer->pool_link);
    [all …]
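Reassembled sketch of the create path: allocate from the pool, initialize the common vk_command_buffer, and unwind the allocation if that fails. Elided arguments and return values are marked `/* … */`:

    struct lvp_cmd_buffer *cmd_buffer;

    cmd_buffer = vk_alloc(&pool->vk.alloc, sizeof(*cmd_buffer), 8,
                          /* … allocation scope … */);
    if (cmd_buffer == NULL)
       return /* … VK_ERROR_OUT_OF_HOST_MEMORY, elided … */;

    VkResult result = vk_command_buffer_init(&cmd_buffer->vk, &pool->vk, level);
    if (result != VK_SUCCESS) {
       vk_free(&pool->vk.alloc, cmd_buffer);
       return result;
    }

    cmd_buffer->device = device;
    cmd_buffer->pool = pool;
    cmd_buffer->status = LVP_CMD_BUFFER_STATUS_INITIAL;

    list_addtail(&cmd_buffer->pool_link, &pool->cmd_buffers);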
|
/third_party/mesa3d/src/imagination/vulkan/
pvr_cmd_buffer.c
     83  static void pvr_cmd_buffer_free_sub_cmd(struct pvr_cmd_buffer *cmd_buffer,
     89        pvr_bo_free(cmd_buffer->device, sub_cmd->gfx.depth_bias_bo);
     90        pvr_bo_free(cmd_buffer->device, sub_cmd->gfx.scissor_bo);
    103           vk_free(&cmd_buffer->vk.pool->alloc, transfer_cmd);
    113     vk_free(&cmd_buffer->vk.pool->alloc, sub_cmd);
    116  static void pvr_cmd_buffer_free_sub_cmds(struct pvr_cmd_buffer *cmd_buffer)
    120        &cmd_buffer->sub_cmds,
    122        pvr_cmd_buffer_free_sub_cmd(cmd_buffer, sub_cmd);
    128     struct pvr_cmd_buffer *cmd_buffer =   [in pvr_cmd_buffer_destroy()]
    131     vk_free(&cmd_buffer->vk.pool->alloc,
    [all …]
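The free loop at lines 116-122 walks the sub-command list; the iterator macro is elided in the listing, so the safe-iteration form below is an assumption (entries are destroyed while walking), as are the struct and member names:

    static void pvr_cmd_buffer_free_sub_cmds(struct pvr_cmd_buffer *cmd_buffer)
    {
       /* Safe variant: each entry is freed inside the loop body. */
       list_for_each_entry_safe (struct pvr_sub_cmd, sub_cmd,
                                 &cmd_buffer->sub_cmds, link) {
          pvr_cmd_buffer_free_sub_cmd(cmd_buffer, sub_cmd);
       }
    }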
|
/third_party/mesa3d/src/freedreno/vulkan/
tu_dynamic_rendering.c
     27     struct tu_cmd_buffer *cmd_buffer;
     49     VkCommandBuffer vk_buf = tu_cmd_buffer_to_handle(entry->cmd_buffer);   [in get_cmd_buffer()]
     75     TU_FROM_HANDLE(tu_cmd_buffer, cmd_buffer, vk_buf);
     78        .cmd_buffer = cmd_buffer,
     84     *cmd_buffer_out = cmd_buffer;
    132     struct tu_cmd_buffer *cmd_buffer = NULL;   [in tu_insert_dynamic_cmdbufs()]
    143        tu_append_pre_chain(cmd_buffer, old_cmds[i]);
    151        tu_cmd_render(cmd_buffer);
    153     tu_cs_emit_pkt7(&cmd_buffer->cs, CP_MEM_WRITE, 3);
    154     tu_cs_emit_qw(&cmd_buffer->cs,
    [all …]
|
/third_party/mesa3d/src/amd/vulkan/layers/
radv_sqtt_layer.c
     32  radv_write_begin_general_api_marker(struct radv_cmd_buffer *cmd_buffer,
     40     radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
     44  radv_write_end_general_api_marker(struct radv_cmd_buffer *cmd_buffer,
     53     radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
     57  radv_write_event_marker(struct radv_cmd_buffer *cmd_buffer,
     65     marker.cmd_id = cmd_buffer->state.num_events++;
     80     radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
     84  radv_write_event_with_dims_marker(struct radv_cmd_buffer *cmd_buffer,
     92     marker.event.cmd_id = cmd_buffer->state.num_events++;
    100     radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
    [all …]
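Every marker writer in this layer ends the same way; reassembled from the matched lines, with the marker's other fields elided:

    /* Stamp the marker with a monotonically increasing command id, then
     * write it dword-by-dword to the SQ thread-trace userdata registers. */
    marker.cmd_id = cmd_buffer->state.num_events++;
    /* … remaining marker fields elided in the listing … */
    radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);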
|