/external/mesa3d/src/amd/vulkan/
D | radv_cmd_buffer.c
     54  static void radv_handle_image_transition(struct radv_cmd_buffer *cmd_buffer,
    105  radv_bind_dynamic_state(struct radv_cmd_buffer *cmd_buffer,  in radv_bind_dynamic_state() argument
    108  struct radv_dynamic_state *dest = &cmd_buffer->state.dynamic;  in radv_bind_dynamic_state()
    299  cmd_buffer->state.dirty |= dest_mask;  in radv_bind_dynamic_state()
    303  radv_bind_streamout_state(struct radv_cmd_buffer *cmd_buffer,  in radv_bind_streamout_state() argument
    306  struct radv_streamout_state *so = &cmd_buffer->state.streamout;  in radv_bind_streamout_state()
    310  cmd_buffer->device->physical_device->use_ngg_streamout)  in radv_bind_streamout_state()
    320  bool radv_cmd_buffer_uses_mec(struct radv_cmd_buffer *cmd_buffer)  in radv_cmd_buffer_uses_mec() argument
    322  return cmd_buffer->queue_family_index == RADV_QUEUE_COMPUTE &&  in radv_cmd_buffer_uses_mec()
    323  cmd_buffer->device->physical_device->rad_info.chip_class >= GFX7;  in radv_cmd_buffer_uses_mec()
    [all …]
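Note: the radv_bind_dynamic_state() matches above are the driver-side sink for Vulkan dynamic state; the values land in cmd_buffer->state.dynamic and the matching dirty bits are raised. For reference, a minimal application-level sketch of the calls that feed this path (the helper name and the width/height parameters are illustrative, not taken from the driver source):

```c
#include <vulkan/vulkan.h>

/* Record dynamic viewport/scissor state. The driver copies these values
 * into cmd_buffer->state.dynamic and marks the state dirty, as the
 * radv_bind_dynamic_state() matches above show. */
static void record_dynamic_state(VkCommandBuffer cmd_buf,
                                 uint32_t width, uint32_t height)
{
   const VkViewport viewport = {
      .x = 0.0f, .y = 0.0f,
      .width = (float)width, .height = (float)height,
      .minDepth = 0.0f, .maxDepth = 1.0f,
   };
   const VkRect2D scissor = {
      .offset = { 0, 0 },
      .extent = { width, height },
   };

   /* Valid only if the bound pipeline declared these states as dynamic. */
   vkCmdSetViewport(cmd_buf, 0, 1, &viewport);
   vkCmdSetScissor(cmd_buf, 0, 1, &scissor);
}
```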
D | radv_meta.h
    124  struct radv_cmd_buffer *cmd_buffer, uint32_t flags);
    127  struct radv_cmd_buffer *cmd_buffer);
    161  void radv_meta_begin_blit2d(struct radv_cmd_buffer *cmd_buffer,
    164  void radv_meta_blit2d(struct radv_cmd_buffer *cmd_buffer,
    171  void radv_meta_end_blit2d(struct radv_cmd_buffer *cmd_buffer,
    177  void radv_meta_image_to_buffer(struct radv_cmd_buffer *cmd_buffer,
    183  void radv_meta_buffer_to_image_cs(struct radv_cmd_buffer *cmd_buffer,
    188  void radv_meta_image_to_image_cs(struct radv_cmd_buffer *cmd_buffer,
    193  void radv_meta_clear_image_cs(struct radv_cmd_buffer *cmd_buffer,
    197  void radv_decompress_depth_stencil(struct radv_cmd_buffer *cmd_buffer,
    [all …]
D | radv_meta_buffer.c
    268  static void fill_buffer_shader(struct radv_cmd_buffer *cmd_buffer,  in fill_buffer_shader() argument
    272  struct radv_device *device = cmd_buffer->device;  in fill_buffer_shader()
    276  radv_meta_save(&saved_state, cmd_buffer,  in fill_buffer_shader()
    287  radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),  in fill_buffer_shader()
    291  radv_meta_push_descriptor_set(cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE,  in fill_buffer_shader()
    310  radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),  in fill_buffer_shader()
    315  radv_CmdDispatch(radv_cmd_buffer_to_handle(cmd_buffer), block_count, 1, 1);  in fill_buffer_shader()
    317  radv_meta_restore(&saved_state, cmd_buffer);  in fill_buffer_shader()
    320  static void copy_buffer_shader(struct radv_cmd_buffer *cmd_buffer,  in copy_buffer_shader() argument
    326  struct radv_device *device = cmd_buffer->device;  in copy_buffer_shader()
    [all …]
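Note: fill_buffer_shader()/copy_buffer_shader() are internal compute paths used for buffer fills and copies; the save/bind/push/dispatch/restore sequence in the matches is the usual meta-operation pattern. A hedged sketch of the API-level commands they ultimately service (all handles are assumed to be created elsewhere):

```c
#include <vulkan/vulkan.h>

/* Fill dst with a 32-bit pattern, then copy src over it. The driver may
 * lower either command to the compute paths shown above. */
static void record_fill_and_copy(VkCommandBuffer cmd_buf,
                                 VkBuffer dst, VkBuffer src,
                                 VkDeviceSize size)
{
   /* size must be a multiple of 4 (or VK_WHOLE_SIZE). */
   vkCmdFillBuffer(cmd_buf, dst, 0, size, 0xdeadbeef);

   /* The copy writes the same range the fill just wrote, so order the
    * two transfer writes with a buffer memory barrier. */
   const VkBufferMemoryBarrier barrier = {
      .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
      .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
      .dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
      .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
      .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
      .buffer = dst,
      .offset = 0,
      .size = size,
   };
   vkCmdPipelineBarrier(cmd_buf,
                        VK_PIPELINE_STAGE_TRANSFER_BIT,
                        VK_PIPELINE_STAGE_TRANSFER_BIT,
                        0, 0, NULL, 1, &barrier, 0, NULL);

   const VkBufferCopy region = { .srcOffset = 0, .dstOffset = 0, .size = size };
   vkCmdCopyBuffer(cmd_buf, src, dst, 1, &region);
}
```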
D | radv_meta_copy.c
    124  copy_buffer_to_image(struct radv_cmd_buffer *cmd_buffer,  in copy_buffer_to_image() argument
    130  bool cs = cmd_buffer->queue_family_index == RADV_QUEUE_COMPUTE;  in copy_buffer_to_image()
    139  radv_meta_save(&saved_state, cmd_buffer,  in copy_buffer_to_image()
    148  old_predicating = cmd_buffer->state.predicating;  in copy_buffer_to_image()
    149  cmd_buffer->state.predicating = false;  in copy_buffer_to_image()
    189  cmd_buffer->queue_family_index,  in copy_buffer_to_image()
    190  cmd_buffer->queue_family_index);  in copy_buffer_to_image()
    191  …bool compressed = radv_layout_dcc_compressed(cmd_buffer->device, image, layout, false, queue_mask);  in copy_buffer_to_image()
    193  radv_decompress_dcc(cmd_buffer, image, &(VkImageSubresourceRange) {  in copy_buffer_to_image()
    228  !image_is_renderable(cmd_buffer->device, img_bsurf.image)) {  in copy_buffer_to_image()
    [all …]
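Note: copy_buffer_to_image() is the driver-side implementation behind vkCmdCopyBufferToImage (taking a compute path when recording on a compute queue, per the RADV_QUEUE_COMPUTE check above). A minimal sketch of the API call it services, assuming a staging buffer and a 2D color image already in TRANSFER_DST_OPTIMAL layout:

```c
#include <vulkan/vulkan.h>

/* Upload tightly packed pixels from a staging buffer into mip 0 of a
 * 2D color image. */
static void record_buffer_to_image(VkCommandBuffer cmd_buf,
                                   VkBuffer staging, VkImage image,
                                   uint32_t width, uint32_t height)
{
   const VkBufferImageCopy region = {
      .bufferOffset = 0,
      .bufferRowLength = 0,      /* 0 = tightly packed rows */
      .bufferImageHeight = 0,
      .imageSubresource = {
         .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
         .mipLevel = 0,
         .baseArrayLayer = 0,
         .layerCount = 1,
      },
      .imageOffset = { 0, 0, 0 },
      .imageExtent = { width, height, 1 },
   };
   vkCmdCopyBufferToImage(cmd_buf, staging, image,
                          VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
}
```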
D | radv_meta_resolve.c
    315  emit_resolve(struct radv_cmd_buffer *cmd_buffer,  in emit_resolve() argument
    320  struct radv_device *device = cmd_buffer->device;  in emit_resolve()
    321  VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);  in emit_resolve()
    324  cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;  in emit_resolve()
    329  radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkViewport) {  in emit_resolve()
    338  radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkRect2D) {  in emit_resolve()
    344  cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;  in emit_resolve()
    359  struct radv_cmd_buffer *cmd_buffer,  in radv_pick_resolve_method_images() argument
    364  cmd_buffer->queue_family_index,  in radv_pick_resolve_method_images()
    365  cmd_buffer->queue_family_index);  in radv_pick_resolve_method_images()
    [all …]
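Note: emit_resolve() and radv_pick_resolve_method_images() belong to the MSAA resolve path; the latter appears to select a resolve method from the queue family and image state. At the API level the request looks like this (image handles, sizes, and layouts are assumptions of the sketch):

```c
#include <vulkan/vulkan.h>

/* Resolve a multisampled color image into a single-sample image. Both
 * images are assumed to already be in the layouts named below. */
static void record_resolve(VkCommandBuffer cmd_buf,
                           VkImage msaa_src, VkImage dst,
                           uint32_t width, uint32_t height)
{
   const VkImageResolve region = {
      .srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 },
      .srcOffset = { 0, 0, 0 },
      .dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 },
      .dstOffset = { 0, 0, 0 },
      .extent = { width, height, 1 },
   };
   vkCmdResolveImage(cmd_buf,
                     msaa_src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                     dst, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                     1, &region);
}
```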
D | radv_meta.c
    35  struct radv_cmd_buffer *cmd_buffer, uint32_t flags)  in radv_meta_save() argument
    42  radv_get_descriptors_state(cmd_buffer, bind_point);  in radv_meta_save()
    52  state->old_pipeline = cmd_buffer->state.pipeline;  in radv_meta_save()
    55  state->viewport.count = cmd_buffer->state.dynamic.viewport.count;  in radv_meta_save()
    57  cmd_buffer->state.dynamic.viewport.viewports,  in radv_meta_save()
    61  state->scissor.count = cmd_buffer->state.dynamic.scissor.count;  in radv_meta_save()
    63  cmd_buffer->state.dynamic.scissor.scissors,  in radv_meta_save()
    66  state->cull_mode = cmd_buffer->state.dynamic.cull_mode;  in radv_meta_save()
    67  state->front_face = cmd_buffer->state.dynamic.front_face;  in radv_meta_save()
    69  state->primitive_topology = cmd_buffer->state.dynamic.primitive_topology;  in radv_meta_save()
    [all …]
D | radv_meta_clear.c
    396  emit_color_clear(struct radv_cmd_buffer *cmd_buffer,  in emit_color_clear() argument
    401  struct radv_device *device = cmd_buffer->device;  in emit_color_clear()
    402  const struct radv_subpass *subpass = cmd_buffer->state.subpass;  in emit_color_clear()
    405  const struct radv_image_view *iview = cmd_buffer->state.attachments ?  in emit_color_clear()
    406  cmd_buffer->state.attachments[pass_att].iview : NULL;  in emit_color_clear()
    411  VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);  in emit_color_clear()
    422  samples = cmd_buffer->state.pass->attachments[pass_att].samples;  in emit_color_clear()
    423  format = cmd_buffer->state.pass->attachments[pass_att].format;  in emit_color_clear()
    439  cmd_buffer->record_result = ret;  in emit_color_clear()
    449  cmd_buffer->record_result = ret;  in emit_color_clear()
    [all …]
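Note: emit_color_clear() is one of the draw-based clear paths; it pulls the format and sample count from the current subpass attachment. The application-level operation that reaches it can be as simple as vkCmdClearAttachments inside a render pass (attachment index, color, and extent below are illustrative):

```c
#include <vulkan/vulkan.h>

/* Clear color attachment 0 of the current subpass to opaque red.
 * Must be recorded inside a render pass instance. */
static void record_clear_attachment(VkCommandBuffer cmd_buf,
                                    uint32_t width, uint32_t height)
{
   const VkClearAttachment att = {
      .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
      .colorAttachment = 0,
      .clearValue = { .color = { .float32 = { 1.0f, 0.0f, 0.0f, 1.0f } } },
   };
   const VkClearRect rect = {
      .rect = { .offset = { 0, 0 }, .extent = { width, height } },
      .baseArrayLayer = 0,
      .layerCount = 1,
   };
   vkCmdClearAttachments(cmd_buf, 1, &att, 1, &rect);
}
```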
D | radv_meta_fast_clear.c
    570  radv_emit_set_predication_state_from_image(struct radv_cmd_buffer *cmd_buffer,  in radv_emit_set_predication_state_from_image() argument
    581  si_emit_set_predication_state(cmd_buffer, true, va);  in radv_emit_set_predication_state_from_image()
    585  radv_process_color_image_layer(struct radv_cmd_buffer *cmd_buffer,  in radv_process_color_image_layer() argument
    590  struct radv_device *device = cmd_buffer->device;  in radv_process_color_image_layer()
    623  }, &cmd_buffer->pool->alloc, &fb_h);  in radv_process_color_image_layer()
    625  radv_cmd_buffer_begin_render_pass(cmd_buffer,  in radv_process_color_image_layer()
    638  radv_cmd_buffer_set_subpass(cmd_buffer,  in radv_process_color_image_layer()
    639  &cmd_buffer->state.pass->subpasses[0]);  in radv_process_color_image_layer()
    642  cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB |  in radv_process_color_image_layer()
    645  radv_CmdDraw(radv_cmd_buffer_to_handle(cmd_buffer), 3, 1, 0, 0);  in radv_process_color_image_layer()
    [all …]
D | si_cmd_buffer.c
    793  si_get_ia_multi_vgt_param(struct radv_cmd_buffer *cmd_buffer,  in si_get_ia_multi_vgt_param() argument
    799  enum chip_class chip_class = cmd_buffer->device->physical_device->rad_info.chip_class;  in si_get_ia_multi_vgt_param()
    800  enum radeon_family family = cmd_buffer->device->physical_device->rad_info.family;  in si_get_ia_multi_vgt_param()
    801  struct radeon_info *info = &cmd_buffer->device->physical_device->rad_info;  in si_get_ia_multi_vgt_param()
    808  bool partial_es_wave = cmd_buffer->state.pipeline->graphics.ia_multi_vgt_param.partial_es_wave;  in si_get_ia_multi_vgt_param()
    812  if (radv_pipeline_has_tess(cmd_buffer->state.pipeline)) {  in si_get_ia_multi_vgt_param()
    814  prim_vertex_count.min = cmd_buffer->state.pipeline->graphics.tess_patch_control_points;  in si_get_ia_multi_vgt_param()
    822  if (num_prims < cmd_buffer->state.pipeline->graphics.ia_multi_vgt_param.primgroup_size)  in si_get_ia_multi_vgt_param()
    826  ia_switch_on_eoi = cmd_buffer->state.pipeline->graphics.ia_multi_vgt_param.ia_switch_on_eoi;  in si_get_ia_multi_vgt_param()
    827  partial_vs_wave = cmd_buffer->state.pipeline->graphics.ia_multi_vgt_param.partial_vs_wave;  in si_get_ia_multi_vgt_param()
    [all …]
D | radv_meta_resolve_fs.c
    827  radv_get_resolve_pipeline(struct radv_cmd_buffer *cmd_buffer,  in radv_get_resolve_pipeline() argument
    831  struct radv_device *device = cmd_buffer->device;  in radv_get_resolve_pipeline()
    844  cmd_buffer->record_result = ret;  in radv_get_resolve_pipeline()
    853  emit_resolve(struct radv_cmd_buffer *cmd_buffer,  in emit_resolve() argument
    860  struct radv_device *device = cmd_buffer->device;  in emit_resolve()
    861  VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);  in emit_resolve()
    864  radv_meta_push_descriptor_set(cmd_buffer,  in emit_resolve()
    866  cmd_buffer->device->meta_state.resolve_fragment.p_layout,  in emit_resolve()
    886  cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;  in emit_resolve()
    892  radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),  in emit_resolve()
    [all …]
/external/mesa3d/src/intel/vulkan/
D | anv_cmd_buffer.c
    197  anv_cmd_state_init(struct anv_cmd_buffer *cmd_buffer)  in anv_cmd_state_init() argument
    199  struct anv_cmd_state *state = &cmd_buffer->state;  in anv_cmd_state_init()
    209  anv_cmd_pipeline_state_finish(struct anv_cmd_buffer *cmd_buffer,  in anv_cmd_pipeline_state_finish() argument
    214  anv_descriptor_set_layout_unref(cmd_buffer->device,  in anv_cmd_pipeline_state_finish()
    216  vk_free(&cmd_buffer->pool->alloc, pipe_state->push_descriptors[i]);  in anv_cmd_pipeline_state_finish()
    222  anv_cmd_state_finish(struct anv_cmd_buffer *cmd_buffer)  in anv_cmd_state_finish() argument
    224  struct anv_cmd_state *state = &cmd_buffer->state;  in anv_cmd_state_finish()
    226  anv_cmd_pipeline_state_finish(cmd_buffer, &state->gfx.base);  in anv_cmd_state_finish()
    227  anv_cmd_pipeline_state_finish(cmd_buffer, &state->compute.base);  in anv_cmd_state_finish()
    229  vk_free(&cmd_buffer->pool->alloc, state->attachments);  in anv_cmd_state_finish()
    [all …]
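Note: anv_cmd_state_init()/anv_cmd_state_finish() back the command-buffer life cycle on the Intel driver. A sketch of that life cycle from the application side; device and pool are assumed to have been created elsewhere, and error handling is kept minimal:

```c
#include <vulkan/vulkan.h>

/* Allocate, record (empty), and free a primary command buffer. */
static VkResult record_empty_cmd_buffer(VkDevice device, VkCommandPool pool)
{
   const VkCommandBufferAllocateInfo alloc_info = {
      .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
      .commandPool = pool,
      .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
      .commandBufferCount = 1,
   };
   VkCommandBuffer cmd_buf;
   VkResult result = vkAllocateCommandBuffers(device, &alloc_info, &cmd_buf);
   if (result != VK_SUCCESS)
      return result;

   const VkCommandBufferBeginInfo begin_info = {
      .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
      .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
   };
   vkBeginCommandBuffer(cmd_buf, &begin_info);  /* driver (re)initializes state */
   /* ... record commands here ... */
   result = vkEndCommandBuffer(cmd_buf);

   vkFreeCommandBuffers(device, pool, 1, &cmd_buf);  /* driver finishes state */
   return result;
}
```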
D | genX_blorp_exec.c
    40  struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;  in blorp_emit_dwords() local
    41  return anv_batch_emit_dwords(&cmd_buffer->batch, n);  in blorp_emit_dwords()
    48  struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;  in blorp_emit_reloc() local
    49  assert(cmd_buffer->batch.start <= location &&  in blorp_emit_reloc()
    50  location < cmd_buffer->batch.end);  in blorp_emit_reloc()
    51  return anv_batch_emit_reloc(&cmd_buffer->batch, location,  in blorp_emit_reloc()
    59  struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;  in blorp_surface_reloc() local
    62  anv_reloc_list_add(&cmd_buffer->surface_relocs, &cmd_buffer->pool->alloc,  in blorp_surface_reloc()
    66  anv_batch_set_error(&cmd_buffer->batch, result);  in blorp_surface_reloc()
    69  &cmd_buffer->device->surface_state_pool.block_pool, ss_offset, 8);  in blorp_surface_reloc()
    [all …]
D | genX_cmd_buffer.c
     46  static void genX(flush_pipeline_select)(struct anv_cmd_buffer *cmd_buffer,
     59  genX(cmd_buffer_emit_state_base_address)(struct anv_cmd_buffer *cmd_buffer)  in genX()
     61  struct anv_device *device = cmd_buffer->device;  in genX()
     68  cmd_buffer->state.descriptors_dirty |= ~0;  in genX()
     77  anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {  in genX()
    103  uint32_t gen12_wa_pipeline = cmd_buffer->state.current_pipeline;  in genX()
    104  genX(flush_pipeline_select_3d)(cmd_buffer);  in genX()
    107  anv_batch_emit(&cmd_buffer->batch, GENX(STATE_BASE_ADDRESS), sba) {  in genX()
    115  anv_cmd_buffer_surface_base_address(cmd_buffer);  in genX()
    173  if (cmd_buffer->device->physical->use_softpin) {  in genX()
    [all …]
D | anv_batch_chain.c
    344  anv_batch_bo_create(struct anv_cmd_buffer *cmd_buffer,  in anv_batch_bo_create() argument
    349  struct anv_batch_bo *bbo = vk_alloc(&cmd_buffer->pool->alloc, sizeof(*bbo),  in anv_batch_bo_create()
    354  result = anv_bo_pool_alloc(&cmd_buffer->device->batch_bo_pool,  in anv_batch_bo_create()
    359  result = anv_reloc_list_init(&bbo->relocs, &cmd_buffer->pool->alloc);  in anv_batch_bo_create()
    368  anv_bo_pool_free(&cmd_buffer->device->batch_bo_pool, bbo->bo);  in anv_batch_bo_create()
    370  vk_free(&cmd_buffer->pool->alloc, bbo);  in anv_batch_bo_create()
    376  anv_batch_bo_clone(struct anv_cmd_buffer *cmd_buffer,  in anv_batch_bo_clone() argument
    382  struct anv_batch_bo *bbo = vk_alloc(&cmd_buffer->pool->alloc, sizeof(*bbo),  in anv_batch_bo_clone()
    387  result = anv_bo_pool_alloc(&cmd_buffer->device->batch_bo_pool,  in anv_batch_bo_clone()
    392  result = anv_reloc_list_init_clone(&bbo->relocs, &cmd_buffer->pool->alloc,  in anv_batch_bo_clone()
    [all …]
D | genX_gpu_memcpy.c
    55  genX(cmd_buffer_so_memcpy)(struct anv_cmd_buffer *cmd_buffer,  in genX()
    75  if (!cmd_buffer->state.current_l3_config) {  in genX()
    77  gen_get_default_l3_config(&cmd_buffer->device->info);  in genX()
    78  genX(cmd_buffer_config_l3)(cmd_buffer, cfg);  in genX()
    81  genX(cmd_buffer_set_binding_for_gen8_vb_flush)(cmd_buffer, 32, src, size);  in genX()
    82  genX(cmd_buffer_apply_pipe_flushes)(cmd_buffer);  in genX()
    84  genX(flush_pipeline_select_3d)(cmd_buffer);  in genX()
    87  dw = anv_batch_emitn(&cmd_buffer->batch, 5, GENX(3DSTATE_VERTEX_BUFFERS));  in genX()
    88  GENX(VERTEX_BUFFER_STATE_pack)(&cmd_buffer->batch, dw + 1,  in genX()
    94  .MOCS = anv_mocs(cmd_buffer->device, src.bo, 0),  in genX()
    [all …]
D | gen7_cmd_buffer.c
    49  gen7_cmd_buffer_emit_scissor(struct anv_cmd_buffer *cmd_buffer)  in gen7_cmd_buffer_emit_scissor() argument
    51  struct anv_framebuffer *fb = cmd_buffer->state.framebuffer;  in gen7_cmd_buffer_emit_scissor()
    52  uint32_t count = cmd_buffer->state.gfx.dynamic.scissor.count;  in gen7_cmd_buffer_emit_scissor()
    53  const VkRect2D *scissors = cmd_buffer->state.gfx.dynamic.scissor.scissors;  in gen7_cmd_buffer_emit_scissor()
    63  anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, count * 8, alignment);  in gen7_cmd_buffer_emit_scissor()
    88  if (cmd_buffer->level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {  in gen7_cmd_buffer_emit_scissor()
    90  cmd_buffer->state.render_area.offset.y, max);  in gen7_cmd_buffer_emit_scissor()
    92  cmd_buffer->state.render_area.offset.x, max);  in gen7_cmd_buffer_emit_scissor()
    94  cmd_buffer->state.render_area.offset.y +  in gen7_cmd_buffer_emit_scissor()
    95  cmd_buffer->state.render_area.extent.height - 1);  in gen7_cmd_buffer_emit_scissor()
    [all …]
D | gen8_cmd_buffer.c
     38  gen8_cmd_buffer_emit_viewport(struct anv_cmd_buffer *cmd_buffer)  in gen8_cmd_buffer_emit_viewport() argument
     40  struct anv_framebuffer *fb = cmd_buffer->state.framebuffer;  in gen8_cmd_buffer_emit_viewport()
     41  uint32_t count = cmd_buffer->state.gfx.dynamic.viewport.count;  in gen8_cmd_buffer_emit_viewport()
     43  cmd_buffer->state.gfx.dynamic.viewport.viewports;  in gen8_cmd_buffer_emit_viewport()
     45  anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, count * 64, 64);  in gen8_cmd_buffer_emit_viewport()
     88  anv_batch_emit(&cmd_buffer->batch,  in gen8_cmd_buffer_emit_viewport()
     95  gen8_cmd_buffer_emit_depth_viewport(struct anv_cmd_buffer *cmd_buffer,  in gen8_cmd_buffer_emit_depth_viewport() argument
     98  uint32_t count = cmd_buffer->state.gfx.dynamic.viewport.count;  in gen8_cmd_buffer_emit_depth_viewport()
    100  cmd_buffer->state.gfx.dynamic.viewport.viewports;  in gen8_cmd_buffer_emit_depth_viewport()
    102  anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, count * 8, 32);  in gen8_cmd_buffer_emit_depth_viewport()
    [all …]
D | anv_genX.h
    51  void genX(cmd_buffer_emit_state_base_address)(struct anv_cmd_buffer *cmd_buffer);
    53  void genX(cmd_buffer_apply_pipe_flushes)(struct anv_cmd_buffer *cmd_buffer);
    55  void genX(cmd_buffer_emit_gen7_depth_flush)(struct anv_cmd_buffer *cmd_buffer);
    57  void genX(cmd_buffer_set_binding_for_gen8_vb_flush)(struct anv_cmd_buffer *cmd_buffer,
    61  void genX(cmd_buffer_update_dirty_vbs_for_gen8_vb_flush)(struct anv_cmd_buffer *cmd_buffer,
    65  void genX(cmd_buffer_emit_hashing_mode)(struct anv_cmd_buffer *cmd_buffer,
    69  void genX(flush_pipeline_select_3d)(struct anv_cmd_buffer *cmd_buffer);
    70  void genX(flush_pipeline_select_gpgpu)(struct anv_cmd_buffer *cmd_buffer);
    72  void genX(cmd_buffer_config_l3)(struct anv_cmd_buffer *cmd_buffer,
    75  void genX(cmd_buffer_flush_state)(struct anv_cmd_buffer *cmd_buffer);
    [all …]
D | anv_blorp.c
    295  copy_image(struct anv_cmd_buffer *cmd_buffer,  in copy_image() argument
    340  get_blorp_surf_for_anv_image(cmd_buffer->device,  in copy_image()
    345  get_blorp_surf_for_anv_image(cmd_buffer->device,  in copy_image()
    350  anv_cmd_buffer_mark_image_written(cmd_buffer, dst_image,  in copy_image()
    364  if (get_blorp_surf_for_anv_shadow_image(cmd_buffer->device,  in copy_image()
    379  get_blorp_surf_for_anv_image(cmd_buffer->device, src_image, src_mask,  in copy_image()
    383  get_blorp_surf_for_anv_image(cmd_buffer->device, dst_image, dst_mask,  in copy_image()
    387  anv_cmd_buffer_mark_image_written(cmd_buffer, dst_image, dst_mask,  in copy_image()
    400  if (get_blorp_surf_for_anv_shadow_image(cmd_buffer->device,  in copy_image()
    424  ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);  in anv_CmdCopyImage()
    [all …]
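Note: copy_image() is the BLORP-based implementation behind anv_CmdCopyImage(), visible at line 424 above. A sketch of the API call it services; both images are assumed to be 2D, of compatible format, and already in the named layouts:

```c
#include <vulkan/vulkan.h>

/* Copy mip 0, layer 0 from src to dst. */
static void record_image_copy(VkCommandBuffer cmd_buf,
                              VkImage src, VkImage dst,
                              uint32_t width, uint32_t height)
{
   const VkImageCopy region = {
      .srcSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 },
      .srcOffset = { 0, 0, 0 },
      .dstSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 },
      .dstOffset = { 0, 0, 0 },
      .extent = { width, height, 1 },
   };
   vkCmdCopyImage(cmd_buf,
                  src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                  dst, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                  1, &region);
}
```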
D | genX_query.c
    583  emit_ps_depth_count(struct anv_cmd_buffer *cmd_buffer,  in emit_ps_depth_count() argument
    586  cmd_buffer->state.pending_pipe_bits |= ANV_PIPE_POST_SYNC_BIT;  in emit_ps_depth_count()
    587  genX(cmd_buffer_apply_pipe_flushes)(cmd_buffer);  in emit_ps_depth_count()
    589  anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {  in emit_ps_depth_count()
    595  if (GEN_GEN == 9 && cmd_buffer->device->info.gt == 4)  in emit_ps_depth_count()
    609  emit_query_pc_availability(struct anv_cmd_buffer *cmd_buffer,  in emit_query_pc_availability() argument
    613  cmd_buffer->state.pending_pipe_bits |= ANV_PIPE_POST_SYNC_BIT;  in emit_query_pc_availability()
    614  genX(cmd_buffer_apply_pipe_flushes)(cmd_buffer);  in emit_query_pc_availability()
    616  anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {  in emit_query_pc_availability()
    629  emit_zero_queries(struct anv_cmd_buffer *cmd_buffer,  in emit_zero_queries() argument
    [all …]
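Note: emit_ps_depth_count()/emit_query_pc_availability() implement occlusion-query counts and availability writes via PIPE_CONTROL. The application-side usage they serve looks like this, assuming a query pool of type VK_QUERY_TYPE_OCCLUSION created elsewhere:

```c
#include <vulkan/vulkan.h>

/* Bracket some draws with occlusion query 0. The reset must be recorded
 * outside a render pass instance; the draws whose samples should be
 * counted go between begin and end, inside a render pass. */
static void record_occlusion_query(VkCommandBuffer cmd_buf,
                                   VkQueryPool query_pool)
{
   vkCmdResetQueryPool(cmd_buf, query_pool, 0, 1);
   vkCmdBeginQuery(cmd_buf, query_pool, 0, 0 /* not precise */);
   /* ... draws to be counted ... */
   vkCmdEndQuery(cmd_buf, query_pool, 0);
}
```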
/external/mesa3d/src/broadcom/vulkan/
D | v3dv_cmd_buffer.c
     74  cmd_buffer_emit_render_pass_rcl(struct v3dv_cmd_buffer *cmd_buffer);
    106  cmd_buffer_init(struct v3dv_cmd_buffer *cmd_buffer,  in cmd_buffer_init() argument
    116  uint8_t *cmd_buffer_driver_start = ((uint8_t *) cmd_buffer) + ld_size;  in cmd_buffer_init()
    117  memset(cmd_buffer_driver_start, 0, sizeof(*cmd_buffer) - ld_size);  in cmd_buffer_init()
    119  cmd_buffer->device = device;  in cmd_buffer_init()
    120  cmd_buffer->pool = pool;  in cmd_buffer_init()
    121  cmd_buffer->level = level;  in cmd_buffer_init()
    123  list_inithead(&cmd_buffer->private_objs);  in cmd_buffer_init()
    124  list_inithead(&cmd_buffer->jobs);  in cmd_buffer_init()
    125  list_inithead(&cmd_buffer->list_link);  in cmd_buffer_init()
    [all …]
D | v3dv_uniforms.c
    43  check_push_constants_ubo(struct v3dv_cmd_buffer *cmd_buffer)  in check_push_constants_ubo() argument
    45  if (!(cmd_buffer->state.dirty & V3DV_CMD_DIRTY_PUSH_CONSTANTS) ||  in check_push_constants_ubo()
    46  cmd_buffer->state.pipeline->layout->push_constant_size == 0)  in check_push_constants_ubo()
    49  if (cmd_buffer->push_constants_resource.bo == NULL) {  in check_push_constants_ubo()
    50  cmd_buffer->push_constants_resource.bo =  in check_push_constants_ubo()
    51  v3dv_bo_alloc(cmd_buffer->device, MAX_PUSH_CONSTANTS_SIZE,  in check_push_constants_ubo()
    54  if (!cmd_buffer->push_constants_resource.bo) {  in check_push_constants_ubo()
    59  bool ok = v3dv_bo_map(cmd_buffer->device,  in check_push_constants_ubo()
    60  cmd_buffer->push_constants_resource.bo,  in check_push_constants_ubo()
    67  if (cmd_buffer->push_constants_resource.offset + MAX_PUSH_CONSTANTS_SIZE <=  in check_push_constants_ubo()
    [all …]
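Note: check_push_constants_ubo() lazily allocates and maps a BO to hold the push-constant data the application supplies. The supplying side is plain vkCmdPushConstants; in this sketch, `layout` is assumed to be a pipeline layout declaring a 16-byte vertex-stage push-constant range:

```c
#include <vulkan/vulkan.h>

/* Push a small color constant to the vertex stage. */
static void record_push_constants(VkCommandBuffer cmd_buf,
                                  VkPipelineLayout layout)
{
   const float color[4] = { 0.2f, 0.4f, 0.6f, 1.0f };
   vkCmdPushConstants(cmd_buf, layout, VK_SHADER_STAGE_VERTEX_BIT,
                      0, sizeof(color), color);
}
```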
D | v3dv_meta_clear.c
    796  emit_color_clear_rect(struct v3dv_cmd_buffer *cmd_buffer,  in emit_color_clear_rect() argument
    804  assert(cmd_buffer->state.pass);  in emit_color_clear_rect()
    805  struct v3dv_device *device = cmd_buffer->device;  in emit_color_clear_rect()
    806  struct v3dv_render_pass *pass = cmd_buffer->state.pass;  in emit_color_clear_rect()
    820  v3dv_flag_oom(cmd_buffer, NULL);  in emit_color_clear_rect()
    835  v3dv_cmd_buffer_meta_state_push(cmd_buffer, false);  in emit_color_clear_rect()
    836  v3dv_cmd_buffer_finish_job(cmd_buffer);  in emit_color_clear_rect()
    839  v3dv_framebuffer_from_handle(cmd_buffer->state.meta.framebuffer);  in emit_color_clear_rect()
    840  VkCommandBuffer cmd_buffer_handle = v3dv_cmd_buffer_to_handle(cmd_buffer);  in emit_color_clear_rect()
    841  VkDevice device_handle = v3dv_device_to_handle(cmd_buffer->device);  in emit_color_clear_rect()
    [all …]
/external/mesa3d/src/gallium/frontends/lavapipe/
D | lvp_cmd_buffer.c
    33  struct lvp_cmd_buffer *cmd_buffer;  in lvp_create_cmd_buffer() local
    35  cmd_buffer = vk_alloc(&pool->alloc, sizeof(*cmd_buffer), 8,  in lvp_create_cmd_buffer()
    37  if (cmd_buffer == NULL)  in lvp_create_cmd_buffer()
    40  vk_object_base_init(&device->vk, &cmd_buffer->base,  in lvp_create_cmd_buffer()
    42  cmd_buffer->device = device;  in lvp_create_cmd_buffer()
    43  cmd_buffer->pool = pool;  in lvp_create_cmd_buffer()
    44  list_inithead(&cmd_buffer->cmds);  in lvp_create_cmd_buffer()
    45  cmd_buffer->status = LVP_CMD_BUFFER_STATUS_INITIAL;  in lvp_create_cmd_buffer()
    47  list_addtail(&cmd_buffer->pool_link, &pool->cmd_buffers);  in lvp_create_cmd_buffer()
    52  list_inithead(&cmd_buffer->pool_link);  in lvp_create_cmd_buffer()
    [all …]
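Note: lvp_create_cmd_buffer() sets up a CPU-side command list (the cmds list above) that the lavapipe software driver replays later. The replay is triggered by an ordinary queue submission; `queue`, `cmd_buf`, and `fence` are assumed to exist already in this sketch:

```c
#include <vulkan/vulkan.h>

/* Submit one recorded command buffer and have `fence` signaled when it
 * completes. */
static VkResult submit_one(VkQueue queue, VkCommandBuffer cmd_buf, VkFence fence)
{
   const VkSubmitInfo submit = {
      .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
      .commandBufferCount = 1,
      .pCommandBuffers = &cmd_buf,
   };
   return vkQueueSubmit(queue, 1, &submit, fence);
}
```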
/external/mesa3d/src/amd/vulkan/layers/
D | radv_sqtt_layer.c
    355  radv_write_begin_general_api_marker(struct radv_cmd_buffer *cmd_buffer,  in radv_write_begin_general_api_marker() argument
    359  struct radeon_cmdbuf *cs = cmd_buffer->cs;  in radv_write_begin_general_api_marker()
    364  radv_emit_thread_trace_userdata(cmd_buffer->device, cs, &marker, sizeof(marker) / 4);  in radv_write_begin_general_api_marker()
    368  radv_write_end_general_api_marker(struct radv_cmd_buffer *cmd_buffer,  in radv_write_end_general_api_marker() argument
    372  struct radeon_cmdbuf *cs = cmd_buffer->cs;  in radv_write_end_general_api_marker()
    378  radv_emit_thread_trace_userdata(cmd_buffer->device, cs, &marker, sizeof(marker) / 4);  in radv_write_end_general_api_marker()
    382  radv_write_event_marker(struct radv_cmd_buffer *cmd_buffer,  in radv_write_event_marker() argument
    389  struct radeon_cmdbuf *cs = cmd_buffer->cs;  in radv_write_event_marker()
    393  marker.cmd_id = cmd_buffer->state.num_events++;  in radv_write_event_marker()
    409  radv_emit_thread_trace_userdata(cmd_buffer->device, cs, &marker, sizeof(marker) / 4);  in radv_write_event_marker()
    [all …]
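Note: the SQTT (thread-trace) layer wraps command-buffer entry points and writes markers into the command stream around them. On the application side, the usual way to get named regions into a capture is VK_EXT_debug_utils labels; whether this particular layer version forwards them is not shown by the matches above, so treat the following as a generic, hedged sketch that assumes the extension was enabled at instance creation:

```c
#include <vulkan/vulkan.h>

/* Wrap a group of commands in a debug label. Debug-utils entry points are
 * extension functions, so they are fetched through vkGetInstanceProcAddr. */
static void record_labeled_region(VkInstance instance, VkCommandBuffer cmd_buf)
{
   PFN_vkCmdBeginDebugUtilsLabelEXT begin_label =
      (PFN_vkCmdBeginDebugUtilsLabelEXT)
         vkGetInstanceProcAddr(instance, "vkCmdBeginDebugUtilsLabelEXT");
   PFN_vkCmdEndDebugUtilsLabelEXT end_label =
      (PFN_vkCmdEndDebugUtilsLabelEXT)
         vkGetInstanceProcAddr(instance, "vkCmdEndDebugUtilsLabelEXT");
   if (!begin_label || !end_label)
      return;  /* extension not available */

   const VkDebugUtilsLabelEXT label = {
      .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
      .pLabelName = "shadow pass",              /* illustrative name */
      .color = { 0.1f, 0.1f, 0.8f, 1.0f },
   };
   begin_label(cmd_buf, &label);
   /* ... commands belonging to the labeled region ... */
   end_label(cmd_buf);
}
```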