/external/mesa3d/src/gallium/drivers/radeonsi/ |
D | si_cp_dma.c |
    143 si_emit_cp_dma(sctx, sctx->gfx_cs, 0, 0, 0, CP_DMA_SYNC, L2_BYPASS); in si_cp_dma_wait_for_idle()
    171 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, si_resource(dst), RADEON_USAGE_WRITE, in si_cp_dma_prepare()
    174 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, si_resource(src), RADEON_USAGE_READ, in si_cp_dma_prepare()
    281 si_emit_cp_dma(sctx, sctx->gfx_cs, va, va + SI_CPDMA_ALIGNMENT, size, dma_flags, cache_policy); in si_cp_dma_realign_engine()
    345 if (secure != sctx->ws->cs_is_secure(sctx->gfx_cs)) { in si_cp_dma_copy_buffer()
    368 si_emit_cp_dma(sctx, sctx->gfx_cs, main_dst_offset, main_src_offset, byte_count, dma_flags, in si_cp_dma_copy_buffer()
    383 si_emit_cp_dma(sctx, sctx->gfx_cs, dst_offset, src_offset, skipped_size, dma_flags, in si_cp_dma_copy_buffer()
    539 si_cp_dma_clear_buffer(sctx, sctx->gfx_cs, src, 0, 4, 0xabcdef01, 0, SI_COHERENCY_SHADER, in si_test_gds()
    541 si_cp_dma_clear_buffer(sctx, sctx->gfx_cs, src, 4, 4, 0x23456789, 0, SI_COHERENCY_SHADER, in si_test_gds()
    543 si_cp_dma_clear_buffer(sctx, sctx->gfx_cs, src, 8, 4, 0x87654321, 0, SI_COHERENCY_SHADER, in si_test_gds()
    [all …]
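The si_cp_dma_prepare() and si_emit_cp_dma() matches above show the recurring shape of a CP DMA transfer: both sides of the copy are registered in the gfx IB's buffer list before the packet itself is emitted. A minimal sketch of that pattern, assuming the driver's internal headers; the sketch_* name, the NULL checks, and the RADEON_PRIO_CP_DMA priority are illustrative, not taken from the listing:

    /* Register both sides of a CP DMA copy with the gfx command stream so the
     * kernel keeps the BOs resident, then emit the copy packet itself. */
    static void sketch_cp_dma_copy(struct si_context *sctx,
                                   struct pipe_resource *dst, uint64_t dst_va,
                                   struct pipe_resource *src, uint64_t src_va,
                                   unsigned size, unsigned dma_flags,
                                   enum si_cache_policy cache_policy)
    {
       if (dst)
          radeon_add_to_buffer_list(sctx, sctx->gfx_cs, si_resource(dst),
                                    RADEON_USAGE_WRITE, RADEON_PRIO_CP_DMA); /* priority is illustrative */
       if (src)
          radeon_add_to_buffer_list(sctx, sctx->gfx_cs, si_resource(src),
                                    RADEON_USAGE_READ, RADEON_PRIO_CP_DMA);

       si_emit_cp_dma(sctx, sctx->gfx_cs, dst_va, src_va, size, dma_flags, cache_policy);
    }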
|
D | si_compute_prim_discard.c |
    920 ws->cs_add_buffer(sctx->gfx_cs, sctx->gds, RADEON_USAGE_READWRITE, 0, 0); in si_initialize_prim_discard_cmdbuf()
    929 ws->cs_add_buffer(sctx->gfx_cs, sctx->gds_oa, RADEON_USAGE_READWRITE, 0, 0); in si_initialize_prim_discard_cmdbuf()
    933 ws->cs_add_parallel_compute_ib(sctx->gfx_cs, num_oa_counters > 0); in si_initialize_prim_discard_cmdbuf()
    969 struct radeon_cmdbuf *gfx_cs = sctx->gfx_cs; in si_prepare_prim_discard_or_split_draw() local
    1082 !sctx->ws->cs_check_space(gfx_cs, need_gfx_dw, false)) { in si_prepare_prim_discard_or_split_draw()
    1086 if (!radeon_emitted(gfx_cs, sctx->initial_gfx_cs_size) && in si_prepare_prim_discard_or_split_draw()
    1087 gfx_cs->current.cdw + need_gfx_dw > gfx_cs->current.max_dw) { in si_prepare_prim_discard_or_split_draw()
    1088 radeon_emit(gfx_cs, PKT3(PKT3_NOP, 0, 0)); in si_prepare_prim_discard_or_split_draw()
    1089 radeon_emit(gfx_cs, 0); in si_prepare_prim_discard_or_split_draw()
    1144 struct radeon_cmdbuf *gfx_cs = sctx->gfx_cs; in si_dispatch_prim_discard_cs_and_draw() local
    [all …]
|
D | si_state_streamout.c |
    216 struct radeon_cmdbuf *cs = sctx->gfx_cs; in gfx10_emit_streamout_begin()
    234 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, t[i]->buf_filled_size, RADEON_USAGE_READ, in gfx10_emit_streamout_begin()
    263 si_cp_release_mem(sctx, sctx->gfx_cs, V_028A90_PS_DONE, 0, EOP_DST_SEL_TC_L2, in gfx10_emit_streamout_end()
    275 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_flush_vgt_streamout()
    302 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_streamout_begin()
    334 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, t[i]->buf_filled_size, RADEON_USAGE_READ, in si_emit_streamout_begin()
    358 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_streamout_end()
    378 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, t[i]->buf_filled_size, RADEON_USAGE_WRITE, in si_emit_streamout_end()
    405 radeon_set_context_reg_seq(sctx->gfx_cs, R_028B94_VGT_STRMOUT_CONFIG, 2); in si_emit_streamout_enable()
    406 radeon_emit(sctx->gfx_cs, S_028B94_STREAMOUT_0_EN(si_get_strmout_en(sctx)) | in si_emit_streamout_enable()
    [all …]
|
D | si_gfx_cs.c |
    35 struct radeon_cmdbuf *cs = ctx->gfx_cs; in si_need_gfx_cs_space()
    48 if (unlikely(!radeon_cs_memory_below_limit(ctx->screen, ctx->gfx_cs, ctx->vram, ctx->gtt))) { in si_need_gfx_cs_space()
    73 struct radeon_cmdbuf *cs = ctx->gfx_cs; in si_flush_gfx_cs()
    211 ctx->gfx_cs, ctx->last_gfx_fence, in si_flush_gfx_cs()
    293 radeon_add_to_buffer_list(ctx, ctx->gfx_cs, ctx->current_saved_cs->trace_buf, in si_begin_gfx_cs_debug()
    300 sctx->ws->cs_add_buffer(sctx->gfx_cs, sctx->gds, RADEON_USAGE_READWRITE, 0, 0); in si_add_gds_to_buffer_list()
    302 sctx->ws->cs_add_buffer(sctx->gfx_cs, sctx->gds_oa, RADEON_USAGE_READWRITE, 0, 0); in si_add_gds_to_buffer_list()
    406 is_secure = ctx->ws->cs_is_secure(ctx->gfx_cs); in si_begin_new_gfx_cs()
    431 radeon_add_to_buffer_list(ctx, ctx->gfx_cs, ctx->border_color_buffer, in si_begin_new_gfx_cs()
    434 radeon_add_to_buffer_list(ctx, ctx->gfx_cs, ctx->shadowed_regs, in si_begin_new_gfx_cs()
    [all …]
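The si_need_gfx_cs_space() fragments above are the early-flush guard that submissions go through: flush the gfx IB when its referenced buffers exceed the memory budget or when the IB is close to running out of dwords. A condensed sketch under the same assumptions; the flush flag and the cs_check_space() call (borrowed from the si_compute_prim_discard.c matches) are illustrative:

    /* Flush the gfx IB early when its referenced buffers exceed the VRAM/GTT
     * budget or when fewer than num_dw dwords are left in the IB. */
    static void sketch_need_gfx_cs_space(struct si_context *ctx, unsigned num_dw)
    {
       struct radeon_cmdbuf *cs = ctx->gfx_cs;

       /* Too much referenced memory: submit now so the kernel never has to
        * evict in the middle of a frame. */
       if (unlikely(!radeon_cs_memory_below_limit(ctx->screen, ctx->gfx_cs, ctx->vram, ctx->gtt))) {
          si_flush_gfx_cs(ctx, RADEON_FLUSH_ASYNC_START_NEXT_GFX_IB_NOW, NULL);
          return;
       }

       /* Not enough command-stream space left for the upcoming packets. */
       if (!ctx->ws->cs_check_space(cs, num_dw, false))
          si_flush_gfx_cs(ctx, RADEON_FLUSH_ASYNC_START_NEXT_GFX_IB_NOW, NULL);
    }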
|
D | si_fence.c |
    90 struct si_resource *scratch = unlikely(ctx->ws->cs_is_secure(ctx->gfx_cs)) ? in si_cp_release_mem()
    99 radeon_add_to_buffer_list(ctx, ctx->gfx_cs, scratch, RADEON_USAGE_WRITE, in si_cp_release_mem()
    128 radeon_add_to_buffer_list(ctx, ctx->gfx_cs, scratch, RADEON_USAGE_WRITE, in si_cp_release_mem()
    141 radeon_add_to_buffer_list(ctx, ctx->gfx_cs, buf, RADEON_USAGE_WRITE, RADEON_PRIO_QUERY); in si_cp_release_mem()
    173 ws->cs_add_fence_dependency(sctx->gfx_cs, fence, 0); in si_add_fence_dependency()
    178 sctx->ws->cs_add_syncobj_signal(sctx->gfx_cs, fence); in si_add_syncobj_signal()
    255 radeon_add_to_buffer_list(ctx, ctx->gfx_cs, fine->buf, RADEON_USAGE_WRITE, RADEON_PRIO_QUERY); in si_fine_fence_set()
    256 si_cp_release_mem(ctx, ctx->gfx_cs, V_028A90_BOTTOM_OF_PIPE_TS, 0, EOP_DST_SEL_MEM, in si_fine_fence_set()
    487 if (!radeon_emitted(sctx->gfx_cs, sctx->initial_gfx_cs_size)) { in si_flush_from_st()
    491 ws->cs_sync_flush(sctx->gfx_cs); in si_flush_from_st()
    [all …]
|
D | si_cp_reg_shadowing.c |
    165 si_cp_dma_clear_buffer(sctx, sctx->gfx_cs, &sctx->shadowed_regs->b.b, in si_init_cp_reg_shadowing()
    174 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, sctx->shadowed_regs, in si_init_cp_reg_shadowing()
    177 ac_emulate_clear_state(&sctx->screen->info, sctx->gfx_cs, in si_init_cp_reg_shadowing()
    190 sctx->ws->cs_setup_preemption(sctx->gfx_cs, shadowing_preamble->pm4, in si_init_cp_reg_shadowing()
|
D | si_compute.c |
    379 if (cs != sctx->gfx_cs || !sctx->screen->info.has_graphics) { in si_emit_initial_compute_regs()
    394 (cs != sctx->gfx_cs || !sctx->screen->info.has_graphics)) { in si_emit_initial_compute_regs()
    447 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_switch_compute_shader()
    494 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, shader->scratch_bo, RADEON_USAGE_READWRITE, in si_switch_compute_shader()
    505 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, shader->bo, RADEON_USAGE_READ, in si_switch_compute_shader()
    539 struct radeon_cmdbuf *cs = sctx->gfx_cs; in setup_scratch_rsrc_user_sgprs()
    576 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_setup_user_sgprs_co_v2()
    621 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, dispatch_buf, RADEON_USAGE_READ, in si_setup_user_sgprs_co_v2()
    677 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, input_buffer, RADEON_USAGE_READ, in si_upload_compute_input()
    689 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_setup_nir_user_data()
    [all …]
|
D | si_build_pm4.h |
    153 struct radeon_cmdbuf *cs = sctx->gfx_cs; in radeon_opt_set_context_reg_rmw()
    171 struct radeon_cmdbuf *cs = sctx->gfx_cs; in radeon_opt_set_context_reg()
    192 struct radeon_cmdbuf *cs = sctx->gfx_cs; in radeon_opt_set_context_reg2()
    214 struct radeon_cmdbuf *cs = sctx->gfx_cs; in radeon_opt_set_context_reg3()
    239 struct radeon_cmdbuf *cs = sctx->gfx_cs; in radeon_opt_set_context_reg4()
    266 struct radeon_cmdbuf *cs = sctx->gfx_cs; in radeon_opt_set_context_regn()
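All of the radeon_opt_* helpers above begin by aliasing sctx->gfx_cs into a local cs; their purpose is to skip redundant SET_CONTEXT_REG packets. Only that first line is confirmed by the listing — the tracked-register bookkeeping below is a plausible reconstruction for illustration, not copied from the header:

    /* Plausible single-register variant: emit only if the register has never
     * been written in this IB or its cached value differs. */
    static inline void sketch_opt_set_context_reg(struct si_context *sctx, unsigned offset,
                                                  enum si_tracked_reg reg, unsigned value)
    {
       struct radeon_cmdbuf *cs = sctx->gfx_cs;

       if (((sctx->tracked_regs.reg_saved >> reg) & 0x1) != 0x1 ||
           sctx->tracked_regs.reg_value[reg] != value) {
          radeon_set_context_reg(cs, offset, value);

          /* Remember the value so the next identical write is dropped. */
          sctx->tracked_regs.reg_saved |= 0x1ull << reg;
          sctx->tracked_regs.reg_value[reg] = value;
       }
    }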
|
D | si_state_viewport.c |
    104 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, sctx->small_prim_cull_info_buf, in si_emit_cull_state()
    106 radeon_set_sh_reg(sctx->gfx_cs, R_00B220_SPI_SHADER_PGM_LO_GS, in si_emit_cull_state()
    353 unsigned initial_cdw = ctx->gfx_cs->current.cdw; in si_emit_guardband()
    365 if (initial_cdw != ctx->gfx_cs->current.cdw) in si_emit_guardband()
    371 struct radeon_cmdbuf *cs = ctx->gfx_cs; in si_emit_scissors()
    478 struct radeon_cmdbuf *cs = ctx->gfx_cs; in si_emit_one_viewport()
    490 struct radeon_cmdbuf *cs = ctx->gfx_cs; in si_emit_viewports()
    521 struct radeon_cmdbuf *cs = ctx->gfx_cs; in si_emit_depth_ranges()
    608 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_window_rectangles()
|
D | si_state_draw.c |
    71 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_derived_tess_state()
    218 uint64_t ring_va = (unlikely(sctx->ws->cs_is_secure(sctx->gfx_cs)) ? in si_emit_derived_tess_state()
    565 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_rasterizer_prim_state()
    612 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_vs_state()
    654 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_ia_multi_vgt_param()
    715 radeon_set_uconfig_reg(sctx->gfx_cs, R_03096C_GE_CNTL, ge_cntl); in gfx10_emit_ge_cntl()
    726 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_draw_registers()
    770 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_draw_packets()
    780 si_cp_copy_data(sctx, sctx->gfx_cs, COPY_DATA_REG, NULL, in si_emit_draw_packets()
    831 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, si_resource(indexbuf), RADEON_USAGE_READ, in si_emit_draw_packets()
    [all …]
|
D | si_pm4.c |
    112 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_pm4_emit()
    115 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, state->shader->bo, in si_pm4_emit()
|
D | si_state_binning.c |
    407 unsigned initial_cdw = sctx->gfx_cs->current.cdw; in si_emit_dpbb_disable()
    444 if (initial_cdw != sctx->gfx_cs->current.cdw) in si_emit_dpbb_disable()
    529 unsigned initial_cdw = sctx->gfx_cs->current.cdw; in si_emit_dpbb_state()
    549 if (initial_cdw != sctx->gfx_cs->current.cdw) in si_emit_dpbb_state()
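The initial_cdw pairs above (and the identical pairs in si_state_viewport.c, si_state_shaders.c, and si_state.c) bracket a run of radeon_opt_set_context_reg() calls to detect whether anything was actually written to the gfx IB. A short sketch of the pattern; the specific register, its value, and the context_roll flag the comparison feeds are assumptions for illustration:

    static void sketch_emit_dpbb(struct si_context *sctx, unsigned bin_cntl)
    {
       unsigned initial_cdw = sctx->gfx_cs->current.cdw;

       /* Skipped entirely when the cached register value already matches,
        * in which case cdw does not move. */
       radeon_opt_set_context_reg(sctx, R_028C44_PA_SC_BINNER_CNTL_0,
                                  SI_TRACKED_PA_SC_BINNER_CNTL_0, bin_cntl);

       /* If any dwords were emitted, the context registers rolled. */
       if (initial_cdw != sctx->gfx_cs->current.cdw)
          sctx->context_roll = true;
    }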
|
D | si_buffer.c |
    36 if (sctx->ws->cs_is_buffer_referenced(sctx->gfx_cs, buf, usage)) { in si_rings_is_buffer_referenced()
    63 if (radeon_emitted(sctx->gfx_cs, sctx->initial_gfx_cs_size) && in si_buffer_map_sync_with_rings()
    64 sctx->ws->cs_is_buffer_referenced(sctx->gfx_cs, resource->buf, rusage)) { in si_buffer_map_sync_with_rings()
    90 sctx->ws->cs_sync_flush(sctx->gfx_cs); in si_buffer_map_sync_with_rings()
    791 if (radeon_emitted(ctx->gfx_cs, ctx->initial_gfx_cs_size) && in si_resource_commit()
    792 ctx->ws->cs_is_buffer_referenced(ctx->gfx_cs, res->buf, RADEON_USAGE_READWRITE)) { in si_resource_commit()
    802 ctx->ws->cs_sync_flush(ctx->gfx_cs); in si_resource_commit()
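si_buffer_map_sync_with_rings() and si_resource_commit() above share the same guard: before the CPU touches a buffer, check whether the current gfx IB still references it and, if so, flush and sync. A minimal sketch of that guard; the zero flush flags and the meaning of the return value are illustrative:

    /* Returns true if the gfx IB referenced the BO and had to be flushed
     * before a CPU map of it is safe. */
    static bool sketch_map_sync_with_gfx(struct si_context *sctx, struct si_resource *res,
                                         enum radeon_bo_usage rusage)
    {
       if (radeon_emitted(sctx->gfx_cs, sctx->initial_gfx_cs_size) &&
           sctx->ws->cs_is_buffer_referenced(sctx->gfx_cs, res->buf, rusage)) {
          /* Submit the IB and wait for the submission to complete so the
           * kernel sees the pending GPU work before the mapping. */
          si_flush_gfx_cs(sctx, 0, NULL);
          sctx->ws->cs_sync_flush(sctx->gfx_cs);
          return true;
       }
       return false;
    }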
|
D | si_perfcounter.c |
    706 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_pc_emit_instance()
    731 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_pc_emit_shaders()
    742 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_pc_emit_select()
    833 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_pc_emit_start()
    835 si_cp_copy_data(sctx, sctx->gfx_cs, COPY_DATA_DST_MEM, buffer, va - buffer->gpu_address, in si_pc_emit_start()
    850 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_pc_emit_stop()
    869 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_pc_emit_read()
    925 radeon_set_uconfig_reg(sctx->gfx_cs, R_037390_RLC_PERFMON_CLK_CNTL, in si_inhibit_clockgating()
    928 radeon_set_uconfig_reg(sctx->gfx_cs, R_0372FC_RLC_PERFMON_CLK_CNTL, in si_inhibit_clockgating()
|
D | si_dma_cs.c |
    230 if (!ctx->sdma_uploads_in_progress && radeon_emitted(ctx->gfx_cs, ctx->initial_gfx_cs_size) && in si_need_dma_space()
    231 ((dst && ws->cs_is_buffer_referenced(ctx->gfx_cs, dst->buf, RADEON_USAGE_READWRITE)) || in si_need_dma_space()
    232 (src && ws->cs_is_buffer_referenced(ctx->gfx_cs, src->buf, RADEON_USAGE_WRITE)))) in si_need_dma_space()
|
D | si_state_shaders.c |
    563 unsigned initial_cdw = sctx->gfx_cs->current.cdw; in si_emit_shader_es()
    581 if (initial_cdw != sctx->gfx_cs->current.cdw) in si_emit_shader_es()
    726 unsigned initial_cdw = sctx->gfx_cs->current.cdw; in si_emit_shader_gs()
    780 if (initial_cdw != sctx->gfx_cs->current.cdw) in si_emit_shader_gs()
    926 struct radeon_cmdbuf *cs = sctx->gfx_cs; in gfx10_emit_ge_pc_alloc()
    973 if (initial_cdw != sctx->gfx_cs->current.cdw) in gfx10_emit_shader_ngg_tail()
    983 unsigned initial_cdw = sctx->gfx_cs->current.cdw; in gfx10_emit_shader_ngg_notess_nogs()
    994 unsigned initial_cdw = sctx->gfx_cs->current.cdw; in gfx10_emit_shader_ngg_tess_nogs()
    1008 unsigned initial_cdw = sctx->gfx_cs->current.cdw; in gfx10_emit_shader_ngg_notess_gs()
    1022 unsigned initial_cdw = sctx->gfx_cs->current.cdw; in gfx10_emit_shader_ngg_tess_gs()
    [all …]
|
D | si_descriptors.c |
    167 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, desc->buffer, RADEON_USAGE_READ, in si_upload_descriptors()
    188 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, desc->buffer, RADEON_USAGE_READ, in si_add_descriptors_to_bo_list()
    909 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, &tex->buffer, RADEON_USAGE_READ, in si_update_ps_colorbuf0_slot()
    1006 sctx, sctx->gfx_cs, si_resource(buffers->buffers[i]), in si_buffer_resources_begin_new_cs()
    1065 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, in si_vertex_buffers_begin_new_cs()
    1072 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, sctx->vb_descriptors_buffer, RADEON_USAGE_READ, in si_vertex_buffers_begin_new_cs()
    1396 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, si_resource(buffer), RADEON_USAGE_READWRITE, in si_set_ring_buffer()
    1976 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_shader_pointer()
    1989 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_consecutive_shader_pointers()
    2057 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_graphics_shader_pointers()
    [all …]
|
D | si_pipe.c |
    282 if (sctx->gfx_cs) in si_destroy_context()
    283 sctx->ws->cs_destroy(sctx->gfx_cs); in si_destroy_context()
    532 sctx->gfx_cs = ws->cs_create(sctx->ctx, sctx->has_graphics ? RING_GFX : RING_COMPUTE, in si_create_context()
    716 assert(sctx->gfx_cs->current.cdw == 0); in si_create_context()
    723 assert(sctx->gfx_cs->current.cdw == sctx->initial_gfx_cs_size); in si_create_context()
    743 sctx->initial_gfx_cs_size = sctx->gfx_cs->current.cdw; in si_create_context()
|
D | gfx10_query.c |
    226 si_cp_release_mem(sctx, sctx->gfx_cs, V_028A90_BOTTOM_OF_PIPE_TS, 0, EOP_DST_SEL_MEM, in gfx10_sh_query_end()
    454 si_cp_wait_mem(sctx, sctx->gfx_cs, va, 0x00000001, 0x00000001, 0); in gfx10_sh_query_get_result_resource()
|
D | si_query.c |
    835 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_query_hw_do_emit_start()
    872 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, query->buffer.buf, RADEON_USAGE_WRITE, in si_query_hw_do_emit_start()
    899 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_query_hw_do_emit_stop()
    952 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, query->buffer.buf, RADEON_USAGE_WRITE, in si_query_hw_do_emit_stop()
    994 struct radeon_cmdbuf *cs = ctx->gfx_cs; in emit_set_predicate()
    1006 radeon_add_to_buffer_list(ctx, ctx->gfx_cs, buf, RADEON_USAGE_READ, RADEON_PRIO_QUERY); in emit_set_predicate()
    1570 si_cp_wait_mem(sctx, sctx->gfx_cs, va, 0x80000000, 0x80000000, WAIT_REG_MEM_EQUAL); in si_query_hw_get_result_resource()
|
D | si_state.c |
    71 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_cb_render_state()
    690 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_blend_color()
    723 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_clip_state()
    751 unsigned initial_cdw = sctx->gfx_cs->current.cdw; in si_emit_clip_regs()
    769 if (initial_cdw != sctx->gfx_cs->current.cdw) in si_emit_clip_regs()
    1047 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_stencil_ref()
    1345 unsigned initial_cdw = sctx->gfx_cs->current.cdw; in si_emit_db_render_state()
    1414 if (initial_cdw != sctx->gfx_cs->current.cdw) in si_emit_db_render_state()
    2889 struct radeon_cmdbuf *cs = sctx->gfx_cs; in si_emit_framebuffer_state()
    2913 sctx, sctx->gfx_cs, &tex->buffer, RADEON_USAGE_READWRITE, in si_emit_framebuffer_state()
    [all …]
|
D | si_test_dma_perf.c | 201 si_cp_dma_clear_buffer(sctx, sctx->gfx_cs, dst, 0, size, clear_value, 0, in si_test_dma_perf()
|
D | si_pipe.h |
    905 struct radeon_cmdbuf *gfx_cs; /* compute IB if graphics is disabled */ member
    1895 !radeon_cs_memory_below_limit(sctx->screen, sctx->gfx_cs, sctx->vram + bo->vram_usage, in radeon_add_to_gfx_buffer_list_check_mem()
    1899 radeon_add_to_buffer_list(sctx, sctx->gfx_cs, bo, usage, priority); in radeon_add_to_gfx_buffer_list_check_mem()
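The two radeon_add_to_gfx_buffer_list_check_mem() fragments above combine the memory-budget check with the buffer-list add. A sketch of how they plausibly fit together; the check_mem parameter, the gart_usage field, and the flush call are assumptions, not text from the header:

    /* Add a BO to the gfx IB's buffer list, flushing first if doing so would
     * push the IB's referenced memory over the VRAM/GTT budget. */
    static inline void
    sketch_add_to_gfx_buffer_list_check_mem(struct si_context *sctx, struct si_resource *bo,
                                            enum radeon_bo_usage usage,
                                            enum radeon_bo_priority priority, bool check_mem)
    {
       if (check_mem &&
           !radeon_cs_memory_below_limit(sctx->screen, sctx->gfx_cs,
                                         sctx->vram + bo->vram_usage,
                                         sctx->gtt + bo->gart_usage))
          si_flush_gfx_cs(sctx, RADEON_FLUSH_ASYNC_START_NEXT_GFX_IB_NOW, NULL);

       radeon_add_to_buffer_list(sctx, sctx->gfx_cs, bo, usage, priority);
    }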
|
D | si_debug.c |
    427 si_parse_current_ib(f, ctx->gfx_cs, chunk->gfx_begin, chunk->gfx_end, &last_trace_id, in si_log_chunk_type_cs_print()
    464 unsigned gfx_cur = ctx->gfx_cs->prev_dw + ctx->gfx_cs->current.cdw; in si_log_cs()
|
D | si_compute_blit.c | 359 si_cp_dma_clear_buffer(sctx, sctx->gfx_cs, dst, offset, aligned_size, *clear_value, 0, in si_clear_buffer()
|