Searched refs:rctx (Results 1 – 25 of 52) sorted by relevance

/external/mesa3d/src/gallium/drivers/r600/
r600_streamout.c
37 static void r600_set_streamout_enable(struct r600_common_context *rctx, bool enable);
45 struct r600_common_context *rctx = (struct r600_common_context *)ctx; in r600_create_so_target() local
54 u_suballocator_alloc(rctx->allocator_zeroed_memory, 4, 4, in r600_create_so_target()
82 void r600_streamout_buffers_dirty(struct r600_common_context *rctx) in r600_streamout_buffers_dirty() argument
84 struct r600_atom *begin = &rctx->streamout.begin_atom; in r600_streamout_buffers_dirty()
85 unsigned num_bufs = util_bitcount(rctx->streamout.enabled_mask); in r600_streamout_buffers_dirty()
86 unsigned num_bufs_appended = util_bitcount(rctx->streamout.enabled_mask & in r600_streamout_buffers_dirty()
87 rctx->streamout.append_bitmask); in r600_streamout_buffers_dirty()
92 rctx->streamout.num_dw_for_end = in r600_streamout_buffers_dirty()
100 if (rctx->family >= CHIP_RS780 && rctx->family <= CHIP_RV740) in r600_streamout_buffers_dirty()
[all …]
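
Almost every r600 hit in these results starts the same way: the generic Gallium context pointer passed into the callback is cast to the driver's own context type and held in a local named rctx, as in the `struct r600_common_context *rctx = (struct r600_common_context *)ctx;` line above. The cast only works because the driver context embeds the generic context as its first member. The following minimal C sketch illustrates that embed-and-downcast pattern with invented stand-in types (base_context, toy_context, toy_draw); it is not the real Mesa definition of pipe_context or r600_context.

/*
 * Minimal sketch of the embed-and-downcast pattern, with made-up types.
 * The driver-private context stores the generic context as its FIRST member,
 * so a pointer to the generic part can be cast back to the driver type.
 */
#include <stdio.h>

struct base_context {              /* stand-in for the API-facing context */
    const char *name;
};

struct toy_context {               /* stand-in for the driver-private context */
    struct base_context b;         /* must stay the first member */
    int num_draw_calls;
};

static void toy_draw(struct base_context *ctx)
{
    /* Same downcast style as the r600 callbacks above. */
    struct toy_context *rctx = (struct toy_context *)ctx;

    rctx->num_draw_calls++;
    printf("%s: draw call #%d\n", ctx->name, rctx->num_draw_calls);
}

int main(void)
{
    struct toy_context c = { .b = { .name = "toy-driver" }, .num_draw_calls = 0 };

    /* Callers only ever hold the embedded generic part. */
    toy_draw(&c.b);
    return 0;
}
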
r600_state_common.c
56 void r600_add_atom(struct r600_context *rctx, in r600_add_atom() argument
61 assert(rctx->atoms[id] == NULL); in r600_add_atom()
62 rctx->atoms[id] = atom; in r600_add_atom()
66 void r600_init_atom(struct r600_context *rctx, in r600_init_atom() argument
74 r600_add_atom(rctx, atom, id); in r600_init_atom()
77 void r600_emit_cso_state(struct r600_context *rctx, struct r600_atom *atom) in r600_emit_cso_state() argument
79 r600_emit_command_buffer(rctx->b.gfx.cs, ((struct r600_cso_state*)atom)->cb); in r600_emit_cso_state()
82 void r600_emit_alphatest_state(struct r600_context *rctx, struct r600_atom *atom) in r600_emit_alphatest_state() argument
84 struct radeon_cmdbuf *cs = rctx->b.gfx.cs; in r600_emit_alphatest_state()
88 if (rctx->b.chip_class >= EVERGREEN && a->cb0_export_16bpc) { in r600_emit_alphatest_state()
[all …]
r600_viewport.c
51 #define GET_MAX_SCISSOR(rctx) (rctx->chip_class >= EVERGREEN ? 16384 : 8192) argument
58 struct r600_common_context *rctx = (struct r600_common_context *)ctx; in r600_set_scissor_states() local
62 rctx->scissors.states[start_slot + i] = state[i]; in r600_set_scissor_states()
64 if (!rctx->scissor_enabled) in r600_set_scissor_states()
67 rctx->scissors.dirty_mask |= ((1 << num_scissors) - 1) << start_slot; in r600_set_scissor_states()
68 rctx->set_atom_dirty(rctx, &rctx->scissors.atom, true); in r600_set_scissor_states()
74 static void r600_get_scissor_from_viewport(struct r600_common_context *rctx, in r600_get_scissor_from_viewport() argument
89 scissor->maxx = scissor->maxy = GET_MAX_SCISSOR(rctx); in r600_get_scissor_from_viewport()
112 static void r600_clamp_scissor(struct r600_common_context *rctx, in r600_clamp_scissor() argument
116 unsigned max_scissor = GET_MAX_SCISSOR(rctx); in r600_clamp_scissor()
[all …]
r600_pipe.c
68 struct r600_context *rctx = (struct r600_context *)context; in r600_destroy_context() local
71 r600_isa_destroy(rctx->isa); in r600_destroy_context()
73 r600_sb_context_destroy(rctx->sb_context); in r600_destroy_context()
75 for (sh = 0; sh < (rctx->b.chip_class < EVERGREEN ? R600_NUM_HW_STAGES : EG_NUM_HW_STAGES); sh++) { in r600_destroy_context()
76 r600_resource_reference(&rctx->scratch_buffers[sh].buffer, NULL); in r600_destroy_context()
78 r600_resource_reference(&rctx->dummy_cmask, NULL); in r600_destroy_context()
79 r600_resource_reference(&rctx->dummy_fmask, NULL); in r600_destroy_context()
81 if (rctx->append_fence) in r600_destroy_context()
82 pipe_resource_reference((struct pipe_resource**)&rctx->append_fence, NULL); in r600_destroy_context()
84 rctx->b.b.set_constant_buffer(&rctx->b.b, sh, R600_BUFFER_INFO_CONST_BUFFER, NULL); in r600_destroy_context()
[all …]
r600_blit.c
55 struct r600_context *rctx = (struct r600_context *)ctx; in r600_blitter_begin() local
57 if (rctx->cmd_buf_is_compute) { in r600_blitter_begin()
58 rctx->b.gfx.flush(rctx, PIPE_FLUSH_ASYNC, NULL); in r600_blitter_begin()
59 rctx->cmd_buf_is_compute = false; in r600_blitter_begin()
62 util_blitter_save_vertex_buffer_slot(rctx->blitter, rctx->vertex_buffer_state.vb); in r600_blitter_begin()
63 util_blitter_save_vertex_elements(rctx->blitter, rctx->vertex_fetch_shader.cso); in r600_blitter_begin()
64 util_blitter_save_vertex_shader(rctx->blitter, rctx->vs_shader); in r600_blitter_begin()
65 util_blitter_save_geometry_shader(rctx->blitter, rctx->gs_shader); in r600_blitter_begin()
66 util_blitter_save_tessctrl_shader(rctx->blitter, rctx->tcs_shader); in r600_blitter_begin()
67 util_blitter_save_tesseval_shader(rctx->blitter, rctx->tes_shader); in r600_blitter_begin()
[all …]
r600_state.c
247 static void r600_emit_polygon_offset(struct r600_context *rctx, struct r600_atom *a) in r600_emit_polygon_offset() argument
249 struct radeon_cmdbuf *cs = rctx->b.gfx.cs; in r600_emit_polygon_offset()
318 struct r600_context *rctx = (struct r600_context *)ctx; in r600_create_blend_state_mode() local
330 if (rctx->b.family > CHIP_R600) in r600_create_blend_state_mode()
387 if (rctx->b.family > CHIP_R600) { in r600_create_blend_state_mode()
459 struct r600_context *rctx = (struct r600_context *)ctx; in r600_create_rs_state() local
485 if (rctx->b.chip_class == R700) { in r600_create_rs_state()
509 S_028A4C_PS_ITER_SAMPLE(state->multisample && rctx->ps_iter_samples > 1); in r600_create_rs_state()
510 if (rctx->b.family == CHIP_RV770) { in r600_create_rs_state()
512 sc_mode_cntl |= S_028A4C_TILE_COVER_DISABLE(state->multisample && rctx->ps_iter_samples > 1); in r600_create_rs_state()
[all …]
evergreen_state.c
472 struct r600_context *rctx = (struct r600_context *)ctx; in evergreen_create_rs_state() local
543 if (rctx->b.chip_class == CAYMAN) { in evergreen_create_rs_state()
632 static void evergreen_fill_buffer_resource_words(struct r600_context *rctx, in evergreen_fill_buffer_resource_words() argument
679 texture_buffer_sampler_view(struct r600_context *rctx, in texture_buffer_sampler_view() argument
696 evergreen_fill_buffer_resource_words(rctx, view->base.texture, in texture_buffer_sampler_view()
702 list_addtail(&view->list, &rctx->texture_buffers); in texture_buffer_sampler_view()
719 static int evergreen_fill_tex_resource_words(struct r600_context *rctx, in evergreen_fill_tex_resource_words() argument
725 struct r600_screen *rscreen = (struct r600_screen*)rctx->b.b.screen; in evergreen_fill_tex_resource_words()
768 format = r600_translate_texformat(rctx->b.b.screen, params->pipe_format, in evergreen_fill_tex_resource_words()
913 struct r600_context *rctx = (struct r600_context*)ctx; in evergreen_create_sampler_view_custom() local
[all …]
evergreen_compute.c
127 struct r600_context *rctx = NULL; in evergreen_set_rat() local
133 rctx = pipe->ctx; in evergreen_set_rat()
135 COMPUTE_DBG(rctx->screen, "bind rat: %i \n", id); in evergreen_set_rat()
161 evergreen_init_color_surface_rat(rctx, surf); in evergreen_set_rat()
164 static void evergreen_cs_set_vertex_buffer(struct r600_context *rctx, in evergreen_cs_set_vertex_buffer() argument
169 struct r600_vertexbuf_state *state = &rctx->cs_vertex_buffer_state; in evergreen_cs_set_vertex_buffer()
178 rctx->b.flags |= R600_CONTEXT_INV_VERTEX_CACHE; in evergreen_cs_set_vertex_buffer()
181 r600_mark_atom_dirty(rctx, &state->atom); in evergreen_cs_set_vertex_buffer()
184 static void evergreen_cs_set_constant_buffer(struct r600_context *rctx, in evergreen_cs_set_constant_buffer() argument
196 rctx->b.b.set_constant_buffer(&rctx->b.b, PIPE_SHADER_COMPUTE, cb_index, &cb); in evergreen_cs_set_constant_buffer()
[all …]
r600_pipe_common.c
136 struct r600_common_context *rctx = in r600_draw_rectangle() local
143 rctx->b.bind_vertex_elements_state(&rctx->b, vertex_elements_cso); in r600_draw_rectangle()
144 rctx->b.bind_vs_state(&rctx->b, get_vs(blitter)); in r600_draw_rectangle()
157 rctx->b.set_viewport_states(&rctx->b, 0, 1, &viewport); in r600_draw_rectangle()
162 u_upload_alloc(rctx->b.stream_uploader, 0, sizeof(float) * 24, in r600_draw_rectangle()
163 rctx->screen->info.tcc_cache_line_size, in r600_draw_rectangle()
210 rctx->b.set_vertex_buffers(&rctx->b, blitter->vb_slot, 1, &vbuffer); in r600_draw_rectangle()
211 util_draw_arrays_instanced(&rctx->b, R600_PRIM_RECTANGLE_LIST, 0, 3, in r600_draw_rectangle()
216 static void r600_dma_emit_wait_idle(struct r600_common_context *rctx) in r600_dma_emit_wait_idle() argument
218 struct radeon_cmdbuf *cs = rctx->dma.cs; in r600_dma_emit_wait_idle()
[all …]
r600_hw_context.c
92 void r600_flush_emit(struct r600_context *rctx) in r600_flush_emit() argument
94 struct radeon_cmdbuf *cs = rctx->b.gfx.cs; in r600_flush_emit()
98 if (!rctx->b.flags) { in r600_flush_emit()
103 if (rctx->b.flags & R600_CONTEXT_STREAMOUT_FLUSH) in r600_flush_emit()
104 rctx->b.flags |= r600_get_flush_flags(R600_COHERENCY_SHADER); in r600_flush_emit()
106 if (rctx->b.flags & R600_CONTEXT_WAIT_3D_IDLE) { in r600_flush_emit()
109 if (rctx->b.flags & R600_CONTEXT_WAIT_CP_DMA_IDLE) { in r600_flush_emit()
115 if (rctx->b.family >= CHIP_CAYMAN) { in r600_flush_emit()
117 rctx->b.flags |= R600_CONTEXT_PS_PARTIAL_FLUSH; in r600_flush_emit()
124 if (rctx->b.flags & R600_CONTEXT_PS_PARTIAL_FLUSH) { in r600_flush_emit()
[all …]
r600_query.c
92 static bool r600_query_sw_begin(struct r600_common_context *rctx, in r600_query_sw_begin() argument
103 query->begin_result = rctx->num_draw_calls; in r600_query_sw_begin()
106 query->begin_result = rctx->num_decompress_calls; in r600_query_sw_begin()
109 query->begin_result = rctx->num_mrt_draw_calls; in r600_query_sw_begin()
112 query->begin_result = rctx->num_prim_restart_calls; in r600_query_sw_begin()
115 query->begin_result = rctx->num_spill_draw_calls; in r600_query_sw_begin()
118 query->begin_result = rctx->num_compute_calls; in r600_query_sw_begin()
121 query->begin_result = rctx->num_spill_compute_calls; in r600_query_sw_begin()
124 query->begin_result = rctx->num_dma_calls; in r600_query_sw_begin()
127 query->begin_result = rctx->num_cp_dma_calls; in r600_query_sw_begin()
[all …]
r600_pipe.h
627 static inline void r600_set_atom_dirty(struct r600_context *rctx, in r600_set_atom_dirty() argument
637 rctx->dirty_atoms |= mask; in r600_set_atom_dirty()
639 rctx->dirty_atoms &= ~mask; in r600_set_atom_dirty()
642 static inline void r600_mark_atom_dirty(struct r600_context *rctx, in r600_mark_atom_dirty() argument
645 r600_set_atom_dirty(rctx, atom, true); in r600_mark_atom_dirty()
648 static inline void r600_emit_atom(struct r600_context *rctx, struct r600_atom *atom) in r600_emit_atom() argument
650 atom->emit(&rctx->b, atom); in r600_emit_atom()
651 r600_set_atom_dirty(rctx, atom, false); in r600_emit_atom()
654 static inline void r600_set_cso_state(struct r600_context *rctx, in r600_set_cso_state() argument
658 r600_set_atom_dirty(rctx, &state->atom, cso != NULL); in r600_set_cso_state()
[all …]
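
The r600_pipe.h helpers above are the heart of the driver's dirty-state tracking: every piece of state is an "atom" that owns one bit in rctx->dirty_atoms, marking the atom dirty sets the bit, and emitting it calls the atom's emit hook and clears the bit again. Below is a minimal, self-contained sketch of that bookkeeping, assuming simplified stand-in structs (toy_ctx, toy_atom) rather than the real r600_context/r600_atom definitions.

/*
 * Minimal sketch of the dirty-atom scheme, using invented stand-in types.
 * The real r600_context/r600_atom carry far more state; only the bitmask
 * bookkeeping shown in r600_pipe.h above is reproduced here.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct toy_ctx;

struct toy_atom {
    unsigned id;                                            /* bit index in dirty_atoms */
    void (*emit)(struct toy_ctx *ctx, struct toy_atom *a);  /* writes state to the command stream */
};

struct toy_ctx {
    uint64_t dirty_atoms;                                   /* one bit per atom needing re-emission */
};

static inline void set_atom_dirty(struct toy_ctx *ctx, struct toy_atom *a, bool dirty)
{
    uint64_t mask = 1ull << a->id;

    if (dirty)
        ctx->dirty_atoms |= mask;
    else
        ctx->dirty_atoms &= ~mask;
}

static inline void mark_atom_dirty(struct toy_ctx *ctx, struct toy_atom *a)
{
    set_atom_dirty(ctx, a, true);
}

/* Emitting an atom writes its state and clears its dirty bit, like r600_emit_atom(). */
static inline void emit_atom(struct toy_ctx *ctx, struct toy_atom *a)
{
    a->emit(ctx, a);
    set_atom_dirty(ctx, a, false);
}

static void emit_scissor(struct toy_ctx *ctx, struct toy_atom *a)
{
    (void)ctx;
    printf("emitting atom %u\n", a->id);
}

int main(void)
{
    struct toy_ctx ctx = { 0 };
    struct toy_atom scissor = { .id = 3, .emit = emit_scissor };

    mark_atom_dirty(&ctx, &scissor);            /* state change -> bit 3 set */
    if (ctx.dirty_atoms & (1ull << scissor.id))
        emit_atom(&ctx, &scissor);              /* re-emit and clear the bit */
    return 0;
}
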
evergreen_hw_context.c
31 void evergreen_dma_copy_buffer(struct r600_context *rctx, in evergreen_dma_copy_buffer() argument
38 struct radeon_cmdbuf *cs = rctx->b.dma.cs; in evergreen_dma_copy_buffer()
63 r600_need_dma_space(&rctx->b, ncopy * 5, rdst, rsrc); in evergreen_dma_copy_buffer()
67 radeon_add_to_buffer_list(&rctx->b, &rctx->b.dma, rsrc, RADEON_USAGE_READ, 0); in evergreen_dma_copy_buffer()
68 radeon_add_to_buffer_list(&rctx->b, &rctx->b.dma, rdst, RADEON_USAGE_WRITE, 0); in evergreen_dma_copy_buffer()
83 void evergreen_cp_dma_clear_buffer(struct r600_context *rctx, in evergreen_cp_dma_clear_buffer() argument
88 struct radeon_cmdbuf *cs = rctx->b.gfx.cs; in evergreen_cp_dma_clear_buffer()
91 assert(rctx->screen->b.has_cp_dma); in evergreen_cp_dma_clear_buffer()
102 rctx->b.flags |= r600_get_flush_flags(coher) | in evergreen_cp_dma_clear_buffer()
110 r600_need_cs_space(rctx, in evergreen_cp_dma_clear_buffer()
[all …]
r600_cs.h
69 static inline unsigned radeon_add_to_buffer_list(struct r600_common_context *rctx, in radeon_add_to_buffer_list() argument
76 return rctx->ws->cs_add_buffer( in radeon_add_to_buffer_list()
100 radeon_add_to_buffer_list_check_mem(struct r600_common_context *rctx, in radeon_add_to_buffer_list_check_mem() argument
108 !radeon_cs_memory_below_limit(rctx->screen, ring->cs, in radeon_add_to_buffer_list_check_mem()
109 rctx->vram + rbo->vram_usage, in radeon_add_to_buffer_list_check_mem()
110 rctx->gtt + rbo->gart_usage)) in radeon_add_to_buffer_list_check_mem()
111 ring->flush(rctx, PIPE_FLUSH_ASYNC, NULL); in radeon_add_to_buffer_list_check_mem()
113 return radeon_add_to_buffer_list(rctx, ring, rbo, usage, priority); in radeon_add_to_buffer_list_check_mem()
116 static inline void r600_emit_reloc(struct r600_common_context *rctx, in r600_emit_reloc() argument
122 bool has_vm = ((struct r600_common_screen*)rctx->b.screen)->info.r600_has_virtual_memory; in r600_emit_reloc()
[all …]
r600_buffer_common.c
238 r600_invalidate_buffer(struct r600_common_context *rctx, in r600_invalidate_buffer() argument
256 if (r600_rings_is_buffer_referenced(rctx, rbuffer->buf, RADEON_USAGE_READWRITE) || in r600_invalidate_buffer()
257 !rctx->ws->buffer_wait(rbuffer->buf, 0, RADEON_USAGE_READWRITE)) { in r600_invalidate_buffer()
258 rctx->invalidate_buffer(&rctx->b, &rbuffer->b.b); in r600_invalidate_buffer()
271 struct r600_common_context *rctx = (struct r600_common_context *)ctx; in r600_replace_buffer_storage() local
287 rctx->rebind_buffer(ctx, dst, old_gpu_address); in r600_replace_buffer_storage()
293 struct r600_common_context *rctx = (struct r600_common_context*)ctx; in r600_invalidate_resource() local
298 (void)r600_invalidate_buffer(rctx, rbuffer); in r600_invalidate_resource()
309 struct r600_common_context *rctx = (struct r600_common_context*)ctx; in r600_buffer_get_transfer() local
313 transfer = slab_alloc(&rctx->pool_transfers_unsync); in r600_buffer_get_transfer()
[all …]
r600_pipe_common.h
681 bool r600_common_context_init(struct r600_common_context *rctx,
684 void r600_common_context_cleanup(struct r600_common_context *rctx);
699 bool r600_check_device_reset(struct r600_common_context *rctx);
712 void r600_query_init(struct r600_common_context *rctx);
718 void r600_streamout_buffers_dirty(struct r600_common_context *rctx);
723 void r600_emit_streamout_end(struct r600_common_context *rctx);
724 void r600_update_prims_generated_query_state(struct r600_common_context *rctx,
726 void r600_streamout_init(struct r600_common_context *rctx);
732 bool r600_prepare_for_dma_blit(struct r600_common_context *rctx,
759 void evergreen_do_fast_color_clear(struct r600_common_context *rctx,
[all …]
eg_debug.c
323 static void eg_dump_last_ib(struct r600_context *rctx, FILE *f) in eg_dump_last_ib() argument
327 if (!rctx->last_gfx.ib) in eg_dump_last_ib()
330 if (rctx->last_trace_buf) { in eg_dump_last_ib()
335 uint32_t *map = rctx->b.ws->buffer_map(rctx->last_trace_buf->buf, in eg_dump_last_ib()
343 eg_parse_ib(f, rctx->last_gfx.ib, rctx->last_gfx.num_dw, in eg_dump_last_ib()
344 last_trace_id, "IB", rctx->b.chip_class, in eg_dump_last_ib()
352 struct r600_context *rctx = (struct r600_context*)ctx; in eg_dump_debug_state() local
354 eg_dump_last_ib(rctx, f); in eg_dump_debug_state()
359 radeon_clear_saved_cs(&rctx->last_gfx); in eg_dump_debug_state()
360 r600_resource_reference(&rctx->last_trace_buf, NULL); in eg_dump_debug_state()
r600_texture.c
47 bool r600_prepare_for_dma_blit(struct r600_common_context *rctx, in r600_prepare_for_dma_blit() argument
55 if (!rctx->dma.cs) in r600_prepare_for_dma_blit()
86 r600_texture_discard_cmask(rctx->screen, rdst); in r600_prepare_for_dma_blit()
91 rctx->b.flush_resource(&rctx->b, &rsrc->resource.b.b); in r600_prepare_for_dma_blit()
136 struct r600_common_context *rctx = (struct r600_common_context*)ctx; in r600_copy_to_staging_texture() local
147 rctx->dma_copy(ctx, dst, 0, 0, 0, 0, src, transfer->level, in r600_copy_to_staging_texture()
154 struct r600_common_context *rctx = (struct r600_common_context*)ctx; in r600_copy_from_staging_texture() local
169 rctx->dma_copy(ctx, dst, transfer->level, in r600_copy_from_staging_texture()
316 static void r600_eliminate_fast_color_clear(struct r600_common_context *rctx, in r600_eliminate_fast_color_clear() argument
319 struct r600_common_screen *rscreen = rctx->screen; in r600_eliminate_fast_color_clear()
[all …]
evergreen_compute.h
41 void evergreen_init_atom_start_compute_cs(struct r600_context *rctx);
42 void evergreen_init_compute_state_functions(struct r600_context *rctx);
43 void evergreen_emit_cs_shader(struct r600_context *rctx, struct r600_atom * atom);
/external/rust/crates/quiche/deps/boringssl/src/crypto/evp/
p_rsa.c
100 RSA_PKEY_CTX *rctx; in pkey_rsa_init() local
101 rctx = OPENSSL_malloc(sizeof(RSA_PKEY_CTX)); in pkey_rsa_init()
102 if (!rctx) { in pkey_rsa_init()
105 OPENSSL_memset(rctx, 0, sizeof(RSA_PKEY_CTX)); in pkey_rsa_init()
107 rctx->nbits = 2048; in pkey_rsa_init()
108 rctx->pad_mode = RSA_PKCS1_PADDING; in pkey_rsa_init()
109 rctx->saltlen = -2; in pkey_rsa_init()
111 ctx->data = rctx; in pkey_rsa_init()
148 RSA_PKEY_CTX *rctx = ctx->data; in pkey_rsa_cleanup() local
150 if (rctx == NULL) { in pkey_rsa_cleanup()
[all …]
/external/boringssl/src/crypto/evp/
p_rsa.c
100 RSA_PKEY_CTX *rctx; in pkey_rsa_init() local
101 rctx = OPENSSL_malloc(sizeof(RSA_PKEY_CTX)); in pkey_rsa_init()
102 if (!rctx) { in pkey_rsa_init()
105 OPENSSL_memset(rctx, 0, sizeof(RSA_PKEY_CTX)); in pkey_rsa_init()
107 rctx->nbits = 2048; in pkey_rsa_init()
108 rctx->pad_mode = RSA_PKCS1_PADDING; in pkey_rsa_init()
109 rctx->saltlen = -2; in pkey_rsa_init()
111 ctx->data = rctx; in pkey_rsa_init()
148 RSA_PKEY_CTX *rctx = ctx->data; in pkey_rsa_cleanup() local
150 if (rctx == NULL) { in pkey_rsa_cleanup()
[all …]
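
Both vendored BoringSSL copies show the same lifecycle for rctx in the EVP RSA method: pkey_rsa_init() allocates an RSA_PKEY_CTX, zeroes it, fills in defaults (2048-bit keys, PKCS#1 padding, a saltlen sentinel of -2) and attaches it to ctx->data, and pkey_rsa_cleanup() tears it down. The sketch below reproduces only that allocate/default/attach/free shape with simplified stand-in types; it is not the real RSA_PKEY_CTX or EVP_PKEY_CTX and uses plain malloc/free instead of OPENSSL_malloc.

/*
 * Simplified sketch of the rctx lifecycle from p_rsa.c above, with invented
 * stand-in types. Only the allocate -> zero -> set defaults -> hang off
 * ctx->data -> free shape is shown.
 */
#include <stdlib.h>
#include <string.h>

struct toy_rsa_pkey_ctx {
    int nbits;     /* key size used for key generation */
    int pad_mode;  /* padding scheme */
    int saltlen;   /* PSS salt-length sentinel; default -2 as in the snippet */
};

struct toy_pkey_ctx {
    void *data;    /* algorithm-specific context lives here */
};

enum { TOY_PKCS1_PADDING = 1 };

static int toy_pkey_rsa_init(struct toy_pkey_ctx *ctx)
{
    struct toy_rsa_pkey_ctx *rctx = malloc(sizeof(*rctx));
    if (!rctx)
        return 0;
    memset(rctx, 0, sizeof(*rctx));

    /* Defaults mirroring the snippet: 2048-bit keys, PKCS#1 padding, saltlen -2. */
    rctx->nbits = 2048;
    rctx->pad_mode = TOY_PKCS1_PADDING;
    rctx->saltlen = -2;

    ctx->data = rctx;
    return 1;
}

static void toy_pkey_rsa_cleanup(struct toy_pkey_ctx *ctx)
{
    struct toy_rsa_pkey_ctx *rctx = ctx->data;

    if (rctx == NULL)
        return;
    free(rctx);
    ctx->data = NULL;
}

int main(void)
{
    struct toy_pkey_ctx ctx = { 0 };

    if (toy_pkey_rsa_init(&ctx))
        toy_pkey_rsa_cleanup(&ctx);
    return 0;
}
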
/external/mesa3d/src/gallium/drivers/r600/sb/
sb_core.cpp
48 sb_context *r600_sb_context_create(struct r600_context *rctx) { in r600_sb_context_create() argument
52 if (sctx->init(rctx->isa, translate_chip(rctx->b.family), in r600_sb_context_create()
53 translate_chip_class(rctx->b.chip_class))) { in r600_sb_context_create()
58 unsigned df = rctx->screen->b.debug_flags; in r600_sb_context_create()
90 int r600_sb_bytecode_process(struct r600_context *rctx, in r600_sb_bytecode_process() argument
98 sb_context *ctx = (sb_context *)rctx->sb_context; in r600_sb_bytecode_process()
100 rctx->sb_context = ctx = r600_sb_context_create(rctx); in r600_sb_bytecode_process()
/external/llvm-project/llvm/test/MC/AArch64/
armv8.5a-predres.s
5 cfp rctx, x0
6 dvp rctx, x1
7 cpp rctx, x2
armv8.5a-predres-error.s
3 cfp rctx
4 dvp rctx
5 cpp rctx
/external/llvm-project/llvm/test/MC/Disassembler/AArch64/
armv8.5a-predres.txt
9 # CHECK: cfp rctx, x0
10 # CHECK: dvp rctx, x1
11 # CHECK: cpp rctx, x2
