
Searched refs:dma (Results 1 – 25 of 86) sorted by relevance


/third_party/mesa3d/src/mesa/drivers/dri/r200/
radeon_dma.c:207 make_empty_list(&rmesa->dma.free); in radeon_init_dma()
208 make_empty_list(&rmesa->dma.wait); in radeon_init_dma()
209 make_empty_list(&rmesa->dma.reserved); in radeon_init_dma()
210 rmesa->dma.minimum_size = MAX_DMA_BUF_SZ; in radeon_init_dma()
218 if (size > rmesa->dma.minimum_size) in radeonRefillCurrentDmaRegion()
219 rmesa->dma.minimum_size = (size + 15) & (~15); in radeonRefillCurrentDmaRegion()
222 __func__, size, rmesa->dma.minimum_size); in radeonRefillCurrentDmaRegion()
224 if (is_empty_list(&rmesa->dma.free) in radeonRefillCurrentDmaRegion()
225 || last_elem(&rmesa->dma.free)->bo->size < size) { in radeonRefillCurrentDmaRegion()
231 0, rmesa->dma.minimum_size, 4, in radeonRefillCurrentDmaRegion()
[all …]
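
The refill path above keeps a growable minimum allocation size and rounds each oversized request up to a 16-byte boundary before consulting the free list. The rounding idiom in isolation, as a minimal sketch (the helper name is invented):

    /* Round a request up to the next 16-byte multiple, the same
     * (size + 15) & ~15 idiom used in radeonRefillCurrentDmaRegion(). */
    static unsigned align16(unsigned size)
    {
        return (size + 15) & ~15u;
    }
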
r200_tcl.c:143 if (rmesa->radeon.dma.flush == r200FlushElts && in r200AllocElts()
154 if (rmesa->radeon.dma.flush) in r200AllocElts()
155 rmesa->radeon.dma.flush( &rmesa->radeon.glCtx ); in r200AllocElts()
547 if ( rmesa->radeon.dma.flush ) in transition_to_hwtnl()
548 rmesa->radeon.dma.flush( &rmesa->radeon.glCtx ); in transition_to_hwtnl()
550 rmesa->radeon.dma.flush = NULL; in transition_to_hwtnl()
614 if ( rmesa->radeon.dma.flush ) in r200TclFallback()
615 rmesa->radeon.dma.flush( &rmesa->radeon.glCtx ); in r200TclFallback()
627 if ( rmesa->radeon.dma.flush ) in r200TclFallback()
628 rmesa->radeon.dma.flush( &rmesa->radeon.glCtx ); in r200TclFallback()
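
All of these hits exercise the same deferred-flush idiom: dma.flush is a function pointer that is non-NULL only while an element buffer is open, so callers test it before invoking it. A minimal sketch of the idiom with simplified types (ctx stands in for the GL context the drivers actually pass; none of these names are the driver's real definitions):

    #include <stddef.h>

    struct dma_state {
        void (*flush)(void *ctx); /* NULL when no buffer is pending */
    };

    static void maybe_flush(struct dma_state *dma, void *ctx)
    {
        if (dma->flush)  /* only flush if something is actually open */
            dma->flush(ctx);
    }
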
radeon_queryobj.c:113 if (radeon->dma.flush) in radeonBeginQuery()
114 radeon->dma.flush(&radeon->glCtx); in radeonBeginQuery()
153 if (radeon->dma.flush) in radeonEndQuery()
154 radeon->dma.flush(&radeon->glCtx); in radeonEndQuery()
r200_ioctl.h:75 if ( rmesa->radeon.dma.flush ) \
76 rmesa->radeon.dma.flush( &rmesa->radeon.glCtx ); \
r200_cmdbuf.c:167 assert( rmesa->radeon.dma.flush == r200FlushElts ); in r200FlushElts()
168 rmesa->radeon.dma.flush = NULL; in r200FlushElts()
203 assert(!rmesa->radeon.dma.flush); in r200AllocEltsOpenEnded()
205 rmesa->radeon.dma.flush = r200FlushElts; in r200AllocEltsOpenEnded()
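
r200_cmdbuf.c shows both ends of that idiom: the open-ended allocator asserts no flush is pending and arms r200FlushElts, and the flush itself asserts it is the armed callback before disarming. A self-contained sketch under the same simplified types (here ctx is taken to be the dma state itself, which is a sketch-only shortcut, not how the driver threads its context):

    #include <assert.h>
    #include <stddef.h>

    struct dma_state {
        void (*flush)(void *ctx); /* NULL when no buffer is pending */
    };

    static void flush_elts(void *ctx)
    {
        struct dma_state *dma = ctx;      /* sketch-only shortcut */
        assert(dma->flush == flush_elts); /* must be the armed callback */
        dma->flush = NULL;                /* disarm before emitting */
        /* ... emit the pending element commands ... */
    }

    static void alloc_elts_open_ended(struct dma_state *dma)
    {
        assert(!dma->flush);     /* no element buffer may already be open */
        dma->flush = flush_elts; /* arm the deferred flush */
    }
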
radeon_common.c:537 if (!radeon->dma.flush && !radeon->cmdbuf.cs->cdw && is_empty_list(&radeon->dma.reserved)) in radeonFlush()
540 if (radeon->dma.flush) in radeonFlush()
541 radeon->dma.flush( ctx ); in radeonFlush()
radeon_cmdbuf.h:103 if (radeon->cmdbuf.cs->cdw || radeon->dma.flush ) in radeon_firevertices()
/third_party/mesa3d/src/mesa/drivers/dri/radeon/
radeon_dma.c:207 make_empty_list(&rmesa->dma.free); in radeon_init_dma()
208 make_empty_list(&rmesa->dma.wait); in radeon_init_dma()
209 make_empty_list(&rmesa->dma.reserved); in radeon_init_dma()
210 rmesa->dma.minimum_size = MAX_DMA_BUF_SZ; in radeon_init_dma()
218 if (size > rmesa->dma.minimum_size) in radeonRefillCurrentDmaRegion()
219 rmesa->dma.minimum_size = (size + 15) & (~15); in radeonRefillCurrentDmaRegion()
222 __func__, size, rmesa->dma.minimum_size); in radeonRefillCurrentDmaRegion()
224 if (is_empty_list(&rmesa->dma.free) in radeonRefillCurrentDmaRegion()
225 || last_elem(&rmesa->dma.free)->bo->size < size) { in radeonRefillCurrentDmaRegion()
231 0, rmesa->dma.minimum_size, 4, in radeonRefillCurrentDmaRegion()
[all …]
radeon_queryobj.c:113 if (radeon->dma.flush) in radeonBeginQuery()
114 radeon->dma.flush(&radeon->glCtx); in radeonBeginQuery()
153 if (radeon->dma.flush) in radeonEndQuery()
154 radeon->dma.flush(&radeon->glCtx); in radeonEndQuery()
radeon_tcl.c:150 if (rmesa->radeon.dma.flush) in radeonAllocElts()
151 rmesa->radeon.dma.flush( &rmesa->radeon.glCtx ); in radeonAllocElts()
503 if ( rmesa->radeon.dma.flush ) in transition_to_hwtnl()
504 rmesa->radeon.dma.flush( &rmesa->radeon.glCtx ); in transition_to_hwtnl()
506 rmesa->radeon.dma.flush = NULL; in transition_to_hwtnl()
radeon_ioctl.h:93 if ( rmesa->radeon.dma.flush ) \
94 rmesa->radeon.dma.flush( &rmesa->radeon.glCtx ); \
radeon_ioctl.c:177 assert( rmesa->radeon.dma.flush == radeonFlushElts ); in radeonFlushElts()
178 rmesa->radeon.dma.flush = NULL; in radeonFlushElts()
266 assert(!rmesa->radeon.dma.flush); in radeonAllocEltsOpenEnded()
268 rmesa->radeon.dma.flush = radeonFlushElts; in radeonAllocEltsOpenEnded()
radeon_common.c:537 if (!radeon->dma.flush && !radeon->cmdbuf.cs->cdw && is_empty_list(&radeon->dma.reserved)) in radeonFlush()
540 if (radeon->dma.flush) in radeonFlush()
541 radeon->dma.flush( ctx ); in radeonFlush()
/third_party/mesa3d/src/gallium/drivers/svga/
svga_resource_buffer_upload.c:124 assert(sbuf->handle || !sbuf->dma.pending); in svga_buffer_create_hw_storage()
137 assert(!sbuf->dma.pending); in svga_buffer_create_hw_storage()
224 sbuf->dma.flags.discard = TRUE; in svga_buffer_create_host_surface()
465 assert(sbuf->dma.updates == NULL); in svga_buffer_upload_gb_command()
467 if (sbuf->dma.flags.discard) { in svga_buffer_upload_gb_command()
529 sbuf->dma.updates = whole_update_cmd; in svga_buffer_upload_gb_command()
537 memcpy(whole_update_cmd, sbuf->dma.updates, sizeof(*whole_update_cmd)); in svga_buffer_upload_gb_command()
545 sbuf->dma.svga = svga; in svga_buffer_upload_gb_command()
551 sbuf->dma.flags.discard = FALSE; in svga_buffer_upload_gb_command()
613 sbuf->dma.boxes = (SVGA3dCopyBox *)&cmd[1]; in svga_buffer_upload_hb_command()
[all …]
svga_resource_buffer.c:146 if (sbuf->dma.pending) { in svga_buffer_transfer_map()
173 if (sbuf->dma.pending) { in svga_buffer_transfer_map()
189 sbuf->dma.flags.discard = TRUE; in svga_buffer_transfer_map()
199 sbuf->dma.flags.unsynchronized = TRUE; in svga_buffer_transfer_map()
209 if (sbuf->dma.pending) { in svga_buffer_transfer_map()
244 sbuf->dma.flags.unsynchronized = FALSE; in svga_buffer_transfer_map()
367 sbuf->dma.flags.discard = TRUE; in svga_buffer_transfer_unmap()
403 assert(!sbuf->dma.pending); in svga_resource_destroy()
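
Taken together, the SVGA hits suggest a small per-buffer DMA bookkeeping block: dma.pending marks an upload that has been queued but not yet flushed, while dma.flags.discard and dma.flags.unsynchronized record promises made at map time. A rough reconstruction of that state (the field names follow the hits above; everything else is guesswork, not svga's real definitions):

    /* Hypothetical reconstruction; see the svga_resource_buffer*.c
     * hits above for the fields it is modeled on. */
    struct svga_dma_flags_sketch {
        unsigned discard : 1;        /* old contents may be thrown away */
        unsigned unsynchronized : 1; /* map need not wait for the GPU   */
    };

    struct svga_dma_state_sketch {
        int pending;                      /* queued DMA not yet flushed */
        struct svga_dma_flags_sketch flags;
    };
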
/third_party/mesa3d/src/gallium/drivers/r600/
r600_pipe_common.c:218 struct radeon_cmdbuf *cs = &rctx->dma.cs; in r600_dma_emit_wait_idle()
231 uint64_t vram = (uint64_t)ctx->dma.cs.used_vram_kb * 1024; in r600_need_dma_space()
232 uint64_t gtt = (uint64_t)ctx->dma.cs.used_gart_kb * 1024; in r600_need_dma_space()
266 if (!ctx->ws->cs_check_space(&ctx->dma.cs, num_dw, false) || in r600_need_dma_space()
267 ctx->dma.cs.used_vram_kb + ctx->dma.cs.used_gart_kb > 64 * 1024 || in r600_need_dma_space()
268 !radeon_cs_memory_below_limit(ctx->screen, &ctx->dma.cs, vram, gtt)) { in r600_need_dma_space()
269 ctx->dma.flush(ctx, PIPE_FLUSH_ASYNC, NULL); in r600_need_dma_space()
270 assert((num_dw + ctx->dma.cs.current.cdw) <= ctx->dma.cs.current.max_dw); in r600_need_dma_space()
277 ctx->ws->cs_is_buffer_referenced(&ctx->dma.cs, dst->buf, in r600_need_dma_space()
280 ctx->ws->cs_is_buffer_referenced(&ctx->dma.cs, src->buf, in r600_need_dma_space()
[all …]
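
r600_need_dma_space() is a standard ring-buffer reservation check: if the DMA command stream cannot take num_dw more dwords, or its referenced VRAM/GTT exceeds the budget, it flushes asynchronously and then asserts the fresh stream has room. The shape of the check, reduced to a sketch (invented types and names, not the r600/winsys API):

    #include <assert.h>

    struct ring_sketch {
        unsigned cdw;    /* dwords already written */
        unsigned max_dw; /* capacity of the stream */
    };

    static void need_space(struct ring_sketch *cs, unsigned num_dw,
                           void (*flush)(struct ring_sketch *))
    {
        if (cs->cdw + num_dw > cs->max_dw) {
            flush(cs); /* submits and resets to an empty stream */
            assert(cs->cdw + num_dw <= cs->max_dw);
        }
    }
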
r600_buffer_common.c:41 if (radeon_emitted(&ctx->dma.cs, 0) && in r600_rings_is_buffer_referenced()
42 ctx->ws->cs_is_buffer_referenced(&ctx->dma.cs, buf, usage)) { in r600_rings_is_buffer_referenced()
77 if (radeon_emitted(&ctx->dma.cs, 0) && in r600_buffer_map_sync_with_rings()
78 ctx->ws->cs_is_buffer_referenced(&ctx->dma.cs, in r600_buffer_map_sync_with_rings()
81 ctx->dma.flush(ctx, PIPE_FLUSH_ASYNC, NULL); in r600_buffer_map_sync_with_rings()
84 ctx->dma.flush(ctx, 0, NULL); in r600_buffer_map_sync_with_rings()
96 if (ctx->dma.cs.priv) in r600_buffer_map_sync_with_rings()
97 ctx->ws->cs_sync_flush(&ctx->dma.cs); in r600_buffer_map_sync_with_rings()
337 (dword_aligned && (rctx->dma.cs.priv || in r600_can_dma_copy_buffer()
evergreen_hw_context.c:38 struct radeon_cmdbuf *cs = &rctx->b.dma.cs; in evergreen_dma_copy_buffer()
67 radeon_add_to_buffer_list(&rctx->b, &rctx->b.dma, rsrc, RADEON_USAGE_READ, 0); in evergreen_dma_copy_buffer()
68 radeon_add_to_buffer_list(&rctx->b, &rctx->b.dma, rdst, RADEON_USAGE_WRITE, 0); in evergreen_dma_copy_buffer()
r600_hw_context.c:37 if (radeon_emitted(&ctx->b.dma.cs, 0)) in r600_need_cs_space()
38 ctx->b.dma.flush(ctx, PIPE_FLUSH_ASYNC, NULL); in r600_need_cs_space()
587 struct radeon_cmdbuf *cs = &rctx->b.dma.cs; in r600_dma_copy_buffer()
605 radeon_add_to_buffer_list(&rctx->b, &rctx->b.dma, rsrc, RADEON_USAGE_READ, 0); in r600_dma_copy_buffer()
606 radeon_add_to_buffer_list(&rctx->b, &rctx->b.dma, rdst, RADEON_USAGE_WRITE, 0); in r600_dma_copy_buffer()
/third_party/alsa-utils/alsaconf/
alsaconf.in:161 for dma in $*; do
165 if grep -q '^dma '$dma'$' $i/resources; then
170 if [ -r $PROCFS/dma ]; then
171 if grep -q '^ *'$dma': ' $PROCFS/dma ; then
176 list="$list $dma"
180 if [ -r $PROCFS/dma ]; then
181 for dma in $*; do
182 grep -q '^ *'$dma': ' $PROCFS/dma || list="$list $dma"
237 if [ -r $PROCFS/dma ]; then
239 cat $PROCFS/dma
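
The shell fragment above filters candidate channels through /proc/dma, whose lines name the channels already claimed by a driver (e.g. " 4: cascade"). The same test written in C, as a small standalone sketch assuming that usual "<n>: <owner>" line format:

    #include <stdio.h>

    /* Return 1 if DMA channel `chan` appears in /proc/dma (already
     * claimed), 0 if it is free, -1 if the file cannot be read. */
    static int dma_channel_claimed(int chan)
    {
        char line[128];
        FILE *f = fopen("/proc/dma", "r");
        if (!f)
            return -1;
        while (fgets(line, sizeof line, f)) {
            int n;
            if (sscanf(line, " %d:", &n) == 1 && n == chan) {
                fclose(f);
                return 1;
            }
        }
        fclose(f);
        return 0;
    }
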
/third_party/ltp/testcases/kernel/device-drivers/usb/tusb/
st_tusb.h:42 dma_addr_t *dma);
44 void *addr, dma_addr_t dma);
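
These two prototypes have the shape of the mainline kernel's USB coherent-buffer helpers, where allocation hands back both a CPU pointer and a bus address, and freeing needs both again. For comparison, the upstream pair is declared roughly as follows (paraphrased from include/linux/usb.h; signatures can vary across kernel versions):

    /* From include/linux/usb.h (paraphrased; check your kernel tree). */
    void *usb_alloc_coherent(struct usb_device *dev, size_t size,
                             gfp_t mem_flags, dma_addr_t *dma);
    void  usb_free_coherent(struct usb_device *dev, size_t size,
                            void *addr, dma_addr_t dma);
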
/third_party/wayland-protocols_standard/unstable/linux-explicit-synchronization/
README:1 Linux explicit synchronization (dma-fence) protocol
/third_party/EGL/extensions/MESA/
EGL_MESA_image_dma_buf_export.txt:42 dma-buf infrastructure. The extension allows creating a Linux dma_buf
84 enough information to enable an exported dma-buf to be imported
/third_party/mesa3d/docs/_extra/specs/
MESA_image_dma_buf_export.txt:42 dma-buf infrastructure. The extension allows creating a Linux dma_buf
84 enough information to enable an exported dma-buf to be imported
/third_party/skia/third_party/externals/egl-registry/extensions/MESA/
EGL_MESA_image_dma_buf_export.txt:42 dma-buf infrastructure. The extension allows creating a Linux dma_buf
84 enough information to enable an exported dma-buf to be imported
