Lines Matching refs:tx
142 struct nouveau_transfer *tx, bool permit_pb) in nouveau_transfer_staging() argument
144 const unsigned adj = tx->base.box.x & NOUVEAU_MIN_BUFFER_MAP_ALIGN_MASK; in nouveau_transfer_staging()
145 const unsigned size = align(tx->base.box.width, 4) + adj; in nouveau_transfer_staging()
151 tx->map = align_malloc(size, NOUVEAU_MIN_BUFFER_MAP_ALIGN); in nouveau_transfer_staging()
152 if (tx->map) in nouveau_transfer_staging()
153 tx->map += adj; in nouveau_transfer_staging()
155 tx->mm = in nouveau_transfer_staging()
156 nouveau_mm_allocate(nv->screen->mm_GART, size, &tx->bo, &tx->offset); in nouveau_transfer_staging()
157 if (tx->bo) { in nouveau_transfer_staging()
158 tx->offset += adj; in nouveau_transfer_staging()
159 if (!nouveau_bo_map(tx->bo, 0, NULL)) in nouveau_transfer_staging()
160 tx->map = (uint8_t *)tx->bo->map + tx->offset; in nouveau_transfer_staging()
163 return tx->map; in nouveau_transfer_staging()
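
The fragments above (nouveau_transfer_staging) set up a bounce area for the transfer: adj preserves the sub-alignment of box.x, the size is padded accordingly, and the returned pointer is biased by adj whether the backing is plain malloc'd memory (lines 151-153) or a GART suballocation mapped through tx->bo (lines 155-160). Below is a minimal, self-contained sketch of just that alignment arithmetic; MIN_MAP_ALIGN, staging_alloc and the alignment value are hypothetical stand-ins, not the driver's real constants.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Hypothetical stand-ins for NOUVEAU_MIN_BUFFER_MAP_ALIGN(_MASK);
     * the value is illustrative only. */
    #define MIN_MAP_ALIGN      4
    #define MIN_MAP_ALIGN_MASK (MIN_MAP_ALIGN - 1)

    static unsigned align_up(unsigned v, unsigned a) { return (v + a - 1) & ~(a - 1); }

    /* Malloc'd (CPU-side) path of lines 144-153: over-allocate by 'adj' so the
     * pointer handed back keeps the same low bits as box.x. */
    static uint8_t *staging_alloc(unsigned box_x, unsigned width, uint8_t **base_out)
    {
       const unsigned adj  = box_x & MIN_MAP_ALIGN_MASK;
       const unsigned size = align_up(width, 4) + adj;
       uint8_t *map = aligned_alloc(MIN_MAP_ALIGN, align_up(size, MIN_MAP_ALIGN));
       *base_out = map;                  /* keep the real allocation for free() */
       return map ? map + adj : NULL;
    }

    int main(void)
    {
       uint8_t *base;
       uint8_t *map = staging_alloc(6, 10, &base);  /* box.x = 6, width = 10 */
       if (map)
          printf("bias kept by the staging pointer: %td\n", map - base); /* 2 */
       free(base);
       return 0;
    }
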
171 nouveau_transfer_read(struct nouveau_context *nv, struct nouveau_transfer *tx) in nouveau_transfer_read() argument
173 struct nv04_resource *buf = nv04_resource(tx->base.resource); in nouveau_transfer_read()
174 const unsigned base = tx->base.box.x; in nouveau_transfer_read()
175 const unsigned size = tx->base.box.width; in nouveau_transfer_read()
179 nv->copy_data(nv, tx->bo, tx->offset, NOUVEAU_BO_GART, in nouveau_transfer_read()
182 if (nouveau_bo_wait(tx->bo, NOUVEAU_BO_RD, nv->client)) in nouveau_transfer_read()
186 memcpy(buf->data + base, tx->map, size); in nouveau_transfer_read()
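
nouveau_transfer_read (lines 171-186) pulls the current GPU contents back: the resource's bytes at box.x are copied into the staging bo on the GPU (nv->copy_data), the bo is waited on for reading, and the result is mirrored into the buffer's CPU shadow at the same offset. The sketch below keeps only that ordering; device_copy and device_wait_rd are hypothetical stand-ins (a plain memcpy and a no-op) for the GPU copy and the fence wait.

    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    /* Stand-ins: in the driver these are nv->copy_data() and nouveau_bo_wait(). */
    static bool device_copy(void *dst, const void *src, unsigned size)
    { memcpy(dst, src, size); return true; }
    static bool device_wait_rd(void) { return true; }

    /* Read-back order of lines 179-186: copy into staging, wait, then mirror
     * into the CPU shadow at base = box.x (buf->data + base in the listing). */
    static int transfer_read_sketch(uint8_t *shadow, const uint8_t *gpu_contents,
                                    uint8_t *staging, unsigned box_x, unsigned width)
    {
       if (!device_copy(staging, gpu_contents + box_x, width))
          return -1;
       if (!device_wait_rd())
          return -1;
       memcpy(shadow + box_x, staging, width);
       return 0;
    }
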
192 nouveau_transfer_write(struct nouveau_context *nv, struct nouveau_transfer *tx, in nouveau_transfer_write() argument
195 struct nv04_resource *buf = nv04_resource(tx->base.resource); in nouveau_transfer_write()
196 uint8_t *data = tx->map + offset; in nouveau_transfer_write()
197 const unsigned base = tx->base.box.x + offset; in nouveau_transfer_write()
210 if (tx->bo) in nouveau_transfer_write()
212 tx->bo, tx->offset + offset, NOUVEAU_BO_GART, size); in nouveau_transfer_write()
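
nouveau_transfer_write (lines 192-212) is the mirror image: the bytes sitting at tx->map + offset correspond to buffer offset base = box.x + offset, and when the staging area is a GART bo the copy back is issued on the GPU from tx->offset + offset. In this hypothetical sketch a memcpy stands in for that GPU copy.

    #include <stdint.h>
    #include <string.h>

    /* Write-back sketch of lines 195-212: push 'size' bytes held at
     * staging + offset to buffer offset box.x + offset.  In the driver the
     * copy comes from tx->bo at tx->offset + offset when staging is a bo. */
    static void transfer_write_sketch(uint8_t *buffer, const uint8_t *staging,
                                      unsigned box_x, unsigned offset, unsigned size)
    {
       const uint8_t *data = staging + offset;   /* tx->map + offset */
       const unsigned base = box_x + offset;     /* tx->base.box.x + offset */
       memcpy(buffer + base, data, size);
    }
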
263 nouveau_buffer_transfer_init(struct nouveau_transfer *tx, in nouveau_buffer_transfer_init() argument
268 tx->base.resource = resource; in nouveau_buffer_transfer_init()
269 tx->base.level = 0; in nouveau_buffer_transfer_init()
270 tx->base.usage = usage; in nouveau_buffer_transfer_init()
271 tx->base.box.x = box->x; in nouveau_buffer_transfer_init()
272 tx->base.box.y = 0; in nouveau_buffer_transfer_init()
273 tx->base.box.z = 0; in nouveau_buffer_transfer_init()
274 tx->base.box.width = box->width; in nouveau_buffer_transfer_init()
275 tx->base.box.height = 1; in nouveau_buffer_transfer_init()
276 tx->base.box.depth = 1; in nouveau_buffer_transfer_init()
277 tx->base.stride = 0; in nouveau_buffer_transfer_init()
278 tx->base.layer_stride = 0; in nouveau_buffer_transfer_init()
280 tx->bo = NULL; in nouveau_buffer_transfer_init()
281 tx->map = NULL; in nouveau_buffer_transfer_init()
286 struct nouveau_transfer *tx) in nouveau_buffer_transfer_del() argument
288 if (tx->map) { in nouveau_buffer_transfer_del()
289 if (likely(tx->bo)) { in nouveau_buffer_transfer_del()
291 nouveau_fence_unref_bo, tx->bo); in nouveau_buffer_transfer_del()
292 if (tx->mm) in nouveau_buffer_transfer_del()
293 release_allocation(&tx->mm, nv->screen->fence.current); in nouveau_buffer_transfer_del()
295 align_free(tx->map - in nouveau_buffer_transfer_del()
296 (tx->base.box.x & NOUVEAU_MIN_BUFFER_MAP_ALIGN_MASK)); in nouveau_buffer_transfer_del()
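
nouveau_buffer_transfer_del (lines 286-296) tears the staging area down along two paths: a GART bo is only unreferenced once the current fence signals (nouveau_fence_unref_bo) and its suballocation is released against that fence, while malloc'd staging is freed immediately after undoing the adj bias that nouveau_transfer_staging added. A hypothetical sketch of that decision, reusing the MIN_MAP_ALIGN_MASK stand-in from the staging sketch:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdlib.h>

    #define MIN_MAP_ALIGN_MASK 3   /* hypothetical, as in the staging sketch */

    /* Teardown sketch of lines 288-296. */
    static void transfer_del_sketch(uint8_t *map, bool staging_is_bo, unsigned box_x)
    {
       if (!map)
          return;
       if (staging_is_bo) {
          /* driver: defer nouveau_fence_unref_bo(tx->bo) to the current fence
           * and release the mm suballocation against that same fence */
       } else {
          free(map - (box_x & MIN_MAP_ALIGN_MASK));  /* undo the 'adj' bias */
       }
    }
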
305 struct nouveau_transfer tx; in nouveau_buffer_cache() local
307 tx.base.resource = &buf->base; in nouveau_buffer_cache()
308 tx.base.box.x = 0; in nouveau_buffer_cache()
309 tx.base.box.width = buf->base.width0; in nouveau_buffer_cache()
310 tx.bo = NULL; in nouveau_buffer_cache()
311 tx.map = NULL; in nouveau_buffer_cache()
320 if (!nouveau_transfer_staging(nv, &tx, false)) in nouveau_buffer_cache()
323 ret = nouveau_transfer_read(nv, &tx); in nouveau_buffer_cache()
326 memcpy(buf->data, tx.map, buf->base.width0); in nouveau_buffer_cache()
328 nouveau_buffer_transfer_del(nv, &tx); in nouveau_buffer_cache()
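
nouveau_buffer_cache (lines 305-328) is a whole-buffer use of the helpers above: a temporary transfer covering [0, width0) is staged, read back, copied into the CPU shadow, and deleted. As a usage example written in terms of the hypothetical stand-ins from the earlier sketches (same headers assumed):

    /* Whole-buffer caching, as in lines 307-328: box.x = 0, width = width0. */
    static int buffer_cache_sketch(uint8_t *shadow, const uint8_t *gpu_contents,
                                   unsigned width0)
    {
       uint8_t *staging = malloc(width0);    /* ~ nouveau_transfer_staging(.., false) */
       if (!staging)
          return -1;
       int ret = transfer_read_sketch(shadow, gpu_contents, staging, 0, width0);
       free(staging);                        /* ~ nouveau_buffer_transfer_del */
       return ret;
    }
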
384 struct nouveau_transfer *tx = MALLOC_STRUCT(nouveau_transfer); in nouveau_buffer_transfer_map() local
388 if (!tx) in nouveau_buffer_transfer_map()
390 nouveau_buffer_transfer_init(tx, resource, box, usage); in nouveau_buffer_transfer_map()
391 *ptransfer = &tx->base; in nouveau_buffer_transfer_map()
415 nouveau_transfer_staging(nv, tx, true); in nouveau_buffer_transfer_map()
426 nouveau_transfer_staging(nv, tx, false); in nouveau_buffer_transfer_map()
427 nouveau_transfer_read(nv, tx); in nouveau_buffer_transfer_map()
432 nouveau_transfer_staging(nv, tx, true); in nouveau_buffer_transfer_map()
437 return buf->data ? (buf->data + box->x) : tx->map; in nouveau_buffer_transfer_map()
462 FREE(tx); in nouveau_buffer_transfer_map()
485 nouveau_transfer_staging(nv, tx, true); in nouveau_buffer_transfer_map()
486 map = tx->map; in nouveau_buffer_transfer_map()
496 nouveau_transfer_staging(nv, tx, true); in nouveau_buffer_transfer_map()
497 if (tx->map) in nouveau_buffer_transfer_map()
498 memcpy(tx->map, map, box->width); in nouveau_buffer_transfer_map()
499 map = tx->map; in nouveau_buffer_transfer_map()
503 FREE(tx); in nouveau_buffer_transfer_map()
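
nouveau_buffer_transfer_map (lines 384-503) allocates the nouveau_transfer wrapper, initializes it, and then picks a mapping strategy; the matched lines show several: staging requested with the pushbuf permitted (lines 415, 432, 485), staging plus a read-back (lines 426-427), a direct pointer into the CPU shadow or into the staging map (line 437), and, at lines 496-499, filling a fresh staging area from the currently visible bytes before handing the staging pointer back. The conditions choosing between these paths are not part of the matched lines. Below is a hypothetical, simplified sketch of just that last copy-then-remap step.

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    /* Copy-then-remap sketch of lines 496-499: fill new staging from the bytes
     * currently visible through 'current_map', then return the staging pointer
     * so further CPU writes land there. */
    static uint8_t *remap_to_staging_sketch(const uint8_t *current_map, unsigned width)
    {
       uint8_t *staging = malloc(width);    /* ~ nouveau_transfer_staging(nv, tx, true) */
       if (staging)
          memcpy(staging, current_map, width);   /* lines 497-498 */
       return staging;                           /* line 499: map = tx->map */
    }
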
514 struct nouveau_transfer *tx = nouveau_transfer(transfer); in nouveau_buffer_transfer_flush_region() local
517 if (tx->map) in nouveau_buffer_transfer_flush_region()
518 nouveau_transfer_write(nouveau_context(pipe), tx, box->x, box->width); in nouveau_buffer_transfer_flush_region()
521 tx->base.box.x + box->x, in nouveau_buffer_transfer_flush_region()
522 tx->base.box.x + box->x + box->width); in nouveau_buffer_transfer_flush_region()
536 struct nouveau_transfer *tx = nouveau_transfer(transfer); in nouveau_buffer_transfer_unmap() local
539 if (tx->base.usage & PIPE_MAP_WRITE) { in nouveau_buffer_transfer_unmap()
540 if (!(tx->base.usage & PIPE_MAP_FLUSH_EXPLICIT)) { in nouveau_buffer_transfer_unmap()
541 if (tx->map) in nouveau_buffer_transfer_unmap()
542 nouveau_transfer_write(nv, tx, 0, tx->base.box.width); in nouveau_buffer_transfer_unmap()
545 tx->base.box.x, tx->base.box.x + tx->base.box.width); in nouveau_buffer_transfer_unmap()
556 if (!tx->bo && (tx->base.usage & PIPE_MAP_WRITE)) in nouveau_buffer_transfer_unmap()
557 NOUVEAU_DRV_STAT(nv->screen, buf_write_bytes_direct, tx->base.box.width); in nouveau_buffer_transfer_unmap()
559 nouveau_buffer_transfer_del(nv, tx); in nouveau_buffer_transfer_unmap()
560 FREE(tx); in nouveau_buffer_transfer_unmap()
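
The unmap path (lines 536-560) completes the protocol: for write mappings without PIPE_MAP_FLUSH_EXPLICIT the whole transfer box is written back here, whereas explicitly flushed mappings have already pushed their sub-ranges through nouveau_buffer_transfer_flush_region (lines 514-522), where the flush box is relative to the transfer box (absolute start = tx->base.box.x + box->x). A hypothetical sketch of that rule, building on the transfer_write_sketch stand-in above:

    #include <stdbool.h>

    /* Unmap rule of lines 539-542: flush the full box unless the caller
     * already flushed sub-ranges explicitly. */
    static void transfer_unmap_sketch(uint8_t *buffer, const uint8_t *staging,
                                      unsigned box_x, unsigned box_width,
                                      bool is_write, bool flush_explicit)
    {
       if (is_write && !flush_explicit)
          transfer_write_sketch(buffer, staging, box_x, 0, box_width);
    }
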
655 struct nouveau_transfer *tx = MALLOC_STRUCT(nouveau_transfer); in nouveau_user_ptr_transfer_map() local
656 if (!tx) in nouveau_user_ptr_transfer_map()
658 nouveau_buffer_transfer_init(tx, resource, box, usage); in nouveau_user_ptr_transfer_map()
659 *ptransfer = &tx->base; in nouveau_user_ptr_transfer_map()
667 struct nouveau_transfer *tx = nouveau_transfer(transfer); in nouveau_user_ptr_transfer_unmap() local
668 FREE(tx); in nouveau_user_ptr_transfer_unmap()
866 struct nouveau_transfer tx; in nouveau_buffer_migrate() local
869 tx.base.resource = &buf->base; in nouveau_buffer_migrate()
870 tx.base.box.x = 0; in nouveau_buffer_migrate()
871 tx.base.box.width = buf->base.width0; in nouveau_buffer_migrate()
872 tx.bo = NULL; in nouveau_buffer_migrate()
873 tx.map = NULL; in nouveau_buffer_migrate()
874 if (!nouveau_transfer_staging(nv, &tx, false)) in nouveau_buffer_migrate()
876 nouveau_transfer_write(nv, &tx, 0, tx.base.box.width); in nouveau_buffer_migrate()
877 nouveau_buffer_transfer_del(nv, &tx); in nouveau_buffer_migrate()
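
nouveau_buffer_migrate (lines 866-877) uses the same temporary-transfer pattern in the write direction: a transfer covering the full buffer is staged, the buffer's contents are pushed out through nouveau_transfer_write over the whole width, and the staging is deleted. Where the bytes come from is not part of the matched lines; the sketch below assumes a CPU-side copy and reuses the earlier hypothetical stand-ins.

    /* Whole-buffer write-out, as in lines 869-877: box.x = 0, width = width0. */
    static int buffer_migrate_sketch(uint8_t *dest, const uint8_t *shadow, unsigned width0)
    {
       uint8_t *staging = malloc(width0);    /* ~ nouveau_transfer_staging(.., false) */
       if (!staging)
          return -1;
       memcpy(staging, shadow, width0);      /* assumed source: the CPU shadow copy */
       transfer_write_sketch(dest, staging, 0, 0, width0);   /* line 876 */
       free(staging);                        /* ~ nouveau_buffer_transfer_del */
       return 0;
    }
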