
Searched refs:bufs (Results 1 – 25 of 83) sorted by relevance

/drivers/gpu/drm/
drm_dma.c
68 memset(&dev->dma->bufs[i], 0, sizeof(dev->dma->bufs[0])); in drm_legacy_dma_setup()
96 if (dma->bufs[i].seg_count) { in drm_legacy_dma_takedown()
100 dma->bufs[i].buf_count, in drm_legacy_dma_takedown()
101 dma->bufs[i].seg_count); in drm_legacy_dma_takedown()
102 for (j = 0; j < dma->bufs[i].seg_count; j++) { in drm_legacy_dma_takedown()
103 if (dma->bufs[i].seglist[j]) { in drm_legacy_dma_takedown()
104 dmah = dma->bufs[i].seglist[j]; in drm_legacy_dma_takedown()
112 kfree(dma->bufs[i].seglist); in drm_legacy_dma_takedown()
114 if (dma->bufs[i].buf_count) { in drm_legacy_dma_takedown()
115 for (j = 0; j < dma->bufs[i].buf_count; j++) { in drm_legacy_dma_takedown()
[all …]
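
Note: the takedown hits above show a two-level free: each entry owns an array of DMA segments (seglist), so the segments are released first and only then the seglist array itself. A minimal standalone sketch of that ordering, with hypothetical reduced types rather than the real drm_device_dma layout:

    #include <linux/slab.h>

    struct seg_entry {
        void **seglist;        /* one allocation per segment */
        int seg_count;
    };

    static void seg_entry_takedown(struct seg_entry *e)
    {
        int j;

        /* free the segments before the array that tracks them */
        for (j = 0; j < e->seg_count; j++)
            kfree(e->seglist[j]);
        kfree(e->seglist);
        e->seglist = NULL;
        e->seg_count = 0;
    }
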
/drivers/net/ethernet/cisco/enic/
vnic_wq.c
38 wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ(count), GFP_KERNEL); in vnic_wq_alloc_bufs()
39 if (!wq->bufs[i]) in vnic_wq_alloc_bufs()
44 buf = wq->bufs[i]; in vnic_wq_alloc_bufs()
50 buf->next = wq->bufs[0]; in vnic_wq_alloc_bufs()
54 buf->next = wq->bufs[i + 1]; in vnic_wq_alloc_bufs()
64 wq->to_use = wq->to_clean = wq->bufs[0]; in vnic_wq_alloc_bufs()
79 if (wq->bufs[i]) { in vnic_wq_free()
80 kfree(wq->bufs[i]); in vnic_wq_free()
81 wq->bufs[i] = NULL; in vnic_wq_free()
153 &wq->bufs[fetch_index / VNIC_WQ_BUF_BLK_ENTRIES(count)] in enic_wq_init_start()
[all …]
vnic_rq.c
38 rq->bufs[i] = kzalloc(VNIC_RQ_BUF_BLK_SZ(count), GFP_KERNEL); in vnic_rq_alloc_bufs()
39 if (!rq->bufs[i]) in vnic_rq_alloc_bufs()
44 buf = rq->bufs[i]; in vnic_rq_alloc_bufs()
50 buf->next = rq->bufs[0]; in vnic_rq_alloc_bufs()
53 buf->next = rq->bufs[i + 1]; in vnic_rq_alloc_bufs()
61 rq->to_use = rq->to_clean = rq->bufs[0]; in vnic_rq_alloc_bufs()
76 if (rq->bufs[i]) { in vnic_rq_free()
77 kfree(rq->bufs[i]); in vnic_rq_free()
78 rq->bufs[i] = NULL; in vnic_rq_free()
134 &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES(count)] in vnic_rq_init_start()
[all …]
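
Note: the WQ and RQ allocators above are structurally identical. They allocate the ring's bookkeeping as an array of buffer blocks, then chain every entry's ->next pointer: within a block to the next entry, at a block boundary to the first entry of the following block, and at the final entry back to bufs[0], closing the ring (the fnic and snic hits below repeat the same pattern). A minimal sketch of that chaining, with hypothetical names:

    #include <linux/slab.h>

    struct ring_buf {
        struct ring_buf *next;
        unsigned int index;
    };

    /* count entries total, blk_entries per block, blks blocks overall */
    static int ring_chain_bufs(struct ring_buf **bufs, unsigned int blks,
                               unsigned int blk_entries, unsigned int count)
    {
        struct ring_buf *buf;
        unsigned int i, j;

        for (i = 0; i < blks; i++) {
            bufs[i] = kcalloc(blk_entries, sizeof(**bufs), GFP_KERNEL);
            if (!bufs[i])
                return -ENOMEM;   /* caller frees what was allocated */
        }

        for (i = 0; i < blks; i++) {
            buf = bufs[i];
            for (j = 0; j < blk_entries; j++, buf++) {
                buf->index = i * blk_entries + j;
                if (buf->index + 1 == count) {
                    buf->next = bufs[0];        /* wrap: ring is closed */
                    break;
                } else if (j + 1 == blk_entries) {
                    buf->next = bufs[i + 1];    /* hop to the next block */
                } else {
                    buf->next = buf + 1;        /* next entry in this block */
                }
            }
        }
        return 0;
    }
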
/drivers/scsi/fnic/
vnic_rq.c
34 rq->bufs[i] = kzalloc(VNIC_RQ_BUF_BLK_SZ, GFP_ATOMIC); in vnic_rq_alloc_bufs()
35 if (!rq->bufs[i]) { in vnic_rq_alloc_bufs()
42 buf = rq->bufs[i]; in vnic_rq_alloc_bufs()
48 buf->next = rq->bufs[0]; in vnic_rq_alloc_bufs()
51 buf->next = rq->bufs[i + 1]; in vnic_rq_alloc_bufs()
59 rq->to_use = rq->to_clean = rq->bufs[0]; in vnic_rq_alloc_bufs()
75 kfree(rq->bufs[i]); in vnic_rq_free()
76 rq->bufs[i] = NULL; in vnic_rq_free()
130 &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES] in vnic_rq_init()
186 &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES] in vnic_rq_clean()
vnic_wq.c
54 wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ, GFP_ATOMIC); in vnic_wq_alloc_bufs()
55 if (!wq->bufs[i]) { in vnic_wq_alloc_bufs()
62 buf = wq->bufs[i]; in vnic_wq_alloc_bufs()
68 buf->next = wq->bufs[0]; in vnic_wq_alloc_bufs()
71 buf->next = wq->bufs[i + 1]; in vnic_wq_alloc_bufs()
79 wq->to_use = wq->to_clean = wq->bufs[0]; in vnic_wq_alloc_bufs()
94 kfree(wq->bufs[i]); in vnic_wq_free()
95 wq->bufs[i] = NULL; in vnic_wq_free()
172 &wq->bufs[fetch_index / VNIC_WQ_BUF_BLK_ENTRIES] in vnic_wq_init_start()
239 wq->to_use = wq->to_clean = wq->bufs[0]; in vnic_wq_clean()
/drivers/scsi/snic/
vnic_wq.c
50 wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ, GFP_ATOMIC); in vnic_wq_alloc_bufs()
51 if (!wq->bufs[i]) { in vnic_wq_alloc_bufs()
59 buf = wq->bufs[i]; in vnic_wq_alloc_bufs()
65 buf->next = wq->bufs[0]; in vnic_wq_alloc_bufs()
68 buf->next = wq->bufs[i + 1]; in vnic_wq_alloc_bufs()
76 wq->to_use = wq->to_clean = wq->bufs[0]; in vnic_wq_alloc_bufs()
91 kfree(wq->bufs[i]); in svnic_wq_free()
92 wq->bufs[i] = NULL; in svnic_wq_free()
173 &wq->bufs[fetch_index / VNIC_WQ_BUF_BLK_ENTRIES(count)] in vnic_wq_init_start()
230 wq->to_use = wq->to_clean = wq->bufs[0]; in svnic_wq_clean()
/drivers/media/v4l2-core/
videobuf-core.c
204 if (NULL == q->bufs[i]) in videobuf_queue_is_busy()
206 if (q->bufs[i]->map) { in videobuf_queue_is_busy()
210 if (q->bufs[i]->state == VIDEOBUF_QUEUED) { in videobuf_queue_is_busy()
214 if (q->bufs[i]->state == VIDEOBUF_ACTIVE) { in videobuf_queue_is_busy()
246 if (q->bufs[i] && q->bufs[i]->map) { in __videobuf_free()
252 if (NULL == q->bufs[i]) in __videobuf_free()
254 q->ops->buf_release(q, q->bufs[i]); in __videobuf_free()
255 kfree(q->bufs[i]); in __videobuf_free()
256 q->bufs[i] = NULL; in __videobuf_free()
275 if (NULL == q->bufs[i]) in videobuf_queue_cancel()
[all …]
videobuf-dma-sg.c
409 if (NULL == q->bufs[i]) in videobuf_vm_close()
411 mem = q->bufs[i]->priv; in videobuf_vm_close()
417 if (q->bufs[i]->map != map) in videobuf_vm_close()
419 q->bufs[i]->map = NULL; in videobuf_vm_close()
420 q->bufs[i]->baddr = 0; in videobuf_vm_close()
421 q->ops->buf_release(q, q->bufs[i]); in videobuf_vm_close()
604 if (buf == q->bufs[first]) { in __videobuf_mmap_mapper()
605 size = PAGE_ALIGN(q->bufs[first]->bsize); in __videobuf_mmap_mapper()
627 if (NULL == q->bufs[i]) in __videobuf_mmap_mapper()
629 q->bufs[i]->map = map; in __videobuf_mmap_mapper()
[all …]
videobuf-vmalloc.c
82 if (NULL == q->bufs[i]) in videobuf_vm_close()
85 if (q->bufs[i]->map != map) in videobuf_vm_close()
88 mem = q->bufs[i]->priv; in videobuf_vm_close()
108 q->bufs[i]->map = NULL; in videobuf_vm_close()
109 q->bufs[i]->baddr = 0; in videobuf_vm_close()
videobuf-dma-contig.c
93 if (NULL == q->bufs[i]) in videobuf_vm_close()
96 if (q->bufs[i]->map != map) in videobuf_vm_close()
99 mem = q->bufs[i]->priv; in videobuf_vm_close()
119 q->bufs[i]->map = NULL; in videobuf_vm_close()
120 q->bufs[i]->baddr = 0; in videobuf_vm_close()
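
Note: all four videobuf backends above (core, dma-sg, vmalloc, dma-contig) share the same vm_close walk over q->bufs[]: skip empty slots, skip buffers that belong to a different mapping, then clear the map and kernel virtual address and let the driver release the buffer. A condensed sketch of that walk, using the real videobuf types but a simplified body:

    #include <media/videobuf-core.h>

    static void videobuf_unmap_walk(struct videobuf_queue *q,
                                    struct videobuf_mapping *map)
    {
        unsigned int i;

        for (i = 0; i < VIDEO_MAX_FRAME; i++) {
            if (!q->bufs[i])
                continue;                 /* slot never allocated */
            if (q->bufs[i]->map != map)
                continue;                 /* owned by another mapping */
            q->bufs[i]->map = NULL;
            q->bufs[i]->baddr = 0;
            q->ops->buf_release(q, q->bufs[i]);
        }
    }
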
/drivers/scsi/arm/
scsi.h
26 int bufs = SCp->buffers_residual; in copy_SCp_to_sg() local
31 BUG_ON(bufs + 1 > max); in copy_SCp_to_sg()
35 if (bufs) { in copy_SCp_to_sg()
39 for_each_sg(sg_next(SCp->buffer), src_sg, bufs, i) in copy_SCp_to_sg()
44 return bufs + 1; in copy_SCp_to_sg()
powertec.c
137 int bufs, map_dir, dma_dir; in powertecscsi_dma_setup() local
139 bufs = copy_SCp_to_sg(&info->sg[0], SCp, NR_SG); in powertecscsi_dma_setup()
149 dma_map_sg(dev, info->sg, bufs, map_dir); in powertecscsi_dma_setup()
152 set_dma_sg(dmach, info->sg, bufs); in powertecscsi_dma_setup()
cumana_2.c
165 int bufs, map_dir, dma_dir, alatch_dir; in cumanascsi_2_dma_setup() local
167 bufs = copy_SCp_to_sg(&info->sg[0], SCp, NR_SG); in cumanascsi_2_dma_setup()
179 dma_map_sg(dev, info->sg, bufs, map_dir); in cumanascsi_2_dma_setup()
182 set_dma_sg(dmach, info->sg, bufs); in cumanascsi_2_dma_setup()
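
Note: copy_SCp_to_sg() returns bufs + 1 because SCp->buffers_residual counts the scatterlist entries remaining after the current one, so the current buffer always contributes one extra entry (hence the BUG_ON(bufs + 1 > max) check). Both board drivers then feed that count straight into the mapping and DMA-channel setup. A condensed shape of the shared sequence, with identifiers from the hits above and error handling omitted:

    bufs = copy_SCp_to_sg(&info->sg[0], SCp, NR_SG);  /* SG entries filled */
    dma_map_sg(dev, info->sg, bufs, map_dir);         /* get bus addresses */
    set_dma_sg(dmach, info->sg, bufs);                /* program the channel */
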
/drivers/net/ethernet/marvell/prestera/
prestera_rxtx.c
84 struct prestera_sdma_buf *bufs; member
89 struct prestera_sdma_buf *bufs; member
300 buf = &ring->bufs[buf_idx]; in prestera_sdma_rx_poll()
348 if (!ring->bufs) in prestera_sdma_rx_fini()
352 struct prestera_sdma_buf *buf = &ring->bufs[b]; in prestera_sdma_rx_fini()
385 ring->bufs = kmalloc_array(bnum, sizeof(*head), GFP_KERNEL); in prestera_sdma_rx_init()
386 if (!ring->bufs) in prestera_sdma_rx_init()
391 tail = &ring->bufs[bnum - 1]; in prestera_sdma_rx_init()
392 head = &ring->bufs[0]; in prestera_sdma_rx_init()
506 struct prestera_sdma_buf *buf = &tx_ring->bufs[b]; in prestera_sdma_tx_recycle_work_fn()
[all …]
/drivers/soc/fsl/qbman/
bman.c
113 struct bm_buffer bufs[8]; member
151 struct bm_buffer bufs[8]; member
738 int bman_release(struct bman_pool *pool, const struct bm_buffer *bufs, u8 num) in bman_release() argument
772 bm_buffer_set64(r->bufs, bm_buffer_get64(bufs)); in bman_release()
773 bm_buffer_set_bpid(r->bufs, pool->bpid); in bman_release()
775 memcpy(&r->bufs[1], &bufs[1], i * sizeof(bufs[0])); in bman_release()
786 int bman_acquire(struct bman_pool *pool, struct bm_buffer *bufs, u8 num) in bman_acquire() argument
805 if (bufs) in bman_acquire()
806 memcpy(&bufs[0], &mcr->bufs[0], num * sizeof(bufs[0])); in bman_acquire()
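
Note: the bufs[8] members above reflect the hardware limit: BMan release and acquire commands move at most eight bm_buffer entries per call, and bman_release() stamps the first buffer with the pool's BPID before copying the rest of the caller's array into the command ring entry. A hedged usage sketch against the public API in soc/fsl/bman.h:

    #include <soc/fsl/bman.h>

    /* Push one buffer into a BMan pool and pull one back out.
     * Real users batch up to 8 per call; 1 keeps the sketch minimal. */
    static int bman_recycle_one(struct bman_pool *pool, dma_addr_t addr)
    {
        struct bm_buffer buf;
        int ret;

        bm_buffer_set64(&buf, addr);         /* 48-bit buffer address */
        ret = bman_release(pool, &buf, 1);
        if (ret)
            return ret;

        /* returns the number of buffers acquired, or a negative errno */
        return bman_acquire(pool, &buf, 1);
    }
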
/drivers/media/common/videobuf2/
videobuf2-core.c
368 struct vb2_buffer *prev = q->bufs[vb->index - 1]; in __setup_offsets()
435 q->bufs[vb->index] = vb; in __vb2_queue_alloc()
443 q->bufs[vb->index] = NULL; in __vb2_queue_alloc()
458 q->bufs[vb->index] = NULL; in __vb2_queue_alloc()
481 vb = q->bufs[buffer]; in __vb2_free_mem()
514 if (q->bufs[buffer] == NULL) in __vb2_queue_free()
516 if (q->bufs[buffer]->state == VB2_BUF_STATE_PREPARING) { in __vb2_queue_free()
525 struct vb2_buffer *vb = q->bufs[buffer]; in __vb2_queue_free()
560 struct vb2_buffer *vb = q->bufs[buffer]; in __vb2_queue_free()
600 kfree(q->bufs[buffer]); in __vb2_queue_free()
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/fpga/
conn.c
122 conn->qp.rq.bufs[ix] = buf; in mlx5_fpga_conn_post_recv()
171 conn->qp.sq.bufs[ix] = buf; in mlx5_fpga_conn_post_send()
257 buf = conn->qp.rq.bufs[ix]; in mlx5_fpga_conn_rq_cqe()
258 conn->qp.rq.bufs[ix] = NULL; in mlx5_fpga_conn_rq_cqe()
300 buf = conn->qp.sq.bufs[ix]; in mlx5_fpga_conn_sq_cqe()
301 conn->qp.sq.bufs[ix] = NULL; in mlx5_fpga_conn_sq_cqe()
536 conn->qp.rq.bufs = kvcalloc(conn->qp.rq.size, in mlx5_fpga_conn_create_qp()
537 sizeof(conn->qp.rq.bufs[0]), in mlx5_fpga_conn_create_qp()
539 if (!conn->qp.rq.bufs) { in mlx5_fpga_conn_create_qp()
544 conn->qp.sq.bufs = kvcalloc(conn->qp.sq.size, in mlx5_fpga_conn_create_qp()
[all …]
conn.h
74 struct mlx5_fpga_dma_buf **bufs; member
81 struct mlx5_fpga_dma_buf **bufs; member
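
Note: here bufs is in-flight tracking for a QP: conn.h declares one pointer array per queue (kvcalloc'd to the queue size in mlx5_fpga_conn_create_qp()), posting a work request stores the DMA buffer at the slot index, and the completion handler claims the pointer back and NULLs the slot. A minimal sketch of that slot-table pattern, with hypothetical names and a power-of-two size assumed:

    #include <linux/slab.h>   /* kvcalloc()/kvfree() */

    struct slot_table {
        void **bufs;
        unsigned int size;   /* power of two */
    };

    static int slots_init(struct slot_table *t, unsigned int size)
    {
        t->bufs = kvcalloc(size, sizeof(t->bufs[0]), GFP_KERNEL);
        if (!t->bufs)
            return -ENOMEM;
        t->size = size;
        return 0;
    }

    /* post path: remember what occupies the slot the hardware completes */
    static void slot_post(struct slot_table *t, unsigned int pc, void *buf)
    {
        t->bufs[pc & (t->size - 1)] = buf;
    }

    /* completion path: claim the buffer and make the slot reusable */
    static void *slot_complete(struct slot_table *t, unsigned int ci)
    {
        unsigned int ix = ci & (t->size - 1);
        void *buf = t->bufs[ix];

        t->bufs[ix] = NULL;
        return buf;
    }
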
/drivers/md/
dm-verity-fec.c
108 return &fio->bufs[i][j * v->fec->rsn]; in fec_buffer_rs_block()
314 if (fio->bufs[n]) in fec_alloc_bufs()
317 fio->bufs[n] = mempool_alloc(&v->fec->prealloc_pool, GFP_NOWAIT); in fec_alloc_bufs()
318 if (unlikely(!fio->bufs[n])) { in fec_alloc_bufs()
326 if (fio->bufs[n]) in fec_alloc_bufs()
329 fio->bufs[n] = mempool_alloc(&v->fec->extra_pool, GFP_NOWAIT); in fec_alloc_bufs()
331 if (unlikely(!fio->bufs[n])) in fec_alloc_bufs()
351 memset(fio->bufs[n], 0, v->fec->rsn << DM_VERITY_FEC_BUF_RS_BITS); in fec_init_bufs()
498 mempool_free(fio->bufs[n], &f->prealloc_pool); in verity_fec_finish_io()
501 mempool_free(fio->bufs[n], &f->extra_pool); in verity_fec_finish_io()
[all …]
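
Note: fec_alloc_bufs() above allocates Reed-Solomon buffers in two tiers, both with GFP_NOWAIT so the I/O path never blocks: the sized preallocated pool first, then a smaller "extra" pool, and a failure simply means decoding proceeds with fewer buffers. A condensed shape of that fallback, with fields as in the hits (the original spreads this across two loops):

    fio->bufs[n] = mempool_alloc(&v->fec->prealloc_pool, GFP_NOWAIT);
    if (unlikely(!fio->bufs[n]))
        fio->bufs[n] = mempool_alloc(&v->fec->extra_pool, GFP_NOWAIT);
    if (unlikely(!fio->bufs[n]))
        break;   /* work with the buffers already acquired */
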
/drivers/infiniband/hw/hns/
hns_roce_alloc.c
133 int hns_roce_get_kmem_bufs(struct hns_roce_dev *hr_dev, dma_addr_t *bufs, in hns_roce_get_kmem_bufs() argument
150 bufs[total++] = hns_roce_buf_dma_addr(buf, offset); in hns_roce_get_kmem_bufs()
157 int hns_roce_get_umem_bufs(struct hns_roce_dev *hr_dev, dma_addr_t *bufs, in hns_roce_get_umem_bufs() argument
166 bufs[total++] = rdma_block_iter_dma_address(&biter); in hns_roce_get_umem_bufs()
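
Note: both helpers above harvest per-block DMA addresses into a caller-supplied bufs[] array, one from a kernel buffer and one from a umem via the RDMA block iterator, and return how many entries they wrote. A standalone sketch of the idea for a contiguous region, with a hypothetical signature:

    #include <linux/types.h>

    /* Record the DMA address of every block-sized chunk of [base, base+size)
     * into bufs[], up to buf_cnt entries; returns the number written. */
    static int collect_block_addrs(dma_addr_t *bufs, int buf_cnt,
                                   dma_addr_t base, size_t size, size_t blk_sz)
    {
        int total = 0;
        size_t offset;

        for (offset = 0; offset < size && total < buf_cnt; offset += blk_sz)
            bufs[total++] = base + offset;

        return total;
    }
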
/drivers/gpu/drm/tegra/
submit.c
238 struct drm_tegra_submit_buf *bufs; in submit_process_bufs() local
242 bufs = alloc_copy_user_array(u64_to_user_ptr(args->bufs_ptr), args->num_bufs, in submit_process_bufs()
243 sizeof(*bufs)); in submit_process_bufs()
244 if (IS_ERR(bufs)) { in submit_process_bufs()
246 return PTR_ERR(bufs); in submit_process_bufs()
257 struct drm_tegra_submit_buf *buf = &bufs[i]; in submit_process_bufs()
298 kvfree(bufs); in submit_process_bufs()
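
Note: submit_process_bufs() above pulls the userspace array of drm_tegra_submit_buf descriptors into the kernel with alloc_copy_user_array(), checks the result with IS_ERR(), and kvfree()s the copy when done. A hedged sketch of what a helper of that shape does (not the exact Tegra implementation):

    #include <linux/err.h>
    #include <linux/overflow.h>
    #include <linux/slab.h>
    #include <linux/uaccess.h>

    static void *copy_user_array(void __user *from, unsigned long count,
                                 size_t entry_size)
    {
        size_t copy_len;
        void *data;

        if (check_mul_overflow(count, entry_size, &copy_len))
            return ERR_PTR(-EINVAL);   /* count * size would overflow */

        data = kvmalloc(copy_len, GFP_KERNEL);
        if (!data)
            return ERR_PTR(-ENOMEM);

        if (copy_from_user(data, from, copy_len)) {
            kvfree(data);
            return ERR_PTR(-EFAULT);
        }

        return data;   /* caller kvfree()s, as submit_process_bufs() does */
    }
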
/drivers/net/ethernet/atheros/alx/
main.c
56 struct alx_buffer *txb = &txq->bufs[entry]; in alx_free_txbuf()
83 cur_buf = &rxq->bufs[cur]; in alx_refill_rx_ring()
128 cur_buf = &rxq->bufs[cur]; in alx_refill_rx_ring()
183 skb = txq->bufs[sw_read_idx].skb; in alx_clean_tx_irq()
242 rxb = &rxq->bufs[rxq->read_idx]; in alx_clean_rx_irq()
493 if (!txq->bufs) in alx_free_txring_buf()
499 memset(txq->bufs, 0, txq->count * sizeof(struct alx_buffer)); in alx_free_txring_buf()
512 if (!rxq->bufs) in alx_free_rxring_buf()
516 cur_buf = rxq->bufs + i; in alx_free_rxring_buf()
620 txq->bufs = kcalloc(txq->count, sizeof(struct alx_buffer), GFP_KERNEL); in alx_alloc_tx_ring()
[all …]
alx.h
63 struct alx_buffer *bufs; member
79 struct alx_buffer *bufs; member
/drivers/net/ethernet/mellanox/mlxsw/
spectrum_buffers.c
325 hdroom->bufs.buf[i].lossy = true; in mlxsw_sp_hdroom_bufs_reset_lossiness()
329 hdroom->bufs.buf[hdroom->prios.prio[prio].buf_idx].lossy = false; in mlxsw_sp_hdroom_bufs_reset_lossiness()
403 struct mlxsw_sp_hdroom_buf *buf = &hdroom->bufs.buf[i]; in mlxsw_sp_hdroom_bufs_reset_sizes()
444 dirty = memcmp(&mlxsw_sp_port->hdroom->bufs, &hdroom->bufs, sizeof(hdroom->bufs)); in mlxsw_sp_hdroom_configure_buffers()
450 const struct mlxsw_sp_hdroom_buf *buf = &hdroom->bufs.buf[i]; in mlxsw_sp_hdroom_configure_buffers()
463 mlxsw_sp_port->hdroom->bufs = hdroom->bufs; in mlxsw_sp_hdroom_configure_buffers()
518 taken_headroom_cells += hdroom->bufs.buf[i].size_cells; in mlxsw_sp_hdroom_bufs_fit()
543 if (hdroom->bufs.buf[i].size_cells) in __mlxsw_sp_hdroom_configure()
544 tmp_hdroom.bufs.buf[i] = hdroom->bufs.buf[i]; in __mlxsw_sp_hdroom_configure()
602 hdroom.bufs.buf[9].size_cells = mlxsw_sp_bytes_cells(mlxsw_sp, size9); in mlxsw_sp_port_headroom_init()
/drivers/net/ethernet/pensando/ionic/
ionic_txrx.c
134 buf_info = &desc_info->bufs[0]; in ionic_rx_frags()
193 buf_info = &desc_info->bufs[0]; in ionic_rx_copybreak()
380 buf_info = &desc_info->bufs[0]; in ionic_rx_fill()
447 buf_info = &desc_info->bufs[j]; in ionic_rx_empty()
642 struct ionic_buf_info *buf_info = desc_info->bufs; in ionic_tx_map_skb()
693 struct ionic_buf_info *buf_info = desc_info->bufs; in ionic_tx_clean()
929 buf_info = desc_info->bufs; in ionic_tx_tso()
1027 struct ionic_buf_info *buf_info = desc_info->bufs; in ionic_tx_calc_csum()
1066 struct ionic_buf_info *buf_info = desc_info->bufs; in ionic_tx_calc_no_csum()
1102 struct ionic_buf_info *buf_info = &desc_info->bufs[1]; in ionic_tx_skb_frags()
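
Note: in the ionic hits, desc_info->bufs is a small per-descriptor array: bufs[0] describes the first (linear) part of the packet and bufs[1] onward the page fragments, which is why ionic_tx_skb_frags() starts from &desc_info->bufs[1] while the fill and clean paths start from bufs[0] or the whole array. A sketch of the implied layout, with a hypothetical reduced struct and an illustrative fragment bound:

    #include <linux/types.h>

    #define MAX_FRAGS 16   /* illustrative bound, not the driver's value */

    struct buf_info_sketch {
        dma_addr_t dma_addr;
        unsigned int len;
    };

    struct desc_info_sketch {
        struct buf_info_sketch bufs[1 + MAX_FRAGS]; /* [0] linear, [1..] frags */
        unsigned int nbufs;
    };
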
