/drivers/staging/lustre/lustre/include/

D | lustre_cfg.h
  124  static inline void lustre_cfg_bufs_set(struct lustre_cfg_bufs *bufs,   in lustre_cfg_bufs_set() argument
  131  if (bufs == NULL)   in lustre_cfg_bufs_set()
  134  if (bufs->lcfg_bufcount <= index)   in lustre_cfg_bufs_set()
  135  bufs->lcfg_bufcount = index + 1;   in lustre_cfg_bufs_set()
  137  bufs->lcfg_buf[index] = buf;   in lustre_cfg_bufs_set()
  138  bufs->lcfg_buflen[index] = buflen;   in lustre_cfg_bufs_set()
  141  static inline void lustre_cfg_bufs_set_string(struct lustre_cfg_bufs *bufs,   in lustre_cfg_bufs_set_string() argument
  145  lustre_cfg_bufs_set(bufs, index, str, str ? strlen(str) + 1 : 0);   in lustre_cfg_bufs_set_string()
  148  static inline void lustre_cfg_bufs_reset(struct lustre_cfg_bufs *bufs, char *name)   in lustre_cfg_bufs_reset() argument
  150  memset((bufs), 0, sizeof(*bufs));   in lustre_cfg_bufs_reset()
  [all …]

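The matches above cover the whole lifecycle of a lustre_cfg_bufs table: lustre_cfg_bufs_reset() zeroes it, lustre_cfg_bufs_set() stores one pointer/length pair and grows lcfg_bufcount to cover the highest index touched, and lustre_cfg_bufs_set_string() wraps that for NUL-terminated strings. Below is a minimal userspace model of the same behavior; the field and function names are copied from the matches, while the slot limit and the reset-fills-slot-0 detail (visible in the obd_mount.c matches further down) are assumptions of this sketch, not a copy of the kernel header.

    #include <stdio.h>
    #include <string.h>

    #define LUSTRE_CFG_MAX_BUFCOUNT 4  /* assumed; the real limit lives in lustre_cfg.h */

    struct lustre_cfg_bufs {
        void *lcfg_buf[LUSTRE_CFG_MAX_BUFCOUNT];
        unsigned int lcfg_buflen[LUSTRE_CFG_MAX_BUFCOUNT];
        unsigned int lcfg_bufcount;
    };

    /* Store one buffer; bufcount grows to cover the highest index set. */
    static void lustre_cfg_bufs_set(struct lustre_cfg_bufs *bufs,
                                    unsigned int index, void *buf,
                                    unsigned int buflen)
    {
        if (bufs == NULL || index >= LUSTRE_CFG_MAX_BUFCOUNT)
            return;
        if (bufs->lcfg_bufcount <= index)
            bufs->lcfg_bufcount = index + 1;
        bufs->lcfg_buf[index] = buf;
        bufs->lcfg_buflen[index] = buflen;
    }

    /* Strings keep their trailing NUL; NULL becomes a zero-length slot. */
    static void lustre_cfg_bufs_set_string(struct lustre_cfg_bufs *bufs,
                                           unsigned int index, char *str)
    {
        lustre_cfg_bufs_set(bufs, index, str, str ? strlen(str) + 1 : 0);
    }

    static void lustre_cfg_bufs_reset(struct lustre_cfg_bufs *bufs, char *name)
    {
        memset(bufs, 0, sizeof(*bufs));
        /* slot 0 holding the device name is inferred from the obd_mount.c usage */
        lustre_cfg_bufs_set_string(bufs, 0, name);
    }

    int main(void)
    {
        struct lustre_cfg_bufs bufs;

        lustre_cfg_bufs_reset(&bufs, "MGC-device-name");
        lustre_cfg_bufs_set_string(&bufs, 1, "fsname-client");
        printf("bufcount = %u\n", bufs.lcfg_bufcount);  /* prints 2 */
        return 0;
    }
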
/drivers/gpu/drm/

D | drm_dma.c
  65   memset(&dev->dma->bufs[i], 0, sizeof(dev->dma->bufs[0]));   in drm_legacy_dma_setup()
  93   if (dma->bufs[i].seg_count) {   in drm_legacy_dma_takedown()
  97   dma->bufs[i].buf_count,   in drm_legacy_dma_takedown()
  98   dma->bufs[i].seg_count);   in drm_legacy_dma_takedown()
  99   for (j = 0; j < dma->bufs[i].seg_count; j++) {   in drm_legacy_dma_takedown()
  100  if (dma->bufs[i].seglist[j]) {   in drm_legacy_dma_takedown()
  101  drm_pci_free(dev, dma->bufs[i].seglist[j]);   in drm_legacy_dma_takedown()
  104  kfree(dma->bufs[i].seglist);   in drm_legacy_dma_takedown()
  106  if (dma->bufs[i].buf_count) {   in drm_legacy_dma_takedown()
  107  for (j = 0; j < dma->bufs[i].buf_count; j++) {   in drm_legacy_dma_takedown()
  [all …]

D | drm_info.c
  133  if (dma->bufs[i].buf_count) {   in drm_bufs_info()
  134  seg_pages = dma->bufs[i].seg_count * (1 << dma->bufs[i].page_order);   in drm_bufs_info()
  137  dma->bufs[i].buf_size,   in drm_bufs_info()
  138  dma->bufs[i].buf_count,   in drm_bufs_info()
  140  dma->bufs[i].seg_count,   in drm_bufs_info()

/drivers/net/ethernet/cisco/enic/

D | vnic_wq.c
  38   wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ(count), GFP_ATOMIC);   in vnic_wq_alloc_bufs()
  39   if (!wq->bufs[i])   in vnic_wq_alloc_bufs()
  44   buf = wq->bufs[i];   in vnic_wq_alloc_bufs()
  50   buf->next = wq->bufs[0];   in vnic_wq_alloc_bufs()
  54   buf->next = wq->bufs[i + 1];   in vnic_wq_alloc_bufs()
  64   wq->to_use = wq->to_clean = wq->bufs[0];   in vnic_wq_alloc_bufs()
  79   if (wq->bufs[i]) {   in vnic_wq_free()
  80   kfree(wq->bufs[i]);   in vnic_wq_free()
  81   wq->bufs[i] = NULL;   in vnic_wq_free()
  153  &wq->bufs[fetch_index / VNIC_WQ_BUF_BLK_ENTRIES(count)]   in enic_wq_init_start()
  [all …]

D | vnic_rq.c
  38   rq->bufs[i] = kzalloc(VNIC_RQ_BUF_BLK_SZ(count), GFP_ATOMIC);   in vnic_rq_alloc_bufs()
  39   if (!rq->bufs[i])   in vnic_rq_alloc_bufs()
  44   buf = rq->bufs[i];   in vnic_rq_alloc_bufs()
  50   buf->next = rq->bufs[0];   in vnic_rq_alloc_bufs()
  53   buf->next = rq->bufs[i + 1];   in vnic_rq_alloc_bufs()
  61   rq->to_use = rq->to_clean = rq->bufs[0];   in vnic_rq_alloc_bufs()
  76   if (rq->bufs[i]) {   in vnic_rq_free()
  77   kfree(rq->bufs[i]);   in vnic_rq_free()
  78   rq->bufs[i] = NULL;   in vnic_rq_free()
  134  &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES(count)]   in vnic_rq_init_start()
  [all …]

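The enic allocator builds its descriptor-tracking ring out of fixed-size blocks rather than one flat array: each bufs[i] is a separately kzalloc'd block of entries, the last entry of each block chains to the first entry of the next, the final entry wraps back to bufs[0], and to_use/to_clean both start at the head. A rough userspace sketch of that chaining follows; the block size, block count, and struct names here are assumptions, only the chaining logic mirrors the matches.

    #include <stdlib.h>

    #define BLK_ENTRIES 64  /* assumed; the driver derives this from the ring size */

    struct vnic_buf {
        unsigned int index;
        struct vnic_buf *next;
    };

    struct vnic_ring {
        struct vnic_buf *bufs[8];  /* assumes count <= 8 * BLK_ENTRIES */
        struct vnic_buf *to_use;
        struct vnic_buf *to_clean;
    };

    static int ring_alloc_bufs(struct vnic_ring *ring, unsigned int count)
    {
        unsigned int blks = (count + BLK_ENTRIES - 1) / BLK_ENTRIES;
        unsigned int i, j, idx = 0;

        for (i = 0; i < blks; i++) {
            ring->bufs[i] = calloc(BLK_ENTRIES, sizeof(struct vnic_buf));
            if (!ring->bufs[i])
                return -1;  /* the real driver frees partial allocations here */
        }

        for (i = 0; i < blks; i++) {
            struct vnic_buf *buf = ring->bufs[i];

            for (j = 0; j < BLK_ENTRIES; j++, buf++) {
                buf->index = idx++;
                if (idx == count) {
                    buf->next = ring->bufs[0];      /* last entry wraps to head */
                    goto done;
                } else if (j + 1 == BLK_ENTRIES) {
                    buf->next = ring->bufs[i + 1];  /* hop to the next block */
                } else {
                    buf->next = buf + 1;            /* next entry in this block */
                }
            }
        }
    done:
        ring->to_use = ring->to_clean = ring->bufs[0];
        return 0;
    }

    int main(void)
    {
        struct vnic_ring ring = {0};

        return ring_alloc_bufs(&ring, 100);
    }

The fnic and snic entries below carry near-verbatim copies of the same allocator, differing mainly in the block-size macros.
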
/drivers/scsi/fnic/

D | vnic_rq.c
  37   rq->bufs[i] = kzalloc(VNIC_RQ_BUF_BLK_SZ, GFP_ATOMIC);   in vnic_rq_alloc_bufs()
  38   if (!rq->bufs[i]) {   in vnic_rq_alloc_bufs()
  45   buf = rq->bufs[i];   in vnic_rq_alloc_bufs()
  51   buf->next = rq->bufs[0];   in vnic_rq_alloc_bufs()
  54   buf->next = rq->bufs[i + 1];   in vnic_rq_alloc_bufs()
  62   rq->to_use = rq->to_clean = rq->bufs[0];   in vnic_rq_alloc_bufs()
  78   kfree(rq->bufs[i]);   in vnic_rq_free()
  79   rq->bufs[i] = NULL;   in vnic_rq_free()
  133  &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES]   in vnic_rq_init()
  189  &rq->bufs[fetch_index / VNIC_RQ_BUF_BLK_ENTRIES]   in vnic_rq_clean()

D | vnic_wq.c
  37   wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ, GFP_ATOMIC);   in vnic_wq_alloc_bufs()
  38   if (!wq->bufs[i]) {   in vnic_wq_alloc_bufs()
  45   buf = wq->bufs[i];   in vnic_wq_alloc_bufs()
  51   buf->next = wq->bufs[0];   in vnic_wq_alloc_bufs()
  54   buf->next = wq->bufs[i + 1];   in vnic_wq_alloc_bufs()
  62   wq->to_use = wq->to_clean = wq->bufs[0];   in vnic_wq_alloc_bufs()
  77   kfree(wq->bufs[i]);   in vnic_wq_free()
  78   wq->bufs[i] = NULL;   in vnic_wq_free()
  176  wq->to_use = wq->to_clean = wq->bufs[0];   in vnic_wq_clean()

/drivers/scsi/snic/

D | vnic_wq.c
  50   wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ, GFP_ATOMIC);   in vnic_wq_alloc_bufs()
  51   if (!wq->bufs[i]) {   in vnic_wq_alloc_bufs()
  59   buf = wq->bufs[i];   in vnic_wq_alloc_bufs()
  65   buf->next = wq->bufs[0];   in vnic_wq_alloc_bufs()
  68   buf->next = wq->bufs[i + 1];   in vnic_wq_alloc_bufs()
  76   wq->to_use = wq->to_clean = wq->bufs[0];   in vnic_wq_alloc_bufs()
  91   kfree(wq->bufs[i]);   in svnic_wq_free()
  92   wq->bufs[i] = NULL;   in svnic_wq_free()
  173  &wq->bufs[fetch_index / VNIC_WQ_BUF_BLK_ENTRIES(count)]   in vnic_wq_init_start()
  230  wq->to_use = wq->to_clean = wq->bufs[0];   in svnic_wq_clean()

/drivers/media/v4l2-core/

D | videobuf-core.c
  204  if (NULL == q->bufs[i])   in videobuf_queue_is_busy()
  206  if (q->bufs[i]->map) {   in videobuf_queue_is_busy()
  210  if (q->bufs[i]->state == VIDEOBUF_QUEUED) {   in videobuf_queue_is_busy()
  214  if (q->bufs[i]->state == VIDEOBUF_ACTIVE) {   in videobuf_queue_is_busy()
  246  if (q->bufs[i] && q->bufs[i]->map) {   in __videobuf_free()
  252  if (NULL == q->bufs[i])   in __videobuf_free()
  254  q->ops->buf_release(q, q->bufs[i]);   in __videobuf_free()
  255  kfree(q->bufs[i]);   in __videobuf_free()
  256  q->bufs[i] = NULL;   in __videobuf_free()
  275  if (NULL == q->bufs[i])   in videobuf_queue_cancel()
  [all …]

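videobuf-core's busy check and its teardown both walk the same fixed q->bufs[] table, skipping NULL slots and inspecting per-buffer state. A condensed sketch of that scan pattern; the enum values, table size, and struct layout here are assumptions chosen to mirror the matches, not the real header.

    #include <stddef.h>

    enum videobuf_state { VIDEOBUF_IDLE, VIDEOBUF_QUEUED, VIDEOBUF_ACTIVE };

    struct videobuf_buffer {
        void *map;  /* non-NULL while userspace has the buffer mmapped */
        enum videobuf_state state;
    };

    #define VIDEO_MAX_FRAME 32

    struct videobuf_queue {
        struct videobuf_buffer *bufs[VIDEO_MAX_FRAME];
    };

    /* Busy if any allocated buffer is mapped, queued, or active. */
    static int videobuf_queue_is_busy(struct videobuf_queue *q)
    {
        int i;

        for (i = 0; i < VIDEO_MAX_FRAME; i++) {
            if (q->bufs[i] == NULL)
                continue;  /* unallocated slot */
            if (q->bufs[i]->map)
                return 1;
            if (q->bufs[i]->state == VIDEOBUF_QUEUED ||
                q->bufs[i]->state == VIDEOBUF_ACTIVE)
                return 1;
        }
        return 0;
    }

    int main(void)
    {
        struct videobuf_queue q = {{0}};
        struct videobuf_buffer vb = { NULL, VIDEOBUF_QUEUED };

        q.bufs[3] = &vb;
        return videobuf_queue_is_busy(&q);  /* 1: buffer 3 is queued */
    }
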
D | videobuf-dma-sg.c
  412  if (NULL == q->bufs[i])   in videobuf_vm_close()
  414  mem = q->bufs[i]->priv;   in videobuf_vm_close()
  420  if (q->bufs[i]->map != map)   in videobuf_vm_close()
  422  q->bufs[i]->map = NULL;   in videobuf_vm_close()
  423  q->bufs[i]->baddr = 0;   in videobuf_vm_close()
  424  q->ops->buf_release(q, q->bufs[i]);   in videobuf_vm_close()
  606  if (buf == q->bufs[first]) {   in __videobuf_mmap_mapper()
  607  size = PAGE_ALIGN(q->bufs[first]->bsize);   in __videobuf_mmap_mapper()
  629  if (NULL == q->bufs[i])   in __videobuf_mmap_mapper()
  631  q->bufs[i]->map = map;   in __videobuf_mmap_mapper()
  [all …]

D | videobuf2-core.c
  149  vb = q->bufs[buffer];   in __setup_lengths()
  170  vb = q->bufs[q->num_buffers - 1];   in __setup_offsets()
  178  vb = q->bufs[buffer];   in __setup_offsets()
  251  q->bufs[q->num_buffers + buffer] = vb;   in __vb2_queue_alloc()
  274  vb = q->bufs[buffer];   in __vb2_free_mem()
  307  if (q->bufs[buffer] == NULL)   in __vb2_queue_free()
  309  if (q->bufs[buffer]->state == VB2_BUF_STATE_PREPARING) {   in __vb2_queue_free()
  318  struct vb2_buffer *vb = q->bufs[buffer];   in __vb2_queue_free()
  353  struct vb2_buffer *vb = q->bufs[buffer];   in __vb2_queue_free()
  392  kfree(q->bufs[buffer]);   in __vb2_queue_free()
  [all …]

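Where videobuf keeps a statically sized table, videobuf2 fills and drains q->bufs[] dynamically: __vb2_queue_alloc() appends new buffers starting at q->num_buffers, and __vb2_queue_free() releases from the tail and clears the slots. A rough self-contained sketch of that slot management, with the state checks and real release callbacks omitted as out of scope.

    #include <stdlib.h>

    #define VB2_MAX_FRAME 32

    struct vb2_buffer { int state; };

    struct vb2_queue {
        struct vb2_buffer *bufs[VB2_MAX_FRAME];
        unsigned int num_buffers;
    };

    /* Allocate `count` buffers after the ones already present
     * (cf. __vb2_queue_alloc); returns how many actually succeeded. */
    static unsigned int vb2_queue_alloc(struct vb2_queue *q, unsigned int count)
    {
        unsigned int buffer;
        struct vb2_buffer *vb;

        for (buffer = 0; buffer < count &&
             q->num_buffers + buffer < VB2_MAX_FRAME; buffer++) {
            vb = calloc(1, sizeof(*vb));
            if (!vb)
                break;
            q->bufs[q->num_buffers + buffer] = vb;
        }
        q->num_buffers += buffer;
        return buffer;
    }

    /* Free `count` buffers from the tail down, clearing the slots
     * (cf. __vb2_queue_free). */
    static void vb2_queue_free(struct vb2_queue *q, unsigned int count)
    {
        unsigned int buffer;

        for (buffer = q->num_buffers - count; buffer < q->num_buffers; buffer++) {
            if (q->bufs[buffer] == NULL)
                continue;
            free(q->bufs[buffer]);
            q->bufs[buffer] = NULL;
        }
        q->num_buffers -= count;
    }

    int main(void)
    {
        struct vb2_queue q = {{0}, 0};
        unsigned int got = vb2_queue_alloc(&q, 4);

        vb2_queue_free(&q, got);
        return 0;
    }
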
D | videobuf-vmalloc.c
  85   if (NULL == q->bufs[i])   in videobuf_vm_close()
  88   if (q->bufs[i]->map != map)   in videobuf_vm_close()
  91   mem = q->bufs[i]->priv;   in videobuf_vm_close()
  111  q->bufs[i]->map = NULL;   in videobuf_vm_close()
  112  q->bufs[i]->baddr = 0;   in videobuf_vm_close()

D | videobuf2-v4l2.c
  165   if (q->bufs[b->index] == NULL) {   in vb2_queue_or_prepare_buf()
  176   return __verify_planes_array(q->bufs[b->index], b);   in vb2_queue_or_prepare_buf()
  472   vb = q->bufs[b->index];   in vb2_querybuf()
  914   struct vb2_fileio_buf bufs[VB2_MAX_FRAME];   member
  985   if (q->bufs[0]->num_planes != 1) {   in __vb2_init_fileio()
  994   fileio->bufs[i].vaddr = vb2_plane_vaddr(q->bufs[i], 0);   in __vb2_init_fileio()
  995   if (fileio->bufs[i].vaddr == NULL) {   in __vb2_init_fileio()
  999   fileio->bufs[i].size = vb2_plane_size(q->bufs[i], 0);   in __vb2_init_fileio()
  1026  fileio->bufs[i].queued = 1;   in __vb2_init_fileio()
  1140  buf = &fileio->bufs[index];   in __vb2_perform_fileio()
  [all …]

D | videobuf-dma-contig.c
  97   if (NULL == q->bufs[i])   in videobuf_vm_close()
  100  if (q->bufs[i]->map != map)   in videobuf_vm_close()
  103  mem = q->bufs[i]->priv;   in videobuf_vm_close()
  123  q->bufs[i]->map = NULL;   in videobuf_vm_close()
  124  q->bufs[i]->baddr = 0;   in videobuf_vm_close()

/drivers/staging/lustre/lustre/obdclass/

D | obd_mount.c
  78   struct lustre_cfg_bufs *bufs;   in lustre_process_log() local
  86   bufs = kzalloc(sizeof(*bufs), GFP_NOFS);   in lustre_process_log()
  87   if (!bufs)   in lustre_process_log()
  91   lustre_cfg_bufs_reset(bufs, mgc->obd_name);   in lustre_process_log()
  92   lustre_cfg_bufs_set_string(bufs, 1, logname);   in lustre_process_log()
  93   lustre_cfg_bufs_set(bufs, 2, cfg, sizeof(*cfg));   in lustre_process_log()
  94   lustre_cfg_bufs_set(bufs, 3, &sb, sizeof(sb));   in lustre_process_log()
  95   lcfg = lustre_cfg_new(LCFG_LOG_START, bufs);   in lustre_process_log()
  99   kfree(bufs);   in lustre_process_log()
  120  struct lustre_cfg_bufs bufs;   in lustre_end_log() local
  [all …]

D | obd_config.c
  1076  struct lustre_cfg_bufs bufs;   in class_config_llog_handler() local
  1165  lustre_cfg_bufs_init(&bufs, lcfg);   in class_config_llog_handler()
  1179  lustre_cfg_bufs_set_string(&bufs, 0, inst_name);   in class_config_llog_handler()
  1188  lustre_cfg_bufs_set_string(&bufs, 2,   in class_config_llog_handler()
  1200  lustre_cfg_bufs_set(&bufs, 2, bufs.lcfg_buf[1],   in class_config_llog_handler()
  1201  bufs.lcfg_buflen[1]);   in class_config_llog_handler()
  1202  lustre_cfg_bufs_set(&bufs, 1, bufs.lcfg_buf[0],   in class_config_llog_handler()
  1203  bufs.lcfg_buflen[0]);   in class_config_llog_handler()
  1204  lustre_cfg_bufs_set_string(&bufs, 0,   in class_config_llog_handler()
  1208  lcfg_new = lustre_cfg_new(lcfg->lcfg_command, &bufs);   in class_config_llog_handler()
  [all …]

/drivers/scsi/arm/

D | scsi.h
  29   int bufs = SCp->buffers_residual;   in copy_SCp_to_sg() local
  34   BUG_ON(bufs + 1 > max);   in copy_SCp_to_sg()
  38   if (bufs) {   in copy_SCp_to_sg()
  42   for_each_sg(sg_next(SCp->buffer), src_sg, bufs, i)   in copy_SCp_to_sg()
  47   return bufs + 1;   in copy_SCp_to_sg()

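The `bufs + 1` arithmetic in scsi.h is worth spelling out: SCp->buffers_residual counts the scatter-gather entries after the one currently in progress, so the copy emits the current (possibly partially consumed) buffer first, then the remaining `bufs` entries, and returns the total. A simplified model of that shape, with the kernel scatterlist replaced by a plain array-backed struct purely for illustration.

    #include <assert.h>

    struct sg_entry {
        void *addr;
        unsigned int len;
    };

    struct scsi_pointer_model {
        struct sg_entry *buffer;     /* entry currently in progress */
        void *ptr;                   /* position within that entry */
        unsigned int this_residual;  /* bytes left in it */
        int buffers_residual;        /* whole entries still untouched after it */
    };

    static int copy_SCp_to_sg(struct sg_entry *sg,
                              struct scsi_pointer_model *SCp, int max)
    {
        int bufs = SCp->buffers_residual;
        int i;

        assert(bufs + 1 <= max);  /* the kernel version uses BUG_ON() */

        /* current, partially consumed buffer goes first */
        sg[0].addr = SCp->ptr;
        sg[0].len = SCp->this_residual;

        /* then the untouched remainder of the original list */
        for (i = 0; i < bufs; i++)
            sg[i + 1] = SCp->buffer[i + 1];

        return bufs + 1;  /* total entries written */
    }

    int main(void)
    {
        char a[4], b[4], c[4];
        struct sg_entry list[3] = { { a, 4 }, { b, 4 }, { c, 4 } };
        struct sg_entry out[8];
        struct scsi_pointer_model SCp = {
            .buffer = &list[0], .ptr = a + 1,
            .this_residual = 3, .buffers_residual = 2,
        };

        return copy_SCp_to_sg(out, &SCp, 8) == 3 ? 0 : 1;
    }

powertec.c below is the consumer: the returned count feeds straight into dma_map_sg() and set_dma_sg().
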
D | powertec.c
  140  int bufs, map_dir, dma_dir;   in powertecscsi_dma_setup() local
  142  bufs = copy_SCp_to_sg(&info->sg[0], SCp, NR_SG);   in powertecscsi_dma_setup()
  151  dma_map_sg(dev, info->sg, bufs, map_dir);   in powertecscsi_dma_setup()
  154  set_dma_sg(dmach, info->sg, bufs);   in powertecscsi_dma_setup()

/drivers/crypto/caam/

D | caamrng.c
  76   struct buf_data bufs[2];   member
  95   rng_unmap_buf(jrdev, &ctx->bufs[0]);   in rng_unmap_ctx()
  96   rng_unmap_buf(jrdev, &ctx->bufs[1]);   in rng_unmap_ctx()
  123  struct buf_data *bd = &ctx->bufs[!(to_current ^ ctx->current_buf)];   in submit_job()
  142  struct buf_data *bd = &ctx->bufs[ctx->current_buf];   in caam_read()
  224  struct buf_data *bd = &ctx->bufs[buf_id];   in rng_create_job_desc()
  251  bd = &rng_ctx->bufs[i];   in caam_cleanup()
  261  struct buf_data *bd = &ctx->bufs[buf_id];   in caam_init_buf()

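caamrng double-buffers: ctx->current_buf names the buffer readers drain, and the `!(to_current ^ ctx->current_buf)` expression in submit_job() selects either that buffer (to_current == 1) or its partner (to_current == 0). The selection logic in isolation; only the index expression is taken from the matches, the surrounding types are stand-ins.

    #include <stdio.h>

    struct buf_data { char data[16]; };

    struct caam_rng_ctx_model {
        struct buf_data bufs[2];  /* double buffer, as in the struct above */
        int current_buf;          /* 0 or 1: buffer currently handed to readers */
    };

    /* to_current must be 0 or 1: 1 picks bufs[current_buf], 0 picks the other. */
    static struct buf_data *pick_buf(struct caam_rng_ctx_model *ctx,
                                     int to_current)
    {
        return &ctx->bufs[!(to_current ^ ctx->current_buf)];
    }

    int main(void)
    {
        struct caam_rng_ctx_model ctx = { .current_buf = 0 };

        printf("partner buffer is bufs[%d]\n",
               (int)(pick_buf(&ctx, 0) - ctx.bufs));  /* 1: the idle buffer */
        printf("reader drains bufs[%d]\n",
               (int)(pick_buf(&ctx, 1) - ctx.bufs));  /* 0: the current buffer */
        return 0;
    }
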
/drivers/video/adf/

D | adf_client.c
  364  struct adf_buffer *bufs, size_t n_bufs, void *custom_data,   in adf_device_post() argument
  391  for (j = 0; j < bufs[i].n_planes; j++)   in adf_device_post()
  392  get_dma_buf(bufs[i].dma_bufs[j]);   in adf_device_post()
  396  memcpy(bufs_copy, bufs, sizeof(bufs_copy[0]) * n_bufs);   in adf_device_post()
  409  for (j = 0; j < bufs[i].n_planes; j++)   in adf_device_post()
  410  dma_buf_put(bufs[i].dma_bufs[j]);   in adf_device_post()
  439  struct adf_buffer *bufs, size_t n_bufs,   in adf_device_post_nocopy() argument
  461  err = adf_buffer_validate(&bufs[i]);   in adf_device_post_nocopy()
  467  err = adf_buffer_map(dev, &bufs[i], &mappings[i]);   in adf_device_post_nocopy()
  476  cfg->config.bufs = bufs;   in adf_device_post_nocopy()
  [all …]

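adf_device_post() copies the caller's buffer array, and since a struct copy does not take dma-buf references by itself, the matches show get_dma_buf() called on every plane before the memcpy, with dma_buf_put() on every plane on what appears to be the unwind path. The same take-refs, copy, drop-on-error shape, modeled with a plain refcount; all names and the simplified control flow here are assumptions of the sketch.

    #include <stdlib.h>
    #include <string.h>

    struct dma_buf_model { int refcount; };

    struct adf_buffer_model {
        size_t n_planes;
        struct dma_buf_model *dma_bufs[4];
    };

    static int post_copy(struct adf_buffer_model *bufs, size_t n_bufs,
                         struct adf_buffer_model **out)
    {
        struct adf_buffer_model *copy;
        size_t i, j;

        /* take a reference on every plane before anything can fail */
        for (i = 0; i < n_bufs; i++)
            for (j = 0; j < bufs[i].n_planes; j++)
                bufs[i].dma_bufs[j]->refcount++;

        copy = malloc(sizeof(copy[0]) * n_bufs);
        if (!copy)
            goto err;
        memcpy(copy, bufs, sizeof(copy[0]) * n_bufs);
        *out = copy;  /* the copy now owns the references taken above */
        return 0;

    err:
        /* unwind: drop every reference taken above */
        for (i = 0; i < n_bufs; i++)
            for (j = 0; j < bufs[i].n_planes; j++)
                bufs[i].dma_bufs[j]->refcount--;
        return -1;
    }

    int main(void)
    {
        struct dma_buf_model d = { .refcount = 1 };
        struct adf_buffer_model buf = { .n_planes = 1, .dma_bufs = { &d } };
        struct adf_buffer_model *copy = NULL;

        if (post_copy(&buf, 1, &copy) == 0)
            free(copy);
        return d.refcount - 2;  /* 0: the copy holds one extra reference */
    }
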
D | adf_fops.c
  221  struct adf_buffer *bufs = NULL;   in adf_device_post_config() local
  274  bufs = kzalloc(sizeof(bufs[0]) * data.n_bufs, GFP_KERNEL);   in adf_device_post_config()
  275  if (!bufs) {   in adf_device_post_config()
  282  ret = adf_buffer_import(dev, &data.bufs[i], &bufs[i]);   in adf_device_post_config()
  284  memset(&bufs[i], 0, sizeof(bufs[i]));   in adf_device_post_config()
  309  bufs, data.n_bufs, custom_data, data.custom_data_size);   in adf_device_post_config()
  320  adf_buffer_cleanup(&bufs[i]);   in adf_device_post_config()
  324  kfree(bufs);   in adf_device_post_config()

/drivers/md/

D | dm-verity-fec.c
  113  return &fio->bufs[i][j * v->fec->rsn];   in fec_buffer_rs_block()
  323  if (fio->bufs[n])   in fec_alloc_bufs()
  326  fio->bufs[n] = mempool_alloc(v->fec->prealloc_pool, GFP_NOIO);   in fec_alloc_bufs()
  327  if (unlikely(!fio->bufs[n])) {   in fec_alloc_bufs()
  335  if (fio->bufs[n])   in fec_alloc_bufs()
  338  fio->bufs[n] = mempool_alloc(v->fec->extra_pool, GFP_NOIO);   in fec_alloc_bufs()
  340  if (unlikely(!fio->bufs[n]))   in fec_alloc_bufs()
  366  memset(fio->bufs[n], 0, v->fec->rsn << DM_VERITY_FEC_BUF_RS_BITS);   in fec_init_bufs()
  513  mempool_free(fio->bufs[n], f->prealloc_pool);   in verity_fec_finish_io()
  516  mempool_free(fio->bufs[n], f->extra_pool);   in verity_fec_finish_io()
  [all …]

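fec_alloc_bufs() draws Reed-Solomon buffers from two pools: a guaranteed prealloc pool first, then an opportunistic extra pool whose failures are tolerated, and verity_fec_finish_io() must return each buffer to the pool it came from, which is why the free side is split at the same index. A sketch of the two-pool pattern; the pool sizes, split point, and buffer size below are assumed, only the alloc/free symmetry mirrors the matches.

    #include <stdlib.h>

    #define FEC_BUF_PREALLOC 1     /* assumed split point */
    #define FEC_BUF_MAX      2     /* assumed total */
    #define FEC_BUF_SIZE     4096  /* assumed RS buffer size */

    struct fec_io_model {
        void *bufs[FEC_BUF_MAX];
        unsigned int nbufs;
    };

    /* stand-ins for mempool_alloc()/mempool_free() on the two pools */
    static void *pool_alloc(int which) { (void)which; return malloc(FEC_BUF_SIZE); }
    static void pool_free(void *p, int which) { (void)which; free(p); }

    static void fec_alloc_bufs(struct fec_io_model *fio)
    {
        unsigned int n;

        /* guaranteed buffers come from the preallocated pool */
        for (n = 0; n < FEC_BUF_PREALLOC; n++) {
            if (fio->bufs[n])
                continue;  /* already allocated on an earlier pass */
            fio->bufs[n] = pool_alloc(0);
            if (!fio->bufs[n])
                break;
        }

        /* opportunistic extras; decoding can proceed with fewer */
        for (; n < FEC_BUF_MAX; n++) {
            if (fio->bufs[n])
                continue;
            fio->bufs[n] = pool_alloc(1);
            if (!fio->bufs[n])
                break;
        }
        fio->nbufs = n;
    }

    static void fec_finish_io(struct fec_io_model *fio)
    {
        unsigned int n;

        /* return each buffer to the pool it was drawn from */
        for (n = 0; n < FEC_BUF_PREALLOC; n++)
            pool_free(fio->bufs[n], 0);
        for (; n < FEC_BUF_MAX; n++)
            pool_free(fio->bufs[n], 1);
    }

    int main(void)
    {
        struct fec_io_model fio = { {0}, 0 };

        fec_alloc_bufs(&fio);
        fec_finish_io(&fio);
        return 0;
    }
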
/drivers/net/ethernet/atheros/alx/

D | main.c
  57   struct alx_buffer *txb = &alx->txq.bufs[entry];   in alx_free_txbuf()
  84   cur_buf = &rxq->bufs[cur];   in alx_refill_rx_ring()
  121  cur_buf = &rxq->bufs[cur];   in alx_refill_rx_ring()
  158  skb = txq->bufs[sw_read_idx].skb;   in alx_clean_tx_irq()
  215  rxb = &rxq->bufs[rxq->read_idx];   in alx_clean_rx_irq()
  396  if (!txq->bufs)   in alx_free_txring_buf()
  402  memset(txq->bufs, 0, alx->tx_ringsz * sizeof(struct alx_buffer));   in alx_free_txring_buf()
  420  cur_buf = rxq->bufs + i;   in alx_free_rxring_buf()
  517  alx->txq.bufs = kcalloc(alx->tx_ringsz,   in alx_alloc_descriptors()
  520  if (!alx->txq.bufs)   in alx_alloc_descriptors()
  [all …]

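alx pairs each hardware descriptor ring with a parallel bufs array of the same length, kcalloc'd at setup and indexed by the same read/write cursors the ring uses, so bufs[read_idx] always holds the skb and DMA mapping for the descriptor being completed. The shape of that pairing, roughly; the struct members and helper below are illustrative stand-ins, not the driver's API.

    #include <stdlib.h>

    struct alx_buffer_model {
        void *skb;          /* packet attached to this descriptor slot */
        unsigned long dma;  /* its DMA mapping */
    };

    struct alx_queue_model {
        struct alx_buffer_model *bufs;  /* one entry per hardware descriptor */
        unsigned int ringsz;
        unsigned int read_idx;
        unsigned int write_idx;
    };

    static int alx_alloc_ring(struct alx_queue_model *q, unsigned int ringsz)
    {
        q->bufs = calloc(ringsz, sizeof(struct alx_buffer_model));
        if (!q->bufs)
            return -1;
        q->ringsz = ringsz;
        q->read_idx = q->write_idx = 0;
        return 0;
    }

    /* completion side: consume the slot the hardware just finished */
    static struct alx_buffer_model *alx_next_completed(struct alx_queue_model *q)
    {
        struct alx_buffer_model *buf = &q->bufs[q->read_idx];

        q->read_idx = (q->read_idx + 1) % q->ringsz;
        return buf;
    }

    int main(void)
    {
        struct alx_queue_model q;

        if (alx_alloc_ring(&q, 256))
            return 1;
        alx_next_completed(&q);  /* hands back bufs[0] */
        free(q.bufs);
        return 0;
    }
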
D | alx.h
  59   struct alx_buffer *bufs;   member
  69   struct alx_buffer *bufs;   member

/drivers/dma/

D | dmatest.c
  237  static void dmatest_init_srcs(u8 **bufs, unsigned int start, unsigned int len,   in dmatest_init_srcs() argument
  243  for (; (buf = *bufs); bufs++) {   in dmatest_init_srcs()
  255  static void dmatest_init_dsts(u8 **bufs, unsigned int start, unsigned int len,   in dmatest_init_dsts() argument
  261  for (; (buf = *bufs); bufs++) {   in dmatest_init_dsts()
  294  static unsigned int dmatest_verify(u8 **bufs, unsigned int start,   in dmatest_verify() argument
  305  for (; (buf = *bufs); bufs++) {   in dmatest_verify()

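All three dmatest helpers share one idiom: bufs is a NULL-terminated array of u8 pointers, so `for (; (buf = *bufs); bufs++)` visits every buffer without a separate count. A standalone illustration of the same loop; the helper name and the fill pattern are made up for the example.

    #include <stdio.h>
    #include <stdint.h>
    #include <string.h>

    /* Fill bytes start..start+len-1 of every buffer in a
     * NULL-terminated array, in the style of dmatest_init_srcs(). */
    static void init_bufs(uint8_t **bufs, unsigned int start, unsigned int len,
                          uint8_t pattern)
    {
        uint8_t *buf;

        for (; (buf = *bufs); bufs++)  /* stops at the NULL sentinel */
            memset(buf + start, pattern, len);
    }

    int main(void)
    {
        uint8_t a[8] = {0}, b[8] = {0};
        uint8_t *bufs[] = { a, b, NULL };  /* sentinel-terminated, no count */

        init_bufs(bufs, 2, 4, 0x5a);
        printf("a[2]=%#x b[5]=%#x\n", a[2], b[5]);
        return 0;
    }
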