/drivers/media/test-drivers/vivid/
D | vivid-vid-common.c |
     41  .buffers = 1,
     50  .buffers = 1,
     58  .buffers = 1,
     66  .buffers = 1,
     74  .buffers = 1,
     82  .buffers = 1,
     90  .buffers = 1,
     98  .buffers = 1,
    106  .buffers = 1,
    114  .buffers = 1,
    [all …]
D | vivid-vid-out.c |
     29  unsigned planes = vfmt->buffers;  in vid_out_queue_setup()
     34  for (p = vfmt->buffers; p < vfmt->planes; p++)  in vid_out_queue_setup()
    106  unsigned int planes = vfmt->buffers;  in vid_out_buf_prepare()
    111  for (p = vfmt->buffers; p < vfmt->planes; p++)  in vid_out_buf_prepare()
    333  mp->num_planes = fmt->buffers;  in vivid_g_fmt_vid_out()
    340  for (p = fmt->buffers; p < fmt->planes; p++) {  in vivid_g_fmt_vid_out()
    403  mp->num_planes = fmt->buffers;  in vivid_try_fmt_vid_out()
    404  for (p = 0; p < fmt->buffers; p++) {  in vivid_try_fmt_vid_out()
    420  for (p = fmt->buffers; p < fmt->planes; p++)  in vivid_try_fmt_vid_out()
    555  for (p = dev->fmt_out->buffers; p < dev->fmt_out->planes; p++)  in vivid_s_fmt_vid_out()
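The vivid hits above turn on the distinction V4L2 draws between colour planes and memory planes: a multiplanar format can describe more colour planes (fmt->planes) than the buffers userspace actually allocates (fmt->buffers), so the driver repeatedly loops over the range [buffers, planes) for the planes that live inside an earlier buffer. A minimal userspace sketch of that split, using a hypothetical fmt_info struct rather than vivid's real descriptor:

#include <stdio.h>

/* Simplified stand-in for vivid's format descriptor: "planes" counts
 * colour planes, "buffers" counts the memory planes userspace must
 * actually allocate (hypothetical fields, not the kernel struct). */
struct fmt_info {
        const char *name;
        unsigned planes;   /* colour planes, e.g. Y, Cb, Cr */
        unsigned buffers;  /* distinct memory buffers */
};

int main(void)
{
        /* NV12: two colour planes (Y and interleaved CbCr), one buffer. */
        struct fmt_info nv12 = { "NV12", 2, 1 };
        unsigned p;

        /* Planes [0, buffers) are backed by real buffers ... */
        for (p = 0; p < nv12.buffers; p++)
                printf("%s plane %u: separate buffer\n", nv12.name, p);
        /* ... planes [buffers, planes) are packed into an earlier buffer,
         * which is the range vivid's loops above iterate over. */
        for (p = nv12.buffers; p < nv12.planes; p++)
                printf("%s plane %u: packed into buffer 0\n", nv12.name, p);
        return 0;
}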
/drivers/crypto/intel/qat/qat_common/
D | qat_bl.c |
     29  dma_unmap_single(dev, bl->buffers[i].addr,  in qat_bl_free_bufl()
     30  bl->buffers[i].len, bl_dma_dir);  in qat_bl_free_bufl()
     39  dma_unmap_single(dev, blout->buffers[i].addr,  in qat_bl_free_bufl()
     40  blout->buffers[i].len,  in qat_bl_free_bufl()
     68  size_t sz_out, sz = struct_size(bufl, buffers, n);  in __qat_bl_sgl_to_bufl()
     92  bufl->buffers[i].addr = DMA_MAPPING_ERROR;  in __qat_bl_sgl_to_bufl()
    106  bufl->buffers[y].addr = dma_map_single(dev, sg_virt(sg) + left,  in __qat_bl_sgl_to_bufl()
    109  bufl->buffers[y].len = sg->length;  in __qat_bl_sgl_to_bufl()
    110  if (unlikely(dma_mapping_error(dev, bufl->buffers[y].addr)))  in __qat_bl_sgl_to_bufl()
    114  bufl->buffers[y].len -= left;  in __qat_bl_sgl_to_bufl()
    [all …]
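qat_bl.c sizes its buffer list with struct_size(bufl, buffers, n), the overflow-checked helper for a struct ending in a flexible array, and pre-marks every entry DMA_MAPPING_ERROR so the unwind path can tell mapped slots from unmapped ones. A kernel-style sketch of that allocate/mark/unwind shape, with hypothetical struct and function names (not the QAT code itself):

#include <linux/overflow.h>
#include <linux/slab.h>
#include <linux/dma-mapping.h>

/* Hypothetical buffer list mirroring the flexible array in qat_bl.c. */
struct buf_desc {
        dma_addr_t addr;
        u32 len;
};

struct buf_list {
        u32 num_bufs;
        struct buf_desc buffers[];      /* flexible array member */
};

static struct buf_list *buf_list_alloc(struct device *dev, u32 n, gfp_t gfp)
{
        /* struct_size() computes sizeof(*bl) + n * sizeof(bl->buffers[0])
         * with overflow checking, as in __qat_bl_sgl_to_bufl(). */
        struct buf_list *bl = kzalloc(struct_size(bl, buffers, n), gfp);
        u32 i;

        if (!bl)
                return NULL;
        bl->num_bufs = n;
        /* Pre-mark entries so an error path can skip unmapped slots. */
        for (i = 0; i < n; i++)
                bl->buffers[i].addr = DMA_MAPPING_ERROR;
        return bl;
}

static void buf_list_unmap(struct device *dev, struct buf_list *bl)
{
        u32 i;

        for (i = 0; i < bl->num_bufs; i++)
                if (!dma_mapping_error(dev, bl->buffers[i].addr))
                        dma_unmap_single(dev, bl->buffers[i].addr,
                                         bl->buffers[i].len, DMA_TO_DEVICE);
        kfree(bl);
}

Pre-marking with DMA_MAPPING_ERROR means the unwind loop needs no separate count of how many mappings succeeded.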
/drivers/android/
D | binder_alloc_selftest.c |
    116  struct binder_buffer *buffers[],  in binder_selftest_alloc_buf() argument
    122  buffers[i] = binder_alloc_new_buf(alloc, sizes[i], 0, 0, 0);  in binder_selftest_alloc_buf()
    123  if (IS_ERR(buffers[i]) ||  in binder_selftest_alloc_buf()
    124  !check_buffer_pages_allocated(alloc, buffers[i],  in binder_selftest_alloc_buf()
    133  struct binder_buffer *buffers[],  in binder_selftest_free_buf() argument
    139  binder_alloc_free_buf(alloc, buffers[seq[i]]);  in binder_selftest_free_buf()
    179  struct binder_buffer *buffers[BUFFER_NUM];  in binder_selftest_alloc_free() local
    181  binder_selftest_alloc_buf(alloc, buffers, sizes, seq);  in binder_selftest_alloc_free()
    182  binder_selftest_free_buf(alloc, buffers, sizes, seq, end);  in binder_selftest_alloc_free()
    185  binder_selftest_alloc_buf(alloc, buffers, sizes, seq);  in binder_selftest_alloc_free()
    [all …]
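The selftest's pattern is worth noting: allocate BUFFER_NUM buffers, free them in a caller-chosen order seq[], then allocate again; if the allocator's free-block coalescing is correct, the second pass must succeed for every permutation of seq. A self-contained sketch of one round trip, with plain malloc()/free() as hypothetical stand-ins for binder_alloc_new_buf()/binder_alloc_free_buf():

#include <stdio.h>
#include <stdlib.h>

#define BUFFER_NUM 5    /* same count the binder selftest uses */

/* Allocate, free in the order given by seq[], then allocate again to
 * confirm the allocator recovered (hypothetical harness). */
static int round_trip(const size_t sizes[], const int seq[])
{
        void *buffers[BUFFER_NUM];
        int i;

        for (i = 0; i < BUFFER_NUM; i++)
                if (!(buffers[i] = malloc(sizes[i])))
                        return -1;
        for (i = 0; i < BUFFER_NUM; i++)
                free(buffers[seq[i]]);
        for (i = 0; i < BUFFER_NUM; i++) {
                if (!(buffers[i] = malloc(sizes[i])))
                        return -1;
                free(buffers[i]);
        }
        return 0;
}

int main(void)
{
        const size_t sizes[BUFFER_NUM] = { 32, 64, 128, 256, 512 };
        const int seq[BUFFER_NUM] = { 3, 0, 4, 1, 2 }; /* one sample order */

        return round_trip(sizes, seq) ? 1 : 0;
}

The real selftest drives this over all permutations of seq, which is what exercises every possible coalescing order.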
D | binder_alloc.c |
     64  if (list_is_last(&buffer->entry, &alloc->buffers))  in binder_alloc_buffer_size()
    652  BUG_ON(alloc->buffers.next == &buffer->entry);  in binder_delete_free_buffer()
    658  if (!list_is_last(&buffer->entry, &alloc->buffers)) {  in binder_delete_free_buffer()
    704  if (!list_is_last(&buffer->entry, &alloc->buffers)) {  in binder_free_buf_locked()
    712  if (alloc->buffers.next != &buffer->entry) {  in binder_free_buf_locked()
    873  list_add(&buffer->entry, &alloc->buffers);  in binder_alloc_mmap_handler()
    904  int buffers, page_count;  in binder_alloc_deferred_release() local
    907  buffers = 0;  in binder_alloc_deferred_release()
    922  buffers++;  in binder_alloc_deferred_release()
    925  while (!list_empty(&alloc->buffers)) {  in binder_alloc_deferred_release()
    [all …]
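binder_alloc keeps every buffer on one address-ordered list and stores no explicit size: binder_alloc_buffer_size() derives a buffer's size from the start of the next entry, or from the end of the mapped area when list_is_last() says there is no next entry. A kernel-style sketch of that derivation, with hypothetical simplified structs (the real ones live in binder_alloc.h):

#include <linux/list.h>

/* Hypothetical, simplified allocator state: buffers are kept in
 * address order on one list and carry no size field of their own. */
struct my_alloc {
        struct list_head buffers;       /* all buffers, address order */
        void *buffer_end;               /* end of the mapped region */
};

struct my_buffer {
        struct list_head entry;
        void *user_data;                /* start address of this buffer */
};

static size_t my_buffer_size(struct my_alloc *alloc, struct my_buffer *buffer)
{
        /* Last buffer: size runs to the end of the area; otherwise to
         * the start of the next buffer, which is how
         * binder_alloc_buffer_size() uses list_is_last(). */
        if (list_is_last(&buffer->entry, &alloc->buffers))
                return alloc->buffer_end - buffer->user_data;
        return list_next_entry(buffer, entry)->user_data - buffer->user_data;
}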
/drivers/iio/buffer/
D | industrialio-hw-consumer.c |
     23  struct list_head buffers;  member
     57  list_for_each_entry(buf, &hwc->buffers, head) {  in iio_hw_consumer_get_buffer()
     72  list_add_tail(&buf->head, &hwc->buffers);  in iio_hw_consumer_get_buffer()
     94  INIT_LIST_HEAD(&hwc->buffers);  in iio_hw_consumer_alloc()
    116  list_for_each_entry(buf, &hwc->buffers, head)  in iio_hw_consumer_alloc()
    134  list_for_each_entry_safe(buf, n, &hwc->buffers, head)  in iio_hw_consumer_free()
    183  list_for_each_entry(buf, &hwc->buffers, head) {  in iio_hw_consumer_enable()
    192  list_for_each_entry_continue_reverse(buf, &hwc->buffers, head)  in iio_hw_consumer_enable()
    206  list_for_each_entry(buf, &hwc->buffers, head)  in iio_hw_consumer_disable()
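The hw-consumer shows the standard embedded list_head lifecycle: INIT_LIST_HEAD() at allocation, list_add_tail() to append, list_for_each_entry() to walk, and the _safe variant for teardown because entries are deleted mid-iteration. A compact kernel-style sketch of that lifecycle with hypothetical types:

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/errno.h>

/* Hypothetical consumer mirroring iio_hw_consumer's list usage. */
struct my_consumer {
        struct list_head buffers;       /* list of my_buf.head */
};

struct my_buf {
        struct list_head head;
        size_t len;
};

static void my_consumer_init(struct my_consumer *hwc)
{
        INIT_LIST_HEAD(&hwc->buffers);
}

static int my_consumer_add(struct my_consumer *hwc, size_t len, gfp_t gfp)
{
        struct my_buf *buf = kzalloc(sizeof(*buf), gfp);

        if (!buf)
                return -ENOMEM;
        buf->len = len;
        list_add_tail(&buf->head, &hwc->buffers);
        return 0;
}

static void my_consumer_free(struct my_consumer *hwc)
{
        struct my_buf *buf, *n;

        /* _safe variant: entries are deleted while walking the list. */
        list_for_each_entry_safe(buf, n, &hwc->buffers, head) {
                list_del(&buf->head);
                kfree(buf);
        }
}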
/drivers/media/pci/ivtv/
D | ivtv-queue.c |
     35  q->buffers = 0;  in ivtv_queue_init()
     53  q->buffers++;  in ivtv_enqueue()
     68  q->buffers--;  in ivtv_dequeue()
     82  from->buffers--;  in ivtv_queue_move_buf()
     88  to->buffers++;  in ivtv_queue_move_buf()
    143  steal->buffers--;  in ivtv_queue_move()
    147  from->buffers++;  in ivtv_queue_move()
    184  int SGsize = sizeof(struct ivtv_sg_host_element) * s->buffers;  in ivtv_stream_alloc()
    187  if (s->buffers == 0)  in ivtv_stream_alloc()
    192  s->name, s->buffers, s->buf_size, s->buffers * s->buf_size / 1024);  in ivtv_stream_alloc()
    [all …]
/drivers/gpu/drm/i915/gem/selftests/
D | i915_gem_client_blt.c |
    102  struct blit_buffer buffers[3];  member
    264  for (i = 0; i < ARRAY_SIZE(t->buffers); i++)  in tiled_blits_destroy_buffers()
    265  i915_vma_put(t->buffers[i].vma);  in tiled_blits_destroy_buffers()
    317  for (i = 0; i < ARRAY_SIZE(t->buffers); i++) {  in tiled_blits_create_buffers()
    326  t->buffers[i].vma = vma;  in tiled_blits_create_buffers()
    327  t->buffers[i].tiling =  in tiled_blits_create_buffers()
    331  if (HAS_4TILE(i915) && t->buffers[i].tiling == CLIENT_TILING_Y)  in tiled_blits_create_buffers()
    332  t->buffers[i].tiling = CLIENT_TILING_4;  in tiled_blits_create_buffers()
    333  else if (!HAS_4TILE(i915) && t->buffers[i].tiling == CLIENT_TILING_4)  in tiled_blits_create_buffers()
    334  t->buffers[i].tiling = CLIENT_TILING_Y;  in tiled_blits_create_buffers()
    [all …]
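Lines 331-334 show the selftest normalizing a picked tiling mode to what the hardware actually supports: parts with Tile4 get CLIENT_TILING_Y replaced by CLIENT_TILING_4, and parts without it get the reverse substitution. A small sketch of that capability-based substitution; the enum, normalize_tiling() and has_4tile flag are all hypothetical stand-ins:

#include <stdbool.h>
#include <stdio.h>

enum tiling { TILING_X, TILING_Y, TILING_4 };

/* Clamp a requested tiling to the hardware's capabilities: swap Y and
 * Tile4 in whichever direction the part requires, as the i915 selftest
 * does via HAS_4TILE(). */
static enum tiling normalize_tiling(enum tiling t, bool has_4tile)
{
        if (has_4tile && t == TILING_Y)
                return TILING_4;
        if (!has_4tile && t == TILING_4)
                return TILING_Y;
        return t;
}

int main(void)
{
        printf("%d\n", normalize_tiling(TILING_Y, true));  /* -> TILING_4 */
        printf("%d\n", normalize_tiling(TILING_4, false)); /* -> TILING_Y */
        return 0;
}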
/drivers/scsi/isci/
D | unsolicited_frame_control.c |
    110  uf = &uf_control->buffers.array[i];  in sci_unsolicited_frame_control_construct()
    136  *frame_header = &uf_control->buffers.array[frame_index].header->data;  in sci_unsolicited_frame_control_get_header()
    149  *frame_buffer = uf_control->buffers.array[frame_index].buffer;  in sci_unsolicited_frame_control_get_buffer()
    184  uf_control->buffers.array[frame_index].state = UNSOLICITED_FRAME_RELEASED;  in sci_unsolicited_frame_control_release_frame()
    198  while (uf_control->buffers.array[frame_get].state == UNSOLICITED_FRAME_RELEASED) {  in sci_unsolicited_frame_control_release_frame()
    199  uf_control->buffers.array[frame_get].state = UNSOLICITED_FRAME_EMPTY;  in sci_unsolicited_frame_control_release_frame()
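The release path here is a mark-then-drain ring: frames may be released out of order, so release only flags a slot UNSOLICITED_FRAME_RELEASED; the get pointer then advances over every contiguous released slot, flipping each to EMPTY, so hardware slots are reclaimed strictly in ring order. A self-contained sketch of the technique with hypothetical names:

#include <stdbool.h>
#include <stdio.h>

#define RING_SIZE 8

enum slot_state { SLOT_EMPTY, SLOT_IN_USE, SLOT_RELEASED };

/* Hypothetical ring: completions may arrive out of order, but the
 * hardware "get" index can only move forward over contiguous runs. */
struct frame_ring {
        enum slot_state state[RING_SIZE];
        unsigned get;
};

/* Returns true when the get pointer advanced, i.e. when the caller
 * must tell the hardware about newly reusable slots. The real driver
 * guarantees a non-released slot bounds the drain loop. */
static bool ring_release(struct frame_ring *r, unsigned index)
{
        bool advanced = false;

        r->state[index] = SLOT_RELEASED;
        while (r->state[r->get] == SLOT_RELEASED) {
                r->state[r->get] = SLOT_EMPTY;
                r->get = (r->get + 1) % RING_SIZE;
                advanced = true;
        }
        return advanced;
}

int main(void)
{
        struct frame_ring r = { .get = 0 };

        r.state[0] = r.state[1] = r.state[2] = SLOT_IN_USE;
        printf("release 1: advanced=%d\n", ring_release(&r, 1)); /* 0 */
        printf("release 0: advanced=%d\n", ring_release(&r, 0)); /* 1: drains 0 and 1 */
        return 0;
}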
/drivers/media/usb/pvrusb2/
D | pvrusb2-io.c |
     49  struct pvr2_buffer **buffers;  member
    305  memcpy(nb, sp->buffers,  in pvr2_stream_buffer_count()
    307  kfree(sp->buffers);  in pvr2_stream_buffer_count()
    309  sp->buffers = nb;  in pvr2_stream_buffer_count()
    321  sp->buffers[sp->buffer_total_count] = bp;  in pvr2_stream_buffer_count()
    328  bp = sp->buffers[sp->buffer_total_count - 1];  in pvr2_stream_buffer_count()
    330  sp->buffers[sp->buffer_total_count - 1] = NULL;  in pvr2_stream_buffer_count()
    338  nb = kmemdup(sp->buffers, scnt * sizeof(*nb),  in pvr2_stream_buffer_count()
    342  kfree(sp->buffers);  in pvr2_stream_buffer_count()
    343  sp->buffers = nb;  in pvr2_stream_buffer_count()
    [all …]
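When shrinking its buffer array, pvr2_stream_buffer_count() duplicates the surviving prefix with kmemdup(), frees the old array, and swaps the pointer. A kernel-style sketch of that duplicate-and-swap shrink, with a hypothetical helper name:

#include <linux/slab.h>
#include <linux/string.h>
#include <linux/errno.h>

/* Shrink an array of pointers to its first new_count entries by
 * duplicating the prefix and swapping, as pvrusb2 does when reducing
 * a stream's buffer count (hypothetical helper). */
static int shrink_ptr_array(void ***arrp, unsigned new_count, gfp_t gfp)
{
        void **nb = NULL;

        if (new_count) {
                nb = kmemdup(*arrp, new_count * sizeof(*nb), gfp);
                if (!nb)
                        return -ENOMEM;
        }
        kfree(*arrp);
        *arrp = nb;
        return 0;
}

Duplicating before freeing means an allocation failure leaves the original array fully intact, which is the point of preferring kmemdup() over resizing in place.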
/drivers/infiniband/hw/hfi1/
D | init.c |
     414  rcd->egrbufs.buffers =  in hfi1_create_ctxtdata()
     416  sizeof(*rcd->egrbufs.buffers),  in hfi1_create_ctxtdata()
     418  if (!rcd->egrbufs.buffers)  in hfi1_create_ctxtdata()
    1110  if (rcd->egrbufs.buffers[e].addr)  in hfi1_free_ctxtdata()
    1112  rcd->egrbufs.buffers[e].len,  in hfi1_free_ctxtdata()
    1113  rcd->egrbufs.buffers[e].addr,  in hfi1_free_ctxtdata()
    1114  rcd->egrbufs.buffers[e].dma);  in hfi1_free_ctxtdata()
    1116  kfree(rcd->egrbufs.buffers);  in hfi1_free_ctxtdata()
    1118  rcd->egrbufs.buffers = NULL;  in hfi1_free_ctxtdata()
    1848  rcd->egrbufs.buffers[idx].addr =  in hfi1_setup_eagerbufs()
    [all …]
/drivers/media/pci/cx18/
D | cx18-queue.c |
    337  if (s->buffers == 0)  in cx18_stream_alloc()
    341  s->name, s->buffers, s->buf_size,  in cx18_stream_alloc()
    342  s->buffers * s->buf_size / 1024,  in cx18_stream_alloc()
    343  (s->buffers * s->buf_size * 100 / 1024) % 100);  in cx18_stream_alloc()
    345  if (((char __iomem *)&cx->scb->cpu_mdl[cx->free_mdl_idx + s->buffers] -  in cx18_stream_alloc()
    359  for (i = 0; i < s->buffers; i++) {  in cx18_stream_alloc()
    394  if (i == s->buffers) {  in cx18_stream_alloc()
    395  cx->free_mdl_idx += s->buffers;  in cx18_stream_alloc()
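The printout at lines 342-343 avoids floating point: bytes / 1024 gives whole kilobytes, and scaling by 100 before dividing, then taking the remainder modulo 100, yields a two-digit fraction. The same integer-only trick in a standalone program, with hypothetical sizes:

#include <stdio.h>

int main(void)
{
        /* Integer-only "x.yz KiB" formatting as in cx18_stream_alloc():
         * scale by 100 before dividing, then take the remainder mod 100. */
        unsigned buffers = 63, buf_size = 1000; /* hypothetical stream */
        unsigned bytes = buffers * buf_size;

        printf("%u buffers of %u bytes = %u.%02u KiB\n",
               buffers, buf_size, bytes / 1024, (bytes * 100 / 1024) % 100);
        return 0;    /* prints "63 buffers of 1000 bytes = 61.52 KiB" */
}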
/drivers/atm/
D | nicstarmac.copyright |
    25  * IDT always receives data into a small buffer, then large buffers
    28  * Fix is simple: make large buffers large enough to hold entire
    32  * buffers. This is done by 2 things:
    35  *    recycle large data buffers
    36  * 2) skb_clone of received buffers
/drivers/crypto/ccree/
D | cc_hash.h |
    38  u8 buffers[2][CC_MAX_HASH_BLCK_SIZE] ____cacheline_aligned;  member
    66  return state->buffers[state->buff_index];  in cc_hash_buf()
    76  return state->buffers[state->buff_index ^ 1];  in cc_next_buf()
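cc_hash keeps two staging buffers and a single index: cc_hash_buf() returns the current one and cc_next_buf() the other via buff_index ^ 1, so switching buffers is one XOR. A runnable sketch of that double-buffer idiom; the struct and helper names here are hypothetical, and BLOCK_SIZE stands in for CC_MAX_HASH_BLCK_SIZE:

#include <stdio.h>

#define BLOCK_SIZE 64   /* stand-in for CC_MAX_HASH_BLCK_SIZE */

/* Hypothetical double-buffered state: two staging buffers, one index
 * bit selecting the active one. */
struct hash_state {
        unsigned char buffers[2][BLOCK_SIZE];
        unsigned buff_index;    /* 0 or 1 */
};

static unsigned char *cur_buf(struct hash_state *s)
{
        return s->buffers[s->buff_index];
}

static unsigned char *next_buf(struct hash_state *s)
{
        /* XOR with 1 picks "the other" buffer without a branch. */
        return s->buffers[s->buff_index ^ 1];
}

int main(void)
{
        struct hash_state s = { .buff_index = 0 };

        printf("cur=%p next=%p\n", (void *)cur_buf(&s), (void *)next_buf(&s));
        s.buff_index ^= 1;      /* swap roles for the next update */
        printf("cur=%p next=%p\n", (void *)cur_buf(&s), (void *)next_buf(&s));
        return 0;
}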
/drivers/platform/goldfish/
D | goldfish_pipe.c |
    202  struct goldfish_pipe_dev_buffers *buffers;  member
    641  dev->buffers->signalled_pipe_buffers[i].id,  in goldfish_pipe_interrupt()
    642  dev->buffers->signalled_pipe_buffers[i].flags);  in goldfish_pipe_interrupt()
    737  dev->buffers->open_command_params.rw_params_max_count =  in goldfish_pipe_open()
    739  dev->buffers->open_command_params.command_buffer_ptr =  in goldfish_pipe_open()
    844  dev->buffers = (struct goldfish_pipe_dev_buffers *)  in goldfish_pipe_device_init()
    846  if (!dev->buffers) {  in goldfish_pipe_device_init()
    853  write_pa_addr(&dev->buffers->signalled_pipe_buffers,  in goldfish_pipe_device_init()
    860  write_pa_addr(&dev->buffers->open_command_params,  in goldfish_pipe_device_init()
    873  free_page((unsigned long)dev->buffers);  in goldfish_pipe_device_deinit()
/drivers/staging/qlge/
D | TODO |
     1  * commit 7c734359d350 ("qlge: Size RX buffers based on MTU.", v2.6.33-rc1)
     9  * while in that area, using two 8k buffers to store one 9k frame is a poor
    11  * in the "chain of large buffers" case, the driver uses an skb allocated with
/drivers/net/ethernet/ibm/emac/
D | Kconfig |
    13  int "Number of receive buffers"
    18  int "Number of transmit buffers"
/drivers/char/xillybus/
D | xillyusb.c |
     94  struct list_head buffers;  member
    474  list_splice(&ep->filled_buffers, &ep->buffers);  in endpoint_dealloc()
    476  list_for_each_safe(this, next, &ep->buffers) {  in endpoint_dealloc()
    503  INIT_LIST_HEAD(&ep->buffers);  in endpoint_alloc()
    542  list_add_tail(&xb->entry, &ep->buffers);  in endpoint_alloc()
    659  list_add_tail(&xb->entry, &ep->buffers);  in bulk_in_completer()
    689  list_add_tail(&xb->entry, &ep->buffers);  in bulk_out_completer()
    715  if (list_empty(&ep->buffers)) {  in try_queue_bulk_in()
    720  xb = list_first_entry(&ep->buffers, struct xillybuffer, entry);  in try_queue_bulk_in()
    755  list_add_tail(&xb->entry, &ep->buffers);  in try_queue_bulk_in()
    [all …]
D | xillybus_core.c |
    345  struct xilly_buffer **buffers,  in xilly_get_dma_buffers() argument
    353  if (buffers) { /* Not the message buffer */  in xilly_get_dma_buffers()
    405  if (buffers) { /* Not the message buffer */  in xilly_get_dma_buffers()
    408  buffers[i] = this_buffer++;  in xilly_get_dma_buffers()
    508  struct xilly_buffer **buffers = NULL;  in xilly_setupchannels() local
    537  buffers = devm_kcalloc(dev, bufnum,  in xilly_setupchannels()
    540  if (!buffers)  in xilly_setupchannels()
    554  channel->rd_buffers = buffers;  in xilly_setupchannels()
    555  rc = xilly_get_dma_buffers(ep, &rd_alloc, buffers,  in xilly_setupchannels()
    568  channel->wr_buffers = buffers;  in xilly_setupchannels()
    [all …]
/drivers/staging/greybus/
D | camera.c |
      61  struct gb_camera_debugfs_buffer *buffers;  member
     875  &gcam->debugfs.buffers[GB_CAMERA_DEBUGFS_BUFFER_CAPABILITIES];  in gb_camera_debugfs_capabilities()
     911  &gcam->debugfs.buffers[GB_CAMERA_DEBUGFS_BUFFER_STREAMS];  in gb_camera_debugfs_configure_streams()
    1046  &gcam->debugfs.buffers[GB_CAMERA_DEBUGFS_BUFFER_FLUSH];  in gb_camera_debugfs_flush()
    1105  buffer = &gcam->debugfs.buffers[op->buffer];  in gb_camera_debugfs_read()
    1171  gcam->debugfs.buffers =  in gb_camera_debugfs_init()
    1173  sizeof(*gcam->debugfs.buffers)));  in gb_camera_debugfs_init()
    1174  if (!gcam->debugfs.buffers)  in gb_camera_debugfs_init()
    1181  gcam->debugfs.buffers[i].length = 0;  in gb_camera_debugfs_init()
    1195  vfree(gcam->debugfs.buffers);  in gb_camera_debugfs_cleanup()
/drivers/net/ethernet/sun/
D | sunqe.c |
    127  struct sunqe_buffers *qbufs = qep->buffers;  in qe_init_rings()
    417  struct sunqe_buffers *qbufs = qep->buffers;  in qe_rx()
    576  struct sunqe_buffers *qbufs = qep->buffers;  in qe_start_xmit()
    885  qe->buffers = dma_alloc_coherent(&op->dev, sizeof(struct sunqe_buffers),  in qec_ether_init()
    888  qe->buffers == NULL || qe->buffers_dvma == 0)  in qec_ether_init()
    920  if (qe->buffers)  in qec_ether_init()
    923  qe->buffers,  in qec_ether_init()
    948  qp->buffers, qp->buffers_dvma);  in qec_sbus_remove()
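sunqe allocates all of its packet buffers as one coherent DMA block, keeping both the CPU pointer (qe->buffers) and the device address (buffers_dvma), and releases them together with dma_free_coherent(). A kernel-style sketch of that single-block pattern, with hypothetical names:

#include <linux/dma-mapping.h>
#include <linux/device.h>
#include <linux/errno.h>

/* Hypothetical device context mirroring sunqe's single coherent
 * allocation: one CPU virtual pointer plus one DMA handle covering
 * the whole buffer area. */
struct my_nic {
        struct device *dev;
        void *buffers;           /* CPU view */
        dma_addr_t buffers_dvma; /* device view */
        size_t buffers_size;
};

static int my_nic_alloc_buffers(struct my_nic *p, size_t size)
{
        p->buffers_size = size;
        p->buffers = dma_alloc_coherent(p->dev, size, &p->buffers_dvma,
                                        GFP_KERNEL);
        return p->buffers ? 0 : -ENOMEM;
}

static void my_nic_free_buffers(struct my_nic *p)
{
        if (p->buffers)
                dma_free_coherent(p->dev, p->buffers_size, p->buffers,
                                  p->buffers_dvma);
        p->buffers = NULL;
}

One allocation up front avoids per-packet mapping on this old hardware, at the cost of copying frames into and out of the fixed area.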
/drivers/media/platform/marvell/
D | mcam-core.c |
     459  if (list_empty(&cam->buffers)) {  in mcam_frame_tasklet()
     465  buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,  in mcam_frame_tasklet()
     544  if (list_empty(&cam->buffers)) {  in mcam_set_contig_buffer()
     552  buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,  in mcam_set_contig_buffer()
     608  buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);  in mcam_sg_next_buffer()
     634  if (list_empty(&cam->buffers)) {  in mcam_ctlr_dma_sg()
     671  if (!list_empty(&cam->buffers)) {  in mcam_dma_sg_done()
    1103  start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);  in mcam_vb_buf_queue()
    1104  list_add(&mvb->queue, &cam->buffers);  in mcam_vb_buf_queue()
    1121  list_for_each_entry_safe(buf, node, &cam->buffers, queue) {  in mcam_vb_requeue_bufs()
    [all …]
/drivers/dma-buf/
D | Kconfig |
    47  Don't pin buffers if the dynamic DMA-buf interface is available on
    82  in location /sys/kernel/dmabuf/buffers.
    84  /sys/kernel/dmabuf/buffers/<inode_number> will contain
/drivers/soc/fsl/dpio/
D | qbman-portal.h |
    191  const u64 *buffers,
    235  int qbman_swp_acquire(struct qbman_swp *s, u16 bpid, u64 *buffers,
    624  const u64 *buffers,  in qbman_swp_release() argument
    627  return qbman_swp_release_ptr(s, d, buffers, num_buffers);  in qbman_swp_release()
/drivers/net/ethernet/freescale/fman/
D | Kconfig |
    20  align buffers, data start, SG fragment length to avoid FMan DMA
    35  the SG list and any one of the buffers, except the last