/drivers/media/platform/vivid/ |
D | vivid-vid-common.c |
    53   .buffers = 1,
    62   .buffers = 1,
    70   .buffers = 1,
    78   .buffers = 1,
    86   .buffers = 1,
    94   .buffers = 1,
    102  .buffers = 1,
    110  .buffers = 1,
    118  .buffers = 1,
    126  .buffers = 1,
    [all …]
|
D | vivid-vid-cap.c |
    47   .buffers = 1,
    54   .buffers = 1,
    61   .buffers = 1,
    103  unsigned buffers = tpg_g_buffers(&dev->tpg);    in vid_cap_queue_setup() local
    129  if (*nplanes != buffers)    in vid_cap_queue_setup()
    131  for (p = 0; p < buffers; p++) {    in vid_cap_queue_setup()
    137  for (p = 0; p < buffers; p++)    in vid_cap_queue_setup()
    145  *nplanes = buffers;    in vid_cap_queue_setup()
    148  for (p = 0; p < buffers; p++)    in vid_cap_queue_setup()
    158  unsigned buffers = tpg_g_buffers(&dev->tpg);    in vid_cap_buf_prepare() local
    [all …]
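
The vid_cap_queue_setup() hits above follow the usual videobuf2 multi-planar
queue_setup pattern: report one vb2 plane per buffer plane and a size for each,
or validate a request that was already made. Below is a minimal sketch of that
pattern, assuming the recent vb2 callback signature; my_dev, my_plane_count()
and my_plane_size() are made-up stand-ins (vivid itself takes the count from
tpg_g_buffers()):

#include <linux/errno.h>
#include <media/videobuf2-core.h>

/* Hypothetical helpers standing in for the driver's format state. */
struct my_dev;
unsigned int my_plane_count(struct my_dev *dev);
unsigned int my_plane_size(struct my_dev *dev, unsigned int plane);

static int my_queue_setup(struct vb2_queue *vq,
                          unsigned int *nbuffers, unsigned int *nplanes,
                          unsigned int sizes[], struct device *alloc_devs[])
{
        struct my_dev *dev = vb2_get_drv_priv(vq);
        unsigned int buffers = my_plane_count(dev);
        unsigned int p;

        if (*nplanes) {
                /* Format already negotiated: only validate the request. */
                if (*nplanes != buffers)
                        return -EINVAL;
                for (p = 0; p < buffers; p++)
                        if (sizes[p] < my_plane_size(dev, p))
                                return -EINVAL;
                return 0;
        }

        *nplanes = buffers;
        for (p = 0; p < buffers; p++)
                sizes[p] = my_plane_size(dev, p);
        return 0;
}
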
|
/drivers/media/pci/zoran/ |
D | zoran_driver.c |
    196  fh->buffers.buffer_size = v4l_bufsize;    in map_mode_raw()
    197  fh->buffers.num_buffers = v4l_nbufs;    in map_mode_raw()
    202  fh->buffers.buffer_size = jpg_bufsize;    in map_mode_jpg()
    203  fh->buffers.num_buffers = jpg_nbufs;    in map_mode_jpg()
    222  for (i = 0; i < fh->buffers.num_buffers; i++) {    in v4l_fbuffer_alloc()
    223  if (fh->buffers.buffer[i].v4l.fbuffer)    in v4l_fbuffer_alloc()
    230  mem = kmalloc(fh->buffers.buffer_size,    in v4l_fbuffer_alloc()
    240  fh->buffers.buffer[i].v4l.fbuffer = mem;    in v4l_fbuffer_alloc()
    241  fh->buffers.buffer[i].v4l.fbuffer_phys = virt_to_phys(mem);    in v4l_fbuffer_alloc()
    242  fh->buffers.buffer[i].v4l.fbuffer_bus = virt_to_bus(mem);    in v4l_fbuffer_alloc()
    [all …]
|
/drivers/android/ |
D | binder_alloc_selftest.c |
    124  struct binder_buffer *buffers[],    in binder_selftest_alloc_buf() argument
    130  buffers[i] = binder_alloc_new_buf(alloc, sizes[i], 0, 0, 0);    in binder_selftest_alloc_buf()
    131  if (IS_ERR(buffers[i]) ||    in binder_selftest_alloc_buf()
    132  !check_buffer_pages_allocated(alloc, buffers[i],    in binder_selftest_alloc_buf()
    141  struct binder_buffer *buffers[],    in binder_selftest_free_buf() argument
    147  binder_alloc_free_buf(alloc, buffers[seq[i]]);    in binder_selftest_free_buf()
    187  struct binder_buffer *buffers[BUFFER_NUM];    in binder_selftest_alloc_free() local
    189  binder_selftest_alloc_buf(alloc, buffers, sizes, seq);    in binder_selftest_alloc_free()
    190  binder_selftest_free_buf(alloc, buffers, sizes, seq, end);    in binder_selftest_alloc_free()
    193  binder_selftest_alloc_buf(alloc, buffers, sizes, seq);    in binder_selftest_alloc_free()
    [all …]
|
D | binder_alloc.c |
    67   if (list_is_last(&buffer->entry, &alloc->buffers))    in binder_alloc_buffer_size()
    533  BUG_ON(alloc->buffers.next == &buffer->entry);    in binder_delete_free_buffer()
    543  if (!list_is_last(&buffer->entry, &alloc->buffers)) {    in binder_delete_free_buffer()
    609  if (!list_is_last(&buffer->entry, &alloc->buffers)) {    in binder_free_buf_locked()
    617  if (alloc->buffers.next != &buffer->entry) {    in binder_free_buf_locked()
    712  list_add(&buffer->entry, &alloc->buffers);    in binder_alloc_mmap_handler()
    743  int buffers, page_count;    in binder_alloc_deferred_release() local
    748  buffers = 0;    in binder_alloc_deferred_release()
    757  buffers++;    in binder_alloc_deferred_release()
    760  while (!list_empty(&alloc->buffers)) {    in binder_alloc_deferred_release()
    [all …]
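
The binder_alloc.c hits above all lean on one property of alloc->buffers: it
is a list of buffers ordered by address within a single mmap'ed region, so a
buffer's size is simply the gap to the next entry, or to the end of the region
for the last one. A simplified sketch of that idea with made-up types
(my_alloc / my_buffer are not the real binder structures):

#include <linux/list.h>
#include <linux/types.h>

struct my_buffer {
        struct list_head entry;         /* position in my_alloc.buffers, address ordered */
        unsigned long data;             /* start address of this buffer's payload */
};

struct my_alloc {
        struct list_head buffers;       /* every buffer, free or in use */
        unsigned long buffer;           /* start of the mmap'ed area */
        size_t buffer_size;             /* size of the mmap'ed area */
};

/* A buffer ends where the next one starts; the last one ends at the end
 * of the mapped area. */
static size_t my_buffer_size(struct my_alloc *alloc, struct my_buffer *buffer)
{
        if (list_is_last(&buffer->entry, &alloc->buffers))
                return alloc->buffer + alloc->buffer_size - buffer->data;
        return list_next_entry(buffer, entry)->data - buffer->data;
}
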
|
/drivers/media/pci/ivtv/ |
D | ivtv-queue.c |
    47   q->buffers = 0;    in ivtv_queue_init()
    65   q->buffers++;    in ivtv_enqueue()
    80   q->buffers--;    in ivtv_dequeue()
    94   from->buffers--;    in ivtv_queue_move_buf()
    100  to->buffers++;    in ivtv_queue_move_buf()
    155  steal->buffers--;    in ivtv_queue_move()
    159  from->buffers++;    in ivtv_queue_move()
    196  int SGsize = sizeof(struct ivtv_sg_host_element) * s->buffers;    in ivtv_stream_alloc()
    199  if (s->buffers == 0)    in ivtv_stream_alloc()
    204  s->name, s->buffers, s->buf_size, s->buffers * s->buf_size / 1024);    in ivtv_stream_alloc()
    [all …]
|
/drivers/scsi/isci/ |
D | unsolicited_frame_control.c |
    110  uf = &uf_control->buffers.array[i];    in sci_unsolicited_frame_control_construct()
    136  *frame_header = &uf_control->buffers.array[frame_index].header->data;    in sci_unsolicited_frame_control_get_header()
    149  *frame_buffer = uf_control->buffers.array[frame_index].buffer;    in sci_unsolicited_frame_control_get_buffer()
    184  uf_control->buffers.array[frame_index].state = UNSOLICITED_FRAME_RELEASED;    in sci_unsolicited_frame_control_release_frame()
    198  while (uf_control->buffers.array[frame_get].state == UNSOLICITED_FRAME_RELEASED) {    in sci_unsolicited_frame_control_release_frame()
    199  uf_control->buffers.array[frame_get].state = UNSOLICITED_FRAME_EMPTY;    in sci_unsolicited_frame_control_release_frame()
|
/drivers/infiniband/hw/hfi1/ |
D | init.c |
    300   rcd->egrbufs.buffers = kzalloc_node(    in hfi1_create_ctxtdata()
    301   rcd->egrbufs.count * sizeof(*rcd->egrbufs.buffers),    in hfi1_create_ctxtdata()
    303   if (!rcd->egrbufs.buffers)    in hfi1_create_ctxtdata()
    336   kfree(rcd->egrbufs.buffers);    in hfi1_create_ctxtdata()
    954   if (rcd->egrbufs.buffers[e].dma)    in hfi1_free_ctxtdata()
    956   rcd->egrbufs.buffers[e].len,    in hfi1_free_ctxtdata()
    957   rcd->egrbufs.buffers[e].addr,    in hfi1_free_ctxtdata()
    958   rcd->egrbufs.buffers[e].dma);    in hfi1_free_ctxtdata()
    960   kfree(rcd->egrbufs.buffers);    in hfi1_free_ctxtdata()
    1732  rcd->egrbufs.buffers[idx].addr =    in hfi1_setup_eagerbufs()
    [all …]
|
/drivers/media/usb/pvrusb2/ |
D | pvrusb2-io.c |
    63   struct pvr2_buffer **buffers;    member
    324  memcpy(nb,sp->buffers,    in pvr2_stream_buffer_count()
    326  kfree(sp->buffers);    in pvr2_stream_buffer_count()
    328  sp->buffers = nb;    in pvr2_stream_buffer_count()
    340  sp->buffers[sp->buffer_total_count] = bp;    in pvr2_stream_buffer_count()
    347  bp = sp->buffers[sp->buffer_total_count - 1];    in pvr2_stream_buffer_count()
    349  sp->buffers[sp->buffer_total_count - 1] = NULL;    in pvr2_stream_buffer_count()
    357  nb = kmemdup(sp->buffers, scnt * sizeof(*nb),    in pvr2_stream_buffer_count()
    361  kfree(sp->buffers);    in pvr2_stream_buffer_count()
    362  sp->buffers = nb;    in pvr2_stream_buffer_count()
    [all …]
|
/drivers/media/usb/cpia2/ |
D | cpia2_v4l.c |
    190  struct framebuf *frame = &cam->buffers[frame_nr];    in sync()
    442  if (cam->buffers[frame].status == FRAME_READING)    in cpia2_s_fmt_vid_cap()
    446  cam->buffers[frame].status = FRAME_EMPTY;    in cpia2_s_fmt_vid_cap()
    818  buf->m.offset = cam->buffers[buf->index].data - cam->frame_buffer;    in cpia2_querybuf()
    830  switch (cam->buffers[buf->index].status) {    in cpia2_querybuf()
    838  buf->bytesused = cam->buffers[buf->index].length;    in cpia2_querybuf()
    839  buf->timestamp = cam->buffers[buf->index].timestamp;    in cpia2_querybuf()
    840  buf->sequence = cam->buffers[buf->index].seq;    in cpia2_querybuf()
    871  if(cam->buffers[buf->index].status == FRAME_READY)    in cpia2_qbuf()
    872  cam->buffers[buf->index].status = FRAME_EMPTY;    in cpia2_qbuf()
    [all …]
|
D | cpia2_core.c |
    2237  if(!cam->buffers) {    in cpia2_allocate_buffers()
    2239  cam->buffers = kmalloc(size, GFP_KERNEL);    in cpia2_allocate_buffers()
    2240  if(!cam->buffers) {    in cpia2_allocate_buffers()
    2250  kfree(cam->buffers);    in cpia2_allocate_buffers()
    2251  cam->buffers = NULL;    in cpia2_allocate_buffers()
    2257  cam->buffers[i].next = &cam->buffers[i+1];    in cpia2_allocate_buffers()
    2258  cam->buffers[i].data = cam->frame_buffer +i*cam->frame_size;    in cpia2_allocate_buffers()
    2259  cam->buffers[i].status = FRAME_EMPTY;    in cpia2_allocate_buffers()
    2260  cam->buffers[i].length = 0;    in cpia2_allocate_buffers()
    2261  cam->buffers[i].max_length = 0;    in cpia2_allocate_buffers()
    [all …]
|
/drivers/media/pci/cx18/ |
D | cx18-queue.c |
    352  if (s->buffers == 0)    in cx18_stream_alloc()
    357  s->name, s->buffers, s->buf_size,    in cx18_stream_alloc()
    358  s->buffers * s->buf_size / 1024,    in cx18_stream_alloc()
    359  (s->buffers * s->buf_size * 100 / 1024) % 100);    in cx18_stream_alloc()
    361  if (((char __iomem *)&cx->scb->cpu_mdl[cx->free_mdl_idx + s->buffers] -    in cx18_stream_alloc()
    375  for (i = 0; i < s->buffers; i++) {    in cx18_stream_alloc()
    409  if (i == s->buffers) {    in cx18_stream_alloc()
    410  cx->free_mdl_idx += s->buffers;    in cx18_stream_alloc()
|
/drivers/atm/ |
D | nicstarmac.copyright |
    25  * IDT always receives data into a small buffer, then large buffers
    28  * Fix is simple: make large buffers large enough to hold entire
    32  * buffers. This is done by 2 things:
    35  * recycle large data buffers
    36  * 2) skb_clone of received buffers
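
The copyright note above describes the recycling scheme only in prose: hand a
clone of the received skb up the stack, keep the original, and give its large
data buffer straight back to the adapter. A rough sketch of that idea; the
my_card type and the my_deliver()/my_recycle_rx_buffer() helpers are
hypothetical, only skb_clone() is the real kernel API:

#include <linux/skbuff.h>

struct my_card;                                                         /* hypothetical driver state */
void my_deliver(struct my_card *card, struct sk_buff *skb);             /* hypothetical */
void my_recycle_rx_buffer(struct my_card *card, struct sk_buff *skb);   /* hypothetical */

static void my_rx_deliver(struct my_card *card, struct sk_buff *skb)
{
        /* The upper layers only ever see the clone ... */
        struct sk_buff *clone = skb_clone(skb, GFP_ATOMIC);

        if (clone)
                my_deliver(card, clone);

        /* ... so the original, and the large buffer it wraps, can be re-posted. */
        my_recycle_rx_buffer(card, skb);
}
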
|
/drivers/platform/goldfish/ |
D | goldfish_pipe_v2.c |
    599  dev->buffers->signalled_pipe_buffers[i].id,    in goldfish_pipe_interrupt()
    600  dev->buffers->signalled_pipe_buffers[i].flags);    in goldfish_pipe_interrupt()
    683  dev->buffers->open_command_params.rw_params_max_count =    in goldfish_pipe_open()
    685  dev->buffers->open_command_params.command_buffer_ptr =    in goldfish_pipe_open()
    770  BUG_ON(sizeof(*dev->buffers) > PAGE_SIZE);    in goldfish_pipe_device_init_v2()
    776  dev->buffers = (struct goldfish_pipe_dev_buffers*)page;    in goldfish_pipe_device_init_v2()
    780  u64 paddr = __pa(&dev->buffers->signalled_pipe_buffers);    in goldfish_pipe_device_init_v2()
    785  paddr = __pa(&dev->buffers->open_command_params);    in goldfish_pipe_device_init_v2()
    796  free_page((unsigned long)dev->buffers);    in goldfish_pipe_device_deinit_v2()
|
D | goldfish_pipe.h |
    76  struct goldfish_pipe_dev_buffers *buffers;    member
|
/drivers/media/platform/marvell-ccic/ |
D | mcam-core.c |
    472   if (list_empty(&cam->buffers)) {    in mcam_frame_tasklet()
    478   buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,    in mcam_frame_tasklet()
    557   if (list_empty(&cam->buffers)) {    in mcam_set_contig_buffer()
    565   buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,    in mcam_set_contig_buffer()
    620   buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);    in mcam_sg_next_buffer()
    645   if (list_empty(&cam->buffers)) {    in mcam_ctlr_dma_sg()
    682   if (!list_empty(&cam->buffers)) {    in mcam_dma_sg_done()
    1080  start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);    in mcam_vb_buf_queue()
    1081  list_add(&mvb->queue, &cam->buffers);    in mcam_vb_buf_queue()
    1098  list_for_each_entry_safe(buf, node, &cam->buffers, queue) {    in mcam_vb_requeue_bufs()
    [all …]
|
/drivers/staging/iio/Documentation/ |
D | overview.txt |
    34  fifo / ring buffers on the sensor chip. These greatly reduce the load
    46  used in IIO to fill software buffers acting in a very similar
    47  fashion to the hardware buffers described above.
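
The overview.txt lines above contrast hardware fifo/ring buffers with the
software buffers that IIO fills from a trigger. As a rough illustration of the
software side (not taken from any particular driver), a triggered-buffer poll
function usually reads one scan and pushes it into the buffer along these
lines; the scan layout and my_read_channels() are assumptions:

#include <linux/interrupt.h>
#include <linux/iio/iio.h>
#include <linux/iio/buffer.h>
#include <linux/iio/trigger.h>
#include <linux/iio/trigger_consumer.h>

/* Hypothetical scan layout: four 16-bit channels plus the timestamp slot. */
struct my_scan {
        u16 channels[4];
        s64 timestamp;
} __aligned(8);

void my_read_channels(struct iio_dev *indio_dev, u16 *out);     /* hypothetical */

static irqreturn_t my_trigger_handler(int irq, void *p)
{
        struct iio_poll_func *pf = p;
        struct iio_dev *indio_dev = pf->indio_dev;
        struct my_scan scan = { };

        /* Read the enabled channels, then hand the scan to the software buffer. */
        my_read_channels(indio_dev, scan.channels);
        iio_push_to_buffers_with_timestamp(indio_dev, &scan,
                                           iio_get_time_ns(indio_dev));
        iio_trigger_notify_done(indio_dev->trig);
        return IRQ_HANDLED;
}
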
|
/drivers/staging/greybus/ |
D | camera.c |
    62    struct gb_camera_debugfs_buffer *buffers;    member
    875   &gcam->debugfs.buffers[GB_CAMERA_DEBUGFS_BUFFER_CAPABILITIES];    in gb_camera_debugfs_capabilities()
    911   &gcam->debugfs.buffers[GB_CAMERA_DEBUGFS_BUFFER_STREAMS];    in gb_camera_debugfs_configure_streams()
    1046  &gcam->debugfs.buffers[GB_CAMERA_DEBUGFS_BUFFER_FLUSH];    in gb_camera_debugfs_flush()
    1105  buffer = &gcam->debugfs.buffers[op->buffer];    in gb_camera_debugfs_read()
    1183  gcam->debugfs.buffers = vmalloc(sizeof(*gcam->debugfs.buffers) *    in gb_camera_debugfs_init()
    1185  if (!gcam->debugfs.buffers)    in gb_camera_debugfs_init()
    1193  gcam->debugfs.buffers[i].length = 0;    in gb_camera_debugfs_init()
    1213  vfree(gcam->debugfs.buffers);    in gb_camera_debugfs_cleanup()
|
/drivers/net/ethernet/sun/ |
D | sunqe.c |
    126  struct sunqe_buffers *qbufs = qep->buffers;    in qe_init_rings()
    416  struct sunqe_buffers *qbufs = qep->buffers;    in qe_rx()
    575  struct sunqe_buffers *qbufs = qep->buffers;    in qe_start_xmit()
    885  qe->buffers = dma_alloc_coherent(&op->dev, sizeof(struct sunqe_buffers),    in qec_ether_init()
    888  qe->buffers == NULL || qe->buffers_dvma == 0)    in qec_ether_init()
    920  if (qe->buffers)    in qec_ether_init()
    923  qe->buffers,    in qec_ether_init()
    948  qp->buffers, qp->buffers_dvma);    in qec_sbus_remove()
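
The sunqe.c hits show the "one coherent block for all packet buffers" pattern:
a single dma_alloc_coherent() at device init, with the CPU address kept in
->buffers and the device address in ->buffers_dvma, and a matching
dma_free_coherent() on teardown. A minimal sketch of that pattern, with
illustrative struct names and sizes rather than the real sunqe layout:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

struct my_buffers {                     /* hypothetical: all RX and TX packet buffers */
        u8 tx_buf[16][1600];
        u8 rx_buf[16][1600];
};

struct my_priv {
        struct my_buffers *buffers;     /* CPU view of the block */
        dma_addr_t buffers_dvma;        /* device view of the same block */
};

static int my_alloc_buffers(struct device *dev, struct my_priv *priv)
{
        priv->buffers = dma_alloc_coherent(dev, sizeof(*priv->buffers),
                                           &priv->buffers_dvma, GFP_KERNEL);
        return priv->buffers ? 0 : -ENOMEM;
}

static void my_free_buffers(struct device *dev, struct my_priv *priv)
{
        if (priv->buffers)
                dma_free_coherent(dev, sizeof(*priv->buffers),
                                  priv->buffers, priv->buffers_dvma);
}
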
|
/drivers/net/ethernet/ibm/emac/ |
D | Kconfig |
    11  int "Number of receive buffers"
    16  int "Number of transmit buffers"
|
/drivers/base/ |
D | Kconfig |
    262  lockup related problems for dma-buffers shared across multiple
    323  int "Maximum PAGE_SIZE order of alignment for contiguous buffers"
    327  DMA mapping framework by default aligns all buffers to the smallest
    329  size. This works well for buffers up to a few hundreds kilobytes, but
    330  for larger buffers it just a memory waste. With this parameter you can
    331  specify the maximum PAGE_SIZE order for contiguous buffers. Larger
    332  buffers will be aligned only to this specified order. The order is
    336  of 8 means that the buffers will be aligned up to 1MiB only.
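
Spelling out the arithmetic already contained in that help text (with the
4 KiB page size and order of 8 it uses as its example):

        maximum alignment = PAGE_SIZE << order
                          = 4 KiB << 8
                          = 1 MiB

so a larger contiguous buffer, say 3 MiB, is aligned to 1 MiB instead of the
next power-of-two PAGE_SIZE order it would get by default.
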
|
/drivers/char/xillybus/ |
D | xillybus_core.c |
    323  struct xilly_buffer **buffers,    in xilly_get_dma_buffers() argument
    331  if (buffers) { /* Not the message buffer */    in xilly_get_dma_buffers()
    383  if (buffers) { /* Not the message buffer */    in xilly_get_dma_buffers()
    386  buffers[i] = this_buffer++;    in xilly_get_dma_buffers()
    486  struct xilly_buffer **buffers = NULL;    in xilly_setupchannels() local
    515  buffers = devm_kcalloc(dev, bufnum,    in xilly_setupchannels()
    518  if (!buffers)    in xilly_setupchannels()
    532  channel->rd_buffers = buffers;    in xilly_setupchannels()
    533  rc = xilly_get_dma_buffers(ep, &rd_alloc, buffers,    in xilly_setupchannels()
    546  channel->wr_buffers = buffers;    in xilly_setupchannels()
    [all …]
|
/drivers/mtd/nand/ |
D | nand_base.c |
    1587  uint8_t *ecc_calc = chip->buffers->ecccalc;    in nand_read_page_swecc()
    1588  uint8_t *ecc_code = chip->buffers->ecccode;    in nand_read_page_swecc()
    1660  chip->ecc.calculate(mtd, p, &chip->buffers->ecccalc[i]);    in nand_read_subpage()
    1694  ret = mtd_ooblayout_get_eccbytes(mtd, chip->buffers->ecccode,    in nand_read_subpage()
    1704  &chip->buffers->ecccode[i], &chip->buffers->ecccalc[i]);    in nand_read_subpage()
    1709  &chip->buffers->ecccode[i],    in nand_read_subpage()
    1742  uint8_t *ecc_calc = chip->buffers->ecccalc;    in nand_read_page_hwecc()
    1743  uint8_t *ecc_code = chip->buffers->ecccode;    in nand_read_page_hwecc()
    1805  uint8_t *ecc_code = chip->buffers->ecccode;    in nand_read_page_hwecc_oob_first()
    1806  uint8_t *ecc_calc = chip->buffers->ecccalc;    in nand_read_page_hwecc_oob_first()
    [all …]
|
/drivers/net/ethernet/pasemi/ |
D | pasemi_mac.h |
    50   u64 *buffers; /* RX interface buffer ring */    member
    109  #define RX_BUFF(rx, num) ((rx)->buffers[(num) & (RX_RING_SIZE-1)])
|
/drivers/gpu/drm/sti/ |
D | NOTES |
    42  buffers) and to HQVDP+VID (video buffers)
|