/drivers/media/video/

videobuf-core.c
    44  #define CALL(q, f, arg...) \    [argument]
    45  ((q->int_ops->f) ? q->int_ops->f(arg) : 0)
    47  void *videobuf_alloc(struct videobuf_queue *q)    [in videobuf_alloc(), argument]
    51  BUG_ON(q->msize < sizeof(*vb));    [in videobuf_alloc()]
    53  if (!q->int_ops || !q->int_ops->alloc) {    [in videobuf_alloc()]
    58  vb = q->int_ops->alloc(q->msize);    [in videobuf_alloc()]
    89  int videobuf_iolock(struct videobuf_queue *q, struct videobuf_buffer *vb,    [in videobuf_iolock(), argument]
    93  MAGIC_CHECK(q->int_ops->magic, MAGIC_QTYPE_OPS);    [in videobuf_iolock()]
    95  return CALL(q, iolock, q, vb, fbuf);    [in videobuf_iolock()]
    98  void *videobuf_queue_to_vmalloc (struct videobuf_queue *q,    [in videobuf_queue_to_vmalloc(), argument]
    [all …]
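The CALL() macro shown above captures a recurring videobuf pattern: dispatch through an optional per-backend ops table and quietly return 0 when a hook is not implemented. A minimal stand-alone sketch of the same shape follows; struct my_queue, struct my_int_ops and demo_iolock() are invented names for illustration, not the real videobuf API.

#include <stdio.h>

/* Illustrative stand-ins, not the real videobuf structures. */
struct my_queue;

struct my_int_ops {
    int (*iolock)(struct my_queue *q, int buf_index);
    int (*sync)(struct my_queue *q, int buf_index);   /* may be NULL */
};

struct my_queue {
    const struct my_int_ops *int_ops;
};

/* Same shape as videobuf's CALL(): use the hook if present, else return 0.
 * "arg..." is a GNU-style named variadic macro parameter, as in the kernel. */
#define CALL(q, f, arg...) \
    ((q)->int_ops->f ? (q)->int_ops->f(arg) : 0)

static int demo_iolock(struct my_queue *q, int buf_index)
{
    (void)q;
    printf("iolock buffer %d\n", buf_index);
    return 0;
}

int main(void)
{
    static const struct my_int_ops ops = { .iolock = demo_iolock, .sync = NULL };
    struct my_queue q = { .int_ops = &ops };

    /* Dispatches to demo_iolock(). */
    int rc1 = CALL(&q, iolock, &q, 3);
    /* .sync is NULL, so this safely evaluates to 0 instead of crashing. */
    int rc2 = CALL(&q, sync, &q, 3);

    printf("rc1=%d rc2=%d\n", rc1, rc2);
    return 0;
}
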
bw-qcam.c
    96  static inline int read_lpstatus(struct qcam_device *q)    [in read_lpstatus(), argument]
    98  return parport_read_status(q->pport);    [in read_lpstatus()]
   101  static inline int read_lpdata(struct qcam_device *q)    [in read_lpdata(), argument]
   103  return parport_read_data(q->pport);    [in read_lpdata()]
   106  static inline void write_lpdata(struct qcam_device *q, int d)    [in write_lpdata(), argument]
   108  parport_write_data(q->pport, d);    [in write_lpdata()]
   111  static inline void write_lpcontrol(struct qcam_device *q, int d)    [in write_lpcontrol(), argument]
   115  parport_data_reverse(q->pport);    [in write_lpcontrol()]
   118  parport_data_forward(q->pport);    [in write_lpcontrol()]
   124  parport_write_control(q->pport, d);    [in write_lpcontrol()]
    [all …]
videobuf-dma-contig.c
    42  dev_dbg(map->q->dev, "vm_open %p [count=%u,vma=%08lx-%08lx]\n",    [in videobuf_vm_open()]
    51  struct videobuf_queue *q = map->q;    [in videobuf_vm_close(), local]
    54  dev_dbg(map->q->dev, "vm_close %p [count=%u,vma=%08lx-%08lx]\n",    [in videobuf_vm_close()]
    61  dev_dbg(map->q->dev, "munmap %p q=%p\n", map, q);    [in videobuf_vm_close()]
    62  mutex_lock(&q->vb_lock);    [in videobuf_vm_close()]
    65  if (q->streaming)    [in videobuf_vm_close()]
    66  videobuf_queue_cancel(q);    [in videobuf_vm_close()]
    69  if (NULL == q->bufs[i])    [in videobuf_vm_close()]
    72  if (q->bufs[i]->map != map)    [in videobuf_vm_close()]
    75  mem = q->bufs[i]->priv;    [in videobuf_vm_close()]
    [all …]
c-qcam.c
   262  static void qc_setup(struct qcam_device *q)    [in qc_setup(), argument]
   264  qc_reset(q);    [in qc_setup()]
   267  qcam_set(q, 11, q->brightness);    [in qc_setup()]
   271  qcam_set(q, 17, q->ccd_height);    [in qc_setup()]
   272  qcam_set(q, 19, q->ccd_width / 2);    [in qc_setup()]
   275  qcam_set(q, 0xd, q->top);    [in qc_setup()]
   276  qcam_set(q, 0xf, q->left);    [in qc_setup()]
   279  qcam_set(q, 0x19, q->contrast);    [in qc_setup()]
   280  qcam_set(q, 0x1f, q->whitebal);    [in qc_setup()]
   283  qcam_set(q, 45, 2);    [in qc_setup()]
    [all …]
videobuf-vmalloc.c
    63  struct videobuf_queue *q = map->q;    [in videobuf_vm_close(), local]
    73  dprintk(1, "munmap %p q=%p\n", map, q);    [in videobuf_vm_close()]
    74  mutex_lock(&q->vb_lock);    [in videobuf_vm_close()]
    77  if (q->streaming)    [in videobuf_vm_close()]
    78  videobuf_queue_cancel(q);    [in videobuf_vm_close()]
    81  if (NULL == q->bufs[i])    [in videobuf_vm_close()]
    84  if (q->bufs[i]->map != map)    [in videobuf_vm_close()]
    87  mem = q->bufs[i]->priv;    [in videobuf_vm_close()]
   107  q->bufs[i]->map = NULL;    [in videobuf_vm_close()]
   108  q->bufs[i]->baddr = 0;    [in videobuf_vm_close()]
    [all …]
videobuf-dma-sg.c
   219  int videobuf_dma_map(struct videobuf_queue* q, struct videobuf_dmabuf *dma)    [in videobuf_dma_map(), argument]
   246  dma->sglen = dma_map_sg(q->dev, dma->sglist,    [in videobuf_dma_map()]
   260  int videobuf_dma_sync(struct videobuf_queue *q, struct videobuf_dmabuf *dma)    [in videobuf_dma_sync(), argument]
   265  dma_sync_sg_for_cpu(q->dev, dma->sglist, dma->nr_pages, dma->direction);    [in videobuf_dma_sync()]
   269  int videobuf_dma_unmap(struct videobuf_queue* q,struct videobuf_dmabuf *dma)    [in videobuf_dma_unmap(), argument]
   275  dma_unmap_sg(q->dev, dma->sglist, dma->nr_pages, dma->direction);    [in videobuf_dma_unmap()]
   310  struct videobuf_queue q;    [in videobuf_sg_dma_map(), local]
   312  q.dev = dev;    [in videobuf_sg_dma_map()]
   314  return videobuf_dma_map(&q, dma);    [in videobuf_sg_dma_map()]
   319  struct videobuf_queue q;    [in videobuf_sg_dma_unmap(), local]
    [all …]
/drivers/infiniband/hw/amso1100/

c2_mq.c
    36  void *c2_mq_alloc(struct c2_mq *q)    [in c2_mq_alloc(), argument]
    38  BUG_ON(q->magic != C2_MQ_MAGIC);    [in c2_mq_alloc()]
    39  BUG_ON(q->type != C2_MQ_ADAPTER_TARGET);    [in c2_mq_alloc()]
    41  if (c2_mq_full(q)) {    [in c2_mq_alloc()]
    46  (struct c2wr_hdr *) (q->msg_pool.host + q->priv * q->msg_size);    [in c2_mq_alloc()]
    53  return q->msg_pool.host + q->priv * q->msg_size;    [in c2_mq_alloc()]
    58  void c2_mq_produce(struct c2_mq *q)    [in c2_mq_produce(), argument]
    60  BUG_ON(q->magic != C2_MQ_MAGIC);    [in c2_mq_produce()]
    61  BUG_ON(q->type != C2_MQ_ADAPTER_TARGET);    [in c2_mq_produce()]
    63  if (!c2_mq_full(q)) {    [in c2_mq_produce()]
    [all …]
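c2_mq_alloc() and c2_mq_produce() above follow a two-phase producer scheme: hand out a pointer to the next free slot of a fixed message pool (refusing when the ring is full), then advance the private producer index once the caller has filled the slot. A minimal userspace sketch of that idea follows; struct mq, MQ_SLOTS and MSG_SIZE are invented for the example and are not the amso1100 layout.

#include <stdint.h>
#include <stdio.h>

#define MQ_SLOTS 8
#define MSG_SIZE 64

struct mq {
    uint8_t pool[MQ_SLOTS * MSG_SIZE];  /* message pool */
    unsigned int priv;                  /* producer's next free slot */
    unsigned int shared;                /* consumer's next slot to read */
};

static int mq_full(const struct mq *q)
{
    /* Keep one slot empty so "full" and "empty" stay distinguishable. */
    return ((q->priv + 1) % MQ_SLOTS) == q->shared;
}

/* Phase 1: reserve the next free slot (or NULL when the ring is full). */
static void *mq_alloc(struct mq *q)
{
    if (mq_full(q))
        return NULL;
    return q->pool + q->priv * MSG_SIZE;
}

/* Phase 2: publish the slot handed out by mq_alloc(). */
static void mq_produce(struct mq *q)
{
    if (!mq_full(q))
        q->priv = (q->priv + 1) % MQ_SLOTS;
}

int main(void)
{
    struct mq q = { .priv = 0, .shared = 0 };
    char *msg = mq_alloc(&q);

    if (msg) {
        snprintf(msg, MSG_SIZE, "work request 1");
        mq_produce(&q);
    }
    printf("next free slot: %u\n", q.priv);
    return 0;
}
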
/drivers/s390/cio/

qdio_main.c
    96  static inline int qdio_check_ccq(struct qdio_q *q, unsigned int ccq)    [in qdio_check_ccq(), argument]
   105  DBF_ERROR("%4x ccq:%3d", SCH_NO(q), ccq);    [in qdio_check_ccq()]
   120  static int qdio_do_eqbs(struct qdio_q *q, unsigned char *state,    [in qdio_do_eqbs(), argument]
   125  int nr = q->nr;    [in qdio_do_eqbs()]
   128  BUG_ON(!q->irq_ptr->sch_token);    [in qdio_do_eqbs()]
   131  if (!q->is_input_q)    [in qdio_do_eqbs()]
   132  nr += q->irq_ptr->nr_input_qs;    [in qdio_do_eqbs()]
   134  ccq = do_eqbs(q->irq_ptr->sch_token, state, nr, &tmp_start, &tmp_count,    [in qdio_do_eqbs()]
   136  rc = qdio_check_ccq(q, ccq);    [in qdio_do_eqbs()]
   147  DBF_DEV_EVENT(DBF_WARN, q->irq_ptr, "EQBS again:%2d", ccq);    [in qdio_do_eqbs()]
    [all …]
qdio_setup.c
    45  struct qdio_q *q;    [in set_impl_params(), local]
    60  for_each_input_queue(irq_ptr, q, i) {    [in set_impl_params()]
    62  q->slib->slibe[j].parms =    [in set_impl_params()]
    69  for_each_output_queue(irq_ptr, q, i) {    [in set_impl_params()]
    71  q->slib->slibe[j].parms =    [in set_impl_params()]
    78  struct qdio_q *q;    [in __qdio_allocate_qs(), local]
    82  q = kmem_cache_alloc(qdio_q_cache, GFP_KERNEL);    [in __qdio_allocate_qs()]
    83  if (!q)    [in __qdio_allocate_qs()]
    85  WARN_ON((unsigned long)q & 0xff);    [in __qdio_allocate_qs()]
    87  q->slib = (struct slib *) __get_free_page(GFP_KERNEL);    [in __qdio_allocate_qs()]
    [all …]
qdio_debug.c
    57  struct qdio_q *q = m->private;    [in qstat_show(), local]
    60  if (!q)    [in qstat_show()]
    63  seq_printf(m, "device state indicator: %d\n", *(u32 *)q->irq_ptr->dsci);    [in qstat_show()]
    64  seq_printf(m, "nr_used: %d\n", atomic_read(&q->nr_buf_used));    [in qstat_show()]
    65  seq_printf(m, "ftc: %d\n", q->first_to_check);    [in qstat_show()]
    66  seq_printf(m, "last_move_ftc: %d\n", q->last_move_ftc);    [in qstat_show()]
    67  seq_printf(m, "polling: %d\n", q->u.in.polling);    [in qstat_show()]
    68  seq_printf(m, "ack count: %d\n", q->u.in.ack_count);    [in qstat_show()]
    72  qdio_siga_sync_q(q);    [in qstat_show()]
    74  get_buf_state(q, i, &state, 0);    [in qstat_show()]
    [all …]
qdio.h
   303  #define queue_type(q) q->irq_ptr->qib.qfmt    [argument]
   304  #define SCH_NO(q) (q->irq_ptr->schid.sch_no)    [argument]
   311  static inline int multicast_outbound(struct qdio_q *q)    [in multicast_outbound(), argument]
   313  return (q->irq_ptr->nr_output_qs > 1) &&    [in multicast_outbound()]
   314  (q->nr == q->irq_ptr->nr_output_qs - 1);    [in multicast_outbound()]
   322  #define pci_out_supported(q) \    [argument]
   323  (q->irq_ptr->qib.ac & QIB_AC_OUTBOUND_PCI_SUPPORTED)
   324  #define is_qebsm(q) (q->irq_ptr->sch_token != 0)    [argument]
   326  #define need_siga_sync_thinint(q) (!q->irq_ptr->siga_flag.no_sync_ti)    [argument]
   327  #define need_siga_sync_out_thinint(q) (!q->irq_ptr->siga_flag.no_sync_out_ti)    [argument]
    [all …]
qdio_thinint.c
    91  struct qdio_q *q;    [in tiqdio_add_input_queues(), local]
    98  for_each_input_queue(irq_ptr, q, i) {    [in tiqdio_add_input_queues()]
    99  list_add_rcu(&q->entry, &tiq_list);    [in tiqdio_add_input_queues()]
   113  struct qdio_q *q;    [in tiqdio_remove_input_queues(), local]
   117  q = irq_ptr->input_qs[i];    [in tiqdio_remove_input_queues()]
   119  if (!q || !q->entry.prev || !q->entry.next)    [in tiqdio_remove_input_queues()]
   121  list_del_rcu(&q->entry);    [in tiqdio_remove_input_queues()]
   126  static inline int tiqdio_inbound_q_done(struct qdio_q *q)    [in tiqdio_inbound_q_done(), argument]
   130  if (!atomic_read(&q->nr_buf_used))    [in tiqdio_inbound_q_done()]
   133  qdio_siga_sync_q(q);    [in tiqdio_inbound_q_done()]
    [all …]
/drivers/isdn/hardware/eicon/

dqueue.c
    17  diva_data_q_init(diva_um_idi_data_queue_t * q,    [in diva_data_q_init(), argument]
    22  q->max_length = max_length;    [in diva_data_q_init()]
    23  q->segments = max_segments;    [in diva_data_q_init()]
    25  for (i = 0; i < q->segments; i++) {    [in diva_data_q_init()]
    26  q->data[i] = NULL;    [in diva_data_q_init()]
    27  q->length[i] = 0;    [in diva_data_q_init()]
    29  q->read = q->write = q->count = q->segment_pending = 0;    [in diva_data_q_init()]
    31  for (i = 0; i < q->segments; i++) {    [in diva_data_q_init()]
    32  if (!(q->data[i] = diva_os_malloc(0, q->max_length))) {    [in diva_data_q_init()]
    33  diva_data_q_finit(q);    [in diva_data_q_init()]
    [all …]
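diva_data_q_init() above pre-allocates every segment of a fixed-size data queue and, when any allocation fails, calls diva_data_q_finit() to release the segments it already obtained. A sketch of that allocate-or-roll-back pattern follows; struct data_queue and the function names are invented stand-ins, not the Eicon diva_um_idi_data_queue_t interface.

#include <stdlib.h>

#define MAX_SEGMENTS 8

struct data_queue {
    void *data[MAX_SEGMENTS];
    int   length[MAX_SEGMENTS];
    int   segments;
    int   max_length;
    int   read, write, count, segment_pending;
};

static void data_q_finit(struct data_queue *q)
{
    for (int i = 0; i < q->segments; i++) {
        free(q->data[i]);          /* free(NULL) is a no-op */
        q->data[i] = NULL;
        q->length[i] = 0;
    }
    q->segments = 0;
}

/* Pre-allocate every segment; undo all of them if any allocation fails. */
static int data_q_init(struct data_queue *q, int max_length, int max_segments)
{
    if (max_segments > MAX_SEGMENTS)
        return -1;

    q->max_length = max_length;
    q->segments = max_segments;
    q->read = q->write = q->count = q->segment_pending = 0;

    for (int i = 0; i < q->segments; i++) {
        q->data[i] = NULL;
        q->length[i] = 0;
    }
    for (int i = 0; i < q->segments; i++) {
        q->data[i] = malloc((size_t)q->max_length);
        if (!q->data[i]) {
            data_q_finit(q);   /* roll back the partial allocations */
            return -1;
        }
    }
    return 0;
}

int main(void)
{
    struct data_queue q;

    if (data_q_init(&q, 512, 4) == 0)
        data_q_finit(&q);
    return 0;
}
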
/drivers/net/wireless/b43/

pio.c
    38  static u16 generate_cookie(struct b43_pio_txqueue *q,    [in generate_cookie(), argument]
    51  cookie = (((u16)q->index + 1) << 12);    [in generate_cookie()]
    63  struct b43_pio_txqueue *q = NULL;    [in parse_cookie(), local]
    68  q = pio->tx_queue_AC_BK;    [in parse_cookie()]
    71  q = pio->tx_queue_AC_BE;    [in parse_cookie()]
    74  q = pio->tx_queue_AC_VI;    [in parse_cookie()]
    77  q = pio->tx_queue_AC_VO;    [in parse_cookie()]
    80  q = pio->tx_queue_mcast;    [in parse_cookie()]
    83  if (B43_WARN_ON(!q))    [in parse_cookie()]
    86  if (B43_WARN_ON(pack_index >= ARRAY_SIZE(q->packets)))    [in parse_cookie()]
    [all …]
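generate_cookie()/parse_cookie() above pack a queue identity and a packet slot into a single 16-bit cookie: (queue index + 1) goes into the top bits so that 0 can remain an invalid cookie, and the packet slot sits in the low bits. The sketch below shows the same bit packing with invented names (make_cookie, split_cookie, PACKETS_PER_QUEUE); the exact field widths are an assumption, not taken from the b43 driver.

#include <stdint.h>
#include <stdio.h>
#include <assert.h>

#define PACKETS_PER_QUEUE 16       /* invented size for the demo */

static uint16_t make_cookie(unsigned int queue_index, unsigned int pack_index)
{
    uint16_t cookie;

    assert(pack_index < PACKETS_PER_QUEUE);
    cookie  = (uint16_t)((queue_index + 1) << 12); /* 0 stays "invalid" */
    cookie |= (uint16_t)pack_index;
    return cookie;
}

static void split_cookie(uint16_t cookie,
                         unsigned int *queue_index, unsigned int *pack_index)
{
    *queue_index = (cookie >> 12) - 1;
    *pack_index  = cookie & 0x0FFF;
}

int main(void)
{
    unsigned int qi, pi;
    uint16_t c = make_cookie(3, 7);

    split_cookie(c, &qi, &pi);
    printf("cookie=0x%04x -> queue=%u packet=%u\n", c, qi, pi);
    return 0;
}
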
pio.h
   118  static inline u16 b43_piotx_read16(struct b43_pio_txqueue *q, u16 offset)    [in b43_piotx_read16(), argument]
   120  return b43_read16(q->dev, q->mmio_base + offset);    [in b43_piotx_read16()]
   123  static inline u32 b43_piotx_read32(struct b43_pio_txqueue *q, u16 offset)    [in b43_piotx_read32(), argument]
   125  return b43_read32(q->dev, q->mmio_base + offset);    [in b43_piotx_read32()]
   128  static inline void b43_piotx_write16(struct b43_pio_txqueue *q,    [in b43_piotx_write16(), argument]
   131  b43_write16(q->dev, q->mmio_base + offset, value);    [in b43_piotx_write16()]
   134  static inline void b43_piotx_write32(struct b43_pio_txqueue *q,    [in b43_piotx_write32(), argument]
   137  b43_write32(q->dev, q->mmio_base + offset, value);    [in b43_piotx_write32()]
   141  static inline u16 b43_piorx_read16(struct b43_pio_rxqueue *q, u16 offset)    [in b43_piorx_read16(), argument]
   143  return b43_read16(q->dev, q->mmio_base + offset);    [in b43_piorx_read16()]
    [all …]
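The pio.h helpers above are thin inline wrappers that add a per-queue register base (q->mmio_base) to a caller-supplied offset before touching the device. The userspace sketch below mimics that shape; reg_read16()/reg_write16() and the backing array are invented so the example runs without hardware.

#include <stdint.h>
#include <stdio.h>

/* Fake register file standing in for device MMIO. */
static uint16_t fake_regs[0x1000];

static uint16_t reg_read16(uint16_t addr)              { return fake_regs[addr]; }
static void     reg_write16(uint16_t addr, uint16_t v) { fake_regs[addr] = v; }

struct txqueue {
    uint16_t mmio_base;   /* register block for this queue */
};

/* Per-queue accessors in the style of b43_piotx_read16/write16: callers pass
 * an offset, the wrapper adds the queue's register base. */
static inline uint16_t txq_read16(struct txqueue *q, uint16_t offset)
{
    return reg_read16(q->mmio_base + offset);
}

static inline void txq_write16(struct txqueue *q, uint16_t offset, uint16_t value)
{
    reg_write16(q->mmio_base + offset, value);
}

int main(void)
{
    struct txqueue q = { .mmio_base = 0x0200 };

    txq_write16(&q, 0x08, 0xBEEF);
    printf("readback: 0x%04x\n", txq_read16(&q, 0x08));
    return 0;
}
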
/drivers/staging/otus/80211core/

queue.c
    32  struct zsQueue* q;    [in zfQueueCreate(), local]
    34  if ((q = (struct zsQueue*)zfwMemAllocate(dev, sizeof(struct zsQueue)    [in zfQueueCreate()]
    37  q->size = size;    [in zfQueueCreate()]
    38  q->sizeMask = size-1;    [in zfQueueCreate()]
    39  q->head = 0;    [in zfQueueCreate()]
    40  q->tail = 0;    [in zfQueueCreate()]
    42  return q;    [in zfQueueCreate()]
    45  void zfQueueDestroy(zdev_t* dev, struct zsQueue* q)    [in zfQueueDestroy(), argument]
    47  u16_t size = sizeof(struct zsQueue) + (sizeof(struct zsQueueCell)*(q->size-1));    [in zfQueueDestroy()]
    49  zfQueueFlush(dev, q);    [in zfQueueDestroy()]
    [all …]
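zfQueueCreate() above stores both size and sizeMask = size - 1, which only works when size is a power of two but lets later index arithmetic use a cheap AND instead of a modulo. A small ring-buffer sketch of that masking trick follows; struct ring and its operations are invented for the example, not the otus zsQueue layout.

#include <stdint.h>
#include <stdio.h>

#define RING_MAX 16

struct ring {
    uint16_t size;      /* must be a power of two */
    uint16_t sizeMask;  /* size - 1 */
    uint16_t head, tail;
    int slots[RING_MAX];
};

static int ring_init(struct ring *r, uint16_t size)
{
    if (size == 0 || size > RING_MAX || (size & (size - 1)) != 0)
        return -1;                          /* not a usable power of two */
    r->size = size;
    r->sizeMask = size - 1;
    r->head = r->tail = 0;
    return 0;
}

static int ring_put(struct ring *r, int v)
{
    if (((r->tail + 1) & r->sizeMask) == r->head)
        return -1;                          /* full: one slot kept free */
    r->slots[r->tail] = v;
    r->tail = (r->tail + 1) & r->sizeMask;
    return 0;
}

static int ring_get(struct ring *r, int *v)
{
    if (r->head == r->tail)
        return -1;                          /* empty */
    *v = r->slots[r->head];
    r->head = (r->head + 1) & r->sizeMask;
    return 0;
}

int main(void)
{
    struct ring r;
    int v;

    if (ring_init(&r, 8) == 0 && ring_put(&r, 42) == 0 && ring_get(&r, &v) == 0)
        printf("got %d\n", v);
    return 0;
}
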
/drivers/net/cxgb3/

sge.c
   154  static inline struct sge_qset *fl_to_qset(const struct sge_fl *q, int qidx)    [in fl_to_qset(), argument]
   156  return container_of(q, struct sge_qset, fl[qidx]);    [in fl_to_qset()]
   159  static inline struct sge_qset *rspq_to_qset(const struct sge_rspq *q)    [in rspq_to_qset(), argument]
   161  return container_of(q, struct sge_qset, rspq);    [in rspq_to_qset()]
   164  static inline struct sge_qset *txq_to_qset(const struct sge_txq *q, int qidx)    [in txq_to_qset(), argument]
   166  return container_of(q, struct sge_qset, txq[qidx]);    [in txq_to_qset()]
   179  const struct sge_rspq *q, unsigned int credits)    [in refill_rspq(), argument]
   183  V_RSPQ(q->cntxt_id) | V_CREDITS(credits));    [in refill_rspq()]
   227  static inline void unmap_skb(struct sk_buff *skb, struct sge_txq *q,    [in unmap_skb(), argument]
   231  struct tx_sw_desc *d = &q->sdesc[cidx];    [in unmap_skb()]
    [all …]
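fl_to_qset(), rspq_to_qset() and txq_to_qset() above all use container_of() to step from a pointer to an embedded queue back to the enclosing sge_qset. A self-contained sketch of the same trick follows, with an invented qset/rspq layout; the driver additionally applies it to array members such as fl[qidx].

#include <stddef.h>
#include <stdio.h>

/* Recover the enclosing structure from a pointer to one of its members. */
#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct rspq { int cntxt_id; };

struct qset {
    int         id;
    struct rspq rspq;
};

static struct qset *rspq_to_qset(struct rspq *q)
{
    return container_of(q, struct qset, rspq);
}

int main(void)
{
    struct qset qs = { .id = 7, .rspq = { .cntxt_id = 42 } };
    struct rspq *q = &qs.rspq;

    /* Only the member pointer is at hand here; container_of() gets the
     * owning queue set back. */
    printf("qset id via rspq: %d\n", rspq_to_qset(q)->id);
    return 0;
}
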
/drivers/net/chelsio/

sge.c
   488  struct cmdQ *q = &sge->cmdQ[0];    [in sched_skb(), local]
   489  clear_bit(CMDQ_STAT_LAST_PKT_DB, &q->status);    [in sched_skb()]
   490  if (test_and_set_bit(CMDQ_STAT_RUNNING, &q->status) == 0) {    [in sched_skb()]
   491  set_bit(CMDQ_STAT_LAST_PKT_DB, &q->status);    [in sched_skb()]
   513  static void free_freelQ_buffers(struct pci_dev *pdev, struct freelQ *q)    [in free_freelQ_buffers(), argument]
   515  unsigned int cidx = q->cidx;    [in free_freelQ_buffers()]
   517  while (q->credits--) {    [in free_freelQ_buffers()]
   518  struct freelQ_ce *ce = &q->centries[cidx];    [in free_freelQ_buffers()]
   525  if (++cidx == q->size)    [in free_freelQ_buffers()]
   545  struct freelQ *q = &sge->freelQ[i];    [in free_rx_resources(), local]
    [all …]
/drivers/scsi/arm/

queue.c
    42  #define SET_MAGIC(q,m) ((q)->magic = (m))    [argument]
    43  #define BAD_MAGIC(q,m) ((q)->magic != (m))    [argument]
    45  #define SET_MAGIC(q,m) do { } while (0)    [argument]
    46  #define BAD_MAGIC(q,m) (0)    [argument]
    61  QE_t *q;    [in queue_initialise(), local]
    73  queue->alloc = q = kmalloc(sizeof(QE_t) * nqueues, GFP_KERNEL);    [in queue_initialise()]
    74  if (q) {    [in queue_initialise()]
    75  for (; nqueues; q++, nqueues--) {    [in queue_initialise()]
    76  SET_MAGIC(q, QUEUE_MAGIC_FREE);    [in queue_initialise()]
    77  q->SCpnt = NULL;    [in queue_initialise()]
    [all …]
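The SET_MAGIC()/BAD_MAGIC() pair above is defined twice: as a real store-and-compare of a magic word when queue debugging is enabled, and as no-ops otherwise. A compilable sketch of that switchable sanity check follows; QUEUE_DEBUG, the magic values and the entry layout are invented for the example.

#include <stdio.h>
#include <stdlib.h>

#define QUEUE_DEBUG 1

#define QUEUE_MAGIC_FREE 0xf7a1c5e2u
#define QUEUE_MAGIC_USED 0xf7a1c5e3u

typedef struct queue_entry {
#if QUEUE_DEBUG
    unsigned int magic;   /* only present in debug builds */
#endif
    void *payload;
} QE_t;

#if QUEUE_DEBUG
#define SET_MAGIC(q, m)  ((q)->magic = (m))
#define BAD_MAGIC(q, m)  ((q)->magic != (m))
#else
#define SET_MAGIC(q, m)  do { } while (0)
#define BAD_MAGIC(q, m)  (0)
#endif

int main(void)
{
    QE_t e;

    SET_MAGIC(&e, QUEUE_MAGIC_FREE);
    e.payload = NULL;

    if (BAD_MAGIC(&e, QUEUE_MAGIC_FREE)) {
        fprintf(stderr, "queue entry corrupted\n");
        abort();
    }
    printf("entry magic OK\n");
    return 0;
}
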
/drivers/ieee1394/

dv1394-private.h
    80  struct output_more_immediate { __le32 q[8]; };    [member]
    81  struct output_more { __le32 q[4]; };    [member]
    82  struct output_last { __le32 q[4]; };    [member]
    83  struct input_more { __le32 q[4]; };    [member]
    84  struct input_last { __le32 q[4]; };    [member]
    94  …omi->q[0] = cpu_to_le32(0x02000000 | 8); /* OUTPUT_MORE_IMMEDIATE; 8 is the size of the IT header …    [in fill_output_more_immediate()]
    95  omi->q[1] = cpu_to_le32(0);    [in fill_output_more_immediate()]
    96  omi->q[2] = cpu_to_le32(0);    [in fill_output_more_immediate()]
    97  omi->q[3] = cpu_to_le32(0);    [in fill_output_more_immediate()]
   100  omi->q[4] = cpu_to_le32( (0x0 << 16) /* IEEE1394_SPEED_100 */    [in fill_output_more_immediate()]
    [all …]
/drivers/net/wireless/ath9k/

mac.c
    43  u32 ath9k_hw_gettxbuf(struct ath_hal *ah, u32 q)    [in ath9k_hw_gettxbuf(), argument]
    45  return REG_READ(ah, AR_QTXDP(q));    [in ath9k_hw_gettxbuf()]
    48  bool ath9k_hw_puttxbuf(struct ath_hal *ah, u32 q, u32 txdp)    [in ath9k_hw_puttxbuf(), argument]
    50  REG_WRITE(ah, AR_QTXDP(q), txdp);    [in ath9k_hw_puttxbuf()]
    55  bool ath9k_hw_txstart(struct ath_hal *ah, u32 q)    [in ath9k_hw_txstart(), argument]
    57  DPRINTF(ah->ah_sc, ATH_DBG_QUEUE, "queue %u\n", q);    [in ath9k_hw_txstart()]
    59  REG_WRITE(ah, AR_Q_TXE, 1 << q);    [in ath9k_hw_txstart()]
    64  u32 ath9k_hw_numtxpending(struct ath_hal *ah, u32 q)    [in ath9k_hw_numtxpending(), argument]
    68  npend = REG_READ(ah, AR_QSTS(q)) & AR_Q_STS_PEND_FR_CNT;    [in ath9k_hw_numtxpending()]
    71  if (REG_READ(ah, AR_Q_TXE) & (1 << q))    [in ath9k_hw_numtxpending()]
    [all …]
/drivers/net/wireless/iwlwifi/

iwl-tx.c
   131  int index = txq->q.read_ptr;    [in iwl_hw_txq_free_tfd()]
   159  dev_kfree_skb(txq->txb[txq->q.read_ptr].skb[i - 1]);    [in iwl_hw_txq_free_tfd()]
   160  txq->txb[txq->q.read_ptr].skb[i - 1] = NULL;    [in iwl_hw_txq_free_tfd()]
   196  int txq_id = txq->q.id;    [in iwl_txq_update_write_ptr()]
   220  txq->q.write_ptr | (txq_id << 8));    [in iwl_txq_update_write_ptr()]
   227  txq->q.write_ptr | (txq_id << 8));    [in iwl_txq_update_write_ptr()]
   247  struct iwl_queue *q = &txq->q;    [in iwl_tx_queue_free(), local]
   251  if (q->n_bd == 0)    [in iwl_tx_queue_free()]
   255  for (; q->write_ptr != q->read_ptr;    [in iwl_tx_queue_free()]
   256  q->read_ptr = iwl_queue_inc_wrap(q->read_ptr, q->n_bd))    [in iwl_tx_queue_free()]
    [all …]
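iwl_tx_queue_free() above drains the TX ring by advancing q->read_ptr with wrap-around until it catches up with q->write_ptr, releasing one buffer descriptor per step. The sketch below reproduces that loop shape with invented names (struct txq, queue_inc_wrap, free_entry), not the iwlwifi structures.

#include <stdio.h>

#define N_BD 8   /* number of buffer descriptors in the ring */

struct txq {
    int read_ptr;
    int write_ptr;
    const char *entries[N_BD];
};

static int queue_inc_wrap(int index, int n_bd)
{
    return (index + 1) % n_bd;   /* wrap back to slot 0 at the end of the ring */
}

static void free_entry(struct txq *q, int idx)
{
    printf("freeing entry %d (%s)\n", idx, q->entries[idx]);
    q->entries[idx] = NULL;
}

/* Walk read_ptr forward until it meets write_ptr, freeing as we go. */
static void tx_queue_drain(struct txq *q)
{
    for (; q->write_ptr != q->read_ptr;
         q->read_ptr = queue_inc_wrap(q->read_ptr, N_BD))
        free_entry(q, q->read_ptr);
}

int main(void)
{
    struct txq q = { .read_ptr = 6, .write_ptr = 1,
                     .entries = { [6] = "skb-a", [7] = "skb-b", [0] = "skb-c" } };

    tx_queue_drain(&q);   /* frees slots 6, 7 and 0, then stops at write_ptr == 1 */
    return 0;
}
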
/drivers/media/video/cx23885/

cx23885-vbi.c
    67  struct cx23885_dmaqueue *q,    [in cx23885_start_vbi_dma(), argument]
    75  q->count = 1;    [in cx23885_start_vbi_dma()]
    90  struct cx23885_dmaqueue *q)    [in cx23885_restart_vbi_queue(), argument]
    95  if (list_empty(&q->active))    [in cx23885_restart_vbi_queue()]
    98  buf = list_entry(q->active.next, struct cx23885_buffer, vb.queue);    [in cx23885_restart_vbi_queue()]
   101  cx23885_start_vbi_dma(dev, q, buf);    [in cx23885_restart_vbi_queue()]
   102  list_for_each(item, &q->active) {    [in cx23885_restart_vbi_queue()]
   104  buf->count = q->count++;    [in cx23885_restart_vbi_queue()]
   106  mod_timer(&q->timeout, jiffies+BUFFER_TIMEOUT);    [in cx23885_restart_vbi_queue()]
   113  struct cx23885_dmaqueue *q = &dev->vbiq;    [in cx23885_vbi_timeout(), local]
    [all …]
/drivers/media/video/cx88/

cx88-vbi.c
    51  struct cx88_dmaqueue *q,    [in cx8800_start_vbi_dma(), argument]
    66  q->count = 1;    [in cx8800_start_vbi_dma()]
    99  struct cx88_dmaqueue *q)    [in cx8800_restart_vbi_queue(), argument]
   103  if (list_empty(&q->active))    [in cx8800_restart_vbi_queue()]
   106  buf = list_entry(q->active.next, struct cx88_buffer, vb.queue);    [in cx8800_restart_vbi_queue()]
   109  cx8800_start_vbi_dma(dev, q, buf);    [in cx8800_restart_vbi_queue()]
   110  list_for_each_entry(buf, &q->active, vb.queue)    [in cx8800_restart_vbi_queue()]
   111  buf->count = q->count++;    [in cx8800_restart_vbi_queue()]
   112  mod_timer(&q->timeout, jiffies+BUFFER_TIMEOUT);    [in cx8800_restart_vbi_queue()]
   120  struct cx88_dmaqueue *q = &dev->vbiq;    [in cx8800_vbi_timeout(), local]
    [all …]
/drivers/char/

consolemap.c
   190  unsigned char *q;    [in set_inverse_transl(), local]
   193  q = p->inverse_translations[i];    [in set_inverse_transl()]
   195  if (!q) {    [in set_inverse_transl()]
   196  q = p->inverse_translations[i] = (unsigned char *)    [in set_inverse_transl()]
   198  if (!q) return;    [in set_inverse_transl()]
   200  memset(q, 0, MAX_GLYPH);    [in set_inverse_transl()]
   204  if (glyph >= 0 && glyph < MAX_GLYPH && q[glyph] < 32) {    [in set_inverse_transl()]
   206  q[glyph] = j;    [in set_inverse_transl()]
   216  u16 *q;    [in set_inverse_trans_unicode(), local]
   219  q = p->inverse_trans_unicode;    [in set_inverse_trans_unicode()]
    [all …]
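set_inverse_transl() above builds, for a character-to-glyph translation table, the reverse glyph-to-character map: it allocates and zeroes MAX_GLYPH bytes, then records a character for each glyph, treating stored values below 32 as "not set yet" so a printable character wins. A sketch of that inverse-table construction follows; the 256-entry table, MAX_GLYPH value and function name are invented for the example.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define MAX_GLYPH 512

static unsigned char *build_inverse(const unsigned short *char_to_glyph)
{
    unsigned char *inv = malloc(MAX_GLYPH);
    int j;

    if (!inv)
        return NULL;
    memset(inv, 0, MAX_GLYPH);

    for (j = 0; j < 256; j++) {
        int glyph = char_to_glyph[j];

        /* Record a character for each glyph; entries below 32 count as
         * "not set yet", so control characters get replaced by printable ones. */
        if (glyph >= 0 && glyph < MAX_GLYPH && inv[glyph] < 32)
            inv[glyph] = (unsigned char)j;
    }
    return inv;
}

int main(void)
{
    unsigned short table[256] = { 0 };
    unsigned char *inv;

    table['A'] = 300;   /* pretend 'A' renders as glyph 300 */
    inv = build_inverse(table);
    if (inv) {
        printf("glyph 300 maps back to '%c'\n", inv[300]);
        free(inv);
    }
    return 0;
}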