/drivers/firmware/tegra/

D | ivc.c
      70  } tx;    [member]
     105  u32 tx = ACCESS_ONCE(header->tx.count);    [in tegra_ivc_empty(), local]
     118  if (tx - rx > ivc->num_frames)    [in tegra_ivc_empty()]
     121  return tx == rx;    [in tegra_ivc_empty()]
     127  u32 tx = ACCESS_ONCE(header->tx.count);    [in tegra_ivc_full(), local]
     134  return tx - rx >= ivc->num_frames;    [in tegra_ivc_full()]
     140  u32 tx = ACCESS_ONCE(header->tx.count);    [in tegra_ivc_available(), local]
     149  return tx - rx;    [in tegra_ivc_available()]
     154  ACCESS_ONCE(ivc->tx.channel->tx.count) =    [in tegra_ivc_advance_tx()]
     155  ACCESS_ONCE(ivc->tx.channel->tx.count) + 1;    [in tegra_ivc_advance_tx()]
     [all …]

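The ivc.c hits show the IVC ring's occupancy tests: `tx` and `rx` are free-running frame counters, and unsigned subtraction `tx - rx` yields the queue depth even after either counter wraps. A minimal userspace model of that arithmetic (names and capacity are illustrative, not the kernel's):

```c
#include <stdbool.h>
#include <stdint.h>

struct ring {
	uint32_t tx;          /* frames ever produced (free-running) */
	uint32_t rx;          /* frames ever consumed (free-running) */
	uint32_t num_frames;  /* ring capacity */
};

/* tx - rx in uint32_t arithmetic is wrap-safe: it stays correct as long
 * as fewer than 2^32 frames are outstanding, which num_frames enforces. */
static uint32_t ring_avail(const struct ring *r) { return r->tx - r->rx; }
static bool ring_empty(const struct ring *r) { return r->tx == r->rx; }
static bool ring_full(const struct ring *r)  { return r->tx - r->rx >= r->num_frames; }
/* ivc.c additionally treats tx - rx > num_frames as corruption, since a
 * sane producer can never be more than a full ring ahead of the consumer. */
```
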
/drivers/media/i2c/adv748x/

D | adv748x-csi2.c
      21  static bool is_txa(struct adv748x_csi2 *tx)    [in is_txa(), argument]
      23  return tx == &tx->state->txa;    [in is_txa()]
      26  static int adv748x_csi2_set_virtual_channel(struct adv748x_csi2 *tx,    [in adv748x_csi2_set_virtual_channel(), argument]
      29  return tx_write(tx, ADV748X_CSI_VC_REF, vc << ADV748X_CSI_VC_REF_SHIFT);    [in adv748x_csi2_set_virtual_channel()]
      43  static int adv748x_csi2_register_link(struct adv748x_csi2 *tx,    [in adv748x_csi2_register_link(), argument]
      64  &tx->sd.entity, ADV748X_CSI2_SINK,    [in adv748x_csi2_register_link()]
      78  struct adv748x_csi2 *tx = adv748x_sd_to_csi2(sd);    [in adv748x_csi2_registered(), local]
      79  struct adv748x_state *state = tx->state;    [in adv748x_csi2_registered()]
      81  adv_dbg(state, "Registered %s (%s)", is_txa(tx) ? "TXA":"TXB",    [in adv748x_csi2_registered()]
      90  if (is_txa(tx)) {    [in adv748x_csi2_registered()]
     [all …]

/drivers/staging/lustre/lnet/klnds/socklnd/

D | socklnd_proto.c
      52  struct ksock_tx *tx = conn->ksnc_tx_carrier;    [in ksocknal_next_tx_carrier(), local]
      56  LASSERT(tx);    [in ksocknal_next_tx_carrier()]
      59  if (tx->tx_list.next == &conn->ksnc_tx_queue) {    [in ksocknal_next_tx_carrier()]
      63  conn->ksnc_tx_carrier = list_next_entry(tx, tx_list);    [in ksocknal_next_tx_carrier()]
      64  LASSERT(conn->ksnc_tx_carrier->tx_msg.ksm_type == tx->tx_msg.ksm_type);    [in ksocknal_next_tx_carrier()]
      72  struct ksock_tx *tx = conn->ksnc_tx_carrier;    [in ksocknal_queue_tx_zcack_v2(), local]
      84  if (!tx) {    [in ksocknal_queue_tx_zcack_v2()]
      93  if (tx->tx_msg.ksm_type == KSOCK_MSG_NOOP) {    [in ksocknal_queue_tx_zcack_v2()]
     101  LASSERT(tx->tx_msg.ksm_type == KSOCK_MSG_LNET);    [in ksocknal_queue_tx_zcack_v2()]
     102  LASSERT(!tx->tx_msg.ksm_zc_cookies[1]);    [in ksocknal_queue_tx_zcack_v2()]
     [all …]

D | socklnd_cb.c
      29  struct ksock_tx *tx = NULL;    [in ksocknal_alloc_tx(), local]
      38  tx = list_entry(ksocknal_data.ksnd_idle_noop_txs.next,    [in ksocknal_alloc_tx()]
      40  LASSERT(tx->tx_desc_size == size);    [in ksocknal_alloc_tx()]
      41  list_del(&tx->tx_list);    [in ksocknal_alloc_tx()]
      47  if (!tx)    [in ksocknal_alloc_tx()]
      48  LIBCFS_ALLOC(tx, size);    [in ksocknal_alloc_tx()]
      50  if (!tx)    [in ksocknal_alloc_tx()]
      53  atomic_set(&tx->tx_refcount, 1);    [in ksocknal_alloc_tx()]
      54  tx->tx_zc_aborted = 0;    [in ksocknal_alloc_tx()]
      55  tx->tx_zc_capable = 0;    [in ksocknal_alloc_tx()]
     [all …]

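ksocknal_alloc_tx() shows a common descriptor-pool idiom: pop a recycled descriptor from an idle list if one exists, otherwise fall back to the allocator, and hand the caller an object holding one reference. A hedged plain-C sketch of the same flow (the singly linked list and names are simplified stand-ins for the kernel's list_head machinery, and locking is left to the caller):

```c
#include <stdlib.h>

struct tx {
	struct tx *next;      /* free-list linkage */
	int refcount;
};

static struct tx *idle_txs;   /* pool of recycled descriptors */

static struct tx *alloc_tx(size_t size)
{
	struct tx *tx = NULL;

	if (idle_txs) {               /* prefer a recycled descriptor */
		tx = idle_txs;
		idle_txs = tx->next;
	}
	if (!tx)                      /* pool empty: fall back to malloc */
		tx = malloc(size);
	if (!tx)
		return NULL;

	tx->refcount = 1;             /* caller owns the initial reference */
	return tx;
}
```
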
D | socklnd_lib.c
      74  ksocknal_lib_send_iov(struct ksock_conn *conn, struct ksock_tx *tx)    [in ksocknal_lib_send_iov(), argument]
      82  tx->tx_nob == tx->tx_resid && /* first sending */    [in ksocknal_lib_send_iov()]
      83  !tx->tx_msg.ksm_csum) /* not checksummed */    [in ksocknal_lib_send_iov()]
      84  ksocknal_lib_csum_tx(tx);    [in ksocknal_lib_send_iov()]
      86  for (nob = i = 0; i < tx->tx_niov; i++)    [in ksocknal_lib_send_iov()]
      87  nob += tx->tx_iov[i].iov_len;    [in ksocknal_lib_send_iov()]
      90  nob < tx->tx_resid)    [in ksocknal_lib_send_iov()]
      94  tx->tx_iov, tx->tx_niov, nob);    [in ksocknal_lib_send_iov()]
      99  ksocknal_lib_send_kiov(struct ksock_conn *conn, struct ksock_tx *tx)    [in ksocknal_lib_send_kiov(), argument]
     102  struct bio_vec *kiov = tx->tx_kiov;    [in ksocknal_lib_send_kiov()]
     [all …]

/drivers/infiniband/hw/hfi1/

D | sdma.h
     488  struct sdma_txreq *tx,
     547  struct sdma_txreq *tx,    [in sdma_txinit_ahg(), argument]
     560  tx->desc_limit = ARRAY_SIZE(tx->descs);    [in sdma_txinit_ahg()]
     561  tx->descp = &tx->descs[0];    [in sdma_txinit_ahg()]
     562  INIT_LIST_HEAD(&tx->list);    [in sdma_txinit_ahg()]
     563  tx->num_desc = 0;    [in sdma_txinit_ahg()]
     564  tx->flags = flags;    [in sdma_txinit_ahg()]
     565  tx->complete = cb;    [in sdma_txinit_ahg()]
     566  tx->coalesce_buf = NULL;    [in sdma_txinit_ahg()]
     567  tx->wait = NULL;    [in sdma_txinit_ahg()]
     [all …]

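sdma_txinit_ahg() illustrates the inline-descriptor idiom: the request embeds a small descs[] array, descp starts out pointing at it, and desc_limit records its size, so short requests never need a second allocation. A self-contained sketch of that layout (the capacity and field types here are assumptions, not hfi1's):

```c
#include <stddef.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
#define INLINE_DESCS 6                   /* illustrative inline capacity */

struct desc { unsigned long qw[2]; };

struct txreq {
	struct desc descs[INLINE_DESCS]; /* embedded storage, no allocation */
	struct desc *descp;              /* descs[] now; a heap array if it grows */
	unsigned short desc_limit;
	unsigned short num_desc;
};

static void txreq_init(struct txreq *tx)
{
	tx->desc_limit = ARRAY_SIZE(tx->descs);
	tx->descp = &tx->descs[0];       /* start on the inline array */
	tx->num_desc = 0;
}
```
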
D | vnic_sdma.c
      83  struct vnic_txreq *tx = container_of(txreq, struct vnic_txreq, txreq);    [in vnic_sdma_complete(), local]
      84  struct hfi1_vnic_sdma *vnic_sdma = tx->sdma;    [in vnic_sdma_complete()]
      87  dev_kfree_skb_any(tx->skb);    [in vnic_sdma_complete()]
      88  kmem_cache_free(vnic_sdma->dd->vnic.txreq_cache, tx);    [in vnic_sdma_complete()]
      92  struct vnic_txreq *tx)    [in build_vnic_ulp_payload(), argument]
      98  &tx->txreq,    [in build_vnic_ulp_payload()]
      99  tx->skb->data,    [in build_vnic_ulp_payload()]
     100  skb_headlen(tx->skb));    [in build_vnic_ulp_payload()]
     104  for (i = 0; i < skb_shinfo(tx->skb)->nr_frags; i++) {    [in build_vnic_ulp_payload()]
     105  struct skb_frag_struct *frag = &skb_shinfo(tx->skb)->frags[i];    [in build_vnic_ulp_payload()]
     [all …]

D | verbs_txreq.h
      80  struct verbs_txreq *tx;    [in get_txreq(), local]
      83  tx = kmem_cache_alloc(dev->verbs_txreq_cache, VERBS_TXREQ_GFP);    [in get_txreq()]
      84  if (unlikely(!tx)) {    [in get_txreq()]
      86  tx = __get_txreq(dev, qp);    [in get_txreq()]
      87  if (!tx)    [in get_txreq()]
      88  return tx;    [in get_txreq()]
      90  tx->qp = qp;    [in get_txreq()]
      91  tx->mr = NULL;    [in get_txreq()]
      92  tx->sde = priv->s_sde;    [in get_txreq()]
      93  tx->psc = priv->s_sendcontext;    [in get_txreq()]
     [all …]

/drivers/staging/iio/meter/

D | ade7854-spi.c
      26  .tx_buf = st->tx,    [in ade7854_spi_write_reg_8()]
      32  st->tx[0] = ADE7854_WRITE_REG;    [in ade7854_spi_write_reg_8()]
      33  st->tx[1] = (reg_address >> 8) & 0xFF;    [in ade7854_spi_write_reg_8()]
      34  st->tx[2] = reg_address & 0xFF;    [in ade7854_spi_write_reg_8()]
      35  st->tx[3] = value & 0xFF;    [in ade7854_spi_write_reg_8()]
      51  .tx_buf = st->tx,    [in ade7854_spi_write_reg_16()]
      57  st->tx[0] = ADE7854_WRITE_REG;    [in ade7854_spi_write_reg_16()]
      58  st->tx[1] = (reg_address >> 8) & 0xFF;    [in ade7854_spi_write_reg_16()]
      59  st->tx[2] = reg_address & 0xFF;    [in ade7854_spi_write_reg_16()]
      60  st->tx[3] = (value >> 8) & 0xFF;    [in ade7854_spi_write_reg_16()]
     [all …]

D | ade7854-i2c.c
      27  st->tx[0] = (reg_address >> 8) & 0xFF;    [in ade7854_i2c_write_reg_8()]
      28  st->tx[1] = reg_address & 0xFF;    [in ade7854_i2c_write_reg_8()]
      29  st->tx[2] = value;    [in ade7854_i2c_write_reg_8()]
      31  ret = i2c_master_send(st->i2c, st->tx, 3);    [in ade7854_i2c_write_reg_8()]
      46  st->tx[0] = (reg_address >> 8) & 0xFF;    [in ade7854_i2c_write_reg_16()]
      47  st->tx[1] = reg_address & 0xFF;    [in ade7854_i2c_write_reg_16()]
      48  st->tx[2] = (value >> 8) & 0xFF;    [in ade7854_i2c_write_reg_16()]
      49  st->tx[3] = value & 0xFF;    [in ade7854_i2c_write_reg_16()]
      51  ret = i2c_master_send(st->i2c, st->tx, 4);    [in ade7854_i2c_write_reg_16()]
      66  st->tx[0] = (reg_address >> 8) & 0xFF;    [in ade7854_i2c_write_reg_24()]
     [all …]

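Both ade7854 front ends build the same wire image: a 16-bit register address sent high byte first, then the value, widest byte first. A minimal sketch of the 16-bit write path's buffer layout (the bus call itself is elided; the function name is illustrative):

```c
#include <stdint.h>

static void pack_write_reg_16(uint8_t buf[4], uint16_t reg, uint16_t value)
{
	buf[0] = (reg >> 8) & 0xFF;    /* register address, high byte */
	buf[1] = reg & 0xFF;           /* register address, low byte */
	buf[2] = (value >> 8) & 0xFF;  /* value, high byte */
	buf[3] = value & 0xFF;         /* value, low byte */
	/* the driver then does i2c_master_send(st->i2c, st->tx, 4) */
}
```
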
/drivers/net/wireless/ti/wl18xx/

D | debugfs.c
      57  WL18XX_DEBUGFS_FWSTATS_FILE(tx, tx_prepared_descs, "%u");
      58  WL18XX_DEBUGFS_FWSTATS_FILE(tx, tx_cmplt, "%u");
      59  WL18XX_DEBUGFS_FWSTATS_FILE(tx, tx_template_prepared, "%u");
      60  WL18XX_DEBUGFS_FWSTATS_FILE(tx, tx_data_prepared, "%u");
      61  WL18XX_DEBUGFS_FWSTATS_FILE(tx, tx_template_programmed, "%u");
      62  WL18XX_DEBUGFS_FWSTATS_FILE(tx, tx_data_programmed, "%u");
      63  WL18XX_DEBUGFS_FWSTATS_FILE(tx, tx_burst_programmed, "%u");
      64  WL18XX_DEBUGFS_FWSTATS_FILE(tx, tx_starts, "%u");
      65  WL18XX_DEBUGFS_FWSTATS_FILE(tx, tx_stop, "%u");
      66  WL18XX_DEBUGFS_FWSTATS_FILE(tx, tx_start_templates, "%u");
     [all …]

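Each WL18XX_DEBUGFS_FWSTATS_FILE(tx, field, fmt) line expands to the full boilerplate for one debugfs file exposing one firmware counter, so the statistics table stays declarative. A stripped-down analog of such a generator macro (plain printf stands in for the debugfs plumbing; struct fields are invented for the example):

```c
#include <stdio.h>

struct fw_stats {
	unsigned int tx_cmplt;
	unsigned int tx_starts;
};

/* One invocation == one generated accessor for one counter. */
#define FWSTATS_FILE(sub, name, fmt)                                   \
	static void show_##name(const struct fw_stats *s)              \
	{                                                              \
		printf(#sub "/" #name ": " fmt "\n", s->name);         \
	}

FWSTATS_FILE(tx, tx_cmplt, "%u")   /* expands to show_tx_cmplt() */
FWSTATS_FILE(tx, tx_starts, "%u")  /* expands to show_tx_starts() */
```
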
/drivers/staging/media/lirc/

D | lirc_zilog.c
     115  struct IR_tx *tx;    [member]
     276  struct IR_tx *tx;    [in get_ir_tx(), local]
     279  tx = ir->tx;    [in get_ir_tx()]
     280  if (tx)    [in get_ir_tx()]
     281  kref_get(&tx->ref);    [in get_ir_tx()]
     283  return tx;    [in get_ir_tx()]
     288  struct IR_tx *tx = container_of(ref, struct IR_tx, ref);    [in release_ir_tx(), local]
     289  struct IR *ir = tx->ir;    [in release_ir_tx()]
     293  ir->tx = NULL;    [in release_ir_tx()]
     294  kfree(tx);    [in release_ir_tx()]
     [all …]

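The lirc_zilog hits capture a reference-counted lookup: get_ir_tx() takes the kref while holding the owner's lock, and the kref release callback detaches the object before freeing it. A condensed kernel-style sketch of that pairing (fields reduced to the minimum; the real driver also manages rx state and holds a device-list lock around the final put):

```c
#include <linux/kref.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct IR;

struct IR_tx {
	struct kref ref;
	struct IR *ir;
};

struct IR {
	spinlock_t lock;
	struct IR_tx *tx;
};

static struct IR_tx *get_ir_tx(struct IR *ir)
{
	struct IR_tx *tx;

	spin_lock(&ir->lock);
	tx = ir->tx;
	if (tx)
		kref_get(&tx->ref);   /* pin it while the caller uses it */
	spin_unlock(&ir->lock);
	return tx;
}

static void release_ir_tx(struct kref *ref)
{
	struct IR_tx *tx = container_of(ref, struct IR_tx, ref);

	tx->ir->tx = NULL;            /* detach from the owner... */
	kfree(tx);                    /* ...then free */
}
/* drop a reference with: kref_put(&tx->ref, release_ir_tx); */
```
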
/drivers/staging/lustre/lnet/klnds/o2iblnd/

D | o2iblnd_cb.c
      43  static void kiblnd_init_tx_msg(struct lnet_ni *ni, struct kib_tx *tx,
      45  static int kiblnd_init_rdma(struct kib_conn *conn, struct kib_tx *tx, int type,
      48  static void kiblnd_queue_tx_locked(struct kib_tx *tx, struct kib_conn *conn);
      49  static void kiblnd_queue_tx(struct kib_tx *tx, struct kib_conn *conn);
      50  static void kiblnd_unmap_tx(struct kib_tx *tx);
      54  kiblnd_tx_done(struct lnet_ni *ni, struct kib_tx *tx)    [in kiblnd_tx_done(), argument]
      63  LASSERT(!tx->tx_queued); /* mustn't be queued for sending */    [in kiblnd_tx_done()]
      64  LASSERT(!tx->tx_sending); /* mustn't be awaiting sent callback */    [in kiblnd_tx_done()]
      65  LASSERT(!tx->tx_waiting); /* mustn't be awaiting peer response */    [in kiblnd_tx_done()]
      66  LASSERT(tx->tx_pool);    [in kiblnd_tx_done()]
     [all …]

/drivers/clk/mediatek/

D | clk-apmixed.c
      39  struct mtk_ref2usb_tx *tx = to_mtk_ref2usb_tx(hw);    [in mtk_ref2usb_tx_is_prepared(), local]
      41  return (readl(tx->base_addr) & REF2USB_EN_MASK) == REF2USB_EN_MASK;    [in mtk_ref2usb_tx_is_prepared()]
      46  struct mtk_ref2usb_tx *tx = to_mtk_ref2usb_tx(hw);    [in mtk_ref2usb_tx_prepare(), local]
      49  val = readl(tx->base_addr);    [in mtk_ref2usb_tx_prepare()]
      52  writel(val, tx->base_addr);    [in mtk_ref2usb_tx_prepare()]
      56  writel(val, tx->base_addr);    [in mtk_ref2usb_tx_prepare()]
      59  writel(val, tx->base_addr);    [in mtk_ref2usb_tx_prepare()]
      66  struct mtk_ref2usb_tx *tx = to_mtk_ref2usb_tx(hw);    [in mtk_ref2usb_tx_unprepare(), local]
      69  val = readl(tx->base_addr);    [in mtk_ref2usb_tx_unprepare()]
      71  writel(val, tx->base_addr);    [in mtk_ref2usb_tx_unprepare()]
     [all …]

/drivers/spi/

D | spi-dln2.c
     110  } tx;    [in dln2_spi_enable(), local]
     111  unsigned len = sizeof(tx);    [in dln2_spi_enable()]
     113  tx.port = dln2->port;    [in dln2_spi_enable()]
     117  len -= sizeof(tx.wait_for_completion);    [in dln2_spi_enable()]
     119  tx.wait_for_completion = DLN2_TRANSFERS_WAIT_COMPLETE;    [in dln2_spi_enable()]
     123  return dln2_transfer_tx(dln2->pdev, cmd, &tx, len);    [in dln2_spi_enable()]
     139  } tx;    [in dln2_spi_cs_set(), local]
     141  tx.port = dln2->port;    [in dln2_spi_cs_set()]
     148  tx.cs = ~cs_mask;    [in dln2_spi_cs_set()]
     150  return dln2_transfer_tx(dln2->pdev, DLN2_SPI_SET_SS, &tx, sizeof(tx));    [in dln2_spi_cs_set()]
     [all …]

/drivers/staging/gdm724x/

D | gdm_usb.c
     155  static struct usb_tx_sdu *get_tx_sdu_struct(struct tx_cxt *tx, int *no_spc)    [in get_tx_sdu_struct(), argument]
     159  if (list_empty(&tx->free_list))    [in get_tx_sdu_struct()]
     162  t_sdu = list_entry(tx->free_list.next, struct usb_tx_sdu, list);    [in get_tx_sdu_struct()]
     165  tx->avail_count--;    [in get_tx_sdu_struct()]
     167  *no_spc = list_empty(&tx->free_list) ? 1 : 0;    [in get_tx_sdu_struct()]
     172  static void put_tx_struct(struct tx_cxt *tx, struct usb_tx_sdu *t_sdu)    [in put_tx_struct(), argument]
     174  list_add_tail(&t_sdu->list, &tx->free_list);    [in put_tx_struct()]
     175  tx->avail_count++;    [in put_tx_struct()]
     257  struct tx_cxt *tx = &udev->tx;    [in release_usb(), local]
     263  spin_lock_irqsave(&tx->lock, flags);    [in release_usb()]
     [all …]

/drivers/iio/gyro/

D | adxrs450.c
      77  __be32 tx ____cacheline_aligned;
      94  u32 tx;    [in adxrs450_spi_read_reg_16(), local]
      98  .tx_buf = &st->tx,    [in adxrs450_spi_read_reg_16()]
     100  .len = sizeof(st->tx),    [in adxrs450_spi_read_reg_16()]
     110  tx = ADXRS450_READ_DATA | (reg_address << 17);    [in adxrs450_spi_read_reg_16()]
     112  if (!(hweight32(tx) & 1))    [in adxrs450_spi_read_reg_16()]
     113  tx |= ADXRS450_P;    [in adxrs450_spi_read_reg_16()]
     115  st->tx = cpu_to_be32(tx);    [in adxrs450_spi_read_reg_16()]
     142  u32 tx;    [in adxrs450_spi_write_reg_16(), local]
     146  tx = ADXRS450_WRITE_DATA | (reg_address << 17) | (val << 1);    [in adxrs450_spi_write_reg_16()]
     [all …]

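adxrs450_spi_read_reg_16() builds a 32-bit command word and then forces odd parity: if the popcount of the word is even, it sets the parity bit, so the device can detect single-bit corruption on the wire. The same step in portable C (the bit position is an assumption here; the driver's constant is ADXRS450_P):

```c
#include <stdint.h>

#define PARITY_BIT 0x1u   /* assumed position of the parity bit */

static uint32_t with_odd_parity(uint32_t word)
{
	/* __builtin_popcount() plays the role of the kernel's hweight32() */
	if (!(__builtin_popcount(word) & 1))  /* even count of 1 bits? */
		word |= PARITY_BIT;           /* make the total odd */
	return word;
}
```
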
/drivers/net/ethernet/brocade/bna/

D | bna_tx_rx.c
    2766  #define call_tx_stop_cbfn(tx) \    [argument]
    2768  if ((tx)->stop_cbfn) { \
    2771  cbfn = (tx)->stop_cbfn; \
    2772  cbarg = (tx)->stop_cbarg; \
    2773  (tx)->stop_cbfn = NULL; \
    2774  (tx)->stop_cbarg = NULL; \
    2775  cbfn(cbarg, (tx)); \
    2779  static void bna_tx_mod_cb_tx_stopped(void *tx_mod, struct bna_tx *tx);
    2780  static void bna_bfi_tx_enet_start(struct bna_tx *tx);
    2781  static void bna_tx_enet_stop(struct bna_tx *tx);
     [all …]

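call_tx_stop_cbfn() is a one-shot callback dispatch: it snapshots the function pointer and argument, clears the stored copies, and only then invokes the callback, so the callback cannot fire twice and is free to rearm the slot. The same idea as a plain function (type names are illustrative):

```c
struct bna_tx_like;

typedef void (*stop_cbfn_t)(void *arg, struct bna_tx_like *tx);

struct bna_tx_like {
	stop_cbfn_t stop_cbfn;
	void *stop_cbarg;
};

static void call_stop_cbfn(struct bna_tx_like *tx)
{
	if (tx->stop_cbfn) {
		stop_cbfn_t cbfn = tx->stop_cbfn;  /* snapshot... */
		void *cbarg = tx->stop_cbarg;

		tx->stop_cbfn = NULL;              /* ...disarm... */
		tx->stop_cbarg = NULL;
		cbfn(cbarg, tx);                   /* ...then fire exactly once */
	}
}
```
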
/drivers/dma/

D | dmaengine.h
      29  static inline dma_cookie_t dma_cookie_assign(struct dma_async_tx_descriptor *tx)    [in dma_cookie_assign(), argument]
      31  struct dma_chan *chan = tx->chan;    [in dma_cookie_assign()]
      37  tx->cookie = chan->cookie = cookie;    [in dma_cookie_assign()]
      52  static inline void dma_cookie_complete(struct dma_async_tx_descriptor *tx)    [in dma_cookie_complete(), argument]
      54  BUG_ON(tx->cookie < DMA_MIN_COOKIE);    [in dma_cookie_complete()]
      55  tx->chan->completed_cookie = tx->cookie;    [in dma_cookie_complete()]
      56  tx->cookie = 0;    [in dma_cookie_complete()]
     106  dmaengine_desc_get_callback(struct dma_async_tx_descriptor *tx,    [in dmaengine_desc_get_callback(), argument]
     109  cb->callback = tx->callback;    [in dmaengine_desc_get_callback()]
     110  cb->callback_result = tx->callback_result;    [in dmaengine_desc_get_callback()]
     [all …]

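dma_cookie_assign() hands each submitted descriptor the channel's next cookie, a monotonically increasing ID later compared against completed_cookie to answer status queries. A self-contained model of the counter (DMA_MIN_COOKIE is 1 in the kernel; the clamp keeps wrapped cookies out of the reserved zero/error range):

```c
#include <stdint.h>

#define DMA_MIN_COOKIE 1

typedef int32_t dma_cookie_t;

struct chan { dma_cookie_t cookie, completed_cookie; };
struct txd  { struct chan *chan; dma_cookie_t cookie; };

static dma_cookie_t cookie_assign(struct txd *tx)
{
	dma_cookie_t cookie = tx->chan->cookie + 1;

	if (cookie < DMA_MIN_COOKIE)   /* signed wrap: skip values <= 0 */
		cookie = DMA_MIN_COOKIE;
	tx->cookie = tx->chan->cookie = cookie;
	return cookie;
}

/* Completion mirrors lines 54-56 above: record it, then poison the field.
 * A cookie counts as "done" once completed_cookie has advanced past it. */
static void cookie_complete(struct txd *tx)
{
	tx->chan->completed_cookie = tx->cookie;
	tx->cookie = 0;
}
```
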
D | virt-dma.c
      17  static struct virt_dma_desc *to_virt_desc(struct dma_async_tx_descriptor *tx)    [in to_virt_desc(), argument]
      19  return container_of(tx, struct virt_dma_desc, tx);    [in to_virt_desc()]
      22  dma_cookie_t vchan_tx_submit(struct dma_async_tx_descriptor *tx)    [in vchan_tx_submit(), argument]
      24  struct virt_dma_chan *vc = to_virt_chan(tx->chan);    [in vchan_tx_submit()]
      25  struct virt_dma_desc *vd = to_virt_desc(tx);    [in vchan_tx_submit()]
      30  cookie = dma_cookie_assign(tx);    [in vchan_tx_submit()]
      52  int vchan_tx_desc_free(struct dma_async_tx_descriptor *tx)    [in vchan_tx_desc_free(), argument]
      54  struct virt_dma_chan *vc = to_virt_chan(tx->chan);    [in vchan_tx_desc_free()]
      55  struct virt_dma_desc *vd = to_virt_desc(tx);    [in vchan_tx_desc_free()]
      63  vc, vd, vd->tx.cookie);    [in vchan_tx_desc_free()]
     [all …]

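virt-dma embeds the generic dma_async_tx_descriptor inside its own virt_dma_desc and recovers the outer object with container_of(), so the dmaengine core only ever handles the embedded base. The embed-and-recover pattern, self-contained (the struct contents are placeholders):

```c
#include <stddef.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct tx_desc { int cookie; };          /* stand-in for the generic descriptor */

struct virt_desc {
	struct tx_desc tx;               /* embedded base handed to the core */
	int num_sgs;                     /* driver-private state lives alongside */
};

static struct virt_desc *to_virt_desc(struct tx_desc *tx)
{
	return container_of(tx, struct virt_desc, tx);
}
```
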
/drivers/i2c/busses/

D | i2c-dln2.c
      61  } tx;    [in dln2_i2c_enable(), local]
      63  tx.port = dln2->port;    [in dln2_i2c_enable()]
      70  return dln2_transfer_tx(dln2->pdev, cmd, &tx, sizeof(tx));    [in dln2_i2c_enable()]
      84  } __packed *tx = dln2->buf;    [in dln2_i2c_write(), local]
      87  BUILD_BUG_ON(sizeof(*tx) > DLN2_I2C_BUF_SIZE);    [in dln2_i2c_write()]
      89  tx->port = dln2->port;    [in dln2_i2c_write()]
      90  tx->addr = addr;    [in dln2_i2c_write()]
      91  tx->mem_addr_len = 0;    [in dln2_i2c_write()]
      92  tx->mem_addr = 0;    [in dln2_i2c_write()]
      93  tx->buf_len = cpu_to_le16(data_len);    [in dln2_i2c_write()]
     [all …]

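dln2_i2c_write() maps a __packed struct directly onto the preallocated transfer buffer and converts multi-byte fields to little-endian explicitly; BUILD_BUG_ON() rejects a header that outgrows the buffer at compile time. A hedged sketch of that layout, with C11 _Static_assert standing in for BUILD_BUG_ON (field widths and the buffer size are assumptions):

```c
#include <stdint.h>

#define DLN2_I2C_BUF_SIZE 64   /* illustrative buffer size */

struct __attribute__((packed)) i2c_write_msg {
	uint8_t  port;
	uint8_t  addr;
	uint8_t  mem_addr_len;
	uint32_t mem_addr;   /* little-endian on the wire (cpu_to_le32) */
	uint16_t buf_len;    /* little-endian on the wire (cpu_to_le16) */
	uint8_t  buf[];      /* buf_len payload bytes follow the header */
};

/* Catch a header that no longer fits the transfer buffer at build time. */
_Static_assert(sizeof(struct i2c_write_msg) <= DLN2_I2C_BUF_SIZE,
	       "wire header must fit the DLN2 transfer buffer");
```
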
/drivers/atm/

D | eni.c
     188  if (eni_dev->tx[i].send)    [in dump()]
     190  eni_dev->tx[i].send,eni_dev->tx[i].words*4);    [in dump()]
    1036  struct eni_tx *tx;    [in do_tx(), local]
    1050  tx = eni_vcc->tx;    [in do_tx()]
    1051  NULLCHECK(tx);    [in do_tx()]
    1090  if (!NEPMOK(tx->tx_pos,size+TX_GAP,    [in do_tx()]
    1091  eni_in(MID_TX_RDPTR(tx->index)),tx->words)) {    [in do_tx()]
    1108  DPRINTK("dma_wr is %d, tx_pos is %ld\n",dma_wr,tx->tx_pos);    [in do_tx()]
    1120  eni_dev->dma[j++] = (((tx->tx_pos+TX_DESCR_SIZE) & (tx->words-1)) <<    [in do_tx()]
    1121  MID_DMA_COUNT_SHIFT) | (tx->index << MID_DMA_CHAN_SHIFT) |    [in do_tx()]
     [all …]

/drivers/iio/imu/

D | adis.c
      38  .tx_buf = adis->tx,    [in adis_write_reg()]
      44  .tx_buf = adis->tx + 2,    [in adis_write_reg()]
      50  .tx_buf = adis->tx + 4,    [in adis_write_reg()]
      56  .tx_buf = adis->tx + 6,    [in adis_write_reg()]
      61  .tx_buf = adis->tx + 8,    [in adis_write_reg()]
      73  adis->tx[0] = ADIS_WRITE_REG(ADIS_REG_PAGE_ID);    [in adis_write_reg()]
      74  adis->tx[1] = page;    [in adis_write_reg()]
      80  adis->tx[8] = ADIS_WRITE_REG(reg + 3);    [in adis_write_reg()]
      81  adis->tx[9] = (value >> 24) & 0xff;    [in adis_write_reg()]
      82  adis->tx[6] = ADIS_WRITE_REG(reg + 2);    [in adis_write_reg()]
     [all …]

/drivers/usb/musb/

D | cppi_dma.c
      97  static void cppi_reset_tx(struct cppi_tx_stateram __iomem *tx, u32 ptr)    [in cppi_reset_tx(), argument]
      99  musb_writel(&tx->tx_head, 0, 0);    [in cppi_reset_tx()]
     100  musb_writel(&tx->tx_buf, 0, 0);    [in cppi_reset_tx()]
     101  musb_writel(&tx->tx_current, 0, 0);    [in cppi_reset_tx()]
     102  musb_writel(&tx->tx_buf_current, 0, 0);    [in cppi_reset_tx()]
     103  musb_writel(&tx->tx_info, 0, 0);    [in cppi_reset_tx()]
     104  musb_writel(&tx->tx_rem_len, 0, 0);    [in cppi_reset_tx()]
     106  musb_writel(&tx->tx_complete, 0, ptr);    [in cppi_reset_tx()]
     160  for (i = 0; i < ARRAY_SIZE(controller->tx); i++) {    [in cppi_controller_start()]
     161  controller->tx[i].transmit = true;    [in cppi_controller_start()]
     [all …]

/drivers/net/wireless/ath/ath9k/

D | htc_drv_txrx.c
      58  spin_lock_bh(&priv->tx.tx_lock);    [in ath9k_htc_check_stop_queues()]
      59  priv->tx.queued_cnt++;    [in ath9k_htc_check_stop_queues()]
      60  if ((priv->tx.queued_cnt >= ATH9K_HTC_TX_THRESHOLD) &&    [in ath9k_htc_check_stop_queues()]
      61  !(priv->tx.flags & ATH9K_HTC_OP_TX_QUEUES_STOP)) {    [in ath9k_htc_check_stop_queues()]
      62  priv->tx.flags |= ATH9K_HTC_OP_TX_QUEUES_STOP;    [in ath9k_htc_check_stop_queues()]
      65  spin_unlock_bh(&priv->tx.tx_lock);    [in ath9k_htc_check_stop_queues()]
      70  spin_lock_bh(&priv->tx.tx_lock);    [in ath9k_htc_check_wake_queues()]
      71  if ((priv->tx.queued_cnt < ATH9K_HTC_TX_THRESHOLD) &&    [in ath9k_htc_check_wake_queues()]
      72  (priv->tx.flags & ATH9K_HTC_OP_TX_QUEUES_STOP)) {    [in ath9k_htc_check_wake_queues()]
      73  priv->tx.flags &= ~ATH9K_HTC_OP_TX_QUEUES_STOP;    [in ath9k_htc_check_wake_queues()]
     [all …]

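The ath9k_htc stop/wake pair is edge-triggered: queued_cnt is compared with the threshold under tx_lock, and a flag bit remembers whether the queues are currently stopped, so the driver's stop/wake of the mac80211 queues (elided in the excerpt) fires once per threshold crossing rather than once per packet. A userspace model of the decision logic (the caller provides serialization, as tx_lock does in the driver; the threshold value is illustrative):

```c
#include <stdbool.h>

#define TX_THRESHOLD 256   /* illustrative; the driver uses ATH9K_HTC_TX_THRESHOLD */

struct txq {
	int queued_cnt;
	bool stopped;      /* models the ATH9K_HTC_OP_TX_QUEUES_STOP flag */
};

/* On enqueue: returns true exactly once when the high watermark is hit. */
static bool txq_enqueue_should_stop(struct txq *q)
{
	if (++q->queued_cnt >= TX_THRESHOLD && !q->stopped) {
		q->stopped = true;
		return true;       /* caller stops the net queues */
	}
	return false;
}

/* On completion: returns true exactly once when draining below the mark. */
static bool txq_complete_should_wake(struct txq *q)
{
	if (--q->queued_cnt < TX_THRESHOLD && q->stopped) {
		q->stopped = false;
		return true;       /* caller wakes the net queues */
	}
	return false;
}
```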