/drivers/net/
ifb.c
     64  struct ifb_q_private *txp = (struct ifb_q_private *)_txp;   in ifb_ri_tasklet() local
     68  txq = netdev_get_tx_queue(txp->dev, txp->txqnum);   in ifb_ri_tasklet()
     69  skb = skb_peek(&txp->tq);   in ifb_ri_tasklet()
     73  skb_queue_splice_tail_init(&txp->rq, &txp->tq);   in ifb_ri_tasklet()
     77  while ((skb = __skb_dequeue(&txp->tq)) != NULL) {   in ifb_ri_tasklet()
     83  u64_stats_update_begin(&txp->tsync);   in ifb_ri_tasklet()
     84  txp->tx_packets++;   in ifb_ri_tasklet()
     85  txp->tx_bytes += skb->len;   in ifb_ri_tasklet()
     86  u64_stats_update_end(&txp->tsync);   in ifb_ri_tasklet()
     89  skb->dev = dev_get_by_index_rcu(dev_net(txp->dev), skb->skb_iif);   in ifb_ri_tasklet()
     [all …]
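The ifb hits above show a "splice then drain" queueing pattern: packets accumulated on the receive-side queue (txp->rq) are moved onto the tasklet's private queue (txp->tq) in one operation and then dequeued one at a time, with the per-queue counters updated inside a u64_stats section. Below is a minimal userspace sketch of the splice-and-drain part only; the pkt/pkt_queue types are toys standing in for sk_buff and sk_buff_head, not the kernel API.

    /* Minimal userspace sketch of the splice-and-drain pattern seen in
     * ifb_ri_tasklet().  Toy types, not the kernel sk_buff queue API. */
    #include <stddef.h>
    #include <stdio.h>

    struct pkt {
        int id;
        struct pkt *next;
    };

    struct pkt_queue {
        struct pkt *head, *tail;
    };

    /* Append everything on @src to the tail of @dst and leave @src empty. */
    static void queue_splice_tail_init(struct pkt_queue *src, struct pkt_queue *dst)
    {
        if (!src->head)
            return;
        if (dst->tail)
            dst->tail->next = src->head;
        else
            dst->head = src->head;
        dst->tail = src->tail;
        src->head = src->tail = NULL;
    }

    /* Remove and return the packet at the head of @q, or NULL if empty. */
    static struct pkt *queue_dequeue(struct pkt_queue *q)
    {
        struct pkt *p = q->head;

        if (p) {
            q->head = p->next;
            if (!q->head)
                q->tail = NULL;
            p->next = NULL;
        }
        return p;
    }

    int main(void)
    {
        struct pkt a = { .id = 1 }, b = { .id = 2 };
        struct pkt_queue rq = { NULL, NULL }, tq = { NULL, NULL };
        struct pkt *p;

        rq.head = &a;              /* two packets waiting on the "receive" queue */
        a.next = &b;
        rq.tail = &b;

        queue_splice_tail_init(&rq, &tq);        /* like skb_queue_splice_tail_init() */
        while ((p = queue_dequeue(&tq)) != NULL) /* like the __skb_dequeue() loop */
            printf("transmitting packet %d\n", p->id);
        return 0;
    }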
/drivers/net/wireless/mediatek/mt76/mt7615/
pci_mac.c
     28  struct mt7615_txp_common *txp;   in mt7615_tx_complete_skb() local
     32  txp = mt7615_txwi_to_txp(mdev, e->txwi);   in mt7615_tx_complete_skb()
     35  token = le16_to_cpu(txp->fw.token);   in mt7615_tx_complete_skb()
     37  token = le16_to_cpu(txp->hw.msdu_id[0]) &   in mt7615_tx_complete_skb()
     54  struct mt7615_hw_txp *txp = txp_ptr;   in mt7615_write_hw_txp() local
     55  struct mt7615_txp_ptr *ptr = &txp->ptr[0];   in mt7615_write_hw_txp()
     59  tx_info->buf[0].len = MT_TXD_SIZE + sizeof(*txp);   in mt7615_write_hw_txp()
     62  txp->msdu_id[0] = cpu_to_le16(id | MT_MSDU_ID_VALID);   in mt7615_write_hw_txp()
     96  struct mt7615_fw_txp *txp = txp_ptr;   in mt7615_write_fw_txp() local
    101  txp->buf[i] = cpu_to_le32(tx_info->buf[i + 1].addr);   in mt7615_write_fw_txp()
     [all …]
mac.c
    690  mt7615_txp_skb_unmap_fw(struct mt76_dev *dev, struct mt7615_fw_txp *txp)   in mt7615_txp_skb_unmap_fw() argument
    694  for (i = 0; i < txp->nbuf; i++)   in mt7615_txp_skb_unmap_fw()
    695  dma_unmap_single(dev->dev, le32_to_cpu(txp->buf[i]),   in mt7615_txp_skb_unmap_fw()
    696  le16_to_cpu(txp->len[i]), DMA_TO_DEVICE);   in mt7615_txp_skb_unmap_fw()
    700  mt7615_txp_skb_unmap_hw(struct mt76_dev *dev, struct mt7615_hw_txp *txp)   in mt7615_txp_skb_unmap_hw() argument
    707  for (i = 0; i < ARRAY_SIZE(txp->ptr); i++) {   in mt7615_txp_skb_unmap_hw()
    708  struct mt7615_txp_ptr *ptr = &txp->ptr[i];   in mt7615_txp_skb_unmap_hw()
    733  struct mt7615_txp_common *txp;   in mt7615_txp_skb_unmap() local
    735  txp = mt7615_txwi_to_txp(dev, t);   in mt7615_txp_skb_unmap()
    737  mt7615_txp_skb_unmap_fw(dev, &txp->fw);   in mt7615_txp_skb_unmap()
     [all …]
/drivers/gpu/drm/vc4/
vc4_txp.c
    145  #define TXP_READ(offset) readl(txp->regs + (offset))
    146  #define TXP_WRITE(offset, val) writel(val, txp->regs + (offset))
    277  struct vc4_txp *txp = connector_to_vc4_txp(conn);   in vc4_txp_connector_atomic_commit() local
    320  drm_writeback_queue_job(&txp->connector, conn_state);   in vc4_txp_connector_atomic_commit()
    353  struct vc4_txp *txp = encoder_to_vc4_txp(encoder);   in vc4_txp_encoder_disable() local
    449  struct vc4_txp *txp = data;   in vc4_txp_interrupt() local
    450  struct vc4_crtc *vc4_crtc = &txp->base;   in vc4_txp_interrupt()
    454  drm_writeback_signal_completion(&txp->connector, 0);   in vc4_txp_interrupt()
    470  struct vc4_txp *txp;   in vc4_txp_bind() local
    479  txp = devm_kzalloc(dev, sizeof(*txp), GFP_KERNEL);   in vc4_txp_bind()
     [all …]
/drivers/infiniband/hw/hfi1/
ipoib_tx.c
    236  struct ipoib_txparms *txp)   in hfi1_ipoib_build_ulp_payload() argument
    238  struct hfi1_devdata *dd = txp->dd;   in hfi1_ipoib_build_ulp_payload()
    267  struct ipoib_txparms *txp)   in hfi1_ipoib_build_tx_desc() argument
    269  struct hfi1_devdata *dd = txp->dd;   in hfi1_ipoib_build_tx_desc()
    273  sizeof(sdma_hdr->pbc) + (txp->hdr_dwords << 2) + tx->skb->len;   in hfi1_ipoib_build_tx_desc()
    284  sizeof(sdma_hdr->pbc) + (txp->hdr_dwords << 2));   in hfi1_ipoib_build_tx_desc()
    289  return hfi1_ipoib_build_ulp_payload(tx, txp);   in hfi1_ipoib_build_tx_desc()
    293  struct ipoib_txparms *txp)   in hfi1_ipoib_build_ib_tx_headers() argument
    298  struct hfi1_pportdata *ppd = ppd_from_ibp(txp->ibp);   in hfi1_ipoib_build_ib_tx_headers()
    299  struct rdma_ah_attr *ah_attr = txp->ah_attr;   in hfi1_ipoib_build_ib_tx_headers()
     [all …]
trace_tx.h
    404  struct sdma_txreq *txp
    406  TP_ARGS(sde, hwhead, swhead, txp),
    421  __entry->txnext = txp ? txp->next_descq_idx : ~0;
    423  __entry->sn = txp ? txp->sn : ~0;
    441  struct sdma_txreq *txp
    443  TP_ARGS(sde, hwhead, swhead, txp),
    457  __entry->txnext = txp ? txp->next_descq_idx : ~0;
sdma.c
    406  struct sdma_txreq *txp, *txp_next;   in sdma_flush() local
    418  list_for_each_entry_safe(txp, txp_next, &flushlist, list)   in sdma_flush()
    419  complete_tx(sde, txp, SDMA_TXREQ_S_ABORTED);   in sdma_flush()
    582  struct sdma_txreq *txp = get_txhead(sde);   in sdma_flush_descq() local
    595  if (txp && txp->next_descq_idx == head) {   in sdma_flush_descq()
    598  complete_tx(sde, txp, SDMA_TXREQ_S_ABORTED);   in sdma_flush_descq()
    599  trace_hfi1_sdma_progress(sde, head, tail, txp);   in sdma_flush_descq()
    600  txp = get_txhead(sde);   in sdma_flush_descq()
   1828  struct sdma_txreq *txp = NULL;   in sdma_make_progress() local
   1842  txp = get_txhead(sde);   in sdma_make_progress()
     [all …]
/drivers/net/wireless/intel/iwlwifi/
iwl-eeprom-parse.c
    352  struct iwl_eeprom_enhanced_txpwr *txp)   in iwl_get_max_txpwr_half_dbm() argument
    357  if (data->valid_tx_ant & ANT_A && txp->chain_a_max > result)   in iwl_get_max_txpwr_half_dbm()
    358  result = txp->chain_a_max;   in iwl_get_max_txpwr_half_dbm()
    360  if (data->valid_tx_ant & ANT_B && txp->chain_b_max > result)   in iwl_get_max_txpwr_half_dbm()
    361  result = txp->chain_b_max;   in iwl_get_max_txpwr_half_dbm()
    363  if (data->valid_tx_ant & ANT_C && txp->chain_c_max > result)   in iwl_get_max_txpwr_half_dbm()
    364  result = txp->chain_c_max;   in iwl_get_max_txpwr_half_dbm()
    368  data->valid_tx_ant == ANT_AC) && txp->mimo2_max > result)   in iwl_get_max_txpwr_half_dbm()
    369  result = txp->mimo2_max;   in iwl_get_max_txpwr_half_dbm()
    371  if (data->valid_tx_ant == ANT_ABC && txp->mimo3_max > result)   in iwl_get_max_txpwr_half_dbm()
     [all …]
/drivers/net/xen-netback/
netback.c
    110  const struct xen_netif_tx_request *txp,
    206  struct xen_netif_tx_request *txp,   in xenvif_tx_err() argument
    212  make_tx_response(queue, txp, extra_count, XEN_NETIF_RSP_ERROR);   in xenvif_tx_err()
    215  RING_COPY_REQUEST(&queue->tx, cons++, txp);   in xenvif_tx_err()
    233  struct xen_netif_tx_request *txp,   in xenvif_count_requests() argument
    282  txp = &dropped_tx;   in xenvif_count_requests()
    284  RING_COPY_REQUEST(&queue->tx, cons + slots, txp);   in xenvif_count_requests()
    295  if (!drop_err && txp->size > first->size) {   in xenvif_count_requests()
    299  txp->size, first->size);   in xenvif_count_requests()
    303  first->size -= txp->size;   in xenvif_count_requests()
     [all …]
/drivers/net/wireless/mediatek/mt76/mt76x2/
phy.c
    141  struct mt76x2_tx_power_info txp;   in mt76x2_phy_set_txpower() local
    146  mt76x2_get_power_info(dev, &txp, chan);   in mt76x2_phy_set_txpower()
    149  delta = txp.delta_bw40;   in mt76x2_phy_set_txpower()
    151  delta = txp.delta_bw80;   in mt76x2_phy_set_txpower()
    154  mt76x02_add_rate_power_offset(&t, txp.target_power + delta);   in mt76x2_phy_set_txpower()
    159  delta = base_power - txp.target_power;   in mt76x2_phy_set_txpower()
    160  txp_0 = txp.chain[0].target_power + txp.chain[0].delta + delta;   in mt76x2_phy_set_txpower()
    161  txp_1 = txp.chain[1].target_power + txp.chain[1].delta + delta;   in mt76x2_phy_set_txpower()
    175  dev->target_power = txp.target_power;   in mt76x2_phy_set_txpower()
    176  dev->target_power_delta[0] = txp_0 - txp.chain[0].target_power;   in mt76x2_phy_set_txpower()
     [all …]
init.c
    155  struct mt76x2_tx_power_info txp;   in mt76x2_init_txpower() local
    162  mt76x2_get_power_info(dev, &txp, chan);   in mt76x2_init_txpower()
    166  txp.target_power;   in mt76x2_init_txpower()
/drivers/spi/
spi-oc-tiny.c
     50  const u8 *txp;   member
    110  const u8 *txp = t->tx_buf;   in tiny_spi_txrx_bufs() local
    117  hw->txp = t->tx_buf;   in tiny_spi_txrx_bufs()
    124  writeb(hw->txp ? *hw->txp++ : 0,   in tiny_spi_txrx_bufs()
    127  writeb(hw->txp ? *hw->txp++ : 0,   in tiny_spi_txrx_bufs()
    132  writeb(hw->txp ? *hw->txp++ : 0,   in tiny_spi_txrx_bufs()
    141  writeb(txp ? *txp++ : 0, hw->base + TINY_SPI_TXDATA);   in tiny_spi_txrx_bufs()
    143  writeb(txp ? *txp++ : 0, hw->base + TINY_SPI_TXDATA);   in tiny_spi_txrx_bufs()
    173  writeb(hw->txp ? *hw->txp++ : 0,   in tiny_spi_irq()
spi-falcon.c
    102  const u8 *txp = t->tx_buf;   in falcon_sflash_xfer() local
    122  if (!txp) {   in falcon_sflash_xfer()
    138  priv->sfcmd |= *txp;   in falcon_sflash_xfer()
    139  txp++;   in falcon_sflash_xfer()
    154  if (txp && bytelen) {   in falcon_sflash_xfer()
    179  val = (val << 8) | (*txp++);   in falcon_sflash_xfer()
    181  } else if ((dumlen < 15) && (*txp == 0)) {   in falcon_sflash_xfer()
    187  txp++;   in falcon_sflash_xfer()
    226  val |= (*txp++) << (8 * len++);   in falcon_sflash_xfer()
spi-ti-qspi.c
    266  u32 *txp = (u32 *)txbuf;   in qspi_write_msg() local
    268  data = cpu_to_be32(*txp++);   in qspi_write_msg()
    271  data = cpu_to_be32(*txp++);   in qspi_write_msg()
    274  data = cpu_to_be32(*txp++);   in qspi_write_msg()
    277  data = cpu_to_be32(*txp++);   in qspi_write_msg()
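In qspi_write_msg() every 32-bit word taken from the TX buffer is converted to big-endian before it is written out. A portable userspace sketch of the same byte-ordering step follows; put_be32() is a stand-in for the kernel's cpu_to_be32() plus the register write, and the buffer contents are made up for illustration.

    /* Sketch of the byte-order handling in qspi_write_msg(): each 32-bit word
     * from the TX buffer is emitted most-significant byte first. */
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Store @x big-endian, regardless of host byte order. */
    static void put_be32(uint8_t out[4], uint32_t x)
    {
        out[0] = (uint8_t)(x >> 24);
        out[1] = (uint8_t)(x >> 16);
        out[2] = (uint8_t)(x >> 8);
        out[3] = (uint8_t)x;
    }

    int main(void)
    {
        const uint32_t txbuf[2] = { 0x11223344, 0xa1b2c3d4 };   /* made-up data */
        const uint32_t *txp = txbuf;         /* walking pointer, as in the driver */
        uint8_t fifo[sizeof(txbuf)];

        for (size_t i = 0; i < 2; i++, txp++)
            put_be32(&fifo[4 * i], *txp);

        for (size_t i = 0; i < sizeof(fifo); i++)
            printf("%02x%c", fifo[i], (i % 4 == 3) ? '\n' : ' ');
        return 0;
    }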
/drivers/rtc/
rtc-rs5c348.c
     63  u8 txbuf[5+7], *txp;   in rs5c348_rtc_set_time() local
     78  txp = txbuf;   in rs5c348_rtc_set_time()
     84  txp = &txbuf[5];   in rs5c348_rtc_set_time()
     85  txp[RS5C348_REG_SECS] = bin2bcd(tm->tm_sec);   in rs5c348_rtc_set_time()
     86  txp[RS5C348_REG_MINS] = bin2bcd(tm->tm_min);   in rs5c348_rtc_set_time()
     88  txp[RS5C348_REG_HOURS] = bin2bcd(tm->tm_hour);   in rs5c348_rtc_set_time()
     91  txp[RS5C348_REG_HOURS] = bin2bcd((tm->tm_hour + 11) % 12 + 1) |   in rs5c348_rtc_set_time()
     94  txp[RS5C348_REG_WDAY] = bin2bcd(tm->tm_wday);   in rs5c348_rtc_set_time()
     95  txp[RS5C348_REG_DAY] = bin2bcd(tm->tm_mday);   in rs5c348_rtc_set_time()
     96  txp[RS5C348_REG_MONTH] = bin2bcd(tm->tm_mon + 1) |   in rs5c348_rtc_set_time()
     [all …]
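The rs5c348 excerpt fills the transmit buffer with BCD-encoded time fields (bin2bcd()) and, in 12-hour mode, remaps the 0-23 hour to 1-12. A small sketch of both conversions, with bin2bcd() reimplemented locally rather than taken from the kernel:

    /* Sketch of the conversions used when loading the RTC time registers.
     * bin2bcd() here mirrors the kernel helper's behaviour for values 0-99. */
    #include <stdint.h>
    #include <stdio.h>

    static uint8_t bin2bcd(unsigned int val)
    {
        return (uint8_t)(((val / 10) << 4) | (val % 10));
    }

    int main(void)
    {
        int tm_hour = 17;   /* 5 PM in 24-hour form */

        printf("34 seconds -> BCD 0x%02x\n", bin2bcd(34));
        /* 24h -> 12h remapping as in the driver: 0 -> 12, 12 -> 12, 13 -> 1. */
        printf("hour %d -> 12-hour %d\n", tm_hour, (tm_hour + 11) % 12 + 1);
        return 0;
    }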
/drivers/infiniband/hw/qib/
qib_sdma.c
    103  struct qib_sdma_txreq *txp, *txp_next;   in clear_sdma_activelist() local
    105  list_for_each_entry_safe(txp, txp_next, &ppd->sdma_activelist, list) {   in clear_sdma_activelist()
    106  list_del_init(&txp->list);   in clear_sdma_activelist()
    107  if (txp->flags & QIB_SDMA_TXREQ_F_FREEDESC) {   in clear_sdma_activelist()
    110  idx = txp->start_idx;   in clear_sdma_activelist()
    111  while (idx != txp->next_descq_idx) {   in clear_sdma_activelist()
    117  if (txp->callback)   in clear_sdma_activelist()
    118  (*txp->callback)(txp, QIB_SDMA_TXREQ_S_ABORTED);   in clear_sdma_activelist()
    328  struct qib_sdma_txreq *txp = NULL;   in qib_sdma_make_progress() local
    344  txp = list_entry(lp, struct qib_sdma_txreq, list);   in qib_sdma_make_progress()
     [all …]
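clear_sdma_activelist() walks the active list with list_for_each_entry_safe(), which caches the next entry before the current one is deleted and completed. A userspace sketch of the same "save next, then free current" traversal on a plain singly linked list (toy txreq type, not the kernel list_head API):

    /* Sketch of the "safe" list walk in clear_sdma_activelist(): the next
     * pointer is saved before the current request is completed and freed, so
     * the traversal survives removal of the node it stands on. */
    #include <stdio.h>
    #include <stdlib.h>

    struct txreq {
        int sn;
        struct txreq *next;
    };

    int main(void)
    {
        struct txreq *head = NULL, *txp, *txp_next;

        /* Build a small active list: 1 -> 2 -> 3. */
        for (int i = 3; i >= 1; i--) {
            struct txreq *t = malloc(sizeof(*t));

            if (!t)
                return 1;
            t->sn = i;
            t->next = head;
            head = t;
        }

        /* Abort path: complete and free every request still on the list. */
        for (txp = head; txp; txp = txp_next) {
            txp_next = txp->next;   /* grab the next entry before freeing */
            printf("aborting txreq %d\n", txp->sn);
            free(txp);
        }
        head = NULL;
        return 0;
    }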
/drivers/net/ethernet/micrel/
ks8851_par.c
    193  static void ks8851_wrfifo_par(struct ks8851_net *ks, struct sk_buff *txp,   in ks8851_wrfifo_par() argument
    197  unsigned int len = ALIGN(txp->len, 4);   in ks8851_wrfifo_par()
    201  __func__, txp, txp->len, txp->data, irq);   in ks8851_wrfifo_par()
    210  iowrite16(txp->len, ksp->hw_addr);   in ks8851_wrfifo_par()
    212  iowrite16_rep(ksp->hw_addr, txp->data, len / 2);   in ks8851_wrfifo_par()
ks8851_spi.c
    254  static void ks8851_wrfifo_spi(struct ks8851_net *ks, struct sk_buff *txp,   in ks8851_wrfifo_spi() argument
    264  __func__, txp, txp->len, txp->data, irq);   in ks8851_wrfifo_spi()
    275  ks->txh.txw[2] = cpu_to_le16(txp->len);   in ks8851_wrfifo_spi()
    282  xfer->tx_buf = txp->data;   in ks8851_wrfifo_spi()
    284  xfer->len = ALIGN(txp->len, 4);   in ks8851_wrfifo_spi()
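Both ks8851 write paths pad the frame length to a 4-byte boundary with ALIGN(txp->len, 4); the parallel-bus variant then issues len / 2 16-bit FIFO writes, while the SPI variant uses the padded length as the transfer size. A short sketch of that length arithmetic, with ALIGN() redefined locally for power-of-two alignments:

    /* Sketch of the length handling shared by the two FIFO write paths: the
     * frame length is rounded up to a 4-byte boundary before the transfer. */
    #include <stdio.h>

    #define ALIGN(x, a)   (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
        unsigned int skb_len = 61;              /* example frame length */
        unsigned int len = ALIGN(skb_len, 4);   /* 64 */

        printf("padded length %u, 16-bit FIFO writes %u\n", len, len / 2);
        return 0;
    }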
/drivers/net/wireless/mediatek/mt76/mt7915/
mac.c
    719  struct mt7915_txp *txp;   in mt7915_tx_prepare_skb() local
    731  txp = (struct mt7915_txp *)(txwi + MT_TXD_SIZE);   in mt7915_tx_prepare_skb()
    733  txp->buf[i] = cpu_to_le32(tx_info->buf[i + 1].addr);   in mt7915_tx_prepare_skb()
    734  txp->len[i] = cpu_to_le16(tx_info->buf[i + 1].len);   in mt7915_tx_prepare_skb()
    736  txp->nbuf = nbuf;   in mt7915_tx_prepare_skb()
    743  txp->flags = cpu_to_le16(MT_CT_INFO_APPLY_TXD);   in mt7915_tx_prepare_skb()
    746  txp->flags |= cpu_to_le16(MT_CT_INFO_NONE_CIPHER_FRAME);   in mt7915_tx_prepare_skb()
    749  txp->flags |= cpu_to_le16(MT_CT_INFO_MGMT_FRAME);   in mt7915_tx_prepare_skb()
    754  txp->bss_idx = mvif->idx;   in mt7915_tx_prepare_skb()
    766  txp->token = cpu_to_le16(id);   in mt7915_tx_prepare_skb()
     [all …]
/drivers/net/wireless/broadcom/brcm80211/brcmsmac/
dma.c
    236  struct sk_buff **txp;   member
    652  di->txp = kzalloc(size, GFP_ATOMIC);   in dma_attach()
    653  if (di->txp == NULL)   in dma_attach()
    764  kfree(di->txp);   in dma_detach()
   1311  di->txp[prevtxd(di, txout)] = p;   in dma_txenq()
   1473  struct sk_buff *txp;   in dma_getnexttxp() local
   1484  txp = NULL;   in dma_getnexttxp()
   1511  for (i = start; i != end && !txp; i = nexttxd(di, i)) {   in dma_getnexttxp()
   1524  txp = di->txp[i];   in dma_getnexttxp()
   1525  di->txp[i] = NULL;   in dma_getnexttxp()
     [all …]
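The brcmsmac hits show a parallel array of packet pointers, di->txp[], indexed by descriptor slot: the packet is stored at enqueue time (dma_txenq()) and fetched and cleared when the slot completes (dma_getnexttxp()), with nexttxd()/prevtxd() wrapping the index around the ring. A speculative userspace sketch of that bookkeeping, with an invented ring size and toy types:

    /* Speculative sketch of the descriptor bookkeeping pattern: a parallel
     * array of packet pointers plus ring-wrapping index helpers.  NTXD and
     * the types are invented for illustration only. */
    #include <stdio.h>

    #define NTXD 8   /* ring size; a power of two in the real driver */

    struct pkt { int id; };

    struct dma_info {
        struct pkt *txp[NTXD];   /* packet associated with each descriptor */
        unsigned int txout;      /* next free descriptor slot */
    };

    static unsigned int nexttxd(unsigned int i) { return (i + 1) % NTXD; }
    static unsigned int prevtxd(unsigned int i) { return (i + NTXD - 1) % NTXD; }

    int main(void)
    {
        struct dma_info di = { .txout = 0 };
        struct pkt p = { .id = 7 };
        unsigned int slot;
        struct pkt *done;

        /* Enqueue: advance txout, remember the packet on the last used slot. */
        di.txout = nexttxd(di.txout);
        di.txp[prevtxd(di.txout)] = &p;

        /* Reclaim: fetch and clear the pointer for the completed slot. */
        slot = prevtxd(di.txout);
        done = di.txp[slot];
        di.txp[slot] = NULL;

        printf("completed packet %d from slot %u\n", done->id, slot);
        return 0;
    }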
/drivers/net/wireless/broadcom/brcm80211/brcmfmac/
bcdc.c
    362  brcmf_proto_bcdc_txcomplete(struct device *dev, struct sk_buff *txp,   in brcmf_proto_bcdc_txcomplete() argument
    372  brcmf_fws_bustxfail(bcdc->fws, txp);   in brcmf_proto_bcdc_txcomplete()
    374  if (brcmf_proto_bcdc_hdrpull(bus_if->drvr, false, txp, &ifp))   in brcmf_proto_bcdc_txcomplete()
    375  brcmu_pkt_buf_free_skb(txp);   in brcmf_proto_bcdc_txcomplete()
    377  brcmf_txfinalize(ifp, txp, success);   in brcmf_proto_bcdc_txcomplete()
bcdc.h
     12  void brcmf_proto_bcdc_txcomplete(struct device *dev, struct sk_buff *txp,
core.h
    210  void brcmf_txfinalize(struct brcmf_if *ifp, struct sk_buff *txp, bool success);
/drivers/net/wireless/marvell/mwifiex/
sta_cmdresp.c
    438  struct host_cmd_ds_rf_tx_pwr *txp = &resp->params.txp;   in mwifiex_ret_rf_tx_power() local
    439  u16 action = le16_to_cpu(txp->action);   in mwifiex_ret_rf_tx_power()
    441  priv->tx_power_level = le16_to_cpu(txp->cur_level);   in mwifiex_ret_rf_tx_power()
    444  priv->max_tx_power_level = txp->max_power;   in mwifiex_ret_rf_tx_power()
    445  priv->min_tx_power_level = txp->min_power;   in mwifiex_ret_rf_tx_power()
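mwifiex_ret_rf_tx_power() converts little-endian 16-bit fields from the firmware response to host order with le16_to_cpu() before storing them. A userspace sketch of the same conversion on a raw byte buffer; get_le16() is a stand-in for le16_to_cpu(), and the response layout used here is hypothetical:

    /* Sketch of the endianness handling for firmware response fields: 16-bit
     * values arrive little-endian and are converted to host order. */
    #include <stdint.h>
    #include <stdio.h>

    static uint16_t get_le16(const uint8_t *p)
    {
        return (uint16_t)(p[0] | (p[1] << 8));
    }

    int main(void)
    {
        /* Hypothetical response: action = 1, cur_level = 13. */
        const uint8_t resp[] = { 0x01, 0x00, 0x0d, 0x00 };

        printf("action %u, cur_level %u\n",
               get_le16(&resp[0]), get_le16(&resp[2]));
        return 0;
    }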
sta_cmd.c
    244  struct host_cmd_ds_txpwr_cfg *txp)   in mwifiex_cmd_tx_power_cfg() argument
    254  if (txp->mode) {   in mwifiex_cmd_tx_power_cfg()
    256  *) ((unsigned long) txp +   in mwifiex_cmd_tx_power_cfg()
    258  memmove(cmd_txp_cfg, txp,   in mwifiex_cmd_tx_power_cfg()
    270  memmove(cmd_txp_cfg, txp, sizeof(*txp));   in mwifiex_cmd_tx_power_cfg()
    289  struct host_cmd_ds_rf_tx_pwr *txp = &cmd->params.txp;   in mwifiex_cmd_rf_tx_power() local
    294  txp->action = cpu_to_le16(cmd_action);   in mwifiex_cmd_rf_tx_power()