/drivers/ps3/
D | ps3-vuart.c
     72  } tx_list;  (member)
    486  spin_lock_irqsave(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    488  if (list_empty(&priv->tx_list.head)) {  in ps3_vuart_write()
    493  spin_unlock_irqrestore(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    511  spin_unlock_irqrestore(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    523  spin_lock_irqsave(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    524  list_add_tail(&lb->link, &priv->tx_list.head);  in ps3_vuart_write()
    526  spin_unlock_irqrestore(&priv->tx_list.lock, flags);  in ps3_vuart_write()
    728  spin_lock_irqsave(&priv->tx_list.lock, flags);  in ps3_vuart_handle_interrupt_tx()
    730  list_for_each_entry_safe(lb, n, &priv->tx_list.head, link) {  in ps3_vuart_handle_interrupt_tx()
    [all …]
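The hits above trace a classic spinlocked TX backlog: a { lock, head } pair embedded in the driver's private struct, list_add_tail() on the write path, a list_for_each_entry_safe() drain in the interrupt handler. A minimal sketch of that shape, assuming hypothetical names (my_dev, my_buf) in place of the driver's types; only the <linux/list.h> and <linux/spinlock.h> helpers are real API:

#include <linux/list.h>
#include <linux/spinlock.h>

struct my_buf {
    struct list_head link;          /* chained on my_dev.tx_list.head */
    /* ... payload ... */
};

struct my_dev {
    struct {
        spinlock_t lock;            /* init with spin_lock_init() */
        struct list_head head;      /* init with INIT_LIST_HEAD() */
    } tx_list;
};

/* write path: queue a buffer for later transmission */
static void my_dev_queue_tx(struct my_dev *dev, struct my_buf *lb)
{
    unsigned long flags;

    spin_lock_irqsave(&dev->tx_list.lock, flags);
    list_add_tail(&lb->link, &dev->tx_list.head);
    spin_unlock_irqrestore(&dev->tx_list.lock, flags);
}

/* interrupt path: drain the backlog; the _safe walker tolerates the
 * list_del() done mid-iteration */
static void my_dev_tx_irq(struct my_dev *dev)
{
    struct my_buf *lb, *n;
    unsigned long flags;

    spin_lock_irqsave(&dev->tx_list.lock, flags);
    list_for_each_entry_safe(lb, n, &dev->tx_list.head, link) {
        /* ... push lb to the hardware FIFO, then: */
        list_del(&lb->link);
    }
    spin_unlock_irqrestore(&dev->tx_list.lock, flags);
}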
|
/drivers/dma/
D | mmp_pdma.c
     84  struct list_head tx_list;  (member)
    351  list_for_each_entry(child, &desc->tx_list, node) {  in mmp_pdma_tx_submit()
    356  list_splice_tail_init(&desc->tx_list, &chan->chain_pending);  in mmp_pdma_tx_submit()
    375  INIT_LIST_HEAD(&desc->tx_list);  in mmp_pdma_alloc_descriptor()
    505  list_add_tail(&new->node, &first->tx_list);  in mmp_pdma_prep_memcpy()
    521  mmp_pdma_free_desc_list(chan, &first->tx_list);  in mmp_pdma_prep_memcpy()
    579  list_add_tail(&new->node, &first->tx_list);  in mmp_pdma_prep_slave_sg()
    601  mmp_pdma_free_desc_list(chan, &first->tx_list);  in mmp_pdma_prep_slave_sg()
    674  list_add_tail(&new->node, &first->tx_list);  in mmp_pdma_prep_dma_cyclic()
    688  mmp_pdma_free_desc_list(chan, &first->tx_list);  in mmp_pdma_prep_dma_cyclic()
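mmp_pdma and most of the dmaengine drivers in this section (xgene-dma, txx9dmac, ep93xx_dma, pch_dma, fsldma, at_hdmac, dw/core.c, zynqmp_dma, ...) share one idiom: the first descriptor of a transaction anchors the rest of the chain on its tx_list, and tx_submit() hands the whole chain to the channel in a single splice. A hedged sketch with hypothetical names (my_desc, my_chan); note that drivers differ on whether `first` itself rides on its own tx_list or is queued separately:

#include <linux/list.h>

struct my_desc {
    struct list_head node;          /* link on a tx_list or channel list */
    struct list_head tx_list;       /* chain hanging off the first desc */
};

struct my_chan {
    struct list_head chain_pending; /* descriptors awaiting hardware */
};

/* allocation: every descriptor starts with an empty chain */
static void my_desc_init(struct my_desc *desc)
{
    INIT_LIST_HEAD(&desc->node);
    INIT_LIST_HEAD(&desc->tx_list);
}

/* prep_*(): append each additional segment to the first descriptor */
static void my_prep_chain(struct my_desc *first, struct my_desc *new)
{
    list_add_tail(&new->node, &first->tx_list);
}

/* tx_submit(): move the chain onto the channel in one operation;
 * list_splice_tail_init() leaves first->tx_list empty and reusable */
static void my_tx_submit(struct my_chan *chan, struct my_desc *first)
{
    list_splice_tail_init(&first->tx_list, &chan->chain_pending);
}

On the error paths above (mmp_pdma_free_desc_list() and friends), the same anchoring lets a half-built transaction be torn down by walking first->tx_list instead of tracking each allocation individually.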
|
D | xgene-dma.c
    240  struct list_head tx_list;  (member)
    473  list_splice_tail_init(&desc->tx_list, &chan->ld_pending);  in xgene_dma_tx_submit()
    500  INIT_LIST_HEAD(&desc->tx_list);  in xgene_dma_alloc_descriptor()
    868  list_add_tail(&new->node, &first->tx_list);  in xgene_dma_prep_xor()
    873  list_splice(&first->tx_list, &new->tx_list);  in xgene_dma_prep_xor()
    881  xgene_dma_free_desc_list(chan, &first->tx_list);  in xgene_dma_prep_xor()
    926  list_add_tail(&new->node, &first->tx_list);  in xgene_dma_prep_pq()
    950  list_splice(&first->tx_list, &new->tx_list);  in xgene_dma_prep_pq()
    958  xgene_dma_free_desc_list(chan, &first->tx_list);  in xgene_dma_prep_pq()
|
D | txx9dmac.c
    182  if (!list_empty(&desc->tx_list))  in txx9dmac_last_child()
    183  desc = list_entry(desc->tx_list.prev, typeof(*desc), desc_node);  in txx9dmac_last_child()
    198  INIT_LIST_HEAD(&desc->tx_list);  in txx9dmac_desc_alloc()
    247  list_for_each_entry(child, &desc->tx_list, desc_node)  in txx9dmac_sync_desc_for_cpu()
    269  list_for_each_entry(child, &desc->tx_list, desc_node)  in txx9dmac_desc_put()
    273  list_splice_init(&desc->tx_list, &dc->free_list);  in txx9dmac_desc_put()
    412  list_splice_init(&desc->tx_list, &dc->free_list);  in txx9dmac_descriptor_complete()
    530  list_for_each_entry(child, &bad_desc->tx_list, desc_node)  in txx9dmac_handle_error()
    572  list_for_each_entry(child, &desc->tx_list, desc_node)  in txx9dmac_scan_descriptors()
    782  list_add_tail(&desc->desc_node, &first->tx_list);  in txx9dmac_prep_dma_memcpy()
    [all …]
|
D | altera-msgdma.c
    163  struct list_head tx_list;  (member)
    219  INIT_LIST_HEAD(&desc->tx_list);  in msgdma_get_descriptor()
    236  list_for_each_entry_safe(child, next, &desc->tx_list, node) {  in msgdma_free_descriptor()
    366  list_add_tail(&new->node, &first->tx_list);  in msgdma_prep_memcpy()
    440  list_add_tail(&new->node, &first->tx_list);  in msgdma_prep_slave_sg()
    540  list_for_each_entry_safe(sdesc, next, &desc->tx_list, node)  in msgdma_copy_desc_to_fifo()
|
D | ep93xx_dma.c
    128  struct list_head tx_list;  (member)
    248  while (!list_empty(&desc->tx_list)) {  in ep93xx_dma_set_active()
    249  struct ep93xx_dma_desc *d = list_first_entry(&desc->tx_list,  in ep93xx_dma_set_active()
    712  list_splice_init(&desc->tx_list, &edmac->free_list);  in ep93xx_dma_desc_put()
    925  INIT_LIST_HEAD(&desc->tx_list);  in ep93xx_dma_alloc_chan_resources()
   1011  list_add_tail(&desc->node, &first->tx_list);  in ep93xx_dma_prep_dma_memcpy()
   1086  list_add_tail(&desc->node, &first->tx_list);  in ep93xx_dma_prep_slave_sg()
   1167  list_add_tail(&desc->node, &first->tx_list);  in ep93xx_dma_prep_dma_cyclic()
|
D | pch_dma.c
     87  struct list_head tx_list;  (member)
    336  if (list_empty(&desc->tx_list)) {  in pdc_dostart()
    355  list_splice_init(&desc->tx_list, &pd_chan->free_list);  in pdc_chain_complete()
    434  INIT_LIST_HEAD(&desc->tx_list);  in pdc_alloc_desc()
    483  list_splice_init(&desc->tx_list, &pd_chan->free_list);  in pdc_desc_put()
    630  list_add_tail(&desc->desc_node, &first->tx_list);  in pd_prep_slave_sg()
|
D | tegra20-apb-dma.c
    170  struct list_head tx_list;  (member)
    308  if (!list_empty(&dma_desc->tx_list))  in tegra_dma_desc_put()
    309  list_splice_init(&dma_desc->tx_list, &tdc->free_sg_req);  in tegra_dma_desc_put()
    708  list_splice_tail_init(&dma_desc->tx_list, &tdc->pending_sg_req);  in tegra_dma_tx_submit()
   1105  INIT_LIST_HEAD(&dma_desc->tx_list);  in tegra_dma_prep_slave_sg()
   1148  list_add_tail(&sg_req->node, &dma_desc->tx_list);  in tegra_dma_prep_slave_sg()
   1254  INIT_LIST_HEAD(&dma_desc->tx_list);  in tegra_dma_prep_dma_cyclic()
   1283  list_add_tail(&sg_req->node, &dma_desc->tx_list);  in tegra_dma_prep_dma_cyclic()
|
D | fsldma.c
    403  list_splice_tail_init(&desc->tx_list, &chan->ld_pending);  in append_ld_queue()
    427  list_for_each_entry(child, &desc->tx_list, node) {  in fsl_dma_tx_submit()
    469  INIT_LIST_HEAD(&desc->tx_list);  in fsl_dma_alloc_descriptor()
    804  list_add_tail(&new->node, &first->tx_list);  in fsl_dma_prep_memcpy()
    819  fsldma_free_desc_list_reverse(chan, &first->tx_list);  in fsl_dma_prep_memcpy()
|
D | at_hdmac.c
    111  INIT_LIST_HEAD(&desc->tx_list);  in atc_alloc_descriptor()
    167  list_for_each_entry(child, &desc->tx_list, desc_node)  in atc_desc_put()
    171  list_splice_init(&desc->tx_list, &atchan->free_list);  in atc_desc_put()
    197  &(*first)->tx_list);  in atc_desc_chain()
    407  list_for_each_entry(desc, &desc_first->tx_list, desc_node) {  in atc_get_bytes_left()
    461  list_splice_init(&desc->tx_list, &atchan->free_list);  in atc_chain_complete()
    545  list_for_each_entry(child, &bad_desc->tx_list, desc_node)  in atc_handle_error()
|
D | iop-adma.c
    334  list_splice(&chain, &alloc_tail->tx_list);  in iop_adma_alloc_slots()
    379  list_splice_init(&sw_desc->tx_list,  in iop_adma_tx_submit()
    446  INIT_LIST_HEAD(&slot->tx_list);  in iop_adma_alloc_chan_resources()
   1449  list_splice_init(&sw_desc->tx_list, &iop_chan->chain);  in iop_chan_start_null_memcpy()
   1501  list_splice_init(&sw_desc->tx_list, &iop_chan->chain);  in iop_chan_start_null_xor()
|
D | txx9dmac.h
    230  struct list_head tx_list;  (member)
|
/drivers/net/ethernet/cavium/octeon/
D | octeon_mgmt.c
    131  struct sk_buff_head tx_list;  (member)
    263  spin_lock_irqsave(&p->tx_list.lock, flags);  in octeon_mgmt_clean_tx_buffers()
    268  spin_unlock_irqrestore(&p->tx_list.lock, flags);  in octeon_mgmt_clean_tx_buffers()
    279  skb = __skb_dequeue(&p->tx_list);  in octeon_mgmt_clean_tx_buffers()
    288  spin_unlock_irqrestore(&p->tx_list.lock, flags);  in octeon_mgmt_clean_tx_buffers()
   1262  skb_queue_purge(&p->tx_list);  in octeon_mgmt_stop()
   1293  spin_lock_irqsave(&p->tx_list.lock, flags);  in octeon_mgmt_xmit()
   1296  spin_unlock_irqrestore(&p->tx_list.lock, flags);  in octeon_mgmt_xmit()
   1298  spin_lock_irqsave(&p->tx_list.lock, flags);  in octeon_mgmt_xmit()
   1303  spin_unlock_irqrestore(&p->tx_list.lock, flags);  in octeon_mgmt_xmit()
    [all …]
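Here tx_list is not a list_head but a struct sk_buff_head, whose built-in lock the driver takes explicitly so the queue operation can share a critical section with other ring bookkeeping; skb_queue_purge() empties it on shutdown. A hedged sketch of that usage with a hypothetical my_priv; the skbuff queue API (__skb_queue_tail, __skb_dequeue, skb_queue_head_init) is real:

#include <linux/netdevice.h>
#include <linux/skbuff.h>
#include <linux/spinlock.h>

struct my_priv {
    struct sk_buff_head tx_list;    /* init with skb_queue_head_init() */
};

/* xmit path: hold the queue's own lock while enqueuing, so other
 * per-device state can be updated under the same critical section */
static void my_post_tx(struct my_priv *p, struct sk_buff *skb)
{
    unsigned long flags;

    spin_lock_irqsave(&p->tx_list.lock, flags);
    __skb_queue_tail(&p->tx_list, skb); /* double-underscore = caller locks */
    spin_unlock_irqrestore(&p->tx_list.lock, flags);
}

/* completion path: pop and free as many skbs as the hardware finished */
static void my_clean_tx(struct my_priv *p, unsigned int done)
{
    struct sk_buff *skb;
    unsigned long flags;

    spin_lock_irqsave(&p->tx_list.lock, flags);
    while (done-- && (skb = __skb_dequeue(&p->tx_list)))
        dev_kfree_skb_irq(skb);     /* safe to call from IRQ context */
    spin_unlock_irqrestore(&p->tx_list.lock, flags);
}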
|
/drivers/hid/intel-ish-hid/ishtp/
D | client-buffers.c
    149  while (!list_empty(&cl->tx_list.list)) {  in ishtp_cl_free_tx_ring()
    150  tx_buf = list_entry(cl->tx_list.list.next,  in ishtp_cl_free_tx_ring()
    268  tx_list_empty = list_empty(&cl->tx_list.list);  in ishtp_cl_tx_empty()
|
D | client.c
     97  INIT_LIST_HEAD(&cl->tx_list.list);  in ishtp_cl_init()
    591  have_msg_to_send = !list_empty(&cl->tx_list.list);  in ishtp_cl_send()
    592  list_add_tail(&cl_msg->list, &cl->tx_list.list);  in ishtp_cl_send()
    660  if (list_empty(&cl->tx_list.list)) {  in ipc_tx_send()
    677  cl_msg = list_entry(cl->tx_list.list.next, struct ishtp_cl_tx_ring,  in ipc_tx_send()
    759  if (list_empty(&cl->tx_list.list)) {  in ishtp_cl_send_msg_dma()
    764  cl_msg = list_entry(cl->tx_list.list.next, struct ishtp_cl_tx_ring,  in ishtp_cl_send_msg_dma()
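The `cl->tx_list.list` double dereference in these hits comes from the ring element type embedding its own list_head: the client's tx_list member is itself a ring node used purely as the list head. A hedged sketch with hypothetical names (my_tx_ring, my_client) standing in for ishtp_cl_tx_ring and ishtp_cl:

#include <linux/list.h>

struct my_tx_ring {
    struct list_head list;          /* ring linkage */
    /* ... message payload ... */
};

struct my_client {
    struct my_tx_ring tx_list;      /* head node: cl->tx_list.list */
};

static void my_client_init(struct my_client *cl)
{
    INIT_LIST_HEAD(&cl->tx_list.list);
}

/* tx path: peel the oldest queued message off the front, if any */
static struct my_tx_ring *my_client_pop_tx(struct my_client *cl)
{
    struct my_tx_ring *msg;

    if (list_empty(&cl->tx_list.list))
        return NULL;

    msg = list_entry(cl->tx_list.list.next, struct my_tx_ring, list);
    list_del_init(&msg->list);
    return msg;
}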
|
D | client.h
     69  struct ishtp_cl_tx_ring tx_list, tx_free_list;  (member)
|
/drivers/dma/dw/
D | core.c
     91  INIT_LIST_HEAD(&desc->tx_list);  in dwc_desc_get()
    107  list_for_each_entry_safe(child, _next, &desc->tx_list, desc_node) {  in dwc_desc_put()
    203  dwc->tx_node_active = &first->tx_list;  in dwc_dostart()
    253  list_for_each_entry(child, &desc->tx_list, desc_node)  in dwc_descriptor_complete()
    325  head = &desc->tx_list;  in dwc_scan_descriptors()
    384  list_for_each_entry(child, &desc->tx_list, desc_node) {  in dwc_scan_descriptors()
    457  list_for_each_entry(child, &bad_desc->tx_list, desc_node)  in dwc_handle_error()
    595  list_add_tail(&desc->desc_node, &first->tx_list);  in dwc_prep_dma_memcpy()
    685  list_add_tail(&desc->desc_node, &first->tx_list);  in dwc_prep_slave_sg()
    734  list_add_tail(&desc->desc_node, &first->tx_list);  in dwc_prep_slave_sg()
|
/drivers/infiniband/hw/hfi1/
D | ipoib_tx.c
    436  &txq->tx_list,  in hfi1_ipoib_submit_tx_list()
    451  if (!list_empty(&txq->tx_list)) {  in hfi1_ipoib_flush_tx_list()
    560  list_add_tail(&tx->txreq.list, &txq->tx_list);  in hfi1_ipoib_send_dma_list()
    613  if (netdev_xmit_more() || !list_empty(&txp.txq->tx_list))  in hfi1_ipoib_send_dma()
    646  list_add_tail(&txreq->list, &txq->tx_list);  in hfi1_ipoib_sdma_sleep()
    743  INIT_LIST_HEAD(&txq->tx_list);  in hfi1_ipoib_txreq_init()
    802  list_for_each_entry_safe(txreq, txreq_tmp, &txq->tx_list, list) {  in hfi1_ipoib_drain_tx_list()
|
D | ipoib.h
     85  struct list_head tx_list;  (member)
|
/drivers/dma/xilinx/
D | zynqmp_dma.c
    184  struct list_head tx_list;  (member)
    385  if (!list_empty(&desc->tx_list))  in zynqmp_dma_tx_submit()
    386  desc = list_last_entry(&desc->tx_list,  in zynqmp_dma_tx_submit()
    418  INIT_LIST_HEAD(&desc->tx_list);  in zynqmp_dma_get_descriptor()
    439  list_for_each_entry_safe(child, next, &sdesc->tx_list, node) {  in zynqmp_dma_free_descriptor()
    845  list_add_tail(&new->node, &first->tx_list);  in zynqmp_dma_prep_memcpy()
|
/drivers/net/wireless/intersil/p54/
D | p54spi.c
    434  struct p54s_tx_info, tx_list);  in p54spi_wq_tx()
    436  list_del_init(&entry->tx_list);  in p54spi_wq_tx()
    441  tx_list);  in p54spi_wq_tx()
    472  list_add_tail(&di->tx_list, &priv->tx_pending);  in p54spi_op_tx()
|
D | p54spi.h
     89  struct list_head tx_list;  (member)
|
/drivers/dma/sh/
D | shdma-base.c
    566  LIST_HEAD(tx_list);  in shdma_prep_sg()
    615  list_add_tail(&new->node, &tx_list);  in shdma_prep_sg()
    623  list_splice_tail(&tx_list, &schan->ld_free);  in shdma_prep_sg()
    630  list_for_each_entry(new, &tx_list, node)  in shdma_prep_sg()
    632  list_splice(&tx_list, &schan->ld_free);  in shdma_prep_sg()
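Unlike the other dmaengine entries, shdma's tx_list is an on-stack LIST_HEAD: the scatter-gather chain is staged locally so a mid-build failure can return everything in one splice. A hedged sketch of that staging idiom, with hypothetical names; note that real shdma leaves descriptors on ld_free and tracks their state with a mark field, whereas this simplification moves them between lists:

#include <linux/errno.h>
#include <linux/list.h>

struct my_desc {
    struct list_head node;
};

/* grab a free descriptor, detaching it from the free list */
static struct my_desc *my_desc_get(struct list_head *ld_free)
{
    struct my_desc *desc;

    if (list_empty(ld_free))
        return NULL;
    desc = list_first_entry(ld_free, struct my_desc, node);
    list_del(&desc->node);
    return desc;
}

static int my_prep_sg(struct list_head *ld_free, struct list_head *ld_queue,
                      int nr_segs)
{
    LIST_HEAD(tx_list);             /* on-stack staging list */
    int i;

    for (i = 0; i < nr_segs; i++) {
        struct my_desc *new = my_desc_get(ld_free);

        if (!new) {
            /* error: hand the partial chain back wholesale */
            list_splice(&tx_list, ld_free);
            return -ENOMEM;
        }
        list_add_tail(&new->node, &tx_list);
    }

    /* success: commit the staged chain as a single unit */
    list_splice_tail(&tx_list, ld_queue);
    return 0;
}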
|
/drivers/net/ethernet/ti/
D | tlan.c
    837  priv->tx_list = priv->rx_list + TLAN_NUM_RX_LISTS;  in tlan_init()
   1052  tail_list = priv->tx_list + priv->tx_tail;  in tlan_start_tx()
   1091  (priv->tx_list + (TLAN_NUM_TX_LISTS - 1))->forward  in tlan_start_tx()
   1094  (priv->tx_list + (priv->tx_tail - 1))->forward  in tlan_start_tx()
   1226  tlan_print_list(priv->tx_list + i, "TX", i);  in tlan_get_stats()
   1356  head_list = priv->tx_list + priv->tx_head;  in tlan_handle_tx_eof()
   1379  head_list = priv->tx_list + priv->tx_head;  in tlan_handle_tx_eof()
   1390  head_list = priv->tx_list + priv->tx_head;  in tlan_handle_tx_eof()
   1632  head_list = priv->tx_list + priv->tx_head;  in tlan_handle_tx_eoc()
   1900  list = priv->tx_list + i;  in tlan_reset_lists()
    [all …]
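tlan's tx_list is a third shape again: plain pointer arithmetic into one contiguous DMA allocation (the TX block starts right after the RX block), walked by tx_head/tx_tail indices rather than list links. A hedged sketch of that array-style ring; TLAN_NUM_TX_LISTS and the `forward` DMA pointer are real names from the driver, everything prefixed my_ is hypothetical, and fullness checks are omitted:

#include <linux/types.h>

#define MY_NUM_TX_LISTS 64              /* ring depth; tlan uses TLAN_NUM_TX_LISTS */

struct my_hw_list {
    u32 forward;                        /* DMA address of the next hw list */
    /* ... buffer descriptors ... */
};

struct my_priv {
    struct my_hw_list *tx_list;         /* base of the contiguous TX block */
    unsigned int tx_head;               /* oldest entry owned by hardware */
    unsigned int tx_tail;               /* next entry to fill */
};

/* element i lives at priv->tx_list + i; claiming a slot is just an
 * index increment with wraparound */
static struct my_hw_list *my_claim_tx_slot(struct my_priv *priv)
{
    struct my_hw_list *tail = priv->tx_list + priv->tx_tail;

    priv->tx_tail = (priv->tx_tail + 1) % MY_NUM_TX_LISTS;
    return tail;
}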
|
/drivers/infiniband/sw/siw/
D | siw_qp_tx.c
   1241  qp = container_of(fifo_list, struct siw_qp, tx_list);  in siw_run_sq()
   1243  qp->tx_list.next = NULL;  in siw_run_sq()
   1250  llist_for_each_entry(qp, active, tx_list) {  in siw_run_sq()
   1251  qp->tx_list.next = NULL;  in siw_run_sq()
   1274  llist_add(&qp->tx_list, &per_cpu(siw_tx_task_g, qp->tx_cpu).active);  in siw_sq_start()
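siw is the odd one out: tx_list is a lock-free llist_node, pushed by producers with llist_add() and drained in batches with llist_del_all(). A hedged sketch of that producer/consumer split; the real driver uses per-CPU heads (per_cpu(siw_tx_task_g, ...)) where this uses a single global for brevity, and my_ names are hypothetical:

#include <linux/container_of.h>
#include <linux/llist.h>

struct my_qp {
    struct llist_node tx_list;          /* lock-free link, one per QP */
};

static LLIST_HEAD(my_tx_active);

/* producer side, any context: atomic push, no lock taken */
static void my_sq_start(struct my_qp *qp)
{
    llist_add(&qp->tx_list, &my_tx_active);
}

/* consumer side: detach the whole batch atomically, restore FIFO order
 * (llist_del_all() returns LIFO), clear each link before processing */
static void my_run_sq(void)
{
    struct llist_node *n = llist_reverse_order(llist_del_all(&my_tx_active));
    struct my_qp *qp;

    while (n) {
        qp = container_of(n, struct my_qp, tx_list);
        n = n->next;                    /* advance before clearing the link */
        qp->tx_list.next = NULL;
        /* ... drive qp's send queue ... */
    }
}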
|