/drivers/ps3/
    ps3-vuart.c
          84  } tx_list;    (member)
         503  spin_lock_irqsave(&priv->tx_list.lock, flags);    in ps3_vuart_write()
         505  if (list_empty(&priv->tx_list.head)) {    in ps3_vuart_write()
         510  spin_unlock_irqrestore(&priv->tx_list.lock, flags);    in ps3_vuart_write()
         528  spin_unlock_irqrestore(&priv->tx_list.lock, flags);    in ps3_vuart_write()
         540  spin_lock_irqsave(&priv->tx_list.lock, flags);    in ps3_vuart_write()
         541  list_add_tail(&lb->link, &priv->tx_list.head);    in ps3_vuart_write()
         543  spin_unlock_irqrestore(&priv->tx_list.lock, flags);    in ps3_vuart_write()
         747  spin_lock_irqsave(&priv->tx_list.lock, flags);    in ps3_vuart_handle_interrupt_tx()
         749  list_for_each_entry_safe(lb, n, &priv->tx_list.head, link) {    in ps3_vuart_handle_interrupt_tx()
        [all …]

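The ps3-vuart hits show the simplest shape of tx_list in this listing: a small structure pairing a spinlock with a list head, where the write path appends buffers under the lock and the TX interrupt handler drains them with list_for_each_entry_safe(). A minimal sketch of that pattern follows; vuart_priv, list_buffer and the helper names are hypothetical stand-ins for the driver's real types, and the actual hypervisor I/O is elided.

    #include <linux/list.h>
    #include <linux/spinlock.h>
    #include <linux/slab.h>

    /* Hypothetical stand-ins for the driver's private data and queued buffer. */
    struct list_buffer {
        struct list_head link;
        /* payload, head/tail offsets, ... */
    };

    struct vuart_priv {
        struct {
            spinlock_t lock;
            struct list_head head;
        } tx_list;
    };

    static void vuart_tx_list_init(struct vuart_priv *priv)
    {
        spin_lock_init(&priv->tx_list.lock);
        INIT_LIST_HEAD(&priv->tx_list.head);
    }

    /* Write path: queue a buffer that could not be sent immediately. */
    static void vuart_queue_tx(struct vuart_priv *priv, struct list_buffer *lb)
    {
        unsigned long flags;

        spin_lock_irqsave(&priv->tx_list.lock, flags);
        list_add_tail(&lb->link, &priv->tx_list.head);
        spin_unlock_irqrestore(&priv->tx_list.lock, flags);
    }

    /* TX interrupt: drain whatever has been queued since the last kick. */
    static void vuart_drain_tx(struct vuart_priv *priv)
    {
        struct list_buffer *lb, *n;
        unsigned long flags;

        spin_lock_irqsave(&priv->tx_list.lock, flags);
        list_for_each_entry_safe(lb, n, &priv->tx_list.head, link) {
            /* ...write lb's remaining bytes to the port here... */
            list_del(&lb->link);
            kfree(lb);
        }
        spin_unlock_irqrestore(&priv->tx_list.lock, flags);
    }
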
/drivers/dma/
    mmp_pdma.c
          84  struct list_head tx_list;    (member)
         261  list_splice_tail_init(&desc->tx_list, &chan->chain_pending);    in append_pending_queue()
         325  list_for_each_entry(child, &desc->tx_list, node) {    in mmp_pdma_tx_submit()
         348  INIT_LIST_HEAD(&desc->tx_list);    in mmp_pdma_alloc_descriptor()
         479  list_add_tail(&new->node, &first->tx_list);    in mmp_pdma_prep_memcpy()
         493  mmp_pdma_free_desc_list(chan, &first->tx_list);    in mmp_pdma_prep_memcpy()
         545  list_add_tail(&new->node, &first->tx_list);    in mmp_pdma_prep_slave_sg()
         564  mmp_pdma_free_desc_list(chan, &first->tx_list);    in mmp_pdma_prep_slave_sg()

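In the dmaengine drivers gathered under /drivers/dma/ (this entry and most of the ones that follow), tx_list is a per-descriptor list head: the prep routine chains the child descriptors of a transfer onto the first descriptor with list_add_tail(), tx_submit() splices the whole chain onto the channel's pending queue with list_splice_tail_init(), and the put/complete paths splice it back onto the free list. A condensed sketch of that lifecycle, using hypothetical xdma_* names and leaving out locking, cookies and the actual hardware programming:

    #include <linux/list.h>

    struct xdma_desc {
        struct list_head node;      /* links this descriptor into a chain or queue */
        struct list_head tx_list;   /* children of the first descriptor of a transfer */
    };

    struct xdma_chan {
        struct list_head chain_pending;   /* descriptors waiting for the hardware */
        struct list_head free_list;       /* recycled descriptors */
    };

    /* alloc: every descriptor starts with an empty child list */
    static void xdma_desc_init(struct xdma_desc *desc)
    {
        INIT_LIST_HEAD(&desc->node);
        INIT_LIST_HEAD(&desc->tx_list);
    }

    /* prep: hang each additional chunk off the first descriptor */
    static void xdma_chain(struct xdma_desc *first, struct xdma_desc *new)
    {
        list_add_tail(&new->node, &first->tx_list);
    }

    /* tx_submit: move the transfer, children included, onto the pending queue */
    static void xdma_submit(struct xdma_chan *chan, struct xdma_desc *first)
    {
        list_add_tail(&first->node, &chan->chain_pending);
        list_splice_tail_init(&first->tx_list, &chan->chain_pending);
    }

    /* desc_put / completion: recycle the descriptor and all of its children */
    static void xdma_desc_put(struct xdma_chan *chan, struct xdma_desc *desc)
    {
        list_splice_init(&desc->tx_list, &chan->free_list);
        list_add_tail(&desc->node, &chan->free_list);
    }

The same lifecycle recurs, with local naming differences, in the txx9dmac, fsldma, ep93xx_dma, dw_dmac, tegra20-apb-dma, pch_dma, at_hdmac, iop-adma, mv_xor, tsi721 and ioat entries below; their error handlers (for example dwc_handle_error() and atc_handle_error()) walk the same per-descriptor tx_list so that every child of a failed transfer is accounted for.
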
    txx9dmac.c
         185  if (!list_empty(&desc->tx_list))    in txx9dmac_last_child()
         186  desc = list_entry(desc->tx_list.prev, typeof(*desc), desc_node);    in txx9dmac_last_child()
         201  INIT_LIST_HEAD(&desc->tx_list);    in txx9dmac_desc_alloc()
         250  list_for_each_entry(child, &desc->tx_list, desc_node)    in txx9dmac_sync_desc_for_cpu()
         272  list_for_each_entry(child, &desc->tx_list, desc_node)    in txx9dmac_desc_put()
         276  list_splice_init(&desc->tx_list, &dc->free_list);    in txx9dmac_desc_put()
         419  list_splice_init(&desc->tx_list, &dc->free_list);    in txx9dmac_descriptor_complete()
         561  list_for_each_entry(child, &bad_desc->tx_list, desc_node)    in txx9dmac_handle_error()
         603  list_for_each_entry(child, &desc->tx_list, desc_node)    in txx9dmac_scan_descriptors()
         813  list_add_tail(&desc->desc_node, &first->tx_list);    in txx9dmac_prep_dma_memcpy()
        [all …]

    fsldma.c
         400  list_splice_tail_init(&desc->tx_list, &chan->ld_pending);    in append_ld_queue()
         417  list_for_each_entry(child, &desc->tx_list, node) {    in fsl_dma_tx_submit()
         447  INIT_LIST_HEAD(&desc->tx_list);    in fsl_dma_alloc_descriptor()
         566  list_add_tail(&new->node, &new->tx_list);    in fsl_dma_prep_interrupt()
         620  list_add_tail(&new->node, &first->tx_list);    in fsl_dma_prep_memcpy()
         635  fsldma_free_desc_list_reverse(chan, &first->tx_list);    in fsl_dma_prep_memcpy()
         699  list_add_tail(&new->node, &first->tx_list);    in fsl_dma_prep_sg()
         751  fsldma_free_desc_list_reverse(chan, &first->tx_list);    in fsl_dma_prep_sg()

    ep93xx_dma.c
         128  struct list_head tx_list;    (member)
         244  while (!list_empty(&desc->tx_list)) {    in ep93xx_dma_set_active()
         245  struct ep93xx_dma_desc *d = list_first_entry(&desc->tx_list,    in ep93xx_dma_set_active()
         700  list_splice_init(&desc->tx_list, &edmac->free_list);    in ep93xx_dma_desc_put()
         943  INIT_LIST_HEAD(&desc->tx_list);    in ep93xx_dma_alloc_chan_resources()
        1029  list_add_tail(&desc->node, &first->tx_list);    in ep93xx_dma_prep_dma_memcpy()
        1102  list_add_tail(&desc->node, &first->tx_list);    in ep93xx_dma_prep_slave_sg()
        1183  list_add_tail(&desc->node, &first->tx_list);    in ep93xx_dma_prep_dma_cyclic()

    dw_dmac.c
         142  list_for_each_entry(child, &desc->tx_list, desc_node)    in dwc_desc_put()
         146  list_splice_init(&desc->tx_list, &dwc->free_list);    in dwc_desc_put()
         276  dwc->tx_node_active = &first->tx_list;    in dwc_dostart()
         315  list_for_each_entry(child, &desc->tx_list, desc_node)    in dwc_descriptor_complete()
         319  list_splice_init(&desc->tx_list, &dwc->free_list);    in dwc_descriptor_complete()
         413  head = &desc->tx_list;    in dwc_scan_descriptors()
         476  list_for_each_entry(child, &desc->tx_list, desc_node) {    in dwc_scan_descriptors()
         548  list_for_each_entry(child, &bad_desc->tx_list, desc_node)    in dwc_handle_error()
         786  &first->tx_list);    in dwc_prep_dma_memcpy()
         886  &first->tx_list);    in dwc_prep_slave_sg()
        [all …]

    tegra20-apb-dma.c
         166  struct list_head tx_list;    (member)
         303  if (!list_empty(&dma_desc->tx_list))    in tegra_dma_desc_put()
         304  list_splice_init(&dma_desc->tx_list, &tdc->free_sg_req);    in tegra_dma_desc_put()
         676  list_splice_tail_init(&dma_desc->tx_list, &tdc->pending_sg_req);    in tegra_dma_tx_submit()
         957  INIT_LIST_HEAD(&dma_desc->tx_list);    in tegra_dma_prep_slave_sg()
         999  list_add_tail(&sg_req->node, &dma_desc->tx_list);    in tegra_dma_prep_slave_sg()
        1100  INIT_LIST_HEAD(&dma_desc->tx_list);    in tegra_dma_prep_dma_cyclic()
        1129  list_add_tail(&sg_req->node, &dma_desc->tx_list);    in tegra_dma_prep_dma_cyclic()

    pch_dma.c
          98  struct list_head tx_list;    (member)
         347  if (list_empty(&desc->tx_list)) {    in pdc_dostart()
         366  list_splice_init(&desc->tx_list, &pd_chan->free_list);    in pdc_chain_complete()
         449  INIT_LIST_HEAD(&desc->tx_list);    in pdc_alloc_desc()
         498  list_splice_init(&desc->tx_list, &pd_chan->free_list);    in pdc_desc_put()
         652  list_add_tail(&desc->desc_node, &first->tx_list);    in pd_prep_slave_sg()

    at_hdmac.c
          93  INIT_LIST_HEAD(&desc->tx_list);    in atc_alloc_descriptor()
         159  list_for_each_entry(child, &desc->tx_list, desc_node)    in atc_desc_put()
         163  list_splice_init(&desc->tx_list, &atchan->free_list);    in atc_desc_put()
         189  &(*first)->tx_list);    in atc_desc_chain()
         252  list_splice_init(&desc->tx_list, &atchan->free_list);    in atc_chain_complete()
         347  list_for_each_entry(child, &desc->tx_list, desc_node)    in atc_cleanup_descriptors()
         423  list_for_each_entry(child, &bad_desc->tx_list, desc_node)    in atc_handle_error()

    mv_xor.h
         135  struct list_head tx_list;    (member)

    txx9dmac.h
         233  struct list_head tx_list;    (member)

    fsldma.h
         100  struct list_head tx_list;    (member)

    iop-adma.c
         427  list_splice(&chain, &alloc_tail->tx_list);    in iop_adma_alloc_slots()
         474  list_splice_init(&sw_desc->tx_list,    in iop_adma_tx_submit()
         541  INIT_LIST_HEAD(&slot->tx_list);    in iop_adma_alloc_chan_resources()
        1619  list_splice_init(&sw_desc->tx_list, &iop_chan->chain);    in iop_chan_start_null_memcpy()
        1671  list_splice_init(&sw_desc->tx_list, &iop_chan->chain);    in iop_chan_start_null_xor()

    mv_xor.c
         526  list_splice(&chain, &alloc_tail->tx_list);    in mv_xor_alloc_slots()
         562  list_splice_init(&sw_desc->tx_list, &mv_chan->chain);    in mv_xor_tx_submit()
         569  list_splice_init(&grp_start->tx_list,    in mv_xor_tx_submit()
         627  INIT_LIST_HEAD(&slot->tx_list);    in mv_xor_alloc_chan_resources()

    dw_dmac_regs.h
         299  struct list_head tx_list;    (member)

    at_hdmac_regs.h
         191  struct list_head tx_list;    (member)

/drivers/net/ethernet/octeon/
    octeon_mgmt.c
         132  struct sk_buff_head tx_list;    (member)
         282  spin_lock_irqsave(&p->tx_list.lock, flags);    in octeon_mgmt_clean_tx_buffers()
         287  spin_unlock_irqrestore(&p->tx_list.lock, flags);    in octeon_mgmt_clean_tx_buffers()
         298  skb = __skb_dequeue(&p->tx_list);    in octeon_mgmt_clean_tx_buffers()
         307  spin_unlock_irqrestore(&p->tx_list.lock, flags);    in octeon_mgmt_clean_tx_buffers()
        1279  skb_queue_purge(&p->tx_list);    in octeon_mgmt_stop()
        1309  spin_lock_irqsave(&p->tx_list.lock, flags);    in octeon_mgmt_xmit()
        1312  spin_unlock_irqrestore(&p->tx_list.lock, flags);    in octeon_mgmt_xmit()
        1314  spin_lock_irqsave(&p->tx_list.lock, flags);    in octeon_mgmt_xmit()
        1319  spin_unlock_irqrestore(&p->tx_list.lock, flags);    in octeon_mgmt_xmit()
        [all …]

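octeon_mgmt uses tx_list differently from the DMA drivers above: it is a struct sk_buff_head, so the driver takes the queue's built-in tx_list.lock around its ring bookkeeping and pulls skbs off with __skb_dequeue(), the variant that assumes the lock is already held. A rough sketch of the xmit/clean split, with hypothetical ring-counter fields; NETDEV_TX_BUSY handling, statistics and the actual descriptor ring are omitted:

    #include <linux/errno.h>
    #include <linux/skbuff.h>
    #include <linux/spinlock.h>

    /* Hypothetical private data: a software queue shadowing the hardware TX ring. */
    struct mgmt_port {
        struct sk_buff_head tx_list;
        int tx_current_fill;    /* buffers currently owned by the hardware */
        int tx_ring_size;
    };

    static void mgmt_init(struct mgmt_port *p, int ring_size)
    {
        skb_queue_head_init(&p->tx_list);
        p->tx_current_fill = 0;
        p->tx_ring_size = ring_size;
    }

    static int mgmt_xmit(struct mgmt_port *p, struct sk_buff *skb)
    {
        unsigned long flags;

        spin_lock_irqsave(&p->tx_list.lock, flags);
        if (p->tx_current_fill >= p->tx_ring_size) {
            spin_unlock_irqrestore(&p->tx_list.lock, flags);
            return -EBUSY;
        }
        __skb_queue_tail(&p->tx_list, skb);
        p->tx_current_fill++;
        spin_unlock_irqrestore(&p->tx_list.lock, flags);
        /* ...hand the buffer to the hardware and ring the doorbell... */
        return 0;
    }

    /* Called once the hardware reports 'completed' finished transmissions. */
    static void mgmt_clean_tx(struct mgmt_port *p, int completed)
    {
        unsigned long flags;

        while (completed--) {
            struct sk_buff *skb;

            spin_lock_irqsave(&p->tx_list.lock, flags);
            skb = __skb_dequeue(&p->tx_list);
            p->tx_current_fill--;
            spin_unlock_irqrestore(&p->tx_list.lock, flags);
            dev_kfree_skb(skb);
        }
    }

On shutdown the driver can simply skb_queue_purge(&p->tx_list), which is what the octeon_mgmt_stop() hit shows.
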
/drivers/net/wireless/p54/
    p54spi.c
         448  struct p54s_tx_info, tx_list);    in p54spi_wq_tx()
         450  list_del_init(&entry->tx_list);    in p54spi_wq_tx()
         455  tx_list);    in p54spi_wq_tx()
         486  list_add_tail(&di->tx_list, &priv->tx_pending);    in p54spi_op_tx()

    p54spi.h
         102  struct list_head tx_list;    (member)

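In p54spi the orientation is reversed again: tx_list is the struct list_head embedded in each frame's p54s_tx_info, which p54spi_op_tx() appends to the driver-wide tx_pending list; the SPI work queue later pops entries and list_del_init()s them before doing the actual transfer. A small sketch of that producer/consumer split under a single hypothetical spinlock:

    #include <linux/list.h>
    #include <linux/spinlock.h>

    struct tx_info {
        struct list_head tx_list;   /* links this frame into the pending list */
        /* frame payload follows in the real driver */
    };

    struct spi_priv {
        spinlock_t tx_lock;             /* initialised with spin_lock_init() at probe */
        struct list_head tx_pending;
    };

    /* .tx hook: queue the frame and let the work queue do the SPI I/O. */
    static void spi_queue_frame(struct spi_priv *priv, struct tx_info *di)
    {
        unsigned long flags;

        spin_lock_irqsave(&priv->tx_lock, flags);
        list_add_tail(&di->tx_list, &priv->tx_pending);
        spin_unlock_irqrestore(&priv->tx_lock, flags);
        /* ...schedule the TX work item... */
    }

    /* Work queue: detach one pending frame at a time for transmission. */
    static struct tx_info *spi_pop_frame(struct spi_priv *priv)
    {
        struct tx_info *entry = NULL;
        unsigned long flags;

        spin_lock_irqsave(&priv->tx_lock, flags);
        if (!list_empty(&priv->tx_pending)) {
            entry = list_first_entry(&priv->tx_pending, struct tx_info, tx_list);
            list_del_init(&entry->tx_list);
        }
        spin_unlock_irqrestore(&priv->tx_lock, flags);
        return entry;
    }
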
/drivers/dma/sh/
    shdma-base.c
         513  LIST_HEAD(tx_list);    in shdma_prep_sg()
         558  list_add_tail(&new->node, &tx_list);    in shdma_prep_sg()
         566  list_splice_tail(&tx_list, &schan->ld_free);    in shdma_prep_sg()
         573  list_for_each_entry(new, &tx_list, node)    in shdma_prep_sg()
         575  list_splice(&tx_list, &schan->ld_free);    in shdma_prep_sg()

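shdma_prep_sg() is the one entry here where tx_list is not a structure member at all but an on-stack LIST_HEAD(): descriptors taken from the channel's free list are collected on the temporary list while the scatterlist is walked, and the whole chain is spliced back in a single operation at the end, tail-spliced in order on success or marked idle and spliced back on failure, so a mid-prep error never leaves a half-built chain behind. A reduced sketch of that all-or-nothing shape, with a hypothetical sdesc type and a fill_one() step that may fail:

    #include <linux/errno.h>
    #include <linux/list.h>

    #define DESC_IDLE      0
    #define DESC_PREPARED  1

    struct sdesc {
        struct list_head node;
        int mark;               /* DESC_IDLE or DESC_PREPARED */
    };

    struct schan {
        struct list_head ld_free;
    };

    /* Hypothetical per-chunk setup step that may fail. */
    static int fill_one(struct sdesc *d)
    {
        return 0;
    }

    static int prep_sg(struct schan *schan, int nchunks)
    {
        LIST_HEAD(tx_list);     /* temporary, on-stack chain */
        struct sdesc *new;
        int i;

        for (i = 0; i < nchunks; i++) {
            if (list_empty(&schan->ld_free))
                goto err;
            new = list_first_entry(&schan->ld_free, struct sdesc, node);
            list_del(&new->node);
            if (fill_one(new)) {
                list_add(&new->node, &schan->ld_free);
                goto err;
            }
            new->mark = DESC_PREPARED;
            list_add_tail(&new->node, &tx_list);
        }
        /* Success: hand the chain back in order, as one atomic splice;
         * in this sketch a later step claims the prepared entries from here. */
        list_splice_tail(&tx_list, &schan->ld_free);
        return 0;

    err:
        /* Failure: reset the marks and return everything untouched. */
        list_for_each_entry(new, &tx_list, node)
            new->mark = DESC_IDLE;
        list_splice(&tx_list, &schan->ld_free);
        return -ENOMEM;
    }
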
/drivers/net/ethernet/ti/
    tlan.c
         830  priv->tx_list = priv->rx_list + TLAN_NUM_RX_LISTS;    in tlan_init()
        1035  tail_list = priv->tx_list + priv->tx_tail;    in tlan_start_tx()
        1074  (priv->tx_list + (TLAN_NUM_TX_LISTS - 1))->forward    in tlan_start_tx()
        1077  (priv->tx_list + (priv->tx_tail - 1))->forward    in tlan_start_tx()
        1212  tlan_print_list(priv->tx_list + i, "TX", i);    in tlan_get_stats()
        1342  head_list = priv->tx_list + priv->tx_head;    in tlan_handle_tx_eof()
        1365  head_list = priv->tx_list + priv->tx_head;    in tlan_handle_tx_eof()
        1376  head_list = priv->tx_list + priv->tx_head;    in tlan_handle_tx_eof()
        1621  head_list = priv->tx_list + priv->tx_head;    in tlan_handle_tx_eoc()
        1897  list = priv->tx_list + i;    in tlan_reset_lists()
        [all …]

    tlan.h
         186  struct tlan_list *tx_list;    (member)

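tlan is the only entry in the listing where tx_list is not a kernel list at all: tlan.h declares it as a struct tlan_list *, a pointer into an array of hardware descriptors shared with the adapter, carved out right after the RX lists in tlan_init() and indexed with tx_head/tx_tail, while each entry's forward field links it to the next for the chip. A toy sketch of that ring arithmetic, with made-up field names and sizes standing in for the TLAN_* constants:

    /* Hypothetical, heavily simplified view of the TLAN descriptor ring. */
    #define NUM_RX_LISTS 32
    #define NUM_TX_LISTS 64

    struct hw_list {
        unsigned int forward;   /* bus address of the next descriptor, 0 to stop */
        /* cstat, frame size, buffer pointers... */
    };

    struct tlan_priv {
        struct hw_list *rx_list;   /* base of one shared allocation */
        struct hw_list *tx_list;   /* TX region carved out of the same block */
        unsigned int tx_head;      /* oldest descriptor still owned by the chip */
        unsigned int tx_tail;      /* next descriptor to give to the chip */
    };

    static void ring_setup(struct tlan_priv *priv, struct hw_list *block)
    {
        priv->rx_list = block;
        priv->tx_list = priv->rx_list + NUM_RX_LISTS;   /* as in tlan_init() */
        priv->tx_head = 0;
        priv->tx_tail = 0;
    }

    /* Claim the next TX descriptor, wrapping at the end of the ring. */
    static struct hw_list *ring_next_tx(struct tlan_priv *priv)
    {
        struct hw_list *tail = priv->tx_list + priv->tx_tail;

        priv->tx_tail = (priv->tx_tail + 1) % NUM_TX_LISTS;
        return tail;
    }
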
/drivers/rapidio/devices/
    tsi721_dma.c
         264  list_splice_init(&desc->tx_list, &bdma_chan->free_list);    in tsi721_desc_put()
         356  list_splice_init(&desc->tx_list, &bdma_chan->free_list);    in tsi721_dma_chain_complete()
         517  INIT_LIST_HEAD(&desc[i].tx_list);    in tsi721_alloc_chan_resources()
         727  list_add_tail(&desc->desc_node, &first->tx_list);    in tsi721_prep_rio_sg()

/drivers/dma/ioat/
    dma.c
         245  first = to_ioat_desc(desc->tx_list.next);    in ioat1_tx_submit()
         250  list_splice_tail_init(&desc->tx_list, &ioat->used_desc);    in ioat1_tx_submit()
         292  INIT_LIST_HEAD(&desc_sw->tx_list);    in ioat_dma_alloc_descriptor()
         517  list_splice(&chain, &desc->tx_list);    in ioat1_dma_prep_memcpy()

/drivers/tty/
    n_gsm.c
         247  struct list_head tx_list; /* Pending data packets */    (member)
         686  list_for_each_entry_safe(msg, nmsg, &gsm->tx_list, list) {    in gsm_data_kick()
         767  list_add_tail(&msg->list, &gsm->tx_list);    in __gsm_data_queue()
        2060  list_for_each_entry_safe(txq, ntxq, &gsm->tx_list, list)    in gsm_cleanup_mux()
        2062  INIT_LIST_HEAD(&gsm->tx_list);    in gsm_cleanup_mux()
        2173  INIT_LIST_HEAD(&gsm->tx_list);    in gsm_alloc_mux()

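Finally, n_gsm keeps tx_list as the mux's queue of pending data packets: __gsm_data_queue() appends an encoded gsm_msg, gsm_data_kick() walks the list with list_for_each_entry_safe() and retires whatever it manages to push out through the tty, and gsm_cleanup_mux() discards anything left before re-initialising the head. A compact sketch of those three operations with the payload details stripped out; the real code holds the mux's TX lock around them:

    #include <linux/list.h>
    #include <linux/slab.h>

    /* Hypothetical, stripped-down pending packet. */
    struct gsm_msg {
        struct list_head list;
        int len;
        unsigned char *data;
    };

    struct gsm_mux {
        struct list_head tx_list;   /* pending data packets */
    };

    /* As in __gsm_data_queue(): append an encoded packet to the pending list. */
    static void mux_queue(struct gsm_mux *gsm, struct gsm_msg *msg)
    {
        list_add_tail(&msg->list, &gsm->tx_list);
    }

    /* As in gsm_data_kick(): push packets out, dropping the ones that were sent. */
    static void mux_kick(struct gsm_mux *gsm, int (*send)(struct gsm_msg *))
    {
        struct gsm_msg *msg, *nmsg;

        list_for_each_entry_safe(msg, nmsg, &gsm->tx_list, list) {
            if (send(msg) < 0)
                break;          /* tty is full, try again on the next kick */
            list_del(&msg->list);
            kfree(msg);
        }
    }

    /* As in gsm_cleanup_mux(): throw away anything still pending. */
    static void mux_cleanup(struct gsm_mux *gsm)
    {
        struct gsm_msg *msg, *nmsg;

        list_for_each_entry_safe(msg, nmsg, &gsm->tx_list, list)
            kfree(msg);
        INIT_LIST_HEAD(&gsm->tx_list);
    }
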