/drivers/staging/irda/drivers/ |
D | sir_dev.c |
    367  if (unlikely(len > dev->tx_buff.truesize))  in sirdev_raw_write()
    371  while (dev->tx_buff.len > 0) { /* wait until tx idle */  in sirdev_raw_write()
    377  dev->tx_buff.data = dev->tx_buff.head;  in sirdev_raw_write()
    378  memcpy(dev->tx_buff.data, buf, len);  in sirdev_raw_write()
    379  dev->tx_buff.len = len;  in sirdev_raw_write()
    381  ret = dev->drv->do_write(dev, dev->tx_buff.data, dev->tx_buff.len);  in sirdev_raw_write()
    385  dev->tx_buff.data += ret;  in sirdev_raw_write()
    386  dev->tx_buff.len -= ret;  in sirdev_raw_write()
    444  __func__, dev->tx_buff.len);  in sirdev_write_complete()
    446  if (likely(dev->tx_buff.len > 0)) {  in sirdev_write_complete()
    [all …]
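The sir_dev.c hits outline the raw-write path: the payload is copied into tx_buff with data rewound to head, the driver's do_write() callback pushes what it can, and data/len are advanced by however many bytes were accepted, with sirdev_write_complete() draining the rest later. Below is a minimal, self-contained C model of that copy-and-partial-drain pattern; the struct and callback are simplified stand-ins for the kernel's iobuff_t and dev->drv->do_write(), not the driver's actual types.

#include <stddef.h>
#include <string.h>

/* Simplified stand-in for the kernel's iobuff_t: head is the start of the
 * allocation, data/len track the not-yet-sent remainder, truesize is the
 * buffer capacity. */
struct tx_iobuf {
	unsigned char *head;
	unsigned char *data;
	size_t len;
	size_t truesize;
};

/* Hypothetical hardware hook mirroring dev->drv->do_write(): accepts up to
 * 'len' bytes and returns how many it actually consumed. */
typedef int (*do_write_fn)(const unsigned char *buf, size_t len);

/* Returns the number of bytes still pending after the first do_write()
 * push; the kernel driver drains that remainder from its write-complete
 * handler rather than looping here. */
static long raw_write(struct tx_iobuf *tx, const unsigned char *buf,
		      size_t len, do_write_fn do_write)
{
	int ret;

	if (len > tx->truesize)
		return -1;		/* frame cannot fit in tx_buff */

	tx->data = tx->head;		/* rewind to the start of the buffer */
	memcpy(tx->data, buf, len);
	tx->len = len;

	ret = do_write(tx->data, tx->len);
	if (ret < 0)
		return ret;

	tx->data += ret;		/* skip what the hardware accepted */
	tx->len -= ret;
	return (long)tx->len;		/* remainder left for the completion path */
}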
|
D | bfin_sir.c |
    183  if (self->tx_buff.len != 0) {  in bfin_sir_tx_chars()
    184  chr = *(self->tx_buff.data);  in bfin_sir_tx_chars()
    186  self->tx_buff.data++;  in bfin_sir_tx_chars()
    187  self->tx_buff.len--;  in bfin_sir_tx_chars()
    190  self->stats.tx_bytes += self->tx_buff.data - self->tx_buff.head;  in bfin_sir_tx_chars()
    253  if (self->tx_buff.len == 0) {  in bfin_sir_dma_tx_chars()
    266  blackfin_dcache_flush_range((unsigned long)(self->tx_buff.data),  in bfin_sir_dma_tx_chars()
    267  (unsigned long)(self->tx_buff.data+self->tx_buff.len));  in bfin_sir_dma_tx_chars()
    273  (unsigned long)(self->tx_buff.data));  in bfin_sir_dma_tx_chars()
    274  set_dma_x_count(port->tx_dma_channel, self->tx_buff.len);  in bfin_sir_dma_tx_chars()
    [all …]
|
D | w83977af_ir.c |
    204  self->tx_buff.truesize = 4000;  in w83977af_open()
    215  self->tx_buff.head =  in w83977af_open()
    216  dma_zalloc_coherent(NULL, self->tx_buff.truesize,  in w83977af_open()
    218  if (!self->tx_buff.head) {  in w83977af_open()
    225  self->tx_buff.data = self->tx_buff.head;  in w83977af_open()
    244  dma_free_coherent(NULL, self->tx_buff.truesize,  in w83977af_open()
    245  self->tx_buff.head, self->tx_buff_dma);  in w83977af_open()
    287  if (self->tx_buff.head)  in w83977af_close()
    288  dma_free_coherent(NULL, self->tx_buff.truesize,  in w83977af_close()
    289  self->tx_buff.head, self->tx_buff_dma);  in w83977af_close()
    [all …]
|
D | sh_sir.c |
    116  iobuff_t tx_buff;  member
    398  kfree(self->tx_buff.head);  in sh_sir_remove_iobuf()
    401  self->tx_buff.head = NULL;  in sh_sir_remove_iobuf()
    409  self->tx_buff.head) {  in sh_sir_init_iobuf()
    418  err = __sh_sir_init_iobuf(&self->tx_buff, txsize);  in sh_sir_init_iobuf()
    492  if (0 >= self->tx_buff.len) {  in sh_sir_tx()
    495  sh_sir_write(self, IRIF_UART3, self->tx_buff.data[0]);  in sh_sir_tx()
    496  self->tx_buff.len--;  in sh_sir_tx()
    497  self->tx_buff.data++;  in sh_sir_tx()
    603  self->tx_buff.data = self->tx_buff.head;  in sh_sir_hard_xmit()
    [all …]
|
D | ali-ircc.c |
    342  self->tx_buff.truesize = 14384;  in ali_ircc_open()
    353  self->tx_buff.head =  in ali_ircc_open()
    354  dma_zalloc_coherent(NULL, self->tx_buff.truesize,  in ali_ircc_open()
    356  if (self->tx_buff.head == NULL) {  in ali_ircc_open()
    363  self->tx_buff.data = self->tx_buff.head;  in ali_ircc_open()
    368  self->tx_fifo.tail = self->tx_buff.head;  in ali_ircc_open()
    393  dma_free_coherent(NULL, self->tx_buff.truesize,  in ali_ircc_open()
    394  self->tx_buff.head, self->tx_buff_dma);  in ali_ircc_open()
    428  if (self->tx_buff.head)  in ali_ircc_close()
    429  dma_free_coherent(NULL, self->tx_buff.truesize,  in ali_ircc_close()
    [all …]
|
D | via-ircc.c |
    353  self->tx_buff.truesize = 14384 + 2048;  in via_ircc_open()
    364  self->tx_buff.head =  in via_ircc_open()
    365  dma_zalloc_coherent(&pdev->dev, self->tx_buff.truesize,  in via_ircc_open()
    367  if (self->tx_buff.head == NULL) {  in via_ircc_open()
    374  self->tx_buff.data = self->tx_buff.head;  in via_ircc_open()
    379  self->tx_fifo.tail = self->tx_buff.head;  in via_ircc_open()
    397  dma_free_coherent(&pdev->dev, self->tx_buff.truesize,  in via_ircc_open()
    398  self->tx_buff.head, self->tx_buff_dma);  in via_ircc_open()
    430  if (self->tx_buff.head)  in via_remove_one()
    431  dma_free_coherent(&pdev->dev, self->tx_buff.truesize,  in via_remove_one()
    [all …]
|
D | smsc-ircc2.c |
    154  iobuff_t tx_buff; /* Transmit buffer */  member
    565  self->tx_buff.truesize = SMSC_IRCC2_TX_BUFF_TRUESIZE;  in smsc_ircc_open()
    573  self->tx_buff.head =  in smsc_ircc_open()
    574  dma_zalloc_coherent(NULL, self->tx_buff.truesize,  in smsc_ircc_open()
    576  if (self->tx_buff.head == NULL)  in smsc_ircc_open()
    581  self->tx_buff.data = self->tx_buff.head;  in smsc_ircc_open()
    618  dma_free_coherent(NULL, self->tx_buff.truesize,  in smsc_ircc_open()
    619  self->tx_buff.head, self->tx_buff_dma);  in smsc_ircc_open()
    921  self->tx_buff.data = self->tx_buff.head;  in smsc_ircc_hard_xmit_sir()
    924  self->tx_buff.len = async_wrap_skb(skb, self->tx_buff.data,  in smsc_ircc_hard_xmit_sir()
    [all …]
|
D | nsc-ircc.c |
    433  self->tx_buff.truesize = 14384;  in nsc_ircc_open()
    445  self->tx_buff.head =  in nsc_ircc_open()
    446  dma_zalloc_coherent(NULL, self->tx_buff.truesize,  in nsc_ircc_open()
    448  if (self->tx_buff.head == NULL) {  in nsc_ircc_open()
    455  self->tx_buff.data = self->tx_buff.head;  in nsc_ircc_open()
    460  self->tx_fifo.tail = self->tx_buff.head;  in nsc_ircc_open()
    501  dma_free_coherent(NULL, self->tx_buff.truesize,  in nsc_ircc_open()
    502  self->tx_buff.head, self->tx_buff_dma);  in nsc_ircc_open()
    538  if (self->tx_buff.head)  in nsc_ircc_close()
    539  dma_free_coherent(NULL, self->tx_buff.truesize,  in nsc_ircc_close()
    [all …]
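The FIR drivers above (w83977af_ir, ali-ircc, via-ircc, smsc-ircc2, nsc-ircc) share one open/close discipline for tx_buff: set truesize, allocate a DMA-coherent buffer into tx_buff.head, point tx_buff.data at the start, and release it with dma_free_coherent() both on the open() error path and in close. Below is a hedged kernel-style sketch of that pattern; the surrounding "self" structure is illustrative, the iobuff_t include path assumes the staging irda headers, and dma_zalloc_coherent() is kept because it is what these hits call. Note that most of these drivers pass NULL as the struct device, which newer DMA-mapping code no longer accepts; via-ircc already passes &pdev->dev.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <net/irda/irda_device.h>	/* iobuff_t; lives under drivers/staging/irda/include in this tree */

/* Illustrative per-device state; the real drivers keep this inside their
 * own "self" structure along with rx_buff, tx_fifo, and so on. */
struct fir_self {
	iobuff_t	tx_buff;
	dma_addr_t	tx_buff_dma;
};

/* Open-time allocation pattern shared by the FIR drivers listed above. */
static int fir_open_tx_buff(struct fir_self *self, struct device *dev,
			    size_t truesize)
{
	self->tx_buff.truesize = truesize;	/* e.g. 14384 in ali-ircc/nsc-ircc */

	self->tx_buff.head = dma_zalloc_coherent(dev, self->tx_buff.truesize,
						 &self->tx_buff_dma, GFP_KERNEL);
	if (!self->tx_buff.head)
		return -ENOMEM;

	self->tx_buff.data = self->tx_buff.head;	/* nothing queued yet */
	return 0;
}

/* Matching teardown, used on both the open() error path and in close(). */
static void fir_free_tx_buff(struct fir_self *self, struct device *dev)
{
	if (self->tx_buff.head)
		dma_free_coherent(dev, self->tx_buff.truesize,
				  self->tx_buff.head, self->tx_buff_dma);
}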
|
D | pxaficp_ir.c |
    165  iobuff_t tx_buff;  member
    378  while ((si->tx_buff.len) &&  in pxa_irda_sir_irq()
    380  stuart_writel(si, *si->tx_buff.data++, STTHR);  in pxa_irda_sir_irq()
    381  si->tx_buff.len -= 1;  in pxa_irda_sir_irq()
    384  if (si->tx_buff.len == 0) {  in pxa_irda_sir_irq()
    386  dev->stats.tx_bytes += si->tx_buff.data - si->tx_buff.head;  in pxa_irda_sir_irq()
    607  si->tx_buff.data = si->tx_buff.head;  in pxa_irda_hard_xmit()
    608  si->tx_buff.len = async_wrap_skb(skb, si->tx_buff.data, si->tx_buff.truesize);  in pxa_irda_hard_xmit()
    985  err = pxa_irda_init_iobuf(&si->tx_buff, 4000);  in pxa_irda_probe()
    1034  kfree(si->tx_buff.head);  in pxa_irda_probe()
    [all …]
|
D | sa1100_ir.c |
    71  iobuff_t tx_buff;  member
    258  si->tx_buff.data = si->tx_buff.head;  in sa1100_irda_sir_tx_start()
    259  si->tx_buff.len = async_wrap_skb(skb, si->tx_buff.data,  in sa1100_irda_sir_tx_start()
    260  si->tx_buff.truesize);  in sa1100_irda_sir_tx_start()
    263  sg_set_buf(&si->dma_tx.sg, si->tx_buff.data, si->tx_buff.len);  in sa1100_irda_sir_tx_start()
    963  err = sa1100_irda_init_iobuf(&si->tx_buff, IRDA_SIR_MAX_FRAME);  in sa1100_irda_probe()
    1009  kfree(si->tx_buff.head);  in sa1100_irda_probe()
    1030  kfree(si->tx_buff.head);  in sa1100_irda_remove()
|
D | irda-usb.c |
    443  skb_copy_from_linear_data(skb, self->tx_buff + self->header_length, skb->len);  in irda_usb_hard_xmit()
    448  __u8* frame = self->tx_buff;  in irda_usb_hard_xmit()
    462  irda_usb_build_header(self, self->tx_buff, 0);  in irda_usb_hard_xmit()
    470  self->tx_buff, skb->len + self->header_length,  in irda_usb_hard_xmit()
    1430  kfree(self->tx_buff);  in irda_usb_close()
    1431  self->tx_buff = NULL;  in irda_usb_close()
    1731  self->tx_buff = kzalloc(IRDA_SKB_MAX_MTU + self->header_length,  in irda_usb_probe()
    1733  if (!self->tx_buff)  in irda_usb_probe()
    1767  kfree(self->tx_buff);  in irda_usb_probe()
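irda-usb.c treats tx_buff differently: it is a plain kzalloc()ed byte array sized for the largest frame plus a per-device USB header, the skb payload is copied in behind that header, irda_usb_build_header() fills the prefix, and the whole buffer is handed to the bulk URB. A small compilable model of that buffer layout follows; the capacity constant and the header builder are placeholders for IRDA_SKB_MAX_MTU and the driver's real header logic.

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#define MAX_FRAME_LEN 4096	/* stand-in capacity; the driver sizes this with IRDA_SKB_MAX_MTU */

struct usb_tx {
	uint8_t *tx_buff;	/* header + payload in one linear buffer */
	size_t header_length;	/* device-dependent USB header size */
};

/* Placeholder for irda_usb_build_header(): the real driver encodes speed
 * change and xbof information into the per-frame header here. */
static void build_header(uint8_t *frame, size_t header_length)
{
	memset(frame, 0, header_length);
}

static int usb_tx_init(struct usb_tx *tx, size_t header_length)
{
	tx->header_length = header_length;
	tx->tx_buff = calloc(1, MAX_FRAME_LEN + header_length);
	return tx->tx_buff ? 0 : -1;
}

/* Mirrors irda_usb_hard_xmit(): payload is copied after the header, then
 * the header is filled and the full length handed to the bulk URB. */
static size_t usb_tx_fill(struct usb_tx *tx, const uint8_t *payload, size_t len)
{
	if (len > MAX_FRAME_LEN)
		return 0;
	memcpy(tx->tx_buff + tx->header_length, payload, len);
	build_header(tx->tx_buff, tx->header_length);
	return len + tx->header_length;
}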
|
D | bfin_sir.h | 76 iobuff_t tx_buff; member
|
D | irda-usb.h | 158 char *tx_buff; member
|
D | w83977af_ir.h | 180 iobuff_t tx_buff; /* Transmit buffer */ member
|
D | ali-ircc.h | 199 iobuff_t tx_buff; /* Transmit buffer */ member
|
/drivers/staging/irda/net/ |
D | wrapper.c |
    83  int async_wrap_skb(struct sk_buff *skb, __u8 *tx_buff, int buffsize)  in async_wrap_skb() argument
    123  memset(tx_buff + n, XBOF, xbofs);  in async_wrap_skb()
    127  tx_buff[n++] = BOF;  in async_wrap_skb()
    142  n += stuff_byte(skb->data[i], tx_buff+n);  in async_wrap_skb()
    149  n += stuff_byte(fcs.bytes[0], tx_buff+n);  in async_wrap_skb()
    150  n += stuff_byte(fcs.bytes[1], tx_buff+n);  in async_wrap_skb()
    152  n += stuff_byte(fcs.bytes[1], tx_buff+n);  in async_wrap_skb()
    153  n += stuff_byte(fcs.bytes[0], tx_buff+n);  in async_wrap_skb()
    155  tx_buff[n++] = EOF;  in async_wrap_skb()
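async_wrap_skb() is the helper behind most of the SIR transmit hits above (smsc_ircc_hard_xmit_sir, pxa_irda_hard_xmit, sa1100_irda_sir_tx_start): it byte-stuffs an skb into the caller's tx_buff, framing it with XBOFs, BOF, the FCS and EOF, and returns the wrapped length. Below is a hedged sketch of how a SIR driver consumes it, modeled on those callers; the per-port structure and the transmit kick are illustrative, not any one driver's code.

#include <linux/netdevice.h>
#include <linux/skbuff.h>
#include <net/irda/irda_device.h>	/* iobuff_t (staging include path) */
#include <net/irda/wrapper.h>		/* async_wrap_skb() */

/* Illustrative per-port state, mirroring the fields the callers above use. */
struct sir_port {
	iobuff_t tx_buff;
};

/* Typical hard_start_xmit step for a SIR UART-style driver: rewind the
 * buffer, wrap the frame into it, then let the TX interrupt handler (or a
 * DMA channel) drain tx_buff.data / tx_buff.len byte by byte. */
static netdev_tx_t sir_hard_xmit(struct sir_port *port, struct sk_buff *skb)
{
	port->tx_buff.data = port->tx_buff.head;
	port->tx_buff.len = async_wrap_skb(skb, port->tx_buff.data,
					   port->tx_buff.truesize);

	/* ...enable the TX interrupt or kick the DMA channel here... */

	dev_kfree_skb(skb);		/* the wrapped copy now lives in tx_buff */
	return NETDEV_TX_OK;
}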
|
/drivers/net/ethernet/arc/ |
D | emac_main.c |
    121  struct buffer_state *tx_buff = &priv->tx_buff[*txbd_dirty];  in arc_emac_tx_clean() local
    122  struct sk_buff *skb = tx_buff->skb;  in arc_emac_tx_clean()
    145  dma_unmap_single(&ndev->dev, dma_unmap_addr(tx_buff, addr),  in arc_emac_tx_clean()
    146  dma_unmap_len(tx_buff, len), DMA_TO_DEVICE);  in arc_emac_tx_clean()
    153  tx_buff->skb = NULL;  in arc_emac_tx_clean()
    504  struct buffer_state *tx_buff = &priv->tx_buff[i];  in arc_free_tx_queue() local
    506  if (tx_buff->skb) {  in arc_free_tx_queue()
    508  dma_unmap_addr(tx_buff, addr),  in arc_free_tx_queue()
    509  dma_unmap_len(tx_buff, len),  in arc_free_tx_queue()
    513  dev_kfree_skb_irq(tx_buff->skb);  in arc_free_tx_queue()
    [all …]
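In the ARC EMAC driver tx_buff means something else again: priv->tx_buff[] is an array of per-descriptor buffer_state entries recording the skb and its DMA mapping, so arc_emac_tx_clean() and arc_free_tx_queue() can unmap and free them once the hardware is done. Here is a sketch of that completion-side bookkeeping with the structure simplified; the real driver stores the mapping through the dma_unmap_addr/len macros shown above.

#include <linux/dma-mapping.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

/* Simplified stand-in for struct buffer_state: one entry per TX descriptor. */
struct tx_buffer_state {
	struct sk_buff *skb;
	dma_addr_t addr;
	unsigned int len;
};

/* Completion-side pattern from arc_emac_tx_clean(): once the descriptor is
 * done, drop the DMA mapping, free the skb and clear the slot so the ring
 * entry can be reused. */
static void tx_clean_one(struct device *dev, struct tx_buffer_state *tx_buff)
{
	if (!tx_buff->skb)
		return;

	dma_unmap_single(dev, tx_buff->addr, tx_buff->len, DMA_TO_DEVICE);
	dev_kfree_skb_irq(tx_buff->skb);	/* runs from IRQ/completion context */
	tx_buff->skb = NULL;
}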
|
D | emac.h | 153 struct buffer_state tx_buff[TX_BD_NUM]; member
|
/drivers/net/ethernet/amd/ |
D | ariadne.c |
    90  volatile u_short *tx_buff[TX_RING_SIZE];  member
    102  u_short tx_buff[TX_RING_SIZE][PKT_BUF_SIZE / sizeof(u_short)];  member
    132  offsetof(struct lancedata, tx_buff[i]));  in ariadne_init_ring()
    134  offsetof(struct lancedata, tx_buff[i])) |  in ariadne_init_ring()
    139  priv->tx_buff[i] = lancedata->tx_buff[i];  in ariadne_init_ring()
    141  i, &lancedata->tx_ring[i], lancedata->tx_buff[i]);  in ariadne_init_ring()
    583  memcpyw(priv->tx_buff[entry], (u_short *)skb->data, len);  in ariadne_start_xmit()
    587  (void *)priv->tx_buff[entry],  in ariadne_start_xmit()
|
/drivers/s390/net/ |
D | netiucv.c |
    192  struct sk_buff *tx_buff;  member
    756  conn->tx_buff->data = conn->tx_buff->head;  in conn_action_txdone()
    757  skb_reset_tail_pointer(conn->tx_buff);  in conn_action_txdone()
    758  conn->tx_buff->len = 0;  in conn_action_txdone()
    761  header.next = conn->tx_buff->len + skb->len + NETIUCV_HDRLEN;  in conn_action_txdone()
    762  skb_put_data(conn->tx_buff, &header, NETIUCV_HDRLEN);  in conn_action_txdone()
    764  skb_put(conn->tx_buff, skb->len),  in conn_action_txdone()
    776  if (conn->tx_buff->len == 0) {  in conn_action_txdone()
    782  skb_put_data(conn->tx_buff, &header, NETIUCV_HDRLEN);  in conn_action_txdone()
    787  conn->tx_buff->data, conn->tx_buff->len);  in conn_action_txdone()
    [all …]
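In netiucv.c tx_buff is itself an sk_buff used as an aggregation buffer: conn_action_txdone() rewinds it (data back to head, tail pointer reset, len zeroed) and then packs queued packets into it, each prefixed with a header whose next field records the offset of the following record. A hedged sketch of that rewind-and-refill step follows; the header layout and helper name are illustrative, not the driver's exact definitions.

#include <linux/netdevice.h>
#include <linux/skbuff.h>
#include <linux/types.h>

/* Illustrative link-level header; the real layout is defined in netiucv.c. */
struct ll_header {
	u16 next;	/* offset of the next record inside tx_buff */
};

#define LL_HDRLEN	((unsigned int)sizeof(struct ll_header))

/* Rewind-and-refill pattern from conn_action_txdone(): tx_buff is an
 * sk_buff that aggregates every queued packet behind its own header. */
static void refill_tx_buff(struct sk_buff *tx_buff, struct sk_buff_head *queue)
{
	struct ll_header header;
	struct sk_buff *skb;

	tx_buff->data = tx_buff->head;		/* reset the aggregation buffer */
	skb_reset_tail_pointer(tx_buff);
	tx_buff->len = 0;

	while ((skb = skb_dequeue(queue)) != NULL) {
		if (skb_tailroom(tx_buff) < (int)(skb->len + LL_HDRLEN)) {
			skb_queue_head(queue, skb);	/* keep it for the next round */
			break;
		}
		header.next = tx_buff->len + skb->len + LL_HDRLEN;
		skb_put_data(tx_buff, &header, LL_HDRLEN);
		skb_copy_from_linear_data(skb,
					  skb_put(tx_buff, skb->len), skb->len);
		dev_kfree_skb_any(skb);
	}
}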
|
/drivers/spi/ |
D | spi-tle62x0.c |
    35  unsigned char tx_buff[4];  member
    43  unsigned char *buff = st->tx_buff;  in tle62x0_write()
    62  unsigned char *txbuff = st->tx_buff;  in tle62x0_read()
|
/drivers/net/hamradio/ |
D | scc.c |
    300  if (scc->tx_buff != NULL)  in scc_discard_buffers()
    302  dev_kfree_skb(scc->tx_buff);  in scc_discard_buffers()
    303  scc->tx_buff = NULL;  in scc_discard_buffers()
    371  skb = scc->tx_buff;  in scc_txint()
    378  scc->tx_buff = skb;  in scc_txint()
    391  scc->tx_buff = NULL;  in scc_txint()
    417  scc->tx_buff = NULL;  in scc_txint()
    499  if (scc->tx_buff != NULL)  in scc_exint()
    501  dev_kfree_skb_irq(scc->tx_buff);  in scc_exint()
    502  scc->tx_buff = NULL;  in scc_exint()
    [all …]
|
/drivers/net/ethernet/atheros/atl1e/ |
D | atl1e.h |
    387  #define ATL1E_SET_PCIMAP_TYPE(tx_buff, type) do { \  argument
    388  ((tx_buff)->flags) &= ~ATL1E_TX_PCIMAP_TYPE_MASK; \
    389  ((tx_buff)->flags) |= (type); \
|
/drivers/net/ethernet/ibm/ |
D | ibmvnic.c |
    556  memset(tx_pool->tx_buff, 0,  in reset_tx_pools()
    582  kfree(tx_pool->tx_buff);  in release_tx_pools()
    612  tx_pool->tx_buff = kcalloc(adapter->req_tx_entries_per_subcrq,  in init_tx_pools()
    615  if (!tx_pool->tx_buff) {  in init_tx_pools()
    986  if (tx_pool->tx_buff[j].skb) {  in clean_tx_pools()
    987  dev_kfree_skb_any(tx_pool->tx_buff[j].skb);  in clean_tx_pools()
    988  tx_pool->tx_buff[j].skb = NULL;  in clean_tx_pools()
    1196  struct ibmvnic_tx_buff *tx_buff = NULL;  in ibmvnic_xmit() local
    1243  tx_buff = &tx_pool->tx_buff[index];  in ibmvnic_xmit()
    1244  tx_buff->skb = skb;  in ibmvnic_xmit()
    [all …]
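ibmvnic keeps tx_buff as a kcalloc()ed array of struct ibmvnic_tx_buff inside each TX pool: init_tx_pools() sizes it from req_tx_entries_per_subcrq, ibmvnic_xmit() claims an entry by index and parks the skb there, and the clean/release paths free whatever is still parked before dropping the array. Below is a minimal sketch of that pool lifecycle with the structures trimmed to the fields the hits show; error handling is abridged.

#include <linux/errno.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>
#include <linux/slab.h>

/* Trimmed-down per-entry and per-pool state, after the hits above. */
struct tx_entry {
	struct sk_buff *skb;
};

struct tx_pool {
	struct tx_entry *tx_buff;
	unsigned int num_entries;
};

static int tx_pool_init(struct tx_pool *pool, unsigned int entries)
{
	pool->tx_buff = kcalloc(entries, sizeof(*pool->tx_buff), GFP_KERNEL);
	if (!pool->tx_buff)
		return -ENOMEM;
	pool->num_entries = entries;
	return 0;
}

/* clean_tx_pools() pattern: free any skb still owned by a pool entry. */
static void tx_pool_clean(struct tx_pool *pool)
{
	unsigned int i;

	for (i = 0; i < pool->num_entries; i++) {
		if (pool->tx_buff[i].skb) {
			dev_kfree_skb_any(pool->tx_buff[i].skb);
			pool->tx_buff[i].skb = NULL;
		}
	}
}

static void tx_pool_release(struct tx_pool *pool)
{
	kfree(pool->tx_buff);
	pool->tx_buff = NULL;
}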
|
/drivers/staging/irda/include/net/irda/ |
D | wrapper.h | 54 int async_wrap_skb(struct sk_buff *skb, __u8 *tx_buff, int buffsize);
|