/drivers/infiniband/ulp/iser/ |
D | iser_initiator.c |
  181   struct iser_desc *rx_desc;  in iser_post_receive_control() local
  206   rx_desc = kmem_cache_alloc(ig.desc_cache, GFP_NOIO);  in iser_post_receive_control()
  207   if (rx_desc == NULL) {  in iser_post_receive_control()
  213   rx_desc->type = ISCSI_RX;  in iser_post_receive_control()
  214   rx_desc->data = kmalloc(rx_data_size, GFP_NOIO);  in iser_post_receive_control()
  215   if (rx_desc->data == NULL) {  in iser_post_receive_control()
  222   recv_dto = &rx_desc->dto;  in iser_post_receive_control()
  226   regd_hdr = &rx_desc->hdr_regd_buf;  in iser_post_receive_control()
  229   regd_hdr->virt_addr = rx_desc; /* == &rx_desc->iser_header */  in iser_post_receive_control()
  236   regd_data = &rx_desc->data_regd_buf;  in iser_post_receive_control()
  [all …]
|
D | iser_verbs.c |
  681   int iser_post_recv(struct iser_desc *rx_desc)  in iser_post_recv() argument
  687   struct iser_dto *recv_dto = &rx_desc->dto;  in iser_post_recv()
  697   recv_wr.wr_id = (unsigned long)rx_desc;  in iser_post_recv()
|
/drivers/net/ |
D | sgiseeq.c |
  57    (unsigned long)((sp)->rx_desc)))
  93    struct sgiseeq_rx_desc *rx_desc;  member
  196   if (!sp->rx_desc[i].skb) {  in seeq_init_ring()
  206   sp->rx_desc[i].skb = skb;  in seeq_init_ring()
  207   sp->rx_desc[i].rdma.pbuf = dma_addr;  in seeq_init_ring()
  209   sp->rx_desc[i].rdma.cntinfo = RCNTINFO_INIT;  in seeq_init_ring()
  210   dma_sync_desc_dev(dev, &sp->rx_desc[i]);  in seeq_init_ring()
  212   sp->rx_desc[i - 1].rdma.cntinfo |= HPCDMA_EOR;  in seeq_init_ring()
  213   dma_sync_desc_dev(dev, &sp->rx_desc[i - 1]);  in seeq_init_ring()
  232   if (sp->rx_desc[i].skb) {  in seeq_purge_ring()
  [all …]
|
D | acenic.h |
  475   #define RX_STD_RING_SIZE (RX_STD_RING_ENTRIES * sizeof(struct rx_desc))
  478   #define RX_JUMBO_RING_SIZE (RX_JUMBO_RING_ENTRIES *sizeof(struct rx_desc))
  481   #define RX_MINI_RING_SIZE (RX_MINI_RING_ENTRIES *sizeof(struct rx_desc))
  485   sizeof(struct rx_desc))
  487   struct rx_desc{  struct
  662   struct rx_desc *rx_std_ring;
  663   struct rx_desc *rx_jumbo_ring;
  664   struct rx_desc *rx_mini_ring;
  665   struct rx_desc *rx_return_ring;
|
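The acenic.h macros above size each RX ring as ENTRIES * sizeof(struct rx_desc); the acenic.c, via-rhine.c and rrunner.h references later in this listing do the same arithmetic when allocating and walking their rings. A minimal, generic sketch of the pattern, with a hypothetical descriptor layout and helper name rather than the AceNIC definitions:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

#define RX_RING_ENTRIES 512

struct rx_desc {                        /* hypothetical descriptor layout */
        __le64 buf_addr;                /* DMA address of the receive buffer */
        __le16 buf_len;                 /* size of that buffer */
        __le16 flags;                   /* ownership / status bits */
        __le32 reserved;
};

#define RX_RING_SIZE (RX_RING_ENTRIES * sizeof(struct rx_desc))

struct rx_ring {
        struct rx_desc *desc;           /* CPU view of the ring */
        dma_addr_t dma;                 /* bus address programmed into the NIC */
};

/* One coherent allocation holds the whole descriptor ring. */
static int rx_ring_alloc(struct device *dev, struct rx_ring *ring)
{
        ring->desc = dma_alloc_coherent(dev, RX_RING_SIZE, &ring->dma,
                                        GFP_KERNEL);
        return ring->desc ? 0 : -ENOMEM;
}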
D | mv643xx_eth.c |
  184   struct rx_desc {  struct
  200   struct rx_desc {  argument
  333   struct rx_desc *rx_desc_area;
  504   struct rx_desc *rx_desc;  in rxq_process() local
  509   rx_desc = &rxq->rx_desc_area[rxq->rx_curr_desc];  in rxq_process()
  511   cmd_sts = rx_desc->cmd_sts;  in rxq_process()
  523   dma_unmap_single(NULL, rx_desc->buf_ptr,  in rxq_process()
  524   rx_desc->buf_size, DMA_FROM_DEVICE);  in rxq_process()
  530   byte_cnt = rx_desc->byte_cnt;  in rxq_process()
  598   struct rx_desc *rx_desc;  in rxq_refill() local
  [all …]
|
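The rxq_process() fragments above (cmd_sts, buf_ptr, buf_size, byte_cnt) show the usual completion loop for a DMA RX ring: check the ownership/status word, unmap the buffer, read the byte count, then hand the frame up the stack. Below is a hedged, self-contained sketch of that loop; the descriptor layout, the ownership bit and the rx_queue/rxq_poll() names are assumptions for illustration, not the mv643xx_eth code.

#include <linux/dma-mapping.h>
#include <linux/etherdevice.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

#define DESC_OWNED_BY_HW 0x80000000     /* hypothetical ownership bit */

struct rx_desc {                        /* hypothetical descriptor layout */
        __le32 cmd_sts;                 /* ownership + status flags */
        __le16 buf_size;                /* size of the mapped buffer */
        __le16 byte_cnt;                /* bytes written by the NIC */
        __le32 buf_ptr;                 /* DMA address of the buffer */
};

struct rx_queue {
        struct rx_desc *ring;
        struct sk_buff **skb;           /* skb backing each descriptor */
        int size, curr;
        int buf_size;
};

/* Walk completed descriptors, unmap their buffers, pass frames up. */
static int rxq_poll(struct device *dev, struct net_device *ndev,
                    struct rx_queue *q, int budget)
{
        int work = 0;

        while (work < budget) {
                struct rx_desc *desc = &q->ring[q->curr];
                struct sk_buff *skb = q->skb[q->curr];

                if (le32_to_cpu(desc->cmd_sts) & DESC_OWNED_BY_HW)
                        break;          /* still owned by the NIC */

                dma_unmap_single(dev, le32_to_cpu(desc->buf_ptr),
                                 q->buf_size, DMA_FROM_DEVICE);

                skb_put(skb, le16_to_cpu(desc->byte_cnt));
                skb->protocol = eth_type_trans(skb, ndev);
                netif_receive_skb(skb);

                q->skb[q->curr] = NULL; /* slot is refilled elsewhere */
                q->curr = (q->curr + 1) % q->size;
                work++;
        }
        return work;
}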
D | bfin_mac.c |
  74    static struct net_dma_desc_rx *rx_desc;  variable
  108   if (rx_desc) {  in desc_list_free()
  119   bfin_mac_free(dma_handle, rx_desc);  in desc_list_free()
  141   rx_desc = bfin_mac_alloc(&dma_handle,  in desc_list_init()
  144   if (rx_desc == NULL)  in desc_list_init()
  189   rx_list_head = rx_list_tail = rx_desc;  in desc_list_init()
  192   struct net_dma_desc_rx *r = rx_desc + i;  in desc_list_init()
|
D | via-rhine.c |
  333   struct rx_desc {  struct
  368   struct rx_desc *rx_ring;
  395   struct rx_desc *rx_head_desc;
  853   RX_RING_SIZE * sizeof(struct rx_desc) +  in alloc_ring()
  866   RX_RING_SIZE * sizeof(struct rx_desc) +  in alloc_ring()
  874   rp->tx_ring = ring + RX_RING_SIZE * sizeof(struct rx_desc);  in alloc_ring()
  876   rp->tx_ring_dma = ring_dma + RX_RING_SIZE * sizeof(struct rx_desc);  in alloc_ring()
  886   RX_RING_SIZE * sizeof(struct rx_desc) +  in free_ring()
  915   next += sizeof(struct rx_desc);  in alloc_rbufs()
  1436  struct rx_desc *desc = rp->rx_head_desc;  in rhine_rx()
|
D | acenic.c |
  743   size = (sizeof(struct rx_desc) *  in ace_free_descriptors()
  792   size = (sizeof(struct rx_desc) *  in ace_allocate_descriptors()
  1251  RX_STD_RING_ENTRIES * sizeof(struct rx_desc));  in ace_init()
  1261  (sizeof(struct rx_desc) * RX_STD_RING_ENTRIES)));  in ace_init()
  1267  RX_JUMBO_RING_ENTRIES * sizeof(struct rx_desc));  in ace_init()
  1276  RX_MINI_RING_ENTRIES * sizeof(struct rx_desc));  in ace_init()
  1281  (sizeof(struct rx_desc) *  in ace_init()
  1302  (sizeof(struct rx_desc) *  in ace_init()
  1310  RX_RETURN_RING_ENTRIES * sizeof(struct rx_desc));  in ace_init()
  1671  struct rx_desc *rd;  in ace_load_std_rx_ring()
  [all …]
|
D | rrunner.h |
  577   #define RX_TOTAL_SIZE (RX_RING_ENTRIES * sizeof(struct rx_desc))
  579   struct rx_desc{  struct
  801   struct rx_desc *rx_ring;
|
/drivers/net/mlx4/ |
D | en_rx.c |
  67    struct mlx4_en_rx_desc *rx_desc,  in mlx4_en_alloc_frag() argument
  99    rx_desc->data[i].addr = cpu_to_be64(dma);  in mlx4_en_alloc_frag()
  151   struct mlx4_en_rx_desc *rx_desc = ring->buf + ring->stride * index;  in mlx4_en_init_rx_desc() local
  158   rx_desc->next.next_wqe_index = cpu_to_be16((index + 1) & ring->size_mask);  in mlx4_en_init_rx_desc()
  163   rx_desc->data[i].byte_count =  in mlx4_en_init_rx_desc()
  165   rx_desc->data[i].lkey = cpu_to_be32(priv->mdev->mr.key);  in mlx4_en_init_rx_desc()
  173   rx_desc->data[i].byte_count = 0;  in mlx4_en_init_rx_desc()
  174   rx_desc->data[i].lkey = cpu_to_be32(MLX4_EN_MEMTYPE_PAD);  in mlx4_en_init_rx_desc()
  175   rx_desc->data[i].addr = 0;  in mlx4_en_init_rx_desc()
  183   struct mlx4_en_rx_desc *rx_desc = ring->buf + (index * ring->stride);  in mlx4_en_prepare_rx_desc() local
  [all …]
|
/drivers/infiniband/hw/amso1100/ |
D | c2.c |
  165   struct c2_rx_desc *rx_desc;  in c2_rx_ring_alloc() local
  175   rx_desc = vaddr;  in c2_rx_ring_alloc()
  177   for (i = 0; i < rx_ring->count; i++, elem++, rx_desc++, rxp_desc++) {  in c2_rx_ring_alloc()
  178   rx_desc->len = 0;  in c2_rx_ring_alloc()
  179   rx_desc->status = 0;  in c2_rx_ring_alloc()
  192   elem->ht_desc = rx_desc;  in c2_rx_ring_alloc()
  197   rx_desc->next_offset = base;  in c2_rx_ring_alloc()
  200   rx_desc->next_offset =  in c2_rx_ring_alloc()
  201   base + (i + 1) * sizeof(*rx_desc);  in c2_rx_ring_alloc()
  214   struct c2_rx_desc *rx_desc = elem->ht_desc;  in c2_rx_alloc() local
  [all …]
|
/drivers/net/ixp2000/ |
D | ixpdev.c |
  32    static struct ixpdev_rx_desc * const rx_desc =  variable
  88    desc = rx_desc +  in ixpdev_rx()
  321   free_page((unsigned long)phys_to_virt(rx_desc[i].buf_addr));  in ixpdev_init()
  324   rx_desc[i].buf_addr = virt_to_phys(buf);  in ixpdev_init()
  325   rx_desc[i].buf_length = PAGE_SIZE;  in ixpdev_init()
  398   free_page((unsigned long)phys_to_virt(rx_desc[i].buf_addr));  in ixpdev_init()
  421   free_page((unsigned long)phys_to_virt(rx_desc[i].buf_addr));  in ixpdev_deinit()
|
/drivers/net/ibm_newemac/ |
D | debug.c |
  58    i, p->rx_desc[i].data_ptr, p->rx_skb[i] ? 'V' : ' ',  in emac_desc_dump()
  59    p->rx_desc[i].ctrl, p->rx_desc[i].data_len,  in emac_desc_dump()
  61    p->rx_desc[NUM_RX_BUFF / 2 + i].data_ptr,  in emac_desc_dump()
  63    p->rx_desc[NUM_RX_BUFF / 2 + i].ctrl,  in emac_desc_dump()
  64    p->rx_desc[NUM_RX_BUFF / 2 + i].data_len);  in emac_desc_dump()
|
D | core.c |
  977   if (dev->rx_desc[i].ctrl & MAL_RX_CTRL_FIRST)  in emac_resize_rx_ring()
  980   dev->rx_desc[i].data_len = 0;  in emac_resize_rx_ring()
  981   dev->rx_desc[i].ctrl = MAL_RX_CTRL_EMPTY |  in emac_resize_rx_ring()
  1001  dev->rx_desc[i].data_ptr =  in emac_resize_rx_ring()
  1077  dev->rx_desc[i].ctrl = 0;  in emac_clean_rx_ring()
  1080  dev->rx_desc[i].data_ptr = 0;  in emac_clean_rx_ring()
  1097  dev->rx_desc[slot].data_len = 0;  in emac_alloc_rx_skb()
  1100  dev->rx_desc[slot].data_ptr =  in emac_alloc_rx_skb()
  1104  dev->rx_desc[slot].ctrl = MAL_RX_CTRL_EMPTY |  in emac_alloc_rx_skb()
  1575  dev->rx_desc[slot].data_len = 0;  in emac_recycle_rx_skb()
  [all …]
|
/drivers/net/e1000e/ |
D | netdev.c |
  163   struct e1000_rx_desc *rx_desc;  in e1000_alloc_rx_buffers() local
  204   rx_desc = E1000_RX_DESC(*rx_ring, i);  in e1000_alloc_rx_buffers()
  205   rx_desc->buffer_addr = cpu_to_le64(buffer_info->dma);  in e1000_alloc_rx_buffers()
  238   union e1000_rx_desc_packet_split *rx_desc;  in e1000_alloc_rx_buffers_ps() local
  249   rx_desc = E1000_RX_DESC_PS(*rx_ring, i);  in e1000_alloc_rx_buffers_ps()
  255   rx_desc->read.buffer_addr[j+1] = ~cpu_to_le64(0);  in e1000_alloc_rx_buffers_ps()
  280   rx_desc->read.buffer_addr[j+1] =  in e1000_alloc_rx_buffers_ps()
  312   rx_desc->read.buffer_addr[0] = cpu_to_le64(buffer_info->dma);  in e1000_alloc_rx_buffers_ps()
  354   struct e1000_rx_desc *rx_desc;  in e1000_alloc_jumbo_rx_buffers() local
  403   rx_desc = E1000_RX_DESC(*rx_ring, i);  in e1000_alloc_jumbo_rx_buffers()
  [all …]
|
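Each of the e1000e allocation paths above ends the same way: a freshly mapped buffer's DMA address is written into the descriptor with cpu_to_le64(). The sketch below isolates that refill step; the rx_desc layout and the rx_refill_one() helper are hypothetical stand-ins, not the e1000e definitions.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

struct rx_desc {                        /* hypothetical legacy-style descriptor */
        __le64 buffer_addr;             /* DMA address the NIC writes the packet to */
        __le16 length;
        __le16 csum;
        u8 status;
        u8 errors;
        __le16 special;
};

/* Map one fresh skb and point an empty descriptor slot at it. */
static int rx_refill_one(struct device *dev, struct net_device *ndev,
                         struct rx_desc *desc, struct sk_buff **slot,
                         unsigned int bufsz)
{
        struct sk_buff *skb = netdev_alloc_skb(ndev, bufsz);
        dma_addr_t dma;

        if (!skb)
                return -ENOMEM;

        dma = dma_map_single(dev, skb->data, bufsz, DMA_FROM_DEVICE);
        if (dma_mapping_error(dev, dma)) {
                dev_kfree_skb(skb);
                return -EIO;
        }

        *slot = skb;
        desc->buffer_addr = cpu_to_le64(dma);   /* hardware reads this field */
        return 0;
}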
/drivers/atm/ |
D | horizon.c |
  742   rx_ch_desc * rx_desc = &memmap->rx_descs[channel];  in hrz_open_rx() local
  747   channel_type = rd_mem (dev, &rx_desc->wr_buf_type) & BUFFER_PTR_MASK;  in hrz_open_rx()
  775   wr_mem (dev, &rx_desc->wr_buf_type,  in hrz_open_rx()
  778   wr_mem (dev, &rx_desc->rd_buf_type, buf_ptr);  in hrz_open_rx()
  816   rx_ch_desc * rx_desc = &memmap->rx_descs[vc];  in hrz_close_rx() local
  821   value = rd_mem (dev, &rx_desc->wr_buf_type) & BUFFER_PTR_MASK;  in hrz_close_rx()
  835   wr_mem (dev, &rx_desc->wr_buf_type, RX_CHANNEL_DISABLED);  in hrz_close_rx()
  837   if ((rd_mem (dev, &rx_desc->wr_buf_type) & BUFFER_PTR_MASK) == RX_CHANNEL_DISABLED)  in hrz_close_rx()
  881   r1 = rd_mem (dev, &rx_desc->rd_buf_type);  in hrz_close_rx()
  899   r2 = rd_mem (dev, &rx_desc->rd_buf_type);  in hrz_close_rx()
  [all …]
|
/drivers/scsi/ |
D | mvsas.c |
  836   u32 rx_desc = le32_to_cpu(mvi->rx[entry]);  in mvs_hba_cq_dump() local
  841   mvi->slot_info[rx_desc & RXQ_SLOT_MASK].task);  in mvs_hba_cq_dump()
  846   mvs_hexdump(sizeof(u32), (u8 *)(&rx_desc),  in mvs_hba_cq_dump()
  1271  static void mvs_slot_free(struct mvs_info *mvi, u32 rx_desc)  in mvs_slot_free() argument
  1273  u32 slot_idx = rx_desc & RXQ_SLOT_MASK;  in mvs_slot_free()
  1337  static int mvs_slot_complete(struct mvs_info *mvi, u32 rx_desc, u32 flags)  in mvs_slot_complete() argument
  1339  u32 slot_idx = rx_desc & RXQ_SLOT_MASK;  in mvs_slot_complete()
  1363  mvs_slot_free(mvi, rx_desc);  in mvs_slot_complete()
  1380  if (unlikely((rx_desc & RXQ_ERR) && (*(u64 *) slot->response))) {  in mvs_slot_complete()
  1388  if (rx_desc & RXQ_GOOD) {  in mvs_slot_complete()
  [all …]
|
/drivers/net/ixgb/ |
D | ixgb_main.c |
  1860  struct ixgb_rx_desc *rx_desc,  in ixgb_rx_checksum() argument
  1866  if ((rx_desc->status & IXGB_RX_DESC_STATUS_IXSM) ||  in ixgb_rx_checksum()
  1867  (!(rx_desc->status & IXGB_RX_DESC_STATUS_TCPCS))) {  in ixgb_rx_checksum()
  1874  if (rx_desc->errors & IXGB_RX_DESC_ERRORS_TCPE) {  in ixgb_rx_checksum()
  1896  struct ixgb_rx_desc *rx_desc, *next_rxd;  in ixgb_clean_rx_irq() local
  1904  rx_desc = IXGB_RX_DESC(*rx_ring, i);  in ixgb_clean_rx_irq()
  1907  while (rx_desc->status & IXGB_RX_DESC_STATUS_DD) {  in ixgb_clean_rx_irq()
  1915  status = rx_desc->status;  in ixgb_clean_rx_irq()
  1940  length = le16_to_cpu(rx_desc->length);  in ixgb_clean_rx_irq()
  1941  rx_desc->length = 0;  in ixgb_clean_rx_irq()
  [all …]
|
/drivers/net/tokenring/ |
D | lanstreamer.c |
  904   struct streamer_rx_desc *rx_desc;  in streamer_rx() local
  909   …rx_desc = &streamer_priv->streamer_rx_ring[(streamer_priv->rx_ring_last_received + 1) & (STREAMER_…  in streamer_rx()
  912   while (rx_desc->status & 0x01000000) { /* While processed descriptors are available */  in streamer_rx()
  921   length = rx_desc->framelen_buflen & 0xffff; /* buffer length */  in streamer_rx()
  922   frame_length = (rx_desc->framelen_buflen >> 16) & 0xffff;  in streamer_rx()
  924   if (rx_desc->status & 0x7E830000) { /* errors */  in streamer_rx()
  927   dev->name, rx_desc->status);  in streamer_rx()
  930   if (rx_desc->status & 0x80000000) { /* frame complete */  in streamer_rx()
  980   …pci_unmap_single(streamer_priv->pci_dev, le32_to_cpu(rx_desc->buffer), length, PCI_DMA_FROMDEVICE)…  in streamer_rx()
  981   memcpy(skb_put(skb, length), (void *)rx_desc->buffer, length); /* copy this fragment */  in streamer_rx()
  [all …]
|
/drivers/net/ixgbe/ |
D | ixgbe_main.c |
  409   union ixgbe_adv_rx_desc *rx_desc)  in ixgbe_receive_skb() argument
  412   u16 tag = le16_to_cpu(rx_desc->wb.upper.vlan);  in ixgbe_receive_skb()
  419   rx_desc);  in ixgbe_receive_skb()
  421   lro_receive_skb(&ring->lro_mgr, skb, rx_desc);  in ixgbe_receive_skb()
  482   union ixgbe_adv_rx_desc *rx_desc;  in ixgbe_alloc_rx_buffers() local
  490   rx_desc = IXGBE_RX_DESC_ADV(*rx_ring, i);  in ixgbe_alloc_rx_buffers()
  538   rx_desc->read.pkt_addr = cpu_to_le64(bi->page_dma);  in ixgbe_alloc_rx_buffers()
  539   rx_desc->read.hdr_addr = cpu_to_le64(bi->dma);  in ixgbe_alloc_rx_buffers()
  541   rx_desc->read.pkt_addr = cpu_to_le64(bi->dma);  in ixgbe_alloc_rx_buffers()
  567   static inline u16 ixgbe_get_hdr_info(union ixgbe_adv_rx_desc *rx_desc)  in ixgbe_get_hdr_info() argument
  [all …]
|
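The ixgbe hits split between two views of the same descriptor memory: the driver fills in read.pkt_addr/read.hdr_addr, and on completion the hardware overwrites the slot with a write-back format (wb.upper.vlan, status). A rough sketch of such a read/write-back union, using illustrative field names rather than the real ixgbe layout:

#include <linux/types.h>

union adv_rx_desc {                     /* hypothetical 16-byte descriptor */
        struct {
                __le64 pkt_addr;        /* driver: where the NIC should DMA the packet */
                __le64 hdr_addr;        /* driver: optional split-header buffer */
        } read;
        struct {
                __le32 rss_hash;        /* NIC: RSS hash of the received frame */
                __le32 status_error;    /* NIC: DD/EOP and error bits */
                __le16 length;          /* NIC: bytes written for this buffer */
                __le16 vlan;            /* NIC: stripped VLAN tag, if any */
                __le32 reserved;
        } wb;
};

With this design the driver re-initializes the read fields every time it hands a slot back to hardware, since the write-back format has clobbered them.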
/drivers/net/tulip/ |
D | uli526x.c |
  120   struct rx_desc {  struct
  123   struct rx_desc *next_rx_desc;  argument
  153   struct rx_desc *first_rx_desc;
  154   struct rx_desc *rx_insert_ptr;
  155   struct rx_desc *rx_ready_ptr; /* packet come pointer */
  801   struct rx_desc *rxptr;  in uli526x_rx_packet()
  1276  struct rx_desc *rxptr = db->rx_insert_ptr;  in uli526x_reuse_skb()
  1301  struct rx_desc *tmp_rx;  in uli526x_descriptor_init()
  1344  tmp_rx_dma += sizeof(struct rx_desc);  in uli526x_descriptor_init()
  1444  struct rx_desc *rxptr;  in allocate_rx_buffer()
|
D | dmfe.c |
  201   struct rx_desc {  struct
  204   struct rx_desc *next_rx_desc;  argument
  235   struct rx_desc *first_rx_desc;
  236   struct rx_desc *rx_insert_ptr;
  237   struct rx_desc *rx_ready_ptr; /* packet come pointer */
  943   struct rx_desc *rxptr;  in dmfe_rx_packet()
  1334  struct rx_desc *rxptr = db->rx_insert_ptr;  in dmfe_reuse_skb()
  1357  struct rx_desc *tmp_rx;  in dmfe_descriptor_init()
  1403  tmp_rx_dma += sizeof(struct rx_desc);  in dmfe_descriptor_init()
  1544  struct rx_desc *rxptr;  in allocate_rx_buffer()
|
/drivers/net/igb/ |
D | igb_main.c |
  3757  union e1000_adv_rx_desc *rx_desc = priv;  in igb_get_skb_hdr() local
  3758  u16 pkt_type = rx_desc->wb.lower.lo_dword.pkt_info &  in igb_get_skb_hdr()
  3786  union e1000_adv_rx_desc * rx_desc,  in igb_receive_skb() argument
  3798  le16_to_cpu(rx_desc->wb.upper.vlan),  in igb_receive_skb()
  3799  rx_desc);  in igb_receive_skb()
  3801  lro_receive_skb(&ring->lro_mgr,skb, rx_desc);  in igb_receive_skb()
  3807  le16_to_cpu(rx_desc->wb.upper.vlan));  in igb_receive_skb()
  3845  union e1000_adv_rx_desc *rx_desc , *next_rxd;  in igb_clean_rx_irq_adv() local
  3855  rx_desc = E1000_RX_DESC_ADV(*rx_ring, i);  in igb_clean_rx_irq_adv()
  3856  staterr = le32_to_cpu(rx_desc->wb.upper.status_error);  in igb_clean_rx_irq_adv()
  [all …]
|
/drivers/net/cxgb3/ |
D | adapter.h |
  89    struct rx_desc;
  102   struct rx_desc *desc; /* address of HW Rx descriptor ring */
|
/drivers/net/atlx/ |
D | atl2.h |
  356   struct rx_desc {  struct
  484   struct rx_desc *rxd_ring;
|