/drivers/atm/
  atmtcp.c
    195  struct sk_buff *new_skb;  in atmtcp_v_send() local
    214  new_skb = atm_alloc_charge(out_vcc,size,GFP_ATOMIC);  in atmtcp_v_send()
    215  if (!new_skb) {  in atmtcp_v_send()
    221  hdr = skb_put(new_skb, sizeof(struct atmtcp_hdr));  in atmtcp_v_send()
    225  skb_copy_from_linear_data(skb, skb_put(new_skb, skb->len), skb->len);  in atmtcp_v_send()
    228  out_vcc->push(out_vcc,new_skb);  in atmtcp_v_send()
    288  struct sk_buff *new_skb;  in atmtcp_c_send() local
    308  new_skb = atm_alloc_charge(out_vcc,skb->len,GFP_KERNEL);  in atmtcp_c_send()
    309  if (!new_skb) {  in atmtcp_c_send()
    313  __net_timestamp(new_skb);  in atmtcp_c_send()
    [all …]
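The atmtcp hits above are the basic "copy the payload into a freshly allocated buffer behind a driver header" shape. A minimal sketch of that shape, assuming a plain alloc_skb() in place of the ATM-specific atm_alloc_charge() and a made-up struct my_hdr rather than the real struct atmtcp_hdr:

#include <linux/skbuff.h>

/* Hypothetical on-the-wire header; atmtcp uses struct atmtcp_hdr instead. */
struct my_hdr {
        __u32 length;
};

/* Copy the linear payload of @skb behind a freshly written header. */
static struct sk_buff *copy_with_header(struct sk_buff *skb, gfp_t gfp)
{
        struct sk_buff *new_skb;
        struct my_hdr *hdr;

        new_skb = alloc_skb(sizeof(*hdr) + skb->len, gfp);
        if (!new_skb)
                return NULL;

        hdr = skb_put(new_skb, sizeof(*hdr));   /* claim space for the header */
        hdr->length = skb->len;
        skb_copy_from_linear_data(skb, skb_put(new_skb, skb->len), skb->len);
        return new_skb;
}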
/drivers/bluetooth/
  btrsi.c
    51  struct sk_buff *new_skb = NULL;  in rsi_hci_send_pkt() local
    67  new_skb = skb_realloc_headroom(skb, RSI_HEADROOM_FOR_BT_HAL);  in rsi_hci_send_pkt()
    68  if (unlikely(!new_skb))  in rsi_hci_send_pkt()
    70  bt_cb(new_skb)->pkt_type = hci_skb_pkt_type(skb);  in rsi_hci_send_pkt()
    72  skb = new_skb;  in rsi_hci_send_pkt()
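btrsi (like pptp.c further down) only needs more headroom in front of the existing payload, so it uses skb_realloc_headroom() instead of copying by hand. A small sketch of that idiom; the ownership convention (caller frees the original on failure) is an assumption, not the driver's exact flow:

#include <linux/skbuff.h>

/* Ensure @skb has at least @needed bytes of headroom before a header is
 * pushed.  skb_realloc_headroom() makes a copy and does not free the
 * original, so that is done here.  Returns NULL on allocation failure,
 * in which case the caller still owns (and must free) @skb. */
static struct sk_buff *ensure_headroom(struct sk_buff *skb, unsigned int needed)
{
        struct sk_buff *new_skb;

        if (skb_headroom(skb) >= needed)
                return skb;

        new_skb = skb_realloc_headroom(skb, needed);
        if (unlikely(!new_skb))
                return NULL;

        kfree_skb(skb);
        return new_skb;
}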
/drivers/net/ethernet/sunplus/
  spl2sw_int.c
    22  struct sk_buff *skb, *new_skb;  in spl2sw_rx_poll() local
    74  new_skb = netdev_alloc_skb(NULL, comm->rx_desc_buff_size);  in spl2sw_rx_poll()
    75  if (unlikely(!new_skb)) {  in spl2sw_rx_poll()
    84  sinfo->mapping = dma_map_single(&comm->pdev->dev, new_skb->data,  in spl2sw_rx_poll()
    88  dev_kfree_skb_irq(new_skb);  in spl2sw_rx_poll()
    97  sinfo->skb = new_skb;  in spl2sw_rx_poll()
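The sunplus lines show the standard RX refill sequence: allocate a replacement buffer, DMA-map it, and back out cleanly if either step fails. A hedged sketch of that sequence; struct my_rx_slot and the helper name are illustrative, not the driver's own types:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

/* Illustrative per-slot bookkeeping; real drivers keep this in their ring. */
struct my_rx_slot {
        struct sk_buff *skb;
        dma_addr_t mapping;
};

/* Refill one RX ring slot with a fresh, DMA-mapped buffer. */
static int refill_rx_slot(struct device *dma_dev, struct net_device *ndev,
                          struct my_rx_slot *slot, unsigned int buf_size)
{
        struct sk_buff *new_skb;
        dma_addr_t mapping;

        new_skb = netdev_alloc_skb(ndev, buf_size);
        if (unlikely(!new_skb))
                return -ENOMEM;

        mapping = dma_map_single(dma_dev, new_skb->data, buf_size,
                                 DMA_FROM_DEVICE);
        if (dma_mapping_error(dma_dev, mapping)) {
                dev_kfree_skb_any(new_skb);
                return -ENOMEM;
        }

        slot->skb = new_skb;
        slot->mapping = mapping;
        return 0;
}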
/drivers/net/ethernet/sgi/
  ioc3-eth.c
    126  struct sk_buff *new_skb;  in ioc3_alloc_skb() local
    130  new_skb = alloc_skb(RX_BUF_SIZE + IOC3_DMA_XFER_LEN - 1, GFP_ATOMIC);  in ioc3_alloc_skb()
    131  if (!new_skb)  in ioc3_alloc_skb()
    135  offset = aligned_rx_skb_addr((unsigned long)new_skb->data);  in ioc3_alloc_skb()
    137  skb_reserve(new_skb, offset);  in ioc3_alloc_skb()
    139  d = dma_map_single(ip->dma_dev, new_skb->data,  in ioc3_alloc_skb()
    143  dev_kfree_skb_any(new_skb);  in ioc3_alloc_skb()
    147  *rxb = (struct ioc3_erxbuf *)new_skb->data;  in ioc3_alloc_skb()
    148  skb_reserve(new_skb, RX_OFFSET);  in ioc3_alloc_skb()
    149  *skb = new_skb;  in ioc3_alloc_skb()
    [all …]
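ioc3_alloc_skb() over-allocates and then skb_reserve()s its way up to a DMA-friendly boundary before mapping. A condensed sketch of the alignment trick with an assumed alignment constant:

#include <linux/kernel.h>
#include <linux/skbuff.h>

#define MY_RX_ALIGN 128         /* assumed DMA burst alignment */

/* Allocate an RX skb whose data pointer sits on a MY_RX_ALIGN boundary:
 * over-allocate by (align - 1) bytes, then reserve up to the boundary. */
static struct sk_buff *alloc_aligned_rx_skb(unsigned int len, gfp_t gfp)
{
        struct sk_buff *skb;
        unsigned long offset;

        skb = alloc_skb(len + MY_RX_ALIGN - 1, gfp);
        if (!skb)
                return NULL;

        offset = ALIGN((unsigned long)skb->data, MY_RX_ALIGN) -
                 (unsigned long)skb->data;
        if (offset)
                skb_reserve(skb, offset);
        return skb;
}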
/drivers/net/ethernet/natsemi/
  sonic.c
    501  struct sk_buff **new_skb, dma_addr_t *new_addr)  in sonic_alloc_rb() argument
    503  *new_skb = netdev_alloc_skb(dev, SONIC_RBSIZE + 2);  in sonic_alloc_rb()
    504  if (!*new_skb)  in sonic_alloc_rb()
    508  skb_reserve(*new_skb, 2);  in sonic_alloc_rb()
    510  *new_addr = dma_map_single(lp->device, skb_put(*new_skb, SONIC_RBSIZE),  in sonic_alloc_rb()
    513  dev_kfree_skb(*new_skb);  in sonic_alloc_rb()
    514  *new_skb = NULL;  in sonic_alloc_rb()
    567  struct sk_buff *new_skb;  in sonic_rx() local
    579  if (sonic_alloc_rb(dev, lp, &new_skb, &new_laddr)) {  in sonic_rx()
    596  lp->rx_skb[i] = new_skb;  in sonic_rx()
/drivers/net/usb/
  sierra_net.c
    781  struct sk_buff *new_skb;  in sierra_net_skb_clone() local
    784  new_skb = skb_clone(skb, GFP_ATOMIC);  in sierra_net_skb_clone()
    790  if (new_skb) {  in sierra_net_skb_clone()
    791  skb_trim(new_skb, len);  in sierra_net_skb_clone()
    798  return new_skb;  in sierra_net_skb_clone()
    806  struct sk_buff *new_skb;  in sierra_net_rx_fixup() local
    848  new_skb = sierra_net_skb_clone(dev, skb, hh.payload_len.word);  in sierra_net_rx_fixup()
    849  if (new_skb)  in sierra_net_rx_fixup()
    850  usbnet_skb_return(dev, new_skb);  in sierra_net_rx_fixup()
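sierra_net_skb_clone() peels the leading payload off a multi-record URB by cloning and trimming instead of copying. A rough sketch of the same split, assuming the caller keeps processing the remainder of the original skb:

#include <linux/skbuff.h>

/* Hand back the first @len bytes of @skb as their own skb (sharing the data
 * buffer via skb_clone()), and advance the original past them. */
static struct sk_buff *split_head(struct sk_buff *skb, unsigned int len)
{
        struct sk_buff *new_skb;

        new_skb = skb_clone(skb, GFP_ATOMIC);   /* no data copy, shared buffer */
        if (!new_skb)
                return NULL;

        skb_trim(new_skb, len);                 /* clone keeps only the head  */
        skb_pull(skb, len);                     /* original keeps the tail    */
        return new_skb;
}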
  aqc111.c
    1074  struct sk_buff *new_skb = NULL;  in aqc111_rx_fixup() local
    1144  new_skb = netdev_alloc_skb_ip_align(dev->net, pkt_len);  in aqc111_rx_fixup()
    1146  if (!new_skb)  in aqc111_rx_fixup()
    1149  skb_put(new_skb, pkt_len);  in aqc111_rx_fixup()
    1150  memcpy(new_skb->data, skb->data, pkt_len);  in aqc111_rx_fixup()
    1151  skb_pull(new_skb, AQ_RX_HW_PAD);  in aqc111_rx_fixup()
    1154  aqc111_rx_checksum(new_skb, pkt_desc);  in aqc111_rx_fixup()
    1158  __vlan_hwaccel_put_tag(new_skb, htons(ETH_P_8021Q),  in aqc111_rx_fixup()
    1162  usbnet_skb_return(dev, new_skb);  in aqc111_rx_fixup()
    1171  new_skb = NULL;  in aqc111_rx_fixup()
    [all …]
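aqc111_rx_fixup() copies each frame out of an aggregated USB transfer into its own IP-aligned skb before handing it back to usbnet. A simplified sketch of the per-frame step; netif_rx() stands in for the usbnet return path, and the checksum/VLAN handling is omitted:

#include <linux/errno.h>
#include <linux/etherdevice.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

/* Copy one frame out of an aggregate RX buffer and push it to the stack. */
static int deliver_one_frame(struct net_device *ndev, const void *data,
                             unsigned int pkt_len)
{
        struct sk_buff *new_skb;

        new_skb = netdev_alloc_skb_ip_align(ndev, pkt_len);
        if (!new_skb)
                return -ENOMEM;

        skb_put_data(new_skb, data, pkt_len);           /* copy the payload */
        new_skb->protocol = eth_type_trans(new_skb, ndev);
        netif_rx(new_skb);
        return 0;
}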
/drivers/net/ethernet/xilinx/
  xilinx_emaclite.c
    999  struct sk_buff *new_skb;  in xemaclite_send() local
    1005  new_skb = orig_skb;  in xemaclite_send()
    1008  if (xemaclite_send_data(lp, (u8 *)new_skb->data, len) != 0) {  in xemaclite_send()
    1014  lp->deferred_skb = new_skb;  in xemaclite_send()
    1016  skb_tx_timestamp(new_skb);  in xemaclite_send()
    1022  skb_tx_timestamp(new_skb);  in xemaclite_send()
    1025  dev_consume_skb_any(new_skb);  in xemaclite_send()
/drivers/net/ethernet/microchip/sparx5/
  sparx5_fdma.c
    210  struct sk_buff *new_skb;  in sparx5_fdma_rx_get_frame() local
    221  new_skb = sparx5_fdma_rx_alloc_skb(rx);  in sparx5_fdma_rx_get_frame()
    222  if (unlikely(!new_skb))  in sparx5_fdma_rx_get_frame()
    225  dma_addr = virt_to_phys(new_skb->data);  in sparx5_fdma_rx_get_frame()
    226  rx->skb[rx->dcb_index][rx->db_index] = new_skb;  in sparx5_fdma_rx_get_frame()
/drivers/net/wireless/marvell/mwifiex/
  uap_txrx.c
    87  struct sk_buff *new_skb;  in mwifiex_uap_queue_bridged_pkt() local
    163  new_skb =  in mwifiex_uap_queue_bridged_pkt()
    165  if (unlikely(!new_skb)) {  in mwifiex_uap_queue_bridged_pkt()
    174  skb = new_skb;  in mwifiex_uap_queue_bridged_pkt()
/drivers/net/ethernet/freescale/fs_enet/
  fs_enet-main.c
    457  struct sk_buff *new_skb;  in tx_skb_align_workaround() local
    463  new_skb = netdev_alloc_skb(dev, skb->len + 4);  in tx_skb_align_workaround()
    464  if (!new_skb)  in tx_skb_align_workaround()
    468  skb_align(new_skb, 4);  in tx_skb_align_workaround()
    471  skb_copy_from_linear_data(skb, new_skb->data, skb->len);  in tx_skb_align_workaround()
    472  skb_put(new_skb, skb->len);  in tx_skb_align_workaround()
    477  return new_skb;  in tx_skb_align_workaround()
/drivers/net/ethernet/freescale/dpaa/
  dpaa_eth.c
    2123  struct sk_buff *new_skb, *skb = *s;  in dpaa_a050385_wa_skb() local
    2155  new_skb = netdev_alloc_skb(net_dev, skb->len + DPAA_A050385_ALIGN - 1 +  in dpaa_a050385_wa_skb()
    2157  if (!new_skb)  in dpaa_a050385_wa_skb()
    2161  skb_reserve(new_skb, priv->tx_headroom - NET_SKB_PAD);  in dpaa_a050385_wa_skb()
    2164  start = PTR_ALIGN(new_skb->data, DPAA_A050385_ALIGN);  in dpaa_a050385_wa_skb()
    2165  if (start - new_skb->data)  in dpaa_a050385_wa_skb()
    2166  skb_reserve(new_skb, start - new_skb->data);  in dpaa_a050385_wa_skb()
    2168  skb_put(new_skb, skb->len);  in dpaa_a050385_wa_skb()
    2169  skb_copy_bits(skb, 0, new_skb->data, skb->len);  in dpaa_a050385_wa_skb()
    2170  skb_copy_header(new_skb, skb);  in dpaa_a050385_wa_skb()
    [all …]
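tx_skb_align_workaround() and dpaa_a050385_wa_skb() both exist to satisfy a DMA alignment restriction by copying the frame into a buffer whose data start is aligned. A condensed sketch of that shape for a linear skb; MY_TX_ALIGN is an assumed constant, not either driver's value:

#include <linux/kernel.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

#define MY_TX_ALIGN 16          /* assumed controller alignment requirement */

/* Copy a (linear) TX frame into a new skb whose data start is aligned,
 * carry the metadata over, and release the original. */
static struct sk_buff *align_tx_skb(struct net_device *ndev, struct sk_buff *skb)
{
        struct sk_buff *new_skb;
        unsigned char *start;

        new_skb = netdev_alloc_skb(ndev, skb->len + MY_TX_ALIGN - 1);
        if (!new_skb)
                return NULL;

        start = PTR_ALIGN(new_skb->data, MY_TX_ALIGN);
        skb_reserve(new_skb, start - new_skb->data);    /* align data start */

        skb_copy_from_linear_data(skb, skb_put(new_skb, skb->len), skb->len);
        skb_copy_header(new_skb, skb);                  /* keep tstamps etc. */
        dev_kfree_skb_any(skb);
        return new_skb;
}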
/drivers/net/ethernet/mediatek/
  mtk_star_emac.c
    1276  struct sk_buff *curr_skb, *new_skb;  in mtk_star_rx() local
    1290  new_skb = curr_skb;  in mtk_star_rx()
    1297  new_skb = mtk_star_alloc_skb(ndev);  in mtk_star_rx()
    1298  if (!new_skb) {  in mtk_star_rx()
    1300  new_skb = curr_skb;  in mtk_star_rx()
    1304  new_dma_addr = mtk_star_dma_map_rx(priv, new_skb);  in mtk_star_rx()
    1307  dev_kfree_skb(new_skb);  in mtk_star_rx()
    1308  new_skb = curr_skb;  in mtk_star_rx()
    1331  desc_data.len = skb_tailroom(new_skb);  in mtk_star_rx()
    1332  desc_data.skb = new_skb;  in mtk_star_rx()
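mtk_star_rx() (like owl_emac_rx_process() below) never leaves a ring slot empty: if the replacement buffer cannot be allocated or mapped, the received frame is dropped and the current skb goes back into the ring. A hedged sketch of that fallback; the bookkeeping is simplified and the recycled buffer is assumed to keep its existing DMA mapping:

#include <linux/dma-mapping.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

/* Try to replace a just-received ring buffer.  On any failure the frame is
 * dropped and @curr_skb (with its existing mapping) is recycled so the ring
 * stays fully populated. */
static struct sk_buff *swap_rx_skb(struct net_device *ndev, struct device *dma_dev,
                                   struct sk_buff *curr_skb, unsigned int buf_len,
                                   dma_addr_t *new_dma, bool *deliver_curr)
{
        struct sk_buff *new_skb;
        dma_addr_t mapping;

        new_skb = netdev_alloc_skb(ndev, buf_len);
        if (unlikely(!new_skb))
                goto reuse;

        mapping = dma_map_single(dma_dev, new_skb->data, buf_len, DMA_FROM_DEVICE);
        if (dma_mapping_error(dma_dev, mapping)) {
                dev_kfree_skb(new_skb);
                goto reuse;
        }

        *new_dma = mapping;
        *deliver_curr = true;           /* curr_skb goes up the stack */
        return new_skb;

reuse:
        ndev->stats.rx_dropped++;
        *deliver_curr = false;          /* frame dropped, buffer recycled */
        return curr_skb;
}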
/drivers/net/ppp/
  ppp_generic.c
    1701  struct sk_buff *new_skb;  in pad_compress_skb() local
    1707  new_skb = alloc_skb(new_skb_size, GFP_ATOMIC);  in pad_compress_skb()
    1708  if (!new_skb) {  in pad_compress_skb()
    1714  skb_reserve(new_skb,  in pad_compress_skb()
    1719  new_skb->data, skb->len + 2,  in pad_compress_skb()
    1723  skb = new_skb;  in pad_compress_skb()
    1728  consume_skb(new_skb);  in pad_compress_skb()
    1729  new_skb = skb;  in pad_compress_skb()
    1742  consume_skb(new_skb);  in pad_compress_skb()
    1743  new_skb = NULL;  in pad_compress_skb()
    [all …]
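pad_compress_skb() compresses into a scratch skb and falls back to the original frame when the compressor cannot shrink the data. A stripped-down sketch of that decision; my_compress() is a placeholder, not the real PPP compressor hook, and the error path for a hard compressor failure is left out:

#include <linux/skbuff.h>

/* Placeholder compressor: returns the compressed length, or 0 if the data
 * did not shrink (send the original instead). */
typedef int (*my_compress_fn)(const void *in, int in_len, void *out, int out_len);

static struct sk_buff *compress_or_keep(struct sk_buff *skb, my_compress_fn my_compress)
{
        struct sk_buff *new_skb;
        int out_room = skb->len + 64;   /* assumed slack for headers/expansion */
        int len;

        new_skb = alloc_skb(out_room, GFP_ATOMIC);
        if (!new_skb)
                return skb;                     /* send uncompressed */

        len = my_compress(skb->data, skb->len,
                          skb_put(new_skb, out_room), out_room);
        if (len > 0) {
                skb_trim(new_skb, len);         /* keep only the output */
                consume_skb(skb);
                return new_skb;
        }

        consume_skb(new_skb);                   /* compressor gave up */
        return skb;
}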
  pptp.c
    181  struct sk_buff *new_skb = skb_realloc_headroom(skb, max_headroom);  in pptp_xmit() local
    182  if (!new_skb) {  in pptp_xmit()
    187  skb_set_owner_w(new_skb, skb->sk);  in pptp_xmit()
    189  skb = new_skb;  in pptp_xmit()
/drivers/net/ethernet/sun/
  sunbmac.c
    825  struct sk_buff *new_skb;  in bigmac_rx() local
    828  new_skb = big_mac_alloc_skb(RX_BUF_ALLOC_SIZE, GFP_ATOMIC);  in bigmac_rx()
    829  if (new_skb == NULL) {  in bigmac_rx()
    837  bp->rx_skbs[elem] = new_skb;  in bigmac_rx()
    838  skb_put(new_skb, ETH_FRAME_LEN);  in bigmac_rx()
    839  skb_reserve(new_skb, 34);  in bigmac_rx()
    842  new_skb->data,  in bigmac_rx()
/drivers/net/ethernet/3com/
  typhoon.c
    1632  struct sk_buff *skb, *new_skb;  in typhoon_rx() local
    1662  (new_skb = netdev_alloc_skb(tp->dev, pkt_len + 2)) != NULL) {  in typhoon_rx()
    1663  skb_reserve(new_skb, 2);  in typhoon_rx()
    1666  skb_copy_to_linear_data(new_skb, skb->data, pkt_len);  in typhoon_rx()
    1670  skb_put(new_skb, pkt_len);  in typhoon_rx()
    1673  new_skb = skb;  in typhoon_rx()
    1674  skb_put(new_skb, pkt_len);  in typhoon_rx()
    1679  new_skb->protocol = eth_type_trans(new_skb, tp->dev);  in typhoon_rx()
    1686  new_skb->ip_summed = CHECKSUM_UNNECESSARY;  in typhoon_rx()
    1688  skb_checksum_none_assert(new_skb);  in typhoon_rx()
    [all …]
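typhoon_rx() is a classic copybreak receive: small frames are copied into a compact skb so the large ring buffer can be reused in place, while larger frames hand the ring buffer itself to the stack. A reduced sketch; the threshold value is an assumption:

#include <linux/etherdevice.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

#define MY_RX_COPYBREAK 256     /* assumed copy/no-copy threshold */

/* Return the skb to pass up the stack.  *ring_skb_consumed tells the caller
 * whether the ring buffer was handed up (and its slot must be refilled). */
static struct sk_buff *rx_copybreak(struct net_device *ndev,
                                    struct sk_buff *ring_skb,
                                    unsigned int pkt_len,
                                    bool *ring_skb_consumed)
{
        struct sk_buff *new_skb;

        if (pkt_len < MY_RX_COPYBREAK &&
            (new_skb = netdev_alloc_skb_ip_align(ndev, pkt_len))) {
                skb_copy_to_linear_data(new_skb, ring_skb->data, pkt_len);
                skb_put(new_skb, pkt_len);
                *ring_skb_consumed = false;     /* ring buffer stays in place */
        } else {
                new_skb = ring_skb;
                skb_put(new_skb, pkt_len);
                *ring_skb_consumed = true;      /* slot needs a fresh buffer */
        }

        new_skb->protocol = eth_type_trans(new_skb, ndev);
        return new_skb;
}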
/drivers/net/ethernet/actions/
  owl-emac.c
    752  struct sk_buff *curr_skb, *new_skb;  in owl_emac_rx_process() local
    815  new_skb = owl_emac_alloc_skb(netdev);  in owl_emac_rx_process()
    816  if (unlikely(!new_skb))  in owl_emac_rx_process()
    819  new_dma = owl_emac_dma_map_rx(priv, new_skb);  in owl_emac_rx_process()
    821  dev_kfree_skb(new_skb);  in owl_emac_rx_process()
    844  new_skb = curr_skb;  in owl_emac_rx_process()
    850  ring->skbs[ring->head] = new_skb;  in owl_emac_rx_process()
/drivers/net/wireless/ath/ath6kl/
  txrx.c
    993  struct sk_buff *new_skb;  in aggr_slice_amsdu() local
    1014  new_skb = aggr_get_free_skb(p_aggr);  in aggr_slice_amsdu()
    1015  if (!new_skb) {  in aggr_slice_amsdu()
    1020  memcpy(new_skb->data, framep, frame_8023_len);  in aggr_slice_amsdu()
    1021  skb_put(new_skb, frame_8023_len);  in aggr_slice_amsdu()
    1022  if (ath6kl_wmi_dot3_2_dix(new_skb)) {  in aggr_slice_amsdu()
    1024  dev_kfree_skb(new_skb);  in aggr_slice_amsdu()
    1028  skb_queue_tail(&rxtid->q, new_skb);  in aggr_slice_amsdu()
/drivers/net/hippi/
  rrunner.c
    1405  struct sk_buff *new_skb;  in rr_start_xmit() local
    1417  if (!(new_skb = dev_alloc_skb(len + 8))) {  in rr_start_xmit()
    1422  skb_reserve(new_skb, 8);  in rr_start_xmit()
    1423  skb_put(new_skb, len);  in rr_start_xmit()
    1424  skb_copy_from_linear_data(skb, new_skb->data, len);  in rr_start_xmit()
    1426  skb = new_skb;  in rr_start_xmit()
/drivers/net/ethernet/amd/
  amd8111e.c
    687  struct sk_buff *skb, *new_skb;  in amd8111e_rx_poll() local
    733  new_skb = netdev_alloc_skb(dev, lp->rx_buff_len);  in amd8111e_rx_poll()
    734  if (!new_skb) {  in amd8111e_rx_poll()
    743  skb_reserve(new_skb, 2);  in amd8111e_rx_poll()
    748  lp->rx_skbuff[rx_index] = new_skb;  in amd8111e_rx_poll()
    750  new_skb->data,  in amd8111e_rx_poll()
/drivers/net/ethernet/ni/
  nixge.c
    598  struct sk_buff *skb, *new_skb;  in nixge_recv() local
    639  new_skb = netdev_alloc_skb_ip_align(ndev,  in nixge_recv()
    641  if (!new_skb)  in nixge_recv()
    644  cur_phys = dma_map_single(ndev->dev.parent, new_skb->data,  in nixge_recv()
    654  nixge_hw_dma_bd_set_offset(cur_p, (uintptr_t)new_skb);  in nixge_recv()
/drivers/s390/net/
  ctcm_fsms.c
    1379  struct sk_buff *new_skb;  in ctcmpc_chx_rx() local
    1405  new_skb = __dev_alloc_skb(ch->max_bufsize, GFP_ATOMIC);  in ctcmpc_chx_rx()
    1407  if (new_skb == NULL) {  in ctcmpc_chx_rx()
    1417  dev_kfree_skb_any(new_skb);  in ctcmpc_chx_rx()
    1421  skb_put_data(new_skb, skb->data, block_len);  in ctcmpc_chx_rx()
    1422  skb_queue_tail(&ch->io_queue, new_skb);  in ctcmpc_chx_rx()
    1426  skb_put_data(new_skb, skb->data, len);  in ctcmpc_chx_rx()
    1427  skb_queue_tail(&ch->io_queue, new_skb);  in ctcmpc_chx_rx()
/drivers/net/ethernet/hisilicon/hns/
  hns_enet.c
    214  struct sk_buff *new_skb = NULL;  in hns_nic_maybe_stop_tx() local
    224  new_skb = skb_copy(skb, GFP_ATOMIC);  in hns_nic_maybe_stop_tx()
    225  if (!new_skb)  in hns_nic_maybe_stop_tx()
    229  *out_skb = new_skb;  in hns_nic_maybe_stop_tx()
    247  struct sk_buff *new_skb = NULL;  in hns_nic_maybe_stop_tso() local
    265  new_skb = skb_copy(skb, GFP_ATOMIC);  in hns_nic_maybe_stop_tso()
    266  if (!new_skb)  in hns_nic_maybe_stop_tso()
    269  *out_skb = new_skb;  in hns_nic_maybe_stop_tso()
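hns_nic_maybe_stop_tx()/_tso() fall back to skb_copy() when a frame carries more fragments than the hardware can chain in one descriptor set; the copy is fully linear. A small sketch of that fallback with an assumed fragment limit:

#include <linux/errno.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

#define MY_MAX_TX_FRAGS 8       /* assumed hardware descriptor limit */

/* Replace *pskb with a linear private copy if it has too many fragments. */
static int maybe_linearize_tx(struct sk_buff **pskb)
{
        struct sk_buff *skb = *pskb, *new_skb;

        if (skb_shinfo(skb)->nr_frags + 1 <= MY_MAX_TX_FRAGS)
                return 0;                       /* fits the hardware as-is */

        new_skb = skb_copy(skb, GFP_ATOMIC);    /* linear, private copy */
        if (!new_skb)
                return -ENOMEM;

        dev_kfree_skb_any(skb);
        *pskb = new_skb;
        return 0;
}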
/drivers/net/ethernet/rdc/
  r6040.c
    513  struct sk_buff *skb_ptr, *new_skb;  in r6040_rx() local
    545  new_skb = netdev_alloc_skb(dev, MAX_BUF_SIZE);  in r6040_rx()
    546  if (!new_skb) {  in r6040_rx()
    565  descptr->skb_ptr = new_skb;  in r6040_rx()