
Searched refs:nskb (Results 1 – 25 of 40) sorted by relevance

/drivers/bluetooth/
hci_bcsp.c
180 struct sk_buff *nskb; in bcsp_prepare_pkt() local
232 nskb = alloc_skb((len + 6) * 2 + 2, GFP_ATOMIC); in bcsp_prepare_pkt()
233 if (!nskb) in bcsp_prepare_pkt()
236 hci_skb_pkt_type(nskb) = pkt_type; in bcsp_prepare_pkt()
238 bcsp_slip_msgdelim(nskb); in bcsp_prepare_pkt()
259 bcsp_slip_one_byte(nskb, hdr[i]); in bcsp_prepare_pkt()
267 bcsp_slip_one_byte(nskb, data[i]); in bcsp_prepare_pkt()
276 bcsp_slip_one_byte(nskb, (u8)((bcsp_txmsg_crc >> 8) & 0x00ff)); in bcsp_prepare_pkt()
277 bcsp_slip_one_byte(nskb, (u8)(bcsp_txmsg_crc & 0x00ff)); in bcsp_prepare_pkt()
280 bcsp_slip_msgdelim(nskb); in bcsp_prepare_pkt()
[all …]
hci_h5.c
104 struct sk_buff *nskb; in h5_link_control() local
106 nskb = alloc_skb(3, GFP_ATOMIC); in h5_link_control()
107 if (!nskb) in h5_link_control()
110 hci_skb_pkt_type(nskb) = HCI_3WIRE_LINK_PKT; in h5_link_control()
112 memcpy(skb_put(nskb, len), data, len); in h5_link_control()
114 skb_queue_tail(&h5->unrel, nskb); in h5_link_control()
622 struct sk_buff *nskb; in h5_prepare_pkt() local
637 nskb = alloc_skb((len + 6) * 2 + 2, GFP_ATOMIC); in h5_prepare_pkt()
638 if (!nskb) in h5_prepare_pkt()
641 hci_skb_pkt_type(nskb) = pkt_type; in h5_prepare_pkt()
[all …]
bfusb.c
468 struct sk_buff *nskb; in bfusb_send_frame() local
493 nskb = bt_skb_alloc(count + 32, GFP_ATOMIC); in bfusb_send_frame()
494 if (!nskb) { in bfusb_send_frame()
499 nskb->dev = (void *) data; in bfusb_send_frame()
508 memcpy(skb_put(nskb, 3), buf, 3); in bfusb_send_frame()
509 skb_copy_from_linear_data_offset(skb, sent, skb_put(nskb, size), size); in bfusb_send_frame()
516 if ((nskb->len % data->bulk_pkt_size) == 0) { in bfusb_send_frame()
519 memcpy(skb_put(nskb, 2), buf, 2); in bfusb_send_frame()
524 skb_queue_tail(&data->transmit_q, nskb); in bfusb_send_frame()
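
For reference, the three /drivers/bluetooth/ hits above share one shape: allocate a fresh nskb sized for the wire encoding, bail out if the atomic allocation fails, tag the HCI packet type, copy the payload in with skb_put(), and queue the result for transmit. A minimal sketch of that shape, not taken from any of the files; wire_len, pkt_type, data/len and out_q are placeholder names:

    struct sk_buff *nskb;

    nskb = alloc_skb(wire_len, GFP_ATOMIC);    /* atomic: may be called from IRQ/tasklet context */
    if (!nskb)
        return NULL;                           /* caller decides what to do with the original skb */

    hci_skb_pkt_type(nskb) = pkt_type;         /* record the HCI packet type in the skb control block */
    memcpy(skb_put(nskb, len), data, len);     /* append len payload bytes */
    skb_queue_tail(&out_q, nskb);              /* hand the new buffer to the transmit queue */
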
/drivers/net/wireless/ath/ath9k/
hif_usb.c
311 struct sk_buff *nskb = NULL; in __hif_usb_tx() local
331 nskb = __skb_dequeue(&hif_dev->tx.tx_skb_queue); in __hif_usb_tx()
334 BUG_ON(!nskb); in __hif_usb_tx()
341 *hdr++ = cpu_to_le16(nskb->len); in __hif_usb_tx()
344 memcpy(buf, nskb->data, nskb->len); in __hif_usb_tx()
345 tx_buf->len = nskb->len + 4; in __hif_usb_tx()
353 __skb_queue_tail(&tx_buf->skb_queue, nskb); in __hif_usb_tx()
530 struct sk_buff *nskb, *skb_pool[MAX_PKT_NUM_IN_TRANSFER]; in ath9k_hif_usb_rx_stream() local
596 nskb = __dev_alloc_skb(pkt_len + 32, GFP_ATOMIC); in ath9k_hif_usb_rx_stream()
597 if (!nskb) { in ath9k_hif_usb_rx_stream()
[all …]
/drivers/isdn/mISDN/
dsp_core.c
285 struct sk_buff *nskb; in dsp_control_req() local
591 nskb = _alloc_mISDN_skb(PH_CONTROL_IND, MISDN_ID_ANY, in dsp_control_req()
593 if (nskb) { in dsp_control_req()
595 if (dsp->up->send(dsp->up, nskb)) in dsp_control_req()
596 dev_kfree_skb(nskb); in dsp_control_req()
598 dev_kfree_skb(nskb); in dsp_control_req()
743 struct sk_buff *nskb; in dsp_function() local
749 nskb = _alloc_mISDN_skb(PH_CONTROL_IND, in dsp_function()
752 if (nskb) { in dsp_function()
755 dsp->up, nskb)) in dsp_function()
[all …]
dsp_cmx.c
1315 struct sk_buff *nskb, *txskb; local
1354 nskb = mI_alloc_skb(len + preload, GFP_ATOMIC);
1355 if (!nskb) {
1361 hh = mISDN_HEAD_P(nskb);
1370 d = skb_put(nskb, preload + len); /* result */
1584 skb_queue_tail(&dsp->sendq, nskb);
1598 memcpy(skb_put(txskb, len), nskb->data + preload,
1609 dsp_change_volume(nskb, dsp->tx_volume);
1612 dsp_pipeline_process_tx(&dsp->pipeline, nskb->data,
1613 nskb->len);
[all …]
dsp_tones.c
440 struct sk_buff *nskb; in dsp_tone_hw_message() local
443 nskb = _alloc_mISDN_skb(PH_CONTROL_REQ, in dsp_tone_hw_message()
446 if (nskb) { in dsp_tone_hw_message()
448 if (dsp->ch.recv(dsp->ch.peer, nskb)) in dsp_tone_hw_message()
449 dev_kfree_skb(nskb); in dsp_tone_hw_message()
451 dev_kfree_skb(nskb); in dsp_tone_hw_message()
l1oip_core.c
365 struct sk_buff *nskb; in l1oip_socket_recv() local
396 nskb = mI_alloc_skb((remotecodec == 3) ? (len << 1) : len, GFP_ATOMIC); in l1oip_socket_recv()
397 if (!nskb) { in l1oip_socket_recv()
401 p = skb_put(nskb, (remotecodec == 3) ? (len << 1) : len); in l1oip_socket_recv()
414 dch->rx_skb = nskb; in l1oip_socket_recv()
444 hc->chan[channel].disorder_skb = nskb; in l1oip_socket_recv()
445 nskb = skb; in l1oip_socket_recv()
451 if (nskb) in l1oip_socket_recv()
453 queue_ch_frame(&bch->ch, PH_DATA_IND, rx_counter, nskb); in l1oip_socket_recv()
layer2.c
247 struct sk_buff *nskb = skb; in ph_data_confirm() local
252 nskb = skb_dequeue(&l2->down_queue); in ph_data_confirm()
253 if (nskb) { in ph_data_confirm()
254 l2->down_id = mISDN_HEAD_ID(nskb); in ph_data_confirm()
255 if (l2down_skb(l2, nskb)) { in ph_data_confirm()
256 dev_kfree_skb(nskb); in ph_data_confirm()
272 nskb = skb_dequeue(&l2->down_queue); in ph_data_confirm()
273 if (nskb) { in ph_data_confirm()
274 l2->down_id = mISDN_HEAD_ID(nskb); in ph_data_confirm()
275 if (l2down_skb(l2, nskb)) { in ph_data_confirm()
[all …]
/drivers/isdn/i4l/
isdn_v110.c
463 struct sk_buff *nskb; in isdn_v110_encode() local
483 if (!(nskb = dev_alloc_skb(size + v->skbres + sizeof(int)))) { in isdn_v110_encode()
487 skb_reserve(nskb, v->skbres + sizeof(int)); in isdn_v110_encode()
489 memcpy(skb_put(nskb, v->framelen), v->OnlineFrame, v->framelen); in isdn_v110_encode()
490 *((int *)skb_push(nskb, sizeof(int))) = 0; in isdn_v110_encode()
491 return nskb; in isdn_v110_encode()
495 rbuf = skb_put(nskb, size); in isdn_v110_encode()
512 skb_trim(nskb, olen); in isdn_v110_encode()
513 *((int *)skb_push(nskb, sizeof(int))) = rlen; in isdn_v110_encode()
514 return nskb; in isdn_v110_encode()
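
The isdn_v110.c hits above show the usual reserve/put/push discipline on a freshly allocated buffer: reserve headroom first, append the encoded payload with skb_put(), then prepend a length word with skb_push(). A condensed sketch with hypothetical names (hdr_room, payload, payload_len, rlen):

    struct sk_buff *nskb;

    nskb = dev_alloc_skb(payload_len + hdr_room + sizeof(int));
    if (!nskb)
        return NULL;

    skb_reserve(nskb, hdr_room + sizeof(int));                 /* leave headroom for later pushes */
    memcpy(skb_put(nskb, payload_len), payload, payload_len);  /* append the encoded frame */
    *((int *)skb_push(nskb, sizeof(int))) = rlen;              /* prepend the consumed-length word */
    return nskb;
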
isdn_common.c
2006 struct sk_buff *nskb = NULL; in isdn_writebuf_skb_stub() local
2012 nskb = isdn_v110_encode(dev->v110[idx], skb); in isdn_writebuf_skb_stub()
2014 if (!nskb) in isdn_writebuf_skb_stub()
2016 v110_ret = *((int *)nskb->data); in isdn_writebuf_skb_stub()
2017 skb_pull(nskb, sizeof(int)); in isdn_writebuf_skb_stub()
2018 if (!nskb->len) { in isdn_writebuf_skb_stub()
2019 dev_kfree_skb(nskb); in isdn_writebuf_skb_stub()
2024 ret = dev->drv[drvidx]->interface->writebuf_skb(drvidx, chan, ack, nskb); in isdn_writebuf_skb_stub()
2067 dev_kfree_skb(nskb); in isdn_writebuf_skb_stub()
/drivers/infiniband/sw/rxe/
rxe_net.c
430 struct sk_buff *nskb; in send() local
436 nskb = skb_clone(skb, GFP_ATOMIC); in send()
437 if (!nskb) in send()
440 nskb->destructor = rxe_skb_tx_dtor; in send()
441 nskb->sk = pkt->qp->sk->sk; in send()
444 err = ip_local_out(dev_net(skb_dst(skb)->dev), nskb->sk, nskb); in send()
446 err = ip6_local_out(dev_net(skb_dst(skb)->dev), nskb->sk, nskb); in send()
449 kfree_skb(nskb); in send()
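
rxe_net.c keeps the original skb with the queue pair and sends a clone down the IP stack; the clone carries a destructor and the QP's socket so transmit accounting is released when the networking core frees it. A condensed sketch of those lines (error handling simplified; the names come from the hits above):

    struct sk_buff *nskb;
    int err;

    nskb = skb_clone(skb, GFP_ATOMIC);             /* share the data, get separate skb metadata */
    if (!nskb)
        return -ENOMEM;

    nskb->destructor = rxe_skb_tx_dtor;            /* release the QP's tx accounting on free */
    nskb->sk = pkt->qp->sk->sk;                    /* charge the clone to the QP's socket */

    err = ip_local_out(dev_net(skb_dst(skb)->dev), nskb->sk, nskb);
    if (err)
        kfree_skb(nskb);                           /* drop the clone if the stack refused it (check simplified) */
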
/drivers/net/ethernet/sun/
sunvnet_common.c
1043 struct sk_buff *nskb; in vnet_skb_shape() local
1069 nskb = alloc_and_align_skb(skb->dev, len); in vnet_skb_shape()
1070 if (!nskb) { in vnet_skb_shape()
1074 skb_reserve(nskb, VNET_PACKET_SKIP); in vnet_skb_shape()
1076 nskb->protocol = skb->protocol; in vnet_skb_shape()
1078 skb_set_mac_header(nskb, offset); in vnet_skb_shape()
1080 skb_set_network_header(nskb, offset); in vnet_skb_shape()
1082 skb_set_transport_header(nskb, offset); in vnet_skb_shape()
1085 nskb->csum_offset = skb->csum_offset; in vnet_skb_shape()
1086 nskb->ip_summed = skb->ip_summed; in vnet_skb_shape()
[all …]
/drivers/net/irda/
stir4200.c
319 struct sk_buff *skb, *nskb; in fir_eof() local
341 nskb = dev_alloc_skb(len + 1); in fir_eof()
342 if (unlikely(!nskb)) { in fir_eof()
346 skb_reserve(nskb, 1); in fir_eof()
347 skb = nskb; in fir_eof()
348 skb_copy_to_linear_data(nskb, rx_buff->data, len); in fir_eof()
350 nskb = dev_alloc_skb(rx_buff->truesize); in fir_eof()
351 if (unlikely(!nskb)) { in fir_eof()
355 skb_reserve(nskb, 1); in fir_eof()
357 rx_buff->skb = nskb; in fir_eof()
[all …]
/drivers/net/ipvlan/
ipvlan_core.c
195 struct sk_buff *skb, *nskb; in ipvlan_process_multicast() local
231 nskb = skb_clone(skb, GFP_ATOMIC); in ipvlan_process_multicast()
232 if (!nskb) in ipvlan_process_multicast()
235 nskb->pkt_type = pkt_type; in ipvlan_process_multicast()
236 nskb->dev = ipvlan->dev; in ipvlan_process_multicast()
238 ret = dev_forward_skb(ipvlan->dev, nskb); in ipvlan_process_multicast()
240 ret = netif_rx(nskb); in ipvlan_process_multicast()
632 struct sk_buff *nskb = skb_clone(skb, GFP_ATOMIC); in ipvlan_handle_mode_l2() local
640 if (nskb) { in ipvlan_handle_mode_l2()
641 ipvlan_skb_crossing_ns(nskb, NULL); in ipvlan_handle_mode_l2()
[all …]
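
ipvlan_process_multicast() (like macvlan_broadcast() further down) fans one multicast frame out to many slaves by cloning it per receiver: the clone shares the payload but gets its own pkt_type and dev before delivery. A condensed per-receiver sketch; the surrounding loop and accounting are elided, and "local" is a placeholder flag:

    struct sk_buff *nskb = skb_clone(skb, GFP_ATOMIC);
    if (!nskb)
        return;                                    /* skip this receiver, the original skb is untouched */

    nskb->pkt_type = pkt_type;                     /* e.g. PACKET_MULTICAST or PACKET_BROADCAST */
    nskb->dev = ipvlan->dev;                       /* retarget the clone at the slave device */
    if (local)
        dev_forward_skb(ipvlan->dev, nskb);        /* loop back into the slave's own rx path */
    else
        netif_rx(nskb);                            /* deliver via the normal receive path */
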
/drivers/net/xen-netback/
netback.c
365 struct sk_buff *nskb) in xenvif_get_requests() argument
389 shinfo = skb_shinfo(nskb); in xenvif_get_requests()
402 skb_shinfo(skb)->frag_list = nskb; in xenvif_get_requests()
796 struct sk_buff *skb, *nskb; in xenvif_tx_build_gops() local
923 nskb = NULL; in xenvif_tx_build_gops()
928 nskb = xenvif_alloc_skb(0); in xenvif_tx_build_gops()
929 if (unlikely(nskb == NULL)) { in xenvif_tx_build_gops()
946 kfree_skb(nskb); in xenvif_tx_build_gops()
1014 frag_overflow, nskb); in xenvif_tx_build_gops()
1038 struct sk_buff *nskb = skb_shinfo(skb)->frag_list; in xenvif_handle_frag_list() local
[all …]
/drivers/net/
macvlan.c
239 struct sk_buff *nskb; in macvlan_broadcast() local
257 nskb = skb_clone(skb, GFP_ATOMIC); in macvlan_broadcast()
258 if (likely(nskb)) in macvlan_broadcast()
260 nskb, vlan, eth, in macvlan_broadcast()
262 netif_rx_ni(nskb); in macvlan_broadcast()
319 struct sk_buff *nskb; in macvlan_broadcast_enqueue() local
322 nskb = skb_clone(skb, GFP_ATOMIC); in macvlan_broadcast_enqueue()
323 if (!nskb) in macvlan_broadcast_enqueue()
326 MACVLAN_SKB_CB(nskb)->src = src; in macvlan_broadcast_enqueue()
332 __skb_queue_tail(&port->bc_queue, nskb); in macvlan_broadcast_enqueue()
[all …]
xen-netfront.c
578 struct sk_buff *nskb; in xennet_start_xmit() local
612 nskb = skb_copy(skb, GFP_ATOMIC); in xennet_start_xmit()
613 if (!nskb) in xennet_start_xmit()
616 skb = nskb; in xennet_start_xmit()
894 struct sk_buff *nskb; in xennet_fill_frags() local
896 while ((nskb = __skb_dequeue(list))) { in xennet_fill_frags()
899 skb_frag_t *nfrag = &skb_shinfo(nskb)->frags[0]; in xennet_fill_frags()
912 skb_shinfo(nskb)->nr_frags = 0; in xennet_fill_frags()
913 kfree_skb(nskb); in xennet_fill_frags()
macsec.c
679 struct sk_buff *nskb = skb_copy_expand(skb, in macsec_encrypt() local
683 if (likely(nskb)) { in macsec_encrypt()
685 skb = nskb; in macsec_encrypt()
1045 struct sk_buff *nskb; in handle_not_macsec() local
1057 nskb = skb_clone(skb, GFP_ATOMIC); in handle_not_macsec()
1058 if (!nskb) in handle_not_macsec()
1061 nskb->dev = macsec->secy.netdev; in handle_not_macsec()
1063 ret = netif_rx(nskb); in handle_not_macsec()
1270 struct sk_buff *nskb; in macsec_handle_frame() local
1288 nskb = skb_clone(skb, GFP_ATOMIC); in macsec_handle_frame()
[all …]
/drivers/isdn/capi/
capi.c
385 struct sk_buff *nskb; in gen_data_b3_resp_for() local
386 nskb = alloc_skb(CAPI_DATA_B3_RESP_LEN, GFP_KERNEL); in gen_data_b3_resp_for()
387 if (nskb) { in gen_data_b3_resp_for()
389 unsigned char *s = skb_put(nskb, CAPI_DATA_B3_RESP_LEN); in gen_data_b3_resp_for()
398 return nskb; in gen_data_b3_resp_for()
405 struct sk_buff *nskb; in handle_recv_skb() local
439 nskb = gen_data_b3_resp_for(mp, skb); in handle_recv_skb()
440 if (!nskb) { in handle_recv_skb()
447 errcode = capi20_put_message(mp->ap, nskb); in handle_recv_skb()
457 kfree_skb(nskb); in handle_recv_skb()
/drivers/s390/net/
ctcm_main.c
471 struct sk_buff *nskb; in ctcm_transmit_skb() local
518 nskb = alloc_skb(skb->len, GFP_ATOMIC | GFP_DMA); in ctcm_transmit_skb()
519 if (!nskb) { in ctcm_transmit_skb()
525 memcpy(skb_put(nskb, skb->len), skb->data, skb->len); in ctcm_transmit_skb()
526 atomic_inc(&nskb->users); in ctcm_transmit_skb()
529 skb = nskb; in ctcm_transmit_skb()
670 struct sk_buff *nskb; in ctcmpc_transmit_skb() local
727 nskb = __dev_alloc_skb(skb->len, GFP_ATOMIC | GFP_DMA); in ctcmpc_transmit_skb()
728 if (!nskb) { in ctcmpc_transmit_skb()
731 memcpy(skb_put(nskb, skb->len), skb->data, skb->len); in ctcmpc_transmit_skb()
[all …]
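
ctcm_transmit_skb() and ctcmpc_transmit_skb() above rebuild the outgoing frame in a fresh buffer when the original cannot be handed to the channel as-is; the allocation adds GFP_DMA so the copy lands in memory the s390 channel hardware can address. Condensed sketch (the extra reference mirrors the atomic_inc(&nskb->users) shown above, as this tree predates the refcount_t conversion):

    struct sk_buff *nskb;

    nskb = alloc_skb(skb->len, GFP_ATOMIC | GFP_DMA);        /* DMA-capable copy for the channel */
    if (!nskb)
        goto drop;                                           /* placeholder error path */

    memcpy(skb_put(nskb, skb->len), skb->data, skb->len);    /* duplicate the payload */
    atomic_inc(&nskb->users);                                /* hold an extra reference across the I/O */
    skb = nskb;                                              /* transmit the copy from here on */
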
netiucv.c
1188 struct sk_buff *nskb = skb; in netiucv_transmit_skb() local
1197 nskb = alloc_skb(skb->len + NETIUCV_HDRLEN + in netiucv_transmit_skb()
1199 if (!nskb) { in netiucv_transmit_skb()
1204 skb_reserve(nskb, NETIUCV_HDRLEN); in netiucv_transmit_skb()
1205 memcpy(skb_put(nskb, skb->len), in netiucv_transmit_skb()
1213 header.next = nskb->len + NETIUCV_HDRLEN; in netiucv_transmit_skb()
1214 memcpy(skb_push(nskb, NETIUCV_HDRLEN), &header, NETIUCV_HDRLEN); in netiucv_transmit_skb()
1216 memcpy(skb_put(nskb, NETIUCV_HDRLEN), &header, NETIUCV_HDRLEN); in netiucv_transmit_skb()
1224 nskb->data, nskb->len); in netiucv_transmit_skb()
1238 dev_kfree_skb(nskb); in netiucv_transmit_skb()
[all …]
/drivers/isdn/hisax/
isdnl1.c
206 struct sk_buff *skb, *nskb; in DChannel_proc_rcv() local
238 if ((nskb = skb_clone(skb, GFP_ATOMIC))) in DChannel_proc_rcv()
239 stptr->l1.l1l2(stptr, PH_DATA | INDICATION, nskb); in DChannel_proc_rcv()
246 if ((nskb = skb_clone(skb, GFP_ATOMIC))) in DChannel_proc_rcv()
247 stptr->l1.l1tei(stptr, PH_DATA | INDICATION, nskb); in DChannel_proc_rcv()
/drivers/scsi/fcoe/
fcoe_transport.c
360 struct sk_buff *nskb; in fcoe_start_io() local
363 nskb = skb_clone(skb, GFP_ATOMIC); in fcoe_start_io()
364 if (!nskb) in fcoe_start_io()
366 rc = dev_queue_xmit(nskb); in fcoe_start_io()
/drivers/isdn/isdnloop/
isdnloop.c
404 struct sk_buff *nskb; in isdnloop_sendbuf() local
417 nskb = dev_alloc_skb(skb->len); in isdnloop_sendbuf()
418 if (nskb) { in isdnloop_sendbuf()
420 skb_put(nskb, len), len); in isdnloop_sendbuf()
421 skb_queue_tail(&card->bqueue[channel], nskb); in isdnloop_sendbuf()
