Home
last modified time | relevance | path

Searched refs:fragments (Results 1 – 15 of 15) sorted by relevance

/drivers/of/
overlay.c:73 struct fragment *fragments; member
166 struct fragment *fragment = &ovcs->fragments[i]; in overlay_notify()
230 fragment = &ovcs->fragments[k]; in dup_and_fixup_symbol_prop()
650 fragment = &ovcs->fragments[i]; in build_changeset()
664 fragment = &ovcs->fragments[ovcs->count - 1]; in build_changeset()
735 struct fragment *fragments; in init_overlay_changeset() local
779 fragments = kcalloc(cnt, sizeof(*fragments), GFP_KERNEL); in init_overlay_changeset()
780 if (!fragments) { in init_overlay_changeset()
791 fragment = &fragments[cnt]; in init_overlay_changeset()
810 fragment = &fragments[cnt]; in init_overlay_changeset()
[all …]
/drivers/net/wireless/intel/ipw2x00/
libipw_tx.c:172 if (txb->fragments[i]) in libipw_txb_free()
173 dev_kfree_skb_any(txb->fragments[i]); in libipw_txb_free()
192 txb->fragments[i] = __dev_alloc_skb(txb_size + headroom, in libipw_alloc_txb()
194 if (unlikely(!txb->fragments[i])) { in libipw_alloc_txb()
198 skb_reserve(txb->fragments[i], headroom); in libipw_alloc_txb()
202 dev_kfree_skb_any(txb->fragments[i--]); in libipw_alloc_txb()
427 skb_frag = txb->fragments[0]; in libipw_xmit()
452 skb_frag = txb->fragments[i]; in libipw_xmit()
ipw2200.c:10095 txb->fragments[0]->data; in ipw_tx_skb()
10219 txb->fragments[i]->len - hdr_len); in ipw_tx_skb()
10222 txb->fragments[i]->len - hdr_len); in ipw_tx_skb()
10223 printk_buf(IPW_DL_TX, txb->fragments[i]->data + hdr_len, in ipw_tx_skb()
10224 txb->fragments[i]->len - hdr_len); in ipw_tx_skb()
10229 txb->fragments[i]->data + hdr_len, in ipw_tx_skb()
10230 txb->fragments[i]->len - hdr_len, in ipw_tx_skb()
10233 cpu_to_le16(txb->fragments[i]->len - hdr_len); in ipw_tx_skb()
10242 remaining_bytes += txb->fragments[j]->len - hdr_len; in ipw_tx_skb()
10250 int size = txb->fragments[j]->len - hdr_len; in ipw_tx_skb()
[all …]
libipw.h:493 struct sk_buff *fragments[0]; member
ipw2100.c:3109 fragments[0]->data; in ipw2100_tx_send_data()
3165 fragments[i]->len - LIBIPW_3ADDR_LEN; in ipw2100_tx_send_data()
3169 txb->fragments[i]-> in ipw2100_tx_send_data()
3405 IPW_DEBUG_TX("Sending fragment (%d bytes):\n", txb->fragments[0]->len); in ipw2100_tx()
3406 printk_buf(IPW_DL_TX, txb->fragments[0]->data, txb->fragments[0]->len); in ipw2100_tx()
/drivers/staging/vc04_services/interface/vchiq_arm/
vchiq_2835_arm.c:522 char *fragments; in create_pagelist() local
532 fragments = g_free_fragments; in create_pagelist()
533 WARN_ON(fragments == NULL); in create_pagelist()
537 (fragments - g_fragments_base) / g_fragments_size; in create_pagelist()
564 char *fragments = g_fragments_base + in free_pagelist() local
580 fragments, in free_pagelist()
589 fragments + g_cache_line_size, in free_pagelist()
595 *(char **)fragments = g_free_fragments; in free_pagelist()
596 g_free_fragments = fragments; in free_pagelist()
/drivers/staging/rtl8192u/ieee80211/
ieee80211_tx.c:242 txb->fragments[i] = dev_alloc_skb(txb_size); in ieee80211_alloc_txb()
243 if (unlikely(!txb->fragments[i])) { in ieee80211_alloc_txb()
247 memset(txb->fragments[i]->cb, 0, sizeof(txb->fragments[i]->cb)); in ieee80211_alloc_txb()
251 dev_kfree_skb_any(txb->fragments[i--]); in ieee80211_alloc_txb()
704 skb_frag = txb->fragments[i]; in ieee80211_xmit()
796 skb_put_data(txb->fragments[0], skb->data, skb->len); in ieee80211_xmit()
802 struct cb_desc *tcb_desc = (struct cb_desc *)(txb->fragments[0]->cb + MAX_DEV_ADDR_SIZE); in ieee80211_xmit()
814 ieee80211_tx_query_agg_cap(ieee, txb->fragments[0], tcb_desc); in ieee80211_xmit()
817 ieee80211_query_protectionmode(ieee, tcb_desc, txb->fragments[0]); in ieee80211_xmit()
818 ieee80211_query_seqnum(ieee, txb->fragments[0], header.addr1); in ieee80211_xmit()
ieee80211_softmac.c:2039 tcb_desc = (struct cb_desc *)(txb->fragments[0]->cb + MAX_DEV_ADDR_SIZE); in ieee80211_softmac_xmit()
2059 skb_queue_tail(&ieee->skb_drv_aggQ[queue_index], txb->fragments[i]); in ieee80211_softmac_xmit()
2061 skb_queue_tail(&ieee->skb_waitQ[queue_index], txb->fragments[i]); in ieee80211_softmac_xmit()
2064 ieee->softmac_data_hard_start_xmit(txb->fragments[i], in ieee80211_softmac_xmit()
2087 ieee->softmac_data_hard_start_xmit(ieee->tx_pending.txb->fragments[i], in ieee80211_resume_tx()
ieee80211.h:1024 struct sk_buff *fragments[0]; member
/drivers/staging/rtl8192e/
rtllib_tx.c:218 txb->fragments[i] = dev_alloc_skb(txb_size); in rtllib_alloc_txb()
219 if (unlikely(!txb->fragments[i])) { in rtllib_alloc_txb()
223 memset(txb->fragments[i]->cb, 0, sizeof(txb->fragments[i]->cb)); in rtllib_alloc_txb()
227 dev_kfree_skb_any(txb->fragments[i--]); in rtllib_alloc_txb()
611 skb_put_data(txb->fragments[0], skb->data, skb->len); in rtllib_xmit_inter()
781 skb_frag = txb->fragments[i]; in rtllib_xmit_inter()
879 skb_put_data(txb->fragments[0], skb->data, skb->len); in rtllib_xmit_inter()
885 (txb->fragments[0]->cb + MAX_DEV_ADDR_SIZE); in rtllib_xmit_inter()
932 rtllib_tx_query_agg_cap(ieee, txb->fragments[0], in rtllib_xmit_inter()
937 txb->fragments[0]); in rtllib_xmit_inter()
rtllib_softmac.c:2463 tcb_desc = (struct cb_desc *)(txb->fragments[0]->cb + in rtllib_softmac_xmit()
2482 txb->fragments[i]); in rtllib_softmac_xmit()
2484 kfree_skb(txb->fragments[i]); in rtllib_softmac_xmit()
2487 txb->fragments[i], in rtllib_softmac_xmit()
rtllib.h:862 struct sk_buff *fragments[0]; member
/drivers/staging/rtl8712/
ieee80211.h:600 struct sk_buff *fragments[0]; member
/drivers/staging/rtl8723bs/include/
ieee80211.h:761 struct sk_buff *fragments[0]; member
/drivers/net/ethernet/nvidia/
forcedeth.c:2215 unsigned int fragments = skb_shinfo(skb)->nr_frags; in nv_start_xmit() local
2230 for (i = 0; i < fragments; i++) { in nv_start_xmit()
2279 for (i = 0; i < fragments; i++) { in nv_start_xmit()
2370 unsigned int fragments = skb_shinfo(skb)->nr_frags; in nv_start_xmit_optimized() local
2386 for (i = 0; i < fragments; i++) { in nv_start_xmit_optimized()
2437 for (i = 0; i < fragments; i++) { in nv_start_xmit_optimized()