/drivers/net/ethernet/hisilicon/hns3/

hns3_trace.h
    27   __field(unsigned int, gso_type)
    38   __entry->gso_type = skb_shinfo(skb)->gso_type;
    51   __entry->gso_type, __entry->fraglist, __entry->nr_frags,

hns3_enet.c
    1181 if (skb_shinfo(skb)->gso_type & (SKB_GSO_GRE |    in hns3_set_tso()
    1202 if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4) {    in hns3_set_tso()
    1219 if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM)    in hns3_set_tso()
    3663 skb_shinfo(skb)->gso_type |= SKB_GSO_TCP_ECN;    in hns3_gro_complete()
    3666 skb_shinfo(skb)->gso_type |= SKB_GSO_TCP_FIXEDID;    in hns3_gro_complete()
    3983 skb_shinfo(skb)->gso_type = SKB_GSO_TCPV4;    in hns3_set_gro_and_checksum()
    3985 skb_shinfo(skb)->gso_type = SKB_GSO_TCPV6;    in hns3_set_gro_and_checksum()

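The hns3_set_tso() hits above show the usual TX-side pattern: the driver's TSO setup routine tests gso_type bits to learn whether a GSO skb is tunnel-encapsulated and whether the outer header checksum must also be offloaded. A minimal sketch of that pattern follows; only skb_is_gso(), skb_shinfo() and the SKB_GSO_* flags are real kernel APIs, while the helper name and hardware flag encoding are made up for illustration.

/* Hypothetical helper; not hns3 code. */
#include <linux/bits.h>
#include <linux/skbuff.h>

#define EX_HW_TSO_TUNNEL	BIT(0)	/* invented hardware flag */
#define EX_HW_OUTER_CSUM	BIT(1)	/* invented hardware flag */

static u32 example_tso_hw_flags(struct sk_buff *skb)
{
	u32 flags = 0;

	if (!skb_is_gso(skb))
		return 0;

	/* Tunnelled GSO: the inner headers carry the segmented payload. */
	if (skb_shinfo(skb)->gso_type & (SKB_GSO_GRE | SKB_GSO_GRE_CSUM |
					 SKB_GSO_UDP_TUNNEL |
					 SKB_GSO_UDP_TUNNEL_CSUM))
		flags |= EX_HW_TSO_TUNNEL;

	/* Outer UDP checksum must be produced for each generated segment. */
	if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM)
		flags |= EX_HW_OUTER_CSUM;

	return flags;
}
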
/drivers/net/ethernet/sfc/

tx.h
    36   !(skb_shinfo(skb)->gso_type & SKB_GSO_PARTIAL) &&    in efx_tx_csum_type_skb()
    37   (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM))    in efx_tx_csum_type_skb()

ef100_tx.c
    190  bool gso_partial = skb_shinfo(skb)->gso_type & SKB_GSO_PARTIAL;    in ef100_make_tso_desc()
    203  if (skb_shinfo(skb)->gso_type & SKB_GSO_TCP_FIXEDID)    in ef100_make_tso_desc()
    216  if (skb_shinfo(skb)->gso_type &    in ef100_make_tso_desc()
    224  outer_csum = skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM;    in ef100_make_tso_desc()

/drivers/net/xen-netback/

rx.c
    233  if (skb_shinfo(skb)->gso_type & SKB_GSO_TCPV4)    in xenvif_gso_type()
    256  unsigned int gso_type;    in xenvif_rx_next_skb() (local)
    271  gso_type = xenvif_gso_type(skb);    in xenvif_rx_next_skb()
    272  if ((1 << gso_type) & queue->vif->gso_mask) {    in xenvif_rx_next_skb()
    277  extra->u.gso.type = gso_type;    in xenvif_rx_next_skb()

common.h
    74   int gso_type;    (struct member)

netback.c
    770  skb_shinfo(skb)->gso_type = SKB_GSO_TCPV4;    in xenvif_set_skb_gso()
    773  skb_shinfo(skb)->gso_type = SKB_GSO_TCPV6;    in xenvif_set_skb_gso()

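On the receive-from-guest path, netback.c (lines 770/773 above) translates the GSO type the guest announces in its extra-info slot into the kernel's SKB_GSO_* flag, while rx.c performs the reverse mapping toward the guest. Below is a simplified, paraphrased sketch of the former, assuming the caller passes the guest-supplied type and MSS; XEN_NETIF_GSO_TYPE_* and the skb_shinfo() fields are real, but the function body is not the driver's exact code.

#include <linux/errno.h>
#include <linux/skbuff.h>
#include <xen/interface/io/netif.h>

/* Paraphrase of the xenvif_set_skb_gso() idea; illustrative only. */
static int example_set_skb_gso(struct sk_buff *skb, u16 guest_type, u16 mss)
{
	switch (guest_type) {
	case XEN_NETIF_GSO_TYPE_TCPV4:
		skb_shinfo(skb)->gso_type = SKB_GSO_TCPV4;
		break;
	case XEN_NETIF_GSO_TYPE_TCPV6:
		skb_shinfo(skb)->gso_type = SKB_GSO_TCPV6;
		break;
	default:
		return -EINVAL;	/* unknown or unsupported guest GSO type */
	}

	skb_shinfo(skb)->gso_size = mss;
	/* gso_segs is left for the stack to derive later. */
	return 0;
}
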
/drivers/net/ethernet/qlogic/qede/

qede_fp.c
    191  unsigned short gso_type = skb_shinfo(skb)->gso_type;    in qede_xmit_type() (local)
    193  if ((gso_type & SKB_GSO_UDP_TUNNEL_CSUM) ||    in qede_xmit_type()
    194  (gso_type & SKB_GSO_GRE_CSUM))    in qede_xmit_type()
    657  skb_shinfo(skb)->gso_type = SKB_GSO_TCPV6;    in qede_set_gro_params()
    659  skb_shinfo(skb)->gso_type = SKB_GSO_TCPV4;    in qede_set_gro_params()
    929  skb_shinfo(skb)->gso_type = 0;    in qede_gro_receive()

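qede_set_gro_params() (and, further down, qlcnic_process_lro() and gve_rx_complete_rsc()) shows the receive-side counterpart: when hardware has coalesced several TCP segments into one skb, the driver records the original MSS and protocol in the shared info so GRO and any later re-segmentation know how to split it again. A hedged sketch of that bookkeeping, with an invented helper name and parameters:

#include <linux/skbuff.h>

/* Illustrative only; real drivers also fill gso_segs from len / mss. */
static void example_set_coalesce_params(struct sk_buff *skb, u16 mss,
					bool is_ipv6)
{
	skb_shinfo(skb)->gso_type = is_ipv6 ? SKB_GSO_TCPV6 : SKB_GSO_TCPV4;
	skb_shinfo(skb)->gso_size = mss;
}
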
/drivers/net/ethernet/huawei/hinic/

hinic_tx.c
    289  u32 gso_type = skb_shinfo(skb)->gso_type;    in offload_tso() (local)
    309  if (gso_type & SKB_GSO_UDP_TUNNEL_CSUM) {    in offload_tso()
    312  } else if (gso_type & SKB_GSO_UDP_TUNNEL) {    in offload_tso()

/drivers/net/ethernet/mellanox/mlx5/core/en_accel/

tls_rxtx.c
    173  skb_shinfo(nskb)->gso_type = skb_shinfo(skb)->gso_type;    in mlx5e_tls_complete_sync_skb()

en_accel.h
    122  if (skb_is_gso(skb) && skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4)    in mlx5e_accel_tx_begin()

/drivers/net/ethernet/sun/

sunvnet_common.c
    1210 skb_shinfo(nskb)->gso_type = skb_shinfo(skb)->gso_type;    in vnet_skb_shape()
    1229 int gso_size, gso_type, gso_segs;    in vnet_handle_offloads() (local)
    1250 gso_type = skb_shinfo(skb)->gso_type;    in vnet_handle_offloads()
    1289 skb_shinfo(curr)->gso_type = gso_type;    in vnet_handle_offloads()

/drivers/net/ethernet/intel/iavf/

iavf_txrx.c
    1876 if (skb_shinfo(skb)->gso_type & (SKB_GSO_GRE |    in iavf_tso()
    1882 if (!(skb_shinfo(skb)->gso_type & SKB_GSO_PARTIAL) &&    in iavf_tso()
    1883 (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM)) {    in iavf_tso()
    1913 if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4) {    in iavf_tso()
    2036 !(skb_shinfo(skb)->gso_type & SKB_GSO_PARTIAL) &&    in iavf_tx_enable_csum()
    2037 (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM))    in iavf_tx_enable_csum()

/drivers/net/ethernet/intel/ice/

ice_txrx.c
    1749 gso_ena = skb_shinfo(skb)->gso_type & SKB_GSO_PARTIAL;    in ice_tx_csum()
    1752 (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM))    in ice_tx_csum()
    1910 if (skb_shinfo(skb)->gso_type & (SKB_GSO_GRE |    in ice_tso()
    1916 if (!(skb_shinfo(skb)->gso_type & SKB_GSO_PARTIAL) &&    in ice_tso()
    1917 (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM)) {    in ice_tso()
    1950 if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4) {    in ice_tso()

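The iavf, ice and i40e TSO paths (and sfc's efx_tx_csum_type_skb()) share one idiom: the outer UDP checksum of a tunnelled GSO packet is only handed to the hardware when SKB_GSO_PARTIAL is not also set, since with partial GSO the stack has already prepared the outer headers. A condensed predicate expressing that check; the flags are real, the helper name is invented:

#include <linux/skbuff.h>
#include <linux/types.h>

/* Invented helper condensing the PARTIAL vs. UDP_TUNNEL_CSUM check. */
static bool example_hw_fills_outer_udp_csum(const struct sk_buff *skb)
{
	return !(skb_shinfo(skb)->gso_type & SKB_GSO_PARTIAL) &&
		(skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM);
}
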
/drivers/net/ethernet/marvell/octeontx2/nic/

otx2_txrx.c
    566  if (skb_shinfo(skb)->gso_type & SKB_GSO_TCPV4) {    in otx2_sqe_add_ext()
    575  } else if (skb_shinfo(skb)->gso_type & SKB_GSO_TCPV6) {    in otx2_sqe_add_ext()
    578  } else if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4) {    in otx2_sqe_add_ext()

/drivers/net/wireless/ath/wil6210/

txrx_edma.c
    1413 int gso_type;    in __wil_tx_ring_tso_edma() (local)
    1435 gso_type = skb_shinfo(skb)->gso_type & (SKB_GSO_TCPV6 | SKB_GSO_TCPV4);    in __wil_tx_ring_tso_edma()
    1436 switch (gso_type) {    in __wil_tx_ring_tso_edma()

txrx.c
    1763 int gso_type;    in __wil_tx_vring_tso() (local)
    1789 gso_type = skb_shinfo(skb)->gso_type & (SKB_GSO_TCPV6 | SKB_GSO_TCPV4);    in __wil_tx_vring_tso()
    1790 switch (gso_type) {    in __wil_tx_vring_tso()

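Both wil6210 TX paths mask gso_type down to the two TCP variants before switching on it, so an skb that carries neither flag, or somehow both, is rejected up front. A hedged sketch of that selection, with invented descriptor constants:

#include <linux/errno.h>
#include <linux/skbuff.h>

enum example_tso_desc { EX_TSO_DESC_V4, EX_TSO_DESC_V6 };	/* invented */

static int example_pick_tso_desc(const struct sk_buff *skb)
{
	int gso_type = skb_shinfo(skb)->gso_type &
		       (SKB_GSO_TCPV6 | SKB_GSO_TCPV4);

	switch (gso_type) {
	case SKB_GSO_TCPV4:
		return EX_TSO_DESC_V4;
	case SKB_GSO_TCPV6:
		return EX_TSO_DESC_V6;
	default:
		return -EINVAL;	/* neither flag, or both at once */
	}
}
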
/drivers/net/ethernet/google/gve/

gve_rx_dqo.c
    616  shinfo->gso_type = SKB_GSO_TCPV4;    in gve_rx_complete_rsc()
    619  shinfo->gso_type = SKB_GSO_TCPV6;    in gve_rx_complete_rsc()

/drivers/net/ethernet/intel/i40e/

i40e_txrx.c
    3039 if (skb_shinfo(skb)->gso_type & (SKB_GSO_GRE |    in i40e_tso()
    3045 if (!(skb_shinfo(skb)->gso_type & SKB_GSO_PARTIAL) &&    in i40e_tso()
    3046 (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM)) {    in i40e_tso()
    3077 if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4) {    in i40e_tso()
    3245 !(skb_shinfo(skb)->gso_type & SKB_GSO_PARTIAL) &&    in i40e_tx_enable_csum()
    3246 (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM))    in i40e_tx_enable_csum()

/drivers/net/ethernet/neterion/vxge/

vxge-main.h
    515  #define vxge_offload_type(skb) (skb_shinfo(skb)->gso_type)

/drivers/net/ethernet/intel/ixgbe/

ixgbe_fcoe.c
    500  if (skb_is_gso(skb) && (skb_shinfo(skb)->gso_type != SKB_GSO_FCOE)) {    in ixgbe_fso()
    502  skb_shinfo(skb)->gso_type);    in ixgbe_fso()

/drivers/net/ethernet/chelsio/cxgb4/

sge.c
    738  } else if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4) {    in is_eth_imm()
    785  } else if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4) {    in calc_tx_flits()
    1386 if (ssi->gso_type & SKB_GSO_TCPV6)    in write_tso_wr()
    1611 if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4)    in cxgb4_eth_xmit()
    1618 if (ssi->gso_size && !(ssi->gso_type & SKB_GSO_UDP_L4)) {    in cxgb4_eth_xmit()
    1937 bool v6 = (ssi->gso_type & SKB_GSO_TCPV6) != 0;    in cxgb4_vf_eth_xmit()
    2178 !(skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4))    in ethofld_calc_tx_flits()
    2214 !(skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4))    in write_eo_wr()
    2375 if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4)    in ethofld_hard_xmit()

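In cxgb4's sge.c the same field distinguishes UDP segmentation offload (SKB_GSO_UDP_L4, set for UDP GSO) from ordinary TCP TSO, since the two need different work requests and size calculations. A small invented classifier showing the distinction:

#include <linux/skbuff.h>

enum example_seg_mode { EX_SEG_NONE, EX_SEG_TSO, EX_SEG_USO };	/* invented */

static enum example_seg_mode example_classify_gso(const struct sk_buff *skb)
{
	if (!skb_is_gso(skb))
		return EX_SEG_NONE;

	/* UDP GSO (SKB_GSO_UDP_L4) takes the USO path. */
	if (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_L4)
		return EX_SEG_USO;

	return EX_SEG_TSO;
}
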
/drivers/net/ethernet/qlogic/qlcnic/

qlcnic_io.c
    1341 skb_shinfo(skb)->gso_type = SKB_GSO_TCPV6;    in qlcnic_process_lro()
    1343 skb_shinfo(skb)->gso_type = SKB_GSO_TCPV4;    in qlcnic_process_lro()
    1865 skb_shinfo(skb)->gso_type = SKB_GSO_TCPV6;    in qlcnic_83xx_process_lro()
    1867 skb_shinfo(skb)->gso_type = SKB_GSO_TCPV4;    in qlcnic_83xx_process_lro()

/drivers/net/ethernet/pensando/ionic/

ionic_txrx.c
    936  outer_csum = (skb_shinfo(skb)->gso_type & SKB_GSO_GRE_CSUM) ||    in ionic_tx_tso()
    937  (skb_shinfo(skb)->gso_type & SKB_GSO_UDP_TUNNEL_CSUM);    in ionic_tx_tso()

/drivers/net/ethernet/qualcomm/emac/

emac-mac.c
    1275 if (skb_shinfo(skb)->gso_type & SKB_GSO_TCPV4) {    in emac_tso_csum()
    1284 if (skb_shinfo(skb)->gso_type & SKB_GSO_TCPV6) {    in emac_tso_csum()