/* SPDX-License-Identifier: GPL-2.0 */
/* Interface for implementing AF_XDP zero-copy support in drivers.
 * Copyright(c) 2020 Intel Corporation.
 */

#ifndef _LINUX_XDP_SOCK_DRV_H
#define _LINUX_XDP_SOCK_DRV_H

#include <net/xdp_sock.h>
#include <net/xsk_buff_pool.h>

#ifdef CONFIG_XDP_SOCKETS

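/* Driver-facing entry points for the TX path and the need_wakeup flags,
 * implemented by the XDP socket core (net/xdp/xsk.c).
 */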
void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries);
bool xsk_tx_peek_desc(struct xsk_buff_pool *pool, struct xdp_desc *desc);
void xsk_tx_release(struct xsk_buff_pool *pool);
struct xsk_buff_pool *xsk_get_pool_from_qid(struct net_device *dev,
					    u16 queue_id);
void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool);
bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool);

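/* Buffer pool geometry: each chunk reserves XDP_PACKET_HEADROOM plus the
 * user-configured headroom in front of the RX frame.
 */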
static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return XDP_PACKET_HEADROOM + pool->headroom;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return pool->chunk_size;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
	return xsk_pool_get_chunk_size(pool) - xsk_pool_get_headroom(pool);
}

static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
	xp_set_rxq_info(pool, rxq);
}

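/* Pool-level DMA helpers: the whole umem is mapped once for a device; the
 * per-buffer helpers below return addresses inside that mapping.
 */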
static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
	xp_dma_unmap(pool, attrs);
}

static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
				   struct device *dev, unsigned long attrs)
{
	struct xdp_umem *umem = pool->umem;

	return xp_dma_map(pool, dev, attrs, umem->pgs, umem->npgs);
}

static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	return xp_get_dma(xskb);
}

static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	return xp_get_frame_dma(xskb);
}

static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return xp_alloc(pool);
}

static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return xp_can_alloc(pool, count);
}

static inline void xsk_buff_free(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	xp_free(xskb);
}

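/* The *_raw_* helpers take a raw umem address, e.g. one read from a TX
 * descriptor, rather than an xdp_buff.
 */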
static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return xp_raw_get_dma(pool, addr);
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return xp_raw_get_data(pool, addr);
}

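/* DMA sync helpers: the CPU sync is skipped entirely when the mapping is
 * coherent (pool->dma_need_sync is false).
 */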
static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	if (!pool->dma_need_sync)
		return;

	xp_dma_sync_for_cpu(xskb);
}

static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
	xp_dma_sync_for_device(pool, dma, size);
}

#else

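/* Stubs used when AF_XDP sockets are compiled out (CONFIG_XDP_SOCKETS=n) so
 * that drivers can call these helpers unconditionally.
 */
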
static inline void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries)
{
}

static inline bool xsk_tx_peek_desc(struct xsk_buff_pool *pool,
				    struct xdp_desc *desc)
{
	return false;
}

static inline void xsk_tx_release(struct xsk_buff_pool *pool)
{
}

static inline struct xsk_buff_pool *
xsk_get_pool_from_qid(struct net_device *dev, u16 queue_id)
{
	return NULL;
}

static inline void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool)
{
	return false;
}

static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
}

static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
}

static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
				   struct device *dev, unsigned long attrs)
{
	return 0;
}

static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return NULL;
}

static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return false;
}

static inline void xsk_buff_free(struct xdp_buff *xdp)
{
}

static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return 0;
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return NULL;
}

static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
{
}

static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
}

#endif /* CONFIG_XDP_SOCKETS */

#endif /* _LINUX_XDP_SOCK_DRV_H */