/* SPDX-License-Identifier: GPL-2.0 */
/* Interface for implementing AF_XDP zero-copy support in drivers.
 * Copyright(c) 2020 Intel Corporation.
 */

#ifndef _LINUX_XDP_SOCK_DRV_H
#define _LINUX_XDP_SOCK_DRV_H

#include <net/xdp_sock.h>
#include <net/xsk_buff_pool.h>

#ifdef CONFIG_XDP_SOCKETS

void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries);
bool xsk_tx_peek_desc(struct xsk_buff_pool *pool, struct xdp_desc *desc);
u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max);
void xsk_tx_release(struct xsk_buff_pool *pool);
struct xsk_buff_pool *xsk_get_pool_from_qid(struct net_device *dev,
					    u16 queue_id);
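/* Helpers for drivers to set, clear and test the need_wakeup flags used
 * with the XDP_USE_NEED_WAKEUP bind option.
 */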
void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool);
bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool);

static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return XDP_PACKET_HEADROOM + pool->headroom;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return pool->chunk_size;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
	return xsk_pool_get_chunk_size(pool) - xsk_pool_get_headroom(pool);
}

static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
	xp_set_rxq_info(pool, rxq);
}

static inline unsigned int xsk_pool_get_napi_id(struct xsk_buff_pool *pool)
{
#ifdef CONFIG_NET_RX_BUSY_POLL
	return pool->heads[0].xdp.rxq->napi_id;
#else
	return 0;
#endif
}

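/* DMA map/unmap the UMEM pages backing the pool for use by @dev. */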
static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
	xp_dma_unmap(pool, attrs);
}

static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
				   struct device *dev, unsigned long attrs)
{
	struct xdp_umem *umem = pool->umem;

	return xp_dma_map(pool, dev, attrs, umem->pgs, umem->npgs);
}

static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	return xp_get_dma(xskb);
}

static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	return xp_get_frame_dma(xskb);
}

static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return xp_alloc(pool);
}

static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
{
	return xp_alloc_batch(pool, xdp, max);
}

static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return xp_can_alloc(pool, count);
}

static inline void xsk_buff_free(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	xp_free(xskb);
}

static inline void xsk_buff_discard(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	xp_release(xskb);
}

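/* Set up the xdp_buff data pointers for a received frame of @size bytes,
 * leaving XDP_PACKET_HEADROOM in front of the packet data.
 */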
static inline void xsk_buff_set_size(struct xdp_buff *xdp, u32 size)
{
	xdp->data = xdp->data_hard_start + XDP_PACKET_HEADROOM;
	xdp->data_meta = xdp->data;
	xdp->data_end = xdp->data + size;
}

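/* Variants that take a raw UMEM address, e.g. from a Tx descriptor,
 * instead of an xdp_buff.
 */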
static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return xp_raw_get_dma(pool, addr);
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return xp_raw_get_data(pool, addr);
}

static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	if (!pool->dma_need_sync)
		return;

	xp_dma_sync_for_cpu(xskb);
}

static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
	xp_dma_sync_for_device(pool, dma, size);
}
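
/*
 * Rough sketch of how a driver's zero-copy Rx path typically uses the
 * helpers above; details vary per driver, and the names xdp, dma and len
 * below are only illustrative:
 *
 *	xdp = xsk_buff_alloc(pool);
 *	dma = xsk_buff_xdp_get_dma(xdp);
 *	(post dma to the Rx ring; once the HW has written a frame of len bytes)
 *	xsk_buff_set_size(xdp, len);
 *	xsk_buff_dma_sync_for_cpu(xdp, pool);
 *	(run the XDP program; xsk_buff_free(xdp) if the frame is dropped)
 */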

#else

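/* Stubs used when CONFIG_XDP_SOCKETS is disabled. */
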
static inline void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries)
{
}

static inline bool xsk_tx_peek_desc(struct xsk_buff_pool *pool,
				    struct xdp_desc *desc)
{
	return false;
}

static inline u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max)
{
	return 0;
}

static inline void xsk_tx_release(struct xsk_buff_pool *pool)
{
}

static inline struct xsk_buff_pool *
xsk_get_pool_from_qid(struct net_device *dev, u16 queue_id)
{
	return NULL;
}

static inline void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool)
{
	return false;
}

static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
}

static inline unsigned int xsk_pool_get_napi_id(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
}

static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
				   struct device *dev, unsigned long attrs)
{
	return 0;
}

static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return NULL;
}

static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
{
	return 0;
}

static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return false;
}

static inline void xsk_buff_free(struct xdp_buff *xdp)
{
}

static inline void xsk_buff_discard(struct xdp_buff *xdp)
{
}

static inline void xsk_buff_set_size(struct xdp_buff *xdp, u32 size)
{
}

static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return 0;
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return NULL;
}

static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
{
}

static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
}

#endif /* CONFIG_XDP_SOCKETS */

#endif /* _LINUX_XDP_SOCK_DRV_H */