   1/* SPDX-License-Identifier: GPL-2.0 */
   2/* Interface for implementing AF_XDP zero-copy support in drivers.
   3 * Copyright(c) 2020 Intel Corporation.
   4 */
   5
   6#ifndef _LINUX_XDP_SOCK_DRV_H
   7#define _LINUX_XDP_SOCK_DRV_H
   8
   9#include <net/xdp_sock.h>
  10#include <net/xsk_buff_pool.h>
  11
  12#ifdef CONFIG_XDP_SOCKETS
  13
/* TX completion / descriptor-ring entry points for drivers. */
void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries);
bool xsk_tx_peek_desc(struct xsk_buff_pool *pool, struct xdp_desc *desc);
u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, struct xdp_desc *desc, u32 max);
void xsk_tx_release(struct xsk_buff_pool *pool);
/* Look up the buffer pool bound to @queue_id on @dev, or NULL. */
struct xsk_buff_pool *xsk_get_pool_from_qid(struct net_device *dev,
					    u16 queue_id);
/* need_wakeup flag management for the RX and TX rings. */
void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool);
bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool);
  25
/* Total headroom a driver must reserve in front of each RX frame:
 * the fixed XDP_PACKET_HEADROOM plus the headroom configured on the pool.
 */
static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return XDP_PACKET_HEADROOM + pool->headroom;
}
  30
/* Size in bytes of one chunk (buffer) in the pool's umem. */
static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return pool->chunk_size;
}
  35
  36static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
  37{
  38        return xsk_pool_get_chunk_size(pool) - xsk_pool_get_headroom(pool);
  39}
  40
/* Associate the driver's RX queue info with the pool; delegates to
 * xp_set_rxq_info().
 */
static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
	xp_set_rxq_info(pool, rxq);
}
  46
/* Undo xsk_pool_dma_map(); @attrs are DMA mapping attributes passed
 * through to xp_dma_unmap().
 */
static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
	xp_dma_unmap(pool, attrs);
}
  52
  53static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
  54                                   struct device *dev, unsigned long attrs)
  55{
  56        struct xdp_umem *umem = pool->umem;
  57
  58        return xp_dma_map(pool, dev, attrs, umem->pgs, umem->npgs);
  59}
  60
  61static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
  62{
  63        struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);
  64
  65        return xp_get_dma(xskb);
  66}
  67
  68static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
  69{
  70        struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);
  71
  72        return xp_get_frame_dma(xskb);
  73}
  74
/* Allocate one RX buffer from the pool's fill ring; returns NULL when
 * none is available (see xp_alloc()).
 */
static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return xp_alloc(pool);
}
  79
/* True if the pool can currently satisfy an allocation of @count buffers. */
static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return xp_can_alloc(pool, count);
}
  84
  85static inline void xsk_buff_free(struct xdp_buff *xdp)
  86{
  87        struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);
  88
  89        xp_free(xskb);
  90}
  91
/* DMA address for a raw umem offset @addr (e.g. a TX descriptor address). */
static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return xp_raw_get_dma(pool, addr);
}
  97
/* Kernel virtual address for a raw umem offset @addr. */
static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return xp_raw_get_data(pool, addr);
}
 102
 103static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
 104{
 105        struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);
 106
 107        if (!pool->dma_need_sync)
 108                return;
 109
 110        xp_dma_sync_for_cpu(xskb);
 111}
 112
/* Sync @size bytes at @dma for device access before handing the buffer
 * to hardware (e.g. before TX); delegates to xp_dma_sync_for_device().
 */
static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
	xp_dma_sync_for_device(pool, dma, size);
}
 119
 120#else
 121
/* Stubs for !CONFIG_XDP_SOCKETS: TX helpers report "nothing to do" and
 * pool lookup never finds a pool, so driver code compiles unchanged.
 */
static inline void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries)
{
}

static inline bool xsk_tx_peek_desc(struct xsk_buff_pool *pool,
				    struct xdp_desc *desc)
{
	return false;
}

static inline u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, struct xdp_desc *desc,
						 u32 max)
{
	return 0;
}

static inline void xsk_tx_release(struct xsk_buff_pool *pool)
{
}

static inline struct xsk_buff_pool *
xsk_get_pool_from_qid(struct net_device *dev, u16 queue_id)
{
	return NULL;
}
 147
/* need_wakeup stubs for !CONFIG_XDP_SOCKETS: setters/clearers are no-ops
 * and the feature always reports as unused.
 */
static inline void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool)
{
	return false;
}
 168
/* Pool-helper stubs for !CONFIG_XDP_SOCKETS: sizes are zero, rxq-info
 * and DMA unmap are no-ops, and DMA map trivially succeeds.
 */
static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
}

static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
}

static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
				   struct device *dev, unsigned long attrs)
{
	return 0;
}
 199
/* Buffer-helper stubs for !CONFIG_XDP_SOCKETS: addresses are zero,
 * allocation always fails (NULL/false), and free/sync are no-ops.
 */
static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return NULL;
}

static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return false;
}

static inline void xsk_buff_free(struct xdp_buff *xdp)
{
}

static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return 0;
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return NULL;
}

static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
{
}

static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
}
 244
 245#endif /* CONFIG_XDP_SOCKETS */
 246
 247#endif /* _LINUX_XDP_SOCK_DRV_H */
 248