linux/arch/mips/include/asm/dma-mapping.h
#ifndef _ASM_DMA_MAPPING_H
#define _ASM_DMA_MAPPING_H

#include <asm/scatterlist.h>
#include <asm/cache.h>
#include <asm-generic/dma-coherent.h>

#ifndef CONFIG_SGI_IP27 /* Kludge to fix 2.6.39 build for IP27 */
#include <dma-coherence.h>
#endif

extern struct dma_map_ops *mips_dma_map_ops;

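/*
 * Devices may carry their own DMA ops in dev->archdata.dma_ops
 * (installed by platform code); everything else falls back to the
 * global MIPS implementation behind mips_dma_map_ops.
 */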
static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
        if (dev && dev->archdata.dma_ops)
                return dev->archdata.dma_ops;
        else
                return mips_dma_map_ops;
}

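/*
 * A device can DMA to [addr, addr + size) only if the entire range
 * fits under its DMA mask; a device without a dma_mask cannot DMA
 * at all.
 */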
static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
{
        if (!dev->dma_mask)
                return false;

        return addr + size <= *dev->dma_mask;
}

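/*
 * Hook used by some DMA implementations (e.g. swiotlb) to mark pages
 * clean after a transfer; there is nothing to do here on MIPS.
 */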
static inline void dma_mark_clean(void *addr, size_t size) {}

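/*
 * The generic header below builds dma_map_single(), dma_map_page(),
 * dma_map_sg() and the dma_sync_*() helpers on top of get_dma_ops().
 */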
#include <asm-generic/dma-mapping-common.h>

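/* Ask the active dma_map_ops whether the device can use the given mask. */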
static inline int dma_supported(struct device *dev, u64 mask)
{
        struct dma_map_ops *ops = get_dma_ops(dev);
        return ops->dma_supported(dev, mask);
}

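/*
 * Check a bus address returned by a mapping call for failure.  (The
 * parameter is named 'mask' here, but what gets passed in is the DMA
 * address to test.)
 */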
static inline int dma_mapping_error(struct device *dev, u64 mask)
{
        struct dma_map_ops *ops = get_dma_ops(dev);
        return ops->mapping_error(dev, mask);
}

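/*
 * Validate the requested mask with dma_supported() before storing it;
 * -EIO means the device either has no mask pointer or cannot reach
 * memory described by the mask.
 */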
static inline int
dma_set_mask(struct device *dev, u64 mask)
{
        if (!dev->dma_mask || !dma_supported(dev, mask))
                return -EIO;

        *dev->dma_mask = mask;

        return 0;
}

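/*
 * Write back/invalidate the CPU caches for a buffer obtained from
 * dma_alloc_noncoherent() so that the device (or the CPU) sees
 * up-to-date data for the given direction.
 */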
extern void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
               enum dma_data_direction direction);

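/*
 * Coherent allocations go through the dma_map_ops ->alloc hook and are
 * reported to the DMA debugging code; dma_alloc_coherent() is simply
 * dma_alloc_attrs() with no attributes.
 */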
#define dma_alloc_coherent(d,s,h,f)     dma_alloc_attrs(d,s,h,f,NULL)

static inline void *dma_alloc_attrs(struct device *dev, size_t size,
                                    dma_addr_t *dma_handle, gfp_t gfp,
                                    struct dma_attrs *attrs)
{
        void *ret;
        struct dma_map_ops *ops = get_dma_ops(dev);

        ret = ops->alloc(dev, size, dma_handle, gfp, attrs);

        debug_dma_alloc_coherent(dev, size, *dma_handle, ret);

        return ret;
}

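/*
 * Freeing mirrors allocation: release the buffer through the ->free
 * hook and tell the DMA debugging code about it.
 */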
#define dma_free_coherent(d,s,c,h) dma_free_attrs(d,s,c,h,NULL)

static inline void dma_free_attrs(struct device *dev, size_t size,
                                  void *vaddr, dma_addr_t dma_handle,
                                  struct dma_attrs *attrs)
{
        struct dma_map_ops *ops = get_dma_ops(dev);

        ops->free(dev, size, vaddr, dma_handle, attrs);

        debug_dma_free_coherent(dev, size, vaddr, dma_handle);
}

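/*
 * Non-coherent allocations may return cacheable memory; use
 * dma_cache_sync() on such a buffer before handing it to the device
 * or before reading data the device has written.
 */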
void *dma_alloc_noncoherent(struct device *dev, size_t size,
                           dma_addr_t *dma_handle, gfp_t flag);

void dma_free_noncoherent(struct device *dev, size_t size,
                         void *vaddr, dma_addr_t dma_handle);

#endif /* _ASM_DMA_MAPPING_H */