
/arch/m68k/include/asm/dma-mapping.h

https://bitbucket.org/thekraven/iscream_thunderc-2.6.35
C Header | 112 lines | 90 code | 19 blank | 3 comment | 0 complexity | f5009a3a95999feefdc435fa5b096e9c MD5
Possible License(s): GPL-2.0, LGPL-2.0, AGPL-1.0
#ifndef _M68K_DMA_MAPPING_H
#define _M68K_DMA_MAPPING_H

#include <asm/cache.h>

struct scatterlist;

#ifndef CONFIG_MMU_SUN3
static inline int dma_supported(struct device *dev, u64 mask)
{
	return 1;
}

static inline int dma_set_mask(struct device *dev, u64 mask)
{
	return 0;
}

static inline int dma_get_cache_alignment(void)
{
	return 1 << L1_CACHE_SHIFT;
}

static inline int dma_is_consistent(struct device *dev, dma_addr_t dma_addr)
{
	return 0;
}

extern void *dma_alloc_coherent(struct device *, size_t,
				dma_addr_t *, gfp_t);
extern void dma_free_coherent(struct device *, size_t,
			      void *, dma_addr_t);

static inline void *dma_alloc_noncoherent(struct device *dev, size_t size,
					  dma_addr_t *handle, gfp_t flag)
{
	return dma_alloc_coherent(dev, size, handle, flag);
}
static inline void dma_free_noncoherent(struct device *dev, size_t size,
					void *addr, dma_addr_t handle)
{
	dma_free_coherent(dev, size, addr, handle);
}
static inline void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
				  enum dma_data_direction dir)
{
	/* we use coherent allocation, so not much to do here. */
}

extern dma_addr_t dma_map_single(struct device *, void *, size_t,
				 enum dma_data_direction);
static inline void dma_unmap_single(struct device *dev, dma_addr_t addr,
				    size_t size, enum dma_data_direction dir)
{
}

extern dma_addr_t dma_map_page(struct device *, struct page *,
			       unsigned long, size_t size,
			       enum dma_data_direction);
static inline void dma_unmap_page(struct device *dev, dma_addr_t address,
				  size_t size, enum dma_data_direction dir)
{
}

extern int dma_map_sg(struct device *, struct scatterlist *, int,
		      enum dma_data_direction);
static inline void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
				int nhwentries, enum dma_data_direction dir)
{
}

extern void dma_sync_single_for_device(struct device *, dma_addr_t, size_t,
				       enum dma_data_direction);
extern void dma_sync_sg_for_device(struct device *, struct scatterlist *, int,
				   enum dma_data_direction);

static inline void dma_sync_single_range_for_device(struct device *dev,
		dma_addr_t dma_handle, unsigned long offset, size_t size,
		enum dma_data_direction direction)
{
	/* just sync everything for now */
	dma_sync_single_for_device(dev, dma_handle, offset + size, direction);
}

static inline void dma_sync_single_for_cpu(struct device *dev, dma_addr_t handle,
					   size_t size, enum dma_data_direction dir)
{
}

static inline void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
				       int nents, enum dma_data_direction dir)
{
}

static inline void dma_sync_single_range_for_cpu(struct device *dev,
		dma_addr_t dma_handle, unsigned long offset, size_t size,
		enum dma_data_direction direction)
{
	/* just sync everything for now */
	dma_sync_single_for_cpu(dev, dma_handle, offset + size, direction);
}

static inline int dma_mapping_error(struct device *dev, dma_addr_t handle)
{
	return 0;
}

#else
#include <asm-generic/dma-mapping-broken.h>
#endif

#endif  /* _M68K_DMA_MAPPING_H */
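
For context, a minimal usage sketch (not part of the header) of how a driver written against this 2.6.35-era DMA API might combine a coherent allocation with a streaming mapping. The function name my_dev_setup_dma and the BUF_LEN constant are hypothetical; only the dma_* calls come from the declarations above. Note that on this architecture dma_mapping_error() always returns 0, so the error branch is effectively dead code here but is kept for portability to other architectures.

/*
 * Hypothetical example, not part of arch/m68k: allocate a coherent
 * descriptor ring and stream one buffer to a device using the API
 * declared in dma-mapping.h.
 */
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/slab.h>

#define BUF_LEN 4096	/* illustrative buffer size */

static int my_dev_setup_dma(struct device *dev)
{
	dma_addr_t ring_dma, buf_dma;
	void *ring;
	void *buf;

	/* Coherent allocation: CPU and device see consistent memory. */
	ring = dma_alloc_coherent(dev, PAGE_SIZE, &ring_dma, GFP_KERNEL);
	if (!ring)
		return -ENOMEM;

	/* Streaming mapping: the device owns the buffer until it is unmapped. */
	buf = kmalloc(BUF_LEN, GFP_KERNEL);
	if (!buf)
		goto free_ring;

	buf_dma = dma_map_single(dev, buf, BUF_LEN, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, buf_dma))	/* always false on m68k */
		goto free_buf;

	/* ... program ring_dma and buf_dma into the device here ... */

	/* Hand the buffer back to the CPU once the device is done with it. */
	dma_unmap_single(dev, buf_dma, BUF_LEN, DMA_TO_DEVICE);
	kfree(buf);
	dma_free_coherent(dev, PAGE_SIZE, ring, ring_dma);
	return 0;

free_buf:
	kfree(buf);
free_ring:
	dma_free_coherent(dev, PAGE_SIZE, ring, ring_dma);
	return -ENOMEM;
}

In a real driver this sequence would typically run from the bus probe() callback, with the coherent ring kept for the device's lifetime and streaming mappings created and torn down per transfer.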