/* extracted at commit 8c911ea9ee5f69c2217a117ce4339603af931275 */
1 #ifndef ___ASM_SPARC_DMA_MAPPING_H
2 #define ___ASM_SPARC_DMA_MAPPING_H
3 #if defined(__sparc__) && defined(__arch64__)
4 #include <asm/dma-mapping_64.h>
6 #include <asm/dma-mapping_32.h>
9 #define DMA_ERROR_CODE (~(dma_addr_t)0x0)
11 extern int dma_supported(struct device
*dev
, u64 mask
);
12 extern int dma_set_mask(struct device
*dev
, u64 dma_mask
);
14 static inline int dma_mapping_error(struct device
*dev
, dma_addr_t dma_addr
)
16 return (dma_addr
== DMA_ERROR_CODE
);
19 static inline int dma_get_cache_alignment(void)
22 * no easy way to get cache size on all processors, so return
23 * the maximum possible, to be safe
25 return (1 << INTERNODE_CACHE_SHIFT
);
/*
 * Noncoherent allocations are satisfied by the coherent allocator,
 * and DMA memory is always reported as consistent on this platform.
 */
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
#define dma_is_consistent(d, h)	(1)
32 static inline void dma_sync_single_range_for_cpu(struct device
*dev
,
33 dma_addr_t dma_handle
,
36 enum dma_data_direction dir
)
38 dma_sync_single_for_cpu(dev
, dma_handle
+offset
, size
, dir
);
41 static inline void dma_sync_single_range_for_device(struct device
*dev
,
42 dma_addr_t dma_handle
,
45 enum dma_data_direction dir
)
47 dma_sync_single_for_device(dev
, dma_handle
+offset
, size
, dir
);