/* arch/sparc/include/asm/dma-mapping.h */
#ifndef ___ASM_SPARC_DMA_MAPPING_H
#define ___ASM_SPARC_DMA_MAPPING_H

#include <linux/scatterlist.h>
#include <linux/mm.h>
#include <linux/dma-debug.h>

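/*
 * All-ones dma_addr_t marks a failed mapping; the arch supplies its own
 * dma_supported() instead of the generic one (HAVE_ARCH_DMA_SUPPORTED).
 */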
#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)

#define HAVE_ARCH_DMA_SUPPORTED 1
int dma_supported(struct device *dev, u64 mask);

static inline void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
				  enum dma_data_direction dir)
{
	/* Since dma_{alloc,free}_noncoherent() allocated coherent memory, this
	 * routine can be a nop.
	 */
}

extern struct dma_map_ops *dma_ops;
extern struct dma_map_ops *leon_dma_ops;
extern struct dma_map_ops pci32_dma_ops;

extern struct bus_type pci_bus_type;

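/*
 * Select the dma_map_ops for a device: LEON systems use leon_dma_ops,
 * PCI devices on sparc32 use pci32_dma_ops, and everything else falls
 * back to the generic dma_ops.
 */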
static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
#ifdef CONFIG_SPARC_LEON
	if (sparc_cpu_model == sparc_leon)
		return leon_dma_ops;
#endif
#if defined(CONFIG_SPARC32) && defined(CONFIG_PCI)
	if (dev->bus == &pci_bus_type)
		return &pci32_dma_ops;
#endif
	return dma_ops;
}

#define HAVE_ARCH_DMA_SET_MASK 1

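/*
 * Only PCI devices can have their DMA mask changed here, and only to a
 * mask that dma_supported() accepts; all other requests fail.
 */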
static inline int dma_set_mask(struct device *dev, u64 mask)
{
#ifdef CONFIG_PCI
	if (dev->bus == &pci_bus_type) {
		if (!dev->dma_mask || !dma_supported(dev, mask))
			return -EINVAL;
		*dev->dma_mask = mask;
		return 0;
	}
#endif
	return -EINVAL;
}

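/* The generic header builds the dma_map_*() helpers on top of get_dma_ops(). */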
#include <asm-generic/dma-mapping-common.h>

#endif