#ifndef _X8664_DMA_MAPPING_H
#define _X8664_DMA_MAPPING_H 1

/*
 * IOMMU interface. See Documentation/DMA-mapping.txt and DMA-API.txt for
 * documentation.
 */

#include <linux/config.h>

#include <asm/scatterlist.h>
#include <asm/io.h>		/* virt_to_bus(), flush_write_buffers() */
#include <asm/swiotlb.h>

extern dma_addr_t bad_dma_address;
#define dma_mapping_error(x) \
	(swiotlb ? swiotlb_dma_mapping_error(x) : ((x) == bad_dma_address))

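/*
 * Usage sketch (not part of this header): a driver checks the result of a
 * streaming mapping before programming the device.  "mydev", "buf" and
 * "len" are hypothetical names.
 *
 *	dma_addr_t handle = dma_map_single(&mydev->dev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(handle))
 *		return -ENOMEM;
 *	...
 *	dma_unmap_single(&mydev->dev, handle, len, DMA_TO_DEVICE);
 */
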
void *dma_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_handle,
			 unsigned gfp);
void dma_free_coherent(struct device *dev, size_t size, void *vaddr,
		       dma_addr_t dma_handle);

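/*
 * Usage sketch (not part of this header): allocating a consistent buffer
 * that is shared with the device for its lifetime.  "mydev", "ring" and
 * RING_SIZE are hypothetical names.
 *
 *	dma_addr_t ring_dma;
 *	void *ring = dma_alloc_coherent(&mydev->dev, RING_SIZE, &ring_dma, GFP_KERNEL);
 *	if (!ring)
 *		return -ENOMEM;
 *	...
 *	dma_free_coherent(&mydev->dev, RING_SIZE, ring, ring_dma);
 */
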
#ifdef CONFIG_GART_IOMMU

extern dma_addr_t dma_map_single(struct device *hwdev, void *ptr, size_t size,
				 int direction);
extern void dma_unmap_single(struct device *dev, dma_addr_t addr, size_t size,
			     int direction);

#else

/* No IOMMU */

static inline dma_addr_t dma_map_single(struct device *hwdev, void *ptr,
					size_t size, int direction)
{
	dma_addr_t addr;

	if (direction == DMA_NONE)
		out_of_line_bug();
	addr = virt_to_bus(ptr);

	if ((addr + size) & ~*hwdev->dma_mask)
		out_of_line_bug();
	return addr;
}

static inline void dma_unmap_single(struct device *hwdev, dma_addr_t dma_addr,
				    size_t size, int direction)
{
	if (direction == DMA_NONE)
		out_of_line_bug();
	/* Nothing to do */
}

#endif

#define dma_map_page(dev, page, offset, size, dir) \
	dma_map_single((dev), page_address(page) + (offset), (size), (dir))

static inline void dma_sync_single_for_cpu(struct device *hwdev,
					   dma_addr_t dma_handle,
					   size_t size, int direction)
{
	if (direction == DMA_NONE)
		out_of_line_bug();

	if (swiotlb)
		return swiotlb_sync_single_for_cpu(hwdev, dma_handle, size, direction);

	flush_write_buffers();
}

static inline void dma_sync_single_for_device(struct device *hwdev,
					      dma_addr_t dma_handle,
					      size_t size, int direction)
{
	if (direction == DMA_NONE)
		out_of_line_bug();

	if (swiotlb)
		return swiotlb_sync_single_for_device(hwdev, dma_handle, size, direction);

	flush_write_buffers();
}

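/*
 * Usage sketch (not part of this header): bracketing CPU access to a
 * long-lived streaming mapping that the device writes into.  "mydev",
 * "handle", "buf" and "len" are hypothetical names.
 *
 *	dma_sync_single_for_cpu(&mydev->dev, handle, len, DMA_FROM_DEVICE);
 *	process_data(buf, len);
 *	dma_sync_single_for_device(&mydev->dev, handle, len, DMA_FROM_DEVICE);
 */
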
static inline void dma_sync_single_range_for_cpu(struct device *hwdev,
						 dma_addr_t dma_handle,
						 unsigned long offset,
						 size_t size, int direction)
{
	if (direction == DMA_NONE)
		out_of_line_bug();

	if (swiotlb)
		return swiotlb_sync_single_range_for_cpu(hwdev, dma_handle, offset, size, direction);

	flush_write_buffers();
}

static inline void dma_sync_single_range_for_device(struct device *hwdev,
						    dma_addr_t dma_handle,
						    unsigned long offset,
						    size_t size, int direction)
{
	if (direction == DMA_NONE)
		out_of_line_bug();

	if (swiotlb)
		return swiotlb_sync_single_range_for_device(hwdev, dma_handle, offset, size, direction);

	flush_write_buffers();
}

static inline void dma_sync_sg_for_cpu(struct device *hwdev,
				       struct scatterlist *sg,
				       int nelems, int direction)
{
	if (direction == DMA_NONE)
		out_of_line_bug();

	if (swiotlb)
		return swiotlb_sync_sg_for_cpu(hwdev, sg, nelems, direction);

	flush_write_buffers();
}

static inline void dma_sync_sg_for_device(struct device *hwdev,
					  struct scatterlist *sg,
					  int nelems, int direction)
{
	if (direction == DMA_NONE)
		out_of_line_bug();

	if (swiotlb)
		return swiotlb_sync_sg_for_device(hwdev, sg, nelems, direction);

	flush_write_buffers();
}

extern int dma_map_sg(struct device *hwdev, struct scatterlist *sg,
		      int nents, int direction);
extern void dma_unmap_sg(struct device *hwdev, struct scatterlist *sg,
			 int nents, int direction);

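/*
 * Usage sketch (not part of this header): mapping a scatterlist for a
 * block transfer.  "mydev", "sglist" and "nents" are hypothetical names;
 * the count returned by dma_map_sg() may be smaller than nents if entries
 * were merged, and may be 0 on failure.
 *
 *	int mapped = dma_map_sg(&mydev->dev, sglist, nents, DMA_TO_DEVICE);
 *	if (mapped == 0)
 *		return -ENOMEM;
 *	...
 *	dma_unmap_sg(&mydev->dev, sglist, nents, DMA_TO_DEVICE);
 */
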
#define dma_unmap_page dma_unmap_single

extern int dma_supported(struct device *hwdev, u64 mask);
extern int dma_get_cache_alignment(void);
#define dma_is_consistent(h) 1

static inline int dma_set_mask(struct device *dev, u64 mask)
{
	if (!dev->dma_mask || !dma_supported(dev, mask))
		return -EIO;
	*dev->dma_mask = mask;
	return 0;
}

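/*
 * Usage sketch (not part of this header): a probe routine restricting a
 * device to 32-bit DMA addresses.  "pdev" is a hypothetical PCI device.
 *
 *	if (dma_set_mask(&pdev->dev, 0xffffffffULL))
 *		return -EIO;
 */
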
static inline void dma_cache_sync(void *vaddr, size_t size, enum dma_data_direction dir)
{
	flush_write_buffers();
}

#endif