/* dma.c: PCI and SBUS DMA accessors for 32-bit sparc.
 *
 * Copyright (C) 2008 David S. Miller <davem@davemloft.net>
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

#include <linux/pci.h>
18 int dma_supported(struct device *dev, u64 mask)
21 if (dev->bus == &pci_bus_type)
22 return pci_dma_supported(to_pci_dev(dev), mask);
26 EXPORT_SYMBOL(dma_supported);
28 int dma_set_mask(struct device *dev, u64 dma_mask)
31 if (dev->bus == &pci_bus_type)
32 return pci_set_dma_mask(to_pci_dev(dev), dma_mask);
36 EXPORT_SYMBOL(dma_set_mask);
38 void *dma_alloc_coherent(struct device *dev, size_t size,
39 dma_addr_t *dma_handle, gfp_t flag)
42 if (dev->bus == &pci_bus_type)
43 return pci_alloc_consistent(to_pci_dev(dev), size, dma_handle);
45 return sbus_alloc_consistent(dev, size, dma_handle);
47 EXPORT_SYMBOL(dma_alloc_coherent);
49 void dma_free_coherent(struct device *dev, size_t size,
50 void *cpu_addr, dma_addr_t dma_handle)
53 if (dev->bus == &pci_bus_type) {
54 pci_free_consistent(to_pci_dev(dev), size,
55 cpu_addr, dma_handle);
59 sbus_free_consistent(dev, size, cpu_addr, dma_handle);
61 EXPORT_SYMBOL(dma_free_coherent);
63 dma_addr_t dma_map_single(struct device *dev, void *cpu_addr,
64 size_t size, enum dma_data_direction direction)
67 if (dev->bus == &pci_bus_type)
68 return pci_map_single(to_pci_dev(dev), cpu_addr,
69 size, (int)direction);
71 return sbus_map_single(dev, cpu_addr, size, (int)direction);
73 EXPORT_SYMBOL(dma_map_single);
75 void dma_unmap_single(struct device *dev, dma_addr_t dma_addr,
77 enum dma_data_direction direction)
80 if (dev->bus == &pci_bus_type) {
81 pci_unmap_single(to_pci_dev(dev), dma_addr,
82 size, (int)direction);
86 sbus_unmap_single(dev, dma_addr, size, (int)direction);
88 EXPORT_SYMBOL(dma_unmap_single);
90 dma_addr_t dma_map_page(struct device *dev, struct page *page,
91 unsigned long offset, size_t size,
92 enum dma_data_direction direction)
95 if (dev->bus == &pci_bus_type)
96 return pci_map_page(to_pci_dev(dev), page, offset,
97 size, (int)direction);
99 return sbus_map_single(dev, page_address(page) + offset,
100 size, (int)direction);
102 EXPORT_SYMBOL(dma_map_page);
104 void dma_unmap_page(struct device *dev, dma_addr_t dma_address,
105 size_t size, enum dma_data_direction direction)
108 if (dev->bus == &pci_bus_type) {
109 pci_unmap_page(to_pci_dev(dev), dma_address,
110 size, (int)direction);
114 sbus_unmap_single(dev, dma_address, size, (int)direction);
116 EXPORT_SYMBOL(dma_unmap_page);
118 int dma_map_sg(struct device *dev, struct scatterlist *sg,
119 int nents, enum dma_data_direction direction)
122 if (dev->bus == &pci_bus_type)
123 return pci_map_sg(to_pci_dev(dev), sg, nents, (int)direction);
125 return sbus_map_sg(dev, sg, nents, direction);
127 EXPORT_SYMBOL(dma_map_sg);
129 void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
130 int nents, enum dma_data_direction direction)
133 if (dev->bus == &pci_bus_type) {
134 pci_unmap_sg(to_pci_dev(dev), sg, nents, (int)direction);
138 sbus_unmap_sg(dev, sg, nents, (int)direction);
140 EXPORT_SYMBOL(dma_unmap_sg);
142 void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle,
143 size_t size, enum dma_data_direction direction)
146 if (dev->bus == &pci_bus_type) {
147 pci_dma_sync_single_for_cpu(to_pci_dev(dev), dma_handle,
148 size, (int)direction);
152 sbus_dma_sync_single_for_cpu(dev, dma_handle, size, (int) direction);
154 EXPORT_SYMBOL(dma_sync_single_for_cpu);
156 void dma_sync_single_for_device(struct device *dev, dma_addr_t dma_handle,
157 size_t size, enum dma_data_direction direction)
160 if (dev->bus == &pci_bus_type) {
161 pci_dma_sync_single_for_device(to_pci_dev(dev), dma_handle,
162 size, (int)direction);
166 sbus_dma_sync_single_for_device(dev, dma_handle, size, (int) direction);
168 EXPORT_SYMBOL(dma_sync_single_for_device);
170 void dma_sync_single_range_for_cpu(struct device *dev,
171 dma_addr_t dma_handle,
172 unsigned long offset,
174 enum dma_data_direction direction)
176 dma_sync_single_for_cpu(dev, dma_handle+offset, size, direction);
178 EXPORT_SYMBOL(dma_sync_single_range_for_cpu);
180 void dma_sync_single_range_for_device(struct device *dev, dma_addr_t dma_handle,
181 unsigned long offset, size_t size,
182 enum dma_data_direction direction)
184 dma_sync_single_for_device(dev, dma_handle+offset, size, direction);
186 EXPORT_SYMBOL(dma_sync_single_range_for_device);
188 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
189 int nelems, enum dma_data_direction direction)
192 if (dev->bus == &pci_bus_type) {
193 pci_dma_sync_sg_for_cpu(to_pci_dev(dev), sg,
194 nelems, (int)direction);
200 EXPORT_SYMBOL(dma_sync_sg_for_cpu);
202 void dma_sync_sg_for_device(struct device *dev,
203 struct scatterlist *sg, int nelems,
204 enum dma_data_direction direction)
207 if (dev->bus == &pci_bus_type) {
208 pci_dma_sync_sg_for_device(to_pci_dev(dev), sg,
209 nelems, (int)direction);
215 EXPORT_SYMBOL(dma_sync_sg_for_device);
217 int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
219 return (dma_addr == DMA_ERROR_CODE);
221 EXPORT_SYMBOL(dma_mapping_error);
223 int dma_get_cache_alignment(void)
227 EXPORT_SYMBOL(dma_get_cache_alignment);