/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>

/**************************************************************************
 * Global Variables
***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*------------------------------------------------------------------------------
 *       Set the Buffer Clear bit in the Configuration register of the specified
 *       DMA channel. This stops the descriptor-based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
        dma_ch[channel].regs->cfg |= RESTART;
        SSYNC();
        dma_ch[channel].regs->cfg &= ~RESTART;
}

static int __init blackfin_dma_init(void)
{
        int i;

        printk(KERN_INFO "Blackfin DMA Controller\n");

        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
                dma_ch[i].chan_status = DMA_CHANNEL_FREE;
                dma_ch[i].regs = dma_io_base_addr[i];
                mutex_init(&(dma_ch[i].dmalock));
        }
        /* Mark MEMDMA Channel 0 as requested since we're using it internally */
        dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
        dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

#if defined(CONFIG_DEB_DMA_URGENT)
        bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
                         | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif
        return 0;
}

arch_initcall(blackfin_dma_init);
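
/*
 * Note: arch_initcall() runs before the ordinary device/module initcalls,
 * so the channel table above is initialized before any driver has a chance
 * to call request_dma().
 */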

/*------------------------------------------------------------------------------
 *      Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{
        pr_debug("request_dma() : BEGIN \n");

#if defined(CONFIG_BF561) && ANOMALY_05000182
        if (channel >= CH_IMEM_STREAM0_DEST && channel <= CH_IMEM_STREAM1_DEST) {
                if (get_cclk() > 500000000) {
                        printk(KERN_WARNING
                               "Request IMDMA failed due to ANOMALY 05000182\n");
                        return -EFAULT;
                }
        }
#endif

        mutex_lock(&(dma_ch[channel].dmalock));

        if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
            || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
                mutex_unlock(&(dma_ch[channel].dmalock));
                pr_debug("DMA CHANNEL IN USE  \n");
                return -EBUSY;
        } else {
                dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
                pr_debug("DMA CHANNEL IS ALLOCATED  \n");
        }

        mutex_unlock(&(dma_ch[channel].dmalock));

#ifdef CONFIG_BF54x
        if (channel >= CH_UART2_RX && channel <= CH_UART3_TX) {
                unsigned int per_map;
                per_map = dma_ch[channel].regs->peripheral_map & 0xFFF;
                if (strncmp(device_id, "BFIN_UART", 9) == 0)
                        dma_ch[channel].regs->peripheral_map = per_map |
                                ((channel - CH_UART2_RX + 0xC) << 12);
                else
                        dma_ch[channel].regs->peripheral_map = per_map |
                                ((channel - CH_UART2_RX + 0x6) << 12);
        }
#endif

        dma_ch[channel].device_id = device_id;
        dma_ch[channel].irq_callback = NULL;

        /* Note: a DMA channel must be requested here before any operation
         * is performed on its descriptors or channel registers.
         */
        pr_debug("request_dma() : END  \n");
        return channel;
}
EXPORT_SYMBOL(request_dma);

int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        if (callback != NULL) {
                int ret_val;
                dma_ch[channel].irq = channel2irq(channel);
                dma_ch[channel].data = data;

                ret_val =
                    request_irq(dma_ch[channel].irq, callback, IRQF_DISABLED,
                                dma_ch[channel].device_id, data);
                if (ret_val) {
                        printk(KERN_NOTICE
                               "Request irq in DMA engine failed.\n");
                        return -EPERM;
                }
                dma_ch[channel].irq_callback = callback;
        }
        return 0;
}
EXPORT_SYMBOL(set_dma_callback);
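
/*
 * Illustrative sketch (not part of the original file): a typical peripheral
 * driver claims a channel and installs a completion handler roughly like
 * this. CH_SPORT0_RX is a real channel id on most Blackfin parts; the names
 * my_dma_handler and my_probe are hypothetical.
 *
 *	static irqreturn_t my_dma_handler(int irq, void *dev_id)
 *	{
 *		clear_dma_irqstat(CH_SPORT0_RX);	// ack DMA_DONE/DMA_ERR
 *		return IRQ_HANDLED;
 *	}
 *
 *	static int my_probe(void)
 *	{
 *		if (request_dma(CH_SPORT0_RX, "MY_SPORT_RX") < 0)
 *			return -EBUSY;
 *		if (set_dma_callback(CH_SPORT0_RX, my_dma_handler, NULL)) {
 *			free_dma(CH_SPORT0_RX);
 *			return -EBUSY;
 *		}
 *		return 0;
 *	}
 */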

void free_dma(unsigned int channel)
{
        pr_debug("freedma() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        /* Halt the DMA */
        disable_dma(channel);
        clear_dma_buffer(channel);

        if (dma_ch[channel].irq_callback != NULL)
                free_irq(dma_ch[channel].irq, dma_ch[channel].data);

        /* Clear the DMA Variable in the Channel */
        mutex_lock(&(dma_ch[channel].dmalock));
        dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
        mutex_unlock(&(dma_ch[channel].dmalock));

        pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);

void dma_enable_irq(unsigned int channel)
{
        pr_debug("dma_enable_irq() : BEGIN \n");
        enable_irq(dma_ch[channel].irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
        pr_debug("dma_disable_irq() : BEGIN \n");
        disable_irq(dma_ch[channel].irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
        if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
                return 0;
        } else {
                return 1;
        }
}
EXPORT_SYMBOL(dma_channel_active);

/*------------------------------------------------------------------------------
 *       Stop the specific DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
        pr_debug("stop_dma() : BEGIN \n");
        dma_ch[channel].regs->cfg &= ~DMAEN;    /* Clear the enable bit */
        SSYNC();
        dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
        /* Needs to be enabled again later */
        pr_debug("stop_dma() : END \n");
        return;
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
        pr_debug("enable_dma() : BEGIN \n");
        dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
        dma_ch[channel].regs->curr_x_count = 0;
        dma_ch[channel].regs->curr_y_count = 0;

        dma_ch[channel].regs->cfg |= DMAEN;     /* Set the enable bit */
        pr_debug("enable_dma() : END \n");
        return;
}
EXPORT_SYMBOL(enable_dma);

/*------------------------------------------------------------------------------
 *              Set the Start Address register for the specific DMA channel.
 *              This function can be used for register-based DMA to set up
 *              the start address.
 *              addr:           Starting address of the DMA data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_start_addr() : BEGIN \n");
        dma_ch[channel].regs->start_addr = addr;
        pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_next_desc_addr() : BEGIN \n");
        dma_ch[channel].regs->next_desc_ptr = addr;
        pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_curr_desc_addr() : BEGIN \n");
        dma_ch[channel].regs->curr_desc_ptr = addr;
        pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
        dma_ch[channel].regs->x_count = x_count;
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
        dma_ch[channel].regs->y_count = y_count;
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
        dma_ch[channel].regs->x_modify = x_modify;
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
        dma_ch[channel].regs->y_modify = y_modify;
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
        dma_ch[channel].regs->cfg = config;
}
EXPORT_SYMBOL(set_dma_config);

unsigned short
set_bfin_dma_config(char direction, char flow_mode,
                    char intr_mode, char dma_mode, char width, char syncmode)
{
        unsigned short config;

        config =
            ((direction << 1) | (width << 2) | (dma_mode << 4) |
             (intr_mode << 6) | (flow_mode << 12) | (syncmode << 5));
        return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
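
/*
 * Worked example of the packing above (values only; the field meanings are
 * read off the shift amounts, which match the standard DMAx_CONFIG layout):
 *
 *	set_bfin_dma_config(1, 0, 0, 0, 1, 0)
 *	  direction = 1  ->  1 << 1  = 0x0002   (WNR: DMA writes to memory)
 *	  width     = 1  ->  1 << 2  = 0x0004   (16-bit word size)
 *	  dma_mode  = 0  ->  0 << 4  = 0x0000   (1D / linear transfer)
 *	  syncmode  = 0  ->  0 << 5  = 0x0000
 *	  intr_mode = 0  ->  0 << 6  = 0x0000   (no completion interrupt)
 *	  flow_mode = 0  ->  0 << 12 = 0x0000   (stop mode, no descriptors)
 *	  -------------------------------------
 *	  returns 0x0006
 *
 * DMAEN is deliberately not part of the result; enable_dma() sets it when
 * the transfer is actually started.
 */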

void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
        dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);
        dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;
}
EXPORT_SYMBOL(set_dma_sg);

void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
        dma_ch[channel].regs->curr_addr_ptr = addr;
}
EXPORT_SYMBOL(set_dma_curr_addr);
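
/*
 * Illustrative sketch (not part of the original file): a complete
 * register-based (non-descriptor) 1D transfer built from the helpers above.
 * CH_SPORT0_RX, start_rx and wait_rx_done are hypothetical; the caller is
 * responsible for cache coherence of buf, as __dma_memcpy() below does for
 * its own buffers.
 *
 *	static void start_rx(void *buf, unsigned short nwords)
 *	{
 *		set_dma_start_addr(CH_SPORT0_RX, (unsigned long)buf);
 *		set_dma_x_count(CH_SPORT0_RX, nwords);
 *		set_dma_x_modify(CH_SPORT0_RX, 2);	// 16-bit stride
 *		set_dma_config(CH_SPORT0_RX,
 *			set_bfin_dma_config(1, 0, 0, 0, 1, 0));
 *		enable_dma(CH_SPORT0_RX);
 *	}
 *
 *	static void wait_rx_done(void)
 *	{
 *		while (!(get_dma_curr_irqstat(CH_SPORT0_RX) & DMA_DONE))
 *			cpu_relax();
 *		clear_dma_irqstat(CH_SPORT0_RX);
 *		disable_dma(CH_SPORT0_RX);
 *	}
 */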

/*------------------------------------------------------------------------------
 *      Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
        return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *      Clear the DMA_DONE and DMA_ERR bits in the DMA status register
 *      (write-one-to-clear), acknowledging the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
        dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);

/*------------------------------------------------------------------------------
 *      Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
        return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *      Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
        return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
        return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
        return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
        return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);

#ifdef CONFIG_PM
int blackfin_dma_suspend(void)
{
        int i;

#ifdef CONFIG_BF561     /* IMDMA channels don't have a PERIPHERAL_MAP */
        for (i = 0; i <= CH_MEM_STREAM3_SRC; i++) {
#else
        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
#endif
                if (dma_ch[i].chan_status == DMA_CHANNEL_ENABLED) {
                        printk(KERN_ERR "DMA Channel %d failed to suspend\n", i);
                        return -EBUSY;
                }

                dma_ch[i].saved_peripheral_map = dma_ch[i].regs->peripheral_map;
        }

        return 0;
}

void blackfin_dma_resume(void)
{
        int i;

#ifdef CONFIG_BF561     /* IMDMA channels don't have a PERIPHERAL_MAP */
        for (i = 0; i <= CH_MEM_STREAM3_SRC; i++)
#else
        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++)
#endif
                dma_ch[i].regs->peripheral_map = dma_ch[i].saved_peripheral_map;
}
#endif

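/*
 * __dma_memcpy() drives MDMA stream 0 directly to copy "size" bytes with
 * interrupts disabled, busy-waiting on DMA_DONE. It flushes the source and
 * invalidates the destination in the data cache, copies backwards (negative
 * modify values) when dest lies above src so overlapping regions are safe,
 * uses 16-bit transfers when both pointers and the size are 2-byte aligned,
 * and switches to a 2D transfer with 1024-byte rows when size exceeds 64K.
 * In the 2D case only whole 1024-byte rows are copied, so dma_memcpy() below
 * splits a request into a 64K-aligned bulk part plus a 1D remainder.
 */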
static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
        int direction;  /* 1 - address decrease, 0 - address increase */
        int flag_align; /* 1 - address aligned,  0 - address unaligned */
        int flag_2D;    /* 1 - 2D DMA needed,    0 - 1D DMA needed */
        unsigned long flags;

        if (size <= 0)
                return NULL;

        local_irq_save(flags);

        if ((unsigned long)src < memory_end)
                blackfin_dcache_flush_range((unsigned int)src,
                                            (unsigned int)(src + size));

        if ((unsigned long)dest < memory_end)
                blackfin_dcache_invalidate_range((unsigned int)dest,
                                                 (unsigned int)(dest + size));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        if ((unsigned long)src < (unsigned long)dest)
                direction = 1;
        else
                direction = 0;

        if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
            && ((size % 2) == 0))
                flag_align = 1;
        else
                flag_align = 0;

        if (size > 0x10000)     /* size > 64K */
                flag_2D = 1;
        else
                flag_2D = 0;

        /* Setup destination and source start address */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 2);
                } else {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 1);
                }
        } else {
                bfin_write_MDMA_D0_START_ADDR(dest);
                bfin_write_MDMA_S0_START_ADDR(src);
        }

        /* Setup destination and source xcount */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(1024 / 2);
                        bfin_write_MDMA_S0_X_COUNT(1024 / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(1024);
                        bfin_write_MDMA_S0_X_COUNT(1024);
                }
                bfin_write_MDMA_D0_Y_COUNT(size >> 10);
                bfin_write_MDMA_S0_Y_COUNT(size >> 10);
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(size / 2);
                        bfin_write_MDMA_S0_X_COUNT(size / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(size);
                        bfin_write_MDMA_S0_X_COUNT(size);
                }
        }

        /* Setup destination and source xmodify and ymodify */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(-2);
                        bfin_write_MDMA_S0_X_MODIFY(-2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-2);
                                bfin_write_MDMA_S0_Y_MODIFY(-2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(-1);
                        bfin_write_MDMA_S0_X_MODIFY(-1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-1);
                                bfin_write_MDMA_S0_Y_MODIFY(-1);
                        }
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(2);
                        bfin_write_MDMA_S0_X_MODIFY(2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(2);
                                bfin_write_MDMA_S0_Y_MODIFY(2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(1);
                        bfin_write_MDMA_S0_X_MODIFY(1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(1);
                                bfin_write_MDMA_S0_Y_MODIFY(1);
                        }
                }
        }

        /* Enable source DMA */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
                }
        }

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
                                      (DMA_DONE | DMA_ERR));

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);

        local_irq_restore(flags);

        return dest;
}

void *dma_memcpy(void *dest, const void *src, size_t size)
{
        size_t bulk;
        size_t rest;
        void *addr;

        bulk = (size >> 16) << 16;
        rest = size - bulk;
        if (bulk)
                __dma_memcpy(dest, src, bulk);
        addr = __dma_memcpy(dest + bulk, src + bulk, rest);
        return addr;
}
EXPORT_SYMBOL(dma_memcpy);
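
/*
 * Usage note: dma_memcpy() accepts any size; it issues one (possibly 2D)
 * MDMA transfer for the 64K-aligned bulk of the request and a second 1D
 * transfer for the remainder. For example (dst_buf/src_buf are hypothetical
 * DMA-reachable buffers):
 *
 *	dma_memcpy(dst_buf, src_buf, 0x12345);	// 0x10000 bulk + 0x2345 rest
 *
 * The copy runs with local interrupts disabled until it completes.
 */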

void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
        void *addr;
        addr = dma_memcpy(dest, src, size);
        return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);

void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(1);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);
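
/*
 * Illustrative sketch (not part of the original file): dma_outsb() and the
 * dma_outsw()/dma_outsl() variants below copy "len" elements between a
 * memory buffer and a fixed (non-incrementing) address, which is what a
 * memory-mapped peripheral FIFO expects. The destination X_MODIFY is 0, so
 * every element is written to the same address; the memory side increments
 * by the element size. FIFO_ADDR and tx_buf are hypothetical:
 *
 *	unsigned char tx_buf[64];
 *	dma_outsb(FIFO_ADDR, tx_buf, sizeof(tx_buf));	// 64 byte-wide writes
 *	dma_insb(FIFO_ADDR, tx_buf, sizeof(tx_buf));	// read 64 bytes back
 */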

void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        local_irq_save(flags);
        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(1);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);

void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(2);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(2);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);

void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(4);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(4);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);