/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/mutex.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>

/* Remove unused code that is neither exported nor called internally */
#define REMOVE_DEAD_CODE

/**************************************************************************
 * Global Variables
 **************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*------------------------------------------------------------------------------
 *      Set the Buffer Clear (RESTART) bit in the Configuration register of the
 *      specific DMA channel.  This stops any descriptor-based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
        dma_ch[channel].regs->cfg |= RESTART;
        SSYNC();
        dma_ch[channel].regs->cfg &= ~RESTART;
        SSYNC();
}

static int __init blackfin_dma_init(void)
{
        int i;

        printk(KERN_INFO "Blackfin DMA Controller\n");

        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
                dma_ch[i].chan_status = DMA_CHANNEL_FREE;
                dma_ch[i].regs = base_addr[i];
                mutex_init(&(dma_ch[i].dmalock));
        }
        /* Mark MEMDMA Channel 0 as requested since we're using it internally */
        dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
        dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;
        return 0;
}

arch_initcall(blackfin_dma_init);

/*------------------------------------------------------------------------------
 *      Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{
        pr_debug("request_dma() : BEGIN\n");
        mutex_lock(&(dma_ch[channel].dmalock));

        if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
            || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
                mutex_unlock(&(dma_ch[channel].dmalock));
                pr_debug("DMA CHANNEL IN USE\n");
                return -EBUSY;
        } else {
                dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
                pr_debug("DMA CHANNEL IS ALLOCATED\n");
        }

        mutex_unlock(&(dma_ch[channel].dmalock));

        dma_ch[channel].device_id = device_id;
        dma_ch[channel].irq_callback = NULL;

        /* Callers must request a DMA channel before performing any
         * operations on the channel or its descriptors.
         */
        pr_debug("request_dma() : END\n");
        return channel;
}
EXPORT_SYMBOL(request_dma);

int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
        int ret_irq = 0;

        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        if (callback != NULL) {
                int ret_val;
                ret_irq = channel2irq(channel);

                dma_ch[channel].data = data;

                ret_val =
                    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
                                dma_ch[channel].device_id, data);
                if (ret_val) {
                        printk(KERN_NOTICE
                               "Request irq in DMA engine failed.\n");
                        return -EPERM;
                }
                dma_ch[channel].irq_callback = callback;
        }
        return 0;
}
EXPORT_SYMBOL(set_dma_callback);
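
/*
 * Illustrative sketch (not part of this file): a typical channel lifecycle as
 * seen by a peripheral driver.  The channel number, device name and callback
 * (assumed to have the usual irqreturn_t (*)(int, void *) shape of
 * dma_interrupt_t) are placeholders.
 *
 *      if (request_dma(MY_DMA_CHANNEL, "my_driver") < 0)
 *              return -EBUSY;
 *      set_dma_callback(MY_DMA_CHANNEL, my_dma_handler, my_dev);
 *      ...program the channel with the setter functions below, then call
 *      enable_dma(); when finished:
 *      free_dma(MY_DMA_CHANNEL);
 */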

void free_dma(unsigned int channel)
{
        int ret_irq;

        pr_debug("freedma() : BEGIN\n");
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        /* Halt the DMA */
        disable_dma(channel);
        clear_dma_buffer(channel);

        if (dma_ch[channel].irq_callback != NULL) {
                ret_irq = channel2irq(channel);
                free_irq(ret_irq, dma_ch[channel].data);
        }

        /* Clear the DMA Variable in the Channel */
        mutex_lock(&(dma_ch[channel].dmalock));
        dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
        mutex_unlock(&(dma_ch[channel].dmalock));

        pr_debug("freedma() : END\n");
}
EXPORT_SYMBOL(free_dma);

void dma_enable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_enable_irq() : BEGIN\n");
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        ret_irq = channel2irq(channel);
        enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_disable_irq() : BEGIN\n");
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        ret_irq = channel2irq(channel);
        disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
        return dma_ch[channel].chan_status != DMA_CHANNEL_FREE;
}
EXPORT_SYMBOL(dma_channel_active);

/*------------------------------------------------------------------------------
 *      Stop the specific DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
        pr_debug("stop_dma() : BEGIN\n");

        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        dma_ch[channel].regs->cfg &= ~DMAEN;    /* Clear the enable bit */
        SSYNC();
        dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
        /* The channel stays allocated; it can be re-enabled with enable_dma() */
        pr_debug("stop_dma() : END\n");
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
        pr_debug("enable_dma() : BEGIN\n");

        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
        dma_ch[channel].regs->curr_x_count = 0;
        dma_ch[channel].regs->curr_y_count = 0;

        dma_ch[channel].regs->cfg |= DMAEN;     /* Set the enable bit */
        SSYNC();
        pr_debug("enable_dma() : END\n");
}
EXPORT_SYMBOL(enable_dma);

/*------------------------------------------------------------------------------
 *      Set the Start Address register for the specific DMA channel.
 *      This function can be used for register-based DMA to set up the
 *      start address.
 *      addr:   Starting address of the DMA data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_start_addr() : BEGIN\n");

        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        dma_ch[channel].regs->start_addr = addr;
        SSYNC();
        pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_next_desc_addr() : BEGIN\n");

        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        dma_ch[channel].regs->next_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        dma_ch[channel].regs->x_count = x_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        dma_ch[channel].regs->y_count = y_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        dma_ch[channel].regs->x_modify = x_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        dma_ch[channel].regs->y_modify = y_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        dma_ch[channel].regs->cfg = config;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_config);

unsigned short
set_bfin_dma_config(char direction, char flow_mode,
                    char intr_mode, char dma_mode, char width)
{
        unsigned short config;

        config =
            ((direction << 1) | (width << 2) | (dma_mode << 4) |
             (intr_mode << 6) | (flow_mode << 12) | RESTART);
        return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
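
/*
 * Illustrative sketch (not part of this file): building a DMAx_CONFIG word
 * with the helper above and handing it to a channel.  The channel, buffer and
 * element count are placeholders, and the parameter values are assumptions
 * about a caller wanting a linear, stop-mode transfer of 16-bit elements that
 * writes to memory with an interrupt on completion; the expansion simply
 * restates the bit layout used above.
 *
 *      unsigned short cfg;
 *
 *      cfg = set_bfin_dma_config(1, 0, 2, 0, 1);
 *      ... = (1 << 1) | (1 << 2) | (0 << 4) | (2 << 6) | (0 << 12) | RESTART
 *      set_dma_config(channel, cfg);
 *      set_dma_start_addr(channel, (unsigned long)buf);
 *      set_dma_x_count(channel, nr_elements);
 *      set_dma_x_modify(channel, 2);   -- 2 bytes per 16-bit element
 *      enable_dma(channel);
 */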

void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        /* Program the descriptor size (NDSIZE) field and point the channel
         * at the first descriptor in the list.
         */
        dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

        dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

        SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);
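
/*
 * Illustrative sketch (not part of this file): handing a small descriptor
 * array to a channel.  The descriptor contents (addresses, counts, config)
 * must be filled in by the caller according to struct dmasg from <asm/dma.h>;
 * the array, channel and element count below are placeholders.
 *
 *      struct dmasg desc[2];
 *
 *      ...fill in desc[0] and desc[1]...
 *      set_dma_sg(channel, desc, 2);
 *      enable_dma(channel);
 */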

/*------------------------------------------------------------------------------
 *      Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *      Clear the DMA_DONE and DMA_ERR bits in the DMA interrupt status register,
 *      acknowledging the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);
        dma_ch[channel].regs->irq_status |= (DMA_DONE | DMA_ERR);
}
EXPORT_SYMBOL(clear_dma_irqstat);

/*------------------------------------------------------------------------------
 *      Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *      Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
        BUG_ON(channel >= MAX_BLACKFIN_DMA_CHANNEL ||
               dma_ch[channel].chan_status == DMA_CHANNEL_FREE);

        return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

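/*
 * Core memory-to-memory copy on MDMA stream 0.  Interrupts are disabled for
 * the duration of the transfer and the CPU busy-waits on DMA_DONE.  Copies
 * larger than 64K use 2D DMA with 1024-byte rows; 16-bit element transfers
 * are used when source, destination and size are all 2-byte aligned,
 * otherwise 8-bit elements.  When the source address lies below the
 * destination, the copy runs backwards (descending addresses).
 */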
static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
        int direction;  /* 1 - address decrease, 0 - address increase */
        int flag_align; /* 1 - address aligned,  0 - address unaligned */
        int flag_2D;    /* 1 - 2D DMA needed,    0 - 1D DMA needed */
        unsigned long flags;

        if (size == 0)
                return NULL;

        local_irq_save(flags);

        if ((unsigned long)src < memory_end)
                blackfin_dcache_flush_range((unsigned int)src,
                                            (unsigned int)(src + size));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        if ((unsigned long)src < (unsigned long)dest)
                direction = 1;
        else
                direction = 0;

        if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
            && ((size % 2) == 0))
                flag_align = 1;
        else
                flag_align = 0;

        if (size > 0x10000)     /* size > 64K */
                flag_2D = 1;
        else
                flag_2D = 0;

        /* Setup destination and source start address */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 2);
                } else {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 1);
                }
        } else {
                bfin_write_MDMA_D0_START_ADDR(dest);
                bfin_write_MDMA_S0_START_ADDR(src);
        }

        /* Setup destination and source xcount */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(1024 / 2);
                        bfin_write_MDMA_S0_X_COUNT(1024 / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(1024);
                        bfin_write_MDMA_S0_X_COUNT(1024);
                }
                bfin_write_MDMA_D0_Y_COUNT(size >> 10);
                bfin_write_MDMA_S0_Y_COUNT(size >> 10);
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(size / 2);
                        bfin_write_MDMA_S0_X_COUNT(size / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(size);
                        bfin_write_MDMA_S0_X_COUNT(size);
                }
        }

        /* Setup destination and source xmodify and ymodify */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(-2);
                        bfin_write_MDMA_S0_X_MODIFY(-2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-2);
                                bfin_write_MDMA_S0_Y_MODIFY(-2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(-1);
                        bfin_write_MDMA_S0_X_MODIFY(-1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-1);
                                bfin_write_MDMA_S0_Y_MODIFY(-1);
                        }
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(2);
                        bfin_write_MDMA_S0_X_MODIFY(2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(2);
                                bfin_write_MDMA_S0_Y_MODIFY(2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(1);
                        bfin_write_MDMA_S0_X_MODIFY(1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(1);
                                bfin_write_MDMA_S0_Y_MODIFY(1);
                        }
                }
        }

        /* Enable source and destination DMA */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
                }
        }

        /* Busy-wait for the transfer to complete */
        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
                                      (DMA_DONE | DMA_ERR));

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);

        if ((unsigned long)dest < memory_end)
                blackfin_dcache_invalidate_range((unsigned int)dest,
                                                 (unsigned int)(dest + size));
        local_irq_restore(flags);

        return dest;
}

void *dma_memcpy(void *dest, const void *src, size_t size)
{
        size_t bulk;
        size_t rest;
        void *addr;

        /* Copy the bulk of the buffer (a whole multiple of 64K) first and the
         * remainder second, so the 2D path in __dma_memcpy() only ever sees a
         * size that is a multiple of its 1024-byte rows.
         */
        bulk = (size >> 16) << 16;
        rest = size - bulk;
        if (bulk)
                __dma_memcpy(dest, src, bulk);
        addr = __dma_memcpy(dest + bulk, src + bulk, rest);
        return addr;
}
EXPORT_SYMBOL(dma_memcpy);
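
/*
 * Illustrative sketch (not part of this file): dma_memcpy() has memcpy()-like
 * semantics but performs the copy on MDMA stream 0 with interrupts disabled
 * and only returns once the transfer has completed.  The buffers and length
 * below are placeholders.
 *
 *      dma_memcpy(dst_buf, src_buf, nbytes);
 */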

void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
        void *addr;
        addr = dma_memcpy(dest, src, size);
        return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);

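/*
 * The dma_outs{b,w,l}() / dma_ins{b,w,l}() helpers below perform a blocking
 * MDMA transfer of 'len' elements between a memory buffer and a fixed
 * peripheral address (the peripheral side uses a modify value of 0).
 * Illustrative sketch with placeholder names:
 *
 *      dma_outsw(port_fifo_addr, tx_buf, n_words);  -- write n_words 16-bit values
 *      dma_insw(port_fifo_addr, rx_buf, n_words);   -- read n_words 16-bit values
 */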
void dma_outsb(void __iomem *addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                                    (unsigned int)buf + len);

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(1);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);

void dma_insb(const void __iomem *addr, void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);
        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(1);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        blackfin_dcache_invalidate_range((unsigned int)buf,
                                         (unsigned int)buf + len);

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);

void dma_outsw(void __iomem *addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        /* len is a count of 16-bit elements, so flush len * 2 bytes */
        blackfin_dcache_flush_range((unsigned int)buf,
                                    (unsigned int)buf + len * sizeof(short));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(2);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(const void __iomem *addr, void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(2);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        /* len is a count of 16-bit elements, so invalidate len * 2 bytes */
        blackfin_dcache_invalidate_range((unsigned int)buf,
                                         (unsigned int)buf + len * sizeof(short));

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);

void dma_outsl(void __iomem *addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        /* len is a count of 32-bit elements, so flush len * 4 bytes */
        blackfin_dcache_flush_range((unsigned int)buf,
                                    (unsigned int)buf + len * sizeof(long));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(4);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(const void __iomem *addr, void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(4);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        /* len is a count of 32-bit elements, so invalidate len * 4 bytes */
        blackfin_dcache_invalidate_range((unsigned int)buf,
                                         (unsigned int)buf + len * sizeof(long));

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);