/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>

/* Compile out unused code that is neither exported nor called internally */
#define REMOVE_DEAD_CODE

/**************************************************************************
 * Global Variables
 ***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*------------------------------------------------------------------------------
 *       Set the Buffer Clear (RESTART) bit in the Configuration register of a
 *       specific DMA channel. This stops any descriptor-based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
        dma_ch[channel].regs->cfg |= RESTART;
        SSYNC();
        dma_ch[channel].regs->cfg &= ~RESTART;
        SSYNC();
}

static int __init blackfin_dma_init(void)
{
        int i;

        printk(KERN_INFO "Blackfin DMA Controller\n");

        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
                dma_ch[i].chan_status = DMA_CHANNEL_FREE;
                dma_ch[i].regs = dma_io_base_addr[i];
                mutex_init(&(dma_ch[i].dmalock));
        }
        /* Mark MEMDMA Channel 0 as requested since we're using it internally */
        dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
        dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

#if defined(CONFIG_DEB_DMA_URGENT)
        bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
                         | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif
        return 0;
}

arch_initcall(blackfin_dma_init);

/*------------------------------------------------------------------------------
 *      Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{

        pr_debug("request_dma() : BEGIN \n");

#if defined(CONFIG_BF561) && ANOMALY_05000182
        if (channel >= CH_IMEM_STREAM0_DEST && channel <= CH_IMEM_STREAM1_DEST) {
                if (get_cclk() > 500000000) {
                        printk(KERN_WARNING
                               "Request IMDMA failed due to ANOMALY 05000182\n");
                        return -EFAULT;
                }
        }
#endif

        mutex_lock(&(dma_ch[channel].dmalock));

        if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
            || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
                mutex_unlock(&(dma_ch[channel].dmalock));
                pr_debug("DMA CHANNEL IN USE  \n");
                return -EBUSY;
        } else {
                dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
                pr_debug("DMA CHANNEL IS ALLOCATED  \n");
        }

        mutex_unlock(&(dma_ch[channel].dmalock));

#ifdef CONFIG_BF54x
        if (channel >= CH_UART2_RX && channel <= CH_UART3_TX) {
                if (strncmp(device_id, "BFIN_UART", 9) == 0) {
                        dma_ch[channel].regs->peripheral_map &= 0x0FFF;
                        dma_ch[channel].regs->peripheral_map |=
                                ((channel - CH_UART2_RX + 0xC) << 12);
                } else {
                        dma_ch[channel].regs->peripheral_map &= 0x0FFF;
                        dma_ch[channel].regs->peripheral_map |=
                                ((channel - CH_UART2_RX + 0x6) << 12);
                }
        }
#endif

        dma_ch[channel].device_id = device_id;
        dma_ch[channel].irq_callback = NULL;

        /* Restriction: the DMA channel must be requested here before any
         * other operation is performed on the descriptor/channel.
         */
        pr_debug("request_dma() : END  \n");
        return channel;
}
EXPORT_SYMBOL(request_dma);

int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
        int ret_irq = 0;

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        if (callback != NULL) {
                int ret_val;
                ret_irq = channel2irq(channel);

                dma_ch[channel].data = data;

                ret_val =
                    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
                                dma_ch[channel].device_id, data);
                if (ret_val) {
                        printk(KERN_NOTICE
                               "Request irq in DMA engine failed.\n");
                        return -EPERM;
                }
                dma_ch[channel].irq_callback = callback;
        }
        return 0;
}
EXPORT_SYMBOL(set_dma_callback);
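
/*
 * Typical use of the channel API above (an illustrative sketch only, not
 * code from this driver): a peripheral driver requests a channel, installs
 * a completion handler and releases the channel when done.  MY_CHANNEL,
 * my_dev and my_dma_handler() are hypothetical names; the handler is
 * assumed to follow the dma_interrupt_t prototype used by set_dma_callback().
 *
 *	static irqreturn_t my_dma_handler(int irq, void *dev_id)
 *	{
 *		clear_dma_irqstat(MY_CHANNEL);
 *		return IRQ_HANDLED;
 *	}
 *
 *	if (request_dma(MY_CHANNEL, "my_driver") < 0)
 *		return -EBUSY;
 *	set_dma_callback(MY_CHANNEL, my_dma_handler, my_dev);
 *	...
 *	free_dma(MY_CHANNEL);
 */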

void free_dma(unsigned int channel)
{
        int ret_irq;

        pr_debug("freedma() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        /* Halt the DMA */
        disable_dma(channel);
        clear_dma_buffer(channel);

        if (dma_ch[channel].irq_callback != NULL) {
                ret_irq = channel2irq(channel);
                free_irq(ret_irq, dma_ch[channel].data);
        }

        /* Clear the DMA Variable in the Channel */
        mutex_lock(&(dma_ch[channel].dmalock));
        dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
        mutex_unlock(&(dma_ch[channel].dmalock));

        pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);

void dma_enable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_enable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_disable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
        if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
                return 0;
        } else {
                return 1;
        }
}
EXPORT_SYMBOL(dma_channel_active);

/*------------------------------------------------------------------------------
 *      Stop the specific DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
        pr_debug("stop_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg &= ~DMAEN;    /* Clear the enable bit */
        SSYNC();
        dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
        /* Needs to be enabled again later via enable_dma() */
        pr_debug("stop_dma() : END \n");
        return;
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
        pr_debug("enable_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
        dma_ch[channel].regs->curr_x_count = 0;
        dma_ch[channel].regs->curr_y_count = 0;

        dma_ch[channel].regs->cfg |= DMAEN;     /* Set the enable bit */
        SSYNC();
        pr_debug("enable_dma() : END \n");
        return;
}
EXPORT_SYMBOL(enable_dma);

/*------------------------------------------------------------------------------
 *      Set the Start Address register for a specific DMA channel.
 *      This function is used for register-based DMA to set up the start address.
 *      addr:   starting address of the DMA data to be transferred
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_start_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->start_addr = addr;
        SSYNC();
        pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_next_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->next_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_curr_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_count = x_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_count = y_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_modify = x_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_modify = y_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg = config;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_config);

unsigned short
set_bfin_dma_config(char direction, char flow_mode,
                    char intr_mode, char dma_mode, char width, char syncmode)
{
        unsigned short config;

        config =
            ((direction << 1) | (width << 2) | (dma_mode << 4) |
             (intr_mode << 6) | (flow_mode << 12) | (syncmode << 5));
        return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
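
/*
 * Example of driving a register-based transfer with the helpers above (an
 * illustrative sketch, not code from this driver).  It builds a config word
 * for a 1D, 16-bit transfer that reads from memory (e.g. a TX channel) and
 * then programs and starts the channel.  MY_CHANNEL, buf and nwords are
 * hypothetical, and the DIR_READ / DMA_FLOW_STOP / INTR_ON_BUF /
 * DIMENSION_LINEAR / DATA_SIZE_16 / DMA_SYNC_RESTART names are assumed to
 * come from <asm/dma.h>.
 *
 *	unsigned short cfg;
 *
 *	cfg = set_bfin_dma_config(DIR_READ, DMA_FLOW_STOP, INTR_ON_BUF,
 *				  DIMENSION_LINEAR, DATA_SIZE_16,
 *				  DMA_SYNC_RESTART);
 *	set_dma_start_addr(MY_CHANNEL, (unsigned long)buf);
 *	set_dma_x_count(MY_CHANNEL, nwords);
 *	set_dma_x_modify(MY_CHANNEL, 2);
 *	set_dma_config(MY_CHANNEL, cfg);
 *	enable_dma(MY_CHANNEL);
 */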

void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

        dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

        SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);

void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_addr_ptr = addr;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_curr_addr);

/*------------------------------------------------------------------------------
 *      Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *      Clear the DMA_DONE and DMA_ERR bits in the DMA interrupt status
 *      register (write-1-to-clear), stopping the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));
        dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);
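
/*
 * Instead of an interrupt callback, completion can also be polled with the
 * two helpers above (an illustrative sketch; MY_CHANNEL is hypothetical).
 * DMA_DONE is the write-1-to-clear completion bit in the channel's
 * IRQ_STATUS register:
 *
 *	enable_dma(MY_CHANNEL);
 *	while (!(get_dma_curr_irqstat(MY_CHANNEL) & DMA_DONE))
 *		cpu_relax();
 *	clear_dma_irqstat(MY_CHANNEL);
 */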

/*------------------------------------------------------------------------------
 *      Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *      Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);

#ifdef CONFIG_PM
int blackfin_dma_suspend(void)
{
        int i;

#ifdef CONFIG_BF561     /* IMDMA channels don't have a PERIPHERAL_MAP */
        for (i = 0; i <= CH_MEM_STREAM3_SRC; i++) {
#else
        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
#endif
                if (dma_ch[i].chan_status == DMA_CHANNEL_ENABLED) {
                        printk(KERN_ERR "DMA Channel %d failed to suspend\n", i);
                        return -EBUSY;
                }

                dma_ch[i].saved_peripheral_map = dma_ch[i].regs->peripheral_map;
        }

        return 0;
}

void blackfin_dma_resume(void)
{
        int i;

#ifdef CONFIG_BF561     /* IMDMA channels don't have a PERIPHERAL_MAP */
        for (i = 0; i <= CH_MEM_STREAM3_SRC; i++)
#else
        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++)
#endif
                dma_ch[i].regs->peripheral_map = dma_ch[i].saved_peripheral_map;
}
#endif

static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
        int direction;  /* 1 - address decrease, 0 - address increase */
        int flag_align; /* 1 - address aligned,  0 - address unaligned */
        int flag_2D;    /* 1 - 2D DMA needed,    0 - 1D DMA needed */
        unsigned long flags;

        if (size <= 0)
                return NULL;

        local_irq_save(flags);

        if ((unsigned long)src < memory_end)
                blackfin_dcache_flush_range((unsigned int)src,
                                            (unsigned int)(src + size));

        if ((unsigned long)dest < memory_end)
                blackfin_dcache_invalidate_range((unsigned int)dest,
                                                 (unsigned int)(dest + size));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        if ((unsigned long)src < (unsigned long)dest)
                direction = 1;
        else
                direction = 0;

        if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
            && ((size % 2) == 0))
                flag_align = 1;
        else
                flag_align = 0;

        if (size > 0x10000)     /* size > 64K */
                flag_2D = 1;
        else
                flag_2D = 0;

        /* Setup destination and source start address */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 2);
                } else {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 1);
                }
        } else {
                bfin_write_MDMA_D0_START_ADDR(dest);
                bfin_write_MDMA_S0_START_ADDR(src);
        }

        /* Setup destination and source xcount */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(1024 / 2);
                        bfin_write_MDMA_S0_X_COUNT(1024 / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(1024);
                        bfin_write_MDMA_S0_X_COUNT(1024);
                }
                bfin_write_MDMA_D0_Y_COUNT(size >> 10);
                bfin_write_MDMA_S0_Y_COUNT(size >> 10);
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(size / 2);
                        bfin_write_MDMA_S0_X_COUNT(size / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(size);
                        bfin_write_MDMA_S0_X_COUNT(size);
                }
        }

        /* Setup destination and source xmodify and ymodify */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(-2);
                        bfin_write_MDMA_S0_X_MODIFY(-2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-2);
                                bfin_write_MDMA_S0_Y_MODIFY(-2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(-1);
                        bfin_write_MDMA_S0_X_MODIFY(-1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-1);
                                bfin_write_MDMA_S0_Y_MODIFY(-1);
                        }
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(2);
                        bfin_write_MDMA_S0_X_MODIFY(2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(2);
                                bfin_write_MDMA_S0_Y_MODIFY(2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(1);
                        bfin_write_MDMA_S0_X_MODIFY(1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(1);
                                bfin_write_MDMA_S0_Y_MODIFY(1);
                        }
                }
        }

        /* Enable source DMA */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
                }
        }

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
                                      (DMA_DONE | DMA_ERR));

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);

        local_irq_restore(flags);

        return dest;
}

void *dma_memcpy(void *dest, const void *src, size_t size)
{
        size_t bulk;
        size_t rest;
        void *addr;

        bulk = (size >> 16) << 16;
        rest = size - bulk;
        if (bulk)
                __dma_memcpy(dest, src, bulk);
        addr = __dma_memcpy(dest + bulk, src + bulk, rest);
        return addr;
}
EXPORT_SYMBOL(dma_memcpy);
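
/*
 * dma_memcpy() splits a request into a "bulk" part that is a multiple of
 * 64K, handled by the 2D path of __dma_memcpy() (1024 bytes per row,
 * size >> 10 rows), and a "rest" part below 64K, handled by the 1D path.
 * For example, with size = 0x12345 bytes:
 *
 *	bulk = (0x12345 >> 16) << 16 = 0x10000
 *	rest = 0x12345 - 0x10000     = 0x02345
 *
 * so __dma_memcpy() is called once for 0x10000 bytes and once more for the
 * remaining 0x2345 bytes starting at dest + 0x10000 / src + 0x10000.
 */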

void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
        void *addr;
        addr = dma_memcpy(dest, src, size);
        return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);

void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(1);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);
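
/*
 * dma_outsb() above and the dma_insb/dma_outsw/dma_insw/dma_outsl/dma_insl
 * helpers below all follow the same pattern: the memory side is stepped by
 * the element size while the I/O side uses an X_MODIFY of 0, so every
 * element is written to (or read from) the same memory-mapped register.
 * Illustrative sketch only (fifo_reg, buf and nwords are hypothetical):
 *
 *	dma_outsw(fifo_reg, buf, nwords);	writes nwords 16-bit values to fifo_reg
 *	dma_insw(fifo_reg, buf, nwords);	reads nwords 16-bit values from fifo_reg
 */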

void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        local_irq_save(flags);
        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(1);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);

void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(2);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(2);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);

void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(4);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(4);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);