/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/mutex.h>
#include <linux/param.h>

#include <asm/dma.h>
#include <asm/cacheflush.h>

/* Remove dead code that is neither exported nor called internally */
#define REMOVE_DEAD_CODE

/**************************************************************************
 * Global Variables
***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];
#if defined(CONFIG_BF561)
static struct dma_register *base_addr[MAX_BLACKFIN_DMA_CHANNEL] = {
        (struct dma_register *) DMA1_0_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_1_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_2_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_3_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_4_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_5_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_6_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_7_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_8_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_9_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_10_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_11_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_0_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_1_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_2_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_3_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_4_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_5_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_6_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_7_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_8_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_9_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_10_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_11_NEXT_DESC_PTR,
        (struct dma_register *) MDMA1_D0_NEXT_DESC_PTR,
        (struct dma_register *) MDMA1_S0_NEXT_DESC_PTR,
        (struct dma_register *) MDMA1_D1_NEXT_DESC_PTR,
        (struct dma_register *) MDMA1_S1_NEXT_DESC_PTR,
        (struct dma_register *) MDMA2_D0_NEXT_DESC_PTR,
        (struct dma_register *) MDMA2_S0_NEXT_DESC_PTR,
        (struct dma_register *) MDMA2_D1_NEXT_DESC_PTR,
        (struct dma_register *) MDMA2_S1_NEXT_DESC_PTR,
        (struct dma_register *) IMDMA_D0_NEXT_DESC_PTR,
        (struct dma_register *) IMDMA_S0_NEXT_DESC_PTR,
        (struct dma_register *) IMDMA_D1_NEXT_DESC_PTR,
        (struct dma_register *) IMDMA_S1_NEXT_DESC_PTR,
};
#else
static struct dma_register *base_addr[MAX_BLACKFIN_DMA_CHANNEL] = {
        (struct dma_register *) DMA0_NEXT_DESC_PTR,
        (struct dma_register *) DMA1_NEXT_DESC_PTR,
        (struct dma_register *) DMA2_NEXT_DESC_PTR,
        (struct dma_register *) DMA3_NEXT_DESC_PTR,
        (struct dma_register *) DMA4_NEXT_DESC_PTR,
        (struct dma_register *) DMA5_NEXT_DESC_PTR,
        (struct dma_register *) DMA6_NEXT_DESC_PTR,
        (struct dma_register *) DMA7_NEXT_DESC_PTR,
#if (defined(CONFIG_BF537) || defined(CONFIG_BF534) || defined(CONFIG_BF536))
        (struct dma_register *) DMA8_NEXT_DESC_PTR,
        (struct dma_register *) DMA9_NEXT_DESC_PTR,
        (struct dma_register *) DMA10_NEXT_DESC_PTR,
        (struct dma_register *) DMA11_NEXT_DESC_PTR,
#endif
        (struct dma_register *) MDMA_D0_NEXT_DESC_PTR,
        (struct dma_register *) MDMA_S0_NEXT_DESC_PTR,
        (struct dma_register *) MDMA_D1_NEXT_DESC_PTR,
        (struct dma_register *) MDMA_S1_NEXT_DESC_PTR,
};
#endif

/*------------------------------------------------------------------------------
 *       Set the Buffer Clear bit in the Configuration register of a specific
 *       DMA channel. This stops the descriptor-based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
        dma_ch[channel].regs->cfg |= RESTART;
        SSYNC();
        dma_ch[channel].regs->cfg &= ~RESTART;
        SSYNC();
}

int __init blackfin_dma_init(void)
{
        int i;

        printk(KERN_INFO "Blackfin DMA Controller\n");

        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
                dma_ch[i].chan_status = DMA_CHANNEL_FREE;
                dma_ch[i].regs = base_addr[i];
                mutex_init(&(dma_ch[i].dmalock));
        }

        return 0;
}

arch_initcall(blackfin_dma_init);

/*
 *      From the channel, find the IRQ number for that channel.
 */
#if !defined(CONFIG_BF561)

static int bf533_channel2irq(unsigned int channel)
{
        int ret_irq = -1;

        switch (channel) {
        case CH_PPI:
                ret_irq = IRQ_PPI;
                break;

#if (defined(CONFIG_BF537) || defined(CONFIG_BF534) || defined(CONFIG_BF536))
        case CH_EMAC_RX:
                ret_irq = IRQ_MAC_RX;
                break;

        case CH_EMAC_TX:
                ret_irq = IRQ_MAC_TX;
                break;

        case CH_UART1_RX:
                ret_irq = IRQ_UART1_RX;
                break;

        case CH_UART1_TX:
                ret_irq = IRQ_UART1_TX;
                break;
#endif

        case CH_SPORT0_RX:
                ret_irq = IRQ_SPORT0_RX;
                break;

        case CH_SPORT0_TX:
                ret_irq = IRQ_SPORT0_TX;
                break;

        case CH_SPORT1_RX:
                ret_irq = IRQ_SPORT1_RX;
                break;

        case CH_SPORT1_TX:
                ret_irq = IRQ_SPORT1_TX;
                break;

        case CH_SPI:
                ret_irq = IRQ_SPI;
                break;

        case CH_UART_RX:
                ret_irq = IRQ_UART_RX;
                break;

        case CH_UART_TX:
                ret_irq = IRQ_UART_TX;
                break;

        case CH_MEM_STREAM0_SRC:
        case CH_MEM_STREAM0_DEST:
                ret_irq = IRQ_MEM_DMA0;
                break;

        case CH_MEM_STREAM1_SRC:
        case CH_MEM_STREAM1_DEST:
                ret_irq = IRQ_MEM_DMA1;
                break;
        }
        return ret_irq;
}

# define channel2irq(channel) bf533_channel2irq(channel)

#else

static int bf561_channel2irq(unsigned int channel)
{
        int ret_irq = -1;

        switch (channel) {
        case CH_PPI0:
                ret_irq = IRQ_PPI0;
                break;
        case CH_PPI1:
                ret_irq = IRQ_PPI1;
                break;
        case CH_SPORT0_RX:
                ret_irq = IRQ_SPORT0_RX;
                break;
        case CH_SPORT0_TX:
                ret_irq = IRQ_SPORT0_TX;
                break;
        case CH_SPORT1_RX:
                ret_irq = IRQ_SPORT1_RX;
                break;
        case CH_SPORT1_TX:
                ret_irq = IRQ_SPORT1_TX;
                break;
        case CH_SPI:
                ret_irq = IRQ_SPI;
                break;
        case CH_UART_RX:
                ret_irq = IRQ_UART_RX;
                break;
        case CH_UART_TX:
                ret_irq = IRQ_UART_TX;
                break;

        case CH_MEM_STREAM0_SRC:
        case CH_MEM_STREAM0_DEST:
                ret_irq = IRQ_MEM_DMA0;
                break;
        case CH_MEM_STREAM1_SRC:
        case CH_MEM_STREAM1_DEST:
                ret_irq = IRQ_MEM_DMA1;
                break;
        case CH_MEM_STREAM2_SRC:
        case CH_MEM_STREAM2_DEST:
                ret_irq = IRQ_MEM_DMA2;
                break;
        case CH_MEM_STREAM3_SRC:
        case CH_MEM_STREAM3_DEST:
                ret_irq = IRQ_MEM_DMA3;
                break;

        case CH_IMEM_STREAM0_SRC:
        case CH_IMEM_STREAM0_DEST:
                ret_irq = IRQ_IMEM_DMA0;
                break;
        case CH_IMEM_STREAM1_SRC:
        case CH_IMEM_STREAM1_DEST:
                ret_irq = IRQ_IMEM_DMA1;
                break;
        }
        return ret_irq;
}

# define channel2irq(channel) bf561_channel2irq(channel)

#endif

/*------------------------------------------------------------------------------
 *      Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{
        pr_debug("request_dma() : BEGIN \n");
        mutex_lock(&(dma_ch[channel].dmalock));

        if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
            || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
                mutex_unlock(&(dma_ch[channel].dmalock));
                pr_debug("DMA CHANNEL IN USE  \n");
                return -EBUSY;
        } else {
                dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
                pr_debug("DMA CHANNEL IS ALLOCATED  \n");
        }

        mutex_unlock(&(dma_ch[channel].dmalock));

        dma_ch[channel].device_id = device_id;
        dma_ch[channel].irq_callback = NULL;

        /* Callers are expected to request the DMA channel before
         * performing any operations on the descriptor or channel.
         */
        pr_debug("request_dma() : END  \n");
        return channel;
}
EXPORT_SYMBOL(request_dma);

int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
        int ret_irq = 0;

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        if (callback != NULL) {
                int ret_val;
                ret_irq = channel2irq(channel);

                dma_ch[channel].data = data;

                ret_val =
                    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
                                dma_ch[channel].device_id, data);
                if (ret_val) {
                        printk(KERN_NOTICE
                               "Request irq in DMA engine failed.\n");
                        return -EPERM;
                }
                dma_ch[channel].irq_callback = callback;
        }
        return 0;
}
EXPORT_SYMBOL(set_dma_callback);
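
/*
 * Typical channel lifecycle (illustrative sketch only; "my_driver",
 * my_dma_handler() and dev are hypothetical caller-side names):
 *
 *      if (request_dma(CH_SPI, "my_driver") < 0)
 *              return -EBUSY;
 *      set_dma_callback(CH_SPI, my_dma_handler, dev);
 *      ...
 *      free_dma(CH_SPI);
 */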

void free_dma(unsigned int channel)
{
        int ret_irq;

        pr_debug("free_dma() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        /* Halt the DMA */
        disable_dma(channel);
        clear_dma_buffer(channel);

        if (dma_ch[channel].irq_callback != NULL) {
                ret_irq = channel2irq(channel);
                free_irq(ret_irq, dma_ch[channel].data);
        }

        /* Mark the channel as free again */
        mutex_lock(&(dma_ch[channel].dmalock));
        dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
        mutex_unlock(&(dma_ch[channel].dmalock));

        pr_debug("free_dma() : END \n");
}
EXPORT_SYMBOL(free_dma);

void dma_enable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_enable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_disable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
        return dma_ch[channel].chan_status != DMA_CHANNEL_FREE;
}
EXPORT_SYMBOL(dma_channel_active);

/*------------------------------------------------------------------------------
 *      Stop the specified DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
        pr_debug("disable_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg &= ~DMAEN;    /* Clear the enable bit */
        SSYNC();
        dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
        /* The channel needs to be re-enabled later via enable_dma() */
        pr_debug("disable_dma() : END \n");
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
        pr_debug("enable_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
        dma_ch[channel].regs->curr_x_count = 0;
        dma_ch[channel].regs->curr_y_count = 0;

        dma_ch[channel].regs->cfg |= DMAEN;     /* Set the enable bit */
        SSYNC();
        pr_debug("enable_dma() : END \n");
}
EXPORT_SYMBOL(enable_dma);

/*------------------------------------------------------------------------------
 *              Set the Start Address register for the specified DMA channel.
 *              This function can be used for register-based DMA,
 *              to set up the start address.
 *              addr:           Starting address of the DMA data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_start_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->start_addr = addr;
        SSYNC();
        pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_next_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->next_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_count = x_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_count = y_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_modify = x_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_modify = y_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg = config;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_config);

unsigned short
set_bfin_dma_config(char direction, char flow_mode,
                    char intr_mode, char dma_mode, char width)
{
        unsigned short config;

        config =
            ((direction << 1) | (width << 2) | (dma_mode << 4) |
             (intr_mode << 6) | (flow_mode << 12) | RESTART);
        return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
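
/*
 * Illustrative sketch of a register-based (non-descriptor) setup using the
 * helpers above.  The mode macros named here (DIR_READ, FLOW_STOP,
 * INTR_DISABLE, DIMENSION_LINEAR, DATA_SIZE_16) are assumed to come from
 * <asm/dma.h>, so check that header for the exact names; "buf" and
 * "nr_samples" are hypothetical caller-side variables:
 *
 *      set_dma_start_addr(channel, (unsigned long)buf);
 *      set_dma_x_count(channel, nr_samples);
 *      set_dma_x_modify(channel, 2);
 *      set_dma_config(channel,
 *                     set_bfin_dma_config(DIR_READ, FLOW_STOP, INTR_DISABLE,
 *                                         DIMENSION_LINEAR, DATA_SIZE_16));
 *      enable_dma(channel);
 */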

void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

        dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

        SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);
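
/*
 * Note on set_dma_sg() above: the caller builds an array of struct dmasg
 * descriptors in memory and hands its head to set_dma_sg().  nr_sg is
 * written into bits 8..11 of the channel's config register (the descriptor
 * size field) and next_desc_ptr is pointed at the first element, so the
 * controller fetches the list in descriptor mode.
 */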

/*------------------------------------------------------------------------------
 *      Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *      Clear the DMA_DONE and DMA_ERR bits in the DMA interrupt status of a
 *      specific DMA channel, acknowledging the completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));
        dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);

/*------------------------------------------------------------------------------
 *      Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *      Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

void *dma_memcpy(void *dest, const void *src, size_t size)
{
        int direction;  /* 1 - address decrease, 0 - address increase */
        int flag_align; /* 1 - address aligned,  0 - address unaligned */
        int flag_2D;    /* 1 - 2D DMA needed,    0 - 1D DMA needed */

        if (size == 0)
                return NULL;

        if ((unsigned long)src < memory_end)
                blackfin_dcache_flush_range((unsigned int)src,
                                            (unsigned int)(src + size));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        if ((unsigned long)src < (unsigned long)dest)
                direction = 1;
        else
                direction = 0;

        if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
            && ((size % 2) == 0))
                flag_align = 1;
        else
                flag_align = 0;

        if (size > 0x10000)     /* size > 64K */
                flag_2D = 1;
        else
                flag_2D = 0;
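
        /*
         * Note: in the 2D case the transfer length is effectively rounded
         * down to a multiple of 1024 bytes (Y_COUNT is set to size >> 10),
         * so a trailing partial row is not copied by this routine.
         */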

        /* Setup destination and source start address */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 2);
                } else {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 1);
                }
        } else {
                bfin_write_MDMA_D0_START_ADDR(dest);
                bfin_write_MDMA_S0_START_ADDR(src);
        }

        /* Setup destination and source xcount */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(1024 / 2);
                        bfin_write_MDMA_S0_X_COUNT(1024 / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(1024);
                        bfin_write_MDMA_S0_X_COUNT(1024);
                }
                bfin_write_MDMA_D0_Y_COUNT(size >> 10);
                bfin_write_MDMA_S0_Y_COUNT(size >> 10);
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(size / 2);
                        bfin_write_MDMA_S0_X_COUNT(size / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(size);
                        bfin_write_MDMA_S0_X_COUNT(size);
                }
        }

        /* Setup destination and source xmodify and ymodify */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(-2);
                        bfin_write_MDMA_S0_X_MODIFY(-2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-2);
                                bfin_write_MDMA_S0_Y_MODIFY(-2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(-1);
                        bfin_write_MDMA_S0_X_MODIFY(-1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-1);
                                bfin_write_MDMA_S0_Y_MODIFY(-1);
                        }
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(2);
                        bfin_write_MDMA_S0_X_MODIFY(2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(2);
                                bfin_write_MDMA_S0_Y_MODIFY(2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(1);
                        bfin_write_MDMA_S0_X_MODIFY(1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(1);
                                bfin_write_MDMA_S0_Y_MODIFY(1);
                        }
                }
        }

        /* Enable source and destination DMA */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
                }
        }

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
                                      (DMA_DONE | DMA_ERR));

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);

        if ((unsigned long)dest < memory_end)
                blackfin_dcache_invalidate_range((unsigned int)dest,
                                                 (unsigned int)(dest + size));

        return dest;
}
EXPORT_SYMBOL(dma_memcpy);

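/*
 * safe_dma_memcpy() runs dma_memcpy() with local interrupts disabled:
 * dma_memcpy() programs the single shared MDMA0 stream and busy-waits on
 * it, so it should not be re-entered while a copy is in flight.
 */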
void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
        unsigned long flags;
        void *addr;

        local_irq_save(flags);
        addr = dma_memcpy(dest, src, size);
        local_irq_restore(flags);
        return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);