/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/mutex.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>

/* Remove code that is neither exported nor called internally */
#define REMOVE_DEAD_CODE

/**************************************************************************
 * Global Variables
***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*------------------------------------------------------------------------------
 *       Set the Buffer Clear bit in the Configuration register of a specific
 *       DMA channel. This stops any descriptor-based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
        dma_ch[channel].regs->cfg |= RESTART;
        SSYNC();
        dma_ch[channel].regs->cfg &= ~RESTART;
        SSYNC();
}

static int __init blackfin_dma_init(void)
{
        int i;

        printk(KERN_INFO "Blackfin DMA Controller\n");

        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
                dma_ch[i].chan_status = DMA_CHANNEL_FREE;
                dma_ch[i].regs = base_addr[i];
                mutex_init(&(dma_ch[i].dmalock));
        }
        /* Mark MEMDMA Channel 0 as requested since we're using it internally */
        dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
        dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

#if defined(CONFIG_DEB_DMA_URGENT)
        bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
                         | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif
        return 0;
}

arch_initcall(blackfin_dma_init);

/*------------------------------------------------------------------------------
 *      Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{
        pr_debug("request_dma() : BEGIN \n");
        mutex_lock(&(dma_ch[channel].dmalock));

        if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
            || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
                mutex_unlock(&(dma_ch[channel].dmalock));
                pr_debug("DMA CHANNEL IN USE  \n");
                return -EBUSY;
        } else {
                dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
                pr_debug("DMA CHANNEL IS ALLOCATED  \n");
        }

        mutex_unlock(&(dma_ch[channel].dmalock));

#ifdef CONFIG_BF54x
        if (channel >= CH_UART2_RX && channel <= CH_UART3_TX &&
                strncmp(device_id, "BFIN_UART", 9) == 0)
                dma_ch[channel].regs->peripheral_map |=
                        (channel - CH_UART2_RX + 0xC);
        else
                dma_ch[channel].regs->peripheral_map |=
                        (channel - CH_UART2_RX + 0x6);
#endif

        dma_ch[channel].device_id = device_id;
        dma_ch[channel].irq_callback = NULL;

        /* Note: a channel must be requested with request_dma() before any
         * other operations are performed on the descriptor/channel.
         */
        pr_debug("request_dma() : END  \n");
        return channel;
}
EXPORT_SYMBOL(request_dma);

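/*------------------------------------------------------------------------------
 *      Register an interrupt handler for the channel's DMA interrupt.
 *      If a callback is given, the corresponding IRQ line is requested and
 *      the handler is remembered so that free_dma() can release it again.
 *-----------------------------------------------------------------------------*/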
int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
        int ret_irq = 0;

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        if (callback != NULL) {
                int ret_val;
                ret_irq = channel2irq(channel);

                dma_ch[channel].data = data;

                ret_val =
                    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
                                dma_ch[channel].device_id, data);
                if (ret_val) {
                        printk(KERN_NOTICE
                               "Request irq in DMA engine failed.\n");
                        return -EPERM;
                }
                dma_ch[channel].irq_callback = callback;
        }
        return 0;
}
EXPORT_SYMBOL(set_dma_callback);
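
/*
 * Typical driver usage of this API, as an illustrative sketch only: the
 * channel, device name, handler, buffer and config value below are
 * hypothetical and are not defined in this file.
 *
 *      if (request_dma(CH_SPORT0_RX, "my_driver") < 0)
 *              return -EBUSY;
 *      set_dma_callback(CH_SPORT0_RX, my_dma_handler, my_dev);
 *      set_dma_start_addr(CH_SPORT0_RX, (unsigned long)buf);
 *      set_dma_x_count(CH_SPORT0_RX, count);
 *      set_dma_x_modify(CH_SPORT0_RX, 2);
 *      set_dma_config(CH_SPORT0_RX, config);
 *      enable_dma(CH_SPORT0_RX);
 *      ...
 *      disable_dma(CH_SPORT0_RX);
 *      free_dma(CH_SPORT0_RX);
 */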

void free_dma(unsigned int channel)
{
        int ret_irq;

        pr_debug("freedma() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        /* Halt the DMA */
        disable_dma(channel);
        clear_dma_buffer(channel);

        if (dma_ch[channel].irq_callback != NULL) {
                ret_irq = channel2irq(channel);
                free_irq(ret_irq, dma_ch[channel].data);
        }

        /* Clear the DMA Variable in the Channel */
        mutex_lock(&(dma_ch[channel].dmalock));
        dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
        mutex_unlock(&(dma_ch[channel].dmalock));

        pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);

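/*------------------------------------------------------------------------------
 *      Enable/disable the interrupt line associated with a DMA channel
 *      (looked up via channel2irq()) at the system interrupt controller.
 *-----------------------------------------------------------------------------*/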
void dma_enable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_enable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_disable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
        return dma_ch[channel].chan_status != DMA_CHANNEL_FREE;
}
EXPORT_SYMBOL(dma_channel_active);

/*------------------------------------------------------------------------------
 *      Stop the specific DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
        pr_debug("stop_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg &= ~DMAEN;    /* Clear the enable bit */
        SSYNC();
        dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
        /* Needs to be re-enabled later */
        pr_debug("stop_dma() : END \n");
        return;
}
EXPORT_SYMBOL(disable_dma);

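/*------------------------------------------------------------------------------
 *      Enable the specific DMA channel: reset the current X/Y counts and set
 *      the DMAEN bit in the channel's configuration register.
 *-----------------------------------------------------------------------------*/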
void enable_dma(unsigned int channel)
{
        pr_debug("enable_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
        dma_ch[channel].regs->curr_x_count = 0;
        dma_ch[channel].regs->curr_y_count = 0;

        dma_ch[channel].regs->cfg |= DMAEN;     /* Set the enable bit */
        SSYNC();
        pr_debug("enable_dma() : END \n");
        return;
}
EXPORT_SYMBOL(enable_dma);

/*------------------------------------------------------------------------------
 *              Set the Start Address register for the specific DMA channel.
 *              This function can be used for register-based DMA to set up the
 *              start address.
 *              addr: starting address of the DMA data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_start_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->start_addr = addr;
        SSYNC();
        pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_next_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->next_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_curr_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_count = x_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_count = y_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_modify = x_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_modify = y_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg = config;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_config);

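/*------------------------------------------------------------------------------
 *      Build a DMA configuration word from its individual fields:
 *      direction goes into bit 1 (WNR), width into bits 2-3 (WDSIZE),
 *      dma_mode into bit 4 (DMA2D), syncmode into bit 5, intr_mode into
 *      bits 6-7, and flow_mode into bits 12 and up (FLOW).
 *-----------------------------------------------------------------------------*/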
unsigned short
set_bfin_dma_config(char direction, char flow_mode,
                    char intr_mode, char dma_mode, char width, char syncmode)
{
        unsigned short config;

        config =
            ((direction << 1) | (width << 2) | (dma_mode << 4) |
             (syncmode << 5) | (intr_mode << 6) | (flow_mode << 12));
        return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);

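/*------------------------------------------------------------------------------
 *      Set up descriptor-array ("scatter-gather") mode: program the number of
 *      descriptor elements into bits 8-11 of the configuration register
 *      (NDSIZE) and point next_desc_ptr at the first descriptor in sg.
 *-----------------------------------------------------------------------------*/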
void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

        dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

        SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);

void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_addr_ptr = addr;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_curr_addr);

/*------------------------------------------------------------------------------
 *      Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *      Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));
        dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);

/*------------------------------------------------------------------------------
 *      Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *      Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);

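/*------------------------------------------------------------------------------
 *      Copy one block of memory using the MDMA stream 0 channel pair.
 *      The copy runs with descending addresses when src < dest so that
 *      overlapping regions are handled safely, uses 16-bit transfers when
 *      both addresses and the size are 2-byte aligned, and switches to 2D DMA
 *      with 1024-element rows for transfers larger than 64K. The source range
 *      is flushed from and the destination range invalidated in the data
 *      cache before the transfer, interrupts stay disabled throughout, and
 *      the function busy-waits for DMA_DONE before returning.
 *-----------------------------------------------------------------------------*/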
static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
        int direction;  /* 1 - address decrease, 0 - address increase */
        int flag_align; /* 1 - address aligned,  0 - address unaligned */
        int flag_2D;    /* 1 - 2D DMA needed,    0 - 1D DMA needed */
        unsigned long flags;

        if (size <= 0)
                return NULL;

        local_irq_save(flags);

        if ((unsigned long)src < memory_end)
                blackfin_dcache_flush_range((unsigned int)src,
                                            (unsigned int)(src + size));

        if ((unsigned long)dest < memory_end)
                blackfin_dcache_invalidate_range((unsigned int)dest,
                                                 (unsigned int)(dest + size));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        if ((unsigned long)src < (unsigned long)dest)
                direction = 1;
        else
                direction = 0;

        if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
            && ((size % 2) == 0))
                flag_align = 1;
        else
                flag_align = 0;

        if (size > 0x10000)     /* size > 64K */
                flag_2D = 1;
        else
                flag_2D = 0;

        /* Setup destination and source start address */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 2);
                } else {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 1);
                }
        } else {
                bfin_write_MDMA_D0_START_ADDR(dest);
                bfin_write_MDMA_S0_START_ADDR(src);
        }

        /* Setup destination and source xcount */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(1024 / 2);
                        bfin_write_MDMA_S0_X_COUNT(1024 / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(1024);
                        bfin_write_MDMA_S0_X_COUNT(1024);
                }
                bfin_write_MDMA_D0_Y_COUNT(size >> 10);
                bfin_write_MDMA_S0_Y_COUNT(size >> 10);
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(size / 2);
                        bfin_write_MDMA_S0_X_COUNT(size / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(size);
                        bfin_write_MDMA_S0_X_COUNT(size);
                }
        }

        /* Setup destination and source xmodify and ymodify */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(-2);
                        bfin_write_MDMA_S0_X_MODIFY(-2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-2);
                                bfin_write_MDMA_S0_Y_MODIFY(-2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(-1);
                        bfin_write_MDMA_S0_X_MODIFY(-1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-1);
                                bfin_write_MDMA_S0_Y_MODIFY(-1);
                        }
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(2);
                        bfin_write_MDMA_S0_X_MODIFY(2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(2);
                                bfin_write_MDMA_S0_Y_MODIFY(2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(1);
                        bfin_write_MDMA_S0_X_MODIFY(1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(1);
                                bfin_write_MDMA_S0_Y_MODIFY(1);
                        }
                }
        }

        /* Enable source and destination DMA */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
                }
        }

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
                                      (DMA_DONE | DMA_ERR));

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);

        local_irq_restore(flags);

        return dest;
}

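/*------------------------------------------------------------------------------
 *      Copy an arbitrarily sized block: the bulk (size rounded down to a
 *      multiple of 64K) is copied with one __dma_memcpy() call and the
 *      remainder (< 64K) with a second call.
 *-----------------------------------------------------------------------------*/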
void *dma_memcpy(void *dest, const void *src, size_t size)
{
        size_t bulk;
        size_t rest;
        void *addr;

        bulk = (size >> 16) << 16;
        rest = size - bulk;
        if (bulk)
                __dma_memcpy(dest, src, bulk);
        addr = __dma_memcpy(dest + bulk, src + bulk, rest);
        return addr;
}
EXPORT_SYMBOL(dma_memcpy);

void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
        void *addr;
        addr = dma_memcpy(dest, src, size);
        return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);

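/*------------------------------------------------------------------------------
 *      dma_outs[bwl]()/dma_ins[bwl](): transfer 'len' elements between a
 *      memory buffer and a fixed device address using MDMA stream 0. The
 *      memory side steps by the element size (1, 2 or 4 bytes) while the
 *      device side uses a modify of 0 so every element hits the same address.
 *      The buffer is flushed (out) or invalidated (in) in the data cache, and
 *      each call busy-waits for DMA_DONE with interrupts disabled.
 *-----------------------------------------------------------------------------*/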
void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(1);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);

void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(1);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);

void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(2);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(2);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);

void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(4);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(4);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);