/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>

/* Remove unused code not exported by symbol or internally called */
#define REMOVE_DEAD_CODE

/**************************************************************************
 * Global Variables
***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*------------------------------------------------------------------------------
 * Set the Buffer Clear bit in the Configuration register of a specific DMA
 * channel. This stops the descriptor-based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
	dma_ch[channel].regs->cfg |= RESTART;
	SSYNC();
	dma_ch[channel].regs->cfg &= ~RESTART;
	SSYNC();
}

static int __init blackfin_dma_init(void)
{
	int i;

	printk(KERN_INFO "Blackfin DMA Controller\n");

	for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
		dma_ch[i].chan_status = DMA_CHANNEL_FREE;
		dma_ch[i].regs = base_addr[i];
		mutex_init(&(dma_ch[i].dmalock));
	}
	/* Mark MEMDMA Channel 0 as requested since we're using it internally */
	dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
	dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

	return 0;
}

arch_initcall(blackfin_dma_init);

/*------------------------------------------------------------------------------
 * Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{
	pr_debug("request_dma() : BEGIN\n");
	mutex_lock(&(dma_ch[channel].dmalock));

	if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
	    || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
		mutex_unlock(&(dma_ch[channel].dmalock));
		pr_debug("DMA CHANNEL IN USE\n");
		return -EBUSY;
	} else {
		dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
		pr_debug("DMA CHANNEL IS ALLOCATED\n");
	}

	mutex_unlock(&(dma_ch[channel].dmalock));

	dma_ch[channel].device_id = device_id;
	dma_ch[channel].irq_callback = NULL;

	/* A channel must be requested here before any descriptor or
	 * register operations are performed on it.
	 */
	pr_debug("request_dma() : END\n");
	return channel;
}
EXPORT_SYMBOL(request_dma);

int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
	int ret_irq = 0;

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	if (callback != NULL) {
		int ret_val;

		ret_irq = channel2irq(channel);
		dma_ch[channel].data = data;

		ret_val = request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
				      dma_ch[channel].device_id, data);
		if (ret_val) {
			printk(KERN_NOTICE
			       "Request irq in DMA engine failed.\n");
			return -EPERM;
		}
		dma_ch[channel].irq_callback = callback;
	}
	return 0;
}
EXPORT_SYMBOL(set_dma_callback);

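/*
 * Illustrative usage sketch (not part of this file): a typical peripheral
 * driver claims a channel, installs a completion callback, programs the
 * channel with the register-based helpers below, and then enables it.
 * The channel number, buffer, count, and handler names are hypothetical
 * placeholders, and the config flag names are assumed to come from
 * <asm/dma.h>.
 *
 *	if (request_dma(CH_SPORT0_RX, "example_driver") < 0)
 *		return -EBUSY;
 *	set_dma_callback(CH_SPORT0_RX, example_rx_handler, dev);
 *	set_dma_start_addr(CH_SPORT0_RX, (unsigned long)rx_buf);
 *	set_dma_x_count(CH_SPORT0_RX, nr_samples);
 *	set_dma_x_modify(CH_SPORT0_RX, 2);
 *	set_dma_config(CH_SPORT0_RX,
 *		       set_bfin_dma_config(DIR_WRITE, DMA_FLOW_STOP, INTR_ON_BUF,
 *					   DIMENSION_LINEAR, DATA_SIZE_16));
 *	enable_dma(CH_SPORT0_RX);
 */
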
void free_dma(unsigned int channel)
{
	int ret_irq;

	pr_debug("freedma() : BEGIN\n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	/* Halt the DMA */
	disable_dma(channel);
	clear_dma_buffer(channel);

	if (dma_ch[channel].irq_callback != NULL) {
		ret_irq = channel2irq(channel);
		free_irq(ret_irq, dma_ch[channel].data);
	}

	/* Clear the DMA Variable in the Channel */
	mutex_lock(&(dma_ch[channel].dmalock));
	dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
	mutex_unlock(&(dma_ch[channel].dmalock));

	pr_debug("freedma() : END\n");
}
EXPORT_SYMBOL(free_dma);

void dma_enable_irq(unsigned int channel)
{
	int ret_irq;

	pr_debug("dma_enable_irq() : BEGIN\n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	ret_irq = channel2irq(channel);
	enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
	int ret_irq;

	pr_debug("dma_disable_irq() : BEGIN\n");
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	ret_irq = channel2irq(channel);
	disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
	if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE)
		return 0;
	else
		return 1;
}
EXPORT_SYMBOL(dma_channel_active);

/*------------------------------------------------------------------------------
 * Stop the specific DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
	pr_debug("stop_dma() : BEGIN\n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg &= ~DMAEN;	/* Clear the enable bit */
	SSYNC();
	dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
	/* Needs to be enabled later */
	pr_debug("stop_dma() : END\n");
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
	pr_debug("enable_dma() : BEGIN\n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
	dma_ch[channel].regs->curr_x_count = 0;
	dma_ch[channel].regs->curr_y_count = 0;

	dma_ch[channel].regs->cfg |= DMAEN;	/* Set the enable bit */
	SSYNC();
	pr_debug("enable_dma() : END\n");
}
EXPORT_SYMBOL(enable_dma);

/*------------------------------------------------------------------------------
 * Set the Start Address register for the specific DMA channel.
 * This function can be used for register-based DMA to set up the start address.
 * addr: starting address of the DMA data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_start_addr() : BEGIN\n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->start_addr = addr;
	SSYNC();
	pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
	pr_debug("set_dma_next_desc_addr() : BEGIN\n");

	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->next_desc_ptr = addr;
	SSYNC();
	pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->x_count = x_count;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->y_count = y_count;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->x_modify = x_modify;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->y_modify = y_modify;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg = config;
	SSYNC();
}
EXPORT_SYMBOL(set_dma_config);

unsigned short
set_bfin_dma_config(char direction, char flow_mode,
		    char intr_mode, char dma_mode, char width)
{
	unsigned short config;

	config =
	    ((direction << 1) | (width << 2) | (dma_mode << 4) |
	     (intr_mode << 6) | (flow_mode << 12) | RESTART);
	return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);

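/*
 * Worked example (illustrative only, assuming the flag values from
 * <asm/dma.h> and the conventional Blackfin DMA_CONFIG bit definitions):
 *
 *	set_bfin_dma_config(DIR_WRITE, DMA_FLOW_STOP, INTR_ON_BUF,
 *			    DIMENSION_LINEAR, DATA_SIZE_16)
 *	= (1 << 1) | (1 << 2) | (0 << 4) | (2 << 6) | (0 << 12) | RESTART
 *	= WNR | WDSIZE_16 | DI_EN | RESTART = 0x00a6
 *
 * i.e. a linear, 16-bit, memory-write transfer in stop mode that raises an
 * interrupt when the buffer completes.
 */
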
void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

	dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

	SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);

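/*
 * Illustrative sketch of descriptor-based DMA (not part of this file).  The
 * struct dmasg field names are assumed from <asm/dma.h>, the channel and
 * buffers are hypothetical, and the per-descriptor cfg/flow/NDSIZE settings
 * depend on the descriptor mode chosen, so they are not spelled out here.
 *
 *	static struct dmasg example_sg[2];
 *
 *	example_sg[0].start_addr     = (unsigned long)buf0;
 *	example_sg[0].x_count        = 256;
 *	example_sg[0].x_modify       = 1;
 *	example_sg[0].next_desc_addr = (unsigned long)&example_sg[1];
 *
 *	example_sg[1].start_addr     = (unsigned long)buf1;
 *	example_sg[1].x_count        = 256;
 *	example_sg[1].x_modify       = 1;
 *	example_sg[1].next_desc_addr = (unsigned long)&example_sg[0];
 *
 *	set_dma_sg(CH_SPORT0_RX, example_sg, 2);
 *	enable_dma(CH_SPORT0_RX);
 */
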
/*------------------------------------------------------------------------------
 * Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 * Clear the DMA_DONE/DMA_ERR bits in the channel's IRQ status register
 * (write-1-to-clear), acknowledging the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));
	dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);

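/*
 * Illustrative polling sketch (not part of this file): a driver that does
 * not install an interrupt callback can poll the channel status and then
 * acknowledge completion.  The channel name is a hypothetical placeholder.
 *
 *	while (!(get_dma_curr_irqstat(CH_SPORT0_RX) & DMA_DONE))
 *		cpu_relax();
 *	clear_dma_irqstat(CH_SPORT0_RX);
 *	disable_dma(CH_SPORT0_RX);
 */
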
/*------------------------------------------------------------------------------
 * Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 * Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
	BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
		 && channel < MAX_BLACKFIN_DMA_CHANNEL));

	return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
	int direction;	/* 1 - address decrease, 0 - address increase */
	int flag_align;	/* 1 - address aligned, 0 - address unaligned */
	int flag_2D;	/* 1 - 2D DMA needed, 0 - 1D DMA needed */
	unsigned long flags;

	if (size <= 0)
		return NULL;

	local_irq_save(flags);

	if ((unsigned long)src < memory_end)
		blackfin_dcache_flush_range((unsigned int)src,
					    (unsigned int)(src + size));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	if ((unsigned long)src < (unsigned long)dest)
		direction = 1;
	else
		direction = 0;

	if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
	    && ((size % 2) == 0))
		flag_align = 1;
	else
		flag_align = 0;

	if (size > 0x10000)	/* size > 64K */
		flag_2D = 1;
	else
		flag_2D = 0;

	/* Setup destination and source start address */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
			bfin_write_MDMA_S0_START_ADDR(src + size - 2);
		} else {
			bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
			bfin_write_MDMA_S0_START_ADDR(src + size - 1);
		}
	} else {
		bfin_write_MDMA_D0_START_ADDR(dest);
		bfin_write_MDMA_S0_START_ADDR(src);
	}

	/* Setup destination and source xcount */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(1024 / 2);
			bfin_write_MDMA_S0_X_COUNT(1024 / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(1024);
			bfin_write_MDMA_S0_X_COUNT(1024);
		}
		bfin_write_MDMA_D0_Y_COUNT(size >> 10);
		bfin_write_MDMA_S0_Y_COUNT(size >> 10);
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_COUNT(size / 2);
			bfin_write_MDMA_S0_X_COUNT(size / 2);
		} else {
			bfin_write_MDMA_D0_X_COUNT(size);
			bfin_write_MDMA_S0_X_COUNT(size);
		}
	}

	/* Setup destination and source xmodify and ymodify */
	if (direction) {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(-2);
			bfin_write_MDMA_S0_X_MODIFY(-2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-2);
				bfin_write_MDMA_S0_Y_MODIFY(-2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(-1);
			bfin_write_MDMA_S0_X_MODIFY(-1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(-1);
				bfin_write_MDMA_S0_Y_MODIFY(-1);
			}
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_D0_X_MODIFY(2);
			bfin_write_MDMA_S0_X_MODIFY(2);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(2);
				bfin_write_MDMA_S0_Y_MODIFY(2);
			}
		} else {
			bfin_write_MDMA_D0_X_MODIFY(1);
			bfin_write_MDMA_S0_X_MODIFY(1);
			if (flag_2D) {
				bfin_write_MDMA_D0_Y_MODIFY(1);
				bfin_write_MDMA_S0_Y_MODIFY(1);
			}
		}
	}

	/* Enable source DMA */
	if (flag_2D) {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
		}
	} else {
		if (flag_align) {
			bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
		} else {
			bfin_write_MDMA_S0_CONFIG(DMAEN);
			bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
		}
	}

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
		;

	bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
				      (DMA_DONE | DMA_ERR));

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);

	if ((unsigned long)dest < memory_end)
		blackfin_dcache_invalidate_range((unsigned int)dest,
						 (unsigned int)(dest + size));
	local_irq_restore(flags);

	return dest;
}

void *dma_memcpy(void *dest, const void *src, size_t size)
{
	int bulk;
	int rest;
	void *addr;

	bulk = (size >> 16) << 16;
	rest = size - bulk;
	if (bulk)
		__dma_memcpy(dest, src, bulk);
	addr = __dma_memcpy(dest+bulk, src+bulk, rest);
	return addr;
}
EXPORT_SYMBOL(dma_memcpy);

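/*
 * Illustrative usage sketch (not part of this file): dma_memcpy() can stand
 * in for memcpy() on large, DMA-safe buffers; it runs with interrupts off
 * and busy-waits for MDMA completion, so it only suits contexts that can
 * tolerate that.  The buffer names below are hypothetical.
 *
 *	void *dst = dma_memcpy(frame_out, frame_in, 128 * 1024);
 *
 * Transfers larger than 64K are split: the 64K-aligned bulk is moved first,
 * then the remainder, using 2D MDMA where needed.
 */
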
void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
	void *addr;
	addr = dma_memcpy(dest, src, size);
	return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);

void dma_outsb(void __iomem *addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf, (unsigned int)(buf) + len);

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(1);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);

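/*
 * Illustrative usage sketch for the dma_outs[bwl]()/dma_ins[bwl]() helpers
 * (not part of this file): they move a buffer to or from a fixed
 * memory-mapped peripheral address (x_modify of 0 on the peripheral side),
 * one element per transfer, with the element width matching the routine
 * (8/16/32 bits) and len counted in elements.  The device pointer and
 * buffers below are hypothetical.
 *
 *	dma_outsw(dev->tx_fifo, tx_buf, nr_words);
 *	dma_insw(dev->rx_fifo, rx_buf, nr_words);
 */
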
void dma_insb(const void __iomem *addr, void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);
	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(1);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

	blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);

void dma_outsw(void __iomem *addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf, (unsigned int)(buf) + len);

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(2);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(const void __iomem *addr, void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(2);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

	blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);

void dma_outsl(void __iomem *addr, const void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	blackfin_dcache_flush_range((unsigned int)buf, (unsigned int)(buf) + len);

	bfin_write_MDMA_D0_START_ADDR(addr);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(0);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(buf);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(4);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(const void __iomem *addr, void *buf, unsigned short len)
{
	unsigned long flags;

	local_irq_save(flags);

	bfin_write_MDMA_D0_START_ADDR(buf);
	bfin_write_MDMA_D0_X_COUNT(len);
	bfin_write_MDMA_D0_X_MODIFY(4);
	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_START_ADDR(addr);
	bfin_write_MDMA_S0_X_COUNT(len);
	bfin_write_MDMA_S0_X_MODIFY(0);
	bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
	bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

	blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);

	while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

	bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

	bfin_write_MDMA_S0_CONFIG(0);
	bfin_write_MDMA_D0_CONFIG(0);
	local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);