/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */
#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/dma.h>
#include <asm/cacheflush.h>
40 /* Remove unused code not exported by symbol or internally called */
41 #define REMOVE_DEAD_CODE
/**************************************************************************
 * Global Variables
 ***************************************************************************/
47 static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];
48 #if defined (CONFIG_BF561)
49 static struct dma_register *base_addr[MAX_BLACKFIN_DMA_CHANNEL] = {
50 (struct dma_register *) DMA1_0_NEXT_DESC_PTR,
51 (struct dma_register *) DMA1_1_NEXT_DESC_PTR,
52 (struct dma_register *) DMA1_2_NEXT_DESC_PTR,
53 (struct dma_register *) DMA1_3_NEXT_DESC_PTR,
54 (struct dma_register *) DMA1_4_NEXT_DESC_PTR,
55 (struct dma_register *) DMA1_5_NEXT_DESC_PTR,
56 (struct dma_register *) DMA1_6_NEXT_DESC_PTR,
57 (struct dma_register *) DMA1_7_NEXT_DESC_PTR,
58 (struct dma_register *) DMA1_8_NEXT_DESC_PTR,
59 (struct dma_register *) DMA1_9_NEXT_DESC_PTR,
60 (struct dma_register *) DMA1_10_NEXT_DESC_PTR,
61 (struct dma_register *) DMA1_11_NEXT_DESC_PTR,
62 (struct dma_register *) DMA2_0_NEXT_DESC_PTR,
63 (struct dma_register *) DMA2_1_NEXT_DESC_PTR,
64 (struct dma_register *) DMA2_2_NEXT_DESC_PTR,
65 (struct dma_register *) DMA2_3_NEXT_DESC_PTR,
66 (struct dma_register *) DMA2_4_NEXT_DESC_PTR,
67 (struct dma_register *) DMA2_5_NEXT_DESC_PTR,
68 (struct dma_register *) DMA2_6_NEXT_DESC_PTR,
69 (struct dma_register *) DMA2_7_NEXT_DESC_PTR,
70 (struct dma_register *) DMA2_8_NEXT_DESC_PTR,
71 (struct dma_register *) DMA2_9_NEXT_DESC_PTR,
72 (struct dma_register *) DMA2_10_NEXT_DESC_PTR,
73 (struct dma_register *) DMA2_11_NEXT_DESC_PTR,
74 (struct dma_register *) MDMA1_D0_NEXT_DESC_PTR,
75 (struct dma_register *) MDMA1_S0_NEXT_DESC_PTR,
76 (struct dma_register *) MDMA1_D1_NEXT_DESC_PTR,
77 (struct dma_register *) MDMA1_S1_NEXT_DESC_PTR,
78 (struct dma_register *) MDMA2_D0_NEXT_DESC_PTR,
79 (struct dma_register *) MDMA2_S0_NEXT_DESC_PTR,
80 (struct dma_register *) MDMA2_D1_NEXT_DESC_PTR,
81 (struct dma_register *) MDMA2_S1_NEXT_DESC_PTR,
82 (struct dma_register *) IMDMA_D0_NEXT_DESC_PTR,
83 (struct dma_register *) IMDMA_S0_NEXT_DESC_PTR,
84 (struct dma_register *) IMDMA_D1_NEXT_DESC_PTR,
85 (struct dma_register *) IMDMA_S1_NEXT_DESC_PTR,
88 static struct dma_register *base_addr[MAX_BLACKFIN_DMA_CHANNEL] = {
89 (struct dma_register *) DMA0_NEXT_DESC_PTR,
90 (struct dma_register *) DMA1_NEXT_DESC_PTR,
91 (struct dma_register *) DMA2_NEXT_DESC_PTR,
92 (struct dma_register *) DMA3_NEXT_DESC_PTR,
93 (struct dma_register *) DMA4_NEXT_DESC_PTR,
94 (struct dma_register *) DMA5_NEXT_DESC_PTR,
95 (struct dma_register *) DMA6_NEXT_DESC_PTR,
96 (struct dma_register *) DMA7_NEXT_DESC_PTR,
97 #if (defined(CONFIG_BF537) || defined(CONFIG_BF534) || defined(CONFIG_BF536))
98 (struct dma_register *) DMA8_NEXT_DESC_PTR,
99 (struct dma_register *) DMA9_NEXT_DESC_PTR,
100 (struct dma_register *) DMA10_NEXT_DESC_PTR,
101 (struct dma_register *) DMA11_NEXT_DESC_PTR,
103 (struct dma_register *) MDMA_D0_NEXT_DESC_PTR,
104 (struct dma_register *) MDMA_S0_NEXT_DESC_PTR,
105 (struct dma_register *) MDMA_D1_NEXT_DESC_PTR,
106 (struct dma_register *) MDMA_S1_NEXT_DESC_PTR,
110 /*------------------------------------------------------------------------------
111 * Set the Buffer Clear bit in the Configuration register of specific DMA
112 * channel. This will stop the descriptor based DMA operation.
113 *-----------------------------------------------------------------------------*/
114 static void clear_dma_buffer(unsigned int channel)
116 dma_ch[channel].regs->cfg |= RESTART;
118 dma_ch[channel].regs->cfg &= ~RESTART;
122 static int __init blackfin_dma_init(void)
126 printk(KERN_INFO "Blackfin DMA Controller\n");
128 for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
129 dma_ch[i].chan_status = DMA_CHANNEL_FREE;
130 dma_ch[i].regs = base_addr[i];
131 mutex_init(&(dma_ch[i].dmalock));
133 /* Mark MEMDMA Channel 0 as requested since we're using it internally */
134 dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
135 dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;
139 arch_initcall(blackfin_dma_init);
142 * Form the channel find the irq number for that channel.
144 #if !defined(CONFIG_BF561)
146 static int bf533_channel2irq(unsigned int channel)
155 #if (defined(CONFIG_BF537) || defined(CONFIG_BF534) || defined(CONFIG_BF536))
157 ret_irq = IRQ_MAC_RX;
161 ret_irq = IRQ_MAC_TX;
165 ret_irq = IRQ_UART1_RX;
169 ret_irq = IRQ_UART1_TX;
174 ret_irq = IRQ_SPORT0_RX;
178 ret_irq = IRQ_SPORT0_TX;
182 ret_irq = IRQ_SPORT1_RX;
186 ret_irq = IRQ_SPORT1_TX;
194 ret_irq = IRQ_UART_RX;
198 ret_irq = IRQ_UART_TX;
201 case CH_MEM_STREAM0_SRC:
202 case CH_MEM_STREAM0_DEST:
203 ret_irq = IRQ_MEM_DMA0;
206 case CH_MEM_STREAM1_SRC:
207 case CH_MEM_STREAM1_DEST:
208 ret_irq = IRQ_MEM_DMA1;
214 # define channel2irq(channel) bf533_channel2irq(channel)
218 static int bf561_channel2irq(unsigned int channel)
230 ret_irq = IRQ_SPORT0_RX;
233 ret_irq = IRQ_SPORT0_TX;
236 ret_irq = IRQ_SPORT1_RX;
239 ret_irq = IRQ_SPORT1_TX;
245 ret_irq = IRQ_UART_RX;
248 ret_irq = IRQ_UART_TX;
251 case CH_MEM_STREAM0_SRC:
252 case CH_MEM_STREAM0_DEST:
253 ret_irq = IRQ_MEM_DMA0;
255 case CH_MEM_STREAM1_SRC:
256 case CH_MEM_STREAM1_DEST:
257 ret_irq = IRQ_MEM_DMA1;
259 case CH_MEM_STREAM2_SRC:
260 case CH_MEM_STREAM2_DEST:
261 ret_irq = IRQ_MEM_DMA2;
263 case CH_MEM_STREAM3_SRC:
264 case CH_MEM_STREAM3_DEST:
265 ret_irq = IRQ_MEM_DMA3;
268 case CH_IMEM_STREAM0_SRC:
269 case CH_IMEM_STREAM0_DEST:
270 ret_irq = IRQ_IMEM_DMA0;
272 case CH_IMEM_STREAM1_SRC:
273 case CH_IMEM_STREAM1_DEST:
274 ret_irq = IRQ_IMEM_DMA1;
280 # define channel2irq(channel) bf561_channel2irq(channel)
284 /*------------------------------------------------------------------------------
285 * Request the specific DMA channel from the system.
286 *-----------------------------------------------------------------------------*/
287 int request_dma(unsigned int channel, char *device_id)
290 pr_debug("request_dma() : BEGIN \n");
291 mutex_lock(&(dma_ch[channel].dmalock));
293 if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
294 || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
295 mutex_unlock(&(dma_ch[channel].dmalock));
296 pr_debug("DMA CHANNEL IN USE \n");
299 dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
300 pr_debug("DMA CHANNEL IS ALLOCATED \n");
303 mutex_unlock(&(dma_ch[channel].dmalock));
305 dma_ch[channel].device_id = device_id;
306 dma_ch[channel].irq_callback = NULL;
308 /* This is to be enabled by putting a restriction -
309 * you have to request DMA, before doing any operations on
312 pr_debug("request_dma() : END \n");
315 EXPORT_SYMBOL(request_dma);
317 int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
321 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
322 && channel < MAX_BLACKFIN_DMA_CHANNEL));
324 if (callback != NULL) {
326 ret_irq = channel2irq(channel);
328 dma_ch[channel].data = data;
331 request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
332 dma_ch[channel].device_id, data);
335 "Request irq in DMA engine failed.\n");
338 dma_ch[channel].irq_callback = callback;
342 EXPORT_SYMBOL(set_dma_callback);
344 void free_dma(unsigned int channel)
348 pr_debug("freedma() : BEGIN \n");
349 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
350 && channel < MAX_BLACKFIN_DMA_CHANNEL));
353 disable_dma(channel);
354 clear_dma_buffer(channel);
356 if (dma_ch[channel].irq_callback != NULL) {
357 ret_irq = channel2irq(channel);
358 free_irq(ret_irq, dma_ch[channel].data);
361 /* Clear the DMA Variable in the Channel */
362 mutex_lock(&(dma_ch[channel].dmalock));
363 dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
364 mutex_unlock(&(dma_ch[channel].dmalock));
366 pr_debug("freedma() : END \n");
368 EXPORT_SYMBOL(free_dma);
370 void dma_enable_irq(unsigned int channel)
374 pr_debug("dma_enable_irq() : BEGIN \n");
375 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
376 && channel < MAX_BLACKFIN_DMA_CHANNEL));
378 ret_irq = channel2irq(channel);
381 EXPORT_SYMBOL(dma_enable_irq);
383 void dma_disable_irq(unsigned int channel)
387 pr_debug("dma_disable_irq() : BEGIN \n");
388 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
389 && channel < MAX_BLACKFIN_DMA_CHANNEL));
391 ret_irq = channel2irq(channel);
392 disable_irq(ret_irq);
394 EXPORT_SYMBOL(dma_disable_irq);
396 int dma_channel_active(unsigned int channel)
398 if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
404 EXPORT_SYMBOL(dma_channel_active);
406 /*------------------------------------------------------------------------------
407 * stop the specific DMA channel.
408 *-----------------------------------------------------------------------------*/
409 void disable_dma(unsigned int channel)
411 pr_debug("stop_dma() : BEGIN \n");
413 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
414 && channel < MAX_BLACKFIN_DMA_CHANNEL));
416 dma_ch[channel].regs->cfg &= ~DMAEN; /* Clean the enable bit */
418 dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
419 /* Needs to be enabled Later */
420 pr_debug("stop_dma() : END \n");
423 EXPORT_SYMBOL(disable_dma);
425 void enable_dma(unsigned int channel)
427 pr_debug("enable_dma() : BEGIN \n");
429 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
430 && channel < MAX_BLACKFIN_DMA_CHANNEL));
432 dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
433 dma_ch[channel].regs->curr_x_count = 0;
434 dma_ch[channel].regs->curr_y_count = 0;
436 dma_ch[channel].regs->cfg |= DMAEN; /* Set the enable bit */
438 pr_debug("enable_dma() : END \n");
441 EXPORT_SYMBOL(enable_dma);
443 /*------------------------------------------------------------------------------
444 * Set the Start Address register for the specific DMA channel
445 * This function can be used for register based DMA,
446 * to setup the start address
447 * addr: Starting address of the DMA Data to be transferred.
448 *-----------------------------------------------------------------------------*/
449 void set_dma_start_addr(unsigned int channel, unsigned long addr)
451 pr_debug("set_dma_start_addr() : BEGIN \n");
453 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
454 && channel < MAX_BLACKFIN_DMA_CHANNEL));
456 dma_ch[channel].regs->start_addr = addr;
458 pr_debug("set_dma_start_addr() : END\n");
460 EXPORT_SYMBOL(set_dma_start_addr);
462 void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
464 pr_debug("set_dma_next_desc_addr() : BEGIN \n");
466 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
467 && channel < MAX_BLACKFIN_DMA_CHANNEL));
469 dma_ch[channel].regs->next_desc_ptr = addr;
471 pr_debug("set_dma_start_addr() : END\n");
473 EXPORT_SYMBOL(set_dma_next_desc_addr);
475 void set_dma_x_count(unsigned int channel, unsigned short x_count)
477 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
478 && channel < MAX_BLACKFIN_DMA_CHANNEL));
480 dma_ch[channel].regs->x_count = x_count;
483 EXPORT_SYMBOL(set_dma_x_count);
485 void set_dma_y_count(unsigned int channel, unsigned short y_count)
487 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
488 && channel < MAX_BLACKFIN_DMA_CHANNEL));
490 dma_ch[channel].regs->y_count = y_count;
493 EXPORT_SYMBOL(set_dma_y_count);
495 void set_dma_x_modify(unsigned int channel, short x_modify)
497 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
498 && channel < MAX_BLACKFIN_DMA_CHANNEL));
500 dma_ch[channel].regs->x_modify = x_modify;
503 EXPORT_SYMBOL(set_dma_x_modify);
505 void set_dma_y_modify(unsigned int channel, short y_modify)
507 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
508 && channel < MAX_BLACKFIN_DMA_CHANNEL));
510 dma_ch[channel].regs->y_modify = y_modify;
513 EXPORT_SYMBOL(set_dma_y_modify);
515 void set_dma_config(unsigned int channel, unsigned short config)
517 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
518 && channel < MAX_BLACKFIN_DMA_CHANNEL));
520 dma_ch[channel].regs->cfg = config;
523 EXPORT_SYMBOL(set_dma_config);
526 set_bfin_dma_config(char direction, char flow_mode,
527 char intr_mode, char dma_mode, char width)
529 unsigned short config;
532 ((direction << 1) | (width << 2) | (dma_mode << 4) |
533 (intr_mode << 6) | (flow_mode << 12) | RESTART);
536 EXPORT_SYMBOL(set_bfin_dma_config);
538 void set_dma_sg(unsigned int channel, struct dmasg * sg, int nr_sg)
540 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
541 && channel < MAX_BLACKFIN_DMA_CHANNEL));
543 dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);
545 dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;
549 EXPORT_SYMBOL(set_dma_sg);
551 /*------------------------------------------------------------------------------
552 * Get the DMA status of a specific DMA channel from the system.
553 *-----------------------------------------------------------------------------*/
554 unsigned short get_dma_curr_irqstat(unsigned int channel)
556 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
557 && channel < MAX_BLACKFIN_DMA_CHANNEL));
559 return dma_ch[channel].regs->irq_status;
561 EXPORT_SYMBOL(get_dma_curr_irqstat);
563 /*------------------------------------------------------------------------------
564 * Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
565 *-----------------------------------------------------------------------------*/
566 void clear_dma_irqstat(unsigned int channel)
568 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
569 && channel < MAX_BLACKFIN_DMA_CHANNEL));
570 dma_ch[channel].regs->irq_status |= 3;
572 EXPORT_SYMBOL(clear_dma_irqstat);
574 /*------------------------------------------------------------------------------
575 * Get current DMA xcount of a specific DMA channel from the system.
576 *-----------------------------------------------------------------------------*/
577 unsigned short get_dma_curr_xcount(unsigned int channel)
579 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
580 && channel < MAX_BLACKFIN_DMA_CHANNEL));
582 return dma_ch[channel].regs->curr_x_count;
584 EXPORT_SYMBOL(get_dma_curr_xcount);
586 /*------------------------------------------------------------------------------
587 * Get current DMA ycount of a specific DMA channel from the system.
588 *-----------------------------------------------------------------------------*/
589 unsigned short get_dma_curr_ycount(unsigned int channel)
591 BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
592 && channel < MAX_BLACKFIN_DMA_CHANNEL));
594 return dma_ch[channel].regs->curr_y_count;
596 EXPORT_SYMBOL(get_dma_curr_ycount);
598 static void *__dma_memcpy(void *dest, const void *src, size_t size)
600 int direction; /* 1 - address decrease, 0 - address increase */
601 int flag_align; /* 1 - address aligned, 0 - address unaligned */
602 int flag_2D; /* 1 - 2D DMA needed, 0 - 1D DMA needed */
608 local_irq_save(flags);
610 if ((unsigned long)src < memory_end)
611 blackfin_dcache_flush_range((unsigned int)src,
612 (unsigned int)(src + size));
614 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
616 if ((unsigned long)src < (unsigned long)dest)
621 if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
622 && ((size % 2) == 0))
627 if (size > 0x10000) /* size > 64K */
632 /* Setup destination and source start address */
635 bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
636 bfin_write_MDMA_S0_START_ADDR(src + size - 2);
638 bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
639 bfin_write_MDMA_S0_START_ADDR(src + size - 1);
642 bfin_write_MDMA_D0_START_ADDR(dest);
643 bfin_write_MDMA_S0_START_ADDR(src);
646 /* Setup destination and source xcount */
649 bfin_write_MDMA_D0_X_COUNT(1024 / 2);
650 bfin_write_MDMA_S0_X_COUNT(1024 / 2);
652 bfin_write_MDMA_D0_X_COUNT(1024);
653 bfin_write_MDMA_S0_X_COUNT(1024);
655 bfin_write_MDMA_D0_Y_COUNT(size >> 10);
656 bfin_write_MDMA_S0_Y_COUNT(size >> 10);
659 bfin_write_MDMA_D0_X_COUNT(size / 2);
660 bfin_write_MDMA_S0_X_COUNT(size / 2);
662 bfin_write_MDMA_D0_X_COUNT(size);
663 bfin_write_MDMA_S0_X_COUNT(size);
667 /* Setup destination and source xmodify and ymodify */
670 bfin_write_MDMA_D0_X_MODIFY(-2);
671 bfin_write_MDMA_S0_X_MODIFY(-2);
673 bfin_write_MDMA_D0_Y_MODIFY(-2);
674 bfin_write_MDMA_S0_Y_MODIFY(-2);
677 bfin_write_MDMA_D0_X_MODIFY(-1);
678 bfin_write_MDMA_S0_X_MODIFY(-1);
680 bfin_write_MDMA_D0_Y_MODIFY(-1);
681 bfin_write_MDMA_S0_Y_MODIFY(-1);
686 bfin_write_MDMA_D0_X_MODIFY(2);
687 bfin_write_MDMA_S0_X_MODIFY(2);
689 bfin_write_MDMA_D0_Y_MODIFY(2);
690 bfin_write_MDMA_S0_Y_MODIFY(2);
693 bfin_write_MDMA_D0_X_MODIFY(1);
694 bfin_write_MDMA_S0_X_MODIFY(1);
696 bfin_write_MDMA_D0_Y_MODIFY(1);
697 bfin_write_MDMA_S0_Y_MODIFY(1);
702 /* Enable source DMA */
705 bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
706 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
708 bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
709 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
713 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
714 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
716 bfin_write_MDMA_S0_CONFIG(DMAEN);
717 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
721 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
724 bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
725 (DMA_DONE | DMA_ERR));
727 bfin_write_MDMA_S0_CONFIG(0);
728 bfin_write_MDMA_D0_CONFIG(0);
730 if ((unsigned long)dest < memory_end)
731 blackfin_dcache_invalidate_range((unsigned int)dest,
732 (unsigned int)(dest + size));
733 local_irq_restore(flags);
738 void *dma_memcpy(void *dest, const void *src, size_t size)
744 bulk = (size >> 16) << 16;
747 __dma_memcpy(dest, src, bulk);
748 addr = __dma_memcpy(dest+bulk, src+bulk, rest);
752 EXPORT_SYMBOL(dma_memcpy);
754 void *safe_dma_memcpy(void *dest, const void *src, size_t size)
757 addr = dma_memcpy(dest, src, size);
760 EXPORT_SYMBOL(safe_dma_memcpy);
762 void dma_outsb(void __iomem *addr, const void *buf, unsigned short len)
767 local_irq_save(flags);
769 blackfin_dcache_flush_range((unsigned int)buf,(unsigned int)(buf) + len);
771 bfin_write_MDMA_D0_START_ADDR(addr);
772 bfin_write_MDMA_D0_X_COUNT(len);
773 bfin_write_MDMA_D0_X_MODIFY(0);
774 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
776 bfin_write_MDMA_S0_START_ADDR(buf);
777 bfin_write_MDMA_S0_X_COUNT(len);
778 bfin_write_MDMA_S0_X_MODIFY(1);
779 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
781 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
782 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);
784 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
786 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
788 bfin_write_MDMA_S0_CONFIG(0);
789 bfin_write_MDMA_D0_CONFIG(0);
790 local_irq_restore(flags);
793 EXPORT_SYMBOL(dma_outsb);
796 void dma_insb(const void __iomem *addr, void *buf, unsigned short len)
800 local_irq_save(flags);
801 bfin_write_MDMA_D0_START_ADDR(buf);
802 bfin_write_MDMA_D0_X_COUNT(len);
803 bfin_write_MDMA_D0_X_MODIFY(1);
804 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
806 bfin_write_MDMA_S0_START_ADDR(addr);
807 bfin_write_MDMA_S0_X_COUNT(len);
808 bfin_write_MDMA_S0_X_MODIFY(0);
809 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
811 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
812 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);
814 blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);
816 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
818 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
820 bfin_write_MDMA_S0_CONFIG(0);
821 bfin_write_MDMA_D0_CONFIG(0);
822 local_irq_restore(flags);
825 EXPORT_SYMBOL(dma_insb);
827 void dma_outsw(void __iomem *addr, const void *buf, unsigned short len)
831 local_irq_save(flags);
833 blackfin_dcache_flush_range((unsigned int)buf,(unsigned int)(buf) + len);
835 bfin_write_MDMA_D0_START_ADDR(addr);
836 bfin_write_MDMA_D0_X_COUNT(len);
837 bfin_write_MDMA_D0_X_MODIFY(0);
838 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
840 bfin_write_MDMA_S0_START_ADDR(buf);
841 bfin_write_MDMA_S0_X_COUNT(len);
842 bfin_write_MDMA_S0_X_MODIFY(2);
843 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
845 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
846 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
848 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
850 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
852 bfin_write_MDMA_S0_CONFIG(0);
853 bfin_write_MDMA_D0_CONFIG(0);
854 local_irq_restore(flags);
857 EXPORT_SYMBOL(dma_outsw);
859 void dma_insw(const void __iomem *addr, void *buf, unsigned short len)
863 local_irq_save(flags);
865 bfin_write_MDMA_D0_START_ADDR(buf);
866 bfin_write_MDMA_D0_X_COUNT(len);
867 bfin_write_MDMA_D0_X_MODIFY(2);
868 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
870 bfin_write_MDMA_S0_START_ADDR(addr);
871 bfin_write_MDMA_S0_X_COUNT(len);
872 bfin_write_MDMA_S0_X_MODIFY(0);
873 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
875 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
876 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
878 blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);
880 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
882 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
884 bfin_write_MDMA_S0_CONFIG(0);
885 bfin_write_MDMA_D0_CONFIG(0);
886 local_irq_restore(flags);
889 EXPORT_SYMBOL(dma_insw);
891 void dma_outsl(void __iomem *addr, const void *buf, unsigned short len)
895 local_irq_save(flags);
897 blackfin_dcache_flush_range((unsigned int)buf,(unsigned int)(buf) + len);
899 bfin_write_MDMA_D0_START_ADDR(addr);
900 bfin_write_MDMA_D0_X_COUNT(len);
901 bfin_write_MDMA_D0_X_MODIFY(0);
902 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
904 bfin_write_MDMA_S0_START_ADDR(buf);
905 bfin_write_MDMA_S0_X_COUNT(len);
906 bfin_write_MDMA_S0_X_MODIFY(4);
907 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
909 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
910 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);
912 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
914 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
916 bfin_write_MDMA_S0_CONFIG(0);
917 bfin_write_MDMA_D0_CONFIG(0);
918 local_irq_restore(flags);
921 EXPORT_SYMBOL(dma_outsl);
923 void dma_insl(const void __iomem *addr, void *buf, unsigned short len)
927 local_irq_save(flags);
929 bfin_write_MDMA_D0_START_ADDR(buf);
930 bfin_write_MDMA_D0_X_COUNT(len);
931 bfin_write_MDMA_D0_X_MODIFY(4);
932 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
934 bfin_write_MDMA_S0_START_ADDR(addr);
935 bfin_write_MDMA_S0_X_COUNT(len);
936 bfin_write_MDMA_S0_X_MODIFY(0);
937 bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);
939 bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
940 bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);
942 blackfin_dcache_invalidate_range((unsigned int)buf, (unsigned int)(buf) + len);
944 while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));
946 bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);
948 bfin_write_MDMA_S0_CONFIG(0);
949 bfin_write_MDMA_D0_CONFIG(0);
950 local_irq_restore(flags);
953 EXPORT_SYMBOL(dma_insl);