/*
 * arch/ppc/kernel/ppc4xx_dma.c
 *
 * IBM PPC4xx DMA engine core library
 *
 * Copyright 2000-2004 MontaVista Software Inc.
 *
 * Cleaned up and converted to new DCR access
 * Matt Porter <mporter@kernel.crashing.org>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/config.h>
#include <linux/kernel.h>
#include <linux/miscdevice.h>
#include <linux/init.h>
#include <linux/module.h>

#include <asm/system.h>
#include <asm/ppc4xx_dma.h>

ppc_dma_ch_t dma_channels[MAX_PPC4xx_DMA_CHANNELS];

int
ppc4xx_get_dma_status(void)
{
	return (mfdcr(DCRN_DMASR));
}

void
ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_src_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMASAH0 + dmanr*2, (u32)(src_addr >> 32));
#else
	mtdcr(DCRN_DMASA0 + dmanr*2, (u32)src_addr);
#endif
}

void
ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dst_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMADAH0 + dmanr*2, (u32)(dst_addr >> 32));
#else
	mtdcr(DCRN_DMADA0 + dmanr*2, (u32)dst_addr);
#endif
}

void
ppc4xx_enable_dma(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
	unsigned int status_bits[] = { DMA_CS0 | DMA_TS0 | DMA_CH0_ERR,
				       DMA_CS1 | DMA_TS1 | DMA_CH1_ERR,
				       DMA_CS2 | DMA_TS2 | DMA_CH2_ERR,
				       DMA_CS3 | DMA_TS3 | DMA_CH3_ERR};

	/* validate the channel number before touching the channel structure */
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("enable_dma: bad channel: %d\n", dmanr);
		return;
	}

	if (p_dma_ch->in_use) {
		printk("enable_dma: channel %d in use\n", dmanr);
		return;
	}

	if (p_dma_ch->mode == DMA_MODE_READ) {
		/* peripheral to memory */
		ppc4xx_set_src_addr(dmanr, 0);
		ppc4xx_set_dst_addr(dmanr, p_dma_ch->addr);
	} else if (p_dma_ch->mode == DMA_MODE_WRITE) {
		/* memory to peripheral */
		ppc4xx_set_src_addr(dmanr, p_dma_ch->addr);
		ppc4xx_set_dst_addr(dmanr, 0);
	}

	/* for other xfer modes, the addresses are already set */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	control &= ~(DMA_TM_MASK | DMA_TD);	/* clear all mode bits */
	if (p_dma_ch->mode == DMA_MODE_MM) {
		/* software initiated memory to memory */
		control |= DMA_ETD_OUTPUT | DMA_TCE_ENABLE;
	}

	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/*
	 * Clear the CS, TS, RI bits for the channel from DMASR.  This
	 * has been observed to happen correctly only after the mode and
	 * ETD/DCE bits in DMACRx are set above.  Must do this before
	 * enabling the channel.
	 */
	mtdcr(DCRN_DMASR, status_bits[dmanr]);

	/*
	 * For device-paced transfers, Terminal Count Enable apparently
	 * must be on, and this must be turned on after the mode, etc.
	 * bits are cleared above (at least on Redwood-6).
	 */
	if ((p_dma_ch->mode == DMA_MODE_MM_DEVATDST) ||
	    (p_dma_ch->mode == DMA_MODE_MM_DEVATSRC))
		control |= DMA_TCE_ENABLE;

	/*
	 * Now enable the channel.
	 */
	control |= (p_dma_ch->mode | DMA_CE_ENABLE);

	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	p_dma_ch->in_use = 1;
}

void
ppc4xx_disable_dma(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	/* validate the channel number before touching the channel structure */
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("disable_dma: bad channel: %d\n", dmanr);
		return;
	}

	if (!p_dma_ch->in_use) {
		printk("disable_dma: channel %d not in use\n", dmanr);
		return;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control &= ~DMA_CE_ENABLE;
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	p_dma_ch->in_use = 0;
}

/*
 * Sets the dma mode for single DMA transfers only.
 * For scatter/gather transfers, the mode is passed to the
 * alloc_dma_handle() function as one of the parameters.
 *
 * The mode is simply saved and used later.  This allows
 * the driver to call set_dma_mode() and set_dma_addr() in
 * any order.
 *
 * Valid mode values are:
 *
 * DMA_MODE_READ          peripheral to memory
 * DMA_MODE_WRITE         memory to peripheral
 * DMA_MODE_MM            memory to memory
 * DMA_MODE_MM_DEVATSRC   device-paced memory to memory, device at src
 * DMA_MODE_MM_DEVATDST   device-paced memory to memory, device at dst
 */
int
ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dma_mode: bad channel 0x%x\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->mode = mode;

	return DMA_STATUS_GOOD;
}
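
/*
 * Illustrative sketch only (not part of the original driver, kept out of
 * the build with #if 0): the typical single-transfer sequence using the
 * routines in this file.  The helper name example_start_dma_read() is
 * hypothetical; the DMA_MODE_*/DMA_STATUS_* values and the ppc4xx_* calls
 * are the ones defined by this library.  Mode and address are only cached
 * in dma_channels[] here; the hardware is programmed in ppc4xx_enable_dma().
 */
#if 0
static int example_start_dma_read(unsigned int dmanr, phys_addr_t buf,
				  unsigned int nbytes)
{
	int status;

	/* peripheral to memory; saved in the channel structure */
	status = ppc4xx_set_dma_mode(dmanr, DMA_MODE_READ);
	if (status != DMA_STATUS_GOOD)
		return status;

	/* memory-side address, also just cached until enable time */
	ppc4xx_set_dma_addr(dmanr, buf);

	/* count is in bytes and must be a multiple of the bus width */
	ppc4xx_set_dma_count(dmanr, nbytes);

	/* programs DMACRx/DMASAx/DMADAx and sets CE to start the transfer */
	ppc4xx_enable_dma(dmanr);

	return DMA_STATUS_GOOD;
}
#endif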

/*
 * Sets the DMA Count register. Note that 'count' is in bytes.
 * However, the DMA Count register counts the number of "transfers",
 * where each transfer is equal to the bus width.  Thus, count
 * MUST be a multiple of the bus width.
 */
void
ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

#ifdef DEBUG_4xxDMA
	{
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (count & 0x1)
				error = 1;
			break;
		case PW_32:
			if (count & 0x3)
				error = 1;
			break;
		case PW_64:
			if (count & 0x7)
				error = 1;
			break;
		default:
			printk("set_dma_count: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk
			    ("Warning: set_dma_count count 0x%x bus width %d\n",
			     count, p_dma_ch->pwidth);
	}
#endif

	count = count >> p_dma_ch->shift;

	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), count);
}

/*
 * Returns the number of bytes left to be transferred.
 * After a DMA transfer, this should return zero.
 * Reading this while a DMA transfer is still in progress will return
 * unpredictable results.
 */
int
ppc4xx_get_dma_residue(unsigned int dmanr)
{
	unsigned int count;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_dma_residue: bad channel 0x%x\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	count = mfdcr(DCRN_DMACT0 + (dmanr * 0x8));

	return (count << p_dma_ch->shift);
}
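
/*
 * Illustrative sketch only (not built): how the byte counts handed to
 * ppc4xx_set_dma_count() relate to the "transfer" counts in the DMA Count
 * register, and how ppc4xx_get_dma_residue() converts back.  Assumes the
 * channel was initialised with a 32-bit peripheral width, so shift == 2 as
 * described in the ppc4xx_init_dma_channel() comments below.
 */
#if 0
static void example_count_arithmetic(unsigned int dmanr)
{
	unsigned int nbytes = 4096;

	/* 4096 bytes on a 32-bit wide channel -> 4096 >> 2 = 1024 transfers */
	ppc4xx_set_dma_count(dmanr, nbytes);

	/*
	 * After completion the residue should be 0; while the channel is
	 * still running the value read back is count << shift, i.e. bytes.
	 */
	if (ppc4xx_get_dma_residue(dmanr) != 0)
		printk("example: %d bytes still pending\n",
		       ppc4xx_get_dma_residue(dmanr));
}
#endif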

/*
 * Sets the DMA address for a memory to peripheral or peripheral
 * to memory transfer.  The address is just saved in the channel
 * structure for now and used later in enable_dma().
 */
void
ppc4xx_set_dma_addr(unsigned int dmanr, phys_addr_t addr)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if ((unsigned) addr & 0x1)
				error = 1;
			break;
		case PW_32:
			if ((unsigned) addr & 0x3)
				error = 1;
			break;
		case PW_64:
			if ((unsigned) addr & 0x7)
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: ppc4xx_set_dma_addr addr 0x%x bus width %d\n",
			       addr, p_dma_ch->pwidth);
	}
#endif

	/* save dma address and program it later after we know the xfer mode */
	p_dma_ch->addr = addr;
}

/*
 * Sets both DMA addresses for a memory to memory transfer.
 * For memory to peripheral or peripheral to memory transfers
 * the function set_dma_addr() should be used instead.
 */
void
ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
		     phys_addr_t dst_dma_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr2: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
		int error = 0;

		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (((unsigned) src_dma_addr & 0x1) ||
			    ((unsigned) dst_dma_addr & 0x1))
				error = 1;
			break;
		case PW_32:
			if (((unsigned) src_dma_addr & 0x3) ||
			    ((unsigned) dst_dma_addr & 0x3))
				error = 1;
			break;
		case PW_64:
			if (((unsigned) src_dma_addr & 0x7) ||
			    ((unsigned) dst_dma_addr & 0x7))
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr2: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk
			    ("Warning: ppc4xx_set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
			     src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
	}
#endif

	ppc4xx_set_src_addr(dmanr, src_dma_addr);
	ppc4xx_set_dst_addr(dmanr, dst_dma_addr);
}
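
/*
 * Illustrative sketch only (not built): a software-initiated memory to
 * memory transfer, which uses ppc4xx_set_dma_addr2() to program both
 * addresses directly instead of caching a single address for enable time.
 * The helper name example_mem_to_mem() is hypothetical; src, dst and
 * nbytes are assumed to be bus-width-aligned physical addresses/lengths.
 */
#if 0
static int example_mem_to_mem(unsigned int dmanr, phys_addr_t src,
			      phys_addr_t dst, unsigned int nbytes)
{
	int status;

	status = ppc4xx_set_dma_mode(dmanr, DMA_MODE_MM);
	if (status != DMA_STATUS_GOOD)
		return status;

	/* both addresses go straight to the DMASAx/DMADAx registers */
	ppc4xx_set_dma_addr2(dmanr, src, dst);
	ppc4xx_set_dma_count(dmanr, nbytes);
	ppc4xx_enable_dma(dmanr);

	return DMA_STATUS_GOOD;
}
#endif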

/*
 * Enables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be enabled, if
 * they were previously disabled.
 */
int
ppc4xx_enable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_enable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->int_enable = 1;

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= DMA_CIE_ENABLE;	/* Channel Interrupt Enable */
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Disables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be disabled, if
 * they were previously enabled.
 */
int
ppc4xx_disable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_disable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->int_enable = 0;

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control &= ~DMA_CIE_ENABLE;	/* clear Channel Interrupt Enable */
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}
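
/*
 * Illustrative sketch only (not built): the ordering constraint from the
 * comments above.  For scatter/gather transfers, choose the interrupt state
 * before the sgl handle is allocated, because (per the comments in this
 * file) the channel settings are picked up when the handle is allocated and
 * the sgl list is built.  ppc4xx_alloc_dma_handle() lives in the
 * scatter/gather part of the library and is only named here, not called
 * with any particular signature.
 */
#if 0
static void example_sgl_irq_setup(unsigned int dmanr)
{
	/* decide on interrupts first ... */
	ppc4xx_enable_dma_interrupt(dmanr);

	/*
	 * ... then allocate the sgl handle with ppc4xx_alloc_dma_handle()
	 * and build the sgl list, which snapshot the channel settings.
	 */
}
#endif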

/*
 * Configures a DMA channel, including the peripheral bus width, if a
 * peripheral is attached to the channel, the polarity of the DMAReq and
 * DMAAck signals, etc.  This information should really be setup by the boot
 * code, since most likely the configuration won't change dynamically.
 * If the kernel has to call this function, it's recommended that it's
 * called from platform specific init code.  The driver should not need to
 * call this function.
 */
int
ppc4xx_init_dma_channel(unsigned int dmanr, ppc_dma_ch_t * p_init)
{
	unsigned int polarity;
	uint32_t control = 0;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	DMA_MODE_READ = (unsigned long) DMA_TD;	/* Peripheral to Memory */
	DMA_MODE_WRITE = 0;	/* Memory to Peripheral */

	if (!p_init) {
		printk("ppc4xx_init_dma_channel: NULL p_init\n");
		return DMA_STATUS_NULL_POINTER;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_init_dma_channel: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	/* Setup the control register based on the values passed to
	 * us in p_init.  Then, over-write the control register with this
	 * new value.
	 */
	control |= SET_DMA_CONTROL;

	/* clear all polarity signals and then "or" in new signal levels */
	polarity &= ~GET_DMA_POLARITY(dmanr);
	polarity |= p_init->polarity;
#if DCRN_POL > 0
	mtdcr(DCRN_POL, polarity);
#endif
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/* save these values in our dma channel structure */
	memcpy(p_dma_ch, p_init, sizeof (ppc_dma_ch_t));

	/*
	 * The peripheral width values written in the control register are:
	 *   PW_8                 0
	 *   PW_16                1
	 *   PW_32                2
	 *   PW_64                3
	 *
	 *   Since the DMA count register takes the number of "transfers",
	 *   we need to divide the count sent to us in certain
	 *   functions by the appropriate number.  It so happens that our
	 *   right shift value is equal to the peripheral width value.
	 */
	p_dma_ch->shift = p_init->pwidth;

	/*
	 * Save the control word for easy access.
	 */
	p_dma_ch->control = control;

	mtdcr(DCRN_DMASR, 0xffffffff);	/* clear status register */
	return DMA_STATUS_GOOD;
}
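
/*
 * Illustrative sketch only (not built): board init code filling in a
 * ppc_dma_ch_t and handing it to ppc4xx_init_dma_channel().  Only fields
 * that this file itself reads or reports (polarity, pwidth, int_enable)
 * are shown; the remaining ppc_dma_ch_t fields are board specific, and the
 * values below are placeholders, not a recommended configuration.
 */
#if 0
static int example_board_dma_init(unsigned int dmanr)
{
	ppc_dma_ch_t init = {
		.polarity	= 0,	/* board-specific DMAReq/DMAAck polarity bits */
		.pwidth		= PW_32,	/* 32-bit peripheral, so shift = 2 */
		.int_enable	= 0,	/* no channel interrupt yet */
	};

	return ppc4xx_init_dma_channel(dmanr, &init);
}
#endif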

/*
 * This function returns the channel configuration.
 */
int
ppc4xx_get_channel_config(unsigned int dmanr, ppc_dma_ch_t * p_dma_ch)
{
	unsigned int polarity;
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_channel_config: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	memcpy(p_dma_ch, &dma_channels[dmanr], sizeof (ppc_dma_ch_t));

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	p_dma_ch->polarity = polarity & GET_DMA_POLARITY(dmanr);
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	p_dma_ch->cp = GET_DMA_PRIORITY(control);
	p_dma_ch->pwidth = GET_DMA_PW(control);
	p_dma_ch->psc = GET_DMA_PSC(control);
	p_dma_ch->pwc = GET_DMA_PWC(control);
	p_dma_ch->phc = GET_DMA_PHC(control);
	p_dma_ch->ce = GET_DMA_CE_ENABLE(control);
	p_dma_ch->int_enable = GET_DMA_CIE_ENABLE(control);
	p_dma_ch->shift = GET_DMA_PW(control);

#ifdef CONFIG_PPC4xx_EDMA
	p_dma_ch->pf = GET_DMA_PREFETCH(control);
#else
	p_dma_ch->ch_enable = GET_DMA_CH(control);
	p_dma_ch->ece_enable = GET_DMA_ECE(control);
	p_dma_ch->tcd_disable = GET_DMA_TCD(control);
#endif
	return DMA_STATUS_GOOD;
}

/*
 * Sets the priority for the DMA channel dmanr.
 * Since this is setup by the hardware init function, this function
 * can be used to dynamically change the priority of a channel.
 *
 * Acceptable priorities:
 *
 * PRIORITY_LOW
 * PRIORITY_MID_LOW
 * PRIORITY_MID_HIGH
 * PRIORITY_HIGH
 */
int
ppc4xx_set_channel_priority(unsigned int dmanr, unsigned int priority)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_channel_priority: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	if ((priority != PRIORITY_LOW) &&
	    (priority != PRIORITY_MID_LOW) &&
	    (priority != PRIORITY_MID_HIGH) && (priority != PRIORITY_HIGH)) {
		printk("ppc4xx_set_channel_priority: bad priority: 0x%x\n",
		       priority);
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= SET_DMA_PRIORITY(priority);
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Returns the width of the peripheral attached to this channel. This assumes
 * that someone who knows the hardware configuration, boot code or some other
 * init code, already set the width.
 *
 * The return value is one of:
 *   PW_8
 *   PW_16
 *   PW_32
 *   PW_64
 *
 * The function returns 0 on error.
 */
int
ppc4xx_get_peripheral_width(unsigned int dmanr)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_peripheral_width: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	return (GET_DMA_PW(control));
}
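
/*
 * Illustrative sketch only (not built): querying the configured peripheral
 * width and raising a channel's priority at run time with the two routines
 * above.  PRIORITY_HIGH and the PW_* codes are the values documented in the
 * comments in this file; the policy shown is arbitrary.
 */
#if 0
static void example_bump_priority(unsigned int dmanr)
{
	int pw = ppc4xx_get_peripheral_width(dmanr);

	/* give wide (32/64-bit) peripherals the highest arbitration slot */
	if (pw == PW_32 || pw == PW_64)
		ppc4xx_set_channel_priority(dmanr, PRIORITY_HIGH);
}
#endif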

/*
 * Clears the channel status bits
 */
int
ppc4xx_clr_dma_status(unsigned int dmanr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_clr_dma_status: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	mtdcr(DCRN_DMASR, ((u32)DMA_CH0_ERR | (u32)DMA_CS0 | (u32)DMA_TS0) >> dmanr);
	return DMA_STATUS_GOOD;
}

#ifdef CONFIG_PPC4xx_EDMA
/*
 * Enables the burst on the channel (BTEN bit in the control/count register)
 *
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_enable_burst(unsigned int dmanr)
{
	unsigned int ctc;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_enable_burst: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) | DMA_CTC_BTEN;
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}

/*
 * Disables the burst on the channel (BTEN bit in the control/count register)
 *
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_disable_burst(unsigned int dmanr)
{
	unsigned int ctc;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_disable_burst: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) & ~DMA_CTC_BTEN;
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}

/*
 * Sets the burst size (number of peripheral widths) for the channel
 * (BSIZ bits in the control/count register)
 *
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_set_burst_size(unsigned int dmanr, unsigned int bsize)
{
	unsigned int ctc;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_set_burst_size: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) & ~DMA_CTC_BSIZ_MSK;
	ctc |= (bsize & DMA_CTC_BSIZ_MSK);
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}
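
/*
 * Illustrative sketch only (not built): burst setup for a scatter/gather
 * transfer.  Per the comments above, the control/count register must carry
 * the burst settings before the sgl handle is allocated, since it is copied
 * into each sgl element.  'bsize' is assumed to be one of the DMA_CTC_BSIZ_*
 * encodings from ppc4xx_dma.h (only DMA_CTC_BSIZ_MSK is used by the code
 * above, so the exact names are an assumption here).
 */
#if 0
static int example_burst_setup(unsigned int dmanr, unsigned int bsize)
{
	int status;

	status = ppc4xx_enable_burst(dmanr);
	if (status != DMA_STATUS_GOOD)
		return status;

	status = ppc4xx_set_burst_size(dmanr, bsize);
	if (status != DMA_STATUS_GOOD)
		return status;

	/* ...only now allocate the sgl handle and build the sgl list... */
	return DMA_STATUS_GOOD;
}
#endif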

EXPORT_SYMBOL(ppc4xx_enable_burst);
EXPORT_SYMBOL(ppc4xx_disable_burst);
EXPORT_SYMBOL(ppc4xx_set_burst_size);
#endif /* CONFIG_PPC4xx_EDMA */

EXPORT_SYMBOL(ppc4xx_init_dma_channel);
EXPORT_SYMBOL(ppc4xx_get_channel_config);
EXPORT_SYMBOL(ppc4xx_set_channel_priority);
EXPORT_SYMBOL(ppc4xx_get_peripheral_width);
EXPORT_SYMBOL(dma_channels);
EXPORT_SYMBOL(ppc4xx_set_src_addr);
EXPORT_SYMBOL(ppc4xx_set_dst_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr2);
EXPORT_SYMBOL(ppc4xx_enable_dma);
EXPORT_SYMBOL(ppc4xx_disable_dma);
EXPORT_SYMBOL(ppc4xx_set_dma_mode);
EXPORT_SYMBOL(ppc4xx_set_dma_count);
EXPORT_SYMBOL(ppc4xx_get_dma_residue);
EXPORT_SYMBOL(ppc4xx_enable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_disable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_get_dma_status);
EXPORT_SYMBOL(ppc4xx_clr_dma_status);