2 * BRIEF MODULE DESCRIPTION
3 * AMD Alchemy Au1xxx IDE interface routines over the Static Bus
5 * Copyright (c) 2003-2005 AMD, Personal Connectivity Solutions
7 * This program is free software; you can redistribute it and/or modify it under
8 * the terms of the GNU General Public License as published by the Free Software
9 * Foundation; either version 2 of the License, or (at your option) any later
12 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
13 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
14 * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR
15 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
16 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
17 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
18 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
19 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
20 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
21 * POSSIBILITY OF SUCH DAMAGE.
23 * You should have received a copy of the GNU General Public License along with
24 * this program; if not, write to the Free Software Foundation, Inc.,
25 * 675 Mass Ave, Cambridge, MA 02139, USA.
27 * Note: for more information, please refer to the "AMD Alchemy Au1200/Au1550 IDE
28 * Interface and Linux Device Driver" Application Note.
30 #include <linux/types.h>
31 #include <linux/module.h>
32 #include <linux/kernel.h>
33 #include <linux/delay.h>
34 #include <linux/platform_device.h>
36 #include <linux/init.h>
37 #include <linux/ide.h>
38 #include <linux/sysdev.h>
40 #include <linux/dma-mapping.h>
42 #include "ide-timing.h"
45 #include <asm/mach-au1x00/au1xxx.h>
46 #include <asm/mach-au1x00/au1xxx_dbdma.h>
48 #include <asm/mach-au1x00/au1xxx_ide.h>
/* Driver identification strings used for registration and log output. */
50 #define DRV_NAME "au1200-ide"
51 #define DRV_AUTHOR "Enrico Walther <enrico.walther@amd.com> / Pete Popov <ppopov@embeddedalley.com>"
53 /* enable the burstmode in the dbdma */
54 #define IDE_AU1XXX_BURSTMODE 1
/* Single controller instance; this driver supports exactly one IDE interface. */
56 static _auide_hwif auide_hwif;
/* Nonzero once the DbDMA channels/rings are set up (checked in auide_mdma_filter). */
57 static int dbdma_init_done;
59 #if defined(CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA)
/*
 * DbDMA-assisted replacement for the generic insw(): transfer 'count'
 * 16-bit units (count << 1 bytes) from the IDE data port into 'addr' by
 * queueing a single RX descriptor and busy-waiting for its completion.
 * 'port' is unused here; the RX channel is already bound to the ATA device.
 */
61 void auide_insw(unsigned long port, void *addr, u32 count)
63 	_auide_hwif *ahwif = &auide_hwif;
	/* Queue the destination buffer; NOIE = no completion interrupt. */
67 	if(!put_dest_flags(ahwif->rx_chan, (void*)addr, count << 1,
69 		printk(KERN_ERR "%s failed %d\n", __FUNCTION__, __LINE__);
	/* Spin until hardware clears the descriptor's valid bit, then advance. */
72 	ctp = *((chan_tab_t **)ahwif->rx_chan);
74 	while (dp->dscr_cmd0 & DSCR_CMD0_V)
76 	ctp->cur_ptr = au1xxx_ddma_get_nextptr_virt(dp);
/*
 * DbDMA-assisted replacement for the generic outsw(): transfer 'count'
 * 16-bit units (count << 1 bytes) from 'addr' to the IDE data port via a
 * single TX descriptor, busy-waiting for its completion.  'port' is unused;
 * the TX channel is already bound to the ATA device.
 */
79 void auide_outsw(unsigned long port, void *addr, u32 count)
81 	_auide_hwif *ahwif = &auide_hwif;
	/* Queue the source buffer; NOIE = no completion interrupt. */
85 	if(!put_source_flags(ahwif->tx_chan, (void*)addr,
86 			     count << 1, DDMA_FLAGS_NOIE)) {
87 		printk(KERN_ERR "%s failed %d\n", __FUNCTION__, __LINE__);
	/* Spin until hardware clears the descriptor's valid bit, then advance. */
90 	ctp = *((chan_tab_t **)ahwif->tx_chan);
92 	while (dp->dscr_cmd0 & DSCR_CMD0_V)
94 	ctp->cur_ptr = au1xxx_ddma_get_nextptr_virt(dp);
/*
 * Program the Au1xxx static bus controller for the requested PIO mode:
 * timing goes into MEM_STTIME2, chip-select/output-enable configuration
 * into MEM_STCFG2 (the IDE interface sits on static bus chip select 2,
 * hence the RCS2# comments).  Modes 0/1 set the TS bit; modes 2-4 clear it.
 */
99 static void au1xxx_set_pio_mode(ide_drive_t *drive, const u8 pio)
101 	int mem_sttime = 0, mem_stcfg = au_readl(MEM_STCFG2);
	/* PIO mode 0 */
106 		mem_sttime = SBC_IDE_TIMING(PIO0);
108 		/* set configuration for RCS2# */
109 		mem_stcfg |= TS_MASK;
110 		mem_stcfg &= ~TCSOE_MASK;
111 		mem_stcfg &= ~TOECS_MASK;
112 		mem_stcfg |= SBC_IDE_PIO0_TCSOE | SBC_IDE_PIO0_TOECS;
	/* PIO mode 1 */
116 		mem_sttime = SBC_IDE_TIMING(PIO1);
118 		/* set configuration for RCS2# */
119 		mem_stcfg |= TS_MASK;
120 		mem_stcfg &= ~TCSOE_MASK;
121 		mem_stcfg &= ~TOECS_MASK;
122 		mem_stcfg |= SBC_IDE_PIO1_TCSOE | SBC_IDE_PIO1_TOECS;
	/* PIO mode 2 */
126 		mem_sttime = SBC_IDE_TIMING(PIO2);
128 		/* set configuration for RCS2# */
129 		mem_stcfg &= ~TS_MASK;
130 		mem_stcfg &= ~TCSOE_MASK;
131 		mem_stcfg &= ~TOECS_MASK;
132 		mem_stcfg |= SBC_IDE_PIO2_TCSOE | SBC_IDE_PIO2_TOECS;
	/* PIO mode 3 */
136 		mem_sttime = SBC_IDE_TIMING(PIO3);
138 		/* set configuration for RCS2# */
139 		mem_stcfg &= ~TS_MASK;
140 		mem_stcfg &= ~TCSOE_MASK;
141 		mem_stcfg &= ~TOECS_MASK;
142 		mem_stcfg |= SBC_IDE_PIO3_TCSOE | SBC_IDE_PIO3_TOECS;
	/* PIO mode 4 */
147 		mem_sttime = SBC_IDE_TIMING(PIO4);
149 		/* set configuration for RCS2# */
150 		mem_stcfg &= ~TS_MASK;
151 		mem_stcfg &= ~TCSOE_MASK;
152 		mem_stcfg &= ~TOECS_MASK;
153 		mem_stcfg |= SBC_IDE_PIO4_TCSOE | SBC_IDE_PIO4_TOECS;
	/* Commit the computed timing and chip-select configuration. */
157 	au_writel(mem_sttime,MEM_STTIME2);
158 	au_writel(mem_stcfg,MEM_STCFG2);
/*
 * Program the static bus controller timing/configuration for the requested
 * Multi-Word DMA mode.  The per-mode register values are only applied when
 * MDMA-over-DbDMA support is compiled in; MDMA0 sets the TS bit while
 * MDMA1/2 clear it.
 */
161 static void auide_set_dma_mode(ide_drive_t *drive, const u8 speed)
163 	int mem_sttime = 0, mem_stcfg = au_readl(MEM_STCFG2);
166 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
	/* MWDMA mode 2 */
168 		mem_sttime = SBC_IDE_TIMING(MDMA2);
170 		/* set configuration for RCS2# */
171 		mem_stcfg &= ~TS_MASK;
172 		mem_stcfg &= ~TCSOE_MASK;
173 		mem_stcfg &= ~TOECS_MASK;
174 		mem_stcfg |= SBC_IDE_MDMA2_TCSOE | SBC_IDE_MDMA2_TOECS;
	/* MWDMA mode 1 */
178 		mem_sttime = SBC_IDE_TIMING(MDMA1);
180 		/* set configuration for RCS2# */
181 		mem_stcfg &= ~TS_MASK;
182 		mem_stcfg &= ~TCSOE_MASK;
183 		mem_stcfg &= ~TOECS_MASK;
184 		mem_stcfg |= SBC_IDE_MDMA1_TCSOE | SBC_IDE_MDMA1_TOECS;
	/* MWDMA mode 0 */
188 		mem_sttime = SBC_IDE_TIMING(MDMA0);
190 		/* set configuration for RCS2# */
191 		mem_stcfg |= TS_MASK;
192 		mem_stcfg &= ~TCSOE_MASK;
193 		mem_stcfg &= ~TOECS_MASK;
194 		mem_stcfg |= SBC_IDE_MDMA0_TCSOE | SBC_IDE_MDMA0_TOECS;
	/* Commit the computed timing and chip-select configuration. */
200 	au_writel(mem_sttime,MEM_STTIME2);
201 	au_writel(mem_stcfg,MEM_STCFG2);
205 * Multi-Word DMA + DbDMA functions
208 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
/*
 * Build the DbDMA descriptor chain for the current request: map the
 * request into a scatterlist, then queue one source (write) or destination
 * (read) descriptor per chunk, capping each chunk at 0xfe00 bytes.  Only
 * the final descriptor enables a completion interrupt.  Returns nonzero on
 * success; returns 0 (after tearing down the sg mapping) to make the
 * caller fall back to PIO.
 */
209 static int auide_build_dmatable(ide_drive_t *drive)
211 	int i, iswrite, count = 0;
212 	ide_hwif_t *hwif = HWIF(drive);
214 	struct request *rq = HWGROUP(drive)->rq;
216 	_auide_hwif *ahwif = (_auide_hwif*)hwif->hwif_data;
217 	struct scatterlist *sg;
219 	iswrite = (rq_data_dir(rq) == WRITE);
220 	/* Save for interrupt context */
221 	ahwif->drive = drive;
223 	hwif->sg_nents = i = ide_build_sglist(drive, rq);
228 	/* fill the descriptors */
230 	while (i && sg_dma_len(sg)) {
234 		cur_addr = sg_dma_address(sg);
235 		cur_len = sg_dma_len(sg);
			/* Per-chunk transfer count, limited to 0xfe00 bytes. */
238 			u32 flags = DDMA_FLAGS_NOIE;
239 			unsigned int tc = (cur_len < 0xfe00)? cur_len: 0xfe00;
			/* Ran out of descriptor slots: abandon DMA for this request. */
241 			if (++count >= PRD_ENTRIES) {
242 				printk(KERN_WARNING "%s: DMA table too small\n",
244 				goto use_pio_instead;
247 			/* Lets enable intr for the last descriptor only */
249 				flags = DDMA_FLAGS_IE;
251 				flags = DDMA_FLAGS_NOIE;
			/* Writes feed the TX channel ... */
254 			if(!put_source_flags(ahwif->tx_chan,
257 				printk(KERN_ERR "%s failed %d\n",
258 				       __FUNCTION__, __LINE__);
			/* ... reads feed the RX channel. */
262 			if(!put_dest_flags(ahwif->rx_chan,
265 				printk(KERN_ERR "%s failed %d\n",
266 				       __FUNCTION__, __LINE__);
	/* Failure path: unmap the scatterlist and revert to PIO. */
281 	ide_destroy_dmatable(drive);
283 	return 0; /* revert to PIO for this request */
/*
 * IDE-core dma_end hook: tear down the scatterlist mapping created by
 * auide_build_dmatable() once the transfer has finished (only if one was
 * actually built, i.e. sg_nents is nonzero).
 */
286 static int auide_dma_end(ide_drive_t *drive)
288 	ide_hwif_t *hwif = HWIF(drive);
290 	if (hwif->sg_nents) {
291 		ide_destroy_dmatable(drive);
/*
 * IDE-core dma_start hook.  The DbDMA channels are started once at init
 * time (au1xxx_dbdma_start in auide_ddma_init) and descriptors begin
 * transferring as they are queued, so presumably nothing is required
 * here — body not visible in this excerpt, confirm against full source.
 */
298 static void auide_dma_start(ide_drive_t *drive )
/*
 * IDE-core dma_exec_cmd hook: write the taskfile command to the drive and
 * arm the generic ide_dma_intr() completion handler.
 */
303 static void auide_dma_exec_cmd(ide_drive_t *drive, u8 command)
305 	/* issue cmd to drive */
306 	ide_execute_command(drive, command, &ide_dma_intr,
/*
 * IDE-core dma_setup hook: build the DbDMA descriptor table for the
 * current request.  On failure, re-map the sg list for PIO and bail out;
 * on success, flag the drive as waiting for a DMA completion.
 */
310 static int auide_dma_setup(ide_drive_t *drive)
312 	struct request *rq = HWGROUP(drive)->rq;
314 	if (!auide_build_dmatable(drive)) {
		/* Descriptor build failed — restore the sg mapping for PIO. */
315 		ide_map_sg(drive, rq);
319 	drive->waiting_for_dma = 1;
/*
 * MWDMA mode filter.  On the first call it lazily looks up the drive in
 * the white/black lists, records the drive, and initializes the DbDMA
 * machinery.  A blacklisted drive only triggers a warning; per the FIXME
 * below the hwif's full mwdma_mask is still returned.
 */
323 static u8 auide_mdma_filter(ide_drive_t *drive)
326 	 * FIXME: ->white_list and ->black_list are based on completely bogus
327 	 * ->ide_dma_check implementation which didn't set neither the host
328 	 * controller timings nor the device for the desired transfer mode.
330 	 * They should be either removed or 0x00 MWDMA mask should be
331 	 * returned for devices on the ->black_list.
	/* One-time DbDMA setup, deferred until the first drive is filtered. */
334 	if (dbdma_init_done == 0) {
335 		auide_hwif.white_list = ide_in_drive_list(drive->id,
337 		auide_hwif.black_list = ide_in_drive_list(drive->id,
339 		auide_hwif.drive = drive;
340 		auide_ddma_init(&auide_hwif);
344 	/* Is the drive in our DMA black list? */
345 	if (auide_hwif.black_list)
346 		printk(KERN_WARNING "%s: Disabling DMA for %s (blacklisted)\n",
347 		       drive->name, drive->id->model);
349 	return drive->hwif->mwdma_mask;
/*
 * IDE-core dma_test_irq hook: check whether the DbDMA transfer has
 * completed.  'waiting_for_dma' doubles as a poll counter here — it is
 * incremented on every call and a warning is printed once it reaches
 * DMA_WAIT_TIMEOUT without the engine finishing.
 */
352 static int auide_dma_test_irq(ide_drive_t *drive)
354 	if (drive->waiting_for_dma == 0)
355 		printk(KERN_WARNING "%s: ide_dma_test_irq \
356 called while not waiting\n", drive->name);
358 	/* If dbdma didn't execute the STOP command yet, the
359 	 * active bit is still set
361 	drive->waiting_for_dma++;
362 	if (drive->waiting_for_dma >= DMA_WAIT_TIMEOUT) {
363 		printk(KERN_WARNING "%s: timeout waiting for ddma to \
364 complete\n", drive->name);
/*
 * IDE-core dma_host_set hook (enable/disable DMA at the host).  There is
 * no per-drive DMA enable bit visible on this controller, so presumably
 * this is a no-op — body not visible in this excerpt, confirm against
 * full source.
 */
371 static void auide_dma_host_set(ide_drive_t *drive, int on)
/* IDE-core dma_lost_irq hook: just report the lost interrupt. */
375 static void auide_dma_lost_irq(ide_drive_t *drive)
377 	printk(KERN_ERR "%s: IRQ lost\n", drive->name);
/*
 * DbDMA TX completion callback (runs in interrupt context): clear the
 * waiting flag on the drive saved by auide_build_dmatable().
 */
380 static void auide_ddma_tx_callback(int irq, void *param)
382 	_auide_hwif *ahwif = (_auide_hwif*)param;
383 	ahwif->drive->waiting_for_dma = 0;
/*
 * DbDMA RX completion callback (runs in interrupt context): clear the
 * waiting flag on the drive saved by auide_build_dmatable().
 */
386 static void auide_ddma_rx_callback(int irq, void *param)
388 	_auide_hwif *ahwif = (_auide_hwif*)param;
389 	ahwif->drive->waiting_for_dma = 0;
392 #endif /* end CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA */
/*
 * Fill in a DbDMA device table entry for the ATA controller: fixed
 * physical address (AU1XXX_ATA_PHYS_ADDR), no interrupt level/polarity,
 * with caller-supplied device id, transfer size, device width and flags.
 */
394 static void auide_init_dbdma_dev(dbdev_tab_t *dev, u32 dev_id, u32 tsize, u32 devwidth, u32 flags)
396 	dev->dev_id          = dev_id;
397 	dev->dev_physaddr    = (u32)AU1XXX_ATA_PHYS_ADDR;
398 	dev->dev_intlevel    = 0;
399 	dev->dev_intpolarity = 0;
400 	dev->dev_tsize       = tsize;
401 	dev->dev_devwidth    = devwidth;
402 	dev->dev_flags       = flags;
405 #if defined(CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA)
/*
 * IDE-core dma_timeout hook: report the timeout and, unless the DMA engine
 * claims completion via the test_irq hook, end the DMA operation.
 */
407 static void auide_dma_timeout(ide_drive_t *drive)
409 	ide_hwif_t *hwif = HWIF(drive);
411 	printk(KERN_ERR "%s: DMA timeout occurred: ", drive->name);
413 	if (hwif->ide_dma_test_irq(drive))
416 	hwif->ide_dma_end(drive);
/*
 * DbDMA initialization for the MWDMA-capable configuration: register
 * TX/RX device table entries for the ATA DMA request line plus an
 * "always" target device, allocate the two channels and their descriptor
 * rings, allocate the coherent PRD table, and start both channels.
 * Burst mode is requested when IDE_AU1XXX_BURSTMODE is defined.
 */
420 static int auide_ddma_init(_auide_hwif *auide) {
422 	dbdev_tab_t source_dev_tab, target_dev_tab;
423 	u32 dev_id, tsize, devwidth, flags;
424 	ide_hwif_t *hwif = auide->hwif;
426 	dev_id = AU1XXX_ATA_DDMA_REQ;
	/* Warn loudly if the drive is not on the tested whitelist. */
428 	if (auide->white_list || auide->black_list) {
436 		printk(KERN_ERR "au1xxx-ide: %s is not on ide driver whitelist.\n",auide_hwif.drive->id->model);
437 		printk(KERN_ERR "            please read 'Documentation/mips/AU1xxx_IDE.README'");
440 #ifdef IDE_AU1XXX_BURSTMODE
441 	flags = DEV_FLAGS_SYNC | DEV_FLAGS_BURSTABLE;
443 	flags = DEV_FLAGS_SYNC;
446 	/* setup dev_tab for tx channel */
447 	auide_init_dbdma_dev( &source_dev_tab,
449 			      tsize, devwidth, DEV_FLAGS_OUT | flags);
450 	auide->tx_dev_id = au1xxx_ddma_add_device( &source_dev_tab );
	/* ... and reuse the same table entry for the rx channel. */
452 	auide_init_dbdma_dev( &source_dev_tab,
454 			      tsize, devwidth, DEV_FLAGS_IN | flags);
455 	auide->rx_dev_id = au1xxx_ddma_add_device( &source_dev_tab );
457 	/* We also need to add a target device for the DMA */
458 	auide_init_dbdma_dev( &target_dev_tab,
459 			      (u32)DSCR_CMD0_ALWAYS,
460 			      tsize, devwidth, DEV_FLAGS_ANYUSE);
461 	auide->target_dev_id = au1xxx_ddma_add_device(&target_dev_tab);
463 	/* Get a channel for TX */
464 	auide->tx_chan = au1xxx_dbdma_chan_alloc(auide->target_dev_id,
466 						 auide_ddma_tx_callback,
469 	/* Get a channel for RX */
470 	auide->rx_chan = au1xxx_dbdma_chan_alloc(auide->rx_dev_id,
471 						 auide->target_dev_id,
472 						 auide_ddma_rx_callback,
	/* Allocate descriptor rings for both channels. */
475 	auide->tx_desc_head = (void*)au1xxx_dbdma_ring_alloc(auide->tx_chan,
477 	auide->rx_desc_head = (void*)au1xxx_dbdma_ring_alloc(auide->rx_chan,
	/* Coherent PRD table shared with the IDE core. */
480 	hwif->dmatable_cpu = dma_alloc_coherent(hwif->dev,
481 						PRD_ENTRIES * PRD_BYTES, /* 1 Page */
482 						&hwif->dmatable_dma, GFP_KERNEL);
484 	au1xxx_dbdma_start( auide->tx_chan );
485 	au1xxx_dbdma_start( auide->rx_chan );
/*
 * DbDMA initialization for the PIO-offload configuration (no MWDMA):
 * both channels use "always" pseudo-devices with a fixed 8-entry,
 * 32-bit-wide device table setup; no PRD table is needed.  Allocates the
 * descriptor rings and starts both channels.
 */
491 static int auide_ddma_init( _auide_hwif *auide )
493 	dbdev_tab_t source_dev_tab;
496 #ifdef IDE_AU1XXX_BURSTMODE
497 	flags = DEV_FLAGS_SYNC | DEV_FLAGS_BURSTABLE;
499 	flags = DEV_FLAGS_SYNC;
502 	/* setup dev_tab for tx channel */
503 	auide_init_dbdma_dev( &source_dev_tab,
504 			      (u32)DSCR_CMD0_ALWAYS,
505 			      8, 32, DEV_FLAGS_OUT | flags);
506 	auide->tx_dev_id = au1xxx_ddma_add_device( &source_dev_tab );
	/* Same entry, redirected inbound, for the rx channel. */
508 	auide_init_dbdma_dev( &source_dev_tab,
509 			      (u32)DSCR_CMD0_ALWAYS,
510 			      8, 32, DEV_FLAGS_IN | flags);
511 	auide->rx_dev_id = au1xxx_ddma_add_device( &source_dev_tab );
513 	/* Get a channel for TX */
514 	auide->tx_chan = au1xxx_dbdma_chan_alloc(DSCR_CMD0_ALWAYS,
519 	/* Get a channel for RX */
520 	auide->rx_chan = au1xxx_dbdma_chan_alloc(auide->rx_dev_id,
	/* Allocate descriptor rings for both channels. */
525 	auide->tx_desc_head = (void*)au1xxx_dbdma_ring_alloc(auide->tx_chan,
527 	auide->rx_desc_head = (void*)au1xxx_dbdma_ring_alloc(auide->rx_chan,
530 	au1xxx_dbdma_start( auide->tx_chan );
531 	au1xxx_dbdma_start( auide->rx_chan );
/*
 * Populate hw->io_ports from the memory-mapped register base: taskfile
 * registers are spaced (i << AU1XXX_ATA_REG_OFFSET) apart, and the
 * Alternate Status/Device Control register sits at offset 14.
 */
537 static void auide_setup_ports(hw_regs_t *hw, _auide_hwif *ahwif)
540 	unsigned long *ata_regs = hw->io_ports;
543 	for (i = 0; i < IDE_CONTROL_OFFSET; i++) {
544 		*ata_regs++ = ahwif->regbase + (i << AU1XXX_ATA_REG_OFFSET);
547 	/* set the Alternative Status register */
548 	*ata_regs = ahwif->regbase + (14 << AU1XXX_ATA_REG_OFFSET);
/*
 * Platform-bus probe: fetch the IRQ and memory resource, map the
 * controller registers, fill in the hwif (ports, transfer-mode masks,
 * PIO/DMA method pointers) and register the interface with the IDE core.
 */
551 static int au_ide_probe(struct device *dev)
553 	struct platform_device *pdev = to_platform_device(dev);
554 	_auide_hwif *ahwif = &auide_hwif;
556 	struct resource *res;
558 	u8 idx[4] = { 0xff, 0xff, 0xff, 0xff };
	/* Human-readable transfer mode, used only in the final info printk. */
561 #if defined(CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA)
562 	char *mode = "MWDMA2";
563 #elif defined(CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA)
564 	char *mode = "PIO+DDMA(offload)";
567 	memset(&auide_hwif, 0, sizeof(_auide_hwif));
568 	ahwif->irq = platform_get_irq(pdev, 0);
570 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
573 		pr_debug("%s %d: no base address\n", DRV_NAME, pdev->id);
577 	if (ahwif->irq < 0) {
578 		pr_debug("%s %d: no IRQ\n", DRV_NAME, pdev->id);
	/*
	 * NOTE(review): Linux resources are inclusive, so the region size is
	 * end - start + 1 (resource_size()); as written, both the request
	 * and the ioremap below fall one byte short — confirm and fix.
	 */
583 	if (!request_mem_region (res->start, res->end-res->start, pdev->name)) {
584 		pr_debug("%s: request_mem_region failed\n", DRV_NAME);
589 	ahwif->regbase = (u32)ioremap(res->start, res->end-res->start);
590 	if (ahwif->regbase == 0) {
595 	/* FIXME: This might possibly break PCMCIA IDE devices */
597 	hwif = &ide_hwifs[pdev->id];
599 	memset(&hw, 0, sizeof(hw));
600 	auide_setup_ports(&hw, ahwif);
603 	hw.chipset = ide_au1xxx;
605 	ide_init_port_hw(hwif, &hw);
	/* No UDMA on the static bus; MWDMA0-2 only with the DbDMA MDMA config. */
609 	hwif->ultra_mask = 0x0; /* Disable Ultra DMA */
610 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
611 	hwif->mwdma_mask = 0x07; /* Multimode-2 DMA */
612 	hwif->swdma_mask = 0x00;
614 	hwif->mwdma_mask = 0x0;
615 	hwif->swdma_mask = 0x0;
618 	hwif->pio_mask = ATA_PIO4;
619 	hwif->host_flags = IDE_HFLAG_POST_SET_MODE;
621 	hwif->drives[0].unmask = 1;
622 	hwif->drives[1].unmask = 1;
624 	/* hold should be on in all cases */
629 	/* If the user has selected DDMA assisted copies,
630 	   then set up a few local I/O function entry points
633 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA
634 	hwif->INSW = auide_insw;
635 	hwif->OUTSW = auide_outsw;
638 	hwif->set_pio_mode = &au1xxx_set_pio_mode;
639 	hwif->set_dma_mode = &auide_set_dma_mode;
	/* DMA method pointers are only wired up in the MWDMA configuration. */
641 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
642 	hwif->dma_timeout = &auide_dma_timeout;
644 	hwif->mdma_filter = &auide_mdma_filter;
646 	hwif->dma_host_set = &auide_dma_host_set;
647 	hwif->dma_exec_cmd = &auide_dma_exec_cmd;
648 	hwif->dma_start = &auide_dma_start;
649 	hwif->ide_dma_end = &auide_dma_end;
650 	hwif->dma_setup = &auide_dma_setup;
651 	hwif->ide_dma_test_irq = &auide_dma_test_irq;
652 	hwif->dma_lost_irq = &auide_dma_lost_irq;
655 	hwif->select_data = 0; /* no chipset-specific code */
656 	hwif->config_data = 0; /* no chipset-specific code */
658 	hwif->drives[0].autotune = 1; /* 1=autotune, 2=noautotune, 0=default */
659 	hwif->drives[1].autotune = 1;
661 	hwif->drives[0].no_io_32bit = 1;
662 	hwif->drives[1].no_io_32bit = 1;
	/* Cross-link the private state and the generic hwif. */
664 	auide_hwif.hwif = hwif;
665 	hwif->hwif_data = &auide_hwif;
	/* PIO-offload config initializes DbDMA here; MWDMA defers it to the filter. */
667 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA
668 	auide_ddma_init(&auide_hwif);
672 	idx[0] = hwif->index;
676 	dev_set_drvdata(dev, hwif);
678 	printk(KERN_INFO "Au1xxx IDE(builtin) configured for %s\n", mode );
/*
 * Platform-bus remove: unregister the interface, unmap the registers and
 * release the memory region claimed in au_ide_probe().
 */
684 static int au_ide_remove(struct device *dev)
686 	struct platform_device *pdev = to_platform_device(dev);
687 	struct resource *res;
688 	ide_hwif_t *hwif = dev_get_drvdata(dev);
689 	_auide_hwif *ahwif = &auide_hwif;
691 	ide_unregister(hwif->index);
693 	iounmap((void *)ahwif->regbase);
	/*
	 * NOTE(review): size should be end - start + 1 (resource_size());
	 * this must at least stay consistent with the request in probe.
	 */
695 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
696 	release_mem_region(res->start, res->end - res->start);
/* Legacy platform-bus driver registration for the "au1200-ide" device. */
701 static struct device_driver au1200_ide_driver = {
702 	.name		= "au1200-ide",
703 	.bus		= &platform_bus_type,
704 	.probe 		= au_ide_probe,
705 	.remove		= au_ide_remove,
/* Module entry point: register the platform driver. */
708 static int __init au_ide_init(void)
710 	return driver_register(&au1200_ide_driver);
/* Module exit point: unregister the platform driver. */
713 static void __exit au_ide_exit(void)
715 	driver_unregister(&au1200_ide_driver);
/* Module metadata and init/exit hookup. */
718 MODULE_LICENSE("GPL");
719 MODULE_DESCRIPTION("AU1200 IDE driver");
721 module_init(au_ide_init);
722 module_exit(au_ide_exit);