2 * BRIEF MODULE DESCRIPTION
3 * AMD Alchemy Au1xxx IDE interface routines over the Static Bus
5 * Copyright (c) 2003-2005 AMD, Personal Connectivity Solutions
7 * This program is free software; you can redistribute it and/or modify it under
8 * the terms of the GNU General Public License as published by the Free Software
9 * Foundation; either version 2 of the License, or (at your option) any later
12 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
13 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
14 * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR
15 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
16 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
17 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
18 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
19 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
20 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
21 * POSSIBILITY OF SUCH DAMAGE.
23 * You should have received a copy of the GNU General Public License along with
24 * this program; if not, write to the Free Software Foundation, Inc.,
25 * 675 Mass Ave, Cambridge, MA 02139, USA.
27 * Note: for more information, please refer to the "AMD Alchemy Au1200/Au1550 IDE
28 * Interface and Linux Device Driver" Application Note.
30 #include <linux/types.h>
31 #include <linux/module.h>
32 #include <linux/kernel.h>
33 #include <linux/delay.h>
34 #include <linux/platform_device.h>
35 #include <linux/init.h>
36 #include <linux/ide.h>
37 #include <linux/scatterlist.h>
39 #include <asm/mach-au1x00/au1xxx.h>
40 #include <asm/mach-au1x00/au1xxx_dbdma.h>
41 #include <asm/mach-au1x00/au1xxx_ide.h>
43 #define DRV_NAME "au1200-ide"
44 #define DRV_AUTHOR "Enrico Walther <enrico.walther@amd.com> / Pete Popov <ppopov@embeddedalley.com>"
46 /* enable the burstmode in the dbdma */
47 #define IDE_AU1XXX_BURSTMODE 1
/* Singleton per-controller state; this driver supports exactly one
 * Au1xxx on-chip IDE interface, so file-scope state is used throughout. */
49 static _auide_hwif auide_hwif
;
/* Set non-zero once auide_ddma_init() has run (consulted in
 * auide_mdma_filter() before touching the DBDMA channels). */
50 static int dbdma_init_done
;
/* Forward declaration; the definition is selected later by the
 * CONFIG_BLK_DEV_IDE_AU1XXX_* DBDMA options. */
52 static int auide_ddma_init(_auide_hwif
*auide
);
54 #if defined(CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA)
/*
 * auide_insw - PIO data-in replacement that offloads the copy to the
 * DBDMA rx channel instead of CPU port reads.
 * @port:  I/O port (unused by the DBDMA path as far as this extract shows)
 * @addr:  destination buffer
 * @count: number of 16-bit words to transfer (hence the "<< 1" to bytes)
 *
 * NOTE(review): this extract is fragmentary -- the flags argument to
 * put_dest_flags(), local declarations (ctp, dp) and closing braces are
 * missing from view; comments below describe only what is visible.
 */
56 void auide_insw(unsigned long port
, void *addr
, u32 count
)
58 _auide_hwif
*ahwif
= &auide_hwif
;
/* Queue the destination buffer on the rx DBDMA channel; on failure log
 * and (presumably) bail out.  __FUNCTION__ is the GNU spelling; kernel
 * style prefers __func__. */
62 if(!put_dest_flags(ahwif
->rx_chan
, (void*)addr
, count
<< 1,
64 printk(KERN_ERR
"%s failed %d\n", __FUNCTION__
, __LINE__
);
/* Dereference the channel handle to reach the channel table so the
 * current descriptor can be polled. */
67 ctp
= *((chan_tab_t
**)ahwif
->rx_chan
);
/* Busy-wait until the descriptor's valid bit clears, i.e. the DBDMA
 * engine has consumed it, then advance the channel's current pointer. */
69 while (dp
->dscr_cmd0
& DSCR_CMD0_V
)
71 ctp
->cur_ptr
= au1xxx_ddma_get_nextptr_virt(dp
);
/*
 * auide_outsw - PIO data-out replacement that offloads the copy to the
 * DBDMA tx channel; mirror image of auide_insw() above.
 * @port:  I/O port (unused by the DBDMA path as far as this extract shows)
 * @addr:  source buffer
 * @count: number of 16-bit words to transfer
 *
 * NOTE(review): fragmentary extract -- local declarations (ctp, dp) and
 * closing braces are missing from view.
 */
74 void auide_outsw(unsigned long port
, void *addr
, u32 count
)
76 _auide_hwif
*ahwif
= &auide_hwif
;
/* Queue the source buffer on the tx channel with no completion IRQ
 * (DDMA_FLAGS_NOIE); the loop below polls for completion instead. */
80 if(!put_source_flags(ahwif
->tx_chan
, (void*)addr
,
81 count
<< 1, DDMA_FLAGS_NOIE
)) {
82 printk(KERN_ERR
"%s failed %d\n", __FUNCTION__
, __LINE__
);
/* Reach the channel table via the opaque channel handle. */
85 ctp
= *((chan_tab_t
**)ahwif
->tx_chan
);
/* Spin until the descriptor's valid bit clears, then advance the
 * channel's current descriptor pointer. */
87 while (dp
->dscr_cmd0
& DSCR_CMD0_V
)
89 ctp
->cur_ptr
= au1xxx_ddma_get_nextptr_virt(dp
);
/*
 * au1xxx_set_pio_mode - program Static Bus timing/config for a PIO mode.
 * @drive: target drive (unused in the visible fragments)
 * @pio:   requested PIO mode (0..4)
 *
 * Reads MEM_STCFG2, adjusts the RCS2# chip-select timing bits for the
 * chosen PIO mode, and writes both MEM_STTIME2 and MEM_STCFG2 back.
 * NOTE(review): the switch/case labels selecting between the PIO0..PIO4
 * groups are missing from this extract -- each group below presumably
 * corresponds to one case of a switch on `pio`; confirm against the
 * original source.
 */
94 static void au1xxx_set_pio_mode(ide_drive_t
*drive
, const u8 pio
)
96 int mem_sttime
= 0, mem_stcfg
= au_readl(MEM_STCFG2
);
/* --- PIO0 timing group --- */
101 mem_sttime
= SBC_IDE_TIMING(PIO0
);
103 /* set configuration for RCS2# */
104 mem_stcfg
|= TS_MASK
;
105 mem_stcfg
&= ~TCSOE_MASK
;
106 mem_stcfg
&= ~TOECS_MASK
;
107 mem_stcfg
|= SBC_IDE_PIO0_TCSOE
| SBC_IDE_PIO0_TOECS
;
/* --- PIO1 timing group --- */
111 mem_sttime
= SBC_IDE_TIMING(PIO1
);
113 /* set configuration for RCS2# */
114 mem_stcfg
|= TS_MASK
;
115 mem_stcfg
&= ~TCSOE_MASK
;
116 mem_stcfg
&= ~TOECS_MASK
;
117 mem_stcfg
|= SBC_IDE_PIO1_TCSOE
| SBC_IDE_PIO1_TOECS
;
/* --- PIO2 timing group (note: TS_MASK cleared, unlike PIO0/PIO1) --- */
121 mem_sttime
= SBC_IDE_TIMING(PIO2
);
123 /* set configuration for RCS2# */
124 mem_stcfg
&= ~TS_MASK
;
125 mem_stcfg
&= ~TCSOE_MASK
;
126 mem_stcfg
&= ~TOECS_MASK
;
127 mem_stcfg
|= SBC_IDE_PIO2_TCSOE
| SBC_IDE_PIO2_TOECS
;
/* --- PIO3 timing group --- */
131 mem_sttime
= SBC_IDE_TIMING(PIO3
);
133 /* set configuration for RCS2# */
134 mem_stcfg
&= ~TS_MASK
;
135 mem_stcfg
&= ~TCSOE_MASK
;
136 mem_stcfg
&= ~TOECS_MASK
;
137 mem_stcfg
|= SBC_IDE_PIO3_TCSOE
| SBC_IDE_PIO3_TOECS
;
/* --- PIO4 timing group --- */
142 mem_sttime
= SBC_IDE_TIMING(PIO4
);
144 /* set configuration for RCS2# */
145 mem_stcfg
&= ~TS_MASK
;
146 mem_stcfg
&= ~TCSOE_MASK
;
147 mem_stcfg
&= ~TOECS_MASK
;
148 mem_stcfg
|= SBC_IDE_PIO4_TCSOE
| SBC_IDE_PIO4_TOECS
;
/* Commit the selected timing and chip-select configuration. */
152 au_writel(mem_sttime
,MEM_STTIME2
);
153 au_writel(mem_stcfg
,MEM_STCFG2
);
/*
 * auide_set_dma_mode - program Static Bus timing/config for an MWDMA mode.
 * @drive: target drive (unused in the visible fragments)
 * @speed: requested transfer mode
 *
 * Structure parallels au1xxx_set_pio_mode() above; the MDMA groups are
 * only compiled in when MDMA2-over-DBDMA support is configured.
 * NOTE(review): the case labels selecting between MDMA0..MDMA2 are
 * missing from this extract; confirm against the original source.
 */
156 static void auide_set_dma_mode(ide_drive_t
*drive
, const u8 speed
)
158 int mem_sttime
= 0, mem_stcfg
= au_readl(MEM_STCFG2
);
161 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
/* --- MDMA2 timing group --- */
163 mem_sttime
= SBC_IDE_TIMING(MDMA2
);
165 /* set configuration for RCS2# */
166 mem_stcfg
&= ~TS_MASK
;
167 mem_stcfg
&= ~TCSOE_MASK
;
168 mem_stcfg
&= ~TOECS_MASK
;
169 mem_stcfg
|= SBC_IDE_MDMA2_TCSOE
| SBC_IDE_MDMA2_TOECS
;
/* --- MDMA1 timing group --- */
173 mem_sttime
= SBC_IDE_TIMING(MDMA1
);
175 /* set configuration for RCS2# */
176 mem_stcfg
&= ~TS_MASK
;
177 mem_stcfg
&= ~TCSOE_MASK
;
178 mem_stcfg
&= ~TOECS_MASK
;
179 mem_stcfg
|= SBC_IDE_MDMA1_TCSOE
| SBC_IDE_MDMA1_TOECS
;
/* --- MDMA0 timing group (note: TS_MASK set, unlike MDMA1/MDMA2) --- */
183 mem_sttime
= SBC_IDE_TIMING(MDMA0
);
185 /* set configuration for RCS2# */
186 mem_stcfg
|= TS_MASK
;
187 mem_stcfg
&= ~TCSOE_MASK
;
188 mem_stcfg
&= ~TOECS_MASK
;
189 mem_stcfg
|= SBC_IDE_MDMA0_TCSOE
| SBC_IDE_MDMA0_TOECS
;
/* Commit timing and chip-select configuration. */
195 au_writel(mem_sttime
,MEM_STTIME2
);
196 au_writel(mem_stcfg
,MEM_STCFG2
);
200 * Multi-Word DMA + DbDMA functions
203 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
/*
 * auide_build_dmatable - translate the request's scatter/gather list into
 * DBDMA descriptors on the tx (write) or rx (read) channel.
 *
 * Returns non-zero on success; returns 0 after tearing down the sg
 * mapping so the caller reverts to PIO for this request.
 * NOTE(review): fragmentary extract -- the per-segment loop structure,
 * several locals (cur_addr, cur_len) and brace placement are partially
 * missing from view.
 */
204 static int auide_build_dmatable(ide_drive_t
*drive
)
206 int i
, iswrite
, count
= 0;
207 ide_hwif_t
*hwif
= HWIF(drive
);
209 struct request
*rq
= HWGROUP(drive
)->rq
;
211 _auide_hwif
*ahwif
= (_auide_hwif
*)hwif
->hwif_data
;
212 struct scatterlist
*sg
;
/* Direction decides which DBDMA channel the segments are queued on. */
214 iswrite
= (rq_data_dir(rq
) == WRITE
);
215 /* Save for interrupt context */
216 ahwif
->drive
= drive
;
/* Map the request into a scatterlist; i counts remaining sg entries. */
218 hwif
->sg_nents
= i
= ide_build_sglist(drive
, rq
);
223 /* fill the descriptors */
225 while (i
&& sg_dma_len(sg
)) {
229 cur_addr
= sg_dma_address(sg
);
230 cur_len
= sg_dma_len(sg
);
/* Each DBDMA descriptor carries at most 0xfe00 bytes; larger sg
 * segments are split across multiple descriptors. */
233 u32 flags
= DDMA_FLAGS_NOIE
;
234 unsigned int tc
= (cur_len
< 0xfe00)? cur_len
: 0xfe00;
/* Bail out to PIO if the request needs more descriptors than fit. */
236 if (++count
>= PRD_ENTRIES
) {
237 printk(KERN_WARNING
"%s: DMA table too small\n",
239 goto use_pio_instead
;
242 /* Lets enable intr for the last descriptor only */
244 flags
= DDMA_FLAGS_IE
;
246 flags
= DDMA_FLAGS_NOIE
;
/* Writes feed the tx channel ... */
249 if(!put_source_flags(ahwif
->tx_chan
,
252 printk(KERN_ERR
"%s failed %d\n",
253 __FUNCTION__
, __LINE__
);
/* ... reads feed the rx channel. */
257 if(!put_dest_flags(ahwif
->rx_chan
,
260 printk(KERN_ERR
"%s failed %d\n",
261 __FUNCTION__
, __LINE__
);
/* Error path: undo the sg mapping before falling back to PIO. */
276 ide_destroy_dmatable(drive
);
278 return 0; /* revert to PIO for this request */
/*
 * auide_dma_end - DMA completion hook: release the scatterlist mapping
 * built by auide_build_dmatable(), if one exists.
 */
281 static int auide_dma_end(ide_drive_t
*drive
)
283 ide_hwif_t
*hwif
= HWIF(drive
);
285 if (hwif
->sg_nents
) {
286 ide_destroy_dmatable(drive
);
/*
 * auide_dma_start - DMA start hook.  Empty here: the descriptors queued
 * in auide_build_dmatable() already kicked the DBDMA engine.
 */
293 static void auide_dma_start(ide_drive_t
*drive
)
/*
 * auide_dma_exec_cmd - issue the taskfile command and arm the generic
 * IDE DMA completion interrupt handler (ide_dma_intr).
 */
298 static void auide_dma_exec_cmd(ide_drive_t
*drive
, u8 command
)
300 /* issue cmd to drive */
301 ide_execute_command(drive
, command
, &ide_dma_intr
,
/*
 * auide_dma_setup - prepare a DMA transfer for the current request.
 * Falls back to PIO (remapping the sg list for CPU access) when the
 * descriptor table cannot be built; otherwise flags the drive as
 * waiting for DMA completion.
 */
305 static int auide_dma_setup(ide_drive_t
*drive
)
307 struct request
*rq
= HWGROUP(drive
)->rq
;
309 if (!auide_build_dmatable(drive
)) {
310 ide_map_sg(drive
, rq
);
314 drive
->waiting_for_dma
= 1;
/*
 * auide_mdma_filter - MWDMA mode filter hook.
 *
 * On first call (dbdma_init_done still 0) it classifies the drive
 * against the driver's white/black lists and performs the deferred
 * DBDMA channel initialisation.  Blacklisted drives only get a warning
 * here; the returned mask is the hwif's mwdma_mask either way (see the
 * upstream FIXME retained below).
 */
318 static u8
auide_mdma_filter(ide_drive_t
*drive
)
321 * FIXME: ->white_list and ->black_list are based on completely bogus
322 * ->ide_dma_check implementation which didn't set neither the host
323 * controller timings nor the device for the desired transfer mode.
325 * They should be either removed or 0x00 MWDMA mask should be
326 * returned for devices on the ->black_list.
/* One-time lazy init: classify the drive, remember it, set up DBDMA. */
329 if (dbdma_init_done
== 0) {
330 auide_hwif
.white_list
= ide_in_drive_list(drive
->id
,
332 auide_hwif
.black_list
= ide_in_drive_list(drive
->id
,
334 auide_hwif
.drive
= drive
;
335 auide_ddma_init(&auide_hwif
);
339 /* Is the drive in our DMA black list? */
340 if (auide_hwif
.black_list
)
341 printk(KERN_WARNING
"%s: Disabling DMA for %s (blacklisted)\n",
342 drive
->name
, drive
->id
->model
);
344 return drive
->hwif
->mwdma_mask
;
/*
 * auide_dma_test_irq - poll-style "did our DMA interrupt fire" hook.
 * waiting_for_dma doubles as a poll counter here: it is incremented on
 * each call and a warning is printed once it exceeds DMA_WAIT_TIMEOUT.
 */
347 static int auide_dma_test_irq(ide_drive_t
*drive
)
349 if (drive
->waiting_for_dma
== 0)
350 printk(KERN_WARNING
"%s: ide_dma_test_irq \
351 called while not waiting\n", drive
->name
);
353 /* If dbdma didn't execute the STOP command yet, the
354 * active bit is still set
356 drive
->waiting_for_dma
++;
357 if (drive
->waiting_for_dma
>= DMA_WAIT_TIMEOUT
) {
358 printk(KERN_WARNING
"%s: timeout waiting for ddma to \
359 complete\n", drive
->name
);
/*
 * auide_dma_host_set - host-side DMA enable/disable hook; no-op on this
 * controller (nothing visible in the body).
 */
366 static void auide_dma_host_set(ide_drive_t
*drive
, int on
)
/*
 * auide_dma_lost_irq - report a lost DMA interrupt.
 */
370 static void auide_dma_lost_irq(ide_drive_t
*drive
)
372 printk(KERN_ERR
"%s: IRQ lost\n", drive
->name
);
/*
 * auide_ddma_tx_callback - DBDMA tx-channel completion callback; clears
 * the waiting_for_dma flag on the drive saved in ahwif->drive.
 */
375 static void auide_ddma_tx_callback(int irq
, void *param
)
377 _auide_hwif
*ahwif
= (_auide_hwif
*)param
;
378 ahwif
->drive
->waiting_for_dma
= 0;
/*
 * auide_ddma_rx_callback - DBDMA rx-channel completion callback; same
 * effect as the tx callback above.
 */
381 static void auide_ddma_rx_callback(int irq
, void *param
)
383 _auide_hwif
*ahwif
= (_auide_hwif
*)param
;
384 ahwif
->drive
->waiting_for_dma
= 0;
387 #endif /* end CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA */
/*
 * auide_init_dbdma_dev - fill a DBDMA device table entry for the ATA
 * controller.
 * @dev:      entry to populate
 * @dev_id:   DBDMA device id (or DSCR_CMD0_ALWAYS for a generic endpoint)
 * @tsize:    transfer size for the device
 * @devwidth: device bus width
 * @flags:    DEV_FLAGS_* direction/sync/burst flags
 *
 * The physical address is always the ATA controller's; interrupt level
 * and polarity are left at 0.
 */
389 static void auide_init_dbdma_dev(dbdev_tab_t
*dev
, u32 dev_id
, u32 tsize
, u32 devwidth
, u32 flags
)
391 dev
->dev_id
= dev_id
;
392 dev
->dev_physaddr
= (u32
)AU1XXX_ATA_PHYS_ADDR
;
393 dev
->dev_intlevel
= 0;
394 dev
->dev_intpolarity
= 0;
395 dev
->dev_tsize
= tsize
;
396 dev
->dev_devwidth
= devwidth
;
397 dev
->dev_flags
= flags
;
400 #if defined(CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA)
/*
 * auide_dma_timeout - DMA timeout hook: log the event, check whether the
 * interrupt actually arrived, and terminate the DMA transfer via the
 * hwif's ide_dma_end method.
 */
402 static void auide_dma_timeout(ide_drive_t
*drive
)
404 ide_hwif_t
*hwif
= HWIF(drive
);
406 printk(KERN_ERR
"%s: DMA timeout occurred: ", drive
->name
);
408 if (hwif
->ide_dma_test_irq(drive
))
411 hwif
->ide_dma_end(drive
);
/*
 * auide_ddma_init (MWDMA2-over-DBDMA variant) - one-time DBDMA setup:
 * registers tx/rx source devices plus a generic target device, allocates
 * the tx/rx channels and their descriptor rings, allocates the PRD table
 * via dma_alloc_coherent(), and starts both channels.
 *
 * Drives not on the whitelist are reported with a pointer to the
 * AU1xxx_IDE README.  NOTE(review): fragmentary extract -- several
 * arguments (dev ids, tsize/devwidth assignments, channel callbacks'
 * trailing arguments, ring sizes) and the return statement are missing
 * from view.
 */
415 static int auide_ddma_init(_auide_hwif
*auide
) {
417 dbdev_tab_t source_dev_tab
, target_dev_tab
;
418 u32 dev_id
, tsize
, devwidth
, flags
;
419 ide_hwif_t
*hwif
= auide
->hwif
;
421 dev_id
= AU1XXX_ATA_DDMA_REQ
;
/* Warn about drives that were not positively whitelisted. */
423 if (auide
->white_list
|| auide
->black_list
) {
431 printk(KERN_ERR
"au1xxx-ide: %s is not on ide driver whitelist.\n",auide_hwif
.drive
->id
->model
);
432 printk(KERN_ERR
" please read 'Documentation/mips/AU1xxx_IDE.README'");
/* Burst mode is a compile-time option (IDE_AU1XXX_BURSTMODE above). */
435 #ifdef IDE_AU1XXX_BURSTMODE
436 flags
= DEV_FLAGS_SYNC
| DEV_FLAGS_BURSTABLE
;
438 flags
= DEV_FLAGS_SYNC
;
441 /* setup dev_tab for tx channel */
442 auide_init_dbdma_dev( &source_dev_tab
,
444 tsize
, devwidth
, DEV_FLAGS_OUT
| flags
);
445 auide
->tx_dev_id
= au1xxx_ddma_add_device( &source_dev_tab
);
/* Re-use source_dev_tab for the rx direction. */
447 auide_init_dbdma_dev( &source_dev_tab
,
449 tsize
, devwidth
, DEV_FLAGS_IN
| flags
);
450 auide
->rx_dev_id
= au1xxx_ddma_add_device( &source_dev_tab
);
452 /* We also need to add a target device for the DMA */
453 auide_init_dbdma_dev( &target_dev_tab
,
454 (u32
)DSCR_CMD0_ALWAYS
,
455 tsize
, devwidth
, DEV_FLAGS_ANYUSE
);
456 auide
->target_dev_id
= au1xxx_ddma_add_device(&target_dev_tab
);
458 /* Get a channel for TX */
459 auide
->tx_chan
= au1xxx_dbdma_chan_alloc(auide
->target_dev_id
,
461 auide_ddma_tx_callback
,
464 /* Get a channel for RX */
465 auide
->rx_chan
= au1xxx_dbdma_chan_alloc(auide
->rx_dev_id
,
466 auide
->target_dev_id
,
467 auide_ddma_rx_callback
,
/* Allocate descriptor rings for both channels. */
470 auide
->tx_desc_head
= (void*)au1xxx_dbdma_ring_alloc(auide
->tx_chan
,
472 auide
->rx_desc_head
= (void*)au1xxx_dbdma_ring_alloc(auide
->rx_chan
,
/* PRD table: one page of coherent DMA memory. */
475 hwif
->dmatable_cpu
= dma_alloc_coherent(hwif
->dev
,
476 PRD_ENTRIES
* PRD_BYTES
, /* 1 Page */
477 &hwif
->dmatable_dma
, GFP_KERNEL
);
479 au1xxx_dbdma_start( auide
->tx_chan
);
480 au1xxx_dbdma_start( auide
->rx_chan
);
/*
 * auide_ddma_init (PIO+DDMA offload variant) - simpler setup used when
 * only PIO transfers are offloaded to DBDMA: both endpoints are generic
 * DSCR_CMD0_ALWAYS devices with fixed tsize 8 / devwidth 32, no target
 * device or PRD table is needed, and both channels are started.
 * NOTE(review): fragmentary extract -- the chan_alloc/ring_alloc trailing
 * arguments and the return statement are missing from view.
 */
486 static int auide_ddma_init( _auide_hwif
*auide
)
488 dbdev_tab_t source_dev_tab
;
491 #ifdef IDE_AU1XXX_BURSTMODE
492 flags
= DEV_FLAGS_SYNC
| DEV_FLAGS_BURSTABLE
;
494 flags
= DEV_FLAGS_SYNC
;
497 /* setup dev_tab for tx channel */
498 auide_init_dbdma_dev( &source_dev_tab
,
499 (u32
)DSCR_CMD0_ALWAYS
,
500 8, 32, DEV_FLAGS_OUT
| flags
);
501 auide
->tx_dev_id
= au1xxx_ddma_add_device( &source_dev_tab
);
/* Same table, rx direction. */
503 auide_init_dbdma_dev( &source_dev_tab
,
504 (u32
)DSCR_CMD0_ALWAYS
,
505 8, 32, DEV_FLAGS_IN
| flags
);
506 auide
->rx_dev_id
= au1xxx_ddma_add_device( &source_dev_tab
);
508 /* Get a channel for TX */
509 auide
->tx_chan
= au1xxx_dbdma_chan_alloc(DSCR_CMD0_ALWAYS
,
514 /* Get a channel for RX */
515 auide
->rx_chan
= au1xxx_dbdma_chan_alloc(auide
->rx_dev_id
,
/* Descriptor rings for both channels. */
520 auide
->tx_desc_head
= (void*)au1xxx_dbdma_ring_alloc(auide
->tx_chan
,
522 auide
->rx_desc_head
= (void*)au1xxx_dbdma_ring_alloc(auide
->rx_chan
,
525 au1xxx_dbdma_start( auide
->tx_chan
);
526 au1xxx_dbdma_start( auide
->rx_chan
);
/*
 * auide_setup_ports - populate hw->io_ports with memory-mapped register
 * addresses: consecutive task-file registers at regbase, spaced by
 * AU1XXX_ATA_REG_OFFSET, plus the Alternate Status register at offset
 * 14 << AU1XXX_ATA_REG_OFFSET.
 */
532 static void auide_setup_ports(hw_regs_t
*hw
, _auide_hwif
*ahwif
)
535 unsigned long *ata_regs
= hw
->io_ports
;
538 for (i
= 0; i
< IDE_CONTROL_OFFSET
; i
++) {
539 *ata_regs
++ = ahwif
->regbase
+ (i
<< AU1XXX_ATA_REG_OFFSET
);
542 /* set the Alternative Status register */
543 *ata_regs
= ahwif
->regbase
+ (14 << AU1XXX_ATA_REG_OFFSET
);
/*
 * Port capabilities: no SFF-style busmaster DMA (DBDMA is used instead),
 * no 32-bit PIO, IRQs unmasked during I/O, modes applied after being set;
 * PIO up to mode 4, and MWDMA up to mode 2 when DBDMA MWDMA is enabled.
 */
546 static const struct ide_port_info au1xxx_port_info
= {
547 .host_flags
= IDE_HFLAG_POST_SET_MODE
|
548 IDE_HFLAG_NO_DMA
| /* no SFF-style DMA */
549 IDE_HFLAG_NO_IO_32BIT
|
550 IDE_HFLAG_UNMASK_IRQS
,
551 .pio_mask
= ATA_PIO4
,
552 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
553 .mwdma_mask
= ATA_MWDMA2
,
/*
 * au_ide_probe - platform-bus probe: claim and map the controller's MMIO
 * resource, set up the hwif's ports and method pointers, run DBDMA init
 * (PIO-offload variant only -- the MWDMA variant is deferred to the mdma
 * filter), register the device, and stash the hwif as driver data.
 *
 * NOTE(review): fragmentary extract -- error-path returns, some locals
 * (hwif, hw) declarations and closing braces are missing from view.
 */
557 static int au_ide_probe(struct device
*dev
)
559 struct platform_device
*pdev
= to_platform_device(dev
);
560 _auide_hwif
*ahwif
= &auide_hwif
;
562 struct resource
*res
;
/* ide_device_add() takes a 4-slot index array; only slot 0 is used. */
564 u8 idx
[4] = { 0xff, 0xff, 0xff, 0xff };
/* Human-readable mode string for the final banner. */
567 #if defined(CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA)
568 char *mode
= "MWDMA2";
569 #elif defined(CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA)
570 char *mode
= "PIO+DDMA(offload)";
/* Start from a clean controller state on every probe. */
573 memset(&auide_hwif
, 0, sizeof(_auide_hwif
));
574 ahwif
->irq
= platform_get_irq(pdev
, 0);
576 res
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
579 pr_debug("%s %d: no base address\n", DRV_NAME
, pdev
->id
);
583 if (ahwif
->irq
< 0) {
584 pr_debug("%s %d: no IRQ\n", DRV_NAME
, pdev
->id
);
/* NOTE(review): res->end - res->start is off by one for a region
 * length (should be resource_size-style, end - start + 1) -- verify
 * against the matching release_mem_region() in au_ide_remove(). */
589 if (!request_mem_region (res
->start
, res
->end
-res
->start
, pdev
->name
)) {
590 pr_debug("%s: request_mem_region failed\n", DRV_NAME
);
595 ahwif
->regbase
= (u32
)ioremap(res
->start
, res
->end
-res
->start
);
596 if (ahwif
->regbase
== 0) {
601 /* FIXME: This might possibly break PCMCIA IDE devices */
603 hwif
= &ide_hwifs
[pdev
->id
];
605 memset(&hw
, 0, sizeof(hw
));
606 auide_setup_ports(&hw
, ahwif
);
609 hw
.chipset
= ide_au1xxx
;
611 ide_init_port_hw(hwif
, &hw
);
615 /* hold should be on in all cases */
620 /* If the user has selected DDMA assisted copies,
621 then set up a few local I/O function entry points
/* PIO-offload build: replace the 16-bit string I/O primitives. */
624 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA
625 hwif
->INSW
= auide_insw
;
626 hwif
->OUTSW
= auide_outsw
;
/* Timing hooks used by both build variants. */
629 hwif
->set_pio_mode
= &au1xxx_set_pio_mode
;
630 hwif
->set_dma_mode
= &auide_set_dma_mode
;
/* MWDMA build: wire up the full set of DMA method pointers. */
632 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
633 hwif
->dma_timeout
= &auide_dma_timeout
;
635 hwif
->mdma_filter
= &auide_mdma_filter
;
637 hwif
->dma_host_set
= &auide_dma_host_set
;
638 hwif
->dma_exec_cmd
= &auide_dma_exec_cmd
;
639 hwif
->dma_start
= &auide_dma_start
;
640 hwif
->ide_dma_end
= &auide_dma_end
;
641 hwif
->dma_setup
= &auide_dma_setup
;
642 hwif
->ide_dma_test_irq
= &auide_dma_test_irq
;
643 hwif
->dma_lost_irq
= &auide_dma_lost_irq
;
645 hwif
->select_data
= 0; /* no chipset-specific code */
646 hwif
->config_data
= 0; /* no chipset-specific code */
/* Cross-link driver state and the generic hwif. */
648 auide_hwif
.hwif
= hwif
;
649 hwif
->hwif_data
= &auide_hwif
;
/* PIO-offload build does DBDMA setup eagerly at probe time. */
651 #ifdef CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA
652 auide_ddma_init(&auide_hwif
);
656 idx
[0] = hwif
->index
;
658 ide_device_add(idx
, &au1xxx_port_info
);
660 dev_set_drvdata(dev
, hwif
);
662 printk(KERN_INFO
"Au1xxx IDE(builtin) configured for %s\n", mode
);
/*
 * au_ide_remove - platform-bus remove: unregister the hwif, unmap the
 * MMIO window, and release the memory region claimed in au_ide_probe().
 * NOTE(review): the length passed to release_mem_region() mirrors the
 * off-by-one request in au_ide_probe() (end - start, not end - start + 1)
 * -- the two at least agree with each other.
 */
668 static int au_ide_remove(struct device
*dev
)
670 struct platform_device
*pdev
= to_platform_device(dev
);
671 struct resource
*res
;
672 ide_hwif_t
*hwif
= dev_get_drvdata(dev
);
673 _auide_hwif
*ahwif
= &auide_hwif
;
675 ide_unregister(hwif
->index
, 0, 0);
677 iounmap((void *)ahwif
->regbase
);
679 res
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
680 release_mem_region(res
->start
, res
->end
- res
->start
);
/* Legacy-style platform driver binding by name ("au1200-ide"). */
685 static struct device_driver au1200_ide_driver
= {
686 .name
= "au1200-ide",
687 .bus
= &platform_bus_type
,
688 .probe
= au_ide_probe
,
689 .remove
= au_ide_remove
,
/* Module entry: register the driver with the platform bus. */
692 static int __init
au_ide_init(void)
694 return driver_register(&au1200_ide_driver
);
/* Module exit: unregister the driver. */
697 static void __exit
au_ide_exit(void)
699 driver_unregister(&au1200_ide_driver
);
702 MODULE_LICENSE("GPL");
703 MODULE_DESCRIPTION("AU1200 IDE driver");
705 module_init(au_ide_init
);
706 module_exit(au_ide_exit
);