/*
 * BRIEF MODULE DESCRIPTION
 * AMD Alchemy Au1xxx IDE interface routines over the Static Bus
 *
 * Copyright (c) 2003-2005 AMD, Personal Connectivity Solutions
 *
 * This program is free software; you can redistribute it and/or modify it under
 * the terms of the GNU General Public License as published by the Free Software
 * Foundation; either version 2 of the License, or (at your option) any later
 * version.
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Note: for more information, please refer to the "AMD Alchemy Au1200/Au1550
 * IDE Interface and Linux Device Driver" Application Note.
 */
#include <linux/types.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/delay.h>
#include <linux/platform_device.h>
#include <linux/init.h>
#include <linux/ide.h>
#include <linux/scatterlist.h>

#include <asm/mach-au1x00/au1xxx.h>
#include <asm/mach-au1x00/au1xxx_dbdma.h>
#include <asm/mach-au1x00/au1xxx_ide.h>

#define DRV_NAME "au1200-ide"
#define DRV_AUTHOR "Enrico Walther <enrico.walther@amd.com> / Pete Popov <ppopov@embeddedalley.com>"

/* enable the burstmode in the dbdma */
#define IDE_AU1XXX_BURSTMODE 1

static _auide_hwif auide_hwif;

#if defined(CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA)

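/*
 * PIO offload over DbDMA: instead of word-at-a-time programmed I/O on the
 * data port, queue a single DbDMA descriptor covering the whole transfer
 * and busy-wait until the controller retires it.
 */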
static inline void auide_insw(unsigned long port, void *addr, u32 count)
{
	_auide_hwif *ahwif = &auide_hwif;
	chan_tab_t *ctp;
	au1x_ddma_desc_t *dp;

	if (!au1xxx_dbdma_put_dest(ahwif->rx_chan, virt_to_phys(addr),
				   count << 1, DDMA_FLAGS_NOIE)) {
		printk(KERN_ERR "%s failed %d\n", __func__, __LINE__);
		return;
	}
	ctp = *((chan_tab_t **)ahwif->rx_chan);
	dp = ctp->cur_ptr;
	/* spin until the DbDMA controller clears the descriptor's valid bit */
	while (dp->dscr_cmd0 & DSCR_CMD0_V)
		;
	ctp->cur_ptr = au1xxx_ddma_get_nextptr_virt(dp);
}

static inline void auide_outsw(unsigned long port, void *addr, u32 count)
{
	_auide_hwif *ahwif = &auide_hwif;
	chan_tab_t *ctp;
	au1x_ddma_desc_t *dp;

	if (!au1xxx_dbdma_put_source(ahwif->tx_chan, virt_to_phys(addr),
				     count << 1, DDMA_FLAGS_NOIE)) {
		printk(KERN_ERR "%s failed %d\n", __func__, __LINE__);
		return;
	}
	ctp = *((chan_tab_t **)ahwif->tx_chan);
	dp = ctp->cur_ptr;
	/* spin until the DbDMA controller clears the descriptor's valid bit */
	while (dp->dscr_cmd0 & DSCR_CMD0_V)
		;
	ctp->cur_ptr = au1xxx_ddma_get_nextptr_virt(dp);
}

static void au1xxx_input_data(ide_drive_t *drive, struct ide_cmd *cmd,
			      void *buf, unsigned int len)
{
	auide_insw(drive->hwif->io_ports.data_addr, buf, (len + 1) / 2);
}

static void au1xxx_output_data(ide_drive_t *drive, struct ide_cmd *cmd,
			       void *buf, unsigned int len)
{
	auide_outsw(drive->hwif->io_ports.data_addr, buf, (len + 1) / 2);
}
#endif

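/*
 * Program the Static Bus controller timing (MEM_STTIME2) and the RCS2#
 * configuration bits (MEM_STCFG2) for the selected PIO mode.
 */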
static void au1xxx_set_pio_mode(ide_hwif_t *hwif, ide_drive_t *drive)
{
	int mem_sttime = 0, mem_stcfg = au_readl(MEM_STCFG2);

	switch (drive->pio_mode - XFER_PIO_0) {
	case 0:
		mem_sttime = SBC_IDE_TIMING(PIO0);

		/* set configuration for RCS2# */
		mem_stcfg |= TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_PIO0_TCSOE | SBC_IDE_PIO0_TOECS;
		break;

	case 1:
		mem_sttime = SBC_IDE_TIMING(PIO1);

		/* set configuration for RCS2# */
		mem_stcfg |= TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_PIO1_TCSOE | SBC_IDE_PIO1_TOECS;
		break;

	case 2:
		mem_sttime = SBC_IDE_TIMING(PIO2);

		/* set configuration for RCS2# */
		mem_stcfg &= ~TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_PIO2_TCSOE | SBC_IDE_PIO2_TOECS;
		break;

	case 3:
		mem_sttime = SBC_IDE_TIMING(PIO3);

		/* set configuration for RCS2# */
		mem_stcfg &= ~TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_PIO3_TCSOE | SBC_IDE_PIO3_TOECS;
		break;

	case 4:
		mem_sttime = SBC_IDE_TIMING(PIO4);

		/* set configuration for RCS2# */
		mem_stcfg &= ~TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_PIO4_TCSOE | SBC_IDE_PIO4_TOECS;
		break;
	}

	au_writel(mem_sttime, MEM_STTIME2);
	au_writel(mem_stcfg, MEM_STCFG2);
}

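/*
 * Program the Static Bus timing and RCS2# configuration for the selected
 * Multi-Word DMA mode; the MDMA cases are only compiled in when
 * CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA is set.
 */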
static void auide_set_dma_mode(ide_hwif_t *hwif, ide_drive_t *drive)
{
	int mem_sttime = 0, mem_stcfg = au_readl(MEM_STCFG2);

	switch (drive->dma_mode) {
#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
	case XFER_MW_DMA_2:
		mem_sttime = SBC_IDE_TIMING(MDMA2);

		/* set configuration for RCS2# */
		mem_stcfg &= ~TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_MDMA2_TCSOE | SBC_IDE_MDMA2_TOECS;

		break;
	case XFER_MW_DMA_1:
		mem_sttime = SBC_IDE_TIMING(MDMA1);

		/* set configuration for RCS2# */
		mem_stcfg &= ~TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_MDMA1_TCSOE | SBC_IDE_MDMA1_TOECS;

		break;
	case XFER_MW_DMA_0:
		mem_sttime = SBC_IDE_TIMING(MDMA0);

		/* set configuration for RCS2# */
		mem_stcfg |= TS_MASK;
		mem_stcfg &= ~TCSOE_MASK;
		mem_stcfg &= ~TOECS_MASK;
		mem_stcfg |= SBC_IDE_MDMA0_TCSOE | SBC_IDE_MDMA0_TOECS;

		break;
#endif
	}

	au_writel(mem_sttime, MEM_STTIME2);
	au_writel(mem_stcfg, MEM_STCFG2);
}

/*
 * Multi-Word DMA + DbDMA functions
 */

#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
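/*
 * Walk the command's scatterlist and queue one DbDMA descriptor per chunk
 * (at most 0xfe00 bytes each); the interrupt enable flag is only set for
 * descriptors of the last scatterlist entry.  Returns 1 on success, 0 to
 * make the caller fall back to PIO for this request.
 */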
static int auide_build_dmatable(ide_drive_t *drive, struct ide_cmd *cmd)
{
	ide_hwif_t *hwif = drive->hwif;
	_auide_hwif *ahwif = &auide_hwif;
	struct scatterlist *sg;
	int i = cmd->sg_nents, count = 0;
	int iswrite = !!(cmd->tf_flags & IDE_TFLAG_WRITE);

	/* Save for interrupt context */
	ahwif->drive = drive;

	/* fill the descriptors */
	sg = hwif->sg_table;
	while (i && sg_dma_len(sg)) {
		u32 cur_addr;
		u32 cur_len;

		cur_addr = sg_dma_address(sg);
		cur_len = sg_dma_len(sg);

		while (cur_len) {
			u32 flags = DDMA_FLAGS_NOIE;
			unsigned int tc = (cur_len < 0xfe00) ? cur_len : 0xfe00;

			if (++count >= PRD_ENTRIES) {
				printk(KERN_WARNING "%s: DMA table too small\n",
				       drive->name);
				return 0;
			}

			/* enable the interrupt for the last descriptor only */
			if (i == 1)
				flags = DDMA_FLAGS_IE;
			else
				flags = DDMA_FLAGS_NOIE;

			if (iswrite) {
				if (!au1xxx_dbdma_put_source(ahwif->tx_chan,
						sg_phys(sg), tc, flags)) {
					printk(KERN_ERR "%s failed %d\n",
					       __func__, __LINE__);
				}
			} else {
				if (!au1xxx_dbdma_put_dest(ahwif->rx_chan,
						sg_phys(sg), tc, flags)) {
					printk(KERN_ERR "%s failed %d\n",
					       __func__, __LINE__);
				}
			}

			cur_addr += tc;
			cur_len -= tc;
		}
		sg = sg_next(sg);
		i--;
	}

	if (count)
		return 1;

	return 0;	/* revert to PIO for this request */
}

static int auide_dma_end(ide_drive_t *drive)
{
	return 0;
}

static void auide_dma_start(ide_drive_t *drive)
{
}

static int auide_dma_setup(ide_drive_t *drive, struct ide_cmd *cmd)
{
	if (auide_build_dmatable(drive, cmd) == 0)
		return 1;

	return 0;
}

static int auide_dma_test_irq(ide_drive_t *drive)
{
	/* If dbdma didn't execute the STOP command yet, the
	 * active bit is still set
	 */
	drive->waiting_for_dma++;
	if (drive->waiting_for_dma >= DMA_WAIT_TIMEOUT) {
		printk(KERN_WARNING "%s: timeout waiting for ddma to complete\n",
		       drive->name);
		return 1;
	}
	udelay(10);
	return 0;
}

static void auide_dma_host_set(ide_drive_t *drive, int on)
{
}

static void auide_ddma_tx_callback(int irq, void *param)
{
}

static void auide_ddma_rx_callback(int irq, void *param)
{
}
#endif /* end CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA */

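/* Fill in a DbDMA device table entry pointing at the IDE data port. */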
static void auide_init_dbdma_dev(dbdev_tab_t *dev, u32 dev_id, u32 tsize,
				 u32 devwidth, u32 flags)
{
	dev->dev_id = dev_id;
	dev->dev_physaddr = (u32)IDE_PHYS_ADDR;
	dev->dev_intlevel = 0;
	dev->dev_intpolarity = 0;
	dev->dev_tsize = tsize;
	dev->dev_devwidth = devwidth;
	dev->dev_flags = flags;
}

#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
static const struct ide_dma_ops au1xxx_dma_ops = {
	.dma_host_set = auide_dma_host_set,
	.dma_setup = auide_dma_setup,
	.dma_start = auide_dma_start,
	.dma_end = auide_dma_end,
	.dma_test_irq = auide_dma_test_irq,
	.dma_lost_irq = ide_dma_lost_irq,
};

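/*
 * Register the IDE source, sink and target devices with the DbDMA
 * controller, allocate one TX and one RX channel plus their descriptor
 * rings, and start both channels.
 */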
static int auide_ddma_init(ide_hwif_t *hwif, const struct ide_port_info *d)
{
	_auide_hwif *auide = &auide_hwif;
	dbdev_tab_t source_dev_tab, target_dev_tab;
	u32 dev_id, tsize, devwidth, flags;

	dev_id = IDE_DDMA_REQ;

	tsize = 8;	/* 1 */
	devwidth = 32;	/* 16 */

#ifdef IDE_AU1XXX_BURSTMODE
	flags = DEV_FLAGS_SYNC | DEV_FLAGS_BURSTABLE;
#else
	flags = DEV_FLAGS_SYNC;
#endif

	/* setup dev_tab for tx channel */
	auide_init_dbdma_dev(&source_dev_tab, dev_id, tsize, devwidth,
			     DEV_FLAGS_OUT | flags);
	auide->tx_dev_id = au1xxx_ddma_add_device(&source_dev_tab);

	/* setup dev_tab for rx channel */
	auide_init_dbdma_dev(&source_dev_tab, dev_id, tsize, devwidth,
			     DEV_FLAGS_IN | flags);
	auide->rx_dev_id = au1xxx_ddma_add_device(&source_dev_tab);

	/* We also need to add a target device for the DMA */
	auide_init_dbdma_dev(&target_dev_tab, (u32)DSCR_CMD0_ALWAYS,
			     tsize, devwidth, DEV_FLAGS_ANYUSE);
	auide->target_dev_id = au1xxx_ddma_add_device(&target_dev_tab);

	/* Get a channel for TX */
	auide->tx_chan = au1xxx_dbdma_chan_alloc(auide->target_dev_id,
						 auide->tx_dev_id,
						 auide_ddma_tx_callback,
						 (void *)auide);

	/* Get a channel for RX */
	auide->rx_chan = au1xxx_dbdma_chan_alloc(auide->rx_dev_id,
						 auide->target_dev_id,
						 auide_ddma_rx_callback,
						 (void *)auide);

	auide->tx_desc_head = (void *)au1xxx_dbdma_ring_alloc(auide->tx_chan,
							      NUM_DESCRIPTORS);
	auide->rx_desc_head = (void *)au1xxx_dbdma_ring_alloc(auide->rx_chan,
							      NUM_DESCRIPTORS);

	/* FIXME: check return value */
	(void)ide_allocate_dma_engine(hwif);

	au1xxx_dbdma_start(auide->tx_chan);
	au1xxx_dbdma_start(auide->rx_chan);

	return 0;
}
#else
static int auide_ddma_init(ide_hwif_t *hwif, const struct ide_port_info *d)
{
	_auide_hwif *auide = &auide_hwif;
	dbdev_tab_t source_dev_tab;
	int flags;

#ifdef IDE_AU1XXX_BURSTMODE
	flags = DEV_FLAGS_SYNC | DEV_FLAGS_BURSTABLE;
#else
	flags = DEV_FLAGS_SYNC;
#endif

	/* setup dev_tab for tx channel */
	auide_init_dbdma_dev(&source_dev_tab, (u32)DSCR_CMD0_ALWAYS,
			     8, 32, DEV_FLAGS_OUT | flags);
	auide->tx_dev_id = au1xxx_ddma_add_device(&source_dev_tab);

	/* setup dev_tab for rx channel */
	auide_init_dbdma_dev(&source_dev_tab, (u32)DSCR_CMD0_ALWAYS,
			     8, 32, DEV_FLAGS_IN | flags);
	auide->rx_dev_id = au1xxx_ddma_add_device(&source_dev_tab);

	/* Get a channel for TX */
	auide->tx_chan = au1xxx_dbdma_chan_alloc(DSCR_CMD0_ALWAYS,
						 auide->tx_dev_id,
						 NULL, (void *)auide);

	/* Get a channel for RX */
	auide->rx_chan = au1xxx_dbdma_chan_alloc(auide->rx_dev_id,
						 DSCR_CMD0_ALWAYS,
						 NULL, (void *)auide);

	auide->tx_desc_head = (void *)au1xxx_dbdma_ring_alloc(auide->tx_chan,
							      NUM_DESCRIPTORS);
	auide->rx_desc_head = (void *)au1xxx_dbdma_ring_alloc(auide->rx_chan,
							      NUM_DESCRIPTORS);

	au1xxx_dbdma_start(auide->tx_chan);
	au1xxx_dbdma_start(auide->rx_chan);

	return 0;
}
#endif

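/*
 * The eight IDE task file registers are mapped at regbase with a stride of
 * (1 << IDE_REG_SHIFT); the ninth port entry (Alternate Status / Device
 * Control) is taken from offset (14 << IDE_REG_SHIFT) in the same window.
 */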
static void auide_setup_ports(struct ide_hw *hw, _auide_hwif *ahwif)
{
	int i;
	unsigned long *ata_regs = hw->io_ports_array;

	/* FIXME? */
	for (i = 0; i < 8; i++)
		*ata_regs++ = ahwif->regbase + (i << IDE_REG_SHIFT);

	/* set the Alternative Status register */
	*ata_regs = ahwif->regbase + (14 << IDE_REG_SHIFT);
}

#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA
static const struct ide_tp_ops au1xxx_tp_ops = {
	.exec_command = ide_exec_command,
	.read_status = ide_read_status,
	.read_altstatus = ide_read_altstatus,
	.write_devctl = ide_write_devctl,

	.dev_select = ide_dev_select,
	.tf_load = ide_tf_load,
	.tf_read = ide_tf_read,

	.input_data = au1xxx_input_data,
	.output_data = au1xxx_output_data,
};
#endif

static const struct ide_port_ops au1xxx_port_ops = {
	.set_pio_mode = au1xxx_set_pio_mode,
	.set_dma_mode = auide_set_dma_mode,
};

static const struct ide_port_info au1xxx_port_info = {
	.init_dma = auide_ddma_init,
#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA
	.tp_ops = &au1xxx_tp_ops,
#endif
	.port_ops = &au1xxx_port_ops,
#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
	.dma_ops = &au1xxx_dma_ops,
#endif
	.host_flags = IDE_HFLAG_POST_SET_MODE |
		      IDE_HFLAG_NO_IO_32BIT |
		      IDE_HFLAG_UNMASK_IRQS,
	.pio_mask = ATA_PIO4,
#ifdef CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA
	.mwdma_mask = ATA_MWDMA2,
#endif
	.chipset = ide_au1xxx,
};

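/*
 * Platform probe: pick up the memory window and IRQ from the platform
 * device, ioremap the register block, and register the single IDE host.
 */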
static int au_ide_probe(struct platform_device *dev)
{
	_auide_hwif *ahwif = &auide_hwif;
	struct resource *res;
	struct ide_host *host;
	int ret = 0;
	struct ide_hw hw, *hws[] = { &hw };

#if defined(CONFIG_BLK_DEV_IDE_AU1XXX_MDMA2_DBDMA)
	char *mode = "MWDMA2";
#elif defined(CONFIG_BLK_DEV_IDE_AU1XXX_PIO_DBDMA)
	char *mode = "PIO+DDMA(offload)";
#endif

	memset(&auide_hwif, 0, sizeof(_auide_hwif));
	ahwif->irq = platform_get_irq(dev, 0);

	res = platform_get_resource(dev, IORESOURCE_MEM, 0);

	if (res == NULL) {
		pr_debug("%s %d: no base address\n", DRV_NAME, dev->id);
		ret = -ENODEV;
		goto out;
	}
	if (ahwif->irq < 0) {
		pr_debug("%s %d: no IRQ\n", DRV_NAME, dev->id);
		ret = -ENODEV;
		goto out;
	}

	if (!request_mem_region(res->start, resource_size(res), dev->name)) {
		pr_debug("%s: request_mem_region failed\n", DRV_NAME);
		ret = -EBUSY;
		goto out;
	}

	ahwif->regbase = (u32)ioremap(res->start, resource_size(res));
	if (ahwif->regbase == 0) {
		ret = -ENOMEM;
		goto out;
	}

	memset(&hw, 0, sizeof(hw));
	auide_setup_ports(&hw, ahwif);
	hw.irq = ahwif->irq;
	hw.dev = &dev->dev;

	ret = ide_host_add(&au1xxx_port_info, hws, 1, &host);
	if (ret)
		goto out;

	auide_hwif.hwif = host->ports[0];

	platform_set_drvdata(dev, host);

	printk(KERN_INFO "Au1xxx IDE(builtin) configured for %s\n", mode);

out:
	return ret;
}

static int au_ide_remove(struct platform_device *dev)
{
	struct resource *res;
	struct ide_host *host = platform_get_drvdata(dev);
	_auide_hwif *ahwif = &auide_hwif;

	ide_host_remove(host);

	iounmap((void *)ahwif->regbase);

	res = platform_get_resource(dev, IORESOURCE_MEM, 0);
	release_mem_region(res->start, resource_size(res));

	return 0;
}

static struct platform_driver au1200_ide_driver = {
	.driver = {
		.name = "au1200-ide",
		.owner = THIS_MODULE,
	},
	.probe = au_ide_probe,
	.remove = au_ide_remove,
};

static int __init au_ide_init(void)
{
	return platform_driver_register(&au1200_ide_driver);
}

static void __exit au_ide_exit(void)
{
	platform_driver_unregister(&au1200_ide_driver);
}

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("AU1200 IDE driver");

module_init(au_ide_init);
module_exit(au_ide_exit);