/*
 * EP93xx PATA controller driver.
 *
 * Low-level driver for the IDE controller found on Cirrus Logic EP93xx SoCs.
 */
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/blkdev.h>
#include <scsi/scsi_host.h>
#include <linux/ata.h>
#include <linux/libata.h>
#include <linux/platform_device.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/ktime.h>

#include <linux/platform_data/dma-ep93xx.h>
#include <linux/soc/cirrus/ep93xx.h>

#define DRV_NAME	"ep93xx-ide"
#define DRV_VERSION	"1.0"
enum {
	/* IDECtrl register */
	IDECTRL = 0x00,
	IDECTRL_CS0N = (1 << 0),
	IDECTRL_CS1N = (1 << 1),
	IDECTRL_DIORN = (1 << 5),
	IDECTRL_DIOWN = (1 << 6),
	IDECTRL_INTRQ = (1 << 9),
	IDECTRL_IORDY = (1 << 10),
	/*
	 * The device IDE register to access is selected through the
	 * address and chip-select bits of IDECtrl:
	 *   b4  b3  b2   b1    b0
	 *   A2  A1  A0  CS1N  CS0N
	 * The values below can therefore be ORed directly into IDECtrl;
	 * they correspond to the transformation
	 *   ((IDE register address) << 2) | (CS1N << 1) | CS0N
	 * with the command block selected via CS0N = 0 and the control
	 * block via CS1N = 0 (both chip selects are active low).
	 */
	IDECTRL_ADDR_CMD = 0 + 2,
	IDECTRL_ADDR_DATA = (ATA_REG_DATA << 2) + 2,
	IDECTRL_ADDR_ERROR = (ATA_REG_ERR << 2) + 2,
	IDECTRL_ADDR_FEATURE = (ATA_REG_FEATURE << 2) + 2,
	IDECTRL_ADDR_NSECT = (ATA_REG_NSECT << 2) + 2,
	IDECTRL_ADDR_LBAL = (ATA_REG_LBAL << 2) + 2,
	IDECTRL_ADDR_LBAM = (ATA_REG_LBAM << 2) + 2,
	IDECTRL_ADDR_LBAH = (ATA_REG_LBAH << 2) + 2,
	IDECTRL_ADDR_DEVICE = (ATA_REG_DEVICE << 2) + 2,
	IDECTRL_ADDR_STATUS = (ATA_REG_STATUS << 2) + 2,
	IDECTRL_ADDR_COMMAND = (ATA_REG_CMD << 2) + 2,
	IDECTRL_ADDR_ALTSTATUS = (0x06 << 2) + 1,
	IDECTRL_ADDR_CTL = (0x06 << 2) + 1,

	/* IDECfg register */
	IDECFG = 0x04,
	IDECFG_IDEEN = (1 << 0),
	IDECFG_PIO = (1 << 1),
	IDECFG_MDMA = (1 << 2),
	IDECFG_UDMA = (1 << 3),
	IDECFG_MODE_SHIFT = 4,
	IDECFG_MODE_MASK = (0xf << 4),
	IDECFG_WST_SHIFT = 8,
	IDECFG_WST_MASK = (0x3 << 8),

	/* MDMA Operation register */
	IDEMDMAOP = 0x08,

	/* UDMA Operation register */
	IDEUDMAOP = 0x0c,
	IDEUDMAOP_UEN = (1 << 0),
	IDEUDMAOP_RWOP = (1 << 1),

	/* PIO/MDMA/UDMA Data registers */
	IDEDATAOUT = 0x10,
	IDEDATAIN = 0x14,
	IDEMDMADATAOUT = 0x18,
	IDEMDMADATAIN = 0x1c,
	IDEUDMADATAOUT = 0x20,
	IDEUDMADATAIN = 0x24,

	/* UDMA Status register */
	IDEUDMASTS = 0x28,
	IDEUDMASTS_DMAIDE = (1 << 16),
	IDEUDMASTS_INTIDE = (1 << 17),
	IDEUDMASTS_SBUSY = (1 << 18),
	IDEUDMASTS_NDO = (1 << 24),
	IDEUDMASTS_NDI = (1 << 25),
	IDEUDMASTS_N4X = (1 << 26),

	/* UDMA Debug Status register */
	IDEUDMADEBUG = 0x2c,
};

struct ep93xx_pata_data {
	const struct platform_device *pdev;
	void __iomem *ide_base;
	struct ata_timing t;
	bool iordy;

	unsigned long udma_in_phys;
	unsigned long udma_out_phys;

	struct dma_chan *dma_rx_channel;
	struct ep93xx_dma_data dma_rx_data;
	struct dma_chan *dma_tx_channel;
	struct ep93xx_dma_data dma_tx_data;
};

static void ep93xx_pata_clear_regs(void __iomem *base)
{
	writel(IDECTRL_CS0N | IDECTRL_CS1N | IDECTRL_DIORN |
	       IDECTRL_DIOWN, base + IDECTRL);

	writel(0, base + IDECFG);
	writel(0, base + IDEMDMAOP);
	writel(0, base + IDEUDMAOP);
	writel(0, base + IDEDATAOUT);
	writel(0, base + IDEDATAIN);
	writel(0, base + IDEMDMADATAOUT);
	writel(0, base + IDEMDMADATAIN);
	writel(0, base + IDEUDMADATAOUT);
	writel(0, base + IDEUDMADATAIN);
	writel(0, base + IDEUDMADEBUG);
}

static bool ep93xx_pata_check_iordy(void __iomem *base)
{
	return !!(readl(base + IDECTRL) & IDECTRL_IORDY);
}
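
/*
 * The IDECfg WST field selects how long (in HCLK cycles) the data bus is
 * held after a PIO write. Slower PIO modes get a longer hold: 3 for PIO 0,
 * 2 for PIO 1-2 and 1 for PIO 3 and above. The value is returned already
 * shifted into the WST field.
 */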
static int ep93xx_pata_get_wst(int pio_mode)
{
	int val;

	if (pio_mode == 0)
		val = 3;
	else if (pio_mode < 3)
		val = 2;
	else
		val = 1;

	return val << IDECFG_WST_SHIFT;
}

static void ep93xx_pata_enable_pio(void __iomem *base, int pio_mode)
{
	writel(IDECFG_IDEEN | IDECFG_PIO |
	       ep93xx_pata_get_wst(pio_mode) |
	       (pio_mode << IDECFG_MODE_SHIFT), base + IDECFG);
}
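
/*
 * Busy-wait loop used for the sub-microsecond PIO bus timings below. The
 * timing code assumes roughly 25 ns per iteration at the maximum EP93xx
 * core clock; a slower clock only makes the delays longer, which is safe.
 */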
static void ep93xx_pata_delay(unsigned long count)
{
	__asm__ volatile (
		"0:\n"
		"mov r0, r0\n"
		"subs %0, %1, #1\n"
		"bge 0b\n"
		: "=r" (count)
		: "0" (count)
	);
}

static unsigned long ep93xx_pata_wait_for_iordy(void __iomem *base,
						unsigned long t2)
{
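	/*
	 * IORDY may first be sampled 35 ns after the strobe is asserted and
	 * its maximum pulse width is 1250 ns, so with ~25 ns per delay-loop
	 * iteration the total budget is (1250 + 35) / 25 iterations. We are
	 * already t2 iterations past strobe assertion, so only the
	 * remainder is spent polling here.
	 */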
	unsigned long start = (1250 + 35) / 25 - t2;
	unsigned long counter = start;

	while (!ep93xx_pata_check_iordy(base) && counter--)
		ep93xx_pata_delay(1);
	return start - counter;
}
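
/*
 * Start a PIO register cycle: drive the register address and chip selects
 * with both strobes deasserted, then wait the t1 address setup time.
 */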
static void ep93xx_pata_rw_begin(void __iomem *base, unsigned long addr,
				 unsigned long t1)
{
	writel(IDECTRL_DIOWN | IDECTRL_DIORN | addr, base + IDECTRL);
	ep93xx_pata_delay(t1);
}
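
/*
 * Finish a PIO cycle: hold the strobe for t2 (stretched while IORDY is
 * deasserted, if IORDY is used), deassert both strobes, then honour the
 * recovery time - the larger of t2i and whatever remains of the t0 cycle.
 */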
static void ep93xx_pata_rw_end(void __iomem *base, unsigned long addr,
			       bool iordy, unsigned long t0, unsigned long t2,
			       unsigned long t2i)
{
	ep93xx_pata_delay(t2);

	if (iordy)
		t2 += ep93xx_pata_wait_for_iordy(base, t2);
	writel(IDECTRL_DIOWN | IDECTRL_DIORN | addr, base + IDECTRL);
	if (t0 > t2 && t0 - t2 > t2i)
		ep93xx_pata_delay(t0 - t2);
	else
		ep93xx_pata_delay(t2i);
}

static u16 ep93xx_pata_read(struct ep93xx_pata_data *drv_data,
			    unsigned long addr,
			    bool reg)
{
	void __iomem *base = drv_data->ide_base;
	const struct ata_timing *t = &drv_data->t;
	unsigned long t0 = reg ? t->cyc8b : t->cycle;
	unsigned long t2 = reg ? t->act8b : t->active;
	unsigned long t2i = reg ? t->rec8b : t->recover;

	ep93xx_pata_rw_begin(base, addr, t->setup);
	writel(IDECTRL_DIOWN | addr, base + IDECTRL);
	/*
	 * The read data is latched into IDEDataIn when the DIORN strobe is
	 * deasserted again in ep93xx_pata_rw_end(), so the register is only
	 * read back after the cycle has completed.
	 */
	ep93xx_pata_rw_end(base, addr, drv_data->iordy, t0, t2, t2i);
	return readl(base + IDEDATAIN);
}

/* device register reads use the 8-bit cycle timings */
static u16 ep93xx_pata_read_reg(struct ep93xx_pata_data *drv_data,
				unsigned long addr)
{
	return ep93xx_pata_read(drv_data, addr, true);
}

/* data register reads use the 16-bit cycle timings */
static u16 ep93xx_pata_read_data(struct ep93xx_pata_data *drv_data,
				 unsigned long addr)
{
	return ep93xx_pata_read(drv_data, addr, false);
}

static void ep93xx_pata_write(struct ep93xx_pata_data *drv_data,
			      u16 value, unsigned long addr,
			      bool reg)
{
	void __iomem *base = drv_data->ide_base;
	const struct ata_timing *t = &drv_data->t;
	unsigned long t0 = reg ? t->cyc8b : t->cycle;
	unsigned long t2 = reg ? t->act8b : t->active;
	unsigned long t2i = reg ? t->rec8b : t->recover;

	ep93xx_pata_rw_begin(base, addr, t->setup);
	/*
	 * The value in IDEDataOut is driven onto the data pins while the
	 * DIOWN strobe is asserted, so load it before asserting the strobe.
	 */
	writel(value, base + IDEDATAOUT);
	writel(IDECTRL_DIORN | addr, base + IDECTRL);
	ep93xx_pata_rw_end(base, addr, drv_data->iordy, t0, t2, t2i);
}

/* device register writes use the 8-bit cycle timings */
static void ep93xx_pata_write_reg(struct ep93xx_pata_data *drv_data,
				  u16 value, unsigned long addr)
{
	ep93xx_pata_write(drv_data, value, addr, true);
}

/* data register writes use the 16-bit cycle timings */
static void ep93xx_pata_write_data(struct ep93xx_pata_data *drv_data,
				   u16 value, unsigned long addr)
{
	ep93xx_pata_write(drv_data, value, addr, false);
}

static void ep93xx_pata_set_piomode(struct ata_port *ap,
				    struct ata_device *adev)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;
	struct ata_device *pair = ata_dev_pair(adev);
	/*
	 * Compute the timings in units of delay-loop iterations: one
	 * iteration is assumed to take 5 CPU cycles, i.e. 25 ns at the
	 * maximum 200 MHz core clock (a slower clock only stretches the
	 * delays, which is safe).
	 */
	unsigned long T = 1000000 / (200 / 5);

	ata_timing_compute(adev, adev->pio_mode, &drv_data->t, T, 0);
	if (pair && pair->pio_mode) {
		struct ata_timing t;

		ata_timing_compute(pair, pair->pio_mode, &t, T, 0);
		ata_timing_merge(&t, &drv_data->t, &drv_data->t,
				 ATA_TIMING_SETUP | ATA_TIMING_8BIT);
	}
	drv_data->iordy = ata_pio_need_iordy(adev);

	ep93xx_pata_enable_pio(drv_data->ide_base,
			       adev->pio_mode - XFER_PIO_0);
}

static u8 ep93xx_pata_check_status(struct ata_port *ap)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;

	return ep93xx_pata_read_reg(drv_data, IDECTRL_ADDR_STATUS);
}

static u8 ep93xx_pata_check_altstatus(struct ata_port *ap)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;

	return ep93xx_pata_read_reg(drv_data, IDECTRL_ADDR_ALTSTATUS);
}
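
/*
 * Load the taskfile into the device registers through the PIO accessors
 * above; this follows the same sequence as ata_sff_tf_load().
 */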
static void ep93xx_pata_tf_load(struct ata_port *ap,
				const struct ata_taskfile *tf)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;
	unsigned int is_addr = tf->flags & ATA_TFLAG_ISADDR;

	if (tf->ctl != ap->last_ctl) {
		ep93xx_pata_write_reg(drv_data, tf->ctl, IDECTRL_ADDR_CTL);
		ap->last_ctl = tf->ctl;
		ata_wait_idle(ap);
	}

	if (is_addr && (tf->flags & ATA_TFLAG_LBA48)) {
		ep93xx_pata_write_reg(drv_data, tf->hob_feature,
				      IDECTRL_ADDR_FEATURE);
		ep93xx_pata_write_reg(drv_data, tf->hob_nsect,
				      IDECTRL_ADDR_NSECT);
		ep93xx_pata_write_reg(drv_data, tf->hob_lbal,
				      IDECTRL_ADDR_LBAL);
		ep93xx_pata_write_reg(drv_data, tf->hob_lbam,
				      IDECTRL_ADDR_LBAM);
		ep93xx_pata_write_reg(drv_data, tf->hob_lbah,
				      IDECTRL_ADDR_LBAH);
	}

	if (is_addr) {
		ep93xx_pata_write_reg(drv_data, tf->feature,
				      IDECTRL_ADDR_FEATURE);
		ep93xx_pata_write_reg(drv_data, tf->nsect, IDECTRL_ADDR_NSECT);
		ep93xx_pata_write_reg(drv_data, tf->lbal, IDECTRL_ADDR_LBAL);
		ep93xx_pata_write_reg(drv_data, tf->lbam, IDECTRL_ADDR_LBAM);
		ep93xx_pata_write_reg(drv_data, tf->lbah, IDECTRL_ADDR_LBAH);
	}

	if (tf->flags & ATA_TFLAG_DEVICE)
		ep93xx_pata_write_reg(drv_data, tf->device,
				      IDECTRL_ADDR_DEVICE);

	ata_wait_idle(ap);
}
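
/*
 * Read the taskfile back from the device; for LBA48 commands the HOB
 * values are fetched after setting ATA_HOB in the control register,
 * mirroring ata_sff_tf_read().
 */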
static void ep93xx_pata_tf_read(struct ata_port *ap, struct ata_taskfile *tf)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;

	tf->status = ep93xx_pata_check_status(ap);
	tf->error = ep93xx_pata_read_reg(drv_data, IDECTRL_ADDR_FEATURE);
	tf->nsect = ep93xx_pata_read_reg(drv_data, IDECTRL_ADDR_NSECT);
	tf->lbal = ep93xx_pata_read_reg(drv_data, IDECTRL_ADDR_LBAL);
	tf->lbam = ep93xx_pata_read_reg(drv_data, IDECTRL_ADDR_LBAM);
	tf->lbah = ep93xx_pata_read_reg(drv_data, IDECTRL_ADDR_LBAH);
	tf->device = ep93xx_pata_read_reg(drv_data, IDECTRL_ADDR_DEVICE);

	if (tf->flags & ATA_TFLAG_LBA48) {
		ep93xx_pata_write_reg(drv_data, tf->ctl | ATA_HOB,
				      IDECTRL_ADDR_CTL);
		tf->hob_feature = ep93xx_pata_read_reg(drv_data,
						       IDECTRL_ADDR_FEATURE);
		tf->hob_nsect = ep93xx_pata_read_reg(drv_data,
						     IDECTRL_ADDR_NSECT);
		tf->hob_lbal = ep93xx_pata_read_reg(drv_data,
						    IDECTRL_ADDR_LBAL);
		tf->hob_lbam = ep93xx_pata_read_reg(drv_data,
						    IDECTRL_ADDR_LBAM);
		tf->hob_lbah = ep93xx_pata_read_reg(drv_data,
						    IDECTRL_ADDR_LBAH);
		ep93xx_pata_write_reg(drv_data, tf->ctl, IDECTRL_ADDR_CTL);
		ap->last_ctl = tf->ctl;
	}
}

static void ep93xx_pata_exec_command(struct ata_port *ap,
				     const struct ata_taskfile *tf)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;

	ep93xx_pata_write_reg(drv_data, tf->command,
			      IDECTRL_ADDR_COMMAND);
	ata_sff_pause(ap);
}

static void ep93xx_pata_dev_select(struct ata_port *ap, unsigned int device)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;
	u8 tmp = ATA_DEVICE_OBS;

	if (device != 0)
		tmp |= ATA_DEV1;

	ep93xx_pata_write_reg(drv_data, tmp, IDECTRL_ADDR_DEVICE);
	ata_sff_pause(ap);
}

static void ep93xx_pata_set_devctl(struct ata_port *ap, u8 ctl)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;

	ep93xx_pata_write_reg(drv_data, ctl, IDECTRL_ADDR_CTL);
}
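
/*
 * PIO data transfer: move the buffer as 16-bit words through the
 * IDEDataIn/IDEDataOut registers; a trailing odd byte is padded out to a
 * full word, as in ata_sff_data_xfer().
 */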
static unsigned int ep93xx_pata_data_xfer(struct ata_queued_cmd *qc,
					  unsigned char *buf,
					  unsigned int buflen, int rw)
{
	struct ata_port *ap = qc->dev->link->ap;
	struct ep93xx_pata_data *drv_data = ap->host->private_data;
	u16 *data = (u16 *)buf;
	unsigned int words = buflen >> 1;

	/* Transfer multiple of 2 bytes */
	while (words--)
		if (rw == READ)
			*data++ = cpu_to_le16(
				ep93xx_pata_read_data(
					drv_data, IDECTRL_ADDR_DATA));
		else
			ep93xx_pata_write_data(drv_data, le16_to_cpu(*data++),
					       IDECTRL_ADDR_DATA);

	/* Transfer trailing 1 byte, if any */
	if (unlikely(buflen & 0x01)) {
		unsigned char pad[2] = { };

		buf += buflen - 1;

		if (rw == READ) {
			*pad = cpu_to_le16(
				ep93xx_pata_read_data(
					drv_data, IDECTRL_ADDR_DATA));
			*buf = pad[0];
		} else {
			pad[0] = *buf;
			ep93xx_pata_write_data(drv_data, le16_to_cpu(*pad),
					       IDECTRL_ADDR_DATA);
		}
		words++;
	}

	return words << 1;
}
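
/*
 * Device presence detection: write test patterns to the sector count and
 * LBA low registers and read them back (the same trick ata_devchk() uses).
 */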
static bool ep93xx_pata_device_is_present(struct ata_port *ap,
					  unsigned int device)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;
	u8 nsect, lbal;

	ap->ops->sff_dev_select(ap, device);

	ep93xx_pata_write_reg(drv_data, 0x55, IDECTRL_ADDR_NSECT);
	ep93xx_pata_write_reg(drv_data, 0xaa, IDECTRL_ADDR_LBAL);

	ep93xx_pata_write_reg(drv_data, 0xaa, IDECTRL_ADDR_NSECT);
	ep93xx_pata_write_reg(drv_data, 0x55, IDECTRL_ADDR_LBAL);

	ep93xx_pata_write_reg(drv_data, 0x55, IDECTRL_ADDR_NSECT);
	ep93xx_pata_write_reg(drv_data, 0xaa, IDECTRL_ADDR_LBAL);

	nsect = ep93xx_pata_read_reg(drv_data, IDECTRL_ADDR_NSECT);
	lbal = ep93xx_pata_read_reg(drv_data, IDECTRL_ADDR_LBAL);

	if ((nsect == 0x55) && (lbal == 0xaa))
		return true;

	return false;
}
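
/*
 * Post-reset wait, modelled on ata_sff_wait_after_reset() but using this
 * driver's register accessors instead of the memory-mapped SFF ones.
 */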
static int ep93xx_pata_wait_after_reset(struct ata_link *link,
					unsigned int devmask,
					unsigned long deadline)
{
	struct ata_port *ap = link->ap;
	struct ep93xx_pata_data *drv_data = ap->host->private_data;
	unsigned int dev0 = devmask & (1 << 0);
	unsigned int dev1 = devmask & (1 << 1);
	int rc, ret = 0;

	ata_msleep(ap, ATA_WAIT_AFTER_RESET);

	/* always check readiness of the master device */
	rc = ata_sff_wait_ready(link, deadline);
	if (rc)
		return rc;

	/*
	 * If device 1 was found during the presence check, wait for it to
	 * show up in the register file before waiting for it to become
	 * ready.
	 */
	if (dev1) {
		int i;

		ap->ops->sff_dev_select(ap, 1);

		/*
		 * After reset a present device 1 writes the signature
		 * values 1 into the sector count and LBA low registers;
		 * poll for that for up to 100 ms.
		 */
		for (i = 0; i < 2; i++) {
			u8 nsect, lbal;

			nsect = ep93xx_pata_read_reg(drv_data,
						     IDECTRL_ADDR_NSECT);
			lbal = ep93xx_pata_read_reg(drv_data,
						    IDECTRL_ADDR_LBAL);
			if (nsect == 1 && lbal == 1)
				break;
			msleep(50);
		}

		rc = ata_sff_wait_ready(link, deadline);
		if (rc) {
			if (rc != -ENODEV)
				return rc;
			ret = rc;
		}
	}

	ap->ops->sff_dev_select(ap, 0);
	if (dev1)
		ap->ops->sff_dev_select(ap, 1);
	if (dev0)
		ap->ops->sff_dev_select(ap, 0);

	return ret;
}
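
/*
 * Issue an ATA software reset: pulse ATA_SRST in the device control
 * register, then wait for the devices to become ready again.
 */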
static int ep93xx_pata_bus_softreset(struct ata_port *ap, unsigned int devmask,
				     unsigned long deadline)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;

	ep93xx_pata_write_reg(drv_data, ap->ctl, IDECTRL_ADDR_CTL);
	udelay(20);
	ep93xx_pata_write_reg(drv_data, ap->ctl | ATA_SRST, IDECTRL_ADDR_CTL);
	udelay(20);
	ep93xx_pata_write_reg(drv_data, ap->ctl, IDECTRL_ADDR_CTL);
	ap->last_ctl = ap->ctl;

	return ep93xx_pata_wait_after_reset(&ap->link, devmask, deadline);
}

static void ep93xx_pata_release_dma(struct ep93xx_pata_data *drv_data)
{
	if (drv_data->dma_rx_channel) {
		dma_release_channel(drv_data->dma_rx_channel);
		drv_data->dma_rx_channel = NULL;
	}
	if (drv_data->dma_tx_channel) {
		dma_release_channel(drv_data->dma_tx_channel);
		drv_data->dma_tx_channel = NULL;
	}
}

static bool ep93xx_pata_dma_filter(struct dma_chan *chan, void *filter_param)
{
	if (ep93xx_dma_chan_is_m2p(chan))
		return false;

	chan->private = filter_param;
	return true;
}

static void ep93xx_pata_dma_init(struct ep93xx_pata_data *drv_data)
{
	const struct platform_device *pdev = drv_data->pdev;
	dma_cap_mask_t mask;
	struct dma_slave_config conf;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/*
	 * Request two channels for the IDE port, one per direction. An
	 * alternative would be a single channel whose direction is
	 * reprogrammed at the start of each transfer.
	 */
	drv_data->dma_rx_data.port = EP93XX_DMA_IDE;
	drv_data->dma_rx_data.direction = DMA_DEV_TO_MEM;
	drv_data->dma_rx_data.name = "ep93xx-pata-rx";
	drv_data->dma_rx_channel = dma_request_channel(mask,
		ep93xx_pata_dma_filter, &drv_data->dma_rx_data);
	if (!drv_data->dma_rx_channel)
		return;

	drv_data->dma_tx_data.port = EP93XX_DMA_IDE;
	drv_data->dma_tx_data.direction = DMA_MEM_TO_DEV;
	drv_data->dma_tx_data.name = "ep93xx-pata-tx";
	drv_data->dma_tx_channel = dma_request_channel(mask,
		ep93xx_pata_dma_filter, &drv_data->dma_tx_data);
	if (!drv_data->dma_tx_channel) {
		dma_release_channel(drv_data->dma_rx_channel);
		return;
	}

	/* Configure receive channel direction and source address */
	memset(&conf, 0, sizeof(conf));
	conf.direction = DMA_DEV_TO_MEM;
	conf.src_addr = drv_data->udma_in_phys;
	conf.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	if (dmaengine_slave_config(drv_data->dma_rx_channel, &conf)) {
		dev_err(&pdev->dev, "failed to configure rx dma channel\n");
		ep93xx_pata_release_dma(drv_data);
		return;
	}

	/* Configure transmit channel direction and destination address */
	memset(&conf, 0, sizeof(conf));
	conf.direction = DMA_MEM_TO_DEV;
	conf.dst_addr = drv_data->udma_out_phys;
	conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	if (dmaengine_slave_config(drv_data->dma_tx_channel, &conf)) {
		dev_err(&pdev->dev, "failed to configure tx dma channel\n");
		ep93xx_pata_release_dma(drv_data);
	}
}

static void ep93xx_pata_dma_start(struct ata_queued_cmd *qc)
{
	struct dma_async_tx_descriptor *txd;
	struct ep93xx_pata_data *drv_data = qc->ap->host->private_data;
	void __iomem *base = drv_data->ide_base;
	struct ata_device *adev = qc->dev;
	u32 v = qc->dma_dir == DMA_TO_DEVICE ? IDEUDMAOP_RWOP : 0;
	struct dma_chan *channel = qc->dma_dir == DMA_TO_DEVICE
		? drv_data->dma_tx_channel : drv_data->dma_rx_channel;

	txd = dmaengine_prep_slave_sg(channel, qc->sg, qc->n_elem, qc->dma_dir,
				      DMA_CTRL_ACK);
	if (!txd) {
		dev_err(qc->ap->dev, "failed to prepare slave for sg dma\n");
		return;
	}
	txd->callback = NULL;
	txd->callback_param = NULL;

	if (dmaengine_submit(txd) < 0) {
		dev_err(qc->ap->dev, "failed to submit dma transfer\n");
		return;
	}
	dma_async_issue_pending(channel);
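
	/*
	 * Program the transfer direction first and only then set the UEN
	 * bit in a separate write; the read back of IDEUDMAOP in between
	 * makes sure the direction write has reached the controller before
	 * the UDMA operation is enabled.
	 */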
	writel(v, base + IDEUDMAOP);
	readl(base + IDEUDMAOP);
	writel(v | IDEUDMAOP_UEN, base + IDEUDMAOP);

	writel(IDECFG_IDEEN | IDECFG_UDMA |
	       ((adev->xfer_mode - XFER_UDMA_0) << IDECFG_MODE_SHIFT),
	       base + IDECFG);
}

static void ep93xx_pata_dma_stop(struct ata_queued_cmd *qc)
{
	struct ep93xx_pata_data *drv_data = qc->ap->host->private_data;
	void __iomem *base = drv_data->ide_base;

	/* terminate all dma transfers, if not yet finished */
	dmaengine_terminate_all(drv_data->dma_rx_channel);
	dmaengine_terminate_all(drv_data->dma_tx_channel);
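
	/*
	 * Disable the UDMA operation, park the bus control signals (both
	 * strobes and chip selects deasserted) and switch the controller
	 * back to PIO mode for the commands that follow.
	 */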
	writel(0, base + IDEUDMAOP);
	writel(readl(base + IDECTRL) | IDECTRL_DIOWN | IDECTRL_DIORN |
	       IDECTRL_CS0N | IDECTRL_CS1N, base + IDECTRL);

	ep93xx_pata_enable_pio(drv_data->ide_base,
			       qc->dev->pio_mode - XFER_PIO_0);

	ata_sff_dma_pause(qc->ap);
}

static void ep93xx_pata_dma_setup(struct ata_queued_cmd *qc)
{
	qc->ap->ops->sff_exec_command(qc->ap, &qc->tf);
}

static u8 ep93xx_pata_dma_status(struct ata_port *ap)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;
	u32 val = readl(drv_data->ide_base + IDEUDMASTS);
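
	/*
	 * Translate the UDMA status register into a bmdma-style status
	 * byte: the error bits (NDO, NDI, N4X, INTIDE) map to ATA_DMA_ERR,
	 * a pending INTRQ in IDECtrl to ATA_DMA_INTR, and the busy bits
	 * (SBUSY, DMAIDE) to ATA_DMA_ACTIVE.
	 */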
	if (val & IDEUDMASTS_NDO || val & IDEUDMASTS_NDI ||
	    val & IDEUDMASTS_N4X || val & IDEUDMASTS_INTIDE)
		return ATA_DMA_ERR;

	if (readl(drv_data->ide_base + IDECTRL) & IDECTRL_INTRQ)
		return ATA_DMA_INTR;

	if (val & IDEUDMASTS_SBUSY || val & IDEUDMASTS_DMAIDE)
		return ATA_DMA_ACTIVE;

	return 0;
}
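
/*
 * Softreset, modelled on ata_sff_softreset(): detect which devices are
 * present, reset the bus and classify the devices by their signatures.
 */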
static int ep93xx_pata_softreset(struct ata_link *al, unsigned int *classes,
				 unsigned long deadline)
{
	struct ata_port *ap = al->ap;
	unsigned int slave_possible = ap->flags & ATA_FLAG_SLAVE_POSS;
	unsigned int devmask = 0;
	int rc;
	u8 err;

	/* determine if device 0/1 are present */
	if (ep93xx_pata_device_is_present(ap, 0))
		devmask |= (1 << 0);
	if (slave_possible && ep93xx_pata_device_is_present(ap, 1))
		devmask |= (1 << 1);

	/* select device 0 again */
	ap->ops->sff_dev_select(al->ap, 0);

	/* issue bus reset */
	rc = ep93xx_pata_bus_softreset(ap, devmask, deadline);

	if (rc && (rc != -ENODEV || sata_scr_valid(al))) {
		ata_link_err(al, "SRST failed (errno=%d)\n", rc);
		return rc;
	}

	/* determine by signature whether we have ATA or ATAPI devices */
	classes[0] = ata_sff_dev_classify(&al->device[0], devmask & (1 << 0),
					  &err);
	if (slave_possible && err != 0x81)
		classes[1] = ata_sff_dev_classify(&al->device[1],
						  devmask & (1 << 1), &err);

	return 0;
}
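
/*
 * Drain up to 64 KiB from the data register while the device keeps DRQ
 * asserted, so that a failed command cannot leave stale data in the FIFO
 * (same idea as ata_sff_drain_fifo()).
 */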
static void ep93xx_pata_drain_fifo(struct ata_queued_cmd *qc)
{
	int count;
	struct ata_port *ap;
	struct ep93xx_pata_data *drv_data;

	/* We only need to flush incoming data when a command was running */
	if (qc == NULL || qc->dma_dir == DMA_TO_DEVICE)
		return;

	ap = qc->ap;
	drv_data = ap->host->private_data;

	for (count = 0; (ap->ops->sff_check_status(ap) & ATA_DRQ)
		     && count < 65536; count += 2)
		ep93xx_pata_read_reg(drv_data, IDECTRL_ADDR_DATA);

	if (count)
		ata_port_dbg(ap, "drained %d bytes to clear DRQ.\n", count);
}

static int ep93xx_pata_port_start(struct ata_port *ap)
{
	struct ep93xx_pata_data *drv_data = ap->host->private_data;
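
	/*
	 * Start with conservative PIO 0 timings taken from the ATA timing
	 * table; they are replaced by properly computed values once
	 * set_piomode() runs.
	 */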
	drv_data->t = *ata_timing_find_mode(XFER_PIO_0);
	return 0;
}

static struct scsi_host_template ep93xx_pata_sht = {
	ATA_BASE_SHT(DRV_NAME),
	.sg_tablesize = 32,
	.dma_boundary = 0x7fff,
};

static struct ata_port_operations ep93xx_pata_port_ops = {
	.inherits = &ata_bmdma_port_ops,

	.qc_prep = ata_noop_qc_prep,

	.softreset = ep93xx_pata_softreset,
	.hardreset = ATA_OP_NULL,

	.sff_dev_select = ep93xx_pata_dev_select,
	.sff_set_devctl = ep93xx_pata_set_devctl,
	.sff_check_status = ep93xx_pata_check_status,
	.sff_check_altstatus = ep93xx_pata_check_altstatus,
	.sff_tf_load = ep93xx_pata_tf_load,
	.sff_tf_read = ep93xx_pata_tf_read,
	.sff_exec_command = ep93xx_pata_exec_command,
	.sff_data_xfer = ep93xx_pata_data_xfer,
	.sff_drain_fifo = ep93xx_pata_drain_fifo,
	.sff_irq_clear = ATA_OP_NULL,

	.set_piomode = ep93xx_pata_set_piomode,

	.bmdma_setup = ep93xx_pata_dma_setup,
	.bmdma_start = ep93xx_pata_dma_start,
	.bmdma_stop = ep93xx_pata_dma_stop,
	.bmdma_status = ep93xx_pata_dma_status,

	.cable_detect = ata_cable_unknown,
	.port_start = ep93xx_pata_port_start,
};

static int ep93xx_pata_probe(struct platform_device *pdev)
{
	struct ep93xx_pata_data *drv_data;
	struct ata_host *host;
	struct ata_port *ap;
	int irq;
	struct resource *mem_res;
	void __iomem *ide_base;
	int err;

	err = ep93xx_ide_acquire_gpio(pdev);
	if (err)
		return err;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		err = irq;
		goto err_rel_gpio;
	}

	mem_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	ide_base = devm_ioremap_resource(&pdev->dev, mem_res);
	if (IS_ERR(ide_base)) {
		err = PTR_ERR(ide_base);
		goto err_rel_gpio;
	}

	drv_data = devm_kzalloc(&pdev->dev, sizeof(*drv_data), GFP_KERNEL);
	if (!drv_data) {
		err = -ENXIO;
		goto err_rel_gpio;
	}

	drv_data->pdev = pdev;
	drv_data->ide_base = ide_base;
	drv_data->udma_in_phys = mem_res->start + IDEUDMADATAIN;
	drv_data->udma_out_phys = mem_res->start + IDEUDMADATAOUT;
	ep93xx_pata_dma_init(drv_data);

	host = ata_host_alloc(&pdev->dev, 1);
	if (!host) {
		err = -ENXIO;
		goto err_rel_dma;
	}

	ep93xx_pata_clear_regs(ide_base);

	host->private_data = drv_data;

	ap = host->ports[0];
	ap->dev = &pdev->dev;
	ap->ops = &ep93xx_pata_port_ops;
	ap->flags |= ATA_FLAG_SLAVE_POSS;
	ap->pio_mask = ATA_PIO4;
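
	/*
	 * Maximum UDMA modes: revision E1 of the EP93xx silicon handles
	 * UDMA3, revision E2 handles UDMA4 and other revisions fall back
	 * to UDMA2. UDMA is only advertised at all when both DMA channels
	 * were successfully acquired.
	 */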
	if (drv_data->dma_rx_channel && drv_data->dma_tx_channel) {
		int chip_rev = ep93xx_chip_revision();

		if (chip_rev == EP93XX_CHIP_REV_E1)
			ap->udma_mask = ATA_UDMA3;
		else if (chip_rev == EP93XX_CHIP_REV_E2)
			ap->udma_mask = ATA_UDMA4;
		else
			ap->udma_mask = ATA_UDMA2;
	}

	ep93xx_pata_enable_pio(ide_base, 0);

	dev_info(&pdev->dev, "version " DRV_VERSION "\n");

	err = ata_host_activate(host, irq, ata_bmdma_interrupt, 0,
				&ep93xx_pata_sht);
	if (err == 0)
		return 0;

err_rel_dma:
	ep93xx_pata_release_dma(drv_data);
err_rel_gpio:
	ep93xx_ide_release_gpio(pdev);
	return err;
}

static int ep93xx_pata_remove(struct platform_device *pdev)
{
	struct ata_host *host = platform_get_drvdata(pdev);
	struct ep93xx_pata_data *drv_data = host->private_data;

	ata_host_detach(host);
	ep93xx_pata_release_dma(drv_data);
	ep93xx_pata_clear_regs(drv_data->ide_base);
	ep93xx_ide_release_gpio(pdev);
	return 0;
}

static struct platform_driver ep93xx_pata_platform_driver = {
	.driver = {
		.name = DRV_NAME,
	},
	.probe = ep93xx_pata_probe,
	.remove = ep93xx_pata_remove,
};

module_platform_driver(ep93xx_pata_platform_driver);

MODULE_AUTHOR("Alessandro Zummo, Lennert Buytenhek, Joao Ramos, "
	      "Bartlomiej Zolnierkiewicz, Rafal Prylowski");
MODULE_DESCRIPTION("low-level driver for cirrus ep93xx IDE controller");
MODULE_LICENSE("GPL");
MODULE_VERSION(DRV_VERSION);
MODULE_ALIAS("platform:pata_ep93xx");