/*
 * pata_atiixp.c - ATI PATA driver for the libata layer
 *
 * Supports the IDE function of the ATI IXP200/300/400/600/700 and
 * AMD Hudson-2 south bridges.
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/pci.h>
#include <linux/blkdev.h>
#include <linux/delay.h>
#include <scsi/scsi_host.h>
#include <linux/libata.h>
#include <linux/dmi.h>

#define DRV_NAME	"pata_atiixp"
#define DRV_VERSION	"0.4.6"

enum {
	ATIIXP_IDE_PIO_TIMING	= 0x40,
	ATIIXP_IDE_MWDMA_TIMING	= 0x44,
	ATIIXP_IDE_PIO_CONTROL	= 0x48,
	ATIIXP_IDE_PIO_MODE	= 0x4a,
	ATIIXP_IDE_UDMA_CONTROL	= 0x54,
	ATIIXP_IDE_UDMA_MODE	= 0x56
};

static const struct dmi_system_id attixp_cable_override_dmi_table[] = {
	{
		/*
		 * Force short 40-wire cable handling; automatic cable
		 * detection misreports this board.
		 */
		.ident = "MSI E350DM-E33",
		.matches = {
			DMI_MATCH(DMI_BOARD_VENDOR, "MSI"),
			DMI_MATCH(DMI_BOARD_NAME, "E350DM-E33(MS-7720)"),
		},
	},
	{ }
};
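
/**
 *	atiixp_cable_detect	-	check for 40/80 pin cable
 *	@ap: port to check
 *
 *	Report the cable type for the port. Boards listed in the DMI
 *	override table are treated as short 40-wire installs.
 */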
static int atiixp_cable_detect(struct ata_port *ap)
{
	struct pci_dev *pdev = to_pci_dev(ap->host->dev);
	u8 udma;

	if (dmi_check_system(attixp_cable_override_dmi_table))
		return ATA_CBL_PATA40_SHORT;

	/*
	 * No override: follow the UDMA modes the BIOS left programmed.
	 * If either device was set up for UDMA above mode 2, an 80-wire
	 * cable must be present.
	 */
	pci_read_config_byte(pdev, ATIIXP_IDE_UDMA_MODE + ap->port_no, &udma);
	if ((udma & 0x07) >= 0x04 || (udma & 0x70) >= 0x40)
		return ATA_CBL_PATA80;
	return ATA_CBL_PATA40;
}

static DEFINE_SPINLOCK(atiixp_lock);
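
/**
 *	atiixp_prereset	-	prereset handler
 *	@link: ATA link to reset
 *	@deadline: deadline jiffies for the operation
 *
 *	Check that the channel is enabled before handing over to the
 *	standard SFF prereset handling.
 */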
static int atiixp_prereset(struct ata_link *link, unsigned long deadline)
{
	/* Per-channel enable bits in the PIO control register (0x48) */
	static const struct pci_bits atiixp_enable_bits[] = {
		{ 0x48, 1, 0x01, 0x00 },
		{ 0x48, 1, 0x08, 0x00 }
	};

	struct ata_port *ap = link->ap;
	struct pci_dev *pdev = to_pci_dev(ap->host->dev);

	if (!pci_test_config_bits(pdev, &atiixp_enable_bits[ap->port_no]))
		return -ENOENT;

	return ata_sff_prereset(link, deadline);
}
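
/**
 *	atiixp_set_pio_timing	-	set initial PIO mode data
 *	@ap: ATA interface
 *	@adev: ATA device
 *	@pio: requested PIO mode number
 *
 *	Shared helper used by both the PIO and DMA setup paths. Loads
 *	the PIO mode number and the matching timing byte into the
 *	controller registers.
 */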
static void atiixp_set_pio_timing(struct ata_port *ap, struct ata_device *adev, int pio)
{
	static const u8 pio_timings[5] = { 0x5D, 0x47, 0x34, 0x22, 0x20 };

	struct pci_dev *pdev = to_pci_dev(ap->host->dev);
	int dn = 2 * ap->port_no + adev->devno;
	/*
	 * Each channel owns 16 bits of the timing dword; the drive 0
	 * byte is the upper of the pair.
	 */
	int timing_shift = (16 * ap->port_no) + 8 * (adev->devno ^ 1);
	u32 pio_timing_data;
	u16 pio_mode_data;

	pci_read_config_word(pdev, ATIIXP_IDE_PIO_MODE, &pio_mode_data);
	pio_mode_data &= ~(0x7 << (4 * dn));
	pio_mode_data |= pio << (4 * dn);
	pci_write_config_word(pdev, ATIIXP_IDE_PIO_MODE, pio_mode_data);

	pci_read_config_dword(pdev, ATIIXP_IDE_PIO_TIMING, &pio_timing_data);
	pio_timing_data &= ~(0xFF << timing_shift);
	pio_timing_data |= (pio_timings[pio] << timing_shift);
	pci_write_config_dword(pdev, ATIIXP_IDE_PIO_TIMING, pio_timing_data);
}
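
/**
 *	atiixp_set_piomode	-	set initial PIO mode data
 *	@ap: ATA interface
 *	@adev: ATA device
 *
 *	Program the PIO mode. The shared timing helper does the work
 *	under the controller lock, since the DMA setup path must also
 *	adjust PIO timings.
 */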
static void atiixp_set_piomode(struct ata_port *ap, struct ata_device *adev)
{
	unsigned long flags;

	spin_lock_irqsave(&atiixp_lock, flags);
	atiixp_set_pio_timing(ap, adev, adev->pio_mode - XFER_PIO_0);
	spin_unlock_irqrestore(&atiixp_lock, flags);
}
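
/**
 *	atiixp_set_dmamode	-	set initial DMA mode data
 *	@ap: ATA interface
 *	@adev: ATA device
 *
 *	Program UDMA or MWDMA timings for the device, then pick a PIO
 *	mode whose timings are compatible with the chosen DMA mode.
 */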
static void atiixp_set_dmamode(struct ata_port *ap, struct ata_device *adev)
{
	static const u8 mwdma_timings[5] = { 0x77, 0x21, 0x20 };

	struct pci_dev *pdev = to_pci_dev(ap->host->dev);
	int dma = adev->dma_mode;
	int dn = 2 * ap->port_no + adev->devno;
	int wanted_pio;
	unsigned long flags;

	spin_lock_irqsave(&atiixp_lock, flags);

	if (adev->dma_mode >= XFER_UDMA_0) {
		u16 udma_mode_data;

		dma -= XFER_UDMA_0;

		pci_read_config_word(pdev, ATIIXP_IDE_UDMA_MODE, &udma_mode_data);
		udma_mode_data &= ~(0x7 << (4 * dn));
		udma_mode_data |= dma << (4 * dn);
		pci_write_config_word(pdev, ATIIXP_IDE_UDMA_MODE, udma_mode_data);
	} else {
		int timing_shift = (16 * ap->port_no) + 8 * (adev->devno ^ 1);
		u32 mwdma_timing_data;

		dma -= XFER_MW_DMA_0;

		pci_read_config_dword(pdev, ATIIXP_IDE_MWDMA_TIMING,
				      &mwdma_timing_data);
		mwdma_timing_data &= ~(0xFF << timing_shift);
		mwdma_timing_data |= (mwdma_timings[dma] << timing_shift);
		pci_write_config_dword(pdev, ATIIXP_IDE_MWDMA_TIMING,
				       mwdma_timing_data);
	}

	/*
	 * We must now look at the PIO mode situation. We may need to
	 * adjust the PIO timings to keep them compatible with the
	 * selected DMA mode.
	 */
	if (adev->dma_mode >= XFER_MW_DMA_2)
		wanted_pio = 4;
	else if (adev->dma_mode == XFER_MW_DMA_1)
		wanted_pio = 3;
	else if (adev->dma_mode == XFER_MW_DMA_0)
		wanted_pio = 0;
	else BUG();

	if (adev->pio_mode != wanted_pio)
		atiixp_set_pio_timing(ap, adev, wanted_pio);
	spin_unlock_irqrestore(&atiixp_lock, flags);
}
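
/**
 *	atiixp_bmdma_start	-	DMA start callback
 *	@qc: command in progress
 *
 *	Set or clear the per-device UDMA enable bit to match the active
 *	transfer mode before starting bus-master DMA. The host lock held
 *	by the libata core serialises the config space update between
 *	the two channels.
 */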
static void atiixp_bmdma_start(struct ata_queued_cmd *qc)
{
	struct ata_port *ap = qc->ap;
	struct ata_device *adev = qc->dev;
	struct pci_dev *pdev = to_pci_dev(ap->host->dev);
	int dn = (2 * ap->port_no) + adev->devno;
	u16 tmp16;

	pci_read_config_word(pdev, ATIIXP_IDE_UDMA_CONTROL, &tmp16);
	if (ata_using_udma(adev))
		tmp16 |= (1 << dn);
	else
		tmp16 &= ~(1 << dn);
	pci_write_config_word(pdev, ATIIXP_IDE_UDMA_CONTROL, tmp16);
	ata_bmdma_start(qc);
}
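
/**
 *	atiixp_bmdma_stop	-	DMA stop callback
 *	@qc: command that completed
 *
 *	DMA has completed. Clear the per-device UDMA enable bit, since
 *	any following transfers will be PIO until a new DMA command is
 *	issued.
 */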
static void atiixp_bmdma_stop(struct ata_queued_cmd *qc)
{
	struct ata_port *ap = qc->ap;
	struct pci_dev *pdev = to_pci_dev(ap->host->dev);
	int dn = (2 * ap->port_no) + qc->dev->devno;
	u16 tmp16;

	pci_read_config_word(pdev, ATIIXP_IDE_UDMA_CONTROL, &tmp16);
	tmp16 &= ~(1 << dn);
	pci_write_config_word(pdev, ATIIXP_IDE_UDMA_CONTROL, tmp16);
	ata_bmdma_stop(qc);
}

static struct scsi_host_template atiixp_sht = {
	ATA_BASE_SHT(DRV_NAME),
	.sg_tablesize	= LIBATA_DUMB_MAX_PRD,
	.dma_boundary	= ATA_DMA_BOUNDARY,
};

static struct ata_port_operations atiixp_port_ops = {
	.inherits	= &ata_bmdma_port_ops,

	.qc_prep	= ata_bmdma_dumb_qc_prep,
	.bmdma_start	= atiixp_bmdma_start,
	.bmdma_stop	= atiixp_bmdma_stop,

	.prereset	= atiixp_prereset,
	.cable_detect	= atiixp_cable_detect,
	.set_piomode	= atiixp_set_piomode,
	.set_dmamode	= atiixp_set_dmamode,
};
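
/**
 *	atiixp_init_one		-	PCI probe callback
 *	@pdev: PCI device to register
 *	@id: matching entry from the PCI ID table
 *
 *	Register the controller with the libata BMDMA helpers. Both
 *	channels support PIO0-4, MWDMA1-2 and UDMA0-5; the secondary
 *	channel is marked dummy on the IXP600.
 */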
static int atiixp_init_one(struct pci_dev *pdev, const struct pci_device_id *id)
{
	static const struct ata_port_info info = {
		.flags = ATA_FLAG_SLAVE_POSS,
		.pio_mask = ATA_PIO4,
		.mwdma_mask = ATA_MWDMA12_ONLY,
		.udma_mask = ATA_UDMA5,
		.port_ops = &atiixp_port_ops
	};
	const struct ata_port_info *ppi[] = { &info, &info };

	/* The IXP600 (SB600) brings out only the primary IDE channel */
	if (pdev->device == PCI_DEVICE_ID_ATI_IXP600_IDE)
		ppi[1] = &ata_dummy_port_info;

	return ata_pci_bmdma_init_one(pdev, ppi, &atiixp_sht, NULL,
				      ATA_HOST_PARALLEL_SCAN);
}

static const struct pci_device_id atiixp[] = {
	{ PCI_VDEVICE(ATI, PCI_DEVICE_ID_ATI_IXP200_IDE), },
	{ PCI_VDEVICE(ATI, PCI_DEVICE_ID_ATI_IXP300_IDE), },
	{ PCI_VDEVICE(ATI, PCI_DEVICE_ID_ATI_IXP400_IDE), },
	{ PCI_VDEVICE(ATI, PCI_DEVICE_ID_ATI_IXP600_IDE), },
	{ PCI_VDEVICE(ATI, PCI_DEVICE_ID_ATI_IXP700_IDE), },
	{ PCI_VDEVICE(AMD, PCI_DEVICE_ID_AMD_HUDSON2_IDE), },

	{ },
};

static struct pci_driver atiixp_pci_driver = {
	.name		= DRV_NAME,
	.id_table	= atiixp,
	.probe		= atiixp_init_one,
	.remove		= ata_pci_remove_one,
#ifdef CONFIG_PM_SLEEP
	.resume		= ata_pci_device_resume,
	.suspend	= ata_pci_device_suspend,
#endif
};

module_pci_driver(atiixp_pci_driver);

MODULE_AUTHOR("Alan Cox");
MODULE_DESCRIPTION("low-level driver for ATI IXP200/300/400");
MODULE_LICENSE("GPL");
MODULE_DEVICE_TABLE(pci, atiixp);
MODULE_VERSION(DRV_VERSION);