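/*
 * Master-mode support for the Atmel/Microchip AT91 Two-Wire Interface (TWI)
 * controller; shared declarations live in i2c-at91.h.
 */
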
#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/gpio/consumer.h>
#include <linux/i2c.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/pinctrl/consumer.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>

#include "i2c-at91.h"

void at91_init_twi_bus_master(struct at91_twi_dev *dev)
{
	struct at91_twi_pdata *pdata = dev->pdata;
	u32 filtr = 0;

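	/* enable the FIFO (when present) before switching to master mode */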
	if (dev->fifo_size)
		at91_twi_write(dev, AT91_TWI_CR, AT91_TWI_FIFOEN);
	at91_twi_write(dev, AT91_TWI_CR, AT91_TWI_MSEN);
	at91_twi_write(dev, AT91_TWI_CR, AT91_TWI_SVDIS);
	at91_twi_write(dev, AT91_TWI_CWGR, dev->twi_cwgr_reg);

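	/* enable digital filter */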
	if (pdata->has_dig_filtr && dev->enable_dig_filt)
		filtr |= AT91_TWI_FILTR_FILT;

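	/* enable advanced digital filter */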
	if (pdata->has_adv_dig_filtr && dev->enable_dig_filt)
		filtr |= AT91_TWI_FILTR_FILT |
			 (AT91_TWI_FILTR_THRES(dev->filter_width) &
			 AT91_TWI_FILTR_THRES_MASK);

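	/* enable analog filter */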
	if (pdata->has_ana_filtr && dev->enable_ana_filt)
		filtr |= AT91_TWI_FILTR_PADFEN;

	if (filtr)
		at91_twi_write(dev, AT91_TWI_FILTR, filtr);
}

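/*
 * Calculate a symmetric clock as stated in the datasheet:
 * twi_clk = F_MAIN / (2 * (cdiv * (1 << ckdiv) + offset))
 */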
static void at91_calc_twi_clock(struct at91_twi_dev *dev)
{
	int ckdiv, cdiv, div, hold = 0, filter_width = 0;
	struct at91_twi_pdata *pdata = dev->pdata;
	int offset = pdata->clk_offset;
	int max_ckdiv = pdata->clk_max_div;
	struct i2c_timings timings, *t = &timings;

	i2c_parse_fw_timings(dev->dev, t, true);

	div = max(0, (int)DIV_ROUND_UP(clk_get_rate(dev->clk),
				       2 * t->bus_freq_hz) - offset);
	ckdiv = fls(div >> 8);
	cdiv = div >> ckdiv;

	if (ckdiv > max_ckdiv) {
		dev_warn(dev->dev, "%d exceeds ckdiv max value which is %d.\n",
			 ckdiv, max_ckdiv);
		ckdiv = max_ckdiv;
		cdiv = 255;
	}

	if (pdata->has_hold_field) {
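		/*
		 * hold time = HOLD + 3 x T_peripheral_clock
		 * Use the clk rate in kHz to prevent overflows when
		 * computing hold.
		 */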
		hold = DIV_ROUND_UP(t->sda_hold_ns
				    * (clk_get_rate(dev->clk) / 1000), 1000000);
		hold -= 3;
		if (hold < 0)
			hold = 0;
		if (hold > AT91_TWI_CWGR_HOLD_MAX) {
			dev_warn(dev->dev,
				 "HOLD field set to its maximum value (%d instead of %d)\n",
				 AT91_TWI_CWGR_HOLD_MAX, hold);
			hold = AT91_TWI_CWGR_HOLD_MAX;
		}
	}

	if (pdata->has_adv_dig_filtr) {
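		/*
		 * filter width = 0 to AT91_TWI_FILTR_THRES_MAX periods of the
		 * peripheral clock; again use the clk rate in kHz to avoid
		 * overflows.
		 */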
		filter_width = DIV_ROUND_UP(t->digital_filter_width_ns
				* (clk_get_rate(dev->clk) / 1000), 1000000);
		if (filter_width > AT91_TWI_FILTR_THRES_MAX) {
			dev_warn(dev->dev,
				 "Filter threshold set to its maximum value (%d instead of %d)\n",
				 AT91_TWI_FILTR_THRES_MAX, filter_width);
			filter_width = AT91_TWI_FILTR_THRES_MAX;
		}
	}

	dev->twi_cwgr_reg = (ckdiv << 16) | (cdiv << 8) | cdiv
			    | AT91_TWI_CWGR_HOLD(hold);

	dev->filter_width = filter_width;

	dev_dbg(dev->dev, "cdiv %d ckdiv %d hold %d (%d ns), filter_width %d (%d ns)\n",
		cdiv, ckdiv, hold, t->sda_hold_ns, filter_width,
		t->digital_filter_width_ns);
}

static void at91_twi_dma_cleanup(struct at91_twi_dev *dev)
{
	struct at91_twi_dma *dma = &dev->dma;

	at91_twi_irq_save(dev);

	if (dma->xfer_in_progress) {
		if (dma->direction == DMA_FROM_DEVICE)
			dmaengine_terminate_sync(dma->chan_rx);
		else
			dmaengine_terminate_sync(dma->chan_tx);
		dma->xfer_in_progress = false;
	}
	if (dma->buf_mapped) {
		dma_unmap_single(dev->dev, sg_dma_address(&dma->sg[0]),
				 dev->buf_len, dma->direction);
		dma->buf_mapped = false;
	}

	at91_twi_irq_restore(dev);
}

static void at91_twi_write_next_byte(struct at91_twi_dev *dev)
{
	if (!dev->buf_len)
		return;

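	/* 8bit write works with and without FIFO */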
	writeb_relaxed(*dev->buf, dev->base + AT91_TWI_THR);

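	/* send stop when the last byte has been written */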
	if (--dev->buf_len == 0) {
		if (!dev->use_alt_cmd)
			at91_twi_write(dev, AT91_TWI_CR, AT91_TWI_STOP);
		at91_twi_write(dev, AT91_TWI_IDR, AT91_TWI_TXRDY);
	}

	dev_dbg(dev->dev, "wrote 0x%x, to go %zu\n", *dev->buf, dev->buf_len);

	++dev->buf;
}

static void at91_twi_write_data_dma_callback(void *data)
{
	struct at91_twi_dev *dev = (struct at91_twi_dev *)data;

	dma_unmap_single(dev->dev, sg_dma_address(&dev->dma.sg[0]),
			 dev->buf_len, DMA_TO_DEVICE);

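	/*
	 * When this callback runs, the TX FIFO or THR may not be empty yet,
	 * so only now enable the TXCOMP interrupt (and send the STOP unless
	 * the alternative command mode already handles it); enabling TXCOMP
	 * earlier could signal completion before the DMA write has finished.
	 */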
	at91_twi_write(dev, AT91_TWI_IER, AT91_TWI_TXCOMP);
	if (!dev->use_alt_cmd)
		at91_twi_write(dev, AT91_TWI_CR, AT91_TWI_STOP);
}

static void at91_twi_write_data_dma(struct at91_twi_dev *dev)
{
	dma_addr_t dma_addr;
	struct dma_async_tx_descriptor *txdesc;
	struct at91_twi_dma *dma = &dev->dma;
	struct dma_chan *chan_tx = dma->chan_tx;
	unsigned int sg_len = 1;

	if (!dev->buf_len)
		return;

	dma->direction = DMA_TO_DEVICE;

	at91_twi_irq_save(dev);
	dma_addr = dma_map_single(dev->dev, dev->buf, dev->buf_len,
				  DMA_TO_DEVICE);
	if (dma_mapping_error(dev->dev, dma_addr)) {
		dev_err(dev->dev, "dma map failed\n");
		return;
	}
	dma->buf_mapped = true;
	at91_twi_irq_restore(dev);

	if (dev->fifo_size) {
		size_t part1_len, part2_len;
		struct scatterlist *sg;
		unsigned fifo_mr;

		sg_len = 0;

		part1_len = dev->buf_len & ~0x3;
		if (part1_len) {
			sg = &dma->sg[sg_len++];
			sg_dma_len(sg) = part1_len;
			sg_dma_address(sg) = dma_addr;
		}

		part2_len = dev->buf_len & 0x3;
		if (part2_len) {
			sg = &dma->sg[sg_len++];
			sg_dma_len(sg) = part2_len;
			sg_dma_address(sg) = dma_addr + part1_len;
		}

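		/*
		 * The DMA controller is triggered when at least 4 data can be
		 * written into the TX FIFO, hence the 4-byte-aligned first
		 * chunk above.
		 */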
		fifo_mr = at91_twi_read(dev, AT91_TWI_FMR);
		fifo_mr &= ~AT91_TWI_FMR_TXRDYM_MASK;
		fifo_mr |= AT91_TWI_FMR_TXRDYM(AT91_TWI_FOUR_DATA);
		at91_twi_write(dev, AT91_TWI_FMR, fifo_mr);
	} else {
		sg_dma_len(&dma->sg[0]) = dev->buf_len;
		sg_dma_address(&dma->sg[0]) = dma_addr;
	}

	txdesc = dmaengine_prep_slave_sg(chan_tx, dma->sg, sg_len,
					 DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txdesc) {
		dev_err(dev->dev, "dma prep slave sg failed\n");
		goto error;
	}

	txdesc->callback = at91_twi_write_data_dma_callback;
	txdesc->callback_param = dev;

	dma->xfer_in_progress = true;
	dmaengine_submit(txdesc);
	dma_async_issue_pending(chan_tx);

	return;

error:
	at91_twi_dma_cleanup(dev);
}

static void at91_twi_read_next_byte(struct at91_twi_dev *dev)
{
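	/*
	 * If buf_len is already zero, RHR holds stale data: read it once to
	 * clear RXRDY and discard the value.
	 */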
	if (!dev->buf_len) {
		at91_twi_read(dev, AT91_TWI_RHR);
		return;
	}

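	/* 8bit read works with and without FIFO */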
	*dev->buf = readb_relaxed(dev->base + AT91_TWI_RHR);
	--dev->buf_len;

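	/* return if aborting; we only needed to read RHR to clear RXRDY */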
	if (dev->recv_len_abort)
		return;

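	/* handle I2C_SMBUS_BLOCK_DATA */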
	if (unlikely(dev->msg->flags & I2C_M_RECV_LEN)) {
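		/* ensure the length byte is a valid value */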
		if (*dev->buf <= I2C_SMBUS_BLOCK_MAX && *dev->buf > 0) {
			dev->msg->flags &= ~I2C_M_RECV_LEN;
			dev->buf_len += *dev->buf;
			dev->msg->len = dev->buf_len + 1;
			dev_dbg(dev->dev, "received block length %zu\n",
				dev->buf_len);
		} else {
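			/* abort and send the stop by reading one more byte */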
			dev->recv_len_abort = true;
			dev->buf_len = 1;
		}
	}

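	/* send stop if the second to last byte has been read */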
	if (!dev->use_alt_cmd && dev->buf_len == 1)
		at91_twi_write(dev, AT91_TWI_CR, AT91_TWI_STOP);

	dev_dbg(dev->dev, "read 0x%x, to go %zu\n", *dev->buf, dev->buf_len);

	++dev->buf;
}

static void at91_twi_read_data_dma_callback(void *data)
{
	struct at91_twi_dev *dev = (struct at91_twi_dev *)data;
	unsigned ier = AT91_TWI_TXCOMP;

	dma_unmap_single(dev->dev, sg_dma_address(&dev->dma.sg[0]),
			 dev->buf_len, DMA_FROM_DEVICE);

	if (!dev->use_alt_cmd) {
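		/* the last two bytes have to be read without using dma */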
		dev->buf += dev->buf_len - 2;
		dev->buf_len = 2;
		ier |= AT91_TWI_RXRDY;
	}
	at91_twi_write(dev, AT91_TWI_IER, ier);
}

static void at91_twi_read_data_dma(struct at91_twi_dev *dev)
{
	dma_addr_t dma_addr;
	struct dma_async_tx_descriptor *rxdesc;
	struct at91_twi_dma *dma = &dev->dma;
	struct dma_chan *chan_rx = dma->chan_rx;
	size_t buf_len;

	buf_len = (dev->use_alt_cmd) ? dev->buf_len : dev->buf_len - 2;
	dma->direction = DMA_FROM_DEVICE;

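	/* keep in mind that we won't use dma to read the last two bytes */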
	at91_twi_irq_save(dev);
	dma_addr = dma_map_single(dev->dev, dev->buf, buf_len, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev->dev, dma_addr)) {
		dev_err(dev->dev, "dma map failed\n");
		return;
	}
	dma->buf_mapped = true;
	at91_twi_irq_restore(dev);

	if (dev->fifo_size && IS_ALIGNED(buf_len, 4)) {
		unsigned fifo_mr;

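		/*
		 * The DMA controller is triggered when at least 4 data can be
		 * read from the RX FIFO.
		 */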
		fifo_mr = at91_twi_read(dev, AT91_TWI_FMR);
		fifo_mr &= ~AT91_TWI_FMR_RXRDYM_MASK;
		fifo_mr |= AT91_TWI_FMR_RXRDYM(AT91_TWI_FOUR_DATA);
		at91_twi_write(dev, AT91_TWI_FMR, fifo_mr);
	}

	sg_dma_len(&dma->sg[0]) = buf_len;
	sg_dma_address(&dma->sg[0]) = dma_addr;

	rxdesc = dmaengine_prep_slave_sg(chan_rx, dma->sg, 1, DMA_DEV_TO_MEM,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!rxdesc) {
		dev_err(dev->dev, "dma prep slave sg failed\n");
		goto error;
	}

	rxdesc->callback = at91_twi_read_data_dma_callback;
	rxdesc->callback_param = dev;

	dma->xfer_in_progress = true;
	dmaengine_submit(rxdesc);
	dma_async_issue_pending(dma->chan_rx);

	return;

error:
	at91_twi_dma_cleanup(dev);
}

static irqreturn_t atmel_twi_interrupt(int irq, void *dev_id)
{
	struct at91_twi_dev *dev = dev_id;
	const unsigned status = at91_twi_read(dev, AT91_TWI_SR);
	const unsigned irqstatus = status & at91_twi_read(dev, AT91_TWI_IMR);

	if (!irqstatus)
		return IRQ_NONE;

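	/*
	 * Handle RXRDY before TXCOMP: on a read, RXRDY can still be set
	 * together with TXCOMP when the slave pushes the last byte quickly
	 * after its ACK. Draining RHR first avoids leaving garbage data in
	 * the Receive Holding Register for the next transfer.
	 */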
	if (irqstatus & AT91_TWI_RXRDY) {
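		/*
		 * Read all available bytes at once by polling RXRDY; this
		 * works with and without the FIFO.
		 */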
		do {
			at91_twi_read_next_byte(dev);
		} while (at91_twi_read(dev, AT91_TWI_SR) & AT91_TWI_RXRDY);
	}

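	/*
	 * On a NACK the controller sets NACK, TXCOMP and TXRDY together in
	 * the Status Register, so check TXCOMP/NACK before TXRDY: once the
	 * transfer has failed (or completed) no further byte must be written
	 * to the Transmit Holding Register, otherwise a new transfer would
	 * start and the slave would likely NACK again.
	 */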
	if (irqstatus & (AT91_TWI_TXCOMP | AT91_TWI_NACK)) {
		at91_disable_twi_interrupts(dev);
		complete(&dev->cmd_complete);
	} else if (irqstatus & AT91_TWI_TXRDY) {
		at91_twi_write_next_byte(dev);
	}

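	/* catch error flags */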
	dev->transfer_status |= status;

	return IRQ_HANDLED;
}

static int at91_do_twi_transfer(struct at91_twi_dev *dev)
{
	int ret;
	unsigned long time_left;
	bool has_unre_flag = dev->pdata->has_unre_flag;
	bool has_alt_cmd = dev->pdata->has_alt_cmd;

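	/*
	 * WARNING: TXCOMP in the Status Register is not a clear-on-read flag;
	 * it reflects the transmitter state when SR is read (holding register
	 * and shifter empty, STOP sent). On a NACKed DMA write the controller
	 * sets LOCK and NACK while a pending DMA write into THR can keep
	 * TXCOMP cleared, so NACK (not TXCOMP) is the reliable way to detect
	 * a failed transfer. TXCOMP is also already set before the transfer
	 * starts: for reads it is cleared when START is written, for CPU
	 * writes when THR is written, and for DMA writes the TXCOMP interrupt
	 * is only enabled from the DMA callback (see
	 * at91_twi_write_data_dma_callback()).
	 */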
	dev_dbg(dev->dev, "transfer: %s %zu bytes.\n",
		(dev->msg->flags & I2C_M_RD) ? "read" : "write", dev->buf_len);

	reinit_completion(&dev->cmd_complete);
	dev->transfer_status = 0;

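	/* clear pending interrupts, such as NACK */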
	at91_twi_read(dev, AT91_TWI_SR);

	if (dev->fifo_size) {
		unsigned fifo_mr = at91_twi_read(dev, AT91_TWI_FMR);

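		/* reset the FIFO mode register */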
		fifo_mr &= ~(AT91_TWI_FMR_TXRDYM_MASK |
			     AT91_TWI_FMR_RXRDYM_MASK);
		fifo_mr |= AT91_TWI_FMR_TXRDYM(AT91_TWI_ONE_DATA);
		fifo_mr |= AT91_TWI_FMR_RXRDYM(AT91_TWI_ONE_DATA);
		at91_twi_write(dev, AT91_TWI_FMR, fifo_mr);

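		/* flush the Transmit and Receive Holding Registers */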
		at91_twi_write(dev, AT91_TWI_CR,
			       AT91_TWI_THRCLR | AT91_TWI_RHRCLR);
	}

	if (!dev->buf_len) {
		at91_twi_write(dev, AT91_TWI_CR, AT91_TWI_QUICK);
		at91_twi_write(dev, AT91_TWI_IER, AT91_TWI_TXCOMP);
	} else if (dev->msg->flags & I2C_M_RD) {
		unsigned start_flags = AT91_TWI_START;

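		/* if only one byte is to be read, immediately stop transfer */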
		if (!dev->use_alt_cmd && dev->buf_len <= 1 &&
		    !(dev->msg->flags & I2C_M_RECV_LEN))
			start_flags |= AT91_TWI_STOP;
		at91_twi_write(dev, AT91_TWI_CR, start_flags);

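		/*
		 * When using DMA without the alternative command mode, the
		 * last two bytes are read manually (see
		 * at91_twi_read_data_dma()) so that the STOP command is not
		 * sent too late, which would otherwise clock in extra data.
		 */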
		if (dev->use_dma && (dev->buf_len > AT91_I2C_DMA_THRESHOLD)) {
			at91_twi_write(dev, AT91_TWI_IER, AT91_TWI_NACK);
			at91_twi_read_data_dma(dev);
		} else {
			at91_twi_write(dev, AT91_TWI_IER,
				       AT91_TWI_TXCOMP |
				       AT91_TWI_NACK |
				       AT91_TWI_RXRDY);
		}
	} else {
		if (dev->use_dma && (dev->buf_len > AT91_I2C_DMA_THRESHOLD)) {
			at91_twi_write(dev, AT91_TWI_IER, AT91_TWI_NACK);
			at91_twi_write_data_dma(dev);
		} else {
			at91_twi_write_next_byte(dev);
			at91_twi_write(dev, AT91_TWI_IER,
				       AT91_TWI_TXCOMP | AT91_TWI_NACK |
				       (dev->buf_len ? AT91_TWI_TXRDY : 0));
		}
	}

	time_left = wait_for_completion_timeout(&dev->cmd_complete,
						dev->adapter.timeout);
	if (time_left == 0) {
		dev->transfer_status |= at91_twi_read(dev, AT91_TWI_SR);
		dev_err(dev->dev, "controller timed out\n");
		at91_init_twi_bus(dev);
		ret = -ETIMEDOUT;
		goto error;
	}
	if (dev->transfer_status & AT91_TWI_NACK) {
		dev_dbg(dev->dev, "received nack\n");
		ret = -EREMOTEIO;
		goto error;
	}
	if (dev->transfer_status & AT91_TWI_OVRE) {
		dev_err(dev->dev, "overrun while reading\n");
		ret = -EIO;
		goto error;
	}
	if (has_unre_flag && dev->transfer_status & AT91_TWI_UNRE) {
		dev_err(dev->dev, "underrun while writing\n");
		ret = -EIO;
		goto error;
	}
	if ((has_alt_cmd || dev->fifo_size) &&
	    (dev->transfer_status & AT91_TWI_LOCK)) {
		dev_err(dev->dev, "tx locked\n");
		ret = -EIO;
		goto error;
	}
	if (dev->recv_len_abort) {
		dev_err(dev->dev, "invalid smbus block length recvd\n");
		ret = -EPROTO;
		goto error;
	}

	dev_dbg(dev->dev, "transfer complete\n");

	return 0;

error:
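	/* first stop the DMA transfer if still in progress */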
	at91_twi_dma_cleanup(dev);

	if ((has_alt_cmd || dev->fifo_size) &&
	    (dev->transfer_status & AT91_TWI_LOCK)) {
		dev_dbg(dev->dev, "unlock tx\n");
		at91_twi_write(dev, AT91_TWI_CR,
			       AT91_TWI_THRCLR | AT91_TWI_LOCKCLR);
	}

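	/*
	 * Some faulty I2C slave devices might hold SDA down; try a bus
	 * recovery, hoping the lines will be released.
	 */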
	i2c_recover_bus(&dev->adapter);

	return ret;
}

static int at91_twi_xfer(struct i2c_adapter *adap, struct i2c_msg *msg, int num)
{
	struct at91_twi_dev *dev = i2c_get_adapdata(adap);
	int ret;
	unsigned int_addr_flag = 0;
	struct i2c_msg *m_start = msg;
	bool is_read;
	u8 *dma_buf = NULL;

	dev_dbg(&adap->dev, "at91_xfer: processing %d messages:\n", num);

	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0)
		goto out;

	if (num == 2) {
		int internal_address = 0;
		int i;

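		/* the 1st msg is put into the internal address, start with the 2nd */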
		m_start = &msg[1];
		for (i = 0; i < msg->len; ++i) {
			const unsigned addr = msg->buf[msg->len - 1 - i];

			internal_address |= addr << (8 * i);
			int_addr_flag += AT91_TWI_IADRSZ_1;
		}
		at91_twi_write(dev, AT91_TWI_IADR, internal_address);
	}

	dev->use_alt_cmd = false;
	is_read = (m_start->flags & I2C_M_RD);
	if (dev->pdata->has_alt_cmd) {
		if (m_start->len > 0 &&
		    m_start->len < AT91_I2C_MAX_ALT_CMD_DATA_SIZE) {
			at91_twi_write(dev, AT91_TWI_CR, AT91_TWI_ACMEN);
			at91_twi_write(dev, AT91_TWI_ACR,
				       AT91_TWI_ACR_DATAL(m_start->len) |
				       ((is_read) ? AT91_TWI_ACR_DIR : 0));
			dev->use_alt_cmd = true;
		} else {
			at91_twi_write(dev, AT91_TWI_CR, AT91_TWI_ACMDIS);
		}
	}

	at91_twi_write(dev, AT91_TWI_MMR,
		       (m_start->addr << 16) |
		       int_addr_flag |
		       ((!dev->use_alt_cmd && is_read) ? AT91_TWI_MREAD : 0));

	dev->buf_len = m_start->len;
	dev->buf = m_start->buf;
	dev->msg = m_start;
	dev->recv_len_abort = false;

	if (dev->use_dma) {
		dma_buf = i2c_get_dma_safe_msg_buf(m_start, 1);
		if (!dma_buf) {
			ret = -ENOMEM;
			goto out;
		}
		dev->buf = dma_buf;
	}

	ret = at91_do_twi_transfer(dev);
	i2c_put_dma_safe_msg_buf(dma_buf, m_start, !ret);

	ret = (ret < 0) ? ret : num;
out:
	pm_runtime_mark_last_busy(dev->dev);
	pm_runtime_put_autosuspend(dev->dev);

	return ret;
}

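/*
 * The hardware can handle at most two messages concatenated by a repeated
 * start via its internal address feature.
 */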
static const struct i2c_adapter_quirks at91_twi_quirks = {
	.flags = I2C_AQ_COMB | I2C_AQ_COMB_WRITE_FIRST | I2C_AQ_COMB_SAME_ADDR,
	.max_comb_1st_msg_len = 3,
};

static u32 at91_twi_func(struct i2c_adapter *adapter)
{
	return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL
		| I2C_FUNC_SMBUS_READ_BLOCK_DATA;
}

static const struct i2c_algorithm at91_twi_algorithm = {
	.master_xfer = at91_twi_xfer,
	.functionality = at91_twi_func,
};

static int at91_twi_configure_dma(struct at91_twi_dev *dev, u32 phy_addr)
{
	int ret = 0;
	struct dma_slave_config slave_config;
	struct at91_twi_dma *dma = &dev->dma;
	enum dma_slave_buswidth addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;

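	/*
	 * With FIFOs, THR/RHR are accessed four bytes at a time (matching the
	 * TXRDYM/RXRDYM FOUR_DATA trigger), so widen the DMA accesses
	 * accordingly; without FIFOs use single-byte accesses.
	 */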
	if (dev->fifo_size)
		addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;

	memset(&slave_config, 0, sizeof(slave_config));
	slave_config.src_addr = (dma_addr_t)phy_addr + AT91_TWI_RHR;
	slave_config.src_addr_width = addr_width;
	slave_config.src_maxburst = 1;
	slave_config.dst_addr = (dma_addr_t)phy_addr + AT91_TWI_THR;
	slave_config.dst_addr_width = addr_width;
	slave_config.dst_maxburst = 1;
	slave_config.device_fc = false;

	dma->chan_tx = dma_request_chan(dev->dev, "tx");
	if (IS_ERR(dma->chan_tx)) {
		ret = PTR_ERR(dma->chan_tx);
		dma->chan_tx = NULL;
		goto error;
	}

	dma->chan_rx = dma_request_chan(dev->dev, "rx");
	if (IS_ERR(dma->chan_rx)) {
		ret = PTR_ERR(dma->chan_rx);
		dma->chan_rx = NULL;
		goto error;
	}

	slave_config.direction = DMA_MEM_TO_DEV;
	if (dmaengine_slave_config(dma->chan_tx, &slave_config)) {
		dev_err(dev->dev, "failed to configure tx channel\n");
		ret = -EINVAL;
		goto error;
	}

	slave_config.direction = DMA_DEV_TO_MEM;
	if (dmaengine_slave_config(dma->chan_rx, &slave_config)) {
		dev_err(dev->dev, "failed to configure rx channel\n");
		ret = -EINVAL;
		goto error;
	}

	sg_init_table(dma->sg, 2);
	dma->buf_mapped = false;
	dma->xfer_in_progress = false;
	dev->use_dma = true;

	dev_info(dev->dev, "using %s (tx) and %s (rx) for DMA transfers\n",
		 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx));

	return ret;

error:
	if (ret != -EPROBE_DEFER)
		dev_info(dev->dev, "can't get DMA channel, continue without DMA support\n");
	if (dma->chan_rx)
		dma_release_channel(dma->chan_rx);
	if (dma->chan_tx)
		dma_release_channel(dma->chan_tx);
	return ret;
}

static int at91_init_twi_recovery_gpio(struct platform_device *pdev,
				       struct at91_twi_dev *dev)
{
	struct i2c_bus_recovery_info *rinfo = &dev->rinfo;

	rinfo->pinctrl = devm_pinctrl_get(&pdev->dev);
	if (!rinfo->pinctrl || IS_ERR(rinfo->pinctrl)) {
		dev_info(dev->dev, "can't get pinctrl, bus recovery not supported\n");
		return PTR_ERR(rinfo->pinctrl);
	}
	dev->adapter.bus_recovery_info = rinfo;

	return 0;
}

static int at91_twi_recover_bus_cmd(struct i2c_adapter *adap)
{
	struct at91_twi_dev *dev = i2c_get_adapdata(adap);

	dev->transfer_status |= at91_twi_read(dev, AT91_TWI_SR);
	if (!(dev->transfer_status & AT91_TWI_SDA)) {
		dev_dbg(dev->dev, "SDA is down; sending bus clear command\n");
		if (dev->use_alt_cmd) {
			unsigned int acr;

			acr = at91_twi_read(dev, AT91_TWI_ACR);
			acr &= ~AT91_TWI_ACR_DATAL_MASK;
			at91_twi_write(dev, AT91_TWI_ACR, acr);
		}
		at91_twi_write(dev, AT91_TWI_CR, AT91_TWI_CLEAR);
	}

	return 0;
}

static int at91_init_twi_recovery_info(struct platform_device *pdev,
				       struct at91_twi_dev *dev)
{
	struct i2c_bus_recovery_info *rinfo = &dev->rinfo;
	bool has_clear_cmd = dev->pdata->has_clear_cmd;

	if (!has_clear_cmd)
		return at91_init_twi_recovery_gpio(pdev, dev);

	rinfo->recover_bus = at91_twi_recover_bus_cmd;
	dev->adapter.bus_recovery_info = rinfo;

	return 0;
}

int at91_twi_probe_master(struct platform_device *pdev,
			  u32 phy_addr, struct at91_twi_dev *dev)
{
	int rc;

	init_completion(&dev->cmd_complete);

	rc = devm_request_irq(&pdev->dev, dev->irq, atmel_twi_interrupt, 0,
			      dev_name(dev->dev), dev);
	if (rc) {
		dev_err(dev->dev, "Cannot get irq %d: %d\n", dev->irq, rc);
		return rc;
	}

	if (dev->dev->of_node) {
		rc = at91_twi_configure_dma(dev, phy_addr);
		if (rc == -EPROBE_DEFER)
			return rc;
	}

	if (!of_property_read_u32(pdev->dev.of_node, "atmel,fifo-size",
				  &dev->fifo_size)) {
		dev_info(dev->dev, "Using FIFO (%u data)\n", dev->fifo_size);
	}

	dev->enable_dig_filt = of_property_read_bool(pdev->dev.of_node,
						     "i2c-digital-filter");

	dev->enable_ana_filt = of_property_read_bool(pdev->dev.of_node,
						     "i2c-analog-filter");
	at91_calc_twi_clock(dev);

	rc = at91_init_twi_recovery_info(pdev, dev);
	if (rc == -EPROBE_DEFER)
		return rc;

	dev->adapter.algo = &at91_twi_algorithm;
	dev->adapter.quirks = &at91_twi_quirks;

	return 0;
}