// SPDX-License-Identifier: GPL-2.0
/*
 * SuperH Mobile I2C Bus Controller driver
 */
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/i2c.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
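
/*
 * Transfers are driven from the interrupt handler.  pd->pos tracks the
 * progress through the current message: -1 means the slave address still
 * has to be sent, 0 is the first data byte for writes (or the switch from
 * the address to the data phase for reads), and pd->msg->len marks the
 * point where a stop or repeated start is issued.
 */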
enum sh_mobile_i2c_op {
	OP_START = 0,
	OP_TX_FIRST,
	OP_TX,
	OP_TX_STOP,
	OP_TX_TO_RX,
	OP_RX,
	OP_RX_STOP,
	OP_RX_STOP_DATA,
};

struct sh_mobile_i2c_data {
	struct device *dev;
	void __iomem *reg;
	struct i2c_adapter adap;
	unsigned long bus_speed;
	unsigned int clks_per_count;
	struct clk *clk;
	u_int8_t icic;
	u_int8_t flags;
	u_int16_t iccl;
	u_int16_t icch;

	spinlock_t lock;
	wait_queue_head_t wait;
	struct i2c_msg *msg;
	int pos;
	int sr;
	bool send_stop;
	bool stop_after_dma;
	bool atomic_xfer;

	struct resource *res;
	struct dma_chan *dma_tx;
	struct dma_chan *dma_rx;
	struct scatterlist sg;
	enum dma_data_direction dma_direction;
	u8 *dma_buf;
};

struct sh_mobile_dt_config {
	int clks_per_count;
	int (*setup)(struct sh_mobile_i2c_data *pd);
};

#define IIC_FLAG_HAS_ICIC67	(1 << 0)

/* Register offsets */
#define ICDR			0x00
#define ICCR			0x04
#define ICSR			0x08
#define ICIC			0x0c
#define ICCL			0x10
#define ICCH			0x14
#define ICSTART			0x70

/* Register bits */
#define ICCR_ICE		0x80
#define ICCR_RACK		0x40
#define ICCR_TRS		0x10
#define ICCR_BBSY		0x04
#define ICCR_SCP		0x01

#define ICSR_SCLM		0x80
#define ICSR_SDAM		0x40
#define SW_DONE			0x20	/* software flag, only ever set in pd->sr */
#define ICSR_BUSY		0x10
#define ICSR_AL			0x08
#define ICSR_TACK		0x04
#define ICSR_WAIT		0x02
#define ICSR_DTE		0x01

#define ICIC_ICCLB8		0x80
#define ICIC_ICCHB8		0x40
#define ICIC_TDMAE		0x20
#define ICIC_RDMAE		0x10
#define ICIC_ALE		0x08
#define ICIC_TACKE		0x04
#define ICIC_WAITE		0x02
#define ICIC_DTEE		0x01

#define ICSTART_ICSTART		0x10

static void iic_wr(struct sh_mobile_i2c_data *pd, int offs, unsigned char data)
{
	if (offs == ICIC)
		data |= pd->icic;

	iowrite8(data, pd->reg + offs);
}

static unsigned char iic_rd(struct sh_mobile_i2c_data *pd, int offs)
{
	return ioread8(pd->reg + offs);
}

static void iic_set_clr(struct sh_mobile_i2c_data *pd, int offs,
			unsigned char set, unsigned char clr)
{
	iic_wr(pd, offs, (iic_rd(pd, offs) | set) & ~clr);
}
static u32 sh_mobile_i2c_iccl(unsigned long count_khz, u32 tLOW, u32 tf)
{
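	/*
	 * count_khz is the peripheral clock in kHz while tLOW and tf are
	 * given in units of 100 ns, so count_khz * (tLOW + tf) is 10000
	 * times the number of clock counts needed for the SCL low period;
	 * adding 5000 before dividing rounds to the nearest count.
	 */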
	return (((count_khz * (tLOW + tf)) + 5000) / 10000);
}

static u32 sh_mobile_i2c_icch(unsigned long count_khz, u32 tHIGH, u32 tf)
{
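	/*
	 * Same scaling as for ICCL: count_khz is in kHz and tHIGH/tf are in
	 * units of 100 ns, so the product is 10000 times the clock counts
	 * required for the SCL high period, rounded to the nearest count.
	 */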
	return (((count_khz * (tHIGH + tf)) + 5000) / 10000);
}

static int sh_mobile_i2c_check_timing(struct sh_mobile_i2c_data *pd)
{
	u16 max_val = pd->flags & IIC_FLAG_HAS_ICIC67 ? 0x1ff : 0xff;

	if (pd->iccl > max_val || pd->icch > max_val) {
		dev_err(pd->dev, "timing values out of range: L/H=0x%x/0x%x\n",
			pd->iccl, pd->icch);
		return -EINVAL;
	}

	/* one more bit of ICCL in ICIC */
	if (pd->iccl & 0x100)
		pd->icic |= ICIC_ICCLB8;
	else
		pd->icic &= ~ICIC_ICCLB8;

	/* one more bit of ICCH in ICIC */
	if (pd->icch & 0x100)
		pd->icic |= ICIC_ICCHB8;
	else
		pd->icic &= ~ICIC_ICCHB8;

	dev_dbg(pd->dev, "timing values: L/H=0x%x/0x%x\n", pd->iccl, pd->icch);
	return 0;
}

static int sh_mobile_i2c_init(struct sh_mobile_i2c_data *pd)
{
	unsigned long i2c_clk_khz;
	u32 tHIGH, tLOW, tf;

	i2c_clk_khz = clk_get_rate(pd->clk) / 1000 / pd->clks_per_count;

	if (pd->bus_speed == I2C_MAX_STANDARD_MODE_FREQ) {
		/* standard mode, timing values in units of 100 ns */
		tLOW	= 47;	/* tLOW  = 4.7 us */
		tHIGH	= 40;	/* tHIGH = 4.0 us */
		tf	= 3;	/* tf    = 0.3 us */
	} else if (pd->bus_speed == I2C_MAX_FAST_MODE_FREQ) {
		/* fast mode, timing values in units of 100 ns */
		tLOW	= 13;	/* tLOW  = 1.3 us */
		tHIGH	= 6;	/* tHIGH = 0.6 us */
		tf	= 3;	/* tf    = 0.3 us */
	} else {
		dev_err(pd->dev, "unrecognized bus speed %lu Hz\n",
			pd->bus_speed);
		return -EINVAL;
	}

	pd->iccl = sh_mobile_i2c_iccl(i2c_clk_khz, tLOW, tf);
	pd->icch = sh_mobile_i2c_icch(i2c_clk_khz, tHIGH, tf);

	return sh_mobile_i2c_check_timing(pd);
}

static int sh_mobile_i2c_v2_init(struct sh_mobile_i2c_data *pd)
{
	unsigned long clks_per_cycle;

	/* SCL low for 5/9 and high for 4/9 of the cycle (L + H = 9) */
	clks_per_cycle = clk_get_rate(pd->clk) / pd->bus_speed;
	pd->iccl = DIV_ROUND_UP(clks_per_cycle * 5 / 9 - 1, pd->clks_per_count);
	pd->icch = DIV_ROUND_UP(clks_per_cycle * 4 / 9 - 5, pd->clks_per_count);

	return sh_mobile_i2c_check_timing(pd);
}

static unsigned char i2c_op(struct sh_mobile_i2c_data *pd, enum sh_mobile_i2c_op op)
{
	unsigned char ret = 0;
	unsigned long flags;

	dev_dbg(pd->dev, "op %d\n", op);

	spin_lock_irqsave(&pd->lock, flags);

	switch (op) {
	case OP_START:
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_TRS | ICCR_BBSY);
		break;
	case OP_TX_FIRST:
		iic_wr(pd, ICIC, ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		iic_wr(pd, ICDR, i2c_8bit_addr_from_msg(pd->msg));
		break;
	case OP_TX:
		iic_wr(pd, ICDR, pd->msg->buf[pd->pos]);
		break;
	case OP_TX_STOP:
		iic_wr(pd, ICCR, pd->send_stop ? ICCR_ICE | ICCR_TRS
					       : ICCR_ICE | ICCR_TRS | ICCR_BBSY);
		break;
	case OP_TX_TO_RX:
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);
		break;
	case OP_RX:
		ret = iic_rd(pd, ICDR);
		break;
	case OP_RX_STOP:
		if (!pd->atomic_xfer)
			iic_wr(pd, ICIC,
			       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
		break;
	case OP_RX_STOP_DATA:
		if (!pd->atomic_xfer)
			iic_wr(pd, ICIC,
			       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		ret = iic_rd(pd, ICDR);
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
		break;
	}

	spin_unlock_irqrestore(&pd->lock, flags);

	dev_dbg(pd->dev, "op %d, data out 0x%02x\n", op, ret);
	return ret;
}

static int sh_mobile_i2c_isr_tx(struct sh_mobile_i2c_data *pd)
{
	if (pd->pos == pd->msg->len) {
		i2c_op(pd, OP_TX_STOP);
		return 1;
	}

	if (pd->pos == -1)
		i2c_op(pd, OP_TX_FIRST);
	else
		i2c_op(pd, OP_TX);

	pd->pos++;
	return 0;
}

static int sh_mobile_i2c_isr_rx(struct sh_mobile_i2c_data *pd)
{
	int real_pos;

	/* switching from TX (address) to RX (data) takes two extra interrupts */
	real_pos = pd->pos - 2;

	if (pd->pos == -1) {
		i2c_op(pd, OP_TX_FIRST);
	} else if (pd->pos == 0) {
		i2c_op(pd, OP_TX_TO_RX);
	} else if (pd->pos == pd->msg->len) {
		if (pd->stop_after_dma) {
			/* finish a DMA transfer as if the last byte came in via PIO */
			i2c_op(pd, OP_RX_STOP);
			pd->pos++;
			goto done;
		}

		if (real_pos < 0)
			i2c_op(pd, OP_RX_STOP);
		else
			pd->msg->buf[real_pos] = i2c_op(pd, OP_RX_STOP_DATA);
	} else if (real_pos >= 0) {
		pd->msg->buf[real_pos] = i2c_op(pd, OP_RX);
	}

done:
	pd->pos++;
	return pd->pos == (pd->msg->len + 2);
}

static irqreturn_t sh_mobile_i2c_isr(int irq, void *dev_id)
{
	struct sh_mobile_i2c_data *pd = dev_id;
	unsigned char sr;
	int wakeup = 0;

	sr = iic_rd(pd, ICSR);
	pd->sr |= sr;

	dev_dbg(pd->dev, "i2c_isr 0x%02x 0x%02x %s %d %d!\n", sr, pd->sr,
		(pd->msg->flags & I2C_M_RD) ? "read" : "write",
		pd->pos, pd->msg->len);

	/* Kick off TxDMA once the address phase is done */
	if (pd->dma_direction == DMA_TO_DEVICE && pd->pos == 0)
		iic_set_clr(pd, ICIC, ICIC_TDMAE, 0);
	else if (sr & (ICSR_AL | ICSR_TACK))
		/* don't interrupt the transaction - continue to issue stop */
		iic_wr(pd, ICSR, sr & ~(ICSR_AL | ICSR_TACK));
	else if (pd->msg->flags & I2C_M_RD)
		wakeup = sh_mobile_i2c_isr_rx(pd);
	else
		wakeup = sh_mobile_i2c_isr_tx(pd);

	/* Kick off RxDMA once the address phase is done */
	if (pd->dma_direction == DMA_FROM_DEVICE && pd->pos == 1)
		iic_set_clr(pd, ICIC, ICIC_RDMAE, 0);

	if (sr & ICSR_WAIT)
		iic_wr(pd, ICSR, sr & ~ICSR_WAIT);

	if (wakeup) {
		pd->sr |= SW_DONE;
		if (!pd->atomic_xfer)
			wake_up(&pd->wait);
	}

	/* defeat write posting to avoid spurious WAIT interrupts */
	iic_rd(pd, ICSR);

	return IRQ_HANDLED;
}

static void sh_mobile_i2c_cleanup_dma(struct sh_mobile_i2c_data *pd, bool terminate)
{
	struct dma_chan *chan = pd->dma_direction == DMA_FROM_DEVICE
				? pd->dma_rx : pd->dma_tx;

	/* terminating the channel may sleep, so only do it from thread context */
	if (terminate)
		dmaengine_terminate_sync(chan);

	dma_unmap_single(chan->device->dev, sg_dma_address(&pd->sg),
			 pd->msg->len, pd->dma_direction);

	pd->dma_direction = DMA_NONE;
}

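/*
 * DMA completion callback: unmap the buffer and let the regular interrupt
 * path finish the message.  Setting pd->pos to the message length and
 * stop_after_dma makes the next interrupt issue the stop condition just as
 * if the data had been transferred by PIO.
 */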
static void sh_mobile_i2c_dma_callback(void *data)
{
	struct sh_mobile_i2c_data *pd = data;

	sh_mobile_i2c_cleanup_dma(pd, false);
	pd->pos = pd->msg->len;
	pd->stop_after_dma = true;

	iic_set_clr(pd, ICIC, 0, ICIC_TDMAE | ICIC_RDMAE);
}

static struct dma_chan *sh_mobile_i2c_request_dma_chan(struct device *dev,
				enum dma_transfer_direction dir, dma_addr_t port_addr)
{
	struct dma_chan *chan;
	struct dma_slave_config cfg;
	char *chan_name = dir == DMA_MEM_TO_DEV ? "tx" : "rx";
	int ret;

	chan = dma_request_chan(dev, chan_name);
	if (IS_ERR(chan)) {
		dev_dbg(dev, "request_channel failed for %s (%ld)\n", chan_name,
			PTR_ERR(chan));
		return chan;
	}

	memset(&cfg, 0, sizeof(cfg));
	cfg.direction = dir;
	if (dir == DMA_MEM_TO_DEV) {
		cfg.dst_addr = port_addr;
		cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	} else {
		cfg.src_addr = port_addr;
		cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	}

	ret = dmaengine_slave_config(chan, &cfg);
	if (ret) {
		dev_dbg(dev, "slave_config failed for %s (%d)\n", chan_name, ret);
		dma_release_channel(chan);
		return ERR_PTR(ret);
	}

	dev_dbg(dev, "got DMA channel for %s\n", chan_name);
	return chan;
}

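/*
 * DMA channels are requested lazily: probe() leaves dma_tx/dma_rx set to
 * ERR_PTR(-EPROBE_DEFER) and the first transfer that could use DMA tries
 * to acquire the channel here.  On any failure we silently fall back to PIO.
 */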
static void sh_mobile_i2c_xfer_dma(struct sh_mobile_i2c_data *pd)
{
	bool read = pd->msg->flags & I2C_M_RD;
	enum dma_data_direction dir = read ? DMA_FROM_DEVICE : DMA_TO_DEVICE;
	struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx;
	struct dma_async_tx_descriptor *txdesc;
	dma_addr_t dma_addr;
	dma_cookie_t cookie;

	if (PTR_ERR(chan) == -EPROBE_DEFER) {
		if (read)
			chan = pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM,
									   pd->res->start + ICDR);
		else
			chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,
									   pd->res->start + ICDR);
	}

	if (IS_ERR(chan))
		return;

	dma_addr = dma_map_single(chan->device->dev, pd->dma_buf, pd->msg->len, dir);
	if (dma_mapping_error(chan->device->dev, dma_addr)) {
		dev_dbg(pd->dev, "dma map failed, using PIO\n");
		return;
	}

	sg_dma_len(&pd->sg) = pd->msg->len;
	sg_dma_address(&pd->sg) = dma_addr;

	pd->dma_direction = dir;

	txdesc = dmaengine_prep_slave_sg(chan, &pd->sg, 1,
					 read ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txdesc) {
		dev_dbg(pd->dev, "dma prep slave sg failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd, false);
		return;
	}

	txdesc->callback = sh_mobile_i2c_dma_callback;
	txdesc->callback_param = pd;

	cookie = dmaengine_submit(txdesc);
	if (dma_submit_error(cookie)) {
		dev_dbg(pd->dev, "submitting dma failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd, false);
		return;
	}

	dma_async_issue_pending(chan);
}

static void start_ch(struct sh_mobile_i2c_data *pd, struct i2c_msg *usr_msg,
		     bool do_init)
{
	if (do_init) {
		/* Disable the channel */
		iic_wr(pd, ICCR, ICCR_SCP);

		/* Enable the channel */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);

		/* Program the SCL low/high timing (lower 8 bits) */
		iic_wr(pd, ICCL, pd->iccl & 0xff);
		iic_wr(pd, ICCH, pd->icch & 0xff);
	}

	pd->msg = usr_msg;
	pd->pos = -1;
	pd->sr = 0;

	if (pd->atomic_xfer)
		return;

	pd->dma_buf = i2c_get_dma_safe_msg_buf(pd->msg, 8);
	if (pd->dma_buf)
		sh_mobile_i2c_xfer_dma(pd);

	/* Enable all interrupt sources to start the transfer */
	iic_wr(pd, ICIC, ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
}

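/*
 * poll_dte() waits for the "data transfer enable" flag and is used between
 * messages when no stop condition is sent; poll_busy() waits for the bus to
 * go idle after a stop.  Both return -ENXIO on a missing acknowledge, and
 * poll_busy() additionally reports lost arbitration with -EAGAIN.
 */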
static int poll_dte(struct sh_mobile_i2c_data *pd)
{
	int i;

	for (i = 1000; i; i--) {
		u_int8_t val = iic_rd(pd, ICSR);

		if (val & ICSR_DTE)
			break;

		if (val & ICSR_TACK)
			return -ENXIO;

		udelay(10);
	}

	return i ? 0 : -ETIMEDOUT;
}

static int poll_busy(struct sh_mobile_i2c_data *pd)
{
	int i;

	for (i = 1000; i; i--) {
		u_int8_t val = iic_rd(pd, ICSR);

		dev_dbg(pd->dev, "val 0x%02x pd->sr 0x%02x\n", val, pd->sr);

		/*
		 * The interrupt handler may wake us up before the transfer
		 * is finished, so poll the hardware until the bus is idle.
		 */
		if (!(val & ICSR_BUSY)) {
			/* handle missing acknowledge and arbitration lost */
			val |= pd->sr;
			if (val & ICSR_TACK)
				return -ENXIO;
			if (val & ICSR_AL)
				return -EAGAIN;
			break;
		}

		udelay(10);
	}

	return i ? 0 : -ETIMEDOUT;
}

static int sh_mobile_xfer(struct sh_mobile_i2c_data *pd,
			  struct i2c_msg *msgs, int num)
{
	struct i2c_msg *msg;
	int err = 0;
	int i;
	long time_left;

	/* Wake up the device and enable its clock */
	pm_runtime_get_sync(pd->dev);

	/* Process all messages */
	for (i = 0; i < num; i++) {
		bool do_start = pd->send_stop || !i;
		msg = &msgs[i];
		pd->send_stop = i == num - 1 || msg->flags & I2C_M_STOP;
		pd->stop_after_dma = false;

		start_ch(pd, msg, do_start);

		if (do_start)
			i2c_op(pd, OP_START);

		if (pd->atomic_xfer) {
			unsigned long j = jiffies + pd->adap.timeout;

			time_left = time_before_eq(jiffies, j);
			while (time_left &&
			       !(pd->sr & (ICSR_TACK | SW_DONE))) {
				unsigned char sr = iic_rd(pd, ICSR);

				if (sr & (ICSR_AL | ICSR_TACK |
					  ICSR_WAIT | ICSR_DTE)) {
					sh_mobile_i2c_isr(0, pd);
					udelay(150);
				} else {
					cpu_relax();
				}
				time_left = time_before_eq(jiffies, j);
			}
		} else {
			/* The interrupt handler takes care of the rest... */
			time_left = wait_event_timeout(pd->wait,
						       pd->sr & (ICSR_TACK | SW_DONE),
						       pd->adap.timeout);

			/* 'stop_after_dma' tells if the DMA transfer completed */
			i2c_put_dma_safe_msg_buf(pd->dma_buf, pd->msg,
						 pd->stop_after_dma);
		}

		if (!time_left) {
			dev_err(pd->dev, "Transfer request timed out\n");
			if (pd->dma_direction != DMA_NONE)
				sh_mobile_i2c_cleanup_dma(pd, true);

			err = -ETIMEDOUT;
			break;
		}

		if (pd->send_stop)
			err = poll_busy(pd);
		else
			err = poll_dte(pd);
		if (err < 0)
			break;
	}

	/* Disable the channel */
	iic_wr(pd, ICCR, ICCR_SCP);

	/* Disable the clock and mark the device as idle */
	pm_runtime_put_sync(pd->dev);

	return err ?: num;
}

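/*
 * pd->atomic_xfer selects between the two entry points below: the normal
 * path sleeps on pd->wait while the interrupt handler drives the transfer,
 * the atomic path busy-polls ICSR and calls the interrupt handler directly.
 */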
static int sh_mobile_i2c_xfer(struct i2c_adapter *adapter,
			      struct i2c_msg *msgs,
			      int num)
{
	struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);

	pd->atomic_xfer = false;
	return sh_mobile_xfer(pd, msgs, num);
}

static int sh_mobile_i2c_xfer_atomic(struct i2c_adapter *adapter,
				     struct i2c_msg *msgs,
				     int num)
{
	struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);

	pd->atomic_xfer = true;
	return sh_mobile_xfer(pd, msgs, num);
}

static u32 sh_mobile_i2c_func(struct i2c_adapter *adapter)
{
	return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL | I2C_FUNC_PROTOCOL_MANGLING;
}

static const struct i2c_algorithm sh_mobile_i2c_algorithm = {
	.functionality = sh_mobile_i2c_func,
	.master_xfer = sh_mobile_i2c_xfer,
	.master_xfer_atomic = sh_mobile_i2c_xfer_atomic,
};

static const struct i2c_adapter_quirks sh_mobile_i2c_quirks = {
	.flags = I2C_AQ_NO_ZERO_LEN_READ,
};
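
/*
 * The r8a7740 needs this workaround sequence before the normal timing
 * setup; it is wired up as the ->setup() hook for that SoC only (see
 * r8a7740_dt_config below).
 */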
static int sh_mobile_i2c_r8a7740_workaround(struct sh_mobile_i2c_data *pd)
{
	iic_set_clr(pd, ICCR, ICCR_ICE, 0);
	iic_rd(pd, ICCR); /* dummy read */

	iic_set_clr(pd, ICSTART, ICSTART_ICSTART, 0);
	iic_rd(pd, ICSTART); /* dummy read */

	udelay(10);

	iic_wr(pd, ICCR, ICCR_SCP);
	iic_wr(pd, ICSTART, 0);

	udelay(10);

	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);
	iic_wr(pd, ICCR, 0);
	udelay(10);
	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);

	return sh_mobile_i2c_init(pd);
}

static const struct sh_mobile_dt_config default_dt_config = {
	.clks_per_count = 1,
	.setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config fast_clock_dt_config = {
	.clks_per_count = 2,
	.setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config v2_freq_calc_dt_config = {
	.clks_per_count = 2,
	.setup = sh_mobile_i2c_v2_init,
};

static const struct sh_mobile_dt_config r8a7740_dt_config = {
	.clks_per_count = 1,
	.setup = sh_mobile_i2c_r8a7740_workaround,
};

static const struct of_device_id sh_mobile_i2c_dt_ids[] = {
	{ .compatible = "renesas,iic-r8a73a4", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7740", .data = &r8a7740_dt_config },
	{ .compatible = "renesas,iic-r8a774c0", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7790", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7791", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7792", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7793", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7794", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7795", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a77990", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-sh73a0", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,rcar-gen2-iic", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,rcar-gen3-iic", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,rmobile-iic", .data = &default_dt_config },
	{},
};
MODULE_DEVICE_TABLE(of, sh_mobile_i2c_dt_ids);

static void sh_mobile_i2c_release_dma(struct sh_mobile_i2c_data *pd)
{
	if (!IS_ERR(pd->dma_tx)) {
		dma_release_channel(pd->dma_tx);
		pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
	}

	if (!IS_ERR(pd->dma_rx)) {
		dma_release_channel(pd->dma_rx);
		pd->dma_rx = ERR_PTR(-EPROBE_DEFER);
	}
}

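/*
 * On DT platforms the interrupts are enumerated with
 * platform_get_irq_optional(); legacy board files may instead provide IRQ
 * ranges as resources, in which case every IRQ in each range is requested.
 */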
static int sh_mobile_i2c_hook_irqs(struct platform_device *dev, struct sh_mobile_i2c_data *pd)
{
	struct device_node *np = dev_of_node(&dev->dev);
	int k = 0, ret;

	if (np) {
		int irq;

		while ((irq = platform_get_irq_optional(dev, k)) != -ENXIO) {
			if (irq < 0)
				return irq;
			ret = devm_request_irq(&dev->dev, irq, sh_mobile_i2c_isr,
					       0, dev_name(&dev->dev), pd);
			if (ret) {
				dev_err(&dev->dev, "cannot request IRQ %d\n", irq);
				return ret;
			}
			k++;
		}
	} else {
		struct resource *res;
		resource_size_t n;

		while ((res = platform_get_resource(dev, IORESOURCE_IRQ, k))) {
			for (n = res->start; n <= res->end; n++) {
				ret = devm_request_irq(&dev->dev, n, sh_mobile_i2c_isr,
						       0, dev_name(&dev->dev), pd);
				if (ret) {
					dev_err(&dev->dev, "cannot request IRQ %pa\n", &n);
					return ret;
				}
			}
			k++;
		}
	}

	return k > 0 ? 0 : -ENOENT;
}

static int sh_mobile_i2c_probe(struct platform_device *dev)
{
	struct sh_mobile_i2c_data *pd;
	struct i2c_adapter *adap;
	struct resource *res;
	const struct sh_mobile_dt_config *config;
	int ret;
	u32 bus_speed;

	pd = devm_kzalloc(&dev->dev, sizeof(struct sh_mobile_i2c_data), GFP_KERNEL);
	if (!pd)
		return -ENOMEM;

	pd->clk = devm_clk_get(&dev->dev, NULL);
	if (IS_ERR(pd->clk)) {
		dev_err(&dev->dev, "cannot get clock\n");
		return PTR_ERR(pd->clk);
	}

	ret = sh_mobile_i2c_hook_irqs(dev, pd);
	if (ret)
		return ret;

	pd->dev = &dev->dev;
	platform_set_drvdata(dev, pd);

	res = platform_get_resource(dev, IORESOURCE_MEM, 0);

	pd->res = res;
	pd->reg = devm_ioremap_resource(&dev->dev, res);
	if (IS_ERR(pd->reg))
		return PTR_ERR(pd->reg);

	ret = of_property_read_u32(dev->dev.of_node, "clock-frequency", &bus_speed);
	pd->bus_speed = (ret || !bus_speed) ? I2C_MAX_STANDARD_MODE_FREQ : bus_speed;
	pd->clks_per_count = 1;

	/* A larger register window indicates the ICCLB8/ICCHB8 bits in ICIC */
	if (resource_size(res) > 0x17)
		pd->flags |= IIC_FLAG_HAS_ICIC67;

	pm_runtime_enable(&dev->dev);
	pm_runtime_get_sync(&dev->dev);

	config = of_device_get_match_data(&dev->dev);
	if (config) {
		pd->clks_per_count = config->clks_per_count;
		ret = config->setup(pd);
	} else {
		ret = sh_mobile_i2c_init(pd);
	}

	pm_runtime_put_sync(&dev->dev);
	if (ret)
		return ret;

	/* Init DMA */
	sg_init_table(&pd->sg, 1);
	pd->dma_direction = DMA_NONE;
	pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);

	/* Setup the adapter */
	adap = &pd->adap;
	i2c_set_adapdata(adap, pd);

	adap->owner = THIS_MODULE;
	adap->algo = &sh_mobile_i2c_algorithm;
	adap->quirks = &sh_mobile_i2c_quirks;
	adap->dev.parent = &dev->dev;
	adap->retries = 5;
	adap->nr = dev->id;
	adap->dev.of_node = dev->dev.of_node;

	strscpy(adap->name, dev->name, sizeof(adap->name));

	spin_lock_init(&pd->lock);
	init_waitqueue_head(&pd->wait);

	ret = i2c_add_numbered_adapter(adap);
	if (ret < 0) {
		sh_mobile_i2c_release_dma(pd);
		return ret;
	}

	dev_info(&dev->dev, "I2C adapter %d, bus speed %lu Hz\n", adap->nr, pd->bus_speed);

	return 0;
}

static int sh_mobile_i2c_remove(struct platform_device *dev)
{
	struct sh_mobile_i2c_data *pd = platform_get_drvdata(dev);

	i2c_del_adapter(&pd->adap);
	sh_mobile_i2c_release_dma(pd);
	pm_runtime_disable(&dev->dev);
	return 0;
}

#ifdef CONFIG_PM_SLEEP
static int sh_mobile_i2c_suspend(struct device *dev)
{
	struct sh_mobile_i2c_data *pd = dev_get_drvdata(dev);

	i2c_mark_adapter_suspended(&pd->adap);
	return 0;
}

static int sh_mobile_i2c_resume(struct device *dev)
{
	struct sh_mobile_i2c_data *pd = dev_get_drvdata(dev);

	i2c_mark_adapter_resumed(&pd->adap);
	return 0;
}

static const struct dev_pm_ops sh_mobile_i2c_pm_ops = {
	SET_NOIRQ_SYSTEM_SLEEP_PM_OPS(sh_mobile_i2c_suspend,
				      sh_mobile_i2c_resume)
};

#define DEV_PM_OPS (&sh_mobile_i2c_pm_ops)
#else
#define DEV_PM_OPS NULL
#endif

static struct platform_driver sh_mobile_i2c_driver = {
	.driver	= {
		.name = "i2c-sh_mobile",
		.of_match_table = sh_mobile_i2c_dt_ids,
		.pm = DEV_PM_OPS,
	},
	.probe = sh_mobile_i2c_probe,
	.remove = sh_mobile_i2c_remove,
};

static int __init sh_mobile_i2c_adap_init(void)
{
	return platform_driver_register(&sh_mobile_i2c_driver);
}
subsys_initcall(sh_mobile_i2c_adap_init);

static void __exit sh_mobile_i2c_adap_exit(void)
{
	platform_driver_unregister(&sh_mobile_i2c_driver);
}
module_exit(sh_mobile_i2c_adap_exit);

MODULE_DESCRIPTION("SuperH Mobile I2C Bus Controller driver");
MODULE_AUTHOR("Magnus Damm");
MODULE_AUTHOR("Wolfram Sang");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:i2c-sh_mobile");