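/*
 * DesignWare HS OTG (dwc2) host controller driver - core host-mode layer.
 *
 * This file contains the host core functions: common and host interrupt
 * enabling, FIFO configuration, host channel initialization, transfer
 * start/continue/halt handling, URB enqueue/dequeue, and core/host-mode
 * initialization.
 */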
0042 #include <linux/kernel.h>
0043 #include <linux/module.h>
0044 #include <linux/spinlock.h>
0045 #include <linux/interrupt.h>
0046 #include <linux/platform_device.h>
0047 #include <linux/dma-mapping.h>
0048 #include <linux/delay.h>
0049 #include <linux/io.h>
0050 #include <linux/slab.h>
0051 #include <linux/usb.h>
0052
0053 #include <linux/usb/hcd.h>
0054 #include <linux/usb/ch11.h>
0055 #include <linux/usb/of.h>
0056
0057 #include "core.h"
0058 #include "hcd.h"
0059
0060
0061
0062
0063
0064
0065
0066
0067
0068
0069
0070
0071
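/*
 * dwc2_enable_common_interrupts() - Enables interrupts common to host and
 * device modes: mode mismatch, OTG, wakeup, suspend and session request,
 * plus RX FIFO level when not using DMA and connector ID status change
 * when no external ID pin control is used.
 */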
0072 static void dwc2_enable_common_interrupts(struct dwc2_hsotg *hsotg)
0073 {
0074 u32 intmsk;
0075
0076
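	/* Clear any pending OTG interrupts */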
0077 dwc2_writel(hsotg, 0xffffffff, GOTGINT);
0078
0079
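	/* Clear any pending interrupts */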
0080 dwc2_writel(hsotg, 0xffffffff, GINTSTS);
0081
0082
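	/* Enable the interrupts in the GINTMSK */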
0083 intmsk = GINTSTS_MODEMIS | GINTSTS_OTGINT;
0084
0085 if (!hsotg->params.host_dma)
0086 intmsk |= GINTSTS_RXFLVL;
0087 if (!hsotg->params.external_id_pin_ctl)
0088 intmsk |= GINTSTS_CONIDSTSCHNG;
0089
0090 intmsk |= GINTSTS_WKUPINT | GINTSTS_USBSUSP |
0091 GINTSTS_SESSREQINT;
0092
0093 if (dwc2_is_device_mode(hsotg) && hsotg->params.lpm)
0094 intmsk |= GINTSTS_LPMTRANRCVD;
0095
0096 dwc2_writel(hsotg, intmsk, GINTMSK);
0097 }
0098
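/*
 * dwc2_gahbcfg_init() - Initializes GAHBCFG according to the core's DMA
 * architecture. External DMA cores are not supported and return -EINVAL.
 */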
0099 static int dwc2_gahbcfg_init(struct dwc2_hsotg *hsotg)
0100 {
0101 u32 ahbcfg = dwc2_readl(hsotg, GAHBCFG);
0102
0103 switch (hsotg->hw_params.arch) {
0104 case GHWCFG2_EXT_DMA_ARCH:
0105 dev_err(hsotg->dev, "External DMA Mode not supported\n");
0106 return -EINVAL;
0107
0108 case GHWCFG2_INT_DMA_ARCH:
0109 dev_dbg(hsotg->dev, "Internal DMA Mode\n");
0110 if (hsotg->params.ahbcfg != -1) {
0111 ahbcfg &= GAHBCFG_CTRL_MASK;
0112 ahbcfg |= hsotg->params.ahbcfg &
0113 ~GAHBCFG_CTRL_MASK;
0114 }
0115 break;
0116
0117 case GHWCFG2_SLAVE_ONLY_ARCH:
0118 default:
0119 dev_dbg(hsotg->dev, "Slave Only Mode\n");
0120 break;
0121 }
0122
0123 if (hsotg->params.host_dma)
0124 ahbcfg |= GAHBCFG_DMA_EN;
0125 else
0126 hsotg->params.dma_desc_enable = false;
0127
0128 dwc2_writel(hsotg, ahbcfg, GAHBCFG);
0129
0130 return 0;
0131 }
0132
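/*
 * dwc2_gusbcfg_init() - Programs the HNP and SRP capability bits in GUSBCFG
 * based on the hardware operating mode and the configured OTG capabilities.
 */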
0133 static void dwc2_gusbcfg_init(struct dwc2_hsotg *hsotg)
0134 {
0135 u32 usbcfg;
0136
0137 usbcfg = dwc2_readl(hsotg, GUSBCFG);
0138 usbcfg &= ~(GUSBCFG_HNPCAP | GUSBCFG_SRPCAP);
0139
0140 switch (hsotg->hw_params.op_mode) {
0141 case GHWCFG2_OP_MODE_HNP_SRP_CAPABLE:
0142 if (hsotg->params.otg_caps.hnp_support &&
0143 hsotg->params.otg_caps.srp_support)
0144 usbcfg |= GUSBCFG_HNPCAP;
0145 fallthrough;
0146
0147 case GHWCFG2_OP_MODE_SRP_ONLY_CAPABLE:
0148 case GHWCFG2_OP_MODE_SRP_CAPABLE_DEVICE:
0149 case GHWCFG2_OP_MODE_SRP_CAPABLE_HOST:
0150 if (hsotg->params.otg_caps.srp_support)
0151 usbcfg |= GUSBCFG_SRPCAP;
0152 break;
0153
0154 case GHWCFG2_OP_MODE_NO_HNP_SRP_CAPABLE:
0155 case GHWCFG2_OP_MODE_NO_SRP_CAPABLE_DEVICE:
0156 case GHWCFG2_OP_MODE_NO_SRP_CAPABLE_HOST:
0157 default:
0158 break;
0159 }
0160
0161 dwc2_writel(hsotg, usbcfg, GUSBCFG);
0162 }
0163
0164 static int dwc2_vbus_supply_init(struct dwc2_hsotg *hsotg)
0165 {
0166 if (hsotg->vbus_supply)
0167 return regulator_enable(hsotg->vbus_supply);
0168
0169 return 0;
0170 }
0171
0172 static int dwc2_vbus_supply_exit(struct dwc2_hsotg *hsotg)
0173 {
0174 if (hsotg->vbus_supply)
0175 return regulator_disable(hsotg->vbus_supply);
0176
0177 return 0;
0178 }
0179
0180
0181
0182
0183
0184
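/*
 * dwc2_enable_host_interrupts() - Enables the Host mode interrupts
 *
 * @hsotg: Programming view of DWC_otg controller
 */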
0185 static void dwc2_enable_host_interrupts(struct dwc2_hsotg *hsotg)
0186 {
0187 u32 intmsk;
0188
0189 dev_dbg(hsotg->dev, "%s()\n", __func__);
0190
0191
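	/* Disable all interrupts */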
0192 dwc2_writel(hsotg, 0, GINTMSK);
0193 dwc2_writel(hsotg, 0, HAINTMSK);
0194
0195
0196 dwc2_enable_common_interrupts(hsotg);
0197
0198
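	/* Enable host mode interrupts without disturbing common interrupts */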
0199 intmsk = dwc2_readl(hsotg, GINTMSK);
0200 intmsk |= GINTSTS_DISCONNINT | GINTSTS_PRTINT | GINTSTS_HCHINT;
0201 dwc2_writel(hsotg, intmsk, GINTMSK);
0202 }
0203
0204
0205
0206
0207
0208
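/*
 * dwc2_disable_host_interrupts() - Disables the Host Mode interrupts
 *
 * @hsotg: Programming view of DWC_otg controller
 */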
0209 static void dwc2_disable_host_interrupts(struct dwc2_hsotg *hsotg)
0210 {
0211 u32 intmsk = dwc2_readl(hsotg, GINTMSK);
0212
0213
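	/* Disable host mode interrupts without disturbing common interrupts */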
0214 intmsk &= ~(GINTSTS_SOF | GINTSTS_PRTINT | GINTSTS_HCHINT |
0215 GINTSTS_PTXFEMP | GINTSTS_NPTXFEMP | GINTSTS_DISCONNINT);
0216 dwc2_writel(hsotg, intmsk, GINTMSK);
0217 }
0218
0219
0220
0221
0222
0223
0224
0225
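/*
 * dwc2_calculate_dynamic_fifo() - Checks that the configured host Rx,
 * non-periodic Tx and periodic Tx FIFO sizes fit within the total FIFO
 * depth reported by the hardware, falling back to default sizes if not.
 */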
0226 static void dwc2_calculate_dynamic_fifo(struct dwc2_hsotg *hsotg)
0227 {
0228 struct dwc2_core_params *params = &hsotg->params;
0229 struct dwc2_hw_params *hw = &hsotg->hw_params;
0230 u32 rxfsiz, nptxfsiz, ptxfsiz, total_fifo_size;
0231
0232 total_fifo_size = hw->total_fifo_size;
0233 rxfsiz = params->host_rx_fifo_size;
0234 nptxfsiz = params->host_nperio_tx_fifo_size;
0235 ptxfsiz = params->host_perio_tx_fifo_size;
0236
0237
0238
0239
0240
0241
0242
0243 if (total_fifo_size < (rxfsiz + nptxfsiz + ptxfsiz)) {
0244
0245
0246
0247
0248
0249
0250 rxfsiz = 516 + hw->host_channels;
0251
0252
0253
0254
0255
0256
0257 nptxfsiz = 256;
0258
0259
0260
0261
0262
0263
0264 ptxfsiz = 768;
0265
0266 params->host_rx_fifo_size = rxfsiz;
0267 params->host_nperio_tx_fifo_size = nptxfsiz;
0268 params->host_perio_tx_fifo_size = ptxfsiz;
0269 }
0270
0271
0272
0273
0274
0275
0276
0277
0278
0279
0280
0281 if (unlikely(total_fifo_size < (rxfsiz + nptxfsiz + ptxfsiz)))
0282 dev_err(hsotg->dev, "invalid fifo sizes\n");
0283 }
0284
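/*
 * dwc2_config_fifos() - Programs GRXFSIZ, GNPTXFSIZ and HPTXFSIZ with the
 * dynamic FIFO sizes when dynamic FIFO sizing is enabled.
 */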
0285 static void dwc2_config_fifos(struct dwc2_hsotg *hsotg)
0286 {
0287 struct dwc2_core_params *params = &hsotg->params;
0288 u32 nptxfsiz, hptxfsiz, dfifocfg, grxfsiz;
0289
0290 if (!params->enable_dynamic_fifo)
0291 return;
0292
0293 dwc2_calculate_dynamic_fifo(hsotg);
0294
0295
0296 grxfsiz = dwc2_readl(hsotg, GRXFSIZ);
0297 dev_dbg(hsotg->dev, "initial grxfsiz=%08x\n", grxfsiz);
0298 grxfsiz &= ~GRXFSIZ_DEPTH_MASK;
0299 grxfsiz |= params->host_rx_fifo_size <<
0300 GRXFSIZ_DEPTH_SHIFT & GRXFSIZ_DEPTH_MASK;
0301 dwc2_writel(hsotg, grxfsiz, GRXFSIZ);
0302 dev_dbg(hsotg->dev, "new grxfsiz=%08x\n",
0303 dwc2_readl(hsotg, GRXFSIZ));
0304
0305
0306 dev_dbg(hsotg->dev, "initial gnptxfsiz=%08x\n",
0307 dwc2_readl(hsotg, GNPTXFSIZ));
0308 nptxfsiz = params->host_nperio_tx_fifo_size <<
0309 FIFOSIZE_DEPTH_SHIFT & FIFOSIZE_DEPTH_MASK;
0310 nptxfsiz |= params->host_rx_fifo_size <<
0311 FIFOSIZE_STARTADDR_SHIFT & FIFOSIZE_STARTADDR_MASK;
0312 dwc2_writel(hsotg, nptxfsiz, GNPTXFSIZ);
0313 dev_dbg(hsotg->dev, "new gnptxfsiz=%08x\n",
0314 dwc2_readl(hsotg, GNPTXFSIZ));
0315
0316
0317 dev_dbg(hsotg->dev, "initial hptxfsiz=%08x\n",
0318 dwc2_readl(hsotg, HPTXFSIZ));
0319 hptxfsiz = params->host_perio_tx_fifo_size <<
0320 FIFOSIZE_DEPTH_SHIFT & FIFOSIZE_DEPTH_MASK;
0321 hptxfsiz |= (params->host_rx_fifo_size +
0322 params->host_nperio_tx_fifo_size) <<
0323 FIFOSIZE_STARTADDR_SHIFT & FIFOSIZE_STARTADDR_MASK;
0324 dwc2_writel(hsotg, hptxfsiz, HPTXFSIZ);
0325 dev_dbg(hsotg->dev, "new hptxfsiz=%08x\n",
0326 dwc2_readl(hsotg, HPTXFSIZ));
0327
0328 if (hsotg->params.en_multiple_tx_fifo &&
0329 hsotg->hw_params.snpsid >= DWC2_CORE_REV_2_91a) {
0330
0331
0332
0333
0334
0335 dfifocfg = dwc2_readl(hsotg, GDFIFOCFG);
0336 dfifocfg &= ~GDFIFOCFG_EPINFOBASE_MASK;
0337 dfifocfg |= (params->host_rx_fifo_size +
0338 params->host_nperio_tx_fifo_size +
0339 params->host_perio_tx_fifo_size) <<
0340 GDFIFOCFG_EPINFOBASE_SHIFT &
0341 GDFIFOCFG_EPINFOBASE_MASK;
0342 dwc2_writel(hsotg, dfifocfg, GDFIFOCFG);
0343 }
0344 }
0345
0346
0347
0348
0349
0350
0351
0352
0353
0354
0355
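/*
 * dwc2_calc_frame_interval() - Calculates the correct frame Interval value
 * for the HFIR register according to PHY type and speed
 *
 * @hsotg: Programming view of DWC_otg controller
 *
 * NOTE: The caller can modify the value of the HFIR register only after the
 * Port Enable bit of the Host Port Control and Status register (HPRT.EnaPort)
 * has been set.
 */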
0356 u32 dwc2_calc_frame_interval(struct dwc2_hsotg *hsotg)
0357 {
0358 u32 usbcfg;
0359 u32 hprt0;
0360 int clock = 60;
0361
0362 usbcfg = dwc2_readl(hsotg, GUSBCFG);
0363 hprt0 = dwc2_readl(hsotg, HPRT0);
0364
0365 if (!(usbcfg & GUSBCFG_PHYSEL) && (usbcfg & GUSBCFG_ULPI_UTMI_SEL) &&
0366 !(usbcfg & GUSBCFG_PHYIF16))
0367 clock = 60;
0368 if ((usbcfg & GUSBCFG_PHYSEL) && hsotg->hw_params.fs_phy_type ==
0369 GHWCFG2_FS_PHY_TYPE_SHARED_ULPI)
0370 clock = 48;
0371 if (!(usbcfg & GUSBCFG_PHY_LP_CLK_SEL) && !(usbcfg & GUSBCFG_PHYSEL) &&
0372 !(usbcfg & GUSBCFG_ULPI_UTMI_SEL) && (usbcfg & GUSBCFG_PHYIF16))
0373 clock = 30;
0374 if (!(usbcfg & GUSBCFG_PHY_LP_CLK_SEL) && !(usbcfg & GUSBCFG_PHYSEL) &&
0375 !(usbcfg & GUSBCFG_ULPI_UTMI_SEL) && !(usbcfg & GUSBCFG_PHYIF16))
0376 clock = 60;
0377 if ((usbcfg & GUSBCFG_PHY_LP_CLK_SEL) && !(usbcfg & GUSBCFG_PHYSEL) &&
0378 !(usbcfg & GUSBCFG_ULPI_UTMI_SEL) && (usbcfg & GUSBCFG_PHYIF16))
0379 clock = 48;
0380 if ((usbcfg & GUSBCFG_PHYSEL) && !(usbcfg & GUSBCFG_PHYIF16) &&
0381 hsotg->hw_params.fs_phy_type == GHWCFG2_FS_PHY_TYPE_SHARED_UTMI)
0382 clock = 48;
0383 if ((usbcfg & GUSBCFG_PHYSEL) &&
0384 hsotg->hw_params.fs_phy_type == GHWCFG2_FS_PHY_TYPE_DEDICATED)
0385 clock = 48;
0386
0387 if ((hprt0 & HPRT0_SPD_MASK) >> HPRT0_SPD_SHIFT == HPRT0_SPD_HIGH_SPEED)
0388
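		/* High speed case */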
0389 return 125 * clock - 1;
0390
0391
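	/* FS/LS case */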
0392 return 1000 * clock - 1;
0393 }
0394
0395
0396
0397
0398
0399
0400
0401
0402
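/*
 * dwc2_read_packet() - Reads a packet from the Rx FIFO into the destination
 * buffer
 *
 * @hsotg: Programming view of DWC_otg controller
 * @dest:  Destination buffer for the packet
 * @bytes: Number of bytes to copy to the destination
 */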
0403 void dwc2_read_packet(struct dwc2_hsotg *hsotg, u8 *dest, u16 bytes)
0404 {
0405 u32 *data_buf = (u32 *)dest;
0406 int word_count = (bytes + 3) / 4;
0407 int i;
0408
0409
0410
0411
0412
0413
0414
0415 dev_vdbg(hsotg->dev, "%s(%p,%p,%d)\n", __func__, hsotg, dest, bytes);
0416
0417 for (i = 0; i < word_count; i++, data_buf++)
0418 *data_buf = dwc2_readl(hsotg, HCFIFO(0));
0419 }
0420
0421
0422
0423
0424
0425
0426
0427
0428
0429
0430
0431
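/*
 * dwc2_dump_channel_info() - Dumps the state of a host channel and of the
 * non-periodic schedules. Compiled to a no-op unless VERBOSE_DEBUG is
 * defined.
 */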
0432 static void dwc2_dump_channel_info(struct dwc2_hsotg *hsotg,
0433 struct dwc2_host_chan *chan)
0434 {
0435 #ifdef VERBOSE_DEBUG
0436 int num_channels = hsotg->params.host_channels;
0437 struct dwc2_qh *qh;
0438 u32 hcchar;
0439 u32 hcsplt;
0440 u32 hctsiz;
0441 u32 hc_dma;
0442 int i;
0443
0444 if (!chan)
0445 return;
0446
0447 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
0448 hcsplt = dwc2_readl(hsotg, HCSPLT(chan->hc_num));
0449 hctsiz = dwc2_readl(hsotg, HCTSIZ(chan->hc_num));
0450 hc_dma = dwc2_readl(hsotg, HCDMA(chan->hc_num));
0451
0452 dev_dbg(hsotg->dev, " Assigned to channel %p:\n", chan);
0453 dev_dbg(hsotg->dev, " hcchar 0x%08x, hcsplt 0x%08x\n",
0454 hcchar, hcsplt);
0455 dev_dbg(hsotg->dev, " hctsiz 0x%08x, hc_dma 0x%08x\n",
0456 hctsiz, hc_dma);
0457 dev_dbg(hsotg->dev, " dev_addr: %d, ep_num: %d, ep_is_in: %d\n",
0458 chan->dev_addr, chan->ep_num, chan->ep_is_in);
0459 dev_dbg(hsotg->dev, " ep_type: %d\n", chan->ep_type);
0460 dev_dbg(hsotg->dev, " max_packet: %d\n", chan->max_packet);
0461 dev_dbg(hsotg->dev, " data_pid_start: %d\n", chan->data_pid_start);
0462 dev_dbg(hsotg->dev, " xfer_started: %d\n", chan->xfer_started);
0463 dev_dbg(hsotg->dev, " halt_status: %d\n", chan->halt_status);
0464 dev_dbg(hsotg->dev, " xfer_buf: %p\n", chan->xfer_buf);
0465 dev_dbg(hsotg->dev, " xfer_dma: %08lx\n",
0466 (unsigned long)chan->xfer_dma);
0467 dev_dbg(hsotg->dev, " xfer_len: %d\n", chan->xfer_len);
0468 dev_dbg(hsotg->dev, " qh: %p\n", chan->qh);
0469 dev_dbg(hsotg->dev, " NP inactive sched:\n");
0470 list_for_each_entry(qh, &hsotg->non_periodic_sched_inactive,
0471 qh_list_entry)
0472 dev_dbg(hsotg->dev, " %p\n", qh);
0473 dev_dbg(hsotg->dev, " NP waiting sched:\n");
0474 list_for_each_entry(qh, &hsotg->non_periodic_sched_waiting,
0475 qh_list_entry)
0476 dev_dbg(hsotg->dev, " %p\n", qh);
0477 dev_dbg(hsotg->dev, " NP active sched:\n");
0478 list_for_each_entry(qh, &hsotg->non_periodic_sched_active,
0479 qh_list_entry)
0480 dev_dbg(hsotg->dev, " %p\n", qh);
0481 dev_dbg(hsotg->dev, " Channels:\n");
0482 for (i = 0; i < num_channels; i++) {
0483 struct dwc2_host_chan *chan = hsotg->hc_ptr_array[i];
0484
0485 dev_dbg(hsotg->dev, " %2d: %p\n", i, chan);
0486 }
0487 #endif
0488 }
0489
0490 static int _dwc2_hcd_start(struct usb_hcd *hcd);
0491
0492 static void dwc2_host_start(struct dwc2_hsotg *hsotg)
0493 {
0494 struct usb_hcd *hcd = dwc2_hsotg_to_hcd(hsotg);
0495
0496 hcd->self.is_b_host = dwc2_hcd_is_b_host(hsotg);
0497 _dwc2_hcd_start(hcd);
0498 }
0499
0500 static void dwc2_host_disconnect(struct dwc2_hsotg *hsotg)
0501 {
0502 struct usb_hcd *hcd = dwc2_hsotg_to_hcd(hsotg);
0503
0504 hcd->self.is_b_host = 0;
0505 }
0506
0507 static void dwc2_host_hub_info(struct dwc2_hsotg *hsotg, void *context,
0508 int *hub_addr, int *hub_port)
0509 {
0510 struct urb *urb = context;
0511
0512 if (urb->dev->tt)
0513 *hub_addr = urb->dev->tt->hub->devnum;
0514 else
0515 *hub_addr = 0;
0516 *hub_port = urb->dev->ttport;
0517 }
0518
0519
0520
0521
0522
0523
0524
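/*
 * dwc2_hc_enable_slave_ints() - Sets the channel interrupt mask for a
 * transfer in Slave (non-DMA) mode, based on the endpoint type, direction,
 * split state and error state of the channel.
 */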
0525 static void dwc2_hc_enable_slave_ints(struct dwc2_hsotg *hsotg,
0526 struct dwc2_host_chan *chan)
0527 {
0528 u32 hcintmsk = HCINTMSK_CHHLTD;
0529
0530 switch (chan->ep_type) {
0531 case USB_ENDPOINT_XFER_CONTROL:
0532 case USB_ENDPOINT_XFER_BULK:
0533 dev_vdbg(hsotg->dev, "control/bulk\n");
0534 hcintmsk |= HCINTMSK_XFERCOMPL;
0535 hcintmsk |= HCINTMSK_STALL;
0536 hcintmsk |= HCINTMSK_XACTERR;
0537 hcintmsk |= HCINTMSK_DATATGLERR;
0538 if (chan->ep_is_in) {
0539 hcintmsk |= HCINTMSK_BBLERR;
0540 } else {
0541 hcintmsk |= HCINTMSK_NAK;
0542 hcintmsk |= HCINTMSK_NYET;
0543 if (chan->do_ping)
0544 hcintmsk |= HCINTMSK_ACK;
0545 }
0546
0547 if (chan->do_split) {
0548 hcintmsk |= HCINTMSK_NAK;
0549 if (chan->complete_split)
0550 hcintmsk |= HCINTMSK_NYET;
0551 else
0552 hcintmsk |= HCINTMSK_ACK;
0553 }
0554
0555 if (chan->error_state)
0556 hcintmsk |= HCINTMSK_ACK;
0557 break;
0558
0559 case USB_ENDPOINT_XFER_INT:
0560 if (dbg_perio())
0561 dev_vdbg(hsotg->dev, "intr\n");
0562 hcintmsk |= HCINTMSK_XFERCOMPL;
0563 hcintmsk |= HCINTMSK_NAK;
0564 hcintmsk |= HCINTMSK_STALL;
0565 hcintmsk |= HCINTMSK_XACTERR;
0566 hcintmsk |= HCINTMSK_DATATGLERR;
0567 hcintmsk |= HCINTMSK_FRMOVRUN;
0568
0569 if (chan->ep_is_in)
0570 hcintmsk |= HCINTMSK_BBLERR;
0571 if (chan->error_state)
0572 hcintmsk |= HCINTMSK_ACK;
0573 if (chan->do_split) {
0574 if (chan->complete_split)
0575 hcintmsk |= HCINTMSK_NYET;
0576 else
0577 hcintmsk |= HCINTMSK_ACK;
0578 }
0579 break;
0580
0581 case USB_ENDPOINT_XFER_ISOC:
0582 if (dbg_perio())
0583 dev_vdbg(hsotg->dev, "isoc\n");
0584 hcintmsk |= HCINTMSK_XFERCOMPL;
0585 hcintmsk |= HCINTMSK_FRMOVRUN;
0586 hcintmsk |= HCINTMSK_ACK;
0587
0588 if (chan->ep_is_in) {
0589 hcintmsk |= HCINTMSK_XACTERR;
0590 hcintmsk |= HCINTMSK_BBLERR;
0591 }
0592 break;
0593 default:
0594 dev_err(hsotg->dev, "## Unknown EP type ##\n");
0595 break;
0596 }
0597
0598 dwc2_writel(hsotg, hcintmsk, HCINTMSK(chan->hc_num));
0599 if (dbg_hc(chan))
0600 dev_vdbg(hsotg->dev, "set HCINTMSK to %08x\n", hcintmsk);
0601 }
0602
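/*
 * dwc2_hc_enable_dma_ints() - Sets the channel interrupt mask for a transfer
 * in DMA mode: Channel Halted always, AHB Error in buffer DMA mode, Transfer
 * Complete for isochronous transfers in descriptor DMA mode, and ACK plus
 * NAK/Data Toggle Error when recovering from a previous transaction error.
 */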
0603 static void dwc2_hc_enable_dma_ints(struct dwc2_hsotg *hsotg,
0604 struct dwc2_host_chan *chan)
0605 {
0606 u32 hcintmsk = HCINTMSK_CHHLTD;
0607
0608
0609
0610
0611
0612 if (!hsotg->params.dma_desc_enable) {
0613 if (dbg_hc(chan))
0614 dev_vdbg(hsotg->dev, "desc DMA disabled\n");
0615 hcintmsk |= HCINTMSK_AHBERR;
0616 } else {
0617 if (dbg_hc(chan))
0618 dev_vdbg(hsotg->dev, "desc DMA enabled\n");
0619 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC)
0620 hcintmsk |= HCINTMSK_XFERCOMPL;
0621 }
0622
0623 if (chan->error_state && !chan->do_split &&
0624 chan->ep_type != USB_ENDPOINT_XFER_ISOC) {
0625 if (dbg_hc(chan))
0626 dev_vdbg(hsotg->dev, "setting ACK\n");
0627 hcintmsk |= HCINTMSK_ACK;
0628 if (chan->ep_is_in) {
0629 hcintmsk |= HCINTMSK_DATATGLERR;
0630 if (chan->ep_type != USB_ENDPOINT_XFER_INT)
0631 hcintmsk |= HCINTMSK_NAK;
0632 }
0633 }
0634
0635 dwc2_writel(hsotg, hcintmsk, HCINTMSK(chan->hc_num));
0636 if (dbg_hc(chan))
0637 dev_vdbg(hsotg->dev, "set HCINTMSK to %08x\n", hcintmsk);
0638 }
0639
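/*
 * dwc2_hc_enable_ints() - Enables the interrupts for a host channel transfer
 * and unmasks the channel in HAINTMSK and the host channel interrupt in
 * GINTMSK.
 */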
0640 static void dwc2_hc_enable_ints(struct dwc2_hsotg *hsotg,
0641 struct dwc2_host_chan *chan)
0642 {
0643 u32 intmsk;
0644
0645 if (hsotg->params.host_dma) {
0646 if (dbg_hc(chan))
0647 dev_vdbg(hsotg->dev, "DMA enabled\n");
0648 dwc2_hc_enable_dma_ints(hsotg, chan);
0649 } else {
0650 if (dbg_hc(chan))
0651 dev_vdbg(hsotg->dev, "DMA disabled\n");
0652 dwc2_hc_enable_slave_ints(hsotg, chan);
0653 }
0654
0655
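	/* Enable the top level host channel interrupt */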
0656 intmsk = dwc2_readl(hsotg, HAINTMSK);
0657 intmsk |= 1 << chan->hc_num;
0658 dwc2_writel(hsotg, intmsk, HAINTMSK);
0659 if (dbg_hc(chan))
0660 dev_vdbg(hsotg->dev, "set HAINTMSK to %08x\n", intmsk);
0661
0662
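	/* Make sure host channel interrupts are enabled */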
0663 intmsk = dwc2_readl(hsotg, GINTMSK);
0664 intmsk |= GINTSTS_HCHINT;
0665 dwc2_writel(hsotg, intmsk, GINTMSK);
0666 if (dbg_hc(chan))
0667 dev_vdbg(hsotg->dev, "set GINTMSK to %08x\n", intmsk);
0668 }
0669
0670
0671
0672
0673
0674
0675
0676
0677
0678
0679
0680
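/*
 * dwc2_hc_init() - Prepares a host channel for transferring packets to/from
 * a specific endpoint
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information needed to initialize the host channel
 *
 * The HCCHARn register is set up with the characteristics specified in chan.
 * Host channel interrupts that may need to be serviced while this transfer is
 * in progress are enabled.
 */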
0681 static void dwc2_hc_init(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan)
0682 {
0683 u8 hc_num = chan->hc_num;
0684 u32 hcintmsk;
0685 u32 hcchar;
0686 u32 hcsplt = 0;
0687
0688 if (dbg_hc(chan))
0689 dev_vdbg(hsotg->dev, "%s()\n", __func__);
0690
0691
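	/* Clear old interrupt conditions for this host channel */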
0692 hcintmsk = 0xffffffff;
0693 hcintmsk &= ~HCINTMSK_RESERVED14_31;
0694 dwc2_writel(hsotg, hcintmsk, HCINT(hc_num));
0695
0696
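	/* Enable channel interrupts required for this transfer */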
0697 dwc2_hc_enable_ints(hsotg, chan);
0698
0699
0700
0701
0702
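	/*
	 * Program the HCCHARn register with the endpoint characteristics for
	 * the current transfer
	 */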
0703 hcchar = chan->dev_addr << HCCHAR_DEVADDR_SHIFT & HCCHAR_DEVADDR_MASK;
0704 hcchar |= chan->ep_num << HCCHAR_EPNUM_SHIFT & HCCHAR_EPNUM_MASK;
0705 if (chan->ep_is_in)
0706 hcchar |= HCCHAR_EPDIR;
0707 if (chan->speed == USB_SPEED_LOW)
0708 hcchar |= HCCHAR_LSPDDEV;
0709 hcchar |= chan->ep_type << HCCHAR_EPTYPE_SHIFT & HCCHAR_EPTYPE_MASK;
0710 hcchar |= chan->max_packet << HCCHAR_MPS_SHIFT & HCCHAR_MPS_MASK;
0711 dwc2_writel(hsotg, hcchar, HCCHAR(hc_num));
0712 if (dbg_hc(chan)) {
0713 dev_vdbg(hsotg->dev, "set HCCHAR(%d) to %08x\n",
0714 hc_num, hcchar);
0715
0716 dev_vdbg(hsotg->dev, "%s: Channel %d\n",
0717 __func__, hc_num);
0718 dev_vdbg(hsotg->dev, " Dev Addr: %d\n",
0719 chan->dev_addr);
0720 dev_vdbg(hsotg->dev, " Ep Num: %d\n",
0721 chan->ep_num);
0722 dev_vdbg(hsotg->dev, " Is In: %d\n",
0723 chan->ep_is_in);
0724 dev_vdbg(hsotg->dev, " Is Low Speed: %d\n",
0725 chan->speed == USB_SPEED_LOW);
0726 dev_vdbg(hsotg->dev, " Ep Type: %d\n",
0727 chan->ep_type);
0728 dev_vdbg(hsotg->dev, " Max Pkt: %d\n",
0729 chan->max_packet);
0730 }
0731
0732
0733 if (chan->do_split) {
0734 if (dbg_hc(chan))
0735 dev_vdbg(hsotg->dev,
0736 "Programming HC %d with split --> %s\n",
0737 hc_num,
0738 chan->complete_split ? "CSPLIT" : "SSPLIT");
0739 if (chan->complete_split)
0740 hcsplt |= HCSPLT_COMPSPLT;
0741 hcsplt |= chan->xact_pos << HCSPLT_XACTPOS_SHIFT &
0742 HCSPLT_XACTPOS_MASK;
0743 hcsplt |= chan->hub_addr << HCSPLT_HUBADDR_SHIFT &
0744 HCSPLT_HUBADDR_MASK;
0745 hcsplt |= chan->hub_port << HCSPLT_PRTADDR_SHIFT &
0746 HCSPLT_PRTADDR_MASK;
0747 if (dbg_hc(chan)) {
0748 dev_vdbg(hsotg->dev, " comp split %d\n",
0749 chan->complete_split);
0750 dev_vdbg(hsotg->dev, " xact pos %d\n",
0751 chan->xact_pos);
0752 dev_vdbg(hsotg->dev, " hub addr %d\n",
0753 chan->hub_addr);
0754 dev_vdbg(hsotg->dev, " hub port %d\n",
0755 chan->hub_port);
0756 dev_vdbg(hsotg->dev, " is_in %d\n",
0757 chan->ep_is_in);
0758 dev_vdbg(hsotg->dev, " Max Pkt %d\n",
0759 chan->max_packet);
0760 dev_vdbg(hsotg->dev, " xferlen %d\n",
0761 chan->xfer_len);
0762 }
0763 }
0764
0765 dwc2_writel(hsotg, hcsplt, HCSPLT(hc_num));
0766 }
0767
0768
0769
0770
0771
0772
0773
0774
0775
0776
0777
0778
0779
0780
0781
0782
0783
0784
0785
0786
0787
0788
0789
0790
0791
0792
0793
0794
0795
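/*
 * dwc2_hc_halt() - Attempts to halt a host channel
 *
 * @hsotg:       Controller register interface
 * @chan:        Host channel to halt
 * @halt_status: Reason for halting the channel
 *
 * In Slave mode the channel is enabled or disabled depending on request
 * queue space; in DMA mode the channel is simply disabled. In either case a
 * Channel Halted interrupt is generated when the halt completes, unless the
 * channel had not actually started yet.
 */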
0796 void dwc2_hc_halt(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan,
0797 enum dwc2_halt_status halt_status)
0798 {
0799 u32 nptxsts, hptxsts, hcchar;
0800
0801 if (dbg_hc(chan))
0802 dev_vdbg(hsotg->dev, "%s()\n", __func__);
0803
0804
0805
0806
0807
0808
0809
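	/*
	 * In buffer DMA or external DMA mode a non-split periodic channel
	 * can't be halted here. At the end of the next uframe/frame (in the
	 * worst case), the core generates a channel halted interrupt and
	 * disables the channel automatically.
	 */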
0810 if ((hsotg->params.g_dma && !hsotg->params.g_dma_desc) ||
0811 hsotg->hw_params.arch == GHWCFG2_EXT_DMA_ARCH) {
0812 if (!chan->do_split &&
0813 (chan->ep_type == USB_ENDPOINT_XFER_ISOC ||
0814 chan->ep_type == USB_ENDPOINT_XFER_INT)) {
0815 dev_err(hsotg->dev, "%s() Channel can't be halted\n",
0816 __func__);
0817 return;
0818 }
0819 }
0820
0821 if (halt_status == DWC2_HC_XFER_NO_HALT_STATUS)
0822 dev_err(hsotg->dev, "!!! halt_status = %d !!!\n", halt_status);
0823
0824 if (halt_status == DWC2_HC_XFER_URB_DEQUEUE ||
0825 halt_status == DWC2_HC_XFER_AHB_ERR) {
0826
0827
0828
0829
0830
0831
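		/*
		 * Disable all channel interrupts except Ch Halted. The QTD
		 * and QH state associated with this transfer has been cleared
		 * (in the case of URB_DEQUEUE), so the channel needs to be
		 * shut down carefully to prevent crashes.
		 */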
0832 u32 hcintmsk = HCINTMSK_CHHLTD;
0833
0834 dev_vdbg(hsotg->dev, "dequeue/error\n");
0835 dwc2_writel(hsotg, hcintmsk, HCINTMSK(chan->hc_num));
0836
0837
0838
0839
0840
0841
0842 dwc2_writel(hsotg, ~hcintmsk, HCINT(chan->hc_num));
0843
0844
0845
0846
0847
0848
0849 chan->halt_status = halt_status;
0850
0851 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
0852 if (!(hcchar & HCCHAR_CHENA)) {
0853
0854
0855
0856
0857
0858
0859
0860
0861
0862
0863
0864 return;
0865 }
0866 }
0867 if (chan->halt_pending) {
0868
0869
0870
0871
0872
0873 dev_vdbg(hsotg->dev,
0874 "*** %s: Channel %d, chan->halt_pending already set ***\n",
0875 __func__, chan->hc_num);
0876 return;
0877 }
0878
0879 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
0880
0881
0882
0883 if (!hsotg->params.dma_desc_enable) {
0884 if (dbg_hc(chan))
0885 dev_vdbg(hsotg->dev, "desc DMA disabled\n");
0886 hcchar |= HCCHAR_CHENA;
0887 } else {
0888 if (dbg_hc(chan))
0889 dev_dbg(hsotg->dev, "desc DMA enabled\n");
0890 }
0891 hcchar |= HCCHAR_CHDIS;
0892
0893 if (!hsotg->params.host_dma) {
0894 if (dbg_hc(chan))
0895 dev_vdbg(hsotg->dev, "DMA not enabled\n");
0896 hcchar |= HCCHAR_CHENA;
0897
0898
0899 if (chan->ep_type == USB_ENDPOINT_XFER_CONTROL ||
0900 chan->ep_type == USB_ENDPOINT_XFER_BULK) {
0901 dev_vdbg(hsotg->dev, "control/bulk\n");
0902 nptxsts = dwc2_readl(hsotg, GNPTXSTS);
0903 if ((nptxsts & TXSTS_QSPCAVAIL_MASK) == 0) {
0904 dev_vdbg(hsotg->dev, "Disabling channel\n");
0905 hcchar &= ~HCCHAR_CHENA;
0906 }
0907 } else {
0908 if (dbg_perio())
0909 dev_vdbg(hsotg->dev, "isoc/intr\n");
0910 hptxsts = dwc2_readl(hsotg, HPTXSTS);
0911 if ((hptxsts & TXSTS_QSPCAVAIL_MASK) == 0 ||
0912 hsotg->queuing_high_bandwidth) {
0913 if (dbg_perio())
0914 dev_vdbg(hsotg->dev, "Disabling channel\n");
0915 hcchar &= ~HCCHAR_CHENA;
0916 }
0917 }
0918 } else {
0919 if (dbg_hc(chan))
0920 dev_vdbg(hsotg->dev, "DMA enabled\n");
0921 }
0922
0923 dwc2_writel(hsotg, hcchar, HCCHAR(chan->hc_num));
0924 chan->halt_status = halt_status;
0925
0926 if (hcchar & HCCHAR_CHENA) {
0927 if (dbg_hc(chan))
0928 dev_vdbg(hsotg->dev, "Channel enabled\n");
0929 chan->halt_pending = 1;
0930 chan->halt_on_queue = 0;
0931 } else {
0932 if (dbg_hc(chan))
0933 dev_vdbg(hsotg->dev, "Channel disabled\n");
0934 chan->halt_on_queue = 1;
0935 }
0936
0937 if (dbg_hc(chan)) {
0938 dev_vdbg(hsotg->dev, "%s: Channel %d\n", __func__,
0939 chan->hc_num);
0940 dev_vdbg(hsotg->dev, " hcchar: 0x%08x\n",
0941 hcchar);
0942 dev_vdbg(hsotg->dev, " halt_pending: %d\n",
0943 chan->halt_pending);
0944 dev_vdbg(hsotg->dev, " halt_on_queue: %d\n",
0945 chan->halt_on_queue);
0946 dev_vdbg(hsotg->dev, " halt_status: %d\n",
0947 chan->halt_status);
0948 }
0949 }
0950
0951
0952
0953
0954
0955
0956
0957
0958
0959
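/*
 * dwc2_hc_cleanup() - Clears the transfer state for a host channel
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Identifies the host channel to clean up
 *
 * This function is normally called after a transfer is done and the host
 * channel is being released.
 */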
0960 void dwc2_hc_cleanup(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan)
0961 {
0962 u32 hcintmsk;
0963
0964 chan->xfer_started = 0;
0965
0966 list_del_init(&chan->split_order_list_entry);
0967
0968
0969
0970
0971
0972 dwc2_writel(hsotg, 0, HCINTMSK(chan->hc_num));
0973 hcintmsk = 0xffffffff;
0974 hcintmsk &= ~HCINTMSK_RESERVED14_31;
0975 dwc2_writel(hsotg, hcintmsk, HCINT(chan->hc_num));
0976 }
0977
0978
0979
0980
0981
0982
0983
0984
0985
0986
0987
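/*
 * dwc2_hc_set_even_odd_frame() - Sets the channel property that indicates in
 * which frame a periodic transfer should occur
 *
 * @hsotg:  Programming view of DWC_otg controller
 * @chan:   Identifies the host channel to set up and its properties
 * @hcchar: Current value of the HCCHAR register for the specified channel
 */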
0988 static void dwc2_hc_set_even_odd_frame(struct dwc2_hsotg *hsotg,
0989 struct dwc2_host_chan *chan, u32 *hcchar)
0990 {
0991 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
0992 chan->ep_type == USB_ENDPOINT_XFER_ISOC) {
0993 int host_speed;
0994 int xfer_ns;
0995 int xfer_us;
0996 int bytes_in_fifo;
0997 u16 fifo_space;
0998 u16 frame_number;
0999 u16 wire_frame;
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025 host_speed = (chan->speed != USB_SPEED_HIGH &&
1026 !chan->do_split) ? chan->speed : USB_SPEED_HIGH;
1027
1028
1029 fifo_space = (dwc2_readl(hsotg, HPTXSTS) &
1030 TXSTS_FSPCAVAIL_MASK) >> TXSTS_FSPCAVAIL_SHIFT;
1031 bytes_in_fifo = sizeof(u32) *
1032 (hsotg->params.host_perio_tx_fifo_size -
1033 fifo_space);
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043 xfer_ns = usb_calc_bus_time(host_speed, false, false,
1044 chan->xfer_len + bytes_in_fifo);
1045 xfer_us = NS_TO_US(xfer_ns);
1046
1047
1048 frame_number = dwc2_hcd_get_future_frame_number(hsotg, xfer_us);
1049
1050
1051 wire_frame = dwc2_frame_num_inc(chan->qh->next_active_frame, 1);
1052
1053
1054
1055
1056
1057
1058
1059
1060 if (dwc2_frame_num_gt(frame_number, wire_frame)) {
1061 dwc2_sch_vdbg(hsotg,
1062 "QH=%p EO MISS fr=%04x=>%04x (%+d)\n",
1063 chan->qh, wire_frame, frame_number,
1064 dwc2_frame_num_dec(frame_number,
1065 wire_frame));
1066 wire_frame = frame_number;
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076 chan->qh->next_active_frame =
1077 dwc2_frame_num_dec(frame_number, 1);
1078 }
1079
1080 if (wire_frame & 1)
1081 *hcchar |= HCCHAR_ODDFRM;
1082 else
1083 *hcchar &= ~HCCHAR_ODDFRM;
1084 }
1085 }
1086
1087 static void dwc2_set_pid_isoc(struct dwc2_host_chan *chan)
1088 {
1089
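	/* Set up the initial PID for the transfer */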
1090 if (chan->speed == USB_SPEED_HIGH) {
1091 if (chan->ep_is_in) {
1092 if (chan->multi_count == 1)
1093 chan->data_pid_start = DWC2_HC_PID_DATA0;
1094 else if (chan->multi_count == 2)
1095 chan->data_pid_start = DWC2_HC_PID_DATA1;
1096 else
1097 chan->data_pid_start = DWC2_HC_PID_DATA2;
1098 } else {
1099 if (chan->multi_count == 1)
1100 chan->data_pid_start = DWC2_HC_PID_DATA0;
1101 else
1102 chan->data_pid_start = DWC2_HC_PID_MDATA;
1103 }
1104 } else {
1105 chan->data_pid_start = DWC2_HC_PID_DATA0;
1106 }
1107 }
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
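/*
 * dwc2_hc_write_packet() - Writes a packet into the Tx FIFO associated with
 * the Host Channel
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information needed to initialize the host channel
 *
 * This function should only be called in Slave mode. For a channel associated
 * with a non-periodic EP, the non-periodic Tx FIFO is written. For a channel
 * associated with a periodic EP, the periodic Tx FIFO is written.
 */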
1123 static void dwc2_hc_write_packet(struct dwc2_hsotg *hsotg,
1124 struct dwc2_host_chan *chan)
1125 {
1126 u32 i;
1127 u32 remaining_count;
1128 u32 byte_count;
1129 u32 dword_count;
1130 u32 *data_buf = (u32 *)chan->xfer_buf;
1131
1132 if (dbg_hc(chan))
1133 dev_vdbg(hsotg->dev, "%s()\n", __func__);
1134
1135 remaining_count = chan->xfer_len - chan->xfer_count;
1136 if (remaining_count > chan->max_packet)
1137 byte_count = chan->max_packet;
1138 else
1139 byte_count = remaining_count;
1140
1141 dword_count = (byte_count + 3) / 4;
1142
1143 if (((unsigned long)data_buf & 0x3) == 0) {
1144
1145 for (i = 0; i < dword_count; i++, data_buf++)
1146 dwc2_writel(hsotg, *data_buf, HCFIFO(chan->hc_num));
1147 } else {
1148
1149 for (i = 0; i < dword_count; i++, data_buf++) {
1150 u32 data = data_buf[0] | data_buf[1] << 8 |
1151 data_buf[2] << 16 | data_buf[3] << 24;
1152 dwc2_writel(hsotg, data, HCFIFO(chan->hc_num));
1153 }
1154 }
1155
1156 chan->xfer_count += byte_count;
1157 chan->xfer_buf += byte_count;
1158 }
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
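/*
 * dwc2_hc_do_ping() - Starts a PING transfer
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information needed to initialize the host channel
 *
 * This function should only be called in Slave mode. The Do Ping bit is set
 * in the HCTSIZ register, then the channel is enabled.
 */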
1169 static void dwc2_hc_do_ping(struct dwc2_hsotg *hsotg,
1170 struct dwc2_host_chan *chan)
1171 {
1172 u32 hcchar;
1173 u32 hctsiz;
1174
1175 if (dbg_hc(chan))
1176 dev_vdbg(hsotg->dev, "%s: Channel %d\n", __func__,
1177 chan->hc_num);
1178
1179 hctsiz = TSIZ_DOPNG;
1180 hctsiz |= 1 << TSIZ_PKTCNT_SHIFT;
1181 dwc2_writel(hsotg, hctsiz, HCTSIZ(chan->hc_num));
1182
1183 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
1184 hcchar |= HCCHAR_CHENA;
1185 hcchar &= ~HCCHAR_CHDIS;
1186 dwc2_writel(hsotg, hcchar, HCCHAR(chan->hc_num));
1187 }
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
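/*
 * dwc2_hc_start_transfer() - Does the setup for a data transfer for a host
 * channel and starts the transfer
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information needed to initialize the host channel
 *
 * In Slave mode, the channel is set up here and, for an OUT transfer, the
 * first data packet is written to the channel's FIFO. In DMA mode, the DMA
 * address is written to HCDMA and the controller moves the data itself.
 */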
1222 static void dwc2_hc_start_transfer(struct dwc2_hsotg *hsotg,
1223 struct dwc2_host_chan *chan)
1224 {
1225 u32 max_hc_xfer_size = hsotg->params.max_transfer_size;
1226 u16 max_hc_pkt_count = hsotg->params.max_packet_count;
1227 u32 hcchar;
1228 u32 hctsiz = 0;
1229 u16 num_packets;
1230 u32 ec_mc;
1231
1232 if (dbg_hc(chan))
1233 dev_vdbg(hsotg->dev, "%s()\n", __func__);
1234
1235 if (chan->do_ping) {
1236 if (!hsotg->params.host_dma) {
1237 if (dbg_hc(chan))
1238 dev_vdbg(hsotg->dev, "ping, no DMA\n");
1239 dwc2_hc_do_ping(hsotg, chan);
1240 chan->xfer_started = 1;
1241 return;
1242 }
1243
1244 if (dbg_hc(chan))
1245 dev_vdbg(hsotg->dev, "ping, DMA\n");
1246
1247 hctsiz |= TSIZ_DOPNG;
1248 }
1249
1250 if (chan->do_split) {
1251 if (dbg_hc(chan))
1252 dev_vdbg(hsotg->dev, "split\n");
1253 num_packets = 1;
1254
1255 if (chan->complete_split && !chan->ep_is_in)
1256
1257
1258
1259
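			/*
			 * For CSPLIT OUT transfers, set the size to 0 so the
			 * core doesn't expect any data written to the FIFO
			 */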
1260 chan->xfer_len = 0;
1261 else if (chan->ep_is_in || chan->xfer_len > chan->max_packet)
1262 chan->xfer_len = chan->max_packet;
1263 else if (!chan->ep_is_in && chan->xfer_len > 188)
1264 chan->xfer_len = 188;
1265
1266 hctsiz |= chan->xfer_len << TSIZ_XFERSIZE_SHIFT &
1267 TSIZ_XFERSIZE_MASK;
1268
1269
1270 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
1271 chan->ep_type == USB_ENDPOINT_XFER_ISOC)
1272 ec_mc = 3;
1273 else
1274 ec_mc = 1;
1275 } else {
1276 if (dbg_hc(chan))
1277 dev_vdbg(hsotg->dev, "no split\n");
1278
1279
1280
1281
1282 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
1283 chan->ep_type == USB_ENDPOINT_XFER_ISOC) {
1284
1285
1286
1287
1288
1289
1290
1291 u32 max_periodic_len =
1292 chan->multi_count * chan->max_packet;
1293
1294 if (chan->xfer_len > max_periodic_len)
1295 chan->xfer_len = max_periodic_len;
1296 } else if (chan->xfer_len > max_hc_xfer_size) {
1297
1298
1299
1300
1301 chan->xfer_len =
1302 max_hc_xfer_size - chan->max_packet + 1;
1303 }
1304
1305 if (chan->xfer_len > 0) {
1306 num_packets = (chan->xfer_len + chan->max_packet - 1) /
1307 chan->max_packet;
1308 if (num_packets > max_hc_pkt_count) {
1309 num_packets = max_hc_pkt_count;
1310 chan->xfer_len = num_packets * chan->max_packet;
1311 } else if (chan->ep_is_in) {
1312
1313
1314
1315
1316
1317
1318 chan->xfer_len = num_packets * chan->max_packet;
1319 }
1320 } else {
1321
1322 num_packets = 1;
1323 }
1324
1325 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
1326 chan->ep_type == USB_ENDPOINT_XFER_ISOC)
1327
1328
1329
1330
1331 chan->multi_count = num_packets;
1332
1333 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC)
1334 dwc2_set_pid_isoc(chan);
1335
1336 hctsiz |= chan->xfer_len << TSIZ_XFERSIZE_SHIFT &
1337 TSIZ_XFERSIZE_MASK;
1338
1339
1340 ec_mc = chan->multi_count;
1341 }
1342
1343 chan->start_pkt_count = num_packets;
1344 hctsiz |= num_packets << TSIZ_PKTCNT_SHIFT & TSIZ_PKTCNT_MASK;
1345 hctsiz |= chan->data_pid_start << TSIZ_SC_MC_PID_SHIFT &
1346 TSIZ_SC_MC_PID_MASK;
1347 dwc2_writel(hsotg, hctsiz, HCTSIZ(chan->hc_num));
1348 if (dbg_hc(chan)) {
1349 dev_vdbg(hsotg->dev, "Wrote %08x to HCTSIZ(%d)\n",
1350 hctsiz, chan->hc_num);
1351
1352 dev_vdbg(hsotg->dev, "%s: Channel %d\n", __func__,
1353 chan->hc_num);
1354 dev_vdbg(hsotg->dev, " Xfer Size: %d\n",
1355 (hctsiz & TSIZ_XFERSIZE_MASK) >>
1356 TSIZ_XFERSIZE_SHIFT);
1357 dev_vdbg(hsotg->dev, " Num Pkts: %d\n",
1358 (hctsiz & TSIZ_PKTCNT_MASK) >>
1359 TSIZ_PKTCNT_SHIFT);
1360 dev_vdbg(hsotg->dev, " Start PID: %d\n",
1361 (hctsiz & TSIZ_SC_MC_PID_MASK) >>
1362 TSIZ_SC_MC_PID_SHIFT);
1363 }
1364
1365 if (hsotg->params.host_dma) {
1366 dma_addr_t dma_addr;
1367
1368 if (chan->align_buf) {
1369 if (dbg_hc(chan))
1370 dev_vdbg(hsotg->dev, "align_buf\n");
1371 dma_addr = chan->align_buf;
1372 } else {
1373 dma_addr = chan->xfer_dma;
1374 }
1375 dwc2_writel(hsotg, (u32)dma_addr, HCDMA(chan->hc_num));
1376
1377 if (dbg_hc(chan))
1378 dev_vdbg(hsotg->dev, "Wrote %08lx to HCDMA(%d)\n",
1379 (unsigned long)dma_addr, chan->hc_num);
1380 }
1381
1382
1383 if (chan->do_split) {
1384 u32 hcsplt = dwc2_readl(hsotg, HCSPLT(chan->hc_num));
1385
1386 hcsplt |= HCSPLT_SPLTENA;
1387 dwc2_writel(hsotg, hcsplt, HCSPLT(chan->hc_num));
1388 }
1389
1390 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
1391 hcchar &= ~HCCHAR_MULTICNT_MASK;
1392 hcchar |= (ec_mc << HCCHAR_MULTICNT_SHIFT) & HCCHAR_MULTICNT_MASK;
1393 dwc2_hc_set_even_odd_frame(hsotg, chan, &hcchar);
1394
1395 if (hcchar & HCCHAR_CHDIS)
1396 dev_warn(hsotg->dev,
1397 "%s: chdis set, channel %d, hcchar 0x%08x\n",
1398 __func__, chan->hc_num, hcchar);
1399
1400
1401 hcchar |= HCCHAR_CHENA;
1402 hcchar &= ~HCCHAR_CHDIS;
1403
1404 if (dbg_hc(chan))
1405 dev_vdbg(hsotg->dev, " Multi Cnt: %d\n",
1406 (hcchar & HCCHAR_MULTICNT_MASK) >>
1407 HCCHAR_MULTICNT_SHIFT);
1408
1409 dwc2_writel(hsotg, hcchar, HCCHAR(chan->hc_num));
1410 if (dbg_hc(chan))
1411 dev_vdbg(hsotg->dev, "Wrote %08x to HCCHAR(%d)\n", hcchar,
1412 chan->hc_num);
1413
1414 chan->xfer_started = 1;
1415 chan->requests++;
1416
1417 if (!hsotg->params.host_dma &&
1418 !chan->ep_is_in && chan->xfer_len > 0)
1419
1420 dwc2_hc_write_packet(hsotg, chan);
1421 }
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
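/*
 * dwc2_hc_start_transfer_ddma() - Does the setup for a data transfer for a
 * host channel in Descriptor DMA mode and starts the transfer
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information needed to initialize the host channel
 *
 * Initializes the HCTSIZ register with the PID, NTD and (for periodic
 * transfers) scheduling information, programs HCDMA with the descriptor
 * list address, then enables the channel.
 */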
1437 void dwc2_hc_start_transfer_ddma(struct dwc2_hsotg *hsotg,
1438 struct dwc2_host_chan *chan)
1439 {
1440 u32 hcchar;
1441 u32 hctsiz = 0;
1442
1443 if (chan->do_ping)
1444 hctsiz |= TSIZ_DOPNG;
1445
1446 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC)
1447 dwc2_set_pid_isoc(chan);
1448
1449
1450 hctsiz |= chan->data_pid_start << TSIZ_SC_MC_PID_SHIFT &
1451 TSIZ_SC_MC_PID_MASK;
1452
1453
1454 hctsiz |= (chan->ntd - 1) << TSIZ_NTD_SHIFT & TSIZ_NTD_MASK;
1455
1456
1457 hctsiz |= chan->schinfo << TSIZ_SCHINFO_SHIFT & TSIZ_SCHINFO_MASK;
1458
1459 if (dbg_hc(chan)) {
1460 dev_vdbg(hsotg->dev, "%s: Channel %d\n", __func__,
1461 chan->hc_num);
1462 dev_vdbg(hsotg->dev, " Start PID: %d\n",
1463 chan->data_pid_start);
1464 dev_vdbg(hsotg->dev, " NTD: %d\n", chan->ntd - 1);
1465 }
1466
1467 dwc2_writel(hsotg, hctsiz, HCTSIZ(chan->hc_num));
1468
1469 dma_sync_single_for_device(hsotg->dev, chan->desc_list_addr,
1470 chan->desc_list_sz, DMA_TO_DEVICE);
1471
1472 dwc2_writel(hsotg, chan->desc_list_addr, HCDMA(chan->hc_num));
1473
1474 if (dbg_hc(chan))
1475 dev_vdbg(hsotg->dev, "Wrote %pad to HCDMA(%d)\n",
1476 &chan->desc_list_addr, chan->hc_num);
1477
1478 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
1479 hcchar &= ~HCCHAR_MULTICNT_MASK;
1480 hcchar |= chan->multi_count << HCCHAR_MULTICNT_SHIFT &
1481 HCCHAR_MULTICNT_MASK;
1482
1483 if (hcchar & HCCHAR_CHDIS)
1484 dev_warn(hsotg->dev,
1485 "%s: chdis set, channel %d, hcchar 0x%08x\n",
1486 __func__, chan->hc_num, hcchar);
1487
1488
1489 hcchar |= HCCHAR_CHENA;
1490 hcchar &= ~HCCHAR_CHDIS;
1491
1492 if (dbg_hc(chan))
1493 dev_vdbg(hsotg->dev, " Multi Cnt: %d\n",
1494 (hcchar & HCCHAR_MULTICNT_MASK) >>
1495 HCCHAR_MULTICNT_SHIFT);
1496
1497 dwc2_writel(hsotg, hcchar, HCCHAR(chan->hc_num));
1498 if (dbg_hc(chan))
1499 dev_vdbg(hsotg->dev, "Wrote %08x to HCCHAR(%d)\n", hcchar,
1500 chan->hc_num);
1501
1502 chan->xfer_started = 1;
1503 chan->requests++;
1504 }
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
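/*
 * dwc2_hc_continue_transfer() - Continues a data transfer that was started by
 * a previous call to dwc2_hc_start_transfer()
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information needed to initialize the host channel
 *
 * In Slave mode, for an IN transfer the channel is re-enabled to request
 * another data packet; for an OUT transfer the next packet is written to the
 * FIFO. Returns 1 if a new request was queued, 0 otherwise.
 */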
1526 static int dwc2_hc_continue_transfer(struct dwc2_hsotg *hsotg,
1527 struct dwc2_host_chan *chan)
1528 {
1529 if (dbg_hc(chan))
1530 dev_vdbg(hsotg->dev, "%s: Channel %d\n", __func__,
1531 chan->hc_num);
1532
1533 if (chan->do_split)
1534
1535 return 0;
1536
1537 if (chan->data_pid_start == DWC2_HC_PID_SETUP)
1538
1539 return 0;
1540
1541 if (chan->ep_is_in) {
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554 u32 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
1555
1556 dwc2_hc_set_even_odd_frame(hsotg, chan, &hcchar);
1557 hcchar |= HCCHAR_CHENA;
1558 hcchar &= ~HCCHAR_CHDIS;
1559 if (dbg_hc(chan))
1560 dev_vdbg(hsotg->dev, " IN xfer: hcchar = 0x%08x\n",
1561 hcchar);
1562 dwc2_writel(hsotg, hcchar, HCCHAR(chan->hc_num));
1563 chan->requests++;
1564 return 1;
1565 }
1566
1567
1568
1569 if (chan->xfer_count < chan->xfer_len) {
1570 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
1571 chan->ep_type == USB_ENDPOINT_XFER_ISOC) {
1572 u32 hcchar = dwc2_readl(hsotg,
1573 HCCHAR(chan->hc_num));
1574
1575 dwc2_hc_set_even_odd_frame(hsotg, chan,
1576 &hcchar);
1577 }
1578
1579
1580 dwc2_hc_write_packet(hsotg, chan);
1581 chan->requests++;
1582 return 1;
1583 }
1584
1585 return 0;
1586 }
1587
1588
1589
1590
1591
1592
1593
1594
1595
1596
1597
1598
1599
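/*
 * dwc2_kill_urbs_in_qh_list() - Completes every URB on the given QH list
 * with -ECONNRESET and frees the associated QTDs.
 */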
1600 static void dwc2_kill_urbs_in_qh_list(struct dwc2_hsotg *hsotg,
1601 struct list_head *qh_list)
1602 {
1603 struct dwc2_qh *qh, *qh_tmp;
1604 struct dwc2_qtd *qtd, *qtd_tmp;
1605
1606 list_for_each_entry_safe(qh, qh_tmp, qh_list, qh_list_entry) {
1607 list_for_each_entry_safe(qtd, qtd_tmp, &qh->qtd_list,
1608 qtd_list_entry) {
1609 dwc2_host_complete(hsotg, qtd, -ECONNRESET);
1610 dwc2_hcd_qtd_unlink_and_free(hsotg, qtd, qh);
1611 }
1612 }
1613 }
1614
1615 static void dwc2_qh_list_free(struct dwc2_hsotg *hsotg,
1616 struct list_head *qh_list)
1617 {
1618 struct dwc2_qtd *qtd, *qtd_tmp;
1619 struct dwc2_qh *qh, *qh_tmp;
1620 unsigned long flags;
1621
1622 if (!qh_list->next)
1623
1624 return;
1625
1626 spin_lock_irqsave(&hsotg->lock, flags);
1627
1628
1629 dwc2_kill_urbs_in_qh_list(hsotg, qh_list);
1630
1631 list_for_each_entry_safe(qh, qh_tmp, qh_list, qh_list_entry) {
1632 dwc2_hcd_qh_unlink(hsotg, qh);
1633
1634
1635 list_for_each_entry_safe(qtd, qtd_tmp, &qh->qtd_list,
1636 qtd_list_entry)
1637 dwc2_hcd_qtd_unlink_and_free(hsotg, qtd, qh);
1638
1639 if (qh->channel && qh->channel->qh == qh)
1640 qh->channel->qh = NULL;
1641
1642 spin_unlock_irqrestore(&hsotg->lock, flags);
1643 dwc2_hcd_qh_free(hsotg, qh);
1644 spin_lock_irqsave(&hsotg->lock, flags);
1645 }
1646
1647 spin_unlock_irqrestore(&hsotg->lock, flags);
1648 }
1649
1650
1651
1652
1653
1654
1655
1656
1657
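/*
 * dwc2_kill_all_urbs() - Responds to a disconnect by killing the URBs on
 * every schedule list.
 */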
1658 static void dwc2_kill_all_urbs(struct dwc2_hsotg *hsotg)
1659 {
1660 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->non_periodic_sched_inactive);
1661 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->non_periodic_sched_waiting);
1662 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->non_periodic_sched_active);
1663 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->periodic_sched_inactive);
1664 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->periodic_sched_ready);
1665 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->periodic_sched_assigned);
1666 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->periodic_sched_queued);
1667 }
1668
1669
1670
1671
1672
1673
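/*
 * dwc2_hcd_start() - Starts the HCD when switching to Host mode
 *
 * @hsotg: Pointer to struct dwc2_hsotg
 */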
1674 void dwc2_hcd_start(struct dwc2_hsotg *hsotg)
1675 {
1676 u32 hprt0;
1677
1678 if (hsotg->op_state == OTG_STATE_B_HOST) {
1679
1680
1681
1682
1683
1684 hprt0 = dwc2_read_hprt0(hsotg);
1685 hprt0 |= HPRT0_RST;
1686 dwc2_writel(hsotg, hprt0, HPRT0);
1687 }
1688
1689 queue_delayed_work(hsotg->wq_otg, &hsotg->start_work,
1690 msecs_to_jiffies(50));
1691 }
1692
1693
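/*
 * dwc2_hcd_cleanup_channels() - Halts and cleans up any host channels that
 * are still in use and returns them to the free list.
 */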
1694 static void dwc2_hcd_cleanup_channels(struct dwc2_hsotg *hsotg)
1695 {
1696 int num_channels = hsotg->params.host_channels;
1697 struct dwc2_host_chan *channel;
1698 u32 hcchar;
1699 int i;
1700
1701 if (!hsotg->params.host_dma) {
1702
1703 for (i = 0; i < num_channels; i++) {
1704 channel = hsotg->hc_ptr_array[i];
1705 if (!list_empty(&channel->hc_list_entry))
1706 continue;
1707 hcchar = dwc2_readl(hsotg, HCCHAR(i));
1708 if (hcchar & HCCHAR_CHENA) {
1709 hcchar &= ~(HCCHAR_CHENA | HCCHAR_EPDIR);
1710 hcchar |= HCCHAR_CHDIS;
1711 dwc2_writel(hsotg, hcchar, HCCHAR(i));
1712 }
1713 }
1714 }
1715
1716 for (i = 0; i < num_channels; i++) {
1717 channel = hsotg->hc_ptr_array[i];
1718 if (!list_empty(&channel->hc_list_entry))
1719 continue;
1720 hcchar = dwc2_readl(hsotg, HCCHAR(i));
1721 if (hcchar & HCCHAR_CHENA) {
1722
1723 hcchar |= HCCHAR_CHDIS;
1724 dwc2_writel(hsotg, hcchar, HCCHAR(i));
1725 }
1726
1727 dwc2_hc_cleanup(hsotg, channel);
1728 list_add_tail(&channel->hc_list_entry, &hsotg->free_hc_list);
1729
1730
1731
1732
1733
1734 channel->qh = NULL;
1735 }
1736
1737 if (hsotg->params.uframe_sched) {
1738 hsotg->available_host_channels =
1739 hsotg->params.host_channels;
1740 } else {
1741 hsotg->non_periodic_channels = 0;
1742 hsotg->periodic_channels = 0;
1743 }
1744 }
1745
1746
1747
1748
1749
1750
1751
1752
1753 void dwc2_hcd_connect(struct dwc2_hsotg *hsotg)
1754 {
1755 if (hsotg->lx_state != DWC2_L0)
1756 usb_hcd_resume_root_hub(hsotg->priv);
1757
1758 hsotg->flags.b.port_connect_status_change = 1;
1759 hsotg->flags.b.port_connect_status = 1;
1760 }
1761
1762
1763
1764
1765
1766
1767
1768
1769
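/*
 * dwc2_hcd_disconnect() - Handles disconnect of the HCD
 *
 * @hsotg: Pointer to struct dwc2_hsotg
 * @force: If true, we won't try to reconnect even if we see device connected.
 *
 * Must be called with interrupt disabled and spinlock held.
 */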
1770 void dwc2_hcd_disconnect(struct dwc2_hsotg *hsotg, bool force)
1771 {
1772 u32 intr;
1773 u32 hprt0;
1774
1775
1776 hsotg->flags.b.port_connect_status_change = 1;
1777 hsotg->flags.b.port_connect_status = 0;
1778
1779
1780
1781
1782
1783
1784 intr = dwc2_readl(hsotg, GINTMSK);
1785 intr &= ~(GINTSTS_NPTXFEMP | GINTSTS_PTXFEMP | GINTSTS_HCHINT);
1786 dwc2_writel(hsotg, intr, GINTMSK);
1787 intr = GINTSTS_NPTXFEMP | GINTSTS_PTXFEMP | GINTSTS_HCHINT;
1788 dwc2_writel(hsotg, intr, GINTSTS);
1789
1790
1791
1792
1793
1794
1795 if (dwc2_is_device_mode(hsotg)) {
1796 if (hsotg->op_state != OTG_STATE_A_SUSPEND) {
1797 dev_dbg(hsotg->dev, "Disconnect: PortPower off\n");
1798 dwc2_writel(hsotg, 0, HPRT0);
1799 }
1800
1801 dwc2_disable_host_interrupts(hsotg);
1802 }
1803
1804
1805 dwc2_kill_all_urbs(hsotg);
1806
1807 if (dwc2_is_host_mode(hsotg))
1808
1809 dwc2_hcd_cleanup_channels(hsotg);
1810
1811 dwc2_host_disconnect(hsotg);
1812
1813
1814
1815
1816
1817
1818
1819
1820
1821
1822
1823
1824
1825 if (!force) {
1826 hprt0 = dwc2_readl(hsotg, HPRT0);
1827 if (!(hprt0 & HPRT0_CONNDET) && (hprt0 & HPRT0_CONNSTS))
1828 dwc2_hcd_connect(hsotg);
1829 }
1830 }
1831
1832
1833
1834
1835
1836
1837 static void dwc2_hcd_rem_wakeup(struct dwc2_hsotg *hsotg)
1838 {
1839 if (hsotg->bus_suspended) {
1840 hsotg->flags.b.port_suspend_change = 1;
1841 usb_hcd_resume_root_hub(hsotg->priv);
1842 }
1843
1844 if (hsotg->lx_state == DWC2_L1)
1845 hsotg->flags.b.port_l1_change = 1;
1846 }
1847
1848
1849
1850
1851
1852
1853
1854
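/*
 * dwc2_hcd_stop() - Halts the DWC_otg host mode operations in a clean manner
 *
 * @hsotg: Pointer to struct dwc2_hsotg
 *
 * Must be called with interrupt disabled and spinlock held.
 */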
1855 void dwc2_hcd_stop(struct dwc2_hsotg *hsotg)
1856 {
1857 dev_dbg(hsotg->dev, "DWC OTG HCD STOP\n");
1858
1859
1860
1861
1862
1863
1864
1865
1866 dwc2_disable_host_interrupts(hsotg);
1867
1868
1869 dev_dbg(hsotg->dev, "PortPower off\n");
1870 dwc2_writel(hsotg, 0, HPRT0);
1871 }
1872
1873
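/*
 * dwc2_hcd_urb_enqueue() - Adds a QTD for the given URB to its QH and, if
 * the SOF interrupt is not already enabled, kicks off transaction
 * processing. Returns -ENODEV if the port is not connected.
 */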
1874 static int dwc2_hcd_urb_enqueue(struct dwc2_hsotg *hsotg,
1875 struct dwc2_hcd_urb *urb, struct dwc2_qh *qh,
1876 struct dwc2_qtd *qtd)
1877 {
1878 u32 intr_mask;
1879 int retval;
1880 int dev_speed;
1881
1882 if (!hsotg->flags.b.port_connect_status) {
1883
1884 dev_err(hsotg->dev, "Not connected\n");
1885 return -ENODEV;
1886 }
1887
1888 dev_speed = dwc2_host_get_speed(hsotg, urb->priv);
1889
1890
1891 if ((dev_speed == USB_SPEED_LOW) &&
1892 (hsotg->hw_params.fs_phy_type == GHWCFG2_FS_PHY_TYPE_DEDICATED) &&
1893 (hsotg->hw_params.hs_phy_type == GHWCFG2_HS_PHY_TYPE_UTMI)) {
1894 u32 hprt0 = dwc2_readl(hsotg, HPRT0);
1895 u32 prtspd = (hprt0 & HPRT0_SPD_MASK) >> HPRT0_SPD_SHIFT;
1896
1897 if (prtspd == HPRT0_SPD_FULL_SPEED)
1898 return -ENODEV;
1899 }
1900
1901 if (!qtd)
1902 return -EINVAL;
1903
1904 dwc2_hcd_qtd_init(qtd, urb);
1905 retval = dwc2_hcd_qtd_add(hsotg, qtd, qh);
1906 if (retval) {
1907 dev_err(hsotg->dev,
1908 "DWC OTG HCD URB Enqueue failed adding QTD. Error status %d\n",
1909 retval);
1910 return retval;
1911 }
1912
1913 intr_mask = dwc2_readl(hsotg, GINTMSK);
1914 if (!(intr_mask & GINTSTS_SOF)) {
1915 enum dwc2_transaction_type tr_type;
1916
1917 if (qtd->qh->ep_type == USB_ENDPOINT_XFER_BULK &&
1918 !(qtd->urb->flags & URB_GIVEBACK_ASAP))
1919
1920
1921
1922
1923 return 0;
1924
1925 tr_type = dwc2_hcd_select_transactions(hsotg);
1926 if (tr_type != DWC2_TRANSACTION_NONE)
1927 dwc2_hcd_queue_transactions(hsotg, tr_type);
1928 }
1929
1930 return 0;
1931 }
1932
1933
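/*
 * dwc2_hcd_urb_dequeue() - Removes a URB's QTD from its QH, halting the
 * host channel first if the transfer is already in progress.
 */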
1934 static int dwc2_hcd_urb_dequeue(struct dwc2_hsotg *hsotg,
1935 struct dwc2_hcd_urb *urb)
1936 {
1937 struct dwc2_qh *qh;
1938 struct dwc2_qtd *urb_qtd;
1939
1940 urb_qtd = urb->qtd;
1941 if (!urb_qtd) {
1942 dev_dbg(hsotg->dev, "## Urb QTD is NULL ##\n");
1943 return -EINVAL;
1944 }
1945
1946 qh = urb_qtd->qh;
1947 if (!qh) {
1948 dev_dbg(hsotg->dev, "## Urb QTD QH is NULL ##\n");
1949 return -EINVAL;
1950 }
1951
1952 urb->priv = NULL;
1953
1954 if (urb_qtd->in_process && qh->channel) {
1955 dwc2_dump_channel_info(hsotg, qh->channel);
1956
1957
1958 if (hsotg->flags.b.port_connect_status)
1959
1960
1961
1962
1963
1964
1965
1966 dwc2_hc_halt(hsotg, qh->channel,
1967 DWC2_HC_XFER_URB_DEQUEUE);
1968 }
1969
1970
1971
1972
1973
1974 if (!hsotg->params.dma_desc_enable) {
1975 u8 in_process = urb_qtd->in_process;
1976
1977 dwc2_hcd_qtd_unlink_and_free(hsotg, urb_qtd, qh);
1978 if (in_process) {
1979 dwc2_hcd_qh_deactivate(hsotg, qh, 0);
1980 qh->channel = NULL;
1981 } else if (list_empty(&qh->qtd_list)) {
1982 dwc2_hcd_qh_unlink(hsotg, qh);
1983 }
1984 } else {
1985 dwc2_hcd_qtd_unlink_and_free(hsotg, urb_qtd, qh);
1986 }
1987
1988 return 0;
1989 }
1990
1991
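/*
 * dwc2_hcd_endpoint_disable() - Waits (up to "retry" iterations) for the QTD
 * list of the endpoint's QH to drain, then unlinks and frees the QH.
 */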
1992 static int dwc2_hcd_endpoint_disable(struct dwc2_hsotg *hsotg,
1993 struct usb_host_endpoint *ep, int retry)
1994 {
1995 struct dwc2_qtd *qtd, *qtd_tmp;
1996 struct dwc2_qh *qh;
1997 unsigned long flags;
1998 int rc;
1999
2000 spin_lock_irqsave(&hsotg->lock, flags);
2001
2002 qh = ep->hcpriv;
2003 if (!qh) {
2004 rc = -EINVAL;
2005 goto err;
2006 }
2007
2008 while (!list_empty(&qh->qtd_list) && retry--) {
2009 if (retry == 0) {
2010 dev_err(hsotg->dev,
2011 "## timeout in dwc2_hcd_endpoint_disable() ##\n");
2012 rc = -EBUSY;
2013 goto err;
2014 }
2015
2016 spin_unlock_irqrestore(&hsotg->lock, flags);
2017 msleep(20);
2018 spin_lock_irqsave(&hsotg->lock, flags);
2019 qh = ep->hcpriv;
2020 if (!qh) {
2021 rc = -EINVAL;
2022 goto err;
2023 }
2024 }
2025
2026 dwc2_hcd_qh_unlink(hsotg, qh);
2027
2028
2029 list_for_each_entry_safe(qtd, qtd_tmp, &qh->qtd_list, qtd_list_entry)
2030 dwc2_hcd_qtd_unlink_and_free(hsotg, qtd, qh);
2031
2032 ep->hcpriv = NULL;
2033
2034 if (qh->channel && qh->channel->qh == qh)
2035 qh->channel->qh = NULL;
2036
2037 spin_unlock_irqrestore(&hsotg->lock, flags);
2038
2039 dwc2_hcd_qh_free(hsotg, qh);
2040
2041 return 0;
2042
2043 err:
2044 ep->hcpriv = NULL;
2045 spin_unlock_irqrestore(&hsotg->lock, flags);
2046
2047 return rc;
2048 }
2049
2050
2051 static int dwc2_hcd_endpoint_reset(struct dwc2_hsotg *hsotg,
2052 struct usb_host_endpoint *ep)
2053 {
2054 struct dwc2_qh *qh = ep->hcpriv;
2055
2056 if (!qh)
2057 return -EINVAL;
2058
2059 qh->data_toggle = DWC2_HC_PID_DATA0;
2060
2061 return 0;
2062 }
2063
2064
2065
2066
2067
2068
2069
2070
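/*
 * dwc2_core_init() - Initializes the DWC_otg controller registers and
 * prepares the core for device mode or host mode operation
 *
 * @hsotg:         Programming view of the DWC_otg controller
 * @initial_setup: If true then this is the first init for this instance.
 */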
2071 int dwc2_core_init(struct dwc2_hsotg *hsotg, bool initial_setup)
2072 {
2073 u32 usbcfg, otgctl;
2074 int retval;
2075
2076 dev_dbg(hsotg->dev, "%s(%p)\n", __func__, hsotg);
2077
2078 usbcfg = dwc2_readl(hsotg, GUSBCFG);
2079
2080
2081 usbcfg &= ~GUSBCFG_ULPI_EXT_VBUS_DRV;
2082 if (hsotg->params.phy_ulpi_ext_vbus)
2083 usbcfg |= GUSBCFG_ULPI_EXT_VBUS_DRV;
2084
2085
2086 usbcfg &= ~GUSBCFG_TERMSELDLPULSE;
2087 if (hsotg->params.ts_dline)
2088 usbcfg |= GUSBCFG_TERMSELDLPULSE;
2089
2090 dwc2_writel(hsotg, usbcfg, GUSBCFG);
2091
2092
2093
2094
2095
2096
2097
2098
2099 if (!initial_setup) {
2100 retval = dwc2_core_reset(hsotg, false);
2101 if (retval) {
2102 dev_err(hsotg->dev, "%s(): Reset failed, aborting\n",
2103 __func__);
2104 return retval;
2105 }
2106 }
2107
2108
2109
2110
2111 retval = dwc2_phy_init(hsotg, initial_setup);
2112 if (retval)
2113 return retval;
2114
2115
2116 retval = dwc2_gahbcfg_init(hsotg);
2117 if (retval)
2118 return retval;
2119
2120
2121 dwc2_gusbcfg_init(hsotg);
2122
2123
2124 otgctl = dwc2_readl(hsotg, GOTGCTL);
2125 otgctl &= ~GOTGCTL_OTGVER;
2126 dwc2_writel(hsotg, otgctl, GOTGCTL);
2127
2128
2129 hsotg->srp_success = 0;
2130
2131
2132 dwc2_enable_common_interrupts(hsotg);
2133
2134
2135
2136
2137
2138 if (dwc2_is_host_mode(hsotg)) {
2139 dev_dbg(hsotg->dev, "Host Mode\n");
2140 hsotg->op_state = OTG_STATE_A_HOST;
2141 } else {
2142 dev_dbg(hsotg->dev, "Device Mode\n");
2143 hsotg->op_state = OTG_STATE_B_PERIPHERAL;
2144 }
2145
2146 return 0;
2147 }
2148
2149
2150
2151
2152
2153
2154
2155
2156
2157
2158
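/*
 * dwc2_core_host_init() - Initializes the DWC_otg controller registers for
 * Host mode
 *
 * @hsotg: Programming view of DWC_otg controller
 *
 * This function flushes the Tx and Rx FIFOs and flushes any entries in the
 * request queues. Host channels are reset to ensure that they are ready for
 * performing transfers.
 */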
2159 static void dwc2_core_host_init(struct dwc2_hsotg *hsotg)
2160 {
2161 u32 hcfg, hfir, otgctl, usbcfg;
2162
2163 dev_dbg(hsotg->dev, "%s(%p)\n", __func__, hsotg);
2164
2165
2166
2167
2168
2169
2170
2171
2172
2173 usbcfg = dwc2_readl(hsotg, GUSBCFG);
2174 usbcfg |= GUSBCFG_TOUTCAL(7);
2175 dwc2_writel(hsotg, usbcfg, GUSBCFG);
2176
2177
2178 dwc2_writel(hsotg, 0, PCGCTL);
2179
2180
2181 dwc2_init_fs_ls_pclk_sel(hsotg);
2182 if (hsotg->params.speed == DWC2_SPEED_PARAM_FULL ||
2183 hsotg->params.speed == DWC2_SPEED_PARAM_LOW) {
2184 hcfg = dwc2_readl(hsotg, HCFG);
2185 hcfg |= HCFG_FSLSSUPP;
2186 dwc2_writel(hsotg, hcfg, HCFG);
2187 }
2188
2189
2190
2191
2192
2193
2194 if (hsotg->params.reload_ctl) {
2195 hfir = dwc2_readl(hsotg, HFIR);
2196 hfir |= HFIR_RLDCTRL;
2197 dwc2_writel(hsotg, hfir, HFIR);
2198 }
2199
2200 if (hsotg->params.dma_desc_enable) {
2201 u32 op_mode = hsotg->hw_params.op_mode;
2202
2203 if (hsotg->hw_params.snpsid < DWC2_CORE_REV_2_90a ||
2204 !hsotg->hw_params.dma_desc_enable ||
2205 op_mode == GHWCFG2_OP_MODE_SRP_CAPABLE_DEVICE ||
2206 op_mode == GHWCFG2_OP_MODE_NO_SRP_CAPABLE_DEVICE ||
2207 op_mode == GHWCFG2_OP_MODE_UNDEFINED) {
2208 dev_err(hsotg->dev,
2209 "Hardware does not support descriptor DMA mode -\n");
2210 dev_err(hsotg->dev,
2211 "falling back to buffer DMA mode.\n");
2212 hsotg->params.dma_desc_enable = false;
2213 } else {
2214 hcfg = dwc2_readl(hsotg, HCFG);
2215 hcfg |= HCFG_DESCDMA;
2216 dwc2_writel(hsotg, hcfg, HCFG);
2217 }
2218 }
2219
2220
2221 dwc2_config_fifos(hsotg);
2222
2223
2224
2225 otgctl = dwc2_readl(hsotg, GOTGCTL);
2226 otgctl &= ~GOTGCTL_HSTSETHNPEN;
2227 dwc2_writel(hsotg, otgctl, GOTGCTL);
2228
2229
	dwc2_flush_tx_fifo(hsotg, 0x10); /* 0x10 selects all TX FIFOs */
2231 dwc2_flush_rx_fifo(hsotg);
2232
2233
2234 otgctl = dwc2_readl(hsotg, GOTGCTL);
2235 otgctl &= ~GOTGCTL_HSTSETHNPEN;
2236 dwc2_writel(hsotg, otgctl, GOTGCTL);
2237
2238 if (!hsotg->params.dma_desc_enable) {
2239 int num_channels, i;
2240 u32 hcchar;
2241
2242
2243 num_channels = hsotg->params.host_channels;
2244 for (i = 0; i < num_channels; i++) {
2245 hcchar = dwc2_readl(hsotg, HCCHAR(i));
2246 if (hcchar & HCCHAR_CHENA) {
2247 hcchar &= ~HCCHAR_CHENA;
2248 hcchar |= HCCHAR_CHDIS;
2249 hcchar &= ~HCCHAR_EPDIR;
2250 dwc2_writel(hsotg, hcchar, HCCHAR(i));
2251 }
2252 }
2253
2254
2255 for (i = 0; i < num_channels; i++) {
2256 hcchar = dwc2_readl(hsotg, HCCHAR(i));
2257 if (hcchar & HCCHAR_CHENA) {
2258 hcchar |= HCCHAR_CHENA | HCCHAR_CHDIS;
2259 hcchar &= ~HCCHAR_EPDIR;
2260 dwc2_writel(hsotg, hcchar, HCCHAR(i));
2261 dev_dbg(hsotg->dev, "%s: Halt channel %d\n",
2262 __func__, i);
2263
2264 if (dwc2_hsotg_wait_bit_clear(hsotg, HCCHAR(i),
2265 HCCHAR_CHENA,
2266 1000)) {
2267 dev_warn(hsotg->dev,
2268 "Unable to clear enable on channel %d\n",
2269 i);
2270 }
2271 }
2272 }
2273 }
2274
2275
2276 dwc2_enable_acg(hsotg);
2277
2278
2279 dev_dbg(hsotg->dev, "Init: Port Power? op_state=%d\n", hsotg->op_state);
2280 if (hsotg->op_state == OTG_STATE_A_HOST) {
2281 u32 hprt0 = dwc2_read_hprt0(hsotg);
2282
2283 dev_dbg(hsotg->dev, "Init: Power Port (%d)\n",
2284 !!(hprt0 & HPRT0_PWR));
2285 if (!(hprt0 & HPRT0_PWR)) {
2286 hprt0 |= HPRT0_PWR;
2287 dwc2_writel(hsotg, hprt0, HPRT0);
2288 }
2289 }
2290
2291 dwc2_enable_host_interrupts(hsotg);
2292 }
2293
2294
2295
2296
2297
2298
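/*
 * dwc2_hcd_reinit() - Resets the HCD state: clears the internal flags,
 * rebuilds the free host channel list, cleans up each channel, and
 * re-initializes the core for host mode operation.
 */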
2299 static void dwc2_hcd_reinit(struct dwc2_hsotg *hsotg)
2300 {
2301 struct dwc2_host_chan *chan, *chan_tmp;
2302 int num_channels;
2303 int i;
2304
2305 hsotg->flags.d32 = 0;
2306 hsotg->non_periodic_qh_ptr = &hsotg->non_periodic_sched_active;
2307
2308 if (hsotg->params.uframe_sched) {
2309 hsotg->available_host_channels =
2310 hsotg->params.host_channels;
2311 } else {
2312 hsotg->non_periodic_channels = 0;
2313 hsotg->periodic_channels = 0;
2314 }
2315
2316
2317
2318
2319
2320 list_for_each_entry_safe(chan, chan_tmp, &hsotg->free_hc_list,
2321 hc_list_entry)
2322 list_del_init(&chan->hc_list_entry);
2323
2324 num_channels = hsotg->params.host_channels;
2325 for (i = 0; i < num_channels; i++) {
2326 chan = hsotg->hc_ptr_array[i];
2327 list_add_tail(&chan->hc_list_entry, &hsotg->free_hc_list);
2328 dwc2_hc_cleanup(hsotg, chan);
2329 }
2330
2331
2332 dwc2_core_host_init(hsotg);
2333 }
2334
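/*
 * dwc2_hc_init_split() - Sets up the split-transaction fields of a host
 * channel (hub address and port, transaction position, complete-split flag).
 */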
2335 static void dwc2_hc_init_split(struct dwc2_hsotg *hsotg,
2336 struct dwc2_host_chan *chan,
2337 struct dwc2_qtd *qtd, struct dwc2_hcd_urb *urb)
2338 {
2339 int hub_addr, hub_port;
2340
2341 chan->do_split = 1;
2342 chan->xact_pos = qtd->isoc_split_pos;
2343 chan->complete_split = qtd->complete_split;
2344 dwc2_host_hub_info(hsotg, urb->priv, &hub_addr, &hub_port);
2345 chan->hub_addr = (u8)hub_addr;
2346 chan->hub_port = (u8)hub_port;
2347 }
2348
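/*
 * dwc2_hc_init_xfer() - Sets up the transfer buffer, length, direction and
 * starting PID of a host channel according to the pipe type and, for control
 * transfers, the current control phase.
 */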
2349 static void dwc2_hc_init_xfer(struct dwc2_hsotg *hsotg,
2350 struct dwc2_host_chan *chan,
2351 struct dwc2_qtd *qtd)
2352 {
2353 struct dwc2_hcd_urb *urb = qtd->urb;
2354 struct dwc2_hcd_iso_packet_desc *frame_desc;
2355
2356 switch (dwc2_hcd_get_pipe_type(&urb->pipe_info)) {
2357 case USB_ENDPOINT_XFER_CONTROL:
2358 chan->ep_type = USB_ENDPOINT_XFER_CONTROL;
2359
2360 switch (qtd->control_phase) {
2361 case DWC2_CONTROL_SETUP:
2362 dev_vdbg(hsotg->dev, " Control setup transaction\n");
2363 chan->do_ping = 0;
2364 chan->ep_is_in = 0;
2365 chan->data_pid_start = DWC2_HC_PID_SETUP;
2366 if (hsotg->params.host_dma)
2367 chan->xfer_dma = urb->setup_dma;
2368 else
2369 chan->xfer_buf = urb->setup_packet;
2370 chan->xfer_len = 8;
2371 break;
2372
2373 case DWC2_CONTROL_DATA:
2374 dev_vdbg(hsotg->dev, " Control data transaction\n");
2375 chan->data_pid_start = qtd->data_toggle;
2376 break;
2377
2378 case DWC2_CONTROL_STATUS:
2379
2380
2381
2382
2383 dev_vdbg(hsotg->dev, " Control status transaction\n");
2384 if (urb->length == 0)
2385 chan->ep_is_in = 1;
2386 else
2387 chan->ep_is_in =
2388 dwc2_hcd_is_pipe_out(&urb->pipe_info);
2389 if (chan->ep_is_in)
2390 chan->do_ping = 0;
2391 chan->data_pid_start = DWC2_HC_PID_DATA1;
2392 chan->xfer_len = 0;
2393 if (hsotg->params.host_dma)
2394 chan->xfer_dma = hsotg->status_buf_dma;
2395 else
2396 chan->xfer_buf = hsotg->status_buf;
2397 break;
2398 }
2399 break;
2400
2401 case USB_ENDPOINT_XFER_BULK:
2402 chan->ep_type = USB_ENDPOINT_XFER_BULK;
2403 break;
2404
2405 case USB_ENDPOINT_XFER_INT:
2406 chan->ep_type = USB_ENDPOINT_XFER_INT;
2407 break;
2408
2409 case USB_ENDPOINT_XFER_ISOC:
2410 chan->ep_type = USB_ENDPOINT_XFER_ISOC;
2411 if (hsotg->params.dma_desc_enable)
2412 break;
2413
2414 frame_desc = &urb->iso_descs[qtd->isoc_frame_index];
2415 frame_desc->status = 0;
2416
2417 if (hsotg->params.host_dma) {
2418 chan->xfer_dma = urb->dma;
2419 chan->xfer_dma += frame_desc->offset +
2420 qtd->isoc_split_offset;
2421 } else {
2422 chan->xfer_buf = urb->buf;
2423 chan->xfer_buf += frame_desc->offset +
2424 qtd->isoc_split_offset;
2425 }
2426
2427 chan->xfer_len = frame_desc->length - qtd->isoc_split_offset;
2428
2429 if (chan->xact_pos == DWC2_HCSPLT_XACTPOS_ALL) {
2430 if (chan->xfer_len <= 188)
2431 chan->xact_pos = DWC2_HCSPLT_XACTPOS_ALL;
2432 else
2433 chan->xact_pos = DWC2_HCSPLT_XACTPOS_BEGIN;
2434 }
2435 break;
2436 }
2437 }
2438
2439 static int dwc2_alloc_split_dma_aligned_buf(struct dwc2_hsotg *hsotg,
2440 struct dwc2_qh *qh,
2441 struct dwc2_host_chan *chan)
2442 {
2443 if (!hsotg->unaligned_cache ||
2444 chan->max_packet > DWC2_KMEM_UNALIGNED_BUF_SIZE)
2445 return -ENOMEM;
2446
2447 if (!qh->dw_align_buf) {
2448 qh->dw_align_buf = kmem_cache_alloc(hsotg->unaligned_cache,
2449 GFP_ATOMIC | GFP_DMA);
2450 if (!qh->dw_align_buf)
2451 return -ENOMEM;
2452 }
2453
2454 qh->dw_align_buf_dma = dma_map_single(hsotg->dev, qh->dw_align_buf,
2455 DWC2_KMEM_UNALIGNED_BUF_SIZE,
2456 DMA_FROM_DEVICE);
2457
2458 if (dma_mapping_error(hsotg->dev, qh->dw_align_buf_dma)) {
2459 dev_err(hsotg->dev, "can't map align_buf\n");
2460 chan->align_buf = 0;
2461 return -EINVAL;
2462 }
2463
2464 chan->align_buf = qh->dw_align_buf_dma;
2465 return 0;
2466 }
2467
2468 #define DWC2_USB_DMA_ALIGN 4
2469
2470 static void dwc2_free_dma_aligned_buffer(struct urb *urb)
2471 {
2472 void *stored_xfer_buffer;
2473 size_t length;
2474
2475 if (!(urb->transfer_flags & URB_ALIGNED_TEMP_BUFFER))
2476 return;
2477
2478
2479 memcpy(&stored_xfer_buffer,
2480 PTR_ALIGN(urb->transfer_buffer + urb->transfer_buffer_length,
2481 dma_get_cache_alignment()),
2482 sizeof(urb->transfer_buffer));
2483
2484 if (usb_urb_dir_in(urb)) {
2485 if (usb_pipeisoc(urb->pipe))
2486 length = urb->transfer_buffer_length;
2487 else
2488 length = urb->actual_length;
2489
2490 memcpy(stored_xfer_buffer, urb->transfer_buffer, length);
2491 }
2492 kfree(urb->transfer_buffer);
2493 urb->transfer_buffer = stored_xfer_buffer;
2494
2495 urb->transfer_flags &= ~URB_ALIGNED_TEMP_BUFFER;
2496 }
2497
2498 static int dwc2_alloc_dma_aligned_buffer(struct urb *urb, gfp_t mem_flags)
2499 {
2500 void *kmalloc_ptr;
2501 size_t kmalloc_size;
2502
2503 if (urb->num_sgs || urb->sg ||
2504 urb->transfer_buffer_length == 0 ||
2505 !((uintptr_t)urb->transfer_buffer & (DWC2_USB_DMA_ALIGN - 1)))
2506 return 0;
2507
2508
2509
2510
2511
2512
2513 kmalloc_size = urb->transfer_buffer_length +
2514 (dma_get_cache_alignment() - 1) +
2515 sizeof(urb->transfer_buffer);
2516
2517 kmalloc_ptr = kmalloc(kmalloc_size, mem_flags);
2518 if (!kmalloc_ptr)
2519 return -ENOMEM;
2520
2521
2522
2523
2524
2525 memcpy(PTR_ALIGN(kmalloc_ptr + urb->transfer_buffer_length,
2526 dma_get_cache_alignment()),
2527 &urb->transfer_buffer, sizeof(urb->transfer_buffer));
2528
2529 if (usb_urb_dir_out(urb))
2530 memcpy(kmalloc_ptr, urb->transfer_buffer,
2531 urb->transfer_buffer_length);
2532 urb->transfer_buffer = kmalloc_ptr;
2533
2534 urb->transfer_flags |= URB_ALIGNED_TEMP_BUFFER;
2535
2536 return 0;
2537 }
2538
2539 static int dwc2_map_urb_for_dma(struct usb_hcd *hcd, struct urb *urb,
2540 gfp_t mem_flags)
2541 {
2542 int ret;
2543
2544
2545 WARN_ON_ONCE(urb->setup_dma &&
2546 (urb->setup_dma & (DWC2_USB_DMA_ALIGN - 1)));
2547
2548 ret = dwc2_alloc_dma_aligned_buffer(urb, mem_flags);
2549 if (ret)
2550 return ret;
2551
2552 ret = usb_hcd_map_urb_for_dma(hcd, urb, mem_flags);
2553 if (ret)
2554 dwc2_free_dma_aligned_buffer(urb);
2555
2556 return ret;
2557 }
2558
2559 static void dwc2_unmap_urb_for_dma(struct usb_hcd *hcd, struct urb *urb)
2560 {
2561 usb_hcd_unmap_urb_for_dma(hcd, urb);
2562 dwc2_free_dma_aligned_buffer(urb);
2563 }
2564
2565
2566
2567
2568
2569
2570
2571
2572
2573
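/*
 * dwc2_assign_and_init_hc() - Assigns transactions from a QTD to a free host
 * channel and initializes the host channel to perform the transactions. The
 * host channel is removed from the free list.
 *
 * @hsotg: The HCD state structure
 * @qh:    Transactions from the first QTD for this QH are selected and
 *         assigned to a free host channel
 */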
2574 static int dwc2_assign_and_init_hc(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
2575 {
2576 struct dwc2_host_chan *chan;
2577 struct dwc2_hcd_urb *urb;
2578 struct dwc2_qtd *qtd;
2579
2580 if (dbg_qh(qh))
2581 dev_vdbg(hsotg->dev, "%s(%p,%p)\n", __func__, hsotg, qh);
2582
2583 if (list_empty(&qh->qtd_list)) {
2584 dev_dbg(hsotg->dev, "No QTDs in QH list\n");
2585 return -ENOMEM;
2586 }
2587
2588 if (list_empty(&hsotg->free_hc_list)) {
2589 dev_dbg(hsotg->dev, "No free channel to assign\n");
2590 return -ENOMEM;
2591 }
2592
2593 chan = list_first_entry(&hsotg->free_hc_list, struct dwc2_host_chan,
2594 hc_list_entry);
2595
2596
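/* Remove the host channel from the free list */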
2597 list_del_init(&chan->hc_list_entry);
2598
2599 qtd = list_first_entry(&qh->qtd_list, struct dwc2_qtd, qtd_list_entry);
2600 urb = qtd->urb;
2601 qh->channel = chan;
2602 qtd->in_process = 1;
2603
2604
2605
2606
2607
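/* Initialize the channel from the QH and the URB's pipe information */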
2608 chan->dev_addr = dwc2_hcd_get_dev_addr(&urb->pipe_info);
2609 chan->ep_num = dwc2_hcd_get_ep_num(&urb->pipe_info);
2610 chan->speed = qh->dev_speed;
2611 chan->max_packet = qh->maxp;
2612
2613 chan->xfer_started = 0;
2614 chan->halt_status = DWC2_HC_XFER_NO_HALT_STATUS;
2615 chan->error_state = (qtd->error_count > 0);
2616 chan->halt_on_queue = 0;
2617 chan->halt_pending = 0;
2618 chan->requests = 0;
2619
2620
2621
2622
2623
2624
2625
2626
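/*
 * The following values may be modified in the transfer type specific
 * setup below; in particular, xfer_len may be reduced when the transfer
 * is started.
 */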
2627 chan->ep_is_in = (dwc2_hcd_is_pipe_in(&urb->pipe_info) != 0);
2628 if (chan->ep_is_in)
2629 chan->do_ping = 0;
2630 else
2631 chan->do_ping = qh->ping_state;
2632
2633 chan->data_pid_start = qh->data_toggle;
2634 chan->multi_count = 1;
2635
2636 if (urb->actual_length > urb->length &&
2637 !dwc2_hcd_is_pipe_in(&urb->pipe_info))
2638 urb->actual_length = urb->length;
2639
2640 if (hsotg->params.host_dma)
2641 chan->xfer_dma = urb->dma + urb->actual_length;
2642 else
2643 chan->xfer_buf = (u8 *)urb->buf + urb->actual_length;
2644
2645 chan->xfer_len = urb->length - urb->actual_length;
2646 chan->xfer_count = 0;
2647
2648
2649 if (qh->do_split)
2650 dwc2_hc_init_split(hsotg, chan, qtd, urb);
2651 else
2652 chan->do_split = 0;
2653
2654
2655 dwc2_hc_init_xfer(hsotg, chan, qtd);
2656
2657
2658 if (hsotg->params.host_dma && qh->do_split &&
2659 chan->ep_is_in && (chan->xfer_dma & 0x3)) {
2660 dev_vdbg(hsotg->dev, "Non-aligned buffer\n");
2661 if (dwc2_alloc_split_dma_aligned_buf(hsotg, qh, chan)) {
2662 dev_err(hsotg->dev,
2663 "Failed to allocate memory to handle non-aligned buffer\n");
2664
2665 chan->align_buf = 0;
2666 chan->multi_count = 0;
2667 list_add_tail(&chan->hc_list_entry,
2668 &hsotg->free_hc_list);
2669 qtd->in_process = 0;
2670 qh->channel = NULL;
2671 return -ENOMEM;
2672 }
2673 } else {
2674
2675
2676
2677
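/* DMA buffers are assumed to be aligned in the non-split and split OUT cases */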
2678 WARN_ON_ONCE(hsotg->params.host_dma &&
2679 (chan->xfer_dma & 0x3));
2680 chan->align_buf = 0;
2681 }
2682
2683 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
2684 chan->ep_type == USB_ENDPOINT_XFER_ISOC)
2685
2686
2687
2688
2689 chan->multi_count = qh->maxp_mult;
2690
2691 if (hsotg->params.dma_desc_enable) {
2692 chan->desc_list_addr = qh->desc_list_dma;
2693 chan->desc_list_sz = qh->desc_list_sz;
2694 }
2695
2696 dwc2_hc_init(hsotg, chan);
2697 chan->qh = qh;
2698
2699 return 0;
2700 }
2701
2702
2703
2704
2705
2706
2707
2708
2709
2710
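/*
 * dwc2_hcd_select_transactions() - Selects transactions from the HCD transfer
 * schedule and assigns them to available host channels. Called from the HCD
 * interrupt handler functions.
 *
 * @hsotg: The HCD state structure
 *
 * Return: The types of new transactions that were assigned to host channels
 */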
2711 enum dwc2_transaction_type dwc2_hcd_select_transactions(
2712 struct dwc2_hsotg *hsotg)
2713 {
2714 enum dwc2_transaction_type ret_val = DWC2_TRANSACTION_NONE;
2715 struct list_head *qh_ptr;
2716 struct dwc2_qh *qh;
2717 int num_channels;
2718
2719 #ifdef DWC2_DEBUG_SOF
2720 dev_vdbg(hsotg->dev, " Select Transactions\n");
2721 #endif
2722
2723
2724 qh_ptr = hsotg->periodic_sched_ready.next;
2725 while (qh_ptr != &hsotg->periodic_sched_ready) {
2726 if (list_empty(&hsotg->free_hc_list))
2727 break;
2728 if (hsotg->params.uframe_sched) {
2729 if (hsotg->available_host_channels <= 1)
2730 break;
2731 hsotg->available_host_channels--;
2732 }
2733 qh = list_entry(qh_ptr, struct dwc2_qh, qh_list_entry);
2734 if (dwc2_assign_and_init_hc(hsotg, qh))
2735 break;
2736
2737
2738
2739
2740
2741 qh_ptr = qh_ptr->next;
2742 list_move_tail(&qh->qh_list_entry,
2743 &hsotg->periodic_sched_assigned);
2744 ret_val = DWC2_TRANSACTION_PERIODIC;
2745 }
2746
2747
2748
2749
2750
2751
2752 num_channels = hsotg->params.host_channels;
2753 qh_ptr = hsotg->non_periodic_sched_inactive.next;
2754 while (qh_ptr != &hsotg->non_periodic_sched_inactive) {
2755 if (!hsotg->params.uframe_sched &&
2756 hsotg->non_periodic_channels >= num_channels -
2757 hsotg->periodic_channels)
2758 break;
2759 if (list_empty(&hsotg->free_hc_list))
2760 break;
2761 qh = list_entry(qh_ptr, struct dwc2_qh, qh_list_entry);
2762 if (hsotg->params.uframe_sched) {
2763 if (hsotg->available_host_channels < 1)
2764 break;
2765 hsotg->available_host_channels--;
2766 }
2767
2768 if (dwc2_assign_and_init_hc(hsotg, qh))
2769 break;
2770
2771
2772
2773
2774
2775 qh_ptr = qh_ptr->next;
2776 list_move_tail(&qh->qh_list_entry,
2777 &hsotg->non_periodic_sched_active);
2778
2779 if (ret_val == DWC2_TRANSACTION_NONE)
2780 ret_val = DWC2_TRANSACTION_NON_PERIODIC;
2781 else
2782 ret_val = DWC2_TRANSACTION_ALL;
2783
2784 if (!hsotg->params.uframe_sched)
2785 hsotg->non_periodic_channels++;
2786 }
2787
2788 return ret_val;
2789 }
2790
2791
2792
2793
2794
2795
2796
2797
2798
2799
2800
2801
2802
2803
2804
2805
2806
2807
2808
2809
2810
2811
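/*
 * dwc2_queue_transaction() - Attempts to queue a single transaction request
 * for a host channel associated with either a periodic or non-periodic
 * transfer
 *
 * @hsotg: The HCD state structure
 * @chan:  Host channel descriptor
 * @fifo_dwords_avail: Number of DWORDs available in the periodic or
 *                     non-periodic Tx FIFO, as appropriate for the transfer
 *
 * Return: 1 if a request was queued and more requests may be needed to
 * complete the transfer, 0 if no more requests are required for this
 * transfer, -1 if there is insufficient space in the Tx FIFO
 *
 * Must be called with interrupt disabled and spinlock held.
 */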
2812 static int dwc2_queue_transaction(struct dwc2_hsotg *hsotg,
2813 struct dwc2_host_chan *chan,
2814 u16 fifo_dwords_avail)
2815 {
2816 int retval = 0;
2817
2818 if (chan->do_split)
2819
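/* Put ourselves on the list to keep split transactions in order */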
2820 list_move_tail(&chan->split_order_list_entry,
2821 &hsotg->split_order);
2822
2823 if (hsotg->params.host_dma && chan->qh) {
2824 if (hsotg->params.dma_desc_enable) {
2825 if (!chan->xfer_started ||
2826 chan->ep_type == USB_ENDPOINT_XFER_ISOC) {
2827 dwc2_hcd_start_xfer_ddma(hsotg, chan->qh);
2828 chan->qh->ping_state = 0;
2829 }
2830 } else if (!chan->xfer_started) {
2831 dwc2_hc_start_transfer(hsotg, chan);
2832 chan->qh->ping_state = 0;
2833 }
2834 } else if (chan->halt_pending) {
2835
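/* Don't queue a request if the channel has been halted */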
2836 } else if (chan->halt_on_queue) {
2837 dwc2_hc_halt(hsotg, chan, chan->halt_status);
2838 } else if (chan->do_ping) {
2839 if (!chan->xfer_started)
2840 dwc2_hc_start_transfer(hsotg, chan);
2841 } else if (!chan->ep_is_in ||
2842 chan->data_pid_start == DWC2_HC_PID_SETUP) {
2843 if ((fifo_dwords_avail * 4) >= chan->max_packet) {
2844 if (!chan->xfer_started) {
2845 dwc2_hc_start_transfer(hsotg, chan);
2846 retval = 1;
2847 } else {
2848 retval = dwc2_hc_continue_transfer(hsotg, chan);
2849 }
2850 } else {
2851 retval = -1;
2852 }
2853 } else {
2854 if (!chan->xfer_started) {
2855 dwc2_hc_start_transfer(hsotg, chan);
2856 retval = 1;
2857 } else {
2858 retval = dwc2_hc_continue_transfer(hsotg, chan);
2859 }
2860 }
2861
2862 return retval;
2863 }
2864
2865
2866
2867
2868
2869
2870
2871
2872
2873
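/*
 * Processes periodic channels for the next frame and queues transactions for
 * these channels to the DWC_otg controller. After queueing transactions, the
 * Periodic Tx FIFO Empty interrupt is enabled if there are more transactions
 * to queue as Periodic Tx FIFO or request queue space becomes available.
 * Otherwise, the Periodic Tx FIFO Empty interrupt is disabled.
 *
 * Must be called with interrupt disabled and spinlock held
 */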
2874 static void dwc2_process_periodic_channels(struct dwc2_hsotg *hsotg)
2875 {
2876 struct list_head *qh_ptr;
2877 struct dwc2_qh *qh;
2878 u32 tx_status;
2879 u32 fspcavail;
2880 u32 gintmsk;
2881 int status;
2882 bool no_queue_space = false;
2883 bool no_fifo_space = false;
2884 u32 qspcavail;
2885
2886
2887 if (list_empty(&hsotg->periodic_sched_assigned))
2888 goto exit;
2889
2890 if (dbg_perio())
2891 dev_vdbg(hsotg->dev, "Queue periodic transactions\n");
2892
2893 tx_status = dwc2_readl(hsotg, HPTXSTS);
2894 qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
2895 TXSTS_QSPCAVAIL_SHIFT;
2896 fspcavail = (tx_status & TXSTS_FSPCAVAIL_MASK) >>
2897 TXSTS_FSPCAVAIL_SHIFT;
2898
2899 if (dbg_perio()) {
2900 dev_vdbg(hsotg->dev, " P Tx Req Queue Space Avail (before queue): %d\n",
2901 qspcavail);
2902 dev_vdbg(hsotg->dev, " P Tx FIFO Space Avail (before queue): %d\n",
2903 fspcavail);
2904 }
2905
2906 qh_ptr = hsotg->periodic_sched_assigned.next;
2907 while (qh_ptr != &hsotg->periodic_sched_assigned) {
2908 tx_status = dwc2_readl(hsotg, HPTXSTS);
2909 qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
2910 TXSTS_QSPCAVAIL_SHIFT;
2911 if (qspcavail == 0) {
2912 no_queue_space = true;
2913 break;
2914 }
2915
2916 qh = list_entry(qh_ptr, struct dwc2_qh, qh_list_entry);
2917 if (!qh->channel) {
2918 qh_ptr = qh_ptr->next;
2919 continue;
2920 }
2921
2922
2923 if (qh->tt_buffer_dirty) {
2924 qh_ptr = qh_ptr->next;
2925 continue;
2926 }
2927
2928
2929
2930
2931
2932
2933 if (!hsotg->params.host_dma &&
2934 qh->channel->multi_count > 1)
2935 hsotg->queuing_high_bandwidth = 1;
2936
2937 fspcavail = (tx_status & TXSTS_FSPCAVAIL_MASK) >>
2938 TXSTS_FSPCAVAIL_SHIFT;
2939 status = dwc2_queue_transaction(hsotg, qh->channel, fspcavail);
2940 if (status < 0) {
2941 no_fifo_space = true;
2942 break;
2943 }
2944
2945
2946
2947
2948
2949
2950
2951
2952 if (hsotg->params.host_dma || status == 0 ||
2953 qh->channel->requests == qh->channel->multi_count) {
2954 qh_ptr = qh_ptr->next;
2955
2956
2957
2958
2959 list_move_tail(&qh->qh_list_entry,
2960 &hsotg->periodic_sched_queued);
2961
2962
2963 hsotg->queuing_high_bandwidth = 0;
2964 }
2965 }
2966
2967 exit:
2968 if (no_queue_space || no_fifo_space ||
2969 (!hsotg->params.host_dma &&
2970 !list_empty(&hsotg->periodic_sched_assigned))) {
2971
2972
2973
2974
2975
2976
2977
2978 gintmsk = dwc2_readl(hsotg, GINTMSK);
2979 if (!(gintmsk & GINTSTS_PTXFEMP)) {
2980 gintmsk |= GINTSTS_PTXFEMP;
2981 dwc2_writel(hsotg, gintmsk, GINTMSK);
2982 }
2983 } else {
2984
2985
2986
2987
2988
2989
2990
2991 gintmsk = dwc2_readl(hsotg, GINTMSK);
2992 if (gintmsk & GINTSTS_PTXFEMP) {
2993 gintmsk &= ~GINTSTS_PTXFEMP;
2994 dwc2_writel(hsotg, gintmsk, GINTMSK);
2995 }
2996 }
2997 }
2998
2999
3000
3001
3002
3003
3004
3005
3006
3007
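/*
 * Processes active non-periodic channels and queues transactions for these
 * channels to the DWC_otg controller. After queueing transactions, the NP Tx
 * FIFO Empty interrupt is enabled if there are more transactions to queue as
 * NP Tx FIFO or request queue space becomes available. Otherwise, the NP Tx
 * FIFO Empty interrupt is disabled.
 *
 * Must be called with interrupt disabled and spinlock held
 */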
3008 static void dwc2_process_non_periodic_channels(struct dwc2_hsotg *hsotg)
3009 {
3010 struct list_head *orig_qh_ptr;
3011 struct dwc2_qh *qh;
3012 u32 tx_status;
3013 u32 qspcavail;
3014 u32 fspcavail;
3015 u32 gintmsk;
3016 int status;
3017 int no_queue_space = 0;
3018 int no_fifo_space = 0;
3019 int more_to_do = 0;
3020
3021 dev_vdbg(hsotg->dev, "Queue non-periodic transactions\n");
3022
3023 tx_status = dwc2_readl(hsotg, GNPTXSTS);
3024 qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
3025 TXSTS_QSPCAVAIL_SHIFT;
3026 fspcavail = (tx_status & TXSTS_FSPCAVAIL_MASK) >>
3027 TXSTS_FSPCAVAIL_SHIFT;
3028 dev_vdbg(hsotg->dev, " NP Tx Req Queue Space Avail (before queue): %d\n",
3029 qspcavail);
3030 dev_vdbg(hsotg->dev, " NP Tx FIFO Space Avail (before queue): %d\n",
3031 fspcavail);
3032
3033
3034
3035
3036
3037 if (hsotg->non_periodic_qh_ptr == &hsotg->non_periodic_sched_active)
3038 hsotg->non_periodic_qh_ptr = hsotg->non_periodic_qh_ptr->next;
3039 orig_qh_ptr = hsotg->non_periodic_qh_ptr;
3040
3041
3042
3043
3044
3045 do {
3046 tx_status = dwc2_readl(hsotg, GNPTXSTS);
3047 qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
3048 TXSTS_QSPCAVAIL_SHIFT;
3049 if (!hsotg->params.host_dma && qspcavail == 0) {
3050 no_queue_space = 1;
3051 break;
3052 }
3053
3054 qh = list_entry(hsotg->non_periodic_qh_ptr, struct dwc2_qh,
3055 qh_list_entry);
3056 if (!qh->channel)
3057 goto next;
3058
3059
3060 if (qh->tt_buffer_dirty)
3061 goto next;
3062
3063 fspcavail = (tx_status & TXSTS_FSPCAVAIL_MASK) >>
3064 TXSTS_FSPCAVAIL_SHIFT;
3065 status = dwc2_queue_transaction(hsotg, qh->channel, fspcavail);
3066
3067 if (status > 0) {
3068 more_to_do = 1;
3069 } else if (status < 0) {
3070 no_fifo_space = 1;
3071 break;
3072 }
3073 next:
3074
3075 hsotg->non_periodic_qh_ptr = hsotg->non_periodic_qh_ptr->next;
3076 if (hsotg->non_periodic_qh_ptr ==
3077 &hsotg->non_periodic_sched_active)
3078 hsotg->non_periodic_qh_ptr =
3079 hsotg->non_periodic_qh_ptr->next;
3080 } while (hsotg->non_periodic_qh_ptr != orig_qh_ptr);
3081
3082 if (!hsotg->params.host_dma) {
3083 tx_status = dwc2_readl(hsotg, GNPTXSTS);
3084 qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
3085 TXSTS_QSPCAVAIL_SHIFT;
3086 fspcavail = (tx_status & TXSTS_FSPCAVAIL_MASK) >>
3087 TXSTS_FSPCAVAIL_SHIFT;
3088 dev_vdbg(hsotg->dev,
3089 " NP Tx Req Queue Space Avail (after queue): %d\n",
3090 qspcavail);
3091 dev_vdbg(hsotg->dev,
3092 " NP Tx FIFO Space Avail (after queue): %d\n",
3093 fspcavail);
3094
3095 if (more_to_do || no_queue_space || no_fifo_space) {
3096
3097
3098
3099
3100
3101
3102
3103 gintmsk = dwc2_readl(hsotg, GINTMSK);
3104 gintmsk |= GINTSTS_NPTXFEMP;
3105 dwc2_writel(hsotg, gintmsk, GINTMSK);
3106 } else {
3107
3108
3109
3110
3111
3112
3113
3114 gintmsk = dwc2_readl(hsotg, GINTMSK);
3115 gintmsk &= ~GINTSTS_NPTXFEMP;
3116 dwc2_writel(hsotg, gintmsk, GINTMSK);
3117 }
3118 }
3119 }
3120
3121
3122
3123
3124
3125
3126
3127
3128
3129
3130
3131
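/*
 * dwc2_hcd_queue_transactions() - Processes the currently active host channels
 * and queues transactions for these channels to the DWC_otg controller. Called
 * from the HCD interrupt handler functions.
 *
 * @hsotg:   The HCD state structure
 * @tr_type: The type(s) of transactions to queue (non-periodic, periodic,
 *           or both)
 *
 * Must be called with interrupt disabled and spinlock held
 */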
3132 void dwc2_hcd_queue_transactions(struct dwc2_hsotg *hsotg,
3133 enum dwc2_transaction_type tr_type)
3134 {
3135 #ifdef DWC2_DEBUG_SOF
3136 dev_vdbg(hsotg->dev, "Queue Transactions\n");
3137 #endif
3138
3139 if (tr_type == DWC2_TRANSACTION_PERIODIC ||
3140 tr_type == DWC2_TRANSACTION_ALL)
3141 dwc2_process_periodic_channels(hsotg);
3142
3143
3144 if (tr_type == DWC2_TRANSACTION_NON_PERIODIC ||
3145 tr_type == DWC2_TRANSACTION_ALL) {
3146 if (!list_empty(&hsotg->non_periodic_sched_active)) {
3147 dwc2_process_non_periodic_channels(hsotg);
3148 } else {
3149
3150
3151
3152
3153 u32 gintmsk = dwc2_readl(hsotg, GINTMSK);
3154
3155 gintmsk &= ~GINTSTS_NPTXFEMP;
3156 dwc2_writel(hsotg, gintmsk, GINTMSK);
3157 }
3158 }
3159 }
3160
3161 static void dwc2_conn_id_status_change(struct work_struct *work)
3162 {
3163 struct dwc2_hsotg *hsotg = container_of(work, struct dwc2_hsotg,
3164 wf_otg);
3165 u32 count = 0;
3166 u32 gotgctl;
3167 unsigned long flags;
3168
3169 dev_dbg(hsotg->dev, "%s()\n", __func__);
3170
3171 gotgctl = dwc2_readl(hsotg, GOTGCTL);
3172 dev_dbg(hsotg->dev, "gotgctl=%0x\n", gotgctl);
3173 dev_dbg(hsotg->dev, "gotgctl.b.conidsts=%d\n",
3174 !!(gotgctl & GOTGCTL_CONID_B));
3175
3176
3177 if (gotgctl & GOTGCTL_CONID_B) {
3178 dwc2_vbus_supply_exit(hsotg);
3179
3180 dev_dbg(hsotg->dev, "connId B\n");
3181 if (hsotg->bus_suspended) {
3182 dev_info(hsotg->dev,
3183 "Do port resume before switching to device mode\n");
3184 dwc2_port_resume(hsotg);
3185 }
3186 while (!dwc2_is_device_mode(hsotg)) {
3187 dev_info(hsotg->dev,
3188 "Waiting for Peripheral Mode, Mode=%s\n",
3189 dwc2_is_host_mode(hsotg) ? "Host" :
3190 "Peripheral");
3191 msleep(20);
3192
3193
3194
3195
3196
3197 gotgctl = dwc2_readl(hsotg, GOTGCTL);
3198 if (!(gotgctl & GOTGCTL_CONID_B))
3199 goto host;
3200 if (++count > 250)
3201 break;
3202 }
3203 if (count > 250)
3204 dev_err(hsotg->dev,
3205 "Connection id status change timed out\n");
3206
3207
3208
3209
3210
3211
3212 if (hsotg->in_ppd && hsotg->lx_state == DWC2_L2)
3213 dwc2_exit_partial_power_down(hsotg, 0, false);
3214
3215 hsotg->op_state = OTG_STATE_B_PERIPHERAL;
3216 dwc2_core_init(hsotg, false);
3217 dwc2_enable_global_interrupts(hsotg);
3218 spin_lock_irqsave(&hsotg->lock, flags);
3219 dwc2_hsotg_core_init_disconnected(hsotg, false);
3220 spin_unlock_irqrestore(&hsotg->lock, flags);
3221
3222 dwc2_enable_acg(hsotg);
3223 dwc2_hsotg_core_connect(hsotg);
3224 } else {
3225 host:
3226
3227 dev_dbg(hsotg->dev, "connId A\n");
3228 while (!dwc2_is_host_mode(hsotg)) {
3229 dev_info(hsotg->dev, "Waiting for Host Mode, Mode=%s\n",
3230 dwc2_is_host_mode(hsotg) ?
3231 "Host" : "Peripheral");
3232 msleep(20);
3233 if (++count > 250)
3234 break;
3235 }
3236 if (count > 250)
3237 dev_err(hsotg->dev,
3238 "Connection id status change timed out\n");
3239
3240 spin_lock_irqsave(&hsotg->lock, flags);
3241 dwc2_hsotg_disconnect(hsotg);
3242 spin_unlock_irqrestore(&hsotg->lock, flags);
3243
3244 hsotg->op_state = OTG_STATE_A_HOST;
3245
3246 dwc2_core_init(hsotg, false);
3247 dwc2_enable_global_interrupts(hsotg);
3248 dwc2_hcd_start(hsotg);
3249 }
3250 }
3251
3252 static void dwc2_wakeup_detected(struct timer_list *t)
3253 {
3254 struct dwc2_hsotg *hsotg = from_timer(hsotg, t, wkp_timer);
3255 u32 hprt0;
3256
3257 dev_dbg(hsotg->dev, "%s()\n", __func__);
3258
3259
3260
3261
3262
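/* Clear the Resume signaling on the root port and report the remote wakeup */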
3263 hprt0 = dwc2_read_hprt0(hsotg);
3264 dev_dbg(hsotg->dev, "Resume: HPRT0=%0x\n", hprt0);
3265 hprt0 &= ~HPRT0_RES;
3266 dwc2_writel(hsotg, hprt0, HPRT0);
3267 dev_dbg(hsotg->dev, "Clear Resume: HPRT0=%0x\n",
3268 dwc2_readl(hsotg, HPRT0));
3269
3270 dwc2_hcd_rem_wakeup(hsotg);
3271 hsotg->bus_suspended = false;
3272
3273
3274 hsotg->lx_state = DWC2_L0;
3275 }
3276
3277 static int dwc2_host_is_b_hnp_enabled(struct dwc2_hsotg *hsotg)
3278 {
3279 struct usb_hcd *hcd = dwc2_hsotg_to_hcd(hsotg);
3280
3281 return hcd->self.b_hnp_enable;
3282 }
3283
3284
3285
3286
3287
3288
3289
3290
3291
3292
3293
3294
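/*
 * dwc2_port_suspend() - Put the controller in suspend mode for host.
 *
 * @hsotg:  Programming view of the DWC_otg controller
 * @windex: The control request wIndex field
 *
 * Return: non-zero if failed to enter suspend mode for host.
 *
 * This function is for entering Host mode suspend.
 * Must NOT be called with interrupt disabled and spinlock held.
 */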
3295 int dwc2_port_suspend(struct dwc2_hsotg *hsotg, u16 windex)
3296 {
3297 unsigned long flags;
3298 u32 pcgctl;
3299 u32 gotgctl;
3300 int ret = 0;
3301
3302 dev_dbg(hsotg->dev, "%s()\n", __func__);
3303
3304 spin_lock_irqsave(&hsotg->lock, flags);
3305
3306 if (windex == hsotg->otg_port && dwc2_host_is_b_hnp_enabled(hsotg)) {
3307 gotgctl = dwc2_readl(hsotg, GOTGCTL);
3308 gotgctl |= GOTGCTL_HSTSETHNPEN;
3309 dwc2_writel(hsotg, gotgctl, GOTGCTL);
3310 hsotg->op_state = OTG_STATE_A_SUSPEND;
3311 }
3312
3313 switch (hsotg->params.power_down) {
3314 case DWC2_POWER_DOWN_PARAM_PARTIAL:
3315 ret = dwc2_enter_partial_power_down(hsotg);
3316 if (ret)
3317 dev_err(hsotg->dev,
3318 "enter partial_power_down failed.\n");
3319 break;
3320 case DWC2_POWER_DOWN_PARAM_HIBERNATION:
3321
3322
3323
3324
3325
3326
3327 spin_unlock_irqrestore(&hsotg->lock, flags);
3328 ret = dwc2_enter_hibernation(hsotg, 1);
3329 if (ret)
3330 dev_err(hsotg->dev, "enter hibernation failed.\n");
3331 spin_lock_irqsave(&hsotg->lock, flags);
3332 break;
3333 case DWC2_POWER_DOWN_PARAM_NONE:
3334
3335
3336
3337
3338 if (!hsotg->params.no_clock_gating)
3339 dwc2_host_enter_clock_gating(hsotg);
3340 break;
3341 }
3342
3343
3344 if (dwc2_host_is_b_hnp_enabled(hsotg)) {
3345 pcgctl = dwc2_readl(hsotg, PCGCTL);
3346 pcgctl &= ~PCGCTL_STOPPCLK;
3347 dwc2_writel(hsotg, pcgctl, PCGCTL);
3348
3349 spin_unlock_irqrestore(&hsotg->lock, flags);
3350
3351 msleep(200);
3352 } else {
3353 spin_unlock_irqrestore(&hsotg->lock, flags);
3354 }
3355
3356 return ret;
3357 }
3358
3359
3360
3361
3362
3363
3364
3365
3366
3367
3368
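/*
 * dwc2_port_resume() - Exit the controller from suspend mode for host.
 *
 * @hsotg: Programming view of the DWC_otg controller
 *
 * Return: non-zero if failed to exit suspend mode for host.
 *
 * This function is for exiting Host mode suspend.
 * Must NOT be called with interrupt disabled and spinlock held.
 */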
3369 int dwc2_port_resume(struct dwc2_hsotg *hsotg)
3370 {
3371 unsigned long flags;
3372 int ret = 0;
3373
3374 spin_lock_irqsave(&hsotg->lock, flags);
3375
3376 switch (hsotg->params.power_down) {
3377 case DWC2_POWER_DOWN_PARAM_PARTIAL:
3378 ret = dwc2_exit_partial_power_down(hsotg, 0, true);
3379 if (ret)
3380 dev_err(hsotg->dev,
3381 "exit partial_power_down failed.\n");
3382 break;
3383 case DWC2_POWER_DOWN_PARAM_HIBERNATION:
3384
3385 ret = dwc2_exit_hibernation(hsotg, 0, 0, 1);
3386 if (ret)
3387 dev_err(hsotg->dev, "exit hibernation failed.\n");
3388 break;
3389 case DWC2_POWER_DOWN_PARAM_NONE:
3390
3391
3392
3393
3394 spin_unlock_irqrestore(&hsotg->lock, flags);
3395 dwc2_host_exit_clock_gating(hsotg, 0);
3396 spin_lock_irqsave(&hsotg->lock, flags);
3397 break;
3398 }
3399
3400 spin_unlock_irqrestore(&hsotg->lock, flags);
3401
3402 return ret;
3403 }
3404
3405
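/* Handles hub class-specific requests */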
3406 static int dwc2_hcd_hub_control(struct dwc2_hsotg *hsotg, u16 typereq,
3407 u16 wvalue, u16 windex, char *buf, u16 wlength)
3408 {
3409 struct usb_hub_descriptor *hub_desc;
3410 int retval = 0;
3411 u32 hprt0;
3412 u32 port_status;
3413 u32 speed;
3414 u32 pcgctl;
3415 u32 pwr;
3416
3417 switch (typereq) {
3418 case ClearHubFeature:
3419 dev_dbg(hsotg->dev, "ClearHubFeature %1xh\n", wvalue);
3420
3421 switch (wvalue) {
3422 case C_HUB_LOCAL_POWER:
3423 case C_HUB_OVER_CURRENT:
3424
3425 break;
3426
3427 default:
3428 retval = -EINVAL;
3429 dev_err(hsotg->dev,
3430 "ClearHubFeature request %1xh unknown\n",
3431 wvalue);
3432 }
3433 break;
3434
3435 case ClearPortFeature:
3436 if (wvalue != USB_PORT_FEAT_L1)
3437 if (!windex || windex > 1)
3438 goto error;
3439 switch (wvalue) {
3440 case USB_PORT_FEAT_ENABLE:
3441 dev_dbg(hsotg->dev,
3442 "ClearPortFeature USB_PORT_FEAT_ENABLE\n");
3443 hprt0 = dwc2_read_hprt0(hsotg);
3444 hprt0 |= HPRT0_ENA;
3445 dwc2_writel(hsotg, hprt0, HPRT0);
3446 break;
3447
3448 case USB_PORT_FEAT_SUSPEND:
3449 dev_dbg(hsotg->dev,
3450 "ClearPortFeature USB_PORT_FEAT_SUSPEND\n");
3451
3452 if (hsotg->bus_suspended)
3453 retval = dwc2_port_resume(hsotg);
3454 break;
3455
3456 case USB_PORT_FEAT_POWER:
3457 dev_dbg(hsotg->dev,
3458 "ClearPortFeature USB_PORT_FEAT_POWER\n");
3459 hprt0 = dwc2_read_hprt0(hsotg);
3460 pwr = hprt0 & HPRT0_PWR;
3461 hprt0 &= ~HPRT0_PWR;
3462 dwc2_writel(hsotg, hprt0, HPRT0);
3463 if (pwr)
3464 dwc2_vbus_supply_exit(hsotg);
3465 break;
3466
3467 case USB_PORT_FEAT_INDICATOR:
3468 dev_dbg(hsotg->dev,
3469 "ClearPortFeature USB_PORT_FEAT_INDICATOR\n");
3470
3471 break;
3472
3473 case USB_PORT_FEAT_C_CONNECTION:
3474
3475
3476
3477 dev_dbg(hsotg->dev,
3478 "ClearPortFeature USB_PORT_FEAT_C_CONNECTION\n");
3479 hsotg->flags.b.port_connect_status_change = 0;
3480 break;
3481
3482 case USB_PORT_FEAT_C_RESET:
3483
3484 dev_dbg(hsotg->dev,
3485 "ClearPortFeature USB_PORT_FEAT_C_RESET\n");
3486 hsotg->flags.b.port_reset_change = 0;
3487 break;
3488
3489 case USB_PORT_FEAT_C_ENABLE:
3490
3491
3492
3493
3494 dev_dbg(hsotg->dev,
3495 "ClearPortFeature USB_PORT_FEAT_C_ENABLE\n");
3496 hsotg->flags.b.port_enable_change = 0;
3497 break;
3498
3499 case USB_PORT_FEAT_C_SUSPEND:
3500
3501
3502
3503
3504
3505 dev_dbg(hsotg->dev,
3506 "ClearPortFeature USB_PORT_FEAT_C_SUSPEND\n");
3507 hsotg->flags.b.port_suspend_change = 0;
3508 break;
3509
3510 case USB_PORT_FEAT_C_PORT_L1:
3511 dev_dbg(hsotg->dev,
3512 "ClearPortFeature USB_PORT_FEAT_C_PORT_L1\n");
3513 hsotg->flags.b.port_l1_change = 0;
3514 break;
3515
3516 case USB_PORT_FEAT_C_OVER_CURRENT:
3517 dev_dbg(hsotg->dev,
3518 "ClearPortFeature USB_PORT_FEAT_C_OVER_CURRENT\n");
3519 hsotg->flags.b.port_over_current_change = 0;
3520 break;
3521
3522 default:
3523 retval = -EINVAL;
3524 dev_err(hsotg->dev,
3525 "ClearPortFeature request %1xh unknown or unsupported\n",
3526 wvalue);
3527 }
3528 break;
3529
3530 case GetHubDescriptor:
3531 dev_dbg(hsotg->dev, "GetHubDescriptor\n");
3532 hub_desc = (struct usb_hub_descriptor *)buf;
3533 hub_desc->bDescLength = 9;
3534 hub_desc->bDescriptorType = USB_DT_HUB;
3535 hub_desc->bNbrPorts = 1;
3536 hub_desc->wHubCharacteristics =
3537 cpu_to_le16(HUB_CHAR_COMMON_LPSM |
3538 HUB_CHAR_INDV_PORT_OCPM);
3539 hub_desc->bPwrOn2PwrGood = 1;
3540 hub_desc->bHubContrCurrent = 0;
3541 hub_desc->u.hs.DeviceRemovable[0] = 0;
3542 hub_desc->u.hs.DeviceRemovable[1] = 0xff;
3543 break;
3544
3545 case GetHubStatus:
3546 dev_dbg(hsotg->dev, "GetHubStatus\n");
3547 memset(buf, 0, 4);
3548 break;
3549
3550 case GetPortStatus:
3551 dev_vdbg(hsotg->dev,
3552 "GetPortStatus wIndex=0x%04x flags=0x%08x\n", windex,
3553 hsotg->flags.d32);
3554 if (!windex || windex > 1)
3555 goto error;
3556
3557 port_status = 0;
3558 if (hsotg->flags.b.port_connect_status_change)
3559 port_status |= USB_PORT_STAT_C_CONNECTION << 16;
3560 if (hsotg->flags.b.port_enable_change)
3561 port_status |= USB_PORT_STAT_C_ENABLE << 16;
3562 if (hsotg->flags.b.port_suspend_change)
3563 port_status |= USB_PORT_STAT_C_SUSPEND << 16;
3564 if (hsotg->flags.b.port_l1_change)
3565 port_status |= USB_PORT_STAT_C_L1 << 16;
3566 if (hsotg->flags.b.port_reset_change)
3567 port_status |= USB_PORT_STAT_C_RESET << 16;
3568 if (hsotg->flags.b.port_over_current_change) {
3569 dev_warn(hsotg->dev, "Overcurrent change detected\n");
3570 port_status |= USB_PORT_STAT_C_OVERCURRENT << 16;
3571 }
3572
3573 if (!hsotg->flags.b.port_connect_status) {
3574
3575
3576
3577
3578
3579
3580
3581 *(__le32 *)buf = cpu_to_le32(port_status);
3582 break;
3583 }
3584
3585 hprt0 = dwc2_readl(hsotg, HPRT0);
3586 dev_vdbg(hsotg->dev, " HPRT0: 0x%08x\n", hprt0);
3587
3588 if (hprt0 & HPRT0_CONNSTS)
3589 port_status |= USB_PORT_STAT_CONNECTION;
3590 if (hprt0 & HPRT0_ENA)
3591 port_status |= USB_PORT_STAT_ENABLE;
3592 if (hprt0 & HPRT0_SUSP)
3593 port_status |= USB_PORT_STAT_SUSPEND;
3594 if (hprt0 & HPRT0_OVRCURRACT)
3595 port_status |= USB_PORT_STAT_OVERCURRENT;
3596 if (hprt0 & HPRT0_RST)
3597 port_status |= USB_PORT_STAT_RESET;
3598 if (hprt0 & HPRT0_PWR)
3599 port_status |= USB_PORT_STAT_POWER;
3600
3601 speed = (hprt0 & HPRT0_SPD_MASK) >> HPRT0_SPD_SHIFT;
3602 if (speed == HPRT0_SPD_HIGH_SPEED)
3603 port_status |= USB_PORT_STAT_HIGH_SPEED;
3604 else if (speed == HPRT0_SPD_LOW_SPEED)
3605 port_status |= USB_PORT_STAT_LOW_SPEED;
3606
3607 if (hprt0 & HPRT0_TSTCTL_MASK)
3608 port_status |= USB_PORT_STAT_TEST;
3609
3610
3611 if (hsotg->params.dma_desc_fs_enable) {
3612
3613
3614
3615
3616 if (hsotg->new_connection &&
3617 ((port_status &
3618 (USB_PORT_STAT_CONNECTION |
3619 USB_PORT_STAT_HIGH_SPEED |
3620 USB_PORT_STAT_LOW_SPEED)) ==
3621 USB_PORT_STAT_CONNECTION)) {
3622 u32 hcfg;
3623
3624 dev_info(hsotg->dev, "Enabling descriptor DMA mode\n");
3625 hsotg->params.dma_desc_enable = true;
3626 hcfg = dwc2_readl(hsotg, HCFG);
3627 hcfg |= HCFG_DESCDMA;
3628 dwc2_writel(hsotg, hcfg, HCFG);
3629 hsotg->new_connection = false;
3630 }
3631 }
3632
3633 dev_vdbg(hsotg->dev, "port_status=%08x\n", port_status);
3634 *(__le32 *)buf = cpu_to_le32(port_status);
3635 break;
3636
3637 case SetHubFeature:
3638 dev_dbg(hsotg->dev, "SetHubFeature\n");
3639
3640 break;
3641
3642 case SetPortFeature:
3643 dev_dbg(hsotg->dev, "SetPortFeature\n");
3644 if (wvalue != USB_PORT_FEAT_TEST && (!windex || windex > 1))
3645 goto error;
3646
3647 if (!hsotg->flags.b.port_connect_status) {
3648
3649
3650
3651
3652
3653
3654
3655 break;
3656 }
3657
3658 switch (wvalue) {
3659 case USB_PORT_FEAT_SUSPEND:
3660 dev_dbg(hsotg->dev,
3661 "SetPortFeature - USB_PORT_FEAT_SUSPEND\n");
3662 if (windex != hsotg->otg_port)
3663 goto error;
3664 if (!hsotg->bus_suspended)
3665 retval = dwc2_port_suspend(hsotg, windex);
3666 break;
3667
3668 case USB_PORT_FEAT_POWER:
3669 dev_dbg(hsotg->dev,
3670 "SetPortFeature - USB_PORT_FEAT_POWER\n");
3671 hprt0 = dwc2_read_hprt0(hsotg);
3672 pwr = hprt0 & HPRT0_PWR;
3673 hprt0 |= HPRT0_PWR;
3674 dwc2_writel(hsotg, hprt0, HPRT0);
3675 if (!pwr)
3676 dwc2_vbus_supply_init(hsotg);
3677 break;
3678
3679 case USB_PORT_FEAT_RESET:
3680 dev_dbg(hsotg->dev,
3681 "SetPortFeature - USB_PORT_FEAT_RESET\n");
3682
3683 hprt0 = dwc2_read_hprt0(hsotg);
3684
3685 if (hsotg->hibernated) {
3686 retval = dwc2_exit_hibernation(hsotg, 0, 1, 1);
3687 if (retval)
3688 dev_err(hsotg->dev,
3689 "exit hibernation failed\n");
3690 }
3691
3692 if (hsotg->in_ppd) {
3693 retval = dwc2_exit_partial_power_down(hsotg, 1,
3694 true);
3695 if (retval)
3696 dev_err(hsotg->dev,
3697 "exit partial_power_down failed\n");
3698 }
3699
3700 if (hsotg->params.power_down ==
3701 DWC2_POWER_DOWN_PARAM_NONE && hsotg->bus_suspended)
3702 dwc2_host_exit_clock_gating(hsotg, 0);
3703
3704 pcgctl = dwc2_readl(hsotg, PCGCTL);
3705 pcgctl &= ~(PCGCTL_ENBL_SLEEP_GATING | PCGCTL_STOPPCLK);
3706 dwc2_writel(hsotg, pcgctl, PCGCTL);
3707
3708 dwc2_writel(hsotg, 0, PCGCTL);
3709
3710 hprt0 = dwc2_read_hprt0(hsotg);
3711 pwr = hprt0 & HPRT0_PWR;
3712
3713 hprt0 &= ~HPRT0_SUSP;
3714
3715
3716
3717
3718
3719
3720 if (!dwc2_hcd_is_b_host(hsotg)) {
3721 hprt0 |= HPRT0_PWR | HPRT0_RST;
3722 dev_dbg(hsotg->dev,
3723 "In host mode, hprt0=%08x\n", hprt0);
3724 dwc2_writel(hsotg, hprt0, HPRT0);
3725 if (!pwr)
3726 dwc2_vbus_supply_init(hsotg);
3727 }
3728
3729
3730 msleep(50);
3731 hprt0 &= ~HPRT0_RST;
3732 dwc2_writel(hsotg, hprt0, HPRT0);
3733 hsotg->lx_state = DWC2_L0;
3734 break;
3735
3736 case USB_PORT_FEAT_INDICATOR:
3737 dev_dbg(hsotg->dev,
3738 "SetPortFeature - USB_PORT_FEAT_INDICATOR\n");
3739
3740 break;
3741
3742 case USB_PORT_FEAT_TEST:
3743 hprt0 = dwc2_read_hprt0(hsotg);
3744 dev_dbg(hsotg->dev,
3745 "SetPortFeature - USB_PORT_FEAT_TEST\n");
3746 hprt0 &= ~HPRT0_TSTCTL_MASK;
3747 hprt0 |= (windex >> 8) << HPRT0_TSTCTL_SHIFT;
3748 dwc2_writel(hsotg, hprt0, HPRT0);
3749 break;
3750
3751 default:
3752 retval = -EINVAL;
3753 dev_err(hsotg->dev,
3754 "SetPortFeature %1xh unknown or unsupported\n",
3755 wvalue);
3756 break;
3757 }
3758 break;
3759
3760 default:
3761 error:
3762 retval = -EINVAL;
3763 dev_dbg(hsotg->dev,
3764 "Unknown hub control request: %1xh wIndex: %1xh wValue: %1xh\n",
3765 typereq, windex, wvalue);
3766 break;
3767 }
3768
3769 return retval;
3770 }
3771
3772 static int dwc2_hcd_is_status_changed(struct dwc2_hsotg *hsotg, int port)
3773 {
3774 int retval;
3775
3776 if (port != 1)
3777 return -EINVAL;
3778
3779 retval = (hsotg->flags.b.port_connect_status_change ||
3780 hsotg->flags.b.port_reset_change ||
3781 hsotg->flags.b.port_enable_change ||
3782 hsotg->flags.b.port_suspend_change ||
3783 hsotg->flags.b.port_over_current_change);
3784
3785 if (retval) {
3786 dev_dbg(hsotg->dev,
3787 "DWC OTG HCD HUB STATUS DATA: Root port status changed\n");
3788 dev_dbg(hsotg->dev, " port_connect_status_change: %d\n",
3789 hsotg->flags.b.port_connect_status_change);
3790 dev_dbg(hsotg->dev, " port_reset_change: %d\n",
3791 hsotg->flags.b.port_reset_change);
3792 dev_dbg(hsotg->dev, " port_enable_change: %d\n",
3793 hsotg->flags.b.port_enable_change);
3794 dev_dbg(hsotg->dev, " port_suspend_change: %d\n",
3795 hsotg->flags.b.port_suspend_change);
3796 dev_dbg(hsotg->dev, " port_over_current_change: %d\n",
3797 hsotg->flags.b.port_over_current_change);
3798 }
3799
3800 return retval;
3801 }
3802
3803 int dwc2_hcd_get_frame_number(struct dwc2_hsotg *hsotg)
3804 {
3805 u32 hfnum = dwc2_readl(hsotg, HFNUM);
3806
3807 #ifdef DWC2_DEBUG_SOF
3808 dev_vdbg(hsotg->dev, "DWC OTG HCD GET FRAME NUMBER %d\n",
3809 (hfnum & HFNUM_FRNUM_MASK) >> HFNUM_FRNUM_SHIFT);
3810 #endif
3811 return (hfnum & HFNUM_FRNUM_MASK) >> HFNUM_FRNUM_SHIFT;
3812 }
3813
3814 int dwc2_hcd_get_future_frame_number(struct dwc2_hsotg *hsotg, int us)
3815 {
3816 u32 hprt = dwc2_readl(hsotg, HPRT0);
3817 u32 hfir = dwc2_readl(hsotg, HFIR);
3818 u32 hfnum = dwc2_readl(hsotg, HFNUM);
3819 unsigned int us_per_frame;
3820 unsigned int frame_number;
3821 unsigned int remaining;
3822 unsigned int interval;
3823 unsigned int phy_clks;
3824
3825
3826 us_per_frame = (hprt & HPRT0_SPD_MASK) ? 1000 : 125;
3827
3828
3829 frame_number = (hfnum & HFNUM_FRNUM_MASK) >> HFNUM_FRNUM_SHIFT;
3830 remaining = (hfnum & HFNUM_FRREM_MASK) >> HFNUM_FRREM_SHIFT;
3831 interval = (hfir & HFIR_FRINT_MASK) >> HFIR_FRINT_SHIFT;
3832
3833
3834
3835
3836
3837 phy_clks = (interval - remaining) +
3838 DIV_ROUND_UP(interval * us, us_per_frame);
3839
3840 return dwc2_frame_num_inc(frame_number, phy_clks / interval);
3841 }
3842
3843 int dwc2_hcd_is_b_host(struct dwc2_hsotg *hsotg)
3844 {
3845 return hsotg->op_state == OTG_STATE_B_HOST;
3846 }
3847
3848 static struct dwc2_hcd_urb *dwc2_hcd_urb_alloc(struct dwc2_hsotg *hsotg,
3849 int iso_desc_count,
3850 gfp_t mem_flags)
3851 {
3852 struct dwc2_hcd_urb *urb;
3853
3854 urb = kzalloc(struct_size(urb, iso_descs, iso_desc_count), mem_flags);
3855 if (urb)
3856 urb->packet_count = iso_desc_count;
3857 return urb;
3858 }
3859
3860 static void dwc2_hcd_urb_set_pipeinfo(struct dwc2_hsotg *hsotg,
3861 struct dwc2_hcd_urb *urb, u8 dev_addr,
3862 u8 ep_num, u8 ep_type, u8 ep_dir,
3863 u16 maxp, u16 maxp_mult)
3864 {
3865 if (dbg_perio() ||
3866 ep_type == USB_ENDPOINT_XFER_BULK ||
3867 ep_type == USB_ENDPOINT_XFER_CONTROL)
3868 dev_vdbg(hsotg->dev,
3869 "addr=%d, ep_num=%d, ep_dir=%1x, ep_type=%1x, maxp=%d (%d mult)\n",
3870 dev_addr, ep_num, ep_dir, ep_type, maxp, maxp_mult);
3871 urb->pipe_info.dev_addr = dev_addr;
3872 urb->pipe_info.ep_num = ep_num;
3873 urb->pipe_info.pipe_type = ep_type;
3874 urb->pipe_info.pipe_dir = ep_dir;
3875 urb->pipe_info.maxp = maxp;
3876 urb->pipe_info.maxp_mult = maxp_mult;
3877 }
3878
3879
3880
3881
3882
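/*
 * Dumps the state of the HCD and its host channels for debugging. Compiled
 * in only when DEBUG is defined.
 */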
3883 void dwc2_hcd_dump_state(struct dwc2_hsotg *hsotg)
3884 {
3885 #ifdef DEBUG
3886 struct dwc2_host_chan *chan;
3887 struct dwc2_hcd_urb *urb;
3888 struct dwc2_qtd *qtd;
3889 int num_channels;
3890 u32 np_tx_status;
3891 u32 p_tx_status;
3892 int i;
3893
3894 num_channels = hsotg->params.host_channels;
3895 dev_dbg(hsotg->dev, "\n");
3896 dev_dbg(hsotg->dev,
3897 "************************************************************\n");
3898 dev_dbg(hsotg->dev, "HCD State:\n");
3899 dev_dbg(hsotg->dev, " Num channels: %d\n", num_channels);
3900
3901 for (i = 0; i < num_channels; i++) {
3902 chan = hsotg->hc_ptr_array[i];
3903 dev_dbg(hsotg->dev, " Channel %d:\n", i);
3904 dev_dbg(hsotg->dev,
3905 " dev_addr: %d, ep_num: %d, ep_is_in: %d\n",
3906 chan->dev_addr, chan->ep_num, chan->ep_is_in);
3907 dev_dbg(hsotg->dev, " speed: %d\n", chan->speed);
3908 dev_dbg(hsotg->dev, " ep_type: %d\n", chan->ep_type);
3909 dev_dbg(hsotg->dev, " max_packet: %d\n", chan->max_packet);
3910 dev_dbg(hsotg->dev, " data_pid_start: %d\n",
3911 chan->data_pid_start);
3912 dev_dbg(hsotg->dev, " multi_count: %d\n", chan->multi_count);
3913 dev_dbg(hsotg->dev, " xfer_started: %d\n",
3914 chan->xfer_started);
3915 dev_dbg(hsotg->dev, " xfer_buf: %p\n", chan->xfer_buf);
3916 dev_dbg(hsotg->dev, " xfer_dma: %08lx\n",
3917 (unsigned long)chan->xfer_dma);
3918 dev_dbg(hsotg->dev, " xfer_len: %d\n", chan->xfer_len);
3919 dev_dbg(hsotg->dev, " xfer_count: %d\n", chan->xfer_count);
3920 dev_dbg(hsotg->dev, " halt_on_queue: %d\n",
3921 chan->halt_on_queue);
3922 dev_dbg(hsotg->dev, " halt_pending: %d\n",
3923 chan->halt_pending);
3924 dev_dbg(hsotg->dev, " halt_status: %d\n", chan->halt_status);
3925 dev_dbg(hsotg->dev, " do_split: %d\n", chan->do_split);
3926 dev_dbg(hsotg->dev, " complete_split: %d\n",
3927 chan->complete_split);
3928 dev_dbg(hsotg->dev, " hub_addr: %d\n", chan->hub_addr);
3929 dev_dbg(hsotg->dev, " hub_port: %d\n", chan->hub_port);
3930 dev_dbg(hsotg->dev, " xact_pos: %d\n", chan->xact_pos);
3931 dev_dbg(hsotg->dev, " requests: %d\n", chan->requests);
3932 dev_dbg(hsotg->dev, " qh: %p\n", chan->qh);
3933
3934 if (chan->xfer_started) {
3935 u32 hfnum, hcchar, hctsiz, hcint, hcintmsk;
3936
3937 hfnum = dwc2_readl(hsotg, HFNUM);
3938 hcchar = dwc2_readl(hsotg, HCCHAR(i));
3939 hctsiz = dwc2_readl(hsotg, HCTSIZ(i));
3940 hcint = dwc2_readl(hsotg, HCINT(i));
3941 hcintmsk = dwc2_readl(hsotg, HCINTMSK(i));
3942 dev_dbg(hsotg->dev, " hfnum: 0x%08x\n", hfnum);
3943 dev_dbg(hsotg->dev, " hcchar: 0x%08x\n", hcchar);
3944 dev_dbg(hsotg->dev, " hctsiz: 0x%08x\n", hctsiz);
3945 dev_dbg(hsotg->dev, " hcint: 0x%08x\n", hcint);
3946 dev_dbg(hsotg->dev, " hcintmsk: 0x%08x\n", hcintmsk);
3947 }
3948
3949 if (!(chan->xfer_started && chan->qh))
3950 continue;
3951
3952 list_for_each_entry(qtd, &chan->qh->qtd_list, qtd_list_entry) {
3953 if (!qtd->in_process)
3954 break;
3955 urb = qtd->urb;
3956 dev_dbg(hsotg->dev, " URB Info:\n");
3957 dev_dbg(hsotg->dev, " qtd: %p, urb: %p\n",
3958 qtd, urb);
3959 if (urb) {
3960 dev_dbg(hsotg->dev,
3961 " Dev: %d, EP: %d %s\n",
3962 dwc2_hcd_get_dev_addr(&urb->pipe_info),
3963 dwc2_hcd_get_ep_num(&urb->pipe_info),
3964 dwc2_hcd_is_pipe_in(&urb->pipe_info) ?
3965 "IN" : "OUT");
3966 dev_dbg(hsotg->dev,
3967 " Max packet size: %d (%d mult)\n",
3968 dwc2_hcd_get_maxp(&urb->pipe_info),
3969 dwc2_hcd_get_maxp_mult(&urb->pipe_info));
3970 dev_dbg(hsotg->dev,
3971 " transfer_buffer: %p\n",
3972 urb->buf);
3973 dev_dbg(hsotg->dev,
3974 " transfer_dma: %08lx\n",
3975 (unsigned long)urb->dma);
3976 dev_dbg(hsotg->dev,
3977 " transfer_buffer_length: %d\n",
3978 urb->length);
3979 dev_dbg(hsotg->dev, " actual_length: %d\n",
3980 urb->actual_length);
3981 }
3982 }
3983 }
3984
3985 dev_dbg(hsotg->dev, " non_periodic_channels: %d\n",
3986 hsotg->non_periodic_channels);
3987 dev_dbg(hsotg->dev, " periodic_channels: %d\n",
3988 hsotg->periodic_channels);
3989 dev_dbg(hsotg->dev, " periodic_usecs: %d\n", hsotg->periodic_usecs);
3990 np_tx_status = dwc2_readl(hsotg, GNPTXSTS);
3991 dev_dbg(hsotg->dev, " NP Tx Req Queue Space Avail: %d\n",
3992 (np_tx_status & TXSTS_QSPCAVAIL_MASK) >> TXSTS_QSPCAVAIL_SHIFT);
3993 dev_dbg(hsotg->dev, " NP Tx FIFO Space Avail: %d\n",
3994 (np_tx_status & TXSTS_FSPCAVAIL_MASK) >> TXSTS_FSPCAVAIL_SHIFT);
3995 p_tx_status = dwc2_readl(hsotg, HPTXSTS);
3996 dev_dbg(hsotg->dev, " P Tx Req Queue Space Avail: %d\n",
3997 (p_tx_status & TXSTS_QSPCAVAIL_MASK) >> TXSTS_QSPCAVAIL_SHIFT);
3998 dev_dbg(hsotg->dev, " P Tx FIFO Space Avail: %d\n",
3999 (p_tx_status & TXSTS_FSPCAVAIL_MASK) >> TXSTS_FSPCAVAIL_SHIFT);
4000 dwc2_dump_global_registers(hsotg);
4001 dwc2_dump_host_registers(hsotg);
4002 dev_dbg(hsotg->dev,
4003 "************************************************************\n");
4004 dev_dbg(hsotg->dev, "\n");
4005 #endif
4006 }
4007
4008 struct wrapper_priv_data {
4009 struct dwc2_hsotg *hsotg;
4010 };
4011
4012
4013 static struct dwc2_hsotg *dwc2_hcd_to_hsotg(struct usb_hcd *hcd)
4014 {
4015 struct wrapper_priv_data *p;
4016
4017 p = (struct wrapper_priv_data *)&hcd->hcd_priv;
4018 return p->hsotg;
4019 }
4020
4021
4022
4023
4024
4025
4026
4027
4028
4029
4030
4031
4032
4033
4034
4035
4036
4037
4038
4039
4040
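/*
 * dwc2_host_get_tt_info() - Get the dwc2_tt associated with context
 *
 * This will get the dwc2_tt structure (and ttport) associated with the given
 * context, which is really just a USB urb pointer.
 *
 * If the dwc2_tt structure for this device's tt hasn't been created yet it
 * will be created and returned with a refcount of 1. If it already exists
 * its refcount will be incremented.
 *
 * Return: the dwc2_tt, or NULL if the device has no tt or allocation failed.
 */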
4041 struct dwc2_tt *dwc2_host_get_tt_info(struct dwc2_hsotg *hsotg, void *context,
4042 gfp_t mem_flags, int *ttport)
4043 {
4044 struct urb *urb = context;
4045 struct dwc2_tt *dwc_tt = NULL;
4046
4047 if (urb->dev->tt) {
4048 *ttport = urb->dev->ttport;
4049
4050 dwc_tt = urb->dev->tt->hcpriv;
4051 if (!dwc_tt) {
4052 size_t bitmap_size;
4053
4054
4055
4056
4057
4058 bitmap_size = DWC2_ELEMENTS_PER_LS_BITMAP *
4059 sizeof(dwc_tt->periodic_bitmaps[0]);
4060 if (urb->dev->tt->multi)
4061 bitmap_size *= urb->dev->tt->hub->maxchild;
4062
4063 dwc_tt = kzalloc(sizeof(*dwc_tt) + bitmap_size,
4064 mem_flags);
4065 if (!dwc_tt)
4066 return NULL;
4067
4068 dwc_tt->usb_tt = urb->dev->tt;
4069 dwc_tt->usb_tt->hcpriv = dwc_tt;
4070 }
4071
4072 dwc_tt->refcount++;
4073 }
4074
4075 return dwc_tt;
4076 }
4077
4078
4079
4080
4081
4082
4083
4084
4085
4086
4087
4088
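/*
 * dwc2_host_put_tt_info() - Put the dwc2_tt from dwc2_host_get_tt_info()
 *
 * Drops a reference to the dwc2_tt; when the refcount reaches zero the
 * structure is freed and the usb_tt's hcpriv pointer is cleared.
 */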
4089 void dwc2_host_put_tt_info(struct dwc2_hsotg *hsotg, struct dwc2_tt *dwc_tt)
4090 {
4091
4092 if (!dwc_tt)
4093 return;
4094
4095 WARN_ON(dwc_tt->refcount < 1);
4096
4097 dwc_tt->refcount--;
4098 if (!dwc_tt->refcount) {
4099 dwc_tt->usb_tt->hcpriv = NULL;
4100 kfree(dwc_tt);
4101 }
4102 }
4103
4104 int dwc2_host_get_speed(struct dwc2_hsotg *hsotg, void *context)
4105 {
4106 struct urb *urb = context;
4107
4108 return urb->dev->speed;
4109 }
4110
4111 static void dwc2_allocate_bus_bandwidth(struct usb_hcd *hcd, u16 bw,
4112 struct urb *urb)
4113 {
4114 struct usb_bus *bus = hcd_to_bus(hcd);
4115
4116 if (urb->interval)
4117 bus->bandwidth_allocated += bw / urb->interval;
4118 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS)
4119 bus->bandwidth_isoc_reqs++;
4120 else
4121 bus->bandwidth_int_reqs++;
4122 }
4123
4124 static void dwc2_free_bus_bandwidth(struct usb_hcd *hcd, u16 bw,
4125 struct urb *urb)
4126 {
4127 struct usb_bus *bus = hcd_to_bus(hcd);
4128
4129 if (urb->interval)
4130 bus->bandwidth_allocated -= bw / urb->interval;
4131 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS)
4132 bus->bandwidth_isoc_reqs--;
4133 else
4134 bus->bandwidth_int_reqs--;
4135 }
4136
4137
4138
4139
4140
4141
4142
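/*
 * Sets the final status of an URB and returns it to the upper layer. Any
 * required cleanup of the URB is performed.
 *
 * Must be called with interrupt disabled and spinlock held
 */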
4143 void dwc2_host_complete(struct dwc2_hsotg *hsotg, struct dwc2_qtd *qtd,
4144 int status)
4145 {
4146 struct urb *urb;
4147 int i;
4148
4149 if (!qtd) {
4150 dev_dbg(hsotg->dev, "## %s: qtd is NULL ##\n", __func__);
4151 return;
4152 }
4153
4154 if (!qtd->urb) {
4155 dev_dbg(hsotg->dev, "## %s: qtd->urb is NULL ##\n", __func__);
4156 return;
4157 }
4158
4159 urb = qtd->urb->priv;
4160 if (!urb) {
4161 dev_dbg(hsotg->dev, "## %s: urb->priv is NULL ##\n", __func__);
4162 return;
4163 }
4164
4165 urb->actual_length = dwc2_hcd_urb_get_actual_length(qtd->urb);
4166
4167 if (dbg_urb(urb))
4168 dev_vdbg(hsotg->dev,
4169 "%s: urb %p device %d ep %d-%s status %d actual %d\n",
4170 __func__, urb, usb_pipedevice(urb->pipe),
4171 usb_pipeendpoint(urb->pipe),
4172 usb_pipein(urb->pipe) ? "IN" : "OUT", status,
4173 urb->actual_length);
4174
4175 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS) {
4176 urb->error_count = dwc2_hcd_urb_get_error_count(qtd->urb);
4177 for (i = 0; i < urb->number_of_packets; ++i) {
4178 urb->iso_frame_desc[i].actual_length =
4179 dwc2_hcd_urb_get_iso_desc_actual_length(
4180 qtd->urb, i);
4181 urb->iso_frame_desc[i].status =
4182 dwc2_hcd_urb_get_iso_desc_status(qtd->urb, i);
4183 }
4184 }
4185
4186 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS && dbg_perio()) {
4187 for (i = 0; i < urb->number_of_packets; i++)
4188 dev_vdbg(hsotg->dev, " ISO Desc %d status %d\n",
4189 i, urb->iso_frame_desc[i].status);
4190 }
4191
4192 urb->status = status;
4193 if (!status) {
4194 if ((urb->transfer_flags & URB_SHORT_NOT_OK) &&
4195 urb->actual_length < urb->transfer_buffer_length)
4196 urb->status = -EREMOTEIO;
4197 }
4198
4199 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS ||
4200 usb_pipetype(urb->pipe) == PIPE_INTERRUPT) {
4201 struct usb_host_endpoint *ep = urb->ep;
4202
4203 if (ep)
4204 dwc2_free_bus_bandwidth(dwc2_hsotg_to_hcd(hsotg),
4205 dwc2_hcd_get_ep_bandwidth(hsotg, ep),
4206 urb);
4207 }
4208
4209 usb_hcd_unlink_urb_from_ep(dwc2_hsotg_to_hcd(hsotg), urb);
4210 urb->hcpriv = NULL;
4211 kfree(qtd->urb);
4212 qtd->urb = NULL;
4213
4214 usb_hcd_giveback_urb(dwc2_hsotg_to_hcd(hsotg), urb, status);
4215 }
4216
4217
4218
4219
4220 static void dwc2_hcd_start_func(struct work_struct *work)
4221 {
4222 struct dwc2_hsotg *hsotg = container_of(work, struct dwc2_hsotg,
4223 start_work.work);
4224
4225 dev_dbg(hsotg->dev, "%s() %p\n", __func__, hsotg);
4226 dwc2_host_start(hsotg);
4227 }
4228
4229
4230
4231
4232 static void dwc2_hcd_reset_func(struct work_struct *work)
4233 {
4234 struct dwc2_hsotg *hsotg = container_of(work, struct dwc2_hsotg,
4235 reset_work.work);
4236 unsigned long flags;
4237 u32 hprt0;
4238
4239 dev_dbg(hsotg->dev, "USB RESET function called\n");
4240
4241 spin_lock_irqsave(&hsotg->lock, flags);
4242
4243 hprt0 = dwc2_read_hprt0(hsotg);
4244 hprt0 &= ~HPRT0_RST;
4245 dwc2_writel(hsotg, hprt0, HPRT0);
4246 hsotg->flags.b.port_reset_change = 1;
4247
4248 spin_unlock_irqrestore(&hsotg->lock, flags);
4249 }
4250
4251 static void dwc2_hcd_phy_reset_func(struct work_struct *work)
4252 {
4253 struct dwc2_hsotg *hsotg = container_of(work, struct dwc2_hsotg,
4254 phy_reset_work);
4255 int ret;
4256
4257 ret = phy_reset(hsotg->phy);
4258 if (ret)
4259 dev_warn(hsotg->dev, "PHY reset failed\n");
4260 }
4261
4262
4263
4264
4265
4266
4267
4268
4269
4270
4271
4272
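/*
 * Initializes the DWC_otg controller and its root hub and prepares it for host
 * mode operation. Activates the root port. Returns 0 on success and a negative
 * error code on failure.
 */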
4273 static int _dwc2_hcd_start(struct usb_hcd *hcd)
4274 {
4275 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4276 struct usb_bus *bus = hcd_to_bus(hcd);
4277 unsigned long flags;
4278 u32 hprt0;
4279 int ret;
4280
4281 dev_dbg(hsotg->dev, "DWC OTG HCD START\n");
4282
4283 spin_lock_irqsave(&hsotg->lock, flags);
4284 hsotg->lx_state = DWC2_L0;
4285 hcd->state = HC_STATE_RUNNING;
4286 set_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4287
4288 if (dwc2_is_device_mode(hsotg)) {
4289 spin_unlock_irqrestore(&hsotg->lock, flags);
4290 return 0;
4291 }
4292
4293 dwc2_hcd_reinit(hsotg);
4294
4295 hprt0 = dwc2_read_hprt0(hsotg);
4296
4297 if (hprt0 & HPRT0_PWR) {
4298
4299 spin_unlock_irqrestore(&hsotg->lock, flags);
4300 ret = dwc2_vbus_supply_init(hsotg);
4301 if (ret)
4302 return ret;
4303 spin_lock_irqsave(&hsotg->lock, flags);
4304 }
4305
4306
4307 if (bus->root_hub) {
4308 dev_dbg(hsotg->dev, "DWC OTG HCD Has Root Hub\n");
4309
4310 usb_hcd_resume_root_hub(hcd);
4311 }
4312
4313 spin_unlock_irqrestore(&hsotg->lock, flags);
4314
4315 return 0;
4316 }
4317
4318
4319
4320
4321
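/*
 * Halts the DWC_otg host mode operations in a clean manner. USB transfers are
 * stopped.
 */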
4322 static void _dwc2_hcd_stop(struct usb_hcd *hcd)
4323 {
4324 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4325 unsigned long flags;
4326 u32 hprt0;
4327
4328
4329 dwc2_disable_host_interrupts(hsotg);
4330
4331
4332 synchronize_irq(hcd->irq);
4333
4334 spin_lock_irqsave(&hsotg->lock, flags);
4335 hprt0 = dwc2_read_hprt0(hsotg);
4336
4337 dwc2_hcd_disconnect(hsotg, true);
4338 dwc2_hcd_stop(hsotg);
4339 hsotg->lx_state = DWC2_L3;
4340 hcd->state = HC_STATE_HALT;
4341 clear_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4342 spin_unlock_irqrestore(&hsotg->lock, flags);
4343
4344
4345 if (hprt0 & HPRT0_PWR)
4346 dwc2_vbus_supply_exit(hsotg);
4347
4348 usleep_range(1000, 3000);
4349 }
4350
4351 static int _dwc2_hcd_suspend(struct usb_hcd *hcd)
4352 {
4353 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4354 unsigned long flags;
4355 int ret = 0;
4356
4357 spin_lock_irqsave(&hsotg->lock, flags);
4358
4359 if (dwc2_is_device_mode(hsotg))
4360 goto unlock;
4361
4362 if (hsotg->lx_state != DWC2_L0)
4363 goto unlock;
4364
4365 if (!HCD_HW_ACCESSIBLE(hcd))
4366 goto unlock;
4367
4368 if (hsotg->op_state == OTG_STATE_B_PERIPHERAL)
4369 goto unlock;
4370
4371 if (hsotg->bus_suspended)
4372 goto skip_power_saving;
4373
4374 if (hsotg->flags.b.port_connect_status == 0)
4375 goto skip_power_saving;
4376
4377 switch (hsotg->params.power_down) {
4378 case DWC2_POWER_DOWN_PARAM_PARTIAL:
4379
4380 ret = dwc2_enter_partial_power_down(hsotg);
4381 if (ret)
4382 dev_err(hsotg->dev,
4383 "enter partial_power_down failed\n");
4384
4385 clear_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4386 break;
4387 case DWC2_POWER_DOWN_PARAM_HIBERNATION:
4388
4389 spin_unlock_irqrestore(&hsotg->lock, flags);
4390 ret = dwc2_enter_hibernation(hsotg, 1);
4391 if (ret)
4392 dev_err(hsotg->dev, "enter hibernation failed\n");
4393 spin_lock_irqsave(&hsotg->lock, flags);
4394
4395
4396 clear_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4397 break;
4398 case DWC2_POWER_DOWN_PARAM_NONE:
4399
4400
4401
4402
4403 if (!hsotg->params.no_clock_gating) {
4404 dwc2_host_enter_clock_gating(hsotg);
4405
4406
4407 clear_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4408 }
4409 break;
4410 default:
4411 goto skip_power_saving;
4412 }
4413
4414 spin_unlock_irqrestore(&hsotg->lock, flags);
4415 dwc2_vbus_supply_exit(hsotg);
4416 spin_lock_irqsave(&hsotg->lock, flags);
4417
4418
4419 if (!IS_ERR_OR_NULL(hsotg->uphy)) {
4420 spin_unlock_irqrestore(&hsotg->lock, flags);
4421 usb_phy_set_suspend(hsotg->uphy, true);
4422 spin_lock_irqsave(&hsotg->lock, flags);
4423 }
4424
4425 skip_power_saving:
4426 hsotg->lx_state = DWC2_L2;
4427 unlock:
4428 spin_unlock_irqrestore(&hsotg->lock, flags);
4429
4430 return ret;
4431 }
4432
4433 static int _dwc2_hcd_resume(struct usb_hcd *hcd)
4434 {
4435 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4436 unsigned long flags;
4437 u32 hprt0;
4438 int ret = 0;
4439
4440 spin_lock_irqsave(&hsotg->lock, flags);
4441
4442 if (dwc2_is_device_mode(hsotg))
4443 goto unlock;
4444
4445 if (hsotg->lx_state != DWC2_L2)
4446 goto unlock;
4447
4448 hprt0 = dwc2_read_hprt0(hsotg);
4449
4450
4451
4452
4453
4454
4455 if (hprt0 & HPRT0_CONNSTS) {
4456 hsotg->lx_state = DWC2_L0;
4457 goto unlock;
4458 }
4459
4460 switch (hsotg->params.power_down) {
4461 case DWC2_POWER_DOWN_PARAM_PARTIAL:
4462 ret = dwc2_exit_partial_power_down(hsotg, 0, true);
4463 if (ret)
4464 dev_err(hsotg->dev,
4465 "exit partial_power_down failed\n");
4466
4467
4468
4469
4470 set_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4471 break;
4472 case DWC2_POWER_DOWN_PARAM_HIBERNATION:
4473 ret = dwc2_exit_hibernation(hsotg, 0, 0, 1);
4474 if (ret)
4475 dev_err(hsotg->dev, "exit hibernation failed.\n");
4476
4477
4478
4479
4480
4481 set_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4482 break;
4483 case DWC2_POWER_DOWN_PARAM_NONE:
4484
4485
4486
4487
4488 spin_unlock_irqrestore(&hsotg->lock, flags);
4489 dwc2_host_exit_clock_gating(hsotg, 0);
4490
4491
4492
4493
4494
4495 dwc2_core_init(hsotg, false);
4496 dwc2_enable_global_interrupts(hsotg);
4497 dwc2_hcd_reinit(hsotg);
4498 spin_lock_irqsave(&hsotg->lock, flags);
4499
4500
4501
4502
4503
4504 set_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4505 break;
4506 default:
4507 hsotg->lx_state = DWC2_L0;
4508 goto unlock;
4509 }
4510
4511
4512 hsotg->flags.b.port_suspend_change = 1;
4513
4514
4515
4516
4517
4518
4519 if (!IS_ERR_OR_NULL(hsotg->uphy)) {
4520 spin_unlock_irqrestore(&hsotg->lock, flags);
4521 usb_phy_set_suspend(hsotg->uphy, false);
4522 spin_lock_irqsave(&hsotg->lock, flags);
4523 }
4524
4525
4526 spin_unlock_irqrestore(&hsotg->lock, flags);
4527 dwc2_vbus_supply_init(hsotg);
4528
4529
4530 usleep_range(3000, 5000);
4531 spin_lock_irqsave(&hsotg->lock, flags);
4532
4533
4534
4535
4536
4537 dwc2_writel(hsotg, HPRT0_PWR | HPRT0_CONNDET |
4538 HPRT0_ENACHG, HPRT0);
4539
4540
4541 spin_unlock_irqrestore(&hsotg->lock, flags);
4542 usleep_range(5000, 7000);
4543 spin_lock_irqsave(&hsotg->lock, flags);
4544 unlock:
4545 spin_unlock_irqrestore(&hsotg->lock, flags);
4546
4547 return ret;
4548 }
4549
4550
4551 static int _dwc2_hcd_get_frame_number(struct usb_hcd *hcd)
4552 {
4553 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4554
4555 return dwc2_hcd_get_frame_number(hsotg);
4556 }
4557
4558 static void dwc2_dump_urb_info(struct usb_hcd *hcd, struct urb *urb,
4559 char *fn_name)
4560 {
4561 #ifdef VERBOSE_DEBUG
4562 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4563 char *pipetype = NULL;
4564 char *speed = NULL;
4565
4566 dev_vdbg(hsotg->dev, "%s, urb %p\n", fn_name, urb);
4567 dev_vdbg(hsotg->dev, " Device address: %d\n",
4568 usb_pipedevice(urb->pipe));
4569 dev_vdbg(hsotg->dev, " Endpoint: %d, %s\n",
4570 usb_pipeendpoint(urb->pipe),
4571 usb_pipein(urb->pipe) ? "IN" : "OUT");
4572
4573 switch (usb_pipetype(urb->pipe)) {
4574 case PIPE_CONTROL:
4575 pipetype = "CONTROL";
4576 break;
4577 case PIPE_BULK:
4578 pipetype = "BULK";
4579 break;
4580 case PIPE_INTERRUPT:
4581 pipetype = "INTERRUPT";
4582 break;
4583 case PIPE_ISOCHRONOUS:
4584 pipetype = "ISOCHRONOUS";
4585 break;
4586 }
4587
4588 dev_vdbg(hsotg->dev, " Endpoint type: %s %s (%s)\n", pipetype,
4589 usb_urb_dir_in(urb) ? "IN" : "OUT", usb_pipein(urb->pipe) ?
4590 "IN" : "OUT");
4591
4592 switch (urb->dev->speed) {
4593 case USB_SPEED_HIGH:
4594 speed = "HIGH";
4595 break;
4596 case USB_SPEED_FULL:
4597 speed = "FULL";
4598 break;
4599 case USB_SPEED_LOW:
4600 speed = "LOW";
4601 break;
4602 default:
4603 speed = "UNKNOWN";
4604 break;
4605 }
4606
4607 dev_vdbg(hsotg->dev, " Speed: %s\n", speed);
4608 dev_vdbg(hsotg->dev, " Max packet size: %d (%d mult)\n",
4609 usb_endpoint_maxp(&urb->ep->desc),
4610 usb_endpoint_maxp_mult(&urb->ep->desc));
4611
4612 dev_vdbg(hsotg->dev, " Data buffer length: %d\n",
4613 urb->transfer_buffer_length);
4614 dev_vdbg(hsotg->dev, " Transfer buffer: %p, Transfer DMA: %08lx\n",
4615 urb->transfer_buffer, (unsigned long)urb->transfer_dma);
4616 dev_vdbg(hsotg->dev, " Setup buffer: %p, Setup DMA: %08lx\n",
4617 urb->setup_packet, (unsigned long)urb->setup_dma);
4618 dev_vdbg(hsotg->dev, " Interval: %d\n", urb->interval);
4619
4620 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS) {
4621 int i;
4622
4623 for (i = 0; i < urb->number_of_packets; i++) {
4624 dev_vdbg(hsotg->dev, " ISO Desc %d:\n", i);
4625 dev_vdbg(hsotg->dev, " offset: %d, length %d\n",
4626 urb->iso_frame_desc[i].offset,
4627 urb->iso_frame_desc[i].length);
4628 }
4629 }
4630 #endif
4631 }
4632
4633
4634
4635
4636
4637
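/*
 * Starts processing a USB transfer request specified by a USB Request Block
 * (URB). mem_flags indicates the type of memory allocation to use while
 * processing this URB.
 */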
4638 static int _dwc2_hcd_urb_enqueue(struct usb_hcd *hcd, struct urb *urb,
4639 gfp_t mem_flags)
4640 {
4641 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4642 struct usb_host_endpoint *ep = urb->ep;
4643 struct dwc2_hcd_urb *dwc2_urb;
4644 int i;
4645 int retval;
4646 int alloc_bandwidth = 0;
4647 u8 ep_type = 0;
4648 u32 tflags = 0;
4649 void *buf;
4650 unsigned long flags;
4651 struct dwc2_qh *qh;
4652 bool qh_allocated = false;
4653 struct dwc2_qtd *qtd;
4654 struct dwc2_gregs_backup *gr;
4655
4656 gr = &hsotg->gr_backup;
4657
4658 if (dbg_urb(urb)) {
4659 dev_vdbg(hsotg->dev, "DWC OTG HCD URB Enqueue\n");
4660 dwc2_dump_urb_info(hcd, urb, "urb_enqueue");
4661 }
4662
4663 if (hsotg->hibernated) {
4664 if (gr->gotgctl & GOTGCTL_CURMODE_HOST)
4665 retval = dwc2_exit_hibernation(hsotg, 0, 0, 1);
4666 else
4667 retval = dwc2_exit_hibernation(hsotg, 0, 0, 0);
4668
4669 if (retval)
4670 dev_err(hsotg->dev,
4671 "exit hibernation failed.\n");
4672 }
4673
4674 if (hsotg->in_ppd) {
4675 retval = dwc2_exit_partial_power_down(hsotg, 0, true);
4676 if (retval)
4677 dev_err(hsotg->dev,
4678 "exit partial_power_down failed\n");
4679 }
4680
4681 if (hsotg->params.power_down == DWC2_POWER_DOWN_PARAM_NONE &&
4682 hsotg->bus_suspended) {
4683 if (dwc2_is_device_mode(hsotg))
4684 dwc2_gadget_exit_clock_gating(hsotg, 0);
4685 else
4686 dwc2_host_exit_clock_gating(hsotg, 0);
4687 }
4688
4689 if (!ep)
4690 return -EINVAL;
4691
4692 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS ||
4693 usb_pipetype(urb->pipe) == PIPE_INTERRUPT) {
4694 spin_lock_irqsave(&hsotg->lock, flags);
4695 if (!dwc2_hcd_is_bandwidth_allocated(hsotg, ep))
4696 alloc_bandwidth = 1;
4697 spin_unlock_irqrestore(&hsotg->lock, flags);
4698 }
4699
4700 switch (usb_pipetype(urb->pipe)) {
4701 case PIPE_CONTROL:
4702 ep_type = USB_ENDPOINT_XFER_CONTROL;
4703 break;
4704 case PIPE_ISOCHRONOUS:
4705 ep_type = USB_ENDPOINT_XFER_ISOC;
4706 break;
4707 case PIPE_BULK:
4708 ep_type = USB_ENDPOINT_XFER_BULK;
4709 break;
4710 case PIPE_INTERRUPT:
4711 ep_type = USB_ENDPOINT_XFER_INT;
4712 break;
4713 }
4714
4715 dwc2_urb = dwc2_hcd_urb_alloc(hsotg, urb->number_of_packets,
4716 mem_flags);
4717 if (!dwc2_urb)
4718 return -ENOMEM;
4719
4720 dwc2_hcd_urb_set_pipeinfo(hsotg, dwc2_urb, usb_pipedevice(urb->pipe),
4721 usb_pipeendpoint(urb->pipe), ep_type,
4722 usb_pipein(urb->pipe),
4723 usb_endpoint_maxp(&ep->desc),
4724 usb_endpoint_maxp_mult(&ep->desc));
4725
4726 buf = urb->transfer_buffer;
4727
4728 if (hcd_uses_dma(hcd)) {
4729 if (!buf && (urb->transfer_dma & 3)) {
4730 dev_err(hsotg->dev,
4731 "%s: unaligned transfer with no transfer_buffer",
4732 __func__);
4733 retval = -EINVAL;
4734 goto fail0;
4735 }
4736 }
4737
4738 if (!(urb->transfer_flags & URB_NO_INTERRUPT))
4739 tflags |= URB_GIVEBACK_ASAP;
4740 if (urb->transfer_flags & URB_ZERO_PACKET)
4741 tflags |= URB_SEND_ZERO_PACKET;
4742
4743 dwc2_urb->priv = urb;
4744 dwc2_urb->buf = buf;
4745 dwc2_urb->dma = urb->transfer_dma;
4746 dwc2_urb->length = urb->transfer_buffer_length;
4747 dwc2_urb->setup_packet = urb->setup_packet;
4748 dwc2_urb->setup_dma = urb->setup_dma;
4749 dwc2_urb->flags = tflags;
4750 dwc2_urb->interval = urb->interval;
4751 dwc2_urb->status = -EINPROGRESS;
4752
4753 for (i = 0; i < urb->number_of_packets; ++i)
4754 dwc2_hcd_urb_set_iso_desc_params(dwc2_urb, i,
4755 urb->iso_frame_desc[i].offset,
4756 urb->iso_frame_desc[i].length);
4757
4758 urb->hcpriv = dwc2_urb;
4759 qh = (struct dwc2_qh *)ep->hcpriv;
4760
4761 if (!qh) {
4762 qh = dwc2_hcd_qh_create(hsotg, dwc2_urb, mem_flags);
4763 if (!qh) {
4764 retval = -ENOMEM;
4765 goto fail0;
4766 }
4767 ep->hcpriv = qh;
4768 qh_allocated = true;
4769 }
4770
4771 qtd = kzalloc(sizeof(*qtd), mem_flags);
4772 if (!qtd) {
4773 retval = -ENOMEM;
4774 goto fail1;
4775 }
4776
4777 spin_lock_irqsave(&hsotg->lock, flags);
4778 retval = usb_hcd_link_urb_to_ep(hcd, urb);
4779 if (retval)
4780 goto fail2;
4781
4782 retval = dwc2_hcd_urb_enqueue(hsotg, dwc2_urb, qh, qtd);
4783 if (retval)
4784 goto fail3;
4785
4786 if (alloc_bandwidth) {
4787 dwc2_allocate_bus_bandwidth(hcd,
4788 dwc2_hcd_get_ep_bandwidth(hsotg, ep),
4789 urb);
4790 }
4791
4792 spin_unlock_irqrestore(&hsotg->lock, flags);
4793
4794 return 0;
4795
4796 fail3:
4797 dwc2_urb->priv = NULL;
4798 usb_hcd_unlink_urb_from_ep(hcd, urb);
4799 if (qh_allocated && qh->channel && qh->channel->qh == qh)
4800 qh->channel->qh = NULL;
4801 fail2:
4802 spin_unlock_irqrestore(&hsotg->lock, flags);
4803 urb->hcpriv = NULL;
4804 kfree(qtd);
4805 fail1:
4806 if (qh_allocated) {
4807 struct dwc2_qtd *qtd2, *qtd2_tmp;
4808
4809 ep->hcpriv = NULL;
4810 dwc2_hcd_qh_unlink(hsotg, qh);
4811
4812 list_for_each_entry_safe(qtd2, qtd2_tmp, &qh->qtd_list,
4813 qtd_list_entry)
4814 dwc2_hcd_qtd_unlink_and_free(hsotg, qtd2, qh);
4815 dwc2_hcd_qh_free(hsotg, qh);
4816 }
4817 fail0:
4818 kfree(dwc2_urb);
4819
4820 return retval;
4821 }
4822
4823
4824
4825
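/* Aborts/cancels a USB transfer request */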
4826 static int _dwc2_hcd_urb_dequeue(struct usb_hcd *hcd, struct urb *urb,
4827 int status)
4828 {
4829 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4830 int rc;
4831 unsigned long flags;
4832
4833 dev_dbg(hsotg->dev, "DWC OTG HCD URB Dequeue\n");
4834 dwc2_dump_urb_info(hcd, urb, "urb_dequeue");
4835
4836 spin_lock_irqsave(&hsotg->lock, flags);
4837
4838 rc = usb_hcd_check_unlink_urb(hcd, urb, status);
4839 if (rc)
4840 goto out;
4841
4842 if (!urb->hcpriv) {
4843 dev_dbg(hsotg->dev, "## urb->hcpriv is NULL ##\n");
4844 goto out;
4845 }
4846
4847 rc = dwc2_hcd_urb_dequeue(hsotg, urb->hcpriv);
4848
4849 usb_hcd_unlink_urb_from_ep(hcd, urb);
4850
4851 kfree(urb->hcpriv);
4852 urb->hcpriv = NULL;
4853
4854
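/*
 * Give the URB back with the driver lock temporarily dropped, since the
 * completion handler may resubmit URBs and re-enter the HCD.
 */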
4855 spin_unlock(&hsotg->lock);
4856 usb_hcd_giveback_urb(hcd, urb, status);
4857 spin_lock(&hsotg->lock);
4858
4859 dev_dbg(hsotg->dev, "Called usb_hcd_giveback_urb()\n");
4860 dev_dbg(hsotg->dev, " urb->status = %d\n", urb->status);
4861 out:
4862 spin_unlock_irqrestore(&hsotg->lock, flags);
4863
4864 return rc;
4865 }
4866
4867
4868
4869
4870
4871
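/*
 * Frees resources in the DWC_otg controller related to a given endpoint.
 * Also clears state in the HCD related to the endpoint. Any URBs for the
 * endpoint must already be dequeued.
 */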
4872 static void _dwc2_hcd_endpoint_disable(struct usb_hcd *hcd,
4873 struct usb_host_endpoint *ep)
4874 {
4875 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4876
4877 dev_dbg(hsotg->dev,
4878 "DWC OTG HCD EP DISABLE: bEndpointAddress=0x%02x, ep->hcpriv=%p\n",
4879 ep->desc.bEndpointAddress, ep->hcpriv);
4880 dwc2_hcd_endpoint_disable(hsotg, ep, 250);
4881 }
4882
4883
4884
4885
4886
4887
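/*
 * Resets endpoint-specific parameter values, currently used to reset the
 * data toggle. This can be called from usb_clear_halt().
 */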
4888 static void _dwc2_hcd_endpoint_reset(struct usb_hcd *hcd,
4889 struct usb_host_endpoint *ep)
4890 {
4891 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4892 unsigned long flags;
4893
4894 dev_dbg(hsotg->dev,
4895 "DWC OTG HCD EP RESET: bEndpointAddress=0x%02x\n",
4896 ep->desc.bEndpointAddress);
4897
4898 spin_lock_irqsave(&hsotg->lock, flags);
4899 dwc2_hcd_endpoint_reset(hsotg, ep);
4900 spin_unlock_irqrestore(&hsotg->lock, flags);
4901 }
4902
4903
4904
4905
4906
4907
4908
4909
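/*
 * Handles host mode interrupts for the DWC_otg controller. Returns IRQ_NONE
 * if there was no interrupt to handle. Returns IRQ_HANDLED if there was a
 * valid interrupt.
 */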
4910 static irqreturn_t _dwc2_hcd_irq(struct usb_hcd *hcd)
4911 {
4912 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4913
4914 return dwc2_handle_hcd_intr(hsotg);
4915 }
4916
4917
4918
4919
4920
4921
4922
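/*
 * Creates a Status Change bitmap for the root hub and root port. The bitmap
 * is returned in buf. Bit 0 is the status change indicator for the root hub,
 * bit 1 for the single root port. Returns 1 if either change indicator is
 * set, otherwise returns 0.
 */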
4923 static int _dwc2_hcd_hub_status_data(struct usb_hcd *hcd, char *buf)
4924 {
4925 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4926
4927 buf[0] = dwc2_hcd_is_status_changed(hsotg, 1) << 1;
4928 return buf[0] != 0;
4929 }
4930
4931
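/* Handles hub class-specific requests */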
4932 static int _dwc2_hcd_hub_control(struct usb_hcd *hcd, u16 typereq, u16 wvalue,
4933 u16 windex, char *buf, u16 wlength)
4934 {
4935 int retval = dwc2_hcd_hub_control(dwc2_hcd_to_hsotg(hcd), typereq,
4936 wvalue, windex, buf, wlength);
4937 return retval;
4938 }
4939
4940
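/* Handles hub TT buffer clear completions */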
4941 static void _dwc2_hcd_clear_tt_buffer_complete(struct usb_hcd *hcd,
4942 struct usb_host_endpoint *ep)
4943 {
4944 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4945 struct dwc2_qh *qh;
4946 unsigned long flags;
4947
4948 qh = ep->hcpriv;
4949 if (!qh)
4950 return;
4951
4952 spin_lock_irqsave(&hsotg->lock, flags);
4953 qh->tt_buffer_dirty = 0;
4954
4955 if (hsotg->flags.b.port_connect_status)
4956 dwc2_hcd_queue_transactions(hsotg, DWC2_TRANSACTION_ALL);
4957
4958 spin_unlock_irqrestore(&hsotg->lock, flags);
4959 }
4960
4961
4962
4963
4964
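/*
 * Updates the stored bus speed parameter and kicks the connection ID status
 * change work so the core is re-initialized at the new speed.
 */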
4965 static void dwc2_change_bus_speed(struct usb_hcd *hcd, int speed)
4966 {
4967 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4968
4969 if (hsotg->params.speed == speed)
4970 return;
4971
4972 hsotg->params.speed = speed;
4973 queue_work(hsotg->wq_otg, &hsotg->wf_otg);
4974 }
4975
4976 static void dwc2_free_dev(struct usb_hcd *hcd, struct usb_device *udev)
4977 {
4978 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4979
4980 if (!hsotg->params.change_speed_quirk)
4981 return;
4982
4983
4984
4985
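/*
 * If this device sits below a link slower than high speed, the bus was
 * dropped to full speed for it; restore the default high-speed setting now
 * that the device is going away.
 */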
4986 if (udev->parent && udev->parent->speed > USB_SPEED_UNKNOWN &&
4987 udev->parent->speed < USB_SPEED_HIGH) {
4988 dev_info(hsotg->dev, "Set speed to default high-speed\n");
4989 dwc2_change_bus_speed(hcd, HPRT0_SPD_HIGH_SPEED);
4990 }
4991 }
4992
4993 static int dwc2_reset_device(struct usb_hcd *hcd, struct usb_device *udev)
4994 {
4995 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4996
4997 if (!hsotg->params.change_speed_quirk)
4998 return 0;
4999
5000 if (udev->speed == USB_SPEED_HIGH) {
5001 dev_info(hsotg->dev, "Set speed to high-speed\n");
5002 dwc2_change_bus_speed(hcd, HPRT0_SPD_HIGH_SPEED);
5003 } else if ((udev->speed == USB_SPEED_FULL ||
5004 udev->speed == USB_SPEED_LOW)) {
5005
5006
5007
5008
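/*
 * For the change_speed_quirk, switch the bus to full speed when a
 * full-speed or low-speed device is being reset.
 */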
5009 dev_info(hsotg->dev, "Set speed to full-speed\n");
5010 dwc2_change_bus_speed(hcd, HPRT0_SPD_FULL_SPEED);
5011 }
5012
5013 return 0;
5014 }
5015
5016 static struct hc_driver dwc2_hc_driver = {
5017 .description = "dwc2_hsotg",
5018 .product_desc = "DWC OTG Controller",
5019 .hcd_priv_size = sizeof(struct wrapper_priv_data),
5020
5021 .irq = _dwc2_hcd_irq,
5022 .flags = HCD_MEMORY | HCD_USB2 | HCD_BH,
5023
5024 .start = _dwc2_hcd_start,
5025 .stop = _dwc2_hcd_stop,
5026 .urb_enqueue = _dwc2_hcd_urb_enqueue,
5027 .urb_dequeue = _dwc2_hcd_urb_dequeue,
5028 .endpoint_disable = _dwc2_hcd_endpoint_disable,
5029 .endpoint_reset = _dwc2_hcd_endpoint_reset,
5030 .get_frame_number = _dwc2_hcd_get_frame_number,
5031
5032 .hub_status_data = _dwc2_hcd_hub_status_data,
5033 .hub_control = _dwc2_hcd_hub_control,
5034 .clear_tt_buffer_complete = _dwc2_hcd_clear_tt_buffer_complete,
5035
5036 .bus_suspend = _dwc2_hcd_suspend,
5037 .bus_resume = _dwc2_hcd_resume,
5038
5039 .map_urb_for_dma = dwc2_map_urb_for_dma,
5040 .unmap_urb_for_dma = dwc2_unmap_urb_for_dma,
5041 };
5042
5043
5044
5045
5046
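/*
 * Frees secondary storage associated with the dwc2_hsotg structure contained
 * in the struct usb_hcd field
 */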
5047 static void dwc2_hcd_free(struct dwc2_hsotg *hsotg)
5048 {
5049 u32 ahbcfg;
5050 u32 dctl;
5051 int i;
5052
5053 dev_dbg(hsotg->dev, "DWC OTG HCD FREE\n");
5054
5055
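/* Free memory for QH/QTD lists */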
5056 dwc2_qh_list_free(hsotg, &hsotg->non_periodic_sched_inactive);
5057 dwc2_qh_list_free(hsotg, &hsotg->non_periodic_sched_waiting);
5058 dwc2_qh_list_free(hsotg, &hsotg->non_periodic_sched_active);
5059 dwc2_qh_list_free(hsotg, &hsotg->periodic_sched_inactive);
5060 dwc2_qh_list_free(hsotg, &hsotg->periodic_sched_ready);
5061 dwc2_qh_list_free(hsotg, &hsotg->periodic_sched_assigned);
5062 dwc2_qh_list_free(hsotg, &hsotg->periodic_sched_queued);
5063
5064
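/* Free memory for the host channels */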
5065 for (i = 0; i < MAX_EPS_CHANNELS; i++) {
5066 struct dwc2_host_chan *chan = hsotg->hc_ptr_array[i];
5067
5068 if (chan) {
5069 dev_dbg(hsotg->dev, "HCD Free channel #%i, chan=%p\n",
5070 i, chan);
5071 hsotg->hc_ptr_array[i] = NULL;
5072 kfree(chan);
5073 }
5074 }
5075
5076 if (hsotg->params.host_dma) {
5077 if (hsotg->status_buf) {
5078 dma_free_coherent(hsotg->dev, DWC2_HCD_STATUS_BUF_SIZE,
5079 hsotg->status_buf,
5080 hsotg->status_buf_dma);
5081 hsotg->status_buf = NULL;
5082 }
5083 } else {
5084 kfree(hsotg->status_buf);
5085 hsotg->status_buf = NULL;
5086 }
5087
5088 ahbcfg = dwc2_readl(hsotg, GAHBCFG);
5089
5090
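/* Disable all interrupts */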
5091 ahbcfg &= ~GAHBCFG_GLBL_INTR_EN;
5092 dwc2_writel(hsotg, ahbcfg, GAHBCFG);
5093 dwc2_writel(hsotg, 0, GINTMSK);
5094
5095 if (hsotg->hw_params.snpsid >= DWC2_CORE_REV_3_00a) {
5096 dctl = dwc2_readl(hsotg, DCTL);
5097 dctl |= DCTL_SFTDISCON;
5098 dwc2_writel(hsotg, dctl, DCTL);
5099 }
5100
5101 if (hsotg->wq_otg) {
5102 if (!cancel_work_sync(&hsotg->wf_otg))
5103 flush_workqueue(hsotg->wq_otg);
5104 destroy_workqueue(hsotg->wq_otg);
5105 }
5106
5107 cancel_work_sync(&hsotg->phy_reset_work);
5108
5109 del_timer(&hsotg->wkp_timer);
5110 }
5111
5112 static void dwc2_hcd_release(struct dwc2_hsotg *hsotg)
5113 {
5114
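/* Turn off all host-specific interrupts */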
5115 dwc2_disable_host_interrupts(hsotg);
5116
5117 dwc2_hcd_free(hsotg);
5118 }
5119
5120
5121
5122
5123
5124
5125
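/*
 * Initializes the HCD. This function allocates memory for and initializes
 * the static parts of the usb_hcd and dwc2_hsotg structures. It also
 * registers the USB bus with the core and calls the hc_driver->start()
 * function. It returns a negative error on failure.
 */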
5126 int dwc2_hcd_init(struct dwc2_hsotg *hsotg)
5127 {
5128 struct platform_device *pdev = to_platform_device(hsotg->dev);
5129 struct resource *res;
5130 struct usb_hcd *hcd;
5131 struct dwc2_host_chan *channel;
5132 u32 hcfg;
5133 int i, num_channels;
5134 int retval;
5135
5136 if (usb_disabled())
5137 return -ENODEV;
5138
5139 dev_dbg(hsotg->dev, "DWC OTG HCD INIT\n");
5140
5141 retval = -ENOMEM;
5142
5143 hcfg = dwc2_readl(hsotg, HCFG);
5144 dev_dbg(hsotg->dev, "hcfg=%08x\n", hcfg);
5145
5146 #ifdef CONFIG_USB_DWC2_TRACK_MISSED_SOFS
5147 hsotg->frame_num_array = kcalloc(FRAME_NUM_ARRAY_SIZE,
5148 sizeof(*hsotg->frame_num_array),
5149 GFP_KERNEL);
5150 if (!hsotg->frame_num_array)
5151 goto error1;
5152 hsotg->last_frame_num_array =
5153 kcalloc(FRAME_NUM_ARRAY_SIZE,
5154 sizeof(*hsotg->last_frame_num_array), GFP_KERNEL);
5155 if (!hsotg->last_frame_num_array)
5156 goto error1;
5157 #endif
5158 hsotg->last_frame_num = HFNUM_MAX_FRNUM;
5159
5160
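/* Check if the bus driver or platform code has setup a dma_mask */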
5161 if (hsotg->params.host_dma &&
5162 !hsotg->dev->dma_mask) {
5163 dev_warn(hsotg->dev,
5164 "dma_mask not set, disabling DMA\n");
5165 hsotg->params.host_dma = false;
5166 hsotg->params.dma_desc_enable = false;
5167 }
5168
5169
5170 if (hsotg->params.host_dma) {
5171 if (dma_set_mask(hsotg->dev, DMA_BIT_MASK(32)) < 0)
5172 dev_warn(hsotg->dev, "can't set DMA mask\n");
5173 if (dma_set_coherent_mask(hsotg->dev, DMA_BIT_MASK(32)) < 0)
5174 dev_warn(hsotg->dev, "can't set coherent DMA mask\n");
5175 }
5176
5177 if (hsotg->params.change_speed_quirk) {
5178 dwc2_hc_driver.free_dev = dwc2_free_dev;
5179 dwc2_hc_driver.reset_device = dwc2_reset_device;
5180 }
5181
5182 if (hsotg->params.host_dma)
5183 dwc2_hc_driver.flags |= HCD_DMA;
5184
5185 hcd = usb_create_hcd(&dwc2_hc_driver, hsotg->dev, dev_name(hsotg->dev));
5186 if (!hcd)
5187 goto error1;
5188
5189 hcd->has_tt = 1;
5190
5191 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
5192 if (!res) {
5193 retval = -EINVAL;
5194 goto error2;
5195 }
5196 hcd->rsrc_start = res->start;
5197 hcd->rsrc_len = resource_size(res);
5198
5199 ((struct wrapper_priv_data *)&hcd->hcd_priv)->hsotg = hsotg;
5200 hsotg->priv = hcd;
5201
5202
5203
5204
5205
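/*
 * Disable the global interrupt until all the interrupt handlers are
 * installed
 */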
5206 dwc2_disable_global_interrupts(hsotg);
5207
5208
5209 retval = dwc2_core_init(hsotg, true);
5210 if (retval)
5211 goto error2;
5212
5213
5214 retval = -ENOMEM;
5215 hsotg->wq_otg = alloc_ordered_workqueue("dwc2", 0);
5216 if (!hsotg->wq_otg) {
5217 dev_err(hsotg->dev, "Failed to create workqueue\n");
5218 goto error2;
5219 }
5220 INIT_WORK(&hsotg->wf_otg, dwc2_conn_id_status_change);
5221
5222 timer_setup(&hsotg->wkp_timer, dwc2_wakeup_detected, 0);
5223
5224
5225 INIT_LIST_HEAD(&hsotg->non_periodic_sched_inactive);
5226 INIT_LIST_HEAD(&hsotg->non_periodic_sched_waiting);
5227 INIT_LIST_HEAD(&hsotg->non_periodic_sched_active);
5228
5229
5230 INIT_LIST_HEAD(&hsotg->periodic_sched_inactive);
5231 INIT_LIST_HEAD(&hsotg->periodic_sched_ready);
5232 INIT_LIST_HEAD(&hsotg->periodic_sched_assigned);
5233 INIT_LIST_HEAD(&hsotg->periodic_sched_queued);
5234
5235 INIT_LIST_HEAD(&hsotg->split_order);
5236
5237
5238
5239
5240
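/*
 * Create a host channel descriptor for each host channel implemented in the
 * controller. Initialize the channel descriptor array.
 */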
5241 INIT_LIST_HEAD(&hsotg->free_hc_list);
5242 num_channels = hsotg->params.host_channels;
5243 memset(&hsotg->hc_ptr_array[0], 0, sizeof(hsotg->hc_ptr_array));
5244
5245 for (i = 0; i < num_channels; i++) {
5246 channel = kzalloc(sizeof(*channel), GFP_KERNEL);
5247 if (!channel)
5248 goto error3;
5249 channel->hc_num = i;
5250 INIT_LIST_HEAD(&channel->split_order_list_entry);
5251 hsotg->hc_ptr_array[i] = channel;
5252 }
5253
5254
5255 INIT_DELAYED_WORK(&hsotg->start_work, dwc2_hcd_start_func);
5256 INIT_DELAYED_WORK(&hsotg->reset_work, dwc2_hcd_reset_func);
5257 INIT_WORK(&hsotg->phy_reset_work, dwc2_hcd_phy_reset_func);
5258
5259
5260
5261
5262
5263
5264
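/*
 * Allocate space for storing data on status transactions. Normally no data
 * is sent, but this space acts as a bit bucket.
 */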
5265 if (hsotg->params.host_dma)
5266 hsotg->status_buf = dma_alloc_coherent(hsotg->dev,
5267 DWC2_HCD_STATUS_BUF_SIZE,
5268 &hsotg->status_buf_dma, GFP_KERNEL);
5269 else
5270 hsotg->status_buf = kzalloc(DWC2_HCD_STATUS_BUF_SIZE,
5271 GFP_KERNEL);
5272
5273 if (!hsotg->status_buf)
5274 goto error3;
5275
5276
5277
5278
5279
5280
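/*
 * Create kmem caches to handle descriptor buffers in descriptor DMA mode.
 * Alignment must be set to 512 bytes.
 */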
5281 if (hsotg->params.dma_desc_enable ||
5282 hsotg->params.dma_desc_fs_enable) {
5283 hsotg->desc_gen_cache = kmem_cache_create("dwc2-gen-desc",
5284 sizeof(struct dwc2_dma_desc) *
5285 MAX_DMA_DESC_NUM_GENERIC, 512, SLAB_CACHE_DMA,
5286 NULL);
5287 if (!hsotg->desc_gen_cache) {
5288 dev_err(hsotg->dev,
5289 "unable to create dwc2 generic desc cache\n");
5290
5291
5292
5293
5294
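/* Fall back to buffer DMA if the descriptor cache could not be created */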
5295 hsotg->params.dma_desc_enable = false;
5296 hsotg->params.dma_desc_fs_enable = false;
5297 }
5298
5299 hsotg->desc_hsisoc_cache = kmem_cache_create("dwc2-hsisoc-desc",
5300 sizeof(struct dwc2_dma_desc) *
5301 MAX_DMA_DESC_NUM_HS_ISOC, 512, 0, NULL);
5302 if (!hsotg->desc_hsisoc_cache) {
5303 dev_err(hsotg->dev,
5304 "unable to create dwc2 hs isoc desc cache\n");
5305
5306 kmem_cache_destroy(hsotg->desc_gen_cache);
5307
5308
5309
5310
5311
5312 hsotg->params.dma_desc_enable = false;
5313 hsotg->params.dma_desc_fs_enable = false;
5314 }
5315 }
5316
5317 if (hsotg->params.host_dma) {
5318
5319
5320
5321
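/* Create a kmem cache for bounce buffers used with non-aligned transfers */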
5322 hsotg->unaligned_cache = kmem_cache_create("dwc2-unaligned-dma",
5323 DWC2_KMEM_UNALIGNED_BUF_SIZE, 4,
5324 SLAB_CACHE_DMA, NULL);
5325 if (!hsotg->unaligned_cache)
5326 dev_err(hsotg->dev,
5327 "unable to create dwc2 unaligned cache\n");
5328 }
5329
5330 hsotg->otg_port = 1;
5331 hsotg->frame_list = NULL;
5332 hsotg->frame_list_dma = 0;
5333 hsotg->periodic_qh_count = 0;
5334
5335
5336 hsotg->lx_state = DWC2_L3;
5337
5338 hcd->self.otg_port = hsotg->otg_port;
5339
5340
5341 hcd->self.sg_tablesize = 0;
5342
5343 hcd->tpl_support = of_usb_host_tpl_support(hsotg->dev->of_node);
5344
5345 if (!IS_ERR_OR_NULL(hsotg->uphy))
5346 otg_set_host(hsotg->uphy->otg, &hcd->self);
5347
5348
5349
5350
5351
5352
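/*
 * Finish generic HCD initialization and start the HCD. This registers the
 * USB bus, requests the IRQ line, and calls the hc_driver start method.
 */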
5353 retval = usb_add_hcd(hcd, hsotg->irq, IRQF_SHARED);
5354 if (retval < 0)
5355 goto error4;
5356
5357 device_wakeup_enable(hcd->self.controller);
5358
5359 dwc2_hcd_dump_state(hsotg);
5360
5361 dwc2_enable_global_interrupts(hsotg);
5362
5363 return 0;
5364
5365 error4:
5366 kmem_cache_destroy(hsotg->unaligned_cache);
5367 kmem_cache_destroy(hsotg->desc_hsisoc_cache);
5368 kmem_cache_destroy(hsotg->desc_gen_cache);
5369 error3:
5370 dwc2_hcd_release(hsotg);
5371 error2:
5372 usb_put_hcd(hcd);
5373 error1:
5374
5375 #ifdef CONFIG_USB_DWC2_TRACK_MISSED_SOFS
5376 kfree(hsotg->last_frame_num_array);
5377 kfree(hsotg->frame_num_array);
5378 #endif
5379
5380 dev_err(hsotg->dev, "%s() FAILED, returning %d\n", __func__, retval);
5381 return retval;
5382 }
5383
5384
5385
5386
5387
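/*
 * Removes the HCD.
 * Frees memory and resources associated with the HCD and deregisters the bus.
 */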
5388 void dwc2_hcd_remove(struct dwc2_hsotg *hsotg)
5389 {
5390 struct usb_hcd *hcd;
5391
5392 dev_dbg(hsotg->dev, "DWC OTG HCD REMOVE\n");
5393
5394 hcd = dwc2_hsotg_to_hcd(hsotg);
5395 dev_dbg(hsotg->dev, "hsotg->hcd = %p\n", hcd);
5396
5397 if (!hcd) {
5398 dev_dbg(hsotg->dev, "%s: dwc2_hsotg_to_hcd(hsotg) NULL!\n",
5399 __func__);
5400 return;
5401 }
5402
5403 if (!IS_ERR_OR_NULL(hsotg->uphy))
5404 otg_set_host(hsotg->uphy->otg, NULL);
5405
5406 usb_remove_hcd(hcd);
5407 hsotg->priv = NULL;
5408
5409 kmem_cache_destroy(hsotg->unaligned_cache);
5410 kmem_cache_destroy(hsotg->desc_hsisoc_cache);
5411 kmem_cache_destroy(hsotg->desc_gen_cache);
5412
5413 dwc2_hcd_release(hsotg);
5414 usb_put_hcd(hcd);
5415
5416 #ifdef CONFIG_USB_DWC2_TRACK_MISSED_SOFS
5417 kfree(hsotg->last_frame_num_array);
5418 kfree(hsotg->frame_num_array);
5419 #endif
5420 }
5421
5422
5423
5424
5425
5426
5427
5428
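/**
 * dwc2_backup_host_registers() - Backup controller host registers.
 * When suspending the USB bus, registers need to be backed up if controller
 * power is disabled once suspended.
 *
 * @hsotg: Programming view of the DWC_otg controller
 */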
5429 int dwc2_backup_host_registers(struct dwc2_hsotg *hsotg)
5430 {
5431 struct dwc2_hregs_backup *hr;
5432 int i;
5433
5434 dev_dbg(hsotg->dev, "%s\n", __func__);
5435
5436
5437 hr = &hsotg->hr_backup;
5438 hr->hcfg = dwc2_readl(hsotg, HCFG);
5439 hr->haintmsk = dwc2_readl(hsotg, HAINTMSK);
5440 for (i = 0; i < hsotg->params.host_channels; ++i)
5441 hr->hcintmsk[i] = dwc2_readl(hsotg, HCINTMSK(i));
5442
5443 hr->hprt0 = dwc2_read_hprt0(hsotg);
5444 hr->hfir = dwc2_readl(hsotg, HFIR);
5445 hr->hptxfsiz = dwc2_readl(hsotg, HPTXFSIZ);
5446 hr->valid = true;
5447
5448 return 0;
5449 }
5450
5451
5452
5453
5454
5455
5456
5457
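/**
 * dwc2_restore_host_registers() - Restore controller host registers.
 * When resuming the USB bus, host registers need to be restored if controller
 * power was disabled.
 *
 * @hsotg: Programming view of the DWC_otg controller
 */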
5458 int dwc2_restore_host_registers(struct dwc2_hsotg *hsotg)
5459 {
5460 struct dwc2_hregs_backup *hr;
5461 int i;
5462
5463 dev_dbg(hsotg->dev, "%s\n", __func__);
5464
5465
5466 hr = &hsotg->hr_backup;
5467 if (!hr->valid) {
5468 dev_err(hsotg->dev, "%s: no host registers to restore\n",
5469 __func__);
5470 return -EINVAL;
5471 }
5472 hr->valid = false;
5473
5474 dwc2_writel(hsotg, hr->hcfg, HCFG);
5475 dwc2_writel(hsotg, hr->haintmsk, HAINTMSK);
5476
5477 for (i = 0; i < hsotg->params.host_channels; ++i)
5478 dwc2_writel(hsotg, hr->hcintmsk[i], HCINTMSK(i));
5479
5480 dwc2_writel(hsotg, hr->hprt0, HPRT0);
5481 dwc2_writel(hsotg, hr->hfir, HFIR);
5482 dwc2_writel(hsotg, hr->hptxfsiz, HPTXFSIZ);
5483 hsotg->frame_number = 0;
5484
5485 return 0;
5486 }
5487
5488
5489
5490
5491
5492
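/**
 * dwc2_host_enter_hibernation() - Put the controller in Hibernation.
 *
 * @hsotg: Programming view of the DWC_otg controller
 */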
5493 int dwc2_host_enter_hibernation(struct dwc2_hsotg *hsotg)
5494 {
5495 unsigned long flags;
5496 int ret = 0;
5497 u32 hprt0;
5498 u32 pcgcctl;
5499 u32 gusbcfg;
5500 u32 gpwrdn;
5501
5502 dev_dbg(hsotg->dev, "Preparing host for hibernation\n");
5503 ret = dwc2_backup_global_registers(hsotg);
5504 if (ret) {
5505 dev_err(hsotg->dev, "%s: failed to backup global registers\n",
5506 __func__);
5507 return ret;
5508 }
5509 ret = dwc2_backup_host_registers(hsotg);
5510 if (ret) {
5511 dev_err(hsotg->dev, "%s: failed to backup host registers\n",
5512 __func__);
5513 return ret;
5514 }
5515
5516
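/* Enter USB Suspend Mode */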
5517 hprt0 = dwc2_readl(hsotg, HPRT0);
5518 hprt0 |= HPRT0_SUSP;
5519 hprt0 &= ~HPRT0_ENA;
5520 dwc2_writel(hsotg, hprt0, HPRT0);
5521
5522
5523 if (dwc2_hsotg_wait_bit_set(hsotg, HPRT0, HPRT0_SUSP, 5000))
5524 dev_warn(hsotg->dev, "Suspend wasn't generated\n");
5525
5526
5527
5528
5529
5530 spin_lock_irqsave(&hsotg->lock, flags);
5531 hsotg->lx_state = DWC2_L2;
5532
5533 gusbcfg = dwc2_readl(hsotg, GUSBCFG);
5534 if (gusbcfg & GUSBCFG_ULPI_UTMI_SEL) {
5535
5536
5537 pcgcctl = dwc2_readl(hsotg, PCGCTL);
5538 pcgcctl |= PCGCTL_STOPPCLK;
5539 dwc2_writel(hsotg, pcgcctl, PCGCTL);
5540 udelay(10);
5541
5542 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5543 gpwrdn |= GPWRDN_PMUACTV;
5544 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5545 udelay(10);
5546 } else {
5547
5548 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5549 gpwrdn |= GPWRDN_PMUACTV;
5550 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5551 udelay(10);
5552
5553 pcgcctl = dwc2_readl(hsotg, PCGCTL);
5554 pcgcctl |= PCGCTL_STOPPCLK;
5555 dwc2_writel(hsotg, pcgcctl, PCGCTL);
5556 udelay(10);
5557 }
5558
5559
5560 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5561 gpwrdn |= GPWRDN_PMUINTSEL;
5562 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5563 udelay(10);
5564
5565
5566 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5567 gpwrdn |= GPWRDN_DISCONN_DET_MSK;
5568 gpwrdn |= GPWRDN_LNSTSCHG_MSK;
5569 gpwrdn |= GPWRDN_STS_CHGINT_MSK;
5570 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5571 udelay(10);
5572
5573
5574 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5575 gpwrdn |= GPWRDN_PWRDNCLMP;
5576 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5577 udelay(10);
5578
5579
5580 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5581 gpwrdn |= GPWRDN_PWRDNSWTCH;
5582 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5583
5584 hsotg->hibernated = 1;
5585 hsotg->bus_suspended = 1;
5586 dev_dbg(hsotg->dev, "Host hibernation completed\n");
5587 spin_unlock_irqrestore(&hsotg->lock, flags);
5588 return ret;
5589 }
5590
5591
5592
5593
5594
5595
5596
5597
5598
5599
5600
5601
5602
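/**
 * dwc2_host_exit_hibernation() - Exit the controller from Hibernation.
 *
 * @hsotg: Programming view of the DWC_otg controller
 * @rem_wakeup: indicates whether the exit is due to a device-initiated
 *              remote wakeup
 * @reset: indicates whether the port is brought back up with a reset rather
 *         than resume signaling
 *
 * Return: 0 on success, a negative error code otherwise.
 */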
5603 int dwc2_host_exit_hibernation(struct dwc2_hsotg *hsotg, int rem_wakeup,
5604 int reset)
5605 {
5606 u32 gpwrdn;
5607 u32 hprt0;
5608 int ret = 0;
5609 struct dwc2_gregs_backup *gr;
5610 struct dwc2_hregs_backup *hr;
5611
5612 gr = &hsotg->gr_backup;
5613 hr = &hsotg->hr_backup;
5614
5615 dev_dbg(hsotg->dev,
5616 "%s: called with rem_wakeup = %d reset = %d\n",
5617 __func__, rem_wakeup, reset);
5618
5619 dwc2_hib_restore_common(hsotg, rem_wakeup, 1);
5620 hsotg->hibernated = 0;
5621
5622
5623
5624
5625
5626
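/*
 * Give the core time to settle after the common restore sequence before the
 * host registers are touched.
 */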
5627 mdelay(100);
5628
5629
5630 dwc2_writel(hsotg, 0xffffffff, GINTSTS);
5631
5632
5633 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5634 gpwrdn &= ~GPWRDN_RESTORE;
5635 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5636 udelay(10);
5637
5638
5639 dwc2_writel(hsotg, gr->gusbcfg, GUSBCFG);
5640 dwc2_writel(hsotg, hr->hcfg, HCFG);
5641
5642
5643 gpwrdn = dwc2_readl(hsotg, GPWRDN);
5644 gpwrdn &= ~GPWRDN_PMUACTV;
5645 dwc2_writel(hsotg, gpwrdn, GPWRDN);
5646 udelay(10);
5647
5648 hprt0 = hr->hprt0;
5649 hprt0 |= HPRT0_PWR;
5650 hprt0 &= ~HPRT0_ENA;
5651 hprt0 &= ~HPRT0_SUSP;
5652 dwc2_writel(hsotg, hprt0, HPRT0);
5653
5654 hprt0 = hr->hprt0;
5655 hprt0 |= HPRT0_PWR;
5656 hprt0 &= ~HPRT0_ENA;
5657 hprt0 &= ~HPRT0_SUSP;
5658
5659 if (reset) {
5660 hprt0 |= HPRT0_RST;
5661 dwc2_writel(hsotg, hprt0, HPRT0);
5662
5663
5664 mdelay(60);
5665 hprt0 &= ~HPRT0_RST;
5666 dwc2_writel(hsotg, hprt0, HPRT0);
5667 } else {
5668 hprt0 |= HPRT0_RES;
5669 dwc2_writel(hsotg, hprt0, HPRT0);
5670
5671
5672 mdelay(100);
5673 hprt0 &= ~HPRT0_RES;
5674 dwc2_writel(hsotg, hprt0, HPRT0);
5675 }
5676
5677 hprt0 = dwc2_readl(hsotg, HPRT0);
5678 hprt0 |= HPRT0_CONNDET;
5679 hprt0 |= HPRT0_ENACHG;
5680 hprt0 &= ~HPRT0_ENA;
5681 dwc2_writel(hsotg, hprt0, HPRT0);
5682
5683 hprt0 = dwc2_readl(hsotg, HPRT0);
5684
5685
5686 dwc2_writel(hsotg, 0xffffffff, GINTSTS);
5687
5688
5689 ret = dwc2_restore_global_registers(hsotg);
5690 if (ret) {
5691 dev_err(hsotg->dev, "%s: failed to restore registers\n",
5692 __func__);
5693 return ret;
5694 }
5695
5696
5697 ret = dwc2_restore_host_registers(hsotg);
5698 if (ret) {
5699 dev_err(hsotg->dev, "%s: failed to restore host registers\n",
5700 __func__);
5701 return ret;
5702 }
5703
5704 if (rem_wakeup) {
5705 dwc2_hcd_rem_wakeup(hsotg);
5706
5707
5708
5709
5710
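/*
 * Set the port connect status change flag so the root port is re-enumerated,
 * since the connection is not re-detected after exiting hibernation.
 */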
5711 hsotg->flags.b.port_connect_status_change = 1;
5712 }
5713
5714 hsotg->hibernated = 0;
5715 hsotg->bus_suspended = 0;
5716 hsotg->lx_state = DWC2_L0;
5717 dev_dbg(hsotg->dev, "Host hibernation restore complete\n");
5718 return ret;
5719 }
5720
5721 bool dwc2_host_can_poweroff_phy(struct dwc2_hsotg *dwc2)
5722 {
5723 struct usb_device *root_hub = dwc2_hsotg_to_hcd(dwc2)->self.root_hub;
5724
5725
5726 if (!device_may_wakeup(dwc2->dev))
5727 return true;
5728
5729
5730
5731
5732
5733 if (usb_wakeup_enabled_descendants(root_hub))
5734 return false;
5735
5736
5737 return true;
5738 }
5739
5740
5741
5742
5743
5744
5745
5746
5747
5748
5749
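/**
 * dwc2_host_enter_partial_power_down() - Put the controller in host partial
 * power down.
 *
 * @hsotg: Programming view of the DWC_otg controller
 *
 * Return: 0 on success, a negative error code otherwise.
 */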
5750 int dwc2_host_enter_partial_power_down(struct dwc2_hsotg *hsotg)
5751 {
5752 u32 pcgcctl;
5753 u32 hprt0;
5754 int ret = 0;
5755
5756 dev_dbg(hsotg->dev, "Entering host partial power down started.\n");
5757
5758
5759 hprt0 = dwc2_read_hprt0(hsotg);
5760 hprt0 |= HPRT0_SUSP;
5761 dwc2_writel(hsotg, hprt0, HPRT0);
5762 udelay(5);
5763
5764
5765 if (dwc2_hsotg_wait_bit_set(hsotg, HPRT0, HPRT0_SUSP, 3000))
5766 dev_warn(hsotg->dev, "Suspend wasn't generated\n");
5767
5768
5769 ret = dwc2_backup_global_registers(hsotg);
5770 if (ret) {
5771 dev_err(hsotg->dev, "%s: failed to backup global registers\n",
5772 __func__);
5773 return ret;
5774 }
5775
5776 ret = dwc2_backup_host_registers(hsotg);
5777 if (ret) {
5778 dev_err(hsotg->dev, "%s: failed to backup host registers\n",
5779 __func__);
5780 return ret;
5781 }
5782
5783
5784
5785
5786
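/*
 * Clear any pending interrupts, since they cannot be serviced after the core
 * enters partial power down.
 */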
5787 dwc2_writel(hsotg, 0xffffffff, GINTSTS);
5788
5789
5790 pcgcctl = dwc2_readl(hsotg, PCGCTL);
5791
5792 pcgcctl |= PCGCTL_PWRCLMP;
5793 dwc2_writel(hsotg, pcgcctl, PCGCTL);
5794 udelay(5);
5795
5796 pcgcctl |= PCGCTL_RSTPDWNMODULE;
5797 dwc2_writel(hsotg, pcgcctl, PCGCTL);
5798 udelay(5);
5799
5800 pcgcctl |= PCGCTL_STOPPCLK;
5801 dwc2_writel(hsotg, pcgcctl, PCGCTL);
5802
5803
5804 hsotg->in_ppd = 1;
5805 hsotg->lx_state = DWC2_L2;
5806 hsotg->bus_suspended = true;
5807
5808 dev_dbg(hsotg->dev, "Entering host partial power down completed.\n");
5809
5810 return ret;
5811 }
5812
5813
5814
5815
5816
5817
5818
5819
5820
5821
5822
5823
5824
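/**
 * dwc2_host_exit_partial_power_down() - Exit the controller from host partial
 * power down.
 *
 * @hsotg: Programming view of the DWC_otg controller
 * @rem_wakeup: indicates whether the exit is due to a remote wakeup
 * @restore: indicates whether the registers need to be restored
 *
 * Return: 0 on success, a negative error code otherwise.
 */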
5825 int dwc2_host_exit_partial_power_down(struct dwc2_hsotg *hsotg,
5826 int rem_wakeup, bool restore)
5827 {
5828 u32 pcgcctl;
5829 int ret = 0;
5830 u32 hprt0;
5831
5832 dev_dbg(hsotg->dev, "Exiting host partial power down started.\n");
5833
5834 pcgcctl = dwc2_readl(hsotg, PCGCTL);
5835 pcgcctl &= ~PCGCTL_STOPPCLK;
5836 dwc2_writel(hsotg, pcgcctl, PCGCTL);
5837 udelay(5);
5838
5839 pcgcctl = dwc2_readl(hsotg, PCGCTL);
5840 pcgcctl &= ~PCGCTL_PWRCLMP;
5841 dwc2_writel(hsotg, pcgcctl, PCGCTL);
5842 udelay(5);
5843
5844 pcgcctl = dwc2_readl(hsotg, PCGCTL);
5845 pcgcctl &= ~PCGCTL_RSTPDWNMODULE;
5846 dwc2_writel(hsotg, pcgcctl, PCGCTL);
5847
5848 udelay(100);
5849 if (restore) {
5850 ret = dwc2_restore_global_registers(hsotg);
5851 if (ret) {
5852 dev_err(hsotg->dev, "%s: failed to restore registers\n",
5853 __func__);
5854 return ret;
5855 }
5856
5857 ret = dwc2_restore_host_registers(hsotg);
5858 if (ret) {
5859 dev_err(hsotg->dev, "%s: failed to restore host registers\n",
5860 __func__);
5861 return ret;
5862 }
5863 }
5864
5865
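/* Drive resume signaling and exit suspend mode on the port */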
5866 hprt0 = dwc2_read_hprt0(hsotg);
5867 hprt0 |= HPRT0_RES;
5868 hprt0 &= ~HPRT0_SUSP;
5869 dwc2_writel(hsotg, hprt0, HPRT0);
5870 udelay(5);
5871
5872 if (!rem_wakeup) {
5873
5874 hprt0 = dwc2_read_hprt0(hsotg);
5875 hprt0 &= ~HPRT0_RES;
5876 dwc2_writel(hsotg, hprt0, HPRT0);
5877
5878 hsotg->bus_suspended = false;
5879 } else {
5880
5881 hprt0 = dwc2_read_hprt0(hsotg);
5882 hprt0 |= HPRT0_PWR;
5883 dwc2_writel(hsotg, hprt0, HPRT0);
5884
5885
5886 dwc2_hcd_connect(hsotg);
5887
5888 mod_timer(&hsotg->wkp_timer,
5889 jiffies + msecs_to_jiffies(71));
5890 }
5891
5892
5893 hsotg->in_ppd = 0;
5894 hsotg->lx_state = DWC2_L0;
5895
5896 dev_dbg(hsotg->dev, "Exiting host partial power down completed.\n");
5897 return ret;
5898 }
5899
5900
5901
5902
5903
5904
5905
5906
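/**
 * dwc2_host_enter_clock_gating() - Put the controller in clock gating.
 *
 * @hsotg: Programming view of the DWC_otg controller
 */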
5907 void dwc2_host_enter_clock_gating(struct dwc2_hsotg *hsotg)
5908 {
5909 u32 hprt0;
5910 u32 pcgctl;
5911
5912 dev_dbg(hsotg->dev, "Entering host clock gating.\n");
5913
5914
5915 hprt0 = dwc2_read_hprt0(hsotg);
5916 hprt0 |= HPRT0_SUSP;
5917 dwc2_writel(hsotg, hprt0, HPRT0);
5918
5919
5920 pcgctl = dwc2_readl(hsotg, PCGCTL);
5921 pcgctl |= PCGCTL_STOPPCLK;
5922 dwc2_writel(hsotg, pcgctl, PCGCTL);
5923 udelay(5);
5924
5925
5926 pcgctl = dwc2_readl(hsotg, PCGCTL);
5927 pcgctl |= PCGCTL_GATEHCLK;
5928 dwc2_writel(hsotg, pcgctl, PCGCTL);
5929 udelay(5);
5930
5931 hsotg->bus_suspended = true;
5932 hsotg->lx_state = DWC2_L2;
5933 }
5934
5935
5936
5937
5938
5939
5940
5941
5942
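/**
 * dwc2_host_exit_clock_gating() - Exit the controller from clock gating.
 *
 * @hsotg: Programming view of the DWC_otg controller
 * @rem_wakeup: indicates whether the exit is due to a remote wakeup
 */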
5943 void dwc2_host_exit_clock_gating(struct dwc2_hsotg *hsotg, int rem_wakeup)
5944 {
5945 u32 hprt0;
5946 u32 pcgctl;
5947
5948 dev_dbg(hsotg->dev, "Exiting host clock gating.\n");
5949
5950
5951 pcgctl = dwc2_readl(hsotg, PCGCTL);
5952 pcgctl &= ~PCGCTL_GATEHCLK;
5953 dwc2_writel(hsotg, pcgctl, PCGCTL);
5954 udelay(5);
5955
5956
5957 pcgctl = dwc2_readl(hsotg, PCGCTL);
5958 pcgctl &= ~PCGCTL_STOPPCLK;
5959 dwc2_writel(hsotg, pcgctl, PCGCTL);
5960 udelay(5);
5961
5962
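/* Drive resume signaling and exit suspend mode on the port */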
5963 hprt0 = dwc2_read_hprt0(hsotg);
5964 hprt0 |= HPRT0_RES;
5965 hprt0 &= ~HPRT0_SUSP;
5966 dwc2_writel(hsotg, hprt0, HPRT0);
5967 udelay(5);
5968
5969 if (!rem_wakeup) {
5970
5971 msleep(USB_RESUME_TIMEOUT);
5972
5973
5974 hprt0 = dwc2_read_hprt0(hsotg);
5975 hprt0 &= ~HPRT0_RES;
5976 dwc2_writel(hsotg, hprt0, HPRT0);
5977
5978 hsotg->bus_suspended = false;
5979 hsotg->lx_state = DWC2_L0;
5980 } else {
5981 mod_timer(&hsotg->wkp_timer,
5982 jiffies + msecs_to_jiffies(71));
5983 }
5984 }