/*
 * hcd_ddma.c - DesignWare HS OTG Controller descriptor DMA routines
 *
 * This file contains the Descriptor DMA implementation for Host mode.
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/spinlock.h>
#include <linux/interrupt.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/slab.h>
#include <linux/usb.h>

#include <linux/usb/hcd.h>
#include <linux/usb/ch11.h>

#include "core.h"
#include "hcd.h"

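/* Map a (micro)frame number to an index in the 64-entry frame list */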
static u16 dwc2_frame_list_idx(u16 frame)
{
	return frame & (FRLISTEN_64_SIZE - 1);
}

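/*
 * Advance (or, below, rewind) a descriptor list index with wrap-around,
 * using the descriptor list size that matches the endpoint speed
 */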
static u16 dwc2_desclist_idx_inc(u16 idx, u16 inc, u8 speed)
{
	return (idx + inc) &
		((speed == USB_SPEED_HIGH ? MAX_DMA_DESC_NUM_HS_ISOC :
		  MAX_DMA_DESC_NUM_GENERIC) - 1);
}

static u16 dwc2_desclist_idx_dec(u16 idx, u16 inc, u8 speed)
{
	return (idx - inc) &
		((speed == USB_SPEED_HIGH ? MAX_DMA_DESC_NUM_HS_ISOC :
		  MAX_DMA_DESC_NUM_GENERIC) - 1);
}

static u16 dwc2_max_desc_num(struct dwc2_qh *qh)
{
	return (qh->ep_type == USB_ENDPOINT_XFER_ISOC &&
		qh->dev_speed == USB_SPEED_HIGH) ?
		MAX_DMA_DESC_NUM_HS_ISOC : MAX_DMA_DESC_NUM_GENERIC;
}

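/*
 * Servicing period in frame list entries: the HS interval is given in
 * microframes, so convert it to full frames, rounding up
 */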
static u16 dwc2_frame_incr_val(struct dwc2_qh *qh)
{
	return qh->dev_speed == USB_SPEED_HIGH ?
	       (qh->host_interval + 8 - 1) / 8 : qh->host_interval;
}

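/* Allocate and DMA-map the descriptor list and per-descriptor byte counts */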
static int dwc2_desc_list_alloc(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh,
				gfp_t flags)
{
	struct kmem_cache *desc_cache;

	if (qh->ep_type == USB_ENDPOINT_XFER_ISOC &&
	    qh->dev_speed == USB_SPEED_HIGH)
		desc_cache = hsotg->desc_hsisoc_cache;
	else
		desc_cache = hsotg->desc_gen_cache;

	qh->desc_list_sz = sizeof(struct dwc2_dma_desc) *
						dwc2_max_desc_num(qh);

	qh->desc_list = kmem_cache_zalloc(desc_cache, flags | GFP_DMA);
	if (!qh->desc_list)
		return -ENOMEM;

	qh->desc_list_dma = dma_map_single(hsotg->dev, qh->desc_list,
					   qh->desc_list_sz,
					   DMA_TO_DEVICE);

	qh->n_bytes = kcalloc(dwc2_max_desc_num(qh), sizeof(u32), flags);
	if (!qh->n_bytes) {
		/* Unmap with the same direction the buffer was mapped with */
		dma_unmap_single(hsotg->dev, qh->desc_list_dma,
				 qh->desc_list_sz,
				 DMA_TO_DEVICE);
		kmem_cache_free(desc_cache, qh->desc_list);
		qh->desc_list = NULL;
		return -ENOMEM;
	}

	return 0;
}

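/* Unmap and free the descriptor list and the n_bytes array */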
static void dwc2_desc_list_free(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
{
	struct kmem_cache *desc_cache;

	if (qh->ep_type == USB_ENDPOINT_XFER_ISOC &&
	    qh->dev_speed == USB_SPEED_HIGH)
		desc_cache = hsotg->desc_hsisoc_cache;
	else
		desc_cache = hsotg->desc_gen_cache;

	if (qh->desc_list) {
		dma_unmap_single(hsotg->dev, qh->desc_list_dma,
				 qh->desc_list_sz, DMA_TO_DEVICE);
		kmem_cache_free(desc_cache, qh->desc_list);
		qh->desc_list = NULL;
	}

	kfree(qh->n_bytes);
	qh->n_bytes = NULL;
}

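/* Allocate and DMA-map the periodic frame list (FRLISTEN_64_SIZE 32-bit entries) */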
static int dwc2_frame_list_alloc(struct dwc2_hsotg *hsotg, gfp_t mem_flags)
{
	if (hsotg->frame_list)
		return 0;

	hsotg->frame_list_sz = 4 * FRLISTEN_64_SIZE;
	hsotg->frame_list = kzalloc(hsotg->frame_list_sz, GFP_ATOMIC | GFP_DMA);
	if (!hsotg->frame_list)
		return -ENOMEM;

	hsotg->frame_list_dma = dma_map_single(hsotg->dev, hsotg->frame_list,
					       hsotg->frame_list_sz,
					       DMA_TO_DEVICE);

	return 0;
}

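/* Unmap and free the frame list under the driver spinlock */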
static void dwc2_frame_list_free(struct dwc2_hsotg *hsotg)
{
	unsigned long flags;

	spin_lock_irqsave(&hsotg->lock, flags);

	if (!hsotg->frame_list) {
		spin_unlock_irqrestore(&hsotg->lock, flags);
		return;
	}

	dma_unmap_single(hsotg->dev, hsotg->frame_list_dma,
			 hsotg->frame_list_sz, DMA_TO_DEVICE);

	kfree(hsotg->frame_list);
	hsotg->frame_list = NULL;

	spin_unlock_irqrestore(&hsotg->lock, flags);
}

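/*
 * Program the frame list base address and enable periodic scheduling in
 * HCFG, unless it is already enabled
 */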
static void dwc2_per_sched_enable(struct dwc2_hsotg *hsotg, u32 fr_list_en)
{
	u32 hcfg;
	unsigned long flags;

	spin_lock_irqsave(&hsotg->lock, flags);

	hcfg = dwc2_readl(hsotg, HCFG);
	if (hcfg & HCFG_PERSCHEDENA) {
		/* already enabled */
		spin_unlock_irqrestore(&hsotg->lock, flags);
		return;
	}

	dwc2_writel(hsotg, hsotg->frame_list_dma, HFLBADDR);

	hcfg &= ~HCFG_FRLISTEN_MASK;
	hcfg |= fr_list_en | HCFG_PERSCHEDENA;
	dev_vdbg(hsotg->dev, "Enabling Periodic schedule\n");
	dwc2_writel(hsotg, hcfg, HCFG);

	spin_unlock_irqrestore(&hsotg->lock, flags);
}

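/* Clear HCFG_PERSCHEDENA to stop periodic schedule processing */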
static void dwc2_per_sched_disable(struct dwc2_hsotg *hsotg)
{
	u32 hcfg;
	unsigned long flags;

	spin_lock_irqsave(&hsotg->lock, flags);

	hcfg = dwc2_readl(hsotg, HCFG);
	if (!(hcfg & HCFG_PERSCHEDENA)) {
		/* already disabled */
		spin_unlock_irqrestore(&hsotg->lock, flags);
		return;
	}

	hcfg &= ~HCFG_PERSCHEDENA;
	dev_vdbg(hsotg->dev, "Disabling Periodic schedule\n");
	dwc2_writel(hsotg, hcfg, HCFG);

	spin_unlock_irqrestore(&hsotg->lock, flags);
}

/*
 * Activates/Deactivates FrameList entries for the channel based on endpoint
 * servicing period
 */
static void dwc2_update_frame_list(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh,
				   int enable)
{
	struct dwc2_host_chan *chan;
	u16 i, j, inc;

	if (!hsotg) {
		pr_err("hsotg = %p\n", hsotg);
		return;
	}

	if (!qh->channel) {
		dev_err(hsotg->dev, "qh->channel = %p\n", qh->channel);
		return;
	}

	if (!hsotg->frame_list) {
		dev_err(hsotg->dev, "hsotg->frame_list = %p\n",
			hsotg->frame_list);
		return;
	}

	chan = qh->channel;
	inc = dwc2_frame_incr_val(qh);
	if (qh->ep_type == USB_ENDPOINT_XFER_ISOC)
		i = dwc2_frame_list_idx(qh->next_active_frame);
	else
		i = 0;

	j = i;
	do {
		if (enable)
			hsotg->frame_list[j] |= 1 << chan->hc_num;
		else
			hsotg->frame_list[j] &= ~(1 << chan->hc_num);
		j = (j + inc) & (FRLISTEN_64_SIZE - 1);
	} while (j != i);

	/*
	 * Sync frame list since the controller will access it if the
	 * periodic channel is currently enabled
	 */
	dma_sync_single_for_device(hsotg->dev,
				   hsotg->frame_list_dma,
				   hsotg->frame_list_sz,
				   DMA_TO_DEVICE);

	if (!enable)
		return;

	chan->schinfo = 0;
	if (chan->speed == USB_SPEED_HIGH && qh->host_interval) {
		j = 1;
		/* TODO - check this */
		inc = (8 + qh->host_interval - 1) / qh->host_interval;
		for (i = 0; i < inc; i++) {
			chan->schinfo |= j;
			j = j << qh->host_interval;
		}
	} else {
		chan->schinfo = 0xff;
	}
}

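/*
 * Release the channel from the QH: update channel bookkeeping and the frame
 * list, return the channel to the free list, and clear the descriptor list
 */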
static void dwc2_release_channel_ddma(struct dwc2_hsotg *hsotg,
				      struct dwc2_qh *qh)
{
	struct dwc2_host_chan *chan = qh->channel;

	if (dwc2_qh_is_non_per(qh)) {
		if (hsotg->params.uframe_sched)
			hsotg->available_host_channels++;
		else
			hsotg->non_periodic_channels--;
	} else {
		dwc2_update_frame_list(hsotg, qh, 0);
		hsotg->available_host_channels++;
	}

	/*
	 * The condition is added to prevent double cleanup try in case of
	 * device disconnect. See channel cleanup in dwc2_hcd_disconnect().
	 */
	if (chan->qh) {
		if (!list_empty(&chan->hc_list_entry))
			list_del(&chan->hc_list_entry);
		dwc2_hc_cleanup(hsotg, chan);
		list_add_tail(&chan->hc_list_entry, &hsotg->free_hc_list);
		chan->qh = NULL;
	}

	qh->channel = NULL;
	qh->ntd = 0;

	if (qh->desc_list)
		memset(qh->desc_list, 0, sizeof(struct dwc2_dma_desc) *
		       dwc2_max_desc_num(qh));
}

/**
 * dwc2_hcd_qh_init_ddma() - Initializes a QH structure's Descriptor DMA
 * related members
 *
 * @hsotg: The HCD state structure for the DWC OTG controller
 * @qh:    The QH to init
 * @mem_flags: Indicates the type of memory allocation
 *
 * Return: 0 if successful, negative error code otherwise
 *
 * Allocates memory for the descriptor list. For the first periodic QH, also
 * allocates memory for the FrameList and enables periodic scheduling.
 */
int dwc2_hcd_qh_init_ddma(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh,
			  gfp_t mem_flags)
{
	int retval;

	if (qh->do_split) {
		dev_err(hsotg->dev,
			"SPLIT Transfers are not supported in Descriptor DMA mode.\n");
		retval = -EINVAL;
		goto err0;
	}

	retval = dwc2_desc_list_alloc(hsotg, qh, mem_flags);
	if (retval)
		goto err0;

	if (qh->ep_type == USB_ENDPOINT_XFER_ISOC ||
	    qh->ep_type == USB_ENDPOINT_XFER_INT) {
		if (!hsotg->frame_list) {
			retval = dwc2_frame_list_alloc(hsotg, mem_flags);
			if (retval)
				goto err1;
			/* Enable periodic schedule on first periodic QH */
			dwc2_per_sched_enable(hsotg, HCFG_FRLISTEN_64);
		}
	}

	qh->ntd = 0;
	return 0;

err1:
	dwc2_desc_list_free(hsotg, qh);
err0:
	return retval;
}

/**
 * dwc2_hcd_qh_free_ddma() - Frees a QH structure's Descriptor DMA related
 * members
 *
 * @hsotg: The HCD state structure for the DWC OTG controller
 * @qh:    The QH to free
 *
 * Frees descriptor list memory associated with the QH. If the QH is periodic
 * and the last one, also frees FrameList memory and disables periodic
 * scheduling.
 */
void dwc2_hcd_qh_free_ddma(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
{
	unsigned long flags;

	dwc2_desc_list_free(hsotg, qh);

	/*
	 * The channel can still be assigned here: seen on isochronous URB
	 * dequeue, where the channel is halted but no subsequent ChHltd
	 * interrupt arrives to release it, so it remains assigned when the
	 * endpoint-disable path gets here.
	 */
	spin_lock_irqsave(&hsotg->lock, flags);
	if (qh->channel)
		dwc2_release_channel_ddma(hsotg, qh);
	spin_unlock_irqrestore(&hsotg->lock, flags);

	if ((qh->ep_type == USB_ENDPOINT_XFER_ISOC ||
	     qh->ep_type == USB_ENDPOINT_XFER_INT) &&
	    (hsotg->params.uframe_sched ||
	     !hsotg->periodic_channels) && hsotg->frame_list) {
		dwc2_per_sched_disable(hsotg);
		dwc2_frame_list_free(hsotg);
	}
}

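/* Convert a frame list index into a descriptor list index */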
static u8 dwc2_frame_to_desc_idx(struct dwc2_qh *qh, u16 frame_idx)
{
	if (qh->dev_speed == USB_SPEED_HIGH)
		/* Descriptor set (8 descriptors) index which is 8-aligned */
		return (frame_idx & ((MAX_DMA_DESC_NUM_HS_ISOC / 8) - 1)) * 8;
	else
		return frame_idx & (MAX_DMA_DESC_NUM_GENERIC - 1);
}

/*
 * Determine starting frame for Isochronous transfer.
 * Few frames skipped to prevent race condition with HC.
 */
static u16 dwc2_calc_starting_frame(struct dwc2_hsotg *hsotg,
				    struct dwc2_qh *qh, u16 *skip_frames)
{
	u16 frame;

	hsotg->frame_number = dwc2_hcd_get_frame_number(hsotg);

	/*
	 * next_active_frame is always frame number (not uFrame) both in FS
	 * and HS!
	 */

	/*
	 * skip_frames is used to limit the number of activated descriptors,
	 * to avoid the situation where the HC services the last activated
	 * descriptor first.
	 * Example for FS:
	 * Current frame is 1, scheduled frame is 3. Since the HC always
	 * fetches the descriptor corresponding to curr_frame+1, the
	 * descriptor corresponding to frame 2 will be fetched. If the number
	 * of descriptors is max=64 (or greater) the list will be fully
	 * programmed with Active descriptors and it is possible (rarely) that
	 * the latest descriptor (considering rollback) corresponding to
	 * frame 2 will be serviced first. The HS case is more probable
	 * because, in fact, up to 11 uframes (16 in the code) may be skipped.
	 */
	if (qh->dev_speed == USB_SPEED_HIGH) {
		/*
		 * Consider uframe counter also, to start xfer asap. If half of
		 * the frame elapsed skip 2 frames otherwise just 1 frame.
		 * Starting descriptor index must be 8-aligned, so if the
		 * current frame is near to complete the next one is skipped as
		 * well.
		 */
		if (dwc2_micro_frame_num(hsotg->frame_number) >= 5) {
			*skip_frames = 2 * 8;
			frame = dwc2_frame_num_inc(hsotg->frame_number,
						   *skip_frames);
		} else {
			*skip_frames = 1 * 8;
			frame = dwc2_frame_num_inc(hsotg->frame_number,
						   *skip_frames);
		}

		frame = dwc2_full_frame_num(frame);
	} else {
		/*
		 * Two frames are skipped for FS - the current and the next.
		 * But for descriptor programming, 1 frame (descriptor) is
		 * enough, see example above.
		 */
		*skip_frames = 1;
		frame = dwc2_frame_num_inc(hsotg->frame_number, 2);
	}

	return frame;
}

/*
 * Calculate initial descriptor index for isochronous transfer based on
 * scheduled frame
 */
static u16 dwc2_recalc_initial_desc_idx(struct dwc2_hsotg *hsotg,
					struct dwc2_qh *qh)
{
	u16 frame, fr_idx, fr_idx_tmp, skip_frames;

	/*
	 * With the current ISOC processing algorithm the channel is released
	 * when there are no more QTDs in the list (qh->ntd == 0). Thus this
	 * function is called only when qh->ntd == 0 and qh->channel == NULL.
	 *
	 * So the qh->channel != NULL branch is not used; it is kept for
	 * another possible approach: do not disable and release the channel
	 * when the ISOC session completes, just move the QH to the inactive
	 * schedule until a new QTD arrives. On a new QTD the QH moves back to
	 * the 'ready' schedule, and the starting frame (and therefore the
	 * starting desc_index) are recalculated. In that case the channel is
	 * released only on ep_disable.
	 */

	/*
	 * Calculate starting descriptor index. For INTERRUPT endpoint it is
	 * always 0.
	 */
	if (qh->channel) {
		frame = dwc2_calc_starting_frame(hsotg, qh, &skip_frames);
		/*
		 * Calculate initial descriptor index based on FrameList
		 * current bitmap and servicing period
		 */
		fr_idx_tmp = dwc2_frame_list_idx(frame);
		fr_idx = (FRLISTEN_64_SIZE +
			  dwc2_frame_list_idx(qh->next_active_frame) -
			  fr_idx_tmp) % dwc2_frame_incr_val(qh);
		fr_idx = (fr_idx + fr_idx_tmp) % FRLISTEN_64_SIZE;
	} else {
		qh->next_active_frame = dwc2_calc_starting_frame(hsotg, qh,
								 &skip_frames);
		fr_idx = dwc2_frame_list_idx(qh->next_active_frame);
	}

	qh->td_first = qh->td_last = dwc2_frame_to_desc_idx(qh, fr_idx);

	return skip_frames;
}

#define ISOC_URB_GIVEBACK_ASAP

#define MAX_ISOC_XFER_SIZE_FS	1023
#define MAX_ISOC_XFER_SIZE_HS	3072
#define DESCNUM_THRESHOLD	4

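/* Fill and activate one isochronous DMA descriptor for the given frame */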
static void dwc2_fill_host_isoc_dma_desc(struct dwc2_hsotg *hsotg,
					 struct dwc2_qtd *qtd,
					 struct dwc2_qh *qh, u32 max_xfer_size,
					 u16 idx)
{
	struct dwc2_dma_desc *dma_desc = &qh->desc_list[idx];
	struct dwc2_hcd_iso_packet_desc *frame_desc;

	memset(dma_desc, 0, sizeof(*dma_desc));
	frame_desc = &qtd->urb->iso_descs[qtd->isoc_frame_index_last];

	if (frame_desc->length > max_xfer_size)
		qh->n_bytes[idx] = max_xfer_size;
	else
		qh->n_bytes[idx] = frame_desc->length;

	dma_desc->buf = (u32)(qtd->urb->dma + frame_desc->offset);
	dma_desc->status = qh->n_bytes[idx] << HOST_DMA_ISOC_NBYTES_SHIFT &
			   HOST_DMA_ISOC_NBYTES_MASK;

	/* Set active bit */
	dma_desc->status |= HOST_DMA_A;

	qh->ntd++;
	qtd->isoc_frame_index_last++;

#ifdef ISOC_URB_GIVEBACK_ASAP
	/* Set IOC for each descriptor corresponding to last frame of URB */
	if (qtd->isoc_frame_index_last == qtd->urb->packet_count)
		dma_desc->status |= HOST_DMA_IOC;
#endif

	dma_sync_single_for_device(hsotg->dev,
				   qh->desc_list_dma +
				   (idx * sizeof(struct dwc2_dma_desc)),
				   sizeof(struct dwc2_dma_desc),
				   DMA_TO_DEVICE);
}

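/*
 * Program isochronous DMA descriptors for all queued QTDs, then choose
 * where the IOC (interrupt-on-complete) bit goes
 */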
static void dwc2_init_isoc_dma_desc(struct dwc2_hsotg *hsotg,
				    struct dwc2_qh *qh, u16 skip_frames)
{
	struct dwc2_qtd *qtd;
	u32 max_xfer_size;
	u16 idx, inc, n_desc = 0, ntd_max = 0;
	u16 cur_idx;
	u16 next_idx;

	idx = qh->td_last;
	inc = qh->host_interval;
	hsotg->frame_number = dwc2_hcd_get_frame_number(hsotg);
	cur_idx = dwc2_frame_list_idx(hsotg->frame_number);
	next_idx = dwc2_desclist_idx_inc(qh->td_last, inc, qh->dev_speed);

	/*
	 * Ensure current frame number didn't overstep last scheduled
	 * descriptor. If it happens, the only way to recover is to move
	 * qh->td_last to current frame number + 1, so that the next isoc
	 * descriptor will be scheduled on frame number + 1 and not on a
	 * past frame.
	 */
	if (dwc2_frame_idx_num_gt(cur_idx, next_idx) || (cur_idx == next_idx)) {
		if (inc < 32) {
			dev_vdbg(hsotg->dev,
				 "current frame number overstep last descriptor\n");
			qh->td_last = dwc2_desclist_idx_inc(cur_idx, inc,
							    qh->dev_speed);
			idx = qh->td_last;
		}
	}

	if (qh->host_interval) {
		ntd_max = (dwc2_max_desc_num(qh) + qh->host_interval - 1) /
				qh->host_interval;
		if (skip_frames && !qh->channel)
			ntd_max -= skip_frames / qh->host_interval;
	}

	max_xfer_size = qh->dev_speed == USB_SPEED_HIGH ?
			MAX_ISOC_XFER_SIZE_HS : MAX_ISOC_XFER_SIZE_FS;

	list_for_each_entry(qtd, &qh->qtd_list, qtd_list_entry) {
		if (qtd->in_process &&
		    qtd->isoc_frame_index_last ==
		    qtd->urb->packet_count)
			continue;

		qtd->isoc_td_first = idx;
		while (qh->ntd < ntd_max && qtd->isoc_frame_index_last <
				qtd->urb->packet_count) {
			dwc2_fill_host_isoc_dma_desc(hsotg, qtd, qh,
						     max_xfer_size, idx);
			idx = dwc2_desclist_idx_inc(idx, inc, qh->dev_speed);
			n_desc++;
		}
		qtd->isoc_td_last = idx;
		qtd->in_process = 1;
	}

	qh->td_last = idx;

#ifdef ISOC_URB_GIVEBACK_ASAP
	/* Set IOC for last descriptor if descriptor list is full */
	if (qh->ntd == ntd_max) {
		idx = dwc2_desclist_idx_dec(qh->td_last, inc, qh->dev_speed);
		qh->desc_list[idx].status |= HOST_DMA_IOC;
		dma_sync_single_for_device(hsotg->dev,
					   qh->desc_list_dma + (idx *
					   sizeof(struct dwc2_dma_desc)),
					   sizeof(struct dwc2_dma_desc),
					   DMA_TO_DEVICE);
	}
#else
	/*
	 * Set IOC bit only for one descriptor. Always try to be ahead of HW
	 * processing, i.e. on IOC generation the driver activates the next
	 * descriptor but the core continues to process descriptors following
	 * the one with IOC set.
	 */
	if (n_desc > DESCNUM_THRESHOLD)
		/*
		 * Move IOC "up". Required even if there is only one QTD
		 * in the list, because QTDs might continue to be queued,
		 * but during the activation it was only one queued.
		 * Actually more than one QTD might be in the list if this
		 * function is called from XferCompletion - QTDs were queued
		 * during HW processing of the previous descriptor chunk.
		 */
		idx = dwc2_desclist_idx_dec(idx, inc * ((qh->ntd + 1) / 2),
					    qh->dev_speed);
	else
		/*
		 * Set the IOC for the latest descriptor if either the number
		 * of descriptors is not greater than the threshold or no more
		 * new descriptors were activated
		 */
		idx = dwc2_desclist_idx_dec(qh->td_last, inc, qh->dev_speed);

	qh->desc_list[idx].status |= HOST_DMA_IOC;
	dma_sync_single_for_device(hsotg->dev,
				   qh->desc_list_dma +
				   (idx * sizeof(struct dwc2_dma_desc)),
				   sizeof(struct dwc2_dma_desc),
				   DMA_TO_DEVICE);
#endif
}

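/* Fill one non-isochronous DMA descriptor and sync it to the device */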
static void dwc2_fill_host_dma_desc(struct dwc2_hsotg *hsotg,
				    struct dwc2_host_chan *chan,
				    struct dwc2_qtd *qtd, struct dwc2_qh *qh,
				    int n_desc)
{
	struct dwc2_dma_desc *dma_desc = &qh->desc_list[n_desc];
	int len = chan->xfer_len;

	if (len > HOST_DMA_NBYTES_LIMIT - (chan->max_packet - 1))
		len = HOST_DMA_NBYTES_LIMIT - (chan->max_packet - 1);

	if (chan->ep_is_in) {
		int num_packets;

		if (len > 0 && chan->max_packet)
			num_packets = (len + chan->max_packet - 1)
					/ chan->max_packet;
		else
			/* Need 1 packet for transfer length of 0 */
			num_packets = 1;

		/* Always program an integral # of packets for IN transfers */
		len = num_packets * chan->max_packet;
	}

	dma_desc->status = len << HOST_DMA_NBYTES_SHIFT & HOST_DMA_NBYTES_MASK;
	qh->n_bytes[n_desc] = len;

	if (qh->ep_type == USB_ENDPOINT_XFER_CONTROL &&
	    qtd->control_phase == DWC2_CONTROL_SETUP)
		dma_desc->status |= HOST_DMA_SUP;

	dma_desc->buf = (u32)chan->xfer_dma;

	dma_sync_single_for_device(hsotg->dev,
				   qh->desc_list_dma +
				   (n_desc * sizeof(struct dwc2_dma_desc)),
				   sizeof(struct dwc2_dma_desc),
				   DMA_TO_DEVICE);

	/*
	 * Last (or only) descriptor of IN transfer with actual size less
	 * than MaxPacket
	 */
	if (len > chan->xfer_len) {
		chan->xfer_len = 0;
	} else {
		chan->xfer_dma += len;
		chan->xfer_len -= len;
	}
}

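/*
 * Build the descriptor chain for a non-isochronous transfer, then set the
 * IOC/EOL bits on the last descriptor and activate the chain
 */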
static void dwc2_init_non_isoc_dma_desc(struct dwc2_hsotg *hsotg,
					struct dwc2_qh *qh)
{
	struct dwc2_qtd *qtd;
	struct dwc2_host_chan *chan = qh->channel;
	int n_desc = 0;

	dev_vdbg(hsotg->dev, "%s(): qh=%p dma=%08lx len=%d\n", __func__, qh,
		 (unsigned long)chan->xfer_dma, chan->xfer_len);

	/*
	 * Start with chan->xfer_dma initialized in assign_and_init_hc(), then
	 * if the SG transfer consists of multiple URBs, this pointer is
	 * re-assigned to the buffer of the currently processed QTD. For
	 * a non-SG request there is always one QTD active.
	 */
	list_for_each_entry(qtd, &qh->qtd_list, qtd_list_entry) {
		dev_vdbg(hsotg->dev, "qtd=%p\n", qtd);

		if (n_desc) {
			/* SG request - more than 1 QTD */
			chan->xfer_dma = qtd->urb->dma +
					qtd->urb->actual_length;
			chan->xfer_len = qtd->urb->length -
					qtd->urb->actual_length;
			dev_vdbg(hsotg->dev, "buf=%08lx len=%d\n",
				 (unsigned long)chan->xfer_dma, chan->xfer_len);
		}

		qtd->n_desc = 0;
		do {
			if (n_desc > 1) {
				qh->desc_list[n_desc - 1].status |= HOST_DMA_A;
				dev_vdbg(hsotg->dev,
					 "set A bit in desc %d (%p)\n",
					 n_desc - 1,
					 &qh->desc_list[n_desc - 1]);
				dma_sync_single_for_device(hsotg->dev,
							   qh->desc_list_dma +
							   ((n_desc - 1) *
							   sizeof(struct dwc2_dma_desc)),
							   sizeof(struct dwc2_dma_desc),
							   DMA_TO_DEVICE);
			}
			dwc2_fill_host_dma_desc(hsotg, chan, qtd, qh, n_desc);
			dev_vdbg(hsotg->dev,
				 "desc %d (%p) buf=%08x status=%08x\n",
				 n_desc, &qh->desc_list[n_desc],
				 qh->desc_list[n_desc].buf,
				 qh->desc_list[n_desc].status);
			qtd->n_desc++;
			n_desc++;
		} while (chan->xfer_len > 0 &&
			 n_desc != MAX_DMA_DESC_NUM_GENERIC);

		dev_vdbg(hsotg->dev, "n_desc=%d\n", n_desc);
		qtd->in_process = 1;
		if (qh->ep_type == USB_ENDPOINT_XFER_CONTROL)
			break;
		if (n_desc == MAX_DMA_DESC_NUM_GENERIC)
			break;
	}

	if (n_desc) {
		qh->desc_list[n_desc - 1].status |=
			HOST_DMA_IOC | HOST_DMA_EOL | HOST_DMA_A;
		dev_vdbg(hsotg->dev, "set IOC/EOL/A bits in desc %d (%p)\n",
			 n_desc - 1, &qh->desc_list[n_desc - 1]);
		dma_sync_single_for_device(hsotg->dev,
					   qh->desc_list_dma + (n_desc - 1) *
					   sizeof(struct dwc2_dma_desc),
					   sizeof(struct dwc2_dma_desc),
					   DMA_TO_DEVICE);
		if (n_desc > 1) {
			qh->desc_list[0].status |= HOST_DMA_A;
			dev_vdbg(hsotg->dev, "set A bit in desc 0 (%p)\n",
				 &qh->desc_list[0]);
			dma_sync_single_for_device(hsotg->dev,
						   qh->desc_list_dma,
						   sizeof(struct dwc2_dma_desc),
						   DMA_TO_DEVICE);
		}
		chan->ntd = n_desc;
	}
}

/**
 * dwc2_hcd_start_xfer_ddma() - Starts a transfer in Descriptor DMA mode
 *
 * @hsotg: The HCD state structure for the DWC OTG controller
 * @qh:    The QH to init
 *
 * For Control and Bulk endpoints, initializes descriptor list and starts the
 * transfer. For Interrupt and Isochronous endpoints, initializes descriptor
 * list then updates FrameList, marking appropriate entries as active.
 *
 * For Isochronous endpoints the starting descriptor index is calculated based
 * on the scheduled frame, but only on the first transfer descriptor within a
 * session. Then the transfer is started via enabling the channel.
 *
 * For Isochronous endpoints the channel is not halted on XferComplete
 * interrupt so it remains assigned to the endpoint (QH) until the session is
 * done.
 */
void dwc2_hcd_start_xfer_ddma(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
{
	/* Channel is already assigned */
	struct dwc2_host_chan *chan = qh->channel;
	u16 skip_frames = 0;

	switch (chan->ep_type) {
	case USB_ENDPOINT_XFER_CONTROL:
	case USB_ENDPOINT_XFER_BULK:
		dwc2_init_non_isoc_dma_desc(hsotg, qh);
		dwc2_hc_start_transfer_ddma(hsotg, chan);
		break;
	case USB_ENDPOINT_XFER_INT:
		dwc2_init_non_isoc_dma_desc(hsotg, qh);
		dwc2_update_frame_list(hsotg, qh, 1);
		dwc2_hc_start_transfer_ddma(hsotg, chan);
		break;
	case USB_ENDPOINT_XFER_ISOC:
		if (!qh->ntd)
			skip_frames = dwc2_recalc_initial_desc_idx(hsotg, qh);
		dwc2_init_isoc_dma_desc(hsotg, qh, skip_frames);

		if (!chan->xfer_started) {
			dwc2_update_frame_list(hsotg, qh, 1);

			/*
			 * Always set to max, instead of actual size, just to
			 * ignore checks after scheduling a new isoc descriptor
			 */
			chan->ntd = dwc2_max_desc_num(qh);

			/* Enable channel only once for ISOC */
			dwc2_hc_start_transfer_ddma(hsotg, chan);
		}

		break;
	default:
		break;
	}
}

#define DWC2_CMPL_DONE		1
#define DWC2_CMPL_STOP		2

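/*
 * Complete one isochronous descriptor: update the frame descriptor status
 * and actual length, and give back the URB once all its frames are done
 */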
static int dwc2_cmpl_host_isoc_dma_desc(struct dwc2_hsotg *hsotg,
					struct dwc2_host_chan *chan,
					struct dwc2_qtd *qtd,
					struct dwc2_qh *qh, u16 idx)
{
	struct dwc2_dma_desc *dma_desc;
	struct dwc2_hcd_iso_packet_desc *frame_desc;
	u16 remain = 0;
	int rc = 0;

	if (!qtd->urb)
		return -EINVAL;

	dma_sync_single_for_cpu(hsotg->dev, qh->desc_list_dma + (idx *
				sizeof(struct dwc2_dma_desc)),
				sizeof(struct dwc2_dma_desc),
				DMA_FROM_DEVICE);

	dma_desc = &qh->desc_list[idx];

	frame_desc = &qtd->urb->iso_descs[qtd->isoc_frame_index_last];
	dma_desc->buf = (u32)(qtd->urb->dma + frame_desc->offset);
	if (chan->ep_is_in)
		remain = (dma_desc->status & HOST_DMA_ISOC_NBYTES_MASK) >>
			 HOST_DMA_ISOC_NBYTES_SHIFT;

	if ((dma_desc->status & HOST_DMA_STS_MASK) == HOST_DMA_STS_PKTERR) {
		/*
		 * XactError, or unable to complete all the transactions
		 * in the scheduled micro-frame/frame, both indicated by
		 * HOST_DMA_STS_PKTERR
		 */
		qtd->urb->error_count++;
		frame_desc->actual_length = qh->n_bytes[idx] - remain;
		frame_desc->status = -EPROTO;
	} else {
		/* Success */
		frame_desc->actual_length = qh->n_bytes[idx] - remain;
		frame_desc->status = 0;
	}

	if (++qtd->isoc_frame_index == qtd->urb->packet_count) {
		/*
		 * urb->status is not used for isoc transfers here. The
		 * individual frame_desc status are used instead.
		 */
		dwc2_host_complete(hsotg, qtd, 0);
		dwc2_hcd_qtd_unlink_and_free(hsotg, qtd, qh);

		/*
		 * This check is necessary because urb_dequeue can be called
		 * from the urb complete callback (sound driver for example).
		 * All pending URBs are dequeued there, so no need for further
		 * processing.
		 */
		if (chan->halt_status == DWC2_HC_XFER_URB_DEQUEUE)
			return -1;
		rc = DWC2_CMPL_DONE;
	}

	qh->ntd--;

	/* Stop if IOC requested descriptor reached */
	if (dma_desc->status & HOST_DMA_IOC)
		rc = DWC2_CMPL_STOP;

	return rc;
}

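/*
 * Scan the isochronous descriptor list on channel halt, completing
 * descriptors (and URBs) until an active or IOC descriptor is reached
 */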
static void dwc2_complete_isoc_xfer_ddma(struct dwc2_hsotg *hsotg,
					 struct dwc2_host_chan *chan,
					 enum dwc2_halt_status halt_status)
{
	struct dwc2_hcd_iso_packet_desc *frame_desc;
	struct dwc2_qtd *qtd, *qtd_tmp;
	struct dwc2_qh *qh;
	u16 idx;
	int rc;

	qh = chan->qh;
	idx = qh->td_first;

	if (chan->halt_status == DWC2_HC_XFER_URB_DEQUEUE) {
		list_for_each_entry(qtd, &qh->qtd_list, qtd_list_entry)
			qtd->in_process = 0;
		return;
	}

	if (halt_status == DWC2_HC_XFER_AHB_ERR ||
	    halt_status == DWC2_HC_XFER_BABBLE_ERR) {
		/*
		 * Channel is halted in these error cases, considered as
		 * serious issues.
		 * Complete all URBs marking all frames as failed, irrespective
		 * of whether some of the descriptors (frames) succeeded or
		 * not. Pass the error code to the completion routine as well,
		 * to update urb->status; some class drivers might use it to
		 * stop queuing transfer requests.
		 */
		int err = halt_status == DWC2_HC_XFER_AHB_ERR ?
			  -EIO : -EOVERFLOW;

		list_for_each_entry_safe(qtd, qtd_tmp, &qh->qtd_list,
					 qtd_list_entry) {
			if (qtd->urb) {
				for (idx = 0; idx < qtd->urb->packet_count;
				     idx++) {
					frame_desc = &qtd->urb->iso_descs[idx];
					frame_desc->status = err;
				}

				dwc2_host_complete(hsotg, qtd, err);
			}

			dwc2_hcd_qtd_unlink_and_free(hsotg, qtd, qh);
		}

		return;
	}

	list_for_each_entry_safe(qtd, qtd_tmp, &qh->qtd_list, qtd_list_entry) {
		if (!qtd->in_process)
			break;

		/*
		 * Ensure idx corresponds to the descriptor where the first urb
		 * of this qtd was added. In fact, during isoc desc init, dwc2
		 * may skip an index if the current frame number is already
		 * over this index.
		 */
		if (idx != qtd->isoc_td_first) {
			dev_vdbg(hsotg->dev,
				 "try to complete %d instead of %d\n",
				 idx, qtd->isoc_td_first);
			idx = qtd->isoc_td_first;
		}

		do {
			struct dwc2_qtd *qtd_next;
			u16 cur_idx;

			rc = dwc2_cmpl_host_isoc_dma_desc(hsotg, chan, qtd, qh,
							  idx);
			if (rc < 0)
				return;
			idx = dwc2_desclist_idx_inc(idx, qh->host_interval,
						    chan->speed);
			if (!rc)
				continue;

			if (rc == DWC2_CMPL_DONE)
				break;

			/* rc == DWC2_CMPL_STOP */

			if (qh->host_interval >= 32)
				goto stop_scan;

			qh->td_first = idx;
			cur_idx = dwc2_frame_list_idx(hsotg->frame_number);
			qtd_next = list_first_entry(&qh->qtd_list,
						    struct dwc2_qtd,
						    qtd_list_entry);
			if (dwc2_frame_idx_num_gt(cur_idx,
						  qtd_next->isoc_td_last))
				break;

			goto stop_scan;

		} while (idx != qh->td_first);
	}

stop_scan:
	qh->td_first = idx;
}

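/*
 * Update URB state from one completed non-isochronous descriptor.
 * Returns nonzero if the URB failed and was given an error status.
 */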
static int dwc2_update_non_isoc_urb_state_ddma(struct dwc2_hsotg *hsotg,
					       struct dwc2_host_chan *chan,
					       struct dwc2_qtd *qtd,
					       struct dwc2_dma_desc *dma_desc,
					       enum dwc2_halt_status halt_status,
					       u32 n_bytes, int *xfer_done)
{
	struct dwc2_hcd_urb *urb = qtd->urb;
	u16 remain = 0;

	if (chan->ep_is_in)
		remain = (dma_desc->status & HOST_DMA_NBYTES_MASK) >>
			 HOST_DMA_NBYTES_SHIFT;

	dev_vdbg(hsotg->dev, "remain=%d dwc2_urb=%p\n", remain, urb);

	if (halt_status == DWC2_HC_XFER_AHB_ERR) {
		dev_err(hsotg->dev, "EIO\n");
		urb->status = -EIO;
		return 1;
	}

	if ((dma_desc->status & HOST_DMA_STS_MASK) == HOST_DMA_STS_PKTERR) {
		switch (halt_status) {
		case DWC2_HC_XFER_STALL:
			dev_vdbg(hsotg->dev, "Stall\n");
			urb->status = -EPIPE;
			break;
		case DWC2_HC_XFER_BABBLE_ERR:
			dev_err(hsotg->dev, "Babble\n");
			urb->status = -EOVERFLOW;
			break;
		case DWC2_HC_XFER_XACT_ERR:
			dev_err(hsotg->dev, "XactErr\n");
			urb->status = -EPROTO;
			break;
		default:
			dev_err(hsotg->dev,
				"%s: Unhandled descriptor error status (%d)\n",
				__func__, halt_status);
			break;
		}
		return 1;
	}

	if (dma_desc->status & HOST_DMA_A) {
		dev_vdbg(hsotg->dev,
			 "Active descriptor encountered on channel %d\n",
			 chan->hc_num);
		return 0;
	}

	if (chan->ep_type == USB_ENDPOINT_XFER_CONTROL) {
		if (qtd->control_phase == DWC2_CONTROL_DATA) {
			urb->actual_length += n_bytes - remain;
			if (remain || urb->actual_length >= urb->length) {
				/*
				 * For Control Data stage do not set urb->status
				 * to 0, to prevent URB callback. Set it when
				 * Status phase is done. See below.
				 */
				*xfer_done = 1;
			}
		} else if (qtd->control_phase == DWC2_CONTROL_STATUS) {
			urb->status = 0;
			*xfer_done = 1;
		}
		/* No handling for SETUP stage */
	} else {
		/* BULK and INTR */
		urb->actual_length += n_bytes - remain;
		dev_vdbg(hsotg->dev, "length=%d actual=%d\n", urb->length,
			 urb->actual_length);
		if (remain || urb->actual_length >= urb->length) {
			urb->status = 0;
			*xfer_done = 1;
		}
	}

	return 0;
}

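/*
 * Process one non-isochronous descriptor: update URB state, give back the
 * URB if done, and advance the control transfer phase when applicable
 */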
static int dwc2_process_non_isoc_desc(struct dwc2_hsotg *hsotg,
				      struct dwc2_host_chan *chan,
				      int chnum, struct dwc2_qtd *qtd,
				      int desc_num,
				      enum dwc2_halt_status halt_status,
				      int *xfer_done)
{
	struct dwc2_qh *qh = chan->qh;
	struct dwc2_hcd_urb *urb = qtd->urb;
	struct dwc2_dma_desc *dma_desc;
	u32 n_bytes;
	int failed;

	dev_vdbg(hsotg->dev, "%s()\n", __func__);

	if (!urb)
		return -EINVAL;

	dma_sync_single_for_cpu(hsotg->dev,
				qh->desc_list_dma + (desc_num *
				sizeof(struct dwc2_dma_desc)),
				sizeof(struct dwc2_dma_desc),
				DMA_FROM_DEVICE);

	dma_desc = &qh->desc_list[desc_num];
	n_bytes = qh->n_bytes[desc_num];
	dev_vdbg(hsotg->dev,
		 "qtd=%p dwc2_urb=%p desc_num=%d desc=%p n_bytes=%d\n",
		 qtd, urb, desc_num, dma_desc, n_bytes);
	failed = dwc2_update_non_isoc_urb_state_ddma(hsotg, chan, qtd, dma_desc,
						     halt_status, n_bytes,
						     xfer_done);
	if (failed || (*xfer_done && urb->status != -EINPROGRESS)) {
		dwc2_host_complete(hsotg, qtd, urb->status);
		dwc2_hcd_qtd_unlink_and_free(hsotg, qtd, qh);
		dev_vdbg(hsotg->dev, "failed=%1x xfer_done=%1x\n",
			 failed, *xfer_done);
		return failed;
	}

	if (qh->ep_type == USB_ENDPOINT_XFER_CONTROL) {
		switch (qtd->control_phase) {
		case DWC2_CONTROL_SETUP:
			if (urb->length > 0)
				qtd->control_phase = DWC2_CONTROL_DATA;
			else
				qtd->control_phase = DWC2_CONTROL_STATUS;
			dev_vdbg(hsotg->dev,
				 "  Control setup transaction done\n");
			break;
		case DWC2_CONTROL_DATA:
			if (*xfer_done) {
				qtd->control_phase = DWC2_CONTROL_STATUS;
				dev_vdbg(hsotg->dev,
					 "  Control data transfer done\n");
			} else if (desc_num + 1 == qtd->n_desc) {
				/*
				 * Last descriptor for Control data stage which
				 * is not completed yet
				 */
				dwc2_hcd_save_data_toggle(hsotg, chan, chnum,
							  qtd);
			}
			break;
		default:
			break;
		}
	}

	return 0;
}

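/*
 * Walk all QTDs of the QH, processing their descriptors, then update data
 * toggle and PING state according to the halt status
 */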
static void dwc2_complete_non_isoc_xfer_ddma(struct dwc2_hsotg *hsotg,
					     struct dwc2_host_chan *chan,
					     int chnum,
					     enum dwc2_halt_status halt_status)
{
	struct list_head *qtd_item, *qtd_tmp;
	struct dwc2_qh *qh = chan->qh;
	struct dwc2_qtd *qtd = NULL;
	int xfer_done;
	int desc_num = 0;

	if (chan->halt_status == DWC2_HC_XFER_URB_DEQUEUE) {
		list_for_each_entry(qtd, &qh->qtd_list, qtd_list_entry)
			qtd->in_process = 0;
		return;
	}

	list_for_each_safe(qtd_item, qtd_tmp, &qh->qtd_list) {
		int i;
		int qtd_desc_count;

		qtd = list_entry(qtd_item, struct dwc2_qtd, qtd_list_entry);
		xfer_done = 0;
		qtd_desc_count = qtd->n_desc;

		for (i = 0; i < qtd_desc_count; i++) {
			if (dwc2_process_non_isoc_desc(hsotg, chan, chnum, qtd,
						       desc_num, halt_status,
						       &xfer_done)) {
				qtd = NULL;
				goto stop_scan;
			}

			desc_num++;
		}
	}

stop_scan:
	if (qh->ep_type != USB_ENDPOINT_XFER_CONTROL) {
		/*
		 * Resetting the data toggle for bulk and interrupt endpoints
		 * in case of stall. See handle_hc_stall_intr().
		 */
		if (halt_status == DWC2_HC_XFER_STALL)
			qh->data_toggle = DWC2_HC_PID_DATA0;
		else
			dwc2_hcd_save_data_toggle(hsotg, chan, chnum, NULL);
	}

	if (halt_status == DWC2_HC_XFER_COMPLETE) {
		if (chan->hcint & HCINTMSK_NYET) {
			/*
			 * Got a NYET on the last transaction of the transfer.
			 * It means that the endpoint should be in the PING
			 * state at the beginning of the next transfer.
			 */
			qh->ping_state = 1;
		}
	}
}

/**
 * dwc2_hcd_complete_xfer_ddma() - Scans the descriptor list, updates URB's
 * status and calls completion routine for the URB if it's done. Called from
 * interrupt handlers.
 *
 * @hsotg:       The HCD state structure for the DWC OTG controller
 * @chan:        Host channel the transfer is completed on
 * @chnum:       Index of Host channel registers
 * @halt_status: Reason the channel is being halted or just XferComplete
 *               for isochronous transfers
 *
 * Releases the channel to be used by other transfers.
 * In case of an Isochronous endpoint the channel is not halted until the end
 * of the session, i.e. until the QTD list is empty.
 * If the periodic scheduling queue is empty, deactivates periodic scheduling.
 */
void dwc2_hcd_complete_xfer_ddma(struct dwc2_hsotg *hsotg,
				 struct dwc2_host_chan *chan, int chnum,
				 enum dwc2_halt_status halt_status)
{
	struct dwc2_qh *qh = chan->qh;
	int continue_isoc_xfer = 0;
	enum dwc2_transaction_type tr_type;

	if (chan->ep_type == USB_ENDPOINT_XFER_ISOC) {
		dwc2_complete_isoc_xfer_ddma(hsotg, chan, halt_status);

		/* Release the channel if halted or session completed */
		if (halt_status != DWC2_HC_XFER_COMPLETE ||
		    list_empty(&qh->qtd_list)) {
			struct dwc2_qtd *qtd, *qtd_tmp;

			/*
			 * Kill all remaining QTDs since the channel has been
			 * halted
			 */
			list_for_each_entry_safe(qtd, qtd_tmp,
						 &qh->qtd_list,
						 qtd_list_entry) {
				dwc2_host_complete(hsotg, qtd,
						   -ECONNRESET);
				dwc2_hcd_qtd_unlink_and_free(hsotg,
							     qtd, qh);
			}

			/* Halt the channel if session completed */
			if (halt_status == DWC2_HC_XFER_COMPLETE)
				dwc2_hc_halt(hsotg, chan, halt_status);
			dwc2_release_channel_ddma(hsotg, qh);
			dwc2_hcd_qh_unlink(hsotg, qh);
		} else {
			/* Keep in assigned schedule to continue transfer */
			list_move_tail(&qh->qh_list_entry,
				       &hsotg->periodic_sched_assigned);
			/*
			 * If the channel is halted during giving back the URBs
			 * in completion, no need to continue the transfer
			 */
			if (!chan->halt_status)
				continue_isoc_xfer = 1;
		}
		/*
		 * Todo: Consider the case when the period exceeds the
		 * FrameList size. The Frame Rollover interrupt should be used.
		 */
	} else {
		/*
		 * Scan descriptor list to complete the URB(s), then release
		 * the channel
		 */
		dwc2_complete_non_isoc_xfer_ddma(hsotg, chan, chnum,
						 halt_status);
		dwc2_release_channel_ddma(hsotg, qh);
		dwc2_hcd_qh_unlink(hsotg, qh);

		if (!list_empty(&qh->qtd_list)) {
			/*
			 * Add back to inactive non-periodic schedule on normal
			 * completion
			 */
			dwc2_hcd_qh_add(hsotg, qh);
		}
	}

	tr_type = dwc2_hcd_select_transactions(hsotg);
	if (tr_type != DWC2_TRANSACTION_NONE || continue_isoc_xfer) {
		if (continue_isoc_xfer) {
			if (tr_type == DWC2_TRANSACTION_NONE)
				tr_type = DWC2_TRANSACTION_PERIODIC;
			else if (tr_type == DWC2_TRANSACTION_NON_PERIODIC)
				tr_type = DWC2_TRANSACTION_ALL;
		}
		dwc2_hcd_queue_transactions(hsotg, tr_type);
	}
}