// SPDX-License-Identifier: GPL-2.0
/*
 * USB4 specific functionality
 *
 * Copyright (C) 2019, Intel Corporation
 */
0010 #include <linux/delay.h>
0011 #include <linux/ktime.h>
0012
0013 #include "sb_regs.h"
0014 #include "tb.h"
0015
0016 #define USB4_DATA_RETRIES 3
0017
0018 enum usb4_sb_target {
0019 USB4_SB_TARGET_ROUTER,
0020 USB4_SB_TARGET_PARTNER,
0021 USB4_SB_TARGET_RETIMER,
0022 };
0023
0024 #define USB4_NVM_READ_OFFSET_MASK GENMASK(23, 2)
0025 #define USB4_NVM_READ_OFFSET_SHIFT 2
0026 #define USB4_NVM_READ_LENGTH_MASK GENMASK(27, 24)
0027 #define USB4_NVM_READ_LENGTH_SHIFT 24
0028
0029 #define USB4_NVM_SET_OFFSET_MASK USB4_NVM_READ_OFFSET_MASK
0030 #define USB4_NVM_SET_OFFSET_SHIFT USB4_NVM_READ_OFFSET_SHIFT
0031
0032 #define USB4_DROM_ADDRESS_MASK GENMASK(14, 2)
0033 #define USB4_DROM_ADDRESS_SHIFT 2
0034 #define USB4_DROM_SIZE_MASK GENMASK(19, 15)
0035 #define USB4_DROM_SIZE_SHIFT 15
0036
0037 #define USB4_NVM_SECTOR_SIZE_MASK GENMASK(23, 0)
0038
0039 #define USB4_BA_LENGTH_MASK GENMASK(7, 0)
0040 #define USB4_BA_INDEX_MASK GENMASK(15, 0)
0041
0042 enum usb4_ba_index {
0043 USB4_BA_MAX_USB3 = 0x1,
0044 USB4_BA_MIN_DP_AUX = 0x2,
0045 USB4_BA_MIN_DP_MAIN = 0x3,
0046 USB4_BA_MAX_PCIE = 0x4,
0047 USB4_BA_MAX_HI = 0x5,
0048 };
0049
0050 #define USB4_BA_VALUE_MASK GENMASK(31, 16)
0051 #define USB4_BA_VALUE_SHIFT 16
0052
0053 static int usb4_native_switch_op(struct tb_switch *sw, u16 opcode,
0054 u32 *metadata, u8 *status,
0055 const void *tx_data, size_t tx_dwords,
0056 void *rx_data, size_t rx_dwords)
0057 {
0058 u32 val;
0059 int ret;
0060
0061 if (metadata) {
0062 ret = tb_sw_write(sw, metadata, TB_CFG_SWITCH, ROUTER_CS_25, 1);
0063 if (ret)
0064 return ret;
0065 }
0066 if (tx_dwords) {
0067 ret = tb_sw_write(sw, tx_data, TB_CFG_SWITCH, ROUTER_CS_9,
0068 tx_dwords);
0069 if (ret)
0070 return ret;
0071 }
0072
0073 val = opcode | ROUTER_CS_26_OV;
0074 ret = tb_sw_write(sw, &val, TB_CFG_SWITCH, ROUTER_CS_26, 1);
0075 if (ret)
0076 return ret;
0077
0078 ret = tb_switch_wait_for_bit(sw, ROUTER_CS_26, ROUTER_CS_26_OV, 0, 500);
0079 if (ret)
0080 return ret;
0081
0082 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, ROUTER_CS_26, 1);
0083 if (ret)
0084 return ret;
0085
0086 if (val & ROUTER_CS_26_ONS)
0087 return -EOPNOTSUPP;
0088
0089 if (status)
0090 *status = (val & ROUTER_CS_26_STATUS_MASK) >>
0091 ROUTER_CS_26_STATUS_SHIFT;
0092
0093 if (metadata) {
0094 ret = tb_sw_read(sw, metadata, TB_CFG_SWITCH, ROUTER_CS_25, 1);
0095 if (ret)
0096 return ret;
0097 }
0098 if (rx_dwords) {
0099 ret = tb_sw_read(sw, rx_data, TB_CFG_SWITCH, ROUTER_CS_9,
0100 rx_dwords);
0101 if (ret)
0102 return ret;
0103 }
0104
0105 return 0;
0106 }
0107
0108 static int __usb4_switch_op(struct tb_switch *sw, u16 opcode, u32 *metadata,
0109 u8 *status, const void *tx_data, size_t tx_dwords,
0110 void *rx_data, size_t rx_dwords)
0111 {
0112 const struct tb_cm_ops *cm_ops = sw->tb->cm_ops;
0113
0114 if (tx_dwords > NVM_DATA_DWORDS || rx_dwords > NVM_DATA_DWORDS)
0115 return -EINVAL;
0116
/*
 * If the connection manager implementation provides USB4 router
 * operation proxy callback, call it here instead of running the
 * operation natively.
 */
0122 if (cm_ops->usb4_switch_op) {
0123 int ret;
0124
0125 ret = cm_ops->usb4_switch_op(sw, opcode, metadata, status,
0126 tx_data, tx_dwords, rx_data,
0127 rx_dwords);
0128 if (ret != -EOPNOTSUPP)
0129 return ret;
0130
/*
 * If the proxy was not supported then run the native
 * router operation instead.
 */
0135 }
0136
0137 return usb4_native_switch_op(sw, opcode, metadata, status, tx_data,
0138 tx_dwords, rx_data, rx_dwords);
0139 }
0140
0141 static inline int usb4_switch_op(struct tb_switch *sw, u16 opcode,
0142 u32 *metadata, u8 *status)
0143 {
0144 return __usb4_switch_op(sw, opcode, metadata, status, NULL, 0, NULL, 0);
0145 }
0146
0147 static inline int usb4_switch_op_data(struct tb_switch *sw, u16 opcode,
0148 u32 *metadata, u8 *status,
0149 const void *tx_data, size_t tx_dwords,
0150 void *rx_data, size_t rx_dwords)
0151 {
0152 return __usb4_switch_op(sw, opcode, metadata, status, tx_data,
0153 tx_dwords, rx_data, rx_dwords);
0154 }
0155
0156 static void usb4_switch_check_wakes(struct tb_switch *sw)
0157 {
0158 struct tb_port *port;
0159 bool wakeup = false;
0160 u32 val;
0161
0162 if (!device_may_wakeup(&sw->dev))
0163 return;
0164
0165 if (tb_route(sw)) {
0166 if (tb_sw_read(sw, &val, TB_CFG_SWITCH, ROUTER_CS_6, 1))
0167 return;
0168
0169 tb_sw_dbg(sw, "PCIe wake: %s, USB3 wake: %s\n",
0170 (val & ROUTER_CS_6_WOPS) ? "yes" : "no",
0171 (val & ROUTER_CS_6_WOUS) ? "yes" : "no");
0172
0173 wakeup = val & (ROUTER_CS_6_WOPS | ROUTER_CS_6_WOUS);
0174 }
0175
/* Check wake over USB4 for all connected downstream USB4 ports */
0177 tb_switch_for_each_port(sw, port) {
0178 if (!tb_port_has_remote(port))
0179 continue;
0180
0181 if (tb_port_read(port, &val, TB_CFG_PORT,
0182 port->cap_usb4 + PORT_CS_18, 1))
0183 break;
0184
0185 tb_port_dbg(port, "USB4 wake: %s\n",
0186 (val & PORT_CS_18_WOU4S) ? "yes" : "no");
0187
0188 if (val & PORT_CS_18_WOU4S)
0189 wakeup = true;
0190 }
0191
0192 if (wakeup)
0193 pm_wakeup_event(&sw->dev, 0);
0194 }
0195
0196 static bool link_is_usb4(struct tb_port *port)
0197 {
0198 u32 val;
0199
0200 if (!port->cap_usb4)
0201 return false;
0202
0203 if (tb_port_read(port, &val, TB_CFG_PORT,
0204 port->cap_usb4 + PORT_CS_18, 1))
0205 return false;
0206
0207 return !(val & PORT_CS_18_TCM);
0208 }
0209
/**
 * usb4_switch_setup() - Additional setup for USB4 device
 * @sw: USB4 router to setup
 *
 * USB4 routers need additional settings in order to enable all the
 * tunneling. This function enables USB and PCIe tunneling if it can be
 * enabled (e.g. the parent switch also supports them). If USB tunneling
 * is not available for some reason (like that there is Thunderbolt 3
 * switch upstream) then the internal xHCI controller is enabled
 * instead.
 */
0221 int usb4_switch_setup(struct tb_switch *sw)
0222 {
0223 struct tb_port *downstream_port;
0224 struct tb_switch *parent;
0225 bool tbt3, xhci;
0226 u32 val = 0;
0227 int ret;
0228
0229 usb4_switch_check_wakes(sw);
0230
0231 if (!tb_route(sw))
0232 return 0;
0233
0234 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, ROUTER_CS_6, 1);
0235 if (ret)
0236 return ret;
0237
0238 parent = tb_switch_parent(sw);
0239 downstream_port = tb_port_at(tb_route(sw), parent);
0240 sw->link_usb4 = link_is_usb4(downstream_port);
0241 tb_sw_dbg(sw, "link: %s\n", sw->link_usb4 ? "USB4" : "TBT");
0242
0243 xhci = val & ROUTER_CS_6_HCI;
0244 tbt3 = !(val & ROUTER_CS_6_TNS);
0245
0246 tb_sw_dbg(sw, "TBT3 support: %s, xHCI: %s\n",
0247 tbt3 ? "yes" : "no", xhci ? "yes" : "no");
0248
0249 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, ROUTER_CS_5, 1);
0250 if (ret)
0251 return ret;
0252
0253 if (tb_acpi_may_tunnel_usb3() && sw->link_usb4 &&
0254 tb_switch_find_port(parent, TB_TYPE_USB3_DOWN)) {
0255 val |= ROUTER_CS_5_UTO;
0256 xhci = false;
0257 }
0258
/*
 * Only enable PCIe tunneling if the parent router supports it
 * and it is not disabled.
 */
0263 if (tb_acpi_may_tunnel_pcie() &&
0264 tb_switch_find_port(parent, TB_TYPE_PCIE_DOWN)) {
0265 val |= ROUTER_CS_5_PTO;
/*
 * xHCI can be enabled if PCIe tunneling is supported
 * and the parent does not have any USB3 downstream
 * adapters (so we cannot do USB 3.x tunneling).
 */
0271 if (xhci)
0272 val |= ROUTER_CS_5_HCO;
0273 }
0274
/* TBT3 supported by the CM */
0276 val |= ROUTER_CS_5_C3S;
/* Tunneling configuration is ready now */
0278 val |= ROUTER_CS_5_CV;
0279
0280 ret = tb_sw_write(sw, &val, TB_CFG_SWITCH, ROUTER_CS_5, 1);
0281 if (ret)
0282 return ret;
0283
0284 return tb_switch_wait_for_bit(sw, ROUTER_CS_6, ROUTER_CS_6_CR,
0285 ROUTER_CS_6_CR, 50);
0286 }
0287
/**
 * usb4_switch_read_uid() - Read UID from USB4 router
 * @sw: USB4 router
 * @uid: UID is stored here
 *
 * Reads 64-bit UID from USB4 router config space.
 */
0295 int usb4_switch_read_uid(struct tb_switch *sw, u64 *uid)
0296 {
0297 return tb_sw_read(sw, uid, TB_CFG_SWITCH, ROUTER_CS_7, 2);
0298 }
0299
0300 static int usb4_switch_drom_read_block(void *data,
0301 unsigned int dwaddress, void *buf,
0302 size_t dwords)
0303 {
0304 struct tb_switch *sw = data;
0305 u8 status = 0;
0306 u32 metadata;
0307 int ret;
0308
0309 metadata = (dwords << USB4_DROM_SIZE_SHIFT) & USB4_DROM_SIZE_MASK;
0310 metadata |= (dwaddress << USB4_DROM_ADDRESS_SHIFT) &
0311 USB4_DROM_ADDRESS_MASK;
0312
0313 ret = usb4_switch_op_data(sw, USB4_SWITCH_OP_DROM_READ, &metadata,
0314 &status, NULL, 0, buf, dwords);
0315 if (ret)
0316 return ret;
0317
0318 return status ? -EIO : 0;
0319 }
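
/*
 * Worked example of the metadata packing above (illustrative numbers
 * only, the bit layout comes from the USB4_DROM_* masks): reading 16
 * dwords starting at dword address 0x40 gives
 *
 *   metadata = (16 << USB4_DROM_SIZE_SHIFT) | (0x40 << USB4_DROM_ADDRESS_SHIFT)
 *            = 0x80000 | 0x100 = 0x80100
 *
 * i.e. bits 2..14 carry the dword address and bits 15..19 the length.
 */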
0320
/**
 * usb4_switch_drom_read() - Read arbitrary bytes from USB4 router DROM
 * @sw: USB4 router
 * @address: Byte address inside DROM to start reading
 * @buf: Buffer where the DROM content is stored
 * @size: Number of bytes to read
 *
 * Uses USB4 router operations to read router DROM. For devices this
 * should always work but for hosts it may return %-EOPNOTSUPP in which
 * case the host router does not have DROM.
 */
0332 int usb4_switch_drom_read(struct tb_switch *sw, unsigned int address, void *buf,
0333 size_t size)
0334 {
0335 return tb_nvm_read_data(address, buf, size, USB4_DATA_RETRIES,
0336 usb4_switch_drom_read_block, sw);
0337 }
0338
/**
 * usb4_switch_lane_bonding_possible() - Are conditions met for lane bonding
 * @sw: USB4 router
 *
 * Checks whether conditions are met so that lane bonding can be
 * established with the upstream router. Call only for device routers.
 */
0346 bool usb4_switch_lane_bonding_possible(struct tb_switch *sw)
0347 {
0348 struct tb_port *up;
0349 int ret;
0350 u32 val;
0351
0352 up = tb_upstream_port(sw);
0353 ret = tb_port_read(up, &val, TB_CFG_PORT, up->cap_usb4 + PORT_CS_18, 1);
0354 if (ret)
0355 return false;
0356
0357 return !!(val & PORT_CS_18_BE);
0358 }
0359
/**
 * usb4_switch_set_wake() - Enable/disable wake
 * @sw: USB4 router
 * @flags: Wakeup flags (%0 to disable)
 *
 * Enables/disables router to wake up from sleep.
 */
0367 int usb4_switch_set_wake(struct tb_switch *sw, unsigned int flags)
0368 {
0369 struct tb_port *port;
0370 u64 route = tb_route(sw);
0371 u32 val;
0372 int ret;
0373
/*
 * Enable wakes coming from all USB4 downstream ports (from
 * child routers). For device routers do this also for the
 * upstream USB4 port.
 */
0379 tb_switch_for_each_port(sw, port) {
0380 if (!tb_port_is_null(port))
0381 continue;
0382 if (!route && tb_is_upstream_port(port))
0383 continue;
0384 if (!port->cap_usb4)
0385 continue;
0386
0387 ret = tb_port_read(port, &val, TB_CFG_PORT,
0388 port->cap_usb4 + PORT_CS_19, 1);
0389 if (ret)
0390 return ret;
0391
0392 val &= ~(PORT_CS_19_WOC | PORT_CS_19_WOD | PORT_CS_19_WOU4);
0393
0394 if (tb_is_upstream_port(port)) {
0395 val |= PORT_CS_19_WOU4;
0396 } else {
0397 bool configured = val & PORT_CS_19_PC;
0398
0399 if ((flags & TB_WAKE_ON_CONNECT) && !configured)
0400 val |= PORT_CS_19_WOC;
0401 if ((flags & TB_WAKE_ON_DISCONNECT) && configured)
0402 val |= PORT_CS_19_WOD;
0403 if ((flags & TB_WAKE_ON_USB4) && configured)
0404 val |= PORT_CS_19_WOU4;
0405 }
0406
0407 ret = tb_port_write(port, &val, TB_CFG_PORT,
0408 port->cap_usb4 + PORT_CS_19, 1);
0409 if (ret)
0410 return ret;
0411 }
0412
/*
 * Enable wakes from PCIe, USB 3.x and DP on this router. Only
 * needed for device routers.
 */
0417 if (route) {
0418 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, ROUTER_CS_5, 1);
0419 if (ret)
0420 return ret;
0421
0422 val &= ~(ROUTER_CS_5_WOP | ROUTER_CS_5_WOU | ROUTER_CS_5_WOD);
0423 if (flags & TB_WAKE_ON_USB3)
0424 val |= ROUTER_CS_5_WOU;
0425 if (flags & TB_WAKE_ON_PCIE)
0426 val |= ROUTER_CS_5_WOP;
0427 if (flags & TB_WAKE_ON_DP)
0428 val |= ROUTER_CS_5_WOD;
0429
0430 ret = tb_sw_write(sw, &val, TB_CFG_SWITCH, ROUTER_CS_5, 1);
0431 if (ret)
0432 return ret;
0433 }
0434
0435 return 0;
0436 }
0437
/**
 * usb4_switch_set_sleep() - Prepare the router to enter sleep
 * @sw: USB4 router
 *
 * Sets sleep bit for the router. Returns when the router sleep ready
 * bit has been asserted.
 */
0445 int usb4_switch_set_sleep(struct tb_switch *sw)
0446 {
0447 int ret;
0448 u32 val;
0449
/* Set sleep bit and wait the router to go to sleep */
0451 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, ROUTER_CS_5, 1);
0452 if (ret)
0453 return ret;
0454
0455 val |= ROUTER_CS_5_SLP;
0456
0457 ret = tb_sw_write(sw, &val, TB_CFG_SWITCH, ROUTER_CS_5, 1);
0458 if (ret)
0459 return ret;
0460
0461 return tb_switch_wait_for_bit(sw, ROUTER_CS_6, ROUTER_CS_6_SLPR,
0462 ROUTER_CS_6_SLPR, 500);
0463 }
0464
/**
 * usb4_switch_nvm_sector_size() - Return router NVM sector size
 * @sw: USB4 router
 *
 * If the router supports NVM operations this function returns the NVM
 * sector size in bytes. Otherwise returns negative errno.
 */
0473 int usb4_switch_nvm_sector_size(struct tb_switch *sw)
0474 {
0475 u32 metadata;
0476 u8 status;
0477 int ret;
0478
0479 ret = usb4_switch_op(sw, USB4_SWITCH_OP_NVM_SECTOR_SIZE, &metadata,
0480 &status);
0481 if (ret)
0482 return ret;
0483
0484 if (status)
0485 return status == 0x2 ? -EOPNOTSUPP : -EIO;
0486
0487 return metadata & USB4_NVM_SECTOR_SIZE_MASK;
0488 }
0489
0490 static int usb4_switch_nvm_read_block(void *data,
0491 unsigned int dwaddress, void *buf, size_t dwords)
0492 {
0493 struct tb_switch *sw = data;
0494 u8 status = 0;
0495 u32 metadata;
0496 int ret;
0497
0498 metadata = (dwords << USB4_NVM_READ_LENGTH_SHIFT) &
0499 USB4_NVM_READ_LENGTH_MASK;
0500 metadata |= (dwaddress << USB4_NVM_READ_OFFSET_SHIFT) &
0501 USB4_NVM_READ_OFFSET_MASK;
0502
0503 ret = usb4_switch_op_data(sw, USB4_SWITCH_OP_NVM_READ, &metadata,
0504 &status, NULL, 0, buf, dwords);
0505 if (ret)
0506 return ret;
0507
0508 return status ? -EIO : 0;
0509 }
0510
/**
 * usb4_switch_nvm_read() - Read arbitrary bytes from router NVM
 * @sw: USB4 router
 * @address: Starting address in bytes
 * @buf: Read data is placed here
 * @size: How many bytes to read
 *
 * Reads NVM contents of the router. If NVM is not supported returns
 * %-EOPNOTSUPP.
 */
0521 int usb4_switch_nvm_read(struct tb_switch *sw, unsigned int address, void *buf,
0522 size_t size)
0523 {
0524 return tb_nvm_read_data(address, buf, size, USB4_DATA_RETRIES,
0525 usb4_switch_nvm_read_block, sw);
0526 }
0527
/**
 * usb4_switch_nvm_set_offset() - Set NVM write offset
 * @sw: USB4 router
 * @address: Start offset
 *
 * Explicitly sets NVM write offset. Normally when writing to NVM this
 * is done automatically by usb4_switch_nvm_write().
 *
 * Returns %0 in success and negative errno if there was a failure.
 */
0538 int usb4_switch_nvm_set_offset(struct tb_switch *sw, unsigned int address)
0539 {
0540 u32 metadata, dwaddress;
0541 u8 status = 0;
0542 int ret;
0543
0544 dwaddress = address / 4;
0545 metadata = (dwaddress << USB4_NVM_SET_OFFSET_SHIFT) &
0546 USB4_NVM_SET_OFFSET_MASK;
0547
0548 ret = usb4_switch_op(sw, USB4_SWITCH_OP_NVM_SET_OFFSET, &metadata,
0549 &status);
0550 if (ret)
0551 return ret;
0552
0553 return status ? -EIO : 0;
0554 }
0555
0556 static int usb4_switch_nvm_write_next_block(void *data, unsigned int dwaddress,
0557 const void *buf, size_t dwords)
0558 {
0559 struct tb_switch *sw = data;
0560 u8 status;
0561 int ret;
0562
0563 ret = usb4_switch_op_data(sw, USB4_SWITCH_OP_NVM_WRITE, NULL, &status,
0564 buf, dwords, NULL, 0);
0565 if (ret)
0566 return ret;
0567
0568 return status ? -EIO : 0;
0569 }
0570
/**
 * usb4_switch_nvm_write() - Write to the router NVM
 * @sw: USB4 router
 * @address: Start address where to write in bytes
 * @buf: Pointer to the data to write
 * @size: Size of @buf in bytes
 *
 * Writes @buf to the router NVM using USB4 router operations. If NVM
 * write is not supported returns %-EOPNOTSUPP.
 */
0581 int usb4_switch_nvm_write(struct tb_switch *sw, unsigned int address,
0582 const void *buf, size_t size)
0583 {
0584 int ret;
0585
0586 ret = usb4_switch_nvm_set_offset(sw, address);
0587 if (ret)
0588 return ret;
0589
0590 return tb_nvm_write_data(address, buf, size, USB4_DATA_RETRIES,
0591 usb4_switch_nvm_write_next_block, sw);
0592 }
0593
/**
 * usb4_switch_nvm_authenticate() - Authenticate new NVM
 * @sw: USB4 router
 *
 * After the new NVM has been written via usb4_switch_nvm_write(), this
 * function triggers NVM authentication process. The router gets power
 * cycled automatically if the authentication is successful, which is
 * why some of the errors from the router operation are treated as
 * success below.
 *
 * Returns %0 if the authentication was started successfully and
 * negative errno in case of failure.
 */
0607 int usb4_switch_nvm_authenticate(struct tb_switch *sw)
0608 {
0609 int ret;
0610
0611 ret = usb4_switch_op(sw, USB4_SWITCH_OP_NVM_AUTH, NULL, NULL);
0612 switch (ret) {
/*
 * The router is power cycled once NVM_AUTH is started so it is
 * expected to get any of the following errors back.
 */
0617 case -EACCES:
0618 case -ENOTCONN:
0619 case -ETIMEDOUT:
0620 return 0;
0621
0622 default:
0623 return ret;
0624 }
0625 }
0626
/**
 * usb4_switch_nvm_authenticate_status() - Provides status of NVM authentication
 * @sw: USB4 router
 * @status: Status code of the operation
 *
 * The function checks if there is status available from the last NVM
 * authenticate router operation. If there is status then %0 is returned
 * and the status code is placed in @status.
 *
 * Returns negative errno in case of failure.
 */
0639 int usb4_switch_nvm_authenticate_status(struct tb_switch *sw, u32 *status)
0640 {
0641 const struct tb_cm_ops *cm_ops = sw->tb->cm_ops;
0642 u16 opcode;
0643 u32 val;
0644 int ret;
0645
0646 if (cm_ops->usb4_switch_nvm_authenticate_status) {
0647 ret = cm_ops->usb4_switch_nvm_authenticate_status(sw, status);
0648 if (ret != -EOPNOTSUPP)
0649 return ret;
0650 }
0651
0652 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, ROUTER_CS_26, 1);
0653 if (ret)
0654 return ret;
0655
/* Check that the opcode in the router operation register is NVM_AUTH */
0657 opcode = val & ROUTER_CS_26_OPCODE_MASK;
0658 if (opcode == USB4_SWITCH_OP_NVM_AUTH) {
0659 if (val & ROUTER_CS_26_OV)
0660 return -EBUSY;
0661 if (val & ROUTER_CS_26_ONS)
0662 return -EOPNOTSUPP;
0663
0664 *status = (val & ROUTER_CS_26_STATUS_MASK) >>
0665 ROUTER_CS_26_STATUS_SHIFT;
0666 } else {
0667 *status = 0;
0668 }
0669
0670 return 0;
0671 }
0672
/**
 * usb4_switch_credits_init() - Read buffer allocation parameters
 * @sw: USB4 router
 *
 * Reads @sw buffer allocation parameters and initializes @sw buffer
 * allocation fields accordingly. Specifically @sw->credit_allocation
 * is set to %true if these parameters can be used in tunneling.
 *
 * Returns %0 on success and negative errno otherwise.
 */
0683 int usb4_switch_credits_init(struct tb_switch *sw)
0684 {
0685 int max_usb3, min_dp_aux, min_dp_main, max_pcie, max_dma;
0686 int ret, length, i, nports;
0687 const struct tb_port *port;
0688 u32 data[NVM_DATA_DWORDS];
0689 u32 metadata = 0;
0690 u8 status = 0;
0691
0692 memset(data, 0, sizeof(data));
0693 ret = usb4_switch_op_data(sw, USB4_SWITCH_OP_BUFFER_ALLOC, &metadata,
0694 &status, NULL, 0, data, ARRAY_SIZE(data));
0695 if (ret)
0696 return ret;
0697 if (status)
0698 return -EIO;
0699
0700 length = metadata & USB4_BA_LENGTH_MASK;
0701 if (WARN_ON(length > ARRAY_SIZE(data)))
0702 return -EMSGSIZE;
0703
0704 max_usb3 = -1;
0705 min_dp_aux = -1;
0706 min_dp_main = -1;
0707 max_pcie = -1;
0708 max_dma = -1;
0709
0710 tb_sw_dbg(sw, "credit allocation parameters:\n");
0711
0712 for (i = 0; i < length; i++) {
0713 u16 index, value;
0714
0715 index = data[i] & USB4_BA_INDEX_MASK;
0716 value = (data[i] & USB4_BA_VALUE_MASK) >> USB4_BA_VALUE_SHIFT;
0717
0718 switch (index) {
0719 case USB4_BA_MAX_USB3:
0720 tb_sw_dbg(sw, " USB3: %u\n", value);
0721 max_usb3 = value;
0722 break;
0723 case USB4_BA_MIN_DP_AUX:
0724 tb_sw_dbg(sw, " DP AUX: %u\n", value);
0725 min_dp_aux = value;
0726 break;
0727 case USB4_BA_MIN_DP_MAIN:
0728 tb_sw_dbg(sw, " DP main: %u\n", value);
0729 min_dp_main = value;
0730 break;
0731 case USB4_BA_MAX_PCIE:
0732 tb_sw_dbg(sw, " PCIe: %u\n", value);
0733 max_pcie = value;
0734 break;
0735 case USB4_BA_MAX_HI:
0736 tb_sw_dbg(sw, " DMA: %u\n", value);
0737 max_dma = value;
0738 break;
0739 default:
0740 tb_sw_dbg(sw, " unknown credit allocation index %#x, skipping\n",
0741 index);
0742 break;
0743 }
0744 }
0745
/*
 * Validate the buffer allocation preferences. If we find
 * issues, log a warning and fall back using the hard-coded
 * values.
 */

/* Host router must report baMaxHI */
0753 if (!tb_route(sw) && max_dma < 0) {
0754 tb_sw_warn(sw, "host router is missing baMaxHI\n");
0755 goto err_invalid;
0756 }
0757
0758 nports = 0;
0759 tb_switch_for_each_port(sw, port) {
0760 if (tb_port_is_null(port))
0761 nports++;
0762 }
0763
/* Routers with more than two lane adapters must report DP resources too */
0765 if (nports > 2 && (min_dp_aux < 0 || min_dp_main < 0)) {
0766 tb_sw_warn(sw, "multiple USB4 ports require baMinDPaux/baMinDPmain\n");
0767 goto err_invalid;
0768 }
0769
0770 tb_switch_for_each_port(sw, port) {
0771 if (tb_port_is_dpout(port) && min_dp_main < 0) {
0772 tb_sw_warn(sw, "missing baMinDPmain");
0773 goto err_invalid;
0774 }
0775 if ((tb_port_is_dpin(port) || tb_port_is_dpout(port)) &&
0776 min_dp_aux < 0) {
0777 tb_sw_warn(sw, "missing baMinDPaux");
0778 goto err_invalid;
0779 }
0780 if ((tb_port_is_usb3_down(port) || tb_port_is_usb3_up(port)) &&
0781 max_usb3 < 0) {
0782 tb_sw_warn(sw, "missing baMaxUSB3");
0783 goto err_invalid;
0784 }
0785 if ((tb_port_is_pcie_down(port) || tb_port_is_pcie_up(port)) &&
0786 max_pcie < 0) {
0787 tb_sw_warn(sw, "missing baMaxPCIe");
0788 goto err_invalid;
0789 }
0790 }
0791
/*
 * Buffer allocation passed the validation so we can use it in
 * path creation.
 */
0796 sw->credit_allocation = true;
0797 if (max_usb3 > 0)
0798 sw->max_usb3_credits = max_usb3;
0799 if (min_dp_aux > 0)
0800 sw->min_dp_aux_credits = min_dp_aux;
0801 if (min_dp_main > 0)
0802 sw->min_dp_main_credits = min_dp_main;
0803 if (max_pcie > 0)
0804 sw->max_pcie_credits = max_pcie;
0805 if (max_dma > 0)
0806 sw->max_dma_credits = max_dma;
0807
0808 return 0;
0809
0810 err_invalid:
0811 return -EINVAL;
0812 }
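
/*
 * Illustrative decode of a single buffer allocation entry (the value is
 * made up, only the bit layout comes from the USB4_BA_* masks above):
 * data[i] == 0x00200001 carries index USB4_BA_MAX_USB3 (0x1) in bits
 * 0..15 and value 32 (0x0020) in bits 16..31, so max_usb3 would be set
 * to 32 credits.
 */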
0813
/**
 * usb4_switch_query_dp_resource() - Query availability of DP IN resource
 * @sw: USB4 router
 * @in: DP IN adapter
 *
 * For DP tunneling this function can be used to query availability of
 * DP IN resource. Returns true if the resource is available for DP
 * tunneling, false otherwise.
 */
0823 bool usb4_switch_query_dp_resource(struct tb_switch *sw, struct tb_port *in)
0824 {
0825 u32 metadata = in->port;
0826 u8 status;
0827 int ret;
0828
0829 ret = usb4_switch_op(sw, USB4_SWITCH_OP_QUERY_DP_RESOURCE, &metadata,
0830 &status);
/*
 * If DP resource allocation is not supported assume it is
 * always available.
 */
0835 if (ret == -EOPNOTSUPP)
0836 return true;
0837 else if (ret)
0838 return false;
0839
0840 return !status;
0841 }
0842
/**
 * usb4_switch_alloc_dp_resource() - Allocate DP IN resource
 * @sw: USB4 router
 * @in: DP IN adapter
 *
 * Allocates DP IN resource for DP tunneling using USB4 router
 * operations. If the resource was allocated returns %0. Otherwise
 * returns negative errno, in particular %-EBUSY if the resource is
 * already allocated.
 */
0853 int usb4_switch_alloc_dp_resource(struct tb_switch *sw, struct tb_port *in)
0854 {
0855 u32 metadata = in->port;
0856 u8 status;
0857 int ret;
0858
0859 ret = usb4_switch_op(sw, USB4_SWITCH_OP_ALLOC_DP_RESOURCE, &metadata,
0860 &status);
0861 if (ret == -EOPNOTSUPP)
0862 return 0;
0863 else if (ret)
0864 return ret;
0865
0866 return status ? -EBUSY : 0;
0867 }
0868
/**
 * usb4_switch_dealloc_dp_resource() - Releases allocated DP IN resource
 * @sw: USB4 router
 * @in: DP IN adapter
 *
 * Releases the previously allocated DP IN resource.
 */
0876 int usb4_switch_dealloc_dp_resource(struct tb_switch *sw, struct tb_port *in)
0877 {
0878 u32 metadata = in->port;
0879 u8 status;
0880 int ret;
0881
0882 ret = usb4_switch_op(sw, USB4_SWITCH_OP_DEALLOC_DP_RESOURCE, &metadata,
0883 &status);
0884 if (ret == -EOPNOTSUPP)
0885 return 0;
0886 else if (ret)
0887 return ret;
0888
0889 return status ? -EIO : 0;
0890 }
0891
0892 static int usb4_port_idx(const struct tb_switch *sw, const struct tb_port *port)
0893 {
0894 struct tb_port *p;
0895 int usb4_idx = 0;
0896
/* Count lane 0 adapters preceding @port to get its USB4 port index */
0898 tb_switch_for_each_port(sw, p) {
0899 if (!tb_port_is_null(p))
0900 continue;
0901 if (tb_is_upstream_port(p))
0902 continue;
0903 if (!p->link_nr) {
0904 if (p == port)
0905 break;
0906 usb4_idx++;
0907 }
0908 }
0909
0910 return usb4_idx;
0911 }
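
/*
 * Example with a hypothetical port layout: if the non-upstream lane 0
 * adapters of a router are ports 1, 3 and 5, usb4_port_idx() returns 0,
 * 1 and 2 for them respectively. The mapping helpers below use this
 * index to pick the matching PCIe/USB3 downstream adapter.
 */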
0912
/**
 * usb4_switch_map_pcie_down() - Map USB4 port to a PCIe downstream adapter
 * @sw: USB4 router
 * @port: USB4 port
 *
 * USB4 routers have direct mapping between USB4 ports and PCIe
 * downstream adapters where the PCIe topology is extended. This
 * function returns the corresponding downstream PCIe adapter or %NULL
 * if no such mapping was possible.
 */
0923 struct tb_port *usb4_switch_map_pcie_down(struct tb_switch *sw,
0924 const struct tb_port *port)
0925 {
0926 int usb4_idx = usb4_port_idx(sw, port);
0927 struct tb_port *p;
0928 int pcie_idx = 0;
0929
/* Find PCIe down adapter matching the USB4 port index */
0931 tb_switch_for_each_port(sw, p) {
0932 if (!tb_port_is_pcie_down(p))
0933 continue;
0934
0935 if (pcie_idx == usb4_idx)
0936 return p;
0937
0938 pcie_idx++;
0939 }
0940
0941 return NULL;
0942 }
0943
/**
 * usb4_switch_map_usb3_down() - Map USB4 port to a USB3 downstream adapter
 * @sw: USB4 router
 * @port: USB4 port
 *
 * USB4 routers have direct mapping between USB4 ports and USB 3.x
 * downstream adapters where the USB 3.x topology is extended. This
 * function returns the corresponding downstream USB 3.x adapter or
 * %NULL if no such mapping was possible.
 */
0954 struct tb_port *usb4_switch_map_usb3_down(struct tb_switch *sw,
0955 const struct tb_port *port)
0956 {
0957 int usb4_idx = usb4_port_idx(sw, port);
0958 struct tb_port *p;
0959 int usb_idx = 0;
0960
/* Find USB3 down adapter matching the USB4 port index */
0962 tb_switch_for_each_port(sw, p) {
0963 if (!tb_port_is_usb3_down(p))
0964 continue;
0965
0966 if (usb_idx == usb4_idx)
0967 return p;
0968
0969 usb_idx++;
0970 }
0971
0972 return NULL;
0973 }
0974
/**
 * usb4_switch_add_ports() - Add USB4 ports for this router
 * @sw: USB4 router
 *
 * For USB4 router add a usb4_port device for each USB4 port. Returns %0
 * in case of success and negative errno in case of failure.
 */
0984 int usb4_switch_add_ports(struct tb_switch *sw)
0985 {
0986 struct tb_port *port;
0987
0988 if (tb_switch_is_icm(sw) || !tb_switch_is_usb4(sw))
0989 return 0;
0990
0991 tb_switch_for_each_port(sw, port) {
0992 struct usb4_port *usb4;
0993
0994 if (!tb_port_is_null(port))
0995 continue;
0996 if (!port->cap_usb4)
0997 continue;
0998
0999 usb4 = usb4_port_device_add(port);
1000 if (IS_ERR(usb4)) {
1001 usb4_switch_remove_ports(sw);
1002 return PTR_ERR(usb4);
1003 }
1004
1005 port->usb4 = usb4;
1006 }
1007
1008 return 0;
1009 }
1010
/**
 * usb4_switch_remove_ports() - Removes USB4 ports from this router
 * @sw: USB4 router
 *
 * Removes all previously added USB4 ports.
 */
1017 void usb4_switch_remove_ports(struct tb_switch *sw)
1018 {
1019 struct tb_port *port;
1020
1021 tb_switch_for_each_port(sw, port) {
1022 if (port->usb4) {
1023 usb4_port_device_remove(port->usb4);
1024 port->usb4 = NULL;
1025 }
1026 }
1027 }
1028
/**
 * usb4_port_unlock() - Unlock USB4 downstream port
 * @port: USB4 port to unlock
 *
 * Unlocks USB4 downstream port so that the connection manager can
 * access the router below this port.
 */
1036 int usb4_port_unlock(struct tb_port *port)
1037 {
1038 int ret;
1039 u32 val;
1040
1041 ret = tb_port_read(port, &val, TB_CFG_PORT, ADP_CS_4, 1);
1042 if (ret)
1043 return ret;
1044
1045 val &= ~ADP_CS_4_LCK;
1046 return tb_port_write(port, &val, TB_CFG_PORT, ADP_CS_4, 1);
1047 }
1048
1049 static int usb4_port_set_configured(struct tb_port *port, bool configured)
1050 {
1051 int ret;
1052 u32 val;
1053
1054 if (!port->cap_usb4)
1055 return -EINVAL;
1056
1057 ret = tb_port_read(port, &val, TB_CFG_PORT,
1058 port->cap_usb4 + PORT_CS_19, 1);
1059 if (ret)
1060 return ret;
1061
1062 if (configured)
1063 val |= PORT_CS_19_PC;
1064 else
1065 val &= ~PORT_CS_19_PC;
1066
1067 return tb_port_write(port, &val, TB_CFG_PORT,
1068 port->cap_usb4 + PORT_CS_19, 1);
1069 }
1070
/**
 * usb4_port_configure() - Set USB4 port configured
 * @port: USB4 port
 *
 * Sets the USB4 link to be configured for power management purposes.
 */
1077 int usb4_port_configure(struct tb_port *port)
1078 {
1079 return usb4_port_set_configured(port, true);
1080 }
1081
/**
 * usb4_port_unconfigure() - Set USB4 port unconfigured
 * @port: USB4 port
 *
 * Sets the USB4 link to be unconfigured for power management purposes.
 */
1088 void usb4_port_unconfigure(struct tb_port *port)
1089 {
1090 usb4_port_set_configured(port, false);
1091 }
1092
1093 static int usb4_set_xdomain_configured(struct tb_port *port, bool configured)
1094 {
1095 int ret;
1096 u32 val;
1097
1098 if (!port->cap_usb4)
1099 return -EINVAL;
1100
1101 ret = tb_port_read(port, &val, TB_CFG_PORT,
1102 port->cap_usb4 + PORT_CS_19, 1);
1103 if (ret)
1104 return ret;
1105
1106 if (configured)
1107 val |= PORT_CS_19_PID;
1108 else
1109 val &= ~PORT_CS_19_PID;
1110
1111 return tb_port_write(port, &val, TB_CFG_PORT,
1112 port->cap_usb4 + PORT_CS_19, 1);
1113 }
1114
/**
 * usb4_port_configure_xdomain() - Configure port for XDomain
 * @port: USB4 port connected to another host
 *
 * Marks the USB4 port as being connected to another host. Returns %0 in
 * success and negative errno in failure.
 */
1122 int usb4_port_configure_xdomain(struct tb_port *port)
1123 {
1124 return usb4_set_xdomain_configured(port, true);
1125 }
1126
/**
 * usb4_port_unconfigure_xdomain() - Unconfigure port for XDomain
 * @port: USB4 port that was connected to another host
 *
 * Clears USB4 port from being marked as XDomain.
 */
1133 void usb4_port_unconfigure_xdomain(struct tb_port *port)
1134 {
1135 usb4_set_xdomain_configured(port, false);
1136 }
1137
1138 static int usb4_port_wait_for_bit(struct tb_port *port, u32 offset, u32 bit,
1139 u32 value, int timeout_msec)
1140 {
1141 ktime_t timeout = ktime_add_ms(ktime_get(), timeout_msec);
1142
1143 do {
1144 u32 val;
1145 int ret;
1146
1147 ret = tb_port_read(port, &val, TB_CFG_PORT, offset, 1);
1148 if (ret)
1149 return ret;
1150
1151 if ((val & bit) == value)
1152 return 0;
1153
1154 usleep_range(50, 100);
1155 } while (ktime_before(ktime_get(), timeout));
1156
1157 return -ETIMEDOUT;
1158 }
1159
1160 static int usb4_port_read_data(struct tb_port *port, void *data, size_t dwords)
1161 {
1162 if (dwords > NVM_DATA_DWORDS)
1163 return -EINVAL;
1164
1165 return tb_port_read(port, data, TB_CFG_PORT, port->cap_usb4 + PORT_CS_2,
1166 dwords);
1167 }
1168
1169 static int usb4_port_write_data(struct tb_port *port, const void *data,
1170 size_t dwords)
1171 {
1172 if (dwords > NVM_DATA_DWORDS)
1173 return -EINVAL;
1174
1175 return tb_port_write(port, data, TB_CFG_PORT, port->cap_usb4 + PORT_CS_2,
1176 dwords);
1177 }
1178
1179 static int usb4_port_sb_read(struct tb_port *port, enum usb4_sb_target target,
1180 u8 index, u8 reg, void *buf, u8 size)
1181 {
1182 size_t dwords = DIV_ROUND_UP(size, 4);
1183 int ret;
1184 u32 val;
1185
1186 if (!port->cap_usb4)
1187 return -EINVAL;
1188
1189 val = reg;
1190 val |= size << PORT_CS_1_LENGTH_SHIFT;
1191 val |= (target << PORT_CS_1_TARGET_SHIFT) & PORT_CS_1_TARGET_MASK;
1192 if (target == USB4_SB_TARGET_RETIMER)
1193 val |= (index << PORT_CS_1_RETIMER_INDEX_SHIFT);
1194 val |= PORT_CS_1_PND;
1195
1196 ret = tb_port_write(port, &val, TB_CFG_PORT,
1197 port->cap_usb4 + PORT_CS_1, 1);
1198 if (ret)
1199 return ret;
1200
1201 ret = usb4_port_wait_for_bit(port, port->cap_usb4 + PORT_CS_1,
1202 PORT_CS_1_PND, 0, 500);
1203 if (ret)
1204 return ret;
1205
1206 ret = tb_port_read(port, &val, TB_CFG_PORT,
1207 port->cap_usb4 + PORT_CS_1, 1);
1208 if (ret)
1209 return ret;
1210
1211 if (val & PORT_CS_1_NR)
1212 return -ENODEV;
1213 if (val & PORT_CS_1_RC)
1214 return -EIO;
1215
1216 return buf ? usb4_port_read_data(port, buf, dwords) : 0;
1217 }
1218
1219 static int usb4_port_sb_write(struct tb_port *port, enum usb4_sb_target target,
1220 u8 index, u8 reg, const void *buf, u8 size)
1221 {
1222 size_t dwords = DIV_ROUND_UP(size, 4);
1223 int ret;
1224 u32 val;
1225
1226 if (!port->cap_usb4)
1227 return -EINVAL;
1228
1229 if (buf) {
1230 ret = usb4_port_write_data(port, buf, dwords);
1231 if (ret)
1232 return ret;
1233 }
1234
1235 val = reg;
1236 val |= size << PORT_CS_1_LENGTH_SHIFT;
1237 val |= PORT_CS_1_WNR_WRITE;
1238 val |= (target << PORT_CS_1_TARGET_SHIFT) & PORT_CS_1_TARGET_MASK;
1239 if (target == USB4_SB_TARGET_RETIMER)
1240 val |= (index << PORT_CS_1_RETIMER_INDEX_SHIFT);
1241 val |= PORT_CS_1_PND;
1242
1243 ret = tb_port_write(port, &val, TB_CFG_PORT,
1244 port->cap_usb4 + PORT_CS_1, 1);
1245 if (ret)
1246 return ret;
1247
1248 ret = usb4_port_wait_for_bit(port, port->cap_usb4 + PORT_CS_1,
1249 PORT_CS_1_PND, 0, 500);
1250 if (ret)
1251 return ret;
1252
1253 ret = tb_port_read(port, &val, TB_CFG_PORT,
1254 port->cap_usb4 + PORT_CS_1, 1);
1255 if (ret)
1256 return ret;
1257
1258 if (val & PORT_CS_1_NR)
1259 return -ENODEV;
1260 if (val & PORT_CS_1_RC)
1261 return -EIO;
1262
1263 return 0;
1264 }
1265
1266 static int usb4_port_sb_op(struct tb_port *port, enum usb4_sb_target target,
1267 u8 index, enum usb4_sb_opcode opcode, int timeout_msec)
1268 {
1269 ktime_t timeout;
1270 u32 val;
1271 int ret;
1272
1273 val = opcode;
1274 ret = usb4_port_sb_write(port, target, index, USB4_SB_OPCODE, &val,
1275 sizeof(val));
1276 if (ret)
1277 return ret;
1278
1279 timeout = ktime_add_ms(ktime_get(), timeout_msec);
1280
1281 do {
/* Read back the opcode register to check the result */
1283 ret = usb4_port_sb_read(port, target, index, USB4_SB_OPCODE,
1284 &val, sizeof(val));
1285 if (ret)
1286 return ret;
1287
1288 switch (val) {
1289 case 0:
1290 return 0;
1291
1292 case USB4_SB_OPCODE_ERR:
1293 return -EAGAIN;
1294
1295 case USB4_SB_OPCODE_ONS:
1296 return -EOPNOTSUPP;
1297
1298 default:
1299 if (val != opcode)
1300 return -EIO;
1301 break;
1302 }
1303 } while (ktime_before(ktime_get(), timeout));
1304
1305 return -ETIMEDOUT;
1306 }
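
/*
 * Sketch of how the sideband helpers above are combined (this mirrors
 * the retimer NVM read path further below, it is not an additional
 * API):
 *
 *   usb4_port_sb_write(port, USB4_SB_TARGET_RETIMER, index,
 *                      USB4_SB_METADATA, &metadata, sizeof(metadata));
 *   usb4_port_sb_op(port, USB4_SB_TARGET_RETIMER, index,
 *                   USB4_SB_OPCODE_NVM_READ, 500);
 *   usb4_port_sb_read(port, USB4_SB_TARGET_RETIMER, index,
 *                     USB4_SB_DATA, buf, dwords * 4);
 */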
1307
1308 static int usb4_port_set_router_offline(struct tb_port *port, bool offline)
1309 {
1310 u32 val = !offline;
1311 int ret;
1312
1313 ret = usb4_port_sb_write(port, USB4_SB_TARGET_ROUTER, 0,
1314 USB4_SB_METADATA, &val, sizeof(val));
1315 if (ret)
1316 return ret;
1317
1318 val = USB4_SB_OPCODE_ROUTER_OFFLINE;
1319 return usb4_port_sb_write(port, USB4_SB_TARGET_ROUTER, 0,
1320 USB4_SB_OPCODE, &val, sizeof(val));
1321 }
1322
/**
 * usb4_port_router_offline() - Put the USB4 port to offline mode
 * @port: USB4 port
 *
 * This function puts the USB4 port into offline mode. In this mode the
 * port does not react on hotplug events anymore. This needs to be
 * called before retimer access is done when the USB4 link is not up.
 *
 * Returns %0 in case of success and negative errno if there was an
 * error.
 */
1334 int usb4_port_router_offline(struct tb_port *port)
1335 {
1336 return usb4_port_set_router_offline(port, true);
1337 }
1338
/**
 * usb4_port_router_online() - Put the USB4 port back to online
 * @port: USB4 port
 *
 * Makes the USB4 port functional again.
 */
1345 int usb4_port_router_online(struct tb_port *port)
1346 {
1347 return usb4_port_set_router_offline(port, false);
1348 }
1349
/**
 * usb4_port_enumerate_retimers() - Send RT broadcast transaction
 * @port: USB4 port
 *
 * This forces the USB4 port to send broadcast RT transaction which
 * makes the retimers on the link to assign index to themselves. Returns
 * %0 in case of success and negative errno if there was an error.
 */
1358 int usb4_port_enumerate_retimers(struct tb_port *port)
1359 {
1360 u32 val;
1361
1362 val = USB4_SB_OPCODE_ENUMERATE_RETIMERS;
1363 return usb4_port_sb_write(port, USB4_SB_TARGET_ROUTER, 0,
1364 USB4_SB_OPCODE, &val, sizeof(val));
1365 }
1366
/**
 * usb4_port_clx_supported() - Check if CLx is supported by the link
 * @port: Port to check for CLx support for
 *
 * PORT_CS_18_CPS bit reflects if the link supports CLx including
 * active cables (if connected on the link).
 */
1374 bool usb4_port_clx_supported(struct tb_port *port)
1375 {
1376 int ret;
1377 u32 val;
1378
1379 ret = tb_port_read(port, &val, TB_CFG_PORT,
1380 port->cap_usb4 + PORT_CS_18, 1);
1381 if (ret)
1382 return false;
1383
1384 return !!(val & PORT_CS_18_CPS);
1385 }
1386
1387 static inline int usb4_port_retimer_op(struct tb_port *port, u8 index,
1388 enum usb4_sb_opcode opcode,
1389 int timeout_msec)
1390 {
1391 return usb4_port_sb_op(port, USB4_SB_TARGET_RETIMER, index, opcode,
1392 timeout_msec);
1393 }
1394
/**
 * usb4_port_retimer_set_inbound_sbtx() - Enable sideband channel transactions
 * @port: USB4 port
 * @index: Retimer index
 *
 * Enables sideband channel transactions on SBTX. Can be used when USB4
 * link does not go up, for example if there is no device connected.
 */
1403 int usb4_port_retimer_set_inbound_sbtx(struct tb_port *port, u8 index)
1404 {
1405 int ret;
1406
1407 ret = usb4_port_retimer_op(port, index, USB4_SB_OPCODE_SET_INBOUND_SBTX,
1408 500);
1409
1410 if (ret != -ENODEV)
1411 return ret;
1412
/*
 * Per the USB4 retimer spec, the retimer is not required to
 * send an RT (Retimer Transaction) response for the first
 * SET_INBOUND_SBTX command, so try once more on -ENODEV.
 */
1418 return usb4_port_retimer_op(port, index, USB4_SB_OPCODE_SET_INBOUND_SBTX,
1419 500);
1420 }
1421
/**
 * usb4_port_retimer_read() - Read from retimer sideband registers
 * @port: USB4 port
 * @index: Retimer index
 * @reg: Sideband register to read
 * @buf: Data from @reg is stored here
 * @size: Number of bytes to read
 *
 * Function reads retimer sideband registers starting from @reg. The
 * retimer is connected to @port at @index. Returns %0 in case of
 * success, and read data is copied to @buf. If there is no retimer
 * present at given @index returns %-ENODEV. In any other failure
 * returns negative errno.
 */
1436 int usb4_port_retimer_read(struct tb_port *port, u8 index, u8 reg, void *buf,
1437 u8 size)
1438 {
1439 return usb4_port_sb_read(port, USB4_SB_TARGET_RETIMER, index, reg, buf,
1440 size);
1441 }
1442
/**
 * usb4_port_retimer_write() - Write to retimer sideband registers
 * @port: USB4 port
 * @index: Retimer index
 * @reg: Sideband register to write
 * @buf: Data that is written starting from @reg
 * @size: Number of bytes to write
 *
 * Writes retimer sideband registers starting from @reg. The retimer is
 * connected to @port at @index. Returns %0 in case of success. If there
 * is no retimer present at given @index returns %-ENODEV. In any other
 * failure returns negative errno.
 */
1456 int usb4_port_retimer_write(struct tb_port *port, u8 index, u8 reg,
1457 const void *buf, u8 size)
1458 {
1459 return usb4_port_sb_write(port, USB4_SB_TARGET_RETIMER, index, reg, buf,
1460 size);
1461 }
1462
/**
 * usb4_port_retimer_is_last() - Is the retimer last on-board retimer
 * @port: USB4 port
 * @index: Retimer index
 *
 * If the retimer at @index is last one (connected directly to the
 * Type-C port) this function returns %1. If it is not returns %0. If
 * the retimer is not present returns %-ENODEV. Otherwise returns
 * negative errno.
 */
1473 int usb4_port_retimer_is_last(struct tb_port *port, u8 index)
1474 {
1475 u32 metadata;
1476 int ret;
1477
1478 ret = usb4_port_retimer_op(port, index, USB4_SB_OPCODE_QUERY_LAST_RETIMER,
1479 500);
1480 if (ret)
1481 return ret;
1482
1483 ret = usb4_port_retimer_read(port, index, USB4_SB_METADATA, &metadata,
1484 sizeof(metadata));
1485 return ret ? ret : metadata & 1;
1486 }
1487
/**
 * usb4_port_retimer_nvm_sector_size() - Read retimer NVM sector size
 * @port: USB4 port
 * @index: Retimer index
 *
 * Reads NVM sector size (in bytes) of a retimer at @index. This
 * operation can be used to determine whether the retimer supports NVM
 * upgrade for example. Returns sector size in bytes or negative errno
 * in case of error. Specifically returns %-ENODEV if there is no
 * retimer at @index.
 */
1499 int usb4_port_retimer_nvm_sector_size(struct tb_port *port, u8 index)
1500 {
1501 u32 metadata;
1502 int ret;
1503
1504 ret = usb4_port_retimer_op(port, index, USB4_SB_OPCODE_GET_NVM_SECTOR_SIZE,
1505 500);
1506 if (ret)
1507 return ret;
1508
1509 ret = usb4_port_retimer_read(port, index, USB4_SB_METADATA, &metadata,
1510 sizeof(metadata));
1511 return ret ? ret : metadata & USB4_NVM_SECTOR_SIZE_MASK;
1512 }
1513
/**
 * usb4_port_retimer_nvm_set_offset() - Set NVM write offset
 * @port: USB4 port
 * @index: Retimer index
 * @address: Start offset
 *
 * Explicitly sets NVM write offset. Normally when writing to NVM this
 * is done automatically by usb4_port_retimer_nvm_write().
 *
 * Returns %0 in success and negative errno if there was a failure.
 */
1525 int usb4_port_retimer_nvm_set_offset(struct tb_port *port, u8 index,
1526 unsigned int address)
1527 {
1528 u32 metadata, dwaddress;
1529 int ret;
1530
1531 dwaddress = address / 4;
1532 metadata = (dwaddress << USB4_NVM_SET_OFFSET_SHIFT) &
1533 USB4_NVM_SET_OFFSET_MASK;
1534
1535 ret = usb4_port_retimer_write(port, index, USB4_SB_METADATA, &metadata,
1536 sizeof(metadata));
1537 if (ret)
1538 return ret;
1539
1540 return usb4_port_retimer_op(port, index, USB4_SB_OPCODE_NVM_SET_OFFSET,
1541 500);
1542 }
1543
1544 struct retimer_info {
1545 struct tb_port *port;
1546 u8 index;
1547 };
1548
1549 static int usb4_port_retimer_nvm_write_next_block(void *data,
1550 unsigned int dwaddress, const void *buf, size_t dwords)
{
1553 const struct retimer_info *info = data;
1554 struct tb_port *port = info->port;
1555 u8 index = info->index;
1556 int ret;
1557
1558 ret = usb4_port_retimer_write(port, index, USB4_SB_DATA,
1559 buf, dwords * 4);
1560 if (ret)
1561 return ret;
1562
1563 return usb4_port_retimer_op(port, index,
1564 USB4_SB_OPCODE_NVM_BLOCK_WRITE, 1000);
1565 }
1566
/**
 * usb4_port_retimer_nvm_write() - Write to retimer NVM
 * @port: USB4 port
 * @index: Retimer index
 * @address: Byte address where to start the write
 * @buf: Data to write
 * @size: Size in bytes how much to write
 *
 * Writes @size bytes from @buf to the retimer NVM. Used for NVM
 * upgrade. Returns %0 if the data was written successfully and negative
 * errno in case of failure.
 */
1580 int usb4_port_retimer_nvm_write(struct tb_port *port, u8 index, unsigned int address,
1581 const void *buf, size_t size)
1582 {
1583 struct retimer_info info = { .port = port, .index = index };
1584 int ret;
1585
1586 ret = usb4_port_retimer_nvm_set_offset(port, index, address);
1587 if (ret)
1588 return ret;
1589
1590 return tb_nvm_write_data(address, buf, size, USB4_DATA_RETRIES,
1591 usb4_port_retimer_nvm_write_next_block, &info);
1592 }
1593
/**
 * usb4_port_retimer_nvm_authenticate() - Start retimer NVM upgrade
 * @port: USB4 port
 * @index: Retimer index
 *
 * After the new NVM image has been written via usb4_port_retimer_nvm_write()
 * this function can be used to trigger NVM upgrade process. If succeeds
 * the retimer restarts with the new NVM and may not have the index set
 * anymore.
 */
1605 int usb4_port_retimer_nvm_authenticate(struct tb_port *port, u8 index)
1606 {
1607 u32 val;
1608
/*
 * We need to use the raw operation here because once the
 * authentication completes the retimer index is not set anymore
 * so we do not get back the status now.
 */
1614 val = USB4_SB_OPCODE_NVM_AUTH_WRITE;
1615 return usb4_port_sb_write(port, USB4_SB_TARGET_RETIMER, index,
1616 USB4_SB_OPCODE, &val, sizeof(val));
1617 }
1618
/**
 * usb4_port_retimer_nvm_authenticate_status() - Read status of NVM upgrade
 * @port: USB4 port
 * @index: Retimer index
 * @status: Raw status code read from metadata
 *
 * This can be called after usb4_port_retimer_nvm_authenticate() and
 * usb4_port_retimer_set_inbound_sbtx() to fetch status of the most
 * recent NVM upgrade. Returns %0 if the authentication status was
 * successfully read. The status is returned in @status.
 */
1632 int usb4_port_retimer_nvm_authenticate_status(struct tb_port *port, u8 index,
1633 u32 *status)
1634 {
1635 u32 metadata, val;
1636 int ret;
1637
1638 ret = usb4_port_retimer_read(port, index, USB4_SB_OPCODE, &val,
1639 sizeof(val));
1640 if (ret)
1641 return ret;
1642
1643 switch (val) {
1644 case 0:
1645 *status = 0;
1646 return 0;
1647
1648 case USB4_SB_OPCODE_ERR:
1649 ret = usb4_port_retimer_read(port, index, USB4_SB_METADATA,
1650 &metadata, sizeof(metadata));
1651 if (ret)
1652 return ret;
1653
1654 *status = metadata & USB4_SB_METADATA_NVM_AUTH_WRITE_MASK;
1655 return 0;
1656
1657 case USB4_SB_OPCODE_ONS:
1658 return -EOPNOTSUPP;
1659
1660 default:
1661 return -EIO;
1662 }
1663 }
1664
1665 static int usb4_port_retimer_nvm_read_block(void *data, unsigned int dwaddress,
1666 void *buf, size_t dwords)
1667 {
1668 const struct retimer_info *info = data;
1669 struct tb_port *port = info->port;
1670 u8 index = info->index;
1671 u32 metadata;
1672 int ret;
1673
1674 metadata = dwaddress << USB4_NVM_READ_OFFSET_SHIFT;
1675 if (dwords < NVM_DATA_DWORDS)
1676 metadata |= dwords << USB4_NVM_READ_LENGTH_SHIFT;
1677
1678 ret = usb4_port_retimer_write(port, index, USB4_SB_METADATA, &metadata,
1679 sizeof(metadata));
1680 if (ret)
1681 return ret;
1682
1683 ret = usb4_port_retimer_op(port, index, USB4_SB_OPCODE_NVM_READ, 500);
1684 if (ret)
1685 return ret;
1686
1687 return usb4_port_retimer_read(port, index, USB4_SB_DATA, buf,
1688 dwords * 4);
1689 }
1690
/**
 * usb4_port_retimer_nvm_read() - Read contents of retimer NVM
 * @port: USB4 port
 * @index: Retimer index
 * @address: NVM address (in bytes) to start reading
 * @buf: Data read from NVM is stored here
 * @size: Number of bytes to read
 *
 * Reads retimer NVM and copies the contents to @buf. Returns %0 if the
 * read was successful and negative errno in case of failure.
 */
1703 int usb4_port_retimer_nvm_read(struct tb_port *port, u8 index,
1704 unsigned int address, void *buf, size_t size)
1705 {
1706 struct retimer_info info = { .port = port, .index = index };
1707
1708 return tb_nvm_read_data(address, buf, size, USB4_DATA_RETRIES,
1709 usb4_port_retimer_nvm_read_block, &info);
1710 }
1711
/**
 * usb4_usb3_port_max_link_rate() - Maximum supported USB3 link rate
 * @port: USB3 adapter port
 *
 * Return maximum supported link rate of a USB3 adapter in Mb/s.
 * Negative errno in case of error.
 */
1719 int usb4_usb3_port_max_link_rate(struct tb_port *port)
1720 {
1721 int ret, lr;
1722 u32 val;
1723
1724 if (!tb_port_is_usb3_down(port) && !tb_port_is_usb3_up(port))
1725 return -EINVAL;
1726
1727 ret = tb_port_read(port, &val, TB_CFG_PORT,
1728 port->cap_adap + ADP_USB3_CS_4, 1);
1729 if (ret)
1730 return ret;
1731
1732 lr = (val & ADP_USB3_CS_4_MSLR_MASK) >> ADP_USB3_CS_4_MSLR_SHIFT;
1733 return lr == ADP_USB3_CS_4_MSLR_20G ? 20000 : 10000;
1734 }
1735
/**
 * usb4_usb3_port_actual_link_rate() - Established USB3 link rate
 * @port: USB3 adapter port
 *
 * Return actual established link rate of a USB3 adapter in Mb/s. If the
 * link is not up returns %0 and negative errno in case of failure.
 */
1743 int usb4_usb3_port_actual_link_rate(struct tb_port *port)
1744 {
1745 int ret, lr;
1746 u32 val;
1747
1748 if (!tb_port_is_usb3_down(port) && !tb_port_is_usb3_up(port))
1749 return -EINVAL;
1750
1751 ret = tb_port_read(port, &val, TB_CFG_PORT,
1752 port->cap_adap + ADP_USB3_CS_4, 1);
1753 if (ret)
1754 return ret;
1755
1756 if (!(val & ADP_USB3_CS_4_ULV))
1757 return 0;
1758
1759 lr = val & ADP_USB3_CS_4_ALR_MASK;
1760 return lr == ADP_USB3_CS_4_ALR_20G ? 20000 : 10000;
1761 }
1762
1763 static int usb4_usb3_port_cm_request(struct tb_port *port, bool request)
1764 {
1765 int ret;
1766 u32 val;
1767
1768 if (!tb_port_is_usb3_down(port))
1769 return -EINVAL;
1770 if (tb_route(port->sw))
1771 return -EINVAL;
1772
1773 ret = tb_port_read(port, &val, TB_CFG_PORT,
1774 port->cap_adap + ADP_USB3_CS_2, 1);
1775 if (ret)
1776 return ret;
1777
1778 if (request)
1779 val |= ADP_USB3_CS_2_CMR;
1780 else
1781 val &= ~ADP_USB3_CS_2_CMR;
1782
1783 ret = tb_port_write(port, &val, TB_CFG_PORT,
1784 port->cap_adap + ADP_USB3_CS_2, 1);
1785 if (ret)
1786 return ret;
1787
/*
 * We can use val here directly as the CMR bit is in the same place
 * as HCA. Just mask out others.
 */
1792 val &= ADP_USB3_CS_2_CMR;
1793 return usb4_port_wait_for_bit(port, port->cap_adap + ADP_USB3_CS_1,
1794 ADP_USB3_CS_1_HCA, val, 1500);
1795 }
1796
1797 static inline int usb4_usb3_port_set_cm_request(struct tb_port *port)
1798 {
1799 return usb4_usb3_port_cm_request(port, true);
1800 }
1801
1802 static inline int usb4_usb3_port_clear_cm_request(struct tb_port *port)
1803 {
1804 return usb4_usb3_port_cm_request(port, false);
1805 }
1806
1807 static unsigned int usb3_bw_to_mbps(u32 bw, u8 scale)
1808 {
1809 unsigned long uframes;
1810
1811 uframes = bw * 512UL << scale;
1812 return DIV_ROUND_CLOSEST(uframes * 8000, 1000 * 1000);
1813 }
1814
1815 static u32 mbps_to_usb3_bw(unsigned int mbps, u8 scale)
1816 {
1817 unsigned long uframes;
1818
/* Convert Mb/s to the bandwidth granularity units used by the adapter */
1820 uframes = ((unsigned long)mbps * 1000 * 1000) / 8000;
1821 return DIV_ROUND_UP(uframes, 512UL << scale);
1822 }
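
/*
 * Worked example of the two conversions above (arithmetic only): with
 * scale = 0, usb3_bw_to_mbps(1, 0) = DIV_ROUND_CLOSEST(512 * 8000,
 * 1000000) = 4 and mbps_to_usb3_bw(4, 0) = DIV_ROUND_UP(500, 512) = 1,
 * so the round trip is stable at this granularity.
 */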
1823
1824 static int usb4_usb3_port_read_allocated_bandwidth(struct tb_port *port,
1825 int *upstream_bw,
1826 int *downstream_bw)
1827 {
1828 u32 val, bw, scale;
1829 int ret;
1830
1831 ret = tb_port_read(port, &val, TB_CFG_PORT,
1832 port->cap_adap + ADP_USB3_CS_2, 1);
1833 if (ret)
1834 return ret;
1835
1836 ret = tb_port_read(port, &scale, TB_CFG_PORT,
1837 port->cap_adap + ADP_USB3_CS_3, 1);
1838 if (ret)
1839 return ret;
1840
1841 scale &= ADP_USB3_CS_3_SCALE_MASK;
1842
1843 bw = val & ADP_USB3_CS_2_AUBW_MASK;
1844 *upstream_bw = usb3_bw_to_mbps(bw, scale);
1845
1846 bw = (val & ADP_USB3_CS_2_ADBW_MASK) >> ADP_USB3_CS_2_ADBW_SHIFT;
1847 *downstream_bw = usb3_bw_to_mbps(bw, scale);
1848
1849 return 0;
1850 }
1851
/**
 * usb4_usb3_port_allocated_bandwidth() - Bandwidth allocated for USB3
 * @port: USB3 adapter port
 * @upstream_bw: Allocated upstream bandwidth is stored here
 * @downstream_bw: Allocated downstream bandwidth is stored here
 *
 * Stores currently allocated USB3 bandwidth into @upstream_bw and
 * @downstream_bw in Mb/s. Returns %0 in case of success and negative
 * errno in failure.
 */
1862 int usb4_usb3_port_allocated_bandwidth(struct tb_port *port, int *upstream_bw,
1863 int *downstream_bw)
1864 {
1865 int ret;
1866
1867 ret = usb4_usb3_port_set_cm_request(port);
1868 if (ret)
1869 return ret;
1870
1871 ret = usb4_usb3_port_read_allocated_bandwidth(port, upstream_bw,
1872 downstream_bw);
1873 usb4_usb3_port_clear_cm_request(port);
1874
1875 return ret;
1876 }
1877
1878 static int usb4_usb3_port_read_consumed_bandwidth(struct tb_port *port,
1879 int *upstream_bw,
1880 int *downstream_bw)
1881 {
1882 u32 val, bw, scale;
1883 int ret;
1884
1885 ret = tb_port_read(port, &val, TB_CFG_PORT,
1886 port->cap_adap + ADP_USB3_CS_1, 1);
1887 if (ret)
1888 return ret;
1889
1890 ret = tb_port_read(port, &scale, TB_CFG_PORT,
1891 port->cap_adap + ADP_USB3_CS_3, 1);
1892 if (ret)
1893 return ret;
1894
1895 scale &= ADP_USB3_CS_3_SCALE_MASK;
1896
1897 bw = val & ADP_USB3_CS_1_CUBW_MASK;
1898 *upstream_bw = usb3_bw_to_mbps(bw, scale);
1899
1900 bw = (val & ADP_USB3_CS_1_CDBW_MASK) >> ADP_USB3_CS_1_CDBW_SHIFT;
1901 *downstream_bw = usb3_bw_to_mbps(bw, scale);
1902
1903 return 0;
1904 }
1905
1906 static int usb4_usb3_port_write_allocated_bandwidth(struct tb_port *port,
1907 int upstream_bw,
1908 int downstream_bw)
1909 {
1910 u32 val, ubw, dbw, scale;
1911 int ret;
1912
/* Read the scale the adapter uses for the bandwidth values */
1914 ret = tb_port_read(port, &scale, TB_CFG_PORT,
1915 port->cap_adap + ADP_USB3_CS_3, 1);
1916 if (ret)
1917 return ret;
1918
1919 scale &= ADP_USB3_CS_3_SCALE_MASK;
1920 ubw = mbps_to_usb3_bw(upstream_bw, scale);
1921 dbw = mbps_to_usb3_bw(downstream_bw, scale);
1922
1923 ret = tb_port_read(port, &val, TB_CFG_PORT,
1924 port->cap_adap + ADP_USB3_CS_2, 1);
1925 if (ret)
1926 return ret;
1927
1928 val &= ~(ADP_USB3_CS_2_AUBW_MASK | ADP_USB3_CS_2_ADBW_MASK);
1929 val |= dbw << ADP_USB3_CS_2_ADBW_SHIFT;
1930 val |= ubw;
1931
1932 return tb_port_write(port, &val, TB_CFG_PORT,
1933 port->cap_adap + ADP_USB3_CS_2, 1);
1934 }
1935
/**
 * usb4_usb3_port_allocate_bandwidth() - Allocate bandwidth for USB3
 * @port: USB3 adapter port
 * @upstream_bw: New upstream bandwidth
 * @downstream_bw: New downstream bandwidth
 *
 * This function allocates passed bandwidth for USB3 (or clears it, if
 * both @upstream_bw and @downstream_bw are %0). The allocation never
 * goes below what the adapter currently consumes.
 *
 * The actual new values are returned in @upstream_bw and
 * @downstream_bw.
 *
 * Returns %0 in case of success and negative errno if there was a
 * failure.
 */
1953 int usb4_usb3_port_allocate_bandwidth(struct tb_port *port, int *upstream_bw,
1954 int *downstream_bw)
1955 {
1956 int ret, consumed_up, consumed_down, allocate_up, allocate_down;
1957
1958 ret = usb4_usb3_port_set_cm_request(port);
1959 if (ret)
1960 return ret;
1961
1962 ret = usb4_usb3_port_read_consumed_bandwidth(port, &consumed_up,
1963 &consumed_down);
1964 if (ret)
1965 goto err_request;
1966
/* Never allocate less than what is currently consumed */
1968 allocate_up = max(*upstream_bw, consumed_up);
1969 allocate_down = max(*downstream_bw, consumed_down);
1970
1971 ret = usb4_usb3_port_write_allocated_bandwidth(port, allocate_up,
1972 allocate_down);
1973 if (ret)
1974 goto err_request;
1975
1976 *upstream_bw = allocate_up;
1977 *downstream_bw = allocate_down;
1978
1979 err_request:
1980 usb4_usb3_port_clear_cm_request(port);
1981 return ret;
1982 }
1983
/**
 * usb4_usb3_port_release_bandwidth() - Release allocated USB3 bandwidth
 * @port: USB3 adapter port
 * @upstream_bw: New allocated upstream bandwidth
 * @downstream_bw: New allocated downstream bandwidth
 *
 * Releases USB3 allocated bandwidth down to what is actually consumed.
 * The new smaller allocation is returned in @upstream_bw and
 * @downstream_bw.
 *
 * Returns %0 in success and negative errno in case of failure.
 */
1995 int usb4_usb3_port_release_bandwidth(struct tb_port *port, int *upstream_bw,
1996 int *downstream_bw)
1997 {
1998 int ret, consumed_up, consumed_down;
1999
2000 ret = usb4_usb3_port_set_cm_request(port);
2001 if (ret)
2002 return ret;
2003
2004 ret = usb4_usb3_port_read_consumed_bandwidth(port, &consumed_up,
2005 &consumed_down);
2006 if (ret)
2007 goto err_request;
2008
/*
 * Always keep 1000 Mb/s to make sure xHCI has at least some
 * bandwidth available for isochronous traffic.
 */
2013 if (consumed_up < 1000)
2014 consumed_up = 1000;
2015 if (consumed_down < 1000)
2016 consumed_down = 1000;
2017
2018 ret = usb4_usb3_port_write_allocated_bandwidth(port, consumed_up,
2019 consumed_down);
2020 if (ret)
2021 goto err_request;
2022
2023 *upstream_bw = consumed_up;
2024 *downstream_bw = consumed_down;
2025
2026 err_request:
2027 usb4_usb3_port_clear_cm_request(port);
2028 return ret;
2029 }