/*
 * Copyright © 2008-2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "i915_drv.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_link_training.h"

static void intel_dp_reset_lttpr_common_caps(struct intel_dp *intel_dp)
{
	memset(intel_dp->lttpr_common_caps, 0, sizeof(intel_dp->lttpr_common_caps));
}

static void intel_dp_reset_lttpr_count(struct intel_dp *intel_dp)
{
	intel_dp->lttpr_common_caps[DP_PHY_REPEATER_CNT -
				    DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV] = 0;
}

static const char *intel_dp_phy_name(enum drm_dp_phy dp_phy,
				     char *buf, size_t buf_size)
{
	if (dp_phy == DP_PHY_DPRX)
		snprintf(buf, buf_size, "DPRX");
	else
		snprintf(buf, buf_size, "LTTPR %d", dp_phy - DP_PHY_LTTPR1 + 1);

	return buf;
}

static u8 *intel_dp_lttpr_phy_caps(struct intel_dp *intel_dp,
				   enum drm_dp_phy dp_phy)
{
	return intel_dp->lttpr_phy_caps[dp_phy - DP_PHY_LTTPR1];
}

static void intel_dp_read_lttpr_phy_caps(struct intel_dp *intel_dp,
					 const u8 dpcd[DP_RECEIVER_CAP_SIZE],
					 enum drm_dp_phy dp_phy)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
	char phy_name[10];

	intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name));

	if (drm_dp_read_lttpr_phy_caps(&intel_dp->aux, dpcd, dp_phy, phy_caps) < 0) {
		drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
			    "[ENCODER:%d:%s][%s] failed to read the PHY caps\n",
			    encoder->base.base.id, encoder->base.name, phy_name);
		return;
	}

	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
		    "[ENCODER:%d:%s][%s] PHY capabilities: %*ph\n",
		    encoder->base.base.id, encoder->base.name, phy_name,
		    (int)sizeof(intel_dp->lttpr_phy_caps[0]),
		    phy_caps);
}

static bool intel_dp_read_lttpr_common_caps(struct intel_dp *intel_dp,
					    const u8 dpcd[DP_RECEIVER_CAP_SIZE])
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int ret;

	ret = drm_dp_read_lttpr_common_caps(&intel_dp->aux, dpcd,
					    intel_dp->lttpr_common_caps);
	if (ret < 0)
		goto reset_caps;

	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
		    "[ENCODER:%d:%s] LTTPR common capabilities: %*ph\n",
		    encoder->base.base.id, encoder->base.name,
		    (int)sizeof(intel_dp->lttpr_common_caps),
		    intel_dp->lttpr_common_caps);

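	/* The minimum value of LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV is 1.4 (0x14). */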
	if (intel_dp->lttpr_common_caps[0] < 0x14)
		goto reset_caps;

	return true;

reset_caps:
	intel_dp_reset_lttpr_common_caps(intel_dp);
	return false;
}

static bool
intel_dp_set_lttpr_transparent_mode(struct intel_dp *intel_dp, bool enable)
{
	u8 val = enable ? DP_PHY_REPEATER_MODE_TRANSPARENT :
			  DP_PHY_REPEATER_MODE_NON_TRANSPARENT;

	return drm_dp_dpcd_write(&intel_dp->aux, DP_PHY_REPEATER_MODE, &val, 1) == 1;
}

static int intel_dp_init_lttpr(struct intel_dp *intel_dp, const u8 dpcd[DP_RECEIVER_CAP_SIZE])
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	int lttpr_count;
	int i;

	if (!intel_dp_read_lttpr_common_caps(intel_dp, dpcd))
		return 0;

	lttpr_count = drm_dp_lttpr_count(intel_dp->lttpr_common_caps);
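	/*
	 * If no LTTPRs were detected, leave the sink in the default
	 * transparent mode and don't write DP_PHY_REPEATER_MODE at all.
	 */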
	if (lttpr_count == 0)
		return 0;

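	/*
	 * Switch the LTTPRs back to transparent mode first, following the
	 * disable -> enable sequence required before enabling the
	 * non-transparent mode below.
	 */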
	intel_dp_set_lttpr_transparent_mode(intel_dp, true);

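	/*
	 * A negative count means the common caps reported an invalid LTTPR
	 * count: stay in transparent mode, while still applying any lane
	 * count / rate limits from the common caps read above.
	 */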
	if (lttpr_count < 0)
		return 0;

	if (!intel_dp_set_lttpr_transparent_mode(intel_dp, false)) {
		drm_dbg_kms(&i915->drm,
			    "[ENCODER:%d:%s] Switching to LTTPR non-transparent LT mode failed, fall-back to transparent mode\n",
			    encoder->base.base.id, encoder->base.name);

		intel_dp_set_lttpr_transparent_mode(intel_dp, true);
		intel_dp_reset_lttpr_count(intel_dp);

		return 0;
	}

	for (i = 0; i < lttpr_count; i++)
		intel_dp_read_lttpr_phy_caps(intel_dp, dpcd, DP_PHY_LTTPR(i));

	return lttpr_count;
}

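/**
 * intel_dp_init_lttpr_and_dprx_caps - detect LTTPR and DPRX caps, init the LTTPR link training mode
 * @intel_dp: Intel DP struct
 *
 * Read the LTTPR common and DPRX capabilities and switch to non-transparent
 * link training mode if any LTTPRs are detected.
 *
 * In case of an LTTPR detection error or if switching to the non-transparent
 * mode fails, fall back to the no-LTTPR, transparent link training mode.
 *
 * Returns:
 *   >0 if LTTPRs were detected and the non-transparent LT mode was set,
 *    0 if no LTTPRs were detected or the transparent LT mode had to be used,
 *   -EIO if the DPRX or LTTPR capabilities couldn't be read out.
 */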
int intel_dp_init_lttpr_and_dprx_caps(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int lttpr_count = 0;

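	/*
	 * Only attempt LTTPR detection for non-eDP sinks on display version
	 * 10+ (excluding GLK): older platforms have an AUX transaction
	 * timeout that is too short for the LTTPR detection accesses.
	 */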
	if (!intel_dp_is_edp(intel_dp) &&
	    (DISPLAY_VER(i915) >= 10 && !IS_GEMINILAKE(i915))) {
		u8 dpcd[DP_RECEIVER_CAP_SIZE];

		if (drm_dp_dpcd_probe(&intel_dp->aux, DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV))
			return -EIO;

		if (drm_dp_read_dpcd_caps(&intel_dp->aux, dpcd))
			return -EIO;

		lttpr_count = intel_dp_init_lttpr(intel_dp, dpcd);
	}

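	/*
	 * The DPTX shall read the DPRX caps after LTTPR detection, so re-read
	 * them here into intel_dp->dpcd.
	 */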
	if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd)) {
		intel_dp_reset_lttpr_common_caps(intel_dp);
		return -EIO;
	}

	return lttpr_count;
}

static u8 dp_voltage_max(u8 preemph)
{
	switch (preemph & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		return DP_TRAIN_VOLTAGE_SWING_LEVEL_1;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
	default:
		return DP_TRAIN_VOLTAGE_SWING_LEVEL_0;
	}
}

static u8 intel_dp_lttpr_voltage_max(struct intel_dp *intel_dp,
				     enum drm_dp_phy dp_phy)
{
	const u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);

	if (drm_dp_lttpr_voltage_swing_level_3_supported(phy_caps))
		return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
	else
		return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}

static u8 intel_dp_lttpr_preemph_max(struct intel_dp *intel_dp,
				     enum drm_dp_phy dp_phy)
{
	const u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);

	if (drm_dp_lttpr_pre_emphasis_level_3_supported(phy_caps))
		return DP_TRAIN_PRE_EMPH_LEVEL_3;
	else
		return DP_TRAIN_PRE_EMPH_LEVEL_2;
}

static bool
intel_dp_phy_is_downstream_of_source(struct intel_dp *intel_dp,
				     enum drm_dp_phy dp_phy)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int lttpr_count = drm_dp_lttpr_count(intel_dp->lttpr_common_caps);

	drm_WARN_ON_ONCE(&i915->drm, lttpr_count <= 0 && dp_phy != DP_PHY_DPRX);

	return lttpr_count <= 0 || dp_phy == DP_PHY_LTTPR(lttpr_count - 1);
}

static u8 intel_dp_phy_voltage_max(struct intel_dp *intel_dp,
				   const struct intel_crtc_state *crtc_state,
				   enum drm_dp_phy dp_phy)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 voltage_max;

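	/*
	 * Take the voltage swing limit from the source if the trained PHY is
	 * directly downstream of it, otherwise from the LTTPR upstream of
	 * that PHY.
	 */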
	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
		voltage_max = intel_dp->voltage_max(intel_dp, crtc_state);
	else
		voltage_max = intel_dp_lttpr_voltage_max(intel_dp, dp_phy + 1);

	drm_WARN_ON_ONCE(&i915->drm,
			 voltage_max != DP_TRAIN_VOLTAGE_SWING_LEVEL_2 &&
			 voltage_max != DP_TRAIN_VOLTAGE_SWING_LEVEL_3);

	return voltage_max;
}

static u8 intel_dp_phy_preemph_max(struct intel_dp *intel_dp,
				   enum drm_dp_phy dp_phy)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 preemph_max;

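	/*
	 * Take the pre-emphasis limit from the source if the trained PHY is
	 * directly downstream of it, otherwise from the LTTPR upstream of
	 * that PHY.
	 */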
	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
		preemph_max = intel_dp->preemph_max(intel_dp);
	else
		preemph_max = intel_dp_lttpr_preemph_max(intel_dp, dp_phy + 1);

	drm_WARN_ON_ONCE(&i915->drm,
			 preemph_max != DP_TRAIN_PRE_EMPH_LEVEL_2 &&
			 preemph_max != DP_TRAIN_PRE_EMPH_LEVEL_3);

	return preemph_max;
}

static bool has_per_lane_signal_levels(struct intel_dp *intel_dp,
				       enum drm_dp_phy dp_phy)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	return !intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy) ||
		DISPLAY_VER(i915) >= 11;
}

/* 128b/132b */
static u8 intel_dp_get_lane_adjust_tx_ffe_preset(struct intel_dp *intel_dp,
						 const struct intel_crtc_state *crtc_state,
						 enum drm_dp_phy dp_phy,
						 const u8 link_status[DP_LINK_STATUS_SIZE],
						 int lane)
{
	u8 tx_ffe = 0;

	if (has_per_lane_signal_levels(intel_dp, dp_phy)) {
		lane = min(lane, crtc_state->lane_count - 1);
		tx_ffe = drm_dp_get_adjust_tx_ffe_preset(link_status, lane);
	} else {
		for (lane = 0; lane < crtc_state->lane_count; lane++)
			tx_ffe = max(tx_ffe, drm_dp_get_adjust_tx_ffe_preset(link_status, lane));
	}

	return tx_ffe;
}

/* 8b/10b */
static u8 intel_dp_get_lane_adjust_vswing_preemph(struct intel_dp *intel_dp,
						  const struct intel_crtc_state *crtc_state,
						  enum drm_dp_phy dp_phy,
						  const u8 link_status[DP_LINK_STATUS_SIZE],
						  int lane)
{
	u8 v = 0;
	u8 p = 0;
	u8 voltage_max;
	u8 preemph_max;

	if (has_per_lane_signal_levels(intel_dp, dp_phy)) {
		lane = min(lane, crtc_state->lane_count - 1);

		v = drm_dp_get_adjust_request_voltage(link_status, lane);
		p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
	} else {
		for (lane = 0; lane < crtc_state->lane_count; lane++) {
			v = max(v, drm_dp_get_adjust_request_voltage(link_status, lane));
			p = max(p, drm_dp_get_adjust_request_pre_emphasis(link_status, lane));
		}
	}

	preemph_max = intel_dp_phy_preemph_max(intel_dp, dp_phy);
	if (p >= preemph_max)
		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	v = min(v, dp_voltage_max(p));

	voltage_max = intel_dp_phy_voltage_max(intel_dp, crtc_state, dp_phy);
	if (v >= voltage_max)
		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;

	return v | p;
}

static u8 intel_dp_get_lane_adjust_train(struct intel_dp *intel_dp,
					 const struct intel_crtc_state *crtc_state,
					 enum drm_dp_phy dp_phy,
					 const u8 link_status[DP_LINK_STATUS_SIZE],
					 int lane)
{
	if (intel_dp_is_uhbr(crtc_state))
		return intel_dp_get_lane_adjust_tx_ffe_preset(intel_dp, crtc_state,
							      dp_phy, link_status, lane);
	else
		return intel_dp_get_lane_adjust_vswing_preemph(intel_dp, crtc_state,
							       dp_phy, link_status, lane);
}

#define TRAIN_REQ_FMT "%d/%d/%d/%d"
#define _TRAIN_REQ_VSWING_ARGS(link_status, lane) \
	(drm_dp_get_adjust_request_voltage((link_status), (lane)) >> DP_TRAIN_VOLTAGE_SWING_SHIFT)
#define TRAIN_REQ_VSWING_ARGS(link_status) \
	_TRAIN_REQ_VSWING_ARGS(link_status, 0), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 1), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 2), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 3)
#define _TRAIN_REQ_PREEMPH_ARGS(link_status, lane) \
	(drm_dp_get_adjust_request_pre_emphasis((link_status), (lane)) >> DP_TRAIN_PRE_EMPHASIS_SHIFT)
#define TRAIN_REQ_PREEMPH_ARGS(link_status) \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 0), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 1), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 2), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 3)
#define _TRAIN_REQ_TX_FFE_ARGS(link_status, lane) \
	drm_dp_get_adjust_tx_ffe_preset((link_status), (lane))
#define TRAIN_REQ_TX_FFE_ARGS(link_status) \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 0), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 1), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 2), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 3)

void
intel_dp_get_adjust_train(struct intel_dp *intel_dp,
			  const struct intel_crtc_state *crtc_state,
			  enum drm_dp_phy dp_phy,
			  const u8 link_status[DP_LINK_STATUS_SIZE])
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	char phy_name[10];
	int lane;

	if (intel_dp_is_uhbr(crtc_state)) {
		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 128b/132b, lanes: %d, "
			    "TX FFE request: " TRAIN_REQ_FMT "\n",
			    encoder->base.base.id, encoder->base.name,
			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
			    crtc_state->lane_count,
			    TRAIN_REQ_TX_FFE_ARGS(link_status));
	} else {
		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 8b/10b, lanes: %d, "
			    "vswing request: " TRAIN_REQ_FMT ", "
			    "pre-emphasis request: " TRAIN_REQ_FMT "\n",
			    encoder->base.base.id, encoder->base.name,
			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
			    crtc_state->lane_count,
			    TRAIN_REQ_VSWING_ARGS(link_status),
			    TRAIN_REQ_PREEMPH_ARGS(link_status));
	}

	for (lane = 0; lane < 4; lane++)
		intel_dp->train_set[lane] =
			intel_dp_get_lane_adjust_train(intel_dp, crtc_state,
						       dp_phy, link_status, lane);
}

static int intel_dp_training_pattern_set_reg(struct intel_dp *intel_dp,
					     enum drm_dp_phy dp_phy)
{
	return dp_phy == DP_PHY_DPRX ?
		DP_TRAINING_PATTERN_SET :
		DP_TRAINING_PATTERN_SET_PHY_REPEATER(dp_phy);
}

static bool
intel_dp_set_link_train(struct intel_dp *intel_dp,
			const struct intel_crtc_state *crtc_state,
			enum drm_dp_phy dp_phy,
			u8 dp_train_pat)
{
	int reg = intel_dp_training_pattern_set_reg(intel_dp, dp_phy);
	u8 buf[sizeof(intel_dp->train_set) + 1];
	int len;

	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
					       dp_phy, dp_train_pat);

	buf[0] = dp_train_pat;

	memcpy(buf + 1, intel_dp->train_set, crtc_state->lane_count);
	len = crtc_state->lane_count + 1;

	return drm_dp_dpcd_write(&intel_dp->aux, reg, buf, len) == len;
}

static char dp_training_pattern_name(u8 train_pat)
{
	switch (train_pat) {
	case DP_TRAINING_PATTERN_1:
	case DP_TRAINING_PATTERN_2:
	case DP_TRAINING_PATTERN_3:
		return '0' + train_pat;
	case DP_TRAINING_PATTERN_4:
		return '4';
	default:
		MISSING_CASE(train_pat);
		return '?';
	}
}

void
intel_dp_program_link_training_pattern(struct intel_dp *intel_dp,
				       const struct intel_crtc_state *crtc_state,
				       enum drm_dp_phy dp_phy,
				       u8 dp_train_pat)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	u8 train_pat = intel_dp_training_pattern_symbol(dp_train_pat);
	char phy_name[10];

	if (train_pat != DP_TRAINING_PATTERN_DISABLE)
		drm_dbg_kms(&i915->drm,
			    "[ENCODER:%d:%s][%s] Using DP training pattern TPS%c\n",
			    encoder->base.base.id, encoder->base.name,
			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
			    dp_training_pattern_name(train_pat));

	intel_dp->set_link_train(intel_dp, crtc_state, dp_train_pat);
}

#define TRAIN_SET_FMT "%d%s/%d%s/%d%s/%d%s"
#define _TRAIN_SET_VSWING_ARGS(train_set) \
	((train_set) & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT, \
	(train_set) & DP_TRAIN_MAX_SWING_REACHED ? "(max)" : ""
#define TRAIN_SET_VSWING_ARGS(train_set) \
	_TRAIN_SET_VSWING_ARGS((train_set)[0]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[1]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[2]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[3])
#define _TRAIN_SET_PREEMPH_ARGS(train_set) \
	((train_set) & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT, \
	(train_set) & DP_TRAIN_MAX_PRE_EMPHASIS_REACHED ? "(max)" : ""
#define TRAIN_SET_PREEMPH_ARGS(train_set) \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[0]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[1]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[2]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[3])
#define _TRAIN_SET_TX_FFE_ARGS(train_set) \
	((train_set) & DP_TX_FFE_PRESET_VALUE_MASK), ""
#define TRAIN_SET_TX_FFE_ARGS(train_set) \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[0]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[1]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[2]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[3])

void intel_dp_set_signal_levels(struct intel_dp *intel_dp,
				const struct intel_crtc_state *crtc_state,
				enum drm_dp_phy dp_phy)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	char phy_name[10];

	if (intel_dp_is_uhbr(crtc_state)) {
		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 128b/132b, lanes: %d, "
			    "TX FFE presets: " TRAIN_SET_FMT "\n",
			    encoder->base.base.id, encoder->base.name,
			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
			    crtc_state->lane_count,
			    TRAIN_SET_TX_FFE_ARGS(intel_dp->train_set));
	} else {
		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 8b/10b, lanes: %d, "
			    "vswing levels: " TRAIN_SET_FMT ", "
			    "pre-emphasis levels: " TRAIN_SET_FMT "\n",
			    encoder->base.base.id, encoder->base.name,
			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
			    crtc_state->lane_count,
			    TRAIN_SET_VSWING_ARGS(intel_dp->train_set),
			    TRAIN_SET_PREEMPH_ARGS(intel_dp->train_set));
	}

	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
		encoder->set_signal_levels(encoder, crtc_state);
}

static bool
intel_dp_reset_link_train(struct intel_dp *intel_dp,
			  const struct intel_crtc_state *crtc_state,
			  enum drm_dp_phy dp_phy,
			  u8 dp_train_pat)
{
	memset(intel_dp->train_set, 0, sizeof(intel_dp->train_set));
	intel_dp_set_signal_levels(intel_dp, crtc_state, dp_phy);
	return intel_dp_set_link_train(intel_dp, crtc_state, dp_phy, dp_train_pat);
}

static bool
intel_dp_update_link_train(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *crtc_state,
			   enum drm_dp_phy dp_phy)
{
	int reg = dp_phy == DP_PHY_DPRX ?
		DP_TRAINING_LANE0_SET :
		DP_TRAINING_LANE0_SET_PHY_REPEATER(dp_phy);
	int ret;

	intel_dp_set_signal_levels(intel_dp, crtc_state, dp_phy);

	ret = drm_dp_dpcd_write(&intel_dp->aux, reg,
				intel_dp->train_set, crtc_state->lane_count);

	return ret == crtc_state->lane_count;
}

static bool intel_dp_lane_max_tx_ffe_reached(u8 train_set_lane)
{
	return (train_set_lane & DP_TX_FFE_PRESET_VALUE_MASK) ==
		DP_TX_FFE_PRESET_VALUE_MASK;
}

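/*
 * A lane is at its maximum 8b/10b drive setting once the sink has flagged
 * max swing reached and the voltage swing + pre-emphasis levels add up to
 * the highest allowed combination (3).
 */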
static bool intel_dp_lane_max_vswing_reached(u8 train_set_lane)
{
	u8 v = (train_set_lane & DP_TRAIN_VOLTAGE_SWING_MASK) >>
		DP_TRAIN_VOLTAGE_SWING_SHIFT;
	u8 p = (train_set_lane & DP_TRAIN_PRE_EMPHASIS_MASK) >>
		DP_TRAIN_PRE_EMPHASIS_SHIFT;

	if ((train_set_lane & DP_TRAIN_MAX_SWING_REACHED) == 0)
		return false;

	if (v + p != 3)
		return false;

	return true;
}

static bool intel_dp_link_max_vswing_reached(struct intel_dp *intel_dp,
					     const struct intel_crtc_state *crtc_state)
{
	int lane;

	for (lane = 0; lane < crtc_state->lane_count; lane++) {
		u8 train_set_lane = intel_dp->train_set[lane];

		if (intel_dp_is_uhbr(crtc_state)) {
			if (!intel_dp_lane_max_tx_ffe_reached(train_set_lane))
				return false;
		} else {
			if (!intel_dp_lane_max_vswing_reached(train_set_lane))
				return false;
		}
	}

	return true;
}

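/*
 * Prepare link training: run the platform's link retrain preparation hook
 * and program the link rate, lane count, downspread and channel coding in
 * the sink's DPCD.
 */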
static bool
intel_dp_prepare_link_train(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	u8 link_config[2];
	u8 link_bw, rate_select;

	if (intel_dp->prepare_link_retrain)
		intel_dp->prepare_link_retrain(intel_dp, crtc_state);

	intel_dp_compute_rate(intel_dp, crtc_state->port_clock,
			      &link_bw, &rate_select);

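	/*
	 * WaEdpLinkRateDataReload
	 *
	 * When the eDP rate select method is used (link_bw == 0), re-read the
	 * sink's supported link rate table before programming the selected
	 * rate below.
	 */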
	if (!link_bw) {
		struct intel_connector *connector = intel_dp->attached_connector;
		__le16 sink_rates[DP_MAX_SUPPORTED_RATES];

		drm_dbg_kms(&i915->drm, "[CONNECTOR:%d:%s] Reloading eDP link rates\n",
			    connector->base.base.id, connector->base.name);

		drm_dp_dpcd_read(&intel_dp->aux, DP_SUPPORTED_LINK_RATES,
				 sink_rates, sizeof(sink_rates));
	}

	if (link_bw)
		drm_dbg_kms(&i915->drm,
			    "[ENCODER:%d:%s] Using LINK_BW_SET value %02x\n",
			    encoder->base.base.id, encoder->base.name, link_bw);
	else
		drm_dbg_kms(&i915->drm,
			    "[ENCODER:%d:%s] Using LINK_RATE_SET value %02x\n",
			    encoder->base.base.id, encoder->base.name, rate_select);

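	/* Write the link configuration data */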
	link_config[0] = link_bw;
	link_config[1] = crtc_state->lane_count;
	if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
		link_config[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	drm_dp_dpcd_write(&intel_dp->aux, DP_LINK_BW_SET, link_config, 2);

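	/* eDP 1.4 rate select method. */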
	if (!link_bw)
		drm_dp_dpcd_write(&intel_dp->aux, DP_LINK_RATE_SET,
				  &rate_select, 1);

	link_config[0] = crtc_state->vrr.enable ? DP_MSA_TIMING_PAR_IGNORE_EN : 0;
	link_config[1] = intel_dp_is_uhbr(crtc_state) ?
		DP_SET_ANSI_128B132B : DP_SET_ANSI_8B10B;
	drm_dp_dpcd_write(&intel_dp->aux, DP_DOWNSPREAD_CTRL, link_config, 2);

	return true;
}

static bool intel_dp_adjust_request_changed(const struct intel_crtc_state *crtc_state,
					    const u8 old_link_status[DP_LINK_STATUS_SIZE],
					    const u8 new_link_status[DP_LINK_STATUS_SIZE])
{
	int lane;

	for (lane = 0; lane < crtc_state->lane_count; lane++) {
		u8 old, new;

		if (intel_dp_is_uhbr(crtc_state)) {
			old = drm_dp_get_adjust_tx_ffe_preset(old_link_status, lane);
			new = drm_dp_get_adjust_tx_ffe_preset(new_link_status, lane);
		} else {
			old = drm_dp_get_adjust_request_voltage(old_link_status, lane) |
				drm_dp_get_adjust_request_pre_emphasis(old_link_status, lane);
			new = drm_dp_get_adjust_request_voltage(new_link_status, lane) |
				drm_dp_get_adjust_request_pre_emphasis(new_link_status, lane);
		}

		if (old != new)
			return true;
	}

	return false;
}

void
intel_dp_dump_link_status(struct intel_dp *intel_dp, enum drm_dp_phy dp_phy,
			  const u8 link_status[DP_LINK_STATUS_SIZE])
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	char phy_name[10];

	drm_dbg_kms(&i915->drm,
		    "[ENCODER:%d:%s][%s] ln0_1:0x%x ln2_3:0x%x align:0x%x sink:0x%x adj_req0_1:0x%x adj_req2_3:0x%x\n",
		    encoder->base.base.id, encoder->base.name,
		    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
		    link_status[0], link_status[1], link_status[2],
		    link_status[3], link_status[4], link_status[5]);
}

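/*
 * Perform the link training clock recovery phase on the given DP PHY using
 * training pattern 1.
 */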
static bool
intel_dp_link_training_clock_recovery(struct intel_dp *intel_dp,
				      const struct intel_crtc_state *crtc_state,
				      enum drm_dp_phy dp_phy)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	u8 old_link_status[DP_LINK_STATUS_SIZE] = {};
	int voltage_tries, cr_tries, max_cr_tries;
	u8 link_status[DP_LINK_STATUS_SIZE];
	bool max_vswing_reached = false;
	char phy_name[10];
	int delay_us;

	delay_us = drm_dp_read_clock_recovery_delay(&intel_dp->aux,
						    intel_dp->dpcd, dp_phy,
						    intel_dp_is_uhbr(crtc_state));

	intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name));

	/* clock recovery */
	if (!intel_dp_reset_link_train(intel_dp, crtc_state, dp_phy,
				       DP_TRAINING_PATTERN_1 |
				       DP_LINK_SCRAMBLING_DISABLE)) {
		drm_err(&i915->drm, "[ENCODER:%d:%s][%s] Failed to enable link training\n",
			encoder->base.base.id, encoder->base.name, phy_name);
		return false;
	}

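	/*
	 * The DP 1.4 spec defines the max clock recovery retries value as 10,
	 * but for pre-DP 1.4 devices we use a more tolerant retry limit of 80
	 * (4 voltage levels x 4 pre-emphasis levels x 5 identical voltage
	 * retries).
	 */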
	if (intel_dp->dpcd[DP_DPCD_REV] >= DP_DPCD_REV_14)
		max_cr_tries = 10;
	else
		max_cr_tries = 80;

	voltage_tries = 1;
	for (cr_tries = 0; cr_tries < max_cr_tries; ++cr_tries) {
		usleep_range(delay_us, 2 * delay_us);

		if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, dp_phy,
						     link_status) < 0) {
			drm_err(&i915->drm, "[ENCODER:%d:%s][%s] Failed to get link status\n",
				encoder->base.base.id, encoder->base.name, phy_name);
			return false;
		}

		if (drm_dp_clock_recovery_ok(link_status, crtc_state->lane_count)) {
			drm_dbg_kms(&i915->drm,
				    "[ENCODER:%d:%s][%s] Clock recovery OK\n",
				    encoder->base.base.id, encoder->base.name, phy_name);
			return true;
		}

		if (voltage_tries == 5) {
			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
			drm_dbg_kms(&i915->drm,
				    "[ENCODER:%d:%s][%s] Same voltage tried 5 times\n",
				    encoder->base.base.id, encoder->base.name, phy_name);
			return false;
		}

		if (max_vswing_reached) {
			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
			drm_dbg_kms(&i915->drm,
				    "[ENCODER:%d:%s][%s] Max Voltage Swing reached\n",
				    encoder->base.base.id, encoder->base.name, phy_name);
			return false;
		}

		/* Update training set as requested by target */
		intel_dp_get_adjust_train(intel_dp, crtc_state, dp_phy,
					  link_status);
		if (!intel_dp_update_link_train(intel_dp, crtc_state, dp_phy)) {
			drm_err(&i915->drm,
				"[ENCODER:%d:%s][%s] Failed to update link training\n",
				encoder->base.base.id, encoder->base.name, phy_name);
			return false;
		}

		if (!intel_dp_adjust_request_changed(crtc_state, old_link_status, link_status))
			++voltage_tries;
		else
			voltage_tries = 1;

		memcpy(old_link_status, link_status, sizeof(link_status));

		if (intel_dp_link_max_vswing_reached(intel_dp, crtc_state))
			max_vswing_reached = true;
	}

	intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
	drm_err(&i915->drm,
		"[ENCODER:%d:%s][%s] Failed clock recovery %d times, giving up!\n",
		encoder->base.base.id, encoder->base.name, phy_name, max_cr_tries);

	return false;
}

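/*
 * Pick the training pattern for channel equalization: TPS4 for HBR3 or for
 * DP 1.4 devices that support it, TPS3 for HBR2 or DP 1.2 devices that
 * support it, TPS2 otherwise.
 */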
static u32 intel_dp_training_pattern(struct intel_dp *intel_dp,
				     const struct intel_crtc_state *crtc_state,
				     enum drm_dp_phy dp_phy)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	bool source_tps3, sink_tps3, source_tps4, sink_tps4;

	if (intel_dp_is_uhbr(crtc_state))
		return DP_TRAINING_PATTERN_2;

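	/*
	 * Intel platforms that support HBR3 also support TPS4. TPS4 support
	 * is mandatory for all downstream devices that support HBR3 and for
	 * LTTPRs, so only the DPRX capability needs to be checked here.
	 */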
	source_tps4 = intel_dp_source_supports_tps4(i915);
	sink_tps4 = dp_phy != DP_PHY_DPRX ||
		    drm_dp_tps4_supported(intel_dp->dpcd);
	if (source_tps4 && sink_tps4) {
		return DP_TRAINING_PATTERN_4;
	} else if (crtc_state->port_clock == 810000) {
		if (!source_tps4)
			drm_dbg_kms(&i915->drm,
				    "8.1 Gbps link rate without source TPS4 support\n");
		if (!sink_tps4)
			drm_dbg_kms(&i915->drm,
				    "8.1 Gbps link rate without sink TPS4 support\n");
	}

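	/*
	 * Intel platforms that support HBR2 also support TPS3. TPS3 support
	 * is mandatory for downstream devices that support HBR2, however not
	 * all sinks follow the spec.
	 */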
	source_tps3 = intel_dp_source_supports_tps3(i915);
	sink_tps3 = dp_phy != DP_PHY_DPRX ||
		    drm_dp_tps3_supported(intel_dp->dpcd);
	if (source_tps3 && sink_tps3) {
		return DP_TRAINING_PATTERN_3;
	} else if (crtc_state->port_clock >= 540000) {
		if (!source_tps3)
			drm_dbg_kms(&i915->drm,
				    ">=5.4/6.48 Gbps link rate without source TPS3 support\n");
		if (!sink_tps3)
			drm_dbg_kms(&i915->drm,
				    ">=5.4/6.48 Gbps link rate without sink TPS3 support\n");
	}

	return DP_TRAINING_PATTERN_2;
}

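/*
 * Perform the link training channel equalization phase on the given DP PHY
 * using one of training pattern 2, 3 or 4 depending on the source and sink
 * capabilities.
 */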
static bool
intel_dp_link_training_channel_equalization(struct intel_dp *intel_dp,
					    const struct intel_crtc_state *crtc_state,
					    enum drm_dp_phy dp_phy)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	int tries;
	u32 training_pattern;
	u8 link_status[DP_LINK_STATUS_SIZE];
	bool channel_eq = false;
	char phy_name[10];
	int delay_us;

	delay_us = drm_dp_read_channel_eq_delay(&intel_dp->aux,
						intel_dp->dpcd, dp_phy,
						intel_dp_is_uhbr(crtc_state));

	intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name));

	training_pattern = intel_dp_training_pattern(intel_dp, crtc_state, dp_phy);

	if (training_pattern != DP_TRAINING_PATTERN_4)
		training_pattern |= DP_LINK_SCRAMBLING_DISABLE;

	/* channel equalization */
	if (!intel_dp_set_link_train(intel_dp, crtc_state, dp_phy,
				     training_pattern)) {
		drm_err(&i915->drm,
			"[ENCODER:%d:%s][%s] Failed to start channel equalization\n",
			encoder->base.base.id, encoder->base.name,
			phy_name);
		return false;
	}

	for (tries = 0; tries < 5; tries++) {
		usleep_range(delay_us, 2 * delay_us);

		if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, dp_phy,
						     link_status) < 0) {
			drm_err(&i915->drm,
				"[ENCODER:%d:%s][%s] Failed to get link status\n",
				encoder->base.base.id, encoder->base.name, phy_name);
			break;
		}

		/* Make sure clock is still ok */
		if (!drm_dp_clock_recovery_ok(link_status,
					      crtc_state->lane_count)) {
			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
			drm_dbg_kms(&i915->drm,
				    "[ENCODER:%d:%s][%s] Clock recovery check failed, cannot "
				    "continue channel equalization\n",
				    encoder->base.base.id, encoder->base.name, phy_name);
			break;
		}

		if (drm_dp_channel_eq_ok(link_status,
					 crtc_state->lane_count)) {
			channel_eq = true;
			drm_dbg_kms(&i915->drm,
				    "[ENCODER:%d:%s][%s] Channel EQ done. DP Training successful\n",
				    encoder->base.base.id, encoder->base.name, phy_name);
			break;
		}

		/* Update training set as requested by target */
		intel_dp_get_adjust_train(intel_dp, crtc_state, dp_phy,
					  link_status);
		if (!intel_dp_update_link_train(intel_dp, crtc_state, dp_phy)) {
			drm_err(&i915->drm,
				"[ENCODER:%d:%s][%s] Failed to update link training\n",
				encoder->base.base.id, encoder->base.name, phy_name);
			break;
		}
	}

	/* Try 5 times, else fail and try at lower BW */
	if (tries == 5) {
		intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
		drm_dbg_kms(&i915->drm,
			    "[ENCODER:%d:%s][%s] Channel equalization failed 5 times\n",
			    encoder->base.base.id, encoder->base.name, phy_name);
	}

	return channel_eq;
}

static bool intel_dp_disable_dpcd_training_pattern(struct intel_dp *intel_dp,
						   enum drm_dp_phy dp_phy)
{
	int reg = intel_dp_training_pattern_set_reg(intel_dp, dp_phy);
	u8 val = DP_TRAINING_PATTERN_DISABLE;

	return drm_dp_dpcd_write(&intel_dp->aux, reg, &val, 1) == 1;
}

static int
intel_dp_128b132b_intra_hop(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 sink_status;
	int ret;

	ret = drm_dp_dpcd_readb(&intel_dp->aux, DP_SINK_STATUS, &sink_status);
	if (ret != 1) {
		drm_dbg_kms(&i915->drm, "Failed to read sink status\n");
		return ret < 0 ? ret : -EIO;
	}

	return sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION ? 1 : 0;
}

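/**
 * intel_dp_stop_link_train - stop link training
 * @intel_dp: DP struct
 * @crtc_state: state for CRTC attached to the encoder
 *
 * Stop the link training of the @intel_dp port: disable the training pattern
 * in the sink's DPCD and the training pattern symbol generation on the port,
 * and for 128b/132b links wait for any pending intra-hop AUX handover to
 * clear.
 */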
void intel_dp_stop_link_train(struct intel_dp *intel_dp,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;

	intel_dp->link_trained = true;

	intel_dp_disable_dpcd_training_pattern(intel_dp, DP_PHY_DPRX);
	intel_dp_program_link_training_pattern(intel_dp, crtc_state, DP_PHY_DPRX,
					       DP_TRAINING_PATTERN_DISABLE);

	if (intel_dp_is_uhbr(crtc_state) &&
	    wait_for(intel_dp_128b132b_intra_hop(intel_dp, crtc_state) == 0, 500)) {
		drm_dbg_kms(&i915->drm,
			    "[ENCODER:%d:%s] 128b/132b intra-hop not clearing\n",
			    encoder->base.base.id, encoder->base.name);
	}
}

static bool
intel_dp_link_train_phy(struct intel_dp *intel_dp,
			const struct intel_crtc_state *crtc_state,
			enum drm_dp_phy dp_phy)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	char phy_name[10];
	bool ret = false;

	if (!intel_dp_link_training_clock_recovery(intel_dp, crtc_state, dp_phy))
		goto out;

	if (!intel_dp_link_training_channel_equalization(intel_dp, crtc_state, dp_phy))
		goto out;

	ret = true;

out:
	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
		    "[CONNECTOR:%d:%s][ENCODER:%d:%s][%s] Link Training %s at link rate = %d, lane count = %d\n",
		    connector->base.base.id, connector->base.name,
		    encoder->base.base.id, encoder->base.name,
		    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
		    ret ? "passed" : "failed",
		    crtc_state->port_clock, crtc_state->lane_count);

	return ret;
}

static void intel_dp_schedule_fallback_link_training(struct intel_dp *intel_dp,
						     const struct intel_crtc_state *crtc_state)
{
	struct intel_connector *intel_connector = intel_dp->attached_connector;
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;

	if (intel_dp->hobl_active) {
		drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
			    "[ENCODER:%d:%s] Link Training failed with HOBL active, "
			    "not enabling it from now on",
			    encoder->base.base.id, encoder->base.name);
		intel_dp->hobl_failed = true;
	} else if (intel_dp_get_link_train_fallback_values(intel_dp,
							   crtc_state->port_clock,
							   crtc_state->lane_count)) {
		return;
	}

	/* Schedule a Hotplug Uevent to userspace to start modeset */
	schedule_work(&intel_connector->modeset_retry_work);
}
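/* Perform the link training on all LTTPRs and the DPRX on a link. */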
static bool
intel_dp_link_train_all_phys(struct intel_dp *intel_dp,
			     const struct intel_crtc_state *crtc_state,
			     int lttpr_count)
{
	bool ret = true;
	int i;

	for (i = lttpr_count - 1; i >= 0; i--) {
		enum drm_dp_phy dp_phy = DP_PHY_LTTPR(i);

		ret = intel_dp_link_train_phy(intel_dp, crtc_state, dp_phy);
		intel_dp_disable_dpcd_training_pattern(intel_dp, dp_phy);

		if (!ret)
			break;
	}

	if (ret)
		ret = intel_dp_link_train_phy(intel_dp, crtc_state, DP_PHY_DPRX);

	if (intel_dp->set_idle_link_train)
		intel_dp->set_idle_link_train(intel_dp, crtc_state);

	return ret;
}

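/*
 * 128b/132b link training: LANEx_EQ_DONE sequence.
 */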
static bool
intel_dp_128b132b_lane_eq(struct intel_dp *intel_dp,
			  const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	u8 link_status[DP_LINK_STATUS_SIZE];
	int delay_us;
	int try, max_tries = 20;
	unsigned long deadline;
	bool timeout = false;

	if (!intel_dp_reset_link_train(intel_dp, crtc_state, DP_PHY_DPRX,
				       DP_TRAINING_PATTERN_1)) {
		drm_err(&i915->drm,
			"[ENCODER:%d:%s] Failed to start 128b/132b TPS1\n",
			encoder->base.base.id, encoder->base.name);
		return false;
	}

	delay_us = drm_dp_128b132b_read_aux_rd_interval(&intel_dp->aux);

	/* Read the initial TX FFE settings. */
	if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
		drm_err(&i915->drm,
			"[ENCODER:%d:%s] Failed to read TX FFE presets\n",
			encoder->base.base.id, encoder->base.name);
		return false;
	}

	/* Update signal levels and training set as requested. */
	intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX, link_status);
	if (!intel_dp_update_link_train(intel_dp, crtc_state, DP_PHY_DPRX)) {
		drm_err(&i915->drm,
			"[ENCODER:%d:%s] Failed to set initial TX FFE settings\n",
			encoder->base.base.id, encoder->base.name);
		return false;
	}

	/* Start transmitting 128b/132b TPS2. */
	if (!intel_dp_set_link_train(intel_dp, crtc_state, DP_PHY_DPRX,
				     DP_TRAINING_PATTERN_2)) {
		drm_err(&i915->drm,
			"[ENCODER:%d:%s] Failed to start 128b/132b TPS2\n",
			encoder->base.base.id, encoder->base.name);
		return false;
	}

	/* Time budget for the LANEx_EQ_DONE Sequence */
	deadline = jiffies + msecs_to_jiffies_timeout(400);

	for (try = 0; try < max_tries; try++) {
		usleep_range(delay_us, 2 * delay_us);

		/*
		 * The delay may get updated. The transmitter shall read the
		 * delay before link status during link training.
		 */
		delay_us = drm_dp_128b132b_read_aux_rd_interval(&intel_dp->aux);

		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
			drm_err(&i915->drm,
				"[ENCODER:%d:%s] Failed to read link status\n",
				encoder->base.base.id, encoder->base.name);
			return false;
		}

		if (drm_dp_128b132b_link_training_failed(link_status)) {
			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
			drm_err(&i915->drm,
				"[ENCODER:%d:%s] Downstream link training failure\n",
				encoder->base.base.id, encoder->base.name);
			return false;
		}

		if (drm_dp_128b132b_lane_channel_eq_done(link_status, crtc_state->lane_count)) {
			drm_dbg_kms(&i915->drm,
				    "[ENCODER:%d:%s] Lane channel eq done\n",
				    encoder->base.base.id, encoder->base.name);
			break;
		}

		if (timeout) {
			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
			drm_err(&i915->drm,
				"[ENCODER:%d:%s] Lane channel eq timeout\n",
				encoder->base.base.id, encoder->base.name);
			return false;
		}

		if (time_after(jiffies, deadline))
			timeout = true; /* try one last time after deadline */

		/* Update signal levels and training set as requested. */
		intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX, link_status);
		if (!intel_dp_update_link_train(intel_dp, crtc_state, DP_PHY_DPRX)) {
			drm_err(&i915->drm,
				"[ENCODER:%d:%s] Failed to update TX FFE settings\n",
				encoder->base.base.id, encoder->base.name);
			return false;
		}
	}

	if (try == max_tries) {
		intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
		drm_err(&i915->drm,
			"[ENCODER:%d:%s] Max loop count reached\n",
			encoder->base.base.id, encoder->base.name);
		return false;
	}

	for (;;) {
		if (time_after(jiffies, deadline))
			timeout = true; /* try one last time after deadline */

		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
			drm_err(&i915->drm,
				"[ENCODER:%d:%s] Failed to read link status\n",
				encoder->base.base.id, encoder->base.name);
			return false;
		}

		if (drm_dp_128b132b_link_training_failed(link_status)) {
			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
			drm_err(&i915->drm,
				"[ENCODER:%d:%s] Downstream link training failure\n",
				encoder->base.base.id, encoder->base.name);
			return false;
		}

		if (drm_dp_128b132b_eq_interlane_align_done(link_status)) {
			drm_dbg_kms(&i915->drm,
				    "[ENCODER:%d:%s] Interlane align done\n",
				    encoder->base.base.id, encoder->base.name);
			break;
		}

		if (timeout) {
			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
			drm_err(&i915->drm,
				"[ENCODER:%d:%s] Interlane align timeout\n",
				encoder->base.base.id, encoder->base.name);
			return false;
		}

		usleep_range(2000, 3000);
	}

	return true;
}
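/*
 * 128b/132b link training: LANEx_CDS_DONE sequence.
 */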
static bool
intel_dp_128b132b_lane_cds(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *crtc_state,
			   int lttpr_count)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	u8 link_status[DP_LINK_STATUS_SIZE];
	unsigned long deadline;

	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TRAINING_PATTERN_SET,
			       DP_TRAINING_PATTERN_2_CDS) != 1) {
		drm_err(&i915->drm,
			"[ENCODER:%d:%s] Failed to start 128b/132b TPS2 CDS\n",
			encoder->base.base.id, encoder->base.name);
		return false;
	}

	/* Time budget for the LANEx_CDS_DONE Sequence */
	deadline = jiffies + msecs_to_jiffies_timeout((lttpr_count + 1) * 20);

	for (;;) {
		bool timeout = false;

		if (time_after(jiffies, deadline))
			timeout = true; /* try one last time after deadline */

		usleep_range(2000, 3000);

		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
			drm_err(&i915->drm,
				"[ENCODER:%d:%s] Failed to read link status\n",
				encoder->base.base.id, encoder->base.name);
			return false;
		}

		if (drm_dp_128b132b_eq_interlane_align_done(link_status) &&
		    drm_dp_128b132b_cds_interlane_align_done(link_status) &&
		    drm_dp_128b132b_lane_symbol_locked(link_status, crtc_state->lane_count)) {
			drm_dbg_kms(&i915->drm,
				    "[ENCODER:%d:%s] CDS interlane align done\n",
				    encoder->base.base.id, encoder->base.name);
			break;
		}

		if (drm_dp_128b132b_link_training_failed(link_status)) {
			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
			drm_err(&i915->drm,
				"[ENCODER:%d:%s] Downstream link training failure\n",
				encoder->base.base.id, encoder->base.name);
			return false;
		}

		if (timeout) {
			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
			drm_err(&i915->drm,
				"[ENCODER:%d:%s] CDS timeout\n",
				encoder->base.base.id, encoder->base.name);
			return false;
		}
	}

	if (intel_dp->set_idle_link_train)
		intel_dp->set_idle_link_train(intel_dp, crtc_state);

	return true;
}
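/*
 * 128b/132b link training sequence: LANEx_EQ_DONE followed by LANEx_CDS_DONE.
 */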
static bool
intel_dp_128b132b_link_train(struct intel_dp *intel_dp,
			     const struct intel_crtc_state *crtc_state,
			     int lttpr_count)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	bool passed = false;

	if (wait_for(intel_dp_128b132b_intra_hop(intel_dp, crtc_state) == 0, 500)) {
		drm_err(&i915->drm,
			"[ENCODER:%d:%s] 128b/132b intra-hop not clear\n",
			encoder->base.base.id, encoder->base.name);
		return false;
	}

	if (intel_dp_128b132b_lane_eq(intel_dp, crtc_state) &&
	    intel_dp_128b132b_lane_cds(intel_dp, crtc_state, lttpr_count))
		passed = true;

	drm_dbg_kms(&i915->drm,
		    "[CONNECTOR:%d:%s][ENCODER:%d:%s] 128b/132b Link Training %s at link rate = %d, lane count = %d\n",
		    connector->base.base.id, connector->base.name,
		    encoder->base.base.id, encoder->base.name,
		    passed ? "passed" : "failed",
		    crtc_state->port_clock, crtc_state->lane_count);

	return passed;
}
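/**
 * intel_dp_start_link_train - start link training
 * @intel_dp: DP struct
 * @crtc_state: state for CRTC attached to the encoder
 *
 * Start the link training of the @intel_dp port, scheduling a fallback
 * retraining with reduced link rate/lane parameters if the link training
 * fails.
 * After calling this function intel_dp_stop_link_train() must be called.
 */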
void intel_dp_start_link_train(struct intel_dp *intel_dp,
			       const struct intel_crtc_state *crtc_state)
{
	bool passed;
	/*
	 * TODO: Reiniting LTTPRs here won't be needed once proper connector
	 * HW state readout is added.
	 */
	int lttpr_count = intel_dp_init_lttpr_and_dprx_caps(intel_dp);

	if (lttpr_count < 0)
		/* Still continue with enabling the port and link training. */
		lttpr_count = 0;

	intel_dp_prepare_link_train(intel_dp, crtc_state);

	if (intel_dp_is_uhbr(crtc_state))
		passed = intel_dp_128b132b_link_train(intel_dp, crtc_state, lttpr_count);
	else
		passed = intel_dp_link_train_all_phys(intel_dp, crtc_state, lttpr_count);

	if (!passed)
		intel_dp_schedule_fallback_link_training(intel_dp, crtc_state);
}