0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028 #include <drm/amdgpu_drm.h>
0029 #include <drm/display/drm_dp_helper.h>
0030
0031 #include "amdgpu.h"
0032
0033 #include "atom.h"
0034 #include "atom-bits.h"
0035 #include "atombios_encoders.h"
0036 #include "atombios_dp.h"
0037 #include "amdgpu_connectors.h"
0038 #include "amdgpu_atombios.h"
0039
0040
0041 #define DP_LINK_CONFIGURATION_SIZE 9
0042 #define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE
0043
/* Debug-print names for the DP voltage-swing levels, indexed by the
 * DP_TRAIN_VOLTAGE_SWING_* level number (0.4V .. 1.2V).  These are only
 * ever read, so make both the pointers and the strings const.
 */
static const char * const voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
/* Debug-print names for the pre-emphasis levels, same indexing scheme. */
static const char * const pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
0050
0051
0052
/* Parameter block for the ProcessAuxChannelTransaction atom command table.
 * The v1 vs v2 layout depends on the VBIOS interface revision; this file
 * fills the v2 fields and reads the reply length back through v1.
 */
union aux_channel_transaction {
	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
};
0057
/* Run one raw DP AUX transaction through the VBIOS's
 * ProcessAuxChannelTransaction command table.
 *
 * @chan:       aux/i2c channel to use
 * @send:       request bytes (AUX header + payload)
 * @send_bytes: length of @send
 * @recv:       reply payload buffer, may be NULL
 * @recv_size:  capacity of @recv
 * @delay:      delay in usec (the table takes it in 10us units)
 * @ack:        out: raw AUX reply status byte from the table
 *
 * Returns the number of reply bytes copied into @recv on success,
 * -ETIMEDOUT when the sink did not answer, or -EIO on error replies.
 */
static int amdgpu_atombios_dp_process_aux_ch(struct amdgpu_i2c_chan *chan,
					     u8 *send, int send_bytes,
					     u8 *recv, int recv_size,
					     u8 delay, u8 *ack)
{
	struct drm_device *dev = chan->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	union aux_channel_transaction args;
	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
	unsigned char *base;
	int recv_bytes;
	int r = 0;

	memset(&args, 0, sizeof(args));

	/* serialize access to the channel and the shared scratch area */
	mutex_lock(&chan->mutex);

	/* request and reply data live in the atom BIOS scratch space */
	base = (unsigned char *)(adev->mode_info.atom_context->scratch + 1);

	amdgpu_atombios_copy_swap(base, send, send_bytes, true);

	/* scratch-relative offsets of the request and the reply data */
	args.v2.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
	args.v2.lpDataOut = cpu_to_le16((u16)(16 + 4));
	args.v2.ucDataOutLen = 0;
	args.v2.ucChannelID = chan->rec.i2c_id;
	args.v2.ucDelay = delay / 10;	/* table expects 10us units */
	args.v2.ucHPD_ID = chan->rec.hpd;

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

	/* always hand the raw reply status back to the caller */
	*ack = args.v2.ucReplyStatus;

	/* timeout: the sink never replied */
	if (args.v2.ucReplyStatus == 1) {
		r = -ETIMEDOUT;
		goto done;
	}

	/* flags not zero: the table rejected the request */
	if (args.v2.ucReplyStatus == 2) {
		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
		r = -EIO;
		goto done;
	}

	/* generic AUX channel error */
	if (args.v2.ucReplyStatus == 3) {
		DRM_DEBUG_KMS("dp_aux_ch error\n");
		r = -EIO;
		goto done;
	}

	/* clamp the reply length to the caller's buffer size */
	recv_bytes = args.v1.ucDataOutLen;
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	if (recv && recv_size)
		amdgpu_atombios_copy_swap(recv, base + 16, recv_bytes, false);

	r = recv_bytes;
done:
	mutex_unlock(&chan->mutex);

	return r;
}
0123
0124 #define BARE_ADDRESS_SIZE 3
0125 #define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)
0126
/* drm_dp_aux .transfer hook: encode a native or i2c-over-AUX request into
 * the 4-byte AUX header layout the VBIOS table expects and execute it via
 * amdgpu_atombios_dp_process_aux_ch().
 *
 * Returns the number of payload bytes transferred or a negative errno; on
 * success msg->reply receives the AUX/I2C reply status nibble.
 */
static ssize_t
amdgpu_atombios_dp_aux_transfer(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
{
	struct amdgpu_i2c_chan *chan =
		container_of(aux, struct amdgpu_i2c_chan, aux);
	int ret;
	u8 tx_buf[20];
	size_t tx_size;
	u8 ack, delay = 0;

	/* the scratch area only carries up to 16 payload bytes */
	if (WARN_ON(msg->size > 16))
		return -E2BIG;

	/* AUX header: 20-bit address, 4-bit request, length minus one */
	tx_buf[0] = msg->address & 0xff;
	tx_buf[1] = msg->address >> 8;
	tx_buf[2] = (msg->request << 4) |
		((msg->address >> 16) & 0xf);
	tx_buf[3] = msg->size ? (msg->size - 1) : 0;

	switch (msg->request & ~DP_AUX_I2C_MOT) {
	case DP_AUX_NATIVE_WRITE:
	case DP_AUX_I2C_WRITE:
		/* The upper nibble of byte 3 carries the total transaction
		 * size for the hw; a size-0 write is an address-only
		 * transaction (bare address, no payload).
		 */
		tx_size = HEADER_SIZE + msg->size;
		if (msg->size == 0)
			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
		else
			tx_buf[3] |= tx_size << 4;
		memcpy(tx_buf + HEADER_SIZE, msg->buffer, msg->size);
		ret = amdgpu_atombios_dp_process_aux_ch(chan,
							tx_buf, tx_size, NULL, 0, delay, &ack);
		if (ret >= 0)
			/* writes return the payload size, not the reply size */
			ret = msg->size;
		break;
	case DP_AUX_NATIVE_READ:
	case DP_AUX_I2C_READ:
		/* reads send only the header; the reply carries the data */
		tx_size = HEADER_SIZE;
		if (msg->size == 0)
			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
		else
			tx_buf[3] |= tx_size << 4;
		ret = amdgpu_atombios_dp_process_aux_ch(chan,
							tx_buf, tx_size, msg->buffer, msg->size, delay, &ack);
		break;
	default:
		ret = -EINVAL;
		break;
	}

	if (ret >= 0)
		msg->reply = ack >> 4;

	return ret;
}
0187
/* Register this connector's ddc_bus with the drm_dp_aux framework so that
 * the core DPCD helpers route through the atombios AUX transfer path.
 */
void amdgpu_atombios_dp_aux_init(struct amdgpu_connector *amdgpu_connector)
{
	amdgpu_connector->ddc_bus->rec.hpd = amdgpu_connector->hpd.hpd;
	amdgpu_connector->ddc_bus->aux.transfer = amdgpu_atombios_dp_aux_transfer;
	amdgpu_connector->ddc_bus->aux.drm_dev = amdgpu_connector->base.dev;

	drm_dp_aux_init(&amdgpu_connector->ddc_bus->aux);
	amdgpu_connector->ddc_bus->has_aux = true;
}
0197
0198
0199
0200 #define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3
0201 #define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPH_LEVEL_3
0202
0203 static void amdgpu_atombios_dp_get_adjust_train(const u8 link_status[DP_LINK_STATUS_SIZE],
0204 int lane_count,
0205 u8 train_set[4])
0206 {
0207 u8 v = 0;
0208 u8 p = 0;
0209 int lane;
0210
0211 for (lane = 0; lane < lane_count; lane++) {
0212 u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
0213 u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
0214
0215 DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
0216 lane,
0217 voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
0218 pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
0219
0220 if (this_v > v)
0221 v = this_v;
0222 if (this_p > p)
0223 p = this_p;
0224 }
0225
0226 if (v >= DP_VOLTAGE_MAX)
0227 v |= DP_TRAIN_MAX_SWING_REACHED;
0228
0229 if (p >= DP_PRE_EMPHASIS_MAX)
0230 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
0231
0232 DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
0233 voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
0234 pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
0235
0236 for (lane = 0; lane < 4; lane++)
0237 train_set[lane] = v | p;
0238 }
0239
0240
0241
/* Convert bits-per-component into total bits-per-pixel (three color
 * components); bpc == 0 means "unknown/default" and maps to 24 bpp.
 */
static unsigned amdgpu_atombios_dp_convert_bpc_to_bpp(int bpc)
{
	return bpc ? bpc * 3 : 24;
}
0249
0250
0251
0252 static int amdgpu_atombios_dp_get_dp_link_config(struct drm_connector *connector,
0253 const u8 dpcd[DP_DPCD_SIZE],
0254 unsigned pix_clock,
0255 unsigned *dp_lanes, unsigned *dp_rate)
0256 {
0257 unsigned bpp =
0258 amdgpu_atombios_dp_convert_bpc_to_bpp(amdgpu_connector_get_monitor_bpc(connector));
0259 static const unsigned link_rates[3] = { 162000, 270000, 540000 };
0260 unsigned max_link_rate = drm_dp_max_link_rate(dpcd);
0261 unsigned max_lane_num = drm_dp_max_lane_count(dpcd);
0262 unsigned lane_num, i, max_pix_clock;
0263
0264 if (amdgpu_connector_encoder_get_dp_bridge_encoder_id(connector) ==
0265 ENCODER_OBJECT_ID_NUTMEG) {
0266 for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
0267 max_pix_clock = (lane_num * 270000 * 8) / bpp;
0268 if (max_pix_clock >= pix_clock) {
0269 *dp_lanes = lane_num;
0270 *dp_rate = 270000;
0271 return 0;
0272 }
0273 }
0274 } else {
0275 for (i = 0; i < ARRAY_SIZE(link_rates) && link_rates[i] <= max_link_rate; i++) {
0276 for (lane_num = 1; lane_num <= max_lane_num; lane_num <<= 1) {
0277 max_pix_clock = (lane_num * link_rates[i] * 8) / bpp;
0278 if (max_pix_clock >= pix_clock) {
0279 *dp_lanes = lane_num;
0280 *dp_rate = link_rates[i];
0281 return 0;
0282 }
0283 }
0284 }
0285 }
0286
0287 return -EINVAL;
0288 }
0289
0290 static u8 amdgpu_atombios_dp_encoder_service(struct amdgpu_device *adev,
0291 int action, int dp_clock,
0292 u8 ucconfig, u8 lane_num)
0293 {
0294 DP_ENCODER_SERVICE_PARAMETERS args;
0295 int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
0296
0297 memset(&args, 0, sizeof(args));
0298 args.ucLinkClock = dp_clock / 10;
0299 args.ucConfig = ucconfig;
0300 args.ucAction = action;
0301 args.ucLaneNum = lane_num;
0302 args.ucStatus = 0;
0303
0304 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
0305 return args.ucStatus;
0306 }
0307
0308 u8 amdgpu_atombios_dp_get_sinktype(struct amdgpu_connector *amdgpu_connector)
0309 {
0310 struct drm_device *dev = amdgpu_connector->base.dev;
0311 struct amdgpu_device *adev = drm_to_adev(dev);
0312
0313 return amdgpu_atombios_dp_encoder_service(adev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
0314 amdgpu_connector->ddc_bus->rec.i2c_id, 0);
0315 }
0316
0317 static void amdgpu_atombios_dp_probe_oui(struct amdgpu_connector *amdgpu_connector)
0318 {
0319 struct amdgpu_connector_atom_dig *dig_connector = amdgpu_connector->con_priv;
0320 u8 buf[3];
0321
0322 if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
0323 return;
0324
0325 if (drm_dp_dpcd_read(&amdgpu_connector->ddc_bus->aux, DP_SINK_OUI, buf, 3) == 3)
0326 DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
0327 buf[0], buf[1], buf[2]);
0328
0329 if (drm_dp_dpcd_read(&amdgpu_connector->ddc_bus->aux, DP_BRANCH_OUI, buf, 3) == 3)
0330 DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
0331 buf[0], buf[1], buf[2]);
0332 }
0333
0334 static void amdgpu_atombios_dp_ds_ports(struct amdgpu_connector *amdgpu_connector)
0335 {
0336 struct amdgpu_connector_atom_dig *dig_connector = amdgpu_connector->con_priv;
0337 int ret;
0338
0339 if (dig_connector->dpcd[DP_DPCD_REV] > 0x10) {
0340 ret = drm_dp_dpcd_read(&amdgpu_connector->ddc_bus->aux,
0341 DP_DOWNSTREAM_PORT_0,
0342 dig_connector->downstream_ports,
0343 DP_MAX_DOWNSTREAM_PORTS);
0344 if (ret)
0345 memset(dig_connector->downstream_ports, 0,
0346 DP_MAX_DOWNSTREAM_PORTS);
0347 }
0348 }
0349
0350 int amdgpu_atombios_dp_get_dpcd(struct amdgpu_connector *amdgpu_connector)
0351 {
0352 struct amdgpu_connector_atom_dig *dig_connector = amdgpu_connector->con_priv;
0353 u8 msg[DP_DPCD_SIZE];
0354 int ret;
0355
0356 ret = drm_dp_dpcd_read(&amdgpu_connector->ddc_bus->aux, DP_DPCD_REV,
0357 msg, DP_DPCD_SIZE);
0358 if (ret == DP_DPCD_SIZE) {
0359 memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE);
0360
0361 DRM_DEBUG_KMS("DPCD: %*ph\n", (int)sizeof(dig_connector->dpcd),
0362 dig_connector->dpcd);
0363
0364 amdgpu_atombios_dp_probe_oui(amdgpu_connector);
0365 amdgpu_atombios_dp_ds_ports(amdgpu_connector);
0366 return 0;
0367 }
0368
0369 dig_connector->dpcd[0] = 0;
0370 return -EINVAL;
0371 }
0372
0373 int amdgpu_atombios_dp_get_panel_mode(struct drm_encoder *encoder,
0374 struct drm_connector *connector)
0375 {
0376 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
0377 int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
0378 u16 dp_bridge = amdgpu_connector_encoder_get_dp_bridge_encoder_id(connector);
0379 u8 tmp;
0380
0381 if (!amdgpu_connector->con_priv)
0382 return panel_mode;
0383
0384 if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
0385
0386 if (drm_dp_dpcd_readb(&amdgpu_connector->ddc_bus->aux,
0387 DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
0388 if (tmp & 1)
0389 panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
0390 else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
0391 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
0392 panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
0393 else
0394 panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
0395 }
0396 } else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
0397
0398 if (drm_dp_dpcd_readb(&amdgpu_connector->ddc_bus->aux,
0399 DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
0400 if (tmp & 1)
0401 panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
0402 }
0403 }
0404
0405 return panel_mode;
0406 }
0407
0408 void amdgpu_atombios_dp_set_link_config(struct drm_connector *connector,
0409 const struct drm_display_mode *mode)
0410 {
0411 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
0412 struct amdgpu_connector_atom_dig *dig_connector;
0413 int ret;
0414
0415 if (!amdgpu_connector->con_priv)
0416 return;
0417 dig_connector = amdgpu_connector->con_priv;
0418
0419 if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
0420 (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
0421 ret = amdgpu_atombios_dp_get_dp_link_config(connector, dig_connector->dpcd,
0422 mode->clock,
0423 &dig_connector->dp_lane_count,
0424 &dig_connector->dp_clock);
0425 if (ret) {
0426 dig_connector->dp_clock = 0;
0427 dig_connector->dp_lane_count = 0;
0428 }
0429 }
0430 }
0431
0432 int amdgpu_atombios_dp_mode_valid_helper(struct drm_connector *connector,
0433 struct drm_display_mode *mode)
0434 {
0435 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
0436 struct amdgpu_connector_atom_dig *dig_connector;
0437 unsigned dp_lanes, dp_clock;
0438 int ret;
0439
0440 if (!amdgpu_connector->con_priv)
0441 return MODE_CLOCK_HIGH;
0442 dig_connector = amdgpu_connector->con_priv;
0443
0444 ret = amdgpu_atombios_dp_get_dp_link_config(connector, dig_connector->dpcd,
0445 mode->clock, &dp_lanes, &dp_clock);
0446 if (ret)
0447 return MODE_CLOCK_HIGH;
0448
0449 if ((dp_clock == 540000) &&
0450 (!amdgpu_connector_is_dp12_capable(connector)))
0451 return MODE_CLOCK_HIGH;
0452
0453 return MODE_OK;
0454 }
0455
0456 bool amdgpu_atombios_dp_needs_link_train(struct amdgpu_connector *amdgpu_connector)
0457 {
0458 u8 link_status[DP_LINK_STATUS_SIZE];
0459 struct amdgpu_connector_atom_dig *dig = amdgpu_connector->con_priv;
0460
0461 if (drm_dp_dpcd_read_link_status(&amdgpu_connector->ddc_bus->aux, link_status)
0462 <= 0)
0463 return false;
0464 if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count))
0465 return false;
0466 return true;
0467 }
0468
0469 void amdgpu_atombios_dp_set_rx_power_state(struct drm_connector *connector,
0470 u8 power_state)
0471 {
0472 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
0473 struct amdgpu_connector_atom_dig *dig_connector;
0474
0475 if (!amdgpu_connector->con_priv)
0476 return;
0477
0478 dig_connector = amdgpu_connector->con_priv;
0479
0480
0481 if (dig_connector->dpcd[0] >= 0x11) {
0482 drm_dp_dpcd_writeb(&amdgpu_connector->ddc_bus->aux,
0483 DP_SET_POWER, power_state);
0484 usleep_range(1000, 2000);
0485 }
0486 }
0487
/* Everything the link-training phases need, gathered up front so the
 * clock-recovery and channel-eq loops do not have to chase the encoder
 * and connector private structures.
 */
struct amdgpu_atombios_dp_link_train_info {
	struct amdgpu_device *adev;
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	int dp_clock;				/* link rate in kHz (e.g. 270000) */
	int dp_lane_count;			/* number of active lanes */
	bool tp3_supported;			/* sink supports training pattern 3 */
	u8 dpcd[DP_RECEIVER_CAP_SIZE];		/* cached receiver capabilities */
	u8 train_set[4];			/* per-lane vs/pre-emph drive settings */
	u8 link_status[DP_LINK_STATUS_SIZE];	/* last DPCD status read */
	u8 tries;				/* retry counter for the current phase */
	struct drm_dp_aux *aux;
};
0501
/* Push the current train_set to both ends of the link: program the GPU
 * transmitter's drive levels, then mirror the per-lane settings into the
 * sink's DP_TRAINING_LANEx_SET registers.
 */
static void
amdgpu_atombios_dp_update_vs_emph(struct amdgpu_atombios_dp_link_train_info *dp_info)
{
	/* source side: all lanes share train_set[0] via the atom table */
	amdgpu_atombios_encoder_setup_dig_transmitter(dp_info->encoder,
						      ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
						      0, dp_info->train_set[0]);

	/* sink side: one byte per active lane */
	drm_dp_dpcd_write(dp_info->aux, DP_TRAINING_LANE0_SET,
			  dp_info->train_set, dp_info->dp_lane_count);
}
0514
0515 static void
0516 amdgpu_atombios_dp_set_tp(struct amdgpu_atombios_dp_link_train_info *dp_info, int tp)
0517 {
0518 int rtp = 0;
0519
0520
0521 switch (tp) {
0522 case DP_TRAINING_PATTERN_1:
0523 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
0524 break;
0525 case DP_TRAINING_PATTERN_2:
0526 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
0527 break;
0528 case DP_TRAINING_PATTERN_3:
0529 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
0530 break;
0531 }
0532 amdgpu_atombios_encoder_setup_dig_encoder(dp_info->encoder, rtp, 0);
0533
0534
0535 drm_dp_dpcd_writeb(dp_info->aux, DP_TRAINING_PATTERN_SET, tp);
0536 }
0537
/* Prepare both ends of the link for training: wake the sink, program
 * downspread and panel mode, set lane count and link rate, start the
 * source's training state machine, and make sure no training pattern is
 * currently selected on the sink.  Always returns 0.
 */
static int
amdgpu_atombios_dp_link_train_init(struct amdgpu_atombios_dp_link_train_info *dp_info)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(dp_info->encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	u8 tmp;

	/* power up the sink */
	amdgpu_atombios_dp_set_rx_power_state(dp_info->connector, DP_SET_POWER_D0);

	/* enable downspread on the sink if it advertises it
	 * (dpcd[3] is the MAX_DOWNSPREAD capability byte)
	 */
	if (dp_info->dpcd[3] & 0x1)
		drm_dp_dpcd_writeb(dp_info->aux,
				   DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
	else
		drm_dp_dpcd_writeb(dp_info->aux,
				   DP_DOWNSPREAD_CTRL, 0);

	/* internal DP2 panels need the alternate framing mode set */
	if (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)
		drm_dp_dpcd_writeb(dp_info->aux, DP_EDP_CONFIGURATION_SET, 1);

	/* set the lane count on the sink */
	tmp = dp_info->dp_lane_count;
	if (drm_dp_enhanced_frame_cap(dp_info->dpcd))
		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	drm_dp_dpcd_writeb(dp_info->aux, DP_LANE_COUNT_SET, tmp);

	/* set the link rate on the sink */
	tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock);
	drm_dp_dpcd_writeb(dp_info->aux, DP_LINK_BW_SET, tmp);

	/* start training on the source */
	amdgpu_atombios_encoder_setup_dig_encoder(dp_info->encoder,
						  ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);

	/* disable the training pattern on the sink */
	drm_dp_dpcd_writeb(dp_info->aux,
			   DP_TRAINING_PATTERN_SET,
			   DP_TRAINING_PATTERN_DISABLE);

	return 0;
}
0580
/* End the training sequence: clear the training pattern on the sink and
 * tell the source encoder that training is complete.  Always returns 0.
 */
static int
amdgpu_atombios_dp_link_train_finish(struct amdgpu_atombios_dp_link_train_info *dp_info)
{
	udelay(400);

	/* disable the training pattern on the sink */
	drm_dp_dpcd_writeb(dp_info->aux,
			   DP_TRAINING_PATTERN_SET,
			   DP_TRAINING_PATTERN_DISABLE);

	/* disable the training pattern on the source */
	amdgpu_atombios_encoder_setup_dig_encoder(dp_info->encoder,
						  ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);

	return 0;
}
0597
/* Clock-recovery training phase: transmit TPS1 and keep applying the
 * sink's requested voltage-swing/pre-emphasis adjustments until every
 * active lane reports CR done.  Gives up when all lanes hit maximum
 * swing or after 5 consecutive tries at the same voltage.  Returns 0 on
 * success, -1 on failure.
 */
static int
amdgpu_atombios_dp_link_train_cr(struct amdgpu_atombios_dp_link_train_info *dp_info)
{
	bool clock_recovery;
	u8 voltage;
	int i;

	amdgpu_atombios_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
	memset(dp_info->train_set, 0, 4);
	amdgpu_atombios_dp_update_vs_emph(dp_info);

	udelay(400);

	/* clock recovery loop */
	clock_recovery = false;
	dp_info->tries = 0;
	voltage = 0xff;		/* impossible value, forces a first-pass mismatch */
	while (1) {
		drm_dp_link_train_clock_recovery_delay(dp_info->aux, dp_info->dpcd);

		if (drm_dp_dpcd_read_link_status(dp_info->aux,
						 dp_info->link_status) <= 0) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			clock_recovery = true;
			break;
		}

		/* all lanes already at maximum swing? nothing left to try */
		for (i = 0; i < dp_info->dp_lane_count; i++) {
			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		}
		if (i == dp_info->dp_lane_count) {
			DRM_ERROR("clock recovery reached max voltage\n");
			break;
		}

		/* same voltage requested again: count against the retry cap */
		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++dp_info->tries;
			if (dp_info->tries == 5) {
				DRM_ERROR("clock recovery tried 5 times\n");
				break;
			}
		} else
			dp_info->tries = 0;

		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* compute the new train_set as requested by the sink */
		amdgpu_atombios_dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count,
						    dp_info->train_set);

		amdgpu_atombios_dp_update_vs_emph(dp_info);
	}
	if (!clock_recovery) {
		DRM_ERROR("clock recovery failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
			      DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}
0666
/* Channel-equalization training phase: transmit TPS3 when the sink
 * supports it (TPS2 otherwise) and keep applying the sink's drive
 * adjustments until all active lanes report channel-eq done, capped at
 * 5 retries.  Returns 0 on success, -1 on failure.
 */
static int
amdgpu_atombios_dp_link_train_ce(struct amdgpu_atombios_dp_link_train_info *dp_info)
{
	bool channel_eq;

	if (dp_info->tp3_supported)
		amdgpu_atombios_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
	else
		amdgpu_atombios_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);

	/* channel equalization loop */
	dp_info->tries = 0;
	channel_eq = false;
	while (1) {
		drm_dp_link_train_channel_eq_delay(dp_info->aux, dp_info->dpcd);

		if (drm_dp_dpcd_read_link_status(dp_info->aux,
						 dp_info->link_status) <= 0) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			channel_eq = true;
			break;
		}

		/* try 5 times, then give up */
		if (dp_info->tries > 5) {
			DRM_ERROR("channel eq failed: 5 tries\n");
			break;
		}

		/* compute the new train_set as requested by the sink */
		amdgpu_atombios_dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count,
						    dp_info->train_set);

		amdgpu_atombios_dp_update_vs_emph(dp_info);
		dp_info->tries++;
	}

	if (!channel_eq) {
		DRM_ERROR("channel eq failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
			      >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}
0719
/* Top-level DP link-training entry point.  Gathers the cached link
 * parameters into a train_info, then runs init -> clock recovery ->
 * channel equalization; the finish step always runs so the link is left
 * in a defined state even when an earlier phase fails.
 */
void amdgpu_atombios_dp_link_train(struct drm_encoder *encoder,
				   struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;
	struct amdgpu_atombios_dp_link_train_info dp_info;
	u8 tmp;

	if (!amdgpu_encoder->enc_priv)
		return;

	amdgpu_connector = to_amdgpu_connector(connector);
	if (!amdgpu_connector->con_priv)
		return;
	dig_connector = amdgpu_connector->con_priv;

	/* only DP and eDP sinks get trained */
	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
		return;

	/* TPS3 support is advertised in the MAX_LANE_COUNT register */
	if (drm_dp_dpcd_readb(&amdgpu_connector->ddc_bus->aux, DP_MAX_LANE_COUNT, &tmp)
	    == 1) {
		if (tmp & DP_TPS3_SUPPORTED)
			dp_info.tp3_supported = true;
		else
			dp_info.tp3_supported = false;
	} else {
		dp_info.tp3_supported = false;
	}

	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
	dp_info.adev = adev;
	dp_info.encoder = encoder;
	dp_info.connector = connector;
	dp_info.dp_lane_count = dig_connector->dp_lane_count;
	dp_info.dp_clock = dig_connector->dp_clock;
	dp_info.aux = &amdgpu_connector->ddc_bus->aux;

	if (amdgpu_atombios_dp_link_train_init(&dp_info))
		goto done;
	if (amdgpu_atombios_dp_link_train_cr(&dp_info))
		goto done;
	if (amdgpu_atombios_dp_link_train_ce(&dp_info))
		goto done;
done:
	/* always tear down the training patterns, even on failure */
	if (amdgpu_atombios_dp_link_train_finish(&dp_info))
		return;
}