// SPDX-License-Identifier: MIT
/*
 * Copyright (C) 2017 Google, Inc.
 * Copyright (C) 2017-2019, Intel Corporation.
 *
 * Authors:
 * Sean Paul <seanpaul@chromium.org>
 * Ramalingam C <ramalingam.c@intel.com>
 */

0011 #include <linux/component.h>
0012 #include <linux/i2c.h>
0013 #include <linux/random.h>
0014
0015 #include <drm/display/drm_hdcp_helper.h>
0016 #include <drm/i915_component.h>
0017
0018 #include "i915_drv.h"
0019 #include "i915_reg.h"
0020 #include "intel_connector.h"
0021 #include "intel_de.h"
0022 #include "intel_display_power.h"
0023 #include "intel_display_power_well.h"
0024 #include "intel_display_types.h"
0025 #include "intel_hdcp.h"
0026 #include "intel_pcode.h"
0027
0028 #define KEY_LOAD_TRIES 5
0029 #define HDCP2_LC_RETRY_CNT 3
0030
0031 static int intel_conn_to_vcpi(struct intel_connector *connector)
0032 {
/* For HDMI this is forced to be 0x0. For DP SST also this is 0x0. */
0034 return connector->port ? connector->port->vcpi.vcpi : 0;
0035 }
0036
/*
 * Build the list of content streams that must be protected on this digital
 * port. In a DP MST topology every connected connector sharing the port
 * contributes a stream, and Type 1 content is only requested when the whole
 * topology is Type 1 capable; otherwise Type 0 is enforced for every stream.
 */
0047 static int
0048 intel_hdcp_required_content_stream(struct intel_digital_port *dig_port)
0049 {
0050 struct drm_connector_list_iter conn_iter;
0051 struct intel_digital_port *conn_dig_port;
0052 struct intel_connector *connector;
0053 struct drm_i915_private *i915 = to_i915(dig_port->base.base.dev);
0054 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
0055 bool enforce_type0 = false;
0056 int k;
0057
0058 data->k = 0;
0059
0060 if (dig_port->hdcp_auth_status)
0061 return 0;
0062
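/* Collect the VCPI stream id of every connected MST connector on this port */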
0063 drm_connector_list_iter_begin(&i915->drm, &conn_iter);
0064 for_each_intel_connector_iter(connector, &conn_iter) {
0065 if (connector->base.status == connector_status_disconnected)
0066 continue;
0067
0068 if (!intel_encoder_is_mst(intel_attached_encoder(connector)))
0069 continue;
0070
0071 conn_dig_port = intel_attached_dig_port(connector);
0072 if (conn_dig_port != dig_port)
0073 continue;
0074
0075 if (!enforce_type0 && !dig_port->hdcp_mst_type1_capable)
0076 enforce_type0 = true;
0077
0078 data->streams[data->k].stream_id = intel_conn_to_vcpi(connector);
0079 data->k++;

/* if there is only one active stream */
0082 if (dig_port->dp.active_mst_links <= 1)
0083 break;
0084 }
0085 drm_connector_list_iter_end(&conn_iter);
0086
0087 if (drm_WARN_ON(&i915->drm, data->k > INTEL_NUM_PIPES(i915) || data->k == 0))
0088 return -EINVAL;
0089
/*
 * Apply a common protection level across all streams in the MST topology,
 * using the highest content type supported by every stream.
 */
0094 for (k = 0; k < data->k; k++)
0095 data->streams[k].stream_type =
0096 enforce_type0 ? DRM_MODE_HDCP_CONTENT_TYPE0 : DRM_MODE_HDCP_CONTENT_TYPE1;
0097
0098 return 0;
0099 }
0100
0101 static int intel_hdcp_prepare_streams(struct intel_connector *connector)
0102 {
0103 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
0104 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
0105 struct intel_hdcp *hdcp = &connector->hdcp;
0106 int ret;
0107
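/* SST carries a single stream; for MST the list is built from the topology */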
0108 if (!intel_encoder_is_mst(intel_attached_encoder(connector))) {
0109 data->k = 1;
0110 data->streams[0].stream_type = hdcp->content_type;
0111 } else {
0112 ret = intel_hdcp_required_content_stream(dig_port);
0113 if (ret)
0114 return ret;
0115 }
0116
0117 return 0;
0118 }
0119
0120 static
0121 bool intel_hdcp_is_ksv_valid(u8 *ksv)
0122 {
0123 int i, ones = 0;
/* KSV has 20 1's and 20 0's */
0125 for (i = 0; i < DRM_HDCP_KSV_LEN; i++)
0126 ones += hweight8(ksv[i]);
0127 if (ones != 20)
0128 return false;
0129
0130 return true;
0131 }
0132
0133 static
0134 int intel_hdcp_read_valid_bksv(struct intel_digital_port *dig_port,
0135 const struct intel_hdcp_shim *shim, u8 *bksv)
0136 {
0137 struct drm_i915_private *i915 = to_i915(dig_port->base.base.dev);
0138 int ret, i, tries = 2;
0139

/* HDCP spec states that we must retry the bksv if it is invalid */
0141 for (i = 0; i < tries; i++) {
0142 ret = shim->read_bksv(dig_port, bksv);
0143 if (ret)
0144 return ret;
0145 if (intel_hdcp_is_ksv_valid(bksv))
0146 break;
0147 }
0148 if (i == tries) {
0149 drm_dbg_kms(&i915->drm, "Bksv is invalid\n");
0150 return -ENODEV;
0151 }
0152
0153 return 0;
0154 }
0155
/* Is HDCP1.4 capable on Platform and Sink */
0157 bool intel_hdcp_capable(struct intel_connector *connector)
0158 {
0159 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
0160 const struct intel_hdcp_shim *shim = connector->hdcp.shim;
0161 bool capable = false;
0162 u8 bksv[5];
0163
0164 if (!shim)
0165 return capable;
0166
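/* Prefer the shim's own capability check; otherwise probe by reading a valid Bksv */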
0167 if (shim->hdcp_capable) {
0168 shim->hdcp_capable(dig_port, &capable);
0169 } else {
0170 if (!intel_hdcp_read_valid_bksv(dig_port, shim, bksv))
0171 capable = true;
0172 }
0173
0174 return capable;
0175 }
0176
/* Is HDCP2.2 capable on Platform and Sink */
0178 bool intel_hdcp2_capable(struct intel_connector *connector)
0179 {
0180 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
0181 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
0182 struct intel_hdcp *hdcp = &connector->hdcp;
0183 bool capable = false;
0184
/* I915 support for HDCP2.2 */
0186 if (!hdcp->hdcp2_supported)
0187 return false;
0188
/* MEI interface is solid */
0190 mutex_lock(&dev_priv->hdcp_comp_mutex);
0191 if (!dev_priv->hdcp_comp_added || !dev_priv->hdcp_master) {
0192 mutex_unlock(&dev_priv->hdcp_comp_mutex);
0193 return false;
0194 }
0195 mutex_unlock(&dev_priv->hdcp_comp_mutex);
0196
/* Sink's capability for HDCP2.2 */
0198 hdcp->shim->hdcp_2_2_capable(dig_port, &capable);
0199
0200 return capable;
0201 }
0202
0203 static bool intel_hdcp_in_use(struct drm_i915_private *dev_priv,
0204 enum transcoder cpu_transcoder, enum port port)
0205 {
0206 return intel_de_read(dev_priv,
0207 HDCP_STATUS(dev_priv, cpu_transcoder, port)) &
0208 HDCP_STATUS_ENC;
0209 }
0210
0211 static bool intel_hdcp2_in_use(struct drm_i915_private *dev_priv,
0212 enum transcoder cpu_transcoder, enum port port)
0213 {
0214 return intel_de_read(dev_priv,
0215 HDCP2_STATUS(dev_priv, cpu_transcoder, port)) &
0216 LINK_ENCRYPTION_STATUS;
0217 }
0218
0219 static int intel_hdcp_poll_ksv_fifo(struct intel_digital_port *dig_port,
0220 const struct intel_hdcp_shim *shim)
0221 {
0222 int ret, read_ret;
0223 bool ksv_ready;
0224
/* Poll for ksv list ready (spec says max time allowed is 5s) */
0226 ret = __wait_for(read_ret = shim->read_ksv_ready(dig_port,
0227 &ksv_ready),
0228 read_ret || ksv_ready, 5 * 1000 * 1000, 1000,
0229 100 * 1000);
0230 if (ret)
0231 return ret;
0232 if (read_ret)
0233 return read_ret;
0234 if (!ksv_ready)
0235 return -ETIMEDOUT;
0236
0237 return 0;
0238 }
0239
0240 static bool hdcp_key_loadable(struct drm_i915_private *dev_priv)
0241 {
0242 enum i915_power_well_id id;
0243 intel_wakeref_t wakeref;
0244 bool enabled = false;
0245
/*
 * Key loading requires the global display power well to be enabled on
 * HSW/BDW; on all other platforms it requires power well #1 (PG1).
 */
0250 if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
0251 id = HSW_DISP_PW_GLOBAL;
0252 else
0253 id = SKL_DISP_PW_1;
0254
0255
0256 with_intel_runtime_pm(&dev_priv->runtime_pm, wakeref)
0257 enabled = intel_display_power_well_is_enabled(dev_priv, id);
0258
/*
 * Another requirement for HDCP key loadability is that the PLL for cdclk
 * is enabled. Without an active crtc we won't land here, so we assume
 * that cdclk is already on.
 */

0265 return enabled;
0266 }
0267
0268 static void intel_hdcp_clear_keys(struct drm_i915_private *dev_priv)
0269 {
0270 intel_de_write(dev_priv, HDCP_KEY_CONF, HDCP_CLEAR_KEYS_TRIGGER);
0271 intel_de_write(dev_priv, HDCP_KEY_STATUS,
0272 HDCP_KEY_LOAD_DONE | HDCP_KEY_LOAD_STATUS | HDCP_FUSE_IN_PROGRESS | HDCP_FUSE_ERROR | HDCP_FUSE_DONE);
0273 }
0274
0275 static int intel_hdcp_load_keys(struct drm_i915_private *dev_priv)
0276 {
0277 int ret;
0278 u32 val;
0279
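/* Nothing to do if the keys are already loaded and valid */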
0280 val = intel_de_read(dev_priv, HDCP_KEY_STATUS);
0281 if ((val & HDCP_KEY_LOAD_DONE) && (val & HDCP_KEY_LOAD_STATUS))
0282 return 0;
0283
/*
 * On HSW and BDW, HW loads the HDCP1.4 key when the display comes out
 * of reset. So if the key is not already loaded, it's an error state.
 */
0288 if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
0289 if (!(intel_de_read(dev_priv, HDCP_KEY_STATUS) & HDCP_KEY_LOAD_DONE))
0290 return -ENXIO;
0291
/*
 * Initiate loading the HDCP key from fuses.
 *
 * On BXT+ platforms the HDCP key needs to be loaded by SW. Only display
 * version 9 platforms (minus BXT) differ in the key-load trigger: they
 * use the GT Driver Mailbox interface instead of HDCP_KEY_CONF.
 */
0300 if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv)) {
0301 ret = snb_pcode_write(&dev_priv->uncore, SKL_PCODE_LOAD_HDCP_KEYS, 1);
0302 if (ret) {
0303 drm_err(&dev_priv->drm,
0304 "Failed to initiate HDCP key load (%d)\n",
0305 ret);
0306 return ret;
0307 }
0308 } else {
0309 intel_de_write(dev_priv, HDCP_KEY_CONF, HDCP_KEY_LOAD_TRIGGER);
0310 }
0311
/* Wait for the keys to load (500us) */
0313 ret = __intel_wait_for_register(&dev_priv->uncore, HDCP_KEY_STATUS,
0314 HDCP_KEY_LOAD_DONE, HDCP_KEY_LOAD_DONE,
0315 10, 1, &val);
0316 if (ret)
0317 return ret;
0318 else if (!(val & HDCP_KEY_LOAD_STATUS))
0319 return -ENXIO;
0320
/* Send Aksv over to PCH display for use in authentication */
0322 intel_de_write(dev_priv, HDCP_KEY_CONF, HDCP_AKSV_SEND_TRIGGER);
0323
0324 return 0;
0325 }
0326
0327
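/* Push one 32-bit word of SHA-1 input and wait for the hardware to accept it */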
0328 static int intel_write_sha_text(struct drm_i915_private *dev_priv, u32 sha_text)
0329 {
0330 intel_de_write(dev_priv, HDCP_SHA_TEXT, sha_text);
0331 if (intel_de_wait_for_set(dev_priv, HDCP_REP_CTL, HDCP_SHA1_READY, 1)) {
0332 drm_err(&dev_priv->drm, "Timed out waiting for SHA1 ready\n");
0333 return -ETIMEDOUT;
0334 }
0335 return 0;
0336 }
0337
0338 static
0339 u32 intel_hdcp_get_repeater_ctl(struct drm_i915_private *dev_priv,
0340 enum transcoder cpu_transcoder, enum port port)
0341 {
0342 if (DISPLAY_VER(dev_priv) >= 12) {
0343 switch (cpu_transcoder) {
0344 case TRANSCODER_A:
0345 return HDCP_TRANSA_REP_PRESENT |
0346 HDCP_TRANSA_SHA1_M0;
0347 case TRANSCODER_B:
0348 return HDCP_TRANSB_REP_PRESENT |
0349 HDCP_TRANSB_SHA1_M0;
0350 case TRANSCODER_C:
0351 return HDCP_TRANSC_REP_PRESENT |
0352 HDCP_TRANSC_SHA1_M0;
0353 case TRANSCODER_D:
0354 return HDCP_TRANSD_REP_PRESENT |
0355 HDCP_TRANSD_SHA1_M0;
0356 default:
0357 drm_err(&dev_priv->drm, "Unknown transcoder %d\n",
0358 cpu_transcoder);
0359 return -EINVAL;
0360 }
0361 }
0362
0363 switch (port) {
0364 case PORT_A:
0365 return HDCP_DDIA_REP_PRESENT | HDCP_DDIA_SHA1_M0;
0366 case PORT_B:
0367 return HDCP_DDIB_REP_PRESENT | HDCP_DDIB_SHA1_M0;
0368 case PORT_C:
0369 return HDCP_DDIC_REP_PRESENT | HDCP_DDIC_SHA1_M0;
0370 case PORT_D:
0371 return HDCP_DDID_REP_PRESENT | HDCP_DDID_SHA1_M0;
0372 case PORT_E:
0373 return HDCP_DDIE_REP_PRESENT | HDCP_DDIE_SHA1_M0;
0374 default:
0375 drm_err(&dev_priv->drm, "Unknown port %d\n", port);
0376 return -EINVAL;
0377 }
0378 }
0379
0380 static
0381 int intel_hdcp_validate_v_prime(struct intel_connector *connector,
0382 const struct intel_hdcp_shim *shim,
0383 u8 *ksv_fifo, u8 num_downstream, u8 *bstatus)
0384 {
0385 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
0386 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
0387 enum transcoder cpu_transcoder = connector->hdcp.cpu_transcoder;
0388 enum port port = dig_port->base.port;
0389 u32 vprime, sha_text, sha_leftovers, rep_ctl;
0390 int ret, i, j, sha_idx;
0391
/* Process V' values from the receiver */
0393 for (i = 0; i < DRM_HDCP_V_PRIME_NUM_PARTS; i++) {
0394 ret = shim->read_v_prime_part(dig_port, i, &vprime);
0395 if (ret)
0396 return ret;
0397 intel_de_write(dev_priv, HDCP_SHA_V_PRIME(i), vprime);
0398 }
0399
/*
 * We need to write the concatenation of all device KSVs, BINFO (DP) ||
 * BSTATUS (HDMI), and M0 (which is added via HDCP_REPEATER_CTL). This
 * stream is written via the HDCP_SHA_TEXT register in 32-bit increments.
 * Every 64 bytes, we need to write HDCP_REP_CTL again. sha_idx tracks our
 * progress through the 64 bytes as well as helping us work the 40-bit
 * KSVs through the 32-bit register.
 *
 * NOTE: data passed via HDCP_SHA_TEXT should be big-endian.
 */
0410 sha_idx = 0;
0411 sha_text = 0;
0412 sha_leftovers = 0;
0413 rep_ctl = intel_hdcp_get_repeater_ctl(dev_priv, cpu_transcoder, port);
0414 intel_de_write(dev_priv, HDCP_REP_CTL, rep_ctl | HDCP_SHA1_TEXT_32);
0415 for (i = 0; i < num_downstream; i++) {
0416 unsigned int sha_empty;
0417 u8 *ksv = &ksv_fifo[i * DRM_HDCP_KSV_LEN];

/* Fill up the empty slots in sha_text and write it out */
0420 sha_empty = sizeof(sha_text) - sha_leftovers;
0421 for (j = 0; j < sha_empty; j++) {
0422 u8 off = ((sizeof(sha_text) - j - 1 - sha_leftovers) * 8);
0423 sha_text |= ksv[j] << off;
0424 }
0425
0426 ret = intel_write_sha_text(dev_priv, sha_text);
0427 if (ret < 0)
0428 return ret;
0429
/* Programming guide writes this every 64 bytes */
0431 sha_idx += sizeof(sha_text);
0432 if (!(sha_idx % 64))
0433 intel_de_write(dev_priv, HDCP_REP_CTL,
0434 rep_ctl | HDCP_SHA1_TEXT_32);
0435
/* Store the leftover bytes from the ksv in sha_text */
0437 sha_leftovers = DRM_HDCP_KSV_LEN - sha_empty;
0438 sha_text = 0;
0439 for (j = 0; j < sha_leftovers; j++)
0440 sha_text |= ksv[sha_empty + j] <<
0441 ((sizeof(sha_text) - j - 1) * 8);
0442
/*
 * If we still have room in sha_text for more data, continue.
 * Otherwise, write it out immediately.
 */
0447 if (sizeof(sha_text) > sha_leftovers)
0448 continue;
0449
0450 ret = intel_write_sha_text(dev_priv, sha_text);
0451 if (ret < 0)
0452 return ret;
0453 sha_leftovers = 0;
0454 sha_text = 0;
0455 sha_idx += sizeof(sha_text);
0456 }
0457
/*
 * We need to write BINFO/BSTATUS and M0 now. Depending on how many bytes
 * are leftover from the last ksv, the text and M0 values straddle the
 * 32-bit register boundary differently, hence the four cases below.
 */
0464 if (sha_leftovers == 0) {
/* Write 16 bits of text, 16 bits of M0 */
0466 intel_de_write(dev_priv, HDCP_REP_CTL,
0467 rep_ctl | HDCP_SHA1_TEXT_16);
0468 ret = intel_write_sha_text(dev_priv,
0469 bstatus[0] << 8 | bstatus[1]);
0470 if (ret < 0)
0471 return ret;
0472 sha_idx += sizeof(sha_text);
0473
/* Write 32 bits of M0 */
0475 intel_de_write(dev_priv, HDCP_REP_CTL,
0476 rep_ctl | HDCP_SHA1_TEXT_0);
0477 ret = intel_write_sha_text(dev_priv, 0);
0478 if (ret < 0)
0479 return ret;
0480 sha_idx += sizeof(sha_text);
0481
/* Write 16 bits of M0 */
0483 intel_de_write(dev_priv, HDCP_REP_CTL,
0484 rep_ctl | HDCP_SHA1_TEXT_16);
0485 ret = intel_write_sha_text(dev_priv, 0);
0486 if (ret < 0)
0487 return ret;
0488 sha_idx += sizeof(sha_text);
0489
0490 } else if (sha_leftovers == 1) {
/* Write 24 bits of text, 8 bits of M0 */
0492 intel_de_write(dev_priv, HDCP_REP_CTL,
0493 rep_ctl | HDCP_SHA1_TEXT_24);
0494 sha_text |= bstatus[0] << 16 | bstatus[1] << 8;
/* Only 24 bits of data, must be in the LSBs */
0496 sha_text = (sha_text & 0xffffff00) >> 8;
0497 ret = intel_write_sha_text(dev_priv, sha_text);
0498 if (ret < 0)
0499 return ret;
0500 sha_idx += sizeof(sha_text);
0501
/* Write 32 bits of M0 */
0503 intel_de_write(dev_priv, HDCP_REP_CTL,
0504 rep_ctl | HDCP_SHA1_TEXT_0);
0505 ret = intel_write_sha_text(dev_priv, 0);
0506 if (ret < 0)
0507 return ret;
0508 sha_idx += sizeof(sha_text);
0509
/* Write 24 bits of M0 */
0511 intel_de_write(dev_priv, HDCP_REP_CTL,
0512 rep_ctl | HDCP_SHA1_TEXT_8);
0513 ret = intel_write_sha_text(dev_priv, 0);
0514 if (ret < 0)
0515 return ret;
0516 sha_idx += sizeof(sha_text);
0517
0518 } else if (sha_leftovers == 2) {
/* Write 32 bits of text */
0520 intel_de_write(dev_priv, HDCP_REP_CTL,
0521 rep_ctl | HDCP_SHA1_TEXT_32);
0522 sha_text |= bstatus[0] << 8 | bstatus[1];
0523 ret = intel_write_sha_text(dev_priv, sha_text);
0524 if (ret < 0)
0525 return ret;
0526 sha_idx += sizeof(sha_text);
0527
/* Write 64 bits of M0 */
0529 intel_de_write(dev_priv, HDCP_REP_CTL,
0530 rep_ctl | HDCP_SHA1_TEXT_0);
0531 for (i = 0; i < 2; i++) {
0532 ret = intel_write_sha_text(dev_priv, 0);
0533 if (ret < 0)
0534 return ret;
0535 sha_idx += sizeof(sha_text);
0536 }
0537
/*
 * Terminate the SHA-1 stream by hand. For the other leftover cases
 * this is appended by the hardware.
 */
0542 intel_de_write(dev_priv, HDCP_REP_CTL,
0543 rep_ctl | HDCP_SHA1_TEXT_32);
0544 sha_text = DRM_HDCP_SHA1_TERMINATOR << 24;
0545 ret = intel_write_sha_text(dev_priv, sha_text);
0546 if (ret < 0)
0547 return ret;
0548 sha_idx += sizeof(sha_text);
0549 } else if (sha_leftovers == 3) {
/* Write 32 bits of text */
0551 intel_de_write(dev_priv, HDCP_REP_CTL,
0552 rep_ctl | HDCP_SHA1_TEXT_32);
0553 sha_text |= bstatus[0];
0554 ret = intel_write_sha_text(dev_priv, sha_text);
0555 if (ret < 0)
0556 return ret;
0557 sha_idx += sizeof(sha_text);
0558
/* Write 8 bits of text, 24 bits of M0 */
0560 intel_de_write(dev_priv, HDCP_REP_CTL,
0561 rep_ctl | HDCP_SHA1_TEXT_8);
0562 ret = intel_write_sha_text(dev_priv, bstatus[1]);
0563 if (ret < 0)
0564 return ret;
0565 sha_idx += sizeof(sha_text);
0566
/* Write 32 bits of M0 */
0568 intel_de_write(dev_priv, HDCP_REP_CTL,
0569 rep_ctl | HDCP_SHA1_TEXT_0);
0570 ret = intel_write_sha_text(dev_priv, 0);
0571 if (ret < 0)
0572 return ret;
0573 sha_idx += sizeof(sha_text);
0574
/* Write 8 bits of M0 */
0576 intel_de_write(dev_priv, HDCP_REP_CTL,
0577 rep_ctl | HDCP_SHA1_TEXT_24);
0578 ret = intel_write_sha_text(dev_priv, 0);
0579 if (ret < 0)
0580 return ret;
0581 sha_idx += sizeof(sha_text);
0582 } else {
0583 drm_dbg_kms(&dev_priv->drm, "Invalid number of leftovers %d\n",
0584 sha_leftovers);
0585 return -EINVAL;
0586 }
0587
0588 intel_de_write(dev_priv, HDCP_REP_CTL, rep_ctl | HDCP_SHA1_TEXT_32);

/* Fill up to 64-4 bytes with zeros (leave the last write for length) */
0590 while ((sha_idx % 64) < (64 - sizeof(sha_text))) {
0591 ret = intel_write_sha_text(dev_priv, 0);
0592 if (ret < 0)
0593 return ret;
0594 sha_idx += sizeof(sha_text);
0595 }
0596
/*
 * Last write gets the length of the concatenation in bits. That is:
 *  - 5 bytes per device
 *  - 10 bytes for BINFO/BSTATUS(2), M0(8)
 */
0602 sha_text = (num_downstream * 5 + 10) * 8;
0603 ret = intel_write_sha_text(dev_priv, sha_text);
0604 if (ret < 0)
0605 return ret;
0606
/* Tell the HW we're done with the hash and wait for it to ACK */
0608 intel_de_write(dev_priv, HDCP_REP_CTL,
0609 rep_ctl | HDCP_SHA1_COMPLETE_HASH);
0610 if (intel_de_wait_for_set(dev_priv, HDCP_REP_CTL,
0611 HDCP_SHA1_COMPLETE, 1)) {
0612 drm_err(&dev_priv->drm, "Timed out waiting for SHA1 complete\n");
0613 return -ETIMEDOUT;
0614 }
0615 if (!(intel_de_read(dev_priv, HDCP_REP_CTL) & HDCP_SHA1_V_MATCH)) {
0616 drm_dbg_kms(&dev_priv->drm, "SHA-1 mismatch, HDCP failed\n");
0617 return -ENXIO;
0618 }
0619
0620 return 0;
0621 }
0622
/* Implements Part 2 of the HDCP authorization procedure */
0624 static
0625 int intel_hdcp_auth_downstream(struct intel_connector *connector)
0626 {
0627 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
0628 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
0629 const struct intel_hdcp_shim *shim = connector->hdcp.shim;
0630 u8 bstatus[2], num_downstream, *ksv_fifo;
0631 int ret, i, tries = 3;
0632
0633 ret = intel_hdcp_poll_ksv_fifo(dig_port, shim);
0634 if (ret) {
0635 drm_dbg_kms(&dev_priv->drm,
0636 "KSV list failed to become ready (%d)\n", ret);
0637 return ret;
0638 }
0639
0640 ret = shim->read_bstatus(dig_port, bstatus);
0641 if (ret)
0642 return ret;
0643
0644 if (DRM_HDCP_MAX_DEVICE_EXCEEDED(bstatus[0]) ||
0645 DRM_HDCP_MAX_CASCADE_EXCEEDED(bstatus[1])) {
0646 drm_dbg_kms(&dev_priv->drm, "Max Topology Limit Exceeded\n");
0647 return -EPERM;
0648 }
0649
/*
 * When the repeater reports a device count of 0, the HDCP 1.4 spec allows
 * disabling HDCP encryption. That implies the repeater cannot have its own
 * display. As there is no consumption of encrypted content in a repeater
 * with 0 downstream devices, we fail the authentication.
 */
0657 num_downstream = DRM_HDCP_NUM_DOWNSTREAM(bstatus[0]);
0658 if (num_downstream == 0) {
0659 drm_dbg_kms(&dev_priv->drm,
0660 "Repeater with zero downstream devices\n");
0661 return -EINVAL;
0662 }
0663
0664 ksv_fifo = kcalloc(DRM_HDCP_KSV_LEN, num_downstream, GFP_KERNEL);
0665 if (!ksv_fifo) {
0666 drm_dbg_kms(&dev_priv->drm, "Out of mem: ksv_fifo\n");
0667 return -ENOMEM;
0668 }
0669
0670 ret = shim->read_ksv_fifo(dig_port, num_downstream, ksv_fifo);
0671 if (ret)
0672 goto err;
0673
0674 if (drm_hdcp_check_ksvs_revoked(&dev_priv->drm, ksv_fifo,
0675 num_downstream) > 0) {
0676 drm_err(&dev_priv->drm, "Revoked Ksv(s) in ksv_fifo\n");
0677 ret = -EPERM;
0678 goto err;
0679 }
0680
/*
 * When V' mismatches, the DP spec mandates re-reading V'
 * at least twice.
 */
0685 for (i = 0; i < tries; i++) {
0686 ret = intel_hdcp_validate_v_prime(connector, shim,
0687 ksv_fifo, num_downstream,
0688 bstatus);
0689 if (!ret)
0690 break;
0691 }
0692
0693 if (i == tries) {
0694 drm_dbg_kms(&dev_priv->drm,
0695 "V Prime validation failed.(%d)\n", ret);
0696 goto err;
0697 }
0698
0699 drm_dbg_kms(&dev_priv->drm, "HDCP is enabled (%d downstream devices)\n",
0700 num_downstream);
0701 ret = 0;
0702 err:
0703 kfree(ksv_fifo);
0704 return ret;
0705 }
0706
/* Implements Part 1 of the HDCP authorization procedure */
0708 static int intel_hdcp_auth(struct intel_connector *connector)
0709 {
0710 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
0711 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
0712 struct intel_hdcp *hdcp = &connector->hdcp;
0713 const struct intel_hdcp_shim *shim = hdcp->shim;
0714 enum transcoder cpu_transcoder = connector->hdcp.cpu_transcoder;
0715 enum port port = dig_port->base.port;
0716 unsigned long r0_prime_gen_start;
0717 int ret, i, tries = 2;
0718 union {
0719 u32 reg[2];
0720 u8 shim[DRM_HDCP_AN_LEN];
0721 } an;
0722 union {
0723 u32 reg[2];
0724 u8 shim[DRM_HDCP_KSV_LEN];
0725 } bksv;
0726 union {
0727 u32 reg;
0728 u8 shim[DRM_HDCP_RI_LEN];
0729 } ri;
0730 bool repeater_present, hdcp_capable;

/*
 * Detect whether the display is HDCP capable. Although we check for a
 * valid Bksv below, the HDCP-over-DP spec requires that we check whether
 * the display supports HDCP before we write An. For HDMI displays this
 * is not necessary.
 */
0738 if (shim->hdcp_capable) {
0739 ret = shim->hdcp_capable(dig_port, &hdcp_capable);
0740 if (ret)
0741 return ret;
0742 if (!hdcp_capable) {
0743 drm_dbg_kms(&dev_priv->drm,
0744 "Panel is not HDCP capable\n");
0745 return -EINVAL;
0746 }
0747 }
0748
/* Initialize An with 2 random values and acquire it */
0750 for (i = 0; i < 2; i++)
0751 intel_de_write(dev_priv,
0752 HDCP_ANINIT(dev_priv, cpu_transcoder, port),
0753 get_random_u32());
0754 intel_de_write(dev_priv, HDCP_CONF(dev_priv, cpu_transcoder, port),
0755 HDCP_CONF_CAPTURE_AN);
0756
/* Wait for An to be acquired */
0758 if (intel_de_wait_for_set(dev_priv,
0759 HDCP_STATUS(dev_priv, cpu_transcoder, port),
0760 HDCP_STATUS_AN_READY, 1)) {
0761 drm_err(&dev_priv->drm, "Timed out waiting for An\n");
0762 return -ETIMEDOUT;
0763 }
0764
0765 an.reg[0] = intel_de_read(dev_priv,
0766 HDCP_ANLO(dev_priv, cpu_transcoder, port));
0767 an.reg[1] = intel_de_read(dev_priv,
0768 HDCP_ANHI(dev_priv, cpu_transcoder, port));
0769 ret = shim->write_an_aksv(dig_port, an.shim);
0770 if (ret)
0771 return ret;
0772
0773 r0_prime_gen_start = jiffies;
0774
0775 memset(&bksv, 0, sizeof(bksv));
0776
0777 ret = intel_hdcp_read_valid_bksv(dig_port, shim, bksv.shim);
0778 if (ret < 0)
0779 return ret;
0780
0781 if (drm_hdcp_check_ksvs_revoked(&dev_priv->drm, bksv.shim, 1) > 0) {
0782 drm_err(&dev_priv->drm, "BKSV is revoked\n");
0783 return -EPERM;
0784 }
0785
0786 intel_de_write(dev_priv, HDCP_BKSVLO(dev_priv, cpu_transcoder, port),
0787 bksv.reg[0]);
0788 intel_de_write(dev_priv, HDCP_BKSVHI(dev_priv, cpu_transcoder, port),
0789 bksv.reg[1]);
0790
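/* Learn whether the sink is a repeater and reflect that in REP_CTL before enabling auth */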
0791 ret = shim->repeater_present(dig_port, &repeater_present);
0792 if (ret)
0793 return ret;
0794 if (repeater_present)
0795 intel_de_write(dev_priv, HDCP_REP_CTL,
0796 intel_hdcp_get_repeater_ctl(dev_priv, cpu_transcoder, port));
0797
0798 ret = shim->toggle_signalling(dig_port, cpu_transcoder, true);
0799 if (ret)
0800 return ret;
0801
0802 intel_de_write(dev_priv, HDCP_CONF(dev_priv, cpu_transcoder, port),
0803 HDCP_CONF_AUTH_AND_ENC);
0804
/* Wait for R0 ready */
0806 if (wait_for(intel_de_read(dev_priv, HDCP_STATUS(dev_priv, cpu_transcoder, port)) &
0807 (HDCP_STATUS_R0_READY | HDCP_STATUS_ENC), 1)) {
0808 drm_err(&dev_priv->drm, "Timed out waiting for R0 ready\n");
0809 return -ETIMEDOUT;
0810 }

/*
 * Wait for R0' to become available. The spec says 100ms from Aksv, but
 * some monitors can take longer than this. We'll set the timeout at 300ms
 * just to be sure.
 *
 * On DP there's an R0_READY bit available, but no such bit exists on
 * HDMI. Since the upper bound is the same, we'll use that for HDMI as
 * well.
 */
0821 wait_remaining_ms_from_jiffies(r0_prime_gen_start, 300);
0822
0823 tries = 3;

/*
 * The DP HDCP spec mandates two more attempts to read R0 in case
 * of an R0 mismatch.
 */
0829 for (i = 0; i < tries; i++) {
0830 ri.reg = 0;
0831 ret = shim->read_ri_prime(dig_port, ri.shim);
0832 if (ret)
0833 return ret;
0834 intel_de_write(dev_priv,
0835 HDCP_RPRIME(dev_priv, cpu_transcoder, port),
0836 ri.reg);
0837
/* Wait for Ri prime match */
0839 if (!wait_for(intel_de_read(dev_priv, HDCP_STATUS(dev_priv, cpu_transcoder, port)) &
0840 (HDCP_STATUS_RI_MATCH | HDCP_STATUS_ENC), 1))
0841 break;
0842 }
0843
0844 if (i == tries) {
0845 drm_dbg_kms(&dev_priv->drm,
0846 "Timed out waiting for Ri prime match (%x)\n",
0847 intel_de_read(dev_priv, HDCP_STATUS(dev_priv,
0848 cpu_transcoder, port)));
0849 return -ETIMEDOUT;
0850 }
0851
/* Wait for encryption confirmation */
0853 if (intel_de_wait_for_set(dev_priv,
0854 HDCP_STATUS(dev_priv, cpu_transcoder, port),
0855 HDCP_STATUS_ENC,
0856 HDCP_ENCRYPT_STATUS_CHANGE_TIMEOUT_MS)) {
0857 drm_err(&dev_priv->drm, "Timed out waiting for encryption\n");
0858 return -ETIMEDOUT;
0859 }
0860
/* DP MST Auth Part 1 Step 2.a and Step 2.b */
0862 if (shim->stream_encryption) {
0863 ret = shim->stream_encryption(connector, true);
0864 if (ret) {
0865 drm_err(&dev_priv->drm, "[%s:%d] Failed to enable HDCP 1.4 stream enc\n",
0866 connector->base.name, connector->base.base.id);
0867 return ret;
0868 }
0869 drm_dbg_kms(&dev_priv->drm, "HDCP 1.4 transcoder: %s stream encrypted\n",
0870 transcoder_name(hdcp->stream_transcoder));
0871 }
0872
0873 if (repeater_present)
0874 return intel_hdcp_auth_downstream(connector);
0875
0876 drm_dbg_kms(&dev_priv->drm, "HDCP is enabled (no repeater present)\n");
0877 return 0;
0878 }
0879
0880 static int _intel_hdcp_disable(struct intel_connector *connector)
0881 {
0882 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
0883 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
0884 struct intel_hdcp *hdcp = &connector->hdcp;
0885 enum port port = dig_port->base.port;
0886 enum transcoder cpu_transcoder = hdcp->cpu_transcoder;
0887 u32 repeater_ctl;
0888 int ret;
0889
0890 drm_dbg_kms(&dev_priv->drm, "[%s:%d] HDCP is being disabled...\n",
0891 connector->base.name, connector->base.base.id);
0892
0893 if (hdcp->shim->stream_encryption) {
0894 ret = hdcp->shim->stream_encryption(connector, false);
0895 if (ret) {
0896 drm_err(&dev_priv->drm, "[%s:%d] Failed to disable HDCP 1.4 stream enc\n",
0897 connector->base.name, connector->base.base.id);
0898 return ret;
0899 }
0900 drm_dbg_kms(&dev_priv->drm, "HDCP 1.4 transcoder: %s stream encryption disabled\n",
0901 transcoder_name(hdcp->stream_transcoder));
/*
 * If there are other connectors on this port using HDCP, don't disable
 * it until HDCP encryption has been disabled for all connectors in the
 * MST topology.
 */
0907 if (dig_port->num_hdcp_streams > 0)
0908 return 0;
0909 }
0910
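/* Disable port-level HDCP encryption and wait for the status bits to clear */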
0911 hdcp->hdcp_encrypted = false;
0912 intel_de_write(dev_priv, HDCP_CONF(dev_priv, cpu_transcoder, port), 0);
0913 if (intel_de_wait_for_clear(dev_priv,
0914 HDCP_STATUS(dev_priv, cpu_transcoder, port),
0915 ~0, HDCP_ENCRYPT_STATUS_CHANGE_TIMEOUT_MS)) {
0916 drm_err(&dev_priv->drm,
0917 "Failed to disable HDCP, timeout clearing status\n");
0918 return -ETIMEDOUT;
0919 }
0920
0921 repeater_ctl = intel_hdcp_get_repeater_ctl(dev_priv, cpu_transcoder,
0922 port);
0923 intel_de_write(dev_priv, HDCP_REP_CTL,
0924 intel_de_read(dev_priv, HDCP_REP_CTL) & ~repeater_ctl);
0925
0926 ret = hdcp->shim->toggle_signalling(dig_port, cpu_transcoder, false);
0927 if (ret) {
0928 drm_err(&dev_priv->drm, "Failed to disable HDCP signalling\n");
0929 return ret;
0930 }
0931
0932 drm_dbg_kms(&dev_priv->drm, "HDCP is disabled\n");
0933 return 0;
0934 }
0935
0936 static int _intel_hdcp_enable(struct intel_connector *connector)
0937 {
0938 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
0939 struct intel_hdcp *hdcp = &connector->hdcp;
0940 int i, ret, tries = 3;
0941
0942 drm_dbg_kms(&dev_priv->drm, "[%s:%d] HDCP is being enabled...\n",
0943 connector->base.name, connector->base.base.id);
0944
0945 if (!hdcp_key_loadable(dev_priv)) {
0946 drm_err(&dev_priv->drm, "HDCP key Load is not possible\n");
0947 return -ENXIO;
0948 }
0949
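/* Try to load the HDCP keys, clearing them between failed attempts */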
0950 for (i = 0; i < KEY_LOAD_TRIES; i++) {
0951 ret = intel_hdcp_load_keys(dev_priv);
0952 if (!ret)
0953 break;
0954 intel_hdcp_clear_keys(dev_priv);
0955 }
0956 if (ret) {
0957 drm_err(&dev_priv->drm, "Could not load HDCP keys, (%d)\n",
0958 ret);
0959 return ret;
0960 }
0961
/* In case of authentication failures, the HDCP spec expects reauth. */
0963 for (i = 0; i < tries; i++) {
0964 ret = intel_hdcp_auth(connector);
0965 if (!ret) {
0966 hdcp->hdcp_encrypted = true;
0967 return 0;
0968 }
0969
0970 drm_dbg_kms(&dev_priv->drm, "HDCP Auth failure (%d)\n", ret);
0971
/* Ensure HDCP encryption and signalling are stopped before retrying. */
0973 _intel_hdcp_disable(connector);
0974 }
0975
0976 drm_dbg_kms(&dev_priv->drm,
0977 "HDCP authentication failed (%d tries/%d)\n", tries, ret);
0978 return ret;
0979 }
0980
0981 static struct intel_connector *intel_hdcp_to_connector(struct intel_hdcp *hdcp)
0982 {
0983 return container_of(hdcp, struct intel_connector, hdcp);
0984 }
0985
0986 static void intel_hdcp_update_value(struct intel_connector *connector,
0987 u64 value, bool update_property)
0988 {
0989 struct drm_device *dev = connector->base.dev;
0990 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
0991 struct intel_hdcp *hdcp = &connector->hdcp;
0992
0993 drm_WARN_ON(connector->base.dev, !mutex_is_locked(&hdcp->mutex));
0994
0995 if (hdcp->value == value)
0996 return;
0997
0998 drm_WARN_ON(dev, !mutex_is_locked(&dig_port->hdcp_mutex));
0999
1000 if (hdcp->value == DRM_MODE_CONTENT_PROTECTION_ENABLED) {
1001 if (!drm_WARN_ON(dev, dig_port->num_hdcp_streams == 0))
1002 dig_port->num_hdcp_streams--;
1003 } else if (value == DRM_MODE_CONTENT_PROTECTION_ENABLED) {
1004 dig_port->num_hdcp_streams++;
1005 }
1006
1007 hdcp->value = value;
1008 if (update_property) {
1009 drm_connector_get(&connector->base);
1010 schedule_work(&hdcp->prop_work);
1011 }
1012 }
1013
/* Implements Part 3 of the HDCP authorization procedure */
1015 static int intel_hdcp_check_link(struct intel_connector *connector)
1016 {
1017 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1018 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1019 struct intel_hdcp *hdcp = &connector->hdcp;
1020 enum port port = dig_port->base.port;
1021 enum transcoder cpu_transcoder;
1022 int ret = 0;
1023
1024 mutex_lock(&hdcp->mutex);
1025 mutex_lock(&dig_port->hdcp_mutex);
1026
1027 cpu_transcoder = hdcp->cpu_transcoder;
1028
/* Check_link is valid only when HDCP1.4 is enabled */
1030 if (hdcp->value != DRM_MODE_CONTENT_PROTECTION_ENABLED ||
1031 !hdcp->hdcp_encrypted) {
1032 ret = -EINVAL;
1033 goto out;
1034 }
1035
1036 if (drm_WARN_ON(&dev_priv->drm,
1037 !intel_hdcp_in_use(dev_priv, cpu_transcoder, port))) {
1038 drm_err(&dev_priv->drm,
1039 "%s:%d HDCP link stopped encryption,%x\n",
1040 connector->base.name, connector->base.base.id,
1041 intel_de_read(dev_priv, HDCP_STATUS(dev_priv, cpu_transcoder, port)));
1042 ret = -ENXIO;
1043 intel_hdcp_update_value(connector,
1044 DRM_MODE_CONTENT_PROTECTION_DESIRED,
1045 true);
1046 goto out;
1047 }
1048
1049 if (hdcp->shim->check_link(dig_port, connector)) {
1050 if (hdcp->value != DRM_MODE_CONTENT_PROTECTION_UNDESIRED) {
1051 intel_hdcp_update_value(connector,
1052 DRM_MODE_CONTENT_PROTECTION_ENABLED, true);
1053 }
1054 goto out;
1055 }
1056
1057 drm_dbg_kms(&dev_priv->drm,
1058 "[%s:%d] HDCP link failed, retrying authentication\n",
1059 connector->base.name, connector->base.base.id);
1060
1061 ret = _intel_hdcp_disable(connector);
1062 if (ret) {
1063 drm_err(&dev_priv->drm, "Failed to disable hdcp (%d)\n", ret);
1064 intel_hdcp_update_value(connector,
1065 DRM_MODE_CONTENT_PROTECTION_DESIRED,
1066 true);
1067 goto out;
1068 }
1069
1070 ret = _intel_hdcp_enable(connector);
1071 if (ret) {
1072 drm_err(&dev_priv->drm, "Failed to enable hdcp (%d)\n", ret);
1073 intel_hdcp_update_value(connector,
1074 DRM_MODE_CONTENT_PROTECTION_DESIRED,
1075 true);
1076 goto out;
1077 }
1078
1079 out:
1080 mutex_unlock(&dig_port->hdcp_mutex);
1081 mutex_unlock(&hdcp->mutex);
1082 return ret;
1083 }
1084
1085 static void intel_hdcp_prop_work(struct work_struct *work)
1086 {
1087 struct intel_hdcp *hdcp = container_of(work, struct intel_hdcp,
1088 prop_work);
1089 struct intel_connector *connector = intel_hdcp_to_connector(hdcp);
1090 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1091
1092 drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex, NULL);
1093 mutex_lock(&hdcp->mutex);
1094
/*
 * This worker is only used to flip between ENABLED/DESIRED. Either of
 * those to UNDESIRED is handled by core. If value == UNDESIRED,
 * we're running just after hdcp has been disabled, so just exit.
 */
1100 if (hdcp->value != DRM_MODE_CONTENT_PROTECTION_UNDESIRED)
1101 drm_hdcp_update_content_protection(&connector->base,
1102 hdcp->value);
1103
1104 mutex_unlock(&hdcp->mutex);
1105 drm_modeset_unlock(&dev_priv->drm.mode_config.connection_mutex);
1106
1107 drm_connector_put(&connector->base);
1108 }
1109
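/* HDCP needs platform support; before display version 12 it is also limited to ports A through D */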
1110 bool is_hdcp_supported(struct drm_i915_private *dev_priv, enum port port)
1111 {
1112 return INTEL_INFO(dev_priv)->display.has_hdcp &&
1113 (DISPLAY_VER(dev_priv) >= 12 || port < PORT_E);
1114 }
1115
1116 static int
1117 hdcp2_prepare_ake_init(struct intel_connector *connector,
1118 struct hdcp2_ake_init *ake_data)
1119 {
1120 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1121 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1122 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1123 struct i915_hdcp_comp_master *comp;
1124 int ret;
1125
1126 mutex_lock(&dev_priv->hdcp_comp_mutex);
1127 comp = dev_priv->hdcp_master;
1128
1129 if (!comp || !comp->ops) {
1130 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1131 return -EINVAL;
1132 }
1133
1134 ret = comp->ops->initiate_hdcp2_session(comp->mei_dev, data, ake_data);
1135 if (ret)
1136 drm_dbg_kms(&dev_priv->drm, "Prepare_ake_init failed. %d\n",
1137 ret);
1138 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1139
1140 return ret;
1141 }
1142
1143 static int
1144 hdcp2_verify_rx_cert_prepare_km(struct intel_connector *connector,
1145 struct hdcp2_ake_send_cert *rx_cert,
1146 bool *paired,
1147 struct hdcp2_ake_no_stored_km *ek_pub_km,
1148 size_t *msg_sz)
1149 {
1150 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1151 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1152 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1153 struct i915_hdcp_comp_master *comp;
1154 int ret;
1155
1156 mutex_lock(&dev_priv->hdcp_comp_mutex);
1157 comp = dev_priv->hdcp_master;
1158
1159 if (!comp || !comp->ops) {
1160 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1161 return -EINVAL;
1162 }
1163
1164 ret = comp->ops->verify_receiver_cert_prepare_km(comp->mei_dev, data,
1165 rx_cert, paired,
1166 ek_pub_km, msg_sz);
1167 if (ret < 0)
1168 drm_dbg_kms(&dev_priv->drm, "Verify rx_cert failed. %d\n",
1169 ret);
1170 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1171
1172 return ret;
1173 }
1174
1175 static int hdcp2_verify_hprime(struct intel_connector *connector,
1176 struct hdcp2_ake_send_hprime *rx_hprime)
1177 {
1178 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1179 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1180 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1181 struct i915_hdcp_comp_master *comp;
1182 int ret;
1183
1184 mutex_lock(&dev_priv->hdcp_comp_mutex);
1185 comp = dev_priv->hdcp_master;
1186
1187 if (!comp || !comp->ops) {
1188 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1189 return -EINVAL;
1190 }
1191
1192 ret = comp->ops->verify_hprime(comp->mei_dev, data, rx_hprime);
1193 if (ret < 0)
1194 drm_dbg_kms(&dev_priv->drm, "Verify hprime failed. %d\n", ret);
1195 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1196
1197 return ret;
1198 }
1199
1200 static int
1201 hdcp2_store_pairing_info(struct intel_connector *connector,
1202 struct hdcp2_ake_send_pairing_info *pairing_info)
1203 {
1204 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1205 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1206 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1207 struct i915_hdcp_comp_master *comp;
1208 int ret;
1209
1210 mutex_lock(&dev_priv->hdcp_comp_mutex);
1211 comp = dev_priv->hdcp_master;
1212
1213 if (!comp || !comp->ops) {
1214 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1215 return -EINVAL;
1216 }
1217
1218 ret = comp->ops->store_pairing_info(comp->mei_dev, data, pairing_info);
1219 if (ret < 0)
1220 drm_dbg_kms(&dev_priv->drm, "Store pairing info failed. %d\n",
1221 ret);
1222 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1223
1224 return ret;
1225 }
1226
1227 static int
1228 hdcp2_prepare_lc_init(struct intel_connector *connector,
1229 struct hdcp2_lc_init *lc_init)
1230 {
1231 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1232 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1233 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1234 struct i915_hdcp_comp_master *comp;
1235 int ret;
1236
1237 mutex_lock(&dev_priv->hdcp_comp_mutex);
1238 comp = dev_priv->hdcp_master;
1239
1240 if (!comp || !comp->ops) {
1241 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1242 return -EINVAL;
1243 }
1244
1245 ret = comp->ops->initiate_locality_check(comp->mei_dev, data, lc_init);
1246 if (ret < 0)
1247 drm_dbg_kms(&dev_priv->drm, "Prepare lc_init failed. %d\n",
1248 ret);
1249 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1250
1251 return ret;
1252 }
1253
1254 static int
1255 hdcp2_verify_lprime(struct intel_connector *connector,
1256 struct hdcp2_lc_send_lprime *rx_lprime)
1257 {
1258 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1259 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1260 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1261 struct i915_hdcp_comp_master *comp;
1262 int ret;
1263
1264 mutex_lock(&dev_priv->hdcp_comp_mutex);
1265 comp = dev_priv->hdcp_master;
1266
1267 if (!comp || !comp->ops) {
1268 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1269 return -EINVAL;
1270 }
1271
1272 ret = comp->ops->verify_lprime(comp->mei_dev, data, rx_lprime);
1273 if (ret < 0)
1274 drm_dbg_kms(&dev_priv->drm, "Verify L_Prime failed. %d\n",
1275 ret);
1276 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1277
1278 return ret;
1279 }
1280
1281 static int hdcp2_prepare_skey(struct intel_connector *connector,
1282 struct hdcp2_ske_send_eks *ske_data)
1283 {
1284 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1285 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1286 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1287 struct i915_hdcp_comp_master *comp;
1288 int ret;
1289
1290 mutex_lock(&dev_priv->hdcp_comp_mutex);
1291 comp = dev_priv->hdcp_master;
1292
1293 if (!comp || !comp->ops) {
1294 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1295 return -EINVAL;
1296 }
1297
1298 ret = comp->ops->get_session_key(comp->mei_dev, data, ske_data);
1299 if (ret < 0)
1300 drm_dbg_kms(&dev_priv->drm, "Get session key failed. %d\n",
1301 ret);
1302 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1303
1304 return ret;
1305 }
1306
1307 static int
1308 hdcp2_verify_rep_topology_prepare_ack(struct intel_connector *connector,
1309 struct hdcp2_rep_send_receiverid_list
1310 *rep_topology,
1311 struct hdcp2_rep_send_ack *rep_send_ack)
1312 {
1313 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1314 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1315 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1316 struct i915_hdcp_comp_master *comp;
1317 int ret;
1318
1319 mutex_lock(&dev_priv->hdcp_comp_mutex);
1320 comp = dev_priv->hdcp_master;
1321
1322 if (!comp || !comp->ops) {
1323 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1324 return -EINVAL;
1325 }
1326
1327 ret = comp->ops->repeater_check_flow_prepare_ack(comp->mei_dev, data,
1328 rep_topology,
1329 rep_send_ack);
1330 if (ret < 0)
1331 drm_dbg_kms(&dev_priv->drm,
1332 "Verify rep topology failed. %d\n", ret);
1333 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1334
1335 return ret;
1336 }
1337
1338 static int
1339 hdcp2_verify_mprime(struct intel_connector *connector,
1340 struct hdcp2_rep_stream_ready *stream_ready)
1341 {
1342 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1343 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1344 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1345 struct i915_hdcp_comp_master *comp;
1346 int ret;
1347
1348 mutex_lock(&dev_priv->hdcp_comp_mutex);
1349 comp = dev_priv->hdcp_master;
1350
1351 if (!comp || !comp->ops) {
1352 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1353 return -EINVAL;
1354 }
1355
1356 ret = comp->ops->verify_mprime(comp->mei_dev, data, stream_ready);
1357 if (ret < 0)
1358 drm_dbg_kms(&dev_priv->drm, "Verify mprime failed. %d\n", ret);
1359 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1360
1361 return ret;
1362 }
1363
1364 static int hdcp2_authenticate_port(struct intel_connector *connector)
1365 {
1366 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1367 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1368 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1369 struct i915_hdcp_comp_master *comp;
1370 int ret;
1371
1372 mutex_lock(&dev_priv->hdcp_comp_mutex);
1373 comp = dev_priv->hdcp_master;
1374
1375 if (!comp || !comp->ops) {
1376 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1377 return -EINVAL;
1378 }
1379
1380 ret = comp->ops->enable_hdcp_authentication(comp->mei_dev, data);
1381 if (ret < 0)
1382 drm_dbg_kms(&dev_priv->drm, "Enable hdcp auth failed. %d\n",
1383 ret);
1384 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1385
1386 return ret;
1387 }
1388
1389 static int hdcp2_close_mei_session(struct intel_connector *connector)
1390 {
1391 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1392 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1393 struct i915_hdcp_comp_master *comp;
1394 int ret;
1395
1396 mutex_lock(&dev_priv->hdcp_comp_mutex);
1397 comp = dev_priv->hdcp_master;
1398
1399 if (!comp || !comp->ops) {
1400 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1401 return -EINVAL;
1402 }
1403
1404 ret = comp->ops->close_hdcp_session(comp->mei_dev,
1405 &dig_port->hdcp_port_data);
1406 mutex_unlock(&dev_priv->hdcp_comp_mutex);
1407
1408 return ret;
1409 }
1410
1411 static int hdcp2_deauthenticate_port(struct intel_connector *connector)
1412 {
1413 return hdcp2_close_mei_session(connector);
1414 }
1415
/* Authentication flow starts from here */
1417 static int hdcp2_authentication_key_exchange(struct intel_connector *connector)
1418 {
1419 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1420 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1421 struct intel_hdcp *hdcp = &connector->hdcp;
1422 union {
1423 struct hdcp2_ake_init ake_init;
1424 struct hdcp2_ake_send_cert send_cert;
1425 struct hdcp2_ake_no_stored_km no_stored_km;
1426 struct hdcp2_ake_send_hprime send_hprime;
1427 struct hdcp2_ake_send_pairing_info pairing_info;
1428 } msgs;
1429 const struct intel_hdcp_shim *shim = hdcp->shim;
1430 size_t size;
1431 int ret;
1432
/* Init for seq_num */
1434 hdcp->seq_num_v = 0;
1435 hdcp->seq_num_m = 0;
1436
1437 ret = hdcp2_prepare_ake_init(connector, &msgs.ake_init);
1438 if (ret < 0)
1439 return ret;
1440
1441 ret = shim->write_2_2_msg(dig_port, &msgs.ake_init,
1442 sizeof(msgs.ake_init));
1443 if (ret < 0)
1444 return ret;
1445
1446 ret = shim->read_2_2_msg(dig_port, HDCP_2_2_AKE_SEND_CERT,
1447 &msgs.send_cert, sizeof(msgs.send_cert));
1448 if (ret < 0)
1449 return ret;
1450
1451 if (msgs.send_cert.rx_caps[0] != HDCP_2_2_RX_CAPS_VERSION_VAL) {
1452 drm_dbg_kms(&dev_priv->drm, "cert.rx_caps dont claim HDCP2.2\n");
1453 return -EINVAL;
1454 }
1455
1456 hdcp->is_repeater = HDCP_2_2_RX_REPEATER(msgs.send_cert.rx_caps[2]);
1457
1458 if (drm_hdcp_check_ksvs_revoked(&dev_priv->drm,
1459 msgs.send_cert.cert_rx.receiver_id,
1460 1) > 0) {
1461 drm_err(&dev_priv->drm, "Receiver ID is revoked\n");
1462 return -EPERM;
1463 }
1464
/*
 * msgs.no_stored_km will also hold the message corresponding to a
 * previously stored km, hence the returned msg_sz is used when writing.
 */
1469 ret = hdcp2_verify_rx_cert_prepare_km(connector, &msgs.send_cert,
1470 &hdcp->is_paired,
1471 &msgs.no_stored_km, &size);
1472 if (ret < 0)
1473 return ret;
1474
1475 ret = shim->write_2_2_msg(dig_port, &msgs.no_stored_km, size);
1476 if (ret < 0)
1477 return ret;
1478
1479 ret = shim->read_2_2_msg(dig_port, HDCP_2_2_AKE_SEND_HPRIME,
1480 &msgs.send_hprime, sizeof(msgs.send_hprime));
1481 if (ret < 0)
1482 return ret;
1483
1484 ret = hdcp2_verify_hprime(connector, &msgs.send_hprime);
1485 if (ret < 0)
1486 return ret;
1487
1488 if (!hdcp->is_paired) {
/* Pairing is required */
1490 ret = shim->read_2_2_msg(dig_port,
1491 HDCP_2_2_AKE_SEND_PAIRING_INFO,
1492 &msgs.pairing_info,
1493 sizeof(msgs.pairing_info));
1494 if (ret < 0)
1495 return ret;
1496
1497 ret = hdcp2_store_pairing_info(connector, &msgs.pairing_info);
1498 if (ret < 0)
1499 return ret;
1500 hdcp->is_paired = true;
1501 }
1502
1503 return 0;
1504 }
1505
1506 static int hdcp2_locality_check(struct intel_connector *connector)
1507 {
1508 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1509 struct intel_hdcp *hdcp = &connector->hdcp;
1510 union {
1511 struct hdcp2_lc_init lc_init;
1512 struct hdcp2_lc_send_lprime send_lprime;
1513 } msgs;
1514 const struct intel_hdcp_shim *shim = hdcp->shim;
1515 int tries = HDCP2_LC_RETRY_CNT, ret, i;
1516
1517 for (i = 0; i < tries; i++) {
1518 ret = hdcp2_prepare_lc_init(connector, &msgs.lc_init);
1519 if (ret < 0)
1520 continue;
1521
1522 ret = shim->write_2_2_msg(dig_port, &msgs.lc_init,
1523 sizeof(msgs.lc_init));
1524 if (ret < 0)
1525 continue;
1526
1527 ret = shim->read_2_2_msg(dig_port,
1528 HDCP_2_2_LC_SEND_LPRIME,
1529 &msgs.send_lprime,
1530 sizeof(msgs.send_lprime));
1531 if (ret < 0)
1532 continue;
1533
1534 ret = hdcp2_verify_lprime(connector, &msgs.send_lprime);
1535 if (!ret)
1536 break;
1537 }
1538
1539 return ret;
1540 }
1541
1542 static int hdcp2_session_key_exchange(struct intel_connector *connector)
1543 {
1544 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1545 struct intel_hdcp *hdcp = &connector->hdcp;
1546 struct hdcp2_ske_send_eks send_eks;
1547 int ret;
1548
1549 ret = hdcp2_prepare_skey(connector, &send_eks);
1550 if (ret < 0)
1551 return ret;
1552
1553 ret = hdcp->shim->write_2_2_msg(dig_port, &send_eks,
1554 sizeof(send_eks));
1555 if (ret < 0)
1556 return ret;
1557
1558 return 0;
1559 }
1560
1561 static
1562 int _hdcp2_propagate_stream_management_info(struct intel_connector *connector)
1563 {
1564 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1565 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1566 struct intel_hdcp *hdcp = &connector->hdcp;
1567 union {
1568 struct hdcp2_rep_stream_manage stream_manage;
1569 struct hdcp2_rep_stream_ready stream_ready;
1570 } msgs;
1571 const struct intel_hdcp_shim *shim = hdcp->shim;
1572 int ret, streams_size_delta, i;
1573
1574 if (connector->hdcp.seq_num_m > HDCP_2_2_SEQ_NUM_MAX)
1575 return -ERANGE;
1576
/* Prepare RepeaterAuth_Stream_Manage msg */
1578 msgs.stream_manage.msg_id = HDCP_2_2_REP_STREAM_MANAGE;
1579 drm_hdcp_cpu_to_be24(msgs.stream_manage.seq_num_m, hdcp->seq_num_m);
1580
1581 msgs.stream_manage.k = cpu_to_be16(data->k);
1582
1583 for (i = 0; i < data->k; i++) {
1584 msgs.stream_manage.streams[i].stream_id = data->streams[i].stream_id;
1585 msgs.stream_manage.streams[i].stream_type = data->streams[i].stream_type;
1586 }
1587
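/* Only transmit the stream entries actually in use, not the whole fixed-size array */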
1588 streams_size_delta = (HDCP_2_2_MAX_CONTENT_STREAMS_CNT - data->k) *
1589 sizeof(struct hdcp2_streamid_type);
1590
1591 ret = shim->write_2_2_msg(dig_port, &msgs.stream_manage,
1592 sizeof(msgs.stream_manage) - streams_size_delta);
1593 if (ret < 0)
1594 goto out;
1595
1596 ret = shim->read_2_2_msg(dig_port, HDCP_2_2_REP_STREAM_READY,
1597 &msgs.stream_ready, sizeof(msgs.stream_ready));
1598 if (ret < 0)
1599 goto out;
1600
1601 data->seq_num_m = hdcp->seq_num_m;
1602
1603 ret = hdcp2_verify_mprime(connector, &msgs.stream_ready);
1604
1605 out:
1606 hdcp->seq_num_m++;
1607
1608 return ret;
1609 }
1610
1611 static
1612 int hdcp2_authenticate_repeater_topology(struct intel_connector *connector)
1613 {
1614 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1615 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1616 struct intel_hdcp *hdcp = &connector->hdcp;
1617 union {
1618 struct hdcp2_rep_send_receiverid_list recvid_list;
1619 struct hdcp2_rep_send_ack rep_ack;
1620 } msgs;
1621 const struct intel_hdcp_shim *shim = hdcp->shim;
1622 u32 seq_num_v, device_cnt;
1623 u8 *rx_info;
1624 int ret;
1625
1626 ret = shim->read_2_2_msg(dig_port, HDCP_2_2_REP_SEND_RECVID_LIST,
1627 &msgs.recvid_list, sizeof(msgs.recvid_list));
1628 if (ret < 0)
1629 return ret;
1630
1631 rx_info = msgs.recvid_list.rx_info;
1632
1633 if (HDCP_2_2_MAX_CASCADE_EXCEEDED(rx_info[1]) ||
1634 HDCP_2_2_MAX_DEVS_EXCEEDED(rx_info[1])) {
1635 drm_dbg_kms(&dev_priv->drm, "Topology Max Size Exceeded\n");
1636 return -EINVAL;
1637 }
1638
/*
 * The MST topology is not Type 1 capable if it contains a downstream
 * device that is only HDCP 1.x or a legacy HDCP 2.0/2.1 repeater.
 */
1643 dig_port->hdcp_mst_type1_capable =
1644 !HDCP_2_2_HDCP1_DEVICE_CONNECTED(rx_info[1]) &&
1645 !HDCP_2_2_HDCP_2_0_REP_CONNECTED(rx_info[1]);
1646
/* Convert and store seq_num_v in a local variable as a DWORD */
1648 seq_num_v =
1649 drm_hdcp_be24_to_cpu((const u8 *)msgs.recvid_list.seq_num_v);
1650
1651 if (!hdcp->hdcp2_encrypted && seq_num_v) {
1652 drm_dbg_kms(&dev_priv->drm,
1653 "Non zero Seq_num_v at first RecvId_List msg\n");
1654 return -EINVAL;
1655 }
1656
1657 if (seq_num_v < hdcp->seq_num_v) {
/* Roll over of the seq_num_v from repeater. Reauthenticate. */
1659 drm_dbg_kms(&dev_priv->drm, "Seq_num_v roll over.\n");
1660 return -EINVAL;
1661 }
1662
1663 device_cnt = (HDCP_2_2_DEV_COUNT_HI(rx_info[0]) << 4 |
1664 HDCP_2_2_DEV_COUNT_LO(rx_info[1]));
1665 if (drm_hdcp_check_ksvs_revoked(&dev_priv->drm,
1666 msgs.recvid_list.receiver_ids,
1667 device_cnt) > 0) {
1668 drm_err(&dev_priv->drm, "Revoked receiver ID(s) is in list\n");
1669 return -EPERM;
1670 }
1671
1672 ret = hdcp2_verify_rep_topology_prepare_ack(connector,
1673 &msgs.recvid_list,
1674 &msgs.rep_ack);
1675 if (ret < 0)
1676 return ret;
1677
1678 hdcp->seq_num_v = seq_num_v;
1679 ret = shim->write_2_2_msg(dig_port, &msgs.rep_ack,
1680 sizeof(msgs.rep_ack));
1681 if (ret < 0)
1682 return ret;
1683
1684 return 0;
1685 }
1686
1687 static int hdcp2_authenticate_sink(struct intel_connector *connector)
1688 {
1689 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1690 struct drm_i915_private *i915 = to_i915(connector->base.dev);
1691 struct intel_hdcp *hdcp = &connector->hdcp;
1692 const struct intel_hdcp_shim *shim = hdcp->shim;
1693 int ret;
1694
1695 ret = hdcp2_authentication_key_exchange(connector);
1696 if (ret < 0) {
1697 drm_dbg_kms(&i915->drm, "AKE Failed. Err : %d\n", ret);
1698 return ret;
1699 }
1700
1701 ret = hdcp2_locality_check(connector);
1702 if (ret < 0) {
1703 drm_dbg_kms(&i915->drm,
1704 "Locality Check failed. Err : %d\n", ret);
1705 return ret;
1706 }
1707
1708 ret = hdcp2_session_key_exchange(connector);
1709 if (ret < 0) {
1710 drm_dbg_kms(&i915->drm, "SKE Failed. Err : %d\n", ret);
1711 return ret;
1712 }
1713
1714 if (shim->config_stream_type) {
1715 ret = shim->config_stream_type(dig_port,
1716 hdcp->is_repeater,
1717 hdcp->content_type);
1718 if (ret < 0)
1719 return ret;
1720 }
1721
1722 if (hdcp->is_repeater) {
1723 ret = hdcp2_authenticate_repeater_topology(connector);
1724 if (ret < 0) {
1725 drm_dbg_kms(&i915->drm,
1726 "Repeater Auth Failed. Err: %d\n", ret);
1727 return ret;
1728 }
1729 }
1730
1731 return ret;
1732 }
1733
1734 static int hdcp2_enable_stream_encryption(struct intel_connector *connector)
1735 {
1736 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1737 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1738 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1739 struct intel_hdcp *hdcp = &connector->hdcp;
1740 enum transcoder cpu_transcoder = hdcp->cpu_transcoder;
1741 enum port port = dig_port->base.port;
1742 int ret = 0;
1743
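/* Stream encryption can only be enabled once the link itself is encrypted */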
1744 if (!(intel_de_read(dev_priv, HDCP2_STATUS(dev_priv, cpu_transcoder, port)) &
1745 LINK_ENCRYPTION_STATUS)) {
1746 drm_err(&dev_priv->drm, "[%s:%d] HDCP 2.2 Link is not encrypted\n",
1747 connector->base.name, connector->base.base.id);
1748 ret = -EPERM;
1749 goto link_recover;
1750 }
1751
1752 if (hdcp->shim->stream_2_2_encryption) {
1753 ret = hdcp->shim->stream_2_2_encryption(connector, true);
1754 if (ret) {
1755 drm_err(&dev_priv->drm, "[%s:%d] Failed to enable HDCP 2.2 stream enc\n",
1756 connector->base.name, connector->base.base.id);
1757 return ret;
1758 }
1759 drm_dbg_kms(&dev_priv->drm, "HDCP 2.2 transcoder: %s stream encrypted\n",
1760 transcoder_name(hdcp->stream_transcoder));
1761 }
1762
1763 return 0;
1764
1765 link_recover:
1766 if (hdcp2_deauthenticate_port(connector) < 0)
1767 drm_dbg_kms(&dev_priv->drm, "Port deauth failed.\n");
1768
1769 dig_port->hdcp_auth_status = false;
1770 data->k = 0;
1771
1772 return ret;
1773 }
1774
1775 static int hdcp2_enable_encryption(struct intel_connector *connector)
1776 {
1777 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1778 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1779 struct intel_hdcp *hdcp = &connector->hdcp;
1780 enum port port = dig_port->base.port;
1781 enum transcoder cpu_transcoder = hdcp->cpu_transcoder;
1782 int ret;
1783
1784 drm_WARN_ON(&dev_priv->drm,
1785 intel_de_read(dev_priv, HDCP2_STATUS(dev_priv, cpu_transcoder, port)) &
1786 LINK_ENCRYPTION_STATUS);
1787 if (hdcp->shim->toggle_signalling) {
1788 ret = hdcp->shim->toggle_signalling(dig_port, cpu_transcoder,
1789 true);
1790 if (ret) {
1791 drm_err(&dev_priv->drm,
1792 "Failed to enable HDCP signalling. %d\n",
1793 ret);
1794 return ret;
1795 }
1796 }
1797
1798 if (intel_de_read(dev_priv, HDCP2_STATUS(dev_priv, cpu_transcoder, port)) &
1799 LINK_AUTH_STATUS) {
/* Link is Authenticated. Now set for Encryption */
1801 intel_de_write(dev_priv,
1802 HDCP2_CTL(dev_priv, cpu_transcoder, port),
1803 intel_de_read(dev_priv, HDCP2_CTL(dev_priv, cpu_transcoder, port)) | CTL_LINK_ENCRYPTION_REQ);
1804 }
1805
1806 ret = intel_de_wait_for_set(dev_priv,
1807 HDCP2_STATUS(dev_priv, cpu_transcoder,
1808 port),
1809 LINK_ENCRYPTION_STATUS,
1810 HDCP_ENCRYPT_STATUS_CHANGE_TIMEOUT_MS);
1811 dig_port->hdcp_auth_status = true;
1812
1813 return ret;
1814 }
1815
1816 static int hdcp2_disable_encryption(struct intel_connector *connector)
1817 {
1818 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1819 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1820 struct intel_hdcp *hdcp = &connector->hdcp;
1821 enum port port = dig_port->base.port;
1822 enum transcoder cpu_transcoder = hdcp->cpu_transcoder;
1823 int ret;
1824
1825 drm_WARN_ON(&dev_priv->drm, !(intel_de_read(dev_priv, HDCP2_STATUS(dev_priv, cpu_transcoder, port)) &
1826 LINK_ENCRYPTION_STATUS));
1827
1828 intel_de_write(dev_priv, HDCP2_CTL(dev_priv, cpu_transcoder, port),
1829 intel_de_read(dev_priv, HDCP2_CTL(dev_priv, cpu_transcoder, port)) & ~CTL_LINK_ENCRYPTION_REQ);
1830
1831 ret = intel_de_wait_for_clear(dev_priv,
1832 HDCP2_STATUS(dev_priv, cpu_transcoder,
1833 port),
1834 LINK_ENCRYPTION_STATUS,
1835 HDCP_ENCRYPT_STATUS_CHANGE_TIMEOUT_MS);
1836 if (ret == -ETIMEDOUT)
1837 drm_dbg_kms(&dev_priv->drm, "Disable Encryption Timedout");
1838
1839 if (hdcp->shim->toggle_signalling) {
1840 ret = hdcp->shim->toggle_signalling(dig_port, cpu_transcoder,
1841 false);
1842 if (ret) {
1843 drm_err(&dev_priv->drm,
1844 "Failed to disable HDCP signalling. %d\n",
1845 ret);
1846 return ret;
1847 }
1848 }
1849
1850 return ret;
1851 }
1852
1853 static int
1854 hdcp2_propagate_stream_management_info(struct intel_connector *connector)
1855 {
1856 struct drm_i915_private *i915 = to_i915(connector->base.dev);
1857 int i, tries = 3, ret;
1858
1859 if (!connector->hdcp.is_repeater)
1860 return 0;
1861
1862 for (i = 0; i < tries; i++) {
1863 ret = _hdcp2_propagate_stream_management_info(connector);
1864 if (!ret)
1865 break;
1866
/* Restart the auth in case of seq_num_m roll over */
1868 if (connector->hdcp.seq_num_m > HDCP_2_2_SEQ_NUM_MAX) {
1869 drm_dbg_kms(&i915->drm,
1870 "seq_num_m roll over.(%d)\n", ret);
1871 break;
1872 }
1873
1874 drm_dbg_kms(&i915->drm,
1875 "HDCP2 stream management %d of %d Failed.(%d)\n",
1876 i + 1, tries, ret);
1877 }
1878
1879 return ret;
1880 }
1881
1882 static int hdcp2_authenticate_and_encrypt(struct intel_connector *connector)
1883 {
1884 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1885 struct drm_i915_private *i915 = to_i915(connector->base.dev);
1886 int ret = 0, i, tries = 3;
1887
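/* Authenticate the sink, retrying the full sequence a few times before giving up */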
1888 for (i = 0; i < tries && !dig_port->hdcp_auth_status; i++) {
1889 ret = hdcp2_authenticate_sink(connector);
1890 if (!ret) {
1891 ret = intel_hdcp_prepare_streams(connector);
1892 if (ret) {
1893 drm_dbg_kms(&i915->drm,
1894 "Prepare streams failed.(%d)\n",
1895 ret);
1896 break;
1897 }
1898
1899 ret = hdcp2_propagate_stream_management_info(connector);
1900 if (ret) {
1901 drm_dbg_kms(&i915->drm,
1902 "Stream management failed.(%d)\n",
1903 ret);
1904 break;
1905 }
1906
1907 ret = hdcp2_authenticate_port(connector);
1908 if (!ret)
1909 break;
1910 drm_dbg_kms(&i915->drm, "HDCP2 port auth failed.(%d)\n",
1911 ret);
1912 }
1913
/* Clear the mei hdcp session before retrying */
1915 drm_dbg_kms(&i915->drm, "HDCP2.2 Auth %d of %d Failed.(%d)\n",
1916 i + 1, tries, ret);
1917 if (hdcp2_deauthenticate_port(connector) < 0)
1918 drm_dbg_kms(&i915->drm, "Port deauth failed.\n");
1919 }
1920
1921 if (!ret && !dig_port->hdcp_auth_status) {
/*
 * Ensure the required 200ms minimum interval between Session Key
 * Exchange and enabling encryption.
 */
1926 msleep(HDCP_2_2_DELAY_BEFORE_ENCRYPTION_EN);
1927 ret = hdcp2_enable_encryption(connector);
1928 if (ret < 0) {
1929 drm_dbg_kms(&i915->drm,
1930 "Encryption Enable Failed.(%d)\n", ret);
1931 if (hdcp2_deauthenticate_port(connector) < 0)
1932 drm_dbg_kms(&i915->drm, "Port deauth failed.\n");
1933 }
1934 }
1935
1936 if (!ret)
1937 ret = hdcp2_enable_stream_encryption(connector);
1938
1939 return ret;
1940 }
1941
1942 static int _intel_hdcp2_enable(struct intel_connector *connector)
1943 {
1944 struct drm_i915_private *i915 = to_i915(connector->base.dev);
1945 struct intel_hdcp *hdcp = &connector->hdcp;
1946 int ret;
1947
1948 drm_dbg_kms(&i915->drm, "[%s:%d] HDCP2.2 is being enabled. Type: %d\n",
1949 connector->base.name, connector->base.base.id,
1950 hdcp->content_type);
1951
1952 ret = hdcp2_authenticate_and_encrypt(connector);
1953 if (ret) {
1954 drm_dbg_kms(&i915->drm, "HDCP2 Type%d Enabling Failed. (%d)\n",
1955 hdcp->content_type, ret);
1956 return ret;
1957 }
1958
1959 drm_dbg_kms(&i915->drm, "[%s:%d] HDCP2.2 is enabled. Type %d\n",
1960 connector->base.name, connector->base.base.id,
1961 hdcp->content_type);
1962
1963 hdcp->hdcp2_encrypted = true;
1964 return 0;
1965 }
1966
1967 static int
1968 _intel_hdcp2_disable(struct intel_connector *connector, bool hdcp2_link_recovery)
1969 {
1970 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
1971 struct drm_i915_private *i915 = to_i915(connector->base.dev);
1972 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
1973 struct intel_hdcp *hdcp = &connector->hdcp;
1974 int ret;
1975
1976 drm_dbg_kms(&i915->drm, "[%s:%d] HDCP2.2 is being Disabled\n",
1977 connector->base.name, connector->base.base.id);
1978
1979 if (hdcp->shim->stream_2_2_encryption) {
1980 ret = hdcp->shim->stream_2_2_encryption(connector, false);
1981 if (ret) {
1982 drm_err(&i915->drm, "[%s:%d] Failed to disable HDCP 2.2 stream enc\n",
1983 connector->base.name, connector->base.base.id);
1984 return ret;
1985 }
1986 drm_dbg_kms(&i915->drm, "HDCP 2.2 transcoder: %s stream encryption disabled\n",
1987 transcoder_name(hdcp->stream_transcoder));
1988
1989 if (dig_port->num_hdcp_streams > 0 && !hdcp2_link_recovery)
1990 return 0;
1991 }
1992
1993 ret = hdcp2_disable_encryption(connector);
1994
1995 if (hdcp2_deauthenticate_port(connector) < 0)
1996 drm_dbg_kms(&i915->drm, "Port deauth failed.\n");
1997
1998 connector->hdcp.hdcp2_encrypted = false;
1999 dig_port->hdcp_auth_status = false;
2000 data->k = 0;
2001
2002 return ret;
2003 }
2004
/* Implements the Link Integrity Check for HDCP2.2 */
2006 static int intel_hdcp2_check_link(struct intel_connector *connector)
2007 {
2008 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
2009 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
2010 struct intel_hdcp *hdcp = &connector->hdcp;
2011 enum port port = dig_port->base.port;
2012 enum transcoder cpu_transcoder;
2013 int ret = 0;
2014
2015 mutex_lock(&hdcp->mutex);
2016 mutex_lock(&dig_port->hdcp_mutex);
2017 cpu_transcoder = hdcp->cpu_transcoder;
2018
2019 /* hdcp2_check_link is expected only when HDCP2.2 is Enabled */
2020 if (hdcp->value != DRM_MODE_CONTENT_PROTECTION_ENABLED ||
2021 !hdcp->hdcp2_encrypted) {
2022 ret = -EINVAL;
2023 goto out;
2024 }
2025
2026 if (drm_WARN_ON(&dev_priv->drm,
2027 !intel_hdcp2_in_use(dev_priv, cpu_transcoder, port))) {
2028 drm_err(&dev_priv->drm,
2029 "HDCP2.2 link stopped the encryption, %x\n",
2030 intel_de_read(dev_priv, HDCP2_STATUS(dev_priv, cpu_transcoder, port)));
2031 ret = -ENXIO;
2032 _intel_hdcp2_disable(connector, true);
2033 intel_hdcp_update_value(connector,
2034 DRM_MODE_CONTENT_PROTECTION_DESIRED,
2035 true);
2036 goto out;
2037 }
2038
2039 ret = hdcp->shim->check_2_2_link(dig_port, connector);
2040 if (ret == HDCP_LINK_PROTECTED) {
2041 if (hdcp->value != DRM_MODE_CONTENT_PROTECTION_UNDESIRED) {
2042 intel_hdcp_update_value(connector,
2043 DRM_MODE_CONTENT_PROTECTION_ENABLED,
2044 true);
2045 }
2046 goto out;
2047 }
2048
2049 if (ret == HDCP_TOPOLOGY_CHANGE) {
2050 if (hdcp->value == DRM_MODE_CONTENT_PROTECTION_UNDESIRED)
2051 goto out;
2052
2053 drm_dbg_kms(&dev_priv->drm,
2054 "HDCP2.2 Downstream topology change\n");
2055 ret = hdcp2_authenticate_repeater_topology(connector);
2056 if (!ret) {
2057 intel_hdcp_update_value(connector,
2058 DRM_MODE_CONTENT_PROTECTION_ENABLED,
2059 true);
2060 goto out;
2061 }
2062 drm_dbg_kms(&dev_priv->drm,
2063 "[%s:%d] Repeater topology auth failed.(%d)\n",
2064 connector->base.name, connector->base.base.id,
2065 ret);
2066 } else {
2067 drm_dbg_kms(&dev_priv->drm,
2068 "[%s:%d] HDCP2.2 link failed, retrying auth\n",
2069 connector->base.name, connector->base.base.id);
2070 }
2071
2072 ret = _intel_hdcp2_disable(connector, true);
2073 if (ret) {
2074 drm_err(&dev_priv->drm,
2075 "[%s:%d] Failed to disable hdcp2.2 (%d)\n",
2076 connector->base.name, connector->base.base.id, ret);
2077 intel_hdcp_update_value(connector,
2078 DRM_MODE_CONTENT_PROTECTION_DESIRED, true);
2079 goto out;
2080 }
2081
2082 ret = _intel_hdcp2_enable(connector);
2083 if (ret) {
2084 drm_dbg_kms(&dev_priv->drm,
2085 "[%s:%d] Failed to enable hdcp2.2 (%d)\n",
2086 connector->base.name, connector->base.base.id,
2087 ret);
2088 intel_hdcp_update_value(connector,
2089 DRM_MODE_CONTENT_PROTECTION_DESIRED,
2090 true);
2091 goto out;
2092 }
2093
2094 out:
2095 mutex_unlock(&dig_port->hdcp_mutex);
2096 mutex_unlock(&hdcp->mutex);
2097 return ret;
2098 }
2099
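/*
 * Periodic link integrity check: try the HDCP 2.2 check first and fall back
 * to HDCP 1.4, rescheduling at the matching protocol's check period.
 */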
2100 static void intel_hdcp_check_work(struct work_struct *work)
2101 {
2102 struct intel_hdcp *hdcp = container_of(to_delayed_work(work),
2103 struct intel_hdcp,
2104 check_work);
2105 struct intel_connector *connector = intel_hdcp_to_connector(hdcp);
2106
2107 if (drm_connector_is_unregistered(&connector->base))
2108 return;
2109
2110 if (!intel_hdcp2_check_link(connector))
2111 schedule_delayed_work(&hdcp->check_work,
2112 DRM_HDCP2_CHECK_PERIOD_MS);
2113 else if (!intel_hdcp_check_link(connector))
2114 schedule_delayed_work(&hdcp->check_work,
2115 DRM_HDCP_CHECK_PERIOD_MS);
2116 }
2117
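/*
 * Component bind/unbind callbacks for I915_COMPONENT_HDCP: they publish and
 * clear the firmware (MEI) HDCP interface in dev_priv->hdcp_master, always
 * under hdcp_comp_mutex.
 */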
2118 static int i915_hdcp_component_bind(struct device *i915_kdev,
2119 struct device *mei_kdev, void *data)
2120 {
2121 struct drm_i915_private *dev_priv = kdev_to_i915(i915_kdev);
2122
2123 drm_dbg(&dev_priv->drm, "I915 HDCP comp bind\n");
2124 mutex_lock(&dev_priv->hdcp_comp_mutex);
2125 dev_priv->hdcp_master = (struct i915_hdcp_comp_master *)data;
2126 dev_priv->hdcp_master->mei_dev = mei_kdev;
2127 mutex_unlock(&dev_priv->hdcp_comp_mutex);
2128
2129 return 0;
2130 }
2131
2132 static void i915_hdcp_component_unbind(struct device *i915_kdev,
2133 struct device *mei_kdev, void *data)
2134 {
2135 struct drm_i915_private *dev_priv = kdev_to_i915(i915_kdev);
2136
2137 drm_dbg(&dev_priv->drm, "I915 HDCP comp unbind\n");
2138 mutex_lock(&dev_priv->hdcp_comp_mutex);
2139 dev_priv->hdcp_master = NULL;
2140 mutex_unlock(&dev_priv->hdcp_comp_mutex);
2141 }
2142
2143 static const struct component_ops i915_hdcp_component_ops = {
2144 .bind = i915_hdcp_component_bind,
2145 .unbind = i915_hdcp_component_unbind,
2146 };
2147
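/*
 * Translate an i915 port to the DDI index expected by the ME FW: PORT_A is
 * special-cased, PORT_B..PORT_F map by value, anything else is invalid.
 */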
2148 static enum mei_fw_ddi intel_get_mei_fw_ddi_index(enum port port)
2149 {
2150 switch (port) {
2151 case PORT_A:
2152 return MEI_DDI_A;
2153 case PORT_B ... PORT_F:
2154 return (enum mei_fw_ddi)port;
2155 default:
2156 return MEI_DDI_INVALID_PORT;
2157 }
2158 }
2159
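/*
 * Translate a cpu transcoder to the ME FW transcoder index: TRANSCODER_A..D
 * map by value with bit 4 (0x10) set, anything else is invalid.
 */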
2160 static enum mei_fw_tc intel_get_mei_fw_tc(enum transcoder cpu_transcoder)
2161 {
2162 switch (cpu_transcoder) {
2163 case TRANSCODER_A ... TRANSCODER_D:
2164 return (enum mei_fw_tc)(cpu_transcoder | 0x10);
2165 default:
2166 return MEI_INVALID_TRANSCODER;
2167 }
2168 }
2169
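/*
 * Fill the hdcp_port_data handed to the HDCP firmware: fw_ddi/fw_tc indices,
 * port type, shim protocol and the per-pipe stream array.
 */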
2170 static int initialize_hdcp_port_data(struct intel_connector *connector,
2171 struct intel_digital_port *dig_port,
2172 const struct intel_hdcp_shim *shim)
2173 {
2174 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
2175 struct hdcp_port_data *data = &dig_port->hdcp_port_data;
2176 struct intel_hdcp *hdcp = &connector->hdcp;
2177 enum port port = dig_port->base.port;
2178
2179 if (DISPLAY_VER(dev_priv) < 12)
2180 data->fw_ddi = intel_get_mei_fw_ddi_index(port);
2181 else
2182 /*
2183 * As per ME FW API expectation, for GEN 12+, fw_ddi is filled with
2184 * zero (INVALID PORT index).
2185 */
2186 data->fw_ddi = MEI_DDI_INVALID_PORT;
2187
2188 /*
2189 * As associated transcoder is set and modified at modeset, here fw_tc
2190 * is initialized to zero (invalid transcoder index). This will be
2191 * retained for <Gen12 forever.
2192 */
2193 data->fw_tc = MEI_INVALID_TRANSCODER;
2194
2195 data->port_type = (u8)HDCP_PORT_TYPE_INTEGRATED;
2196 data->protocol = (u8)shim->protocol;
2197
2198 if (!data->streams)
2199 data->streams = kcalloc(INTEL_NUM_PIPES(dev_priv),
2200 sizeof(struct hdcp2_streamid_type),
2201 GFP_KERNEL);
2202 if (!data->streams) {
2203 drm_err(&dev_priv->drm, "Out of Memory\n");
2204 return -ENOMEM;
2205 }
2206
2207 data->streams[0].stream_id = 0;
2208 data->streams[0].stream_type = hdcp->content_type;
2209
2210 return 0;
2211 }
2212
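/*
 * HDCP 2.2 requires the MEI HDCP component driver and either display
 * version 10+ or one of KBL/CFL/CML.
 */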
2213 static bool is_hdcp2_supported(struct drm_i915_private *dev_priv)
2214 {
2215 if (!IS_ENABLED(CONFIG_INTEL_MEI_HDCP))
2216 return false;
2217
2218 return (DISPLAY_VER(dev_priv) >= 10 ||
2219 IS_KABYLAKE(dev_priv) ||
2220 IS_COFFEELAKE(dev_priv) ||
2221 IS_COMETLAKE(dev_priv));
2222 }
2223
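/*
 * Register the i915 side of the HDCP component so the firmware (MEI HDCP)
 * driver can bind to it; no-op when HDCP 2.2 is not supported.
 */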
2224 void intel_hdcp_component_init(struct drm_i915_private *dev_priv)
2225 {
2226 int ret;
2227
2228 if (!is_hdcp2_supported(dev_priv))
2229 return;
2230
2231 mutex_lock(&dev_priv->hdcp_comp_mutex);
2232 drm_WARN_ON(&dev_priv->drm, dev_priv->hdcp_comp_added);
2233
2234 dev_priv->hdcp_comp_added = true;
2235 mutex_unlock(&dev_priv->hdcp_comp_mutex);
2236 ret = component_add_typed(dev_priv->drm.dev, &i915_hdcp_component_ops,
2237 I915_COMPONENT_HDCP);
2238 if (ret < 0) {
2239 drm_dbg_kms(&dev_priv->drm, "Failed at component add(%d)\n",
2240 ret);
2241 mutex_lock(&dev_priv->hdcp_comp_mutex);
2242 dev_priv->hdcp_comp_added = false;
2243 mutex_unlock(&dev_priv->hdcp_comp_mutex);
2244 return;
2245 }
2246 }
2247
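/*
 * Per-connector HDCP 2.2 setup: initialize the port data used by the
 * firmware and mark the connector as hdcp2_supported on success.
 */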
2248 static void intel_hdcp2_init(struct intel_connector *connector,
2249 struct intel_digital_port *dig_port,
2250 const struct intel_hdcp_shim *shim)
2251 {
2252 struct drm_i915_private *i915 = to_i915(connector->base.dev);
2253 struct intel_hdcp *hdcp = &connector->hdcp;
2254 int ret;
2255
2256 ret = initialize_hdcp_port_data(connector, dig_port, shim);
2257 if (ret) {
2258 drm_dbg_kms(&i915->drm, "Mei hdcp data init failed\n");
2259 return;
2260 }
2261
2262 hdcp->hdcp2_supported = true;
2263 }
2264
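/*
 * Attach the content protection property and initialize the locks and work
 * items backing the HDCP state machine for this connector.
 */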
2265 int intel_hdcp_init(struct intel_connector *connector,
2266 struct intel_digital_port *dig_port,
2267 const struct intel_hdcp_shim *shim)
2268 {
2269 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
2270 struct intel_hdcp *hdcp = &connector->hdcp;
2271 int ret;
2272
2273 if (!shim)
2274 return -EINVAL;
2275
2276 if (is_hdcp2_supported(dev_priv))
2277 intel_hdcp2_init(connector, dig_port, shim);
2278
2279 ret =
2280 drm_connector_attach_content_protection_property(&connector->base,
2281 hdcp->hdcp2_supported);
2282 if (ret) {
2283 hdcp->hdcp2_supported = false;
2284 kfree(dig_port->hdcp_port_data.streams);
2285 return ret;
2286 }
2287
2288 hdcp->shim = shim;
2289 mutex_init(&hdcp->mutex);
2290 INIT_DELAYED_WORK(&hdcp->check_work, intel_hdcp_check_work);
2291 INIT_WORK(&hdcp->prop_work, intel_hdcp_prop_work);
2292 init_waitqueue_head(&hdcp->cp_irq_queue);
2293
2294 return 0;
2295 }
2296
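/*
 * Enable content protection on a connector: pick the cpu/stream transcoders,
 * prefer HDCP 2.2 over HDCP 1.4, and on success start the periodic link
 * check and flip the property to ENABLED.
 */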
2297 int intel_hdcp_enable(struct intel_connector *connector,
2298 const struct intel_crtc_state *pipe_config, u8 content_type)
2299 {
2300 struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
2301 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
2302 struct intel_hdcp *hdcp = &connector->hdcp;
2303 unsigned long check_link_interval = DRM_HDCP_CHECK_PERIOD_MS;
2304 int ret = -EINVAL;
2305
2306 if (!hdcp->shim)
2307 return -ENOENT;
2308
2309 if (!connector->encoder) {
2310 drm_err(&dev_priv->drm, "[%s:%d] encoder is not initialized\n",
2311 connector->base.name, connector->base.base.id);
2312 return -ENODEV;
2313 }
2314
2315 mutex_lock(&hdcp->mutex);
2316 mutex_lock(&dig_port->hdcp_mutex);
2317 drm_WARN_ON(&dev_priv->drm,
2318 hdcp->value == DRM_MODE_CONTENT_PROTECTION_ENABLED);
2319 hdcp->content_type = content_type;
2320
2321 if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_DP_MST)) {
2322 hdcp->cpu_transcoder = pipe_config->mst_master_transcoder;
2323 hdcp->stream_transcoder = pipe_config->cpu_transcoder;
2324 } else {
2325 hdcp->cpu_transcoder = pipe_config->cpu_transcoder;
2326 hdcp->stream_transcoder = INVALID_TRANSCODER;
2327 }
2328
2329 if (DISPLAY_VER(dev_priv) >= 12)
2330 dig_port->hdcp_port_data.fw_tc = intel_get_mei_fw_tc(hdcp->cpu_transcoder);
2331
2332 /*
2333 * Considering that HDCP2.2 is more secure than HDCP1.4, If the setup
2334 * is capable of HDCP2.2, it is preferred to use HDCP2.2.
2335 */
2336 if (intel_hdcp2_capable(connector)) {
2337 ret = _intel_hdcp2_enable(connector);
2338 if (!ret)
2339 check_link_interval = DRM_HDCP2_CHECK_PERIOD_MS;
2340 }
2341
2342 /*
2343 * When HDCP2.2 fails and Content Type is not Type1, HDCP1.4 will
2344 * be attempted.
2345 */
2346 if (ret && intel_hdcp_capable(connector) &&
2347 hdcp->content_type != DRM_MODE_HDCP_CONTENT_TYPE1) {
2348 ret = _intel_hdcp_enable(connector);
2349 }
2350
2351 if (!ret) {
2352 schedule_delayed_work(&hdcp->check_work, check_link_interval);
2353 intel_hdcp_update_value(connector,
2354 DRM_MODE_CONTENT_PROTECTION_ENABLED,
2355 true);
2356 }
2357
2358 mutex_unlock(&dig_port->hdcp_mutex);
2359 mutex_unlock(&hdcp->mutex);
2360 return ret;
2361 }
2362
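/*
 * Disable content protection: move the property to UNDESIRED, tear down
 * whichever protocol is currently encrypting and cancel the link check work.
 */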
2363 int intel_hdcp_disable(struct intel_connector *connector)
2364 {
2365 struct intel_digital_port *dig_port = intel_attached_dig_port(connector);
2366 struct intel_hdcp *hdcp = &connector->hdcp;
2367 int ret = 0;
2368
2369 if (!hdcp->shim)
2370 return -ENOENT;
2371
2372 mutex_lock(&hdcp->mutex);
2373 mutex_lock(&dig_port->hdcp_mutex);
2374
2375 if (hdcp->value == DRM_MODE_CONTENT_PROTECTION_UNDESIRED)
2376 goto out;
2377
2378 intel_hdcp_update_value(connector,
2379 DRM_MODE_CONTENT_PROTECTION_UNDESIRED, false);
2380 if (hdcp->hdcp2_encrypted)
2381 ret = _intel_hdcp2_disable(connector, false);
2382 else if (hdcp->hdcp_encrypted)
2383 ret = _intel_hdcp_disable(connector);
2384
2385 out:
2386 mutex_unlock(&dig_port->hdcp_mutex);
2387 mutex_unlock(&hdcp->mutex);
2388 cancel_delayed_work_sync(&hdcp->check_work);
2389 return ret;
2390 }
2391
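/*
 * Called at atomic commit time to apply content protection / content type
 * property changes: disable, re-enable and/or schedule prop_work as needed.
 */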
2392 void intel_hdcp_update_pipe(struct intel_atomic_state *state,
2393 struct intel_encoder *encoder,
2394 const struct intel_crtc_state *crtc_state,
2395 const struct drm_connector_state *conn_state)
2396 {
2397 struct intel_connector *connector =
2398 to_intel_connector(conn_state->connector);
2399 struct intel_hdcp *hdcp = &connector->hdcp;
2400 bool content_protection_type_changed, desired_and_not_enabled = false;
2401
2402 if (!connector->hdcp.shim)
2403 return;
2404
2405 content_protection_type_changed =
2406 (conn_state->hdcp_content_type != hdcp->content_type &&
2407 conn_state->content_protection !=
2408 DRM_MODE_CONTENT_PROTECTION_UNDESIRED);
2409
2410 /*
2411 * During the HDCP encryption session if Type change is requested,
2412 * disable the HDCP and re-enable it with new TYPE value.
2413 */
2414 if (conn_state->content_protection ==
2415 DRM_MODE_CONTENT_PROTECTION_UNDESIRED ||
2416 content_protection_type_changed)
2417 intel_hdcp_disable(connector);
2418
2419 /*
2420 * Mark the hdcp state as DESIRED after the hdcp disable of type
2421 * change procedure.
2422 */
2423 if (content_protection_type_changed) {
2424 mutex_lock(&hdcp->mutex);
2425 hdcp->value = DRM_MODE_CONTENT_PROTECTION_DESIRED;
2426 drm_connector_get(&connector->base);
2427 schedule_work(&hdcp->prop_work);
2428 mutex_unlock(&hdcp->mutex);
2429 }
2430
2431 if (conn_state->content_protection ==
2432 DRM_MODE_CONTENT_PROTECTION_DESIRED) {
2433 mutex_lock(&hdcp->mutex);
2434 /* Avoid enabling hdcp, if it already ENABLED */
2435 desired_and_not_enabled =
2436 hdcp->value != DRM_MODE_CONTENT_PROTECTION_ENABLED;
2437 mutex_unlock(&hdcp->mutex);
2438 /*
2439 * If HDCP already ENABLED and CP property is DESIRED, schedule
2440 * prop_work to update correct CP property to user space.
2441 */
2442 if (!desired_and_not_enabled && !content_protection_type_changed) {
2443 drm_connector_get(&connector->base);
2444 schedule_work(&hdcp->prop_work);
2445 }
2446 }
2447
2448 if (desired_and_not_enabled || content_protection_type_changed)
2449 intel_hdcp_enable(connector,
2450 crtc_state,
2451 (u8)conn_state->hdcp_content_type);
2452 }
2453
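/* Remove the HDCP component registered in intel_hdcp_component_init(). */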
2454 void intel_hdcp_component_fini(struct drm_i915_private *dev_priv)
2455 {
2456 mutex_lock(&dev_priv->hdcp_comp_mutex);
2457 if (!dev_priv->hdcp_comp_added) {
2458 mutex_unlock(&dev_priv->hdcp_comp_mutex);
2459 return;
2460 }
2461
2462 dev_priv->hdcp_comp_added = false;
2463 mutex_unlock(&dev_priv->hdcp_comp_mutex);
2464
2465 component_del(dev_priv->drm.dev, &i915_hdcp_component_ops);
2466 }
2467
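/*
 * Connector teardown: expected to run after the connector is unregistered,
 * so no new HDCP work can be kicked off while pending work is flushed.
 */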
2468 void intel_hdcp_cleanup(struct intel_connector *connector)
2469 {
2470 struct intel_hdcp *hdcp = &connector->hdcp;
2471
2472 if (!hdcp->shim)
2473 return;
2474
2475 /*
2476 * If the connector is registered, it's possible userspace could kick
2477 * off another HDCP enable, which would re-spawn the workers.
2478 */
2479 drm_WARN_ON(connector->base.dev,
2480 connector->base.registration_state == DRM_CONNECTOR_REGISTERED);
2481
2482 /*
2483 * Now that the connector is not registered, check_work won't be run,
2484 * but cancel any outstanding instances of it
2485 */
2486 cancel_delayed_work_sync(&hdcp->check_work);
2487
2488 /*
2489 * We don't cancel prop_work in the same way as check_work since it
2490 * requires connection_mutex which could be held while calling this
2491 * function. Instead, we rely on the connector references grabbed before
2492 * scheduling prop_work to ensure the connector is alive when prop_work
2493 * is run. So if we're in the destroy path (which is where this
2494 * function should be called), we're "guaranteed" that prop_work is not
2495 * active (tl;dr This Should Never Happen).
2496 */
2497 drm_WARN_ON(connector->base.dev, work_pending(&hdcp->prop_work));
2498
2499 mutex_lock(&hdcp->mutex);
2500 hdcp->shim = NULL;
2501 mutex_unlock(&hdcp->mutex);
2502 }
2503
2504 void intel_hdcp_atomic_check(struct drm_connector *connector,
2505 struct drm_connector_state *old_state,
2506 struct drm_connector_state *new_state)
2507 {
2508 u64 old_cp = old_state->content_protection;
2509 u64 new_cp = new_state->content_protection;
2510 struct drm_crtc_state *crtc_state;
2511
2512 if (!new_state->crtc) {
2513 /*
2514 * If the connector is being disabled with CP enabled, mark it
2515 * desired so it's re-enabled when the connector is brought back
2516 */
2517 if (old_cp == DRM_MODE_CONTENT_PROTECTION_ENABLED)
2518 new_state->content_protection =
2519 DRM_MODE_CONTENT_PROTECTION_DESIRED;
2520 return;
2521 }
2522
2523 crtc_state = drm_atomic_get_new_crtc_state(new_state->state,
2524 new_state->crtc);
2525 /*
2526 * Fix the HDCP uapi content protection state in case of modeset.
2527 * FIXME: As per HDCP content protection property uapi doc, an uevent()
2528 * need to be sent if there is transition from ENABLED->DESIRED.
2529 */
2530 if (drm_atomic_crtc_needs_modeset(crtc_state) &&
2531 (old_cp == DRM_MODE_CONTENT_PROTECTION_ENABLED &&
2532 new_cp != DRM_MODE_CONTENT_PROTECTION_UNDESIRED))
2533 new_state->content_protection =
2534 DRM_MODE_CONTENT_PROTECTION_DESIRED;
2535
2536 /*
2537 * Nothing to do if the state didn't change, or HDCP was activated since
2538 * the last commit. And also no change in hdcp content type.
2539 */
2540 if (old_cp == new_cp ||
2541 (old_cp == DRM_MODE_CONTENT_PROTECTION_DESIRED &&
2542 new_cp == DRM_MODE_CONTENT_PROTECTION_ENABLED)) {
2543 if (old_state->hdcp_content_type ==
2544 new_state->hdcp_content_type)
2545 return;
2546 }
2547
2548 crtc_state->mode_changed = true;
2549 }
2550
2551 /* Handles the CP_IRQ raised from the DP HDCP sink */
2552 void intel_hdcp_handle_cp_irq(struct intel_connector *connector)
2553 {
2554 struct intel_hdcp *hdcp = &connector->hdcp;
2555
2556 if (!hdcp->shim)
2557 return;
2558
2559 atomic_inc(&connector->hdcp.cp_irq_count);
2560 wake_up_all(&connector->hdcp.cp_irq_queue);
2561
2562 schedule_delayed_work(&hdcp->check_work, 0);
2563 }