/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OF THE
 * SOFTWARE.
 */

#include <drm/drm_atomic_helper.h>
#include <drm/drm_damage_helper.h>

#include "display/intel_dp.h"

#include "i915_drv.h"
#include "intel_atomic.h"
#include "intel_crtc.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dp_aux.h"
#include "intel_hdmi.h"
#include "intel_psr.h"
#include "intel_snps_phy.h"
#include "skl_universal_plane.h"

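/**
 * DOC: Panel Self Refresh (PSR/SRD)
 *
 * Since Haswell the display engine supports Panel Self-Refresh (PSR, also
 * called SRD) on eDP panels that implement a remote frame buffer (RFB) as
 * described by the eDP 1.3 specification. While the frame buffer contents
 * are unchanged the panel refreshes itself from the RFB, which allows the
 * source to power down the main link and eliminates display refresh
 * requests to memory.
 *
 * PSR must be supported by both the source (hardware) and the sink (panel).
 * The hardware automatically enters and exits self-refresh and takes care of
 * the required DP AUX messaging, but because hardware frontbuffer tracking
 * is not reliable in all cases, the driver additionally relies on software
 * frontbuffer tracking (intel_psr_invalidate()/intel_psr_flush()) to make
 * sure no screen update is missed. Re-activation after an exit is done from
 * a work queue, which must be properly synchronized and cancelled when the
 * pipe is shut down.
 *
 * PSR2 (eDP 1.4) adds selective updates: only the changed region of a frame
 * is sent to the sink, tracked either by hardware or, on newer platforms, by
 * software via selective fetch (PSR2_MAN_TRK_CTL and the per-plane
 * PLANE_SEL_FETCH registers).
 */
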
static bool psr_global_enabled(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
	case I915_PSR_DEBUG_DEFAULT:
		if (i915->params.enable_psr == -1)
			return connector->panel.vbt.psr.enable;
		return i915->params.enable_psr;
	case I915_PSR_DEBUG_DISABLE:
		return false;
	default:
		return true;
	}
}

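/*
 * i915.enable_psr == 1 means "PSR1 only": besides the debug modes that
 * disable PSR or force PSR1, the module parameter itself rules out PSR2.
 */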
static bool psr2_global_enabled(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
	case I915_PSR_DEBUG_DISABLE:
	case I915_PSR_DEBUG_FORCE_PSR1:
		return false;
	default:
		if (i915->params.enable_psr == 1)
			return false;
		return true;
	}
}

static void psr_irq_control(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	enum transcoder trans_shift;
	i915_reg_t imr_reg;
	u32 mask, val;

	/*
	 * gen12+ has registers relative to transcoder and one per transcoder
	 * using the same bit definition: handle it as TRANSCODER_EDP to force
	 * 0 shift in bit definition
	 */
	if (DISPLAY_VER(dev_priv) >= 12) {
		trans_shift = 0;
		imr_reg = TRANS_PSR_IMR(intel_dp->psr.transcoder);
	} else {
		trans_shift = intel_dp->psr.transcoder;
		imr_reg = EDP_PSR_IMR;
	}

	mask = EDP_PSR_ERROR(trans_shift);
	if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
		mask |= EDP_PSR_POST_EXIT(trans_shift) |
			EDP_PSR_PRE_ENTRY(trans_shift);

	/* Warning: it is masking/setting reserved bits too */
	val = intel_de_read(dev_priv, imr_reg);
	val &= ~EDP_PSR_TRANS_MASK(trans_shift);
	val |= ~mask;
	intel_de_write(dev_priv, imr_reg, val);
}

static void psr_event_print(struct drm_i915_private *i915,
			    u32 val, bool psr2_enabled)
{
	drm_dbg_kms(&i915->drm, "PSR exit events: 0x%x\n", val);
	if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
		drm_dbg_kms(&i915->drm, "\tPSR2 watchdog timer expired\n");
	if ((val & PSR_EVENT_PSR2_DISABLED) && psr2_enabled)
		drm_dbg_kms(&i915->drm, "\tPSR2 disabled\n");
	if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
		drm_dbg_kms(&i915->drm, "\tSU dirty FIFO underrun\n");
	if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
		drm_dbg_kms(&i915->drm, "\tSU CRC FIFO underrun\n");
	if (val & PSR_EVENT_GRAPHICS_RESET)
		drm_dbg_kms(&i915->drm, "\tGraphics reset\n");
	if (val & PSR_EVENT_PCH_INTERRUPT)
		drm_dbg_kms(&i915->drm, "\tPCH interrupt\n");
	if (val & PSR_EVENT_MEMORY_UP)
		drm_dbg_kms(&i915->drm, "\tMemory up\n");
	if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
		drm_dbg_kms(&i915->drm, "\tFront buffer modification\n");
	if (val & PSR_EVENT_WD_TIMER_EXPIRE)
		drm_dbg_kms(&i915->drm, "\tPSR watchdog timer expired\n");
	if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
		drm_dbg_kms(&i915->drm, "\tPIPE registers updated\n");
	if (val & PSR_EVENT_REGISTER_UPDATE)
		drm_dbg_kms(&i915->drm, "\tRegister updated\n");
	if (val & PSR_EVENT_HDCP_ENABLE)
		drm_dbg_kms(&i915->drm, "\tHDCP enabled\n");
	if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
		drm_dbg_kms(&i915->drm, "\tKVMR session enabled\n");
	if (val & PSR_EVENT_VBI_ENABLE)
		drm_dbg_kms(&i915->drm, "\tVBI enabled\n");
	if (val & PSR_EVENT_LPSP_MODE_EXIT)
		drm_dbg_kms(&i915->drm, "\tLPSP mode exited\n");
	if ((val & PSR_EVENT_PSR_DISABLE) && !psr2_enabled)
		drm_dbg_kms(&i915->drm, "\tPSR disabled\n");
}

void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
{
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	ktime_t time_ns = ktime_get();
	enum transcoder trans_shift;
	i915_reg_t imr_reg;

	if (DISPLAY_VER(dev_priv) >= 12) {
		trans_shift = 0;
		imr_reg = TRANS_PSR_IMR(intel_dp->psr.transcoder);
	} else {
		trans_shift = intel_dp->psr.transcoder;
		imr_reg = EDP_PSR_IMR;
	}

	if (psr_iir & EDP_PSR_PRE_ENTRY(trans_shift)) {
		intel_dp->psr.last_entry_attempt = time_ns;
		drm_dbg_kms(&dev_priv->drm,
			    "[transcoder %s] PSR entry attempt in 2 vblanks\n",
			    transcoder_name(cpu_transcoder));
	}

	if (psr_iir & EDP_PSR_POST_EXIT(trans_shift)) {
		intel_dp->psr.last_exit = time_ns;
		drm_dbg_kms(&dev_priv->drm,
			    "[transcoder %s] PSR exit completed\n",
			    transcoder_name(cpu_transcoder));

		if (DISPLAY_VER(dev_priv) >= 9) {
			u32 val = intel_de_read(dev_priv,
						PSR_EVENT(cpu_transcoder));
			bool psr2_enabled = intel_dp->psr.psr2_enabled;

			intel_de_write(dev_priv, PSR_EVENT(cpu_transcoder),
				       val);
			psr_event_print(dev_priv, val, psr2_enabled);
		}
	}

	if (psr_iir & EDP_PSR_ERROR(trans_shift)) {
		u32 val;

		drm_warn(&dev_priv->drm, "[transcoder %s] PSR aux error\n",
			 transcoder_name(cpu_transcoder));

		intel_dp->psr.irq_aux_error = true;

		/*
		 * If this interruption is not masked it will keep
		 * interrupting so fast that it prevents the scheduled
		 * work to run.
		 * Also after a PSR error, we don't want to arm PSR
		 * again so we don't care about unmasking the interruption
		 * or unsetting irq_aux_error.
		 */
		val = intel_de_read(dev_priv, imr_reg);
		val |= EDP_PSR_ERROR(trans_shift);
		intel_de_write(dev_priv, imr_reg, val);

		schedule_work(&intel_dp->psr.work);
	}
}

static bool intel_dp_get_alpm_status(struct intel_dp *intel_dp)
{
	u8 alpm_caps = 0;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_RECEIVER_ALPM_CAP,
			      &alpm_caps) != 1)
		return false;
	return alpm_caps & DP_ALPM_CAP;
}

static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	u8 val = 8;

	if (drm_dp_dpcd_readb(&intel_dp->aux,
			      DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
		val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
	else
		drm_dbg_kms(&i915->drm,
			    "Unable to get sink synchronization latency, assuming 8 frames\n");
	return val;
}

static void intel_dp_get_su_granularity(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	ssize_t r;
	u16 w;
	u8 y;

	/* If the sink has no specific granularity requirements, use legacy values */
	if (!(intel_dp->psr_dpcd[1] & DP_PSR2_SU_GRANULARITY_REQUIRED)) {
		/* As PSR2 HW sends full lines, we do not care about x granularity */
		w = 4;
		y = 4;
		goto exit;
	}

	r = drm_dp_dpcd_read(&intel_dp->aux, DP_PSR2_SU_X_GRANULARITY, &w, 2);
	if (r != 2)
		drm_dbg_kms(&i915->drm,
			    "Unable to read DP_PSR2_SU_X_GRANULARITY\n");
	/*
	 * Spec says that if the value read is 0 the default granularity should
	 * be used instead.
	 */
	if (r != 2 || w == 0)
		w = 4;

	r = drm_dp_dpcd_read(&intel_dp->aux, DP_PSR2_SU_Y_GRANULARITY, &y, 1);
	if (r != 1) {
		drm_dbg_kms(&i915->drm,
			    "Unable to read DP_PSR2_SU_Y_GRANULARITY\n");
		y = 4;
	}
	if (y == 0)
		y = 1;

exit:
	intel_dp->psr.su_w_granularity = w;
	intel_dp->psr.su_y_granularity = y;
}

void intel_psr_init_dpcd(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv =
		to_i915(dp_to_dig_port(intel_dp)->base.base.dev);

	drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
			 sizeof(intel_dp->psr_dpcd));

	if (!intel_dp->psr_dpcd[0])
		return;
	drm_dbg_kms(&dev_priv->drm, "eDP panel supports PSR version %x\n",
		    intel_dp->psr_dpcd[0]);

	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR support not currently available for this panel\n");
		return;
	}

	if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
		drm_dbg_kms(&dev_priv->drm,
			    "Panel lacks power state control, PSR cannot be enabled\n");
		return;
	}

	intel_dp->psr.sink_support = true;
	intel_dp->psr.sink_sync_latency =
		intel_dp_get_sink_sync_latency(intel_dp);

	if (DISPLAY_VER(dev_priv) >= 9 &&
	    (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_IS_SUPPORTED)) {
		bool y_req = intel_dp->psr_dpcd[1] &
			     DP_PSR2_SU_Y_COORDINATE_REQUIRED;
		bool alpm = intel_dp_get_alpm_status(intel_dp);

		/*
		 * All panels that support PSR version 03h (PSR2 +
		 * Y-coordinate) can handle Y-coordinates in VSC, but we are
		 * only sure that it is going to be used when required by the
		 * panel. This way the panel is capable of doing selective
		 * updates without an AUX frame sync.
		 *
		 * To support PSR version 02h, and PSR version 03h panels
		 * without the Y-coordinate requirement, we would need to
		 * enable GTC first.
		 */
		intel_dp->psr.sink_psr2_support = y_req && alpm;
		drm_dbg_kms(&dev_priv->drm, "PSR2 %ssupported\n",
			    intel_dp->psr.sink_psr2_support ? "" : "not ");

		if (intel_dp->psr.sink_psr2_support) {
			intel_dp->psr.colorimetry_support =
				intel_dp_get_colorimetry_status(intel_dp);
			intel_dp_get_su_granularity(intel_dp);
		}
	}
}

static void intel_psr_enable_sink(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 dpcd_val = DP_PSR_ENABLE;

	/* Enable ALPM at sink for psr2 */
	if (intel_dp->psr.psr2_enabled) {
		drm_dp_dpcd_writeb(&intel_dp->aux, DP_RECEIVER_ALPM_CONFIG,
				   DP_ALPM_ENABLE |
				   DP_ALPM_LOCK_ERROR_IRQ_HPD_ENABLE);

		dpcd_val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
	} else {
		if (intel_dp->psr.link_standby)
			dpcd_val |= DP_PSR_MAIN_LINK_ACTIVE;

		if (DISPLAY_VER(dev_priv) >= 8)
			dpcd_val |= DP_PSR_CRC_VERIFICATION;
	}

	if (intel_dp->psr.req_psr2_sdp_prior_scanline)
		dpcd_val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;

	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, dpcd_val);

	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
}

static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 val = 0;

	if (DISPLAY_VER(dev_priv) >= 11)
		val |= EDP_PSR_TP4_TIME_0US;

	if (dev_priv->params.psr_safest_params) {
		val |= EDP_PSR_TP1_TIME_2500us;
		val |= EDP_PSR_TP2_TP3_TIME_2500us;
		goto check_tp3_sel;
	}

	if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
		val |= EDP_PSR_TP1_TIME_0us;
	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
		val |= EDP_PSR_TP1_TIME_100us;
	else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
		val |= EDP_PSR_TP1_TIME_500us;
	else
		val |= EDP_PSR_TP1_TIME_2500us;

	if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
		val |= EDP_PSR_TP2_TP3_TIME_0us;
	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
		val |= EDP_PSR_TP2_TP3_TIME_100us;
	else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
		val |= EDP_PSR_TP2_TP3_TIME_500us;
	else
		val |= EDP_PSR_TP2_TP3_TIME_2500us;

check_tp3_sel:
	if (intel_dp_source_supports_tps3(dev_priv) &&
	    drm_dp_tps3_supported(intel_dp->dpcd))
		val |= EDP_PSR_TP1_TP3_SEL;
	else
		val |= EDP_PSR_TP1_TP2_SEL;

	return val;
}

static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	int idle_frames;

	/*
	 * Let's use 6 as the minimum to cover all known cases including the
	 * off-by-one issue that HW has in some cases.
	 */
	idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
	idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1);

	if (drm_WARN_ON(&dev_priv->drm, idle_frames > 0xf))
		idle_frames = 0xf;

	return idle_frames;
}

static void hsw_activate_psr1(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 max_sleep_time = 0x1f;
	u32 val = EDP_PSR_ENABLE;

	val |= psr_compute_idle_frames(intel_dp) << EDP_PSR_IDLE_FRAME_SHIFT;

	val |= max_sleep_time << EDP_PSR_MAX_SLEEP_TIME_SHIFT;
	if (IS_HASWELL(dev_priv))
		val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;

	if (intel_dp->psr.link_standby)
		val |= EDP_PSR_LINK_STANDBY;

	val |= intel_psr1_get_tp_time(intel_dp);

	if (DISPLAY_VER(dev_priv) >= 8)
		val |= EDP_PSR_CRC_ENABLE;

	val |= (intel_de_read(dev_priv, EDP_PSR_CTL(intel_dp->psr.transcoder)) &
		EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK);
	intel_de_write(dev_priv, EDP_PSR_CTL(intel_dp->psr.transcoder), val);
}

static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 val = 0;

	if (dev_priv->params.psr_safest_params)
		return EDP_PSR2_TP2_TIME_2500us;

	if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
	    connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
		val |= EDP_PSR2_TP2_TIME_50us;
	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
		val |= EDP_PSR2_TP2_TIME_100us;
	else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
		val |= EDP_PSR2_TP2_TIME_500us;
	else
		val |= EDP_PSR2_TP2_TIME_2500us;

	return val;
}

static void hsw_activate_psr2(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 val = EDP_PSR2_ENABLE;

	val |= psr_compute_idle_frames(intel_dp) << EDP_PSR2_IDLE_FRAME_SHIFT;

	if (!IS_ALDERLAKE_P(dev_priv))
		val |= EDP_SU_TRACK_ENABLE;

	if (DISPLAY_VER(dev_priv) >= 10 && DISPLAY_VER(dev_priv) <= 12)
		val |= EDP_Y_COORDINATE_ENABLE;

	val |= EDP_PSR2_FRAME_BEFORE_SU(max_t(u8, intel_dp->psr.sink_sync_latency + 1, 2));
	val |= intel_psr2_get_tp_time(intel_dp);

	/* Wa_22012278275:adl-p */
	if (IS_ADLP_DISPLAY_STEP(dev_priv, STEP_A0, STEP_E0)) {
		static const u8 map[] = {
			2, /* 5 lines */
			1, /* 6 lines */
			0, /* 7 lines */
			3, /* 8 lines */
			6, /* 9 lines */
			5, /* 10 lines */
			4, /* 11 lines */
			7, /* 12 lines */
		};
		/*
		 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
		 * comments below for more information.
		 */
		u32 tmp, lines = 7;

		val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;

		tmp = map[lines - TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
		tmp = tmp << TGL_EDP_PSR2_IO_BUFFER_WAKE_SHIFT;
		val |= tmp;

		tmp = map[lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
		tmp = tmp << TGL_EDP_PSR2_FAST_WAKE_MIN_SHIFT;
		val |= tmp;
	} else if (DISPLAY_VER(dev_priv) >= 12) {
		/*
		 * TODO: 7 lines of IO_BUFFER_WAKE and FAST_WAKE are the
		 * default values from BSpec. For optimal power consumption,
		 * modes below 4k resolution should decrease IO_BUFFER_WAKE
		 * and FAST_WAKE, and modes above 4K resolution should
		 * increase them.
		 */
		val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
		val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(7);
		val |= TGL_EDP_PSR2_FAST_WAKE(7);
	} else if (DISPLAY_VER(dev_priv) >= 9) {
		val |= EDP_PSR2_IO_BUFFER_WAKE(7);
		val |= EDP_PSR2_FAST_WAKE(7);
	}

	if (intel_dp->psr.req_psr2_sdp_prior_scanline)
		val |= EDP_PSR2_SU_SDP_SCANLINE;

	if (intel_dp->psr.psr2_sel_fetch_enabled) {
		u32 tmp;

		/* Wa_1408330847 */
		if (IS_TGL_DISPLAY_STEP(dev_priv, STEP_A0, STEP_B0))
			intel_de_rmw(dev_priv, CHICKEN_PAR1_1,
				     DIS_RAM_BYPASS_PSR2_MAN_TRACK,
				     DIS_RAM_BYPASS_PSR2_MAN_TRACK);

		tmp = intel_de_read(dev_priv, PSR2_MAN_TRK_CTL(intel_dp->psr.transcoder));
		drm_WARN_ON(&dev_priv->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
	} else if (HAS_PSR2_SEL_FETCH(dev_priv)) {
		intel_de_write(dev_priv,
			       PSR2_MAN_TRK_CTL(intel_dp->psr.transcoder), 0);
	}

	/*
	 * PSR2 HW is incorrectly using EDP_PSR_TP1_TP3_SEL and BSpec
	 * recommends keeping this bit unset while PSR2 is enabled.
	 */
	intel_de_write(dev_priv, EDP_PSR_CTL(intel_dp->psr.transcoder), 0);

	intel_de_write(dev_priv, EDP_PSR2_CTL(intel_dp->psr.transcoder), val);
}

static bool
transcoder_has_psr2(struct drm_i915_private *dev_priv, enum transcoder trans)
{
	if (IS_ALDERLAKE_P(dev_priv))
		return trans == TRANSCODER_A || trans == TRANSCODER_B;
	else if (DISPLAY_VER(dev_priv) >= 12)
		return trans == TRANSCODER_A;
	else
		return trans == TRANSCODER_EDP;
}

static u32 intel_get_frame_time_us(const struct intel_crtc_state *cstate)
{
	if (!cstate || !cstate->hw.active)
		return 0;

	return DIV_ROUND_UP(1000 * 1000,
			    drm_mode_vrefresh(&cstate->hw.adjusted_mode));
}

static void psr2_program_idle_frames(struct intel_dp *intel_dp,
				     u32 idle_frames)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 val;

	idle_frames <<= EDP_PSR2_IDLE_FRAME_SHIFT;
	val = intel_de_read(dev_priv, EDP_PSR2_CTL(intel_dp->psr.transcoder));
	val &= ~EDP_PSR2_IDLE_FRAME_MASK;
	val |= idle_frames;
	intel_de_write(dev_priv, EDP_PSR2_CTL(intel_dp->psr.transcoder), val);
}

static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	psr2_program_idle_frames(intel_dp, 0);
	intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_DC3CO);
}

static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_UPTO_DC6);
	psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
}

static void tgl_dc3co_disable_work(struct work_struct *work)
{
	struct intel_dp *intel_dp =
		container_of(work, typeof(*intel_dp), psr.dc3co_work.work);

	mutex_lock(&intel_dp->psr.lock);
	/* If delayed work is pending, it is not idle */
	if (delayed_work_pending(&intel_dp->psr.dc3co_work))
		goto unlock;

	tgl_psr2_disable_dc3co(intel_dp);
unlock:
	mutex_unlock(&intel_dp->psr.lock);
}

static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
{
	if (!intel_dp->psr.dc3co_exitline)
		return;

	cancel_delayed_work(&intel_dp->psr.dc3co_work);
	/* Before PSR2 exit, disallow DC3CO */
	tgl_psr2_disable_dc3co(intel_dp);
}

static bool
dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
			      struct intel_crtc_state *crtc_state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	enum port port = dig_port->base.port;

	if (IS_ALDERLAKE_P(dev_priv))
		return pipe <= PIPE_B && port <= PORT_B;
	else
		return pipe == PIPE_A && port == PORT_A;
}

static void
tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
				  struct intel_crtc_state *crtc_state)
{
	const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 exit_scanlines;

	/*
	 * FIXME: Due to the changed sequence of activating/deactivating
	 * DC3CO, disable DC3CO until the changed dc3co activating/
	 * deactivating sequence is applied. B.Specs:49196
	 */
	return;

	/*
	 * DMC's DC3CO exit mechanism has an issue with Selective Fetch.
	 * TODO: when the issue is addressed, DC3CO can be enabled with
	 * selective fetch for PSR2.
	 */
	if (crtc_state->enable_psr2_sel_fetch)
		return;

	if (!(dev_priv->dmc.allowed_dc_mask & DC_STATE_EN_DC3CO))
		return;

	if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
		return;

	/* Wa_16011303918:adl-p */
	if (IS_ADLP_DISPLAY_STEP(dev_priv, STEP_A0, STEP_B0))
		return;

	/*
	 * DC3CO Exit time 200us B.Spec 49196
	 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
	 */
	exit_scanlines =
		intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;

	if (drm_WARN_ON(&dev_priv->drm, exit_scanlines > crtc_vdisplay))
		return;

	crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
}

static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
					      struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (!dev_priv->params.enable_psr2_sel_fetch &&
	    intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR2 sel fetch not enabled, disabled by parameter\n");
		return false;
	}

	if (crtc_state->uapi.async_flip) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR2 sel fetch not enabled, async flip enabled\n");
		return false;
	}

	/* Wa_14010254185 Wa_14010103792 */
	if (IS_TGL_DISPLAY_STEP(dev_priv, STEP_A0, STEP_C0)) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR2 sel fetch not enabled, missing the implementation of WAs\n");
		return false;
	}

	return crtc_state->enable_psr2_sel_fetch = true;
}

static bool psr2_granularity_check(struct intel_dp *intel_dp,
				   struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
	const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
	u16 y_granularity = 0;

	/* PSR2 HW only sends full lines, so we only need to validate the width */
	if (crtc_hdisplay % intel_dp->psr.su_w_granularity)
		return false;

	if (crtc_vdisplay % intel_dp->psr.su_y_granularity)
		return false;

	/* HW tracking is only aligned to 4 lines */
	if (!crtc_state->enable_psr2_sel_fetch)
		return intel_dp->psr.su_y_granularity == 4;

	/*
	 * adl_p has 1 line granularity. For other platforms with SW tracking
	 * we can adjust the y coordinates to match the sink requirement if it
	 * is a multiple of 4.
	 */
	if (IS_ALDERLAKE_P(dev_priv))
		y_granularity = intel_dp->psr.su_y_granularity;
	else if (intel_dp->psr.su_y_granularity <= 2)
		y_granularity = 4;
	else if ((intel_dp->psr.su_y_granularity % 4) == 0)
		y_granularity = intel_dp->psr.su_y_granularity;

	if (y_granularity == 0 || crtc_vdisplay % y_granularity)
		return false;

	crtc_state->su_y_granularity = y_granularity;
	return true;
}

static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
							struct intel_crtc_state *crtc_state)
{
	const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 hblank_total, hblank_ns, req_ns;

	hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
	hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);

	/* From spec: (72 / number of lanes) * 1000 / symbol clock frequency MHz */
	req_ns = (72 / crtc_state->lane_count) * 1000 / (crtc_state->port_clock / 1000);

	if ((hblank_ns - req_ns) > 100)
		return true;

	if (DISPLAY_VER(dev_priv) < 13 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
		return false;

	crtc_state->req_psr2_sdp_prior_scanline = true;
	return true;
}

static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
				    struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
	int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
	int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;

	if (!intel_dp->psr.sink_psr2_support)
		return false;

	/* JSL and EHL only support eDP 1.3 */
	if (IS_JSL_EHL(dev_priv)) {
		drm_dbg_kms(&dev_priv->drm, "PSR2 not supported by phy\n");
		return false;
	}

	/* Wa_16011181250 */
	if (IS_ROCKETLAKE(dev_priv) || IS_ALDERLAKE_S(dev_priv) ||
	    IS_DG2(dev_priv)) {
		drm_dbg_kms(&dev_priv->drm, "PSR2 is defeatured for this platform\n");
		return false;
	}

	if (IS_ADLP_DISPLAY_STEP(dev_priv, STEP_A0, STEP_B0)) {
		drm_dbg_kms(&dev_priv->drm, "PSR2 not completely functional in this stepping\n");
		return false;
	}

	if (!transcoder_has_psr2(dev_priv, crtc_state->cpu_transcoder)) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR2 not supported in transcoder %s\n",
			    transcoder_name(crtc_state->cpu_transcoder));
		return false;
	}

	if (!psr2_global_enabled(intel_dp)) {
		drm_dbg_kms(&dev_priv->drm, "PSR2 disabled by flag\n");
		return false;
	}

	/*
	 * DSC and PSR2 cannot be enabled simultaneously. If a requested
	 * resolution requires DSC to be enabled, priority is given to DSC
	 * over PSR2.
	 */
	if (crtc_state->dsc.compression_enable) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR2 cannot be enabled since DSC is enabled\n");
		return false;
	}

	if (crtc_state->crc_enabled) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR2 not enabled because it would inhibit pipe CRC calculation\n");
		return false;
	}

	if (DISPLAY_VER(dev_priv) >= 12) {
		psr_max_h = 5120;
		psr_max_v = 3200;
		max_bpp = 30;
	} else if (DISPLAY_VER(dev_priv) >= 10) {
		psr_max_h = 4096;
		psr_max_v = 2304;
		max_bpp = 24;
	} else if (DISPLAY_VER(dev_priv) == 9) {
		psr_max_h = 3640;
		psr_max_v = 2304;
		max_bpp = 24;
	}

	if (crtc_state->pipe_bpp > max_bpp) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR2 not enabled, pipe bpp %d > max supported %d\n",
			    crtc_state->pipe_bpp, max_bpp);
		return false;
	}

	/* Wa_16011303918:adl-p */
	if (crtc_state->vrr.enable &&
	    IS_ADLP_DISPLAY_STEP(dev_priv, STEP_A0, STEP_B0)) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR2 not enabled, not compatible with HW stepping + VRR\n");
		return false;
	}

	if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR2 not enabled, PSR2 SDP indication does not fit in hblank\n");
		return false;
	}

	if (HAS_PSR2_SEL_FETCH(dev_priv)) {
		if (!intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
		    !HAS_PSR_HW_TRACKING(dev_priv)) {
			drm_dbg_kms(&dev_priv->drm,
				    "PSR2 not enabled, selective fetch not valid and no HW tracking available\n");
			return false;
		}
	}

	/* Wa_2209313811 */
	if (!crtc_state->enable_psr2_sel_fetch &&
	    IS_TGL_DISPLAY_STEP(dev_priv, STEP_A0, STEP_C0)) {
		drm_dbg_kms(&dev_priv->drm, "PSR2 HW tracking is not supported in this Display stepping\n");
		goto unsupported;
	}

	if (!psr2_granularity_check(intel_dp, crtc_state)) {
		drm_dbg_kms(&dev_priv->drm, "PSR2 not enabled, SU granularity not compatible\n");
		goto unsupported;
	}

	if (!crtc_state->enable_psr2_sel_fetch &&
	    (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
			    crtc_hdisplay, crtc_vdisplay,
			    psr_max_h, psr_max_v);
		goto unsupported;
	}

	tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);
	return true;

unsupported:
	crtc_state->enable_psr2_sel_fetch = false;
	return false;
}

void intel_psr_compute_config(struct intel_dp *intel_dp,
			      struct intel_crtc_state *crtc_state,
			      struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	int psr_setup_time;

	/*
	 * Current PSR panels don't work reliably with VRR enabled, so if
	 * VRR is enabled, do not enable PSR.
	 */
	if (crtc_state->vrr.enable)
		return;

	if (!CAN_PSR(intel_dp))
		return;

	if (!psr_global_enabled(intel_dp)) {
		drm_dbg_kms(&dev_priv->drm, "PSR disabled by flag\n");
		return;
	}

	if (intel_dp->psr.sink_not_reliable) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR sink implementation is not reliable\n");
		return;
	}

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR condition failed: Interlaced mode enabled\n");
		return;
	}

	psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
	if (psr_setup_time < 0) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
			    intel_dp->psr_dpcd[1]);
		return;
	}

	if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
	    adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR condition failed: PSR setup time (%d us) too long\n",
			    psr_setup_time);
		return;
	}

	crtc_state->has_psr = true;
	crtc_state->has_psr2 = intel_psr2_config_valid(intel_dp, crtc_state);

	crtc_state->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
	intel_dp_compute_psr_vsc_sdp(intel_dp, crtc_state, conn_state,
				     &crtc_state->psr_vsc);
}

void intel_psr_get_config(struct intel_encoder *encoder,
			  struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_dp *intel_dp;
	u32 val;

	if (!dig_port)
		return;

	intel_dp = &dig_port->dp;
	if (!CAN_PSR(intel_dp))
		return;

	mutex_lock(&intel_dp->psr.lock);
	if (!intel_dp->psr.enabled)
		goto unlock;

	/*
	 * Not possible to read EDP_PSR/PSR2_CTL registers as it is
	 * enabled/disabled because of frontbuffer tracking and others.
	 */
	pipe_config->has_psr = true;
	pipe_config->has_psr2 = intel_dp->psr.psr2_enabled;
	pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);

	if (!intel_dp->psr.psr2_enabled)
		goto unlock;

	if (HAS_PSR2_SEL_FETCH(dev_priv)) {
		val = intel_de_read(dev_priv, PSR2_MAN_TRK_CTL(intel_dp->psr.transcoder));
		if (val & PSR2_MAN_TRK_CTL_ENABLE)
			pipe_config->enable_psr2_sel_fetch = true;
	}

	if (DISPLAY_VER(dev_priv) >= 12) {
		val = intel_de_read(dev_priv, EXITLINE(intel_dp->psr.transcoder));
		val &= EXITLINE_MASK;
		pipe_config->dc3co_exitline = val;
	}
unlock:
	mutex_unlock(&intel_dp->psr.lock);
}

static void intel_psr_activate(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	enum transcoder transcoder = intel_dp->psr.transcoder;

	if (transcoder_has_psr2(dev_priv, transcoder))
		drm_WARN_ON(&dev_priv->drm,
			    intel_de_read(dev_priv, EDP_PSR2_CTL(transcoder)) & EDP_PSR2_ENABLE);

	drm_WARN_ON(&dev_priv->drm,
		    intel_de_read(dev_priv, EDP_PSR_CTL(transcoder)) & EDP_PSR_ENABLE);
	drm_WARN_ON(&dev_priv->drm, intel_dp->psr.active);
	lockdep_assert_held(&intel_dp->psr.lock);

	/* psr1 and psr2 are mutually exclusive. */
	if (intel_dp->psr.psr2_enabled)
		hsw_activate_psr2(intel_dp);
	else
		hsw_activate_psr1(intel_dp);

	intel_dp->psr.active = true;
}

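/*
 * Wa_16013835468: pick the GEN8_CHICKEN_DCPR_1 "latency reporting removed"
 * bit matching the pipe this PSR instance is running on.
 */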
static u32 wa_16013835468_bit_get(struct intel_dp *intel_dp)
{
	switch (intel_dp->psr.pipe) {
	case PIPE_A:
		return LATENCY_REPORTING_REMOVED_PIPE_A;
	case PIPE_B:
		return LATENCY_REPORTING_REMOVED_PIPE_B;
	case PIPE_C:
		return LATENCY_REPORTING_REMOVED_PIPE_C;
	default:
		MISSING_CASE(intel_dp->psr.pipe);
		return 0;
	}
}

static void intel_psr_enable_source(struct intel_dp *intel_dp,
				    const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
	u32 mask;

	/*
	 * Per Spec: Avoid continuous PSR exit by masking MEMUP and HPD. Also
	 * mask LPSP to avoid dependency on other drivers that might block
	 * runtime_pm besides preventing other hw tracking issues now that we
	 * can rely on frontbuffer tracking.
	 */
	mask = EDP_PSR_DEBUG_MASK_MEMUP |
	       EDP_PSR_DEBUG_MASK_HPD |
	       EDP_PSR_DEBUG_MASK_LPSP |
	       EDP_PSR_DEBUG_MASK_MAX_SLEEP;

	if (DISPLAY_VER(dev_priv) < 11)
		mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;

	intel_de_write(dev_priv, EDP_PSR_DEBUG(intel_dp->psr.transcoder),
		       mask);

	psr_irq_control(intel_dp);

	if (intel_dp->psr.dc3co_exitline) {
		u32 val;

		/*
		 * TODO: if future platforms support DC3CO in more than one
		 * transcoder, EXITLINE will need to be unset when disabling PSR
		 */
		val = intel_de_read(dev_priv, EXITLINE(cpu_transcoder));
		val &= ~EXITLINE_MASK;
		val |= intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT;
		val |= EXITLINE_ENABLE;
		intel_de_write(dev_priv, EXITLINE(cpu_transcoder), val);
	}

	if (HAS_PSR_HW_TRACKING(dev_priv) && HAS_PSR2_SEL_FETCH(dev_priv))
		intel_de_rmw(dev_priv, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
			     intel_dp->psr.psr2_sel_fetch_enabled ?
			     IGNORE_PSR2_HW_TRACKING : 0);

	if (intel_dp->psr.psr2_enabled) {
		if (DISPLAY_VER(dev_priv) == 9)
			intel_de_rmw(dev_priv, CHICKEN_TRANS(cpu_transcoder), 0,
				     PSR2_VSC_ENABLE_PROG_HEADER |
				     PSR2_ADD_VERTICAL_LINE_COUNT);

		/*
		 * Wa_16014451276:adlp
		 * All supported adlp panels have 1-based X granularity, this may
		 * cause issues if non-supported panels are used.
		 */
		if (IS_ALDERLAKE_P(dev_priv))
			intel_de_rmw(dev_priv, CHICKEN_TRANS(cpu_transcoder), 0,
				     ADLP_1_BASED_X_GRANULARITY);

		/* Wa_16011168373:adl-p */
		if (IS_ADLP_DISPLAY_STEP(dev_priv, STEP_A0, STEP_B0))
			intel_de_rmw(dev_priv,
				     TRANS_SET_CONTEXT_LATENCY(intel_dp->psr.transcoder),
				     TRANS_SET_CONTEXT_LATENCY_MASK,
				     TRANS_SET_CONTEXT_LATENCY_VALUE(1));

		/* Wa_16012604467:adlp */
		if (IS_ALDERLAKE_P(dev_priv))
			intel_de_rmw(dev_priv, CLKGATE_DIS_MISC, 0,
				     CLKGATE_DIS_MISC_DMASC_GATING_DIS);

		/* Wa_16013835468:tgl[b0+], dg1 */
		if (IS_TGL_DISPLAY_STEP(dev_priv, STEP_B0, STEP_FOREVER) ||
		    IS_DG1(dev_priv)) {
			u16 vtotal, vblank;

			vtotal = crtc_state->uapi.adjusted_mode.crtc_vtotal -
				 crtc_state->uapi.adjusted_mode.crtc_vdisplay;
			vblank = crtc_state->uapi.adjusted_mode.crtc_vblank_end -
				 crtc_state->uapi.adjusted_mode.crtc_vblank_start;
			if (vblank > vtotal)
				intel_de_rmw(dev_priv, GEN8_CHICKEN_DCPR_1, 0,
					     wa_16013835468_bit_get(intel_dp));
		}
	}
}

static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 val;

	/*
	 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
	 * will still keep the error set even after the reset done in the
	 * irq_preinstall and irq_uninstall hooks.
	 * And enabling PSR in this situation causes the screen to freeze the
	 * first time that the PSR HW tries to activate, so let's keep PSR
	 * disabled to avoid any rendering problems.
	 */
	if (DISPLAY_VER(dev_priv) >= 12) {
		val = intel_de_read(dev_priv,
				    TRANS_PSR_IIR(intel_dp->psr.transcoder));
		val &= EDP_PSR_ERROR(0);
	} else {
		val = intel_de_read(dev_priv, EDP_PSR_IIR);
		val &= EDP_PSR_ERROR(intel_dp->psr.transcoder);
	}
	if (val) {
		intel_dp->psr.sink_not_reliable = true;
		drm_dbg_kms(&dev_priv->drm,
			    "PSR interruption error set, not enabling PSR\n");
		return false;
	}

	return true;
}

static void intel_psr_enable_locked(struct intel_dp *intel_dp,
				    const struct intel_crtc_state *crtc_state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	enum phy phy = intel_port_to_phy(dev_priv, dig_port->base.port);
	struct intel_encoder *encoder = &dig_port->base;
	u32 val;

	drm_WARN_ON(&dev_priv->drm, intel_dp->psr.enabled);

	intel_dp->psr.psr2_enabled = crtc_state->has_psr2;
	intel_dp->psr.busy_frontbuffer_bits = 0;
	intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
	intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
	/* DC5/DC6 requires at least 6 idle frames */
	val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
	intel_dp->psr.dc3co_exit_delay = val;
	intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
	intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
	intel_dp->psr.req_psr2_sdp_prior_scanline =
		crtc_state->req_psr2_sdp_prior_scanline;

	if (!psr_interrupt_error_check(intel_dp))
		return;

	drm_dbg_kms(&dev_priv->drm, "Enabling PSR%s\n",
		    intel_dp->psr.psr2_enabled ? "2" : "1");
	intel_write_dp_vsc_sdp(encoder, crtc_state, &crtc_state->psr_vsc);
	intel_snps_phy_update_psr_power_state(dev_priv, phy, true);
	intel_psr_enable_sink(intel_dp);
	intel_psr_enable_source(intel_dp, crtc_state);
	intel_dp->psr.enabled = true;
	intel_dp->psr.paused = false;

	intel_psr_activate(intel_dp);
}

static void intel_psr_exit(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 val;

	if (!intel_dp->psr.active) {
		if (transcoder_has_psr2(dev_priv, intel_dp->psr.transcoder)) {
			val = intel_de_read(dev_priv,
					    EDP_PSR2_CTL(intel_dp->psr.transcoder));
			drm_WARN_ON(&dev_priv->drm, val & EDP_PSR2_ENABLE);
		}

		val = intel_de_read(dev_priv,
				    EDP_PSR_CTL(intel_dp->psr.transcoder));
		drm_WARN_ON(&dev_priv->drm, val & EDP_PSR_ENABLE);

		return;
	}

	if (intel_dp->psr.psr2_enabled) {
		tgl_disallow_dc3co_on_psr2_exit(intel_dp);
		val = intel_de_read(dev_priv,
				    EDP_PSR2_CTL(intel_dp->psr.transcoder));
		drm_WARN_ON(&dev_priv->drm, !(val & EDP_PSR2_ENABLE));
		val &= ~EDP_PSR2_ENABLE;
		intel_de_write(dev_priv,
			       EDP_PSR2_CTL(intel_dp->psr.transcoder), val);
	} else {
		val = intel_de_read(dev_priv,
				    EDP_PSR_CTL(intel_dp->psr.transcoder));
		drm_WARN_ON(&dev_priv->drm, !(val & EDP_PSR_ENABLE));
		val &= ~EDP_PSR_ENABLE;
		intel_de_write(dev_priv,
			       EDP_PSR_CTL(intel_dp->psr.transcoder), val);
	}
	intel_dp->psr.active = false;
}

static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	i915_reg_t psr_status;
	u32 psr_status_mask;

	if (intel_dp->psr.psr2_enabled) {
		psr_status = EDP_PSR2_STATUS(intel_dp->psr.transcoder);
		psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
	} else {
		psr_status = EDP_PSR_STATUS(intel_dp->psr.transcoder);
		psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
	}

	/* Wait till PSR is idle */
	if (intel_de_wait_for_clear(dev_priv, psr_status,
				    psr_status_mask, 2000))
		drm_err(&dev_priv->drm, "Timed out waiting PSR idle state\n");
}

static void intel_psr_disable_locked(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	enum phy phy = intel_port_to_phy(dev_priv,
					 dp_to_dig_port(intel_dp)->base.port);

	lockdep_assert_held(&intel_dp->psr.lock);

	if (!intel_dp->psr.enabled)
		return;

	drm_dbg_kms(&dev_priv->drm, "Disabling PSR%s\n",
		    intel_dp->psr.psr2_enabled ? "2" : "1");

	intel_psr_exit(intel_dp);
	intel_psr_wait_exit_locked(intel_dp);

	/* Wa_1408330847 */
	if (intel_dp->psr.psr2_sel_fetch_enabled &&
	    IS_TGL_DISPLAY_STEP(dev_priv, STEP_A0, STEP_B0))
		intel_de_rmw(dev_priv, CHICKEN_PAR1_1,
			     DIS_RAM_BYPASS_PSR2_MAN_TRACK, 0);

	if (intel_dp->psr.psr2_enabled) {
		/* Wa_16011168373:adl-p */
		if (IS_ADLP_DISPLAY_STEP(dev_priv, STEP_A0, STEP_B0))
			intel_de_rmw(dev_priv,
				     TRANS_SET_CONTEXT_LATENCY(intel_dp->psr.transcoder),
				     TRANS_SET_CONTEXT_LATENCY_MASK, 0);

		/* Wa_16012604467:adlp */
		if (IS_ALDERLAKE_P(dev_priv))
			intel_de_rmw(dev_priv, CLKGATE_DIS_MISC,
				     CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);

		/* Wa_16013835468:tgl[b0+], dg1 */
		if (IS_TGL_DISPLAY_STEP(dev_priv, STEP_B0, STEP_FOREVER) ||
		    IS_DG1(dev_priv))
			intel_de_rmw(dev_priv, GEN8_CHICKEN_DCPR_1,
				     wa_16013835468_bit_get(intel_dp), 0);
	}

	intel_snps_phy_update_psr_power_state(dev_priv, phy, false);

	/* Disable PSR on Sink */
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);

	if (intel_dp->psr.psr2_enabled)
		drm_dp_dpcd_writeb(&intel_dp->aux, DP_RECEIVER_ALPM_CONFIG, 0);

	intel_dp->psr.enabled = false;
	intel_dp->psr.psr2_enabled = false;
	intel_dp->psr.psr2_sel_fetch_enabled = false;
	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
}

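/**
 * intel_psr_disable - Disable PSR
 * @intel_dp: Intel DP
 * @old_crtc_state: old CRTC state
 *
 * This function needs to be called before disabling pipe.
 */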
void intel_psr_disable(struct intel_dp *intel_dp,
		       const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (!old_crtc_state->has_psr)
		return;

	if (drm_WARN_ON(&dev_priv->drm, !CAN_PSR(intel_dp)))
		return;

	mutex_lock(&intel_dp->psr.lock);

	intel_psr_disable_locked(intel_dp);

	mutex_unlock(&intel_dp->psr.lock);
	cancel_work_sync(&intel_dp->psr.work);
	cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
}

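/**
 * intel_psr_pause - Pause PSR
 * @intel_dp: Intel DP
 *
 * This function needs to be called after enabling PSR.
 */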
void intel_psr_pause(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_psr *psr = &intel_dp->psr;

	if (!CAN_PSR(intel_dp))
		return;

	mutex_lock(&psr->lock);

	if (!psr->enabled) {
		mutex_unlock(&psr->lock);
		return;
	}

	/* If we ever hit this, we will need to add refcount to pause/resume */
	drm_WARN_ON(&dev_priv->drm, psr->paused);

	intel_psr_exit(intel_dp);
	intel_psr_wait_exit_locked(intel_dp);
	psr->paused = true;

	mutex_unlock(&psr->lock);

	cancel_work_sync(&psr->work);
	cancel_delayed_work_sync(&psr->dc3co_work);
}

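/**
 * intel_psr_resume - Resume PSR
 * @intel_dp: Intel DP
 *
 * This function needs to be called after pausing PSR.
 */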
void intel_psr_resume(struct intel_dp *intel_dp)
{
	struct intel_psr *psr = &intel_dp->psr;

	if (!CAN_PSR(intel_dp))
		return;

	mutex_lock(&psr->lock);

	if (!psr->paused)
		goto unlock;

	psr->paused = false;
	intel_psr_activate(intel_dp);

unlock:
	mutex_unlock(&psr->lock);
}

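/*
 * The PSR2_MAN_TRK_CTL bit layout changed on ADL-P, so these helpers return
 * the bit values matching the running platform.
 */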
static u32 man_trk_ctl_enable_bit_get(struct drm_i915_private *dev_priv)
{
	return IS_ALDERLAKE_P(dev_priv) ? 0 : PSR2_MAN_TRK_CTL_ENABLE;
}

static u32 man_trk_ctl_single_full_frame_bit_get(struct drm_i915_private *dev_priv)
{
	return IS_ALDERLAKE_P(dev_priv) ?
	       ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
	       PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
}

static u32 man_trk_ctl_partial_frame_bit_get(struct drm_i915_private *dev_priv)
{
	return IS_ALDERLAKE_P(dev_priv) ?
	       ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
	       PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
}

static u32 man_trk_ctl_continuos_full_frame(struct drm_i915_private *dev_priv)
{
	return IS_ALDERLAKE_P(dev_priv) ?
	       ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
	       PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
}

static void psr_force_hw_tracking_exit(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (intel_dp->psr.psr2_sel_fetch_enabled)
		intel_de_write(dev_priv,
			       PSR2_MAN_TRK_CTL(intel_dp->psr.transcoder),
			       man_trk_ctl_enable_bit_get(dev_priv) |
			       man_trk_ctl_partial_frame_bit_get(dev_priv) |
			       man_trk_ctl_single_full_frame_bit_get(dev_priv));

	/*
	 * Display WA #0884: skl+
	 * This documented WA for bxt can be safely applied broadly so we can
	 * force HW tracking to exit PSR instead of disabling and re-enabling,
	 * which would cause missed vblanks and extra overhead.
	 */
	intel_de_write(dev_priv, CURSURFLIVE(intel_dp->psr.pipe), 0);
}

void intel_psr2_disable_plane_sel_fetch(struct intel_plane *plane,
					const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum pipe pipe = plane->pipe;

	if (!crtc_state->enable_psr2_sel_fetch)
		return;

	intel_de_write_fw(dev_priv, PLANE_SEL_FETCH_CTL(pipe, plane->id), 0);
}

void intel_psr2_program_plane_sel_fetch(struct intel_plane *plane,
					const struct intel_crtc_state *crtc_state,
					const struct intel_plane_state *plane_state,
					int color_plane)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum pipe pipe = plane->pipe;
	const struct drm_rect *clip;
	u32 val;
	int x, y;

	if (!crtc_state->enable_psr2_sel_fetch)
		return;

	if (plane->id == PLANE_CURSOR) {
		intel_de_write_fw(dev_priv, PLANE_SEL_FETCH_CTL(pipe, plane->id),
				  plane_state->ctl);
		return;
	}

	clip = &plane_state->psr2_sel_fetch_area;

	val = (clip->y1 + plane_state->uapi.dst.y1) << 16;
	val |= plane_state->uapi.dst.x1;
	intel_de_write_fw(dev_priv, PLANE_SEL_FETCH_POS(pipe, plane->id), val);

	x = plane_state->view.color_plane[color_plane].x;

	/*
	 * From Bspec: UV surface Start Y Position = half of Y plane Y
	 * position.
	 */
	if (!color_plane)
		y = plane_state->view.color_plane[color_plane].y + clip->y1;
	else
		y = plane_state->view.color_plane[color_plane].y + clip->y1 / 2;

	val = y << 16 | x;

	intel_de_write_fw(dev_priv, PLANE_SEL_FETCH_OFFSET(pipe, plane->id),
			  val);

	/* Sizes are 0 based */
	val = (drm_rect_height(clip) - 1) << 16;
	val |= (drm_rect_width(&plane_state->uapi.src) >> 16) - 1;
	intel_de_write_fw(dev_priv, PLANE_SEL_FETCH_SIZE(pipe, plane->id), val);

	intel_de_write_fw(dev_priv, PLANE_SEL_FETCH_CTL(pipe, plane->id),
			  PLANE_SEL_FETCH_CTL_ENABLE);
}

void intel_psr2_program_trans_man_trk_ctl(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	struct intel_encoder *encoder;

	if (!crtc_state->enable_psr2_sel_fetch)
		return;

	for_each_intel_encoder_mask_with_psr(&dev_priv->drm, encoder,
					     crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		lockdep_assert_held(&intel_dp->psr.lock);
		if (intel_dp->psr.psr2_sel_fetch_cff_enabled)
			return;
		break;
	}

	intel_de_write(dev_priv, PSR2_MAN_TRK_CTL(crtc_state->cpu_transcoder),
		       crtc_state->psr2_man_track_ctl);
}

static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
				  struct drm_rect *clip, bool full_update)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 val = man_trk_ctl_enable_bit_get(dev_priv);

	/* SF partial frame enable has to be set even on full update */
	val |= man_trk_ctl_partial_frame_bit_get(dev_priv);

	if (full_update) {
		/*
		 * On a full update just program a single full frame fetch
		 * instead of a selective update region.
		 */
		val |= man_trk_ctl_single_full_frame_bit_get(dev_priv);
		goto exit;
	}

	if (clip->y1 == -1)
		goto exit;

	if (IS_ALDERLAKE_P(dev_priv)) {
		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(clip->y1);
		val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(clip->y2 - 1);
	} else {
		drm_WARN_ON(crtc_state->uapi.crtc->dev, clip->y1 % 4 || clip->y2 % 4);

		val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(clip->y1 / 4 + 1);
		val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(clip->y2 / 4 + 1);
	}
exit:
	crtc_state->psr2_man_track_ctl = val;
}

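/*
 * Extend @overlap_damage_area to also cover @damage_area, after clipping
 * @damage_area against the pipe; only the y range is tracked, as selective
 * fetch always uses full lines.
 */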
static void clip_area_update(struct drm_rect *overlap_damage_area,
			     struct drm_rect *damage_area,
			     struct drm_rect *pipe_src)
{
	if (!drm_rect_intersect(damage_area, pipe_src))
		return;

	if (overlap_damage_area->y1 == -1) {
		overlap_damage_area->y1 = damage_area->y1;
		overlap_damage_area->y2 = damage_area->y2;
		return;
	}

	if (damage_area->y1 < overlap_damage_area->y1)
		overlap_damage_area->y1 = damage_area->y1;

	if (damage_area->y2 > overlap_damage_area->y2)
		overlap_damage_area->y2 = damage_area->y2;
}

static void intel_psr2_sel_fetch_pipe_alignment(const struct intel_crtc_state *crtc_state,
						struct drm_rect *pipe_clip)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	const u16 y_alignment = crtc_state->su_y_granularity;

	pipe_clip->y1 -= pipe_clip->y1 % y_alignment;
	if (pipe_clip->y2 % y_alignment)
		pipe_clip->y2 = ((pipe_clip->y2 / y_alignment) + 1) * y_alignment;

	/* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
	if (IS_ALDERLAKE_P(dev_priv) && crtc_state->dsc.compression_enable)
		drm_warn(&dev_priv->drm, "Missing PSR2 sel fetch alignment with DSC\n");
}

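/*
 * TODO: Not clear how to handle planes with negative position: planes are
 * also not updated if they have a negative X position, so for now do a full
 * update in those cases.
 *
 * Plane scaling and rotation are not supported by selective fetch and both
 * properties can change without a modeset, so they need to be checked at
 * every atomic commit.
 */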
static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
{
	if (plane_state->uapi.dst.y1 < 0 ||
	    plane_state->uapi.dst.x1 < 0 ||
	    plane_state->scaler_id >= 0 ||
	    plane_state->uapi.rotation != DRM_MODE_ROTATE_0)
		return false;

	return true;
}

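/*
 * Check for pipe properties that are not supported by selective fetch.
 *
 * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is
 * executed after intel_psr_compute_config(), so pipe scaling has to be
 * checked here; plane scaling should be checked the same way.
 */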
static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
{
	if (crtc_state->scaler_state.scaler_id >= 0)
		return false;

	return true;
}

int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
				struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
	struct drm_rect pipe_clip = { .x1 = 0, .y1 = -1, .x2 = INT_MAX, .y2 = -1 };
	struct intel_plane_state *new_plane_state, *old_plane_state;
	struct intel_plane *plane;
	bool full_update = false;
	int i, ret;

	if (!crtc_state->enable_psr2_sel_fetch)
		return 0;

	if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
		full_update = true;
		goto skip_sel_fetch_set_loop;
	}

	/*
	 * Calculate the minimal selective fetch area of each plane and
	 * calculate the pipe damaged area.
	 * In the next loop the plane selective fetch area will actually be set
	 * using the whole pipe damaged area.
	 */
	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
					     new_plane_state, i) {
		struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
						      .x2 = INT_MAX };
		struct drm_atomic_helper_damage_iter iter;
		struct drm_rect clip;

		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
			continue;

		if (!new_plane_state->uapi.visible &&
		    !old_plane_state->uapi.visible)
			continue;

		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
			full_update = true;
			break;
		}

		/*
		 * If visibility changed or the plane moved, mark the whole
		 * plane area as damaged as it needs a complete redraw in the
		 * new and old position.
		 */
		if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
		    !drm_rect_equals(&new_plane_state->uapi.dst,
				     &old_plane_state->uapi.dst)) {
			if (old_plane_state->uapi.visible) {
				damaged_area.y1 = old_plane_state->uapi.dst.y1;
				damaged_area.y2 = old_plane_state->uapi.dst.y2;
				clip_area_update(&pipe_clip, &damaged_area,
						 &crtc_state->pipe_src);
			}

			if (new_plane_state->uapi.visible) {
				damaged_area.y1 = new_plane_state->uapi.dst.y1;
				damaged_area.y2 = new_plane_state->uapi.dst.y2;
				clip_area_update(&pipe_clip, &damaged_area,
						 &crtc_state->pipe_src);
			}
			continue;
		} else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
			/* If alpha changed mark the whole plane area as damaged */
			damaged_area.y1 = new_plane_state->uapi.dst.y1;
			damaged_area.y2 = new_plane_state->uapi.dst.y2;
			clip_area_update(&pipe_clip, &damaged_area,
					 &crtc_state->pipe_src);
			continue;
		}

		drm_rect_fp_to_int(&src, &new_plane_state->uapi.src);

		drm_atomic_helper_damage_iter_init(&iter,
						   &old_plane_state->uapi,
						   &new_plane_state->uapi);
		drm_atomic_for_each_plane_damage(&iter, &clip) {
			if (drm_rect_intersect(&clip, &src))
				clip_area_update(&damaged_area, &clip,
						 &crtc_state->pipe_src);
		}

		if (damaged_area.y1 == -1)
			continue;

		damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
		damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
		clip_area_update(&pipe_clip, &damaged_area, &crtc_state->pipe_src);
	}

	/*
	 * TODO: For now we are just using full update in case
	 * selective fetch area calculation fails. To optimize this we
	 * should identify cases where this happens and fix the area
	 * calculation for those.
	 */
	if (pipe_clip.y1 == -1) {
		drm_info_once(&dev_priv->drm,
			      "Selective fetch area calculation failed in pipe %c\n",
			      pipe_name(crtc->pipe));
		full_update = true;
	}

	if (full_update)
		goto skip_sel_fetch_set_loop;

	ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
	if (ret)
		return ret;

	intel_psr2_sel_fetch_pipe_alignment(crtc_state, &pipe_clip);

	/*
	 * Now that we have the pipe damaged area, check if it intersects
	 * with each plane; if it does, set the plane selective fetch area.
	 */
	for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
					     new_plane_state, i) {
		struct drm_rect *sel_fetch_area, inter;
		struct intel_plane *linked = new_plane_state->planar_linked_plane;

		if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc ||
		    !new_plane_state->uapi.visible)
			continue;

		inter = pipe_clip;
		if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
			continue;

		if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
			full_update = true;
			break;
		}

		sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
		sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
		sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
		crtc_state->update_planes |= BIT(plane->id);

		/*
		 * Sel_fetch_area is calculated for UV plane. Use
		 * same area for Y plane as well.
		 */
		if (linked) {
			struct intel_plane_state *linked_new_plane_state;
			struct drm_rect *linked_sel_fetch_area;

			linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
			if (IS_ERR(linked_new_plane_state))
				return PTR_ERR(linked_new_plane_state);

			linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
			linked_sel_fetch_area->y1 = sel_fetch_area->y1;
			linked_sel_fetch_area->y2 = sel_fetch_area->y2;
			crtc_state->update_planes |= BIT(linked->id);
		}
	}

skip_sel_fetch_set_loop:
	psr2_man_trk_ctl_calc(crtc_state, &pipe_clip, full_update);
	return 0;
}

void intel_psr_pre_plane_update(struct intel_atomic_state *state,
				struct intel_crtc *crtc)
{
	struct drm_i915_private *i915 = to_i915(state->base.dev);
	const struct intel_crtc_state *crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	struct intel_encoder *encoder;

	if (!HAS_PSR(i915))
		return;

	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
					     crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
		struct intel_psr *psr = &intel_dp->psr;
		bool needs_to_disable = false;

		mutex_lock(&psr->lock);

		/*
		 * Reasons to disable:
		 * - Modeset
		 * - PSR disabled in new state
		 * - All planes will go inactive
		 * - Changing between PSR versions
		 */
		needs_to_disable |= intel_crtc_needs_modeset(crtc_state);
		needs_to_disable |= !crtc_state->has_psr;
		needs_to_disable |= !crtc_state->active_planes;
		needs_to_disable |= crtc_state->has_psr2 != psr->psr2_enabled;

		if (psr->enabled && needs_to_disable)
			intel_psr_disable_locked(intel_dp);

		mutex_unlock(&psr->lock);
	}
}

static void _intel_psr_post_plane_update(const struct intel_atomic_state *state,
					 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_encoder *encoder;

	if (!crtc_state->has_psr)
		return;

	for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
					     crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
		struct intel_psr *psr = &intel_dp->psr;

		mutex_lock(&psr->lock);

		if (psr->sink_not_reliable)
			goto exit;

		drm_WARN_ON(&dev_priv->drm, psr->enabled && !crtc_state->active_planes);

		/* Only enable if there are active planes */
		if (!psr->enabled && crtc_state->active_planes)
			intel_psr_enable_locked(intel_dp, crtc_state);

		/* Force a PSR exit when enabling CRC to avoid CRC timeouts */
		if (crtc_state->crc_enabled && psr->enabled)
			psr_force_hw_tracking_exit(intel_dp);

exit:
		mutex_unlock(&psr->lock);
	}
}

void intel_psr_post_plane_update(const struct intel_atomic_state *state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc_state *crtc_state;
	struct intel_crtc *crtc;
	int i;

	if (!HAS_PSR(dev_priv))
		return;

	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i)
		_intel_psr_post_plane_update(state, crtc_state);
}

static int _psr2_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/*
	 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
	 * As all higher states have bit 4 of the PSR2 state set, we can just
	 * wait for EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
	 */
	return intel_de_wait_for_clear(dev_priv,
				       EDP_PSR2_STATUS(intel_dp->psr.transcoder),
				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 50);
}

static int _psr1_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/*
	 * From bspec: Panel Self Refresh (BDW+)
	 * Max. time for PSR to idle = inverse of the refresh rate + 6 ms of
	 * exit training time + 1.5 ms of aux channel handshake. 50 ms is
	 * defensive enough to cover everything.
	 */
	return intel_de_wait_for_clear(dev_priv,
				       EDP_PSR_STATUS(intel_dp->psr.transcoder),
				       EDP_PSR_STATUS_STATE_MASK, 50);
}

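/**
 * intel_psr_wait_for_idle_locked - wait for PSR to be ready for a pipe update
 * @new_crtc_state: new CRTC state
 *
 * This function is expected to be called from pipe_update_start() where it is
 * not expected to race with PSR enable or disable.
 */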
void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(new_crtc_state->uapi.crtc->dev);
	struct intel_encoder *encoder;

	if (!new_crtc_state->has_psr)
		return;

	for_each_intel_encoder_mask_with_psr(&dev_priv->drm, encoder,
					     new_crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
		int ret;

		lockdep_assert_held(&intel_dp->psr.lock);

		if (!intel_dp->psr.enabled)
			continue;

		if (intel_dp->psr.psr2_enabled)
			ret = _psr2_ready_for_pipe_update_locked(intel_dp);
		else
			ret = _psr1_ready_for_pipe_update_locked(intel_dp);

		if (ret)
			drm_err(&dev_priv->drm, "PSR wait timed out, atomic update may fail\n");
	}
}

static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	i915_reg_t reg;
	u32 mask;
	int err;

	if (!intel_dp->psr.enabled)
		return false;

	if (intel_dp->psr.psr2_enabled) {
		reg = EDP_PSR2_STATUS(intel_dp->psr.transcoder);
		mask = EDP_PSR2_STATUS_STATE_MASK;
	} else {
		reg = EDP_PSR_STATUS(intel_dp->psr.transcoder);
		mask = EDP_PSR_STATUS_STATE_MASK;
	}

	mutex_unlock(&intel_dp->psr.lock);

	err = intel_de_wait_for_clear(dev_priv, reg, mask, 50);
	if (err)
		drm_err(&dev_priv->drm,
			"Timed out waiting for PSR Idle for re-enable\n");

	/* After the unlocked wait, verify that PSR is still wanted! */
	mutex_lock(&intel_dp->psr.lock);
	return err == 0 && intel_dp->psr.enabled;
}

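/*
 * Force the PSR configuration to be recomputed: mark the mode as changed on
 * every active eDP connector's CRTC and commit the resulting atomic state.
 * Used when the PSR debug mode changes.
 */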
static int intel_psr_fastset_force(struct drm_i915_private *dev_priv)
{
	struct drm_connector_list_iter conn_iter;
	struct drm_device *dev = &dev_priv->drm;
	struct drm_modeset_acquire_ctx ctx;
	struct drm_atomic_state *state;
	struct drm_connector *conn;
	int err = 0;

	state = drm_atomic_state_alloc(dev);
	if (!state)
		return -ENOMEM;

	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
	state->acquire_ctx = &ctx;

retry:
	drm_connector_list_iter_begin(dev, &conn_iter);
	drm_for_each_connector_iter(conn, &conn_iter) {
		struct drm_connector_state *conn_state;
		struct drm_crtc_state *crtc_state;

		if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
			continue;

		conn_state = drm_atomic_get_connector_state(state, conn);
		if (IS_ERR(conn_state)) {
			err = PTR_ERR(conn_state);
			break;
		}

		if (!conn_state->crtc)
			continue;

		crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
		if (IS_ERR(crtc_state)) {
			err = PTR_ERR(crtc_state);
			break;
		}

		/* Mark mode as changed to trigger a pipe->update() */
		crtc_state->mode_changed = true;
	}
	drm_connector_list_iter_end(&conn_iter);

	if (err == 0)
		err = drm_atomic_commit(state);

	if (err == -EDEADLK) {
		drm_atomic_state_clear(state);
		err = drm_modeset_backoff(&ctx);
		if (!err)
			goto retry;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_atomic_state_put(state);

	return err;
}

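/*
 * Set a new PSR debug mode, typically via the i915_edp_psr_debug debugfs
 * file. @val combines one I915_PSR_DEBUG_* mode with the optional
 * I915_PSR_DEBUG_IRQ flag; a mode change forces a fastset so the new
 * configuration takes effect immediately.
 */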
int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
	u32 old_mode;
	int ret;

	if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_MODE_MASK) ||
	    mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
		drm_dbg_kms(&dev_priv->drm, "Invalid debug mask %llx\n", val);
		return -EINVAL;
	}

	ret = mutex_lock_interruptible(&intel_dp->psr.lock);
	if (ret)
		return ret;

	old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
	intel_dp->psr.debug = val;

	/*
	 * Do it right away if it's already enabled, otherwise it will be done
	 * when enabling the source.
	 */
	if (intel_dp->psr.enabled)
		psr_irq_control(intel_dp);

	mutex_unlock(&intel_dp->psr.lock);

	if (old_mode != mode)
		ret = intel_psr_fastset_force(dev_priv);

	return ret;
}

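/*
 * Recover from an AUX error interrupt: disable PSR and flag the sink as not
 * reliable so PSR is not re-enabled on this sink.
 */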
static void intel_psr_handle_irq(struct intel_dp *intel_dp)
{
	struct intel_psr *psr = &intel_dp->psr;

	intel_psr_disable_locked(intel_dp);
	psr->sink_not_reliable = true;
	/* Make sure the sink is awake */
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
}

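/* Deferred work that re-activates PSR once the hardware has gone idle. */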
static void intel_psr_work(struct work_struct *work)
{
	struct intel_dp *intel_dp =
		container_of(work, typeof(*intel_dp), psr.work);

	mutex_lock(&intel_dp->psr.lock);

	if (!intel_dp->psr.enabled)
		goto unlock;

	if (READ_ONCE(intel_dp->psr.irq_aux_error))
		intel_psr_handle_irq(intel_dp);

	/*
	 * We have to make sure PSR is ready for re-enable
	 * otherwise it keeps disabled until next full enable/disable cycle.
	 * PSR might take some time to get fully disabled
	 * and be ready for re-enable.
	 */
	if (!__psr_wait_for_idle_locked(intel_dp))
		goto unlock;

	/*
	 * The delayed work can race with an invalidate hence we need to
	 * recheck. Since psr_flush first clears this and then reschedules we
	 * won't ever miss a flush when bailing out here.
	 */
	if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
		goto unlock;

	intel_psr_activate(intel_dp);
unlock:
	mutex_unlock(&intel_dp->psr.lock);
}

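/*
 * Invalidate handling: with PSR2 selective fetch, switch the hardware to
 * continuous full frame (CFF) fetches instead of fully exiting PSR;
 * otherwise exit PSR until the corresponding flush.
 */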
static void _psr_invalidate_handle(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (intel_dp->psr.psr2_sel_fetch_enabled) {
		u32 val;

		if (intel_dp->psr.psr2_sel_fetch_cff_enabled)
			return;

		val = man_trk_ctl_enable_bit_get(dev_priv) |
		      man_trk_ctl_partial_frame_bit_get(dev_priv) |
		      man_trk_ctl_continuos_full_frame(dev_priv);
		intel_de_write(dev_priv, PSR2_MAN_TRK_CTL(intel_dp->psr.transcoder), val);
		intel_de_write(dev_priv, CURSURFLIVE(intel_dp->psr.pipe), 0);
		intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
	} else {
		intel_psr_exit(intel_dp);
	}
}

/**
 * intel_psr_invalidate - Invalidate PSR
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 * @origin: which operation caused the invalidate
 *
 * Since the hardware frontbuffer tracking has gaps we need to integrate
 * with the software frontbuffer tracking. This function gets called every
 * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
 * disabled if the frontbuffer mask contains a buffer relevant to PSR.
 *
 * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
 */
void intel_psr_invalidate(struct drm_i915_private *dev_priv,
			  unsigned frontbuffer_bits, enum fb_op_origin origin)
{
	struct intel_encoder *encoder;

	if (origin == ORIGIN_FLIP)
		return;

	for_each_intel_encoder_with_psr(&dev_priv->drm, encoder) {
		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		mutex_lock(&intel_dp->psr.lock);
		if (!intel_dp->psr.enabled) {
			mutex_unlock(&intel_dp->psr.lock);
			continue;
		}

		pipe_frontbuffer_bits &=
			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
		intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;

		if (pipe_frontbuffer_bits)
			_psr_invalidate_handle(intel_dp);

		mutex_unlock(&intel_dp->psr.lock);
	}
}

/*
 * Once we completely rely on PSR2 S/W tracking, intel_psr_flush() will
 * invalidate and flush the PSR for ORIGIN_FLIP events as well, so
 * tgl_dc3co_flush_locked() will need to be changed accordingly.
 */
static void
tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
		       enum fb_op_origin origin)
{
	if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.psr2_enabled ||
	    !intel_dp->psr.active)
		return;

	/*
	 * Every frontbuffer flush/flip event pushes the delayed work back;
	 * when the work finally runs, the display has been idle for the
	 * whole DC3CO exit delay.
	 */
	if (!(frontbuffer_bits &
	      INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
		return;

	tgl_psr2_enable_dc3co(intel_dp);
	mod_delayed_work(system_wq, &intel_dp->psr.dc3co_work,
			 intel_dp->psr.dc3co_exit_delay);
}

static void _psr_flush_handle(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (intel_dp->psr.psr2_sel_fetch_enabled) {
		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
			/* can we turn CFF off? */
			if (intel_dp->psr.busy_frontbuffer_bits == 0) {
				u32 val = man_trk_ctl_enable_bit_get(dev_priv) |
					man_trk_ctl_partial_frame_bit_get(dev_priv) |
					man_trk_ctl_single_full_frame_bit_get(dev_priv);

				/*
				 * Switch back from continuous full frame (CFF)
				 * to selective updates: request one single full
				 * frame and clear psr2_sel_fetch_cff_enabled.
				 */
				intel_de_write(dev_priv, PSR2_MAN_TRK_CTL(intel_dp->psr.transcoder),
					       val);
				intel_de_write(dev_priv, CURSURFLIVE(intel_dp->psr.pipe), 0);
				intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
			}
		} else {
			/*
			 * Continuous full frame is disabled, only a single
			 * full frame is required.
			 */
			psr_force_hw_tracking_exit(intel_dp);
		}
	} else {
		psr_force_hw_tracking_exit(intel_dp);

		if (!intel_dp->psr.active && !intel_dp->psr.busy_frontbuffer_bits)
			schedule_work(&intel_dp->psr.work);
	}
}

/**
 * intel_psr_flush - Flush PSR
 * @dev_priv: i915 device
 * @frontbuffer_bits: frontbuffer plane tracking bits
 * @origin: which operation caused the flush
 *
 * Since the hardware frontbuffer tracking has gaps we need to integrate
 * with the software frontbuffer tracking. This function gets called every
 * time frontbuffer rendering has completed and flushed out to memory. PSR
 * can be enabled again if no other frontbuffer relevant to PSR is dirty.
 *
 * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
 */
void intel_psr_flush(struct drm_i915_private *dev_priv,
		     unsigned frontbuffer_bits, enum fb_op_origin origin)
{
	struct intel_encoder *encoder;

	for_each_intel_encoder_with_psr(&dev_priv->drm, encoder) {
		unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		mutex_lock(&intel_dp->psr.lock);
		if (!intel_dp->psr.enabled) {
			mutex_unlock(&intel_dp->psr.lock);
			continue;
		}

		pipe_frontbuffer_bits &=
			INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
		intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;

		/*
		 * If the PSR is paused by an explicit intel_psr_pause() call,
		 * we have to ensure that the PSR is not activated until
		 * intel_psr_resume() is called.
		 */
		if (intel_dp->psr.paused)
			goto unlock;

		if (origin == ORIGIN_FLIP ||
		    (origin == ORIGIN_CURSOR_UPDATE &&
		     !intel_dp->psr.psr2_sel_fetch_enabled)) {
			tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
			goto unlock;
		}

		if (pipe_frontbuffer_bits == 0)
			goto unlock;

		/* By definition flush = invalidate + flush */
		_psr_flush_handle(intel_dp);
unlock:
		mutex_unlock(&intel_dp->psr.lock);
	}
}

/**
 * intel_psr_init - Init basic PSR work and mutex.
 * @intel_dp: Intel DP
 *
 * This function is called after the connector has been initialized
 * (connector initialization handles the connector capabilities) and it
 * initializes the basic PSR state for each DP encoder.
 */
void intel_psr_init(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	if (!HAS_PSR(dev_priv))
		return;

	/*
	 * HSW spec explicitly says PSR is tied to port A.
	 * BDW+ platforms have an instance of the PSR registers per
	 * transcoder, but up to display version 11 PSR is only supported on
	 * the eDP transcoder fed from port A. From display version 12
	 * onwards PSR can be enabled on any port, so only reject non-port-A
	 * setups on the older platforms.
	 */
	if (DISPLAY_VER(dev_priv) < 12 && dig_port->base.port != PORT_A) {
		drm_dbg_kms(&dev_priv->drm,
			    "PSR condition failed: Port not supported\n");
		return;
	}

	intel_dp->psr.source_support = true;

	/* Set link_standby x link_off defaults */
	if (DISPLAY_VER(dev_priv) < 12)
		/* For platforms up to TGL let's respect the VBT again */
		intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link;

	INIT_WORK(&intel_dp->psr.work, intel_psr_work);
	INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
	mutex_init(&intel_dp->psr.lock);
}

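/*
 * Read the sink's PSR status and PSR error status over the AUX channel.
 * Returns 0 on success, with @status masked down to the sink device state.
 */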
static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
					   u8 *status, u8 *error_status)
{
	struct drm_dp_aux *aux = &intel_dp->aux;
	int ret;

	ret = drm_dp_dpcd_readb(aux, DP_PSR_STATUS, status);
	if (ret != 1)
		return ret;

	ret = drm_dp_dpcd_readb(aux, DP_PSR_ERROR_STATUS, error_status);
	if (ret != 1)
		return ret;

	*status = *status & DP_PSR_SINK_STATE_MASK;

	return 0;
}

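/*
 * PSR2 relies on ALPM (Advanced Link Power Management). If the sink reports
 * an ALPM lock timeout error, disable PSR and flag the sink as not reliable.
 */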
static void psr_alpm_check(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct drm_dp_aux *aux = &intel_dp->aux;
	struct intel_psr *psr = &intel_dp->psr;
	u8 val;
	int r;

	if (!psr->psr2_enabled)
		return;

	r = drm_dp_dpcd_readb(aux, DP_RECEIVER_ALPM_STATUS, &val);
	if (r != 1) {
		drm_err(&dev_priv->drm, "Error reading ALPM status\n");
		return;
	}

	if (val & DP_ALPM_LOCK_TIMEOUT_ERROR) {
		intel_psr_disable_locked(intel_dp);
		psr->sink_not_reliable = true;
		drm_dbg_kms(&dev_priv->drm,
			    "ALPM lock timeout error, disabling PSR\n");

		/* Clearing error */
		drm_dp_dpcd_writeb(aux, DP_RECEIVER_ALPM_STATUS, val);
	}
}

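/*
 * The sink sets DP_PSR_CAPS_CHANGE when its PSR capabilities change. The
 * cached capabilities are then stale, so disable PSR and flag the sink as
 * not reliable.
 */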
static void psr_capability_changed_check(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_psr *psr = &intel_dp->psr;
	u8 val;
	int r;

	r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
	if (r != 1) {
		drm_err(&dev_priv->drm, "Error reading DP_PSR_ESI\n");
		return;
	}

	if (val & DP_PSR_CAPS_CHANGE) {
		intel_psr_disable_locked(intel_dp);
		psr->sink_not_reliable = true;
		drm_dbg_kms(&dev_priv->drm,
			    "Sink PSR capability changed, disabling PSR\n");

		/* Clearing it */
		drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
	}
}

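/*
 * Called from the short HPD pulse handler: check the sink's PSR status and
 * error status, and disable PSR if the sink reported an internal error or
 * any of the handled error conditions.
 */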
void intel_psr_short_pulse(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_psr *psr = &intel_dp->psr;
	u8 status, error_status;
	const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
			  DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
			  DP_PSR_LINK_CRC_ERROR;

	if (!CAN_PSR(intel_dp))
		return;

	mutex_lock(&psr->lock);

	if (!psr->enabled)
		goto exit;

	if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
		drm_err(&dev_priv->drm,
			"Error reading PSR status or error status\n");
		goto exit;
	}

	if (status == DP_PSR_SINK_INTERNAL_ERROR || (error_status & errors)) {
		intel_psr_disable_locked(intel_dp);
		psr->sink_not_reliable = true;
	}

	if (status == DP_PSR_SINK_INTERNAL_ERROR && !error_status)
		drm_dbg_kms(&dev_priv->drm,
			    "PSR sink internal error, disabling PSR\n");
	if (error_status & DP_PSR_RFB_STORAGE_ERROR)
		drm_dbg_kms(&dev_priv->drm,
			    "PSR RFB storage error, disabling PSR\n");
	if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
		drm_dbg_kms(&dev_priv->drm,
			    "PSR VSC SDP uncorrectable error, disabling PSR\n");
	if (error_status & DP_PSR_LINK_CRC_ERROR)
		drm_dbg_kms(&dev_priv->drm,
			    "PSR Link CRC error, disabling PSR\n");

	if (error_status & ~errors)
		drm_err(&dev_priv->drm,
			"PSR_ERROR_STATUS unhandled errors %x\n",
			error_status & ~errors);

	/* clear status register */
	drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);

	psr_alpm_check(intel_dp);
	psr_capability_changed_check(intel_dp);

exit:
	mutex_unlock(&psr->lock);
}

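/**
 * intel_psr_enabled - Check if PSR is enabled
 * @intel_dp: Intel DP
 *
 * Returns true if PSR is currently enabled on @intel_dp, taking the PSR
 * lock around the check.
 */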
bool intel_psr_enabled(struct intel_dp *intel_dp)
{
	bool ret;

	if (!CAN_PSR(intel_dp))
		return false;

	mutex_lock(&intel_dp->psr.lock);
	ret = intel_dp->psr.enabled;
	mutex_unlock(&intel_dp->psr.lock);

	return ret;
}

/**
 * intel_psr_lock - grab PSR lock
 * @crtc_state: the CRTC state
 *
 * This is meant to be used around the CRTC update, when vblank-sensitive
 * registers are updated and we need to grab the lock beforehand to avoid
 * vblank evasion.
 */
void intel_psr_lock(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
	struct intel_encoder *encoder;

	if (!crtc_state->has_psr)
		return;

	for_each_intel_encoder_mask_with_psr(&i915->drm, encoder,
					     crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		mutex_lock(&intel_dp->psr.lock);
		break;
	}
}

/**
 * intel_psr_unlock - release PSR lock
 * @crtc_state: the CRTC state
 *
 * Release the PSR lock that was held during pipe update.
 */
void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
	struct intel_encoder *encoder;

	if (!crtc_state->has_psr)
		return;

	for_each_intel_encoder_mask_with_psr(&i915->drm, encoder,
					     crtc_state->uapi.encoder_mask) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		mutex_unlock(&intel_dp->psr.lock);
		break;
	}
}