1 /*
2 * Copyright © 2014 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
22 */
23
24 #include <linux/debugfs.h>
25
26 #include <drm/drm_atomic_helper.h>
27 #include <drm/drm_damage_helper.h>
28 #include <drm/drm_debugfs.h>
29
30 #include "i915_drv.h"
31 #include "i915_reg.h"
32 #include "intel_alpm.h"
33 #include "intel_atomic.h"
34 #include "intel_crtc.h"
35 #include "intel_cursor_regs.h"
36 #include "intel_ddi.h"
37 #include "intel_de.h"
38 #include "intel_display_irq.h"
39 #include "intel_display_types.h"
40 #include "intel_dp.h"
41 #include "intel_dp_aux.h"
42 #include "intel_frontbuffer.h"
43 #include "intel_hdmi.h"
44 #include "intel_psr.h"
45 #include "intel_psr_regs.h"
46 #include "intel_snps_phy.h"
47 #include "intel_vblank.h"
48 #include "skl_universal_plane.h"
49
50 /**
51 * DOC: Panel Self Refresh (PSR/SRD)
52 *
53 * Since Haswell the display controller supports Panel Self-Refresh on display
54 * panels which have a remote frame buffer (RFB) implemented according to the
55 * PSR spec in eDP 1.3. PSR allows the display to go to lower standby states
56 * when the system is idle but the display is on, as it eliminates display
57 * refresh requests to DDR memory completely as long as the frame buffer for
58 * that display is unchanged.
59 *
60 * Panel Self Refresh must be supported by both Hardware (source) and
61 * Panel (sink).
62 *
63 * PSR saves power by caching the framebuffer in the panel RFB, which allows us
64 * to power down the link and memory controller. For DSI panels the same idea
65 * is called "manual mode".
66 *
67 * The implementation uses the hardware-based PSR support which automatically
68 * enters/exits self-refresh mode. The hardware takes care of sending the
69 * required DP aux message and could even retrain the link (that part isn't
70 * enabled yet though). The hardware also keeps track of any frontbuffer
71 * changes to know when to exit self-refresh mode again. Unfortunately that
72 * part doesn't work too well, hence why the i915 PSR support uses the
73 * software frontbuffer tracking to make sure it doesn't miss a screen
74 * update. For this integration intel_psr_invalidate() and intel_psr_flush()
75 * get called by the frontbuffer tracking code. Note that because of locking
76 * issues the self-refresh re-enable code is done from a work queue, which
77 * must be correctly synchronized/cancelled when shutting down the pipe.
78 *
79 * DC3CO (DC3 clock off)
80 *
81 * On top of PSR2, GEN12 adds an intermediate power-saving state that turns
82 * the clock off automatically during the PSR2 idle state.
83 * The smaller overhead of DC3CO entry/exit vs. the overhead of PSR2 deep sleep
84 * entry/exit allows the HW to enter a low-power state even when page flipping
85 * periodically (for instance a 30fps video playback scenario).
86 *
87 * Every time a flip occurs PSR2 will get out of deep sleep state (if it was
88 * in it), DC3CO is enabled and tgl_dc3co_disable_work is scheduled to run
89 * after 6 frames. If no other flip occurs and that work executes, DC3CO is
90 * disabled and PSR2 is configured to enter deep sleep, resetting again in
91 * case of another flip.
92 * Front buffer modifications do not trigger DC3CO activation on purpose as it
93 * would bring a lot of complexity and most modern systems will only
94 * use page flips.
95 */
96
97 /*
98 * Description of PSR mask bits:
99 *
100 * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
101 *
102 * When unmasked (nearly) all display register writes (eg. even
103 * SWF) trigger a PSR exit. Some registers are excluded from this
104 * and they have a more specific mask (described below). On icl+
105 * this bit no longer exists and is effectively always set.
106 *
107 * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+):
108 *
109 * When unmasked (nearly) all pipe/plane register writes
110 * trigger a PSR exit. Some plane registers are excluded from this
111 * and they have a more specific mask (described below).
112 *
113 * CHICKEN_PIPESL_1[11]/SKL_PSR_MASK_PLANE_FLIP (skl+):
114 * PIPE_MISC[23]/PIPE_MISC_PSR_MASK_PRIMARY_FLIP (bdw):
115 * EDP_PSR_DEBUG[23]/EDP_PSR_DEBUG_MASK_PRIMARY_FLIP (hsw):
116 *
117 * When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
118 * SPR_SURF/CURBASE are not included in this and instead are
119 * controlled by PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+) or
120 * EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw/bdw).
121 *
122 * PIPE_MISC[22]/PIPE_MISC_PSR_MASK_SPRITE_ENABLE (bdw):
123 * EDP_PSR_DEBUG[21]/EDP_PSR_DEBUG_MASK_SPRITE_ENABLE (hsw):
124 *
125 * When unmasked PSR is blocked as long as the sprite
126 * plane is enabled. skl+ with their universal planes no
127 * longer have a mask bit like this, and no plane being
128 * enabled blocks PSR.
129 *
130 * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_CURSOR_MOVE (bdw):
131 * EDP_PSR_DEBUG[20]/EDP_PSR_DEBUG_MASK_CURSOR_MOVE (hsw):
132 *
133 * When unmasked CURPOS writes trigger a PSR exit. On skl+
134 * this doesn't exist but CURPOS is included in the
135 * PIPE_MISC_PSR_MASK_PIPE_REG_WRITE mask.
136 *
137 * PIPE_MISC[20]/PIPE_MISC_PSR_MASK_VBLANK_VSYNC_INT (bdw+):
138 * EDP_PSR_DEBUG[19]/EDP_PSR_DEBUG_MASK_VBLANK_VSYNC_INT (hsw):
139 *
140 * When unmasked PSR is blocked as long as vblank and/or vsync
141 * interrupt is unmasked in IMR *and* enabled in IER.
142 *
143 * CHICKEN_TRANS[30]/SKL_UNMASK_VBL_TO_PIPE_IN_SRD (skl+):
144 * CHICKEN_PAR1_1[15]/HSW_MASK_VBL_TO_PIPE_IN_SRD (hsw/bdw):
145 *
146 * Selects whether PSR exit generates an extra vblank before
147 * the first frame is transmitted. Also note the opposite polarity
148 * of the bit on hsw/bdw vs. skl+ (masked==generate the extra vblank,
149 * unmasked==do not generate the extra vblank).
150 *
151 * With DC states enabled the extra vblank happens after link training,
152 * with DC states disabled it happens immediately upon PSR exit trigger.
153 * No idea as of now why there is a difference. HSW/BDW (which don't
154 * even have DMC) always generate it after link training. Go figure.
155 *
156 * Unfortunately CHICKEN_TRANS itself seems to be double buffered
157 * and thus won't latch until the first vblank. So with DC states
158 * enabled the register effectively uses the reset value during DC5
159 * exit+PSR exit sequence, and thus the bit does nothing until
160 * latched by the vblank that it was trying to prevent from being
161 * generated in the first place. So we should probably call this
162 * one a chicken/egg bit instead on skl+.
163 *
164 * In standby mode (as opposed to link-off) this makes no difference
165 * as the timing generator keeps running the whole time generating
166 * normal periodic vblanks.
167 *
168 * WaPsrDPAMaskVBlankInSRD asks us to set the bit on hsw/bdw,
169 * and doing so makes the behaviour match the skl+ reset value.
170 *
171 * CHICKEN_PIPESL_1[0]/BDW_UNMASK_VBL_TO_REGS_IN_SRD (bdw):
172 * CHICKEN_PIPESL_1[15]/HSW_UNMASK_VBL_TO_REGS_IN_SRD (hsw):
173 *
174 * On BDW without this bit no vblanks whatsoever are
175 * generated after PSR exit. On HSW this has no apparent effect.
176 * WaPsrDPRSUnmaskVBlankInSRD says to set this.
177 *
178 * The rest of the bits are more self-explanatory and/or
179 * irrelevant for normal operation.
180 *
181 * Description of the intel_crtc_state variables has_psr, has_panel_replay and
182 * has_sel_update:
183 *
184 * has_psr (alone): PSR1
185 * has_psr + has_sel_update: PSR2
186 * has_psr + has_panel_replay: Panel Replay
187 * has_psr + has_panel_replay + has_sel_update: Panel Replay Selective Update
188 *
189 * Description of the intel_psr variables enabled, panel_replay_enabled and
190 * sel_update_enabled:
191 *
192 * enabled (alone): PSR1
193 * enabled + sel_update_enabled: PSR2
194 * enabled + panel_replay_enabled: Panel Replay
195 * enabled + panel_replay_enabled + sel_update_enabled: Panel Replay SU
196 */
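
/*
 * As an illustration only (a sketch, not code used by the driver), the
 * combinations above translate to a classification along the lines of:
 *
 *	if (!psr->enabled)
 *		mode = "disabled";
 *	else if (psr->panel_replay_enabled)
 *		mode = psr->sel_update_enabled ? "Panel Replay SU" : "Panel Replay";
 *	else
 *		mode = psr->sel_update_enabled ? "PSR2" : "PSR1";
 */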
197
198 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
199 (intel_dp)->psr.source_support)
200
201 bool intel_encoder_can_psr(struct intel_encoder *encoder)
202 {
203 if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST)
204 return CAN_PSR(enc_to_intel_dp(encoder)) ||
205 CAN_PANEL_REPLAY(enc_to_intel_dp(encoder));
206 else
207 return false;
208 }
209
210 bool intel_psr_needs_aux_io_power(struct intel_encoder *encoder,
211 const struct intel_crtc_state *crtc_state)
212 {
213 /*
214 * For PSR/PR modes only eDP requires the AUX IO power to be enabled whenever
215 * the output is enabled. For non-eDP outputs the main link is always
216 * on, hence it doesn't require the HW initiated AUX wake-up signaling used
217 * for eDP.
218 *
219 * TODO:
220 * - Consider leaving AUX IO disabled for eDP / PR as well, in case
221 * the ALPM with main-link off mode is not enabled.
222 * - Leave AUX IO enabled for DP / PR, once support for ALPM with
223 * main-link off mode is added for it and this mode gets enabled.
224 */
225 return intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
226 intel_encoder_can_psr(encoder);
227 }
228
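/*
 * Summary (informal, derived from the helpers below) of how the enable_psr
 * module parameter is interpreted: -1 follows the VBT default on eDP (and
 * allows PSR otherwise), 0 disables PSR, 1 allows PSR1 only, and any other
 * value allows PSR1 + selective update. Panel Replay and SU region early
 * transport are only considered with the default -1.
 */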
229 static bool psr_global_enabled(struct intel_dp *intel_dp)
230 {
231 struct intel_display *display = to_intel_display(intel_dp);
232 struct intel_connector *connector = intel_dp->attached_connector;
233
234 switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
235 case I915_PSR_DEBUG_DEFAULT:
236 if (display->params.enable_psr == -1)
237 return intel_dp_is_edp(intel_dp) ?
238 connector->panel.vbt.psr.enable :
239 true;
240 return display->params.enable_psr;
241 case I915_PSR_DEBUG_DISABLE:
242 return false;
243 default:
244 return true;
245 }
246 }
247
248 static bool psr2_global_enabled(struct intel_dp *intel_dp)
249 {
250 struct intel_display *display = to_intel_display(intel_dp);
251
252 switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
253 case I915_PSR_DEBUG_DISABLE:
254 case I915_PSR_DEBUG_FORCE_PSR1:
255 return false;
256 default:
257 if (display->params.enable_psr == 1)
258 return false;
259 return true;
260 }
261 }
262
263 static bool psr2_su_region_et_global_enabled(struct intel_dp *intel_dp)
264 {
265 struct intel_display *display = to_intel_display(intel_dp);
266
267 if (display->params.enable_psr != -1)
268 return false;
269
270 return true;
271 }
272
273 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
274 {
275 struct intel_display *display = to_intel_display(intel_dp);
276
277 if ((display->params.enable_psr != -1) ||
278 (intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE))
279 return false;
280 return true;
281 }
282
283 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
284 {
285 struct intel_display *display = to_intel_display(intel_dp);
286
287 return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR :
288 EDP_PSR_ERROR(intel_dp->psr.transcoder);
289 }
290
291 static u32 psr_irq_post_exit_bit_get(struct intel_dp *intel_dp)
292 {
293 struct intel_display *display = to_intel_display(intel_dp);
294
295 return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT :
296 EDP_PSR_POST_EXIT(intel_dp->psr.transcoder);
297 }
298
299 static u32 psr_irq_pre_entry_bit_get(struct intel_dp *intel_dp)
300 {
301 struct intel_display *display = to_intel_display(intel_dp);
302
303 return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY :
304 EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder);
305 }
306
307 static u32 psr_irq_mask_get(struct intel_dp *intel_dp)
308 {
309 struct intel_display *display = to_intel_display(intel_dp);
310
311 return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK :
312 EDP_PSR_MASK(intel_dp->psr.transcoder);
313 }
314
315 static i915_reg_t psr_ctl_reg(struct intel_display *display,
316 enum transcoder cpu_transcoder)
317 {
318 if (DISPLAY_VER(display) >= 8)
319 return EDP_PSR_CTL(display, cpu_transcoder);
320 else
321 return HSW_SRD_CTL;
322 }
323
324 static i915_reg_t psr_debug_reg(struct intel_display *display,
325 enum transcoder cpu_transcoder)
326 {
327 if (DISPLAY_VER(display) >= 8)
328 return EDP_PSR_DEBUG(display, cpu_transcoder);
329 else
330 return HSW_SRD_DEBUG;
331 }
332
333 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display,
334 enum transcoder cpu_transcoder)
335 {
336 if (DISPLAY_VER(display) >= 8)
337 return EDP_PSR_PERF_CNT(display, cpu_transcoder);
338 else
339 return HSW_SRD_PERF_CNT;
340 }
341
342 static i915_reg_t psr_status_reg(struct intel_display *display,
343 enum transcoder cpu_transcoder)
344 {
345 if (DISPLAY_VER(display) >= 8)
346 return EDP_PSR_STATUS(display, cpu_transcoder);
347 else
348 return HSW_SRD_STATUS;
349 }
350
351 static i915_reg_t psr_imr_reg(struct intel_display *display,
352 enum transcoder cpu_transcoder)
353 {
354 if (DISPLAY_VER(display) >= 12)
355 return TRANS_PSR_IMR(display, cpu_transcoder);
356 else
357 return EDP_PSR_IMR;
358 }
359
360 static i915_reg_t psr_iir_reg(struct intel_display *display,
361 enum transcoder cpu_transcoder)
362 {
363 if (DISPLAY_VER(display) >= 12)
364 return TRANS_PSR_IIR(display, cpu_transcoder);
365 else
366 return EDP_PSR_IIR;
367 }
368
369 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display,
370 enum transcoder cpu_transcoder)
371 {
372 if (DISPLAY_VER(display) >= 8)
373 return EDP_PSR_AUX_CTL(display, cpu_transcoder);
374 else
375 return HSW_SRD_AUX_CTL;
376 }
377
378 static i915_reg_t psr_aux_data_reg(struct intel_display *display,
379 enum transcoder cpu_transcoder, int i)
380 {
381 if (DISPLAY_VER(display) >= 8)
382 return EDP_PSR_AUX_DATA(display, cpu_transcoder, i);
383 else
384 return HSW_SRD_AUX_DATA(i);
385 }
386
387 static void psr_irq_control(struct intel_dp *intel_dp)
388 {
389 struct intel_display *display = to_intel_display(intel_dp);
390 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
391 u32 mask;
392
393 if (intel_dp->psr.panel_replay_enabled)
394 return;
395
396 mask = psr_irq_psr_error_bit_get(intel_dp);
397 if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
398 mask |= psr_irq_post_exit_bit_get(intel_dp) |
399 psr_irq_pre_entry_bit_get(intel_dp);
400
401 intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
402 psr_irq_mask_get(intel_dp), ~mask);
403 }
404
405 static void psr_event_print(struct intel_display *display,
406 u32 val, bool sel_update_enabled)
407 {
408 drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val);
409 if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
410 drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n");
411 if ((val & PSR_EVENT_PSR2_DISABLED) && sel_update_enabled)
412 drm_dbg_kms(display->drm, "\tPSR2 disabled\n");
413 if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
414 drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n");
415 if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
416 drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n");
417 if (val & PSR_EVENT_GRAPHICS_RESET)
418 drm_dbg_kms(display->drm, "\tGraphics reset\n");
419 if (val & PSR_EVENT_PCH_INTERRUPT)
420 drm_dbg_kms(display->drm, "\tPCH interrupt\n");
421 if (val & PSR_EVENT_MEMORY_UP)
422 drm_dbg_kms(display->drm, "\tMemory up\n");
423 if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
424 drm_dbg_kms(display->drm, "\tFront buffer modification\n");
425 if (val & PSR_EVENT_WD_TIMER_EXPIRE)
426 drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n");
427 if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
428 drm_dbg_kms(display->drm, "\tPIPE registers updated\n");
429 if (val & PSR_EVENT_REGISTER_UPDATE)
430 drm_dbg_kms(display->drm, "\tRegister updated\n");
431 if (val & PSR_EVENT_HDCP_ENABLE)
432 drm_dbg_kms(display->drm, "\tHDCP enabled\n");
433 if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
434 drm_dbg_kms(display->drm, "\tKVMR session enabled\n");
435 if (val & PSR_EVENT_VBI_ENABLE)
436 drm_dbg_kms(display->drm, "\tVBI enabled\n");
437 if (val & PSR_EVENT_LPSP_MODE_EXIT)
438 drm_dbg_kms(display->drm, "\tLPSP mode exited\n");
439 if ((val & PSR_EVENT_PSR_DISABLE) && !sel_update_enabled)
440 drm_dbg_kms(display->drm, "\tPSR disabled\n");
441 }
442
443 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
444 {
445 struct intel_display *display = to_intel_display(intel_dp);
446 struct drm_i915_private *dev_priv = to_i915(display->drm);
447 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
448 ktime_t time_ns = ktime_get();
449
450 if (psr_iir & psr_irq_pre_entry_bit_get(intel_dp)) {
451 intel_dp->psr.last_entry_attempt = time_ns;
452 drm_dbg_kms(display->drm,
453 "[transcoder %s] PSR entry attempt in 2 vblanks\n",
454 transcoder_name(cpu_transcoder));
455 }
456
457 if (psr_iir & psr_irq_post_exit_bit_get(intel_dp)) {
458 intel_dp->psr.last_exit = time_ns;
459 drm_dbg_kms(display->drm,
460 "[transcoder %s] PSR exit completed\n",
461 transcoder_name(cpu_transcoder));
462
463 if (DISPLAY_VER(display) >= 9) {
464 u32 val;
465
466 val = intel_de_rmw(dev_priv,
467 PSR_EVENT(dev_priv, cpu_transcoder),
468 0, 0);
469
470 psr_event_print(display, val, intel_dp->psr.sel_update_enabled);
471 }
472 }
473
474 if (psr_iir & psr_irq_psr_error_bit_get(intel_dp)) {
475 drm_warn(display->drm, "[transcoder %s] PSR aux error\n",
476 transcoder_name(cpu_transcoder));
477
478 intel_dp->psr.irq_aux_error = true;
479
480 /*
481 * If this interrupt is not masked it will keep firing
482 * so fast that it prevents the scheduled work from
483 * running.
484 * Also, after a PSR error we don't want to arm PSR
485 * again, so we don't care about unmasking the interrupt
486 * or clearing irq_aux_error.
487 */
488 intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
489 0, psr_irq_psr_error_bit_get(intel_dp));
490
491 queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
492 }
493 }
494
495 static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
496 {
497 struct intel_display *display = to_intel_display(intel_dp);
498 u8 val = 8; /* assume the worst if we can't read the value */
499
500 if (drm_dp_dpcd_readb(&intel_dp->aux,
501 DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
502 val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
503 else
504 drm_dbg_kms(display->drm,
505 "Unable to get sink synchronization latency, assuming 8 frames\n");
506 return val;
507 }
508
509 static u8 intel_dp_get_su_capability(struct intel_dp *intel_dp)
510 {
511 u8 su_capability = 0;
512
513 if (intel_dp->psr.sink_panel_replay_su_support)
514 drm_dp_dpcd_readb(&intel_dp->aux,
515 DP_PANEL_PANEL_REPLAY_CAPABILITY,
516 &su_capability);
517 else
518 su_capability = intel_dp->psr_dpcd[1];
519
520 return su_capability;
521 }
522
523 static unsigned int
524 intel_dp_get_su_x_granularity_offset(struct intel_dp *intel_dp)
525 {
526 return intel_dp->psr.sink_panel_replay_su_support ?
527 DP_PANEL_PANEL_REPLAY_X_GRANULARITY :
528 DP_PSR2_SU_X_GRANULARITY;
529 }
530
531 static unsigned int
532 intel_dp_get_su_y_granularity_offset(struct intel_dp *intel_dp)
533 {
534 return intel_dp->psr.sink_panel_replay_su_support ?
535 DP_PANEL_PANEL_REPLAY_Y_GRANULARITY :
536 DP_PSR2_SU_Y_GRANULARITY;
537 }
538
539 /*
540 * Note: Bits related to granularity are the same in the panel replay and PSR
541 * registers. Rely on the PSR definitions for these "common" bits.
542 */
543 static void intel_dp_get_su_granularity(struct intel_dp *intel_dp)
544 {
545 struct intel_display *display = to_intel_display(intel_dp);
546 ssize_t r;
547 u16 w;
548 u8 y;
549
550 /*
551 * TODO: Do we need to take into account panels supporting both PSR and
552 * Panel Replay?
553 */
554
555 /*
556 * If the sink doesn't have specific granularity requirements, set the
557 * legacy ones.
558 */
559 if (!(intel_dp_get_su_capability(intel_dp) &
560 DP_PSR2_SU_GRANULARITY_REQUIRED)) {
561 /* As PSR2 HW sends full lines, we do not care about x granularity */
562 w = 4;
563 y = 4;
564 goto exit;
565 }
566
567 r = drm_dp_dpcd_read(&intel_dp->aux,
568 intel_dp_get_su_x_granularity_offset(intel_dp),
569 &w, 2);
570 if (r != 2)
571 drm_dbg_kms(display->drm,
572 "Unable to read selective update x granularity\n");
573 /*
574 * Spec says that if the value read is 0 the default granularity should
575 * be used instead.
576 */
577 if (r != 2 || w == 0)
578 w = 4;
579
580 r = drm_dp_dpcd_read(&intel_dp->aux,
581 intel_dp_get_su_y_granularity_offset(intel_dp),
582 &y, 1);
583 if (r != 1) {
584 drm_dbg_kms(display->drm,
585 "Unable to read selective update y granularity\n");
586 y = 4;
587 }
588 if (y == 0)
589 y = 1;
590
591 exit:
592 intel_dp->psr.su_w_granularity = w;
593 intel_dp->psr.su_y_granularity = y;
594 }
595
596 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
597 {
598 struct intel_display *display = to_intel_display(intel_dp);
599
600 if (intel_dp_is_edp(intel_dp)) {
601 if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
602 drm_dbg_kms(display->drm,
603 "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n");
604 return;
605 }
606
607 if (!(intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
608 drm_dbg_kms(display->drm,
609 "Panel doesn't support early transport, eDP Panel Replay not possible\n");
610 return;
611 }
612 }
613
614 intel_dp->psr.sink_panel_replay_support = true;
615
616 if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SU_SUPPORT)
617 intel_dp->psr.sink_panel_replay_su_support = true;
618
619 drm_dbg_kms(display->drm,
620 "Panel replay %sis supported by panel\n",
621 intel_dp->psr.sink_panel_replay_su_support ?
622 "selective_update " : "");
623 }
624
625 static void _psr_init_dpcd(struct intel_dp *intel_dp)
626 {
627 struct intel_display *display = to_intel_display(intel_dp);
628
629 drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
630 intel_dp->psr_dpcd[0]);
631
632 if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
633 drm_dbg_kms(display->drm,
634 "PSR support not currently available for this panel\n");
635 return;
636 }
637
638 if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
639 drm_dbg_kms(display->drm,
640 "Panel lacks power state control, PSR cannot be enabled\n");
641 return;
642 }
643
644 intel_dp->psr.sink_support = true;
645 intel_dp->psr.sink_sync_latency =
646 intel_dp_get_sink_sync_latency(intel_dp);
647
648 if (DISPLAY_VER(display) >= 9 &&
649 intel_dp->psr_dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) {
650 bool y_req = intel_dp->psr_dpcd[1] &
651 DP_PSR2_SU_Y_COORDINATE_REQUIRED;
652
653 /*
654 * All panels that support PSR version 03h (PSR2 +
655 * Y-coordinate) can handle Y-coordinates in the VSC but we are
656 * only sure that it is going to be used when required by the
657 * panel. This way the panel is capable of doing selective
658 * updates without an AUX frame sync.
659 *
660 * To support PSR version 02h and PSR version 03h panels
661 * without the Y-coordinate requirement we would need to
662 * enable GTC first.
663 */
664 intel_dp->psr.sink_psr2_support = y_req &&
665 intel_alpm_aux_wake_supported(intel_dp);
666 drm_dbg_kms(display->drm, "PSR2 %ssupported\n",
667 intel_dp->psr.sink_psr2_support ? "" : "not ");
668 }
669 }
670
671 void intel_psr_init_dpcd(struct intel_dp *intel_dp)
672 {
673 drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
674 sizeof(intel_dp->psr_dpcd));
675 drm_dp_dpcd_readb(&intel_dp->aux, DP_PANEL_REPLAY_CAP,
676 &intel_dp->pr_dpcd);
677
678 if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SUPPORT)
679 _panel_replay_init_dpcd(intel_dp);
680
681 if (intel_dp->psr_dpcd[0])
682 _psr_init_dpcd(intel_dp);
683
684 if (intel_dp->psr.sink_psr2_support ||
685 intel_dp->psr.sink_panel_replay_su_support)
686 intel_dp_get_su_granularity(intel_dp);
687 }
688
689 static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
690 {
691 struct intel_display *display = to_intel_display(intel_dp);
692 struct drm_i915_private *dev_priv = to_i915(display->drm);
693 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
694 u32 aux_clock_divider, aux_ctl;
695 /* write DP_SET_POWER=D0 */
696 static const u8 aux_msg[] = {
697 [0] = (DP_AUX_NATIVE_WRITE << 4) | ((DP_SET_POWER >> 16) & 0xf),
698 [1] = (DP_SET_POWER >> 8) & 0xff,
699 [2] = DP_SET_POWER & 0xff,
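/* DP AUX encodes the request length as (number of data bytes - 1), hence 1 - 1 below */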
700 [3] = 1 - 1,
701 [4] = DP_SET_POWER_D0,
702 };
703 int i;
704
705 BUILD_BUG_ON(sizeof(aux_msg) > 20);
706 for (i = 0; i < sizeof(aux_msg); i += 4)
707 intel_de_write(dev_priv,
708 psr_aux_data_reg(display, cpu_transcoder, i >> 2),
709 intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i));
710
711 aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
712
713 /* Start with bits set for DDI_AUX_CTL register */
714 aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
715 aux_clock_divider);
716
717 /* Select only valid bits for SRD_AUX_CTL */
718 aux_ctl &= EDP_PSR_AUX_CTL_TIME_OUT_MASK |
719 EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
720 EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
721 EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;
722
723 intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder),
724 aux_ctl);
725 }
726
727 static bool psr2_su_region_et_valid(struct intel_dp *intel_dp, bool panel_replay)
728 {
729 struct intel_display *display = to_intel_display(intel_dp);
730
731 if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) ||
732 intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE)
733 return false;
734
735 return panel_replay ?
736 intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
737 intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED &&
738 psr2_su_region_et_global_enabled(intel_dp);
739 }
740
741 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
742 const struct intel_crtc_state *crtc_state)
743 {
744 u8 val = DP_PANEL_REPLAY_ENABLE |
745 DP_PANEL_REPLAY_VSC_SDP_CRC_EN |
746 DP_PANEL_REPLAY_UNRECOVERABLE_ERROR_EN |
747 DP_PANEL_REPLAY_RFB_STORAGE_ERROR_EN |
748 DP_PANEL_REPLAY_ACTIVE_FRAME_CRC_ERROR_EN;
749 u8 panel_replay_config2 = DP_PANEL_REPLAY_CRC_VERIFICATION;
750
751 if (crtc_state->has_sel_update)
752 val |= DP_PANEL_REPLAY_SU_ENABLE;
753
754 if (crtc_state->enable_psr2_su_region_et)
755 val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
756
757 if (crtc_state->req_psr2_sdp_prior_scanline)
758 panel_replay_config2 |=
759 DP_PANEL_REPLAY_SU_REGION_SCANLINE_CAPTURE;
760
761 drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val);
762
763 drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2,
764 panel_replay_config2);
765 }
766
767 static void _psr_enable_sink(struct intel_dp *intel_dp,
768 const struct intel_crtc_state *crtc_state)
769 {
770 struct intel_display *display = to_intel_display(intel_dp);
771 u8 val = 0;
772
773 if (crtc_state->has_sel_update) {
774 val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
775 } else {
776 if (intel_dp->psr.link_standby)
777 val |= DP_PSR_MAIN_LINK_ACTIVE;
778
779 if (DISPLAY_VER(display) >= 8)
780 val |= DP_PSR_CRC_VERIFICATION;
781 }
782
783 if (crtc_state->req_psr2_sdp_prior_scanline)
784 val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;
785
786 if (crtc_state->enable_psr2_su_region_et)
787 val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
788
789 if (intel_dp->psr.entry_setup_frames > 0)
790 val |= DP_PSR_FRAME_CAPTURE;
791 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
792
793 val |= DP_PSR_ENABLE;
794 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
795 }
796
797 static void intel_psr_enable_sink_alpm(struct intel_dp *intel_dp,
798 const struct intel_crtc_state *crtc_state)
799 {
800 u8 val;
801
802 /*
803 * eDP Panel Replay always uses ALPM.
804 * PSR2 uses ALPM but PSR1 doesn't.
805 */
806 if (!intel_dp_is_edp(intel_dp) || (!crtc_state->has_panel_replay &&
807 !crtc_state->has_sel_update))
808 return;
809
810 val = DP_ALPM_ENABLE | DP_ALPM_LOCK_ERROR_IRQ_HPD_ENABLE;
811
812 if (crtc_state->has_panel_replay)
813 val |= DP_ALPM_MODE_AUX_LESS;
814
815 drm_dp_dpcd_writeb(&intel_dp->aux, DP_RECEIVER_ALPM_CONFIG, val);
816 }
817
818 static void intel_psr_enable_sink(struct intel_dp *intel_dp,
819 const struct intel_crtc_state *crtc_state)
820 {
821 intel_psr_enable_sink_alpm(intel_dp, crtc_state);
822
823 crtc_state->has_panel_replay ?
824 _panel_replay_enable_sink(intel_dp, crtc_state) :
825 _psr_enable_sink(intel_dp, crtc_state);
826
827 if (intel_dp_is_edp(intel_dp))
828 drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
829 }
830
831 void intel_psr_panel_replay_enable_sink(struct intel_dp *intel_dp)
832 {
833 if (CAN_PANEL_REPLAY(intel_dp))
834 drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG,
835 DP_PANEL_REPLAY_ENABLE);
836 }
837
838 static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
839 {
840 struct intel_display *display = to_intel_display(intel_dp);
841 struct intel_connector *connector = intel_dp->attached_connector;
842 struct drm_i915_private *dev_priv = to_i915(display->drm);
843 u32 val = 0;
844
845 if (DISPLAY_VER(display) >= 11)
846 val |= EDP_PSR_TP4_TIME_0us;
847
848 if (display->params.psr_safest_params) {
849 val |= EDP_PSR_TP1_TIME_2500us;
850 val |= EDP_PSR_TP2_TP3_TIME_2500us;
851 goto check_tp3_sel;
852 }
853
854 if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
855 val |= EDP_PSR_TP1_TIME_0us;
856 else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
857 val |= EDP_PSR_TP1_TIME_100us;
858 else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
859 val |= EDP_PSR_TP1_TIME_500us;
860 else
861 val |= EDP_PSR_TP1_TIME_2500us;
862
863 if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
864 val |= EDP_PSR_TP2_TP3_TIME_0us;
865 else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
866 val |= EDP_PSR_TP2_TP3_TIME_100us;
867 else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
868 val |= EDP_PSR_TP2_TP3_TIME_500us;
869 else
870 val |= EDP_PSR_TP2_TP3_TIME_2500us;
871
872 /*
873 * WA 0479: hsw,bdw
874 * "Do not skip both TP1 and TP2/TP3"
875 */
876 if (DISPLAY_VER(dev_priv) < 9 &&
877 connector->panel.vbt.psr.tp1_wakeup_time_us == 0 &&
878 connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
879 val |= EDP_PSR_TP2_TP3_TIME_100us;
880
881 check_tp3_sel:
882 if (intel_dp_source_supports_tps3(display) &&
883 drm_dp_tps3_supported(intel_dp->dpcd))
884 val |= EDP_PSR_TP_TP1_TP3;
885 else
886 val |= EDP_PSR_TP_TP1_TP2;
887
888 return val;
889 }
890
891 static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
892 {
893 struct intel_display *display = to_intel_display(intel_dp);
894 struct intel_connector *connector = intel_dp->attached_connector;
895 int idle_frames;
896
897 /* Let's use 6 as the minimum to cover all known cases including the
898 * off-by-one issue that HW has in some cases.
899 */
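/*
 * Worked example with hypothetical numbers: a VBT value of 2 and a
 * sink_sync_latency of 6 gives max(6, 2) = 6, then max(6, 6 + 1) = 7
 * idle frames.
 */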
900 idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
901 idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1);
902
903 if (drm_WARN_ON(display->drm, idle_frames > 0xf))
904 idle_frames = 0xf;
905
906 return idle_frames;
907 }
908
909 static void hsw_activate_psr1(struct intel_dp *intel_dp)
910 {
911 struct intel_display *display = to_intel_display(intel_dp);
912 struct drm_i915_private *dev_priv = to_i915(display->drm);
913 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
914 u32 max_sleep_time = 0x1f;
915 u32 val = EDP_PSR_ENABLE;
916
917 val |= EDP_PSR_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
918
919 if (DISPLAY_VER(display) < 20)
920 val |= EDP_PSR_MAX_SLEEP_TIME(max_sleep_time);
921
922 if (IS_HASWELL(dev_priv))
923 val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;
924
925 if (intel_dp->psr.link_standby)
926 val |= EDP_PSR_LINK_STANDBY;
927
928 val |= intel_psr1_get_tp_time(intel_dp);
929
930 if (DISPLAY_VER(display) >= 8)
931 val |= EDP_PSR_CRC_ENABLE;
932
933 if (DISPLAY_VER(display) >= 20)
934 val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
935
936 intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
937 ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);
938 }
939
940 static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
941 {
942 struct intel_display *display = to_intel_display(intel_dp);
943 struct intel_connector *connector = intel_dp->attached_connector;
944 u32 val = 0;
945
946 if (display->params.psr_safest_params)
947 return EDP_PSR2_TP2_TIME_2500us;
948
949 if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
950 connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
951 val |= EDP_PSR2_TP2_TIME_50us;
952 else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
953 val |= EDP_PSR2_TP2_TIME_100us;
954 else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
955 val |= EDP_PSR2_TP2_TIME_500us;
956 else
957 val |= EDP_PSR2_TP2_TIME_2500us;
958
959 return val;
960 }
961
962 static int psr2_block_count_lines(struct intel_dp *intel_dp)
963 {
964 return intel_dp->alpm_parameters.io_wake_lines < 9 &&
965 intel_dp->alpm_parameters.fast_wake_lines < 9 ? 8 : 12;
966 }
967
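/*
 * Dividing the 8 or 12 lines above by 4 yields the block count (2 or 3) that
 * hsw_activate_psr2() maps to TGL_EDP_PSR2_BLOCK_COUNT_NUM_2/3.
 */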
968 static int psr2_block_count(struct intel_dp *intel_dp)
969 {
970 return psr2_block_count_lines(intel_dp) / 4;
971 }
972
973 static u8 frames_before_su_entry(struct intel_dp *intel_dp)
974 {
975 u8 frames_before_su_entry;
976
977 frames_before_su_entry = max_t(u8,
978 intel_dp->psr.sink_sync_latency + 1,
979 2);
980
981 /* Entry setup frames must be at least 1 less than frames before SU entry */
982 if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry)
983 frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1;
984
985 return frames_before_su_entry;
986 }
987
988 static void dg2_activate_panel_replay(struct intel_dp *intel_dp)
989 {
990 struct intel_display *display = to_intel_display(intel_dp);
991 struct intel_psr *psr = &intel_dp->psr;
992 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
993
994 if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) {
995 u32 val = psr->su_region_et_enabled ?
996 LNL_EDP_PSR2_SU_REGION_ET_ENABLE : 0;
997
998 if (intel_dp->psr.req_psr2_sdp_prior_scanline)
999 val |= EDP_PSR2_SU_SDP_SCANLINE;
1000
1001 intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder),
1002 val);
1003 }
1004
1005 intel_de_rmw(display,
1006 PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder),
1007 0, ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME);
1008
1009 intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0,
1010 TRANS_DP2_PANEL_REPLAY_ENABLE);
1011 }
1012
1013 static void hsw_activate_psr2(struct intel_dp *intel_dp)
1014 {
1015 struct intel_display *display = to_intel_display(intel_dp);
1016 struct drm_i915_private *dev_priv = to_i915(display->drm);
1017 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1018 u32 val = EDP_PSR2_ENABLE;
1019 u32 psr_val = 0;
1020
1021 val |= EDP_PSR2_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
1022
1023 if (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv))
1024 val |= EDP_SU_TRACK_ENABLE;
1025
1026 if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13)
1027 val |= EDP_Y_COORDINATE_ENABLE;
1028
1029 val |= EDP_PSR2_FRAME_BEFORE_SU(frames_before_su_entry(intel_dp));
1030
1031 val |= intel_psr2_get_tp_time(intel_dp);
1032
1033 if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) {
1034 if (psr2_block_count(intel_dp) > 2)
1035 val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_3;
1036 else
1037 val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
1038 }
1039
1040 /* Wa_22012278275:adl-p */
1041 if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) {
1042 static const u8 map[] = {
1043 2, /* 5 lines */
1044 1, /* 6 lines */
1045 0, /* 7 lines */
1046 3, /* 8 lines */
1047 6, /* 9 lines */
1048 5, /* 10 lines */
1049 4, /* 11 lines */
1050 7, /* 12 lines */
1051 };
1052 /*
1053 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
1054 * comments below for more information
1055 */
1056 int tmp;
1057
1058 tmp = map[intel_dp->alpm_parameters.io_wake_lines -
1059 TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
1060 val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(tmp + TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES);
1061
1062 tmp = map[intel_dp->alpm_parameters.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
1063 val |= TGL_EDP_PSR2_FAST_WAKE(tmp + TGL_EDP_PSR2_FAST_WAKE_MIN_LINES);
1064 } else if (DISPLAY_VER(display) >= 20) {
1065 val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1066 } else if (DISPLAY_VER(display) >= 12) {
1067 val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1068 val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1069 } else if (DISPLAY_VER(display) >= 9) {
1070 val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1071 val |= EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1072 }
1073
1074 if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1075 val |= EDP_PSR2_SU_SDP_SCANLINE;
1076
1077 if (DISPLAY_VER(display) >= 20)
1078 psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
1079
1080 if (intel_dp->psr.psr2_sel_fetch_enabled) {
1081 u32 tmp;
1082
1083 tmp = intel_de_read(display,
1084 PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1085 drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
1086 } else if (HAS_PSR2_SEL_FETCH(display)) {
1087 intel_de_write(display,
1088 PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0);
1089 }
1090
1091 if (intel_dp->psr.su_region_et_enabled)
1092 val |= LNL_EDP_PSR2_SU_REGION_ET_ENABLE;
1093
1094 /*
1095 * PSR2 HW is incorrectly using EDP_PSR_TP1_TP3_SEL and BSpec
1096 * recommends keeping this bit unset while PSR2 is enabled.
1097 */
1098 intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val);
1099
1100 intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val);
1101 }
1102
1103 static bool
1104 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder)
1105 {
1106 struct drm_i915_private *dev_priv = to_i915(display->drm);
1107
1108 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1109 return cpu_transcoder == TRANSCODER_A || cpu_transcoder == TRANSCODER_B;
1110 else if (DISPLAY_VER(display) >= 12)
1111 return cpu_transcoder == TRANSCODER_A;
1112 else if (DISPLAY_VER(display) >= 9)
1113 return cpu_transcoder == TRANSCODER_EDP;
1114 else
1115 return false;
1116 }
1117
1118 static u32 intel_get_frame_time_us(const struct intel_crtc_state *crtc_state)
1119 {
1120 if (!crtc_state->hw.active)
1121 return 0;
1122
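/* e.g. a 60 Hz mode gives DIV_ROUND_UP(1000 * 1000, 60) = 16667 us per frame */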
1123 return DIV_ROUND_UP(1000 * 1000,
1124 drm_mode_vrefresh(&crtc_state->hw.adjusted_mode));
1125 }
1126
1127 static void psr2_program_idle_frames(struct intel_dp *intel_dp,
1128 u32 idle_frames)
1129 {
1130 struct intel_display *display = to_intel_display(intel_dp);
1131 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1132
1133 intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder),
1134 EDP_PSR2_IDLE_FRAMES_MASK,
1135 EDP_PSR2_IDLE_FRAMES(idle_frames));
1136 }
1137
1138 static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
1139 {
1140 struct intel_display *display = to_intel_display(intel_dp);
1141
1142 psr2_program_idle_frames(intel_dp, 0);
1143 intel_display_power_set_target_dc_state(display, DC_STATE_EN_DC3CO);
1144 }
1145
1146 static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
1147 {
1148 struct intel_display *display = to_intel_display(intel_dp);
1149
1150 intel_display_power_set_target_dc_state(display, DC_STATE_EN_UPTO_DC6);
1151 psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
1152 }
1153
1154 static void tgl_dc3co_disable_work(struct work_struct *work)
1155 {
1156 struct intel_dp *intel_dp =
1157 container_of(work, typeof(*intel_dp), psr.dc3co_work.work);
1158
1159 mutex_lock(&intel_dp->psr.lock);
1160 /* If delayed work is pending, it is not idle */
1161 if (delayed_work_pending(&intel_dp->psr.dc3co_work))
1162 goto unlock;
1163
1164 tgl_psr2_disable_dc3co(intel_dp);
1165 unlock:
1166 mutex_unlock(&intel_dp->psr.lock);
1167 }
1168
1169 static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
1170 {
1171 if (!intel_dp->psr.dc3co_exitline)
1172 return;
1173
1174 cancel_delayed_work(&intel_dp->psr.dc3co_work);
1175 /* Before PSR2 exit disallow DC3CO */
1176 tgl_psr2_disable_dc3co(intel_dp);
1177 }
1178
1179 static bool
1180 dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
1181 struct intel_crtc_state *crtc_state)
1182 {
1183 struct intel_display *display = to_intel_display(intel_dp);
1184 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1185 enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1186 struct drm_i915_private *dev_priv = to_i915(display->drm);
1187 enum port port = dig_port->base.port;
1188
1189 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1190 return pipe <= PIPE_B && port <= PORT_B;
1191 else
1192 return pipe == PIPE_A && port == PORT_A;
1193 }
1194
1195 static void
1196 tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
1197 struct intel_crtc_state *crtc_state)
1198 {
1199 struct intel_display *display = to_intel_display(intel_dp);
1200 struct drm_i915_private *dev_priv = to_i915(display->drm);
1201 const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
1202 struct i915_power_domains *power_domains = &display->power.domains;
1203 u32 exit_scanlines;
1204
1205 /*
1206 * FIXME: Due to the changed sequence of activating/deactivating DC3CO,
1207 * disable DC3CO until the changed dc3co activating/deactivating sequence
1208 * is applied. B.Specs:49196
1209 */
1210 return;
1211
1212 /*
1213 * DMC's DC3CO exit mechanism has an issue with Selective Fetch
1214 * TODO: when the issue is addressed, this restriction should be removed.
1215 */
1216 if (crtc_state->enable_psr2_sel_fetch)
1217 return;
1218
1219 if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO))
1220 return;
1221
1222 if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
1223 return;
1224
1225 /* Wa_16011303918:adl-p */
1226 if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0))
1227 return;
1228
1229 /*
1230 * DC3CO Exit time 200us B.Spec 49196
1231 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
1232 */
1233 exit_scanlines =
1234 intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;
1235
1236 if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay))
1237 return;
1238
1239 crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
1240 }
1241
1242 static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
1243 struct intel_crtc_state *crtc_state)
1244 {
1245 struct intel_display *display = to_intel_display(intel_dp);
1246
1247 if (!display->params.enable_psr2_sel_fetch &&
1248 intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
1249 drm_dbg_kms(display->drm,
1250 "PSR2 sel fetch not enabled, disabled by parameter\n");
1251 return false;
1252 }
1253
1254 if (crtc_state->uapi.async_flip) {
1255 drm_dbg_kms(display->drm,
1256 "PSR2 sel fetch not enabled, async flip enabled\n");
1257 return false;
1258 }
1259
1260 return crtc_state->enable_psr2_sel_fetch = true;
1261 }
1262
1263 static bool psr2_granularity_check(struct intel_dp *intel_dp,
1264 struct intel_crtc_state *crtc_state)
1265 {
1266 struct intel_display *display = to_intel_display(intel_dp);
1267 struct drm_i915_private *dev_priv = to_i915(display->drm);
1268 const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1269 const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1270 const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1271 u16 y_granularity = 0;
1272
1273 /* PSR2 HW only sends full lines so we only need to validate the width */
1274 if (crtc_hdisplay % intel_dp->psr.su_w_granularity)
1275 return false;
1276
1277 if (crtc_vdisplay % intel_dp->psr.su_y_granularity)
1278 return false;
1279
1280 /* HW tracking is only aligned to 4 lines */
1281 if (!crtc_state->enable_psr2_sel_fetch)
1282 return intel_dp->psr.su_y_granularity == 4;
1283
1284 /*
1285 * adl-p and mtl platforms have 1 line granularity.
1286 * For other platforms with SW tracking we can adjust the y coordinates
1287 * to match the sink requirement if it is a multiple of 4.
1288 */
1289 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1290 y_granularity = intel_dp->psr.su_y_granularity;
1291 else if (intel_dp->psr.su_y_granularity <= 2)
1292 y_granularity = 4;
1293 else if ((intel_dp->psr.su_y_granularity % 4) == 0)
1294 y_granularity = intel_dp->psr.su_y_granularity;
1295
1296 if (y_granularity == 0 || crtc_vdisplay % y_granularity)
1297 return false;
1298
1299 if (crtc_state->dsc.compression_enable &&
1300 vdsc_cfg->slice_height % y_granularity)
1301 return false;
1302
1303 crtc_state->su_y_granularity = y_granularity;
1304 return true;
1305 }
1306
1307 static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
1308 struct intel_crtc_state *crtc_state)
1309 {
1310 struct intel_display *display = to_intel_display(intel_dp);
1311 const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
1312 u32 hblank_total, hblank_ns, req_ns;
1313
1314 hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
1315 hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);
1316
1317 /* From spec: ((60 / number of lanes) + 11) * 1000 / symbol clock frequency MHz */
1318 req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000);
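/*
 * Worked example with hypothetical numbers: 4 lanes at HBR2 (port_clock of
 * 540000 kHz, i.e. a 540 MHz symbol clock) gives ((60 / 4) + 11) * 1000 / 540
 * = 48 ns, so hblank_ns must exceed req_ns by more than 100 ns for the early
 * return below to be taken.
 */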
1319
1320 if ((hblank_ns - req_ns) > 100)
1321 return true;
1322
1323 /* Not supported <13 / Wa_22012279113:adl-p */
1324 if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
1325 return false;
1326
1327 crtc_state->req_psr2_sdp_prior_scanline = true;
1328 return true;
1329 }
1330
1331 static int intel_psr_entry_setup_frames(struct intel_dp *intel_dp,
1332 const struct drm_display_mode *adjusted_mode)
1333 {
1334 struct intel_display *display = to_intel_display(intel_dp);
1335 int psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
1336 int entry_setup_frames = 0;
1337
1338 if (psr_setup_time < 0) {
1339 drm_dbg_kms(display->drm,
1340 "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
1341 intel_dp->psr_dpcd[1]);
1342 return -ETIME;
1343 }
1344
1345 if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
1346 adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
1347 if (DISPLAY_VER(display) >= 20) {
1348 /* setup entry frames can be up to 3 frames */
1349 entry_setup_frames = 1;
1350 drm_dbg_kms(display->drm,
1351 "PSR setup entry frames %d\n",
1352 entry_setup_frames);
1353 } else {
1354 drm_dbg_kms(display->drm,
1355 "PSR condition failed: PSR setup time (%d us) too long\n",
1356 psr_setup_time);
1357 return -ETIME;
1358 }
1359 }
1360
1361 return entry_setup_frames;
1362 }
1363
1364 static bool wake_lines_fit_into_vblank(struct intel_dp *intel_dp,
1365 const struct intel_crtc_state *crtc_state,
1366 bool aux_less)
1367 {
1368 struct intel_display *display = to_intel_display(intel_dp);
1369 int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end -
1370 crtc_state->hw.adjusted_mode.crtc_vblank_start;
1371 int wake_lines;
1372
1373 if (aux_less)
1374 wake_lines = intel_dp->alpm_parameters.aux_less_wake_lines;
1375 else
1376 wake_lines = DISPLAY_VER(display) < 20 ?
1377 psr2_block_count_lines(intel_dp) :
1378 intel_dp->alpm_parameters.io_wake_lines;
1379
1380 if (crtc_state->req_psr2_sdp_prior_scanline)
1381 vblank -= 1;
1382
1383 /* Vblank >= PSR2_CTL Block Count Number maximum line count */
1384 if (vblank < wake_lines)
1385 return false;
1386
1387 return true;
1388 }
1389
1390 static bool alpm_config_valid(struct intel_dp *intel_dp,
1391 const struct intel_crtc_state *crtc_state,
1392 bool aux_less)
1393 {
1394 struct intel_display *display = to_intel_display(intel_dp);
1395
1396 if (!intel_alpm_compute_params(intel_dp, crtc_state)) {
1397 drm_dbg_kms(display->drm,
1398 "PSR2/Panel Replay not enabled, Unable to use long enough wake times\n");
1399 return false;
1400 }
1401
1402 if (!wake_lines_fit_into_vblank(intel_dp, crtc_state, aux_less)) {
1403 drm_dbg_kms(display->drm,
1404 "PSR2/Panel Replay not enabled, too short vblank time\n");
1405 return false;
1406 }
1407
1408 return true;
1409 }
1410
1411 static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
1412 struct intel_crtc_state *crtc_state)
1413 {
1414 struct intel_display *display = to_intel_display(intel_dp);
1415 struct drm_i915_private *dev_priv = to_i915(display->drm);
1416 int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1417 int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1418 int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;
1419
1420 if (!intel_dp->psr.sink_psr2_support)
1421 return false;
1422
1423 /* JSL and EHL only support eDP 1.3 */
1424 if (IS_JASPERLAKE(dev_priv) || IS_ELKHARTLAKE(dev_priv)) {
1425 drm_dbg_kms(display->drm, "PSR2 not supported by phy\n");
1426 return false;
1427 }
1428
1429 /* Wa_16011181250 */
1430 if (IS_ROCKETLAKE(dev_priv) || IS_ALDERLAKE_S(dev_priv) ||
1431 IS_DG2(dev_priv)) {
1432 drm_dbg_kms(display->drm,
1433 "PSR2 is defeatured for this platform\n");
1434 return false;
1435 }
1436
1437 if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1438 drm_dbg_kms(display->drm,
1439 "PSR2 not completely functional in this stepping\n");
1440 return false;
1441 }
1442
1443 if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) {
1444 drm_dbg_kms(display->drm,
1445 "PSR2 not supported in transcoder %s\n",
1446 transcoder_name(crtc_state->cpu_transcoder));
1447 return false;
1448 }
1449
1450 /*
1451 * DSC and PSR2 cannot be enabled simultaneously. If a requested
1452 * resolution requires DSC to be enabled, priority is given to DSC
1453 * over PSR2.
1454 */
1455 if (crtc_state->dsc.compression_enable &&
1456 (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv))) {
1457 drm_dbg_kms(display->drm,
1458 "PSR2 cannot be enabled since DSC is enabled\n");
1459 return false;
1460 }
1461
1462 if (DISPLAY_VER(display) >= 20) {
1463 psr_max_h = crtc_hdisplay;
1464 psr_max_v = crtc_vdisplay;
1465 max_bpp = crtc_state->pipe_bpp;
1466 } else if (IS_DISPLAY_VER(display, 12, 14)) {
1467 psr_max_h = 5120;
1468 psr_max_v = 3200;
1469 max_bpp = 30;
1470 } else if (IS_DISPLAY_VER(display, 10, 11)) {
1471 psr_max_h = 4096;
1472 psr_max_v = 2304;
1473 max_bpp = 24;
1474 } else if (DISPLAY_VER(display) == 9) {
1475 psr_max_h = 3640;
1476 psr_max_v = 2304;
1477 max_bpp = 24;
1478 }
1479
1480 if (crtc_state->pipe_bpp > max_bpp) {
1481 drm_dbg_kms(display->drm,
1482 "PSR2 not enabled, pipe bpp %d > max supported %d\n",
1483 crtc_state->pipe_bpp, max_bpp);
1484 return false;
1485 }
1486
1487 /* Wa_16011303918:adl-p */
1488 if (crtc_state->vrr.enable &&
1489 IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1490 drm_dbg_kms(display->drm,
1491 "PSR2 not enabled, not compatible with HW stepping + VRR\n");
1492 return false;
1493 }
1494
1495 if (!alpm_config_valid(intel_dp, crtc_state, false))
1496 return false;
1497
1498 if (!crtc_state->enable_psr2_sel_fetch &&
1499 (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
1500 drm_dbg_kms(display->drm,
1501 "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
1502 crtc_hdisplay, crtc_vdisplay,
1503 psr_max_h, psr_max_v);
1504 return false;
1505 }
1506
1507 tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);
1508
1509 return true;
1510 }
1511
1512 static bool intel_sel_update_config_valid(struct intel_dp *intel_dp,
1513 struct intel_crtc_state *crtc_state)
1514 {
1515 struct intel_display *display = to_intel_display(intel_dp);
1516
1517 if (HAS_PSR2_SEL_FETCH(display) &&
1518 !intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
1519 !HAS_PSR_HW_TRACKING(display)) {
1520 drm_dbg_kms(display->drm,
1521 "Selective update not enabled, selective fetch not valid and no HW tracking available\n");
1522 goto unsupported;
1523 }
1524
1525 if (!psr2_global_enabled(intel_dp)) {
1526 drm_dbg_kms(display->drm,
1527 "Selective update disabled by flag\n");
1528 goto unsupported;
1529 }
1530
1531 if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state))
1532 goto unsupported;
1533
1534 if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
1535 drm_dbg_kms(display->drm,
1536 			    "Selective update not enabled, SDP indication does not fit in hblank\n");
1537 goto unsupported;
1538 }
1539
1540 if (crtc_state->has_panel_replay && (DISPLAY_VER(display) < 14 ||
1541 !intel_dp->psr.sink_panel_replay_su_support))
1542 goto unsupported;
1543
1544 if (crtc_state->crc_enabled) {
1545 drm_dbg_kms(display->drm,
1546 "Selective update not enabled because it would inhibit pipe CRC calculation\n");
1547 goto unsupported;
1548 }
1549
1550 if (!psr2_granularity_check(intel_dp, crtc_state)) {
1551 drm_dbg_kms(display->drm,
1552 "Selective update not enabled, SU granularity not compatible\n");
1553 goto unsupported;
1554 }
1555
1556 crtc_state->enable_psr2_su_region_et =
1557 psr2_su_region_et_valid(intel_dp, crtc_state->has_panel_replay);
1558
1559 return true;
1560
1561 unsupported:
1562 crtc_state->enable_psr2_sel_fetch = false;
1563 return false;
1564 }
1565
1566 static bool _psr_compute_config(struct intel_dp *intel_dp,
1567 struct intel_crtc_state *crtc_state)
1568 {
1569 struct intel_display *display = to_intel_display(intel_dp);
1570 const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1571 int entry_setup_frames;
1572
1573 if (!CAN_PSR(intel_dp))
1574 return false;
1575
1576 entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, adjusted_mode);
1577
1578 if (entry_setup_frames >= 0) {
1579 intel_dp->psr.entry_setup_frames = entry_setup_frames;
1580 } else {
1581 drm_dbg_kms(display->drm,
1582 "PSR condition failed: PSR setup timing not met\n");
1583 return false;
1584 }
1585
1586 return true;
1587 }
1588
1589 static bool
1590 _panel_replay_compute_config(struct intel_dp *intel_dp,
1591 const struct intel_crtc_state *crtc_state,
1592 const struct drm_connector_state *conn_state)
1593 {
1594 struct intel_display *display = to_intel_display(intel_dp);
1595 struct intel_connector *connector =
1596 to_intel_connector(conn_state->connector);
1597 struct intel_hdcp *hdcp = &connector->hdcp;
1598
1599 if (!CAN_PANEL_REPLAY(intel_dp))
1600 return false;
1601
1602 if (!panel_replay_global_enabled(intel_dp)) {
1603 drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n");
1604 return false;
1605 }
1606
1607 if (!intel_dp_is_edp(intel_dp))
1608 return true;
1609
1610 /* Remaining checks are for eDP only */
1611
1612 if (to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_A &&
1613 to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_B)
1614 return false;
1615
1616 /* 128b/132b Panel Replay is not supported on eDP */
1617 if (intel_dp_is_uhbr(crtc_state)) {
1618 drm_dbg_kms(display->drm,
1619 "Panel Replay is not supported with 128b/132b\n");
1620 return false;
1621 }
1622
1623 /* HW will not allow Panel Replay on eDP when HDCP enabled */
1624 if (conn_state->content_protection ==
1625 DRM_MODE_CONTENT_PROTECTION_DESIRED ||
1626 (conn_state->content_protection ==
1627 DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value ==
1628 DRM_MODE_CONTENT_PROTECTION_UNDESIRED)) {
1629 drm_dbg_kms(display->drm,
1630 "Panel Replay is not supported with HDCP\n");
1631 return false;
1632 }
1633
1634 if (!alpm_config_valid(intel_dp, crtc_state, true))
1635 return false;
1636
1637 if (crtc_state->crc_enabled) {
1638 drm_dbg_kms(display->drm,
1639 "Panel Replay not enabled because it would inhibit pipe CRC calculation\n");
1640 return false;
1641 }
1642
1643 return true;
1644 }
1645
1646 static bool intel_psr_needs_wa_18037818876(struct intel_dp *intel_dp,
1647 struct intel_crtc_state *crtc_state)
1648 {
1649 struct intel_display *display = to_intel_display(intel_dp);
1650
1651 return (DISPLAY_VER(display) == 20 && intel_dp->psr.entry_setup_frames > 0 &&
1652 !crtc_state->has_sel_update);
1653 }
1654
1655 void intel_psr_compute_config(struct intel_dp *intel_dp,
1656 struct intel_crtc_state *crtc_state,
1657 struct drm_connector_state *conn_state)
1658 {
1659 struct intel_display *display = to_intel_display(intel_dp);
1660 const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1661
1662 if (!psr_global_enabled(intel_dp)) {
1663 drm_dbg_kms(display->drm, "PSR disabled by flag\n");
1664 return;
1665 }
1666
1667 if (intel_dp->psr.sink_not_reliable) {
1668 drm_dbg_kms(display->drm,
1669 "PSR sink implementation is not reliable\n");
1670 return;
1671 }
1672
1673 if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
1674 drm_dbg_kms(display->drm,
1675 "PSR condition failed: Interlaced mode enabled\n");
1676 return;
1677 }
1678
1679 /*
1680 * FIXME figure out what is wrong with PSR+joiner and
1681 * fix it. Presumably something related to the fact that
1682 * PSR is a transcoder level feature.
1683 */
1684 if (crtc_state->joiner_pipes) {
1685 drm_dbg_kms(display->drm,
1686 "PSR disabled due to joiner\n");
1687 return;
1688 }
1689
1690 /*
1691 * Currently PSR/PR doesn't work reliably with VRR enabled.
1692 */
1693 if (crtc_state->vrr.enable)
1694 return;
1695
1696 crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp,
1697 crtc_state,
1698 conn_state);
1699
1700 crtc_state->has_psr = crtc_state->has_panel_replay ? true :
1701 _psr_compute_config(intel_dp, crtc_state);
1702
1703 if (!crtc_state->has_psr)
1704 return;
1705
1706 crtc_state->has_sel_update = intel_sel_update_config_valid(intel_dp, crtc_state);
1707
1708 /* Wa_18037818876 */
1709 if (intel_psr_needs_wa_18037818876(intel_dp, crtc_state)) {
1710 crtc_state->has_psr = false;
1711 drm_dbg_kms(display->drm,
1712 "PSR disabled to workaround PSR FSM hang issue\n");
1713 }
1714 }
1715
1716 void intel_psr_get_config(struct intel_encoder *encoder,
1717 struct intel_crtc_state *pipe_config)
1718 {
1719 struct intel_display *display = to_intel_display(encoder);
1720 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1721 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
1722 struct intel_dp *intel_dp;
1723 u32 val;
1724
1725 if (!dig_port)
1726 return;
1727
1728 intel_dp = &dig_port->dp;
1729 if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp)))
1730 return;
1731
1732 mutex_lock(&intel_dp->psr.lock);
1733 if (!intel_dp->psr.enabled)
1734 goto unlock;
1735
1736 if (intel_dp->psr.panel_replay_enabled) {
1737 pipe_config->has_psr = pipe_config->has_panel_replay = true;
1738 } else {
1739 /*
1740 * Not possible to read EDP_PSR/PSR2_CTL registers as it is
1741 * enabled/disabled because of frontbuffer tracking and others.
1742 */
1743 pipe_config->has_psr = true;
1744 }
1745
1746 pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled;
1747 pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
1748
1749 if (!intel_dp->psr.sel_update_enabled)
1750 goto unlock;
1751
1752 if (HAS_PSR2_SEL_FETCH(display)) {
1753 val = intel_de_read(display,
1754 PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1755 if (val & PSR2_MAN_TRK_CTL_ENABLE)
1756 pipe_config->enable_psr2_sel_fetch = true;
1757 }
1758
1759 pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled;
1760
1761 if (DISPLAY_VER(display) >= 12) {
1762 val = intel_de_read(display,
1763 TRANS_EXITLINE(display, cpu_transcoder));
1764 pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val);
1765 }
1766 unlock:
1767 mutex_unlock(&intel_dp->psr.lock);
1768 }
1769
1770 static void intel_psr_activate(struct intel_dp *intel_dp)
1771 {
1772 struct intel_display *display = to_intel_display(intel_dp);
1773 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1774
1775 drm_WARN_ON(display->drm,
1776 transcoder_has_psr2(display, cpu_transcoder) &&
1777 intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE);
1778
1779 drm_WARN_ON(display->drm,
1780 intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE);
1781
1782 drm_WARN_ON(display->drm, intel_dp->psr.active);
1783
1784 lockdep_assert_held(&intel_dp->psr.lock);
1785
1786 /* psr1, psr2 and panel-replay are mutually exclusive.*/
1787 if (intel_dp->psr.panel_replay_enabled)
1788 dg2_activate_panel_replay(intel_dp);
1789 else if (intel_dp->psr.sel_update_enabled)
1790 hsw_activate_psr2(intel_dp);
1791 else
1792 hsw_activate_psr1(intel_dp);
1793
1794 intel_dp->psr.active = true;
1795 }
1796
1797 /*
1798 * Wa_16013835468
1799 * Wa_14015648006
1800 */
1801 static void wm_optimization_wa(struct intel_dp *intel_dp,
1802 const struct intel_crtc_state *crtc_state)
1803 {
1804 struct intel_display *display = to_intel_display(intel_dp);
1805 enum pipe pipe = intel_dp->psr.pipe;
1806 bool activate = false;
1807
1808 /* Wa_14015648006 */
1809 if (IS_DISPLAY_VER(display, 11, 14) && crtc_state->wm_level_disabled)
1810 activate = true;
1811
1812 /* Wa_16013835468 */
1813 if (DISPLAY_VER(display) == 12 &&
1814 crtc_state->hw.adjusted_mode.crtc_vblank_start !=
1815 crtc_state->hw.adjusted_mode.crtc_vdisplay)
1816 activate = true;
1817
1818 if (activate)
1819 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1820 0, LATENCY_REPORTING_REMOVED(pipe));
1821 else
1822 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1823 LATENCY_REPORTING_REMOVED(pipe), 0);
1824 }
1825
1826 static void intel_psr_enable_source(struct intel_dp *intel_dp,
1827 const struct intel_crtc_state *crtc_state)
1828 {
1829 struct intel_display *display = to_intel_display(intel_dp);
1830 struct drm_i915_private *dev_priv = to_i915(display->drm);
1831 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1832 u32 mask = 0;
1833
1834 /*
1835 	 * Only HSW and BDW have PSR AUX registers that need to be set up.
1836 	 * SKL+ use hardcoded values for PSR AUX transactions.
1837 */
1838 if (DISPLAY_VER(display) < 9)
1839 hsw_psr_setup_aux(intel_dp);
1840
1841 /*
1842 	 * Per Spec: Avoid continuous PSR exit by masking MEMUP and HPD. Also
1843 	 * mask LPSP to avoid a dependency on other drivers that might block
1844 	 * runtime_pm. Besides preventing other HW tracking issues, we can now
1845 	 * rely on frontbuffer tracking.
1846 *
1847 	 * From bspec, prior to LunarLake:
1848 * Only PSR_MASK[Mask FBC modify] and PSR_MASK[Mask Hotplug] are used in
1849 * panel replay mode.
1850 *
1851 	 * From bspec, beyond LunarLake:
1852 * Panel Replay on DP: No bits are applicable
1853 * Panel Replay on eDP: All bits are applicable
1854 */
1855 if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp))
1856 mask = EDP_PSR_DEBUG_MASK_HPD;
1857
1858 if (intel_dp_is_edp(intel_dp)) {
1859 mask |= EDP_PSR_DEBUG_MASK_MEMUP;
1860
1861 /*
1862 * For some unknown reason on HSW non-ULT (or at least on
1863 * Dell Latitude E6540) external displays start to flicker
1864 * when PSR is enabled on the eDP. SR/PC6 residency is much
1865 * higher than should be possible with an external display.
1866 * As a workaround leave LPSP unmasked to prevent PSR entry
1867 * when external displays are active.
1868 */
1869 if (DISPLAY_VER(display) >= 8 || IS_HASWELL_ULT(dev_priv))
1870 mask |= EDP_PSR_DEBUG_MASK_LPSP;
1871
1872 if (DISPLAY_VER(display) < 20)
1873 mask |= EDP_PSR_DEBUG_MASK_MAX_SLEEP;
1874
1875 /*
1876 * No separate pipe reg write mask on hsw/bdw, so have to unmask all
1877 * registers in order to keep the CURSURFLIVE tricks working :(
1878 */
1879 if (IS_DISPLAY_VER(display, 9, 10))
1880 mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;
1881
1882 /* allow PSR with sprite enabled */
1883 if (IS_HASWELL(dev_priv))
1884 mask |= EDP_PSR_DEBUG_MASK_SPRITE_ENABLE;
1885 }
1886
1887 intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask);
1888
1889 psr_irq_control(intel_dp);
1890
1891 /*
1892 	 * TODO: if future platforms support DC3CO in more than one
1893 * transcoder, EXITLINE will need to be unset when disabling PSR
1894 */
1895 if (intel_dp->psr.dc3co_exitline)
1896 intel_de_rmw(display,
1897 TRANS_EXITLINE(display, cpu_transcoder),
1898 EXITLINE_MASK,
1899 intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE);
1900
1901 if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display))
1902 intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
1903 intel_dp->psr.psr2_sel_fetch_enabled ?
1904 IGNORE_PSR2_HW_TRACKING : 0);
1905
1906 if (intel_dp_is_edp(intel_dp))
1907 intel_alpm_configure(intel_dp, crtc_state);
1908
1909 /*
1910 * Wa_16013835468
1911 * Wa_14015648006
1912 */
1913 wm_optimization_wa(intel_dp, crtc_state);
1914
1915 if (intel_dp->psr.sel_update_enabled) {
1916 if (DISPLAY_VER(display) == 9)
1917 intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), 0,
1918 PSR2_VSC_ENABLE_PROG_HEADER |
1919 PSR2_ADD_VERTICAL_LINE_COUNT);
1920
1921 /*
1922 * Wa_16014451276:adlp,mtl[a0,b0]
1923 		 * All supported adlp panels have 1-based X granularity; this may
1924 		 * cause issues if unsupported panels are used.
1925 */
1926 if (!intel_dp->psr.panel_replay_enabled &&
1927 (IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
1928 IS_ALDERLAKE_P(dev_priv)))
1929 intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
1930 0, ADLP_1_BASED_X_GRANULARITY);
1931
1932 /* Wa_16012604467:adlp,mtl[a0,b0] */
1933 if (!intel_dp->psr.panel_replay_enabled &&
1934 IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
1935 intel_de_rmw(display,
1936 MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
1937 0,
1938 MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS);
1939 else if (IS_ALDERLAKE_P(dev_priv))
1940 intel_de_rmw(display, CLKGATE_DIS_MISC, 0,
1941 CLKGATE_DIS_MISC_DMASC_GATING_DIS);
1942 }
1943 }
1944
1945 static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
1946 {
1947 struct intel_display *display = to_intel_display(intel_dp);
1948 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1949 u32 val;
1950
1951 if (intel_dp->psr.panel_replay_enabled)
1952 goto no_err;
1953
1954 /*
1955 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
1956 * will still keep the error set even after the reset done in the
1957 * irq_preinstall and irq_uninstall hooks.
1958 	 * Enabling PSR in this situation causes the screen to freeze the
1959 	 * first time the PSR HW tries to activate, so let's keep PSR disabled
1960 	 * to avoid any rendering problems.
1961 */
1962 val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder));
1963 val &= psr_irq_psr_error_bit_get(intel_dp);
1964 if (val) {
1965 intel_dp->psr.sink_not_reliable = true;
1966 drm_dbg_kms(display->drm,
1967 "PSR interruption error set, not enabling PSR\n");
1968 return false;
1969 }
1970
1971 no_err:
1972 return true;
1973 }
1974
1975 static void intel_psr_enable_locked(struct intel_dp *intel_dp,
1976 const struct intel_crtc_state *crtc_state)
1977 {
1978 struct intel_display *display = to_intel_display(intel_dp);
1979 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1980 u32 val;
1981
1982 drm_WARN_ON(display->drm, intel_dp->psr.enabled);
1983
1984 intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update;
1985 intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay;
1986 intel_dp->psr.busy_frontbuffer_bits = 0;
1987 intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1988 intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
1989 /* DC5/DC6 requires at least 6 idle frames */
1990 val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
1991 intel_dp->psr.dc3co_exit_delay = val;
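	/*
	 * Editor's note (worked example, not from bspec): a 60 Hz mode has a
	 * frame time of roughly 16667 us, so the delay computed above is
	 * usecs_to_jiffies(~100000), i.e. about 100 ms.
	 */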
1992 intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
1993 intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
1994 intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et;
1995 intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
1996 intel_dp->psr.req_psr2_sdp_prior_scanline =
1997 crtc_state->req_psr2_sdp_prior_scanline;
1998
1999 if (!psr_interrupt_error_check(intel_dp))
2000 return;
2001
2002 if (intel_dp->psr.panel_replay_enabled)
2003 drm_dbg_kms(display->drm, "Enabling Panel Replay\n");
2004 else
2005 drm_dbg_kms(display->drm, "Enabling PSR%s\n",
2006 intel_dp->psr.sel_update_enabled ? "2" : "1");
2007
2008 /*
2009 * Enabling here only for PSR. Panel Replay enable bit is already
2010 * written at this point. See
2011 * intel_psr_panel_replay_enable_sink. Modifiers/options:
2012 * - Selective Update
2013 * - Region Early Transport
2014 * - Selective Update Region Scanline Capture
2015 * - VSC_SDP_CRC
2016 * - HPD on different Errors
2017 * - CRC verification
2018 * are written for PSR and Panel Replay here.
2019 */
2020 intel_psr_enable_sink(intel_dp, crtc_state);
2021
2022 if (intel_dp_is_edp(intel_dp))
2023 intel_snps_phy_update_psr_power_state(&dig_port->base, true);
2024
2025 intel_psr_enable_source(intel_dp, crtc_state);
2026 intel_dp->psr.enabled = true;
2027 intel_dp->psr.paused = false;
2028
2029 /*
2030 * Link_ok is sticky and set here on PSR enable. We can assume link
2031 	 * training is complete as we never continue to PSR enable with an
2032 	 * untrained link. Link_ok is kept set until the first short pulse
2033 	 * interrupt. This is targeted at working around panels that report a
2034 	 * bad link after PSR is enabled.
2035 */
2036 intel_dp->psr.link_ok = true;
2037
2038 intel_psr_activate(intel_dp);
2039 }
2040
2041 static void intel_psr_exit(struct intel_dp *intel_dp)
2042 {
2043 struct intel_display *display = to_intel_display(intel_dp);
2044 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2045 u32 val;
2046
2047 if (!intel_dp->psr.active) {
2048 if (transcoder_has_psr2(display, cpu_transcoder)) {
2049 val = intel_de_read(display,
2050 EDP_PSR2_CTL(display, cpu_transcoder));
2051 drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE);
2052 }
2053
2054 val = intel_de_read(display,
2055 psr_ctl_reg(display, cpu_transcoder));
2056 drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE);
2057
2058 return;
2059 }
2060
2061 if (intel_dp->psr.panel_replay_enabled) {
2062 intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder),
2063 TRANS_DP2_PANEL_REPLAY_ENABLE, 0);
2064 } else if (intel_dp->psr.sel_update_enabled) {
2065 tgl_disallow_dc3co_on_psr2_exit(intel_dp);
2066
2067 val = intel_de_rmw(display,
2068 EDP_PSR2_CTL(display, cpu_transcoder),
2069 EDP_PSR2_ENABLE, 0);
2070
2071 drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
2072 } else {
2073 val = intel_de_rmw(display,
2074 psr_ctl_reg(display, cpu_transcoder),
2075 EDP_PSR_ENABLE, 0);
2076
2077 drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE));
2078 }
2079 intel_dp->psr.active = false;
2080 }
2081
2082 static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
2083 {
2084 struct intel_display *display = to_intel_display(intel_dp);
2085 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2086 i915_reg_t psr_status;
2087 u32 psr_status_mask;
2088
2089 if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2090 intel_dp->psr.panel_replay_enabled)) {
2091 psr_status = EDP_PSR2_STATUS(display, cpu_transcoder);
2092 psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
2093 } else {
2094 psr_status = psr_status_reg(display, cpu_transcoder);
2095 psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
2096 }
2097
2098 /* Wait till PSR is idle */
2099 if (intel_de_wait_for_clear(display, psr_status,
2100 psr_status_mask, 2000))
2101 drm_err(display->drm, "Timed out waiting PSR idle state\n");
2102 }
2103
2104 static void intel_psr_disable_locked(struct intel_dp *intel_dp)
2105 {
2106 struct intel_display *display = to_intel_display(intel_dp);
2107 struct drm_i915_private *dev_priv = to_i915(display->drm);
2108 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2109
2110 lockdep_assert_held(&intel_dp->psr.lock);
2111
2112 if (!intel_dp->psr.enabled)
2113 return;
2114
2115 if (intel_dp->psr.panel_replay_enabled)
2116 drm_dbg_kms(display->drm, "Disabling Panel Replay\n");
2117 else
2118 drm_dbg_kms(display->drm, "Disabling PSR%s\n",
2119 intel_dp->psr.sel_update_enabled ? "2" : "1");
2120
2121 intel_psr_exit(intel_dp);
2122 intel_psr_wait_exit_locked(intel_dp);
2123
2124 /*
2125 * Wa_16013835468
2126 * Wa_14015648006
2127 */
2128 if (DISPLAY_VER(display) >= 11)
2129 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2130 LATENCY_REPORTING_REMOVED(intel_dp->psr.pipe), 0);
2131
2132 if (intel_dp->psr.sel_update_enabled) {
2133 /* Wa_16012604467:adlp,mtl[a0,b0] */
2134 if (!intel_dp->psr.panel_replay_enabled &&
2135 IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0))
2136 intel_de_rmw(display,
2137 MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
2138 MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS, 0);
2139 else if (IS_ALDERLAKE_P(dev_priv))
2140 intel_de_rmw(display, CLKGATE_DIS_MISC,
2141 CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);
2142 }
2143
2144 if (intel_dp_is_edp(intel_dp))
2145 intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false);
2146
2147 	/* Panel Replay on eDP always uses ALPM AUX-Less. */
2148 if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp)) {
2149 intel_de_rmw(display, ALPM_CTL(display, cpu_transcoder),
2150 ALPM_CTL_ALPM_ENABLE |
2151 ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2152
2153 intel_de_rmw(display,
2154 PORT_ALPM_CTL(cpu_transcoder),
2155 PORT_ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2156 }
2157
2158 /* Disable PSR on Sink */
2159 if (!intel_dp->psr.panel_replay_enabled) {
2160 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);
2161
2162 if (intel_dp->psr.sel_update_enabled)
2163 drm_dp_dpcd_writeb(&intel_dp->aux,
2164 DP_RECEIVER_ALPM_CONFIG, 0);
2165 }
2166
2167 intel_dp->psr.enabled = false;
2168 intel_dp->psr.panel_replay_enabled = false;
2169 intel_dp->psr.sel_update_enabled = false;
2170 intel_dp->psr.psr2_sel_fetch_enabled = false;
2171 intel_dp->psr.su_region_et_enabled = false;
2172 intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2173 }
2174
2175 /**
2176 * intel_psr_disable - Disable PSR
2177 * @intel_dp: Intel DP
2178 * @old_crtc_state: old CRTC state
2179 *
2180  * This function needs to be called before disabling the pipe.
2181 */
2182 void intel_psr_disable(struct intel_dp *intel_dp,
2183 const struct intel_crtc_state *old_crtc_state)
2184 {
2185 struct intel_display *display = to_intel_display(intel_dp);
2186
2187 if (!old_crtc_state->has_psr)
2188 return;
2189
2190 if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp) &&
2191 !CAN_PANEL_REPLAY(intel_dp)))
2192 return;
2193
2194 mutex_lock(&intel_dp->psr.lock);
2195
2196 intel_psr_disable_locked(intel_dp);
2197
2198 intel_dp->psr.link_ok = false;
2199
2200 mutex_unlock(&intel_dp->psr.lock);
2201 cancel_work_sync(&intel_dp->psr.work);
2202 cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
2203 }
2204
2205 /**
2206 * intel_psr_pause - Pause PSR
2207 * @intel_dp: Intel DP
2208 *
2209  * This function needs to be called after enabling PSR.
2210 */
2211 void intel_psr_pause(struct intel_dp *intel_dp)
2212 {
2213 struct intel_display *display = to_intel_display(intel_dp);
2214 struct intel_psr *psr = &intel_dp->psr;
2215
2216 if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2217 return;
2218
2219 mutex_lock(&psr->lock);
2220
2221 if (!psr->enabled) {
2222 mutex_unlock(&psr->lock);
2223 return;
2224 }
2225
2226 /* If we ever hit this, we will need to add refcount to pause/resume */
2227 drm_WARN_ON(display->drm, psr->paused);
2228
2229 intel_psr_exit(intel_dp);
2230 intel_psr_wait_exit_locked(intel_dp);
2231 psr->paused = true;
2232
2233 mutex_unlock(&psr->lock);
2234
2235 cancel_work_sync(&psr->work);
2236 cancel_delayed_work_sync(&psr->dc3co_work);
2237 }
2238
2239 /**
2240 * intel_psr_resume - Resume PSR
2241 * @intel_dp: Intel DP
2242 *
2243  * This function needs to be called after pausing PSR.
2244 */
2245 void intel_psr_resume(struct intel_dp *intel_dp)
2246 {
2247 struct intel_psr *psr = &intel_dp->psr;
2248
2249 if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2250 return;
2251
2252 mutex_lock(&psr->lock);
2253
2254 if (!psr->paused)
2255 goto unlock;
2256
2257 psr->paused = false;
2258 intel_psr_activate(intel_dp);
2259
2260 unlock:
2261 mutex_unlock(&psr->lock);
2262 }
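/*
 * Editor's sketch of a hypothetical pause/resume bracket (the surrounding
 * operation is an assumption, not taken from this file):
 *
 *	intel_psr_pause(intel_dp);
 *	... do work that must not race with PSR entry on this port ...
 *	intel_psr_resume(intel_dp);
 */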
2263
2264 /**
2265 * intel_psr_needs_block_dc_vblank - Check if block dc entry is needed
2266 * @crtc_state: CRTC status
2267 *
2268  * We need to block DC6 entry in case of Panel Replay as enabling VBI doesn't
2269  * prevent it in that case. Panel Replay switches the main link off on
2270  * DC entry. This means vblank interrupts are not fired, which is a problem if
2271  * user-space is polling for vblank events.
2272 */
2273 bool intel_psr_needs_block_dc_vblank(const struct intel_crtc_state *crtc_state)
2274 {
2275 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2276 struct intel_encoder *encoder;
2277
2278 for_each_encoder_on_crtc(crtc->base.dev, &crtc->base, encoder) {
2279 struct intel_dp *intel_dp;
2280
2281 if (!intel_encoder_is_dp(encoder))
2282 continue;
2283
2284 intel_dp = enc_to_intel_dp(encoder);
2285
2286 if (intel_dp_is_edp(intel_dp) &&
2287 CAN_PANEL_REPLAY(intel_dp))
2288 return true;
2289 }
2290
2291 return false;
2292 }
2293
2294 /**
2295 * intel_psr_trigger_frame_change_event - Trigger "Frame Change" event
2296 * @dsb: DSB context
2297 * @state: the atomic state
2298 * @crtc: the CRTC
2299 *
2300 * Generate PSR "Frame Change" event.
2301 */
2302 void intel_psr_trigger_frame_change_event(struct intel_dsb *dsb,
2303 struct intel_atomic_state *state,
2304 struct intel_crtc *crtc)
2305 {
2306 const struct intel_crtc_state *crtc_state =
2307 intel_pre_commit_crtc_state(state, crtc);
2308 struct intel_display *display = to_intel_display(crtc);
2309
2310 if (crtc_state->has_psr)
2311 intel_de_write_dsb(display, dsb,
2312 CURSURFLIVE(display, crtc->pipe), 0);
2313 }
2314
2315 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display)
2316 {
2317 struct drm_i915_private *dev_priv = to_i915(display->drm);
2318
2319 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ? 0 :
2320 PSR2_MAN_TRK_CTL_ENABLE;
2321 }
2322
2323 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display)
2324 {
2325 struct drm_i915_private *dev_priv = to_i915(display->drm);
2326
2327 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2328 ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
2329 PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
2330 }
2331
2332 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display)
2333 {
2334 struct drm_i915_private *dev_priv = to_i915(display->drm);
2335
2336 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2337 ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
2338 PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
2339 }
2340
2341 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display)
2342 {
2343 struct drm_i915_private *dev_priv = to_i915(display->drm);
2344
2345 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2346 ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
2347 PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
2348 }
2349
2350 static void intel_psr_force_update(struct intel_dp *intel_dp)
2351 {
2352 struct intel_display *display = to_intel_display(intel_dp);
2353
2354 /*
2355 * Display WA #0884: skl+
2356 * This documented WA for bxt can be safely applied
2357 * broadly so we can force HW tracking to exit PSR
2358 * instead of disabling and re-enabling.
2359 	 * Workaround tells us to write 0 to CUR_SURFLIVE_A,
2360 	 * but it makes more sense to write to the currently
2361 	 * active pipe.
2362 	 *
2363 	 * This workaround does not exist for platforms with display 10 or
2364 	 * newer, but testing proved that it works up to display 13; for
2365 	 * anything newer, testing will be needed.
2366 */
2367 intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0);
2368 }
2369
2370 void intel_psr2_program_trans_man_trk_ctl(struct intel_dsb *dsb,
2371 const struct intel_crtc_state *crtc_state)
2372 {
2373 struct intel_display *display = to_intel_display(crtc_state);
2374 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2375 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2376 struct intel_encoder *encoder;
2377
2378 if (!crtc_state->enable_psr2_sel_fetch)
2379 return;
2380
2381 for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2382 crtc_state->uapi.encoder_mask) {
2383 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2384
2385 if (!dsb)
2386 lockdep_assert_held(&intel_dp->psr.lock);
2387
2388 if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_cff_enabled)
2389 return;
2390 break;
2391 }
2392
2393 intel_de_write_dsb(display, dsb,
2394 PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2395 crtc_state->psr2_man_track_ctl);
2396
2397 if (!crtc_state->enable_psr2_su_region_et)
2398 return;
2399
2400 intel_de_write_dsb(display, dsb, PIPE_SRCSZ_ERLY_TPT(crtc->pipe),
2401 crtc_state->pipe_srcsz_early_tpt);
2402 }
2403
2404 static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
2405 bool full_update)
2406 {
2407 struct intel_display *display = to_intel_display(crtc_state);
2408 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2409 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2410 u32 val = man_trk_ctl_enable_bit_get(display);
2411
2412 /* SF partial frame enable has to be set even on full update */
2413 val |= man_trk_ctl_partial_frame_bit_get(display);
2414
2415 if (full_update) {
2416 val |= man_trk_ctl_continuos_full_frame(display);
2417 goto exit;
2418 }
2419
2420 if (crtc_state->psr2_su_area.y1 == -1)
2421 goto exit;
2422
2423 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14) {
2424 val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1);
2425 val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1);
2426 } else {
2427 drm_WARN_ON(crtc_state->uapi.crtc->dev,
2428 crtc_state->psr2_su_area.y1 % 4 ||
2429 crtc_state->psr2_su_area.y2 % 4);
2430
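		/*
		 * Editor's note: pre-ADL-P hardware addresses the SU region in
		 * 4-line blocks with 1-based block numbers, hence the /4 + 1
		 * below; e.g. y1 == 8 and y2 == 40 program start block 3 and
		 * end block 11.
		 */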
2431 val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(
2432 crtc_state->psr2_su_area.y1 / 4 + 1);
2433 val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(
2434 crtc_state->psr2_su_area.y2 / 4 + 1);
2435 }
2436 exit:
2437 crtc_state->psr2_man_track_ctl = val;
2438 }
2439
2440 static u32 psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state *crtc_state,
2441 bool full_update)
2442 {
2443 int width, height;
2444
2445 if (!crtc_state->enable_psr2_su_region_et || full_update)
2446 return 0;
2447
2448 width = drm_rect_width(&crtc_state->psr2_su_area);
2449 height = drm_rect_height(&crtc_state->psr2_su_area);
2450
2451 return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1);
2452 }
2453
2454 static void clip_area_update(struct drm_rect *overlap_damage_area,
2455 struct drm_rect *damage_area,
2456 struct drm_rect *pipe_src)
2457 {
2458 if (!drm_rect_intersect(damage_area, pipe_src))
2459 return;
2460
2461 if (overlap_damage_area->y1 == -1) {
2462 overlap_damage_area->y1 = damage_area->y1;
2463 overlap_damage_area->y2 = damage_area->y2;
2464 return;
2465 }
2466
2467 if (damage_area->y1 < overlap_damage_area->y1)
2468 overlap_damage_area->y1 = damage_area->y1;
2469
2470 if (damage_area->y2 > overlap_damage_area->y2)
2471 overlap_damage_area->y2 = damage_area->y2;
2472 }
2473
2474 static void intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state *crtc_state)
2475 {
2476 struct intel_display *display = to_intel_display(crtc_state);
2477 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
2478 const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
2479 u16 y_alignment;
2480
2481 /* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
2482 if (crtc_state->dsc.compression_enable &&
2483 (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14))
2484 y_alignment = vdsc_cfg->slice_height;
2485 else
2486 y_alignment = crtc_state->su_y_granularity;
2487
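	/*
	 * Round y1 down and y2 up to the alignment boundary; e.g. with
	 * y_alignment == 4, y1 == 10 becomes 8 and y2 == 13 becomes 16.
	 */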
2488 crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment;
2489 if (crtc_state->psr2_su_area.y2 % y_alignment)
2490 crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 /
2491 y_alignment) + 1) * y_alignment;
2492 }
2493
2494 /*
2495  * When early transport is in use we need to extend the SU area to cover
2496  * the cursor fully when the cursor is in the SU area.
2497 */
2498 static void
2499 intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state *state,
2500 struct intel_crtc *crtc,
2501 bool *cursor_in_su_area)
2502 {
2503 struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2504 struct intel_plane_state *new_plane_state;
2505 struct intel_plane *plane;
2506 int i;
2507
2508 if (!crtc_state->enable_psr2_su_region_et)
2509 return;
2510
2511 for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
2512 struct drm_rect inter;
2513
2514 if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2515 continue;
2516
2517 if (plane->id != PLANE_CURSOR)
2518 continue;
2519
2520 if (!new_plane_state->uapi.visible)
2521 continue;
2522
2523 inter = crtc_state->psr2_su_area;
2524 if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
2525 continue;
2526
2527 clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst,
2528 &crtc_state->pipe_src);
2529 *cursor_in_su_area = true;
2530 }
2531 }
2532
2533 /*
2534  * TODO: Not clear how to handle planes with negative position;
2535  * also planes are not updated if they have a negative X
2536  * position, so for now we do a full update in these cases.
2537 *
2538 * Plane scaling and rotation is not supported by selective fetch and both
2539 * properties can change without a modeset, so need to be check at every
2540 * atomic commit.
2541 */
2542 static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
2543 {
2544 if (plane_state->uapi.dst.y1 < 0 ||
2545 plane_state->uapi.dst.x1 < 0 ||
2546 plane_state->scaler_id >= 0 ||
2547 plane_state->uapi.rotation != DRM_MODE_ROTATE_0)
2548 return false;
2549
2550 return true;
2551 }
2552
2553 /*
2554  * Check for pipe properties that are not supported by selective fetch.
2555 *
2556 * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is executed
2557 * after intel_psr_compute_config(), so for now keeping PSR2 selective fetch
2558 * enabled and going to the full update path.
2559 */
2560 static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
2561 {
2562 if (crtc_state->scaler_state.scaler_id >= 0)
2563 return false;
2564
2565 return true;
2566 }
2567
2568 /* Wa 14019834836 */
2569 static void intel_psr_apply_pr_link_on_su_wa(struct intel_crtc_state *crtc_state)
2570 {
2571 struct intel_display *display = to_intel_display(crtc_state);
2572 struct intel_encoder *encoder;
2573 int hactive_limit;
2574
2575 if (crtc_state->psr2_su_area.y1 != 0 ||
2576 crtc_state->psr2_su_area.y2 != 0)
2577 return;
2578
2579 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
2580 hactive_limit = intel_dp_is_uhbr(crtc_state) ? 1230 : 546;
2581 else
2582 hactive_limit = intel_dp_is_uhbr(crtc_state) ? 615 : 273;
2583
2584 if (crtc_state->hw.adjusted_mode.hdisplay < hactive_limit)
2585 return;
2586
2587 for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2588 crtc_state->uapi.encoder_mask) {
2589 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2590
2591 if (!intel_dp_is_edp(intel_dp) &&
2592 intel_dp->psr.panel_replay_enabled &&
2593 intel_dp->psr.sel_update_enabled) {
2594 crtc_state->psr2_su_area.y2++;
2595 return;
2596 }
2597 }
2598 }
2599
2600 static void
2601 intel_psr_apply_su_area_workarounds(struct intel_crtc_state *crtc_state)
2602 {
2603 struct intel_display *display = to_intel_display(crtc_state);
2604 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
2605
2606 /* Wa_14014971492 */
2607 if (!crtc_state->has_panel_replay &&
2608 ((IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) ||
2609 IS_ALDERLAKE_P(i915) || IS_TIGERLAKE(i915))) &&
2610 crtc_state->splitter.enable)
2611 crtc_state->psr2_su_area.y1 = 0;
2612
2613 /* Wa 14019834836 */
2614 if (DISPLAY_VER(display) == 30)
2615 intel_psr_apply_pr_link_on_su_wa(crtc_state);
2616 }
2617
2618 int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
2619 struct intel_crtc *crtc)
2620 {
2621 struct intel_display *display = to_intel_display(state);
2622 struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2623 struct intel_plane_state *new_plane_state, *old_plane_state;
2624 struct intel_plane *plane;
2625 bool full_update = false, cursor_in_su_area = false;
2626 int i, ret;
2627
2628 if (!crtc_state->enable_psr2_sel_fetch)
2629 return 0;
2630
2631 if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
2632 full_update = true;
2633 goto skip_sel_fetch_set_loop;
2634 }
2635
2636 crtc_state->psr2_su_area.x1 = 0;
2637 crtc_state->psr2_su_area.y1 = -1;
2638 crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src);
2639 crtc_state->psr2_su_area.y2 = -1;
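	/*
	 * y1/y2 == -1 marks the SU area as empty; it is grown via
	 * clip_area_update() as plane damage is accumulated below.
	 */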
2640
2641 /*
2642 * Calculate minimal selective fetch area of each plane and calculate
2643 * the pipe damaged area.
2644 * In the next loop the plane selective fetch area will actually be set
2645 	 * using the whole pipe damaged area.
2646 */
2647 for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2648 new_plane_state, i) {
2649 struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
2650 .x2 = INT_MAX };
2651
2652 if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2653 continue;
2654
2655 if (!new_plane_state->uapi.visible &&
2656 !old_plane_state->uapi.visible)
2657 continue;
2658
2659 if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2660 full_update = true;
2661 break;
2662 }
2663
2664 /*
2665 		 * If visibility changed or the plane moved, mark the whole plane
2666 		 * area as damaged as it needs to be completely redrawn in both
2667 		 * the new and old positions.
2668 */
2669 if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
2670 !drm_rect_equals(&new_plane_state->uapi.dst,
2671 &old_plane_state->uapi.dst)) {
2672 if (old_plane_state->uapi.visible) {
2673 damaged_area.y1 = old_plane_state->uapi.dst.y1;
2674 damaged_area.y2 = old_plane_state->uapi.dst.y2;
2675 clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2676 &crtc_state->pipe_src);
2677 }
2678
2679 if (new_plane_state->uapi.visible) {
2680 damaged_area.y1 = new_plane_state->uapi.dst.y1;
2681 damaged_area.y2 = new_plane_state->uapi.dst.y2;
2682 clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2683 &crtc_state->pipe_src);
2684 }
2685 continue;
2686 } else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
2687 /* If alpha changed mark the whole plane area as damaged */
2688 damaged_area.y1 = new_plane_state->uapi.dst.y1;
2689 damaged_area.y2 = new_plane_state->uapi.dst.y2;
2690 clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2691 &crtc_state->pipe_src);
2692 continue;
2693 }
2694
2695 src = drm_plane_state_src(&new_plane_state->uapi);
2696 drm_rect_fp_to_int(&src, &src);
2697
2698 if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi,
2699 &new_plane_state->uapi, &damaged_area))
2700 continue;
2701
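		/*
		 * Translate the merged damage rectangle from plane source
		 * coordinates into pipe coordinates by applying the plane's
		 * dst - src offset.
		 */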
2702 damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
2703 damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
2704 damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1;
2705 damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1;
2706
2707 clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src);
2708 }
2709
2710 /*
2711 * TODO: For now we are just using full update in case
2712 * selective fetch area calculation fails. To optimize this we
2713 * should identify cases where this happens and fix the area
2714 * calculation for those.
2715 */
2716 if (crtc_state->psr2_su_area.y1 == -1) {
2717 drm_info_once(display->drm,
2718 "Selective fetch area calculation failed in pipe %c\n",
2719 pipe_name(crtc->pipe));
2720 full_update = true;
2721 }
2722
2723 if (full_update)
2724 goto skip_sel_fetch_set_loop;
2725
2726 intel_psr_apply_su_area_workarounds(crtc_state);
2727
2728 ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
2729 if (ret)
2730 return ret;
2731
2732 /*
2733 * Adjust su area to cover cursor fully as necessary (early
2734 * transport). This needs to be done after
2735 * drm_atomic_add_affected_planes to ensure visible cursor is added into
2736 * affected planes even when cursor is not updated by itself.
2737 */
2738 intel_psr2_sel_fetch_et_alignment(state, crtc, &cursor_in_su_area);
2739
2740 intel_psr2_sel_fetch_pipe_alignment(crtc_state);
2741
2742 /*
2743 	 * Now that we have the pipe damaged area, check if it intersects with
2744 	 * every plane; if it does, set the plane selective fetch area.
2745 */
2746 for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2747 new_plane_state, i) {
2748 struct drm_rect *sel_fetch_area, inter;
2749 struct intel_plane *linked = new_plane_state->planar_linked_plane;
2750
2751 if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc ||
2752 !new_plane_state->uapi.visible)
2753 continue;
2754
2755 inter = crtc_state->psr2_su_area;
2756 sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2757 if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) {
2758 sel_fetch_area->y1 = -1;
2759 sel_fetch_area->y2 = -1;
2760 /*
2761 * if plane sel fetch was previously enabled ->
2762 * disable it
2763 */
2764 if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0)
2765 crtc_state->update_planes |= BIT(plane->id);
2766
2767 continue;
2768 }
2769
2770 if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2771 full_update = true;
2772 break;
2773 }
2774
2775 sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2776 sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
2777 sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
2778 crtc_state->update_planes |= BIT(plane->id);
2779
2780 /*
2781 * Sel_fetch_area is calculated for UV plane. Use
2782 * same area for Y plane as well.
2783 */
2784 if (linked) {
2785 struct intel_plane_state *linked_new_plane_state;
2786 struct drm_rect *linked_sel_fetch_area;
2787
2788 linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
2789 if (IS_ERR(linked_new_plane_state))
2790 return PTR_ERR(linked_new_plane_state);
2791
2792 linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
2793 linked_sel_fetch_area->y1 = sel_fetch_area->y1;
2794 linked_sel_fetch_area->y2 = sel_fetch_area->y2;
2795 crtc_state->update_planes |= BIT(linked->id);
2796 }
2797 }
2798
2799 skip_sel_fetch_set_loop:
2800 psr2_man_trk_ctl_calc(crtc_state, full_update);
2801 crtc_state->pipe_srcsz_early_tpt =
2802 psr2_pipe_srcsz_early_tpt_calc(crtc_state, full_update);
2803 return 0;
2804 }
2805
2806 void intel_psr_pre_plane_update(struct intel_atomic_state *state,
2807 struct intel_crtc *crtc)
2808 {
2809 struct intel_display *display = to_intel_display(state);
2810 struct drm_i915_private *i915 = to_i915(state->base.dev);
2811 const struct intel_crtc_state *old_crtc_state =
2812 intel_atomic_get_old_crtc_state(state, crtc);
2813 const struct intel_crtc_state *new_crtc_state =
2814 intel_atomic_get_new_crtc_state(state, crtc);
2815 struct intel_encoder *encoder;
2816
2817 if (!HAS_PSR(display))
2818 return;
2819
2820 for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2821 old_crtc_state->uapi.encoder_mask) {
2822 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2823 struct intel_psr *psr = &intel_dp->psr;
2824
2825 mutex_lock(&psr->lock);
2826
2827 if (psr->enabled) {
2828 /*
2829 * Reasons to disable:
2830 * - PSR disabled in new state
2831 * - All planes will go inactive
2832 * - Changing between PSR versions
2833 * - Region Early Transport changing
2834 * - Display WA #1136: skl, bxt
2835 */
2836 if (intel_crtc_needs_modeset(new_crtc_state) ||
2837 !new_crtc_state->has_psr ||
2838 !new_crtc_state->active_planes ||
2839 new_crtc_state->has_sel_update != psr->sel_update_enabled ||
2840 new_crtc_state->enable_psr2_su_region_et != psr->su_region_et_enabled ||
2841 new_crtc_state->has_panel_replay != psr->panel_replay_enabled ||
2842 (DISPLAY_VER(i915) < 11 && new_crtc_state->wm_level_disabled))
2843 intel_psr_disable_locked(intel_dp);
2844 else if (new_crtc_state->wm_level_disabled)
2845 /* Wa_14015648006 */
2846 wm_optimization_wa(intel_dp, new_crtc_state);
2847 }
2848
2849 mutex_unlock(&psr->lock);
2850 }
2851 }
2852
2853 void intel_psr_post_plane_update(struct intel_atomic_state *state,
2854 struct intel_crtc *crtc)
2855 {
2856 struct intel_display *display = to_intel_display(state);
2857 const struct intel_crtc_state *crtc_state =
2858 intel_atomic_get_new_crtc_state(state, crtc);
2859 struct intel_encoder *encoder;
2860
2861 if (!crtc_state->has_psr)
2862 return;
2863
2864 for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2865 crtc_state->uapi.encoder_mask) {
2866 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2867 struct intel_psr *psr = &intel_dp->psr;
2868 bool keep_disabled = false;
2869
2870 mutex_lock(&psr->lock);
2871
2872 drm_WARN_ON(display->drm,
2873 psr->enabled && !crtc_state->active_planes);
2874
2875 keep_disabled |= psr->sink_not_reliable;
2876 keep_disabled |= !crtc_state->active_planes;
2877
2878 /* Display WA #1136: skl, bxt */
2879 keep_disabled |= DISPLAY_VER(display) < 11 &&
2880 crtc_state->wm_level_disabled;
2881
2882 if (!psr->enabled && !keep_disabled)
2883 intel_psr_enable_locked(intel_dp, crtc_state);
2884 else if (psr->enabled && !crtc_state->wm_level_disabled)
2885 /* Wa_14015648006 */
2886 wm_optimization_wa(intel_dp, crtc_state);
2887
2888 /* Force a PSR exit when enabling CRC to avoid CRC timeouts */
2889 if (crtc_state->crc_enabled && psr->enabled)
2890 intel_psr_force_update(intel_dp);
2891
2892 /*
2893 * Clear possible busy bits in case we have
2894 * invalidate -> flip -> flush sequence.
2895 */
2896 intel_dp->psr.busy_frontbuffer_bits = 0;
2897
2898 mutex_unlock(&psr->lock);
2899 }
2900 }
2901
2902 static int _psr2_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2903 {
2904 struct intel_display *display = to_intel_display(intel_dp);
2905 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2906
2907 /*
2908 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
2909 	 * As all higher states have bit 4 of PSR2 state set we can just wait for
2910 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
2911 */
2912 return intel_de_wait_for_clear(display,
2913 EDP_PSR2_STATUS(display, cpu_transcoder),
2914 EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 50);
2915 }
2916
2917 static int _psr1_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2918 {
2919 struct intel_display *display = to_intel_display(intel_dp);
2920 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2921
2922 /*
2923 * From bspec: Panel Self Refresh (BDW+)
2924 * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
2925 * exit training time + 1.5 ms of aux channel handshake. 50 ms is
2926 * defensive enough to cover everything.
2927 */
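	/*
	 * Editor's note: at 60 Hz that bound works out to roughly
	 * 16.7 ms + 6 ms + 1.5 ms, i.e. about 24 ms, so 50 ms leaves margin.
	 */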
2928 return intel_de_wait_for_clear(display,
2929 psr_status_reg(display, cpu_transcoder),
2930 EDP_PSR_STATUS_STATE_MASK, 50);
2931 }
2932
2933 /**
2934  * intel_psr_wait_for_idle_locked - wait for PSR to be ready for a pipe update
2935 * @new_crtc_state: new CRTC state
2936 *
2937 * This function is expected to be called from pipe_update_start() where it is
2938 * not expected to race with PSR enable or disable.
2939 */
2940 void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
2941 {
2942 struct intel_display *display = to_intel_display(new_crtc_state);
2943 struct intel_encoder *encoder;
2944
2945 if (!new_crtc_state->has_psr)
2946 return;
2947
2948 for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2949 new_crtc_state->uapi.encoder_mask) {
2950 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2951 int ret;
2952
2953 lockdep_assert_held(&intel_dp->psr.lock);
2954
2955 if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
2956 continue;
2957
2958 if (intel_dp->psr.sel_update_enabled)
2959 ret = _psr2_ready_for_pipe_update_locked(intel_dp);
2960 else
2961 ret = _psr1_ready_for_pipe_update_locked(intel_dp);
2962
2963 if (ret)
2964 drm_err(display->drm,
2965 "PSR wait timed out, atomic update may fail\n");
2966 }
2967 }
2968
2969 static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
2970 {
2971 struct intel_display *display = to_intel_display(intel_dp);
2972 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2973 i915_reg_t reg;
2974 u32 mask;
2975 int err;
2976
2977 if (!intel_dp->psr.enabled)
2978 return false;
2979
2980 if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2981 intel_dp->psr.panel_replay_enabled)) {
2982 reg = EDP_PSR2_STATUS(display, cpu_transcoder);
2983 mask = EDP_PSR2_STATUS_STATE_MASK;
2984 } else {
2985 reg = psr_status_reg(display, cpu_transcoder);
2986 mask = EDP_PSR_STATUS_STATE_MASK;
2987 }
2988
2989 mutex_unlock(&intel_dp->psr.lock);
2990
2991 err = intel_de_wait_for_clear(display, reg, mask, 50);
2992 if (err)
2993 drm_err(display->drm,
2994 "Timed out waiting for PSR Idle for re-enable\n");
2995
2996 /* After the unlocked wait, verify that PSR is still wanted! */
2997 mutex_lock(&intel_dp->psr.lock);
2998 return err == 0 && intel_dp->psr.enabled;
2999 }
3000
3001 static int intel_psr_fastset_force(struct intel_display *display)
3002 {
3003 struct drm_connector_list_iter conn_iter;
3004 struct drm_modeset_acquire_ctx ctx;
3005 struct drm_atomic_state *state;
3006 struct drm_connector *conn;
3007 int err = 0;
3008
3009 state = drm_atomic_state_alloc(display->drm);
3010 if (!state)
3011 return -ENOMEM;
3012
3013 drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
3014
3015 state->acquire_ctx = &ctx;
3016 to_intel_atomic_state(state)->internal = true;
3017
3018 retry:
3019 drm_connector_list_iter_begin(display->drm, &conn_iter);
3020 drm_for_each_connector_iter(conn, &conn_iter) {
3021 struct drm_connector_state *conn_state;
3022 struct drm_crtc_state *crtc_state;
3023
3024 if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
3025 continue;
3026
3027 conn_state = drm_atomic_get_connector_state(state, conn);
3028 if (IS_ERR(conn_state)) {
3029 err = PTR_ERR(conn_state);
3030 break;
3031 }
3032
3033 if (!conn_state->crtc)
3034 continue;
3035
3036 crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
3037 if (IS_ERR(crtc_state)) {
3038 err = PTR_ERR(crtc_state);
3039 break;
3040 }
3041
3042 /* Mark mode as changed to trigger a pipe->update() */
3043 crtc_state->mode_changed = true;
3044 }
3045 drm_connector_list_iter_end(&conn_iter);
3046
3047 if (err == 0)
3048 err = drm_atomic_commit(state);
3049
3050 if (err == -EDEADLK) {
3051 drm_atomic_state_clear(state);
3052 err = drm_modeset_backoff(&ctx);
3053 if (!err)
3054 goto retry;
3055 }
3056
3057 drm_modeset_drop_locks(&ctx);
3058 drm_modeset_acquire_fini(&ctx);
3059 drm_atomic_state_put(state);
3060
3061 return err;
3062 }
3063
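/*
 * Editor's note (assumption): this is the setter behind the
 * i915_edp_psr_debug debugfs knob; the accepted bits are the
 * I915_PSR_DEBUG_* flags validated below.
 */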
3064 int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
3065 {
3066 struct intel_display *display = to_intel_display(intel_dp);
3067 const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
3068 const u32 disable_bits = val & (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3069 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3070 u32 old_mode, old_disable_bits;
3071 int ret;
3072
3073 if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3074 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE |
3075 I915_PSR_DEBUG_MODE_MASK) ||
3076 mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
3077 drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val);
3078 return -EINVAL;
3079 }
3080
3081 ret = mutex_lock_interruptible(&intel_dp->psr.lock);
3082 if (ret)
3083 return ret;
3084
3085 old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
3086 old_disable_bits = intel_dp->psr.debug &
3087 (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
3088 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
3089
3090 intel_dp->psr.debug = val;
3091
3092 /*
3093 * Do it right away if it's already enabled, otherwise it will be done
3094 * when enabling the source.
3095 */
3096 if (intel_dp->psr.enabled)
3097 psr_irq_control(intel_dp);
3098
3099 mutex_unlock(&intel_dp->psr.lock);
3100
3101 if (old_mode != mode || old_disable_bits != disable_bits)
3102 ret = intel_psr_fastset_force(display);
3103
3104 return ret;
3105 }
3106
3107 static void intel_psr_handle_irq(struct intel_dp *intel_dp)
3108 {
3109 struct intel_psr *psr = &intel_dp->psr;
3110
3111 intel_psr_disable_locked(intel_dp);
3112 psr->sink_not_reliable = true;
3113 	/* let's make sure that the sink is awake */
3114 drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
3115 }
3116
3117 static void intel_psr_work(struct work_struct *work)
3118 {
3119 struct intel_dp *intel_dp =
3120 container_of(work, typeof(*intel_dp), psr.work);
3121
3122 mutex_lock(&intel_dp->psr.lock);
3123
3124 if (!intel_dp->psr.enabled)
3125 goto unlock;
3126
3127 if (READ_ONCE(intel_dp->psr.irq_aux_error))
3128 intel_psr_handle_irq(intel_dp);
3129
3130 /*
3131 	 * We have to make sure PSR is ready for re-enable,
3132 	 * otherwise it stays disabled until the next full enable/disable cycle.
3133 * PSR might take some time to get fully disabled
3134 * and be ready for re-enable.
3135 */
3136 if (!__psr_wait_for_idle_locked(intel_dp))
3137 goto unlock;
3138
3139 /*
3140 * The delayed work can race with an invalidate hence we need to
3141 * recheck. Since psr_flush first clears this and then reschedules we
3142 * won't ever miss a flush when bailing out here.
3143 */
3144 if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
3145 goto unlock;
3146
3147 intel_psr_activate(intel_dp);
3148 unlock:
3149 mutex_unlock(&intel_dp->psr.lock);
3150 }
3151
3152 static void intel_psr_configure_full_frame_update(struct intel_dp *intel_dp)
3153 {
3154 struct intel_display *display = to_intel_display(intel_dp);
3155 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3156
3157 if (!intel_dp->psr.psr2_sel_fetch_enabled)
3158 return;
3159
3160 if (DISPLAY_VER(display) >= 20)
3161 intel_de_write(display, LNL_SFF_CTL(cpu_transcoder),
3162 LNL_SFF_CTL_SF_SINGLE_FULL_FRAME);
3163 else
3164 intel_de_write(display,
3165 PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3166 man_trk_ctl_enable_bit_get(display) |
3167 man_trk_ctl_partial_frame_bit_get(display) |
3168 man_trk_ctl_single_full_frame_bit_get(display) |
3169 man_trk_ctl_continuos_full_frame(display));
3170 }
3171
3172 static void _psr_invalidate_handle(struct intel_dp *intel_dp)
3173 {
3174 if (intel_dp->psr.psr2_sel_fetch_enabled) {
3175 if (!intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3176 intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
3177 intel_psr_configure_full_frame_update(intel_dp);
3178 }
3179
3180 intel_psr_force_update(intel_dp);
3181 } else {
3182 intel_psr_exit(intel_dp);
3183 }
3184 }
3185
3186 /**
3187 * intel_psr_invalidate - Invalidate PSR
3188 * @display: display device
3189 * @frontbuffer_bits: frontbuffer plane tracking bits
3190 * @origin: which operation caused the invalidate
3191 *
3192 * Since the hardware frontbuffer tracking has gaps we need to integrate
3193 * with the software frontbuffer tracking. This function gets called every
3194 * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3195 * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3196 *
3197 * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3198 */
3199 void intel_psr_invalidate(struct intel_display *display,
3200 unsigned frontbuffer_bits, enum fb_op_origin origin)
3201 {
3202 struct intel_encoder *encoder;
3203
3204 if (origin == ORIGIN_FLIP)
3205 return;
3206
3207 for_each_intel_encoder_with_psr(display->drm, encoder) {
3208 unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3209 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3210
3211 mutex_lock(&intel_dp->psr.lock);
3212 if (!intel_dp->psr.enabled) {
3213 mutex_unlock(&intel_dp->psr.lock);
3214 continue;
3215 }
3216
3217 pipe_frontbuffer_bits &=
3218 INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3219 intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;
3220
3221 if (pipe_frontbuffer_bits)
3222 _psr_invalidate_handle(intel_dp);
3223
3224 mutex_unlock(&intel_dp->psr.lock);
3225 }
3226 }
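/*
 * A rough sketch of how the frontbuffer tracking code (intel_frontbuffer.c)
 * is expected to pair this with intel_psr_flush() around CPU rendering; the
 * actual call sites live outside this file:
 *
 *	intel_psr_invalidate(display, frontbuffer_bits, ORIGIN_CPU);
 *	... CPU rendering into the frontbuffer ...
 *	intel_psr_flush(display, frontbuffer_bits, ORIGIN_CPU);
 *
 * ORIGIN_FLIP invalidates are ignored above because page flips are already
 * handled by the hardware/selective fetch programming.
 */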
3227 /*
3228 * Once we rely completely on PSR2 S/W tracking, intel_psr_flush() will
3229 * also invalidate and flush the PSR for the ORIGIN_FLIP event, so
3230 * tgl_dc3co_flush_locked() will need to be changed accordingly.
3232 */
3233 static void
3234 tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
3235 enum fb_op_origin origin)
3236 {
3237 struct intel_display *display = to_intel_display(intel_dp);
3238 struct drm_i915_private *i915 = to_i915(display->drm);
3239
3240 if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
3241 !intel_dp->psr.active)
3242 return;
3243
3244 /*
3245 * At every frontbuffer flush/flip event, modify the delay of the delayed
3246 * work; when the delayed work finally runs it means the display has been idle.
3247 */
3248 if (!(frontbuffer_bits &
3249 INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
3250 return;
3251
3252 tgl_psr2_enable_dc3co(intel_dp);
3253 mod_delayed_work(i915->unordered_wq, &intel_dp->psr.dc3co_work,
3254 intel_dp->psr.dc3co_exit_delay);
3255 }
3256
3257 static void _psr_flush_handle(struct intel_dp *intel_dp)
3258 {
3259 struct intel_display *display = to_intel_display(intel_dp);
3260 struct drm_i915_private *dev_priv = to_i915(display->drm);
3261
3262 if (intel_dp->psr.psr2_sel_fetch_enabled) {
3263 if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3264 /* can we turn CFF off? */
3265 if (intel_dp->psr.busy_frontbuffer_bits == 0)
3266 intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
3267 }
3268
3269 /*
3270 * Still keep the CFF bit enabled, as we don't have a proper SU
3271 * configuration in case an update is sent for any reason after
3272 * the SFF bit gets cleared by the HW on the next vblank.
3273 *
3274 * NOTE: Setting the CFF bit is not needed from LunarLake onwards,
3275 * as we have a dedicated register for the SFF bit and are not
3276 * overwriting the existing SU configuration.
3277 */
3278 intel_psr_configure_full_frame_update(intel_dp);
3279 }
3280
3281 intel_psr_force_update(intel_dp);
3282
3283 if (!intel_dp->psr.psr2_sel_fetch_enabled && !intel_dp->psr.active &&
3284 !intel_dp->psr.busy_frontbuffer_bits)
3285 queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
3286 }
3287
3288 /**
3289 * intel_psr_flush - Flush PSR
3290 * @display: display device
3291 * @frontbuffer_bits: frontbuffer plane tracking bits
3292 * @origin: which operation caused the flush
3293 *
3294 * Since the hardware frontbuffer tracking has gaps we need to integrate
3295 * with the software frontbuffer tracking. This function gets called every
3296 * time frontbuffer rendering has completed and flushed out to memory. PSR
3297 * can be enabled again if no other frontbuffer relevant to PSR is dirty.
3298 *
3299 * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3300 */
3301 void intel_psr_flush(struct intel_display *display,
3302 unsigned frontbuffer_bits, enum fb_op_origin origin)
3303 {
3304 struct intel_encoder *encoder;
3305
3306 for_each_intel_encoder_with_psr(display->drm, encoder) {
3307 unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3308 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3309
3310 mutex_lock(&intel_dp->psr.lock);
3311 if (!intel_dp->psr.enabled) {
3312 mutex_unlock(&intel_dp->psr.lock);
3313 continue;
3314 }
3315
3316 pipe_frontbuffer_bits &=
3317 INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3318 intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;
3319
3320 /*
3321 * If the PSR is paused by an explicit intel_psr_paused() call,
3322 * we have to ensure that the PSR is not activated until
3323 * intel_psr_resume() is called.
3324 */
3325 if (intel_dp->psr.paused)
3326 goto unlock;
3327
3328 if (origin == ORIGIN_FLIP ||
3329 (origin == ORIGIN_CURSOR_UPDATE &&
3330 !intel_dp->psr.psr2_sel_fetch_enabled)) {
3331 tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
3332 goto unlock;
3333 }
3334
3335 if (pipe_frontbuffer_bits == 0)
3336 goto unlock;
3337
3338 /* By definition flush = invalidate + flush */
3339 _psr_flush_handle(intel_dp);
3340 unlock:
3341 mutex_unlock(&intel_dp->psr.lock);
3342 }
3343 }
3344
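/*
 * Summarizing the origin dispatch in intel_psr_flush() above (a restatement
 * of the code, not new behaviour), once the paused check has passed:
 *
 *	ORIGIN_FLIP          -> tgl_dc3co_flush_locked() only
 *	ORIGIN_CURSOR_UPDATE -> ditto, unless selective fetch is enabled
 *	any other origin     -> _psr_flush_handle()
 *
 * so only non-flip frontbuffer writes force a full PSR exit/update cycle.
 */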
3345 /**
3346 * intel_psr_init - Init basic PSR work and mutex.
3347 * @intel_dp: Intel DP
3348 *
3349 * This function is called after the connector has been initialized
3350 * (connector initialization handles the connector capabilities) and it
3351 * initializes the basic PSR state for each DP encoder.
3352 */
3353 void intel_psr_init(struct intel_dp *intel_dp)
3354 {
3355 struct intel_display *display = to_intel_display(intel_dp);
3356 struct intel_connector *connector = intel_dp->attached_connector;
3357 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
3358
3359 if (!(HAS_PSR(display) || HAS_DP20(display)))
3360 return;
3361
3362 /*
3363 * HSW spec explicitly says PSR is tied to port A.
3364 * BDW+ platforms have an instance of the PSR registers per transcoder,
3365 * but BDW, GEN9 and GEN11 are only validated by the HW team on the eDP
3366 * transcoder.
3367 * For now only one instance of PSR is supported on BDW, GEN9 and GEN11,
3368 * so let's keep it hardcoded to PORT_A for those platforms.
3369 * GEN12 does support an instance of the PSR registers per transcoder.
3370 */
3371 if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) {
3372 drm_dbg_kms(display->drm,
3373 "PSR condition failed: Port not supported\n");
3374 return;
3375 }
3376
3377 if ((HAS_DP20(display) && !intel_dp_is_edp(intel_dp)) ||
3378 DISPLAY_VER(display) >= 20)
3379 intel_dp->psr.source_panel_replay_support = true;
3380
3381 if (HAS_PSR(display) && intel_dp_is_edp(intel_dp))
3382 intel_dp->psr.source_support = true;
3383
3384 /* Set link_standby vs. link_off defaults */
3385 if (DISPLAY_VER(display) < 12)
3386 /* For platforms up to TGL, respect the VBT setting again */
3387 intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link;
3388
3389 INIT_WORK(&intel_dp->psr.work, intel_psr_work);
3390 INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
3391 mutex_init(&intel_dp->psr.lock);
3392 }
3393
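/*
 * Assumed caller context (a sketch only, the actual call site lives in
 * intel_dp.c): the DP connector init path is expected to end with something
 * like
 *
 *	intel_dp_init_connector(...)
 *		-> read DPCD / VBT, register the connector
 *		-> intel_psr_init(intel_dp);
 *
 * so psr.work, psr.dc3co_work and psr.lock are initialized before the first
 * modeset can enable PSR.
 */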
3394 static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
3395 u8 *status, u8 *error_status)
3396 {
3397 struct drm_dp_aux *aux = &intel_dp->aux;
3398 int ret;
3399 unsigned int offset;
3400
3401 offset = intel_dp->psr.panel_replay_enabled ?
3402 DP_SINK_DEVICE_PR_AND_FRAME_LOCK_STATUS : DP_PSR_STATUS;
3403
3404 ret = drm_dp_dpcd_readb(aux, offset, status);
3405 if (ret != 1)
3406 return ret;
3407
3408 offset = intel_dp->psr.panel_replay_enabled ?
3409 DP_PANEL_REPLAY_ERROR_STATUS : DP_PSR_ERROR_STATUS;
3410
3411 ret = drm_dp_dpcd_readb(aux, offset, error_status);
3412 if (ret != 1)
3413 return ret;
3414
3415 *status = *status & DP_PSR_SINK_STATE_MASK;
3416
3417 return 0;
3418 }
3419
3420 static void psr_alpm_check(struct intel_dp *intel_dp)
3421 {
3422 struct intel_display *display = to_intel_display(intel_dp);
3423 struct drm_dp_aux *aux = &intel_dp->aux;
3424 struct intel_psr *psr = &intel_dp->psr;
3425 u8 val;
3426 int r;
3427
3428 if (!psr->sel_update_enabled)
3429 return;
3430
3431 r = drm_dp_dpcd_readb(aux, DP_RECEIVER_ALPM_STATUS, &val);
3432 if (r != 1) {
3433 drm_err(display->drm, "Error reading ALPM status\n");
3434 return;
3435 }
3436
3437 if (val & DP_ALPM_LOCK_TIMEOUT_ERROR) {
3438 intel_psr_disable_locked(intel_dp);
3439 psr->sink_not_reliable = true;
3440 drm_dbg_kms(display->drm,
3441 "ALPM lock timeout error, disabling PSR\n");
3442
3443 /* Clearing error */
3444 drm_dp_dpcd_writeb(aux, DP_RECEIVER_ALPM_STATUS, val);
3445 }
3446 }
3447
3448 static void psr_capability_changed_check(struct intel_dp *intel_dp)
3449 {
3450 struct intel_display *display = to_intel_display(intel_dp);
3451 struct intel_psr *psr = &intel_dp->psr;
3452 u8 val;
3453 int r;
3454
3455 r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
3456 if (r != 1) {
3457 drm_err(display->drm, "Error reading DP_PSR_ESI\n");
3458 return;
3459 }
3460
3461 if (val & DP_PSR_CAPS_CHANGE) {
3462 intel_psr_disable_locked(intel_dp);
3463 psr->sink_not_reliable = true;
3464 drm_dbg_kms(display->drm,
3465 "Sink PSR capability changed, disabling PSR\n");
3466
3467 /* Clearing it */
3468 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
3469 }
3470 }
3471
3472 /*
3473 * The following error bits are common between PSR and Panel Replay:
3474 * DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR
3475 * DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR == DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR
3476 * DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR
3477 * so this function relies on the PSR definitions for both.
3478 */
3479 void intel_psr_short_pulse(struct intel_dp *intel_dp)
3480 {
3481 struct intel_display *display = to_intel_display(intel_dp);
3482 struct intel_psr *psr = &intel_dp->psr;
3483 u8 status, error_status;
3484 const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
3485 DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3486 DP_PSR_LINK_CRC_ERROR;
3487
3488 if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
3489 return;
3490
3491 mutex_lock(&psr->lock);
3492
3493 psr->link_ok = false;
3494
3495 if (!psr->enabled)
3496 goto exit;
3497
3498 if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
3499 drm_err(display->drm,
3500 "Error reading PSR status or error status\n");
3501 goto exit;
3502 }
3503
3504 if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) ||
3505 (error_status & errors)) {
3506 intel_psr_disable_locked(intel_dp);
3507 psr->sink_not_reliable = true;
3508 }
3509
3510 if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR &&
3511 !error_status)
3512 drm_dbg_kms(display->drm,
3513 "PSR sink internal error, disabling PSR\n");
3514 if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3515 drm_dbg_kms(display->drm,
3516 "PSR RFB storage error, disabling PSR\n");
3517 if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3518 drm_dbg_kms(display->drm,
3519 "PSR VSC SDP uncorrectable error, disabling PSR\n");
3520 if (error_status & DP_PSR_LINK_CRC_ERROR)
3521 drm_dbg_kms(display->drm,
3522 "PSR Link CRC error, disabling PSR\n");
3523
3524 if (error_status & ~errors)
3525 drm_err(display->drm,
3526 "PSR_ERROR_STATUS unhandled errors %x\n",
3527 error_status & ~errors);
3528 /* clear status register */
3529 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);
3530
3531 if (!psr->panel_replay_enabled) {
3532 psr_alpm_check(intel_dp);
3533 psr_capability_changed_check(intel_dp);
3534 }
3535
3536 exit:
3537 mutex_unlock(&psr->lock);
3538 }
3539
3540 bool intel_psr_enabled(struct intel_dp *intel_dp)
3541 {
3542 bool ret;
3543
3544 if (!CAN_PSR(intel_dp))
3545 return false;
3546
3547 mutex_lock(&intel_dp->psr.lock);
3548 ret = intel_dp->psr.enabled;
3549 mutex_unlock(&intel_dp->psr.lock);
3550
3551 return ret;
3552 }
3553
3554 /**
3555 * intel_psr_link_ok - return psr->link_ok
3556 * @intel_dp: struct intel_dp
3557 *
3558 * We are seeing unexpected link re-trainings with some panels. This is caused
3559 * by the panel reporting a bad link status after PSR is enabled. Code checking
3560 * the link status can call this to decide whether a bad link status reported
3561 * by the panel can be ignored, i.e. if the panel reports a bad link but
3562 * intel_psr_link_ok() says the link is ok, the caller should rely on the latter.
3563 *
3564 * Return: value of link_ok
3565 */
3566 bool intel_psr_link_ok(struct intel_dp *intel_dp)
3567 {
3568 bool ret;
3569
3570 if ((!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp)) ||
3571 !intel_dp_is_edp(intel_dp))
3572 return false;
3573
3574 mutex_lock(&intel_dp->psr.lock);
3575 ret = intel_dp->psr.link_ok;
3576 mutex_unlock(&intel_dp->psr.lock);
3577
3578 return ret;
3579 }
3580
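/*
 * A hedged example of the intended use from link checking code (the real
 * callers live outside this file; this is only a sketch):
 *
 *	if (!drm_dp_channel_eq_ok(link_status, lane_count) &&
 *	    !intel_psr_link_ok(intel_dp))
 *		... only then treat the link as bad and schedule a re-train ...
 */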
3581 /**
3582 * intel_psr_lock - grab PSR lock
3583 * @crtc_state: the crtc state
3584 *
3585 * This is initially meant to be used around a CRTC update, when
3586 * vblank-sensitive registers are updated and we need to grab the lock
3587 * beforehand to avoid vblank evasion.
3588 */
3589 void intel_psr_lock(const struct intel_crtc_state *crtc_state)
3590 {
3591 struct intel_display *display = to_intel_display(crtc_state);
3592 struct intel_encoder *encoder;
3593
3594 if (!crtc_state->has_psr)
3595 return;
3596
3597 for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3598 crtc_state->uapi.encoder_mask) {
3599 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3600
3601 mutex_lock(&intel_dp->psr.lock);
3602 break;
3603 }
3604 }
3605
3606 /**
3607 * intel_psr_unlock - release PSR lock
3608 * @crtc_state: the crtc state
3609 *
3610 * Release the PSR lock that was held during pipe update.
3611 */
3612 void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
3613 {
3614 struct intel_display *display = to_intel_display(crtc_state);
3615 struct intel_encoder *encoder;
3616
3617 if (!crtc_state->has_psr)
3618 return;
3619
3620 for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3621 crtc_state->uapi.encoder_mask) {
3622 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3623
3624 mutex_unlock(&intel_dp->psr.lock);
3625 break;
3626 }
3627 }
3628
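/*
 * Typical pairing during a pipe update (a sketch of the intended usage, the
 * real caller is the pipe update/vblank evasion code):
 *
 *	intel_psr_lock(new_crtc_state);
 *	... vblank evasion and vblank sensitive register writes ...
 *	intel_psr_unlock(new_crtc_state);
 */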
3629 static void
3630 psr_source_status(struct intel_dp *intel_dp, struct seq_file *m)
3631 {
3632 struct intel_display *display = to_intel_display(intel_dp);
3633 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3634 const char *status = "unknown";
3635 u32 val, status_val;
3636
3637 if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
3638 intel_dp->psr.panel_replay_enabled)) {
3639 static const char * const live_status[] = {
3640 "IDLE",
3641 "CAPTURE",
3642 "CAPTURE_FS",
3643 "SLEEP",
3644 "BUFON_FW",
3645 "ML_UP",
3646 "SU_STANDBY",
3647 "FAST_SLEEP",
3648 "DEEP_SLEEP",
3649 "BUF_ON",
3650 "TG_ON"
3651 };
3652 val = intel_de_read(display,
3653 EDP_PSR2_STATUS(display, cpu_transcoder));
3654 status_val = REG_FIELD_GET(EDP_PSR2_STATUS_STATE_MASK, val);
3655 if (status_val < ARRAY_SIZE(live_status))
3656 status = live_status[status_val];
3657 } else {
3658 static const char * const live_status[] = {
3659 "IDLE",
3660 "SRDONACK",
3661 "SRDENT",
3662 "BUFOFF",
3663 "BUFON",
3664 "AUXACK",
3665 "SRDOFFACK",
3666 "SRDENT_ON",
3667 };
3668 val = intel_de_read(display,
3669 psr_status_reg(display, cpu_transcoder));
3670 status_val = REG_FIELD_GET(EDP_PSR_STATUS_STATE_MASK, val);
3671 if (status_val < ARRAY_SIZE(live_status))
3672 status = live_status[status_val];
3673 }
3674
3675 seq_printf(m, "Source PSR/PanelReplay status: %s [0x%08x]\n", status, val);
3676 }
3677
3678 static void intel_psr_sink_capability(struct intel_dp *intel_dp,
3679 struct seq_file *m)
3680 {
3681 struct intel_psr *psr = &intel_dp->psr;
3682
3683 seq_printf(m, "Sink support: PSR = %s",
3684 str_yes_no(psr->sink_support));
3685
3686 if (psr->sink_support)
3687 seq_printf(m, " [0x%02x]", intel_dp->psr_dpcd[0]);
3688 if (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED)
3689 seq_printf(m, " (Early Transport)");
3690 seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support));
3691 seq_printf(m, ", Panel Replay Selective Update = %s",
3692 str_yes_no(psr->sink_panel_replay_su_support));
3693 if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
3694 seq_printf(m, " (Early Transport)");
3695 seq_printf(m, "\n");
3696 }
3697
3698 static void intel_psr_print_mode(struct intel_dp *intel_dp,
3699 struct seq_file *m)
3700 {
3701 struct intel_psr *psr = &intel_dp->psr;
3702 const char *status, *mode, *region_et;
3703
3704 if (psr->enabled)
3705 status = " enabled";
3706 else
3707 status = "disabled";
3708
3709 if (psr->panel_replay_enabled && psr->sel_update_enabled)
3710 mode = "Panel Replay Selective Update";
3711 else if (psr->panel_replay_enabled)
3712 mode = "Panel Replay";
3713 else if (psr->sel_update_enabled)
3714 mode = "PSR2";
3715 else if (psr->enabled)
3716 mode = "PSR1";
3717 else
3718 mode = "";
3719
3720 if (psr->su_region_et_enabled)
3721 region_et = " (Early Transport)";
3722 else
3723 region_et = "";
3724
3725 seq_printf(m, "PSR mode: %s%s%s\n", mode, status, region_et);
3726 }
3727
3728 static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp)
3729 {
3730 struct intel_display *display = to_intel_display(intel_dp);
3731 struct drm_i915_private *dev_priv = to_i915(display->drm);
3732 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3733 struct intel_psr *psr = &intel_dp->psr;
3734 intel_wakeref_t wakeref;
3735 bool enabled;
3736 u32 val, psr2_ctl;
3737
3738 intel_psr_sink_capability(intel_dp, m);
3739
3740 if (!(psr->sink_support || psr->sink_panel_replay_support))
3741 return 0;
3742
3743 wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);
3744 mutex_lock(&psr->lock);
3745
3746 intel_psr_print_mode(intel_dp, m);
3747
3748 if (!psr->enabled) {
3749 seq_printf(m, "PSR sink not reliable: %s\n",
3750 str_yes_no(psr->sink_not_reliable));
3751
3752 goto unlock;
3753 }
3754
3755 if (psr->panel_replay_enabled) {
3756 val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder));
3757
3758 if (intel_dp_is_edp(intel_dp))
3759 psr2_ctl = intel_de_read(display,
3760 EDP_PSR2_CTL(display,
3761 cpu_transcoder));
3762
3763 enabled = val & TRANS_DP2_PANEL_REPLAY_ENABLE;
3764 } else if (psr->sel_update_enabled) {
3765 val = intel_de_read(display,
3766 EDP_PSR2_CTL(display, cpu_transcoder));
3767 enabled = val & EDP_PSR2_ENABLE;
3768 } else {
3769 val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder));
3770 enabled = val & EDP_PSR_ENABLE;
3771 }
3772 seq_printf(m, "Source PSR/PanelReplay ctl: %s [0x%08x]\n",
3773 str_enabled_disabled(enabled), val);
3774 if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp))
3775 seq_printf(m, "PSR2_CTL: 0x%08x\n",
3776 psr2_ctl);
3777 psr_source_status(intel_dp, m);
3778 seq_printf(m, "Busy frontbuffer bits: 0x%08x\n",
3779 psr->busy_frontbuffer_bits);
3780
3781 /*
3782 * SKL+ Perf counter is reset to 0 every time a DC state is entered
3783 */
3784 val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder));
3785 seq_printf(m, "Performance counter: %u\n",
3786 REG_FIELD_GET(EDP_PSR_PERF_CNT_MASK, val));
3787
3788 if (psr->debug & I915_PSR_DEBUG_IRQ) {
3789 seq_printf(m, "Last attempted entry at: %lld\n",
3790 psr->last_entry_attempt);
3791 seq_printf(m, "Last exit at: %lld\n", psr->last_exit);
3792 }
3793
3794 if (psr->sel_update_enabled) {
3795 u32 su_frames_val[3];
3796 int frame;
3797
3798 /*
3799 * Read all 3 registers beforehand to minimize the chance of crossing a
3800 * frame boundary between register reads
3801 */
3802 for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
3803 val = intel_de_read(display,
3804 PSR2_SU_STATUS(display, cpu_transcoder, frame));
3805 su_frames_val[frame / 3] = val;
3806 }
3807
3808 seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
3809
3810 for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
3811 u32 su_blocks;
3812
3813 su_blocks = su_frames_val[frame / 3] &
3814 PSR2_SU_STATUS_MASK(frame);
3815 su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
3816 seq_printf(m, "%d\t%d\n", frame, su_blocks);
3817 }
3818
3819 seq_printf(m, "PSR2 selective fetch: %s\n",
3820 str_enabled_disabled(psr->psr2_sel_fetch_enabled));
3821 }
3822
3823 unlock:
3824 mutex_unlock(&psr->lock);
3825 intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
3826
3827 return 0;
3828 }
3829
3830 static int i915_edp_psr_status_show(struct seq_file *m, void *data)
3831 {
3832 struct intel_display *display = m->private;
3833 struct intel_dp *intel_dp = NULL;
3834 struct intel_encoder *encoder;
3835
3836 if (!HAS_PSR(display))
3837 return -ENODEV;
3838
3839 /* Find the first EDP which supports PSR */
3840 for_each_intel_encoder_with_psr(display->drm, encoder) {
3841 intel_dp = enc_to_intel_dp(encoder);
3842 break;
3843 }
3844
3845 if (!intel_dp)
3846 return -ENODEV;
3847
3848 return intel_psr_status(m, intel_dp);
3849 }
3850 DEFINE_SHOW_ATTRIBUTE(i915_edp_psr_status);
3851
3852 static int
3853 i915_edp_psr_debug_set(void *data, u64 val)
3854 {
3855 struct intel_display *display = data;
3856 struct drm_i915_private *dev_priv = to_i915(display->drm);
3857 struct intel_encoder *encoder;
3858 intel_wakeref_t wakeref;
3859 int ret = -ENODEV;
3860
3861 if (!HAS_PSR(display))
3862 return ret;
3863
3864 for_each_intel_encoder_with_psr(display->drm, encoder) {
3865 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3866
3867 drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val);
3868
3869 wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);
3870
3871 // TODO: split to each transcoder's PSR debug state
3872 ret = intel_psr_debug_set(intel_dp, val);
3873
3874 intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
3875 }
3876
3877 return ret;
3878 }
3879
3880 static int
3881 i915_edp_psr_debug_get(void *data, u64 *val)
3882 {
3883 struct intel_display *display = data;
3884 struct intel_encoder *encoder;
3885
3886 if (!HAS_PSR(display))
3887 return -ENODEV;
3888
3889 for_each_intel_encoder_with_psr(display->drm, encoder) {
3890 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3891
3892 // TODO: split to each transcoder's PSR debug state
3893 *val = READ_ONCE(intel_dp->psr.debug);
3894 return 0;
3895 }
3896
3897 return -ENODEV;
3898 }
3899
3900 DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
3901 i915_edp_psr_debug_get, i915_edp_psr_debug_set,
3902 "%llu\n");
3903
3904 void intel_psr_debugfs_register(struct intel_display *display)
3905 {
3906 struct drm_minor *minor = display->drm->primary;
3907
3908 debugfs_create_file("i915_edp_psr_debug", 0644, minor->debugfs_root,
3909 display, &i915_edp_psr_debug_fops);
3910
3911 debugfs_create_file("i915_edp_psr_status", 0444, minor->debugfs_root,
3912 display, &i915_edp_psr_status_fops);
3913 }
3914
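/*
 * The files above are created under the DRM primary minor's debugfs
 * directory, typically /sys/kernel/debug/dri/<minor>/i915_edp_psr_debug and
 * /sys/kernel/debug/dri/<minor>/i915_edp_psr_status (the exact path depends
 * on the debugfs mount point and minor number).
 */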
3915 static const char *psr_mode_str(struct intel_dp *intel_dp)
3916 {
3917 if (intel_dp->psr.panel_replay_enabled)
3918 return "PANEL-REPLAY";
3919 else if (intel_dp->psr.enabled)
3920 return "PSR";
3921
3922 return "unknown";
3923 }
3924
3925 static int i915_psr_sink_status_show(struct seq_file *m, void *data)
3926 {
3927 struct intel_connector *connector = m->private;
3928 struct intel_dp *intel_dp = intel_attached_dp(connector);
3929 static const char * const sink_status[] = {
3930 "inactive",
3931 "transition to active, capture and display",
3932 "active, display from RFB",
3933 "active, capture and display on sink device timings",
3934 "transition to inactive, capture and display, timing re-sync",
3935 "reserved",
3936 "reserved",
3937 "sink internal error",
3938 };
3939 const char *str;
3940 int ret;
3941 u8 status, error_status;
3942
3943 if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp))) {
3944 seq_puts(m, "PSR/Panel-Replay Unsupported\n");
3945 return -ENODEV;
3946 }
3947
3948 if (connector->base.status != connector_status_connected)
3949 return -ENODEV;
3950
3951 ret = psr_get_status_and_error_status(intel_dp, &status, &error_status);
3952 if (ret)
3953 return ret;
3954
3955 status &= DP_PSR_SINK_STATE_MASK;
3956 if (status < ARRAY_SIZE(sink_status))
3957 str = sink_status[status];
3958 else
3959 str = "unknown";
3960
3961 seq_printf(m, "Sink %s status: 0x%x [%s]\n", psr_mode_str(intel_dp), status, str);
3962
3963 seq_printf(m, "Sink %s error status: 0x%x", psr_mode_str(intel_dp), error_status);
3964
3965 if (error_status & (DP_PSR_RFB_STORAGE_ERROR |
3966 DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3967 DP_PSR_LINK_CRC_ERROR))
3968 seq_puts(m, ":\n");
3969 else
3970 seq_puts(m, "\n");
3971 if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3972 seq_printf(m, "\t%s RFB storage error\n", psr_mode_str(intel_dp));
3973 if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3974 seq_printf(m, "\t%s VSC SDP uncorrectable error\n", psr_mode_str(intel_dp));
3975 if (error_status & DP_PSR_LINK_CRC_ERROR)
3976 seq_printf(m, "\t%s Link CRC error\n", psr_mode_str(intel_dp));
3977
3978 return ret;
3979 }
3980 DEFINE_SHOW_ATTRIBUTE(i915_psr_sink_status);
3981
3982 static int i915_psr_status_show(struct seq_file *m, void *data)
3983 {
3984 struct intel_connector *connector = m->private;
3985 struct intel_dp *intel_dp = intel_attached_dp(connector);
3986
3987 return intel_psr_status(m, intel_dp);
3988 }
3989 DEFINE_SHOW_ATTRIBUTE(i915_psr_status);
3990
3991 void intel_psr_connector_debugfs_add(struct intel_connector *connector)
3992 {
3993 struct intel_display *display = to_intel_display(connector);
3994 struct dentry *root = connector->base.debugfs_entry;
3995
3996 if (connector->base.connector_type != DRM_MODE_CONNECTOR_eDP &&
3997 connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort)
3998 return;
3999
4000 debugfs_create_file("i915_psr_sink_status", 0444, root,
4001 connector, &i915_psr_sink_status_fops);
4002
4003 if (HAS_PSR(display) || HAS_DP20(display))
4004 debugfs_create_file("i915_psr_status", 0444, root,
4005 connector, &i915_psr_status_fops);
4006 }
4007