xref: /linux/drivers/gpu/drm/i915/display/intel_vrr.c (revision dc1d9408c961c1c4d4b3b99a1d9390c17e13de71) !
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  *
5  */
6 
7 #include <drm/drm_print.h>
8 
9 #include "intel_alpm.h"
10 #include "intel_crtc.h"
11 #include "intel_de.h"
12 #include "intel_display_regs.h"
13 #include "intel_display_types.h"
14 #include "intel_dmc.h"
15 #include "intel_dmc_regs.h"
16 #include "intel_dp.h"
17 #include "intel_psr.h"
18 #include "intel_vrr.h"
19 #include "intel_vrr_regs.h"
20 #include "skl_prefill.h"
21 #include "skl_watermark.h"
22 
23 #define FIXED_POINT_PRECISION		100
24 #define CMRR_PRECISION_TOLERANCE	10
25 
26 /*
27  * Tunable parameters for DC Balance correction.
28  * These are captured based on experimentations.
29  */
30 #define DCB_CORRECTION_SENSITIVITY	30
31 #define DCB_CORRECTION_AGGRESSIVENESS	1000 /* ms × 100; 10 ms */
32 #define DCB_BLANK_TARGET		50
33 
intel_vrr_is_capable(struct intel_connector * connector)34 bool intel_vrr_is_capable(struct intel_connector *connector)
35 {
36 	struct intel_display *display = to_intel_display(connector);
37 	const struct drm_display_info *info = &connector->base.display_info;
38 	struct intel_dp *intel_dp;
39 
40 	if (!HAS_VRR(display))
41 		return false;
42 
43 	/*
44 	 * DP Sink is capable of VRR video timings if
45 	 * Ignore MSA bit is set in DPCD.
46 	 * EDID monitor range also should be atleast 10 for reasonable
47 	 * Adaptive Sync or Variable Refresh Rate end user experience.
48 	 */
49 	switch (connector->base.connector_type) {
50 	case DRM_MODE_CONNECTOR_eDP:
51 		if (!connector->panel.vbt.vrr)
52 			return false;
53 		fallthrough;
54 	case DRM_MODE_CONNECTOR_DisplayPort:
55 		if (connector->mst.dp)
56 			return false;
57 		intel_dp = intel_attached_dp(connector);
58 
59 		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
60 			return false;
61 
62 		break;
63 	default:
64 		return false;
65 	}
66 
67 	return info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
68 }
69 
intel_vrr_is_in_range(struct intel_connector * connector,int vrefresh)70 bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
71 {
72 	const struct drm_display_info *info = &connector->base.display_info;
73 
74 	return intel_vrr_is_capable(connector) &&
75 		vrefresh >= info->monitor_range.min_vfreq &&
76 		vrefresh <= info->monitor_range.max_vfreq;
77 }
78 
intel_vrr_possible(const struct intel_crtc_state * crtc_state)79 bool intel_vrr_possible(const struct intel_crtc_state *crtc_state)
80 {
81 	return crtc_state->vrr.flipline;
82 }
83 
84 void
intel_vrr_check_modeset(struct intel_atomic_state * state)85 intel_vrr_check_modeset(struct intel_atomic_state *state)
86 {
87 	int i;
88 	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
89 	struct intel_crtc *crtc;
90 
91 	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
92 					    new_crtc_state, i) {
93 		if (new_crtc_state->uapi.vrr_enabled !=
94 		    old_crtc_state->uapi.vrr_enabled)
95 			new_crtc_state->uapi.mode_changed = true;
96 	}
97 }
98 
static int intel_vrr_extra_vblank_delay(struct intel_display *display)
{
	/*
	 * On ICL/TGL VRR hardware inserts one extra scanline
	 * just after vactive, which pushes the vmin decision
	 * boundary ahead accordingly, and thus reduces the
	 * max guardband length by one scanline.
	 */
	if (DISPLAY_VER(display) >= 13)
		return 0;

	return 1;
}
109 
static int intel_vrr_vmin_flipline_offset(struct intel_display *display)
{
	/*
	 * ICL/TGL hardware imposes flipline>=vmin+1
	 *
	 * We reduce the vmin value to compensate when programming the
	 * hardware. This approach allows flipline to remain set at the
	 * original value, and thus the frame will have the desired
	 * minimum vtotal.
	 */
	if (DISPLAY_VER(display) >= 13)
		return 0;

	return 1;
}
122 
intel_vrr_guardband_to_pipeline_full(const struct intel_crtc_state * crtc_state,int guardband)123 static int intel_vrr_guardband_to_pipeline_full(const struct intel_crtc_state *crtc_state,
124 						int guardband)
125 {
126 	/* hardware imposes one extra scanline somewhere */
127 	return guardband - crtc_state->framestart_delay - 1;
128 }
129 
intel_vrr_pipeline_full_to_guardband(const struct intel_crtc_state * crtc_state,int pipeline_full)130 static int intel_vrr_pipeline_full_to_guardband(const struct intel_crtc_state *crtc_state,
131 						int pipeline_full)
132 {
133 	/* hardware imposes one extra scanline somewhere */
134 	return pipeline_full + crtc_state->framestart_delay + 1;
135 }
136 
137 /*
138  * Without VRR registers get latched at:
139  *  vblank_start
140  *
141  * With VRR the earliest registers can get latched is:
142  *  intel_vrr_vmin_vblank_start(), which if we want to maintain
143  *  the correct min vtotal is >=vblank_start+1
144  *
145  * The latest point registers can get latched is the vmax decision boundary:
146  *  intel_vrr_vmax_vblank_start()
147  *
148  * Between those two points the vblank exit starts (and hence registers get
149  * latched) ASAP after a push is sent.
150  *
151  * framestart_delay is programmable 1-4.
152  */
153 
/* Minimum vtotal (in scanlines) the VRR timing generator may produce. */
int intel_vrr_vmin_vtotal(const struct intel_crtc_state *crtc_state)
{
	/* Min vblank actually determined by flipline */
	return crtc_state->vrr.vmin;
}
159 
/* Maximum vtotal (in scanlines) the VRR timing generator may produce. */
int intel_vrr_vmax_vtotal(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.vmax;
}
164 
intel_vrr_vmin_vblank_start(const struct intel_crtc_state * crtc_state)165 int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
166 {
167 	return intel_vrr_vmin_vtotal(crtc_state) - crtc_state->vrr.guardband;
168 }
169 
intel_vrr_vmax_vblank_start(const struct intel_crtc_state * crtc_state)170 int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
171 {
172 	return intel_vrr_vmax_vtotal(crtc_state) - crtc_state->vrr.guardband;
173 }
174 
/*
 * is_cmrr_frac_required - does this mode need fractional CMRR?
 *
 * Compare the nominal refresh rate against the rate implied by the actual
 * pixel clock and vtotal (both scaled by FIXED_POINT_PRECISION). If they
 * differ by more than CMRR_PRECISION_TOLERANCE, a fractional divider would
 * be required to hit the target rate exactly.
 */
static bool
is_cmrr_frac_required(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int calculated_refresh_k, actual_refresh_k, pixel_clock_per_line;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	/* Avoid CMRR for now till we have VRR with fixed timings working */
	if (!HAS_CMRR(display) || true)
		return false;

	/* Nominal refresh rate, scaled x100. */
	actual_refresh_k =
		drm_mode_vrefresh(adjusted_mode) * FIXED_POINT_PRECISION;
	/* Scanlines per second from the real pixel clock. */
	pixel_clock_per_line =
		adjusted_mode->crtc_clock * 1000 / adjusted_mode->crtc_htotal;
	/* Refresh rate implied by the real timings, same x100 scaling. */
	calculated_refresh_k =
		pixel_clock_per_line * FIXED_POINT_PRECISION / adjusted_mode->crtc_vtotal;

	if ((actual_refresh_k - calculated_refresh_k) < CMRR_PRECISION_TOLERANCE)
		return false;

	return true;
}
198 
/*
 * cmrr_get_vtotal - compute the CMRR vtotal and M/N dividers
 * @crtc_state: crtc state; cmrr.cmrr_m/cmrr_n are written as a side effect
 * @video_mode_required: apply the 1001/1000 ("NTSC-style") rate adjustment
 *
 * Returns the vtotal needed to achieve the desired refresh rate.
 */
static unsigned int
cmrr_get_vtotal(struct intel_crtc_state *crtc_state, bool video_mode_required)
{
	int multiplier_m = 1, multiplier_n = 1, vtotal, desired_refresh_rate;
	u64 adjusted_pixel_rate;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	desired_refresh_rate = drm_mode_vrefresh(adjusted_mode);

	if (video_mode_required) {
		multiplier_m = 1001;
		multiplier_n = 1000;
	}

	/* N = target refresh rate * htotal, scaled by multiplier_n. */
	crtc_state->cmrr.cmrr_n = mul_u32_u32(desired_refresh_rate * adjusted_mode->crtc_htotal,
					      multiplier_n);
	/* vtotal = ceil(pixel rate / N), i.e. scanlines per frame. */
	vtotal = DIV_ROUND_UP_ULL(mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_n),
				  crtc_state->cmrr.cmrr_n);
	adjusted_pixel_rate = mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_m);
	/* do_div() divides in place and returns the remainder, which becomes M. */
	crtc_state->cmrr.cmrr_m = do_div(adjusted_pixel_rate, crtc_state->cmrr.cmrr_n);

	return vtotal;
}
222 
223 static
intel_vrr_compute_cmrr_timings(struct intel_crtc_state * crtc_state)224 void intel_vrr_compute_cmrr_timings(struct intel_crtc_state *crtc_state)
225 {
226 	/*
227 	 * TODO: Compute precise target refresh rate to determine
228 	 * if video_mode_required should be true. Currently set to
229 	 * false due to uncertainty about the precise target
230 	 * refresh Rate.
231 	 */
232 	crtc_state->vrr.vmax = cmrr_get_vtotal(crtc_state, false);
233 	crtc_state->vrr.vmin = crtc_state->vrr.vmax;
234 	crtc_state->vrr.flipline = crtc_state->vrr.vmin;
235 
236 	crtc_state->cmrr.enable = true;
237 	crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
238 }
239 
240 static
intel_vrr_compute_vrr_timings(struct intel_crtc_state * crtc_state,int vmin,int vmax)241 void intel_vrr_compute_vrr_timings(struct intel_crtc_state *crtc_state,
242 				   int vmin, int vmax)
243 {
244 	crtc_state->vrr.vmax = vmax;
245 	crtc_state->vrr.vmin = vmin;
246 	crtc_state->vrr.flipline = crtc_state->vrr.vmin;
247 
248 	crtc_state->vrr.enable = true;
249 	crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
250 }
251 
252 static
intel_vrr_compute_fixed_rr_timings(struct intel_crtc_state * crtc_state)253 void intel_vrr_compute_fixed_rr_timings(struct intel_crtc_state *crtc_state)
254 {
255 	/* For fixed rr,  vmin = vmax = flipline */
256 	crtc_state->vrr.vmax = crtc_state->hw.adjusted_mode.crtc_vtotal;
257 	crtc_state->vrr.vmin = crtc_state->vrr.vmax;
258 	crtc_state->vrr.flipline = crtc_state->vrr.vmin;
259 }
260 
intel_vrr_hw_value(const struct intel_crtc_state * crtc_state,int value)261 static int intel_vrr_hw_value(const struct intel_crtc_state *crtc_state,
262 			      int value)
263 {
264 	struct intel_display *display = to_intel_display(crtc_state);
265 
266 	/*
267 	 * On TGL vmin/vmax/flipline also need to be
268 	 * adjusted by the SCL to maintain correct vtotals.
269 	 */
270 	if (DISPLAY_VER(display) >= 13)
271 		return value;
272 	else
273 		return value - crtc_state->set_context_latency;
274 }
275 
intel_vrr_vblank_start(const struct intel_crtc_state * crtc_state,int vmin_vmax)276 static int intel_vrr_vblank_start(const struct intel_crtc_state *crtc_state,
277 				  int vmin_vmax)
278 {
279 	return intel_vrr_hw_value(crtc_state, vmin_vmax) - crtc_state->vrr.guardband;
280 }
281 
282 /*
283  * For fixed refresh rate mode Vmin, Vmax and Flipline all are set to
284  * Vtotal value.
285  */
286 static
intel_vrr_fixed_rr_hw_vtotal(const struct intel_crtc_state * crtc_state)287 int intel_vrr_fixed_rr_hw_vtotal(const struct intel_crtc_state *crtc_state)
288 {
289 	return intel_vrr_hw_value(crtc_state, crtc_state->hw.adjusted_mode.crtc_vtotal);
290 }
291 
static
int intel_vrr_fixed_rr_hw_vmax(const struct intel_crtc_state *crtc_state)
{
	/* In fixed refresh rate mode vmax is simply the vtotal. */
	return intel_vrr_fixed_rr_hw_vtotal(crtc_state);
}
297 
static
int intel_vrr_fixed_rr_hw_vmin(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int offset = intel_vrr_vmin_flipline_offset(display);

	/* Compensate for the ICL/TGL flipline >= vmin + 1 restriction. */
	return intel_vrr_fixed_rr_hw_vtotal(crtc_state) - offset;
}
306 
static
int intel_vrr_fixed_rr_hw_flipline(const struct intel_crtc_state *crtc_state)
{
	/* In fixed refresh rate mode flipline is simply the vtotal. */
	return intel_vrr_fixed_rr_hw_vtotal(crtc_state);
}
312 
/*
 * intel_vrr_set_fixed_rr_timings - program fixed refresh rate VRR timings
 *
 * Write vmin/vmax/flipline so the VRR timing generator reproduces the fixed
 * vtotal of the current mode. The registers hold value-minus-one.
 */
void intel_vrr_set_fixed_rr_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/* Nothing to program unless VRR timings have been computed. */
	if (!intel_vrr_possible(crtc_state))
		return;

	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       intel_vrr_fixed_rr_hw_vmin(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       intel_vrr_fixed_rr_hw_vmax(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       intel_vrr_fixed_rr_hw_flipline(crtc_state) - 1);
}
328 
/* Returns the vmin (in scanlines) to use for this crtc. */
static
int intel_vrr_compute_vmin(struct intel_crtc_state *crtc_state)
{
	/*
	 * To make fixed rr and vrr work seamless the guardband/pipeline full
	 * should be set such that it satisfies both the fixed and variable
	 * timings.
	 * For this set the vmin as crtc_vtotal. With this we never need to
	 * change anything to do with the guardband.
	 */
	return crtc_state->hw.adjusted_mode.crtc_vtotal;
}
341 
342 static
intel_vrr_compute_vmax(struct intel_connector * connector,const struct drm_display_mode * adjusted_mode)343 int intel_vrr_compute_vmax(struct intel_connector *connector,
344 			   const struct drm_display_mode *adjusted_mode)
345 {
346 	const struct drm_display_info *info = &connector->base.display_info;
347 	int vmax;
348 
349 	vmax = adjusted_mode->crtc_clock * 1000 /
350 		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);
351 	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);
352 
353 	return vmax;
354 }
355 
intel_vrr_dc_balance_possible(const struct intel_crtc_state * crtc_state)356 static bool intel_vrr_dc_balance_possible(const struct intel_crtc_state *crtc_state)
357 {
358 	struct intel_display *display = to_intel_display(crtc_state);
359 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
360 	enum pipe pipe = crtc->pipe;
361 
362 	/*
363 	 * FIXME: Currently Firmware supports DC Balancing on PIPE A
364 	 * and PIPE B. Account those limitation while computing DC
365 	 * Balance parameters.
366 	 */
367 	return (HAS_VRR_DC_BALANCE(display) &&
368 		((pipe == PIPE_A) || (pipe == PIPE_B)));
369 }
370 
/*
 * intel_vrr_dc_balance_compute_config - derive DC Balance parameters
 *
 * Fill crtc_state->vrr.dc_balance with the vmin/vmax window, correction
 * limits, guardband, slope and vblank target consumed by the PIPEDMC
 * firmware when balancing the panel under VRR.
 */
static void
intel_vrr_dc_balance_compute_config(struct intel_crtc_state *crtc_state)
{
	int guardband_usec, adjustment_usec;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	/* DC Balance only applies with VRR enabled on a supported pipe. */
	if (!intel_vrr_dc_balance_possible(crtc_state) || !crtc_state->vrr.enable)
		return;

	crtc_state->vrr.dc_balance.vmax = crtc_state->vrr.vmax;
	crtc_state->vrr.dc_balance.vmin = crtc_state->vrr.vmin;
	/* Allow corrections over the full vmin..vmax span in both directions. */
	crtc_state->vrr.dc_balance.max_increase =
		crtc_state->vrr.vmax - crtc_state->vrr.vmin;
	crtc_state->vrr.dc_balance.max_decrease =
		crtc_state->vrr.vmax - crtc_state->vrr.vmin;
	/* Guardband = DCB_CORRECTION_SENSITIVITY percent of vmax, rounded up. */
	crtc_state->vrr.dc_balance.guardband =
		DIV_ROUND_UP(crtc_state->vrr.dc_balance.vmax *
			     DCB_CORRECTION_SENSITIVITY, 100);
	guardband_usec =
		intel_scanlines_to_usecs(adjusted_mode,
					 crtc_state->vrr.dc_balance.guardband);
	/*
	 *  The correction_aggressiveness/100 is the number of milliseconds to
	 *  adjust by when the balance is at twice the guardband.
	 *  guardband_slope = correction_aggressiveness / (guardband * 100)
	 */
	adjustment_usec = DCB_CORRECTION_AGGRESSIVENESS * 10;
	crtc_state->vrr.dc_balance.slope =
		DIV_ROUND_UP(adjustment_usec, guardband_usec);
	/* Aim vblank at DCB_BLANK_TARGET percent of the vmin..vmax range. */
	crtc_state->vrr.dc_balance.vblank_target =
		DIV_ROUND_UP((crtc_state->vrr.vmax - crtc_state->vrr.vmin) *
			     DCB_BLANK_TARGET, 100);
	crtc_state->vrr.dc_balance.enable = true;
}
405 
/*
 * intel_vrr_compute_config - compute the VRR state for a crtc
 *
 * Decides between true VRR, CMRR (eDP only) and fixed refresh rate
 * timings based on sink capability, uapi enablement and platform
 * constraints, then derives the dependent AS SDP vsync offsets and
 * DC Balance parameters.
 */
void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	bool is_edp = intel_dp_is_edp(intel_dp);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	int vmin, vmax;

	if (!HAS_VRR(display))
		return;

	/* VRR is not supported with interlaced modes. */
	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));

	/*
	 * Allow fixed refresh rate with VRR Timing Generator.
	 * For now set the vrr.in_range to 0, to allow fixed_rr but skip actual
	 * VRR and LRR.
	 * #TODO For actual VRR with joiner, we need to figure out how to
	 * correctly sequence transcoder level stuff vs. pipe level stuff
	 * in the commit.
	 */
	if (crtc_state->joiner_pipes)
		crtc_state->vrr.in_range = false;

	vmin = intel_vrr_compute_vmin(crtc_state);

	if (crtc_state->vrr.in_range) {
		if (HAS_LRR(display))
			crtc_state->update_lrr = true;
		vmax = intel_vrr_compute_vmax(connector, adjusted_mode);
	} else {
		vmax = vmin;
	}

	/* Pick VRR, fractional CMRR (eDP only), or plain fixed rr timings. */
	if (crtc_state->uapi.vrr_enabled && vmin < vmax)
		intel_vrr_compute_vrr_timings(crtc_state, vmin, vmax);
	else if (is_cmrr_frac_required(crtc_state) && is_edp)
		intel_vrr_compute_cmrr_timings(crtc_state);
	else
		intel_vrr_compute_fixed_rr_timings(crtc_state);

	/* Adaptive Sync SDP uses vsync offsets measured back from vtotal. */
	if (HAS_AS_SDP(display)) {
		crtc_state->vrr.vsync_start =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.crtc_vsync_start);
		crtc_state->vrr.vsync_end =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.crtc_vsync_end);
	}

	intel_vrr_dc_balance_compute_config(crtc_state);
}
466 
467 static int
intel_vrr_max_hw_guardband(const struct intel_crtc_state * crtc_state)468 intel_vrr_max_hw_guardband(const struct intel_crtc_state *crtc_state)
469 {
470 	struct intel_display *display = to_intel_display(crtc_state);
471 	int max_pipeline_full = REG_FIELD_MAX(VRR_CTL_PIPELINE_FULL_MASK);
472 
473 	if (DISPLAY_VER(display) >= 13)
474 		return REG_FIELD_MAX(XELPD_VRR_CTL_VRR_GUARDBAND_MASK);
475 	else
476 		return intel_vrr_pipeline_full_to_guardband(crtc_state,
477 							    max_pipeline_full);
478 }
479 
480 static int
intel_vrr_max_vblank_guardband(const struct intel_crtc_state * crtc_state)481 intel_vrr_max_vblank_guardband(const struct intel_crtc_state *crtc_state)
482 {
483 	struct intel_display *display = to_intel_display(crtc_state);
484 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
485 
486 	return crtc_state->vrr.vmin -
487 	       adjusted_mode->crtc_vdisplay -
488 	       crtc_state->set_context_latency -
489 	       intel_vrr_extra_vblank_delay(display);
490 }
491 
static int
intel_vrr_max_guardband(struct intel_crtc_state *crtc_state)
{
	int hw_limit = intel_vrr_max_hw_guardband(crtc_state);
	int vblank_limit = intel_vrr_max_vblank_guardband(crtc_state);

	/* The guardband is bounded by both the register field and vblank. */
	return min(hw_limit, vblank_limit);
}
498 
/*
 * intel_vrr_compute_optimized_guardband - minimal guardband in scanlines
 *
 * Compute the smallest guardband that still covers the worst-case plane
 * prefill latency (including SAGV/package C exit) and, on DP outputs,
 * the PSR, SDP and ALPM LOBF requirements.
 */
static
int intel_vrr_compute_optimized_guardband(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct skl_prefill_ctx prefill_ctx;
	int prefill_latency_us;
	int guardband = 0;

	/* Worst-case plane prefill parameters for this crtc. */
	skl_prefill_init_worst(&prefill_ctx, crtc_state);

	/*
	 * The SoC power controller runs SAGV mutually exclusive with package C states,
	 * so the max of package C and SAGV latencies is used to compute the min prefill guardband.
	 * PM delay = max(sagv_latency, pkgc_max_latency (highest enabled wm level 1 and up))
	 */
	prefill_latency_us = max(display->sagv.block_time_us,
				 skl_watermark_max_latency(display, 1));

	guardband = skl_prefill_min_guardband(&prefill_ctx,
					      crtc_state,
					      prefill_latency_us);

	/* DP outputs additionally need PSR/SDP/ALPM to fit in the guardband. */
	if (intel_crtc_has_dp_encoder(crtc_state)) {
		guardband = max(guardband, intel_psr_min_guardband(crtc_state));
		guardband = max(guardband, intel_dp_sdp_min_guardband(crtc_state, true));
		guardband = max(guardband, intel_alpm_lobf_min_guardband(crtc_state));
	}

	return guardband;
}
529 
intel_vrr_use_optimized_guardband(const struct intel_crtc_state * crtc_state)530 static bool intel_vrr_use_optimized_guardband(const struct intel_crtc_state *crtc_state)
531 {
532 	/*
533 	 * #TODO: Enable optimized guardband for HDMI
534 	 * For HDMI lot of infoframes are transmitted a line or two after vsync.
535 	 * Since with optimized guardband the double bufferring point is at delayed vblank,
536 	 * we need to ensure that vsync happens after delayed vblank for the HDMI case.
537 	 */
538 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
539 		return false;
540 
541 	return true;
542 }
543 
/*
 * intel_vrr_compute_guardband - compute the VRR guardband / pipeline full
 *
 * Choose an optimized (minimal) guardband where allowed, clamp it to the
 * hardware and vblank limits, adjust the delayed vblank start when the VRR
 * timing generator is always in use, and derive the legacy PIPELINE_FULL
 * value on pre-Xe_LPD platforms.
 */
void intel_vrr_compute_guardband(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	struct drm_display_mode *pipe_mode = &crtc_state->hw.pipe_mode;
	int guardband;

	if (!intel_vrr_possible(crtc_state))
		return;

	if (intel_vrr_use_optimized_guardband(crtc_state))
		guardband = intel_vrr_compute_optimized_guardband(crtc_state);
	else
		guardband = crtc_state->vrr.vmin - adjusted_mode->crtc_vdisplay;

	crtc_state->vrr.guardband = min(guardband, intel_vrr_max_guardband(crtc_state));

	if (intel_vrr_always_use_vrr_tg(display)) {
		/* Delayed vblank starts one guardband before vtotal. */
		adjusted_mode->crtc_vblank_start  =
			adjusted_mode->crtc_vtotal - crtc_state->vrr.guardband;
		/*
		 * pipe_mode has already been derived from the
		 * original adjusted_mode, keep the two in sync.
		 */
		pipe_mode->crtc_vblank_start =
			adjusted_mode->crtc_vblank_start;
	}

	if (DISPLAY_VER(display) < 13)
		crtc_state->vrr.pipeline_full =
			intel_vrr_guardband_to_pipeline_full(crtc_state,
							     crtc_state->vrr.guardband);
}
577 
trans_vrr_ctl(const struct intel_crtc_state * crtc_state)578 static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
579 {
580 	struct intel_display *display = to_intel_display(crtc_state);
581 
582 	if (DISPLAY_VER(display) >= 14)
583 		return VRR_CTL_FLIP_LINE_EN |
584 			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
585 	else if (DISPLAY_VER(display) >= 13)
586 		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
587 			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
588 	else
589 		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
590 			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
591 			VRR_CTL_PIPELINE_FULL_OVERRIDE;
592 }
593 
/*
 * intel_vrr_set_transcoder_timings - program transcoder-level VRR state
 *
 * Programs the CMRR M/N dividers (when CMRR is enabled), the fixed rr
 * vmin/vmax/flipline, TRANS_VRR_CTL, the AS SDP vsync window and the EMP
 * transmission line. Must be called after TRANS_DDI_FUNC_CTL is enabled.
 */
void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!HAS_VRR(display))
		return;

	/*
	 * Bspec says:
	 * "(note: VRR needs to be programmed after
	 *  TRANS_DDI_FUNC_CTL and before TRANS_CONF)."
	 *
	 * In practice it turns out that ICL can hang if
	 * TRANS_VRR_VMAX/FLIPLINE are written before
	 * enabling TRANS_DDI_FUNC_CTL.
	 */
	drm_WARN_ON(display->drm,
		    !(intel_de_read(display, TRANS_DDI_FUNC_CTL(display, cpu_transcoder)) & TRANS_DDI_FUNC_ENABLE));

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(display, 12, 13))
		intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	/* No VRR timings computed: make sure the VRR TG stays disabled. */
	if (!intel_vrr_possible(crtc_state)) {
		intel_de_write(display,
			       TRANS_VRR_CTL(display, cpu_transcoder), 0);
		return;
	}

	/* CMRR M/N are 64 bit values split across hi/lo register pairs. */
	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_CMRR_M_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_N_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_n));
		intel_de_write(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_n));
	}

	intel_vrr_set_fixed_rr_timings(crtc_state);

	if (!intel_vrr_always_use_vrr_tg(display))
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       trans_vrr_ctl(crtc_state));

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder),
			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));

	/*
	 * For BMG and LNL+ onwards the EMP_AS_SDP_TL is used for programming
	 * double buffering point and transmission line for VRR packets for
	 * HDMI2.1/DP/eDP/DP->HDMI2.1 PCON.
	 * Since currently we support VRR only for DP/eDP, so this is programmed
	 * to for Adaptive Sync SDP to Vsync start.
	 */
	if (DISPLAY_VERx100(display) == 1401 || DISPLAY_VER(display) >= 20)
		intel_de_write(display,
			       EMP_AS_SDP_TL(display, cpu_transcoder),
			       EMP_AS_SDP_DB_TL(crtc_state->vrr.vsync_start));
}
664 
665 void
intel_vrr_dcb_increment_flip_count(struct intel_crtc_state * crtc_state,struct intel_crtc * crtc)666 intel_vrr_dcb_increment_flip_count(struct intel_crtc_state *crtc_state,
667 				   struct intel_crtc *crtc)
668 {
669 	struct intel_display *display = to_intel_display(crtc_state);
670 	enum pipe pipe = crtc->pipe;
671 
672 	if (!crtc_state->vrr.dc_balance.enable)
673 		return;
674 
675 	intel_de_write(display, PIPEDMC_DCB_FLIP_COUNT(pipe),
676 		       ++crtc->dc_balance.flip_count);
677 }
678 
679 void
intel_vrr_dcb_reset(const struct intel_crtc_state * old_crtc_state,struct intel_crtc * crtc)680 intel_vrr_dcb_reset(const struct intel_crtc_state *old_crtc_state,
681 		    struct intel_crtc *crtc)
682 {
683 	struct intel_display *display = to_intel_display(old_crtc_state);
684 	enum pipe pipe = crtc->pipe;
685 
686 	if (!old_crtc_state->vrr.dc_balance.enable)
687 		return;
688 
689 	intel_de_write(display, PIPEDMC_DCB_FLIP_COUNT(pipe), 0);
690 	intel_de_write(display, PIPEDMC_DCB_BALANCE_RESET(pipe), 0);
691 }
692 
trans_vrr_push(const struct intel_crtc_state * crtc_state,bool send_push)693 static u32 trans_vrr_push(const struct intel_crtc_state *crtc_state,
694 			  bool send_push)
695 {
696 	struct intel_display *display = to_intel_display(crtc_state);
697 	u32 trans_vrr_push = 0;
698 
699 	if (intel_vrr_always_use_vrr_tg(display) ||
700 	    crtc_state->vrr.enable)
701 		trans_vrr_push |= TRANS_PUSH_EN;
702 
703 	if (send_push)
704 		trans_vrr_push |= TRANS_PUSH_SEND;
705 
706 	if (HAS_PSR_TRANS_PUSH_FRAME_CHANGE(display))
707 		trans_vrr_push |= LNL_TRANS_PUSH_PSR_PR_EN;
708 
709 	return trans_vrr_push;
710 }
711 
/*
 * intel_vrr_send_push - ask the VRR timing generator to terminate the frame
 * @dsb: DSB to queue the register write on, or NULL for immediate MMIO
 * @crtc_state: the crtc state
 *
 * Writes TRANS_PUSH with the send bit set. When going through a DSB the
 * write is wrapped in a non-posted section so the push is not left pending
 * behind later DSB writes.
 */
void intel_vrr_send_push(struct intel_dsb *dsb,
			 const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/* Only needed with VRR or PSR frame change via TRANS_PUSH. */
	if (!crtc_state->vrr.enable && !intel_psr_use_trans_push(crtc_state))
		return;

	if (dsb)
		intel_dsb_nonpost_start(dsb);

	intel_de_write_dsb(display, dsb,
			   TRANS_PUSH(display, cpu_transcoder),
			   trans_vrr_push(crtc_state, true));
	if (dsb)
		intel_dsb_nonpost_end(dsb);
}
730 
/*
 * intel_vrr_check_push_sent - verify the previous push was consumed
 * @dsb: DSB to queue the check on, or NULL to check immediately via MMIO
 * @crtc_state: the crtc state
 *
 * The TRANS_PUSH send bit self-clears once the hardware has acted on the
 * push; complain (via DSB poll error or drm_err) if it is still set.
 */
void intel_vrr_check_push_sent(struct intel_dsb *dsb,
			       const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	/*
	 * Make sure the push send bit has cleared. This should
	 * already be the case as long as the caller makes sure
	 * this is called after the delayed vblank has occurred.
	 */
	if (dsb) {
		int wait_us, count;

		/* A single short poll: the bit should already be clear. */
		wait_us = 2;
		count = 1;

		/*
		 * If the bit hasn't cleared the DSB will
		 * raise the poll error interrupt.
		 */
		intel_dsb_poll(dsb, TRANS_PUSH(display, cpu_transcoder),
			       TRANS_PUSH_SEND, 0, wait_us, count);
	} else {
		if (intel_vrr_is_push_sent(crtc_state))
			drm_err(display->drm, "[CRTC:%d:%s] VRR push send still pending\n",
				crtc->base.base.id, crtc->base.name);
	}
}
764 
intel_vrr_is_push_sent(const struct intel_crtc_state * crtc_state)765 bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
766 {
767 	struct intel_display *display = to_intel_display(crtc_state);
768 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
769 
770 	if (!crtc_state->vrr.enable)
771 		return false;
772 
773 	return intel_de_read(display, TRANS_PUSH(display, cpu_transcoder)) & TRANS_PUSH_SEND;
774 }
775 
intel_vrr_always_use_vrr_tg(struct intel_display * display)776 bool intel_vrr_always_use_vrr_tg(struct intel_display *display)
777 {
778 	if (!HAS_VRR(display))
779 		return false;
780 
781 	if (DISPLAY_VER(display) >= 30)
782 		return true;
783 
784 	return false;
785 }
786 
intel_vrr_hw_vmin(const struct intel_crtc_state * crtc_state)787 static int intel_vrr_hw_vmin(const struct intel_crtc_state *crtc_state)
788 {
789 	struct intel_display *display = to_intel_display(crtc_state);
790 
791 	return intel_vrr_hw_value(crtc_state, crtc_state->vrr.vmin) -
792 		intel_vrr_vmin_flipline_offset(display);
793 }
794 
/* vmax register value, with any platform specific SCL adjustment applied. */
static int intel_vrr_hw_vmax(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_hw_value(crtc_state, crtc_state->vrr.vmax);
}
799 
/* flipline register value, with any platform specific SCL adjustment applied. */
static int intel_vrr_hw_flipline(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_hw_value(crtc_state, crtc_state->vrr.flipline);
}
804 
/*
 * intel_vrr_set_vrr_timings - program the variable vmin/vmax/flipline
 *
 * The registers hold value-minus-one; the hw helpers apply any platform
 * specific SCL and flipline offset adjustments first.
 */
static void intel_vrr_set_vrr_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       intel_vrr_hw_vmin(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       intel_vrr_hw_vmax(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       intel_vrr_hw_flipline(crtc_state) - 1);
}
817 
/*
 * Enable DC Balance correction for the given pipe/transcoder.
 *
 * Programs the transcoder-side DCB vmax/flipline (config and "live")
 * registers from the current VRR limits, then the PIPEDMC-side tuning
 * parameters (vmin/vmax window, max increase/decrease, guardband, slope,
 * vblank target) from the precomputed dc_balance state, hooks up the DMC
 * event, enables the adaptive sync counter and finally sets
 * VRR_CTL_DCB_ADJ_ENABLE in TRANS_VRR_CTL.
 *
 * No-op unless the state has dc_balance.enable set.
 */
static void
intel_vrr_enable_dc_balancing(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum pipe pipe = crtc->pipe;
	u32 vrr_ctl = intel_de_read(display, TRANS_VRR_CTL(display, cpu_transcoder));

	if (!crtc_state->vrr.dc_balance.enable)
		return;

	/* line-count registers are programmed as value - 1 */
	intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG(cpu_transcoder),
		       VRR_DCB_ADJ_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG_LIVE(cpu_transcoder),
		       VRR_DCB_ADJ_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_VMAX(cpu_transcoder),
		       VRR_DCB_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_VMAX_LIVE(cpu_transcoder),
		       VRR_DCB_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_FLIPLINE(cpu_transcoder),
		       VRR_DCB_FLIPLINE(crtc_state->vrr.flipline - 1));
	intel_de_write(display, TRANS_VRR_DCB_FLIPLINE_LIVE(cpu_transcoder),
		       VRR_DCB_FLIPLINE(crtc_state->vrr.flipline - 1));
	intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG_LIVE(cpu_transcoder),
		       VRR_DCB_ADJ_FLIPLINE(crtc_state->vrr.flipline - 1));
	intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG(cpu_transcoder),
		       VRR_DCB_ADJ_FLIPLINE(crtc_state->vrr.flipline - 1));
	intel_de_write(display, PIPEDMC_DCB_VMIN(pipe),
		       crtc_state->vrr.dc_balance.vmin - 1);
	intel_de_write(display, PIPEDMC_DCB_VMAX(pipe),
		       crtc_state->vrr.dc_balance.vmax - 1);
	intel_de_write(display, PIPEDMC_DCB_MAX_INCREASE(pipe),
		       crtc_state->vrr.dc_balance.max_increase);
	intel_de_write(display, PIPEDMC_DCB_MAX_DECREASE(pipe),
		       crtc_state->vrr.dc_balance.max_decrease);
	intel_de_write(display, PIPEDMC_DCB_GUARDBAND(pipe),
		       crtc_state->vrr.dc_balance.guardband);
	intel_de_write(display, PIPEDMC_DCB_SLOPE(pipe),
		       crtc_state->vrr.dc_balance.slope);
	intel_de_write(display, PIPEDMC_DCB_VBLANK(pipe),
		       crtc_state->vrr.dc_balance.vblank_target);
	intel_dmc_configure_dc_balance_event(display, pipe, true);
	intel_de_write(display, TRANS_ADAPTIVE_SYNC_DCB_CTL(cpu_transcoder),
		       ADAPTIVE_SYNC_COUNTER_EN);
	/* NOTE(review): first argument is unused/NULL here — confirm intended API */
	intel_pipedmc_dcb_enable(NULL, crtc);

	vrr_ctl |= VRR_CTL_DCB_ADJ_ENABLE;
	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder), vrr_ctl);
}
868 
/*
 * Disable DC Balance correction, undoing intel_vrr_enable_dc_balancing().
 *
 * Tears down in roughly the reverse order of enable: stop the PIPEDMC
 * side and the DMC event first, clear the adaptive sync counter, zero
 * all DCB tuning and "live" registers, and finally drop
 * VRR_CTL_DCB_ADJ_ENABLE from TRANS_VRR_CTL.
 *
 * No-op unless the old state had dc_balance.enable set.
 */
static void
intel_vrr_disable_dc_balancing(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum pipe pipe = crtc->pipe;
	u32 vrr_ctl = intel_de_read(display, TRANS_VRR_CTL(display, cpu_transcoder));

	if (!old_crtc_state->vrr.dc_balance.enable)
		return;

	intel_pipedmc_dcb_disable(NULL, crtc);
	intel_dmc_configure_dc_balance_event(display, pipe, false);
	intel_de_write(display, TRANS_ADAPTIVE_SYNC_DCB_CTL(cpu_transcoder), 0);
	intel_de_write(display, PIPEDMC_DCB_VMIN(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_VMAX(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_MAX_INCREASE(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_MAX_DECREASE(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_GUARDBAND(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_SLOPE(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_VBLANK(pipe), 0);
	intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG_LIVE(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG_LIVE(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_VMAX_LIVE(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_FLIPLINE_LIVE(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_VMAX(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_FLIPLINE(cpu_transcoder), 0);

	vrr_ctl &= ~VRR_CTL_DCB_ADJ_ENABLE;
	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder), vrr_ctl);
}
903 
/*
 * Turn on the VRR timing generator (and optionally CMRR) for the
 * transcoder: program TRANS_PUSH first (without sending a push), then
 * write TRANS_VRR_CTL with VRR_CTL_VRR_ENABLE plus the state-derived
 * control bits.
 */
static void intel_vrr_tg_enable(const struct intel_crtc_state *crtc_state,
				bool cmrr_enable)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 vrr_ctl;

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       trans_vrr_push(crtc_state, false));

	vrr_ctl = VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state);

	/*
	 * FIXME this might be broken as bspec seems to imply that
	 * even VRR_CTL_CMRR_ENABLE is armed by TRANS_CMRR_N_HI
	 * when enabling CMRR (but not when disabling CMRR?).
	 */
	if (cmrr_enable)
		vrr_ctl |= VRR_CTL_CMRR_ENABLE;

	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder), vrr_ctl);
}
926 
/*
 * Turn off the VRR timing generator: drop VRR_CTL_VRR_ENABLE (keeping
 * the remaining control bits), wait up to 1s for the live VRR status
 * bit to clear, then disable the push mechanism in TRANS_PUSH.
 */
static void intel_vrr_tg_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       trans_vrr_ctl(old_crtc_state));

	if (intel_de_wait_for_clear_ms(display,
				       TRANS_VRR_STATUS(display, cpu_transcoder),
				       VRR_STATUS_VRR_EN_LIVE, 1000))
		drm_err(display->drm, "Timed out waiting for VRR live status to clear\n");

	intel_de_rmw(display, TRANS_PUSH(display, cpu_transcoder),
		     TRANS_PUSH_EN, 0);
}
943 
intel_vrr_enable(const struct intel_crtc_state * crtc_state)944 void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
945 {
946 	struct intel_display *display = to_intel_display(crtc_state);
947 
948 	if (!crtc_state->vrr.enable)
949 		return;
950 
951 	intel_vrr_set_vrr_timings(crtc_state);
952 	intel_vrr_enable_dc_balancing(crtc_state);
953 
954 	if (!intel_vrr_always_use_vrr_tg(display))
955 		intel_vrr_tg_enable(crtc_state, crtc_state->cmrr.enable);
956 }
957 
intel_vrr_disable(const struct intel_crtc_state * old_crtc_state)958 void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
959 {
960 	struct intel_display *display = to_intel_display(old_crtc_state);
961 
962 	if (!old_crtc_state->vrr.enable)
963 		return;
964 
965 	if (!intel_vrr_always_use_vrr_tg(display))
966 		intel_vrr_tg_disable(old_crtc_state);
967 
968 	intel_vrr_disable_dc_balancing(old_crtc_state);
969 	intel_vrr_set_fixed_rr_timings(old_crtc_state);
970 }
971 
intel_vrr_transcoder_enable(const struct intel_crtc_state * crtc_state)972 void intel_vrr_transcoder_enable(const struct intel_crtc_state *crtc_state)
973 {
974 	struct intel_display *display = to_intel_display(crtc_state);
975 
976 	intel_vrr_set_transcoder_timings(crtc_state);
977 
978 	if (!intel_vrr_possible(crtc_state))
979 		return;
980 
981 	if (intel_vrr_always_use_vrr_tg(display))
982 		intel_vrr_tg_enable(crtc_state, false);
983 }
984 
void intel_vrr_transcoder_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);

	/*
	 * Only platforms that keep the VRR timing generator running
	 * permanently need to stop it at transcoder disable time.
	 */
	if (intel_vrr_possible(old_crtc_state) &&
	    intel_vrr_always_use_vrr_tg(display))
		intel_vrr_tg_disable(old_crtc_state);
}
995 
intel_vrr_psr_frame_change_enable(const struct intel_crtc_state * crtc_state)996 void intel_vrr_psr_frame_change_enable(const struct intel_crtc_state *crtc_state)
997 {
998 	struct intel_display *display = to_intel_display(crtc_state);
999 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1000 
1001 	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
1002 		       trans_vrr_push(crtc_state, false));
1003 }
1004 
intel_vrr_is_fixed_rr(const struct intel_crtc_state * crtc_state)1005 bool intel_vrr_is_fixed_rr(const struct intel_crtc_state *crtc_state)
1006 {
1007 	return crtc_state->vrr.flipline &&
1008 	       crtc_state->vrr.flipline == crtc_state->vrr.vmax &&
1009 	       crtc_state->vrr.flipline == crtc_state->vrr.vmin;
1010 }
1011 
/*
 * Read back the DC Balance configuration from the PIPEDMC registers
 * into the crtc state (state readout counterpart of
 * intel_vrr_enable_dc_balancing()).
 *
 * vmin/vmax registers hold value - 1, so non-zero readings are
 * converted back with +1 while a zero reading maps to 0 (disabled).
 */
static
void intel_vrr_get_dc_balance_config(struct intel_crtc_state *crtc_state)
{
	u32 reg_val;
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum pipe pipe = crtc->pipe;

	if (!intel_vrr_dc_balance_possible(crtc_state))
		return;

	reg_val = intel_de_read(display, PIPEDMC_DCB_VMIN(pipe));
	crtc_state->vrr.dc_balance.vmin = reg_val ? reg_val + 1 : 0;

	reg_val = intel_de_read(display, PIPEDMC_DCB_VMAX(pipe));
	crtc_state->vrr.dc_balance.vmax = reg_val ? reg_val + 1 : 0;

	crtc_state->vrr.dc_balance.guardband =
		intel_de_read(display, PIPEDMC_DCB_GUARDBAND(pipe));
	crtc_state->vrr.dc_balance.max_increase =
		intel_de_read(display, PIPEDMC_DCB_MAX_INCREASE(pipe));
	crtc_state->vrr.dc_balance.max_decrease =
		intel_de_read(display, PIPEDMC_DCB_MAX_DECREASE(pipe));
	crtc_state->vrr.dc_balance.slope =
		intel_de_read(display, PIPEDMC_DCB_SLOPE(pipe));
	crtc_state->vrr.dc_balance.vblank_target =
		intel_de_read(display, PIPEDMC_DCB_VBLANK(pipe));
}
1040 
/*
 * Read the full VRR/CMRR hardware state back into the crtc state:
 * CMRR enable and M/N values, guardband (or pre-display-13
 * pipeline_full), vmin/vmax/flipline, AS SDP vsync window, the VRR
 * enable bit itself, and the DC Balance configuration.
 */
void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 trans_vrr_ctl, trans_vrr_vsync;
	bool vrr_enable;

	trans_vrr_ctl = intel_de_read(display,
				      TRANS_VRR_CTL(display, cpu_transcoder));

	if (HAS_CMRR(display))
		crtc_state->cmrr.enable = (trans_vrr_ctl & VRR_CTL_CMRR_ENABLE);

	if (crtc_state->cmrr.enable) {
		crtc_state->cmrr.cmrr_n =
			intel_de_read64_2x32(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
					     TRANS_CMRR_N_HI(display, cpu_transcoder));
		crtc_state->cmrr.cmrr_m =
			intel_de_read64_2x32(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
					     TRANS_CMRR_M_HI(display, cpu_transcoder));
	}

	/* display 13+ exposes the guardband directly; older hw uses pipeline full */
	if (DISPLAY_VER(display) >= 13) {
		crtc_state->vrr.guardband =
			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
	} else {
		if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE) {
			crtc_state->vrr.pipeline_full =
				REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);

			crtc_state->vrr.guardband =
				intel_vrr_pipeline_full_to_guardband(crtc_state,
								     crtc_state->vrr.pipeline_full);
		}
	}

	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
		/* registers hold value - 1, convert back */
		crtc_state->vrr.flipline = intel_de_read(display,
							 TRANS_VRR_FLIPLINE(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmax = intel_de_read(display,
						     TRANS_VRR_VMAX(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmin = intel_de_read(display,
						     TRANS_VRR_VMIN(display, cpu_transcoder)) + 1;

		if (DISPLAY_VER(display) < 13) {
			/* undo what intel_vrr_hw_value() does when writing the values */
			crtc_state->vrr.flipline += crtc_state->set_context_latency;
			crtc_state->vrr.vmax += crtc_state->set_context_latency;
			crtc_state->vrr.vmin += crtc_state->set_context_latency;

			crtc_state->vrr.vmin += intel_vrr_vmin_flipline_offset(display);
		}

		/*
		 * For platforms that always use VRR Timing Generator, the VTOTAL.Vtotal
		 * bits are not filled. Since for these platforms TRAN_VMIN is always
		 * filled with crtc_vtotal, use TRAN_VRR_VMIN to get the vtotal for
		 * adjusted_mode.
		 */
		if (intel_vrr_always_use_vrr_tg(display))
			crtc_state->hw.adjusted_mode.crtc_vtotal =
				intel_vrr_vmin_vtotal(crtc_state);

		if (HAS_AS_SDP(display)) {
			trans_vrr_vsync =
				intel_de_read(display,
					      TRANS_VRR_VSYNC(display, cpu_transcoder));
			crtc_state->vrr.vsync_start =
				REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
			crtc_state->vrr.vsync_end =
				REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
		}
	}

	vrr_enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;

	/*
	 * With an always-on VRR TG the enable bit alone does not
	 * distinguish VRR from fixed refresh rate operation.
	 */
	if (intel_vrr_always_use_vrr_tg(display))
		crtc_state->vrr.enable = vrr_enable && !intel_vrr_is_fixed_rr(crtc_state);
	else
		crtc_state->vrr.enable = vrr_enable;

	intel_vrr_get_dc_balance_config(crtc_state);

	/*
	 * #TODO: For Both VRR and CMRR the flag I915_MODE_FLAG_VRR is set for mode_flags.
	 * Since CMRR is currently disabled, set this flag for VRR for now.
	 * Need to keep this in mind while re-enabling CMRR.
	 */
	if (crtc_state->vrr.enable)
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;

	/*
	 * For platforms that always use the VRR timing generator, we overwrite
	 * crtc_vblank_start with vtotal - guardband to reflect the delayed
	 * vblank start. This works for both default and optimized guardband values.
	 * On other platforms, we keep the original value from
	 * intel_get_transcoder_timings() and apply adjustments only in VRR-specific
	 * paths as needed.
	 */
	if (intel_vrr_always_use_vrr_tg(display))
		crtc_state->hw.adjusted_mode.crtc_vblank_start =
			crtc_state->hw.adjusted_mode.crtc_vtotal -
			crtc_state->vrr.guardband;
}
1145 
intel_vrr_safe_window_start(const struct intel_crtc_state * crtc_state)1146 int intel_vrr_safe_window_start(const struct intel_crtc_state *crtc_state)
1147 {
1148 	struct intel_display *display = to_intel_display(crtc_state);
1149 
1150 	if (DISPLAY_VER(display) >= 30)
1151 		return crtc_state->hw.adjusted_mode.crtc_vdisplay -
1152 		       crtc_state->set_context_latency;
1153 	else
1154 		return crtc_state->hw.adjusted_mode.crtc_vdisplay;
1155 }
1156 
/*
 * Return the DC Balance adjusted vmin vblank start: the pending "next"
 * value if the DMC has one queued, otherwise the final (live) value.
 *
 * The "next" value is backed by a live register read, so evaluate it
 * exactly once (the previous ternary form called the helper twice,
 * issuing a second MMIO read whose result could differ from the first).
 */
static int
intel_vrr_dcb_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	int vblank_start = intel_vrr_dcb_vmin_vblank_start_next(crtc_state);

	if (vblank_start < 0)
		return intel_vrr_dcb_vmin_vblank_start_final(crtc_state);

	return vblank_start;
}
1164 
intel_vrr_vmin_safe_window_end(const struct intel_crtc_state * crtc_state)1165 int intel_vrr_vmin_safe_window_end(const struct intel_crtc_state *crtc_state)
1166 {
1167 	int vmin_vblank_start = crtc_state->vrr.dc_balance.enable ?
1168 			intel_vrr_dcb_vmin_vblank_start(crtc_state) :
1169 			intel_vrr_vmin_vblank_start(crtc_state);
1170 
1171 	return vmin_vblank_start - crtc_state->set_context_latency;
1172 }
1173 
intel_vrr_dcb_vmin_vblank_start_next(const struct intel_crtc_state * crtc_state)1174 int intel_vrr_dcb_vmin_vblank_start_next(const struct intel_crtc_state *crtc_state)
1175 {
1176 	struct intel_display *display = to_intel_display(crtc_state);
1177 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1178 	u32 tmp = 0;
1179 
1180 	tmp = intel_de_read(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG_LIVE(cpu_transcoder));
1181 
1182 	if (REG_FIELD_GET(VRR_DCB_ADJ_FLIPLINE_CNT_MASK, tmp) == 0)
1183 		return -EINVAL;
1184 
1185 	return intel_vrr_vblank_start(crtc_state, VRR_DCB_ADJ_FLIPLINE(tmp) + 1);
1186 }
1187 
intel_vrr_dcb_vmax_vblank_start_next(const struct intel_crtc_state * crtc_state)1188 int intel_vrr_dcb_vmax_vblank_start_next(const struct intel_crtc_state *crtc_state)
1189 {
1190 	struct intel_display *display = to_intel_display(crtc_state);
1191 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1192 	u32 tmp = 0;
1193 
1194 	tmp = intel_de_read(display, TRANS_VRR_DCB_ADJ_VMAX_CFG_LIVE(cpu_transcoder));
1195 
1196 	if (REG_FIELD_GET(VRR_DCB_ADJ_VMAX_CNT_MASK, tmp) == 0)
1197 		return -EINVAL;
1198 
1199 	return intel_vrr_vblank_start(crtc_state, VRR_DCB_ADJ_VMAX(tmp) + 1);
1200 }
1201 
intel_vrr_dcb_vmin_vblank_start_final(const struct intel_crtc_state * crtc_state)1202 int intel_vrr_dcb_vmin_vblank_start_final(const struct intel_crtc_state *crtc_state)
1203 {
1204 	struct intel_display *display = to_intel_display(crtc_state);
1205 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1206 	u32 tmp = 0;
1207 
1208 	tmp = intel_de_read(display, TRANS_VRR_DCB_FLIPLINE_LIVE(cpu_transcoder));
1209 
1210 	return intel_vrr_vblank_start(crtc_state, VRR_DCB_FLIPLINE(tmp) + 1);
1211 }
1212 
intel_vrr_dcb_vmax_vblank_start_final(const struct intel_crtc_state * crtc_state)1213 int intel_vrr_dcb_vmax_vblank_start_final(const struct intel_crtc_state *crtc_state)
1214 {
1215 	struct intel_display *display = to_intel_display(crtc_state);
1216 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1217 	u32 tmp = 0;
1218 
1219 	tmp = intel_de_read(display, TRANS_VRR_DCB_VMAX_LIVE(cpu_transcoder));
1220 
1221 	return intel_vrr_vblank_start(crtc_state, VRR_DCB_VMAX(tmp) + 1);
1222 }
1223