// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 */

#include "i915_reg.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_vrr.h"
#include "intel_vrr_regs.h"

#define FIXED_POINT_PRECISION 100
#define CMRR_PRECISION_TOLERANCE 10
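/*
 * CMRR refresh rate comparisons below are done in 1/100 Hz fixed point:
 * e.g. 60 Hz is stored as 60 * FIXED_POINT_PRECISION = 6000, so a
 * CMRR_PRECISION_TOLERANCE of 10 corresponds to 0.1 Hz.
 */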

bool intel_vrr_is_capable(struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	const struct drm_display_info *info = &connector->base.display_info;
	struct intel_dp *intel_dp;

	/*
	 * A DP sink is capable of VRR video timings if the
	 * Ignore MSA bit is set in the DPCD.
	 * The EDID monitor range should also span at least 10 Hz
	 * for a reasonable Adaptive Sync / Variable Refresh Rate
	 * end user experience.
	 */
	switch (connector->base.connector_type) {
	case DRM_MODE_CONNECTOR_eDP:
		if (!connector->panel.vbt.vrr)
			return false;
		fallthrough;
	case DRM_MODE_CONNECTOR_DisplayPort:
		intel_dp = intel_attached_dp(connector);

		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
			return false;

		break;
	default:
		return false;
	}

	return HAS_VRR(display) &&
		info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
}

bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
{
	const struct drm_display_info *info = &connector->base.display_info;

	return intel_vrr_is_capable(connector) &&
		vrefresh >= info->monitor_range.min_vfreq &&
		vrefresh <= info->monitor_range.max_vfreq;
}

bool intel_vrr_possible(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.flipline;
}

void
intel_vrr_check_modeset(struct intel_atomic_state *state)
{
	int i;
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_crtc *crtc;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->uapi.vrr_enabled !=
		    old_crtc_state->uapi.vrr_enabled)
			new_crtc_state->uapi.mode_changed = true;
	}
}

static int intel_vrr_real_vblank_delay(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->hw.adjusted_mode.crtc_vblank_start -
		crtc_state->hw.adjusted_mode.crtc_vdisplay;
}

static int intel_vrr_extra_vblank_delay(struct intel_display *display)
{
	/*
	 * On ICL/TGL VRR hardware inserts one extra scanline
	 * just after vactive, which pushes the vmin decision
	 * boundary ahead accordingly. We'll include the extra
	 * scanline in our vblank delay estimates to make sure
	 * that we never underestimate how long we have until
	 * the delayed vblank has passed.
	 */
	return DISPLAY_VER(display) < 13 ? 1 : 0;
}

int intel_vrr_vblank_delay(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	return intel_vrr_real_vblank_delay(crtc_state) +
		intel_vrr_extra_vblank_delay(display);
}

static int intel_vrr_flipline_offset(struct intel_display *display)
{
	/* ICL/TGL hardware imposes flipline >= vmin + 1 */
	return DISPLAY_VER(display) < 13 ? 1 : 0;
}

static int intel_vrr_vmin_flipline(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	return crtc_state->vrr.vmin + intel_vrr_flipline_offset(display);
}

/*
 * Without VRR, registers get latched at:
 *  vblank_start
 *
 * With VRR, the earliest point registers can get latched is:
 *  intel_vrr_vmin_vblank_start(), which, if we want to maintain
 *  the correct min vtotal, is >= vblank_start + 1
 *
 * The latest point registers can get latched is the vmax decision boundary:
 *  intel_vrr_vmax_vblank_start()
 *
 * Between those two points the vblank exit starts (and hence registers get
 * latched) ASAP after a push is sent.
 *
 * framestart_delay is programmable 1-4.
 */
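/*
 * Worked example (hypothetical numbers, DISPLAY_VER >= 13): with
 * vrr.vmin = 1125, vrr.vmax = 1406 and a 30 scanline guardband, the
 * latch window spans scanline 1125 - 30 = 1095 (the vmin decision
 * boundary) to scanline 1406 - 30 = 1376 (the vmax decision boundary).
 */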
static int intel_vrr_vblank_exit_length(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return crtc_state->vrr.guardband;
	else
		/* hardware imposes one extra scanline somewhere */
		return crtc_state->vrr.pipeline_full + crtc_state->framestart_delay + 1;
}

int intel_vrr_vmin_vtotal(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	/* Min vblank actually determined by flipline */
	if (DISPLAY_VER(display) >= 13)
		return intel_vrr_vmin_flipline(crtc_state);
	else
		return intel_vrr_vmin_flipline(crtc_state) +
			intel_vrr_real_vblank_delay(crtc_state);
}

int intel_vrr_vmax_vtotal(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return crtc_state->vrr.vmax;
	else
		return crtc_state->vrr.vmax +
			intel_vrr_real_vblank_delay(crtc_state);
}

int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_vmin_vtotal(crtc_state) - intel_vrr_vblank_exit_length(crtc_state);
}

int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_vmax_vtotal(crtc_state) - intel_vrr_vblank_exit_length(crtc_state);
}

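/*
 * Illustrative note: fractional CMRR is needed when the integer refresh
 * rate reported by drm_mode_vrefresh() and the exact clock-derived rate
 * disagree by 0.1 Hz or more. E.g. (hypothetical numbers) a mode whose
 * clock works out to 59.80 Hz (calculated_refresh_k = 5980) but whose
 * nominal rate rounds to 60 (actual_refresh_k = 6000) differs by 20
 * units (0.2 Hz), which exceeds CMRR_PRECISION_TOLERANCE.
 */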
static bool
is_cmrr_frac_required(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int calculated_refresh_k, actual_refresh_k, pixel_clock_per_line;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!HAS_CMRR(display))
		return false;

	actual_refresh_k =
		drm_mode_vrefresh(adjusted_mode) * FIXED_POINT_PRECISION;
	pixel_clock_per_line =
		adjusted_mode->crtc_clock * 1000 / adjusted_mode->crtc_htotal;
	calculated_refresh_k =
		pixel_clock_per_line * FIXED_POINT_PRECISION / adjusted_mode->crtc_vtotal;

	if ((actual_refresh_k - calculated_refresh_k) < CMRR_PRECISION_TOLERANCE)
		return false;

	return true;
}

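/*
 * The 1001/1000 multiplier pair below is the familiar NTSC fractional
 * rate factor (e.g. 60 Hz vs 60/1.001 ≈ 59.94 Hz). cmrr_m ends up
 * holding the remainder of the scaled pixel rate division, i.e. the
 * fractional part that an integer vtotal alone cannot express.
 */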
static unsigned int
cmrr_get_vtotal(struct intel_crtc_state *crtc_state, bool video_mode_required)
{
	int multiplier_m = 1, multiplier_n = 1, vtotal, desired_refresh_rate;
	u64 adjusted_pixel_rate;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	desired_refresh_rate = drm_mode_vrefresh(adjusted_mode);

	if (video_mode_required) {
		multiplier_m = 1001;
		multiplier_n = 1000;
	}

	crtc_state->cmrr.cmrr_n = mul_u32_u32(desired_refresh_rate * adjusted_mode->crtc_htotal,
					      multiplier_n);
	vtotal = DIV_ROUND_UP_ULL(mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_n),
				  crtc_state->cmrr.cmrr_n);
	adjusted_pixel_rate = mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_m);
	crtc_state->cmrr.cmrr_m = do_div(adjusted_pixel_rate, crtc_state->cmrr.cmrr_n);

	return vtotal;
}

void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	bool is_edp = intel_dp_is_edp(intel_dp);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	const struct drm_display_info *info = &connector->base.display_info;
	int vmin, vmax;

	/*
	 * FIXME all joined pipes share the same transcoder.
	 * Need to account for that during VRR toggle/push/etc.
	 */
	if (crtc_state->joiner_pipes)
		return;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));
	if (!crtc_state->vrr.in_range)
		return;

	if (HAS_LRR(display))
		crtc_state->update_lrr = true;

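	/*
	 * vmin/vmax bound the vtotal range to the panel's refresh rate
	 * limits. Worked example (hypothetical numbers): with
	 * crtc_clock = 148500 kHz, crtc_htotal = 2200 and a 48-60 Hz
	 * monitor range, vmin = ceil(148500000 / (2200 * 60)) = 1125
	 * and vmax = 148500000 / (2200 * 48) = 1406.
	 */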
	vmin = DIV_ROUND_UP(adjusted_mode->crtc_clock * 1000,
			    adjusted_mode->crtc_htotal * info->monitor_range.max_vfreq);
	vmax = adjusted_mode->crtc_clock * 1000 /
		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);

	vmin = max_t(int, vmin, adjusted_mode->crtc_vtotal);
	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);

	if (vmin >= vmax)
		return;

	crtc_state->vrr.vmin = vmin;
	crtc_state->vrr.vmax = vmax;

	crtc_state->vrr.flipline = crtc_state->vrr.vmin;

	/*
	 * flipline determines the min vblank length the hardware will
	 * generate, and on ICL/TGL flipline >= vmin + 1, hence we reduce
	 * vmin by one to make sure we can get the actual min vblank length.
	 */
	crtc_state->vrr.vmin -= intel_vrr_flipline_offset(display);

	/*
	 * When the panel is VRR capable but userspace has not enabled
	 * adaptive sync mode, Fixed Average Vtotal mode should be
	 * enabled instead.
	 */
	if (crtc_state->uapi.vrr_enabled) {
		crtc_state->vrr.enable = true;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
	} else if (is_cmrr_frac_required(crtc_state) && is_edp) {
		crtc_state->vrr.enable = true;
		crtc_state->cmrr.enable = true;
		/*
		 * TODO: Compute the precise target refresh rate to determine
		 * whether video_mode_required should be true. Currently set
		 * to false due to uncertainty about the precise target
		 * refresh rate.
		 */
		crtc_state->vrr.vmax = cmrr_get_vtotal(crtc_state, false);
		crtc_state->vrr.vmin = crtc_state->vrr.vmax;
		crtc_state->vrr.flipline = crtc_state->vrr.vmin;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
	}

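	/*
	 * For the adaptive sync SDP the vsync position is stored as a
	 * distance back from crtc_vtotal, which is what the subtractions
	 * below compute.
	 */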
	if (HAS_AS_SDP(display)) {
		crtc_state->vrr.vsync_start =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_start);
		crtc_state->vrr.vsync_end =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_end);
	}
}

void intel_vrr_compute_config_late(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!intel_vrr_possible(crtc_state))
		return;

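	/*
	 * vrr.guardband (DISPLAY_VER >= 13) or vrr.pipeline_full (older
	 * platforms) feeds intel_vrr_vblank_exit_length(). Worked example
	 * (hypothetical numbers): with vrr.vmin = 1125 and
	 * crtc_vblank_start = 1084, the guardband is 1125 - 1084 = 41
	 * scanlines.
	 */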
	if (DISPLAY_VER(display) >= 13) {
		crtc_state->vrr.guardband =
			crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start;
	} else {
		/* hardware imposes one extra scanline somewhere */
		crtc_state->vrr.pipeline_full =
			min(255, crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start -
			    crtc_state->framestart_delay - 1);

		/*
		 * vmin/vmax/flipline also need to be adjusted by
		 * the vblank delay to maintain correct vtotals.
		 */
		crtc_state->vrr.vmin -= intel_vrr_real_vblank_delay(crtc_state);
		crtc_state->vrr.vmax -= intel_vrr_real_vblank_delay(crtc_state);
		crtc_state->vrr.flipline -= intel_vrr_real_vblank_delay(crtc_state);
	}
}

static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
			VRR_CTL_PIPELINE_FULL_OVERRIDE;
}

void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(display, 12, 13))
		intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	if (!intel_vrr_possible(crtc_state)) {
		intel_de_write(display,
			       TRANS_VRR_CTL(display, cpu_transcoder), 0);
		return;
	}

	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_CMRR_M_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_N_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_n));
		intel_de_write(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_n));
	}

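	/*
	 * Note the -1: the VMIN/VMAX/FLIPLINE registers hold the value
	 * minus one, matching the +1 applied when reading them back in
	 * intel_vrr_get_config().
	 */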
	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       crtc_state->vrr.vmin - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       crtc_state->vrr.vmax - 1);
	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       trans_vrr_ctl(crtc_state));
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       crtc_state->vrr.flipline - 1);

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder),
			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));
}

void intel_vrr_send_push(struct intel_dsb *dsb,
			 const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

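	/*
	 * When issued from a DSB, the push is wrapped in a non-posted
	 * section, presumably so the write is guaranteed to have reached
	 * the hardware before subsequent DSB commands execute.
	 */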
	if (dsb)
		intel_dsb_nonpost_start(dsb);

	intel_de_write_dsb(display, dsb,
			   TRANS_PUSH(display, cpu_transcoder),
			   TRANS_PUSH_EN | TRANS_PUSH_SEND);

	if (dsb)
		intel_dsb_nonpost_end(dsb);
}

void intel_vrr_check_push_sent(struct intel_dsb *dsb,
			       const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	/*
	 * Make sure the push send bit has cleared. This should
	 * already be the case as long as the caller makes sure
	 * this is called after the delayed vblank has occurred.
	 */
	if (dsb) {
		int wait_us, count;

		wait_us = 2;
		count = 1;

		/*
		 * If the bit hasn't cleared the DSB will
		 * raise the poll error interrupt.
		 */
		intel_dsb_poll(dsb, TRANS_PUSH(display, cpu_transcoder),
			       TRANS_PUSH_SEND, 0, wait_us, count);
	} else {
		if (intel_vrr_is_push_sent(crtc_state))
			drm_err(display->drm, "[CRTC:%d:%s] VRR push send still pending\n",
				crtc->base.base.id, crtc->base.name);
	}
}

bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return false;

	return intel_de_read(display, TRANS_PUSH(display, cpu_transcoder)) & TRANS_PUSH_SEND;
}

void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       TRANS_PUSH_EN);

	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       VRR_CTL_VRR_ENABLE | VRR_CTL_CMRR_ENABLE |
			       trans_vrr_ctl(crtc_state));
	} else {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
	}
}

void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	if (!old_crtc_state->vrr.enable)
		return;

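	/*
	 * Teardown order: drop VRR_CTL_VRR_ENABLE while keeping the other
	 * CTL bits intact, wait for the live enable status to clear, and
	 * only then disarm the push.
	 */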
	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       trans_vrr_ctl(old_crtc_state));
	intel_de_wait_for_clear(display,
				TRANS_VRR_STATUS(display, cpu_transcoder),
				VRR_STATUS_VRR_EN_LIVE, 1000);
	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), 0);
}

void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 trans_vrr_ctl, trans_vrr_vsync;

	trans_vrr_ctl = intel_de_read(display,
				      TRANS_VRR_CTL(display, cpu_transcoder));

	crtc_state->vrr.enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;
	if (HAS_CMRR(display))
		crtc_state->cmrr.enable = (trans_vrr_ctl & VRR_CTL_CMRR_ENABLE);

	if (crtc_state->cmrr.enable) {
		crtc_state->cmrr.cmrr_n =
			intel_de_read64_2x32(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
					     TRANS_CMRR_N_HI(display, cpu_transcoder));
		crtc_state->cmrr.cmrr_m =
			intel_de_read64_2x32(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
					     TRANS_CMRR_M_HI(display, cpu_transcoder));
	}

	if (DISPLAY_VER(display) >= 13)
		crtc_state->vrr.guardband =
			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
	else
		if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE)
			crtc_state->vrr.pipeline_full =
				REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);

	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
		crtc_state->vrr.flipline = intel_de_read(display,
							 TRANS_VRR_FLIPLINE(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmax = intel_de_read(display,
						     TRANS_VRR_VMAX(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmin = intel_de_read(display,
						     TRANS_VRR_VMIN(display, cpu_transcoder)) + 1;

		if (HAS_AS_SDP(display)) {
			trans_vrr_vsync =
				intel_de_read(display,
					      TRANS_VRR_VSYNC(display, cpu_transcoder));
			crtc_state->vrr.vsync_start =
				REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
			crtc_state->vrr.vsync_end =
				REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
		}
	}

	if (crtc_state->vrr.enable)
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
}