xref: /linux/drivers/gpu/drm/i915/display/intel_display_device.c (revision ab93e0dd72c37d378dd936f031ffb83ff2bd87ce)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2023 Intel Corporation
4  */
5 
6 #include <linux/pci.h>
7 
8 #include <drm/drm_color_mgmt.h>
9 #include <drm/drm_drv.h>
10 #include <drm/drm_print.h>
11 #include <drm/intel/pciids.h>
12 
13 #include "i915_reg.h"
14 #include "intel_cx0_phy_regs.h"
15 #include "intel_de.h"
16 #include "intel_display.h"
17 #include "intel_display_device.h"
18 #include "intel_display_params.h"
19 #include "intel_display_power.h"
20 #include "intel_display_reg_defs.h"
21 #include "intel_display_regs.h"
22 #include "intel_display_types.h"
23 #include "intel_fbc.h"
24 #include "intel_step.h"
25 
26 __diag_push();
27 __diag_ignore_all("-Woverride-init", "Allow field initialization overrides for display info");
28 
29 struct stepping_desc {
30 	const enum intel_step *map; /* revid to step map */
31 	size_t size; /* map size */
32 };
33 
34 #define STEP_INFO(_map)				\
35 	.step_info.map = _map,			\
36 	.step_info.size = ARRAY_SIZE(_map)
37 
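/*
 * Illustrative sketch (not a real platform): the map is indexed by PCI
 * revision ID, gaps default to STEP_NONE, and STEP_INFO() attaches it to a
 * platform or subplatform descriptor; get_pre_gmdid_step() below resolves
 * pdev->revision against it.
 *
 *	static const enum intel_step foo_steppings[] = {
 *		[0x0] = STEP_A0,
 *		[0x4] = STEP_B0,
 *	};
 *
 *	static const struct platform_desc foo_desc = {
 *		...
 *		STEP_INFO(foo_steppings),
 *	};
 */
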
38 struct subplatform_desc {
39 	struct intel_display_platforms platforms;
40 	const char *name;
41 	const u16 *pciidlist;
42 	struct stepping_desc step_info;
43 };
44 
45 #define SUBPLATFORM(_platform, _subplatform)				\
46 	.platforms._platform##_##_subplatform = 1,			\
47 	.name = #_subplatform
48 
49 /*
50  * Group subplatform alias that matches multiple subplatforms. For making ult
51  * cover both ult and ulx on HSW/BDW.
52  */
53 #define SUBPLATFORM_GROUP(_platform, _subplatform)			\
54 	.platforms._platform##_##_subplatform = 1
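
/*
 * For example, the HSW ulx entry in hsw_desc below combines
 *
 *	SUBPLATFORM(haswell, ulx),	 -> .platforms.haswell_ulx = 1, .name = "ulx"
 *	SUBPLATFORM_GROUP(haswell, ult), -> .platforms.haswell_ult = 1
 *
 * so a single display->platform.haswell_ult check matches ULT and ULX parts
 * alike.
 */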
55 
56 struct platform_desc {
57 	struct intel_display_platforms platforms;
58 	const char *name;
59 	const struct subplatform_desc *subplatforms;
60 	const struct intel_display_device_info *info; /* NULL for GMD ID */
61 	struct stepping_desc step_info;
62 };
63 
64 #define PLATFORM(_platform)			 \
65 	.platforms._platform = 1,		 \
66 	.name = #_platform
67 
68 /*
69  * Group platform alias that matches multiple platforms. For aliases such as g4x
70  * that covers both g45 and gm45.
71  */
72 #define PLATFORM_GROUP(_platform)		\
73 	.platforms._platform = 1
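
/*
 * E.g. g45_desc and gm45_desc below both add PLATFORM_GROUP(g4x), so a
 * single display->platform.g4x check (see the runtime init at the bottom of
 * this file) covers either chip.
 */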
74 
75 #define ID(id) (id)
76 
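/*
 * Sentinel used when the device has no usable display: probe and runtime
 * init point DISPLAY_INFO() at this all-zero struct, so every feature and
 * mask check reads as disabled.
 */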
77 static const struct intel_display_device_info no_display = {};
78 
79 #define PIPE_A_OFFSET		0x70000
80 #define PIPE_B_OFFSET		0x71000
81 #define PIPE_C_OFFSET		0x72000
82 #define PIPE_D_OFFSET		0x73000
83 #define CHV_PIPE_C_OFFSET	0x74000
84 /*
85  * There's actually no pipe EDP. Some pipe registers have
86  * simply shifted from the pipe to the transcoder, while
87  * keeping their original offset. Thus we need PIPE_EDP_OFFSET
88  * to access such registers in transcoder EDP.
89  */
90 #define PIPE_EDP_OFFSET	0x7f000
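
/*
 * Minimal sketch (not the driver's actual register helpers, and 0x8 is a
 * made-up register offset): the pipe_offsets[]/trans_offsets[] tables
 * initialized below are what turn a transcoder enum into an MMIO block
 * base, roughly
 *
 *	u32 base = DISPLAY_INFO(display)->pipe_offsets[TRANSCODER_EDP];
 *	u32 val  = intel_de_read(display, _MMIO(base + 0x8));
 *
 * which is why transcoder EDP needs its own pipe block at 0x7f000.
 */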
91 
92 /* ICL DSI 0 and 1 */
93 #define PIPE_DSI0_OFFSET	0x7b000
94 #define PIPE_DSI1_OFFSET	0x7b800
95 
96 #define TRANSCODER_A_OFFSET 0x60000
97 #define TRANSCODER_B_OFFSET 0x61000
98 #define TRANSCODER_C_OFFSET 0x62000
99 #define CHV_TRANSCODER_C_OFFSET 0x63000
100 #define TRANSCODER_D_OFFSET 0x63000
101 #define TRANSCODER_EDP_OFFSET 0x6f000
102 #define TRANSCODER_DSI0_OFFSET	0x6b000
103 #define TRANSCODER_DSI1_OFFSET	0x6b800
104 
105 #define CURSOR_A_OFFSET 0x70080
106 #define CURSOR_B_OFFSET 0x700c0
107 #define CHV_CURSOR_C_OFFSET 0x700e0
108 #define IVB_CURSOR_B_OFFSET 0x71080
109 #define IVB_CURSOR_C_OFFSET 0x72080
110 #define TGL_CURSOR_D_OFFSET 0x73080
111 
112 #define I845_PIPE_OFFSETS \
113 	.pipe_offsets = { \
114 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
115 	}, \
116 	.trans_offsets = { \
117 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
118 	}
119 
120 #define I9XX_PIPE_OFFSETS \
121 	.pipe_offsets = { \
122 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
123 		[TRANSCODER_B] = PIPE_B_OFFSET, \
124 	}, \
125 	.trans_offsets = { \
126 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
127 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
128 	}
129 
130 #define IVB_PIPE_OFFSETS \
131 	.pipe_offsets = { \
132 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
133 		[TRANSCODER_B] = PIPE_B_OFFSET, \
134 		[TRANSCODER_C] = PIPE_C_OFFSET, \
135 	}, \
136 	.trans_offsets = { \
137 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
138 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
139 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
140 	}
141 
142 #define HSW_PIPE_OFFSETS \
143 	.pipe_offsets = { \
144 		[TRANSCODER_A] = PIPE_A_OFFSET,	\
145 		[TRANSCODER_B] = PIPE_B_OFFSET, \
146 		[TRANSCODER_C] = PIPE_C_OFFSET, \
147 		[TRANSCODER_EDP] = PIPE_EDP_OFFSET, \
148 	}, \
149 	.trans_offsets = { \
150 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
151 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
152 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
153 		[TRANSCODER_EDP] = TRANSCODER_EDP_OFFSET, \
154 	}
155 
156 #define CHV_PIPE_OFFSETS \
157 	.pipe_offsets = { \
158 		[TRANSCODER_A] = PIPE_A_OFFSET, \
159 		[TRANSCODER_B] = PIPE_B_OFFSET, \
160 		[TRANSCODER_C] = CHV_PIPE_C_OFFSET, \
161 	}, \
162 	.trans_offsets = { \
163 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
164 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
165 		[TRANSCODER_C] = CHV_TRANSCODER_C_OFFSET, \
166 	}
167 
168 #define I845_CURSOR_OFFSETS \
169 	.cursor_offsets = { \
170 		[PIPE_A] = CURSOR_A_OFFSET, \
171 	}
172 
173 #define I9XX_CURSOR_OFFSETS \
174 	.cursor_offsets = { \
175 		[PIPE_A] = CURSOR_A_OFFSET, \
176 		[PIPE_B] = CURSOR_B_OFFSET, \
177 	}
178 
179 #define CHV_CURSOR_OFFSETS \
180 	.cursor_offsets = { \
181 		[PIPE_A] = CURSOR_A_OFFSET, \
182 		[PIPE_B] = CURSOR_B_OFFSET, \
183 		[PIPE_C] = CHV_CURSOR_C_OFFSET, \
184 	}
185 
186 #define IVB_CURSOR_OFFSETS \
187 	.cursor_offsets = { \
188 		[PIPE_A] = CURSOR_A_OFFSET, \
189 		[PIPE_B] = IVB_CURSOR_B_OFFSET, \
190 		[PIPE_C] = IVB_CURSOR_C_OFFSET, \
191 	}
192 
193 #define TGL_CURSOR_OFFSETS \
194 	.cursor_offsets = { \
195 		[PIPE_A] = CURSOR_A_OFFSET, \
196 		[PIPE_B] = IVB_CURSOR_B_OFFSET, \
197 		[PIPE_C] = IVB_CURSOR_C_OFFSET, \
198 		[PIPE_D] = TGL_CURSOR_D_OFFSET, \
199 	}
200 
201 #define I845_COLORS \
202 	.color = { .gamma_lut_size = 256 }
203 #define I9XX_COLORS \
204 	.color = { .gamma_lut_size = 129, \
205 		   .gamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
206 	}
207 #define ILK_COLORS \
208 	.color = { .gamma_lut_size = 1024 }
209 #define IVB_COLORS \
210 	.color = { .degamma_lut_size = 1024, .gamma_lut_size = 1024 }
211 #define CHV_COLORS \
212 	.color = { \
213 		.degamma_lut_size = 65, .gamma_lut_size = 257, \
214 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
215 		.gamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
216 	}
217 #define GLK_COLORS \
218 	.color = { \
219 		.degamma_lut_size = 33, .gamma_lut_size = 1024, \
220 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING | \
221 				     DRM_COLOR_LUT_EQUAL_CHANNELS, \
222 	}
223 #define ICL_COLORS \
224 	.color = { \
225 		.degamma_lut_size = 33, .gamma_lut_size = 262145, \
226 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING | \
227 				     DRM_COLOR_LUT_EQUAL_CHANNELS, \
228 		.gamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING, \
229 	}
230 
231 #define I830_DISPLAY \
232 	.has_overlay = 1, \
233 	.cursor_needs_physical = 1, \
234 	.overlay_needs_physical = 1, \
235 	.has_gmch = 1, \
236 	I9XX_PIPE_OFFSETS, \
237 	I9XX_CURSOR_OFFSETS, \
238 	I9XX_COLORS, \
239 	\
240 	.__runtime_defaults.ip.ver = 2, \
241 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
242 	.__runtime_defaults.cpu_transcoder_mask = \
243 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B)
244 
245 #define I845_DISPLAY \
246 	.has_overlay = 1, \
247 	.overlay_needs_physical = 1, \
248 	.has_gmch = 1, \
249 	I845_PIPE_OFFSETS, \
250 	I845_CURSOR_OFFSETS, \
251 	I845_COLORS, \
252 	\
253 	.__runtime_defaults.ip.ver = 2, \
254 	.__runtime_defaults.pipe_mask = BIT(PIPE_A), \
255 	.__runtime_defaults.cpu_transcoder_mask = BIT(TRANSCODER_A)
256 
257 static const struct platform_desc i830_desc = {
258 	PLATFORM(i830),
259 	PLATFORM_GROUP(mobile),
260 	.info = &(const struct intel_display_device_info) {
261 		I830_DISPLAY,
262 
263 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C), /* DVO A/B/C */
264 	},
265 };
266 
267 static const struct platform_desc i845_desc = {
268 	PLATFORM(i845g),
269 	.info = &(const struct intel_display_device_info) {
270 		I845_DISPLAY,
271 
272 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* DVO B/C */
273 	},
274 };
275 
276 static const struct platform_desc i85x_desc = {
277 	PLATFORM(i85x),
278 	PLATFORM_GROUP(mobile),
279 	.info = &(const struct intel_display_device_info) {
280 		I830_DISPLAY,
281 
282 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* DVO B/C */
283 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
284 	},
285 };
286 
287 static const struct platform_desc i865g_desc = {
288 	PLATFORM(i865g),
289 	.info = &(const struct intel_display_device_info) {
290 		I845_DISPLAY,
291 
292 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* DVO B/C */
293 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
294 	},
295 };
296 
297 #define GEN3_DISPLAY   \
298 	.has_gmch = 1, \
299 	.has_overlay = 1, \
300 	I9XX_PIPE_OFFSETS, \
301 	I9XX_CURSOR_OFFSETS, \
302 	\
303 	.__runtime_defaults.ip.ver = 3, \
304 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
305 	.__runtime_defaults.cpu_transcoder_mask = \
306 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B), \
307 	.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) /* SDVO B/C */
308 
309 static const struct platform_desc i915g_desc = {
310 	PLATFORM(i915g),
311 	.info = &(const struct intel_display_device_info) {
312 		GEN3_DISPLAY,
313 		I845_COLORS,
314 		.cursor_needs_physical = 1,
315 		.overlay_needs_physical = 1,
316 	},
317 };
318 
319 static const struct platform_desc i915gm_desc = {
320 	PLATFORM(i915gm),
321 	PLATFORM_GROUP(mobile),
322 	.info = &(const struct intel_display_device_info) {
323 		GEN3_DISPLAY,
324 		I9XX_COLORS,
325 		.cursor_needs_physical = 1,
326 		.overlay_needs_physical = 1,
327 		.supports_tv = 1,
328 
329 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
330 	},
331 };
332 
333 static const struct platform_desc i945g_desc = {
334 	PLATFORM(i945g),
335 	.info = &(const struct intel_display_device_info) {
336 		GEN3_DISPLAY,
337 		I845_COLORS,
338 		.has_hotplug = 1,
339 		.cursor_needs_physical = 1,
340 		.overlay_needs_physical = 1,
341 	},
342 };
343 
344 static const struct platform_desc i945gm_desc = {
345 	PLATFORM(i945gm),
346 	PLATFORM_GROUP(mobile),
347 	.info = &(const struct intel_display_device_info) {
348 		GEN3_DISPLAY,
349 		I9XX_COLORS,
350 		.has_hotplug = 1,
351 		.cursor_needs_physical = 1,
352 		.overlay_needs_physical = 1,
353 		.supports_tv = 1,
354 
355 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
356 	},
357 };
358 
359 static const struct platform_desc g33_desc = {
360 	PLATFORM(g33),
361 	.info = &(const struct intel_display_device_info) {
362 		GEN3_DISPLAY,
363 		I845_COLORS,
364 		.has_hotplug = 1,
365 	},
366 };
367 
368 static const struct intel_display_device_info pnv_display = {
369 	GEN3_DISPLAY,
370 	I9XX_COLORS,
371 	.has_hotplug = 1,
372 };
373 
374 static const struct platform_desc pnv_g_desc = {
375 	PLATFORM(pineview),
376 	.info = &pnv_display,
377 };
378 
379 static const struct platform_desc pnv_m_desc = {
380 	PLATFORM(pineview),
381 	PLATFORM_GROUP(mobile),
382 	.info = &pnv_display,
383 };
384 
385 #define GEN4_DISPLAY \
386 	.has_hotplug = 1, \
387 	.has_gmch = 1, \
388 	I9XX_PIPE_OFFSETS, \
389 	I9XX_CURSOR_OFFSETS, \
390 	I9XX_COLORS, \
391 	\
392 	.__runtime_defaults.ip.ver = 4, \
393 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
394 	.__runtime_defaults.cpu_transcoder_mask = \
395 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B)
396 
397 static const struct platform_desc i965g_desc = {
398 	PLATFORM(i965g),
399 	.info = &(const struct intel_display_device_info) {
400 		GEN4_DISPLAY,
401 		.has_overlay = 1,
402 
403 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* SDVO B/C */
404 	},
405 };
406 
407 static const struct platform_desc i965gm_desc = {
408 	PLATFORM(i965gm),
409 	PLATFORM_GROUP(mobile),
410 	.info = &(const struct intel_display_device_info) {
411 		GEN4_DISPLAY,
412 		.has_overlay = 1,
413 		.supports_tv = 1,
414 
415 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* SDVO B/C */
416 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
417 	},
418 };
419 
420 static const struct platform_desc g45_desc = {
421 	PLATFORM(g45),
422 	PLATFORM_GROUP(g4x),
423 	.info = &(const struct intel_display_device_info) {
424 		GEN4_DISPLAY,
425 
426 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* SDVO/HDMI/DP B/C, DP D */
427 	},
428 };
429 
430 static const struct platform_desc gm45_desc = {
431 	PLATFORM(gm45),
432 	PLATFORM_GROUP(g4x),
433 	PLATFORM_GROUP(mobile),
434 	.info = &(const struct intel_display_device_info) {
435 		GEN4_DISPLAY,
436 		.supports_tv = 1,
437 
438 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* SDVO/HDMI/DP B/C, DP D */
439 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
440 	},
441 };
442 
443 #define ILK_DISPLAY \
444 	.has_hotplug = 1, \
445 	I9XX_PIPE_OFFSETS, \
446 	I9XX_CURSOR_OFFSETS, \
447 	ILK_COLORS, \
448 	\
449 	.__runtime_defaults.ip.ver = 5, \
450 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B), \
451 	.__runtime_defaults.cpu_transcoder_mask = \
452 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B), \
453 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) /* DP A, SDVO/HDMI/DP B, HDMI/DP C/D */
454 
455 static const struct platform_desc ilk_d_desc = {
456 	PLATFORM(ironlake),
457 	.info = &(const struct intel_display_device_info) {
458 		ILK_DISPLAY,
459 	},
460 };
461 
462 static const struct platform_desc ilk_m_desc = {
463 	PLATFORM(ironlake),
464 	PLATFORM_GROUP(mobile),
465 	.info = &(const struct intel_display_device_info) {
466 		ILK_DISPLAY,
467 
468 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
469 	},
470 };
471 
472 static const struct intel_display_device_info snb_display = {
473 	.has_hotplug = 1,
474 	I9XX_PIPE_OFFSETS,
475 	I9XX_CURSOR_OFFSETS,
476 	ILK_COLORS,
477 
478 	.__runtime_defaults.ip.ver = 6,
479 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B),
480 	.__runtime_defaults.cpu_transcoder_mask =
481 	BIT(TRANSCODER_A) | BIT(TRANSCODER_B),
482 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* DP A, SDVO/HDMI/DP B, HDMI/DP C/D */
483 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
484 };
485 
486 static const struct platform_desc snb_d_desc = {
487 	PLATFORM(sandybridge),
488 	.info = &snb_display,
489 };
490 
491 static const struct platform_desc snb_m_desc = {
492 	PLATFORM(sandybridge),
493 	PLATFORM_GROUP(mobile),
494 	.info = &snb_display,
495 };
496 
497 static const struct intel_display_device_info ivb_display = {
498 	.has_hotplug = 1,
499 	IVB_PIPE_OFFSETS,
500 	IVB_CURSOR_OFFSETS,
501 	IVB_COLORS,
502 
503 	.__runtime_defaults.ip.ver = 7,
504 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
505 	.__runtime_defaults.cpu_transcoder_mask =
506 	BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | BIT(TRANSCODER_C),
507 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* DP A, SDVO/HDMI/DP B, HDMI/DP C/D */
508 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
509 };
510 
511 static const struct platform_desc ivb_d_desc = {
512 	PLATFORM(ivybridge),
513 	.info = &ivb_display,
514 };
515 
516 static const struct platform_desc ivb_m_desc = {
517 	PLATFORM(ivybridge),
518 	PLATFORM_GROUP(mobile),
519 	.info = &ivb_display,
520 };
521 
522 static const struct platform_desc vlv_desc = {
523 	PLATFORM(valleyview),
524 	.info = &(const struct intel_display_device_info) {
525 		.has_gmch = 1,
526 		.has_hotplug = 1,
527 		.mmio_offset = VLV_DISPLAY_BASE,
528 		I9XX_PIPE_OFFSETS,
529 		I9XX_CURSOR_OFFSETS,
530 		I9XX_COLORS,
531 
532 		.__runtime_defaults.ip.ver = 7,
533 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B),
534 		.__runtime_defaults.cpu_transcoder_mask =
535 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B),
536 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C), /* HDMI/DP B/C */
537 	},
538 };
539 
540 static const u16 hsw_ult_ids[] = {
541 	INTEL_HSW_ULT_GT1_IDS(ID),
542 	INTEL_HSW_ULT_GT2_IDS(ID),
543 	INTEL_HSW_ULT_GT3_IDS(ID),
544 	0
545 };
546 
547 static const u16 hsw_ulx_ids[] = {
548 	INTEL_HSW_ULX_GT1_IDS(ID),
549 	INTEL_HSW_ULX_GT2_IDS(ID),
550 	0
551 };
552 
553 static const struct platform_desc hsw_desc = {
554 	PLATFORM(haswell),
555 	.subplatforms = (const struct subplatform_desc[]) {
556 		/* Special case: Use ult both as group and subplatform. */
557 		{
558 			SUBPLATFORM(haswell, ult),
559 			SUBPLATFORM_GROUP(haswell, ult),
560 			.pciidlist = hsw_ult_ids,
561 		},
562 		{
563 			SUBPLATFORM(haswell, ulx),
564 			SUBPLATFORM_GROUP(haswell, ult),
565 			.pciidlist = hsw_ulx_ids,
566 		},
567 		{},
568 	},
569 	.info = &(const struct intel_display_device_info) {
570 		.has_ddi = 1,
571 		.has_dp_mst = 1,
572 		.has_fpga_dbg = 1,
573 		.has_hotplug = 1,
574 		.has_psr = 1,
575 		.has_psr_hw_tracking = 1,
576 		HSW_PIPE_OFFSETS,
577 		IVB_CURSOR_OFFSETS,
578 		IVB_COLORS,
579 
580 		.__runtime_defaults.ip.ver = 7,
581 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
582 		.__runtime_defaults.cpu_transcoder_mask =
583 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
584 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP),
585 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
586 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
587 	},
588 };
589 
590 static const u16 bdw_ult_ids[] = {
591 	INTEL_BDW_ULT_GT1_IDS(ID),
592 	INTEL_BDW_ULT_GT2_IDS(ID),
593 	INTEL_BDW_ULT_GT3_IDS(ID),
594 	INTEL_BDW_ULT_RSVD_IDS(ID),
595 	0
596 };
597 
598 static const u16 bdw_ulx_ids[] = {
599 	INTEL_BDW_ULX_GT1_IDS(ID),
600 	INTEL_BDW_ULX_GT2_IDS(ID),
601 	INTEL_BDW_ULX_GT3_IDS(ID),
602 	INTEL_BDW_ULX_RSVD_IDS(ID),
603 	0
604 };
605 
606 static const struct platform_desc bdw_desc = {
607 	PLATFORM(broadwell),
608 	.subplatforms = (const struct subplatform_desc[]) {
609 		/* Special case: Use ult both as group and subplatform. */
610 		{
611 			SUBPLATFORM(broadwell, ult),
612 			SUBPLATFORM_GROUP(broadwell, ult),
613 			.pciidlist = bdw_ult_ids,
614 		},
615 		{
616 			SUBPLATFORM(broadwell, ulx),
617 			SUBPLATFORM_GROUP(broadwell, ult),
618 			.pciidlist = bdw_ulx_ids,
619 		},
620 		{},
621 	},
622 	.info = &(const struct intel_display_device_info) {
623 		.has_ddi = 1,
624 		.has_dp_mst = 1,
625 		.has_fpga_dbg = 1,
626 		.has_hotplug = 1,
627 		.has_psr = 1,
628 		.has_psr_hw_tracking = 1,
629 		HSW_PIPE_OFFSETS,
630 		IVB_CURSOR_OFFSETS,
631 		IVB_COLORS,
632 
633 		.__runtime_defaults.ip.ver = 8,
634 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
635 		.__runtime_defaults.cpu_transcoder_mask =
636 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
637 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP),
638 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
639 		.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
640 	},
641 };
642 
643 static const struct platform_desc chv_desc = {
644 	PLATFORM(cherryview),
645 	.info = &(const struct intel_display_device_info) {
646 		.has_hotplug = 1,
647 		.has_gmch = 1,
648 		.mmio_offset = VLV_DISPLAY_BASE,
649 		CHV_PIPE_OFFSETS,
650 		CHV_CURSOR_OFFSETS,
651 		CHV_COLORS,
652 
653 		.__runtime_defaults.ip.ver = 8,
654 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
655 		.__runtime_defaults.cpu_transcoder_mask =
656 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | BIT(TRANSCODER_C),
657 		.__runtime_defaults.port_mask = BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D), /* HDMI/DP B/C/D */
658 	},
659 };
660 
661 static const struct intel_display_device_info skl_display = {
662 	.dbuf.size = 896 - 4, /* 4 blocks for bypass path allocation */
663 	.dbuf.slice_mask = BIT(DBUF_S1),
664 	.has_ddi = 1,
665 	.has_dp_mst = 1,
666 	.has_fpga_dbg = 1,
667 	.has_hotplug = 1,
668 	.has_ipc = 1,
669 	.has_psr = 1,
670 	.has_psr_hw_tracking = 1,
671 	HSW_PIPE_OFFSETS,
672 	IVB_CURSOR_OFFSETS,
673 	IVB_COLORS,
674 
675 	.__runtime_defaults.ip.ver = 9,
676 	.__runtime_defaults.has_dmc = 1,
677 	.__runtime_defaults.has_hdcp = 1,
678 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
679 	.__runtime_defaults.cpu_transcoder_mask =
680 	BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
681 	BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP),
682 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
683 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),
684 };
685 
686 static const u16 skl_ult_ids[] = {
687 	INTEL_SKL_ULT_GT1_IDS(ID),
688 	INTEL_SKL_ULT_GT2_IDS(ID),
689 	INTEL_SKL_ULT_GT3_IDS(ID),
690 	0
691 };
692 
693 static const u16 skl_ulx_ids[] = {
694 	INTEL_SKL_ULX_GT1_IDS(ID),
695 	INTEL_SKL_ULX_GT2_IDS(ID),
696 	0
697 };
698 
699 static const enum intel_step skl_steppings[] = {
700 	[0x6] = STEP_G0,
701 	[0x7] = STEP_H0,
702 	[0x9] = STEP_J0,
703 	[0xA] = STEP_I1,
704 };
705 
706 static const struct platform_desc skl_desc = {
707 	PLATFORM(skylake),
708 	.subplatforms = (const struct subplatform_desc[]) {
709 		{
710 			SUBPLATFORM(skylake, ult),
711 			.pciidlist = skl_ult_ids,
712 		},
713 		{
714 			SUBPLATFORM(skylake, ulx),
715 			.pciidlist = skl_ulx_ids,
716 		},
717 		{},
718 	},
719 	.info = &skl_display,
720 	STEP_INFO(skl_steppings),
721 };
722 
723 static const u16 kbl_ult_ids[] = {
724 	INTEL_KBL_ULT_GT1_IDS(ID),
725 	INTEL_KBL_ULT_GT2_IDS(ID),
726 	INTEL_KBL_ULT_GT3_IDS(ID),
727 	0
728 };
729 
730 static const u16 kbl_ulx_ids[] = {
731 	INTEL_KBL_ULX_GT1_IDS(ID),
732 	INTEL_KBL_ULX_GT2_IDS(ID),
733 	INTEL_AML_KBL_GT2_IDS(ID),
734 	0
735 };
736 
737 static const enum intel_step kbl_steppings[] = {
738 	[1] = STEP_B0,
739 	[2] = STEP_B0,
740 	[3] = STEP_B0,
741 	[4] = STEP_C0,
742 	[5] = STEP_B1,
743 	[6] = STEP_B1,
744 	[7] = STEP_C0,
745 };
746 
747 static const struct platform_desc kbl_desc = {
748 	PLATFORM(kabylake),
749 	.subplatforms = (const struct subplatform_desc[]) {
750 		{
751 			SUBPLATFORM(kabylake, ult),
752 			.pciidlist = kbl_ult_ids,
753 		},
754 		{
755 			SUBPLATFORM(kabylake, ulx),
756 			.pciidlist = kbl_ulx_ids,
757 		},
758 		{},
759 	},
760 	.info = &skl_display,
761 	STEP_INFO(kbl_steppings),
762 };
763 
764 static const u16 cfl_ult_ids[] = {
765 	INTEL_CFL_U_GT2_IDS(ID),
766 	INTEL_CFL_U_GT3_IDS(ID),
767 	INTEL_WHL_U_GT1_IDS(ID),
768 	INTEL_WHL_U_GT2_IDS(ID),
769 	INTEL_WHL_U_GT3_IDS(ID),
770 	0
771 };
772 
773 static const u16 cfl_ulx_ids[] = {
774 	INTEL_AML_CFL_GT2_IDS(ID),
775 	0
776 };
777 
778 static const struct platform_desc cfl_desc = {
779 	PLATFORM(coffeelake),
780 	.subplatforms = (const struct subplatform_desc[]) {
781 		{
782 			SUBPLATFORM(coffeelake, ult),
783 			.pciidlist = cfl_ult_ids,
784 		},
785 		{
786 			SUBPLATFORM(coffeelake, ulx),
787 			.pciidlist = cfl_ulx_ids,
788 		},
789 		{},
790 	},
791 	.info = &skl_display,
792 };
793 
794 static const u16 cml_ult_ids[] = {
795 	INTEL_CML_U_GT1_IDS(ID),
796 	INTEL_CML_U_GT2_IDS(ID),
797 	0
798 };
799 
800 static const struct platform_desc cml_desc = {
801 	PLATFORM(cometlake),
802 	.subplatforms = (const struct subplatform_desc[]) {
803 		{
804 			SUBPLATFORM(cometlake, ult),
805 			.pciidlist = cml_ult_ids,
806 		},
807 		{},
808 	},
809 	.info = &skl_display,
810 };
811 
812 #define GEN9_LP_DISPLAY			 \
813 	.dbuf.slice_mask = BIT(DBUF_S1), \
814 	.has_dp_mst = 1, \
815 	.has_ddi = 1, \
816 	.has_fpga_dbg = 1, \
817 	.has_hotplug = 1, \
818 	.has_ipc = 1, \
819 	.has_psr = 1, \
820 	.has_psr_hw_tracking = 1, \
821 	HSW_PIPE_OFFSETS, \
822 	IVB_CURSOR_OFFSETS, \
823 	IVB_COLORS, \
824 	\
825 	.__runtime_defaults.has_dmc = 1, \
826 	.__runtime_defaults.has_hdcp = 1, \
827 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A), \
828 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C), \
829 	.__runtime_defaults.cpu_transcoder_mask = \
830 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | \
831 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP) | \
832 		BIT(TRANSCODER_DSI_A) | BIT(TRANSCODER_DSI_C), \
833 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C)
834 
835 static const enum intel_step bxt_steppings[] = {
836 	[0xA] = STEP_C0,
837 	[0xB] = STEP_C0,
838 	[0xC] = STEP_D0,
839 	[0xD] = STEP_E0,
840 };
841 
842 static const struct platform_desc bxt_desc = {
843 	PLATFORM(broxton),
844 	.info = &(const struct intel_display_device_info) {
845 		GEN9_LP_DISPLAY,
846 		.dbuf.size = 512 - 4, /* 4 blocks for bypass path allocation */
847 
848 		.__runtime_defaults.ip.ver = 9,
849 	},
850 	STEP_INFO(bxt_steppings),
851 };
852 
853 static const enum intel_step glk_steppings[] = {
854 	[3] = STEP_B0,
855 };
856 
857 static const struct platform_desc glk_desc = {
858 	PLATFORM(geminilake),
859 	.info = &(const struct intel_display_device_info) {
860 		GEN9_LP_DISPLAY,
861 		.dbuf.size = 1024 - 4, /* 4 blocks for bypass path allocation */
862 		GLK_COLORS,
863 
864 		.__runtime_defaults.ip.ver = 10,
865 	},
866 	STEP_INFO(glk_steppings),
867 };
868 
869 #define ICL_DISPLAY \
870 	.abox_mask = BIT(0), \
871 	.dbuf.size = 2048, \
872 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2), \
873 	.has_ddi = 1, \
874 	.has_dp_mst = 1, \
875 	.has_fpga_dbg = 1, \
876 	.has_hotplug = 1, \
877 	.has_ipc = 1, \
878 	.has_psr = 1, \
879 	.has_psr_hw_tracking = 1, \
880 	.pipe_offsets = { \
881 		[TRANSCODER_A] = PIPE_A_OFFSET, \
882 		[TRANSCODER_B] = PIPE_B_OFFSET, \
883 		[TRANSCODER_C] = PIPE_C_OFFSET, \
884 		[TRANSCODER_EDP] = PIPE_EDP_OFFSET, \
885 		[TRANSCODER_DSI_0] = PIPE_DSI0_OFFSET, \
886 		[TRANSCODER_DSI_1] = PIPE_DSI1_OFFSET, \
887 	}, \
888 	.trans_offsets = { \
889 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
890 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
891 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
892 		[TRANSCODER_EDP] = TRANSCODER_EDP_OFFSET, \
893 		[TRANSCODER_DSI_0] = TRANSCODER_DSI0_OFFSET, \
894 		[TRANSCODER_DSI_1] = TRANSCODER_DSI1_OFFSET, \
895 	}, \
896 	IVB_CURSOR_OFFSETS, \
897 	ICL_COLORS, \
898 	\
899 	.__runtime_defaults.ip.ver = 11, \
900 	.__runtime_defaults.has_dmc = 1, \
901 	.__runtime_defaults.has_dsc = 1, \
902 	.__runtime_defaults.has_hdcp = 1, \
903 	.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C), \
904 	.__runtime_defaults.cpu_transcoder_mask = \
905 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | \
906 		BIT(TRANSCODER_C) | BIT(TRANSCODER_EDP) | \
907 		BIT(TRANSCODER_DSI_0) | BIT(TRANSCODER_DSI_1), \
908 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A)
909 
910 static const u16 icl_port_f_ids[] = {
911 	INTEL_ICL_PORT_F_IDS(ID),
912 	0
913 };
914 
915 static const enum intel_step icl_steppings[] = {
916 	[7] = STEP_D0,
917 };
918 
919 static const struct platform_desc icl_desc = {
920 	PLATFORM(icelake),
921 	.subplatforms = (const struct subplatform_desc[]) {
922 		{
923 			SUBPLATFORM(icelake, port_f),
924 			.pciidlist = icl_port_f_ids,
925 		},
926 		{},
927 	},
928 	.info = &(const struct intel_display_device_info) {
929 		ICL_DISPLAY,
930 
931 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D) | BIT(PORT_E),
932 	},
933 	STEP_INFO(icl_steppings),
934 };
935 
936 static const struct intel_display_device_info jsl_ehl_display = {
937 	ICL_DISPLAY,
938 
939 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D),
940 };
941 
942 static const enum intel_step jsl_ehl_steppings[] = {
943 	[0] = STEP_A0,
944 	[1] = STEP_B0,
945 };
946 
947 static const struct platform_desc jsl_desc = {
948 	PLATFORM(jasperlake),
949 	.info = &jsl_ehl_display,
950 	STEP_INFO(jsl_ehl_steppings),
951 };
952 
953 static const struct platform_desc ehl_desc = {
954 	PLATFORM(elkhartlake),
955 	.info = &jsl_ehl_display,
956 	STEP_INFO(jsl_ehl_steppings),
957 };
958 
959 #define XE_D_DISPLAY \
960 	.abox_mask = GENMASK(2, 1), \
961 	.dbuf.size = 2048, \
962 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2), \
963 	.has_ddi = 1, \
964 	.has_dp_mst = 1, \
965 	.has_dsb = 1, \
966 	.has_fpga_dbg = 1, \
967 	.has_hotplug = 1, \
968 	.has_ipc = 1, \
969 	.has_psr = 1, \
970 	.has_psr_hw_tracking = 1, \
971 	.pipe_offsets = { \
972 		[TRANSCODER_A] = PIPE_A_OFFSET, \
973 		[TRANSCODER_B] = PIPE_B_OFFSET, \
974 		[TRANSCODER_C] = PIPE_C_OFFSET, \
975 		[TRANSCODER_D] = PIPE_D_OFFSET, \
976 		[TRANSCODER_DSI_0] = PIPE_DSI0_OFFSET, \
977 		[TRANSCODER_DSI_1] = PIPE_DSI1_OFFSET, \
978 	}, \
979 	.trans_offsets = { \
980 		[TRANSCODER_A] = TRANSCODER_A_OFFSET, \
981 		[TRANSCODER_B] = TRANSCODER_B_OFFSET, \
982 		[TRANSCODER_C] = TRANSCODER_C_OFFSET, \
983 		[TRANSCODER_D] = TRANSCODER_D_OFFSET, \
984 		[TRANSCODER_DSI_0] = TRANSCODER_DSI0_OFFSET, \
985 		[TRANSCODER_DSI_1] = TRANSCODER_DSI1_OFFSET, \
986 	}, \
987 	TGL_CURSOR_OFFSETS, \
988 	ICL_COLORS, \
989 	\
990 	.__runtime_defaults.ip.ver = 12, \
991 	.__runtime_defaults.has_dmc = 1, \
992 	.__runtime_defaults.has_dsc = 1, \
993 	.__runtime_defaults.has_hdcp = 1, \
994 	.__runtime_defaults.pipe_mask = \
995 		BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C) | BIT(PIPE_D), \
996 	.__runtime_defaults.cpu_transcoder_mask = \
997 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | \
998 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D) | \
999 		BIT(TRANSCODER_DSI_0) | BIT(TRANSCODER_DSI_1), \
1000 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A)
1001 
1002 static const u16 tgl_uy_ids[] = {
1003 	INTEL_TGL_GT2_IDS(ID),
1004 	0
1005 };
1006 
1007 static const enum intel_step tgl_steppings[] = {
1008 	[0] = STEP_B0,
1009 	[1] = STEP_D0,
1010 };
1011 
1012 static const enum intel_step tgl_uy_steppings[] = {
1013 	[0] = STEP_A0,
1014 	[1] = STEP_C0,
1015 	[2] = STEP_C0,
1016 	[3] = STEP_D0,
1017 };
1018 
1019 static const struct platform_desc tgl_desc = {
1020 	PLATFORM(tigerlake),
1021 	.subplatforms = (const struct subplatform_desc[]) {
1022 		{
1023 			SUBPLATFORM(tigerlake, uy),
1024 			.pciidlist = tgl_uy_ids,
1025 			STEP_INFO(tgl_uy_steppings),
1026 		},
1027 		{},
1028 	},
1029 	.info = &(const struct intel_display_device_info) {
1030 		XE_D_DISPLAY,
1031 
1032 		/*
1033 		 * FIXME DDI C/combo PHY C missing due to combo PHY
1034 		 * code making a mess on SKUs where the PHY is missing.
1035 		 */
1036 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1037 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4) | BIT(PORT_TC5) | BIT(PORT_TC6),
1038 	},
1039 	STEP_INFO(tgl_steppings),
1040 };
1041 
1042 static const enum intel_step dg1_steppings[] = {
1043 	[0] = STEP_A0,
1044 	[1] = STEP_B0,
1045 };
1046 
1047 static const struct platform_desc dg1_desc = {
1048 	PLATFORM(dg1),
1049 	PLATFORM_GROUP(dgfx),
1050 	.info = &(const struct intel_display_device_info) {
1051 		XE_D_DISPLAY,
1052 
1053 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1054 		BIT(PORT_TC1) | BIT(PORT_TC2),
1055 	},
1056 	STEP_INFO(dg1_steppings),
1057 };
1058 
1059 static const enum intel_step rkl_steppings[] = {
1060 	[0] = STEP_A0,
1061 	[1] = STEP_B0,
1062 	[4] = STEP_C0,
1063 };
1064 
1065 static const struct platform_desc rkl_desc = {
1066 	PLATFORM(rocketlake),
1067 	.info = &(const struct intel_display_device_info) {
1068 		XE_D_DISPLAY,
1069 		.abox_mask = BIT(0),
1070 		.has_hti = 1,
1071 		.has_psr_hw_tracking = 0,
1072 
1073 		.__runtime_defaults.pipe_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C),
1074 		.__runtime_defaults.cpu_transcoder_mask =
1075 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | BIT(TRANSCODER_C),
1076 		.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1077 		BIT(PORT_TC1) | BIT(PORT_TC2),
1078 	},
1079 	STEP_INFO(rkl_steppings),
1080 };
1081 
1082 static const u16 adls_rpls_ids[] = {
1083 	INTEL_RPLS_IDS(ID),
1084 	0
1085 };
1086 
1087 static const enum intel_step adl_s_steppings[] = {
1088 	[0x0] = STEP_A0,
1089 	[0x1] = STEP_A2,
1090 	[0x4] = STEP_B0,
1091 	[0x8] = STEP_B0,
1092 	[0xC] = STEP_C0,
1093 };
1094 
1095 static const enum intel_step adl_s_rpl_s_steppings[] = {
1096 	[0x4] = STEP_D0,
1097 	[0xC] = STEP_C0,
1098 };
1099 
1100 static const struct platform_desc adl_s_desc = {
1101 	PLATFORM(alderlake_s),
1102 	.subplatforms = (const struct subplatform_desc[]) {
1103 		{
1104 			SUBPLATFORM(alderlake_s, raptorlake_s),
1105 			.pciidlist = adls_rpls_ids,
1106 			STEP_INFO(adl_s_rpl_s_steppings),
1107 		},
1108 		{},
1109 	},
1110 	.info = &(const struct intel_display_device_info) {
1111 		XE_D_DISPLAY,
1112 		.has_hti = 1,
1113 		.has_psr_hw_tracking = 0,
1114 
1115 		.__runtime_defaults.port_mask = BIT(PORT_A) |
1116 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4),
1117 	},
1118 	STEP_INFO(adl_s_steppings),
1119 };
1120 
1121 #define XE_LPD_FEATURES \
1122 	.abox_mask = GENMASK(1, 0),						\
1123 	.color = {								\
1124 		.degamma_lut_size = 129, .gamma_lut_size = 1024,		\
1125 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING |		\
1126 		DRM_COLOR_LUT_EQUAL_CHANNELS,					\
1127 	},									\
1128 	.dbuf.size = 4096,							\
1129 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2) | BIT(DBUF_S3) |		\
1130 		BIT(DBUF_S4),							\
1131 	.has_ddi = 1,								\
1132 	.has_dp_mst = 1,							\
1133 	.has_dsb = 1,								\
1134 	.has_fpga_dbg = 1,							\
1135 	.has_hotplug = 1,							\
1136 	.has_ipc = 1,								\
1137 	.has_psr = 1,								\
1138 	.pipe_offsets = {							\
1139 		[TRANSCODER_A] = PIPE_A_OFFSET,					\
1140 		[TRANSCODER_B] = PIPE_B_OFFSET,					\
1141 		[TRANSCODER_C] = PIPE_C_OFFSET,					\
1142 		[TRANSCODER_D] = PIPE_D_OFFSET,					\
1143 		[TRANSCODER_DSI_0] = PIPE_DSI0_OFFSET,				\
1144 		[TRANSCODER_DSI_1] = PIPE_DSI1_OFFSET,				\
1145 	},									\
1146 	.trans_offsets = {							\
1147 		[TRANSCODER_A] = TRANSCODER_A_OFFSET,				\
1148 		[TRANSCODER_B] = TRANSCODER_B_OFFSET,				\
1149 		[TRANSCODER_C] = TRANSCODER_C_OFFSET,				\
1150 		[TRANSCODER_D] = TRANSCODER_D_OFFSET,				\
1151 		[TRANSCODER_DSI_0] = TRANSCODER_DSI0_OFFSET,			\
1152 		[TRANSCODER_DSI_1] = TRANSCODER_DSI1_OFFSET,			\
1153 	},									\
1154 	TGL_CURSOR_OFFSETS,							\
1155 										\
1156 	.__runtime_defaults.ip.ver = 13,					\
1157 	.__runtime_defaults.has_dmc = 1,					\
1158 	.__runtime_defaults.has_dsc = 1,					\
1159 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A),			\
1160 	.__runtime_defaults.has_hdcp = 1,					\
1161 	.__runtime_defaults.pipe_mask =						\
1162 		BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C) | BIT(PIPE_D)
1163 
1164 static const struct intel_display_device_info xe_lpd_display = {
1165 	XE_LPD_FEATURES,
1166 	.has_cdclk_crawl = 1,
1167 	.has_psr_hw_tracking = 0,
1168 
1169 	.__runtime_defaults.cpu_transcoder_mask =
1170 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
1171 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D) |
1172 		BIT(TRANSCODER_DSI_0) | BIT(TRANSCODER_DSI_1),
1173 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |
1174 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4),
1175 };
1176 
1177 static const u16 adlp_adln_ids[] = {
1178 	INTEL_ADLN_IDS(ID),
1179 	0
1180 };
1181 
1182 static const u16 adlp_rplu_ids[] = {
1183 	INTEL_RPLU_IDS(ID),
1184 	0
1185 };
1186 
1187 static const u16 adlp_rplp_ids[] = {
1188 	INTEL_RPLP_IDS(ID),
1189 	0
1190 };
1191 
1192 static const enum intel_step adl_p_steppings[] = {
1193 	[0x0] = STEP_A0,
1194 	[0x4] = STEP_B0,
1195 	[0x8] = STEP_C0,
1196 	[0xC] = STEP_D0,
1197 };
1198 
1199 static const enum intel_step adl_p_adl_n_steppings[] = {
1200 	[0x0] = STEP_D0,
1201 };
1202 
1203 static const enum intel_step adl_p_rpl_pu_steppings[] = {
1204 	[0x4] = STEP_E0,
1205 };
1206 
1207 static const struct platform_desc adl_p_desc = {
1208 	PLATFORM(alderlake_p),
1209 	.subplatforms = (const struct subplatform_desc[]) {
1210 		{
1211 			SUBPLATFORM(alderlake_p, alderlake_n),
1212 			.pciidlist = adlp_adln_ids,
1213 			STEP_INFO(adl_p_adl_n_steppings),
1214 		},
1215 		{
1216 			SUBPLATFORM(alderlake_p, raptorlake_p),
1217 			.pciidlist = adlp_rplp_ids,
1218 			STEP_INFO(adl_p_rpl_pu_steppings),
1219 		},
1220 		{
1221 			SUBPLATFORM(alderlake_p, raptorlake_u),
1222 			.pciidlist = adlp_rplu_ids,
1223 			STEP_INFO(adl_p_rpl_pu_steppings),
1224 		},
1225 		{},
1226 	},
1227 	.info = &xe_lpd_display,
1228 	STEP_INFO(adl_p_steppings),
1229 };
1230 
1231 static const struct intel_display_device_info xe_hpd_display = {
1232 	XE_LPD_FEATURES,
1233 	.has_cdclk_squash = 1,
1234 
1235 	.__runtime_defaults.cpu_transcoder_mask =
1236 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
1237 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D),
1238 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) | BIT(PORT_C) | BIT(PORT_D_XELPD) |
1239 		BIT(PORT_TC1),
1240 };
1241 
1242 static const u16 dg2_g10_ids[] = {
1243 	INTEL_DG2_G10_IDS(ID),
1244 	0
1245 };
1246 
1247 static const u16 dg2_g11_ids[] = {
1248 	INTEL_DG2_G11_IDS(ID),
1249 	0
1250 };
1251 
1252 static const u16 dg2_g12_ids[] = {
1253 	INTEL_DG2_G12_IDS(ID),
1254 	0
1255 };
1256 
1257 static const enum intel_step dg2_g10_steppings[] = {
1258 	[0x0] = STEP_A0,
1259 	[0x1] = STEP_A0,
1260 	[0x4] = STEP_B0,
1261 	[0x8] = STEP_C0,
1262 };
1263 
1264 static const enum intel_step dg2_g11_steppings[] = {
1265 	[0x0] = STEP_B0,
1266 	[0x4] = STEP_C0,
1267 	[0x5] = STEP_C0,
1268 };
1269 
1270 static const enum intel_step dg2_g12_steppings[] = {
1271 	[0x0] = STEP_C0,
1272 	[0x1] = STEP_C0,
1273 };
1274 
1275 static const struct platform_desc dg2_desc = {
1276 	PLATFORM(dg2),
1277 	PLATFORM_GROUP(dgfx),
1278 	.subplatforms = (const struct subplatform_desc[]) {
1279 		{
1280 			SUBPLATFORM(dg2, g10),
1281 			.pciidlist = dg2_g10_ids,
1282 			STEP_INFO(dg2_g10_steppings),
1283 		},
1284 		{
1285 			SUBPLATFORM(dg2, g11),
1286 			.pciidlist = dg2_g11_ids,
1287 			STEP_INFO(dg2_g11_steppings),
1288 		},
1289 		{
1290 			SUBPLATFORM(dg2, g12),
1291 			.pciidlist = dg2_g12_ids,
1292 			STEP_INFO(dg2_g12_steppings),
1293 		},
1294 		{},
1295 	},
1296 	.info = &xe_hpd_display,
1297 };
1298 
1299 #define XE_LPDP_FEATURES							\
1300 	.abox_mask = GENMASK(1, 0),						\
1301 	.color = {								\
1302 		.degamma_lut_size = 129, .gamma_lut_size = 1024,		\
1303 		.degamma_lut_tests = DRM_COLOR_LUT_NON_DECREASING |		\
1304 		DRM_COLOR_LUT_EQUAL_CHANNELS,					\
1305 	},									\
1306 	.dbuf.size = 4096,							\
1307 	.dbuf.slice_mask = BIT(DBUF_S1) | BIT(DBUF_S2) | BIT(DBUF_S3) |		\
1308 		BIT(DBUF_S4),							\
1309 	.has_cdclk_crawl = 1,							\
1310 	.has_cdclk_squash = 1,							\
1311 	.has_ddi = 1,								\
1312 	.has_dp_mst = 1,							\
1313 	.has_dsb = 1,								\
1314 	.has_fpga_dbg = 1,							\
1315 	.has_hotplug = 1,							\
1316 	.has_ipc = 1,								\
1317 	.has_psr = 1,								\
1318 	.pipe_offsets = {							\
1319 		[TRANSCODER_A] = PIPE_A_OFFSET,					\
1320 		[TRANSCODER_B] = PIPE_B_OFFSET,					\
1321 		[TRANSCODER_C] = PIPE_C_OFFSET,					\
1322 		[TRANSCODER_D] = PIPE_D_OFFSET,					\
1323 	},									\
1324 	.trans_offsets = {							\
1325 		[TRANSCODER_A] = TRANSCODER_A_OFFSET,				\
1326 		[TRANSCODER_B] = TRANSCODER_B_OFFSET,				\
1327 		[TRANSCODER_C] = TRANSCODER_C_OFFSET,				\
1328 		[TRANSCODER_D] = TRANSCODER_D_OFFSET,				\
1329 	},									\
1330 	TGL_CURSOR_OFFSETS,							\
1331 										\
1332 	.__runtime_defaults.cpu_transcoder_mask =				\
1333 		BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |				\
1334 		BIT(TRANSCODER_C) | BIT(TRANSCODER_D),				\
1335 	.__runtime_defaults.fbc_mask = BIT(INTEL_FBC_A) | BIT(INTEL_FBC_B),	\
1336 	.__runtime_defaults.has_dmc = 1,					\
1337 	.__runtime_defaults.has_dsc = 1,					\
1338 	.__runtime_defaults.has_hdcp = 1,					\
1339 	.__runtime_defaults.pipe_mask =						\
1340 		BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C) | BIT(PIPE_D),		\
1341 	.__runtime_defaults.port_mask = BIT(PORT_A) | BIT(PORT_B) |		\
1342 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4)
1343 
1344 static const struct intel_display_device_info xe_lpdp_display = {
1345 	XE_LPDP_FEATURES,
1346 };
1347 
1348 static const struct intel_display_device_info xe2_lpd_display = {
1349 	XE_LPDP_FEATURES,
1350 
1351 	.__runtime_defaults.fbc_mask =
1352 		BIT(INTEL_FBC_A) | BIT(INTEL_FBC_B) |
1353 		BIT(INTEL_FBC_C) | BIT(INTEL_FBC_D),
1354 	.__runtime_defaults.has_dbuf_overlap_detection = true,
1355 };
1356 
1357 static const struct intel_display_device_info xe2_hpd_display = {
1358 	XE_LPDP_FEATURES,
1359 	.__runtime_defaults.port_mask = BIT(PORT_A) |
1360 		BIT(PORT_TC1) | BIT(PORT_TC2) | BIT(PORT_TC3) | BIT(PORT_TC4),
1361 };
1362 
1363 static const u16 mtl_u_ids[] = {
1364 	INTEL_MTL_U_IDS(ID),
1365 	INTEL_ARL_U_IDS(ID),
1366 	0
1367 };
1368 
1369 /*
1370  * Do not initialize the .info member of the platform desc for GMD ID based
1371  * platforms. Their display will be probed automatically based on the IP version
1372  * reported by the hardware.
1373  */
1374 static const struct platform_desc mtl_desc = {
1375 	PLATFORM(meteorlake),
1376 	.subplatforms = (const struct subplatform_desc[]) {
1377 		{
1378 			SUBPLATFORM(meteorlake, u),
1379 			.pciidlist = mtl_u_ids,
1380 		},
1381 		{},
1382 	}
1383 };
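
/*
 * Resulting GMD ID flow (IP version taken from gmdid_display_map below):
 * intel_display_device_probe() matches e.g. an LNL PCI ID to lnl_desc,
 * finds .info == NULL, and probe_gmdid_display() reads GMD_ID_DISPLAY;
 * a readout decoding to version 20.00 selects xe2_lpd_display, while an
 * unrecognized version pair disables display entirely.
 */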
1384 
1385 static const struct platform_desc lnl_desc = {
1386 	PLATFORM(lunarlake),
1387 };
1388 
1389 static const struct platform_desc bmg_desc = {
1390 	PLATFORM(battlemage),
1391 	PLATFORM_GROUP(dgfx),
1392 };
1393 
1394 static const struct platform_desc ptl_desc = {
1395 	PLATFORM(pantherlake),
1396 };
1397 
1398 __diag_pop();
1399 
1400 /*
1401  * Separate detection for no display cases to keep the display id array simple.
1402  *
1403  * IVB Q requires subvendor and subdevice matching to differentiate from IVB D
1404  * GT2 server.
1405  */
1406 static bool has_no_display(struct pci_dev *pdev)
1407 {
1408 	static const struct pci_device_id ids[] = {
1409 		INTEL_IVB_Q_IDS(INTEL_VGA_DEVICE, 0),
1410 		{}
1411 	};
1412 
1413 	return pci_match_id(ids, pdev);
1414 }
1415 
1416 #define INTEL_DISPLAY_DEVICE(_id, _desc) { .devid = (_id), .desc = (_desc) }
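
/*
 * For instance, INTEL_DISPLAY_DEVICE(0x1234, &foo_desc) (made-up ID and
 * descriptor) expands to { .devid = 0x1234, .desc = &foo_desc }; the
 * INTEL_*_IDS() wrappers below stamp out one such entry per known PCI
 * device ID.
 */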
1417 
1418 static const struct {
1419 	u32 devid;
1420 	const struct platform_desc *desc;
1421 } intel_display_ids[] = {
1422 	INTEL_I830_IDS(INTEL_DISPLAY_DEVICE, &i830_desc),
1423 	INTEL_I845G_IDS(INTEL_DISPLAY_DEVICE, &i845_desc),
1424 	INTEL_I85X_IDS(INTEL_DISPLAY_DEVICE, &i85x_desc),
1425 	INTEL_I865G_IDS(INTEL_DISPLAY_DEVICE, &i865g_desc),
1426 	INTEL_I915G_IDS(INTEL_DISPLAY_DEVICE, &i915g_desc),
1427 	INTEL_I915GM_IDS(INTEL_DISPLAY_DEVICE, &i915gm_desc),
1428 	INTEL_I945G_IDS(INTEL_DISPLAY_DEVICE, &i945g_desc),
1429 	INTEL_I945GM_IDS(INTEL_DISPLAY_DEVICE, &i945gm_desc),
1430 	INTEL_I965G_IDS(INTEL_DISPLAY_DEVICE, &i965g_desc),
1431 	INTEL_G33_IDS(INTEL_DISPLAY_DEVICE, &g33_desc),
1432 	INTEL_I965GM_IDS(INTEL_DISPLAY_DEVICE, &i965gm_desc),
1433 	INTEL_GM45_IDS(INTEL_DISPLAY_DEVICE, &gm45_desc),
1434 	INTEL_G45_IDS(INTEL_DISPLAY_DEVICE, &g45_desc),
1435 	INTEL_PNV_G_IDS(INTEL_DISPLAY_DEVICE, &pnv_g_desc),
1436 	INTEL_PNV_M_IDS(INTEL_DISPLAY_DEVICE, &pnv_m_desc),
1437 	INTEL_ILK_D_IDS(INTEL_DISPLAY_DEVICE, &ilk_d_desc),
1438 	INTEL_ILK_M_IDS(INTEL_DISPLAY_DEVICE, &ilk_m_desc),
1439 	INTEL_SNB_D_IDS(INTEL_DISPLAY_DEVICE, &snb_d_desc),
1440 	INTEL_SNB_M_IDS(INTEL_DISPLAY_DEVICE, &snb_m_desc),
1441 	INTEL_IVB_D_IDS(INTEL_DISPLAY_DEVICE, &ivb_d_desc),
1442 	INTEL_IVB_M_IDS(INTEL_DISPLAY_DEVICE, &ivb_m_desc),
1443 	INTEL_HSW_IDS(INTEL_DISPLAY_DEVICE, &hsw_desc),
1444 	INTEL_VLV_IDS(INTEL_DISPLAY_DEVICE, &vlv_desc),
1445 	INTEL_BDW_IDS(INTEL_DISPLAY_DEVICE, &bdw_desc),
1446 	INTEL_CHV_IDS(INTEL_DISPLAY_DEVICE, &chv_desc),
1447 	INTEL_SKL_IDS(INTEL_DISPLAY_DEVICE, &skl_desc),
1448 	INTEL_BXT_IDS(INTEL_DISPLAY_DEVICE, &bxt_desc),
1449 	INTEL_GLK_IDS(INTEL_DISPLAY_DEVICE, &glk_desc),
1450 	INTEL_KBL_IDS(INTEL_DISPLAY_DEVICE, &kbl_desc),
1451 	INTEL_CFL_IDS(INTEL_DISPLAY_DEVICE, &cfl_desc),
1452 	INTEL_WHL_IDS(INTEL_DISPLAY_DEVICE, &cfl_desc),
1453 	INTEL_CML_IDS(INTEL_DISPLAY_DEVICE, &cml_desc),
1454 	INTEL_ICL_IDS(INTEL_DISPLAY_DEVICE, &icl_desc),
1455 	INTEL_EHL_IDS(INTEL_DISPLAY_DEVICE, &ehl_desc),
1456 	INTEL_JSL_IDS(INTEL_DISPLAY_DEVICE, &jsl_desc),
1457 	INTEL_TGL_IDS(INTEL_DISPLAY_DEVICE, &tgl_desc),
1458 	INTEL_DG1_IDS(INTEL_DISPLAY_DEVICE, &dg1_desc),
1459 	INTEL_RKL_IDS(INTEL_DISPLAY_DEVICE, &rkl_desc),
1460 	INTEL_ADLS_IDS(INTEL_DISPLAY_DEVICE, &adl_s_desc),
1461 	INTEL_RPLS_IDS(INTEL_DISPLAY_DEVICE, &adl_s_desc),
1462 	INTEL_ADLP_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1463 	INTEL_ADLN_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1464 	INTEL_RPLU_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1465 	INTEL_RPLP_IDS(INTEL_DISPLAY_DEVICE, &adl_p_desc),
1466 	INTEL_DG2_IDS(INTEL_DISPLAY_DEVICE, &dg2_desc),
1467 	INTEL_ARL_IDS(INTEL_DISPLAY_DEVICE, &mtl_desc),
1468 	INTEL_MTL_IDS(INTEL_DISPLAY_DEVICE, &mtl_desc),
1469 	INTEL_LNL_IDS(INTEL_DISPLAY_DEVICE, &lnl_desc),
1470 	INTEL_BMG_IDS(INTEL_DISPLAY_DEVICE, &bmg_desc),
1471 	INTEL_PTL_IDS(INTEL_DISPLAY_DEVICE, &ptl_desc),
1472 };
1473 
1474 static const struct {
1475 	u16 ver;
1476 	u16 rel;
1477 	const struct intel_display_device_info *display;
1478 } gmdid_display_map[] = {
1479 	{ 14,  0, &xe_lpdp_display },
1480 	{ 14,  1, &xe2_hpd_display },
1481 	{ 20,  0, &xe2_lpd_display },
1482 	{ 30,  0, &xe2_lpd_display },
1483 	{ 30,  2, &xe2_lpd_display },
1484 };
1485 
1486 static const struct intel_display_device_info *
1487 probe_gmdid_display(struct intel_display *display, struct intel_display_ip_ver *ip_ver)
1488 {
1489 	struct pci_dev *pdev = to_pci_dev(display->drm->dev);
1490 	struct intel_display_ip_ver gmd_id;
1491 	void __iomem *addr;
1492 	u32 val;
1493 	int i;
1494 
1495 	addr = pci_iomap_range(pdev, 0, i915_mmio_reg_offset(GMD_ID_DISPLAY), sizeof(u32));
1496 	if (!addr) {
1497 		drm_err(display->drm,
1498 			"Cannot map MMIO BAR to read display GMD_ID\n");
1499 		return NULL;
1500 	}
1501 
1502 	val = ioread32(addr);
1503 	pci_iounmap(pdev, addr);
1504 
1505 	if (val == 0) {
1506 		drm_dbg_kms(display->drm, "Device doesn't have display\n");
1507 		return NULL;
1508 	}
1509 
1510 	gmd_id.ver = REG_FIELD_GET(GMD_ID_ARCH_MASK, val);
1511 	gmd_id.rel = REG_FIELD_GET(GMD_ID_RELEASE_MASK, val);
1512 	gmd_id.step = REG_FIELD_GET(GMD_ID_STEP, val);
1513 
1514 	for (i = 0; i < ARRAY_SIZE(gmdid_display_map); i++) {
1515 		if (gmd_id.ver == gmdid_display_map[i].ver &&
1516 		    gmd_id.rel == gmdid_display_map[i].rel) {
1517 			*ip_ver = gmd_id;
1518 			return gmdid_display_map[i].display;
1519 		}
1520 	}
1521 
1522 	drm_err(display->drm,
1523 		"Unrecognized display IP version %d.%02d; disabling display.\n",
1524 		gmd_id.ver, gmd_id.rel);
1525 	return NULL;
1526 }
1527 
1528 static const struct platform_desc *find_platform_desc(struct pci_dev *pdev)
1529 {
1530 	int i;
1531 
1532 	for (i = 0; i < ARRAY_SIZE(intel_display_ids); i++) {
1533 		if (intel_display_ids[i].devid == pdev->device)
1534 			return intel_display_ids[i].desc;
1535 	}
1536 
1537 	return NULL;
1538 }
1539 
1540 static const struct subplatform_desc *
1541 find_subplatform_desc(struct pci_dev *pdev, const struct platform_desc *desc)
1542 {
1543 	const struct subplatform_desc *sp;
1544 	const u16 *id;
1545 
1546 	for (sp = desc->subplatforms; sp && sp->pciidlist; sp++)
1547 		for (id = sp->pciidlist; *id; id++)
1548 			if (*id == pdev->device)
1549 				return sp;
1550 
1551 	return NULL;
1552 }
1553 
1554 static enum intel_step get_pre_gmdid_step(struct intel_display *display,
1555 					  const struct stepping_desc *main,
1556 					  const struct stepping_desc *sub)
1557 {
1558 	struct pci_dev *pdev = to_pci_dev(display->drm->dev);
1559 	const enum intel_step *map = main->map;
1560 	int size = main->size;
1561 	int revision = pdev->revision;
1562 	enum intel_step step;
1563 
1564 	/* subplatform stepping info trumps main platform info */
1565 	if (sub && sub->map && sub->size) {
1566 		map = sub->map;
1567 		size = sub->size;
1568 	}
1569 
1570 	/* not all platforms define steppings, and it's fine */
1571 	if (!map || !size)
1572 		return STEP_NONE;
1573 
1574 	if (revision < size && map[revision] != STEP_NONE) {
1575 		step = map[revision];
1576 	} else {
1577 		drm_warn(display->drm, "Unknown revision 0x%02x\n", revision);
1578 
1579 		/*
1580 		 * If we hit a gap in the revision to step map, use the information
1581 		 * for the next revision.
1582 		 *
1583 		 * This may be wrong in all sorts of ways, especially if the
1584 		 * steppings in the array are not monotonically increasing, but
1585 		 * it's better than defaulting to 0.
1586 		 */
1587 		while (revision < size && map[revision] == STEP_NONE)
1588 			revision++;
1589 
1590 		if (revision < size) {
1591 			drm_dbg_kms(display->drm, "Using display stepping for revision 0x%02x\n",
1592 				    revision);
1593 			step = map[revision];
1594 		} else {
1595 			drm_dbg_kms(display->drm, "Using future display stepping\n");
1596 			step = STEP_FUTURE;
1597 		}
1598 	}
1599 
1600 	drm_WARN_ON(display->drm, step == STEP_NONE);
1601 
1602 	return step;
1603 }
1604 
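/*
 * Worked example (hypothetical revision ID): with skl_steppings above, a
 * part reporting revision 0x8 hits a gap, the loop advances to the next
 * populated slot, 0x9, and reports STEP_J0; a revision beyond the end of
 * the map reports STEP_FUTURE instead.
 */
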
1605 /* Size of the entire bitmap, not the number of platforms */
1606 static unsigned int display_platforms_num_bits(void)
1607 {
1608 	return sizeof(((struct intel_display_platforms *)0)->bitmap) * BITS_PER_BYTE;
1609 }
1610 
1611 /* Number of platform bits set */
1612 static unsigned int display_platforms_weight(const struct intel_display_platforms *p)
1613 {
1614 	return bitmap_weight(p->bitmap, display_platforms_num_bits());
1615 }
1616 
1617 /* Merge the subplatform information from src to dst */
1618 static void display_platforms_or(struct intel_display_platforms *dst,
1619 				 const struct intel_display_platforms *src)
1620 {
1621 	bitmap_or(dst->bitmap, dst->bitmap, src->bitmap, display_platforms_num_bits());
1622 }
1623 
1624 struct intel_display *intel_display_device_probe(struct pci_dev *pdev)
1625 {
1626 	struct intel_display *display;
1627 	const struct intel_display_device_info *info;
1628 	struct intel_display_ip_ver ip_ver = {};
1629 	const struct platform_desc *desc;
1630 	const struct subplatform_desc *subdesc;
1631 	enum intel_step step;
1632 
1633 	display = kzalloc(sizeof(*display), GFP_KERNEL);
1634 	if (!display)
1635 		return ERR_PTR(-ENOMEM);
1636 
1637 	/* Add drm device backpointer as early as possible. */
1638 	display->drm = pci_get_drvdata(pdev);
1639 
1640 	intel_display_params_copy(&display->params);
1641 
1642 	if (has_no_display(pdev)) {
1643 		drm_dbg_kms(display->drm, "Device doesn't have display\n");
1644 		goto no_display;
1645 	}
1646 
1647 	desc = find_platform_desc(pdev);
1648 	if (!desc) {
1649 		drm_dbg_kms(display->drm,
1650 			    "Unknown device ID %04x; disabling display.\n",
1651 			    pdev->device);
1652 		goto no_display;
1653 	}
1654 
1655 	info = desc->info;
1656 	if (!info)
1657 		info = probe_gmdid_display(display, &ip_ver);
1658 	if (!info)
1659 		goto no_display;
1660 
1661 	DISPLAY_INFO(display) = info;
1662 
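	/*
	 * Seed the mutable runtime info from the static per-platform
	 * defaults; runtime init may later clear features that are fused
	 * off in hardware.
	 */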
1663 	memcpy(DISPLAY_RUNTIME_INFO(display),
1664 	       &DISPLAY_INFO(display)->__runtime_defaults,
1665 	       sizeof(*DISPLAY_RUNTIME_INFO(display)));
1666 
1667 	drm_WARN_ON(display->drm, !desc->name ||
1668 		    !display_platforms_weight(&desc->platforms));
1669 
1670 	display->platform = desc->platforms;
1671 
1672 	subdesc = find_subplatform_desc(pdev, desc);
1673 	if (subdesc) {
1674 		drm_WARN_ON(display->drm, !subdesc->name ||
1675 			    !display_platforms_weight(&subdesc->platforms));
1676 
1677 		display_platforms_or(&display->platform, &subdesc->platforms);
1678 
1679 		/* Ensure platform and subplatform are distinct */
1680 		drm_WARN_ON(display->drm,
1681 			    display_platforms_weight(&display->platform) !=
1682 			    display_platforms_weight(&desc->platforms) +
1683 			    display_platforms_weight(&subdesc->platforms));
1684 	}
1685 
1686 	if (ip_ver.ver || ip_ver.rel || ip_ver.step) {
1687 		DISPLAY_RUNTIME_INFO(display)->ip = ip_ver;
1688 		step = STEP_A0 + ip_ver.step;
1689 		if (step > STEP_FUTURE) {
1690 			drm_dbg_kms(display->drm, "Using future display stepping\n");
1691 			step = STEP_FUTURE;
1692 		}
1693 	} else {
1694 		step = get_pre_gmdid_step(display, &desc->step_info,
1695 					  subdesc ? &subdesc->step_info : NULL);
1696 	}
1697 
1698 	DISPLAY_RUNTIME_INFO(display)->step = step;
1699 
1700 	drm_info(display->drm, "Found %s%s%s (device ID %04x) %s display version %u.%02u stepping %s\n",
1701 		 desc->name, subdesc ? "/" : "", subdesc ? subdesc->name : "",
1702 		 pdev->device, display->platform.dgfx ? "discrete" : "integrated",
1703 		 DISPLAY_RUNTIME_INFO(display)->ip.ver,
1704 		 DISPLAY_RUNTIME_INFO(display)->ip.rel,
1705 		 step != STEP_NONE ? intel_step_name(step) : "N/A");
1706 
1707 	return display;
1708 
1709 no_display:
1710 	DISPLAY_INFO(display) = &no_display;
1711 
1712 	return display;
1713 }
1714 
1715 void intel_display_device_remove(struct intel_display *display)
1716 {
1717 	if (!display)
1718 		return;
1719 
1720 	intel_display_params_free(&display->params);
1721 	kfree(display);
1722 }
1723 
1724 static void __intel_display_device_info_runtime_init(struct intel_display *display)
1725 {
1726 	struct intel_display_runtime_info *display_runtime = DISPLAY_RUNTIME_INFO(display);
1727 	enum pipe pipe;
1728 
1729 	BUILD_BUG_ON(BITS_PER_TYPE(display_runtime->pipe_mask) < I915_MAX_PIPES);
1730 	BUILD_BUG_ON(BITS_PER_TYPE(display_runtime->cpu_transcoder_mask) < I915_MAX_TRANSCODERS);
1731 	BUILD_BUG_ON(BITS_PER_TYPE(display_runtime->port_mask) < I915_MAX_PORTS);
1732 
1733 	/* This covers both ULT and ULX */
1734 	if (display->platform.haswell_ult || display->platform.broadwell_ult)
1735 		display_runtime->port_mask &= ~BIT(PORT_D);
1736 
1737 	if (display->platform.icelake_port_f)
1738 		display_runtime->port_mask |= BIT(PORT_F);
1739 
1740 	/* Wa_14011765242: adl-s A0,A1 */
1741 	if (display->platform.alderlake_s && IS_DISPLAY_STEP(display, STEP_A0, STEP_A2))
1742 		for_each_pipe(display, pipe)
1743 			display_runtime->num_scalers[pipe] = 0;
1744 	else if (DISPLAY_VER(display) >= 11) {
1745 		for_each_pipe(display, pipe)
1746 			display_runtime->num_scalers[pipe] = 2;
1747 	} else if (DISPLAY_VER(display) >= 9) {
1748 		display_runtime->num_scalers[PIPE_A] = 2;
1749 		display_runtime->num_scalers[PIPE_B] = 2;
1750 		display_runtime->num_scalers[PIPE_C] = 1;
1751 	}
1752 
1753 	if (DISPLAY_VER(display) >= 13 || HAS_D12_PLANE_MINIMIZATION(display))
1754 		for_each_pipe(display, pipe)
1755 			display_runtime->num_sprites[pipe] = 4;
1756 	else if (DISPLAY_VER(display) >= 11)
1757 		for_each_pipe(display, pipe)
1758 			display_runtime->num_sprites[pipe] = 6;
1759 	else if (DISPLAY_VER(display) == 10)
1760 		for_each_pipe(display, pipe)
1761 			display_runtime->num_sprites[pipe] = 3;
1762 	else if (display->platform.broxton) {
1763 		/*
1764 		 * Skylake and Broxton currently don't expose the topmost plane as its
1765 		 * use is exclusive with the legacy cursor and we only want to expose
1766 		 * one of those, not both. Until we can safely expose the topmost plane
1767 		 * as a DRM_PLANE_TYPE_CURSOR with all the features exposed/supported,
1768 		 * we don't expose the topmost plane at all to prevent ABI breakage
1769 		 * down the line.
1770 		 */
1771 
1772 		display_runtime->num_sprites[PIPE_A] = 2;
1773 		display_runtime->num_sprites[PIPE_B] = 2;
1774 		display_runtime->num_sprites[PIPE_C] = 1;
1775 	} else if (display->platform.valleyview || display->platform.cherryview) {
1776 		for_each_pipe(display, pipe)
1777 			display_runtime->num_sprites[pipe] = 2;
1778 	} else if (DISPLAY_VER(display) >= 5 || display->platform.g4x) {
1779 		for_each_pipe(display, pipe)
1780 			display_runtime->num_sprites[pipe] = 1;
1781 	}
1782 
1783 	if ((display->platform.dgfx || DISPLAY_VER(display) >= 14) &&
1784 	    !(intel_de_read(display, GU_CNTL_PROTECTED) & DEPRESENT)) {
1785 		drm_info(display->drm, "Display not present, disabling\n");
1786 		goto display_fused_off;
1787 	}
1788 
1789 	if (IS_DISPLAY_VER(display, 7, 8) && HAS_PCH_SPLIT(display)) {
1790 		u32 fuse_strap = intel_de_read(display, FUSE_STRAP);
1791 		u32 sfuse_strap = intel_de_read(display, SFUSE_STRAP);
1792 
1793 		/*
1794 		 * SFUSE_STRAP is supposed to have a bit signalling the display
1795 		 * is fused off. Unfortunately it seems that, at least in
1796 		 * certain cases, fused off display means that PCH display
1797 		 * reads don't land anywhere. In that case, we read 0s.
1798 		 *
1799 		 * On CPT/PPT, we can detect this case as SFUSE_STRAP_FUSE_LOCK
1800 		 * should be set when taking over after the firmware.
1801 		 */
1802 		if (fuse_strap & ILK_INTERNAL_DISPLAY_DISABLE ||
1803 		    sfuse_strap & SFUSE_STRAP_DISPLAY_DISABLED ||
1804 		    (HAS_PCH_CPT(display) &&
1805 		     !(sfuse_strap & SFUSE_STRAP_FUSE_LOCK))) {
1806 			drm_info(display->drm,
1807 				 "Display fused off, disabling\n");
1808 			goto display_fused_off;
1809 		} else if (fuse_strap & IVB_PIPE_C_DISABLE) {
1810 			drm_info(display->drm, "PipeC fused off\n");
1811 			display_runtime->pipe_mask &= ~BIT(PIPE_C);
1812 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_C);
1813 		}
1814 	} else if (DISPLAY_VER(display) >= 9) {
1815 		u32 dfsm = intel_de_read(display, SKL_DFSM);
1816 
1817 		if (dfsm & SKL_DFSM_PIPE_A_DISABLE) {
1818 			display_runtime->pipe_mask &= ~BIT(PIPE_A);
1819 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_A);
1820 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_A);
1821 		}
1822 		if (dfsm & SKL_DFSM_PIPE_B_DISABLE) {
1823 			display_runtime->pipe_mask &= ~BIT(PIPE_B);
1824 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_B);
1825 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_B);
1826 		}
1827 		if (dfsm & SKL_DFSM_PIPE_C_DISABLE) {
1828 			display_runtime->pipe_mask &= ~BIT(PIPE_C);
1829 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_C);
1830 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_C);
1831 		}
1832 
1833 		if (DISPLAY_VER(display) >= 12 &&
1834 		    (dfsm & TGL_DFSM_PIPE_D_DISABLE)) {
1835 			display_runtime->pipe_mask &= ~BIT(PIPE_D);
1836 			display_runtime->cpu_transcoder_mask &= ~BIT(TRANSCODER_D);
1837 			display_runtime->fbc_mask &= ~BIT(INTEL_FBC_D);
1838 		}
1839 
1840 		if (!display_runtime->pipe_mask)
1841 			goto display_fused_off;
1842 
1843 		if (dfsm & SKL_DFSM_DISPLAY_HDCP_DISABLE)
1844 			display_runtime->has_hdcp = 0;
1845 
1846 		if (display->platform.dg2 || DISPLAY_VER(display) < 13) {
1847 			if (dfsm & SKL_DFSM_DISPLAY_PM_DISABLE)
1848 				display_runtime->fbc_mask = 0;
1849 		}
1850 
1851 		if (DISPLAY_VER(display) >= 11 && (dfsm & ICL_DFSM_DMC_DISABLE))
1852 			display_runtime->has_dmc = 0;
1853 
1854 		if (IS_DISPLAY_VER(display, 10, 12) &&
1855 		    (dfsm & GLK_DFSM_DISPLAY_DSC_DISABLE))
1856 			display_runtime->has_dsc = 0;
1857 
1858 		if (DISPLAY_VER(display) >= 20 &&
1859 		    (dfsm & XE2LPD_DFSM_DBUF_OVERLAP_DISABLE))
1860 			display_runtime->has_dbuf_overlap_detection = false;
1861 	}
1862 
1863 	if (DISPLAY_VER(display) >= 20) {
1864 		u32 cap = intel_de_read(display, XE2LPD_DE_CAP);
1865 
1866 		if (REG_FIELD_GET(XE2LPD_DE_CAP_DSC_MASK, cap) ==
1867 		    XE2LPD_DE_CAP_DSC_REMOVED)
1868 			display_runtime->has_dsc = 0;
1869 
1870 		if (REG_FIELD_GET(XE2LPD_DE_CAP_SCALER_MASK, cap) ==
1871 		    XE2LPD_DE_CAP_SCALER_SINGLE) {
1872 			for_each_pipe(display, pipe)
1873 				if (display_runtime->num_scalers[pipe])
1874 					display_runtime->num_scalers[pipe] = 1;
1875 		}
1876 	}
1877 
1878 	if (DISPLAY_VER(display) >= 30)
1879 		display_runtime->edp_typec_support =
1880 			intel_de_read(display, PICA_PHY_CONFIG_CONTROL) & EDP_ON_TYPEC;
1881 
1882 	display_runtime->rawclk_freq = intel_read_rawclk(display);
1883 	drm_dbg_kms(display->drm, "rawclk rate: %d kHz\n",
1884 		    display_runtime->rawclk_freq);
1885 
1886 	return;
1887 
1888 display_fused_off:
1889 	memset(display_runtime, 0, sizeof(*display_runtime));
1890 }
1891 
1892 void intel_display_device_info_runtime_init(struct intel_display *display)
1893 {
1894 	if (HAS_DISPLAY(display))
1895 		__intel_display_device_info_runtime_init(display);
1896 
1897 	/* Display may have been disabled by runtime init */
1898 	if (!HAS_DISPLAY(display)) {
1899 		display->drm->driver_features &= ~(DRIVER_MODESET | DRIVER_ATOMIC);
1900 		display->info.__device_info = &no_display;
1901 	}
1902 
1903 	/* Disable nuclear pageflip by default on pre-g4x */
1904 	if (!display->params.nuclear_pageflip &&
1905 	    DISPLAY_VER(display) < 5 && !display->platform.g4x)
1906 		display->drm->driver_features &= ~DRIVER_ATOMIC;
1907 }
1908 
1909 void intel_display_device_info_print(const struct intel_display_device_info *info,
1910 				     const struct intel_display_runtime_info *runtime,
1911 				     struct drm_printer *p)
1912 {
1913 	if (runtime->ip.rel)
1914 		drm_printf(p, "display version: %u.%02u\n",
1915 			   runtime->ip.ver,
1916 			   runtime->ip.rel);
1917 	else
1918 		drm_printf(p, "display version: %u\n",
1919 			   runtime->ip.ver);
1920 
1921 	drm_printf(p, "display stepping: %s\n", intel_step_name(runtime->step));
1922 
1923 #define PRINT_FLAG(name) drm_printf(p, "%s: %s\n", #name, str_yes_no(info->name))
1924 	DEV_INFO_DISPLAY_FOR_EACH_FLAG(PRINT_FLAG);
1925 #undef PRINT_FLAG
1926 
1927 	drm_printf(p, "has_hdcp: %s\n", str_yes_no(runtime->has_hdcp));
1928 	drm_printf(p, "has_dmc: %s\n", str_yes_no(runtime->has_dmc));
1929 	drm_printf(p, "has_dsc: %s\n", str_yes_no(runtime->has_dsc));
1930 
1931 	drm_printf(p, "rawclk rate: %u kHz\n", runtime->rawclk_freq);
1932 }
1933 
1934 /*
1935  * Assuming the device has display hardware, should it be enabled?
1936  *
1937  * It's an error to call this function if the device does not have display
1938  * hardware.
1939  *
1940  * Disabling display means taking over the display hardware, putting it to
1941  * sleep, and preventing connectors from being connected via any means.
1942  */
1943 bool intel_display_device_enabled(struct intel_display *display)
1944 {
1945 	/* Only valid when HAS_DISPLAY() is true */
1946 	drm_WARN_ON(display->drm, !HAS_DISPLAY(display));
1947 
1948 	return !display->params.disable_display &&
1949 		!intel_opregion_headless_sku(display);
1950 }
1951 }
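
/*
 * Hedged usage sketch (the caller and ordering are assumed, not taken from
 * this file): a driver probe path would check HAS_DISPLAY() first and only
 * then consult this helper, roughly
 *
 *	if (HAS_DISPLAY(display) && !intel_display_device_enabled(display))
 *		...continue without enabling display...
 */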