// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 */

#include "i915_drv.h"
#include "intel_dram.h"

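/* Per-DIMM geometry: total size in GB, device width in bits, and rank count. */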
struct dram_dimm_info {
	u8 size, width, ranks;
};

struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s;
	u8 ranks;
	bool is_16gb_dimm;
};

#define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type

static const char *intel_dram_type_str(enum intel_dram_type type)
{
	static const char * const str[] = {
		DRAM_TYPE_STR(UNKNOWN),
		DRAM_TYPE_STR(DDR3),
		DRAM_TYPE_STR(DDR4),
		DRAM_TYPE_STR(LPDDR3),
		DRAM_TYPE_STR(LPDDR4),
	};

	if (type >= ARRAY_SIZE(str))
		type = INTEL_DRAM_UNKNOWN;

	return str[type];
}

#undef DRAM_TYPE_STR

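/* Each rank drives a 64-bit channel, so devices per DIMM = ranks * 64 / device width. */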
static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
{
	return dimm->ranks * 64 / (dimm->width ?: 1);
}

/* Returns total GB for the whole DIMM */
static int skl_get_dimm_size(u16 val)
{
	return val & SKL_DRAM_SIZE_MASK;
}

static int skl_get_dimm_width(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	switch (val & SKL_DRAM_WIDTH_MASK) {
	case SKL_DRAM_WIDTH_X8:
	case SKL_DRAM_WIDTH_X16:
	case SKL_DRAM_WIDTH_X32:
		val = (val & SKL_DRAM_WIDTH_MASK) >> SKL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int skl_get_dimm_ranks(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	val = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;

	return val + 1;
}

/* Returns total GB for the whole DIMM */
static int cnl_get_dimm_size(u16 val)
{
	return (val & CNL_DRAM_SIZE_MASK) / 2;
}

static int cnl_get_dimm_width(u16 val)
{
	if (cnl_get_dimm_size(val) == 0)
		return 0;

	switch (val & CNL_DRAM_WIDTH_MASK) {
	case CNL_DRAM_WIDTH_X8:
	case CNL_DRAM_WIDTH_X16:
	case CNL_DRAM_WIDTH_X32:
		val = (val & CNL_DRAM_WIDTH_MASK) >> CNL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int cnl_get_dimm_ranks(u16 val)
{
	if (cnl_get_dimm_size(val) == 0)
		return 0;

	val = (val & CNL_DRAM_RANK_MASK) >> CNL_DRAM_RANK_SHIFT;

	return val + 1;
}

static bool
skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
{
	/* Convert total GB to Gb per DRAM device */
	return 8 * dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
}

static void
skl_dram_get_dimm_info(struct drm_i915_private *i915,
		       struct dram_dimm_info *dimm,
		       int channel, char dimm_name, u16 val)
{
	if (INTEL_GEN(i915) >= 10) {
		dimm->size = cnl_get_dimm_size(val);
		dimm->width = cnl_get_dimm_width(val);
		dimm->ranks = cnl_get_dimm_ranks(val);
	} else {
		dimm->size = skl_get_dimm_size(val);
		dimm->width = skl_get_dimm_width(val);
		dimm->ranks = skl_get_dimm_ranks(val);
	}

	drm_dbg_kms(&i915->drm,
		    "CH%u DIMM %c size: %u GB, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    yesno(skl_is_16gb_dimm(dimm)));
}

static int
skl_dram_get_channel_info(struct drm_i915_private *i915,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	skl_dram_get_dimm_info(i915, &ch->dimm_l,
			       channel, 'L', val & 0xffff);
	skl_dram_get_dimm_info(i915, &ch->dimm_s,
			       channel, 'S', val >> 16);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

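	/* Two populated single-rank DIMMs on one channel behave like a dual-rank channel. */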
	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
		    channel, ch->ranks, yesno(ch->is_16gb_dimm));

	return 0;
}

static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
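	/* Symmetric means both channels match and, within a channel, the L and S DIMMs match (or S is absent). */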
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}

static int
skl_dram_get_channels_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(&i915->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	/*
	 * If any channel is single rank, the worst case output will be the
	 * same as for single rank memory, so treat the memory as single rank.
	 */
	if (ch0.ranks == 1 || ch1.ranks == 1)
		dram_info->ranks = 1;
	else
		dram_info->ranks = max(ch0.ranks, ch1.ranks);

	if (dram_info->ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	dram_info->is_16gb_dimm = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
		    yesno(dram_info->symmetric_memory));

	return 0;
}

static enum intel_dram_type
skl_get_dram_type(struct drm_i915_private *i915)
{
	u32 val;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);

	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
	case SKL_DRAM_DDR_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case SKL_DRAM_DDR_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case SKL_DRAM_DDR_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case SKL_DRAM_DDR_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

static int
skl_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	u32 mem_freq_khz, val;
	int ret;

	dram_info->type = skl_get_dram_type(i915);
	drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	ret = skl_dram_get_channels_info(i915);
	if (ret)
		return ret;

	val = intel_uncore_read(&i915->uncore,
				SKL_MC_BIOS_DATA_0_0_0_MCHBAR_PCU);
	mem_freq_khz = DIV_ROUND_UP((val & SKL_REQ_DATA_MASK) *
				    SKL_MEMORY_FREQ_MULTIPLIER_HZ, 1000);

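	/* Presumably 8 bytes (64 bits) per channel per transfer: kHz * 8 * channels gives kBps. */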
	dram_info->bandwidth_kbps = dram_info->num_channels *
		mem_freq_khz * 8;

	if (dram_info->bandwidth_kbps == 0) {
		drm_info(&i915->drm,
			 "Couldn't get system memory bandwidth\n");
		return -EINVAL;
	}

	dram_info->valid = true;
	return 0;
}

/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
	switch (val & BXT_DRAM_SIZE_MASK) {
	case BXT_DRAM_SIZE_4GBIT:
		return 4;
	case BXT_DRAM_SIZE_6GBIT:
		return 6;
	case BXT_DRAM_SIZE_8GBIT:
		return 8;
	case BXT_DRAM_SIZE_12GBIT:
		return 12;
	case BXT_DRAM_SIZE_16GBIT:
		return 16;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int bxt_get_dimm_width(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;

	return 8 << val;
}

static int bxt_get_dimm_ranks(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	switch (val & BXT_DRAM_RANK_MASK) {
	case BXT_DRAM_RANK_SINGLE:
		return 1;
	case BXT_DRAM_RANK_DUAL:
		return 2;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return INTEL_DRAM_UNKNOWN;

	switch (val & BXT_DRAM_TYPE_MASK) {
	case BXT_DRAM_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case BXT_DRAM_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case BXT_DRAM_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case BXT_DRAM_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * GB to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm) / 8;
}

static int bxt_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	u32 dram_channels;
	u32 mem_freq_khz, val;
	u8 num_active_channels;
	int i;

	val = intel_uncore_read(&i915->uncore, BXT_P_CR_MC_BIOS_REQ_0_0_0);
	mem_freq_khz = DIV_ROUND_UP((val & BXT_REQ_DATA_MASK) *
				    BXT_MEMORY_FREQ_MULTIPLIER_HZ, 1000);

	dram_channels = val & BXT_DRAM_CHANNEL_ACTIVE_MASK;
	num_active_channels = hweight32(dram_channels);

	/* Each active bit represents a 4-byte-wide channel */
	dram_info->bandwidth_kbps = (mem_freq_khz * num_active_channels * 4);

	if (dram_info->bandwidth_kbps == 0) {
		drm_info(&i915->drm,
			 "Couldn't get system memory bandwidth\n");
		return -EINVAL;
	}

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each DIMM.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(&i915->drm,
			    "CH%u DIMM size: %u GB, width: X%u, ranks: %u, type: %s\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks,
			    intel_dram_type_str(type));

		/*
		 * If any channel is single rank, the worst case output will
		 * be the same as for single rank memory, so treat the memory
		 * as single rank.
		 */
		if (dram_info->ranks == 0)
			dram_info->ranks = dimm.ranks;
		else if (dimm.ranks == 1)
			dram_info->ranks = 1;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || dram_info->ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	dram_info->valid = true;

	return 0;
}

void intel_dram_detect(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	int ret;

	/*
	 * Assume 16Gb DIMMs are present until proven otherwise.
	 * This is only used for the level 0 watermark latency
	 * w/a which does not apply to bxt/glk.
	 */
	dram_info->is_16gb_dimm = !IS_GEN9_LP(i915);

	if (INTEL_GEN(i915) < 9 || !HAS_DISPLAY(i915))
		return;

	if (IS_GEN9_LP(i915))
		ret = bxt_get_dram_info(i915);
	else
		ret = skl_get_dram_info(i915);
	if (ret)
		return;

	drm_dbg_kms(&i915->drm, "DRAM bandwidth: %u kBps, channels: %u\n",
		    dram_info->bandwidth_kbps, dram_info->num_channels);

	drm_dbg_kms(&i915->drm, "DRAM ranks: %u, 16Gb DIMMs: %s\n",
		    dram_info->ranks, yesno(dram_info->is_16gb_dimm));
}

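/* Decode eDRAM size (in MB) from the capability register: banks * ways * sets. */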
static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
{
	static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
	static const u8 sets[4] = { 1, 1, 2, 2 };

	return EDRAM_NUM_BANKS(cap) *
		ways[EDRAM_WAYS_IDX(cap)] *
		sets[EDRAM_SETS_IDX(cap)];
}

void intel_dram_edram_detect(struct drm_i915_private *i915)
{
	u32 edram_cap = 0;

	if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || INTEL_GEN(i915) >= 9))
		return;

	edram_cap = __raw_uncore_read32(&i915->uncore, HSW_EDRAM_CAP);

	/* NB: We can't write IDICR yet because we don't have gt funcs set up */

	if (!(edram_cap & EDRAM_ENABLED))
		return;

	/*
	 * The capability bits needed for the size calculation are not
	 * present before gen9, so always report 128MB.
	 */
	if (INTEL_GEN(i915) < 9)
		i915->edram_size_mb = 128;
	else
		i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);

	drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
}