• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  */
5 
6 #include "i915_drv.h"
7 #include "intel_dram.h"
8 #include "intel_sideband.h"
9 
10 struct dram_dimm_info {
11 	u16 size;
12 	u8 width, ranks;
13 };
14 
15 struct dram_channel_info {
16 	struct dram_dimm_info dimm_l, dimm_s;
17 	u8 ranks;
18 	bool is_16gb_dimm;
19 };
20 
21 #define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type
22 
intel_dram_type_str(enum intel_dram_type type)23 static const char *intel_dram_type_str(enum intel_dram_type type)
24 {
25 	static const char * const str[] = {
26 		DRAM_TYPE_STR(UNKNOWN),
27 		DRAM_TYPE_STR(DDR3),
28 		DRAM_TYPE_STR(DDR4),
29 		DRAM_TYPE_STR(LPDDR3),
30 		DRAM_TYPE_STR(LPDDR4),
31 	};
32 
33 	if (type >= ARRAY_SIZE(str))
34 		type = INTEL_DRAM_UNKNOWN;
35 
36 	return str[type];
37 }
38 
39 #undef DRAM_TYPE_STR
40 
intel_dimm_num_devices(const struct dram_dimm_info * dimm)41 static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
42 {
43 	return dimm->ranks * 64 / (dimm->width ?: 1);
44 }
45 
46 /* Returns total Gb for the whole DIMM */
skl_get_dimm_size(u16 val)47 static int skl_get_dimm_size(u16 val)
48 {
49 	return (val & SKL_DRAM_SIZE_MASK) * 8;
50 }
51 
/* Decode the DIMM device width (in bits) from the SKL MAD_DIMM register. */
static int skl_get_dimm_width(u16 val)
{
	u16 width = val & SKL_DRAM_WIDTH_MASK;

	/* An empty slot has no meaningful width. */
	if (skl_get_dimm_size(val) == 0)
		return 0;

	switch (width) {
	case SKL_DRAM_WIDTH_X8:
	case SKL_DRAM_WIDTH_X16:
	case SKL_DRAM_WIDTH_X32:
		/* Field encodes log2(width / 8). */
		return 8 << (width >> SKL_DRAM_WIDTH_SHIFT);
	default:
		MISSING_CASE(val);
		return 0;
	}
}
68 
/* Decode the DIMM rank count from the SKL MAD_DIMM register. */
static int skl_get_dimm_ranks(u16 val)
{
	u16 ranks;

	if (skl_get_dimm_size(val) == 0)
		return 0;

	/* Field is zero based: 0 -> 1 rank, 1 -> 2 ranks. */
	ranks = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;

	return ranks + 1;
}
78 
79 /* Returns total Gb for the whole DIMM */
icl_get_dimm_size(u16 val)80 static int icl_get_dimm_size(u16 val)
81 {
82 	return (val & ICL_DRAM_SIZE_MASK) * 8 / 2;
83 }
84 
/* Decode the DIMM device width (in bits) from the ICL MAD_DIMM register. */
static int icl_get_dimm_width(u16 val)
{
	u16 width = val & ICL_DRAM_WIDTH_MASK;

	/* An empty slot has no meaningful width. */
	if (icl_get_dimm_size(val) == 0)
		return 0;

	switch (width) {
	case ICL_DRAM_WIDTH_X8:
	case ICL_DRAM_WIDTH_X16:
	case ICL_DRAM_WIDTH_X32:
		/* Field encodes log2(width / 8). */
		return 8 << (width >> ICL_DRAM_WIDTH_SHIFT);
	default:
		MISSING_CASE(val);
		return 0;
	}
}
101 
/* Decode the DIMM rank count from the ICL MAD_DIMM register. */
static int icl_get_dimm_ranks(u16 val)
{
	u16 ranks;

	if (icl_get_dimm_size(val) == 0)
		return 0;

	/* Field is zero based: 0 -> 1 rank, 1 -> 2 ranks. */
	ranks = (val & ICL_DRAM_RANK_MASK) >> ICL_DRAM_RANK_SHIFT;

	return ranks + 1;
}
111 
112 static bool
skl_is_16gb_dimm(const struct dram_dimm_info * dimm)113 skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
114 {
115 	/* Convert total Gb to Gb per DRAM device */
116 	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
117 }
118 
119 static void
skl_dram_get_dimm_info(struct drm_i915_private * i915,struct dram_dimm_info * dimm,int channel,char dimm_name,u16 val)120 skl_dram_get_dimm_info(struct drm_i915_private *i915,
121 		       struct dram_dimm_info *dimm,
122 		       int channel, char dimm_name, u16 val)
123 {
124 	if (GRAPHICS_VER(i915) >= 11) {
125 		dimm->size = icl_get_dimm_size(val);
126 		dimm->width = icl_get_dimm_width(val);
127 		dimm->ranks = icl_get_dimm_ranks(val);
128 	} else {
129 		dimm->size = skl_get_dimm_size(val);
130 		dimm->width = skl_get_dimm_width(val);
131 		dimm->ranks = skl_get_dimm_ranks(val);
132 	}
133 
134 	drm_dbg_kms(&i915->drm,
135 		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
136 		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
137 		    yesno(skl_is_16gb_dimm(dimm)));
138 }
139 
140 static int
skl_dram_get_channel_info(struct drm_i915_private * i915,struct dram_channel_info * ch,int channel,u32 val)141 skl_dram_get_channel_info(struct drm_i915_private *i915,
142 			  struct dram_channel_info *ch,
143 			  int channel, u32 val)
144 {
145 	skl_dram_get_dimm_info(i915, &ch->dimm_l,
146 			       channel, 'L', val & 0xffff);
147 	skl_dram_get_dimm_info(i915, &ch->dimm_s,
148 			       channel, 'S', val >> 16);
149 
150 	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
151 		drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
152 		return -EINVAL;
153 	}
154 
155 	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
156 		ch->ranks = 2;
157 	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
158 		ch->ranks = 2;
159 	else
160 		ch->ranks = 1;
161 
162 	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
163 		skl_is_16gb_dimm(&ch->dimm_s);
164 
165 	drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
166 		    channel, ch->ranks, yesno(ch->is_16gb_dimm));
167 
168 	return 0;
169 }
170 
171 static bool
intel_is_dram_symmetric(const struct dram_channel_info * ch0,const struct dram_channel_info * ch1)172 intel_is_dram_symmetric(const struct dram_channel_info *ch0,
173 			const struct dram_channel_info *ch1)
174 {
175 	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
176 		(ch0->dimm_s.size == 0 ||
177 		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
178 }
179 
180 static int
skl_dram_get_channels_info(struct drm_i915_private * i915)181 skl_dram_get_channels_info(struct drm_i915_private *i915)
182 {
183 	struct dram_info *dram_info = &i915->dram_info;
184 	struct dram_channel_info ch0 = {}, ch1 = {};
185 	u32 val;
186 	int ret;
187 
188 	val = intel_uncore_read(&i915->uncore,
189 				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
190 	ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
191 	if (ret == 0)
192 		dram_info->num_channels++;
193 
194 	val = intel_uncore_read(&i915->uncore,
195 				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
196 	ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
197 	if (ret == 0)
198 		dram_info->num_channels++;
199 
200 	if (dram_info->num_channels == 0) {
201 		drm_info(&i915->drm, "Number of memory channels is zero\n");
202 		return -EINVAL;
203 	}
204 
205 	if (ch0.ranks == 0 && ch1.ranks == 0) {
206 		drm_info(&i915->drm, "couldn't get memory rank information\n");
207 		return -EINVAL;
208 	}
209 
210 	dram_info->wm_lv_0_adjust_needed = ch0.is_16gb_dimm || ch1.is_16gb_dimm;
211 
212 	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);
213 
214 	drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
215 		    yesno(dram_info->symmetric_memory));
216 
217 	return 0;
218 }
219 
220 static enum intel_dram_type
skl_get_dram_type(struct drm_i915_private * i915)221 skl_get_dram_type(struct drm_i915_private *i915)
222 {
223 	u32 val;
224 
225 	val = intel_uncore_read(&i915->uncore,
226 				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);
227 
228 	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
229 	case SKL_DRAM_DDR_TYPE_DDR3:
230 		return INTEL_DRAM_DDR3;
231 	case SKL_DRAM_DDR_TYPE_DDR4:
232 		return INTEL_DRAM_DDR4;
233 	case SKL_DRAM_DDR_TYPE_LPDDR3:
234 		return INTEL_DRAM_LPDDR3;
235 	case SKL_DRAM_DDR_TYPE_LPDDR4:
236 		return INTEL_DRAM_LPDDR4;
237 	default:
238 		MISSING_CASE(val);
239 		return INTEL_DRAM_UNKNOWN;
240 	}
241 }
242 
243 static int
skl_get_dram_info(struct drm_i915_private * i915)244 skl_get_dram_info(struct drm_i915_private *i915)
245 {
246 	struct dram_info *dram_info = &i915->dram_info;
247 	int ret;
248 
249 	dram_info->type = skl_get_dram_type(i915);
250 	drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
251 		    intel_dram_type_str(dram_info->type));
252 
253 	ret = skl_dram_get_channels_info(i915);
254 	if (ret)
255 		return ret;
256 
257 	return 0;
258 }
259 
260 /* Returns Gb per DRAM device */
bxt_get_dimm_size(u32 val)261 static int bxt_get_dimm_size(u32 val)
262 {
263 	switch (val & BXT_DRAM_SIZE_MASK) {
264 	case BXT_DRAM_SIZE_4GBIT:
265 		return 4;
266 	case BXT_DRAM_SIZE_6GBIT:
267 		return 6;
268 	case BXT_DRAM_SIZE_8GBIT:
269 		return 8;
270 	case BXT_DRAM_SIZE_12GBIT:
271 		return 12;
272 	case BXT_DRAM_SIZE_16GBIT:
273 		return 16;
274 	default:
275 		MISSING_CASE(val);
276 		return 0;
277 	}
278 }
279 
/* Decode the DIMM device width (in bits) from a BXT DUNIT register. */
static int bxt_get_dimm_width(u32 val)
{
	u32 width;

	if (!bxt_get_dimm_size(val))
		return 0;

	/* Field encodes log2(width / 8). */
	width = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;

	return 8 << width;
}
289 
/* Decode the DIMM rank count from a BXT DUNIT register. */
static int bxt_get_dimm_ranks(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	switch (val & BXT_DRAM_RANK_MASK) {
	case BXT_DRAM_RANK_SINGLE:
		return 1;
	case BXT_DRAM_RANK_DUAL:
		return 2;
	default:
		MISSING_CASE(val);
		return 0;
	}
}
305 
/* Decode the DRAM type from a BXT DUNIT register. */
static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return INTEL_DRAM_UNKNOWN;

	switch (val & BXT_DRAM_TYPE_MASK) {
	case BXT_DRAM_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case BXT_DRAM_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case BXT_DRAM_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case BXT_DRAM_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}
325 
/* Decode width, ranks and total size of a DIMM from a DUNIT register. */
static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}
337 
bxt_get_dram_info(struct drm_i915_private * i915)338 static int bxt_get_dram_info(struct drm_i915_private *i915)
339 {
340 	struct dram_info *dram_info = &i915->dram_info;
341 	u32 val;
342 	u8 valid_ranks = 0;
343 	int i;
344 
345 	/*
346 	 * Now read each DUNIT8/9/10/11 to check the rank of each dimms.
347 	 */
348 	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
349 		struct dram_dimm_info dimm;
350 		enum intel_dram_type type;
351 
352 		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
353 		if (val == 0xFFFFFFFF)
354 			continue;
355 
356 		dram_info->num_channels++;
357 
358 		bxt_get_dimm_info(&dimm, val);
359 		type = bxt_get_dimm_type(val);
360 
361 		drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
362 			    dram_info->type != INTEL_DRAM_UNKNOWN &&
363 			    dram_info->type != type);
364 
365 		drm_dbg_kms(&i915->drm,
366 			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u, type: %s\n",
367 			    i - BXT_D_CR_DRP0_DUNIT_START,
368 			    dimm.size, dimm.width, dimm.ranks,
369 			    intel_dram_type_str(type));
370 
371 		if (valid_ranks == 0)
372 			valid_ranks = dimm.ranks;
373 
374 		if (type != INTEL_DRAM_UNKNOWN)
375 			dram_info->type = type;
376 	}
377 
378 	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
379 		drm_info(&i915->drm, "couldn't get memory information\n");
380 		return -EINVAL;
381 	}
382 
383 	return 0;
384 }
385 
icl_pcode_read_mem_global_info(struct drm_i915_private * dev_priv)386 static int icl_pcode_read_mem_global_info(struct drm_i915_private *dev_priv)
387 {
388 	struct dram_info *dram_info = &dev_priv->dram_info;
389 	u32 val = 0;
390 	int ret;
391 
392 	ret = sandybridge_pcode_read(dev_priv,
393 				     ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
394 				     ICL_PCODE_MEM_SS_READ_GLOBAL_INFO,
395 				     &val, NULL);
396 	if (ret)
397 		return ret;
398 
399 	if (GRAPHICS_VER(dev_priv) == 12) {
400 		switch (val & 0xf) {
401 		case 0:
402 			dram_info->type = INTEL_DRAM_DDR4;
403 			break;
404 		case 1:
405 			dram_info->type = INTEL_DRAM_DDR5;
406 			break;
407 		case 2:
408 			dram_info->type = INTEL_DRAM_LPDDR5;
409 			break;
410 		case 3:
411 			dram_info->type = INTEL_DRAM_LPDDR4;
412 			break;
413 		case 4:
414 			dram_info->type = INTEL_DRAM_DDR3;
415 			break;
416 		case 5:
417 			dram_info->type = INTEL_DRAM_LPDDR3;
418 			break;
419 		default:
420 			MISSING_CASE(val & 0xf);
421 			return -1;
422 		}
423 	} else {
424 		switch (val & 0xf) {
425 		case 0:
426 			dram_info->type = INTEL_DRAM_DDR4;
427 			break;
428 		case 1:
429 			dram_info->type = INTEL_DRAM_DDR3;
430 			break;
431 		case 2:
432 			dram_info->type = INTEL_DRAM_LPDDR3;
433 			break;
434 		case 3:
435 			dram_info->type = INTEL_DRAM_LPDDR4;
436 			break;
437 		default:
438 			MISSING_CASE(val & 0xf);
439 			return -1;
440 		}
441 	}
442 
443 	dram_info->num_channels = (val & 0xf0) >> 4;
444 	dram_info->num_qgv_points = (val & 0xf00) >> 8;
445 	dram_info->num_psf_gv_points = (val & 0x3000) >> 12;
446 
447 	return 0;
448 }
449 
/* Gen11: MCHBAR decoding first, then supplement with the PCODE info. */
static int gen11_get_dram_info(struct drm_i915_private *i915)
{
	int ret;

	ret = skl_get_dram_info(i915);
	if (ret)
		return ret;

	return icl_pcode_read_mem_global_info(i915);
}
459 
gen12_get_dram_info(struct drm_i915_private * i915)460 static int gen12_get_dram_info(struct drm_i915_private *i915)
461 {
462 	i915->dram_info.wm_lv_0_adjust_needed = false;
463 
464 	return icl_pcode_read_mem_global_info(i915);
465 }
466 
intel_dram_detect(struct drm_i915_private * i915)467 void intel_dram_detect(struct drm_i915_private *i915)
468 {
469 	struct dram_info *dram_info = &i915->dram_info;
470 	int ret;
471 
472 	if (GRAPHICS_VER(i915) < 9 || IS_DG2(i915) || !HAS_DISPLAY(i915))
473 		return;
474 
475 	/*
476 	 * Assume level 0 watermark latency adjustment is needed until proven
477 	 * otherwise, this w/a is not needed by bxt/glk.
478 	 */
479 	dram_info->wm_lv_0_adjust_needed = !IS_GEN9_LP(i915);
480 
481 	if (GRAPHICS_VER(i915) >= 12)
482 		ret = gen12_get_dram_info(i915);
483 	else if (GRAPHICS_VER(i915) >= 11)
484 		ret = gen11_get_dram_info(i915);
485 	else if (IS_GEN9_LP(i915))
486 		ret = bxt_get_dram_info(i915);
487 	else
488 		ret = skl_get_dram_info(i915);
489 	if (ret)
490 		return;
491 
492 	drm_dbg_kms(&i915->drm, "DRAM channels: %u\n", dram_info->num_channels);
493 
494 	drm_dbg_kms(&i915->drm, "Watermark level 0 adjustment needed: %s\n",
495 		    yesno(dram_info->wm_lv_0_adjust_needed));
496 }
497 
/* Compute the eDRAM size in MB from the gen9+ EDRAM capability word. */
static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
{
	static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
	static const u8 sets[4] = { 1, 1, 2, 2 };
	u32 num_banks = EDRAM_NUM_BANKS(cap);
	u32 num_ways = ways[EDRAM_WAYS_IDX(cap)];
	u32 num_sets = sets[EDRAM_SETS_IDX(cap)];

	return num_banks * num_ways * num_sets;
}
507 
intel_dram_edram_detect(struct drm_i915_private * i915)508 void intel_dram_edram_detect(struct drm_i915_private *i915)
509 {
510 	u32 edram_cap = 0;
511 
512 	if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || GRAPHICS_VER(i915) >= 9))
513 		return;
514 
515 	edram_cap = __raw_uncore_read32(&i915->uncore, HSW_EDRAM_CAP);
516 
517 	/* NB: We can't write IDICR yet because we don't have gt funcs set up */
518 
519 	if (!(edram_cap & EDRAM_ENABLED))
520 		return;
521 
522 	/*
523 	 * The needed capability bits for size calculation are not there with
524 	 * pre gen9 so return 128MB always.
525 	 */
526 	if (GRAPHICS_VER(i915) < 9)
527 		i915->edram_size_mb = 128;
528 	else
529 		i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);
530 
531 	drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
532 }
533