// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 */

#include "i915_drv.h"
#include "intel_dram.h"
#include "intel_sideband.h"

struct dram_dimm_info {
	u16 size;
	u8 width, ranks;
};

struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s;
	u8 ranks;
	bool is_16gb_dimm;
};

#define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type

static const char *intel_dram_type_str(enum intel_dram_type type)
{
	static const char * const str[] = {
		DRAM_TYPE_STR(UNKNOWN),
		DRAM_TYPE_STR(DDR3),
		DRAM_TYPE_STR(DDR4),
		DRAM_TYPE_STR(LPDDR3),
		DRAM_TYPE_STR(LPDDR4),
	};

	if (type >= ARRAY_SIZE(str))
		type = INTEL_DRAM_UNKNOWN;

	return str[type];
}

#undef DRAM_TYPE_STR

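/*
 * Each rank drives the full 64-bit data bus of the channel, so a DIMM
 * carries ranks * 64 / width DRAM devices. The "?: 1" guards against a
 * division by zero for an unpopulated DIMM (width == 0).
 */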
static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
{
	return dimm->ranks * 64 / (dimm->width ?: 1);
}

/* Returns total Gb for the whole DIMM */
static int skl_get_dimm_size(u16 val)
{
	return (val & SKL_DRAM_SIZE_MASK) * 8;
}

static int skl_get_dimm_width(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	switch (val & SKL_DRAM_WIDTH_MASK) {
	case SKL_DRAM_WIDTH_X8:
	case SKL_DRAM_WIDTH_X16:
	case SKL_DRAM_WIDTH_X32:
		val = (val & SKL_DRAM_WIDTH_MASK) >> SKL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int skl_get_dimm_ranks(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	val = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;

	return val + 1;
}

/* Returns total Gb for the whole DIMM */
static int cnl_get_dimm_size(u16 val)
{
	return (val & CNL_DRAM_SIZE_MASK) * 8 / 2;
}

static int cnl_get_dimm_width(u16 val)
{
	if (cnl_get_dimm_size(val) == 0)
		return 0;

	switch (val & CNL_DRAM_WIDTH_MASK) {
	case CNL_DRAM_WIDTH_X8:
	case CNL_DRAM_WIDTH_X16:
	case CNL_DRAM_WIDTH_X32:
		val = (val & CNL_DRAM_WIDTH_MASK) >> CNL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int cnl_get_dimm_ranks(u16 val)
{
	if (cnl_get_dimm_size(val) == 0)
		return 0;

	val = (val & CNL_DRAM_RANK_MASK) >> CNL_DRAM_RANK_SHIFT;

	return val + 1;
}

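/*
 * DIMMs built from 16 Gb DRAM devices need the level 0 watermark latency
 * adjustment (wm_lv_0_adjust_needed below), so detect them by converting
 * the total DIMM size back to the per-device size.
 */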
static bool
skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
{
	/* Convert total Gb to Gb per DRAM device */
	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
}

static void
skl_dram_get_dimm_info(struct drm_i915_private *i915,
		       struct dram_dimm_info *dimm,
		       int channel, char dimm_name, u16 val)
{
	if (INTEL_GEN(i915) >= 10) {
		dimm->size = cnl_get_dimm_size(val);
		dimm->width = cnl_get_dimm_width(val);
		dimm->ranks = cnl_get_dimm_ranks(val);
	} else {
		dimm->size = skl_get_dimm_size(val);
		dimm->width = skl_get_dimm_width(val);
		dimm->ranks = skl_get_dimm_ranks(val);
	}

	drm_dbg_kms(&i915->drm,
		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    yesno(skl_is_16gb_dimm(dimm)));
}

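/*
 * The MCHBAR DIMM register describes both slots of the channel: the low
 * 16 bits are DIMM "L" and the high 16 bits DIMM "S".
 */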
static int
skl_dram_get_channel_info(struct drm_i915_private *i915,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	skl_dram_get_dimm_info(i915, &ch->dimm_l,
			       channel, 'L', val & 0xffff);
	skl_dram_get_dimm_info(i915, &ch->dimm_s,
			       channel, 'S', val >> 16);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

	/* A dual rank DIMM, or two single rank DIMMs, yield two ranks */
	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
		    channel, ch->ranks, yesno(ch->is_16gb_dimm));

	return 0;
}

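/*
 * Memory is considered symmetric when the two channels are identical and,
 * within a channel, the S slot is either empty or matches the L slot.
 */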
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}

static int
skl_dram_get_channels_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(&i915->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	if (ch0.ranks == 0 && ch1.ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	dram_info->wm_lv_0_adjust_needed = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
		    yesno(dram_info->symmetric_memory));

	return 0;
}

static enum intel_dram_type
skl_get_dram_type(struct drm_i915_private *i915)
{
	u32 val;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);

	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
	case SKL_DRAM_DDR_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case SKL_DRAM_DDR_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case SKL_DRAM_DDR_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case SKL_DRAM_DDR_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

static int
skl_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	u32 mem_freq_khz, val;
	int ret;

	dram_info->type = skl_get_dram_type(i915);
	drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	ret = skl_dram_get_channels_info(i915);
	if (ret)
		return ret;

	val = intel_uncore_read(&i915->uncore,
				SKL_MC_BIOS_DATA_0_0_0_MCHBAR_PCU);
	mem_freq_khz = DIV_ROUND_UP((val & SKL_REQ_DATA_MASK) *
				    SKL_MEMORY_FREQ_MULTIPLIER_HZ, 1000);

	if (dram_info->num_channels * mem_freq_khz == 0) {
		drm_info(&i915->drm,
			 "Couldn't get system memory bandwidth\n");
		return -EINVAL;
	}

	return 0;
}

/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
	switch (val & BXT_DRAM_SIZE_MASK) {
	case BXT_DRAM_SIZE_4GBIT:
		return 4;
	case BXT_DRAM_SIZE_6GBIT:
		return 6;
	case BXT_DRAM_SIZE_8GBIT:
		return 8;
	case BXT_DRAM_SIZE_12GBIT:
		return 12;
	case BXT_DRAM_SIZE_16GBIT:
		return 16;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int bxt_get_dimm_width(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;

	return 8 << val;
}

static int bxt_get_dimm_ranks(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	switch (val & BXT_DRAM_RANK_MASK) {
	case BXT_DRAM_RANK_SINGLE:
		return 1;
	case BXT_DRAM_RANK_DUAL:
		return 2;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return INTEL_DRAM_UNKNOWN;

	switch (val & BXT_DRAM_TYPE_MASK) {
	case BXT_DRAM_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case BXT_DRAM_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case BXT_DRAM_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case BXT_DRAM_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}

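/*
 * The BIOS request register provides the memory frequency and the active
 * channel mask; the per-channel DUNIT registers provide the DIMM geometry
 * and DRAM type.
 */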
static int bxt_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	u32 dram_channels;
	u32 mem_freq_khz, val;
	u8 num_active_channels, valid_ranks = 0;
	int i;

	val = intel_uncore_read(&i915->uncore, BXT_P_CR_MC_BIOS_REQ_0_0_0);
	mem_freq_khz = DIV_ROUND_UP((val & BXT_REQ_DATA_MASK) *
				    BXT_MEMORY_FREQ_MULTIPLIER_HZ, 1000);

	dram_channels = val & BXT_DRAM_CHANNEL_ACTIVE_MASK;
	num_active_channels = hweight32(dram_channels);

	if (mem_freq_khz * num_active_channels == 0) {
		drm_info(&i915->drm,
			 "Couldn't get system memory bandwidth\n");
		return -EINVAL;
	}

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each DIMM.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(&i915->drm,
			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u, type: %s\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks,
			    intel_dram_type_str(type));

		if (valid_ranks == 0)
			valid_ranks = dimm.ranks;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	return 0;
}

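/*
 * On gen11+ the global memory subsystem info is queried from the pcode
 * mailbox: the returned value packs the DRAM type in bits 3:0, the channel
 * count in bits 7:4 and the number of QGV points in bits 11:8.
 */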
static int icl_pcode_read_mem_global_info(struct drm_i915_private *dev_priv)
{
	struct dram_info *dram_info = &dev_priv->dram_info;
	u32 val = 0;
	int ret;

	ret = sandybridge_pcode_read(dev_priv,
				     ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
				     ICL_PCODE_MEM_SS_READ_GLOBAL_INFO,
				     &val, NULL);
	if (ret)
		return ret;

	if (IS_GEN(dev_priv, 12)) {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR5;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR5;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		case 4:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 5:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	} else {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	}

	dram_info->num_channels = (val & 0xf0) >> 4;
	dram_info->num_qgv_points = (val & 0xf00) >> 8;

	return 0;
}

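/*
 * Gen11 still exposes the DIMM info in MCHBAR, so reuse the SKL path and
 * then augment it with the pcode global info.
 */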
static int gen11_get_dram_info(struct drm_i915_private *i915)
{
	int ret = skl_get_dram_info(i915);

	if (ret)
		return ret;

	return icl_pcode_read_mem_global_info(i915);
}

static int gen12_get_dram_info(struct drm_i915_private *i915)
{
	/* Always needed for GEN12+ */
	i915->dram_info.wm_lv_0_adjust_needed = true;

	return icl_pcode_read_mem_global_info(i915);
}

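/*
 * Detect the DRAM configuration and stash it in i915->dram_info for later
 * use by the display watermark and bandwidth code.
 */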
void intel_dram_detect(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	int ret;

	/*
	 * Assume level 0 watermark latency adjustment is needed until proven
	 * otherwise; this w/a is not needed by bxt/glk.
	 */
	dram_info->wm_lv_0_adjust_needed = !IS_GEN9_LP(i915);

	if (INTEL_GEN(i915) < 9 || !HAS_DISPLAY(i915))
		return;

	if (INTEL_GEN(i915) >= 12)
		ret = gen12_get_dram_info(i915);
	else if (INTEL_GEN(i915) >= 11)
		ret = gen11_get_dram_info(i915);
	else if (IS_GEN9_LP(i915))
		ret = bxt_get_dram_info(i915);
	else
		ret = skl_get_dram_info(i915);
	if (ret)
		return;

	drm_dbg_kms(&i915->drm, "DRAM channels: %u\n", dram_info->num_channels);

	drm_dbg_kms(&i915->drm, "Watermark level 0 adjustment needed: %s\n",
		    yesno(dram_info->wm_lv_0_adjust_needed));
}

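/*
 * Decode the eDRAM size from the HSW_EDRAM_CAP bank/way/set fields; the
 * product of the three gives the size in MB.
 */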
static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
{
	static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
	static const u8 sets[4] = { 1, 1, 2, 2 };

	return EDRAM_NUM_BANKS(cap) *
		ways[EDRAM_WAYS_IDX(cap)] *
		sets[EDRAM_SETS_IDX(cap)];
}

void intel_dram_edram_detect(struct drm_i915_private *i915)
{
	u32 edram_cap = 0;

	if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || INTEL_GEN(i915) >= 9))
		return;

	edram_cap = __raw_uncore_read32(&i915->uncore, HSW_EDRAM_CAP);

	/* NB: We can't write IDICR yet because we don't have gt funcs set up */

	if (!(edram_cap & EDRAM_ENABLED))
		return;

	/*
	 * The needed capability bits for size calculation are not there with
	 * pre-gen9, so return 128MB always.
	 */
	if (INTEL_GEN(i915) < 9)
		i915->edram_size_mb = 128;
	else
		i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);

	drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
}