/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2014 Damien Zammit <damien@zamaudio.com>
 * Copyright (C) 2014 Vladimir Serbinenko <phcoder@gmail.com>
 * Copyright (C) 2016 Patrick Rudolph <siro@das-labor.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <console/console.h>
#include <console/usb.h>
#include <bootmode.h>
#include <string.h>
#include <arch/io.h>
#include <cbmem.h>
#include <halt.h>
#include <timestamp.h>
#include <northbridge/intel/common/mrc_cache.h>
#include <southbridge/intel/bd82x6x/me.h>
#include <southbridge/intel/bd82x6x/smbus.h>
#include <cpu/x86/msr.h>
#include <delay.h>
#include <smbios.h>
#include <memory_info.h>
#include <lib.h>
#include "raminit_native.h"
#include "raminit_common.h"
#include "sandybridge.h"

/* FIXME: no ECC support. */
/* FIXME: no support for 3-channel chipsets. */

static const char *ecc_decoder[] = {
	"inactive",
	"active on IO",
	"disabled on IO",
	"active"
};

static void wait_txt_clear(void)
{
	struct cpuid_result cp;

	cp = cpuid_ext(0x1, 0x0);
	/* Check whether TXT is supported */
	if (!(cp.ecx & 0x40))
		return;
	/* Some TXT public bit. */
	if (!(read32((void *)0xfed30010) & 1))
		return;
	/* Wait for TXT clear. */
	while (!(read8((void *)0xfed40000) & (1 << 7)));
}

/*
 * Disable a channel in ramctr_timing.
 */
static void disable_channel(ramctr_timing *ctrl, int channel)
{
	ctrl->rankmap[channel] = 0;
	memset(&ctrl->rank_mirror[channel][0], 0, sizeof(ctrl->rank_mirror[0]));
	ctrl->channel_size_mb[channel] = 0;
	ctrl->cmd_stretch[channel] = 0;
	ctrl->mad_dimm[channel] = 0;
	memset(&ctrl->timings[channel][0], 0, sizeof(ctrl->timings[0]));
	memset(&ctrl->info.dimm[channel][0], 0, sizeof(ctrl->info.dimm[0]));
}

/*
 * Fill cbmem with information for SMBIOS type 17.
 */
static void fill_smbios17(ramctr_timing *ctrl)
{
	struct memory_info *mem_info;
	int channel, slot;
	struct dimm_info *dimm;
	uint16_t ddr_freq;
	dimm_info *info = &ctrl->info;
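
	/* tCK is kept in units of 1/256 ns, so (1000 << 8) / tCK is the DRAM clock in MHz. */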
	ddr_freq = (1000 << 8) / ctrl->tCK;

	/*
	 * Allocate CBMEM area for DIMM information used to populate SMBIOS
	 * table 17
	 */
	mem_info = cbmem_add(CBMEM_ID_MEMINFO, sizeof(*mem_info));
	printk(BIOS_DEBUG, "CBMEM entry for DIMM info: 0x%p\n", mem_info);
	if (!mem_info)
		return;

	memset(mem_info, 0, sizeof(*mem_info));

	FOR_ALL_CHANNELS for (slot = 0; slot < NUM_SLOTS; slot++) {
		dimm = &mem_info->dimm[mem_info->dimm_cnt];
		if (info->dimm[channel][slot].size_mb) {
			dimm->ddr_type = MEMORY_TYPE_DDR3;
			dimm->ddr_frequency = ddr_freq;
			dimm->dimm_size = info->dimm[channel][slot].size_mb;
			dimm->channel_num = channel;
			dimm->rank_per_dimm = info->dimm[channel][slot].ranks;
			dimm->dimm_num = slot;
			memcpy(dimm->module_part_number,
			       info->dimm[channel][slot].part_number, 16);
			dimm->mod_id = info->dimm[channel][slot].manufacturer_id;
			dimm->mod_type = info->dimm[channel][slot].dimm_type;
			dimm->bus_width = info->dimm[channel][slot].width;
			mem_info->dimm_cnt++;
		}
	}
}

/*
 * Dump the memory controller configuration, as read from the memory
 * controller registers, to the log.
 */
static void report_memory_config(void)
{
	u32 addr_decoder_common, addr_decode_ch[NUM_CHANNELS];
	int i;

	addr_decoder_common = MCHBAR32(0x5000);
	addr_decode_ch[0] = MCHBAR32(0x5004);
	addr_decode_ch[1] = MCHBAR32(0x5008);
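
	/*
	 * MC_BIOS_DATA holds the current memory multiplier; scaling it by
	 * 133.33 MHz and by 2 (double data rate) gives the figure printed below.
	 */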
	printk(BIOS_DEBUG, "memcfg DDR3 clock %d MHz\n",
	       (MCHBAR32(MC_BIOS_DATA) * 13333 * 2 + 50) / 100);
	printk(BIOS_DEBUG, "memcfg channel assignment: A: %d, B: %d, C: %d\n",
	       addr_decoder_common & 3, (addr_decoder_common >> 2) & 3,
	       (addr_decoder_common >> 4) & 3);

	for (i = 0; i < ARRAY_SIZE(addr_decode_ch); i++) {
		u32 ch_conf = addr_decode_ch[i];
		printk(BIOS_DEBUG, "memcfg channel[%d] config (%8.8x):\n", i,
		       ch_conf);
		printk(BIOS_DEBUG, " ECC %s\n",
		       ecc_decoder[(ch_conf >> 24) & 3]);
		printk(BIOS_DEBUG, " enhanced interleave mode %s\n",
		       ((ch_conf >> 22) & 1) ? "on" : "off");
		printk(BIOS_DEBUG, " rank interleave %s\n",
		       ((ch_conf >> 21) & 1) ? "on" : "off");
		printk(BIOS_DEBUG, " DIMMA %d MB width x%d %s rank%s\n",
		       ((ch_conf >> 0) & 0xff) * 256,
		       ((ch_conf >> 19) & 1) ? 16 : 8,
		       ((ch_conf >> 17) & 1) ? "dual" : "single",
		       ((ch_conf >> 16) & 1) ? "" : ", selected");
		printk(BIOS_DEBUG, " DIMMB %d MB width x%d %s rank%s\n",
		       ((ch_conf >> 8) & 0xff) * 256,
		       ((ch_conf >> 20) & 1) ? 16 : 8,
		       ((ch_conf >> 18) & 1) ? "dual" : "single",
		       ((ch_conf >> 16) & 1) ? ", selected" : "");
	}
}

/*
 * Return CRC16 match for all SPDs.
 */
static int verify_crc16_spds_ddr3(spd_raw_data *spd, ramctr_timing *ctrl)
{
	int channel, slot, spd_slot;
	int match = 1;

	FOR_ALL_CHANNELS {
		for (slot = 0; slot < NUM_SLOTS; slot++) {
			spd_slot = 2 * channel + slot;
			match &= ctrl->spd_crc[channel][slot] ==
				spd_ddr3_calc_unique_crc(spd[spd_slot], sizeof(spd_raw_data));
		}
	}

	return match;
}

void read_spd(spd_raw_data *spd, u8 addr, bool id_only)
{
	int j;
	if (id_only) {
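		/*
		 * Bytes 117..127 carry the module's unique identifiers
		 * (manufacturer ID, serial number) and the SPD CRC; enough
		 * to tell whether a DIMM has been replaced.
		 */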
		for (j = 117; j < 128; j++)
			(*spd)[j] = do_smbus_read_byte(SMBUS_IO_BASE, addr, j);
	} else {
		for (j = 0; j < 256; j++)
			(*spd)[j] = do_smbus_read_byte(SMBUS_IO_BASE, addr, j);
	}
}

static void dram_find_spds_ddr3(spd_raw_data *spd, ramctr_timing *ctrl)
{
	int dimms = 0, dimms_on_channel;
	int channel, slot, spd_slot;
	dimm_info *dimm = &ctrl->info;

	memset(ctrl->rankmap, 0, sizeof(ctrl->rankmap));

	ctrl->extended_temperature_range = 1;
	ctrl->auto_self_refresh = 1;

	FOR_ALL_CHANNELS {
		ctrl->channel_size_mb[channel] = 0;

		dimms_on_channel = 0;
		/* count dimms on channel */
		for (slot = 0; slot < NUM_SLOTS; slot++) {
			spd_slot = 2 * channel + slot;
			spd_decode_ddr3(&dimm->dimm[channel][slot], spd[spd_slot]);
			if (dimm->dimm[channel][slot].dram_type == SPD_MEMORY_TYPE_SDRAM_DDR3)
				dimms_on_channel++;
		}

		for (slot = 0; slot < NUM_SLOTS; slot++) {
			spd_slot = 2 * channel + slot;
			/* search for XMP profile */
			spd_xmp_decode_ddr3(&dimm->dimm[channel][slot],
					    spd[spd_slot],
					    DDR3_XMP_PROFILE_1);

			if (dimm->dimm[channel][slot].dram_type != SPD_MEMORY_TYPE_SDRAM_DDR3) {
				printram("No valid XMP profile found.\n");
				spd_decode_ddr3(&dimm->dimm[channel][slot], spd[spd_slot]);
			} else if (dimms_on_channel > dimm->dimm[channel][slot].dimms_per_channel) {
				printram("XMP profile supports %u DIMMs, but %u DIMMs are installed.\n",
					 dimm->dimm[channel][slot].dimms_per_channel,
					 dimms_on_channel);
				spd_decode_ddr3(&dimm->dimm[channel][slot], spd[spd_slot]);
			} else if (dimm->dimm[channel][slot].voltage != 1500) {
				/* TODO: support DDR3 voltages other than 1500 mV */
				printram("XMP profile's requested %u mV is unsupported.\n",
					 dimm->dimm[channel][slot].voltage);
				spd_decode_ddr3(&dimm->dimm[channel][slot], spd[spd_slot]);
			}

			/* fill in CRC16 for MRC cache */
			ctrl->spd_crc[channel][slot] =
				spd_ddr3_calc_unique_crc(spd[spd_slot], sizeof(spd_raw_data));

			if (dimm->dimm[channel][slot].dram_type != SPD_MEMORY_TYPE_SDRAM_DDR3) {
				// set dimm invalid
				dimm->dimm[channel][slot].ranks = 0;
				dimm->dimm[channel][slot].size_mb = 0;
				continue;
			}

			dram_print_spd_ddr3(&dimm->dimm[channel][slot]);
			dimms++;
			ctrl->rank_mirror[channel][slot * 2] = 0;
			ctrl->rank_mirror[channel][slot * 2 + 1] = dimm->dimm[channel][slot].flags.pins_mirrored;
			ctrl->channel_size_mb[channel] += dimm->dimm[channel][slot].size_mb;

			ctrl->auto_self_refresh &= dimm->dimm[channel][slot].flags.asr;
			ctrl->extended_temperature_range &= dimm->dimm[channel][slot].flags.ext_temp_refresh;

			ctrl->rankmap[channel] |= ((1 << dimm->dimm[channel][slot].ranks) - 1) << (2 * slot);
			printk(BIOS_DEBUG, "channel[%d] rankmap = 0x%x\n",
			       channel, ctrl->rankmap[channel]);
		}
		if ((ctrl->rankmap[channel] & 3) && (ctrl->rankmap[channel] & 0xc)
		    && dimm->dimm[channel][0].reference_card <= 5
		    && dimm->dimm[channel][1].reference_card <= 5) {
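			/*
			 * The reference raw card type decoded from each DIMM's SPD
			 * selects an additional timing offset (ref_card_offset),
			 * applied only when both slots on the channel are populated.
			 */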
			const int ref_card_offset_table[6][6] = {
				{ 0, 0, 0, 0, 2, 2, },
				{ 0, 0, 0, 0, 2, 2, },
				{ 0, 0, 0, 0, 2, 2, },
				{ 0, 0, 0, 0, 1, 1, },
				{ 2, 2, 2, 1, 0, 0, },
				{ 2, 2, 2, 1, 0, 0, },
			};
			ctrl->ref_card_offset[channel] = ref_card_offset_table[dimm->dimm[channel][0].reference_card]
								[dimm->dimm[channel][1].reference_card];
		} else
			ctrl->ref_card_offset[channel] = 0;
	}

	if (!dimms)
		die("No DIMMs were found");
}

/* Frequency multiplier. */
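/* Multipliers 3..8 correspond to DDR3-800 through DDR3-2133 (FRQ x 133 MHz x 2). */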
static u32 get_FRQ(u32 tCK)
{
	u32 FRQ;
	FRQ = 256000 / (tCK * BASEFREQ);
	if (FRQ > 8)
		return 8;
	if (FRQ < 3)
		return 3;
	return FRQ;
}

static u32 get_REFI(u32 tCK)
{
	/* Get REFI based on MCU frequency using the following rule:
	 *        _________________________________________
	 * FRQ  : | 3    | 4    | 5    | 6    | 7    | 8    |
	 * REFI : | 3120 | 4160 | 5200 | 6240 | 7280 | 8320 |
	 */
	static const u32 frq_refi_map[] =
	    { 3120, 4160, 5200, 6240, 7280, 8320 };
	return frq_refi_map[get_FRQ(tCK) - 3];
}

static u8 get_XSOffset(u32 tCK)
{
	/* Get XSOffset based on MCU frequency using the following rule:
	 *            __________________________
	 * FRQ      : | 3 | 4 | 5 | 6 | 7  | 8  |
	 * XSOffset : | 4 | 6 | 7 | 8 | 10 | 11 |
	 */
	static const u8 frq_xs_map[] = { 4, 6, 7, 8, 10, 11 };
	return frq_xs_map[get_FRQ(tCK) - 3];
}

static u8 get_MOD(u32 tCK)
{
	/* Get MOD based on MCU frequency using the following rule:
	 *       ______________________________
	 * FRQ : | 3  | 4  | 5  | 6  | 7  | 8  |
	 * MOD : | 12 | 12 | 12 | 12 | 15 | 16 |
	 */
	static const u8 frq_mod_map[] = { 12, 12, 12, 12, 15, 16 };
	return frq_mod_map[get_FRQ(tCK) - 3];
}

static u8 get_WLO(u32 tCK)
{
	/* Get WLO based on MCU frequency using the following rule:
	 *       ________________________
	 * FRQ : | 3 | 4 | 5 | 6 | 7 | 8 |
	 * WLO : | 4 | 5 | 6 | 6 | 8 | 8 |
	 */
	static const u8 frq_wlo_map[] = { 4, 5, 6, 6, 8, 8 };
	return frq_wlo_map[get_FRQ(tCK) - 3];
}

static u8 get_CKE(u32 tCK)
{
	/* Get CKE based on MCU frequency using the following rule:
	 *       ________________________
	 * FRQ : | 3 | 4 | 5 | 6 | 7 | 8 |
	 * CKE : | 3 | 3 | 4 | 4 | 5 | 6 |
	 */
	static const u8 frq_cke_map[] = { 3, 3, 4, 4, 5, 6 };
	return frq_cke_map[get_FRQ(tCK) - 3];
}

static u8 get_XPDLL(u32 tCK)
{
	/* Get XPDLL based on MCU frequency using the following rule:
	 *         ______________________________
	 * FRQ   : | 3  | 4  | 5  | 6  | 7  | 8  |
	 * XPDLL : | 10 | 13 | 16 | 20 | 23 | 26 |
	 */
	static const u8 frq_xpdll_map[] = { 10, 13, 16, 20, 23, 26 };
	return frq_xpdll_map[get_FRQ(tCK) - 3];
}

static u8 get_XP(u32 tCK)
{
	/* Get XP based on MCU frequency using the following rule:
	 *       ________________________
	 * FRQ : | 3 | 4 | 5 | 6 | 7 | 8 |
	 * XP  : | 3 | 4 | 4 | 5 | 6 | 7 |
	 */
	static const u8 frq_xp_map[] = { 3, 4, 4, 5, 6, 7 };
	return frq_xp_map[get_FRQ(tCK) - 3];
}

static u8 get_AONPD(u32 tCK)
{
	/* Get AONPD based on MCU frequency using the following rule:
	 *         _________________________
	 * FRQ   : | 3 | 4 | 5 | 6 | 7 | 8  |
	 * AONPD : | 4 | 5 | 6 | 8 | 8 | 10 |
	 */
	static const u8 frq_aonpd_map[] = { 4, 5, 6, 8, 8, 10 };
	return frq_aonpd_map[get_FRQ(tCK) - 3];
}

static u32 get_COMP2(u32 tCK)
{
	/* Get COMP2 based on MCU frequency using the following rule:
	 *         ___________________________________________________________
	 * FRQ   : | 3       | 4       | 5       | 6       | 7       | 8       |
	 * COMP2 : | D6BEDCC | CE7C34C | CA57A4C | C6369CC | C42514C | C21410C |
	 */
	static const u32 frq_comp2_map[] = { 0xD6BEDCC, 0xCE7C34C, 0xCA57A4C,
		0xC6369CC, 0xC42514C, 0xC21410C
	};
	return frq_comp2_map[get_FRQ(tCK) - 3];
}

static void dram_timing(ramctr_timing *ctrl)
{
	u8 val;
	u32 val32;

	/* Maximum supported DDR3 frequency is 1066MHz (DDR3 2133) so make sure
	 * we cap it if we have faster DIMMs.
	 * Then, align it to the closest JEDEC standard frequency */
	if (ctrl->tCK <= TCK_1066MHZ) {
		ctrl->tCK = TCK_1066MHZ;
		ctrl->edge_offset[0] = 16;
		ctrl->edge_offset[1] = 7;
		ctrl->edge_offset[2] = 7;
		ctrl->timC_offset[0] = 18;
		ctrl->timC_offset[1] = 7;
		ctrl->timC_offset[2] = 7;
		ctrl->reg_320c_range_threshold = 13;
	} else if (ctrl->tCK <= TCK_933MHZ) {
		ctrl->tCK = TCK_933MHZ;
		ctrl->edge_offset[0] = 14;
		ctrl->edge_offset[1] = 6;
		ctrl->edge_offset[2] = 6;
		ctrl->timC_offset[0] = 15;
		ctrl->timC_offset[1] = 6;
		ctrl->timC_offset[2] = 6;
		ctrl->reg_320c_range_threshold = 15;
	} else if (ctrl->tCK <= TCK_800MHZ) {
		ctrl->tCK = TCK_800MHZ;
		ctrl->edge_offset[0] = 13;
		ctrl->edge_offset[1] = 5;
		ctrl->edge_offset[2] = 5;
		ctrl->timC_offset[0] = 14;
		ctrl->timC_offset[1] = 5;
		ctrl->timC_offset[2] = 5;
		ctrl->reg_320c_range_threshold = 15;
	} else if (ctrl->tCK <= TCK_666MHZ) {
		ctrl->tCK = TCK_666MHZ;
		ctrl->edge_offset[0] = 10;
		ctrl->edge_offset[1] = 4;
		ctrl->edge_offset[2] = 4;
		ctrl->timC_offset[0] = 11;
		ctrl->timC_offset[1] = 4;
		ctrl->timC_offset[2] = 4;
		ctrl->reg_320c_range_threshold = 16;
	} else if (ctrl->tCK <= TCK_533MHZ) {
		ctrl->tCK = TCK_533MHZ;
		ctrl->edge_offset[0] = 8;
		ctrl->edge_offset[1] = 3;
		ctrl->edge_offset[2] = 3;
		ctrl->timC_offset[0] = 9;
		ctrl->timC_offset[1] = 3;
		ctrl->timC_offset[2] = 3;
		ctrl->reg_320c_range_threshold = 17;
	} else {
		ctrl->tCK = TCK_400MHZ;
		ctrl->edge_offset[0] = 6;
		ctrl->edge_offset[1] = 2;
		ctrl->edge_offset[2] = 2;
		ctrl->timC_offset[0] = 6;
		ctrl->timC_offset[1] = 2;
		ctrl->timC_offset[2] = 2;
		ctrl->reg_320c_range_threshold = 17;
	}

	/* Initial phase between CLK/CMD pins */
	ctrl->reg_c14_offset = (256000 / ctrl->tCK) / 66;

	/* DLL_CONFIG_MDLL_W_TIMER */
	ctrl->reg_5064b0 = (128000 / ctrl->tCK) + 3;

	val32 = (1000 << 8) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected DRAM frequency: %u MHz\n", val32);

	/* Find CAS latency */
	val = (ctrl->tAA + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Minimum CAS latency : %uT\n", val);
	/* Find lowest supported CAS latency that satisfies the minimum value */
	while (!((ctrl->cas_supported >> (val - MIN_CAS)) & 1)
	       && (ctrl->cas_supported >> (val - MIN_CAS))) {
		val++;
	}
	/* Is CAS supported */
	if (!(ctrl->cas_supported & (1 << (val - MIN_CAS)))) {
		printk(BIOS_ERR, "CAS %uT not supported. ", val);
		val = MAX_CAS;
		/* Find highest supported CAS latency */
		while (!((ctrl->cas_supported >> (val - MIN_CAS)) & 1))
			val--;

		printk(BIOS_ERR, "Using CAS %uT instead.\n", val);
	}

	printk(BIOS_DEBUG, "Selected CAS latency : %uT\n", val);
	ctrl->CAS = val;
	ctrl->CWL = get_CWL(ctrl->tCK);
	printk(BIOS_DEBUG, "Selected CWL latency : %uT\n", ctrl->CWL);

	/* Find tRCD */
	ctrl->tRCD = (ctrl->tRCD + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRCD : %uT\n", ctrl->tRCD);

	ctrl->tRP = (ctrl->tRP + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRP : %uT\n", ctrl->tRP);

	/* Find tRAS */
	ctrl->tRAS = (ctrl->tRAS + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRAS : %uT\n", ctrl->tRAS);

	/* Find tWR */
	ctrl->tWR = (ctrl->tWR + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tWR : %uT\n", ctrl->tWR);

	/* Find tFAW */
	ctrl->tFAW = (ctrl->tFAW + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tFAW : %uT\n", ctrl->tFAW);

	/* Find tRRD */
	ctrl->tRRD = (ctrl->tRRD + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRRD : %uT\n", ctrl->tRRD);

	/* Find tRTP */
	ctrl->tRTP = (ctrl->tRTP + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRTP : %uT\n", ctrl->tRTP);

	/* Find tWTR */
	ctrl->tWTR = (ctrl->tWTR + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tWTR : %uT\n", ctrl->tWTR);

	/* Refresh-to-Active or Refresh-to-Refresh (tRFC) */
	ctrl->tRFC = (ctrl->tRFC + ctrl->tCK - 1) / ctrl->tCK;
	printk(BIOS_DEBUG, "Selected tRFC : %uT\n", ctrl->tRFC);

	ctrl->tREFI = get_REFI(ctrl->tCK);
	ctrl->tMOD = get_MOD(ctrl->tCK);
	ctrl->tXSOffset = get_XSOffset(ctrl->tCK);
	ctrl->tWLO = get_WLO(ctrl->tCK);
	ctrl->tCKE = get_CKE(ctrl->tCK);
	ctrl->tXPDLL = get_XPDLL(ctrl->tCK);
	ctrl->tXP = get_XP(ctrl->tCK);
	ctrl->tAONPD = get_AONPD(ctrl->tCK);
}

static void dram_freq(ramctr_timing *ctrl)
{
	if (ctrl->tCK > TCK_400MHZ) {
		printk(BIOS_ERR,
		       "DRAM frequency is under the lowest supported frequency (400 MHz). "
		       "Increasing to 400 MHz as last resort.\n");
		ctrl->tCK = TCK_400MHZ;
	}

	while (1) {
		u8 val2;
		u32 reg1 = 0;

		/* Step 1 - Set target PCU frequency */

		if (ctrl->tCK <= TCK_1066MHZ) {
			ctrl->tCK = TCK_1066MHZ;
		} else if (ctrl->tCK <= TCK_933MHZ) {
			ctrl->tCK = TCK_933MHZ;
		} else if (ctrl->tCK <= TCK_800MHZ) {
			ctrl->tCK = TCK_800MHZ;
		} else if (ctrl->tCK <= TCK_666MHZ) {
			ctrl->tCK = TCK_666MHZ;
		} else if (ctrl->tCK <= TCK_533MHZ) {
			ctrl->tCK = TCK_533MHZ;
		} else if (ctrl->tCK <= TCK_400MHZ) {
			ctrl->tCK = TCK_400MHZ;
		} else {
			die("No lock frequency found");
		}

		/* Frequency multiplier. */
		u32 FRQ = get_FRQ(ctrl->tCK);

		/* The PLL will never lock if the required frequency is
		 * already set. Exit early to prevent a system hang.
		 */
		reg1 = MCHBAR32(MC_BIOS_DATA);
		val2 = (u8) reg1;
		if (val2)
			return;

		/* Step 2 - Select frequency in the MCU */
		reg1 = FRQ;
		reg1 |= 0x80000000;	// set running bit
		MCHBAR32(MC_BIOS_REQ) = reg1;
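		/* Poll until the run/busy bit clears, i.e. the request has been serviced */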
		int i = 0;
		printk(BIOS_DEBUG, "PLL busy... ");
		while (reg1 & 0x80000000) {
			udelay(10);
			i++;
			reg1 = MCHBAR32(MC_BIOS_REQ);
		}
		printk(BIOS_DEBUG, "done in %d us\n", i * 10);

		/* Step 3 - Verify lock frequency */
		reg1 = MCHBAR32(MC_BIOS_DATA);
		val2 = (u8) reg1;
		if (val2 >= FRQ) {
			printk(BIOS_DEBUG, "MCU frequency is set at : %d MHz\n",
			       (1000 << 8) / ctrl->tCK);
			return;
		}
		printk(BIOS_DEBUG, "PLL didn't lock. Retrying at lower frequency\n");
		ctrl->tCK++;
	}
}

static void dram_ioregs(ramctr_timing *ctrl)
{
	u32 reg, comp2;

	int channel;

	// IO clock
	FOR_ALL_CHANNELS {
		MCHBAR32(0xc00 + 0x100 * channel) = ctrl->rankmap[channel];
	}

	// IO command
	FOR_ALL_CHANNELS {
		MCHBAR32(0x3200 + 0x100 * channel) = ctrl->rankmap[channel];
	}

	// IO control
	FOR_ALL_POPULATED_CHANNELS {
		program_timings(ctrl, channel);
	}

	// Rcomp
	printram("RCOMP...");
	reg = 0;
	while (reg == 0) {
		reg = MCHBAR32(0x5084) & 0x10000;
	}
	printram("done\n");

	// Set comp2
	comp2 = get_COMP2(ctrl->tCK);
	MCHBAR32(0x3714) = comp2;
	printram("COMP2 done\n");

	// Set comp1
	FOR_ALL_POPULATED_CHANNELS {
		reg = MCHBAR32(0x1810 + channel * 0x100);	//ch0
		reg = (reg & ~0xe00) | (1 << 9);	//odt
		reg = (reg & ~0xe00000) | (1 << 21);	//clk drive up
		reg = (reg & ~0x38000000) | (1 << 27);	//ctl drive up
		MCHBAR32(0x1810 + channel * 0x100) = reg;
	}
	printram("COMP1 done\n");

	printram("FORCE RCOMP and wait 20us...");
	MCHBAR32(0x5f08) |= 0x100;
	udelay(20);
	printram("done\n");
}

static void save_timings(ramctr_timing *ctrl)
{
	/* Save the MRC S3 restore data to cbmem */
	store_current_mrc_cache(ctrl, sizeof(*ctrl));
}

static int try_init_dram_ddr3(ramctr_timing *ctrl, int fast_boot,
			      int s3_resume, int me_uma_size)
{
	int err;

	printk(BIOS_DEBUG, "Starting RAM training (%d).\n", fast_boot);

	if (!fast_boot) {
		/* Find fastest common supported parameters */
		dram_find_common_params(ctrl);

		dram_dimm_mapping(ctrl);
	}

	/* Set MCU frequency */
	dram_freq(ctrl);

	if (!fast_boot) {
		/* Calculate timings */
		dram_timing(ctrl);
	}

	/* Set version register */
	MCHBAR32(0x5034) = 0xC04EB002;

	/* Enable crossover */
	dram_xover(ctrl);

	/* Set timing and refresh registers */
	dram_timing_regs(ctrl);

	/* Power mode preset */
	MCHBAR32(0x4e80) = 0x5500;

	/* Set scheduler parameters */
	MCHBAR32(0x4c20) = 0x10100005;

	/* Set CPU specific register */
	set_4f8c();

	/* Clear IO reset bit */
	MCHBAR32(0x5030) &= ~0x20;

	/* Set MAD-DIMM registers */
	dram_dimm_set_mapping(ctrl);
	printk(BIOS_DEBUG, "Done dimm mapping\n");

	/* Zone config */
	dram_zones(ctrl, 1);

	/* Set memory map */
	dram_memorymap(ctrl, me_uma_size);
	printk(BIOS_DEBUG, "Done memory map\n");

	/* Set IO registers */
	dram_ioregs(ctrl);
	printk(BIOS_DEBUG, "Done io registers\n");

	udelay(1);
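
	/*
	 * With valid cached timings the saved training results are simply
	 * reprogrammed; otherwise run the full JEDEC reset, MRS and training
	 * sequence below.
	 */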
	if (fast_boot) {
		restore_timings(ctrl);
	} else {
		/* Do jedec ddr3 reset sequence */
		dram_jedecreset(ctrl);
		printk(BIOS_DEBUG, "Done jedec reset\n");

		/* MRS commands */
		dram_mrscommands(ctrl);
		printk(BIOS_DEBUG, "Done MRS commands\n");

		/* Prepare for memory training */
		prepare_training(ctrl);

		err = read_training(ctrl);
		if (err)
			return err;

		err = write_training(ctrl);
		if (err)
			return err;

		printram("CP5a\n");

		err = discover_edges(ctrl);
		if (err)
			return err;

		printram("CP5b\n");

		err = command_training(ctrl);
		if (err)
			return err;

		printram("CP5c\n");

		err = discover_edges_write(ctrl);
		if (err)
			return err;

		err = discover_timC_write(ctrl);
		if (err)
			return err;

		normalize_training(ctrl);
	}

	set_4008c(ctrl);

	write_controller_mr(ctrl);

	if (!s3_resume) {
		err = channel_test(ctrl);
		if (err)
			return err;
	}

	return 0;
}

static void init_dram_ddr3(int mobile, int min_tck, int s3resume)
{
	int me_uma_size;
	int cbmem_was_inited;
	ramctr_timing ctrl;
	int fast_boot;
	spd_raw_data spds[4];
	struct mrc_data_container *mrc_cache;
	ramctr_timing *ctrl_cached;
	int err;

	MCHBAR32(0x5f00) |= 1;

	report_platform_info();

	/* Wait for ME to be ready */
	intel_early_me_init();
	me_uma_size = intel_early_me_uma_size();

	printk(BIOS_DEBUG, "Starting native Platform init\n");

	u32 reg_5d10;

	wait_txt_clear();

	wrmsr(0x000002e6, (msr_t) { .lo = 0, .hi = 0 });

	reg_5d10 = read32(DEFAULT_MCHBAR + 0x5d10);	// !!! = 0x00000000
	if ((pci_read_config16(SOUTHBRIDGE, 0xa2) & 0xa0) == 0x20	/* 0x0004 */
	    && reg_5d10 && !s3resume) {
		write32(DEFAULT_MCHBAR + 0x5d10, 0);
		/* Need reset. */
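		/* RST_CNT (I/O port 0xcf9): 0x06 requests a hard (CPU + system) reset. */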
		outb(0x6, 0xcf9);

		halt();
	}

	early_pch_init_native();
	early_thermal_init();

	/* try to find timings in MRC cache */
	mrc_cache = find_current_mrc_cache();
	if (!mrc_cache || (mrc_cache->mrc_data_size < sizeof(ctrl))) {
		if (s3resume) {
			/* Failed S3 resume, reset to come up cleanly */
			outb(0x6, 0xcf9);
			halt();
		}
		ctrl_cached = NULL;
	} else {
		ctrl_cached = (ramctr_timing *)mrc_cache->mrc_data;
	}

	/* verify MRC cache for fast boot */
	if (!s3resume && ctrl_cached) {
		/* Load SPD unique information data. */
		memset(spds, 0, sizeof(spds));
		mainboard_get_spd(spds, 1);

		/* check SPD CRC16 to make sure the DIMMs haven't been replaced */
		fast_boot = verify_crc16_spds_ddr3(spds, ctrl_cached);
		if (!fast_boot)
			printk(BIOS_DEBUG, "Stored timings CRC16 mismatch.\n");
	} else {
		fast_boot = s3resume;
	}

	if (fast_boot) {
		printk(BIOS_DEBUG, "Trying stored timings.\n");
		memcpy(&ctrl, ctrl_cached, sizeof(ctrl));

		err = try_init_dram_ddr3(&ctrl, fast_boot, s3resume, me_uma_size);
		if (err) {
			if (s3resume) {
				/* Failed S3 resume, reset to come up cleanly */
				outb(0x6, 0xcf9);
				halt();
			}
			/* no need to erase bad mrc cache here, it gets overwritten on
			 * successful boot. */
			printk(BIOS_ERR, "Stored timings are invalid!\n");
			fast_boot = 0;
		}
	}
	if (!fast_boot) {
		/* Reset internal state */
		memset(&ctrl, 0, sizeof(ctrl));
		ctrl.mobile = mobile;
		ctrl.tCK = min_tck;

		/* Get DDR3 SPD data */
		memset(spds, 0, sizeof(spds));
		mainboard_get_spd(spds, 0);
		dram_find_spds_ddr3(spds, &ctrl);

		err = try_init_dram_ddr3(&ctrl, fast_boot, s3resume, me_uma_size);
	}

	if (err) {
		/* fallback: disable failing channel */
		printk(BIOS_ERR, "RAM training failed, trying fallback.\n");
		printram("Disable failing channel.\n");

		/* Reset internal state */
		memset(&ctrl, 0, sizeof(ctrl));
		ctrl.mobile = mobile;
		ctrl.tCK = min_tck;

		/* Reset DDR3 frequency */
		dram_find_spds_ddr3(spds, &ctrl);

		/* disable failing channel */
		disable_channel(&ctrl, GET_ERR_CHANNEL(err));

		err = try_init_dram_ddr3(&ctrl, fast_boot, s3resume, me_uma_size);
	}

	if (err)
		die("raminit failed");

	/* FIXME: should be hardware revision-dependent. */
	write32(DEFAULT_MCHBAR + 0x5024, 0x00a030ce);

	set_scrambling_seed(&ctrl);

	set_42a0(&ctrl);

	final_registers(&ctrl);

	/* Zone config */
	dram_zones(&ctrl, 0);

	if (!fast_boot)
		quick_ram_check();

	intel_early_me_status();
	intel_early_me_init_done(ME_INIT_STATUS_SUCCESS);
	intel_early_me_status();

	report_memory_config();

	cbmem_was_inited = !cbmem_recovery(s3resume);
	if (!fast_boot)
		save_timings(&ctrl);
	if (s3resume && !cbmem_was_inited) {
		/* Failed S3 resume, reset to come up cleanly */
		outb(0x6, 0xcf9);
		halt();
	}

	fill_smbios17(&ctrl);
}

void perform_raminit(int s3resume)
{
	post_code(0x3a);

	timestamp_add_now(TS_BEFORE_INITRAM);

	init_dram_ddr3(1, get_mem_min_tck(), s3resume);
}