rockchip/rk3399: Clean up comments in sdram.c
src/soc/rockchip/rk3399/sdram.c
/*
 * This file is part of the coreboot project.
 *
 * Copyright 2016 Rockchip Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include <arch/io.h>
#include <console/console.h>
#include <delay.h>
#include <soc/addressmap.h>
#include <soc/clock.h>
#include <soc/sdram.h>
#include <soc/grf.h>
#include <soc/soc.h>
#include <string.h>
#include <types.h>

#define DDR_PI_OFFSET 0x800
#define DDR_PHY_OFFSET 0x2000
#define DDRC0_PI_BASE_ADDR (DDRC0_BASE_ADDR + DDR_PI_OFFSET)
#define DDRC0_PHY_BASE_ADDR (DDRC0_BASE_ADDR + DDR_PHY_OFFSET)
#define DDRC1_PI_BASE_ADDR (DDRC1_BASE_ADDR + DDR_PI_OFFSET)
#define DDRC1_PHY_BASE_ADDR (DDRC1_BASE_ADDR + DDR_PHY_OFFSET)

static struct rk3399_ddr_pctl_regs * const rk3399_ddr_pctl[2] = {
        (void *)DDRC0_BASE_ADDR, (void *)DDRC1_BASE_ADDR };
static struct rk3399_ddr_pi_regs * const rk3399_ddr_pi[2] = {
        (void *)DDRC0_PI_BASE_ADDR, (void *)DDRC1_PI_BASE_ADDR };
static struct rk3399_ddr_publ_regs * const rk3399_ddr_publ[2] = {
        (void *)DDRC0_PHY_BASE_ADDR, (void *)DDRC1_PHY_BASE_ADDR };
static struct rk3399_msch_regs * const rk3399_msch[2] = {
        (void *)SERVER_MSCH0_BASE_ADDR, (void *)SERVER_MSCH1_BASE_ADDR };

/*
 * sys_reg bitfield struct
 * [31]    row_3_4_ch1
 * [30]    row_3_4_ch0
 * [29:28] chinfo
 * [27]    rank_ch1
 * [26:25] col_ch1
 * [24]    bk_ch1
 * [23:22] cs0_row_ch1
 * [21:20] cs1_row_ch1
 * [19:18] bw_ch1
 * [17:16] dbw_ch1
 * [15:13] ddrtype
 * [12]    channelnum
 * [11]    rank_ch0
 * [10:9]  col_ch0
 * [8]     bk_ch0
 * [7:6]   cs0_row_ch0
 * [5:4]   cs1_row_ch0
 * [3:2]   bw_ch0
 * [1:0]   dbw_ch0
 */
#define SYS_REG_ENC_ROW_3_4(n, ch) ((n) << (30 + (ch)))
#define SYS_REG_DEC_ROW_3_4(n, ch) ((n >> (30 + ch)) & 0x1)
#define SYS_REG_ENC_CHINFO(ch) (1 << (28 + (ch)))
#define SYS_REG_ENC_DDRTYPE(n) ((n) << 13)
#define SYS_REG_ENC_NUM_CH(n) (((n) - 1) << 12)
#define SYS_REG_DEC_NUM_CH(n) (1 + ((n >> 12) & 0x1))
#define SYS_REG_ENC_RANK(n, ch) (((n) - 1) << (11 + ((ch) * 16)))
#define SYS_REG_DEC_RANK(n, ch) (1 + ((n >> (11 + 16 * ch)) & 0x1))
#define SYS_REG_ENC_COL(n, ch) (((n) - 9) << (9 + ((ch) * 16)))
#define SYS_REG_DEC_COL(n, ch) (9 + ((n >> (9 + 16 * ch)) & 0x3))
#define SYS_REG_ENC_BK(n, ch) (((n) == 3 ? 0 : 1) \
                                << (8 + ((ch) * 16)))
#define SYS_REG_DEC_BK(n, ch) (3 - ((n >> (8 + 16 * ch)) & 0x1))
#define SYS_REG_ENC_CS0_ROW(n, ch) (((n) - 13) << (6 + ((ch) * 16)))
#define SYS_REG_DEC_CS0_ROW(n, ch) (13 + ((n >> (6 + 16 * ch)) & 0x3))
#define SYS_REG_ENC_CS1_ROW(n, ch) (((n) - 13) << (4 + ((ch) * 16)))
#define SYS_REG_DEC_CS1_ROW(n, ch) (13 + ((n >> (4 + 16 * ch)) & 0x3))
#define SYS_REG_ENC_BW(n, ch) ((2 >> (n)) << (2 + ((ch) * 16)))
#define SYS_REG_DEC_BW(n, ch) (2 >> ((n >> (2 + 16 * ch)) & 0x3))
#define SYS_REG_ENC_DBW(n, ch) ((2 >> (n)) << (0 + ((ch) * 16)))
#define SYS_REG_DEC_DBW(n, ch) (2 >> ((n >> (0 + 16 * ch)) & 0x3))

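/*
 * Worked example (illustrative values only): a dual-rank, 32-bit wide
 * LPDDR3 channel 0 with 10 column bits, 3 bank bits and 15 cs0 row bits
 * would be encoded as
 *   SYS_REG_ENC_DDRTYPE(LPDDR3) | SYS_REG_ENC_NUM_CH(1) |
 *   SYS_REG_ENC_CHINFO(0) | SYS_REG_ENC_RANK(2, 0) |
 *   SYS_REG_ENC_COL(10, 0) | SYS_REG_ENC_BK(3, 0) |
 *   SYS_REG_ENC_CS0_ROW(15, 0) | SYS_REG_ENC_BW(2, 0)
 * and can be unpacked again with the matching SYS_REG_DEC_* macros.
 */
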
#define DDR_STRIDE(n) write32(&rk3399_pmusgrf->soc_con4,\
                              (0x1F << (10 + 16)) | (n << 10))

#define PRESET_SGRF_HOLD(n) ((0x1 << (6+16)) | ((n) << 6))
#define PRESET_GPIO0_HOLD(n) ((0x1 << (7+16)) | ((n) << 7))
#define PRESET_GPIO1_HOLD(n) ((0x1 << (8+16)) | ((n) << 8))

#define PHY_DRV_ODT_Hi_Z (0x0)
#define PHY_DRV_ODT_240 (0x1)
#define PHY_DRV_ODT_120 (0x8)
#define PHY_DRV_ODT_80 (0x9)
#define PHY_DRV_ODT_60 (0xc)
#define PHY_DRV_ODT_48 (0xd)
#define PHY_DRV_ODT_40 (0xe)
#define PHY_DRV_ODT_34_3 (0xf)

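/*
 * Copy n bytes of 32-bit register values from src into the memory-mapped
 * registers at dest.
 */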
static void copy_to_reg(u32 *dest, u32 *src, u32 n)
{
        int i;

        for (i = 0; i < n / sizeof(u32); i++) {
                write32(dest, *src);
                src++;
                dest++;
        }
}

static void ddr_move_to_access_state(u32 channel)
{
}

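/*
 * Enable or disable DLL bypass (software master mode) for the data and
 * address/control slices based on frequency: at 125MHz and below the
 * bypass bits are set (the DLLs presumably cannot lock at such low
 * clocks); above that they are cleared so the DLLs run normally.
 */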
static void phy_dll_bypass_set(u32 channel,
        struct rk3399_ddr_publ_regs *ddr_publ_regs, u32 freq)
{
        if (freq <= 125*MHz) {
                /* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
                setbits_le32(&ddr_publ_regs->denali_phy[86],
                             (0x3 << 2) << 8);
                setbits_le32(&ddr_publ_regs->denali_phy[214],
                             (0x3 << 2) << 8);
                setbits_le32(&ddr_publ_regs->denali_phy[342],
                             (0x3 << 2) << 8);
                setbits_le32(&ddr_publ_regs->denali_phy[470],
                             (0x3 << 2) << 8);

                /* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
                setbits_le32(&ddr_publ_regs->denali_phy[547],
                             (0x3 << 2) << 16);
                setbits_le32(&ddr_publ_regs->denali_phy[675],
                             (0x3 << 2) << 16);
                setbits_le32(&ddr_publ_regs->denali_phy[803],
                             (0x3 << 2) << 16);
        } else {
                /* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
                clrbits_le32(&ddr_publ_regs->denali_phy[86],
                             (0x3 << 2) << 8);
                clrbits_le32(&ddr_publ_regs->denali_phy[214],
                             (0x3 << 2) << 8);
                clrbits_le32(&ddr_publ_regs->denali_phy[342],
                             (0x3 << 2) << 8);
                clrbits_le32(&ddr_publ_regs->denali_phy[470],
                             (0x3 << 2) << 8);

                /* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
                clrbits_le32(&ddr_publ_regs->denali_phy[547],
                             (0x3 << 2) << 16);
                clrbits_le32(&ddr_publ_regs->denali_phy[675],
                             (0x3 << 2) << 16);
                clrbits_le32(&ddr_publ_regs->denali_phy[803],
                             (0x3 << 2) << 16);
        }
}

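/*
 * Program the controller and PI address map (column/bank/row widths and
 * chip-select map) for one channel from the per-channel geometry in
 * sdram_params.
 */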
static void set_memory_map(u32 channel,
                           const struct rk3399_sdram_params *sdram_params)
{
        struct rk3399_ddr_pctl_regs *ddr_pctl_regs =
                rk3399_ddr_pctl[channel];
        u32 cs_map;
        u32 reduc;
        struct rk3399_ddr_pi_regs *ddr_pi_regs =
                rk3399_ddr_pi[channel];

        cs_map = (sdram_params->ch[channel].rank > 1) ? 3 : 1;
        reduc = (sdram_params->ch[channel].bw == 2) ? 0 : 1;

        clrsetbits_le32(&ddr_pctl_regs->denali_ctl[191], 0xF,
                        (12 - sdram_params->ch[channel].col));

        clrsetbits_le32(&ddr_pctl_regs->denali_ctl[190],
                        (0x3 << 16) | (0x7 << 24),
                        ((3 - sdram_params->ch[channel].bk) << 16) |
                        ((16 - sdram_params->ch[channel].cs0_row) << 24));

        clrsetbits_le32(&ddr_pctl_regs->denali_ctl[196],
                        0x3 | (1 << 16), cs_map | (reduc << 16));

        /* PI_199 PI_COL_DIFF:RW:0:4 */
        clrsetbits_le32(&ddr_pi_regs->denali_pi[199],
                        0xF, (12 - sdram_params->ch[channel].col));

        /* PI_155 PI_ROW_DIFF:RW:24:3 PI_BANK_DIFF:RW:16:2 */
        clrsetbits_le32(&ddr_pi_regs->denali_pi[155],
                        (0x3 << 16) | (0x7 << 24),
                        ((3 - sdram_params->ch[channel].bk) << 16) |
                        ((16 - sdram_params->ch[channel].cs0_row) << 24));
        /* PI_41 PI_CS_MAP:RW:24:4 */
        clrsetbits_le32(&ddr_pi_regs->denali_pi[41],
                        0xf << 24, cs_map << 24);
        if ((sdram_params->ch[channel].rank == 1) &&
            (sdram_params->dramtype == DDR3))
                write32(&ddr_pi_regs->denali_pi[34], 0x2EC7FFFF);
}

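/*
 * Configure pad drive strength and on-die termination: pick the tsel
 * pull-up/pull-down codes per DRAM type, then write them (and the
 * corresponding enables) into the DQ/DQS/address/clock pad registers.
 */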
static void set_ds_odt(u32 channel,
                       const struct rk3399_sdram_params *sdram_params)
{
        struct rk3399_ddr_publ_regs *ddr_publ_regs = rk3399_ddr_publ[channel];

        u32 tsel_idle_en, tsel_wr_en, tsel_rd_en;
        u32 tsel_idle_select_p, tsel_wr_select_p, tsel_rd_select_p;
        u32 tsel_idle_select_n, tsel_wr_select_n, tsel_rd_select_n;
        u32 reg_value;

        if (sdram_params->dramtype == LPDDR4) {
                tsel_rd_select_p = PHY_DRV_ODT_Hi_Z;
                tsel_wr_select_p = PHY_DRV_ODT_40;
                tsel_idle_select_p = PHY_DRV_ODT_Hi_Z;

                tsel_rd_select_n = PHY_DRV_ODT_240;
                tsel_wr_select_n = PHY_DRV_ODT_40;
                tsel_idle_select_n = PHY_DRV_ODT_240;
        } else if (sdram_params->dramtype == LPDDR3) {
                tsel_rd_select_p = PHY_DRV_ODT_240;
                tsel_wr_select_p = PHY_DRV_ODT_34_3;
                tsel_idle_select_p = PHY_DRV_ODT_240;

                tsel_rd_select_n = PHY_DRV_ODT_Hi_Z;
                tsel_wr_select_n = PHY_DRV_ODT_34_3;
                tsel_idle_select_n = PHY_DRV_ODT_Hi_Z;
        } else {
                tsel_rd_select_p = PHY_DRV_ODT_240;
                tsel_wr_select_p = PHY_DRV_ODT_34_3;
                tsel_idle_select_p = PHY_DRV_ODT_240;

                tsel_rd_select_n = PHY_DRV_ODT_240;
                tsel_wr_select_n = PHY_DRV_ODT_34_3;
                tsel_idle_select_n = PHY_DRV_ODT_240;
        }

        if (sdram_params->odt == 1)
                tsel_rd_en = 1;
        else
                tsel_rd_en = 0;

        tsel_wr_en = 0;
        tsel_idle_en = 0;

        /*
         * phy_dq_tsel_select_X 24bits DENALI_PHY_6/134/262/390 offset_0
         * sets termination values for read/idle cycles and drive strength
         * for write cycles for DQ/DM
         */
        reg_value = tsel_rd_select_n | (tsel_rd_select_p << 0x4) |
                    (tsel_wr_select_n << 8) | (tsel_wr_select_p << 12) |
                    (tsel_idle_select_n << 16) | (tsel_idle_select_p << 20);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[6], 0xffffff, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[134], 0xffffff, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[262], 0xffffff, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[390], 0xffffff, reg_value);
        /*
         * phy_dqs_tsel_select_X 24bits DENALI_PHY_7/135/263/391 offset_0
         * sets termination values for read/idle cycles and drive strength
         * for write cycles for DQS
         */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[7], 0xffffff, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[135], 0xffffff, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[263], 0xffffff, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[391], 0xffffff, reg_value);

        /* phy_adr_tsel_select_ 8bits DENALI_PHY_544/672/800 offset_0 */
        reg_value = tsel_wr_select_n | (tsel_wr_select_p << 0x4);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[544], 0xff, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[672], 0xff, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[800], 0xff, reg_value);

        /* phy_pad_addr_drive 8bits DENALI_PHY_928 offset_0 */
        clrsetbits_le32((&ddr_publ_regs->denali_phy[928]), 0xff, reg_value);

        /* phy_pad_rst_drive 8bits DENALI_PHY_937 offset_0 */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[937], 0xff, reg_value);

        /* phy_pad_cke_drive 8bits DENALI_PHY_935 offset_0 */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[935], 0xff, reg_value);

        /* phy_pad_cs_drive 8bits DENALI_PHY_939 offset_0 */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[939], 0xff, reg_value);

        /* phy_pad_clk_drive 8bits DENALI_PHY_929 offset_0 */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[929], 0xff, reg_value);

        /* phy_pad_fdbk_drive 23bit DENALI_PHY_924/925 */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[924], 0xff,
                        tsel_wr_select_n | (tsel_wr_select_p << 4));
        clrsetbits_le32(&ddr_publ_regs->denali_phy[925], 0xff,
                        tsel_rd_select_n | (tsel_rd_select_p << 4));

        /* phy_dq_tsel_enable_X 3bits DENALI_PHY_5/133/261/389 offset_16 */
        reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
                    << 16;
        clrsetbits_le32(&ddr_publ_regs->denali_phy[5], 0x7 << 16, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[133], 0x7 << 16, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[261], 0x7 << 16, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[389], 0x7 << 16, reg_value);

        /* phy_dqs_tsel_enable_X 3bits DENALI_PHY_6/134/262/390 offset_24 */
        reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
                    << 24;
        clrsetbits_le32(&ddr_publ_regs->denali_phy[6], 0x7 << 24, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[134], 0x7 << 24, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[262], 0x7 << 24, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[390], 0x7 << 24, reg_value);

        /* phy_adr_tsel_enable_ 1bit DENALI_PHY_518/646/774 offset_8 */
        reg_value = tsel_wr_en << 8;
        clrsetbits_le32(&ddr_publ_regs->denali_phy[518], 0x1 << 8, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[646], 0x1 << 8, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[774], 0x1 << 8, reg_value);

        /* phy_pad_addr_term tsel 1bit DENALI_PHY_933 offset_17 */
        reg_value = tsel_wr_en << 17;
        clrsetbits_le32((&ddr_publ_regs->denali_phy[933]), 0x1 << 17,
                        reg_value);
        /*
         * pad_rst/cke/cs/clk_term tsel 1bits
         * DENALI_PHY_938/936/940/934 offset_17
         */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[938], 0x1 << 17, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[936], 0x1 << 17, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[940], 0x1 << 17, reg_value);
        clrsetbits_le32(&ddr_publ_regs->denali_phy[934], 0x1 << 17, reg_value);

        /* phy_pad_fdbk_term 1bit DENALI_PHY_930 offset_17 */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[930], 0x1 << 17, reg_value);
}

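/*
 * Program the PHY pad Vref controls, the pad I/O mode (per DRAM type) and
 * the pad speed setting (derived from ddr_freq) for the feedback, data,
 * DQS, address, clock, CKE, reset and CS pads.
 */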
static void phy_io_config(u32 channel,
                          const struct rk3399_sdram_params *sdram_params)
{
        struct rk3399_ddr_publ_regs *ddr_publ_regs = rk3399_ddr_publ[channel];
        u32 vref_mode, vref_value;
        u32 mode_sel = 0;
        u32 speed;
        u32 reg_value;

        /* vref setting */
        if (sdram_params->dramtype == LPDDR4)
                vref_mode = 0x6;
        else if (sdram_params->dramtype == LPDDR3)
                vref_mode = 0x2;
        else if (sdram_params->dramtype == DDR3)
                vref_mode = 0x1;
        else
                die("Halting: Unknown DRAM type.\n");
        vref_value = 0x1f;

        reg_value = (vref_mode << 9) | (0x1 << 8) | vref_value;
        /* PHY_913 PHY_PAD_VREF_CTRL_DQ_0 12bits offset_8 */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[913], 0xfff << 8,
                        reg_value << 8);
        /* PHY_914 PHY_PAD_VREF_CTRL_DQ_1 12bits offset_0 */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[914], 0xfff, reg_value);
        /* PHY_914 PHY_PAD_VREF_CTRL_DQ_2 12bits offset_16 */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[914], 0xfff << 16,
                        reg_value << 16);
        /* PHY_915 PHY_PAD_VREF_CTRL_DQ_3 12bits offset_0 */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[915], 0xfff, reg_value);
        /* PHY_915 PHY_PAD_VREF_CTRL_AC 12bits offset_16 */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[915], 0xfff << 16,
                        reg_value << 16);

        if (sdram_params->dramtype == LPDDR4)
                mode_sel = 0x6;
        else if (sdram_params->dramtype == LPDDR3)
                mode_sel = 0x0;
        else if (sdram_params->dramtype == DDR3)
                mode_sel = 0x1;

        /* PHY_924 PHY_PAD_FDBK_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[924], 0x7 << 15,
                        mode_sel << 15);
        /* PHY_926 PHY_PAD_DATA_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[926], 0x7 << 6,
                        mode_sel << 6);
        /* PHY_927 PHY_PAD_DQS_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[927], 0x7 << 6,
                        mode_sel << 6);
        /* PHY_928 PHY_PAD_ADDR_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[928], 0x7 << 14,
                        mode_sel << 14);
        /* PHY_929 PHY_PAD_CLK_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[929], 0x7 << 14,
                        mode_sel << 14);
        /* PHY_935 PHY_PAD_CKE_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[935], 0x7 << 14,
                        mode_sel << 14);
        /* PHY_937 PHY_PAD_RST_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[937], 0x7 << 14,
                        mode_sel << 14);
        /* PHY_939 PHY_PAD_CS_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[939], 0x7 << 14,
                        mode_sel << 14);

        if (sdram_params->ddr_freq < 400 * MHz)
                speed = 0x0;
        else if (sdram_params->ddr_freq < 800 * MHz)
                speed = 0x1;
        else if (sdram_params->ddr_freq < 1200 * MHz)
                speed = 0x2;
        else
                die("Halting: Unknown DRAM speed.\n");

        /* PHY_924 PHY_PAD_FDBK_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[924], 0x3 << 21,
                        speed << 21);
        /* PHY_926 PHY_PAD_DATA_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[926], 0x3 << 9,
                        speed << 9);
        /* PHY_927 PHY_PAD_DQS_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[927], 0x3 << 9,
                        speed << 9);
        /* PHY_928 PHY_PAD_ADDR_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[928], 0x3 << 17,
                        speed << 17);
        /* PHY_929 PHY_PAD_CLK_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[929], 0x3 << 17,
                        speed << 17);
        /* PHY_935 PHY_PAD_CKE_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[935], 0x3 << 17,
                        speed << 17);
        /* PHY_937 PHY_PAD_RST_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[937], 0x3 << 17,
                        speed << 17);
        /* PHY_939 PHY_PAD_CS_DRIVE */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[939], 0x3 << 17,
                        speed << 17);
}

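/*
 * Load the controller, PI and PHY register sets for one channel from
 * sdram_params, start the controller and PI, wait for the PHY status
 * bits in PHY_920/921/922, apply the drive/ODT and pad I/O settings,
 * then wait for controller initialization to complete.
 */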
static void pctl_cfg(u32 channel,
                     const struct rk3399_sdram_params *sdram_params)
{
        struct rk3399_ddr_pctl_regs *ddr_pctl_regs = rk3399_ddr_pctl[channel];
        struct rk3399_ddr_pi_regs *ddr_pi_regs = rk3399_ddr_pi[channel];
        struct rk3399_ddr_publ_regs *ddr_publ_regs = rk3399_ddr_publ[channel];
        u32 tmp, tmp1, tmp2;
        u32 pwrup_srefresh_exit;

        /*
         * work around controller bug:
         * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
         */
        copy_to_reg(&ddr_pctl_regs->denali_ctl[1],
                    (u32 *)&sdram_params->pctl_regs.denali_ctl[1],
                    sizeof(struct rk3399_ddr_pctl_regs) - 4);
        write32(&ddr_pctl_regs->denali_ctl[0],
                sdram_params->pctl_regs.denali_ctl[0]);
        copy_to_reg((u32 *)ddr_pi_regs,
                    (u32 *)&sdram_params->pi_regs.denali_pi[0],
                    sizeof(struct rk3399_ddr_pi_regs));
        /* rank count needs to be set for init */
        set_memory_map(channel, sdram_params);

        write32(&ddr_publ_regs->denali_phy[910], 0x6400);
        write32(&ddr_publ_regs->denali_phy[911], 0x01221102);
        write32(&ddr_publ_regs->denali_phy[912], 0x0);
        pwrup_srefresh_exit = read32(&ddr_pctl_regs->denali_ctl[68]) &
                              PWRUP_SREFRESH_EXIT;
        clrbits_le32(&ddr_pctl_regs->denali_ctl[68], PWRUP_SREFRESH_EXIT);

        /* PHY_DLL_RST_EN */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[957],
                        0x3 << 24, 1 << 24);

        setbits_le32(&ddr_pi_regs->denali_pi[0], START);
        setbits_le32(&ddr_pctl_regs->denali_ctl[0], START);

        while (1) {
                tmp = read32(&ddr_publ_regs->denali_phy[920]);
                tmp1 = read32(&ddr_publ_regs->denali_phy[921]);
                tmp2 = read32(&ddr_publ_regs->denali_phy[922]);
                if ((((tmp >> 16) & 0x1) == 0x1) &&
                    (((tmp1 >> 16) & 0x1) == 0x1) &&
                    (((tmp1 >> 0) & 0x1) == 0x1) &&
                    (((tmp2 >> 0) & 0x1) == 0x1))
                        break;
        }

        copy_to_reg((u32 *)&ddr_publ_regs->denali_phy[896],
                    (u32 *)&sdram_params->phy_regs.denali_phy[896],
                    (958 - 895) * 4);
        copy_to_reg((u32 *)&ddr_publ_regs->denali_phy[0],
                    (u32 *)&sdram_params->phy_regs.denali_phy[0],
                    (90 - 0 + 1) * 4);
        copy_to_reg((u32 *)&ddr_publ_regs->denali_phy[128],
                    (u32 *)&sdram_params->phy_regs.denali_phy[128],
                    (218 - 128 + 1) * 4);
        copy_to_reg((u32 *)&ddr_publ_regs->denali_phy[256],
                    (u32 *)&sdram_params->phy_regs.denali_phy[256],
                    (346 - 256 + 1) * 4);
        copy_to_reg((u32 *)&ddr_publ_regs->denali_phy[384],
                    (u32 *)&sdram_params->phy_regs.denali_phy[384],
                    (474 - 384 + 1) * 4);
        copy_to_reg((u32 *)&ddr_publ_regs->denali_phy[512],
                    (u32 *)&sdram_params->phy_regs.denali_phy[512],
                    (549 - 512 + 1) * 4);
        copy_to_reg((u32 *)&ddr_publ_regs->denali_phy[640],
                    (u32 *)&sdram_params->phy_regs.denali_phy[640],
                    (677 - 640 + 1) * 4);
        copy_to_reg((u32 *)&ddr_publ_regs->denali_phy[768],
                    (u32 *)&sdram_params->phy_regs.denali_phy[768],
                    (805 - 768 + 1) * 4);
        set_ds_odt(channel, sdram_params);

        /* phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_84/212/340/468 offset_8 */
        /* dqs_tsel_wr_end[7:4] add Half cycle */
        tmp = (read32(&ddr_publ_regs->denali_phy[84]) >> 8) & 0xff;
        clrsetbits_le32((&ddr_publ_regs->denali_phy[84]),
                        0xff << 8, (tmp + 0x10) << 8);
        tmp = (read32(&ddr_publ_regs->denali_phy[212]) >> 8) & 0xff;
        clrsetbits_le32(&ddr_publ_regs->denali_phy[212],
                        0xff << 8, (tmp + 0x10) << 8);
        tmp = (read32(&ddr_publ_regs->denali_phy[340]) >> 8) & 0xff;
        clrsetbits_le32(&ddr_publ_regs->denali_phy[340],
                        0xff << 8, (tmp + 0x10) << 8);
        tmp = (read32(&ddr_publ_regs->denali_phy[468]) >> 8) & 0xff;
        clrsetbits_le32(&ddr_publ_regs->denali_phy[468],
                        0xff << 8, (tmp + 0x10) << 8);

        /*
         * phy_dq_tsel_wr_timing_X 8bits DENALI_PHY_83/211/339/467 offset_8
         * dq_tsel_wr_end[7:4] add Half cycle
         */
        tmp = (read32(&ddr_publ_regs->denali_phy[83]) >> 16) & 0xff;
        clrsetbits_le32(&ddr_publ_regs->denali_phy[83],
                        0xff << 16, (tmp + 0x10) << 16);
        tmp = (read32(&ddr_publ_regs->denali_phy[211]) >> 16) & 0xff;
        clrsetbits_le32(&ddr_publ_regs->denali_phy[211],
                        0xff << 16, (tmp + 0x10) << 16);
        tmp = (read32(&ddr_publ_regs->denali_phy[339]) >> 16) & 0xff;
        clrsetbits_le32(&ddr_publ_regs->denali_phy[339],
                        0xff << 16, (tmp + 0x10) << 16);
        tmp = (read32(&ddr_publ_regs->denali_phy[467]) >> 16) & 0xff;
        clrsetbits_le32(&ddr_publ_regs->denali_phy[467],
                        0xff << 16, (tmp + 0x10) << 16);

        phy_io_config(channel, sdram_params);

        /* PHY_DLL_RST_EN */
        clrsetbits_le32(&ddr_publ_regs->denali_phy[957],
                        0x3 << 24, 0x2 << 24);

        /*
         * FIXME:
         * need to take care of the ERROR bit
         */
        while (!(read32(&ddr_pctl_regs->denali_ctl[203]) & (1 << 3)))
                ;
        clrsetbits_le32(&ddr_pctl_regs->denali_ctl[68],
                        PWRUP_SREFRESH_EXIT,
                        pwrup_srefresh_exit);
}

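/*
 * If per-CS training is enabled (PHY_84[16]), point the per-CS training
 * index of every data slice at the given rank before running a training
 * step for it.
 */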
static void select_per_cs_training_index(u32 channel, u32 rank)
{
        struct rk3399_ddr_publ_regs *ddr_publ_regs = rk3399_ddr_publ[channel];

        /* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
        if ((read32(&ddr_publ_regs->denali_phy[84]) >> 16) & 1) {
                /*
                 * PHY_8/136/264/392
                 * phy_per_cs_training_index_X 1bit offset_24
                 */
                clrsetbits_le32(&ddr_publ_regs->denali_phy[8],
                                0x1 << 24, rank << 24);
                clrsetbits_le32(&ddr_publ_regs->denali_phy[136],
                                0x1 << 24, rank << 24);
                clrsetbits_le32(&ddr_publ_regs->denali_phy[264],
                                0x1 << 24, rank << 24);
                clrsetbits_le32(&ddr_publ_regs->denali_phy[392],
                                0x1 << 24, rank << 24);
        }
}

/*
 * After write leveling for all ranks, check the PHY_CLK_WRDQS_SLAVE_DELAY
 * results. If the two ranks in one slice both meet
 * "0x200 - PHY_CLK_WRDQS_SLAVE_DELAY < 0x20 or
 * 0x200 - PHY_CLK_WRDQS_SLAVE_DELAY > 0x1E0",
 * enable PHY_WRLVL_EARLY_FORCE_ZERO for this slice and trigger write
 * leveling again. Otherwise no additional write leveling is required.
 */
static void check_write_leveling_value(u32 channel,
                                       const struct rk3399_sdram_params
                                       *sdram_params)
{
        struct rk3399_ddr_pi_regs *ddr_pi_regs = rk3399_ddr_pi[channel];
        struct rk3399_ddr_publ_regs *ddr_publ_regs = rk3399_ddr_publ[channel];
        u32 i, tmp;
        u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
        u32 wl_value[2][4];
        u32 rank = sdram_params->ch[channel].rank;

        for (i = 0; i < rank; i++) {
                /* PHY_8/136/264/392 phy_per_cs_training_index_X 1bit offset_24 */
                clrsetbits_le32(&ddr_publ_regs->denali_phy[8], 0x1 << 24,
                                i << 24);
                clrsetbits_le32(&ddr_publ_regs->denali_phy[136], 0x1 << 24,
                                i << 24);
                clrsetbits_le32(&ddr_publ_regs->denali_phy[264], 0x1 << 24,
                                i << 24);
                clrsetbits_le32(&ddr_publ_regs->denali_phy[392], 0x1 << 24,
                                i << 24);
                wl_value[i][0] = (read32(&ddr_publ_regs->denali_phy[63]) >>
                                  16) & 0x3ff;
                wl_value[i][1] = (read32(&ddr_publ_regs->denali_phy[191]) >>
                                  16) & 0x3ff;
                wl_value[i][2] = (read32(&ddr_publ_regs->denali_phy[319]) >>
                                  16) & 0x3ff;
                wl_value[i][3] = (read32(&ddr_publ_regs->denali_phy[447]) >>
                                  16) & 0x3ff;
        }

        for (i = 0; i < 4; i++) {
                if (((wl_value[0][i] > 0x1E0) || (wl_value[0][i] < 0x20)) &&
                    ((wl_value[1][i] > 0x1E0) || (wl_value[1][i] < 0x20))) {
                        switch (i) {
                        case 0:
                                setbits_le32(&ddr_publ_regs->denali_phy[79],
                                             0x1 << 16);
                                break;
                        case 1:
                                setbits_le32(&ddr_publ_regs->denali_phy[207],
                                             0x1 << 16);
                                break;
                        case 2:
                                setbits_le32(&ddr_publ_regs->denali_phy[335],
                                             0x1 << 16);
                                break;
                        case 3:
                                setbits_le32(&ddr_publ_regs->denali_phy[463],
                                             0x1 << 16);
                                break;
                        default:
                                break;
                        }
                }
        }

        for (i = 0; i < rank; i++) {

                /* FIXME: denali_phy[463] value is wrong if this delay is missing */
                udelay(100);

                /* PI_60 PI_WRLVL_EN:RW:8:2 */
                clrsetbits_le32(&ddr_pi_regs->denali_pi[60],
                                0x3 << 8,
                                0x2 << 8);
                /* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
                clrsetbits_le32(&ddr_pi_regs->denali_pi[59],
                                (0x1 << 8) | (0x3 << 16),
                                (0x1 << 8) | (i << 16));

                select_per_cs_training_index(channel, i);
                while (1) {
                        /* PI_174 PI_INT_STATUS:RD:8:25 */
                        tmp = read32(&ddr_pi_regs->denali_pi[174]) >> 8;

                        /*
                         * check status obs; if there is an error,
                         * leveling may not complete
                         * PHY_40/168/296/424 phy_wrlvl_status_obs_X:0:13
                         */
                        obs_0 = read32(&ddr_publ_regs->denali_phy[40]);
                        obs_1 = read32(&ddr_publ_regs->denali_phy[168]);
                        obs_2 = read32(&ddr_publ_regs->denali_phy[296]);
                        obs_3 = read32(&ddr_publ_regs->denali_phy[424]);
                        if (((obs_0 >> 12) & 0x1) ||
                            ((obs_1 >> 12) & 0x1) ||
                            ((obs_2 >> 12) & 0x1) ||
                            ((obs_3 >> 12) & 0x1))
                                obs_err = 1;
                        if ((((tmp >> 10) & 0x1) == 0x1) &&
                            (((tmp >> 13) & 0x1) == 0x1) &&
                            (((tmp >> 4) & 0x1) == 0x0) &&
                            (obs_err == 0))
                                break;
                        else if ((((tmp >> 4) & 0x1) == 0x1) ||
                                 (obs_err == 1))
                                printk(BIOS_DEBUG,
                                       "check_write_leveling_value error!!!\n");
                }
                /* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
                write32((&ddr_pi_regs->denali_pi[175]), 0x00003f7c);
        }
}

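/*
 * Run the PI-controlled training sequence for one channel. training_flag
 * selects which steps to run (PI_FULL_TARINING expands to the set
 * supported by the DRAM type): CA training, write leveling, read gate
 * training, read leveling and WDQ leveling. Returns 0 on success, -1 if
 * any step reports an error.
 */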
static int data_training(u32 channel,
                         const struct rk3399_sdram_params *sdram_params,
                         u32 training_flag)
{
        struct rk3399_ddr_pi_regs *ddr_pi_regs = rk3399_ddr_pi[channel];
        struct rk3399_ddr_publ_regs *ddr_publ_regs = rk3399_ddr_publ[channel];
        u32 i, tmp;
        u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
        u32 rank = sdram_params->ch[channel].rank;

        /* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
        setbits_le32(&ddr_publ_regs->denali_phy[927], (1 << 22));

        if (training_flag == PI_FULL_TARINING) {
                if (sdram_params->dramtype == LPDDR4) {
                        training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
                                        PI_READ_GATE_TRAINING |
                                        PI_READ_LEVELING |
                                        PI_WDQ_LEVELING;
                } else if (sdram_params->dramtype == LPDDR3) {
                        training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
                                        PI_READ_GATE_TRAINING |
                                        PI_READ_LEVELING;
                } else if (sdram_params->dramtype == DDR3) {
                        training_flag = PI_WRITE_LEVELING |
                                        PI_READ_GATE_TRAINING |
                                        PI_READ_LEVELING;
                }
        }

        /* CA training (supported by LPDDR4 and LPDDR3) */
        if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING) {
                for (i = 0; i < rank; i++) {
                        /* PI_100 PI_CALVL_EN:RW:8:2 */
                        clrsetbits_le32(&ddr_pi_regs->denali_pi[100],
                                        0x3 << 8,
                                        0x2 << 8);
                        /* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
                        clrsetbits_le32(&ddr_pi_regs->denali_pi[92],
                                        (0x1 << 16) | (0x3 << 24),
                                        (0x1 << 16) | (i << 24));

                        select_per_cs_training_index(channel, i);
                        while (1) {
                                /* PI_174 PI_INT_STATUS:RD:8:18 */
                                tmp = read32(&ddr_pi_regs->denali_pi[174]) >> 8;
                                /*
                                 * check status obs
                                 * PHY_532/660/789 phy_adr_calvl_obs1_:0:32
                                 */
                                obs_0 = read32(&ddr_publ_regs->denali_phy[532]);
                                obs_1 = read32(&ddr_publ_regs->denali_phy[660]);
                                obs_2 = read32(&ddr_publ_regs->denali_phy[789]);
                                if (((obs_0 >> 30) & 0x3) ||
                                    ((obs_1 >> 30) & 0x3) ||
                                    ((obs_2 >> 30) & 0x3))
                                        obs_err = 1;
                                if ((((tmp >> 11) & 0x1) == 0x1) &&
                                    (((tmp >> 13) & 0x1) == 0x1) &&
                                    (((tmp >> 5) & 0x1) == 0x0) &&
                                    (obs_err == 0))
                                        break;
                                else if ((((tmp >> 5) & 0x1) == 0x1) ||
                                         (obs_err == 1))
                                        return -1;
                        }
                        /* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
                        write32((&ddr_pi_regs->denali_pi[175]), 0x00003f7c);
                }
        }

        /* write leveling (supported by LPDDR4, LPDDR3 and DDR3) */
        if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING) {
                for (i = 0; i < rank; i++) {
                        /* PI_60 PI_WRLVL_EN:RW:8:2 */
                        clrsetbits_le32(&ddr_pi_regs->denali_pi[60],
                                        0x3 << 8,
                                        0x2 << 8);
                        /* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
                        clrsetbits_le32(&ddr_pi_regs->denali_pi[59],
                                        (0x1 << 8) | (0x3 << 16),
                                        (0x1 << 8) | (i << 16));

                        select_per_cs_training_index(channel, i);
                        while (1) {
                                /* PI_174 PI_INT_STATUS:RD:8:18 */
                                tmp = read32(&ddr_pi_regs->denali_pi[174]) >> 8;

                                /*
                                 * check status obs; if there is an error,
                                 * leveling may not complete
                                 * PHY_40/168/296/424
                                 * phy_wrlvl_status_obs_X:0:13
                                 */
                                obs_0 = read32(&ddr_publ_regs->denali_phy[40]);
                                obs_1 = read32(&ddr_publ_regs->denali_phy[168]);
                                obs_2 = read32(&ddr_publ_regs->denali_phy[296]);
                                obs_3 = read32(&ddr_publ_regs->denali_phy[424]);
                                if (((obs_0 >> 12) & 0x1) ||
                                    ((obs_1 >> 12) & 0x1) ||
                                    ((obs_2 >> 12) & 0x1) ||
                                    ((obs_3 >> 12) & 0x1))
                                        obs_err = 1;
                                if ((((tmp >> 10) & 0x1) == 0x1) &&
                                    (((tmp >> 13) & 0x1) == 0x1) &&
                                    (((tmp >> 4) & 0x1) == 0x0) &&
                                    (obs_err == 0)) {
                                        if ((rank == 2) && (i == 1))
                                                check_write_leveling_value
                                                        (channel, sdram_params);
                                        break;
                                } else if ((((tmp >> 4) & 0x1) == 0x1) ||
                                           (obs_err == 1))
                                        return -1;
                        }
                        /* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
                        write32((&ddr_pi_regs->denali_pi[175]), 0x00003f7c);
                }
        }

        /* read gate training (supported by LPDDR4, LPDDR3 and DDR3) */
        if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING) {
                for (i = 0; i < rank; i++) {
                        /* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
                        clrsetbits_le32(&ddr_pi_regs->denali_pi[80],
                                        0x3 << 24,
                                        0x2 << 24);
                        /*
                         * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
                         * PI_RDLVL_CS:RW:24:2
                         */
                        clrsetbits_le32(&ddr_pi_regs->denali_pi[74],
                                        (0x1 << 16) | (0x3 << 24),
                                        (0x1 << 16) | (i << 24));

                        select_per_cs_training_index(channel, i);
                        while (1) {
                                /* PI_174 PI_INT_STATUS:RD:8:18 */
                                tmp = read32(&ddr_pi_regs->denali_pi[174]) >> 8;
                                /*
                                 * check status obs
                                 * PHY_43/171/299/427
                                 * PHY_GTLVL_STATUS_OBS_x:16:8
                                 */
                                obs_0 = read32(&ddr_publ_regs->denali_phy[43]);
                                obs_1 = read32(&ddr_publ_regs->denali_phy[171]);
                                obs_2 = read32(&ddr_publ_regs->denali_phy[299]);
                                obs_3 = read32(&ddr_publ_regs->denali_phy[427]);
                                if (((obs_0 >> (16 + 6)) & 0x3) ||
                                    ((obs_1 >> (16 + 6)) & 0x3) ||
                                    ((obs_2 >> (16 + 6)) & 0x3) ||
                                    ((obs_3 >> (16 + 6)) & 0x3))
                                        obs_err = 1;
                                if ((((tmp >> 9) & 0x1) == 0x1) &&
                                    (((tmp >> 13) & 0x1) == 0x1) &&
                                    (((tmp >> 3) & 0x1) == 0x0) &&
                                    (obs_err == 0))
                                        break;
                                else if ((((tmp >> 3) & 0x1) == 0x1) ||
                                         (obs_err == 1))
                                        return -1;
                        }
                        /* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
                        write32((&ddr_pi_regs->denali_pi[175]), 0x00003f7c);
                }
        }

        /* read leveling (supported by LPDDR4, LPDDR3 and DDR3) */
        if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING) {
                for (i = 0; i < rank; i++) {
                        /* PI_80 PI_RDLVL_EN:RW:16:2 */
                        clrsetbits_le32(&ddr_pi_regs->denali_pi[80],
                                        0x3 << 16,
                                        0x2 << 16);
                        /* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
                        clrsetbits_le32(&ddr_pi_regs->denali_pi[74],
                                        (0x1 << 8) | (0x3 << 24),
                                        (0x1 << 8) | (i << 24));

                        select_per_cs_training_index(channel, i);
                        while (1) {
                                /* PI_174 PI_INT_STATUS:RD:8:18 */
                                tmp = read32(&ddr_pi_regs->denali_pi[174]) >> 8;
                                /*
                                 * make sure status obs does not report an
                                 * error bit
                                 * PHY_46/174/302/430
                                 * phy_rdlvl_status_obs_X:16:8
                                 */
                                if ((((tmp >> 8) & 0x1) == 0x1) &&
                                    (((tmp >> 13) & 0x1) == 0x1) &&
                                    (((tmp >> 2) & 0x1) == 0x0))
                                        break;
                                else if (((tmp >> 2) & 0x1) == 0x1)
                                        return -1;
                        }
                        /* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
                        write32((&ddr_pi_regs->denali_pi[175]), 0x00003f7c);
                }
        }

        /* WDQ leveling (supported by LPDDR4) */
        if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING) {
                for (i = 0; i < rank; i++) {
                        /*
                         * disable PI_WDQLVL_VREF_EN before wdq leveling?
                         * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
                         */
                        clrbits_le32(&ddr_pi_regs->denali_pi[181], 0x1 << 8);
                        /* PI_124 PI_WDQLVL_EN:RW:16:2 */
                        clrsetbits_le32(&ddr_pi_regs->denali_pi[124],
                                        0x3 << 16,
                                        0x2 << 16);
                        /* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
                        clrsetbits_le32(&ddr_pi_regs->denali_pi[121],
                                        (0x1 << 8) | (0x3 << 16),
                                        (0x1 << 8) | (i << 16));

                        select_per_cs_training_index(channel, i);
                        while (1) {
                                /* PI_174 PI_INT_STATUS:RD:8:18 */
                                tmp = read32(&ddr_pi_regs->denali_pi[174]) >> 8;
                                if ((((tmp >> 12) & 0x1) == 0x1) &&
                                    (((tmp >> 13) & 0x1) == 0x1) &&
                                    (((tmp >> 6) & 0x1) == 0x0))
                                        break;
                                else if (((tmp >> 6) & 0x1) == 0x1)
                                        return -1;
                        }
                        /* clear interrupt, PI_175 PI_INT_ACK:WR:0:17 */
                        write32((&ddr_pi_regs->denali_pi[175]), 0x00003f7c);
                }
        }

        /* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
        clrbits_le32(&ddr_publ_regs->denali_phy[927], (1 << 22));

        return 0;
}

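/*
 * Tell the memory scheduler (MSCH) about this channel: the ddrconfig
 * address-mapping index and the per-CS capacity (in units of 32MB,
 * derived from row/column/bank/width, with the 3/4-row case scaled
 * accordingly).
 */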
static void set_ddrconfig(const struct rk3399_sdram_params *sdram_params,
                          unsigned char channel, u32 ddrconfig)
{
        /* only need to set ddrconfig */
        struct rk3399_msch_regs *ddr_msch_regs = rk3399_msch[channel];
        unsigned int cs0_cap = 0;
        unsigned int cs1_cap = 0;

        cs0_cap = (1 << (sdram_params->ch[channel].cs0_row
                         + sdram_params->ch[channel].col
                         + sdram_params->ch[channel].bk
                         + sdram_params->ch[channel].bw - 20));
        if (sdram_params->ch[channel].rank > 1)
                cs1_cap = cs0_cap >> (sdram_params->ch[channel].cs0_row
                                      - sdram_params->ch[channel].cs1_row);
        if (sdram_params->ch[channel].row_3_4) {
                cs0_cap = cs0_cap * 3 / 4;
                cs1_cap = cs1_cap * 3 / 4;
        }

        write32(&ddr_msch_regs->ddrconf, ddrconfig | (ddrconfig << 6));
        write32(&ddr_msch_regs->ddrsize, ((cs0_cap / 32) & 0xff) |
                (((cs1_cap / 32) & 0xff) << 8));
}

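/*
 * Final system-wide configuration: encode the channel geometry into the
 * sys_reg layout and store it in PMU GRF os_reg2, program the MSCH NoC
 * timings, disable the rank 1 memory clock for single-rank channels, set
 * the channel stride, and set up the reboot hold/reset controls.
 */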
static void dram_all_config(const struct rk3399_sdram_params *sdram_params)
{
        u32 sys_reg = 0;
        unsigned int channel;
        unsigned int use;
        struct rk3399_msch_regs *ddr_msch_regs;
        const struct rk3399_msch_timings *noc_timing;

        sys_reg |= SYS_REG_ENC_DDRTYPE(sdram_params->dramtype);
        sys_reg |= SYS_REG_ENC_NUM_CH(sdram_params->num_channels);
        for (channel = 0, use = 0;
             (use < sdram_params->num_channels) && (channel < 2); channel++) {
                struct rk3399_ddr_pctl_regs *ddr_pctl_regs =
                        rk3399_ddr_pctl[channel];
                const struct rk3399_sdram_channel *info =
                        &sdram_params->ch[channel];
                ddr_msch_regs = rk3399_msch[channel];

                if (sdram_params->ch[channel].col == 0)
                        continue;
                use++;
                sys_reg |= SYS_REG_ENC_ROW_3_4(info->row_3_4, channel);
                sys_reg |= SYS_REG_ENC_CHINFO(channel);
                sys_reg |= SYS_REG_ENC_RANK(info->rank, channel);
                sys_reg |= SYS_REG_ENC_COL(info->col, channel);
                sys_reg |= SYS_REG_ENC_BK(info->bk, channel);
                sys_reg |= SYS_REG_ENC_CS0_ROW(info->cs0_row, channel);
                if (sdram_params->ch[channel].rank > 1)
                        sys_reg |= SYS_REG_ENC_CS1_ROW(info->cs1_row, channel);
                sys_reg |= SYS_REG_ENC_BW(info->bw, channel);
                sys_reg |= SYS_REG_ENC_DBW(info->dbw, channel);

                noc_timing = &sdram_params->ch[channel].noc_timings;
                write32(&ddr_msch_regs->ddrtiminga0.d32,
                        noc_timing->ddrtiminga0.d32);
                write32(&ddr_msch_regs->ddrtimingb0.d32,
                        noc_timing->ddrtimingb0.d32);
                write32(&ddr_msch_regs->ddrtimingc0.d32,
                        noc_timing->ddrtimingc0.d32);
                write32(&ddr_msch_regs->devtodev0.d32,
                        noc_timing->devtodev0.d32);
                write32(&ddr_msch_regs->ddrmode.d32,
                        noc_timing->ddrmode.d32);

                /* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
                if (sdram_params->ch[channel].rank == 1)
                        setbits_le32(&ddr_pctl_regs->denali_ctl[276], 1 << 17);
        }

        write32(&rk3399_pmugrf->os_reg2, sys_reg);
        DDR_STRIDE(sdram_params->stride);

        /* reboot hold register set */
        write32(&pmucru_ptr->pmucru_rstnhold_con[1],
                PRESET_SGRF_HOLD(0) | PRESET_GPIO0_HOLD(1) |
                PRESET_GPIO1_HOLD(1));
        clrsetbits_le32(&cru_ptr->glb_rst_con, 0x3, 0x3);
}

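/*
 * Main SDRAM init entry point: sanity-check the requested frequency,
 * program the DDR clock, then for each populated channel run pctl_cfg(),
 * data training and the MSCH setup before applying the final system-wide
 * configuration.
 */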
void sdram_init(const struct rk3399_sdram_params *sdram_params)
{
        int channel;

        printk(BIOS_INFO, "Starting SDRAM initialization...\n");

        if ((sdram_params->dramtype == DDR3
             && sdram_params->ddr_freq > 800*MHz) ||
            (sdram_params->dramtype == LPDDR3
             && sdram_params->ddr_freq > 928*MHz) ||
            (sdram_params->dramtype == LPDDR4
             && sdram_params->ddr_freq > 800*MHz))
                die("SDRAM frequency is too high!");

        rkclk_configure_ddr(sdram_params->ddr_freq);

        for (channel = 0; channel < 2; channel++) {
                struct rk3399_ddr_publ_regs *ddr_publ_regs =
                        rk3399_ddr_publ[channel];

                phy_dll_bypass_set(channel, ddr_publ_regs,
                                   sdram_params->ddr_freq);

                if (channel >= sdram_params->num_channels)
                        continue;

                pctl_cfg(channel, sdram_params);

                /* LPDDR2/LPDDR3 need to wait for DAI to complete, max 10us */
                if (sdram_params->dramtype == LPDDR3)
                        udelay(10);

                if (data_training(channel, sdram_params, PI_FULL_TARINING))
                        die("SDRAM initialization failed!");

                set_ddrconfig(sdram_params, channel,
                              sdram_params->ch[channel].ddrconfig);
                ddr_move_to_access_state(channel);
        }
        dram_all_config(sdram_params);
        printk(BIOS_INFO, "Finished SDRAM initialization.\n");
}

size_t sdram_size_mb(void)
{
        return CONFIG_DRAM_SIZE_MB;
}