2 ** Copyright 2000, 2001 Broadcom Corporation
5 ** No portions of this material may be reproduced in any form
6 ** without the written permission of:
8 ** Broadcom Corporation
9 ** 5300 California Avenue
10 ** Irvine, California 92617
12 ** All information contained in this document is Broadcom
13 ** Corporation company private proprietary, and trade secret.
15 ** ----------------------------------------------------------
19 ** $Id:: ddr40_phy_init.c 1504 2012-07-18 18:30:39Z gennady $:
20 ** $Rev::file = : Global SVN Revision = 1780 $:
32 #include <ddr40_phy_registers.h>
33 /* detect whether this is 32b phy */
34 #include <ddr40_variant.h>
35 #include <ddr40_phy_init.h>
37 /* uint32_t ddr40_phy_setup_pll(uint32_t speed, ddr40_addr_t offset) */
38 /* speed - DDR clock speed, as number */
39 /* offset - Address of beginning of PHY register space in the chip register space */
42 /* DDR40_PHY_RETURN_OK - PLL has been setup correctly */
43 /* DDR40_PHY_RETURN_PLL_NOLOCK - PLL did not lock within expected time frame. */
/*
 * ddr40_phy_setup_pll() -- program the PHY PLL dividers (NDIV/POST_DIV)
 * according to the requested DDR clock, release the PLL reset and poll
 * PLL_STATUS until the LOCK bit sets or `timeout` expires.
 * Returns DDR40_PHY_RETURN_OK on lock, DDR40_PHY_RETURN_PLL_NOLOCK on
 * timeout (per the comment block above this function).
 *
 * NOTE(review): this copy is a garbled extraction. Original line numbers
 * are fused into the text, and original lines 46, 48, 52, 58, 65-66,
 * 72-73, 78-79, 81-82, 86-91 and 94 are missing -- these held (at least)
 * the opening brace, the `data`/`tmp` declarations, the `if (vco_freq <
 * 500) {` opener, the closing braces / `else {`, the `timeout`
 * initialisation, the poll-loop body and the `if (timeout <= 0)` guard.
 * Code left byte-identical; recover the dropped lines from the upstream
 * ddr40_phy_init.c (SVN rev 1504) before attempting to compile.
 */
44 FUNC_PREFIX
uint32_t ddr40_phy_setup_pll(uint32_t speed
, ddr40_addr_t offset
) FUNC_SUFFIX
47 int vco_freq
, timeout
;
49 DDR40_PHY_Print("offset = 0x%lX\n", offset
);
50 vco_freq
= speed
* 2; /* VCO is bit clock, i.e. twice faster than DDR clock */
51 /* enable div-by-2 post divider for low frequencies */
/* NOTE(review): the `if (vco_freq < 500) {` opener (orig. line 52) is missing here. */
53 DDR40_PHY_Print("VCO_FREQ is %0d which is less than 500 Mhz.\n", vco_freq
);
54 data
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_PLL_DIVIDERS
+ offset
);
55 SET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, PLL_DIVIDERS
, NDIV
, 64);
56 SET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, PLL_DIVIDERS
, POST_DIV
, 4);
57 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_PLL_DIVIDERS
+ offset
, data
);
/* Mid range: VCO in [500, 1000) MHz -> NDIV=32, POST_DIV=2. */
59 else if (vco_freq
< 1000) {
60 DDR40_PHY_Print("VCO_FREQ is %0d which is less than 1Ghz.\n", vco_freq
);
61 data
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_PLL_DIVIDERS
+ offset
);
62 SET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, PLL_DIVIDERS
, NDIV
, 32);
63 SET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, PLL_DIVIDERS
, POST_DIV
, 2);
64 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_PLL_DIVIDERS
+ offset
, data
);
/* NOTE(review): the closing brace / `else {` (orig. lines 65-66) are missing.
 * High range: VCO >= 1 GHz -> NDIV=16, POST_DIV=1. */
67 DDR40_PHY_Print("VCO_FREQ is %0d which is greater than 1Ghz.\n", vco_freq
);
68 data
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_PLL_DIVIDERS
+ offset
);
69 SET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, PLL_DIVIDERS
, NDIV
, 16);
70 SET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, PLL_DIVIDERS
, POST_DIV
, 1);
71 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_PLL_DIVIDERS
+ offset
, data
);
74 /* release PLL reset */
75 data
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_PLL_CONFIG
+ offset
);
76 SET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, PLL_CONFIG
, RESET
, 0);
77 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_PLL_CONFIG
+ offset
, data
);
80 DDR40_PHY_Print("DDR Phy PLL polling for lock \n");
/* NOTE(review): the `timeout` initialisation (orig. lines 81-82) is missing. */
83 tmp
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_PLL_STATUS
+ offset
);
84 while ((timeout
> 0) &&
85 ((tmp
& DDR40_CORE_PHY_CONTROL_REGS_PLL_STATUS_LOCK_MASK
) == 0))
/* NOTE(review): the poll-loop body (decrement/delay, orig. lines 86-87)
 * and the `if (timeout <= 0) {` guard (orig. lines 89-91) are missing;
 * the fatal/return below is the timeout path. */
88 tmp
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_PLL_STATUS
+ offset
);
92 DDR40_PHY_Fatal("ddr40_phy_init.c: Timed out waiting for DDR Phy PLL to lock\n");
93 return (DDR40_PHY_RETURN_PLL_NOLOCK
);
95 DDR40_PHY_Print("DDR Phy PLL locked.\n");
96 return (DDR40_PHY_RETURN_OK
);
99 /* void ddr40_phy_addr_ctl_adjust(uint32_t total_steps, ddr40_addr_t offset) */
100 /* total_steps - Desired delay in steps for addr/ctl adjustment */
101 /* offset - Address beginning of PHY register space in the chip register space */
/*
 * ddr40_phy_addr_ctl_adjust() -- override the address/control VDLs with a
 * delay of `total_steps` steps. Reads VDL_WR_CHAN_CALIB_STATUS; if the
 * write-channel calibration ran in BYTE mode the clock is slow and no
 * adjustment is applied (see inline comment below). Otherwise it forces
 * the BIT VDL override (half the calibrated write-channel total, switched
 * to BYTE mode) and the BYTE VDL override ((total_steps - 10)/2, clamped
 * at 0).
 *
 * NOTE(review): garbled extraction -- the opening brace, local
 * `data`/`tmp` declarations, the early-return body of the BYTE-mode
 * branch and the closing braces (gaps at orig. lines 103-105, 109,
 * 112-113, 121, 126+) are missing. Code left byte-identical.
 */
102 FUNC_PREFIX
void ddr40_phy_addr_ctl_adjust(uint32_t total_steps
, ddr40_addr_t offset
) FUNC_SUFFIX
106 data
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_VDL_WR_CHAN_CALIB_STATUS
+ offset
);
107 if (GET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_WR_CHAN_CALIB_STATUS
,
108 wr_chan_calib_byte_sel
) == 0)
110 /* we don't do adjustment if we are in BYTE mode, because it means the clock
111 * is very slow and no adjustment is needed
/* NOTE(review): the end of the comment above and the branch body/return
 * (orig. lines 112-113) are missing. */
114 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_OVRIDE_BIT_CTL
, ovr_step
,
115 GET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_WR_CHAN_CALIB_STATUS
,
116 wr_chan_calib_total
) >> 4);
117 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_OVRIDE_BIT_CTL
, ovr_en
, 1);
118 /* use BYTE VDLs for adjustment, so switch to BYTE mode */
119 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_OVRIDE_BIT_CTL
, byte_sel
, 1);
120 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_VDL_OVRIDE_BIT_CTL
+ offset
, tmp
);
122 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_OVRIDE_BYTE_CTL
, ovr_step
,
123 (total_steps
> 10) ? (total_steps
- 10)/2 : 0); /* avoid negative */
124 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_OVRIDE_BYTE_CTL
, ovr_en
, 1);
125 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_VDL_OVRIDE_BYTE_CTL
+ offset
, tmp
);
131 /* void ddr40_phy_rd_en_adjust(uint32_t total_steps0, uint32_t total_steps1,
132 * uint32_t rd_en_byte_mode, ddr40_addr_t offset, uint32_t wl_offset)
133 * total_steps0 - Desired delay for byte lane 0 within the word lane in steps
134 * total_steps1 - Desired delay for byte lane 1 within the word lane in steps
135 * rd_en_byte_mode - Byte (pair) mode vs. bit (single) mode for RD_EN VDL
136 * offset - Address beginning of PHY register space in the chip register space
137 * wl_offset - Offset of this word lane relative to word lane 0
/*
 * ddr40_phy_rd_en_adjust() -- override the read-enable VDLs of one word
 * lane. Subtracts a fixed intrinsic delay (8 steps in byte mode, 24 in
 * bit mode) from each requested total, splits the smaller remainder
 * across the common BYTE VDL pair (>>2, clamped) and applies the
 * byte-lane-specific residue to the per-byte BIT VDLs. `wl_offset`
 * selects the word lane by register-offset delta (see header comment
 * above this function).
 *
 * NOTE(review): garbled extraction -- missing lines include the opening
 * brace, the byte_vdl_steps clamp body (orig. 170-171), the `else`
 * between the min-select arms (orig. 165), the actual ovr_step value
 * arguments (orig. 179, 189, 199), the `wl_offset` term of each RegWr
 * address (orig. 181, 191, 201) and the DDR40_PHY_Print argument lists
 * (orig. 183-185, 193-195, 203+). Code left byte-identical.
 */
139 FUNC_PREFIX
void ddr40_phy_rd_en_adjust(uint32_t total_steps0
, uint32_t total_steps1
,
140 uint32_t rd_en_byte_mode
, ddr40_addr_t offset
, uint32_t wl_offset
) FUNC_SUFFIX
142 uint32_t tmp
, bit_vdl_steps0
, bit_vdl_steps1
, byte_vdl_steps
;
143 uint32_t fixed_steps
, adj_steps0
, adj_steps1
;
145 /* Compute new Read Enable VDL settings.
147 * The C0 PHY contains up to 4 VDL's on the read enable path. In normal mode, the
148 * read enable VDL path contains 3 VDL's arranged in a "byte/bit" structure. The
149 * auto-init/override modes enable the 4-VDL path (two common for each byte lane
150 * and two individual for each byte lane). This logic takes that into account when
151 * overriding the VDL settings.
154 fixed_steps
= ((rd_en_byte_mode
) ? 8 : 24);
155 adj_steps0
= ((total_steps0
< fixed_steps
) ? 0 : (total_steps0
- fixed_steps
));
156 adj_steps1
= ((total_steps1
< fixed_steps
) ? 0 : (total_steps1
- fixed_steps
));
158 /* The total number of steps are being applied across 4 VDL's. The smaller 1/4
159 * is applied to the common VDL pair and the remaining 1/4 that is unique to
160 * each byte lane is applied to the byte lane specific VDL pair.
/* Common BYTE VDL gets a quarter of the smaller lane's adjustment. */
163 if (adj_steps0
< adj_steps1
) {
164 byte_vdl_steps
= (adj_steps0
>> 2);
166 byte_vdl_steps
= (adj_steps1
>> 2);
/* NOTE(review): the clamp body for byte_vdl_steps > 63 (orig. 170-171)
 * is missing below. */
169 if (byte_vdl_steps
> 63)
172 bit_vdl_steps0
= (((adj_steps0
- (byte_vdl_steps
<< 1)) >> 1));
173 bit_vdl_steps1
= (((adj_steps1
- (byte_vdl_steps
<< 1)) >> 1));
/* Program the common BYTE read-enable VDL override. */
176 SET_FIELD(tmp
, DDR40_CORE_PHY_WORD_LANE_0
, VDL_OVRIDE_BYTE_RD_EN
, ovr_en
, 1);
177 SET_FIELD(tmp
, DDR40_CORE_PHY_WORD_LANE_0
, VDL_OVRIDE_BYTE_RD_EN
, ovr_force
, 1);
178 SET_FIELD(tmp
, DDR40_CORE_PHY_WORD_LANE_0
, VDL_OVRIDE_BYTE_RD_EN
, ovr_step
,
180 DDR40_PHY_RegWr((DDR40_CORE_PHY_WORD_LANE_0_VDL_OVRIDE_BYTE_RD_EN
+ offset
+
182 DDR40_PHY_Print("ddr40_phy_init:: VDL_OVRIDE_BYTEx_RD_EN set to: 0x%02X (%d)\n",
/* Program byte lane 0's BIT read-enable VDL override. */
186 SET_FIELD(tmp
, DDR40_CORE_PHY_WORD_LANE_0
, VDL_OVRIDE_BYTE0_BIT_RD_EN
, ovr_en
, 1);
187 SET_FIELD(tmp
, DDR40_CORE_PHY_WORD_LANE_0
, VDL_OVRIDE_BYTE0_BIT_RD_EN
, ovr_force
, 1);
188 SET_FIELD(tmp
, DDR40_CORE_PHY_WORD_LANE_0
, VDL_OVRIDE_BYTE0_BIT_RD_EN
, ovr_step
,
190 DDR40_PHY_RegWr((DDR40_CORE_PHY_WORD_LANE_0_VDL_OVRIDE_BYTE0_BIT_RD_EN
+ offset
+
192 DDR40_PHY_Print("ddr40_phy_init:: VDL_OVRIDE_BYTEx_BIT_RD_EN set to: 0x%02X (%d)\n",
/* Program byte lane 1's BIT read-enable VDL override. */
196 SET_FIELD(tmp
, DDR40_CORE_PHY_WORD_LANE_0
, VDL_OVRIDE_BYTE1_BIT_RD_EN
, ovr_en
, 1);
197 SET_FIELD(tmp
, DDR40_CORE_PHY_WORD_LANE_0
, VDL_OVRIDE_BYTE1_BIT_RD_EN
, ovr_force
, 1);
198 SET_FIELD(tmp
, DDR40_CORE_PHY_WORD_LANE_0
, VDL_OVRIDE_BYTE1_BIT_RD_EN
, ovr_step
,
200 DDR40_PHY_RegWr((DDR40_CORE_PHY_WORD_LANE_0_VDL_OVRIDE_BYTE1_BIT_RD_EN
+ offset
+
202 DDR40_PHY_Print("ddr40_phy_init:: VDL_OVRIDE_BYTEx_BIT_RD_EN set to: 0x%02X (%d)\n",
206 #ifdef DDR40_INCLUDE_ECC
207 /* void ddr40_phy_ecc_rd_en_adjust(uint32_t total_steps0, uint32_t rd_en_byte_mode,
208 * ddr40_addr_t offset)
209 * total_steps0 - Desired delay for byte lane 0 within the word lane in steps
210 * rd_en_byte_mode - Byte (pair) mode vs. bit (single) mode for RD_EN VDL
211 * offset - Address beginning of PHY register space in the chip register space
/*
 * ddr40_phy_ecc_rd_en_adjust() -- ECC-lane variant of
 * ddr40_phy_rd_en_adjust(): same fixed-delay subtraction (8 steps in byte
 * mode, 24 in bit mode) and 4-VDL split, but for the single ECC byte lane
 * only. Compiled only under DDR40_INCLUDE_ECC.
 *
 * NOTE(review): garbled extraction -- missing lines include the opening
 * brace, the byte_vdl_steps clamp body (orig. 237), the BIT ovr_step
 * value argument (orig. 252), the DDR40_PHY_Print argument lists
 * (orig. 246-248, 255-256) and the closing brace. Code left
 * byte-identical.
 */
213 FUNC_PREFIX
void ddr40_phy_ecc_rd_en_adjust(uint32_t total_steps0
, uint32_t rd_en_byte_mode
,
214 ddr40_addr_t offset
) FUNC_SUFFIX
216 uint32_t tmp
, bit_vdl_steps0
, byte_vdl_steps
, fixed_steps
, adj_steps0
;
218 /* Compute new Read Enable VDL settings.
220 * The C0 PHY contains up to 4 VDL's on the read enable path. In normal mode, the
221 * read enable VDL path contains 3 VDL's arranged in a "byte/bit" structure. The
222 * auto-init/override modes enable the 4-VDL path (two common for each byte lane
223 * and two individual for each byte lane). This logic takes that into account when
224 * overriding the VDL settings.
227 fixed_steps
= ((rd_en_byte_mode
) ? 8 : 24);
228 adj_steps0
= ((total_steps0
< fixed_steps
) ? 0 : (total_steps0
- fixed_steps
));
230 /* The total number of steps are being applied across 4 VDL's. The smaller 1/4
231 * is applied to the common VDL pair and the remaining 1/4 that is unique to
232 * each byte lane is applied to the byte lane specific VDL pair.
235 byte_vdl_steps
= (adj_steps0
>> 2);
/* NOTE(review): the clamp body for byte_vdl_steps > 63 (orig. 237) is
 * missing below. */
236 if (byte_vdl_steps
> 63)
238 bit_vdl_steps0
= (((adj_steps0
- (byte_vdl_steps
<< 1)) >> 1));
/* Program the common BYTE read-enable VDL override for the ECC lane. */
241 SET_FIELD(tmp
, DDR40_CORE_PHY_ECC_LANE
, VDL_OVRIDE_BYTE_RD_EN
, ovr_en
, 1);
242 SET_FIELD(tmp
, DDR40_CORE_PHY_ECC_LANE
, VDL_OVRIDE_BYTE_RD_EN
, ovr_force
, 1);
243 SET_FIELD(tmp
, DDR40_CORE_PHY_ECC_LANE
, VDL_OVRIDE_BYTE_RD_EN
, ovr_step
, byte_vdl_steps
);
244 DDR40_PHY_RegWr((DDR40_CORE_PHY_ECC_LANE_VDL_OVRIDE_BYTE_RD_EN
+ offset
), tmp
);
245 DDR40_PHY_Print("ddr40_phy_init:: VDL_OVRIDE_BYTEx_RD_EN set to: 0x%02X (%d)\n",
/* Program the ECC lane's BIT read-enable VDL override. */
249 SET_FIELD(tmp
, DDR40_CORE_PHY_ECC_LANE
, VDL_OVRIDE_BYTE_BIT_RD_EN
, ovr_en
, 1);
250 SET_FIELD(tmp
, DDR40_CORE_PHY_ECC_LANE
, VDL_OVRIDE_BYTE_BIT_RD_EN
, ovr_force
, 1);
251 SET_FIELD(tmp
, DDR40_CORE_PHY_ECC_LANE
, VDL_OVRIDE_BYTE_BIT_RD_EN
, ovr_step
,
253 DDR40_PHY_RegWr((DDR40_CORE_PHY_ECC_LANE_VDL_OVRIDE_BYTE_BIT_RD_EN
+ offset
), tmp
);
254 DDR40_PHY_Print("ddr40_phy_init:: VDL_OVRIDE_BYTEx_BIT_RD_EN set to: 0x%02X (%d)\n",
257 #endif /* DDR40_INCLUDE_ECC */
260 /* uint32_t ddr40_phy_vdl_normal(uint32_t vdl_no_lock, ddr40_addr_t offset)
261 * vdl_no_lock - Allow VDL not to lock
262 * offset - Address beginning of PHY register space in the chip register space
265 * DDR40_PHY_RETURN_OK - function finished normally
266 * DDR40_PHY_RETURN_VDL_CALIB_FAIL - VDL calibration failed (timed out)
267 * DDR40_PHY_RETURN_VDL_CALIB_NOLOCK - VDL calibration did not lock
/*
 * ddr40_phy_vdl_normal() -- run the automatic VDL calibration: clear
 * VDL_CALIBRATE, kick off a fast/once/auto calibration, poll
 * VDL_CALIB_STATUS until calib_idle (bounded by `timeout`), then check
 * calib_lock. A missing lock is fatal unless `vdl_no_lock` permits it
 * (then DDR40_PHY_RETURN_VDL_CALIB_NOLOCK is returned). Also reads back
 * the per-channel calib status registers and clears the calibrate
 * register before returning.
 *
 * NOTE(review): garbled extraction -- missing lines include the opening
 * brace, the `tmp`/`cal_steps`/`timeout` declarations and `timeout`
 * initialisation (orig. 274, 278-279, 285-286), the poll-loop braces and
 * decrement (orig. 290, 292, 294), the `if (timeout <= 0)` guard
 * (orig. 295-296), the `if (vdl_no_lock)` split around the two
 * did-not-lock messages (orig. 302-303, 306-308), the Print/Fatal call
 * openers for those messages and the closing braces. Code left
 * byte-identical.
 */
269 FUNC_PREFIX
uint32_t ddr40_phy_vdl_normal(uint32_t vdl_no_lock
, ddr40_addr_t offset
) FUNC_SUFFIX
273 uint32_t return_code
;
275 return_code
= DDR40_PHY_RETURN_OK
;
277 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIBRATE
+ offset
, 0);
280 /* TBD : Talk to Efim. JAL. */
281 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_CALIBRATE
, calib_fast
, 1);
282 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_CALIBRATE
, calib_once
, 1);
283 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_CALIBRATE
, calib_auto
, 1);
284 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIBRATE
+ offset
, tmp
);
/* Poll for calibration idle (completion), bounded by `timeout`. */
287 tmp
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIB_STATUS
+ offset
);
288 while ((timeout
> 0) && (GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
,
289 VDL_CALIB_STATUS
, calib_idle
) == 0))
291 DDR40_PHY_Timeout(1);
293 tmp
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIB_STATUS
+ offset
);
/* NOTE(review): the `if (timeout <= 0)` guard (orig. 295-296) is missing;
 * the fatal/return below is the timeout path. */
297 DDR40_PHY_Fatal("ddr40_phy_init.c: DDR PHY VDL Calibration failed\n");
298 return (DDR40_PHY_RETURN_VDL_CALIB_FAIL
);
301 if (GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_CALIB_STATUS
, calib_lock
) == 0) {
304 "DDR PHY VDL calibration complete but did not lock! Step = %d\n",
305 GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_CALIB_STATUS
,
309 "DDR PHY VDL calibration complete but did not lock! Step = %d\n",
310 GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_CALIB_STATUS
,
312 return_code
= DDR40_PHY_RETURN_VDL_CALIB_NOLOCK
;
/* Read back calibration result; calib_total >> 4 yields the step count. */
316 tmp
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIB_STATUS
+ offset
);
317 cal_steps
= GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_CALIB_STATUS
, calib_total
) >> 4;
319 DDR40_PHY_Print("ddr40_phy_init:: VDL calibration result: 0x%02X (%d)\n", tmp
, cal_steps
);
321 DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_VDL_WR_CHAN_CALIB_STATUS
+ offset
);
322 DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_VDL_RD_EN_CALIB_STATUS
+ offset
);
323 DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_VDL_DQ_CALIB_STATUS
+ offset
);
325 /* clear VDL calib control */
326 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIBRATE
+ offset
, 0);
328 return (return_code
);
331 /* void ddr40_phy_vtt_on(uint32_t connect, uint32_t override, ddr40_addr_t offset)
332 * connect - Mask of the signals connected to the Virtual VTT capacitor
333 * override - Mask of the signals used to control voltage on the Virtual VTT capacitor
334 * offset - Address beginning of PHY register space in the chip register space
/*
 * ddr40_phy_vtt_on() -- enable the Virtual VTT feature: write the
 * `connect` mask to VIRTUAL_VTT_CONNECTIONS, the `override` mask to
 * VIRTUAL_VTT_OVERRIDE, and set enable_ctl_idle in VIRTUAL_VTT_CONTROL
 * (idle detection from RAS/CAS/WE, per the inline comment).
 *
 * NOTE(review): garbled extraction -- the opening brace, the `tmp`
 * declaration, the first Print's argument list (orig. 342) and the
 * closing brace are missing. The "VttSetup onm" text is inside a runtime
 * string and is left untouched (likely "on," in the upstream source --
 * verify against it). Code left byte-identical.
 */
336 FUNC_PREFIX
void ddr40_phy_vtt_on(uint32_t connect
, uint32_t override
,
337 ddr40_addr_t offset
) FUNC_SUFFIX
341 DDR40_PHY_Print("ddr40_phy_init:: Virtual VttSetup onm CONNECT=0x%08X, OVERRIDE=0x%08X\n",
343 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_VIRTUAL_VTT_CONNECTIONS
+ offset
, connect
);
344 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_VIRTUAL_VTT_OVERRIDE
+ offset
, override
);
345 /* use RAS CAS WE to determine idle cycles */
346 tmp
= DDR40_CORE_PHY_CONTROL_REGS_VIRTUAL_VTT_CONTROL_enable_ctl_idle_MASK
;
347 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_VIRTUAL_VTT_CONTROL
+ offset
, tmp
);
348 DDR40_PHY_Print("ddr40_phy_init:: Virtual Vtt Enabled\n");
351 /* uint32_t ddr40_phy_calib_zq(uint32_t params, ddr40_addr_t offset)
352 * params - Set of flags
353 * offset - Address beginning of PHY register space in the chip register space
356 * DDR40_PHY_RETURN_OK - function finished normally
357 * DDR40_PHY_RETURN_ZQ_CALIB_FAIL - ZQ calibration failed (timed out)
/*
 * ddr40_phy_calib_zq() -- run ZQ (pad impedance) calibration. If
 * DDR40_PHY_PARAM_MAX_ZQ is set, drive-strength overrides are forced to
 * maximum (7) and the function returns immediately; otherwise it pulses
 * sample_en 0->1 in ZQ_PVT_COMP_CTL, polls sample_done bounded by
 * `timeout`, and reports the resulting ND/PD compensation values.
 * Returns DDR40_PHY_RETURN_OK or DDR40_PHY_RETURN_ZQ_CALIB_FAIL.
 *
 * NOTE(review): garbled extraction -- missing lines include the opening
 * brace, the `tmp`/`timeout` declarations and initialisations
 * (orig. 360-363, 365, 389-390), the field names of two override-enable
 * SET_FIELDs (orig. 375, 377), the poll-loop braces/decrement
 * (orig. 394, 396, 398), the `if (timeout <= 0)` guard and Fatal opener
 * (orig. 399-401) and closing braces. Code left byte-identical.
 */
359 FUNC_PREFIX
uint32_t ddr40_phy_calib_zq(uint32_t params
, ddr40_addr_t offset
) FUNC_SUFFIX
364 if (params
& DDR40_PHY_PARAM_MAX_ZQ
) {
366 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
,
367 dq_nd_override_val
, 7);
368 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
,
369 dq_pd_override_val
, 7);
370 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
,
371 addr_nd_override_val
, 7);
372 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
,
373 addr_pd_override_val
, 7);
/* NOTE(review): the field-name/value arguments of the next two
 * SET_FIELDs (orig. 375, 377) are missing. */
374 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
,
376 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
,
378 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_ZQ_PVT_COMP_CTL
+ offset
, tmp
);
380 return (DDR40_PHY_RETURN_OK
);
/* Normal path: pulse sample_en low then high to start a ZQ sample. */
384 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, sample_en
, 0);
385 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_ZQ_PVT_COMP_CTL
+ offset
, tmp
);
387 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, sample_en
, 1);
388 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_ZQ_PVT_COMP_CTL
+ offset
, tmp
);
/* Poll for sample_done, bounded by `timeout`. */
391 tmp
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_ZQ_PVT_COMP_CTL
+ offset
);
392 while ((timeout
> 0) &&
393 (GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, sample_done
) == 0))
395 DDR40_PHY_Timeout(1);
397 tmp
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_ZQ_PVT_COMP_CTL
+ offset
);
/* NOTE(review): the timeout guard and the Fatal call opener
 * (orig. 399-401) are missing; the string below is its argument. */
402 "ddr40_phy_init.c: ddr40_phy_init: DDR PHY ZQ Calibration failed\n");
403 return (DDR40_PHY_RETURN_ZQ_CALIB_FAIL
);
406 DDR40_PHY_Print("ddr40_phy_init: ZQ Cal complete. ND_COMP = %d, PD_COMP = %d,"
407 " ND_DONE = %d, PD_DONE = %d\n",
408 GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, nd_comp
),
409 GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, pd_comp
),
410 GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, nd_done
),
411 GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, pd_done
));
413 return (DDR40_PHY_RETURN_OK
);
417 /* void ddr40_phy_force_tmode(ddr40_addr_t offset)
418 * offset - Address beginning of PHY register space in the chip register space
/*
 * ddr40_phy_force_tmode() -- put the PHY into test mode: set the
 * calib_ftm bit in VDL_CALIBRATE, delay, then force mid-scale (5) ZQ
 * drive-strength overrides for both the address and DQ pads.
 *
 * NOTE(review): garbled extraction -- the opening brace, the `tmp`
 * declaration/initialisation (orig. 421-423, 428-429) and the closing
 * brace are missing. Code left byte-identical.
 */
420 FUNC_PREFIX
void ddr40_phy_force_tmode(ddr40_addr_t offset
) FUNC_SUFFIX
424 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIBRATE
+ offset
,
425 DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIBRATE_calib_ftm_MASK
);
427 DDR40_PHY_Timeout(1);
430 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, addr_ovr_en
, 1);
431 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, dq_ovr_en
, 1);
432 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, addr_pd_override_val
, 5);
433 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, addr_nd_override_val
, 5);
434 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, dq_pd_override_val
, 5);
435 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, ZQ_PVT_COMP_CTL
, dq_nd_override_val
, 5);
436 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_ZQ_PVT_COMP_CTL
+ offset
, tmp
);
440 /* void ddr40_phy_rdly_odt(uint32_t speed, uint32_t params, ddr40_addr_t offset)
441 * speed - DDR clock speed
442 * params - Set of flags
443 * offset - Address beginning of PHY register space in the chip register space
/*
 * ddr40_phy_rdly_odt() -- program the read-data delay (speed-dependent
 * `dly` selected per process type / layout build) and the DQ ODT
 * enable/early/late adjust bits in READ_CONTROL, for word lane 0 and --
 * under DDR40_WIDTH_IS_32 / DDR40_INCLUDE_ECC -- word lane 1 and the ECC
 * lane. Two register layouts are supported for backwards compatibility
 * (READ_DATA_DLY present vs. rd_data_dly folded into READ_CONTROL).
 *
 * NOTE(review): garbled extraction -- missing lines include the opening
 * brace, `dly`/`data` declarations, the #if/#else lines pairing the
 * POSTLAYOUT variants (orig. 447-451, 454, 457), the #else of the
 * rd_data_dly_MASK conditional (orig. 470-471), several #endif lines
 * (orig. 467, 484, 487-489) and the closing brace. Code left
 * byte-identical.
 */
445 FUNC_PREFIX
void ddr40_phy_rdly_odt(uint32_t speed
, uint32_t params
,
446 ddr40_addr_t offset
) FUNC_SUFFIX
452 #ifdef DDR40_PROCESS_TYPE_IS_40LP
453 dly
= (speed
<= 400)? 4: (speed
<= 900) ? 5 : 6;
/* NOTE(review): the #else separating the 40LP and non-40LP settings
 * (orig. 454) is missing. */
455 dly
= (speed
<= 400)? 2: (speed
<= 900) ? 3 : 4;
456 #endif /* DDR40_PROCESS_TYPE_IS_40LP */
458 dly
= (speed
<= 400)? 1: (speed
<= 667) ? 2 : 3;
459 #endif /* POSTLAYOUT */
461 /* two versions, for backwards compatibility */
462 #ifdef DDR40_CORE_PHY_WORD_LANE_0_READ_DATA_DLY_rd_data_dly_MASK
463 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_0
, READ_DATA_DLY
, rd_data_dly
, dly
);
464 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_0_READ_DATA_DLY
+ offset
, data
);
465 #ifdef DDR40_WIDTH_IS_32
466 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_1_READ_DATA_DLY
+ offset
, data
);
468 #ifdef DDR40_INCLUDE_ECC
469 DDR40_PHY_RegWr(DDR40_CORE_PHY_ECC_LANE_READ_DATA_DLY
+ offset
, data
);
/* NOTE(review): the #endif/#else lines between here and the alternate
 * rd_data_dly-in-READ_CONTROL path (orig. 470-471) are missing. */
472 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_0
, READ_CONTROL
, rd_data_dly
, dly
);
473 #endif /* DDR40_CORE_PHY_WORD_LANE_0_READ_DATA_DLY_rd_data_dly_MASK */
474 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_0
, READ_CONTROL
, dq_odt_enable
,
475 (params
& (DDR40_PHY_PARAM_DIS_DQS_ODT
| DDR40_PHY_PARAM_DIS_ODT
)) ? 0:1);
476 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_0
, READ_CONTROL
, dq_odt_le_adj
,
477 (params
& DDR40_PHY_PARAM_ODT_EARLY
) ? 1 : 0);
478 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_0
, READ_CONTROL
, dq_odt_te_adj
,
479 (params
& DDR40_PHY_PARAM_ODT_LATE
) ? 1 : 0);
481 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_0_READ_CONTROL
+ offset
, data
);
482 #ifdef DDR40_WIDTH_IS_32
483 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_1_READ_CONTROL
+ offset
, data
);
485 #ifdef DDR40_INCLUDE_ECC
486 DDR40_PHY_RegWr(DDR40_CORE_PHY_ECC_LANE_READ_CONTROL
+ offset
, data
);
490 /* void ddr40_phy_ddr3_misc(uint32_t speed, uint32_t params, ddr40_addr_t offset)
491 * speed - DDR clock speed
492 * params - Set of flags
493 * offset - Address beginning of PHY register space in the chip register space
/*
 * ddr40_phy_ddr3_misc() -- DDR3-specific pad/preamble setup: encode the
 * 2-bit VDDO voltage select from the two VDDO_VOLT param flags, program
 * DRIVE_PAD_CTL (with rt120b_g termination) for the control block and
 * all lanes, delegate read-delay/ODT setup to ddr40_phy_rdly_odt(), and
 * program WR_PREAMBLE_MODE with the DDR3 mode bit (plus the long-preamble
 * bit when requested).
 *
 * NOTE(review): garbled extraction -- the opening brace,
 * `vddo_volts`/`data` declarations (orig. 497-500), `data`
 * initialisations before the SET_FIELD groups (orig. 503-504, 508),
 * the #endif lines for the width/ECC conditionals (orig. 514, 517, 526,
 * 529-530) and the closing brace are missing. Code left byte-identical.
 */
495 FUNC_PREFIX
void ddr40_phy_ddr3_misc(uint32_t speed
, uint32_t params
,
496 ddr40_addr_t offset
) FUNC_SUFFIX
501 vddo_volts
= ((params
& DDR40_PHY_PARAM_VDDO_VOLT_0
)? 1: 0) +
502 ((params
& DDR40_PHY_PARAM_VDDO_VOLT_1
)? 2: 0);
505 SET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, DRIVE_PAD_CTL
, vddo_volts
, vddo_volts
);
506 SET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, DRIVE_PAD_CTL
, rt120b_g
, 1);
507 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_DRIVE_PAD_CTL
+ offset
, data
);
509 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_0
, DRIVE_PAD_CTL
, vddo_volts
, vddo_volts
);
510 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_0
, DRIVE_PAD_CTL
, rt120b_g
, 1);
511 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_0_DRIVE_PAD_CTL
+ offset
, data
);
512 #ifdef DDR40_WIDTH_IS_32
513 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_1_DRIVE_PAD_CTL
+ offset
, data
);
515 #ifdef DDR40_INCLUDE_ECC
516 DDR40_PHY_RegWr(DDR40_CORE_PHY_ECC_LANE_DRIVE_PAD_CTL
+ offset
, data
);
518 ddr40_phy_rdly_odt(speed
, params
, offset
);
520 data
= DDR40_CORE_PHY_WORD_LANE_0_WR_PREAMBLE_MODE_mode_MASK
; /* DDR3 */
521 if (params
& DDR40_PHY_PARAM_LONG_PREAMBLE
)
522 data
|= DDR40_CORE_PHY_WORD_LANE_0_WR_PREAMBLE_MODE_long_MASK
;
523 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_0_WR_PREAMBLE_MODE
+ offset
, data
);
524 #ifdef DDR40_WIDTH_IS_32
525 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_1_WR_PREAMBLE_MODE
+ offset
, data
);
527 #ifdef DDR40_INCLUDE_ECC
528 DDR40_PHY_RegWr(DDR40_CORE_PHY_ECC_LANE_WR_PREAMBLE_MODE
+ offset
, data
);
532 /* void ddr40_phy_ddr2_misc(uint32_t speed, uint32_t params, ddr40_addr_t offset)
533 * speed - DDR clock speed
534 * params - Set of flags
535 * offset - Address beginning of PHY register space in the chip register space
/*
 * ddr40_phy_ddr2_misc() -- DDR2-specific pad/preamble setup; parallels
 * ddr40_phy_ddr3_misc() but does not set rt120b_g termination and leaves
 * the WR_PREAMBLE_MODE mode bit clear (DDR2), setting only the
 * long-preamble bit when requested.
 *
 * NOTE(review): garbled extraction -- the opening brace,
 * `vddo_volts`/`data` declarations (orig. 539-542), `data`
 * initialisations (orig. 545-546, 549), the #endif lines for the
 * width/ECC conditionals (orig. 554, 557, 567, 570-571) and the closing
 * brace are missing. Code left byte-identical.
 */
537 FUNC_PREFIX
void ddr40_phy_ddr2_misc(uint32_t speed
, uint32_t params
,
538 ddr40_addr_t offset
) FUNC_SUFFIX
543 vddo_volts
= ((params
& DDR40_PHY_PARAM_VDDO_VOLT_0
)? 1: 0) +
544 ((params
& DDR40_PHY_PARAM_VDDO_VOLT_1
)? 2: 0);
547 SET_FIELD(data
, DDR40_CORE_PHY_CONTROL_REGS
, DRIVE_PAD_CTL
, vddo_volts
, vddo_volts
);
548 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_DRIVE_PAD_CTL
+ offset
, data
);
550 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_0
, DRIVE_PAD_CTL
, vddo_volts
, vddo_volts
);
551 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_0_DRIVE_PAD_CTL
+ offset
, data
);
552 #ifdef DDR40_WIDTH_IS_32
553 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_1_DRIVE_PAD_CTL
+ offset
, data
);
555 #ifdef DDR40_INCLUDE_ECC
556 DDR40_PHY_RegWr(DDR40_CORE_PHY_ECC_LANE_DRIVE_PAD_CTL
+ offset
, data
);
559 ddr40_phy_rdly_odt(speed
, params
, offset
);
561 data
= (params
& DDR40_PHY_PARAM_LONG_PREAMBLE
) ?
562 DDR40_CORE_PHY_WORD_LANE_0_WR_PREAMBLE_MODE_long_MASK
: 0; /* DDR2 */
564 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_0_WR_PREAMBLE_MODE
+ offset
, data
);
565 #ifdef DDR40_WIDTH_IS_32
566 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_1_WR_PREAMBLE_MODE
+ offset
, data
);
568 #ifdef DDR40_INCLUDE_ECC
569 DDR40_PHY_RegWr(DDR40_CORE_PHY_ECC_LANE_WR_PREAMBLE_MODE
+ offset
, data
);
573 /* void ddr40_phy_autoidle_on(uint32_t params, ddr40_addr_t offset) */
574 /* params - Set of flags */
575 /* offset - Address beginning of PHY register space in the chip register space */
/*
 * ddr40_phy_set_autoidle() -- program automatic pad idle power-down:
 * encode 2-bit IDDQ and RXENB mode codes from the four AUTO_* param
 * flags (bit0 = VALID-gated, bit1 = CMD-gated) and write them into
 * IDLE_PAD_CONTROL of word lane 0 and, when built for them, word lane 1
 * and the ECC lane.
 *
 * NOTE(review): garbled extraction -- the opening brace, the
 * `iddq_code`/`rxenb_code`/`data` declarations (orig. 577-581, 586),
 * the #endif lines of the width/ECC conditionals (orig. 601, 609-611)
 * and the closing brace are missing. Code left byte-identical.
 */
576 FUNC_PREFIX
void ddr40_phy_set_autoidle(uint32_t params
, ddr40_addr_t offset
) FUNC_SUFFIX
582 iddq_code
= ((params
& DDR40_PHY_PARAM_AUTO_IDDQ_VALID
)? 1:0) +
583 ((params
& DDR40_PHY_PARAM_AUTO_IDDQ_CMD
)? 2:0);
584 rxenb_code
= ((params
& DDR40_PHY_PARAM_AUTO_RXENB_VALID
)? 1:0) +
585 ((params
& DDR40_PHY_PARAM_AUTO_RXENB_CMD
)? 2:0);
587 data
= DDR40_PHY_RegRd(DDR40_CORE_PHY_WORD_LANE_0_IDLE_PAD_CONTROL
+ offset
);
588 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_0
, IDLE_PAD_CONTROL
,
589 auto_dq_iddq_mode
, iddq_code
);
590 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_0
, IDLE_PAD_CONTROL
,
591 auto_dq_rxenb_mode
, rxenb_code
);
592 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_0_IDLE_PAD_CONTROL
+ offset
, data
);
594 #ifdef DDR40_WIDTH_IS_32
595 data
= DDR40_PHY_RegRd(DDR40_CORE_PHY_WORD_LANE_1_IDLE_PAD_CONTROL
+ offset
);
596 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_1
, IDLE_PAD_CONTROL
,
597 auto_dq_iddq_mode
, iddq_code
);
598 SET_FIELD(data
, DDR40_CORE_PHY_WORD_LANE_1
, IDLE_PAD_CONTROL
,
599 auto_dq_rxenb_mode
, rxenb_code
);
600 DDR40_PHY_RegWr(DDR40_CORE_PHY_WORD_LANE_1_IDLE_PAD_CONTROL
+ offset
, data
);
602 #ifdef DDR40_INCLUDE_ECC
603 data
= DDR40_PHY_RegRd(DDR40_CORE_PHY_ECC_LANE_IDLE_PAD_CONTROL
+ offset
);
604 SET_FIELD(data
, DDR40_CORE_PHY_ECC_LANE
, IDLE_PAD_CONTROL
,
605 auto_dq_iddq_mode
, iddq_code
);
606 SET_FIELD(data
, DDR40_CORE_PHY_ECC_LANE
, IDLE_PAD_CONTROL
,
607 auto_dq_rxenb_mode
, rxenb_code
);
608 DDR40_PHY_RegWr(DDR40_CORE_PHY_ECC_LANE_IDLE_PAD_CONTROL
+ offset
, data
);
613 /* uint32_t ddr40_phy_init(uint32_t ddr_clk, uint32_t params, int ddr_type,
614 * uint32_t * wire_dly, uint32_t connect, uint32_t override, ddr40_addr_t offset)
615 * ddr_clk - DDR clock speed, as number (Important: DDR clock speed in MHz, not a bit rate).
616 * params - Set of flags
617 * ddr_type - 0: DDR2, 1: DDR3
618 * wire_dly - Array of wire delays for all byte lanes (CLK/DQS roundtrip in ps)
619 * connect - Mask of the signals connected to the Virtual VTT capacitor
620 * (see RDB for specific bits)
621 * override - Mask of the signals used to control voltage on the Virtual VTT capacitor
622 * (see RDB for specific bits)
623 * offset - Address of beginning of PHY register space in the chip register space
627 * DDR40_PHY_RETURN_OK - PHY has been setup correctly
628 * DDR40_PHY_RETURN_PLL_NOLOCK - PLL did not lock within expected time frame.
629 * DDR40_PHY_RETURN_VDL_CALIB_FAIL - VDL calibration failed (timed out)
630 * DDR40_PHY_RETURN_VDL_CALIB_NOLOCK - VDL calibration did not lock
631 * DDR40_PHY_RETURN_ZQ_CALIB_FAIL - ZQ calibration failed (timed out)
632 * DDR40_PHY_RETURN_RDEN_CALIB_FAIL - RD_EN calibration failed (timed out)
633 * DDR40_PHY_RETURN_RDEN_CALIB_NOLOCK - RD_EN calibration did not lock
634 * DDR40_PHY_RETURN_STEP_CALIB_FAIL - Step calibration failed (timed out)
/*
 * ddr40_phy_init() -- top-level PHY bring-up entry point (see the long
 * parameter/return comment block above). Sequence visible here:
 * PLL setup -> automatic VDL calibration -> step-size calibration (with
 * DDR40_PHY_DEFAULT_STEP_SIZE fallback on failure) -> address/control
 * VDL adjust -> optional per-byte-lane read-enable VDL adjust using
 * wire_dly[] -> optional Virtual VTT -> ZQ calibration -> test-mode
 * force -> DDR2/DDR3 misc setup -> auto-idle -> settle delay.
 * Returns DDR40_PHY_RETURN_OK or an OR of the failure codes listed in
 * the header comment.
 *
 * NOTE(review): garbled extraction -- missing lines include the opening
 * brace, declarations of `tmode`, `rd_en_byte_mode`,
 * `rd_en_byte_vdl_steps`, `rd_en_bit_vdl_offset` and `total_steps1`
 * (orig. 647, 649, 651), `timeout` initialisations (orig. 683-684, 686),
 * loop/branch braces (orig. 689, 692-695, 698-699, 704-705), `#endif`
 * lines for CONFIG_DDR_LONG_PREAMBLE / width / ECC (orig. 660-661, 770,
 * 778-780), Print openers whose string arguments remain (orig. 699-700,
 * 705-706, 736-738), the tail of the rd-en status read (orig. 724), the
 * calib_total field name (orig. 708), the `if (tmode)` guard around
 * ddr40_phy_force_tmode (orig. 789-792), the `else` of the DDR3/DDR2
 * split (orig. 799-800) and the emulation-mode conditional around the
 * two Timeout calls (orig. 806-814). Code left byte-identical. Also
 * note (not changed here): orig. line 648 initialises return_code from
 * `(uint32_t)wire_dly` and is immediately overwritten -- it reads like a
 * set-but-unused / pointer-truncation lint suppression; confirm against
 * upstream before touching.
 */
636 FUNC_PREFIX
unsigned int ddr40_phy_init(uint32_t ddr_clk
, uint32_t params
, int ddr_type
,
637 uint32_t *wire_dly
, uint32_t connect
, uint32_t override
, ddr40_addr_t offset
) FUNC_SUFFIX
639 /* This coding style does not work in MIPS boot loader, as it requires that the memory that
640 * holds the offset be initialized before the program starts.
641 * This data segment runs out of SID SRAM and must not be initialize by the compiler,
642 * as this would force a pre-load of memory
643 * Look at the alternative coding style in this file.
645 uint32_t tmp
, ddr_clk_per
, cal_steps
, step_size
, timeout
;
646 uint32_t total_steps0
;
648 uint32_t return_code
= (uint32_t)wire_dly
;
650 return_code
= DDR40_PHY_RETURN_OK
;
652 tmode
= (params
& DDR40_PHY_PARAM_TESTMODE
) ? 1 : 0;
654 /* Emulation mode does NOT init PLL or VDL */
658 #ifdef CONFIG_DDR_LONG_PREAMBLE
659 params
|= DDR40_PHY_PARAM_LONG_PREAMBLE
;
662 if (params
& DDR40_PHY_PARAM_SKIP_PLL_VDL
)
663 return (DDR40_PHY_RETURN_OK
);
666 DDR40_PHY_Print("ddr40_phy_init.c: Configuring DDR Controller PLLs\n");
667 if ((return_code
= ddr40_phy_setup_pll(ddr_clk
, offset
)) != DDR40_PHY_RETURN_OK
)
668 return (return_code
);
670 return_code
|= ddr40_phy_vdl_normal(
671 (params
& DDR40_PHY_PARAM_ALLOW_VDL_NO_LOCK
)? 1 : 0, offset
);
673 /* TBD : I updated this code to support per byte lane trace lengths. The part
674 * where separate trace lengths are specified needs to be added. JAL.
676 /* 'ddr_clk' is the DDR bus clock period in Mhz. Convert to ps. */
677 ddr_clk_per
= ((1000 * 1000) / ddr_clk
);
678 /* Perform a step size calibration. */
680 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIBRATE
+ offset
, tmp
);
681 SET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_CALIBRATE
, calib_steps
, 1);
682 DDR40_PHY_RegWr(DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIBRATE
+ offset
, tmp
);
/* NOTE(review): the `timeout` initialisation (orig. 683-684, 686) is
 * missing before this poll loop. */
685 tmp
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIB_STATUS
+ offset
);
687 while ((timeout
> 0) &&
688 ((tmp
& DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIB_STATUS_calib_idle_MASK
) == 0))
690 DDR40_PHY_Timeout(1);
691 tmp
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIB_STATUS
696 if ((timeout
<= 0) ||
697 ((tmp
& DDR40_CORE_PHY_CONTROL_REGS_VDL_CALIB_STATUS_calib_lock_MASK
) == 0))
/* Failure fallback: assume the default step size and flag the failure. */
700 "ddr40_phy_init.c: DDR PHY step size calibration failed.\n");
701 step_size
= DDR40_PHY_DEFAULT_STEP_SIZE
;
702 cal_steps
= ddr_clk_per
/step_size
;
703 return_code
|= DDR40_PHY_RETURN_STEP_CALIB_FAIL
;
706 "ddr40_phy_init::DDR PHY step size calibration complete.\n");
707 cal_steps
= GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
, VDL_CALIB_STATUS
,
709 step_size
= (ddr_clk_per
/ cal_steps
);
/* Address/control adjust: 2-bit code from the two ADDR_CTL_ADJUST flags,
 * scaled by the calibrated step count / 16. */
712 tmp
= ((params
& DDR40_PHY_PARAM_ADDR_CTL_ADJUST_1
) ? 2 : 0) + ((params
&
713 DDR40_PHY_PARAM_ADDR_CTL_ADJUST_0
) ? 1 : 0);
714 total_steps0
= (cal_steps
>> 4) * tmp
;
716 ddr40_phy_addr_ctl_adjust(total_steps0
, offset
);
719 if (!(params
& DDR40_PHY_PARAM_SKIP_RD_EN_ADJUST
)) {
720 /* The READ enable VDL calibration tracks the DQS setting
721 * (half bit period or full bit period).
723 tmp
= DDR40_PHY_RegRd(DDR40_CORE_PHY_CONTROL_REGS_VDL_RD_EN_CALIB_STATUS
+
725 rd_en_byte_mode
= GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
,
726 VDL_RD_EN_CALIB_STATUS
, rd_en_calib_byte_sel
);
727 rd_en_byte_vdl_steps
= GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
,
728 VDL_RD_EN_CALIB_STATUS
, rd_en_calib_total
) >> 4;
729 rd_en_bit_vdl_offset
= GET_FIELD(tmp
, DDR40_CORE_PHY_CONTROL_REGS
,
730 VDL_RD_EN_CALIB_STATUS
, rd_en_calib_bit_offset
);
732 /* This is where the separate 'wire_dly' values are used to create
733 * per-byte-lane read enable VDL adjusts.
735 tmp
= ((rd_en_byte_vdl_steps
<< rd_en_byte_mode
) + rd_en_bit_vdl_offset
);
737 "ddr40_phy_init:: Calibrated Read Enable VDL Steps = 0x%X (%d)\n",
/* Word lane 0: byte lanes 0/1, wire_dly[0]/[1] converted ps -> steps. */
740 total_steps0
= ((rd_en_byte_vdl_steps
<< rd_en_byte_mode
) +
741 (wire_dly
[0] / step_size
) + rd_en_bit_vdl_offset
);
742 total_steps1
= ((rd_en_byte_vdl_steps
<< rd_en_byte_mode
) +
743 (wire_dly
[1] / step_size
) + rd_en_bit_vdl_offset
);
745 DDR40_PHY_Print("ddr40_phy_init:: BL0 RD_EN adjustment (%d ps): "
746 "Total Steps = 0x%X (%d)\n",
747 wire_dly
[0], total_steps0
, total_steps0
);
748 DDR40_PHY_Print("ddr40_phy_init:: BL1 RD_EN adjustment (%d ps): "
749 "Total Steps = 0x%X (%d)\n",
750 wire_dly
[1], total_steps1
, total_steps1
);
752 ddr40_phy_rd_en_adjust(total_steps0
, total_steps1
, rd_en_byte_mode
,
753 offset
, (DDR40_CORE_PHY_WORD_LANE_0_VDL_OVRIDE_BYTE0_W
-
754 DDR40_CORE_PHY_WORD_LANE_0_VDL_OVRIDE_BYTE0_W
));
756 #ifdef DDR40_WIDTH_IS_32
/* Word lane 1: byte lanes 2/3, wire_dly[2]/[3]. */
757 total_steps0
= ((rd_en_byte_vdl_steps
<< rd_en_byte_mode
) +
758 (wire_dly
[2] / step_size
) + rd_en_bit_vdl_offset
);
759 total_steps1
= ((rd_en_byte_vdl_steps
<< rd_en_byte_mode
) +
760 (wire_dly
[3] / step_size
) + rd_en_bit_vdl_offset
);
761 DDR40_PHY_Print("ddr40_phy_init:: BL2 RD_EN adjustment (%d ps): "
762 "Total Steps = 0x%X (%d)\n",
763 wire_dly
[2], total_steps0
, total_steps0
);
764 DDR40_PHY_Print("ddr40_phy_init:: BL3 RD_EN adjustment (%d ps): "
765 "Total Steps = 0x%X (%d)\n",
766 wire_dly
[3], total_steps1
, total_steps1
);
767 ddr40_phy_rd_en_adjust(total_steps0
, total_steps1
, rd_en_byte_mode
,
768 offset
, (DDR40_CORE_PHY_WORD_LANE_1_VDL_OVRIDE_BYTE0_W
-
769 DDR40_CORE_PHY_WORD_LANE_0_VDL_OVRIDE_BYTE0_W
));
771 #ifdef DDR40_INCLUDE_ECC
/* ECC lane: wire_dly[4]. */
772 total_steps0
= ((rd_en_byte_vdl_steps
<< rd_en_byte_mode
) +
773 (wire_dly
[4] / step_size
) + rd_en_bit_vdl_offset
);
774 DDR40_PHY_Print("ddr40_phy_init:: ECC RD_EN adjustment (%d ps): "
775 "Total Steps = 0x%X (%d)\n",
776 wire_dly
[4], total_steps0
, total_steps0
);
777 ddr40_phy_ecc_rd_en_adjust(total_steps0
, rd_en_byte_mode
, offset
);
781 DDR40_PHY_Print("ddr40_phy_init:: RD_EN VDL adjustment has been skipped\n");
784 if (params
& DDR40_PHY_PARAM_USE_VTT
)
785 ddr40_phy_vtt_on(connect
, override
, offset
);
788 return_code
|= ddr40_phy_calib_zq(params
, offset
);
/* NOTE(review): the `if (tmode)` guard (orig. 789-792) around this call
 * is missing. */
793 ddr40_phy_force_tmode(offset
);
797 if ((ddr_type
== 1)) { /* DDR3 */
798 ddr40_phy_ddr3_misc(ddr_clk
, params
, offset
);
801 /* DDR-2 (type == 0) requires ODT_EARLY. */
802 ddr40_phy_ddr2_misc(ddr_clk
, params
| DDR40_PHY_PARAM_ODT_EARLY
, offset
);
805 ddr40_phy_set_autoidle(params
, offset
);
/* NOTE(review): the conditional selecting between the 1-unit and 500-unit
 * settle delays (orig. 806-814) is missing; both arms appear below. */
808 /* required some delay (really 500 us) from PAD_CTL to CKE enable */
809 DDR40_PHY_Timeout(1);
811 /* required some delay (really 500 us) from PAD_CTL to CKE enable */
812 DDR40_PHY_Timeout(500);
815 DDR40_PHY_Print("DDR Controller PLL Configuration Complete\n");
817 return (return_code
);